/* Statement simplification on GIMPLE.
   Copyright (C) 2010-2021 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.c.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "gimple-pretty-print.h"
#include "gimple-ssa-warn-access.h"
#include "gimple-ssa-warn-restrict.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "gimple-fold.h"
#include "gimple-iterator.h"
#include "tree-into-ssa.h"
#include "tree-object-size.h"
#include "tree-ssa-propagate.h"
#include "ipa-utils.h"
#include "tree-ssa-address.h"
#include "langhooks.h"
#include "gimplify-me.h"
#include "gimple-match.h"
#include "gomp-constants.h"
#include "optabs-query.h"
#include "omp-general.h"
#include "fold-const-call.h"
#include "stringpool.h"
#include "diagnostic-core.h"
#include "tree-vector-builder.h"
#include "tree-ssa-strlen.h"
enum strlen_range_kind
{
  /* Compute the exact constant string length.  */
  SRK_STRLEN,
  /* Compute the maximum constant string length.  */
  SRK_STRLENMAX,
  /* Compute a range of string lengths bounded by object sizes.  When
     the length of a string cannot be determined, consider as the upper
     bound the size of the enclosing object the string may be a member
     or element of.  Also determine the size of the largest character
     array the string may refer to.  */
  SRK_LENRANGE,
  /* Determine the integer value of the argument (not string length).  */
  SRK_INT_VALUE
};
static bool
get_range_strlen (tree, bitmap *, strlen_range_kind,
                  c_strlen_data *, unsigned);
/* Return true when DECL can be referenced from the current unit.
   FROM_DECL (if non-null) specifies the constructor of the variable
   DECL was taken from.
   We can get declarations that are not possible to reference for various
   reasons:

     1) When analyzing C++ virtual tables.
        C++ virtual tables do have known constructors even
        when they are keyed to other compilation units.
        Those tables can contain pointers to methods and vars
        in other units.  Those methods have both STATIC and EXTERNAL
        set.
     2) In WHOPR mode devirtualization might lead to a reference
        to a method that was partitioned elsewhere.
        In this case we have a static VAR_DECL or FUNCTION_DECL
        that has no corresponding callgraph/varpool node
        declaring the body.
     3) COMDAT functions referred to by external vtables that
        we devirtualize only during the final compilation stage.
        At this time we already decided that we will not output
        the function body and thus we can't reference the symbol
        directly.  */

static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  if (DECL_ABSTRACT_P (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || !VAR_OR_FUNCTION_DECL_P (decl))
    return true;

  /* Static objects can be referred only if they are defined and not optimized
     out yet.  */
  if (!TREE_PUBLIC (decl))
    {
      if (DECL_EXTERNAL (decl))
        return false;
      /* Before we start optimizing unreachable code we can be sure all
         static objects are defined.  */
      if (symtab->function_flags_ready)
        return true;
      snode = symtab_node::get (decl);
      if (!snode || !snode->definition)
        return false;
      node = dyn_cast <cgraph_node *> (snode);
      return !node || !node->inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from the initializer of
     an external var or a var that has been optimized out.  */
  if (!from_decl
      || !VAR_P (from_decl)
      || (!DECL_EXTERNAL (from_decl)
          && (vnode = varpool_node::get (from_decl)) != NULL
          && vnode->definition)
      || (flag_ltrans
          && (vnode = varpool_node::get (from_decl)) != NULL
          && vnode->in_other_partition))
    return true;
  /* We are folding a reference from an external vtable.  The vtable may
     refer to a symbol keyed to another compilation unit.  The other
     compilation unit may be in a separate DSO and the symbol may be
     hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
    return false;
  /* When a function is public, we always can introduce a new reference.
     The exceptions are the COMDAT functions where introducing a direct
     reference implies the need to include the function body in the
     current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;

  /* We have COMDAT.  We are going to check if we still have a definition
     or if the definition is going to be output in another partition.
     Bypass this when gimplifying; all needed functions will be produced.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting to not necessarily give up because the copy will be
     output elsewhere when the corresponding vtable is output.
     This is however not possible - the ABI specifies that COMDATs are output
     in units where they are used and when the other unit was compiled with
     LTO it is possible that the vtable was kept public while the function
     itself was privatized.  */
  if (!symtab->function_flags_ready)
    return true;

  snode = symtab_node::get (decl);
  if (!snode
      || ((!snode->definition || DECL_EXTERNAL (decl))
          && (!snode->in_other_partition
              || (!snode->forced_by_abi && !snode->force_output))))
    return false;
  node = dyn_cast <cgraph_node *> (snode);
  return !node || !node->inlined_to;
}
/* Create a temporary for TYPE for a statement STMT.  If the current function
   is in SSA form, a SSA name is created.  Otherwise a temporary register
   is made.  */

static tree
create_tmp_reg_or_ssa_name (tree type, gimple *stmt)
{
  if (gimple_in_ssa_p (cfun))
    return make_ssa_name (type, stmt);
  else
    return create_tmp_reg (type);
}
/* CVAL is a value taken from DECL_INITIAL of a variable.  Try to transform
   it into an acceptable form for is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specifies the variable whose constructor
   contains CVAL.  */

tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  if (CONSTANT_CLASS_P (cval))
    return cval;

  tree orig_cval = cval;
  STRIP_NOPS (cval);
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
        cval = build1_loc (EXPR_LOCATION (cval),
                           ADDR_EXPR, TREE_TYPE (ptr),
                           fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
                                        ptr,
                                        fold_convert (ptr_type_node,
                                                      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
        {
          base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
          if (base)
            TREE_OPERAND (cval, 0) = base;
        }
      else
        base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
        return NULL_TREE;

      if (VAR_OR_FUNCTION_DECL_P (base)
          && !can_refer_decl_in_current_unit_p (base, from_decl))
        return NULL_TREE;
      if (TREE_TYPE (base) == error_mark_node)
        return NULL_TREE;
      if (VAR_P (base))
        /* ??? We should be able to assert that TREE_ADDRESSABLE is set,
           but since the use can be in a debug stmt we can't.  */
        ;
      else if (TREE_CODE (base) == FUNCTION_DECL)
        {
          /* Make sure we create a cgraph node for functions we'll reference.
             They can be non-existent if the reference comes from an entry
             of an external vtable for example.  */
          cgraph_node::get_create (base);
        }
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
        cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
        cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  /* In CONSTRUCTORs we may see unfolded constants like (int (*) ()) 0.  */
  if (TREE_CODE (cval) == INTEGER_CST)
    {
      if (TREE_OVERFLOW_P (cval))
        cval = drop_tree_overflow (cval);
      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
        cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  return orig_cval;
}
/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

tree
get_symbol_constant_value (tree sym)
{
  tree val = ctor_for_folding (sym);
  if (val != error_mark_node)
    {
      if (val)
        {
          val = canonicalize_constructor_val (unshare_expr (val), sym);
          if (val && is_gimple_min_invariant (val))
            return val;
          else
            return NULL_TREE;
        }
      /* Variables declared 'const' without an initializer
         have zero as the initializer if they may not be
         overridden at link or run time.  */
      if (!val
          && is_gimple_reg_type (TREE_TYPE (sym)))
        return build_zero_cst (TREE_TYPE (sym));
    }

  return NULL_TREE;
}
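
/* Illustrative sketch (not part of this unit): for a translation unit
   containing

     static const int answer = 42;
     int f (void) { return answer; }

   ctor_for_folding returns the initializer 42, so a load from ANSWER in
   F's body folds to the constant and the variable can later be elided.  */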
/* Subroutine of fold_stmt.  We perform constant folding of the
   memory reference tree EXPR.  */

static tree
maybe_fold_reference (tree expr)
{
  tree result = NULL_TREE;

  if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
       || TREE_CODE (expr) == REALPART_EXPR
       || TREE_CODE (expr) == IMAGPART_EXPR)
      && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    result = fold_unary_loc (EXPR_LOCATION (expr),
                             TREE_CODE (expr),
                             TREE_TYPE (expr),
                             TREE_OPERAND (expr, 0));
  else if (TREE_CODE (expr) == BIT_FIELD_REF
           && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    result = fold_ternary_loc (EXPR_LOCATION (expr),
                               TREE_CODE (expr),
                               TREE_TYPE (expr),
                               TREE_OPERAND (expr, 0),
                               TREE_OPERAND (expr, 1),
                               TREE_OPERAND (expr, 2));
  else
    result = fold_const_aggregate_ref (expr);

  if (result && is_gimple_min_invariant (result))
    return result;

  return NULL_TREE;
}
/* Return true if EXPR is an acceptable right-hand-side for a
   GIMPLE assignment.  We validate the entire tree, not just
   the root node, thus catching expressions that embed complex
   operands that are not permitted in GIMPLE.  This function
   is needed because the folding routines in fold-const.c
   may return such expressions in some cases, e.g., an array
   access with an embedded index addition.  It may make more
   sense to have folding routines that are sensitive to the
   constraints on GIMPLE operands, rather than abandoning any
   attempt to fold if the usual folding turns out to be too
   aggressive.  */

bool
valid_gimple_rhs_p (tree expr)
{
  enum tree_code code = TREE_CODE (expr);

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:
      if (!is_gimple_variable (expr))
        return false;
      break;

    case tcc_constant:
      /* All constants are ok.  */
      break;

    case tcc_comparison:
      /* GENERIC allows comparisons with non-boolean types, reject
         those for GIMPLE.  Let vector-typed comparisons pass - rules
         for GENERIC and GIMPLE are the same here.  */
      if (!(INTEGRAL_TYPE_P (TREE_TYPE (expr))
            && (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE
                || TYPE_PRECISION (TREE_TYPE (expr)) == 1))
          && ! VECTOR_TYPE_P (TREE_TYPE (expr)))
        return false;

      /* Fallthru.  */
    case tcc_binary:
      if (!is_gimple_val (TREE_OPERAND (expr, 0))
          || !is_gimple_val (TREE_OPERAND (expr, 1)))
        return false;
      break;

    case tcc_unary:
      if (!is_gimple_val (TREE_OPERAND (expr, 0)))
        return false;
      break;

    case tcc_expression:
      switch (code)
        {
        case ADDR_EXPR:
          {
            tree t;
            if (is_gimple_min_invariant (expr))
              return true;
            t = TREE_OPERAND (expr, 0);
            while (handled_component_p (t))
              {
                /* ??? More checks needed, see the GIMPLE verifier.  */
                if ((TREE_CODE (t) == ARRAY_REF
                     || TREE_CODE (t) == ARRAY_RANGE_REF)
                    && !is_gimple_val (TREE_OPERAND (t, 1)))
                  return false;
                t = TREE_OPERAND (t, 0);
              }
            if (!is_gimple_id (t))
              return false;
          }
          break;

        default:
          if (get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS)
            {
              if ((code == COND_EXPR
                   ? !is_gimple_condexpr (TREE_OPERAND (expr, 0))
                   : !is_gimple_val (TREE_OPERAND (expr, 0)))
                  || !is_gimple_val (TREE_OPERAND (expr, 1))
                  || !is_gimple_val (TREE_OPERAND (expr, 2)))
                return false;
              break;
            }
          return false;
        }
      break;

    case tcc_vl_exp:
      return false;

    case tcc_exceptional:
      if (code == CONSTRUCTOR)
        {
          unsigned i;
          tree elt;
          FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (expr), i, elt)
            if (!is_gimple_val (elt))
              return false;
          return true;
        }
      if (code != SSA_NAME)
        return false;
      break;

    case tcc_reference:
      if (code == BIT_FIELD_REF)
        return is_gimple_val (TREE_OPERAND (expr, 0));
      return false;

    default:
      return false;
    }

  return true;
}
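
/* Illustrative sketch (not part of this unit): fold-const.c may hand
   back a tree like

     a[i + 1]

   whose ARRAY_REF index embeds a PLUS_EXPR.  That is fine in GENERIC
   but not a valid GIMPLE rhs, so valid_gimple_rhs_p rejects it and the
   caller keeps the original statement instead.  */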
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple *stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
        tree rhs = gimple_assign_rhs1 (stmt);

        if (TREE_CLOBBER_P (rhs))
          return NULL_TREE;

        if (REFERENCE_CLASS_P (rhs))
          return maybe_fold_reference (rhs);

        else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
          {
            tree val = OBJ_TYPE_REF_EXPR (rhs);
            if (is_gimple_min_invariant (val))
              return val;
            else if (flag_devirtualize && virtual_method_call_p (rhs))
              {
                bool final;
                vec <cgraph_node *>targets
                  = possible_polymorphic_call_targets (rhs, stmt, &final);
                if (final && targets.length () <= 1 && dbg_cnt (devirt))
                  {
                    if (dump_enabled_p ())
                      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
                                       "resolving virtual function address "
                                       "reference to function %s\n",
                                       targets.length () == 1
                                       ? targets[0]->name ()
                                       : "NULL");
                    if (targets.length () == 1)
                      {
                        val = fold_convert (TREE_TYPE (val),
                                            build_fold_addr_expr_loc
                                              (loc, targets[0]->decl));
                        STRIP_USELESS_TYPE_CONVERSION (val);
                      }
                    else
                      /* We cannot use __builtin_unreachable here because it
                         cannot have address taken.  */
                      val = build_int_cst (TREE_TYPE (val), 0);
                    return val;
                  }
              }
          }

        else if (TREE_CODE (rhs) == ADDR_EXPR)
          {
            tree ref = TREE_OPERAND (rhs, 0);
            if (TREE_CODE (ref) == MEM_REF
                && integer_zerop (TREE_OPERAND (ref, 1)))
              {
                result = TREE_OPERAND (ref, 0);
                if (!useless_type_conversion_p (TREE_TYPE (rhs),
                                                TREE_TYPE (result)))
                  result = build1 (NOP_EXPR, TREE_TYPE (rhs), result);
                return result;
              }
          }

        else if (TREE_CODE (rhs) == CONSTRUCTOR
                 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
          {
            /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
            unsigned i;
            tree val;

            FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
              if (! CONSTANT_CLASS_P (val))
                return NULL_TREE;

            return build_vector_from_ctor (TREE_TYPE (rhs),
                                           CONSTRUCTOR_ELTS (rhs));
          }

        else if (DECL_P (rhs)
                 && is_gimple_reg_type (TREE_TYPE (rhs)))
          return get_symbol_constant_value (rhs);
      }
      break;

    case GIMPLE_UNARY_RHS:
      break;

    case GIMPLE_BINARY_RHS:
      break;

    case GIMPLE_TERNARY_RHS:
      result = fold_ternary_loc (loc, subcode,
                                 TREE_TYPE (gimple_assign_lhs (stmt)),
                                 gimple_assign_rhs1 (stmt),
                                 gimple_assign_rhs2 (stmt),
                                 gimple_assign_rhs3 (stmt));

      if (result)
        {
          STRIP_USELESS_TYPE_CONVERSION (result);
          if (valid_gimple_rhs_p (result))
            return result;
        }
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
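
/* Illustrative sketch (not part of this unit): for the GIMPLE_SINGLE_RHS
   case above, an assignment such as

     x_1 = &MEM[p_2 + 0];

   simplifies to the plain copy

     x_1 = p_2;

   possibly wrapped in a NOP_EXPR when the pointer types differ.  */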
/* Replace a statement at *SI_P with a sequence of statements in STMTS,
   adjusting the replacement stmts location and virtual operands.
   If the statement has a lhs the last stmt in the sequence is expected
   to assign to that lhs.  */

static void
gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
{
  gimple *stmt = gsi_stmt (*si_p);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  gimple *laststore = NULL;
  for (gimple_stmt_iterator i = gsi_last (stmts);
       !gsi_end_p (i); gsi_prev (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      if ((gimple_assign_single_p (new_stmt)
           && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
          || (is_gimple_call (new_stmt)
              && (gimple_call_flags (new_stmt)
                  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
        {
          tree vdef;
          if (!laststore)
            vdef = gimple_vdef (stmt);
          else
            vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
          gimple_set_vdef (new_stmt, vdef);
          if (vdef && TREE_CODE (vdef) == SSA_NAME)
            SSA_NAME_DEF_STMT (vdef) = new_stmt;
          laststore = new_stmt;
        }
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  tree reaching_vuse = gimple_vuse (stmt);
  for (gimple_stmt_iterator i = gsi_start (stmts);
       !gsi_end_p (i); gsi_next (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with exact SSA
         name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
        gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
        reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
          && TREE_CODE (vdef) == SSA_NAME)
        {
          unlink_stmt_vdef (stmt);
          release_ssa_name (vdef);
        }
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}
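
/* Illustrative sketch (not part of this unit): when a call with
   VUSE .MEM_3 / VDEF .MEM_4 is replaced by two stores, the backward
   walk gives the last store the original VDEF and the earlier store a
   fresh virtual SSA name, and the forward walk threads the uses:

     store1: VUSE .MEM_3, VDEF .MEM_7   (new SSA name)
     store2: VUSE .MEM_7, VDEF .MEM_4   (original VDEF)

   so the virtual operand chain around the sequence stays intact.  */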
/* Helper function for update_gimple_call and
   gimplify_and_update_call_from_tree.  A GIMPLE_CALL STMT is being replaced
   with GIMPLE_CALL NEW_STMT.  */

static void
finish_update_gimple_call (gimple_stmt_iterator *si_p, gimple *new_stmt,
                           gimple *stmt)
{
  tree lhs = gimple_call_lhs (stmt);
  gimple_call_set_lhs (new_stmt, lhs);
  if (lhs && TREE_CODE (lhs) == SSA_NAME)
    SSA_NAME_DEF_STMT (lhs) = new_stmt;
  gimple_move_vops (new_stmt, stmt);
  gimple_set_location (new_stmt, gimple_location (stmt));
  if (gimple_block (new_stmt) == NULL_TREE)
    gimple_set_block (new_stmt, gimple_block (stmt));
  gsi_replace (si_p, new_stmt, false);
}
/* Update a GIMPLE_CALL statement at iterator *SI_P to call to FN
   with number of arguments NARGS, where the arguments in GIMPLE form
   follow the NARGS argument.  */

void
update_gimple_call (gimple_stmt_iterator *si_p, tree fn, int nargs, ...)
{
  va_list ap;
  gcall *new_stmt, *stmt = as_a <gcall *> (gsi_stmt (*si_p));

  gcc_assert (is_gimple_call (stmt));
  va_start (ap, nargs);
  new_stmt = gimple_build_call_valist (fn, nargs, ap);
  finish_update_gimple_call (si_p, new_stmt, stmt);
  va_end (ap);
}
/* Return true if EXPR is a CALL_EXPR suitable for representation
   as a single GIMPLE_CALL statement.  If the arguments require
   further gimplification, return false.  */

static bool
valid_gimple_call_p (tree expr)
{
  unsigned i, nargs;

  if (TREE_CODE (expr) != CALL_EXPR)
    return false;

  nargs = call_expr_nargs (expr);
  for (i = 0; i < nargs; i++)
    {
      tree arg = CALL_EXPR_ARG (expr, i);
      if (is_gimple_reg_type (TREE_TYPE (arg)))
        {
          if (!is_gimple_val (arg))
            return false;
        }
      else
        if (!is_gimple_lvalue (arg))
          return false;
    }

  return true;
}
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL,
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple *stmt, *new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  if (valid_gimple_call_p (expr))
    {
      /* The call has simplified to another call.  */
      tree fn = CALL_EXPR_FN (expr);
      unsigned i;
      unsigned nargs = call_expr_nargs (expr);
      vec<tree> args = vNULL;

      if (nargs > 0)
        {
          args.create (nargs);
          args.safe_grow_cleared (nargs, true);

          for (i = 0; i < nargs; i++)
            args[i] = CALL_EXPR_ARG (expr, i);
        }

      new_stmt = gimple_build_call_vec (fn, args);
      finish_update_gimple_call (si_p, new_stmt, stmt);
      args.release ();
      return;
    }

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      push_gimplify_context (gimple_in_ssa_p (cfun));
      gimplify_and_add (expr, &stmts);
      pop_gimplify_context (NULL);

      /* We can end up with folding a memcpy of an empty class assignment
         which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
        {
          if (gimple_in_ssa_p (cfun))
            {
              unlink_stmt_vdef (stmt);
              release_defs (stmt);
            }
          gsi_replace (si_p, gimple_build_nop (), false);
          return;
        }
    }
  else
    {
      tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
                                       GSI_CONTINUE_LINKING);
    }

  gsi_replace_with_seq_vops (si_p, stmts);
}
/* Replace the call at *GSI with the gimple value VAL.  */

void
replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  gimple *repl;
  if (lhs)
    {
      if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
        val = fold_convert (TREE_TYPE (lhs), val);
      repl = gimple_build_assign (lhs, val);
    }
  else
    repl = gimple_build_nop ();
  tree vdef = gimple_vdef (stmt);
  if (vdef && TREE_CODE (vdef) == SSA_NAME)
    {
      unlink_stmt_vdef (stmt);
      release_ssa_name (vdef);
    }
  gsi_replace (gsi, repl, false);
}
/* Replace the call at *GSI with the new call REPL and fold that
   afterwards.  */

static void
replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
{
  gimple *stmt = gsi_stmt (*gsi);
  gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
  gimple_set_location (repl, gimple_location (stmt));
  gimple_move_vops (repl, stmt);
  gsi_replace (gsi, repl, false);
  fold_stmt (gsi);
}
/* Return true if VAR is a VAR_DECL or a component thereof.  */

static bool
var_decl_component_p (tree var)
{
  tree inner = var;
  while (handled_component_p (inner))
    inner = TREE_OPERAND (inner, 0);
  return (DECL_P (inner)
          || (TREE_CODE (inner) == MEM_REF
              && TREE_CODE (TREE_OPERAND (inner, 0)) == ADDR_EXPR));
}
/* Return TRUE if the SIZE argument, representing the size of an
   object, is in a range of values of which exactly zero is valid.  */

static bool
size_must_be_zero_p (tree size)
{
  if (integer_zerop (size))
    return true;

  if (TREE_CODE (size) != SSA_NAME || !INTEGRAL_TYPE_P (TREE_TYPE (size)))
    return false;

  tree type = TREE_TYPE (size);
  int prec = TYPE_PRECISION (type);

  /* Compute the value of SSIZE_MAX, the largest positive value that
     can be stored in ssize_t, the signed counterpart of size_t.  */
  wide_int ssize_max = wi::lshift (wi::one (prec), prec - 1) - 1;
  value_range valid_range (build_int_cst (type, 0),
                           wide_int_to_tree (type, ssize_max));
  value_range vr;
  if (cfun)
    get_range_query (cfun)->range_of_expr (vr, size);
  else
    get_global_range_query ()->range_of_expr (vr, size);
  if (vr.undefined_p ())
    vr.set_varying (TREE_TYPE (size));
  vr.intersect (&valid_range);
  return vr.zero_p ();
}
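
/* Illustrative sketch (not part of this unit): with

     ssize_t sn = ...;
     if (sn <= 0)
       memcpy (d, s, (size_t) sn);

   the size argument's range on the call's path is {0} union
   (SSIZE_MAX, SIZE_MAX]; intersecting it with the valid range
   [0, SSIZE_MAX] leaves exactly zero, so the copy folds away.  */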
/* Fold function call to builtin mem{{,p}cpy,move}.  Try to detect and
   diagnose (otherwise undefined) overlapping copies without preventing
   folding.  When folded, GCC guarantees that overlapping memcpy has
   the same semantics as memmove.  A call to the library memcpy need not
   provide the same guarantee.  Return false if no simplification can
   be made.  */

static bool
gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
                               tree dest, tree src, enum built_in_function code)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  tree len = gimple_call_arg (stmt, 2);
  location_t loc = gimple_location (stmt);

  /* If the LEN parameter is a constant zero or in range where
     the only valid value is zero, return DEST.  */
  if (size_must_be_zero_p (len))
    {
      gimple *repl;
      if (gimple_call_lhs (stmt))
        repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
      else
        repl = gimple_build_nop ();
      tree vdef = gimple_vdef (stmt);
      if (vdef && TREE_CODE (vdef) == SSA_NAME)
        {
          unlink_stmt_vdef (stmt);
          release_ssa_name (vdef);
        }
      gsi_replace (gsi, repl, false);
      return true;
    }

  /* If SRC and DEST are the same (and not volatile), return
     DEST{,+LEN,+LEN-1}.  */
  if (operand_equal_p (src, dest, 0))
    {
      /* Avoid diagnosing exact overlap in calls to __builtin_memcpy.
         It's safe and may even be emitted by GCC itself (see bug
         32667).  */
      unlink_stmt_vdef (stmt);
      if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
        release_ssa_name (gimple_vdef (stmt));
      if (!lhs)
        {
          gsi_replace (gsi, gimple_build_nop (), false);
          return true;
        }
      goto done;
    }
  else
    {
      /* We cannot (easily) change the type of the copy if it is a storage
         order barrier, i.e. is equivalent to a VIEW_CONVERT_EXPR that can
         modify the storage order of objects (see storage_order_barrier_p).  */
      tree srctype
        = POINTER_TYPE_P (TREE_TYPE (src))
          ? TREE_TYPE (TREE_TYPE (src)) : NULL_TREE;
      tree desttype
        = POINTER_TYPE_P (TREE_TYPE (dest))
          ? TREE_TYPE (TREE_TYPE (dest)) : NULL_TREE;
      tree destvar, srcvar, srcoff;
      unsigned int src_align, dest_align;
      unsigned HOST_WIDE_INT tmp_len;
      const char *tmp_str;

      /* Build accesses at offset zero with a ref-all character type.  */
      tree off0
        = build_int_cst (build_pointer_type_for_mode (char_type_node,
                                                      ptr_mode, true), 0);

      /* If we can perform the copy efficiently with first doing all loads and
         then all stores inline it that way.  Currently efficiently means that
         we can load all the memory with a single set operation and that the
         total size is less than MOVE_MAX * MOVE_RATIO.  */
      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (tree_fits_uhwi_p (len)
          && (compare_tree_int
              (len, (MOVE_MAX
                     * MOVE_RATIO (optimize_function_for_size_p (cfun))))
              <= 0)
          /* FIXME: Don't transform copies from strings with known length.
             Until GCC 9 this prevented a case in gcc.dg/strlenopt-8.c
             from being handled, and the case was XFAILed for that reason.
             Now that it is handled and the XFAIL removed, as soon as other
             strlenopt tests that rely on it for passing are adjusted, this
             hack can be removed.  */
          && !c_strlen (src, 1)
          && !((tmp_str = getbyterep (src, &tmp_len)) != NULL
               && memchr (tmp_str, 0, tmp_len) == NULL)
          && !(srctype
               && AGGREGATE_TYPE_P (srctype)
               && TYPE_REVERSE_STORAGE_ORDER (srctype))
          && !(desttype
               && AGGREGATE_TYPE_P (desttype)
               && TYPE_REVERSE_STORAGE_ORDER (desttype)))
        {
          unsigned ilen = tree_to_uhwi (len);
          if (pow2p_hwi (ilen))
            {
              /* Detect out-of-bounds accesses without issuing warnings.
                 Avoid folding out-of-bounds copies but to avoid false
                 positives for unreachable code defer warning until after
                 DCE has worked its magic.
                 -Wrestrict is still diagnosed.  */
              if (int warning = check_bounds_or_overlap (as_a <gcall *>(stmt),
                                                         dest, src, len, len,
                                                         false, false))
                if (warning != OPT_Wrestrict)
                  return false;

              scalar_int_mode mode;
              if (int_mode_for_size (ilen * 8, 0).exists (&mode)
                  && GET_MODE_SIZE (mode) * BITS_PER_UNIT == ilen * 8
                  && have_insn_for (SET, mode)
                  /* If the destination pointer is not aligned we must be able
                     to emit an unaligned store.  */
                  && (dest_align >= GET_MODE_ALIGNMENT (mode)
                      || !targetm.slow_unaligned_access (mode, dest_align)
                      || (optab_handler (movmisalign_optab, mode)
                          != CODE_FOR_nothing)))
                {
                  tree type = build_nonstandard_integer_type (ilen * 8, 1);
                  tree srctype = type;
                  tree desttype = type;
                  if (src_align < GET_MODE_ALIGNMENT (mode))
                    srctype = build_aligned_type (type, src_align);
                  tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
                  tree tem = fold_const_aggregate_ref (srcmem);
                  if (tem)
                    srcmem = tem;
                  else if (src_align < GET_MODE_ALIGNMENT (mode)
                           && targetm.slow_unaligned_access (mode, src_align)
                           && (optab_handler (movmisalign_optab, mode)
                               == CODE_FOR_nothing))
                    srcmem = NULL_TREE;
                  if (srcmem)
                    {
                      gimple *new_stmt;
                      if (is_gimple_reg_type (TREE_TYPE (srcmem)))
                        {
                          new_stmt = gimple_build_assign (NULL_TREE, srcmem);
                          srcmem
                            = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem),
                                                          new_stmt);
                          gimple_assign_set_lhs (new_stmt, srcmem);
                          gimple_set_vuse (new_stmt, gimple_vuse (stmt));
                          gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
                        }
                      if (dest_align < GET_MODE_ALIGNMENT (mode))
                        desttype = build_aligned_type (type, dest_align);
                      new_stmt
                        = gimple_build_assign (fold_build2 (MEM_REF, desttype,
                                                            dest, off0),
                                               srcmem);
                      gimple_move_vops (new_stmt, stmt);
                      if (!lhs)
                        {
                          gsi_replace (gsi, new_stmt, false);
                          return true;
                        }
                      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
                      goto done;
                    }
                }
            }
        }

      if (code == BUILT_IN_MEMMOVE)
        {
          /* Both DEST and SRC must be pointer types.
             ??? This is what old code did.  Is the testing for pointer types
             really mandatory?

             If either SRC is readonly or length is 1, we can use memcpy.  */
          if (!dest_align || !src_align)
            return false;
          if (readonly_data_expr (src)
              || (tree_fits_uhwi_p (len)
                  && (MIN (src_align, dest_align) / BITS_PER_UNIT
                      >= tree_to_uhwi (len))))
            {
              tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
              if (!fn)
                return false;
              gimple_call_set_fndecl (stmt, fn);
              gimple_call_set_arg (stmt, 0, dest);
              gimple_call_set_arg (stmt, 1, src);
              fold_stmt (gsi);
              return true;
            }

          /* If *src and *dest can't overlap, optimize into memcpy as well.  */
          if (TREE_CODE (src) == ADDR_EXPR
              && TREE_CODE (dest) == ADDR_EXPR)
            {
              tree src_base, dest_base, fn;
              poly_int64 src_offset = 0, dest_offset = 0;
              poly_uint64 maxsize;

              srcvar = TREE_OPERAND (src, 0);
              src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
              if (src_base == NULL)
                src_base = srcvar;
              destvar = TREE_OPERAND (dest, 0);
              dest_base = get_addr_base_and_unit_offset (destvar,
                                                         &dest_offset);
              if (dest_base == NULL)
                dest_base = destvar;
              if (!poly_int_tree_p (len, &maxsize))
                maxsize = -1;
              if (SSA_VAR_P (src_base)
                  && SSA_VAR_P (dest_base))
                {
                  if (operand_equal_p (src_base, dest_base, 0)
                      && ranges_maybe_overlap_p (src_offset, maxsize,
                                                 dest_offset, maxsize))
                    return false;
                }
              else if (TREE_CODE (src_base) == MEM_REF
                       && TREE_CODE (dest_base) == MEM_REF)
                {
                  if (! operand_equal_p (TREE_OPERAND (src_base, 0),
                                         TREE_OPERAND (dest_base, 0), 0))
                    return false;
                  poly_offset_int full_src_offset
                    = mem_ref_offset (src_base) + src_offset;
                  poly_offset_int full_dest_offset
                    = mem_ref_offset (dest_base) + dest_offset;
                  if (ranges_maybe_overlap_p (full_src_offset, maxsize,
                                              full_dest_offset, maxsize))
                    return false;
                }
              else
                return false;

              fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
              if (!fn)
                return false;
              gimple_call_set_fndecl (stmt, fn);
              gimple_call_set_arg (stmt, 0, dest);
              gimple_call_set_arg (stmt, 1, src);
              fold_stmt (gsi);
              return true;
            }

          /* If the destination and source do not alias optimize into
             memcpy as well.  */
          if ((is_gimple_min_invariant (dest)
               || TREE_CODE (dest) == SSA_NAME)
              && (is_gimple_min_invariant (src)
                  || TREE_CODE (src) == SSA_NAME))
            {
              ao_ref destr, srcr;
              ao_ref_init_from_ptr_and_size (&destr, dest, len);
              ao_ref_init_from_ptr_and_size (&srcr, src, len);
              if (!refs_may_alias_p_1 (&destr, &srcr, false))
                {
                  tree fn;
                  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
                  if (!fn)
                    return false;
                  gimple_call_set_fndecl (stmt, fn);
                  gimple_call_set_arg (stmt, 0, dest);
                  gimple_call_set_arg (stmt, 1, src);
                  fold_stmt (gsi);
                  return true;
                }
            }

          return false;
        }

      if (!tree_fits_shwi_p (len))
        return false;
      if (!srctype
          || (AGGREGATE_TYPE_P (srctype)
              && TYPE_REVERSE_STORAGE_ORDER (srctype)))
        return false;
      if (!desttype
          || (AGGREGATE_TYPE_P (desttype)
              && TYPE_REVERSE_STORAGE_ORDER (desttype)))
        return false;
      /* In the following try to find a type that is most natural to be
         used for the memcpy source and destination and that allows
         the most optimization when memcpy is turned into a plain assignment
         using that type.  In theory we could always use a char[len] type
         but that only gains us that the destination and source possibly
         no longer will have their address taken.  */
      if (TREE_CODE (srctype) == ARRAY_TYPE
          && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
        srctype = TREE_TYPE (srctype);
      if (TREE_CODE (desttype) == ARRAY_TYPE
          && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
        desttype = TREE_TYPE (desttype);
      if (TREE_ADDRESSABLE (srctype)
          || TREE_ADDRESSABLE (desttype))
        return false;

      /* Make sure we are not copying using a floating-point mode or
         a type whose size possibly does not match its precision.  */
      if (FLOAT_MODE_P (TYPE_MODE (desttype))
          || TREE_CODE (desttype) == BOOLEAN_TYPE
          || TREE_CODE (desttype) == ENUMERAL_TYPE)
        desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
      if (FLOAT_MODE_P (TYPE_MODE (srctype))
          || TREE_CODE (srctype) == BOOLEAN_TYPE
          || TREE_CODE (srctype) == ENUMERAL_TYPE)
        srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
      if (!srctype)
        srctype = desttype;
      if (!desttype)
        desttype = srctype;
      if (!srctype)
        return false;

      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);

      /* Choose between src and destination type for the access based
         on alignment, whether the access constitutes a register access
         and whether it may actually expose a declaration for SSA rewrite
         or SRA decomposition.  Also try to expose a string constant, we
         might be able to concatenate several of them later into a single
         string store.  */
      destvar = NULL_TREE;
      srcvar = NULL_TREE;
      if (TREE_CODE (dest) == ADDR_EXPR
          && var_decl_component_p (TREE_OPERAND (dest, 0))
          && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len)
          && dest_align >= TYPE_ALIGN (desttype)
          && (is_gimple_reg_type (desttype)
              || src_align >= TYPE_ALIGN (desttype)))
        destvar = fold_build2 (MEM_REF, desttype, dest, off0);
      else if (TREE_CODE (src) == ADDR_EXPR
               && var_decl_component_p (TREE_OPERAND (src, 0))
               && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len)
               && src_align >= TYPE_ALIGN (srctype)
               && (is_gimple_reg_type (srctype)
                   || dest_align >= TYPE_ALIGN (srctype)))
        srcvar = fold_build2 (MEM_REF, srctype, src, off0);
      /* FIXME: Don't transform copies from strings with known original length.
         As soon as strlenopt tests that rely on it for passing are adjusted,
         this hack can be removed.  */
      else if (gimple_call_alloca_for_var_p (stmt)
               && (srcvar = string_constant (src, &srcoff, NULL, NULL))
               && integer_zerop (srcoff)
               && tree_int_cst_equal (TYPE_SIZE_UNIT (TREE_TYPE (srcvar)), len)
               && dest_align >= TYPE_ALIGN (TREE_TYPE (srcvar)))
        srctype = TREE_TYPE (srcvar);
      else
        return false;

      /* Now that we chose an access type express the other side in
         terms of it if the target allows that with respect to alignment
         constraints.  */
      if (srcvar == NULL_TREE)
        {
          if (src_align >= TYPE_ALIGN (desttype))
            srcvar = fold_build2 (MEM_REF, desttype, src, off0);
          else
            {
              if (STRICT_ALIGNMENT)
                return false;
              srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
                                            src_align);
              srcvar = fold_build2 (MEM_REF, srctype, src, off0);
            }
        }
      else if (destvar == NULL_TREE)
        {
          if (dest_align >= TYPE_ALIGN (srctype))
            destvar = fold_build2 (MEM_REF, srctype, dest, off0);
          else
            {
              if (STRICT_ALIGNMENT)
                return false;
              desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
                                             dest_align);
              destvar = fold_build2 (MEM_REF, desttype, dest, off0);
            }
        }

      /* Same as above, detect out-of-bounds accesses without issuing
         warnings.  Avoid folding out-of-bounds copies but to avoid
         false positives for unreachable code defer warning until
         after DCE has worked its magic.
         -Wrestrict is still diagnosed.  */
      if (int warning = check_bounds_or_overlap (as_a <gcall *>(stmt),
                                                 dest, src, len, len,
                                                 false, false))
        if (warning != OPT_Wrestrict)
          return false;

      gimple *new_stmt;
      if (is_gimple_reg_type (TREE_TYPE (srcvar)))
        {
          tree tem = fold_const_aggregate_ref (srcvar);
          if (tem)
            srcvar = tem;
          if (! is_gimple_min_invariant (srcvar))
            {
              new_stmt = gimple_build_assign (NULL_TREE, srcvar);
              srcvar = create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar),
                                                   new_stmt);
              gimple_assign_set_lhs (new_stmt, srcvar);
              gimple_set_vuse (new_stmt, gimple_vuse (stmt));
              gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
            }
          new_stmt = gimple_build_assign (destvar, srcvar);
          goto set_vop_and_replace;
        }

      /* We get an aggregate copy.  If the source is a STRING_CST, then
         directly use its type to perform the copy.  */
      if (TREE_CODE (srcvar) == STRING_CST)
        desttype = srctype;
      /* Or else, use an unsigned char[] type to perform the copy in order
         to preserve padding and to avoid any issues with TREE_ADDRESSABLE
         types or float modes behavior on copying.  */
      else
        {
          desttype = build_array_type_nelts (unsigned_char_type_node,
                                             tree_to_uhwi (len));
          srctype = desttype;
          if (src_align > TYPE_ALIGN (srctype))
            srctype = build_aligned_type (srctype, src_align);
          srcvar = fold_build2 (MEM_REF, srctype, src, off0);
        }

      if (dest_align > TYPE_ALIGN (desttype))
        desttype = build_aligned_type (desttype, dest_align);
      destvar = fold_build2 (MEM_REF, desttype, dest, off0);
      new_stmt = gimple_build_assign (destvar, srcvar);

set_vop_and_replace:
      gimple_move_vops (new_stmt, stmt);
      if (!lhs)
        {
          gsi_replace (gsi, new_stmt, false);
          return true;
        }
      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
    }

done:
  gimple_seq stmts = NULL;
  if (code == BUILT_IN_MEMCPY || code == BUILT_IN_MEMMOVE)
    len = NULL_TREE;
  else if (code == BUILT_IN_MEMPCPY)
    {
      len = gimple_convert_to_ptrofftype (&stmts, loc, len);
      dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
                           TREE_TYPE (dest), dest, len);
    }
  else
    gcc_unreachable ();

  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gimple *repl = gimple_build_assign (lhs, dest);
  gsi_replace (gsi, repl, false);
  return true;
}
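
/* Illustrative sketch (not part of this unit): on a typical 64-bit
   target the inline-copy path above turns

     memcpy (&dst, &src, 8);

   into a single load/store pair using an 8-byte integer type:

     tmp_1 = MEM <uint64_t> [&src];
     MEM <uint64_t> [&dst] = tmp_1;

   while mempcpy additionally produces DEST + LEN as the result.  */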
/* Transform a call to built-in bcmp (a, b, len) at *GSI into one
   to built-in memcmp (a, b, len).  */

static bool
gimple_fold_builtin_bcmp (gimple_stmt_iterator *gsi)
{
  tree fn = builtin_decl_implicit (BUILT_IN_MEMCMP);

  if (!fn)
    return false;

  /* Transform bcmp (a, b, len) into memcmp (a, b, len).  */

  gimple *stmt = gsi_stmt (*gsi);
  tree a = gimple_call_arg (stmt, 0);
  tree b = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  gimple *repl = gimple_build_call (fn, 3, a, b, len);
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}
/* Transform a call to built-in bcopy (src, dest, len) at *GSI into one
   to built-in memmove (dest, src, len).  */

static bool
gimple_fold_builtin_bcopy (gimple_stmt_iterator *gsi)
{
  tree fn = builtin_decl_implicit (BUILT_IN_MEMMOVE);

  if (!fn)
    return false;

  /* bcopy has been removed from POSIX in Issue 7 but Issue 6 specifies
     it's equivalent to memmove (not memcpy).  Transform bcopy (src, dest,
     len) into memmove (dest, src, len).  */

  gimple *stmt = gsi_stmt (*gsi);
  tree src = gimple_call_arg (stmt, 0);
  tree dest = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  gimple_call_set_fntype (as_a <gcall *> (stmt), TREE_TYPE (fn));
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}
/* Transform a call to built-in bzero (dest, len) at *GSI into one
   to built-in memset (dest, 0, len).  */

static bool
gimple_fold_builtin_bzero (gimple_stmt_iterator *gsi)
{
  tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);

  if (!fn)
    return false;

  /* Transform bzero (dest, len) into memset (dest, 0, len).  */

  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree len = gimple_call_arg (stmt, 1);

  gimple_seq seq = NULL;
  gimple *repl = gimple_build_call (fn, 3, dest, integer_zero_node, len);
  gimple_seq_add_stmt_without_update (&seq, repl);
  gsi_replace_with_seq_vops (gsi, seq);
  fold_stmt (gsi);

  return true;
}
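
/* Illustrative sketch (not part of this unit): the three legacy BSD
   interfaces above are canonicalized onto their ISO C counterparts,

     bcmp (a, b, n)   becomes  memcmp (a, b, n)
     bcopy (s, d, n)  becomes  memmove (d, s, n)   (note the swapped args)
     bzero (d, n)     becomes  memset (d, 0, n)

   after which the memcmp/memmove/memset folders get another shot.  */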
/* Fold function call to builtin memset or bzero at *GSI setting the
   memory of size LEN to VAL.  Return whether a simplification was made.  */

static bool
gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree etype;
  unsigned HOST_WIDE_INT length, cval;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
      return true;
    }

  if (! tree_fits_uhwi_p (len))
    return false;

  if (TREE_CODE (c) != INTEGER_CST)
    return false;

  tree dest = gimple_call_arg (stmt, 0);
  tree var = dest;
  if (TREE_CODE (var) != ADDR_EXPR)
    return false;

  var = TREE_OPERAND (var, 0);
  if (TREE_THIS_VOLATILE (var))
    return false;

  etype = TREE_TYPE (var);
  if (TREE_CODE (etype) == ARRAY_TYPE)
    etype = TREE_TYPE (etype);

  if (!INTEGRAL_TYPE_P (etype)
      && !POINTER_TYPE_P (etype))
    return false;

  if (! var_decl_component_p (var))
    return false;

  length = tree_to_uhwi (len);
  if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (etype)) != length
      || (GET_MODE_PRECISION (SCALAR_INT_TYPE_MODE (etype))
          != GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (etype)))
      || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
    return false;

  if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
    return false;

  if (!type_has_mode_precision_p (etype))
    etype = lang_hooks.types.type_for_mode (SCALAR_INT_TYPE_MODE (etype),
                                            TYPE_UNSIGNED (etype));

  if (integer_zerop (c))
    cval = 0;
  else
    {
      if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
        return false;

      cval = TREE_INT_CST_LOW (c);
      cval &= 0xff;
      cval |= cval << 8;
      cval |= cval << 16;
      cval |= (cval << 31) << 1;
    }

  var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
  gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
  gimple_move_vops (store, stmt);
  gimple_set_location (store, gimple_location (stmt));
  gsi_insert_before (gsi, store, GSI_SAME_STMT);
  if (gimple_call_lhs (stmt))
    {
      gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
      gsi_replace (gsi, asgn, false);
    }
  else
    {
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_next (gsi);
      gsi_remove (&gsi2, true);
    }

  return true;
}
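
/* Illustrative sketch (not part of this unit): for a 4-byte, suitably
   aligned destination,

     unsigned int u;
     memset (&u, 0xab, sizeof u);

   is folded into one store of the replicated byte pattern:

     u = 0xabababab;

   which is exactly what the CVAL byte-smearing shifts above build.  */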
/* Helper of get_range_strlen for ARG that is not an SSA_NAME.  */

static bool
get_range_strlen_tree (tree arg, bitmap *visited, strlen_range_kind rkind,
                       c_strlen_data *pdata, unsigned eltsize)
{
  gcc_assert (TREE_CODE (arg) != SSA_NAME);

  /* The length computed by this invocation of the function.  */
  tree val = NULL_TREE;

  /* True if VAL is an optimistic (tight) bound determined from
     the size of the character array in which the string may be
     stored.  In that case, the computed VAL is used to set
     PDATA->MAXBOUND.  */
  bool tight_bound = false;

  /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
  if (TREE_CODE (arg) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
    {
      tree op = TREE_OPERAND (arg, 0);
      if (integer_zerop (TREE_OPERAND (op, 1)))
        {
          tree aop0 = TREE_OPERAND (op, 0);
          if (TREE_CODE (aop0) == INDIRECT_REF
              && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
            return get_range_strlen (TREE_OPERAND (aop0, 0), visited, rkind,
                                     pdata, eltsize);
        }
      else if (TREE_CODE (TREE_OPERAND (op, 0)) == COMPONENT_REF
               && rkind == SRK_LENRANGE)
        {
          /* Fail if an array is the last member of a struct object
             since it could be treated as a (fake) flexible array
             member.  */
          tree idx = TREE_OPERAND (op, 1);

          arg = TREE_OPERAND (op, 0);
          tree optype = TREE_TYPE (arg);
          if (tree dom = TYPE_DOMAIN (optype))
            if (tree bound = TYPE_MAX_VALUE (dom))
              if (TREE_CODE (bound) == INTEGER_CST
                  && TREE_CODE (idx) == INTEGER_CST
                  && tree_int_cst_lt (bound, idx))
                return false;
        }
    }

  if (rkind == SRK_INT_VALUE)
    {
      /* We are computing the maximum value (not string length).  */
      val = arg;
      if (TREE_CODE (val) != INTEGER_CST
          || tree_int_cst_sgn (val) < 0)
        return false;
    }
  else
    {
      c_strlen_data lendata = { };
      val = c_strlen (arg, 1, &lendata, eltsize);

      if (!val && lendata.decl)
        {
          /* ARG refers to an unterminated const character array.
             DATA.DECL with size DATA.LEN.  */
          val = lendata.minlen;
          pdata->decl = lendata.decl;
        }
    }

  /* Set if VAL represents the maximum length based on array size (set
     when exact length cannot be determined).  */
  bool maxbound = false;

  if (!val && rkind == SRK_LENRANGE)
    {
      if (TREE_CODE (arg) == ADDR_EXPR)
        return get_range_strlen (TREE_OPERAND (arg, 0), visited, rkind,
                                 pdata, eltsize);

      if (TREE_CODE (arg) == ARRAY_REF)
        {
          tree optype = TREE_TYPE (TREE_OPERAND (arg, 0));

          /* Determine the "innermost" array type.  */
          while (TREE_CODE (optype) == ARRAY_TYPE
                 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
            optype = TREE_TYPE (optype);

          /* Avoid arrays of pointers.  */
          tree eltype = TREE_TYPE (optype);
          if (TREE_CODE (optype) != ARRAY_TYPE
              || !INTEGRAL_TYPE_P (eltype))
            return false;

          /* Fail when the array bound is unknown or zero.  */
          val = TYPE_SIZE_UNIT (optype);
          if (!val
              || TREE_CODE (val) != INTEGER_CST
              || integer_zerop (val))
            return false;

          val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
                             integer_one_node);

          /* Set the minimum size to zero since the string in
             the array could have zero length.  */
          pdata->minlen = ssize_int (0);

          tight_bound = true;
        }
      else if (TREE_CODE (arg) == COMPONENT_REF
               && (TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1)))
                   == ARRAY_TYPE))
        {
          /* Use the type of the member array to determine the upper
             bound on the length of the array.  This may be overly
             optimistic if the array itself isn't NUL-terminated and
             the caller relies on the subsequent member to contain
             the NUL but that would only be considered valid if
             the array were the last member of a struct.  */

          tree fld = TREE_OPERAND (arg, 1);

          tree optype = TREE_TYPE (fld);

          /* Determine the "innermost" array type.  */
          while (TREE_CODE (optype) == ARRAY_TYPE
                 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
            optype = TREE_TYPE (optype);

          /* Fail when the array bound is unknown or zero.  */
          val = TYPE_SIZE_UNIT (optype);
          if (!val
              || TREE_CODE (val) != INTEGER_CST
              || integer_zerop (val))
            return false;
          val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
                             integer_one_node);

          /* Set the minimum size to zero since the string in
             the array could have zero length.  */
          pdata->minlen = ssize_int (0);

          /* The array size determined above is an optimistic bound
             on the length.  If the array isn't nul-terminated the
             length computed by the library function would be greater.
             Even though using strlen to cross the subobject boundary
             is undefined, avoid drawing conclusions from the member
             type about the length here.  */
          tight_bound = true;
        }
      else if (TREE_CODE (arg) == MEM_REF
               && TREE_CODE (TREE_TYPE (arg)) == ARRAY_TYPE
               && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == INTEGER_TYPE
               && TREE_CODE (TREE_OPERAND (arg, 0)) == ADDR_EXPR)
        {
          /* Handle a MEM_REF into a DECL accessing an array of integers,
             being conservative about references to extern structures with
             flexible array members that can be initialized to arbitrary
             numbers of elements as an extension (static structs are okay).
             FIXME: Make this less conservative -- see
             component_ref_size in tree.c.  */
          tree ref = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
          if ((TREE_CODE (ref) == PARM_DECL || VAR_P (ref))
              && (decl_binds_to_current_def_p (ref)
                  || !array_at_struct_end_p (arg)))
            {
              /* Fail if the offset is out of bounds.  Such accesses
                 should be diagnosed at some point.  */
              val = DECL_SIZE_UNIT (ref);
              if (!val
                  || TREE_CODE (val) != INTEGER_CST
                  || integer_zerop (val))
                return false;

              poly_offset_int psiz = wi::to_offset (val);
              poly_offset_int poff = mem_ref_offset (arg);
              if (known_le (psiz, poff))
                return false;

              pdata->minlen = ssize_int (0);

              /* Subtract the offset and one for the terminating nul.  */
              psiz -= poff;
              psiz -= 1;
              val = wide_int_to_tree (TREE_TYPE (val), psiz);
              /* Since VAL reflects the size of a declared object
                 rather the type of the access it is not a tight bound.  */
            }
        }
      else if (TREE_CODE (arg) == PARM_DECL || VAR_P (arg))
        {
          /* Avoid handling pointers to arrays.  GCC might misuse
             a pointer to an array of one bound to point to an array
             object of a greater bound.  */
          tree argtype = TREE_TYPE (arg);
          if (TREE_CODE (argtype) == ARRAY_TYPE)
            {
              val = TYPE_SIZE_UNIT (argtype);
              if (!val
                  || TREE_CODE (val) != INTEGER_CST
                  || integer_zerop (val))
                return false;
              val = wide_int_to_tree (TREE_TYPE (val),
                                      wi::sub (wi::to_wide (val), 1));

              /* Set the minimum size to zero since the string in
                 the array could have zero length.  */
              pdata->minlen = ssize_int (0);
            }
        }
      maxbound = true;
    }

  if (!val)
    return false;

  /* Adjust the lower bound on the string length as necessary.  */
  if (!pdata->minlen
      || (rkind != SRK_STRLEN
          && TREE_CODE (pdata->minlen) == INTEGER_CST
          && TREE_CODE (val) == INTEGER_CST
          && tree_int_cst_lt (val, pdata->minlen)))
    pdata->minlen = val;

  if (pdata->maxbound && TREE_CODE (pdata->maxbound) == INTEGER_CST)
    {
      /* Adjust the tighter (more optimistic) string length bound
         if necessary and proceed to adjust the more conservative
         bound.  */
      if (TREE_CODE (val) == INTEGER_CST)
        {
          if (tree_int_cst_lt (pdata->maxbound, val))
            pdata->maxbound = val;
        }
      else
        pdata->maxbound = val;
    }
  else if (pdata->maxbound || maxbound)
    /* Set PDATA->MAXBOUND only if it either isn't INTEGER_CST or
       if VAL corresponds to the maximum length determined based
       on the type of the object.  */
    pdata->maxbound = val;

  if (tight_bound)
    {
      /* VAL computed above represents an optimistically tight bound
         on the length of the string based on the referenced object's
         or subobject's type.  Determine the conservative upper bound
         based on the enclosing object's size if possible.  */
      if (rkind == SRK_LENRANGE)
        {
          poly_int64 offset;
          tree base = get_addr_base_and_unit_offset (arg, &offset);
          if (!base)
            {
              /* When the call above fails due to a non-constant offset
                 assume the offset is zero and use the size of the whole
                 enclosing object instead.  */
              base = get_base_address (arg);
              offset = 0;
            }
          /* If the base object is a pointer no upper bound on the length
             can be determined.  Otherwise the maximum length is equal to
             the size of the enclosing object minus the offset of
             the referenced subobject minus 1 (for the terminating nul).  */
          tree type = TREE_TYPE (base);
          if (TREE_CODE (type) == POINTER_TYPE
              || (TREE_CODE (base) != PARM_DECL && !VAR_P (base))
              || !(val = DECL_SIZE_UNIT (base)))
            val = build_all_ones_cst (size_type_node);
          else
            {
              val = DECL_SIZE_UNIT (base);
              val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
                                 size_int (offset + 1));
            }
        }
    }

  if (pdata->maxlen)
    {
      /* Adjust the more conservative bound if possible/necessary
         and fail otherwise.  */
      if (rkind != SRK_STRLEN)
        {
          if (TREE_CODE (pdata->maxlen) != INTEGER_CST
              || TREE_CODE (val) != INTEGER_CST)
            return false;

          if (tree_int_cst_lt (pdata->maxlen, val))
            pdata->maxlen = val;
          return true;
        }
      else if (simple_cst_equal (val, pdata->maxlen) != 1)
        {
          /* Fail if the length of this ARG is different from that
             previously determined from another ARG.  */
          return false;
        }
    }

  pdata->maxlen = val;
  return rkind == SRK_LENRANGE || !integer_all_onesp (val);
}
/* For an ARG referencing one or more strings, try to obtain the range
   of their lengths, or the size of the largest array ARG refers to if
   the range of lengths cannot be determined, and store all in *PDATA.
   For an integer ARG (when RKIND == SRK_INT_VALUE), try to determine
   the maximum constant value.
   If ARG is an SSA_NAME, follow its use-def chains.  When RKIND ==
   SRK_STRLEN, then if PDATA->MAXLEN is not equal to the determined
   length or if we are unable to determine the length, return false.
   VISITED is a bitmap of visited variables.
   RKIND determines the kind of value or range to obtain (see
   strlen_range_kind).
   Set PDATA->DECL if ARG refers to an unterminated constant array.
   On input, set ELTSIZE to 1 for normal single byte character strings,
   and either 2 or 4 for wide character strings (the size of wchar_t).
   Return true if *PDATA was successfully populated and false otherwise.  */

static bool
get_range_strlen (tree arg, bitmap *visited,
                  strlen_range_kind rkind,
                  c_strlen_data *pdata, unsigned eltsize)
{
  if (TREE_CODE (arg) != SSA_NAME)
    return get_range_strlen_tree (arg, visited, rkind, pdata, eltsize);

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  */
  if (!*visited)
    *visited = BITMAP_ALLOC (NULL);
  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
    return true;

  tree var = arg;
  gimple *def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
    case GIMPLE_ASSIGN:
      /* The RHS of the statement defining VAR must either have a
         constant length or come from another SSA_NAME with a constant
         length.  */
      if (gimple_assign_single_p (def_stmt)
          || gimple_assign_unary_nop_p (def_stmt))
        {
          tree rhs = gimple_assign_rhs1 (def_stmt);
          return get_range_strlen (rhs, visited, rkind, pdata, eltsize);
        }
      else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
        {
          tree ops[2] = { gimple_assign_rhs2 (def_stmt),
                          gimple_assign_rhs3 (def_stmt) };

          for (unsigned int i = 0; i < 2; i++)
            if (!get_range_strlen (ops[i], visited, rkind, pdata, eltsize))
              {
                if (rkind != SRK_LENRANGE)
                  return false;
                /* Set the upper bound to the maximum to prevent
                   it from being adjusted in the next iteration but
                   leave MINLEN and the more conservative MAXBOUND
                   determined so far alone (or leave them null if
                   they haven't been set yet).  That the MINLEN is
                   in fact zero can be determined from MAXLEN being
                   unbounded but the discovered minimum is used for
                   diagnostics.  */
                pdata->maxlen = build_all_ones_cst (size_type_node);
              }
          return true;
        }
      return false;

    case GIMPLE_PHI:
      /* Unless RKIND == SRK_LENRANGE, all arguments of the PHI node
         must have a constant length.  */
      for (unsigned i = 0; i < gimple_phi_num_args (def_stmt); i++)
        {
          tree arg = gimple_phi_arg (def_stmt, i)->def;

          /* If this PHI has itself as an argument, we cannot
             determine the string length of this argument.  However,
             if we can find a constant string length for the other
             PHI args then we can still be sure that this is a
             constant string length.  So be optimistic and just
             continue with the next argument.  */
          if (arg == gimple_phi_result (def_stmt))
            continue;

          if (!get_range_strlen (arg, visited, rkind, pdata, eltsize))
            {
              if (rkind != SRK_LENRANGE)
                return false;
              /* Set the upper bound to the maximum to prevent
                 it from being adjusted in the next iteration but
                 leave MINLEN and the more conservative MAXBOUND
                 determined so far alone (or leave them null if
                 they haven't been set yet).  That the MINLEN is
                 in fact zero can be determined from MAXLEN being
                 unbounded but the discovered minimum is used for
                 diagnostics.  */
              pdata->maxlen = build_all_ones_cst (size_type_node);
            }
        }
      return true;

    default:
      return false;
    }
}
/* Try to obtain the range of the lengths of the string(s) referenced
   by ARG, or the size of the largest array ARG refers to if the range
   of lengths cannot be determined, and store all in *PDATA which must
   be zero-initialized on input except PDATA->MAXBOUND may be set to
   a non-null tree node other than INTEGER_CST to request to have it
   set to the length of the longest string in a PHI.  ELTSIZE is
   the expected size of the string element in bytes: 1 for char and
   some power of 2 for wide characters.
   Return true if the range [PDATA->MINLEN, PDATA->MAXLEN] is suitable
   for optimization.  Returning false means that a nonzero PDATA->MINLEN
   doesn't reflect the true lower bound of the range when PDATA->MAXLEN
   is -1 (in that case, the actual range is indeterminate, i.e.,
   [0, PTRDIFF_MAX - 2]).  */

bool
get_range_strlen (tree arg, c_strlen_data *pdata, unsigned eltsize)
{
  bitmap visited = NULL;
  tree maxbound = pdata->maxbound;

  if (!get_range_strlen (arg, &visited, SRK_LENRANGE, pdata, eltsize))
    {
      /* On failure extend the length range to an impossible maximum
         (a valid MAXLEN must be less than PTRDIFF_MAX - 1).  Other
         members can stay unchanged regardless.  */
      pdata->minlen = ssize_int (0);
      pdata->maxlen = build_all_ones_cst (size_type_node);
    }
  else if (!pdata->minlen)
    pdata->minlen = ssize_int (0);

  /* If it's unchanged from its initial non-null value, set the conservative
     MAXBOUND to SIZE_MAX.  Otherwise leave it null (if it is null).  */
  if (maxbound && pdata->maxbound == maxbound)
    pdata->maxbound = build_all_ones_cst (size_type_node);

  if (visited)
    BITMAP_FREE (visited);

  return !integer_all_onesp (pdata->maxlen);
}
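
/* Illustrative sketch (not part of this unit): a typical caller
   zero-initializes the descriptor and reads the bounds back out:

     c_strlen_data lendata = { };
     if (get_range_strlen (arg, &lendata, 1))
       {
         // lendata.minlen .. lendata.maxlen bound strlen (arg)
       }

   with ELTSIZE 1 for char and 2 or 4 for wide strings.  */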
/* Return the maximum value for ARG given RKIND (see strlen_range_kind).
   For ARG of pointer types, NONSTR indicates if the caller is prepared
   to handle unterminated strings.   For integer ARG and when RKIND ==
   SRK_INT_VALUE, NONSTR must be null.

   If an unterminated array is discovered and our caller handles
   unterminated arrays, then bubble up the offending DECL and
   return the maximum size.  Otherwise return NULL.  */

static tree
get_maxval_strlen (tree arg, strlen_range_kind rkind, tree *nonstr = NULL)
{
  /* A non-null NONSTR is meaningless when determining the maximum
     value of an integer ARG.  */
  gcc_assert (rkind != SRK_INT_VALUE || nonstr == NULL);
  /* ARG must have an integral type when RKIND says so.  */
  gcc_assert (rkind != SRK_INT_VALUE || INTEGRAL_TYPE_P (TREE_TYPE (arg)));

  bitmap visited = NULL;

  /* Reset DATA.MAXLEN if the call fails or when DATA.MAXLEN
     is unbounded.  */
  c_strlen_data lendata = { };
  if (!get_range_strlen (arg, &visited, rkind, &lendata, /* eltsize = */1))
    lendata.maxlen = NULL_TREE;
  else if (lendata.maxlen && integer_all_onesp (lendata.maxlen))
    lendata.maxlen = NULL_TREE;

  if (visited)
    BITMAP_FREE (visited);

  if (nonstr)
    {
      /* For callers prepared to handle unterminated arrays set
         *NONSTR to point to the declaration of the array and return
         the maximum length/size.  */
      *nonstr = lendata.decl;
      return lendata.maxlen;
    }

  /* Fail if the constant array isn't nul-terminated.  */
  return lendata.decl ? NULL_TREE : lendata.maxlen;
}
/* Return true if LEN is known to be less than or equal to (or if STRICT is
   true, strictly less than) the lower bound of SIZE at compile time and false
   otherwise.  */

static bool
known_lower (gimple *stmt, tree len, tree size, bool strict = false)
{
  if (len == NULL_TREE)
    return false;

  wide_int size_range[2];
  wide_int len_range[2];
  if (get_range (len, stmt, len_range) && get_range (size, stmt, size_range))
    {
      if (strict)
	return wi::ltu_p (len_range[1], size_range[0]);
      else
	return wi::leu_p (len_range[1], size_range[0]);
    }

  return false;
}

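/* For illustration (a sketch, not part of the original sources): if LEN
   has the value range [0, 4] and SIZE the range [8, 16], then both
   known_lower (stmt, len, size) and known_lower (stmt, len, size, true)
   return true, because 4 <= 8 and 4 < 8 respectively.  With LEN in
   [0, 8] the strict form returns false, since 8 < 8 fails.  */
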
/* Fold function call to builtin strcpy with arguments DEST and SRC.
   If LEN is not NULL, it represents the length of the string to be
   copied.  Return false if no simplification can be made.  */

static bool
gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
			    tree dest, tree src)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (operand_equal_p (src, dest, 0))
    {
      /* Issue -Wrestrict unless the pointers are null (those do
	 not point to objects and so do not indicate an overlap;
	 such calls could be the result of sanitization and jump
	 threading).  */
      if (!integer_zerop (dest) && !warning_suppressed_p (stmt, OPT_Wrestrict))
	{
	  tree func = gimple_call_fndecl (stmt);

	  warning_at (loc, OPT_Wrestrict,
		      "%qD source argument is the same as destination",
		      func);
	}

      replace_call_with_value (gsi, dest);
      return true;
    }

  if (optimize_function_for_size_p (cfun))
    return false;

  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  /* Set to non-null if ARG refers to an unterminated array.  */
  tree nonstr = NULL;
  tree len = get_maxval_strlen (src, SRK_STRLEN, &nonstr);

  if (nonstr)
    {
      /* Avoid folding calls with unterminated arrays.  */
      if (!warning_suppressed_p (stmt, OPT_Wstringop_overread))
	warn_string_no_nul (loc, stmt, "strcpy", src, nonstr);
      suppress_warning (stmt, OPT_Wstringop_overread);
      return false;
    }

  if (!len)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}

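/* For illustration (a sketch): when the source length folds to a
   constant, e.g.

     strcpy (d, "abc");

   the call above is rewritten as

     memcpy (d, "abc", 4);

   with 4 == strlen ("abc") + 1 covering the terminating nul.  */
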
/* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
   If SLEN is not NULL, it represents the length of the source string.
   Return false if no simplification can be made.  */

static bool
gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
			     tree dest, tree src, tree len)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool nonstring = get_attr_nonstring_decl (dest) != NULL_TREE;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      /* Avoid warning if the destination refers to an array/pointer
	 decorated with attribute nonstring.  */
      if (!nonstring)
	{
	  tree fndecl = gimple_call_fndecl (stmt);

	  /* Warn about the lack of nul termination: the result is not
	     a (nul-terminated) string.  */
	  tree slen = get_maxval_strlen (src, SRK_STRLEN);
	  if (slen && !integer_zerop (slen))
	    warning_at (loc, OPT_Wstringop_truncation,
			"%qD destination unchanged after copying no bytes "
			"from a string of length %E",
			fndecl, slen);
	  else
	    warning_at (loc, OPT_Wstringop_truncation,
			"%qD destination unchanged after copying no bytes",
			fndecl);
	}

      replace_call_with_value (gsi, dest);
      return true;
    }

  /* We can't compare slen with len as constants below if len is not a
     constant.  */
  if (TREE_CODE (len) != INTEGER_CST)
    return false;

  /* Now, we must be passed a constant src ptr parameter.  */
  tree slen = get_maxval_strlen (src, SRK_STRLEN);
  if (!slen || TREE_CODE (slen) != INTEGER_CST)
    return false;

  /* The size of the source string including the terminating nul.  */
  tree ssize = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

  /* We do not support simplification of this case, though we do
     support it when expanding trees into RTL.  */
  /* FIXME: generate a call to __builtin_memset.  */
  if (tree_int_cst_lt (ssize, len))
    return false;

  /* Diagnose truncation that leaves the copy unterminated.  */
  maybe_diag_stxncpy_trunc (*gsi, src, len);

  /* OK transform into builtin memcpy.  */
  tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}

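/* For illustration (a sketch): with a constant source and bound,

     strncpy (d, "ab", 3);

   becomes

     memcpy (d, "ab", 3);

   since the bound covers the string including its terminating nul;
   a larger bound, which would require zero-filling the tail, is
   deliberately left alone.  */
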
/* Fold function call to builtin strchr or strrchr.
   If both arguments are constant, evaluate and fold the result,
   otherwise simplify str(r)chr (str, 0) into str + strlen (str).
   In general strlen is significantly faster than strchr
   due to being a simpler operation.  */
static bool
gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree str = gimple_call_arg (stmt, 0);
  tree c = gimple_call_arg (stmt, 1);
  location_t loc = gimple_location (stmt);
  const char *p;
  char ch;

  if (!gimple_call_lhs (stmt))
    return false;

  /* Avoid folding if the first argument is not a nul-terminated array.
     Defer warning until later.  */
  if (!check_nul_terminated_array (NULL_TREE, str))
    return false;

  if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
    {
      const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);

      if (p1 == NULL)
	{
	  replace_call_with_value (gsi, integer_zero_node);
	  return true;
	}

      tree len = build_int_cst (size_type_node, p1 - p);
      gimple_seq stmts = NULL;
      gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
					      POINTER_PLUS_EXPR, str, len);
      gimple_seq_add_stmt_without_update (&stmts, new_stmt);
      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  if (!integer_zerop (c))
    return false;

  /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size.  */
  if (is_strrchr && optimize_function_for_size_p (cfun))
    {
      tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);

      if (strchr_fn)
	{
	  gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}

      return false;
    }

  tree len;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);

  if (!strlen_fn)
    return false;

  /* Create newstr = strlen (str).  */
  gimple_seq stmts = NULL;
  gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
  gimple_set_location (new_stmt, loc);
  len = create_tmp_reg_or_ssa_name (size_type_node);
  gimple_call_set_lhs (new_stmt, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);

  /* Create (str p+ strlen (str)).  */
  new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
				  POINTER_PLUS_EXPR, str, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);
  gsi_replace_with_seq_vops (gsi, stmts);
  /* gsi now points at the assignment to the lhs, get a
     stmt iterator to the strlen.
     ???  We can't use gsi_for_stmt as that doesn't work when the
     CFG isn't built yet.  */
  gimple_stmt_iterator gsi2 = *gsi;
  gsi_prev (&gsi2);
  fold_stmt (&gsi2);
  return true;
}

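/* For illustration (a sketch):

     strchr ("abcde", 'd')  =>  "abcde" + 3   (both arguments constant)
     strchr (s, 0)          =>  s + strlen (s)
     strrchr (s, 0)         =>  s + strlen (s)

   using strlen because it is the cheaper operation.  */
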
/* Fold function call to builtin strstr.
   If both arguments are constant, evaluate and fold the result,
   additionally fold strstr (x, "") into x and strstr (x, "c")
   into strchr (x, 'c').  */
static bool
gimple_fold_builtin_strstr (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  if (!gimple_call_lhs (stmt))
    return false;

  tree haystack = gimple_call_arg (stmt, 0);
  tree needle = gimple_call_arg (stmt, 1);

  /* Avoid folding if either argument is not a nul-terminated array.
     Defer warning until later.  */
  if (!check_nul_terminated_array (NULL_TREE, haystack)
      || !check_nul_terminated_array (NULL_TREE, needle))
    return false;

  const char *q = c_getstr (needle);
  if (q == NULL)
    return false;

  if (const char *p = c_getstr (haystack))
    {
      const char *r = strstr (p, q);

      if (r == NULL)
	{
	  replace_call_with_value (gsi, integer_zero_node);
	  return true;
	}

      tree len = build_int_cst (size_type_node, r - p);
      gimple_seq stmts = NULL;
      gimple *new_stmt
	= gimple_build_assign (gimple_call_lhs (stmt), POINTER_PLUS_EXPR,
			       haystack, len);
      gimple_seq_add_stmt_without_update (&stmts, new_stmt);
      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* For strstr (x, "") return x.  */
  if (q[0] == '\0')
    {
      replace_call_with_value (gsi, haystack);
      return true;
    }

  /* Transform strstr (x, "c") into strchr (x, 'c').  */
  if (q[1] == '\0')
    {
      tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
      if (strchr_fn)
	{
	  tree c = build_int_cst (integer_type_node, q[0]);
	  gimple *repl = gimple_build_call (strchr_fn, 2, haystack, c);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  return false;
}

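/* For illustration (a sketch):

     strstr ("haystack", "ay")  =>  "haystack" + 1
     strstr (x, "")             =>  x
     strstr (x, "c")            =>  strchr (x, 'c')  */
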
/* Simplify a call to the strcat builtin.  DST and SRC are the arguments
   to the call.

   Return false if no simplification was possible, otherwise true.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static bool
gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);

  const char *p = c_getstr (src);

  /* If the string length is zero, return the dst parameter.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
    return false;

  /* See if we can store by pieces into (dst + strlen(dst)).  */
  tree newdst;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
  tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);

  if (!strlen_fn || !memcpy_fn)
    return false;

  /* If the length of the source string isn't computable don't
     split strcat into strlen and memcpy.  */
  tree len = get_maxval_strlen (src, SRK_STRLEN);
  if (!len)
    return false;

  /* Create strlen (dst).  */
  gimple_seq stmts = NULL, stmts2;
  gimple *repl = gimple_build_call (strlen_fn, 1, dst);
  gimple_set_location (repl, loc);
  newdst = create_tmp_reg_or_ssa_name (size_type_node);
  gimple_call_set_lhs (repl, newdst);
  gimple_seq_add_stmt_without_update (&stmts, repl);

  /* Create (dst p+ strlen (dst)).  */
  newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
  newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len,
			build_int_cst (size_type_node, 1));
  len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
  gimple_seq_add_stmt_without_update (&stmts, repl);
  if (gimple_call_lhs (stmt))
    {
      repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      gsi_replace_with_seq_vops (gsi, stmts);
      /* gsi now points at the assignment to the lhs, get a
	 stmt iterator to the memcpy call.
	 ???  We can't use gsi_for_stmt as that doesn't work when the
	 CFG isn't built yet.  */
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (&gsi2);
      fold_stmt (&gsi2);
    }
  else
    {
      gsi_replace_with_seq_vops (gsi, stmts);
      fold_stmt (gsi);
    }
  return true;
}

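/* For illustration (a sketch): when strlen (SRC) is known to be 2,

     strcat (d, "xy");

   is split, in effect, into

     tmp = strlen (d);
     memcpy (d + tmp, "xy", 3);   // 3 == strlen ("xy") + 1  */
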
/* Fold a call to the __strcat_chk builtin FNDECL.  DEST, SRC, and SIZE
   are the arguments to the call.  */

static bool
gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree size = gimple_call_arg (stmt, 2);
  tree fn;
  const char *p;

  p = c_getstr (src);
  /* If the SRC parameter is "", return DEST.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
    return false;

  /* If __builtin_strcat_chk is used, assume strcat is available.  */
  fn = builtin_decl_explicit (BUILT_IN_STRCAT);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 2, dest, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}

/* Simplify a call to the strncat builtin.  */

static bool
gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dst = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);
  tree src_len = c_strlen (src, 1);

  /* If the requested length is zero, or the src parameter string
     length is zero, return the dst parameter.  */
  if (integer_zerop (len) || (src_len && integer_zerop (src_len)))
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  /* Return early if the requested len is less than the string length.
     Warnings will be issued elsewhere later.  */
  if (!src_len || known_lower (stmt, len, src_len, true))
    return false;

  unsigned HOST_WIDE_INT dstsize;
  bool found_dstsize = compute_builtin_object_size (dst, 1, &dstsize);

  /* Warn on constant LEN.  */
  if (TREE_CODE (len) == INTEGER_CST)
    {
      bool nowarn = warning_suppressed_p (stmt, OPT_Wstringop_overflow_);

      if (!nowarn && found_dstsize)
	{
	  int cmpdst = compare_tree_int (len, dstsize);

	  if (cmpdst >= 0)
	    {
	      tree fndecl = gimple_call_fndecl (stmt);

	      /* Strncat copies (at most) LEN bytes and always appends
		 the terminating NUL so the specified bound should never
		 be equal to (or greater than) the size of the destination.
		 If it is, the copy could overflow.  */
	      location_t loc = gimple_location (stmt);
	      nowarn = warning_at (loc, OPT_Wstringop_overflow_,
				   cmpdst == 0
				   ? G_("%qD specified bound %E equals "
					"destination size")
				   : G_("%qD specified bound %E exceeds "
					"destination size %wu"),
				   fndecl, len, dstsize);
	      if (nowarn)
		suppress_warning (stmt, OPT_Wstringop_overflow_);
	    }
	}

      if (!nowarn && TREE_CODE (src_len) == INTEGER_CST
	  && tree_int_cst_compare (src_len, len) == 0)
	{
	  tree fndecl = gimple_call_fndecl (stmt);
	  location_t loc = gimple_location (stmt);

	  /* To avoid possible overflow the specified bound should also
	     not be equal to the length of the source, even when the size
	     of the destination is unknown (it's not an uncommon mistake
	     to specify as the bound to strncpy the length of the source).  */
	  if (warning_at (loc, OPT_Wstringop_overflow_,
			  "%qD specified bound %E equals source length",
			  fndecl, len))
	    suppress_warning (stmt, OPT_Wstringop_overflow_);
	}
    }

  if (!known_lower (stmt, src_len, len))
    return false;

  tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);

  /* If the replacement _DECL isn't initialized, don't do the
     transformation.  */
  if (!fn)
    return false;

  /* Otherwise, emit a call to strcat.  */
  gcall *repl = gimple_build_call (fn, 2, dst, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}

/* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
   LEN, and SIZE.  */

static bool
gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);
  tree size = gimple_call_arg (stmt, 3);
  tree fn;
  const char *p;

  p = c_getstr (src);
  /* If the SRC parameter is "" or if LEN is 0, return DEST.  */
  if ((p && *p == '\0')
      || integer_zerop (len))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! integer_all_onesp (size))
    {
      tree src_len = c_strlen (src, 1);
      if (known_lower (stmt, src_len, len))
	{
	  /* If LEN >= strlen (SRC), optimize into __strcat_chk.  */
	  fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
	  if (!fn)
	    return false;

	  gimple *repl = gimple_build_call (fn, 3, dest, src, size);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
      return false;
    }

  /* If __builtin_strncat_chk is used, assume strncat is available.  */
  fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}

/* Build and append gimple statements to STMTS that would load a first
   character of a memory location identified by STR.  LOC is location
   of the statement.  */

static tree
gimple_load_first_char (location_t loc, tree str, gimple_seq *stmts)
{
  tree var;

  tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
  tree cst_uchar_ptr_node
    = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
  tree off0 = build_int_cst (cst_uchar_ptr_node, 0);

  tree temp = fold_build2_loc (loc, MEM_REF, cst_uchar_node, str, off0);
  gassign *stmt = gimple_build_assign (NULL_TREE, temp);
  var = create_tmp_reg_or_ssa_name (cst_uchar_node, stmt);

  gimple_assign_set_lhs (stmt, var);
  gimple_seq_add_stmt_without_update (stmts, stmt);

  return var;
}

/* Fold a call to the str{n}{case}cmp builtin pointed by GSI iterator.  */

static bool
gimple_fold_builtin_string_compare (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree callee = gimple_call_fndecl (stmt);
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);

  tree type = integer_type_node;
  tree str1 = gimple_call_arg (stmt, 0);
  tree str2 = gimple_call_arg (stmt, 1);
  tree lhs = gimple_call_lhs (stmt);

  tree bound_node = NULL_TREE;
  unsigned HOST_WIDE_INT bound = HOST_WIDE_INT_M1U;

  /* Handle strncmp and strncasecmp functions.  */
  if (gimple_call_num_args (stmt) == 3)
    {
      bound_node = gimple_call_arg (stmt, 2);
      if (tree_fits_uhwi_p (bound_node))
	bound = tree_to_uhwi (bound_node);
    }

  /* If the BOUND parameter is zero, return zero.  */
  if (bound == 0)
    {
      replace_call_with_value (gsi, integer_zero_node);
      return true;
    }

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (str1, str2, 0))
    {
      replace_call_with_value (gsi, integer_zero_node);
      return true;
    }

  /* Initially set to the number of characters, including the terminating
     nul if each array has one.  LENx == strnlen (Sx, LENx) implies that
     the array Sx is not terminated by a nul.
     For nul-terminated strings then adjusted to their length so that
     LENx == NULPOSx holds.  */
  unsigned HOST_WIDE_INT len1 = HOST_WIDE_INT_MAX, len2 = len1;
  const char *p1 = getbyterep (str1, &len1);
  const char *p2 = getbyterep (str2, &len2);

  /* The position of the terminating nul character if one exists, otherwise
     a value greater than LENx.  */
  unsigned HOST_WIDE_INT nulpos1 = HOST_WIDE_INT_MAX, nulpos2 = nulpos1;

  if (p1)
    {
      size_t n = strnlen (p1, len1);
      if (n < len1)
	len1 = nulpos1 = n;
    }

  if (p2)
    {
      size_t n = strnlen (p2, len2);
      if (n < len2)
	len2 = nulpos2 = n;
    }

  /* For known strings, return an immediate value.  */
  if (p1 && p2)
    {
      int r = 0;
      bool known_result = false;

      switch (fcode)
	{
	case BUILT_IN_STRCMP:
	case BUILT_IN_STRCMP_EQ:
	  if (len1 != nulpos1 || len2 != nulpos2)
	    break;

	  r = strcmp (p1, p2);
	  known_result = true;
	  break;

	case BUILT_IN_STRNCMP:
	case BUILT_IN_STRNCMP_EQ:
	  {
	    if (bound == HOST_WIDE_INT_M1U)
	      break;

	    /* Reduce the bound to be no more than the length
	       of the shorter of the two strings, or the sizes
	       of the unterminated arrays.  */
	    unsigned HOST_WIDE_INT n = bound;

	    if (len1 == nulpos1 && len1 < n)
	      n = len1 + 1;
	    if (len2 == nulpos2 && len2 < n)
	      n = len2 + 1;

	    if (MIN (nulpos1, nulpos2) + 1 < n)
	      break;

	    r = strncmp (p1, p2, n);
	    known_result = true;
	    break;
	  }
	/* Only handleable situation is where the strings are equal (result 0),
	   which is already handled by operand_equal_p case.  */
	case BUILT_IN_STRCASECMP:
	  break;
	case BUILT_IN_STRNCASECMP:
	  {
	    if (bound == HOST_WIDE_INT_M1U)
	      break;
	    r = strncmp (p1, p2, bound);
	    if (r == 0)
	      known_result = true;
	    break;
	  }
	default:
	  gcc_unreachable ();
	}

      if (known_result)
	{
	  replace_call_with_value (gsi, build_cmp_result (type, r));
	  return true;
	}
    }

  bool nonzero_bound = (bound >= 1 && bound < HOST_WIDE_INT_M1U)
		       || fcode == BUILT_IN_STRCMP
		       || fcode == BUILT_IN_STRCMP_EQ
		       || fcode == BUILT_IN_STRCASECMP;

  location_t loc = gimple_location (stmt);

  /* If the second arg is "", return *(const unsigned char*)arg1.  */
  if (p2 && *p2 == '\0' && nonzero_bound)
    {
      gimple_seq stmts = NULL;
      tree var = gimple_load_first_char (loc, str1, &stmts);
      if (lhs)
	{
	  stmt = gimple_build_assign (lhs, NOP_EXPR, var);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);
	}

      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* If the first arg is "", return -*(const unsigned char*)arg2.  */
  if (p1 && *p1 == '\0' && nonzero_bound)
    {
      gimple_seq stmts = NULL;
      tree var = gimple_load_first_char (loc, str2, &stmts);

      if (lhs)
	{
	  tree c = create_tmp_reg_or_ssa_name (integer_type_node);
	  stmt = gimple_build_assign (c, NOP_EXPR, var);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);

	  stmt = gimple_build_assign (lhs, NEGATE_EXPR, c);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);
	}

      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* If BOUND is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - *(const unsigned char*)arg2).  */
  if (fcode == BUILT_IN_STRNCMP && bound == 1)
    {
      gimple_seq stmts = NULL;
      tree temp1 = gimple_load_first_char (loc, str1, &stmts);
      tree temp2 = gimple_load_first_char (loc, str2, &stmts);

      if (lhs)
	{
	  tree c1 = create_tmp_reg_or_ssa_name (integer_type_node);
	  gassign *convert1 = gimple_build_assign (c1, NOP_EXPR, temp1);
	  gimple_seq_add_stmt_without_update (&stmts, convert1);

	  tree c2 = create_tmp_reg_or_ssa_name (integer_type_node);
	  gassign *convert2 = gimple_build_assign (c2, NOP_EXPR, temp2);
	  gimple_seq_add_stmt_without_update (&stmts, convert2);

	  stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);
	}

      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* If BOUND is greater than the length of one constant string,
     and the other argument is also a nul-terminated string, replace
     strncmp with strcmp.  */
  if (fcode == BUILT_IN_STRNCMP
      && bound > 0 && bound < HOST_WIDE_INT_M1U
      && ((p2 && len2 < bound && len2 == nulpos2)
	  || (p1 && len1 < bound && len1 == nulpos1)))
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCMP);
      if (!fn)
	return false;
      gimple *repl = gimple_build_call (fn, 2, str1, str2);
      replace_call_with_call_and_fold (gsi, repl);
      return true;
    }

  return false;
}

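/* For illustration (a few sketches):

     strcmp (s, s)         =>  0
     strcmp ("ab", "ac")   =>  negative constant
     strcmp (s, "")        =>  *(const unsigned char *) s
     strncmp (s, t, 0)     =>  0
     strncmp (s, t, 1)     =>  *(const unsigned char *) s
			       - *(const unsigned char *) t
     strncmp (s, "ab", 9)  =>  strcmp (s, "ab")   // bound > length + 1  */
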
/* Fold a call to the memchr pointed by GSI iterator.  */

static bool
gimple_fold_builtin_memchr (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  tree arg1 = gimple_call_arg (stmt, 0);
  tree arg2 = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
      return true;
    }

  char c;
  if (TREE_CODE (arg2) != INTEGER_CST
      || !tree_fits_uhwi_p (len)
      || !target_char_cst_p (arg2, &c))
    return false;

  unsigned HOST_WIDE_INT length = tree_to_uhwi (len);
  unsigned HOST_WIDE_INT string_length;
  const char *p1 = getbyterep (arg1, &string_length);

  if (p1)
    {
      const char *r = (const char *)memchr (p1, c, MIN (length, string_length));
      if (r == NULL)
	{
	  tree mem_size, offset_node;
	  byte_representation (arg1, &offset_node, &mem_size, NULL);
	  unsigned HOST_WIDE_INT offset = (offset_node == NULL_TREE)
					  ? 0 : tree_to_uhwi (offset_node);
	  /* MEM_SIZE is the size of the array the string literal
	     is stored in.  */
	  unsigned HOST_WIDE_INT string_size = tree_to_uhwi (mem_size) - offset;
	  gcc_checking_assert (string_length <= string_size);
	  if (length <= string_size)
	    {
	      replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
	      return true;
	    }
	}
      else
	{
	  unsigned HOST_WIDE_INT offset = r - p1;
	  gimple_seq stmts = NULL;
	  if (lhs != NULL_TREE)
	    {
	      tree offset_cst = build_int_cst (sizetype, offset);
	      gassign *stmt = gimple_build_assign (lhs, POINTER_PLUS_EXPR,
						   arg1, offset_cst);
	      gimple_seq_add_stmt_without_update (&stmts, stmt);
	    }
	  else
	    gimple_seq_add_stmt_without_update (&stmts,
						gimple_build_nop ());

	  gsi_replace_with_seq_vops (gsi, stmts);
	  return true;
	}
    }

  return false;
}

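/* For illustration (a sketch):

     memchr (p, c, 0)         =>  null pointer
     memchr ("abcd", 'c', 4)  =>  "abcd" + 2
     memchr ("abcd", 'x', 4)  =>  null pointer (whole object searched)  */
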
/* Fold a call to the fputs builtin.  ARG0 and ARG1 are the arguments
   to the call.  IGNORE is true if the value returned
   by the builtin will be ignored.  UNLOCKED is true if this is
   actually a call to fputs_unlocked.  If LEN is non-NULL, it represents
   the known length of the string.  Return false if no simplification
   was possible.  */

static bool
gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
			   tree arg0, tree arg1,
			   bool unlocked)
{
  gimple *stmt = gsi_stmt (*gsi);

  /* If we're using an unlocked function, assume the other unlocked
     functions exist explicitly.  */
  tree const fn_fputc = (unlocked
			 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
			 : builtin_decl_implicit (BUILT_IN_FPUTC));
  tree const fn_fwrite = (unlocked
			  ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
			  : builtin_decl_implicit (BUILT_IN_FWRITE));

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt))
    return false;

  /* Get the length of the string passed to fputs.  If the length
     can't be determined, punt.  */
  tree len = get_maxval_strlen (arg0, SRK_STRLEN);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return false;

  switch (compare_tree_int (len, 1))
    {
    case -1: /* length is 0, delete the call entirely.  */
      replace_call_with_value (gsi, integer_zero_node);
      return true;

    case 0: /* length is 1, call fputc.  */
      {
	const char *p = c_getstr (arg0);
	if (p != NULL)
	  {
	    if (!fn_fputc)
	      return false;

	    gimple *repl = gimple_build_call (fn_fputc, 2,
					      build_int_cst
					      (integer_type_node, p[0]), arg1);
	    replace_call_with_call_and_fold (gsi, repl);
	    return true;
	  }
      }
      /* FALLTHROUGH */
    case 1: /* length is greater than 1, call fwrite.  */
      {
	/* If optimizing for size keep fputs.  */
	if (optimize_function_for_size_p (cfun))
	  return false;
	/* New argument list transforming fputs(string, stream) to
	   fwrite(string, 1, len, stream).  */
	if (!fn_fwrite)
	  return false;

	gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
					  size_one_node, len, arg1);
	replace_call_with_call_and_fold (gsi, repl);
	return true;
      }
    default:
      gcc_unreachable ();
    }
}

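/* For illustration (a sketch), applied when the result is unused and
   the string length is a known constant:

     fputs ("", f)   =>  (call removed)
     fputs ("x", f)  =>  fputc ('x', f)
     fputs (s, f)    =>  fwrite (s, 1, len, f)   // len > 1, unless
						 // optimizing for size  */
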
/* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   DEST, SRC, LEN, and SIZE are the arguments to the call.
   IGNORE is true, if return value can be ignored.  FCODE is the BUILT_IN_*
   code of the builtin.  If MAXLEN is not NULL, it is maximum length
   passed as third argument.  */

static bool
gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
				tree dest, tree src, tree len, tree size,
				enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST
     (resp. DEST+LEN for __mempcpy_chk).  */
  if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
    {
      if (fcode != BUILT_IN_MEMPCPY_CHK)
	{
	  replace_call_with_value (gsi, dest);
	  return true;
	}
      else
	{
	  gimple_seq stmts = NULL;
	  len = gimple_convert_to_ptrofftype (&stmts, loc, len);
	  tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
				    TREE_TYPE (dest), dest, len);
	  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
	  replace_call_with_value (gsi, temp);
	  return true;
	}
    }

  tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
  if (! integer_all_onesp (size)
      && !known_lower (stmt, len, size)
      && !known_lower (stmt, maxlen, size))
    {
      /* MAXLEN and LEN both cannot be proved to be less than SIZE, at
	 least try to optimize (void) __mempcpy_chk () into
	 (void) __memcpy_chk ().  */
      if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
	{
	  fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	  if (!fn)
	    return false;

	  gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
      return false;
    }

  /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
     mem{cpy,pcpy,move,set} is available.  */
  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
      break;
    case BUILT_IN_MEMPCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
      break;
    case BUILT_IN_MEMMOVE_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
      break;
    case BUILT_IN_MEMSET_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMSET);
      break;
    default:
      gcc_unreachable ();
    }

  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}

/* Print a message in the dump file recording transformation of FROM to TO.  */

static void
dump_transformation (gcall *from, gcall *to)
{
  if (dump_enabled_p ())
    dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, from, "simplified %T to %T\n",
		     gimple_call_fn (from), gimple_call_fn (to));
}

/* Fold a call to the __st[rp]cpy_chk builtin.
   DEST, SRC, and SIZE are the arguments to the call.
   IGNORE is true if return value can be ignored.  FCODE is the BUILT_IN_*
   code of the builtin.  If MAXLEN is not NULL, it is maximum length of
   strings passed as second argument.  */

static bool
gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
				tree dest,
				tree src, tree size,
				enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree len, fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
    {
      /* Issue -Wrestrict unless the pointers are null (those do
	 not point to objects and so do not indicate an overlap;
	 such calls could be the result of sanitization and jump
	 threading).  */
      if (!integer_zerop (dest)
	  && !warning_suppressed_p (stmt, OPT_Wrestrict))
	{
	  tree func = gimple_call_fndecl (stmt);

	  warning_at (loc, OPT_Wrestrict,
		      "%qD source argument is the same as destination",
		      func);
	}

      replace_call_with_value (gsi, dest);
      return true;
    }

  tree maxlen = get_maxval_strlen (src, SRK_STRLENMAX);
  if (! integer_all_onesp (size))
    {
      len = c_strlen (src, 1);
      if (!known_lower (stmt, len, size, true)
	  && !known_lower (stmt, maxlen, size, true))
	{
	  if (fcode == BUILT_IN_STPCPY_CHK)
	    {
	      if (! ignore)
		return false;

	      /* If return value of __stpcpy_chk is ignored,
		 optimize into __strcpy_chk.  */
	      fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
	      if (!fn)
		return false;

	      gimple *repl = gimple_build_call (fn, 3, dest, src, size);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }

	  if (! len || TREE_SIDE_EFFECTS (len))
	    return false;

	  /* If c_strlen returned something, but not provably less than size,
	     transform __strcpy_chk into __memcpy_chk.  */
	  fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	  if (!fn)
	    return false;

	  gimple_seq stmts = NULL;
	  len = force_gimple_operand (len, &stmts, true, NULL_TREE);
	  len = gimple_convert (&stmts, loc, size_type_node, len);
	  len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
			      build_int_cst (size_type_node, 1));
	  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
	  gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK && !ignore
			      ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
  if (!fn)
    return false;

  gcall *repl = gimple_build_call (fn, 2, dest, src);
  dump_transformation (stmt, repl);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}

/* Fold a call to the __st{r,p}ncpy_chk builtin.  DEST, SRC, LEN, and SIZE
   are the arguments to the call.  If MAXLEN is not NULL, it is maximum
   length passed as third argument.  IGNORE is true if return value can be
   ignored.  FCODE is the BUILT_IN_* code of the builtin.  */

static bool
gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
				 tree dest, tree src,
				 tree len, tree size,
				 enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
  if (! integer_all_onesp (size)
      && !known_lower (stmt, len, size) && !known_lower (stmt, maxlen, size))
    {
      if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
	{
	  /* If return value of __stpncpy_chk is ignored,
	     optimize into __strncpy_chk.  */
	  fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
	  if (fn)
	    {
	      gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }
	}
      return false;
    }

  /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK && !ignore
			      ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
  if (!fn)
    return false;

  gcall *repl = gimple_build_call (fn, 3, dest, src, len);
  dump_transformation (stmt, repl);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}

/* Fold function call to builtin stpcpy with arguments DEST and SRC.
   Return false if no simplification can be made.  */

static bool
gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  location_t loc = gimple_location (stmt);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree fn, lenp1;

  /* If the result is unused, replace stpcpy with strcpy.  */
  if (gimple_call_lhs (stmt) == NULL_TREE)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;
      gimple_call_set_fndecl (stmt, fn);
      fold_stmt (gsi);
      return true;
    }

  /* Set to non-null if ARG refers to an unterminated array.  */
  c_strlen_data data = { };
  /* The size of the unterminated array if SRC refers to one.  */
  tree size;
  /* True if the size is exact/constant, false if it's the lower bound
     of a range.  */
  bool exact;
  tree len = c_strlen (src, 1, &data, 1);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    {
      data.decl = unterminated_array (src, &size, &exact);
      if (!data.decl)
	return false;
    }

  if (data.decl)
    {
      /* Avoid folding calls with unterminated arrays.  */
      if (!warning_suppressed_p (stmt, OPT_Wstringop_overread))
	warn_string_no_nul (loc, stmt, "stpcpy", src, data.decl, size,
			    exact);
      suppress_warning (stmt, OPT_Wstringop_overread);
      return false;
    }

  if (optimize_function_for_size_p (cfun)
      /* If length is zero it's small enough.  */
      && !integer_zerop (len))
    return false;

  /* If the source has a known length replace stpcpy with memcpy.  */
  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  gimple_seq stmts = NULL;
  tree tem = gimple_convert (&stmts, loc, size_type_node, len);
  lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
			tem, build_int_cst (size_type_node, 1));
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
  gimple_move_vops (repl, stmt);
  gsi_insert_before (gsi, repl, GSI_SAME_STMT);
  /* Replace the result with dest + len.  */
  stmts = NULL;
  tem = gimple_convert (&stmts, loc, sizetype, len);
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
				      POINTER_PLUS_EXPR, dest, tem);
  gsi_replace (gsi, ret, false);
  /* Finally fold the memcpy call.  */
  gimple_stmt_iterator gsi2 = *gsi;
  gsi_prev (&gsi2);
  fold_stmt (&gsi2);
  return true;
}

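/* For illustration (a sketch): when strlen (SRC) folds to a constant N,

     r = stpcpy (d, s);

   becomes

     memcpy (d, s, N + 1);
     r = d + N;

   and plain strcpy is used instead when the result is unused.  */
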
/* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS.  Return
   false if a normal call should be emitted rather than expanding
   the function inline.  FCODE is either BUILT_IN_SNPRINTF_CHK or
   BUILT_IN_VSNPRINTF_CHK.  If MAXLEN is not NULL, it is maximum length
   passed as second argument.  */

static bool
gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
				  enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call.  */
  if (gimple_call_num_args (stmt) < 5)
    return false;

  dest = gimple_call_arg (stmt, 0);
  len = gimple_call_arg (stmt, 1);
  flag = gimple_call_arg (stmt, 2);
  size = gimple_call_arg (stmt, 3);
  fmt = gimple_call_arg (stmt, 4);

  tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
  if (! integer_all_onesp (size)
      && !known_lower (stmt, len, size) && !known_lower (stmt, maxlen, size))
    return false;

  if (!init_target_chars ())
    return false;

  /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      fmt_str = c_getstr (fmt);
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
     available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
			      ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 5 arguments by 3 retaining
     trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, len);
  gimple_call_set_arg (stmt, 2, fmt);
  for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}

/* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
   Return false if a normal call should be emitted rather than
   expanding the function inline.  FCODE is either BUILT_IN_SPRINTF_CHK
   or BUILT_IN_VSPRINTF_CHK.  */

static bool
gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
				 enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;
  unsigned nargs = gimple_call_num_args (stmt);

  /* Verify the required arguments in the original call.  */
  if (nargs < 4)
    return false;
  dest = gimple_call_arg (stmt, 0);
  flag = gimple_call_arg (stmt, 1);
  size = gimple_call_arg (stmt, 2);
  fmt = gimple_call_arg (stmt, 3);

  len = NULL_TREE;

  if (!init_target_chars ())
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str != NULL)
    {
      /* If the format doesn't contain % args or %%, we know the size.  */
      if (strchr (fmt_str, target_percent) == 0)
	{
	  if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
	    len = build_int_cstu (size_type_node, strlen (fmt_str));
	}
      /* If the format is "%s" and first ... argument is a string literal,
	 we know the size too.  */
      else if (fcode == BUILT_IN_SPRINTF_CHK
	       && strcmp (fmt_str, target_percent_s) == 0)
	{
	  tree arg;

	  if (nargs == 5)
	    {
	      arg = gimple_call_arg (stmt, 4);
	      if (POINTER_TYPE_P (TREE_TYPE (arg)))
		len = c_strlen (arg, 1);
	    }
	}
    }

  if (! integer_all_onesp (size) && !known_lower (stmt, len, size, true))
    return false;

  /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
			      ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 4 arguments by 2 retaining
     trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, fmt);
  for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}

/* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
   ORIG may be null if this is a 2-argument call.  We don't attempt to
   simplify calls with more than 3 arguments.

   Return true if simplification was possible, otherwise false.  */

static bool
gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);

  /* Verify the required arguments in the original call.  We deal with two
     types of sprintf() calls: 'sprintf (str, fmt)' and
     'sprintf (dest, "%s", orig)'.  */
  if (gimple_call_num_args (stmt) > 3)
    return false;

  tree orig = NULL_TREE;
  if (gimple_call_num_args (stmt) == 3)
    orig = gimple_call_arg (stmt, 2);

  /* Check whether the format is a literal string constant.  */
  tree fmt = gimple_call_arg (stmt, 1);
  const char *fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  tree dest = gimple_call_arg (stmt, 0);

  if (!init_target_chars ())
    return false;

  tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
  if (!fn)
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      /* Don't optimize sprintf (buf, "abc", ptr++).  */
      if (orig)
	return false;

      /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
	 'format' is known to contain no % formats.  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, fmt);

      /* Propagate the NO_WARNING bit to avoid issuing the same
	 warning more than once.  */
      copy_warning (repl, stmt);

      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (tree lhs = gimple_call_lhs (stmt))
	{
	  repl = gimple_build_assign (lhs, build_int_cst (TREE_TYPE (lhs),
							  strlen (fmt_str)));
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      /* Don't crash on sprintf (str1, "%s").  */
      if (!orig)
	return false;

      /* Don't fold calls with source arguments of invalid (nonpointer)
	 types.  */
      if (!POINTER_TYPE_P (TREE_TYPE (orig)))
	return false;

      tree orig_len = NULL_TREE;
      if (gimple_call_lhs (stmt))
	{
	  orig_len = get_maxval_strlen (orig, SRK_STRLEN);
	  if (!orig_len)
	    return false;
	}

      /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2).  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, orig);

      /* Propagate the NO_WARNING bit to avoid issuing the same
	 warning more than once.  */
      copy_warning (repl, stmt);

      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (tree lhs = gimple_call_lhs (stmt))
	{
	  if (!useless_type_conversion_p (TREE_TYPE (lhs),
					  TREE_TYPE (orig_len)))
	    orig_len = fold_convert (TREE_TYPE (lhs), orig_len);
	  repl = gimple_build_assign (lhs, orig_len);
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }

  return false;
}

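/* For illustration (a sketch):

     sprintf (d, "abc")    =>  strcpy (d, "abc")   // lhs, if any, set to 3
     sprintf (d, "%s", s)  =>  strcpy (d, s)       // lhs, if any, needs a
						   // known strlen (s)  */
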
/* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
   FMT, and ORIG.  ORIG may be null if this is a 3-argument call.  We don't
   attempt to simplify calls with more than 4 arguments.

   Return true if simplification was possible, otherwise false.  */

static bool
gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest = gimple_call_arg (stmt, 0);
  tree destsize = gimple_call_arg (stmt, 1);
  tree fmt = gimple_call_arg (stmt, 2);
  tree orig = NULL_TREE;
  const char *fmt_str = NULL;

  if (gimple_call_num_args (stmt) > 4)
    return false;

  if (gimple_call_num_args (stmt) == 4)
    orig = gimple_call_arg (stmt, 3);

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;

      /* Don't optimize snprintf (buf, 4, "abc", ptr++).  */
      if (orig)
	return false;

      tree len = build_int_cstu (TREE_TYPE (destsize), strlen (fmt_str));

      /* We could expand this as
	 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
	 or to
	 memcpy (str, fmt_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      if (!known_lower (stmt, len, destsize, true))
	return false;

      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, fmt);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (tree lhs = gimple_call_lhs (stmt))
	{
	  repl = gimple_build_assign (lhs,
				      fold_convert (TREE_TYPE (lhs), len));
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;

      /* Don't crash on snprintf (str1, cst, "%s").  */
      if (!orig)
	return false;

      tree orig_len = get_maxval_strlen (orig, SRK_STRLEN);

      /* We could expand this as
	 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
	 or to
	 memcpy (str1, str2_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      if (!known_lower (stmt, orig_len, destsize, true))
	return false;

      /* Convert snprintf (str1, cst, "%s", str2) into
	 strcpy (str1, str2) if strlen (str2) < cst.  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, orig);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (tree lhs = gimple_call_lhs (stmt))
	{
	  if (!useless_type_conversion_p (TREE_TYPE (lhs),
					  TREE_TYPE (orig_len)))
	    orig_len = fold_convert (TREE_TYPE (lhs), orig_len);
	  repl = gimple_build_assign (lhs, orig_len);
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }

  return false;
}

/* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
   FP, FMT, and ARG are the arguments to the call.  We don't fold calls with
   more than 3 arguments, and ARG may be null in the 2-argument case.

   Return false if no simplification was possible, otherwise true.
   FCODE is the BUILT_IN_* code of the function to be simplified.  */

static bool
gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
			     tree fp, tree fmt, tree arg,
			     enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree fn_fputc, fn_fputs;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt) != NULL_TREE)
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
      fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
    }
  else
    {
      fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
      fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
    }

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use fputs.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
	  && arg)
	return false;

      /* If the format specifier was "", fprintf does nothing.  */
      if (fmt_str[0] == '\0')
	{
	  replace_call_with_value (gsi, NULL_TREE);
	  return true;
	}

      /* When "string" doesn't contain %, replace all cases of
	 fprintf (fp, string) with fputs (string, fp).  The fputs
	 builtin will take care of special cases like length == 1.  */
      if (fn_fputs)
	{
	  gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
    return false;

  /* If the format specifier was "%s", call __builtin_fputs (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_s) == 0)
    {
      if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	return false;
      if (fn_fputs)
	{
	  gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* If the format specifier was "%c", call __builtin_fputc (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg
	  || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
	return false;
      if (fn_fputc)
	{
	  gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  return false;
}

/* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
   FMT and ARG are the arguments to the call; we don't fold cases with
   more than 2 arguments, and ARG may be null if this is a 1-argument case.

   Return false if no simplification was possible, otherwise true.
   FCODE is the BUILT_IN_* code of the function to be simplified.  */

static bool
gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
			    tree arg, enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree fn_putchar, fn_puts, newarg;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt) != NULL_TREE)
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (fcode == BUILT_IN_PRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
      fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
    }
  else
    {
      fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
      fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
    }

  if (!init_target_chars ())
    return false;

  if (strcmp (fmt_str, target_percent_s) == 0
      || strchr (fmt_str, target_percent) == NULL)
    {
      const char *str;

      if (strcmp (fmt_str, target_percent_s) == 0)
	{
	  if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
	    return false;

	  if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	    return false;

	  str = c_getstr (arg);
	  if (str == NULL)
	    return false;
	}
      else
	{
	  /* The format specifier doesn't contain any '%' characters.  */
	  if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
	      && arg)
	    return false;
	  str = fmt_str;
	}

      /* If the string was "", printf does nothing.  */
      if (str[0] == '\0')
	{
	  replace_call_with_value (gsi, NULL_TREE);
	  return true;
	}

      /* If the string has length of 1, call putchar.  */
      if (str[1] == '\0')
	{
	  /* Given printf("c"), (where c is any one character,)
	     convert "c"[0] to an int and pass that to the replacement
	     function.  */
	  newarg = build_int_cst (integer_type_node, str[0]);
	  if (fn_putchar)
	    {
	      gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }
	}
      else
	{
	  /* If the string was "string\n", call puts("string").  */
	  size_t len = strlen (str);
	  if ((unsigned char)str[len - 1] == target_newline
	      && (size_t) (int) len == len
	      && (int) len > 0)
	    {
	      char *newstr;

	      /* Create a NUL-terminated string that's one char shorter
		 than the original, stripping off the trailing '\n'.  */
	      newstr = xstrdup (str);
	      newstr[len - 1] = '\0';
	      newarg = build_string_literal (len, newstr);
	      free (newstr);
	      if (fn_puts)
		{
		  gcall *repl = gimple_build_call (fn_puts, 1, newarg);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}
	    }
	  else
	    /* We'd like to arrange to call fputs(string,stdout) here,
	       but we need stdout and don't have a way to get it yet.  */
	    return false;
	}
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
    return false;

  /* If the format specifier was "%s\n", call __builtin_puts(arg).  */
  else if (strcmp (fmt_str, target_percent_s_newline) == 0)
    {
      if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	return false;
      if (fn_puts)
	{
	  gcall *repl = gimple_build_call (fn_puts, 1, arg);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* If the format specifier was "%c", call __builtin_putchar(arg).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg || ! useless_type_conversion_p (integer_type_node,
					       TREE_TYPE (arg)))
	return false;
      if (fn_putchar)
	{
	  gcall *repl = gimple_build_call (fn_putchar, 1, arg);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  return false;
}

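/* For illustration (a sketch), all with an unused result:

     printf ("")         =>  (call removed)
     printf ("x")        =>  putchar ('x')
     printf ("str\n")    =>  puts ("str")
     printf ("%s\n", s)  =>  puts (s)
     printf ("%c", c)    =>  putchar (c)  */
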
/* Fold a call to __builtin_strlen with known length LEN.  */

static bool
gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree arg = gimple_call_arg (stmt, 0);

  wide_int minlen;
  wide_int maxlen;

  c_strlen_data lendata = { };
  if (get_range_strlen (arg, &lendata, /* eltsize = */ 1)
      && !lendata.decl
      && lendata.minlen && TREE_CODE (lendata.minlen) == INTEGER_CST
      && lendata.maxlen && TREE_CODE (lendata.maxlen) == INTEGER_CST)
    {
      /* The range of lengths refers to either a single constant
	 string or to the longest and shortest constant string
	 referenced by the argument of the strlen() call, or to
	 the strings that can possibly be stored in the arrays
	 the argument refers to.  */
      minlen = wi::to_wide (lendata.minlen);
      maxlen = wi::to_wide (lendata.maxlen);
    }
  else
    {
      unsigned prec = TYPE_PRECISION (sizetype);

      minlen = wi::shwi (0, prec);
      maxlen = wi::to_wide (max_object_size (), prec) - 2;
    }

  if (minlen == maxlen)
    {
      /* Fold the strlen call to a constant.  */
      tree type = TREE_TYPE (lendata.minlen);
      tree len = force_gimple_operand_gsi (gsi,
					   wide_int_to_tree (type, minlen),
					   true, NULL, true, GSI_SAME_STMT);
      replace_call_with_value (gsi, len);
      return true;
    }

  /* Set the strlen() range to [0, MAXLEN].  */
  if (tree lhs = gimple_call_lhs (stmt))
    set_strlen_range (lhs, minlen, maxlen);

  return false;
}

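/* For illustration (a sketch): given

     char a[] = "abcd";
     ... strlen (a) ...

   the call folds to the constant 4; when only a range such as [0, 7]
   is known, that range is recorded on the call's result for use by
   later passes instead.  */
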
/* Fold a call to __builtin_acc_on_device.  */

static bool
gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
{
  /* Defer folding until we know which compiler we're in.  */
  if (symtab->state != EXPANSION)
    return false;

  unsigned val_host = GOMP_DEVICE_HOST;
  unsigned val_dev = GOMP_DEVICE_NONE;

#ifdef ACCEL_COMPILER
  val_host = GOMP_DEVICE_NOT_HOST;
  val_dev = ACCEL_COMPILER_acc_device;
#endif

  location_t loc = gimple_location (gsi_stmt (*gsi));

  tree host_eq = make_ssa_name (boolean_type_node);
  gimple *host_ass = gimple_build_assign
    (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
  gimple_set_location (host_ass, loc);
  gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);

  tree dev_eq = make_ssa_name (boolean_type_node);
  gimple *dev_ass = gimple_build_assign
    (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
  gimple_set_location (dev_ass, loc);
  gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);

  tree result = make_ssa_name (boolean_type_node);
  gimple *result_ass = gimple_build_assign
    (result, BIT_IOR_EXPR, host_eq, dev_eq);
  gimple_set_location (result_ass, loc);
  gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);

  replace_call_with_value (gsi, result);

  return true;
}
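
/* For example, in the host compiler the folding above would emit
   roughly the following GIMPLE (illustrative only):

     _1 = arg0 == GOMP_DEVICE_HOST;
     _2 = arg0 == GOMP_DEVICE_NONE;
     _3 = _1 | _2;

   and replace the acc_on_device call with _3.  */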
/* Fold realloc (0, n) -> malloc (n).  */

static bool
gimple_fold_builtin_realloc (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree arg = gimple_call_arg (stmt, 0);
  tree size = gimple_call_arg (stmt, 1);

  if (operand_equal_p (arg, null_pointer_node, 0))
    {
      tree fn_malloc = builtin_decl_implicit (BUILT_IN_MALLOC);
      if (fn_malloc)
	{
	  gcall *repl = gimple_build_call (fn_malloc, 1, size);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }
  return false;
}
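
/* E.g. a call

     p = __builtin_realloc ((void *) 0, 32);

   becomes

     p = __builtin_malloc (32);

   which subsequent folding may simplify further (illustrative
   example).  */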
/* Number of bytes into which any type but aggregate or vector types
   should fit.  */
static constexpr size_t clear_padding_unit
  = MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT;
/* Buffer size on which __builtin_clear_padding folding code works.  */
static const size_t clear_padding_buf_size = 32 * clear_padding_unit;

/* Data passed through __builtin_clear_padding folding.  */
struct clear_padding_struct {
  location_t loc;
  /* 0 during __builtin_clear_padding folding, nonzero during
     clear_type_padding_in_mask.  In that case, instead of clearing the
     non-padding bits in union_ptr array clear the padding bits in there.  */
  bool clear_in_mask;
  tree base;
  tree alias_type;
  gimple_stmt_iterator *gsi;
  /* Alignment of buf->base + 0.  */
  unsigned align;
  /* Offset from buf->base.  Should be always a multiple of UNITS_PER_WORD.  */
  HOST_WIDE_INT off;
  /* Number of padding bytes before buf->off that don't have padding clear
     code emitted yet.  */
  HOST_WIDE_INT padding_bytes;
  /* The size of the whole object.  Never emit code to touch
     buf->base + buf->sz or following bytes.  */
  HOST_WIDE_INT sz;
  /* Number of bytes recorded in buf->buf.  */
  size_t size;
  /* When inside union, instead of emitting code we and bits inside of
     the union_ptr array.  */
  unsigned char *union_ptr;
  /* Set bits mean padding bits that need to be cleared by the builtin.  */
  unsigned char buf[clear_padding_buf_size + clear_padding_unit];
};
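
/* Illustrative example (not part of the original sources): for

     struct S { char c; int i; };

   on a target with 4-byte int alignment, bytes 1-3 of a struct S
   object are padding, so while folding __builtin_clear_padding the
   first 8 entries of buf would be recorded roughly as

     0x00 0xff 0xff 0xff 0x00 0x00 0x00 0x00

   with set bits marking the padding that has to be cleared.  */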
/* Emit code to clear padding requested in BUF->buf - set bits
   in there stand for padding that should be cleared.  FULL is true
   if everything from the buffer should be flushed, otherwise
   it can leave up to 2 * clear_padding_unit bytes for further
   processing.  */

static void
clear_padding_flush (clear_padding_struct *buf, bool full)
{
  gcc_assert ((clear_padding_unit % UNITS_PER_WORD) == 0);
  if (!full && buf->size < 2 * clear_padding_unit)
    return;
  gcc_assert ((buf->off % UNITS_PER_WORD) == 0);
  size_t end = buf->size;
  if (!full)
    end = ((end - clear_padding_unit - 1) / clear_padding_unit
	   * clear_padding_unit);
  size_t padding_bytes = buf->padding_bytes;
  if (buf->union_ptr)
    {
      if (buf->clear_in_mask)
	{
	  /* During clear_type_padding_in_mask, clear the padding
	     bits set in buf->buf in the buf->union_ptr mask.  */
	  for (size_t i = 0; i < end; i++)
	    {
	      if (buf->buf[i] == (unsigned char) ~0)
		padding_bytes++;
	      else
		{
		  memset (&buf->union_ptr[buf->off + i - padding_bytes],
			  0, padding_bytes);
		  padding_bytes = 0;
		  buf->union_ptr[buf->off + i] &= ~buf->buf[i];
		}
	    }
	  if (full)
	    {
	      memset (&buf->union_ptr[buf->off + end - padding_bytes],
		      0, padding_bytes);
	      buf->off = 0;
	      buf->size = 0;
	      buf->padding_bytes = 0;
	    }
	  else
	    {
	      memmove (buf->buf, buf->buf + end, buf->size - end);
	      buf->off += end;
	      buf->size -= end;
	      buf->padding_bytes = padding_bytes;
	    }
	  return;
	}
      /* Inside of a union, instead of emitting any code, clear all
	 bits in the union_ptr buffer that are clear in buf.
	 Whole padding bytes don't clear anything.  */
      for (size_t i = 0; i < end; i++)
	{
	  if (buf->buf[i] == (unsigned char) ~0)
	    padding_bytes++;
	  else
	    {
	      padding_bytes = 0;
	      buf->union_ptr[buf->off + i] &= buf->buf[i];
	    }
	}
      if (full)
	{
	  buf->off = 0;
	  buf->size = 0;
	  buf->padding_bytes = 0;
	}
      else
	{
	  memmove (buf->buf, buf->buf + end, buf->size - end);
	  buf->off += end;
	  buf->size -= end;
	  buf->padding_bytes = padding_bytes;
	}
      return;
    }
  size_t wordsize = UNITS_PER_WORD;
  for (size_t i = 0; i < end; i += wordsize)
    {
      size_t nonzero_first = wordsize;
      size_t nonzero_last = 0;
      size_t zero_first = wordsize;
      size_t zero_last = 0;
      bool all_ones = true, bytes_only = true;
      if ((unsigned HOST_WIDE_INT) (buf->off + i + wordsize)
	  > (unsigned HOST_WIDE_INT) buf->sz)
	{
	  gcc_assert (wordsize > 1);
	  wordsize /= 2;
	  i -= wordsize;
	  continue;
	}
      for (size_t j = i; j < i + wordsize && j < end; j++)
	{
	  if (buf->buf[j])
	    {
	      if (nonzero_first == wordsize)
		{
		  nonzero_first = j - i;
		  nonzero_last = j - i;
		}
	      if (nonzero_last != j - i)
		all_ones = false;
	      nonzero_last = j + 1 - i;
	    }
	  else
	    {
	      if (zero_first == wordsize)
		zero_first = j - i;
	      zero_last = j + 1 - i;
	    }
	  if (buf->buf[j] != 0 && buf->buf[j] != (unsigned char) ~0)
	    {
	      all_ones = false;
	      bytes_only = false;
	    }
	}
      size_t padding_end = i;
      if (padding_bytes)
	{
	  if (nonzero_first == 0
	      && nonzero_last == wordsize
	      && all_ones)
	    {
	      /* All bits are padding and we had some padding
		 before too.  Just extend it.  */
	      padding_bytes += wordsize;
	      continue;
	    }
	  if (all_ones && nonzero_first == 0)
	    {
	      padding_bytes += nonzero_last;
	      padding_end += nonzero_last;
	      nonzero_first = wordsize;
	      nonzero_last = 0;
	    }
	  else if (bytes_only && nonzero_first == 0)
	    {
	      gcc_assert (zero_first && zero_first != wordsize);
	      padding_bytes += zero_first;
	      padding_end += zero_first;
	    }
	  tree atype, src;
	  if (padding_bytes == 1)
	    {
	      atype = char_type_node;
	      src = build_zero_cst (char_type_node);
	    }
	  else
	    {
	      atype = build_array_type_nelts (char_type_node, padding_bytes);
	      src = build_constructor (atype, NULL);
	    }
	  tree dst = build2_loc (buf->loc, MEM_REF, atype, buf->base,
				 build_int_cst (buf->alias_type,
						buf->off + padding_end
						- padding_bytes));
	  gimple *g = gimple_build_assign (dst, src);
	  gimple_set_location (g, buf->loc);
	  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
	  padding_bytes = 0;
	  buf->padding_bytes = 0;
	}
      if (nonzero_first == wordsize)
	/* All bits in a word are 0, there are no padding bits.  */
	continue;
      if (all_ones && nonzero_last == wordsize)
	{
	  /* All bits between nonzero_first and end of word are padding
	     bits, start counting padding_bytes.  */
	  padding_bytes = nonzero_last - nonzero_first;
	  continue;
	}
      if (bytes_only)
	{
	  /* If bitfields aren't involved in this word, prefer storing
	     individual bytes or groups of them over performing a RMW
	     operation on the whole word.  */
	  gcc_assert (i + zero_last <= end);
	  for (size_t j = padding_end; j < i + zero_last; j++)
	    if (buf->buf[j])
	      {
		size_t k;
		for (k = j; k < i + zero_last; k++)
		  if (buf->buf[k] == 0)
		    break;
		HOST_WIDE_INT off = buf->off + j;
		tree atype, src;
		if (k - j == 1)
		  {
		    atype = char_type_node;
		    src = build_zero_cst (char_type_node);
		  }
		else
		  {
		    atype = build_array_type_nelts (char_type_node, k - j);
		    src = build_constructor (atype, NULL);
		  }
		tree dst = build2_loc (buf->loc, MEM_REF, atype,
				       buf->base,
				       build_int_cst (buf->alias_type, off));
		gimple *g = gimple_build_assign (dst, src);
		gimple_set_location (g, buf->loc);
		gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
		j = k;
	      }
	  if (nonzero_last == wordsize)
	    padding_bytes = nonzero_last - zero_last;
	  continue;
	}
      for (size_t eltsz = 1; eltsz <= wordsize; eltsz <<= 1)
	{
	  if (nonzero_last - nonzero_first <= eltsz
	      && ((nonzero_first & ~(eltsz - 1))
		  == ((nonzero_last - 1) & ~(eltsz - 1))))
	    {
	      tree type;
	      if (eltsz == 1)
		type = char_type_node;
	      else
		type = lang_hooks.types.type_for_size (eltsz * BITS_PER_UNIT,
						       0);
	      size_t start = nonzero_first & ~(eltsz - 1);
	      HOST_WIDE_INT off = buf->off + i + start;
	      tree atype = type;
	      if (eltsz > 1 && buf->align < TYPE_ALIGN (type))
		atype = build_aligned_type (type, buf->align);
	      tree dst = build2_loc (buf->loc, MEM_REF, atype, buf->base,
				     build_int_cst (buf->alias_type, off));
	      tree src;
	      gimple *g;
	      if (all_ones
		  && nonzero_first == start
		  && nonzero_last == start + eltsz)
		src = build_zero_cst (type);
	      else
		{
		  src = make_ssa_name (type);
		  g = gimple_build_assign (src, unshare_expr (dst));
		  gimple_set_location (g, buf->loc);
		  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
		  tree mask = native_interpret_expr (type,
						     buf->buf + i + start,
						     eltsz);
		  gcc_assert (mask && TREE_CODE (mask) == INTEGER_CST);
		  mask = fold_build1 (BIT_NOT_EXPR, type, mask);
		  tree src_masked = make_ssa_name (type);
		  g = gimple_build_assign (src_masked, BIT_AND_EXPR,
					   src, mask);
		  gimple_set_location (g, buf->loc);
		  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
		  src = src_masked;
		}
	      g = gimple_build_assign (dst, src);
	      gimple_set_location (g, buf->loc);
	      gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
	      break;
	    }
	}
    }
  if (full)
    {
      if (padding_bytes)
	{
	  tree atype, src;
	  if (padding_bytes == 1)
	    {
	      atype = char_type_node;
	      src = build_zero_cst (char_type_node);
	    }
	  else
	    {
	      atype = build_array_type_nelts (char_type_node, padding_bytes);
	      src = build_constructor (atype, NULL);
	    }
	  tree dst = build2_loc (buf->loc, MEM_REF, atype, buf->base,
				 build_int_cst (buf->alias_type,
						buf->off + end
						- padding_bytes));
	  gimple *g = gimple_build_assign (dst, src);
	  gimple_set_location (g, buf->loc);
	  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
	}
      size_t end_rem = end % UNITS_PER_WORD;
      buf->off += end - end_rem;
      buf->size = end_rem;
      memset (buf->buf, 0, buf->size);
      buf->padding_bytes = 0;
    }
  else
    {
      memmove (buf->buf, buf->buf + end, buf->size - end);
      buf->off += end;
      buf->size -= end;
      buf->padding_bytes = padding_bytes;
    }
}
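
/* Continuing the struct S example above, flushing the recorded buffer
   0x00 0xff 0xff 0xff 0x00 0x00 0x00 0x00 would emit a single store
   clearing bytes 1-3, roughly

     MEM <char[3]> [(struct S *) base + 1] = {};

   (illustrative only; the exact store shape depends on the word size,
   the alignment and whether bitfields share bytes with padding).  */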
/* Append PADDING_BYTES padding bytes.  */

static void
clear_padding_add_padding (clear_padding_struct *buf,
			   HOST_WIDE_INT padding_bytes)
{
  if (padding_bytes == 0)
    return;
  if ((unsigned HOST_WIDE_INT) padding_bytes + buf->size
      > (unsigned HOST_WIDE_INT) clear_padding_buf_size)
    clear_padding_flush (buf, false);
  if ((unsigned HOST_WIDE_INT) padding_bytes + buf->size
      > (unsigned HOST_WIDE_INT) clear_padding_buf_size)
    {
      memset (buf->buf + buf->size, ~0, clear_padding_buf_size - buf->size);
      padding_bytes -= clear_padding_buf_size - buf->size;
      buf->size = clear_padding_buf_size;
      clear_padding_flush (buf, false);
      gcc_assert (buf->padding_bytes);
      /* At this point buf->buf[0] through buf->buf[buf->size - 1]
	 is guaranteed to be all ones.  */
      padding_bytes += buf->size;
      buf->size = padding_bytes % UNITS_PER_WORD;
      memset (buf->buf, ~0, buf->size);
      buf->off += padding_bytes - buf->size;
      buf->padding_bytes += padding_bytes - buf->size;
    }
  else
    {
      memset (buf->buf + buf->size, ~0, padding_bytes);
      buf->size += padding_bytes;
    }
}
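
/* E.g. clear_padding_add_padding (buf, 3) simply appends three 0xff
   bytes to buf->buf when they fit; when they would overflow the
   buffer, the buffer is flushed first and, for very large runs of
   padding, the run is accounted directly in buf->padding_bytes
   (illustrative summary).  */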
static void clear_padding_type (clear_padding_struct *, tree,
				HOST_WIDE_INT, bool);
/* Clear padding bits of union type TYPE.  */

static void
clear_padding_union (clear_padding_struct *buf, tree type,
		     HOST_WIDE_INT sz, bool for_auto_init)
{
  clear_padding_struct *union_buf;
  HOST_WIDE_INT start_off = 0, next_off = 0;
  size_t start_size = 0;
  if (buf->union_ptr)
    {
      start_off = buf->off + buf->size;
      next_off = start_off + sz;
      start_size = start_off % UNITS_PER_WORD;
      start_off -= start_size;
      clear_padding_flush (buf, true);
      union_buf = buf;
    }
  else
    {
      if (sz + buf->size > clear_padding_buf_size)
	clear_padding_flush (buf, false);
      union_buf = XALLOCA (clear_padding_struct);
      union_buf->loc = buf->loc;
      union_buf->clear_in_mask = buf->clear_in_mask;
      union_buf->base = NULL_TREE;
      union_buf->alias_type = NULL_TREE;
      union_buf->gsi = NULL;
      union_buf->align = 0;
      union_buf->off = 0;
      union_buf->padding_bytes = 0;
      union_buf->sz = sz;
      union_buf->size = 0;
      if (sz + buf->size <= clear_padding_buf_size)
	union_buf->union_ptr = buf->buf + buf->size;
      else
	union_buf->union_ptr = XNEWVEC (unsigned char, sz);
      memset (union_buf->union_ptr, ~0, sz);
    }

  for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL && !DECL_PADDING_P (field))
      {
	if (DECL_SIZE_UNIT (field) == NULL_TREE)
	  {
	    if (TREE_TYPE (field) == error_mark_node)
	      continue;
	    gcc_assert (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
			&& !COMPLETE_TYPE_P (TREE_TYPE (field)));
	    if (!buf->clear_in_mask && !for_auto_init)
	      error_at (buf->loc, "flexible array member %qD does not have "
				  "well defined padding bits for %qs",
			field, "__builtin_clear_padding");
	    continue;
	  }
	HOST_WIDE_INT fldsz = tree_to_shwi (DECL_SIZE_UNIT (field));
	gcc_assert (union_buf->size == 0);
	union_buf->off = start_off;
	union_buf->size = start_size;
	memset (union_buf->buf, ~0, start_size);
	clear_padding_type (union_buf, TREE_TYPE (field), fldsz, for_auto_init);
	clear_padding_add_padding (union_buf, sz - fldsz);
	clear_padding_flush (union_buf, true);
      }

  if (buf == union_buf)
    {
      buf->off = next_off;
      buf->size = next_off % UNITS_PER_WORD;
      buf->off -= buf->size;
      memset (buf->buf, ~0, buf->size);
    }
  else if (sz + buf->size <= clear_padding_buf_size)
    buf->size += sz;
  else
    {
      unsigned char *union_ptr = union_buf->union_ptr;
      while (sz)
	{
	  clear_padding_flush (buf, false);
	  HOST_WIDE_INT this_sz
	    = MIN ((unsigned HOST_WIDE_INT) sz,
		   clear_padding_buf_size - buf->size);
	  memcpy (buf->buf + buf->size, union_ptr, this_sz);
	  buf->size += this_sz;
	  union_ptr += this_sz;
	  sz -= this_sz;
	}
      XDELETE (union_buf->union_ptr);
    }
}
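
/* Illustrative example (not part of the original sources): for

     union U { char c; int i; };

   the char member leaves bytes 1-3 as trailing padding while the int
   member covers all four bytes with value bits.  Because the masks of
   all members are intersected (ANDed) above, a byte counts as padding
   only if it is padding in every member, so for union U nothing is
   cleared at all.  */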
/* The only known floating point formats with padding bits are the
   IEEE extended ones.  */

static bool
clear_padding_real_needs_padding_p (tree type)
{
  const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
  return (fmt->b == 2
	  && fmt->signbit_ro == fmt->signbit_rw
	  && (fmt->signbit_ro == 79 || fmt->signbit_ro == 95));
}
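
/* E.g. the x86 80-bit long double (sign bit at bit 79) is stored in 12
   or 16 bytes, so the bytes beyond the ten value bytes are padding and
   the function returns true for it, while for IEEE double (sign bit at
   bit 63, no padding) it returns false (illustrative example).  */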
/* Return true if TYPE might contain any padding bits.  */

static bool
clear_padding_type_may_have_padding_p (tree type)
{
  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
      return true;
    case ARRAY_TYPE:
    case COMPLEX_TYPE:
    case VECTOR_TYPE:
      return clear_padding_type_may_have_padding_p (TREE_TYPE (type));
    case REAL_TYPE:
      return clear_padding_real_needs_padding_p (type);
    default:
      return false;
    }
}
/* Emit a runtime loop:
   for (; buf.base != end; buf.base += sz)
     __builtin_clear_padding (buf.base);  */

static void
clear_padding_emit_loop (clear_padding_struct *buf, tree type,
			 tree end, bool for_auto_init)
{
  tree l1 = create_artificial_label (buf->loc);
  tree l2 = create_artificial_label (buf->loc);
  tree l3 = create_artificial_label (buf->loc);
  gimple *g = gimple_build_goto (l2);
  gimple_set_location (g, buf->loc);
  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
  g = gimple_build_label (l1);
  gimple_set_location (g, buf->loc);
  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
  clear_padding_type (buf, type, buf->sz, for_auto_init);
  clear_padding_flush (buf, true);
  g = gimple_build_assign (buf->base, POINTER_PLUS_EXPR, buf->base,
			   size_int (buf->sz));
  gimple_set_location (g, buf->loc);
  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
  g = gimple_build_label (l2);
  gimple_set_location (g, buf->loc);
  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
  g = gimple_build_cond (NE_EXPR, buf->base, end, l1, l3);
  gimple_set_location (g, buf->loc);
  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
  g = gimple_build_label (l3);
  gimple_set_location (g, buf->loc);
  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
}
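
/* The emitted control flow corresponds roughly to (illustrative):

     goto l2;
   l1:
     <clear padding of one element at buf->base>
     buf->base = buf->base + sz;
   l2:
     if (buf->base != end) goto l1; else goto l3;
   l3:
     ;  */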
/* Clear padding bits for TYPE.  Called recursively from
   gimple_fold_builtin_clear_padding.  If FOR_AUTO_INIT is true,
   the __builtin_clear_padding is not called by the end user,
   instead, it's inserted by the compiler to initialize the
   padding of automatic variables.  Therefore, we should not
   emit the error messages for flexible array members, to avoid
   confusing the end user.  */

static void
clear_padding_type (clear_padding_struct *buf, tree type,
		    HOST_WIDE_INT sz, bool for_auto_init)
{
  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
      HOST_WIDE_INT cur_pos;
      cur_pos = 0;
      for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL && !DECL_PADDING_P (field))
	  {
	    tree ftype = TREE_TYPE (field);
	    if (DECL_BIT_FIELD (field))
	      {
		HOST_WIDE_INT fldsz = TYPE_PRECISION (ftype);
		if (fldsz == 0)
		  continue;
		HOST_WIDE_INT pos = int_byte_position (field);
		if (pos >= sz)
		  continue;
		HOST_WIDE_INT bpos
		  = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
		bpos %= BITS_PER_UNIT;
		HOST_WIDE_INT end
		  = ROUND_UP (bpos + fldsz, BITS_PER_UNIT) / BITS_PER_UNIT;
		if (pos + end > cur_pos)
		  {
		    clear_padding_add_padding (buf, pos + end - cur_pos);
		    cur_pos = pos + end;
		  }
		gcc_assert (cur_pos > pos
			    && ((unsigned HOST_WIDE_INT) buf->size
				>= (unsigned HOST_WIDE_INT) cur_pos - pos));
		unsigned char *p = buf->buf + buf->size - (cur_pos - pos);
		if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
		  sorry_at (buf->loc, "PDP11 bit-field handling unsupported"
				      " in %qs", "__builtin_clear_padding");
		else if (BYTES_BIG_ENDIAN)
		  {
		    /* Big endian.  */
		    if (bpos + fldsz <= BITS_PER_UNIT)
		      *p &= ~(((1 << fldsz) - 1)
			      << (BITS_PER_UNIT - bpos - fldsz));
		    else
		      {
			if (bpos)
			  {
			    *p &= ~(((1U << BITS_PER_UNIT) - 1) >> bpos);
			    p++;
			    fldsz -= BITS_PER_UNIT - bpos;
			  }
			memset (p, 0, fldsz / BITS_PER_UNIT);
			p += fldsz / BITS_PER_UNIT;
			fldsz %= BITS_PER_UNIT;
			if (fldsz)
			  *p &= ((1U << BITS_PER_UNIT) - 1) >> fldsz;
		      }
		  }
		else
		  {
		    /* Little endian.  */
		    if (bpos + fldsz <= BITS_PER_UNIT)
		      *p &= ~(((1 << fldsz) - 1) << bpos);
		    else
		      {
			if (bpos)
			  {
			    *p &= ~(((1 << BITS_PER_UNIT) - 1) << bpos);
			    p++;
			    fldsz -= BITS_PER_UNIT - bpos;
			  }
			memset (p, 0, fldsz / BITS_PER_UNIT);
			p += fldsz / BITS_PER_UNIT;
			fldsz %= BITS_PER_UNIT;
			if (fldsz)
			  *p &= ~((1 << fldsz) - 1);
		      }
		  }
	      }
	    else if (DECL_SIZE_UNIT (field) == NULL_TREE)
	      {
		if (ftype == error_mark_node)
		  continue;
		gcc_assert (TREE_CODE (ftype) == ARRAY_TYPE
			    && !COMPLETE_TYPE_P (ftype));
		if (!buf->clear_in_mask && !for_auto_init)
		  error_at (buf->loc, "flexible array member %qD does not "
				      "have well defined padding bits for %qs",
			    field, "__builtin_clear_padding");
	      }
	    else if (is_empty_type (TREE_TYPE (field)))
	      continue;
	    else
	      {
		HOST_WIDE_INT pos = int_byte_position (field);
		if (pos >= sz)
		  continue;
		HOST_WIDE_INT fldsz = tree_to_shwi (DECL_SIZE_UNIT (field));
		gcc_assert (pos >= 0 && fldsz >= 0 && pos >= cur_pos);
		clear_padding_add_padding (buf, pos - cur_pos);
		cur_pos = pos;
		clear_padding_type (buf, TREE_TYPE (field),
				    fldsz, for_auto_init);
		cur_pos += fldsz;
	      }
	  }
      gcc_assert (sz >= cur_pos);
      clear_padding_add_padding (buf, sz - cur_pos);
      break;
    case ARRAY_TYPE:
      HOST_WIDE_INT nelts, fldsz;
      fldsz = int_size_in_bytes (TREE_TYPE (type));
      if (fldsz == 0)
	break;
      nelts = sz / fldsz;
      if (nelts > 1
	  && sz > 8 * UNITS_PER_WORD
	  && buf->union_ptr == NULL
	  && clear_padding_type_may_have_padding_p (TREE_TYPE (type)))
	{
	  /* For sufficiently large array of more than one elements,
	     emit a runtime loop to keep code size manageable.  */
	  tree base = buf->base;
	  unsigned int prev_align = buf->align;
	  HOST_WIDE_INT off = buf->off + buf->size;
	  HOST_WIDE_INT prev_sz = buf->sz;
	  clear_padding_flush (buf, true);
	  tree elttype = TREE_TYPE (type);
	  buf->base = create_tmp_var (build_pointer_type (elttype));
	  tree end = make_ssa_name (TREE_TYPE (buf->base));
	  gimple *g = gimple_build_assign (buf->base, POINTER_PLUS_EXPR,
					   base, size_int (off));
	  gimple_set_location (g, buf->loc);
	  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
	  g = gimple_build_assign (end, POINTER_PLUS_EXPR, buf->base,
				   size_int (sz));
	  gimple_set_location (g, buf->loc);
	  gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
	  buf->sz = fldsz;
	  buf->align = TYPE_ALIGN (elttype);
	  buf->off = 0;
	  buf->size = 0;
	  clear_padding_emit_loop (buf, elttype, end, for_auto_init);
	  buf->base = base;
	  buf->sz = prev_sz;
	  buf->align = prev_align;
	  buf->size = off % UNITS_PER_WORD;
	  buf->off = off - buf->size;
	  memset (buf->buf, 0, buf->size);
	  break;
	}
      for (HOST_WIDE_INT i = 0; i < nelts; i++)
	clear_padding_type (buf, TREE_TYPE (type), fldsz, for_auto_init);
      break;
    case UNION_TYPE:
      clear_padding_union (buf, type, sz, for_auto_init);
      break;
    case REAL_TYPE:
      gcc_assert ((size_t) sz <= clear_padding_unit);
      if ((unsigned HOST_WIDE_INT) sz + buf->size > clear_padding_buf_size)
	clear_padding_flush (buf, false);
      if (clear_padding_real_needs_padding_p (type))
	{
	  /* Use native_interpret_expr + native_encode_expr to figure out
	     which bits are padding.  */
	  memset (buf->buf + buf->size, ~0, sz);
	  tree cst = native_interpret_expr (type, buf->buf + buf->size, sz);
	  gcc_assert (cst && TREE_CODE (cst) == REAL_CST);
	  int len = native_encode_expr (cst, buf->buf + buf->size, sz);
	  gcc_assert (len > 0 && (size_t) len == (size_t) sz);
	  for (size_t i = 0; i < (size_t) sz; i++)
	    buf->buf[buf->size + i] ^= ~0;
	}
      else
	memset (buf->buf + buf->size, 0, sz);
      buf->size += sz;
      break;
    case COMPLEX_TYPE:
      fldsz = int_size_in_bytes (TREE_TYPE (type));
      clear_padding_type (buf, TREE_TYPE (type), fldsz, for_auto_init);
      clear_padding_type (buf, TREE_TYPE (type), fldsz, for_auto_init);
      break;
    case VECTOR_TYPE:
      nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
      fldsz = int_size_in_bytes (TREE_TYPE (type));
      for (HOST_WIDE_INT i = 0; i < nelts; i++)
	clear_padding_type (buf, TREE_TYPE (type), fldsz, for_auto_init);
      break;
    case NULLPTR_TYPE:
      gcc_assert ((size_t) sz <= clear_padding_unit);
      if ((unsigned HOST_WIDE_INT) sz + buf->size > clear_padding_buf_size)
	clear_padding_flush (buf, false);
      memset (buf->buf + buf->size, ~0, sz);
      buf->size += sz;
      break;
    default:
      gcc_assert ((size_t) sz <= clear_padding_unit);
      if ((unsigned HOST_WIDE_INT) sz + buf->size > clear_padding_buf_size)
	clear_padding_flush (buf, false);
      memset (buf->buf + buf->size, 0, sz);
      buf->size += sz;
      break;
    }
}
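
/* Illustrative example (not part of the original sources): for

     struct T { char c; int i; char d; };

   with 4-byte int size and alignment, the RECORD_TYPE case above
   records one non-padding byte for C, three padding bytes before I,
   four non-padding bytes for I, one non-padding byte for D and three
   trailing padding bytes, i.e. a mask of roughly

     0x00 0xff 0xff 0xff 0x00 0x00 0x00 0x00 0x00 0xff 0xff 0xff  */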
/* Clear padding bits of TYPE in MASK.  */

void
clear_type_padding_in_mask (tree type, unsigned char *mask)
{
  clear_padding_struct buf;
  buf.loc = UNKNOWN_LOCATION;
  buf.clear_in_mask = true;
  buf.base = NULL_TREE;
  buf.alias_type = NULL_TREE;
  buf.gsi = NULL;
  buf.align = 0;
  buf.off = 0;
  buf.padding_bytes = 0;
  buf.sz = int_size_in_bytes (type);
  buf.size = 0;
  buf.union_ptr = mask;
  clear_padding_type (&buf, type, buf.sz, false);
  clear_padding_flush (&buf, true);
}
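
/* E.g. for the struct S example above, a caller can pass a MASK of
   sizeof (struct S) bytes preset to all ones; on return the padding
   byte positions 1-3 have been cleared in the mask while the value
   byte positions remain untouched (illustrative example).  */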
/* Fold __builtin_clear_padding builtin.  */

static bool
gimple_fold_builtin_clear_padding (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  gcc_assert (gimple_call_num_args (stmt) == 3);
  tree ptr = gimple_call_arg (stmt, 0);
  tree typearg = gimple_call_arg (stmt, 1);
  /* The 3rd argument of __builtin_clear_padding is to distinguish whether
     this call is made by the user or by the compiler for automatic variable
     initialization.  */
  bool for_auto_init = (bool) TREE_INT_CST_LOW (gimple_call_arg (stmt, 2));
  tree type = TREE_TYPE (TREE_TYPE (typearg));
  location_t loc = gimple_location (stmt);
  clear_padding_struct buf;
  gimple_stmt_iterator gsiprev = *gsi;
  /* This should be folded during the lower pass.  */
  gcc_assert (!gimple_in_ssa_p (cfun) && cfun->cfg == NULL);
  gcc_assert (COMPLETE_TYPE_P (type));
  gsi_prev (&gsiprev);

  buf.loc = loc;
  buf.clear_in_mask = false;
  buf.base = ptr;
  buf.alias_type = NULL_TREE;
  buf.gsi = gsi;
  buf.align = get_pointer_alignment (ptr);
  unsigned int talign = min_align_of_type (type) * BITS_PER_UNIT;
  buf.align = MAX (buf.align, talign);
  buf.off = 0;
  buf.padding_bytes = 0;
  buf.size = 0;
  buf.sz = int_size_in_bytes (type);
  buf.union_ptr = NULL;
  if (buf.sz < 0 && int_size_in_bytes (strip_array_types (type)) < 0)
    sorry_at (loc, "%s not supported for variable length aggregates",
	      "__builtin_clear_padding");
  /* The implementation currently assumes 8-bit host and target
     chars which is the case for all currently supported targets
     and hosts and is required e.g. for native_{encode,interpret}* APIs.  */
  else if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
    sorry_at (loc, "%s not supported on this target",
	      "__builtin_clear_padding");
  else if (!clear_padding_type_may_have_padding_p (type))
    ;
  else if (TREE_CODE (type) == ARRAY_TYPE && buf.sz < 0)
    {
      tree sz = TYPE_SIZE_UNIT (type);
      tree elttype = type;
      /* Only supports C/C++ VLAs and flattens all the VLA levels.  */
      while (TREE_CODE (elttype) == ARRAY_TYPE
	     && int_size_in_bytes (elttype) < 0)
	elttype = TREE_TYPE (elttype);
      HOST_WIDE_INT eltsz = int_size_in_bytes (elttype);
      gcc_assert (eltsz >= 0);
      if (eltsz)
	{
	  buf.base = create_tmp_var (build_pointer_type (elttype));
	  tree end = make_ssa_name (TREE_TYPE (buf.base));
	  gimple *g = gimple_build_assign (buf.base, ptr);
	  gimple_set_location (g, loc);
	  gsi_insert_before (gsi, g, GSI_SAME_STMT);
	  g = gimple_build_assign (end, POINTER_PLUS_EXPR, buf.base, sz);
	  gimple_set_location (g, loc);
	  gsi_insert_before (gsi, g, GSI_SAME_STMT);
	  buf.sz = eltsz;
	  buf.align = TYPE_ALIGN (elttype);
	  buf.alias_type = build_pointer_type (elttype);
	  clear_padding_emit_loop (&buf, elttype, end, for_auto_init);
	}
    }
  else
    {
      if (!is_gimple_mem_ref_addr (buf.base))
	{
	  buf.base = make_ssa_name (TREE_TYPE (ptr));
	  gimple *g = gimple_build_assign (buf.base, ptr);
	  gimple_set_location (g, loc);
	  gsi_insert_before (gsi, g, GSI_SAME_STMT);
	}
      buf.alias_type = build_pointer_type (type);
      clear_padding_type (&buf, type, buf.sz, for_auto_init);
      clear_padding_flush (&buf, true);
    }

  gimple_stmt_iterator gsiprev2 = *gsi;
  gsi_prev (&gsiprev2);
  if (gsi_stmt (gsiprev) == gsi_stmt (gsiprev2))
    gsi_replace (gsi, gimple_build_nop (), true);
  else
    gsi_remove (gsi, true);
  return true;
}
/* Fold the non-target builtin at *GSI and return whether any simplification
   was made.  */

static bool
gimple_fold_builtin (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
  tree callee = gimple_call_fndecl (stmt);

  /* Give up for always_inline inline builtins until they are
     inlined.  */
  if (avoid_folding_inline_builtin (callee))
    return false;

  unsigned n = gimple_call_num_args (stmt);
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
  switch (fcode)
    {
    case BUILT_IN_BCMP:
      return gimple_fold_builtin_bcmp (gsi);
    case BUILT_IN_BCOPY:
      return gimple_fold_builtin_bcopy (gsi);
    case BUILT_IN_BZERO:
      return gimple_fold_builtin_bzero (gsi);

    case BUILT_IN_MEMSET:
      return gimple_fold_builtin_memset (gsi,
					 gimple_call_arg (stmt, 1),
					 gimple_call_arg (stmt, 2));
    case BUILT_IN_MEMCPY:
    case BUILT_IN_MEMPCPY:
    case BUILT_IN_MEMMOVE:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1), fcode);
    case BUILT_IN_SPRINTF_CHK:
    case BUILT_IN_VSPRINTF_CHK:
      return gimple_fold_builtin_sprintf_chk (gsi, fcode);
    case BUILT_IN_STRCAT_CHK:
      return gimple_fold_builtin_strcat_chk (gsi);
    case BUILT_IN_STRNCAT_CHK:
      return gimple_fold_builtin_strncat_chk (gsi);
    case BUILT_IN_STRLEN:
      return gimple_fold_builtin_strlen (gsi);
    case BUILT_IN_STRCPY:
      return gimple_fold_builtin_strcpy (gsi,
					 gimple_call_arg (stmt, 0),
					 gimple_call_arg (stmt, 1));
    case BUILT_IN_STRNCPY:
      return gimple_fold_builtin_strncpy (gsi,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2));
    case BUILT_IN_STRCAT:
      return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
					 gimple_call_arg (stmt, 1));
    case BUILT_IN_STRNCAT:
      return gimple_fold_builtin_strncat (gsi);
    case BUILT_IN_INDEX:
    case BUILT_IN_STRCHR:
      return gimple_fold_builtin_strchr (gsi, false);
    case BUILT_IN_RINDEX:
    case BUILT_IN_STRRCHR:
      return gimple_fold_builtin_strchr (gsi, true);
    case BUILT_IN_STRSTR:
      return gimple_fold_builtin_strstr (gsi);
    case BUILT_IN_STRCMP:
    case BUILT_IN_STRCMP_EQ:
    case BUILT_IN_STRCASECMP:
    case BUILT_IN_STRNCMP:
    case BUILT_IN_STRNCMP_EQ:
    case BUILT_IN_STRNCASECMP:
      return gimple_fold_builtin_string_compare (gsi);
    case BUILT_IN_MEMCHR:
      return gimple_fold_builtin_memchr (gsi);
    case BUILT_IN_FPUTS:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1), false);
    case BUILT_IN_FPUTS_UNLOCKED:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1), true);
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      return gimple_fold_builtin_memory_chk (gsi,
					     gimple_call_arg (stmt, 0),
					     gimple_call_arg (stmt, 1),
					     gimple_call_arg (stmt, 2),
					     gimple_call_arg (stmt, 3),
					     fcode);
    case BUILT_IN_STPCPY:
      return gimple_fold_builtin_stpcpy (gsi);
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      return gimple_fold_builtin_stxcpy_chk (gsi,
					     gimple_call_arg (stmt, 0),
					     gimple_call_arg (stmt, 1),
					     gimple_call_arg (stmt, 2),
					     fcode);
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      return gimple_fold_builtin_stxncpy_chk (gsi,
					      gimple_call_arg (stmt, 0),
					      gimple_call_arg (stmt, 1),
					      gimple_call_arg (stmt, 2),
					      gimple_call_arg (stmt, 3),
					      fcode);
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      return gimple_fold_builtin_snprintf_chk (gsi, fcode);

    case BUILT_IN_FPRINTF:
    case BUILT_IN_FPRINTF_UNLOCKED:
    case BUILT_IN_VFPRINTF:
      if (n == 2 || n == 3)
	return gimple_fold_builtin_fprintf (gsi,
					    gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1),
					    n == 3
					    ? gimple_call_arg (stmt, 2)
					    : NULL_TREE,
					    fcode);
      break;
    case BUILT_IN_FPRINTF_CHK:
    case BUILT_IN_VFPRINTF_CHK:
      if (n == 3 || n == 4)
	return gimple_fold_builtin_fprintf (gsi,
					    gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 2),
					    n == 4
					    ? gimple_call_arg (stmt, 3)
					    : NULL_TREE,
					    fcode);
      break;
    case BUILT_IN_PRINTF:
    case BUILT_IN_PRINTF_UNLOCKED:
    case BUILT_IN_VPRINTF:
      if (n == 1 || n == 2)
	return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
					   n == 2
					   ? gimple_call_arg (stmt, 1)
					   : NULL_TREE, fcode);
      break;
    case BUILT_IN_PRINTF_CHK:
    case BUILT_IN_VPRINTF_CHK:
      if (n == 2 || n == 3)
	return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
					   n == 3
					   ? gimple_call_arg (stmt, 2)
					   : NULL_TREE, fcode);
      break;
    case BUILT_IN_ACC_ON_DEVICE:
      return gimple_fold_builtin_acc_on_device (gsi,
						gimple_call_arg (stmt, 0));
    case BUILT_IN_REALLOC:
      return gimple_fold_builtin_realloc (gsi);

    case BUILT_IN_CLEAR_PADDING:
      return gimple_fold_builtin_clear_padding (gsi);

    default:;
    }

  /* Try the generic builtin folder.  */
  bool ignore = (gimple_call_lhs (stmt) == NULL);
  tree result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
	STRIP_NOPS (result);
      else
	result = fold_convert (gimple_call_return_type (stmt), result);
      gimplify_and_update_call_from_tree (gsi, result);
      return true;
    }

  return false;
}
/* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
   function calls to constants, where possible.  */

static tree
fold_internal_goacc_dim (const gimple *call)
{
  int axis = oacc_get_ifn_dim_arg (call);
  int size = oacc_get_fn_dim_size (current_function_decl, axis);
  tree result = NULL_TREE;
  tree type = TREE_TYPE (gimple_call_lhs (call));

  switch (gimple_call_internal_fn (call))
    {
    case IFN_GOACC_DIM_POS:
      /* If the size is 1, we know the answer.  */
      if (size == 1)
	result = build_int_cst (type, 0);
      break;
    case IFN_GOACC_DIM_SIZE:
      /* If the size is not dynamic, we know the answer.  */
      if (size)
	result = build_int_cst (type, size);
      break;
    default:
      break;
    }

  return result;
}
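
/* E.g. with a fixed partitioning such as vector_length(32), an
   IFN_GOACC_DIM_SIZE call for the vector axis folds to the constant
   32, while an IFN_GOACC_DIM_POS call can fold only when the size of
   the axis is 1, in which case the position is necessarily 0
   (illustrative example).  */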
/* Return true if stmt is __atomic_compare_exchange_N call which is suitable
   for conversion into ATOMIC_COMPARE_EXCHANGE if the second argument is
   &var where var is only addressable because of such calls.  */

bool
optimize_atomic_compare_exchange_p (gimple *stmt)
{
  if (gimple_call_num_args (stmt) != 6
      || !flag_inline_atomics
      || !optimize
      || sanitize_flags_p (SANITIZE_THREAD | SANITIZE_ADDRESS)
      || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
      || !gimple_vdef (stmt)
      || !gimple_vuse (stmt))
    return false;

  tree fndecl = gimple_call_fndecl (stmt);
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
      break;
    default:
      return false;
    }

  tree expected = gimple_call_arg (stmt, 1);
  if (TREE_CODE (expected) != ADDR_EXPR
      || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
    return false;

  tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
  if (!is_gimple_reg_type (etype)
      || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
      || TREE_THIS_VOLATILE (etype)
      || VECTOR_TYPE_P (etype)
      || TREE_CODE (etype) == COMPLEX_TYPE
      /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
	 might not preserve all the bits.  See PR71716.  */
      || SCALAR_FLOAT_TYPE_P (etype)
      || maybe_ne (TYPE_PRECISION (etype),
		   GET_MODE_BITSIZE (TYPE_MODE (etype))))
    return false;

  tree weak = gimple_call_arg (stmt, 3);
  if (!integer_zerop (weak) && !integer_onep (weak))
    return false;

  tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
  machine_mode mode = TYPE_MODE (itype);

  if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
      == CODE_FOR_nothing
      && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
    return false;

  if (maybe_ne (int_size_in_bytes (etype), GET_MODE_SIZE (mode)))
    return false;

  return true;
}
/* Fold
     r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
   into
     _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
     i = IMAGPART_EXPR <t>;
     r = (_Bool) i;
     e = REALPART_EXPR <t>;  */

void
fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree fndecl = gimple_call_fndecl (stmt);
  tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
  tree ctype = build_complex_type (itype);
  tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
  bool throws = false;
  edge e = NULL;
  gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
				   expected);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);
  gimple_stmt_iterator gsiret = gsi_for_stmt (g);
  if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
    {
      g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
			       build1 (VIEW_CONVERT_EXPR, itype,
				       gimple_assign_lhs (g)));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }
  int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
	     + int_size_in_bytes (itype);
  g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
				  gimple_call_arg (stmt, 0),
				  gimple_assign_lhs (g),
				  gimple_call_arg (stmt, 2),
				  build_int_cst (integer_type_node, flag),
				  gimple_call_arg (stmt, 4),
				  gimple_call_arg (stmt, 5));
  tree lhs = make_ssa_name (ctype);
  gimple_call_set_lhs (g, lhs);
  gimple_move_vops (g, stmt);
  tree oldlhs = gimple_call_lhs (stmt);
  if (stmt_can_throw_internal (cfun, stmt))
    {
      throws = true;
      e = find_fallthru_edge (gsi_bb (*gsi)->succs);
    }
  gimple_call_set_nothrow (as_a <gcall *> (g),
			   gimple_call_nothrow_p (as_a <gcall *> (stmt)));
  gimple_call_set_lhs (stmt, NULL_TREE);
  gsi_replace (gsi, g, true);
  if (oldlhs)
    {
      g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
			       build1 (IMAGPART_EXPR, itype, lhs));
      if (throws)
	{
	  gsi_insert_on_edge_immediate (e, g);
	  *gsi = gsi_for_stmt (g);
	}
      else
	gsi_insert_after (gsi, g, GSI_NEW_STMT);
      g = gimple_build_assign (oldlhs, NOP_EXPR, gimple_assign_lhs (g));
      gsi_insert_after (gsi, g, GSI_NEW_STMT);
    }
  g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
			   build1 (REALPART_EXPR, itype, lhs));
  if (throws && oldlhs == NULL_TREE)
    {
      gsi_insert_on_edge_immediate (e, g);
      *gsi = gsi_for_stmt (g);
    }
  else
    gsi_insert_after (gsi, g, GSI_NEW_STMT);
  if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
    {
      g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
			       VIEW_CONVERT_EXPR,
			       build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
				       gimple_assign_lhs (g)));
      gsi_insert_after (gsi, g, GSI_NEW_STMT);
    }
  g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
  gsi_insert_after (gsi, g, GSI_NEW_STMT);
  *gsi = gsiret;
}
/* Return true if ARG0 CODE ARG1 in infinite signed precision operation
   doesn't fit into TYPE.  The test for overflow should be regardless of
   -fwrapv, and even for unsigned types.  */

bool
arith_overflowed_p (enum tree_code code, const_tree type,
		    const_tree arg0, const_tree arg1)
{
  widest2_int warg0 = widest2_int_cst (arg0);
  widest2_int warg1 = widest2_int_cst (arg1);
  widest2_int wres;
  switch (code)
    {
    case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
    case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
    case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
    default: gcc_unreachable ();
    }
  signop sign = TYPE_SIGN (type);
  if (sign == UNSIGNED && wi::neg_p (wres))
    return true;
  return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
}
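
/* E.g. for a 32-bit unsigned TYPE, 0xffffffff + 1 computed in the
   double-width type yields 0x100000000, which needs 33 bits, so the
   function reports overflow; 3 - 5 yields -2, which is caught by the
   wi::neg_p check for unsigned types (illustrative example).  */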
/* If IFN_MASK_LOAD/STORE call CALL is unconditional, return a MEM_REF
   for the memory it references, otherwise return null.  VECTYPE is the
   type of the memory vector.  */

static tree
gimple_fold_mask_load_store_mem_ref (gcall *call, tree vectype)
{
  tree ptr = gimple_call_arg (call, 0);
  tree alias_align = gimple_call_arg (call, 1);
  tree mask = gimple_call_arg (call, 2);
  if (!tree_fits_uhwi_p (alias_align) || !integer_all_onesp (mask))
    return NULL_TREE;

  unsigned HOST_WIDE_INT align = tree_to_uhwi (alias_align);
  if (TYPE_ALIGN (vectype) != align)
    vectype = build_aligned_type (vectype, align);
  tree offset = build_zero_cst (TREE_TYPE (alias_align));
  return fold_build2 (MEM_REF, vectype, ptr, offset);
}
/* Try to fold IFN_MASK_LOAD call CALL.  Return true on success.  */

static bool
gimple_fold_mask_load (gimple_stmt_iterator *gsi, gcall *call)
{
  tree lhs = gimple_call_lhs (call);
  if (!lhs)
    return false;

  if (tree rhs = gimple_fold_mask_load_store_mem_ref (call, TREE_TYPE (lhs)))
    {
      gassign *new_stmt = gimple_build_assign (lhs, rhs);
      gimple_set_location (new_stmt, gimple_location (call));
      gimple_move_vops (new_stmt, call);
      gsi_replace (gsi, new_stmt, false);
      return true;
    }
  return false;
}
/* Try to fold IFN_MASK_STORE call CALL.  Return true on success.  */

static bool
gimple_fold_mask_store (gimple_stmt_iterator *gsi, gcall *call)
{
  tree rhs = gimple_call_arg (call, 3);
  if (tree lhs = gimple_fold_mask_load_store_mem_ref (call, TREE_TYPE (rhs)))
    {
      gassign *new_stmt = gimple_build_assign (lhs, rhs);
      gimple_set_location (new_stmt, gimple_location (call));
      gimple_move_vops (new_stmt, call);
      gsi_replace (gsi, new_stmt, false);
      return true;
    }
  return false;
}
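
/* Illustrative example (not part of the original sources): a call

     .MASK_STORE (ptr_1, align, { -1, -1, -1, -1 }, vec_2);

   has an all-ones mask and is therefore unconditional, so it is
   replaced by a plain vector store

     MEM <vector(4) int> [ptr_1] = vec_2;

   and similarly an unconditional .MASK_LOAD becomes a plain load.  */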
/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.  */

static bool
gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree callee;
  bool changed = false;

  /* Check for virtual calls that became direct calls.  */
  callee = gimple_call_fn (stmt);
  if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
    {
      if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
	{
	  if (dump_file && virtual_method_call_p (callee)
	      && !possible_polymorphic_call_target_p
		   (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
						      (OBJ_TYPE_REF_EXPR (callee)))))
	    {
	      fprintf (dump_file,
		       "Type inheritance inconsistent devirtualization of ");
	      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, callee, TDF_SLIM);
	      fprintf (dump_file, "\n");
	    }

	  gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
	  changed = true;
	}
      else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
	{
	  bool final;
	  vec <cgraph_node *>targets
	    = possible_polymorphic_call_targets (callee, stmt, &final);
	  if (final && targets.length () <= 1 && dbg_cnt (devirt))
	    {
	      tree lhs = gimple_call_lhs (stmt);
	      if (dump_enabled_p ())
		{
		  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
				   "folding virtual function call to %s\n",
				   targets.length () == 1
				   ? targets[0]->name ()
				   : "__builtin_unreachable");
		}
	      if (targets.length () == 1)
		{
		  tree fndecl = targets[0]->decl;
		  gimple_call_set_fndecl (stmt, fndecl);
		  changed = true;
		  /* If changing the call to __cxa_pure_virtual
		     or similar noreturn function, adjust gimple_call_fntype
		     too.  */
		  if (gimple_call_noreturn_p (stmt)
		      && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
		      && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
		      && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
			  == void_type_node))
		    gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
		  /* If the call becomes noreturn, remove the lhs.  */
		  if (lhs
		      && gimple_call_noreturn_p (stmt)
		      && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
			  || should_remove_lhs_p (lhs)))
		    {
		      if (TREE_CODE (lhs) == SSA_NAME)
			{
			  tree var = create_tmp_var (TREE_TYPE (lhs));
			  tree def = get_or_create_ssa_default_def (cfun, var);
			  gimple *new_stmt = gimple_build_assign (lhs, def);
			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
			}
		      gimple_call_set_lhs (stmt, NULL_TREE);
		    }
		  maybe_remove_unused_call_args (cfun, stmt);
		}
	      else
		{
		  tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
		  gimple *new_stmt = gimple_build_call (fndecl, 0);
		  gimple_set_location (new_stmt, gimple_location (stmt));
		  /* If the call had a SSA name as lhs morph that into
		     an uninitialized value.  */
		  if (lhs && TREE_CODE (lhs) == SSA_NAME)
		    {
		      tree var = create_tmp_var (TREE_TYPE (lhs));
		      SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs, var);
		      SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
		      set_ssa_default_def (cfun, var, lhs);
		    }
		  gimple_move_vops (new_stmt, stmt);
		  gsi_replace (gsi, new_stmt, false);
		  return true;
		}
	    }
	}
    }

  /* Check for indirect calls that became direct calls, and then
     no longer require a static chain.  */
  if (gimple_call_chain (stmt))
    {
      tree fn = gimple_call_fndecl (stmt);
      if (fn && !DECL_STATIC_CHAIN (fn))
	{
	  gimple_call_set_chain (stmt, NULL);
	  changed = true;
	}
    }

  if (inplace)
    return changed;

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    {
      if (gimple_fold_builtin (gsi))
	return true;
    }
  else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
    {
      changed |= targetm.gimple_fold_builtin (gsi);
    }
  else if (gimple_call_internal_p (stmt))
    {
      enum tree_code subcode = ERROR_MARK;
      tree result = NULL_TREE;
      bool cplx_result = false;
      tree overflow = NULL_TREE;
      switch (gimple_call_internal_fn (stmt))
	{
	case IFN_BUILTIN_EXPECT:
	  result = fold_builtin_expect (gimple_location (stmt),
					gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1),
					gimple_call_arg (stmt, 2),
					NULL_TREE);
	  break;
	case IFN_UBSAN_OBJECT_SIZE:
	  {
	    tree offset = gimple_call_arg (stmt, 1);
	    tree objsize = gimple_call_arg (stmt, 2);
	    if (integer_all_onesp (objsize)
		|| (TREE_CODE (offset) == INTEGER_CST
		    && TREE_CODE (objsize) == INTEGER_CST
		    && tree_int_cst_le (offset, objsize)))
	      {
		replace_call_with_value (gsi, NULL_TREE);
		return true;
	      }
	  }
	  break;
	case IFN_UBSAN_PTR:
	  if (integer_zerop (gimple_call_arg (stmt, 1)))
	    {
	      replace_call_with_value (gsi, NULL_TREE);
	      return true;
	    }
	  break;
	case IFN_UBSAN_BOUNDS:
	  {
	    tree index = gimple_call_arg (stmt, 1);
	    tree bound = gimple_call_arg (stmt, 2);
	    if (TREE_CODE (index) == INTEGER_CST
		&& TREE_CODE (bound) == INTEGER_CST)
	      {
		index = fold_convert (TREE_TYPE (bound), index);
		if (TREE_CODE (index) == INTEGER_CST
		    && tree_int_cst_le (index, bound))
		  {
		    replace_call_with_value (gsi, NULL_TREE);
		    return true;
		  }
	      }
	  }
	  break;
	case IFN_GOACC_DIM_SIZE:
	case IFN_GOACC_DIM_POS:
	  result = fold_internal_goacc_dim (stmt);
	  break;
	case IFN_UBSAN_CHECK_ADD:
	  subcode = PLUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_SUB:
	  subcode = MINUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_MUL:
	  subcode = MULT_EXPR;
	  break;
	case IFN_ADD_OVERFLOW:
	  subcode = PLUS_EXPR;
	  cplx_result = true;
	  break;
	case IFN_SUB_OVERFLOW:
	  subcode = MINUS_EXPR;
	  cplx_result = true;
	  break;
	case IFN_MUL_OVERFLOW:
	  subcode = MULT_EXPR;
	  cplx_result = true;
	  break;
	case IFN_MASK_LOAD:
	  changed |= gimple_fold_mask_load (gsi, stmt);
	  break;
	case IFN_MASK_STORE:
	  changed |= gimple_fold_mask_store (gsi, stmt);
	  break;
	default:
	  break;
	}
      if (subcode != ERROR_MARK)
	{
	  tree arg0 = gimple_call_arg (stmt, 0);
	  tree arg1 = gimple_call_arg (stmt, 1);
	  tree type = TREE_TYPE (arg0);
	  if (cplx_result)
	    {
	      tree lhs = gimple_call_lhs (stmt);
	      if (lhs == NULL_TREE)
		type = NULL_TREE;
	      else
		type = TREE_TYPE (TREE_TYPE (lhs));
	    }
	  if (type == NULL_TREE)
	    ;
	  /* x = y + 0; x = y - 0; x = y * 0; */
	  else if (integer_zerop (arg1))
	    result = subcode == MULT_EXPR ? integer_zero_node : arg0;
	  /* x = 0 + y; x = 0 * y; */
	  else if (subcode != MINUS_EXPR && integer_zerop (arg0))
	    result = subcode == MULT_EXPR ? integer_zero_node : arg1;
	  /* x = y - y; */
	  else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
	    result = integer_zero_node;
	  /* x = y * 1; x = 1 * y; */
	  else if (subcode == MULT_EXPR && integer_onep (arg1))
	    result = arg0;
	  else if (subcode == MULT_EXPR && integer_onep (arg0))
	    result = arg1;
	  else if (TREE_CODE (arg0) == INTEGER_CST
		   && TREE_CODE (arg1) == INTEGER_CST)
	    {
	      if (cplx_result)
		result = int_const_binop (subcode, fold_convert (type, arg0),
					  fold_convert (type, arg1));
	      else
		result = int_const_binop (subcode, arg0, arg1);
	      if (result && arith_overflowed_p (subcode, type, arg0, arg1))
		{
		  if (cplx_result)
		    overflow = build_one_cst (type);
		  else
		    result = NULL_TREE;
		}
	    }
	  if (result)
	    {
	      if (result == integer_zero_node)
		result = build_zero_cst (type);
	      else if (cplx_result && TREE_TYPE (result) != type)
		{
		  if (TREE_CODE (result) == INTEGER_CST)
		    {
		      if (arith_overflowed_p (PLUS_EXPR, type, result,
					      integer_zero_node))
			overflow = build_one_cst (type);
		    }
		  else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
			    && TYPE_UNSIGNED (type))
			   || (TYPE_PRECISION (type)
			       < (TYPE_PRECISION (TREE_TYPE (result))
				  + (TYPE_UNSIGNED (TREE_TYPE (result))
				     && !TYPE_UNSIGNED (type)))))
		    result = NULL_TREE;
		  if (result)
		    result = fold_convert (type, result);
		}
	    }
	}

      if (result)
	{
	  if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
	    result = drop_tree_overflow (result);
	  if (cplx_result)
	    {
	      if (overflow == NULL_TREE)
		overflow = build_zero_cst (TREE_TYPE (result));
	      tree ctype = build_complex_type (TREE_TYPE (result));
	      if (TREE_CODE (result) == INTEGER_CST
		  && TREE_CODE (overflow) == INTEGER_CST)
		result = build_complex (ctype, result, overflow);
	      else
		result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
				     ctype, result, overflow);
	    }
	  gimplify_and_update_call_from_tree (gsi, result);
	  changed = true;
	}
    }

  return changed;
}
/* Return true whether NAME has a use on STMT.  */

static bool
has_use_on_stmt (tree name, gimple *stmt)
{
  imm_use_iterator iter;
  use_operand_p use_p;
  FOR_EACH_IMM_USE_FAST (use_p, iter, name)
    if (USE_STMT (use_p) == stmt)
      return true;
  return false;
}
/* Worker for fold_stmt_1 dispatch to pattern based folding with
   gimple_simplify.

   Replaces *GSI with the simplification result in RCODE and OPS
   and the associated statements in *SEQ.  Does the replacement
   according to INPLACE and returns true if the operation succeeded.  */

static bool
replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
				  gimple_match_op *res_op,
				  gimple_seq *seq, bool inplace)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree *ops = res_op->ops;
  unsigned int num_ops = res_op->num_ops;

  /* Play safe and do not allow abnormals to be mentioned in
     newly created statements.  See also maybe_push_res_to_seq.
     As an exception allow such uses if there was a use of the
     same SSA name on the old stmt.  */
  for (unsigned int i = 0; i < num_ops; ++i)
    if (TREE_CODE (ops[i]) == SSA_NAME
	&& SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[i])
	&& !has_use_on_stmt (ops[i], stmt))
      return false;

  if (num_ops > 0 && COMPARISON_CLASS_P (ops[0]))
    for (unsigned int i = 0; i < 2; ++i)
      if (TREE_CODE (TREE_OPERAND (ops[0], i)) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], i))
	  && !has_use_on_stmt (TREE_OPERAND (ops[0], i), stmt))
	return false;

  /* Don't insert new statements when INPLACE is true, even if we could
     reuse STMT for the final statement.  */
  if (inplace && !gimple_seq_empty_p (*seq))
    return false;

  if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
    {
      gcc_assert (res_op->code.is_tree_code ());
      if (TREE_CODE_CLASS ((enum tree_code) res_op->code) == tcc_comparison
	  /* GIMPLE_CONDs condition may not throw.  */
	  && (!flag_exceptions
	      || !cfun->can_throw_non_call_exceptions
	      || !operation_could_trap_p (res_op->code,
					  FLOAT_TYPE_P (TREE_TYPE (ops[0])),
					  false, NULL_TREE)))
	gimple_cond_set_condition (cond_stmt, res_op->code, ops[0], ops[1]);
      else if (res_op->code == SSA_NAME)
	gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
				   build_zero_cst (TREE_TYPE (ops[0])));
      else if (res_op->code == INTEGER_CST)
	{
	  if (integer_zerop (ops[0]))
	    gimple_cond_make_false (cond_stmt);
	  else
	    gimple_cond_make_true (cond_stmt);
	}
      else if (!inplace)
	{
	  tree res = maybe_push_res_to_seq (res_op, seq);
	  if (!res)
	    return false;
	  gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
				     build_zero_cst (TREE_TYPE (res)));
	}
      else
	return false;
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi),
			     0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (is_gimple_assign (stmt)
	   && res_op->code.is_tree_code ())
    {
      if (!inplace
	  || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (res_op->code))
	{
	  maybe_build_generic_op (res_op);
	  gimple_assign_set_rhs_with_ops (gsi, res_op->code,
					  res_op->op_or_null (0),
					  res_op->op_or_null (1),
					  res_op->op_or_null (2));
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      if (!gimple_seq_empty_p (*seq))
		print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	      print_gimple_stmt (dump_file, gsi_stmt (*gsi),
				 0, TDF_SLIM);
	    }
	  gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
	  return true;
	}
    }
  else if (res_op->code.is_fn_code ()
	   && gimple_call_combined_fn (stmt) == res_op->code)
    {
      gcc_assert (num_ops == gimple_call_num_args (stmt));
      for (unsigned int i = 0; i < num_ops; ++i)
	gimple_call_set_arg (stmt, i, ops[i]);
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (!inplace)
    {
      if (gimple_has_lhs (stmt))
	{
	  tree lhs = gimple_get_lhs (stmt);
	  if (!maybe_push_res_to_seq (res_op, seq, lhs))
	    return false;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	    }
	  gsi_replace_with_seq_vops (gsi, *seq);
	  return true;
	}
      else
	gcc_unreachable ();
    }

  return false;
}
/* Canonicalize MEM_REFs invariant address operand after propagation.  */

static bool
maybe_canonicalize_mem_ref_addr (tree *t, bool is_debug = false)
{
  bool res = false;
  tree *orig_t = t;

  if (TREE_CODE (*t) == ADDR_EXPR)
    t = &TREE_OPERAND (*t, 0);

  /* The C and C++ frontends use an ARRAY_REF for indexing with their
     generic vector extension.  The actual vector referenced is
     view-converted to an array type for this purpose.  If the index
     is constant the canonical representation in the middle-end is a
     BIT_FIELD_REF so re-write the former to the latter here.  */
  if (TREE_CODE (*t) == ARRAY_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
      && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
      && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
    {
      tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
      if (VECTOR_TYPE_P (vtype))
	{
	  tree low = array_ref_low_bound (*t);
	  if (TREE_CODE (low) == INTEGER_CST)
	    {
	      if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
		{
		  widest_int idx
		    = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
			       wi::to_widest (low));
		  idx = wi::mul (idx, wi::to_widest
					(TYPE_SIZE (TREE_TYPE (*t))));
		  widest_int ext
		    = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
		  if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
		    {
		      *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
				       TREE_TYPE (*t),
				       TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
				       TYPE_SIZE (TREE_TYPE (*t)),
				       wide_int_to_tree (bitsizetype, idx));
		      res = true;
		    }
		}
	    }
	}
    }

  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
     of invariant addresses into a SSA name MEM_REF address.  */
  if (TREE_CODE (*t) == MEM_REF
      || TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (*t, 0);
      if (TREE_CODE (addr) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
	      || handled_component_p (TREE_OPERAND (addr, 0))))
	{
	  tree base;
	  poly_int64 coffset;
	  base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
						&coffset);
	  if (!base)
	    {
	      if (is_debug)
		return false;
	      gcc_unreachable ();
	    }

	  TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
	  TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
						  TREE_OPERAND (*t, 1),
						  size_int (coffset));
	  res = true;
	}
      gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
			   || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
    }

  /* Canonicalize back MEM_REFs to plain reference trees if the object
     accessed is a decl that has the same access semantics as the MEM_REF.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1))
      && MR_DEPENDENCE_CLIQUE (*t) == 0)
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
      if (/* Same volatile qualification.  */
	  TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
	  /* Same TBAA behavior with -fstrict-aliasing.  */
	  && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
	  && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
	      == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
	  /* Same alignment.  */
	  && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
	  /* We have to look out here to not drop a required conversion
	     from the rhs to the lhs if *t appears on the lhs or vice-versa
	     if it appears on the rhs.  Thus require strict type
	     compatibility.  */
	  && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
	{
	  *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
	  res = true;
	}
    }

  else if (TREE_CODE (*orig_t) == ADDR_EXPR
	   && TREE_CODE (*t) == MEM_REF
	   && TREE_CODE (TREE_OPERAND (*t, 0)) == INTEGER_CST)
    {
      tree base;
      poly_int64 coffset;
      base = get_addr_base_and_unit_offset (TREE_OPERAND (*orig_t, 0),
					    &coffset);
      if (base)
	{
	  gcc_assert (TREE_CODE (base) == MEM_REF);
	  poly_int64 moffset;
	  if (mem_ref_offset (base).to_shwi (&moffset))
	    {
	      coffset += moffset;
	      if (wi::to_poly_wide (TREE_OPERAND (base, 0)).to_shwi (&moffset))
		{
		  coffset += moffset;
		  *orig_t = build_int_cst (TREE_TYPE (*orig_t), coffset);
		  return true;
		}
	    }
	}
    }

  /* Canonicalize TARGET_MEM_REF in particular with respect to
     the indexes becoming constant.  */
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
	{
	  *t = tem;
	  if (TREE_CODE (*orig_t) == ADDR_EXPR)
	    recompute_tree_invariant_for_addr_expr (*orig_t);
	  res = true;
	}
    }

  return res;
}
/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases.  */

static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
{
  bool changed = false;
  gimple *stmt = gsi_stmt (*gsi);
  bool nowarning = warning_suppressed_p (stmt, OPT_Wstrict_overflow);
  unsigned i;

  fold_defer_overflow_warnings ();

  /* First do required canonicalization of [TARGET_]MEM_REF addresses
     after propagation.
     ???  This shouldn't be done in generic folding but in the
     propagation helpers which also know whether an address was
     propagated.
     Also canonicalize operand order.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
	{
	  tree *rhs = gimple_assign_rhs1_ptr (stmt);
	  if ((REFERENCE_CLASS_P (*rhs)
	       || TREE_CODE (*rhs) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (rhs))
	    changed = true;
	  tree *lhs = gimple_assign_lhs_ptr (stmt);
	  if (REFERENCE_CLASS_P (*lhs)
	      && maybe_canonicalize_mem_ref_addr (lhs))
	    changed = true;
	}
      else
	{
	  /* Canonicalize operand order.  */
	  enum tree_code code = gimple_assign_rhs_code (stmt);
	  if (TREE_CODE_CLASS (code) == tcc_comparison
	      || commutative_tree_code (code)
	      || commutative_ternary_tree_code (code))
	    {
	      tree rhs1 = gimple_assign_rhs1 (stmt);
	      tree rhs2 = gimple_assign_rhs2 (stmt);
	      if (tree_swap_operands_p (rhs1, rhs2))
		{
		  gimple_assign_set_rhs1 (stmt, rhs2);
		  gimple_assign_set_rhs2 (stmt, rhs1);
		  if (TREE_CODE_CLASS (code) == tcc_comparison)
		    gimple_assign_set_rhs_code (stmt,
						swap_tree_comparison (code));
		  changed = true;
		}
	    }
	}
      break;
    case GIMPLE_CALL:
      {
	for (i = 0; i < gimple_call_num_args (stmt); ++i)
	  {
	    tree *arg = gimple_call_arg_ptr (stmt, i);
	    if (REFERENCE_CLASS_P (*arg)
		&& maybe_canonicalize_mem_ref_addr (arg))
	      changed = true;
	  }
	tree *lhs = gimple_call_lhs_ptr (stmt);
	if (*lhs
	    && REFERENCE_CLASS_P (*lhs)
	    && maybe_canonicalize_mem_ref_addr (lhs))
	  changed = true;
	break;
      }
    case GIMPLE_ASM:
      {
	gasm *asm_stmt = as_a <gasm *> (stmt);
	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if (REFERENCE_CLASS_P (op)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if ((REFERENCE_CLASS_P (op)
		 || TREE_CODE (op) == ADDR_EXPR)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
      }
      break;
    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree *val = gimple_debug_bind_get_value_ptr (stmt);
	  if (*val
	      && (REFERENCE_CLASS_P (*val)
		  || TREE_CODE (*val) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (val, true))
	    changed = true;
	}
      break;
    case GIMPLE_COND:
      {
	/* Canonicalize operand order.  */
	tree lhs = gimple_cond_lhs (stmt);
	tree rhs = gimple_cond_rhs (stmt);
	if (tree_swap_operands_p (lhs, rhs))
	  {
	    gcond *gc = as_a <gcond *> (stmt);
	    gimple_cond_set_lhs (gc, rhs);
	    gimple_cond_set_rhs (gc, lhs);
	    gimple_cond_set_code (gc,
				  swap_tree_comparison (gimple_cond_code (gc)));
	    changed = true;
	  }
      }
      break;
    default:;
    }
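
  /* To illustrate the operand-order canonicalization above:
     tree_swap_operands_p prefers constants and invariants as the second
     operand, so a condition such as

       if (3 < x_2)

     becomes

       if (x_2 > 3)

     with the comparison code swapped via swap_tree_comparison.  */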
  /* Dispatch to pattern-based folding.  */
  if (!inplace
      || is_gimple_assign (stmt)
      || gimple_code (stmt) == GIMPLE_COND)
    {
      gimple_seq seq = NULL;
      gimple_match_op res_op;
      if (gimple_simplify (stmt, &res_op, inplace ? NULL : &seq,
			   valueize, valueize))
	{
	  if (replace_stmt_with_simplification (gsi, &res_op, &seq, inplace))
	    changed = true;
	  else
	    gimple_seq_discard (seq);
	}
    }

  stmt = gsi_stmt (*gsi);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	/* Try to canonicalize for boolean-typed X the comparisons
	   X == 0, X == 1, X != 0, and X != 1.  */
	if (gimple_assign_rhs_code (stmt) == EQ_EXPR
	    || gimple_assign_rhs_code (stmt) == NE_EXPR)
	  {
	    tree lhs = gimple_assign_lhs (stmt);
	    tree op1 = gimple_assign_rhs1 (stmt);
	    tree op2 = gimple_assign_rhs2 (stmt);
	    tree type = TREE_TYPE (op1);

	    /* Check whether the comparison operands are of the same boolean
	       type as the result type is.
	       Check that second operand is an integer-constant with value
	       one or zero.  */
	    if (TREE_CODE (op2) == INTEGER_CST
		&& (integer_zerop (op2) || integer_onep (op2))
		&& useless_type_conversion_p (TREE_TYPE (lhs), type))
	      {
		enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
		bool is_logical_not = false;

		/* X == 0 and X != 1 is a logical-not of X
		   X == 1 and X != 0 is X  */
		if ((cmp_code == EQ_EXPR && integer_zerop (op2))
		    || (cmp_code == NE_EXPR && integer_onep (op2)))
		  is_logical_not = true;

		if (is_logical_not == false)
		  gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
		/* Only for one-bit precision typed X the transformation
		   !X -> ~X is valid.  */
		else if (TYPE_PRECISION (type) == 1)
		  gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
		/* Otherwise we use !X -> X ^ 1.  */
		else
		  gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
						  build_int_cst (type, 1));
		changed = true;
		stmt = gsi_stmt (*gsi);
	      }
	  }

	unsigned old_num_ops = gimple_num_ops (stmt);
	tree lhs = gimple_assign_lhs (stmt);
	tree new_rhs = fold_gimple_assign (gsi);
	if (new_rhs
	    && !useless_type_conversion_p (TREE_TYPE (lhs),
					   TREE_TYPE (new_rhs)))
	  new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	if (new_rhs
	    && (!inplace
		|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	break;
      }

    case GIMPLE_CALL:
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree val = gimple_debug_bind_get_value (stmt);
	  if (val && REFERENCE_CLASS_P (val))
	    {
	      tree tem = maybe_fold_reference (val);
	      if (tem)
		{
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	  else if (val && TREE_CODE (val) == ADDR_EXPR)
	    {
	      tree ref = TREE_OPERAND (val, 0);
	      tree tem = maybe_fold_reference (ref);
	      if (tem)
		{
		  tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	}
      break;

    case GIMPLE_RETURN:
      {
	greturn *ret_stmt = as_a <greturn *> (stmt);
	tree ret = gimple_return_retval (ret_stmt);

	if (ret && TREE_CODE (ret) == SSA_NAME && valueize)
	  {
	    tree val = valueize (ret);
	    if (val && val != ret
		&& may_propagate_copy (ret, val))
	      {
		gimple_return_set_retval (ret_stmt, val);
		changed = true;
	      }
	  }
      }
      break;

    default:;
    }

  stmt = gsi_stmt (*gsi);

  fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);

  return changed;
}
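
/* Illustration of the boolean canonicalization above: for a one-bit
   precision boolean X,

     X == 1  and  X != 0   become   X
     X == 0  and  X != 1   become   ~X

   while for wider boolean-typed X the logical-not form is expressed as
   X ^ 1 instead, since ~X would flip all bits.  */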
/* Valueization callback that ends up not following SSA edges.  */

tree
no_follow_ssa_edges (tree)
{
  return NULL_TREE;
}

/* Valueization callback that ends up following single-use SSA edges only.  */

tree
follow_single_use_edges (tree val)
{
  if (TREE_CODE (val) == SSA_NAME
      && !has_single_use (val))
    return NULL_TREE;
  return val;
}

/* Valueization callback that follows all SSA edges.  */

tree
follow_all_ssa_edges (tree val)
{
  return val;
}
/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.
   The statement pointed to by GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
}

bool
fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
{
  return fold_stmt_1 (gsi, false, valueize);
}

/* Perform the minimal folding on statement *GSI.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.
   The statement *GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt_inplace (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
  gcc_assert (gsi_stmt (*gsi) == stmt);
  return changed;
}
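
/* A minimal usage sketch: a pass that walks all statements of the
   current function and folds each one would look roughly like

     basic_block bb;
     FOR_EACH_BB_FN (bb, cfun)
       for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
	    !gsi_end_p (gsi); gsi_next (&gsi))
	 fold_stmt (&gsi);

   keeping in mind that fold_stmt may replace the statement the
   iterator currently points at.  */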
/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
   if EXPR is null or we don't know how.
   If non-null, the result always has boolean type.  */

static tree
canonicalize_bool (tree expr, bool invert)
{
  if (!expr)
    return NULL_TREE;
  if (invert)
    {
      if (integer_nonzerop (expr))
	return boolean_false_node;
      else if (integer_zerop (expr))
	return boolean_true_node;
      else if (TREE_CODE (expr) == SSA_NAME)
	return fold_build2 (EQ_EXPR, boolean_type_node, expr,
			    build_int_cst (TREE_TYPE (expr), 0));
      else if (COMPARISON_CLASS_P (expr))
	return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
			    boolean_type_node,
			    TREE_OPERAND (expr, 0),
			    TREE_OPERAND (expr, 1));
      else
	return NULL_TREE;
    }
  else
    {
      if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
	return expr;
      if (integer_nonzerop (expr))
	return boolean_true_node;
      else if (integer_zerop (expr))
	return boolean_false_node;
      else if (TREE_CODE (expr) == SSA_NAME)
	return fold_build2 (NE_EXPR, boolean_type_node, expr,
			    build_int_cst (TREE_TYPE (expr), 0));
      else if (COMPARISON_CLASS_P (expr))
	return fold_build2 (TREE_CODE (expr),
			    boolean_type_node,
			    TREE_OPERAND (expr, 0),
			    TREE_OPERAND (expr, 1));
      else
	return NULL_TREE;
    }
}
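
/* For example: with INVERT false, an SSA name n of non-boolean type
   yields the boolean expression n != 0; with INVERT true, a comparison
   a < b yields a >= b via invert_tree_comparison (illustrative; the
   inversion of floating-point comparisons can fail and produce
   NULL_TREE).  */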
/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  */

static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
			const_tree op1, const_tree op2)
{
  gimple *s;

  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      if (operand_equal_p (expr, op1, 0))
	return ((code == NE_EXPR && integer_zerop (op2))
		|| (code == EQ_EXPR && integer_nonzerop (op2)));
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
	  && gimple_assign_rhs_code (s) == code
	  && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
	  && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
	return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
	{
	  enum tree_code c = gimple_assign_rhs_code (s);
	  if ((c == NE_EXPR && integer_zerop (op2))
	      || (c == EQ_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr, c,
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	  if ((c == EQ_EXPR && integer_zerop (op2))
	      || (c == NE_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr,
					   invert_tree_comparison (c, false),
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	}
    }

  return false;
}
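
/* For example (illustrative): given the definition

     t_1 = a_2 < b_3;

   same_bool_comparison_p (t_1, LT_EXPR, a_2, b_3) returns true via the
   SSA_NAME_DEF_STMT check, and so does the query with the second
   operand phrased as the boolean test (t_1 != 0).  */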
/* Check to see if two boolean expressions OP1 and OP2 are logically
   equivalent.  */

static bool
same_bool_result_p (const_tree op1, const_tree op2)
{
  /* Simple cases first.  */
  if (operand_equal_p (op1, op2, 0))
    return true;

  /* Check the cases where at least one of the operands is a comparison.
     These are a bit smarter than operand_equal_p in that they apply some
     identities on SSA_NAMEs.  */
  if (COMPARISON_CLASS_P (op2)
      && same_bool_comparison_p (op1, TREE_CODE (op2),
				 TREE_OPERAND (op2, 0),
				 TREE_OPERAND (op2, 1)))
    return true;
  if (COMPARISON_CLASS_P (op1)
      && same_bool_comparison_p (op2, TREE_CODE (op1),
				 TREE_OPERAND (op1, 0),
				 TREE_OPERAND (op1, 1)))
    return true;

  /* Default case.  */
  return false;
}
/* Forward declarations for some mutually recursive functions.  */

static tree
and_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison (tree type, tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison_1 (tree type, gimple *stmt,
			   enum tree_code code2, tree op2a, tree op2b);
static tree
or_comparisons_1 (tree, enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison (tree, tree var, bool invert,
			enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison_1 (tree, gimple *stmt,
			  enum tree_code code2, tree op2a, tree op2b);
/* Helper function for and_comparisons_1:  try to simplify the AND of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of the VAR before doing the AND.
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison (tree type, tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b)
{
  tree t;
  gimple *stmt = SSA_NAME_DEF_STMT (var);

  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.  */
  if (invert)
    t = or_var_with_comparison_1 (type, stmt,
				  invert_tree_comparison (code2, false),
				  op2a, op2b);
  else
    t = and_var_with_comparison_1 (type, stmt, code2, op2a, op2b);
  return canonicalize_bool (t, invert);
}
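
/* Concretely, the DeMorgan rewrite above means an inverted query such
   as

     !x AND (a < b)

   is answered by computing x OR (a >= b) on the non-inverted helper
   and letting canonicalize_bool invert the combined result
   (illustrative).  */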
/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison_1 (tree type, gimple *stmt,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_false_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (type, innercode,
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt),
				  code2, op2a, op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple *s;
      tree t;
      tree partial = NULL_TREE;
      bool is_and = (innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
	 inner1 AND (inner1 OR inner2) => inner1
	 !inner1 AND (inner1 AND inner2) => false
	 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
	 Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (type, inner2, false, code2, op2a,
					   op2b));
      else if (inner2 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (type, inner1, false, code2, op2a,
					   op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
	 Compute the first partial result, (inner1 AND (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (type, gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (t AND inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner2;
	      else if (integer_zerop (t))
		return boolean_false_node;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner2 AND (op2a code2 op2b)))  */
	  else if (integer_onep (t))
	    return boolean_true_node;

	  /* Save partial result for later.  */
	  else
	    partial = t;
	}

      /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (type, gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (inner1 AND t)  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner1;
	      else if (integer_zerop (t))
		return boolean_false_node;
	      /* If both are the same, we can apply the identity
		 (x AND x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner1 AND (op2a code2 op2b)))
	     => (t OR partial)  */
	  else
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed OR expression.  The
		     interesting case is when at least one is false.
		     Or, if both are the same, we can apply the identity
		     (x OR x) == x.  */
		  if (integer_zerop (partial))
		    return t;
		  else if (integer_zerop (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
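
/* A worked example of the redistribution above (illustrative): for

     x_1 = a_2 > 3 | a_2 < 0;	...	x_1 & (a_2 > 5)

   the first partial result is (a_2 > 3) & (a_2 > 5), which folds to
   a_2 > 5, and the second is (a_2 < 0) & (a_2 > 5), which folds to
   false; hence the whole conjunction simplifies to a_2 > 5.  */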
/* Try to simplify the AND of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
and_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) AND (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple *stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return and_var_with_comparison (type, op1a, invert, code2, op2a,
					  op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ANDed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
			{
			  if (!result)
			    result = boolean_false_node;
			  else if (!integer_zerop (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = and_var_with_comparison (type, arg, invert, code2,
						      op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }
	  break;

	default:
	  break;
	}
    }
  return NULL_TREE;
}
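
/* Illustration of the PHI case above: given

     x_1 = PHI <0(bb2), y_3(bb3)>

   the conjunction x_1 & (a_4 < b_5) folds to false provided the
   recursion can also prove that y_3 & (a_4 < b_5) is false; any
   disagreement between PHI arguments makes the routine give up and
   return NULL_TREE.  */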
/* Helper function for maybe_fold_and_comparisons and maybe_fold_or_comparisons
   : try to simplify the AND/OR of the ssa variable VAR with the comparison
   specified by (OP2A CODE2 OP2B) from match.pd.  Return NULL_EXPR if we can't
   simplify this to a single expression.  As we are going to lower the cost
   of building SSA names / gimple stmts significantly, we need to allocate
   them on the stack.  This will cause the code to be a bit ugly.  */

static tree
maybe_fold_comparisons_from_match_pd (tree type, enum tree_code code,
				      enum tree_code code1,
				      tree op1a, tree op1b,
				      enum tree_code code2, tree op2a,
				      tree op2b)
{
  /* Allocate gimple stmt1 on the stack.  */
  gassign *stmt1
    = (gassign *) XALLOCAVEC (char, gimple_size (GIMPLE_ASSIGN, 3));
  gimple_init (stmt1, GIMPLE_ASSIGN, 3);
  gimple_assign_set_rhs_code (stmt1, code1);
  gimple_assign_set_rhs1 (stmt1, op1a);
  gimple_assign_set_rhs2 (stmt1, op1b);

  /* Allocate gimple stmt2 on the stack.  */
  gassign *stmt2
    = (gassign *) XALLOCAVEC (char, gimple_size (GIMPLE_ASSIGN, 3));
  gimple_init (stmt2, GIMPLE_ASSIGN, 3);
  gimple_assign_set_rhs_code (stmt2, code2);
  gimple_assign_set_rhs1 (stmt2, op2a);
  gimple_assign_set_rhs2 (stmt2, op2b);

  /* Allocate SSA names(lhs1) on the stack.  */
  tree lhs1 = (tree) XALLOCA (tree_ssa_name);
  memset (lhs1, 0, sizeof (tree_ssa_name));
  TREE_SET_CODE (lhs1, SSA_NAME);
  TREE_TYPE (lhs1) = type;
  init_ssa_name_imm_use (lhs1);

  /* Allocate SSA names(lhs2) on the stack.  */
  tree lhs2 = (tree) XALLOCA (tree_ssa_name);
  memset (lhs2, 0, sizeof (tree_ssa_name));
  TREE_SET_CODE (lhs2, SSA_NAME);
  TREE_TYPE (lhs2) = type;
  init_ssa_name_imm_use (lhs2);

  gimple_assign_set_lhs (stmt1, lhs1);
  gimple_assign_set_lhs (stmt2, lhs2);

  gimple_match_op op (gimple_match_cond::UNCOND, code,
		      type, gimple_assign_lhs (stmt1),
		      gimple_assign_lhs (stmt2));
  if (op.resimplify (NULL, follow_all_ssa_edges))
    {
      if (gimple_simplified_result_is_gimple_val (&op))
	{
	  tree res = op.ops[0];
	  if (res == lhs1)
	    return build2 (code1, type, op1a, op1b);
	  else if (res == lhs2)
	    return build2 (code2, type, op2a, op2b);
	  else
	    return res;
	}
      else if (op.code.is_tree_code ()
	       && TREE_CODE_CLASS ((tree_code) op.code) == tcc_comparison)
	{
	  tree op0 = op.ops[0];
	  tree op1 = op.ops[1];
	  if (op0 == lhs1 || op0 == lhs2 || op1 == lhs1 || op1 == lhs2)
	    return NULL_TREE;  /* not simple */

	  return build2 ((enum tree_code) op.code, op.type, op0, op1);
	}
    }

  return NULL_TREE;
}
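
/* A note on the trick above: the two statements and their fake SSA-name
   results live on the stack purely so that resimplify can follow the
   use-def chain lhs1 -> stmt1 without allocating real IL in GC memory;
   any simplification whose result still refers to the fake names is
   rejected as "not simple" above, so the temporaries never escape.  */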
/* Try to simplify the AND of two comparisons, specified by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be simplified to a single expression (without requiring
   introducing more SSA variables to hold intermediate values),
   return the resulting tree.  Otherwise return NULL_TREE.
   If the result expression is non-null, it has boolean type.  */

tree
maybe_fold_and_comparisons (tree type,
			    enum tree_code code1, tree op1a, tree op1b,
			    enum tree_code code2, tree op2a, tree op2b)
{
  if (tree t = and_comparisons_1 (type, code1, op1a, op1b, code2, op2a, op2b))
    return t;

  if (tree t = and_comparisons_1 (type, code2, op2a, op2b, code1, op1a, op1b))
    return t;

  if (tree t = maybe_fold_comparisons_from_match_pd (type, BIT_AND_EXPR, code1,
						     op1a, op1b, code2, op2a,
						     op2b))
    return t;

  return NULL_TREE;
}
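
/* For instance (illustrative), a caller combining two conditions may
   ask

     maybe_fold_and_comparisons (boolean_type_node,
				 LT_EXPR, x, num, GE_EXPR, x, num);

   which folds the contradictory pair x < num && x >= num to
   boolean_false_node, while unrelated comparisons simply yield
   NULL_TREE and are left alone.  */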
/* Helper function for or_comparisons_1:  try to simplify the OR of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of VAR before doing the OR.
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison (tree type, tree var, bool invert,
			enum tree_code code2, tree op2a, tree op2b)
{
  tree t;
  gimple *stmt = SSA_NAME_DEF_STMT (var);

  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.  */
  if (invert)
    t = and_var_with_comparison_1 (type, stmt,
				   invert_tree_comparison (code2, false),
				   op2a, op2b);
  else
    t = or_var_with_comparison_1 (type, stmt, code2, op2a, op2b);
  return canonicalize_bool (t, invert);
}
/* Try to simplify the OR of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison_1 (tree type, gimple *stmt,
			  enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var OR (var != 0)) => true .  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_true_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = or_comparisons_1 (type, innercode,
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 code2, op2a, op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple *s;
      tree t;
      tree partial = NULL_TREE;
      bool is_or = (innercode == BIT_IOR_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
	 inner1 OR (inner1 AND inner2) => inner1
	 !inner1 OR (inner1 OR inner2) => true
	 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
	 Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_or ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_or ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (type, inner2, false, code2, op2a,
					  op2b));
      else if (inner2 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (type, inner1, false, code2, op2a,
					  op2b));

      /* Next, redistribute/reassociate the OR across the inner tests.
	 Compute the first partial result, (inner1 OR (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (type, gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (t OR inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_or)
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (integer_zerop (t))
		return inner2;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner2 OR (op2a code op2b)))  */
	  else if (integer_zerop (t))
	    return boolean_false_node;

	  /* Save partial result for later.  */
	  else
	    partial = t;
	}

      /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (type, gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (inner1 OR t)
	     => (t OR partial)  */
	  if (is_or)
	    {
	      if (integer_zerop (t))
		return inner1;
	      else if (integer_onep (t))
		return boolean_true_node;
	      /* If both are the same, we can apply the identity
		 (x OR x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner1 OR (op2a code2 op2b)))
	     => (t AND partial)  */
	  else
	    {
	      if (integer_zerop (t))
		return boolean_false_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed AND expression.  The
		     interesting case is when at least one is true.
		     Or, if both are the same, we can apply the identity
		     (x AND x) == x.  */
		  if (integer_onep (partial))
		    return t;
		  else if (integer_onep (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
/* Try to simplify the OR of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
or_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) OR (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple *stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return or_var_with_comparison (type, op1a, invert, code2, op2a,
					 op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ORed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
			{
			  if (!result)
			    result = boolean_true_node;
			  else if (!integer_onep (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = or_var_with_comparison (type, arg, invert, code2,
						     op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }
	  break;

	default:
	  break;
	}
    }
  return NULL_TREE;
}
/* Try to simplify the OR of two comparisons, specified by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be simplified to a single expression (without requiring
   introducing more SSA variables to hold intermediate values),
   return the resulting tree.  Otherwise return NULL_TREE.
   If the result expression is non-null, it has boolean type.  */

tree
maybe_fold_or_comparisons (tree type,
			   enum tree_code code1, tree op1a, tree op1b,
			   enum tree_code code2, tree op2a, tree op2b)
{
  if (tree t = or_comparisons_1 (type, code1, op1a, op1b, code2, op2a, op2b))
    return t;

  if (tree t = or_comparisons_1 (type, code2, op2a, op2b, code1, op1a, op1b))
    return t;

  if (tree t = maybe_fold_comparisons_from_match_pd (type, BIT_IOR_EXPR, code1,
						     op1a, op1b, code2, op2a,
						     op2b))
    return t;

  return NULL_TREE;
}
/* Fold STMT to a constant using VALUEIZE to valueize SSA names.

   Either NULL_TREE, a simplified but non-constant or a constant
   is returned.

   ???  This should go into a gimple-fold-inline.h file to be eventually
   privatized with the single valueize function used in the various TUs
   to avoid the indirect function call overhead.  */

tree
gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
				tree (*gvalueize) (tree))
{
  gimple_match_op res_op;
  /* ???  The SSA propagators do not correctly deal with following SSA use-def
     edges if there are intermediate VARYING defs.  For this reason
     do not follow SSA edges here even though SCCVN can technically
     just deal fine with that.  */
  if (gimple_simplify (stmt, &res_op, NULL, gvalueize, valueize))
    {
      tree res = NULL_TREE;
      if (gimple_simplified_result_is_gimple_val (&res_op))
	res = res_op.ops[0];
      else if (mprts_hook)
	res = mprts_hook (&res_op);
      if (res)
	{
	  if (dump_file && dump_flags & TDF_DETAILS)
	    {
	      fprintf (dump_file, "Match-and-simplified ");
	      print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, res);
	      fprintf (dump_file, "\n");
	    }
	  return res;
	}
    }

  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	enum tree_code subcode = gimple_assign_rhs_code (stmt);

	switch (get_gimple_rhs_class (subcode))
	  {
	  case GIMPLE_SINGLE_RHS:
	    {
	      tree rhs = gimple_assign_rhs1 (stmt);
	      enum tree_code_class kind = TREE_CODE_CLASS (subcode);

	      if (TREE_CODE (rhs) == SSA_NAME)
		{
		  /* If the RHS is an SSA_NAME, return its known constant value,
		     if any.  */
		  return (*valueize) (rhs);
		}
	      /* Handle propagating invariant addresses into address
		 operations.  */
	      else if (TREE_CODE (rhs) == ADDR_EXPR
		       && !is_gimple_min_invariant (rhs))
		{
		  poly_int64 offset = 0;
		  tree base;
		  base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
							  &offset,
							  valueize);
		  if (base
		      && (CONSTANT_CLASS_P (base)
			  || decl_address_invariant_p (base)))
		    return build_invariant_address (TREE_TYPE (rhs),
						    base, offset);
		}
	      else if (TREE_CODE (rhs) == CONSTRUCTOR
		       && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		       && known_eq (CONSTRUCTOR_NELTS (rhs),
				    TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
		{
		  unsigned i, nelts;
		  tree val;

		  nelts = CONSTRUCTOR_NELTS (rhs);
		  tree_vector_builder vec (TREE_TYPE (rhs), nelts, 1);
		  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
		    {
		      val = (*valueize) (val);
		      if (TREE_CODE (val) == INTEGER_CST
			  || TREE_CODE (val) == REAL_CST
			  || TREE_CODE (val) == FIXED_CST)
			vec.quick_push (val);
		      else
			return NULL_TREE;
		    }

		  return vec.build ();
		}
	      if (subcode == OBJ_TYPE_REF)
		{
		  tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
		  /* If callee is constant, we can fold away the wrapper.  */
		  if (is_gimple_min_invariant (val))
		    return val;
		}

	      if (kind == tcc_reference)
		{
		  if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
		       || TREE_CODE (rhs) == REALPART_EXPR
		       || TREE_CODE (rhs) == IMAGPART_EXPR)
		      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      return fold_unary_loc (EXPR_LOCATION (rhs),
					     TREE_CODE (rhs),
					     TREE_TYPE (rhs), val);
		    }
		  else if (TREE_CODE (rhs) == BIT_FIELD_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      return fold_ternary_loc (EXPR_LOCATION (rhs),
					       TREE_CODE (rhs),
					       TREE_TYPE (rhs), val,
					       TREE_OPERAND (rhs, 1),
					       TREE_OPERAND (rhs, 2));
		    }
		  else if (TREE_CODE (rhs) == MEM_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      if (TREE_CODE (val) == ADDR_EXPR
			  && is_gimple_min_invariant (val))
			{
			  tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
						  unshare_expr (val),
						  TREE_OPERAND (rhs, 1));
			  if (tem)
			    rhs = tem;
			}
		    }
		  return fold_const_aggregate_ref_1 (rhs, valueize);
		}
	      else if (kind == tcc_declaration)
		return get_symbol_constant_value (rhs);
	      return rhs;
	    }

	  case GIMPLE_UNARY_RHS:
	    return NULL_TREE;

	  case GIMPLE_BINARY_RHS:
	    /* Translate &x + CST into an invariant form suitable for
	       further propagation.  */
	    if (subcode == POINTER_PLUS_EXPR)
	      {
		tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
		tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
		if (TREE_CODE (op0) == ADDR_EXPR
		    && TREE_CODE (op1) == INTEGER_CST)
		  {
		    tree off = fold_convert (ptr_type_node, op1);
		    return build1_loc
			(loc, ADDR_EXPR, TREE_TYPE (op0),
			 fold_build2 (MEM_REF,
				      TREE_TYPE (TREE_TYPE (op0)),
				      unshare_expr (op0), off));
		  }
	      }
	    /* Canonicalize bool != 0 and bool == 0 appearing after
	       valueization.  While gimple_simplify handles this
	       it can get confused by the ~X == 1 -> X == 0 transform
	       which we can't reduce to an SSA name or a constant
	       (and we have no way to tell gimple_simplify to not
	       consider those transforms in the first place).  */
	    else if (subcode == EQ_EXPR
		     || subcode == NE_EXPR)
	      {
		tree lhs = gimple_assign_lhs (stmt);
		tree op0 = gimple_assign_rhs1 (stmt);
		if (useless_type_conversion_p (TREE_TYPE (lhs),
					       TREE_TYPE (op0)))
		  {
		    tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
		    op0 = (*valueize) (op0);
		    if (TREE_CODE (op0) == INTEGER_CST)
		      std::swap (op0, op1);
		    if (TREE_CODE (op1) == INTEGER_CST
			&& ((subcode == NE_EXPR && integer_zerop (op1))
			    || (subcode == EQ_EXPR && integer_onep (op1))))
		      return op0;
		  }
	      }
	    return NULL_TREE;

	  case GIMPLE_TERNARY_RHS:
	    {
	      /* Handle ternary operators that can appear in GIMPLE form.  */
	      tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
	      tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
	      tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
	      return fold_ternary_loc (loc, subcode,
				       TREE_TYPE (gimple_assign_lhs (stmt)),
				       op0, op1, op2);
	    }

	  default:
	    gcc_unreachable ();
	  }
      }

    case GIMPLE_CALL:
      {
	tree fn;
	gcall *call_stmt = as_a <gcall *> (stmt);

	if (gimple_call_internal_p (stmt))
	  {
	    enum tree_code subcode = ERROR_MARK;
	    switch (gimple_call_internal_fn (stmt))
	      {
	      case IFN_UBSAN_CHECK_ADD:
		subcode = PLUS_EXPR;
		break;
	      case IFN_UBSAN_CHECK_SUB:
		subcode = MINUS_EXPR;
		break;
	      case IFN_UBSAN_CHECK_MUL:
		subcode = MULT_EXPR;
		break;
	      case IFN_BUILTIN_EXPECT:
		{
		  tree arg0 = gimple_call_arg (stmt, 0);
		  tree op0 = (*valueize) (arg0);
		  if (TREE_CODE (op0) == INTEGER_CST)
		    return op0;
		  return NULL_TREE;
		}
	      default:
		return NULL_TREE;
	      }
	    tree arg0 = gimple_call_arg (stmt, 0);
	    tree arg1 = gimple_call_arg (stmt, 1);
	    tree op0 = (*valueize) (arg0);
	    tree op1 = (*valueize) (arg1);

	    if (TREE_CODE (op0) != INTEGER_CST
		|| TREE_CODE (op1) != INTEGER_CST)
	      {
		switch (subcode)
		  {
		  case MULT_EXPR:
		    /* x * 0 = 0 * x = 0 without overflow.  */
		    if (integer_zerop (op0) || integer_zerop (op1))
		      return build_zero_cst (TREE_TYPE (arg0));
		    break;
		  case MINUS_EXPR:
		    /* y - y = 0 without overflow.  */
		    if (operand_equal_p (op0, op1, 0))
		      return build_zero_cst (TREE_TYPE (arg0));
		    break;
		  default:
		    break;
		  }
	      }
	    tree res
	      = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
	    if (res
		&& TREE_CODE (res) == INTEGER_CST
		&& !TREE_OVERFLOW (res))
	      return res;
	    return NULL_TREE;
	  }

	fn = (*valueize) (gimple_call_fn (stmt));
	if (TREE_CODE (fn) == ADDR_EXPR
	    && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
	    && fndecl_built_in_p (TREE_OPERAND (fn, 0))
	    && gimple_builtin_call_types_compatible_p (stmt,
						       TREE_OPERAND (fn, 0)))
	  {
	    tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
	    tree retval;
	    unsigned i;
	    for (i = 0; i < gimple_call_num_args (stmt); ++i)
	      args[i] = (*valueize) (gimple_call_arg (stmt, i));
	    retval = fold_builtin_call_array (loc,
					      gimple_call_return_type (call_stmt),
					      fn, gimple_call_num_args (stmt),
					      args);
	    if (retval)
	      {
		/* fold_call_expr wraps the result inside a NOP_EXPR.  */
		STRIP_NOPS (retval);
		retval = fold_convert (gimple_call_return_type (call_stmt),
				       retval);
	      }
	    return retval;
	  }
	return NULL_TREE;
      }

    default:
      return NULL_TREE;
    }
}
/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
   Returns NULL_TREE if folding to a constant is not possible, otherwise
   returns a constant according to is_gimple_min_invariant.  */

tree
gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
{
  tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
  if (res && is_gimple_min_invariant (res))
    return res;
  return NULL_TREE;
}
/* The following set of functions are supposed to fold references using
   their constant initializers.  */

/* See if we can find constructor defining value of BASE.
   When we know the constructor with constant offset (such as
   base is array[40] and we do know constructor of array), then
   BIT_OFFSET is adjusted accordingly.

   As a special case, return error_mark_node when constructor
   is not explicitly available, but it is known to be zero
   such as 'static const int a;'.  */

static tree
get_base_constructor (tree base, poly_int64_pod *bit_offset,
		      tree (*valueize) (tree))
{
  poly_int64 bit_offset2, size, max_size;
  bool reverse;

  if (TREE_CODE (base) == MEM_REF)
    {
      poly_offset_int boff = *bit_offset + mem_ref_offset (base) * BITS_PER_UNIT;
      if (!boff.to_shwi (bit_offset))
	return NULL_TREE;

      if (valueize
	  && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
	base = valueize (TREE_OPERAND (base, 0));
      if (!base || TREE_CODE (base) != ADDR_EXPR)
	return NULL_TREE;
      base = TREE_OPERAND (base, 0);
    }
  else if (valueize
	   && TREE_CODE (base) == SSA_NAME)
    base = valueize (base);

  /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
     DECL_INITIAL.  If BASE is a nested reference into another
     ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
     the inner reference.  */
  switch (TREE_CODE (base))
    {
    case VAR_DECL:
    case CONST_DECL:
      {
	tree init = ctor_for_folding (base);

	/* Our semantic is exact opposite of ctor_for_folding;
	   NULL means unknown, while error_mark_node is 0.  */
	if (init == error_mark_node)
	  return NULL_TREE;
	if (!init)
	  return error_mark_node;
	return init;
      }

    case VIEW_CONVERT_EXPR:
      return get_base_constructor (TREE_OPERAND (base, 0),
				   bit_offset, valueize);

    case ARRAY_REF:
    case COMPONENT_REF:
      base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
				      &reverse);
      if (!known_size_p (max_size) || maybe_ne (size, max_size))
	return NULL_TREE;
      *bit_offset += bit_offset2;
      return get_base_constructor (base, bit_offset, valueize);

    case CONSTRUCTOR:
      return base;

    default:
      if (CONSTANT_CLASS_P (base))
	return base;

      return NULL_TREE;
    }
}
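
/* Note the inverted return convention relative to ctor_for_folding:
   e.g. for 'static const int a;' there is no explicit initializer, yet
   the value is known to be zero, so error_mark_node is returned here
   and callers fold the read to 0, while NULL_TREE means the
   initializer is simply unknown.  */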
/* CTOR is CONSTRUCTOR of an array type.  Fold a reference of SIZE bits
   to the memory at bit OFFSET.  When non-null, TYPE is the expected
   type of the reference; otherwise the type of the referenced element
   is used instead.  When SIZE is zero, attempt to fold a reference to
   the entire element which OFFSET refers to.  Increment *SUBOFF by
   the bit offset of the accessed element.  */

static tree
fold_array_ctor_reference (tree type, tree ctor,
			   unsigned HOST_WIDE_INT offset,
			   unsigned HOST_WIDE_INT size,
			   tree from_decl,
			   unsigned HOST_WIDE_INT *suboff)
{
  offset_int low_bound;
  offset_int elt_size;
  offset_int access_index;
  tree domain_type = NULL_TREE;
  HOST_WIDE_INT inner_offset;

  /* Compute low bound and elt size.  */
  if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
    domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    {
      /* Static constructors for variably sized objects make no sense.  */
      if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
	return NULL_TREE;
      low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
    }
  else
    low_bound = 0;
  /* Static constructors for variably sized objects make no sense.  */
  if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
    return NULL_TREE;
  elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));

  /* When TYPE is non-null, verify that it specifies a constant-sized
     access of a multiple of the array element size.  Avoid division
     by zero below when ELT_SIZE is zero, such as with the result of
     an initializer for a zero-length array or an empty struct.  */
  if (elt_size == 0
      || (type
	  && (!TYPE_SIZE_UNIT (type)
	      || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST)))
    return NULL_TREE;

  /* Compute the array index we look for.  */
  access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
				 elt_size);
  access_index += low_bound;

  /* And offset within the access.  */
  inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);

  unsigned HOST_WIDE_INT elt_sz = elt_size.to_uhwi ();
  if (size > elt_sz * BITS_PER_UNIT)
    {
      /* native_encode_expr constraints.  */
      if (size > MAX_BITSIZE_MODE_ANY_MODE
	  || size % BITS_PER_UNIT != 0
	  || inner_offset % BITS_PER_UNIT != 0
	  || elt_sz > MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT)
	return NULL_TREE;

      unsigned ctor_idx;
      tree val = get_array_ctor_element_at_index (ctor, access_index,
						  &ctor_idx);
      if (!val && ctor_idx >= CONSTRUCTOR_NELTS (ctor))
	return build_zero_cst (type);

      /* native-encode adjacent ctor elements.  */
      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
      unsigned bufoff = 0;
      offset_int index = 0;
      offset_int max_index = access_index;
      constructor_elt *elt = CONSTRUCTOR_ELT (ctor, ctor_idx);
      if (!val)
	val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
      else if (!CONSTANT_CLASS_P (val))
	return NULL_TREE;
      if (!elt->index)
	;
      else if (TREE_CODE (elt->index) == RANGE_EXPR)
	{
	  index = wi::to_offset (TREE_OPERAND (elt->index, 0));
	  max_index = wi::to_offset (TREE_OPERAND (elt->index, 1));
	}
      else
	index = max_index = wi::to_offset (elt->index);
      index = wi::umax (index, access_index);
      do
	{
	  if (bufoff + elt_sz > sizeof (buf))
	    elt_sz = sizeof (buf) - bufoff;
	  int len = native_encode_expr (val, buf + bufoff, elt_sz,
					inner_offset / BITS_PER_UNIT);
	  if (len != (int) elt_sz - inner_offset / BITS_PER_UNIT)
	    return NULL_TREE;
	  inner_offset = 0;
	  bufoff += len;

	  access_index += 1;
	  if (wi::cmpu (access_index, index) == 0)
	    val = elt->value;
	  else if (wi::cmpu (access_index, max_index) > 0)
	    {
	      ctor_idx++;
	      if (ctor_idx >= CONSTRUCTOR_NELTS (ctor))
		{
		  val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
		  ++max_index;
		}
	      else
		{
		  elt = CONSTRUCTOR_ELT (ctor, ctor_idx);
		  index = 0;
		  max_index = access_index;
		  if (!elt->index)
		    ;
		  else if (TREE_CODE (elt->index) == RANGE_EXPR)
		    {
		      index = wi::to_offset (TREE_OPERAND (elt->index, 0));
		      max_index = wi::to_offset (TREE_OPERAND (elt->index, 1));
		    }
		  else
		    index = max_index = wi::to_offset (elt->index);
		  index = wi::umax (index, access_index);
		  if (wi::cmpu (access_index, index) == 0)
		    val = elt->value;
		  else
		    val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
		}
	    }
	}
      while (bufoff < size / BITS_PER_UNIT);
      *suboff += size;
      return native_interpret_expr (type, buf, size / BITS_PER_UNIT);
    }

  if (tree val = get_array_ctor_element_at_index (ctor, access_index))
    {
      if (!size && TREE_CODE (val) != CONSTRUCTOR)
	{
	  /* For the final reference to the entire accessed element
	     (SIZE is zero), reset INNER_OFFSET, disregard TYPE (which
	     may be null) in favor of the type of the element, and set
	     SIZE to the size of the accessed element.  */
	  inner_offset = 0;
	  type = TREE_TYPE (val);
	  size = elt_sz * BITS_PER_UNIT;
	}
      else if (size && access_index < CONSTRUCTOR_NELTS (ctor) - 1
	       && TREE_CODE (val) == CONSTRUCTOR
	       && (elt_sz * BITS_PER_UNIT - inner_offset) < size)
	/* If this isn't the last element in the CTOR and a CTOR itself
	   and it does not cover the whole object we are requesting give up
	   since we're not set up for combining from multiple CTORs.  */
	return NULL_TREE;

      *suboff += access_index.to_uhwi () * elt_sz * BITS_PER_UNIT;
      return fold_ctor_reference (type, val, inner_offset, size, from_decl,
				  suboff);
    }

  /* Memory not explicitly mentioned in constructor is 0 (or
     the reference is out of range).  */
  return type ? build_zero_cst (type) : NULL_TREE;
}
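
/* A small worked example (illustrative): for

     static const int a[4] = { 1, 2, 3 };

   a 32-bit read at bit offset 64 hits access_index 2 and yields 3,
   while a read at bit offset 96 falls outside the explicit
   initializer and folds to build_zero_cst, i.e. 0.  */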
/* CTOR is CONSTRUCTOR of an aggregate or vector.  Fold a reference
   of SIZE bits to the memory at bit OFFSET.  When non-null, TYPE
   is the expected type of the reference; otherwise the type of
   the referenced member is used instead.  When SIZE is zero,
   attempt to fold a reference to the entire member which OFFSET
   refers to.  Increment *SUBOFF by the bit offset of the
   accessed member.  */

static tree
fold_nonarray_ctor_reference (tree type, tree ctor,
			      unsigned HOST_WIDE_INT offset,
			      unsigned HOST_WIDE_INT size,
			      tree from_decl,
			      unsigned HOST_WIDE_INT *suboff)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
			    cval)
    {
      tree byte_offset = DECL_FIELD_OFFSET (cfield);
      tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
      tree field_size = DECL_SIZE (cfield);

      if (!field_size)
	{
	  /* Determine the size of the flexible array member from
	     the size of the initializer provided for it.  */
	  field_size = TYPE_SIZE (TREE_TYPE (cval));
	}

      /* Variable sized objects in static constructors makes no sense,
	 but field_size can be NULL for flexible array members.  */
      gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
		  && TREE_CODE (byte_offset) == INTEGER_CST
		  && (field_size != NULL_TREE
		      ? TREE_CODE (field_size) == INTEGER_CST
		      : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));

      /* Compute bit offset of the field.  */
      offset_int bitoffset
	= (wi::to_offset (field_offset)
	   + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
      /* Compute bit offset where the field ends.  */
      offset_int bitoffset_end;
      if (field_size != NULL_TREE)
	bitoffset_end = bitoffset + wi::to_offset (field_size);
      else
	bitoffset_end = 0;

      /* Compute the bit offset of the end of the desired access.
	 As a special case, if the size of the desired access is
	 zero, assume the access is to the entire field (and let
	 the caller make any necessary adjustments by storing
	 the actual bounds of the field in FIELDBOUNDS).  */
      offset_int access_end = offset_int (offset);
      if (size)
	access_end += size;
      else
	access_end = bitoffset_end;

      /* Is there any overlap between the desired access at
	 [OFFSET, OFFSET+SIZE) and the offset of the field within
	 the object at [BITOFFSET, BITOFFSET_END)?  */
      if (wi::cmps (access_end, bitoffset) > 0
	  && (field_size == NULL_TREE
	      || wi::lts_p (offset, bitoffset_end)))
	{
	  *suboff += bitoffset.to_uhwi ();

	  if (!size && TREE_CODE (cval) != CONSTRUCTOR)
	    {
	      /* For the final reference to the entire accessed member
		 (SIZE is zero), reset OFFSET, disregard TYPE (which may
		 be null) in favor of the type of the member, and set
		 SIZE to the size of the accessed member.  */
	      offset = bitoffset.to_uhwi ();
	      type = TREE_TYPE (cval);
	      size = (bitoffset_end - bitoffset).to_uhwi ();
	    }

	  /* We do have overlap.  Now see if the field is large enough
	     to cover the access.  Give up for accesses that extend
	     beyond the end of the object or that span multiple fields.  */
	  if (wi::cmps (access_end, bitoffset_end) > 0)
	    return NULL_TREE;
	  if (offset < bitoffset)
	    return NULL_TREE;

	  offset_int inner_offset = offset_int (offset) - bitoffset;
	  return fold_ctor_reference (type, cval,
				      inner_offset.to_uhwi (), size,
				      from_decl, suboff);
	}
    }

  if (!type)
    return NULL_TREE;

  return build_zero_cst (type);
}
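
/* Likewise for aggregates (illustrative): given

     static const struct { char c; int i; } s = { 'x', 42 };

   a properly aligned read of s.i finds the field whose
   [BITOFFSET, BITOFFSET_END) range covers the access and recurses
   through fold_ctor_reference to produce 42.  */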
/* CTOR is value initializing memory.  Fold a reference of TYPE and
   bit size POLY_SIZE to the memory at bit POLY_OFFSET.  When POLY_SIZE
   is zero, attempt to fold a reference to the entire subobject
   which OFFSET refers to.  This is used when folding accesses to
   string members of aggregates.  When non-null, set *SUBOFF to
   the bit offset of the accessed subobject.  */

tree
fold_ctor_reference (tree type, tree ctor, const poly_uint64 &poly_offset,
		     const poly_uint64 &poly_size, tree from_decl,
		     unsigned HOST_WIDE_INT *suboff /* = NULL */)
{
  tree ret;

  /* We found the field with exact match.  */
  if (type
      && useless_type_conversion_p (type, TREE_TYPE (ctor))
      && known_eq (poly_offset, 0U))
    return canonicalize_constructor_val (unshare_expr (ctor), from_decl);

  /* The remaining optimizations need a constant size and offset.  */
  unsigned HOST_WIDE_INT size, offset;
  if (!poly_size.is_constant (&size) || !poly_offset.is_constant (&offset))
    return NULL_TREE;

  /* We are at the end of walk, see if we can view convert the
     result.  */
  if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
      /* VIEW_CONVERT_EXPR is defined only for matching sizes.  */
      && !compare_tree_int (TYPE_SIZE (type), size)
      && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
    {
      ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
      if (ret)
	{
	  ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
	  if (ret)
	    STRIP_USELESS_TYPE_CONVERSION (ret);
	}
      return ret;
    }

  /* For constants and byte-aligned/sized reads try to go through
     native_encode/interpret.  */
  if (CONSTANT_CLASS_P (ctor)
      && BITS_PER_UNIT == 8
      && offset % BITS_PER_UNIT == 0
      && offset / BITS_PER_UNIT <= INT_MAX
      && size % BITS_PER_UNIT == 0
      && size <= MAX_BITSIZE_MODE_ANY_MODE
      && can_native_interpret_type_p (type))
    {
      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
      int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
				    offset / BITS_PER_UNIT);
      if (len > 0)
	return native_interpret_expr (type, buf, len);
    }
  if (TREE_CODE (ctor) == CONSTRUCTOR)
    {
      unsigned HOST_WIDE_INT dummy = 0;
      if (!suboff)
	suboff = &dummy;

      tree ret;
      if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
	  || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
	ret = fold_array_ctor_reference (type, ctor, offset, size,
					 from_decl, suboff);
      else
	ret = fold_nonarray_ctor_reference (type, ctor, offset, size,
					    from_decl, suboff);

      /* Fall back to native_encode_initializer.  Needs to be done
	 only in the outermost fold_ctor_reference call (because it itself
	 recurses into CONSTRUCTORs) and doesn't update suboff.  */
      if (ret == NULL_TREE
	  && suboff == &dummy
	  && BITS_PER_UNIT == 8
	  && offset % BITS_PER_UNIT == 0
	  && offset / BITS_PER_UNIT <= INT_MAX
	  && size % BITS_PER_UNIT == 0
	  && size <= MAX_BITSIZE_MODE_ANY_MODE
	  && can_native_interpret_type_p (type))
	{
	  unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
	  int len = native_encode_initializer (ctor, buf, size / BITS_PER_UNIT,
					       offset / BITS_PER_UNIT);
	  if (len > 0)
	    return native_interpret_expr (type, buf, len);
	}

      return ret;
    }

  return NULL_TREE;
}
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates valueizing SSA
   names using VALUEIZE.  Return NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
{
  tree ctor, idx, base;
  poly_int64 offset, size, max_size;
  tree tem;
  bool reverse;

  if (TREE_THIS_VOLATILE (t))
    return NULL_TREE;

  if (DECL_P (t))
    return get_symbol_constant_value (t);

  tem = fold_read_from_constant_string (t);
  if (tem)
    return tem;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Constant indexes are handled well by get_base_constructor.
         Only special case variable offsets.
         FIXME: This code can't handle nested references with variable indexes
         (they will be handled only by iteration of ccp).  Perhaps we can bring
         get_ref_base_and_extent here and make it use a valueize callback.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
          && valueize
          && (idx = (*valueize) (TREE_OPERAND (t, 1)))
          && poly_int_tree_p (idx))
        {
          tree low_bound, unit_size;

          /* If the resulting bit-offset is constant, track it.  */
          if ((low_bound = array_ref_low_bound (t),
               poly_int_tree_p (low_bound))
              && (unit_size = array_ref_element_size (t),
                  tree_fits_uhwi_p (unit_size)))
            {
              poly_offset_int woffset
                = wi::sext (wi::to_poly_offset (idx)
                            - wi::to_poly_offset (low_bound),
                            TYPE_PRECISION (sizetype));
              woffset *= tree_to_uhwi (unit_size);
              woffset *= BITS_PER_UNIT;
              if (woffset.to_shwi (&offset))
                {
                  base = TREE_OPERAND (t, 0);
                  ctor = get_base_constructor (base, &offset, valueize);
                  /* Empty constructor.  Always fold to 0.  */
                  if (ctor == error_mark_node)
                    return build_zero_cst (TREE_TYPE (t));
                  /* Out of bound array access.  Value is undefined,
                     but don't fold.  */
                  if (maybe_lt (offset, 0))
                    return NULL_TREE;
                  /* We cannot determine ctor.  */
                  if (!ctor)
                    return NULL_TREE;
                  return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
                                              tree_to_uhwi (unit_size)
                                              * BITS_PER_UNIT,
                                              base);
                }
            }
        }
      /* Fallthru.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case TARGET_MEM_REF:
    case MEM_REF:
      base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
      ctor = get_base_constructor (base, &offset, valueize);

      /* Empty constructor.  Always fold to 0.  */
      if (ctor == error_mark_node)
        return build_zero_cst (TREE_TYPE (t));
      /* We do not know precise address.  */
      if (!known_size_p (max_size) || maybe_ne (max_size, size))
        return NULL_TREE;
      /* We cannot determine ctor.  */
      if (!ctor)
        return NULL_TREE;

      /* Out of bound array access.  Value is undefined, but don't fold.  */
      if (maybe_lt (offset, 0))
        return NULL_TREE;

      tem = fold_ctor_reference (TREE_TYPE (t), ctor, offset, size, base);
      if (tem)
        return tem;

      /* For bit field reads try to read the representative and
         adjust.  */
      if (TREE_CODE (t) == COMPONENT_REF
          && DECL_BIT_FIELD (TREE_OPERAND (t, 1))
          && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1)))
        {
          HOST_WIDE_INT csize, coffset;
          tree field = TREE_OPERAND (t, 1);
          tree repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
          if (INTEGRAL_TYPE_P (TREE_TYPE (repr))
              && size.is_constant (&csize)
              && offset.is_constant (&coffset)
              && (coffset % BITS_PER_UNIT != 0
                  || csize % BITS_PER_UNIT != 0)
              && !reverse
              && BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN)
            {
              poly_int64 bitoffset;
              poly_uint64 field_offset, repr_offset;
              if (poly_int_tree_p (DECL_FIELD_OFFSET (field), &field_offset)
                  && poly_int_tree_p (DECL_FIELD_OFFSET (repr), &repr_offset))
                bitoffset = (field_offset - repr_offset) * BITS_PER_UNIT;
              else
                bitoffset = 0;
              bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
                            - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
              HOST_WIDE_INT bitoff;
              int diff = (TYPE_PRECISION (TREE_TYPE (repr))
                          - TYPE_PRECISION (TREE_TYPE (field)));
              if (bitoffset.is_constant (&bitoff)
                  && bitoff >= 0
                  && bitoff <= diff)
                {
                  offset -= bitoff;
                  size = tree_to_uhwi (DECL_SIZE (repr));

                  tem = fold_ctor_reference (TREE_TYPE (repr), ctor, offset,
                                             size, base);
                  if (tem && TREE_CODE (tem) == INTEGER_CST)
                    {
                      if (!BYTES_BIG_ENDIAN)
                        tem = wide_int_to_tree (TREE_TYPE (field),
                                                wi::lrshift (wi::to_wide (tem),
                                                             bitoff));
                      else
                        tem = wide_int_to_tree (TREE_TYPE (field),
                                                wi::lrshift (wi::to_wide (tem),
                                                             diff - bitoff));
                      return tem;
                    }
                }
            }
        }
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
        tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
        if (c && TREE_CODE (c) == COMPLEX_CST)
          return fold_build1_loc (EXPR_LOCATION (t),
                                  TREE_CODE (t), TREE_TYPE (t), c);
        break;
      }

    default:
      break;
    }

  return NULL_TREE;
}

/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates.  Return
   NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref (tree t)
{
  return fold_const_aggregate_ref_1 (t, NULL);
}

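/* Illustrative sketch (assumed caller, not from the original sources):
   a propagation pass supplies its lattice lookup as the valueization
   hook so constant SSA indices can be seen through:

     static tree
     my_valueize (tree name)
     {
       return lattice_constant_for (name);   // assumed helper
     }

     tree val = fold_const_aggregate_ref_1 (rhs, my_valueize);

   MY_VALUEIZE and LATTICE_CONSTANT_FOR are placeholder names for
   whatever the pass uses to map an SSA name to a known constant.  */
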
/* Lookup virtual method with index TOKEN in a virtual table V
   at OFFSET.
   Set CAN_REFER if non-NULL to false if the method is not referable
   or if the virtual table is ill-formed (such as rewritten by a
   symbol not produced by the C++ frontend); in that case just
   return NULL.  */

tree
gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
                                   tree v,
                                   unsigned HOST_WIDE_INT offset,
                                   bool *can_refer)
{
  tree vtable = v, init, fn;
  unsigned HOST_WIDE_INT size;
  unsigned HOST_WIDE_INT elt_size, access_index;
  tree domain_type;

  if (can_refer)
    *can_refer = true;

  /* First of all double check we have virtual table.  */
  if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
    {
      /* Pass down that we lost track of the target.  */
      if (can_refer)
        *can_refer = false;
      return NULL_TREE;
    }

  init = ctor_for_folding (v);

  /* The virtual tables should always be born with constructors
     and we always should assume that they are available for
     folding.  At the moment we do not stream them in all cases,
     but it should never happen that the ctor seems unreachable.  */
  gcc_assert (init);
  if (init == error_mark_node)
    {
      /* Pass down that we lost track of the target.  */
      if (can_refer)
        *can_refer = false;
      return NULL_TREE;
    }

  gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
  size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
  offset *= BITS_PER_UNIT;
  offset += token * size;

  /* Lookup the value in the constructor that is assumed to be array.
     This is equivalent to
     fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
                               offset, size, NULL);
     but in a constant time.  We expect that frontend produced a simple
     array without indexed initializers.  */

  gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
  domain_type = TYPE_DOMAIN (TREE_TYPE (init));
  gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
  elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));

  access_index = offset / BITS_PER_UNIT / elt_size;
  gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);

  /* The C++ FE can now produce indexed fields, and we check if the indexes
     match.  */
  if (access_index < CONSTRUCTOR_NELTS (init))
    {
      fn = CONSTRUCTOR_ELT (init, access_index)->value;
      tree idx = CONSTRUCTOR_ELT (init, access_index)->index;
      gcc_checking_assert (!idx || tree_to_uhwi (idx) == access_index);
      STRIP_NOPS (fn);
    }
  else
    fn = NULL;

  /* For type inconsistent program we may end up looking up virtual method
     in virtual table that does not contain TOKEN entries.  We may overrun
     the virtual table and pick up a constant or RTTI info pointer.
     In any case the call is undefined.  */
  if (!fn
      || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
      || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
    fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
  else
    {
      fn = TREE_OPERAND (fn, 0);

      /* When cgraph node is missing and function is not public, we cannot
         devirtualize.  This can happen in WHOPR when the actual method
         ends up in other partition, because we found devirtualization
         possibility too late.  */
      if (!can_refer_decl_in_current_unit_p (fn, vtable))
        {
          if (can_refer)
            {
              *can_refer = false;
              return fn;
            }
          return NULL_TREE;
        }
    }

  /* Make sure we create a cgraph node for functions we'll reference.
     They can be non-existent if the reference comes from an entry
     of an external vtable for example.  */
  cgraph_node::get_create (fn);

  return fn;
}

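/* Worked example (illustrative): with BITS_PER_UNIT == 8 and 8-byte
   vtable slots, a lookup of TOKEN 2 at byte OFFSET 16 computes
     offset       = 16 * 8 + 2 * 64 = 256 bits
     access_index = 256 / 8 / 8     = 4
   so the fifth element of the vtable's array constructor is the
   candidate method pointer.  */
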
/* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
   is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
   KNOWN_BINFO carries the binfo describing the true type of
   OBJ_TYPE_REF_OBJECT(REF).
   Set CAN_REFER if non-NULL to false if the method is not referable
   or if the virtual table is ill-formed (such as rewritten by a
   symbol not produced by the C++ frontend); in that case just
   return NULL.  */

tree
gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
                                  bool *can_refer)
{
  unsigned HOST_WIDE_INT offset;
  tree v;

  v = BINFO_VTABLE (known_binfo);
  /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone.  */
  if (!v)
    return NULL_TREE;

  if (!vtable_pointer_value_to_vtable (v, &v, &offset))
    {
      if (can_refer)
        *can_refer = false;
      return NULL_TREE;
    }
  return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
}

/* Given a pointer value T, return a simplified version of an
   indirection through T, or NULL_TREE if no simplification is
   possible.  Note that the resulting type may be different from
   the type pointed to in the sense that it is still compatible
   from the langhooks point of view.  */

tree
gimple_fold_indirect_ref (tree t)
{
  tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
  tree sub = t;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype)
      || TYPE_REF_CAN_ALIAS_ALL (ptype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (useless_type_conversion_p (type, optype))
        return op;

      /* *(foo *)&fooarray => fooarray[0] */
      if (TREE_CODE (optype) == ARRAY_TYPE
          && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
          && useless_type_conversion_p (type, TREE_TYPE (optype)))
        {
          tree type_domain = TYPE_DOMAIN (optype);
          tree min_val = size_zero_node;
          if (type_domain && TYPE_MIN_VALUE (type_domain))
            min_val = TYPE_MIN_VALUE (type_domain);
          if (TREE_CODE (min_val) == INTEGER_CST)
            return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
        }
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
               && useless_type_conversion_p (type, TREE_TYPE (optype)))
        return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
               && useless_type_conversion_p (type, TREE_TYPE (optype)))
        {
          tree part_width = TYPE_SIZE (type);
          tree index = bitsize_int (0);
          return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
        }
    }

  /* *(p + CST) -> ...  */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree addr = TREE_OPERAND (sub, 0);
      tree off = TREE_OPERAND (sub, 1);
      tree addrtype;

      STRIP_NOPS (addr);
      addrtype = TREE_TYPE (addr);

      /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
      if (TREE_CODE (addr) == ADDR_EXPR
          && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
          && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
          && tree_fits_uhwi_p (off))
        {
          unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
          tree part_width = TYPE_SIZE (type);
          unsigned HOST_WIDE_INT part_widthi
            = tree_to_shwi (part_width) / BITS_PER_UNIT;
          unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
          tree index = bitsize_int (indexi);
          if (known_lt (offset / part_widthi,
                        TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype))))
            return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
                                part_width, index);
        }

      /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
      if (TREE_CODE (addr) == ADDR_EXPR
          && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
          && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
        {
          tree size = TYPE_SIZE_UNIT (type);
          if (tree_int_cst_equal (size, off))
            return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
        }

      /* *(p + CST) -> MEM_REF <p, CST>.  */
      if (TREE_CODE (addr) != ADDR_EXPR
          || DECL_P (TREE_OPERAND (addr, 0)))
        return fold_build2 (MEM_REF, type,
                            addr,
                            wide_int_to_tree (ptype, wi::to_wide (off)));
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
      && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      tree osub = sub;
      sub = gimple_fold_indirect_ref (sub);
      if (! sub)
        sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
        min_val = TYPE_MIN_VALUE (type_domain);
      if (TREE_CODE (min_val) == INTEGER_CST)
        return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}

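/* Illustrative example (assumed GIMPLE, not from the original sources):
   with a vector
     v4si x;
   and a pointer formed as POINTER_PLUS_EXPR <&x, 4>, folding the
   indirection of that pointer at type int yields
     BIT_FIELD_REF <x, 32, 32>
   i.e. the second 32-bit lane, matching the ((foo*)&vectorfoo)[1]
   case above.  */
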
/* Return true if CODE is an operation that when operating on signed
   integer types involves undefined behavior on overflow and the
   operation can be expressed with unsigned arithmetic.  */

bool
arith_code_with_undefined_signed_overflow (tree_code code)
{
  switch (code)
    {
    case ABS_EXPR:
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case NEGATE_EXPR:
    case POINTER_PLUS_EXPR:
      return true;

    default:
      return false;
    }
}

/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
   operation that can be transformed to unsigned arithmetic by converting
   its operand, carrying out the operation in the corresponding unsigned
   type and converting the result back to the original type.

   Returns a sequence of statements that replace STMT and also contain
   a modified form of STMT itself.  */

gimple_seq
rewrite_to_defined_overflow (gimple *stmt)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "rewriting stmt with undefined signed "
               "overflow ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
    }

  tree lhs = gimple_assign_lhs (stmt);
  tree type = unsigned_type_for (TREE_TYPE (lhs));
  gimple_seq stmts = NULL;
  if (gimple_assign_rhs_code (stmt) == ABS_EXPR)
    gimple_assign_set_rhs_code (stmt, ABSU_EXPR);
  else
    for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
      {
        tree op = gimple_op (stmt, i);
        op = gimple_convert (&stmts, type, op);
        gimple_set_op (stmt, i, op);
      }
  gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
  gimple_set_modified (stmt, true);
  gimple_seq_add_stmt (&stmts, stmt);
  gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
  gimple_seq_add_stmt (&stmts, cvt);

  return stmts;
}

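/* Illustrative example: for the signed addition
     c_5 = a_1 + b_2;
   the rewritten sequence is
     _3 = (unsigned int) a_1;
     _4 = (unsigned int) b_2;
     _6 = _3 + _4;
     c_5 = (int) _6;
   so the arithmetic itself can no longer invoke undefined behavior
   on overflow.  */
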
/* The valueization hook we use for the gimple_build API simplification.
   This makes us match fold_buildN behavior by only combining with
   statements in the sequence(s) we are currently building.  */

static tree
gimple_build_valueize (tree op)
{
  if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
    return op;
  return NULL_TREE;
}

/* Build the expression CODE OP0 of type TYPE with location LOC,
   simplifying it first if possible.  Returns the built
   expression value and appends statements possibly defining it
   to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
              enum tree_code code, tree type, tree op0)
{
  tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
  if (!res)
    {
      res = create_tmp_reg_or_ssa_name (type);
      gimple *stmt;
      if (code == REALPART_EXPR
          || code == IMAGPART_EXPR
          || code == VIEW_CONVERT_EXPR)
        stmt = gimple_build_assign (res, code, build1 (code, type, op0));
      else
        stmt = gimple_build_assign (res, code, op0);
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}

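/* Illustrative sketch (assumed caller): building 'tem = -x_1' while
   accumulating any statements needed into STMTS:

     gimple_seq stmts = NULL;
     tree tem = gimple_build (&stmts, loc, NEGATE_EXPR, type, x_1);

   If the operand simplifies (for instance when x_1 is constant), no
   statement is emitted and TEM is the simplified value directly.  */
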
/* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
   simplifying it first if possible.  Returns the built
   expression value and appends statements possibly defining it
   to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
              enum tree_code code, tree type, tree op0, tree op1)
{
  tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
  if (!res)
    {
      res = create_tmp_reg_or_ssa_name (type);
      gimple *stmt = gimple_build_assign (res, code, op0, op1);
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}

/* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
   simplifying it first if possible.  Returns the built
   expression value and appends statements possibly defining it
   to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
              enum tree_code code, tree type, tree op0, tree op1, tree op2)
{
  tree res = gimple_simplify (code, type, op0, op1, op2,
                              seq, gimple_build_valueize);
  if (!res)
    {
      res = create_tmp_reg_or_ssa_name (type);
      gimple *stmt;
      if (code == BIT_FIELD_REF)
        stmt = gimple_build_assign (res, code,
                                    build3 (code, type, op0, op1, op2));
      else
        stmt = gimple_build_assign (res, code, op0, op1, op2);
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}

/* Build the call FN () with a result of type TYPE (or no result if TYPE is
   void) with a location LOC.  Returns the built expression value (or NULL_TREE
   if TYPE is void) and appends statements possibly defining it to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc, combined_fn fn, tree type)
{
  tree res = NULL_TREE;
  gcall *stmt;
  if (internal_fn_p (fn))
    stmt = gimple_build_call_internal (as_internal_fn (fn), 0);
  else
    {
      tree decl = builtin_decl_implicit (as_builtin_fn (fn));
      stmt = gimple_build_call (decl, 0);
    }
  if (!VOID_TYPE_P (type))
    {
      res = create_tmp_reg_or_ssa_name (type);
      gimple_call_set_lhs (stmt, res);
    }
  gimple_set_location (stmt, loc);
  gimple_seq_add_stmt_without_update (seq, stmt);
  return res;
}

/* Build the call FN (ARG0) with a result of type TYPE
   (or no result if TYPE is void) with location LOC,
   simplifying it first if possible.  Returns the built
   expression value (or NULL_TREE if TYPE is void) and appends
   statements possibly defining it to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
              tree type, tree arg0)
{
  tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
  if (!res)
    {
      gcall *stmt;
      if (internal_fn_p (fn))
        stmt = gimple_build_call_internal (as_internal_fn (fn), 1, arg0);
      else
        {
          tree decl = builtin_decl_implicit (as_builtin_fn (fn));
          stmt = gimple_build_call (decl, 1, arg0);
        }
      if (!VOID_TYPE_P (type))
        {
          res = create_tmp_reg_or_ssa_name (type);
          gimple_call_set_lhs (stmt, res);
        }
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}

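/* Illustrative sketch (assumed caller): emitting a square-root call
   during folding:

     tree r = gimple_build (&stmts, loc, CFN_BUILT_IN_SQRT,
                            double_type_node, x_1);

   When x_1 is a known constant, gimple_simplify folds the call and no
   gcall is generated.  */
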
/* Build the call FN (ARG0, ARG1) with a result of type TYPE
   (or no result if TYPE is void) with location LOC,
   simplifying it first if possible.  Returns the built
   expression value (or NULL_TREE if TYPE is void) and appends
   statements possibly defining it to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
              tree type, tree arg0, tree arg1)
{
  tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
  if (!res)
    {
      gcall *stmt;
      if (internal_fn_p (fn))
        stmt = gimple_build_call_internal (as_internal_fn (fn), 2, arg0, arg1);
      else
        {
          tree decl = builtin_decl_implicit (as_builtin_fn (fn));
          stmt = gimple_build_call (decl, 2, arg0, arg1);
        }
      if (!VOID_TYPE_P (type))
        {
          res = create_tmp_reg_or_ssa_name (type);
          gimple_call_set_lhs (stmt, res);
        }
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}

/* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
   (or no result if TYPE is void) with location LOC,
   simplifying it first if possible.  Returns the built
   expression value (or NULL_TREE if TYPE is void) and appends
   statements possibly defining it to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
              tree type, tree arg0, tree arg1, tree arg2)
{
  tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
                              seq, gimple_build_valueize);
  if (!res)
    {
      gcall *stmt;
      if (internal_fn_p (fn))
        stmt = gimple_build_call_internal (as_internal_fn (fn),
                                           3, arg0, arg1, arg2);
      else
        {
          tree decl = builtin_decl_implicit (as_builtin_fn (fn));
          stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
        }
      if (!VOID_TYPE_P (type))
        {
          res = create_tmp_reg_or_ssa_name (type);
          gimple_call_set_lhs (stmt, res);
        }
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}

/* Build the conversion (TYPE) OP with a result of type TYPE
   with location LOC if such conversion is necessary in GIMPLE,
   simplifying it first.
   Returns the built expression value and appends
   statements possibly defining it to SEQ.  */

tree
gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
{
  if (useless_type_conversion_p (type, TREE_TYPE (op)))
    return op;
  return gimple_build (seq, loc, NOP_EXPR, type, op);
}

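/* Illustrative sketch: normalizing an offset to sizetype only when a
   conversion is actually needed:

     off_1 = gimple_convert (&stmts, loc, sizetype, off_1);

   If OFF_1 already has a type compatible with sizetype, the value is
   returned unchanged and no statement is generated.  */
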
/* Build the conversion (ptrofftype) OP with a result of a type
   compatible with ptrofftype with location LOC if such conversion
   is necessary in GIMPLE, simplifying it first.
   Returns the built expression value and appends
   statements possibly defining it to SEQ.  */

tree
gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
{
  if (ptrofftype_p (TREE_TYPE (op)))
    return op;
  return gimple_convert (seq, loc, sizetype, op);
}

/* Build a vector of type TYPE in which each element has the value OP.
   Return a gimple value for the result, appending any new statements
   to SEQ.  */

tree
gimple_build_vector_from_val (gimple_seq *seq, location_t loc, tree type,
                              tree op)
{
  if (!TYPE_VECTOR_SUBPARTS (type).is_constant ()
      && !CONSTANT_CLASS_P (op))
    return gimple_build (seq, loc, VEC_DUPLICATE_EXPR, type, op);

  tree res, vec = build_vector_from_val (type, op);
  if (is_gimple_val (vec))
    return vec;
  if (gimple_in_ssa_p (cfun))
    res = make_ssa_name (type);
  else
    res = create_tmp_reg (type);
  gimple *stmt = gimple_build_assign (res, vec);
  gimple_set_location (stmt, loc);
  gimple_seq_add_stmt_without_update (seq, stmt);
  return res;
}

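/* Illustrative sketch: splatting a scalar SCALAR_1 into a vector of
   type VECTYPE (names assumed):

     tree splat = gimple_build_vector_from_val (&stmts, loc, vectype,
                                                scalar_1);

   For a variable-length vector with a non-constant element this emits
   a VEC_DUPLICATE_EXPR; otherwise the result is a VECTOR_CST or a
   plain assignment from the built vector.  */
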
/* Build a vector from BUILDER, handling the case in which some elements
   are non-constant.  Return a gimple value for the result, appending any
   new instructions to SEQ.

   BUILDER must not have a stepped encoding on entry.  This is because
   the function is not geared up to handle the arithmetic that would
   be needed in the variable case, and any code building a vector that
   is known to be constant should use BUILDER->build () directly.  */

tree
gimple_build_vector (gimple_seq *seq, location_t loc,
                     tree_vector_builder *builder)
{
  gcc_assert (builder->nelts_per_pattern () <= 2);
  unsigned int encoded_nelts = builder->encoded_nelts ();
  for (unsigned int i = 0; i < encoded_nelts; ++i)
    if (!CONSTANT_CLASS_P ((*builder)[i]))
      {
        tree type = builder->type ();
        unsigned int nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
        vec<constructor_elt, va_gc> *v;
        vec_alloc (v, nelts);
        for (i = 0; i < nelts; ++i)
          CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, builder->elt (i));

        tree res;
        if (gimple_in_ssa_p (cfun))
          res = make_ssa_name (type);
        else
          res = create_tmp_reg (type);
        gimple *stmt = gimple_build_assign (res, build_constructor (type, v));
        gimple_set_location (stmt, loc);
        gimple_seq_add_stmt_without_update (seq, stmt);
        return res;
      }
  return builder->build ();
}

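/* Illustrative sketch: building { a_1, 0, 0, 0 } for a 4 x int vector
   type V4SI_TYPE (assumed name) where a_1 is an SSA name, so the
   vector is not fully constant:

     tree_vector_builder builder (v4si_type, 4, 1);
     builder.quick_push (a_1);
     for (int i = 1; i < 4; ++i)
       builder.quick_push (integer_zero_node);
     tree vecval = gimple_build_vector (&stmts, loc, &builder);

   Because one element is non-constant this falls back to assigning a
   CONSTRUCTOR to a fresh register.  */
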
/* Emit gimple statements into SEQ that take a value given in OLD_SIZE
   and generate a value guaranteed to be rounded upwards to ALIGN.

   Return the tree node representing this size; it is of TREE_TYPE TYPE.  */

tree
gimple_build_round_up (gimple_seq *seq, location_t loc, tree type,
                       tree old_size, unsigned HOST_WIDE_INT align)
{
  unsigned HOST_WIDE_INT tg_mask = align - 1;
  /* tree new_size = (old_size + tg_mask) & ~tg_mask;  */
  gcc_assert (INTEGRAL_TYPE_P (type));
  tree tree_mask = build_int_cst (type, tg_mask);
  tree oversize = gimple_build (seq, loc, PLUS_EXPR, type, old_size,
                                tree_mask);

  tree mask = build_int_cst (type, -align);
  return gimple_build (seq, loc, BIT_AND_EXPR, type, oversize, mask);
}

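/* Worked example: for OLD_SIZE = 10 and ALIGN = 8 (a power of two)
   this computes
     oversize = 10 + 7  = 17
     result   = 17 & -8 = 16
   i.e. (old_size + align - 1) & ~(align - 1).  */
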
/* Return true if the result of assignment STMT is known to be non-negative.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

static bool
gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
                                   int depth)
{
  enum tree_code code = gimple_assign_rhs_code (stmt);
  tree type = TREE_TYPE (gimple_assign_lhs (stmt));
  switch (get_gimple_rhs_class (code))
    {
    case GIMPLE_UNARY_RHS:
      return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
                                             type,
                                             gimple_assign_rhs1 (stmt),
                                             strict_overflow_p, depth);
    case GIMPLE_BINARY_RHS:
      return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
                                              type,
                                              gimple_assign_rhs1 (stmt),
                                              gimple_assign_rhs2 (stmt),
                                              strict_overflow_p, depth);
    case GIMPLE_TERNARY_RHS:
      return false;
    case GIMPLE_SINGLE_RHS:
      return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
                                              strict_overflow_p, depth);
    case GIMPLE_INVALID_RHS:
      break;
    }
  gcc_unreachable ();
}

/* Return true if return value of call STMT is known to be non-negative.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

static bool
gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
                                 int depth)
{
  tree arg0 = gimple_call_num_args (stmt) > 0 ?
    gimple_call_arg (stmt, 0) : NULL_TREE;
  tree arg1 = gimple_call_num_args (stmt) > 1 ?
    gimple_call_arg (stmt, 1) : NULL_TREE;
  tree lhs = gimple_call_lhs (stmt);
  return (lhs
          && tree_call_nonnegative_warnv_p (TREE_TYPE (lhs),
                                            gimple_call_combined_fn (stmt),
                                            arg0, arg1,
                                            strict_overflow_p, depth));
}

/* Return true if the result of PHI STMT is known to be non-negative.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

static bool
gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
                                int depth)
{
  for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
    {
      tree arg = gimple_phi_arg_def (stmt, i);
      if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
        return false;
    }
  return true;
}

/* Return true if STMT is known to compute a non-negative value.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

bool
gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
                                 int depth)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
                                                depth);
    case GIMPLE_CALL:
      return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
                                              depth);
    case GIMPLE_PHI:
      return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
                                             depth);
    default:
      return false;
    }
}

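/* Illustrative sketch (assumed caller): a pass checking whether x_1 is
   provably non-negative before simplifying ABS_EXPR <x_1> to x_1:

     bool strict_ovf = false;
     if (gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (x_1),
                                          &strict_ovf, 0))
       ...

   If STRICT_OVF comes back true, the fact relies on signed overflow
   being undefined, so a -Wstrict-overflow style caveat applies.  */
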
/* Return true if the floating-point value computed by assignment STMT
   is known to have an integer value.  We also allow +Inf, -Inf and NaN
   to be considered integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

static bool
gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
{
  enum tree_code code = gimple_assign_rhs_code (stmt);
  switch (get_gimple_rhs_class (code))
    {
    case GIMPLE_UNARY_RHS:
      return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
                                          gimple_assign_rhs1 (stmt), depth);
    case GIMPLE_BINARY_RHS:
      return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
                                           gimple_assign_rhs1 (stmt),
                                           gimple_assign_rhs2 (stmt), depth);
    case GIMPLE_TERNARY_RHS:
      return false;
    case GIMPLE_SINGLE_RHS:
      return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
    case GIMPLE_INVALID_RHS:
      break;
    }
  gcc_unreachable ();
}

/* Return true if the floating-point value computed by call STMT is known
   to have an integer value.  We also allow +Inf, -Inf and NaN to be
   considered integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

static bool
gimple_call_integer_valued_real_p (gimple *stmt, int depth)
{
  tree arg0 = (gimple_call_num_args (stmt) > 0
               ? gimple_call_arg (stmt, 0)
               : NULL_TREE);
  tree arg1 = (gimple_call_num_args (stmt) > 1
               ? gimple_call_arg (stmt, 1)
               : NULL_TREE);
  return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
                                     arg0, arg1, depth);
}

/* Return true if the floating-point result of phi STMT is known to have
   an integer value.  We also allow +Inf, -Inf and NaN to be considered
   integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

static bool
gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
{
  for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
    {
      tree arg = gimple_phi_arg_def (stmt, i);
      if (!integer_valued_real_single_p (arg, depth + 1))
        return false;
    }
  return true;
}

/* Return true if the floating-point value computed by STMT is known
   to have an integer value.  We also allow +Inf, -Inf and NaN to be
   considered integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

bool
gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      return gimple_assign_integer_valued_real_p (stmt, depth);
    case GIMPLE_CALL:
      return gimple_call_integer_valued_real_p (stmt, depth);
    case GIMPLE_PHI:
      return gimple_phi_integer_valued_real_p (stmt, depth);
    default:
      return false;
    }
}