1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010-2021 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
32 #include "gimple-pretty-print.h"
33 #include "gimple-ssa-warn-access.h"
34 #include "gimple-ssa-warn-restrict.h"
35 #include "fold-const.h"
38 #include "stor-layout.h"
40 #include "gimple-fold.h"
42 #include "gimple-iterator.h"
43 #include "tree-into-ssa.h"
45 #include "tree-object-size.h"
47 #include "tree-ssa-propagate.h"
48 #include "ipa-utils.h"
49 #include "tree-ssa-address.h"
50 #include "langhooks.h"
51 #include "gimplify-me.h"
55 #include "gimple-match.h"
56 #include "gomp-constants.h"
57 #include "optabs-query.h"
58 #include "omp-general.h"
60 #include "fold-const-call.h"
61 #include "stringpool.h"
64 #include "diagnostic-core.h"
67 #include "tree-vector-builder.h"
68 #include "tree-ssa-strlen.h"
71 enum strlen_range_kind
{
72 /* Compute the exact constant string length. */
74 /* Compute the maximum constant string length. */
76 /* Compute a range of string lengths bounded by object sizes. When
77 the length of a string cannot be determined, consider as the upper
78 bound the size of the enclosing object the string may be a member
79 or element of. Also determine the size of the largest character
80 array the string may refer to. */
82 /* Determine the integer value of the argument (not string length). */
87 get_range_strlen (tree
, bitmap
*, strlen_range_kind
, c_strlen_data
*, unsigned);
89 /* Return true when DECL can be referenced from current unit.
90 FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
91 We can get declarations that are not possible to reference for various
94 1) When analyzing C++ virtual tables.
95 C++ virtual tables do have known constructors even
96 when they are keyed to other compilation unit.
97 Those tables can contain pointers to methods and vars
98 in other units. Those methods have both STATIC and EXTERNAL
100 2) In WHOPR mode devirtualization might lead to reference
101 to method that was partitioned elsewhere.
102 In this case we have static VAR_DECL or FUNCTION_DECL
103 that has no corresponding callgraph/varpool node
105 3) COMDAT functions referred by external vtables that
106 we devirtualize only during final compilation stage.
107 At this time we already decided that we will not output
108 the function body and thus we can't reference the symbol
112 can_refer_decl_in_current_unit_p (tree decl
, tree from_decl
)
115 struct cgraph_node
*node
;
118 if (DECL_ABSTRACT_P (decl
))
121 /* We are concerned only about static/external vars and functions. */
122 if ((!TREE_STATIC (decl
) && !DECL_EXTERNAL (decl
))
123 || !VAR_OR_FUNCTION_DECL_P (decl
))
126 /* Static objects can be referred only if they are defined and not optimized
128 if (!TREE_PUBLIC (decl
))
130 if (DECL_EXTERNAL (decl
))
132 /* Before we start optimizing unreachable code we can be sure all
133 static objects are defined. */
134 if (symtab
->function_flags_ready
)
136 snode
= symtab_node::get (decl
);
137 if (!snode
|| !snode
->definition
)
139 node
= dyn_cast
<cgraph_node
*> (snode
);
140 return !node
|| !node
->inlined_to
;
143 /* We will later output the initializer, so we can refer to it.
144 So we are concerned only when DECL comes from initializer of
145 external var or var that has been optimized out. */
147 || !VAR_P (from_decl
)
148 || (!DECL_EXTERNAL (from_decl
)
149 && (vnode
= varpool_node::get (from_decl
)) != NULL
150 && vnode
->definition
)
152 && (vnode
= varpool_node::get (from_decl
)) != NULL
153 && vnode
->in_other_partition
))
155 /* We are folding reference from external vtable. The vtable may refer
156 to a symbol keyed to other compilation unit. The other compilation
157 unit may be in separate DSO and the symbol may be hidden. */
158 if (DECL_VISIBILITY_SPECIFIED (decl
)
159 && DECL_EXTERNAL (decl
)
160 && DECL_VISIBILITY (decl
) != VISIBILITY_DEFAULT
161 && (!(snode
= symtab_node::get (decl
)) || !snode
->in_other_partition
))
163 /* When function is public, we always can introduce new reference.
164 Exception are the COMDAT functions where introducing a direct
165 reference implies the need to include function body in the current unit. */
166 if (TREE_PUBLIC (decl
) && !DECL_COMDAT (decl
))
168 /* We have COMDAT. We are going to check if we still have definition
169 or if the definition is going to be output in other partition.
170 Bypass this when gimplifying; all needed functions will be produced.
172 As observed in PR20991 for already optimized out comdat virtual functions
173 it may be tempting to not necessarily give up because the copy will be
174 output elsewhere when corresponding vtable is output.
175 This is however not possible - ABI specify that COMDATs are output in
176 units where they are used and when the other unit was compiled with LTO
177 it is possible that vtable was kept public while the function itself
179 if (!symtab
->function_flags_ready
)
182 snode
= symtab_node::get (decl
);
184 || ((!snode
->definition
|| DECL_EXTERNAL (decl
))
185 && (!snode
->in_other_partition
186 || (!snode
->forced_by_abi
&& !snode
->force_output
))))
188 node
= dyn_cast
<cgraph_node
*> (snode
);
189 return !node
|| !node
->inlined_to
;
192 /* Create a temporary for TYPE for a statement STMT. If the current function
193 is in SSA form, a SSA name is created. Otherwise a temporary register
197 create_tmp_reg_or_ssa_name (tree type
, gimple
*stmt
)
199 if (gimple_in_ssa_p (cfun
))
200 return make_ssa_name (type
, stmt
);
202 return create_tmp_reg (type
);
205 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
206 acceptable form for is_gimple_min_invariant.
207 FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL. */
210 canonicalize_constructor_val (tree cval
, tree from_decl
)
212 if (CONSTANT_CLASS_P (cval
))
215 tree orig_cval
= cval
;
217 if (TREE_CODE (cval
) == POINTER_PLUS_EXPR
218 && TREE_CODE (TREE_OPERAND (cval
, 1)) == INTEGER_CST
)
220 tree ptr
= TREE_OPERAND (cval
, 0);
221 if (is_gimple_min_invariant (ptr
))
222 cval
= build1_loc (EXPR_LOCATION (cval
),
223 ADDR_EXPR
, TREE_TYPE (ptr
),
224 fold_build2 (MEM_REF
, TREE_TYPE (TREE_TYPE (ptr
)),
226 fold_convert (ptr_type_node
,
227 TREE_OPERAND (cval
, 1))));
229 if (TREE_CODE (cval
) == ADDR_EXPR
)
231 tree base
= NULL_TREE
;
232 if (TREE_CODE (TREE_OPERAND (cval
, 0)) == COMPOUND_LITERAL_EXPR
)
234 base
= COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval
, 0));
236 TREE_OPERAND (cval
, 0) = base
;
239 base
= get_base_address (TREE_OPERAND (cval
, 0));
243 if (VAR_OR_FUNCTION_DECL_P (base
)
244 && !can_refer_decl_in_current_unit_p (base
, from_decl
))
246 if (TREE_TYPE (base
) == error_mark_node
)
249 /* ??? We should be able to assert that TREE_ADDRESSABLE is set,
250 but since the use can be in a debug stmt we can't. */
252 else if (TREE_CODE (base
) == FUNCTION_DECL
)
254 /* Make sure we create a cgraph node for functions we'll reference.
255 They can be non-existent if the reference comes from an entry
256 of an external vtable for example. */
257 cgraph_node::get_create (base
);
259 /* Fixup types in global initializers. */
260 if (TREE_TYPE (TREE_TYPE (cval
)) != TREE_TYPE (TREE_OPERAND (cval
, 0)))
261 cval
= build_fold_addr_expr (TREE_OPERAND (cval
, 0));
263 if (!useless_type_conversion_p (TREE_TYPE (orig_cval
), TREE_TYPE (cval
)))
264 cval
= fold_convert (TREE_TYPE (orig_cval
), cval
);
267 /* In CONSTRUCTORs we may see unfolded constants like (int (*) ()) 0. */
268 if (TREE_CODE (cval
) == INTEGER_CST
)
270 if (TREE_OVERFLOW_P (cval
))
271 cval
= drop_tree_overflow (cval
);
272 if (!useless_type_conversion_p (TREE_TYPE (orig_cval
), TREE_TYPE (cval
)))
273 cval
= fold_convert (TREE_TYPE (orig_cval
), cval
);
279 /* If SYM is a constant variable with known value, return the value.
280 NULL_TREE is returned otherwise. */
283 get_symbol_constant_value (tree sym
)
285 tree val
= ctor_for_folding (sym
);
286 if (val
!= error_mark_node
)
290 val
= canonicalize_constructor_val (unshare_expr (val
), sym
);
291 if (val
&& is_gimple_min_invariant (val
))
296 /* Variables declared 'const' without an initializer
297 have zero as the initializer if they may not be
298 overridden at link or run time. */
300 && is_gimple_reg_type (TREE_TYPE (sym
)))
301 return build_zero_cst (TREE_TYPE (sym
));
309 /* Subroutine of fold_stmt. We perform constant folding of the
310 memory reference tree EXPR. */
313 maybe_fold_reference (tree expr
)
315 tree result
= NULL_TREE
;
317 if ((TREE_CODE (expr
) == VIEW_CONVERT_EXPR
318 || TREE_CODE (expr
) == REALPART_EXPR
319 || TREE_CODE (expr
) == IMAGPART_EXPR
)
320 && CONSTANT_CLASS_P (TREE_OPERAND (expr
, 0)))
321 result
= fold_unary_loc (EXPR_LOCATION (expr
),
324 TREE_OPERAND (expr
, 0));
325 else if (TREE_CODE (expr
) == BIT_FIELD_REF
326 && CONSTANT_CLASS_P (TREE_OPERAND (expr
, 0)))
327 result
= fold_ternary_loc (EXPR_LOCATION (expr
),
330 TREE_OPERAND (expr
, 0),
331 TREE_OPERAND (expr
, 1),
332 TREE_OPERAND (expr
, 2));
334 result
= fold_const_aggregate_ref (expr
);
336 if (result
&& is_gimple_min_invariant (result
))
342 /* Return true if EXPR is an acceptable right-hand-side for a
343 GIMPLE assignment. We validate the entire tree, not just
344 the root node, thus catching expressions that embed complex
345 operands that are not permitted in GIMPLE. This function
346 is needed because the folding routines in fold-const.c
347 may return such expressions in some cases, e.g., an array
348 access with an embedded index addition. It may make more
349 sense to have folding routines that are sensitive to the
350 constraints on GIMPLE operands, rather than abandoning any
351 attempt to fold if the usual folding turns out to be too
355 valid_gimple_rhs_p (tree expr
)
357 enum tree_code code
= TREE_CODE (expr
);
359 switch (TREE_CODE_CLASS (code
))
361 case tcc_declaration
:
362 if (!is_gimple_variable (expr
))
367 /* All constants are ok. */
371 /* GENERIC allows comparisons with non-boolean types, reject
372 those for GIMPLE. Let vector-typed comparisons pass - rules
373 for GENERIC and GIMPLE are the same here. */
374 if (!(INTEGRAL_TYPE_P (TREE_TYPE (expr
))
375 && (TREE_CODE (TREE_TYPE (expr
)) == BOOLEAN_TYPE
376 || TYPE_PRECISION (TREE_TYPE (expr
)) == 1))
377 && ! VECTOR_TYPE_P (TREE_TYPE (expr
)))
382 if (!is_gimple_val (TREE_OPERAND (expr
, 0))
383 || !is_gimple_val (TREE_OPERAND (expr
, 1)))
388 if (!is_gimple_val (TREE_OPERAND (expr
, 0)))
398 if (is_gimple_min_invariant (expr
))
400 t
= TREE_OPERAND (expr
, 0);
401 while (handled_component_p (t
))
403 /* ??? More checks needed, see the GIMPLE verifier. */
404 if ((TREE_CODE (t
) == ARRAY_REF
405 || TREE_CODE (t
) == ARRAY_RANGE_REF
)
406 && !is_gimple_val (TREE_OPERAND (t
, 1)))
408 t
= TREE_OPERAND (t
, 0);
410 if (!is_gimple_id (t
))
416 if (get_gimple_rhs_class (code
) == GIMPLE_TERNARY_RHS
)
418 if ((code
== COND_EXPR
419 ? !is_gimple_condexpr (TREE_OPERAND (expr
, 0))
420 : !is_gimple_val (TREE_OPERAND (expr
, 0)))
421 || !is_gimple_val (TREE_OPERAND (expr
, 1))
422 || !is_gimple_val (TREE_OPERAND (expr
, 2)))
433 case tcc_exceptional
:
434 if (code
== CONSTRUCTOR
)
438 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (expr
), i
, elt
)
439 if (!is_gimple_val (elt
))
443 if (code
!= SSA_NAME
)
448 if (code
== BIT_FIELD_REF
)
449 return is_gimple_val (TREE_OPERAND (expr
, 0));
460 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
461 replacement rhs for the statement or NULL_TREE if no simplification
462 could be made. It is assumed that the operands have been previously
466 fold_gimple_assign (gimple_stmt_iterator
*si
)
468 gimple
*stmt
= gsi_stmt (*si
);
469 enum tree_code subcode
= gimple_assign_rhs_code (stmt
);
470 location_t loc
= gimple_location (stmt
);
472 tree result
= NULL_TREE
;
474 switch (get_gimple_rhs_class (subcode
))
476 case GIMPLE_SINGLE_RHS
:
478 tree rhs
= gimple_assign_rhs1 (stmt
);
480 if (TREE_CLOBBER_P (rhs
))
483 if (REFERENCE_CLASS_P (rhs
))
484 return maybe_fold_reference (rhs
);
486 else if (TREE_CODE (rhs
) == OBJ_TYPE_REF
)
488 tree val
= OBJ_TYPE_REF_EXPR (rhs
);
489 if (is_gimple_min_invariant (val
))
491 else if (flag_devirtualize
&& virtual_method_call_p (rhs
))
494 vec
<cgraph_node
*>targets
495 = possible_polymorphic_call_targets (rhs
, stmt
, &final
);
496 if (final
&& targets
.length () <= 1 && dbg_cnt (devirt
))
498 if (dump_enabled_p ())
500 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS
, stmt
,
501 "resolving virtual function address "
502 "reference to function %s\n",
503 targets
.length () == 1
504 ? targets
[0]->name ()
507 if (targets
.length () == 1)
509 val
= fold_convert (TREE_TYPE (val
),
510 build_fold_addr_expr_loc
511 (loc
, targets
[0]->decl
));
512 STRIP_USELESS_TYPE_CONVERSION (val
);
515 /* We cannot use __builtin_unreachable here because it
516 cannot have address taken. */
517 val
= build_int_cst (TREE_TYPE (val
), 0);
523 else if (TREE_CODE (rhs
) == ADDR_EXPR
)
525 tree ref
= TREE_OPERAND (rhs
, 0);
526 if (TREE_CODE (ref
) == MEM_REF
527 && integer_zerop (TREE_OPERAND (ref
, 1)))
529 result
= TREE_OPERAND (ref
, 0);
530 if (!useless_type_conversion_p (TREE_TYPE (rhs
),
532 result
= build1 (NOP_EXPR
, TREE_TYPE (rhs
), result
);
537 else if (TREE_CODE (rhs
) == CONSTRUCTOR
538 && TREE_CODE (TREE_TYPE (rhs
)) == VECTOR_TYPE
)
540 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
544 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs
), i
, val
)
545 if (! CONSTANT_CLASS_P (val
))
548 return build_vector_from_ctor (TREE_TYPE (rhs
),
549 CONSTRUCTOR_ELTS (rhs
));
552 else if (DECL_P (rhs
)
553 && is_gimple_reg_type (TREE_TYPE (rhs
)))
554 return get_symbol_constant_value (rhs
);
558 case GIMPLE_UNARY_RHS
:
561 case GIMPLE_BINARY_RHS
:
564 case GIMPLE_TERNARY_RHS
:
565 result
= fold_ternary_loc (loc
, subcode
,
566 TREE_TYPE (gimple_assign_lhs (stmt
)),
567 gimple_assign_rhs1 (stmt
),
568 gimple_assign_rhs2 (stmt
),
569 gimple_assign_rhs3 (stmt
));
573 STRIP_USELESS_TYPE_CONVERSION (result
);
574 if (valid_gimple_rhs_p (result
))
579 case GIMPLE_INVALID_RHS
:
587 /* Replace a statement at *SI_P with a sequence of statements in STMTS,
588 adjusting the replacement stmts location and virtual operands.
589 If the statement has a lhs the last stmt in the sequence is expected
590 to assign to that lhs. */
593 gsi_replace_with_seq_vops (gimple_stmt_iterator
*si_p
, gimple_seq stmts
)
595 gimple
*stmt
= gsi_stmt (*si_p
);
597 if (gimple_has_location (stmt
))
598 annotate_all_with_location (stmts
, gimple_location (stmt
));
600 /* First iterate over the replacement statements backward, assigning
601 virtual operands to their defining statements. */
602 gimple
*laststore
= NULL
;
603 for (gimple_stmt_iterator i
= gsi_last (stmts
);
604 !gsi_end_p (i
); gsi_prev (&i
))
606 gimple
*new_stmt
= gsi_stmt (i
);
607 if ((gimple_assign_single_p (new_stmt
)
608 && !is_gimple_reg (gimple_assign_lhs (new_stmt
)))
609 || (is_gimple_call (new_stmt
)
610 && (gimple_call_flags (new_stmt
)
611 & (ECF_NOVOPS
| ECF_PURE
| ECF_CONST
| ECF_NORETURN
)) == 0))
615 vdef
= gimple_vdef (stmt
);
617 vdef
= make_ssa_name (gimple_vop (cfun
), new_stmt
);
618 gimple_set_vdef (new_stmt
, vdef
);
619 if (vdef
&& TREE_CODE (vdef
) == SSA_NAME
)
620 SSA_NAME_DEF_STMT (vdef
) = new_stmt
;
621 laststore
= new_stmt
;
625 /* Second iterate over the statements forward, assigning virtual
626 operands to their uses. */
627 tree reaching_vuse
= gimple_vuse (stmt
);
628 for (gimple_stmt_iterator i
= gsi_start (stmts
);
629 !gsi_end_p (i
); gsi_next (&i
))
631 gimple
*new_stmt
= gsi_stmt (i
);
632 /* If the new statement possibly has a VUSE, update it with exact SSA
633 name we know will reach this one. */
634 if (gimple_has_mem_ops (new_stmt
))
635 gimple_set_vuse (new_stmt
, reaching_vuse
);
636 gimple_set_modified (new_stmt
, true);
637 if (gimple_vdef (new_stmt
))
638 reaching_vuse
= gimple_vdef (new_stmt
);
641 /* If the new sequence does not do a store release the virtual
642 definition of the original statement. */
644 && reaching_vuse
== gimple_vuse (stmt
))
646 tree vdef
= gimple_vdef (stmt
);
648 && TREE_CODE (vdef
) == SSA_NAME
)
650 unlink_stmt_vdef (stmt
);
651 release_ssa_name (vdef
);
655 /* Finally replace the original statement with the sequence. */
656 gsi_replace_with_seq (si_p
, stmts
, false);
659 /* Helper function for update_gimple_call and
660 gimplify_and_update_call_from_tree. A GIMPLE_CALL STMT is being replaced
661 with GIMPLE_CALL NEW_STMT. */
664 finish_update_gimple_call (gimple_stmt_iterator
*si_p
, gimple
*new_stmt
,
667 tree lhs
= gimple_call_lhs (stmt
);
668 gimple_call_set_lhs (new_stmt
, lhs
);
669 if (lhs
&& TREE_CODE (lhs
) == SSA_NAME
)
670 SSA_NAME_DEF_STMT (lhs
) = new_stmt
;
671 gimple_move_vops (new_stmt
, stmt
);
672 gimple_set_location (new_stmt
, gimple_location (stmt
));
673 if (gimple_block (new_stmt
) == NULL_TREE
)
674 gimple_set_block (new_stmt
, gimple_block (stmt
));
675 gsi_replace (si_p
, new_stmt
, false);
678 /* Update a GIMPLE_CALL statement at iterator *SI_P to call to FN
679 with number of arguments NARGS, where the arguments in GIMPLE form
680 follow NARGS argument. */
683 update_gimple_call (gimple_stmt_iterator
*si_p
, tree fn
, int nargs
, ...)
686 gcall
*new_stmt
, *stmt
= as_a
<gcall
*> (gsi_stmt (*si_p
));
688 gcc_assert (is_gimple_call (stmt
));
689 va_start (ap
, nargs
);
690 new_stmt
= gimple_build_call_valist (fn
, nargs
, ap
);
691 finish_update_gimple_call (si_p
, new_stmt
, stmt
);
696 /* Return true if EXPR is a CALL_EXPR suitable for representation
697 as a single GIMPLE_CALL statement. If the arguments require
698 further gimplification, return false. */
701 valid_gimple_call_p (tree expr
)
705 if (TREE_CODE (expr
) != CALL_EXPR
)
708 nargs
= call_expr_nargs (expr
);
709 for (i
= 0; i
< nargs
; i
++)
711 tree arg
= CALL_EXPR_ARG (expr
, i
);
712 if (is_gimple_reg_type (TREE_TYPE (arg
)))
714 if (!is_gimple_val (arg
))
718 if (!is_gimple_lvalue (arg
))
725 /* Convert EXPR into a GIMPLE value suitable for substitution on the
726 RHS of an assignment. Insert the necessary statements before
727 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
728 is replaced. If the call is expected to produce a result, then it
729 is replaced by an assignment of the new RHS to the result variable.
730 If the result is to be ignored, then the call is replaced by a
731 GIMPLE_NOP. A proper VDEF chain is retained by making the first
732 VUSE and the last VDEF of the whole sequence be the same as the replaced
733 statement and using new SSA names for stores in between. */
736 gimplify_and_update_call_from_tree (gimple_stmt_iterator
*si_p
, tree expr
)
739 gimple
*stmt
, *new_stmt
;
740 gimple_stmt_iterator i
;
741 gimple_seq stmts
= NULL
;
743 stmt
= gsi_stmt (*si_p
);
745 gcc_assert (is_gimple_call (stmt
));
747 if (valid_gimple_call_p (expr
))
749 /* The call has simplified to another call. */
750 tree fn
= CALL_EXPR_FN (expr
);
752 unsigned nargs
= call_expr_nargs (expr
);
753 vec
<tree
> args
= vNULL
;
759 args
.safe_grow_cleared (nargs
, true);
761 for (i
= 0; i
< nargs
; i
++)
762 args
[i
] = CALL_EXPR_ARG (expr
, i
);
765 new_stmt
= gimple_build_call_vec (fn
, args
);
766 finish_update_gimple_call (si_p
, new_stmt
, stmt
);
771 lhs
= gimple_call_lhs (stmt
);
772 if (lhs
== NULL_TREE
)
774 push_gimplify_context (gimple_in_ssa_p (cfun
));
775 gimplify_and_add (expr
, &stmts
);
776 pop_gimplify_context (NULL
);
778 /* We can end up with folding a memcpy of an empty class assignment
779 which gets optimized away by C++ gimplification. */
780 if (gimple_seq_empty_p (stmts
))
782 if (gimple_in_ssa_p (cfun
))
784 unlink_stmt_vdef (stmt
);
787 gsi_replace (si_p
, gimple_build_nop (), false);
793 tree tmp
= force_gimple_operand (expr
, &stmts
, false, NULL_TREE
);
794 new_stmt
= gimple_build_assign (lhs
, tmp
);
795 i
= gsi_last (stmts
);
796 gsi_insert_after_without_update (&i
, new_stmt
,
797 GSI_CONTINUE_LINKING
);
800 gsi_replace_with_seq_vops (si_p
, stmts
);
804 /* Replace the call at *GSI with the gimple value VAL. */
807 replace_call_with_value (gimple_stmt_iterator
*gsi
, tree val
)
809 gimple
*stmt
= gsi_stmt (*gsi
);
810 tree lhs
= gimple_call_lhs (stmt
);
814 if (!useless_type_conversion_p (TREE_TYPE (lhs
), TREE_TYPE (val
)))
815 val
= fold_convert (TREE_TYPE (lhs
), val
);
816 repl
= gimple_build_assign (lhs
, val
);
819 repl
= gimple_build_nop ();
820 tree vdef
= gimple_vdef (stmt
);
821 if (vdef
&& TREE_CODE (vdef
) == SSA_NAME
)
823 unlink_stmt_vdef (stmt
);
824 release_ssa_name (vdef
);
826 gsi_replace (gsi
, repl
, false);
829 /* Replace the call at *GSI with the new call REPL and fold that
833 replace_call_with_call_and_fold (gimple_stmt_iterator
*gsi
, gimple
*repl
)
835 gimple
*stmt
= gsi_stmt (*gsi
);
836 gimple_call_set_lhs (repl
, gimple_call_lhs (stmt
));
837 gimple_set_location (repl
, gimple_location (stmt
));
838 gimple_move_vops (repl
, stmt
);
839 gsi_replace (gsi
, repl
, false);
843 /* Return true if VAR is a VAR_DECL or a component thereof. */
846 var_decl_component_p (tree var
)
849 while (handled_component_p (inner
))
850 inner
= TREE_OPERAND (inner
, 0);
851 return (DECL_P (inner
)
852 || (TREE_CODE (inner
) == MEM_REF
853 && TREE_CODE (TREE_OPERAND (inner
, 0)) == ADDR_EXPR
));
856 /* Return TRUE if the SIZE argument, representing the size of an
857 object, is in a range of values of which exactly zero is valid. */
860 size_must_be_zero_p (tree size
)
862 if (integer_zerop (size
))
865 if (TREE_CODE (size
) != SSA_NAME
|| !INTEGRAL_TYPE_P (TREE_TYPE (size
)))
868 tree type
= TREE_TYPE (size
);
869 int prec
= TYPE_PRECISION (type
);
871 /* Compute the value of SSIZE_MAX, the largest positive value that
872 can be stored in ssize_t, the signed counterpart of size_t. */
873 wide_int ssize_max
= wi::lshift (wi::one (prec
), prec
- 1) - 1;
874 value_range
valid_range (build_int_cst (type
, 0),
875 wide_int_to_tree (type
, ssize_max
));
878 get_range_query (cfun
)->range_of_expr (vr
, size
);
880 get_global_range_query ()->range_of_expr (vr
, size
);
881 if (vr
.undefined_p ())
882 vr
.set_varying (TREE_TYPE (size
));
883 vr
.intersect (&valid_range
);
887 /* Fold function call to builtin mem{{,p}cpy,move}. Try to detect and
888 diagnose (otherwise undefined) overlapping copies without preventing
889 folding. When folded, GCC guarantees that overlapping memcpy has
890 the same semantics as memmove. Call to the library memcpy need not
891 provide the same guarantee. Return false if no simplification can
895 gimple_fold_builtin_memory_op (gimple_stmt_iterator
*gsi
,
896 tree dest
, tree src
, enum built_in_function code
)
898 gimple
*stmt
= gsi_stmt (*gsi
);
899 tree lhs
= gimple_call_lhs (stmt
);
900 tree len
= gimple_call_arg (stmt
, 2);
901 location_t loc
= gimple_location (stmt
);
903 /* If the LEN parameter is a constant zero or in range where
904 the only valid value is zero, return DEST. */
905 if (size_must_be_zero_p (len
))
908 if (gimple_call_lhs (stmt
))
909 repl
= gimple_build_assign (gimple_call_lhs (stmt
), dest
);
911 repl
= gimple_build_nop ();
912 tree vdef
= gimple_vdef (stmt
);
913 if (vdef
&& TREE_CODE (vdef
) == SSA_NAME
)
915 unlink_stmt_vdef (stmt
);
916 release_ssa_name (vdef
);
918 gsi_replace (gsi
, repl
, false);
922 /* If SRC and DEST are the same (and not volatile), return
923 DEST{,+LEN,+LEN-1}. */
924 if (operand_equal_p (src
, dest
, 0))
926 /* Avoid diagnosing exact overlap in calls to __builtin_memcpy.
927 It's safe and may even be emitted by GCC itself (see bug
929 unlink_stmt_vdef (stmt
);
930 if (gimple_vdef (stmt
) && TREE_CODE (gimple_vdef (stmt
)) == SSA_NAME
)
931 release_ssa_name (gimple_vdef (stmt
));
934 gsi_replace (gsi
, gimple_build_nop (), false);
941 /* We cannot (easily) change the type of the copy if it is a storage
942 order barrier, i.e. is equivalent to a VIEW_CONVERT_EXPR that can
943 modify the storage order of objects (see storage_order_barrier_p). */
945 = POINTER_TYPE_P (TREE_TYPE (src
))
946 ? TREE_TYPE (TREE_TYPE (src
)) : NULL_TREE
;
948 = POINTER_TYPE_P (TREE_TYPE (dest
))
949 ? TREE_TYPE (TREE_TYPE (dest
)) : NULL_TREE
;
950 tree destvar
, srcvar
, srcoff
;
951 unsigned int src_align
, dest_align
;
952 unsigned HOST_WIDE_INT tmp_len
;
955 /* Build accesses at offset zero with a ref-all character type. */
957 = build_int_cst (build_pointer_type_for_mode (char_type_node
,
960 /* If we can perform the copy efficiently with first doing all loads
961 and then all stores inline it that way. Currently efficiently
962 means that we can load all the memory into a single integer
963 register which is what MOVE_MAX gives us. */
964 src_align
= get_pointer_alignment (src
);
965 dest_align
= get_pointer_alignment (dest
);
966 if (tree_fits_uhwi_p (len
)
967 && compare_tree_int (len
, MOVE_MAX
) <= 0
968 /* FIXME: Don't transform copies from strings with known length.
969 Until GCC 9 this prevented a case in gcc.dg/strlenopt-8.c
970 from being handled, and the case was XFAILed for that reason.
971 Now that it is handled and the XFAIL removed, as soon as other
972 strlenopt tests that rely on it for passing are adjusted, this
973 hack can be removed. */
974 && !c_strlen (src
, 1)
975 && !((tmp_str
= getbyterep (src
, &tmp_len
)) != NULL
976 && memchr (tmp_str
, 0, tmp_len
) == NULL
)
978 && AGGREGATE_TYPE_P (srctype
)
979 && TYPE_REVERSE_STORAGE_ORDER (srctype
))
981 && AGGREGATE_TYPE_P (desttype
)
982 && TYPE_REVERSE_STORAGE_ORDER (desttype
)))
984 unsigned ilen
= tree_to_uhwi (len
);
985 if (pow2p_hwi (ilen
))
987 /* Detect out-of-bounds accesses without issuing warnings.
988 Avoid folding out-of-bounds copies but to avoid false
989 positives for unreachable code defer warning until after
990 DCE has worked its magic.
991 -Wrestrict is still diagnosed. */
992 if (int warning
= check_bounds_or_overlap (as_a
<gcall
*>(stmt
),
995 if (warning
!= OPT_Wrestrict
)
998 scalar_int_mode mode
;
999 tree type
= lang_hooks
.types
.type_for_size (ilen
* 8, 1);
1001 && is_a
<scalar_int_mode
> (TYPE_MODE (type
), &mode
)
1002 && GET_MODE_SIZE (mode
) * BITS_PER_UNIT
== ilen
* 8
1003 /* If the destination pointer is not aligned we must be able
1004 to emit an unaligned store. */
1005 && (dest_align
>= GET_MODE_ALIGNMENT (mode
)
1006 || !targetm
.slow_unaligned_access (mode
, dest_align
)
1007 || (optab_handler (movmisalign_optab
, mode
)
1008 != CODE_FOR_nothing
)))
1010 tree srctype
= type
;
1011 tree desttype
= type
;
1012 if (src_align
< GET_MODE_ALIGNMENT (mode
))
1013 srctype
= build_aligned_type (type
, src_align
);
1014 tree srcmem
= fold_build2 (MEM_REF
, srctype
, src
, off0
);
1015 tree tem
= fold_const_aggregate_ref (srcmem
);
1018 else if (src_align
< GET_MODE_ALIGNMENT (mode
)
1019 && targetm
.slow_unaligned_access (mode
, src_align
)
1020 && (optab_handler (movmisalign_optab
, mode
)
1021 == CODE_FOR_nothing
))
1026 if (is_gimple_reg_type (TREE_TYPE (srcmem
)))
1028 new_stmt
= gimple_build_assign (NULL_TREE
, srcmem
);
1030 = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem
),
1032 gimple_assign_set_lhs (new_stmt
, srcmem
);
1033 gimple_set_vuse (new_stmt
, gimple_vuse (stmt
));
1034 gsi_insert_before (gsi
, new_stmt
, GSI_SAME_STMT
);
1036 if (dest_align
< GET_MODE_ALIGNMENT (mode
))
1037 desttype
= build_aligned_type (type
, dest_align
);
1039 = gimple_build_assign (fold_build2 (MEM_REF
, desttype
,
1042 gimple_move_vops (new_stmt
, stmt
);
1045 gsi_replace (gsi
, new_stmt
, false);
1048 gsi_insert_before (gsi
, new_stmt
, GSI_SAME_STMT
);
1055 if (code
== BUILT_IN_MEMMOVE
)
1057 /* Both DEST and SRC must be pointer types.
1058 ??? This is what old code did. Is the testing for pointer types
1061 If either SRC is readonly or length is 1, we can use memcpy. */
1062 if (!dest_align
|| !src_align
)
1064 if (readonly_data_expr (src
)
1065 || (tree_fits_uhwi_p (len
)
1066 && (MIN (src_align
, dest_align
) / BITS_PER_UNIT
1067 >= tree_to_uhwi (len
))))
1069 tree fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
1072 gimple_call_set_fndecl (stmt
, fn
);
1073 gimple_call_set_arg (stmt
, 0, dest
);
1074 gimple_call_set_arg (stmt
, 1, src
);
1079 /* If *src and *dest can't overlap, optimize into memcpy as well. */
1080 if (TREE_CODE (src
) == ADDR_EXPR
1081 && TREE_CODE (dest
) == ADDR_EXPR
)
1083 tree src_base
, dest_base
, fn
;
1084 poly_int64 src_offset
= 0, dest_offset
= 0;
1085 poly_uint64 maxsize
;
1087 srcvar
= TREE_OPERAND (src
, 0);
1088 src_base
= get_addr_base_and_unit_offset (srcvar
, &src_offset
);
1089 if (src_base
== NULL
)
1091 destvar
= TREE_OPERAND (dest
, 0);
1092 dest_base
= get_addr_base_and_unit_offset (destvar
,
1094 if (dest_base
== NULL
)
1095 dest_base
= destvar
;
1096 if (!poly_int_tree_p (len
, &maxsize
))
1098 if (SSA_VAR_P (src_base
)
1099 && SSA_VAR_P (dest_base
))
1101 if (operand_equal_p (src_base
, dest_base
, 0)
1102 && ranges_maybe_overlap_p (src_offset
, maxsize
,
1103 dest_offset
, maxsize
))
1106 else if (TREE_CODE (src_base
) == MEM_REF
1107 && TREE_CODE (dest_base
) == MEM_REF
)
1109 if (! operand_equal_p (TREE_OPERAND (src_base
, 0),
1110 TREE_OPERAND (dest_base
, 0), 0))
1112 poly_offset_int full_src_offset
1113 = mem_ref_offset (src_base
) + src_offset
;
1114 poly_offset_int full_dest_offset
1115 = mem_ref_offset (dest_base
) + dest_offset
;
1116 if (ranges_maybe_overlap_p (full_src_offset
, maxsize
,
1117 full_dest_offset
, maxsize
))
1123 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
1126 gimple_call_set_fndecl (stmt
, fn
);
1127 gimple_call_set_arg (stmt
, 0, dest
);
1128 gimple_call_set_arg (stmt
, 1, src
);
1133 /* If the destination and source do not alias optimize into
1135 if ((is_gimple_min_invariant (dest
)
1136 || TREE_CODE (dest
) == SSA_NAME
)
1137 && (is_gimple_min_invariant (src
)
1138 || TREE_CODE (src
) == SSA_NAME
))
1141 ao_ref_init_from_ptr_and_size (&destr
, dest
, len
);
1142 ao_ref_init_from_ptr_and_size (&srcr
, src
, len
);
1143 if (!refs_may_alias_p_1 (&destr
, &srcr
, false))
1146 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
1149 gimple_call_set_fndecl (stmt
, fn
);
1150 gimple_call_set_arg (stmt
, 0, dest
);
1151 gimple_call_set_arg (stmt
, 1, src
);
1160 if (!tree_fits_shwi_p (len
))
1163 || (AGGREGATE_TYPE_P (srctype
)
1164 && TYPE_REVERSE_STORAGE_ORDER (srctype
)))
1167 || (AGGREGATE_TYPE_P (desttype
)
1168 && TYPE_REVERSE_STORAGE_ORDER (desttype
)))
1170 /* In the following try to find a type that is most natural to be
1171 used for the memcpy source and destination and that allows
1172 the most optimization when memcpy is turned into a plain assignment
1173 using that type. In theory we could always use a char[len] type
1174 but that only gains us that the destination and source possibly
1175 no longer will have their address taken. */
1176 if (TREE_CODE (srctype
) == ARRAY_TYPE
1177 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype
), len
))
1178 srctype
= TREE_TYPE (srctype
);
1179 if (TREE_CODE (desttype
) == ARRAY_TYPE
1180 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype
), len
))
1181 desttype
= TREE_TYPE (desttype
);
1182 if (TREE_ADDRESSABLE (srctype
)
1183 || TREE_ADDRESSABLE (desttype
))
1186 /* Make sure we are not copying using a floating-point mode or
1187 a type whose size possibly does not match its precision. */
1188 if (FLOAT_MODE_P (TYPE_MODE (desttype
))
1189 || TREE_CODE (desttype
) == BOOLEAN_TYPE
1190 || TREE_CODE (desttype
) == ENUMERAL_TYPE
)
1191 desttype
= bitwise_type_for_mode (TYPE_MODE (desttype
));
1192 if (FLOAT_MODE_P (TYPE_MODE (srctype
))
1193 || TREE_CODE (srctype
) == BOOLEAN_TYPE
1194 || TREE_CODE (srctype
) == ENUMERAL_TYPE
)
1195 srctype
= bitwise_type_for_mode (TYPE_MODE (srctype
));
1203 src_align
= get_pointer_alignment (src
);
1204 dest_align
= get_pointer_alignment (dest
);
1206 /* Choose between src and destination type for the access based
1207 on alignment, whether the access constitutes a register access
1208 and whether it may actually expose a declaration for SSA rewrite
1209 or SRA decomposition. Also try to expose a string constant, we
1210 might be able to concatenate several of them later into a single
1212 destvar
= NULL_TREE
;
1214 if (TREE_CODE (dest
) == ADDR_EXPR
1215 && var_decl_component_p (TREE_OPERAND (dest
, 0))
1216 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype
), len
)
1217 && dest_align
>= TYPE_ALIGN (desttype
)
1218 && (is_gimple_reg_type (desttype
)
1219 || src_align
>= TYPE_ALIGN (desttype
)))
1220 destvar
= fold_build2 (MEM_REF
, desttype
, dest
, off0
);
1221 else if (TREE_CODE (src
) == ADDR_EXPR
1222 && var_decl_component_p (TREE_OPERAND (src
, 0))
1223 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype
), len
)
1224 && src_align
>= TYPE_ALIGN (srctype
)
1225 && (is_gimple_reg_type (srctype
)
1226 || dest_align
>= TYPE_ALIGN (srctype
)))
1227 srcvar
= fold_build2 (MEM_REF
, srctype
, src
, off0
);
1228 /* FIXME: Don't transform copies from strings with known original length.
1229 As soon as strlenopt tests that rely on it for passing are adjusted,
1230 this hack can be removed. */
1231 else if (gimple_call_alloca_for_var_p (stmt
)
1232 && (srcvar
= string_constant (src
, &srcoff
, NULL
, NULL
))
1233 && integer_zerop (srcoff
)
1234 && tree_int_cst_equal (TYPE_SIZE_UNIT (TREE_TYPE (srcvar
)), len
)
1235 && dest_align
>= TYPE_ALIGN (TREE_TYPE (srcvar
)))
1236 srctype
= TREE_TYPE (srcvar
);
1240 /* Now that we chose an access type express the other side in
1241 terms of it if the target allows that with respect to alignment
1243 if (srcvar
== NULL_TREE
)
1245 if (src_align
>= TYPE_ALIGN (desttype
))
1246 srcvar
= fold_build2 (MEM_REF
, desttype
, src
, off0
);
1249 if (STRICT_ALIGNMENT
)
1251 srctype
= build_aligned_type (TYPE_MAIN_VARIANT (desttype
),
1253 srcvar
= fold_build2 (MEM_REF
, srctype
, src
, off0
);
1256 else if (destvar
== NULL_TREE
)
1258 if (dest_align
>= TYPE_ALIGN (srctype
))
1259 destvar
= fold_build2 (MEM_REF
, srctype
, dest
, off0
);
1262 if (STRICT_ALIGNMENT
)
1264 desttype
= build_aligned_type (TYPE_MAIN_VARIANT (srctype
),
1266 destvar
= fold_build2 (MEM_REF
, desttype
, dest
, off0
);
1270 /* Same as above, detect out-of-bounds accesses without issuing
1271 warnings. Avoid folding out-of-bounds copies but to avoid
1272 false positives for unreachable code defer warning until
1273 after DCE has worked its magic.
1274 -Wrestrict is still diagnosed. */
1275 if (int warning
= check_bounds_or_overlap (as_a
<gcall
*>(stmt
),
1276 dest
, src
, len
, len
,
1278 if (warning
!= OPT_Wrestrict
)
1282 if (is_gimple_reg_type (TREE_TYPE (srcvar
)))
1284 tree tem
= fold_const_aggregate_ref (srcvar
);
1287 if (! is_gimple_min_invariant (srcvar
))
1289 new_stmt
= gimple_build_assign (NULL_TREE
, srcvar
);
1290 srcvar
= create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar
),
1292 gimple_assign_set_lhs (new_stmt
, srcvar
);
1293 gimple_set_vuse (new_stmt
, gimple_vuse (stmt
));
1294 gsi_insert_before (gsi
, new_stmt
, GSI_SAME_STMT
);
1296 new_stmt
= gimple_build_assign (destvar
, srcvar
);
1297 goto set_vop_and_replace
;
1300 /* We get an aggregate copy. If the source is a STRING_CST, then
1301 directly use its type to perform the copy. */
1302 if (TREE_CODE (srcvar
) == STRING_CST
)
1305 /* Or else, use an unsigned char[] type to perform the copy in order
1306 to preserve padding and to avoid any issues with TREE_ADDRESSABLE
1307 types or float modes behavior on copying. */
1310 desttype
= build_array_type_nelts (unsigned_char_type_node
,
1311 tree_to_uhwi (len
));
1313 if (src_align
> TYPE_ALIGN (srctype
))
1314 srctype
= build_aligned_type (srctype
, src_align
);
1315 srcvar
= fold_build2 (MEM_REF
, srctype
, src
, off0
);
1318 if (dest_align
> TYPE_ALIGN (desttype
))
1319 desttype
= build_aligned_type (desttype
, dest_align
);
1320 destvar
= fold_build2 (MEM_REF
, desttype
, dest
, off0
);
1321 new_stmt
= gimple_build_assign (destvar
, srcvar
);
1323 set_vop_and_replace
:
1324 gimple_move_vops (new_stmt
, stmt
);
1327 gsi_replace (gsi
, new_stmt
, false);
1330 gsi_insert_before (gsi
, new_stmt
, GSI_SAME_STMT
);
1334 gimple_seq stmts
= NULL
;
1335 if (code
== BUILT_IN_MEMCPY
|| code
== BUILT_IN_MEMMOVE
)
1337 else if (code
== BUILT_IN_MEMPCPY
)
1339 len
= gimple_convert_to_ptrofftype (&stmts
, loc
, len
);
1340 dest
= gimple_build (&stmts
, loc
, POINTER_PLUS_EXPR
,
1341 TREE_TYPE (dest
), dest
, len
);
1346 gsi_insert_seq_before (gsi
, stmts
, GSI_SAME_STMT
);
1347 gimple
*repl
= gimple_build_assign (lhs
, dest
);
1348 gsi_replace (gsi
, repl
, false);
1352 /* Transform a call to built-in bcmp(a, b, len) at *GSI into one
1353 to built-in memcmp (a, b, len). */
1356 gimple_fold_builtin_bcmp (gimple_stmt_iterator
*gsi
)
1358 tree fn
= builtin_decl_implicit (BUILT_IN_MEMCMP
);
1363 /* Transform bcmp (a, b, len) into memcmp (a, b, len). */
1365 gimple
*stmt
= gsi_stmt (*gsi
);
1366 tree a
= gimple_call_arg (stmt
, 0);
1367 tree b
= gimple_call_arg (stmt
, 1);
1368 tree len
= gimple_call_arg (stmt
, 2);
1370 gimple
*repl
= gimple_build_call (fn
, 3, a
, b
, len
);
1371 replace_call_with_call_and_fold (gsi
, repl
);
1376 /* Transform a call to built-in bcopy (src, dest, len) at *GSI into one
1377 to built-in memmove (dest, src, len). */
1380 gimple_fold_builtin_bcopy (gimple_stmt_iterator
*gsi
)
1382 tree fn
= builtin_decl_implicit (BUILT_IN_MEMMOVE
);
1387 /* bcopy has been removed from POSIX in Issue 7 but Issue 6 specifies
1388 it's quivalent to memmove (not memcpy). Transform bcopy (src, dest,
1389 len) into memmove (dest, src, len). */
1391 gimple
*stmt
= gsi_stmt (*gsi
);
1392 tree src
= gimple_call_arg (stmt
, 0);
1393 tree dest
= gimple_call_arg (stmt
, 1);
1394 tree len
= gimple_call_arg (stmt
, 2);
1396 gimple
*repl
= gimple_build_call (fn
, 3, dest
, src
, len
);
1397 gimple_call_set_fntype (as_a
<gcall
*> (stmt
), TREE_TYPE (fn
));
1398 replace_call_with_call_and_fold (gsi
, repl
);
1403 /* Transform a call to built-in bzero (dest, len) at *GSI into one
1404 to built-in memset (dest, 0, len). */
1407 gimple_fold_builtin_bzero (gimple_stmt_iterator
*gsi
)
1409 tree fn
= builtin_decl_implicit (BUILT_IN_MEMSET
);
1414 /* Transform bzero (dest, len) into memset (dest, 0, len). */
1416 gimple
*stmt
= gsi_stmt (*gsi
);
1417 tree dest
= gimple_call_arg (stmt
, 0);
1418 tree len
= gimple_call_arg (stmt
, 1);
1420 gimple_seq seq
= NULL
;
1421 gimple
*repl
= gimple_build_call (fn
, 3, dest
, integer_zero_node
, len
);
1422 gimple_seq_add_stmt_without_update (&seq
, repl
);
1423 gsi_replace_with_seq_vops (gsi
, seq
);
/* Fold function call to builtin memset or bzero at *GSI setting the
   memory of size LEN to VAL.  Return whether a simplification was made.

   The visible strategy: when LEN is a small constant matching an
   integer mode and C is a constant byte value, rewrite the call into
   a single scalar store through a MEM_REF of an integer type of that
   size.  NOTE(review): this extract elides several original lines
   (guards, returns and braces); gaps are flagged below.  */

gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)

  gimple *stmt = gsi_stmt (*gsi);
  unsigned HOST_WIDE_INT length, cval;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))

      /* memset returns its first argument, so the call's value is DEST.  */
      replace_call_with_value (gsi, gimple_call_arg (stmt, 0));

  /* Bail out unless both the length and the fill value are compile-time
     constants.  NOTE(review): the fail-return statements are elided in
     this extract.  */
  if (! tree_fits_uhwi_p (len))

  if (TREE_CODE (c) != INTEGER_CST)

  tree dest = gimple_call_arg (stmt, 0);
  /* NOTE(review): the declaration initializing VAR (presumably from
     DEST) is elided in this extract — confirm against the full file.  */
  if (TREE_CODE (var) != ADDR_EXPR)

  /* Strip the ADDR_EXPR to get at the underlying object.  */
  var = TREE_OPERAND (var, 0);
  if (TREE_THIS_VOLATILE (var))

  etype = TREE_TYPE (var);
  if (TREE_CODE (etype) == ARRAY_TYPE)
    etype = TREE_TYPE (etype);

  /* Only integral or pointer element types are handled by the scalar
     store rewrite.  */
  if (!INTEGRAL_TYPE_P (etype)
      && !POINTER_TYPE_P (etype))

  if (! var_decl_component_p (var))

  length = tree_to_uhwi (len);
  /* The store type's size and precision must exactly match LEN, and
     DEST must be sufficiently aligned for a full-width access.  */
  if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (etype)) != length
      || (GET_MODE_PRECISION (SCALAR_INT_TYPE_MODE (etype))
	  != GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (etype)))
      || get_pointer_alignment (dest) / BITS_PER_UNIT < length)

  /* The replicated fill pattern must fit in a HOST_WIDE_INT.  */
  if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)

  if (!type_has_mode_precision_p (etype))
    etype = lang_hooks.types.type_for_mode (SCALAR_INT_TYPE_MODE (etype),
					    TYPE_UNSIGNED (etype));

  if (integer_zerop (c))

      /* Host-representation assumptions for replicating the byte;
	 punt on unusual hosts/targets.  */
      if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)

      cval = TREE_INT_CST_LOW (c);
      /* NOTE(review): the intermediate byte-replication steps are elided
	 in this extract; the final shift widens the pattern to 64 bits
	 without shifting by the full width (which would be UB).  */
      cval |= (cval << 31) << 1;

  /* Build the scalar store *(etype *)dest = cval and give it the
     call's virtual operands.  */
  var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
  gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
  gimple_move_vops (store, stmt);
  gsi_insert_before (gsi, store, GSI_SAME_STMT);
  if (gimple_call_lhs (stmt))

      /* The call's result (DEST) may be used: keep it via an assignment.  */
      gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
      gsi_replace (gsi, asgn, false);

      /* No LHS: simply remove the now-redundant call.  */
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_remove (&gsi2, true);
/* Helper of get_range_strlen for ARG that is not an SSA_NAME.  Computes
   a length (or, for SRK_INT_VALUE, an integer value) for ARG and merges
   it into *PDATA's MINLEN/MAXLEN/MAXBOUND members.  ELTSIZE is the
   character element size in bytes (1, 2, or 4).
   NOTE(review): this extract elides a number of original lines (braces,
   some conditions and returns); gaps are flagged below.  */

get_range_strlen_tree (tree arg, bitmap *visited, strlen_range_kind rkind,
		       c_strlen_data *pdata, unsigned eltsize)

  gcc_assert (TREE_CODE (arg) != SSA_NAME);

  /* The length computed by this invocation of the function.  */
  tree val = NULL_TREE;

  /* True if VAL is an optimistic (tight) bound determined from
     the size of the character array in which the string may be
     stored.  In that case, the computed VAL is used to set
     PDATA->MAXBOUND rather than treated as the exact length.  */
  bool tight_bound = false;

  /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
  if (TREE_CODE (arg) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)

      tree op = TREE_OPERAND (arg, 0);
      if (integer_zerop (TREE_OPERAND (op, 1)))

	  /* &p->arr[0]: recurse on the pointer P itself.  */
	  tree aop0 = TREE_OPERAND (op, 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_range_strlen (TREE_OPERAND (aop0, 0), visited, rkind,

      else if (TREE_CODE (TREE_OPERAND (op, 0)) == COMPONENT_REF
	       && rkind == SRK_LENRANGE)

	  /* Fail if an array is the last member of a struct object
	     since it could be treated as a (fake) flexible array
	     member whose actual size is unknown here.  */
	  tree idx = TREE_OPERAND (op, 1);

	  arg = TREE_OPERAND (op, 0);
	  tree optype = TREE_TYPE (arg);
	  if (tree dom = TYPE_DOMAIN (optype))
	    if (tree bound = TYPE_MAX_VALUE (dom))
	      if (TREE_CODE (bound) == INTEGER_CST
		  && TREE_CODE (idx) == INTEGER_CST
		  && tree_int_cst_lt (bound, idx))

  if (rkind == SRK_INT_VALUE)

      /* We are computing the maximum value (not string length).  */
      if (TREE_CODE (val) != INTEGER_CST
	  || tree_int_cst_sgn (val) < 0)

      /* Try the exact string length first.  */
      c_strlen_data lendata = { };
      val = c_strlen (arg, 1, &lendata, eltsize);
      if (!val && lendata.decl)

	  /* ARG refers to an unterminated const character array.
	     DATA.DECL with size DATA.LEN.  */
	  val = lendata.minlen;
	  pdata->decl = lendata.decl;

  /* Set if VAL represents the maximum length based on array size (set
     when exact length cannot be determined).  */
  bool maxbound = false;

  if (!val && rkind == SRK_LENRANGE)

      if (TREE_CODE (arg) == ADDR_EXPR)
	return get_range_strlen (TREE_OPERAND (arg, 0), visited, rkind,

      if (TREE_CODE (arg) == ARRAY_REF)

	  tree optype = TREE_TYPE (TREE_OPERAND (arg, 0));

	  /* Determine the "innermost" array type.  */
	  while (TREE_CODE (optype) == ARRAY_TYPE
		 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
	    optype = TREE_TYPE (optype);

	  /* Avoid arrays of pointers.  */
	  tree eltype = TREE_TYPE (optype);
	  if (TREE_CODE (optype) != ARRAY_TYPE
	      || !INTEGRAL_TYPE_P (eltype))

	  /* Fail when the array bound is unknown or zero.
	     NOTE(review): the leading `if (!val` of this condition is
	     elided in this extract.  */
	  val = TYPE_SIZE_UNIT (optype);
	      || TREE_CODE (val) != INTEGER_CST
	      || integer_zerop (val))

	  /* Subtract one for the terminating nul (second operand elided
	     in this extract).  */
	  val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,

	  /* Set the minimum size to zero since the string in
	     the array could have zero length.  */
	  pdata->minlen = ssize_int (0);

      else if (TREE_CODE (arg) == COMPONENT_REF
	       && (TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1)))

	  /* Use the type of the member array to determine the upper
	     bound on the length of the array.  This may be overly
	     optimistic if the array itself isn't NUL-terminated and
	     the caller relies on the subsequent member to contain
	     the NUL but that would only be considered valid if
	     the array were the last member of a struct.  */

	  tree fld = TREE_OPERAND (arg, 1);

	  tree optype = TREE_TYPE (fld);

	  /* Determine the "innermost" array type.  */
	  while (TREE_CODE (optype) == ARRAY_TYPE
		 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
	    optype = TREE_TYPE (optype);

	  /* Fail when the array bound is unknown or zero.
	     NOTE(review): the leading `if (!val` is elided here too.  */
	  val = TYPE_SIZE_UNIT (optype);
	      || TREE_CODE (val) != INTEGER_CST
	      || integer_zerop (val))
	  val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,

	  /* Set the minimum size to zero since the string in
	     the array could have zero length.  */
	  pdata->minlen = ssize_int (0);

	  /* The array size determined above is an optimistic bound
	     on the length.  If the array isn't nul-terminated the
	     length computed by the library function would be greater.
	     Even though using strlen to cross the subobject boundary
	     is undefined, avoid drawing conclusions from the member
	     type about the length here.  */

      else if (TREE_CODE (arg) == MEM_REF
	       && TREE_CODE (TREE_TYPE (arg)) == ARRAY_TYPE
	       && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == INTEGER_TYPE
	       && TREE_CODE (TREE_OPERAND (arg, 0)) == ADDR_EXPR)

	  /* Handle a MEM_REF into a DECL accessing an array of integers,
	     being conservative about references to extern structures with
	     flexible array members that can be initialized to arbitrary
	     numbers of elements as an extension (static structs are okay).
	     FIXME: Make this less conservative -- see
	     component_ref_size in tree.c.  */
	  tree ref = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if ((TREE_CODE (ref) == PARM_DECL || VAR_P (ref))
	      && (decl_binds_to_current_def_p (ref)
		  || !array_at_struct_end_p (arg)))

	      /* Fail if the offset is out of bounds.  Such accesses
		 should be diagnosed at some point.
		 NOTE(review): the leading `if (!val` is elided.  */
	      val = DECL_SIZE_UNIT (ref);
		  || TREE_CODE (val) != INTEGER_CST
		  || integer_zerop (val))

	      poly_offset_int psiz = wi::to_offset (val);
	      poly_offset_int poff = mem_ref_offset (arg);
	      if (known_le (psiz, poff))

	      pdata->minlen = ssize_int (0);

	      /* Subtract the offset and one for the terminating nul.  */

	      val = wide_int_to_tree (TREE_TYPE (val), psiz);
	      /* Since VAL reflects the size of a declared object
		 rather the type of the access it is not a tight bound.  */

      else if (TREE_CODE (arg) == PARM_DECL || VAR_P (arg))

	  /* Avoid handling pointers to arrays.  GCC might misuse
	     a pointer to an array of one bound to point to an array
	     object of a greater bound.  */
	  tree argtype = TREE_TYPE (arg);
	  if (TREE_CODE (argtype) == ARRAY_TYPE)

	      val = TYPE_SIZE_UNIT (argtype);
	      /* NOTE(review): leading `if (!val` elided.  */
		  || TREE_CODE (val) != INTEGER_CST
		  || integer_zerop (val))
	      val = wide_int_to_tree (TREE_TYPE (val),
				      wi::sub (wi::to_wide (val), 1));

	      /* Set the minimum size to zero since the string in
		 the array could have zero length.  */
	      pdata->minlen = ssize_int (0);

  /* Adjust the lower bound on the string length as necessary.
     NOTE(review): the leading `if (!pdata->minlen` of this condition is
     elided in this extract.  */
      || (rkind != SRK_STRLEN
	  && TREE_CODE (pdata->minlen) == INTEGER_CST
	  && TREE_CODE (val) == INTEGER_CST
	  && tree_int_cst_lt (val, pdata->minlen)))
    pdata->minlen = val;

  if (pdata->maxbound && TREE_CODE (pdata->maxbound) == INTEGER_CST)

      /* Adjust the tighter (more optimistic) string length bound
	 if necessary and proceed to adjust the more conservative
	 bound.  */
      if (TREE_CODE (val) == INTEGER_CST)

	  if (tree_int_cst_lt (pdata->maxbound, val))
	    pdata->maxbound = val;

	pdata->maxbound = val;

  else if (pdata->maxbound || maxbound)
    /* Set PDATA->MAXBOUND only if it either isn't INTEGER_CST or
       if VAL corresponds to the maximum length determined based
       on the type of the object.  */
    pdata->maxbound = val;

  /* VAL computed above represents an optimistically tight bound
     on the length of the string based on the referenced object's
     or subobject's type.  Determine the conservative upper bound
     based on the enclosing object's size if possible.  */
  if (rkind == SRK_LENRANGE)

      tree base = get_addr_base_and_unit_offset (arg, &offset);

	  /* When the call above fails due to a non-constant offset
	     assume the offset is zero and use the size of the whole
	     enclosing object instead.  */
	  base = get_base_address (arg);

      /* If the base object is a pointer no upper bound on the length
	 can be determined.  Otherwise the maximum length is equal to
	 the size of the enclosing object minus the offset of
	 the referenced subobject minus 1 (for the terminating nul).  */
      tree type = TREE_TYPE (base);
      if (TREE_CODE (type) == POINTER_TYPE
	  || (TREE_CODE (base) != PARM_DECL && !VAR_P (base))
	  || !(val = DECL_SIZE_UNIT (base)))
	val = build_all_ones_cst (size_type_node);

	  val = DECL_SIZE_UNIT (base);
	  val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
			     size_int (offset + 1));

  /* Adjust the more conservative bound if possible/necessary
     and fail otherwise.  */
  if (rkind != SRK_STRLEN)

      if (TREE_CODE (pdata->maxlen) != INTEGER_CST
	  || TREE_CODE (val) != INTEGER_CST)

      if (tree_int_cst_lt (pdata->maxlen, val))
	pdata->maxlen = val;

  else if (simple_cst_equal (val, pdata->maxlen) != 1)

      /* Fail if the length of this ARG is different from that
	 previously determined from another ARG.  */

  pdata->maxlen = val;
  /* An all-ones MAXLEN means "unbounded"; only SRK_LENRANGE accepts
     that as success.  */
  return rkind == SRK_LENRANGE || !integer_all_onesp (val);
/* For an ARG referencing one or more strings, try to obtain the range
   of their lengths, or the size of the largest array ARG referes to if
   the range of lengths cannot be determined, and store all in *PDATA.
   For an integer ARG (when RKIND == SRK_INT_VALUE), try to determine
   the maximum constant value.
   If ARG is an SSA_NAME, follow its use-def chains.  When RKIND ==
   SRK_STRLEN, then if PDATA->MAXLEN is not equal to the determined
   length or if we are unable to determine the length, return false.
   VISITED is a bitmap of visited variables.
   RKIND determines the kind of value or range to obtain (see
   strlen_range_kind).
   Set PDATA->DECL if ARG refers to an unterminated constant array.
   On input, set ELTSIZE to 1 for normal single byte character strings,
   and either 2 or 4 for wide characer strings (the size of wchar_t).
   Return true if *PDATA was successfully populated and false otherwise.
   NOTE(review): this extract elides the switch/case scaffolding and the
   final return; gaps are flagged below.  */

get_range_strlen (tree arg, bitmap *visited,
		  strlen_range_kind rkind,
		  c_strlen_data *pdata, unsigned eltsize)

  /* Non-SSA trees are handled by the non-recursive helper.  */
  if (TREE_CODE (arg) != SSA_NAME)
    return get_range_strlen_tree (arg, visited, rkind, pdata, eltsize);

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))

  /* If we were already here, break the infinite cycle.  Lazily allocate
     the visited bitmap on first use.  */
    *visited = BITMAP_ALLOC (NULL);
  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))

  gimple *def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))

      /* The RHS of the statement defining VAR must either have a
	 constant length or come from another SSA_NAME with a constant
	 length.  */
      if (gimple_assign_single_p (def_stmt)
	  || gimple_assign_unary_nop_p (def_stmt))

	  tree rhs = gimple_assign_rhs1 (def_stmt);
	  return get_range_strlen (rhs, visited, rkind, pdata, eltsize);

      else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)

	  /* Merge the ranges of both arms of the COND_EXPR.  */
	  tree ops[2] = { gimple_assign_rhs2 (def_stmt),
			  gimple_assign_rhs3 (def_stmt) };

	  for (unsigned int i = 0; i < 2; i++)
	    if (!get_range_strlen (ops[i], visited, rkind, pdata, eltsize))

		if (rkind != SRK_LENRANGE)

		/* Set the upper bound to the maximum to prevent
		   it from being adjusted in the next iteration but
		   leave MINLEN and the more conservative MAXBOUND
		   determined so far alone (or leave them null if
		   they haven't been set yet).  That the MINLEN is
		   in fact zero can be determined from MAXLEN being
		   unbounded but the discovered minimum is used for
		   diagnostics.  */
		pdata->maxlen = build_all_ones_cst (size_type_node);

      /* Unless RKIND == SRK_LENRANGE, all arguments of the PHI node
	 must have a constant length.  */
      for (unsigned i = 0; i < gimple_phi_num_args (def_stmt); i++)

	  tree arg = gimple_phi_arg (def_stmt, i)->def;

	  /* If this PHI has itself as an argument, we cannot
	     determine the string length of this argument.  However,
	     if we can find a constant string length for the other
	     PHI args then we can still be sure that this is a
	     constant string length.  So be optimistic and just
	     continue with the next argument.  */
	  if (arg == gimple_phi_result (def_stmt))

	  if (!get_range_strlen (arg, visited, rkind, pdata, eltsize))

	      if (rkind != SRK_LENRANGE)

	      /* Set the upper bound to the maximum to prevent
		 it from being adjusted in the next iteration but
		 leave MINLEN and the more conservative MAXBOUND
		 determined so far alone (or leave them null if
		 they haven't been set yet).  That the MINLEN is
		 in fact zero can be determined from MAXLEN being
		 unbounded but the discovered minimum is used for
		 diagnostics.  */
	      pdata->maxlen = build_all_ones_cst (size_type_node);
/* Try to obtain the range of the lengths of the string(s) referenced
   by ARG, or the size of the largest array ARG refers to if the range
   of lengths cannot be determined, and store all in *PDATA which must
   be zero-initialized on input except PDATA->MAXBOUND may be set to
   a non-null tree node other than INTEGER_CST to request to have it
   set to the length of the longest string in a PHI.  ELTSIZE is
   the expected size of the string element in bytes: 1 for char and
   some power of 2 for wide characters.
   Return true if the range [PDATA->MINLEN, PDATA->MAXLEN] is suitable
   for optimization.  Returning false means that a nonzero PDATA->MINLEN
   doesn't reflect the true lower bound of the range when PDATA->MAXLEN
   is -1 (in that case, the actual range is indeterminate, i.e.,
   [0, PTRDIFF_MAX - 2].  */

get_range_strlen (tree arg, c_strlen_data *pdata, unsigned eltsize)

  bitmap visited = NULL;
  /* Remember the caller-supplied MAXBOUND sentinel so we can tell below
     whether the recursive walk replaced it.  */
  tree maxbound = pdata->maxbound;

  if (!get_range_strlen (arg, &visited, SRK_LENRANGE, pdata, eltsize))

      /* On failure extend the length range to an impossible maximum
	 (a valid MAXLEN must be less than PTRDIFF_MAX - 1).  Other
	 members can stay unchanged regardless.  */
      pdata->minlen = ssize_int (0);
      pdata->maxlen = build_all_ones_cst (size_type_node);

  else if (!pdata->minlen)
    pdata->minlen = ssize_int (0);

  /* If it's unchanged from its initial non-null value, set the
     conservative MAXBOUND to SIZE_MAX.  Otherwise leave it null (if
     it is null).  */
  if (maxbound && pdata->maxbound == maxbound)
    pdata->maxbound = build_all_ones_cst (size_type_node);

  BITMAP_FREE (visited);

  /* All-ones MAXLEN signals an indeterminate range.  */
  return !integer_all_onesp (pdata->maxlen);
/* Return the maximum value for ARG given RKIND (see strlen_range_kind).
   For ARG of pointer types, NONSTR indicates if the caller is prepared
   to handle unterminated strings.   For integer ARG and when RKIND ==
   SRK_INT_VALUE, NONSTR must be null.

   If an unterminated array is discovered and our caller handles
   unterminated arrays, then bubble up the offending DECL and
   return the maximum size.  Otherwise return NULL.
   NOTE(review): this extract elides the `if (nonstr)` guard around the
   NONSTR branch below.  */

get_maxval_strlen (tree arg, strlen_range_kind rkind, tree *nonstr = NULL)

  /* A non-null NONSTR is meaningless when determining the maximum
     value of an integer ARG.  */
  gcc_assert (rkind != SRK_INT_VALUE || nonstr == NULL);
  /* ARG must have an integral type when RKIND says so.  */
  gcc_assert (rkind != SRK_INT_VALUE || INTEGRAL_TYPE_P (TREE_TYPE (arg)));

  bitmap visited = NULL;

  /* Reset DATA.MAXLEN if the call fails or when DATA.MAXLEN
     is unbounded (all-ones).  */
  c_strlen_data lendata = { };
  if (!get_range_strlen (arg, &visited, rkind, &lendata, /* eltsize = */1))
    lendata.maxlen = NULL_TREE;
  else if (lendata.maxlen && integer_all_onesp (lendata.maxlen))
    lendata.maxlen = NULL_TREE;

  BITMAP_FREE (visited);

      /* For callers prepared to handle unterminated arrays set
	 *NONSTR to point to the declaration of the array and return
	 the maximum length/size.  */
      *nonstr = lendata.decl;
      return lendata.maxlen;

  /* Fail if the constant array isn't nul-terminated.  */
  return lendata.decl ? NULL_TREE : lendata.maxlen;
/* Fold function call to builtin strcpy with arguments DEST and SRC.
   If LEN is not NULL, it represents the length of the string to be
   copied.  Return NULL_TREE if no simplification can be made.
   Strategy visible here: collapse a self-copy to DEST, and otherwise
   rewrite strcpy into memcpy with an explicit constant length when the
   source length is known.  NOTE(review): this extract elides some
   guards, braces, and returns; gaps are flagged below.  */

gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
			    tree dest, tree src)

  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (operand_equal_p (src, dest, 0))

      /* Issue -Wrestrict unless the pointers are null (those do
	 not point to objects and so do not indicate an overlap;
	 such calls could be the result of sanitization and jump
	 threading).  */
      if (!integer_zerop (dest) && !warning_suppressed_p (stmt, OPT_Wrestrict))

	  tree func = gimple_call_fndecl (stmt);

	  warning_at (loc, OPT_Wrestrict,
		      "%qD source argument is the same as destination",

      replace_call_with_value (gsi, dest);

  /* The memcpy rewrite only helps when not optimizing for size.  */
  if (optimize_function_for_size_p (cfun))

  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);

  /* Set to non-null if ARG refers to an unterminated array.  */
  tree len = get_maxval_strlen (src, SRK_STRLEN, &nonstr);

      /* Avoid folding calls with unterminated arrays.  */
      if (!warning_suppressed_p (stmt, OPT_Wstringop_overread))
	warn_string_no_nul (loc, stmt, "strcpy", src, nonstr);
      /* Only warn about the unterminated source once.  */
      suppress_warning (stmt, OPT_Wstringop_overread);

  /* Copy LEN + 1 bytes to include the terminating nul.  */
  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
/* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
   If SLEN is not NULL, it represents the length of the source string.
   Return NULL_TREE if no simplification can be made.
   Strategy visible here: fold a zero-length copy to DEST (with a
   truncation warning), and rewrite into memcpy when the constant LEN
   covers the whole nul-terminated source.  NOTE(review): this extract
   elides some guards, braces, and returns; gaps are flagged below.  */

gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
			     tree dest, tree src, tree len)

  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool nonstring = get_attr_nonstring_decl (dest) != NULL_TREE;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))

      /* Avoid warning if the destination refers to an array/pointer
	 decorated with attribute nonstring.  */

	  tree fndecl = gimple_call_fndecl (stmt);

	  /* Warn about the lack of nul termination: the result is not
	     a (nul-terminated) string.  */
	  tree slen = get_maxval_strlen (src, SRK_STRLEN);
	  if (slen && !integer_zerop (slen))
	    warning_at (loc, OPT_Wstringop_truncation,
			"%qD destination unchanged after copying no bytes "
			"from a string of length %E",

	    warning_at (loc, OPT_Wstringop_truncation,
			"%qD destination unchanged after copying no bytes",

      replace_call_with_value (gsi, dest);

  /* We can't compare slen with len as constants below if len is not a
     constant.  */
  if (TREE_CODE (len) != INTEGER_CST)

  /* Now, we must be passed a constant src ptr parameter.  */
  tree slen = get_maxval_strlen (src, SRK_STRLEN);
  if (!slen || TREE_CODE (slen) != INTEGER_CST)

  /* The size of the source string including the terminating nul.  */
  tree ssize = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

  /* We do not support simplification of this case, though we do
     support it when expanding trees into RTL.  */
  /* FIXME: generate a call to __builtin_memset.  */
  if (tree_int_cst_lt (ssize, len))

  /* Diagnose truncation that leaves the copy unterminated.  */
  maybe_diag_stxncpy_trunc (*gsi, src, len);

  /* OK transform into builtin memcpy.  */
  tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);

  len = fold_convert_loc (loc, size_type_node, len);
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
/* Fold function call to builtin strchr or strrchr.
   If both arguments are constant, evaluate and fold the result,
   otherwise simplify str(r)chr (str, 0) into str + strlen (str).
   In general strlen is significantly faster than strchr
   due to being a simpler operation.
   NOTE(review): this extract elides some guards, braces, and the tail
   of the function (repositioning the iterator onto the inserted strlen
   call); gaps are flagged below.  */

gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)

  gimple *stmt = gsi_stmt (*gsi);
  tree str = gimple_call_arg (stmt, 0);
  tree c = gimple_call_arg (stmt, 1);
  location_t loc = gimple_location (stmt);

  /* A call whose result is unused has nothing to simplify into.  */
  if (!gimple_call_lhs (stmt))

  /* Avoid folding if the first argument is not a nul-terminated array.
     Defer warning until later.  */
  if (!check_nul_terminated_array (NULL_TREE, str))

  /* Both arguments constant: evaluate at compile time using the host
     strchr/strrchr.  */
  if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))

      const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);

	  /* Character not found: the result is a null pointer.  */
	  replace_call_with_value (gsi, integer_zero_node);

      /* Found: the result is STR plus the constant offset.  */
      tree len = build_int_cst (size_type_node, p1 - p);
      gimple_seq stmts = NULL;
      gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
					      POINTER_PLUS_EXPR, str, len);
      gimple_seq_add_stmt_without_update (&stmts, new_stmt);
      gsi_replace_with_seq_vops (gsi, stmts);

  /* The strlen rewrite below is only valid when searching for the
     terminating nul itself.  */
  if (!integer_zerop (c))

  /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size.  */
  if (is_strrchr && optimize_function_for_size_p (cfun))

      tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);

	  gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
	  replace_call_with_call_and_fold (gsi, repl);

  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);

  /* Create newstr = strlen (str).  */
  gimple_seq stmts = NULL;
  gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
  gimple_set_location (new_stmt, loc);
  len = create_tmp_reg_or_ssa_name (size_type_node);
  gimple_call_set_lhs (new_stmt, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);

  /* Create (str p+ strlen (str)).  */
  new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
				  POINTER_PLUS_EXPR, str, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);
  gsi_replace_with_seq_vops (gsi, stmts);
  /* gsi now points at the assignment to the lhs, get a
     stmt iterator to the strlen.
     ??? We can't use gsi_for_stmt as that doesn't work when the
     CFG isn't built yet.  */
  gimple_stmt_iterator gsi2 = *gsi;
2257 /* Fold function call to builtin strstr.
2258 If both arguments are constant, evaluate and fold the result,
2259 additionally fold strstr (x, "") into x and strstr (x, "c")
2260 into strchr (x, 'c'). */
2262 gimple_fold_builtin_strstr (gimple_stmt_iterator
*gsi
)
2264 gimple
*stmt
= gsi_stmt (*gsi
);
2265 if (!gimple_call_lhs (stmt
))
2268 tree haystack
= gimple_call_arg (stmt
, 0);
2269 tree needle
= gimple_call_arg (stmt
, 1);
2271 /* Avoid folding if either argument is not a nul-terminated array.
2272 Defer warning until later. */
2273 if (!check_nul_terminated_array (NULL_TREE
, haystack
)
2274 || !check_nul_terminated_array (NULL_TREE
, needle
))
2277 const char *q
= c_getstr (needle
);
2281 if (const char *p
= c_getstr (haystack
))
2283 const char *r
= strstr (p
, q
);
2287 replace_call_with_value (gsi
, integer_zero_node
);
2291 tree len
= build_int_cst (size_type_node
, r
- p
);
2292 gimple_seq stmts
= NULL
;
2294 = gimple_build_assign (gimple_call_lhs (stmt
), POINTER_PLUS_EXPR
,
2296 gimple_seq_add_stmt_without_update (&stmts
, new_stmt
);
2297 gsi_replace_with_seq_vops (gsi
, stmts
);
2301 /* For strstr (x, "") return x. */
2304 replace_call_with_value (gsi
, haystack
);
2308 /* Transform strstr (x, "c") into strchr (x, 'c'). */
2311 tree strchr_fn
= builtin_decl_implicit (BUILT_IN_STRCHR
);
2314 tree c
= build_int_cst (integer_type_node
, q
[0]);
2315 gimple
*repl
= gimple_build_call (strchr_fn
, 2, haystack
, c
);
2316 replace_call_with_call_and_fold (gsi
, repl
);
2324 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
2327 Return NULL_TREE if no simplification was possible, otherwise return the
2328 simplified form of the call as a tree.
2330 The simplified form may be a constant or other expression which
2331 computes the same value, but in a more efficient manner (including
2332 calls to other builtin functions).
2334 The call may contain arguments which need to be evaluated, but
2335 which are not useful to determine the result of the call. In
2336 this case we return a chain of COMPOUND_EXPRs. The LHS of each
2337 COMPOUND_EXPR will be an argument which must be evaluated.
2338 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
2339 COMPOUND_EXPR in the chain will contain the tree for the simplified
2340 form of the builtin function call. */
2343 gimple_fold_builtin_strcat (gimple_stmt_iterator
*gsi
, tree dst
, tree src
)
2345 gimple
*stmt
= gsi_stmt (*gsi
);
2346 location_t loc
= gimple_location (stmt
);
2348 const char *p
= c_getstr (src
);
2350 /* If the string length is zero, return the dst parameter. */
2351 if (p
&& *p
== '\0')
2353 replace_call_with_value (gsi
, dst
);
2357 if (!optimize_bb_for_speed_p (gimple_bb (stmt
)))
2360 /* See if we can store by pieces into (dst + strlen(dst)). */
2362 tree strlen_fn
= builtin_decl_implicit (BUILT_IN_STRLEN
);
2363 tree memcpy_fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
2365 if (!strlen_fn
|| !memcpy_fn
)
2368 /* If the length of the source string isn't computable don't
2369 split strcat into strlen and memcpy. */
2370 tree len
= get_maxval_strlen (src
, SRK_STRLEN
);
2374 /* Create strlen (dst). */
2375 gimple_seq stmts
= NULL
, stmts2
;
2376 gimple
*repl
= gimple_build_call (strlen_fn
, 1, dst
);
2377 gimple_set_location (repl
, loc
);
2378 newdst
= create_tmp_reg_or_ssa_name (size_type_node
);
2379 gimple_call_set_lhs (repl
, newdst
);
2380 gimple_seq_add_stmt_without_update (&stmts
, repl
);
2382 /* Create (dst p+ strlen (dst)). */
2383 newdst
= fold_build_pointer_plus_loc (loc
, dst
, newdst
);
2384 newdst
= force_gimple_operand (newdst
, &stmts2
, true, NULL_TREE
);
2385 gimple_seq_add_seq_without_update (&stmts
, stmts2
);
2387 len
= fold_convert_loc (loc
, size_type_node
, len
);
2388 len
= size_binop_loc (loc
, PLUS_EXPR
, len
,
2389 build_int_cst (size_type_node
, 1));
2390 len
= force_gimple_operand (len
, &stmts2
, true, NULL_TREE
);
2391 gimple_seq_add_seq_without_update (&stmts
, stmts2
);
2393 repl
= gimple_build_call (memcpy_fn
, 3, newdst
, src
, len
);
2394 gimple_seq_add_stmt_without_update (&stmts
, repl
);
2395 if (gimple_call_lhs (stmt
))
2397 repl
= gimple_build_assign (gimple_call_lhs (stmt
), dst
);
2398 gimple_seq_add_stmt_without_update (&stmts
, repl
);
2399 gsi_replace_with_seq_vops (gsi
, stmts
);
2400 /* gsi now points at the assignment to the lhs, get a
2401 stmt iterator to the memcpy call.
2402 ??? We can't use gsi_for_stmt as that doesn't work when the
2403 CFG isn't built yet. */
2404 gimple_stmt_iterator gsi2
= *gsi
;
2410 gsi_replace_with_seq_vops (gsi
, stmts
);
2416 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
2417 are the arguments to the call. */
2420 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator
*gsi
)
2422 gimple
*stmt
= gsi_stmt (*gsi
);
2423 tree dest
= gimple_call_arg (stmt
, 0);
2424 tree src
= gimple_call_arg (stmt
, 1);
2425 tree size
= gimple_call_arg (stmt
, 2);
2431 /* If the SRC parameter is "", return DEST. */
2432 if (p
&& *p
== '\0')
2434 replace_call_with_value (gsi
, dest
);
2438 if (! tree_fits_uhwi_p (size
) || ! integer_all_onesp (size
))
2441 /* If __builtin_strcat_chk is used, assume strcat is available. */
2442 fn
= builtin_decl_explicit (BUILT_IN_STRCAT
);
2446 gimple
*repl
= gimple_build_call (fn
, 2, dest
, src
);
2447 replace_call_with_call_and_fold (gsi
, repl
);
2451 /* Simplify a call to the strncat builtin. */
2454 gimple_fold_builtin_strncat (gimple_stmt_iterator
*gsi
)
2456 gimple
*stmt
= gsi_stmt (*gsi
);
2457 tree dst
= gimple_call_arg (stmt
, 0);
2458 tree src
= gimple_call_arg (stmt
, 1);
2459 tree len
= gimple_call_arg (stmt
, 2);
2461 const char *p
= c_getstr (src
);
2463 /* If the requested length is zero, or the src parameter string
2464 length is zero, return the dst parameter. */
2465 if (integer_zerop (len
) || (p
&& *p
== '\0'))
2467 replace_call_with_value (gsi
, dst
);
2471 if (TREE_CODE (len
) != INTEGER_CST
|| !p
)
2474 unsigned srclen
= strlen (p
);
2476 int cmpsrc
= compare_tree_int (len
, srclen
);
2478 /* Return early if the requested len is less than the string length.
2479 Warnings will be issued elsewhere later. */
2483 unsigned HOST_WIDE_INT dstsize
;
2485 bool nowarn
= warning_suppressed_p (stmt
, OPT_Wstringop_overflow_
);
2487 if (!nowarn
&& compute_builtin_object_size (dst
, 1, &dstsize
))
2489 int cmpdst
= compare_tree_int (len
, dstsize
);
2493 tree fndecl
= gimple_call_fndecl (stmt
);
2495 /* Strncat copies (at most) LEN bytes and always appends
2496 the terminating NUL so the specified bound should never
2497 be equal to (or greater than) the size of the destination.
2498 If it is, the copy could overflow. */
2499 location_t loc
= gimple_location (stmt
);
2500 nowarn
= warning_at (loc
, OPT_Wstringop_overflow_
,
2502 ? G_("%qD specified bound %E equals "
2504 : G_("%qD specified bound %E exceeds "
2505 "destination size %wu"),
2506 fndecl
, len
, dstsize
);
2508 suppress_warning (stmt
, OPT_Wstringop_overflow_
);
2512 if (!nowarn
&& cmpsrc
== 0)
2514 tree fndecl
= gimple_call_fndecl (stmt
);
2515 location_t loc
= gimple_location (stmt
);
2517 /* To avoid possible overflow the specified bound should also
2518 not be equal to the length of the source, even when the size
2519 of the destination is unknown (it's not an uncommon mistake
2520 to specify as the bound to strncpy the length of the source). */
2521 if (warning_at (loc
, OPT_Wstringop_overflow_
,
2522 "%qD specified bound %E equals source length",
2524 suppress_warning (stmt
, OPT_Wstringop_overflow_
);
2527 tree fn
= builtin_decl_implicit (BUILT_IN_STRCAT
);
2529 /* If the replacement _DECL isn't initialized, don't do the
2534 /* Otherwise, emit a call to strcat. */
2535 gcall
*repl
= gimple_build_call (fn
, 2, dst
, src
);
2536 replace_call_with_call_and_fold (gsi
, repl
);
2540 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
2544 gimple_fold_builtin_strncat_chk (gimple_stmt_iterator
*gsi
)
2546 gimple
*stmt
= gsi_stmt (*gsi
);
2547 tree dest
= gimple_call_arg (stmt
, 0);
2548 tree src
= gimple_call_arg (stmt
, 1);
2549 tree len
= gimple_call_arg (stmt
, 2);
2550 tree size
= gimple_call_arg (stmt
, 3);
2555 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
2556 if ((p
&& *p
== '\0')
2557 || integer_zerop (len
))
2559 replace_call_with_value (gsi
, dest
);
2563 if (! tree_fits_uhwi_p (size
))
2566 if (! integer_all_onesp (size
))
2568 tree src_len
= c_strlen (src
, 1);
2570 && tree_fits_uhwi_p (src_len
)
2571 && tree_fits_uhwi_p (len
)
2572 && ! tree_int_cst_lt (len
, src_len
))
2574 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
2575 fn
= builtin_decl_explicit (BUILT_IN_STRCAT_CHK
);
2579 gimple
*repl
= gimple_build_call (fn
, 3, dest
, src
, size
);
2580 replace_call_with_call_and_fold (gsi
, repl
);
2586 /* If __builtin_strncat_chk is used, assume strncat is available. */
2587 fn
= builtin_decl_explicit (BUILT_IN_STRNCAT
);
2591 gimple
*repl
= gimple_build_call (fn
, 3, dest
, src
, len
);
2592 replace_call_with_call_and_fold (gsi
, repl
);
2596 /* Build and append gimple statements to STMTS that would load the first
2597 character of the memory location identified by STR. LOC is the location
2598 of the statement. */
2601 gimple_load_first_char (location_t loc
, tree str
, gimple_seq
*stmts
)
2605 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
2606 tree cst_uchar_ptr_node
2607 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
2608 tree off0
= build_int_cst (cst_uchar_ptr_node
, 0);
2610 tree temp
= fold_build2_loc (loc
, MEM_REF
, cst_uchar_node
, str
, off0
);
2611 gassign
*stmt
= gimple_build_assign (NULL_TREE
, temp
);
2612 var
= create_tmp_reg_or_ssa_name (cst_uchar_node
, stmt
);
2614 gimple_assign_set_lhs (stmt
, var
);
2615 gimple_seq_add_stmt_without_update (stmts
, stmt
);
2620 /* Fold a call to the str{n}{case}cmp builtin pointed by GSI iterator. */
2623 gimple_fold_builtin_string_compare (gimple_stmt_iterator
*gsi
)
2625 gimple
*stmt
= gsi_stmt (*gsi
);
2626 tree callee
= gimple_call_fndecl (stmt
);
2627 enum built_in_function fcode
= DECL_FUNCTION_CODE (callee
);
2629 tree type
= integer_type_node
;
2630 tree str1
= gimple_call_arg (stmt
, 0);
2631 tree str2
= gimple_call_arg (stmt
, 1);
2632 tree lhs
= gimple_call_lhs (stmt
);
2634 tree bound_node
= NULL_TREE
;
2635 unsigned HOST_WIDE_INT bound
= HOST_WIDE_INT_M1U
;
2637 /* Handle strncmp and strncasecmp functions. */
2638 if (gimple_call_num_args (stmt
) == 3)
2640 bound_node
= gimple_call_arg (stmt
, 2);
2641 if (tree_fits_uhwi_p (bound_node
))
2642 bound
= tree_to_uhwi (bound_node
);
2645 /* If the BOUND parameter is zero, return zero. */
2648 replace_call_with_value (gsi
, integer_zero_node
);
2652 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
2653 if (operand_equal_p (str1
, str2
, 0))
2655 replace_call_with_value (gsi
, integer_zero_node
);
2659 /* Initially set to the number of characters, including the terminating
2660 nul if each array has one. LENx == strnlen (Sx, LENx) implies that
2661 the array Sx is not terminated by a nul.
2662 For nul-terminated strings then adjusted to their length so that
2663 LENx == NULPOSx holds. */
2664 unsigned HOST_WIDE_INT len1
= HOST_WIDE_INT_MAX
, len2
= len1
;
2665 const char *p1
= getbyterep (str1
, &len1
);
2666 const char *p2
= getbyterep (str2
, &len2
);
2668 /* The position of the terminating nul character if one exists, otherwise
2669 a value greater than LENx. */
2670 unsigned HOST_WIDE_INT nulpos1
= HOST_WIDE_INT_MAX
, nulpos2
= nulpos1
;
2674 size_t n
= strnlen (p1
, len1
);
2681 size_t n
= strnlen (p2
, len2
);
2686 /* For known strings, return an immediate value. */
2690 bool known_result
= false;
2694 case BUILT_IN_STRCMP
:
2695 case BUILT_IN_STRCMP_EQ
:
2696 if (len1
!= nulpos1
|| len2
!= nulpos2
)
2699 r
= strcmp (p1
, p2
);
2700 known_result
= true;
2703 case BUILT_IN_STRNCMP
:
2704 case BUILT_IN_STRNCMP_EQ
:
2706 if (bound
== HOST_WIDE_INT_M1U
)
2709 /* Reduce the bound to be no more than the length
2710 of the shorter of the two strings, or the sizes
2711 of the unterminated arrays. */
2712 unsigned HOST_WIDE_INT n
= bound
;
2714 if (len1
== nulpos1
&& len1
< n
)
2716 if (len2
== nulpos2
&& len2
< n
)
2719 if (MIN (nulpos1
, nulpos2
) + 1 < n
)
2722 r
= strncmp (p1
, p2
, n
);
2723 known_result
= true;
2726 /* Only handleable situation is where the string are equal (result 0),
2727 which is already handled by operand_equal_p case. */
2728 case BUILT_IN_STRCASECMP
:
2730 case BUILT_IN_STRNCASECMP
:
2732 if (bound
== HOST_WIDE_INT_M1U
)
2734 r
= strncmp (p1
, p2
, bound
);
2736 known_result
= true;
2745 replace_call_with_value (gsi
, build_cmp_result (type
, r
));
2750 bool nonzero_bound
= (bound
>= 1 && bound
< HOST_WIDE_INT_M1U
)
2751 || fcode
== BUILT_IN_STRCMP
2752 || fcode
== BUILT_IN_STRCMP_EQ
2753 || fcode
== BUILT_IN_STRCASECMP
;
2755 location_t loc
= gimple_location (stmt
);
2757 /* If the second arg is "", return *(const unsigned char*)arg1. */
2758 if (p2
&& *p2
== '\0' && nonzero_bound
)
2760 gimple_seq stmts
= NULL
;
2761 tree var
= gimple_load_first_char (loc
, str1
, &stmts
);
2764 stmt
= gimple_build_assign (lhs
, NOP_EXPR
, var
);
2765 gimple_seq_add_stmt_without_update (&stmts
, stmt
);
2768 gsi_replace_with_seq_vops (gsi
, stmts
);
2772 /* If the first arg is "", return -*(const unsigned char*)arg2. */
2773 if (p1
&& *p1
== '\0' && nonzero_bound
)
2775 gimple_seq stmts
= NULL
;
2776 tree var
= gimple_load_first_char (loc
, str2
, &stmts
);
2780 tree c
= create_tmp_reg_or_ssa_name (integer_type_node
);
2781 stmt
= gimple_build_assign (c
, NOP_EXPR
, var
);
2782 gimple_seq_add_stmt_without_update (&stmts
, stmt
);
2784 stmt
= gimple_build_assign (lhs
, NEGATE_EXPR
, c
);
2785 gimple_seq_add_stmt_without_update (&stmts
, stmt
);
2788 gsi_replace_with_seq_vops (gsi
, stmts
);
2792 /* If BOUND is one, return an expression corresponding to
2793 (*(const unsigned char*)arg2 - *(const unsigned char*)arg1). */
2794 if (fcode
== BUILT_IN_STRNCMP
&& bound
== 1)
2796 gimple_seq stmts
= NULL
;
2797 tree temp1
= gimple_load_first_char (loc
, str1
, &stmts
);
2798 tree temp2
= gimple_load_first_char (loc
, str2
, &stmts
);
2802 tree c1
= create_tmp_reg_or_ssa_name (integer_type_node
);
2803 gassign
*convert1
= gimple_build_assign (c1
, NOP_EXPR
, temp1
);
2804 gimple_seq_add_stmt_without_update (&stmts
, convert1
);
2806 tree c2
= create_tmp_reg_or_ssa_name (integer_type_node
);
2807 gassign
*convert2
= gimple_build_assign (c2
, NOP_EXPR
, temp2
);
2808 gimple_seq_add_stmt_without_update (&stmts
, convert2
);
2810 stmt
= gimple_build_assign (lhs
, MINUS_EXPR
, c1
, c2
);
2811 gimple_seq_add_stmt_without_update (&stmts
, stmt
);
2814 gsi_replace_with_seq_vops (gsi
, stmts
);
2818 /* If BOUND is greater than the length of one constant string,
2819 and the other argument is also a nul-terminated string, replace
2820 strncmp with strcmp. */
2821 if (fcode
== BUILT_IN_STRNCMP
2822 && bound
> 0 && bound
< HOST_WIDE_INT_M1U
2823 && ((p2
&& len2
< bound
&& len2
== nulpos2
)
2824 || (p1
&& len1
< bound
&& len1
== nulpos1
)))
2826 tree fn
= builtin_decl_implicit (BUILT_IN_STRCMP
);
2829 gimple
*repl
= gimple_build_call (fn
, 2, str1
, str2
);
2830 replace_call_with_call_and_fold (gsi
, repl
);
2837 /* Fold a call to the memchr pointed by GSI iterator. */
2840 gimple_fold_builtin_memchr (gimple_stmt_iterator
*gsi
)
2842 gimple
*stmt
= gsi_stmt (*gsi
);
2843 tree lhs
= gimple_call_lhs (stmt
);
2844 tree arg1
= gimple_call_arg (stmt
, 0);
2845 tree arg2
= gimple_call_arg (stmt
, 1);
2846 tree len
= gimple_call_arg (stmt
, 2);
2848 /* If the LEN parameter is zero, return zero. */
2849 if (integer_zerop (len
))
2851 replace_call_with_value (gsi
, build_int_cst (ptr_type_node
, 0));
2856 if (TREE_CODE (arg2
) != INTEGER_CST
2857 || !tree_fits_uhwi_p (len
)
2858 || !target_char_cst_p (arg2
, &c
))
2861 unsigned HOST_WIDE_INT length
= tree_to_uhwi (len
);
2862 unsigned HOST_WIDE_INT string_length
;
2863 const char *p1
= getbyterep (arg1
, &string_length
);
2867 const char *r
= (const char *)memchr (p1
, c
, MIN (length
, string_length
));
2870 tree mem_size
, offset_node
;
2871 byte_representation (arg1
, &offset_node
, &mem_size
, NULL
);
2872 unsigned HOST_WIDE_INT offset
= (offset_node
== NULL_TREE
)
2873 ? 0 : tree_to_uhwi (offset_node
);
2874 /* MEM_SIZE is the size of the array the string literal
2876 unsigned HOST_WIDE_INT string_size
= tree_to_uhwi (mem_size
) - offset
;
2877 gcc_checking_assert (string_length
<= string_size
);
2878 if (length
<= string_size
)
2880 replace_call_with_value (gsi
, build_int_cst (ptr_type_node
, 0));
2886 unsigned HOST_WIDE_INT offset
= r
- p1
;
2887 gimple_seq stmts
= NULL
;
2888 if (lhs
!= NULL_TREE
)
2890 tree offset_cst
= build_int_cst (sizetype
, offset
);
2891 gassign
*stmt
= gimple_build_assign (lhs
, POINTER_PLUS_EXPR
,
2893 gimple_seq_add_stmt_without_update (&stmts
, stmt
);
2896 gimple_seq_add_stmt_without_update (&stmts
,
2897 gimple_build_nop ());
2899 gsi_replace_with_seq_vops (gsi
, stmts
);
2907 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
2908 to the call. IGNORE is true if the value returned
2909 by the builtin will be ignored. UNLOCKED is true if this is
2910 actually a call to fputs_unlocked. If LEN is non-NULL, it represents
2911 the known length of the string. Return NULL_TREE if no simplification
2915 gimple_fold_builtin_fputs (gimple_stmt_iterator
*gsi
,
2916 tree arg0
, tree arg1
,
2919 gimple
*stmt
= gsi_stmt (*gsi
);
2921 /* If we're using an unlocked function, assume the other unlocked
2922 functions exist explicitly. */
2923 tree
const fn_fputc
= (unlocked
2924 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED
)
2925 : builtin_decl_implicit (BUILT_IN_FPUTC
));
2926 tree
const fn_fwrite
= (unlocked
2927 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED
)
2928 : builtin_decl_implicit (BUILT_IN_FWRITE
));
2930 /* If the return value is used, don't do the transformation. */
2931 if (gimple_call_lhs (stmt
))
2934 /* Get the length of the string passed to fputs. If the length
2935 can't be determined, punt. */
2936 tree len
= get_maxval_strlen (arg0
, SRK_STRLEN
);
2938 || TREE_CODE (len
) != INTEGER_CST
)
2941 switch (compare_tree_int (len
, 1))
2943 case -1: /* length is 0, delete the call entirely . */
2944 replace_call_with_value (gsi
, integer_zero_node
);
2947 case 0: /* length is 1, call fputc. */
2949 const char *p
= c_getstr (arg0
);
2955 gimple
*repl
= gimple_build_call (fn_fputc
, 2,
2957 (integer_type_node
, p
[0]), arg1
);
2958 replace_call_with_call_and_fold (gsi
, repl
);
2963 case 1: /* length is greater than 1, call fwrite. */
2965 /* If optimizing for size keep fputs. */
2966 if (optimize_function_for_size_p (cfun
))
2968 /* New argument list transforming fputs(string, stream) to
2969 fwrite(string, 1, len, stream). */
2973 gimple
*repl
= gimple_build_call (fn_fwrite
, 4, arg0
,
2974 size_one_node
, len
, arg1
);
2975 replace_call_with_call_and_fold (gsi
, repl
);
2984 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
2985 DEST, SRC, LEN, and SIZE are the arguments to the call.
2986 IGNORE is true if the return value can be ignored. FCODE is the BUILT_IN_*
2987 code of the builtin. If MAXLEN is not NULL, it is maximum length
2988 passed as third argument. */
2991 gimple_fold_builtin_memory_chk (gimple_stmt_iterator
*gsi
,
2992 tree dest
, tree src
, tree len
, tree size
,
2993 enum built_in_function fcode
)
2995 gimple
*stmt
= gsi_stmt (*gsi
);
2996 location_t loc
= gimple_location (stmt
);
2997 bool ignore
= gimple_call_lhs (stmt
) == NULL_TREE
;
3000 /* If SRC and DEST are the same (and not volatile), return DEST
3001 (resp. DEST+LEN for __mempcpy_chk). */
3002 if (fcode
!= BUILT_IN_MEMSET_CHK
&& operand_equal_p (src
, dest
, 0))
3004 if (fcode
!= BUILT_IN_MEMPCPY_CHK
)
3006 replace_call_with_value (gsi
, dest
);
3011 gimple_seq stmts
= NULL
;
3012 len
= gimple_convert_to_ptrofftype (&stmts
, loc
, len
);
3013 tree temp
= gimple_build (&stmts
, loc
, POINTER_PLUS_EXPR
,
3014 TREE_TYPE (dest
), dest
, len
);
3015 gsi_insert_seq_before (gsi
, stmts
, GSI_SAME_STMT
);
3016 replace_call_with_value (gsi
, temp
);
3021 if (! tree_fits_uhwi_p (size
))
3024 tree maxlen
= get_maxval_strlen (len
, SRK_INT_VALUE
);
3025 if (! integer_all_onesp (size
))
3027 if (! tree_fits_uhwi_p (len
))
3029 /* If LEN is not constant, try MAXLEN too.
3030 For MAXLEN only allow optimizing into non-_ocs function
3031 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
3032 if (maxlen
== NULL_TREE
|| ! tree_fits_uhwi_p (maxlen
))
3034 if (fcode
== BUILT_IN_MEMPCPY_CHK
&& ignore
)
3036 /* (void) __mempcpy_chk () can be optimized into
3037 (void) __memcpy_chk (). */
3038 fn
= builtin_decl_explicit (BUILT_IN_MEMCPY_CHK
);
3042 gimple
*repl
= gimple_build_call (fn
, 4, dest
, src
, len
, size
);
3043 replace_call_with_call_and_fold (gsi
, repl
);
3052 if (tree_int_cst_lt (size
, maxlen
))
3057 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
3058 mem{cpy,pcpy,move,set} is available. */
3061 case BUILT_IN_MEMCPY_CHK
:
3062 fn
= builtin_decl_explicit (BUILT_IN_MEMCPY
);
3064 case BUILT_IN_MEMPCPY_CHK
:
3065 fn
= builtin_decl_explicit (BUILT_IN_MEMPCPY
);
3067 case BUILT_IN_MEMMOVE_CHK
:
3068 fn
= builtin_decl_explicit (BUILT_IN_MEMMOVE
);
3070 case BUILT_IN_MEMSET_CHK
:
3071 fn
= builtin_decl_explicit (BUILT_IN_MEMSET
);
3080 gimple
*repl
= gimple_build_call (fn
, 3, dest
, src
, len
);
3081 replace_call_with_call_and_fold (gsi
, repl
);
3085 /* Fold a call to the __st[rp]cpy_chk builtin.
3086 DEST, SRC, and SIZE are the arguments to the call.
3087 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
3088 code of the builtin. If MAXLEN is not NULL, it is maximum length of
3089 strings passed as second argument. */
3092 gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator
*gsi
,
3094 tree src
, tree size
,
3095 enum built_in_function fcode
)
3097 gimple
*stmt
= gsi_stmt (*gsi
);
3098 location_t loc
= gimple_location (stmt
);
3099 bool ignore
= gimple_call_lhs (stmt
) == NULL_TREE
;
3102 /* If SRC and DEST are the same (and not volatile), return DEST. */
3103 if (fcode
== BUILT_IN_STRCPY_CHK
&& operand_equal_p (src
, dest
, 0))
3105 /* Issue -Wrestrict unless the pointers are null (those do
3106 not point to objects and so do not indicate an overlap;
3107 such calls could be the result of sanitization and jump
3109 if (!integer_zerop (dest
)
3110 && !warning_suppressed_p (stmt
, OPT_Wrestrict
))
3112 tree func
= gimple_call_fndecl (stmt
);
3114 warning_at (loc
, OPT_Wrestrict
,
3115 "%qD source argument is the same as destination",
3119 replace_call_with_value (gsi
, dest
);
3123 if (! tree_fits_uhwi_p (size
))
3126 tree maxlen
= get_maxval_strlen (src
, SRK_STRLENMAX
);
3127 if (! integer_all_onesp (size
))
3129 len
= c_strlen (src
, 1);
3130 if (! len
|| ! tree_fits_uhwi_p (len
))
3132 /* If LEN is not constant, try MAXLEN too.
3133 For MAXLEN only allow optimizing into non-_ocs function
3134 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
3135 if (maxlen
== NULL_TREE
|| ! tree_fits_uhwi_p (maxlen
))
3137 if (fcode
== BUILT_IN_STPCPY_CHK
)
3142 /* If return value of __stpcpy_chk is ignored,
3143 optimize into __strcpy_chk. */
3144 fn
= builtin_decl_explicit (BUILT_IN_STRCPY_CHK
);
3148 gimple
*repl
= gimple_build_call (fn
, 3, dest
, src
, size
);
3149 replace_call_with_call_and_fold (gsi
, repl
);
3153 if (! len
|| TREE_SIDE_EFFECTS (len
))
3156 /* If c_strlen returned something, but not a constant,
3157 transform __strcpy_chk into __memcpy_chk. */
3158 fn
= builtin_decl_explicit (BUILT_IN_MEMCPY_CHK
);
3162 gimple_seq stmts
= NULL
;
3163 len
= force_gimple_operand (len
, &stmts
, true, NULL_TREE
);
3164 len
= gimple_convert (&stmts
, loc
, size_type_node
, len
);
3165 len
= gimple_build (&stmts
, loc
, PLUS_EXPR
, size_type_node
, len
,
3166 build_int_cst (size_type_node
, 1));
3167 gsi_insert_seq_before (gsi
, stmts
, GSI_SAME_STMT
);
3168 gimple
*repl
= gimple_build_call (fn
, 4, dest
, src
, len
, size
);
3169 replace_call_with_call_and_fold (gsi
, repl
);
3176 if (! tree_int_cst_lt (maxlen
, size
))
3180 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
3181 fn
= builtin_decl_explicit (fcode
== BUILT_IN_STPCPY_CHK
3182 ? BUILT_IN_STPCPY
: BUILT_IN_STRCPY
);
3186 gimple
*repl
= gimple_build_call (fn
, 2, dest
, src
);
3187 replace_call_with_call_and_fold (gsi
, repl
);
3191 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
3192 are the arguments to the call. If MAXLEN is not NULL, it is maximum
3193 length passed as third argument. IGNORE is true if return value can be
3194 ignored. FCODE is the BUILT_IN_* code of the builtin. */
3197 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator
*gsi
,
3198 tree dest
, tree src
,
3199 tree len
, tree size
,
3200 enum built_in_function fcode
)
3202 gimple
*stmt
= gsi_stmt (*gsi
);
3203 bool ignore
= gimple_call_lhs (stmt
) == NULL_TREE
;
3206 if (fcode
== BUILT_IN_STPNCPY_CHK
&& ignore
)
3208 /* If return value of __stpncpy_chk is ignored,
3209 optimize into __strncpy_chk. */
3210 fn
= builtin_decl_explicit (BUILT_IN_STRNCPY_CHK
);
3213 gimple
*repl
= gimple_build_call (fn
, 4, dest
, src
, len
, size
);
3214 replace_call_with_call_and_fold (gsi
, repl
);
3219 if (! tree_fits_uhwi_p (size
))
3222 tree maxlen
= get_maxval_strlen (len
, SRK_INT_VALUE
);
3223 if (! integer_all_onesp (size
))
3225 if (! tree_fits_uhwi_p (len
))
3227 /* If LEN is not constant, try MAXLEN too.
3228 For MAXLEN only allow optimizing into non-_ocs function
3229 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
3230 if (maxlen
== NULL_TREE
|| ! tree_fits_uhwi_p (maxlen
))
3236 if (tree_int_cst_lt (size
, maxlen
))
3240 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
3241 fn
= builtin_decl_explicit (fcode
== BUILT_IN_STPNCPY_CHK
3242 ? BUILT_IN_STPNCPY
: BUILT_IN_STRNCPY
);
3246 gimple
*repl
= gimple_build_call (fn
, 3, dest
, src
, len
);
3247 replace_call_with_call_and_fold (gsi
, repl
);
3251 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
3252 Return NULL_TREE if no simplification can be made. */
3255 gimple_fold_builtin_stpcpy (gimple_stmt_iterator
*gsi
)
3257 gcall
*stmt
= as_a
<gcall
*> (gsi_stmt (*gsi
));
3258 location_t loc
= gimple_location (stmt
);
3259 tree dest
= gimple_call_arg (stmt
, 0);
3260 tree src
= gimple_call_arg (stmt
, 1);
3263 /* If the result is unused, replace stpcpy with strcpy. */
3264 if (gimple_call_lhs (stmt
) == NULL_TREE
)
3266 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
3269 gimple_call_set_fndecl (stmt
, fn
);
3274 /* Set to non-null if ARG refers to an unterminated array. */
3275 c_strlen_data data
= { };
3276 /* The size of the unterminated array if SRC referes to one. */
3278 /* True if the size is exact/constant, false if it's the lower bound
3281 tree len
= c_strlen (src
, 1, &data
, 1);
3283 || TREE_CODE (len
) != INTEGER_CST
)
3285 data
.decl
= unterminated_array (src
, &size
, &exact
);
3292 /* Avoid folding calls with unterminated arrays. */
3293 if (!warning_suppressed_p (stmt
, OPT_Wstringop_overread
))
3294 warn_string_no_nul (loc
, stmt
, "stpcpy", src
, data
.decl
, size
,
3296 suppress_warning (stmt
, OPT_Wstringop_overread
);
3300 if (optimize_function_for_size_p (cfun
)
3301 /* If length is zero it's small enough. */
3302 && !integer_zerop (len
))
3305 /* If the source has a known length replace stpcpy with memcpy. */
3306 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
3310 gimple_seq stmts
= NULL
;
3311 tree tem
= gimple_convert (&stmts
, loc
, size_type_node
, len
);
3312 lenp1
= gimple_build (&stmts
, loc
, PLUS_EXPR
, size_type_node
,
3313 tem
, build_int_cst (size_type_node
, 1));
3314 gsi_insert_seq_before (gsi
, stmts
, GSI_SAME_STMT
);
3315 gcall
*repl
= gimple_build_call (fn
, 3, dest
, src
, lenp1
);
3316 gimple_move_vops (repl
, stmt
);
3317 gsi_insert_before (gsi
, repl
, GSI_SAME_STMT
);
3318 /* Replace the result with dest + len. */
3320 tem
= gimple_convert (&stmts
, loc
, sizetype
, len
);
3321 gsi_insert_seq_before (gsi
, stmts
, GSI_SAME_STMT
);
3322 gassign
*ret
= gimple_build_assign (gimple_call_lhs (stmt
),
3323 POINTER_PLUS_EXPR
, dest
, tem
);
3324 gsi_replace (gsi
, ret
, false);
3325 /* Finally fold the memcpy call. */
3326 gimple_stmt_iterator gsi2
= *gsi
;
3332 /* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS. Return
3333 NULL_TREE if a normal call should be emitted rather than expanding
3334 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
3335 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
3336 passed as second argument. */
3339 gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator
*gsi
,
3340 enum built_in_function fcode
)
3342 gcall
*stmt
= as_a
<gcall
*> (gsi_stmt (*gsi
));
3343 tree dest
, size
, len
, fn
, fmt
, flag
;
3344 const char *fmt_str
;
3346 /* Verify the required arguments in the original call. */
3347 if (gimple_call_num_args (stmt
) < 5)
3350 dest
= gimple_call_arg (stmt
, 0);
3351 len
= gimple_call_arg (stmt
, 1);
3352 flag
= gimple_call_arg (stmt
, 2);
3353 size
= gimple_call_arg (stmt
, 3);
3354 fmt
= gimple_call_arg (stmt
, 4);
3356 if (! tree_fits_uhwi_p (size
))
3359 if (! integer_all_onesp (size
))
3361 tree maxlen
= get_maxval_strlen (len
, SRK_INT_VALUE
);
3362 if (! tree_fits_uhwi_p (len
))
3364 /* If LEN is not constant, try MAXLEN too.
3365 For MAXLEN only allow optimizing into non-_ocs function
3366 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
3367 if (maxlen
== NULL_TREE
|| ! tree_fits_uhwi_p (maxlen
))
3373 if (tree_int_cst_lt (size
, maxlen
))
3377 if (!init_target_chars ())
3380 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
3381 or if format doesn't contain % chars or is "%s". */
3382 if (! integer_zerop (flag
))
3384 fmt_str
= c_getstr (fmt
);
3385 if (fmt_str
== NULL
)
3387 if (strchr (fmt_str
, target_percent
) != NULL
3388 && strcmp (fmt_str
, target_percent_s
))
3392 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
3394 fn
= builtin_decl_explicit (fcode
== BUILT_IN_VSNPRINTF_CHK
3395 ? BUILT_IN_VSNPRINTF
: BUILT_IN_SNPRINTF
);
3399 /* Replace the called function and the first 5 argument by 3 retaining
3400 trailing varargs. */
3401 gimple_call_set_fndecl (stmt
, fn
);
3402 gimple_call_set_fntype (stmt
, TREE_TYPE (fn
));
3403 gimple_call_set_arg (stmt
, 0, dest
);
3404 gimple_call_set_arg (stmt
, 1, len
);
3405 gimple_call_set_arg (stmt
, 2, fmt
);
3406 for (unsigned i
= 3; i
< gimple_call_num_args (stmt
) - 2; ++i
)
3407 gimple_call_set_arg (stmt
, i
, gimple_call_arg (stmt
, i
+ 2));
3408 gimple_set_num_ops (stmt
, gimple_num_ops (stmt
) - 2);
3413 /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
3414 Return NULL_TREE if a normal call should be emitted rather than
3415 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
3416 or BUILT_IN_VSPRINTF_CHK. */
3419 gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator
*gsi
,
3420 enum built_in_function fcode
)
3422 gcall
*stmt
= as_a
<gcall
*> (gsi_stmt (*gsi
));
3423 tree dest
, size
, len
, fn
, fmt
, flag
;
3424 const char *fmt_str
;
3425 unsigned nargs
= gimple_call_num_args (stmt
);
3427 /* Verify the required arguments in the original call. */
3430 dest
= gimple_call_arg (stmt
, 0);
3431 flag
= gimple_call_arg (stmt
, 1);
3432 size
= gimple_call_arg (stmt
, 2);
3433 fmt
= gimple_call_arg (stmt
, 3);
3435 if (! tree_fits_uhwi_p (size
))
3440 if (!init_target_chars ())
3443 /* Check whether the format is a literal string constant. */
3444 fmt_str
= c_getstr (fmt
);
3445 if (fmt_str
!= NULL
)
3447 /* If the format doesn't contain % args or %%, we know the size. */
3448 if (strchr (fmt_str
, target_percent
) == 0)
3450 if (fcode
!= BUILT_IN_SPRINTF_CHK
|| nargs
== 4)
3451 len
= build_int_cstu (size_type_node
, strlen (fmt_str
));
3453 /* If the format is "%s" and first ... argument is a string literal,
3454 we know the size too. */
3455 else if (fcode
== BUILT_IN_SPRINTF_CHK
3456 && strcmp (fmt_str
, target_percent_s
) == 0)
3462 arg
= gimple_call_arg (stmt
, 4);
3463 if (POINTER_TYPE_P (TREE_TYPE (arg
)))
3465 len
= c_strlen (arg
, 1);
3466 if (! len
|| ! tree_fits_uhwi_p (len
))
3473 if (! integer_all_onesp (size
))
3475 if (! len
|| ! tree_int_cst_lt (len
, size
))
3479 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
3480 or if format doesn't contain % chars or is "%s". */
3481 if (! integer_zerop (flag
))
3483 if (fmt_str
== NULL
)
3485 if (strchr (fmt_str
, target_percent
) != NULL
3486 && strcmp (fmt_str
, target_percent_s
))
3490 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
3491 fn
= builtin_decl_explicit (fcode
== BUILT_IN_VSPRINTF_CHK
3492 ? BUILT_IN_VSPRINTF
: BUILT_IN_SPRINTF
);
3496 /* Replace the called function and the first 4 argument by 2 retaining
3497 trailing varargs. */
3498 gimple_call_set_fndecl (stmt
, fn
);
3499 gimple_call_set_fntype (stmt
, TREE_TYPE (fn
));
3500 gimple_call_set_arg (stmt
, 0, dest
);
3501 gimple_call_set_arg (stmt
, 1, fmt
);
3502 for (unsigned i
= 2; i
< gimple_call_num_args (stmt
) - 2; ++i
)
3503 gimple_call_set_arg (stmt
, i
, gimple_call_arg (stmt
, i
+ 2));
3504 gimple_set_num_ops (stmt
, gimple_num_ops (stmt
) - 2);
3509 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
3510 ORIG may be null if this is a 2-argument call. We don't attempt to
3511 simplify calls with more than 3 arguments.
3513 Return true if simplification was possible, otherwise false. */
3516 gimple_fold_builtin_sprintf (gimple_stmt_iterator
*gsi
)
3518 gimple
*stmt
= gsi_stmt (*gsi
);
3520 /* Verify the required arguments in the original call. We deal with two
3521 types of sprintf() calls: 'sprintf (str, fmt)' and
3522 'sprintf (dest, "%s", orig)'. */
3523 if (gimple_call_num_args (stmt
) > 3)
3526 tree orig
= NULL_TREE
;
3527 if (gimple_call_num_args (stmt
) == 3)
3528 orig
= gimple_call_arg (stmt
, 2);
3530 /* Check whether the format is a literal string constant. */
3531 tree fmt
= gimple_call_arg (stmt
, 1);
3532 const char *fmt_str
= c_getstr (fmt
);
3533 if (fmt_str
== NULL
)
3536 tree dest
= gimple_call_arg (stmt
, 0);
3538 if (!init_target_chars ())
3541 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
3545 /* If the format doesn't contain % args or %%, use strcpy. */
3546 if (strchr (fmt_str
, target_percent
) == NULL
)
3548 /* Don't optimize sprintf (buf, "abc", ptr++). */
3552 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
3553 'format' is known to contain no % formats. */
3554 gimple_seq stmts
= NULL
;
3555 gimple
*repl
= gimple_build_call (fn
, 2, dest
, fmt
);
3557 /* Propagate the NO_WARNING bit to avoid issuing the same
3558 warning more than once. */
3559 copy_warning (repl
, stmt
);
3561 gimple_seq_add_stmt_without_update (&stmts
, repl
);
3562 if (tree lhs
= gimple_call_lhs (stmt
))
3564 repl
= gimple_build_assign (lhs
, build_int_cst (TREE_TYPE (lhs
),
3566 gimple_seq_add_stmt_without_update (&stmts
, repl
);
3567 gsi_replace_with_seq_vops (gsi
, stmts
);
3568 /* gsi now points at the assignment to the lhs, get a
3569 stmt iterator to the memcpy call.
3570 ??? We can't use gsi_for_stmt as that doesn't work when the
3571 CFG isn't built yet. */
3572 gimple_stmt_iterator gsi2
= *gsi
;
3578 gsi_replace_with_seq_vops (gsi
, stmts
);
3584 /* If the format is "%s", use strcpy if the result isn't used. */
3585 else if (fmt_str
&& strcmp (fmt_str
, target_percent_s
) == 0)
3587 /* Don't crash on sprintf (str1, "%s"). */
3591 /* Don't fold calls with source arguments of invalid (nonpointer)
3593 if (!POINTER_TYPE_P (TREE_TYPE (orig
)))
3596 tree orig_len
= NULL_TREE
;
3597 if (gimple_call_lhs (stmt
))
3599 orig_len
= get_maxval_strlen (orig
, SRK_STRLEN
);
3604 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
3605 gimple_seq stmts
= NULL
;
3606 gimple
*repl
= gimple_build_call (fn
, 2, dest
, orig
);
3608 /* Propagate the NO_WARNING bit to avoid issuing the same
3609 warning more than once. */
3610 copy_warning (repl
, stmt
);
3612 gimple_seq_add_stmt_without_update (&stmts
, repl
);
3613 if (tree lhs
= gimple_call_lhs (stmt
))
3615 if (!useless_type_conversion_p (TREE_TYPE (lhs
),
3616 TREE_TYPE (orig_len
)))
3617 orig_len
= fold_convert (TREE_TYPE (lhs
), orig_len
);
3618 repl
= gimple_build_assign (lhs
, orig_len
);
3619 gimple_seq_add_stmt_without_update (&stmts
, repl
);
3620 gsi_replace_with_seq_vops (gsi
, stmts
);
3621 /* gsi now points at the assignment to the lhs, get a
3622 stmt iterator to the memcpy call.
3623 ??? We can't use gsi_for_stmt as that doesn't work when the
3624 CFG isn't built yet. */
3625 gimple_stmt_iterator gsi2
= *gsi
;
3631 gsi_replace_with_seq_vops (gsi
, stmts
);
3639 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
3640 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
3641 attempt to simplify calls with more than 4 arguments.
3643 Return true if simplification was possible, otherwise false. */
3646 gimple_fold_builtin_snprintf (gimple_stmt_iterator
*gsi
)
3648 gcall
*stmt
= as_a
<gcall
*> (gsi_stmt (*gsi
));
3649 tree dest
= gimple_call_arg (stmt
, 0);
3650 tree destsize
= gimple_call_arg (stmt
, 1);
3651 tree fmt
= gimple_call_arg (stmt
, 2);
3652 tree orig
= NULL_TREE
;
3653 const char *fmt_str
= NULL
;
3655 if (gimple_call_num_args (stmt
) > 4)
3658 if (gimple_call_num_args (stmt
) == 4)
3659 orig
= gimple_call_arg (stmt
, 3);
3661 if (!tree_fits_uhwi_p (destsize
))
3663 unsigned HOST_WIDE_INT destlen
= tree_to_uhwi (destsize
);
3665 /* Check whether the format is a literal string constant. */
3666 fmt_str
= c_getstr (fmt
);
3667 if (fmt_str
== NULL
)
3670 if (!init_target_chars ())
3673 /* If the format doesn't contain % args or %%, use strcpy. */
3674 if (strchr (fmt_str
, target_percent
) == NULL
)
3676 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
3680 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
3684 /* We could expand this as
3685 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
3687 memcpy (str, fmt_with_nul_at_cstm1, cst);
3688 but in the former case that might increase code size
3689 and in the latter case grow .rodata section too much.
3691 size_t len
= strlen (fmt_str
);
3695 gimple_seq stmts
= NULL
;
3696 gimple
*repl
= gimple_build_call (fn
, 2, dest
, fmt
);
3697 gimple_seq_add_stmt_without_update (&stmts
, repl
);
3698 if (tree lhs
= gimple_call_lhs (stmt
))
3700 repl
= gimple_build_assign (lhs
,
3701 build_int_cst (TREE_TYPE (lhs
), len
));
3702 gimple_seq_add_stmt_without_update (&stmts
, repl
);
3703 gsi_replace_with_seq_vops (gsi
, stmts
);
3704 /* gsi now points at the assignment to the lhs, get a
3705 stmt iterator to the memcpy call.
3706 ??? We can't use gsi_for_stmt as that doesn't work when the
3707 CFG isn't built yet. */
3708 gimple_stmt_iterator gsi2
= *gsi
;
3714 gsi_replace_with_seq_vops (gsi
, stmts
);
3720 /* If the format is "%s", use strcpy if the result isn't used. */
3721 else if (fmt_str
&& strcmp (fmt_str
, target_percent_s
) == 0)
3723 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
3727 /* Don't crash on snprintf (str1, cst, "%s"). */
3731 tree orig_len
= get_maxval_strlen (orig
, SRK_STRLEN
);
3732 if (!orig_len
|| TREE_CODE (orig_len
) != INTEGER_CST
)
3735 /* We could expand this as
3736 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
3738 memcpy (str1, str2_with_nul_at_cstm1, cst);
3739 but in the former case that might increase code size
3740 and in the latter case grow .rodata section too much.
3742 if (compare_tree_int (orig_len
, destlen
) >= 0)
3745 /* Convert snprintf (str1, cst, "%s", str2) into
3746 strcpy (str1, str2) if strlen (str2) < cst. */
3747 gimple_seq stmts
= NULL
;
3748 gimple
*repl
= gimple_build_call (fn
, 2, dest
, orig
);
3749 gimple_seq_add_stmt_without_update (&stmts
, repl
);
3750 if (tree lhs
= gimple_call_lhs (stmt
))
3752 if (!useless_type_conversion_p (TREE_TYPE (lhs
),
3753 TREE_TYPE (orig_len
)))
3754 orig_len
= fold_convert (TREE_TYPE (lhs
), orig_len
);
3755 repl
= gimple_build_assign (lhs
, orig_len
);
3756 gimple_seq_add_stmt_without_update (&stmts
, repl
);
3757 gsi_replace_with_seq_vops (gsi
, stmts
);
3758 /* gsi now points at the assignment to the lhs, get a
3759 stmt iterator to the memcpy call.
3760 ??? We can't use gsi_for_stmt as that doesn't work when the
3761 CFG isn't built yet. */
3762 gimple_stmt_iterator gsi2
= *gsi
;
3768 gsi_replace_with_seq_vops (gsi
, stmts
);
3776 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
3777 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
3778 more than 3 arguments, and ARG may be null in the 2-argument case.
3780 Return NULL_TREE if no simplification was possible, otherwise return the
3781 simplified form of the call as a tree. FCODE is the BUILT_IN_*
3782 code of the function to be simplified. */
3785 gimple_fold_builtin_fprintf (gimple_stmt_iterator
*gsi
,
3786 tree fp
, tree fmt
, tree arg
,
3787 enum built_in_function fcode
)
3789 gcall
*stmt
= as_a
<gcall
*> (gsi_stmt (*gsi
));
3790 tree fn_fputc
, fn_fputs
;
3791 const char *fmt_str
= NULL
;
3793 /* If the return value is used, don't do the transformation. */
3794 if (gimple_call_lhs (stmt
) != NULL_TREE
)
3797 /* Check whether the format is a literal string constant. */
3798 fmt_str
= c_getstr (fmt
);
3799 if (fmt_str
== NULL
)
3802 if (fcode
== BUILT_IN_FPRINTF_UNLOCKED
)
3804 /* If we're using an unlocked function, assume the other
3805 unlocked functions exist explicitly. */
3806 fn_fputc
= builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED
);
3807 fn_fputs
= builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED
);
3811 fn_fputc
= builtin_decl_implicit (BUILT_IN_FPUTC
);
3812 fn_fputs
= builtin_decl_implicit (BUILT_IN_FPUTS
);
3815 if (!init_target_chars ())
3818 /* If the format doesn't contain % args or %%, use strcpy. */
3819 if (strchr (fmt_str
, target_percent
) == NULL
)
3821 if (fcode
!= BUILT_IN_VFPRINTF
&& fcode
!= BUILT_IN_VFPRINTF_CHK
3825 /* If the format specifier was "", fprintf does nothing. */
3826 if (fmt_str
[0] == '\0')
3828 replace_call_with_value (gsi
, NULL_TREE
);
3832 /* When "string" doesn't contain %, replace all cases of
3833 fprintf (fp, string) with fputs (string, fp). The fputs
3834 builtin will take care of special cases like length == 1. */
3837 gcall
*repl
= gimple_build_call (fn_fputs
, 2, fmt
, fp
);
3838 replace_call_with_call_and_fold (gsi
, repl
);
3843 /* The other optimizations can be done only on the non-va_list variants. */
3844 else if (fcode
== BUILT_IN_VFPRINTF
|| fcode
== BUILT_IN_VFPRINTF_CHK
)
3847 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
3848 else if (strcmp (fmt_str
, target_percent_s
) == 0)
3850 if (!arg
|| ! POINTER_TYPE_P (TREE_TYPE (arg
)))
3854 gcall
*repl
= gimple_build_call (fn_fputs
, 2, arg
, fp
);
3855 replace_call_with_call_and_fold (gsi
, repl
);
3860 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
3861 else if (strcmp (fmt_str
, target_percent_c
) == 0)
3864 || ! useless_type_conversion_p (integer_type_node
, TREE_TYPE (arg
)))
3868 gcall
*repl
= gimple_build_call (fn_fputc
, 2, arg
, fp
);
3869 replace_call_with_call_and_fold (gsi
, repl
);
3877 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
3878 FMT and ARG are the arguments to the call; we don't fold cases with
3879 more than 2 arguments, and ARG may be null if this is a 1-argument case.
3881 Return NULL_TREE if no simplification was possible, otherwise return the
3882 simplified form of the call as a tree. FCODE is the BUILT_IN_*
3883 code of the function to be simplified. */
3886 gimple_fold_builtin_printf (gimple_stmt_iterator
*gsi
, tree fmt
,
3887 tree arg
, enum built_in_function fcode
)
3889 gcall
*stmt
= as_a
<gcall
*> (gsi_stmt (*gsi
));
3890 tree fn_putchar
, fn_puts
, newarg
;
3891 const char *fmt_str
= NULL
;
3893 /* If the return value is used, don't do the transformation. */
3894 if (gimple_call_lhs (stmt
) != NULL_TREE
)
3897 /* Check whether the format is a literal string constant. */
3898 fmt_str
= c_getstr (fmt
);
3899 if (fmt_str
== NULL
)
3902 if (fcode
== BUILT_IN_PRINTF_UNLOCKED
)
3904 /* If we're using an unlocked function, assume the other
3905 unlocked functions exist explicitly. */
3906 fn_putchar
= builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED
);
3907 fn_puts
= builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED
);
3911 fn_putchar
= builtin_decl_implicit (BUILT_IN_PUTCHAR
);
3912 fn_puts
= builtin_decl_implicit (BUILT_IN_PUTS
);
3915 if (!init_target_chars ())
3918 if (strcmp (fmt_str
, target_percent_s
) == 0
3919 || strchr (fmt_str
, target_percent
) == NULL
)
3923 if (strcmp (fmt_str
, target_percent_s
) == 0)
3925 if (fcode
== BUILT_IN_VPRINTF
|| fcode
== BUILT_IN_VPRINTF_CHK
)
3928 if (!arg
|| ! POINTER_TYPE_P (TREE_TYPE (arg
)))
3931 str
= c_getstr (arg
);
3937 /* The format specifier doesn't contain any '%' characters. */
3938 if (fcode
!= BUILT_IN_VPRINTF
&& fcode
!= BUILT_IN_VPRINTF_CHK
3944 /* If the string was "", printf does nothing. */
3947 replace_call_with_value (gsi
, NULL_TREE
);
3951 /* If the string has length of 1, call putchar. */
3954 /* Given printf("c"), (where c is any one character,)
3955 convert "c"[0] to an int and pass that to the replacement
3957 newarg
= build_int_cst (integer_type_node
, str
[0]);
3960 gcall
*repl
= gimple_build_call (fn_putchar
, 1, newarg
);
3961 replace_call_with_call_and_fold (gsi
, repl
);
3967 /* If the string was "string\n", call puts("string"). */
3968 size_t len
= strlen (str
);
3969 if ((unsigned char)str
[len
- 1] == target_newline
3970 && (size_t) (int) len
== len
3975 /* Create a NUL-terminated string that's one char shorter
3976 than the original, stripping off the trailing '\n'. */
3977 newstr
= xstrdup (str
);
3978 newstr
[len
- 1] = '\0';
3979 newarg
= build_string_literal (len
, newstr
);
3983 gcall
*repl
= gimple_build_call (fn_puts
, 1, newarg
);
3984 replace_call_with_call_and_fold (gsi
, repl
);
3989 /* We'd like to arrange to call fputs(string,stdout) here,
3990 but we need stdout and don't have a way to get it yet. */
3995 /* The other optimizations can be done only on the non-va_list variants. */
3996 else if (fcode
== BUILT_IN_VPRINTF
|| fcode
== BUILT_IN_VPRINTF_CHK
)
3999 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
4000 else if (strcmp (fmt_str
, target_percent_s_newline
) == 0)
4002 if (!arg
|| ! POINTER_TYPE_P (TREE_TYPE (arg
)))
4006 gcall
*repl
= gimple_build_call (fn_puts
, 1, arg
);
4007 replace_call_with_call_and_fold (gsi
, repl
);
4012 /* If the format specifier was "%c", call __builtin_putchar(arg). */
4013 else if (strcmp (fmt_str
, target_percent_c
) == 0)
4015 if (!arg
|| ! useless_type_conversion_p (integer_type_node
,
4020 gcall
*repl
= gimple_build_call (fn_putchar
, 1, arg
);
4021 replace_call_with_call_and_fold (gsi
, repl
);
4031 /* Fold a call to __builtin_strlen with known length LEN. */
4034 gimple_fold_builtin_strlen (gimple_stmt_iterator
*gsi
)
4036 gimple
*stmt
= gsi_stmt (*gsi
);
4037 tree arg
= gimple_call_arg (stmt
, 0);
4042 c_strlen_data lendata
= { };
4043 if (get_range_strlen (arg
, &lendata
, /* eltsize = */ 1)
4045 && lendata
.minlen
&& TREE_CODE (lendata
.minlen
) == INTEGER_CST
4046 && lendata
.maxlen
&& TREE_CODE (lendata
.maxlen
) == INTEGER_CST
)
4048 /* The range of lengths refers to either a single constant
4049 string or to the longest and shortest constant string
4050 referenced by the argument of the strlen() call, or to
4051 the strings that can possibly be stored in the arrays
4052 the argument refers to. */
4053 minlen
= wi::to_wide (lendata
.minlen
);
4054 maxlen
= wi::to_wide (lendata
.maxlen
);
4058 unsigned prec
= TYPE_PRECISION (sizetype
);
4060 minlen
= wi::shwi (0, prec
);
4061 maxlen
= wi::to_wide (max_object_size (), prec
) - 2;
4064 if (minlen
== maxlen
)
4066 /* Fold the strlen call to a constant. */
4067 tree type
= TREE_TYPE (lendata
.minlen
);
4068 tree len
= force_gimple_operand_gsi (gsi
,
4069 wide_int_to_tree (type
, minlen
),
4070 true, NULL
, true, GSI_SAME_STMT
);
4071 replace_call_with_value (gsi
, len
);
4075 /* Set the strlen() range to [0, MAXLEN]. */
4076 if (tree lhs
= gimple_call_lhs (stmt
))
4077 set_strlen_range (lhs
, minlen
, maxlen
);
4082 /* Fold a call to __builtin_acc_on_device. */
4085 gimple_fold_builtin_acc_on_device (gimple_stmt_iterator
*gsi
, tree arg0
)
4087 /* Defer folding until we know which compiler we're in. */
4088 if (symtab
->state
!= EXPANSION
)
4091 unsigned val_host
= GOMP_DEVICE_HOST
;
4092 unsigned val_dev
= GOMP_DEVICE_NONE
;
4094 #ifdef ACCEL_COMPILER
4095 val_host
= GOMP_DEVICE_NOT_HOST
;
4096 val_dev
= ACCEL_COMPILER_acc_device
;
4099 location_t loc
= gimple_location (gsi_stmt (*gsi
));
4101 tree host_eq
= make_ssa_name (boolean_type_node
);
4102 gimple
*host_ass
= gimple_build_assign
4103 (host_eq
, EQ_EXPR
, arg0
, build_int_cst (TREE_TYPE (arg0
), val_host
));
4104 gimple_set_location (host_ass
, loc
);
4105 gsi_insert_before (gsi
, host_ass
, GSI_SAME_STMT
);
4107 tree dev_eq
= make_ssa_name (boolean_type_node
);
4108 gimple
*dev_ass
= gimple_build_assign
4109 (dev_eq
, EQ_EXPR
, arg0
, build_int_cst (TREE_TYPE (arg0
), val_dev
));
4110 gimple_set_location (dev_ass
, loc
);
4111 gsi_insert_before (gsi
, dev_ass
, GSI_SAME_STMT
);
4113 tree result
= make_ssa_name (boolean_type_node
);
4114 gimple
*result_ass
= gimple_build_assign
4115 (result
, BIT_IOR_EXPR
, host_eq
, dev_eq
);
4116 gimple_set_location (result_ass
, loc
);
4117 gsi_insert_before (gsi
, result_ass
, GSI_SAME_STMT
);
4119 replace_call_with_value (gsi
, result
);
4124 /* Fold realloc (0, n) -> malloc (n). */
4127 gimple_fold_builtin_realloc (gimple_stmt_iterator
*gsi
)
4129 gimple
*stmt
= gsi_stmt (*gsi
);
4130 tree arg
= gimple_call_arg (stmt
, 0);
4131 tree size
= gimple_call_arg (stmt
, 1);
4133 if (operand_equal_p (arg
, null_pointer_node
, 0))
4135 tree fn_malloc
= builtin_decl_implicit (BUILT_IN_MALLOC
);
4138 gcall
*repl
= gimple_build_call (fn_malloc
, 1, size
);
4139 replace_call_with_call_and_fold (gsi
, repl
);
4146 /* Number of bytes into which any type but aggregate or vector types
4148 static constexpr size_t clear_padding_unit
4149 = MAX_BITSIZE_MODE_ANY_MODE
/ BITS_PER_UNIT
;
4150 /* Buffer size on which __builtin_clear_padding folding code works. */
4151 static const size_t clear_padding_buf_size
= 32 * clear_padding_unit
;
4153 /* Data passed through __builtin_clear_padding folding. */
4154 struct clear_padding_struct
{
4156 /* 0 during __builtin_clear_padding folding, nonzero during
4157 clear_type_padding_in_mask. In that case, instead of clearing the
4158 non-padding bits in union_ptr array clear the padding bits in there. */
4162 gimple_stmt_iterator
*gsi
;
4163 /* Alignment of buf->base + 0. */
4165 /* Offset from buf->base. Should be always a multiple of UNITS_PER_WORD. */
4167 /* Number of padding bytes before buf->off that don't have padding clear
4168 code emitted yet. */
4169 HOST_WIDE_INT padding_bytes
;
4170 /* The size of the whole object. Never emit code to touch
4171 buf->base + buf->sz or following bytes. */
4173 /* Number of bytes recorded in buf->buf. */
4175 /* When inside union, instead of emitting code we and bits inside of
4176 the union_ptr array. */
4177 unsigned char *union_ptr
;
4178 /* Set bits mean padding bits that need to be cleared by the builtin. */
4179 unsigned char buf
[clear_padding_buf_size
+ clear_padding_unit
];
4182 /* Emit code to clear padding requested in BUF->buf - set bits
4183 in there stand for padding that should be cleared. FULL is true
4184 if everything from the buffer should be flushed, otherwise
4185 it can leave up to 2 * clear_padding_unit bytes for further
4189 clear_padding_flush (clear_padding_struct
*buf
, bool full
)
4191 gcc_assert ((clear_padding_unit
% UNITS_PER_WORD
) == 0);
4192 if (!full
&& buf
->size
< 2 * clear_padding_unit
)
4194 gcc_assert ((buf
->off
% UNITS_PER_WORD
) == 0);
4195 size_t end
= buf
->size
;
4197 end
= ((end
- clear_padding_unit
- 1) / clear_padding_unit
4198 * clear_padding_unit
);
4199 size_t padding_bytes
= buf
->padding_bytes
;
4202 if (buf
->clear_in_mask
)
4204 /* During clear_type_padding_in_mask, clear the padding
4205 bits set in buf->buf in the buf->union_ptr mask. */
4206 for (size_t i
= 0; i
< end
; i
++)
4208 if (buf
->buf
[i
] == (unsigned char) ~0)
4212 memset (&buf
->union_ptr
[buf
->off
+ i
- padding_bytes
],
4215 buf
->union_ptr
[buf
->off
+ i
] &= ~buf
->buf
[i
];
4220 memset (&buf
->union_ptr
[buf
->off
+ end
- padding_bytes
],
4224 buf
->padding_bytes
= 0;
4228 memmove (buf
->buf
, buf
->buf
+ end
, buf
->size
- end
);
4231 buf
->padding_bytes
= padding_bytes
;
4235 /* Inside of a union, instead of emitting any code, instead
4236 clear all bits in the union_ptr buffer that are clear
4237 in buf. Whole padding bytes don't clear anything. */
4238 for (size_t i
= 0; i
< end
; i
++)
4240 if (buf
->buf
[i
] == (unsigned char) ~0)
4245 buf
->union_ptr
[buf
->off
+ i
] &= buf
->buf
[i
];
4252 buf
->padding_bytes
= 0;
4256 memmove (buf
->buf
, buf
->buf
+ end
, buf
->size
- end
);
4259 buf
->padding_bytes
= padding_bytes
;
4263 size_t wordsize
= UNITS_PER_WORD
;
4264 for (size_t i
= 0; i
< end
; i
+= wordsize
)
4266 size_t nonzero_first
= wordsize
;
4267 size_t nonzero_last
= 0;
4268 size_t zero_first
= wordsize
;
4269 size_t zero_last
= 0;
4270 bool all_ones
= true, bytes_only
= true;
4271 if ((unsigned HOST_WIDE_INT
) (buf
->off
+ i
+ wordsize
)
4272 > (unsigned HOST_WIDE_INT
) buf
->sz
)
4274 gcc_assert (wordsize
> 1);
4279 for (size_t j
= i
; j
< i
+ wordsize
&& j
< end
; j
++)
4283 if (nonzero_first
== wordsize
)
4285 nonzero_first
= j
- i
;
4286 nonzero_last
= j
- i
;
4288 if (nonzero_last
!= j
- i
)
4290 nonzero_last
= j
+ 1 - i
;
4294 if (zero_first
== wordsize
)
4296 zero_last
= j
+ 1 - i
;
4298 if (buf
->buf
[j
] != 0 && buf
->buf
[j
] != (unsigned char) ~0)
4304 size_t padding_end
= i
;
4307 if (nonzero_first
== 0
4308 && nonzero_last
== wordsize
4311 /* All bits are padding and we had some padding
4312 before too. Just extend it. */
4313 padding_bytes
+= wordsize
;
4316 if (all_ones
&& nonzero_first
== 0)
4318 padding_bytes
+= nonzero_last
;
4319 padding_end
+= nonzero_last
;
4320 nonzero_first
= wordsize
;
4323 else if (bytes_only
&& nonzero_first
== 0)
4325 gcc_assert (zero_first
&& zero_first
!= wordsize
);
4326 padding_bytes
+= zero_first
;
4327 padding_end
+= zero_first
;
4330 if (padding_bytes
== 1)
4332 atype
= char_type_node
;
4333 src
= build_zero_cst (char_type_node
);
4337 atype
= build_array_type_nelts (char_type_node
, padding_bytes
);
4338 src
= build_constructor (atype
, NULL
);
4340 tree dst
= build2_loc (buf
->loc
, MEM_REF
, atype
, buf
->base
,
4341 build_int_cst (buf
->alias_type
,
4342 buf
->off
+ padding_end
4344 gimple
*g
= gimple_build_assign (dst
, src
);
4345 gimple_set_location (g
, buf
->loc
);
4346 gsi_insert_before (buf
->gsi
, g
, GSI_SAME_STMT
);
4348 buf
->padding_bytes
= 0;
4350 if (nonzero_first
== wordsize
)
4351 /* All bits in a word are 0, there are no padding bits. */
4353 if (all_ones
&& nonzero_last
== wordsize
)
4355 /* All bits between nonzero_first and end of word are padding
4356 bits, start counting padding_bytes. */
4357 padding_bytes
= nonzero_last
- nonzero_first
;
4362 /* If bitfields aren't involved in this word, prefer storing
4363 individual bytes or groups of them over performing a RMW
4364 operation on the whole word. */
4365 gcc_assert (i
+ zero_last
<= end
);
4366 for (size_t j
= padding_end
; j
< i
+ zero_last
; j
++)
4371 for (k
= j
; k
< i
+ zero_last
; k
++)
4372 if (buf
->buf
[k
] == 0)
4374 HOST_WIDE_INT off
= buf
->off
+ j
;
4378 atype
= char_type_node
;
4379 src
= build_zero_cst (char_type_node
);
4383 atype
= build_array_type_nelts (char_type_node
, k
- j
);
4384 src
= build_constructor (atype
, NULL
);
4386 tree dst
= build2_loc (buf
->loc
, MEM_REF
, atype
,
4388 build_int_cst (buf
->alias_type
, off
));
4389 gimple
*g
= gimple_build_assign (dst
, src
);
4390 gimple_set_location (g
, buf
->loc
);
4391 gsi_insert_before (buf
->gsi
, g
, GSI_SAME_STMT
);
4395 if (nonzero_last
== wordsize
)
4396 padding_bytes
= nonzero_last
- zero_last
;
4399 for (size_t eltsz
= 1; eltsz
<= wordsize
; eltsz
<<= 1)
4401 if (nonzero_last
- nonzero_first
<= eltsz
4402 && ((nonzero_first
& ~(eltsz
- 1))
4403 == ((nonzero_last
- 1) & ~(eltsz
- 1))))
4407 type
= char_type_node
;
4409 type
= lang_hooks
.types
.type_for_size (eltsz
* BITS_PER_UNIT
,
4411 size_t start
= nonzero_first
& ~(eltsz
- 1);
4412 HOST_WIDE_INT off
= buf
->off
+ i
+ start
;
4414 if (eltsz
> 1 && buf
->align
< TYPE_ALIGN (type
))
4415 atype
= build_aligned_type (type
, buf
->align
);
4416 tree dst
= build2_loc (buf
->loc
, MEM_REF
, atype
, buf
->base
,
4417 build_int_cst (buf
->alias_type
, off
));
4421 && nonzero_first
== start
4422 && nonzero_last
== start
+ eltsz
)
4423 src
= build_zero_cst (type
);
4426 src
= make_ssa_name (type
);
4427 g
= gimple_build_assign (src
, unshare_expr (dst
));
4428 gimple_set_location (g
, buf
->loc
);
4429 gsi_insert_before (buf
->gsi
, g
, GSI_SAME_STMT
);
4430 tree mask
= native_interpret_expr (type
,
4431 buf
->buf
+ i
+ start
,
4433 gcc_assert (mask
&& TREE_CODE (mask
) == INTEGER_CST
);
4434 mask
= fold_build1 (BIT_NOT_EXPR
, type
, mask
);
4435 tree src_masked
= make_ssa_name (type
);
4436 g
= gimple_build_assign (src_masked
, BIT_AND_EXPR
,
4438 gimple_set_location (g
, buf
->loc
);
4439 gsi_insert_before (buf
->gsi
, g
, GSI_SAME_STMT
);
4442 g
= gimple_build_assign (dst
, src
);
4443 gimple_set_location (g
, buf
->loc
);
4444 gsi_insert_before (buf
->gsi
, g
, GSI_SAME_STMT
);
4454 if (padding_bytes
== 1)
4456 atype
= char_type_node
;
4457 src
= build_zero_cst (char_type_node
);
4461 atype
= build_array_type_nelts (char_type_node
, padding_bytes
);
4462 src
= build_constructor (atype
, NULL
);
4464 tree dst
= build2_loc (buf
->loc
, MEM_REF
, atype
, buf
->base
,
4465 build_int_cst (buf
->alias_type
,
4468 gimple
*g
= gimple_build_assign (dst
, src
);
4469 gimple_set_location (g
, buf
->loc
);
4470 gsi_insert_before (buf
->gsi
, g
, GSI_SAME_STMT
);
4472 size_t end_rem
= end
% UNITS_PER_WORD
;
4473 buf
->off
+= end
- end_rem
;
4474 buf
->size
= end_rem
;
4475 memset (buf
->buf
, 0, buf
->size
);
4476 buf
->padding_bytes
= 0;
4480 memmove (buf
->buf
, buf
->buf
+ end
, buf
->size
- end
);
4483 buf
->padding_bytes
= padding_bytes
;
4487 /* Append PADDING_BYTES padding bytes. */
4490 clear_padding_add_padding (clear_padding_struct
*buf
,
4491 HOST_WIDE_INT padding_bytes
)
4493 if (padding_bytes
== 0)
4495 if ((unsigned HOST_WIDE_INT
) padding_bytes
+ buf
->size
4496 > (unsigned HOST_WIDE_INT
) clear_padding_buf_size
)
4497 clear_padding_flush (buf
, false);
4498 if ((unsigned HOST_WIDE_INT
) padding_bytes
+ buf
->size
4499 > (unsigned HOST_WIDE_INT
) clear_padding_buf_size
)
4501 memset (buf
->buf
+ buf
->size
, ~0, clear_padding_buf_size
- buf
->size
);
4502 padding_bytes
-= clear_padding_buf_size
- buf
->size
;
4503 buf
->size
= clear_padding_buf_size
;
4504 clear_padding_flush (buf
, false);
4505 gcc_assert (buf
->padding_bytes
);
4506 /* At this point buf->buf[0] through buf->buf[buf->size - 1]
4507 is guaranteed to be all ones. */
4508 padding_bytes
+= buf
->size
;
4509 buf
->size
= padding_bytes
% UNITS_PER_WORD
;
4510 memset (buf
->buf
, ~0, buf
->size
);
4511 buf
->off
+= padding_bytes
- buf
->size
;
4512 buf
->padding_bytes
+= padding_bytes
- buf
->size
;
4516 memset (buf
->buf
+ buf
->size
, ~0, padding_bytes
);
4517 buf
->size
+= padding_bytes
;
4521 static void clear_padding_type (clear_padding_struct
*, tree
, HOST_WIDE_INT
);
4523 /* Clear padding bits of union type TYPE. */
4526 clear_padding_union (clear_padding_struct
*buf
, tree type
, HOST_WIDE_INT sz
)
4528 clear_padding_struct
*union_buf
;
4529 HOST_WIDE_INT start_off
= 0, next_off
= 0;
4530 size_t start_size
= 0;
4533 start_off
= buf
->off
+ buf
->size
;
4534 next_off
= start_off
+ sz
;
4535 start_size
= start_off
% UNITS_PER_WORD
;
4536 start_off
-= start_size
;
4537 clear_padding_flush (buf
, true);
4542 if (sz
+ buf
->size
> clear_padding_buf_size
)
4543 clear_padding_flush (buf
, false);
4544 union_buf
= XALLOCA (clear_padding_struct
);
4545 union_buf
->loc
= buf
->loc
;
4546 union_buf
->clear_in_mask
= buf
->clear_in_mask
;
4547 union_buf
->base
= NULL_TREE
;
4548 union_buf
->alias_type
= NULL_TREE
;
4549 union_buf
->gsi
= NULL
;
4550 union_buf
->align
= 0;
4552 union_buf
->padding_bytes
= 0;
4554 union_buf
->size
= 0;
4555 if (sz
+ buf
->size
<= clear_padding_buf_size
)
4556 union_buf
->union_ptr
= buf
->buf
+ buf
->size
;
4558 union_buf
->union_ptr
= XNEWVEC (unsigned char, sz
);
4559 memset (union_buf
->union_ptr
, ~0, sz
);
4562 for (tree field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
4563 if (TREE_CODE (field
) == FIELD_DECL
&& !DECL_PADDING_P (field
))
4565 if (DECL_SIZE_UNIT (field
) == NULL_TREE
)
4567 if (TREE_TYPE (field
) == error_mark_node
)
4569 gcc_assert (TREE_CODE (TREE_TYPE (field
)) == ARRAY_TYPE
4570 && !COMPLETE_TYPE_P (TREE_TYPE (field
)));
4571 if (!buf
->clear_in_mask
)
4572 error_at (buf
->loc
, "flexible array member %qD does not have "
4573 "well defined padding bits for %qs",
4574 field
, "__builtin_clear_padding");
4577 HOST_WIDE_INT fldsz
= tree_to_shwi (DECL_SIZE_UNIT (field
));
4578 gcc_assert (union_buf
->size
== 0);
4579 union_buf
->off
= start_off
;
4580 union_buf
->size
= start_size
;
4581 memset (union_buf
->buf
, ~0, start_size
);
4582 clear_padding_type (union_buf
, TREE_TYPE (field
), fldsz
);
4583 clear_padding_add_padding (union_buf
, sz
- fldsz
);
4584 clear_padding_flush (union_buf
, true);
4587 if (buf
== union_buf
)
4589 buf
->off
= next_off
;
4590 buf
->size
= next_off
% UNITS_PER_WORD
;
4591 buf
->off
-= buf
->size
;
4592 memset (buf
->buf
, ~0, buf
->size
);
4594 else if (sz
+ buf
->size
<= clear_padding_buf_size
)
4598 unsigned char *union_ptr
= union_buf
->union_ptr
;
4601 clear_padding_flush (buf
, false);
4602 HOST_WIDE_INT this_sz
4603 = MIN ((unsigned HOST_WIDE_INT
) sz
,
4604 clear_padding_buf_size
- buf
->size
);
4605 memcpy (buf
->buf
+ buf
->size
, union_ptr
, this_sz
);
4606 buf
->size
+= this_sz
;
4607 union_ptr
+= this_sz
;
4610 XDELETE (union_buf
->union_ptr
);
4614 /* The only known floating point formats with padding bits are the
4615 IEEE extended ones. */
4618 clear_padding_real_needs_padding_p (tree type
)
4620 const struct real_format
*fmt
= REAL_MODE_FORMAT (TYPE_MODE (type
));
4622 && fmt
->signbit_ro
== fmt
->signbit_rw
4623 && (fmt
->signbit_ro
== 79 || fmt
->signbit_ro
== 95));
4626 /* Return true if TYPE might contain any padding bits. */
4629 clear_padding_type_may_have_padding_p (tree type
)
4631 switch (TREE_CODE (type
))
4639 return clear_padding_type_may_have_padding_p (TREE_TYPE (type
));
4641 return clear_padding_real_needs_padding_p (type
);
4647 /* Emit a runtime loop:
4648 for (; buf.base != end; buf.base += sz)
4649 __builtin_clear_padding (buf.base); */
4652 clear_padding_emit_loop (clear_padding_struct
*buf
, tree type
, tree end
)
4654 tree l1
= create_artificial_label (buf
->loc
);
4655 tree l2
= create_artificial_label (buf
->loc
);
4656 tree l3
= create_artificial_label (buf
->loc
);
4657 gimple
*g
= gimple_build_goto (l2
);
4658 gimple_set_location (g
, buf
->loc
);
4659 gsi_insert_before (buf
->gsi
, g
, GSI_SAME_STMT
);
4660 g
= gimple_build_label (l1
);
4661 gimple_set_location (g
, buf
->loc
);
4662 gsi_insert_before (buf
->gsi
, g
, GSI_SAME_STMT
);
4663 clear_padding_type (buf
, type
, buf
->sz
);
4664 clear_padding_flush (buf
, true);
4665 g
= gimple_build_assign (buf
->base
, POINTER_PLUS_EXPR
, buf
->base
,
4666 size_int (buf
->sz
));
4667 gimple_set_location (g
, buf
->loc
);
4668 gsi_insert_before (buf
->gsi
, g
, GSI_SAME_STMT
);
4669 g
= gimple_build_label (l2
);
4670 gimple_set_location (g
, buf
->loc
);
4671 gsi_insert_before (buf
->gsi
, g
, GSI_SAME_STMT
);
4672 g
= gimple_build_cond (NE_EXPR
, buf
->base
, end
, l1
, l3
);
4673 gimple_set_location (g
, buf
->loc
);
4674 gsi_insert_before (buf
->gsi
, g
, GSI_SAME_STMT
);
4675 g
= gimple_build_label (l3
);
4676 gimple_set_location (g
, buf
->loc
);
4677 gsi_insert_before (buf
->gsi
, g
, GSI_SAME_STMT
);
4680 /* Clear padding bits for TYPE. Called recursively from
4681 gimple_fold_builtin_clear_padding. */
4684 clear_padding_type (clear_padding_struct
*buf
, tree type
, HOST_WIDE_INT sz
)
4686 switch (TREE_CODE (type
))
4689 HOST_WIDE_INT cur_pos
;
4691 for (tree field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
4692 if (TREE_CODE (field
) == FIELD_DECL
&& !DECL_PADDING_P (field
))
4694 tree ftype
= TREE_TYPE (field
);
4695 if (DECL_BIT_FIELD (field
))
4697 HOST_WIDE_INT fldsz
= TYPE_PRECISION (ftype
);
4700 HOST_WIDE_INT pos
= int_byte_position (field
);
4704 = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field
));
4705 bpos
%= BITS_PER_UNIT
;
4707 = ROUND_UP (bpos
+ fldsz
, BITS_PER_UNIT
) / BITS_PER_UNIT
;
4708 if (pos
+ end
> cur_pos
)
4710 clear_padding_add_padding (buf
, pos
+ end
- cur_pos
);
4711 cur_pos
= pos
+ end
;
4713 gcc_assert (cur_pos
> pos
4714 && ((unsigned HOST_WIDE_INT
) buf
->size
4715 >= (unsigned HOST_WIDE_INT
) cur_pos
- pos
));
4716 unsigned char *p
= buf
->buf
+ buf
->size
- (cur_pos
- pos
);
4717 if (BYTES_BIG_ENDIAN
!= WORDS_BIG_ENDIAN
)
4718 sorry_at (buf
->loc
, "PDP11 bit-field handling unsupported"
4719 " in %qs", "__builtin_clear_padding");
4720 else if (BYTES_BIG_ENDIAN
)
4723 if (bpos
+ fldsz
<= BITS_PER_UNIT
)
4724 *p
&= ~(((1 << fldsz
) - 1)
4725 << (BITS_PER_UNIT
- bpos
- fldsz
));
4730 *p
&= ~(((1U << BITS_PER_UNIT
) - 1) >> bpos
);
4732 fldsz
-= BITS_PER_UNIT
- bpos
;
4734 memset (p
, 0, fldsz
/ BITS_PER_UNIT
);
4735 p
+= fldsz
/ BITS_PER_UNIT
;
4736 fldsz
%= BITS_PER_UNIT
;
4738 *p
&= ((1U << BITS_PER_UNIT
) - 1) >> fldsz
;
4743 /* Little endian. */
4744 if (bpos
+ fldsz
<= BITS_PER_UNIT
)
4745 *p
&= ~(((1 << fldsz
) - 1) << bpos
);
4750 *p
&= ~(((1 << BITS_PER_UNIT
) - 1) << bpos
);
4752 fldsz
-= BITS_PER_UNIT
- bpos
;
4754 memset (p
, 0, fldsz
/ BITS_PER_UNIT
);
4755 p
+= fldsz
/ BITS_PER_UNIT
;
4756 fldsz
%= BITS_PER_UNIT
;
4758 *p
&= ~((1 << fldsz
) - 1);
4762 else if (DECL_SIZE_UNIT (field
) == NULL_TREE
)
4764 if (ftype
== error_mark_node
)
4766 gcc_assert (TREE_CODE (ftype
) == ARRAY_TYPE
4767 && !COMPLETE_TYPE_P (ftype
));
4768 if (!buf
->clear_in_mask
)
4769 error_at (buf
->loc
, "flexible array member %qD does not "
4770 "have well defined padding bits for %qs",
4771 field
, "__builtin_clear_padding");
4773 else if (is_empty_type (TREE_TYPE (field
)))
4777 HOST_WIDE_INT pos
= int_byte_position (field
);
4780 HOST_WIDE_INT fldsz
= tree_to_shwi (DECL_SIZE_UNIT (field
));
4781 gcc_assert (pos
>= 0 && fldsz
>= 0 && pos
>= cur_pos
);
4782 clear_padding_add_padding (buf
, pos
- cur_pos
);
4784 clear_padding_type (buf
, TREE_TYPE (field
), fldsz
);
4788 gcc_assert (sz
>= cur_pos
);
4789 clear_padding_add_padding (buf
, sz
- cur_pos
);
4792 HOST_WIDE_INT nelts
, fldsz
;
4793 fldsz
= int_size_in_bytes (TREE_TYPE (type
));
4798 && sz
> 8 * UNITS_PER_WORD
4799 && buf
->union_ptr
== NULL
4800 && clear_padding_type_may_have_padding_p (TREE_TYPE (type
)))
4802 /* For sufficiently large array of more than one elements,
4803 emit a runtime loop to keep code size manageable. */
4804 tree base
= buf
->base
;
4805 unsigned int prev_align
= buf
->align
;
4806 HOST_WIDE_INT off
= buf
->off
+ buf
->size
;
4807 HOST_WIDE_INT prev_sz
= buf
->sz
;
4808 clear_padding_flush (buf
, true);
4809 tree elttype
= TREE_TYPE (type
);
4810 buf
->base
= create_tmp_var (build_pointer_type (elttype
));
4811 tree end
= make_ssa_name (TREE_TYPE (buf
->base
));
4812 gimple
*g
= gimple_build_assign (buf
->base
, POINTER_PLUS_EXPR
,
4813 base
, size_int (off
));
4814 gimple_set_location (g
, buf
->loc
);
4815 gsi_insert_before (buf
->gsi
, g
, GSI_SAME_STMT
);
4816 g
= gimple_build_assign (end
, POINTER_PLUS_EXPR
, buf
->base
,
4818 gimple_set_location (g
, buf
->loc
);
4819 gsi_insert_before (buf
->gsi
, g
, GSI_SAME_STMT
);
4821 buf
->align
= TYPE_ALIGN (elttype
);
4824 clear_padding_emit_loop (buf
, elttype
, end
);
4827 buf
->align
= prev_align
;
4828 buf
->size
= off
% UNITS_PER_WORD
;
4829 buf
->off
= off
- buf
->size
;
4830 memset (buf
->buf
, 0, buf
->size
);
4833 for (HOST_WIDE_INT i
= 0; i
< nelts
; i
++)
4834 clear_padding_type (buf
, TREE_TYPE (type
), fldsz
);
4837 clear_padding_union (buf
, type
, sz
);
4840 gcc_assert ((size_t) sz
<= clear_padding_unit
);
4841 if ((unsigned HOST_WIDE_INT
) sz
+ buf
->size
> clear_padding_buf_size
)
4842 clear_padding_flush (buf
, false);
4843 if (clear_padding_real_needs_padding_p (type
))
4845 /* Use native_interpret_expr + native_encode_expr to figure out
4846 which bits are padding. */
4847 memset (buf
->buf
+ buf
->size
, ~0, sz
);
4848 tree cst
= native_interpret_expr (type
, buf
->buf
+ buf
->size
, sz
);
4849 gcc_assert (cst
&& TREE_CODE (cst
) == REAL_CST
);
4850 int len
= native_encode_expr (cst
, buf
->buf
+ buf
->size
, sz
);
4851 gcc_assert (len
> 0 && (size_t) len
== (size_t) sz
);
4852 for (size_t i
= 0; i
< (size_t) sz
; i
++)
4853 buf
->buf
[buf
->size
+ i
] ^= ~0;
4856 memset (buf
->buf
+ buf
->size
, 0, sz
);
4860 fldsz
= int_size_in_bytes (TREE_TYPE (type
));
4861 clear_padding_type (buf
, TREE_TYPE (type
), fldsz
);
4862 clear_padding_type (buf
, TREE_TYPE (type
), fldsz
);
4865 nelts
= TYPE_VECTOR_SUBPARTS (type
).to_constant ();
4866 fldsz
= int_size_in_bytes (TREE_TYPE (type
));
4867 for (HOST_WIDE_INT i
= 0; i
< nelts
; i
++)
4868 clear_padding_type (buf
, TREE_TYPE (type
), fldsz
);
4871 gcc_assert ((size_t) sz
<= clear_padding_unit
);
4872 if ((unsigned HOST_WIDE_INT
) sz
+ buf
->size
> clear_padding_buf_size
)
4873 clear_padding_flush (buf
, false);
4874 memset (buf
->buf
+ buf
->size
, ~0, sz
);
4878 gcc_assert ((size_t) sz
<= clear_padding_unit
);
4879 if ((unsigned HOST_WIDE_INT
) sz
+ buf
->size
> clear_padding_buf_size
)
4880 clear_padding_flush (buf
, false);
4881 memset (buf
->buf
+ buf
->size
, 0, sz
);
4887 /* Clear padding bits of TYPE in MASK. */
4890 clear_type_padding_in_mask (tree type
, unsigned char *mask
)
4892 clear_padding_struct buf
;
4893 buf
.loc
= UNKNOWN_LOCATION
;
4894 buf
.clear_in_mask
= true;
4895 buf
.base
= NULL_TREE
;
4896 buf
.alias_type
= NULL_TREE
;
4900 buf
.padding_bytes
= 0;
4901 buf
.sz
= int_size_in_bytes (type
);
4903 buf
.union_ptr
= mask
;
4904 clear_padding_type (&buf
, type
, buf
.sz
);
4905 clear_padding_flush (&buf
, true);
4908 /* Fold __builtin_clear_padding builtin. */
4911 gimple_fold_builtin_clear_padding (gimple_stmt_iterator
*gsi
)
4913 gimple
*stmt
= gsi_stmt (*gsi
);
4914 gcc_assert (gimple_call_num_args (stmt
) == 2);
4915 tree ptr
= gimple_call_arg (stmt
, 0);
4916 tree typearg
= gimple_call_arg (stmt
, 1);
4917 tree type
= TREE_TYPE (TREE_TYPE (typearg
));
4918 location_t loc
= gimple_location (stmt
);
4919 clear_padding_struct buf
;
4920 gimple_stmt_iterator gsiprev
= *gsi
;
4921 /* This should be folded during the lower pass. */
4922 gcc_assert (!gimple_in_ssa_p (cfun
) && cfun
->cfg
== NULL
);
4923 gcc_assert (COMPLETE_TYPE_P (type
));
4924 gsi_prev (&gsiprev
);
4927 buf
.clear_in_mask
= false;
4929 buf
.alias_type
= NULL_TREE
;
4931 buf
.align
= get_pointer_alignment (ptr
);
4932 unsigned int talign
= min_align_of_type (type
) * BITS_PER_UNIT
;
4933 buf
.align
= MAX (buf
.align
, talign
);
4935 buf
.padding_bytes
= 0;
4937 buf
.sz
= int_size_in_bytes (type
);
4938 buf
.union_ptr
= NULL
;
4939 if (buf
.sz
< 0 && int_size_in_bytes (strip_array_types (type
)) < 0)
4940 sorry_at (loc
, "%s not supported for variable length aggregates",
4941 "__builtin_clear_padding");
4942 /* The implementation currently assumes 8-bit host and target
4943 chars which is the case for all currently supported targets
4944 and hosts and is required e.g. for native_{encode,interpret}* APIs. */
4945 else if (CHAR_BIT
!= 8 || BITS_PER_UNIT
!= 8)
4946 sorry_at (loc
, "%s not supported on this target",
4947 "__builtin_clear_padding");
4948 else if (!clear_padding_type_may_have_padding_p (type
))
4950 else if (TREE_CODE (type
) == ARRAY_TYPE
&& buf
.sz
< 0)
4952 tree sz
= TYPE_SIZE_UNIT (type
);
4953 tree elttype
= type
;
4954 /* Only supports C/C++ VLAs and flattens all the VLA levels. */
4955 while (TREE_CODE (elttype
) == ARRAY_TYPE
4956 && int_size_in_bytes (elttype
) < 0)
4957 elttype
= TREE_TYPE (elttype
);
4958 HOST_WIDE_INT eltsz
= int_size_in_bytes (elttype
);
4959 gcc_assert (eltsz
>= 0);
4962 buf
.base
= create_tmp_var (build_pointer_type (elttype
));
4963 tree end
= make_ssa_name (TREE_TYPE (buf
.base
));
4964 gimple
*g
= gimple_build_assign (buf
.base
, ptr
);
4965 gimple_set_location (g
, loc
);
4966 gsi_insert_before (gsi
, g
, GSI_SAME_STMT
);
4967 g
= gimple_build_assign (end
, POINTER_PLUS_EXPR
, buf
.base
, sz
);
4968 gimple_set_location (g
, loc
);
4969 gsi_insert_before (gsi
, g
, GSI_SAME_STMT
);
4971 buf
.align
= TYPE_ALIGN (elttype
);
4972 buf
.alias_type
= build_pointer_type (elttype
);
4973 clear_padding_emit_loop (&buf
, elttype
, end
);
4978 if (!is_gimple_mem_ref_addr (buf
.base
))
4980 buf
.base
= make_ssa_name (TREE_TYPE (ptr
));
4981 gimple
*g
= gimple_build_assign (buf
.base
, ptr
);
4982 gimple_set_location (g
, loc
);
4983 gsi_insert_before (gsi
, g
, GSI_SAME_STMT
);
4985 buf
.alias_type
= build_pointer_type (type
);
4986 clear_padding_type (&buf
, type
, buf
.sz
);
4987 clear_padding_flush (&buf
, true);
4990 gimple_stmt_iterator gsiprev2
= *gsi
;
4991 gsi_prev (&gsiprev2
);
4992 if (gsi_stmt (gsiprev
) == gsi_stmt (gsiprev2
))
4993 gsi_replace (gsi
, gimple_build_nop (), true);
4996 gsi_remove (gsi
, true);
5002 /* Fold the non-target builtin at *GSI and return whether any simplification
5006 gimple_fold_builtin (gimple_stmt_iterator
*gsi
)
5008 gcall
*stmt
= as_a
<gcall
*>(gsi_stmt (*gsi
));
5009 tree callee
= gimple_call_fndecl (stmt
);
5011 /* Give up for always_inline inline builtins until they are
5013 if (avoid_folding_inline_builtin (callee
))
5016 unsigned n
= gimple_call_num_args (stmt
);
5017 enum built_in_function fcode
= DECL_FUNCTION_CODE (callee
);
5021 return gimple_fold_builtin_bcmp (gsi
);
5022 case BUILT_IN_BCOPY
:
5023 return gimple_fold_builtin_bcopy (gsi
);
5024 case BUILT_IN_BZERO
:
5025 return gimple_fold_builtin_bzero (gsi
);
5027 case BUILT_IN_MEMSET
:
5028 return gimple_fold_builtin_memset (gsi
,
5029 gimple_call_arg (stmt
, 1),
5030 gimple_call_arg (stmt
, 2));
5031 case BUILT_IN_MEMCPY
:
5032 case BUILT_IN_MEMPCPY
:
5033 case BUILT_IN_MEMMOVE
:
5034 return gimple_fold_builtin_memory_op (gsi
, gimple_call_arg (stmt
, 0),
5035 gimple_call_arg (stmt
, 1), fcode
);
5036 case BUILT_IN_SPRINTF_CHK
:
5037 case BUILT_IN_VSPRINTF_CHK
:
5038 return gimple_fold_builtin_sprintf_chk (gsi
, fcode
);
5039 case BUILT_IN_STRCAT_CHK
:
5040 return gimple_fold_builtin_strcat_chk (gsi
);
5041 case BUILT_IN_STRNCAT_CHK
:
5042 return gimple_fold_builtin_strncat_chk (gsi
);
5043 case BUILT_IN_STRLEN
:
5044 return gimple_fold_builtin_strlen (gsi
);
5045 case BUILT_IN_STRCPY
:
5046 return gimple_fold_builtin_strcpy (gsi
,
5047 gimple_call_arg (stmt
, 0),
5048 gimple_call_arg (stmt
, 1));
5049 case BUILT_IN_STRNCPY
:
5050 return gimple_fold_builtin_strncpy (gsi
,
5051 gimple_call_arg (stmt
, 0),
5052 gimple_call_arg (stmt
, 1),
5053 gimple_call_arg (stmt
, 2));
5054 case BUILT_IN_STRCAT
:
5055 return gimple_fold_builtin_strcat (gsi
, gimple_call_arg (stmt
, 0),
5056 gimple_call_arg (stmt
, 1));
5057 case BUILT_IN_STRNCAT
:
5058 return gimple_fold_builtin_strncat (gsi
);
5059 case BUILT_IN_INDEX
:
5060 case BUILT_IN_STRCHR
:
5061 return gimple_fold_builtin_strchr (gsi
, false);
5062 case BUILT_IN_RINDEX
:
5063 case BUILT_IN_STRRCHR
:
5064 return gimple_fold_builtin_strchr (gsi
, true);
5065 case BUILT_IN_STRSTR
:
5066 return gimple_fold_builtin_strstr (gsi
);
5067 case BUILT_IN_STRCMP
:
5068 case BUILT_IN_STRCMP_EQ
:
5069 case BUILT_IN_STRCASECMP
:
5070 case BUILT_IN_STRNCMP
:
5071 case BUILT_IN_STRNCMP_EQ
:
5072 case BUILT_IN_STRNCASECMP
:
5073 return gimple_fold_builtin_string_compare (gsi
);
5074 case BUILT_IN_MEMCHR
:
5075 return gimple_fold_builtin_memchr (gsi
);
5076 case BUILT_IN_FPUTS
:
5077 return gimple_fold_builtin_fputs (gsi
, gimple_call_arg (stmt
, 0),
5078 gimple_call_arg (stmt
, 1), false);
5079 case BUILT_IN_FPUTS_UNLOCKED
:
5080 return gimple_fold_builtin_fputs (gsi
, gimple_call_arg (stmt
, 0),
5081 gimple_call_arg (stmt
, 1), true);
5082 case BUILT_IN_MEMCPY_CHK
:
5083 case BUILT_IN_MEMPCPY_CHK
:
5084 case BUILT_IN_MEMMOVE_CHK
:
5085 case BUILT_IN_MEMSET_CHK
:
5086 return gimple_fold_builtin_memory_chk (gsi
,
5087 gimple_call_arg (stmt
, 0),
5088 gimple_call_arg (stmt
, 1),
5089 gimple_call_arg (stmt
, 2),
5090 gimple_call_arg (stmt
, 3),
5092 case BUILT_IN_STPCPY
:
5093 return gimple_fold_builtin_stpcpy (gsi
);
5094 case BUILT_IN_STRCPY_CHK
:
5095 case BUILT_IN_STPCPY_CHK
:
5096 return gimple_fold_builtin_stxcpy_chk (gsi
,
5097 gimple_call_arg (stmt
, 0),
5098 gimple_call_arg (stmt
, 1),
5099 gimple_call_arg (stmt
, 2),
5101 case BUILT_IN_STRNCPY_CHK
:
5102 case BUILT_IN_STPNCPY_CHK
:
5103 return gimple_fold_builtin_stxncpy_chk (gsi
,
5104 gimple_call_arg (stmt
, 0),
5105 gimple_call_arg (stmt
, 1),
5106 gimple_call_arg (stmt
, 2),
5107 gimple_call_arg (stmt
, 3),
5109 case BUILT_IN_SNPRINTF_CHK
:
5110 case BUILT_IN_VSNPRINTF_CHK
:
5111 return gimple_fold_builtin_snprintf_chk (gsi
, fcode
);
5113 case BUILT_IN_FPRINTF
:
5114 case BUILT_IN_FPRINTF_UNLOCKED
:
5115 case BUILT_IN_VFPRINTF
:
5116 if (n
== 2 || n
== 3)
5117 return gimple_fold_builtin_fprintf (gsi
,
5118 gimple_call_arg (stmt
, 0),
5119 gimple_call_arg (stmt
, 1),
5121 ? gimple_call_arg (stmt
, 2)
5125 case BUILT_IN_FPRINTF_CHK
:
5126 case BUILT_IN_VFPRINTF_CHK
:
5127 if (n
== 3 || n
== 4)
5128 return gimple_fold_builtin_fprintf (gsi
,
5129 gimple_call_arg (stmt
, 0),
5130 gimple_call_arg (stmt
, 2),
5132 ? gimple_call_arg (stmt
, 3)
5136 case BUILT_IN_PRINTF
:
5137 case BUILT_IN_PRINTF_UNLOCKED
:
5138 case BUILT_IN_VPRINTF
:
5139 if (n
== 1 || n
== 2)
5140 return gimple_fold_builtin_printf (gsi
, gimple_call_arg (stmt
, 0),
5142 ? gimple_call_arg (stmt
, 1)
5143 : NULL_TREE
, fcode
);
5145 case BUILT_IN_PRINTF_CHK
:
5146 case BUILT_IN_VPRINTF_CHK
:
5147 if (n
== 2 || n
== 3)
5148 return gimple_fold_builtin_printf (gsi
, gimple_call_arg (stmt
, 1),
5150 ? gimple_call_arg (stmt
, 2)
5151 : NULL_TREE
, fcode
);
5153 case BUILT_IN_ACC_ON_DEVICE
:
5154 return gimple_fold_builtin_acc_on_device (gsi
,
5155 gimple_call_arg (stmt
, 0));
5156 case BUILT_IN_REALLOC
:
5157 return gimple_fold_builtin_realloc (gsi
);
5159 case BUILT_IN_CLEAR_PADDING
:
5160 return gimple_fold_builtin_clear_padding (gsi
);
5165 /* Try the generic builtin folder. */
5166 bool ignore
= (gimple_call_lhs (stmt
) == NULL
);
5167 tree result
= fold_call_stmt (stmt
, ignore
);
5171 STRIP_NOPS (result
);
5173 result
= fold_convert (gimple_call_return_type (stmt
), result
);
5174 gimplify_and_update_call_from_tree (gsi
, result
);
5181 /* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
5182 function calls to constants, where possible. */
5185 fold_internal_goacc_dim (const gimple
*call
)
5187 int axis
= oacc_get_ifn_dim_arg (call
);
5188 int size
= oacc_get_fn_dim_size (current_function_decl
, axis
);
5189 tree result
= NULL_TREE
;
5190 tree type
= TREE_TYPE (gimple_call_lhs (call
));
5192 switch (gimple_call_internal_fn (call
))
5194 case IFN_GOACC_DIM_POS
:
5195 /* If the size is 1, we know the answer. */
5197 result
= build_int_cst (type
, 0);
5199 case IFN_GOACC_DIM_SIZE
:
5200 /* If the size is not dynamic, we know the answer. */
5202 result
= build_int_cst (type
, size
);
5211 /* Return true if stmt is __atomic_compare_exchange_N call which is suitable
5212 for conversion into ATOMIC_COMPARE_EXCHANGE if the second argument is
5213 &var where var is only addressable because of such calls. */
5216 optimize_atomic_compare_exchange_p (gimple
*stmt
)
5218 if (gimple_call_num_args (stmt
) != 6
5219 || !flag_inline_atomics
5221 || sanitize_flags_p (SANITIZE_THREAD
| SANITIZE_ADDRESS
)
5222 || !gimple_call_builtin_p (stmt
, BUILT_IN_NORMAL
)
5223 || !gimple_vdef (stmt
)
5224 || !gimple_vuse (stmt
))
5227 tree fndecl
= gimple_call_fndecl (stmt
);
5228 switch (DECL_FUNCTION_CODE (fndecl
))
5230 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
:
5231 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2
:
5232 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4
:
5233 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8
:
5234 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16
:
5240 tree expected
= gimple_call_arg (stmt
, 1);
5241 if (TREE_CODE (expected
) != ADDR_EXPR
5242 || !SSA_VAR_P (TREE_OPERAND (expected
, 0)))
5245 tree etype
= TREE_TYPE (TREE_OPERAND (expected
, 0));
5246 if (!is_gimple_reg_type (etype
)
5247 || !auto_var_in_fn_p (TREE_OPERAND (expected
, 0), current_function_decl
)
5248 || TREE_THIS_VOLATILE (etype
)
5249 || VECTOR_TYPE_P (etype
)
5250 || TREE_CODE (etype
) == COMPLEX_TYPE
5251 /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
5252 might not preserve all the bits. See PR71716. */
5253 || SCALAR_FLOAT_TYPE_P (etype
)
5254 || maybe_ne (TYPE_PRECISION (etype
),
5255 GET_MODE_BITSIZE (TYPE_MODE (etype
))))
5258 tree weak
= gimple_call_arg (stmt
, 3);
5259 if (!integer_zerop (weak
) && !integer_onep (weak
))
5262 tree parmt
= TYPE_ARG_TYPES (TREE_TYPE (fndecl
));
5263 tree itype
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt
)));
5264 machine_mode mode
= TYPE_MODE (itype
);
5266 if (direct_optab_handler (atomic_compare_and_swap_optab
, mode
)
5268 && optab_handler (sync_compare_and_swap_optab
, mode
) == CODE_FOR_nothing
)
5271 if (maybe_ne (int_size_in_bytes (etype
), GET_MODE_SIZE (mode
)))
5278 r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
5280 _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
5281 i = IMAGPART_EXPR <t>;
5283 e = REALPART_EXPR <t>; */
5286 fold_builtin_atomic_compare_exchange (gimple_stmt_iterator
*gsi
)
5288 gimple
*stmt
= gsi_stmt (*gsi
);
5289 tree fndecl
= gimple_call_fndecl (stmt
);
5290 tree parmt
= TYPE_ARG_TYPES (TREE_TYPE (fndecl
));
5291 tree itype
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt
)));
5292 tree ctype
= build_complex_type (itype
);
5293 tree expected
= TREE_OPERAND (gimple_call_arg (stmt
, 1), 0);
5294 bool throws
= false;
5296 gimple
*g
= gimple_build_assign (make_ssa_name (TREE_TYPE (expected
)),
5298 gsi_insert_before (gsi
, g
, GSI_SAME_STMT
);
5299 gimple_stmt_iterator gsiret
= gsi_for_stmt (g
);
5300 if (!useless_type_conversion_p (itype
, TREE_TYPE (expected
)))
5302 g
= gimple_build_assign (make_ssa_name (itype
), VIEW_CONVERT_EXPR
,
5303 build1 (VIEW_CONVERT_EXPR
, itype
,
5304 gimple_assign_lhs (g
)));
5305 gsi_insert_before (gsi
, g
, GSI_SAME_STMT
);
5307 int flag
= (integer_onep (gimple_call_arg (stmt
, 3)) ? 256 : 0)
5308 + int_size_in_bytes (itype
);
5309 g
= gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE
, 6,
5310 gimple_call_arg (stmt
, 0),
5311 gimple_assign_lhs (g
),
5312 gimple_call_arg (stmt
, 2),
5313 build_int_cst (integer_type_node
, flag
),
5314 gimple_call_arg (stmt
, 4),
5315 gimple_call_arg (stmt
, 5));
5316 tree lhs
= make_ssa_name (ctype
);
5317 gimple_call_set_lhs (g
, lhs
);
5318 gimple_move_vops (g
, stmt
);
5319 tree oldlhs
= gimple_call_lhs (stmt
);
5320 if (stmt_can_throw_internal (cfun
, stmt
))
5323 e
= find_fallthru_edge (gsi_bb (*gsi
)->succs
);
5325 gimple_call_set_nothrow (as_a
<gcall
*> (g
),
5326 gimple_call_nothrow_p (as_a
<gcall
*> (stmt
)));
5327 gimple_call_set_lhs (stmt
, NULL_TREE
);
5328 gsi_replace (gsi
, g
, true);
5331 g
= gimple_build_assign (make_ssa_name (itype
), IMAGPART_EXPR
,
5332 build1 (IMAGPART_EXPR
, itype
, lhs
));
5335 gsi_insert_on_edge_immediate (e
, g
);
5336 *gsi
= gsi_for_stmt (g
);
5339 gsi_insert_after (gsi
, g
, GSI_NEW_STMT
);
5340 g
= gimple_build_assign (oldlhs
, NOP_EXPR
, gimple_assign_lhs (g
));
5341 gsi_insert_after (gsi
, g
, GSI_NEW_STMT
);
5343 g
= gimple_build_assign (make_ssa_name (itype
), REALPART_EXPR
,
5344 build1 (REALPART_EXPR
, itype
, lhs
));
5345 if (throws
&& oldlhs
== NULL_TREE
)
5347 gsi_insert_on_edge_immediate (e
, g
);
5348 *gsi
= gsi_for_stmt (g
);
5351 gsi_insert_after (gsi
, g
, GSI_NEW_STMT
);
5352 if (!useless_type_conversion_p (TREE_TYPE (expected
), itype
))
5354 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (expected
)),
5356 build1 (VIEW_CONVERT_EXPR
, TREE_TYPE (expected
),
5357 gimple_assign_lhs (g
)));
5358 gsi_insert_after (gsi
, g
, GSI_NEW_STMT
);
5360 g
= gimple_build_assign (expected
, SSA_NAME
, gimple_assign_lhs (g
));
5361 gsi_insert_after (gsi
, g
, GSI_NEW_STMT
);
5365 /* Return true if ARG0 CODE ARG1 in infinite signed precision operation
5366 doesn't fit into TYPE. The test for overflow should be regardless of
5367 -fwrapv, and even for unsigned types. */
5370 arith_overflowed_p (enum tree_code code
, const_tree type
,
5371 const_tree arg0
, const_tree arg1
)
5373 widest2_int warg0
= widest2_int_cst (arg0
);
5374 widest2_int warg1
= widest2_int_cst (arg1
);
5378 case PLUS_EXPR
: wres
= wi::add (warg0
, warg1
); break;
5379 case MINUS_EXPR
: wres
= wi::sub (warg0
, warg1
); break;
5380 case MULT_EXPR
: wres
= wi::mul (warg0
, warg1
); break;
5381 default: gcc_unreachable ();
5383 signop sign
= TYPE_SIGN (type
);
5384 if (sign
== UNSIGNED
&& wi::neg_p (wres
))
5386 return wi::min_precision (wres
, sign
) > TYPE_PRECISION (type
);
5389 /* If IFN_MASK_LOAD/STORE call CALL is unconditional, return a MEM_REF
5390 for the memory it references, otherwise return null. VECTYPE is the
5391 type of the memory vector. */
5394 gimple_fold_mask_load_store_mem_ref (gcall
*call
, tree vectype
)
5396 tree ptr
= gimple_call_arg (call
, 0);
5397 tree alias_align
= gimple_call_arg (call
, 1);
5398 tree mask
= gimple_call_arg (call
, 2);
5399 if (!tree_fits_uhwi_p (alias_align
) || !integer_all_onesp (mask
))
5402 unsigned HOST_WIDE_INT align
= tree_to_uhwi (alias_align
);
5403 if (TYPE_ALIGN (vectype
) != align
)
5404 vectype
= build_aligned_type (vectype
, align
);
5405 tree offset
= build_zero_cst (TREE_TYPE (alias_align
));
5406 return fold_build2 (MEM_REF
, vectype
, ptr
, offset
);
5409 /* Try to fold IFN_MASK_LOAD call CALL. Return true on success. */
5412 gimple_fold_mask_load (gimple_stmt_iterator
*gsi
, gcall
*call
)
5414 tree lhs
= gimple_call_lhs (call
);
5418 if (tree rhs
= gimple_fold_mask_load_store_mem_ref (call
, TREE_TYPE (lhs
)))
5420 gassign
*new_stmt
= gimple_build_assign (lhs
, rhs
);
5421 gimple_set_location (new_stmt
, gimple_location (call
));
5422 gimple_move_vops (new_stmt
, call
);
5423 gsi_replace (gsi
, new_stmt
, false);
5429 /* Try to fold IFN_MASK_STORE call CALL. Return true on success. */
5432 gimple_fold_mask_store (gimple_stmt_iterator
*gsi
, gcall
*call
)
5434 tree rhs
= gimple_call_arg (call
, 3);
5435 if (tree lhs
= gimple_fold_mask_load_store_mem_ref (call
, TREE_TYPE (rhs
)))
5437 gassign
*new_stmt
= gimple_build_assign (lhs
, rhs
);
5438 gimple_set_location (new_stmt
, gimple_location (call
));
5439 gimple_move_vops (new_stmt
, call
);
5440 gsi_replace (gsi
, new_stmt
, false);
5446 /* Attempt to fold a call statement referenced by the statement iterator GSI.
5447 The statement may be replaced by another statement, e.g., if the call
5448 simplifies to a constant value. Return true if any changes were made.
5449 It is assumed that the operands have been previously folded. */
5452 gimple_fold_call (gimple_stmt_iterator
*gsi
, bool inplace
)
5454 gcall
*stmt
= as_a
<gcall
*> (gsi_stmt (*gsi
));
5456 bool changed
= false;
5458 /* Check for virtual calls that became direct calls. */
5459 callee
= gimple_call_fn (stmt
);
5460 if (callee
&& TREE_CODE (callee
) == OBJ_TYPE_REF
)
5462 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee
)) != NULL_TREE
)
5464 if (dump_file
&& virtual_method_call_p (callee
)
5465 && !possible_polymorphic_call_target_p
5466 (callee
, stmt
, cgraph_node::get (gimple_call_addr_fndecl
5467 (OBJ_TYPE_REF_EXPR (callee
)))))
5470 "Type inheritance inconsistent devirtualization of ");
5471 print_gimple_stmt (dump_file
, stmt
, 0, TDF_SLIM
);
5472 fprintf (dump_file
, " to ");
5473 print_generic_expr (dump_file
, callee
, TDF_SLIM
);
5474 fprintf (dump_file
, "\n");
5477 gimple_call_set_fn (stmt
, OBJ_TYPE_REF_EXPR (callee
));
5480 else if (flag_devirtualize
&& !inplace
&& virtual_method_call_p (callee
))
5483 vec
<cgraph_node
*>targets
5484 = possible_polymorphic_call_targets (callee
, stmt
, &final
);
5485 if (final
&& targets
.length () <= 1 && dbg_cnt (devirt
))
5487 tree lhs
= gimple_call_lhs (stmt
);
5488 if (dump_enabled_p ())
5490 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS
, stmt
,
5491 "folding virtual function call to %s\n",
5492 targets
.length () == 1
5493 ? targets
[0]->name ()
5494 : "__builtin_unreachable");
5496 if (targets
.length () == 1)
5498 tree fndecl
= targets
[0]->decl
;
5499 gimple_call_set_fndecl (stmt
, fndecl
);
5501 /* If changing the call to __cxa_pure_virtual
5502 or similar noreturn function, adjust gimple_call_fntype
5504 if (gimple_call_noreturn_p (stmt
)
5505 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl
)))
5506 && TYPE_ARG_TYPES (TREE_TYPE (fndecl
))
5507 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl
)))
5509 gimple_call_set_fntype (stmt
, TREE_TYPE (fndecl
));
5510 /* If the call becomes noreturn, remove the lhs. */
5512 && gimple_call_noreturn_p (stmt
)
5513 && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt
)))
5514 || should_remove_lhs_p (lhs
)))
5516 if (TREE_CODE (lhs
) == SSA_NAME
)
5518 tree var
= create_tmp_var (TREE_TYPE (lhs
));
5519 tree def
= get_or_create_ssa_default_def (cfun
, var
);
5520 gimple
*new_stmt
= gimple_build_assign (lhs
, def
);
5521 gsi_insert_before (gsi
, new_stmt
, GSI_SAME_STMT
);
5523 gimple_call_set_lhs (stmt
, NULL_TREE
);
5525 maybe_remove_unused_call_args (cfun
, stmt
);
5529 tree fndecl
= builtin_decl_implicit (BUILT_IN_UNREACHABLE
);
5530 gimple
*new_stmt
= gimple_build_call (fndecl
, 0);
5531 gimple_set_location (new_stmt
, gimple_location (stmt
));
5532 /* If the call had a SSA name as lhs morph that into
5533 an uninitialized value. */
5534 if (lhs
&& TREE_CODE (lhs
) == SSA_NAME
)
5536 tree var
= create_tmp_var (TREE_TYPE (lhs
));
5537 SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs
, var
);
5538 SSA_NAME_DEF_STMT (lhs
) = gimple_build_nop ();
5539 set_ssa_default_def (cfun
, var
, lhs
);
5541 gimple_move_vops (new_stmt
, stmt
);
5542 gsi_replace (gsi
, new_stmt
, false);
5549 /* Check for indirect calls that became direct calls, and then
5550 no longer require a static chain. */
5551 if (gimple_call_chain (stmt
))
5553 tree fn
= gimple_call_fndecl (stmt
);
5554 if (fn
&& !DECL_STATIC_CHAIN (fn
))
5556 gimple_call_set_chain (stmt
, NULL
);
5564 /* Check for builtins that CCP can handle using information not
5565 available in the generic fold routines. */
5566 if (gimple_call_builtin_p (stmt
, BUILT_IN_NORMAL
))
5568 if (gimple_fold_builtin (gsi
))
5571 else if (gimple_call_builtin_p (stmt
, BUILT_IN_MD
))
5573 changed
|= targetm
.gimple_fold_builtin (gsi
);
5575 else if (gimple_call_internal_p (stmt
))
5577 enum tree_code subcode
= ERROR_MARK
;
5578 tree result
= NULL_TREE
;
5579 bool cplx_result
= false;
5580 tree overflow
= NULL_TREE
;
5581 switch (gimple_call_internal_fn (stmt
))
5583 case IFN_BUILTIN_EXPECT
:
5584 result
= fold_builtin_expect (gimple_location (stmt
),
5585 gimple_call_arg (stmt
, 0),
5586 gimple_call_arg (stmt
, 1),
5587 gimple_call_arg (stmt
, 2),
5590 case IFN_UBSAN_OBJECT_SIZE
:
5592 tree offset
= gimple_call_arg (stmt
, 1);
5593 tree objsize
= gimple_call_arg (stmt
, 2);
5594 if (integer_all_onesp (objsize
)
5595 || (TREE_CODE (offset
) == INTEGER_CST
5596 && TREE_CODE (objsize
) == INTEGER_CST
5597 && tree_int_cst_le (offset
, objsize
)))
5599 replace_call_with_value (gsi
, NULL_TREE
);
5605 if (integer_zerop (gimple_call_arg (stmt
, 1)))
5607 replace_call_with_value (gsi
, NULL_TREE
);
5611 case IFN_UBSAN_BOUNDS
:
5613 tree index
= gimple_call_arg (stmt
, 1);
5614 tree bound
= gimple_call_arg (stmt
, 2);
5615 if (TREE_CODE (index
) == INTEGER_CST
5616 && TREE_CODE (bound
) == INTEGER_CST
)
5618 index
= fold_convert (TREE_TYPE (bound
), index
);
5619 if (TREE_CODE (index
) == INTEGER_CST
5620 && tree_int_cst_le (index
, bound
))
5622 replace_call_with_value (gsi
, NULL_TREE
);
5628 case IFN_GOACC_DIM_SIZE
:
5629 case IFN_GOACC_DIM_POS
:
5630 result
= fold_internal_goacc_dim (stmt
);
5632 case IFN_UBSAN_CHECK_ADD
:
5633 subcode
= PLUS_EXPR
;
5635 case IFN_UBSAN_CHECK_SUB
:
5636 subcode
= MINUS_EXPR
;
5638 case IFN_UBSAN_CHECK_MUL
:
5639 subcode
= MULT_EXPR
;
5641 case IFN_ADD_OVERFLOW
:
5642 subcode
= PLUS_EXPR
;
5645 case IFN_SUB_OVERFLOW
:
5646 subcode
= MINUS_EXPR
;
5649 case IFN_MUL_OVERFLOW
:
5650 subcode
= MULT_EXPR
;
5654 changed
|= gimple_fold_mask_load (gsi
, stmt
);
5656 case IFN_MASK_STORE
:
5657 changed
|= gimple_fold_mask_store (gsi
, stmt
);
5662 if (subcode
!= ERROR_MARK
)
5664 tree arg0
= gimple_call_arg (stmt
, 0);
5665 tree arg1
= gimple_call_arg (stmt
, 1);
5666 tree type
= TREE_TYPE (arg0
);
5669 tree lhs
= gimple_call_lhs (stmt
);
5670 if (lhs
== NULL_TREE
)
5673 type
= TREE_TYPE (TREE_TYPE (lhs
));
5675 if (type
== NULL_TREE
)
5677 /* x = y + 0; x = y - 0; x = y * 0; */
5678 else if (integer_zerop (arg1
))
5679 result
= subcode
== MULT_EXPR
? integer_zero_node
: arg0
;
5680 /* x = 0 + y; x = 0 * y; */
5681 else if (subcode
!= MINUS_EXPR
&& integer_zerop (arg0
))
5682 result
= subcode
== MULT_EXPR
? integer_zero_node
: arg1
;
5684 else if (subcode
== MINUS_EXPR
&& operand_equal_p (arg0
, arg1
, 0))
5685 result
= integer_zero_node
;
5686 /* x = y * 1; x = 1 * y; */
5687 else if (subcode
== MULT_EXPR
&& integer_onep (arg1
))
5689 else if (subcode
== MULT_EXPR
&& integer_onep (arg0
))
5691 else if (TREE_CODE (arg0
) == INTEGER_CST
5692 && TREE_CODE (arg1
) == INTEGER_CST
)
5695 result
= int_const_binop (subcode
, fold_convert (type
, arg0
),
5696 fold_convert (type
, arg1
));
5698 result
= int_const_binop (subcode
, arg0
, arg1
);
5699 if (result
&& arith_overflowed_p (subcode
, type
, arg0
, arg1
))
5702 overflow
= build_one_cst (type
);
5709 if (result
== integer_zero_node
)
5710 result
= build_zero_cst (type
);
5711 else if (cplx_result
&& TREE_TYPE (result
) != type
)
5713 if (TREE_CODE (result
) == INTEGER_CST
)
5715 if (arith_overflowed_p (PLUS_EXPR
, type
, result
,
5717 overflow
= build_one_cst (type
);
5719 else if ((!TYPE_UNSIGNED (TREE_TYPE (result
))
5720 && TYPE_UNSIGNED (type
))
5721 || (TYPE_PRECISION (type
)
5722 < (TYPE_PRECISION (TREE_TYPE (result
))
5723 + (TYPE_UNSIGNED (TREE_TYPE (result
))
5724 && !TYPE_UNSIGNED (type
)))))
5727 result
= fold_convert (type
, result
);
5734 if (TREE_CODE (result
) == INTEGER_CST
&& TREE_OVERFLOW (result
))
5735 result
= drop_tree_overflow (result
);
5738 if (overflow
== NULL_TREE
)
5739 overflow
= build_zero_cst (TREE_TYPE (result
));
5740 tree ctype
= build_complex_type (TREE_TYPE (result
));
5741 if (TREE_CODE (result
) == INTEGER_CST
5742 && TREE_CODE (overflow
) == INTEGER_CST
)
5743 result
= build_complex (ctype
, result
, overflow
);
5745 result
= build2_loc (gimple_location (stmt
), COMPLEX_EXPR
,
5746 ctype
, result
, overflow
);
5748 gimplify_and_update_call_from_tree (gsi
, result
);
5757 /* Return true whether NAME has a use on STMT. */
5760 has_use_on_stmt (tree name
, gimple
*stmt
)
5762 imm_use_iterator iter
;
5763 use_operand_p use_p
;
5764 FOR_EACH_IMM_USE_FAST (use_p
, iter
, name
)
5765 if (USE_STMT (use_p
) == stmt
)
5770 /* Worker for fold_stmt_1 dispatch to pattern based folding with
5773 Replaces *GSI with the simplification result in RCODE and OPS
5774 and the associated statements in *SEQ. Does the replacement
5775 according to INPLACE and returns true if the operation succeeded. */
5778 replace_stmt_with_simplification (gimple_stmt_iterator
*gsi
,
5779 gimple_match_op
*res_op
,
5780 gimple_seq
*seq
, bool inplace
)
5782 gimple
*stmt
= gsi_stmt (*gsi
);
5783 tree
*ops
= res_op
->ops
;
5784 unsigned int num_ops
= res_op
->num_ops
;
5786 /* Play safe and do not allow abnormals to be mentioned in
5787 newly created statements. See also maybe_push_res_to_seq.
5788 As an exception allow such uses if there was a use of the
5789 same SSA name on the old stmt. */
5790 for (unsigned int i
= 0; i
< num_ops
; ++i
)
5791 if (TREE_CODE (ops
[i
]) == SSA_NAME
5792 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops
[i
])
5793 && !has_use_on_stmt (ops
[i
], stmt
))
5796 if (num_ops
> 0 && COMPARISON_CLASS_P (ops
[0]))
5797 for (unsigned int i
= 0; i
< 2; ++i
)
5798 if (TREE_CODE (TREE_OPERAND (ops
[0], i
)) == SSA_NAME
5799 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops
[0], i
))
5800 && !has_use_on_stmt (TREE_OPERAND (ops
[0], i
), stmt
))
5803 /* Don't insert new statements when INPLACE is true, even if we could
5804 reuse STMT for the final statement. */
5805 if (inplace
&& !gimple_seq_empty_p (*seq
))
5808 if (gcond
*cond_stmt
= dyn_cast
<gcond
*> (stmt
))
5810 gcc_assert (res_op
->code
.is_tree_code ());
5811 if (TREE_CODE_CLASS ((enum tree_code
) res_op
->code
) == tcc_comparison
5812 /* GIMPLE_CONDs condition may not throw. */
5813 && (!flag_exceptions
5814 || !cfun
->can_throw_non_call_exceptions
5815 || !operation_could_trap_p (res_op
->code
,
5816 FLOAT_TYPE_P (TREE_TYPE (ops
[0])),
5818 gimple_cond_set_condition (cond_stmt
, res_op
->code
, ops
[0], ops
[1]);
5819 else if (res_op
->code
== SSA_NAME
)
5820 gimple_cond_set_condition (cond_stmt
, NE_EXPR
, ops
[0],
5821 build_zero_cst (TREE_TYPE (ops
[0])));
5822 else if (res_op
->code
== INTEGER_CST
)
5824 if (integer_zerop (ops
[0]))
5825 gimple_cond_make_false (cond_stmt
);
5827 gimple_cond_make_true (cond_stmt
);
5831 tree res
= maybe_push_res_to_seq (res_op
, seq
);
5834 gimple_cond_set_condition (cond_stmt
, NE_EXPR
, res
,
5835 build_zero_cst (TREE_TYPE (res
)));
5839 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
5841 fprintf (dump_file
, "gimple_simplified to ");
5842 if (!gimple_seq_empty_p (*seq
))
5843 print_gimple_seq (dump_file
, *seq
, 0, TDF_SLIM
);
5844 print_gimple_stmt (dump_file
, gsi_stmt (*gsi
),
5847 gsi_insert_seq_before (gsi
, *seq
, GSI_SAME_STMT
);
5850 else if (is_gimple_assign (stmt
)
5851 && res_op
->code
.is_tree_code ())
5854 || gimple_num_ops (stmt
) > get_gimple_rhs_num_ops (res_op
->code
))
5856 maybe_build_generic_op (res_op
);
5857 gimple_assign_set_rhs_with_ops (gsi
, res_op
->code
,
5858 res_op
->op_or_null (0),
5859 res_op
->op_or_null (1),
5860 res_op
->op_or_null (2));
5861 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
5863 fprintf (dump_file
, "gimple_simplified to ");
5864 if (!gimple_seq_empty_p (*seq
))
5865 print_gimple_seq (dump_file
, *seq
, 0, TDF_SLIM
);
5866 print_gimple_stmt (dump_file
, gsi_stmt (*gsi
),
5869 gsi_insert_seq_before (gsi
, *seq
, GSI_SAME_STMT
);
5873 else if (res_op
->code
.is_fn_code ()
5874 && gimple_call_combined_fn (stmt
) == res_op
->code
)
5876 gcc_assert (num_ops
== gimple_call_num_args (stmt
));
5877 for (unsigned int i
= 0; i
< num_ops
; ++i
)
5878 gimple_call_set_arg (stmt
, i
, ops
[i
]);
5879 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
5881 fprintf (dump_file
, "gimple_simplified to ");
5882 if (!gimple_seq_empty_p (*seq
))
5883 print_gimple_seq (dump_file
, *seq
, 0, TDF_SLIM
);
5884 print_gimple_stmt (dump_file
, gsi_stmt (*gsi
), 0, TDF_SLIM
);
5886 gsi_insert_seq_before (gsi
, *seq
, GSI_SAME_STMT
);
5891 if (gimple_has_lhs (stmt
))
5893 tree lhs
= gimple_get_lhs (stmt
);
5894 if (!maybe_push_res_to_seq (res_op
, seq
, lhs
))
5896 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
5898 fprintf (dump_file
, "gimple_simplified to ");
5899 print_gimple_seq (dump_file
, *seq
, 0, TDF_SLIM
);
5901 gsi_replace_with_seq_vops (gsi
, *seq
);
5911 /* Canonicalize MEM_REFs invariant address operand after propagation. */
5914 maybe_canonicalize_mem_ref_addr (tree
*t
, bool is_debug
= false)
5919 if (TREE_CODE (*t
) == ADDR_EXPR
)
5920 t
= &TREE_OPERAND (*t
, 0);
5922 /* The C and C++ frontends use an ARRAY_REF for indexing with their
5923 generic vector extension. The actual vector referenced is
5924 view-converted to an array type for this purpose. If the index
5925 is constant the canonical representation in the middle-end is a
5926 BIT_FIELD_REF so re-write the former to the latter here. */
5927 if (TREE_CODE (*t
) == ARRAY_REF
5928 && TREE_CODE (TREE_OPERAND (*t
, 0)) == VIEW_CONVERT_EXPR
5929 && TREE_CODE (TREE_OPERAND (*t
, 1)) == INTEGER_CST
5930 && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t
, 0), 0))))
5932 tree vtype
= TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t
, 0), 0));
5933 if (VECTOR_TYPE_P (vtype
))
5935 tree low
= array_ref_low_bound (*t
);
5936 if (TREE_CODE (low
) == INTEGER_CST
)
5938 if (tree_int_cst_le (low
, TREE_OPERAND (*t
, 1)))
5940 widest_int idx
= wi::sub (wi::to_widest (TREE_OPERAND (*t
, 1)),
5941 wi::to_widest (low
));
5942 idx
= wi::mul (idx
, wi::to_widest
5943 (TYPE_SIZE (TREE_TYPE (*t
))));
5945 = wi::add (idx
, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t
))));
5946 if (wi::les_p (ext
, wi::to_widest (TYPE_SIZE (vtype
))))
5948 *t
= build3_loc (EXPR_LOCATION (*t
), BIT_FIELD_REF
,
5950 TREE_OPERAND (TREE_OPERAND (*t
, 0), 0),
5951 TYPE_SIZE (TREE_TYPE (*t
)),
5952 wide_int_to_tree (bitsizetype
, idx
));
5960 while (handled_component_p (*t
))
5961 t
= &TREE_OPERAND (*t
, 0);
5963 /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
5964 of invariant addresses into a SSA name MEM_REF address. */
5965 if (TREE_CODE (*t
) == MEM_REF
5966 || TREE_CODE (*t
) == TARGET_MEM_REF
)
5968 tree addr
= TREE_OPERAND (*t
, 0);
5969 if (TREE_CODE (addr
) == ADDR_EXPR
5970 && (TREE_CODE (TREE_OPERAND (addr
, 0)) == MEM_REF
5971 || handled_component_p (TREE_OPERAND (addr
, 0))))
5975 base
= get_addr_base_and_unit_offset (TREE_OPERAND (addr
, 0),
5984 TREE_OPERAND (*t
, 0) = build_fold_addr_expr (base
);
5985 TREE_OPERAND (*t
, 1) = int_const_binop (PLUS_EXPR
,
5986 TREE_OPERAND (*t
, 1),
5987 size_int (coffset
));
5990 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t
, 0)) == DEBUG_EXPR_DECL
5991 || is_gimple_mem_ref_addr (TREE_OPERAND (*t
, 0)));
5994 /* Canonicalize back MEM_REFs to plain reference trees if the object
5995 accessed is a decl that has the same access semantics as the MEM_REF. */
5996 if (TREE_CODE (*t
) == MEM_REF
5997 && TREE_CODE (TREE_OPERAND (*t
, 0)) == ADDR_EXPR
5998 && integer_zerop (TREE_OPERAND (*t
, 1))
5999 && MR_DEPENDENCE_CLIQUE (*t
) == 0)
6001 tree decl
= TREE_OPERAND (TREE_OPERAND (*t
, 0), 0);
6002 tree alias_type
= TREE_TYPE (TREE_OPERAND (*t
, 1));
6003 if (/* Same volatile qualification. */
6004 TREE_THIS_VOLATILE (*t
) == TREE_THIS_VOLATILE (decl
)
6005 /* Same TBAA behavior with -fstrict-aliasing. */
6006 && !TYPE_REF_CAN_ALIAS_ALL (alias_type
)
6007 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl
))
6008 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type
)))
6009 /* Same alignment. */
6010 && TYPE_ALIGN (TREE_TYPE (decl
)) == TYPE_ALIGN (TREE_TYPE (*t
))
6011 /* We have to look out here to not drop a required conversion
6012 from the rhs to the lhs if *t appears on the lhs or vice-versa
6013 if it appears on the rhs. Thus require strict type
6015 && types_compatible_p (TREE_TYPE (*t
), TREE_TYPE (decl
)))
6017 *t
= TREE_OPERAND (TREE_OPERAND (*t
, 0), 0);
6022 else if (TREE_CODE (*orig_t
) == ADDR_EXPR
6023 && TREE_CODE (*t
) == MEM_REF
6024 && TREE_CODE (TREE_OPERAND (*t
, 0)) == INTEGER_CST
)
6028 base
= get_addr_base_and_unit_offset (TREE_OPERAND (*orig_t
, 0),
6032 gcc_assert (TREE_CODE (base
) == MEM_REF
);
6034 if (mem_ref_offset (base
).to_shwi (&moffset
))
6037 if (wi::to_poly_wide (TREE_OPERAND (base
, 0)).to_shwi (&moffset
))
6040 *orig_t
= build_int_cst (TREE_TYPE (*orig_t
), coffset
);
6047 /* Canonicalize TARGET_MEM_REF in particular with respect to
6048 the indexes becoming constant. */
6049 else if (TREE_CODE (*t
) == TARGET_MEM_REF
)
6051 tree tem
= maybe_fold_tmr (*t
);
6055 if (TREE_CODE (*orig_t
) == ADDR_EXPR
)
6056 recompute_tree_invariant_for_addr_expr (*orig_t
);
6064 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
6065 distinguishes both cases. */
6068 fold_stmt_1 (gimple_stmt_iterator
*gsi
, bool inplace
, tree (*valueize
) (tree
))
6070 bool changed
= false;
6071 gimple
*stmt
= gsi_stmt (*gsi
);
6072 bool nowarning
= warning_suppressed_p (stmt
, OPT_Wstrict_overflow
);
6074 fold_defer_overflow_warnings ();
6076 /* First do required canonicalization of [TARGET_]MEM_REF addresses
6078 ??? This shouldn't be done in generic folding but in the
6079 propagation helpers which also know whether an address was
6081 Also canonicalize operand order. */
6082 switch (gimple_code (stmt
))
6085 if (gimple_assign_rhs_class (stmt
) == GIMPLE_SINGLE_RHS
)
6087 tree
*rhs
= gimple_assign_rhs1_ptr (stmt
);
6088 if ((REFERENCE_CLASS_P (*rhs
)
6089 || TREE_CODE (*rhs
) == ADDR_EXPR
)
6090 && maybe_canonicalize_mem_ref_addr (rhs
))
6092 tree
*lhs
= gimple_assign_lhs_ptr (stmt
);
6093 if (REFERENCE_CLASS_P (*lhs
)
6094 && maybe_canonicalize_mem_ref_addr (lhs
))
6099 /* Canonicalize operand order. */
6100 enum tree_code code
= gimple_assign_rhs_code (stmt
);
6101 if (TREE_CODE_CLASS (code
) == tcc_comparison
6102 || commutative_tree_code (code
)
6103 || commutative_ternary_tree_code (code
))
6105 tree rhs1
= gimple_assign_rhs1 (stmt
);
6106 tree rhs2
= gimple_assign_rhs2 (stmt
);
6107 if (tree_swap_operands_p (rhs1
, rhs2
))
6109 gimple_assign_set_rhs1 (stmt
, rhs2
);
6110 gimple_assign_set_rhs2 (stmt
, rhs1
);
6111 if (TREE_CODE_CLASS (code
) == tcc_comparison
)
6112 gimple_assign_set_rhs_code (stmt
,
6113 swap_tree_comparison (code
));
6121 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
6123 tree
*arg
= gimple_call_arg_ptr (stmt
, i
);
6124 if (REFERENCE_CLASS_P (*arg
)
6125 && maybe_canonicalize_mem_ref_addr (arg
))
6128 tree
*lhs
= gimple_call_lhs_ptr (stmt
);
6130 && REFERENCE_CLASS_P (*lhs
)
6131 && maybe_canonicalize_mem_ref_addr (lhs
))
6137 gasm
*asm_stmt
= as_a
<gasm
*> (stmt
);
6138 for (i
= 0; i
< gimple_asm_noutputs (asm_stmt
); ++i
)
6140 tree link
= gimple_asm_output_op (asm_stmt
, i
);
6141 tree op
= TREE_VALUE (link
);
6142 if (REFERENCE_CLASS_P (op
)
6143 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link
)))
6146 for (i
= 0; i
< gimple_asm_ninputs (asm_stmt
); ++i
)
6148 tree link
= gimple_asm_input_op (asm_stmt
, i
);
6149 tree op
= TREE_VALUE (link
);
6150 if ((REFERENCE_CLASS_P (op
)
6151 || TREE_CODE (op
) == ADDR_EXPR
)
6152 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link
)))
6158 if (gimple_debug_bind_p (stmt
))
6160 tree
*val
= gimple_debug_bind_get_value_ptr (stmt
);
6162 && (REFERENCE_CLASS_P (*val
)
6163 || TREE_CODE (*val
) == ADDR_EXPR
)
6164 && maybe_canonicalize_mem_ref_addr (val
, true))
6170 /* Canonicalize operand order. */
6171 tree lhs
= gimple_cond_lhs (stmt
);
6172 tree rhs
= gimple_cond_rhs (stmt
);
6173 if (tree_swap_operands_p (lhs
, rhs
))
6175 gcond
*gc
= as_a
<gcond
*> (stmt
);
6176 gimple_cond_set_lhs (gc
, rhs
);
6177 gimple_cond_set_rhs (gc
, lhs
);
6178 gimple_cond_set_code (gc
,
6179 swap_tree_comparison (gimple_cond_code (gc
)));
6186 /* Dispatch to pattern-based folding. */
6188 || is_gimple_assign (stmt
)
6189 || gimple_code (stmt
) == GIMPLE_COND
)
6191 gimple_seq seq
= NULL
;
6192 gimple_match_op res_op
;
6193 if (gimple_simplify (stmt
, &res_op
, inplace
? NULL
: &seq
,
6194 valueize
, valueize
))
6196 if (replace_stmt_with_simplification (gsi
, &res_op
, &seq
, inplace
))
6199 gimple_seq_discard (seq
);
6203 stmt
= gsi_stmt (*gsi
);
6205 /* Fold the main computation performed by the statement. */
6206 switch (gimple_code (stmt
))
6210 /* Try to canonicalize for boolean-typed X the comparisons
6211 X == 0, X == 1, X != 0, and X != 1. */
6212 if (gimple_assign_rhs_code (stmt
) == EQ_EXPR
6213 || gimple_assign_rhs_code (stmt
) == NE_EXPR
)
6215 tree lhs
= gimple_assign_lhs (stmt
);
6216 tree op1
= gimple_assign_rhs1 (stmt
);
6217 tree op2
= gimple_assign_rhs2 (stmt
);
6218 tree type
= TREE_TYPE (op1
);
6220 /* Check whether the comparison operands are of the same boolean
6221 type as the result type is.
6222 Check that second operand is an integer-constant with value
6224 if (TREE_CODE (op2
) == INTEGER_CST
6225 && (integer_zerop (op2
) || integer_onep (op2
))
6226 && useless_type_conversion_p (TREE_TYPE (lhs
), type
))
6228 enum tree_code cmp_code
= gimple_assign_rhs_code (stmt
);
6229 bool is_logical_not
= false;
6231 /* X == 0 and X != 1 is a logical-not.of X
6232 X == 1 and X != 0 is X */
6233 if ((cmp_code
== EQ_EXPR
&& integer_zerop (op2
))
6234 || (cmp_code
== NE_EXPR
&& integer_onep (op2
)))
6235 is_logical_not
= true;
6237 if (is_logical_not
== false)
6238 gimple_assign_set_rhs_with_ops (gsi
, TREE_CODE (op1
), op1
);
6239 /* Only for one-bit precision typed X the transformation
6240 !X -> ~X is valied. */
6241 else if (TYPE_PRECISION (type
) == 1)
6242 gimple_assign_set_rhs_with_ops (gsi
, BIT_NOT_EXPR
, op1
);
6243 /* Otherwise we use !X -> X ^ 1. */
6245 gimple_assign_set_rhs_with_ops (gsi
, BIT_XOR_EXPR
, op1
,
6246 build_int_cst (type
, 1));
6252 unsigned old_num_ops
= gimple_num_ops (stmt
);
6253 tree lhs
= gimple_assign_lhs (stmt
);
6254 tree new_rhs
= fold_gimple_assign (gsi
);
6256 && !useless_type_conversion_p (TREE_TYPE (lhs
),
6257 TREE_TYPE (new_rhs
)))
6258 new_rhs
= fold_convert (TREE_TYPE (lhs
), new_rhs
);
6261 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs
)) < old_num_ops
))
6263 gimple_assign_set_rhs_from_tree (gsi
, new_rhs
);
6270 changed
|= gimple_fold_call (gsi
, inplace
);
6274 if (gimple_debug_bind_p (stmt
))
6276 tree val
= gimple_debug_bind_get_value (stmt
);
6278 && REFERENCE_CLASS_P (val
))
6280 tree tem
= maybe_fold_reference (val
);
6283 gimple_debug_bind_set_value (stmt
, tem
);
6288 && TREE_CODE (val
) == ADDR_EXPR
)
6290 tree ref
= TREE_OPERAND (val
, 0);
6291 tree tem
= maybe_fold_reference (ref
);
6294 tem
= build_fold_addr_expr_with_type (tem
, TREE_TYPE (val
));
6295 gimple_debug_bind_set_value (stmt
, tem
);
6304 greturn
*ret_stmt
= as_a
<greturn
*> (stmt
);
6305 tree ret
= gimple_return_retval(ret_stmt
);
6307 if (ret
&& TREE_CODE (ret
) == SSA_NAME
&& valueize
)
6309 tree val
= valueize (ret
);
6310 if (val
&& val
!= ret
6311 && may_propagate_copy (ret
, val
))
6313 gimple_return_set_retval (ret_stmt
, val
);
6323 stmt
= gsi_stmt (*gsi
);
6325 fold_undefer_overflow_warnings (changed
&& !nowarning
, stmt
, 0);
6329 /* Valueziation callback that ends up not following SSA edges. */
6332 no_follow_ssa_edges (tree
)
6337 /* Valueization callback that ends up following single-use SSA edges only. */
6340 follow_single_use_edges (tree val
)
6342 if (TREE_CODE (val
) == SSA_NAME
6343 && !has_single_use (val
))
6348 /* Valueization callback that follows all SSA edges. */
6351 follow_all_ssa_edges (tree val
)
6356 /* Fold the statement pointed to by GSI. In some cases, this function may
6357 replace the whole statement with a new one. Returns true iff folding
6359 The statement pointed to by GSI should be in valid gimple form but may
6360 be in unfolded state as resulting from for example constant propagation
6361 which can produce *&x = 0. */
6364 fold_stmt (gimple_stmt_iterator
*gsi
)
6366 return fold_stmt_1 (gsi
, false, no_follow_ssa_edges
);
6370 fold_stmt (gimple_stmt_iterator
*gsi
, tree (*valueize
) (tree
))
6372 return fold_stmt_1 (gsi
, false, valueize
);
6375 /* Perform the minimal folding on statement *GSI. Only operations like
6376 *&x created by constant propagation are handled. The statement cannot
6377 be replaced with a new one. Return true if the statement was
6378 changed, false otherwise.
6379 The statement *GSI should be in valid gimple form but may
6380 be in unfolded state as resulting from for example constant propagation
6381 which can produce *&x = 0. */
6384 fold_stmt_inplace (gimple_stmt_iterator
*gsi
)
6386 gimple
*stmt
= gsi_stmt (*gsi
);
6387 bool changed
= fold_stmt_1 (gsi
, true, no_follow_ssa_edges
);
6388 gcc_assert (gsi_stmt (*gsi
) == stmt
);
6392 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
6393 if EXPR is null or we don't know how.
6394 If non-null, the result always has boolean type. */
6397 canonicalize_bool (tree expr
, bool invert
)
6403 if (integer_nonzerop (expr
))
6404 return boolean_false_node
;
6405 else if (integer_zerop (expr
))
6406 return boolean_true_node
;
6407 else if (TREE_CODE (expr
) == SSA_NAME
)
6408 return fold_build2 (EQ_EXPR
, boolean_type_node
, expr
,
6409 build_int_cst (TREE_TYPE (expr
), 0));
6410 else if (COMPARISON_CLASS_P (expr
))
6411 return fold_build2 (invert_tree_comparison (TREE_CODE (expr
), false),
6413 TREE_OPERAND (expr
, 0),
6414 TREE_OPERAND (expr
, 1));
6420 if (TREE_CODE (TREE_TYPE (expr
)) == BOOLEAN_TYPE
)
6422 if (integer_nonzerop (expr
))
6423 return boolean_true_node
;
6424 else if (integer_zerop (expr
))
6425 return boolean_false_node
;
6426 else if (TREE_CODE (expr
) == SSA_NAME
)
6427 return fold_build2 (NE_EXPR
, boolean_type_node
, expr
,
6428 build_int_cst (TREE_TYPE (expr
), 0));
6429 else if (COMPARISON_CLASS_P (expr
))
6430 return fold_build2 (TREE_CODE (expr
),
6432 TREE_OPERAND (expr
, 0),
6433 TREE_OPERAND (expr
, 1));
6439 /* Check to see if a boolean expression EXPR is logically equivalent to the
6440 comparison (OP1 CODE OP2). Check for various identities involving
6444 same_bool_comparison_p (const_tree expr
, enum tree_code code
,
6445 const_tree op1
, const_tree op2
)
6449 /* The obvious case. */
6450 if (TREE_CODE (expr
) == code
6451 && operand_equal_p (TREE_OPERAND (expr
, 0), op1
, 0)
6452 && operand_equal_p (TREE_OPERAND (expr
, 1), op2
, 0))
6455 /* Check for comparing (name, name != 0) and the case where expr
6456 is an SSA_NAME with a definition matching the comparison. */
6457 if (TREE_CODE (expr
) == SSA_NAME
6458 && TREE_CODE (TREE_TYPE (expr
)) == BOOLEAN_TYPE
)
6460 if (operand_equal_p (expr
, op1
, 0))
6461 return ((code
== NE_EXPR
&& integer_zerop (op2
))
6462 || (code
== EQ_EXPR
&& integer_nonzerop (op2
)));
6463 s
= SSA_NAME_DEF_STMT (expr
);
6464 if (is_gimple_assign (s
)
6465 && gimple_assign_rhs_code (s
) == code
6466 && operand_equal_p (gimple_assign_rhs1 (s
), op1
, 0)
6467 && operand_equal_p (gimple_assign_rhs2 (s
), op2
, 0))
6471 /* If op1 is of the form (name != 0) or (name == 0), and the definition
6472 of name is a comparison, recurse. */
6473 if (TREE_CODE (op1
) == SSA_NAME
6474 && TREE_CODE (TREE_TYPE (op1
)) == BOOLEAN_TYPE
)
6476 s
= SSA_NAME_DEF_STMT (op1
);
6477 if (is_gimple_assign (s
)
6478 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
)
6480 enum tree_code c
= gimple_assign_rhs_code (s
);
6481 if ((c
== NE_EXPR
&& integer_zerop (op2
))
6482 || (c
== EQ_EXPR
&& integer_nonzerop (op2
)))
6483 return same_bool_comparison_p (expr
, c
,
6484 gimple_assign_rhs1 (s
),
6485 gimple_assign_rhs2 (s
));
6486 if ((c
== EQ_EXPR
&& integer_zerop (op2
))
6487 || (c
== NE_EXPR
&& integer_nonzerop (op2
)))
6488 return same_bool_comparison_p (expr
,
6489 invert_tree_comparison (c
, false),
6490 gimple_assign_rhs1 (s
),
6491 gimple_assign_rhs2 (s
));
6497 /* Check to see if two boolean expressions OP1 and OP2 are logically
6501 same_bool_result_p (const_tree op1
, const_tree op2
)
6503 /* Simple cases first. */
6504 if (operand_equal_p (op1
, op2
, 0))
6507 /* Check the cases where at least one of the operands is a comparison.
6508 These are a bit smarter than operand_equal_p in that they apply some
6509 identifies on SSA_NAMEs. */
6510 if (COMPARISON_CLASS_P (op2
)
6511 && same_bool_comparison_p (op1
, TREE_CODE (op2
),
6512 TREE_OPERAND (op2
, 0),
6513 TREE_OPERAND (op2
, 1)))
6515 if (COMPARISON_CLASS_P (op1
)
6516 && same_bool_comparison_p (op2
, TREE_CODE (op1
),
6517 TREE_OPERAND (op1
, 0),
6518 TREE_OPERAND (op1
, 1)))
6525 /* Forward declarations for some mutually recursive functions. */
6528 and_comparisons_1 (tree type
, enum tree_code code1
, tree op1a
, tree op1b
,
6529 enum tree_code code2
, tree op2a
, tree op2b
);
6531 and_var_with_comparison (tree type
, tree var
, bool invert
,
6532 enum tree_code code2
, tree op2a
, tree op2b
);
6534 and_var_with_comparison_1 (tree type
, gimple
*stmt
,
6535 enum tree_code code2
, tree op2a
, tree op2b
);
6537 or_comparisons_1 (tree
, enum tree_code code1
, tree op1a
, tree op1b
,
6538 enum tree_code code2
, tree op2a
, tree op2b
);
6540 or_var_with_comparison (tree
, tree var
, bool invert
,
6541 enum tree_code code2
, tree op2a
, tree op2b
);
6543 or_var_with_comparison_1 (tree
, gimple
*stmt
,
6544 enum tree_code code2
, tree op2a
, tree op2b
);
6546 /* Helper function for and_comparisons_1: try to simplify the AND of the
6547 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
6548 If INVERT is true, invert the value of the VAR before doing the AND.
6549 Return NULL_EXPR if we can't simplify this to a single expression. */
6552 and_var_with_comparison (tree type
, tree var
, bool invert
,
6553 enum tree_code code2
, tree op2a
, tree op2b
)
6556 gimple
*stmt
= SSA_NAME_DEF_STMT (var
);
6558 /* We can only deal with variables whose definitions are assignments. */
6559 if (!is_gimple_assign (stmt
))
6562 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
6563 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
6564 Then we only have to consider the simpler non-inverted cases. */
6566 t
= or_var_with_comparison_1 (type
, stmt
,
6567 invert_tree_comparison (code2
, false),
6570 t
= and_var_with_comparison_1 (type
, stmt
, code2
, op2a
, op2b
);
6571 return canonicalize_bool (t
, invert
);
6574 /* Try to simplify the AND of the ssa variable defined by the assignment
6575 STMT with the comparison specified by (OP2A CODE2 OP2B).
6576 Return NULL_EXPR if we can't simplify this to a single expression. */
6579 and_var_with_comparison_1 (tree type
, gimple
*stmt
,
6580 enum tree_code code2
, tree op2a
, tree op2b
)
6582 tree var
= gimple_assign_lhs (stmt
);
6583 tree true_test_var
= NULL_TREE
;
6584 tree false_test_var
= NULL_TREE
;
6585 enum tree_code innercode
= gimple_assign_rhs_code (stmt
);
6587 /* Check for identities like (var AND (var == 0)) => false. */
6588 if (TREE_CODE (op2a
) == SSA_NAME
6589 && TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
)
6591 if ((code2
== NE_EXPR
&& integer_zerop (op2b
))
6592 || (code2
== EQ_EXPR
&& integer_nonzerop (op2b
)))
6594 true_test_var
= op2a
;
6595 if (var
== true_test_var
)
6598 else if ((code2
== EQ_EXPR
&& integer_zerop (op2b
))
6599 || (code2
== NE_EXPR
&& integer_nonzerop (op2b
)))
6601 false_test_var
= op2a
;
6602 if (var
== false_test_var
)
6603 return boolean_false_node
;
6607 /* If the definition is a comparison, recurse on it. */
6608 if (TREE_CODE_CLASS (innercode
) == tcc_comparison
)
6610 tree t
= and_comparisons_1 (type
, innercode
,
6611 gimple_assign_rhs1 (stmt
),
6612 gimple_assign_rhs2 (stmt
),
6620 /* If the definition is an AND or OR expression, we may be able to
6621 simplify by reassociating. */
6622 if (TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
6623 && (innercode
== BIT_AND_EXPR
|| innercode
== BIT_IOR_EXPR
))
6625 tree inner1
= gimple_assign_rhs1 (stmt
);
6626 tree inner2
= gimple_assign_rhs2 (stmt
);
6629 tree partial
= NULL_TREE
;
6630 bool is_and
= (innercode
== BIT_AND_EXPR
);
6632 /* Check for boolean identities that don't require recursive examination
6634 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
6635 inner1 AND (inner1 OR inner2) => inner1
6636 !inner1 AND (inner1 AND inner2) => false
6637 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
6638 Likewise for similar cases involving inner2. */
6639 if (inner1
== true_test_var
)
6640 return (is_and
? var
: inner1
);
6641 else if (inner2
== true_test_var
)
6642 return (is_and
? var
: inner2
);
6643 else if (inner1
== false_test_var
)
6645 ? boolean_false_node
6646 : and_var_with_comparison (type
, inner2
, false, code2
, op2a
,
6648 else if (inner2
== false_test_var
)
6650 ? boolean_false_node
6651 : and_var_with_comparison (type
, inner1
, false, code2
, op2a
,
6654 /* Next, redistribute/reassociate the AND across the inner tests.
6655 Compute the first partial result, (inner1 AND (op2a code op2b)) */
6656 if (TREE_CODE (inner1
) == SSA_NAME
6657 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner1
))
6658 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
6659 && (t
= maybe_fold_and_comparisons (type
, gimple_assign_rhs_code (s
),
6660 gimple_assign_rhs1 (s
),
6661 gimple_assign_rhs2 (s
),
6662 code2
, op2a
, op2b
)))
6664 /* Handle the AND case, where we are reassociating:
6665 (inner1 AND inner2) AND (op2a code2 op2b)
6667 If the partial result t is a constant, we win. Otherwise
6668 continue on to try reassociating with the other inner test. */
6671 if (integer_onep (t
))
6673 else if (integer_zerop (t
))
6674 return boolean_false_node
;
6677 /* Handle the OR case, where we are redistributing:
6678 (inner1 OR inner2) AND (op2a code2 op2b)
6679 => (t OR (inner2 AND (op2a code2 op2b))) */
6680 else if (integer_onep (t
))
6681 return boolean_true_node
;
6683 /* Save partial result for later. */
6687 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
6688 if (TREE_CODE (inner2
) == SSA_NAME
6689 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner2
))
6690 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
6691 && (t
= maybe_fold_and_comparisons (type
, gimple_assign_rhs_code (s
),
6692 gimple_assign_rhs1 (s
),
6693 gimple_assign_rhs2 (s
),
6694 code2
, op2a
, op2b
)))
6696 /* Handle the AND case, where we are reassociating:
6697 (inner1 AND inner2) AND (op2a code2 op2b)
6698 => (inner1 AND t) */
6701 if (integer_onep (t
))
6703 else if (integer_zerop (t
))
6704 return boolean_false_node
;
6705 /* If both are the same, we can apply the identity
6707 else if (partial
&& same_bool_result_p (t
, partial
))
6711 /* Handle the OR case. where we are redistributing:
6712 (inner1 OR inner2) AND (op2a code2 op2b)
6713 => (t OR (inner1 AND (op2a code2 op2b)))
6714 => (t OR partial) */
6717 if (integer_onep (t
))
6718 return boolean_true_node
;
6721 /* We already got a simplification for the other
6722 operand to the redistributed OR expression. The
6723 interesting case is when at least one is false.
6724 Or, if both are the same, we can apply the identity
6726 if (integer_zerop (partial
))
6728 else if (integer_zerop (t
))
6730 else if (same_bool_result_p (t
, partial
))
6739 /* Try to simplify the AND of two comparisons defined by
6740 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
6741 If this can be done without constructing an intermediate value,
6742 return the resulting tree; otherwise NULL_TREE is returned.
6743 This function is deliberately asymmetric as it recurses on SSA_DEFs
6744 in the first comparison but not the second. */
6747 and_comparisons_1 (tree type
, enum tree_code code1
, tree op1a
, tree op1b
,
6748 enum tree_code code2
, tree op2a
, tree op2b
)
6750 tree truth_type
= truth_type_for (TREE_TYPE (op1a
));
6752 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
6753 if (operand_equal_p (op1a
, op2a
, 0)
6754 && operand_equal_p (op1b
, op2b
, 0))
6756 /* Result will be either NULL_TREE, or a combined comparison. */
6757 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
6758 TRUTH_ANDIF_EXPR
, code1
, code2
,
6759 truth_type
, op1a
, op1b
);
6764 /* Likewise the swapped case of the above. */
6765 if (operand_equal_p (op1a
, op2b
, 0)
6766 && operand_equal_p (op1b
, op2a
, 0))
6768 /* Result will be either NULL_TREE, or a combined comparison. */
6769 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
6770 TRUTH_ANDIF_EXPR
, code1
,
6771 swap_tree_comparison (code2
),
6772 truth_type
, op1a
, op1b
);
6777 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
6778 NAME's definition is a truth value. See if there are any simplifications
6779 that can be done against the NAME's definition. */
6780 if (TREE_CODE (op1a
) == SSA_NAME
6781 && (code1
== NE_EXPR
|| code1
== EQ_EXPR
)
6782 && (integer_zerop (op1b
) || integer_onep (op1b
)))
6784 bool invert
= ((code1
== EQ_EXPR
&& integer_zerop (op1b
))
6785 || (code1
== NE_EXPR
&& integer_onep (op1b
)));
6786 gimple
*stmt
= SSA_NAME_DEF_STMT (op1a
);
6787 switch (gimple_code (stmt
))
6790 /* Try to simplify by copy-propagating the definition. */
6791 return and_var_with_comparison (type
, op1a
, invert
, code2
, op2a
,
6795 /* If every argument to the PHI produces the same result when
6796 ANDed with the second comparison, we win.
6797 Do not do this unless the type is bool since we need a bool
6798 result here anyway. */
6799 if (TREE_CODE (TREE_TYPE (op1a
)) == BOOLEAN_TYPE
)
6801 tree result
= NULL_TREE
;
6803 for (i
= 0; i
< gimple_phi_num_args (stmt
); i
++)
6805 tree arg
= gimple_phi_arg_def (stmt
, i
);
6807 /* If this PHI has itself as an argument, ignore it.
6808 If all the other args produce the same result,
6810 if (arg
== gimple_phi_result (stmt
))
6812 else if (TREE_CODE (arg
) == INTEGER_CST
)
6814 if (invert
? integer_nonzerop (arg
) : integer_zerop (arg
))
6817 result
= boolean_false_node
;
6818 else if (!integer_zerop (result
))
6822 result
= fold_build2 (code2
, boolean_type_node
,
6824 else if (!same_bool_comparison_p (result
,
6828 else if (TREE_CODE (arg
) == SSA_NAME
6829 && !SSA_NAME_IS_DEFAULT_DEF (arg
))
6832 gimple
*def_stmt
= SSA_NAME_DEF_STMT (arg
);
6833 /* In simple cases we can look through PHI nodes,
6834 but we have to be careful with loops.
6836 if (! dom_info_available_p (CDI_DOMINATORS
)
6837 || gimple_bb (def_stmt
) == gimple_bb (stmt
)
6838 || dominated_by_p (CDI_DOMINATORS
,
6839 gimple_bb (def_stmt
),
6842 temp
= and_var_with_comparison (type
, arg
, invert
, code2
,
6848 else if (!same_bool_result_p (result
, temp
))
6864 /* Helper function for maybe_fold_and_comparisons and maybe_fold_or_comparisons
6865 : try to simplify the AND/OR of the ssa variable VAR with the comparison
6866 specified by (OP2A CODE2 OP2B) from match.pd. Return NULL_EXPR if we can't
6867 simplify this to a single expression. As we are going to lower the cost
6868 of building SSA names / gimple stmts significantly, we need to allocate
6869 them ont the stack. This will cause the code to be a bit ugly. */
6872 maybe_fold_comparisons_from_match_pd (tree type
, enum tree_code code
,
6873 enum tree_code code1
,
6874 tree op1a
, tree op1b
,
6875 enum tree_code code2
, tree op2a
,
6878 /* Allocate gimple stmt1 on the stack. */
6880 = (gassign
*) XALLOCAVEC (char, gimple_size (GIMPLE_ASSIGN
, 3));
6881 gimple_init (stmt1
, GIMPLE_ASSIGN
, 3);
6882 gimple_assign_set_rhs_code (stmt1
, code1
);
6883 gimple_assign_set_rhs1 (stmt1
, op1a
);
6884 gimple_assign_set_rhs2 (stmt1
, op1b
);
6886 /* Allocate gimple stmt2 on the stack. */
6888 = (gassign
*) XALLOCAVEC (char, gimple_size (GIMPLE_ASSIGN
, 3));
6889 gimple_init (stmt2
, GIMPLE_ASSIGN
, 3);
6890 gimple_assign_set_rhs_code (stmt2
, code2
);
6891 gimple_assign_set_rhs1 (stmt2
, op2a
);
6892 gimple_assign_set_rhs2 (stmt2
, op2b
);
6894 /* Allocate SSA names(lhs1) on the stack. */
6895 tree lhs1
= (tree
)XALLOCA (tree_ssa_name
);
6896 memset (lhs1
, 0, sizeof (tree_ssa_name
));
6897 TREE_SET_CODE (lhs1
, SSA_NAME
);
6898 TREE_TYPE (lhs1
) = type
;
6899 init_ssa_name_imm_use (lhs1
);
6901 /* Allocate SSA names(lhs2) on the stack. */
6902 tree lhs2
= (tree
)XALLOCA (tree_ssa_name
);
6903 memset (lhs2
, 0, sizeof (tree_ssa_name
));
6904 TREE_SET_CODE (lhs2
, SSA_NAME
);
6905 TREE_TYPE (lhs2
) = type
;
6906 init_ssa_name_imm_use (lhs2
);
6908 gimple_assign_set_lhs (stmt1
, lhs1
);
6909 gimple_assign_set_lhs (stmt2
, lhs2
);
6911 gimple_match_op
op (gimple_match_cond::UNCOND
, code
,
6912 type
, gimple_assign_lhs (stmt1
),
6913 gimple_assign_lhs (stmt2
));
6914 if (op
.resimplify (NULL
, follow_all_ssa_edges
))
6916 if (gimple_simplified_result_is_gimple_val (&op
))
6918 tree res
= op
.ops
[0];
6920 return build2 (code1
, type
, op1a
, op1b
);
6921 else if (res
== lhs2
)
6922 return build2 (code2
, type
, op2a
, op2b
);
6926 else if (op
.code
.is_tree_code ()
6927 && TREE_CODE_CLASS ((tree_code
)op
.code
) == tcc_comparison
)
6929 tree op0
= op
.ops
[0];
6930 tree op1
= op
.ops
[1];
6931 if (op0
== lhs1
|| op0
== lhs2
|| op1
== lhs1
|| op1
== lhs2
)
6932 return NULL_TREE
; /* not simple */
6934 return build2 ((enum tree_code
)op
.code
, op
.type
, op0
, op1
);
6941 /* Try to simplify the AND of two comparisons, specified by
6942 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
6943 If this can be simplified to a single expression (without requiring
6944 introducing more SSA variables to hold intermediate values),
6945 return the resulting tree. Otherwise return NULL_TREE.
6946 If the result expression is non-null, it has boolean type. */
6949 maybe_fold_and_comparisons (tree type
,
6950 enum tree_code code1
, tree op1a
, tree op1b
,
6951 enum tree_code code2
, tree op2a
, tree op2b
)
6953 if (tree t
= and_comparisons_1 (type
, code1
, op1a
, op1b
, code2
, op2a
, op2b
))
6956 if (tree t
= and_comparisons_1 (type
, code2
, op2a
, op2b
, code1
, op1a
, op1b
))
6959 if (tree t
= maybe_fold_comparisons_from_match_pd (type
, BIT_AND_EXPR
, code1
,
6960 op1a
, op1b
, code2
, op2a
,
6967 /* Helper function for or_comparisons_1: try to simplify the OR of the
6968 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
6969 If INVERT is true, invert the value of VAR before doing the OR.
6970 Return NULL_EXPR if we can't simplify this to a single expression. */
6973 or_var_with_comparison (tree type
, tree var
, bool invert
,
6974 enum tree_code code2
, tree op2a
, tree op2b
)
6977 gimple
*stmt
= SSA_NAME_DEF_STMT (var
);
6979 /* We can only deal with variables whose definitions are assignments. */
6980 if (!is_gimple_assign (stmt
))
6983 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
6984 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
6985 Then we only have to consider the simpler non-inverted cases. */
6987 t
= and_var_with_comparison_1 (type
, stmt
,
6988 invert_tree_comparison (code2
, false),
6991 t
= or_var_with_comparison_1 (type
, stmt
, code2
, op2a
, op2b
);
6992 return canonicalize_bool (t
, invert
);
6995 /* Try to simplify the OR of the ssa variable defined by the assignment
6996 STMT with the comparison specified by (OP2A CODE2 OP2B).
6997 Return NULL_EXPR if we can't simplify this to a single expression. */
7000 or_var_with_comparison_1 (tree type
, gimple
*stmt
,
7001 enum tree_code code2
, tree op2a
, tree op2b
)
7003 tree var
= gimple_assign_lhs (stmt
);
7004 tree true_test_var
= NULL_TREE
;
7005 tree false_test_var
= NULL_TREE
;
7006 enum tree_code innercode
= gimple_assign_rhs_code (stmt
);
7008 /* Check for identities like (var OR (var != 0)) => true . */
7009 if (TREE_CODE (op2a
) == SSA_NAME
7010 && TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
)
7012 if ((code2
== NE_EXPR
&& integer_zerop (op2b
))
7013 || (code2
== EQ_EXPR
&& integer_nonzerop (op2b
)))
7015 true_test_var
= op2a
;
7016 if (var
== true_test_var
)
7019 else if ((code2
== EQ_EXPR
&& integer_zerop (op2b
))
7020 || (code2
== NE_EXPR
&& integer_nonzerop (op2b
)))
7022 false_test_var
= op2a
;
7023 if (var
== false_test_var
)
7024 return boolean_true_node
;
7028 /* If the definition is a comparison, recurse on it. */
7029 if (TREE_CODE_CLASS (innercode
) == tcc_comparison
)
7031 tree t
= or_comparisons_1 (type
, innercode
,
7032 gimple_assign_rhs1 (stmt
),
7033 gimple_assign_rhs2 (stmt
),
7041 /* If the definition is an AND or OR expression, we may be able to
7042 simplify by reassociating. */
7043 if (TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
7044 && (innercode
== BIT_AND_EXPR
|| innercode
== BIT_IOR_EXPR
))
7046 tree inner1
= gimple_assign_rhs1 (stmt
);
7047 tree inner2
= gimple_assign_rhs2 (stmt
);
7050 tree partial
= NULL_TREE
;
7051 bool is_or
= (innercode
== BIT_IOR_EXPR
);
7053 /* Check for boolean identities that don't require recursive examination
7055 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
7056 inner1 OR (inner1 AND inner2) => inner1
7057 !inner1 OR (inner1 OR inner2) => true
7058 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
7060 if (inner1
== true_test_var
)
7061 return (is_or
? var
: inner1
);
7062 else if (inner2
== true_test_var
)
7063 return (is_or
? var
: inner2
);
7064 else if (inner1
== false_test_var
)
7067 : or_var_with_comparison (type
, inner2
, false, code2
, op2a
,
7069 else if (inner2
== false_test_var
)
7072 : or_var_with_comparison (type
, inner1
, false, code2
, op2a
,
7075 /* Next, redistribute/reassociate the OR across the inner tests.
7076 Compute the first partial result, (inner1 OR (op2a code op2b)) */
7077 if (TREE_CODE (inner1
) == SSA_NAME
7078 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner1
))
7079 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
7080 && (t
= maybe_fold_or_comparisons (type
, gimple_assign_rhs_code (s
),
7081 gimple_assign_rhs1 (s
),
7082 gimple_assign_rhs2 (s
),
7083 code2
, op2a
, op2b
)))
7085 /* Handle the OR case, where we are reassociating:
7086 (inner1 OR inner2) OR (op2a code2 op2b)
7088 If the partial result t is a constant, we win. Otherwise
7089 continue on to try reassociating with the other inner test. */
7092 if (integer_onep (t
))
7093 return boolean_true_node
;
7094 else if (integer_zerop (t
))
7098 /* Handle the AND case, where we are redistributing:
7099 (inner1 AND inner2) OR (op2a code2 op2b)
7100 => (t AND (inner2 OR (op2a code op2b))) */
7101 else if (integer_zerop (t
))
7102 return boolean_false_node
;
7104 /* Save partial result for later. */
7108 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
7109 if (TREE_CODE (inner2
) == SSA_NAME
7110 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner2
))
7111 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
7112 && (t
= maybe_fold_or_comparisons (type
, gimple_assign_rhs_code (s
),
7113 gimple_assign_rhs1 (s
),
7114 gimple_assign_rhs2 (s
),
7115 code2
, op2a
, op2b
)))
7117 /* Handle the OR case, where we are reassociating:
7118 (inner1 OR inner2) OR (op2a code2 op2b)
7120 => (t OR partial) */
7123 if (integer_zerop (t
))
7125 else if (integer_onep (t
))
7126 return boolean_true_node
;
7127 /* If both are the same, we can apply the identity
7129 else if (partial
&& same_bool_result_p (t
, partial
))
7133 /* Handle the AND case, where we are redistributing:
7134 (inner1 AND inner2) OR (op2a code2 op2b)
7135 => (t AND (inner1 OR (op2a code2 op2b)))
7136 => (t AND partial) */
7139 if (integer_zerop (t
))
7140 return boolean_false_node
;
7143 /* We already got a simplification for the other
7144 operand to the redistributed AND expression. The
7145 interesting case is when at least one is true.
7146 Or, if both are the same, we can apply the identity
7148 if (integer_onep (partial
))
7150 else if (integer_onep (t
))
7152 else if (same_bool_result_p (t
, partial
))
7161 /* Try to simplify the OR of two comparisons defined by
7162 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
7163 If this can be done without constructing an intermediate value,
7164 return the resulting tree; otherwise NULL_TREE is returned.
7165 This function is deliberately asymmetric as it recurses on SSA_DEFs
7166 in the first comparison but not the second. */
7169 or_comparisons_1 (tree type
, enum tree_code code1
, tree op1a
, tree op1b
,
7170 enum tree_code code2
, tree op2a
, tree op2b
)
7172 tree truth_type
= truth_type_for (TREE_TYPE (op1a
));
7174 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
7175 if (operand_equal_p (op1a
, op2a
, 0)
7176 && operand_equal_p (op1b
, op2b
, 0))
7178 /* Result will be either NULL_TREE, or a combined comparison. */
7179 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
7180 TRUTH_ORIF_EXPR
, code1
, code2
,
7181 truth_type
, op1a
, op1b
);
7186 /* Likewise the swapped case of the above. */
7187 if (operand_equal_p (op1a
, op2b
, 0)
7188 && operand_equal_p (op1b
, op2a
, 0))
7190 /* Result will be either NULL_TREE, or a combined comparison. */
7191 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
7192 TRUTH_ORIF_EXPR
, code1
,
7193 swap_tree_comparison (code2
),
7194 truth_type
, op1a
, op1b
);
7199 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
7200 NAME's definition is a truth value. See if there are any simplifications
7201 that can be done against the NAME's definition. */
7202 if (TREE_CODE (op1a
) == SSA_NAME
7203 && (code1
== NE_EXPR
|| code1
== EQ_EXPR
)
7204 && (integer_zerop (op1b
) || integer_onep (op1b
)))
7206 bool invert
= ((code1
== EQ_EXPR
&& integer_zerop (op1b
))
7207 || (code1
== NE_EXPR
&& integer_onep (op1b
)));
7208 gimple
*stmt
= SSA_NAME_DEF_STMT (op1a
);
7209 switch (gimple_code (stmt
))
7212 /* Try to simplify by copy-propagating the definition. */
7213 return or_var_with_comparison (type
, op1a
, invert
, code2
, op2a
,
7217 /* If every argument to the PHI produces the same result when
7218 ORed with the second comparison, we win.
7219 Do not do this unless the type is bool since we need a bool
7220 result here anyway. */
7221 if (TREE_CODE (TREE_TYPE (op1a
)) == BOOLEAN_TYPE
)
7223 tree result
= NULL_TREE
;
7225 for (i
= 0; i
< gimple_phi_num_args (stmt
); i
++)
7227 tree arg
= gimple_phi_arg_def (stmt
, i
);
7229 /* If this PHI has itself as an argument, ignore it.
7230 If all the other args produce the same result,
7232 if (arg
== gimple_phi_result (stmt
))
7234 else if (TREE_CODE (arg
) == INTEGER_CST
)
7236 if (invert
? integer_zerop (arg
) : integer_nonzerop (arg
))
7239 result
= boolean_true_node
;
7240 else if (!integer_onep (result
))
7244 result
= fold_build2 (code2
, boolean_type_node
,
7246 else if (!same_bool_comparison_p (result
,
7250 else if (TREE_CODE (arg
) == SSA_NAME
7251 && !SSA_NAME_IS_DEFAULT_DEF (arg
))
7254 gimple
*def_stmt
= SSA_NAME_DEF_STMT (arg
);
7255 /* In simple cases we can look through PHI nodes,
7256 but we have to be careful with loops.
7258 if (! dom_info_available_p (CDI_DOMINATORS
)
7259 || gimple_bb (def_stmt
) == gimple_bb (stmt
)
7260 || dominated_by_p (CDI_DOMINATORS
,
7261 gimple_bb (def_stmt
),
7264 temp
= or_var_with_comparison (type
, arg
, invert
, code2
,
7270 else if (!same_bool_result_p (result
, temp
))
7286 /* Try to simplify the OR of two comparisons, specified by
7287 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
7288 If this can be simplified to a single expression (without requiring
7289 introducing more SSA variables to hold intermediate values),
7290 return the resulting tree. Otherwise return NULL_TREE.
7291 If the result expression is non-null, it has boolean type. */
7294 maybe_fold_or_comparisons (tree type
,
7295 enum tree_code code1
, tree op1a
, tree op1b
,
7296 enum tree_code code2
, tree op2a
, tree op2b
)
7298 if (tree t
= or_comparisons_1 (type
, code1
, op1a
, op1b
, code2
, op2a
, op2b
))
7301 if (tree t
= or_comparisons_1 (type
, code2
, op2a
, op2b
, code1
, op1a
, op1b
))
7304 if (tree t
= maybe_fold_comparisons_from_match_pd (type
, BIT_IOR_EXPR
, code1
,
7305 op1a
, op1b
, code2
, op2a
,
7312 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
7314 Either NULL_TREE, a simplified but non-constant or a constant
7317 ??? This should go into a gimple-fold-inline.h file to be eventually
7318 privatized with the single valueize function used in the various TUs
7319 to avoid the indirect function call overhead. */
7322 gimple_fold_stmt_to_constant_1 (gimple
*stmt
, tree (*valueize
) (tree
),
7323 tree (*gvalueize
) (tree
))
7325 gimple_match_op res_op
;
7326 /* ??? The SSA propagators do not correctly deal with following SSA use-def
7327 edges if there are intermediate VARYING defs. For this reason
7328 do not follow SSA edges here even though SCCVN can technically
7329 just deal fine with that. */
7330 if (gimple_simplify (stmt
, &res_op
, NULL
, gvalueize
, valueize
))
7332 tree res
= NULL_TREE
;
7333 if (gimple_simplified_result_is_gimple_val (&res_op
))
7334 res
= res_op
.ops
[0];
7335 else if (mprts_hook
)
7336 res
= mprts_hook (&res_op
);
7339 if (dump_file
&& dump_flags
& TDF_DETAILS
)
7341 fprintf (dump_file
, "Match-and-simplified ");
7342 print_gimple_expr (dump_file
, stmt
, 0, TDF_SLIM
);
7343 fprintf (dump_file
, " to ");
7344 print_generic_expr (dump_file
, res
);
7345 fprintf (dump_file
, "\n");
7351 location_t loc
= gimple_location (stmt
);
7352 switch (gimple_code (stmt
))
7356 enum tree_code subcode
= gimple_assign_rhs_code (stmt
);
7358 switch (get_gimple_rhs_class (subcode
))
7360 case GIMPLE_SINGLE_RHS
:
7362 tree rhs
= gimple_assign_rhs1 (stmt
);
7363 enum tree_code_class kind
= TREE_CODE_CLASS (subcode
);
7365 if (TREE_CODE (rhs
) == SSA_NAME
)
7367 /* If the RHS is an SSA_NAME, return its known constant value,
7369 return (*valueize
) (rhs
);
7371 /* Handle propagating invariant addresses into address
7373 else if (TREE_CODE (rhs
) == ADDR_EXPR
7374 && !is_gimple_min_invariant (rhs
))
7376 poly_int64 offset
= 0;
7378 base
= get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs
, 0),
7382 && (CONSTANT_CLASS_P (base
)
7383 || decl_address_invariant_p (base
)))
7384 return build_invariant_address (TREE_TYPE (rhs
),
7387 else if (TREE_CODE (rhs
) == CONSTRUCTOR
7388 && TREE_CODE (TREE_TYPE (rhs
)) == VECTOR_TYPE
7389 && known_eq (CONSTRUCTOR_NELTS (rhs
),
7390 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs
))))
7395 nelts
= CONSTRUCTOR_NELTS (rhs
);
7396 tree_vector_builder
vec (TREE_TYPE (rhs
), nelts
, 1);
7397 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs
), i
, val
)
7399 val
= (*valueize
) (val
);
7400 if (TREE_CODE (val
) == INTEGER_CST
7401 || TREE_CODE (val
) == REAL_CST
7402 || TREE_CODE (val
) == FIXED_CST
)
7403 vec
.quick_push (val
);
7408 return vec
.build ();
7410 if (subcode
== OBJ_TYPE_REF
)
7412 tree val
= (*valueize
) (OBJ_TYPE_REF_EXPR (rhs
));
7413 /* If callee is constant, we can fold away the wrapper. */
7414 if (is_gimple_min_invariant (val
))
7418 if (kind
== tcc_reference
)
7420 if ((TREE_CODE (rhs
) == VIEW_CONVERT_EXPR
7421 || TREE_CODE (rhs
) == REALPART_EXPR
7422 || TREE_CODE (rhs
) == IMAGPART_EXPR
)
7423 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == SSA_NAME
)
7425 tree val
= (*valueize
) (TREE_OPERAND (rhs
, 0));
7426 return fold_unary_loc (EXPR_LOCATION (rhs
),
7428 TREE_TYPE (rhs
), val
);
7430 else if (TREE_CODE (rhs
) == BIT_FIELD_REF
7431 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == SSA_NAME
)
7433 tree val
= (*valueize
) (TREE_OPERAND (rhs
, 0));
7434 return fold_ternary_loc (EXPR_LOCATION (rhs
),
7436 TREE_TYPE (rhs
), val
,
7437 TREE_OPERAND (rhs
, 1),
7438 TREE_OPERAND (rhs
, 2));
7440 else if (TREE_CODE (rhs
) == MEM_REF
7441 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == SSA_NAME
)
7443 tree val
= (*valueize
) (TREE_OPERAND (rhs
, 0));
7444 if (TREE_CODE (val
) == ADDR_EXPR
7445 && is_gimple_min_invariant (val
))
7447 tree tem
= fold_build2 (MEM_REF
, TREE_TYPE (rhs
),
7449 TREE_OPERAND (rhs
, 1));
7454 return fold_const_aggregate_ref_1 (rhs
, valueize
);
7456 else if (kind
== tcc_declaration
)
7457 return get_symbol_constant_value (rhs
);
7461 case GIMPLE_UNARY_RHS
:
7464 case GIMPLE_BINARY_RHS
:
7465 /* Translate &x + CST into an invariant form suitable for
7466 further propagation. */
7467 if (subcode
== POINTER_PLUS_EXPR
)
7469 tree op0
= (*valueize
) (gimple_assign_rhs1 (stmt
));
7470 tree op1
= (*valueize
) (gimple_assign_rhs2 (stmt
));
7471 if (TREE_CODE (op0
) == ADDR_EXPR
7472 && TREE_CODE (op1
) == INTEGER_CST
)
7474 tree off
= fold_convert (ptr_type_node
, op1
);
7476 (loc
, ADDR_EXPR
, TREE_TYPE (op0
),
7477 fold_build2 (MEM_REF
,
7478 TREE_TYPE (TREE_TYPE (op0
)),
7479 unshare_expr (op0
), off
));
7482 /* Canonicalize bool != 0 and bool == 0 appearing after
7483 valueization. While gimple_simplify handles this
7484 it can get confused by the ~X == 1 -> X == 0 transform
7485 which we cant reduce to a SSA name or a constant
7486 (and we have no way to tell gimple_simplify to not
7487 consider those transforms in the first place). */
7488 else if (subcode
== EQ_EXPR
7489 || subcode
== NE_EXPR
)
7491 tree lhs
= gimple_assign_lhs (stmt
);
7492 tree op0
= gimple_assign_rhs1 (stmt
);
7493 if (useless_type_conversion_p (TREE_TYPE (lhs
),
7496 tree op1
= (*valueize
) (gimple_assign_rhs2 (stmt
));
7497 op0
= (*valueize
) (op0
);
7498 if (TREE_CODE (op0
) == INTEGER_CST
)
7499 std::swap (op0
, op1
);
7500 if (TREE_CODE (op1
) == INTEGER_CST
7501 && ((subcode
== NE_EXPR
&& integer_zerop (op1
))
7502 || (subcode
== EQ_EXPR
&& integer_onep (op1
))))
7508 case GIMPLE_TERNARY_RHS
:
7510 /* Handle ternary operators that can appear in GIMPLE form. */
7511 tree op0
= (*valueize
) (gimple_assign_rhs1 (stmt
));
7512 tree op1
= (*valueize
) (gimple_assign_rhs2 (stmt
));
7513 tree op2
= (*valueize
) (gimple_assign_rhs3 (stmt
));
7514 return fold_ternary_loc (loc
, subcode
,
7515 TREE_TYPE (gimple_assign_lhs (stmt
)),
7527 gcall
*call_stmt
= as_a
<gcall
*> (stmt
);
7529 if (gimple_call_internal_p (stmt
))
7531 enum tree_code subcode
= ERROR_MARK
;
7532 switch (gimple_call_internal_fn (stmt
))
7534 case IFN_UBSAN_CHECK_ADD
:
7535 subcode
= PLUS_EXPR
;
7537 case IFN_UBSAN_CHECK_SUB
:
7538 subcode
= MINUS_EXPR
;
7540 case IFN_UBSAN_CHECK_MUL
:
7541 subcode
= MULT_EXPR
;
7543 case IFN_BUILTIN_EXPECT
:
7545 tree arg0
= gimple_call_arg (stmt
, 0);
7546 tree op0
= (*valueize
) (arg0
);
7547 if (TREE_CODE (op0
) == INTEGER_CST
)
7554 tree arg0
= gimple_call_arg (stmt
, 0);
7555 tree arg1
= gimple_call_arg (stmt
, 1);
7556 tree op0
= (*valueize
) (arg0
);
7557 tree op1
= (*valueize
) (arg1
);
7559 if (TREE_CODE (op0
) != INTEGER_CST
7560 || TREE_CODE (op1
) != INTEGER_CST
)
7565 /* x * 0 = 0 * x = 0 without overflow. */
7566 if (integer_zerop (op0
) || integer_zerop (op1
))
7567 return build_zero_cst (TREE_TYPE (arg0
));
7570 /* y - y = 0 without overflow. */
7571 if (operand_equal_p (op0
, op1
, 0))
7572 return build_zero_cst (TREE_TYPE (arg0
));
7579 = fold_binary_loc (loc
, subcode
, TREE_TYPE (arg0
), op0
, op1
);
7581 && TREE_CODE (res
) == INTEGER_CST
7582 && !TREE_OVERFLOW (res
))
7587 fn
= (*valueize
) (gimple_call_fn (stmt
));
7588 if (TREE_CODE (fn
) == ADDR_EXPR
7589 && TREE_CODE (TREE_OPERAND (fn
, 0)) == FUNCTION_DECL
7590 && fndecl_built_in_p (TREE_OPERAND (fn
, 0))
7591 && gimple_builtin_call_types_compatible_p (stmt
,
7592 TREE_OPERAND (fn
, 0)))
7594 tree
*args
= XALLOCAVEC (tree
, gimple_call_num_args (stmt
));
7597 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
7598 args
[i
] = (*valueize
) (gimple_call_arg (stmt
, i
));
7599 retval
= fold_builtin_call_array (loc
,
7600 gimple_call_return_type (call_stmt
),
7601 fn
, gimple_call_num_args (stmt
), args
);
7604 /* fold_call_expr wraps the result inside a NOP_EXPR. */
7605 STRIP_NOPS (retval
);
7606 retval
= fold_convert (gimple_call_return_type (call_stmt
),
7619 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
7620 Returns NULL_TREE if folding to a constant is not possible, otherwise
7621 returns a constant according to is_gimple_min_invariant. */
7624 gimple_fold_stmt_to_constant (gimple
*stmt
, tree (*valueize
) (tree
))
7626 tree res
= gimple_fold_stmt_to_constant_1 (stmt
, valueize
);
7627 if (res
&& is_gimple_min_invariant (res
))
7633 /* The following set of functions are supposed to fold references using
7634 their constant initializers. */
7636 /* See if we can find constructor defining value of BASE.
7637 When we know the consructor with constant offset (such as
7638 base is array[40] and we do know constructor of array), then
7639 BIT_OFFSET is adjusted accordingly.
7641 As a special case, return error_mark_node when constructor
7642 is not explicitly available, but it is known to be zero
7643 such as 'static const int a;'. */
7645 get_base_constructor (tree base
, poly_int64_pod
*bit_offset
,
7646 tree (*valueize
)(tree
))
7648 poly_int64 bit_offset2
, size
, max_size
;
7651 if (TREE_CODE (base
) == MEM_REF
)
7653 poly_offset_int boff
= *bit_offset
+ mem_ref_offset (base
) * BITS_PER_UNIT
;
7654 if (!boff
.to_shwi (bit_offset
))
7658 && TREE_CODE (TREE_OPERAND (base
, 0)) == SSA_NAME
)
7659 base
= valueize (TREE_OPERAND (base
, 0));
7660 if (!base
|| TREE_CODE (base
) != ADDR_EXPR
)
7662 base
= TREE_OPERAND (base
, 0);
7665 && TREE_CODE (base
) == SSA_NAME
)
7666 base
= valueize (base
);
7668 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
7669 DECL_INITIAL. If BASE is a nested reference into another
7670 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
7671 the inner reference. */
7672 switch (TREE_CODE (base
))
7677 tree init
= ctor_for_folding (base
);
7679 /* Our semantic is exact opposite of ctor_for_folding;
7680 NULL means unknown, while error_mark_node is 0. */
7681 if (init
== error_mark_node
)
7684 return error_mark_node
;
7688 case VIEW_CONVERT_EXPR
:
7689 return get_base_constructor (TREE_OPERAND (base
, 0),
7690 bit_offset
, valueize
);
7694 base
= get_ref_base_and_extent (base
, &bit_offset2
, &size
, &max_size
,
7696 if (!known_size_p (max_size
) || maybe_ne (size
, max_size
))
7698 *bit_offset
+= bit_offset2
;
7699 return get_base_constructor (base
, bit_offset
, valueize
);
7705 if (CONSTANT_CLASS_P (base
))
7712 /* CTOR is CONSTRUCTOR of an array type. Fold a reference of SIZE bits
7713 to the memory at bit OFFSET. When non-null, TYPE is the expected
7714 type of the reference; otherwise the type of the referenced element
7715 is used instead. When SIZE is zero, attempt to fold a reference to
7716 the entire element which OFFSET refers to. Increment *SUBOFF by
7717 the bit offset of the accessed element. */
7720 fold_array_ctor_reference (tree type
, tree ctor
,
7721 unsigned HOST_WIDE_INT offset
,
7722 unsigned HOST_WIDE_INT size
,
7724 unsigned HOST_WIDE_INT
*suboff
)
7726 offset_int low_bound
;
7727 offset_int elt_size
;
7728 offset_int access_index
;
7729 tree domain_type
= NULL_TREE
;
7730 HOST_WIDE_INT inner_offset
;
7732 /* Compute low bound and elt size. */
7733 if (TREE_CODE (TREE_TYPE (ctor
)) == ARRAY_TYPE
)
7734 domain_type
= TYPE_DOMAIN (TREE_TYPE (ctor
));
7735 if (domain_type
&& TYPE_MIN_VALUE (domain_type
))
7737 /* Static constructors for variably sized objects make no sense. */
7738 if (TREE_CODE (TYPE_MIN_VALUE (domain_type
)) != INTEGER_CST
)
7740 low_bound
= wi::to_offset (TYPE_MIN_VALUE (domain_type
));
7744 /* Static constructors for variably sized objects make no sense. */
7745 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor
)))) != INTEGER_CST
)
7747 elt_size
= wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor
))));
7749 /* When TYPE is non-null, verify that it specifies a constant-sized
7750 access of a multiple of the array element size. Avoid division
7751 by zero below when ELT_SIZE is zero, such as with the result of
7752 an initializer for a zero-length array or an empty struct. */
7755 && (!TYPE_SIZE_UNIT (type
)
7756 || TREE_CODE (TYPE_SIZE_UNIT (type
)) != INTEGER_CST
)))
7759 /* Compute the array index we look for. */
7760 access_index
= wi::udiv_trunc (offset_int (offset
/ BITS_PER_UNIT
),
7762 access_index
+= low_bound
;
7764 /* And offset within the access. */
7765 inner_offset
= offset
% (elt_size
.to_uhwi () * BITS_PER_UNIT
);
7767 unsigned HOST_WIDE_INT elt_sz
= elt_size
.to_uhwi ();
7768 if (size
> elt_sz
* BITS_PER_UNIT
)
7770 /* native_encode_expr constraints. */
7771 if (size
> MAX_BITSIZE_MODE_ANY_MODE
7772 || size
% BITS_PER_UNIT
!= 0
7773 || inner_offset
% BITS_PER_UNIT
!= 0
7774 || elt_sz
> MAX_BITSIZE_MODE_ANY_MODE
/ BITS_PER_UNIT
)
7778 tree val
= get_array_ctor_element_at_index (ctor
, access_index
,
7780 if (!val
&& ctor_idx
>= CONSTRUCTOR_NELTS (ctor
))
7781 return build_zero_cst (type
);
7783 /* native-encode adjacent ctor elements. */
7784 unsigned char buf
[MAX_BITSIZE_MODE_ANY_MODE
/ BITS_PER_UNIT
];
7785 unsigned bufoff
= 0;
7786 offset_int index
= 0;
7787 offset_int max_index
= access_index
;
7788 constructor_elt
*elt
= CONSTRUCTOR_ELT (ctor
, ctor_idx
);
7790 val
= build_zero_cst (TREE_TYPE (TREE_TYPE (ctor
)));
7791 else if (!CONSTANT_CLASS_P (val
))
7795 else if (TREE_CODE (elt
->index
) == RANGE_EXPR
)
7797 index
= wi::to_offset (TREE_OPERAND (elt
->index
, 0));
7798 max_index
= wi::to_offset (TREE_OPERAND (elt
->index
, 1));
7801 index
= max_index
= wi::to_offset (elt
->index
);
7802 index
= wi::umax (index
, access_index
);
7805 if (bufoff
+ elt_sz
> sizeof (buf
))
7806 elt_sz
= sizeof (buf
) - bufoff
;
7807 int len
= native_encode_expr (val
, buf
+ bufoff
, elt_sz
,
7808 inner_offset
/ BITS_PER_UNIT
);
7809 if (len
!= (int) elt_sz
- inner_offset
/ BITS_PER_UNIT
)
7815 if (wi::cmpu (access_index
, index
) == 0)
7817 else if (wi::cmpu (access_index
, max_index
) > 0)
7820 if (ctor_idx
>= CONSTRUCTOR_NELTS (ctor
))
7822 val
= build_zero_cst (TREE_TYPE (TREE_TYPE (ctor
)));
7827 elt
= CONSTRUCTOR_ELT (ctor
, ctor_idx
);
7829 max_index
= access_index
;
7832 else if (TREE_CODE (elt
->index
) == RANGE_EXPR
)
7834 index
= wi::to_offset (TREE_OPERAND (elt
->index
, 0));
7835 max_index
= wi::to_offset (TREE_OPERAND (elt
->index
, 1));
7838 index
= max_index
= wi::to_offset (elt
->index
);
7839 index
= wi::umax (index
, access_index
);
7840 if (wi::cmpu (access_index
, index
) == 0)
7843 val
= build_zero_cst (TREE_TYPE (TREE_TYPE (ctor
)));
7847 while (bufoff
< size
/ BITS_PER_UNIT
);
7849 return native_interpret_expr (type
, buf
, size
/ BITS_PER_UNIT
);
7852 if (tree val
= get_array_ctor_element_at_index (ctor
, access_index
))
7854 if (!size
&& TREE_CODE (val
) != CONSTRUCTOR
)
7856 /* For the final reference to the entire accessed element
7857 (SIZE is zero), reset INNER_OFFSET, disegard TYPE (which
7858 may be null) in favor of the type of the element, and set
7859 SIZE to the size of the accessed element. */
7861 type
= TREE_TYPE (val
);
7862 size
= elt_sz
* BITS_PER_UNIT
;
7864 else if (size
&& access_index
< CONSTRUCTOR_NELTS (ctor
) - 1
7865 && TREE_CODE (val
) == CONSTRUCTOR
7866 && (elt_sz
* BITS_PER_UNIT
- inner_offset
) < size
)
7867 /* If this isn't the last element in the CTOR and a CTOR itself
7868 and it does not cover the whole object we are requesting give up
7869 since we're not set up for combining from multiple CTORs. */
7872 *suboff
+= access_index
.to_uhwi () * elt_sz
* BITS_PER_UNIT
;
7873 return fold_ctor_reference (type
, val
, inner_offset
, size
, from_decl
,
7877 /* Memory not explicitly mentioned in constructor is 0 (or
7878 the reference is out of range). */
7879 return type
? build_zero_cst (type
) : NULL_TREE
;
7882 /* CTOR is CONSTRUCTOR of an aggregate or vector. Fold a reference
7883 of SIZE bits to the memory at bit OFFSET. When non-null, TYPE
7884 is the expected type of the reference; otherwise the type of
7885 the referenced member is used instead. When SIZE is zero,
7886 attempt to fold a reference to the entire member which OFFSET
7887 refers to; in this case. Increment *SUBOFF by the bit offset
7888 of the accessed member. */
7891 fold_nonarray_ctor_reference (tree type
, tree ctor
,
7892 unsigned HOST_WIDE_INT offset
,
7893 unsigned HOST_WIDE_INT size
,
7895 unsigned HOST_WIDE_INT
*suboff
)
7897 unsigned HOST_WIDE_INT cnt
;
7900 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor
), cnt
, cfield
,
7903 tree byte_offset
= DECL_FIELD_OFFSET (cfield
);
7904 tree field_offset
= DECL_FIELD_BIT_OFFSET (cfield
);
7905 tree field_size
= DECL_SIZE (cfield
);
7909 /* Determine the size of the flexible array member from
7910 the size of the initializer provided for it. */
7911 field_size
= TYPE_SIZE (TREE_TYPE (cval
));
7914 /* Variable sized objects in static constructors makes no sense,
7915 but field_size can be NULL for flexible array members. */
7916 gcc_assert (TREE_CODE (field_offset
) == INTEGER_CST
7917 && TREE_CODE (byte_offset
) == INTEGER_CST
7918 && (field_size
!= NULL_TREE
7919 ? TREE_CODE (field_size
) == INTEGER_CST
7920 : TREE_CODE (TREE_TYPE (cfield
)) == ARRAY_TYPE
));
7922 /* Compute bit offset of the field. */
7923 offset_int bitoffset
7924 = (wi::to_offset (field_offset
)
7925 + (wi::to_offset (byte_offset
) << LOG2_BITS_PER_UNIT
));
7926 /* Compute bit offset where the field ends. */
7927 offset_int bitoffset_end
;
7928 if (field_size
!= NULL_TREE
)
7929 bitoffset_end
= bitoffset
+ wi::to_offset (field_size
);
7933 /* Compute the bit offset of the end of the desired access.
7934 As a special case, if the size of the desired access is
7935 zero, assume the access is to the entire field (and let
7936 the caller make any necessary adjustments by storing
7937 the actual bounds of the field in FIELDBOUNDS). */
7938 offset_int access_end
= offset_int (offset
);
7942 access_end
= bitoffset_end
;
7944 /* Is there any overlap between the desired access at
7945 [OFFSET, OFFSET+SIZE) and the offset of the field within
7946 the object at [BITOFFSET, BITOFFSET_END)? */
7947 if (wi::cmps (access_end
, bitoffset
) > 0
7948 && (field_size
== NULL_TREE
7949 || wi::lts_p (offset
, bitoffset_end
)))
7951 *suboff
+= bitoffset
.to_uhwi ();
7953 if (!size
&& TREE_CODE (cval
) != CONSTRUCTOR
)
7955 /* For the final reference to the entire accessed member
7956 (SIZE is zero), reset OFFSET, disegard TYPE (which may
7957 be null) in favor of the type of the member, and set
7958 SIZE to the size of the accessed member. */
7959 offset
= bitoffset
.to_uhwi ();
7960 type
= TREE_TYPE (cval
);
7961 size
= (bitoffset_end
- bitoffset
).to_uhwi ();
7964 /* We do have overlap. Now see if the field is large enough
7965 to cover the access. Give up for accesses that extend
7966 beyond the end of the object or that span multiple fields. */
7967 if (wi::cmps (access_end
, bitoffset_end
) > 0)
7969 if (offset
< bitoffset
)
7972 offset_int inner_offset
= offset_int (offset
) - bitoffset
;
7973 return fold_ctor_reference (type
, cval
,
7974 inner_offset
.to_uhwi (), size
,
7982 return build_zero_cst (type
);
7985 /* CTOR is value initializing memory. Fold a reference of TYPE and
7986 bit size POLY_SIZE to the memory at bit POLY_OFFSET. When POLY_SIZE
7987 is zero, attempt to fold a reference to the entire subobject
7988 which OFFSET refers to. This is used when folding accesses to
7989 string members of aggregates. When non-null, set *SUBOFF to
7990 the bit offset of the accessed subobject. */
7993 fold_ctor_reference (tree type
, tree ctor
, const poly_uint64
&poly_offset
,
7994 const poly_uint64
&poly_size
, tree from_decl
,
7995 unsigned HOST_WIDE_INT
*suboff
/* = NULL */)
7999 /* We found the field with exact match. */
8001 && useless_type_conversion_p (type
, TREE_TYPE (ctor
))
8002 && known_eq (poly_offset
, 0U))
8003 return canonicalize_constructor_val (unshare_expr (ctor
), from_decl
);
8005 /* The remaining optimizations need a constant size and offset. */
8006 unsigned HOST_WIDE_INT size
, offset
;
8007 if (!poly_size
.is_constant (&size
) || !poly_offset
.is_constant (&offset
))
8010 /* We are at the end of walk, see if we can view convert the
8012 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor
)) && !offset
8013 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
8014 && !compare_tree_int (TYPE_SIZE (type
), size
)
8015 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor
)), size
))
8017 ret
= canonicalize_constructor_val (unshare_expr (ctor
), from_decl
);
8020 ret
= fold_unary (VIEW_CONVERT_EXPR
, type
, ret
);
8022 STRIP_USELESS_TYPE_CONVERSION (ret
);
8026 /* For constants and byte-aligned/sized reads try to go through
8027 native_encode/interpret. */
8028 if (CONSTANT_CLASS_P (ctor
)
8029 && BITS_PER_UNIT
== 8
8030 && offset
% BITS_PER_UNIT
== 0
8031 && offset
/ BITS_PER_UNIT
<= INT_MAX
8032 && size
% BITS_PER_UNIT
== 0
8033 && size
<= MAX_BITSIZE_MODE_ANY_MODE
8034 && can_native_interpret_type_p (type
))
8036 unsigned char buf
[MAX_BITSIZE_MODE_ANY_MODE
/ BITS_PER_UNIT
];
8037 int len
= native_encode_expr (ctor
, buf
, size
/ BITS_PER_UNIT
,
8038 offset
/ BITS_PER_UNIT
);
8040 return native_interpret_expr (type
, buf
, len
);
8042 if (TREE_CODE (ctor
) == CONSTRUCTOR
)
8044 unsigned HOST_WIDE_INT dummy
= 0;
8049 if (TREE_CODE (TREE_TYPE (ctor
)) == ARRAY_TYPE
8050 || TREE_CODE (TREE_TYPE (ctor
)) == VECTOR_TYPE
)
8051 ret
= fold_array_ctor_reference (type
, ctor
, offset
, size
,
8054 ret
= fold_nonarray_ctor_reference (type
, ctor
, offset
, size
,
8057 /* Fall back to native_encode_initializer. Needs to be done
8058 only in the outermost fold_ctor_reference call (because it itself
8059 recurses into CONSTRUCTORs) and doesn't update suboff. */
8060 if (ret
== NULL_TREE
8062 && BITS_PER_UNIT
== 8
8063 && offset
% BITS_PER_UNIT
== 0
8064 && offset
/ BITS_PER_UNIT
<= INT_MAX
8065 && size
% BITS_PER_UNIT
== 0
8066 && size
<= MAX_BITSIZE_MODE_ANY_MODE
8067 && can_native_interpret_type_p (type
))
8069 unsigned char buf
[MAX_BITSIZE_MODE_ANY_MODE
/ BITS_PER_UNIT
];
8070 int len
= native_encode_initializer (ctor
, buf
, size
/ BITS_PER_UNIT
,
8071 offset
/ BITS_PER_UNIT
);
8073 return native_interpret_expr (type
, buf
, len
);
8082 /* Return the tree representing the element referenced by T if T is an
8083 ARRAY_REF or COMPONENT_REF into constant aggregates valuezing SSA
8084 names using VALUEIZE. Return NULL_TREE otherwise. */
8087 fold_const_aggregate_ref_1 (tree t
, tree (*valueize
) (tree
))
8089 tree ctor
, idx
, base
;
8090 poly_int64 offset
, size
, max_size
;
8094 if (TREE_THIS_VOLATILE (t
))
8098 return get_symbol_constant_value (t
);
8100 tem
= fold_read_from_constant_string (t
);
8104 switch (TREE_CODE (t
))
8107 case ARRAY_RANGE_REF
:
8108 /* Constant indexes are handled well by get_base_constructor.
8109 Only special case variable offsets.
8110 FIXME: This code can't handle nested references with variable indexes
8111 (they will be handled only by iteration of ccp). Perhaps we can bring
8112 get_ref_base_and_extent here and make it use a valueize callback. */
8113 if (TREE_CODE (TREE_OPERAND (t
, 1)) == SSA_NAME
8115 && (idx
= (*valueize
) (TREE_OPERAND (t
, 1)))
8116 && poly_int_tree_p (idx
))
8118 tree low_bound
, unit_size
;
8120 /* If the resulting bit-offset is constant, track it. */
8121 if ((low_bound
= array_ref_low_bound (t
),
8122 poly_int_tree_p (low_bound
))
8123 && (unit_size
= array_ref_element_size (t
),
8124 tree_fits_uhwi_p (unit_size
)))
8126 poly_offset_int woffset
8127 = wi::sext (wi::to_poly_offset (idx
)
8128 - wi::to_poly_offset (low_bound
),
8129 TYPE_PRECISION (sizetype
));
8130 woffset
*= tree_to_uhwi (unit_size
);
8131 woffset
*= BITS_PER_UNIT
;
8132 if (woffset
.to_shwi (&offset
))
8134 base
= TREE_OPERAND (t
, 0);
8135 ctor
= get_base_constructor (base
, &offset
, valueize
);
8136 /* Empty constructor. Always fold to 0. */
8137 if (ctor
== error_mark_node
)
8138 return build_zero_cst (TREE_TYPE (t
));
8139 /* Out of bound array access. Value is undefined,
8141 if (maybe_lt (offset
, 0))
8143 /* We cannot determine ctor. */
8146 return fold_ctor_reference (TREE_TYPE (t
), ctor
, offset
,
8147 tree_to_uhwi (unit_size
)
8157 case TARGET_MEM_REF
:
8159 base
= get_ref_base_and_extent (t
, &offset
, &size
, &max_size
, &reverse
);
8160 ctor
= get_base_constructor (base
, &offset
, valueize
);
8162 /* Empty constructor. Always fold to 0. */
8163 if (ctor
== error_mark_node
)
8164 return build_zero_cst (TREE_TYPE (t
));
8165 /* We do not know precise address. */
8166 if (!known_size_p (max_size
) || maybe_ne (max_size
, size
))
8168 /* We cannot determine ctor. */
8172 /* Out of bound array access. Value is undefined, but don't fold. */
8173 if (maybe_lt (offset
, 0))
8176 tem
= fold_ctor_reference (TREE_TYPE (t
), ctor
, offset
, size
, base
);
8180 /* For bit field reads try to read the representative and
8182 if (TREE_CODE (t
) == COMPONENT_REF
8183 && DECL_BIT_FIELD (TREE_OPERAND (t
, 1))
8184 && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t
, 1)))
8186 HOST_WIDE_INT csize
, coffset
;
8187 tree field
= TREE_OPERAND (t
, 1);
8188 tree repr
= DECL_BIT_FIELD_REPRESENTATIVE (field
);
8189 if (INTEGRAL_TYPE_P (TREE_TYPE (repr
))
8190 && size
.is_constant (&csize
)
8191 && offset
.is_constant (&coffset
)
8192 && (coffset
% BITS_PER_UNIT
!= 0
8193 || csize
% BITS_PER_UNIT
!= 0)
8195 && BYTES_BIG_ENDIAN
== WORDS_BIG_ENDIAN
)
8197 poly_int64 bitoffset
;
8198 poly_uint64 field_offset
, repr_offset
;
8199 if (poly_int_tree_p (DECL_FIELD_OFFSET (field
), &field_offset
)
8200 && poly_int_tree_p (DECL_FIELD_OFFSET (repr
), &repr_offset
))
8201 bitoffset
= (field_offset
- repr_offset
) * BITS_PER_UNIT
;
8204 bitoffset
+= (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field
))
8205 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr
)));
8206 HOST_WIDE_INT bitoff
;
8207 int diff
= (TYPE_PRECISION (TREE_TYPE (repr
))
8208 - TYPE_PRECISION (TREE_TYPE (field
)));
8209 if (bitoffset
.is_constant (&bitoff
)
8214 size
= tree_to_uhwi (DECL_SIZE (repr
));
8216 tem
= fold_ctor_reference (TREE_TYPE (repr
), ctor
, offset
,
8218 if (tem
&& TREE_CODE (tem
) == INTEGER_CST
)
8220 if (!BYTES_BIG_ENDIAN
)
8221 tem
= wide_int_to_tree (TREE_TYPE (field
),
8222 wi::lrshift (wi::to_wide (tem
),
8225 tem
= wide_int_to_tree (TREE_TYPE (field
),
8226 wi::lrshift (wi::to_wide (tem
),
8238 tree c
= fold_const_aggregate_ref_1 (TREE_OPERAND (t
, 0), valueize
);
8239 if (c
&& TREE_CODE (c
) == COMPLEX_CST
)
8240 return fold_build1_loc (EXPR_LOCATION (t
),
8241 TREE_CODE (t
), TREE_TYPE (t
), c
);
8253 fold_const_aggregate_ref (tree t
)
8255 return fold_const_aggregate_ref_1 (t
, NULL
);
8258 /* Lookup virtual method with index TOKEN in a virtual table V
8260 Set CAN_REFER if non-NULL to false if method
8261 is not referable or if the virtual table is ill-formed (such as rewriten
8262 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
8265 gimple_get_virt_method_for_vtable (HOST_WIDE_INT token
,
8267 unsigned HOST_WIDE_INT offset
,
8270 tree vtable
= v
, init
, fn
;
8271 unsigned HOST_WIDE_INT size
;
8272 unsigned HOST_WIDE_INT elt_size
, access_index
;
8278 /* First of all double check we have virtual table. */
8279 if (!VAR_P (v
) || !DECL_VIRTUAL_P (v
))
8281 /* Pass down that we lost track of the target. */
8287 init
= ctor_for_folding (v
);
8289 /* The virtual tables should always be born with constructors
8290 and we always should assume that they are avaialble for
8291 folding. At the moment we do not stream them in all cases,
8292 but it should never happen that ctor seem unreachable. */
8294 if (init
== error_mark_node
)
8296 /* Pass down that we lost track of the target. */
8301 gcc_checking_assert (TREE_CODE (TREE_TYPE (v
)) == ARRAY_TYPE
);
8302 size
= tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v
))));
8303 offset
*= BITS_PER_UNIT
;
8304 offset
+= token
* size
;
8306 /* Lookup the value in the constructor that is assumed to be array.
8307 This is equivalent to
8308 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
8309 offset, size, NULL);
8310 but in a constant time. We expect that frontend produced a simple
8311 array without indexed initializers. */
8313 gcc_checking_assert (TREE_CODE (TREE_TYPE (init
)) == ARRAY_TYPE
);
8314 domain_type
= TYPE_DOMAIN (TREE_TYPE (init
));
8315 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type
)));
8316 elt_size
= tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init
))));
8318 access_index
= offset
/ BITS_PER_UNIT
/ elt_size
;
8319 gcc_checking_assert (offset
% (elt_size
* BITS_PER_UNIT
) == 0);
8321 /* The C++ FE can now produce indexed fields, and we check if the indexes
8323 if (access_index
< CONSTRUCTOR_NELTS (init
))
8325 fn
= CONSTRUCTOR_ELT (init
, access_index
)->value
;
8326 tree idx
= CONSTRUCTOR_ELT (init
, access_index
)->index
;
8327 gcc_checking_assert (!idx
|| tree_to_uhwi (idx
) == access_index
);
8333 /* For type inconsistent program we may end up looking up virtual method
8334 in virtual table that does not contain TOKEN entries. We may overrun
8335 the virtual table and pick up a constant or RTTI info pointer.
8336 In any case the call is undefined. */
8338 || (TREE_CODE (fn
) != ADDR_EXPR
&& TREE_CODE (fn
) != FDESC_EXPR
)
8339 || TREE_CODE (TREE_OPERAND (fn
, 0)) != FUNCTION_DECL
)
8340 fn
= builtin_decl_implicit (BUILT_IN_UNREACHABLE
);
8343 fn
= TREE_OPERAND (fn
, 0);
8345 /* When cgraph node is missing and function is not public, we cannot
8346 devirtualize. This can happen in WHOPR when the actual method
8347 ends up in other partition, because we found devirtualization
8348 possibility too late. */
8349 if (!can_refer_decl_in_current_unit_p (fn
, vtable
))
8360 /* Make sure we create a cgraph node for functions we'll reference.
8361 They can be non-existent if the reference comes from an entry
8362 of an external vtable for example. */
8363 cgraph_node::get_create (fn
);
8368 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
8369 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
8370 KNOWN_BINFO carries the binfo describing the true type of
8371 OBJ_TYPE_REF_OBJECT(REF).
8372 Set CAN_REFER if non-NULL to false if method
8373 is not referable or if the virtual table is ill-formed (such as rewriten
8374 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
8377 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token
, tree known_binfo
,
8380 unsigned HOST_WIDE_INT offset
;
8383 v
= BINFO_VTABLE (known_binfo
);
8384 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
8388 if (!vtable_pointer_value_to_vtable (v
, &v
, &offset
))
8394 return gimple_get_virt_method_for_vtable (token
, v
, offset
, can_refer
);
8397 /* Given a pointer value T, return a simplified version of an
8398 indirection through T, or NULL_TREE if no simplification is
8399 possible. Note that the resulting type may be different from
8400 the type pointed to in the sense that it is still compatible
8401 from the langhooks point of view. */
8404 gimple_fold_indirect_ref (tree t
)
8406 tree ptype
= TREE_TYPE (t
), type
= TREE_TYPE (ptype
);
8411 subtype
= TREE_TYPE (sub
);
8412 if (!POINTER_TYPE_P (subtype
)
8413 || TYPE_REF_CAN_ALIAS_ALL (ptype
))
8416 if (TREE_CODE (sub
) == ADDR_EXPR
)
8418 tree op
= TREE_OPERAND (sub
, 0);
8419 tree optype
= TREE_TYPE (op
);
8421 if (useless_type_conversion_p (type
, optype
))
8424 /* *(foo *)&fooarray => fooarray[0] */
8425 if (TREE_CODE (optype
) == ARRAY_TYPE
8426 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype
))) == INTEGER_CST
8427 && useless_type_conversion_p (type
, TREE_TYPE (optype
)))
8429 tree type_domain
= TYPE_DOMAIN (optype
);
8430 tree min_val
= size_zero_node
;
8431 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
8432 min_val
= TYPE_MIN_VALUE (type_domain
);
8433 if (TREE_CODE (min_val
) == INTEGER_CST
)
8434 return build4 (ARRAY_REF
, type
, op
, min_val
, NULL_TREE
, NULL_TREE
);
8436 /* *(foo *)&complexfoo => __real__ complexfoo */
8437 else if (TREE_CODE (optype
) == COMPLEX_TYPE
8438 && useless_type_conversion_p (type
, TREE_TYPE (optype
)))
8439 return fold_build1 (REALPART_EXPR
, type
, op
);
8440 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
8441 else if (TREE_CODE (optype
) == VECTOR_TYPE
8442 && useless_type_conversion_p (type
, TREE_TYPE (optype
)))
8444 tree part_width
= TYPE_SIZE (type
);
8445 tree index
= bitsize_int (0);
8446 return fold_build3 (BIT_FIELD_REF
, type
, op
, part_width
, index
);
8450 /* *(p + CST) -> ... */
8451 if (TREE_CODE (sub
) == POINTER_PLUS_EXPR
8452 && TREE_CODE (TREE_OPERAND (sub
, 1)) == INTEGER_CST
)
8454 tree addr
= TREE_OPERAND (sub
, 0);
8455 tree off
= TREE_OPERAND (sub
, 1);
8459 addrtype
= TREE_TYPE (addr
);
8461 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
8462 if (TREE_CODE (addr
) == ADDR_EXPR
8463 && TREE_CODE (TREE_TYPE (addrtype
)) == VECTOR_TYPE
8464 && useless_type_conversion_p (type
, TREE_TYPE (TREE_TYPE (addrtype
)))
8465 && tree_fits_uhwi_p (off
))
8467 unsigned HOST_WIDE_INT offset
= tree_to_uhwi (off
);
8468 tree part_width
= TYPE_SIZE (type
);
8469 unsigned HOST_WIDE_INT part_widthi
8470 = tree_to_shwi (part_width
) / BITS_PER_UNIT
;
8471 unsigned HOST_WIDE_INT indexi
= offset
* BITS_PER_UNIT
;
8472 tree index
= bitsize_int (indexi
);
8473 if (known_lt (offset
/ part_widthi
,
8474 TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype
))))
8475 return fold_build3 (BIT_FIELD_REF
, type
, TREE_OPERAND (addr
, 0),
8479 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
8480 if (TREE_CODE (addr
) == ADDR_EXPR
8481 && TREE_CODE (TREE_TYPE (addrtype
)) == COMPLEX_TYPE
8482 && useless_type_conversion_p (type
, TREE_TYPE (TREE_TYPE (addrtype
))))
8484 tree size
= TYPE_SIZE_UNIT (type
);
8485 if (tree_int_cst_equal (size
, off
))
8486 return fold_build1 (IMAGPART_EXPR
, type
, TREE_OPERAND (addr
, 0));
8489 /* *(p + CST) -> MEM_REF <p, CST>. */
8490 if (TREE_CODE (addr
) != ADDR_EXPR
8491 || DECL_P (TREE_OPERAND (addr
, 0)))
8492 return fold_build2 (MEM_REF
, type
,
8494 wide_int_to_tree (ptype
, wi::to_wide (off
)));
8497 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
8498 if (TREE_CODE (TREE_TYPE (subtype
)) == ARRAY_TYPE
8499 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype
)))) == INTEGER_CST
8500 && useless_type_conversion_p (type
, TREE_TYPE (TREE_TYPE (subtype
))))
8503 tree min_val
= size_zero_node
;
8505 sub
= gimple_fold_indirect_ref (sub
);
8507 sub
= build1 (INDIRECT_REF
, TREE_TYPE (subtype
), osub
);
8508 type_domain
= TYPE_DOMAIN (TREE_TYPE (sub
));
8509 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
8510 min_val
= TYPE_MIN_VALUE (type_domain
);
8511 if (TREE_CODE (min_val
) == INTEGER_CST
)
8512 return build4 (ARRAY_REF
, type
, sub
, min_val
, NULL_TREE
, NULL_TREE
);
8518 /* Return true if CODE is an operation that when operating on signed
8519 integer types involves undefined behavior on overflow and the
8520 operation can be expressed with unsigned arithmetic. */
8523 arith_code_with_undefined_signed_overflow (tree_code code
)
8532 case POINTER_PLUS_EXPR
:
8539 /* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
8540 operation that can be transformed to unsigned arithmetic by converting
8541 its operand, carrying out the operation in the corresponding unsigned
8542 type and converting the result back to the original type.
8544 Returns a sequence of statements that replace STMT and also contain
8545 a modified form of STMT itself. */
8548 rewrite_to_defined_overflow (gimple
*stmt
)
8550 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
8552 fprintf (dump_file
, "rewriting stmt with undefined signed "
8554 print_gimple_stmt (dump_file
, stmt
, 0, TDF_SLIM
);
8557 tree lhs
= gimple_assign_lhs (stmt
);
8558 tree type
= unsigned_type_for (TREE_TYPE (lhs
));
8559 gimple_seq stmts
= NULL
;
8560 if (gimple_assign_rhs_code (stmt
) == ABS_EXPR
)
8561 gimple_assign_set_rhs_code (stmt
, ABSU_EXPR
);
8563 for (unsigned i
= 1; i
< gimple_num_ops (stmt
); ++i
)
8565 tree op
= gimple_op (stmt
, i
);
8566 op
= gimple_convert (&stmts
, type
, op
);
8567 gimple_set_op (stmt
, i
, op
);
8569 gimple_assign_set_lhs (stmt
, make_ssa_name (type
, stmt
));
8570 if (gimple_assign_rhs_code (stmt
) == POINTER_PLUS_EXPR
)
8571 gimple_assign_set_rhs_code (stmt
, PLUS_EXPR
);
8572 gimple_set_modified (stmt
, true);
8573 gimple_seq_add_stmt (&stmts
, stmt
);
8574 gimple
*cvt
= gimple_build_assign (lhs
, NOP_EXPR
, gimple_assign_lhs (stmt
));
8575 gimple_seq_add_stmt (&stmts
, cvt
);
8581 /* The valueization hook we use for the gimple_build API simplification.
8582 This makes us match fold_buildN behavior by only combining with
8583 statements in the sequence(s) we are currently building. */
8586 gimple_build_valueize (tree op
)
8588 if (gimple_bb (SSA_NAME_DEF_STMT (op
)) == NULL
)
8593 /* Build the expression CODE OP0 of type TYPE with location LOC,
8594 simplifying it first if possible. Returns the built
8595 expression value and appends statements possibly defining it
8599 gimple_build (gimple_seq
*seq
, location_t loc
,
8600 enum tree_code code
, tree type
, tree op0
)
8602 tree res
= gimple_simplify (code
, type
, op0
, seq
, gimple_build_valueize
);
8605 res
= create_tmp_reg_or_ssa_name (type
);
8607 if (code
== REALPART_EXPR
8608 || code
== IMAGPART_EXPR
8609 || code
== VIEW_CONVERT_EXPR
)
8610 stmt
= gimple_build_assign (res
, code
, build1 (code
, type
, op0
));
8612 stmt
= gimple_build_assign (res
, code
, op0
);
8613 gimple_set_location (stmt
, loc
);
8614 gimple_seq_add_stmt_without_update (seq
, stmt
);
8619 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
8620 simplifying it first if possible. Returns the built
8621 expression value and appends statements possibly defining it
8625 gimple_build (gimple_seq
*seq
, location_t loc
,
8626 enum tree_code code
, tree type
, tree op0
, tree op1
)
8628 tree res
= gimple_simplify (code
, type
, op0
, op1
, seq
, gimple_build_valueize
);
8631 res
= create_tmp_reg_or_ssa_name (type
);
8632 gimple
*stmt
= gimple_build_assign (res
, code
, op0
, op1
);
8633 gimple_set_location (stmt
, loc
);
8634 gimple_seq_add_stmt_without_update (seq
, stmt
);
8639 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
8640 simplifying it first if possible. Returns the built
8641 expression value and appends statements possibly defining it
8645 gimple_build (gimple_seq
*seq
, location_t loc
,
8646 enum tree_code code
, tree type
, tree op0
, tree op1
, tree op2
)
8648 tree res
= gimple_simplify (code
, type
, op0
, op1
, op2
,
8649 seq
, gimple_build_valueize
);
8652 res
= create_tmp_reg_or_ssa_name (type
);
8654 if (code
== BIT_FIELD_REF
)
8655 stmt
= gimple_build_assign (res
, code
,
8656 build3 (code
, type
, op0
, op1
, op2
));
8658 stmt
= gimple_build_assign (res
, code
, op0
, op1
, op2
);
8659 gimple_set_location (stmt
, loc
);
8660 gimple_seq_add_stmt_without_update (seq
, stmt
);
8665 /* Build the call FN () with a result of type TYPE (or no result if TYPE is
8666 void) with a location LOC. Returns the built expression value (or NULL_TREE
8667 if TYPE is void) and appends statements possibly defining it to SEQ. */
8670 gimple_build (gimple_seq
*seq
, location_t loc
, combined_fn fn
, tree type
)
8672 tree res
= NULL_TREE
;
8674 if (internal_fn_p (fn
))
8675 stmt
= gimple_build_call_internal (as_internal_fn (fn
), 0);
8678 tree decl
= builtin_decl_implicit (as_builtin_fn (fn
));
8679 stmt
= gimple_build_call (decl
, 0);
8681 if (!VOID_TYPE_P (type
))
8683 res
= create_tmp_reg_or_ssa_name (type
);
8684 gimple_call_set_lhs (stmt
, res
);
8686 gimple_set_location (stmt
, loc
);
8687 gimple_seq_add_stmt_without_update (seq
, stmt
);
8691 /* Build the call FN (ARG0) with a result of type TYPE
8692 (or no result if TYPE is void) with location LOC,
8693 simplifying it first if possible. Returns the built
8694 expression value (or NULL_TREE if TYPE is void) and appends
8695 statements possibly defining it to SEQ. */
8698 gimple_build (gimple_seq
*seq
, location_t loc
, combined_fn fn
,
8699 tree type
, tree arg0
)
8701 tree res
= gimple_simplify (fn
, type
, arg0
, seq
, gimple_build_valueize
);
8705 if (internal_fn_p (fn
))
8706 stmt
= gimple_build_call_internal (as_internal_fn (fn
), 1, arg0
);
8709 tree decl
= builtin_decl_implicit (as_builtin_fn (fn
));
8710 stmt
= gimple_build_call (decl
, 1, arg0
);
8712 if (!VOID_TYPE_P (type
))
8714 res
= create_tmp_reg_or_ssa_name (type
);
8715 gimple_call_set_lhs (stmt
, res
);
8717 gimple_set_location (stmt
, loc
);
8718 gimple_seq_add_stmt_without_update (seq
, stmt
);
8723 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
8724 (or no result if TYPE is void) with location LOC,
8725 simplifying it first if possible. Returns the built
8726 expression value (or NULL_TREE if TYPE is void) and appends
8727 statements possibly defining it to SEQ. */
8730 gimple_build (gimple_seq
*seq
, location_t loc
, combined_fn fn
,
8731 tree type
, tree arg0
, tree arg1
)
8733 tree res
= gimple_simplify (fn
, type
, arg0
, arg1
, seq
, gimple_build_valueize
);
8737 if (internal_fn_p (fn
))
8738 stmt
= gimple_build_call_internal (as_internal_fn (fn
), 2, arg0
, arg1
);
8741 tree decl
= builtin_decl_implicit (as_builtin_fn (fn
));
8742 stmt
= gimple_build_call (decl
, 2, arg0
, arg1
);
8744 if (!VOID_TYPE_P (type
))
8746 res
= create_tmp_reg_or_ssa_name (type
);
8747 gimple_call_set_lhs (stmt
, res
);
8749 gimple_set_location (stmt
, loc
);
8750 gimple_seq_add_stmt_without_update (seq
, stmt
);
8755 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
8756 (or no result if TYPE is void) with location LOC,
8757 simplifying it first if possible. Returns the built
8758 expression value (or NULL_TREE if TYPE is void) and appends
8759 statements possibly defining it to SEQ. */
8762 gimple_build (gimple_seq
*seq
, location_t loc
, combined_fn fn
,
8763 tree type
, tree arg0
, tree arg1
, tree arg2
)
8765 tree res
= gimple_simplify (fn
, type
, arg0
, arg1
, arg2
,
8766 seq
, gimple_build_valueize
);
8770 if (internal_fn_p (fn
))
8771 stmt
= gimple_build_call_internal (as_internal_fn (fn
),
8772 3, arg0
, arg1
, arg2
);
8775 tree decl
= builtin_decl_implicit (as_builtin_fn (fn
));
8776 stmt
= gimple_build_call (decl
, 3, arg0
, arg1
, arg2
);
8778 if (!VOID_TYPE_P (type
))
8780 res
= create_tmp_reg_or_ssa_name (type
);
8781 gimple_call_set_lhs (stmt
, res
);
8783 gimple_set_location (stmt
, loc
);
8784 gimple_seq_add_stmt_without_update (seq
, stmt
);
8789 /* Build the conversion (TYPE) OP with a result of type TYPE
8790 with location LOC if such conversion is neccesary in GIMPLE,
8791 simplifying it first.
8792 Returns the built expression value and appends
8793 statements possibly defining it to SEQ. */
8796 gimple_convert (gimple_seq
*seq
, location_t loc
, tree type
, tree op
)
8798 if (useless_type_conversion_p (type
, TREE_TYPE (op
)))
8800 return gimple_build (seq
, loc
, NOP_EXPR
, type
, op
);
8803 /* Build the conversion (ptrofftype) OP with a result of a type
8804 compatible with ptrofftype with location LOC if such conversion
8805 is neccesary in GIMPLE, simplifying it first.
8806 Returns the built expression value and appends
8807 statements possibly defining it to SEQ. */
8810 gimple_convert_to_ptrofftype (gimple_seq
*seq
, location_t loc
, tree op
)
8812 if (ptrofftype_p (TREE_TYPE (op
)))
8814 return gimple_convert (seq
, loc
, sizetype
, op
);
8817 /* Build a vector of type TYPE in which each element has the value OP.
8818 Return a gimple value for the result, appending any new statements
8822 gimple_build_vector_from_val (gimple_seq
*seq
, location_t loc
, tree type
,
8825 if (!TYPE_VECTOR_SUBPARTS (type
).is_constant ()
8826 && !CONSTANT_CLASS_P (op
))
8827 return gimple_build (seq
, loc
, VEC_DUPLICATE_EXPR
, type
, op
);
8829 tree res
, vec
= build_vector_from_val (type
, op
);
8830 if (is_gimple_val (vec
))
8832 if (gimple_in_ssa_p (cfun
))
8833 res
= make_ssa_name (type
);
8835 res
= create_tmp_reg (type
);
8836 gimple
*stmt
= gimple_build_assign (res
, vec
);
8837 gimple_set_location (stmt
, loc
);
8838 gimple_seq_add_stmt_without_update (seq
, stmt
);
8842 /* Build a vector from BUILDER, handling the case in which some elements
8843 are non-constant. Return a gimple value for the result, appending any
8844 new instructions to SEQ.
8846 BUILDER must not have a stepped encoding on entry. This is because
8847 the function is not geared up to handle the arithmetic that would
8848 be needed in the variable case, and any code building a vector that
8849 is known to be constant should use BUILDER->build () directly. */
8852 gimple_build_vector (gimple_seq
*seq
, location_t loc
,
8853 tree_vector_builder
*builder
)
8855 gcc_assert (builder
->nelts_per_pattern () <= 2);
8856 unsigned int encoded_nelts
= builder
->encoded_nelts ();
8857 for (unsigned int i
= 0; i
< encoded_nelts
; ++i
)
8858 if (!CONSTANT_CLASS_P ((*builder
)[i
]))
8860 tree type
= builder
->type ();
8861 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (type
).to_constant ();
8862 vec
<constructor_elt
, va_gc
> *v
;
8863 vec_alloc (v
, nelts
);
8864 for (i
= 0; i
< nelts
; ++i
)
8865 CONSTRUCTOR_APPEND_ELT (v
, NULL_TREE
, builder
->elt (i
));
8868 if (gimple_in_ssa_p (cfun
))
8869 res
= make_ssa_name (type
);
8871 res
= create_tmp_reg (type
);
8872 gimple
*stmt
= gimple_build_assign (res
, build_constructor (type
, v
));
8873 gimple_set_location (stmt
, loc
);
8874 gimple_seq_add_stmt_without_update (seq
, stmt
);
8877 return builder
->build ();
8880 /* Emit gimple statements into &stmts that take a value given in OLD_SIZE
8881 and generate a value guaranteed to be rounded upwards to ALIGN.
8883 Return the tree node representing this size, it is of TREE_TYPE TYPE. */
8886 gimple_build_round_up (gimple_seq
*seq
, location_t loc
, tree type
,
8887 tree old_size
, unsigned HOST_WIDE_INT align
)
8889 unsigned HOST_WIDE_INT tg_mask
= align
- 1;
8890 /* tree new_size = (old_size + tg_mask) & ~tg_mask; */
8891 gcc_assert (INTEGRAL_TYPE_P (type
));
8892 tree tree_mask
= build_int_cst (type
, tg_mask
);
8893 tree oversize
= gimple_build (seq
, loc
, PLUS_EXPR
, type
, old_size
,
8896 tree mask
= build_int_cst (type
, -align
);
8897 return gimple_build (seq
, loc
, BIT_AND_EXPR
, type
, oversize
, mask
);
8900 /* Return true if the result of assignment STMT is known to be non-negative.
8901 If the return value is based on the assumption that signed overflow is
8902 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
8903 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
8906 gimple_assign_nonnegative_warnv_p (gimple
*stmt
, bool *strict_overflow_p
,
8909 enum tree_code code
= gimple_assign_rhs_code (stmt
);
8910 tree type
= TREE_TYPE (gimple_assign_lhs (stmt
));
8911 switch (get_gimple_rhs_class (code
))
8913 case GIMPLE_UNARY_RHS
:
8914 return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt
),
8916 gimple_assign_rhs1 (stmt
),
8917 strict_overflow_p
, depth
);
8918 case GIMPLE_BINARY_RHS
:
8919 return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt
),
8921 gimple_assign_rhs1 (stmt
),
8922 gimple_assign_rhs2 (stmt
),
8923 strict_overflow_p
, depth
);
8924 case GIMPLE_TERNARY_RHS
:
8926 case GIMPLE_SINGLE_RHS
:
8927 return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt
),
8928 strict_overflow_p
, depth
);
8929 case GIMPLE_INVALID_RHS
:
8935 /* Return true if return value of call STMT is known to be non-negative.
8936 If the return value is based on the assumption that signed overflow is
8937 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
8938 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
8941 gimple_call_nonnegative_warnv_p (gimple
*stmt
, bool *strict_overflow_p
,
8944 tree arg0
= gimple_call_num_args (stmt
) > 0 ?
8945 gimple_call_arg (stmt
, 0) : NULL_TREE
;
8946 tree arg1
= gimple_call_num_args (stmt
) > 1 ?
8947 gimple_call_arg (stmt
, 1) : NULL_TREE
;
8948 tree lhs
= gimple_call_lhs (stmt
);
8950 && tree_call_nonnegative_warnv_p (TREE_TYPE (lhs
),
8951 gimple_call_combined_fn (stmt
),
8953 strict_overflow_p
, depth
));
8956 /* Return true if return value of call STMT is known to be non-negative.
8957 If the return value is based on the assumption that signed overflow is
8958 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
8959 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
8962 gimple_phi_nonnegative_warnv_p (gimple
*stmt
, bool *strict_overflow_p
,
8965 for (unsigned i
= 0; i
< gimple_phi_num_args (stmt
); ++i
)
8967 tree arg
= gimple_phi_arg_def (stmt
, i
);
8968 if (!tree_single_nonnegative_warnv_p (arg
, strict_overflow_p
, depth
+ 1))
8974 /* Return true if STMT is known to compute a non-negative value.
8975 If the return value is based on the assumption that signed overflow is
8976 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
8977 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
8980 gimple_stmt_nonnegative_warnv_p (gimple
*stmt
, bool *strict_overflow_p
,
8983 switch (gimple_code (stmt
))
8986 return gimple_assign_nonnegative_warnv_p (stmt
, strict_overflow_p
,
8989 return gimple_call_nonnegative_warnv_p (stmt
, strict_overflow_p
,
8992 return gimple_phi_nonnegative_warnv_p (stmt
, strict_overflow_p
,
8999 /* Return true if the floating-point value computed by assignment STMT
9000 is known to have an integer value. We also allow +Inf, -Inf and NaN
9001 to be considered integer values. Return false for signaling NaN.
9003 DEPTH is the current nesting depth of the query. */
9006 gimple_assign_integer_valued_real_p (gimple
*stmt
, int depth
)
9008 enum tree_code code
= gimple_assign_rhs_code (stmt
);
9009 switch (get_gimple_rhs_class (code
))
9011 case GIMPLE_UNARY_RHS
:
9012 return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt
),
9013 gimple_assign_rhs1 (stmt
), depth
);
9014 case GIMPLE_BINARY_RHS
:
9015 return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt
),
9016 gimple_assign_rhs1 (stmt
),
9017 gimple_assign_rhs2 (stmt
), depth
);
9018 case GIMPLE_TERNARY_RHS
:
9020 case GIMPLE_SINGLE_RHS
:
9021 return integer_valued_real_single_p (gimple_assign_rhs1 (stmt
), depth
);
9022 case GIMPLE_INVALID_RHS
:
9028 /* Return true if the floating-point value computed by call STMT is known
9029 to have an integer value. We also allow +Inf, -Inf and NaN to be
9030 considered integer values. Return false for signaling NaN.
9032 DEPTH is the current nesting depth of the query. */
9035 gimple_call_integer_valued_real_p (gimple
*stmt
, int depth
)
9037 tree arg0
= (gimple_call_num_args (stmt
) > 0
9038 ? gimple_call_arg (stmt
, 0)
9040 tree arg1
= (gimple_call_num_args (stmt
) > 1
9041 ? gimple_call_arg (stmt
, 1)
9043 return integer_valued_real_call_p (gimple_call_combined_fn (stmt
),
9047 /* Return true if the floating-point result of phi STMT is known to have
9048 an integer value. We also allow +Inf, -Inf and NaN to be considered
9049 integer values. Return false for signaling NaN.
9051 DEPTH is the current nesting depth of the query. */
9054 gimple_phi_integer_valued_real_p (gimple
*stmt
, int depth
)
9056 for (unsigned i
= 0; i
< gimple_phi_num_args (stmt
); ++i
)
9058 tree arg
= gimple_phi_arg_def (stmt
, i
);
9059 if (!integer_valued_real_single_p (arg
, depth
+ 1))
9065 /* Return true if the floating-point value computed by STMT is known
9066 to have an integer value. We also allow +Inf, -Inf and NaN to be
9067 considered integer values. Return false for signaling NaN.
9069 DEPTH is the current nesting depth of the query. */
9072 gimple_stmt_integer_valued_real_p (gimple
*stmt
, int depth
)
9074 switch (gimple_code (stmt
))
9077 return gimple_assign_integer_valued_real_p (stmt
, depth
);
9079 return gimple_call_integer_valued_real_p (stmt
, depth
);
9081 return gimple_phi_integer_valued_real_p (stmt
, depth
);