1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010-2018 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
32 #include "gimple-pretty-print.h"
33 #include "gimple-ssa-warn-restrict.h"
34 #include "fold-const.h"
37 #include "stor-layout.h"
39 #include "gimple-fold.h"
41 #include "gimple-iterator.h"
42 #include "tree-into-ssa.h"
44 #include "tree-object-size.h"
46 #include "tree-ssa-propagate.h"
47 #include "ipa-utils.h"
48 #include "tree-ssa-address.h"
49 #include "langhooks.h"
50 #include "gimplify-me.h"
54 #include "gimple-match.h"
55 #include "gomp-constants.h"
56 #include "optabs-query.h"
57 #include "omp-general.h"
59 #include "fold-const-call.h"
60 #include "stringpool.h"
63 #include "diagnostic-core.h"
66 #include "tree-vector-builder.h"
67 #include "tree-ssa-strlen.h"
69 enum strlen_range_kind
{
70 /* Compute the exact constant string length. */
72 /* Compute the maximum constant string length. */
74 /* Compute a range of string lengths bounded by object sizes. When
75 the length of a string cannot be determined, consider as the upper
76 bound the size of the enclosing object the string may be a member
77 or element of. Also determine the size of the largest character
78 array the string may refer to. */
80 /* Temporary until the rest of Martin's strlen range work is integrated. */
82 /* Determine the integer value of the argument (not string length). */
86 static bool get_range_strlen (tree
, tree
[2], bitmap
*, strlen_range_kind
,
87 bool *, unsigned, tree
*);
89 /* Return true when DECL can be referenced from current unit.
90 FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
91 We can get declarations that are not possible to reference for various
94 1) When analyzing C++ virtual tables.
95 C++ virtual tables do have known constructors even
96 when they are keyed to other compilation unit.
97 Those tables can contain pointers to methods and vars
98 in other units. Those methods have both STATIC and EXTERNAL
100 2) In WHOPR mode devirtualization might lead to reference
101 to method that was partitioned elsehwere.
102 In this case we have static VAR_DECL or FUNCTION_DECL
103 that has no corresponding callgraph/varpool node
105 3) COMDAT functions referred by external vtables that
106 we devirtualize only during final compilation stage.
107 At this time we already decided that we will not output
108 the function body and thus we can't reference the symbol
112 can_refer_decl_in_current_unit_p (tree decl
, tree from_decl
)
115 struct cgraph_node
*node
;
118 if (DECL_ABSTRACT_P (decl
))
121 /* We are concerned only about static/external vars and functions. */
122 if ((!TREE_STATIC (decl
) && !DECL_EXTERNAL (decl
))
123 || !VAR_OR_FUNCTION_DECL_P (decl
))
126 /* Static objects can be referred only if they was not optimized out yet. */
127 if (!TREE_PUBLIC (decl
) && !DECL_EXTERNAL (decl
))
129 /* Before we start optimizing unreachable code we can be sure all
130 static objects are defined. */
131 if (symtab
->function_flags_ready
)
133 snode
= symtab_node::get (decl
);
134 if (!snode
|| !snode
->definition
)
136 node
= dyn_cast
<cgraph_node
*> (snode
);
137 return !node
|| !node
->global
.inlined_to
;
140 /* We will later output the initializer, so we can refer to it.
141 So we are concerned only when DECL comes from initializer of
142 external var or var that has been optimized out. */
144 || !VAR_P (from_decl
)
145 || (!DECL_EXTERNAL (from_decl
)
146 && (vnode
= varpool_node::get (from_decl
)) != NULL
147 && vnode
->definition
)
149 && (vnode
= varpool_node::get (from_decl
)) != NULL
150 && vnode
->in_other_partition
))
152 /* We are folding reference from external vtable. The vtable may reffer
153 to a symbol keyed to other compilation unit. The other compilation
154 unit may be in separate DSO and the symbol may be hidden. */
155 if (DECL_VISIBILITY_SPECIFIED (decl
)
156 && DECL_EXTERNAL (decl
)
157 && DECL_VISIBILITY (decl
) != VISIBILITY_DEFAULT
158 && (!(snode
= symtab_node::get (decl
)) || !snode
->in_other_partition
))
160 /* When function is public, we always can introduce new reference.
161 Exception are the COMDAT functions where introducing a direct
162 reference imply need to include function body in the curren tunit. */
163 if (TREE_PUBLIC (decl
) && !DECL_COMDAT (decl
))
165 /* We have COMDAT. We are going to check if we still have definition
166 or if the definition is going to be output in other partition.
167 Bypass this when gimplifying; all needed functions will be produced.
169 As observed in PR20991 for already optimized out comdat virtual functions
170 it may be tempting to not necessarily give up because the copy will be
171 output elsewhere when corresponding vtable is output.
172 This is however not possible - ABI specify that COMDATs are output in
173 units where they are used and when the other unit was compiled with LTO
174 it is possible that vtable was kept public while the function itself
176 if (!symtab
->function_flags_ready
)
179 snode
= symtab_node::get (decl
);
181 || ((!snode
->definition
|| DECL_EXTERNAL (decl
))
182 && (!snode
->in_other_partition
183 || (!snode
->forced_by_abi
&& !snode
->force_output
))))
185 node
= dyn_cast
<cgraph_node
*> (snode
);
186 return !node
|| !node
->global
.inlined_to
;
189 /* Create a temporary for TYPE for a statement STMT. If the current function
190 is in SSA form, a SSA name is created. Otherwise a temporary register
194 create_tmp_reg_or_ssa_name (tree type
, gimple
*stmt
)
196 if (gimple_in_ssa_p (cfun
))
197 return make_ssa_name (type
, stmt
);
199 return create_tmp_reg (type
);
202 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
203 acceptable form for is_gimple_min_invariant.
204 FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL. */
207 canonicalize_constructor_val (tree cval
, tree from_decl
)
209 tree orig_cval
= cval
;
211 if (TREE_CODE (cval
) == POINTER_PLUS_EXPR
212 && TREE_CODE (TREE_OPERAND (cval
, 1)) == INTEGER_CST
)
214 tree ptr
= TREE_OPERAND (cval
, 0);
215 if (is_gimple_min_invariant (ptr
))
216 cval
= build1_loc (EXPR_LOCATION (cval
),
217 ADDR_EXPR
, TREE_TYPE (ptr
),
218 fold_build2 (MEM_REF
, TREE_TYPE (TREE_TYPE (ptr
)),
220 fold_convert (ptr_type_node
,
221 TREE_OPERAND (cval
, 1))));
223 if (TREE_CODE (cval
) == ADDR_EXPR
)
225 tree base
= NULL_TREE
;
226 if (TREE_CODE (TREE_OPERAND (cval
, 0)) == COMPOUND_LITERAL_EXPR
)
228 base
= COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval
, 0));
230 TREE_OPERAND (cval
, 0) = base
;
233 base
= get_base_address (TREE_OPERAND (cval
, 0));
237 if (VAR_OR_FUNCTION_DECL_P (base
)
238 && !can_refer_decl_in_current_unit_p (base
, from_decl
))
240 if (TREE_TYPE (base
) == error_mark_node
)
243 TREE_ADDRESSABLE (base
) = 1;
244 else if (TREE_CODE (base
) == FUNCTION_DECL
)
246 /* Make sure we create a cgraph node for functions we'll reference.
247 They can be non-existent if the reference comes from an entry
248 of an external vtable for example. */
249 cgraph_node::get_create (base
);
251 /* Fixup types in global initializers. */
252 if (TREE_TYPE (TREE_TYPE (cval
)) != TREE_TYPE (TREE_OPERAND (cval
, 0)))
253 cval
= build_fold_addr_expr (TREE_OPERAND (cval
, 0));
255 if (!useless_type_conversion_p (TREE_TYPE (orig_cval
), TREE_TYPE (cval
)))
256 cval
= fold_convert (TREE_TYPE (orig_cval
), cval
);
259 if (TREE_OVERFLOW_P (cval
))
260 return drop_tree_overflow (cval
);
264 /* If SYM is a constant variable with known value, return the value.
265 NULL_TREE is returned otherwise. */
268 get_symbol_constant_value (tree sym
)
270 tree val
= ctor_for_folding (sym
);
271 if (val
!= error_mark_node
)
275 val
= canonicalize_constructor_val (unshare_expr (val
), sym
);
276 if (val
&& is_gimple_min_invariant (val
))
281 /* Variables declared 'const' without an initializer
282 have zero as the initializer if they may not be
283 overridden at link or run time. */
285 && is_gimple_reg_type (TREE_TYPE (sym
)))
286 return build_zero_cst (TREE_TYPE (sym
));
294 /* Subroutine of fold_stmt. We perform several simplifications of the
295 memory reference tree EXPR and make sure to re-gimplify them properly
296 after propagation of constant addresses. IS_LHS is true if the
297 reference is supposed to be an lvalue. */
300 maybe_fold_reference (tree expr
, bool is_lhs
)
304 if ((TREE_CODE (expr
) == VIEW_CONVERT_EXPR
305 || TREE_CODE (expr
) == REALPART_EXPR
306 || TREE_CODE (expr
) == IMAGPART_EXPR
)
307 && CONSTANT_CLASS_P (TREE_OPERAND (expr
, 0)))
308 return fold_unary_loc (EXPR_LOCATION (expr
),
311 TREE_OPERAND (expr
, 0));
312 else if (TREE_CODE (expr
) == BIT_FIELD_REF
313 && CONSTANT_CLASS_P (TREE_OPERAND (expr
, 0)))
314 return fold_ternary_loc (EXPR_LOCATION (expr
),
317 TREE_OPERAND (expr
, 0),
318 TREE_OPERAND (expr
, 1),
319 TREE_OPERAND (expr
, 2));
322 && (result
= fold_const_aggregate_ref (expr
))
323 && is_gimple_min_invariant (result
))
330 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
331 replacement rhs for the statement or NULL_TREE if no simplification
332 could be made. It is assumed that the operands have been previously
336 fold_gimple_assign (gimple_stmt_iterator
*si
)
338 gimple
*stmt
= gsi_stmt (*si
);
339 enum tree_code subcode
= gimple_assign_rhs_code (stmt
);
340 location_t loc
= gimple_location (stmt
);
342 tree result
= NULL_TREE
;
344 switch (get_gimple_rhs_class (subcode
))
346 case GIMPLE_SINGLE_RHS
:
348 tree rhs
= gimple_assign_rhs1 (stmt
);
350 if (TREE_CLOBBER_P (rhs
))
353 if (REFERENCE_CLASS_P (rhs
))
354 return maybe_fold_reference (rhs
, false);
356 else if (TREE_CODE (rhs
) == OBJ_TYPE_REF
)
358 tree val
= OBJ_TYPE_REF_EXPR (rhs
);
359 if (is_gimple_min_invariant (val
))
361 else if (flag_devirtualize
&& virtual_method_call_p (rhs
))
364 vec
<cgraph_node
*>targets
365 = possible_polymorphic_call_targets (rhs
, stmt
, &final
);
366 if (final
&& targets
.length () <= 1 && dbg_cnt (devirt
))
368 if (dump_enabled_p ())
370 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS
, stmt
,
371 "resolving virtual function address "
372 "reference to function %s\n",
373 targets
.length () == 1
374 ? targets
[0]->name ()
377 if (targets
.length () == 1)
379 val
= fold_convert (TREE_TYPE (val
),
380 build_fold_addr_expr_loc
381 (loc
, targets
[0]->decl
));
382 STRIP_USELESS_TYPE_CONVERSION (val
);
385 /* We can not use __builtin_unreachable here because it
386 can not have address taken. */
387 val
= build_int_cst (TREE_TYPE (val
), 0);
393 else if (TREE_CODE (rhs
) == ADDR_EXPR
)
395 tree ref
= TREE_OPERAND (rhs
, 0);
396 tree tem
= maybe_fold_reference (ref
, true);
398 && TREE_CODE (tem
) == MEM_REF
399 && integer_zerop (TREE_OPERAND (tem
, 1)))
400 result
= fold_convert (TREE_TYPE (rhs
), TREE_OPERAND (tem
, 0));
402 result
= fold_convert (TREE_TYPE (rhs
),
403 build_fold_addr_expr_loc (loc
, tem
));
404 else if (TREE_CODE (ref
) == MEM_REF
405 && integer_zerop (TREE_OPERAND (ref
, 1)))
406 result
= fold_convert (TREE_TYPE (rhs
), TREE_OPERAND (ref
, 0));
410 /* Strip away useless type conversions. Both the
411 NON_LVALUE_EXPR that may have been added by fold, and
412 "useless" type conversions that might now be apparent
413 due to propagation. */
414 STRIP_USELESS_TYPE_CONVERSION (result
);
416 if (result
!= rhs
&& valid_gimple_rhs_p (result
))
421 else if (TREE_CODE (rhs
) == CONSTRUCTOR
422 && TREE_CODE (TREE_TYPE (rhs
)) == VECTOR_TYPE
)
424 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
428 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs
), i
, val
)
429 if (! CONSTANT_CLASS_P (val
))
432 return build_vector_from_ctor (TREE_TYPE (rhs
),
433 CONSTRUCTOR_ELTS (rhs
));
436 else if (DECL_P (rhs
))
437 return get_symbol_constant_value (rhs
);
441 case GIMPLE_UNARY_RHS
:
444 case GIMPLE_BINARY_RHS
:
447 case GIMPLE_TERNARY_RHS
:
448 result
= fold_ternary_loc (loc
, subcode
,
449 TREE_TYPE (gimple_assign_lhs (stmt
)),
450 gimple_assign_rhs1 (stmt
),
451 gimple_assign_rhs2 (stmt
),
452 gimple_assign_rhs3 (stmt
));
456 STRIP_USELESS_TYPE_CONVERSION (result
);
457 if (valid_gimple_rhs_p (result
))
462 case GIMPLE_INVALID_RHS
:
470 /* Replace a statement at *SI_P with a sequence of statements in STMTS,
471 adjusting the replacement stmts location and virtual operands.
472 If the statement has a lhs the last stmt in the sequence is expected
473 to assign to that lhs. */
476 gsi_replace_with_seq_vops (gimple_stmt_iterator
*si_p
, gimple_seq stmts
)
478 gimple
*stmt
= gsi_stmt (*si_p
);
480 if (gimple_has_location (stmt
))
481 annotate_all_with_location (stmts
, gimple_location (stmt
));
483 /* First iterate over the replacement statements backward, assigning
484 virtual operands to their defining statements. */
485 gimple
*laststore
= NULL
;
486 for (gimple_stmt_iterator i
= gsi_last (stmts
);
487 !gsi_end_p (i
); gsi_prev (&i
))
489 gimple
*new_stmt
= gsi_stmt (i
);
490 if ((gimple_assign_single_p (new_stmt
)
491 && !is_gimple_reg (gimple_assign_lhs (new_stmt
)))
492 || (is_gimple_call (new_stmt
)
493 && (gimple_call_flags (new_stmt
)
494 & (ECF_NOVOPS
| ECF_PURE
| ECF_CONST
| ECF_NORETURN
)) == 0))
498 vdef
= gimple_vdef (stmt
);
500 vdef
= make_ssa_name (gimple_vop (cfun
), new_stmt
);
501 gimple_set_vdef (new_stmt
, vdef
);
502 if (vdef
&& TREE_CODE (vdef
) == SSA_NAME
)
503 SSA_NAME_DEF_STMT (vdef
) = new_stmt
;
504 laststore
= new_stmt
;
508 /* Second iterate over the statements forward, assigning virtual
509 operands to their uses. */
510 tree reaching_vuse
= gimple_vuse (stmt
);
511 for (gimple_stmt_iterator i
= gsi_start (stmts
);
512 !gsi_end_p (i
); gsi_next (&i
))
514 gimple
*new_stmt
= gsi_stmt (i
);
515 /* If the new statement possibly has a VUSE, update it with exact SSA
516 name we know will reach this one. */
517 if (gimple_has_mem_ops (new_stmt
))
518 gimple_set_vuse (new_stmt
, reaching_vuse
);
519 gimple_set_modified (new_stmt
, true);
520 if (gimple_vdef (new_stmt
))
521 reaching_vuse
= gimple_vdef (new_stmt
);
524 /* If the new sequence does not do a store release the virtual
525 definition of the original statement. */
527 && reaching_vuse
== gimple_vuse (stmt
))
529 tree vdef
= gimple_vdef (stmt
);
531 && TREE_CODE (vdef
) == SSA_NAME
)
533 unlink_stmt_vdef (stmt
);
534 release_ssa_name (vdef
);
538 /* Finally replace the original statement with the sequence. */
539 gsi_replace_with_seq (si_p
, stmts
, false);
542 /* Convert EXPR into a GIMPLE value suitable for substitution on the
543 RHS of an assignment. Insert the necessary statements before
544 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
545 is replaced. If the call is expected to produces a result, then it
546 is replaced by an assignment of the new RHS to the result variable.
547 If the result is to be ignored, then the call is replaced by a
548 GIMPLE_NOP. A proper VDEF chain is retained by making the first
549 VUSE and the last VDEF of the whole sequence be the same as the replaced
550 statement and using new SSA names for stores in between. */
553 gimplify_and_update_call_from_tree (gimple_stmt_iterator
*si_p
, tree expr
)
556 gimple
*stmt
, *new_stmt
;
557 gimple_stmt_iterator i
;
558 gimple_seq stmts
= NULL
;
560 stmt
= gsi_stmt (*si_p
);
562 gcc_assert (is_gimple_call (stmt
));
564 push_gimplify_context (gimple_in_ssa_p (cfun
));
566 lhs
= gimple_call_lhs (stmt
);
567 if (lhs
== NULL_TREE
)
569 gimplify_and_add (expr
, &stmts
);
570 /* We can end up with folding a memcpy of an empty class assignment
571 which gets optimized away by C++ gimplification. */
572 if (gimple_seq_empty_p (stmts
))
574 pop_gimplify_context (NULL
);
575 if (gimple_in_ssa_p (cfun
))
577 unlink_stmt_vdef (stmt
);
580 gsi_replace (si_p
, gimple_build_nop (), false);
586 tree tmp
= force_gimple_operand (expr
, &stmts
, false, NULL_TREE
);
587 new_stmt
= gimple_build_assign (lhs
, tmp
);
588 i
= gsi_last (stmts
);
589 gsi_insert_after_without_update (&i
, new_stmt
,
590 GSI_CONTINUE_LINKING
);
593 pop_gimplify_context (NULL
);
595 gsi_replace_with_seq_vops (si_p
, stmts
);
599 /* Replace the call at *GSI with the gimple value VAL. */
602 replace_call_with_value (gimple_stmt_iterator
*gsi
, tree val
)
604 gimple
*stmt
= gsi_stmt (*gsi
);
605 tree lhs
= gimple_call_lhs (stmt
);
609 if (!useless_type_conversion_p (TREE_TYPE (lhs
), TREE_TYPE (val
)))
610 val
= fold_convert (TREE_TYPE (lhs
), val
);
611 repl
= gimple_build_assign (lhs
, val
);
614 repl
= gimple_build_nop ();
615 tree vdef
= gimple_vdef (stmt
);
616 if (vdef
&& TREE_CODE (vdef
) == SSA_NAME
)
618 unlink_stmt_vdef (stmt
);
619 release_ssa_name (vdef
);
621 gsi_replace (gsi
, repl
, false);
624 /* Replace the call at *GSI with the new call REPL and fold that
628 replace_call_with_call_and_fold (gimple_stmt_iterator
*gsi
, gimple
*repl
)
630 gimple
*stmt
= gsi_stmt (*gsi
);
631 gimple_call_set_lhs (repl
, gimple_call_lhs (stmt
));
632 gimple_set_location (repl
, gimple_location (stmt
));
633 if (gimple_vdef (stmt
)
634 && TREE_CODE (gimple_vdef (stmt
)) == SSA_NAME
)
636 gimple_set_vdef (repl
, gimple_vdef (stmt
));
637 SSA_NAME_DEF_STMT (gimple_vdef (repl
)) = repl
;
639 if (gimple_vuse (stmt
))
640 gimple_set_vuse (repl
, gimple_vuse (stmt
));
641 gsi_replace (gsi
, repl
, false);
645 /* Return true if VAR is a VAR_DECL or a component thereof. */
648 var_decl_component_p (tree var
)
651 while (handled_component_p (inner
))
652 inner
= TREE_OPERAND (inner
, 0);
653 return (DECL_P (inner
)
654 || (TREE_CODE (inner
) == MEM_REF
655 && TREE_CODE (TREE_OPERAND (inner
, 0)) == ADDR_EXPR
));
658 /* Return TRUE if the SIZE argument, representing the size of an
659 object, is in a range of values of which exactly zero is valid. */
662 size_must_be_zero_p (tree size
)
664 if (integer_zerop (size
))
667 if (TREE_CODE (size
) != SSA_NAME
|| !INTEGRAL_TYPE_P (TREE_TYPE (size
)))
670 tree type
= TREE_TYPE (size
);
671 int prec
= TYPE_PRECISION (type
);
673 /* Compute the value of SSIZE_MAX, the largest positive value that
674 can be stored in ssize_t, the signed counterpart of size_t. */
675 wide_int ssize_max
= wi::lshift (wi::one (prec
), prec
- 1) - 1;
676 value_range
valid_range (VR_RANGE
,
677 build_int_cst (type
, 0),
678 wide_int_to_tree (type
, ssize_max
));
680 get_range_info (size
, vr
);
681 vr
.intersect (&valid_range
);
685 /* Fold function call to builtin mem{{,p}cpy,move}. Try to detect and
686 diagnose (otherwise undefined) overlapping copies without preventing
687 folding. When folded, GCC guarantees that overlapping memcpy has
688 the same semantics as memmove. Call to the library memcpy need not
689 provide the same guarantee. Return false if no simplification can
693 gimple_fold_builtin_memory_op (gimple_stmt_iterator
*gsi
,
694 tree dest
, tree src
, int endp
)
696 gimple
*stmt
= gsi_stmt (*gsi
);
697 tree lhs
= gimple_call_lhs (stmt
);
698 tree len
= gimple_call_arg (stmt
, 2);
699 tree destvar
, srcvar
;
700 location_t loc
= gimple_location (stmt
);
702 bool nowarn
= gimple_no_warning_p (stmt
);
704 /* If the LEN parameter is a constant zero or in range where
705 the only valid value is zero, return DEST. */
706 if (size_must_be_zero_p (len
))
709 if (gimple_call_lhs (stmt
))
710 repl
= gimple_build_assign (gimple_call_lhs (stmt
), dest
);
712 repl
= gimple_build_nop ();
713 tree vdef
= gimple_vdef (stmt
);
714 if (vdef
&& TREE_CODE (vdef
) == SSA_NAME
)
716 unlink_stmt_vdef (stmt
);
717 release_ssa_name (vdef
);
719 gsi_replace (gsi
, repl
, false);
723 /* If SRC and DEST are the same (and not volatile), return
724 DEST{,+LEN,+LEN-1}. */
725 if (operand_equal_p (src
, dest
, 0))
727 /* Avoid diagnosing exact overlap in calls to __builtin_memcpy.
728 It's safe and may even be emitted by GCC itself (see bug
730 unlink_stmt_vdef (stmt
);
731 if (gimple_vdef (stmt
) && TREE_CODE (gimple_vdef (stmt
)) == SSA_NAME
)
732 release_ssa_name (gimple_vdef (stmt
));
735 gsi_replace (gsi
, gimple_build_nop (), false);
742 tree srctype
, desttype
;
743 unsigned int src_align
, dest_align
;
746 unsigned HOST_WIDE_INT tmp_len
;
748 /* Build accesses at offset zero with a ref-all character type. */
749 off0
= build_int_cst (build_pointer_type_for_mode (char_type_node
,
752 /* If we can perform the copy efficiently with first doing all loads
753 and then all stores inline it that way. Currently efficiently
754 means that we can load all the memory into a single integer
755 register which is what MOVE_MAX gives us. */
756 src_align
= get_pointer_alignment (src
);
757 dest_align
= get_pointer_alignment (dest
);
758 if (tree_fits_uhwi_p (len
)
759 && compare_tree_int (len
, MOVE_MAX
) <= 0
760 /* ??? Don't transform copies from strings with known length this
761 confuses the tree-ssa-strlen.c. This doesn't handle
762 the case in gcc.dg/strlenopt-8.c which is XFAILed for that
764 && !c_strlen (src
, 2)
765 && !((tmp_str
= c_getstr (src
, &tmp_len
)) != NULL
766 && memchr (tmp_str
, 0, tmp_len
) == NULL
))
768 unsigned ilen
= tree_to_uhwi (len
);
769 if (pow2p_hwi (ilen
))
771 /* Detect invalid bounds and overlapping copies and issue
772 either -Warray-bounds or -Wrestrict. */
774 && check_bounds_or_overlap (as_a
<gcall
*>(stmt
),
775 dest
, src
, len
, len
))
776 gimple_set_no_warning (stmt
, true);
778 scalar_int_mode mode
;
779 tree type
= lang_hooks
.types
.type_for_size (ilen
* 8, 1);
781 && is_a
<scalar_int_mode
> (TYPE_MODE (type
), &mode
)
782 && GET_MODE_SIZE (mode
) * BITS_PER_UNIT
== ilen
* 8
783 /* If the destination pointer is not aligned we must be able
784 to emit an unaligned store. */
785 && (dest_align
>= GET_MODE_ALIGNMENT (mode
)
786 || !targetm
.slow_unaligned_access (mode
, dest_align
)
787 || (optab_handler (movmisalign_optab
, mode
)
788 != CODE_FOR_nothing
)))
791 tree desttype
= type
;
792 if (src_align
< GET_MODE_ALIGNMENT (mode
))
793 srctype
= build_aligned_type (type
, src_align
);
794 tree srcmem
= fold_build2 (MEM_REF
, srctype
, src
, off0
);
795 tree tem
= fold_const_aggregate_ref (srcmem
);
798 else if (src_align
< GET_MODE_ALIGNMENT (mode
)
799 && targetm
.slow_unaligned_access (mode
, src_align
)
800 && (optab_handler (movmisalign_optab
, mode
)
801 == CODE_FOR_nothing
))
806 if (is_gimple_reg_type (TREE_TYPE (srcmem
)))
808 new_stmt
= gimple_build_assign (NULL_TREE
, srcmem
);
810 = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem
),
812 gimple_assign_set_lhs (new_stmt
, srcmem
);
813 gimple_set_vuse (new_stmt
, gimple_vuse (stmt
));
814 gsi_insert_before (gsi
, new_stmt
, GSI_SAME_STMT
);
816 if (dest_align
< GET_MODE_ALIGNMENT (mode
))
817 desttype
= build_aligned_type (type
, dest_align
);
819 = gimple_build_assign (fold_build2 (MEM_REF
, desttype
,
822 gimple_set_vuse (new_stmt
, gimple_vuse (stmt
));
823 gimple_set_vdef (new_stmt
, gimple_vdef (stmt
));
824 if (gimple_vdef (new_stmt
)
825 && TREE_CODE (gimple_vdef (new_stmt
)) == SSA_NAME
)
826 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt
)) = new_stmt
;
829 gsi_replace (gsi
, new_stmt
, false);
832 gsi_insert_before (gsi
, new_stmt
, GSI_SAME_STMT
);
841 /* Both DEST and SRC must be pointer types.
842 ??? This is what old code did. Is the testing for pointer types
845 If either SRC is readonly or length is 1, we can use memcpy. */
846 if (!dest_align
|| !src_align
)
848 if (readonly_data_expr (src
)
849 || (tree_fits_uhwi_p (len
)
850 && (MIN (src_align
, dest_align
) / BITS_PER_UNIT
851 >= tree_to_uhwi (len
))))
853 tree fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
856 gimple_call_set_fndecl (stmt
, fn
);
857 gimple_call_set_arg (stmt
, 0, dest
);
858 gimple_call_set_arg (stmt
, 1, src
);
863 /* If *src and *dest can't overlap, optimize into memcpy as well. */
864 if (TREE_CODE (src
) == ADDR_EXPR
865 && TREE_CODE (dest
) == ADDR_EXPR
)
867 tree src_base
, dest_base
, fn
;
868 poly_int64 src_offset
= 0, dest_offset
= 0;
871 srcvar
= TREE_OPERAND (src
, 0);
872 src_base
= get_addr_base_and_unit_offset (srcvar
, &src_offset
);
873 if (src_base
== NULL
)
875 destvar
= TREE_OPERAND (dest
, 0);
876 dest_base
= get_addr_base_and_unit_offset (destvar
,
878 if (dest_base
== NULL
)
880 if (!poly_int_tree_p (len
, &maxsize
))
882 if (SSA_VAR_P (src_base
)
883 && SSA_VAR_P (dest_base
))
885 if (operand_equal_p (src_base
, dest_base
, 0)
886 && ranges_maybe_overlap_p (src_offset
, maxsize
,
887 dest_offset
, maxsize
))
890 else if (TREE_CODE (src_base
) == MEM_REF
891 && TREE_CODE (dest_base
) == MEM_REF
)
893 if (! operand_equal_p (TREE_OPERAND (src_base
, 0),
894 TREE_OPERAND (dest_base
, 0), 0))
896 poly_offset_int full_src_offset
897 = mem_ref_offset (src_base
) + src_offset
;
898 poly_offset_int full_dest_offset
899 = mem_ref_offset (dest_base
) + dest_offset
;
900 if (ranges_maybe_overlap_p (full_src_offset
, maxsize
,
901 full_dest_offset
, maxsize
))
907 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
910 gimple_call_set_fndecl (stmt
, fn
);
911 gimple_call_set_arg (stmt
, 0, dest
);
912 gimple_call_set_arg (stmt
, 1, src
);
917 /* If the destination and source do not alias optimize into
919 if ((is_gimple_min_invariant (dest
)
920 || TREE_CODE (dest
) == SSA_NAME
)
921 && (is_gimple_min_invariant (src
)
922 || TREE_CODE (src
) == SSA_NAME
))
925 ao_ref_init_from_ptr_and_size (&destr
, dest
, len
);
926 ao_ref_init_from_ptr_and_size (&srcr
, src
, len
);
927 if (!refs_may_alias_p_1 (&destr
, &srcr
, false))
930 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
933 gimple_call_set_fndecl (stmt
, fn
);
934 gimple_call_set_arg (stmt
, 0, dest
);
935 gimple_call_set_arg (stmt
, 1, src
);
944 if (!tree_fits_shwi_p (len
))
946 if (!POINTER_TYPE_P (TREE_TYPE (src
))
947 || !POINTER_TYPE_P (TREE_TYPE (dest
)))
949 /* In the following try to find a type that is most natural to be
950 used for the memcpy source and destination and that allows
951 the most optimization when memcpy is turned into a plain assignment
952 using that type. In theory we could always use a char[len] type
953 but that only gains us that the destination and source possibly
954 no longer will have their address taken. */
955 srctype
= TREE_TYPE (TREE_TYPE (src
));
956 if (TREE_CODE (srctype
) == ARRAY_TYPE
957 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype
), len
))
958 srctype
= TREE_TYPE (srctype
);
959 desttype
= TREE_TYPE (TREE_TYPE (dest
));
960 if (TREE_CODE (desttype
) == ARRAY_TYPE
961 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype
), len
))
962 desttype
= TREE_TYPE (desttype
);
963 if (TREE_ADDRESSABLE (srctype
)
964 || TREE_ADDRESSABLE (desttype
))
967 /* Make sure we are not copying using a floating-point mode or
968 a type whose size possibly does not match its precision. */
969 if (FLOAT_MODE_P (TYPE_MODE (desttype
))
970 || TREE_CODE (desttype
) == BOOLEAN_TYPE
971 || TREE_CODE (desttype
) == ENUMERAL_TYPE
)
972 desttype
= bitwise_type_for_mode (TYPE_MODE (desttype
));
973 if (FLOAT_MODE_P (TYPE_MODE (srctype
))
974 || TREE_CODE (srctype
) == BOOLEAN_TYPE
975 || TREE_CODE (srctype
) == ENUMERAL_TYPE
)
976 srctype
= bitwise_type_for_mode (TYPE_MODE (srctype
));
984 src_align
= get_pointer_alignment (src
);
985 dest_align
= get_pointer_alignment (dest
);
986 if (dest_align
< TYPE_ALIGN (desttype
)
987 || src_align
< TYPE_ALIGN (srctype
))
991 if (TREE_CODE (dest
) == ADDR_EXPR
992 && var_decl_component_p (TREE_OPERAND (dest
, 0))
993 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype
), len
))
994 destvar
= fold_build2 (MEM_REF
, desttype
, dest
, off0
);
997 if (TREE_CODE (src
) == ADDR_EXPR
998 && var_decl_component_p (TREE_OPERAND (src
, 0))
999 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype
), len
))
1002 || src_align
>= TYPE_ALIGN (desttype
))
1003 srcvar
= fold_build2 (MEM_REF
, destvar
? desttype
: srctype
,
1005 else if (!STRICT_ALIGNMENT
)
1007 srctype
= build_aligned_type (TYPE_MAIN_VARIANT (desttype
),
1009 srcvar
= fold_build2 (MEM_REF
, srctype
, src
, off0
);
1013 if (srcvar
== NULL_TREE
&& destvar
== NULL_TREE
)
1016 if (srcvar
== NULL_TREE
)
1018 if (src_align
>= TYPE_ALIGN (desttype
))
1019 srcvar
= fold_build2 (MEM_REF
, desttype
, src
, off0
);
1022 if (STRICT_ALIGNMENT
)
1024 srctype
= build_aligned_type (TYPE_MAIN_VARIANT (desttype
),
1026 srcvar
= fold_build2 (MEM_REF
, srctype
, src
, off0
);
1029 else if (destvar
== NULL_TREE
)
1031 if (dest_align
>= TYPE_ALIGN (srctype
))
1032 destvar
= fold_build2 (MEM_REF
, srctype
, dest
, off0
);
1035 if (STRICT_ALIGNMENT
)
1037 desttype
= build_aligned_type (TYPE_MAIN_VARIANT (srctype
),
1039 destvar
= fold_build2 (MEM_REF
, desttype
, dest
, off0
);
1043 /* Detect invalid bounds and overlapping copies and issue either
1044 -Warray-bounds or -Wrestrict. */
1046 check_bounds_or_overlap (as_a
<gcall
*>(stmt
), dest
, src
, len
, len
);
1049 if (is_gimple_reg_type (TREE_TYPE (srcvar
)))
1051 tree tem
= fold_const_aggregate_ref (srcvar
);
1054 if (! is_gimple_min_invariant (srcvar
))
1056 new_stmt
= gimple_build_assign (NULL_TREE
, srcvar
);
1057 srcvar
= create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar
),
1059 gimple_assign_set_lhs (new_stmt
, srcvar
);
1060 gimple_set_vuse (new_stmt
, gimple_vuse (stmt
));
1061 gsi_insert_before (gsi
, new_stmt
, GSI_SAME_STMT
);
1063 new_stmt
= gimple_build_assign (destvar
, srcvar
);
1064 goto set_vop_and_replace
;
1067 /* We get an aggregate copy. Use an unsigned char[] type to
1068 perform the copying to preserve padding and to avoid any issues
1069 with TREE_ADDRESSABLE types or float modes behavior on copying. */
1070 desttype
= build_array_type_nelts (unsigned_char_type_node
,
1071 tree_to_uhwi (len
));
1073 if (src_align
> TYPE_ALIGN (srctype
))
1074 srctype
= build_aligned_type (srctype
, src_align
);
1075 if (dest_align
> TYPE_ALIGN (desttype
))
1076 desttype
= build_aligned_type (desttype
, dest_align
);
1078 = gimple_build_assign (fold_build2 (MEM_REF
, desttype
, dest
, off0
),
1079 fold_build2 (MEM_REF
, srctype
, src
, off0
));
1080 set_vop_and_replace
:
1081 gimple_set_vuse (new_stmt
, gimple_vuse (stmt
));
1082 gimple_set_vdef (new_stmt
, gimple_vdef (stmt
));
1083 if (gimple_vdef (new_stmt
)
1084 && TREE_CODE (gimple_vdef (new_stmt
)) == SSA_NAME
)
1085 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt
)) = new_stmt
;
1088 gsi_replace (gsi
, new_stmt
, false);
1091 gsi_insert_before (gsi
, new_stmt
, GSI_SAME_STMT
);
1095 gimple_seq stmts
= NULL
;
1096 if (endp
== 0 || endp
== 3)
1099 len
= gimple_build (&stmts
, loc
, MINUS_EXPR
, TREE_TYPE (len
), len
,
1101 if (endp
== 2 || endp
== 1)
1103 len
= gimple_convert_to_ptrofftype (&stmts
, loc
, len
);
1104 dest
= gimple_build (&stmts
, loc
, POINTER_PLUS_EXPR
,
1105 TREE_TYPE (dest
), dest
, len
);
1108 gsi_insert_seq_before (gsi
, stmts
, GSI_SAME_STMT
);
1109 gimple
*repl
= gimple_build_assign (lhs
, dest
);
1110 gsi_replace (gsi
, repl
, false);
1114 /* Transform a call to built-in bcmp(a, b, len) at *GSI into one
1115 to built-in memcmp (a, b, len). */
1118 gimple_fold_builtin_bcmp (gimple_stmt_iterator
*gsi
)
1120 tree fn
= builtin_decl_implicit (BUILT_IN_MEMCMP
);
1125 /* Transform bcmp (a, b, len) into memcmp (a, b, len). */
1127 gimple
*stmt
= gsi_stmt (*gsi
);
1128 tree a
= gimple_call_arg (stmt
, 0);
1129 tree b
= gimple_call_arg (stmt
, 1);
1130 tree len
= gimple_call_arg (stmt
, 2);
1132 gimple
*repl
= gimple_build_call (fn
, 3, a
, b
, len
);
1133 replace_call_with_call_and_fold (gsi
, repl
);
1138 /* Transform a call to built-in bcopy (src, dest, len) at *GSI into one
1139 to built-in memmove (dest, src, len). */
1142 gimple_fold_builtin_bcopy (gimple_stmt_iterator
*gsi
)
1144 tree fn
= builtin_decl_implicit (BUILT_IN_MEMMOVE
);
1149 /* bcopy has been removed from POSIX in Issue 7 but Issue 6 specifies
1150 it's quivalent to memmove (not memcpy). Transform bcopy (src, dest,
1151 len) into memmove (dest, src, len). */
1153 gimple
*stmt
= gsi_stmt (*gsi
);
1154 tree src
= gimple_call_arg (stmt
, 0);
1155 tree dest
= gimple_call_arg (stmt
, 1);
1156 tree len
= gimple_call_arg (stmt
, 2);
1158 gimple
*repl
= gimple_build_call (fn
, 3, dest
, src
, len
);
1159 gimple_call_set_fntype (as_a
<gcall
*> (stmt
), TREE_TYPE (fn
));
1160 replace_call_with_call_and_fold (gsi
, repl
);
1165 /* Transform a call to built-in bzero (dest, len) at *GSI into one
1166 to built-in memset (dest, 0, len). */
1169 gimple_fold_builtin_bzero (gimple_stmt_iterator
*gsi
)
1171 tree fn
= builtin_decl_implicit (BUILT_IN_MEMSET
);
1176 /* Transform bzero (dest, len) into memset (dest, 0, len). */
1178 gimple
*stmt
= gsi_stmt (*gsi
);
1179 tree dest
= gimple_call_arg (stmt
, 0);
1180 tree len
= gimple_call_arg (stmt
, 1);
1182 gimple_seq seq
= NULL
;
1183 gimple
*repl
= gimple_build_call (fn
, 3, dest
, integer_zero_node
, len
);
1184 gimple_seq_add_stmt_without_update (&seq
, repl
);
1185 gsi_replace_with_seq_vops (gsi
, seq
);
1191 /* Fold function call to builtin memset or bzero at *GSI setting the
1192 memory of size LEN to VAL. Return whether a simplification was made. */
1195 gimple_fold_builtin_memset (gimple_stmt_iterator
*gsi
, tree c
, tree len
)
1197 gimple
*stmt
= gsi_stmt (*gsi
);
1199 unsigned HOST_WIDE_INT length
, cval
;
1201 /* If the LEN parameter is zero, return DEST. */
1202 if (integer_zerop (len
))
1204 replace_call_with_value (gsi
, gimple_call_arg (stmt
, 0));
1208 if (! tree_fits_uhwi_p (len
))
1211 if (TREE_CODE (c
) != INTEGER_CST
)
1214 tree dest
= gimple_call_arg (stmt
, 0);
1216 if (TREE_CODE (var
) != ADDR_EXPR
)
1219 var
= TREE_OPERAND (var
, 0);
1220 if (TREE_THIS_VOLATILE (var
))
1223 etype
= TREE_TYPE (var
);
1224 if (TREE_CODE (etype
) == ARRAY_TYPE
)
1225 etype
= TREE_TYPE (etype
);
1227 if (!INTEGRAL_TYPE_P (etype
)
1228 && !POINTER_TYPE_P (etype
))
1231 if (! var_decl_component_p (var
))
1234 length
= tree_to_uhwi (len
);
1235 if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (etype
)) != length
1236 || get_pointer_alignment (dest
) / BITS_PER_UNIT
< length
)
1239 if (length
> HOST_BITS_PER_WIDE_INT
/ BITS_PER_UNIT
)
1242 if (integer_zerop (c
))
1246 if (CHAR_BIT
!= 8 || BITS_PER_UNIT
!= 8 || HOST_BITS_PER_WIDE_INT
> 64)
1249 cval
= TREE_INT_CST_LOW (c
);
1253 cval
|= (cval
<< 31) << 1;
1256 var
= fold_build2 (MEM_REF
, etype
, dest
, build_int_cst (ptr_type_node
, 0));
1257 gimple
*store
= gimple_build_assign (var
, build_int_cst_type (etype
, cval
));
1258 gimple_set_vuse (store
, gimple_vuse (stmt
));
1259 tree vdef
= gimple_vdef (stmt
);
1260 if (vdef
&& TREE_CODE (vdef
) == SSA_NAME
)
1262 gimple_set_vdef (store
, gimple_vdef (stmt
));
1263 SSA_NAME_DEF_STMT (gimple_vdef (stmt
)) = store
;
1265 gsi_insert_before (gsi
, store
, GSI_SAME_STMT
);
1266 if (gimple_call_lhs (stmt
))
1268 gimple
*asgn
= gimple_build_assign (gimple_call_lhs (stmt
), dest
);
1269 gsi_replace (gsi
, asgn
, false);
1273 gimple_stmt_iterator gsi2
= *gsi
;
1275 gsi_remove (&gsi2
, true);
1281 /* Helper of get_range_strlen for ARG that is not an SSA_NAME. */
1284 get_range_strlen_tree (tree arg
, tree length
[2], bitmap
*visited
,
1285 strlen_range_kind rkind
,
1286 bool *flexp
, unsigned eltsize
, tree
*nonstr
)
1288 gcc_assert (TREE_CODE (arg
) != SSA_NAME
);
1290 /* The minimum and maximum length. */
1291 tree
*const minlen
= length
;
1292 tree
*const maxlen
= length
+ 1;
1294 /* The length computed by this invocation of the function. */
1295 tree val
= NULL_TREE
;
1297 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
1298 if (TREE_CODE (arg
) == ADDR_EXPR
1299 && TREE_CODE (TREE_OPERAND (arg
, 0)) == ARRAY_REF
)
1301 tree op
= TREE_OPERAND (arg
, 0);
1302 if (integer_zerop (TREE_OPERAND (op
, 1)))
1304 tree aop0
= TREE_OPERAND (op
, 0);
1305 if (TREE_CODE (aop0
) == INDIRECT_REF
1306 && TREE_CODE (TREE_OPERAND (aop0
, 0)) == SSA_NAME
)
1307 return get_range_strlen (TREE_OPERAND (aop0
, 0), length
,
1308 visited
, rkind
, flexp
,
1311 else if (TREE_CODE (TREE_OPERAND (op
, 0)) == COMPONENT_REF
1312 && (rkind
== SRK_LENRANGE
|| rkind
== SRK_LENRANGE_2
))
1314 /* Fail if an array is the last member of a struct object
1315 since it could be treated as a (fake) flexible array
1317 tree idx
= TREE_OPERAND (op
, 1);
1319 arg
= TREE_OPERAND (op
, 0);
1320 tree optype
= TREE_TYPE (arg
);
1321 if (tree dom
= TYPE_DOMAIN (optype
))
1322 if (tree bound
= TYPE_MAX_VALUE (dom
))
1323 if (TREE_CODE (bound
) == INTEGER_CST
1324 && TREE_CODE (idx
) == INTEGER_CST
1325 && tree_int_cst_lt (bound
, idx
))
1330 if (rkind
== SRK_INT_VALUE
)
1332 /* We are computing the maximum value (not string length). */
1334 if (TREE_CODE (val
) != INTEGER_CST
1335 || tree_int_cst_sgn (val
) < 0)
1340 c_strlen_data lendata
= { };
1341 val
= c_strlen (arg
, 1, &lendata
, eltsize
);
1343 /* If we potentially had a non-terminated string, then
1344 bubble that information up to the caller. */
1345 if (!val
&& lendata
.decl
)
1347 *nonstr
= lendata
.decl
;
1348 *minlen
= lendata
.minlen
;
1349 *maxlen
= lendata
.minlen
;
1350 return rkind
== SRK_STRLEN
? false : true;
1354 if (!val
&& (rkind
== SRK_LENRANGE
|| rkind
== SRK_LENRANGE_2
))
1356 if (TREE_CODE (arg
) == ADDR_EXPR
)
1357 return get_range_strlen (TREE_OPERAND (arg
, 0), length
,
1358 visited
, rkind
, flexp
,
1361 if (TREE_CODE (arg
) == ARRAY_REF
)
1363 tree optype
= TREE_TYPE (TREE_OPERAND (arg
, 0));
1365 /* Determine the "innermost" array type. */
1366 while (TREE_CODE (optype
) == ARRAY_TYPE
1367 && TREE_CODE (TREE_TYPE (optype
)) == ARRAY_TYPE
)
1368 optype
= TREE_TYPE (optype
);
1370 /* Avoid arrays of pointers. */
1371 tree eltype
= TREE_TYPE (optype
);
1372 if (TREE_CODE (optype
) != ARRAY_TYPE
1373 || !INTEGRAL_TYPE_P (eltype
))
1376 /* Fail when the array bound is unknown or zero. */
1377 val
= TYPE_SIZE_UNIT (optype
);
1378 if (!val
|| integer_zerop (val
))
1381 val
= fold_build2 (MINUS_EXPR
, TREE_TYPE (val
), val
,
1384 /* Set the minimum size to zero since the string in
1385 the array could have zero length. */
1386 *minlen
= ssize_int (0);
1388 if (TREE_CODE (TREE_OPERAND (arg
, 0)) == COMPONENT_REF
1389 && optype
== TREE_TYPE (TREE_OPERAND (arg
, 0))
1390 && array_at_struct_end_p (TREE_OPERAND (arg
, 0)))
1393 else if (TREE_CODE (arg
) == COMPONENT_REF
1394 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (arg
, 1)))
1397 /* Use the type of the member array to determine the upper
1398 bound on the length of the array. This may be overly
1399 optimistic if the array itself isn't NUL-terminated and
1400 the caller relies on the subsequent member to contain
1401 the NUL but that would only be considered valid if
1402 the array were the last member of a struct.
1403 Set *FLEXP to true if the array whose bound is being
1404 used is at the end of a struct. */
1405 if (array_at_struct_end_p (arg
))
1408 tree fld
= TREE_OPERAND (arg
, 1);
1410 tree optype
= TREE_TYPE (fld
);
1412 /* Determine the "innermost" array type. */
1413 while (TREE_CODE (optype
) == ARRAY_TYPE
1414 && TREE_CODE (TREE_TYPE (optype
)) == ARRAY_TYPE
)
1415 optype
= TREE_TYPE (optype
);
1417 /* Fail when the array bound is unknown or zero. */
1418 val
= TYPE_SIZE_UNIT (optype
);
1419 if (!val
|| integer_zerop (val
))
1421 val
= fold_build2 (MINUS_EXPR
, TREE_TYPE (val
), val
,
1424 /* Set the minimum size to zero since the string in
1425 the array could have zero length. */
1426 *minlen
= ssize_int (0);
1431 tree type
= TREE_TYPE (arg
);
1432 if (POINTER_TYPE_P (type
))
1433 type
= TREE_TYPE (type
);
1435 if (TREE_CODE (type
) == ARRAY_TYPE
)
1437 val
= TYPE_SIZE_UNIT (type
);
1439 || TREE_CODE (val
) != INTEGER_CST
1440 || integer_zerop (val
))
1442 val
= wide_int_to_tree (TREE_TYPE (val
),
1443 wi::sub (wi::to_wide (val
), 1));
1445 /* Set the minimum size to zero since the string in
1446 the array could have zero length. */
1447 *minlen
= ssize_int (0);
1455 /* Adjust the lower bound on the string length as necessary. */
1457 || (rkind
!= SRK_STRLEN
1458 && TREE_CODE (*minlen
) == INTEGER_CST
1459 && TREE_CODE (val
) == INTEGER_CST
1460 && tree_int_cst_lt (val
, *minlen
)))
1465 /* Adjust the more conservative bound if possible/necessary
1466 and fail otherwise. */
1467 if (rkind
!= SRK_STRLEN
)
1469 if (TREE_CODE (*maxlen
) != INTEGER_CST
1470 || TREE_CODE (val
) != INTEGER_CST
)
1473 if (tree_int_cst_lt (*maxlen
, val
))
1477 else if (simple_cst_equal (val
, *maxlen
) != 1)
1479 /* Fail if the length of this ARG is different from that
1480 previously determined from another ARG. */
1489 /* Obtain the minimum and maximum string length or minimum and maximum
1490 value of ARG in LENGTH[0] and LENGTH[1], respectively.
1491 If ARG is an SSA name variable, follow its use-def chains. When
1492 TYPE == 0, if LENGTH[1] is not equal to the length we determine or
1493 if we are unable to determine the length or value, return false.
1494 VISITED is a bitmap of visited variables.
1495 RKIND determines the kind of value or range to obtain (see
1497 Set PDATA->DECL if ARG refers to an unterminated constant array.
1498 On input, set ELTSIZE to 1 for normal single byte character strings,
1499 and either 2 or 4 for wide characer strings (the size of wchar_t).
1500 Return true if *PDATA was successfully populated and false otherwise. */
1503 get_range_strlen (tree arg
, tree length
[2], bitmap
*visited
,
1504 strlen_range_kind rkind
,
1505 bool *flexp
, unsigned eltsize
, tree
*nonstr
)
1508 if (TREE_CODE (arg
) != SSA_NAME
)
1509 return get_range_strlen_tree (arg
, length
, visited
, rkind
, flexp
,
1512 /* If ARG is registered for SSA update we cannot look at its defining
1514 if (name_registered_for_update_p (arg
))
1517 /* If we were already here, break the infinite cycle. */
1519 *visited
= BITMAP_ALLOC (NULL
);
1520 if (!bitmap_set_bit (*visited
, SSA_NAME_VERSION (arg
)))
1524 gimple
*def_stmt
= SSA_NAME_DEF_STMT (var
);
1526 /* The minimum and maximum length. */
1527 tree
*const maxlen
= length
+ 1;
1529 switch (gimple_code (def_stmt
))
1532 /* The RHS of the statement defining VAR must either have a
1533 constant length or come from another SSA_NAME with a constant
1535 if (gimple_assign_single_p (def_stmt
)
1536 || gimple_assign_unary_nop_p (def_stmt
))
1538 tree rhs
= gimple_assign_rhs1 (def_stmt
);
1539 return get_range_strlen (rhs
, length
, visited
, rkind
, flexp
,
1542 else if (gimple_assign_rhs_code (def_stmt
) == COND_EXPR
)
1544 tree ops
[2] = { gimple_assign_rhs2 (def_stmt
),
1545 gimple_assign_rhs3 (def_stmt
) };
1547 for (unsigned int i
= 0; i
< 2; i
++)
1548 if (!get_range_strlen (ops
[i
], length
, visited
, rkind
,
1549 flexp
, eltsize
, nonstr
))
1551 if (rkind
!= SRK_LENRANGE_2
)
1553 /* Set the upper bound to the maximum to prevent
1554 it from being adjusted in the next iteration but
1555 leave MINLEN and the more conservative MAXBOUND
1556 determined so far alone (or leave them null if
1557 they haven't been set yet). That the MINLEN is
1558 in fact zero can be determined from MAXLEN being
1559 unbounded but the discovered minimum is used for
1561 *maxlen
= build_all_ones_cst (size_type_node
);
1568 /* Unless RKIND == SRK_LENRANGE, all arguments of the PHI node
1569 must have a constant length. */
1570 for (unsigned i
= 0; i
< gimple_phi_num_args (def_stmt
); i
++)
1572 tree arg
= gimple_phi_arg (def_stmt
, i
)->def
;
1574 /* If this PHI has itself as an argument, we cannot
1575 determine the string length of this argument. However,
1576 if we can find a constant string length for the other
1577 PHI args then we can still be sure that this is a
1578 constant string length. So be optimistic and just
1579 continue with the next argument. */
1580 if (arg
== gimple_phi_result (def_stmt
))
1583 if (!get_range_strlen (arg
, length
, visited
, rkind
, flexp
,
1586 if (rkind
!= SRK_LENRANGE_2
)
1588 /* Set the upper bound to the maximum to prevent
1589 it from being adjusted in the next iteration but
1590 leave MINLEN and the more conservative MAXBOUND
1591 determined so far alone (or leave them null if
1592 they haven't been set yet). That the MINLEN is
1593 in fact zero can be determined from MAXLEN being
1594 unbounded but the discovered minimum is used for
1596 *maxlen
= build_all_ones_cst (size_type_node
);
1605 /* Determine the minimum and maximum value or string length that ARG
1606 refers to and store each in the first two elements of MINMAXLEN.
1607 For expressions that point to strings of unknown lengths that are
1608 character arrays, use the upper bound of the array as the maximum
1609 length. For example, given an expression like 'x ? array : "xyz"'
1610 and array declared as 'char array[8]', MINMAXLEN[0] will be set
1611 to 0 and MINMAXLEN[1] to 7, the longest string that could be
1613 Return true if the range of the string lengths has been obtained
1614 from the upper bound of an array at the end of a struct. Such
1615 an array may hold a string that's longer than its upper bound
1616 due to it being used as a poor-man's flexible array member.
1618 STRICT is true if it will handle PHIs and COND_EXPRs conservatively
1619 and false if PHIs and COND_EXPRs are to be handled optimistically,
1620 if we can determine string length minimum and maximum; it will use
1621 the minimum from the ones where it can be determined.
1622 STRICT false should be only used for warning code.
1623 When non-null, clear *NONSTR if ARG refers to a constant array
1624 that is known not be nul-terminated. Otherwise set it to
1625 the declaration of the constant non-terminated array.
1627 ELTSIZE is 1 for normal single byte character strings, and 2 or
1628 4 for wide characer strings. ELTSIZE is by default 1. */
1631 get_range_strlen (tree arg
, tree minmaxlen
[2], unsigned eltsize
,
1632 bool strict
, tree
*nonstr
/* = NULL */)
1634 bitmap visited
= NULL
;
1636 minmaxlen
[0] = NULL_TREE
;
1637 minmaxlen
[1] = NULL_TREE
;
1641 nonstr
= &nonstrbuf
;
1642 *nonstr
= NULL_TREE
;
1644 bool flexarray
= false;
1645 if (!get_range_strlen (arg
, minmaxlen
, &visited
,
1646 strict
? SRK_LENRANGE
: SRK_LENRANGE_2
,
1647 &flexarray
, eltsize
, nonstr
))
1649 minmaxlen
[0] = NULL_TREE
;
1650 minmaxlen
[1] = NULL_TREE
;
1654 BITMAP_FREE (visited
);
1659 /* Return the maximum string length for ARG, counting by TYPE
1660 (1, 2 or 4 for normal or wide chars). NONSTR indicates
1661 if the caller is prepared to handle unterminated strings.
1663 If an unterminated string is discovered and our caller handles
1664 unterminated strings, then bubble up the offending DECL and
1665 return the maximum size. Otherwise return NULL. */
1668 get_maxval_strlen (tree arg
, strlen_range_kind rkind
, tree
*nonstr
= NULL
)
1670 /* A non-null NONSTR is meaningless when determining the maximum
1671 value of an integer ARG. */
1672 gcc_assert (rkind
!= SRK_INT_VALUE
|| nonstr
== NULL
);
1673 /* ARG must have an integral type when RKIND says so. */
1674 gcc_assert (rkind
!= SRK_INT_VALUE
|| INTEGRAL_TYPE_P (TREE_TYPE (arg
)));
1676 bitmap visited
= NULL
;
1677 tree len
[2] = { NULL_TREE
, NULL_TREE
};
1680 /* Set to non-null if ARG refers to an untermianted array. */
1681 tree mynonstr
= NULL_TREE
;
1682 if (!get_range_strlen (arg
, len
, &visited
, rkind
, &dummy
, 1, &mynonstr
))
1685 BITMAP_FREE (visited
);
1689 /* For callers prepared to handle unterminated arrays set
1690 *NONSTR to point to the declaration of the array and return
1691 the maximum length/size. */
1696 /* Fail if the constant array isn't nul-terminated. */
1697 return mynonstr
? NULL_TREE
: len
[1];
1701 /* Fold function call to builtin strcpy with arguments DEST and SRC.
1702 If LEN is not NULL, it represents the length of the string to be
1703 copied. Return NULL_TREE if no simplification can be made. */
1706 gimple_fold_builtin_strcpy (gimple_stmt_iterator
*gsi
,
1707 tree dest
, tree src
)
1709 gimple
*stmt
= gsi_stmt (*gsi
);
1710 location_t loc
= gimple_location (stmt
);
1713 /* If SRC and DEST are the same (and not volatile), return DEST. */
1714 if (operand_equal_p (src
, dest
, 0))
1716 /* Issue -Wrestrict unless the pointers are null (those do
1717 not point to objects and so do not indicate an overlap;
1718 such calls could be the result of sanitization and jump
1720 if (!integer_zerop (dest
) && !gimple_no_warning_p (stmt
))
1722 tree func
= gimple_call_fndecl (stmt
);
1724 warning_at (loc
, OPT_Wrestrict
,
1725 "%qD source argument is the same as destination",
1729 replace_call_with_value (gsi
, dest
);
1733 if (optimize_function_for_size_p (cfun
))
1736 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
1740 /* Set to non-null if ARG refers to an unterminated array. */
1742 tree len
= get_maxval_strlen (src
, SRK_STRLEN
, &nonstr
);
1746 /* Avoid folding calls with unterminated arrays. */
1747 if (!gimple_no_warning_p (stmt
))
1748 warn_string_no_nul (loc
, "strcpy", src
, nonstr
);
1749 gimple_set_no_warning (stmt
, true);
1756 len
= fold_convert_loc (loc
, size_type_node
, len
);
1757 len
= size_binop_loc (loc
, PLUS_EXPR
, len
, build_int_cst (size_type_node
, 1));
1758 len
= force_gimple_operand_gsi (gsi
, len
, true,
1759 NULL_TREE
, true, GSI_SAME_STMT
);
1760 gimple
*repl
= gimple_build_call (fn
, 3, dest
, src
, len
);
1761 replace_call_with_call_and_fold (gsi
, repl
);
1765 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
1766 If SLEN is not NULL, it represents the length of the source string.
1767 Return NULL_TREE if no simplification can be made. */
1770 gimple_fold_builtin_strncpy (gimple_stmt_iterator
*gsi
,
1771 tree dest
, tree src
, tree len
)
1773 gimple
*stmt
= gsi_stmt (*gsi
);
1774 location_t loc
= gimple_location (stmt
);
1775 bool nonstring
= get_attr_nonstring_decl (dest
) != NULL_TREE
;
1777 /* If the LEN parameter is zero, return DEST. */
1778 if (integer_zerop (len
))
1780 /* Avoid warning if the destination refers to a an array/pointer
1781 decorate with attribute nonstring. */
1784 tree fndecl
= gimple_call_fndecl (stmt
);
1786 /* Warn about the lack of nul termination: the result is not
1787 a (nul-terminated) string. */
1788 tree slen
= get_maxval_strlen (src
, SRK_STRLEN
);
1789 if (slen
&& !integer_zerop (slen
))
1790 warning_at (loc
, OPT_Wstringop_truncation
,
1791 "%G%qD destination unchanged after copying no bytes "
1792 "from a string of length %E",
1793 stmt
, fndecl
, slen
);
1795 warning_at (loc
, OPT_Wstringop_truncation
,
1796 "%G%qD destination unchanged after copying no bytes",
1800 replace_call_with_value (gsi
, dest
);
1804 /* We can't compare slen with len as constants below if len is not a
1806 if (TREE_CODE (len
) != INTEGER_CST
)
1809 /* Now, we must be passed a constant src ptr parameter. */
1810 tree slen
= get_maxval_strlen (src
, SRK_STRLEN
);
1811 if (!slen
|| TREE_CODE (slen
) != INTEGER_CST
)
1814 /* The size of the source string including the terminating nul. */
1815 tree ssize
= size_binop_loc (loc
, PLUS_EXPR
, slen
, ssize_int (1));
1817 /* We do not support simplification of this case, though we do
1818 support it when expanding trees into RTL. */
1819 /* FIXME: generate a call to __builtin_memset. */
1820 if (tree_int_cst_lt (ssize
, len
))
1823 /* Diagnose truncation that leaves the copy unterminated. */
1824 maybe_diag_stxncpy_trunc (*gsi
, src
, len
);
1826 /* OK transform into builtin memcpy. */
1827 tree fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
1831 len
= fold_convert_loc (loc
, size_type_node
, len
);
1832 len
= force_gimple_operand_gsi (gsi
, len
, true,
1833 NULL_TREE
, true, GSI_SAME_STMT
);
1834 gimple
*repl
= gimple_build_call (fn
, 3, dest
, src
, len
);
1835 replace_call_with_call_and_fold (gsi
, repl
);
1840 /* Fold function call to builtin strchr or strrchr.
1841 If both arguments are constant, evaluate and fold the result,
1842 otherwise simplify str(r)chr (str, 0) into str + strlen (str).
1843 In general strlen is significantly faster than strchr
1844 due to being a simpler operation. */
1846 gimple_fold_builtin_strchr (gimple_stmt_iterator
*gsi
, bool is_strrchr
)
1848 gimple
*stmt
= gsi_stmt (*gsi
);
1849 tree str
= gimple_call_arg (stmt
, 0);
1850 tree c
= gimple_call_arg (stmt
, 1);
1851 location_t loc
= gimple_location (stmt
);
1855 if (!gimple_call_lhs (stmt
))
1858 if ((p
= c_getstr (str
)) && target_char_cst_p (c
, &ch
))
1860 const char *p1
= is_strrchr
? strrchr (p
, ch
) : strchr (p
, ch
);
1864 replace_call_with_value (gsi
, integer_zero_node
);
1868 tree len
= build_int_cst (size_type_node
, p1
- p
);
1869 gimple_seq stmts
= NULL
;
1870 gimple
*new_stmt
= gimple_build_assign (gimple_call_lhs (stmt
),
1871 POINTER_PLUS_EXPR
, str
, len
);
1872 gimple_seq_add_stmt_without_update (&stmts
, new_stmt
);
1873 gsi_replace_with_seq_vops (gsi
, stmts
);
1877 if (!integer_zerop (c
))
1880 /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size. */
1881 if (is_strrchr
&& optimize_function_for_size_p (cfun
))
1883 tree strchr_fn
= builtin_decl_implicit (BUILT_IN_STRCHR
);
1887 gimple
*repl
= gimple_build_call (strchr_fn
, 2, str
, c
);
1888 replace_call_with_call_and_fold (gsi
, repl
);
1896 tree strlen_fn
= builtin_decl_implicit (BUILT_IN_STRLEN
);
1901 /* Create newstr = strlen (str). */
1902 gimple_seq stmts
= NULL
;
1903 gimple
*new_stmt
= gimple_build_call (strlen_fn
, 1, str
);
1904 gimple_set_location (new_stmt
, loc
);
1905 len
= create_tmp_reg_or_ssa_name (size_type_node
);
1906 gimple_call_set_lhs (new_stmt
, len
);
1907 gimple_seq_add_stmt_without_update (&stmts
, new_stmt
);
1909 /* Create (str p+ strlen (str)). */
1910 new_stmt
= gimple_build_assign (gimple_call_lhs (stmt
),
1911 POINTER_PLUS_EXPR
, str
, len
);
1912 gimple_seq_add_stmt_without_update (&stmts
, new_stmt
);
1913 gsi_replace_with_seq_vops (gsi
, stmts
);
1914 /* gsi now points at the assignment to the lhs, get a
1915 stmt iterator to the strlen.
1916 ??? We can't use gsi_for_stmt as that doesn't work when the
1917 CFG isn't built yet. */
1918 gimple_stmt_iterator gsi2
= *gsi
;
1924 /* Fold function call to builtin strstr.
1925 If both arguments are constant, evaluate and fold the result,
1926 additionally fold strstr (x, "") into x and strstr (x, "c")
1927 into strchr (x, 'c'). */
1929 gimple_fold_builtin_strstr (gimple_stmt_iterator
*gsi
)
1931 gimple
*stmt
= gsi_stmt (*gsi
);
1932 tree haystack
= gimple_call_arg (stmt
, 0);
1933 tree needle
= gimple_call_arg (stmt
, 1);
1936 if (!gimple_call_lhs (stmt
))
1939 q
= c_getstr (needle
);
1943 if ((p
= c_getstr (haystack
)))
1945 const char *r
= strstr (p
, q
);
1949 replace_call_with_value (gsi
, integer_zero_node
);
1953 tree len
= build_int_cst (size_type_node
, r
- p
);
1954 gimple_seq stmts
= NULL
;
1956 = gimple_build_assign (gimple_call_lhs (stmt
), POINTER_PLUS_EXPR
,
1958 gimple_seq_add_stmt_without_update (&stmts
, new_stmt
);
1959 gsi_replace_with_seq_vops (gsi
, stmts
);
1963 /* For strstr (x, "") return x. */
1966 replace_call_with_value (gsi
, haystack
);
1970 /* Transform strstr (x, "c") into strchr (x, 'c'). */
1973 tree strchr_fn
= builtin_decl_implicit (BUILT_IN_STRCHR
);
1976 tree c
= build_int_cst (integer_type_node
, q
[0]);
1977 gimple
*repl
= gimple_build_call (strchr_fn
, 2, haystack
, c
);
1978 replace_call_with_call_and_fold (gsi
, repl
);
1986 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
1989 Return NULL_TREE if no simplification was possible, otherwise return the
1990 simplified form of the call as a tree.
1992 The simplified form may be a constant or other expression which
1993 computes the same value, but in a more efficient manner (including
1994 calls to other builtin functions).
1996 The call may contain arguments which need to be evaluated, but
1997 which are not useful to determine the result of the call. In
1998 this case we return a chain of COMPOUND_EXPRs. The LHS of each
1999 COMPOUND_EXPR will be an argument which must be evaluated.
2000 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
2001 COMPOUND_EXPR in the chain will contain the tree for the simplified
2002 form of the builtin function call. */
2005 gimple_fold_builtin_strcat (gimple_stmt_iterator
*gsi
, tree dst
, tree src
)
2007 gimple
*stmt
= gsi_stmt (*gsi
);
2008 location_t loc
= gimple_location (stmt
);
2010 const char *p
= c_getstr (src
);
2012 /* If the string length is zero, return the dst parameter. */
2013 if (p
&& *p
== '\0')
2015 replace_call_with_value (gsi
, dst
);
2019 if (!optimize_bb_for_speed_p (gimple_bb (stmt
)))
2022 /* See if we can store by pieces into (dst + strlen(dst)). */
2024 tree strlen_fn
= builtin_decl_implicit (BUILT_IN_STRLEN
);
2025 tree memcpy_fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
2027 if (!strlen_fn
|| !memcpy_fn
)
2030 /* If the length of the source string isn't computable don't
2031 split strcat into strlen and memcpy. */
2032 tree len
= get_maxval_strlen (src
, SRK_STRLEN
);
2036 /* Create strlen (dst). */
2037 gimple_seq stmts
= NULL
, stmts2
;
2038 gimple
*repl
= gimple_build_call (strlen_fn
, 1, dst
);
2039 gimple_set_location (repl
, loc
);
2040 newdst
= create_tmp_reg_or_ssa_name (size_type_node
);
2041 gimple_call_set_lhs (repl
, newdst
);
2042 gimple_seq_add_stmt_without_update (&stmts
, repl
);
2044 /* Create (dst p+ strlen (dst)). */
2045 newdst
= fold_build_pointer_plus_loc (loc
, dst
, newdst
);
2046 newdst
= force_gimple_operand (newdst
, &stmts2
, true, NULL_TREE
);
2047 gimple_seq_add_seq_without_update (&stmts
, stmts2
);
2049 len
= fold_convert_loc (loc
, size_type_node
, len
);
2050 len
= size_binop_loc (loc
, PLUS_EXPR
, len
,
2051 build_int_cst (size_type_node
, 1));
2052 len
= force_gimple_operand (len
, &stmts2
, true, NULL_TREE
);
2053 gimple_seq_add_seq_without_update (&stmts
, stmts2
);
2055 repl
= gimple_build_call (memcpy_fn
, 3, newdst
, src
, len
);
2056 gimple_seq_add_stmt_without_update (&stmts
, repl
);
2057 if (gimple_call_lhs (stmt
))
2059 repl
= gimple_build_assign (gimple_call_lhs (stmt
), dst
);
2060 gimple_seq_add_stmt_without_update (&stmts
, repl
);
2061 gsi_replace_with_seq_vops (gsi
, stmts
);
2062 /* gsi now points at the assignment to the lhs, get a
2063 stmt iterator to the memcpy call.
2064 ??? We can't use gsi_for_stmt as that doesn't work when the
2065 CFG isn't built yet. */
2066 gimple_stmt_iterator gsi2
= *gsi
;
2072 gsi_replace_with_seq_vops (gsi
, stmts
);
2078 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
2079 are the arguments to the call. */
2082 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator
*gsi
)
2084 gimple
*stmt
= gsi_stmt (*gsi
);
2085 tree dest
= gimple_call_arg (stmt
, 0);
2086 tree src
= gimple_call_arg (stmt
, 1);
2087 tree size
= gimple_call_arg (stmt
, 2);
2093 /* If the SRC parameter is "", return DEST. */
2094 if (p
&& *p
== '\0')
2096 replace_call_with_value (gsi
, dest
);
2100 if (! tree_fits_uhwi_p (size
) || ! integer_all_onesp (size
))
2103 /* If __builtin_strcat_chk is used, assume strcat is available. */
2104 fn
= builtin_decl_explicit (BUILT_IN_STRCAT
);
2108 gimple
*repl
= gimple_build_call (fn
, 2, dest
, src
);
2109 replace_call_with_call_and_fold (gsi
, repl
);
2113 /* Simplify a call to the strncat builtin. */
2116 gimple_fold_builtin_strncat (gimple_stmt_iterator
*gsi
)
2118 gimple
*stmt
= gsi_stmt (*gsi
);
2119 tree dst
= gimple_call_arg (stmt
, 0);
2120 tree src
= gimple_call_arg (stmt
, 1);
2121 tree len
= gimple_call_arg (stmt
, 2);
2123 const char *p
= c_getstr (src
);
2125 /* If the requested length is zero, or the src parameter string
2126 length is zero, return the dst parameter. */
2127 if (integer_zerop (len
) || (p
&& *p
== '\0'))
2129 replace_call_with_value (gsi
, dst
);
2133 if (TREE_CODE (len
) != INTEGER_CST
|| !p
)
2136 unsigned srclen
= strlen (p
);
2138 int cmpsrc
= compare_tree_int (len
, srclen
);
2140 /* Return early if the requested len is less than the string length.
2141 Warnings will be issued elsewhere later. */
2145 unsigned HOST_WIDE_INT dstsize
;
2147 bool nowarn
= gimple_no_warning_p (stmt
);
2149 if (!nowarn
&& compute_builtin_object_size (dst
, 1, &dstsize
))
2151 int cmpdst
= compare_tree_int (len
, dstsize
);
2155 tree fndecl
= gimple_call_fndecl (stmt
);
2157 /* Strncat copies (at most) LEN bytes and always appends
2158 the terminating NUL so the specified bound should never
2159 be equal to (or greater than) the size of the destination.
2160 If it is, the copy could overflow. */
2161 location_t loc
= gimple_location (stmt
);
2162 nowarn
= warning_at (loc
, OPT_Wstringop_overflow_
,
2164 ? G_("%G%qD specified bound %E equals "
2166 : G_("%G%qD specified bound %E exceeds "
2167 "destination size %wu"),
2168 stmt
, fndecl
, len
, dstsize
);
2170 gimple_set_no_warning (stmt
, true);
2174 if (!nowarn
&& cmpsrc
== 0)
2176 tree fndecl
= gimple_call_fndecl (stmt
);
2177 location_t loc
= gimple_location (stmt
);
2179 /* To avoid possible overflow the specified bound should also
2180 not be equal to the length of the source, even when the size
2181 of the destination is unknown (it's not an uncommon mistake
2182 to specify as the bound to strncpy the length of the source). */
2183 if (warning_at (loc
, OPT_Wstringop_overflow_
,
2184 "%G%qD specified bound %E equals source length",
2186 gimple_set_no_warning (stmt
, true);
2189 tree fn
= builtin_decl_implicit (BUILT_IN_STRCAT
);
2191 /* If the replacement _DECL isn't initialized, don't do the
2196 /* Otherwise, emit a call to strcat. */
2197 gcall
*repl
= gimple_build_call (fn
, 2, dst
, src
);
2198 replace_call_with_call_and_fold (gsi
, repl
);
2202 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
2206 gimple_fold_builtin_strncat_chk (gimple_stmt_iterator
*gsi
)
2208 gimple
*stmt
= gsi_stmt (*gsi
);
2209 tree dest
= gimple_call_arg (stmt
, 0);
2210 tree src
= gimple_call_arg (stmt
, 1);
2211 tree len
= gimple_call_arg (stmt
, 2);
2212 tree size
= gimple_call_arg (stmt
, 3);
2217 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
2218 if ((p
&& *p
== '\0')
2219 || integer_zerop (len
))
2221 replace_call_with_value (gsi
, dest
);
2225 if (! tree_fits_uhwi_p (size
))
2228 if (! integer_all_onesp (size
))
2230 tree src_len
= c_strlen (src
, 1);
2232 && tree_fits_uhwi_p (src_len
)
2233 && tree_fits_uhwi_p (len
)
2234 && ! tree_int_cst_lt (len
, src_len
))
2236 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
2237 fn
= builtin_decl_explicit (BUILT_IN_STRCAT_CHK
);
2241 gimple
*repl
= gimple_build_call (fn
, 3, dest
, src
, size
);
2242 replace_call_with_call_and_fold (gsi
, repl
);
2248 /* If __builtin_strncat_chk is used, assume strncat is available. */
2249 fn
= builtin_decl_explicit (BUILT_IN_STRNCAT
);
2253 gimple
*repl
= gimple_build_call (fn
, 3, dest
, src
, len
);
2254 replace_call_with_call_and_fold (gsi
, repl
);
2258 /* Build and append gimple statements to STMTS that would load a first
2259 character of a memory location identified by STR. LOC is location
2260 of the statement. */
2263 gimple_load_first_char (location_t loc
, tree str
, gimple_seq
*stmts
)
2267 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
2268 tree cst_uchar_ptr_node
2269 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
2270 tree off0
= build_int_cst (cst_uchar_ptr_node
, 0);
2272 tree temp
= fold_build2_loc (loc
, MEM_REF
, cst_uchar_node
, str
, off0
);
2273 gassign
*stmt
= gimple_build_assign (NULL_TREE
, temp
);
2274 var
= create_tmp_reg_or_ssa_name (cst_uchar_node
, stmt
);
2276 gimple_assign_set_lhs (stmt
, var
);
2277 gimple_seq_add_stmt_without_update (stmts
, stmt
);
2282 /* Fold a call to the str{n}{case}cmp builtin pointed by GSI iterator.
2283 FCODE is the name of the builtin. */
2286 gimple_fold_builtin_string_compare (gimple_stmt_iterator
*gsi
)
2288 gimple
*stmt
= gsi_stmt (*gsi
);
2289 tree callee
= gimple_call_fndecl (stmt
);
2290 enum built_in_function fcode
= DECL_FUNCTION_CODE (callee
);
2292 tree type
= integer_type_node
;
2293 tree str1
= gimple_call_arg (stmt
, 0);
2294 tree str2
= gimple_call_arg (stmt
, 1);
2295 tree lhs
= gimple_call_lhs (stmt
);
2296 HOST_WIDE_INT length
= -1;
2298 /* Handle strncmp and strncasecmp functions. */
2299 if (gimple_call_num_args (stmt
) == 3)
2301 tree len
= gimple_call_arg (stmt
, 2);
2302 if (tree_fits_uhwi_p (len
))
2303 length
= tree_to_uhwi (len
);
2306 /* If the LEN parameter is zero, return zero. */
2309 replace_call_with_value (gsi
, integer_zero_node
);
2313 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
2314 if (operand_equal_p (str1
, str2
, 0))
2316 replace_call_with_value (gsi
, integer_zero_node
);
2320 const char *p1
= c_getstr (str1
);
2321 const char *p2
= c_getstr (str2
);
2323 /* For known strings, return an immediate value. */
2327 bool known_result
= false;
2331 case BUILT_IN_STRCMP
:
2332 case BUILT_IN_STRCMP_EQ
:
2334 r
= strcmp (p1
, p2
);
2335 known_result
= true;
2338 case BUILT_IN_STRNCMP
:
2339 case BUILT_IN_STRNCMP_EQ
:
2343 r
= strncmp (p1
, p2
, length
);
2344 known_result
= true;
2347 /* Only handleable situation is where the string are equal (result 0),
2348 which is already handled by operand_equal_p case. */
2349 case BUILT_IN_STRCASECMP
:
2351 case BUILT_IN_STRNCASECMP
:
2355 r
= strncmp (p1
, p2
, length
);
2357 known_result
= true;
2366 replace_call_with_value (gsi
, build_cmp_result (type
, r
));
2371 bool nonzero_length
= length
>= 1
2372 || fcode
== BUILT_IN_STRCMP
2373 || fcode
== BUILT_IN_STRCMP_EQ
2374 || fcode
== BUILT_IN_STRCASECMP
;
2376 location_t loc
= gimple_location (stmt
);
2378 /* If the second arg is "", return *(const unsigned char*)arg1. */
2379 if (p2
&& *p2
== '\0' && nonzero_length
)
2381 gimple_seq stmts
= NULL
;
2382 tree var
= gimple_load_first_char (loc
, str1
, &stmts
);
2385 stmt
= gimple_build_assign (lhs
, NOP_EXPR
, var
);
2386 gimple_seq_add_stmt_without_update (&stmts
, stmt
);
2389 gsi_replace_with_seq_vops (gsi
, stmts
);
2393 /* If the first arg is "", return -*(const unsigned char*)arg2. */
2394 if (p1
&& *p1
== '\0' && nonzero_length
)
2396 gimple_seq stmts
= NULL
;
2397 tree var
= gimple_load_first_char (loc
, str2
, &stmts
);
2401 tree c
= create_tmp_reg_or_ssa_name (integer_type_node
);
2402 stmt
= gimple_build_assign (c
, NOP_EXPR
, var
);
2403 gimple_seq_add_stmt_without_update (&stmts
, stmt
);
2405 stmt
= gimple_build_assign (lhs
, NEGATE_EXPR
, c
);
2406 gimple_seq_add_stmt_without_update (&stmts
, stmt
);
2409 gsi_replace_with_seq_vops (gsi
, stmts
);
2413 /* If len parameter is one, return an expression corresponding to
2414 (*(const unsigned char*)arg2 - *(const unsigned char*)arg1). */
2415 if (fcode
== BUILT_IN_STRNCMP
&& length
== 1)
2417 gimple_seq stmts
= NULL
;
2418 tree temp1
= gimple_load_first_char (loc
, str1
, &stmts
);
2419 tree temp2
= gimple_load_first_char (loc
, str2
, &stmts
);
2423 tree c1
= create_tmp_reg_or_ssa_name (integer_type_node
);
2424 gassign
*convert1
= gimple_build_assign (c1
, NOP_EXPR
, temp1
);
2425 gimple_seq_add_stmt_without_update (&stmts
, convert1
);
2427 tree c2
= create_tmp_reg_or_ssa_name (integer_type_node
);
2428 gassign
*convert2
= gimple_build_assign (c2
, NOP_EXPR
, temp2
);
2429 gimple_seq_add_stmt_without_update (&stmts
, convert2
);
2431 stmt
= gimple_build_assign (lhs
, MINUS_EXPR
, c1
, c2
);
2432 gimple_seq_add_stmt_without_update (&stmts
, stmt
);
2435 gsi_replace_with_seq_vops (gsi
, stmts
);
2439 /* If length is larger than the length of one constant string,
2440 replace strncmp with corresponding strcmp */
2441 if (fcode
== BUILT_IN_STRNCMP
2443 && ((p2
&& (size_t) length
> strlen (p2
))
2444 || (p1
&& (size_t) length
> strlen (p1
))))
2446 tree fn
= builtin_decl_implicit (BUILT_IN_STRCMP
);
2449 gimple
*repl
= gimple_build_call (fn
, 2, str1
, str2
);
2450 replace_call_with_call_and_fold (gsi
, repl
);
2457 /* Fold a call to the memchr pointed by GSI iterator. */
2460 gimple_fold_builtin_memchr (gimple_stmt_iterator
*gsi
)
2462 gimple
*stmt
= gsi_stmt (*gsi
);
2463 tree lhs
= gimple_call_lhs (stmt
);
2464 tree arg1
= gimple_call_arg (stmt
, 0);
2465 tree arg2
= gimple_call_arg (stmt
, 1);
2466 tree len
= gimple_call_arg (stmt
, 2);
2468 /* If the LEN parameter is zero, return zero. */
2469 if (integer_zerop (len
))
2471 replace_call_with_value (gsi
, build_int_cst (ptr_type_node
, 0));
2476 if (TREE_CODE (arg2
) != INTEGER_CST
2477 || !tree_fits_uhwi_p (len
)
2478 || !target_char_cst_p (arg2
, &c
))
2481 unsigned HOST_WIDE_INT length
= tree_to_uhwi (len
);
2482 unsigned HOST_WIDE_INT string_length
;
2483 const char *p1
= c_getstr (arg1
, &string_length
);
2487 const char *r
= (const char *)memchr (p1
, c
, MIN (length
, string_length
));
2490 if (length
<= string_length
)
2492 replace_call_with_value (gsi
, build_int_cst (ptr_type_node
, 0));
2498 unsigned HOST_WIDE_INT offset
= r
- p1
;
2499 gimple_seq stmts
= NULL
;
2500 if (lhs
!= NULL_TREE
)
2502 tree offset_cst
= build_int_cst (TREE_TYPE (len
), offset
);
2503 gassign
*stmt
= gimple_build_assign (lhs
, POINTER_PLUS_EXPR
,
2505 gimple_seq_add_stmt_without_update (&stmts
, stmt
);
2508 gimple_seq_add_stmt_without_update (&stmts
,
2509 gimple_build_nop ());
2511 gsi_replace_with_seq_vops (gsi
, stmts
);
2519 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
2520 to the call. IGNORE is true if the value returned
2521 by the builtin will be ignored. UNLOCKED is true is true if this
2522 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
2523 the known length of the string. Return NULL_TREE if no simplification
2527 gimple_fold_builtin_fputs (gimple_stmt_iterator
*gsi
,
2528 tree arg0
, tree arg1
,
2531 gimple
*stmt
= gsi_stmt (*gsi
);
2533 /* If we're using an unlocked function, assume the other unlocked
2534 functions exist explicitly. */
2535 tree
const fn_fputc
= (unlocked
2536 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED
)
2537 : builtin_decl_implicit (BUILT_IN_FPUTC
));
2538 tree
const fn_fwrite
= (unlocked
2539 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED
)
2540 : builtin_decl_implicit (BUILT_IN_FWRITE
));
2542 /* If the return value is used, don't do the transformation. */
2543 if (gimple_call_lhs (stmt
))
2546 /* Get the length of the string passed to fputs. If the length
2547 can't be determined, punt. */
2548 tree len
= get_maxval_strlen (arg0
, SRK_STRLEN
);
2550 || TREE_CODE (len
) != INTEGER_CST
)
2553 switch (compare_tree_int (len
, 1))
2555 case -1: /* length is 0, delete the call entirely . */
2556 replace_call_with_value (gsi
, integer_zero_node
);
2559 case 0: /* length is 1, call fputc. */
2561 const char *p
= c_getstr (arg0
);
2567 gimple
*repl
= gimple_build_call (fn_fputc
, 2,
2569 (integer_type_node
, p
[0]), arg1
);
2570 replace_call_with_call_and_fold (gsi
, repl
);
2575 case 1: /* length is greater than 1, call fwrite. */
2577 /* If optimizing for size keep fputs. */
2578 if (optimize_function_for_size_p (cfun
))
2580 /* New argument list transforming fputs(string, stream) to
2581 fwrite(string, 1, len, stream). */
2585 gimple
*repl
= gimple_build_call (fn_fwrite
, 4, arg0
,
2586 size_one_node
, len
, arg1
);
2587 replace_call_with_call_and_fold (gsi
, repl
);
2596 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
2597 DEST, SRC, LEN, and SIZE are the arguments to the call.
2598 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
2599 code of the builtin. If MAXLEN is not NULL, it is maximum length
2600 passed as third argument. */
2603 gimple_fold_builtin_memory_chk (gimple_stmt_iterator
*gsi
,
2604 tree dest
, tree src
, tree len
, tree size
,
2605 enum built_in_function fcode
)
2607 gimple
*stmt
= gsi_stmt (*gsi
);
2608 location_t loc
= gimple_location (stmt
);
2609 bool ignore
= gimple_call_lhs (stmt
) == NULL_TREE
;
2612 /* If SRC and DEST are the same (and not volatile), return DEST
2613 (resp. DEST+LEN for __mempcpy_chk). */
2614 if (fcode
!= BUILT_IN_MEMSET_CHK
&& operand_equal_p (src
, dest
, 0))
2616 if (fcode
!= BUILT_IN_MEMPCPY_CHK
)
2618 replace_call_with_value (gsi
, dest
);
2623 gimple_seq stmts
= NULL
;
2624 len
= gimple_convert_to_ptrofftype (&stmts
, loc
, len
);
2625 tree temp
= gimple_build (&stmts
, loc
, POINTER_PLUS_EXPR
,
2626 TREE_TYPE (dest
), dest
, len
);
2627 gsi_insert_seq_before (gsi
, stmts
, GSI_SAME_STMT
);
2628 replace_call_with_value (gsi
, temp
);
2633 if (! tree_fits_uhwi_p (size
))
2636 tree maxlen
= get_maxval_strlen (len
, SRK_INT_VALUE
);
2637 if (! integer_all_onesp (size
))
2639 if (! tree_fits_uhwi_p (len
))
2641 /* If LEN is not constant, try MAXLEN too.
2642 For MAXLEN only allow optimizing into non-_ocs function
2643 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2644 if (maxlen
== NULL_TREE
|| ! tree_fits_uhwi_p (maxlen
))
2646 if (fcode
== BUILT_IN_MEMPCPY_CHK
&& ignore
)
2648 /* (void) __mempcpy_chk () can be optimized into
2649 (void) __memcpy_chk (). */
2650 fn
= builtin_decl_explicit (BUILT_IN_MEMCPY_CHK
);
2654 gimple
*repl
= gimple_build_call (fn
, 4, dest
, src
, len
, size
);
2655 replace_call_with_call_and_fold (gsi
, repl
);
2664 if (tree_int_cst_lt (size
, maxlen
))
2669 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
2670 mem{cpy,pcpy,move,set} is available. */
2673 case BUILT_IN_MEMCPY_CHK
:
2674 fn
= builtin_decl_explicit (BUILT_IN_MEMCPY
);
2676 case BUILT_IN_MEMPCPY_CHK
:
2677 fn
= builtin_decl_explicit (BUILT_IN_MEMPCPY
);
2679 case BUILT_IN_MEMMOVE_CHK
:
2680 fn
= builtin_decl_explicit (BUILT_IN_MEMMOVE
);
2682 case BUILT_IN_MEMSET_CHK
:
2683 fn
= builtin_decl_explicit (BUILT_IN_MEMSET
);
2692 gimple
*repl
= gimple_build_call (fn
, 3, dest
, src
, len
);
2693 replace_call_with_call_and_fold (gsi
, repl
);
2697 /* Fold a call to the __st[rp]cpy_chk builtin.
2698 DEST, SRC, and SIZE are the arguments to the call.
2699 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
2700 code of the builtin. If MAXLEN is not NULL, it is maximum length of
2701 strings passed as second argument. */
2704 gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator
*gsi
,
2706 tree src
, tree size
,
2707 enum built_in_function fcode
)
2709 gimple
*stmt
= gsi_stmt (*gsi
);
2710 location_t loc
= gimple_location (stmt
);
2711 bool ignore
= gimple_call_lhs (stmt
) == NULL_TREE
;
2714 /* If SRC and DEST are the same (and not volatile), return DEST. */
2715 if (fcode
== BUILT_IN_STRCPY_CHK
&& operand_equal_p (src
, dest
, 0))
2717 /* Issue -Wrestrict unless the pointers are null (those do
2718 not point to objects and so do not indicate an overlap;
2719 such calls could be the result of sanitization and jump
2721 if (!integer_zerop (dest
) && !gimple_no_warning_p (stmt
))
2723 tree func
= gimple_call_fndecl (stmt
);
2725 warning_at (loc
, OPT_Wrestrict
,
2726 "%qD source argument is the same as destination",
2730 replace_call_with_value (gsi
, dest
);
2734 if (! tree_fits_uhwi_p (size
))
2737 tree maxlen
= get_maxval_strlen (src
, SRK_STRLENMAX
);
2738 if (! integer_all_onesp (size
))
2740 len
= c_strlen (src
, 1);
2741 if (! len
|| ! tree_fits_uhwi_p (len
))
2743 /* If LEN is not constant, try MAXLEN too.
2744 For MAXLEN only allow optimizing into non-_ocs function
2745 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2746 if (maxlen
== NULL_TREE
|| ! tree_fits_uhwi_p (maxlen
))
2748 if (fcode
== BUILT_IN_STPCPY_CHK
)
2753 /* If return value of __stpcpy_chk is ignored,
2754 optimize into __strcpy_chk. */
2755 fn
= builtin_decl_explicit (BUILT_IN_STRCPY_CHK
);
2759 gimple
*repl
= gimple_build_call (fn
, 3, dest
, src
, size
);
2760 replace_call_with_call_and_fold (gsi
, repl
);
2764 if (! len
|| TREE_SIDE_EFFECTS (len
))
2767 /* If c_strlen returned something, but not a constant,
2768 transform __strcpy_chk into __memcpy_chk. */
2769 fn
= builtin_decl_explicit (BUILT_IN_MEMCPY_CHK
);
2773 gimple_seq stmts
= NULL
;
2774 len
= force_gimple_operand (len
, &stmts
, true, NULL_TREE
);
2775 len
= gimple_convert (&stmts
, loc
, size_type_node
, len
);
2776 len
= gimple_build (&stmts
, loc
, PLUS_EXPR
, size_type_node
, len
,
2777 build_int_cst (size_type_node
, 1));
2778 gsi_insert_seq_before (gsi
, stmts
, GSI_SAME_STMT
);
2779 gimple
*repl
= gimple_build_call (fn
, 4, dest
, src
, len
, size
);
2780 replace_call_with_call_and_fold (gsi
, repl
);
2787 if (! tree_int_cst_lt (maxlen
, size
))
2791 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
2792 fn
= builtin_decl_explicit (fcode
== BUILT_IN_STPCPY_CHK
2793 ? BUILT_IN_STPCPY
: BUILT_IN_STRCPY
);
2797 gimple
*repl
= gimple_build_call (fn
, 2, dest
, src
);
2798 replace_call_with_call_and_fold (gsi
, repl
);
2802 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
2803 are the arguments to the call. If MAXLEN is not NULL, it is maximum
2804 length passed as third argument. IGNORE is true if return value can be
2805 ignored. FCODE is the BUILT_IN_* code of the builtin. */
2808 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator
*gsi
,
2809 tree dest
, tree src
,
2810 tree len
, tree size
,
2811 enum built_in_function fcode
)
2813 gimple
*stmt
= gsi_stmt (*gsi
);
2814 bool ignore
= gimple_call_lhs (stmt
) == NULL_TREE
;
2817 if (fcode
== BUILT_IN_STPNCPY_CHK
&& ignore
)
2819 /* If return value of __stpncpy_chk is ignored,
2820 optimize into __strncpy_chk. */
2821 fn
= builtin_decl_explicit (BUILT_IN_STRNCPY_CHK
);
2824 gimple
*repl
= gimple_build_call (fn
, 4, dest
, src
, len
, size
);
2825 replace_call_with_call_and_fold (gsi
, repl
);
2830 if (! tree_fits_uhwi_p (size
))
2833 tree maxlen
= get_maxval_strlen (len
, SRK_INT_VALUE
);
2834 if (! integer_all_onesp (size
))
2836 if (! tree_fits_uhwi_p (len
))
2838 /* If LEN is not constant, try MAXLEN too.
2839 For MAXLEN only allow optimizing into non-_ocs function
2840 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2841 if (maxlen
== NULL_TREE
|| ! tree_fits_uhwi_p (maxlen
))
2847 if (tree_int_cst_lt (size
, maxlen
))
2851 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
2852 fn
= builtin_decl_explicit (fcode
== BUILT_IN_STPNCPY_CHK
2853 ? BUILT_IN_STPNCPY
: BUILT_IN_STRNCPY
);
2857 gimple
*repl
= gimple_build_call (fn
, 3, dest
, src
, len
);
2858 replace_call_with_call_and_fold (gsi
, repl
);
2862 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
2863 Return NULL_TREE if no simplification can be made. */
2866 gimple_fold_builtin_stpcpy (gimple_stmt_iterator
*gsi
)
2868 gcall
*stmt
= as_a
<gcall
*> (gsi_stmt (*gsi
));
2869 location_t loc
= gimple_location (stmt
);
2870 tree dest
= gimple_call_arg (stmt
, 0);
2871 tree src
= gimple_call_arg (stmt
, 1);
2874 /* If the result is unused, replace stpcpy with strcpy. */
2875 if (gimple_call_lhs (stmt
) == NULL_TREE
)
2877 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
2880 gimple_call_set_fndecl (stmt
, fn
);
2885 /* Set to non-null if ARG refers to an unterminated array. */
2886 c_strlen_data data
= { };
2887 tree len
= c_strlen (src
, 1, &data
, 1);
2889 || TREE_CODE (len
) != INTEGER_CST
)
2891 data
.decl
= unterminated_array (src
);
2898 /* Avoid folding calls with unterminated arrays. */
2899 if (!gimple_no_warning_p (stmt
))
2900 warn_string_no_nul (loc
, "stpcpy", src
, data
.decl
);
2901 gimple_set_no_warning (stmt
, true);
2905 if (optimize_function_for_size_p (cfun
)
2906 /* If length is zero it's small enough. */
2907 && !integer_zerop (len
))
2910 /* If the source has a known length replace stpcpy with memcpy. */
2911 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
2915 gimple_seq stmts
= NULL
;
2916 tree tem
= gimple_convert (&stmts
, loc
, size_type_node
, len
);
2917 lenp1
= gimple_build (&stmts
, loc
, PLUS_EXPR
, size_type_node
,
2918 tem
, build_int_cst (size_type_node
, 1));
2919 gsi_insert_seq_before (gsi
, stmts
, GSI_SAME_STMT
);
2920 gcall
*repl
= gimple_build_call (fn
, 3, dest
, src
, lenp1
);
2921 gimple_set_vuse (repl
, gimple_vuse (stmt
));
2922 gimple_set_vdef (repl
, gimple_vdef (stmt
));
2923 if (gimple_vdef (repl
)
2924 && TREE_CODE (gimple_vdef (repl
)) == SSA_NAME
)
2925 SSA_NAME_DEF_STMT (gimple_vdef (repl
)) = repl
;
2926 gsi_insert_before (gsi
, repl
, GSI_SAME_STMT
);
2927 /* Replace the result with dest + len. */
2929 tem
= gimple_convert (&stmts
, loc
, sizetype
, len
);
2930 gsi_insert_seq_before (gsi
, stmts
, GSI_SAME_STMT
);
2931 gassign
*ret
= gimple_build_assign (gimple_call_lhs (stmt
),
2932 POINTER_PLUS_EXPR
, dest
, tem
);
2933 gsi_replace (gsi
, ret
, false);
2934 /* Finally fold the memcpy call. */
2935 gimple_stmt_iterator gsi2
= *gsi
;
2941 /* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS. Return
2942 NULL_TREE if a normal call should be emitted rather than expanding
2943 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
2944 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
2945 passed as second argument. */
2948 gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator
*gsi
,
2949 enum built_in_function fcode
)
2951 gcall
*stmt
= as_a
<gcall
*> (gsi_stmt (*gsi
));
2952 tree dest
, size
, len
, fn
, fmt
, flag
;
2953 const char *fmt_str
;
2955 /* Verify the required arguments in the original call. */
2956 if (gimple_call_num_args (stmt
) < 5)
2959 dest
= gimple_call_arg (stmt
, 0);
2960 len
= gimple_call_arg (stmt
, 1);
2961 flag
= gimple_call_arg (stmt
, 2);
2962 size
= gimple_call_arg (stmt
, 3);
2963 fmt
= gimple_call_arg (stmt
, 4);
2965 if (! tree_fits_uhwi_p (size
))
2968 if (! integer_all_onesp (size
))
2970 tree maxlen
= get_maxval_strlen (len
, SRK_INT_VALUE
);
2971 if (! tree_fits_uhwi_p (len
))
2973 /* If LEN is not constant, try MAXLEN too.
2974 For MAXLEN only allow optimizing into non-_ocs function
2975 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2976 if (maxlen
== NULL_TREE
|| ! tree_fits_uhwi_p (maxlen
))
2982 if (tree_int_cst_lt (size
, maxlen
))
2986 if (!init_target_chars ())
2989 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
2990 or if format doesn't contain % chars or is "%s". */
2991 if (! integer_zerop (flag
))
2993 fmt_str
= c_getstr (fmt
);
2994 if (fmt_str
== NULL
)
2996 if (strchr (fmt_str
, target_percent
) != NULL
2997 && strcmp (fmt_str
, target_percent_s
))
3001 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
3003 fn
= builtin_decl_explicit (fcode
== BUILT_IN_VSNPRINTF_CHK
3004 ? BUILT_IN_VSNPRINTF
: BUILT_IN_SNPRINTF
);
3008 /* Replace the called function and the first 5 argument by 3 retaining
3009 trailing varargs. */
3010 gimple_call_set_fndecl (stmt
, fn
);
3011 gimple_call_set_fntype (stmt
, TREE_TYPE (fn
));
3012 gimple_call_set_arg (stmt
, 0, dest
);
3013 gimple_call_set_arg (stmt
, 1, len
);
3014 gimple_call_set_arg (stmt
, 2, fmt
);
3015 for (unsigned i
= 3; i
< gimple_call_num_args (stmt
) - 2; ++i
)
3016 gimple_call_set_arg (stmt
, i
, gimple_call_arg (stmt
, i
+ 2));
3017 gimple_set_num_ops (stmt
, gimple_num_ops (stmt
) - 2);
3022 /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
3023 Return NULL_TREE if a normal call should be emitted rather than
3024 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
3025 or BUILT_IN_VSPRINTF_CHK. */
3028 gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator
*gsi
,
3029 enum built_in_function fcode
)
3031 gcall
*stmt
= as_a
<gcall
*> (gsi_stmt (*gsi
));
3032 tree dest
, size
, len
, fn
, fmt
, flag
;
3033 const char *fmt_str
;
3034 unsigned nargs
= gimple_call_num_args (stmt
);
3036 /* Verify the required arguments in the original call. */
3039 dest
= gimple_call_arg (stmt
, 0);
3040 flag
= gimple_call_arg (stmt
, 1);
3041 size
= gimple_call_arg (stmt
, 2);
3042 fmt
= gimple_call_arg (stmt
, 3);
3044 if (! tree_fits_uhwi_p (size
))
3049 if (!init_target_chars ())
3052 /* Check whether the format is a literal string constant. */
3053 fmt_str
= c_getstr (fmt
);
3054 if (fmt_str
!= NULL
)
3056 /* If the format doesn't contain % args or %%, we know the size. */
3057 if (strchr (fmt_str
, target_percent
) == 0)
3059 if (fcode
!= BUILT_IN_SPRINTF_CHK
|| nargs
== 4)
3060 len
= build_int_cstu (size_type_node
, strlen (fmt_str
));
3062 /* If the format is "%s" and first ... argument is a string literal,
3063 we know the size too. */
3064 else if (fcode
== BUILT_IN_SPRINTF_CHK
3065 && strcmp (fmt_str
, target_percent_s
) == 0)
3071 arg
= gimple_call_arg (stmt
, 4);
3072 if (POINTER_TYPE_P (TREE_TYPE (arg
)))
3074 len
= c_strlen (arg
, 1);
3075 if (! len
|| ! tree_fits_uhwi_p (len
))
3082 if (! integer_all_onesp (size
))
3084 if (! len
|| ! tree_int_cst_lt (len
, size
))
3088 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
3089 or if format doesn't contain % chars or is "%s". */
3090 if (! integer_zerop (flag
))
3092 if (fmt_str
== NULL
)
3094 if (strchr (fmt_str
, target_percent
) != NULL
3095 && strcmp (fmt_str
, target_percent_s
))
3099 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
3100 fn
= builtin_decl_explicit (fcode
== BUILT_IN_VSPRINTF_CHK
3101 ? BUILT_IN_VSPRINTF
: BUILT_IN_SPRINTF
);
3105 /* Replace the called function and the first 4 argument by 2 retaining
3106 trailing varargs. */
3107 gimple_call_set_fndecl (stmt
, fn
);
3108 gimple_call_set_fntype (stmt
, TREE_TYPE (fn
));
3109 gimple_call_set_arg (stmt
, 0, dest
);
3110 gimple_call_set_arg (stmt
, 1, fmt
);
3111 for (unsigned i
= 2; i
< gimple_call_num_args (stmt
) - 2; ++i
)
3112 gimple_call_set_arg (stmt
, i
, gimple_call_arg (stmt
, i
+ 2));
3113 gimple_set_num_ops (stmt
, gimple_num_ops (stmt
) - 2);
3118 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
3119 ORIG may be null if this is a 2-argument call. We don't attempt to
3120 simplify calls with more than 3 arguments.
3122 Return true if simplification was possible, otherwise false. */
3125 gimple_fold_builtin_sprintf (gimple_stmt_iterator
*gsi
)
3127 gimple
*stmt
= gsi_stmt (*gsi
);
3128 tree dest
= gimple_call_arg (stmt
, 0);
3129 tree fmt
= gimple_call_arg (stmt
, 1);
3130 tree orig
= NULL_TREE
;
3131 const char *fmt_str
= NULL
;
3133 /* Verify the required arguments in the original call. We deal with two
3134 types of sprintf() calls: 'sprintf (str, fmt)' and
3135 'sprintf (dest, "%s", orig)'. */
3136 if (gimple_call_num_args (stmt
) > 3)
3139 if (gimple_call_num_args (stmt
) == 3)
3140 orig
= gimple_call_arg (stmt
, 2);
3142 /* Check whether the format is a literal string constant. */
3143 fmt_str
= c_getstr (fmt
);
3144 if (fmt_str
== NULL
)
3147 if (!init_target_chars ())
3150 /* If the format doesn't contain % args or %%, use strcpy. */
3151 if (strchr (fmt_str
, target_percent
) == NULL
)
3153 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
3158 /* Don't optimize sprintf (buf, "abc", ptr++). */
3162 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
3163 'format' is known to contain no % formats. */
3164 gimple_seq stmts
= NULL
;
3165 gimple
*repl
= gimple_build_call (fn
, 2, dest
, fmt
);
3167 /* Propagate the NO_WARNING bit to avoid issuing the same
3168 warning more than once. */
3169 if (gimple_no_warning_p (stmt
))
3170 gimple_set_no_warning (repl
, true);
3172 gimple_seq_add_stmt_without_update (&stmts
, repl
);
3173 if (gimple_call_lhs (stmt
))
3175 repl
= gimple_build_assign (gimple_call_lhs (stmt
),
3176 build_int_cst (integer_type_node
,
3178 gimple_seq_add_stmt_without_update (&stmts
, repl
);
3179 gsi_replace_with_seq_vops (gsi
, stmts
);
3180 /* gsi now points at the assignment to the lhs, get a
3181 stmt iterator to the memcpy call.
3182 ??? We can't use gsi_for_stmt as that doesn't work when the
3183 CFG isn't built yet. */
3184 gimple_stmt_iterator gsi2
= *gsi
;
3190 gsi_replace_with_seq_vops (gsi
, stmts
);
3196 /* If the format is "%s", use strcpy if the result isn't used. */
3197 else if (fmt_str
&& strcmp (fmt_str
, target_percent_s
) == 0)
3200 fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
3205 /* Don't crash on sprintf (str1, "%s"). */
3209 tree orig_len
= NULL_TREE
;
3210 if (gimple_call_lhs (stmt
))
3212 orig_len
= get_maxval_strlen (orig
, SRK_STRLEN
);
3217 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
3218 gimple_seq stmts
= NULL
;
3219 gimple
*repl
= gimple_build_call (fn
, 2, dest
, orig
);
3221 /* Propagate the NO_WARNING bit to avoid issuing the same
3222 warning more than once. */
3223 if (gimple_no_warning_p (stmt
))
3224 gimple_set_no_warning (repl
, true);
3226 gimple_seq_add_stmt_without_update (&stmts
, repl
);
3227 if (gimple_call_lhs (stmt
))
3229 if (!useless_type_conversion_p (integer_type_node
,
3230 TREE_TYPE (orig_len
)))
3231 orig_len
= fold_convert (integer_type_node
, orig_len
);
3232 repl
= gimple_build_assign (gimple_call_lhs (stmt
), orig_len
);
3233 gimple_seq_add_stmt_without_update (&stmts
, repl
);
3234 gsi_replace_with_seq_vops (gsi
, stmts
);
3235 /* gsi now points at the assignment to the lhs, get a
3236 stmt iterator to the memcpy call.
3237 ??? We can't use gsi_for_stmt as that doesn't work when the
3238 CFG isn't built yet. */
3239 gimple_stmt_iterator gsi2
= *gsi
;
3245 gsi_replace_with_seq_vops (gsi
, stmts
);
3253 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
3254 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
3255 attempt to simplify calls with more than 4 arguments.
3257 Return true if simplification was possible, otherwise false. */
3260 gimple_fold_builtin_snprintf (gimple_stmt_iterator
*gsi
)
3262 gcall
*stmt
= as_a
<gcall
*> (gsi_stmt (*gsi
));
3263 tree dest
= gimple_call_arg (stmt
, 0);
3264 tree destsize
= gimple_call_arg (stmt
, 1);
3265 tree fmt
= gimple_call_arg (stmt
, 2);
3266 tree orig
= NULL_TREE
;
3267 const char *fmt_str
= NULL
;
3269 if (gimple_call_num_args (stmt
) > 4)
3272 if (gimple_call_num_args (stmt
) == 4)
3273 orig
= gimple_call_arg (stmt
, 3);
3275 if (!tree_fits_uhwi_p (destsize
))
3277 unsigned HOST_WIDE_INT destlen
= tree_to_uhwi (destsize
);
3279 /* Check whether the format is a literal string constant. */
3280 fmt_str
= c_getstr (fmt
);
3281 if (fmt_str
== NULL
)
3284 if (!init_target_chars ())
3287 /* If the format doesn't contain % args or %%, use strcpy. */
3288 if (strchr (fmt_str
, target_percent
) == NULL
)
3290 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
3294 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
3298 /* We could expand this as
3299 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
3301 memcpy (str, fmt_with_nul_at_cstm1, cst);
3302 but in the former case that might increase code size
3303 and in the latter case grow .rodata section too much.
3305 size_t len
= strlen (fmt_str
);
3309 gimple_seq stmts
= NULL
;
3310 gimple
*repl
= gimple_build_call (fn
, 2, dest
, fmt
);
3311 gimple_seq_add_stmt_without_update (&stmts
, repl
);
3312 if (gimple_call_lhs (stmt
))
3314 repl
= gimple_build_assign (gimple_call_lhs (stmt
),
3315 build_int_cst (integer_type_node
, len
));
3316 gimple_seq_add_stmt_without_update (&stmts
, repl
);
3317 gsi_replace_with_seq_vops (gsi
, stmts
);
3318 /* gsi now points at the assignment to the lhs, get a
3319 stmt iterator to the memcpy call.
3320 ??? We can't use gsi_for_stmt as that doesn't work when the
3321 CFG isn't built yet. */
3322 gimple_stmt_iterator gsi2
= *gsi
;
3328 gsi_replace_with_seq_vops (gsi
, stmts
);
3334 /* If the format is "%s", use strcpy if the result isn't used. */
3335 else if (fmt_str
&& strcmp (fmt_str
, target_percent_s
) == 0)
3337 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
3341 /* Don't crash on snprintf (str1, cst, "%s"). */
3345 tree orig_len
= get_maxval_strlen (orig
, SRK_STRLEN
);
3346 if (!orig_len
|| TREE_CODE (orig_len
) != INTEGER_CST
)
3349 /* We could expand this as
3350 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
3352 memcpy (str1, str2_with_nul_at_cstm1, cst);
3353 but in the former case that might increase code size
3354 and in the latter case grow .rodata section too much.
3356 if (compare_tree_int (orig_len
, destlen
) >= 0)
3359 /* Convert snprintf (str1, cst, "%s", str2) into
3360 strcpy (str1, str2) if strlen (str2) < cst. */
3361 gimple_seq stmts
= NULL
;
3362 gimple
*repl
= gimple_build_call (fn
, 2, dest
, orig
);
3363 gimple_seq_add_stmt_without_update (&stmts
, repl
);
3364 if (gimple_call_lhs (stmt
))
3366 if (!useless_type_conversion_p (integer_type_node
,
3367 TREE_TYPE (orig_len
)))
3368 orig_len
= fold_convert (integer_type_node
, orig_len
);
3369 repl
= gimple_build_assign (gimple_call_lhs (stmt
), orig_len
);
3370 gimple_seq_add_stmt_without_update (&stmts
, repl
);
3371 gsi_replace_with_seq_vops (gsi
, stmts
);
3372 /* gsi now points at the assignment to the lhs, get a
3373 stmt iterator to the memcpy call.
3374 ??? We can't use gsi_for_stmt as that doesn't work when the
3375 CFG isn't built yet. */
3376 gimple_stmt_iterator gsi2
= *gsi
;
3382 gsi_replace_with_seq_vops (gsi
, stmts
);
3390 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
3391 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
3392 more than 3 arguments, and ARG may be null in the 2-argument case.
3394 Return NULL_TREE if no simplification was possible, otherwise return the
3395 simplified form of the call as a tree. FCODE is the BUILT_IN_*
3396 code of the function to be simplified. */
3399 gimple_fold_builtin_fprintf (gimple_stmt_iterator
*gsi
,
3400 tree fp
, tree fmt
, tree arg
,
3401 enum built_in_function fcode
)
3403 gcall
*stmt
= as_a
<gcall
*> (gsi_stmt (*gsi
));
3404 tree fn_fputc
, fn_fputs
;
3405 const char *fmt_str
= NULL
;
3407 /* If the return value is used, don't do the transformation. */
3408 if (gimple_call_lhs (stmt
) != NULL_TREE
)
3411 /* Check whether the format is a literal string constant. */
3412 fmt_str
= c_getstr (fmt
);
3413 if (fmt_str
== NULL
)
3416 if (fcode
== BUILT_IN_FPRINTF_UNLOCKED
)
3418 /* If we're using an unlocked function, assume the other
3419 unlocked functions exist explicitly. */
3420 fn_fputc
= builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED
);
3421 fn_fputs
= builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED
);
3425 fn_fputc
= builtin_decl_implicit (BUILT_IN_FPUTC
);
3426 fn_fputs
= builtin_decl_implicit (BUILT_IN_FPUTS
);
3429 if (!init_target_chars ())
3432 /* If the format doesn't contain % args or %%, use strcpy. */
3433 if (strchr (fmt_str
, target_percent
) == NULL
)
3435 if (fcode
!= BUILT_IN_VFPRINTF
&& fcode
!= BUILT_IN_VFPRINTF_CHK
3439 /* If the format specifier was "", fprintf does nothing. */
3440 if (fmt_str
[0] == '\0')
3442 replace_call_with_value (gsi
, NULL_TREE
);
3446 /* When "string" doesn't contain %, replace all cases of
3447 fprintf (fp, string) with fputs (string, fp). The fputs
3448 builtin will take care of special cases like length == 1. */
3451 gcall
*repl
= gimple_build_call (fn_fputs
, 2, fmt
, fp
);
3452 replace_call_with_call_and_fold (gsi
, repl
);
3457 /* The other optimizations can be done only on the non-va_list variants. */
3458 else if (fcode
== BUILT_IN_VFPRINTF
|| fcode
== BUILT_IN_VFPRINTF_CHK
)
3461 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
3462 else if (strcmp (fmt_str
, target_percent_s
) == 0)
3464 if (!arg
|| ! POINTER_TYPE_P (TREE_TYPE (arg
)))
3468 gcall
*repl
= gimple_build_call (fn_fputs
, 2, arg
, fp
);
3469 replace_call_with_call_and_fold (gsi
, repl
);
3474 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
3475 else if (strcmp (fmt_str
, target_percent_c
) == 0)
3478 || ! useless_type_conversion_p (integer_type_node
, TREE_TYPE (arg
)))
3482 gcall
*repl
= gimple_build_call (fn_fputc
, 2, arg
, fp
);
3483 replace_call_with_call_and_fold (gsi
, repl
);
3491 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
3492 FMT and ARG are the arguments to the call; we don't fold cases with
3493 more than 2 arguments, and ARG may be null if this is a 1-argument case.
3495 Return NULL_TREE if no simplification was possible, otherwise return the
3496 simplified form of the call as a tree. FCODE is the BUILT_IN_*
3497 code of the function to be simplified. */
3500 gimple_fold_builtin_printf (gimple_stmt_iterator
*gsi
, tree fmt
,
3501 tree arg
, enum built_in_function fcode
)
3503 gcall
*stmt
= as_a
<gcall
*> (gsi_stmt (*gsi
));
3504 tree fn_putchar
, fn_puts
, newarg
;
3505 const char *fmt_str
= NULL
;
3507 /* If the return value is used, don't do the transformation. */
3508 if (gimple_call_lhs (stmt
) != NULL_TREE
)
3511 /* Check whether the format is a literal string constant. */
3512 fmt_str
= c_getstr (fmt
);
3513 if (fmt_str
== NULL
)
3516 if (fcode
== BUILT_IN_PRINTF_UNLOCKED
)
3518 /* If we're using an unlocked function, assume the other
3519 unlocked functions exist explicitly. */
3520 fn_putchar
= builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED
);
3521 fn_puts
= builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED
);
3525 fn_putchar
= builtin_decl_implicit (BUILT_IN_PUTCHAR
);
3526 fn_puts
= builtin_decl_implicit (BUILT_IN_PUTS
);
3529 if (!init_target_chars ())
3532 if (strcmp (fmt_str
, target_percent_s
) == 0
3533 || strchr (fmt_str
, target_percent
) == NULL
)
3537 if (strcmp (fmt_str
, target_percent_s
) == 0)
3539 if (fcode
== BUILT_IN_VPRINTF
|| fcode
== BUILT_IN_VPRINTF_CHK
)
3542 if (!arg
|| ! POINTER_TYPE_P (TREE_TYPE (arg
)))
3545 str
= c_getstr (arg
);
3551 /* The format specifier doesn't contain any '%' characters. */
3552 if (fcode
!= BUILT_IN_VPRINTF
&& fcode
!= BUILT_IN_VPRINTF_CHK
3558 /* If the string was "", printf does nothing. */
3561 replace_call_with_value (gsi
, NULL_TREE
);
3565 /* If the string has length of 1, call putchar. */
3568 /* Given printf("c"), (where c is any one character,)
3569 convert "c"[0] to an int and pass that to the replacement
3571 newarg
= build_int_cst (integer_type_node
, str
[0]);
3574 gcall
*repl
= gimple_build_call (fn_putchar
, 1, newarg
);
3575 replace_call_with_call_and_fold (gsi
, repl
);
3581 /* If the string was "string\n", call puts("string"). */
3582 size_t len
= strlen (str
);
3583 if ((unsigned char)str
[len
- 1] == target_newline
3584 && (size_t) (int) len
== len
3589 /* Create a NUL-terminated string that's one char shorter
3590 than the original, stripping off the trailing '\n'. */
3591 newstr
= xstrdup (str
);
3592 newstr
[len
- 1] = '\0';
3593 newarg
= build_string_literal (len
, newstr
);
3597 gcall
*repl
= gimple_build_call (fn_puts
, 1, newarg
);
3598 replace_call_with_call_and_fold (gsi
, repl
);
3603 /* We'd like to arrange to call fputs(string,stdout) here,
3604 but we need stdout and don't have a way to get it yet. */
3609 /* The other optimizations can be done only on the non-va_list variants. */
3610 else if (fcode
== BUILT_IN_VPRINTF
|| fcode
== BUILT_IN_VPRINTF_CHK
)
3613 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
3614 else if (strcmp (fmt_str
, target_percent_s_newline
) == 0)
3616 if (!arg
|| ! POINTER_TYPE_P (TREE_TYPE (arg
)))
3620 gcall
*repl
= gimple_build_call (fn_puts
, 1, arg
);
3621 replace_call_with_call_and_fold (gsi
, repl
);
3626 /* If the format specifier was "%c", call __builtin_putchar(arg). */
3627 else if (strcmp (fmt_str
, target_percent_c
) == 0)
3629 if (!arg
|| ! useless_type_conversion_p (integer_type_node
,
3634 gcall
*repl
= gimple_build_call (fn_putchar
, 1, arg
);
3635 replace_call_with_call_and_fold (gsi
, repl
);
3645 /* Fold a call to __builtin_strlen with known length LEN. */
3648 gimple_fold_builtin_strlen (gimple_stmt_iterator
*gsi
)
3650 gimple
*stmt
= gsi_stmt (*gsi
);
3651 tree arg
= gimple_call_arg (stmt
, 0);
3656 /* Set to non-null if ARG refers to an unterminated array. */
3659 if (!get_range_strlen (arg
, lenrange
, 1, true, &nonstr
)
3661 && lenrange
[0] && TREE_CODE (lenrange
[0]) == INTEGER_CST
3662 && lenrange
[1] && TREE_CODE (lenrange
[1]) == INTEGER_CST
)
3664 /* The range of lengths refers to either a single constant
3665 string or to the longest and shortest constant string
3666 referenced by the argument of the strlen() call, or to
3667 the strings that can possibly be stored in the arrays
3668 the argument refers to. */
3669 minlen
= wi::to_wide (lenrange
[0]);
3670 maxlen
= wi::to_wide (lenrange
[1]);
3674 unsigned prec
= TYPE_PRECISION (sizetype
);
3676 minlen
= wi::shwi (0, prec
);
3677 maxlen
= wi::to_wide (max_object_size (), prec
) - 2;
3680 if (minlen
== maxlen
)
3682 lenrange
[0] = force_gimple_operand_gsi (gsi
, lenrange
[0], true, NULL
,
3683 true, GSI_SAME_STMT
);
3684 replace_call_with_value (gsi
, lenrange
[0]);
3688 if (tree lhs
= gimple_call_lhs (stmt
))
3689 if (TREE_CODE (lhs
) == SSA_NAME
3690 && INTEGRAL_TYPE_P (TREE_TYPE (lhs
)))
3691 set_range_info (lhs
, VR_RANGE
, minlen
, maxlen
);
3696 /* Fold a call to __builtin_acc_on_device. */
3699 gimple_fold_builtin_acc_on_device (gimple_stmt_iterator
*gsi
, tree arg0
)
3701 /* Defer folding until we know which compiler we're in. */
3702 if (symtab
->state
!= EXPANSION
)
3705 unsigned val_host
= GOMP_DEVICE_HOST
;
3706 unsigned val_dev
= GOMP_DEVICE_NONE
;
3708 #ifdef ACCEL_COMPILER
3709 val_host
= GOMP_DEVICE_NOT_HOST
;
3710 val_dev
= ACCEL_COMPILER_acc_device
;
3713 location_t loc
= gimple_location (gsi_stmt (*gsi
));
3715 tree host_eq
= make_ssa_name (boolean_type_node
);
3716 gimple
*host_ass
= gimple_build_assign
3717 (host_eq
, EQ_EXPR
, arg0
, build_int_cst (TREE_TYPE (arg0
), val_host
));
3718 gimple_set_location (host_ass
, loc
);
3719 gsi_insert_before (gsi
, host_ass
, GSI_SAME_STMT
);
3721 tree dev_eq
= make_ssa_name (boolean_type_node
);
3722 gimple
*dev_ass
= gimple_build_assign
3723 (dev_eq
, EQ_EXPR
, arg0
, build_int_cst (TREE_TYPE (arg0
), val_dev
));
3724 gimple_set_location (dev_ass
, loc
);
3725 gsi_insert_before (gsi
, dev_ass
, GSI_SAME_STMT
);
3727 tree result
= make_ssa_name (boolean_type_node
);
3728 gimple
*result_ass
= gimple_build_assign
3729 (result
, BIT_IOR_EXPR
, host_eq
, dev_eq
);
3730 gimple_set_location (result_ass
, loc
);
3731 gsi_insert_before (gsi
, result_ass
, GSI_SAME_STMT
);
3733 replace_call_with_value (gsi
, result
);
3738 /* Fold realloc (0, n) -> malloc (n). */
3741 gimple_fold_builtin_realloc (gimple_stmt_iterator
*gsi
)
3743 gimple
*stmt
= gsi_stmt (*gsi
);
3744 tree arg
= gimple_call_arg (stmt
, 0);
3745 tree size
= gimple_call_arg (stmt
, 1);
3747 if (operand_equal_p (arg
, null_pointer_node
, 0))
3749 tree fn_malloc
= builtin_decl_implicit (BUILT_IN_MALLOC
);
3752 gcall
*repl
= gimple_build_call (fn_malloc
, 1, size
);
3753 replace_call_with_call_and_fold (gsi
, repl
);
3760 /* Fold the non-target builtin at *GSI and return whether any simplification
3764 gimple_fold_builtin (gimple_stmt_iterator
*gsi
)
3766 gcall
*stmt
= as_a
<gcall
*>(gsi_stmt (*gsi
));
3767 tree callee
= gimple_call_fndecl (stmt
);
3769 /* Give up for always_inline inline builtins until they are
3771 if (avoid_folding_inline_builtin (callee
))
3774 unsigned n
= gimple_call_num_args (stmt
);
3775 enum built_in_function fcode
= DECL_FUNCTION_CODE (callee
);
3779 return gimple_fold_builtin_bcmp (gsi
);
3780 case BUILT_IN_BCOPY
:
3781 return gimple_fold_builtin_bcopy (gsi
);
3782 case BUILT_IN_BZERO
:
3783 return gimple_fold_builtin_bzero (gsi
);
3785 case BUILT_IN_MEMSET
:
3786 return gimple_fold_builtin_memset (gsi
,
3787 gimple_call_arg (stmt
, 1),
3788 gimple_call_arg (stmt
, 2));
3789 case BUILT_IN_MEMCPY
:
3790 return gimple_fold_builtin_memory_op (gsi
, gimple_call_arg (stmt
, 0),
3791 gimple_call_arg (stmt
, 1), 0);
3792 case BUILT_IN_MEMPCPY
:
3793 return gimple_fold_builtin_memory_op (gsi
, gimple_call_arg (stmt
, 0),
3794 gimple_call_arg (stmt
, 1), 1);
3795 case BUILT_IN_MEMMOVE
:
3796 return gimple_fold_builtin_memory_op (gsi
, gimple_call_arg (stmt
, 0),
3797 gimple_call_arg (stmt
, 1), 3);
3798 case BUILT_IN_SPRINTF_CHK
:
3799 case BUILT_IN_VSPRINTF_CHK
:
3800 return gimple_fold_builtin_sprintf_chk (gsi
, fcode
);
3801 case BUILT_IN_STRCAT_CHK
:
3802 return gimple_fold_builtin_strcat_chk (gsi
);
3803 case BUILT_IN_STRNCAT_CHK
:
3804 return gimple_fold_builtin_strncat_chk (gsi
);
3805 case BUILT_IN_STRLEN
:
3806 return gimple_fold_builtin_strlen (gsi
);
3807 case BUILT_IN_STRCPY
:
3808 return gimple_fold_builtin_strcpy (gsi
,
3809 gimple_call_arg (stmt
, 0),
3810 gimple_call_arg (stmt
, 1));
3811 case BUILT_IN_STRNCPY
:
3812 return gimple_fold_builtin_strncpy (gsi
,
3813 gimple_call_arg (stmt
, 0),
3814 gimple_call_arg (stmt
, 1),
3815 gimple_call_arg (stmt
, 2));
3816 case BUILT_IN_STRCAT
:
3817 return gimple_fold_builtin_strcat (gsi
, gimple_call_arg (stmt
, 0),
3818 gimple_call_arg (stmt
, 1));
3819 case BUILT_IN_STRNCAT
:
3820 return gimple_fold_builtin_strncat (gsi
);
3821 case BUILT_IN_INDEX
:
3822 case BUILT_IN_STRCHR
:
3823 return gimple_fold_builtin_strchr (gsi
, false);
3824 case BUILT_IN_RINDEX
:
3825 case BUILT_IN_STRRCHR
:
3826 return gimple_fold_builtin_strchr (gsi
, true);
3827 case BUILT_IN_STRSTR
:
3828 return gimple_fold_builtin_strstr (gsi
);
3829 case BUILT_IN_STRCMP
:
3830 case BUILT_IN_STRCMP_EQ
:
3831 case BUILT_IN_STRCASECMP
:
3832 case BUILT_IN_STRNCMP
:
3833 case BUILT_IN_STRNCMP_EQ
:
3834 case BUILT_IN_STRNCASECMP
:
3835 return gimple_fold_builtin_string_compare (gsi
);
3836 case BUILT_IN_MEMCHR
:
3837 return gimple_fold_builtin_memchr (gsi
);
3838 case BUILT_IN_FPUTS
:
3839 return gimple_fold_builtin_fputs (gsi
, gimple_call_arg (stmt
, 0),
3840 gimple_call_arg (stmt
, 1), false);
3841 case BUILT_IN_FPUTS_UNLOCKED
:
3842 return gimple_fold_builtin_fputs (gsi
, gimple_call_arg (stmt
, 0),
3843 gimple_call_arg (stmt
, 1), true);
3844 case BUILT_IN_MEMCPY_CHK
:
3845 case BUILT_IN_MEMPCPY_CHK
:
3846 case BUILT_IN_MEMMOVE_CHK
:
3847 case BUILT_IN_MEMSET_CHK
:
3848 return gimple_fold_builtin_memory_chk (gsi
,
3849 gimple_call_arg (stmt
, 0),
3850 gimple_call_arg (stmt
, 1),
3851 gimple_call_arg (stmt
, 2),
3852 gimple_call_arg (stmt
, 3),
3854 case BUILT_IN_STPCPY
:
3855 return gimple_fold_builtin_stpcpy (gsi
);
3856 case BUILT_IN_STRCPY_CHK
:
3857 case BUILT_IN_STPCPY_CHK
:
3858 return gimple_fold_builtin_stxcpy_chk (gsi
,
3859 gimple_call_arg (stmt
, 0),
3860 gimple_call_arg (stmt
, 1),
3861 gimple_call_arg (stmt
, 2),
3863 case BUILT_IN_STRNCPY_CHK
:
3864 case BUILT_IN_STPNCPY_CHK
:
3865 return gimple_fold_builtin_stxncpy_chk (gsi
,
3866 gimple_call_arg (stmt
, 0),
3867 gimple_call_arg (stmt
, 1),
3868 gimple_call_arg (stmt
, 2),
3869 gimple_call_arg (stmt
, 3),
3871 case BUILT_IN_SNPRINTF_CHK
:
3872 case BUILT_IN_VSNPRINTF_CHK
:
3873 return gimple_fold_builtin_snprintf_chk (gsi
, fcode
);
3875 case BUILT_IN_FPRINTF
:
3876 case BUILT_IN_FPRINTF_UNLOCKED
:
3877 case BUILT_IN_VFPRINTF
:
3878 if (n
== 2 || n
== 3)
3879 return gimple_fold_builtin_fprintf (gsi
,
3880 gimple_call_arg (stmt
, 0),
3881 gimple_call_arg (stmt
, 1),
3883 ? gimple_call_arg (stmt
, 2)
3887 case BUILT_IN_FPRINTF_CHK
:
3888 case BUILT_IN_VFPRINTF_CHK
:
3889 if (n
== 3 || n
== 4)
3890 return gimple_fold_builtin_fprintf (gsi
,
3891 gimple_call_arg (stmt
, 0),
3892 gimple_call_arg (stmt
, 2),
3894 ? gimple_call_arg (stmt
, 3)
3898 case BUILT_IN_PRINTF
:
3899 case BUILT_IN_PRINTF_UNLOCKED
:
3900 case BUILT_IN_VPRINTF
:
3901 if (n
== 1 || n
== 2)
3902 return gimple_fold_builtin_printf (gsi
, gimple_call_arg (stmt
, 0),
3904 ? gimple_call_arg (stmt
, 1)
3905 : NULL_TREE
, fcode
);
3907 case BUILT_IN_PRINTF_CHK
:
3908 case BUILT_IN_VPRINTF_CHK
:
3909 if (n
== 2 || n
== 3)
3910 return gimple_fold_builtin_printf (gsi
, gimple_call_arg (stmt
, 1),
3912 ? gimple_call_arg (stmt
, 2)
3913 : NULL_TREE
, fcode
);
3915 case BUILT_IN_ACC_ON_DEVICE
:
3916 return gimple_fold_builtin_acc_on_device (gsi
,
3917 gimple_call_arg (stmt
, 0));
3918 case BUILT_IN_REALLOC
:
3919 return gimple_fold_builtin_realloc (gsi
);
3924 /* Try the generic builtin folder. */
3925 bool ignore
= (gimple_call_lhs (stmt
) == NULL
);
3926 tree result
= fold_call_stmt (stmt
, ignore
);
3930 STRIP_NOPS (result
);
3932 result
= fold_convert (gimple_call_return_type (stmt
), result
);
3933 if (!update_call_from_tree (gsi
, result
))
3934 gimplify_and_update_call_from_tree (gsi
, result
);
3941 /* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
3942 function calls to constants, where possible. */
3945 fold_internal_goacc_dim (const gimple
*call
)
3947 int axis
= oacc_get_ifn_dim_arg (call
);
3948 int size
= oacc_get_fn_dim_size (current_function_decl
, axis
);
3949 tree result
= NULL_TREE
;
3950 tree type
= TREE_TYPE (gimple_call_lhs (call
));
3952 switch (gimple_call_internal_fn (call
))
3954 case IFN_GOACC_DIM_POS
:
3955 /* If the size is 1, we know the answer. */
3957 result
= build_int_cst (type
, 0);
3959 case IFN_GOACC_DIM_SIZE
:
3960 /* If the size is not dynamic, we know the answer. */
3962 result
= build_int_cst (type
, size
);
3971 /* Return true if stmt is __atomic_compare_exchange_N call which is suitable
3972 for conversion into ATOMIC_COMPARE_EXCHANGE if the second argument is
3973 &var where var is only addressable because of such calls. */
3976 optimize_atomic_compare_exchange_p (gimple
*stmt
)
3978 if (gimple_call_num_args (stmt
) != 6
3979 || !flag_inline_atomics
3981 || sanitize_flags_p (SANITIZE_THREAD
| SANITIZE_ADDRESS
)
3982 || !gimple_call_builtin_p (stmt
, BUILT_IN_NORMAL
)
3983 || !gimple_vdef (stmt
)
3984 || !gimple_vuse (stmt
))
3987 tree fndecl
= gimple_call_fndecl (stmt
);
3988 switch (DECL_FUNCTION_CODE (fndecl
))
3990 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
:
3991 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2
:
3992 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4
:
3993 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8
:
3994 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16
:
4000 tree expected
= gimple_call_arg (stmt
, 1);
4001 if (TREE_CODE (expected
) != ADDR_EXPR
4002 || !SSA_VAR_P (TREE_OPERAND (expected
, 0)))
4005 tree etype
= TREE_TYPE (TREE_OPERAND (expected
, 0));
4006 if (!is_gimple_reg_type (etype
)
4007 || !auto_var_in_fn_p (TREE_OPERAND (expected
, 0), current_function_decl
)
4008 || TREE_THIS_VOLATILE (etype
)
4009 || VECTOR_TYPE_P (etype
)
4010 || TREE_CODE (etype
) == COMPLEX_TYPE
4011 /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
4012 might not preserve all the bits. See PR71716. */
4013 || SCALAR_FLOAT_TYPE_P (etype
)
4014 || maybe_ne (TYPE_PRECISION (etype
),
4015 GET_MODE_BITSIZE (TYPE_MODE (etype
))))
4018 tree weak
= gimple_call_arg (stmt
, 3);
4019 if (!integer_zerop (weak
) && !integer_onep (weak
))
4022 tree parmt
= TYPE_ARG_TYPES (TREE_TYPE (fndecl
));
4023 tree itype
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt
)));
4024 machine_mode mode
= TYPE_MODE (itype
);
4026 if (direct_optab_handler (atomic_compare_and_swap_optab
, mode
)
4028 && optab_handler (sync_compare_and_swap_optab
, mode
) == CODE_FOR_nothing
)
4031 if (maybe_ne (int_size_in_bytes (etype
), GET_MODE_SIZE (mode
)))
4038 r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
4040 _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
4041 i = IMAGPART_EXPR <t>;
4043 e = REALPART_EXPR <t>; */
4046 fold_builtin_atomic_compare_exchange (gimple_stmt_iterator
*gsi
)
4048 gimple
*stmt
= gsi_stmt (*gsi
);
4049 tree fndecl
= gimple_call_fndecl (stmt
);
4050 tree parmt
= TYPE_ARG_TYPES (TREE_TYPE (fndecl
));
4051 tree itype
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt
)));
4052 tree ctype
= build_complex_type (itype
);
4053 tree expected
= TREE_OPERAND (gimple_call_arg (stmt
, 1), 0);
4054 bool throws
= false;
4056 gimple
*g
= gimple_build_assign (make_ssa_name (TREE_TYPE (expected
)),
4058 gsi_insert_before (gsi
, g
, GSI_SAME_STMT
);
4059 gimple_stmt_iterator gsiret
= gsi_for_stmt (g
);
4060 if (!useless_type_conversion_p (itype
, TREE_TYPE (expected
)))
4062 g
= gimple_build_assign (make_ssa_name (itype
), VIEW_CONVERT_EXPR
,
4063 build1 (VIEW_CONVERT_EXPR
, itype
,
4064 gimple_assign_lhs (g
)));
4065 gsi_insert_before (gsi
, g
, GSI_SAME_STMT
);
4067 int flag
= (integer_onep (gimple_call_arg (stmt
, 3)) ? 256 : 0)
4068 + int_size_in_bytes (itype
);
4069 g
= gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE
, 6,
4070 gimple_call_arg (stmt
, 0),
4071 gimple_assign_lhs (g
),
4072 gimple_call_arg (stmt
, 2),
4073 build_int_cst (integer_type_node
, flag
),
4074 gimple_call_arg (stmt
, 4),
4075 gimple_call_arg (stmt
, 5));
4076 tree lhs
= make_ssa_name (ctype
);
4077 gimple_call_set_lhs (g
, lhs
);
4078 gimple_set_vdef (g
, gimple_vdef (stmt
));
4079 gimple_set_vuse (g
, gimple_vuse (stmt
));
4080 SSA_NAME_DEF_STMT (gimple_vdef (g
)) = g
;
4081 tree oldlhs
= gimple_call_lhs (stmt
);
4082 if (stmt_can_throw_internal (cfun
, stmt
))
4085 e
= find_fallthru_edge (gsi_bb (*gsi
)->succs
);
4087 gimple_call_set_nothrow (as_a
<gcall
*> (g
),
4088 gimple_call_nothrow_p (as_a
<gcall
*> (stmt
)));
4089 gimple_call_set_lhs (stmt
, NULL_TREE
);
4090 gsi_replace (gsi
, g
, true);
4093 g
= gimple_build_assign (make_ssa_name (itype
), IMAGPART_EXPR
,
4094 build1 (IMAGPART_EXPR
, itype
, lhs
));
4097 gsi_insert_on_edge_immediate (e
, g
);
4098 *gsi
= gsi_for_stmt (g
);
4101 gsi_insert_after (gsi
, g
, GSI_NEW_STMT
);
4102 g
= gimple_build_assign (oldlhs
, NOP_EXPR
, gimple_assign_lhs (g
));
4103 gsi_insert_after (gsi
, g
, GSI_NEW_STMT
);
4105 g
= gimple_build_assign (make_ssa_name (itype
), REALPART_EXPR
,
4106 build1 (REALPART_EXPR
, itype
, lhs
));
4107 if (throws
&& oldlhs
== NULL_TREE
)
4109 gsi_insert_on_edge_immediate (e
, g
);
4110 *gsi
= gsi_for_stmt (g
);
4113 gsi_insert_after (gsi
, g
, GSI_NEW_STMT
);
4114 if (!useless_type_conversion_p (TREE_TYPE (expected
), itype
))
4116 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (expected
)),
4118 build1 (VIEW_CONVERT_EXPR
, TREE_TYPE (expected
),
4119 gimple_assign_lhs (g
)));
4120 gsi_insert_after (gsi
, g
, GSI_NEW_STMT
);
4122 g
= gimple_build_assign (expected
, SSA_NAME
, gimple_assign_lhs (g
));
4123 gsi_insert_after (gsi
, g
, GSI_NEW_STMT
);
4127 /* Return true if ARG0 CODE ARG1 in infinite signed precision operation
4128 doesn't fit into TYPE. The test for overflow should be regardless of
4129 -fwrapv, and even for unsigned types. */
4132 arith_overflowed_p (enum tree_code code
, const_tree type
,
4133 const_tree arg0
, const_tree arg1
)
4135 widest2_int warg0
= widest2_int_cst (arg0
);
4136 widest2_int warg1
= widest2_int_cst (arg1
);
4140 case PLUS_EXPR
: wres
= wi::add (warg0
, warg1
); break;
4141 case MINUS_EXPR
: wres
= wi::sub (warg0
, warg1
); break;
4142 case MULT_EXPR
: wres
= wi::mul (warg0
, warg1
); break;
4143 default: gcc_unreachable ();
4145 signop sign
= TYPE_SIGN (type
);
4146 if (sign
== UNSIGNED
&& wi::neg_p (wres
))
4148 return wi::min_precision (wres
, sign
) > TYPE_PRECISION (type
);
4151 /* Attempt to fold a call statement referenced by the statement iterator GSI.
4152 The statement may be replaced by another statement, e.g., if the call
4153 simplifies to a constant value. Return true if any changes were made.
4154 It is assumed that the operands have been previously folded. */
4157 gimple_fold_call (gimple_stmt_iterator
*gsi
, bool inplace
)
4159 gcall
*stmt
= as_a
<gcall
*> (gsi_stmt (*gsi
));
4161 bool changed
= false;
4164 /* Fold *& in call arguments. */
4165 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
4166 if (REFERENCE_CLASS_P (gimple_call_arg (stmt
, i
)))
4168 tree tmp
= maybe_fold_reference (gimple_call_arg (stmt
, i
), false);
4171 gimple_call_set_arg (stmt
, i
, tmp
);
4176 /* Check for virtual calls that became direct calls. */
4177 callee
= gimple_call_fn (stmt
);
4178 if (callee
&& TREE_CODE (callee
) == OBJ_TYPE_REF
)
4180 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee
)) != NULL_TREE
)
4182 if (dump_file
&& virtual_method_call_p (callee
)
4183 && !possible_polymorphic_call_target_p
4184 (callee
, stmt
, cgraph_node::get (gimple_call_addr_fndecl
4185 (OBJ_TYPE_REF_EXPR (callee
)))))
4188 "Type inheritance inconsistent devirtualization of ");
4189 print_gimple_stmt (dump_file
, stmt
, 0, TDF_SLIM
);
4190 fprintf (dump_file
, " to ");
4191 print_generic_expr (dump_file
, callee
, TDF_SLIM
);
4192 fprintf (dump_file
, "\n");
4195 gimple_call_set_fn (stmt
, OBJ_TYPE_REF_EXPR (callee
));
4198 else if (flag_devirtualize
&& !inplace
&& virtual_method_call_p (callee
))
4201 vec
<cgraph_node
*>targets
4202 = possible_polymorphic_call_targets (callee
, stmt
, &final
);
4203 if (final
&& targets
.length () <= 1 && dbg_cnt (devirt
))
4205 tree lhs
= gimple_call_lhs (stmt
);
4206 if (dump_enabled_p ())
4208 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS
, stmt
,
4209 "folding virtual function call to %s\n",
4210 targets
.length () == 1
4211 ? targets
[0]->name ()
4212 : "__builtin_unreachable");
4214 if (targets
.length () == 1)
4216 tree fndecl
= targets
[0]->decl
;
4217 gimple_call_set_fndecl (stmt
, fndecl
);
4219 /* If changing the call to __cxa_pure_virtual
4220 or similar noreturn function, adjust gimple_call_fntype
4222 if (gimple_call_noreturn_p (stmt
)
4223 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl
)))
4224 && TYPE_ARG_TYPES (TREE_TYPE (fndecl
))
4225 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl
)))
4227 gimple_call_set_fntype (stmt
, TREE_TYPE (fndecl
));
4228 /* If the call becomes noreturn, remove the lhs. */
4230 && gimple_call_noreturn_p (stmt
)
4231 && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt
)))
4232 || should_remove_lhs_p (lhs
)))
4234 if (TREE_CODE (lhs
) == SSA_NAME
)
4236 tree var
= create_tmp_var (TREE_TYPE (lhs
));
4237 tree def
= get_or_create_ssa_default_def (cfun
, var
);
4238 gimple
*new_stmt
= gimple_build_assign (lhs
, def
);
4239 gsi_insert_before (gsi
, new_stmt
, GSI_SAME_STMT
);
4241 gimple_call_set_lhs (stmt
, NULL_TREE
);
4243 maybe_remove_unused_call_args (cfun
, stmt
);
4247 tree fndecl
= builtin_decl_implicit (BUILT_IN_UNREACHABLE
);
4248 gimple
*new_stmt
= gimple_build_call (fndecl
, 0);
4249 gimple_set_location (new_stmt
, gimple_location (stmt
));
4250 /* If the call had a SSA name as lhs morph that into
4251 an uninitialized value. */
4252 if (lhs
&& TREE_CODE (lhs
) == SSA_NAME
)
4254 tree var
= create_tmp_var (TREE_TYPE (lhs
));
4255 SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs
, var
);
4256 SSA_NAME_DEF_STMT (lhs
) = gimple_build_nop ();
4257 set_ssa_default_def (cfun
, var
, lhs
);
4259 gimple_set_vuse (new_stmt
, gimple_vuse (stmt
));
4260 gimple_set_vdef (new_stmt
, gimple_vdef (stmt
));
4261 gsi_replace (gsi
, new_stmt
, false);
4268 /* Check for indirect calls that became direct calls, and then
4269 no longer require a static chain. */
4270 if (gimple_call_chain (stmt
))
4272 tree fn
= gimple_call_fndecl (stmt
);
4273 if (fn
&& !DECL_STATIC_CHAIN (fn
))
4275 gimple_call_set_chain (stmt
, NULL
);
4280 tree tmp
= maybe_fold_reference (gimple_call_chain (stmt
), false);
4283 gimple_call_set_chain (stmt
, tmp
);
4292 /* Check for builtins that CCP can handle using information not
4293 available in the generic fold routines. */
4294 if (gimple_call_builtin_p (stmt
, BUILT_IN_NORMAL
))
4296 if (gimple_fold_builtin (gsi
))
4299 else if (gimple_call_builtin_p (stmt
, BUILT_IN_MD
))
4301 changed
|= targetm
.gimple_fold_builtin (gsi
);
4303 else if (gimple_call_internal_p (stmt
))
4305 enum tree_code subcode
= ERROR_MARK
;
4306 tree result
= NULL_TREE
;
4307 bool cplx_result
= false;
4308 tree overflow
= NULL_TREE
;
4309 switch (gimple_call_internal_fn (stmt
))
4311 case IFN_BUILTIN_EXPECT
:
4312 result
= fold_builtin_expect (gimple_location (stmt
),
4313 gimple_call_arg (stmt
, 0),
4314 gimple_call_arg (stmt
, 1),
4315 gimple_call_arg (stmt
, 2),
4318 case IFN_UBSAN_OBJECT_SIZE
:
4320 tree offset
= gimple_call_arg (stmt
, 1);
4321 tree objsize
= gimple_call_arg (stmt
, 2);
4322 if (integer_all_onesp (objsize
)
4323 || (TREE_CODE (offset
) == INTEGER_CST
4324 && TREE_CODE (objsize
) == INTEGER_CST
4325 && tree_int_cst_le (offset
, objsize
)))
4327 replace_call_with_value (gsi
, NULL_TREE
);
4333 if (integer_zerop (gimple_call_arg (stmt
, 1)))
4335 replace_call_with_value (gsi
, NULL_TREE
);
4339 case IFN_UBSAN_BOUNDS
:
4341 tree index
= gimple_call_arg (stmt
, 1);
4342 tree bound
= gimple_call_arg (stmt
, 2);
4343 if (TREE_CODE (index
) == INTEGER_CST
4344 && TREE_CODE (bound
) == INTEGER_CST
)
4346 index
= fold_convert (TREE_TYPE (bound
), index
);
4347 if (TREE_CODE (index
) == INTEGER_CST
4348 && tree_int_cst_le (index
, bound
))
4350 replace_call_with_value (gsi
, NULL_TREE
);
4356 case IFN_GOACC_DIM_SIZE
:
4357 case IFN_GOACC_DIM_POS
:
4358 result
= fold_internal_goacc_dim (stmt
);
4360 case IFN_UBSAN_CHECK_ADD
:
4361 subcode
= PLUS_EXPR
;
4363 case IFN_UBSAN_CHECK_SUB
:
4364 subcode
= MINUS_EXPR
;
4366 case IFN_UBSAN_CHECK_MUL
:
4367 subcode
= MULT_EXPR
;
4369 case IFN_ADD_OVERFLOW
:
4370 subcode
= PLUS_EXPR
;
4373 case IFN_SUB_OVERFLOW
:
4374 subcode
= MINUS_EXPR
;
4377 case IFN_MUL_OVERFLOW
:
4378 subcode
= MULT_EXPR
;
4384 if (subcode
!= ERROR_MARK
)
4386 tree arg0
= gimple_call_arg (stmt
, 0);
4387 tree arg1
= gimple_call_arg (stmt
, 1);
4388 tree type
= TREE_TYPE (arg0
);
4391 tree lhs
= gimple_call_lhs (stmt
);
4392 if (lhs
== NULL_TREE
)
4395 type
= TREE_TYPE (TREE_TYPE (lhs
));
4397 if (type
== NULL_TREE
)
4399 /* x = y + 0; x = y - 0; x = y * 0; */
4400 else if (integer_zerop (arg1
))
4401 result
= subcode
== MULT_EXPR
? integer_zero_node
: arg0
;
4402 /* x = 0 + y; x = 0 * y; */
4403 else if (subcode
!= MINUS_EXPR
&& integer_zerop (arg0
))
4404 result
= subcode
== MULT_EXPR
? integer_zero_node
: arg1
;
4406 else if (subcode
== MINUS_EXPR
&& operand_equal_p (arg0
, arg1
, 0))
4407 result
= integer_zero_node
;
4408 /* x = y * 1; x = 1 * y; */
4409 else if (subcode
== MULT_EXPR
&& integer_onep (arg1
))
4411 else if (subcode
== MULT_EXPR
&& integer_onep (arg0
))
4413 else if (TREE_CODE (arg0
) == INTEGER_CST
4414 && TREE_CODE (arg1
) == INTEGER_CST
)
4417 result
= int_const_binop (subcode
, fold_convert (type
, arg0
),
4418 fold_convert (type
, arg1
));
4420 result
= int_const_binop (subcode
, arg0
, arg1
);
4421 if (result
&& arith_overflowed_p (subcode
, type
, arg0
, arg1
))
4424 overflow
= build_one_cst (type
);
4431 if (result
== integer_zero_node
)
4432 result
= build_zero_cst (type
);
4433 else if (cplx_result
&& TREE_TYPE (result
) != type
)
4435 if (TREE_CODE (result
) == INTEGER_CST
)
4437 if (arith_overflowed_p (PLUS_EXPR
, type
, result
,
4439 overflow
= build_one_cst (type
);
4441 else if ((!TYPE_UNSIGNED (TREE_TYPE (result
))
4442 && TYPE_UNSIGNED (type
))
4443 || (TYPE_PRECISION (type
)
4444 < (TYPE_PRECISION (TREE_TYPE (result
))
4445 + (TYPE_UNSIGNED (TREE_TYPE (result
))
4446 && !TYPE_UNSIGNED (type
)))))
4449 result
= fold_convert (type
, result
);
4456 if (TREE_CODE (result
) == INTEGER_CST
&& TREE_OVERFLOW (result
))
4457 result
= drop_tree_overflow (result
);
4460 if (overflow
== NULL_TREE
)
4461 overflow
= build_zero_cst (TREE_TYPE (result
));
4462 tree ctype
= build_complex_type (TREE_TYPE (result
));
4463 if (TREE_CODE (result
) == INTEGER_CST
4464 && TREE_CODE (overflow
) == INTEGER_CST
)
4465 result
= build_complex (ctype
, result
, overflow
);
4467 result
= build2_loc (gimple_location (stmt
), COMPLEX_EXPR
,
4468 ctype
, result
, overflow
);
4470 if (!update_call_from_tree (gsi
, result
))
4471 gimplify_and_update_call_from_tree (gsi
, result
);
4480 /* Return true whether NAME has a use on STMT. */
4483 has_use_on_stmt (tree name
, gimple
*stmt
)
4485 imm_use_iterator iter
;
4486 use_operand_p use_p
;
4487 FOR_EACH_IMM_USE_FAST (use_p
, iter
, name
)
4488 if (USE_STMT (use_p
) == stmt
)
4493 /* Worker for fold_stmt_1 dispatch to pattern based folding with
4496 Replaces *GSI with the simplification result in RCODE and OPS
4497 and the associated statements in *SEQ. Does the replacement
4498 according to INPLACE and returns true if the operation succeeded. */
4501 replace_stmt_with_simplification (gimple_stmt_iterator
*gsi
,
4502 gimple_match_op
*res_op
,
4503 gimple_seq
*seq
, bool inplace
)
4505 gimple
*stmt
= gsi_stmt (*gsi
);
4506 tree
*ops
= res_op
->ops
;
4507 unsigned int num_ops
= res_op
->num_ops
;
4509 /* Play safe and do not allow abnormals to be mentioned in
4510 newly created statements. See also maybe_push_res_to_seq.
4511 As an exception allow such uses if there was a use of the
4512 same SSA name on the old stmt. */
4513 for (unsigned int i
= 0; i
< num_ops
; ++i
)
4514 if (TREE_CODE (ops
[i
]) == SSA_NAME
4515 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops
[i
])
4516 && !has_use_on_stmt (ops
[i
], stmt
))
4519 if (num_ops
> 0 && COMPARISON_CLASS_P (ops
[0]))
4520 for (unsigned int i
= 0; i
< 2; ++i
)
4521 if (TREE_CODE (TREE_OPERAND (ops
[0], i
)) == SSA_NAME
4522 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops
[0], i
))
4523 && !has_use_on_stmt (TREE_OPERAND (ops
[0], i
), stmt
))
4526 /* Don't insert new statements when INPLACE is true, even if we could
4527 reuse STMT for the final statement. */
4528 if (inplace
&& !gimple_seq_empty_p (*seq
))
4531 if (gcond
*cond_stmt
= dyn_cast
<gcond
*> (stmt
))
4533 gcc_assert (res_op
->code
.is_tree_code ());
4534 if (TREE_CODE_CLASS ((enum tree_code
) res_op
->code
) == tcc_comparison
4535 /* GIMPLE_CONDs condition may not throw. */
4536 && (!flag_exceptions
4537 || !cfun
->can_throw_non_call_exceptions
4538 || !operation_could_trap_p (res_op
->code
,
4539 FLOAT_TYPE_P (TREE_TYPE (ops
[0])),
4541 gimple_cond_set_condition (cond_stmt
, res_op
->code
, ops
[0], ops
[1]);
4542 else if (res_op
->code
== SSA_NAME
)
4543 gimple_cond_set_condition (cond_stmt
, NE_EXPR
, ops
[0],
4544 build_zero_cst (TREE_TYPE (ops
[0])));
4545 else if (res_op
->code
== INTEGER_CST
)
4547 if (integer_zerop (ops
[0]))
4548 gimple_cond_make_false (cond_stmt
);
4550 gimple_cond_make_true (cond_stmt
);
4554 tree res
= maybe_push_res_to_seq (res_op
, seq
);
4557 gimple_cond_set_condition (cond_stmt
, NE_EXPR
, res
,
4558 build_zero_cst (TREE_TYPE (res
)));
4562 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4564 fprintf (dump_file
, "gimple_simplified to ");
4565 if (!gimple_seq_empty_p (*seq
))
4566 print_gimple_seq (dump_file
, *seq
, 0, TDF_SLIM
);
4567 print_gimple_stmt (dump_file
, gsi_stmt (*gsi
),
4570 gsi_insert_seq_before (gsi
, *seq
, GSI_SAME_STMT
);
4573 else if (is_gimple_assign (stmt
)
4574 && res_op
->code
.is_tree_code ())
4577 || gimple_num_ops (stmt
) > get_gimple_rhs_num_ops (res_op
->code
))
4579 maybe_build_generic_op (res_op
);
4580 gimple_assign_set_rhs_with_ops (gsi
, res_op
->code
,
4581 res_op
->op_or_null (0),
4582 res_op
->op_or_null (1),
4583 res_op
->op_or_null (2));
4584 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4586 fprintf (dump_file
, "gimple_simplified to ");
4587 if (!gimple_seq_empty_p (*seq
))
4588 print_gimple_seq (dump_file
, *seq
, 0, TDF_SLIM
);
4589 print_gimple_stmt (dump_file
, gsi_stmt (*gsi
),
4592 gsi_insert_seq_before (gsi
, *seq
, GSI_SAME_STMT
);
4596 else if (res_op
->code
.is_fn_code ()
4597 && gimple_call_combined_fn (stmt
) == res_op
->code
)
4599 gcc_assert (num_ops
== gimple_call_num_args (stmt
));
4600 for (unsigned int i
= 0; i
< num_ops
; ++i
)
4601 gimple_call_set_arg (stmt
, i
, ops
[i
]);
4602 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4604 fprintf (dump_file
, "gimple_simplified to ");
4605 if (!gimple_seq_empty_p (*seq
))
4606 print_gimple_seq (dump_file
, *seq
, 0, TDF_SLIM
);
4607 print_gimple_stmt (dump_file
, gsi_stmt (*gsi
), 0, TDF_SLIM
);
4609 gsi_insert_seq_before (gsi
, *seq
, GSI_SAME_STMT
);
4614 if (gimple_has_lhs (stmt
))
4616 tree lhs
= gimple_get_lhs (stmt
);
4617 if (!maybe_push_res_to_seq (res_op
, seq
, lhs
))
4619 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4621 fprintf (dump_file
, "gimple_simplified to ");
4622 print_gimple_seq (dump_file
, *seq
, 0, TDF_SLIM
);
4624 gsi_replace_with_seq_vops (gsi
, *seq
);
4634 /* Canonicalize MEM_REFs invariant address operand after propagation. */
4637 maybe_canonicalize_mem_ref_addr (tree
*t
)
4641 if (TREE_CODE (*t
) == ADDR_EXPR
)
4642 t
= &TREE_OPERAND (*t
, 0);
4644 /* The C and C++ frontends use an ARRAY_REF for indexing with their
4645 generic vector extension. The actual vector referenced is
4646 view-converted to an array type for this purpose. If the index
4647 is constant the canonical representation in the middle-end is a
4648 BIT_FIELD_REF so re-write the former to the latter here. */
4649 if (TREE_CODE (*t
) == ARRAY_REF
4650 && TREE_CODE (TREE_OPERAND (*t
, 0)) == VIEW_CONVERT_EXPR
4651 && TREE_CODE (TREE_OPERAND (*t
, 1)) == INTEGER_CST
4652 && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t
, 0), 0))))
4654 tree vtype
= TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t
, 0), 0));
4655 if (VECTOR_TYPE_P (vtype
))
4657 tree low
= array_ref_low_bound (*t
);
4658 if (TREE_CODE (low
) == INTEGER_CST
)
4660 if (tree_int_cst_le (low
, TREE_OPERAND (*t
, 1)))
4662 widest_int idx
= wi::sub (wi::to_widest (TREE_OPERAND (*t
, 1)),
4663 wi::to_widest (low
));
4664 idx
= wi::mul (idx
, wi::to_widest
4665 (TYPE_SIZE (TREE_TYPE (*t
))));
4667 = wi::add (idx
, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t
))));
4668 if (wi::les_p (ext
, wi::to_widest (TYPE_SIZE (vtype
))))
4670 *t
= build3_loc (EXPR_LOCATION (*t
), BIT_FIELD_REF
,
4672 TREE_OPERAND (TREE_OPERAND (*t
, 0), 0),
4673 TYPE_SIZE (TREE_TYPE (*t
)),
4674 wide_int_to_tree (bitsizetype
, idx
));
4682 while (handled_component_p (*t
))
4683 t
= &TREE_OPERAND (*t
, 0);
4685 /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
4686 of invariant addresses into a SSA name MEM_REF address. */
4687 if (TREE_CODE (*t
) == MEM_REF
4688 || TREE_CODE (*t
) == TARGET_MEM_REF
)
4690 tree addr
= TREE_OPERAND (*t
, 0);
4691 if (TREE_CODE (addr
) == ADDR_EXPR
4692 && (TREE_CODE (TREE_OPERAND (addr
, 0)) == MEM_REF
4693 || handled_component_p (TREE_OPERAND (addr
, 0))))
4697 base
= get_addr_base_and_unit_offset (TREE_OPERAND (addr
, 0),
4702 TREE_OPERAND (*t
, 0) = build_fold_addr_expr (base
);
4703 TREE_OPERAND (*t
, 1) = int_const_binop (PLUS_EXPR
,
4704 TREE_OPERAND (*t
, 1),
4705 size_int (coffset
));
4708 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t
, 0)) == DEBUG_EXPR_DECL
4709 || is_gimple_mem_ref_addr (TREE_OPERAND (*t
, 0)));
4712 /* Canonicalize back MEM_REFs to plain reference trees if the object
4713 accessed is a decl that has the same access semantics as the MEM_REF. */
4714 if (TREE_CODE (*t
) == MEM_REF
4715 && TREE_CODE (TREE_OPERAND (*t
, 0)) == ADDR_EXPR
4716 && integer_zerop (TREE_OPERAND (*t
, 1))
4717 && MR_DEPENDENCE_CLIQUE (*t
) == 0)
4719 tree decl
= TREE_OPERAND (TREE_OPERAND (*t
, 0), 0);
4720 tree alias_type
= TREE_TYPE (TREE_OPERAND (*t
, 1));
4721 if (/* Same volatile qualification. */
4722 TREE_THIS_VOLATILE (*t
) == TREE_THIS_VOLATILE (decl
)
4723 /* Same TBAA behavior with -fstrict-aliasing. */
4724 && !TYPE_REF_CAN_ALIAS_ALL (alias_type
)
4725 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl
))
4726 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type
)))
4727 /* Same alignment. */
4728 && TYPE_ALIGN (TREE_TYPE (decl
)) == TYPE_ALIGN (TREE_TYPE (*t
))
4729 /* We have to look out here to not drop a required conversion
4730 from the rhs to the lhs if *t appears on the lhs or vice-versa
4731 if it appears on the rhs. Thus require strict type
4733 && types_compatible_p (TREE_TYPE (*t
), TREE_TYPE (decl
)))
4735 *t
= TREE_OPERAND (TREE_OPERAND (*t
, 0), 0);
4740 /* Canonicalize TARGET_MEM_REF in particular with respect to
4741 the indexes becoming constant. */
4742 else if (TREE_CODE (*t
) == TARGET_MEM_REF
)
4744 tree tem
= maybe_fold_tmr (*t
);
4755 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
4756 distinguishes both cases. */
4759 fold_stmt_1 (gimple_stmt_iterator
*gsi
, bool inplace
, tree (*valueize
) (tree
))
4761 bool changed
= false;
4762 gimple
*stmt
= gsi_stmt (*gsi
);
4763 bool nowarning
= gimple_no_warning_p (stmt
);
4765 fold_defer_overflow_warnings ();
4767 /* First do required canonicalization of [TARGET_]MEM_REF addresses
4769 ??? This shouldn't be done in generic folding but in the
4770 propagation helpers which also know whether an address was
4772 Also canonicalize operand order. */
4773 switch (gimple_code (stmt
))
4776 if (gimple_assign_rhs_class (stmt
) == GIMPLE_SINGLE_RHS
)
4778 tree
*rhs
= gimple_assign_rhs1_ptr (stmt
);
4779 if ((REFERENCE_CLASS_P (*rhs
)
4780 || TREE_CODE (*rhs
) == ADDR_EXPR
)
4781 && maybe_canonicalize_mem_ref_addr (rhs
))
4783 tree
*lhs
= gimple_assign_lhs_ptr (stmt
);
4784 if (REFERENCE_CLASS_P (*lhs
)
4785 && maybe_canonicalize_mem_ref_addr (lhs
))
4790 /* Canonicalize operand order. */
4791 enum tree_code code
= gimple_assign_rhs_code (stmt
);
4792 if (TREE_CODE_CLASS (code
) == tcc_comparison
4793 || commutative_tree_code (code
)
4794 || commutative_ternary_tree_code (code
))
4796 tree rhs1
= gimple_assign_rhs1 (stmt
);
4797 tree rhs2
= gimple_assign_rhs2 (stmt
);
4798 if (tree_swap_operands_p (rhs1
, rhs2
))
4800 gimple_assign_set_rhs1 (stmt
, rhs2
);
4801 gimple_assign_set_rhs2 (stmt
, rhs1
);
4802 if (TREE_CODE_CLASS (code
) == tcc_comparison
)
4803 gimple_assign_set_rhs_code (stmt
,
4804 swap_tree_comparison (code
));
4812 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
4814 tree
*arg
= gimple_call_arg_ptr (stmt
, i
);
4815 if (REFERENCE_CLASS_P (*arg
)
4816 && maybe_canonicalize_mem_ref_addr (arg
))
4819 tree
*lhs
= gimple_call_lhs_ptr (stmt
);
4821 && REFERENCE_CLASS_P (*lhs
)
4822 && maybe_canonicalize_mem_ref_addr (lhs
))
4828 gasm
*asm_stmt
= as_a
<gasm
*> (stmt
);
4829 for (i
= 0; i
< gimple_asm_noutputs (asm_stmt
); ++i
)
4831 tree link
= gimple_asm_output_op (asm_stmt
, i
);
4832 tree op
= TREE_VALUE (link
);
4833 if (REFERENCE_CLASS_P (op
)
4834 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link
)))
4837 for (i
= 0; i
< gimple_asm_ninputs (asm_stmt
); ++i
)
4839 tree link
= gimple_asm_input_op (asm_stmt
, i
);
4840 tree op
= TREE_VALUE (link
);
4841 if ((REFERENCE_CLASS_P (op
)
4842 || TREE_CODE (op
) == ADDR_EXPR
)
4843 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link
)))
4849 if (gimple_debug_bind_p (stmt
))
4851 tree
*val
= gimple_debug_bind_get_value_ptr (stmt
);
4853 && (REFERENCE_CLASS_P (*val
)
4854 || TREE_CODE (*val
) == ADDR_EXPR
)
4855 && maybe_canonicalize_mem_ref_addr (val
))
4861 /* Canonicalize operand order. */
4862 tree lhs
= gimple_cond_lhs (stmt
);
4863 tree rhs
= gimple_cond_rhs (stmt
);
4864 if (tree_swap_operands_p (lhs
, rhs
))
4866 gcond
*gc
= as_a
<gcond
*> (stmt
);
4867 gimple_cond_set_lhs (gc
, rhs
);
4868 gimple_cond_set_rhs (gc
, lhs
);
4869 gimple_cond_set_code (gc
,
4870 swap_tree_comparison (gimple_cond_code (gc
)));
4877 /* Dispatch to pattern-based folding. */
4879 || is_gimple_assign (stmt
)
4880 || gimple_code (stmt
) == GIMPLE_COND
)
4882 gimple_seq seq
= NULL
;
4883 gimple_match_op res_op
;
4884 if (gimple_simplify (stmt
, &res_op
, inplace
? NULL
: &seq
,
4885 valueize
, valueize
))
4887 if (replace_stmt_with_simplification (gsi
, &res_op
, &seq
, inplace
))
4890 gimple_seq_discard (seq
);
4894 stmt
= gsi_stmt (*gsi
);
4896 /* Fold the main computation performed by the statement. */
4897 switch (gimple_code (stmt
))
4901 /* Try to canonicalize for boolean-typed X the comparisons
4902 X == 0, X == 1, X != 0, and X != 1. */
4903 if (gimple_assign_rhs_code (stmt
) == EQ_EXPR
4904 || gimple_assign_rhs_code (stmt
) == NE_EXPR
)
4906 tree lhs
= gimple_assign_lhs (stmt
);
4907 tree op1
= gimple_assign_rhs1 (stmt
);
4908 tree op2
= gimple_assign_rhs2 (stmt
);
4909 tree type
= TREE_TYPE (op1
);
4911 /* Check whether the comparison operands are of the same boolean
4912 type as the result type is.
4913 Check that second operand is an integer-constant with value
4915 if (TREE_CODE (op2
) == INTEGER_CST
4916 && (integer_zerop (op2
) || integer_onep (op2
))
4917 && useless_type_conversion_p (TREE_TYPE (lhs
), type
))
4919 enum tree_code cmp_code
= gimple_assign_rhs_code (stmt
);
4920 bool is_logical_not
= false;
4922 /* X == 0 and X != 1 is a logical-not.of X
4923 X == 1 and X != 0 is X */
4924 if ((cmp_code
== EQ_EXPR
&& integer_zerop (op2
))
4925 || (cmp_code
== NE_EXPR
&& integer_onep (op2
)))
4926 is_logical_not
= true;
4928 if (is_logical_not
== false)
4929 gimple_assign_set_rhs_with_ops (gsi
, TREE_CODE (op1
), op1
);
4930 /* Only for one-bit precision typed X the transformation
4931 !X -> ~X is valied. */
4932 else if (TYPE_PRECISION (type
) == 1)
4933 gimple_assign_set_rhs_with_ops (gsi
, BIT_NOT_EXPR
, op1
);
4934 /* Otherwise we use !X -> X ^ 1. */
4936 gimple_assign_set_rhs_with_ops (gsi
, BIT_XOR_EXPR
, op1
,
4937 build_int_cst (type
, 1));
4943 unsigned old_num_ops
= gimple_num_ops (stmt
);
4944 tree lhs
= gimple_assign_lhs (stmt
);
4945 tree new_rhs
= fold_gimple_assign (gsi
);
4947 && !useless_type_conversion_p (TREE_TYPE (lhs
),
4948 TREE_TYPE (new_rhs
)))
4949 new_rhs
= fold_convert (TREE_TYPE (lhs
), new_rhs
);
4952 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs
)) < old_num_ops
))
4954 gimple_assign_set_rhs_from_tree (gsi
, new_rhs
);
4961 changed
|= gimple_fold_call (gsi
, inplace
);
4965 /* Fold *& in asm operands. */
4967 gasm
*asm_stmt
= as_a
<gasm
*> (stmt
);
4969 const char **oconstraints
;
4970 const char *constraint
;
4971 bool allows_mem
, allows_reg
;
4973 noutputs
= gimple_asm_noutputs (asm_stmt
);
4974 oconstraints
= XALLOCAVEC (const char *, noutputs
);
4976 for (i
= 0; i
< gimple_asm_noutputs (asm_stmt
); ++i
)
4978 tree link
= gimple_asm_output_op (asm_stmt
, i
);
4979 tree op
= TREE_VALUE (link
);
4981 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link
)));
4982 if (REFERENCE_CLASS_P (op
)
4983 && (op
= maybe_fold_reference (op
, true)) != NULL_TREE
)
4985 TREE_VALUE (link
) = op
;
4989 for (i
= 0; i
< gimple_asm_ninputs (asm_stmt
); ++i
)
4991 tree link
= gimple_asm_input_op (asm_stmt
, i
);
4992 tree op
= TREE_VALUE (link
);
4994 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link
)));
4995 parse_input_constraint (&constraint
, 0, 0, noutputs
, 0,
4996 oconstraints
, &allows_mem
, &allows_reg
);
4997 if (REFERENCE_CLASS_P (op
)
4998 && (op
= maybe_fold_reference (op
, !allows_reg
&& allows_mem
))
5001 TREE_VALUE (link
) = op
;
5009 if (gimple_debug_bind_p (stmt
))
5011 tree val
= gimple_debug_bind_get_value (stmt
);
5013 && REFERENCE_CLASS_P (val
))
5015 tree tem
= maybe_fold_reference (val
, false);
5018 gimple_debug_bind_set_value (stmt
, tem
);
5023 && TREE_CODE (val
) == ADDR_EXPR
)
5025 tree ref
= TREE_OPERAND (val
, 0);
5026 tree tem
= maybe_fold_reference (ref
, false);
5029 tem
= build_fold_addr_expr_with_type (tem
, TREE_TYPE (val
));
5030 gimple_debug_bind_set_value (stmt
, tem
);
5039 greturn
*ret_stmt
= as_a
<greturn
*> (stmt
);
5040 tree ret
= gimple_return_retval(ret_stmt
);
5042 if (ret
&& TREE_CODE (ret
) == SSA_NAME
&& valueize
)
5044 tree val
= valueize (ret
);
5045 if (val
&& val
!= ret
5046 && may_propagate_copy (ret
, val
))
5048 gimple_return_set_retval (ret_stmt
, val
);
5058 stmt
= gsi_stmt (*gsi
);
5060 /* Fold *& on the lhs. */
5061 if (gimple_has_lhs (stmt
))
5063 tree lhs
= gimple_get_lhs (stmt
);
5064 if (lhs
&& REFERENCE_CLASS_P (lhs
))
5066 tree new_lhs
= maybe_fold_reference (lhs
, true);
5069 gimple_set_lhs (stmt
, new_lhs
);
5075 fold_undefer_overflow_warnings (changed
&& !nowarning
, stmt
, 0);
5079 /* Valueziation callback that ends up not following SSA edges. */
5082 no_follow_ssa_edges (tree
)
5087 /* Valueization callback that ends up following single-use SSA edges only. */
5090 follow_single_use_edges (tree val
)
5092 if (TREE_CODE (val
) == SSA_NAME
5093 && !has_single_use (val
))
5098 /* Valueization callback that follows all SSA edges. */
5101 follow_all_ssa_edges (tree val
)
5106 /* Fold the statement pointed to by GSI. In some cases, this function may
5107 replace the whole statement with a new one. Returns true iff folding
5109 The statement pointed to by GSI should be in valid gimple form but may
5110 be in unfolded state as resulting from for example constant propagation
5111 which can produce *&x = 0. */
5114 fold_stmt (gimple_stmt_iterator
*gsi
)
5116 return fold_stmt_1 (gsi
, false, no_follow_ssa_edges
);
5120 fold_stmt (gimple_stmt_iterator
*gsi
, tree (*valueize
) (tree
))
5122 return fold_stmt_1 (gsi
, false, valueize
);
5125 /* Perform the minimal folding on statement *GSI. Only operations like
5126 *&x created by constant propagation are handled. The statement cannot
5127 be replaced with a new one. Return true if the statement was
5128 changed, false otherwise.
5129 The statement *GSI should be in valid gimple form but may
5130 be in unfolded state as resulting from for example constant propagation
5131 which can produce *&x = 0. */
5134 fold_stmt_inplace (gimple_stmt_iterator
*gsi
)
5136 gimple
*stmt
= gsi_stmt (*gsi
);
5137 bool changed
= fold_stmt_1 (gsi
, true, no_follow_ssa_edges
);
5138 gcc_assert (gsi_stmt (*gsi
) == stmt
);
5142 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
5143 if EXPR is null or we don't know how.
5144 If non-null, the result always has boolean type. */
5147 canonicalize_bool (tree expr
, bool invert
)
5153 if (integer_nonzerop (expr
))
5154 return boolean_false_node
;
5155 else if (integer_zerop (expr
))
5156 return boolean_true_node
;
5157 else if (TREE_CODE (expr
) == SSA_NAME
)
5158 return fold_build2 (EQ_EXPR
, boolean_type_node
, expr
,
5159 build_int_cst (TREE_TYPE (expr
), 0));
5160 else if (COMPARISON_CLASS_P (expr
))
5161 return fold_build2 (invert_tree_comparison (TREE_CODE (expr
), false),
5163 TREE_OPERAND (expr
, 0),
5164 TREE_OPERAND (expr
, 1));
5170 if (TREE_CODE (TREE_TYPE (expr
)) == BOOLEAN_TYPE
)
5172 if (integer_nonzerop (expr
))
5173 return boolean_true_node
;
5174 else if (integer_zerop (expr
))
5175 return boolean_false_node
;
5176 else if (TREE_CODE (expr
) == SSA_NAME
)
5177 return fold_build2 (NE_EXPR
, boolean_type_node
, expr
,
5178 build_int_cst (TREE_TYPE (expr
), 0));
5179 else if (COMPARISON_CLASS_P (expr
))
5180 return fold_build2 (TREE_CODE (expr
),
5182 TREE_OPERAND (expr
, 0),
5183 TREE_OPERAND (expr
, 1));
5189 /* Check to see if a boolean expression EXPR is logically equivalent to the
5190 comparison (OP1 CODE OP2). Check for various identities involving
5194 same_bool_comparison_p (const_tree expr
, enum tree_code code
,
5195 const_tree op1
, const_tree op2
)
5199 /* The obvious case. */
5200 if (TREE_CODE (expr
) == code
5201 && operand_equal_p (TREE_OPERAND (expr
, 0), op1
, 0)
5202 && operand_equal_p (TREE_OPERAND (expr
, 1), op2
, 0))
5205 /* Check for comparing (name, name != 0) and the case where expr
5206 is an SSA_NAME with a definition matching the comparison. */
5207 if (TREE_CODE (expr
) == SSA_NAME
5208 && TREE_CODE (TREE_TYPE (expr
)) == BOOLEAN_TYPE
)
5210 if (operand_equal_p (expr
, op1
, 0))
5211 return ((code
== NE_EXPR
&& integer_zerop (op2
))
5212 || (code
== EQ_EXPR
&& integer_nonzerop (op2
)));
5213 s
= SSA_NAME_DEF_STMT (expr
);
5214 if (is_gimple_assign (s
)
5215 && gimple_assign_rhs_code (s
) == code
5216 && operand_equal_p (gimple_assign_rhs1 (s
), op1
, 0)
5217 && operand_equal_p (gimple_assign_rhs2 (s
), op2
, 0))
5221 /* If op1 is of the form (name != 0) or (name == 0), and the definition
5222 of name is a comparison, recurse. */
5223 if (TREE_CODE (op1
) == SSA_NAME
5224 && TREE_CODE (TREE_TYPE (op1
)) == BOOLEAN_TYPE
)
5226 s
= SSA_NAME_DEF_STMT (op1
);
5227 if (is_gimple_assign (s
)
5228 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
)
5230 enum tree_code c
= gimple_assign_rhs_code (s
);
5231 if ((c
== NE_EXPR
&& integer_zerop (op2
))
5232 || (c
== EQ_EXPR
&& integer_nonzerop (op2
)))
5233 return same_bool_comparison_p (expr
, c
,
5234 gimple_assign_rhs1 (s
),
5235 gimple_assign_rhs2 (s
));
5236 if ((c
== EQ_EXPR
&& integer_zerop (op2
))
5237 || (c
== NE_EXPR
&& integer_nonzerop (op2
)))
5238 return same_bool_comparison_p (expr
,
5239 invert_tree_comparison (c
, false),
5240 gimple_assign_rhs1 (s
),
5241 gimple_assign_rhs2 (s
));
5247 /* Check to see if two boolean expressions OP1 and OP2 are logically
5251 same_bool_result_p (const_tree op1
, const_tree op2
)
5253 /* Simple cases first. */
5254 if (operand_equal_p (op1
, op2
, 0))
5257 /* Check the cases where at least one of the operands is a comparison.
5258 These are a bit smarter than operand_equal_p in that they apply some
5259 identifies on SSA_NAMEs. */
5260 if (COMPARISON_CLASS_P (op2
)
5261 && same_bool_comparison_p (op1
, TREE_CODE (op2
),
5262 TREE_OPERAND (op2
, 0),
5263 TREE_OPERAND (op2
, 1)))
5265 if (COMPARISON_CLASS_P (op1
)
5266 && same_bool_comparison_p (op2
, TREE_CODE (op1
),
5267 TREE_OPERAND (op1
, 0),
5268 TREE_OPERAND (op1
, 1)))
5275 /* Forward declarations for some mutually recursive functions. */
5278 and_comparisons_1 (enum tree_code code1
, tree op1a
, tree op1b
,
5279 enum tree_code code2
, tree op2a
, tree op2b
);
5281 and_var_with_comparison (tree var
, bool invert
,
5282 enum tree_code code2
, tree op2a
, tree op2b
);
5284 and_var_with_comparison_1 (gimple
*stmt
,
5285 enum tree_code code2
, tree op2a
, tree op2b
);
5287 or_comparisons_1 (enum tree_code code1
, tree op1a
, tree op1b
,
5288 enum tree_code code2
, tree op2a
, tree op2b
);
5290 or_var_with_comparison (tree var
, bool invert
,
5291 enum tree_code code2
, tree op2a
, tree op2b
);
5293 or_var_with_comparison_1 (gimple
*stmt
,
5294 enum tree_code code2
, tree op2a
, tree op2b
);
5296 /* Helper function for and_comparisons_1: try to simplify the AND of the
5297 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5298 If INVERT is true, invert the value of the VAR before doing the AND.
5299 Return NULL_EXPR if we can't simplify this to a single expression. */
5302 and_var_with_comparison (tree var
, bool invert
,
5303 enum tree_code code2
, tree op2a
, tree op2b
)
5306 gimple
*stmt
= SSA_NAME_DEF_STMT (var
);
5308 /* We can only deal with variables whose definitions are assignments. */
5309 if (!is_gimple_assign (stmt
))
5312 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5313 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
5314 Then we only have to consider the simpler non-inverted cases. */
5316 t
= or_var_with_comparison_1 (stmt
,
5317 invert_tree_comparison (code2
, false),
5320 t
= and_var_with_comparison_1 (stmt
, code2
, op2a
, op2b
);
5321 return canonicalize_bool (t
, invert
);
5324 /* Try to simplify the AND of the ssa variable defined by the assignment
5325 STMT with the comparison specified by (OP2A CODE2 OP2B).
5326 Return NULL_EXPR if we can't simplify this to a single expression. */
5329 and_var_with_comparison_1 (gimple
*stmt
,
5330 enum tree_code code2
, tree op2a
, tree op2b
)
5332 tree var
= gimple_assign_lhs (stmt
);
5333 tree true_test_var
= NULL_TREE
;
5334 tree false_test_var
= NULL_TREE
;
5335 enum tree_code innercode
= gimple_assign_rhs_code (stmt
);
5337 /* Check for identities like (var AND (var == 0)) => false. */
5338 if (TREE_CODE (op2a
) == SSA_NAME
5339 && TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
)
5341 if ((code2
== NE_EXPR
&& integer_zerop (op2b
))
5342 || (code2
== EQ_EXPR
&& integer_nonzerop (op2b
)))
5344 true_test_var
= op2a
;
5345 if (var
== true_test_var
)
5348 else if ((code2
== EQ_EXPR
&& integer_zerop (op2b
))
5349 || (code2
== NE_EXPR
&& integer_nonzerop (op2b
)))
5351 false_test_var
= op2a
;
5352 if (var
== false_test_var
)
5353 return boolean_false_node
;
5357 /* If the definition is a comparison, recurse on it. */
5358 if (TREE_CODE_CLASS (innercode
) == tcc_comparison
)
5360 tree t
= and_comparisons_1 (innercode
,
5361 gimple_assign_rhs1 (stmt
),
5362 gimple_assign_rhs2 (stmt
),
5370 /* If the definition is an AND or OR expression, we may be able to
5371 simplify by reassociating. */
5372 if (TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
5373 && (innercode
== BIT_AND_EXPR
|| innercode
== BIT_IOR_EXPR
))
5375 tree inner1
= gimple_assign_rhs1 (stmt
);
5376 tree inner2
= gimple_assign_rhs2 (stmt
);
5379 tree partial
= NULL_TREE
;
5380 bool is_and
= (innercode
== BIT_AND_EXPR
);
5382 /* Check for boolean identities that don't require recursive examination
5384 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
5385 inner1 AND (inner1 OR inner2) => inner1
5386 !inner1 AND (inner1 AND inner2) => false
5387 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
5388 Likewise for similar cases involving inner2. */
5389 if (inner1
== true_test_var
)
5390 return (is_and
? var
: inner1
);
5391 else if (inner2
== true_test_var
)
5392 return (is_and
? var
: inner2
);
5393 else if (inner1
== false_test_var
)
5395 ? boolean_false_node
5396 : and_var_with_comparison (inner2
, false, code2
, op2a
, op2b
));
5397 else if (inner2
== false_test_var
)
5399 ? boolean_false_node
5400 : and_var_with_comparison (inner1
, false, code2
, op2a
, op2b
));
5402 /* Next, redistribute/reassociate the AND across the inner tests.
5403 Compute the first partial result, (inner1 AND (op2a code op2b)) */
5404 if (TREE_CODE (inner1
) == SSA_NAME
5405 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner1
))
5406 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
5407 && (t
= maybe_fold_and_comparisons (gimple_assign_rhs_code (s
),
5408 gimple_assign_rhs1 (s
),
5409 gimple_assign_rhs2 (s
),
5410 code2
, op2a
, op2b
)))
5412 /* Handle the AND case, where we are reassociating:
5413 (inner1 AND inner2) AND (op2a code2 op2b)
5415 If the partial result t is a constant, we win. Otherwise
5416 continue on to try reassociating with the other inner test. */
5419 if (integer_onep (t
))
5421 else if (integer_zerop (t
))
5422 return boolean_false_node
;
5425 /* Handle the OR case, where we are redistributing:
5426 (inner1 OR inner2) AND (op2a code2 op2b)
5427 => (t OR (inner2 AND (op2a code2 op2b))) */
5428 else if (integer_onep (t
))
5429 return boolean_true_node
;
5431 /* Save partial result for later. */
5435 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
5436 if (TREE_CODE (inner2
) == SSA_NAME
5437 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner2
))
5438 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
5439 && (t
= maybe_fold_and_comparisons (gimple_assign_rhs_code (s
),
5440 gimple_assign_rhs1 (s
),
5441 gimple_assign_rhs2 (s
),
5442 code2
, op2a
, op2b
)))
5444 /* Handle the AND case, where we are reassociating:
5445 (inner1 AND inner2) AND (op2a code2 op2b)
5446 => (inner1 AND t) */
5449 if (integer_onep (t
))
5451 else if (integer_zerop (t
))
5452 return boolean_false_node
;
5453 /* If both are the same, we can apply the identity
5455 else if (partial
&& same_bool_result_p (t
, partial
))
5459 /* Handle the OR case. where we are redistributing:
5460 (inner1 OR inner2) AND (op2a code2 op2b)
5461 => (t OR (inner1 AND (op2a code2 op2b)))
5462 => (t OR partial) */
5465 if (integer_onep (t
))
5466 return boolean_true_node
;
5469 /* We already got a simplification for the other
5470 operand to the redistributed OR expression. The
5471 interesting case is when at least one is false.
5472 Or, if both are the same, we can apply the identity
5474 if (integer_zerop (partial
))
5476 else if (integer_zerop (t
))
5478 else if (same_bool_result_p (t
, partial
))
5487 /* Try to simplify the AND of two comparisons defined by
5488 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5489 If this can be done without constructing an intermediate value,
5490 return the resulting tree; otherwise NULL_TREE is returned.
5491 This function is deliberately asymmetric as it recurses on SSA_DEFs
5492 in the first comparison but not the second. */
5495 and_comparisons_1 (enum tree_code code1
, tree op1a
, tree op1b
,
5496 enum tree_code code2
, tree op2a
, tree op2b
)
5498 tree truth_type
= truth_type_for (TREE_TYPE (op1a
));
5500 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
5501 if (operand_equal_p (op1a
, op2a
, 0)
5502 && operand_equal_p (op1b
, op2b
, 0))
5504 /* Result will be either NULL_TREE, or a combined comparison. */
5505 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
5506 TRUTH_ANDIF_EXPR
, code1
, code2
,
5507 truth_type
, op1a
, op1b
);
5512 /* Likewise the swapped case of the above. */
5513 if (operand_equal_p (op1a
, op2b
, 0)
5514 && operand_equal_p (op1b
, op2a
, 0))
5516 /* Result will be either NULL_TREE, or a combined comparison. */
5517 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
5518 TRUTH_ANDIF_EXPR
, code1
,
5519 swap_tree_comparison (code2
),
5520 truth_type
, op1a
, op1b
);
5525 /* If both comparisons are of the same value against constants, we might
5526 be able to merge them. */
5527 if (operand_equal_p (op1a
, op2a
, 0)
5528 && TREE_CODE (op1b
) == INTEGER_CST
5529 && TREE_CODE (op2b
) == INTEGER_CST
)
5531 int cmp
= tree_int_cst_compare (op1b
, op2b
);
5533 /* If we have (op1a == op1b), we should either be able to
5534 return that or FALSE, depending on whether the constant op1b
5535 also satisfies the other comparison against op2b. */
5536 if (code1
== EQ_EXPR
)
5542 case EQ_EXPR
: val
= (cmp
== 0); break;
5543 case NE_EXPR
: val
= (cmp
!= 0); break;
5544 case LT_EXPR
: val
= (cmp
< 0); break;
5545 case GT_EXPR
: val
= (cmp
> 0); break;
5546 case LE_EXPR
: val
= (cmp
<= 0); break;
5547 case GE_EXPR
: val
= (cmp
>= 0); break;
5548 default: done
= false;
5553 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
5555 return boolean_false_node
;
5558 /* Likewise if the second comparison is an == comparison. */
5559 else if (code2
== EQ_EXPR
)
5565 case EQ_EXPR
: val
= (cmp
== 0); break;
5566 case NE_EXPR
: val
= (cmp
!= 0); break;
5567 case LT_EXPR
: val
= (cmp
> 0); break;
5568 case GT_EXPR
: val
= (cmp
< 0); break;
5569 case LE_EXPR
: val
= (cmp
>= 0); break;
5570 case GE_EXPR
: val
= (cmp
<= 0); break;
5571 default: done
= false;
5576 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
5578 return boolean_false_node
;
5582 /* Same business with inequality tests. */
5583 else if (code1
== NE_EXPR
)
5588 case EQ_EXPR
: val
= (cmp
!= 0); break;
5589 case NE_EXPR
: val
= (cmp
== 0); break;
5590 case LT_EXPR
: val
= (cmp
>= 0); break;
5591 case GT_EXPR
: val
= (cmp
<= 0); break;
5592 case LE_EXPR
: val
= (cmp
> 0); break;
5593 case GE_EXPR
: val
= (cmp
< 0); break;
5598 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
5600 else if (code2
== NE_EXPR
)
5605 case EQ_EXPR
: val
= (cmp
== 0); break;
5606 case NE_EXPR
: val
= (cmp
!= 0); break;
5607 case LT_EXPR
: val
= (cmp
<= 0); break;
5608 case GT_EXPR
: val
= (cmp
>= 0); break;
5609 case LE_EXPR
: val
= (cmp
< 0); break;
5610 case GE_EXPR
: val
= (cmp
> 0); break;
5615 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
5618 /* Chose the more restrictive of two < or <= comparisons. */
5619 else if ((code1
== LT_EXPR
|| code1
== LE_EXPR
)
5620 && (code2
== LT_EXPR
|| code2
== LE_EXPR
))
5622 if ((cmp
< 0) || (cmp
== 0 && code1
== LT_EXPR
))
5623 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
5625 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
5628 /* Likewise chose the more restrictive of two > or >= comparisons. */
5629 else if ((code1
== GT_EXPR
|| code1
== GE_EXPR
)
5630 && (code2
== GT_EXPR
|| code2
== GE_EXPR
))
5632 if ((cmp
> 0) || (cmp
== 0 && code1
== GT_EXPR
))
5633 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
5635 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
5638 /* Check for singleton ranges. */
5640 && ((code1
== LE_EXPR
&& code2
== GE_EXPR
)
5641 || (code1
== GE_EXPR
&& code2
== LE_EXPR
)))
5642 return fold_build2 (EQ_EXPR
, boolean_type_node
, op1a
, op2b
);
5644 /* Check for disjoint ranges. */
5646 && (code1
== LT_EXPR
|| code1
== LE_EXPR
)
5647 && (code2
== GT_EXPR
|| code2
== GE_EXPR
))
5648 return boolean_false_node
;
5650 && (code1
== GT_EXPR
|| code1
== GE_EXPR
)
5651 && (code2
== LT_EXPR
|| code2
== LE_EXPR
))
5652 return boolean_false_node
;
5655 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
5656 NAME's definition is a truth value. See if there are any simplifications
5657 that can be done against the NAME's definition. */
5658 if (TREE_CODE (op1a
) == SSA_NAME
5659 && (code1
== NE_EXPR
|| code1
== EQ_EXPR
)
5660 && (integer_zerop (op1b
) || integer_onep (op1b
)))
5662 bool invert
= ((code1
== EQ_EXPR
&& integer_zerop (op1b
))
5663 || (code1
== NE_EXPR
&& integer_onep (op1b
)));
5664 gimple
*stmt
= SSA_NAME_DEF_STMT (op1a
);
5665 switch (gimple_code (stmt
))
5668 /* Try to simplify by copy-propagating the definition. */
5669 return and_var_with_comparison (op1a
, invert
, code2
, op2a
, op2b
);
5672 /* If every argument to the PHI produces the same result when
5673 ANDed with the second comparison, we win.
5674 Do not do this unless the type is bool since we need a bool
5675 result here anyway. */
5676 if (TREE_CODE (TREE_TYPE (op1a
)) == BOOLEAN_TYPE
)
5678 tree result
= NULL_TREE
;
5680 for (i
= 0; i
< gimple_phi_num_args (stmt
); i
++)
5682 tree arg
= gimple_phi_arg_def (stmt
, i
);
5684 /* If this PHI has itself as an argument, ignore it.
5685 If all the other args produce the same result,
5687 if (arg
== gimple_phi_result (stmt
))
5689 else if (TREE_CODE (arg
) == INTEGER_CST
)
5691 if (invert
? integer_nonzerop (arg
) : integer_zerop (arg
))
5694 result
= boolean_false_node
;
5695 else if (!integer_zerop (result
))
5699 result
= fold_build2 (code2
, boolean_type_node
,
5701 else if (!same_bool_comparison_p (result
,
5705 else if (TREE_CODE (arg
) == SSA_NAME
5706 && !SSA_NAME_IS_DEFAULT_DEF (arg
))
5709 gimple
*def_stmt
= SSA_NAME_DEF_STMT (arg
);
5710 /* In simple cases we can look through PHI nodes,
5711 but we have to be careful with loops.
5713 if (! dom_info_available_p (CDI_DOMINATORS
)
5714 || gimple_bb (def_stmt
) == gimple_bb (stmt
)
5715 || dominated_by_p (CDI_DOMINATORS
,
5716 gimple_bb (def_stmt
),
5719 temp
= and_var_with_comparison (arg
, invert
, code2
,
5725 else if (!same_bool_result_p (result
, temp
))
5741 /* Try to simplify the AND of two comparisons, specified by
5742 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
5743 If this can be simplified to a single expression (without requiring
5744 introducing more SSA variables to hold intermediate values),
5745 return the resulting tree. Otherwise return NULL_TREE.
5746 If the result expression is non-null, it has boolean type. */
5749 maybe_fold_and_comparisons (enum tree_code code1
, tree op1a
, tree op1b
,
5750 enum tree_code code2
, tree op2a
, tree op2b
)
5752 tree t
= and_comparisons_1 (code1
, op1a
, op1b
, code2
, op2a
, op2b
);
5756 return and_comparisons_1 (code2
, op2a
, op2b
, code1
, op1a
, op1b
);
5759 /* Helper function for or_comparisons_1: try to simplify the OR of the
5760 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5761 If INVERT is true, invert the value of VAR before doing the OR.
5762 Return NULL_EXPR if we can't simplify this to a single expression. */
5765 or_var_with_comparison (tree var
, bool invert
,
5766 enum tree_code code2
, tree op2a
, tree op2b
)
5769 gimple
*stmt
= SSA_NAME_DEF_STMT (var
);
5771 /* We can only deal with variables whose definitions are assignments. */
5772 if (!is_gimple_assign (stmt
))
5775 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5776 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
5777 Then we only have to consider the simpler non-inverted cases. */
5779 t
= and_var_with_comparison_1 (stmt
,
5780 invert_tree_comparison (code2
, false),
5783 t
= or_var_with_comparison_1 (stmt
, code2
, op2a
, op2b
);
5784 return canonicalize_bool (t
, invert
);
5787 /* Try to simplify the OR of the ssa variable defined by the assignment
5788 STMT with the comparison specified by (OP2A CODE2 OP2B).
5789 Return NULL_EXPR if we can't simplify this to a single expression. */
5792 or_var_with_comparison_1 (gimple
*stmt
,
5793 enum tree_code code2
, tree op2a
, tree op2b
)
5795 tree var
= gimple_assign_lhs (stmt
);
5796 tree true_test_var
= NULL_TREE
;
5797 tree false_test_var
= NULL_TREE
;
5798 enum tree_code innercode
= gimple_assign_rhs_code (stmt
);
5800 /* Check for identities like (var OR (var != 0)) => true . */
5801 if (TREE_CODE (op2a
) == SSA_NAME
5802 && TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
)
5804 if ((code2
== NE_EXPR
&& integer_zerop (op2b
))
5805 || (code2
== EQ_EXPR
&& integer_nonzerop (op2b
)))
5807 true_test_var
= op2a
;
5808 if (var
== true_test_var
)
5811 else if ((code2
== EQ_EXPR
&& integer_zerop (op2b
))
5812 || (code2
== NE_EXPR
&& integer_nonzerop (op2b
)))
5814 false_test_var
= op2a
;
5815 if (var
== false_test_var
)
5816 return boolean_true_node
;
5820 /* If the definition is a comparison, recurse on it. */
5821 if (TREE_CODE_CLASS (innercode
) == tcc_comparison
)
5823 tree t
= or_comparisons_1 (innercode
,
5824 gimple_assign_rhs1 (stmt
),
5825 gimple_assign_rhs2 (stmt
),
5833 /* If the definition is an AND or OR expression, we may be able to
5834 simplify by reassociating. */
5835 if (TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
5836 && (innercode
== BIT_AND_EXPR
|| innercode
== BIT_IOR_EXPR
))
5838 tree inner1
= gimple_assign_rhs1 (stmt
);
5839 tree inner2
= gimple_assign_rhs2 (stmt
);
5842 tree partial
= NULL_TREE
;
5843 bool is_or
= (innercode
== BIT_IOR_EXPR
);
5845 /* Check for boolean identities that don't require recursive examination
5847 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
5848 inner1 OR (inner1 AND inner2) => inner1
5849 !inner1 OR (inner1 OR inner2) => true
5850 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
5852 if (inner1
== true_test_var
)
5853 return (is_or
? var
: inner1
);
5854 else if (inner2
== true_test_var
)
5855 return (is_or
? var
: inner2
);
5856 else if (inner1
== false_test_var
)
5859 : or_var_with_comparison (inner2
, false, code2
, op2a
, op2b
));
5860 else if (inner2
== false_test_var
)
5863 : or_var_with_comparison (inner1
, false, code2
, op2a
, op2b
));
5865 /* Next, redistribute/reassociate the OR across the inner tests.
5866 Compute the first partial result, (inner1 OR (op2a code op2b)) */
5867 if (TREE_CODE (inner1
) == SSA_NAME
5868 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner1
))
5869 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
5870 && (t
= maybe_fold_or_comparisons (gimple_assign_rhs_code (s
),
5871 gimple_assign_rhs1 (s
),
5872 gimple_assign_rhs2 (s
),
5873 code2
, op2a
, op2b
)))
5875 /* Handle the OR case, where we are reassociating:
5876 (inner1 OR inner2) OR (op2a code2 op2b)
5878 If the partial result t is a constant, we win. Otherwise
5879 continue on to try reassociating with the other inner test. */
5882 if (integer_onep (t
))
5883 return boolean_true_node
;
5884 else if (integer_zerop (t
))
5888 /* Handle the AND case, where we are redistributing:
5889 (inner1 AND inner2) OR (op2a code2 op2b)
5890 => (t AND (inner2 OR (op2a code op2b))) */
5891 else if (integer_zerop (t
))
5892 return boolean_false_node
;
5894 /* Save partial result for later. */
5898 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
5899 if (TREE_CODE (inner2
) == SSA_NAME
5900 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner2
))
5901 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
5902 && (t
= maybe_fold_or_comparisons (gimple_assign_rhs_code (s
),
5903 gimple_assign_rhs1 (s
),
5904 gimple_assign_rhs2 (s
),
5905 code2
, op2a
, op2b
)))
5907 /* Handle the OR case, where we are reassociating:
5908 (inner1 OR inner2) OR (op2a code2 op2b)
5910 => (t OR partial) */
5913 if (integer_zerop (t
))
5915 else if (integer_onep (t
))
5916 return boolean_true_node
;
5917 /* If both are the same, we can apply the identity
5919 else if (partial
&& same_bool_result_p (t
, partial
))
5923 /* Handle the AND case, where we are redistributing:
5924 (inner1 AND inner2) OR (op2a code2 op2b)
5925 => (t AND (inner1 OR (op2a code2 op2b)))
5926 => (t AND partial) */
5929 if (integer_zerop (t
))
5930 return boolean_false_node
;
5933 /* We already got a simplification for the other
5934 operand to the redistributed AND expression. The
5935 interesting case is when at least one is true.
5936 Or, if both are the same, we can apply the identity
5938 if (integer_onep (partial
))
5940 else if (integer_onep (t
))
5942 else if (same_bool_result_p (t
, partial
))
5951 /* Try to simplify the OR of two comparisons defined by
5952 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5953 If this can be done without constructing an intermediate value,
5954 return the resulting tree; otherwise NULL_TREE is returned.
5955 This function is deliberately asymmetric as it recurses on SSA_DEFs
5956 in the first comparison but not the second. */
5959 or_comparisons_1 (enum tree_code code1
, tree op1a
, tree op1b
,
5960 enum tree_code code2
, tree op2a
, tree op2b
)
5962 tree truth_type
= truth_type_for (TREE_TYPE (op1a
));
5964 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
5965 if (operand_equal_p (op1a
, op2a
, 0)
5966 && operand_equal_p (op1b
, op2b
, 0))
5968 /* Result will be either NULL_TREE, or a combined comparison. */
5969 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
5970 TRUTH_ORIF_EXPR
, code1
, code2
,
5971 truth_type
, op1a
, op1b
);
5976 /* Likewise the swapped case of the above. */
5977 if (operand_equal_p (op1a
, op2b
, 0)
5978 && operand_equal_p (op1b
, op2a
, 0))
5980 /* Result will be either NULL_TREE, or a combined comparison. */
5981 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
5982 TRUTH_ORIF_EXPR
, code1
,
5983 swap_tree_comparison (code2
),
5984 truth_type
, op1a
, op1b
);
5989 /* If both comparisons are of the same value against constants, we might
5990 be able to merge them. */
5991 if (operand_equal_p (op1a
, op2a
, 0)
5992 && TREE_CODE (op1b
) == INTEGER_CST
5993 && TREE_CODE (op2b
) == INTEGER_CST
)
5995 int cmp
= tree_int_cst_compare (op1b
, op2b
);
5997 /* If we have (op1a != op1b), we should either be able to
5998 return that or TRUE, depending on whether the constant op1b
5999 also satisfies the other comparison against op2b. */
6000 if (code1
== NE_EXPR
)
6006 case EQ_EXPR
: val
= (cmp
== 0); break;
6007 case NE_EXPR
: val
= (cmp
!= 0); break;
6008 case LT_EXPR
: val
= (cmp
< 0); break;
6009 case GT_EXPR
: val
= (cmp
> 0); break;
6010 case LE_EXPR
: val
= (cmp
<= 0); break;
6011 case GE_EXPR
: val
= (cmp
>= 0); break;
6012 default: done
= false;
6017 return boolean_true_node
;
6019 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
6022 /* Likewise if the second comparison is a != comparison. */
6023 else if (code2
== NE_EXPR
)
6029 case EQ_EXPR
: val
= (cmp
== 0); break;
6030 case NE_EXPR
: val
= (cmp
!= 0); break;
6031 case LT_EXPR
: val
= (cmp
> 0); break;
6032 case GT_EXPR
: val
= (cmp
< 0); break;
6033 case LE_EXPR
: val
= (cmp
>= 0); break;
6034 case GE_EXPR
: val
= (cmp
<= 0); break;
6035 default: done
= false;
6040 return boolean_true_node
;
6042 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
6046 /* See if an equality test is redundant with the other comparison. */
6047 else if (code1
== EQ_EXPR
)
6052 case EQ_EXPR
: val
= (cmp
== 0); break;
6053 case NE_EXPR
: val
= (cmp
!= 0); break;
6054 case LT_EXPR
: val
= (cmp
< 0); break;
6055 case GT_EXPR
: val
= (cmp
> 0); break;
6056 case LE_EXPR
: val
= (cmp
<= 0); break;
6057 case GE_EXPR
: val
= (cmp
>= 0); break;
6062 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
6064 else if (code2
== EQ_EXPR
)
6069 case EQ_EXPR
: val
= (cmp
== 0); break;
6070 case NE_EXPR
: val
= (cmp
!= 0); break;
6071 case LT_EXPR
: val
= (cmp
> 0); break;
6072 case GT_EXPR
: val
= (cmp
< 0); break;
6073 case LE_EXPR
: val
= (cmp
>= 0); break;
6074 case GE_EXPR
: val
= (cmp
<= 0); break;
6079 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
6082 /* Chose the less restrictive of two < or <= comparisons. */
6083 else if ((code1
== LT_EXPR
|| code1
== LE_EXPR
)
6084 && (code2
== LT_EXPR
|| code2
== LE_EXPR
))
6086 if ((cmp
< 0) || (cmp
== 0 && code1
== LT_EXPR
))
6087 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
6089 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
6092 /* Likewise chose the less restrictive of two > or >= comparisons. */
6093 else if ((code1
== GT_EXPR
|| code1
== GE_EXPR
)
6094 && (code2
== GT_EXPR
|| code2
== GE_EXPR
))
6096 if ((cmp
> 0) || (cmp
== 0 && code1
== GT_EXPR
))
6097 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
6099 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
6102 /* Check for singleton ranges. */
6104 && ((code1
== LT_EXPR
&& code2
== GT_EXPR
)
6105 || (code1
== GT_EXPR
&& code2
== LT_EXPR
)))
6106 return fold_build2 (NE_EXPR
, boolean_type_node
, op1a
, op2b
);
6108 /* Check for less/greater pairs that don't restrict the range at all. */
6110 && (code1
== LT_EXPR
|| code1
== LE_EXPR
)
6111 && (code2
== GT_EXPR
|| code2
== GE_EXPR
))
6112 return boolean_true_node
;
6114 && (code1
== GT_EXPR
|| code1
== GE_EXPR
)
6115 && (code2
== LT_EXPR
|| code2
== LE_EXPR
))
6116 return boolean_true_node
;
6119 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
6120 NAME's definition is a truth value. See if there are any simplifications
6121 that can be done against the NAME's definition. */
6122 if (TREE_CODE (op1a
) == SSA_NAME
6123 && (code1
== NE_EXPR
|| code1
== EQ_EXPR
)
6124 && (integer_zerop (op1b
) || integer_onep (op1b
)))
6126 bool invert
= ((code1
== EQ_EXPR
&& integer_zerop (op1b
))
6127 || (code1
== NE_EXPR
&& integer_onep (op1b
)));
6128 gimple
*stmt
= SSA_NAME_DEF_STMT (op1a
);
6129 switch (gimple_code (stmt
))
6132 /* Try to simplify by copy-propagating the definition. */
6133 return or_var_with_comparison (op1a
, invert
, code2
, op2a
, op2b
);
6136 /* If every argument to the PHI produces the same result when
6137 ORed with the second comparison, we win.
6138 Do not do this unless the type is bool since we need a bool
6139 result here anyway. */
6140 if (TREE_CODE (TREE_TYPE (op1a
)) == BOOLEAN_TYPE
)
6142 tree result
= NULL_TREE
;
6144 for (i
= 0; i
< gimple_phi_num_args (stmt
); i
++)
6146 tree arg
= gimple_phi_arg_def (stmt
, i
);
6148 /* If this PHI has itself as an argument, ignore it.
6149 If all the other args produce the same result,
6151 if (arg
== gimple_phi_result (stmt
))
6153 else if (TREE_CODE (arg
) == INTEGER_CST
)
6155 if (invert
? integer_zerop (arg
) : integer_nonzerop (arg
))
6158 result
= boolean_true_node
;
6159 else if (!integer_onep (result
))
6163 result
= fold_build2 (code2
, boolean_type_node
,
6165 else if (!same_bool_comparison_p (result
,
6169 else if (TREE_CODE (arg
) == SSA_NAME
6170 && !SSA_NAME_IS_DEFAULT_DEF (arg
))
6173 gimple
*def_stmt
= SSA_NAME_DEF_STMT (arg
);
6174 /* In simple cases we can look through PHI nodes,
6175 but we have to be careful with loops.
6177 if (! dom_info_available_p (CDI_DOMINATORS
)
6178 || gimple_bb (def_stmt
) == gimple_bb (stmt
)
6179 || dominated_by_p (CDI_DOMINATORS
,
6180 gimple_bb (def_stmt
),
6183 temp
= or_var_with_comparison (arg
, invert
, code2
,
6189 else if (!same_bool_result_p (result
, temp
))
6205 /* Try to simplify the OR of two comparisons, specified by
6206 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
6207 If this can be simplified to a single expression (without requiring
6208 introducing more SSA variables to hold intermediate values),
6209 return the resulting tree. Otherwise return NULL_TREE.
6210 If the result expression is non-null, it has boolean type. */
6213 maybe_fold_or_comparisons (enum tree_code code1
, tree op1a
, tree op1b
,
6214 enum tree_code code2
, tree op2a
, tree op2b
)
6216 tree t
= or_comparisons_1 (code1
, op1a
, op1b
, code2
, op2a
, op2b
);
6220 return or_comparisons_1 (code2
, op2a
, op2b
, code1
, op1a
, op1b
);
6224 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6226 Either NULL_TREE, a simplified but non-constant or a constant
6229 ??? This should go into a gimple-fold-inline.h file to be eventually
6230 privatized with the single valueize function used in the various TUs
6231 to avoid the indirect function call overhead. */
6234 gimple_fold_stmt_to_constant_1 (gimple
*stmt
, tree (*valueize
) (tree
),
6235 tree (*gvalueize
) (tree
))
6237 gimple_match_op res_op
;
6238 /* ??? The SSA propagators do not correctly deal with following SSA use-def
6239 edges if there are intermediate VARYING defs. For this reason
6240 do not follow SSA edges here even though SCCVN can technically
6241 just deal fine with that. */
6242 if (gimple_simplify (stmt
, &res_op
, NULL
, gvalueize
, valueize
))
6244 tree res
= NULL_TREE
;
6245 if (gimple_simplified_result_is_gimple_val (&res_op
))
6246 res
= res_op
.ops
[0];
6247 else if (mprts_hook
)
6248 res
= mprts_hook (&res_op
);
6251 if (dump_file
&& dump_flags
& TDF_DETAILS
)
6253 fprintf (dump_file
, "Match-and-simplified ");
6254 print_gimple_expr (dump_file
, stmt
, 0, TDF_SLIM
);
6255 fprintf (dump_file
, " to ");
6256 print_generic_expr (dump_file
, res
);
6257 fprintf (dump_file
, "\n");
6263 location_t loc
= gimple_location (stmt
);
6264 switch (gimple_code (stmt
))
6268 enum tree_code subcode
= gimple_assign_rhs_code (stmt
);
6270 switch (get_gimple_rhs_class (subcode
))
6272 case GIMPLE_SINGLE_RHS
:
6274 tree rhs
= gimple_assign_rhs1 (stmt
);
6275 enum tree_code_class kind
= TREE_CODE_CLASS (subcode
);
6277 if (TREE_CODE (rhs
) == SSA_NAME
)
6279 /* If the RHS is an SSA_NAME, return its known constant value,
6281 return (*valueize
) (rhs
);
6283 /* Handle propagating invariant addresses into address
6285 else if (TREE_CODE (rhs
) == ADDR_EXPR
6286 && !is_gimple_min_invariant (rhs
))
6288 poly_int64 offset
= 0;
6290 base
= get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs
, 0),
6294 && (CONSTANT_CLASS_P (base
)
6295 || decl_address_invariant_p (base
)))
6296 return build_invariant_address (TREE_TYPE (rhs
),
6299 else if (TREE_CODE (rhs
) == CONSTRUCTOR
6300 && TREE_CODE (TREE_TYPE (rhs
)) == VECTOR_TYPE
6301 && known_eq (CONSTRUCTOR_NELTS (rhs
),
6302 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs
))))
6307 nelts
= CONSTRUCTOR_NELTS (rhs
);
6308 tree_vector_builder
vec (TREE_TYPE (rhs
), nelts
, 1);
6309 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs
), i
, val
)
6311 val
= (*valueize
) (val
);
6312 if (TREE_CODE (val
) == INTEGER_CST
6313 || TREE_CODE (val
) == REAL_CST
6314 || TREE_CODE (val
) == FIXED_CST
)
6315 vec
.quick_push (val
);
6320 return vec
.build ();
6322 if (subcode
== OBJ_TYPE_REF
)
6324 tree val
= (*valueize
) (OBJ_TYPE_REF_EXPR (rhs
));
6325 /* If callee is constant, we can fold away the wrapper. */
6326 if (is_gimple_min_invariant (val
))
6330 if (kind
== tcc_reference
)
6332 if ((TREE_CODE (rhs
) == VIEW_CONVERT_EXPR
6333 || TREE_CODE (rhs
) == REALPART_EXPR
6334 || TREE_CODE (rhs
) == IMAGPART_EXPR
)
6335 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == SSA_NAME
)
6337 tree val
= (*valueize
) (TREE_OPERAND (rhs
, 0));
6338 return fold_unary_loc (EXPR_LOCATION (rhs
),
6340 TREE_TYPE (rhs
), val
);
6342 else if (TREE_CODE (rhs
) == BIT_FIELD_REF
6343 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == SSA_NAME
)
6345 tree val
= (*valueize
) (TREE_OPERAND (rhs
, 0));
6346 return fold_ternary_loc (EXPR_LOCATION (rhs
),
6348 TREE_TYPE (rhs
), val
,
6349 TREE_OPERAND (rhs
, 1),
6350 TREE_OPERAND (rhs
, 2));
6352 else if (TREE_CODE (rhs
) == MEM_REF
6353 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == SSA_NAME
)
6355 tree val
= (*valueize
) (TREE_OPERAND (rhs
, 0));
6356 if (TREE_CODE (val
) == ADDR_EXPR
6357 && is_gimple_min_invariant (val
))
6359 tree tem
= fold_build2 (MEM_REF
, TREE_TYPE (rhs
),
6361 TREE_OPERAND (rhs
, 1));
6366 return fold_const_aggregate_ref_1 (rhs
, valueize
);
6368 else if (kind
== tcc_declaration
)
6369 return get_symbol_constant_value (rhs
);
6373 case GIMPLE_UNARY_RHS
:
6376 case GIMPLE_BINARY_RHS
:
6377 /* Translate &x + CST into an invariant form suitable for
6378 further propagation. */
6379 if (subcode
== POINTER_PLUS_EXPR
)
6381 tree op0
= (*valueize
) (gimple_assign_rhs1 (stmt
));
6382 tree op1
= (*valueize
) (gimple_assign_rhs2 (stmt
));
6383 if (TREE_CODE (op0
) == ADDR_EXPR
6384 && TREE_CODE (op1
) == INTEGER_CST
)
6386 tree off
= fold_convert (ptr_type_node
, op1
);
6387 return build_fold_addr_expr_loc
6389 fold_build2 (MEM_REF
,
6390 TREE_TYPE (TREE_TYPE (op0
)),
6391 unshare_expr (op0
), off
));
6394 /* Canonicalize bool != 0 and bool == 0 appearing after
6395 valueization. While gimple_simplify handles this
6396 it can get confused by the ~X == 1 -> X == 0 transform
6397 which we cant reduce to a SSA name or a constant
6398 (and we have no way to tell gimple_simplify to not
6399 consider those transforms in the first place). */
6400 else if (subcode
== EQ_EXPR
6401 || subcode
== NE_EXPR
)
6403 tree lhs
= gimple_assign_lhs (stmt
);
6404 tree op0
= gimple_assign_rhs1 (stmt
);
6405 if (useless_type_conversion_p (TREE_TYPE (lhs
),
6408 tree op1
= (*valueize
) (gimple_assign_rhs2 (stmt
));
6409 op0
= (*valueize
) (op0
);
6410 if (TREE_CODE (op0
) == INTEGER_CST
)
6411 std::swap (op0
, op1
);
6412 if (TREE_CODE (op1
) == INTEGER_CST
6413 && ((subcode
== NE_EXPR
&& integer_zerop (op1
))
6414 || (subcode
== EQ_EXPR
&& integer_onep (op1
))))
6420 case GIMPLE_TERNARY_RHS
:
6422 /* Handle ternary operators that can appear in GIMPLE form. */
6423 tree op0
= (*valueize
) (gimple_assign_rhs1 (stmt
));
6424 tree op1
= (*valueize
) (gimple_assign_rhs2 (stmt
));
6425 tree op2
= (*valueize
) (gimple_assign_rhs3 (stmt
));
6426 return fold_ternary_loc (loc
, subcode
,
6427 gimple_expr_type (stmt
), op0
, op1
, op2
);
6438 gcall
*call_stmt
= as_a
<gcall
*> (stmt
);
6440 if (gimple_call_internal_p (stmt
))
6442 enum tree_code subcode
= ERROR_MARK
;
6443 switch (gimple_call_internal_fn (stmt
))
6445 case IFN_UBSAN_CHECK_ADD
:
6446 subcode
= PLUS_EXPR
;
6448 case IFN_UBSAN_CHECK_SUB
:
6449 subcode
= MINUS_EXPR
;
6451 case IFN_UBSAN_CHECK_MUL
:
6452 subcode
= MULT_EXPR
;
6454 case IFN_BUILTIN_EXPECT
:
6456 tree arg0
= gimple_call_arg (stmt
, 0);
6457 tree op0
= (*valueize
) (arg0
);
6458 if (TREE_CODE (op0
) == INTEGER_CST
)
6465 tree arg0
= gimple_call_arg (stmt
, 0);
6466 tree arg1
= gimple_call_arg (stmt
, 1);
6467 tree op0
= (*valueize
) (arg0
);
6468 tree op1
= (*valueize
) (arg1
);
6470 if (TREE_CODE (op0
) != INTEGER_CST
6471 || TREE_CODE (op1
) != INTEGER_CST
)
6476 /* x * 0 = 0 * x = 0 without overflow. */
6477 if (integer_zerop (op0
) || integer_zerop (op1
))
6478 return build_zero_cst (TREE_TYPE (arg0
));
6481 /* y - y = 0 without overflow. */
6482 if (operand_equal_p (op0
, op1
, 0))
6483 return build_zero_cst (TREE_TYPE (arg0
));
6490 = fold_binary_loc (loc
, subcode
, TREE_TYPE (arg0
), op0
, op1
);
6492 && TREE_CODE (res
) == INTEGER_CST
6493 && !TREE_OVERFLOW (res
))
6498 fn
= (*valueize
) (gimple_call_fn (stmt
));
6499 if (TREE_CODE (fn
) == ADDR_EXPR
6500 && fndecl_built_in_p (TREE_OPERAND (fn
, 0))
6501 && gimple_builtin_call_types_compatible_p (stmt
,
6502 TREE_OPERAND (fn
, 0)))
6504 tree
*args
= XALLOCAVEC (tree
, gimple_call_num_args (stmt
));
6507 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
6508 args
[i
] = (*valueize
) (gimple_call_arg (stmt
, i
));
6509 retval
= fold_builtin_call_array (loc
,
6510 gimple_call_return_type (call_stmt
),
6511 fn
, gimple_call_num_args (stmt
), args
);
6514 /* fold_call_expr wraps the result inside a NOP_EXPR. */
6515 STRIP_NOPS (retval
);
6516 retval
= fold_convert (gimple_call_return_type (call_stmt
),
6529 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6530 Returns NULL_TREE if folding to a constant is not possible, otherwise
6531 returns a constant according to is_gimple_min_invariant. */
6534 gimple_fold_stmt_to_constant (gimple
*stmt
, tree (*valueize
) (tree
))
6536 tree res
= gimple_fold_stmt_to_constant_1 (stmt
, valueize
);
6537 if (res
&& is_gimple_min_invariant (res
))
6543 /* The following set of functions are supposed to fold references using
6544 their constant initializers. */
6546 /* See if we can find constructor defining value of BASE.
6547 When we know the consructor with constant offset (such as
6548 base is array[40] and we do know constructor of array), then
6549 BIT_OFFSET is adjusted accordingly.
6551 As a special case, return error_mark_node when constructor
6552 is not explicitly available, but it is known to be zero
6553 such as 'static const int a;'. */
6555 get_base_constructor (tree base
, poly_int64_pod
*bit_offset
,
6556 tree (*valueize
)(tree
))
6558 poly_int64 bit_offset2
, size
, max_size
;
6561 if (TREE_CODE (base
) == MEM_REF
)
6563 poly_offset_int boff
= *bit_offset
+ mem_ref_offset (base
) * BITS_PER_UNIT
;
6564 if (!boff
.to_shwi (bit_offset
))
6568 && TREE_CODE (TREE_OPERAND (base
, 0)) == SSA_NAME
)
6569 base
= valueize (TREE_OPERAND (base
, 0));
6570 if (!base
|| TREE_CODE (base
) != ADDR_EXPR
)
6572 base
= TREE_OPERAND (base
, 0);
6575 && TREE_CODE (base
) == SSA_NAME
)
6576 base
= valueize (base
);
6578 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
6579 DECL_INITIAL. If BASE is a nested reference into another
6580 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
6581 the inner reference. */
6582 switch (TREE_CODE (base
))
6587 tree init
= ctor_for_folding (base
);
6589 /* Our semantic is exact opposite of ctor_for_folding;
6590 NULL means unknown, while error_mark_node is 0. */
6591 if (init
== error_mark_node
)
6594 return error_mark_node
;
6598 case VIEW_CONVERT_EXPR
:
6599 return get_base_constructor (TREE_OPERAND (base
, 0),
6600 bit_offset
, valueize
);
6604 base
= get_ref_base_and_extent (base
, &bit_offset2
, &size
, &max_size
,
6606 if (!known_size_p (max_size
) || maybe_ne (size
, max_size
))
6608 *bit_offset
+= bit_offset2
;
6609 return get_base_constructor (base
, bit_offset
, valueize
);
6615 if (CONSTANT_CLASS_P (base
))
6622 /* CTOR is CONSTRUCTOR of an array type. Fold a reference of SIZE bits
6623 to the memory at bit OFFSET. When non-null, TYPE is the expected
6624 type of the reference; otherwise the type of the referenced element
6625 is used instead. When SIZE is zero, attempt to fold a reference to
6626 the entire element which OFFSET refers to. Increment *SUBOFF by
6627 the bit offset of the accessed element. */
6630 fold_array_ctor_reference (tree type
, tree ctor
,
6631 unsigned HOST_WIDE_INT offset
,
6632 unsigned HOST_WIDE_INT size
,
6634 unsigned HOST_WIDE_INT
*suboff
)
6636 offset_int low_bound
;
6637 offset_int elt_size
;
6638 offset_int access_index
;
6639 tree domain_type
= NULL_TREE
;
6640 HOST_WIDE_INT inner_offset
;
6642 /* Compute low bound and elt size. */
6643 if (TREE_CODE (TREE_TYPE (ctor
)) == ARRAY_TYPE
)
6644 domain_type
= TYPE_DOMAIN (TREE_TYPE (ctor
));
6645 if (domain_type
&& TYPE_MIN_VALUE (domain_type
))
6647 /* Static constructors for variably sized objects makes no sense. */
6648 if (TREE_CODE (TYPE_MIN_VALUE (domain_type
)) != INTEGER_CST
)
6650 low_bound
= wi::to_offset (TYPE_MIN_VALUE (domain_type
));
6654 /* Static constructors for variably sized objects makes no sense. */
6655 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor
)))) != INTEGER_CST
)
6657 elt_size
= wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor
))));
6659 /* When TYPE is non-null, verify that it specifies a constant-sized
6660 accessed not larger than size of array element. */
6662 && (!TYPE_SIZE_UNIT (type
)
6663 || TREE_CODE (TYPE_SIZE_UNIT (type
)) != INTEGER_CST
6664 || elt_size
< wi::to_offset (TYPE_SIZE_UNIT (type
))
6668 /* Compute the array index we look for. */
6669 access_index
= wi::udiv_trunc (offset_int (offset
/ BITS_PER_UNIT
),
6671 access_index
+= low_bound
;
6673 /* And offset within the access. */
6674 inner_offset
= offset
% (elt_size
.to_uhwi () * BITS_PER_UNIT
);
6676 /* See if the array field is large enough to span whole access. We do not
6677 care to fold accesses spanning multiple array indexes. */
6678 if (inner_offset
+ size
> elt_size
.to_uhwi () * BITS_PER_UNIT
)
6680 if (tree val
= get_array_ctor_element_at_index (ctor
, access_index
))
6682 if (!size
&& TREE_CODE (val
) != CONSTRUCTOR
)
6684 /* For the final reference to the entire accessed element
6685 (SIZE is zero), reset INNER_OFFSET, disegard TYPE (which
6686 may be null) in favor of the type of the element, and set
6687 SIZE to the size of the accessed element. */
6689 type
= TREE_TYPE (val
);
6690 size
= elt_size
.to_uhwi () * BITS_PER_UNIT
;
6693 *suboff
+= (access_index
* elt_size
* BITS_PER_UNIT
).to_uhwi ();
6694 return fold_ctor_reference (type
, val
, inner_offset
, size
, from_decl
,
6698 /* Memory not explicitly mentioned in constructor is 0 (or
6699 the reference is out of range). */
6700 return type
? build_zero_cst (type
) : NULL_TREE
;
6703 /* CTOR is CONSTRUCTOR of an aggregate or vector. Fold a reference
6704 of SIZE bits to the memory at bit OFFSET. When non-null, TYPE
6705 is the expected type of the reference; otherwise the type of
6706 the referenced member is used instead. When SIZE is zero,
6707 attempt to fold a reference to the entire member which OFFSET
6708 refers to; in this case. Increment *SUBOFF by the bit offset
6709 of the accessed member. */
6712 fold_nonarray_ctor_reference (tree type
, tree ctor
,
6713 unsigned HOST_WIDE_INT offset
,
6714 unsigned HOST_WIDE_INT size
,
6716 unsigned HOST_WIDE_INT
*suboff
)
6718 unsigned HOST_WIDE_INT cnt
;
6721 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor
), cnt
, cfield
,
6724 tree byte_offset
= DECL_FIELD_OFFSET (cfield
);
6725 tree field_offset
= DECL_FIELD_BIT_OFFSET (cfield
);
6726 tree field_size
= DECL_SIZE (cfield
);
6730 /* Determine the size of the flexible array member from
6731 the size of the initializer provided for it. */
6732 field_size
= TYPE_SIZE (TREE_TYPE (cval
));
6735 /* Variable sized objects in static constructors makes no sense,
6736 but field_size can be NULL for flexible array members. */
6737 gcc_assert (TREE_CODE (field_offset
) == INTEGER_CST
6738 && TREE_CODE (byte_offset
) == INTEGER_CST
6739 && (field_size
!= NULL_TREE
6740 ? TREE_CODE (field_size
) == INTEGER_CST
6741 : TREE_CODE (TREE_TYPE (cfield
)) == ARRAY_TYPE
));
6743 /* Compute bit offset of the field. */
6744 offset_int bitoffset
6745 = (wi::to_offset (field_offset
)
6746 + (wi::to_offset (byte_offset
) << LOG2_BITS_PER_UNIT
));
6747 /* Compute bit offset where the field ends. */
6748 offset_int bitoffset_end
;
6749 if (field_size
!= NULL_TREE
)
6750 bitoffset_end
= bitoffset
+ wi::to_offset (field_size
);
6754 /* Compute the bit offset of the end of the desired access.
6755 As a special case, if the size of the desired access is
6756 zero, assume the access is to the entire field (and let
6757 the caller make any necessary adjustments by storing
6758 the actual bounds of the field in FIELDBOUNDS). */
6759 offset_int access_end
= offset_int (offset
);
6763 access_end
= bitoffset_end
;
6765 /* Is there any overlap between the desired access at
6766 [OFFSET, OFFSET+SIZE) and the offset of the field within
6767 the object at [BITOFFSET, BITOFFSET_END)? */
6768 if (wi::cmps (access_end
, bitoffset
) > 0
6769 && (field_size
== NULL_TREE
6770 || wi::lts_p (offset
, bitoffset_end
)))
6772 *suboff
+= bitoffset
.to_uhwi ();
6774 if (!size
&& TREE_CODE (cval
) != CONSTRUCTOR
)
6776 /* For the final reference to the entire accessed member
6777 (SIZE is zero), reset OFFSET, disegard TYPE (which may
6778 be null) in favor of the type of the member, and set
6779 SIZE to the size of the accessed member. */
6780 offset
= bitoffset
.to_uhwi ();
6781 type
= TREE_TYPE (cval
);
6782 size
= (bitoffset_end
- bitoffset
).to_uhwi ();
6785 /* We do have overlap. Now see if the field is large enough
6786 to cover the access. Give up for accesses that extend
6787 beyond the end of the object or that span multiple fields. */
6788 if (wi::cmps (access_end
, bitoffset_end
) > 0)
6790 if (offset
< bitoffset
)
6793 offset_int inner_offset
= offset_int (offset
) - bitoffset
;
6794 return fold_ctor_reference (type
, cval
,
6795 inner_offset
.to_uhwi (), size
,
6799 /* Memory not explicitly mentioned in constructor is 0. */
6800 return type
? build_zero_cst (type
) : NULL_TREE
;
6803 /* CTOR is value initializing memory. Fold a reference of TYPE and
6804 bit size POLY_SIZE to the memory at bit POLY_OFFSET. When SIZE
6805 is zero, attempt to fold a reference to the entire subobject
6806 which OFFSET refers to. This is used when folding accesses to
6807 string members of aggregates. When non-null, set *SUBOFF to
6808 the bit offset of the accessed subobject. */
6811 fold_ctor_reference (tree type
, tree ctor
, const poly_uint64
&poly_offset
,
6812 const poly_uint64
&poly_size
, tree from_decl
,
6813 unsigned HOST_WIDE_INT
*suboff
/* = NULL */)
6817 /* We found the field with exact match. */
6819 && useless_type_conversion_p (type
, TREE_TYPE (ctor
))
6820 && known_eq (poly_offset
, 0U))
6821 return canonicalize_constructor_val (unshare_expr (ctor
), from_decl
);
6823 /* The remaining optimizations need a constant size and offset. */
6824 unsigned HOST_WIDE_INT size
, offset
;
6825 if (!poly_size
.is_constant (&size
) || !poly_offset
.is_constant (&offset
))
6828 /* We are at the end of walk, see if we can view convert the
6830 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor
)) && !offset
6831 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
6832 && !compare_tree_int (TYPE_SIZE (type
), size
)
6833 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor
)), size
))
6835 ret
= canonicalize_constructor_val (unshare_expr (ctor
), from_decl
);
6838 ret
= fold_unary (VIEW_CONVERT_EXPR
, type
, ret
);
6840 STRIP_USELESS_TYPE_CONVERSION (ret
);
6844 /* For constants and byte-aligned/sized reads try to go through
6845 native_encode/interpret. */
6846 if (CONSTANT_CLASS_P (ctor
)
6847 && BITS_PER_UNIT
== 8
6848 && offset
% BITS_PER_UNIT
== 0
6849 && size
% BITS_PER_UNIT
== 0
6850 && size
<= MAX_BITSIZE_MODE_ANY_MODE
)
6852 unsigned char buf
[MAX_BITSIZE_MODE_ANY_MODE
/ BITS_PER_UNIT
];
6853 int len
= native_encode_expr (ctor
, buf
, size
/ BITS_PER_UNIT
,
6854 offset
/ BITS_PER_UNIT
);
6856 return native_interpret_expr (type
, buf
, len
);
6858 if (TREE_CODE (ctor
) == CONSTRUCTOR
)
6860 unsigned HOST_WIDE_INT dummy
= 0;
6864 if (TREE_CODE (TREE_TYPE (ctor
)) == ARRAY_TYPE
6865 || TREE_CODE (TREE_TYPE (ctor
)) == VECTOR_TYPE
)
6866 return fold_array_ctor_reference (type
, ctor
, offset
, size
,
6869 return fold_nonarray_ctor_reference (type
, ctor
, offset
, size
,
6876 /* Return the tree representing the element referenced by T if T is an
6877 ARRAY_REF or COMPONENT_REF into constant aggregates valuezing SSA
6878 names using VALUEIZE. Return NULL_TREE otherwise. */
6881 fold_const_aggregate_ref_1 (tree t
, tree (*valueize
) (tree
))
6883 tree ctor
, idx
, base
;
6884 poly_int64 offset
, size
, max_size
;
6888 if (TREE_THIS_VOLATILE (t
))
6892 return get_symbol_constant_value (t
);
6894 tem
= fold_read_from_constant_string (t
);
6898 switch (TREE_CODE (t
))
6901 case ARRAY_RANGE_REF
:
6902 /* Constant indexes are handled well by get_base_constructor.
6903 Only special case variable offsets.
6904 FIXME: This code can't handle nested references with variable indexes
6905 (they will be handled only by iteration of ccp). Perhaps we can bring
6906 get_ref_base_and_extent here and make it use a valueize callback. */
6907 if (TREE_CODE (TREE_OPERAND (t
, 1)) == SSA_NAME
6909 && (idx
= (*valueize
) (TREE_OPERAND (t
, 1)))
6910 && poly_int_tree_p (idx
))
6912 tree low_bound
, unit_size
;
6914 /* If the resulting bit-offset is constant, track it. */
6915 if ((low_bound
= array_ref_low_bound (t
),
6916 poly_int_tree_p (low_bound
))
6917 && (unit_size
= array_ref_element_size (t
),
6918 tree_fits_uhwi_p (unit_size
)))
6920 poly_offset_int woffset
6921 = wi::sext (wi::to_poly_offset (idx
)
6922 - wi::to_poly_offset (low_bound
),
6923 TYPE_PRECISION (TREE_TYPE (idx
)));
6925 if (woffset
.to_shwi (&offset
))
6927 /* TODO: This code seems wrong, multiply then check
6928 to see if it fits. */
6929 offset
*= tree_to_uhwi (unit_size
);
6930 offset
*= BITS_PER_UNIT
;
6932 base
= TREE_OPERAND (t
, 0);
6933 ctor
= get_base_constructor (base
, &offset
, valueize
);
6934 /* Empty constructor. Always fold to 0. */
6935 if (ctor
== error_mark_node
)
6936 return build_zero_cst (TREE_TYPE (t
));
6937 /* Out of bound array access. Value is undefined,
6939 if (maybe_lt (offset
, 0))
6941 /* We can not determine ctor. */
6944 return fold_ctor_reference (TREE_TYPE (t
), ctor
, offset
,
6945 tree_to_uhwi (unit_size
)
6955 case TARGET_MEM_REF
:
6957 base
= get_ref_base_and_extent (t
, &offset
, &size
, &max_size
, &reverse
);
6958 ctor
= get_base_constructor (base
, &offset
, valueize
);
6960 /* Empty constructor. Always fold to 0. */
6961 if (ctor
== error_mark_node
)
6962 return build_zero_cst (TREE_TYPE (t
));
6963 /* We do not know precise address. */
6964 if (!known_size_p (max_size
) || maybe_ne (max_size
, size
))
6966 /* We can not determine ctor. */
6970 /* Out of bound array access. Value is undefined, but don't fold. */
6971 if (maybe_lt (offset
, 0))
6974 return fold_ctor_reference (TREE_TYPE (t
), ctor
, offset
, size
,
6980 tree c
= fold_const_aggregate_ref_1 (TREE_OPERAND (t
, 0), valueize
);
6981 if (c
&& TREE_CODE (c
) == COMPLEX_CST
)
6982 return fold_build1_loc (EXPR_LOCATION (t
),
6983 TREE_CODE (t
), TREE_TYPE (t
), c
);
6995 fold_const_aggregate_ref (tree t
)
6997 return fold_const_aggregate_ref_1 (t
, NULL
);
7000 /* Lookup virtual method with index TOKEN in a virtual table V
7002 Set CAN_REFER if non-NULL to false if method
7003 is not referable or if the virtual table is ill-formed (such as rewriten
7004 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
7007 gimple_get_virt_method_for_vtable (HOST_WIDE_INT token
,
7009 unsigned HOST_WIDE_INT offset
,
7012 tree vtable
= v
, init
, fn
;
7013 unsigned HOST_WIDE_INT size
;
7014 unsigned HOST_WIDE_INT elt_size
, access_index
;
7020 /* First of all double check we have virtual table. */
7021 if (!VAR_P (v
) || !DECL_VIRTUAL_P (v
))
7023 /* Pass down that we lost track of the target. */
7029 init
= ctor_for_folding (v
);
7031 /* The virtual tables should always be born with constructors
7032 and we always should assume that they are avaialble for
7033 folding. At the moment we do not stream them in all cases,
7034 but it should never happen that ctor seem unreachable. */
7036 if (init
== error_mark_node
)
7038 /* Pass down that we lost track of the target. */
7043 gcc_checking_assert (TREE_CODE (TREE_TYPE (v
)) == ARRAY_TYPE
);
7044 size
= tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v
))));
7045 offset
*= BITS_PER_UNIT
;
7046 offset
+= token
* size
;
7048 /* Lookup the value in the constructor that is assumed to be array.
7049 This is equivalent to
7050 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
7051 offset, size, NULL);
7052 but in a constant time. We expect that frontend produced a simple
7053 array without indexed initializers. */
7055 gcc_checking_assert (TREE_CODE (TREE_TYPE (init
)) == ARRAY_TYPE
);
7056 domain_type
= TYPE_DOMAIN (TREE_TYPE (init
));
7057 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type
)));
7058 elt_size
= tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init
))));
7060 access_index
= offset
/ BITS_PER_UNIT
/ elt_size
;
7061 gcc_checking_assert (offset
% (elt_size
* BITS_PER_UNIT
) == 0);
7063 /* The C++ FE can now produce indexed fields, and we check if the indexes
7065 if (access_index
< CONSTRUCTOR_NELTS (init
))
7067 fn
= CONSTRUCTOR_ELT (init
, access_index
)->value
;
7068 tree idx
= CONSTRUCTOR_ELT (init
, access_index
)->index
;
7069 gcc_checking_assert (!idx
|| tree_to_uhwi (idx
) == access_index
);
7075 /* For type inconsistent program we may end up looking up virtual method
7076 in virtual table that does not contain TOKEN entries. We may overrun
7077 the virtual table and pick up a constant or RTTI info pointer.
7078 In any case the call is undefined. */
7080 || (TREE_CODE (fn
) != ADDR_EXPR
&& TREE_CODE (fn
) != FDESC_EXPR
)
7081 || TREE_CODE (TREE_OPERAND (fn
, 0)) != FUNCTION_DECL
)
7082 fn
= builtin_decl_implicit (BUILT_IN_UNREACHABLE
);
7085 fn
= TREE_OPERAND (fn
, 0);
7087 /* When cgraph node is missing and function is not public, we cannot
7088 devirtualize. This can happen in WHOPR when the actual method
7089 ends up in other partition, because we found devirtualization
7090 possibility too late. */
7091 if (!can_refer_decl_in_current_unit_p (fn
, vtable
))
7102 /* Make sure we create a cgraph node for functions we'll reference.
7103 They can be non-existent if the reference comes from an entry
7104 of an external vtable for example. */
7105 cgraph_node::get_create (fn
);
7110 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
7111 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
7112 KNOWN_BINFO carries the binfo describing the true type of
7113 OBJ_TYPE_REF_OBJECT(REF).
7114 Set CAN_REFER if non-NULL to false if method
7115 is not referable or if the virtual table is ill-formed (such as rewriten
7116 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
7119 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token
, tree known_binfo
,
7122 unsigned HOST_WIDE_INT offset
;
7125 v
= BINFO_VTABLE (known_binfo
);
7126 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
7130 if (!vtable_pointer_value_to_vtable (v
, &v
, &offset
))
7136 return gimple_get_virt_method_for_vtable (token
, v
, offset
, can_refer
);
7139 /* Given a pointer value T, return a simplified version of an
7140 indirection through T, or NULL_TREE if no simplification is
7141 possible. Note that the resulting type may be different from
7142 the type pointed to in the sense that it is still compatible
7143 from the langhooks point of view. */
7146 gimple_fold_indirect_ref (tree t
)
7148 tree ptype
= TREE_TYPE (t
), type
= TREE_TYPE (ptype
);
7153 subtype
= TREE_TYPE (sub
);
7154 if (!POINTER_TYPE_P (subtype
)
7155 || TYPE_REF_CAN_ALIAS_ALL (ptype
))
7158 if (TREE_CODE (sub
) == ADDR_EXPR
)
7160 tree op
= TREE_OPERAND (sub
, 0);
7161 tree optype
= TREE_TYPE (op
);
7163 if (useless_type_conversion_p (type
, optype
))
7166 /* *(foo *)&fooarray => fooarray[0] */
7167 if (TREE_CODE (optype
) == ARRAY_TYPE
7168 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype
))) == INTEGER_CST
7169 && useless_type_conversion_p (type
, TREE_TYPE (optype
)))
7171 tree type_domain
= TYPE_DOMAIN (optype
);
7172 tree min_val
= size_zero_node
;
7173 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
7174 min_val
= TYPE_MIN_VALUE (type_domain
);
7175 if (TREE_CODE (min_val
) == INTEGER_CST
)
7176 return build4 (ARRAY_REF
, type
, op
, min_val
, NULL_TREE
, NULL_TREE
);
7178 /* *(foo *)&complexfoo => __real__ complexfoo */
7179 else if (TREE_CODE (optype
) == COMPLEX_TYPE
7180 && useless_type_conversion_p (type
, TREE_TYPE (optype
)))
7181 return fold_build1 (REALPART_EXPR
, type
, op
);
7182 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
7183 else if (TREE_CODE (optype
) == VECTOR_TYPE
7184 && useless_type_conversion_p (type
, TREE_TYPE (optype
)))
7186 tree part_width
= TYPE_SIZE (type
);
7187 tree index
= bitsize_int (0);
7188 return fold_build3 (BIT_FIELD_REF
, type
, op
, part_width
, index
);
7192 /* *(p + CST) -> ... */
7193 if (TREE_CODE (sub
) == POINTER_PLUS_EXPR
7194 && TREE_CODE (TREE_OPERAND (sub
, 1)) == INTEGER_CST
)
7196 tree addr
= TREE_OPERAND (sub
, 0);
7197 tree off
= TREE_OPERAND (sub
, 1);
7201 addrtype
= TREE_TYPE (addr
);
7203 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
7204 if (TREE_CODE (addr
) == ADDR_EXPR
7205 && TREE_CODE (TREE_TYPE (addrtype
)) == VECTOR_TYPE
7206 && useless_type_conversion_p (type
, TREE_TYPE (TREE_TYPE (addrtype
)))
7207 && tree_fits_uhwi_p (off
))
7209 unsigned HOST_WIDE_INT offset
= tree_to_uhwi (off
);
7210 tree part_width
= TYPE_SIZE (type
);
7211 unsigned HOST_WIDE_INT part_widthi
7212 = tree_to_shwi (part_width
) / BITS_PER_UNIT
;
7213 unsigned HOST_WIDE_INT indexi
= offset
* BITS_PER_UNIT
;
7214 tree index
= bitsize_int (indexi
);
7215 if (known_lt (offset
/ part_widthi
,
7216 TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype
))))
7217 return fold_build3 (BIT_FIELD_REF
, type
, TREE_OPERAND (addr
, 0),
7221 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
7222 if (TREE_CODE (addr
) == ADDR_EXPR
7223 && TREE_CODE (TREE_TYPE (addrtype
)) == COMPLEX_TYPE
7224 && useless_type_conversion_p (type
, TREE_TYPE (TREE_TYPE (addrtype
))))
7226 tree size
= TYPE_SIZE_UNIT (type
);
7227 if (tree_int_cst_equal (size
, off
))
7228 return fold_build1 (IMAGPART_EXPR
, type
, TREE_OPERAND (addr
, 0));
7231 /* *(p + CST) -> MEM_REF <p, CST>. */
7232 if (TREE_CODE (addr
) != ADDR_EXPR
7233 || DECL_P (TREE_OPERAND (addr
, 0)))
7234 return fold_build2 (MEM_REF
, type
,
7236 wide_int_to_tree (ptype
, wi::to_wide (off
)));
7239 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
7240 if (TREE_CODE (TREE_TYPE (subtype
)) == ARRAY_TYPE
7241 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype
)))) == INTEGER_CST
7242 && useless_type_conversion_p (type
, TREE_TYPE (TREE_TYPE (subtype
))))
7245 tree min_val
= size_zero_node
;
7247 sub
= gimple_fold_indirect_ref (sub
);
7249 sub
= build1 (INDIRECT_REF
, TREE_TYPE (subtype
), osub
);
7250 type_domain
= TYPE_DOMAIN (TREE_TYPE (sub
));
7251 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
7252 min_val
= TYPE_MIN_VALUE (type_domain
);
7253 if (TREE_CODE (min_val
) == INTEGER_CST
)
7254 return build4 (ARRAY_REF
, type
, sub
, min_val
, NULL_TREE
, NULL_TREE
);
7260 /* Return true if CODE is an operation that when operating on signed
7261 integer types involves undefined behavior on overflow and the
7262 operation can be expressed with unsigned arithmetic. */
7265 arith_code_with_undefined_signed_overflow (tree_code code
)
7273 case POINTER_PLUS_EXPR
:
7280 /* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
7281 operation that can be transformed to unsigned arithmetic by converting
7282 its operand, carrying out the operation in the corresponding unsigned
7283 type and converting the result back to the original type.
7285 Returns a sequence of statements that replace STMT and also contain
7286 a modified form of STMT itself. */
7289 rewrite_to_defined_overflow (gimple
*stmt
)
7291 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
7293 fprintf (dump_file
, "rewriting stmt with undefined signed "
7295 print_gimple_stmt (dump_file
, stmt
, 0, TDF_SLIM
);
7298 tree lhs
= gimple_assign_lhs (stmt
);
7299 tree type
= unsigned_type_for (TREE_TYPE (lhs
));
7300 gimple_seq stmts
= NULL
;
7301 for (unsigned i
= 1; i
< gimple_num_ops (stmt
); ++i
)
7303 tree op
= gimple_op (stmt
, i
);
7304 op
= gimple_convert (&stmts
, type
, op
);
7305 gimple_set_op (stmt
, i
, op
);
7307 gimple_assign_set_lhs (stmt
, make_ssa_name (type
, stmt
));
7308 if (gimple_assign_rhs_code (stmt
) == POINTER_PLUS_EXPR
)
7309 gimple_assign_set_rhs_code (stmt
, PLUS_EXPR
);
7310 gimple_seq_add_stmt (&stmts
, stmt
);
7311 gimple
*cvt
= gimple_build_assign (lhs
, NOP_EXPR
, gimple_assign_lhs (stmt
));
7312 gimple_seq_add_stmt (&stmts
, cvt
);
7318 /* The valueization hook we use for the gimple_build API simplification.
7319 This makes us match fold_buildN behavior by only combining with
7320 statements in the sequence(s) we are currently building. */
7323 gimple_build_valueize (tree op
)
7325 if (gimple_bb (SSA_NAME_DEF_STMT (op
)) == NULL
)
7330 /* Build the expression CODE OP0 of type TYPE with location LOC,
7331 simplifying it first if possible. Returns the built
7332 expression value and appends statements possibly defining it
7336 gimple_build (gimple_seq
*seq
, location_t loc
,
7337 enum tree_code code
, tree type
, tree op0
)
7339 tree res
= gimple_simplify (code
, type
, op0
, seq
, gimple_build_valueize
);
7342 res
= create_tmp_reg_or_ssa_name (type
);
7344 if (code
== REALPART_EXPR
7345 || code
== IMAGPART_EXPR
7346 || code
== VIEW_CONVERT_EXPR
)
7347 stmt
= gimple_build_assign (res
, code
, build1 (code
, type
, op0
));
7349 stmt
= gimple_build_assign (res
, code
, op0
);
7350 gimple_set_location (stmt
, loc
);
7351 gimple_seq_add_stmt_without_update (seq
, stmt
);
7356 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
7357 simplifying it first if possible. Returns the built
7358 expression value and appends statements possibly defining it
7362 gimple_build (gimple_seq
*seq
, location_t loc
,
7363 enum tree_code code
, tree type
, tree op0
, tree op1
)
7365 tree res
= gimple_simplify (code
, type
, op0
, op1
, seq
, gimple_build_valueize
);
7368 res
= create_tmp_reg_or_ssa_name (type
);
7369 gimple
*stmt
= gimple_build_assign (res
, code
, op0
, op1
);
7370 gimple_set_location (stmt
, loc
);
7371 gimple_seq_add_stmt_without_update (seq
, stmt
);
7376 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
7377 simplifying it first if possible. Returns the built
7378 expression value and appends statements possibly defining it
7382 gimple_build (gimple_seq
*seq
, location_t loc
,
7383 enum tree_code code
, tree type
, tree op0
, tree op1
, tree op2
)
7385 tree res
= gimple_simplify (code
, type
, op0
, op1
, op2
,
7386 seq
, gimple_build_valueize
);
7389 res
= create_tmp_reg_or_ssa_name (type
);
7391 if (code
== BIT_FIELD_REF
)
7392 stmt
= gimple_build_assign (res
, code
,
7393 build3 (code
, type
, op0
, op1
, op2
));
7395 stmt
= gimple_build_assign (res
, code
, op0
, op1
, op2
);
7396 gimple_set_location (stmt
, loc
);
7397 gimple_seq_add_stmt_without_update (seq
, stmt
);
7402 /* Build the call FN (ARG0) with a result of type TYPE
7403 (or no result if TYPE is void) with location LOC,
7404 simplifying it first if possible. Returns the built
7405 expression value (or NULL_TREE if TYPE is void) and appends
7406 statements possibly defining it to SEQ. */
7409 gimple_build (gimple_seq
*seq
, location_t loc
, combined_fn fn
,
7410 tree type
, tree arg0
)
7412 tree res
= gimple_simplify (fn
, type
, arg0
, seq
, gimple_build_valueize
);
7416 if (internal_fn_p (fn
))
7417 stmt
= gimple_build_call_internal (as_internal_fn (fn
), 1, arg0
);
7420 tree decl
= builtin_decl_implicit (as_builtin_fn (fn
));
7421 stmt
= gimple_build_call (decl
, 1, arg0
);
7423 if (!VOID_TYPE_P (type
))
7425 res
= create_tmp_reg_or_ssa_name (type
);
7426 gimple_call_set_lhs (stmt
, res
);
7428 gimple_set_location (stmt
, loc
);
7429 gimple_seq_add_stmt_without_update (seq
, stmt
);
7434 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
7435 (or no result if TYPE is void) with location LOC,
7436 simplifying it first if possible. Returns the built
7437 expression value (or NULL_TREE if TYPE is void) and appends
7438 statements possibly defining it to SEQ. */
7441 gimple_build (gimple_seq
*seq
, location_t loc
, combined_fn fn
,
7442 tree type
, tree arg0
, tree arg1
)
7444 tree res
= gimple_simplify (fn
, type
, arg0
, arg1
, seq
, gimple_build_valueize
);
7448 if (internal_fn_p (fn
))
7449 stmt
= gimple_build_call_internal (as_internal_fn (fn
), 2, arg0
, arg1
);
7452 tree decl
= builtin_decl_implicit (as_builtin_fn (fn
));
7453 stmt
= gimple_build_call (decl
, 2, arg0
, arg1
);
7455 if (!VOID_TYPE_P (type
))
7457 res
= create_tmp_reg_or_ssa_name (type
);
7458 gimple_call_set_lhs (stmt
, res
);
7460 gimple_set_location (stmt
, loc
);
7461 gimple_seq_add_stmt_without_update (seq
, stmt
);
7466 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
7467 (or no result if TYPE is void) with location LOC,
7468 simplifying it first if possible. Returns the built
7469 expression value (or NULL_TREE if TYPE is void) and appends
7470 statements possibly defining it to SEQ. */
7473 gimple_build (gimple_seq
*seq
, location_t loc
, combined_fn fn
,
7474 tree type
, tree arg0
, tree arg1
, tree arg2
)
7476 tree res
= gimple_simplify (fn
, type
, arg0
, arg1
, arg2
,
7477 seq
, gimple_build_valueize
);
7481 if (internal_fn_p (fn
))
7482 stmt
= gimple_build_call_internal (as_internal_fn (fn
),
7483 3, arg0
, arg1
, arg2
);
7486 tree decl
= builtin_decl_implicit (as_builtin_fn (fn
));
7487 stmt
= gimple_build_call (decl
, 3, arg0
, arg1
, arg2
);
7489 if (!VOID_TYPE_P (type
))
7491 res
= create_tmp_reg_or_ssa_name (type
);
7492 gimple_call_set_lhs (stmt
, res
);
7494 gimple_set_location (stmt
, loc
);
7495 gimple_seq_add_stmt_without_update (seq
, stmt
);
7500 /* Build the conversion (TYPE) OP with a result of type TYPE
7501 with location LOC if such conversion is neccesary in GIMPLE,
7502 simplifying it first.
7503 Returns the built expression value and appends
7504 statements possibly defining it to SEQ. */
7507 gimple_convert (gimple_seq
*seq
, location_t loc
, tree type
, tree op
)
7509 if (useless_type_conversion_p (type
, TREE_TYPE (op
)))
7511 return gimple_build (seq
, loc
, NOP_EXPR
, type
, op
);
7514 /* Build the conversion (ptrofftype) OP with a result of a type
7515 compatible with ptrofftype with location LOC if such conversion
7516 is neccesary in GIMPLE, simplifying it first.
7517 Returns the built expression value and appends
7518 statements possibly defining it to SEQ. */
7521 gimple_convert_to_ptrofftype (gimple_seq
*seq
, location_t loc
, tree op
)
7523 if (ptrofftype_p (TREE_TYPE (op
)))
7525 return gimple_convert (seq
, loc
, sizetype
, op
);
7528 /* Build a vector of type TYPE in which each element has the value OP.
7529 Return a gimple value for the result, appending any new statements
7533 gimple_build_vector_from_val (gimple_seq
*seq
, location_t loc
, tree type
,
7536 if (!TYPE_VECTOR_SUBPARTS (type
).is_constant ()
7537 && !CONSTANT_CLASS_P (op
))
7538 return gimple_build (seq
, loc
, VEC_DUPLICATE_EXPR
, type
, op
);
7540 tree res
, vec
= build_vector_from_val (type
, op
);
7541 if (is_gimple_val (vec
))
7543 if (gimple_in_ssa_p (cfun
))
7544 res
= make_ssa_name (type
);
7546 res
= create_tmp_reg (type
);
7547 gimple
*stmt
= gimple_build_assign (res
, vec
);
7548 gimple_set_location (stmt
, loc
);
7549 gimple_seq_add_stmt_without_update (seq
, stmt
);
7553 /* Build a vector from BUILDER, handling the case in which some elements
7554 are non-constant. Return a gimple value for the result, appending any
7555 new instructions to SEQ.
7557 BUILDER must not have a stepped encoding on entry. This is because
7558 the function is not geared up to handle the arithmetic that would
7559 be needed in the variable case, and any code building a vector that
7560 is known to be constant should use BUILDER->build () directly. */
7563 gimple_build_vector (gimple_seq
*seq
, location_t loc
,
7564 tree_vector_builder
*builder
)
7566 gcc_assert (builder
->nelts_per_pattern () <= 2);
7567 unsigned int encoded_nelts
= builder
->encoded_nelts ();
7568 for (unsigned int i
= 0; i
< encoded_nelts
; ++i
)
7569 if (!TREE_CONSTANT ((*builder
)[i
]))
7571 tree type
= builder
->type ();
7572 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (type
).to_constant ();
7573 vec
<constructor_elt
, va_gc
> *v
;
7574 vec_alloc (v
, nelts
);
7575 for (i
= 0; i
< nelts
; ++i
)
7576 CONSTRUCTOR_APPEND_ELT (v
, NULL_TREE
, builder
->elt (i
));
7579 if (gimple_in_ssa_p (cfun
))
7580 res
= make_ssa_name (type
);
7582 res
= create_tmp_reg (type
);
7583 gimple
*stmt
= gimple_build_assign (res
, build_constructor (type
, v
));
7584 gimple_set_location (stmt
, loc
);
7585 gimple_seq_add_stmt_without_update (seq
, stmt
);
7588 return builder
->build ();
7591 /* Return true if the result of assignment STMT is known to be non-negative.
7592 If the return value is based on the assumption that signed overflow is
7593 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7594 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7597 gimple_assign_nonnegative_warnv_p (gimple
*stmt
, bool *strict_overflow_p
,
7600 enum tree_code code
= gimple_assign_rhs_code (stmt
);
7601 switch (get_gimple_rhs_class (code
))
7603 case GIMPLE_UNARY_RHS
:
7604 return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt
),
7605 gimple_expr_type (stmt
),
7606 gimple_assign_rhs1 (stmt
),
7607 strict_overflow_p
, depth
);
7608 case GIMPLE_BINARY_RHS
:
7609 return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt
),
7610 gimple_expr_type (stmt
),
7611 gimple_assign_rhs1 (stmt
),
7612 gimple_assign_rhs2 (stmt
),
7613 strict_overflow_p
, depth
);
7614 case GIMPLE_TERNARY_RHS
:
7616 case GIMPLE_SINGLE_RHS
:
7617 return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt
),
7618 strict_overflow_p
, depth
);
7619 case GIMPLE_INVALID_RHS
:
7625 /* Return true if return value of call STMT is known to be non-negative.
7626 If the return value is based on the assumption that signed overflow is
7627 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7628 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7631 gimple_call_nonnegative_warnv_p (gimple
*stmt
, bool *strict_overflow_p
,
7634 tree arg0
= gimple_call_num_args (stmt
) > 0 ?
7635 gimple_call_arg (stmt
, 0) : NULL_TREE
;
7636 tree arg1
= gimple_call_num_args (stmt
) > 1 ?
7637 gimple_call_arg (stmt
, 1) : NULL_TREE
;
7639 return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt
),
7640 gimple_call_combined_fn (stmt
),
7643 strict_overflow_p
, depth
);
7646 /* Return true if return value of call STMT is known to be non-negative.
7647 If the return value is based on the assumption that signed overflow is
7648 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7649 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7652 gimple_phi_nonnegative_warnv_p (gimple
*stmt
, bool *strict_overflow_p
,
7655 for (unsigned i
= 0; i
< gimple_phi_num_args (stmt
); ++i
)
7657 tree arg
= gimple_phi_arg_def (stmt
, i
);
7658 if (!tree_single_nonnegative_warnv_p (arg
, strict_overflow_p
, depth
+ 1))
7664 /* Return true if STMT is known to compute a non-negative value.
7665 If the return value is based on the assumption that signed overflow is
7666 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7667 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7670 gimple_stmt_nonnegative_warnv_p (gimple
*stmt
, bool *strict_overflow_p
,
7673 switch (gimple_code (stmt
))
7676 return gimple_assign_nonnegative_warnv_p (stmt
, strict_overflow_p
,
7679 return gimple_call_nonnegative_warnv_p (stmt
, strict_overflow_p
,
7682 return gimple_phi_nonnegative_warnv_p (stmt
, strict_overflow_p
,
7689 /* Return true if the floating-point value computed by assignment STMT
7690 is known to have an integer value. We also allow +Inf, -Inf and NaN
7691 to be considered integer values. Return false for signaling NaN.
7693 DEPTH is the current nesting depth of the query. */
7696 gimple_assign_integer_valued_real_p (gimple
*stmt
, int depth
)
7698 enum tree_code code
= gimple_assign_rhs_code (stmt
);
7699 switch (get_gimple_rhs_class (code
))
7701 case GIMPLE_UNARY_RHS
:
7702 return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt
),
7703 gimple_assign_rhs1 (stmt
), depth
);
7704 case GIMPLE_BINARY_RHS
:
7705 return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt
),
7706 gimple_assign_rhs1 (stmt
),
7707 gimple_assign_rhs2 (stmt
), depth
);
7708 case GIMPLE_TERNARY_RHS
:
7710 case GIMPLE_SINGLE_RHS
:
7711 return integer_valued_real_single_p (gimple_assign_rhs1 (stmt
), depth
);
7712 case GIMPLE_INVALID_RHS
:
7718 /* Return true if the floating-point value computed by call STMT is known
7719 to have an integer value. We also allow +Inf, -Inf and NaN to be
7720 considered integer values. Return false for signaling NaN.
7722 DEPTH is the current nesting depth of the query. */
7725 gimple_call_integer_valued_real_p (gimple
*stmt
, int depth
)
7727 tree arg0
= (gimple_call_num_args (stmt
) > 0
7728 ? gimple_call_arg (stmt
, 0)
7730 tree arg1
= (gimple_call_num_args (stmt
) > 1
7731 ? gimple_call_arg (stmt
, 1)
7733 return integer_valued_real_call_p (gimple_call_combined_fn (stmt
),
7737 /* Return true if the floating-point result of phi STMT is known to have
7738 an integer value. We also allow +Inf, -Inf and NaN to be considered
7739 integer values. Return false for signaling NaN.
7741 DEPTH is the current nesting depth of the query. */
7744 gimple_phi_integer_valued_real_p (gimple
*stmt
, int depth
)
7746 for (unsigned i
= 0; i
< gimple_phi_num_args (stmt
); ++i
)
7748 tree arg
= gimple_phi_arg_def (stmt
, i
);
7749 if (!integer_valued_real_single_p (arg
, depth
+ 1))
7755 /* Return true if the floating-point value computed by STMT is known
7756 to have an integer value. We also allow +Inf, -Inf and NaN to be
7757 considered integer values. Return false for signaling NaN.
7759 DEPTH is the current nesting depth of the query. */
7762 gimple_stmt_integer_valued_real_p (gimple
*stmt
, int depth
)
7764 switch (gimple_code (stmt
))
7767 return gimple_assign_integer_valued_real_p (stmt
, depth
);
7769 return gimple_call_integer_valued_real_p (stmt
, depth
);
7771 return gimple_phi_integer_valued_real_p (stmt
, depth
);