/* Statement simplification on GIMPLE.
   Copyright (C) 2010-2020 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.c.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "gimple-pretty-print.h"
#include "gimple-ssa-warn-restrict.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "gimple-fold.h"
#include "gimple-iterator.h"
#include "tree-into-ssa.h"
#include "tree-object-size.h"
#include "tree-ssa-propagate.h"
#include "ipa-utils.h"
#include "tree-ssa-address.h"
#include "langhooks.h"
#include "gimplify-me.h"
#include "gimple-match.h"
#include "gomp-constants.h"
#include "optabs-query.h"
#include "omp-general.h"
#include "fold-const-call.h"
#include "stringpool.h"
#include "diagnostic-core.h"
#include "tree-vector-builder.h"
#include "tree-ssa-strlen.h"
enum strlen_range_kind
{
  /* Compute the exact constant string length.  */
  SRK_STRLEN,
  /* Compute the maximum constant string length.  */
  SRK_STRLENMAX,
  /* Compute a range of string lengths bounded by object sizes.  When
     the length of a string cannot be determined, consider as the upper
     bound the size of the enclosing object the string may be a member
     or element of.  Also determine the size of the largest character
     array the string may refer to.  */
  SRK_LENRANGE,
  /* Determine the integer value of the argument (not string length).  */
  SRK_INT_VALUE
};
static bool
get_range_strlen (tree, bitmap *, strlen_range_kind,
		  c_strlen_data *, unsigned);
/* Return true when DECL can be referenced from current unit.
   FROM_DECL (if non-null) specifies the constructor of the variable
   DECL was taken from.
   We can get declarations that are not possible to reference for various
   reasons:

     1) When analyzing C++ virtual tables.
	C++ virtual tables do have known constructors even
	when they are keyed to other compilation unit.
	Those tables can contain pointers to methods and vars
	in other units.  Those methods have both STATIC and EXTERNAL
	set.
     2) In WHOPR mode devirtualization might lead to reference
	to method that was partitioned elsewhere.
	In this case we have static VAR_DECL or FUNCTION_DECL
	that has no corresponding callgraph/varpool node
	defined.
     3) COMDAT functions referred by external vtables that
	we devirtualize only during final compilation stage.
	At this time we already decided that we will not output
	the function body and thus we can't reference the symbol
	directly.  */
static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  if (DECL_ABSTRACT_P (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || !VAR_OR_FUNCTION_DECL_P (decl))
    return true;

  /* Static objects can be referred only if they are defined and not
     optimized out yet.  */
  if (!TREE_PUBLIC (decl))
    {
      if (DECL_EXTERNAL (decl))
	return false;
      /* Before we start optimizing unreachable code we can be sure all
	 static objects are defined.  */
      if (symtab->function_flags_ready)
	return true;
      snode = symtab_node::get (decl);
      if (!snode || !snode->definition)
	return false;
      node = dyn_cast <cgraph_node *> (snode);
      return !node || !node->inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from initializer of
     external var or var that has been optimized out.  */
  if (!from_decl
      || !VAR_P (from_decl)
      || (!DECL_EXTERNAL (from_decl)
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->definition)
      || (flag_ltrans
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->in_other_partition))
    return true;
  /* We are folding reference from external vtable.  The vtable may refer
     to a symbol keyed to other compilation unit.  The other compilation
     unit may be in separate DSO and the symbol may be hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
    return false;
  /* When function is public, we always can introduce new reference.
     Exception are the COMDAT functions where introducing a direct
     reference implies the need to include the function body in the
     current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We have COMDAT.  We are going to check if we still have definition
     or if the definition is going to be output in other partition.
     Bypass this when gimplifying; all needed functions will be produced.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting to not necessarily give up because the copy will be
     output elsewhere when the corresponding vtable is output.
     This is however not possible - the ABI specifies that COMDATs are
     output in units where they are used and when the other unit was
     compiled with LTO it is possible that the vtable was kept public
     while the function itself was privatized.  */
  if (!symtab->function_flags_ready)
    return true;

  snode = symtab_node::get (decl);
  if (!snode
      || ((!snode->definition || DECL_EXTERNAL (decl))
	  && (!snode->in_other_partition
	      || (!snode->forced_by_abi && !snode->force_output))))
    return false;
  node = dyn_cast <cgraph_node *> (snode);
  return !node || !node->inlined_to;
}
/* Create a temporary for TYPE for a statement STMT.  If the current function
   is in SSA form, a SSA name is created.  Otherwise a temporary register
   is made.  */

static tree
create_tmp_reg_or_ssa_name (tree type, gimple *stmt = NULL)
{
  if (gimple_in_ssa_p (cfun))
    return make_ssa_name (type, stmt);
  else
    return create_tmp_reg (type);
}
/* CVAL is a value taken from DECL_INITIAL of a variable.  Try to transform
   it into an acceptable form for is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specifies the variable whose constructor
   contains CVAL.  */
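/* For example (an illustrative sketch, not from the original sources),
   an initializer value of the form

     &a + 4

   is rewritten by the POINTER_PLUS_EXPR handling below into the
   equivalent

     &MEM[(char *)&a + 4]

   which is_gimple_min_invariant accepts.  */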
tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  if (CONSTANT_CLASS_P (cval))
    return cval;

  tree orig_cval = cval;
  STRIP_NOPS (cval);
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
	cval = build1_loc (EXPR_LOCATION (cval),
			   ADDR_EXPR, TREE_TYPE (ptr),
			   fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
					ptr,
					fold_convert (ptr_type_node,
						      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
	{
	  base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
	  if (base)
	    TREE_OPERAND (cval, 0) = base;
	}
      else
	base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
	return NULL_TREE;

      if (VAR_OR_FUNCTION_DECL_P (base)
	  && !can_refer_decl_in_current_unit_p (base, from_decl))
	return NULL_TREE;
      if (TREE_TYPE (base) == error_mark_node)
	return NULL_TREE;
      if (VAR_P (base))
	TREE_ADDRESSABLE (base) = 1;
      else if (TREE_CODE (base) == FUNCTION_DECL)
	{
	  /* Make sure we create a cgraph node for functions we'll reference.
	     They can be non-existent if the reference comes from an entry
	     of an external vtable for example.  */
	  cgraph_node::get_create (base);
	}
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  /* In CONSTRUCTORs we may see unfolded constants like (int (*) ()) 0.  */
  if (TREE_CODE (cval) == INTEGER_CST)
    {
      if (TREE_OVERFLOW_P (cval))
	cval = drop_tree_overflow (cval);
      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  return orig_cval;
}
/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */
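/* A sketch of the intended effect (not part of the original sources):
   given

     static const int answer = 42;

   a load from ANSWER folds to the constant 42, and a const-qualified
   object without an initializer that cannot be overridden at link or
   run time folds to zero.  */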
tree
get_symbol_constant_value (tree sym)
{
  tree val = ctor_for_folding (sym);
  if (val != error_mark_node)
    {
      if (val)
	{
	  val = canonicalize_constructor_val (unshare_expr (val), sym);
	  if (val && is_gimple_min_invariant (val))
	    return val;
	  else
	    return NULL_TREE;
	}
      /* Variables declared 'const' without an initializer
	 have zero as the initializer if they may not be
	 overridden at link or run time.  */
      if (!val
	  && is_gimple_reg_type (TREE_TYPE (sym)))
	return build_zero_cst (TREE_TYPE (sym));
    }

  return NULL_TREE;
}
/* Subroutine of fold_stmt.  We perform several simplifications of the
   memory reference tree EXPR and make sure to re-gimplify them properly
   after propagation of constant addresses.  IS_LHS is true if the
   reference is supposed to be an lvalue.  */
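/* For instance (illustrative only), a REALPART_EXPR of the complex
   constant 1.0 + 2.0i folds to 1.0 below, and a BIT_FIELD_REF of a
   VECTOR_CST folds to the selected constant element.  */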
static tree
maybe_fold_reference (tree expr, bool is_lhs)
{
  tree result;

  if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
       || TREE_CODE (expr) == REALPART_EXPR
       || TREE_CODE (expr) == IMAGPART_EXPR)
      && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_unary_loc (EXPR_LOCATION (expr),
			   TREE_CODE (expr),
			   TREE_TYPE (expr),
			   TREE_OPERAND (expr, 0));
  else if (TREE_CODE (expr) == BIT_FIELD_REF
	   && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_ternary_loc (EXPR_LOCATION (expr),
			     TREE_CODE (expr),
			     TREE_TYPE (expr),
			     TREE_OPERAND (expr, 0),
			     TREE_OPERAND (expr, 1),
			     TREE_OPERAND (expr, 2));

  if (!is_lhs
      && (result = fold_const_aggregate_ref (expr))
      && is_gimple_min_invariant (result))
    return result;

  return NULL_TREE;
}
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple *stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	if (TREE_CLOBBER_P (rhs))
	  return NULL_TREE;

	if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs, false);

	else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
	  {
	    tree val = OBJ_TYPE_REF_EXPR (rhs);
	    if (is_gimple_min_invariant (val))
	      return val;
	    else if (flag_devirtualize && virtual_method_call_p (rhs))
	      {
		bool final;
		vec <cgraph_node *>targets
		  = possible_polymorphic_call_targets (rhs, stmt, &final);
		if (final && targets.length () <= 1 && dbg_cnt (devirt))
		  {
		    if (dump_enabled_p ())
		      {
			dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
					 "resolving virtual function address "
					 "reference to function %s\n",
					 targets.length () == 1
					 ? targets[0]->name ()
					 : "NULL");
		      }
		    if (targets.length () == 1)
		      {
			val = fold_convert (TREE_TYPE (val),
					    build_fold_addr_expr_loc
					      (loc, targets[0]->decl));
			STRIP_USELESS_TYPE_CONVERSION (val);
		      }
		    else
		      /* We cannot use __builtin_unreachable here because it
			 cannot have address taken.  */
		      val = build_int_cst (TREE_TYPE (val), 0);
		    return val;
		  }
	      }
	  }

	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    tree ref = TREE_OPERAND (rhs, 0);
	    tree tem = maybe_fold_reference (ref, true);
	    if (tem
		&& TREE_CODE (tem) == MEM_REF
		&& integer_zerop (TREE_OPERAND (tem, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
	    else if (tem)
	      result = fold_convert (TREE_TYPE (rhs),
				     build_fold_addr_expr_loc (loc, tem));
	    else if (TREE_CODE (ref) == MEM_REF
		     && integer_zerop (TREE_OPERAND (ref, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));

	    if (result)
	      {
		/* Strip away useless type conversions.  Both the
		   NON_LVALUE_EXPR that may have been added by fold, and
		   "useless" type conversions that might now be apparent
		   due to propagation.  */
		STRIP_USELESS_TYPE_CONVERSION (result);

		if (result != rhs && valid_gimple_rhs_p (result))
		  return result;
	      }
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (! CONSTANT_CLASS_P (val))
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs))
	  return get_symbol_constant_value (rhs);
      }
      break;

    case GIMPLE_UNARY_RHS:
      break;

    case GIMPLE_BINARY_RHS:
      break;

    case GIMPLE_TERNARY_RHS:
      result = fold_ternary_loc (loc, subcode,
				 TREE_TYPE (gimple_assign_lhs (stmt)),
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 gimple_assign_rhs3 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
/* Replace a statement at *SI_P with a sequence of statements in STMTS,
   adjusting the replacement stmts' locations and virtual operands.
   If the statement has a lhs the last stmt in the sequence is expected
   to assign to that lhs.  */
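/* For example (a sketch, not from the original sources), replacing

     memcpy (&d, &s, 4);        # VUSE .MEM_1, VDEF .MEM_2

   with the two-statement sequence

     tmp_3 = MEM[&s];           # VUSE .MEM_1
     MEM[&d] = tmp_3;           # VDEF .MEM_2

   keeps the original statement's VUSE on the first use and its VDEF
   on the last store, creating fresh virtual SSA names only for any
   stores in between.  */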
static void
gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
{
  gimple *stmt = gsi_stmt (*si_p);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  gimple *laststore = NULL;
  for (gimple_stmt_iterator i = gsi_last (stmts);
       !gsi_end_p (i); gsi_prev (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      if ((gimple_assign_single_p (new_stmt)
	   && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	  || (is_gimple_call (new_stmt)
	      && (gimple_call_flags (new_stmt)
		  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
	{
	  tree vdef;
	  if (!laststore)
	    vdef = gimple_vdef (stmt);
	  else
	    vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
	  gimple_set_vdef (new_stmt, vdef);
	  if (vdef && TREE_CODE (vdef) == SSA_NAME)
	    SSA_NAME_DEF_STMT (vdef) = new_stmt;
	  laststore = new_stmt;
	}
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  tree reaching_vuse = gimple_vuse (stmt);
  for (gimple_stmt_iterator i = gsi_start (stmts);
       !gsi_end_p (i); gsi_next (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with exact SSA
	 name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
	gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
	reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store release the virtual
     definition of the original statement.  */
  if (!laststore
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
	  && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL,
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */
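/* A sketch of the effect (illustrative only): when folding determines
   that a call

     x = strlen ("abc");

   computes the constant 3, passing the tree '3' as EXPR here turns
   the statement into the plain assignment

     x = 3;  */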
void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple *stmt, *new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  push_gimplify_context (gimple_in_ssa_p (cfun));

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  pop_gimplify_context (NULL);
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  gsi_replace (si_p, gimple_build_nop (), false);
	  return;
	}
    }
  else
    {
      tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
				       GSI_CONTINUE_LINKING);
    }

  pop_gimplify_context (NULL);

  gsi_replace_with_seq_vops (si_p, stmts);
}
/* Replace the call at *GSI with the gimple value VAL.  */

void
replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  gimple *repl;
  if (lhs)
    {
      if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
	val = fold_convert (TREE_TYPE (lhs), val);
      repl = gimple_build_assign (lhs, val);
    }
  else
    repl = gimple_build_nop ();
  tree vdef = gimple_vdef (stmt);
  if (vdef && TREE_CODE (vdef) == SSA_NAME)
    {
      unlink_stmt_vdef (stmt);
      release_ssa_name (vdef);
    }
  gsi_replace (gsi, repl, false);
}
/* Replace the call at *GSI with the new call REPL and fold that
   again.  */

static void
replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
{
  gimple *stmt = gsi_stmt (*gsi);
  gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
  gimple_set_location (repl, gimple_location (stmt));
  gimple_move_vops (repl, stmt);
  gsi_replace (gsi, repl, false);
  fold_stmt (gsi);
}
/* Return true if VAR is a VAR_DECL or a component thereof.  */

static bool
var_decl_component_p (tree var)
{
  tree inner = var;
  while (handled_component_p (inner))
    inner = TREE_OPERAND (inner, 0);
  return (DECL_P (inner)
	  || (TREE_CODE (inner) == MEM_REF
	      && TREE_CODE (TREE_OPERAND (inner, 0)) == ADDR_EXPR));
}
/* Return TRUE if the SIZE argument, representing the size of an
   object, is in a range of values of which exactly zero is valid.  */
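/* Illustrative example (not from the original sources): in

     ssize_t n = ...;
     if (n <= 0)
       memcpy (d, s, n);

   the size_t argument derived from N has the value set
   {0} U [SSIZE_MAX + 1, SIZE_MAX]; intersecting it with the valid
   range [0, SSIZE_MAX] leaves [0, 0], so the call can be folded as
   if the length were the constant zero.  */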
static bool
size_must_be_zero_p (tree size)
{
  if (integer_zerop (size))
    return true;

  if (TREE_CODE (size) != SSA_NAME || !INTEGRAL_TYPE_P (TREE_TYPE (size)))
    return false;

  tree type = TREE_TYPE (size);
  int prec = TYPE_PRECISION (type);

  /* Compute the value of SSIZE_MAX, the largest positive value that
     can be stored in ssize_t, the signed counterpart of size_t.  */
  wide_int ssize_max = wi::lshift (wi::one (prec), prec - 1) - 1;
  value_range valid_range (build_int_cst (type, 0),
			   wide_int_to_tree (type, ssize_max));
  value_range vr;
  get_range_info (size, vr);
  vr.intersect (&valid_range);
  return vr.zero_p ();
}
/* Fold function call to builtin mem{{,p}cpy,move}.  Try to detect and
   diagnose (otherwise undefined) overlapping copies without preventing
   folding.  When folded, GCC guarantees that overlapping memcpy has
   the same semantics as memmove.  Calls to the library memcpy need not
   provide the same guarantee.  Return false if no simplification can
   be made.  */
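/* For instance (a sketch, not from the original sources), a small
   power-of-two-sized copy such as

     memcpy (&d, &s, sizeof (int));

   becomes a single load/store pair

     tmp_1 = MEM <unsigned int> [&s];
     MEM <unsigned int> [&d] = tmp_1;

   when the accesses are sufficiently aligned or the target handles
   unaligned moves cheaply.  */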
static bool
gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
			       tree dest, tree src, enum built_in_function code)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  tree len = gimple_call_arg (stmt, 2);
  location_t loc = gimple_location (stmt);

  /* If the LEN parameter is a constant zero or in range where
     the only valid value is zero, return DEST.  */
  if (size_must_be_zero_p (len))
    {
      gimple *repl;
      if (gimple_call_lhs (stmt))
	repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
      else
	repl = gimple_build_nop ();
      tree vdef = gimple_vdef (stmt);
      if (vdef && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
      gsi_replace (gsi, repl, false);
      return true;
    }

  /* If SRC and DEST are the same (and not volatile), return
     DEST{,+LEN,+LEN-1}.  */
  if (operand_equal_p (src, dest, 0))
    {
      /* Avoid diagnosing exact overlap in calls to __builtin_memcpy.
	 It's safe and may even be emitted by GCC itself (see bug
	 32667).  */
      unlink_stmt_vdef (stmt);
      if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	release_ssa_name (gimple_vdef (stmt));
      if (!lhs)
	{
	  gsi_replace (gsi, gimple_build_nop (), false);
	  return true;
	}
      goto done;
    }
  else
    {
      /* We cannot (easily) change the type of the copy if it is a storage
	 order barrier, i.e. is equivalent to a VIEW_CONVERT_EXPR that can
	 modify the storage order of objects (see storage_order_barrier_p).  */
      tree srctype
	= POINTER_TYPE_P (TREE_TYPE (src))
	  ? TREE_TYPE (TREE_TYPE (src)) : NULL_TREE;
      tree desttype
	= POINTER_TYPE_P (TREE_TYPE (dest))
	  ? TREE_TYPE (TREE_TYPE (dest)) : NULL_TREE;
      tree destvar, srcvar, srcoff;
      unsigned int src_align, dest_align;
      unsigned HOST_WIDE_INT tmp_len;
      const char *tmp_str;

      /* Build accesses at offset zero with a ref-all character type.  */
      tree off0
	= build_int_cst (build_pointer_type_for_mode (char_type_node,
						      ptr_mode, true), 0);

      /* If we can perform the copy efficiently with first doing all loads
	 and then all stores inline it that way.  Currently efficiently
	 means that we can load all the memory into a single integer
	 register which is what MOVE_MAX gives us.  */
      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (tree_fits_uhwi_p (len)
	  && compare_tree_int (len, MOVE_MAX) <= 0
	  /* FIXME: Don't transform copies from strings with known length.
	     Until GCC 9 this prevented a case in gcc.dg/strlenopt-8.c
	     from being handled, and the case was XFAILed for that reason.
	     Now that it is handled and the XFAIL removed, as soon as other
	     strlenopt tests that rely on it for passing are adjusted, this
	     hack can be removed.  */
	  && !c_strlen (src, 1)
	  && !((tmp_str = getbyterep (src, &tmp_len)) != NULL
	       && memchr (tmp_str, 0, tmp_len) == NULL)
	  && !(srctype
	       && AGGREGATE_TYPE_P (srctype)
	       && TYPE_REVERSE_STORAGE_ORDER (srctype))
	  && !(desttype
	       && AGGREGATE_TYPE_P (desttype)
	       && TYPE_REVERSE_STORAGE_ORDER (desttype)))
	{
	  unsigned ilen = tree_to_uhwi (len);
	  if (pow2p_hwi (ilen))
	    {
	      /* Detect out-of-bounds accesses without issuing warnings.
		 Avoid folding out-of-bounds copies but to avoid false
		 positives for unreachable code defer warning until after
		 DCE has worked its magic.
		 -Wrestrict is still diagnosed.  */
	      if (int warning = check_bounds_or_overlap (as_a <gcall *>(stmt),
							 dest, src, len, len,
							 false, false))
		if (warning != OPT_Wrestrict)
		  return false;

	      scalar_int_mode mode;
	      tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
	      if (type
		  && is_a <scalar_int_mode> (TYPE_MODE (type), &mode)
		  && GET_MODE_SIZE (mode) * BITS_PER_UNIT == ilen * 8
		  /* If the destination pointer is not aligned we must be able
		     to emit an unaligned store.  */
		  && (dest_align >= GET_MODE_ALIGNMENT (mode)
		      || !targetm.slow_unaligned_access (mode, dest_align)
		      || (optab_handler (movmisalign_optab, mode)
			  != CODE_FOR_nothing)))
		{
		  tree srctype = type;
		  tree desttype = type;
		  if (src_align < GET_MODE_ALIGNMENT (mode))
		    srctype = build_aligned_type (type, src_align);
		  tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
		  tree tem = fold_const_aggregate_ref (srcmem);
		  if (tem)
		    srcmem = tem;
		  else if (src_align < GET_MODE_ALIGNMENT (mode)
			   && targetm.slow_unaligned_access (mode, src_align)
			   && (optab_handler (movmisalign_optab, mode)
			       == CODE_FOR_nothing))
		    srcmem = NULL_TREE;
		  if (srcmem)
		    {
		      gimple *new_stmt;
		      if (is_gimple_reg_type (TREE_TYPE (srcmem)))
			{
			  new_stmt = gimple_build_assign (NULL_TREE, srcmem);
			  srcmem
			    = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem),
							  new_stmt);
			  gimple_assign_set_lhs (new_stmt, srcmem);
			  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
			}
		      if (dest_align < GET_MODE_ALIGNMENT (mode))
			desttype = build_aligned_type (type, dest_align);
		      new_stmt
			= gimple_build_assign (fold_build2 (MEM_REF, desttype,
							    dest, off0),
					       srcmem);
		      gimple_move_vops (new_stmt, stmt);
		      if (!lhs)
			{
			  gsi_replace (gsi, new_stmt, false);
			  return true;
			}
		      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
		      goto done;
		    }
		}
	    }
	}

      if (code == BUILT_IN_MEMMOVE)
	{
	  /* Both DEST and SRC must be pointer types.
	     ??? This is what old code did.  Is the testing for pointer types
	     really mandatory?

	     If either SRC is readonly or length is 1, we can use memcpy.  */
	  if (!dest_align || !src_align)
	    return false;
	  if (readonly_data_expr (src)
	      || (tree_fits_uhwi_p (len)
		  && (MIN (src_align, dest_align) / BITS_PER_UNIT
		      >= tree_to_uhwi (len))))
	    {
	      tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
	      if (!fn)
		return false;
	      gimple_call_set_fndecl (stmt, fn);
	      gimple_call_set_arg (stmt, 0, dest);
	      gimple_call_set_arg (stmt, 1, src);
	      fold_stmt (gsi);
	      return true;
	    }

	  /* If *src and *dest can't overlap, optimize into memcpy as well.  */
	  if (TREE_CODE (src) == ADDR_EXPR
	      && TREE_CODE (dest) == ADDR_EXPR)
	    {
	      tree src_base, dest_base, fn;
	      poly_int64 src_offset = 0, dest_offset = 0;
	      poly_uint64 maxsize;

	      srcvar = TREE_OPERAND (src, 0);
	      src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
	      if (src_base == NULL)
		src_base = srcvar;
	      destvar = TREE_OPERAND (dest, 0);
	      dest_base = get_addr_base_and_unit_offset (destvar,
							 &dest_offset);
	      if (dest_base == NULL)
		dest_base = destvar;
	      if (!poly_int_tree_p (len, &maxsize))
		maxsize = -1;
	      if (SSA_VAR_P (src_base)
		  && SSA_VAR_P (dest_base))
		{
		  if (operand_equal_p (src_base, dest_base, 0)
		      && ranges_maybe_overlap_p (src_offset, maxsize,
						 dest_offset, maxsize))
		    return false;
		}
	      else if (TREE_CODE (src_base) == MEM_REF
		       && TREE_CODE (dest_base) == MEM_REF)
		{
		  if (! operand_equal_p (TREE_OPERAND (src_base, 0),
					 TREE_OPERAND (dest_base, 0), 0))
		    return false;
		  poly_offset_int full_src_offset
		    = mem_ref_offset (src_base) + src_offset;
		  poly_offset_int full_dest_offset
		    = mem_ref_offset (dest_base) + dest_offset;
		  if (ranges_maybe_overlap_p (full_src_offset, maxsize,
					      full_dest_offset, maxsize))
		    return false;
		}
	      else
		return false;

	      fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
	      if (!fn)
		return false;
	      gimple_call_set_fndecl (stmt, fn);
	      gimple_call_set_arg (stmt, 0, dest);
	      gimple_call_set_arg (stmt, 1, src);
	      fold_stmt (gsi);
	      return true;
	    }

	  /* If the destination and source do not alias optimize into
	     memcpy as well.  */
	  if ((is_gimple_min_invariant (dest)
	       || TREE_CODE (dest) == SSA_NAME)
	      && (is_gimple_min_invariant (src)
		  || TREE_CODE (src) == SSA_NAME))
	    {
	      ao_ref destr, srcr;
	      ao_ref_init_from_ptr_and_size (&destr, dest, len);
	      ao_ref_init_from_ptr_and_size (&srcr, src, len);
	      if (!refs_may_alias_p_1 (&destr, &srcr, false))
		{
		  tree fn;
		  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
		  if (!fn)
		    return false;
		  gimple_call_set_fndecl (stmt, fn);
		  gimple_call_set_arg (stmt, 0, dest);
		  gimple_call_set_arg (stmt, 1, src);
		  fold_stmt (gsi);
		  return true;
		}
	    }

	  return false;
	}

      if (!tree_fits_shwi_p (len))
	return false;
      if (!srctype
	  || (AGGREGATE_TYPE_P (srctype)
	      && TYPE_REVERSE_STORAGE_ORDER (srctype)))
	return false;
      if (!desttype
	  || (AGGREGATE_TYPE_P (desttype)
	      && TYPE_REVERSE_STORAGE_ORDER (desttype)))
	return false;
      /* In the following try to find a type that is most natural to be
	 used for the memcpy source and destination and that allows
	 the most optimization when memcpy is turned into a plain assignment
	 using that type.  In theory we could always use a char[len] type
	 but that only gains us that the destination and source possibly
	 no longer will have their address taken.  */
      if (TREE_CODE (srctype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
	srctype = TREE_TYPE (srctype);
      if (TREE_CODE (desttype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
	desttype = TREE_TYPE (desttype);
      if (TREE_ADDRESSABLE (srctype)
	  || TREE_ADDRESSABLE (desttype))
	return false;

      /* Make sure we are not copying using a floating-point mode or
	 a type whose size possibly does not match its precision.  */
      if (FLOAT_MODE_P (TYPE_MODE (desttype))
	  || TREE_CODE (desttype) == BOOLEAN_TYPE
	  || TREE_CODE (desttype) == ENUMERAL_TYPE)
	desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
      if (FLOAT_MODE_P (TYPE_MODE (srctype))
	  || TREE_CODE (srctype) == BOOLEAN_TYPE
	  || TREE_CODE (srctype) == ENUMERAL_TYPE)
	srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
      if (!srctype)
	srctype = desttype;
      if (!desttype)
	desttype = srctype;
      if (!srctype)
	return false;

      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);

      /* Choose between src and destination type for the access based
	 on alignment, whether the access constitutes a register access
	 and whether it may actually expose a declaration for SSA rewrite
	 or SRA decomposition.  Also try to expose a string constant, we
	 might be able to concatenate several of them later into a single
	 string store.  */
      destvar = NULL_TREE;
      srcvar = NULL_TREE;
      if (TREE_CODE (dest) == ADDR_EXPR
	  && var_decl_component_p (TREE_OPERAND (dest, 0))
	  && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len)
	  && dest_align >= TYPE_ALIGN (desttype)
	  && (is_gimple_reg_type (desttype)
	      || src_align >= TYPE_ALIGN (desttype)))
	destvar = fold_build2 (MEM_REF, desttype, dest, off0);
      else if (TREE_CODE (src) == ADDR_EXPR
	       && var_decl_component_p (TREE_OPERAND (src, 0))
	       && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len)
	       && src_align >= TYPE_ALIGN (srctype)
	       && (is_gimple_reg_type (srctype)
		   || dest_align >= TYPE_ALIGN (srctype)))
	srcvar = fold_build2 (MEM_REF, srctype, src, off0);
      /* FIXME: Don't transform copies from strings with known original length.
	 As soon as strlenopt tests that rely on it for passing are adjusted,
	 this hack can be removed.  */
      else if (gimple_call_alloca_for_var_p (stmt)
	       && (srcvar = string_constant (src, &srcoff, NULL, NULL))
	       && integer_zerop (srcoff)
	       && tree_int_cst_equal (TYPE_SIZE_UNIT (TREE_TYPE (srcvar)), len)
	       && dest_align >= TYPE_ALIGN (TREE_TYPE (srcvar)))
	srctype = TREE_TYPE (srcvar);
      else
	return false;

      /* Now that we chose an access type express the other side in
	 terms of it if the target allows that with respect to alignment
	 constraints.  */
      if (srcvar == NULL_TREE)
	{
	  if (src_align >= TYPE_ALIGN (desttype))
	    srcvar = fold_build2 (MEM_REF, desttype, src, off0);
	  else
	    {
	      if (STRICT_ALIGNMENT)
		return false;
	      srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
					    src_align);
	      srcvar = fold_build2 (MEM_REF, srctype, src, off0);
	    }
	}
      else if (destvar == NULL_TREE)
	{
	  if (dest_align >= TYPE_ALIGN (srctype))
	    destvar = fold_build2 (MEM_REF, srctype, dest, off0);
	  else
	    {
	      if (STRICT_ALIGNMENT)
		return false;
	      desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
					     dest_align);
	      destvar = fold_build2 (MEM_REF, desttype, dest, off0);
	    }
	}

      /* Same as above, detect out-of-bounds accesses without issuing
	 warnings.  Avoid folding out-of-bounds copies but to avoid
	 false positives for unreachable code defer warning until
	 after DCE has worked its magic.
	 -Wrestrict is still diagnosed.  */
      if (int warning = check_bounds_or_overlap (as_a <gcall *>(stmt),
						 dest, src, len, len,
						 false, false))
	if (warning != OPT_Wrestrict)
	  return false;

      gimple *new_stmt;
      if (is_gimple_reg_type (TREE_TYPE (srcvar)))
	{
	  tree tem = fold_const_aggregate_ref (srcvar);
	  if (tem)
	    srcvar = tem;
	  if (! is_gimple_min_invariant (srcvar))
	    {
	      new_stmt = gimple_build_assign (NULL_TREE, srcvar);
	      srcvar = create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar),
						   new_stmt);
	      gimple_assign_set_lhs (new_stmt, srcvar);
	      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
	      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
	    }
	  new_stmt = gimple_build_assign (destvar, srcvar);
	  goto set_vop_and_replace;
	}

      /* We get an aggregate copy.  If the source is a STRING_CST, then
	 directly use its type to perform the copy.  */
      if (TREE_CODE (srcvar) == STRING_CST)
	desttype = srctype;
      /* Or else, use an unsigned char[] type to perform the copy in order
	 to preserve padding and to avoid any issues with TREE_ADDRESSABLE
	 types or float modes behavior on copying.  */
      else
	{
	  desttype = build_array_type_nelts (unsigned_char_type_node,
					     tree_to_uhwi (len));
	  srctype = desttype;
	  if (src_align > TYPE_ALIGN (srctype))
	    srctype = build_aligned_type (srctype, src_align);
	  srcvar = fold_build2 (MEM_REF, srctype, src, off0);
	}

      if (dest_align > TYPE_ALIGN (desttype))
	desttype = build_aligned_type (desttype, dest_align);
      destvar = fold_build2 (MEM_REF, desttype, dest, off0);
      new_stmt = gimple_build_assign (destvar, srcvar);

set_vop_and_replace:
      gimple_move_vops (new_stmt, stmt);
      if (!lhs)
	{
	  gsi_replace (gsi, new_stmt, false);
	  return true;
	}
      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
    }

done:
  gimple_seq stmts = NULL;
  if (code == BUILT_IN_MEMCPY || code == BUILT_IN_MEMMOVE)
    len = NULL_TREE;
  else if (code == BUILT_IN_MEMPCPY)
    {
      len = gimple_convert_to_ptrofftype (&stmts, loc, len);
      dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
			   TREE_TYPE (dest), dest, len);
    }
  else
    gcc_unreachable ();

  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gimple *repl = gimple_build_assign (lhs, dest);
  gsi_replace (gsi, repl, false);
  return true;
}
/* Transform a call to built-in bcmp (a, b, len) at *GSI into one
   to built-in memcmp (a, b, len).  */

static bool
gimple_fold_builtin_bcmp (gimple_stmt_iterator *gsi)
{
  tree fn = builtin_decl_implicit (BUILT_IN_MEMCMP);

  if (!fn)
    return false;

  /* Transform bcmp (a, b, len) into memcmp (a, b, len).  */

  gimple *stmt = gsi_stmt (*gsi);
  tree a = gimple_call_arg (stmt, 0);
  tree b = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  gimple *repl = gimple_build_call (fn, 3, a, b, len);
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}
/* Transform a call to built-in bcopy (src, dest, len) at *GSI into one
   to built-in memmove (dest, src, len).  */

static bool
gimple_fold_builtin_bcopy (gimple_stmt_iterator *gsi)
{
  tree fn = builtin_decl_implicit (BUILT_IN_MEMMOVE);

  if (!fn)
    return false;

  /* bcopy has been removed from POSIX in Issue 7 but Issue 6 specifies
     it's equivalent to memmove (not memcpy).  Transform bcopy (src, dest,
     len) into memmove (dest, src, len).  */

  gimple *stmt = gsi_stmt (*gsi);
  tree src = gimple_call_arg (stmt, 0);
  tree dest = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  gimple_call_set_fntype (as_a <gcall *> (stmt), TREE_TYPE (fn));
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}
/* Transform a call to built-in bzero (dest, len) at *GSI into one
   to built-in memset (dest, 0, len).  */

static bool
gimple_fold_builtin_bzero (gimple_stmt_iterator *gsi)
{
  tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);

  if (!fn)
    return false;

  /* Transform bzero (dest, len) into memset (dest, 0, len).  */

  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree len = gimple_call_arg (stmt, 1);

  gimple_seq seq = NULL;
  gimple *repl = gimple_build_call (fn, 3, dest, integer_zero_node, len);
  gimple_seq_add_stmt_without_update (&seq, repl);
  gsi_replace_with_seq_vops (gsi, seq);
  fold_stmt (gsi);

  return true;
}
/* Fold function call to builtin memset or bzero at *GSI setting the
   memory of size LEN to VAL.  Return whether a simplification was made.  */
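/* For example (a sketch, not from the original sources), with a
   4-byte, suitably aligned destination

     memset (&i, 1, sizeof (int));

   becomes the single store

     i = 0x01010101;

   where the stored value is built below by replicating the byte C
   across the word.  */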
static bool
gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree etype;
  unsigned HOST_WIDE_INT length, cval;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
      return true;
    }

  if (! tree_fits_uhwi_p (len))
    return false;

  if (TREE_CODE (c) != INTEGER_CST)
    return false;

  tree dest = gimple_call_arg (stmt, 0);
  tree var = dest;
  if (TREE_CODE (var) != ADDR_EXPR)
    return false;

  var = TREE_OPERAND (var, 0);
  if (TREE_THIS_VOLATILE (var))
    return false;

  etype = TREE_TYPE (var);
  if (TREE_CODE (etype) == ARRAY_TYPE)
    etype = TREE_TYPE (etype);

  if (!INTEGRAL_TYPE_P (etype)
      && !POINTER_TYPE_P (etype))
    return false;

  if (! var_decl_component_p (var))
    return false;

  length = tree_to_uhwi (len);
  if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (etype)) != length
      || (GET_MODE_PRECISION (SCALAR_INT_TYPE_MODE (etype))
	  != GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (etype)))
      || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
    return false;

  if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
    return false;

  if (!type_has_mode_precision_p (etype))
    etype = lang_hooks.types.type_for_mode (SCALAR_INT_TYPE_MODE (etype),
					    TYPE_UNSIGNED (etype));

  if (integer_zerop (c))
    cval = 0;
  else
    {
      if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
	return false;

      cval = TREE_INT_CST_LOW (c);
      cval &= 0xff;
      cval |= cval << 8;
      cval |= cval << 16;
      cval |= (cval << 31) << 1;
    }

  var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
  gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
  gimple_move_vops (store, stmt);
  gsi_insert_before (gsi, store, GSI_SAME_STMT);
  if (gimple_call_lhs (stmt))
    {
      gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
      gsi_replace (gsi, asgn, false);
    }
  else
    {
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_next (gsi);
      gsi_remove (&gsi2, true);
    }

  return true;
}
/* Helper of get_range_strlen for ARG that is not an SSA_NAME.  */

static bool
get_range_strlen_tree (tree arg, bitmap *visited, strlen_range_kind rkind,
		       c_strlen_data *pdata, unsigned eltsize)
{
  gcc_assert (TREE_CODE (arg) != SSA_NAME);

  /* The length computed by this invocation of the function.  */
  tree val = NULL_TREE;

  /* True if VAL is an optimistic (tight) bound determined from
     the size of the character array in which the string may be
     stored.  In that case, the computed VAL is used to set
     PDATA->MAXBOUND.  */
  bool tight_bound = false;

  /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
  if (TREE_CODE (arg) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
    {
      tree op = TREE_OPERAND (arg, 0);
      if (integer_zerop (TREE_OPERAND (op, 1)))
	{
	  tree aop0 = TREE_OPERAND (op, 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_range_strlen (TREE_OPERAND (aop0, 0), visited, rkind,
				     pdata, eltsize);
	}
      else if (TREE_CODE (TREE_OPERAND (op, 0)) == COMPONENT_REF
	       && rkind == SRK_LENRANGE)
	{
	  /* Fail if an array is the last member of a struct object
	     since it could be treated as a (fake) flexible array
	     member.  */
	  tree idx = TREE_OPERAND (op, 1);

	  arg = TREE_OPERAND (op, 0);
	  tree optype = TREE_TYPE (arg);
	  if (tree dom = TYPE_DOMAIN (optype))
	    if (tree bound = TYPE_MAX_VALUE (dom))
	      if (TREE_CODE (bound) == INTEGER_CST
		  && TREE_CODE (idx) == INTEGER_CST
		  && tree_int_cst_lt (bound, idx))
		return false;
	}
    }

  if (rkind == SRK_INT_VALUE)
    {
      /* We are computing the maximum value (not string length).  */
      val = arg;
      if (TREE_CODE (val) != INTEGER_CST
	  || tree_int_cst_sgn (val) < 0)
	return false;
    }
  else
    {
      c_strlen_data lendata = { };
      val = c_strlen (arg, 1, &lendata, eltsize);

      if (!val && lendata.decl)
	{
	  /* ARG refers to an unterminated const character array
	     DATA.DECL with size DATA.LEN.  */
	  val = lendata.minlen;
	  pdata->decl = lendata.decl;
	}
    }

  /* Set if VAL represents the maximum length based on array size (set
     when exact length cannot be determined).  */
  bool maxbound = false;

  if (!val && rkind == SRK_LENRANGE)
    {
      if (TREE_CODE (arg) == ADDR_EXPR)
	return get_range_strlen (TREE_OPERAND (arg, 0), visited, rkind,
				 pdata, eltsize);

      if (TREE_CODE (arg) == ARRAY_REF)
	{
	  tree optype = TREE_TYPE (TREE_OPERAND (arg, 0));

	  /* Determine the "innermost" array type.  */
	  while (TREE_CODE (optype) == ARRAY_TYPE
		 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
	    optype = TREE_TYPE (optype);

	  /* Avoid arrays of pointers.  */
	  tree eltype = TREE_TYPE (optype);
	  if (TREE_CODE (optype) != ARRAY_TYPE
	      || !INTEGRAL_TYPE_P (eltype))
	    return false;

	  /* Fail when the array bound is unknown or zero.  */
	  val = TYPE_SIZE_UNIT (optype);
	  if (!val
	      || TREE_CODE (val) != INTEGER_CST
	      || integer_zerop (val))
	    return false;

	  val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
			     integer_one_node);

	  /* Set the minimum size to zero since the string in
	     the array could have zero length.  */
	  pdata->minlen = ssize_int (0);

	  tight_bound = true;
	}
      else if (TREE_CODE (arg) == COMPONENT_REF
	       && (TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1)))
		   == ARRAY_TYPE))
	{
	  /* Use the type of the member array to determine the upper
	     bound on the length of the array.  This may be overly
	     optimistic if the array itself isn't NUL-terminated and
	     the caller relies on the subsequent member to contain
	     the NUL but that would only be considered valid if
	     the array were the last member of a struct.  */

	  tree fld = TREE_OPERAND (arg, 1);

	  tree optype = TREE_TYPE (fld);

	  /* Determine the "innermost" array type.  */
	  while (TREE_CODE (optype) == ARRAY_TYPE
		 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
	    optype = TREE_TYPE (optype);

	  /* Fail when the array bound is unknown or zero.  */
	  val = TYPE_SIZE_UNIT (optype);
	  if (!val
	      || TREE_CODE (val) != INTEGER_CST
	      || integer_zerop (val))
	    return false;
	  val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
			     integer_one_node);

	  /* Set the minimum size to zero since the string in
	     the array could have zero length.  */
	  pdata->minlen = ssize_int (0);

	  /* The array size determined above is an optimistic bound
	     on the length.  If the array isn't nul-terminated the
	     length computed by the library function would be greater.
	     Even though using strlen to cross the subobject boundary
	     is undefined, avoid drawing conclusions from the member
	     type about the length here.  */
	  tight_bound = true;
	}
      else if (TREE_CODE (arg) == MEM_REF
	       && TREE_CODE (TREE_TYPE (arg)) == ARRAY_TYPE
	       && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == INTEGER_TYPE
	       && TREE_CODE (TREE_OPERAND (arg, 0)) == ADDR_EXPR)
	{
	  /* Handle a MEM_REF into a DECL accessing an array of integers,
	     being conservative about references to extern structures with
	     flexible array members that can be initialized to arbitrary
	     numbers of elements as an extension (static structs are okay).
	     FIXME: Make this less conservative -- see
	     component_ref_size in tree.c.  */
	  tree ref = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if ((TREE_CODE (ref) == PARM_DECL || VAR_P (ref))
	      && (decl_binds_to_current_def_p (ref)
		  || !array_at_struct_end_p (arg)))
	    {
	      /* Fail if the offset is out of bounds.  Such accesses
		 should be diagnosed at some point.  */
	      val = DECL_SIZE_UNIT (ref);
	      if (!val
		  || TREE_CODE (val) != INTEGER_CST
		  || integer_zerop (val))
		return false;

	      poly_offset_int psiz = wi::to_offset (val);
	      poly_offset_int poff = mem_ref_offset (arg);
	      if (known_le (psiz, poff))
		return false;

	      pdata->minlen = ssize_int (0);

	      /* Subtract the offset and one for the terminating nul.  */
	      psiz -= poff;
	      psiz -= 1;
	      val = wide_int_to_tree (TREE_TYPE (val), psiz);
	      /* Since VAL reflects the size of a declared object
		 rather than the type of the access it is not a tight
		 bound.  */
	    }
	}
      else if (TREE_CODE (arg) == PARM_DECL || VAR_P (arg))
	{
	  /* Avoid handling pointers to arrays.  GCC might misuse
	     a pointer to an array of one bound to point to an array
	     object of a greater bound.  */
	  tree argtype = TREE_TYPE (arg);
	  if (TREE_CODE (argtype) == ARRAY_TYPE)
	    {
	      val = TYPE_SIZE_UNIT (argtype);
	      if (!val
		  || TREE_CODE (val) != INTEGER_CST
		  || integer_zerop (val))
		return false;
	      val = wide_int_to_tree (TREE_TYPE (val),
				      wi::sub (wi::to_wide (val), 1));

	      /* Set the minimum size to zero since the string in
		 the array could have zero length.  */
	      pdata->minlen = ssize_int (0);
	    }
	}
      maxbound = true;
    }

  if (!val)
    return false;

  /* Adjust the lower bound on the string length as necessary.  */
  if (!pdata->minlen
      || (rkind != SRK_STRLEN
	  && TREE_CODE (pdata->minlen) == INTEGER_CST
	  && TREE_CODE (val) == INTEGER_CST
	  && tree_int_cst_lt (val, pdata->minlen)))
    pdata->minlen = val;

  if (pdata->maxbound && TREE_CODE (pdata->maxbound) == INTEGER_CST)
    {
      /* Adjust the tighter (more optimistic) string length bound
	 if necessary and proceed to adjust the more conservative
	 bound.  */
      if (TREE_CODE (val) == INTEGER_CST)
	{
	  if (tree_int_cst_lt (pdata->maxbound, val))
	    pdata->maxbound = val;
	}
      else
	pdata->maxbound = val;
    }
  else if (pdata->maxbound || maxbound)
    /* Set PDATA->MAXBOUND only if it either isn't INTEGER_CST or
       if VAL corresponds to the maximum length determined based
       on the type of the object.  */
    pdata->maxbound = val;

  if (tight_bound)
    {
      /* VAL computed above represents an optimistically tight bound
	 on the length of the string based on the referenced object's
	 or subobject's type.  Determine the conservative upper bound
	 based on the enclosing object's size if possible.  */
      if (rkind == SRK_LENRANGE)
	{
	  poly_int64 offset;
	  tree base = get_addr_base_and_unit_offset (arg, &offset);
	  if (!base)
	    {
	      /* When the call above fails due to a non-constant offset
		 assume the offset is zero and use the size of the whole
		 enclosing object instead.  */
	      base = get_base_address (arg);
	      offset = 0;
	    }
	  /* If the base object is a pointer no upper bound on the length
	     can be determined.  Otherwise the maximum length is equal to
	     the size of the enclosing object minus the offset of
	     the referenced subobject minus 1 (for the terminating nul).  */
	  tree type = TREE_TYPE (base);
	  if (TREE_CODE (type) == POINTER_TYPE
	      || (TREE_CODE (base) != PARM_DECL && !VAR_P (base))
	      || !(val = DECL_SIZE_UNIT (base)))
	    val = build_all_ones_cst (size_type_node);
	  else
	    {
	      val = DECL_SIZE_UNIT (base);
	      val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
				 size_int (offset + 1));
	    }
	}
      else
	return false;
    }

  if (pdata->maxlen)
    {
      /* Adjust the more conservative bound if possible/necessary
	 and fail otherwise.  */
      if (rkind != SRK_STRLEN)
	{
	  if (TREE_CODE (pdata->maxlen) != INTEGER_CST
	      || TREE_CODE (val) != INTEGER_CST)
	    return false;

	  if (tree_int_cst_lt (pdata->maxlen, val))
	    pdata->maxlen = val;
	  return true;
	}
      else if (simple_cst_equal (val, pdata->maxlen) != 1)
	{
	  /* Fail if the length of this ARG is different from that
	     previously determined from another ARG.  */
	  return false;
	}
    }

  pdata->maxlen = val;
  return rkind == SRK_LENRANGE || !integer_all_onesp (val);
}
/* For an ARG referencing one or more strings, try to obtain the range
   of their lengths, or the size of the largest array ARG refers to if
   the range of lengths cannot be determined, and store all in *PDATA.
   For an integer ARG (when RKIND == SRK_INT_VALUE), try to determine
   the maximum constant value.
   If ARG is an SSA_NAME, follow its use-def chains.  When RKIND ==
   SRK_STRLEN, then if PDATA->MAXLEN is not equal to the determined
   length or if we are unable to determine the length, return false.
   VISITED is a bitmap of visited variables.
   RKIND determines the kind of value or range to obtain (see
   strlen_range_kind).
   Set PDATA->DECL if ARG refers to an unterminated constant array.
   On input, set ELTSIZE to 1 for normal single byte character strings,
   and either 2 or 4 for wide character strings (the size of wchar_t).
   Return true if *PDATA was successfully populated and false otherwise.  */

static bool
get_range_strlen (tree arg, bitmap *visited,
		  strlen_range_kind rkind,
		  c_strlen_data *pdata, unsigned eltsize)
{
  if (TREE_CODE (arg) != SSA_NAME)
    return get_range_strlen_tree (arg, visited, rkind, pdata, eltsize);

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  */
  if (!*visited)
    *visited = BITMAP_ALLOC (NULL);
  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
    return true;

  tree var = arg;
  gimple *def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
      case GIMPLE_ASSIGN:
	/* The RHS of the statement defining VAR must either have a
	   constant length or come from another SSA_NAME with a constant
	   length.  */
	if (gimple_assign_single_p (def_stmt)
	    || gimple_assign_unary_nop_p (def_stmt))
	  {
	    tree rhs = gimple_assign_rhs1 (def_stmt);
	    return get_range_strlen (rhs, visited, rkind, pdata, eltsize);
	  }
	else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
	  {
	    tree ops[2] = { gimple_assign_rhs2 (def_stmt),
			    gimple_assign_rhs3 (def_stmt) };

	    for (unsigned int i = 0; i < 2; i++)
	      if (!get_range_strlen (ops[i], visited, rkind, pdata, eltsize))
		{
		  if (rkind != SRK_LENRANGE)
		    return false;
		  /* Set the upper bound to the maximum to prevent
		     it from being adjusted in the next iteration but
		     leave MINLEN and the more conservative MAXBOUND
		     determined so far alone (or leave them null if
		     they haven't been set yet).  That the MINLEN is
		     in fact zero can be determined from MAXLEN being
		     unbounded but the discovered minimum is used for
		     diagnostics.  */
		  pdata->maxlen = build_all_ones_cst (size_type_node);
		}
	    return true;
	  }
	return false;

      case GIMPLE_PHI:
	/* Unless RKIND == SRK_LENRANGE, all arguments of the PHI node
	   must have a constant length.  */
	for (unsigned i = 0; i < gimple_phi_num_args (def_stmt); i++)
	  {
	    tree arg = gimple_phi_arg (def_stmt, i)->def;

	    /* If this PHI has itself as an argument, we cannot
	       determine the string length of this argument.  However,
	       if we can find a constant string length for the other
	       PHI args then we can still be sure that this is a
	       constant string length.  So be optimistic and just
	       continue with the next argument.  */
	    if (arg == gimple_phi_result (def_stmt))
	      continue;

	    if (!get_range_strlen (arg, visited, rkind, pdata, eltsize))
	      {
		if (rkind != SRK_LENRANGE)
		  return false;
		/* Set the upper bound to the maximum to prevent
		   it from being adjusted in the next iteration but
		   leave MINLEN and the more conservative MAXBOUND
		   determined so far alone (or leave them null if
		   they haven't been set yet).  That the MINLEN is
		   in fact zero can be determined from MAXLEN being
		   unbounded but the discovered minimum is used for
		   diagnostics.  */
		pdata->maxlen = build_all_ones_cst (size_type_node);
	      }
	  }
	return true;

      default:
	return false;
    }
}
/* Try to obtain the range of the lengths of the string(s) referenced
   by ARG, or the size of the largest array ARG refers to if the range
   of lengths cannot be determined, and store all in *PDATA which must
   be zero-initialized on input except PDATA->MAXBOUND may be set to
   a non-null tree node other than INTEGER_CST to request to have it
   set to the length of the longest string in a PHI.  ELTSIZE is
   the expected size of the string element in bytes: 1 for char and
   some power of 2 for wide characters.
   Return true if the range [PDATA->MINLEN, PDATA->MAXLEN] is suitable
   for optimization.  Returning false means that a nonzero PDATA->MINLEN
   doesn't reflect the true lower bound of the range when PDATA->MAXLEN
   is -1 (in that case, the actual range is indeterminate, i.e.,
   [0, PTRDIFF_MAX - 2]).  */
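/* Illustrative example (not from the original sources): for

     char buf[8];
     const char *s = cond ? "ab" : buf;

   the computed range for the length of S would be PDATA->MINLEN = 0
   and PDATA->MAXLEN = 7, the latter derived from the size of BUF
   minus 1 for the terminating nul.  */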
bool
get_range_strlen (tree arg, c_strlen_data *pdata, unsigned eltsize)
{
  bitmap visited = NULL;
  tree maxbound = pdata->maxbound;

  if (!get_range_strlen (arg, &visited, SRK_LENRANGE, pdata, eltsize))
    {
      /* On failure extend the length range to an impossible maximum
	 (a valid MAXLEN must be less than PTRDIFF_MAX - 1).  Other
	 members can stay unchanged regardless.  */
      pdata->minlen = ssize_int (0);
      pdata->maxlen = build_all_ones_cst (size_type_node);
    }
  else if (!pdata->minlen)
    pdata->minlen = ssize_int (0);

  /* If it's unchanged from its initial non-null value, set the conservative
     MAXBOUND to SIZE_MAX.  Otherwise leave it null (if it is null).  */
  if (maxbound && pdata->maxbound == maxbound)
    pdata->maxbound = build_all_ones_cst (size_type_node);

  if (visited)
    BITMAP_FREE (visited);

  return !integer_all_onesp (pdata->maxlen);
}
/* Return the maximum value for ARG given RKIND (see strlen_range_kind).
   For ARG of pointer types, NONSTR indicates if the caller is prepared
   to handle unterminated strings.  For integer ARG and when RKIND ==
   SRK_INT_VALUE, NONSTR must be null.

   If an unterminated array is discovered and our caller handles
   unterminated arrays, then bubble up the offending DECL and
   return the maximum size.  Otherwise return NULL.  */

static tree
get_maxval_strlen (tree arg, strlen_range_kind rkind, tree *nonstr = NULL)
{
  /* A non-null NONSTR is meaningless when determining the maximum
     value of an integer ARG.  */
  gcc_assert (rkind != SRK_INT_VALUE || nonstr == NULL);
  /* ARG must have an integral type when RKIND says so.  */
  gcc_assert (rkind != SRK_INT_VALUE || INTEGRAL_TYPE_P (TREE_TYPE (arg)));

  bitmap visited = NULL;

  /* Reset DATA.MAXLEN if the call fails or when DATA.MAXLEN
     is unbounded.  */
  c_strlen_data lendata = { };
  if (!get_range_strlen (arg, &visited, rkind, &lendata, /* eltsize = */1))
    lendata.maxlen = NULL_TREE;
  else if (lendata.maxlen && integer_all_onesp (lendata.maxlen))
    lendata.maxlen = NULL_TREE;

  if (visited)
    BITMAP_FREE (visited);

  if (nonstr)
    {
      /* For callers prepared to handle unterminated arrays set
	 *NONSTR to point to the declaration of the array and return
	 the maximum length/size.  */
      *nonstr = lendata.decl;
      return lendata.maxlen;
    }

  /* Fail if the constant array isn't nul-terminated.  */
  return lendata.decl ? NULL_TREE : lendata.maxlen;
}
/* Fold function call to builtin strcpy with arguments DEST and SRC.
   If LEN is not NULL, it represents the length of the string to be
   copied.  Return false if no simplification can be made.  */
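/* For example (a sketch, not from the original sources), when the
   source has the known constant length 3,

     strcpy (d, "abc");

   is folded below into the fixed-size copy

     memcpy (d, "abc", 4);

   where the length includes the terminating nul.  */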
static bool
gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
			    tree dest, tree src)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (operand_equal_p (src, dest, 0))
    {
      /* Issue -Wrestrict unless the pointers are null (those do
	 not point to objects and so do not indicate an overlap;
	 such calls could be the result of sanitization and jump
	 threading).  */
      if (!integer_zerop (dest) && !gimple_no_warning_p (stmt))
	{
	  tree func = gimple_call_fndecl (stmt);

	  warning_at (loc, OPT_Wrestrict,
		      "%qD source argument is the same as destination",
		      func);
	}

      replace_call_with_value (gsi, dest);
      return true;
    }

  if (optimize_function_for_size_p (cfun))
    return false;

  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  /* Set to non-null if ARG refers to an unterminated array.  */
  tree nonstr = NULL;
  tree len = get_maxval_strlen (src, SRK_STRLEN, &nonstr);

  if (nonstr)
    {
      /* Avoid folding calls with unterminated arrays.  */
      if (!gimple_no_warning_p (stmt))
	warn_string_no_nul (loc, NULL_TREE, "strcpy", src, nonstr);
      gimple_set_no_warning (stmt, true);
      return false;
    }

  if (!len)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
   If SLEN is not NULL, it represents the length of the source string.
   Return false if no simplification can be made.  */
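/* A sketch (not from the original sources): with a source of known
   length 3,

     strncpy (d, "abc", 4);

   safely becomes

     memcpy (d, "abc", 4);

   whereas strncpy (d, "abc", 5) is left alone here because the
   trailing zero padding would be lost.  */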
1900 gimple_fold_builtin_strncpy (gimple_stmt_iterator
*gsi
,
1901 tree dest
, tree src
, tree len
)
1903 gimple
*stmt
= gsi_stmt (*gsi
);
1904 location_t loc
= gimple_location (stmt
);
1905 bool nonstring
= get_attr_nonstring_decl (dest
) != NULL_TREE
;
1907 /* If the LEN parameter is zero, return DEST. */
1908 if (integer_zerop (len
))
1910 /* Avoid warning if the destination refers to an array/pointer
1911 decorate with attribute nonstring. */
1914 tree fndecl
= gimple_call_fndecl (stmt
);
1916 /* Warn about the lack of nul termination: the result is not
1917 a (nul-terminated) string. */
1918 tree slen
= get_maxval_strlen (src
, SRK_STRLEN
);
1919 if (slen
&& !integer_zerop (slen
))
1920 warning_at (loc
, OPT_Wstringop_truncation
,
1921 "%G%qD destination unchanged after copying no bytes "
1922 "from a string of length %E",
1923 stmt
, fndecl
, slen
);
1925 warning_at (loc
, OPT_Wstringop_truncation
,
1926 "%G%qD destination unchanged after copying no bytes",
1930 replace_call_with_value (gsi
, dest
);
1934 /* We can't compare slen with len as constants below if len is not a
1936 if (TREE_CODE (len
) != INTEGER_CST
)
1939 /* Now, we must be passed a constant src ptr parameter. */
1940 tree slen
= get_maxval_strlen (src
, SRK_STRLEN
);
1941 if (!slen
|| TREE_CODE (slen
) != INTEGER_CST
)
1944 /* The size of the source string including the terminating nul. */
1945 tree ssize
= size_binop_loc (loc
, PLUS_EXPR
, slen
, ssize_int (1));
1947 /* We do not support simplification of this case, though we do
1948 support it when expanding trees into RTL. */
1949 /* FIXME: generate a call to __builtin_memset. */
1950 if (tree_int_cst_lt (ssize
, len
))
1953 /* Diagnose truncation that leaves the copy unterminated. */
1954 maybe_diag_stxncpy_trunc (*gsi
, src
, len
);
1956 /* OK transform into builtin memcpy. */
1957 tree fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
1961 len
= fold_convert_loc (loc
, size_type_node
, len
);
1962 len
= force_gimple_operand_gsi (gsi
, len
, true,
1963 NULL_TREE
, true, GSI_SAME_STMT
);
1964 gimple
*repl
= gimple_build_call (fn
, 3, dest
, src
, len
);
1965 replace_call_with_call_and_fold (gsi
, repl
);
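/* Added commentary (illustrative, not from the original source): with
   strlen (src) == 4, a call such as

     strncpy (dst, src, 3);

   satisfies LEN <= SSIZE (3 <= 5) and becomes memcpy (dst, src, 3)
   after maybe_diag_stxncpy_trunc diagnoses the unterminated result,
   while strncpy (dst, src, 8) is left alone because the required zero
   padding would need an additional memset (see the FIXME above).  */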
/* Fold function call to builtin strchr or strrchr.
   If both arguments are constant, evaluate and fold the result,
   otherwise simplify str(r)chr (str, 0) into str + strlen (str).
   In general strlen is significantly faster than strchr
   due to being a simpler operation.  */

static bool
gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree str = gimple_call_arg (stmt, 0);
  tree c = gimple_call_arg (stmt, 1);
  location_t loc = gimple_location (stmt);
  const char *p;
  char ch;

  if (!gimple_call_lhs (stmt))
    return false;

  /* Avoid folding if the first argument is not a nul-terminated array.
     Defer warning until later.  */
  if (!check_nul_terminated_array (NULL_TREE, str))
    return false;

  if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
    {
      const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);

      if (p1 == NULL)
	{
	  replace_call_with_value (gsi, integer_zero_node);
	  return true;
	}

      tree len = build_int_cst (size_type_node, p1 - p);
      gimple_seq stmts = NULL;
      gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
					      POINTER_PLUS_EXPR, str, len);
      gimple_seq_add_stmt_without_update (&stmts, new_stmt);
      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  if (!integer_zerop (c))
    return false;

  /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size.  */
  if (is_strrchr && optimize_function_for_size_p (cfun))
    {
      tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);

      if (strchr_fn)
	{
	  gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}

      return false;
    }

  tree len;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);

  if (!strlen_fn)
    return false;

  /* Create newstr = strlen (str).  */
  gimple_seq stmts = NULL;
  gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
  gimple_set_location (new_stmt, loc);
  len = create_tmp_reg_or_ssa_name (size_type_node);
  gimple_call_set_lhs (new_stmt, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);

  /* Create (str p+ strlen (str)).  */
  new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
				  POINTER_PLUS_EXPR, str, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);
  gsi_replace_with_seq_vops (gsi, stmts);
  /* gsi now points at the assignment to the lhs, get a
     stmt iterator to the strlen.
     ???  We can't use gsi_for_stmt as that doesn't work when the
     CFG isn't built yet.  */
  gimple_stmt_iterator gsi2 = *gsi;
  gsi_prev (&gsi2);
  fold_stmt (&gsi2);
  return true;
}
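/* Added commentary (illustrative, not from the original source):
   searching for the terminating nul, e.g.

     p = strchr (s, 0);    // likewise strrchr (s, 0)

   becomes

     tmp = strlen (s);
     p = s p+ tmp;         // POINTER_PLUS_EXPR

   since the nul can only be found at the end of the string.  */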
/* Fold function call to builtin strstr.
   If both arguments are constant, evaluate and fold the result,
   additionally fold strstr (x, "") into x and strstr (x, "c")
   into strchr (x, 'c').  */

static bool
gimple_fold_builtin_strstr (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  if (!gimple_call_lhs (stmt))
    return false;

  tree haystack = gimple_call_arg (stmt, 0);
  tree needle = gimple_call_arg (stmt, 1);

  /* Avoid folding if either argument is not a nul-terminated array.
     Defer warning until later.  */
  if (!check_nul_terminated_array (NULL_TREE, haystack)
      || !check_nul_terminated_array (NULL_TREE, needle))
    return false;

  const char *q = c_getstr (needle);
  if (q == NULL)
    return false;

  if (const char *p = c_getstr (haystack))
    {
      const char *r = strstr (p, q);

      if (r == NULL)
	{
	  replace_call_with_value (gsi, integer_zero_node);
	  return true;
	}

      tree len = build_int_cst (size_type_node, r - p);
      gimple_seq stmts = NULL;
      gimple *new_stmt
	= gimple_build_assign (gimple_call_lhs (stmt), POINTER_PLUS_EXPR,
			       haystack, len);
      gimple_seq_add_stmt_without_update (&stmts, new_stmt);
      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* For strstr (x, "") return x.  */
  if (q[0] == '\0')
    {
      replace_call_with_value (gsi, haystack);
      return true;
    }

  /* Transform strstr (x, "c") into strchr (x, 'c').  */
  if (q[1] == '\0')
    {
      tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
      if (strchr_fn)
	{
	  tree c = build_int_cst (integer_type_node, q[0]);
	  gimple *repl = gimple_build_call (strchr_fn, 2, haystack, c);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  return false;
}
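/* Added commentary (illustrative, not from the original source): for a
   single-character needle, strstr (x, "c") becomes strchr (x, 'c'),
   and strstr (x, "") folds to x itself, since every string contains
   the empty string at offset zero.  */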
/* Simplify a call to the strcat builtin.  DST and SRC are the arguments
   to the call.

   Return false if no simplification was possible; otherwise replace the
   call with a more efficient equivalent (a constant, or a strlen plus
   memcpy pair) and return true.  */

static bool
gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);

  const char *p = c_getstr (src);

  /* If the string length is zero, return the dst parameter.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
    return false;

  /* See if we can store by pieces into (dst + strlen(dst)).  */
  tree newdst;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
  tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);

  if (!strlen_fn || !memcpy_fn)
    return false;

  /* If the length of the source string isn't computable don't
     split strcat into strlen and memcpy.  */
  tree len = get_maxval_strlen (src, SRK_STRLEN);
  if (! len)
    return false;

  /* Create strlen (dst).  */
  gimple_seq stmts = NULL, stmts2;
  gimple *repl = gimple_build_call (strlen_fn, 1, dst);
  gimple_set_location (repl, loc);
  newdst = create_tmp_reg_or_ssa_name (size_type_node);
  gimple_call_set_lhs (repl, newdst);
  gimple_seq_add_stmt_without_update (&stmts, repl);

  /* Create (dst p+ strlen (dst)).  */
  newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
  newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len,
			build_int_cst (size_type_node, 1));
  len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
  gimple_seq_add_stmt_without_update (&stmts, repl);
  if (gimple_call_lhs (stmt))
    {
      repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      gsi_replace_with_seq_vops (gsi, stmts);
      /* gsi now points at the assignment to the lhs, get a
	 stmt iterator to the memcpy call.
	 ???  We can't use gsi_for_stmt as that doesn't work when the
	 CFG isn't built yet.  */
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (&gsi2);
      fold_stmt (&gsi2);
    }
  else
    {
      gsi_replace_with_seq_vops (gsi, stmts);
      fold_stmt (gsi);
    }
  return true;
}
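/* Added commentary (illustrative sketch, not from the original source):
   with strlen (src) == 3, the split performed above turns

     strcat (dst, src);

   into

     tmp = strlen (dst);
     memcpy (dst p+ tmp, src, 4);

   so only the destination length remains to be computed at run time.  */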
/* Fold a call to the __strcat_chk builtin.  DEST, SRC, and SIZE
   are the arguments to the call.  */

static bool
gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree size = gimple_call_arg (stmt, 2);
  tree fn;
  const char *p;

  p = c_getstr (src);
  /* If the SRC parameter is "", return DEST.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
    return false;

  /* If __builtin_strcat_chk is used, assume strcat is available.  */
  fn = builtin_decl_explicit (BUILT_IN_STRCAT);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 2, dest, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Simplify a call to the strncat builtin.  */

static bool
gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dst = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  const char *p = c_getstr (src);

  /* If the requested length is zero, or the src parameter string
     length is zero, return the dst parameter.  */
  if (integer_zerop (len) || (p && *p == '\0'))
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  if (TREE_CODE (len) != INTEGER_CST || !p)
    return false;

  unsigned srclen = strlen (p);

  int cmpsrc = compare_tree_int (len, srclen);

  /* Return early if the requested len is less than the string length.
     Warnings will be issued elsewhere later.  */
  if (cmpsrc < 0)
    return false;

  unsigned HOST_WIDE_INT dstsize;

  bool nowarn = gimple_no_warning_p (stmt);

  if (!nowarn && compute_builtin_object_size (dst, 1, &dstsize))
    {
      int cmpdst = compare_tree_int (len, dstsize);

      if (cmpdst >= 0)
	{
	  tree fndecl = gimple_call_fndecl (stmt);

	  /* Strncat copies (at most) LEN bytes and always appends
	     the terminating NUL so the specified bound should never
	     be equal to (or greater than) the size of the destination.
	     If it is, the copy could overflow.  */
	  location_t loc = gimple_location (stmt);
	  nowarn = warning_at (loc, OPT_Wstringop_overflow_,
			       cmpdst == 0
			       ? G_("%G%qD specified bound %E equals "
				    "destination size")
			       : G_("%G%qD specified bound %E exceeds "
				    "destination size %wu"),
			       stmt, fndecl, len, dstsize);
	  if (nowarn)
	    gimple_set_no_warning (stmt, true);
	}
    }

  if (!nowarn && cmpsrc == 0)
    {
      tree fndecl = gimple_call_fndecl (stmt);
      location_t loc = gimple_location (stmt);

      /* To avoid possible overflow the specified bound should also
	 not be equal to the length of the source, even when the size
	 of the destination is unknown (it's not an uncommon mistake
	 to specify as the bound to strncpy the length of the source).  */
      if (warning_at (loc, OPT_Wstringop_overflow_,
		      "%G%qD specified bound %E equals source length",
		      stmt, fndecl, len))
	gimple_set_no_warning (stmt, true);
    }

  tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);

  /* If the replacement _DECL isn't initialized, don't do the
     transformation.  */
  if (!fn)
    return false;

  /* Otherwise, emit a call to strcat.  */
  gcall *repl = gimple_build_call (fn, 2, dst, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
   LEN, and SIZE.  */

static bool
gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);
  tree size = gimple_call_arg (stmt, 3);
  tree fn;
  const char *p;

  p = c_getstr (src);
  /* If the SRC parameter is "" or if LEN is 0, return DEST.  */
  if ((p && *p == '\0')
      || integer_zerop (len))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  if (! integer_all_onesp (size))
    {
      tree src_len = c_strlen (src, 1);
      if (src_len
	  && tree_fits_uhwi_p (src_len)
	  && tree_fits_uhwi_p (len)
	  && ! tree_int_cst_lt (len, src_len))
	{
	  /* If LEN >= strlen (SRC), optimize into __strcat_chk.  */
	  fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
	  if (!fn)
	    return false;

	  gimple *repl = gimple_build_call (fn, 3, dest, src, size);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
      return false;
    }

  /* If __builtin_strncat_chk is used, assume strncat is available.  */
  fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Build and append gimple statements to STMTS that would load a first
   character of a memory location identified by STR.  LOC is location
   of the statement.  */

static tree
gimple_load_first_char (location_t loc, tree str, gimple_seq *stmts)
{
  tree var;

  tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
  tree cst_uchar_ptr_node
    = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
  tree off0 = build_int_cst (cst_uchar_ptr_node, 0);

  tree temp = fold_build2_loc (loc, MEM_REF, cst_uchar_node, str, off0);
  gassign *stmt = gimple_build_assign (NULL_TREE, temp);
  var = create_tmp_reg_or_ssa_name (cst_uchar_node, stmt);

  gimple_assign_set_lhs (stmt, var);
  gimple_seq_add_stmt_without_update (stmts, stmt);

  return var;
}
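/* Added commentary (illustrative, not from the original source): the
   helper above emits the GIMPLE equivalent of

     var = *(const unsigned char *) str;

   which the string-compare folder below uses to reduce comparisons
   against "" or with bound 1 to single character loads.  */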
/* Fold a call to the str{n}{case}cmp builtin pointed by GSI iterator.  */

static bool
gimple_fold_builtin_string_compare (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree callee = gimple_call_fndecl (stmt);
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);

  tree type = integer_type_node;
  tree str1 = gimple_call_arg (stmt, 0);
  tree str2 = gimple_call_arg (stmt, 1);
  tree lhs = gimple_call_lhs (stmt);

  tree bound_node = NULL_TREE;
  unsigned HOST_WIDE_INT bound = HOST_WIDE_INT_M1U;

  /* Handle strncmp and strncasecmp functions.  */
  if (gimple_call_num_args (stmt) == 3)
    {
      bound_node = gimple_call_arg (stmt, 2);
      if (tree_fits_uhwi_p (bound_node))
	bound = tree_to_uhwi (bound_node);
    }

  /* If the BOUND parameter is zero, return zero.  */
  if (bound == 0)
    {
      replace_call_with_value (gsi, integer_zero_node);
      return true;
    }

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (str1, str2, 0))
    {
      replace_call_with_value (gsi, integer_zero_node);
      return true;
    }

  /* Initially set to the number of characters, including the terminating
     nul if each array has one.  LENx == strnlen (Sx, LENx) implies that
     the array Sx is not terminated by a nul.
     For nul-terminated strings then adjusted to their length so that
     LENx == NULPOSx holds.  */
  unsigned HOST_WIDE_INT len1 = HOST_WIDE_INT_MAX, len2 = len1;
  const char *p1 = getbyterep (str1, &len1);
  const char *p2 = getbyterep (str2, &len2);

  /* The position of the terminating nul character if one exists, otherwise
     a value greater than LENx.  */
  unsigned HOST_WIDE_INT nulpos1 = HOST_WIDE_INT_MAX, nulpos2 = nulpos1;

  if (p1)
    {
      size_t n = strnlen (p1, len1);
      if (n < len1)
	len1 = nulpos1 = n;
    }

  if (p2)
    {
      size_t n = strnlen (p2, len2);
      if (n < len2)
	len2 = nulpos2 = n;
    }

  /* For known strings, return an immediate value.  */
  if (p1 && p2)
    {
      int r = 0;
      bool known_result = false;

      switch (fcode)
	{
	case BUILT_IN_STRCMP:
	case BUILT_IN_STRCMP_EQ:
	  if (len1 != nulpos1 || len2 != nulpos2)
	    break;

	  r = strcmp (p1, p2);
	  known_result = true;
	  break;

	case BUILT_IN_STRNCMP:
	case BUILT_IN_STRNCMP_EQ:
	  {
	    if (bound == HOST_WIDE_INT_M1U)
	      break;

	    /* Reduce the bound to be no more than the length
	       of the shorter of the two strings, or the sizes
	       of the unterminated arrays.  */
	    unsigned HOST_WIDE_INT n = bound;

	    if (len1 == nulpos1 && len1 < n)
	      n = len1 + 1;
	    if (len2 == nulpos2 && len2 < n)
	      n = len2 + 1;

	    if (MIN (nulpos1, nulpos2) + 1 < n)
	      break;

	    r = strncmp (p1, p2, n);
	    known_result = true;
	    break;
	  }
	/* Only handleable situation is where the strings are equal (result 0),
	   which is already handled by operand_equal_p case.  */
	case BUILT_IN_STRCASECMP:
	  break;
	case BUILT_IN_STRNCASECMP:
	  {
	    if (bound == HOST_WIDE_INT_M1U)
	      break;
	    r = strncmp (p1, p2, bound);
	    if (r == 0)
	      known_result = true;
	    break;
	  }
	default:
	  gcc_unreachable ();
	}

      if (known_result)
	{
	  replace_call_with_value (gsi, build_cmp_result (type, r));
	  return true;
	}
    }

  bool nonzero_bound = (bound >= 1 && bound < HOST_WIDE_INT_M1U)
		       || fcode == BUILT_IN_STRCMP
		       || fcode == BUILT_IN_STRCMP_EQ
		       || fcode == BUILT_IN_STRCASECMP;

  location_t loc = gimple_location (stmt);

  /* If the second arg is "", return *(const unsigned char*)arg1.  */
  if (p2 && *p2 == '\0' && nonzero_bound)
    {
      gimple_seq stmts = NULL;
      tree var = gimple_load_first_char (loc, str1, &stmts);
      if (lhs)
	{
	  stmt = gimple_build_assign (lhs, NOP_EXPR, var);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);
	}

      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* If the first arg is "", return -*(const unsigned char*)arg2.  */
  if (p1 && *p1 == '\0' && nonzero_bound)
    {
      gimple_seq stmts = NULL;
      tree var = gimple_load_first_char (loc, str2, &stmts);

      if (lhs)
	{
	  tree c = create_tmp_reg_or_ssa_name (integer_type_node);
	  stmt = gimple_build_assign (c, NOP_EXPR, var);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);

	  stmt = gimple_build_assign (lhs, NEGATE_EXPR, c);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);
	}

      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* If BOUND is one, return an expression corresponding to
     (*(const unsigned char*)arg2 - *(const unsigned char*)arg1).  */
  if (fcode == BUILT_IN_STRNCMP && bound == 1)
    {
      gimple_seq stmts = NULL;
      tree temp1 = gimple_load_first_char (loc, str1, &stmts);
      tree temp2 = gimple_load_first_char (loc, str2, &stmts);

      if (lhs)
	{
	  tree c1 = create_tmp_reg_or_ssa_name (integer_type_node);
	  gassign *convert1 = gimple_build_assign (c1, NOP_EXPR, temp1);
	  gimple_seq_add_stmt_without_update (&stmts, convert1);

	  tree c2 = create_tmp_reg_or_ssa_name (integer_type_node);
	  gassign *convert2 = gimple_build_assign (c2, NOP_EXPR, temp2);
	  gimple_seq_add_stmt_without_update (&stmts, convert2);

	  stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);
	}

      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* If BOUND is greater than the length of one constant string,
     and the other argument is also a nul-terminated string, replace
     strncmp with strcmp.  */
  if (fcode == BUILT_IN_STRNCMP
      && bound > 0 && bound < HOST_WIDE_INT_M1U
      && ((p2 && len2 < bound && len2 == nulpos2)
	  || (p1 && len1 < bound && len1 == nulpos1)))
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCMP);
      if (!fn)
	return false;
      gimple *repl = gimple_build_call (fn, 2, str1, str2);
      replace_call_with_call_and_fold (gsi, repl);
      return true;
    }

  return false;
}
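/* Added commentary (illustrative, not from the original source):
   strcmp (s, "") reduces to *(const unsigned char *) s, strncmp with
   BOUND == 1 reduces to the difference of the two first characters,
   and strncmp (s, "abc", 8) becomes strcmp (s, "abc") because the
   constant string is nul-terminated and shorter than the bound.  */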
/* Fold a call to the memchr pointed by GSI iterator.  */

static bool
gimple_fold_builtin_memchr (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  tree arg1 = gimple_call_arg (stmt, 0);
  tree arg2 = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
      return true;
    }

  char c;
  if (TREE_CODE (arg2) != INTEGER_CST
      || !tree_fits_uhwi_p (len)
      || !target_char_cst_p (arg2, &c))
    return false;

  unsigned HOST_WIDE_INT length = tree_to_uhwi (len);
  unsigned HOST_WIDE_INT string_length;
  const char *p1 = getbyterep (arg1, &string_length);

  if (p1 == NULL)
    return false;

  const char *r = (const char *)memchr (p1, c, MIN (length, string_length));
  if (r == NULL)
    {
      tree mem_size, offset_node;
      byte_representation (arg1, &offset_node, &mem_size, NULL);
      unsigned HOST_WIDE_INT offset = (offset_node == NULL_TREE)
				      ? 0 : tree_to_uhwi (offset_node);
      /* MEM_SIZE is the size of the array the string literal
	 is stored in.  */
      unsigned HOST_WIDE_INT string_size = tree_to_uhwi (mem_size) - offset;
      gcc_checking_assert (string_length <= string_size);
      if (length <= string_size)
	{
	  replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
	  return true;
	}
    }
  else
    {
      unsigned HOST_WIDE_INT offset = r - p1;
      gimple_seq stmts = NULL;
      if (lhs != NULL_TREE)
	{
	  tree offset_cst = build_int_cst (TREE_TYPE (len), offset);
	  gassign *stmt = gimple_build_assign (lhs, POINTER_PLUS_EXPR,
					       arg1, offset_cst);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);
	}
      else
	gimple_seq_add_stmt_without_update (&stmts,
					    gimple_build_nop ());

      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  return false;
}
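/* Added commentary (illustrative, not from the original source): with
   a constant haystack, memchr ("abc", 'b', 3) folds to the pointer
   expression arg1 p+ 1, while a search that cannot succeed within the
   known object, e.g. memchr ("abc", 'z', 3), folds to a null pointer.  */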
/* Fold a call to the fputs builtin.  ARG0 and ARG1 are the arguments
   to the call.  UNLOCKED is true if this is actually a call to
   fputs_unlocked.  Return false if no simplification was possible,
   true otherwise.  */

static bool
gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
			   tree arg0, tree arg1,
			   bool unlocked)
{
  gimple *stmt = gsi_stmt (*gsi);

  /* If we're using an unlocked function, assume the other unlocked
     functions exist explicitly.  */
  tree const fn_fputc = (unlocked
			 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
			 : builtin_decl_implicit (BUILT_IN_FPUTC));
  tree const fn_fwrite = (unlocked
			  ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
			  : builtin_decl_implicit (BUILT_IN_FWRITE));

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt))
    return false;

  /* Get the length of the string passed to fputs.  If the length
     can't be determined, punt.  */
  tree len = get_maxval_strlen (arg0, SRK_STRLEN);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return false;

  switch (compare_tree_int (len, 1))
    {
    case -1: /* length is 0, delete the call entirely.  */
      replace_call_with_value (gsi, integer_zero_node);
      return true;

    case 0: /* length is 1, call fputc.  */
      {
	const char *p = c_getstr (arg0);
	if (p != NULL)
	  {
	    if (!fn_fputc)
	      return false;

	    gimple *repl = gimple_build_call (fn_fputc, 2,
					      build_int_cst
					      (integer_type_node, p[0]), arg1);
	    replace_call_with_call_and_fold (gsi, repl);
	    return true;
	  }
      }
      /* FALLTHROUGH */
    case 1: /* length is greater than 1, call fwrite.  */
      {
	/* If optimizing for size keep fputs.  */
	if (optimize_function_for_size_p (cfun))
	  return false;
	/* New argument list transforming fputs(string, stream) to
	   fwrite(string, 1, len, stream).  */
	if (!fn_fwrite)
	  return false;

	gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
					  size_one_node, len, arg1);
	replace_call_with_call_and_fold (gsi, repl);
	return true;
      }
    default:
      gcc_unreachable ();
    }
}
/* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   DEST, SRC, LEN, and SIZE are the arguments to the call.  FCODE is
   the BUILT_IN_* code of the builtin.  Return false if no
   simplification was possible, true otherwise.  */

static bool
gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
				tree dest, tree src, tree len, tree size,
				enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST
     (resp. DEST+LEN for __mempcpy_chk).  */
  if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
    {
      if (fcode != BUILT_IN_MEMPCPY_CHK)
	{
	  replace_call_with_value (gsi, dest);
	  return true;
	}
      else
	{
	  gimple_seq stmts = NULL;
	  len = gimple_convert_to_ptrofftype (&stmts, loc, len);
	  tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
				    TREE_TYPE (dest), dest, len);
	  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
	  replace_call_with_value (gsi, temp);
	  return true;
	}
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
		{
		  /* (void) __mempcpy_chk () can be optimized into
		     (void) __memcpy_chk ().  */
		  fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
		  if (!fn)
		    return false;

		  gimple *repl = gimple_build_call (fn, 4, dest, src, len,
						    size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}
	      return false;
	    }
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  fn = NULL_TREE;
  /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
     mem{cpy,pcpy,move,set} is available.  */
  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
      break;
    case BUILT_IN_MEMPCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
      break;
    case BUILT_IN_MEMMOVE_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
      break;
    case BUILT_IN_MEMSET_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMSET);
      break;
    default:
      break;
    }

  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
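/* Added commentary (illustrative, not from the original source): when
   the object size is unknown (SIZE is all ones, i.e. (size_t) -1) or
   the bound is provably within it, a checked call such as

     __memcpy_chk (dst, src, n, os);

   is relaxed to plain memcpy (dst, src, n); a __mempcpy_chk whose
   result is unused is first downgraded to __memcpy_chk.  */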
/* Fold a call to the __st[rp]cpy_chk builtin.
   DEST, SRC, and SIZE are the arguments to the call.  FCODE is the
   BUILT_IN_* code of the builtin.  Return false if no simplification
   was possible, true otherwise.  */

static bool
gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
				tree dest,
				tree src, tree size,
				enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree len, fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
    {
      /* Issue -Wrestrict unless the pointers are null (those do
	 not point to objects and so do not indicate an overlap;
	 such calls could be the result of sanitization and jump
	 threading).  */
      if (!integer_zerop (dest) && !gimple_no_warning_p (stmt))
	{
	  tree func = gimple_call_fndecl (stmt);

	  warning_at (loc, OPT_Wrestrict,
		      "%qD source argument is the same as destination",
		      func);
	}

      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (src, SRK_STRLENMAX);
  if (! integer_all_onesp (size))
    {
      len = c_strlen (src, 1);
      if (! len || ! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_STPCPY_CHK)
		{
		  if (! ignore)
		    return false;

		  /* If return value of __stpcpy_chk is ignored,
		     optimize into __strcpy_chk.  */
		  fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
		  if (!fn)
		    return false;

		  gimple *repl = gimple_build_call (fn, 3, dest, src, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}

	      if (! len || TREE_SIDE_EFFECTS (len))
		return false;

	      /* If c_strlen returned something, but not a constant,
		 transform __strcpy_chk into __memcpy_chk.  */
	      fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	      if (!fn)
		return false;

	      gimple_seq stmts = NULL;
	      len = force_gimple_operand (len, &stmts, true, NULL_TREE);
	      len = gimple_convert (&stmts, loc, size_type_node, len);
	      len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
				  build_int_cst (size_type_node, 1));
	      gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
	      gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }
	}
      else
	maxlen = len;

      if (! tree_int_cst_lt (maxlen, size))
	return false;
    }

  /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
			      ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 2, dest, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold a call to the __st{r,p}ncpy_chk builtin.  DEST, SRC, LEN, and SIZE
   are the arguments to the call.  FCODE is the BUILT_IN_* code of the
   builtin.  Return false if no simplification was possible, true
   otherwise.  */

static bool
gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
				 tree dest, tree src,
				 tree len, tree size,
				 enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
    {
      /* If return value of __stpncpy_chk is ignored,
	 optimize into __strncpy_chk.  */
      fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
      if (fn)
	{
	  gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    return false;
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
			      ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold function call to builtin stpcpy with arguments DEST and SRC.
   Return false if no simplification can be made, true otherwise.  */

static bool
gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  location_t loc = gimple_location (stmt);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree fn, lenp1;

  /* If the result is unused, replace stpcpy with strcpy.  */
  if (gimple_call_lhs (stmt) == NULL_TREE)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;
      gimple_call_set_fndecl (stmt, fn);
      fold_stmt (gsi);
      return true;
    }

  /* Set to non-null if ARG refers to an unterminated array.  */
  c_strlen_data data = { };
  /* The size of the unterminated array if SRC refers to one.  */
  tree size;
  /* True if the size is exact/constant, false if it's the lower bound
     of a range.  */
  bool exact;
  tree len = c_strlen (src, 1, &data, 1);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    {
      data.decl = unterminated_array (src, &size, &exact);
      if (!data.decl)
	return false;
    }

  if (data.decl)
    {
      /* Avoid folding calls with unterminated arrays.  */
      if (!gimple_no_warning_p (stmt))
	warn_string_no_nul (loc, NULL_TREE, "stpcpy", src, data.decl, size,
			    exact);
      gimple_set_no_warning (stmt, true);
      return false;
    }

  if (optimize_function_for_size_p (cfun)
      /* If length is zero it's small enough.  */
      && !integer_zerop (len))
    return false;

  /* If the source has a known length replace stpcpy with memcpy.  */
  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  gimple_seq stmts = NULL;
  tree tem = gimple_convert (&stmts, loc, size_type_node, len);
  lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
			tem, build_int_cst (size_type_node, 1));
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
  gimple_move_vops (repl, stmt);
  gsi_insert_before (gsi, repl, GSI_SAME_STMT);
  /* Replace the result with dest + len.  */
  stmts = NULL;
  tem = gimple_convert (&stmts, loc, sizetype, len);
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
				      POINTER_PLUS_EXPR, dest, tem);
  gsi_replace (gsi, ret, false);
  /* Finally fold the memcpy call.  */
  gimple_stmt_iterator gsi2 = *gsi;
  gsi_prev (&gsi2);
  fold_stmt (&gsi2);
  return true;
}
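/* Added commentary (illustrative sketch, not from the original source):
   with strlen (src) == 3, the sequence built above is

     memcpy (dst, src, 4);
     res = dst p+ 3;

   which preserves stpcpy's return value, a pointer to the copied
   string's terminating nul.  */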
/* Fold a call to {,v}snprintf_chk.  Return false if a normal call should
   be emitted rather than simplifying the call inline.  FCODE is either
   BUILT_IN_SNPRINTF_CHK or BUILT_IN_VSNPRINTF_CHK.  */

static bool
gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
				  enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call.  */
  if (gimple_call_num_args (stmt) < 5)
    return false;

  dest = gimple_call_arg (stmt, 0);
  len = gimple_call_arg (stmt, 1);
  flag = gimple_call_arg (stmt, 2);
  size = gimple_call_arg (stmt, 3);
  fmt = gimple_call_arg (stmt, 4);

  if (! tree_fits_uhwi_p (size))
    return false;

  if (! integer_all_onesp (size))
    {
      tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    return false;
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  if (!init_target_chars ())
    return false;

  /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      fmt_str = c_getstr (fmt);
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
     available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
			      ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 5 arguments by 3 retaining
     trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, len);
  gimple_call_set_arg (stmt, 2, fmt);
  for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
/* Fold a call to __{,v}sprintf_chk.  Return false if a normal call
   should be emitted rather than simplifying the call inline.  FCODE is
   either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK.  */

static bool
gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
				 enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;
  unsigned nargs = gimple_call_num_args (stmt);

  /* Verify the required arguments in the original call.  */
  if (nargs < 4)
    return false;
  dest = gimple_call_arg (stmt, 0);
  flag = gimple_call_arg (stmt, 1);
  size = gimple_call_arg (stmt, 2);
  fmt = gimple_call_arg (stmt, 3);

  if (! tree_fits_uhwi_p (size))
    return false;

  len = NULL_TREE;

  if (!init_target_chars ())
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str != NULL)
    {
      /* If the format doesn't contain % args or %%, we know the size.  */
      if (strchr (fmt_str, target_percent) == 0)
	{
	  if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
	    len = build_int_cstu (size_type_node, strlen (fmt_str));
	}
      /* If the format is "%s" and first ... argument is a string literal,
	 we know the size too.  */
      else if (fcode == BUILT_IN_SPRINTF_CHK
	       && strcmp (fmt_str, target_percent_s) == 0)
	{
	  tree arg;

	  if (nargs == 5)
	    {
	      arg = gimple_call_arg (stmt, 4);
	      if (POINTER_TYPE_P (TREE_TYPE (arg)))
		{
		  len = c_strlen (arg, 1);
		  if (! len || ! tree_fits_uhwi_p (len))
		    len = NULL_TREE;
		}
	    }
	}
    }

  if (! integer_all_onesp (size))
    {
      if (! len || ! tree_int_cst_lt (len, size))
	return false;
    }

  /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
			      ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 4 arguments by 2 retaining
     trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, fmt);
  for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
/* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
   ORIG may be null if this is a 2-argument call.  We don't attempt to
   simplify calls with more than 3 arguments.

   Return true if simplification was possible, otherwise false.  */

static bool
gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree fmt = gimple_call_arg (stmt, 1);
  tree orig = NULL_TREE;
  const char *fmt_str = NULL;

  /* Verify the required arguments in the original call.  We deal with two
     types of sprintf() calls: 'sprintf (str, fmt)' and
     'sprintf (dest, "%s", orig)'.  */
  if (gimple_call_num_args (stmt) > 3)
    return false;

  if (gimple_call_num_args (stmt) == 3)
    orig = gimple_call_arg (stmt, 2);

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);

      if (!fn)
	return false;

      /* Don't optimize sprintf (buf, "abc", ptr++).  */
      if (orig)
	return false;

      /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
	 'format' is known to contain no % formats.  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, fmt);

      /* Propagate the NO_WARNING bit to avoid issuing the same
	 warning more than once.  */
      if (gimple_no_warning_p (stmt))
	gimple_set_no_warning (repl, true);

      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (tree lhs = gimple_call_lhs (stmt))
	{
	  repl = gimple_build_assign (lhs, build_int_cst (TREE_TYPE (lhs),
							  strlen (fmt_str)));
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  gsi_next (gsi);
	}
      return true;
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn;
      fn = builtin_decl_implicit (BUILT_IN_STRCPY);

      if (!fn)
	return false;

      /* Don't crash on sprintf (str1, "%s").  */
      if (!orig)
	return false;

      tree orig_len = NULL_TREE;
      if (gimple_call_lhs (stmt))
	{
	  orig_len = get_maxval_strlen (orig, SRK_STRLEN);
	  if (!orig_len)
	    return false;
	}

      /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2).  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, orig);

      /* Propagate the NO_WARNING bit to avoid issuing the same
	 warning more than once.  */
      if (gimple_no_warning_p (stmt))
	gimple_set_no_warning (repl, true);

      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (tree lhs = gimple_call_lhs (stmt))
	{
	  if (!useless_type_conversion_p (TREE_TYPE (lhs),
					  TREE_TYPE (orig_len)))
	    orig_len = fold_convert (TREE_TYPE (lhs), orig_len);
	  repl = gimple_build_assign (lhs, orig_len);
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  gsi_next (gsi);
	}
      return true;
    }
  return false;
}
/* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
   FMT, and ORIG.  ORIG may be null if this is a 3-argument call.  We don't
   attempt to simplify calls with more than 4 arguments.

   Return true if simplification was possible, otherwise false.  */

static bool
gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest = gimple_call_arg (stmt, 0);
  tree destsize = gimple_call_arg (stmt, 1);
  tree fmt = gimple_call_arg (stmt, 2);
  tree orig = NULL_TREE;
  const char *fmt_str = NULL;

  if (gimple_call_num_args (stmt) > 4)
    return false;

  if (gimple_call_num_args (stmt) == 4)
    orig = gimple_call_arg (stmt, 3);

  if (!tree_fits_uhwi_p (destsize))
    return false;
  unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;

      /* Don't optimize snprintf (buf, 4, "abc", ptr++).  */
      if (orig)
	return false;

      /* We could expand this as
	 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
	 or to
	 memcpy (str, fmt_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      size_t len = strlen (fmt_str);
      if (len >= destlen)
	return false;

      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, fmt);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (tree lhs = gimple_call_lhs (stmt))
	{
	  repl = gimple_build_assign (lhs,
				      build_int_cst (TREE_TYPE (lhs), len));
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  gsi_next (gsi);
	}
      return true;
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;

      /* Don't crash on snprintf (str1, cst, "%s").  */
      if (!orig)
	return false;

      tree orig_len = get_maxval_strlen (orig, SRK_STRLEN);
      if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
	return false;

      /* We could expand this as
	 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
	 or to
	 memcpy (str1, str2_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      if (compare_tree_int (orig_len, destlen) >= 0)
	return false;

      /* Convert snprintf (str1, cst, "%s", str2) into
	 strcpy (str1, str2) if strlen (str2) < cst.  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, orig);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (tree lhs = gimple_call_lhs (stmt))
	{
	  if (!useless_type_conversion_p (TREE_TYPE (lhs),
					  TREE_TYPE (orig_len)))
	    orig_len = fold_convert (TREE_TYPE (lhs), orig_len);
	  repl = gimple_build_assign (lhs, orig_len);
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  gsi_next (gsi);
	}
      return true;
    }
  return false;
}
/* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
   FP, FMT, and ARG are the arguments to the call.  We don't fold calls with
   more than 3 arguments, and ARG may be null in the 2-argument case.

   Return false if no simplification was possible, true otherwise.  FCODE
   is the BUILT_IN_* code of the function to be simplified.  */

static bool
gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
			     tree fp, tree fmt, tree arg,
			     enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree fn_fputc, fn_fputs;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt) != NULL_TREE)
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
      fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
    }
  else
    {
      fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
      fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
    }

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, fprintf can be replaced
     with fputs.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
	  && arg)
	return false;

      /* If the format specifier was "", fprintf does nothing.  */
      if (fmt_str[0] == '\0')
	{
	  replace_call_with_value (gsi, NULL_TREE);
	  return true;
	}

      /* When "string" doesn't contain %, replace all cases of
	 fprintf (fp, string) with fputs (string, fp).  The fputs
	 builtin will take care of special cases like length == 1.  */
      if (fn_fputs)
	{
	  gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
    return false;

  /* If the format specifier was "%s", call __builtin_fputs (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_s) == 0)
    {
      if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	return false;
      if (fn_fputs)
	{
	  gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* If the format specifier was "%c", call __builtin_fputc (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg
	  || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
	return false;
      if (fn_fputc)
	{
	  gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  return false;
}
/* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
   FMT and ARG are the arguments to the call; we don't fold cases with
   more than 2 arguments, and ARG may be null if this is a 1-argument case.

   Return false if no simplification was possible, true otherwise.  FCODE
   is the BUILT_IN_* code of the function to be simplified.  */

static bool
gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
			    tree arg, enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree fn_putchar, fn_puts, newarg;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt) != NULL_TREE)
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (fcode == BUILT_IN_PRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
      fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
    }
  else
    {
      fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
      fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
    }

  if (!init_target_chars ())
    return false;

  if (strcmp (fmt_str, target_percent_s) == 0
      || strchr (fmt_str, target_percent) == NULL)
    {
      const char *str;

      if (strcmp (fmt_str, target_percent_s) == 0)
	{
	  if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
	    return false;

	  if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	    return false;

	  str = c_getstr (arg);
	  if (str == NULL)
	    return false;
	}
      else
	{
	  /* The format specifier doesn't contain any '%' characters.  */
	  if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
	      && arg)
	    return false;
	  str = fmt_str;
	}

      /* If the string was "", printf does nothing.  */
      if (str[0] == '\0')
	{
	  replace_call_with_value (gsi, NULL_TREE);
	  return true;
	}

      /* If the string has length of 1, call putchar.  */
      if (str[1] == '\0')
	{
	  /* Given printf("c"), (where c is any one character,)
	     convert "c"[0] to an int and pass that to the replacement
	     function.  */
	  newarg = build_int_cst (integer_type_node, str[0]);
	  if (fn_putchar)
	    {
	      gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }
	}
      else
	{
	  /* If the string was "string\n", call puts("string").  */
	  size_t len = strlen (str);
	  if ((unsigned char)str[len - 1] == target_newline
	      && (size_t) (int) len == len)
	    {
	      char *newstr;

	      /* Create a NUL-terminated string that's one char shorter
		 than the original, stripping off the trailing '\n'.  */
	      newstr = xstrdup (str);
	      newstr[len - 1] = '\0';
	      newarg = build_string_literal (len, newstr);
	      free (newstr);
	      if (fn_puts)
		{
		  gcall *repl = gimple_build_call (fn_puts, 1, newarg);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}
	    }
	  else
	    /* We'd like to arrange to call fputs(string,stdout) here,
	       but we need stdout and don't have a way to get it yet.  */
	    return false;
	}
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
    return false;

  /* If the format specifier was "%s\n", call __builtin_puts(arg).  */
  else if (strcmp (fmt_str, target_percent_s_newline) == 0)
    {
      if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	return false;
      if (fn_puts)
	{
	  gcall *repl = gimple_build_call (fn_puts, 1, arg);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* If the format specifier was "%c", call __builtin_putchar(arg).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg || ! useless_type_conversion_p (integer_type_node,
					       TREE_TYPE (arg)))
	return false;
      if (fn_putchar)
	{
	  gcall *repl = gimple_build_call (fn_putchar, 1, arg);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  return false;
}
/* Fold a call to __builtin_strlen.  Fold the call to a constant if the
   length is known exactly, otherwise record the computed range of
   lengths on the result.  */

static bool
gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree arg = gimple_call_arg (stmt, 0);

  wide_int minlen;
  wide_int maxlen;

  c_strlen_data lendata = { };
  if (get_range_strlen (arg, &lendata, /* eltsize = */ 1)
      && !lendata.decl
      && lendata.minlen && TREE_CODE (lendata.minlen) == INTEGER_CST
      && lendata.maxlen && TREE_CODE (lendata.maxlen) == INTEGER_CST)
    {
      /* The range of lengths refers to either a single constant
	 string or to the longest and shortest constant string
	 referenced by the argument of the strlen() call, or to
	 the strings that can possibly be stored in the arrays
	 the argument refers to.  */
      minlen = wi::to_wide (lendata.minlen);
      maxlen = wi::to_wide (lendata.maxlen);
    }
  else
    {
      unsigned prec = TYPE_PRECISION (sizetype);
      minlen = wi::shwi (0, prec);
      maxlen = wi::to_wide (max_object_size (), prec) - 2;
    }

  if (minlen == maxlen)
    {
      /* Fold the strlen call to a constant.  */
      tree type = TREE_TYPE (lendata.minlen);
      tree len = force_gimple_operand_gsi (gsi,
					   wide_int_to_tree (type, minlen),
					   true, NULL, true, GSI_SAME_STMT);
      replace_call_with_value (gsi, len);
      return true;
    }

  /* Set the strlen() range to [0, MAXLEN].  */
  if (tree lhs = gimple_call_lhs (stmt))
    set_strlen_range (lhs, minlen, maxlen);

  return false;
}
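/* Added commentary (illustrative, not from the original source): if the
   argument is known to hold either "ab" or "xy", the range is [2, 2]
   and the call folds to the constant 2; for a range such as [2, 5] the
   call remains but the result is annotated with that range for later
   passes to use.  */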
/* Fold a call to __builtin_acc_on_device.  */

static bool
gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
{
  /* Defer folding until we know which compiler we're in.  */
  if (symtab->state != EXPANSION)
    return false;

  unsigned val_host = GOMP_DEVICE_HOST;
  unsigned val_dev = GOMP_DEVICE_NONE;

#ifdef ACCEL_COMPILER
  val_host = GOMP_DEVICE_NOT_HOST;
  val_dev = ACCEL_COMPILER_acc_device;
#endif

  location_t loc = gimple_location (gsi_stmt (*gsi));

  tree host_eq = make_ssa_name (boolean_type_node);
  gimple *host_ass = gimple_build_assign
    (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
  gimple_set_location (host_ass, loc);
  gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);

  tree dev_eq = make_ssa_name (boolean_type_node);
  gimple *dev_ass = gimple_build_assign
    (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
  gimple_set_location (dev_ass, loc);
  gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);

  tree result = make_ssa_name (boolean_type_node);
  gimple *result_ass = gimple_build_assign
    (result, BIT_IOR_EXPR, host_eq, dev_eq);
  gimple_set_location (result_ass, loc);
  gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);

  replace_call_with_value (gsi, result);

  return true;
}
/* Fold realloc (0, n) -> malloc (n).  */

static bool
gimple_fold_builtin_realloc (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree arg = gimple_call_arg (stmt, 0);
  tree size = gimple_call_arg (stmt, 1);

  if (operand_equal_p (arg, null_pointer_node, 0))
    {
      tree fn_malloc = builtin_decl_implicit (BUILT_IN_MALLOC);
      if (fn_malloc)
	{
	  gcall *repl = gimple_build_call (fn_malloc, 1, size);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }
  return false;
}
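/* Added commentary (illustrative, not from the original source):
   realloc (NULL, n) is defined to behave like malloc (n), so the fold
   above simply rebuilds the call with the size argument, letting later
   malloc-specific optimizations apply.  */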
3951 /* Fold the non-target builtin at *GSI and return whether any simplification
3955 gimple_fold_builtin (gimple_stmt_iterator
*gsi
)
3957 gcall
*stmt
= as_a
<gcall *> (gsi_stmt (*gsi));
  tree callee = gimple_call_fndecl (stmt);

  /* Give up for always_inline inline builtins until they are
     inlined.  */
  if (avoid_folding_inline_builtin (callee))
    return false;

  unsigned n = gimple_call_num_args (stmt);
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
  switch (fcode)
    {
    case BUILT_IN_BCMP:
      return gimple_fold_builtin_bcmp (gsi);
    case BUILT_IN_BCOPY:
      return gimple_fold_builtin_bcopy (gsi);
    case BUILT_IN_BZERO:
      return gimple_fold_builtin_bzero (gsi);

    case BUILT_IN_MEMSET:
      return gimple_fold_builtin_memset (gsi,
					 gimple_call_arg (stmt, 1),
					 gimple_call_arg (stmt, 2));
    case BUILT_IN_MEMCPY:
    case BUILT_IN_MEMPCPY:
    case BUILT_IN_MEMMOVE:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1), fcode);
    case BUILT_IN_SPRINTF_CHK:
    case BUILT_IN_VSPRINTF_CHK:
      return gimple_fold_builtin_sprintf_chk (gsi, fcode);
    case BUILT_IN_STRCAT_CHK:
      return gimple_fold_builtin_strcat_chk (gsi);
    case BUILT_IN_STRNCAT_CHK:
      return gimple_fold_builtin_strncat_chk (gsi);
    case BUILT_IN_STRLEN:
      return gimple_fold_builtin_strlen (gsi);
    case BUILT_IN_STRCPY:
      return gimple_fold_builtin_strcpy (gsi,
					 gimple_call_arg (stmt, 0),
					 gimple_call_arg (stmt, 1));
    case BUILT_IN_STRNCPY:
      return gimple_fold_builtin_strncpy (gsi,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2));
    case BUILT_IN_STRCAT:
      return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
					 gimple_call_arg (stmt, 1));
    case BUILT_IN_STRNCAT:
      return gimple_fold_builtin_strncat (gsi);
    case BUILT_IN_INDEX:
    case BUILT_IN_STRCHR:
      return gimple_fold_builtin_strchr (gsi, false);
    case BUILT_IN_RINDEX:
    case BUILT_IN_STRRCHR:
      return gimple_fold_builtin_strchr (gsi, true);
    case BUILT_IN_STRSTR:
      return gimple_fold_builtin_strstr (gsi);
    case BUILT_IN_STRCMP:
    case BUILT_IN_STRCMP_EQ:
    case BUILT_IN_STRCASECMP:
    case BUILT_IN_STRNCMP:
    case BUILT_IN_STRNCMP_EQ:
    case BUILT_IN_STRNCASECMP:
      return gimple_fold_builtin_string_compare (gsi);
    case BUILT_IN_MEMCHR:
      return gimple_fold_builtin_memchr (gsi);
    case BUILT_IN_FPUTS:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1), false);
    case BUILT_IN_FPUTS_UNLOCKED:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1), true);
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      return gimple_fold_builtin_memory_chk (gsi,
					     gimple_call_arg (stmt, 0),
					     gimple_call_arg (stmt, 1),
					     gimple_call_arg (stmt, 2),
					     gimple_call_arg (stmt, 3),
					     fcode);
    case BUILT_IN_STPCPY:
      return gimple_fold_builtin_stpcpy (gsi);
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      return gimple_fold_builtin_stxcpy_chk (gsi,
					     gimple_call_arg (stmt, 0),
					     gimple_call_arg (stmt, 1),
					     gimple_call_arg (stmt, 2),
					     fcode);
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      return gimple_fold_builtin_stxncpy_chk (gsi,
					      gimple_call_arg (stmt, 0),
					      gimple_call_arg (stmt, 1),
					      gimple_call_arg (stmt, 2),
					      gimple_call_arg (stmt, 3),
					      fcode);
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      return gimple_fold_builtin_snprintf_chk (gsi, fcode);

    case BUILT_IN_FPRINTF:
    case BUILT_IN_FPRINTF_UNLOCKED:
    case BUILT_IN_VFPRINTF:
      if (n == 2 || n == 3)
	return gimple_fold_builtin_fprintf (gsi,
					    gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1),
					    n == 3
					    ? gimple_call_arg (stmt, 2)
					    : NULL_TREE,
					    fcode);
      break;
    case BUILT_IN_FPRINTF_CHK:
    case BUILT_IN_VFPRINTF_CHK:
      if (n == 3 || n == 4)
	return gimple_fold_builtin_fprintf (gsi,
					    gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 2),
					    n == 4
					    ? gimple_call_arg (stmt, 3)
					    : NULL_TREE,
					    fcode);
      break;
    case BUILT_IN_PRINTF:
    case BUILT_IN_PRINTF_UNLOCKED:
    case BUILT_IN_VPRINTF:
      if (n == 1 || n == 2)
	return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
					   n == 2
					   ? gimple_call_arg (stmt, 1)
					   : NULL_TREE, fcode);
      break;
    case BUILT_IN_PRINTF_CHK:
    case BUILT_IN_VPRINTF_CHK:
      if (n == 2 || n == 3)
	return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
					   n == 3
					   ? gimple_call_arg (stmt, 2)
					   : NULL_TREE, fcode);
      break;
    case BUILT_IN_ACC_ON_DEVICE:
      return gimple_fold_builtin_acc_on_device (gsi,
						gimple_call_arg (stmt, 0));
    case BUILT_IN_REALLOC:
      return gimple_fold_builtin_realloc (gsi);

    default:;
    }

  /* Try the generic builtin folder.  */
  bool ignore = (gimple_call_lhs (stmt) == NULL);
  tree result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
	STRIP_NOPS (result);
      else
	result = fold_convert (gimple_call_return_type (stmt), result);
      if (!update_call_from_tree (gsi, result))
	gimplify_and_update_call_from_tree (gsi, result);
      return true;
    }

  return false;
}
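
/* As an illustration of the generic fallback above (example GIMPLE,
   not taken verbatim from any dump): a builtin call with constant
   arguments that none of the dispatched folders handles, such as

     x_1 = __builtin_sqrt (4.0);

   is constant-folded by fold_call_stmt, and the call statement is
   rewritten to

     x_1 = 2.0e+0;  */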
/* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
   function calls to constants, where possible.  */

static tree
fold_internal_goacc_dim (const gimple *call)
{
  int axis = oacc_get_ifn_dim_arg (call);
  int size = oacc_get_fn_dim_size (current_function_decl, axis);
  tree result = NULL_TREE;
  tree type = TREE_TYPE (gimple_call_lhs (call));

  switch (gimple_call_internal_fn (call))
    {
    case IFN_GOACC_DIM_POS:
      /* If the size is 1, we know the answer.  */
      if (size == 1)
	result = build_int_cst (type, 0);
      break;
    case IFN_GOACC_DIM_SIZE:
      /* If the size is not dynamic, we know the answer.  */
      if (size)
	result = build_int_cst (type, size);
      break;
    default:
      break;
    }

  return result;
}
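
/* For example (illustrative), in an OpenACC offload function whose
   gang dimension was fixed at 32 at compile time,

     _1 = GOACC_DIM_SIZE (0);   folds to   _1 = 32;

   and for a dimension statically known to have size 1,

     _2 = GOACC_DIM_POS (1);    folds to   _2 = 0;

   since the only possible position in a size-1 dimension is 0.  */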
/* Return true if stmt is __atomic_compare_exchange_N call which is suitable
   for conversion into ATOMIC_COMPARE_EXCHANGE if the second argument is
   &var where var is only addressable because of such calls.  */

bool
optimize_atomic_compare_exchange_p (gimple *stmt)
{
  if (gimple_call_num_args (stmt) != 6
      || !flag_inline_atomics
      || !optimize
      || sanitize_flags_p (SANITIZE_THREAD | SANITIZE_ADDRESS)
      || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
      || !gimple_vdef (stmt)
      || !gimple_vuse (stmt))
    return false;

  tree fndecl = gimple_call_fndecl (stmt);
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
      break;
    default:
      return false;
    }

  tree expected = gimple_call_arg (stmt, 1);
  if (TREE_CODE (expected) != ADDR_EXPR
      || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
    return false;

  tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
  if (!is_gimple_reg_type (etype)
      || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
      || TREE_THIS_VOLATILE (etype)
      || VECTOR_TYPE_P (etype)
      || TREE_CODE (etype) == COMPLEX_TYPE
      /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
	 might not preserve all the bits.  See PR71716.  */
      || SCALAR_FLOAT_TYPE_P (etype)
      || maybe_ne (TYPE_PRECISION (etype),
		   GET_MODE_BITSIZE (TYPE_MODE (etype))))
    return false;

  tree weak = gimple_call_arg (stmt, 3);
  if (!integer_zerop (weak) && !integer_onep (weak))
    return false;

  tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
  machine_mode mode = TYPE_MODE (itype);

  if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
      == CODE_FOR_nothing
      && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
    return false;

  if (maybe_ne (int_size_in_bytes (etype), GET_MODE_SIZE (mode)))
    return false;

  return true;
}
/* Fold
     r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
   into
     _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
     i = IMAGPART_EXPR <t>;
     r = (_Bool) i;
     e = REALPART_EXPR <t>;  */

void
fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree fndecl = gimple_call_fndecl (stmt);
  tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
  tree ctype = build_complex_type (itype);
  tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
  bool throws = false;
  edge e = NULL;
  gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
				   expected);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);
  gimple_stmt_iterator gsiret = gsi_for_stmt (g);
  if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
    {
      g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
			       build1 (VIEW_CONVERT_EXPR, itype,
				       gimple_assign_lhs (g)));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }
  int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
	     + int_size_in_bytes (itype);
  g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
				  gimple_call_arg (stmt, 0),
				  gimple_assign_lhs (g),
				  gimple_call_arg (stmt, 2),
				  build_int_cst (integer_type_node, flag),
				  gimple_call_arg (stmt, 4),
				  gimple_call_arg (stmt, 5));
  tree lhs = make_ssa_name (ctype);
  gimple_call_set_lhs (g, lhs);
  gimple_move_vops (g, stmt);
  tree oldlhs = gimple_call_lhs (stmt);
  if (stmt_can_throw_internal (cfun, stmt))
    {
      throws = true;
      e = find_fallthru_edge (gsi_bb (*gsi)->succs);
    }
  gimple_call_set_nothrow (as_a <gcall *> (g),
			   gimple_call_nothrow_p (as_a <gcall *> (stmt)));
  gimple_call_set_lhs (stmt, NULL_TREE);
  gsi_replace (gsi, g, true);
  if (oldlhs)
    {
      g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
			       build1 (IMAGPART_EXPR, itype, lhs));
      if (throws)
	{
	  gsi_insert_on_edge_immediate (e, g);
	  *gsi = gsi_for_stmt (g);
	}
      else
	gsi_insert_after (gsi, g, GSI_NEW_STMT);
      g = gimple_build_assign (oldlhs, NOP_EXPR, gimple_assign_lhs (g));
      gsi_insert_after (gsi, g, GSI_NEW_STMT);
    }
  g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
			   build1 (REALPART_EXPR, itype, lhs));
  if (throws && oldlhs == NULL_TREE)
    {
      gsi_insert_on_edge_immediate (e, g);
      *gsi = gsi_for_stmt (g);
    }
  else
    gsi_insert_after (gsi, g, GSI_NEW_STMT);
  if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
    {
      g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
			       VIEW_CONVERT_EXPR,
			       build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
				       gimple_assign_lhs (g)));
      gsi_insert_after (gsi, g, GSI_NEW_STMT);
    }
  g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
  gsi_insert_after (gsi, g, GSI_NEW_STMT);
  *gsi = gsiret;
}
/* Return true if ARG0 CODE ARG1 in infinite signed precision operation
   doesn't fit into TYPE.  The test for overflow should be regardless of
   -fwrapv, and even for unsigned types.  */

bool
arith_overflowed_p (enum tree_code code, const_tree type,
		    const_tree arg0, const_tree arg1)
{
  widest2_int warg0 = widest2_int_cst (arg0);
  widest2_int warg1 = widest2_int_cst (arg1);
  widest2_int wres;
  switch (code)
    {
    case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
    case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
    case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
    default: gcc_unreachable ();
    }
  signop sign = TYPE_SIGN (type);
  if (sign == UNSIGNED && wi::neg_p (wres))
    return true;
  return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
}
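
/* A worked example of the test above (illustrative): for the 8-bit
   unsigned type and PLUS_EXPR of 200 and 100, the infinite-precision
   sum is 300, which needs 9 bits as an unsigned value, so
   wi::min_precision (wres, UNSIGNED) == 9 > TYPE_PRECISION == 8 and
   the operation overflowed.  Likewise MINUS_EXPR of 0 and 1 yields -1
   in infinite signed precision, which wi::neg_p rejects directly for
   UNSIGNED types.  */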
/* If IFN_MASK_LOAD/STORE call CALL is unconditional, return a MEM_REF
   for the memory it references, otherwise return null.  VECTYPE is the
   type of the memory vector.  */

static tree
gimple_fold_mask_load_store_mem_ref (gcall *call, tree vectype)
{
  tree ptr = gimple_call_arg (call, 0);
  tree alias_align = gimple_call_arg (call, 1);
  tree mask = gimple_call_arg (call, 2);
  if (!tree_fits_uhwi_p (alias_align) || !integer_all_onesp (mask))
    return NULL_TREE;

  unsigned HOST_WIDE_INT align = tree_to_uhwi (alias_align) * BITS_PER_UNIT;
  if (TYPE_ALIGN (vectype) != align)
    vectype = build_aligned_type (vectype, align);
  tree offset = build_zero_cst (TREE_TYPE (alias_align));
  return fold_build2 (MEM_REF, vectype, ptr, offset);
}
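
/* For example (illustrative), a masked store whose mask is all ones,
   such as

     .MASK_STORE (p_1, 16B, { -1, -1, -1, -1 }, v_2);

   is unconditional, so the helper above yields a MEM_REF and the two
   folders below can rewrite the call into a plain vector assignment

     MEM <vector(4) int> [(int *)p_1] = v_2;  */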
/* Try to fold IFN_MASK_LOAD call CALL.  Return true on success.  */

static bool
gimple_fold_mask_load (gimple_stmt_iterator *gsi, gcall *call)
{
  tree lhs = gimple_call_lhs (call);
  if (!lhs)
    return false;

  if (tree rhs = gimple_fold_mask_load_store_mem_ref (call, TREE_TYPE (lhs)))
    {
      gassign *new_stmt = gimple_build_assign (lhs, rhs);
      gimple_set_location (new_stmt, gimple_location (call));
      gimple_move_vops (new_stmt, call);
      gsi_replace (gsi, new_stmt, false);
      return true;
    }
  return false;
}
/* Try to fold IFN_MASK_STORE call CALL.  Return true on success.  */

static bool
gimple_fold_mask_store (gimple_stmt_iterator *gsi, gcall *call)
{
  tree rhs = gimple_call_arg (call, 3);
  if (tree lhs = gimple_fold_mask_load_store_mem_ref (call, TREE_TYPE (rhs)))
    {
      gassign *new_stmt = gimple_build_assign (lhs, rhs);
      gimple_set_location (new_stmt, gimple_location (call));
      gimple_move_vops (new_stmt, call);
      gsi_replace (gsi, new_stmt, false);
      return true;
    }
  return false;
}
/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.  */

static bool
gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree callee;
  bool changed = false;
  unsigned i;

  /* Fold *& in call arguments.  */
  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
      {
	tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
	if (tmp)
	  {
	    gimple_call_set_arg (stmt, i, tmp);
	    changed = true;
	  }
      }

  /* Check for virtual calls that became direct calls.  */
  callee = gimple_call_fn (stmt);
  if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
    {
      if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
	{
	  if (dump_file && virtual_method_call_p (callee)
	      && !possible_polymorphic_call_target_p
		    (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
						     (OBJ_TYPE_REF_EXPR (callee)))))
	    {
	      fprintf (dump_file,
		       "Type inheritance inconsistent devirtualization of ");
	      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, callee, TDF_SLIM);
	      fprintf (dump_file, "\n");
	    }

	  gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
	  changed = true;
	}
      else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
	{
	  bool final;
	  vec <cgraph_node *>targets
	    = possible_polymorphic_call_targets (callee, stmt, &final);
	  if (final && targets.length () <= 1 && dbg_cnt (devirt))
	    {
	      tree lhs = gimple_call_lhs (stmt);
	      if (dump_enabled_p ())
		{
		  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
				   "folding virtual function call to %s\n",
				   targets.length () == 1
				   ? targets[0]->name ()
				   : "__builtin_unreachable");
		}
	      if (targets.length () == 1)
		{
		  tree fndecl = targets[0]->decl;
		  gimple_call_set_fndecl (stmt, fndecl);
		  changed = true;
		  /* If changing the call to __cxa_pure_virtual
		     or similar noreturn function, adjust gimple_call_fntype
		     too.  */
		  if (gimple_call_noreturn_p (stmt)
		      && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
		      && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
		      && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
			  == void_type_node))
		    gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
		  /* If the call becomes noreturn, remove the lhs.  */
		  if (lhs
		      && gimple_call_noreturn_p (stmt)
		      && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
			  || should_remove_lhs_p (lhs)))
		    {
		      if (TREE_CODE (lhs) == SSA_NAME)
			{
			  tree var = create_tmp_var (TREE_TYPE (lhs));
			  tree def = get_or_create_ssa_default_def (cfun, var);
			  gimple *new_stmt = gimple_build_assign (lhs, def);
			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
			}
		      gimple_call_set_lhs (stmt, NULL_TREE);
		    }
		  maybe_remove_unused_call_args (cfun, stmt);
		}
	      else
		{
		  tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
		  gimple *new_stmt = gimple_build_call (fndecl, 0);
		  gimple_set_location (new_stmt, gimple_location (stmt));
		  /* If the call had an SSA name as lhs, morph that into
		     an uninitialized value.  */
		  if (lhs && TREE_CODE (lhs) == SSA_NAME)
		    {
		      tree var = create_tmp_var (TREE_TYPE (lhs));
		      SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs, var);
		      SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
		      set_ssa_default_def (cfun, var, lhs);
		    }
		  gimple_move_vops (new_stmt, stmt);
		  gsi_replace (gsi, new_stmt, false);
		  return true;
		}
	    }
	}
    }

  /* Check for indirect calls that became direct calls, and then
     no longer require a static chain.  */
  if (gimple_call_chain (stmt))
    {
      tree fn = gimple_call_fndecl (stmt);
      if (fn && !DECL_STATIC_CHAIN (fn))
	{
	  gimple_call_set_chain (stmt, NULL);
	  changed = true;
	}
      else
	{
	  tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
	  if (tmp)
	    {
	      gimple_call_set_chain (stmt, tmp);
	      changed = true;
	    }
	}
    }

  if (inplace)
    return changed;

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    {
      if (gimple_fold_builtin (gsi))
	changed = true;
    }
  else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
    {
      changed |= targetm.gimple_fold_builtin (gsi);
    }
  else if (gimple_call_internal_p (stmt))
    {
      enum tree_code subcode = ERROR_MARK;
      tree result = NULL_TREE;
      bool cplx_result = false;
      tree overflow = NULL_TREE;
      switch (gimple_call_internal_fn (stmt))
	{
	case IFN_BUILTIN_EXPECT:
	  result = fold_builtin_expect (gimple_location (stmt),
					gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1),
					gimple_call_arg (stmt, 2),
					NULL_TREE);
	  break;
	case IFN_UBSAN_OBJECT_SIZE:
	  {
	    tree offset = gimple_call_arg (stmt, 1);
	    tree objsize = gimple_call_arg (stmt, 2);
	    if (integer_all_onesp (objsize)
		|| (TREE_CODE (offset) == INTEGER_CST
		    && TREE_CODE (objsize) == INTEGER_CST
		    && tree_int_cst_le (offset, objsize)))
	      {
		replace_call_with_value (gsi, NULL_TREE);
		return true;
	      }
	  }
	  break;
	case IFN_UBSAN_PTR:
	  if (integer_zerop (gimple_call_arg (stmt, 1)))
	    {
	      replace_call_with_value (gsi, NULL_TREE);
	      return true;
	    }
	  break;
	case IFN_UBSAN_BOUNDS:
	  {
	    tree index = gimple_call_arg (stmt, 1);
	    tree bound = gimple_call_arg (stmt, 2);
	    if (TREE_CODE (index) == INTEGER_CST
		&& TREE_CODE (bound) == INTEGER_CST)
	      {
		index = fold_convert (TREE_TYPE (bound), index);
		if (TREE_CODE (index) == INTEGER_CST
		    && tree_int_cst_le (index, bound))
		  {
		    replace_call_with_value (gsi, NULL_TREE);
		    return true;
		  }
	      }
	  }
	  break;
	case IFN_GOACC_DIM_SIZE:
	case IFN_GOACC_DIM_POS:
	  result = fold_internal_goacc_dim (stmt);
	  break;
	case IFN_UBSAN_CHECK_ADD:
	  subcode = PLUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_SUB:
	  subcode = MINUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_MUL:
	  subcode = MULT_EXPR;
	  break;
	case IFN_ADD_OVERFLOW:
	  subcode = PLUS_EXPR;
	  cplx_result = true;
	  break;
	case IFN_SUB_OVERFLOW:
	  subcode = MINUS_EXPR;
	  cplx_result = true;
	  break;
	case IFN_MUL_OVERFLOW:
	  subcode = MULT_EXPR;
	  cplx_result = true;
	  break;
	case IFN_MASK_LOAD:
	  changed |= gimple_fold_mask_load (gsi, stmt);
	  break;
	case IFN_MASK_STORE:
	  changed |= gimple_fold_mask_store (gsi, stmt);
	  break;
	default:
	  break;
	}
      if (subcode != ERROR_MARK)
	{
	  tree arg0 = gimple_call_arg (stmt, 0);
	  tree arg1 = gimple_call_arg (stmt, 1);
	  tree type = TREE_TYPE (arg0);
	  if (cplx_result)
	    {
	      tree lhs = gimple_call_lhs (stmt);
	      if (lhs == NULL_TREE)
		type = NULL_TREE;
	      else
		type = TREE_TYPE (TREE_TYPE (lhs));
	    }
	  if (type == NULL_TREE)
	    ;
	  /* x = y + 0; x = y - 0; x = y * 0; */
	  else if (integer_zerop (arg1))
	    result = subcode == MULT_EXPR ? integer_zero_node : arg0;
	  /* x = 0 + y; x = 0 * y; */
	  else if (subcode != MINUS_EXPR && integer_zerop (arg0))
	    result = subcode == MULT_EXPR ? integer_zero_node : arg1;
	  /* x = y - y; */
	  else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
	    result = integer_zero_node;
	  /* x = y * 1; x = 1 * y; */
	  else if (subcode == MULT_EXPR && integer_onep (arg1))
	    result = arg0;
	  else if (subcode == MULT_EXPR && integer_onep (arg0))
	    result = arg1;
	  else if (TREE_CODE (arg0) == INTEGER_CST
		   && TREE_CODE (arg1) == INTEGER_CST)
	    {
	      if (cplx_result)
		result = int_const_binop (subcode, fold_convert (type, arg0),
					  fold_convert (type, arg1));
	      else
		result = int_const_binop (subcode, arg0, arg1);
	      if (result && arith_overflowed_p (subcode, type, arg0, arg1))
		{
		  if (cplx_result)
		    overflow = build_one_cst (type);
		  else
		    result = NULL_TREE;
		}
	    }
	  if (result)
	    {
	      if (result == integer_zero_node)
		result = build_zero_cst (type);
	      else if (cplx_result && TREE_TYPE (result) != type)
		{
		  if (TREE_CODE (result) == INTEGER_CST)
		    {
		      if (arith_overflowed_p (PLUS_EXPR, type, result,
					      integer_zero_node))
			overflow = build_one_cst (type);
		    }
		  else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
			    && TYPE_UNSIGNED (type))
			   || (TYPE_PRECISION (type)
			       < (TYPE_PRECISION (TREE_TYPE (result))
				  + (TYPE_UNSIGNED (TREE_TYPE (result))
				     && !TYPE_UNSIGNED (type)))))
		    result = NULL_TREE;
		  if (result)
		    result = fold_convert (type, result);
		}
	    }
	}

      if (result)
	{
	  if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
	    result = drop_tree_overflow (result);
	  if (cplx_result)
	    {
	      if (overflow == NULL_TREE)
		overflow = build_zero_cst (TREE_TYPE (result));
	      tree ctype = build_complex_type (TREE_TYPE (result));
	      if (TREE_CODE (result) == INTEGER_CST
		  && TREE_CODE (overflow) == INTEGER_CST)
		result = build_complex (ctype, result, overflow);
	      else
		result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
				     ctype, result, overflow);
	    }
	  if (!update_call_from_tree (gsi, result))
	    gimplify_and_update_call_from_tree (gsi, result);
	  changed = true;
	}
    }

  return changed;
}
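
/* For example (illustrative), with an unsigned char result component,
   the internal-function call

     _1 = .ADD_OVERFLOW (100, 100);

   is folded above to _1 = __complex__ (200, 0), while

     _2 = .ADD_OVERFLOW (200, 100);

   becomes _2 = __complex__ (44, 1): the real part is the wrapped
   result and the imaginary part signals the overflow.  */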
/* Return true whether NAME has a use on STMT.  */

static bool
has_use_on_stmt (tree name, gimple *stmt)
{
  imm_use_iterator iter;
  use_operand_p use_p;
  FOR_EACH_IMM_USE_FAST (use_p, iter, name)
    if (USE_STMT (use_p) == stmt)
      return true;
  return false;
}
/* Worker for fold_stmt_1 dispatch to pattern based folding with
   gimple_simplify.

   Replaces *GSI with the simplification result in RCODE and OPS
   and the associated statements in *SEQ.  Does the replacement
   according to INPLACE and returns true if the operation succeeded.  */

static bool
replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
				  gimple_match_op *res_op,
				  gimple_seq *seq, bool inplace)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree *ops = res_op->ops;
  unsigned int num_ops = res_op->num_ops;

  /* Play safe and do not allow abnormals to be mentioned in
     newly created statements.  See also maybe_push_res_to_seq.
     As an exception allow such uses if there was a use of the
     same SSA name on the old stmt.  */
  for (unsigned int i = 0; i < num_ops; ++i)
    if (TREE_CODE (ops[i]) == SSA_NAME
	&& SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[i])
	&& !has_use_on_stmt (ops[i], stmt))
      return false;

  if (num_ops > 0 && COMPARISON_CLASS_P (ops[0]))
    for (unsigned int i = 0; i < 2; ++i)
      if (TREE_CODE (TREE_OPERAND (ops[0], i)) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], i))
	  && !has_use_on_stmt (TREE_OPERAND (ops[0], i), stmt))
	return false;

  /* Don't insert new statements when INPLACE is true, even if we could
     reuse STMT for the final statement.  */
  if (inplace && !gimple_seq_empty_p (*seq))
    return false;

  if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
    {
      gcc_assert (res_op->code.is_tree_code ());
      if (TREE_CODE_CLASS ((enum tree_code) res_op->code) == tcc_comparison
	  /* GIMPLE_CONDs condition may not throw.  */
	  && (!flag_exceptions
	      || !cfun->can_throw_non_call_exceptions
	      || !operation_could_trap_p (res_op->code,
					  FLOAT_TYPE_P (TREE_TYPE (ops[0])),
					  false, NULL_TREE)))
	gimple_cond_set_condition (cond_stmt, res_op->code, ops[0], ops[1]);
      else if (res_op->code == SSA_NAME)
	gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
				   build_zero_cst (TREE_TYPE (ops[0])));
      else if (res_op->code == INTEGER_CST)
	{
	  if (integer_zerop (ops[0]))
	    gimple_cond_make_false (cond_stmt);
	  else
	    gimple_cond_make_true (cond_stmt);
	}
      else if (!inplace)
	{
	  tree res = maybe_push_res_to_seq (res_op, seq);
	  if (!res)
	    return false;
	  gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
				     build_zero_cst (TREE_TYPE (res)));
	}
      else
	return false;
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi),
			     0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (is_gimple_assign (stmt)
	   && res_op->code.is_tree_code ())
    {
      if (!inplace
	  || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (res_op->code))
	{
	  maybe_build_generic_op (res_op);
	  gimple_assign_set_rhs_with_ops (gsi, res_op->code,
					  res_op->op_or_null (0),
					  res_op->op_or_null (1),
					  res_op->op_or_null (2));
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      if (!gimple_seq_empty_p (*seq))
		print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	      print_gimple_stmt (dump_file, gsi_stmt (*gsi),
				 0, TDF_SLIM);
	    }
	  gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
	  return true;
	}
    }
  else if (res_op->code.is_fn_code ()
	   && gimple_call_combined_fn (stmt) == res_op->code)
    {
      gcc_assert (num_ops == gimple_call_num_args (stmt));
      for (unsigned int i = 0; i < num_ops; ++i)
	gimple_call_set_arg (stmt, i, ops[i]);
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (!inplace)
    {
      if (gimple_has_lhs (stmt))
	{
	  tree lhs = gimple_get_lhs (stmt);
	  if (!maybe_push_res_to_seq (res_op, seq, lhs))
	    return false;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	    }
	  gsi_replace_with_seq_vops (gsi, *seq);
	  return true;
	}
      else
	gcc_unreachable ();
    }

  return false;
}
/* Canonicalize MEM_REFs invariant address operand after propagation.  */

static bool
maybe_canonicalize_mem_ref_addr (tree *t, bool is_debug = false)
{
  bool res = false;
  tree *orig_t = t;

  if (TREE_CODE (*t) == ADDR_EXPR)
    t = &TREE_OPERAND (*t, 0);

  /* The C and C++ frontends use an ARRAY_REF for indexing with their
     generic vector extension.  The actual vector referenced is
     view-converted to an array type for this purpose.  If the index
     is constant the canonical representation in the middle-end is a
     BIT_FIELD_REF so re-write the former to the latter here.  */
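  /* For example (illustrative), given the generic vector

       v4si v;

     the frontend representation of v[1] is

       VIEW_CONVERT_EXPR<int[4]>(v)[1]

     which the code below rewrites to

       BIT_FIELD_REF <v, 32, 32>

     i.e. 32 bits taken at bit offset 1 * 32.  */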
  if (TREE_CODE (*t) == ARRAY_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
      && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
      && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
    {
      tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
      if (VECTOR_TYPE_P (vtype))
	{
	  tree low = array_ref_low_bound (*t);
	  if (TREE_CODE (low) == INTEGER_CST)
	    {
	      if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
		{
		  widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
					    wi::to_widest (low));
		  idx = wi::mul (idx, wi::to_widest
					(TYPE_SIZE (TREE_TYPE (*t))));
		  widest_int ext
		    = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
		  if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
		    {
		      *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
				       TREE_TYPE (*t),
				       TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
				       TYPE_SIZE (TREE_TYPE (*t)),
				       wide_int_to_tree (bitsizetype, idx));
		      res = true;
		    }
		}
	    }
	}
    }

  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
     of invariant addresses into a SSA name MEM_REF address.  */
  if (TREE_CODE (*t) == MEM_REF
      || TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (*t, 0);
      if (TREE_CODE (addr) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
	      || handled_component_p (TREE_OPERAND (addr, 0))))
	{
	  tree base;
	  poly_int64 coffset;
	  base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
						&coffset);
	  if (!base)
	    {
	      if (is_debug)
		return false;
	      gcc_unreachable ();
	    }

	  TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
	  TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
						  TREE_OPERAND (*t, 1),
						  size_int (coffset));
	  res = true;
	}
      gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
			   || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
    }

  /* Canonicalize back MEM_REFs to plain reference trees if the object
     accessed is a decl that has the same access semantics as the MEM_REF.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1))
      && MR_DEPENDENCE_CLIQUE (*t) == 0)
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
      if (/* Same volatile qualification.  */
	  TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
	  /* Same TBAA behavior with -fstrict-aliasing.  */
	  && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
	  && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
	      == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
	  /* Same alignment.  */
	  && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
	  /* We have to look out here to not drop a required conversion
	     from the rhs to the lhs if *t appears on the lhs or vice-versa
	     if it appears on the rhs.  Thus require strict type
	     compatibility.  */
	  && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
	{
	  *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
	  res = true;
	}
    }
  else if (TREE_CODE (*orig_t) == ADDR_EXPR
	   && TREE_CODE (*t) == MEM_REF
	   && TREE_CODE (TREE_OPERAND (*t, 0)) == INTEGER_CST)
    {
      tree base;
      poly_int64 coffset;
      base = get_addr_base_and_unit_offset (TREE_OPERAND (*orig_t, 0),
					    &coffset);
      if (base)
	{
	  gcc_assert (TREE_CODE (base) == MEM_REF);
	  poly_int64 moffset;
	  if (mem_ref_offset (base).to_shwi (&moffset))
	    {
	      coffset += moffset;
	      if (wi::to_poly_wide (TREE_OPERAND (base, 0)).to_shwi (&moffset))
		{
		  coffset += moffset;
		  *orig_t = build_int_cst (TREE_TYPE (*orig_t), coffset);
		  return true;
		}
	    }
	}
    }
  /* Canonicalize TARGET_MEM_REF in particular with respect to
     the indexes becoming constant.  */
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
	{
	  *t = tem;
	  res = true;
	}
    }

  return res;
}
/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases.  */

static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
{
  bool changed = false;
  gimple *stmt = gsi_stmt (*gsi);
  bool nowarning = gimple_no_warning_p (stmt);
  unsigned i;
  fold_defer_overflow_warnings ();

  /* First do required canonicalization of [TARGET_]MEM_REF addresses
     after propagation.
     ???  This shouldn't be done in generic folding but in the
     propagation helpers which also know whether an address was
     propagated.
     Also canonicalize operand order.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
	{
	  tree *rhs = gimple_assign_rhs1_ptr (stmt);
	  if ((REFERENCE_CLASS_P (*rhs)
	       || TREE_CODE (*rhs) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (rhs))
	    changed = true;
	  tree *lhs = gimple_assign_lhs_ptr (stmt);
	  if (REFERENCE_CLASS_P (*lhs)
	      && maybe_canonicalize_mem_ref_addr (lhs))
	    changed = true;
	}
      else
	{
	  /* Canonicalize operand order.  */
	  enum tree_code code = gimple_assign_rhs_code (stmt);
	  if (TREE_CODE_CLASS (code) == tcc_comparison
	      || commutative_tree_code (code)
	      || commutative_ternary_tree_code (code))
	    {
	      tree rhs1 = gimple_assign_rhs1 (stmt);
	      tree rhs2 = gimple_assign_rhs2 (stmt);
	      if (tree_swap_operands_p (rhs1, rhs2))
		{
		  gimple_assign_set_rhs1 (stmt, rhs2);
		  gimple_assign_set_rhs2 (stmt, rhs1);
		  if (TREE_CODE_CLASS (code) == tcc_comparison)
		    gimple_assign_set_rhs_code (stmt,
						swap_tree_comparison (code));
		  changed = true;
		}
	    }
	}
      break;
    case GIMPLE_CALL:
      {
	for (i = 0; i < gimple_call_num_args (stmt); ++i)
	  {
	    tree *arg = gimple_call_arg_ptr (stmt, i);
	    if (REFERENCE_CLASS_P (*arg)
		&& maybe_canonicalize_mem_ref_addr (arg))
	      changed = true;
	  }
	tree *lhs = gimple_call_lhs_ptr (stmt);
	if (*lhs
	    && REFERENCE_CLASS_P (*lhs)
	    && maybe_canonicalize_mem_ref_addr (lhs))
	  changed = true;
	break;
      }
    case GIMPLE_ASM:
      {
	gasm *asm_stmt = as_a <gasm *> (stmt);
	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if (REFERENCE_CLASS_P (op)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if ((REFERENCE_CLASS_P (op)
		 || TREE_CODE (op) == ADDR_EXPR)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
      }
      break;
    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree *val = gimple_debug_bind_get_value_ptr (stmt);
	  if (*val
	      && (REFERENCE_CLASS_P (*val)
		  || TREE_CODE (*val) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (val, true))
	    changed = true;
	}
      break;
    case GIMPLE_COND:
      {
	/* Canonicalize operand order.  */
	tree lhs = gimple_cond_lhs (stmt);
	tree rhs = gimple_cond_rhs (stmt);
	if (tree_swap_operands_p (lhs, rhs))
	  {
	    gcond *gc = as_a <gcond *> (stmt);
	    gimple_cond_set_lhs (gc, rhs);
	    gimple_cond_set_rhs (gc, lhs);
	    gimple_cond_set_code (gc,
				  swap_tree_comparison (gimple_cond_code (gc)));
	    changed = true;
	  }
      }
      break;
    default:;
    }

  /* Dispatch to pattern-based folding.  */
  if (!inplace
      || is_gimple_assign (stmt)
      || gimple_code (stmt) == GIMPLE_COND)
    {
      gimple_seq seq = NULL;
      gimple_match_op res_op;
      if (gimple_simplify (stmt, &res_op, inplace ? NULL : &seq,
			   valueize, valueize))
	{
	  if (replace_stmt_with_simplification (gsi, &res_op, &seq, inplace))
	    changed = true;
	  else
	    gimple_seq_discard (seq);
	}
    }

  stmt = gsi_stmt (*gsi);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	/* Try to canonicalize for boolean-typed X the comparisons
	   X == 0, X == 1, X != 0, and X != 1.  */
	if (gimple_assign_rhs_code (stmt) == EQ_EXPR
	    || gimple_assign_rhs_code (stmt) == NE_EXPR)
	  {
	    tree lhs = gimple_assign_lhs (stmt);
	    tree op1 = gimple_assign_rhs1 (stmt);
	    tree op2 = gimple_assign_rhs2 (stmt);
	    tree type = TREE_TYPE (op1);

	    /* Check whether the comparison operands are of the same boolean
	       type as the result type is.
	       Check that second operand is an integer-constant with value
	       one or zero.  */
	    if (TREE_CODE (op2) == INTEGER_CST
		&& (integer_zerop (op2) || integer_onep (op2))
		&& useless_type_conversion_p (TREE_TYPE (lhs), type))
	      {
		enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
		bool is_logical_not = false;

		/* X == 0 and X != 1 is a logical-not of X;
		   X == 1 and X != 0 is X.  */
		if ((cmp_code == EQ_EXPR && integer_zerop (op2))
		    || (cmp_code == NE_EXPR && integer_onep (op2)))
		  is_logical_not = true;

		if (is_logical_not == false)
		  gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
		/* Only for one-bit precision typed X the transformation
		   !X -> ~X is valid.  */
		else if (TYPE_PRECISION (type) == 1)
		  gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
		/* Otherwise we use !X -> X ^ 1.  */
		else
		  gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
						  build_int_cst (type, 1));
		changed = true;
		stmt = gsi_stmt (*gsi);
	      }
	  }

	unsigned old_num_ops = gimple_num_ops (stmt);
	tree lhs = gimple_assign_lhs (stmt);
	tree new_rhs = fold_gimple_assign (gsi);
	if (new_rhs
	    && !useless_type_conversion_p (TREE_TYPE (lhs),
					   TREE_TYPE (new_rhs)))
	  new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	if (new_rhs
	    && (!inplace
		|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	break;
      }

    case GIMPLE_CALL:
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      {
	gasm *asm_stmt = as_a <gasm *> (stmt);
	size_t noutputs;
	const char **oconstraints;
	const char *constraint;
	bool allows_mem, allows_reg;

	noutputs = gimple_asm_noutputs (asm_stmt);
	oconstraints = XALLOCAVEC (const char *, noutputs);

	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    oconstraints[i]
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, true)) != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    constraint
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    parse_input_constraint (&constraint, 0, 0, noutputs, 0,
				    oconstraints, &allows_mem, &allows_reg);
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, !allows_reg && allows_mem))
		   != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
      }
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree val = gimple_debug_bind_get_value (stmt);
	  if (val
	      && REFERENCE_CLASS_P (val))
	    {
	      tree tem = maybe_fold_reference (val, false);
	      if (tem)
		{
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	  else if (val
		   && TREE_CODE (val) == ADDR_EXPR)
	    {
	      tree ref = TREE_OPERAND (val, 0);
	      tree tem = maybe_fold_reference (ref, false);
	      if (tem)
		{
		  tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	}
      break;

    case GIMPLE_RETURN:
      {
	greturn *ret_stmt = as_a <greturn *> (stmt);
	tree ret = gimple_return_retval (ret_stmt);

	if (ret && TREE_CODE (ret) == SSA_NAME && valueize)
	  {
	    tree val = valueize (ret);
	    if (val && val != ret
		&& may_propagate_copy (ret, val))
	      {
		gimple_return_set_retval (ret_stmt, val);
		changed = true;
	      }
	  }
      }
      break;

    default:;
    }

  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  */
  if (gimple_has_lhs (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
	{
	  tree new_lhs = maybe_fold_reference (lhs, true);
	  if (new_lhs)
	    {
	      gimple_set_lhs (stmt, new_lhs);
	      changed = true;
	    }
	}
    }

  fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);

  return changed;
}
/* Valueization callback that ends up not following SSA edges.  */

tree
no_follow_ssa_edges (tree)
{
  return NULL_TREE;
}

/* Valueization callback that ends up following single-use SSA edges only.  */

tree
follow_single_use_edges (tree val)
{
  if (TREE_CODE (val) == SSA_NAME
      && !has_single_use (val))
    return NULL_TREE;
  return val;
}

/* Valueization callback that follows all SSA edges.  */

tree
follow_all_ssa_edges (tree val)
{
  return val;
}

/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.
   The statement pointed to by GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
}

bool
fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
{
  return fold_stmt_1 (gsi, false, valueize);
}

/* Perform the minimal folding on statement *GSI.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.
   The statement *GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt_inplace (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
  gcc_assert (gsi_stmt (*gsi) == stmt);
  return changed;
}
/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
   if EXPR is null or we don't know how.
   If non-null, the result always has boolean type.  */

static tree
canonicalize_bool (tree expr, bool invert)
{
  if (!expr)
    return NULL_TREE;
  if (invert)
    {
      if (integer_nonzerop (expr))
	return boolean_false_node;
      else if (integer_zerop (expr))
	return boolean_true_node;
      else if (TREE_CODE (expr) == SSA_NAME)
	return fold_build2 (EQ_EXPR, boolean_type_node, expr,
			    build_int_cst (TREE_TYPE (expr), 0));
      else if (COMPARISON_CLASS_P (expr))
	return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
			    boolean_type_node,
			    TREE_OPERAND (expr, 0),
			    TREE_OPERAND (expr, 1));
      else
	return NULL_TREE;
    }
  else
    {
      if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
	return expr;
      if (integer_nonzerop (expr))
	return boolean_true_node;
      else if (integer_zerop (expr))
	return boolean_false_node;
      else if (TREE_CODE (expr) == SSA_NAME)
	return fold_build2 (NE_EXPR, boolean_type_node, expr,
			    build_int_cst (TREE_TYPE (expr), 0));
      else if (COMPARISON_CLASS_P (expr))
	return fold_build2 (TREE_CODE (expr),
			    boolean_type_node,
			    TREE_OPERAND (expr, 0),
			    TREE_OPERAND (expr, 1));
      else
	return NULL_TREE;
    }
}
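
/* For example (illustrative): canonicalize_bool (x_1 < y_2, true)
   yields x_1 >= y_2 of boolean type, while for a non-boolean SSA name
   n_3, canonicalize_bool (n_3, false) yields n_3 != 0, so callers can
   rely on a boolean-typed result whenever one is returned.  */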
/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  */

static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
			const_tree op1, const_tree op2)
{
  gimple *s;

  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      if (operand_equal_p (expr, op1, 0))
	return ((code == NE_EXPR && integer_zerop (op2))
		|| (code == EQ_EXPR && integer_nonzerop (op2)));
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
	  && gimple_assign_rhs_code (s) == code
	  && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
	  && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
	return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
	{
	  enum tree_code c = gimple_assign_rhs_code (s);
	  if ((c == NE_EXPR && integer_zerop (op2))
	      || (c == EQ_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr, c,
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	  if ((c == EQ_EXPR && integer_zerop (op2))
	      || (c == NE_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr,
					   invert_tree_comparison (c, false),
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	}
    }

  return false;
}
/* Check to see if two boolean expressions OP1 and OP2 are logically
   equivalent.  */

static bool
same_bool_result_p (const_tree op1, const_tree op2)
{
  /* Simple cases first.  */
  if (operand_equal_p (op1, op2, 0))
    return true;

  /* Check the cases where at least one of the operands is a comparison.
     These are a bit smarter than operand_equal_p in that they apply some
     identities on SSA_NAMEs.  */
  if (COMPARISON_CLASS_P (op2)
      && same_bool_comparison_p (op1, TREE_CODE (op2),
				 TREE_OPERAND (op2, 0),
				 TREE_OPERAND (op2, 1)))
    return true;
  if (COMPARISON_CLASS_P (op1)
      && same_bool_comparison_p (op2, TREE_CODE (op1),
				 TREE_OPERAND (op1, 0),
				 TREE_OPERAND (op1, 1)))
    return true;

  /* Default case.  */
  return false;
}
/* Forward declarations for some mutually recursive functions.  */

static tree
and_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison (tree type, tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison_1 (tree type, gimple *stmt,
			   enum tree_code code2, tree op2a, tree op2b);
static tree
or_comparisons_1 (tree, enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison (tree, tree var, bool invert,
			enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison_1 (tree, gimple *stmt,
			  enum tree_code code2, tree op2a, tree op2b);
/* Helper function for and_comparisons_1:  try to simplify the AND of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of the VAR before doing the AND.
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison (tree type, tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b)
{
  tree t;
  gimple *stmt = SSA_NAME_DEF_STMT (var);

  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.  */
  if (invert)
    t = or_var_with_comparison_1 (type, stmt,
				  invert_tree_comparison (code2, false),
				  op2a, op2b);
  else
    t = and_var_with_comparison_1 (type, stmt, code2, op2a, op2b);
  return canonicalize_bool (t, invert);
}
/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison_1 (tree type, gimple *stmt,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_false_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (type, innercode,
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt),
				  code2, op2a, op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple *s;
      tree t;
      tree partial = NULL_TREE;
      bool is_and = (innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
	 inner1 AND (inner1 OR inner2) => inner1
	 !inner1 AND (inner1 AND inner2) => false
	 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
	 Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (type, inner2, false, code2, op2a,
					   op2b));
      else if (inner2 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (type, inner1, false, code2, op2a,
					   op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
	 Compute the first partial result, (inner1 AND (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (type, gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (t AND inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner2;
	      else if (integer_zerop (t))
		return boolean_false_node;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner2 AND (op2a code2 op2b)))  */
	  else if (integer_onep (t))
	    return boolean_true_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (type, gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (inner1 AND t)  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner1;
	      else if (integer_zerop (t))
		return boolean_false_node;
	      /* If both are the same, we can apply the identity
		 (x AND x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner1 AND (op2a code2 op2b)))
	     => (t OR partial)  */
	  else
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed OR expression.  The
		     interesting case is when at least one is false.
		     Or, if both are the same, we can apply the identity
		     (x OR x) == x.  */
		  if (integer_zerop (partial))
		    return t;
		  else if (integer_zerop (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
/* Try to simplify the AND of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
and_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) AND (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple *stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return and_var_with_comparison (type, op1a, invert, code2, op2a,
					  op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ANDed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
			{
			  if (!result)
			    result = boolean_false_node;
			  else if (!integer_zerop (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = and_var_with_comparison (type, arg, invert, code2,
						      op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}
/* Helper function for maybe_fold_and_comparisons and
   maybe_fold_or_comparisons: try to simplify the AND/OR of the two
   comparisons (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B) via match.pd.
   Return NULL_EXPR if we can't simplify this to a single expression.
   As we are going to lower the cost of building SSA names / gimple
   stmts significantly, we need to allocate them on the stack.  This
   will cause the code to be a bit ugly.  */

static tree
maybe_fold_comparisons_from_match_pd (tree type, enum tree_code code,
				      enum tree_code code1,
				      tree op1a, tree op1b,
				      enum tree_code code2, tree op2a,
				      tree op2b)
{
  /* Allocate gimple stmt1 on the stack.  */
  gassign *stmt1
    = (gassign *) XALLOCAVEC (char, gimple_size (GIMPLE_ASSIGN, 3));
  gimple_init (stmt1, GIMPLE_ASSIGN, 3);
  gimple_assign_set_rhs_code (stmt1, code1);
  gimple_assign_set_rhs1 (stmt1, op1a);
  gimple_assign_set_rhs2 (stmt1, op1b);

  /* Allocate gimple stmt2 on the stack.  */
  gassign *stmt2
    = (gassign *) XALLOCAVEC (char, gimple_size (GIMPLE_ASSIGN, 3));
  gimple_init (stmt2, GIMPLE_ASSIGN, 3);
  gimple_assign_set_rhs_code (stmt2, code2);
  gimple_assign_set_rhs1 (stmt2, op2a);
  gimple_assign_set_rhs2 (stmt2, op2b);

  /* Allocate SSA names(lhs1) on the stack.  */
  tree lhs1 = (tree) XALLOCA (tree_ssa_name);
  memset (lhs1, 0, sizeof (tree_ssa_name));
  TREE_SET_CODE (lhs1, SSA_NAME);
  TREE_TYPE (lhs1) = type;
  init_ssa_name_imm_use (lhs1);

  /* Allocate SSA names(lhs2) on the stack.  */
  tree lhs2 = (tree) XALLOCA (tree_ssa_name);
  memset (lhs2, 0, sizeof (tree_ssa_name));
  TREE_SET_CODE (lhs2, SSA_NAME);
  TREE_TYPE (lhs2) = type;
  init_ssa_name_imm_use (lhs2);

  gimple_assign_set_lhs (stmt1, lhs1);
  gimple_assign_set_lhs (stmt2, lhs2);

  gimple_match_op op (gimple_match_cond::UNCOND, code,
		      type, gimple_assign_lhs (stmt1),
		      gimple_assign_lhs (stmt2));
  if (op.resimplify (NULL, follow_all_ssa_edges))
    {
      if (gimple_simplified_result_is_gimple_val (&op))
	{
	  tree res = op.ops[0];
	  if (res == lhs1)
	    return build2 (code1, type, op1a, op1b);
	  else if (res == lhs2)
	    return build2 (code2, type, op2a, op2b);
	  else
	    return res;
	}
      else if (op.code.is_tree_code ()
	       && TREE_CODE_CLASS ((tree_code) op.code) == tcc_comparison)
	{
	  tree op0 = op.ops[0];
	  tree op1 = op.ops[1];
	  if (op0 == lhs1 || op0 == lhs2 || op1 == lhs1 || op1 == lhs2)
	    return NULL_TREE;  /* not simple */

	  return build2 ((enum tree_code) op.code, op.type, op0, op1);
	}
    }

  return NULL_TREE;
}
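
/* For example (illustrative), asked for the BIT_AND_EXPR of x_1 < y_2
   and x_1 >= y_2, the two on-stack statements feed a match.pd
   resimplification that recognizes the comparisons as inverses of each
   other, so boolean_false_node comes back without any real SSA names
   or statements having been allocated.  */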
/* Try to simplify the AND of two comparisons, specified by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be simplified to a single expression (without requiring
   introducing more SSA variables to hold intermediate values),
   return the resulting tree.  Otherwise return NULL_TREE.
   If the result expression is non-null, it has boolean type.  */

tree
maybe_fold_and_comparisons (tree type,
			    enum tree_code code1, tree op1a, tree op1b,
			    enum tree_code code2, tree op2a, tree op2b)
{
  if (tree t = and_comparisons_1 (type, code1, op1a, op1b, code2, op2a, op2b))
    return t;

  if (tree t = and_comparisons_1 (type, code2, op2a, op2b, code1, op1a, op1b))
    return t;

  if (tree t = maybe_fold_comparisons_from_match_pd (type, BIT_AND_EXPR, code1,
						     op1a, op1b, code2, op2a,
						     op2b))
    return t;

  return NULL_TREE;
}
/* Helper function for or_comparisons_1:  try to simplify the OR of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of VAR before doing the OR.
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison (tree type, tree var, bool invert,
			enum tree_code code2, tree op2a, tree op2b)
{
  tree t;
  gimple *stmt = SSA_NAME_DEF_STMT (var);

  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.  */
  if (invert)
    t = and_var_with_comparison_1 (type, stmt,
				   invert_tree_comparison (code2, false),
				   op2a, op2b);
  else
    t = or_var_with_comparison_1 (type, stmt, code2, op2a, op2b);
  return canonicalize_bool (t, invert);
}
/* Try to simplify the OR of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison_1 (tree type, gimple *stmt,
			  enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var OR (var != 0)) => true.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_true_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = or_comparisons_1 (type, innercode,
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 code2, op2a, op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple *s;
      tree t;
      tree partial = NULL_TREE;
      bool is_or = (innercode == BIT_IOR_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
	 inner1 OR (inner1 AND inner2) => inner1
	 !inner1 OR (inner1 OR inner2) => true
	 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
      */
      if (inner1 == true_test_var)
	return (is_or ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_or ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (type, inner2, false, code2, op2a,
					  op2b));
      else if (inner2 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (type, inner1, false, code2, op2a,
					  op2b));

      /* Next, redistribute/reassociate the OR across the inner tests.
	 Compute the first partial result, (inner1 OR (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (type, gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (t OR inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_or)
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (integer_zerop (t))
		return inner2;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner2 OR (op2a code op2b)))  */
	  else if (integer_zerop (t))
	    return boolean_false_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (type, gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (inner1 OR t)
	     => (t OR partial)  */
	  if (is_or)
	    {
	      if (integer_zerop (t))
		return inner1;
	      else if (integer_onep (t))
		return boolean_true_node;
	      /* If both are the same, we can apply the identity
		 (x OR x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner1 OR (op2a code2 op2b)))
	     => (t AND partial)  */
	  else
	    {
	      if (integer_zerop (t))
		return boolean_false_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed AND expression.  The
		     interesting case is when at least one is true.
		     Or, if both are the same, we can apply the identity
		     (x AND x) == x.  */
		  if (integer_onep (partial))
		    return t;
		  else if (integer_onep (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
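/* Worked example for the redistribution case above (illustrative):
   with var = inner1 & inner2 where inner1 is "x < 0", ORing var with
   the comparison "x >= 0" gives

     (inner1 & inner2) | (x >= 0)
     => ((x < 0) | (x >= 0)) & (inner2 | (x >= 0))
     => inner2 | (x >= 0)

   i.e. the first partial result folds to true and the second partial
   result is the answer.  The name x is hypothetical.  */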
/* Try to simplify the OR of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
or_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) OR (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple *stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return or_var_with_comparison (type, op1a, invert, code2, op2a,
					 op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ORed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
			{
			  if (!result)
			    result = boolean_true_node;
			  else if (!integer_onep (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = or_var_with_comparison (type, arg, invert, code2,
						     op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}
/* Try to simplify the OR of two comparisons, specified by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be simplified to a single expression (without requiring
   introducing more SSA variables to hold intermediate values),
   return the resulting tree.  Otherwise return NULL_TREE.
   If the result expression is non-null, it has boolean type.  */

tree
maybe_fold_or_comparisons (tree type,
			   enum tree_code code1, tree op1a, tree op1b,
			   enum tree_code code2, tree op2a, tree op2b)
{
  if (tree t = or_comparisons_1 (type, code1, op1a, op1b, code2, op2a, op2b))
    return t;

  if (tree t = or_comparisons_1 (type, code2, op2a, op2b, code1, op1a, op1b))
    return t;

  if (tree t = maybe_fold_comparisons_from_match_pd (type, BIT_IOR_EXPR, code1,
						     op1a, op1b, code2, op2a,
						     op2b))
    return t;

  return NULL_TREE;
}
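/* Usage sketch (illustrative): a caller holding two comparisons can
   ask for a combined form without constructing intermediate SSA names,
   e.g.

     tree t = maybe_fold_or_comparisons (boolean_type_node,
					 LT_EXPR, x, y, EQ_EXPR, x, y);

   which yields "x <= y" for integral operands; x and y here are
   hypothetical trees.  */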
/* Fold STMT to a constant using VALUEIZE to valueize SSA names.

   Either NULL_TREE, a simplified but non-constant or a constant
   is returned.

   ???  This should go into a gimple-fold-inline.h file to be eventually
   privatized with the single valueize function used in the various TUs
   to avoid the indirect function call overhead.  */

tree
gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
				tree (*gvalueize) (tree))
{
  gimple_match_op res_op;
  /* ???  The SSA propagators do not correctly deal with following SSA use-def
     edges if there are intermediate VARYING defs.  For this reason
     do not follow SSA edges here even though SCCVN can technically
     just deal fine with that.  */
  if (gimple_simplify (stmt, &res_op, NULL, gvalueize, valueize))
    {
      tree res = NULL_TREE;
      if (gimple_simplified_result_is_gimple_val (&res_op))
	res = res_op.ops[0];
      else if (mprts_hook)
	res = mprts_hook (&res_op);
      if (res)
	{
	  if (dump_file && dump_flags & TDF_DETAILS)
	    {
	      fprintf (dump_file, "Match-and-simplified ");
	      print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, res);
	      fprintf (dump_file, "\n");
	    }
	  return res;
	}
    }

  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	enum tree_code subcode = gimple_assign_rhs_code (stmt);

	switch (get_gimple_rhs_class (subcode))
	  {
	  case GIMPLE_SINGLE_RHS:
	    {
	      tree rhs = gimple_assign_rhs1 (stmt);
	      enum tree_code_class kind = TREE_CODE_CLASS (subcode);

	      if (TREE_CODE (rhs) == SSA_NAME)
		{
		  /* If the RHS is an SSA_NAME, return its known constant value,
		     if any.  */
		  return (*valueize) (rhs);
		}
	      /* Handle propagating invariant addresses into address
		 operations.  */
	      else if (TREE_CODE (rhs) == ADDR_EXPR
		       && !is_gimple_min_invariant (rhs))
		{
		  poly_int64 offset = 0;
		  tree base;
		  base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
							  &offset,
							  valueize);
		  if (base
		      && (CONSTANT_CLASS_P (base)
			  || decl_address_invariant_p (base)))
		    return build_invariant_address (TREE_TYPE (rhs),
						    base, offset);
		}
	      else if (TREE_CODE (rhs) == CONSTRUCTOR
		       && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		       && known_eq (CONSTRUCTOR_NELTS (rhs),
				    TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
		{
		  unsigned i, nelts;
		  tree val;

		  nelts = CONSTRUCTOR_NELTS (rhs);
		  tree_vector_builder vec (TREE_TYPE (rhs), nelts, 1);
		  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
		    {
		      val = (*valueize) (val);
		      if (TREE_CODE (val) == INTEGER_CST
			  || TREE_CODE (val) == REAL_CST
			  || TREE_CODE (val) == FIXED_CST)
			vec.quick_push (val);
		      else
			return NULL_TREE;
		    }

		  return vec.build ();
		}
	      if (subcode == OBJ_TYPE_REF)
		{
		  tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
		  /* If callee is constant, we can fold away the wrapper.  */
		  if (is_gimple_min_invariant (val))
		    return val;
		}

	      if (kind == tcc_reference)
		{
		  if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
		       || TREE_CODE (rhs) == REALPART_EXPR
		       || TREE_CODE (rhs) == IMAGPART_EXPR)
		      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      return fold_unary_loc (EXPR_LOCATION (rhs),
					     TREE_CODE (rhs),
					     TREE_TYPE (rhs), val);
		    }
		  else if (TREE_CODE (rhs) == BIT_FIELD_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      return fold_ternary_loc (EXPR_LOCATION (rhs),
					       TREE_CODE (rhs),
					       TREE_TYPE (rhs), val,
					       TREE_OPERAND (rhs, 1),
					       TREE_OPERAND (rhs, 2));
		    }
		  else if (TREE_CODE (rhs) == MEM_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      if (TREE_CODE (val) == ADDR_EXPR
			  && is_gimple_min_invariant (val))
			{
			  tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
						  unshare_expr (val),
						  TREE_OPERAND (rhs, 1));
			  if (tem)
			    rhs = tem;
			}
		    }
		  return fold_const_aggregate_ref_1 (rhs, valueize);
		}
	      else if (kind == tcc_declaration)
		return get_symbol_constant_value (rhs);
	      return rhs;
	    }

	  case GIMPLE_UNARY_RHS:
	    return NULL_TREE;

	  case GIMPLE_BINARY_RHS:
	    /* Translate &x + CST into an invariant form suitable for
	       further propagation.  */
	    if (subcode == POINTER_PLUS_EXPR)
	      {
		tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
		tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
		if (TREE_CODE (op0) == ADDR_EXPR
		    && TREE_CODE (op1) == INTEGER_CST)
		  {
		    tree off = fold_convert (ptr_type_node, op1);
		    return build1_loc
			(loc, ADDR_EXPR, TREE_TYPE (op0),
			 fold_build2 (MEM_REF,
				      TREE_TYPE (TREE_TYPE (op0)),
				      unshare_expr (op0), off));
		  }
	      }
	    /* Canonicalize bool != 0 and bool == 0 appearing after
	       valueization.  While gimple_simplify handles this
	       it can get confused by the ~X == 1 -> X == 0 transform
	       which we can't reduce to a SSA name or a constant
	       (and we have no way to tell gimple_simplify to not
	       consider those transforms in the first place).  */
	    else if (subcode == EQ_EXPR
		     || subcode == NE_EXPR)
	      {
		tree lhs = gimple_assign_lhs (stmt);
		tree op0 = gimple_assign_rhs1 (stmt);
		if (useless_type_conversion_p (TREE_TYPE (lhs),
					       TREE_TYPE (op0)))
		  {
		    tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
		    op0 = (*valueize) (op0);
		    if (TREE_CODE (op0) == INTEGER_CST)
		      std::swap (op0, op1);
		    if (TREE_CODE (op1) == INTEGER_CST
			&& ((subcode == NE_EXPR && integer_zerop (op1))
			    || (subcode == EQ_EXPR && integer_onep (op1))))
		      return op0;
		  }
	      }
	    return NULL_TREE;

	  case GIMPLE_TERNARY_RHS:
	    {
	      /* Handle ternary operators that can appear in GIMPLE form.  */
	      tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
	      tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
	      tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
	      return fold_ternary_loc (loc, subcode,
				       gimple_expr_type (stmt), op0, op1, op2);
	    }

	  default:
	    gcc_unreachable ();
	  }
      }

    case GIMPLE_CALL:
      {
	tree fn;
	gcall *call_stmt = as_a <gcall *> (stmt);

	if (gimple_call_internal_p (stmt))
	  {
	    enum tree_code subcode = ERROR_MARK;
	    switch (gimple_call_internal_fn (stmt))
	      {
	      case IFN_UBSAN_CHECK_ADD:
		subcode = PLUS_EXPR;
		break;
	      case IFN_UBSAN_CHECK_SUB:
		subcode = MINUS_EXPR;
		break;
	      case IFN_UBSAN_CHECK_MUL:
		subcode = MULT_EXPR;
		break;
	      case IFN_BUILTIN_EXPECT:
		{
		  tree arg0 = gimple_call_arg (stmt, 0);
		  tree op0 = (*valueize) (arg0);
		  if (TREE_CODE (op0) == INTEGER_CST)
		    return op0;
		  return NULL_TREE;
		}
	      default:
		return NULL_TREE;
	      }
	    tree arg0 = gimple_call_arg (stmt, 0);
	    tree arg1 = gimple_call_arg (stmt, 1);
	    tree op0 = (*valueize) (arg0);
	    tree op1 = (*valueize) (arg1);

	    if (TREE_CODE (op0) != INTEGER_CST
		|| TREE_CODE (op1) != INTEGER_CST)
	      {
		switch (subcode)
		  {
		  case MULT_EXPR:
		    /* x * 0 = 0 * x = 0 without overflow.  */
		    if (integer_zerop (op0) || integer_zerop (op1))
		      return build_zero_cst (TREE_TYPE (arg0));
		    break;
		  case MINUS_EXPR:
		    /* y - y = 0 without overflow.  */
		    if (operand_equal_p (op0, op1, 0))
		      return build_zero_cst (TREE_TYPE (arg0));
		    break;
		  default:
		    break;
		  }
	      }
	    tree res
	      = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
	    if (res
		&& TREE_CODE (res) == INTEGER_CST
		&& !TREE_OVERFLOW (res))
	      return res;
	    return NULL_TREE;
	  }

	fn = (*valueize) (gimple_call_fn (stmt));
	if (TREE_CODE (fn) == ADDR_EXPR
	    && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
	    && fndecl_built_in_p (TREE_OPERAND (fn, 0))
	    && gimple_builtin_call_types_compatible_p (stmt,
						       TREE_OPERAND (fn, 0)))
	  {
	    tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
	    tree retval;
	    unsigned i;
	    for (i = 0; i < gimple_call_num_args (stmt); ++i)
	      args[i] = (*valueize) (gimple_call_arg (stmt, i));
	    retval = fold_builtin_call_array (loc,
					      gimple_call_return_type (call_stmt),
					      fn, gimple_call_num_args (stmt),
					      args);
	    if (retval)
	      {
		/* fold_call_expr wraps the result inside a NOP_EXPR.  */
		STRIP_NOPS (retval);
		retval = fold_convert (gimple_call_return_type (call_stmt),
				       retval);
	      }
	    return retval;
	  }
	return NULL_TREE;
      }

    default:
      return NULL_TREE;
    }
}
/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
   Returns NULL_TREE if folding to a constant is not possible, otherwise
   returns a constant according to is_gimple_min_invariant.  */

tree
gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
{
  tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
  if (res && is_gimple_min_invariant (res))
    return res;
  return NULL_TREE;
}
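/* Usage sketch (illustrative): a propagator can fold a statement
   against its lattice with something like

     static tree
     my_valueize (tree name)
     {
       return name;   // identity; a real pass returns lattice values
     }
     ...
     tree val = gimple_fold_stmt_to_constant (stmt, my_valueize);

   where VAL is NULL_TREE or satisfies is_gimple_min_invariant;
   my_valueize is a hypothetical callback.  */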
/* The following set of functions are supposed to fold references using
   their constant initializers.  */

/* See if we can find a constructor defining the value of BASE.
   When we know the constructor with constant offset (such as
   base is array[40] and we do know the constructor of array), then
   BIT_OFFSET is adjusted accordingly.

   As a special case, return error_mark_node when the constructor
   is not explicitly available, but it is known to be zero
   such as 'static const int a;'.  */

static tree
get_base_constructor (tree base, poly_int64_pod *bit_offset,
		      tree (*valueize) (tree))
{
  poly_int64 bit_offset2, size, max_size;
  bool reverse;

  if (TREE_CODE (base) == MEM_REF)
    {
      poly_offset_int boff = *bit_offset + mem_ref_offset (base) * BITS_PER_UNIT;
      if (!boff.to_shwi (bit_offset))
	return NULL_TREE;

      if (valueize
	  && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
	base = valueize (TREE_OPERAND (base, 0));
      if (!base || TREE_CODE (base) != ADDR_EXPR)
	return NULL_TREE;
      base = TREE_OPERAND (base, 0);
    }
  else if (valueize
	   && TREE_CODE (base) == SSA_NAME)
    base = valueize (base);

  /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
     DECL_INITIAL.  If BASE is a nested reference into another
     ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
     the inner reference.  */
  switch (TREE_CODE (base))
    {
    case VAR_DECL:
    case CONST_DECL:
      {
	tree init = ctor_for_folding (base);

	/* Our semantic is exact opposite of ctor_for_folding;
	   NULL means unknown, while error_mark_node is 0.  */
	if (init == error_mark_node)
	  return NULL_TREE;
	if (!init)
	  return error_mark_node;
	return init;
      }

    case VIEW_CONVERT_EXPR:
      return get_base_constructor (TREE_OPERAND (base, 0),
				   bit_offset, valueize);

    case ARRAY_REF:
    case COMPONENT_REF:
      base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
				      &reverse);
      if (!known_size_p (max_size) || maybe_ne (size, max_size))
	return NULL_TREE;
      *bit_offset += bit_offset2;
      return get_base_constructor (base, bit_offset, valueize);

    case CONSTRUCTOR:
      return base;

    default:
      if (CONSTANT_CLASS_P (base))
	return base;

      return NULL_TREE;
    }
}
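/* Illustrative example: for BASE of the form MEM_REF <&arr, 8> with
   *BIT_OFFSET of 0 and BITS_PER_UNIT of 8, the MEM_REF case above
   rewrites *BIT_OFFSET to 64 and then looks up the constructor of arr,
   if one is available; arr is a hypothetical static array.  */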
/* CTOR is CONSTRUCTOR of an array type.  Fold a reference of SIZE bits
   to the memory at bit OFFSET.  When non-null, TYPE is the expected
   type of the reference; otherwise the type of the referenced element
   is used instead.  When SIZE is zero, attempt to fold a reference to
   the entire element which OFFSET refers to.  Increment *SUBOFF by
   the bit offset of the accessed element.  */

static tree
fold_array_ctor_reference (tree type, tree ctor,
			   unsigned HOST_WIDE_INT offset,
			   unsigned HOST_WIDE_INT size,
			   tree from_decl,
			   unsigned HOST_WIDE_INT *suboff)
{
  offset_int low_bound;
  offset_int elt_size;
  offset_int access_index;
  tree domain_type = NULL_TREE;
  HOST_WIDE_INT inner_offset;

  /* Compute low bound and elt size.  */
  if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
    domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    {
      /* Static constructors for variably sized objects make no sense.  */
      if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
	return NULL_TREE;
      low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
    }
  else
    low_bound = 0;
  /* Static constructors for variably sized objects make no sense.  */
  if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
    return NULL_TREE;
  elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));

  /* When TYPE is non-null, verify that it specifies a constant-sized
     access of a multiple of the array element size.  Avoid division
     by zero below when ELT_SIZE is zero, such as with the result of
     an initializer for a zero-length array or an empty struct.  */
  if (elt_size == 0
      || (type
	  && (!TYPE_SIZE_UNIT (type)
	      || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST)))
    return NULL_TREE;

  /* Compute the array index we look for.  */
  access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
				 elt_size);
  access_index += low_bound;

  /* And offset within the access.  */
  inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);

  unsigned HOST_WIDE_INT elt_sz = elt_size.to_uhwi ();
  if (size > elt_sz * BITS_PER_UNIT)
    {
      /* native_encode_expr constraints.  */
      if (size > MAX_BITSIZE_MODE_ANY_MODE
	  || size % BITS_PER_UNIT != 0
	  || inner_offset % BITS_PER_UNIT != 0
	  || elt_sz > MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT)
	return NULL_TREE;

      unsigned ctor_idx;
      tree val = get_array_ctor_element_at_index (ctor, access_index,
						  &ctor_idx);
      if (!val && ctor_idx >= CONSTRUCTOR_NELTS (ctor))
	return build_zero_cst (type);

      /* native-encode adjacent ctor elements.  */
      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
      unsigned bufoff = 0;
      offset_int index = 0;
      offset_int max_index = access_index;
      constructor_elt *elt = CONSTRUCTOR_ELT (ctor, ctor_idx);
      if (!val)
	val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
      else if (!CONSTANT_CLASS_P (val))
	return NULL_TREE;
      if (!elt->index)
	;
      else if (TREE_CODE (elt->index) == RANGE_EXPR)
	{
	  index = wi::to_offset (TREE_OPERAND (elt->index, 0));
	  max_index = wi::to_offset (TREE_OPERAND (elt->index, 1));
	}
      else
	index = max_index = wi::to_offset (elt->index);
      index = wi::umax (index, access_index);
      do
	{
	  if (bufoff + elt_sz > sizeof (buf))
	    elt_sz = sizeof (buf) - bufoff;
	  int len = native_encode_expr (val, buf + bufoff, elt_sz,
					inner_offset / BITS_PER_UNIT);
	  if (len != (int) elt_sz - inner_offset / BITS_PER_UNIT)
	    return NULL_TREE;
	  inner_offset = 0;
	  bufoff += len;

	  access_index += 1;
	  if (wi::cmpu (access_index, index) == 0)
	    val = elt->value;
	  else if (wi::cmpu (access_index, max_index) > 0)
	    {
	      ctor_idx++;
	      if (ctor_idx >= CONSTRUCTOR_NELTS (ctor))
		{
		  val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
		  ++max_index;
		}
	      else
		{
		  elt = CONSTRUCTOR_ELT (ctor, ctor_idx);
		  index = 0;
		  max_index = access_index;
		  if (!elt->index)
		    ;
		  else if (TREE_CODE (elt->index) == RANGE_EXPR)
		    {
		      index = wi::to_offset (TREE_OPERAND (elt->index, 0));
		      max_index = wi::to_offset (TREE_OPERAND (elt->index, 1));
		    }
		  else
		    index = max_index = wi::to_offset (elt->index);
		  index = wi::umax (index, access_index);
		  if (wi::cmpu (access_index, index) == 0)
		    val = elt->value;
		  else
		    val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
		}
	    }
	}
      while (bufoff < size / BITS_PER_UNIT);
      *suboff += size;
      return native_interpret_expr (type, buf, size / BITS_PER_UNIT);
    }

  if (tree val = get_array_ctor_element_at_index (ctor, access_index))
    {
      if (!size && TREE_CODE (val) != CONSTRUCTOR)
	{
	  /* For the final reference to the entire accessed element
	     (SIZE is zero), reset INNER_OFFSET, disregard TYPE (which
	     may be null) in favor of the type of the element, and set
	     SIZE to the size of the accessed element.  */
	  inner_offset = 0;
	  type = TREE_TYPE (val);
	  size = elt_sz * BITS_PER_UNIT;
	}
      else if (size && access_index < CONSTRUCTOR_NELTS (ctor) - 1
	       && TREE_CODE (val) == CONSTRUCTOR
	       && (elt_sz * BITS_PER_UNIT - inner_offset) < size)
	/* If this isn't the last element in the CTOR and a CTOR itself
	   and it does not cover the whole object we are requesting give up
	   since we're not set up for combining from multiple CTORs.  */
	return NULL_TREE;

      *suboff += access_index.to_uhwi () * elt_sz * BITS_PER_UNIT;
      return fold_ctor_reference (type, val, inner_offset, size, from_decl,
				  suboff);
    }

  /* Memory not explicitly mentioned in constructor is 0 (or
     the reference is out of range).  */
  return type ? build_zero_cst (type) : NULL_TREE;
}
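/* Worked example (illustrative): reading 32 bits at bit offset 96
   from the initializer of

     static const int a[4] = {1, 2, 3, 4};

   with BITS_PER_UNIT == 8 gives elt_size == 4, access_index
   == (96 / 8) / 4 == 3 and inner_offset == 0, so the reference folds
   to the constant 4.  The array a is hypothetical.  */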
/* CTOR is CONSTRUCTOR of an aggregate or vector.  Fold a reference
   of SIZE bits to the memory at bit OFFSET.  When non-null, TYPE
   is the expected type of the reference; otherwise the type of
   the referenced member is used instead.  When SIZE is zero,
   attempt to fold a reference to the entire member which OFFSET
   refers to.  Increment *SUBOFF by the bit offset of the accessed
   member.  */

static tree
fold_nonarray_ctor_reference (tree type, tree ctor,
			      unsigned HOST_WIDE_INT offset,
			      unsigned HOST_WIDE_INT size,
			      tree from_decl,
			      unsigned HOST_WIDE_INT *suboff)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
			    cval)
    {
      tree byte_offset = DECL_FIELD_OFFSET (cfield);
      tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
      tree field_size = DECL_SIZE (cfield);

      if (!field_size)
	{
	  /* Determine the size of the flexible array member from
	     the size of the initializer provided for it.  */
	  field_size = TYPE_SIZE (TREE_TYPE (cval));
	}

      /* Variable sized objects in static constructors make no sense,
	 but field_size can be NULL for flexible array members.  */
      gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
		  && TREE_CODE (byte_offset) == INTEGER_CST
		  && (field_size != NULL_TREE
		      ? TREE_CODE (field_size) == INTEGER_CST
		      : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));

      /* Compute bit offset of the field.  */
      offset_int bitoffset
	= (wi::to_offset (field_offset)
	   + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
      /* Compute bit offset where the field ends.  */
      offset_int bitoffset_end;
      if (field_size != NULL_TREE)
	bitoffset_end = bitoffset + wi::to_offset (field_size);
      else
	bitoffset_end = 0;

      /* Compute the bit offset of the end of the desired access.
	 As a special case, if the size of the desired access is
	 zero, assume the access is to the entire field (and let
	 the caller make any necessary adjustments by storing
	 the actual bounds of the field in FIELDBOUNDS).  */
      offset_int access_end = offset_int (offset);
      if (size)
	access_end += size;
      else
	access_end = bitoffset_end;

      /* Is there any overlap between the desired access at
	 [OFFSET, OFFSET+SIZE) and the offset of the field within
	 the object at [BITOFFSET, BITOFFSET_END)?  */
      if (wi::cmps (access_end, bitoffset) > 0
	  && (field_size == NULL_TREE
	      || wi::lts_p (offset, bitoffset_end)))
	{
	  *suboff += bitoffset.to_uhwi ();

	  if (!size && TREE_CODE (cval) != CONSTRUCTOR)
	    {
	      /* For the final reference to the entire accessed member
		 (SIZE is zero), reset OFFSET, disregard TYPE (which may
		 be null) in favor of the type of the member, and set
		 SIZE to the size of the accessed member.  */
	      offset = bitoffset.to_uhwi ();
	      type = TREE_TYPE (cval);
	      size = (bitoffset_end - bitoffset).to_uhwi ();
	    }

	  /* We do have overlap.  Now see if the field is large enough
	     to cover the access.  Give up for accesses that extend
	     beyond the end of the object or that span multiple fields.  */
	  if (wi::cmps (access_end, bitoffset_end) > 0)
	    return NULL_TREE;
	  if (offset < bitoffset)
	    return NULL_TREE;

	  offset_int inner_offset = offset_int (offset) - bitoffset;
	  return fold_ctor_reference (type, cval,
				      inner_offset.to_uhwi (), size,
				      from_decl, suboff);
	}
    }

  if (!type)
    return NULL_TREE;

  return build_zero_cst (type);
}
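/* Worked example (illustrative): for

     static const struct { char c; int i; } s = { 'x', 7 };

   the field i typically has DECL_FIELD_OFFSET 4 and
   DECL_FIELD_BIT_OFFSET 0, so bitoffset == 32 and bitoffset_end == 64
   on a target with 8-bit units and 32-bit int; a 32-bit access at bit
   offset 32 overlaps exactly and folds to the constant 7.  The struct
   s is hypothetical and its layout target-dependent.  */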
/* CTOR is value initializing memory.  Fold a reference of TYPE and
   bit size POLY_SIZE to the memory at bit POLY_OFFSET.  When POLY_SIZE
   is zero, attempt to fold a reference to the entire subobject
   which POLY_OFFSET refers to.  This is used when folding accesses to
   string members of aggregates.  When non-null, set *SUBOFF to
   the bit offset of the accessed subobject.  */

tree
fold_ctor_reference (tree type, tree ctor, const poly_uint64 &poly_offset,
		     const poly_uint64 &poly_size, tree from_decl,
		     unsigned HOST_WIDE_INT *suboff /* = NULL */)
{
  tree ret;

  /* We found the field with exact match.  */
  if (type
      && useless_type_conversion_p (type, TREE_TYPE (ctor))
      && known_eq (poly_offset, 0U))
    return canonicalize_constructor_val (unshare_expr (ctor), from_decl);

  /* The remaining optimizations need a constant size and offset.  */
  unsigned HOST_WIDE_INT size, offset;
  if (!poly_size.is_constant (&size) || !poly_offset.is_constant (&offset))
    return NULL_TREE;

  /* We are at the end of walk, see if we can view convert the
     result.  */
  if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
      /* VIEW_CONVERT_EXPR is defined only for matching sizes.  */
      && !compare_tree_int (TYPE_SIZE (type), size)
      && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
    {
      ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
      if (ret)
	{
	  ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
	  if (ret)
	    STRIP_USELESS_TYPE_CONVERSION (ret);
	}
      return ret;
    }
  /* For constants and byte-aligned/sized reads try to go through
     native_encode/interpret.  */
  if (CONSTANT_CLASS_P (ctor)
      && BITS_PER_UNIT == 8
      && offset % BITS_PER_UNIT == 0
      && offset / BITS_PER_UNIT <= INT_MAX
      && size % BITS_PER_UNIT == 0
      && size <= MAX_BITSIZE_MODE_ANY_MODE
      && can_native_interpret_type_p (type))
    {
      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
      int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
				    offset / BITS_PER_UNIT);
      if (len > 0)
	return native_interpret_expr (type, buf, len);
    }
  if (TREE_CODE (ctor) == CONSTRUCTOR)
    {
      unsigned HOST_WIDE_INT dummy = 0;
      if (!suboff)
	suboff = &dummy;

      if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
	  || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
	ret = fold_array_ctor_reference (type, ctor, offset, size,
					 from_decl, suboff);
      else
	ret = fold_nonarray_ctor_reference (type, ctor, offset, size,
					    from_decl, suboff);

      /* Fall back to native_encode_initializer.  Needs to be done
	 only in the outermost fold_ctor_reference call (because it itself
	 recurses into CONSTRUCTORs) and doesn't update suboff.  */
      if (ret == NULL_TREE
	  && suboff == &dummy
	  && BITS_PER_UNIT == 8
	  && offset % BITS_PER_UNIT == 0
	  && offset / BITS_PER_UNIT <= INT_MAX
	  && size % BITS_PER_UNIT == 0
	  && size <= MAX_BITSIZE_MODE_ANY_MODE
	  && can_native_interpret_type_p (type))
	{
	  unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
	  int len = native_encode_initializer (ctor, buf, size / BITS_PER_UNIT,
					       offset / BITS_PER_UNIT);
	  if (len > 0)
	    return native_interpret_expr (type, buf, len);
	}

      return ret;
    }

  return NULL_TREE;
}
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates valueizing SSA
   names using VALUEIZE.  Return NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
{
  tree ctor, idx, base;
  poly_int64 offset, size, max_size;
  tree tem;
  bool reverse;

  if (TREE_THIS_VOLATILE (t))
    return NULL_TREE;

  if (DECL_P (t))
    return get_symbol_constant_value (t);

  tem = fold_read_from_constant_string (t);
  if (tem)
    return tem;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Constant indexes are handled well by get_base_constructor.
	 Only special case variable offsets.
	 FIXME: This code can't handle nested references with variable indexes
	 (they will be handled only by iteration of ccp).  Perhaps we can bring
	 get_ref_base_and_extent here and make it use a valueize callback.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
	  && valueize
	  && (idx = (*valueize) (TREE_OPERAND (t, 1)))
	  && poly_int_tree_p (idx))
	{
	  tree low_bound, unit_size;

	  /* If the resulting bit-offset is constant, track it.  */
	  if ((low_bound = array_ref_low_bound (t),
	       poly_int_tree_p (low_bound))
	      && (unit_size = array_ref_element_size (t),
		  tree_fits_uhwi_p (unit_size)))
	    {
	      poly_offset_int woffset
		= wi::sext (wi::to_poly_offset (idx)
			    - wi::to_poly_offset (low_bound),
			    TYPE_PRECISION (TREE_TYPE (idx)));
	      woffset *= tree_to_uhwi (unit_size);
	      woffset *= BITS_PER_UNIT;
	      if (woffset.to_shwi (&offset))
		{
		  base = TREE_OPERAND (t, 0);
		  ctor = get_base_constructor (base, &offset, valueize);
		  /* Empty constructor.  Always fold to 0.  */
		  if (ctor == error_mark_node)
		    return build_zero_cst (TREE_TYPE (t));
		  /* Out of bound array access.  Value is undefined,
		     but don't fold.  */
		  if (maybe_lt (offset, 0))
		    return NULL_TREE;
		  /* We cannot determine ctor.  */
		  if (!ctor)
		    return NULL_TREE;
		  return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
					      tree_to_uhwi (unit_size)
					      * BITS_PER_UNIT,
					      base);
		}
	    }
	}
      /* Fallthru.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case TARGET_MEM_REF:
    case MEM_REF:
      base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
      ctor = get_base_constructor (base, &offset, valueize);

      /* Empty constructor.  Always fold to 0.  */
      if (ctor == error_mark_node)
	return build_zero_cst (TREE_TYPE (t));
      /* We do not know precise address.  */
      if (!known_size_p (max_size) || maybe_ne (max_size, size))
	return NULL_TREE;
      /* We cannot determine ctor.  */
      if (!ctor)
	return NULL_TREE;

      /* Out of bound array access.  Value is undefined, but don't fold.  */
      if (maybe_lt (offset, 0))
	return NULL_TREE;

      tem = fold_ctor_reference (TREE_TYPE (t), ctor, offset, size, base);
      if (tem)
	return tem;

      /* For bit field reads try to read the representative and
	 adjust.  */
      if (TREE_CODE (t) == COMPONENT_REF
	  && DECL_BIT_FIELD (TREE_OPERAND (t, 1))
	  && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1)))
	{
	  HOST_WIDE_INT csize, coffset;
	  tree field = TREE_OPERAND (t, 1);
	  tree repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (repr))
	      && size.is_constant (&csize)
	      && offset.is_constant (&coffset)
	      && (coffset % BITS_PER_UNIT != 0
		  || csize % BITS_PER_UNIT != 0)
	      && !reverse
	      && BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN)
	    {
	      poly_int64 bitoffset;
	      poly_uint64 field_offset, repr_offset;
	      if (poly_int_tree_p (DECL_FIELD_OFFSET (field), &field_offset)
		  && poly_int_tree_p (DECL_FIELD_OFFSET (repr), &repr_offset))
		bitoffset = (field_offset - repr_offset) * BITS_PER_UNIT;
	      else
		bitoffset = 0;
	      bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
			    - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
	      HOST_WIDE_INT bitoff;
	      int diff = (TYPE_PRECISION (TREE_TYPE (repr))
			  - TYPE_PRECISION (TREE_TYPE (field)));
	      if (bitoffset.is_constant (&bitoff)
		  && bitoff >= 0
		  && bitoff <= diff)
		{
		  offset -= bitoff;
		  size = tree_to_uhwi (DECL_SIZE (repr));

		  tem = fold_ctor_reference (TREE_TYPE (repr), ctor, offset,
					     size, base);
		  if (tem && TREE_CODE (tem) == INTEGER_CST)
		    {
		      if (!BYTES_BIG_ENDIAN)
			tem = wide_int_to_tree (TREE_TYPE (field),
						wi::lrshift (wi::to_wide (tem),
							     bitoff));
		      else
			tem = wide_int_to_tree (TREE_TYPE (field),
						wi::lrshift (wi::to_wide (tem),
							     diff - bitoff));
		      return tem;
		    }
		}
	    }
	}
      break;

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
	if (c && TREE_CODE (c) == COMPLEX_CST)
	  return fold_build1_loc (EXPR_LOCATION (t),
				  TREE_CODE (t), TREE_TYPE (t), c);
	break;
      }

    default:
      break;
    }

  return NULL_TREE;
}

/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates.  Return
   NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref (tree t)
{
  return fold_const_aggregate_ref_1 (t, NULL);
}
/* Lookup virtual method with index TOKEN in a virtual table V
   at OFFSET.
   Set CAN_REFER if non-NULL to false if the method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by a non-C++ produced symbol).  Otherwise just return NULL in that case.  */

tree
gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
				   tree v,
				   unsigned HOST_WIDE_INT offset,
				   bool *can_refer)
{
  tree vtable = v, init, fn;
  unsigned HOST_WIDE_INT size;
  unsigned HOST_WIDE_INT elt_size, access_index;
  tree domain_type;

  if (can_refer)
    *can_refer = true;

  /* First of all double-check we have a virtual table.  */
  if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
    {
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }

  init = ctor_for_folding (v);

  /* The virtual tables should always be born with constructors
     and we always should assume that they are available for
     folding.  At the moment we do not stream them in all cases,
     but it should never happen that the ctor seems unreachable.  */
  gcc_assert (init);
  if (init == error_mark_node)
    {
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }
  gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
  size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
  offset *= BITS_PER_UNIT;
  offset += token * size;

  /* Lookup the value in the constructor that is assumed to be array.
     This is equivalent to
     fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
			       offset, size, NULL);
     but in a constant time.  We expect that frontend produced a simple
     array without indexed initializers.  */

  gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
  domain_type = TYPE_DOMAIN (TREE_TYPE (init));
  gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
  elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));

  access_index = offset / BITS_PER_UNIT / elt_size;
  gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);

  /* The C++ FE can now produce indexed fields, and we check if the indexes
     match.  */
  if (access_index < CONSTRUCTOR_NELTS (init))
    {
      fn = CONSTRUCTOR_ELT (init, access_index)->value;
      tree idx = CONSTRUCTOR_ELT (init, access_index)->index;
      gcc_checking_assert (!idx || tree_to_uhwi (idx) == access_index);
    }
  else
    fn = NULL;

  /* For a type-inconsistent program we may end up looking up virtual method
     in a virtual table that does not contain TOKEN entries.  We may overrun
     the virtual table and pick up a constant or RTTI info pointer.
     In any case the call is undefined.  */
  if (!fn
      || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
      || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
    fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
  else
    {
      fn = TREE_OPERAND (fn, 0);

      /* When cgraph node is missing and function is not public, we cannot
	 devirtualize.  This can happen in WHOPR when the actual method
	 ends up in other partition, because we found devirtualization
	 possibility too late.  */
      if (!can_refer_decl_in_current_unit_p (fn, vtable))
	{
	  if (can_refer)
	    {
	      *can_refer = false;
	      return fn;
	    }
	  return NULL_TREE;
	}
    }

  /* Make sure we create a cgraph node for functions we'll reference.
     They can be non-existent if the reference comes from an entry
     of an external vtable for example.  */
  cgraph_node::get_create (fn);

  return fn;
}
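/* Worked example (illustrative): on a target with 64-bit function
   pointers and BITS_PER_UNIT == 8, looking up TOKEN 2 at OFFSET 0
   gives size == 64, offset == 2 * 64 == 128 bits and access_index
   == 128 / 8 / 8 == 2, i.e. the third slot of the vtable array.  */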
/* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
   is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
   KNOWN_BINFO carries the binfo describing the true type of
   OBJ_TYPE_REF_OBJECT(REF).
   Set CAN_REFER if non-NULL to false if the method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by a non-C++ produced symbol).  Otherwise just return NULL in that case.  */

tree
gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
				  bool *can_refer)
{
  unsigned HOST_WIDE_INT offset;
  tree v;

  v = BINFO_VTABLE (known_binfo);
  /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone.  */
  if (!v)
    return NULL_TREE;

  if (!vtable_pointer_value_to_vtable (v, &v, &offset))
    {
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }
  return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
}
/* Given a pointer value T, return a simplified version of an
   indirection through T, or NULL_TREE if no simplification is
   possible.  Note that the resulting type may be different from
   the type pointed to in the sense that it is still compatible
   from the langhooks point of view.  */

tree
gimple_fold_indirect_ref (tree t)
{
  tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
  tree sub = t;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype)
      || TYPE_REF_CAN_ALIAS_ALL (ptype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (useless_type_conversion_p (type, optype))
	return op;

      /* *(foo *)&fooarray => fooarray[0] */
      if (TREE_CODE (optype) == ARRAY_TYPE
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
	  && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  if (TREE_CODE (min_val) == INTEGER_CST)
	    return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
	}
    }

  /* *(p + CST) -> ...  */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree addr = TREE_OPERAND (sub, 0);
      tree off = TREE_OPERAND (sub, 1);
      tree addrtype;

      STRIP_NOPS (addr);
      addrtype = TREE_TYPE (addr);

      /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
	  && tree_fits_uhwi_p (off))
	{
	  unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
	  tree part_width = TYPE_SIZE (type);
	  unsigned HOST_WIDE_INT part_widthi
	    = tree_to_shwi (part_width) / BITS_PER_UNIT;
	  unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
	  tree index = bitsize_int (indexi);
	  if (known_lt (offset / part_widthi,
			TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype))))
	    return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
				part_width, index);
	}

      /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
	{
	  tree size = TYPE_SIZE_UNIT (type);
	  if (tree_int_cst_equal (size, off))
	    return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
	}

      /* *(p + CST) -> MEM_REF <p, CST>.  */
      if (TREE_CODE (addr) != ADDR_EXPR
	  || DECL_P (TREE_OPERAND (addr, 0)))
	return fold_build2 (MEM_REF, type,
			    addr,
			    wide_int_to_tree (ptype, wi::to_wide (off)));
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
      && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      tree osub = sub;
      sub = gimple_fold_indirect_ref (sub);
      if (! sub)
	sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      if (TREE_CODE (min_val) == INTEGER_CST)
	return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
/* Return true if CODE is an operation that, when operating on signed
   integer types, involves undefined behavior on overflow and the
   operation can be expressed with unsigned arithmetic.  */

bool
arith_code_with_undefined_signed_overflow (tree_code code)
{
  switch (code)
    {
    case ABS_EXPR:
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case NEGATE_EXPR:
    case POINTER_PLUS_EXPR:
      return true;
    default:
      return false;
    }
}
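/* Usage sketch (illustrative): callers typically pair this predicate
   with the rewriter below, along the lines of

     if (is_gimple_assign (stmt)
	 && INTEGRAL_TYPE_P (TREE_TYPE (gimple_assign_lhs (stmt)))
	 && arith_code_with_undefined_signed_overflow
	      (gimple_assign_rhs_code (stmt)))
       gsi_replace_with_seq (&gsi, rewrite_to_defined_overflow (stmt),
			     true);

   where gsi is a hypothetical iterator positioned at STMT; real
   callers also check TYPE_OVERFLOW_UNDEFINED.  */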
/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
   operation that can be transformed to unsigned arithmetic by converting
   its operand, carrying out the operation in the corresponding unsigned
   type and converting the result back to the original type.

   Returns a sequence of statements that replace STMT and also contain
   a modified form of STMT itself.  */

gimple_seq
rewrite_to_defined_overflow (gimple *stmt)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "rewriting stmt with undefined signed "
	       "overflow ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
    }

  tree lhs = gimple_assign_lhs (stmt);
  tree type = unsigned_type_for (TREE_TYPE (lhs));
  gimple_seq stmts = NULL;
  if (gimple_assign_rhs_code (stmt) == ABS_EXPR)
    gimple_assign_set_rhs_code (stmt, ABSU_EXPR);
  else
    for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
      {
	tree op = gimple_op (stmt, i);
	op = gimple_convert (&stmts, type, op);
	gimple_set_op (stmt, i, op);
      }
  gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
  gimple_set_modified (stmt, true);
  gimple_seq_add_stmt (&stmts, stmt);
  gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
  gimple_seq_add_stmt (&stmts, cvt);

  return stmts;
}
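/* Illustrative before/after (hypothetical SSA names): for the signed
   addition

     _5 = a_1 + b_2;

   the rewrite above emits roughly

     _6 = (unsigned int) a_1;
     _7 = (unsigned int) b_2;
     _8 = _6 + _7;
     _5 = (int) _8;

   where the unsigned addition wraps instead of invoking undefined
   behavior.  */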
/* The valueization hook we use for the gimple_build API simplification.
   This makes us match fold_buildN behavior by only combining with
   statements in the sequence(s) we are currently building.  */

static tree
gimple_build_valueize (tree op)
{
  if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
    return op;
  return NULL_TREE;
}
/* Build the expression CODE OP0 of type TYPE with location LOC,
   simplifying it first if possible.  Returns the built
   expression value and appends statements possibly defining it
   to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum tree_code code, tree type, tree op0)
{
  tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
  if (!res)
    {
      res = create_tmp_reg_or_ssa_name (type);
      gimple *stmt;
      if (code == REALPART_EXPR
	  || code == IMAGPART_EXPR
	  || code == VIEW_CONVERT_EXPR)
	stmt = gimple_build_assign (res, code, build1 (code, type, op0));
      else
	stmt = gimple_build_assign (res, code, op0);
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
/* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
   simplifying it first if possible.  Returns the built
   expression value and appends statements possibly defining it
   to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum tree_code code, tree type, tree op0, tree op1)
{
  tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
  if (!res)
    {
      res = create_tmp_reg_or_ssa_name (type);
      gimple *stmt = gimple_build_assign (res, code, op0, op1);
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
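/* Usage sketch (illustrative): the overloads above let a pass build
   and simplify expressions in one step, e.g.

     gimple_seq seq = NULL;
     tree sum = gimple_build (&seq, loc, PLUS_EXPR, sizetype, a, b);
     tree dbl = gimple_build (&seq, loc, MULT_EXPR, sizetype, sum,
			      size_int (2));

   after which SEQ holds only the statements that were not folded away;
   a, b and loc are hypothetical.  */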
/* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
   simplifying it first if possible.  Returns the built
   expression value and appends statements possibly defining it
   to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum tree_code code, tree type, tree op0, tree op1, tree op2)
{
  tree res = gimple_simplify (code, type, op0, op1, op2,
			      seq, gimple_build_valueize);
  if (!res)
    {
      res = create_tmp_reg_or_ssa_name (type);
      gimple *stmt;
      if (code == BIT_FIELD_REF)
	stmt = gimple_build_assign (res, code,
				    build3 (code, type, op0, op1, op2));
      else
	stmt = gimple_build_assign (res, code, op0, op1, op2);
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
/* Build the call FN (ARG0) with a result of type TYPE
   (or no result if TYPE is void) with location LOC,
   simplifying it first if possible.  Returns the built
   expression value (or NULL_TREE if TYPE is void) and appends
   statements possibly defining it to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
	      tree type, tree arg0)
{
  tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
  if (!res)
    {
      gcall *stmt;
      if (internal_fn_p (fn))
	stmt = gimple_build_call_internal (as_internal_fn (fn), 1, arg0);
      else
	{
	  tree decl = builtin_decl_implicit (as_builtin_fn (fn));
	  stmt = gimple_build_call (decl, 1, arg0);
	}
      if (!VOID_TYPE_P (type))
	{
	  res = create_tmp_reg_or_ssa_name (type);
	  gimple_call_set_lhs (stmt, res);
	}
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
/* Build the call FN (ARG0, ARG1) with a result of type TYPE
   (or no result if TYPE is void) with location LOC,
   simplifying it first if possible.  Returns the built
   expression value (or NULL_TREE if TYPE is void) and appends
   statements possibly defining it to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
	      tree type, tree arg0, tree arg1)
{
  tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
  if (!res)
    {
      gcall *stmt;
      if (internal_fn_p (fn))
	stmt = gimple_build_call_internal (as_internal_fn (fn), 2, arg0, arg1);
      else
	{
	  tree decl = builtin_decl_implicit (as_builtin_fn (fn));
	  stmt = gimple_build_call (decl, 2, arg0, arg1);
	}
      if (!VOID_TYPE_P (type))
	{
	  res = create_tmp_reg_or_ssa_name (type);
	  gimple_call_set_lhs (stmt, res);
	}
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
/* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
   (or no result if TYPE is void) with location LOC,
   simplifying it first if possible.  Returns the built
   expression value (or NULL_TREE if TYPE is void) and appends
   statements possibly defining it to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc, combined_fn fn,
	      tree type, tree arg0, tree arg1, tree arg2)
{
  tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
			      seq, gimple_build_valueize);
  if (!res)
    {
      gcall *stmt;
      if (internal_fn_p (fn))
	stmt = gimple_build_call_internal (as_internal_fn (fn),
					   3, arg0, arg1, arg2);
      else
	{
	  tree decl = builtin_decl_implicit (as_builtin_fn (fn));
	  stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
	}
      if (!VOID_TYPE_P (type))
	{
	  res = create_tmp_reg_or_ssa_name (type);
	  gimple_call_set_lhs (stmt, res);
	}
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
/* Build the conversion (TYPE) OP with a result of type TYPE
   with location LOC if such conversion is necessary in GIMPLE,
   simplifying it first.
   Returns the built expression value and appends
   statements possibly defining it to SEQ.  */

tree
gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
{
  if (useless_type_conversion_p (type, TREE_TYPE (op)))
    return op;
  return gimple_build (seq, loc, NOP_EXPR, type, op);
}
/* Build the conversion (ptrofftype) OP with a result of a type
   compatible with ptrofftype with location LOC if such conversion
   is necessary in GIMPLE, simplifying it first.
   Returns the built expression value and appends
   statements possibly defining it to SEQ.  */

tree
gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
{
  if (ptrofftype_p (TREE_TYPE (op)))
    return op;
  return gimple_convert (seq, loc, sizetype, op);
}
/* Build a vector of type TYPE in which each element has the value OP.
   Return a gimple value for the result, appending any new statements
   to SEQ.  */

tree
gimple_build_vector_from_val (gimple_seq *seq, location_t loc, tree type,
			      tree op)
{
  if (!TYPE_VECTOR_SUBPARTS (type).is_constant ()
      && !CONSTANT_CLASS_P (op))
    return gimple_build (seq, loc, VEC_DUPLICATE_EXPR, type, op);

  tree res, vec = build_vector_from_val (type, op);
  if (is_gimple_val (vec))
    return vec;
  if (gimple_in_ssa_p (cfun))
    res = make_ssa_name (type);
  else
    res = create_tmp_reg (type);
  gimple *stmt = gimple_build_assign (res, vec);
  gimple_set_location (stmt, loc);
  gimple_seq_add_stmt_without_update (seq, stmt);
  return res;
}
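/* Usage sketch (illustrative): splatting a scalar X into a vector
   type:

     tree vec = gimple_build_vector_from_val (&seq, loc, vectype, x);

   where vectype and x are hypothetical; for constant X no statement
   is emitted at all.  */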
/* Build a vector from BUILDER, handling the case in which some elements
   are non-constant.  Return a gimple value for the result, appending any
   new instructions to SEQ.

   BUILDER must not have a stepped encoding on entry.  This is because
   the function is not geared up to handle the arithmetic that would
   be needed in the variable case, and any code building a vector that
   is known to be constant should use BUILDER->build () directly.  */

tree
gimple_build_vector (gimple_seq *seq, location_t loc,
		     tree_vector_builder *builder)
{
  gcc_assert (builder->nelts_per_pattern () <= 2);
  unsigned int encoded_nelts = builder->encoded_nelts ();
  for (unsigned int i = 0; i < encoded_nelts; ++i)
    if (!TREE_CONSTANT ((*builder)[i]))
      {
	tree type = builder->type ();
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
	vec<constructor_elt, va_gc> *v;
	vec_alloc (v, nelts);
	for (i = 0; i < nelts; ++i)
	  CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, builder->elt (i));

	tree res;
	if (gimple_in_ssa_p (cfun))
	  res = make_ssa_name (type);
	else
	  res = create_tmp_reg (type);
	gimple *stmt = gimple_build_assign (res, build_constructor (type, v));
	gimple_set_location (stmt, loc);
	gimple_seq_add_stmt_without_update (seq, stmt);
	return res;
      }
  return builder->build ();
}
/* Return true if the result of assignment STMT is known to be non-negative.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

static bool
gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
				   int depth)
{
  enum tree_code code = gimple_assign_rhs_code (stmt);
  switch (get_gimple_rhs_class (code))
    {
    case GIMPLE_UNARY_RHS:
      return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
					     gimple_expr_type (stmt),
					     gimple_assign_rhs1 (stmt),
					     strict_overflow_p, depth);
    case GIMPLE_BINARY_RHS:
      return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
					      gimple_expr_type (stmt),
					      gimple_assign_rhs1 (stmt),
					      gimple_assign_rhs2 (stmt),
					      strict_overflow_p, depth);
    case GIMPLE_TERNARY_RHS:
      return false;
    case GIMPLE_SINGLE_RHS:
      return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
					      strict_overflow_p, depth);
    case GIMPLE_INVALID_RHS:
      break;
    }
  gcc_unreachable ();
}
/* Return true if return value of call STMT is known to be non-negative.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

static bool
gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
				 int depth)
{
  tree arg0 = gimple_call_num_args (stmt) > 0 ?
    gimple_call_arg (stmt, 0) : NULL_TREE;
  tree arg1 = gimple_call_num_args (stmt) > 1 ?
    gimple_call_arg (stmt, 1) : NULL_TREE;

  return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
					gimple_call_combined_fn (stmt),
					arg0, arg1,
					strict_overflow_p, depth);
}
/* Return true if the result of PHI node STMT is known to be non-negative.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

static bool
gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
				int depth)
{
  for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
    {
      tree arg = gimple_phi_arg_def (stmt, i);
      if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
	return false;
    }
  return true;
}
/* Return true if STMT is known to compute a non-negative value.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

bool
gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
				 int depth)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
						depth);
    case GIMPLE_CALL:
      return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
					      depth);
    case GIMPLE_PHI:
      return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
					     depth);
    default:
      return false;
    }
}
/* Return true if the floating-point value computed by assignment STMT
   is known to have an integer value.  We also allow +Inf, -Inf and NaN
   to be considered integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

static bool
gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
{
  enum tree_code code = gimple_assign_rhs_code (stmt);
  switch (get_gimple_rhs_class (code))
    {
    case GIMPLE_UNARY_RHS:
      return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
					  gimple_assign_rhs1 (stmt), depth);
    case GIMPLE_BINARY_RHS:
      return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
					   gimple_assign_rhs1 (stmt),
					   gimple_assign_rhs2 (stmt), depth);
    case GIMPLE_TERNARY_RHS:
      return false;
    case GIMPLE_SINGLE_RHS:
      return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
    case GIMPLE_INVALID_RHS:
      break;
    }
  gcc_unreachable ();
}
/* Return true if the floating-point value computed by call STMT is known
   to have an integer value.  We also allow +Inf, -Inf and NaN to be
   considered integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

static bool
gimple_call_integer_valued_real_p (gimple *stmt, int depth)
{
  tree arg0 = (gimple_call_num_args (stmt) > 0
	       ? gimple_call_arg (stmt, 0)
	       : NULL_TREE);
  tree arg1 = (gimple_call_num_args (stmt) > 1
	       ? gimple_call_arg (stmt, 1)
	       : NULL_TREE);
  return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
				     arg0, arg1, depth);
}
/* Return true if the floating-point result of phi STMT is known to have
   an integer value.  We also allow +Inf, -Inf and NaN to be considered
   integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

static bool
gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
{
  for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
    {
      tree arg = gimple_phi_arg_def (stmt, i);
      if (!integer_valued_real_single_p (arg, depth + 1))
	return false;
    }
  return true;
}
/* Return true if the floating-point value computed by STMT is known
   to have an integer value.  We also allow +Inf, -Inf and NaN to be
   considered integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

bool
gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      return gimple_assign_integer_valued_real_p (stmt, depth);
    case GIMPLE_CALL:
      return gimple_call_integer_valued_real_p (stmt, depth);
    case GIMPLE_PHI:
      return gimple_phi_integer_valued_real_p (stmt, depth);
    default:
      return false;
    }
}