/* Statement simplification on GIMPLE.
   Copyright (C) 2010-2017 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.c.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "gimple-fold.h"
#include "gimple-iterator.h"
#include "tree-into-ssa.h"
#include "tree-object-size.h"
#include "tree-ssa-propagate.h"
#include "ipa-utils.h"
#include "tree-ssa-address.h"
#include "langhooks.h"
#include "gimplify-me.h"
#include "gimple-match.h"
#include "gomp-constants.h"
#include "optabs-query.h"
#include "omp-general.h"
#include "fold-const-call.h"
#include "stringpool.h"
#include "diagnostic-core.h"
/* Return true when DECL can be referenced from the current unit.
   FROM_DECL (if non-null) specifies the constructor the variable DECL
   was taken from.
   We can get declarations that are not possible to reference for various
   reasons:

     1) When analyzing C++ virtual tables.
	C++ virtual tables do have known constructors even
	when they are keyed to other compilation unit.
	Those tables can contain pointers to methods and vars
	in other units.  Those methods have both STATIC and EXTERNAL
	set.
     2) In WHOPR mode devirtualization might lead to reference
	to method that was partitioned elsewhere.
	In this case we have static VAR_DECL or FUNCTION_DECL
	that has no corresponding callgraph/varpool node
	defining it.
     3) COMDAT functions referred to by external vtables that
	we devirtualize only during final compilation stage.
	At this time we already decided that we will not output
	the function body and thus we can't reference the symbol
	directly.  */
static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  if (DECL_ABSTRACT_P (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || !VAR_OR_FUNCTION_DECL_P (decl))
    return true;

  /* Static objects can be referred to only if they were not optimized
     out yet.  */
  if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
    {
      /* Before we start optimizing unreachable code we can be sure all
	 static objects are defined.  */
      if (symtab->function_flags_ready)
	return true;
      snode = symtab_node::get (decl);
      if (!snode || !snode->definition)
	return false;
      node = dyn_cast <cgraph_node *> (snode);
      return !node || !node->global.inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from initializer of
     external var or var that has been optimized out.  */
  if (!from_decl
      || !VAR_P (from_decl)
      || (!DECL_EXTERNAL (from_decl)
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->definition)
      || (flag_ltrans
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->in_other_partition))
    return true;
  /* We are folding reference from external vtable.  The vtable may refer
     to a symbol keyed to other compilation unit.  The other compilation
     unit may be in separate DSO and the symbol may be hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
    return false;
  /* When the function is public, we can always introduce a new reference.
     The exception are COMDAT functions where introducing a direct
     reference implies the need to include the function body in the
     current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We have COMDAT.  We are going to check if we still have definition
     or if the definition is going to be output in other partition.
     Bypass this when gimplifying; all needed functions will be produced.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting to not necessarily give up because the copy will be
     output elsewhere when the corresponding vtable is output.
     This is however not possible - the ABI specifies that COMDATs are output
     in units where they are used and when the other unit was compiled with
     LTO it is possible that the vtable was kept public while the function
     itself was privatized.  */
  if (!symtab->function_flags_ready)
    return true;

  snode = symtab_node::get (decl);
  if (!snode
      || ((!snode->definition || DECL_EXTERNAL (decl))
	  && (!snode->in_other_partition
	      || (!snode->forced_by_abi && !snode->force_output))))
    return false;
  node = dyn_cast <cgraph_node *> (snode);
  return !node || !node->global.inlined_to;
}
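
/* Illustration (a sketch, not part of the original sources): for a
   header-only class

     struct S { virtual int f () { return 1; } };

   an external vtable for S may reference the COMDAT body of S::f even
   though the current unit decided not to emit it; that is exactly the
   situation case 3 in the comment above guards against.  */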
/* Create a temporary for TYPE for a statement STMT.  If the current function
   is in SSA form, an SSA name is created.  Otherwise a temporary register
   is made.  */

static tree
create_tmp_reg_or_ssa_name (tree type, gimple *stmt)
{
  if (gimple_in_ssa_p (cfun))
    return make_ssa_name (type, stmt);
  else
    return create_tmp_reg (type);
}
/* CVAL is a value taken from DECL_INITIAL of a variable.  Try to transform
   it into an acceptable form for is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specifies the variable whose constructor
   contains CVAL.  */

tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  tree orig_cval = cval;
  STRIP_NOPS (cval);
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
	cval = build1_loc (EXPR_LOCATION (cval),
			   ADDR_EXPR, TREE_TYPE (ptr),
			   fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
					ptr,
					fold_convert (ptr_type_node,
						      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
	{
	  base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
	  if (base)
	    TREE_OPERAND (cval, 0) = base;
	}
      else
	base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
	return NULL_TREE;

      if (VAR_OR_FUNCTION_DECL_P (base)
	  && !can_refer_decl_in_current_unit_p (base, from_decl))
	return NULL_TREE;
      if (TREE_TYPE (base) == error_mark_node)
	return NULL_TREE;
      if (VAR_P (base))
	TREE_ADDRESSABLE (base) = 1;
      else if (TREE_CODE (base) == FUNCTION_DECL)
	{
	  /* Make sure we create a cgraph node for functions we'll reference.
	     They can be non-existent if the reference comes from an entry
	     of an external vtable for example.  */
	  cgraph_node::get_create (base);
	}
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  if (TREE_OVERFLOW_P (cval))
    return drop_tree_overflow (cval);
  return orig_cval;
}
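
/* For illustration (a sketch, not part of the original sources): an
   initializer such as

     static int a[4];
     static int *p = &a[0] + 2;

   reaches this function as POINTER_PLUS_EXPR (&a[0], 8) and is
   canonicalized into the address of a MEM_REF at offset 8 from &a,
   a form that is_gimple_min_invariant accepts.  */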
/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

tree
get_symbol_constant_value (tree sym)
{
  tree val = ctor_for_folding (sym);
  if (val != error_mark_node)
    {
      if (val)
	{
	  val = canonicalize_constructor_val (unshare_expr (val), sym);
	  if (val && is_gimple_min_invariant (val))
	    return val;
	  else
	    return NULL_TREE;
	}
      /* Variables declared 'const' without an initializer
	 have zero as the initializer if they may not be
	 overridden at link or run time.  */
      if (!val
	  && is_gimple_reg_type (TREE_TYPE (sym)))
	return build_zero_cst (TREE_TYPE (sym));
    }

  return NULL_TREE;
}
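
/* Example (a sketch, not part of the original sources): given

     static const int answer = 42;

   a load of 'answer' folds to the constant 42 via ctor_for_folding,
   and a 'static const int zeroed;' with no initializer folds to 0 by
   the build_zero_cst path above, provided it cannot be overridden at
   link or run time.  */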
/* Subroutine of fold_stmt.  We perform several simplifications of the
   memory reference tree EXPR and make sure to re-gimplify them properly
   after propagation of constant addresses.  IS_LHS is true if the
   reference is supposed to be an lvalue.  */

static tree
maybe_fold_reference (tree expr, bool is_lhs)
{
  tree result;

  if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
       || TREE_CODE (expr) == REALPART_EXPR
       || TREE_CODE (expr) == IMAGPART_EXPR)
      && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_unary_loc (EXPR_LOCATION (expr),
			   TREE_CODE (expr),
			   TREE_TYPE (expr),
			   TREE_OPERAND (expr, 0));
  else if (TREE_CODE (expr) == BIT_FIELD_REF
	   && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_ternary_loc (EXPR_LOCATION (expr),
			     TREE_CODE (expr),
			     TREE_TYPE (expr),
			     TREE_OPERAND (expr, 0),
			     TREE_OPERAND (expr, 1),
			     TREE_OPERAND (expr, 2));

  if (!is_lhs
      && (result = fold_const_aggregate_ref (expr))
      && is_gimple_min_invariant (result))
    return result;

  return NULL_TREE;
}
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple *stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	if (TREE_CLOBBER_P (rhs))
	  return NULL_TREE;

	if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs, false);

	else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
	  {
	    tree val = OBJ_TYPE_REF_EXPR (rhs);
	    if (is_gimple_min_invariant (val))
	      return val;
	    else if (flag_devirtualize && virtual_method_call_p (rhs))
	      {
		bool final;
		vec <cgraph_node *> targets
		  = possible_polymorphic_call_targets (rhs, stmt, &final);
		if (final && targets.length () <= 1 && dbg_cnt (devirt))
		  {
		    if (dump_enabled_p ())
		      {
			location_t loc = gimple_location_safe (stmt);
			dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
					 "resolving virtual function address "
					 "reference to function %s\n",
					 targets.length () == 1
					 ? targets[0]->name ()
					 : "NULL");
		      }
		    if (targets.length () == 1)
		      {
			val = fold_convert (TREE_TYPE (val),
					    build_fold_addr_expr_loc
					      (loc, targets[0]->decl));
			STRIP_USELESS_TYPE_CONVERSION (val);
		      }
		    else
		      /* We cannot use __builtin_unreachable here because it
			 cannot have its address taken.  */
		      val = build_int_cst (TREE_TYPE (val), 0);
		    return val;
		  }
	      }
	  }

	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    tree ref = TREE_OPERAND (rhs, 0);
	    tree tem = maybe_fold_reference (ref, true);
	    if (tem
		&& TREE_CODE (tem) == MEM_REF
		&& integer_zerop (TREE_OPERAND (tem, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
	    else if (tem)
	      result = fold_convert (TREE_TYPE (rhs),
				     build_fold_addr_expr_loc (loc, tem));
	    else if (TREE_CODE (ref) == MEM_REF
		     && integer_zerop (TREE_OPERAND (ref, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));

	    if (result)
	      {
		/* Strip away useless type conversions.  Both the
		   NON_LVALUE_EXPR that may have been added by fold, and
		   "useless" type conversions that might now be apparent
		   due to propagation.  */
		STRIP_USELESS_TYPE_CONVERSION (result);

		if (result != rhs && valid_gimple_rhs_p (result))
		  return result;
	      }
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (! CONSTANT_CLASS_P (val))
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs))
	  return get_symbol_constant_value (rhs);
      }
      break;

    case GIMPLE_UNARY_RHS:
      break;

    case GIMPLE_BINARY_RHS:
      break;

    case GIMPLE_TERNARY_RHS:
      result = fold_ternary_loc (loc, subcode,
				 TREE_TYPE (gimple_assign_lhs (stmt)),
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 gimple_assign_rhs3 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
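
/* Example (a sketch, not part of the original sources): for

     typedef int v4si __attribute__ ((vector_size (16)));
     v4si v = { 1, 2, 3, 4 };

   the GIMPLE_SINGLE_RHS case above folds the constant CONSTRUCTOR on
   the rhs into a single VECTOR_CST node.  */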
/* Replace a statement at *SI_P with a sequence of statements in STMTS,
   adjusting the replacement statements' location and virtual operands.
   If the statement has a lhs the last stmt in the sequence is expected
   to assign to that lhs.  */

static void
gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
{
  gimple *stmt = gsi_stmt (*si_p);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  gimple *laststore = NULL;
  for (gimple_stmt_iterator i = gsi_last (stmts);
       !gsi_end_p (i); gsi_prev (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      if ((gimple_assign_single_p (new_stmt)
	   && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	  || (is_gimple_call (new_stmt)
	      && (gimple_call_flags (new_stmt)
		  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
	{
	  tree vdef;
	  if (!laststore)
	    vdef = gimple_vdef (stmt);
	  else
	    vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
	  gimple_set_vdef (new_stmt, vdef);
	  if (vdef && TREE_CODE (vdef) == SSA_NAME)
	    SSA_NAME_DEF_STMT (vdef) = new_stmt;
	  laststore = new_stmt;
	}
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  tree reaching_vuse = gimple_vuse (stmt);
  for (gimple_stmt_iterator i = gsi_start (stmts);
       !gsi_end_p (i); gsi_next (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with exact SSA
	 name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
	gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
	reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
	  && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}
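
/* For illustration (a sketch, not part of the original sources): when a
   statement with virtual operands .MEM_3 = call (VUSE .MEM_1) is replaced
   by a two-store sequence, the wiring above produces

     store1 (VUSE .MEM_1, VDEF .MEM_4)
     store2 (VUSE .MEM_4, VDEF .MEM_3)

   i.e. the first VUSE and the last VDEF of the sequence match the
   replaced statement, with a fresh SSA name in between.  */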
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL,
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple *stmt, *new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  push_gimplify_context (gimple_in_ssa_p (cfun));

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  pop_gimplify_context (NULL);
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  gsi_replace (si_p, gimple_build_nop (), false);
	  return;
	}
    }
  else
    {
      tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
				       GSI_CONTINUE_LINKING);
    }

  pop_gimplify_context (NULL);

  gsi_replace_with_seq_vops (si_p, stmts);
}
/* Replace the call at *GSI with the gimple value VAL.  */

static void
replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  gimple *repl;
  if (lhs)
    {
      if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
	val = fold_convert (TREE_TYPE (lhs), val);
      repl = gimple_build_assign (lhs, val);
    }
  else
    repl = gimple_build_nop ();
  tree vdef = gimple_vdef (stmt);
  if (vdef && TREE_CODE (vdef) == SSA_NAME)
    {
      unlink_stmt_vdef (stmt);
      release_ssa_name (vdef);
    }
  gsi_replace (gsi, repl, false);
}
/* Replace the call at *GSI with the new call REPL and fold that
   again.  */

static void
replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
{
  gimple *stmt = gsi_stmt (*gsi);
  gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
  gimple_set_location (repl, gimple_location (stmt));
  if (gimple_vdef (stmt)
      && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
    {
      gimple_set_vdef (repl, gimple_vdef (stmt));
      SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
    }
  if (gimple_vuse (stmt))
    gimple_set_vuse (repl, gimple_vuse (stmt));
  gsi_replace (gsi, repl, false);
  fold_stmt (gsi);
}
/* Return true if VAR is a VAR_DECL or a component thereof.  */

static bool
var_decl_component_p (tree var)
{
  tree inner = var;
  while (handled_component_p (inner))
    inner = TREE_OPERAND (inner, 0);
  return SSA_VAR_P (inner);
}
/* If the SIZE argument representing the size of an object is in a range
   of values of which exactly one is valid (and that is zero), return
   true, otherwise false.  */

static bool
size_must_be_zero_p (tree size)
{
  if (integer_zerop (size))
    return true;

  if (TREE_CODE (size) != SSA_NAME)
    return false;

  wide_int min, max;
  enum value_range_type rtype = get_range_info (size, &min, &max);
  if (rtype != VR_ANTI_RANGE)
    return false;

  tree type = TREE_TYPE (size);
  int prec = TYPE_PRECISION (type);

  wide_int wone = wi::one (prec);

  /* Compute the value of SSIZE_MAX, the largest positive value that
     can be stored in ssize_t, the signed counterpart of size_t.  */
  wide_int ssize_max = wi::lshift (wi::one (prec), prec - 1) - 1;

  return wi::eq_p (min, wone) && wi::geu_p (max, ssize_max);
}
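
/* For illustration (a sketch, not part of the original sources): if value
   range propagation has recorded the anti-range ~[1, SSIZE_MAX] for a
   size_t value N, the only value of N that is also a valid object size
   is zero, since sizes greater than SSIZE_MAX cannot be valid; the test
   above detects exactly this case.  */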
/* Fold function call to builtin mem{{,p}cpy,move}.  Return
   false if no simplification can be made.
   If ENDP is 0, return DEST (like memcpy).
   If ENDP is 1, return DEST+LEN (like mempcpy).
   If ENDP is 2, return DEST+LEN-1 (like stpcpy).
   If ENDP is 3, return DEST; additionally *SRC and *DEST may overlap
   (like memmove).  */

static bool
gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
			       tree dest, tree src, int endp)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  tree len = gimple_call_arg (stmt, 2);
  tree destvar, srcvar;
  location_t loc = gimple_location (stmt);

  /* If the LEN parameter is a constant zero or in range where
     the only valid value is zero, return DEST.  */
  if (size_must_be_zero_p (len))
    {
      gimple *repl;
      if (gimple_call_lhs (stmt))
	repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
      else
	repl = gimple_build_nop ();
      tree vdef = gimple_vdef (stmt);
      if (vdef && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
      gsi_replace (gsi, repl, false);
      return true;
    }
  /* If SRC and DEST are the same (and not volatile), return
     DEST{,+LEN,+LEN-1}.  */
  if (operand_equal_p (src, dest, 0))
    {
      unlink_stmt_vdef (stmt);
      if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	release_ssa_name (gimple_vdef (stmt));
      if (!lhs)
	{
	  gsi_replace (gsi, gimple_build_nop (), false);
	  return true;
	}
      goto done;
    }
  else
    {
      tree srctype, desttype;
      unsigned int src_align, dest_align;
      tree off0;

      /* Inlining of memcpy/memmove may cause bounds to be lost (if we
	 copy pointers as a wide integer) and also may result in huge
	 function size because of inlined bounds copy.  Thus don't inline
	 for functions we want to instrument.  */
      if (flag_check_pointer_bounds
	  && chkp_instrumentable_p (cfun->decl)
	  /* Even if data may contain pointers we can inline if copy
	     less than a pointer size.  */
	  && (!tree_fits_uhwi_p (len)
	      || compare_tree_int (len, POINTER_SIZE_UNITS) >= 0))
	return false;

      /* Build accesses at offset zero with a ref-all character type.  */
      off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
							 ptr_mode, true), 0);

      /* If we can perform the copy efficiently with first doing all loads
	 and then all stores inline it that way.  Currently efficiently
	 means that we can load all the memory into a single integer
	 register which is what MOVE_MAX gives us.  */
      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (tree_fits_uhwi_p (len)
	  && compare_tree_int (len, MOVE_MAX) <= 0
	  /* ???  Don't transform copies from strings with known length;
	     this confuses tree-ssa-strlen.c.  This doesn't handle
	     the case in gcc.dg/strlenopt-8.c which is XFAILed for that
	     reason.  */
	  && !c_strlen (src, 2))
	{
	  unsigned ilen = tree_to_uhwi (len);
	  if (pow2p_hwi (ilen))
	    {
	      scalar_int_mode mode;
	      tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
	      if (type
		  && is_a <scalar_int_mode> (TYPE_MODE (type), &mode)
		  && GET_MODE_SIZE (mode) * BITS_PER_UNIT == ilen * 8
		  /* If the destination pointer is not aligned we must be able
		     to emit an unaligned store.  */
		  && (dest_align >= GET_MODE_ALIGNMENT (mode)
		      || !targetm.slow_unaligned_access (mode, dest_align)
		      || (optab_handler (movmisalign_optab, mode)
			  != CODE_FOR_nothing)))
		{
		  tree srctype = type;
		  tree desttype = type;
		  if (src_align < GET_MODE_ALIGNMENT (mode))
		    srctype = build_aligned_type (type, src_align);
		  tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
		  tree tem = fold_const_aggregate_ref (srcmem);
		  if (tem)
		    srcmem = tem;
		  else if (src_align < GET_MODE_ALIGNMENT (mode)
			   && targetm.slow_unaligned_access (mode, src_align)
			   && (optab_handler (movmisalign_optab, mode)
			       == CODE_FOR_nothing))
		    srcmem = NULL_TREE;
		  if (srcmem)
		    {
		      gimple *new_stmt;
		      if (is_gimple_reg_type (TREE_TYPE (srcmem)))
			{
			  new_stmt = gimple_build_assign (NULL_TREE, srcmem);
			  srcmem
			    = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem),
							  new_stmt);
			  gimple_assign_set_lhs (new_stmt, srcmem);
			  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
			}
		      if (dest_align < GET_MODE_ALIGNMENT (mode))
			desttype = build_aligned_type (type, dest_align);
		      new_stmt
			= gimple_build_assign (fold_build2 (MEM_REF, desttype,
							    dest, off0),
					       srcmem);
		      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
		      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
		      if (gimple_vdef (new_stmt)
			  && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
			SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
		      if (!lhs)
			{
			  gsi_replace (gsi, new_stmt, false);
			  return true;
			}
		      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
		      goto done;
		    }
		}
	    }
	}
      /* Both DEST and SRC must be pointer types.
	 ???  This is what old code did.  Is the testing for pointer types
	 really mandatory?

	 If either SRC is readonly or length is 1, we can use memcpy.  */
      if (!dest_align || !src_align)
	return false;
      if (readonly_data_expr (src)
	  || (tree_fits_uhwi_p (len)
	      && (MIN (src_align, dest_align) / BITS_PER_UNIT
		  >= tree_to_uhwi (len))))
	{
	  tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
	  if (!fn)
	    return false;
	  gimple_call_set_fndecl (stmt, fn);
	  gimple_call_set_arg (stmt, 0, dest);
	  gimple_call_set_arg (stmt, 1, src);
	  fold_stmt (gsi);
	  return true;
	}

      /* If *src and *dest can't overlap, optimize into memcpy as well.  */
      if (TREE_CODE (src) == ADDR_EXPR
	  && TREE_CODE (dest) == ADDR_EXPR)
	{
	  tree src_base, dest_base, fn;
	  HOST_WIDE_INT src_offset = 0, dest_offset = 0;
	  HOST_WIDE_INT maxsize;

	  srcvar = TREE_OPERAND (src, 0);
	  src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
	  if (src_base == NULL)
	    src_base = srcvar;
	  destvar = TREE_OPERAND (dest, 0);
	  dest_base = get_addr_base_and_unit_offset (destvar,
						     &dest_offset);
	  if (dest_base == NULL)
	    dest_base = destvar;
	  if (tree_fits_uhwi_p (len))
	    maxsize = tree_to_uhwi (len);
	  else
	    maxsize = -1;
	  if (SSA_VAR_P (src_base)
	      && SSA_VAR_P (dest_base))
	    {
	      if (operand_equal_p (src_base, dest_base, 0)
		  && ranges_overlap_p (src_offset, maxsize,
				       dest_offset, maxsize))
		return false;
	    }
	  else if (TREE_CODE (src_base) == MEM_REF
		   && TREE_CODE (dest_base) == MEM_REF)
	    {
	      if (! operand_equal_p (TREE_OPERAND (src_base, 0),
				     TREE_OPERAND (dest_base, 0), 0))
		return false;
	      offset_int off = mem_ref_offset (src_base) + src_offset;
	      if (!wi::fits_shwi_p (off))
		return false;
	      src_offset = off.to_shwi ();

	      off = mem_ref_offset (dest_base) + dest_offset;
	      if (!wi::fits_shwi_p (off))
		return false;
	      dest_offset = off.to_shwi ();
	      if (ranges_overlap_p (src_offset, maxsize,
				    dest_offset, maxsize))
		return false;
	    }
	  else
	    return false;

	  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
	  if (!fn)
	    return false;
	  gimple_call_set_fndecl (stmt, fn);
	  gimple_call_set_arg (stmt, 0, dest);
	  gimple_call_set_arg (stmt, 1, src);
	  fold_stmt (gsi);
	  return true;
	}

      /* If the destination and source do not alias optimize into
	 memcpy as well.  */
      if ((is_gimple_min_invariant (dest)
	   || TREE_CODE (dest) == SSA_NAME)
	  && (is_gimple_min_invariant (src)
	      || TREE_CODE (src) == SSA_NAME))
	{
	  ao_ref destr, srcr;
	  ao_ref_init_from_ptr_and_size (&destr, dest, len);
	  ao_ref_init_from_ptr_and_size (&srcr, src, len);
	  if (!refs_may_alias_p_1 (&destr, &srcr, false))
	    {
	      tree fn;
	      fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
	      if (!fn)
		return false;
	      gimple_call_set_fndecl (stmt, fn);
	      gimple_call_set_arg (stmt, 0, dest);
	      gimple_call_set_arg (stmt, 1, src);
	      fold_stmt (gsi);
	      return true;
	    }
	}
      if (!tree_fits_shwi_p (len))
	return false;
      if (!POINTER_TYPE_P (TREE_TYPE (src))
	  || !POINTER_TYPE_P (TREE_TYPE (dest)))
	return false;
      /* In the following try to find a type that is most natural to be
	 used for the memcpy source and destination and that allows
	 the most optimization when memcpy is turned into a plain assignment
	 using that type.  In theory we could always use a char[len] type
	 but that only gains us that the destination and source possibly
	 no longer will have their address taken.  */
      srctype = TREE_TYPE (TREE_TYPE (src));
      if (TREE_CODE (srctype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
	srctype = TREE_TYPE (srctype);
      desttype = TREE_TYPE (TREE_TYPE (dest));
      if (TREE_CODE (desttype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
	desttype = TREE_TYPE (desttype);
      if (TREE_ADDRESSABLE (srctype)
	  || TREE_ADDRESSABLE (desttype))
	return false;

      /* Make sure we are not copying using a floating-point mode or
	 a type whose size possibly does not match its precision.  */
      if (FLOAT_MODE_P (TYPE_MODE (desttype))
	  || TREE_CODE (desttype) == BOOLEAN_TYPE
	  || TREE_CODE (desttype) == ENUMERAL_TYPE)
	desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
      if (FLOAT_MODE_P (TYPE_MODE (srctype))
	  || TREE_CODE (srctype) == BOOLEAN_TYPE
	  || TREE_CODE (srctype) == ENUMERAL_TYPE)
	srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
      if (!srctype)
	srctype = desttype;
      if (!desttype)
	desttype = srctype;
      if (!srctype)
	return false;

      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (dest_align < TYPE_ALIGN (desttype)
	  || src_align < TYPE_ALIGN (srctype))
	return false;

      destvar = NULL_TREE;
      if (TREE_CODE (dest) == ADDR_EXPR
	  && var_decl_component_p (TREE_OPERAND (dest, 0))
	  && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
	destvar = fold_build2 (MEM_REF, desttype, dest, off0);

      srcvar = NULL_TREE;
      if (TREE_CODE (src) == ADDR_EXPR
	  && var_decl_component_p (TREE_OPERAND (src, 0))
	  && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
	{
	  if (!destvar
	      || src_align >= TYPE_ALIGN (desttype))
	    srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
				  src, off0);
	  else if (!STRICT_ALIGNMENT)
	    {
	      srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
					    src_align);
	      srcvar = fold_build2 (MEM_REF, srctype, src, off0);
	    }
	}

      if (srcvar == NULL_TREE && destvar == NULL_TREE)
	return false;

      if (srcvar == NULL_TREE)
	{
	  if (src_align >= TYPE_ALIGN (desttype))
	    srcvar = fold_build2 (MEM_REF, desttype, src, off0);
	  else
	    {
	      if (STRICT_ALIGNMENT)
		return false;
	      srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
					    src_align);
	      srcvar = fold_build2 (MEM_REF, srctype, src, off0);
	    }
	}
      else if (destvar == NULL_TREE)
	{
	  if (dest_align >= TYPE_ALIGN (srctype))
	    destvar = fold_build2 (MEM_REF, srctype, dest, off0);
	  else
	    {
	      if (STRICT_ALIGNMENT)
		return false;
	      desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
					     dest_align);
	      destvar = fold_build2 (MEM_REF, desttype, dest, off0);
	    }
	}

      gimple *new_stmt;
      if (is_gimple_reg_type (TREE_TYPE (srcvar)))
	{
	  tree tem = fold_const_aggregate_ref (srcvar);
	  if (tem)
	    srcvar = tem;
	  if (! is_gimple_min_invariant (srcvar))
	    {
	      new_stmt = gimple_build_assign (NULL_TREE, srcvar);
	      srcvar = create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar),
						   new_stmt);
	      gimple_assign_set_lhs (new_stmt, srcvar);
	      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
	      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
	    }
	  new_stmt = gimple_build_assign (destvar, srcvar);
	  goto set_vop_and_replace;
	}
      /* We get an aggregate copy.  Use an unsigned char[] type to
	 perform the copying to preserve padding and to avoid any issues
	 with TREE_ADDRESSABLE types or float modes behavior on copying.  */
      desttype = build_array_type_nelts (unsigned_char_type_node,
					 tree_to_uhwi (len));
      srctype = desttype;
      if (src_align > TYPE_ALIGN (srctype))
	srctype = build_aligned_type (srctype, src_align);
      if (dest_align > TYPE_ALIGN (desttype))
	desttype = build_aligned_type (desttype, dest_align);
      new_stmt
	= gimple_build_assign (fold_build2 (MEM_REF, desttype, dest, off0),
			       fold_build2 (MEM_REF, srctype, src, off0));
set_vop_and_replace:
      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
      if (gimple_vdef (new_stmt)
	  && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
	SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
      if (!lhs)
	{
	  gsi_replace (gsi, new_stmt, false);
	  return true;
	}
      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
    }
done:
  gimple_seq stmts = NULL;
  if (endp == 0 || endp == 3)
    len = NULL_TREE;
  else if (endp == 2)
    len = gimple_build (&stmts, loc, MINUS_EXPR, TREE_TYPE (len), len,
			ssize_int (1));
  if (endp == 2 || endp == 1)
    {
      len = gimple_convert_to_ptrofftype (&stmts, loc, len);
      dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
			   TREE_TYPE (dest), dest, len);
    }

  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gimple *repl = gimple_build_assign (lhs, dest);
  gsi_replace (gsi, repl, false);
  return true;
}
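
/* Example (a sketch, not part of the original sources): with

     struct pair { long a, b; } x, y;
     memcpy (&x, &y, sizeof x);

   the aggregate-copy path above rewrites the call into a plain GIMPLE
   assignment between two MEM_REFs of type unsigned char[16], preserving
   padding, while a 4-byte memcpy between int objects may instead go
   through the single load/store path on a typical 64-bit target.  */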
/* Transform a call to built-in bcmp (a, b, len) at *GSI into one
   to built-in memcmp (a, b, len).  */

static bool
gimple_fold_builtin_bcmp (gimple_stmt_iterator *gsi)
{
  tree fn = builtin_decl_implicit (BUILT_IN_MEMCMP);

  if (!fn)
    return false;

  /* Transform bcmp (a, b, len) into memcmp (a, b, len).  */

  gimple *stmt = gsi_stmt (*gsi);
  tree a = gimple_call_arg (stmt, 0);
  tree b = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  gimple *repl = gimple_build_call (fn, 3, a, b, len);
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}
/* Transform a call to built-in bcopy (src, dest, len) at *GSI into one
   to built-in memmove (dest, src, len).  */

static bool
gimple_fold_builtin_bcopy (gimple_stmt_iterator *gsi)
{
  tree fn = builtin_decl_implicit (BUILT_IN_MEMMOVE);

  if (!fn)
    return false;

  /* bcopy has been removed from POSIX in Issue 7 but Issue 6 specifies
     it's equivalent to memmove (not memcpy).  Transform bcopy (src, dest,
     len) into memmove (dest, src, len).  */

  gimple *stmt = gsi_stmt (*gsi);
  tree src = gimple_call_arg (stmt, 0);
  tree dest = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  gimple_call_set_fntype (as_a <gcall *> (stmt), TREE_TYPE (fn));
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}
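
/* Note the argument swap above: for example (a sketch, not part of the
   original sources)

     bcopy (buf + 1, buf, n);   becomes   memmove (buf, buf + 1, n);

   which is why the first two gimple_call_arg results are passed to
   gimple_build_call in reversed order.  */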
/* Transform a call to built-in bzero (dest, len) at *GSI into one
   to built-in memset (dest, 0, len).  */

static bool
gimple_fold_builtin_bzero (gimple_stmt_iterator *gsi)
{
  tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);

  if (!fn)
    return false;

  /* Transform bzero (dest, len) into memset (dest, 0, len).  */

  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree len = gimple_call_arg (stmt, 1);

  gimple_seq seq = NULL;
  gimple *repl = gimple_build_call (fn, 3, dest, integer_zero_node, len);
  gimple_seq_add_stmt_without_update (&seq, repl);
  gsi_replace_with_seq_vops (gsi, seq);
  fold_stmt (gsi);

  return true;
}
/* Fold function call to builtin memset or bzero at *GSI setting the
   memory of size LEN to the value C.  Return whether a simplification
   was made.  */

static bool
gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree etype;
  unsigned HOST_WIDE_INT length, cval;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
      return true;
    }

  if (! tree_fits_uhwi_p (len))
    return false;

  if (TREE_CODE (c) != INTEGER_CST)
    return false;

  tree dest = gimple_call_arg (stmt, 0);
  tree var = dest;
  if (TREE_CODE (var) != ADDR_EXPR)
    return false;

  var = TREE_OPERAND (var, 0);
  if (TREE_THIS_VOLATILE (var))
    return false;

  etype = TREE_TYPE (var);
  if (TREE_CODE (etype) == ARRAY_TYPE)
    etype = TREE_TYPE (etype);

  if (!INTEGRAL_TYPE_P (etype)
      && !POINTER_TYPE_P (etype))
    return false;

  if (! var_decl_component_p (var))
    return false;

  length = tree_to_uhwi (len);
  if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (etype)) != length
      || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
    return false;

  if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
    return false;

  if (integer_zerop (c))
    cval = 0;
  else
    {
      if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
	return false;

      cval = TREE_INT_CST_LOW (c);
      cval &= 0xff;
      cval |= cval << 8;
      cval |= cval << 16;
      cval |= (cval << 31) << 1;
    }

  var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
  gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
  gimple_set_vuse (store, gimple_vuse (stmt));
  tree vdef = gimple_vdef (stmt);
  if (vdef && TREE_CODE (vdef) == SSA_NAME)
    {
      gimple_set_vdef (store, gimple_vdef (stmt));
      SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
    }
  gsi_insert_before (gsi, store, GSI_SAME_STMT);
  if (gimple_call_lhs (stmt))
    {
      gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
      gsi_replace (gsi, asgn, false);
    }
  else
    {
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (gsi);
      gsi_remove (&gsi2, true);
    }

  return true;
}
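
/* Example (a sketch, not part of the original sources): for

     int i;
     memset (&i, 0xab, sizeof i);

   the byte value is replicated into every byte of the word by the
   shift-or sequence above, and the call becomes a single store of
   0xabababab into a MEM_REF of &i.  */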
/* Obtain the minimum and maximum string length or minimum and maximum
   value of ARG in LENGTH[0] and LENGTH[1], respectively.
   If ARG is an SSA name variable, follow its use-def chains.  When
   TYPE == 0, if LENGTH[1] is not equal to the length we determine or
   if we are unable to determine the length or value, return false.
   VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be obtained, 1 for maximum string
   length and 2 for maximum value ARG can have.
   When FUZZY is set and the length of a string cannot be determined,
   the function instead considers as the maximum possible length the
   size of a character array it may refer to.
   Set *FLEXP to true if the range of the string lengths has been
   obtained from the upper bound of an array at the end of a struct.
   Such an array may hold a string that's longer than its upper bound
   due to it being used as a poor-man's flexible array member.  */

static bool
get_range_strlen (tree arg, tree length[2], bitmap *visited, int type,
		  bool fuzzy, bool *flexp)
{
  tree var, val;
  gimple *def_stmt;

  /* The minimum and maximum length.  The MAXLEN pointer stays unchanged
     but MINLEN may be cleared during the execution of the function.  */
  tree *minlen = length;
  tree *const maxlen = length + 1;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      if (TREE_CODE (arg) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
	  && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
	{
	  tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_range_strlen (TREE_OPERAND (aop0, 0),
				     length, visited, type, fuzzy, flexp);
	}

      if (type == 2)
	{
	  val = arg;
	  if (TREE_CODE (val) != INTEGER_CST
	      || tree_int_cst_sgn (val) < 0)
	    return false;
	}
      else
	val = c_strlen (arg, 1);

      if (!val && fuzzy)
	{
	  if (TREE_CODE (arg) == ADDR_EXPR)
	    return get_range_strlen (TREE_OPERAND (arg, 0), length,
				     visited, type, fuzzy, flexp);

	  if (TREE_CODE (arg) == COMPONENT_REF
	      && TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1))) == ARRAY_TYPE)
	    {
	      /* Use the type of the member array to determine the upper
		 bound on the length of the array.  This may be overly
		 optimistic if the array itself isn't NUL-terminated and
		 the caller relies on the subsequent member to contain
		 the NUL.
		 Set *FLEXP to true if the array whose bound is being
		 used is at the end of a struct.  */
	      if (array_at_struct_end_p (arg))
		*flexp = true;

	      arg = TREE_OPERAND (arg, 1);
	      val = TYPE_SIZE_UNIT (TREE_TYPE (arg));
	      if (!val || integer_zerop (val))
		return false;
	      val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
				 integer_one_node);
	      /* Set the minimum size to zero since the string in
		 the array could have zero length.  */
	      *minlen = ssize_int (0);
	    }
	}

      if (!val)
	return false;

      if (minlen
	  && (!*minlen
	      || (type > 0
		  && TREE_CODE (*minlen) == INTEGER_CST
		  && TREE_CODE (val) == INTEGER_CST
		  && tree_int_cst_lt (val, *minlen))))
	*minlen = val;

      if (*maxlen)
	{
	  if (type > 0)
	    {
	      if (TREE_CODE (*maxlen) != INTEGER_CST
		  || TREE_CODE (val) != INTEGER_CST)
		return false;

	      if (tree_int_cst_lt (*maxlen, val))
		*maxlen = val;
	      return true;
	    }
	  else if (simple_cst_equal (val, *maxlen) != 1)
	    return false;
	}

      *maxlen = val;
      return true;
    }

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  */
  if (!*visited)
    *visited = BITMAP_ALLOC (NULL);
  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
      case GIMPLE_ASSIGN:
	/* The RHS of the statement defining VAR must either have a
	   constant length or come from another SSA_NAME with a constant
	   length.  */
	if (gimple_assign_single_p (def_stmt)
	    || gimple_assign_unary_nop_p (def_stmt))
	  {
	    tree rhs = gimple_assign_rhs1 (def_stmt);
	    return get_range_strlen (rhs, length, visited, type, fuzzy, flexp);
	  }
	else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
	  {
	    tree op2 = gimple_assign_rhs2 (def_stmt);
	    tree op3 = gimple_assign_rhs3 (def_stmt);
	    return get_range_strlen (op2, length, visited, type, fuzzy, flexp)
	      && get_range_strlen (op3, length, visited, type, fuzzy, flexp);
	  }
	return false;

      case GIMPLE_PHI:
	{
	  /* All the arguments of the PHI node must have the same constant
	     length.  */
	  unsigned i;

	  for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
	    {
	      tree arg = gimple_phi_arg (def_stmt, i)->def;

	      /* If this PHI has itself as an argument, we cannot
		 determine the string length of this argument.  However,
		 if we can find a constant string length for the other
		 PHI args then we can still be sure that this is a
		 constant string length.  So be optimistic and just
		 continue with the next argument.  */
	      if (arg == gimple_phi_result (def_stmt))
		continue;

	      if (!get_range_strlen (arg, length, visited, type, fuzzy, flexp))
		{
		  if (fuzzy)
		    *maxlen = build_all_ones_cst (size_type_node);
		  else
		    return false;
		}
	    }
	}
	return true;

      default:
	return false;
    }
}
/* Determine the minimum and maximum value or string length that ARG
   refers to and store each in the first two elements of MINMAXLEN.
   For expressions that point to strings of unknown lengths that are
   character arrays, use the upper bound of the array as the maximum
   length.  For example, given an expression like 'x ? array : "xyz"'
   and array declared as 'char array[8]', MINMAXLEN[0] will be set
   to 3 and MINMAXLEN[1] to 7, the longest string that could be
   stored in array.
   Return true if the range of the string lengths has been obtained
   from the upper bound of an array at the end of a struct.  Such
   an array may hold a string that's longer than its upper bound
   due to it being used as a poor-man's flexible array member.  */

bool
get_range_strlen (tree arg, tree minmaxlen[2])
{
  bitmap visited = NULL;

  minmaxlen[0] = NULL_TREE;
  minmaxlen[1] = NULL_TREE;

  bool flexarray = false;
  get_range_strlen (arg, minmaxlen, &visited, 1, true, &flexarray);

  if (visited)
    BITMAP_FREE (visited);

  return flexarray;
}
tree
get_maxval_strlen (tree arg, int type)
{
  bitmap visited = NULL;
  tree len[2] = { NULL_TREE, NULL_TREE };

  bool dummy;
  if (!get_range_strlen (arg, len, &visited, type, false, &dummy))
    len[1] = NULL_TREE;
  if (visited)
    BITMAP_FREE (visited);

  return len[1];
}
/* Fold function call to builtin strcpy with arguments DEST and SRC.
   If LEN is not NULL, it represents the length of the string to be
   copied.  Return false if no simplification can be made.  */

static bool
gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
			    tree dest, tree src)
{
  location_t loc = gimple_location (gsi_stmt (*gsi));
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (operand_equal_p (src, dest, 0))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (optimize_function_for_size_p (cfun))
    return false;

  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  tree len = get_maxval_strlen (src, 0);
  if (!len)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
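
/* Example (a sketch, not part of the original sources):

     char buf[8];
     strcpy (buf, "abc");

   get_maxval_strlen computes 3 for the source, so the call is rewritten
   as memcpy (buf, "abc", 4), copying the terminating nul as well.  */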
/* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
   If SLEN is not NULL, it represents the length of the source string.
   Return false if no simplification can be made.  */

static bool
gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
			     tree dest, tree src, tree len)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool nonstring = get_attr_nonstring_decl (dest) != NULL_TREE;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      /* Avoid warning if the destination refers to an array/pointer
	 declared with attribute nonstring.  */
      if (!nonstring)
	{
	  tree fndecl = gimple_call_fndecl (stmt);
	  gcall *call = as_a <gcall *> (stmt);

	  /* Warn about the lack of nul termination: the result is not
	     a (nul-terminated) string.  */
	  tree slen = get_maxval_strlen (src, 0);
	  if (slen && !integer_zerop (slen))
	    warning_at (loc, OPT_Wstringop_truncation,
			"%G%qD destination unchanged after copying no bytes "
			"from a string of length %E",
			call, fndecl, slen);
	  else
	    warning_at (loc, OPT_Wstringop_truncation,
			"%G%qD destination unchanged after copying no bytes",
			call, fndecl);
	}

      replace_call_with_value (gsi, dest);
      return true;
    }

  /* We can't compare slen with len as constants below if len is not a
     constant.  */
  if (TREE_CODE (len) != INTEGER_CST)
    return false;

  /* Now, we must be passed a constant src ptr parameter.  */
  tree slen = get_maxval_strlen (src, 0);
  if (!slen || TREE_CODE (slen) != INTEGER_CST)
    return false;

  /* The size of the source string including the terminating nul.  */
  tree ssize = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

  /* We do not support simplification of this case, though we do
     support it when expanding trees into RTL.  */
  /* FIXME: generate a call to __builtin_memset.  */
  if (tree_int_cst_lt (ssize, len))
    return false;

  if (!nonstring)
    {
      if (tree_int_cst_lt (len, slen))
	{
	  tree fndecl = gimple_call_fndecl (stmt);
	  gcall *call = as_a <gcall *> (stmt);

	  warning_at (loc, OPT_Wstringop_truncation,
		      (tree_int_cst_equal (size_one_node, len)
		       ? G_("%G%qD output truncated copying %E byte "
			    "from a string of length %E")
		       : G_("%G%qD output truncated copying %E bytes "
			    "from a string of length %E")),
		      call, fndecl, len, slen);
	}
      else if (tree_int_cst_equal (len, slen))
	{
	  tree fndecl = gimple_call_fndecl (stmt);
	  gcall *call = as_a <gcall *> (stmt);

	  warning_at (loc, OPT_Wstringop_truncation,
		      (tree_int_cst_equal (size_one_node, len)
		       ? G_("%G%qD output truncated before terminating nul "
			    "copying %E byte from a string of the same "
			    "length")
		       : G_("%G%qD output truncated before terminating nul "
			    "copying %E bytes from a string of the same "
			    "length")),
		      call, fndecl, len, slen);
	}
    }

  /* OK transform into builtin memcpy.  */
  tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}
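
/* Example (a sketch, not part of the original sources):

     char buf[4];
     strncpy (buf, "ab", 3);

   Here LEN (3) equals strlen ("ab") + 1, so the call becomes
   memcpy (buf, "ab", 3); a bound smaller than the source length would
   instead trigger the -Wstringop-truncation warning above.  */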
/* Fold function call to builtin strchr or strrchr.
   If both arguments are constant, evaluate and fold the result,
   otherwise simplify str(r)chr (str, 0) into str + strlen (str).
   In general strlen is significantly faster than strchr
   due to being a simpler operation.  */

static bool
gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree str = gimple_call_arg (stmt, 0);
  tree c = gimple_call_arg (stmt, 1);
  location_t loc = gimple_location (stmt);
  const char *p;
  char ch;

  if (!gimple_call_lhs (stmt))
    return false;

  if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
    {
      const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);

      if (p1 == NULL)
	{
	  replace_call_with_value (gsi, integer_zero_node);
	  return true;
	}

      tree len = build_int_cst (size_type_node, p1 - p);
      gimple_seq stmts = NULL;
      gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
					      POINTER_PLUS_EXPR, str, len);
      gimple_seq_add_stmt_without_update (&stmts, new_stmt);
      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  if (!integer_zerop (c))
    return false;

  /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size.  */
  if (is_strrchr && optimize_function_for_size_p (cfun))
    {
      tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);

      if (strchr_fn)
	{
	  gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}

      return false;
    }

  tree len;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);

  if (!strlen_fn)
    return false;

  /* Create newstr = strlen (str).  */
  gimple_seq stmts = NULL;
  gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
  gimple_set_location (new_stmt, loc);
  len = create_tmp_reg_or_ssa_name (size_type_node);
  gimple_call_set_lhs (new_stmt, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);

  /* Create (str p+ strlen (str)).  */
  new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
				  POINTER_PLUS_EXPR, str, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);
  gsi_replace_with_seq_vops (gsi, stmts);

  /* gsi now points at the assignment to the lhs, get a
     stmt iterator to the strlen.
     ???  We can't use gsi_for_stmt as that doesn't work when the
     CFG isn't built yet.  */
  gimple_stmt_iterator gsi2 = *gsi;
  gsi_prev (&gsi2);
  fold_stmt (&gsi2);
  return true;
}
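
/* Example (a sketch, not part of the original sources): with a non-constant
   string s,

     p = strchr (s, 0);

   becomes the cheaper sequence

     tmp = strlen (s);
     p = s + tmp;

   matching the "strlen is significantly faster than strchr" rationale in
   the comment above.  */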
/* Fold function call to builtin strstr.
   If both arguments are constant, evaluate and fold the result,
   additionally fold strstr (x, "") into x and strstr (x, "c")
   into strchr (x, 'c').  */

static bool
gimple_fold_builtin_strstr (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree haystack = gimple_call_arg (stmt, 0);
  tree needle = gimple_call_arg (stmt, 1);
  const char *p, *q;

  if (!gimple_call_lhs (stmt))
    return false;

  q = c_getstr (needle);
  if (q == NULL)
    return false;

  if ((p = c_getstr (haystack)))
    {
      const char *r = strstr (p, q);

      if (r == NULL)
	{
	  replace_call_with_value (gsi, integer_zero_node);
	  return true;
	}

      tree len = build_int_cst (size_type_node, r - p);
      gimple_seq stmts = NULL;
      gimple *new_stmt
	= gimple_build_assign (gimple_call_lhs (stmt), POINTER_PLUS_EXPR,
			       haystack, len);
      gimple_seq_add_stmt_without_update (&stmts, new_stmt);
      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* For strstr (x, "") return x.  */
  if (q[0] == '\0')
    {
      replace_call_with_value (gsi, haystack);
      return true;
    }

  /* Transform strstr (x, "c") into strchr (x, 'c').  */
  if (q[1] == '\0')
    {
      tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
      if (strchr_fn)
	{
	  tree c = build_int_cst (integer_type_node, q[0]);
	  gimple *repl = gimple_build_call (strchr_fn, 2, haystack, c);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  return false;
}
/* Simplify a call to the strcat builtin.  DST and SRC are the arguments
   to the call.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static bool
gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);

  const char *p = c_getstr (src);

  /* If the string length is zero, return the dst parameter.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
    return false;

  /* See if we can store by pieces into (dst + strlen(dst)).  */
  tree newdst;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
  tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);

  if (!strlen_fn || !memcpy_fn)
    return false;

  /* If the length of the source string isn't computable don't
     split strcat into strlen and memcpy.  */
  tree len = get_maxval_strlen (src, 0);
  if (! len)
    return false;

  /* Create strlen (dst).  */
  gimple_seq stmts = NULL, stmts2;
  gimple *repl = gimple_build_call (strlen_fn, 1, dst);
  gimple_set_location (repl, loc);
  newdst = create_tmp_reg_or_ssa_name (size_type_node);
  gimple_call_set_lhs (repl, newdst);
  gimple_seq_add_stmt_without_update (&stmts, repl);

  /* Create (dst p+ strlen (dst)).  */
  newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
  newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len,
			build_int_cst (size_type_node, 1));
  len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
  gimple_seq_add_stmt_without_update (&stmts, repl);
  if (gimple_call_lhs (stmt))
    {
      repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      gsi_replace_with_seq_vops (gsi, stmts);
      /* gsi now points at the assignment to the lhs, get a
	 stmt iterator to the memcpy call.
	 ???  We can't use gsi_for_stmt as that doesn't work when the
	 CFG isn't built yet.  */
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (&gsi2);
      fold_stmt (&gsi2);
    }
  else
    {
      gsi_replace_with_seq_vops (gsi, stmts);
      fold_stmt (gsi);
    }
  return true;
}
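
/* Example (a sketch, not part of the original sources): when the source
   length is known,

     strcat (dst, "xy");

   is split into

     tmp = strlen (dst);
     memcpy (dst + tmp, "xy", 3);

   trading the strcat call for one strlen plus a fixed-size copy.  */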
/* Fold a call to the __strcat_chk builtin FNDECL.  DEST, SRC, and SIZE
   are the arguments to the call.  */

static bool
gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree size = gimple_call_arg (stmt, 2);
  tree fn;
  const char *p;

  p = c_getstr (src);
  /* If the SRC parameter is "", return DEST.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
    return false;

  /* If __builtin_strcat_chk is used, assume strcat is available.  */
  fn = builtin_decl_explicit (BUILT_IN_STRCAT);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 2, dest, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Simplify a call to the strncat builtin.  */

static bool
gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dst = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  const char *p = c_getstr (src);

  /* If the requested length is zero, or the src parameter string
     length is zero, return the dst parameter.  */
  if (integer_zerop (len) || (p && *p == '\0'))
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  if (TREE_CODE (len) != INTEGER_CST || !p)
    return false;

  unsigned srclen = strlen (p);

  int cmpsrc = compare_tree_int (len, srclen);

  /* Return early if the requested len is less than the string length.
     Warnings will be issued elsewhere later.  */
  if (cmpsrc < 0)
    return false;

  unsigned HOST_WIDE_INT dstsize;

  bool nowarn = gimple_no_warning_p (stmt);

  if (!nowarn && compute_builtin_object_size (dst, 1, &dstsize))
    {
      int cmpdst = compare_tree_int (len, dstsize);

      if (cmpdst >= 0)
	{
	  tree fndecl = gimple_call_fndecl (stmt);

	  /* Strncat copies (at most) LEN bytes and always appends
	     the terminating NUL so the specified bound should never
	     be equal to (or greater than) the size of the destination.
	     If it is, the copy could overflow.  */
	  location_t loc = gimple_location (stmt);
	  nowarn = warning_at (loc, OPT_Wstringop_overflow_,
			       cmpdst == 0
			       ? G_("%G%qD specified bound %E equals "
				    "destination size")
			       : G_("%G%qD specified bound %E exceeds "
				    "destination size %wu"),
			       stmt, fndecl, len, dstsize);
	  if (nowarn)
	    gimple_set_no_warning (stmt, true);
	}
    }

  if (!nowarn && cmpsrc == 0)
    {
      tree fndecl = gimple_call_fndecl (stmt);

      /* To avoid truncation that is certain to occur, the specified
	 bound should also not be equal to (or less than) the length
	 of the source.  */
      location_t loc = gimple_location (stmt);
      if (warning_at (loc, OPT_Wstringop_overflow_,
		      "%G%qD specified bound %E equals source length",
		      stmt, fndecl, len))
	gimple_set_no_warning (stmt, true);
    }

  tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);

  /* If the replacement _DECL isn't initialized, don't do the
     transformation.  */
  if (!fn)
    return false;

  /* Otherwise, emit a call to strcat.  */
  gcall *repl = gimple_build_call (fn, 2, dst, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
   LEN, and SIZE.  */

static bool
gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);
  tree size = gimple_call_arg (stmt, 3);
  tree fn;
  const char *p;

  p = c_getstr (src);
  /* If the SRC parameter is "" or if LEN is 0, return DEST.  */
  if ((p && *p == '\0')
      || integer_zerop (len))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  if (! integer_all_onesp (size))
    {
      tree src_len = c_strlen (src, 1);
      if (src_len
	  && tree_fits_uhwi_p (src_len)
	  && tree_fits_uhwi_p (len)
	  && ! tree_int_cst_lt (len, src_len))
	{
	  /* If LEN >= strlen (SRC), optimize into __strcat_chk.  */
	  fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
	  if (!fn)
	    return false;

	  gimple *repl = gimple_build_call (fn, 3, dest, src, size);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
      return false;
    }

  /* If __builtin_strncat_chk is used, assume strncat is available.  */
  fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Build and append gimple statements to STMTS that would load a first
   character of a memory location identified by STR.  LOC is location
   of the statement.  */

static tree
gimple_load_first_char (location_t loc, tree str, gimple_seq *stmts)
{
  tree var;

  tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
  tree cst_uchar_ptr_node
    = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
  tree off0 = build_int_cst (cst_uchar_ptr_node, 0);

  tree temp = fold_build2_loc (loc, MEM_REF, cst_uchar_node, str, off0);
  gassign *stmt = gimple_build_assign (NULL_TREE, temp);
  var = create_tmp_reg_or_ssa_name (cst_uchar_node, stmt);

  gimple_assign_set_lhs (stmt, var);
  gimple_seq_add_stmt_without_update (stmts, stmt);

  return var;
}
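
/* For illustration (a sketch, not part of the original sources): for a
   pointer STR this emits the equivalent of

     unsigned char var = *(const unsigned char *) str;

   as a GIMPLE assignment from a MEM_REF at offset zero, which the
   string-comparison folder below uses to read the first byte.  */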

/* Fold a call to the str{n}{case}cmp builtin pointed to by the GSI
   iterator.  The particular variant is determined from the BUILT_IN_*
   code of the callee.  */

static bool
gimple_fold_builtin_string_compare (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree callee = gimple_call_fndecl (stmt);
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);

  tree type = integer_type_node;
  tree str1 = gimple_call_arg (stmt, 0);
  tree str2 = gimple_call_arg (stmt, 1);
  tree lhs = gimple_call_lhs (stmt);
  HOST_WIDE_INT length = -1;

  /* Handle strncmp and strncasecmp functions.  */
  if (gimple_call_num_args (stmt) == 3)
    {
      tree len = gimple_call_arg (stmt, 2);
      if (tree_fits_uhwi_p (len))
	length = tree_to_uhwi (len);
    }

  /* If the LEN parameter is zero, return zero.  */
  if (length == 0)
    {
      replace_call_with_value (gsi, integer_zero_node);
      return true;
    }

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (str1, str2, 0))
    {
      replace_call_with_value (gsi, integer_zero_node);
      return true;
    }

  const char *p1 = c_getstr (str1);
  const char *p2 = c_getstr (str2);

  /* For known strings, return an immediate value.  */
  if (p1 && p2)
    {
      int r = 0;
      bool known_result = false;

      switch (fcode)
	{
	case BUILT_IN_STRCMP:
	  {
	    r = strcmp (p1, p2);
	    known_result = true;
	    break;
	  }
	case BUILT_IN_STRNCMP:
	  {
	    if (length == -1)
	      break;
	    r = strncmp (p1, p2, length);
	    known_result = true;
	    break;
	  }
	/* Only handleable situation is where the strings are equal (result 0),
	   which is already handled by operand_equal_p case.  */
	case BUILT_IN_STRCASECMP:
	  break;
	case BUILT_IN_STRNCASECMP:
	  {
	    if (length == -1)
	      break;
	    r = strncmp (p1, p2, length);
	    if (r == 0)
	      known_result = true;
	    break;
	  }
	default:
	  gcc_unreachable ();
	}

      if (known_result)
	{
	  replace_call_with_value (gsi, build_cmp_result (type, r));
	  return true;
	}
    }

  bool nonzero_length = length >= 1
    || fcode == BUILT_IN_STRCMP
    || fcode == BUILT_IN_STRCASECMP;

  location_t loc = gimple_location (stmt);

  /* If the second arg is "", return *(const unsigned char*)arg1.  */
  if (p2 && *p2 == '\0' && nonzero_length)
    {
      gimple_seq stmts = NULL;
      tree var = gimple_load_first_char (loc, str1, &stmts);
      if (lhs)
	{
	  stmt = gimple_build_assign (lhs, NOP_EXPR, var);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);
	}

      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* If the first arg is "", return -*(const unsigned char*)arg2.  */
  if (p1 && *p1 == '\0' && nonzero_length)
    {
      gimple_seq stmts = NULL;
      tree var = gimple_load_first_char (loc, str2, &stmts);

      if (lhs)
	{
	  tree c = create_tmp_reg_or_ssa_name (integer_type_node);
	  stmt = gimple_build_assign (c, NOP_EXPR, var);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);

	  stmt = gimple_build_assign (lhs, NEGATE_EXPR, c);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);
	}

      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* If len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - *(const unsigned char*)arg2).  */
  if (fcode == BUILT_IN_STRNCMP && length == 1)
    {
      gimple_seq stmts = NULL;
      tree temp1 = gimple_load_first_char (loc, str1, &stmts);
      tree temp2 = gimple_load_first_char (loc, str2, &stmts);

      if (lhs)
	{
	  tree c1 = create_tmp_reg_or_ssa_name (integer_type_node);
	  gassign *convert1 = gimple_build_assign (c1, NOP_EXPR, temp1);
	  gimple_seq_add_stmt_without_update (&stmts, convert1);

	  tree c2 = create_tmp_reg_or_ssa_name (integer_type_node);
	  gassign *convert2 = gimple_build_assign (c2, NOP_EXPR, temp2);
	  gimple_seq_add_stmt_without_update (&stmts, convert2);

	  stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);
	}

      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* If length is larger than the length of one constant string,
     replace strncmp with corresponding strcmp.  */
  if (fcode == BUILT_IN_STRNCMP
      && length > 0
      && ((p2 && (size_t) length > strlen (p2))
	  || (p1 && (size_t) length > strlen (p1))))
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCMP);
      if (!fn)
	return false;
      gimple *repl = gimple_build_call (fn, 2, str1, str2);
      replace_call_with_call_and_fold (gsi, repl);
      return true;
    }

  return false;
}
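
/* Illustrative examples of the foldings above:

     strcmp ("abc", "abd")  -> negative constant (both strings known)
     strcmp (s, "")	    -> (int) *(const unsigned char *) s
     strcmp ("", s)	    -> - (int) *(const unsigned char *) s
     strncmp (s1, s2, 1)    -> *(const unsigned char *) s1
			       - *(const unsigned char *) s2
     strncmp (s1, "ab", 9)  -> strcmp (s1, "ab")   [9 > strlen ("ab")]  */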

/* Fold a call to the memchr pointed to by the GSI iterator.  */

static bool
gimple_fold_builtin_memchr (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  tree arg1 = gimple_call_arg (stmt, 0);
  tree arg2 = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
      return true;
    }

  char c;
  if (TREE_CODE (arg2) != INTEGER_CST
      || !tree_fits_uhwi_p (len)
      || !target_char_cst_p (arg2, &c))
    return false;

  unsigned HOST_WIDE_INT length = tree_to_uhwi (len);
  unsigned HOST_WIDE_INT string_length;
  const char *p1 = c_getstr (arg1, &string_length);

  if (p1)
    {
      const char *r = (const char *)memchr (p1, c, MIN (length, string_length));
      if (r == NULL)
	{
	  if (length <= string_length)
	    {
	      replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
	      return true;
	    }
	}
      else
	{
	  unsigned HOST_WIDE_INT offset = r - p1;
	  gimple_seq stmts = NULL;
	  if (lhs != NULL_TREE)
	    {
	      tree offset_cst = build_int_cst (TREE_TYPE (len), offset);
	      gassign *stmt = gimple_build_assign (lhs, POINTER_PLUS_EXPR,
						   arg1, offset_cst);
	      gimple_seq_add_stmt_without_update (&stmts, stmt);
	    }
	  else
	    gimple_seq_add_stmt_without_update (&stmts,
						gimple_build_nop ());

	  gsi_replace_with_seq_vops (gsi, stmts);
	  return true;
	}
    }

  return false;
}
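
/* E.g. (illustrative):

     memchr ("hello", 'l', 5)  -> arg1 + 2    [first 'l' at offset 2]
     memchr ("hello", 'z', 5)  -> NULL	      [not found, LEN covered]

   while memchr (s, c, n) with unknown contents is left alone.  */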

/* Fold a call to the fputs builtin.  ARG0 and ARG1 are the arguments
   to the call.  UNLOCKED is true if this is actually a call to
   fputs_unlocked.  Return false if no simplification was made.  */

static bool
gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
			   tree arg0, tree arg1,
			   bool unlocked)
{
  gimple *stmt = gsi_stmt (*gsi);

  /* If we're using an unlocked function, assume the other unlocked
     functions exist explicitly.  */
  tree const fn_fputc = (unlocked
			 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
			 : builtin_decl_implicit (BUILT_IN_FPUTC));
  tree const fn_fwrite = (unlocked
			  ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
			  : builtin_decl_implicit (BUILT_IN_FWRITE));

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt))
    return false;

  /* Get the length of the string passed to fputs.  If the length
     can't be determined, punt.  */
  tree len = get_maxval_strlen (arg0, 0);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return false;

  switch (compare_tree_int (len, 1))
    {
    case -1: /* length is 0, delete the call entirely.  */
      replace_call_with_value (gsi, integer_zero_node);
      return true;

    case 0: /* length is 1, call fputc.  */
      {
	const char *p = c_getstr (arg0);
	if (p != NULL)
	  {
	    if (!fn_fputc)
	      return false;

	    gimple *repl = gimple_build_call (fn_fputc, 2,
					      build_int_cst
					      (integer_type_node, p[0]), arg1);
	    replace_call_with_call_and_fold (gsi, repl);
	    return true;
	  }
      }
      /* FALLTHROUGH */
    case 1: /* length is greater than 1, call fwrite.  */
      {
	/* If optimizing for size keep fputs.  */
	if (optimize_function_for_size_p (cfun))
	  return false;
	/* New argument list transforming fputs(string, stream) to
	   fwrite(string, 1, len, stream).  */
	if (!fn_fwrite)
	  return false;

	gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
					  size_one_node, len, arg1);
	replace_call_with_call_and_fold (gsi, repl);
	return true;
      }
    default:
      gcc_unreachable ();
    }
  return false;
}
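
/* Illustrative examples (only when the fputs result is unused):

     fputs ("", f)    -> folded away (value 0)
     fputs ("x", f)   -> fputc ('x', f)
     fputs ("xy", f)  -> fwrite ("xy", 1, 2, f)	  [not with -Os]  */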

/* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   DEST, SRC, LEN, and SIZE are the arguments to the call.
   FCODE is the BUILT_IN_* code of the builtin.  */

static bool
gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
				tree dest, tree src, tree len, tree size,
				enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST
     (resp. DEST+LEN for __mempcpy_chk).  */
  if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
    {
      if (fcode != BUILT_IN_MEMPCPY_CHK)
	{
	  replace_call_with_value (gsi, dest);
	  return true;
	}
      else
	{
	  gimple_seq stmts = NULL;
	  len = gimple_convert_to_ptrofftype (&stmts, loc, len);
	  tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
				    TREE_TYPE (dest), dest, len);
	  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
	  replace_call_with_value (gsi, temp);
	  return true;
	}
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (len, 2);
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
		{
		  /* (void) __mempcpy_chk () can be optimized into
		     (void) __memcpy_chk ().  */
		  fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
		  if (!fn)
		    return false;

		  gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}
	      return false;
	    }
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  fn = NULL_TREE;
  /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
     mem{cpy,pcpy,move,set} is available.  */
  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
      break;
    case BUILT_IN_MEMPCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
      break;
    case BUILT_IN_MEMMOVE_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
      break;
    case BUILT_IN_MEMSET_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMSET);
      break;
    default:
      break;
    }

  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
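
/* E.g. (illustrative): with a constant length that the object size
   check can prove safe,

     __builtin___memcpy_chk (d, s, 32, 64)  ->  memcpy (d, s, 32)

   and a self-copy __builtin___mempcpy_chk (d, d, n, sz) simply yields
   the value d + n.  */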

/* Fold a call to the __st[rp]cpy_chk builtin.
   DEST, SRC, and SIZE are the arguments to the call.
   FCODE is the BUILT_IN_* code of the builtin.  */

static bool
gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
				tree dest,
				tree src, tree size,
				enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree len, fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (src, 1);
  if (! integer_all_onesp (size))
    {
      len = c_strlen (src, 1);
      if (! len || ! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_STPCPY_CHK)
		{
		  if (! ignore)
		    return false;

		  /* If return value of __stpcpy_chk is ignored,
		     optimize into __strcpy_chk.  */
		  fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
		  if (!fn)
		    return false;

		  gimple *repl = gimple_build_call (fn, 3, dest, src, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}

	      if (! len || TREE_SIDE_EFFECTS (len))
		return false;

	      /* If c_strlen returned something, but not a constant,
		 transform __strcpy_chk into __memcpy_chk.  */
	      fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	      if (!fn)
		return false;

	      gimple_seq stmts = NULL;
	      len = gimple_convert (&stmts, loc, size_type_node, len);
	      len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
				  build_int_cst (size_type_node, 1));
	      gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
	      gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }
	}
      else
	maxlen = len;

      if (! tree_int_cst_lt (maxlen, size))
	return false;
    }

  /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
			      ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 2, dest, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
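
/* E.g. (illustrative):

     __builtin___strcpy_chk (d, "abc", 8)  ->  strcpy (d, "abc")

   since strlen ("abc") < 8; with a non-constant c_strlen result LEN the
   call becomes __builtin___memcpy_chk (d, src, LEN + 1, 8) instead.  */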

/* Fold a call to the __st{r,p}ncpy_chk builtin.  DEST, SRC, LEN, and SIZE
   are the arguments to the call.  FCODE is the BUILT_IN_* code of the
   builtin.  */

static bool
gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
				 tree dest, tree src,
				 tree len, tree size,
				 enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
    {
      /* If return value of __stpncpy_chk is ignored,
	 optimize into __strncpy_chk.  */
      fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
      if (fn)
	{
	  gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (len, 2);
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    return false;
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
			      ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}

/* Fold function call to builtin stpcpy with arguments DEST and SRC.
   Return false if no simplification can be made.  */

static bool
gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  location_t loc = gimple_location (stmt);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree fn, len, lenp1;

  /* If the result is unused, replace stpcpy with strcpy.  */
  if (gimple_call_lhs (stmt) == NULL_TREE)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;
      gimple_call_set_fndecl (stmt, fn);
      fold_stmt (gsi);
      return true;
    }

  len = c_strlen (src, 1);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return false;

  if (optimize_function_for_size_p (cfun)
      /* If length is zero it's small enough.  */
      && !integer_zerop (len))
    return false;

  /* If the source has a known length replace stpcpy with memcpy.  */
  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  gimple_seq stmts = NULL;
  tree tem = gimple_convert (&stmts, loc, size_type_node, len);
  lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
			tem, build_int_cst (size_type_node, 1));
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
  gimple_set_vuse (repl, gimple_vuse (stmt));
  gimple_set_vdef (repl, gimple_vdef (stmt));
  if (gimple_vdef (repl)
      && TREE_CODE (gimple_vdef (repl)) == SSA_NAME)
    SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
  gsi_insert_before (gsi, repl, GSI_SAME_STMT);
  /* Replace the result with dest + len.  */
  stmts = NULL;
  tem = gimple_convert (&stmts, loc, sizetype, len);
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
				      POINTER_PLUS_EXPR, dest, tem);
  gsi_replace (gsi, ret, false);
  /* Finally fold the memcpy call.  */
  gimple_stmt_iterator gsi2 = *gsi;
  gsi_prev (&gsi2);
  fold_stmt (&gsi2);
  return true;
}
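
/* Illustrative sketch of the folding above: p = stpcpy (d, "abc")
   becomes

     memcpy (d, "abc", 4);	[strlen ("abc") + 1 bytes]
     p = d + 3;

   and a stpcpy whose result is unused is turned into plain strcpy.  */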

/* Fold a call to {,v}snprintf_chk pointed to by the GSI iterator.
   Return false if a normal call should be emitted rather than expanding
   the function inline.  FCODE is either BUILT_IN_SNPRINTF_CHK or
   BUILT_IN_VSNPRINTF_CHK.  */

static bool
gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
				  enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call.  */
  if (gimple_call_num_args (stmt) < 5)
    return false;

  dest = gimple_call_arg (stmt, 0);
  len = gimple_call_arg (stmt, 1);
  flag = gimple_call_arg (stmt, 2);
  size = gimple_call_arg (stmt, 3);
  fmt = gimple_call_arg (stmt, 4);

  if (! tree_fits_uhwi_p (size))
    return false;

  if (! integer_all_onesp (size))
    {
      tree maxlen = get_maxval_strlen (len, 2);
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    return false;
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  if (!init_target_chars ())
    return false;

  /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      fmt_str = c_getstr (fmt);
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
     available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
			      ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 5 arguments by 3 retaining
     trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, len);
  gimple_call_set_arg (stmt, 2, fmt);
  for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}

/* Fold a call to __{,v}sprintf_chk pointed to by the GSI iterator.
   Return false if a normal call should be emitted rather than
   expanding the function inline.  FCODE is either BUILT_IN_SPRINTF_CHK
   or BUILT_IN_VSPRINTF_CHK.  */

static bool
gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
				 enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;
  unsigned nargs = gimple_call_num_args (stmt);

  /* Verify the required arguments in the original call.  */
  if (nargs < 4)
    return false;
  dest = gimple_call_arg (stmt, 0);
  flag = gimple_call_arg (stmt, 1);
  size = gimple_call_arg (stmt, 2);
  fmt = gimple_call_arg (stmt, 3);

  if (! tree_fits_uhwi_p (size))
    return false;

  len = NULL_TREE;

  if (!init_target_chars ())
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str != NULL)
    {
      /* If the format doesn't contain % args or %%, we know the size.  */
      if (strchr (fmt_str, target_percent) == 0)
	{
	  if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
	    len = build_int_cstu (size_type_node, strlen (fmt_str));
	}
      /* If the format is "%s" and first ... argument is a string literal,
	 we know the size too.  */
      else if (fcode == BUILT_IN_SPRINTF_CHK
	       && strcmp (fmt_str, target_percent_s) == 0)
	{
	  tree arg;

	  if (nargs == 5)
	    {
	      arg = gimple_call_arg (stmt, 4);
	      if (POINTER_TYPE_P (TREE_TYPE (arg)))
		{
		  len = c_strlen (arg, 1);
		  if (! len || ! tree_fits_uhwi_p (len))
		    len = NULL_TREE;
		}
	    }
	}
    }

  if (! integer_all_onesp (size))
    {
      if (! len || ! tree_int_cst_lt (len, size))
	return false;
    }

  /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
			      ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 4 arguments by 2 retaining
     trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, fmt);
  for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}

/* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
   ORIG may be null if this is a 2-argument call.  We don't attempt to
   simplify calls with more than 3 arguments.

   Return true if simplification was possible, otherwise false.  */

bool
gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree fmt = gimple_call_arg (stmt, 1);
  tree orig = NULL_TREE;
  const char *fmt_str = NULL;

  /* Verify the required arguments in the original call.  We deal with two
     types of sprintf() calls: 'sprintf (str, fmt)' and
     'sprintf (dest, "%s", orig)'.  */
  if (gimple_call_num_args (stmt) > 3)
    return false;

  if (gimple_call_num_args (stmt) == 3)
    orig = gimple_call_arg (stmt, 2);

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);

      if (!fn)
	return false;

      /* Don't optimize sprintf (buf, "abc", ptr++).  */
      if (orig)
	return false;

      /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
	 'format' is known to contain no % formats.  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, fmt);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  repl = gimple_build_assign (gimple_call_lhs (stmt),
				      build_int_cst (integer_type_node,
						     strlen (fmt_str)));
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn;
      fn = builtin_decl_implicit (BUILT_IN_STRCPY);

      if (!fn)
	return false;

      /* Don't crash on sprintf (str1, "%s").  */
      if (!orig)
	return false;

      tree orig_len = NULL_TREE;
      if (gimple_call_lhs (stmt))
	{
	  orig_len = get_maxval_strlen (orig, 0);
	  if (!orig_len)
	    return false;
	}

      /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2).  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, orig);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  if (!useless_type_conversion_p (integer_type_node,
					  TREE_TYPE (orig_len)))
	    orig_len = fold_convert (integer_type_node, orig_len);
	  repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }
  return false;
}
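
/* Illustrative examples of the two cases handled above:

     r = sprintf (d, "abc")	-> strcpy (d, "abc"); r = 3;
     r = sprintf (d, "%s", s)	-> strcpy (d, s); r = <known strlen of s>;

   the second form is only folded when the length of S is known or R is
   unused.  */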

/* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
   FMT, and ORIG.  ORIG may be null if this is a 3-argument call.  We don't
   attempt to simplify calls with more than 4 arguments.

   Return true if simplification was possible, otherwise false.  */

bool
gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest = gimple_call_arg (stmt, 0);
  tree destsize = gimple_call_arg (stmt, 1);
  tree fmt = gimple_call_arg (stmt, 2);
  tree orig = NULL_TREE;
  const char *fmt_str = NULL;

  if (gimple_call_num_args (stmt) > 4)
    return false;

  if (gimple_call_num_args (stmt) == 4)
    orig = gimple_call_arg (stmt, 3);

  if (!tree_fits_uhwi_p (destsize))
    return false;
  unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;

      /* Don't optimize snprintf (buf, 4, "abc", ptr++).  */
      if (orig)
	return false;

      /* We could expand this as
	 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
	 or to
	 memcpy (str, fmt_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      size_t len = strlen (fmt_str);
      if (len >= destlen)
	return false;

      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, fmt);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  repl = gimple_build_assign (gimple_call_lhs (stmt),
				      build_int_cst (integer_type_node, len));
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;

      /* Don't crash on snprintf (str1, cst, "%s").  */
      if (!orig)
	return false;

      tree orig_len = get_maxval_strlen (orig, 0);
      if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
	return false;

      /* We could expand this as
	 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
	 or to
	 memcpy (str1, str2_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      if (compare_tree_int (orig_len, destlen) >= 0)
	return false;

      /* Convert snprintf (str1, cst, "%s", str2) into
	 strcpy (str1, str2) if strlen (str2) < cst.  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, orig);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  if (!useless_type_conversion_p (integer_type_node,
					  TREE_TYPE (orig_len)))
	    orig_len = fold_convert (integer_type_node, orig_len);
	  repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }
  return false;
}
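
/* E.g. (illustrative), with the destination size a known constant:

     r = snprintf (d, 32, "abc")     -> strcpy (d, "abc"); r = 3;
     r = snprintf (d, 32, "%s", s)   -> strcpy (d, s); r = strlen bound;
					[only if that bound is < 32]  */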

/* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
   FP, FMT, and ARG are the arguments to the call.  We don't fold calls with
   more than 3 arguments, and ARG may be null in the 2-argument case.

   Return false if no simplification was possible, true otherwise.  FCODE
   is the BUILT_IN_* code of the function to be simplified.  */

static bool
gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
			     tree fp, tree fmt, tree arg,
			     enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree fn_fputc, fn_fputs;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt) != NULL_TREE)
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
      fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
    }
  else
    {
      fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
      fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
    }

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use fputs.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
	  && arg)
	return false;

      /* If the format specifier was "", fprintf does nothing.  */
      if (fmt_str[0] == '\0')
	{
	  replace_call_with_value (gsi, NULL_TREE);
	  return true;
	}

      /* When "string" doesn't contain %, replace all cases of
	 fprintf (fp, string) with fputs (string, fp).  The fputs
	 builtin will take care of special cases like length == 1.  */
      if (fn_fputs)
	{
	  gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
    return false;

  /* If the format specifier was "%s", call __builtin_fputs (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_s) == 0)
    {
      if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	return false;
      if (fn_fputs)
	{
	  gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* If the format specifier was "%c", call __builtin_fputc (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg
	  || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
	return false;
      if (fn_fputc)
	{
	  gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  return false;
}
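
/* Illustrative examples (only when the fprintf result is unused):

     fprintf (f, "")	    -> folded away
     fprintf (f, "abc")	    -> fputs ("abc", f)
     fprintf (f, "%s", s)   -> fputs (s, f)
     fprintf (f, "%c", c)   -> fputc (c, f)  */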

/* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
   FMT and ARG are the arguments to the call; we don't fold cases with
   more than 2 arguments, and ARG may be null if this is a 1-argument case.

   Return false if no simplification was possible, true otherwise.  FCODE
   is the BUILT_IN_* code of the function to be simplified.  */

static bool
gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
			    tree arg, enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree fn_putchar, fn_puts, newarg;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt) != NULL_TREE)
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (fcode == BUILT_IN_PRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
      fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
    }
  else
    {
      fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
      fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
    }

  if (!init_target_chars ())
    return false;

  if (strcmp (fmt_str, target_percent_s) == 0
      || strchr (fmt_str, target_percent) == NULL)
    {
      const char *str;

      if (strcmp (fmt_str, target_percent_s) == 0)
	{
	  if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
	    return false;

	  if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	    return false;

	  str = c_getstr (arg);
	  if (str == NULL)
	    return false;
	}
      else
	{
	  /* The format specifier doesn't contain any '%' characters.  */
	  if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
	      && arg)
	    return false;
	  str = fmt_str;
	}

      /* If the string was "", printf does nothing.  */
      if (str[0] == '\0')
	{
	  replace_call_with_value (gsi, NULL_TREE);
	  return true;
	}

      /* If the string has length of 1, call putchar.  */
      if (str[1] == '\0')
	{
	  /* Given printf("c"), (where c is any one character,)
	     convert "c"[0] to an int and pass that to the replacement
	     function.  */
	  newarg = build_int_cst (integer_type_node, str[0]);
	  if (fn_putchar)
	    {
	      gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }
	}
      else
	{
	  /* If the string was "string\n", call puts("string").  */
	  size_t len = strlen (str);
	  if ((unsigned char)str[len - 1] == target_newline
	      && (size_t) (int) len == len
	      && (int) len > 0)
	    {
	      char *newstr;
	      tree offset_node, string_cst;

	      /* Create a NUL-terminated string that's one char shorter
		 than the original, stripping off the trailing '\n'.  */
	      newarg = build_string_literal (len, str);
	      string_cst = string_constant (newarg, &offset_node);
	      gcc_checking_assert (string_cst
				   && (TREE_STRING_LENGTH (string_cst)
				       == (int) len)
				   && integer_zerop (offset_node)
				   && (unsigned char)
				      TREE_STRING_POINTER (string_cst)[len - 1]
				      == target_newline);
	      /* build_string_literal creates a new STRING_CST,
		 modify it in place to avoid double copying.  */
	      newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
	      newstr[len - 1] = '\0';
	      if (fn_puts)
		{
		  gcall *repl = gimple_build_call (fn_puts, 1, newarg);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}
	    }
	  else
	    /* We'd like to arrange to call fputs(string,stdout) here,
	       but we need stdout and don't have a way to get it yet.  */
	    return false;
	}
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
    return false;

  /* If the format specifier was "%s\n", call __builtin_puts(arg).  */
  else if (strcmp (fmt_str, target_percent_s_newline) == 0)
    {
      if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	return false;
      if (fn_puts)
	{
	  gcall *repl = gimple_build_call (fn_puts, 1, arg);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* If the format specifier was "%c", call __builtin_putchar(arg).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg || ! useless_type_conversion_p (integer_type_node,
					       TREE_TYPE (arg)))
	return false;
      if (fn_putchar)
	{
	  gcall *repl = gimple_build_call (fn_putchar, 1, arg);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  return false;
}
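
/* Illustrative examples (only when the printf result is unused):

     printf ("")	 -> folded away
     printf ("x")	 -> putchar ('x')
     printf ("abc\n")	 -> puts ("abc")
     printf ("%s\n", s)	 -> puts (s)
     printf ("%c", c)	 -> putchar (c)  */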

/* Fold a call to __builtin_strlen with known length LEN.  */

static bool
gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree len = get_maxval_strlen (gimple_call_arg (stmt, 0), 0);
  if (!len)
    return false;
  len = force_gimple_operand_gsi (gsi, len, true, NULL, true, GSI_SAME_STMT);
  replace_call_with_value (gsi, len);
  return true;
}

/* Fold a call to __builtin_acc_on_device.  */

static bool
gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
{
  /* Defer folding until we know which compiler we're in.  */
  if (symtab->state != EXPANSION)
    return false;

  unsigned val_host = GOMP_DEVICE_HOST;
  unsigned val_dev = GOMP_DEVICE_NONE;

#ifdef ACCEL_COMPILER
  val_host = GOMP_DEVICE_NOT_HOST;
  val_dev = ACCEL_COMPILER_acc_device;
#endif

  location_t loc = gimple_location (gsi_stmt (*gsi));

  tree host_eq = make_ssa_name (boolean_type_node);
  gimple *host_ass = gimple_build_assign
    (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
  gimple_set_location (host_ass, loc);
  gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);

  tree dev_eq = make_ssa_name (boolean_type_node);
  gimple *dev_ass = gimple_build_assign
    (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
  gimple_set_location (dev_ass, loc);
  gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);

  tree result = make_ssa_name (boolean_type_node);
  gimple *result_ass = gimple_build_assign
    (result, BIT_IOR_EXPR, host_eq, dev_eq);
  gimple_set_location (result_ass, loc);
  gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);

  replace_call_with_value (gsi, result);

  return true;
}

/* Fold realloc (0, n) -> malloc (n).  */

static bool
gimple_fold_builtin_realloc (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree arg = gimple_call_arg (stmt, 0);
  tree size = gimple_call_arg (stmt, 1);

  if (operand_equal_p (arg, null_pointer_node, 0))
    {
      tree fn_malloc = builtin_decl_implicit (BUILT_IN_MALLOC);
      if (fn_malloc)
	{
	  gcall *repl = gimple_build_call (fn_malloc, 1, size);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }
  return false;
}

/* Fold the non-target builtin at *GSI and return whether any simplification
   was made.  */

static bool
gimple_fold_builtin (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
  tree callee = gimple_call_fndecl (stmt);

  /* Give up for always_inline inline builtins until they are
     inlined.  */
  if (avoid_folding_inline_builtin (callee))
    return false;

  unsigned n = gimple_call_num_args (stmt);
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
  switch (fcode)
    {
    case BUILT_IN_BCMP:
      return gimple_fold_builtin_bcmp (gsi);
    case BUILT_IN_BCOPY:
      return gimple_fold_builtin_bcopy (gsi);
    case BUILT_IN_BZERO:
      return gimple_fold_builtin_bzero (gsi);

    case BUILT_IN_MEMSET:
      return gimple_fold_builtin_memset (gsi,
					 gimple_call_arg (stmt, 1),
					 gimple_call_arg (stmt, 2));
    case BUILT_IN_MEMCPY:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1), 0);
    case BUILT_IN_MEMPCPY:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1), 1);
    case BUILT_IN_MEMMOVE:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1), 3);
    case BUILT_IN_SPRINTF_CHK:
    case BUILT_IN_VSPRINTF_CHK:
      return gimple_fold_builtin_sprintf_chk (gsi, fcode);
    case BUILT_IN_STRCAT_CHK:
      return gimple_fold_builtin_strcat_chk (gsi);
    case BUILT_IN_STRNCAT_CHK:
      return gimple_fold_builtin_strncat_chk (gsi);
    case BUILT_IN_STRLEN:
      return gimple_fold_builtin_strlen (gsi);
    case BUILT_IN_STRCPY:
      return gimple_fold_builtin_strcpy (gsi,
					 gimple_call_arg (stmt, 0),
					 gimple_call_arg (stmt, 1));
    case BUILT_IN_STRNCPY:
      return gimple_fold_builtin_strncpy (gsi,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2));
    case BUILT_IN_STRCAT:
      return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
					 gimple_call_arg (stmt, 1));
    case BUILT_IN_STRNCAT:
      return gimple_fold_builtin_strncat (gsi);
    case BUILT_IN_INDEX:
    case BUILT_IN_STRCHR:
      return gimple_fold_builtin_strchr (gsi, false);
    case BUILT_IN_RINDEX:
    case BUILT_IN_STRRCHR:
      return gimple_fold_builtin_strchr (gsi, true);
    case BUILT_IN_STRSTR:
      return gimple_fold_builtin_strstr (gsi);
    case BUILT_IN_STRCMP:
    case BUILT_IN_STRCASECMP:
    case BUILT_IN_STRNCMP:
    case BUILT_IN_STRNCASECMP:
      return gimple_fold_builtin_string_compare (gsi);
    case BUILT_IN_MEMCHR:
      return gimple_fold_builtin_memchr (gsi);
    case BUILT_IN_FPUTS:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1), false);
    case BUILT_IN_FPUTS_UNLOCKED:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1), true);
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      return gimple_fold_builtin_memory_chk (gsi,
					     gimple_call_arg (stmt, 0),
					     gimple_call_arg (stmt, 1),
					     gimple_call_arg (stmt, 2),
					     gimple_call_arg (stmt, 3),
					     fcode);
    case BUILT_IN_STPCPY:
      return gimple_fold_builtin_stpcpy (gsi);
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      return gimple_fold_builtin_stxcpy_chk (gsi,
					     gimple_call_arg (stmt, 0),
					     gimple_call_arg (stmt, 1),
					     gimple_call_arg (stmt, 2),
					     fcode);
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      return gimple_fold_builtin_stxncpy_chk (gsi,
					      gimple_call_arg (stmt, 0),
					      gimple_call_arg (stmt, 1),
					      gimple_call_arg (stmt, 2),
					      gimple_call_arg (stmt, 3),
					      fcode);
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      return gimple_fold_builtin_snprintf_chk (gsi, fcode);

    case BUILT_IN_FPRINTF:
    case BUILT_IN_FPRINTF_UNLOCKED:
    case BUILT_IN_VFPRINTF:
      if (n == 2 || n == 3)
	return gimple_fold_builtin_fprintf (gsi,
					    gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1),
					    n == 3
					    ? gimple_call_arg (stmt, 2)
					    : NULL_TREE,
					    fcode);
      break;
    case BUILT_IN_FPRINTF_CHK:
    case BUILT_IN_VFPRINTF_CHK:
      if (n == 3 || n == 4)
	return gimple_fold_builtin_fprintf (gsi,
					    gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 2),
					    n == 4
					    ? gimple_call_arg (stmt, 3)
					    : NULL_TREE,
					    fcode);
      break;
    case BUILT_IN_PRINTF:
    case BUILT_IN_PRINTF_UNLOCKED:
    case BUILT_IN_VPRINTF:
      if (n == 1 || n == 2)
	return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
					   n == 2
					   ? gimple_call_arg (stmt, 1)
					   : NULL_TREE, fcode);
      break;
    case BUILT_IN_PRINTF_CHK:
    case BUILT_IN_VPRINTF_CHK:
      if (n == 2 || n == 3)
	return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
					   n == 3
					   ? gimple_call_arg (stmt, 2)
					   : NULL_TREE, fcode);
      break;
    case BUILT_IN_ACC_ON_DEVICE:
      return gimple_fold_builtin_acc_on_device (gsi,
						gimple_call_arg (stmt, 0));
    case BUILT_IN_REALLOC:
      return gimple_fold_builtin_realloc (gsi);

    default:;
    }

  /* Try the generic builtin folder.  */
  bool ignore = (gimple_call_lhs (stmt) == NULL);
  tree result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
	STRIP_NOPS (result);
      else
	result = fold_convert (gimple_call_return_type (stmt), result);
      if (!update_call_from_tree (gsi, result))
	gimplify_and_update_call_from_tree (gsi, result);
      return true;
    }

  return false;
}

/* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
   function calls to constants, where possible.  */

static tree
fold_internal_goacc_dim (const gimple *call)
{
  int axis = oacc_get_ifn_dim_arg (call);
  int size = oacc_get_fn_dim_size (current_function_decl, axis);
  bool is_pos = gimple_call_internal_fn (call) == IFN_GOACC_DIM_POS;
  tree result = NULL_TREE;

  /* If the size is 1, or we only want the size and it is not dynamic,
     we know the answer.  */
  if (size == 1 || (!is_pos && size))
    {
      tree type = TREE_TYPE (gimple_call_lhs (call));
      result = build_int_cst (type, size - is_pos);
    }

  return result;
}

/* Return true if stmt is __atomic_compare_exchange_N call which is suitable
   for conversion into ATOMIC_COMPARE_EXCHANGE if the second argument is
   &var where var is only addressable because of such calls.  */

bool
optimize_atomic_compare_exchange_p (gimple *stmt)
{
  if (gimple_call_num_args (stmt) != 6
      || !flag_inline_atomics
      || !optimize
      || sanitize_flags_p (SANITIZE_THREAD | SANITIZE_ADDRESS)
      || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
      || !gimple_vdef (stmt)
      || !gimple_vuse (stmt))
    return false;

  tree fndecl = gimple_call_fndecl (stmt);
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
      break;
    default:
      return false;
    }

  tree expected = gimple_call_arg (stmt, 1);
  if (TREE_CODE (expected) != ADDR_EXPR
      || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
    return false;

  tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
  if (!is_gimple_reg_type (etype)
      || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
      || TREE_THIS_VOLATILE (etype)
      || VECTOR_TYPE_P (etype)
      || TREE_CODE (etype) == COMPLEX_TYPE
      /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
	 might not preserve all the bits.  See PR71716.  */
      || SCALAR_FLOAT_TYPE_P (etype)
      || TYPE_PRECISION (etype) != GET_MODE_BITSIZE (TYPE_MODE (etype)))
    return false;

  tree weak = gimple_call_arg (stmt, 3);
  if (!integer_zerop (weak) && !integer_onep (weak))
    return false;

  tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
  machine_mode mode = TYPE_MODE (itype);

  if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
      == CODE_FOR_nothing
      && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
    return false;

  if (int_size_in_bytes (etype) != GET_MODE_SIZE (mode))
    return false;

  return true;
}
/* Fold
     r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
   into
     _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
     i = IMAGPART_EXPR <t>;
     r = (_Bool) i;
     e = REALPART_EXPR <t>;  */

void
fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree fndecl = gimple_call_fndecl (stmt);
  tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
  tree ctype = build_complex_type (itype);
  tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
  bool throws = false;
  edge e = NULL;
  gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
				   expected);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);
  gimple_stmt_iterator gsiret = gsi_for_stmt (g);
  if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
    {
      g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
			       build1 (VIEW_CONVERT_EXPR, itype,
				       gimple_assign_lhs (g)));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }
  int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
	     + int_size_in_bytes (itype);
  g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
				  gimple_call_arg (stmt, 0),
				  gimple_assign_lhs (g),
				  gimple_call_arg (stmt, 2),
				  build_int_cst (integer_type_node, flag),
				  gimple_call_arg (stmt, 4),
				  gimple_call_arg (stmt, 5));
  tree lhs = make_ssa_name (ctype);
  gimple_call_set_lhs (g, lhs);
  gimple_set_vdef (g, gimple_vdef (stmt));
  gimple_set_vuse (g, gimple_vuse (stmt));
  SSA_NAME_DEF_STMT (gimple_vdef (g)) = g;
  tree oldlhs = gimple_call_lhs (stmt);
  if (stmt_can_throw_internal (stmt))
    {
      throws = true;
      e = find_fallthru_edge (gsi_bb (*gsi)->succs);
    }
  gimple_call_set_nothrow (as_a <gcall *> (g),
			   gimple_call_nothrow_p (as_a <gcall *> (stmt)));
  gimple_call_set_lhs (stmt, NULL_TREE);
  gsi_replace (gsi, g, true);
  if (oldlhs)
    {
      g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
			       build1 (IMAGPART_EXPR, itype, lhs));
      if (throws)
	{
	  gsi_insert_on_edge_immediate (e, g);
	  *gsi = gsi_for_stmt (g);
	}
      else
	gsi_insert_after (gsi, g, GSI_NEW_STMT);
      g = gimple_build_assign (oldlhs, NOP_EXPR, gimple_assign_lhs (g));
      gsi_insert_after (gsi, g, GSI_NEW_STMT);
    }
  g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
			   build1 (REALPART_EXPR, itype, lhs));
  if (throws && oldlhs == NULL_TREE)
    {
      gsi_insert_on_edge_immediate (e, g);
      *gsi = gsi_for_stmt (g);
    }
  else
    gsi_insert_after (gsi, g, GSI_NEW_STMT);
  if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
    {
      g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
			       VIEW_CONVERT_EXPR,
			       build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
				       gimple_assign_lhs (g)));
      gsi_insert_after (gsi, g, GSI_NEW_STMT);
    }
  g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
  gsi_insert_after (gsi, g, GSI_NEW_STMT);
  *gsi = gsiret;
}

/* Return true if ARG0 CODE ARG1 in infinite signed precision operation
   doesn't fit into TYPE.  The test for overflow should be regardless of
   -fwrapv, and even for unsigned types.  */

bool
arith_overflowed_p (enum tree_code code, const_tree type,
		    const_tree arg0, const_tree arg1)
{
  typedef FIXED_WIDE_INT (WIDE_INT_MAX_PRECISION * 2) widest2_int;
  typedef generic_wide_int <wi::extended_tree <WIDE_INT_MAX_PRECISION * 2> >
    widest2_int_cst;
  widest2_int warg0 = widest2_int_cst (arg0);
  widest2_int warg1 = widest2_int_cst (arg1);
  widest2_int wres;
  switch (code)
    {
    case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
    case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
    case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
    default: gcc_unreachable ();
    }
  signop sign = TYPE_SIGN (type);
  if (sign == UNSIGNED && wi::neg_p (wres))
    return true;
  return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
}
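
/* For instance (illustrative): with a 32-bit unsigned TYPE,
   arith_overflowed_p (PLUS_EXPR, type, 0xffffffff, 1) computes
   wres = 0x100000000 in the double-width type; min_precision of that
   value is 33 > 32, so the function reports an overflow.  */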
3864 /* Attempt to fold a call statement referenced by the statement iterator GSI.
3865 The statement may be replaced by another statement, e.g., if the call
3866 simplifies to a constant value. Return true if any changes were made.
3867 It is assumed that the operands have been previously folded. */
3870 gimple_fold_call (gimple_stmt_iterator
*gsi
, bool inplace
)
3872 gcall
*stmt
= as_a
<gcall
*> (gsi_stmt (*gsi
));
3874 bool changed
= false;
3877 /* Fold *& in call arguments. */
3878 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
3879 if (REFERENCE_CLASS_P (gimple_call_arg (stmt
, i
)))
3881 tree tmp
= maybe_fold_reference (gimple_call_arg (stmt
, i
), false);
3884 gimple_call_set_arg (stmt
, i
, tmp
);
3889 /* Check for virtual calls that became direct calls. */
3890 callee
= gimple_call_fn (stmt
);
3891 if (callee
&& TREE_CODE (callee
) == OBJ_TYPE_REF
)
3893 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee
)) != NULL_TREE
)
3895 if (dump_file
&& virtual_method_call_p (callee
)
3896 && !possible_polymorphic_call_target_p
3897 (callee
, stmt
, cgraph_node::get (gimple_call_addr_fndecl
3898 (OBJ_TYPE_REF_EXPR (callee
)))))
3901 "Type inheritance inconsistent devirtualization of ");
3902 print_gimple_stmt (dump_file
, stmt
, 0, TDF_SLIM
);
3903 fprintf (dump_file
, " to ");
3904 print_generic_expr (dump_file
, callee
, TDF_SLIM
);
3905 fprintf (dump_file
, "\n");
3908 gimple_call_set_fn (stmt
, OBJ_TYPE_REF_EXPR (callee
));
3911 else if (flag_devirtualize
&& !inplace
&& virtual_method_call_p (callee
))
3914 vec
<cgraph_node
*>targets
3915 = possible_polymorphic_call_targets (callee
, stmt
, &final
);
3916 if (final
&& targets
.length () <= 1 && dbg_cnt (devirt
))
3918 tree lhs
= gimple_call_lhs (stmt
);
3919 if (dump_enabled_p ())
3921 location_t loc
= gimple_location_safe (stmt
);
3922 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS
, loc
,
3923 "folding virtual function call to %s\n",
3924 targets
.length () == 1
3925 ? targets
[0]->name ()
3926 : "__builtin_unreachable");
3928 if (targets
.length () == 1)
3930 tree fndecl
= targets
[0]->decl
;
3931 gimple_call_set_fndecl (stmt
, fndecl
);
3933 /* If changing the call to __cxa_pure_virtual
3934 or similar noreturn function, adjust gimple_call_fntype
3936 if (gimple_call_noreturn_p (stmt
)
3937 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl
)))
3938 && TYPE_ARG_TYPES (TREE_TYPE (fndecl
))
3939 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl
)))
3941 gimple_call_set_fntype (stmt
, TREE_TYPE (fndecl
));
3942 /* If the call becomes noreturn, remove the lhs. */
3944 && gimple_call_noreturn_p (stmt
)
3945 && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt
)))
3946 || should_remove_lhs_p (lhs
)))
3948 if (TREE_CODE (lhs
) == SSA_NAME
)
3950 tree var
= create_tmp_var (TREE_TYPE (lhs
));
3951 tree def
= get_or_create_ssa_default_def (cfun
, var
);
3952 gimple
*new_stmt
= gimple_build_assign (lhs
, def
);
3953 gsi_insert_before (gsi
, new_stmt
, GSI_SAME_STMT
);
3955 gimple_call_set_lhs (stmt
, NULL_TREE
);
3957 maybe_remove_unused_call_args (cfun
, stmt
);
3961 tree fndecl
= builtin_decl_implicit (BUILT_IN_UNREACHABLE
);
3962 gimple
*new_stmt
= gimple_build_call (fndecl
, 0);
3963 gimple_set_location (new_stmt
, gimple_location (stmt
));
3964 /* If the call had a SSA name as lhs morph that into
3965 an uninitialized value. */
3966 if (lhs
&& TREE_CODE (lhs
) == SSA_NAME
)
3968 tree var
= create_tmp_var (TREE_TYPE (lhs
));
3969 SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs
, var
);
3970 SSA_NAME_DEF_STMT (lhs
) = gimple_build_nop ();
3971 set_ssa_default_def (cfun
, var
, lhs
);
3973 gimple_set_vuse (new_stmt
, gimple_vuse (stmt
));
3974 gimple_set_vdef (new_stmt
, gimple_vdef (stmt
));
3975 gsi_replace (gsi
, new_stmt
, false);
3982 /* Check for indirect calls that became direct calls, and then
3983 no longer require a static chain. */
3984 if (gimple_call_chain (stmt
))
3986 tree fn
= gimple_call_fndecl (stmt
);
3987 if (fn
&& !DECL_STATIC_CHAIN (fn
))
3989 gimple_call_set_chain (stmt
, NULL
);
3994 tree tmp
= maybe_fold_reference (gimple_call_chain (stmt
), false);
3997 gimple_call_set_chain (stmt
, tmp
);
4006 /* Check for builtins that CCP can handle using information not
4007 available in the generic fold routines. */
4008 if (gimple_call_builtin_p (stmt
, BUILT_IN_NORMAL
))
4010 if (gimple_fold_builtin (gsi
))
4013 else if (gimple_call_builtin_p (stmt
, BUILT_IN_MD
))
4015 changed
|= targetm
.gimple_fold_builtin (gsi
);
4017 else if (gimple_call_internal_p (stmt
))
4019 enum tree_code subcode
= ERROR_MARK
;
4020 tree result
= NULL_TREE
;
4021 bool cplx_result
= false;
4022 tree overflow
= NULL_TREE
;
4023 switch (gimple_call_internal_fn (stmt
))
4025 case IFN_BUILTIN_EXPECT
:
4026 result
	  = fold_builtin_expect (gimple_location (stmt),
				 gimple_call_arg (stmt, 0),
				 gimple_call_arg (stmt, 1),
				 gimple_call_arg (stmt, 2));
	break;
      case IFN_UBSAN_OBJECT_SIZE:
	{
	  tree offset = gimple_call_arg (stmt, 1);
	  tree objsize = gimple_call_arg (stmt, 2);
	  if (integer_all_onesp (objsize)
	      || (TREE_CODE (offset) == INTEGER_CST
		  && TREE_CODE (objsize) == INTEGER_CST
		  && tree_int_cst_le (offset, objsize)))
	    {
	      replace_call_with_value (gsi, NULL_TREE);
	      return true;
	    }
	}
	break;
      case IFN_UBSAN_PTR:
	if (integer_zerop (gimple_call_arg (stmt, 1)))
	  {
	    replace_call_with_value (gsi, NULL_TREE);
	    return true;
	  }
	break;
      case IFN_UBSAN_BOUNDS:
	{
	  tree index = gimple_call_arg (stmt, 1);
	  tree bound = gimple_call_arg (stmt, 2);
	  if (TREE_CODE (index) == INTEGER_CST
	      && TREE_CODE (bound) == INTEGER_CST)
	    {
	      index = fold_convert (TREE_TYPE (bound), index);
	      if (TREE_CODE (index) == INTEGER_CST
		  && tree_int_cst_le (index, bound))
		{
		  replace_call_with_value (gsi, NULL_TREE);
		  return true;
		}
	    }
	}
	break;
      case IFN_GOACC_DIM_SIZE:
      case IFN_GOACC_DIM_POS:
	result = fold_internal_goacc_dim (stmt);
	break;
      case IFN_UBSAN_CHECK_ADD:
	subcode = PLUS_EXPR;
	break;
      case IFN_UBSAN_CHECK_SUB:
	subcode = MINUS_EXPR;
	break;
      case IFN_UBSAN_CHECK_MUL:
	subcode = MULT_EXPR;
	break;
      case IFN_ADD_OVERFLOW:
	subcode = PLUS_EXPR;
	cplx_result = true;
	break;
      case IFN_SUB_OVERFLOW:
	subcode = MINUS_EXPR;
	cplx_result = true;
	break;
      case IFN_MUL_OVERFLOW:
	subcode = MULT_EXPR;
	cplx_result = true;
	break;
      default:
	break;
      }
  if (subcode != ERROR_MARK)
    {
      tree arg0 = gimple_call_arg (stmt, 0);
      tree arg1 = gimple_call_arg (stmt, 1);
      tree type = TREE_TYPE (arg0);
      if (cplx_result)
	{
	  tree lhs = gimple_call_lhs (stmt);
	  if (lhs == NULL_TREE)
	    type = NULL_TREE;
	  else
	    type = TREE_TYPE (TREE_TYPE (lhs));
	}
      if (type == NULL_TREE)
	;
      /* x = y + 0; x = y - 0; x = y * 0; */
      else if (integer_zerop (arg1))
	result = subcode == MULT_EXPR ? integer_zero_node : arg0;
      /* x = 0 + y; x = 0 * y; */
      else if (subcode != MINUS_EXPR && integer_zerop (arg0))
	result = subcode == MULT_EXPR ? integer_zero_node : arg1;
      /* x = y - y; */
      else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
	result = integer_zero_node;
      /* x = y * 1; x = 1 * y; */
      else if (subcode == MULT_EXPR && integer_onep (arg1))
	result = arg0;
      else if (subcode == MULT_EXPR && integer_onep (arg0))
	result = arg1;
      else if (TREE_CODE (arg0) == INTEGER_CST
	       && TREE_CODE (arg1) == INTEGER_CST)
	{
	  if (cplx_result)
	    result = int_const_binop (subcode, fold_convert (type, arg0),
				      fold_convert (type, arg1));
	  else
	    result = int_const_binop (subcode, arg0, arg1);
	  if (result && arith_overflowed_p (subcode, type, arg0, arg1))
	    {
	      if (cplx_result)
		overflow = build_one_cst (type);
	      else
		result = NULL_TREE;
	    }
	}
      if (result)
	{
	  if (result == integer_zero_node)
	    result = build_zero_cst (type);
	  else if (cplx_result && TREE_TYPE (result) != type)
	    {
	      if (TREE_CODE (result) == INTEGER_CST)
		{
		  if (arith_overflowed_p (PLUS_EXPR, type, result,
					  integer_zero_node))
		    overflow = build_one_cst (type);
		}
	      else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
			&& TYPE_UNSIGNED (type))
		       || (TYPE_PRECISION (type)
			   < (TYPE_PRECISION (TREE_TYPE (result))
			      + (TYPE_UNSIGNED (TREE_TYPE (result))
				 && !TYPE_UNSIGNED (type)))))
		result = NULL_TREE;
	      if (result)
		result = fold_convert (type, result);
	    }
	}
    }

  if (result)
    {
      if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
	result = drop_tree_overflow (result);
      if (cplx_result)
	{
	  if (overflow == NULL_TREE)
	    overflow = build_zero_cst (TREE_TYPE (result));
	  tree ctype = build_complex_type (TREE_TYPE (result));
	  if (TREE_CODE (result) == INTEGER_CST
	      && TREE_CODE (overflow) == INTEGER_CST)
	    result = build_complex (ctype, result, overflow);
	  else
	    result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
				 ctype, result, overflow);
	}
      if (!update_call_from_tree (gsi, result))
	gimplify_and_update_call_from_tree (gsi, result);
      changed = true;
    }

  return changed;
}
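
/* For illustration (hypothetical GIMPLE in simplified dump syntax): with
   32-bit int operands, a call
       _5 = ADD_OVERFLOW (7, 35);
   has two constant arguments and does not overflow, so the code above
   folds it to the complex constant <42, 0>, where the imaginary part is
   the overflow flag.  */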
/* Return true whether NAME has a use on STMT.  */

static bool
has_use_on_stmt (tree name, gimple *stmt)
{
  imm_use_iterator iter;
  use_operand_p use_p;
  FOR_EACH_IMM_USE_FAST (use_p, iter, name)
    if (USE_STMT (use_p) == stmt)
      return true;
  return false;
}
/* Worker for fold_stmt_1 dispatch to pattern based folding with
   gimple_simplify.

   Replaces *GSI with the simplification result in RCODE and OPS
   and the associated statements in *SEQ.  Does the replacement
   according to INPLACE and returns true if the operation succeeded.  */

static bool
replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
				  code_helper rcode, tree *ops,
				  gimple_seq *seq, bool inplace)
{
  gimple *stmt = gsi_stmt (*gsi);

  /* Play safe and do not allow abnormals to be mentioned in
     newly created statements.  See also maybe_push_res_to_seq.
     As an exception allow such uses if there was a use of the
     same SSA name on the old stmt.  */
  if ((TREE_CODE (ops[0]) == SSA_NAME
       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0])
       && !has_use_on_stmt (ops[0], stmt))
      || (ops[1]
	  && TREE_CODE (ops[1]) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1])
	  && !has_use_on_stmt (ops[1], stmt))
      || (ops[2]
	  && TREE_CODE (ops[2]) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2])
	  && !has_use_on_stmt (ops[2], stmt))
      || (COMPARISON_CLASS_P (ops[0])
	  && ((TREE_CODE (TREE_OPERAND (ops[0], 0)) == SSA_NAME
	       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 0))
	       && !has_use_on_stmt (TREE_OPERAND (ops[0], 0), stmt))
	      || (TREE_CODE (TREE_OPERAND (ops[0], 1)) == SSA_NAME
		  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 1))
		  && !has_use_on_stmt (TREE_OPERAND (ops[0], 1), stmt)))))
    return false;

  /* Don't insert new statements when INPLACE is true, even if we could
     reuse STMT for the final statement.  */
  if (inplace && !gimple_seq_empty_p (*seq))
    return false;

  if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
    {
      gcc_assert (rcode.is_tree_code ());
      if (TREE_CODE_CLASS ((enum tree_code) rcode) == tcc_comparison
	  /* GIMPLE_CONDs condition may not throw.  */
	  && (!flag_exceptions
	      || !cfun->can_throw_non_call_exceptions
	      || !operation_could_trap_p (rcode,
					  FLOAT_TYPE_P (TREE_TYPE (ops[0])),
					  false, NULL_TREE)))
	gimple_cond_set_condition (cond_stmt, rcode, ops[0], ops[1]);
      else if (rcode == SSA_NAME)
	gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
				   build_zero_cst (TREE_TYPE (ops[0])));
      else if (rcode == INTEGER_CST)
	{
	  if (integer_zerop (ops[0]))
	    gimple_cond_make_false (cond_stmt);
	  else
	    gimple_cond_make_true (cond_stmt);
	}
      else if (!inplace)
	{
	  tree res = maybe_push_res_to_seq (rcode, boolean_type_node,
					    ops, seq);
	  if (!res)
	    return false;
	  gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
				     build_zero_cst (TREE_TYPE (res)));
	}
      else
	return false;
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi),
			     0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (is_gimple_assign (stmt)
	   && rcode.is_tree_code ())
    {
      if (!inplace
	  || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (rcode))
	{
	  maybe_build_generic_op (rcode,
				  TREE_TYPE (gimple_assign_lhs (stmt)), ops);
	  gimple_assign_set_rhs_with_ops (gsi, rcode, ops[0], ops[1], ops[2]);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      if (!gimple_seq_empty_p (*seq))
		print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	      print_gimple_stmt (dump_file, gsi_stmt (*gsi),
				 0, TDF_SLIM);
	    }
	  gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
	  return true;
	}
    }
  else if (rcode.is_fn_code ()
	   && gimple_call_combined_fn (stmt) == rcode)
    {
      unsigned i;
      for (i = 0; i < gimple_call_num_args (stmt); ++i)
	{
	  gcc_assert (ops[i] != NULL_TREE);
	  gimple_call_set_arg (stmt, i, ops[i]);
	}
      if (i < 3)
	gcc_assert (ops[i] == NULL_TREE);
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (!inplace)
    {
      if (gimple_has_lhs (stmt))
	{
	  tree lhs = gimple_get_lhs (stmt);
	  if (!maybe_push_res_to_seq (rcode, TREE_TYPE (lhs),
				      ops, seq, lhs))
	    return false;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	    }
	  gsi_replace_with_seq_vops (gsi, *seq);
	  return true;
	}
      else
	gcc_unreachable ();
    }

  return false;
}
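
/* For illustration (hypothetical statements): when gimple_simplify
   reduces the condition of "if (a_1 == a_1)" to the INTEGER_CST 1, the
   GIMPLE_COND branch above rewrites the statement via
   gimple_cond_make_true, and with details dumping enabled this is
   recorded as a "gimple_simplified to ..." line.  */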
/* Canonicalize MEM_REFs invariant address operand after propagation.  */

static bool
maybe_canonicalize_mem_ref_addr (tree *t)
{
  bool res = false;

  if (TREE_CODE (*t) == ADDR_EXPR)
    t = &TREE_OPERAND (*t, 0);

  /* The C and C++ frontends use an ARRAY_REF for indexing with their
     generic vector extension.  The actual vector referenced is
     view-converted to an array type for this purpose.  If the index
     is constant the canonical representation in the middle-end is a
     BIT_FIELD_REF so re-write the former to the latter here.  */
  if (TREE_CODE (*t) == ARRAY_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
      && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
      && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
    {
      tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
      if (VECTOR_TYPE_P (vtype))
	{
	  tree low = array_ref_low_bound (*t);
	  if (TREE_CODE (low) == INTEGER_CST)
	    {
	      if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
		{
		  widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
					    wi::to_widest (low));
		  idx = wi::mul (idx, wi::to_widest
					(TYPE_SIZE (TREE_TYPE (*t))));
		  widest_int ext
		    = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
		  if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
		    {
		      *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
				       TREE_TYPE (*t),
				       TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
				       TYPE_SIZE (TREE_TYPE (*t)),
				       wide_int_to_tree (bitsizetype, idx));
		      res = true;
		    }
		}
	    }
	}
    }

  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
     of invariant addresses into a SSA name MEM_REF address.  */
  if (TREE_CODE (*t) == MEM_REF
      || TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (*t, 0);
      if (TREE_CODE (addr) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
	      || handled_component_p (TREE_OPERAND (addr, 0))))
	{
	  tree base;
	  HOST_WIDE_INT coffset;
	  base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
						&coffset);
	  if (!base)
	    gcc_unreachable ();

	  TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
	  TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
						  TREE_OPERAND (*t, 1),
						  size_int (coffset));
	  res = true;
	}

      gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
			   || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
    }

  /* Canonicalize back MEM_REFs to plain reference trees if the object
     accessed is a decl that has the same access semantics as the MEM_REF.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1))
      && MR_DEPENDENCE_CLIQUE (*t) == 0)
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
      if (/* Same volatile qualification.  */
	  TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
	  /* Same TBAA behavior with -fstrict-aliasing.  */
	  && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
	  && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
	      == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
	  /* Same alignment.  */
	  && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
	  /* We have to look out here to not drop a required conversion
	     from the rhs to the lhs if *t appears on the lhs or vice-versa
	     if it appears on the rhs.  Thus require strict type
	     compatibility.  */
	  && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
	{
	  *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
	  res = true;
	}
    }

  /* Canonicalize TARGET_MEM_REF in particular with respect to
     the indexes becoming constant.  */
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
	{
	  *t = tem;
	  res = true;
	}
    }

  return res;
}
/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases.  */

static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
{
  bool changed = false;
  gimple *stmt = gsi_stmt (*gsi);
  bool nowarning = gimple_no_warning_p (stmt);
  unsigned i;
  fold_defer_overflow_warnings ();

  /* First do required canonicalization of [TARGET_]MEM_REF addresses
     after propagation.
     ???  This shouldn't be done in generic folding but in the
     propagation helpers which also know whether an address was
     propagated.
     Also canonicalize operand order.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
	{
	  tree *rhs = gimple_assign_rhs1_ptr (stmt);
	  if ((REFERENCE_CLASS_P (*rhs)
	       || TREE_CODE (*rhs) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (rhs))
	    changed = true;
	  tree *lhs = gimple_assign_lhs_ptr (stmt);
	  if (REFERENCE_CLASS_P (*lhs)
	      && maybe_canonicalize_mem_ref_addr (lhs))
	    changed = true;
	}
      else
	{
	  /* Canonicalize operand order.  */
	  enum tree_code code = gimple_assign_rhs_code (stmt);
	  if (TREE_CODE_CLASS (code) == tcc_comparison
	      || commutative_tree_code (code)
	      || commutative_ternary_tree_code (code))
	    {
	      tree rhs1 = gimple_assign_rhs1 (stmt);
	      tree rhs2 = gimple_assign_rhs2 (stmt);
	      if (tree_swap_operands_p (rhs1, rhs2))
		{
		  gimple_assign_set_rhs1 (stmt, rhs2);
		  gimple_assign_set_rhs2 (stmt, rhs1);
		  if (TREE_CODE_CLASS (code) == tcc_comparison)
		    gimple_assign_set_rhs_code (stmt,
						swap_tree_comparison (code));
		  changed = true;
		}
	    }
	}
      break;
    case GIMPLE_CALL:
      {
	for (i = 0; i < gimple_call_num_args (stmt); ++i)
	  {
	    tree *arg = gimple_call_arg_ptr (stmt, i);
	    if (REFERENCE_CLASS_P (*arg)
		&& maybe_canonicalize_mem_ref_addr (arg))
	      changed = true;
	  }
	tree *lhs = gimple_call_lhs_ptr (stmt);
	if (*lhs
	    && REFERENCE_CLASS_P (*lhs)
	    && maybe_canonicalize_mem_ref_addr (lhs))
	  changed = true;
	break;
      }
    case GIMPLE_ASM:
      {
	gasm *asm_stmt = as_a <gasm *> (stmt);
	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if (REFERENCE_CLASS_P (op)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if ((REFERENCE_CLASS_P (op)
		 || TREE_CODE (op) == ADDR_EXPR)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
      }
      break;
    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree *val = gimple_debug_bind_get_value_ptr (stmt);
	  if (*val
	      && (REFERENCE_CLASS_P (*val)
		  || TREE_CODE (*val) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (val))
	    changed = true;
	}
      break;
    case GIMPLE_COND:
      {
	/* Canonicalize operand order.  */
	tree lhs = gimple_cond_lhs (stmt);
	tree rhs = gimple_cond_rhs (stmt);
	if (tree_swap_operands_p (lhs, rhs))
	  {
	    gcond *gc = as_a <gcond *> (stmt);
	    gimple_cond_set_lhs (gc, rhs);
	    gimple_cond_set_rhs (gc, lhs);
	    gimple_cond_set_code (gc,
				  swap_tree_comparison (gimple_cond_code (gc)));
	    changed = true;
	  }
      }
    default:;
    }

  /* Dispatch to pattern-based folding.  */
  if (!inplace
      || is_gimple_assign (stmt)
      || gimple_code (stmt) == GIMPLE_COND)
    {
      gimple_seq seq = NULL;
      code_helper rcode;
      tree ops[3] = {};
      if (gimple_simplify (stmt, &rcode, ops, inplace ? NULL : &seq,
			   valueize, valueize))
	{
	  if (replace_stmt_with_simplification (gsi, rcode, ops, &seq, inplace))
	    changed = true;
	  else
	    gimple_seq_discard (seq);
	}
    }

  stmt = gsi_stmt (*gsi);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	/* Try to canonicalize for boolean-typed X the comparisons
	   X == 0, X == 1, X != 0, and X != 1.  */
	if (gimple_assign_rhs_code (stmt) == EQ_EXPR
	    || gimple_assign_rhs_code (stmt) == NE_EXPR)
	  {
	    tree lhs = gimple_assign_lhs (stmt);
	    tree op1 = gimple_assign_rhs1 (stmt);
	    tree op2 = gimple_assign_rhs2 (stmt);
	    tree type = TREE_TYPE (op1);

	    /* Check whether the comparison operands are of the same boolean
	       type as the result type is.
	       Check that second operand is an integer-constant with value
	       one or zero.  */
	    if (TREE_CODE (op2) == INTEGER_CST
		&& (integer_zerop (op2) || integer_onep (op2))
		&& useless_type_conversion_p (TREE_TYPE (lhs), type))
	      {
		enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
		bool is_logical_not = false;

		/* X == 0 and X != 1 is a logical-not of X,
		   X == 1 and X != 0 is X.  */
		if ((cmp_code == EQ_EXPR && integer_zerop (op2))
		    || (cmp_code == NE_EXPR && integer_onep (op2)))
		  is_logical_not = true;

		if (is_logical_not == false)
		  gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
		/* Only for one-bit precision typed X the transformation
		   !X -> ~X is valid.  */
		else if (TYPE_PRECISION (type) == 1)
		  gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
		/* Otherwise we use !X -> X ^ 1.  */
		else
		  gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
						  build_int_cst (type, 1));
		changed = true;
		break;
	      }
	  }

	unsigned old_num_ops = gimple_num_ops (stmt);
	tree lhs = gimple_assign_lhs (stmt);
	tree new_rhs = fold_gimple_assign (gsi);
	if (new_rhs
	    && !useless_type_conversion_p (TREE_TYPE (lhs),
					   TREE_TYPE (new_rhs)))
	  new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	if (new_rhs
	    && (!inplace
		|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	break;
      }

    case GIMPLE_CALL:
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      {
	gasm *asm_stmt = as_a <gasm *> (stmt);
	size_t noutputs;
	const char **oconstraints;
	const char *constraint;
	bool allows_mem, allows_reg;

	noutputs = gimple_asm_noutputs (asm_stmt);
	oconstraints = XALLOCAVEC (const char *, noutputs);

	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    oconstraints[i]
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, true)) != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    constraint
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    parse_input_constraint (&constraint, 0, 0, noutputs, 0,
				    oconstraints, &allows_mem, &allows_reg);
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, !allows_reg && allows_mem))
		   != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
      }
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree val = gimple_debug_bind_get_value (stmt);
	  if (val
	      && REFERENCE_CLASS_P (val))
	    {
	      tree tem = maybe_fold_reference (val, false);
	      if (tem)
		{
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	  else if (val
		   && TREE_CODE (val) == ADDR_EXPR)
	    {
	      tree ref = TREE_OPERAND (val, 0);
	      tree tem = maybe_fold_reference (ref, false);
	      if (tem)
		{
		  tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	}
      break;

    case GIMPLE_RETURN:
      {
	greturn *ret_stmt = as_a <greturn *> (stmt);
	tree ret = gimple_return_retval (ret_stmt);

	if (ret && TREE_CODE (ret) == SSA_NAME && valueize)
	  {
	    tree val = valueize (ret);
	    if (val && val != ret
		&& may_propagate_copy (ret, val))
	      {
		gimple_return_set_retval (ret_stmt, val);
		changed = true;
	      }
	  }
      }
      break;

    default:;
    }

  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  */
  if (gimple_has_lhs (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
	{
	  tree new_lhs = maybe_fold_reference (lhs, true);
	  if (new_lhs)
	    {
	      gimple_set_lhs (stmt, new_lhs);
	      changed = true;
	    }
	}
    }

  fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);

  return changed;
}
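
/* For illustration (hypothetical input): constant propagation can leave
   a statement such as "*&x = 0" behind; fold_stmt_1 folds the *& via
   maybe_fold_reference so the statement becomes the plain "x = 0".  */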
/* Valueization callback that ends up not following SSA edges.  */

tree
no_follow_ssa_edges (tree)
{
  return NULL_TREE;
}

/* Valueization callback that ends up following single-use SSA edges only.  */

tree
follow_single_use_edges (tree val)
{
  if (TREE_CODE (val) == SSA_NAME
      && !has_single_use (val))
    return NULL_TREE;
  return val;
}

/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.
   The statement pointed to by GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
}

bool
fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
{
  return fold_stmt_1 (gsi, false, valueize);
}
/* Perform the minimal folding on statement *GSI.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.
   The statement *GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt_inplace (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
  gcc_assert (gsi_stmt (*gsi) == stmt);
  return changed;
}
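
/* A minimal usage sketch (not part of this file): a pass doing only
   in-place folding over a function might iterate

     basic_block bb;
     FOR_EACH_BB_FN (bb, cfun)
       for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
	    !gsi_end_p (gsi); gsi_next (&gsi))
	 fold_stmt_inplace (&gsi);

   relying on the assertion above that the iterator still points at the
   same statement afterwards.  */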
/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
   if EXPR is null or we don't know how.
   If non-null, the result always has boolean type.  */

static tree
canonicalize_bool (tree expr, bool invert)
{
  if (!expr)
    return NULL_TREE;
  if (invert)
    {
      if (integer_nonzerop (expr))
	return boolean_false_node;
      else if (integer_zerop (expr))
	return boolean_true_node;
      else if (TREE_CODE (expr) == SSA_NAME)
	return fold_build2 (EQ_EXPR, boolean_type_node, expr,
			    build_int_cst (TREE_TYPE (expr), 0));
      else if (COMPARISON_CLASS_P (expr))
	return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
			    boolean_type_node,
			    TREE_OPERAND (expr, 0),
			    TREE_OPERAND (expr, 1));
      else
	return NULL_TREE;
    }
  else
    {
      if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
	return expr;
      if (integer_nonzerop (expr))
	return boolean_true_node;
      else if (integer_zerop (expr))
	return boolean_false_node;
      else if (TREE_CODE (expr) == SSA_NAME)
	return fold_build2 (NE_EXPR, boolean_type_node, expr,
			    build_int_cst (TREE_TYPE (expr), 0));
      else if (COMPARISON_CLASS_P (expr))
	return fold_build2 (TREE_CODE (expr),
			    boolean_type_node,
			    TREE_OPERAND (expr, 0),
			    TREE_OPERAND (expr, 1));
      else
	return NULL_TREE;
    }
}
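
/* For example: canonicalize_bool (a_1 < b_2, true) yields the inverted
   comparison "a_1 >= b_2", and canonicalize_bool (x_3, false) for a
   non-boolean SSA name x_3 yields "x_3 != 0", which has boolean type as
   promised above.  */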
/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  */

static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
			const_tree op1, const_tree op2)
{
  gimple *s;

  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      if (operand_equal_p (expr, op1, 0))
	return ((code == NE_EXPR && integer_zerop (op2))
		|| (code == EQ_EXPR && integer_nonzerop (op2)));
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
	  && gimple_assign_rhs_code (s) == code
	  && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
	  && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
	return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
	{
	  enum tree_code c = gimple_assign_rhs_code (s);
	  if ((c == NE_EXPR && integer_zerop (op2))
	      || (c == EQ_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr, c,
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	  if ((c == EQ_EXPR && integer_zerop (op2))
	      || (c == NE_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr,
					   invert_tree_comparison (c, false),
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	}
    }
  return false;
}
/* Check to see if two boolean expressions OP1 and OP2 are logically
   equivalent.  */

static bool
same_bool_result_p (const_tree op1, const_tree op2)
{
  /* Simple cases first.  */
  if (operand_equal_p (op1, op2, 0))
    return true;

  /* Check the cases where at least one of the operands is a comparison.
     These are a bit smarter than operand_equal_p in that they apply some
     identities on SSA_NAMEs.  */
  if (COMPARISON_CLASS_P (op2)
      && same_bool_comparison_p (op1, TREE_CODE (op2),
				 TREE_OPERAND (op2, 0),
				 TREE_OPERAND (op2, 1)))
    return true;
  if (COMPARISON_CLASS_P (op1)
      && same_bool_comparison_p (op2, TREE_CODE (op1),
				 TREE_OPERAND (op1, 0),
				 TREE_OPERAND (op1, 1)))
    return true;

  /* Default case.  */
  return false;
}
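
/* For example: with the hypothetical definition "t_1 = x_2 < y_3",
   same_bool_result_p (t_1, x_2 < y_3) returns true because
   same_bool_comparison_p looks through the SSA definition of t_1.  */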
/* Forward declarations for some mutually recursive functions.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison (tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison_1 (gimple *stmt,
			   enum tree_code code2, tree op2a, tree op2b);
static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison (tree var, bool invert,
			enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison_1 (gimple *stmt,
			  enum tree_code code2, tree op2a, tree op2b);
/* Helper function for and_comparisons_1:  try to simplify the AND of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of the VAR before doing the AND.
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison (tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b)
{
  tree t;
  gimple *stmt = SSA_NAME_DEF_STMT (var);

  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.  */
  if (invert)
    t = or_var_with_comparison_1 (stmt,
				  invert_tree_comparison (code2, false),
				  op2a, op2b);
  else
    t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
  return canonicalize_bool (t, invert);
}
/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison_1 (gimple *stmt,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_false_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (innercode,
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt),
				  code2, op2a, op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple *s;
      tree t;
      tree partial = NULL_TREE;
      bool is_and = (innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
	 inner1 AND (inner1 OR inner2) => inner1
	 !inner1 AND (inner1 AND inner2) => false
	 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
         Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
	 Compute the first partial result, (inner1 AND (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (t AND inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner2;
	      else if (integer_zerop (t))
		return boolean_false_node;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner2 AND (op2a code2 op2b)))  */
	  else if (integer_onep (t))
	    return boolean_true_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (inner1 AND t)  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner1;
	      else if (integer_zerop (t))
		return boolean_false_node;
	      /* If both are the same, we can apply the identity
		 (x AND x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner1 AND (op2a code2 op2b)))
	     => (t OR partial)  */
	  else
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed OR expression.  The
		     interesting case is when at least one is false.
		     Or, if both are the same, we can apply the identity
		     (x OR x) == x.  */
		  if (integer_zerop (partial))
		    return t;
		  else if (integer_zerop (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
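
/* For illustration (hypothetical SSA names): if var_1 is defined as
   "var_1 = t_2 & t_3" with "t_2 = x_4 < 10", then ANDing var_1 with
   x_4 < 5 first folds (x_4 < 10) AND (x_4 < 5) to the partial result
   x_4 < 5 via maybe_fold_and_comparisons; since that is not a constant
   it is saved and possibly combined with the t_3 leg as above.  */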
/* Try to simplify the AND of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) AND (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a == op1b), we should either be able to
	 return that or FALSE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	      else
		return boolean_false_node;
	    }
	}
      /* Likewise if the second comparison is an == comparison.  */
      else if (code2 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	      else
		return boolean_false_node;
	    }
	}

      /* Same business with inequality tests.  */
      else if (code1 == NE_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp != 0); break;
	    case NE_EXPR: val = (cmp == 0); break;
	    case LT_EXPR: val = (cmp >= 0); break;
	    case GT_EXPR: val = (cmp <= 0); break;
	    case LE_EXPR: val = (cmp > 0); break;
	    case GE_EXPR: val = (cmp < 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == NE_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp <= 0); break;
	    case GT_EXPR: val = (cmp >= 0); break;
	    case LE_EXPR: val = (cmp < 0); break;
	    case GE_EXPR: val = (cmp > 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Choose the more restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Likewise choose the more restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Check for singleton ranges.  */
      else if (cmp == 0
	       && ((code1 == LE_EXPR && code2 == GE_EXPR)
		   || (code1 == GE_EXPR && code2 == LE_EXPR)))
	return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);

      /* Check for disjoint ranges.  */
      else if (cmp <= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_false_node;
      else if (cmp >= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_false_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple *stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return and_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ANDed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
			{
			  if (!result)
			    result = boolean_false_node;
			  else if (!integer_zerop (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = and_var_with_comparison (arg, invert, code2,
						      op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}
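
/* For example (with a hypothetical SSA name x_1):
   (x_1 <= 4) AND (x_1 >= 4) collapses to the singleton test x_1 == 4,
   (x_1 < 3) AND (x_1 > 7) is a disjoint range and folds to false, and
   (x_1 < 3) AND (x_1 < 7) keeps the more restrictive x_1 < 3.  */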
/* Try to simplify the AND of two comparisons, specified by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be simplified to a single expression (without requiring
   introducing more SSA variables to hold intermediate values),
   return the resulting tree.  Otherwise return NULL_TREE.
   If the result expression is non-null, it has boolean type.  */

tree
maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
			    enum tree_code code2, tree op2a, tree op2b)
{
  tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
  if (t)
    return t;
  else
    return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
}
/* Helper function for or_comparisons_1:  try to simplify the OR of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of VAR before doing the OR.
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison (tree var, bool invert,
			enum tree_code code2, tree op2a, tree op2b)
{
  tree t;
  gimple *stmt = SSA_NAME_DEF_STMT (var);

  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.  */
  if (invert)
    t = and_var_with_comparison_1 (stmt,
				   invert_tree_comparison (code2, false),
				   op2a, op2b);
  else
    t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
  return canonicalize_bool (t, invert);
}
/* Try to simplify the OR of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison_1 (gimple *stmt,
			  enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var OR (var != 0)) => true .  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_true_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = or_comparisons_1 (innercode,
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 code2, op2a, op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple *s;
      tree t;
      tree partial = NULL_TREE;
      bool is_or = (innercode == BIT_IOR_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
	 inner1 OR (inner1 AND inner2) => inner1
	 !inner1 OR (inner1 OR inner2) => true
	 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
	 Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_or ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_or ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the OR across the inner tests.
	 Compute the first partial result, (inner1 OR (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (t OR inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_or)
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (integer_zerop (t))
		return inner2;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner2 OR (op2a code op2b)))  */
	  else if (integer_zerop (t))
	    return boolean_false_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (inner1 OR t)
	     => (t OR partial)  */
	  if (is_or)
	    {
	      if (integer_zerop (t))
		return inner1;
	      else if (integer_onep (t))
		return boolean_true_node;
	      /* If both are the same, we can apply the identity
		 (x OR x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner1 OR (op2a code2 op2b)))
	     => (t AND partial)  */
	  else
	    {
	      if (integer_zerop (t))
		return boolean_false_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed AND expression.  The
		     interesting case is when at least one is true.
		     Or, if both are the same, we can apply the identity
		     (x AND x) == x.  */
		  if (integer_onep (partial))
		    return t;
		  else if (integer_onep (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
/* Try to simplify the OR of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) OR (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a != op1b), we should either be able to
	 return that or TRUE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	    }
	}
      /* Likewise if the second comparison is a != comparison.  */
      else if (code2 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	    }
	}

      /* See if an equality test is redundant with the other comparison.  */
      else if (code1 == EQ_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == EQ_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Choose the less restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Likewise choose the less restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Check for singleton ranges.  */
      else if (cmp == 0
	       && ((code1 == LT_EXPR && code2 == GT_EXPR)
		   || (code1 == GT_EXPR && code2 == LT_EXPR)))
	return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);

      /* Check for less/greater pairs that don't restrict the range at all.  */
      else if (cmp >= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_true_node;
      else if (cmp <= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_true_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple *stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return or_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ORed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
			{
			  if (!result)
			    result = boolean_true_node;
			  else if (!integer_onep (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = or_var_with_comparison (arg, invert, code2,
						     op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}
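
/* For example (with a hypothetical SSA name x_1):
   (x_1 < 4) OR (x_1 > 4) becomes the singleton exclusion x_1 != 4,
   (x_1 < 7) OR (x_1 > 3) covers every value and folds to true, and
   (x_1 < 3) OR (x_1 < 7) keeps the less restrictive x_1 < 7.  */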
/* Try to simplify the OR of two comparisons, specified by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be simplified to a single expression (without requiring
   introducing more SSA variables to hold intermediate values),
   return the resulting tree.  Otherwise return NULL_TREE.
   If the result expression is non-null, it has boolean type.  */

tree
maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
  if (t)
    return t;
  else
    return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
}
/* Fold STMT to a constant using VALUEIZE to valueize SSA names.

   Either NULL_TREE, a simplified but non-constant or a constant
   is returned.

   ???  This should go into a gimple-fold-inline.h file to be eventually
   privatized with the single valueize function used in the various TUs
   to avoid the indirect function call overhead.  */

tree
gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
				tree (*gvalueize) (tree))
{
  code_helper rcode;
  tree ops[3] = {};

  /* ???  The SSA propagators do not correctly deal with following SSA use-def
     edges if there are intermediate VARYING defs.  For this reason
     do not follow SSA edges here even though SCCVN can technically
     just deal fine with that.  */
  if (gimple_simplify (stmt, &rcode, ops, NULL, gvalueize, valueize))
    {
      tree res = NULL_TREE;
      if (gimple_simplified_result_is_gimple_val (rcode, ops))
	res = ops[0];
      else if (mprts_hook)
	res = mprts_hook (rcode, gimple_expr_type (stmt), ops);
      if (res)
	{
	  if (dump_file && dump_flags & TDF_DETAILS)
	    {
	      fprintf (dump_file, "Match-and-simplified ");
	      print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, res);
	      fprintf (dump_file, "\n");
	    }
	  return res;
	}
    }

  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	enum tree_code subcode = gimple_assign_rhs_code (stmt);

	switch (get_gimple_rhs_class (subcode))
	  {
	  case GIMPLE_SINGLE_RHS:
	    {
	      tree rhs = gimple_assign_rhs1 (stmt);
	      enum tree_code_class kind = TREE_CODE_CLASS (subcode);

	      if (TREE_CODE (rhs) == SSA_NAME)
		{
		  /* If the RHS is an SSA_NAME, return its known constant value,
		     if any.  */
		  return (*valueize) (rhs);
		}
	      /* Handle propagating invariant addresses into address
		 operations.  */
	      else if (TREE_CODE (rhs) == ADDR_EXPR
		       && !is_gimple_min_invariant (rhs))
		{
		  HOST_WIDE_INT offset = 0;
		  tree base;
		  base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
							  &offset,
							  valueize);
		  if (base
		      && (CONSTANT_CLASS_P (base)
			  || decl_address_invariant_p (base)))
		    return build_invariant_address (TREE_TYPE (rhs),
						    base, offset);
		}
	      else if (TREE_CODE (rhs) == CONSTRUCTOR
		       && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		       && (CONSTRUCTOR_NELTS (rhs)
			   == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
		{
		  unsigned i, nelts;
		  tree val;

		  nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs));
		  auto_vec<tree, 32> vec (nelts);
		  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
		    {
		      val = (*valueize) (val);
		      if (TREE_CODE (val) == INTEGER_CST
			  || TREE_CODE (val) == REAL_CST
			  || TREE_CODE (val) == FIXED_CST)
			vec.quick_push (val);
		      else
			return NULL_TREE;
		    }

		  return build_vector (TREE_TYPE (rhs), vec);
		}
	      if (subcode == OBJ_TYPE_REF)
		{
		  tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
		  /* If callee is constant, we can fold away the wrapper.  */
		  if (is_gimple_min_invariant (val))
		    return val;
		}

	      if (kind == tcc_reference)
		{
		  if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
		       || TREE_CODE (rhs) == REALPART_EXPR
		       || TREE_CODE (rhs) == IMAGPART_EXPR)
		      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      return fold_unary_loc (EXPR_LOCATION (rhs),
					     TREE_CODE (rhs),
					     TREE_TYPE (rhs), val);
		    }
		  else if (TREE_CODE (rhs) == BIT_FIELD_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      return fold_ternary_loc (EXPR_LOCATION (rhs),
					       TREE_CODE (rhs),
					       TREE_TYPE (rhs), val,
					       TREE_OPERAND (rhs, 1),
					       TREE_OPERAND (rhs, 2));
		    }
		  else if (TREE_CODE (rhs) == MEM_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      if (TREE_CODE (val) == ADDR_EXPR
			  && is_gimple_min_invariant (val))
			{
			  tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
						  unshare_expr (val),
						  TREE_OPERAND (rhs, 1));
			  if (tem)
			    rhs = tem;
			}
		    }
		  return fold_const_aggregate_ref_1 (rhs, valueize);
		}
	      else if (kind == tcc_declaration)
		return get_symbol_constant_value (rhs);
	      return rhs;
	    }

	  case GIMPLE_UNARY_RHS:
	    return NULL_TREE;

	  case GIMPLE_BINARY_RHS:
	    /* Translate &x + CST into an invariant form suitable for
	       further propagation.  */
	    if (subcode == POINTER_PLUS_EXPR)
	      {
		tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
		tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
		if (TREE_CODE (op0) == ADDR_EXPR
		    && TREE_CODE (op1) == INTEGER_CST)
		  {
		    tree off = fold_convert (ptr_type_node, op1);
		    return build_fold_addr_expr_loc
			(loc,
			 fold_build2 (MEM_REF,
				      TREE_TYPE (TREE_TYPE (op0)),
				      unshare_expr (op0), off));
		  }
	      }
	    /* Canonicalize bool != 0 and bool == 0 appearing after
	       valueization.  While gimple_simplify handles this
	       it can get confused by the ~X == 1 -> X == 0 transform
	       which we can't reduce to a SSA name or a constant
	       (and we have no way to tell gimple_simplify to not
	       consider those transforms in the first place).  */
	    else if (subcode == EQ_EXPR
		     || subcode == NE_EXPR)
	      {
		tree lhs = gimple_assign_lhs (stmt);
		tree op0 = gimple_assign_rhs1 (stmt);
		if (useless_type_conversion_p (TREE_TYPE (lhs),
					       TREE_TYPE (op0)))
		  {
		    tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
		    op0 = (*valueize) (op0);
		    if (TREE_CODE (op0) == INTEGER_CST)
		      std::swap (op0, op1);
		    if (TREE_CODE (op1) == INTEGER_CST
			&& ((subcode == NE_EXPR && integer_zerop (op1))
			    || (subcode == EQ_EXPR && integer_onep (op1))))
		      return op0;
		  }
	      }
	    return NULL_TREE;

	  case GIMPLE_TERNARY_RHS:
	    {
	      /* Handle ternary operators that can appear in GIMPLE form.  */
	      tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
	      tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
	      tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
	      return fold_ternary_loc (loc, subcode,
				       gimple_expr_type (stmt), op0, op1, op2);
	    }

	  default:
	    gcc_unreachable ();
	  }
      }

    case GIMPLE_CALL:
      {
	tree fn;
	gcall *call_stmt = as_a <gcall *> (stmt);

	if (gimple_call_internal_p (stmt))
	  {
	    enum tree_code subcode = ERROR_MARK;
	    switch (gimple_call_internal_fn (stmt))
	      {
	      case IFN_UBSAN_CHECK_ADD:
		subcode = PLUS_EXPR;
		break;
	      case IFN_UBSAN_CHECK_SUB:
		subcode = MINUS_EXPR;
		break;
	      case IFN_UBSAN_CHECK_MUL:
		subcode = MULT_EXPR;
		break;
	      case IFN_BUILTIN_EXPECT:
		{
		  tree arg0 = gimple_call_arg (stmt, 0);
		  tree op0 = (*valueize) (arg0);
		  if (TREE_CODE (op0) == INTEGER_CST)
		    return op0;
		  return NULL_TREE;
		}
	      default:
		return NULL_TREE;
	      }
	    tree arg0 = gimple_call_arg (stmt, 0);
	    tree arg1 = gimple_call_arg (stmt, 1);
	    tree op0 = (*valueize) (arg0);
	    tree op1 = (*valueize) (arg1);

	    if (TREE_CODE (op0) != INTEGER_CST
		|| TREE_CODE (op1) != INTEGER_CST)
	      {
		switch (subcode)
		  {
		  case MULT_EXPR:
		    /* x * 0 = 0 * x = 0 without overflow.  */
		    if (integer_zerop (op0) || integer_zerop (op1))
		      return build_zero_cst (TREE_TYPE (arg0));
		    break;
		  case MINUS_EXPR:
		    /* y - y = 0 without overflow.  */
		    if (operand_equal_p (op0, op1, 0))
		      return build_zero_cst (TREE_TYPE (arg0));
		    break;
		  default:
		    break;
		  }
		return NULL_TREE;
	      }

	    tree res
	      = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
	    if (res
		&& TREE_CODE (res) == INTEGER_CST
		&& !TREE_OVERFLOW (res))
	      return res;
	    return NULL_TREE;
	  }

	fn = (*valueize) (gimple_call_fn (stmt));
	if (TREE_CODE (fn) == ADDR_EXPR
	    && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
	    && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
	    && gimple_builtin_call_types_compatible_p (stmt,
						       TREE_OPERAND (fn, 0)))
	  {
	    tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
	    tree retval;
	    unsigned i;
	    for (i = 0; i < gimple_call_num_args (stmt); ++i)
	      args[i] = (*valueize) (gimple_call_arg (stmt, i));
	    retval = fold_builtin_call_array (loc,
					      gimple_call_return_type (call_stmt),
					      fn, gimple_call_num_args (stmt),
					      args);
	    if (retval)
	      {
		/* fold_call_expr wraps the result inside a NOP_EXPR.  */
		STRIP_NOPS (retval);
		retval = fold_convert (gimple_call_return_type (call_stmt),
				       retval);
	      }
	    return retval;
	  }
	return NULL_TREE;
      }

    default:
      return NULL_TREE;
    }
}
/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
   Returns NULL_TREE if folding to a constant is not possible, otherwise
   returns a constant according to is_gimple_min_invariant.  */

tree
gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
{
  tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
  if (res && is_gimple_min_invariant (res))
    return res;
  return NULL_TREE;
}
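
/* For illustration (hypothetical statement and valueization): folding
   "_2 = _1 + 3" with a valueize hook mapping _1 to 4 yields the
   INTEGER_CST 7, which is returned because it satisfies
   is_gimple_min_invariant; a simplified but non-constant result from
   gimple_fold_stmt_to_constant_1 is dropped here.  */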
/* The following set of functions are supposed to fold references using
   their constant initializers.  */

/* See if we can find a constructor defining the value of BASE.
   When we know the constructor with constant offset (such as
   base is array[40] and we do know the constructor of the array),
   then BIT_OFFSET is adjusted accordingly.

   As a special case, return error_mark_node when the constructor
   is not explicitly available, but it is known to be zero,
   such as 'static const int a;'.  */
static tree
get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
		      tree (*valueize) (tree))
{
  HOST_WIDE_INT bit_offset2, size, max_size;
  bool reverse;

  if (TREE_CODE (base) == MEM_REF)
    {
      if (!integer_zerop (TREE_OPERAND (base, 1)))
	{
	  if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
	    return NULL_TREE;
	  *bit_offset += (mem_ref_offset (base).to_short_addr ()
			  * BITS_PER_UNIT);
	}

      if (valueize
	  && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
	base = valueize (TREE_OPERAND (base, 0));
      if (!base || TREE_CODE (base) != ADDR_EXPR)
	return NULL_TREE;
      base = TREE_OPERAND (base, 0);
    }
  else if (valueize
	   && TREE_CODE (base) == SSA_NAME)
    base = valueize (base);

  /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
     DECL_INITIAL.  If BASE is a nested reference into another
     ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
     the inner reference.  */
  switch (TREE_CODE (base))
    {
    case VAR_DECL:
    case CONST_DECL:
      {
	tree init = ctor_for_folding (base);

	/* Our semantic is exact opposite of ctor_for_folding;
	   NULL means unknown, while error_mark_node is 0.  */
	if (init == error_mark_node)
	  return NULL_TREE;
	if (!init)
	  return error_mark_node;
	return init;
      }

    case VIEW_CONVERT_EXPR:
      return get_base_constructor (TREE_OPERAND (base, 0),
				   bit_offset, valueize);

    case ARRAY_REF:
    case COMPONENT_REF:
      base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
				      &reverse);
      if (max_size == -1 || size != max_size)
	return NULL_TREE;
      *bit_offset += bit_offset2;
      return get_base_constructor (base, bit_offset, valueize);

    case STRING_CST:
    case CONSTRUCTOR:
      return base;

    default:
      if (CONSTANT_CLASS_P (base))
	return base;

      return NULL_TREE;
    }
}
/* CTOR is CONSTRUCTOR of an array type.  Fold a reference of type TYPE
   and size SIZE to the memory at bit OFFSET.  */

static tree
fold_array_ctor_reference (tree type, tree ctor,
			   unsigned HOST_WIDE_INT offset,
			   unsigned HOST_WIDE_INT size,
			   tree from_decl)
{
  offset_int low_bound;
  offset_int elt_size;
  offset_int access_index;
  tree domain_type = NULL_TREE;
  HOST_WIDE_INT inner_offset;

  /* Compute low bound and elt size.  */
  if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
    domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    {
      /* Static constructors for variably sized objects make no sense.  */
      if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
	return NULL_TREE;
      low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
    }
  else
    low_bound = 0;
  /* Static constructors for variably sized objects make no sense.  */
  if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
    return NULL_TREE;
  elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));

  /* We can handle only constantly sized accesses that are known to not
     be larger than the size of the array element.  */
  if (!TYPE_SIZE_UNIT (type)
      || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
      || elt_size < wi::to_offset (TYPE_SIZE_UNIT (type))
      || elt_size == 0)
    return NULL_TREE;

  /* Compute the array index we look for.  */
  access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
				 elt_size);
  access_index += low_bound;

  /* And offset within the access.  */
  inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);

  /* See if the array element is large enough to span the whole access.
     We do not care to fold accesses spanning multiple array indexes.  */
  if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
    return NULL_TREE;
  if (tree val = get_array_ctor_element_at_index (ctor, access_index))
    return fold_ctor_reference (type, val, inner_offset, size, from_decl);

  /* When memory is not explicitly mentioned in constructor,
     it is 0 (or out of range).  */
  return build_zero_cst (type);
}
/* CTOR is CONSTRUCTOR of an aggregate or vector.  Fold a reference of
   type TYPE and size SIZE to the memory at bit OFFSET.  */

static tree
fold_nonarray_ctor_reference (tree type, tree ctor,
			      unsigned HOST_WIDE_INT offset,
			      unsigned HOST_WIDE_INT size,
			      tree from_decl)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
			    cval)
    {
      tree byte_offset = DECL_FIELD_OFFSET (cfield);
      tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
      tree field_size = DECL_SIZE (cfield);
      offset_int bitoffset;
      offset_int bitoffset_end, access_end;

      /* Variable sized objects in static constructors make no sense,
	 but field_size can be NULL for flexible array members.  */
      gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
		  && TREE_CODE (byte_offset) == INTEGER_CST
		  && (field_size != NULL_TREE
		      ? TREE_CODE (field_size) == INTEGER_CST
		      : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));

      /* Compute bit offset of the field.  */
      bitoffset = (wi::to_offset (field_offset)
		   + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
      /* Compute bit offset where the field ends.  */
      if (field_size != NULL_TREE)
	bitoffset_end = bitoffset + wi::to_offset (field_size);
      else
	bitoffset_end = 0;

      access_end = offset_int (offset) + size;

      /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
	 [BITOFFSET, BITOFFSET_END)?  */
      if (wi::cmps (access_end, bitoffset) > 0
	  && (field_size == NULL_TREE
	      || wi::lts_p (offset, bitoffset_end)))
	{
	  offset_int inner_offset = offset_int (offset) - bitoffset;
	  /* We do have overlap.  Now see if the field is large enough to
	     cover the access.  Give up for accesses spanning multiple
	     fields.  */
	  if (wi::cmps (access_end, bitoffset_end) > 0)
	    return NULL_TREE;
	  if (offset < bitoffset)
	    return NULL_TREE;
	  return fold_ctor_reference (type, cval,
				      inner_offset.to_uhwi (), size,
				      from_decl);
	}
    }
  /* When memory is not explicitly mentioned in constructor, it is 0.  */
  return build_zero_cst (type);
}
/* CTOR is value initializing memory.  Fold a reference of type TYPE and
   size SIZE to the memory at bit OFFSET.  */

tree
fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
		     unsigned HOST_WIDE_INT size, tree from_decl)
{
  tree ret;

  /* We found the field with exact match.  */
  if (useless_type_conversion_p (type, TREE_TYPE (ctor))
      && !offset)
    return canonicalize_constructor_val (unshare_expr (ctor), from_decl);

  /* We are at the end of walk, see if we can view convert the
     result.  */
  if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
      /* VIEW_CONVERT_EXPR is defined only for matching sizes.  */
      && !compare_tree_int (TYPE_SIZE (type), size)
      && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
    {
      ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
      if (ret)
	{
	  ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
	  if (ret)
	    STRIP_USELESS_TYPE_CONVERSION (ret);
	}
      return ret;
    }
  /* For constants and byte-aligned/sized reads try to go through
     native_encode/interpret.  */
  if (CONSTANT_CLASS_P (ctor)
      && BITS_PER_UNIT == 8
      && offset % BITS_PER_UNIT == 0
      && size % BITS_PER_UNIT == 0
      && size <= MAX_BITSIZE_MODE_ANY_MODE)
    {
      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
      int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
				    offset / BITS_PER_UNIT);
      if (len > 0)
	return native_interpret_expr (type, buf, len);
    }
  if (TREE_CODE (ctor) == CONSTRUCTOR)
    {
      if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
	  || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
	return fold_array_ctor_reference (type, ctor, offset, size,
					  from_decl);
      else
	return fold_nonarray_ctor_reference (type, ctor, offset, size,
					     from_decl);
    }

  return NULL_TREE;
}
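
/* Illustrative sketch (hypothetical helper, assuming 8-bit units and a
   32-bit integer_type_node): for
     static const int a[4] = { 10, 20, 30, 40 };
   a 32-bit read at bit offset 64 from A's CONSTRUCTOR resolves to the
   element a[2], i.e. the constant 30.  */

static tree
fold_third_int_element_example (tree ctor)
{
  /* TYPE is the type of the access; OFFSET and SIZE are in bits.  */
  return fold_ctor_reference (integer_type_node, ctor, 64, 32, NULL_TREE);
}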
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates, valueizing SSA
   names using VALUEIZE.  Return NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
{
  tree ctor, idx, base;
  HOST_WIDE_INT offset, size, max_size;
  tree tem;
  bool reverse;

  if (TREE_THIS_VOLATILE (t))
    return NULL_TREE;

  if (DECL_P (t))
    return get_symbol_constant_value (t);

  tem = fold_read_from_constant_string (t);
  if (tem)
    return tem;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Constant indexes are handled well by get_base_constructor.
	 Only special case variable offsets.
	 FIXME: This code can't handle nested references with variable indexes
	 (they will be handled only by iteration of ccp).  Perhaps we can bring
	 get_ref_base_and_extent here and make it use a valueize callback.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
	  && valueize
	  && (idx = (*valueize) (TREE_OPERAND (t, 1)))
	  && TREE_CODE (idx) == INTEGER_CST)
	{
	  tree low_bound, unit_size;

	  /* If the resulting bit-offset is constant, track it.  */
	  if ((low_bound = array_ref_low_bound (t),
	       TREE_CODE (low_bound) == INTEGER_CST)
	      && (unit_size = array_ref_element_size (t),
		  tree_fits_uhwi_p (unit_size)))
	    {
	      offset_int woffset
		= wi::sext (wi::to_offset (idx) - wi::to_offset (low_bound),
			    TYPE_PRECISION (TREE_TYPE (idx)));

	      if (wi::fits_shwi_p (woffset))
		{
		  offset = woffset.to_shwi ();
		  /* TODO: This code seems wrong, multiply then check
		     to see if it fits.  */
		  offset *= tree_to_uhwi (unit_size);
		  offset *= BITS_PER_UNIT;

		  base = TREE_OPERAND (t, 0);
		  ctor = get_base_constructor (base, &offset, valueize);
		  /* Empty constructor.  Always fold to 0.  */
		  if (ctor == error_mark_node)
		    return build_zero_cst (TREE_TYPE (t));
		  /* Out-of-bounds array access.  Value is undefined,
		     but don't fold.  */
		  if (offset < 0)
		    return NULL_TREE;
		  /* We cannot determine the ctor.  */
		  if (!ctor)
		    return NULL_TREE;
		  return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
					      tree_to_uhwi (unit_size)
					      * BITS_PER_UNIT,
					      base);
		}
	    }
	}
      /* Fallthru.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case TARGET_MEM_REF:
    case MEM_REF:
      base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
      ctor = get_base_constructor (base, &offset, valueize);

      /* Empty constructor.  Always fold to 0.  */
      if (ctor == error_mark_node)
	return build_zero_cst (TREE_TYPE (t));
      /* We do not know the precise address.  */
      if (max_size == -1 || max_size != size)
	return NULL_TREE;
      /* We cannot determine the ctor.  */
      if (!ctor)
	return NULL_TREE;

      /* Out-of-bounds array access.  Value is undefined, but don't fold.  */
      if (offset < 0)
	return NULL_TREE;

      return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
				  base);

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
	if (c && TREE_CODE (c) == COMPLEX_CST)
	  return fold_build1_loc (EXPR_LOCATION (t),
				  TREE_CODE (t), TREE_TYPE (t), c);
	break;
      }

    default:
      break;
    }

  return NULL_TREE;
}

/* Like fold_const_aggregate_ref_1, but without valueizing SSA names.  */

tree
fold_const_aggregate_ref (tree t)
{
  return fold_const_aggregate_ref_1 (t, NULL);
}
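
/* Illustrative sketch (hypothetical helper): folding the RHS of a load
   such as  x_1 = a[2];  where A is a read-only variable with a known
   initializer.  Without a valueize callback only constant indexes are
   handled.  */

static tree
fold_load_from_const_aggregate_example (gimple *stmt)
{
  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;
  return fold_const_aggregate_ref (gimple_assign_rhs1 (stmt));
}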
/* Lookup a virtual method with index TOKEN in a virtual table V
   at OFFSET.
   Set CAN_REFER if non-NULL to false if the method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by a non-C++-produced symbol).  Otherwise just return NULL in that case.  */

tree
gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
				   tree v,
				   unsigned HOST_WIDE_INT offset,
				   bool *can_refer)
{
  tree vtable = v, init, fn;
  unsigned HOST_WIDE_INT size;
  unsigned HOST_WIDE_INT elt_size, access_index;
  tree domain_type;

  if (can_refer)
    *can_refer = true;

  /* First of all double-check that we have a virtual table.  */
  if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
    {
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }

  init = ctor_for_folding (v);

  /* The virtual tables should always be born with constructors
     and we always should assume that they are available for
     folding.  At the moment we do not stream them in all cases,
     but it should never happen that the ctor seems unreachable.  */
  gcc_assert (init);
  if (init == error_mark_node)
    {
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }
  gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
  size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
  offset *= BITS_PER_UNIT;
  offset += token * size;

  /* Lookup the value in the constructor that is assumed to be an array.
     This is equivalent to
     fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
			       offset, size, NULL);
     but in constant time.  We expect that the frontend produced a simple
     array without indexed initializers.  */

  gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
  domain_type = TYPE_DOMAIN (TREE_TYPE (init));
  gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
  elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));

  access_index = offset / BITS_PER_UNIT / elt_size;
  gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);

  /* This code makes an assumption that there are no
     indexed fields produced by the C++ FE, so we can directly index
     the array.  */
  if (access_index < CONSTRUCTOR_NELTS (init))
    {
      fn = CONSTRUCTOR_ELT (init, access_index)->value;
      gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
      STRIP_NOPS (fn);
    }
  else
    fn = NULL;

  /* For a type-inconsistent program we may end up looking up a virtual
     method in a virtual table that does not contain TOKEN entries.  We may
     overrun the virtual table and pick up a constant or RTTI info pointer.
     In any case the call is undefined.  */
  if (!fn
      || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
      || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
    fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
  else
    {
      fn = TREE_OPERAND (fn, 0);

      /* When the cgraph node is missing and the function is not public,
	 we cannot devirtualize.  This can happen in WHOPR when the actual
	 method ends up in another partition, because we found the
	 devirtualization possibility too late.  */
      if (!can_refer_decl_in_current_unit_p (fn, vtable))
	{
	  if (can_refer)
	    {
	      *can_refer = false;
	      return fn;
	    }
	  return NULL_TREE;
	}
    }

  /* Make sure we create a cgraph node for functions we'll reference.
     They can be non-existent if the reference comes from an entry
     of an external vtable for example.  */
  cgraph_node::get_create (fn);

  return fn;
}
/* Return a declaration of a function which an OBJ_TYPE_REF references.  TOKEN
   is the integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
   KNOWN_BINFO carries the binfo describing the true type of
   OBJ_TYPE_REF_OBJECT(REF).
   Set CAN_REFER if non-NULL to false if the method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by a non-C++-produced symbol).  Otherwise just return NULL in that case.  */

tree
gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
				  bool *can_refer)
{
  unsigned HOST_WIDE_INT offset;
  tree v;

  v = BINFO_VTABLE (known_binfo);
  /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone.  */
  if (!v)
    return NULL_TREE;

  if (!vtable_pointer_value_to_vtable (v, &v, &offset))
    {
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }
  return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
}
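
/* Illustrative sketch (hypothetical helper): resolving the target of an
   OBJ_TYPE_REF call once the dynamic type, and therefore KNOWN_BINFO,
   has been determined by type analysis.  */

static tree
resolve_known_virtual_call_example (tree ref, tree known_binfo)
{
  bool can_refer;
  HOST_WIDE_INT token = tree_to_shwi (OBJ_TYPE_REF_TOKEN (ref));
  tree fndecl = gimple_get_virt_method_for_binfo (token, known_binfo,
						  &can_refer);
  /* A NULL result with CAN_REFER true means the vtable slot is simply
     unknown; CAN_REFER false means we must not refer to the method.  */
  return can_refer ? fndecl : NULL_TREE;
}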
/* Given a pointer value T, return a simplified version of an
   indirection through T, or NULL_TREE if no simplification is
   possible.  Note that the resulting type may be different from
   the type pointed to in the sense that it is still compatible
   from the langhooks point of view.  */

tree
gimple_fold_indirect_ref (tree t)
{
  tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
  tree sub = t;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype)
      || TYPE_REF_CAN_ALIAS_ALL (ptype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (useless_type_conversion_p (type, optype))
	return op;

      /* *(foo *)&fooarray => fooarray[0] */
      if (TREE_CODE (optype) == ARRAY_TYPE
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
	  && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  if (TREE_CODE (min_val) == INTEGER_CST)
	    return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
	}
    }

  /* *(p + CST) -> ...  */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree addr = TREE_OPERAND (sub, 0);
      tree off = TREE_OPERAND (sub, 1);
      tree addrtype;

      STRIP_NOPS (addr);
      addrtype = TREE_TYPE (addr);

      /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
	  && tree_fits_uhwi_p (off))
	{
	  unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
	  tree part_width = TYPE_SIZE (type);
	  unsigned HOST_WIDE_INT part_widthi
	    = tree_to_shwi (part_width) / BITS_PER_UNIT;
	  unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
	  tree index = bitsize_int (indexi);
	  if (offset / part_widthi
	      < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
	    return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
				part_width, index);
	}

      /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
	{
	  tree size = TYPE_SIZE_UNIT (type);
	  if (tree_int_cst_equal (size, off))
	    return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
	}

      /* *(p + CST) -> MEM_REF <p, CST>.  */
      if (TREE_CODE (addr) != ADDR_EXPR
	  || DECL_P (TREE_OPERAND (addr, 0)))
	return fold_build2 (MEM_REF, type,
			    addr,
			    wide_int_to_tree (ptype, wi::to_wide (off)));
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
      && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      tree osub = sub;
      sub = gimple_fold_indirect_ref (sub);
      if (! sub)
	sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      if (TREE_CODE (min_val) == INTEGER_CST)
	return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
/* Return true if CODE is an operation that when operating on signed
   integer types involves undefined behavior on overflow and the
   operation can be expressed with unsigned arithmetic.  */

bool
arith_code_with_undefined_signed_overflow (tree_code code)
{
  switch (code)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case NEGATE_EXPR:
    case POINTER_PLUS_EXPR:
      return true;
    default:
      return false;
    }
}
/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
   operation that can be transformed to unsigned arithmetic by converting
   its operand, carrying out the operation in the corresponding unsigned
   type and converting the result back to the original type.

   Returns a sequence of statements that replace STMT and also contain
   a modified form of STMT itself.  */

gimple_seq
rewrite_to_defined_overflow (gimple *stmt)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "rewriting stmt with undefined signed "
	       "overflow ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
    }

  tree lhs = gimple_assign_lhs (stmt);
  tree type = unsigned_type_for (TREE_TYPE (lhs));
  gimple_seq stmts = NULL;
  for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
    {
      tree op = gimple_op (stmt, i);
      op = gimple_convert (&stmts, type, op);
      gimple_set_op (stmt, i, op);
    }
  gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
  gimple_seq_add_stmt (&stmts, stmt);
  gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
  gimple_seq_add_stmt (&stmts, cvt);

  return stmts;
}
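
/* Illustrative sketch: for a statement  _3 = _1 + _2  on signed int,
   rewrite_to_defined_overflow produces (modulo temporary numbering)

     _4 = (unsigned int) _1;
     _5 = (unsigned int) _2;
     _6 = _4 + _5;
     _3 = (int) _6;

   which a caller (the hypothetical helper below) splices in place of
   the original statement.  */

static void
make_overflow_defined_example (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  /* Only plain signed integer arithmetic is handled in this sketch;
     POINTER_PLUS_EXPR would qualify as well.  */
  if (is_gimple_assign (stmt)
      && arith_code_with_undefined_signed_overflow
	   (gimple_assign_rhs_code (stmt))
      && INTEGRAL_TYPE_P (TREE_TYPE (gimple_assign_lhs (stmt)))
      && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (gimple_assign_lhs (stmt))))
    {
      gimple_seq stmts = rewrite_to_defined_overflow (stmt);
      gsi_replace_with_seq (gsi, stmts, true);
    }
}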
/* The valueization hook we use for the gimple_build API simplification.
   This makes us match fold_buildN behavior by only combining with
   statements in the sequence(s) we are currently building.  */

static tree
gimple_build_valueize (tree op)
{
  if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
    return op;
  return NULL_TREE;
}
/* Build the expression CODE OP0 of type TYPE with location LOC,
   simplifying it first if possible.  Returns the built
   expression value and appends statements possibly defining it
   to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum tree_code code, tree type, tree op0)
{
  tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
  if (!res)
    {
      res = create_tmp_reg_or_ssa_name (type);
      gimple *stmt;
      if (code == REALPART_EXPR
	  || code == IMAGPART_EXPR
	  || code == VIEW_CONVERT_EXPR)
	stmt = gimple_build_assign (res, code, build1 (code, type, op0));
      else
	stmt = gimple_build_assign (res, code, op0);
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
/* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
   simplifying it first if possible.  Returns the built
   expression value and appends statements possibly defining it
   to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum tree_code code, tree type, tree op0, tree op1)
{
  tree res = gimple_simplify (code, type, op0, op1, seq,
			      gimple_build_valueize);
  if (!res)
    {
      res = create_tmp_reg_or_ssa_name (type);
      gimple *stmt = gimple_build_assign (res, code, op0, op1);
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
/* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
   simplifying it first if possible.  Returns the built
   expression value and appends statements possibly defining it
   to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum tree_code code, tree type, tree op0, tree op1, tree op2)
{
  tree res = gimple_simplify (code, type, op0, op1, op2,
			      seq, gimple_build_valueize);
  if (!res)
    {
      res = create_tmp_reg_or_ssa_name (type);
      gimple *stmt;
      if (code == BIT_FIELD_REF)
	stmt = gimple_build_assign (res, code,
				    build3 (code, type, op0, op1, op2));
      else
	stmt = gimple_build_assign (res, code, op0, op1, op2);
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
/* Build the call FN (ARG0) with a result of type TYPE
   (or no result if TYPE is void) with location LOC,
   simplifying it first if possible.  Returns the built
   expression value (or NULL_TREE if TYPE is void) and appends
   statements possibly defining it to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum built_in_function fn, tree type, tree arg0)
{
  tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
  if (!res)
    {
      tree decl = builtin_decl_implicit (fn);
      gimple *stmt = gimple_build_call (decl, 1, arg0);
      if (!VOID_TYPE_P (type))
	{
	  res = create_tmp_reg_or_ssa_name (type);
	  gimple_call_set_lhs (stmt, res);
	}
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
/* Build the call FN (ARG0, ARG1) with a result of type TYPE
   (or no result if TYPE is void) with location LOC,
   simplifying it first if possible.  Returns the built
   expression value (or NULL_TREE if TYPE is void) and appends
   statements possibly defining it to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum built_in_function fn, tree type, tree arg0, tree arg1)
{
  tree res = gimple_simplify (fn, type, arg0, arg1, seq,
			      gimple_build_valueize);
  if (!res)
    {
      tree decl = builtin_decl_implicit (fn);
      gimple *stmt = gimple_build_call (decl, 2, arg0, arg1);
      if (!VOID_TYPE_P (type))
	{
	  res = create_tmp_reg_or_ssa_name (type);
	  gimple_call_set_lhs (stmt, res);
	}
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
/* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
   (or no result if TYPE is void) with location LOC,
   simplifying it first if possible.  Returns the built
   expression value (or NULL_TREE if TYPE is void) and appends
   statements possibly defining it to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum built_in_function fn, tree type,
	      tree arg0, tree arg1, tree arg2)
{
  tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
			      seq, gimple_build_valueize);
  if (!res)
    {
      tree decl = builtin_decl_implicit (fn);
      gimple *stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
      if (!VOID_TYPE_P (type))
	{
	  res = create_tmp_reg_or_ssa_name (type);
	  gimple_call_set_lhs (stmt, res);
	}
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
/* Build the conversion (TYPE) OP with a result of type TYPE
   with location LOC if such a conversion is necessary in GIMPLE,
   simplifying it first.
   Returns the built expression value and appends
   statements possibly defining it to SEQ.  */

tree
gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
{
  if (useless_type_conversion_p (type, TREE_TYPE (op)))
    return op;
  return gimple_build (seq, loc, NOP_EXPR, type, op);
}
/* Build the conversion (ptrofftype) OP with a result of a type
   compatible with ptrofftype with location LOC if such a conversion
   is necessary in GIMPLE, simplifying it first.
   Returns the built expression value and appends
   statements possibly defining it to SEQ.  */

tree
gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
{
  if (ptrofftype_p (TREE_TYPE (op)))
    return op;
  return gimple_convert (seq, loc, sizetype, op);
}
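
/* Illustrative sketch (hypothetical helper) of the gimple_build API:
   compute (A + B) * 4 converted to sizetype, simplifying on the fly,
   and insert whatever statements were queued before GSI.  */

static tree
build_scaled_sum_example (gimple_stmt_iterator *gsi, location_t loc,
			  tree a, tree b)
{
  gimple_seq stmts = NULL;
  tree type = TREE_TYPE (a);
  tree sum = gimple_build (&stmts, loc, PLUS_EXPR, type, a, b);
  tree scaled = gimple_build (&stmts, loc, MULT_EXPR, type, sum,
			      build_int_cst (type, 4));
  tree res = gimple_convert (&stmts, loc, sizetype, scaled);
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  return res;
}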
/* Build a vector of type TYPE in which each element has the value OP.
   Return a gimple value for the result, appending any new statements
   to SEQ.  */

tree
gimple_build_vector_from_val (gimple_seq *seq, location_t loc, tree type,
			      tree op)
{
  tree res, vec = build_vector_from_val (type, op);
  if (is_gimple_val (vec))
    return vec;
  if (gimple_in_ssa_p (cfun))
    res = make_ssa_name (type);
  else
    res = create_tmp_reg (type);
  gimple *stmt = gimple_build_assign (res, vec);
  gimple_set_location (stmt, loc);
  gimple_seq_add_stmt_without_update (seq, stmt);
  return res;
}
/* Build a vector of type TYPE in which the elements have the values
   given by ELTS.  Return a gimple value for the result, appending any
   new instructions to SEQ.  */

tree
gimple_build_vector (gimple_seq *seq, location_t loc, tree type,
		     vec<tree> elts)
{
  unsigned int nelts = elts.length ();
  gcc_assert (nelts == TYPE_VECTOR_SUBPARTS (type));
  for (unsigned int i = 0; i < nelts; ++i)
    if (!TREE_CONSTANT (elts[i]))
      {
	vec<constructor_elt, va_gc> *v;
	vec_alloc (v, nelts);
	for (i = 0; i < nelts; ++i)
	  CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[i]);

	tree res;
	if (gimple_in_ssa_p (cfun))
	  res = make_ssa_name (type);
	else
	  res = create_tmp_reg (type);
	gimple *stmt = gimple_build_assign (res, build_constructor (type, v));
	gimple_set_location (stmt, loc);
	gimple_seq_add_stmt_without_update (seq, stmt);
	return res;
      }
  return build_vector (type, elts);
}
/* Return true if the result of assignment STMT is known to be non-negative.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

static bool
gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
				   int depth)
{
  enum tree_code code = gimple_assign_rhs_code (stmt);
  switch (get_gimple_rhs_class (code))
    {
    case GIMPLE_UNARY_RHS:
      return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
					     gimple_expr_type (stmt),
					     gimple_assign_rhs1 (stmt),
					     strict_overflow_p, depth);
    case GIMPLE_BINARY_RHS:
      return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
					      gimple_expr_type (stmt),
					      gimple_assign_rhs1 (stmt),
					      gimple_assign_rhs2 (stmt),
					      strict_overflow_p, depth);
    case GIMPLE_TERNARY_RHS:
      return false;
    case GIMPLE_SINGLE_RHS:
      return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
					      strict_overflow_p, depth);
    case GIMPLE_INVALID_RHS:
      break;
    }
  gcc_unreachable ();
}
/* Return true if the return value of call STMT is known to be non-negative.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

static bool
gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
				 int depth)
{
  tree arg0 = gimple_call_num_args (stmt) > 0 ?
    gimple_call_arg (stmt, 0) : NULL_TREE;
  tree arg1 = gimple_call_num_args (stmt) > 1 ?
    gimple_call_arg (stmt, 1) : NULL_TREE;

  return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
					gimple_call_combined_fn (stmt),
					arg0,
					arg1,
					strict_overflow_p, depth);
}
/* Return true if the result of PHI node STMT is known to be non-negative.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

static bool
gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
				int depth)
{
  for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
    {
      tree arg = gimple_phi_arg_def (stmt, i);
      if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
	return false;
    }
  return true;
}
/* Return true if STMT is known to compute a non-negative value.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

bool
gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
				 int depth)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
						depth);
    case GIMPLE_CALL:
      return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
					      depth);
    case GIMPLE_PHI:
      return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
					     depth);
    default:
      return false;
    }
}
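
/* Illustrative sketch (hypothetical helper): querying the predicate
   while tracking whether the answer relies on signed overflow being
   undefined, as -Wstrict-overflow style diagnostics need to know.  */

static bool
stmt_known_nonnegative_example (gimple *stmt)
{
  bool strict_overflow = false;
  bool nonneg = gimple_stmt_nonnegative_warnv_p (stmt, &strict_overflow, 0);
  /* Accept only answers that do not rely on undefined overflow.  */
  return nonneg && !strict_overflow;
}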
/* Return true if the floating-point value computed by assignment STMT
   is known to have an integer value.  We also allow +Inf, -Inf and NaN
   to be considered integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

static bool
gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
{
  enum tree_code code = gimple_assign_rhs_code (stmt);
  switch (get_gimple_rhs_class (code))
    {
    case GIMPLE_UNARY_RHS:
      return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
					  gimple_assign_rhs1 (stmt), depth);
    case GIMPLE_BINARY_RHS:
      return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
					   gimple_assign_rhs1 (stmt),
					   gimple_assign_rhs2 (stmt), depth);
    case GIMPLE_TERNARY_RHS:
      return false;
    case GIMPLE_SINGLE_RHS:
      return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
    case GIMPLE_INVALID_RHS:
      break;
    }
  gcc_unreachable ();
}
/* Return true if the floating-point value computed by call STMT is known
   to have an integer value.  We also allow +Inf, -Inf and NaN to be
   considered integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

static bool
gimple_call_integer_valued_real_p (gimple *stmt, int depth)
{
  tree arg0 = (gimple_call_num_args (stmt) > 0
	       ? gimple_call_arg (stmt, 0)
	       : NULL_TREE);
  tree arg1 = (gimple_call_num_args (stmt) > 1
	       ? gimple_call_arg (stmt, 1)
	       : NULL_TREE);
  return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
				     arg0, arg1, depth);
}
/* Return true if the floating-point result of phi STMT is known to have
   an integer value.  We also allow +Inf, -Inf and NaN to be considered
   integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

static bool
gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
{
  for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
    {
      tree arg = gimple_phi_arg_def (stmt, i);
      if (!integer_valued_real_single_p (arg, depth + 1))
	return false;
    }
  return true;
}
/* Return true if the floating-point value computed by STMT is known
   to have an integer value.  We also allow +Inf, -Inf and NaN to be
   considered integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

bool
gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      return gimple_assign_integer_valued_real_p (stmt, depth);
    case GIMPLE_CALL:
      return gimple_call_integer_valued_real_p (stmt, depth);
    case GIMPLE_PHI:
      return gimple_phi_integer_valued_real_p (stmt, depth);
    default:
      return false;
    }
}