/* Statement simplification on GIMPLE.
   Copyright (C) 2010-2018 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.c.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "gimple-pretty-print.h"
#include "gimple-ssa-warn-restrict.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "gimple-fold.h"
#include "gimple-iterator.h"
#include "tree-into-ssa.h"
#include "tree-object-size.h"
#include "tree-ssa-propagate.h"
#include "ipa-utils.h"
#include "tree-ssa-address.h"
#include "langhooks.h"
#include "gimplify-me.h"
#include "gimple-match.h"
#include "gomp-constants.h"
#include "optabs-query.h"
#include "omp-general.h"
#include "fold-const-call.h"
#include "stringpool.h"
#include "diagnostic-core.h"
#include "tree-vector-builder.h"
/* Return true when DECL can be referenced from current unit.
   FROM_DECL (if non-null) specifies the constructor of the variable DECL was
   taken from.  We can get declarations that are not possible to reference
   for various reasons:

     1) When analyzing C++ virtual tables.
        C++ virtual tables do have known constructors even
        when they are keyed to other compilation unit.
        Those tables can contain pointers to methods and vars
        in other units.  Those methods have both STATIC and EXTERNAL
        set.
     2) In WHOPR mode devirtualization might lead to reference
        to method that was partitioned elsewhere.
        In this case we have static VAR_DECL or FUNCTION_DECL
        that has no corresponding callgraph/varpool node
        declaring the body.
     3) COMDAT functions referred by external vtables that
        we devirtualize only during final compilation stage.
        At this time we already decided that we will not output
        the function body and thus we can't reference the symbol
        directly.  */
static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  if (DECL_ABSTRACT_P (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || !VAR_OR_FUNCTION_DECL_P (decl))
    return true;

  /* Static objects can be referred to only if they were not optimized
     out yet.  */
  if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
    {
      /* Before we start optimizing unreachable code we can be sure all
         static objects are defined.  */
      if (symtab->function_flags_ready)
        return true;
      snode = symtab_node::get (decl);
      if (!snode || !snode->definition)
        return false;
      node = dyn_cast <cgraph_node *> (snode);
      return !node || !node->global.inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from initializer of
     external var or var that has been optimized out.  */
  if (!from_decl
      || !VAR_P (from_decl)
      || (!DECL_EXTERNAL (from_decl)
          && (vnode = varpool_node::get (from_decl)) != NULL
          && vnode->definition)
      || (flag_ltrans
          && (vnode = varpool_node::get (from_decl)) != NULL
          && vnode->in_other_partition))
    return true;
  /* We are folding reference from external vtable.  The vtable may refer
     to a symbol keyed to other compilation unit.  The other compilation
     unit may be in separate DSO and the symbol may be hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
    return false;
  /* When a function is public, we can always introduce a new reference.
     The exception is COMDAT functions, where introducing a direct
     reference implies the need to include the function body in the
     current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We have COMDAT.  We are going to check if we still have definition
     or if the definition is going to be output in other partition.
     Bypass this when gimplifying; all needed functions will be produced.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting to not necessarily give up because the copy will be
     output elsewhere when the corresponding vtable is output.
     This is however not possible - the ABI specifies that COMDATs are output
     in units where they are used and when the other unit was compiled with
     LTO it is possible that the vtable was kept public while the function
     itself was privatized.  */
  if (!symtab->function_flags_ready)
    return true;

  snode = symtab_node::get (decl);
  if (!snode
      || ((!snode->definition || DECL_EXTERNAL (decl))
          && (!snode->in_other_partition
              || (!snode->forced_by_abi && !snode->force_output))))
    return false;
  node = dyn_cast <cgraph_node *> (snode);
  return !node || !node->global.inlined_to;
}
/* Create a temporary for TYPE for a statement STMT.  If the current function
   is in SSA form, a SSA name is created.  Otherwise a temporary register
   is made.  */

static tree
create_tmp_reg_or_ssa_name (tree type, gimple *stmt = NULL)
{
  if (gimple_in_ssa_p (cfun))
    return make_ssa_name (type, stmt);
  else
    return create_tmp_reg (type);
}
/* CVAL is value taken from DECL_INITIAL of variable.  Try to transform it into
   acceptable form for is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specifies the variable whose constructor contains
   CVAL.  */

tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  tree orig_cval = cval;
  STRIP_NOPS (cval);
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
        cval = build1_loc (EXPR_LOCATION (cval),
                           ADDR_EXPR, TREE_TYPE (ptr),
                           fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
                                        ptr,
                                        fold_convert (ptr_type_node,
                                                      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
        {
          base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
          if (base)
            TREE_OPERAND (cval, 0) = base;
        }
      else
        base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
        return NULL_TREE;

      if (VAR_OR_FUNCTION_DECL_P (base)
          && !can_refer_decl_in_current_unit_p (base, from_decl))
        return NULL_TREE;
      if (TREE_TYPE (base) == error_mark_node)
        return NULL_TREE;
      if (VAR_P (base))
        TREE_ADDRESSABLE (base) = 1;
      else if (TREE_CODE (base) == FUNCTION_DECL)
        {
          /* Make sure we create a cgraph node for functions we'll reference.
             They can be non-existent if the reference comes from an entry
             of an external vtable for example.  */
          cgraph_node::get_create (base);
        }
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
        cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
        cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  if (TREE_OVERFLOW_P (cval))
    return drop_tree_overflow (cval);
  return orig_cval;
}
/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

tree
get_symbol_constant_value (tree sym)
{
  tree val = ctor_for_folding (sym);
  if (val != error_mark_node)
    {
      if (val)
        {
          val = canonicalize_constructor_val (unshare_expr (val), sym);
          if (val && is_gimple_min_invariant (val))
            return val;
          else
            return NULL_TREE;
        }
      /* Variables declared 'const' without an initializer
         have zero as the initializer if they may not be
         overridden at link or run time.  */
      if (!val
          && is_gimple_reg_type (TREE_TYPE (sym)))
        return build_zero_cst (TREE_TYPE (sym));
    }

  return NULL_TREE;
}
/* Subroutine of fold_stmt.  We perform several simplifications of the
   memory reference tree EXPR and make sure to re-gimplify them properly
   after propagation of constant addresses.  IS_LHS is true if the
   reference is supposed to be an lvalue.  */

static tree
maybe_fold_reference (tree expr, bool is_lhs)
{
  tree result;

  if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
       || TREE_CODE (expr) == REALPART_EXPR
       || TREE_CODE (expr) == IMAGPART_EXPR)
      && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_unary_loc (EXPR_LOCATION (expr),
                           TREE_CODE (expr),
                           TREE_TYPE (expr),
                           TREE_OPERAND (expr, 0));
  else if (TREE_CODE (expr) == BIT_FIELD_REF
           && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_ternary_loc (EXPR_LOCATION (expr),
                             TREE_CODE (expr),
                             TREE_TYPE (expr),
                             TREE_OPERAND (expr, 0),
                             TREE_OPERAND (expr, 1),
                             TREE_OPERAND (expr, 2));

  if (!is_lhs
      && (result = fold_const_aggregate_ref (expr))
      && is_gimple_min_invariant (result))
    return result;

  return NULL_TREE;
}
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple *stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
        tree rhs = gimple_assign_rhs1 (stmt);

        if (TREE_CLOBBER_P (rhs))
          return NULL_TREE;

        if (REFERENCE_CLASS_P (rhs))
          return maybe_fold_reference (rhs, false);

        else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
          {
            tree val = OBJ_TYPE_REF_EXPR (rhs);
            if (is_gimple_min_invariant (val))
              return val;
            else if (flag_devirtualize && virtual_method_call_p (rhs))
              {
                bool final;
                vec <cgraph_node *>targets
                  = possible_polymorphic_call_targets (rhs, stmt, &final);
                if (final && targets.length () <= 1 && dbg_cnt (devirt))
                  {
                    if (dump_enabled_p ())
                      {
                        location_t loc = gimple_location_safe (stmt);
                        dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
                                         "resolving virtual function address "
                                         "reference to function %s\n",
                                         targets.length () == 1
                                         ? targets[0]->name ()
                                         : "NULL");
                      }
                    if (targets.length () == 1)
                      {
                        val = fold_convert (TREE_TYPE (val),
                                            build_fold_addr_expr_loc
                                              (loc, targets[0]->decl));
                        STRIP_USELESS_TYPE_CONVERSION (val);
                      }
                    else
                      /* We cannot use __builtin_unreachable here because it
                         cannot have its address taken.  */
                      val = build_int_cst (TREE_TYPE (val), 0);
                    return val;
                  }
              }
          }

        else if (TREE_CODE (rhs) == ADDR_EXPR)
          {
            tree ref = TREE_OPERAND (rhs, 0);
            tree tem = maybe_fold_reference (ref, true);
            if (tem
                && TREE_CODE (tem) == MEM_REF
                && integer_zerop (TREE_OPERAND (tem, 1)))
              result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
            else if (tem)
              result = fold_convert (TREE_TYPE (rhs),
                                     build_fold_addr_expr_loc (loc, tem));
            else if (TREE_CODE (ref) == MEM_REF
                     && integer_zerop (TREE_OPERAND (ref, 1)))
              result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));

            if (result)
              {
                /* Strip away useless type conversions.  Both the
                   NON_LVALUE_EXPR that may have been added by fold, and
                   "useless" type conversions that might now be apparent
                   due to propagation.  */
                STRIP_USELESS_TYPE_CONVERSION (result);

                if (result != rhs && valid_gimple_rhs_p (result))
                  return result;
              }
          }

        else if (TREE_CODE (rhs) == CONSTRUCTOR
                 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
          {
            /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
            unsigned i;
            tree val;

            FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
              if (! CONSTANT_CLASS_P (val))
                return NULL_TREE;

            return build_vector_from_ctor (TREE_TYPE (rhs),
                                           CONSTRUCTOR_ELTS (rhs));
          }

        else if (DECL_P (rhs))
          return get_symbol_constant_value (rhs);
      }
      break;

    case GIMPLE_UNARY_RHS:
      break;

    case GIMPLE_BINARY_RHS:
      break;

    case GIMPLE_TERNARY_RHS:
      result = fold_ternary_loc (loc, subcode,
                                 TREE_TYPE (gimple_assign_lhs (stmt)),
                                 gimple_assign_rhs1 (stmt),
                                 gimple_assign_rhs2 (stmt),
                                 gimple_assign_rhs3 (stmt));

      if (result)
        {
          STRIP_USELESS_TYPE_CONVERSION (result);
          if (valid_gimple_rhs_p (result))
            return result;
        }
      break;

    case GIMPLE_INVALID_RHS:
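      gcc_unreachable ();
    }

  return NULL_TREE;
}

/* Illustrative sketch, not part of the original source: for the
   OBJ_TYPE_REF case above, a virtual call address such as

     _5 = OBJ_TYPE_REF (_4; obj_2(D)->0);

   whose polymorphic target set is proven to contain a single method
   is folded as if the source had taken the target's address directly,

     _5 = &Derived::foo;   // hypothetical unique target

   while an empty target set folds the address to the constant 0,
   since that path can never execute and __builtin_unreachable cannot
   have its address taken.  */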
/* Replace a statement at *SI_P with a sequence of statements in STMTS,
   adjusting the replacement stmts location and virtual operands.
   If the statement has a lhs the last stmt in the sequence is expected
   to assign to that lhs.  */

static void
gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
{
  gimple *stmt = gsi_stmt (*si_p);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  gimple *laststore = NULL;
  for (gimple_stmt_iterator i = gsi_last (stmts);
       !gsi_end_p (i); gsi_prev (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      if ((gimple_assign_single_p (new_stmt)
           && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
          || (is_gimple_call (new_stmt)
              && (gimple_call_flags (new_stmt)
                  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
        {
          tree vdef;
          if (!laststore)
            vdef = gimple_vdef (stmt);
          else
            vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
          gimple_set_vdef (new_stmt, vdef);
          if (vdef && TREE_CODE (vdef) == SSA_NAME)
            SSA_NAME_DEF_STMT (vdef) = new_stmt;
          laststore = new_stmt;
        }
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  tree reaching_vuse = gimple_vuse (stmt);
  for (gimple_stmt_iterator i = gsi_start (stmts);
       !gsi_end_p (i); gsi_next (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with exact SSA
         name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
        gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
        reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store release the virtual
     definition of the original statement.  */
  if (!laststore
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
          && TREE_CODE (vdef) == SSA_NAME)
        {
          unlink_stmt_vdef (stmt);
          release_ssa_name (vdef);
        }
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}
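
/* Illustrative sketch, not part of the original source: suppose the
   replaced statement carried the virtual operands

     # .MEM_5 = VDEF <.MEM_4>
     memcpy (dst_1, src_2, 16);

   and the replacement sequence contains two stores.  The backward
   pass hands the last store the original VDEF .MEM_5 and makes a
   fresh name for the earlier one; the forward pass then threads the
   VUSEs, yielding roughly

     # .MEM_7 = VDEF <.MEM_4>
     MEM[dst_1] = _t1;
     # .MEM_5 = VDEF <.MEM_7>
     MEM[dst_1 + 8] = _t2;

   so the new sequence plugs into the virtual SSA chain exactly where
   the original statement sat.  */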
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL,
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple *stmt, *new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  push_gimplify_context (gimple_in_ssa_p (cfun));

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
         which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
        {
          pop_gimplify_context (NULL);
          if (gimple_in_ssa_p (cfun))
            {
              unlink_stmt_vdef (stmt);
              release_defs (stmt);
            }
          gsi_replace (si_p, gimple_build_nop (), false);
          return;
        }
    }
  else
    {
      tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
                                       GSI_CONTINUE_LINKING);
    }

  pop_gimplify_context (NULL);

  gsi_replace_with_seq_vops (si_p, stmts);
}
/* Replace the call at *GSI with the gimple value VAL.  */

void
replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
{
  gimple *stmt = gsi_stmt (*gsi);
  gimple *repl;
  tree lhs = gimple_call_lhs (stmt);
  if (lhs)
    {
      if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
        val = fold_convert (TREE_TYPE (lhs), val);
      repl = gimple_build_assign (lhs, val);
    }
  else
    repl = gimple_build_nop ();
  tree vdef = gimple_vdef (stmt);
  if (vdef && TREE_CODE (vdef) == SSA_NAME)
    {
      unlink_stmt_vdef (stmt);
      release_ssa_name (vdef);
    }
  gsi_replace (gsi, repl, false);
}
/* Replace the call at *GSI with the new call REPL and fold that
   again.  */

static void
replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
{
  gimple *stmt = gsi_stmt (*gsi);
  gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
  gimple_set_location (repl, gimple_location (stmt));
  if (gimple_vdef (stmt)
      && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
    {
      gimple_set_vdef (repl, gimple_vdef (stmt));
      SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
    }
  if (gimple_vuse (stmt))
    gimple_set_vuse (repl, gimple_vuse (stmt));
  gsi_replace (gsi, repl, false);
  fold_stmt (gsi);
}
/* Return true if VAR is a VAR_DECL or a component thereof.  */

static bool
var_decl_component_p (tree var)
{
  tree inner = var;
  while (handled_component_p (inner))
    inner = TREE_OPERAND (inner, 0);
  return SSA_VAR_P (inner);
}
/* If the SIZE argument representing the size of an object is in a range
   of values of which exactly one is valid (and that is zero), return
   true, otherwise false.  */

static bool
size_must_be_zero_p (tree size)
{
  if (integer_zerop (size))
    return true;

  if (TREE_CODE (size) != SSA_NAME)
    return false;

  wide_int min, max;
  enum value_range_type rtype = get_range_info (size, &min, &max);
  if (rtype != VR_ANTI_RANGE)
    return false;

  tree type = TREE_TYPE (size);
  int prec = TYPE_PRECISION (type);

  wide_int wone = wi::one (prec);

  /* Compute the value of SSIZE_MAX, the largest positive value that
     can be stored in ssize_t, the signed counterpart of size_t.  */
  wide_int ssize_max = wi::lshift (wi::one (prec), prec - 1) - 1;

  return wi::eq_p (min, wone) && wi::geu_p (max, ssize_max);
}
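
/* Illustrative sketch, not part of the original source: a size_t
   value with the anti-range ~[1, SSIZE_MAX] can only be 0 or larger
   than SSIZE_MAX, and sizes above SSIZE_MAX are never valid object
   sizes, so 0 is the only value a valid call could pass.  E.g.

     void f (char *d, const char *s, size_t n)
     {
       if (n >= 1 && n <= (size_t) -1 / 2)  // hypothetical guard that
         return;                            // VRP turns into the
       memcpy (d, s, n);                    // anti-range above
     }

   lets the memcpy be folded away as if n were 0.  */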
/* Fold function call to builtin mem{{,p}cpy,move}.  Try to detect and
   diagnose (otherwise undefined) overlapping copies without preventing
   folding.  When folded, GCC guarantees that overlapping memcpy has
   the same semantics as memmove.  A call to the library memcpy need not
   provide the same guarantee.  Return false if no simplification can
   be made.  */

static bool
gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
                               tree dest, tree src, int endp)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  tree len = gimple_call_arg (stmt, 2);
  tree destvar, srcvar;
  location_t loc = gimple_location (stmt);

  tree func = gimple_call_fndecl (stmt);
  bool nowarn = gimple_no_warning_p (stmt);
  bool check_overlap = (DECL_FUNCTION_CODE (func) != BUILT_IN_MEMMOVE
                        && DECL_FUNCTION_CODE (func) != BUILT_IN_MEMMOVE_CHK
                        && !nowarn);

  /* If the LEN parameter is a constant zero or in a range where
     the only valid value is zero, return DEST.  */
  if (size_must_be_zero_p (len))
    {
      gimple *repl;
      if (gimple_call_lhs (stmt))
        repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
      else
        repl = gimple_build_nop ();
      tree vdef = gimple_vdef (stmt);
      if (vdef && TREE_CODE (vdef) == SSA_NAME)
        {
          unlink_stmt_vdef (stmt);
          release_ssa_name (vdef);
        }
      gsi_replace (gsi, repl, false);
      return true;
    }

  /* If SRC and DEST are the same (and not volatile), return
     DEST{,+LEN,+LEN-1}.  */
  if (operand_equal_p (src, dest, 0))
    {
      /* Avoid diagnosing exact overlap in calls to __builtin_memcpy.
         It's safe and may even be emitted by GCC itself (see bug
         32667).  However, diagnose it in explicit calls to the memcpy
         function.  */
      if (check_overlap && *IDENTIFIER_POINTER (DECL_NAME (func)) != '_')
        warning_at (loc, OPT_Wrestrict,
                    "%qD source argument is the same as destination",
                    func);

      unlink_stmt_vdef (stmt);
      if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
        release_ssa_name (gimple_vdef (stmt));
      if (!lhs)
        {
          gsi_replace (gsi, gimple_build_nop (), false);
          return true;
        }
      goto done;
    }
  else
    {
      tree srctype, desttype;
      unsigned int src_align, dest_align;
      tree off0;

      /* Inlining of memcpy/memmove may cause bounds to be lost (if we copy
         pointers as a wide integer) and may also result in huge function
         size because of inlined bounds copy.  Thus don't inline for
         functions we want to instrument.  */
      if (flag_check_pointer_bounds
          && chkp_instrumentable_p (cfun->decl)
          /* Even if data may contain pointers we can inline if we copy
             less than a pointer size.  */
          && (!tree_fits_uhwi_p (len)
              || compare_tree_int (len, POINTER_SIZE_UNITS) >= 0))
        return false;

      /* Build accesses at offset zero with a ref-all character type.  */
      off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
                                                         ptr_mode, true), 0);

      /* If we can perform the copy efficiently with first doing all loads
         and then all stores inline it that way.  Currently efficiently
         means that we can load all the memory into a single integer
         register which is what MOVE_MAX gives us.  */
      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (tree_fits_uhwi_p (len)
          && compare_tree_int (len, MOVE_MAX) <= 0
          /* ???  Don't transform copies from strings with known length;
             this confuses tree-ssa-strlen.c.  This doesn't handle
             the case in gcc.dg/strlenopt-8.c which is XFAILed for that
             reason.  */
          && !c_strlen (src, 2))
        {
          unsigned ilen = tree_to_uhwi (len);
          if (pow2p_hwi (ilen))
            {
              /* Detect invalid bounds and overlapping copies and issue
                 either -Warray-bounds or -Wrestrict.  */
              if (!nowarn
                  && check_bounds_or_overlap (as_a <gcall *>(stmt),
                                              dest, src, len, len))
                gimple_set_no_warning (stmt, true);

              scalar_int_mode mode;
              tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
              if (type
                  && is_a <scalar_int_mode> (TYPE_MODE (type), &mode)
                  && GET_MODE_SIZE (mode) * BITS_PER_UNIT == ilen * 8
                  /* If the destination pointer is not aligned we must be able
                     to emit an unaligned store.  */
                  && (dest_align >= GET_MODE_ALIGNMENT (mode)
                      || !targetm.slow_unaligned_access (mode, dest_align)
                      || (optab_handler (movmisalign_optab, mode)
                          != CODE_FOR_nothing)))
                {
                  tree srctype = type;
                  tree desttype = type;
                  if (src_align < GET_MODE_ALIGNMENT (mode))
                    srctype = build_aligned_type (type, src_align);
                  tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
                  tree tem = fold_const_aggregate_ref (srcmem);
                  if (tem)
                    srcmem = tem;
                  else if (src_align < GET_MODE_ALIGNMENT (mode)
                           && targetm.slow_unaligned_access (mode, src_align)
                           && (optab_handler (movmisalign_optab, mode)
                               == CODE_FOR_nothing))
                    srcmem = NULL_TREE;
                  if (srcmem)
                    {
                      gimple *new_stmt;
                      if (is_gimple_reg_type (TREE_TYPE (srcmem)))
                        {
                          new_stmt = gimple_build_assign (NULL_TREE, srcmem);
                          srcmem
                            = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem),
                                                          new_stmt);
                          gimple_assign_set_lhs (new_stmt, srcmem);
                          gimple_set_vuse (new_stmt, gimple_vuse (stmt));
                          gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
                        }
                      if (dest_align < GET_MODE_ALIGNMENT (mode))
                        desttype = build_aligned_type (type, dest_align);
                      new_stmt
                        = gimple_build_assign (fold_build2 (MEM_REF, desttype,
                                                            dest, off0),
                                               srcmem);
                      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
                      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
                      if (gimple_vdef (new_stmt)
                          && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
                        SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
                      if (!lhs)
                        {
                          gsi_replace (gsi, new_stmt, false);
                          return true;
                        }
                      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
                      goto done;
                    }
                }
            }
        }

      if (endp == 3)
        {
          /* Both DEST and SRC must be pointer types.
             ??? This is what old code did.  Is the testing for pointer types
             really mandatory?

             If either SRC is readonly or length is 1, we can use memcpy.  */
          if (!dest_align || !src_align)
            return false;
          if (readonly_data_expr (src)
              || (tree_fits_uhwi_p (len)
                  && (MIN (src_align, dest_align) / BITS_PER_UNIT
                      >= tree_to_uhwi (len))))
            {
              tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
              if (!fn)
                return false;
              gimple_call_set_fndecl (stmt, fn);
              gimple_call_set_arg (stmt, 0, dest);
              gimple_call_set_arg (stmt, 1, src);
              fold_stmt (gsi);
              return true;
            }

          /* If *src and *dest can't overlap, optimize into memcpy as well.  */
          if (TREE_CODE (src) == ADDR_EXPR
              && TREE_CODE (dest) == ADDR_EXPR)
            {
              tree src_base, dest_base, fn;
              poly_int64 src_offset = 0, dest_offset = 0;
              poly_uint64 maxsize;

              srcvar = TREE_OPERAND (src, 0);
              src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
              if (src_base == NULL)
                src_base = srcvar;
              destvar = TREE_OPERAND (dest, 0);
              dest_base = get_addr_base_and_unit_offset (destvar,
                                                         &dest_offset);
              if (dest_base == NULL)
                dest_base = destvar;
              if (!poly_int_tree_p (len, &maxsize))
                maxsize = -1;
              if (SSA_VAR_P (src_base)
                  && SSA_VAR_P (dest_base))
                {
                  if (operand_equal_p (src_base, dest_base, 0)
                      && ranges_maybe_overlap_p (src_offset, maxsize,
                                                 dest_offset, maxsize))
                    return false;
                }
              else if (TREE_CODE (src_base) == MEM_REF
                       && TREE_CODE (dest_base) == MEM_REF)
                {
                  if (! operand_equal_p (TREE_OPERAND (src_base, 0),
                                         TREE_OPERAND (dest_base, 0), 0))
                    return false;
                  poly_offset_int full_src_offset
                    = mem_ref_offset (src_base) + src_offset;
                  poly_offset_int full_dest_offset
                    = mem_ref_offset (dest_base) + dest_offset;
                  if (ranges_maybe_overlap_p (full_src_offset, maxsize,
                                              full_dest_offset, maxsize))
                    return false;
                }
              else
                return false;

              fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
              if (!fn)
                return false;
              gimple_call_set_fndecl (stmt, fn);
              gimple_call_set_arg (stmt, 0, dest);
              gimple_call_set_arg (stmt, 1, src);
              fold_stmt (gsi);
              return true;
            }

          /* If the destination and source do not alias optimize into
             memcpy as well.  */
          if ((is_gimple_min_invariant (dest)
               || TREE_CODE (dest) == SSA_NAME)
              && (is_gimple_min_invariant (src)
                  || TREE_CODE (src) == SSA_NAME))
            {
              ao_ref destr, srcr;
              ao_ref_init_from_ptr_and_size (&destr, dest, len);
              ao_ref_init_from_ptr_and_size (&srcr, src, len);
              if (!refs_may_alias_p_1 (&destr, &srcr, false))
                {
                  tree fn;
                  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
                  if (!fn)
                    return false;
                  gimple_call_set_fndecl (stmt, fn);
                  gimple_call_set_arg (stmt, 0, dest);
                  gimple_call_set_arg (stmt, 1, src);
                  fold_stmt (gsi);
                  return true;
                }
            }

          return false;
        }

      if (!tree_fits_shwi_p (len))
        return false;
      if (!POINTER_TYPE_P (TREE_TYPE (src))
          || !POINTER_TYPE_P (TREE_TYPE (dest)))
        return false;
      /* In the following try to find a type that is most natural to be
         used for the memcpy source and destination and that allows
         the most optimization when memcpy is turned into a plain assignment
         using that type.  In theory we could always use a char[len] type
         but that only gains us that the destination and source possibly
         no longer will have their address taken.  */
      srctype = TREE_TYPE (TREE_TYPE (src));
      if (TREE_CODE (srctype) == ARRAY_TYPE
          && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
        srctype = TREE_TYPE (srctype);
      desttype = TREE_TYPE (TREE_TYPE (dest));
      if (TREE_CODE (desttype) == ARRAY_TYPE
          && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
        desttype = TREE_TYPE (desttype);
      if (TREE_ADDRESSABLE (srctype)
          || TREE_ADDRESSABLE (desttype))
        return false;

      /* Make sure we are not copying using a floating-point mode or
         a type whose size possibly does not match its precision.  */
      if (FLOAT_MODE_P (TYPE_MODE (desttype))
          || TREE_CODE (desttype) == BOOLEAN_TYPE
          || TREE_CODE (desttype) == ENUMERAL_TYPE)
        desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
      if (FLOAT_MODE_P (TYPE_MODE (srctype))
          || TREE_CODE (srctype) == BOOLEAN_TYPE
          || TREE_CODE (srctype) == ENUMERAL_TYPE)
        srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
      if (!srctype)
        srctype = desttype;
      if (!desttype)
        desttype = srctype;
      if (!srctype)
        return false;

      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (dest_align < TYPE_ALIGN (desttype)
          || src_align < TYPE_ALIGN (srctype))
        return false;

      destvar = NULL_TREE;
      if (TREE_CODE (dest) == ADDR_EXPR
          && var_decl_component_p (TREE_OPERAND (dest, 0))
          && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
        destvar = fold_build2 (MEM_REF, desttype, dest, off0);

      srcvar = NULL_TREE;
      if (TREE_CODE (src) == ADDR_EXPR
          && var_decl_component_p (TREE_OPERAND (src, 0))
          && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
        {
          if (!destvar
              || src_align >= TYPE_ALIGN (desttype))
            srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
                                  src, off0);
          else if (!STRICT_ALIGNMENT)
            {
              srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
                                            src_align);
              srcvar = fold_build2 (MEM_REF, srctype, src, off0);
            }
        }

      if (srcvar == NULL_TREE && destvar == NULL_TREE)
        return false;

      if (srcvar == NULL_TREE)
        {
          if (src_align >= TYPE_ALIGN (desttype))
            srcvar = fold_build2 (MEM_REF, desttype, src, off0);
          else
            {
              if (STRICT_ALIGNMENT)
                return false;
              srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
                                            src_align);
              srcvar = fold_build2 (MEM_REF, srctype, src, off0);
            }
        }
      else if (destvar == NULL_TREE)
        {
          if (dest_align >= TYPE_ALIGN (srctype))
            destvar = fold_build2 (MEM_REF, srctype, dest, off0);
          else
            {
              if (STRICT_ALIGNMENT)
                return false;
              desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
                                             dest_align);
              destvar = fold_build2 (MEM_REF, desttype, dest, off0);
            }
        }

      /* Detect invalid bounds and overlapping copies and issue either
         -Warray-bounds or -Wrestrict.  */
      if (!nowarn)
        check_bounds_or_overlap (as_a <gcall *>(stmt), dest, src, len, len);

      gimple *new_stmt;
      if (is_gimple_reg_type (TREE_TYPE (srcvar)))
        {
          tree tem = fold_const_aggregate_ref (srcvar);
          if (tem)
            srcvar = tem;
          if (! is_gimple_min_invariant (srcvar))
            {
              new_stmt = gimple_build_assign (NULL_TREE, srcvar);
              srcvar = create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar),
                                                   new_stmt);
              gimple_assign_set_lhs (new_stmt, srcvar);
              gimple_set_vuse (new_stmt, gimple_vuse (stmt));
              gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
            }
          new_stmt = gimple_build_assign (destvar, srcvar);
          goto set_vop_and_replace;
        }

      /* We get an aggregate copy.  Use an unsigned char[] type to
         perform the copying to preserve padding and to avoid any issues
         with TREE_ADDRESSABLE types or float modes behavior on copying.  */
      desttype = build_array_type_nelts (unsigned_char_type_node,
                                         tree_to_uhwi (len));
      srctype = desttype;
      if (src_align > TYPE_ALIGN (srctype))
        srctype = build_aligned_type (srctype, src_align);
      if (dest_align > TYPE_ALIGN (desttype))
        desttype = build_aligned_type (desttype, dest_align);
      new_stmt
        = gimple_build_assign (fold_build2 (MEM_REF, desttype, dest, off0),
                               fold_build2 (MEM_REF, srctype, src, off0));
set_vop_and_replace:
      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
      if (gimple_vdef (new_stmt)
          && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
        SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
      if (!lhs)
        {
          gsi_replace (gsi, new_stmt, false);
          return true;
        }
      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
    }

done:
  gimple_seq stmts = NULL;
  if (endp == 0 || endp == 3)
    len = NULL_TREE;
  else if (endp == 2)
    len = gimple_build (&stmts, loc, MINUS_EXPR, TREE_TYPE (len), len,
                        ssize_int (1));
  if (endp == 2 || endp == 1)
    {
      len = gimple_convert_to_ptrofftype (&stmts, loc, len);
      dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
                           TREE_TYPE (dest), dest, len);
    }

  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gimple *repl = gimple_build_assign (lhs, dest);
  gsi_replace (gsi, repl, false);
  return true;
}
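
/* Illustrative sketch, not part of the original source: with a small
   power-of-two constant length that fits in one integer register,

     char a[8], b[8];
     memcpy (a, b, 8);

   is inlined into a single load/store pair through MEM_REFs with a
   64-bit integer type, roughly as if the user had written

     uint64_t tmp = *(uint64_t *) b;
     *(uint64_t *) a = tmp;

   while mempcpy (endp == 1) additionally returns dest + len, stpcpy
   style callers (endp == 2) get dest + len - 1, and memmove relies on
   all loads happening before the stores, which makes overlap
   harmless.  */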
/* Transform a call to built-in bcmp(a, b, len) at *GSI into one
   to built-in memcmp (a, b, len).  */

static bool
gimple_fold_builtin_bcmp (gimple_stmt_iterator *gsi)
{
  tree fn = builtin_decl_implicit (BUILT_IN_MEMCMP);

  if (!fn)
    return false;

  /* Transform bcmp (a, b, len) into memcmp (a, b, len).  */

  gimple *stmt = gsi_stmt (*gsi);
  tree a = gimple_call_arg (stmt, 0);
  tree b = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  gimple *repl = gimple_build_call (fn, 3, a, b, len);
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}
/* Transform a call to built-in bcopy (src, dest, len) at *GSI into one
   to built-in memmove (dest, src, len).  */

static bool
gimple_fold_builtin_bcopy (gimple_stmt_iterator *gsi)
{
  tree fn = builtin_decl_implicit (BUILT_IN_MEMMOVE);

  if (!fn)
    return false;

  /* bcopy has been removed from POSIX in Issue 7 but Issue 6 specifies
     it's equivalent to memmove (not memcpy).  Transform bcopy (src, dest,
     len) into memmove (dest, src, len).  */

  gimple *stmt = gsi_stmt (*gsi);
  tree src = gimple_call_arg (stmt, 0);
  tree dest = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  gimple_call_set_fntype (as_a <gcall *> (stmt), TREE_TYPE (fn));
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}
/* Transform a call to built-in bzero (dest, len) at *GSI into one
   to built-in memset (dest, 0, len).  */

static bool
gimple_fold_builtin_bzero (gimple_stmt_iterator *gsi)
{
  tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);

  if (!fn)
    return false;

  /* Transform bzero (dest, len) into memset (dest, 0, len).  */

  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree len = gimple_call_arg (stmt, 1);

  gimple_seq seq = NULL;
  gimple *repl = gimple_build_call (fn, 3, dest, integer_zero_node, len);
  gimple_seq_add_stmt_without_update (&seq, repl);
  gsi_replace_with_seq_vops (gsi, seq);
  fold_stmt (gsi);

  return true;
}
/* Fold function call to builtin memset or bzero at *GSI setting the
   memory of size LEN to VAL.  Return whether a simplification was made.  */

static bool
gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree etype;
  unsigned HOST_WIDE_INT length, cval;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
      return true;
    }

  if (! tree_fits_uhwi_p (len))
    return false;

  if (TREE_CODE (c) != INTEGER_CST)
    return false;

  tree dest = gimple_call_arg (stmt, 0);
  tree var = dest;
  if (TREE_CODE (var) != ADDR_EXPR)
    return false;

  var = TREE_OPERAND (var, 0);
  if (TREE_THIS_VOLATILE (var))
    return false;

  etype = TREE_TYPE (var);
  if (TREE_CODE (etype) == ARRAY_TYPE)
    etype = TREE_TYPE (etype);

  if (!INTEGRAL_TYPE_P (etype)
      && !POINTER_TYPE_P (etype))
    return false;

  if (! var_decl_component_p (var))
    return false;

  length = tree_to_uhwi (len);
  if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (etype)) != length
      || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
    return false;

  if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
    return false;

  if (integer_zerop (c))
    cval = 0;
  else
    {
      if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
        return false;

      cval = TREE_INT_CST_LOW (c);
      cval &= 0xff;
      cval |= cval << 8;
      cval |= cval << 16;
      cval |= (cval << 31) << 1;
    }

  var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
  gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
  gimple_set_vuse (store, gimple_vuse (stmt));
  tree vdef = gimple_vdef (stmt);
  if (vdef && TREE_CODE (vdef) == SSA_NAME)
    {
      gimple_set_vdef (store, gimple_vdef (stmt));
      SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
    }
  gsi_insert_before (gsi, store, GSI_SAME_STMT);
  if (gimple_call_lhs (stmt))
    {
      gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
      gsi_replace (gsi, asgn, false);
    }
  else
    {
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (gsi);
      gsi_remove (&gsi2, true);
    }

  return true;
}
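
/* Illustrative sketch, not part of the original source: for

     unsigned int u;
     memset (&u, 0xab, sizeof u);

   with a 32-bit unsigned int the byte splat above computes

     cval = 0xab;
     cval |= cval << 8;    // 0xabab
     cval |= cval << 16;   // 0xabababab

   (the final "(cval << 31) << 1" step only contributes for 8-byte
   lengths), and the call is replaced by the single store

     u = 0xabababab;  */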
/* Obtain the minimum and maximum string length or minimum and maximum
   value of ARG in LENGTH[0] and LENGTH[1], respectively.
   If ARG is an SSA name variable, follow its use-def chains.  When
   TYPE == 0, if LENGTH[1] is not equal to the length we determine or
   if we are unable to determine the length or value, return false.
   VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be obtained, 1 for maximum string
   length and 2 for maximum value ARG can have.
   When FUZZY is set and the length of a string cannot be determined,
   the function instead considers as the maximum possible length the
   size of a character array it may refer to.
   Set *FLEXP to true if the range of the string lengths has been
   obtained from the upper bound of an array at the end of a struct.
   Such an array may hold a string that's longer than its upper bound
   due to it being used as a poor-man's flexible array member.  */

static bool
get_range_strlen (tree arg, tree length[2], bitmap *visited, int type,
                  bool fuzzy, bool *flexp)
{
  tree var, val = NULL_TREE;
  gimple *def_stmt;

  /* The minimum and maximum length.  The MAXLEN pointer stays unchanged
     but MINLEN may be cleared during the execution of the function.  */
  tree *minlen = length;
  tree *const maxlen = length + 1;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      if (TREE_CODE (arg) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
          && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
        {
          tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
          if (TREE_CODE (aop0) == INDIRECT_REF
              && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
            return get_range_strlen (TREE_OPERAND (aop0, 0),
                                     length, visited, type, fuzzy, flexp);
        }

      if (type == 2)
        {
          val = arg;
          if (TREE_CODE (val) != INTEGER_CST
              || tree_int_cst_sgn (val) < 0)
            return false;
        }
      else
        val = c_strlen (arg, 1);

      if (!val && fuzzy)
        {
          if (TREE_CODE (arg) == ADDR_EXPR)
            return get_range_strlen (TREE_OPERAND (arg, 0), length,
                                     visited, type, fuzzy, flexp);

          if (TREE_CODE (arg) == COMPONENT_REF
              && TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1))) == ARRAY_TYPE)
            {
              /* Use the type of the member array to determine the upper
                 bound on the length of the array.  This may be overly
                 optimistic if the array itself isn't NUL-terminated and
                 the caller relies on the subsequent member to contain
                 the NUL.
                 Set *FLEXP to true if the array whose bound is being
                 used is at the end of a struct.  */
              if (array_at_struct_end_p (arg))
                *flexp = true;

              arg = TREE_OPERAND (arg, 1);
              val = TYPE_SIZE_UNIT (TREE_TYPE (arg));
              if (!val || integer_zerop (val))
                return false;
              val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
                                 integer_one_node);
              /* Set the minimum size to zero since the string in
                 the array could have zero length.  */
              *minlen = ssize_int (0);
            }

          if (VAR_P (arg)
              && TREE_CODE (TREE_TYPE (arg)) == ARRAY_TYPE)
            {
              val = TYPE_SIZE_UNIT (TREE_TYPE (arg));
              if (!val || TREE_CODE (val) != INTEGER_CST || integer_zerop (val))
                return false;
              val = wide_int_to_tree (TREE_TYPE (val),
                                      wi::sub (wi::to_wide (val), 1));
              /* Set the minimum size to zero since the string in
                 the array could have zero length.  */
              *minlen = ssize_int (0);
            }
        }

      if (!val)
        return false;

      if (minlen
          && (!*minlen
              || (type > 0
                  && TREE_CODE (*minlen) == INTEGER_CST
                  && TREE_CODE (val) == INTEGER_CST
                  && tree_int_cst_lt (val, *minlen))))
        *minlen = val;

      if (*maxlen)
        {
          if (type > 0)
            {
              if (TREE_CODE (*maxlen) != INTEGER_CST
                  || TREE_CODE (val) != INTEGER_CST)
                return false;

              if (tree_int_cst_lt (*maxlen, val))
                *maxlen = val;
              return true;
            }
          else if (simple_cst_equal (val, *maxlen) != 1)
            return false;
        }

      *maxlen = val;
      return true;
    }

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  */
  if (!*visited)
    *visited = BITMAP_ALLOC (NULL);
  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
      case GIMPLE_ASSIGN:
        /* The RHS of the statement defining VAR must either have a
           constant length or come from another SSA_NAME with a constant
           length.  */
        if (gimple_assign_single_p (def_stmt)
            || gimple_assign_unary_nop_p (def_stmt))
          {
            tree rhs = gimple_assign_rhs1 (def_stmt);
            return get_range_strlen (rhs, length, visited, type, fuzzy, flexp);
          }
        else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
          {
            tree op2 = gimple_assign_rhs2 (def_stmt);
            tree op3 = gimple_assign_rhs3 (def_stmt);
            return get_range_strlen (op2, length, visited, type, fuzzy, flexp)
              && get_range_strlen (op3, length, visited, type, fuzzy, flexp);
          }
        return false;

      case GIMPLE_PHI:
        {
          /* All the arguments of the PHI node must have the same constant
             length.  */
          unsigned i;

          for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
            {
              tree arg = gimple_phi_arg (def_stmt, i)->def;

              /* If this PHI has itself as an argument, we cannot
                 determine the string length of this argument.  However,
                 if we can find a constant string length for the other
                 PHI args then we can still be sure that this is a
                 constant string length.  So be optimistic and just
                 continue with the next argument.  */
              if (arg == gimple_phi_result (def_stmt))
                continue;

              if (!get_range_strlen (arg, length, visited, type, fuzzy, flexp))
                {
                  if (fuzzy)
                    *maxlen = build_all_ones_cst (size_type_node);
                  else
                    return false;
                }
            }
        }
        return true;

      default:
        return false;
    }
}
/* Determine the minimum and maximum value or string length that ARG
   refers to and store each in the first two elements of MINMAXLEN.
   For expressions that point to strings of unknown lengths that are
   character arrays, use the upper bound of the array as the maximum
   length.  For example, given an expression like 'x ? array : "xyz"'
   and array declared as 'char array[8]', MINMAXLEN[0] will be set
   to 3 and MINMAXLEN[1] to 7, the longest string that could be
   stored in array.
   Return true if the range of the string lengths has been obtained
   from the upper bound of an array at the end of a struct.  Such
   an array may hold a string that's longer than its upper bound
   due to it being used as a poor-man's flexible array member.  */

bool
get_range_strlen (tree arg, tree minmaxlen[2])
{
  bitmap visited = NULL;

  minmaxlen[0] = NULL_TREE;
  minmaxlen[1] = NULL_TREE;

  bool flexarray = false;
  get_range_strlen (arg, minmaxlen, &visited, 1, true, &flexarray);

  if (visited)
    BITMAP_FREE (visited);

  return flexarray;
}
tree
get_maxval_strlen (tree arg, int type)
{
  bitmap visited = NULL;
  tree len[2] = { NULL_TREE, NULL_TREE };

  bool dummy;
  if (!get_range_strlen (arg, len, &visited, type, false, &dummy))
    len[1] = NULL_TREE;
  if (visited)
    BITMAP_FREE (visited);

  return len[1];
}
/* Fold function call to builtin strcpy with arguments DEST and SRC.
   The length of the copied string is derived from SRC when it is
   known.  Return false if no simplification can be made.  */

static bool
gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
                            tree dest, tree src)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (operand_equal_p (src, dest, 0))
    {
      tree func = gimple_call_fndecl (stmt);

      warning_at (loc, OPT_Wrestrict,
                  "%qD source argument is the same as destination",
                  func);

      replace_call_with_value (gsi, dest);
      return true;
    }

  if (optimize_function_for_size_p (cfun))
    return false;

  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  tree len = get_maxval_strlen (src, 0);
  if (!len)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
  len = force_gimple_operand_gsi (gsi, len, true,
                                  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
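
/* Illustrative sketch, not part of the original source: with a known
   source length,

     strcpy (d, "abc");

   is rewritten to copy the terminating nul as well,

     memcpy (d, "abc", 4);   // strlen ("abc") + 1

   but only when optimizing for speed; when optimizing for size the
   libc strcpy call is kept.  */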
/* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
   The length of the source string is derived from SRC when it is
   known.  Return false if no simplification can be made.  */

static bool
gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
                             tree dest, tree src, tree len)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool nonstring = get_attr_nonstring_decl (dest) != NULL_TREE;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      /* Avoid warning if the destination refers to an array/pointer
         decorated with attribute nonstring.  */
      if (!nonstring)
        {
          tree fndecl = gimple_call_fndecl (stmt);
          gcall *call = as_a <gcall *> (stmt);

          /* Warn about the lack of nul termination: the result is not
             a (nul-terminated) string.  */
          tree slen = get_maxval_strlen (src, 0);
          if (slen && !integer_zerop (slen))
            warning_at (loc, OPT_Wstringop_truncation,
                        "%G%qD destination unchanged after copying no bytes "
                        "from a string of length %E",
                        call, fndecl, slen);
          else
            warning_at (loc, OPT_Wstringop_truncation,
                        "%G%qD destination unchanged after copying no bytes",
                        call, fndecl);
        }

      replace_call_with_value (gsi, dest);
      return true;
    }

  /* We can't compare slen with len as constants below if len is not a
     constant.  */
  if (TREE_CODE (len) != INTEGER_CST)
    return false;

  /* Now, we must be passed a constant src ptr parameter.  */
  tree slen = get_maxval_strlen (src, 0);
  if (!slen || TREE_CODE (slen) != INTEGER_CST)
    return false;

  /* The size of the source string including the terminating nul.  */
  tree ssize = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

  /* We do not support simplification of this case, though we do
     support it when expanding trees into RTL.  */
  /* FIXME: generate a call to __builtin_memset.  */
  if (tree_int_cst_lt (ssize, len))
    return false;

  if (!nonstring)
    {
      if (tree_int_cst_lt (len, slen))
        {
          tree fndecl = gimple_call_fndecl (stmt);
          gcall *call = as_a <gcall *> (stmt);

          warning_at (loc, OPT_Wstringop_truncation,
                      (tree_int_cst_equal (size_one_node, len)
                       ? G_("%G%qD output truncated copying %E byte "
                            "from a string of length %E")
                       : G_("%G%qD output truncated copying %E bytes "
                            "from a string of length %E")),
                      call, fndecl, len, slen);
        }
      else if (tree_int_cst_equal (len, slen))
        {
          tree fndecl = gimple_call_fndecl (stmt);
          gcall *call = as_a <gcall *> (stmt);

          warning_at (loc, OPT_Wstringop_truncation,
                      (tree_int_cst_equal (size_one_node, len)
                       ? G_("%G%qD output truncated before terminating nul "
                            "copying %E byte from a string of the same "
                            "length")
                       : G_("%G%qD output truncated before terminating nul "
                            "copying %E bytes from a string of the same "
                            "length")),
                      call, fndecl, len);
        }
    }

  /* OK transform into builtin memcpy.  */
  tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = force_gimple_operand_gsi (gsi, len, true,
                                  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}
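
/* Illustrative sketch, not part of the original source:

     char d[4];
     strncpy (d, "abc", 4);    // bound == strlen + 1: nul is copied
       -> memcpy (d, "abc", 4)

     strncpy (d, "abc", 3);    // -Wstringop-truncation: no nul
     strncpy (d, "abcdef", 4); // -Wstringop-truncation: cut short

   both warning cases are still folded into the equivalent memcpy;
   only a bound larger than strlen + 1, which must pad with nuls, is
   left alone here.  */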
/* Fold function call to builtin strchr or strrchr.
   If both arguments are constant, evaluate and fold the result,
   otherwise simplify str(r)chr (str, 0) into str + strlen (str).
   In general strlen is significantly faster than strchr
   due to being a simpler operation.  */
static bool
gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree str = gimple_call_arg (stmt, 0);
  tree c = gimple_call_arg (stmt, 1);
  location_t loc = gimple_location (stmt);
  const char *p;
  char ch;

  if (!gimple_call_lhs (stmt))
    return false;

  if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
    {
      const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);

      if (p1 == NULL)
        {
          replace_call_with_value (gsi, integer_zero_node);
          return true;
        }

      tree len = build_int_cst (size_type_node, p1 - p);
      gimple_seq stmts = NULL;
      gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
                                              POINTER_PLUS_EXPR, str, len);
      gimple_seq_add_stmt_without_update (&stmts, new_stmt);
      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  if (!integer_zerop (c))
    return false;

  /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size.  */
  if (is_strrchr && optimize_function_for_size_p (cfun))
    {
      tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
      if (strchr_fn)
        {
          gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
          replace_call_with_call_and_fold (gsi, repl);
          return true;
        }

      return false;
    }

  tree len;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);

  if (!strlen_fn)
    return false;

  /* Create newstr = strlen (str).  */
  gimple_seq stmts = NULL;
  gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
  gimple_set_location (new_stmt, loc);
  len = create_tmp_reg_or_ssa_name (size_type_node);
  gimple_call_set_lhs (new_stmt, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);

  /* Create (str p+ strlen (str)).  */
  new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
                                  POINTER_PLUS_EXPR, str, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);
  gsi_replace_with_seq_vops (gsi, stmts);

  /* gsi now points at the assignment to the lhs, get a
     stmt iterator to the strlen.
     ???  We can't use gsi_for_stmt as that doesn't work when the
     CFG isn't built yet.  */
  gimple_stmt_iterator gsi2 = *gsi;
  gsi_prev (&gsi2);
  fold_stmt (&gsi2);
  return true;
}
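
/* Illustrative sketch, not part of the original source: searching for
   the terminator,

     p = strchr (s, 0);

   is rewritten as

     tmp = strlen (s);
     p = s + tmp;          // POINTER_PLUS_EXPR

   and with both arguments constant, strchr ("hello", 'l') folds to a
   pointer two characters past the start of the literal.  */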
/* Fold function call to builtin strstr.
   If both arguments are constant, evaluate and fold the result,
   additionally fold strstr (x, "") into x and strstr (x, "c")
   into strchr (x, 'c').  */
static bool
gimple_fold_builtin_strstr (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree haystack = gimple_call_arg (stmt, 0);
  tree needle = gimple_call_arg (stmt, 1);
  const char *p, *q;

  if (!gimple_call_lhs (stmt))
    return false;

  q = c_getstr (needle);
  if (q == NULL)
    return false;

  if ((p = c_getstr (haystack)))
    {
      const char *r = strstr (p, q);

      if (r == NULL)
        {
          replace_call_with_value (gsi, integer_zero_node);
          return true;
        }

      tree len = build_int_cst (size_type_node, r - p);
      gimple_seq stmts = NULL;
      gimple *new_stmt
        = gimple_build_assign (gimple_call_lhs (stmt), POINTER_PLUS_EXPR,
                               haystack, len);
      gimple_seq_add_stmt_without_update (&stmts, new_stmt);
      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* For strstr (x, "") return x.  */
  if (q[0] == '\0')
    {
      replace_call_with_value (gsi, haystack);
      return true;
    }

  /* Transform strstr (x, "c") into strchr (x, 'c').  */
  if (q[1] == '\0')
    {
      tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
      if (strchr_fn)
        {
          tree c = build_int_cst (integer_type_node, q[0]);
          gimple *repl = gimple_build_call (strchr_fn, 2, haystack, c);
          replace_call_with_call_and_fold (gsi, repl);
          return true;
        }
    }

  return false;
}
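
/* Illustrative sketch, not part of the original source:

     strstr (x, "")         -> x
     strstr (x, "c")        -> strchr (x, 'c')
     strstr ("abcde", "cd") -> "abcde" + 2   // both constant

   and a constant haystack without a match folds to a null pointer.  */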
/* Simplify a call to the strcat builtin.  DST and SRC are the arguments
   to the call.

   Return false if no simplification was possible, otherwise replace
   the call in place (with a constant or another expression that
   computes the same value in a more efficient manner, possibly using
   calls to other builtin functions) and return true.  */

static bool
gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);

  const char *p = c_getstr (src);

  /* If the string length is zero, return the dst parameter.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
    return false;

  /* See if we can store by pieces into (dst + strlen(dst)).  */
  tree newdst;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
  tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);

  if (!strlen_fn || !memcpy_fn)
    return false;

  /* If the length of the source string isn't computable don't
     split strcat into strlen and memcpy.  */
  tree len = get_maxval_strlen (src, 0);
  if (! len)
    return false;

  /* Create strlen (dst).  */
  gimple_seq stmts = NULL, stmts2;
  gimple *repl = gimple_build_call (strlen_fn, 1, dst);
  gimple_set_location (repl, loc);
  newdst = create_tmp_reg_or_ssa_name (size_type_node);
  gimple_call_set_lhs (repl, newdst);
  gimple_seq_add_stmt_without_update (&stmts, repl);

  /* Create (dst p+ strlen (dst)).  */
  newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
  newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len,
                        build_int_cst (size_type_node, 1));
  len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
  gimple_seq_add_stmt_without_update (&stmts, repl);
  if (gimple_call_lhs (stmt))
    {
      repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      gsi_replace_with_seq_vops (gsi, stmts);
      /* gsi now points at the assignment to the lhs, get a
         stmt iterator to the memcpy call.
         ???  We can't use gsi_for_stmt as that doesn't work when the
         CFG isn't built yet.  */
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (&gsi2);
      fold_stmt (&gsi2);
    }
  else
    {
      gsi_replace_with_seq_vops (gsi, stmts);
      fold_stmt (gsi);
    }
  return true;
}
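
/* Illustrative sketch, not part of the original source: with a known
   source length,

     strcat (d, "xy");

   is split (when optimizing the block for speed) into

     tmp = strlen (d);
     memcpy (d + tmp, "xy", 3);   // source length + nul

   which exposes the copy to the store-by-pieces machinery.  */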
/* Fold a call to the __strcat_chk builtin FNDECL.  DEST, SRC, and SIZE
   are the arguments to the call.  */

static bool
gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree size = gimple_call_arg (stmt, 2);
  tree fn;
  const char *p;

  p = c_getstr (src);
  /* If the SRC parameter is "", return DEST.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
    return false;

  /* If __builtin_strcat_chk is used, assume strcat is available.  */
  fn = builtin_decl_explicit (BUILT_IN_STRCAT);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 2, dest, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Simplify a call to the strncat builtin.  */

static bool
gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dst = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  const char *p = c_getstr (src);

  /* If the requested length is zero, or the src parameter string
     length is zero, return the dst parameter.  */
  if (integer_zerop (len) || (p && *p == '\0'))
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  if (TREE_CODE (len) != INTEGER_CST || !p)
    return false;

  unsigned srclen = strlen (p);

  int cmpsrc = compare_tree_int (len, srclen);

  /* Return early if the requested len is less than the string length.
     Warnings will be issued elsewhere later.  */
  if (cmpsrc < 0)
    return false;

  unsigned HOST_WIDE_INT dstsize;

  bool nowarn = gimple_no_warning_p (stmt);

  if (!nowarn && compute_builtin_object_size (dst, 1, &dstsize))
    {
      int cmpdst = compare_tree_int (len, dstsize);

      if (cmpdst >= 0)
        {
          tree fndecl = gimple_call_fndecl (stmt);

          /* Strncat copies (at most) LEN bytes and always appends
             the terminating NUL so the specified bound should never
             be equal to (or greater than) the size of the destination.
             If it is, the copy could overflow.  */
          location_t loc = gimple_location (stmt);
          nowarn = warning_at (loc, OPT_Wstringop_overflow_,
                               cmpdst == 0
                               ? G_("%G%qD specified bound %E equals "
                                    "destination size")
                               : G_("%G%qD specified bound %E exceeds "
                                    "destination size %wu"),
                               stmt, fndecl, len, dstsize);
          if (nowarn)
            gimple_set_no_warning (stmt, true);
        }
    }

  if (!nowarn && cmpsrc == 0)
    {
      tree fndecl = gimple_call_fndecl (stmt);

      /* To avoid possible truncation the specified bound should also
         not be equal to (or less than) the length of the source.  */
      location_t loc = gimple_location (stmt);
      if (warning_at (loc, OPT_Wstringop_overflow_,
                      "%G%qD specified bound %E equals source length",
                      stmt, fndecl, len))
        gimple_set_no_warning (stmt, true);
    }

  tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);

  /* If the replacement _DECL isn't initialized, don't do the
     transformation.  */
  if (!fn)
    return false;

  /* Otherwise, emit a call to strcat.  */
  gcall *repl = gimple_build_call (fn, 2, dst, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
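
/* Illustrative sketch, not part of the original source: because
   strncat always appends the terminating nul on top of at most LEN
   copied bytes, a bound derived from the source or destination is
   suspicious:

     char d[8];
     strncat (d, "ab", 2);   // -Wstringop-overflow: bound == strlen
     strncat (d, "ab", 8);   // -Wstringop-overflow: bound == sizeof d

   either way the call itself is folded to strcat (d, "ab"), whose
   behavior is identical once LEN >= strlen (src).  */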
/* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
   LEN, and SIZE.  */

static bool
gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);
  tree size = gimple_call_arg (stmt, 3);
  tree fn;
  const char *p;

  p = c_getstr (src);
  /* If the SRC parameter is "" or if LEN is 0, return DEST.  */
  if ((p && *p == '\0')
      || integer_zerop (len))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  if (! integer_all_onesp (size))
    {
      tree src_len = c_strlen (src, 1);
      if (src_len
          && tree_fits_uhwi_p (src_len)
          && tree_fits_uhwi_p (len)
          && ! tree_int_cst_lt (len, src_len))
        {
          /* If LEN >= strlen (SRC), optimize into __strcat_chk.  */
          fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
          if (!fn)
            return false;

          gimple *repl = gimple_build_call (fn, 3, dest, src, size);
          replace_call_with_call_and_fold (gsi, repl);
          return true;
        }
      return false;
    }

  /* If __builtin_strncat_chk is used, assume strncat is available.  */
  fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Build and append gimple statements to STMTS that would load a first
   character of a memory location identified by STR.  LOC is location
   of the statement.  */

static tree
gimple_load_first_char (location_t loc, tree str, gimple_seq *stmts)
{
  tree var;

  tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
  tree cst_uchar_ptr_node
    = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
  tree off0 = build_int_cst (cst_uchar_ptr_node, 0);

  tree temp = fold_build2_loc (loc, MEM_REF, cst_uchar_node, str, off0);
  gassign *stmt = gimple_build_assign (NULL_TREE, temp);
  var = create_tmp_reg_or_ssa_name (cst_uchar_node, stmt);

  gimple_assign_set_lhs (stmt, var);
  gimple_seq_add_stmt_without_update (stmts, stmt);

  return var;
}
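
/* Illustrative example (added sketch, not part of the original sources):
   for a caller such as the "" cases in the string-compare fold below,
   gimple_load_first_char emits the GIMPLE equivalent of

     unsigned char _1 = *(const unsigned char *) str;

   i.e. a single MEM_REF load of the first byte, and returns the SSA
   name or temporary holding it.  */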
/* Fold a call to the str{n}{case}cmp builtin pointed to by the GSI
   iterator.  The function code is derived from the callee's decl.  */

static bool
gimple_fold_builtin_string_compare (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree callee = gimple_call_fndecl (stmt);
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);

  tree type = integer_type_node;
  tree str1 = gimple_call_arg (stmt, 0);
  tree str2 = gimple_call_arg (stmt, 1);
  tree lhs = gimple_call_lhs (stmt);
  HOST_WIDE_INT length = -1;

  /* Handle strncmp and strncasecmp functions.  */
  if (gimple_call_num_args (stmt) == 3)
    {
      tree len = gimple_call_arg (stmt, 2);
      if (tree_fits_uhwi_p (len))
	length = tree_to_uhwi (len);
    }

  /* If the LEN parameter is zero, return zero.  */
  if (length == 0)
    {
      replace_call_with_value (gsi, integer_zero_node);
      return true;
    }

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (str1, str2, 0))
    {
      replace_call_with_value (gsi, integer_zero_node);
      return true;
    }

  const char *p1 = c_getstr (str1);
  const char *p2 = c_getstr (str2);

  /* For known strings, return an immediate value.  */
  if (p1 && p2)
    {
      int r = 0;
      bool known_result = false;

      switch (fcode)
	{
	case BUILT_IN_STRCMP:
	  {
	    r = strcmp (p1, p2);
	    known_result = true;
	    break;
	  }
	case BUILT_IN_STRNCMP:
	  {
	    if (length == -1)
	      break;
	    r = strncmp (p1, p2, length);
	    known_result = true;
	    break;
	  }
	/* The only handleable situation is where the strings are equal
	   (result 0), which is already handled by the operand_equal_p
	   case above.  */
	case BUILT_IN_STRCASECMP:
	  break;
	case BUILT_IN_STRNCASECMP:
	  {
	    if (length == -1)
	      break;
	    r = strncmp (p1, p2, length);
	    if (r == 0)
	      known_result = true;
	    break;
	  }
	default:
	  gcc_unreachable ();
	}

      if (known_result)
	{
	  replace_call_with_value (gsi, build_cmp_result (type, r));
	  return true;
	}
    }

  bool nonzero_length = length >= 1
    || fcode == BUILT_IN_STRCMP
    || fcode == BUILT_IN_STRCASECMP;

  location_t loc = gimple_location (stmt);

  /* If the second arg is "", return *(const unsigned char*)arg1.  */
  if (p2 && *p2 == '\0' && nonzero_length)
    {
      gimple_seq stmts = NULL;
      tree var = gimple_load_first_char (loc, str1, &stmts);
      if (lhs)
	{
	  stmt = gimple_build_assign (lhs, NOP_EXPR, var);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);
	}

      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* If the first arg is "", return -*(const unsigned char*)arg2.  */
  if (p1 && *p1 == '\0' && nonzero_length)
    {
      gimple_seq stmts = NULL;
      tree var = gimple_load_first_char (loc, str2, &stmts);

      if (lhs)
	{
	  tree c = create_tmp_reg_or_ssa_name (integer_type_node);
	  stmt = gimple_build_assign (c, NOP_EXPR, var);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);

	  stmt = gimple_build_assign (lhs, NEGATE_EXPR, c);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);
	}

      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* If the len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - *(const unsigned char*)arg2).  */
  if (fcode == BUILT_IN_STRNCMP && length == 1)
    {
      gimple_seq stmts = NULL;
      tree temp1 = gimple_load_first_char (loc, str1, &stmts);
      tree temp2 = gimple_load_first_char (loc, str2, &stmts);

      if (lhs)
	{
	  tree c1 = create_tmp_reg_or_ssa_name (integer_type_node);
	  gassign *convert1 = gimple_build_assign (c1, NOP_EXPR, temp1);
	  gimple_seq_add_stmt_without_update (&stmts, convert1);

	  tree c2 = create_tmp_reg_or_ssa_name (integer_type_node);
	  gassign *convert2 = gimple_build_assign (c2, NOP_EXPR, temp2);
	  gimple_seq_add_stmt_without_update (&stmts, convert2);

	  stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);
	}

      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* If length is larger than the length of one constant string,
     replace strncmp with the corresponding strcmp.  */
  if (fcode == BUILT_IN_STRNCMP
      && length > 0
      && ((p2 && (size_t) length > strlen (p2))
	  || (p1 && (size_t) length > strlen (p1))))
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCMP);
      if (!fn)
	return false;
      gimple *repl = gimple_build_call (fn, 2, str1, str2);
      replace_call_with_call_and_fold (gsi, repl);
      return true;
    }

  return false;
}
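
/* Illustrative examples (added sketch, not part of the original sources;
   the operands are hypothetical): with both arguments constant,

     n = strcmp ("abc", "abd");

   folds to a compile-time constant (negative here), while

     n = strncmp (s1, s2, 1);

   folds to the difference of the two first bytes loaded through
   gimple_load_first_char above.  */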
/* Fold a call to the memchr pointed to by the GSI iterator.  */

static bool
gimple_fold_builtin_memchr (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  tree arg1 = gimple_call_arg (stmt, 0);
  tree arg2 = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
      return true;
    }

  char c;
  if (TREE_CODE (arg2) != INTEGER_CST
      || !tree_fits_uhwi_p (len)
      || !target_char_cst_p (arg2, &c))
    return false;

  unsigned HOST_WIDE_INT length = tree_to_uhwi (len);
  unsigned HOST_WIDE_INT string_length;
  const char *p1 = c_getstr (arg1, &string_length);

  if (p1)
    {
      const char *r = (const char *)memchr (p1, c, MIN (length, string_length));
      if (r == NULL)
	{
	  if (length <= string_length)
	    {
	      replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
	      return true;
	    }
	}
      else
	{
	  unsigned HOST_WIDE_INT offset = r - p1;
	  gimple_seq stmts = NULL;
	  if (lhs != NULL_TREE)
	    {
	      tree offset_cst = build_int_cst (TREE_TYPE (len), offset);
	      gassign *stmt = gimple_build_assign (lhs, POINTER_PLUS_EXPR,
						   arg1, offset_cst);
	      gimple_seq_add_stmt_without_update (&stmts, stmt);
	    }
	  else
	    gimple_seq_add_stmt_without_update (&stmts,
						gimple_build_nop ());

	  gsi_replace_with_seq_vops (gsi, stmts);
	  return true;
	}
    }

  return false;
}
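
/* Illustrative example (added sketch, not part of the original sources):
   with a constant haystack, the fold above turns

     p = memchr ("hello", 'l', 5);

   into the constant pointer "hello" + 2, and turns a miss over a fully
   searched constant range into a null pointer.  */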
/* Fold a call to the fputs builtin.  ARG0 and ARG1 are the arguments
   to the call.  UNLOCKED is true if this is actually a call to
   fputs_unlocked.  If the length of the string can be determined, it is
   used to pick the replacement.  Return false if no simplification was
   possible.  */

static bool
gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
			   tree arg0, tree arg1,
			   bool unlocked)
{
  gimple *stmt = gsi_stmt (*gsi);

  /* If we're using an unlocked function, assume the other unlocked
     functions exist explicitly.  */
  tree const fn_fputc = (unlocked
			 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
			 : builtin_decl_implicit (BUILT_IN_FPUTC));
  tree const fn_fwrite = (unlocked
			  ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
			  : builtin_decl_implicit (BUILT_IN_FWRITE));

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt))
    return false;

  /* Get the length of the string passed to fputs.  If the length
     can't be determined, punt.  */
  tree len = get_maxval_strlen (arg0, 0);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return false;

  switch (compare_tree_int (len, 1))
    {
    case -1: /* length is 0, delete the call entirely.  */
      replace_call_with_value (gsi, integer_zero_node);
      return true;

    case 0: /* length is 1, call fputc.  */
      {
	const char *p = c_getstr (arg0);
	if (p != NULL)
	  {
	    if (!fn_fputc)
	      return false;

	    gimple *repl = gimple_build_call (fn_fputc, 2,
					      build_int_cst
					      (integer_type_node, p[0]), arg1);
	    replace_call_with_call_and_fold (gsi, repl);
	    return true;
	  }
      }
      /* FALLTHROUGH */
    case 1: /* length is greater than 1, call fwrite.  */
      {
	/* If optimizing for size keep fputs.  */
	if (optimize_function_for_size_p (cfun))
	  return false;
	/* New argument list transforming fputs(string, stream) to
	   fwrite(string, 1, len, stream).  */
	if (!fn_fwrite)
	  return false;

	gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
					  size_one_node, len, arg1);
	replace_call_with_call_and_fold (gsi, repl);
	return true;
      }
    default:
      gcc_unreachable ();
    }
}
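
/* Illustrative examples (added sketch, not part of the original sources),
   assuming the result of fputs is unused:

     fputs ("", f);     // folded away entirely
     fputs ("x", f);    // becomes fputc ('x', f)
     fputs ("xy", f);   // becomes fwrite ("xy", 1, 2, f),
                        // unless optimizing for size.  */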
/* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   DEST, SRC, LEN, and SIZE are the arguments to the call.
   FCODE is the BUILT_IN_* code of the builtin.  The return value can
   be ignored when the call has no lhs.  */

static bool
gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
				tree dest, tree src, tree len, tree size,
				enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST
     (resp. DEST+LEN for __mempcpy_chk).  */
  if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
    {
      if (fcode != BUILT_IN_MEMMOVE && fcode != BUILT_IN_MEMMOVE_CHK)
	{
	  tree func = gimple_call_fndecl (stmt);

	  warning_at (loc, OPT_Wrestrict,
		      "%qD source argument is the same as destination",
		      func);
	}

      if (fcode != BUILT_IN_MEMPCPY_CHK)
	{
	  replace_call_with_value (gsi, dest);
	  return true;
	}
      else
	{
	  gimple_seq stmts = NULL;
	  len = gimple_convert_to_ptrofftype (&stmts, loc, len);
	  tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
				    TREE_TYPE (dest), dest, len);
	  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
	  replace_call_with_value (gsi, temp);
	  return true;
	}
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (len, 2);
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
		{
		  /* (void) __mempcpy_chk () can be optimized into
		     (void) __memcpy_chk ().  */
		  fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
		  if (!fn)
		    return false;

		  gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}
	      return false;
	    }
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  fn = NULL_TREE;
  /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
     mem{cpy,pcpy,move,set} is available.  */
  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
      break;
    case BUILT_IN_MEMPCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
      break;
    case BUILT_IN_MEMMOVE_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
      break;
    case BUILT_IN_MEMSET_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMSET);
      break;
    default:
      break;
    }

  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
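
/* Illustrative example (added sketch, not part of the original sources):
   when the object size argument is unknown ((size_t) -1) or provably
   no smaller than the copy length, the fold above turns

     __builtin___memcpy_chk (d, s, n, sz);

   into a plain

     memcpy (d, s, n);

   dropping the runtime overflow check.  */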
/* Fold a call to the __st[rp]cpy_chk builtin.
   DEST, SRC, and SIZE are the arguments to the call.
   FCODE is the BUILT_IN_* code of the builtin.  The return value can
   be ignored when the call has no lhs.  */

static bool
gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
				tree dest,
				tree src, tree size,
				enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree len, fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
    {
      tree func = gimple_call_fndecl (stmt);

      warning_at (loc, OPT_Wrestrict,
		  "%qD source argument is the same as destination",
		  func);

      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (src, 1);
  if (! integer_all_onesp (size))
    {
      len = c_strlen (src, 1);
      if (! len || ! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_STPCPY_CHK)
		{
		  if (! ignore)
		    return false;

		  /* If return value of __stpcpy_chk is ignored,
		     optimize into __strcpy_chk.  */
		  fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
		  if (!fn)
		    return false;

		  gimple *repl = gimple_build_call (fn, 3, dest, src, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}

	      if (! len || TREE_SIDE_EFFECTS (len))
		return false;

	      /* If c_strlen returned something, but not a constant,
		 transform __strcpy_chk into __memcpy_chk.  */
	      fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	      if (!fn)
		return false;

	      gimple_seq stmts = NULL;
	      len = gimple_convert (&stmts, loc, size_type_node, len);
	      len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
				  build_int_cst (size_type_node, 1));
	      gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
	      gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }
	}
      else
	maxlen = len;

      if (! tree_int_cst_lt (maxlen, size))
	return false;
    }

  /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
			      ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 2, dest, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
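
/* Illustrative example (added sketch, not part of the original sources;
   the sizes are hypothetical): with a source of known length 3 and a
   destination object known to hold at least 4 bytes, the fold above turns

     __builtin___strcpy_chk (d, "abc", sz);

   into

     strcpy (d, "abc");  */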
/* Fold a call to the __st{r,p}ncpy_chk builtin.  DEST, SRC, LEN, and SIZE
   are the arguments to the call.  FCODE is the BUILT_IN_* code of the
   builtin.  The return value can be ignored when the call has no lhs.  */

static bool
gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
				 tree dest, tree src,
				 tree len, tree size,
				 enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
    {
      /* If return value of __stpncpy_chk is ignored,
	 optimize into __strncpy_chk.  */
      fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
      if (fn)
	{
	  gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (len, 2);
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    return false;
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
			      ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold function call to builtin stpcpy with arguments DEST and SRC.
   Return false if no simplification can be made.  */

static bool
gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  location_t loc = gimple_location (stmt);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree fn, len, lenp1;

  /* If the result is unused, replace stpcpy with strcpy.  */
  if (gimple_call_lhs (stmt) == NULL_TREE)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;
      gimple_call_set_fndecl (stmt, fn);
      fold_stmt (gsi);
      return true;
    }

  len = c_strlen (src, 1);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return false;

  if (optimize_function_for_size_p (cfun)
      /* If length is zero it's small enough.  */
      && !integer_zerop (len))
    return false;

  /* If the source has a known length replace stpcpy with memcpy.  */
  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  gimple_seq stmts = NULL;
  tree tem = gimple_convert (&stmts, loc, size_type_node, len);
  lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
			tem, build_int_cst (size_type_node, 1));
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
  gimple_set_vuse (repl, gimple_vuse (stmt));
  gimple_set_vdef (repl, gimple_vdef (stmt));
  if (gimple_vdef (repl)
      && TREE_CODE (gimple_vdef (repl)) == SSA_NAME)
    SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
  gsi_insert_before (gsi, repl, GSI_SAME_STMT);
  /* Replace the result with dest + len.  */
  stmts = NULL;
  tem = gimple_convert (&stmts, loc, sizetype, len);
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
				      POINTER_PLUS_EXPR, dest, tem);
  gsi_replace (gsi, ret, false);
  /* Finally fold the memcpy call.  */
  gimple_stmt_iterator gsi2 = *gsi;
  gsi_prev (&gsi2);
  fold_stmt (&gsi2);
  return true;
}
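
/* Illustrative example (added sketch, not part of the original sources):
   when the result is used and strlen (src) folds to the constant 3,
   the transformation above lowers

     q = stpcpy (d, src);

   to the equivalent of

     memcpy (d, src, 4);   // 3 + 1 for the terminating NUL
     q = d + 3;  */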
/* Fold a call to __{,v}snprintf_chk at *GSI.  Return false if a normal
   call should be emitted rather than expanding the function inline.
   FCODE is either BUILT_IN_SNPRINTF_CHK or BUILT_IN_VSNPRINTF_CHK.  */

static bool
gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
				  enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call.  */
  if (gimple_call_num_args (stmt) < 5)
    return false;

  dest = gimple_call_arg (stmt, 0);
  len = gimple_call_arg (stmt, 1);
  flag = gimple_call_arg (stmt, 2);
  size = gimple_call_arg (stmt, 3);
  fmt = gimple_call_arg (stmt, 4);

  if (! tree_fits_uhwi_p (size))
    return false;

  if (! integer_all_onesp (size))
    {
      tree maxlen = get_maxval_strlen (len, 2);
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    return false;
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  if (!init_target_chars ())
    return false;

  /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      fmt_str = c_getstr (fmt);
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
     available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
			      ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 5 arguments by 3,
     retaining trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, len);
  gimple_call_set_arg (stmt, 2, fmt);
  for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
/* Fold a call to __{,v}sprintf_chk at *GSI.  Return false if a normal
   call should be emitted rather than expanding the function inline.
   FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK.  */

static bool
gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
				 enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;
  unsigned nargs = gimple_call_num_args (stmt);

  /* Verify the required arguments in the original call.  */
  if (nargs < 4)
    return false;
  dest = gimple_call_arg (stmt, 0);
  flag = gimple_call_arg (stmt, 1);
  size = gimple_call_arg (stmt, 2);
  fmt = gimple_call_arg (stmt, 3);

  if (! tree_fits_uhwi_p (size))
    return false;

  len = NULL_TREE;

  if (!init_target_chars ())
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str != NULL)
    {
      /* If the format doesn't contain % args or %%, we know the size.  */
      if (strchr (fmt_str, target_percent) == 0)
	{
	  if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
	    len = build_int_cstu (size_type_node, strlen (fmt_str));
	}
      /* If the format is "%s" and first ... argument is a string literal,
	 we know the size too.  */
      else if (fcode == BUILT_IN_SPRINTF_CHK
	       && strcmp (fmt_str, target_percent_s) == 0)
	{
	  tree arg;

	  if (nargs == 5)
	    {
	      arg = gimple_call_arg (stmt, 4);
	      if (POINTER_TYPE_P (TREE_TYPE (arg)))
		{
		  len = c_strlen (arg, 1);
		  if (! len || ! tree_fits_uhwi_p (len))
		    len = NULL_TREE;
		}
	    }
	}
    }

  if (! integer_all_onesp (size))
    {
      if (! len || ! tree_int_cst_lt (len, size))
	return false;
    }

  /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
			      ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 4 arguments by 2,
     retaining trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, fmt);
  for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
/* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
   ORIG may be null if this is a 2-argument call.  We don't attempt to
   simplify calls with more than 3 arguments.

   Return true if simplification was possible, otherwise false.  */

bool
gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree fmt = gimple_call_arg (stmt, 1);
  tree orig = NULL_TREE;
  const char *fmt_str = NULL;

  /* Verify the required arguments in the original call.  We deal with two
     types of sprintf() calls: 'sprintf (str, fmt)' and
     'sprintf (dest, "%s", orig)'.  */
  if (gimple_call_num_args (stmt) > 3)
    return false;

  if (gimple_call_num_args (stmt) == 3)
    orig = gimple_call_arg (stmt, 2);

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);

      if (!fn)
	return false;

      /* Don't optimize sprintf (buf, "abc", ptr++).  */
      if (orig)
	return false;

      /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
	 'format' is known to contain no % formats.  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, fmt);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  repl = gimple_build_assign (gimple_call_lhs (stmt),
				      build_int_cst (integer_type_node,
						     strlen (fmt_str)));
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);

      if (!fn)
	return false;

      /* Don't crash on sprintf (str1, "%s").  */
      if (!orig)
	return false;

      tree orig_len = NULL_TREE;
      if (gimple_call_lhs (stmt))
	{
	  orig_len = get_maxval_strlen (orig, 0);
	  if (!orig_len)
	    return false;
	}

      /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2).  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, orig);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  if (!useless_type_conversion_p (integer_type_node,
					  TREE_TYPE (orig_len)))
	    orig_len = fold_convert (integer_type_node, orig_len);
	  repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }
  return false;
}
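
/* Illustrative examples (added sketch, not part of the original sources):

     sprintf (d, "hello");      // becomes strcpy (d, "hello");
                                // a used result becomes the constant 5
     n = sprintf (d, "%s", s);  // becomes strcpy (d, s); n is set from
                                // the string length computed by
                                // get_maxval_strlen, when known.  */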
/* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
   FMT, and ORIG.  ORIG may be null if this is a 3-argument call.  We don't
   attempt to simplify calls with more than 4 arguments.

   Return true if simplification was possible, otherwise false.  */

bool
gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest = gimple_call_arg (stmt, 0);
  tree destsize = gimple_call_arg (stmt, 1);
  tree fmt = gimple_call_arg (stmt, 2);
  tree orig = NULL_TREE;
  const char *fmt_str = NULL;

  if (gimple_call_num_args (stmt) > 4)
    return false;

  if (gimple_call_num_args (stmt) == 4)
    orig = gimple_call_arg (stmt, 3);

  if (!tree_fits_uhwi_p (destsize))
    return false;
  unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;

      /* Don't optimize snprintf (buf, 4, "abc", ptr++).  */
      if (orig)
	return false;

      /* We could expand this as
	 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
	 or to
	 memcpy (str, fmt_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      size_t len = strlen (fmt_str);
      if (len >= destlen)
	return false;

      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, fmt);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  repl = gimple_build_assign (gimple_call_lhs (stmt),
				      build_int_cst (integer_type_node, len));
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;

      /* Don't crash on snprintf (str1, cst, "%s").  */
      if (!orig)
	return false;

      tree orig_len = get_maxval_strlen (orig, 0);
      if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
	return false;

      /* We could expand this as
	 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
	 or to
	 memcpy (str1, str2_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      if (compare_tree_int (orig_len, destlen) >= 0)
	return false;

      /* Convert snprintf (str1, cst, "%s", str2) into
	 strcpy (str1, str2) if strlen (str2) < cst.  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, orig);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  if (!useless_type_conversion_p (integer_type_node,
					  TREE_TYPE (orig_len)))
	    orig_len = fold_convert (integer_type_node, orig_len);
	  repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }
  return false;
}
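
/* Illustrative example (added sketch, not part of the original sources;
   the sizes are hypothetical): with a known destination size of 16,
   the fold above turns

     snprintf (d, 16, "%s", "abc");

   into

     strcpy (d, "abc");

   because strlen ("abc") == 3 < 16 guarantees no truncation.  */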
/* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
   FP, FMT, and ARG are the arguments to the call.  We don't fold calls with
   more than 3 arguments, and ARG may be null in the 2-argument case.

   Return false if no simplification was possible, otherwise true.  FCODE
   is the BUILT_IN_* code of the function to be simplified.  */

static bool
gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
			     tree fp, tree fmt, tree arg,
			     enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree fn_fputc, fn_fputs;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt) != NULL_TREE)
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
      fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
    }
  else
    {
      fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
      fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
    }

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use fputs.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
	  && arg)
	return false;

      /* If the format specifier was "", fprintf does nothing.  */
      if (fmt_str[0] == '\0')
	{
	  replace_call_with_value (gsi, NULL_TREE);
	  return true;
	}

      /* When "string" doesn't contain %, replace all cases of
	 fprintf (fp, string) with fputs (string, fp).  The fputs
	 builtin will take care of special cases like length == 1.  */
      if (fn_fputs)
	{
	  gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
    return false;

  /* If the format specifier was "%s", call __builtin_fputs (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_s) == 0)
    {
      if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	return false;
      if (fn_fputs)
	{
	  gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* If the format specifier was "%c", call __builtin_fputc (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg
	  || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
	return false;
      if (fn_fputc)
	{
	  gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  return false;
}
/* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
   FMT and ARG are the arguments to the call; we don't fold cases with
   more than 2 arguments, and ARG may be null if this is a 1-argument case.

   Return false if no simplification was possible, otherwise true.  FCODE
   is the BUILT_IN_* code of the function to be simplified.  */

static bool
gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
			    tree arg, enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree fn_putchar, fn_puts, newarg;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt) != NULL_TREE)
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (fcode == BUILT_IN_PRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
      fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
    }
  else
    {
      fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
      fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
    }

  if (!init_target_chars ())
    return false;

  if (strcmp (fmt_str, target_percent_s) == 0
      || strchr (fmt_str, target_percent) == NULL)
    {
      const char *str;

      if (strcmp (fmt_str, target_percent_s) == 0)
	{
	  if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
	    return false;

	  if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	    return false;

	  str = c_getstr (arg);
	  if (str == NULL)
	    return false;
	}
      else
	{
	  /* The format specifier doesn't contain any '%' characters.  */
	  if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
	      && arg)
	    return false;
	  str = fmt_str;
	}

      /* If the string was "", printf does nothing.  */
      if (str[0] == '\0')
	{
	  replace_call_with_value (gsi, NULL_TREE);
	  return true;
	}

      /* If the string has length of 1, call putchar.  */
      if (str[1] == '\0')
	{
	  /* Given printf("c"), (where c is any one character,)
	     convert "c"[0] to an int and pass that to the replacement
	     function.  */
	  newarg = build_int_cst (integer_type_node, str[0]);
	  if (fn_putchar)
	    {
	      gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }
	}
      else
	{
	  /* If the string was "string\n", call puts("string").  */
	  size_t len = strlen (str);
	  if ((unsigned char)str[len - 1] == target_newline
	      && (size_t) (int) len == len
	      && (int) len > 0)
	    {
	      char *newstr;
	      tree offset_node, string_cst;

	      /* Create a NUL-terminated string that's one char shorter
		 than the original, stripping off the trailing '\n'.  */
	      newarg = build_string_literal (len, str);
	      string_cst = string_constant (newarg, &offset_node);
	      gcc_checking_assert (string_cst
				   && (TREE_STRING_LENGTH (string_cst)
				       == (int) len)
				   && integer_zerop (offset_node)
				   && (unsigned char)
				      TREE_STRING_POINTER (string_cst)[len - 1]
				      == target_newline);
	      /* build_string_literal creates a new STRING_CST,
		 modify it in place to avoid double copying.  */
	      newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
	      newstr[len - 1] = '\0';
	      if (fn_puts)
		{
		  gcall *repl = gimple_build_call (fn_puts, 1, newarg);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}
	    }
	  else
	    /* We'd like to arrange to call fputs(string,stdout) here,
	       but we need stdout and don't have a way to get it yet.  */
	    return false;
	}
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
    return false;

  /* If the format specifier was "%s\n", call __builtin_puts(arg).  */
  else if (strcmp (fmt_str, target_percent_s_newline) == 0)
    {
      if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	return false;
      if (fn_puts)
	{
	  gcall *repl = gimple_build_call (fn_puts, 1, arg);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* If the format specifier was "%c", call __builtin_putchar(arg).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg || ! useless_type_conversion_p (integer_type_node,
					       TREE_TYPE (arg)))
	return false;
      if (fn_putchar)
	{
	  gcall *repl = gimple_build_call (fn_putchar, 1, arg);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  return false;
}
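
/* Illustrative examples (added sketch, not part of the original sources),
   assuming the return value is unused:

     printf ("");          // folded away
     printf ("x");         // becomes putchar ('x')
     printf ("hi\n");      // becomes puts ("hi")
     printf ("%s\n", s);   // becomes puts (s)
     printf ("%c", c);     // becomes putchar (c)  */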
/* Fold a call to __builtin_strlen with known length LEN.  */

static bool
gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree len = get_maxval_strlen (gimple_call_arg (stmt, 0), 0);
  if (!len)
    return false;
  len = force_gimple_operand_gsi (gsi, len, true, NULL, true, GSI_SAME_STMT);
  replace_call_with_value (gsi, len);
  return true;
}
/* Fold a call to __builtin_acc_on_device.  */

static bool
gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
{
  /* Defer folding until we know which compiler we're in.  */
  if (symtab->state != EXPANSION)
    return false;

  unsigned val_host = GOMP_DEVICE_HOST;
  unsigned val_dev = GOMP_DEVICE_NONE;

#ifdef ACCEL_COMPILER
  val_host = GOMP_DEVICE_NOT_HOST;
  val_dev = ACCEL_COMPILER_acc_device;
#endif

  location_t loc = gimple_location (gsi_stmt (*gsi));

  tree host_eq = make_ssa_name (boolean_type_node);
  gimple *host_ass = gimple_build_assign
    (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
  gimple_set_location (host_ass, loc);
  gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);

  tree dev_eq = make_ssa_name (boolean_type_node);
  gimple *dev_ass = gimple_build_assign
    (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
  gimple_set_location (dev_ass, loc);
  gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);

  tree result = make_ssa_name (boolean_type_node);
  gimple *result_ass = gimple_build_assign
    (result, BIT_IOR_EXPR, host_eq, dev_eq);
  gimple_set_location (result_ass, loc);
  gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);

  replace_call_with_value (gsi, result);

  return true;
}
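
/* Illustrative example (added sketch, not part of the original sources):
   in the host compiler the fold above reduces

     acc_on_device (dev)

   to the boolean expression

     dev == GOMP_DEVICE_HOST || dev == GOMP_DEVICE_NONE

   with the two constants replaced appropriately when ACCEL_COMPILER
   is defined.  */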
/* Fold realloc (0, n) -> malloc (n).  */

static bool
gimple_fold_builtin_realloc (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree arg = gimple_call_arg (stmt, 0);
  tree size = gimple_call_arg (stmt, 1);

  if (operand_equal_p (arg, null_pointer_node, 0))
    {
      tree fn_malloc = builtin_decl_implicit (BUILT_IN_MALLOC);
      if (fn_malloc)
	{
	  gcall *repl = gimple_build_call (fn_malloc, 1, size);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }
  return false;
}
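
/* Illustrative example (added sketch, not part of the original sources):
   the fold above rewrites

     p = realloc (NULL, n);

   into

     p = malloc (n);

   which matches the C semantics of realloc on a null pointer.  */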
/* Fold the non-target builtin at *GSI and return whether any simplification
   was made.  */

static bool
gimple_fold_builtin (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
  tree callee = gimple_call_fndecl (stmt);

  /* Give up for always_inline inline builtins until they are
     inlined.  */
  if (avoid_folding_inline_builtin (callee))
    return false;

  unsigned n = gimple_call_num_args (stmt);
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
  switch (fcode)
    {
    case BUILT_IN_BCMP:
      return gimple_fold_builtin_bcmp (gsi);
    case BUILT_IN_BCOPY:
      return gimple_fold_builtin_bcopy (gsi);
    case BUILT_IN_BZERO:
      return gimple_fold_builtin_bzero (gsi);

    case BUILT_IN_MEMSET:
      return gimple_fold_builtin_memset (gsi,
					 gimple_call_arg (stmt, 1),
					 gimple_call_arg (stmt, 2));
    case BUILT_IN_MEMCPY:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1), 0);
    case BUILT_IN_MEMPCPY:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1), 1);
    case BUILT_IN_MEMMOVE:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1), 3);
    case BUILT_IN_SPRINTF_CHK:
    case BUILT_IN_VSPRINTF_CHK:
      return gimple_fold_builtin_sprintf_chk (gsi, fcode);
    case BUILT_IN_STRCAT_CHK:
      return gimple_fold_builtin_strcat_chk (gsi);
    case BUILT_IN_STRNCAT_CHK:
      return gimple_fold_builtin_strncat_chk (gsi);
    case BUILT_IN_STRLEN:
      return gimple_fold_builtin_strlen (gsi);
    case BUILT_IN_STRCPY:
      return gimple_fold_builtin_strcpy (gsi,
					 gimple_call_arg (stmt, 0),
					 gimple_call_arg (stmt, 1));
    case BUILT_IN_STRNCPY:
      return gimple_fold_builtin_strncpy (gsi,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2));
    case BUILT_IN_STRCAT:
      return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
					 gimple_call_arg (stmt, 1));
    case BUILT_IN_STRNCAT:
      return gimple_fold_builtin_strncat (gsi);
    case BUILT_IN_INDEX:
    case BUILT_IN_STRCHR:
      return gimple_fold_builtin_strchr (gsi, false);
    case BUILT_IN_RINDEX:
    case BUILT_IN_STRRCHR:
      return gimple_fold_builtin_strchr (gsi, true);
    case BUILT_IN_STRSTR:
      return gimple_fold_builtin_strstr (gsi);
    case BUILT_IN_STRCMP:
    case BUILT_IN_STRCASECMP:
    case BUILT_IN_STRNCMP:
    case BUILT_IN_STRNCASECMP:
      return gimple_fold_builtin_string_compare (gsi);
    case BUILT_IN_MEMCHR:
      return gimple_fold_builtin_memchr (gsi);
    case BUILT_IN_FPUTS:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1), false);
    case BUILT_IN_FPUTS_UNLOCKED:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1), true);
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      return gimple_fold_builtin_memory_chk (gsi,
					     gimple_call_arg (stmt, 0),
					     gimple_call_arg (stmt, 1),
					     gimple_call_arg (stmt, 2),
					     gimple_call_arg (stmt, 3),
					     fcode);
    case BUILT_IN_STPCPY:
      return gimple_fold_builtin_stpcpy (gsi);
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      return gimple_fold_builtin_stxcpy_chk (gsi,
					     gimple_call_arg (stmt, 0),
					     gimple_call_arg (stmt, 1),
					     gimple_call_arg (stmt, 2),
					     fcode);
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      return gimple_fold_builtin_stxncpy_chk (gsi,
					      gimple_call_arg (stmt, 0),
					      gimple_call_arg (stmt, 1),
					      gimple_call_arg (stmt, 2),
					      gimple_call_arg (stmt, 3),
					      fcode);
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      return gimple_fold_builtin_snprintf_chk (gsi, fcode);

    case BUILT_IN_FPRINTF:
    case BUILT_IN_FPRINTF_UNLOCKED:
    case BUILT_IN_VFPRINTF:
      if (n == 2 || n == 3)
	return gimple_fold_builtin_fprintf (gsi,
					    gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1),
					    n == 3
					    ? gimple_call_arg (stmt, 2)
					    : NULL_TREE,
					    fcode);
      break;
    case BUILT_IN_FPRINTF_CHK:
    case BUILT_IN_VFPRINTF_CHK:
      if (n == 3 || n == 4)
	return gimple_fold_builtin_fprintf (gsi,
					    gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 2),
					    n == 4
					    ? gimple_call_arg (stmt, 3)
					    : NULL_TREE,
					    fcode);
      break;
    case BUILT_IN_PRINTF:
    case BUILT_IN_PRINTF_UNLOCKED:
    case BUILT_IN_VPRINTF:
      if (n == 1 || n == 2)
	return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
					   n == 2
					   ? gimple_call_arg (stmt, 1)
					   : NULL_TREE, fcode);
      break;
    case BUILT_IN_PRINTF_CHK:
    case BUILT_IN_VPRINTF_CHK:
      if (n == 2 || n == 3)
	return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
					   n == 3
					   ? gimple_call_arg (stmt, 2)
					   : NULL_TREE, fcode);
      break;
    case BUILT_IN_ACC_ON_DEVICE:
      return gimple_fold_builtin_acc_on_device (gsi,
						gimple_call_arg (stmt, 0));
    case BUILT_IN_REALLOC:
      return gimple_fold_builtin_realloc (gsi);

    default:;
    }

  /* Try the generic builtin folder.  */
  bool ignore = (gimple_call_lhs (stmt) == NULL);
  tree result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
	STRIP_NOPS (result);
      else
	result = fold_convert (gimple_call_return_type (stmt), result);
      if (!update_call_from_tree (gsi, result))
	gimplify_and_update_call_from_tree (gsi, result);
      return true;
    }

  return false;
}
/* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
   function calls to constants, where possible.  */

static tree
fold_internal_goacc_dim (const gimple *call)
{
  int axis = oacc_get_ifn_dim_arg (call);
  int size = oacc_get_fn_dim_size (current_function_decl, axis);
  tree result = NULL_TREE;
  tree type = TREE_TYPE (gimple_call_lhs (call));

  switch (gimple_call_internal_fn (call))
    {
    case IFN_GOACC_DIM_POS:
      /* If the size is 1, we know the answer.  */
      if (size == 1)
	result = build_int_cst (type, 0);
      break;
    case IFN_GOACC_DIM_SIZE:
      /* If the size is not dynamic, we know the answer.  */
      if (size)
	result = build_int_cst (type, size);
      break;
    default:
      break;
    }

  return result;
}
/* Return true if stmt is __atomic_compare_exchange_N call which is suitable
   for conversion into ATOMIC_COMPARE_EXCHANGE if the second argument is
   &var where var is only addressable because of such calls.  */

bool
optimize_atomic_compare_exchange_p (gimple *stmt)
{
  if (gimple_call_num_args (stmt) != 6
      || !flag_inline_atomics
      || !optimize
      || sanitize_flags_p (SANITIZE_THREAD | SANITIZE_ADDRESS)
      || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
      || !gimple_vdef (stmt)
      || !gimple_vuse (stmt))
    return false;

  tree fndecl = gimple_call_fndecl (stmt);
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
      break;
    default:
      return false;
    }

  tree expected = gimple_call_arg (stmt, 1);
  if (TREE_CODE (expected) != ADDR_EXPR
      || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
    return false;

  tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
  if (!is_gimple_reg_type (etype)
      || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
      || TREE_THIS_VOLATILE (etype)
      || VECTOR_TYPE_P (etype)
      || TREE_CODE (etype) == COMPLEX_TYPE
      /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
	 might not preserve all the bits.  See PR71716.  */
      || SCALAR_FLOAT_TYPE_P (etype)
      || maybe_ne (TYPE_PRECISION (etype),
		   GET_MODE_BITSIZE (TYPE_MODE (etype))))
    return false;

  tree weak = gimple_call_arg (stmt, 3);
  if (!integer_zerop (weak) && !integer_onep (weak))
    return false;

  tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
  machine_mode mode = TYPE_MODE (itype);

  if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
      == CODE_FOR_nothing
      && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
    return false;

  if (maybe_ne (int_size_in_bytes (etype), GET_MODE_SIZE (mode)))
    return false;

  return true;
}
/* Fold
     r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
   into
     _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
     i = IMAGPART_EXPR <t>;
     r = (bool) i;
     e = REALPART_EXPR <t>;  */

void
fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree fndecl = gimple_call_fndecl (stmt);
  tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
  tree ctype = build_complex_type (itype);
  tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
  bool throws = false;
  edge e = NULL;
  gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
				   expected);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);
  gimple_stmt_iterator gsiret = gsi_for_stmt (g);
  if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
    {
      g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
			       build1 (VIEW_CONVERT_EXPR, itype,
				       gimple_assign_lhs (g)));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }
  int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
	     + int_size_in_bytes (itype);
  g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
				  gimple_call_arg (stmt, 0),
				  gimple_assign_lhs (g),
				  gimple_call_arg (stmt, 2),
				  build_int_cst (integer_type_node, flag),
				  gimple_call_arg (stmt, 4),
				  gimple_call_arg (stmt, 5));
  tree lhs = make_ssa_name (ctype);
  gimple_call_set_lhs (g, lhs);
  gimple_set_vdef (g, gimple_vdef (stmt));
  gimple_set_vuse (g, gimple_vuse (stmt));
  SSA_NAME_DEF_STMT (gimple_vdef (g)) = g;
  tree oldlhs = gimple_call_lhs (stmt);
  if (stmt_can_throw_internal (stmt))
    {
      throws = true;
      e = find_fallthru_edge (gsi_bb (*gsi)->succs);
    }
  gimple_call_set_nothrow (as_a <gcall *> (g),
			   gimple_call_nothrow_p (as_a <gcall *> (stmt)));
  gimple_call_set_lhs (stmt, NULL_TREE);
  gsi_replace (gsi, g, true);
  if (oldlhs)
    {
      g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
			       build1 (IMAGPART_EXPR, itype, lhs));
      if (throws)
	{
	  gsi_insert_on_edge_immediate (e, g);
	  *gsi = gsi_for_stmt (g);
	}
      else
	gsi_insert_after (gsi, g, GSI_NEW_STMT);
      g = gimple_build_assign (oldlhs, NOP_EXPR, gimple_assign_lhs (g));
      gsi_insert_after (gsi, g, GSI_NEW_STMT);
    }
  g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
			   build1 (REALPART_EXPR, itype, lhs));
  if (throws && oldlhs == NULL_TREE)
    {
      gsi_insert_on_edge_immediate (e, g);
      *gsi = gsi_for_stmt (g);
    }
  else
    gsi_insert_after (gsi, g, GSI_NEW_STMT);
  if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
    {
      g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
			       VIEW_CONVERT_EXPR,
			       build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
				       gimple_assign_lhs (g)));
      gsi_insert_after (gsi, g, GSI_NEW_STMT);
    }
  g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
  gsi_insert_after (gsi, g, GSI_NEW_STMT);
  *gsi = gsiret;
}
/* Return true if ARG0 CODE ARG1 in infinite signed precision operation
   doesn't fit into TYPE.  The test for overflow should be regardless of
   -fwrapv, and even for unsigned types.  */

bool
arith_overflowed_p (enum tree_code code, const_tree type,
		    const_tree arg0, const_tree arg1)
{
  typedef FIXED_WIDE_INT (WIDE_INT_MAX_PRECISION * 2) widest2_int;
  typedef generic_wide_int <wi::extended_tree <WIDE_INT_MAX_PRECISION * 2> >
    widest2_int_cst;
  widest2_int warg0 = widest2_int_cst (arg0);
  widest2_int warg1 = widest2_int_cst (arg1);
  widest2_int wres;
  switch (code)
    {
    case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
    case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
    case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
    default: gcc_unreachable ();
    }
  signop sign = TYPE_SIGN (type);
  if (sign == UNSIGNED && wi::neg_p (wres))
    return true;
  return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
}
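
/* Illustrative example (added sketch, not part of the original sources):
   for an 8-bit unsigned type, arith_overflowed_p (PLUS_EXPR, type,
   200, 100) computes 300 in double-width precision, which needs more
   than 8 bits, so it returns true; 200 - 100 fits and yields false.
   A negative double-width result also overflows an unsigned type.  */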
/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.  */

static bool
gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree callee;
  bool changed = false;
  unsigned i;

  /* Fold *& in call arguments.  */
  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
      {
	tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
	if (tmp)
	  {
	    gimple_call_set_arg (stmt, i, tmp);
	    changed = true;
	  }
      }

  /* Check for virtual calls that became direct calls.  */
  callee = gimple_call_fn (stmt);
  if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
    {
      if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
	{
	  if (dump_file && virtual_method_call_p (callee)
	      && !possible_polymorphic_call_target_p
		    (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
						     (OBJ_TYPE_REF_EXPR (callee)))))
	    {
	      fprintf (dump_file,
		       "Type inheritance inconsistent devirtualization of ");
	      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, callee, TDF_SLIM);
	      fprintf (dump_file, "\n");
	    }

	  gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
	  changed = true;
	}
      else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
	{
	  bool final;
	  vec <cgraph_node *>targets
	    = possible_polymorphic_call_targets (callee, stmt, &final);
	  if (final && targets.length () <= 1 && dbg_cnt (devirt))
	    {
	      tree lhs = gimple_call_lhs (stmt);
	      if (dump_enabled_p ())
		{
		  location_t loc = gimple_location_safe (stmt);
		  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
				   "folding virtual function call to %s\n",
				   targets.length () == 1
				   ? targets[0]->name ()
				   : "__builtin_unreachable");
		}
	      if (targets.length () == 1)
		{
		  tree fndecl = targets[0]->decl;
		  gimple_call_set_fndecl (stmt, fndecl);
		  changed = true;
		  /* If changing the call to __cxa_pure_virtual
		     or similar noreturn function, adjust gimple_call_fntype
		     too.  */
		  if (gimple_call_noreturn_p (stmt)
		      && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
		      && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
		      && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
			  == void_type_node))
		    gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
		  /* If the call becomes noreturn, remove the lhs.  */
		  if (lhs
		      && gimple_call_noreturn_p (stmt)
		      && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
			  || should_remove_lhs_p (lhs)))
		    {
		      if (TREE_CODE (lhs) == SSA_NAME)
			{
			  tree var = create_tmp_var (TREE_TYPE (lhs));
			  tree def = get_or_create_ssa_default_def (cfun, var);
			  gimple *new_stmt = gimple_build_assign (lhs, def);
			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
			}
		      gimple_call_set_lhs (stmt, NULL_TREE);
		    }
		  maybe_remove_unused_call_args (cfun, stmt);
		}
	      else
		{
		  tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
		  gimple *new_stmt = gimple_build_call (fndecl, 0);
		  gimple_set_location (new_stmt, gimple_location (stmt));
		  /* If the call had a SSA name as lhs morph that into
		     an uninitialized value.  */
		  if (lhs && TREE_CODE (lhs) == SSA_NAME)
		    {
		      tree var = create_tmp_var (TREE_TYPE (lhs));
		      SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs, var);
		      SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
		      set_ssa_default_def (cfun, var, lhs);
		    }
		  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
		  gimple_set_vdef (new_stmt, gimple_vdef (stmt));
		  gsi_replace (gsi, new_stmt, false);
		  return true;
		}
	    }
	}
    }

  /* Check for indirect calls that became direct calls, and then
     no longer require a static chain.  */
  if (gimple_call_chain (stmt))
    {
      tree fn = gimple_call_fndecl (stmt);
      if (fn && !DECL_STATIC_CHAIN (fn))
	{
	  gimple_call_set_chain (stmt, NULL);
	  changed = true;
	}
      else
	{
	  tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
	  if (tmp)
	    {
	      gimple_call_set_chain (stmt, tmp);
	      changed = true;
	    }
	}
    }

  if (inplace)
    return changed;

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    {
      if (gimple_fold_builtin (gsi))
	changed = true;
    }
  else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
    {
      changed |= targetm.gimple_fold_builtin (gsi);
    }
  else if (gimple_call_internal_p (stmt))
    {
      enum tree_code subcode = ERROR_MARK;
      tree result = NULL_TREE;
      bool cplx_result = false;
      tree overflow = NULL_TREE;
      switch (gimple_call_internal_fn (stmt))
	{
	case IFN_BUILTIN_EXPECT:
	  result = fold_builtin_expect (gimple_location (stmt),
					gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1),
					gimple_call_arg (stmt, 2));
	  break;
	case IFN_UBSAN_OBJECT_SIZE:
	  {
	    tree offset = gimple_call_arg (stmt, 1);
	    tree objsize = gimple_call_arg (stmt, 2);
	    if (integer_all_onesp (objsize)
		|| (TREE_CODE (offset) == INTEGER_CST
		    && TREE_CODE (objsize) == INTEGER_CST
		    && tree_int_cst_le (offset, objsize)))
	      {
		replace_call_with_value (gsi, NULL_TREE);
		return true;
	      }
	  }
	  break;
	case IFN_UBSAN_PTR:
	  if (integer_zerop (gimple_call_arg (stmt, 1)))
	    {
	      replace_call_with_value (gsi, NULL_TREE);
	      return true;
	    }
	  break;
	case IFN_UBSAN_BOUNDS:
	  {
	    tree index = gimple_call_arg (stmt, 1);
	    tree bound = gimple_call_arg (stmt, 2);
	    if (TREE_CODE (index) == INTEGER_CST
		&& TREE_CODE (bound) == INTEGER_CST)
	      {
		index = fold_convert (TREE_TYPE (bound), index);
		if (TREE_CODE (index) == INTEGER_CST
		    && tree_int_cst_le (index, bound))
		  {
		    replace_call_with_value (gsi, NULL_TREE);
		    return true;
		  }
	      }
	  }
	  break;
	case IFN_GOACC_DIM_SIZE:
	case IFN_GOACC_DIM_POS:
	  result = fold_internal_goacc_dim (stmt);
	  break;
	case IFN_UBSAN_CHECK_ADD:
	  subcode = PLUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_SUB:
	  subcode = MINUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_MUL:
	  subcode = MULT_EXPR;
	  break;
	case IFN_ADD_OVERFLOW:
	  subcode = PLUS_EXPR;
	  cplx_result = true;
	  break;
	case IFN_SUB_OVERFLOW:
	  subcode = MINUS_EXPR;
	  cplx_result = true;
	  break;
	case IFN_MUL_OVERFLOW:
	  subcode = MULT_EXPR;
	  cplx_result = true;
	  break;
	default:
	  break;
	}
      if (subcode != ERROR_MARK)
	{
	  tree arg0 = gimple_call_arg (stmt, 0);
	  tree arg1 = gimple_call_arg (stmt, 1);
	  tree type = TREE_TYPE (arg0);
	  if (cplx_result)
	    {
	      tree lhs = gimple_call_lhs (stmt);
	      if (lhs == NULL_TREE)
		type = NULL_TREE;
	      else
		type = TREE_TYPE (TREE_TYPE (lhs));
	    }
	  if (type == NULL_TREE)
	    ;
	  /* x = y + 0; x = y - 0; x = y * 0; */
	  else if (integer_zerop (arg1))
	    result = subcode == MULT_EXPR ? integer_zero_node : arg0;
	  /* x = 0 + y; x = 0 * y; */
	  else if (subcode != MINUS_EXPR && integer_zerop (arg0))
	    result = subcode == MULT_EXPR ? integer_zero_node : arg1;
	  /* x = y - y; */
	  else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
	    result = integer_zero_node;
	  /* x = y * 1; x = 1 * y; */
	  else if (subcode == MULT_EXPR && integer_onep (arg1))
	    result = arg0;
	  else if (subcode == MULT_EXPR && integer_onep (arg0))
	    result = arg1;
	  else if (TREE_CODE (arg0) == INTEGER_CST
		   && TREE_CODE (arg1) == INTEGER_CST)
	    {
	      if (cplx_result)
		result = int_const_binop (subcode, fold_convert (type, arg0),
					  fold_convert (type, arg1));
	      else
		result = int_const_binop (subcode, arg0, arg1);
	      if (result && arith_overflowed_p (subcode, type, arg0, arg1))
		{
		  if (cplx_result)
		    overflow = build_one_cst (type);
		  else
		    result = NULL_TREE;
		}
	    }
	  if (result)
	    {
	      if (result == integer_zero_node)
		result = build_zero_cst (type);
	      else if (cplx_result && TREE_TYPE (result) != type)
		{
		  if (TREE_CODE (result) == INTEGER_CST)
		    {
		      if (arith_overflowed_p (PLUS_EXPR, type, result,
					      integer_zero_node))
			overflow = build_one_cst (type);
		    }
		  else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
			    && TYPE_UNSIGNED (type))
			   || (TYPE_PRECISION (type)
			       < (TYPE_PRECISION (TREE_TYPE (result))
				  + (TYPE_UNSIGNED (TREE_TYPE (result))
				     && !TYPE_UNSIGNED (type)))))
		    result = NULL_TREE;
		  if (result)
		    result = fold_convert (type, result);
		}
	    }
	}

      if (result)
	{
	  if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
	    result = drop_tree_overflow (result);
	  if (cplx_result)
	    {
	      if (overflow == NULL_TREE)
		overflow = build_zero_cst (TREE_TYPE (result));
	      tree ctype = build_complex_type (TREE_TYPE (result));
	      if (TREE_CODE (result) == INTEGER_CST
		  && TREE_CODE (overflow) == INTEGER_CST)
		result = build_complex (ctype, result, overflow);
	      else
		result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
				     ctype, result, overflow);
	    }
	  if (!update_call_from_tree (gsi, result))
	    gimplify_and_update_call_from_tree (gsi, result);
	  changed = true;
	}
    }

  return changed;
}
/* Return true if NAME has a use on STMT.  */

static bool
has_use_on_stmt (tree name, gimple *stmt)
{
  imm_use_iterator iter;
  use_operand_p use_p;
  FOR_EACH_IMM_USE_FAST (use_p, iter, name)
    if (USE_STMT (use_p) == stmt)
      return true;
  return false;
}
/* Worker for fold_stmt_1 dispatch to pattern based folding with
   gimple_simplify.

   Replaces *GSI with the simplification result in RCODE and OPS
   and the associated statements in *SEQ.  Does the replacement
   according to INPLACE and returns true if the operation succeeded.  */

static bool
replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
				  code_helper rcode, tree *ops,
				  gimple_seq *seq, bool inplace)
{
  gimple *stmt = gsi_stmt (*gsi);

  /* Play safe and do not allow abnormals to be mentioned in
     newly created statements.  See also maybe_push_res_to_seq.
     As an exception allow such uses if there was a use of the
     same SSA name on the old stmt.  */
  if ((TREE_CODE (ops[0]) == SSA_NAME
       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0])
       && !has_use_on_stmt (ops[0], stmt))
      || (ops[1]
	  && TREE_CODE (ops[1]) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1])
	  && !has_use_on_stmt (ops[1], stmt))
      || (ops[2]
	  && TREE_CODE (ops[2]) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2])
	  && !has_use_on_stmt (ops[2], stmt))
      || (COMPARISON_CLASS_P (ops[0])
	  && ((TREE_CODE (TREE_OPERAND (ops[0], 0)) == SSA_NAME
	       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 0))
	       && !has_use_on_stmt (TREE_OPERAND (ops[0], 0), stmt))
	      || (TREE_CODE (TREE_OPERAND (ops[0], 1)) == SSA_NAME
		  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 1))
		  && !has_use_on_stmt (TREE_OPERAND (ops[0], 1), stmt)))))
    return false;

  /* Don't insert new statements when INPLACE is true, even if we could
     reuse STMT for the final statement.  */
  if (inplace && !gimple_seq_empty_p (*seq))
    return false;

  if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
    {
      gcc_assert (rcode.is_tree_code ());
      if (TREE_CODE_CLASS ((enum tree_code) rcode) == tcc_comparison
	  /* GIMPLE_CONDs condition may not throw.  */
	  && (!flag_exceptions
	      || !cfun->can_throw_non_call_exceptions
	      || !operation_could_trap_p (rcode,
					  FLOAT_TYPE_P (TREE_TYPE (ops[0])),
					  false, NULL_TREE)))
	gimple_cond_set_condition (cond_stmt, rcode, ops[0], ops[1]);
      else if (rcode == SSA_NAME)
	gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
				   build_zero_cst (TREE_TYPE (ops[0])));
      else if (rcode == INTEGER_CST)
	{
	  if (integer_zerop (ops[0]))
	    gimple_cond_make_false (cond_stmt);
	  else
	    gimple_cond_make_true (cond_stmt);
	}
      else if (!inplace)
	{
	  tree res = maybe_push_res_to_seq (rcode, boolean_type_node,
					    ops, seq);
	  if (!res)
	    return false;
	  gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
				     build_zero_cst (TREE_TYPE (res)));
	}
      else
	return false;
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi),
			     0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (is_gimple_assign (stmt)
	   && rcode.is_tree_code ())
    {
      if (!inplace
	  || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (rcode))
	{
	  maybe_build_generic_op (rcode,
				  TREE_TYPE (gimple_assign_lhs (stmt)), ops);
	  gimple_assign_set_rhs_with_ops (gsi, rcode, ops[0], ops[1], ops[2]);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      if (!gimple_seq_empty_p (*seq))
		print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	      print_gimple_stmt (dump_file, gsi_stmt (*gsi),
				 0, TDF_SLIM);
	    }
	  gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
	  return true;
	}
    }
  else if (rcode.is_fn_code ()
	   && gimple_call_combined_fn (stmt) == rcode)
    {
      unsigned i;
      for (i = 0; i < gimple_call_num_args (stmt); ++i)
	{
	  gcc_assert (ops[i] != NULL_TREE);
	  gimple_call_set_arg (stmt, i, ops[i]);
	}
      if (i < 3)
	gcc_assert (ops[i] == NULL_TREE);
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (!inplace)
    {
      if (gimple_has_lhs (stmt))
	{
	  tree lhs = gimple_get_lhs (stmt);
	  if (!maybe_push_res_to_seq (rcode, TREE_TYPE (lhs),
				      ops, seq, lhs))
	    return false;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	    }
	  gsi_replace_with_seq_vops (gsi, *seq);
	  return true;
	}
      else
	gcc_unreachable ();
    }

  return false;
}
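/* E.g. when gimple_simplify reduced the condition of a GIMPLE_COND to
   the constant 0, the code above rewrites the statement via
   gimple_cond_make_false instead of materializing a comparison with
   zero.  */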
/* Canonicalize MEM_REFs invariant address operand after propagation.  */

static bool
maybe_canonicalize_mem_ref_addr (tree *t)
{
  bool res = false;

  if (TREE_CODE (*t) == ADDR_EXPR)
    t = &TREE_OPERAND (*t, 0);

  /* The C and C++ frontends use an ARRAY_REF for indexing with their
     generic vector extension.  The actual vector referenced is
     view-converted to an array type for this purpose.  If the index
     is constant the canonical representation in the middle-end is a
     BIT_FIELD_REF so re-write the former to the latter here.  */
  if (TREE_CODE (*t) == ARRAY_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
      && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
      && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
    {
      tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
      if (VECTOR_TYPE_P (vtype))
	{
	  tree low = array_ref_low_bound (*t);
	  if (TREE_CODE (low) == INTEGER_CST)
	    {
	      if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
		{
		  widest_int idx
		    = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
			       wi::to_widest (low));
		  idx = wi::mul (idx, wi::to_widest
					(TYPE_SIZE (TREE_TYPE (*t))));
		  widest_int ext
		    = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
		  if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
		    {
		      *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
				       TREE_TYPE (*t),
				       TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
				       TYPE_SIZE (TREE_TYPE (*t)),
				       wide_int_to_tree (bitsizetype, idx));
		      res = true;
		    }
		}
	    }
	}
    }

  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
     of invariant addresses into an SSA name MEM_REF address.  */
  if (TREE_CODE (*t) == MEM_REF
      || TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (*t, 0);
      if (TREE_CODE (addr) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
	      || handled_component_p (TREE_OPERAND (addr, 0))))
	{
	  tree base;
	  poly_int64 coffset;
	  base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
						&coffset);
	  if (!base)
	    gcc_unreachable ();

	  TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
	  TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
						  TREE_OPERAND (*t, 1),
						  size_int (coffset));
	  res = true;
	}

      gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
			   || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
    }

  /* Canonicalize back MEM_REFs to plain reference trees if the object
     accessed is a decl that has the same access semantics as the MEM_REF.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1))
      && MR_DEPENDENCE_CLIQUE (*t) == 0)
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
      if (/* Same volatile qualification.  */
	  TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
	  /* Same TBAA behavior with -fstrict-aliasing.  */
	  && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
	  && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
	      == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
	  /* Same alignment.  */
	  && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
	  /* We have to look out here to not drop a required conversion
	     from the rhs to the lhs if *t appears on the lhs or vice-versa
	     if it appears on the rhs.  Thus require strict type
	     compatibility.  */
	  && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
	{
	  *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
	  res = true;
	}
    }

  /* Canonicalize TARGET_MEM_REF in particular with respect to
     the indexes becoming constant.  */
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
	{
	  *t = tem;
	  res = true;
	}
    }

  return res;
}
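/* Two canonicalizations the function above performs, as examples:
   MEM[&MEM[&x, 4], 4] becomes MEM[&x, 8], and MEM[&a, 0] that accesses
   a with a's own type, volatility and alignment becomes the plain
   decl a again.  */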
/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases.  */

static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
{
  bool changed = false;
  gimple *stmt = gsi_stmt (*gsi);
  bool nowarning = gimple_no_warning_p (stmt);
  unsigned i;

  fold_defer_overflow_warnings ();

  /* First do required canonicalization of [TARGET_]MEM_REF addresses
     after propagation.
     ???  This shouldn't be done in generic folding but in the
     propagation helpers which also know whether an address was
     propagated.
     Also canonicalize operand order.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
	{
	  tree *rhs = gimple_assign_rhs1_ptr (stmt);
	  if ((REFERENCE_CLASS_P (*rhs)
	       || TREE_CODE (*rhs) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (rhs))
	    changed = true;
	  tree *lhs = gimple_assign_lhs_ptr (stmt);
	  if (REFERENCE_CLASS_P (*lhs)
	      && maybe_canonicalize_mem_ref_addr (lhs))
	    changed = true;
	}
      else
	{
	  /* Canonicalize operand order.  */
	  enum tree_code code = gimple_assign_rhs_code (stmt);
	  if (TREE_CODE_CLASS (code) == tcc_comparison
	      || commutative_tree_code (code)
	      || commutative_ternary_tree_code (code))
	    {
	      tree rhs1 = gimple_assign_rhs1 (stmt);
	      tree rhs2 = gimple_assign_rhs2 (stmt);
	      if (tree_swap_operands_p (rhs1, rhs2))
		{
		  gimple_assign_set_rhs1 (stmt, rhs2);
		  gimple_assign_set_rhs2 (stmt, rhs1);
		  if (TREE_CODE_CLASS (code) == tcc_comparison)
		    gimple_assign_set_rhs_code (stmt,
						swap_tree_comparison (code));
		  changed = true;
		}
	    }
	}
      break;
    case GIMPLE_CALL:
      {
	for (i = 0; i < gimple_call_num_args (stmt); ++i)
	  {
	    tree *arg = gimple_call_arg_ptr (stmt, i);
	    if (REFERENCE_CLASS_P (*arg)
		&& maybe_canonicalize_mem_ref_addr (arg))
	      changed = true;
	  }
	tree *lhs = gimple_call_lhs_ptr (stmt);
	if (*lhs
	    && REFERENCE_CLASS_P (*lhs)
	    && maybe_canonicalize_mem_ref_addr (lhs))
	  changed = true;
	break;
      }
    case GIMPLE_ASM:
      {
	gasm *asm_stmt = as_a <gasm *> (stmt);
	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if (REFERENCE_CLASS_P (op)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if ((REFERENCE_CLASS_P (op)
		 || TREE_CODE (op) == ADDR_EXPR)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
      }
      break;
    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree *val = gimple_debug_bind_get_value_ptr (stmt);
	  if (*val
	      && (REFERENCE_CLASS_P (*val)
		  || TREE_CODE (*val) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (val))
	    changed = true;
	}
      break;
    case GIMPLE_COND:
      {
	/* Canonicalize operand order.  */
	tree lhs = gimple_cond_lhs (stmt);
	tree rhs = gimple_cond_rhs (stmt);
	if (tree_swap_operands_p (lhs, rhs))
	  {
	    gcond *gc = as_a <gcond *> (stmt);
	    gimple_cond_set_lhs (gc, rhs);
	    gimple_cond_set_rhs (gc, lhs);
	    gimple_cond_set_code (gc,
				  swap_tree_comparison (gimple_cond_code (gc)));
	    changed = true;
	  }
      }
      break;
    default:;
    }
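  /* After this canonicalization phase a condition like 5 < x_1 has
     been rewritten to x_1 > 5, and commutative operands appear in the
     order tree_swap_operands_p prefers.  */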
  /* Dispatch to pattern-based folding.  */
  if (!inplace
      || is_gimple_assign (stmt)
      || gimple_code (stmt) == GIMPLE_COND)
    {
      gimple_seq seq = NULL;
      code_helper rcode;
      tree ops[3] = {};
      if (gimple_simplify (stmt, &rcode, ops, inplace ? NULL : &seq,
			   valueize, valueize))
	{
	  if (replace_stmt_with_simplification (gsi, rcode, ops, &seq, inplace))
	    changed = true;
	  else
	    gimple_seq_discard (seq);
	}
    }

  stmt = gsi_stmt (*gsi);
  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	/* Try to canonicalize for boolean-typed X the comparisons
	   X == 0, X == 1, X != 0, and X != 1.  */
	if (gimple_assign_rhs_code (stmt) == EQ_EXPR
	    || gimple_assign_rhs_code (stmt) == NE_EXPR)
	  {
	    tree lhs = gimple_assign_lhs (stmt);
	    tree op1 = gimple_assign_rhs1 (stmt);
	    tree op2 = gimple_assign_rhs2 (stmt);
	    tree type = TREE_TYPE (op1);

	    /* Check whether the comparison operands are of the same boolean
	       type as the result type is.
	       Check that second operand is an integer-constant with value
	       one or zero.  */
	    if (TREE_CODE (op2) == INTEGER_CST
		&& (integer_zerop (op2) || integer_onep (op2))
		&& useless_type_conversion_p (TREE_TYPE (lhs), type))
	      {
		enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
		bool is_logical_not = false;

		/* X == 0 and X != 1 is a logical-not of X;
		   X == 1 and X != 0 is X.  */
		if ((cmp_code == EQ_EXPR && integer_zerop (op2))
		    || (cmp_code == NE_EXPR && integer_onep (op2)))
		  is_logical_not = true;

		if (is_logical_not == false)
		  gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
		/* Only for one-bit precision typed X the transformation
		   !X -> ~X is valid.  */
		else if (TYPE_PRECISION (type) == 1)
		  gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
		/* Otherwise we use !X -> X ^ 1.  */
		else
		  gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
						  build_int_cst (type, 1));
		changed = true;
		stmt = gsi_stmt (*gsi);
	      }
	  }
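	/* E.g. for boolean-typed x this turns x_1 = x_2 == 1 into
	   x_1 = x_2, and x_1 = x_2 == 0 into x_1 = ~x_2 for one-bit
	   precision types or x_1 = x_2 ^ 1 otherwise.  */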
	unsigned old_num_ops = gimple_num_ops (stmt);
	tree lhs = gimple_assign_lhs (stmt);
	tree new_rhs = fold_gimple_assign (gsi);
	if (new_rhs
	    && !useless_type_conversion_p (TREE_TYPE (lhs),
					   TREE_TYPE (new_rhs)))
	  new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	if (new_rhs
	    && (!inplace
		|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	break;
      }
    case GIMPLE_CALL:
      changed |= gimple_fold_call (gsi, inplace);
      break;
    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      {
	gasm *asm_stmt = as_a <gasm *> (stmt);
	size_t noutputs;
	const char **oconstraints;
	const char *constraint;
	bool allows_mem, allows_reg;

	noutputs = gimple_asm_noutputs (asm_stmt);
	oconstraints = XALLOCAVEC (const char *, noutputs);

	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    oconstraints[i]
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, true)) != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    constraint
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    parse_input_constraint (&constraint, 0, 0, noutputs, 0,
				    oconstraints, &allows_mem, &allows_reg);
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, !allows_reg && allows_mem))
		   != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
      }
      break;
    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree val = gimple_debug_bind_get_value (stmt);
	  if (val
	      && REFERENCE_CLASS_P (val))
	    {
	      tree tem = maybe_fold_reference (val, false);
	      if (tem)
		{
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	  else if (val
		   && TREE_CODE (val) == ADDR_EXPR)
	    {
	      tree ref = TREE_OPERAND (val, 0);
	      tree tem = maybe_fold_reference (ref, false);
	      if (tem)
		{
		  tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	}
      break;

    case GIMPLE_RETURN:
      {
	greturn *ret_stmt = as_a <greturn *> (stmt);
	tree ret = gimple_return_retval (ret_stmt);

	if (ret && TREE_CODE (ret) == SSA_NAME && valueize)
	  {
	    tree val = valueize (ret);
	    if (val && val != ret
		&& may_propagate_copy (ret, val))
	      {
		gimple_return_set_retval (ret_stmt, val);
		changed = true;
	      }
	  }
      }
      break;

    default:;
    }

  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  */
  if (gimple_has_lhs (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
	{
	  tree new_lhs = maybe_fold_reference (lhs, true);
	  if (new_lhs)
	    {
	      gimple_set_lhs (stmt, new_lhs);
	      changed = true;
	    }
	}
    }

  fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);

  return changed;
}
/* Valueization callback that ends up not following SSA edges.  */

tree
no_follow_ssa_edges (tree)
{
  return NULL_TREE;
}

/* Valueization callback that ends up following single-use SSA edges only.  */

tree
follow_single_use_edges (tree val)
{
  if (TREE_CODE (val) == SSA_NAME
      && !has_single_use (val))
    return NULL_TREE;
  return val;
}

/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.
   The statement pointed to by GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
}

bool
fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
{
  return fold_stmt_1 (gsi, false, valueize);
}

/* Perform the minimal folding on statement *GSI.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.
   The statement *GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt_inplace (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
  gcc_assert (gsi_stmt (*gsi) == stmt);
  return changed;
}
/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
   if EXPR is null or we don't know how.
   If non-null, the result always has boolean type.  */

static tree
canonicalize_bool (tree expr, bool invert)
{
  if (!expr)
    return NULL_TREE;
  if (invert)
    {
      if (integer_nonzerop (expr))
	return boolean_false_node;
      else if (integer_zerop (expr))
	return boolean_true_node;
      else if (TREE_CODE (expr) == SSA_NAME)
	return fold_build2 (EQ_EXPR, boolean_type_node, expr,
			    build_int_cst (TREE_TYPE (expr), 0));
      else if (COMPARISON_CLASS_P (expr))
	return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
			    boolean_type_node,
			    TREE_OPERAND (expr, 0),
			    TREE_OPERAND (expr, 1));
      else
	return NULL_TREE;
    }
  else
    {
      if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
	return expr;
      if (integer_nonzerop (expr))
	return boolean_true_node;
      else if (integer_zerop (expr))
	return boolean_false_node;
      else if (TREE_CODE (expr) == SSA_NAME)
	return fold_build2 (NE_EXPR, boolean_type_node, expr,
			    build_int_cst (TREE_TYPE (expr), 0));
      else if (COMPARISON_CLASS_P (expr))
	return fold_build2 (TREE_CODE (expr),
			    boolean_type_node,
			    TREE_OPERAND (expr, 0),
			    TREE_OPERAND (expr, 1));
      else
	return NULL_TREE;
    }
}
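/* E.g. canonicalize_bool (a_1 < b_2, true) yields the inverted
   comparison a_1 >= b_2 of boolean type, while a non-boolean SSA name
   x_1 with INVERT false becomes the test x_1 != 0.  */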
/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  */

static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
			const_tree op1, const_tree op2)
{
  gimple *s;

  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      if (operand_equal_p (expr, op1, 0))
	return ((code == NE_EXPR && integer_zerop (op2))
		|| (code == EQ_EXPR && integer_nonzerop (op2)));
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
	  && gimple_assign_rhs_code (s) == code
	  && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
	  && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
	return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
	{
	  enum tree_code c = gimple_assign_rhs_code (s);
	  if ((c == NE_EXPR && integer_zerop (op2))
	      || (c == EQ_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr, c,
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	  if ((c == EQ_EXPR && integer_zerop (op2))
	      || (c == NE_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr,
					   invert_tree_comparison (c, false),
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	}
    }
  return false;
}
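/* E.g. with b_1 defined as b_1 = x_2 < y_3, the call
   same_bool_comparison_p (b_1, LT_EXPR, x_2, y_3) returns true, and
   so does comparing b_1 against the equivalent test b_1 != 0.  */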
/* Check to see if two boolean expressions OP1 and OP2 are logically
   equivalent.  */

static bool
same_bool_result_p (const_tree op1, const_tree op2)
{
  /* Simple cases first.  */
  if (operand_equal_p (op1, op2, 0))
    return true;

  /* Check the cases where at least one of the operands is a comparison.
     These are a bit smarter than operand_equal_p in that they apply some
     identities on SSA_NAMEs.  */
  if (COMPARISON_CLASS_P (op2)
      && same_bool_comparison_p (op1, TREE_CODE (op2),
				 TREE_OPERAND (op2, 0),
				 TREE_OPERAND (op2, 1)))
    return true;
  if (COMPARISON_CLASS_P (op1)
      && same_bool_comparison_p (op2, TREE_CODE (op1),
				 TREE_OPERAND (op1, 0),
				 TREE_OPERAND (op1, 1)))
    return true;

  /* Default case.  */
  return false;
}
/* Forward declarations for some mutually recursive functions.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison (tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison_1 (gimple *stmt,
			   enum tree_code code2, tree op2a, tree op2b);
static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison (tree var, bool invert,
			enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison_1 (gimple *stmt,
			  enum tree_code code2, tree op2a, tree op2b);
/* Helper function for and_comparisons_1: try to simplify the AND of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of the VAR before doing the AND.
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison (tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b)
{
  tree t;
  gimple *stmt = SSA_NAME_DEF_STMT (var);

  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.  */
  if (invert)
    t = or_var_with_comparison_1 (stmt,
				  invert_tree_comparison (code2, false),
				  op2a, op2b);
  else
    t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
  return canonicalize_bool (t, invert);
}
/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison_1 (gimple *stmt,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_false_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (innercode,
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt),
				  code2, op2a, op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple *s;
      tree t;
      tree partial = NULL_TREE;
      bool is_and = (innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
	 inner1 AND (inner1 OR inner2) => inner1
	 !inner1 AND (inner1 AND inner2) => false
	 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
	 Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
	 Compute the first partial result, (inner1 AND (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (t AND inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner2;
	      else if (integer_zerop (t))
		return boolean_false_node;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner2 AND (op2a code2 op2b)))  */
	  else if (integer_onep (t))
	    return boolean_true_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (inner1 AND t)  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner1;
	      else if (integer_zerop (t))
		return boolean_false_node;
	      /* If both are the same, we can apply the identity
		 (x AND x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner1 AND (op2a code2 op2b)))
	     => (t OR partial)  */
	  else
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed OR expression.  The
		     interesting case is when at least one is false.
		     Or, if both are the same, we can apply the identity
		     (x OR x) == x.  */
		  if (integer_zerop (partial))
		    return t;
		  else if (integer_zerop (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
/* Try to simplify the AND of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) AND (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a == op1b), we should either be able to
	 return that or FALSE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	      else
		return boolean_false_node;
	    }
	}
      /* Likewise if the second comparison is an == comparison.  */
      else if (code2 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	      else
		return boolean_false_node;
	    }
	}

      /* Same business with inequality tests.  */
      else if (code1 == NE_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp != 0); break;
	    case NE_EXPR: val = (cmp == 0); break;
	    case LT_EXPR: val = (cmp >= 0); break;
	    case GT_EXPR: val = (cmp <= 0); break;
	    case LE_EXPR: val = (cmp > 0); break;
	    case GE_EXPR: val = (cmp < 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == NE_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp <= 0); break;
	    case GT_EXPR: val = (cmp >= 0); break;
	    case LE_EXPR: val = (cmp < 0); break;
	    case GE_EXPR: val = (cmp > 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Chose the more restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Likewise chose the more restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Check for singleton ranges.  */
      else if (cmp == 0
	       && ((code1 == LE_EXPR && code2 == GE_EXPR)
		   || (code1 == GE_EXPR && code2 == LE_EXPR)))
	return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);

      /* Check for disjoint ranges.  */
      else if (cmp <= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_false_node;
      else if (cmp >= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_false_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple *stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return and_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ANDed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     return that.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
			{
			  if (!result)
			    result = boolean_false_node;
			  else if (!integer_zerop (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = and_var_with_comparison (arg, invert, code2,
						      op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}
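/* Examples of the constant-merging logic above: (x < 5) AND (x >= 7)
   folds to false (disjoint ranges), (x <= 3) AND (x >= 3) folds to the
   singleton-range test x == 3, and (x < 5) AND (x < 7) folds to the
   more restrictive x < 5.  */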
/* Try to simplify the AND of two comparisons, specified by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be simplified to a single expression (without requiring
   introducing more SSA variables to hold intermediate values),
   return the resulting tree.  Otherwise return NULL_TREE.
   If the result expression is non-null, it has boolean type.  */

tree
maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
			    enum tree_code code2, tree op2a, tree op2b)
{
  tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
  if (t)
    return t;
  else
    return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
}
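/* E.g. maybe_fold_and_comparisons (LE_EXPR, x, 9, EQ_EXPR, x, 9)
   returns x == 9 since the equality implies the other test.  The
   swapped second attempt exists because and_comparisons_1 only
   recurses on SSA definitions feeding the first comparison.  */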
/* Helper function for or_comparisons_1: try to simplify the OR of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of VAR before doing the OR.
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison (tree var, bool invert,
			enum tree_code code2, tree op2a, tree op2b)
{
  tree t;
  gimple *stmt = SSA_NAME_DEF_STMT (var);

  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.  */
  if (invert)
    t = and_var_with_comparison_1 (stmt,
				   invert_tree_comparison (code2, false),
				   op2a, op2b);
  else
    t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
  return canonicalize_bool (t, invert);
}
/* Try to simplify the OR of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison_1 (gimple *stmt,
			  enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var OR (var != 0)) => true.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_true_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = or_comparisons_1 (innercode,
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 code2, op2a, op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple *s;
      tree t;
      tree partial = NULL_TREE;
      bool is_or = (innercode == BIT_IOR_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
	 inner1 OR (inner1 AND inner2) => inner1
	 !inner1 OR (inner1 OR inner2) => true
	 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
	 Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_or ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_or ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the OR across the inner tests.
	 Compute the first partial result, (inner1 OR (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (t OR inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_or)
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (integer_zerop (t))
		return inner2;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner2 OR (op2a code op2b)))  */
	  else if (integer_zerop (t))
	    return boolean_false_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (inner1 OR t)
	     => (t OR partial)  */
	  if (is_or)
	    {
	      if (integer_zerop (t))
		return inner1;
	      else if (integer_onep (t))
		return boolean_true_node;
	      /* If both are the same, we can apply the identity
		 (x OR x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner1 OR (op2a code2 op2b)))
	     => (t AND partial)  */
	  else
	    {
	      if (integer_zerop (t))
		return boolean_false_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed AND expression.  The
		     interesting case is when at least one is true.
		     Or, if both are the same, we can apply the identity
		     (x AND x) == x.  */
		  if (integer_onep (partial))
		    return t;
		  else if (integer_onep (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
/* Try to simplify the OR of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) OR (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a != op1b), we should either be able to
	 return that or TRUE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	    }
	}
      /* Likewise if the second comparison is a != comparison.  */
      else if (code2 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	    }
	}

      /* See if an equality test is redundant with the other comparison.  */
      else if (code1 == EQ_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == EQ_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Chose the less restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Likewise chose the less restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Check for singleton ranges.  */
      else if (cmp == 0
	       && ((code1 == LT_EXPR && code2 == GT_EXPR)
		   || (code1 == GT_EXPR && code2 == LT_EXPR)))
	return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);

      /* Check for less/greater pairs that don't restrict the range at all.  */
      else if (cmp >= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_true_node;
      else if (cmp <= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_true_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple *stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return or_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ORed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     return that.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
			{
			  if (!result)
			    result = boolean_true_node;
			  else if (!integer_onep (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = or_var_with_comparison (arg, invert, code2,
						     op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}
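/* The OR mirror images of the AND examples: (x < 5) OR (x >= 5) folds
   to true (the range is unrestricted), (x < 3) OR (x > 3) folds to
   x != 3, and (x < 5) OR (x < 7) folds to the less restrictive
   x < 7.  */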
/* Try to simplify the OR of two comparisons, specified by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be simplified to a single expression (without requiring
   introducing more SSA variables to hold intermediate values),
   return the resulting tree.  Otherwise return NULL_TREE.
   If the result expression is non-null, it has boolean type.  */

tree
maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
  if (t)
    return t;
  else
    return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
}
6005 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6007 Either NULL_TREE, a simplified but non-constant or a constant
6010 ??? This should go into a gimple-fold-inline.h file to be eventually
6011 privatized with the single valueize function used in the various TUs
6012 to avoid the indirect function call overhead. */
6015 gimple_fold_stmt_to_constant_1 (gimple
*stmt
, tree (*valueize
) (tree
),
6016 tree (*gvalueize
) (tree
))
6020 /* ??? The SSA propagators do not correctly deal with following SSA use-def
6021 edges if there are intermediate VARYING defs. For this reason
6022 do not follow SSA edges here even though SCCVN can technically
6023 just deal fine with that. */
6024 if (gimple_simplify (stmt
, &rcode
, ops
, NULL
, gvalueize
, valueize
))
6026 tree res
= NULL_TREE
;
6027 if (gimple_simplified_result_is_gimple_val (rcode
, ops
))
6029 else if (mprts_hook
)
6030 res
= mprts_hook (rcode
, gimple_expr_type (stmt
), ops
);
6033 if (dump_file
&& dump_flags
& TDF_DETAILS
)
6035 fprintf (dump_file
, "Match-and-simplified ");
6036 print_gimple_expr (dump_file
, stmt
, 0, TDF_SLIM
);
6037 fprintf (dump_file
, " to ");
6038 print_generic_expr (dump_file
, res
);
6039 fprintf (dump_file
, "\n");
6045 location_t loc
= gimple_location (stmt
);
6046 switch (gimple_code (stmt
))
6050 enum tree_code subcode
= gimple_assign_rhs_code (stmt
);
6052 switch (get_gimple_rhs_class (subcode
))
6054 case GIMPLE_SINGLE_RHS
:
6056 tree rhs
= gimple_assign_rhs1 (stmt
);
6057 enum tree_code_class kind
= TREE_CODE_CLASS (subcode
);
6059 if (TREE_CODE (rhs
) == SSA_NAME
)
6061 /* If the RHS is an SSA_NAME, return its known constant value,
6063 return (*valueize
) (rhs
);
6065 /* Handle propagating invariant addresses into address
6067 else if (TREE_CODE (rhs
) == ADDR_EXPR
6068 && !is_gimple_min_invariant (rhs
))
6070 poly_int64 offset
= 0;
6072 base
= get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs
, 0),
6076 && (CONSTANT_CLASS_P (base
)
6077 || decl_address_invariant_p (base
)))
6078 return build_invariant_address (TREE_TYPE (rhs
),
6081 else if (TREE_CODE (rhs
) == CONSTRUCTOR
6082 && TREE_CODE (TREE_TYPE (rhs
)) == VECTOR_TYPE
6083 && known_eq (CONSTRUCTOR_NELTS (rhs
),
6084 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs
))))
6089 nelts
= CONSTRUCTOR_NELTS (rhs
);
6090 tree_vector_builder
vec (TREE_TYPE (rhs
), nelts
, 1);
6091 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs
), i
, val
)
6093 val
= (*valueize
) (val
);
6094 if (TREE_CODE (val
) == INTEGER_CST
6095 || TREE_CODE (val
) == REAL_CST
6096 || TREE_CODE (val
) == FIXED_CST
)
6097 vec
.quick_push (val
);
6102 return vec
.build ();
6104 if (subcode
== OBJ_TYPE_REF
)
6106 tree val
= (*valueize
) (OBJ_TYPE_REF_EXPR (rhs
));
6107 /* If callee is constant, we can fold away the wrapper. */
6108 if (is_gimple_min_invariant (val
))
6112 if (kind
== tcc_reference
)
6114 if ((TREE_CODE (rhs
) == VIEW_CONVERT_EXPR
6115 || TREE_CODE (rhs
) == REALPART_EXPR
6116 || TREE_CODE (rhs
) == IMAGPART_EXPR
)
6117 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == SSA_NAME
)
6119 tree val
= (*valueize
) (TREE_OPERAND (rhs
, 0));
6120 return fold_unary_loc (EXPR_LOCATION (rhs
),
6122 TREE_TYPE (rhs
), val
);
6124 else if (TREE_CODE (rhs
) == BIT_FIELD_REF
6125 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == SSA_NAME
)
6127 tree val
= (*valueize
) (TREE_OPERAND (rhs
, 0));
6128 return fold_ternary_loc (EXPR_LOCATION (rhs
),
6130 TREE_TYPE (rhs
), val
,
6131 TREE_OPERAND (rhs
, 1),
6132 TREE_OPERAND (rhs
, 2));
6134 else if (TREE_CODE (rhs
) == MEM_REF
6135 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == SSA_NAME
)
6137 tree val
= (*valueize
) (TREE_OPERAND (rhs
, 0));
6138 if (TREE_CODE (val
) == ADDR_EXPR
6139 && is_gimple_min_invariant (val
))
6141 tree tem
= fold_build2 (MEM_REF
, TREE_TYPE (rhs
),
6143 TREE_OPERAND (rhs
, 1));
6148 return fold_const_aggregate_ref_1 (rhs
, valueize
);
6150 else if (kind
== tcc_declaration
)
6151 return get_symbol_constant_value (rhs
);
6155 case GIMPLE_UNARY_RHS
:
6158 case GIMPLE_BINARY_RHS
:
6159 /* Translate &x + CST into an invariant form suitable for
6160 further propagation. */
6161 if (subcode
== POINTER_PLUS_EXPR
)
6163 tree op0
= (*valueize
) (gimple_assign_rhs1 (stmt
));
6164 tree op1
= (*valueize
) (gimple_assign_rhs2 (stmt
));
6165 if (TREE_CODE (op0
) == ADDR_EXPR
6166 && TREE_CODE (op1
) == INTEGER_CST
)
6168 tree off
= fold_convert (ptr_type_node
, op1
);
6169 return build_fold_addr_expr_loc
6171 fold_build2 (MEM_REF
,
6172 TREE_TYPE (TREE_TYPE (op0
)),
6173 unshare_expr (op0
), off
));
6176 /* Canonicalize bool != 0 and bool == 0 appearing after
6177 valueization. While gimple_simplify handles this
6178 it can get confused by the ~X == 1 -> X == 0 transform
6179 which we cant reduce to a SSA name or a constant
6180 (and we have no way to tell gimple_simplify to not
6181 consider those transforms in the first place). */
6182 else if (subcode
== EQ_EXPR
6183 || subcode
== NE_EXPR
)
6185 tree lhs
= gimple_assign_lhs (stmt
);
6186 tree op0
= gimple_assign_rhs1 (stmt
);
6187 if (useless_type_conversion_p (TREE_TYPE (lhs
),
6190 tree op1
= (*valueize
) (gimple_assign_rhs2 (stmt
));
6191 op0
= (*valueize
) (op0
);
6192 if (TREE_CODE (op0
) == INTEGER_CST
)
6193 std::swap (op0
, op1
);
6194 if (TREE_CODE (op1
) == INTEGER_CST
6195 && ((subcode
== NE_EXPR
&& integer_zerop (op1
))
6196 || (subcode
== EQ_EXPR
&& integer_onep (op1
))))
6202 case GIMPLE_TERNARY_RHS
:
6204 /* Handle ternary operators that can appear in GIMPLE form. */
6205 tree op0
= (*valueize
) (gimple_assign_rhs1 (stmt
));
6206 tree op1
= (*valueize
) (gimple_assign_rhs2 (stmt
));
6207 tree op2
= (*valueize
) (gimple_assign_rhs3 (stmt
));
6208 return fold_ternary_loc (loc
, subcode
,
6209 gimple_expr_type (stmt
), op0
, op1
, op2
);
	gcall *call_stmt = as_a <gcall *> (stmt);

	if (gimple_call_internal_p (stmt))
	  {
	    enum tree_code subcode = ERROR_MARK;
	    switch (gimple_call_internal_fn (stmt))
	      {
	      case IFN_UBSAN_CHECK_ADD:
		subcode = PLUS_EXPR;
		break;
	      case IFN_UBSAN_CHECK_SUB:
		subcode = MINUS_EXPR;
		break;
	      case IFN_UBSAN_CHECK_MUL:
		subcode = MULT_EXPR;
		break;
	      case IFN_BUILTIN_EXPECT:
		{
		  tree arg0 = gimple_call_arg (stmt, 0);
		  tree op0 = (*valueize) (arg0);
		  if (TREE_CODE (op0) == INTEGER_CST)
		    return op0;
		  return NULL_TREE;
		}
	      default:
		return NULL_TREE;
	      }
	    tree arg0 = gimple_call_arg (stmt, 0);
	    tree arg1 = gimple_call_arg (stmt, 1);
	    tree op0 = (*valueize) (arg0);
	    tree op1 = (*valueize) (arg1);

	    if (TREE_CODE (op0) != INTEGER_CST
		|| TREE_CODE (op1) != INTEGER_CST)
	      {
		switch (subcode)
		  {
		  case MULT_EXPR:
		    /* x * 0 = 0 * x = 0 without overflow.  */
		    if (integer_zerop (op0) || integer_zerop (op1))
		      return build_zero_cst (TREE_TYPE (arg0));
		    break;
		  case MINUS_EXPR:
		    /* y - y = 0 without overflow.  */
		    if (operand_equal_p (op0, op1, 0))
		      return build_zero_cst (TREE_TYPE (arg0));
		    break;
		  default:
		    break;
		  }
	      }
	    tree res
	      = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
	    if (res
		&& TREE_CODE (res) == INTEGER_CST
		&& !TREE_OVERFLOW (res))
	      return res;
	    return NULL_TREE;
	  }

	fn = (*valueize) (gimple_call_fn (stmt));
	if (TREE_CODE (fn) == ADDR_EXPR
	    && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
	    && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
	    && gimple_builtin_call_types_compatible_p (stmt,
						       TREE_OPERAND (fn, 0)))
	  {
	    tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
	    tree retval;
	    unsigned i;
	    for (i = 0; i < gimple_call_num_args (stmt); ++i)
	      args[i] = (*valueize) (gimple_call_arg (stmt, i));
	    retval = fold_builtin_call_array (loc,
					      gimple_call_return_type (call_stmt),
					      fn, gimple_call_num_args (stmt),
					      args);
	    if (retval)
	      {
		/* fold_call_expr wraps the result inside a NOP_EXPR.  */
		STRIP_NOPS (retval);
		retval = fold_convert (gimple_call_return_type (call_stmt),
				       retval);
	      }
	    return retval;
	  }
	return NULL_TREE;
      }

    default:
      gcc_unreachable ();
    }
}
/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
   Returns NULL_TREE if folding to a constant is not possible, otherwise
   returns a constant according to is_gimple_min_invariant.  */

tree
gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
{
  tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
  if (res && is_gimple_min_invariant (res))
    return res;
  return NULL_TREE;
}
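/* A hypothetical use (MY_VALUEIZE is illustrative, not part of this file):
   a propagation pass supplies a callback mapping SSA names to their known
   values and calls

     tree cst = gimple_fold_stmt_to_constant (stmt, my_valueize);

   any non-NULL result is guaranteed to satisfy is_gimple_min_invariant.  */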
/* The following set of functions are supposed to fold references using
   their constant initializers.  */

/* See if we can find a constructor defining the value of BASE.
   When we know the constructor with constant offset (such as
   base is array[40] and we do know constructor of array), then
   BIT_OFFSET is adjusted accordingly.

   As a special case, return error_mark_node when constructor
   is not explicitly available, but it is known to be zero
   such as 'static const int a;'.  */

static tree
get_base_constructor (tree base, poly_int64_pod *bit_offset,
		      tree (*valueize) (tree))
{
  poly_int64 bit_offset2, size, max_size;
  bool reverse;

  if (TREE_CODE (base) == MEM_REF)
    {
      if (!integer_zerop (TREE_OPERAND (base, 1)))
	{
	  if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
	    return NULL_TREE;
	  *bit_offset += (mem_ref_offset (base).force_shwi ()
			  * BITS_PER_UNIT);
	}

      if (valueize
	  && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
	base = valueize (TREE_OPERAND (base, 0));
      if (!base || TREE_CODE (base) != ADDR_EXPR)
	return NULL_TREE;
      base = TREE_OPERAND (base, 0);
    }
  else if (valueize
	   && TREE_CODE (base) == SSA_NAME)
    base = valueize (base);

  /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
     DECL_INITIAL.  If BASE is a nested reference into another
     ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
     the inner reference.  */
  switch (TREE_CODE (base))
    {
    case VAR_DECL:
    case CONST_DECL:
      {
	tree init = ctor_for_folding (base);

	/* Our semantic is exact opposite of ctor_for_folding;
	   NULL means unknown, while error_mark_node is 0.  */
	if (init == error_mark_node)
	  return NULL_TREE;
	if (!init)
	  return error_mark_node;
	return init;
      }

    case VIEW_CONVERT_EXPR:
      return get_base_constructor (TREE_OPERAND (base, 0),
				   bit_offset, valueize);

    case ARRAY_REF:
    case COMPONENT_REF:
      base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
				      &reverse);
      if (!known_size_p (max_size) || maybe_ne (size, max_size))
	return NULL_TREE;
      *bit_offset += bit_offset2;
      return get_base_constructor (base, bit_offset, valueize);

    case CONSTRUCTOR:
      return base;

    default:
      if (CONSTANT_CLASS_P (base))
	return base;

      return NULL_TREE;
    }
}
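/* For example (an illustrative sketch): for BASE being MEM_REF <_1, 8>
   where VALUEIZE maps _1 to &a and 'a' is a constant-initialized array,
   the MEM_REF handling above adds 8 * BITS_PER_UNIT to *BIT_OFFSET and
   the VAR_DECL case returns the initializer via ctor_for_folding; for
   'static const int a;' with no explicit initializer the result would be
   error_mark_node, i.e. a known-zero value.  */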
/* CTOR is a CONSTRUCTOR of an array type.  Fold a reference of type TYPE
   and size SIZE to the memory at bit OFFSET.  */

static tree
fold_array_ctor_reference (tree type, tree ctor,
			   unsigned HOST_WIDE_INT offset,
			   unsigned HOST_WIDE_INT size,
			   tree from_decl)
{
  offset_int low_bound;
  offset_int elt_size;
  offset_int access_index;
  tree domain_type = NULL_TREE;
  HOST_WIDE_INT inner_offset;

  /* Compute low bound and elt size.  */
  if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
    domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    {
      /* Static constructors for variably sized objects make no sense.  */
      if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
	return NULL_TREE;
      low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
    }
  else
    low_bound = 0;
  /* Static constructors for variably sized objects make no sense.  */
  if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
    return NULL_TREE;
  elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));

  /* We can handle only constantly sized accesses that are known to not
     be larger than the size of an array element.  */
  if (!TYPE_SIZE_UNIT (type)
      || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
      || elt_size < wi::to_offset (TYPE_SIZE_UNIT (type))
      || elt_size == 0)
    return NULL_TREE;

  /* Compute the array index we look for.  */
  access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
				 elt_size);
  access_index += low_bound;

  /* And offset within the access.  */
  inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);

  /* See if the array field is large enough to span the whole access.  We do
     not care to fold accesses spanning multiple array indexes.  */
  if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
    return NULL_TREE;
  if (tree val = get_array_ctor_element_at_index (ctor, access_index))
    return fold_ctor_reference (type, val, inner_offset, size, from_decl);

  /* When memory is not explicitly mentioned in the constructor,
     it is 0 (or out of range).  */
  return build_zero_cst (type);
}
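/* A worked example (illustrative, assuming 32-bit int and
   BITS_PER_UNIT == 8): for

     static const int a[2] = { 1, 2 };

   a read of TYPE int with SIZE 32 at bit OFFSET 32 gives elt_size == 4,
   access_index == (32 / 8) / 4 + low_bound == 1 and inner_offset == 0,
   so the element constant 2 is returned; an index with no explicit
   initializer element folds to zero instead.  */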
/* CTOR is a CONSTRUCTOR of an aggregate or vector.  Fold a reference of
   type TYPE and size SIZE to the memory at bit OFFSET.  */

static tree
fold_nonarray_ctor_reference (tree type, tree ctor,
			      unsigned HOST_WIDE_INT offset,
			      unsigned HOST_WIDE_INT size,
			      tree from_decl)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
			    cval)
    {
      tree byte_offset = DECL_FIELD_OFFSET (cfield);
      tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
      tree field_size = DECL_SIZE (cfield);
      offset_int bitoffset;
      offset_int bitoffset_end, access_end;

      /* Variable sized objects in static constructors make no sense,
	 but field_size can be NULL for flexible array members.  */
      gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
		  && TREE_CODE (byte_offset) == INTEGER_CST
		  && (field_size != NULL_TREE
		      ? TREE_CODE (field_size) == INTEGER_CST
		      : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));

      /* Compute bit offset of the field.  */
      bitoffset = (wi::to_offset (field_offset)
		   + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
      /* Compute bit offset where the field ends.  */
      if (field_size != NULL_TREE)
	bitoffset_end = bitoffset + wi::to_offset (field_size);
      else
	bitoffset_end = 0;

      access_end = offset_int (offset) + size;

      /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
	 [BITOFFSET, BITOFFSET_END)?  */
      if (wi::cmps (access_end, bitoffset) > 0
	  && (field_size == NULL_TREE
	      || wi::lts_p (offset, bitoffset_end)))
	{
	  offset_int inner_offset = offset_int (offset) - bitoffset;
	  /* We do have overlap.  Now see if the field is large enough to
	     cover the access.  Give up for accesses spanning multiple
	     fields.  */
	  if (wi::cmps (access_end, bitoffset_end) > 0)
	    return NULL_TREE;
	  if (offset < bitoffset)
	    return NULL_TREE;
	  return fold_ctor_reference (type, cval,
				      inner_offset.to_uhwi (), size,
				      from_decl);
	}
    }
  /* When memory is not explicitly mentioned in the constructor, it is 0.  */
  return build_zero_cst (type);
}
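/* For example (an illustrative sketch, assuming 32-bit int fields with
   field 'j' laid out at bit offset 32): for

     static const struct { int i; int j; } c = { 1, 2 };

   an access of SIZE 32 at bit OFFSET 32 overlaps exactly the range
   [32, 64) computed for field 'j' above, so the loop recurses into
   fold_ctor_reference with inner_offset 0 and yields the constant 2.  */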
/* CTOR is value initializing memory.  Fold a reference of type TYPE and
   size POLY_SIZE to the memory at bit POLY_OFFSET.  */

tree
fold_ctor_reference (tree type, tree ctor, poly_uint64 poly_offset,
		     poly_uint64 poly_size, tree from_decl)
{
  tree ret;

  /* We found the field with exact match.  */
  if (useless_type_conversion_p (type, TREE_TYPE (ctor))
      && known_eq (poly_offset, 0U))
    return canonicalize_constructor_val (unshare_expr (ctor), from_decl);

  /* The remaining optimizations need a constant size and offset.  */
  unsigned HOST_WIDE_INT size, offset;
  if (!poly_size.is_constant (&size) || !poly_offset.is_constant (&offset))
    return NULL_TREE;

  /* We are at the end of walk, see if we can view convert the
     result.  */
  if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
      /* VIEW_CONVERT_EXPR is defined only for matching sizes.  */
      && !compare_tree_int (TYPE_SIZE (type), size)
      && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
    {
      ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
      if (ret)
	{
	  ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
	  if (ret)
	    STRIP_USELESS_TYPE_CONVERSION (ret);
	}
      return ret;
    }
  /* For constants and byte-aligned/sized reads try to go through
     native_encode/interpret.  */
  if (CONSTANT_CLASS_P (ctor)
      && BITS_PER_UNIT == 8
      && offset % BITS_PER_UNIT == 0
      && size % BITS_PER_UNIT == 0
      && size <= MAX_BITSIZE_MODE_ANY_MODE)
    {
      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
      int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
				    offset / BITS_PER_UNIT);
      if (len > 0)
	return native_interpret_expr (type, buf, len);
    }
  if (TREE_CODE (ctor) == CONSTRUCTOR)
    {
      if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
	  || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
	return fold_array_ctor_reference (type, ctor, offset, size,
					  from_decl);
      else
	return fold_nonarray_ctor_reference (type, ctor, offset, size,
					     from_decl);
    }

  return NULL_TREE;
}
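/* For example (an illustrative sketch): reading a single byte of a
   CONSTANT_CLASS_P integer CTOR takes the native_encode_expr path above;
   the constant is serialized into BUF in target byte order and the
   requested byte is re-interpreted as TYPE by native_interpret_expr,
   which covers byte-aligned reads that do not land on any field
   boundary.  */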
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates valueizing SSA
   names using VALUEIZE.  Return NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
{
  tree ctor, idx, base;
  poly_int64 offset, size, max_size;
  tree tem;
  bool reverse;

  if (TREE_THIS_VOLATILE (t))
    return NULL_TREE;

  if (DECL_P (t))
    return get_symbol_constant_value (t);

  tem = fold_read_from_constant_string (t);
  if (tem)
    return tem;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Constant indexes are handled well by get_base_constructor.
	 Only special case variable offsets.
	 FIXME: This code can't handle nested references with variable indexes
	 (they will be handled only by iteration of ccp).  Perhaps we can bring
	 get_ref_base_and_extent here and make it use a valueize callback.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
	  && valueize
	  && (idx = (*valueize) (TREE_OPERAND (t, 1)))
	  && poly_int_tree_p (idx))
	{
	  tree low_bound, unit_size;

	  /* If the resulting bit-offset is constant, track it.  */
	  if ((low_bound = array_ref_low_bound (t),
	       poly_int_tree_p (low_bound))
	      && (unit_size = array_ref_element_size (t),
		  tree_fits_uhwi_p (unit_size)))
	    {
	      poly_offset_int woffset
		= wi::sext (wi::to_poly_offset (idx)
			    - wi::to_poly_offset (low_bound),
			    TYPE_PRECISION (TREE_TYPE (idx)));

	      if (woffset.to_shwi (&offset))
		{
		  /* TODO: This code seems wrong, multiply then check
		     to see if it fits.  */
		  offset *= tree_to_uhwi (unit_size);
		  offset *= BITS_PER_UNIT;

		  base = TREE_OPERAND (t, 0);
		  ctor = get_base_constructor (base, &offset, valueize);
		  /* Empty constructor.  Always fold to 0.  */
		  if (ctor == error_mark_node)
		    return build_zero_cst (TREE_TYPE (t));
		  /* Out of bound array access.  Value is undefined,
		     but don't fold.  */
		  if (maybe_lt (offset, 0))
		    return NULL_TREE;
		  /* We cannot determine ctor.  */
		  if (!ctor)
		    return NULL_TREE;
		  return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
					      tree_to_uhwi (unit_size)
					      * BITS_PER_UNIT,
					      base);
		}
	    }
	}
      /* Fallthru.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case TARGET_MEM_REF:
    case MEM_REF:
      base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
      ctor = get_base_constructor (base, &offset, valueize);

      /* Empty constructor.  Always fold to 0.  */
      if (ctor == error_mark_node)
	return build_zero_cst (TREE_TYPE (t));
      /* We do not know precise address.  */
      if (!known_size_p (max_size) || maybe_ne (max_size, size))
	return NULL_TREE;
      /* We cannot determine ctor.  */
      if (!ctor)
	return NULL_TREE;

      /* Out of bound array access.  Value is undefined, but don't fold.  */
      if (maybe_lt (offset, 0))
	return NULL_TREE;

      return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
				  base);

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
	if (c && TREE_CODE (c) == COMPLEX_CST)
	  return fold_build1_loc (EXPR_LOCATION (t),
				  TREE_CODE (t), TREE_TYPE (t), c);
	break;
      }

    default:
      break;
    }

  return NULL_TREE;
}

tree
fold_const_aggregate_ref (tree t)
{
  return fold_const_aggregate_ref_1 (t, NULL);
}
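/* A hypothetical use (A_DECL stands for the VAR_DECL of 'a' and is
   illustrative): for

     static const int a[2] = { 1, 2 };
     tree ref = build4 (ARRAY_REF, integer_type_node, a_decl,
			integer_one_node, NULL_TREE, NULL_TREE);

   fold_const_aggregate_ref (ref) returns the INTEGER_CST 2 by looking up
   the constant initializer of 'a'.  */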
/* Lookup virtual method with index TOKEN in a virtual table V
   at OFFSET.
   Set CAN_REFER if non-NULL to false if the method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by a non-C++ produced symbol).  Otherwise just return NULL in that case.  */

tree
gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
				   tree v,
				   unsigned HOST_WIDE_INT offset,
				   bool *can_refer)
{
  tree vtable = v, init, fn;
  unsigned HOST_WIDE_INT size;
  unsigned HOST_WIDE_INT elt_size, access_index;
  tree domain_type;

  if (can_refer)
    *can_refer = true;

  /* First of all double check we have virtual table.  */
  if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
    {
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }

  init = ctor_for_folding (v);

  /* The virtual tables should always be born with constructors
     and we always should assume that they are available for
     folding.  At the moment we do not stream them in all cases,
     but it should never happen that the ctor seems unreachable.  */
  gcc_assert (init);
  if (init == error_mark_node)
    {
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }
  gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
  size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
  offset *= BITS_PER_UNIT;
  offset += token * size;

  /* Lookup the value in the constructor that is assumed to be an array.
     This is equivalent to
     fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
			       offset, size, NULL);
     but in constant time.  We expect that the frontend produced a simple
     array without indexed initializers.  */

  gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
  domain_type = TYPE_DOMAIN (TREE_TYPE (init));
  gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
  elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));

  access_index = offset / BITS_PER_UNIT / elt_size;
  gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);

  /* This code makes an assumption that there are no
     indexed fields produced by the C++ FE, so we can directly index the
     array.  */
  if (access_index < CONSTRUCTOR_NELTS (init))
    {
      fn = CONSTRUCTOR_ELT (init, access_index)->value;
      gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
    }
  else
    fn = NULL;

  /* For a type-inconsistent program we may end up looking up a virtual method
     in a virtual table that does not contain TOKEN entries.  We may overrun
     the virtual table and pick up a constant or RTTI info pointer.
     In any case the call is undefined.  */
  if (!fn
      || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
      || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
    fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
  else
    {
      fn = TREE_OPERAND (fn, 0);

      /* When the cgraph node is missing and the function is not public, we
	 cannot devirtualize.  This can happen in WHOPR when the actual method
	 ends up in another partition, because we found the devirtualization
	 possibility too late.  */
      if (!can_refer_decl_in_current_unit_p (fn, vtable))
	{
	  if (can_refer)
	    {
	      *can_refer = false;
	      return fn;
	    }
	  return NULL_TREE;
	}
    }

  /* Make sure we create a cgraph node for functions we'll reference.
     They can be non-existent if the reference comes from an entry
     of an external vtable for example.  */
  cgraph_node::get_create (fn);

  return fn;
}
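/* A worked example (illustrative, assuming 8-byte vtable slots so
   SIZE == 64 and ELT_SIZE == 8): for TOKEN == 2 and byte OFFSET == 16
   the code above computes offset = 16 * 8 + 2 * 64 == 256 bits and
   access_index == 256 / 8 / 8 == 4, i.e. the fifth element of the
   vtable constructor.  */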
/* Return a declaration of a function which an OBJ_TYPE_REF references.  TOKEN
   is the integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
   KNOWN_BINFO carries the binfo describing the true type of
   OBJ_TYPE_REF_OBJECT(REF).
   Set CAN_REFER if non-NULL to false if the method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by a non-C++ produced symbol).  Otherwise just return NULL in that case.  */

tree
gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
				  bool *can_refer)
{
  unsigned HOST_WIDE_INT offset;
  tree v;

  v = BINFO_VTABLE (known_binfo);
  /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone.  */
  if (!v)
    return NULL_TREE;

  if (!vtable_pointer_value_to_vtable (v, &v, &offset))
    {
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }
  return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
}
/* Given a pointer value T, return a simplified version of an
   indirection through T, or NULL_TREE if no simplification is
   possible.  Note that the resulting type may be different from
   the type pointed to in the sense that it is still compatible
   from the langhooks point of view.  */

tree
gimple_fold_indirect_ref (tree t)
{
  tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
  tree sub = t;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype)
      || TYPE_REF_CAN_ALIAS_ALL (ptype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (useless_type_conversion_p (type, optype))
	return op;

      /* *(foo *)&fooarray => fooarray[0] */
      if (TREE_CODE (optype) == ARRAY_TYPE
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
	  && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  if (TREE_CODE (min_val) == INTEGER_CST)
	    return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
	}
    }

  /* *(p + CST) -> ...  */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree addr = TREE_OPERAND (sub, 0);
      tree off = TREE_OPERAND (sub, 1);
      tree addrtype;

      STRIP_NOPS (addr);
      addrtype = TREE_TYPE (addr);

      /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
	  && tree_fits_uhwi_p (off))
	{
	  unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
	  tree part_width = TYPE_SIZE (type);
	  unsigned HOST_WIDE_INT part_widthi
	    = tree_to_shwi (part_width) / BITS_PER_UNIT;
	  unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
	  tree index = bitsize_int (indexi);
	  if (known_lt (offset / part_widthi,
			TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype))))
	    return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
				part_width, index);
	}

      /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
	{
	  tree size = TYPE_SIZE_UNIT (type);
	  if (tree_int_cst_equal (size, off))
	    return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
	}

      /* *(p + CST) -> MEM_REF <p, CST>.  */
      if (TREE_CODE (addr) != ADDR_EXPR
	  || DECL_P (TREE_OPERAND (addr, 0)))
	return fold_build2 (MEM_REF, type,
			    addr,
			    wide_int_to_tree (ptype, wi::to_wide (off)));
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
      && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      tree osub = sub;
      sub = gimple_fold_indirect_ref (sub);
      if (!sub)
	sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      if (TREE_CODE (min_val) == INTEGER_CST)
	return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
/* Return true if CODE is an operation that when operating on signed
   integer types involves undefined behavior on overflow and the
   operation can be expressed with unsigned arithmetic.  */

bool
arith_code_with_undefined_signed_overflow (tree_code code)
{
  switch (code)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case NEGATE_EXPR:
    case POINTER_PLUS_EXPR:
      return true;
    default:
      return false;
    }
}

/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
   operation that can be transformed to unsigned arithmetic by converting
   its operand, carrying out the operation in the corresponding unsigned
   type and converting the result back to the original type.

   Returns a sequence of statements that replace STMT and also contain
   a modified form of STMT itself.  */

gimple_seq
rewrite_to_defined_overflow (gimple *stmt)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "rewriting stmt with undefined signed "
	       "overflow ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
    }

  tree lhs = gimple_assign_lhs (stmt);
  tree type = unsigned_type_for (TREE_TYPE (lhs));
  gimple_seq stmts = NULL;
  for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
    {
      tree op = gimple_op (stmt, i);
      op = gimple_convert (&stmts, type, op);
      gimple_set_op (stmt, i, op);
    }
  gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
  gimple_seq_add_stmt (&stmts, stmt);
  gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
  gimple_seq_add_stmt (&stmts, cvt);

  return stmts;
}
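/* For example (an illustrative sketch): for the signed addition

     x_1 = a_2 + b_3;

   the returned sequence has the shape

     _4 = (unsigned int) a_2;
     _5 = (unsigned int) b_3;
     _6 = _4 + _5;
     x_1 = (int) _6;

   where the unsigned operation wraps and therefore has no undefined
   overflow.  */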
/* The valueization hook we use for the gimple_build API simplification.
   This makes us match fold_buildN behavior by only combining with
   statements in the sequence(s) we are currently building.  */

static tree
gimple_build_valueize (tree op)
{
  if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
    return op;
  return NULL_TREE;
}

/* Build the expression CODE OP0 of type TYPE with location LOC,
   simplifying it first if possible.  Returns the built
   expression value and appends statements possibly defining it
   to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum tree_code code, tree type, tree op0)
{
  tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
  if (!res)
    {
      res = create_tmp_reg_or_ssa_name (type);
      gimple *stmt;
      if (code == REALPART_EXPR
	  || code == IMAGPART_EXPR
	  || code == VIEW_CONVERT_EXPR)
	stmt = gimple_build_assign (res, code, build1 (code, type, op0));
      else
	stmt = gimple_build_assign (res, code, op0);
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
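/* A hypothetical use (an illustrative sketch): to negate OP while giving
   gimple_simplify a chance to fold first, a caller writes

     gimple_seq seq = NULL;
     tree res = gimple_build (&seq, loc, NEGATE_EXPR, type, op);

   and later inserts SEQ; SEQ stays empty when the operation simplified
   to an existing value or constant.  */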
/* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
   simplifying it first if possible.  Returns the built
   expression value and appends statements possibly defining it
   to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum tree_code code, tree type, tree op0, tree op1)
{
  tree res = gimple_simplify (code, type, op0, op1, seq,
			      gimple_build_valueize);
  if (!res)
    {
      res = create_tmp_reg_or_ssa_name (type);
      gimple *stmt = gimple_build_assign (res, code, op0, op1);
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
/* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
   simplifying it first if possible.  Returns the built
   expression value and appends statements possibly defining it
   to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum tree_code code, tree type, tree op0, tree op1, tree op2)
{
  tree res = gimple_simplify (code, type, op0, op1, op2,
			      seq, gimple_build_valueize);
  if (!res)
    {
      res = create_tmp_reg_or_ssa_name (type);
      gimple *stmt;
      if (code == BIT_FIELD_REF)
	stmt = gimple_build_assign (res, code,
				    build3 (code, type, op0, op1, op2));
      else
	stmt = gimple_build_assign (res, code, op0, op1, op2);
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
/* Build the call FN (ARG0) with a result of type TYPE
   (or no result if TYPE is void) with location LOC,
   simplifying it first if possible.  Returns the built
   expression value (or NULL_TREE if TYPE is void) and appends
   statements possibly defining it to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum built_in_function fn, tree type, tree arg0)
{
  tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
  if (!res)
    {
      tree decl = builtin_decl_implicit (fn);
      gimple *stmt = gimple_build_call (decl, 1, arg0);
      if (!VOID_TYPE_P (type))
	{
	  res = create_tmp_reg_or_ssa_name (type);
	  gimple_call_set_lhs (stmt, res);
	}
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
/* Build the call FN (ARG0, ARG1) with a result of type TYPE
   (or no result if TYPE is void) with location LOC,
   simplifying it first if possible.  Returns the built
   expression value (or NULL_TREE if TYPE is void) and appends
   statements possibly defining it to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum built_in_function fn, tree type, tree arg0, tree arg1)
{
  tree res = gimple_simplify (fn, type, arg0, arg1, seq,
			      gimple_build_valueize);
  if (!res)
    {
      tree decl = builtin_decl_implicit (fn);
      gimple *stmt = gimple_build_call (decl, 2, arg0, arg1);
      if (!VOID_TYPE_P (type))
	{
	  res = create_tmp_reg_or_ssa_name (type);
	  gimple_call_set_lhs (stmt, res);
	}
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
/* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
   (or no result if TYPE is void) with location LOC,
   simplifying it first if possible.  Returns the built
   expression value (or NULL_TREE if TYPE is void) and appends
   statements possibly defining it to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum built_in_function fn, tree type,
	      tree arg0, tree arg1, tree arg2)
{
  tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
			      seq, gimple_build_valueize);
  if (!res)
    {
      tree decl = builtin_decl_implicit (fn);
      gimple *stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
      if (!VOID_TYPE_P (type))
	{
	  res = create_tmp_reg_or_ssa_name (type);
	  gimple_call_set_lhs (stmt, res);
	}
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
/* Build the conversion (TYPE) OP with a result of type TYPE
   with location LOC if such conversion is necessary in GIMPLE,
   simplifying it first.
   Returns the built expression value and appends
   statements possibly defining it to SEQ.  */

tree
gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
{
  if (useless_type_conversion_p (type, TREE_TYPE (op)))
    return op;
  return gimple_build (seq, loc, NOP_EXPR, type, op);
}
/* Build the conversion (ptrofftype) OP with a result of a type
   compatible with ptrofftype with location LOC if such conversion
   is necessary in GIMPLE, simplifying it first.
   Returns the built expression value and appends
   statements possibly defining it to SEQ.  */

tree
gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
{
  if (ptrofftype_p (TREE_TYPE (op)))
    return op;
  return gimple_convert (seq, loc, sizetype, op);
}
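/* A hypothetical use (an illustrative sketch): a caller building
   P p+ OFF first writes

     off = gimple_convert_to_ptrofftype (&seq, loc, off);

   which emits no statement at all when OFF already satisfies
   ptrofftype_p.  */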
/* Build a vector of type TYPE in which each element has the value OP.
   Return a gimple value for the result, appending any new statements
   to SEQ.  */

tree
gimple_build_vector_from_val (gimple_seq *seq, location_t loc, tree type,
			      tree op)
{
  if (!TYPE_VECTOR_SUBPARTS (type).is_constant ()
      && !CONSTANT_CLASS_P (op))
    return gimple_build (seq, loc, VEC_DUPLICATE_EXPR, type, op);

  tree res, vec = build_vector_from_val (type, op);
  if (is_gimple_val (vec))
    return vec;
  if (gimple_in_ssa_p (cfun))
    res = make_ssa_name (type);
  else
    res = create_tmp_reg (type);
  gimple *stmt = gimple_build_assign (res, vec);
  gimple_set_location (stmt, loc);
  gimple_seq_add_stmt_without_update (seq, stmt);
  return res;
}
/* Build a vector from BUILDER, handling the case in which some elements
   are non-constant.  Return a gimple value for the result, appending any
   new instructions to SEQ.

   BUILDER must not have a stepped encoding on entry.  This is because
   the function is not geared up to handle the arithmetic that would
   be needed in the variable case, and any code building a vector that
   is known to be constant should use BUILDER->build () directly.  */

tree
gimple_build_vector (gimple_seq *seq, location_t loc,
		     tree_vector_builder *builder)
{
  gcc_assert (builder->nelts_per_pattern () <= 2);
  unsigned int encoded_nelts = builder->encoded_nelts ();
  for (unsigned int i = 0; i < encoded_nelts; ++i)
    if (!TREE_CONSTANT ((*builder)[i]))
      {
	tree type = builder->type ();
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
	vec<constructor_elt, va_gc> *v;
	vec_alloc (v, nelts);
	for (i = 0; i < nelts; ++i)
	  CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, builder->elt (i));

	tree res;
	if (gimple_in_ssa_p (cfun))
	  res = make_ssa_name (type);
	else
	  res = create_tmp_reg (type);
	gimple *stmt = gimple_build_assign (res, build_constructor (type, v));
	gimple_set_location (stmt, loc);
	gimple_seq_add_stmt_without_update (seq, stmt);
	return res;
      }
  return builder->build ();
}
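/* A hypothetical use (an illustrative sketch; the builder arguments mean
   two patterns of one element each): to build the two-element vector
   { x_1, 0 } where x_1 is an SSA name, a caller writes

     tree_vector_builder builder (vectype, 2, 1);
     builder.quick_push (x_1);
     builder.quick_push (integer_zero_node);
     tree vec = gimple_build_vector (&seq, loc, &builder);

   and because one element is non-constant the result is a CONSTRUCTOR
   assigned to a fresh SSA name appended to SEQ.  */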
/* Return true if the result of assignment STMT is known to be non-negative.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

static bool
gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
				   int depth)
{
  enum tree_code code = gimple_assign_rhs_code (stmt);
  switch (get_gimple_rhs_class (code))
    {
    case GIMPLE_UNARY_RHS:
      return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
					     gimple_expr_type (stmt),
					     gimple_assign_rhs1 (stmt),
					     strict_overflow_p, depth);
    case GIMPLE_BINARY_RHS:
      return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
					      gimple_expr_type (stmt),
					      gimple_assign_rhs1 (stmt),
					      gimple_assign_rhs2 (stmt),
					      strict_overflow_p, depth);
    case GIMPLE_TERNARY_RHS:
      return false;
    case GIMPLE_SINGLE_RHS:
      return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
					      strict_overflow_p, depth);
    case GIMPLE_INVALID_RHS:
      break;
    }
  gcc_unreachable ();
}
/* Return true if return value of call STMT is known to be non-negative.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

static bool
gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
				 int depth)
{
  tree arg0 = gimple_call_num_args (stmt) > 0 ?
    gimple_call_arg (stmt, 0) : NULL_TREE;
  tree arg1 = gimple_call_num_args (stmt) > 1 ?
    gimple_call_arg (stmt, 1) : NULL_TREE;

  return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
					gimple_call_combined_fn (stmt),
					arg0,
					arg1,
					strict_overflow_p, depth);
}
/* Return true if the result of PHI node STMT is known to be non-negative.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

static bool
gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
				int depth)
{
  for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
    {
      tree arg = gimple_phi_arg_def (stmt, i);
      if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
	return false;
    }
  return true;
}
/* Return true if STMT is known to compute a non-negative value.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

bool
gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
				 int depth)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
						depth);
    case GIMPLE_CALL:
      return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
					      depth);
    case GIMPLE_PHI:
      return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
					     depth);
    default:
      return false;
    }
}
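/* For example (an illustrative sketch): for

     x_2 = y_1 * y_1;

   the GIMPLE_ASSIGN case dispatches to tree_binary_nonnegative_warnv_p,
   which can treat the square of a signed value as non-negative only under
   the assumption that signed overflow is undefined, and in that case it
   sets *STRICT_OVERFLOW_P.  */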
/* Return true if the floating-point value computed by assignment STMT
   is known to have an integer value.  We also allow +Inf, -Inf and NaN
   to be considered integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

static bool
gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
{
  enum tree_code code = gimple_assign_rhs_code (stmt);
  switch (get_gimple_rhs_class (code))
    {
    case GIMPLE_UNARY_RHS:
      return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
					  gimple_assign_rhs1 (stmt), depth);
    case GIMPLE_BINARY_RHS:
      return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
					   gimple_assign_rhs1 (stmt),
					   gimple_assign_rhs2 (stmt), depth);
    case GIMPLE_TERNARY_RHS:
      return false;
    case GIMPLE_SINGLE_RHS:
      return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
    case GIMPLE_INVALID_RHS:
      break;
    }
  gcc_unreachable ();
}
/* Return true if the floating-point value computed by call STMT is known
   to have an integer value.  We also allow +Inf, -Inf and NaN to be
   considered integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

static bool
gimple_call_integer_valued_real_p (gimple *stmt, int depth)
{
  tree arg0 = (gimple_call_num_args (stmt) > 0
	       ? gimple_call_arg (stmt, 0)
	       : NULL_TREE);
  tree arg1 = (gimple_call_num_args (stmt) > 1
	       ? gimple_call_arg (stmt, 1)
	       : NULL_TREE);
  return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
				     arg0, arg1, depth);
}
/* Return true if the floating-point result of phi STMT is known to have
   an integer value.  We also allow +Inf, -Inf and NaN to be considered
   integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

static bool
gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
{
  for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
    {
      tree arg = gimple_phi_arg_def (stmt, i);
      if (!integer_valued_real_single_p (arg, depth + 1))
	return false;
    }
  return true;
}
/* Return true if the floating-point value computed by STMT is known
   to have an integer value.  We also allow +Inf, -Inf and NaN to be
   considered integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

bool
gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      return gimple_assign_integer_valued_real_p (stmt, depth);
    case GIMPLE_CALL:
      return gimple_call_integer_valued_real_p (stmt, depth);
    case GIMPLE_PHI:
      return gimple_phi_integer_valued_real_p (stmt, depth);