/* Statement simplification on GIMPLE.
   Copyright (C) 2010-2015 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.c.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
23 #include "coretypes.h"
31 #include "fold-const.h"
33 #include "insn-config.h"
42 #include "stor-layout.h"
44 #include "dominance.h"
45 #include "internal-fn.h"
46 #include "gimple-fold.h"
48 #include "gimple-iterator.h"
49 #include "tree-into-ssa.h"
52 #include "tree-ssa-propagate.h"
55 #include "ipa-utils.h"
56 #include "gimple-pretty-print.h"
57 #include "tree-ssa-address.h"
58 #include "langhooks.h"
59 #include "gimplify-me.h"
64 #include "gimple-match.h"
65 #include "gomp-constants.h"
66 #include "optabs-query.h"
/* Return true when DECL can be referenced from current unit.
   FROM_DECL (if non-null) specifies the variable whose constructor
   DECL was taken from.
   We can get declarations that are not possible to reference for various
   reasons:

     1) When analyzing C++ virtual tables.
	C++ virtual tables do have known constructors even
	when they are keyed to other compilation unit.
	Those tables can contain pointers to methods and vars
	in other units.  Those methods have both STATIC and EXTERNAL
	set.
     2) In WHOPR mode devirtualization might lead to reference
	to method that was partitioned elsewhere.
	In this case we have static VAR_DECL or FUNCTION_DECL
	that has no corresponding callgraph/varpool node
	declaring the body.
     3) COMDAT functions referred by external vtables that
	we devirtualize only during final compilation stage.
	At this time we already decided that we will not output
	the function body and thus we can't reference the symbol
	directly.  */
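
/* For example, a load folded from an external vtable slot may name a
   COMDAT method whose body this unit already decided not to emit
   (case 3 above); answering false here makes the folders keep the
   indirect reference instead of introducing a direct one that the
   current unit could not resolve.  */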
static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  if (DECL_ABSTRACT_P (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL))
    return true;

  /* Static objects can be referred to only if they were not optimized
     out yet.  */
  if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
    {
      /* Before we start optimizing unreachable code we can be sure all
	 static objects are defined.  */
      if (symtab->function_flags_ready)
	return true;
      snode = symtab_node::get (decl);
      if (!snode || !snode->definition)
	return false;
      node = dyn_cast <cgraph_node *> (snode);
      return !node || !node->global.inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from initializer of
     external var or var that has been optimized out.  */
  if (!from_decl
      || TREE_CODE (from_decl) != VAR_DECL
      || (!DECL_EXTERNAL (from_decl)
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->definition)
      || (flag_ltrans
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->in_other_partition))
    return true;
  /* We are folding reference from external vtable.  The vtable may refer
     to a symbol keyed to other compilation unit.  The other compilation
     unit may be in separate DSO and the symbol may be hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
    return false;
  /* When function is public, we always can introduce new reference.
     Exception are the COMDAT functions where introducing a direct
     reference implies the need to include the function body in the
     current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;

  /* We have COMDAT.  We are going to check if we still have definition
     or if the definition is going to be output in other partition.
     Bypass this when gimplifying; all needed functions will be produced.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting to not necessarily give up because the copy will be
     output elsewhere when corresponding vtable is output.
     This is however not possible - the ABI specifies that COMDATs are output
     in units where they are used and when the other unit was compiled with
     LTO it is possible that vtable was kept public while the function itself
     was privatized.  */
  if (!symtab->function_flags_ready)
    return true;

  snode = symtab_node::get (decl);
  if (!snode
      || ((!snode->definition || DECL_EXTERNAL (decl))
	  && (!snode->in_other_partition
	      || (!snode->forced_by_abi && !snode->force_output))))
    return false;
  node = dyn_cast <cgraph_node *> (snode);
  return !node || !node->global.inlined_to;
}
/* CVAL is value taken from DECL_INITIAL of variable.  Try to transform it into
   acceptable form for is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specifies the variable whose constructor
   contains CVAL.  */
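
/* For example, an initializer value of the form &a + 4, i.e. a
   POINTER_PLUS_EXPR of an invariant address, is rewritten here to the
   invariant address &MEM[&a + 4], which is_gimple_min_invariant
   accepts.  */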
tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  tree orig_cval = cval;
  STRIP_NOPS (cval);
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
	cval = build1_loc (EXPR_LOCATION (cval),
			   ADDR_EXPR, TREE_TYPE (ptr),
			   fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
					ptr,
					fold_convert (ptr_type_node,
						      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
	{
	  base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
	  if (base)
	    TREE_OPERAND (cval, 0) = base;
	}
      else
	base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
	return NULL_TREE;

      if ((TREE_CODE (base) == VAR_DECL
	   || TREE_CODE (base) == FUNCTION_DECL)
	  && !can_refer_decl_in_current_unit_p (base, from_decl))
	return NULL_TREE;
      if (TREE_CODE (base) == VAR_DECL)
	TREE_ADDRESSABLE (base) = 1;
      else if (TREE_CODE (base) == FUNCTION_DECL)
	{
	  /* Make sure we create a cgraph node for functions we'll reference.
	     They can be non-existent if the reference comes from an entry
	     of an external vtable for example.  */
	  cgraph_node::get_create (base);
	}
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  if (TREE_OVERFLOW_P (cval))
    return drop_tree_overflow (cval);
  return orig_cval;
}
/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */
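
/* For example, given

     static const int x = 42;

   a load of x folds to the constant 42, and a const-qualified scalar
   that may not be overridden and lacks an initializer folds to zero.  */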
tree
get_symbol_constant_value (tree sym)
{
  tree val = ctor_for_folding (sym);
  if (val != error_mark_node)
    {
      if (val)
	{
	  val = canonicalize_constructor_val (unshare_expr (val), sym);
	  if (val && is_gimple_min_invariant (val))
	    return val;
	  else
	    return NULL_TREE;
	}
      /* Variables declared 'const' without an initializer
	 have zero as the initializer if they may not be
	 overridden at link or run time.  */
      if (!val
	  && is_gimple_reg_type (TREE_TYPE (sym)))
	return build_zero_cst (TREE_TYPE (sym));
    }

  return NULL_TREE;
}
/* Subroutine of fold_stmt.  We perform several simplifications of the
   memory reference tree EXPR and make sure to re-gimplify them properly
   after propagation of constant addresses.  IS_LHS is true if the
   reference is supposed to be an lvalue.  */
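
/* For example, REALPART_EXPR of a COMPLEX_CST folds to the constant real
   part, and a BIT_FIELD_REF of a VECTOR_CST selecting exactly one element
   folds to that element's constant value.  */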
static tree
maybe_fold_reference (tree expr, bool is_lhs)
{
  tree result;

  if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
       || TREE_CODE (expr) == REALPART_EXPR
       || TREE_CODE (expr) == IMAGPART_EXPR)
      && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_unary_loc (EXPR_LOCATION (expr),
			   TREE_CODE (expr),
			   TREE_TYPE (expr),
			   TREE_OPERAND (expr, 0));
  else if (TREE_CODE (expr) == BIT_FIELD_REF
	   && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_ternary_loc (EXPR_LOCATION (expr),
			     TREE_CODE (expr),
			     TREE_TYPE (expr),
			     TREE_OPERAND (expr, 0),
			     TREE_OPERAND (expr, 1),
			     TREE_OPERAND (expr, 2));

  if (!is_lhs
      && (result = fold_const_aggregate_ref (expr))
      && is_gimple_min_invariant (result))
    return result;

  return NULL_TREE;
}
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */
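
/* For example, a GIMPLE_TERNARY_RHS such as 'x = b ? 1 : 1' folds to the
   constant 1, and an OBJ_TYPE_REF with exactly one possible polymorphic
   call target is resolved to the address of that target.  */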
static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple *stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	if (TREE_CLOBBER_P (rhs))
	  return NULL_TREE;

	if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs, false);

	else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
	  {
	    tree val = OBJ_TYPE_REF_EXPR (rhs);
	    if (is_gimple_min_invariant (val))
	      return val;
	    else if (flag_devirtualize && virtual_method_call_p (rhs))
	      {
		bool final;
		vec <cgraph_node *>targets
		  = possible_polymorphic_call_targets (rhs, stmt, &final);
		if (final && targets.length () <= 1 && dbg_cnt (devirt))
		  {
		    if (dump_enabled_p ())
		      {
			location_t loc = gimple_location_safe (stmt);
			dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
					 "resolving virtual function address "
					 "reference to function %s\n",
					 targets.length () == 1
					 ? targets[0]->name ()
					 : "NULL");
		      }
		    if (targets.length () == 1)
		      {
			val = fold_convert (TREE_TYPE (val),
					    build_fold_addr_expr_loc
					      (loc, targets[0]->decl));
			STRIP_USELESS_TYPE_CONVERSION (val);
		      }
		    else
		      /* We can not use __builtin_unreachable here because it
			 can not have address taken.  */
		      val = build_int_cst (TREE_TYPE (val), 0);
		    return val;
		  }
	      }
	  }

	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    tree ref = TREE_OPERAND (rhs, 0);
	    tree tem = maybe_fold_reference (ref, true);
	    if (tem
		&& TREE_CODE (tem) == MEM_REF
		&& integer_zerop (TREE_OPERAND (tem, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
	    else if (tem)
	      result = fold_convert (TREE_TYPE (rhs),
				     build_fold_addr_expr_loc (loc, tem));
	    else if (TREE_CODE (ref) == MEM_REF
		     && integer_zerop (TREE_OPERAND (ref, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		 && (CONSTRUCTOR_NELTS (rhs)
		     == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (TREE_CODE (val) != INTEGER_CST
		  && TREE_CODE (val) != REAL_CST
		  && TREE_CODE (val) != FIXED_CST)
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs))
	  return get_symbol_constant_value (rhs);

	/* If we couldn't fold the RHS, hand over to the generic
	   fold routines.  */
	if (result == NULL_TREE)
	  result = fold (rhs);

	/* Strip away useless type conversions.  Both the NON_LVALUE_EXPR
	   that may have been added by fold, and "useless" type
	   conversions that might now be apparent due to propagation.  */
	STRIP_USELESS_TYPE_CONVERSION (result);

	if (result != rhs && valid_gimple_rhs_p (result))
	  return result;

	return NULL_TREE;
      }

    case GIMPLE_UNARY_RHS:
      break;

    case GIMPLE_BINARY_RHS:
      break;

    case GIMPLE_TERNARY_RHS:
      result = fold_ternary_loc (loc, subcode,
				 TREE_TYPE (gimple_assign_lhs (stmt)),
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 gimple_assign_rhs3 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
/* Replace a statement at *SI_P with a sequence of statements in STMTS,
   adjusting the replacement stmts location and virtual operands.
   If the statement has a lhs the last stmt in the sequence is expected
   to assign to that lhs.  */
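
/* For example, when STMTS contains two stores, the earlier store gets a
   fresh virtual SSA name for its VDEF while the last store inherits the
   VDEF of the statement being replaced, keeping the virtual use-def chain
   of the following code intact.  */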
static void
gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
{
  gimple *stmt = gsi_stmt (*si_p);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  gimple *laststore = NULL;
  for (gimple_stmt_iterator i = gsi_last (stmts);
       !gsi_end_p (i); gsi_prev (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      if ((gimple_assign_single_p (new_stmt)
	   && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	  || (is_gimple_call (new_stmt)
	      && (gimple_call_flags (new_stmt)
		  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
	{
	  tree vdef;
	  if (!laststore)
	    vdef = gimple_vdef (stmt);
	  else
	    vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
	  gimple_set_vdef (new_stmt, vdef);
	  if (vdef && TREE_CODE (vdef) == SSA_NAME)
	    SSA_NAME_DEF_STMT (vdef) = new_stmt;
	  laststore = new_stmt;
	}
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  tree reaching_vuse = gimple_vuse (stmt);
  for (gimple_stmt_iterator i = gsi_start (stmts);
       !gsi_end_p (i); gsi_next (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with exact SSA
	 name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
	gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
	reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
	  && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace (si_p, stmts, false);
}
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */
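
/* For example, a folder that rewrites a call with a used result into the
   expression 'a + b' ends up with the replacement sequence

     tmp = a + b;
     lhs = tmp;

   spliced over the original GIMPLE_CALL by gsi_replace_with_seq_vops.  */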
void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple *stmt, *new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  push_gimplify_context (gimple_in_ssa_p (cfun));

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  pop_gimplify_context (NULL);
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  gsi_replace (si_p, gimple_build_nop (), false);
	  return;
	}
    }
  else
    {
      tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
				       GSI_CONTINUE_LINKING);
    }

  pop_gimplify_context (NULL);

  gsi_replace_with_seq_vops (si_p, stmts);
}
/* Replace the call at *GSI with the gimple value VAL.  */
static void
replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
{
  gimple *repl;
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  if (lhs)
    {
      if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
	val = fold_convert (TREE_TYPE (lhs), val);
      repl = gimple_build_assign (lhs, val);
    }
  else
    repl = gimple_build_nop ();
  tree vdef = gimple_vdef (stmt);
  if (vdef && TREE_CODE (vdef) == SSA_NAME)
    {
      unlink_stmt_vdef (stmt);
      release_ssa_name (vdef);
    }
  gsi_replace (gsi, repl, false);
}
/* Replace the call at *GSI with the new call REPL and fold that
   again.  */

static void
replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
{
  gimple *stmt = gsi_stmt (*gsi);
  gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
  gimple_set_location (repl, gimple_location (stmt));
  if (gimple_vdef (stmt)
      && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
    {
      gimple_set_vdef (repl, gimple_vdef (stmt));
      gimple_set_vuse (repl, gimple_vuse (stmt));
      SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
    }
  gsi_replace (gsi, repl, false);
  fold_stmt (gsi);
}
/* Return true if VAR is a VAR_DECL or a component thereof.  */

static bool
var_decl_component_p (tree var)
{
  tree inner = var;
  while (handled_component_p (inner))
    inner = TREE_OPERAND (inner, 0);
  return SSA_VAR_P (inner);
}
/* Fold function call to builtin mem{{,p}cpy,move}.  Return
   false if no simplification can be made.
   If ENDP is 0, return DEST (like memcpy).
   If ENDP is 1, return DEST+LEN (like mempcpy).
   If ENDP is 2, return DEST+LEN-1 (like stpcpy).
   If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
   (like memmove).  */
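
/* For example, for

     char a[8], b[8];
     memcpy (a, b, 8);

   the call can be inlined into a single eight-byte load/store pair when
   the target can perform such an access, and with ENDP == 1 the mempcpy
   result is rewritten as a + 8.  */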
static bool
gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
			       tree dest, tree src, int endp)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  tree len = gimple_call_arg (stmt, 2);
  tree destvar, srcvar;
  location_t loc = gimple_location (stmt);

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      gimple *repl;
      if (gimple_call_lhs (stmt))
	repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
      else
	repl = gimple_build_nop ();
      tree vdef = gimple_vdef (stmt);
      if (vdef && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
      gsi_replace (gsi, repl, false);
      return true;
    }

  /* If SRC and DEST are the same (and not volatile), return
     DEST{,+LEN,+LEN-1}.  */
  if (operand_equal_p (src, dest, 0))
    {
      unlink_stmt_vdef (stmt);
      if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	release_ssa_name (gimple_vdef (stmt));
      if (!lhs)
	{
	  gsi_replace (gsi, gimple_build_nop (), false);
	  return true;
	}
      goto done;
    }
  else
    {
      tree srctype, desttype;
      unsigned int src_align, dest_align;
      tree off0;

      /* Build accesses at offset zero with a ref-all character type.  */
      off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
							 ptr_mode, true), 0);

      /* If we can perform the copy efficiently with first doing all loads
	 and then all stores inline it that way.  Currently efficiently
	 means that we can load all the memory into a single integer
	 register which is what MOVE_MAX gives us.  */
      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (tree_fits_uhwi_p (len)
	  && compare_tree_int (len, MOVE_MAX) <= 0
	  /* ???  Don't transform copies from strings with known length this
	     confuses the tree-ssa-strlen.c.  This doesn't handle
	     the case in gcc.dg/strlenopt-8.c which is XFAILed for that
	     reason.  */
	  && !c_strlen (src, 2))
	{
	  unsigned ilen = tree_to_uhwi (len);
	  if (exact_log2 (ilen) != -1)
	    {
	      tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
	      if (type
		  && TYPE_MODE (type) != BLKmode
		  && (GET_MODE_SIZE (TYPE_MODE (type)) * BITS_PER_UNIT
		      == ilen * 8)
		  /* If the destination pointer is not aligned we must be able
		     to emit an unaligned store.  */
		  && (dest_align >= GET_MODE_ALIGNMENT (TYPE_MODE (type))
		      || !SLOW_UNALIGNED_ACCESS (TYPE_MODE (type), dest_align)
		      || (optab_handler (movmisalign_optab, TYPE_MODE (type))
			  != CODE_FOR_nothing)))
		{
		  tree srctype = type;
		  tree desttype = type;
		  if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
		    srctype = build_aligned_type (type, src_align);
		  tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
		  tree tem = fold_const_aggregate_ref (srcmem);
		  if (tem)
		    srcmem = tem;
		  else if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type))
			   && SLOW_UNALIGNED_ACCESS (TYPE_MODE (type),
						     src_align)
			   && (optab_handler (movmisalign_optab,
					      TYPE_MODE (type))
			       == CODE_FOR_nothing))
		    srcmem = NULL_TREE;
		  if (srcmem)
		    {
		      gimple *new_stmt;
		      if (is_gimple_reg_type (TREE_TYPE (srcmem)))
			{
			  new_stmt = gimple_build_assign (NULL_TREE, srcmem);
			  if (gimple_in_ssa_p (cfun))
			    srcmem = make_ssa_name (TREE_TYPE (srcmem),
						    new_stmt);
			  else
			    srcmem = create_tmp_reg (TREE_TYPE (srcmem));
			  gimple_assign_set_lhs (new_stmt, srcmem);
			  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
			}
		      if (dest_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
			desttype = build_aligned_type (type, dest_align);
		      new_stmt
			= gimple_build_assign (fold_build2 (MEM_REF, desttype,
							    dest, off0),
					       srcmem);
		      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
		      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
		      if (gimple_vdef (new_stmt)
			  && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
			SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
		      if (!lhs)
			{
			  gsi_replace (gsi, new_stmt, false);
			  return true;
			}
		      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
		      goto done;
		    }
		}
	    }
	}

      if (endp == 3)
	{
	  /* Both DEST and SRC must be pointer types.
	     ??? This is what old code did.  Is the testing for pointer types
	     really mandatory?

	     If either SRC is readonly or length is 1, we can use memcpy.  */
	  if (!dest_align || !src_align)
	    return false;
	  if (readonly_data_expr (src)
	      || (tree_fits_uhwi_p (len)
		  && (MIN (src_align, dest_align) / BITS_PER_UNIT
		      >= tree_to_uhwi (len))))
	    {
	      tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
	      if (!fn)
		return false;
	      gimple_call_set_fndecl (stmt, fn);
	      gimple_call_set_arg (stmt, 0, dest);
	      gimple_call_set_arg (stmt, 1, src);
	      fold_stmt (gsi);
	      return true;
	    }

	  /* If *src and *dest can't overlap, optimize into memcpy as well.  */
	  if (TREE_CODE (src) == ADDR_EXPR
	      && TREE_CODE (dest) == ADDR_EXPR)
	    {
	      tree src_base, dest_base, fn;
	      HOST_WIDE_INT src_offset = 0, dest_offset = 0;
	      HOST_WIDE_INT size = -1;
	      HOST_WIDE_INT maxsize = -1;

	      srcvar = TREE_OPERAND (src, 0);
	      src_base = get_ref_base_and_extent (srcvar, &src_offset,
						  &size, &maxsize);
	      destvar = TREE_OPERAND (dest, 0);
	      dest_base = get_ref_base_and_extent (destvar, &dest_offset,
						   &size, &maxsize);
	      if (tree_fits_uhwi_p (len))
		maxsize = tree_to_uhwi (len);
	      else
		maxsize = -1;
	      src_offset /= BITS_PER_UNIT;
	      dest_offset /= BITS_PER_UNIT;
	      if (SSA_VAR_P (src_base)
		  && SSA_VAR_P (dest_base))
		{
		  if (operand_equal_p (src_base, dest_base, 0)
		      && ranges_overlap_p (src_offset, maxsize,
					   dest_offset, maxsize))
		    return false;
		}
	      else if (TREE_CODE (src_base) == MEM_REF
		       && TREE_CODE (dest_base) == MEM_REF)
		{
		  if (! operand_equal_p (TREE_OPERAND (src_base, 0),
					 TREE_OPERAND (dest_base, 0), 0))
		    return false;
		  offset_int off = mem_ref_offset (src_base) + src_offset;
		  if (!wi::fits_shwi_p (off))
		    return false;
		  src_offset = off.to_shwi ();

		  off = mem_ref_offset (dest_base) + dest_offset;
		  if (!wi::fits_shwi_p (off))
		    return false;
		  dest_offset = off.to_shwi ();
		  if (ranges_overlap_p (src_offset, maxsize,
					dest_offset, maxsize))
		    return false;
		}
	      else
		return false;

	      fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
	      if (!fn)
		return false;
	      gimple_call_set_fndecl (stmt, fn);
	      gimple_call_set_arg (stmt, 0, dest);
	      gimple_call_set_arg (stmt, 1, src);
	      fold_stmt (gsi);
	      return true;
	    }

	  /* If the destination and source do not alias optimize into
	     memcpy as well.  */
	  if ((is_gimple_min_invariant (dest)
	       || TREE_CODE (dest) == SSA_NAME)
	      && (is_gimple_min_invariant (src)
		  || TREE_CODE (src) == SSA_NAME))
	    {
	      ao_ref destr, srcr;
	      ao_ref_init_from_ptr_and_size (&destr, dest, len);
	      ao_ref_init_from_ptr_and_size (&srcr, src, len);
	      if (!refs_may_alias_p_1 (&destr, &srcr, false))
		{
		  tree fn;
		  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
		  if (!fn)
		    return false;
		  gimple_call_set_fndecl (stmt, fn);
		  gimple_call_set_arg (stmt, 0, dest);
		  gimple_call_set_arg (stmt, 1, src);
		  fold_stmt (gsi);
		  return true;
		}
	    }

	  return false;
	}

      if (!tree_fits_shwi_p (len))
	return false;
      /* FIXME:
	 This logic loses for arguments like (type *)malloc (sizeof (type)),
	 since we strip the casts of up to VOID return value from malloc.
	 Perhaps we ought to inherit type from non-VOID argument here?  */
      STRIP_NOPS (src);
      STRIP_NOPS (dest);
      if (!POINTER_TYPE_P (TREE_TYPE (src))
	  || !POINTER_TYPE_P (TREE_TYPE (dest)))
	return false;
      /* In the following try to find a type that is most natural to be
	 used for the memcpy source and destination and that allows
	 the most optimization when memcpy is turned into a plain assignment
	 using that type.  In theory we could always use a char[len] type
	 but that only gains us that the destination and source possibly
	 no longer will have their address taken.  */
      /* As we fold (void *)(p + CST) to (void *)p + CST undo this here.  */
      if (TREE_CODE (src) == POINTER_PLUS_EXPR)
	{
	  tree tem = TREE_OPERAND (src, 0);
	  STRIP_NOPS (tem);
	  if (tem != TREE_OPERAND (src, 0))
	    src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
	}
      if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
	{
	  tree tem = TREE_OPERAND (dest, 0);
	  STRIP_NOPS (tem);
	  if (tem != TREE_OPERAND (dest, 0))
	    dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
	}
      srctype = TREE_TYPE (TREE_TYPE (src));
      if (TREE_CODE (srctype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
	{
	  srctype = TREE_TYPE (srctype);
	  STRIP_NOPS (src);
	  src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
	}
      desttype = TREE_TYPE (TREE_TYPE (dest));
      if (TREE_CODE (desttype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
	{
	  desttype = TREE_TYPE (desttype);
	  STRIP_NOPS (dest);
	  dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
	}
      if (TREE_ADDRESSABLE (srctype)
	  || TREE_ADDRESSABLE (desttype))
	return false;

      /* Make sure we are not copying using a floating-point mode or
	 a type whose size possibly does not match its precision.  */
      if (FLOAT_MODE_P (TYPE_MODE (desttype))
	  || TREE_CODE (desttype) == BOOLEAN_TYPE
	  || TREE_CODE (desttype) == ENUMERAL_TYPE)
	desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
      if (FLOAT_MODE_P (TYPE_MODE (srctype))
	  || TREE_CODE (srctype) == BOOLEAN_TYPE
	  || TREE_CODE (srctype) == ENUMERAL_TYPE)
	srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
      if (!srctype)
	srctype = desttype;
      if (!desttype)
	desttype = srctype;
      if (!srctype)
	return false;

      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (dest_align < TYPE_ALIGN (desttype)
	  || src_align < TYPE_ALIGN (srctype))
	return false;

      destvar = dest;
      STRIP_NOPS (destvar);
      if (TREE_CODE (destvar) == ADDR_EXPR
	  && var_decl_component_p (TREE_OPERAND (destvar, 0))
	  && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
	destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
      else
	destvar = NULL_TREE;

      srcvar = src;
      STRIP_NOPS (srcvar);
      if (TREE_CODE (srcvar) == ADDR_EXPR
	  && var_decl_component_p (TREE_OPERAND (srcvar, 0))
	  && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
	{
	  if (!destvar
	      || src_align >= TYPE_ALIGN (desttype))
	    srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
				  srcvar, off0);
	  else if (!STRICT_ALIGNMENT)
	    {
	      srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
					    src_align);
	      srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
	    }
	  else
	    srcvar = NULL_TREE;
	}
      else
	srcvar = NULL_TREE;

      if (srcvar == NULL_TREE && destvar == NULL_TREE)
	return false;

      if (srcvar == NULL_TREE)
	{
	  STRIP_NOPS (src);
	  if (src_align >= TYPE_ALIGN (desttype))
	    srcvar = fold_build2 (MEM_REF, desttype, src, off0);
	  else
	    {
	      if (STRICT_ALIGNMENT)
		return false;
	      srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
					    src_align);
	      srcvar = fold_build2 (MEM_REF, srctype, src, off0);
	    }
	}
      else if (destvar == NULL_TREE)
	{
	  STRIP_NOPS (dest);
	  if (dest_align >= TYPE_ALIGN (srctype))
	    destvar = fold_build2 (MEM_REF, srctype, dest, off0);
	  else
	    {
	      if (STRICT_ALIGNMENT)
		return false;
	      desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
					     dest_align);
	      destvar = fold_build2 (MEM_REF, desttype, dest, off0);
	    }
	}

      gimple *new_stmt;
      if (is_gimple_reg_type (TREE_TYPE (srcvar)))
	{
	  new_stmt = gimple_build_assign (NULL_TREE, srcvar);
	  if (gimple_in_ssa_p (cfun))
	    srcvar = make_ssa_name (TREE_TYPE (srcvar), new_stmt);
	  else
	    srcvar = create_tmp_reg (TREE_TYPE (srcvar));
	  gimple_assign_set_lhs (new_stmt, srcvar);
	  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
	  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
	}
      new_stmt = gimple_build_assign (destvar, srcvar);
      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
      if (gimple_vdef (new_stmt)
	  && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
	SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
      if (!lhs)
	{
	  gsi_replace (gsi, new_stmt, false);
	  return true;
	}
      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
    }

done:
  gimple_seq stmts = NULL;
  if (endp == 0 || endp == 3)
    len = NULL_TREE;
  else if (endp == 2)
    len = gimple_build (&stmts, loc, MINUS_EXPR, TREE_TYPE (len), len,
			ssize_int (1));
  if (endp == 2 || endp == 1)
    {
      len = gimple_convert_to_ptrofftype (&stmts, loc, len);
      dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
			   TREE_TYPE (dest), dest, len);
    }

  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gimple *repl = gimple_build_assign (lhs, dest);
  gsi_replace (gsi, repl, false);
  return true;
}
/* Fold function call to builtin memset or bzero at *GSI setting the
   memory of size LEN to VAL.  Return whether a simplification was made.  */
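
/* For example,

     char c;
     memset (&c, 0, 1);

   becomes the single store MEM[&c] = 0, and a nonzero value is
   replicated into every byte of the stored integer.  */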
static bool
gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree etype;
  unsigned HOST_WIDE_INT length, cval;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
      return true;
    }

  if (! tree_fits_uhwi_p (len))
    return false;

  if (TREE_CODE (c) != INTEGER_CST)
    return false;

  tree dest = gimple_call_arg (stmt, 0);
  tree var = dest;
  if (TREE_CODE (var) != ADDR_EXPR)
    return false;

  var = TREE_OPERAND (var, 0);
  if (TREE_THIS_VOLATILE (var))
    return false;

  etype = TREE_TYPE (var);
  if (TREE_CODE (etype) == ARRAY_TYPE)
    etype = TREE_TYPE (etype);

  if (!INTEGRAL_TYPE_P (etype)
      && !POINTER_TYPE_P (etype))
    return false;

  if (! var_decl_component_p (var))
    return false;

  length = tree_to_uhwi (len);
  if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
      || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
    return false;

  if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
    return false;

  if (integer_zerop (c))
    cval = 0;
  else
    {
      if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
	return false;

      cval = TREE_INT_CST_LOW (c);
      cval &= 0xff;
      cval |= cval << 8;
      cval |= cval << 16;
      cval |= (cval << 31) << 1;
    }

  var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
  gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
  gimple_set_vuse (store, gimple_vuse (stmt));
  tree vdef = gimple_vdef (stmt);
  if (vdef && TREE_CODE (vdef) == SSA_NAME)
    {
      gimple_set_vdef (store, gimple_vdef (stmt));
      SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
    }
  gsi_insert_before (gsi, store, GSI_SAME_STMT);
  if (gimple_call_lhs (stmt))
    {
      gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
      gsi_replace (gsi, asgn, false);
    }
  else
    {
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (gsi);
      gsi_remove (&gsi2, true);
    }

  return true;
}
/* Return the string length, maximum string length or maximum value of
   ARG in LENGTH.
   If ARG is an SSA name variable, follow its use-def chains.  If LENGTH
   is not NULL and, for TYPE == 0, its value is not equal to the length
   we determine or if we are unable to determine the length or value,
   return false.  VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be returned, 1 for maximum string
   length and 2 for maximum value ARG can have.  */
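
/* For example, for

     p = b ? "foo" : "foobar";

   a query with TYPE == 1 walks both arms of the defining statement and
   returns 6, the maximum string length of the two alternatives.  */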
static bool
get_maxval_strlen (tree arg, tree *length, bitmap *visited, int type)
{
  tree var, val;
  gimple *def_stmt;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      if (TREE_CODE (arg) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
	  && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
	{
	  tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_maxval_strlen (TREE_OPERAND (aop0, 0),
				      length, visited, type);
	}

      if (type == 2)
	{
	  val = arg;
	  if (TREE_CODE (val) != INTEGER_CST
	      || tree_int_cst_sgn (val) < 0)
	    return false;
	}
      else
	val = c_strlen (arg, 1);
      if (!val)
	return false;

      if (*length)
	{
	  if (type > 0)
	    {
	      if (TREE_CODE (*length) != INTEGER_CST
		  || TREE_CODE (val) != INTEGER_CST)
		return false;

	      if (tree_int_cst_lt (*length, val))
		*length = val;
	      return true;
	    }
	  else if (simple_cst_equal (val, *length) != 1)
	    return false;
	}

      *length = val;
      return true;
    }

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  */
  if (!*visited)
    *visited = BITMAP_ALLOC (NULL);
  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
      case GIMPLE_ASSIGN:
	/* The RHS of the statement defining VAR must either have a
	   constant length or come from another SSA_NAME with a constant
	   length.  */
	if (gimple_assign_single_p (def_stmt)
	    || gimple_assign_unary_nop_p (def_stmt))
	  {
	    tree rhs = gimple_assign_rhs1 (def_stmt);
	    return get_maxval_strlen (rhs, length, visited, type);
	  }
	else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
	  {
	    tree op2 = gimple_assign_rhs2 (def_stmt);
	    tree op3 = gimple_assign_rhs3 (def_stmt);
	    return get_maxval_strlen (op2, length, visited, type)
		   && get_maxval_strlen (op3, length, visited, type);
	  }
	return false;

      case GIMPLE_PHI:
	{
	  /* All the arguments of the PHI node must have the same constant
	     length.  */
	  unsigned i;

	  for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
	    {
	      tree arg = gimple_phi_arg (def_stmt, i)->def;

	      /* If this PHI has itself as an argument, we cannot
		 determine the string length of this argument.  However,
		 if we can find a constant string length for the other
		 PHI args then we can still be sure that this is a
		 constant string length.  So be optimistic and just
		 continue with the next argument.  */
	      if (arg == gimple_phi_result (def_stmt))
		continue;

	      if (!get_maxval_strlen (arg, length, visited, type))
		return false;
	    }
	}
	return true;

      default:
	return false;
    }
}
tree
get_maxval_strlen (tree arg, int type)
{
  bitmap visited = NULL;
  tree len = NULL_TREE;
  if (!get_maxval_strlen (arg, &len, &visited, type))
    len = NULL_TREE;
  if (visited)
    BITMAP_FREE (visited);

  return len;
}
/* Fold function call to builtin strcpy with arguments DEST and SRC.
   If LEN is not NULL, it represents the length of the string to be
   copied.  Return false if no simplification can be made.  */
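
/* For example, when SRC has the known string length 3,

     strcpy (dest, src);

   becomes memcpy (dest, src, 4), copying the terminating nul as well.  */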
static bool
gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
			    tree dest, tree src)
{
  location_t loc = gimple_location (gsi_stmt (*gsi));
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (operand_equal_p (src, dest, 0))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (optimize_function_for_size_p (cfun))
    return false;

  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  tree len = get_maxval_strlen (src, 0);
  if (!len)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
   If SLEN is not NULL, it represents the length of the source string.
   Return false if no simplification can be made.  */
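
/* For example, strncpy (dest, "abc", 4) becomes memcpy (dest, "abc", 4)
   since the whole source including the terminating nul is copied, while
   strncpy (dest, "abc", 8) is left alone because it would have to pad
   DEST with nuls.  */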
static bool
gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
			     tree dest, tree src, tree len)
{
  location_t loc = gimple_location (gsi_stmt (*gsi));
  tree fn;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  /* We can't compare slen with len as constants below if len is not a
     constant.  */
  if (TREE_CODE (len) != INTEGER_CST)
    return false;

  /* Now, we must be passed a constant src ptr parameter.  */
  tree slen = get_maxval_strlen (src, 0);
  if (!slen || TREE_CODE (slen) != INTEGER_CST)
    return false;

  slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

  /* We do not support simplification of this case, though we do
     support it when expanding trees into RTL.  */
  /* FIXME: generate a call to __builtin_memset.  */
  if (tree_int_cst_lt (slen, len))
    return false;

  /* OK transform into builtin memcpy.  */
  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Simplify a call to the strcat builtin.  DST and SRC are the arguments
   to the call.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */
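
/* For example, when SRC has a known constant length LEN,

     strcat (dst, src);

   is expanded to

     tmp = strlen (dst);
     memcpy (dst + tmp, src, LEN + 1);

   when optimizing the enclosing block for speed.  */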
static bool
gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);

  const char *p = c_getstr (src);

  /* If the string length is zero, return the dst parameter.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
    return false;

  /* See if we can store by pieces into (dst + strlen(dst)).  */
  tree newdst;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
  tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);

  if (!strlen_fn || !memcpy_fn)
    return false;

  /* If the length of the source string isn't computable don't
     split strcat into strlen and memcpy.  */
  tree len = get_maxval_strlen (src, 0);
  if (! len)
    return false;

  /* Create strlen (dst).  */
  gimple_seq stmts = NULL, stmts2;
  gimple *repl = gimple_build_call (strlen_fn, 1, dst);
  gimple_set_location (repl, loc);
  if (gimple_in_ssa_p (cfun))
    newdst = make_ssa_name (size_type_node);
  else
    newdst = create_tmp_reg (size_type_node);
  gimple_call_set_lhs (repl, newdst);
  gimple_seq_add_stmt_without_update (&stmts, repl);

  /* Create (dst p+ strlen (dst)).  */
  newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
  newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len,
			build_int_cst (size_type_node, 1));
  len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
  gimple_seq_add_stmt_without_update (&stmts, repl);
  if (gimple_call_lhs (stmt))
    {
      repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      gsi_replace_with_seq_vops (gsi, stmts);
      /* gsi now points at the assignment to the lhs, get a
	 stmt iterator to the memcpy call.
	 ???  We can't use gsi_for_stmt as that doesn't work when the
	 CFG isn't built yet.  */
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (&gsi2);
      fold_stmt (&gsi2);
    }
  else
    {
      gsi_replace_with_seq_vops (gsi, stmts);
      fold_stmt (gsi);
    }
  return true;
}
/* Fold a call to the __strcat_chk builtin FNDECL.  DEST, SRC, and SIZE
   are the arguments to the call.  */

static bool
gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree size = gimple_call_arg (stmt, 2);
  tree fn;
  const char *p;

  p = c_getstr (src);
  /* If the SRC parameter is "", return DEST.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
    return false;

  /* If __builtin_strcat_chk is used, assume strcat is available.  */
  fn = builtin_decl_explicit (BUILT_IN_STRCAT);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 2, dest, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Simplify a call to the strncat builtin.  */

static bool
gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dst = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  const char *p = c_getstr (src);

  /* If the requested length is zero, or the src parameter string
     length is zero, return the dst parameter.  */
  if (integer_zerop (len) || (p && *p == '\0'))
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  /* If the requested len is greater than or equal to the string
     length, call strcat.  */
  if (TREE_CODE (len) == INTEGER_CST && p
      && compare_tree_int (len, strlen (p)) >= 0)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);

      /* If the replacement _DECL isn't initialized, don't do the
	 transformation.  */
      if (!fn)
	return false;

      gcall *repl = gimple_build_call (fn, 2, dst, src);
      replace_call_with_call_and_fold (gsi, repl);
      return true;
    }

  return false;
}
/* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
   LEN, and SIZE.  */

static bool
gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);
  tree size = gimple_call_arg (stmt, 3);
  tree fn;
  const char *p;

  p = c_getstr (src);
  /* If the SRC parameter is "" or if LEN is 0, return DEST.  */
  if ((p && *p == '\0')
      || integer_zerop (len))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  if (! integer_all_onesp (size))
    {
      tree src_len = c_strlen (src, 1);
      if (src_len
	  && tree_fits_uhwi_p (src_len)
	  && tree_fits_uhwi_p (len)
	  && ! tree_int_cst_lt (len, src_len))
	{
	  /* If LEN >= strlen (SRC), optimize into __strcat_chk.  */
	  fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
	  if (!fn)
	    return false;

	  gimple *repl = gimple_build_call (fn, 3, dest, src, size);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
      return false;
    }

  /* If __builtin_strncat_chk is used, assume strncat is available.  */
  fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold a call to the fputs builtin.  ARG0 and ARG1 are the arguments
   to the call.  IGNORE is true if the value returned
   by the builtin will be ignored.  UNLOCKED is true if this is
   actually a call to fputs_unlocked.  If LEN is non-NULL, it represents
   the known length of the string.  Return false if no simplification
   was possible.  */
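
/* For example, fputs ("x", f) becomes fputc ('x', f), and
   fputs ("hello", f) becomes fwrite ("hello", 1, 5, f) when not
   optimizing for size.  */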
static bool
gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
			   tree arg0, tree arg1,
			   bool unlocked)
{
  gimple *stmt = gsi_stmt (*gsi);

  /* If we're using an unlocked function, assume the other unlocked
     functions exist explicitly.  */
  tree const fn_fputc = (unlocked
			 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
			 : builtin_decl_implicit (BUILT_IN_FPUTC));
  tree const fn_fwrite = (unlocked
			  ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
			  : builtin_decl_implicit (BUILT_IN_FWRITE));

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt))
    return false;

  /* Get the length of the string passed to fputs.  If the length
     can't be determined, punt.  */
  tree len = get_maxval_strlen (arg0, 0);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return false;

  switch (compare_tree_int (len, 1))
    {
    case -1: /* length is 0, delete the call entirely.  */
      replace_call_with_value (gsi, integer_zero_node);
      return true;

    case 0: /* length is 1, call fputc.  */
      {
	const char *p = c_getstr (arg0);
	if (p != NULL)
	  {
	    if (!fn_fputc)
	      return false;

	    gimple *repl = gimple_build_call (fn_fputc, 2,
					      build_int_cst
						(integer_type_node, p[0]), arg1);
	    replace_call_with_call_and_fold (gsi, repl);
	    return true;
	  }
      }
      /* FALLTHROUGH */
    case 1: /* length is greater than 1, call fwrite.  */
      {
	/* If optimizing for size keep fputs.  */
	if (optimize_function_for_size_p (cfun))
	  return false;
	/* New argument list transforming fputs(string, stream) to
	   fwrite(string, 1, len, stream).  */
	if (!fn_fwrite)
	  return false;

	gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
					  size_one_node, len, arg1);
	replace_call_with_call_and_fold (gsi, repl);
	return true;
      }
    default:
      gcc_unreachable ();
    }
  return false;
}
/* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   DEST, SRC, LEN, and SIZE are the arguments to the call.
   IGNORE is true, if return value can be ignored.  FCODE is the BUILT_IN_*
   code of the builtin.  If MAXLEN is not NULL, it is maximum length
   passed as third argument.  */
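
/* For example, __memcpy_chk (d, s, 16, 32) with a known object size of 32
   can never overflow, so the call is strength-reduced to
   memcpy (d, s, 16).  */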
static bool
gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
				tree dest, tree src, tree len, tree size,
				enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST
     (resp. DEST+LEN for __mempcpy_chk).  */
  if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
    {
      if (fcode != BUILT_IN_MEMPCPY_CHK)
	{
	  replace_call_with_value (gsi, dest);
	  return true;
	}
      else
	{
	  gimple_seq stmts = NULL;
	  len = gimple_convert_to_ptrofftype (&stmts, loc, len);
	  tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR, dest, len);
	  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
	  replace_call_with_value (gsi, temp);
	  return true;
	}
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (len, 2);
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
		{
		  /* (void) __mempcpy_chk () can be optimized into
		     (void) __memcpy_chk ().  */
		  fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
		  if (!fn)
		    return false;

		  gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}
	      return false;
	    }
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  fn = NULL_TREE;
  /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
     mem{cpy,pcpy,move,set} is available.  */
  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
      break;
    case BUILT_IN_MEMPCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
      break;
    case BUILT_IN_MEMMOVE_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
      break;
    case BUILT_IN_MEMSET_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMSET);
      break;
    default:
      break;
    }

  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold a call to the __st[rp]cpy_chk builtin.
   DEST, SRC, and SIZE are the arguments to the call.
   IGNORE is true if return value can be ignored.  FCODE is the BUILT_IN_*
   code of the builtin.  If MAXLEN is not NULL, it is maximum length of
   strings passed as second argument.  */
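
/* For example, __strcpy_chk (d, "abc", 8) has a known source length
   smaller than the object size, so it becomes plain strcpy (d, "abc").  */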
static bool
gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
				tree dest,
				tree src, tree size,
				enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree len, fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (src, 1);
  if (! integer_all_onesp (size))
    {
      len = c_strlen (src, 1);
      if (! len || ! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_STPCPY_CHK)
		{
		  if (! ignore)
		    return false;

		  /* If return value of __stpcpy_chk is ignored,
		     optimize into __strcpy_chk.  */
		  fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
		  if (!fn)
		    return false;

		  gimple *repl = gimple_build_call (fn, 3, dest, src, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}

	      if (! len || TREE_SIDE_EFFECTS (len))
		return false;

	      /* If c_strlen returned something, but not a constant,
		 transform __strcpy_chk into __memcpy_chk.  */
	      fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	      if (!fn)
		return false;

	      gimple_seq stmts = NULL;
	      len = gimple_convert (&stmts, loc, size_type_node, len);
	      len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
				  build_int_cst (size_type_node, 1));
	      gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
	      gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }
	}
      else
	maxlen = len;

      if (! tree_int_cst_lt (maxlen, size))
	return false;
    }

  /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
			      ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 2, dest, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold a call to the __st{r,p}ncpy_chk builtin.  DEST, SRC, LEN, and SIZE
   are the arguments to the call.  If MAXLEN is not NULL, it is maximum
   length passed as third argument.  IGNORE is true if return value can be
   ignored.  FCODE is the BUILT_IN_* code of the builtin.  */

static bool
gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
				 tree dest, tree src,
				 tree len, tree size,
				 enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
    {
      /* If return value of __stpncpy_chk is ignored,
	 optimize into __strncpy_chk.  */
      fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
      if (fn)
	{
	  gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (len, 2);
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    return false;
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
			      ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold function call to builtin stpcpy with arguments DEST and SRC.
   Return false if no simplification can be made.  */
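
/* For example, with the result used and a source of known length 3,

     res = stpcpy (d, s);

   becomes memcpy (d, s, 4) followed by res = d + 3; with the result
   unused it degrades to a plain strcpy call.  */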
static bool
gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  location_t loc = gimple_location (stmt);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree fn, len, lenp1;

  /* If the result is unused, replace stpcpy with strcpy.  */
  if (gimple_call_lhs (stmt) == NULL_TREE)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;
      gimple_call_set_fndecl (stmt, fn);
      fold_stmt (gsi);
      return true;
    }

  len = c_strlen (src, 1);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return false;

  if (optimize_function_for_size_p (cfun)
      /* If length is zero it's small enough.  */
      && !integer_zerop (len))
    return false;

  /* If the source has a known length replace stpcpy with memcpy.  */
  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  gimple_seq stmts = NULL;
  tree tem = gimple_convert (&stmts, loc, size_type_node, len);
  lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
			tem, build_int_cst (size_type_node, 1));
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
  gimple_set_vuse (repl, gimple_vuse (stmt));
  gimple_set_vdef (repl, gimple_vdef (stmt));
  if (gimple_vdef (repl)
      && TREE_CODE (gimple_vdef (repl)) == SSA_NAME)
    SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
  gsi_insert_before (gsi, repl, GSI_SAME_STMT);
  /* Replace the result with dest + len.  */
  stmts = NULL;
  tem = gimple_convert (&stmts, loc, sizetype, len);
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
				      POINTER_PLUS_EXPR, dest, tem);
  gsi_replace (gsi, ret, false);
  /* Finally fold the memcpy call.  */
  gimple_stmt_iterator gsi2 = *gsi;
  gsi_prev (&gsi2);
  fold_stmt (&gsi2);
  return true;
}
/* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS.  Return
   NULL_TREE if a normal call should be emitted rather than expanding
   the function inline.  FCODE is either BUILT_IN_SNPRINTF_CHK or
   BUILT_IN_VSNPRINTF_CHK.  If MAXLEN is not NULL, it is maximum length
   passed as second argument.  */
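
/* For example, __snprintf_chk (buf, 8, 0, 32, "%s", s) with flag 0 and a
   buffer size known to be large enough becomes
   snprintf (buf, 8, "%s", s).  */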
static bool
gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
				  enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call.  */
  if (gimple_call_num_args (stmt) < 5)
    return false;

  dest = gimple_call_arg (stmt, 0);
  len = gimple_call_arg (stmt, 1);
  flag = gimple_call_arg (stmt, 2);
  size = gimple_call_arg (stmt, 3);
  fmt = gimple_call_arg (stmt, 4);

  if (! tree_fits_uhwi_p (size))
    return false;

  if (! integer_all_onesp (size))
    {
      tree maxlen = get_maxval_strlen (len, 2);
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    return false;
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  if (!init_target_chars ())
    return false;

  /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      fmt_str = c_getstr (fmt);
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
     available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
			      ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 5 arguments by 3 retaining
     trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, len);
  gimple_call_set_arg (stmt, 2, fmt);
  for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
/* Fold a call to __{,v}sprintf_chk at *GSI.  Return true if the statement
   was simplified in place, false if a normal call should be emitted
   instead.  FCODE is either BUILT_IN_SPRINTF_CHK or
   BUILT_IN_VSPRINTF_CHK.  */

static bool
gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
				 enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;
  unsigned nargs = gimple_call_num_args (stmt);

  /* Verify the required arguments in the original call.  */
  if (nargs < 4)
    return false;
  dest = gimple_call_arg (stmt, 0);
  flag = gimple_call_arg (stmt, 1);
  size = gimple_call_arg (stmt, 2);
  fmt = gimple_call_arg (stmt, 3);

  if (! tree_fits_uhwi_p (size))
    return false;

  len = NULL_TREE;

  if (!init_target_chars ())
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str != NULL)
    {
      /* If the format doesn't contain % args or %%, we know the size.  */
      if (strchr (fmt_str, target_percent) == 0)
	{
	  if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
	    len = build_int_cstu (size_type_node, strlen (fmt_str));
	}
      /* If the format is "%s" and first ... argument is a string literal,
	 we know the size too.  */
      else if (fcode == BUILT_IN_SPRINTF_CHK
	       && strcmp (fmt_str, target_percent_s) == 0)
	{
	  tree arg;

	  if (nargs == 5)
	    {
	      arg = gimple_call_arg (stmt, 4);
	      if (POINTER_TYPE_P (TREE_TYPE (arg)))
		{
		  len = c_strlen (arg, 1);
		  if (! len || ! tree_fits_uhwi_p (len))
		    len = NULL_TREE;
		}
	    }
	}
    }

  if (! integer_all_onesp (size))
    {
      if (! len || ! tree_int_cst_lt (len, size))
	return false;
    }

  /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
			      ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 4 arguments by 2, retaining
     trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, fmt);
  for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
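
/* Illustrative sketch (editorial addition, not from the original sources):
   with a literal format and a sufficiently large object size,

     __builtin___sprintf_chk (buf, 0, 16, "hello");

   has a known LEN of 5, 5 < 16, and a zero FLAG, so the call becomes

     __builtin_sprintf (buf, "hello");  */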
/* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
   ORIG may be null if this is a 2-argument call.  We don't attempt to
   simplify calls with more than 3 arguments.

   Return true if simplification was possible, otherwise false.  */

static bool
gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree fmt = gimple_call_arg (stmt, 1);
  tree orig = NULL_TREE;
  const char *fmt_str = NULL;

  /* Verify the required arguments in the original call.  We deal with two
     types of sprintf() calls: 'sprintf (str, fmt)' and
     'sprintf (dest, "%s", orig)'.  */
  if (gimple_call_num_args (stmt) > 3)
    return false;

  if (gimple_call_num_args (stmt) == 3)
    orig = gimple_call_arg (stmt, 2);

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);

      if (!fn)
	return false;

      /* Don't optimize sprintf (buf, "abc", ptr++).  */
      if (orig)
	return false;

      /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
	 'format' is known to contain no % formats.  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, fmt);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  repl = gimple_build_assign (gimple_call_lhs (stmt),
				      build_int_cst (integer_type_node,
						     strlen (fmt_str)));
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the strcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);

      if (!fn)
	return false;

      /* Don't crash on sprintf (str1, "%s").  */
      if (!orig)
	return false;

      tree orig_len = NULL_TREE;
      if (gimple_call_lhs (stmt))
	{
	  orig_len = get_maxval_strlen (orig, 0);
	  if (!orig_len)
	    return false;
	}

      /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2).  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, orig);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  if (!useless_type_conversion_p (integer_type_node,
					  TREE_TYPE (orig_len)))
	    orig_len = fold_convert (integer_type_node, orig_len);
	  repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the strcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }
  return false;
}
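
/* Illustrative sketch (editorial addition, not from the original sources):
   the two call shapes handled above, assuming the result is used:

     n = sprintf (buf, "hello");     =>  strcpy (buf, "hello");  n = 5;
     n = sprintf (buf, "%s", str2);  =>  strcpy (buf, str2);     n = len2;

   where len2 is the constant string length of str2 determined by
   get_maxval_strlen.  */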
/* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
   FMT, and ORIG.  ORIG may be null if this is a 3-argument call.  We don't
   attempt to simplify calls with more than 4 arguments.

   Return true if simplification was possible, otherwise false.  */

static bool
gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest = gimple_call_arg (stmt, 0);
  tree destsize = gimple_call_arg (stmt, 1);
  tree fmt = gimple_call_arg (stmt, 2);
  tree orig = NULL_TREE;
  const char *fmt_str = NULL;

  if (gimple_call_num_args (stmt) > 4)
    return false;

  if (gimple_call_num_args (stmt) == 4)
    orig = gimple_call_arg (stmt, 3);

  if (!tree_fits_uhwi_p (destsize))
    return false;
  unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;

      /* Don't optimize snprintf (buf, 4, "abc", ptr++).  */
      if (orig)
	return false;

      /* We could expand this as
	 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
	 or to
	 memcpy (str, fmt_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      size_t len = strlen (fmt_str);
      if (len >= destlen)
	return false;

      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, fmt);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  repl = gimple_build_assign (gimple_call_lhs (stmt),
				      build_int_cst (integer_type_node, len));
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the strcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;

      /* Don't crash on snprintf (str1, cst, "%s").  */
      if (!orig)
	return false;

      tree orig_len = get_maxval_strlen (orig, 0);
      if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
	return false;

      /* We could expand this as
	 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
	 or to
	 memcpy (str1, str2_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      if (compare_tree_int (orig_len, destlen) >= 0)
	return false;

      /* Convert snprintf (str1, cst, "%s", str2) into
	 strcpy (str1, str2) if strlen (str2) < cst.  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, orig);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  if (!useless_type_conversion_p (integer_type_node,
					  TREE_TYPE (orig_len)))
	    orig_len = fold_convert (integer_type_node, orig_len);
	  repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the strcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }
  return false;
}
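
/* Illustrative sketch (editorial addition, not from the original sources):
   with destination size 16 and an argument whose length is known to be 5,

     n = snprintf (buf, 16, "%s", "hello");

   satisfies strlen < destsize and is rewritten to

     strcpy (buf, "hello");  n = 5;

   the size check above guarantees no truncation would have occurred.  */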
/* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
   FP, FMT, and ARG are the arguments to the call.  We don't fold calls with
   more than 3 arguments, and ARG may be null in the 2-argument case.

   Return true if simplification was possible, otherwise false.  FCODE is
   the BUILT_IN_* code of the function to be simplified.  */

static bool
gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
			     tree fp, tree fmt, tree arg,
			     enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree fn_fputc, fn_fputs;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt) != NULL_TREE)
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
      fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
    }
  else
    {
      fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
      fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
    }

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use fputs.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
	  && arg)
	return false;

      /* If the format specifier was "", fprintf does nothing.  */
      if (fmt_str[0] == '\0')
	{
	  replace_call_with_value (gsi, NULL_TREE);
	  return true;
	}

      /* When "string" doesn't contain %, replace all cases of
	 fprintf (fp, string) with fputs (string, fp).  The fputs
	 builtin will take care of special cases like length == 1.  */
      if (fn_fputs)
	{
	  gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
    return false;

  /* If the format specifier was "%s", call __builtin_fputs (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_s) == 0)
    {
      if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	return false;
      if (fn_fputs)
	{
	  gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* If the format specifier was "%c", call __builtin_fputc (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg
	  || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
	return false;
      if (fn_fputc)
	{
	  gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  return false;
}
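
/* Illustrative sketch (editorial addition, not from the original sources):
   when the result is unused,

     fprintf (fp, "plain text");  =>  fputs ("plain text", fp);
     fprintf (fp, "%s", s);       =>  fputs (s, fp);
     fprintf (fp, "%c", c);       =>  fputc (c, fp);

   and fprintf (fp, "") is removed entirely.  */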
/* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
   FMT and ARG are the arguments to the call; we don't fold cases with
   more than 2 arguments, and ARG may be null if this is a 1-argument case.

   Return true if simplification was possible, otherwise false.  FCODE is
   the BUILT_IN_* code of the function to be simplified.  */

static bool
gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
			    tree arg, enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree fn_putchar, fn_puts, newarg;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt) != NULL_TREE)
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (fcode == BUILT_IN_PRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
      fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
    }
  else
    {
      fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
      fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
    }

  if (!init_target_chars ())
    return false;

  if (strcmp (fmt_str, target_percent_s) == 0
      || strchr (fmt_str, target_percent) == NULL)
    {
      const char *str;

      if (strcmp (fmt_str, target_percent_s) == 0)
	{
	  if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
	    return false;

	  if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	    return false;

	  str = c_getstr (arg);
	  if (str == NULL)
	    return false;
	}
      else
	{
	  /* The format specifier doesn't contain any '%' characters.  */
	  if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
	      && arg)
	    return false;
	  str = fmt_str;
	}

      /* If the string was "", printf does nothing.  */
      if (str[0] == '\0')
	{
	  replace_call_with_value (gsi, NULL_TREE);
	  return true;
	}

      /* If the string has length of 1, call putchar.  */
      if (str[1] == '\0')
	{
	  /* Given printf("c"), (where c is any one character,)
	     convert "c"[0] to an int and pass that to the replacement
	     function.  */
	  newarg = build_int_cst (integer_type_node, str[0]);
	  if (fn_putchar)
	    {
	      gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }
	}
      else
	{
	  /* If the string was "string\n", call puts("string").  */
	  size_t len = strlen (str);
	  if ((unsigned char)str[len - 1] == target_newline
	      && (size_t) (int) len == len)
	    {
	      char *newstr;
	      tree offset_node, string_cst;

	      /* Create a NUL-terminated string that's one char shorter
		 than the original, stripping off the trailing '\n'.  */
	      newarg = build_string_literal (len, str);
	      string_cst = string_constant (newarg, &offset_node);
	      gcc_checking_assert (string_cst
				   && (TREE_STRING_LENGTH (string_cst)
				       == (int) len)
				   && integer_zerop (offset_node)
				   && (unsigned char)
				      TREE_STRING_POINTER (string_cst)[len - 1]
				      == target_newline);
	      /* build_string_literal creates a new STRING_CST,
		 modify it in place to avoid double copying.  */
	      newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
	      newstr[len - 1] = '\0';
	      if (fn_puts)
		{
		  gcall *repl = gimple_build_call (fn_puts, 1, newarg);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}
	    }
	  else
	    /* We'd like to arrange to call fputs(string,stdout) here,
	       but we need stdout and don't have a way to get it yet.  */
	    return false;
	}
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
    return false;

  /* If the format specifier was "%s\n", call __builtin_puts(arg).  */
  else if (strcmp (fmt_str, target_percent_s_newline) == 0)
    {
      if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	return false;
      if (fn_puts)
	{
	  gcall *repl = gimple_build_call (fn_puts, 1, arg);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* If the format specifier was "%c", call __builtin_putchar(arg).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg || ! useless_type_conversion_p (integer_type_node,
					       TREE_TYPE (arg)))
	return false;
      if (fn_putchar)
	{
	  gcall *repl = gimple_build_call (fn_putchar, 1, arg);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  return false;
}
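
/* Illustrative sketch (editorial addition, not from the original sources):
   when the result is unused,

     printf ("x");         =>  putchar ('x');
     printf ("hello\n");   =>  puts ("hello");
     printf ("%s\n", s);   =>  puts (s);
     printf ("%c", c);     =>  putchar (c);

   note that printf ("hello") without a trailing newline is left alone,
   since there is no way here to name stdout for a fputs replacement.  */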
/* Fold a call to __builtin_strlen when the length of the argument can be
   computed.  */

static bool
gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree len = get_maxval_strlen (gimple_call_arg (stmt, 0), 0);
  if (!len)
    return false;
  len = force_gimple_operand_gsi (gsi, len, true, NULL, true, GSI_SAME_STMT);
  replace_call_with_value (gsi, len);
  return true;
}
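
/* Illustrative sketch (editorial addition, not from the original sources):
   if get_maxval_strlen proves the argument has a constant length, e.g.
   n_3 = strlen ("abc"), the call is replaced by the constant: n_3 = 3.  */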
/* Fold a call to __builtin_acc_on_device.  */

static bool
gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
{
  /* Defer folding until we know which compiler we're in.  */
  if (symtab->state != EXPANSION)
    return false;

  unsigned val_host = GOMP_DEVICE_HOST;
  unsigned val_dev = GOMP_DEVICE_NONE;

#ifdef ACCEL_COMPILER
  val_host = GOMP_DEVICE_NOT_HOST;
  val_dev = ACCEL_COMPILER_acc_device;
#endif

  location_t loc = gimple_location (gsi_stmt (*gsi));

  tree host_eq = make_ssa_name (boolean_type_node);
  gimple *host_ass = gimple_build_assign
    (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
  gimple_set_location (host_ass, loc);
  gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);

  tree dev_eq = make_ssa_name (boolean_type_node);
  gimple *dev_ass = gimple_build_assign
    (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
  gimple_set_location (dev_ass, loc);
  gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);

  tree result = make_ssa_name (boolean_type_node);
  gimple *result_ass = gimple_build_assign
    (result, BIT_IOR_EXPR, host_eq, dev_eq);
  gimple_set_location (result_ass, loc);
  gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);

  replace_call_with_value (gsi, result);
  return true;
}
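
/* Illustrative sketch (editorial addition, not from the original sources):
   for the host compiler the folding above emits, for acc_on_device (d):

     host_eq_1 = d == GOMP_DEVICE_HOST;
     dev_eq_2  = d == GOMP_DEVICE_NONE;
     result_3  = host_eq_1 | dev_eq_2;

   and replaces the call with result_3; an accel compiler compares against
   GOMP_DEVICE_NOT_HOST and its own device code instead.  */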
/* Fold the non-target builtin at *GSI and return whether any simplification
   was done.  */

static bool
gimple_fold_builtin (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
  tree callee = gimple_call_fndecl (stmt);

  /* Give up for always_inline inline builtins until they are
     inlined.  */
  if (avoid_folding_inline_builtin (callee))
    return false;

  unsigned n = gimple_call_num_args (stmt);
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
  switch (fcode)
    {
    case BUILT_IN_BZERO:
      return gimple_fold_builtin_memset (gsi, integer_zero_node,
					 gimple_call_arg (stmt, 1));
    case BUILT_IN_MEMSET:
      return gimple_fold_builtin_memset (gsi,
					 gimple_call_arg (stmt, 1),
					 gimple_call_arg (stmt, 2));
    case BUILT_IN_BCOPY:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 1),
					    gimple_call_arg (stmt, 0), 3);
    case BUILT_IN_MEMCPY:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1), 0);
    case BUILT_IN_MEMPCPY:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1), 1);
    case BUILT_IN_MEMMOVE:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1), 3);
    case BUILT_IN_SPRINTF_CHK:
    case BUILT_IN_VSPRINTF_CHK:
      return gimple_fold_builtin_sprintf_chk (gsi, fcode);
    case BUILT_IN_STRCAT_CHK:
      return gimple_fold_builtin_strcat_chk (gsi);
    case BUILT_IN_STRNCAT_CHK:
      return gimple_fold_builtin_strncat_chk (gsi);
    case BUILT_IN_STRLEN:
      return gimple_fold_builtin_strlen (gsi);
    case BUILT_IN_STRCPY:
      return gimple_fold_builtin_strcpy (gsi,
					 gimple_call_arg (stmt, 0),
					 gimple_call_arg (stmt, 1));
    case BUILT_IN_STRNCPY:
      return gimple_fold_builtin_strncpy (gsi,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2));
    case BUILT_IN_STRCAT:
      return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
					 gimple_call_arg (stmt, 1));
    case BUILT_IN_STRNCAT:
      return gimple_fold_builtin_strncat (gsi);
    case BUILT_IN_FPUTS:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1), false);
    case BUILT_IN_FPUTS_UNLOCKED:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1), true);
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      return gimple_fold_builtin_memory_chk (gsi,
					     gimple_call_arg (stmt, 0),
					     gimple_call_arg (stmt, 1),
					     gimple_call_arg (stmt, 2),
					     gimple_call_arg (stmt, 3),
					     fcode);
    case BUILT_IN_STPCPY:
      return gimple_fold_builtin_stpcpy (gsi);
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      return gimple_fold_builtin_stxcpy_chk (gsi,
					     gimple_call_arg (stmt, 0),
					     gimple_call_arg (stmt, 1),
					     gimple_call_arg (stmt, 2),
					     fcode);
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      return gimple_fold_builtin_stxncpy_chk (gsi,
					      gimple_call_arg (stmt, 0),
					      gimple_call_arg (stmt, 1),
					      gimple_call_arg (stmt, 2),
					      gimple_call_arg (stmt, 3),
					      fcode);
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      return gimple_fold_builtin_snprintf_chk (gsi, fcode);
    case BUILT_IN_SNPRINTF:
      return gimple_fold_builtin_snprintf (gsi);
    case BUILT_IN_SPRINTF:
      return gimple_fold_builtin_sprintf (gsi);
    case BUILT_IN_FPRINTF:
    case BUILT_IN_FPRINTF_UNLOCKED:
    case BUILT_IN_VFPRINTF:
      if (n == 2 || n == 3)
	return gimple_fold_builtin_fprintf (gsi,
					    gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1),
					    n == 3
					    ? gimple_call_arg (stmt, 2)
					    : NULL_TREE,
					    fcode);
      break;
    case BUILT_IN_FPRINTF_CHK:
    case BUILT_IN_VFPRINTF_CHK:
      if (n == 3 || n == 4)
	return gimple_fold_builtin_fprintf (gsi,
					    gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 2),
					    n == 4
					    ? gimple_call_arg (stmt, 3)
					    : NULL_TREE,
					    fcode);
      break;
    case BUILT_IN_PRINTF:
    case BUILT_IN_PRINTF_UNLOCKED:
    case BUILT_IN_VPRINTF:
      if (n == 1 || n == 2)
	return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
					   n == 2
					   ? gimple_call_arg (stmt, 1)
					   : NULL_TREE, fcode);
      break;
    case BUILT_IN_PRINTF_CHK:
    case BUILT_IN_VPRINTF_CHK:
      if (n == 2 || n == 3)
	return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
					   n == 3
					   ? gimple_call_arg (stmt, 2)
					   : NULL_TREE, fcode);
      break;
    case BUILT_IN_ACC_ON_DEVICE:
      return gimple_fold_builtin_acc_on_device (gsi,
						gimple_call_arg (stmt, 0));
    default:;
    }

  /* Try the generic builtin folder.  */
  bool ignore = (gimple_call_lhs (stmt) == NULL);
  tree result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
	STRIP_NOPS (result);
      else
	result = fold_convert (gimple_call_return_type (stmt), result);
      if (!update_call_from_tree (gsi, result))
	gimplify_and_update_call_from_tree (gsi, result);
      return true;
    }

  return false;
}
/* Return true if ARG0 CODE ARG1 in infinite signed precision operation
   doesn't fit into TYPE.  The test for overflow should be regardless of
   -fwrapv, and even for unsigned types.  */

bool
arith_overflowed_p (enum tree_code code, const_tree type,
		    const_tree arg0, const_tree arg1)
{
  typedef FIXED_WIDE_INT (WIDE_INT_MAX_PRECISION * 2) widest2_int;
  typedef generic_wide_int <wi::extended_tree <WIDE_INT_MAX_PRECISION * 2> >
    widest2_int_cst;
  widest2_int warg0 = widest2_int_cst (arg0);
  widest2_int warg1 = widest2_int_cst (arg1);
  widest2_int wres;
  switch (code)
    {
    case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
    case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
    case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
    default: gcc_unreachable ();
    }
  signop sign = TYPE_SIGN (type);
  if (sign == UNSIGNED && wi::neg_p (wres))
    return true;
  return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
}
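
/* Worked example (editorial addition, not from the original sources):
   for an 8-bit unsigned TYPE, 200 + 100 evaluates to 300 in the
   double-width arithmetic; 300 is non-negative but needs 9 bits, and
   9 > TYPE_PRECISION (8), so the operation overflowed.  Likewise 0 - 1
   yields a negative result with an UNSIGNED sign and is flagged
   immediately.  */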
/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.  */

static bool
gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree callee;
  bool changed = false;
  unsigned i;

  /* Fold *& in call arguments.  */
  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
      {
	tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
	if (tmp)
	  {
	    gimple_call_set_arg (stmt, i, tmp);
	    changed = true;
	  }
      }

  /* Check for virtual calls that became direct calls.  */
  callee = gimple_call_fn (stmt);
  if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
    {
      if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
	{
	  if (dump_file && virtual_method_call_p (callee)
	      && !possible_polymorphic_call_target_p
		    (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
						 (OBJ_TYPE_REF_EXPR (callee)))))
	    {
	      fprintf (dump_file,
		       "Type inheritance inconsistent devirtualization of ");
	      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, callee, TDF_SLIM);
	      fprintf (dump_file, "\n");
	    }

	  gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
	  changed = true;
	}
      else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
	{
	  bool final;
	  vec <cgraph_node *>targets
	    = possible_polymorphic_call_targets (callee, stmt, &final);
	  if (final && targets.length () <= 1 && dbg_cnt (devirt))
	    {
	      tree lhs = gimple_call_lhs (stmt);
	      if (dump_enabled_p ())
		{
		  location_t loc = gimple_location_safe (stmt);
		  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
				   "folding virtual function call to %s\n",
				   targets.length () == 1
				   ? targets[0]->name ()
				   : "__builtin_unreachable");
		}
	      if (targets.length () == 1)
		{
		  gimple_call_set_fndecl (stmt, targets[0]->decl);
		  changed = true;
		  /* If the call becomes noreturn, remove the lhs.  */
		  if (lhs && (gimple_call_flags (stmt) & ECF_NORETURN))
		    {
		      if (TREE_CODE (lhs) == SSA_NAME)
			{
			  tree var = create_tmp_var (TREE_TYPE (lhs));
			  tree def = get_or_create_ssa_default_def (cfun, var);
			  gimple *new_stmt = gimple_build_assign (lhs, def);
			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
			}
		      gimple_call_set_lhs (stmt, NULL_TREE);
		    }
		  maybe_remove_unused_call_args (cfun, stmt);
		}
	      else
		{
		  tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
		  gimple *new_stmt = gimple_build_call (fndecl, 0);
		  gimple_set_location (new_stmt, gimple_location (stmt));
		  if (lhs && TREE_CODE (lhs) == SSA_NAME)
		    {
		      tree var = create_tmp_var (TREE_TYPE (lhs));
		      tree def = get_or_create_ssa_default_def (cfun, var);

		      /* To satisfy condition for
			 cgraph_update_edges_for_call_stmt_node,
			 we need to preserve GIMPLE_CALL statement
			 at position of GSI iterator.  */
		      update_call_from_tree (gsi, def);
		      gsi_insert_before (gsi, new_stmt, GSI_NEW_STMT);
		    }
		  else
		    {
		      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
		      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
		      gsi_replace (gsi, new_stmt, false);
		    }
		  return true;
		}
	    }
	}
    }

  /* Check for indirect calls that became direct calls, and then
     no longer require a static chain.  */
  if (gimple_call_chain (stmt))
    {
      tree fn = gimple_call_fndecl (stmt);
      if (fn && !DECL_STATIC_CHAIN (fn))
	{
	  gimple_call_set_chain (stmt, NULL);
	  changed = true;
	}
      else
	{
	  tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
	  if (tmp)
	    {
	      gimple_call_set_chain (stmt, tmp);
	      changed = true;
	    }
	}
    }

  if (inplace)
    return changed;

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    {
      if (gimple_fold_builtin (gsi))
	changed = true;
    }
  else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
    {
      changed |= targetm.gimple_fold_builtin (gsi);
    }
  else if (gimple_call_internal_p (stmt))
    {
      enum tree_code subcode = ERROR_MARK;
      tree result = NULL_TREE;
      bool cplx_result = false;
      tree overflow = NULL_TREE;
      switch (gimple_call_internal_fn (stmt))
	{
	case IFN_BUILTIN_EXPECT:
	  result = fold_builtin_expect (gimple_location (stmt),
					gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1),
					gimple_call_arg (stmt, 2));
	  break;
	case IFN_UBSAN_OBJECT_SIZE:
	  if (integer_all_onesp (gimple_call_arg (stmt, 2))
	      || (TREE_CODE (gimple_call_arg (stmt, 1)) == INTEGER_CST
		  && TREE_CODE (gimple_call_arg (stmt, 2)) == INTEGER_CST
		  && tree_int_cst_le (gimple_call_arg (stmt, 1),
				      gimple_call_arg (stmt, 2))))
	    {
	      gsi_replace (gsi, gimple_build_nop (), false);
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	      return true;
	    }
	  break;
	case IFN_UBSAN_CHECK_ADD:
	  subcode = PLUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_SUB:
	  subcode = MINUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_MUL:
	  subcode = MULT_EXPR;
	  break;
	case IFN_ADD_OVERFLOW:
	  subcode = PLUS_EXPR;
	  cplx_result = true;
	  break;
	case IFN_SUB_OVERFLOW:
	  subcode = MINUS_EXPR;
	  cplx_result = true;
	  break;
	case IFN_MUL_OVERFLOW:
	  subcode = MULT_EXPR;
	  cplx_result = true;
	  break;
	default:
	  break;
	}
      if (subcode != ERROR_MARK)
	{
	  tree arg0 = gimple_call_arg (stmt, 0);
	  tree arg1 = gimple_call_arg (stmt, 1);
	  tree type = TREE_TYPE (arg0);
	  if (cplx_result)
	    {
	      tree lhs = gimple_call_lhs (stmt);
	      if (lhs == NULL_TREE)
		type = NULL_TREE;
	      else
		type = TREE_TYPE (TREE_TYPE (lhs));
	    }
	  if (type == NULL_TREE)
	    ;
	  /* x = y + 0; x = y - 0; x = y * 0; */
	  else if (integer_zerop (arg1))
	    result = subcode == MULT_EXPR ? integer_zero_node : arg0;
	  /* x = 0 + y; x = 0 * y; */
	  else if (subcode != MINUS_EXPR && integer_zerop (arg0))
	    result = subcode == MULT_EXPR ? integer_zero_node : arg1;
	  /* x = y - y; */
	  else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
	    result = integer_zero_node;
	  /* x = y * 1; x = 1 * y; */
	  else if (subcode == MULT_EXPR && integer_onep (arg1))
	    result = arg0;
	  else if (subcode == MULT_EXPR && integer_onep (arg0))
	    result = arg1;
	  else if (TREE_CODE (arg0) == INTEGER_CST
		   && TREE_CODE (arg1) == INTEGER_CST)
	    {
	      if (cplx_result)
		result = int_const_binop (subcode, fold_convert (type, arg0),
					  fold_convert (type, arg1));
	      else
		result = int_const_binop (subcode, arg0, arg1);
	      if (result && arith_overflowed_p (subcode, type, arg0, arg1))
		{
		  if (cplx_result)
		    overflow = build_one_cst (type);
		  else
		    result = NULL_TREE;
		}
	    }
	  if (result)
	    {
	      if (result == integer_zero_node)
		result = build_zero_cst (type);
	      else if (cplx_result && TREE_TYPE (result) != type)
		{
		  if (TREE_CODE (result) == INTEGER_CST)
		    {
		      if (arith_overflowed_p (PLUS_EXPR, type, result,
					      integer_zero_node))
			overflow = build_one_cst (type);
		    }
		  else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
			    && TYPE_UNSIGNED (type))
			   || (TYPE_PRECISION (type)
			       < (TYPE_PRECISION (TREE_TYPE (result))
				  + (TYPE_UNSIGNED (TREE_TYPE (result))
				     && !TYPE_UNSIGNED (type)))))
		    result = NULL_TREE;
		  if (result)
		    result = fold_convert (type, result);
		}
	    }
	}

      if (result)
	{
	  if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
	    result = drop_tree_overflow (result);
	  if (cplx_result)
	    {
	      if (overflow == NULL_TREE)
		overflow = build_zero_cst (TREE_TYPE (result));
	      tree ctype = build_complex_type (TREE_TYPE (result));
	      if (TREE_CODE (result) == INTEGER_CST
		  && TREE_CODE (overflow) == INTEGER_CST)
		result = build_complex (ctype, result, overflow);
	      else
		result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
				     ctype, result, overflow);
	    }
	  if (!update_call_from_tree (gsi, result))
	    gimplify_and_update_call_from_tree (gsi, result);
	  changed = true;
	}
    }

  return changed;
}
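
/* Illustrative sketch (editorial addition, not from the original sources):
   given a virtual call OBJ_TYPE_REF with a single possible polymorphic
   target foo, the devirtualization above rewrites it to a direct call to
   foo; when the target set is provably empty, the call is replaced by
   __builtin_unreachable ().  */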
/* Return true whether NAME has a use on STMT.  */

static bool
has_use_on_stmt (tree name, gimple *stmt)
{
  imm_use_iterator iter;
  use_operand_p use_p;
  FOR_EACH_IMM_USE_FAST (use_p, iter, name)
    if (USE_STMT (use_p) == stmt)
      return true;
  return false;
}
/* Worker for fold_stmt_1 dispatch to pattern based folding with
   gimple_simplify.

   Replaces *GSI with the simplification result in RCODE and OPS
   and the associated statements in *SEQ.  Does the replacement
   according to INPLACE and returns true if the operation succeeded.  */

static bool
replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
				  code_helper rcode, tree *ops,
				  gimple_seq *seq, bool inplace)
{
  gimple *stmt = gsi_stmt (*gsi);

  /* Play safe and do not allow abnormals to be mentioned in
     newly created statements.  See also maybe_push_res_to_seq.
     As an exception allow such uses if there was a use of the
     same SSA name on the old stmt.  */
  if ((TREE_CODE (ops[0]) == SSA_NAME
       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0])
       && !has_use_on_stmt (ops[0], stmt))
      || (ops[1]
	  && TREE_CODE (ops[1]) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1])
	  && !has_use_on_stmt (ops[1], stmt))
      || (ops[2]
	  && TREE_CODE (ops[2]) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2])
	  && !has_use_on_stmt (ops[2], stmt)))
    return false;

  if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
    {
      gcc_assert (rcode.is_tree_code ());
      if (TREE_CODE_CLASS ((enum tree_code)rcode) == tcc_comparison
	  /* GIMPLE_CONDs condition may not throw.  */
	  && (!flag_exceptions
	      || !cfun->can_throw_non_call_exceptions
	      || !operation_could_trap_p (rcode,
					  FLOAT_TYPE_P (TREE_TYPE (ops[0])),
					  false, NULL_TREE)))
	gimple_cond_set_condition (cond_stmt, rcode, ops[0], ops[1]);
      else if (rcode == SSA_NAME)
	gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
				   build_zero_cst (TREE_TYPE (ops[0])));
      else if (rcode == INTEGER_CST)
	{
	  if (integer_zerop (ops[0]))
	    gimple_cond_make_false (cond_stmt);
	  else
	    gimple_cond_make_true (cond_stmt);
	}
      else if (!inplace)
	{
	  tree res = maybe_push_res_to_seq (rcode, boolean_type_node,
					    ops, seq);
	  if (!res)
	    return false;
	  gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
				     build_zero_cst (TREE_TYPE (res)));
	}
      else
	return false;
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi),
			     0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (is_gimple_assign (stmt)
	   && rcode.is_tree_code ())
    {
      if (!inplace
	  || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (rcode))
	{
	  maybe_build_generic_op (rcode,
				  TREE_TYPE (gimple_assign_lhs (stmt)),
				  &ops[0], ops[1], ops[2]);
	  gimple_assign_set_rhs_with_ops (gsi, rcode, ops[0], ops[1], ops[2]);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      if (!gimple_seq_empty_p (*seq))
		print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	      print_gimple_stmt (dump_file, gsi_stmt (*gsi),
				 0, TDF_SLIM);
	    }
	  gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
	  return true;
	}
    }
  else if (rcode.is_fn_code ()
	   && gimple_call_builtin_p (stmt, rcode))
    {
      unsigned i;
      for (i = 0; i < gimple_call_num_args (stmt); ++i)
	{
	  gcc_assert (ops[i] != NULL_TREE);
	  gimple_call_set_arg (stmt, i, ops[i]);
	}
      if (i < 3)
	gcc_assert (ops[i] == NULL_TREE);
      gcc_assert (gimple_seq_empty_p (*seq));
      return true;
    }
  else if (!inplace)
    {
      if (gimple_has_lhs (stmt))
	{
	  tree lhs = gimple_get_lhs (stmt);
	  if (!maybe_push_res_to_seq (rcode, TREE_TYPE (lhs),
				      ops, seq, lhs))
	    return false;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	    }
	  gsi_replace_with_seq_vops (gsi, *seq);
	  return true;
	}
      else
	gcc_unreachable ();
    }

  return false;
}
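
/* Illustrative sketch (editorial addition, not from the original sources):
   if gimple_simplify reduces the condition of "if (a_1 != a_1)" for an
   integer a_1 to the constant 0, the INTEGER_CST branch above turns the
   statement into "if (0)" via gimple_cond_make_false, and any preparation
   statements collected in *SEQ are inserted right before it.  */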
/* Canonicalize MEM_REFs invariant address operand after propagation.  */

static bool
maybe_canonicalize_mem_ref_addr (tree *t)
{
  bool res = false;

  if (TREE_CODE (*t) == ADDR_EXPR)
    t = &TREE_OPERAND (*t, 0);

  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
     of invariant addresses into a SSA name MEM_REF address.  */
  if (TREE_CODE (*t) == MEM_REF
      || TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (*t, 0);
      if (TREE_CODE (addr) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
	      || handled_component_p (TREE_OPERAND (addr, 0))))
	{
	  tree base;
	  HOST_WIDE_INT coffset;
	  base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
						&coffset);
	  if (!base)
	    gcc_unreachable ();

	  TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
	  TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
						  TREE_OPERAND (*t, 1),
						  size_int (coffset));
	  res = true;
	}
      gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
			   || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
    }

  /* Canonicalize back MEM_REFs to plain reference trees if the object
     accessed is a decl that has the same access semantics as the MEM_REF.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1))
      && MR_DEPENDENCE_CLIQUE (*t) == 0)
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
      if (/* Same volatile qualification.  */
	  TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
	  /* Same TBAA behavior with -fstrict-aliasing.  */
	  && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
	  && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
	      == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
	  /* Same alignment.  */
	  && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
	  /* We have to look out here to not drop a required conversion
	     from the rhs to the lhs if *t appears on the lhs or vice-versa
	     if it appears on the rhs.  Thus require strict type
	     compatibility.  */
	  && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
	{
	  *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
	  res = true;
	}
    }

  /* Canonicalize TARGET_MEM_REF in particular with respect to
     the indexes becoming constant.  */
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
	{
	  *t = tem;
	  res = true;
	}
    }

  return res;
}
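
/* Illustrative sketch (editorial addition, not from the original sources):
   after propagation produced MEM[&a.b, 4], the first canonicalization
   above rewrites it to MEM[&a, offsetof-of-b + 4] with the component
   offset folded into the constant; conversely, a plain MEM[&x, 0] whose
   type and alignment match the decl x is canonicalized back to the
   direct reference x.  */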
/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases.  */

static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
{
  bool changed = false;
  gimple *stmt = gsi_stmt (*gsi);
  unsigned i;

  /* First do required canonicalization of [TARGET_]MEM_REF addresses
     after propagation.
     ???  This shouldn't be done in generic folding but in the
     propagation helpers which also know whether an address was
     propagated.
     Also canonicalize operand order.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
	{
	  tree *rhs = gimple_assign_rhs1_ptr (stmt);
	  if ((REFERENCE_CLASS_P (*rhs)
	       || TREE_CODE (*rhs) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (rhs))
	    changed = true;
	  tree *lhs = gimple_assign_lhs_ptr (stmt);
	  if (REFERENCE_CLASS_P (*lhs)
	      && maybe_canonicalize_mem_ref_addr (lhs))
	    changed = true;
	}
      else
	{
	  /* Canonicalize operand order.  */
	  enum tree_code code = gimple_assign_rhs_code (stmt);
	  if (TREE_CODE_CLASS (code) == tcc_comparison
	      || commutative_tree_code (code)
	      || commutative_ternary_tree_code (code))
	    {
	      tree rhs1 = gimple_assign_rhs1 (stmt);
	      tree rhs2 = gimple_assign_rhs2 (stmt);
	      if (tree_swap_operands_p (rhs1, rhs2, false))
		{
		  gimple_assign_set_rhs1 (stmt, rhs2);
		  gimple_assign_set_rhs2 (stmt, rhs1);
		  if (TREE_CODE_CLASS (code) == tcc_comparison)
		    gimple_assign_set_rhs_code (stmt,
						swap_tree_comparison (code));
		  changed = true;
		}
	    }
	}
      break;
    case GIMPLE_CALL:
      {
	for (i = 0; i < gimple_call_num_args (stmt); ++i)
	  {
	    tree *arg = gimple_call_arg_ptr (stmt, i);
	    if (REFERENCE_CLASS_P (*arg)
		&& maybe_canonicalize_mem_ref_addr (arg))
	      changed = true;
	  }
	tree *lhs = gimple_call_lhs_ptr (stmt);
	if (*lhs
	    && REFERENCE_CLASS_P (*lhs)
	    && maybe_canonicalize_mem_ref_addr (lhs))
	  changed = true;
	break;
      }
    case GIMPLE_ASM:
      {
	gasm *asm_stmt = as_a <gasm *> (stmt);
	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if (REFERENCE_CLASS_P (op)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if ((REFERENCE_CLASS_P (op)
		 || TREE_CODE (op) == ADDR_EXPR)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
      }
      break;
    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree *val = gimple_debug_bind_get_value_ptr (stmt);
	  if (*val
	      && (REFERENCE_CLASS_P (*val)
		  || TREE_CODE (*val) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (val))
	    changed = true;
	}
      break;
    case GIMPLE_COND:
      {
	/* Canonicalize operand order.  */
	tree lhs = gimple_cond_lhs (stmt);
	tree rhs = gimple_cond_rhs (stmt);
	if (tree_swap_operands_p (lhs, rhs, false))
	  {
	    gcond *gc = as_a <gcond *> (stmt);
	    gimple_cond_set_lhs (gc, rhs);
	    gimple_cond_set_rhs (gc, lhs);
	    gimple_cond_set_code (gc,
				  swap_tree_comparison (gimple_cond_code (gc)));
	    changed = true;
	  }
      }
      break;
    default:;
    }

  /* Dispatch to pattern-based folding.  */
  if (!inplace
      || is_gimple_assign (stmt)
      || gimple_code (stmt) == GIMPLE_COND)
    {
      gimple_seq seq = NULL;
      code_helper rcode;
      tree ops[3] = {};
      if (gimple_simplify (stmt, &rcode, ops, inplace ? NULL : &seq,
			   valueize, valueize))
	{
	  if (replace_stmt_with_simplification (gsi, rcode, ops, &seq, inplace))
	    changed = true;
	  else
	    gimple_seq_discard (seq);
	}
    }

  stmt = gsi_stmt (*gsi);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	/* Try to canonicalize for boolean-typed X the comparisons
	   X == 0, X == 1, X != 0, and X != 1.  */
	if (gimple_assign_rhs_code (stmt) == EQ_EXPR
	    || gimple_assign_rhs_code (stmt) == NE_EXPR)
	  {
	    tree lhs = gimple_assign_lhs (stmt);
	    tree op1 = gimple_assign_rhs1 (stmt);
	    tree op2 = gimple_assign_rhs2 (stmt);
	    tree type = TREE_TYPE (op1);

	    /* Check whether the comparison operands are of the same boolean
	       type as the result type is.
	       Check that second operand is an integer-constant with value
	       one or zero.  */
	    if (TREE_CODE (op2) == INTEGER_CST
		&& (integer_zerop (op2) || integer_onep (op2))
		&& useless_type_conversion_p (TREE_TYPE (lhs), type))
	      {
		enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
		bool is_logical_not = false;

		/* X == 0 and X != 1 is a logical-not of X,
		   X == 1 and X != 0 is X.  */
		if ((cmp_code == EQ_EXPR && integer_zerop (op2))
		    || (cmp_code == NE_EXPR && integer_onep (op2)))
		  is_logical_not = true;

		if (is_logical_not == false)
		  gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
		/* Only for one-bit precision typed X the transformation
		   !X -> ~X is valid.  */
		else if (TYPE_PRECISION (type) == 1)
		  gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
		/* Otherwise we use !X -> X ^ 1.  */
		else
		  gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
						  build_int_cst (type, 1));
		changed = true;
		break;
	      }
	  }

	unsigned old_num_ops = gimple_num_ops (stmt);
	tree lhs = gimple_assign_lhs (stmt);
	tree new_rhs = fold_gimple_assign (gsi);
	if (new_rhs
	    && !useless_type_conversion_p (TREE_TYPE (lhs),
					   TREE_TYPE (new_rhs)))
	  new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	if (new_rhs
	    && (!inplace
		|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	break;
      }

    case GIMPLE_CALL:
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      {
	gasm *asm_stmt = as_a <gasm *> (stmt);
	size_t noutputs;
	const char **oconstraints;
	const char *constraint;
	bool allows_mem, allows_reg;

	noutputs = gimple_asm_noutputs (asm_stmt);
	oconstraints = XALLOCAVEC (const char *, noutputs);

	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    oconstraints[i]
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, true)) != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    constraint
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    parse_input_constraint (&constraint, 0, 0, noutputs, 0,
				    oconstraints, &allows_mem, &allows_reg);
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, !allows_reg && allows_mem))
		   != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
      }
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree val = gimple_debug_bind_get_value (stmt);
	  if (val
	      && REFERENCE_CLASS_P (val))
	    {
	      tree tem = maybe_fold_reference (val, false);
	      if (tem)
		{
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	  else if (val
		   && TREE_CODE (val) == ADDR_EXPR)
	    {
	      tree ref = TREE_OPERAND (val, 0);
	      tree tem = maybe_fold_reference (ref, false);
	      if (tem)
		{
		  tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	}
      break;

    default:;
    }

  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  */
  if (gimple_has_lhs (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
	{
	  tree new_lhs = maybe_fold_reference (lhs, true);
	  if (new_lhs)
	    {
	      gimple_set_lhs (stmt, new_lhs);
	      changed = true;
	    }
	}
    }

  return changed;
}
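
/* Illustrative sketch (editorial addition, not from the original sources)
   of the boolean canonicalization in the GIMPLE_ASSIGN case above, for a
   boolean-typed x:

     lhs = (x == 1)  =>  lhs = x;
     lhs = (x != 0)  =>  lhs = x;
     lhs = (x == 0)  =>  lhs = ~x;     (one-bit precision type)
     lhs = (x != 1)  =>  lhs = x ^ 1;  (wider boolean type)  */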
/* Valueization callback that ends up not following SSA edges.  */

tree
no_follow_ssa_edges (tree)
{
  return NULL_TREE;
}

/* Valueization callback that ends up following single-use SSA edges only.  */

tree
follow_single_use_edges (tree val)
{
  if (TREE_CODE (val) == SSA_NAME
      && !has_single_use (val))
    return NULL_TREE;
  return val;
}
/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.
   The statement pointed to by GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
}

bool
fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
{
  return fold_stmt_1 (gsi, false, valueize);
}

/* Perform the minimal folding on statement *GSI.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.
   The statement *GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt_inplace (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
  gcc_assert (gsi_stmt (*gsi) == stmt);
  return changed;
}
/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
   if EXPR is null or we don't know how.
   If non-null, the result always has boolean type.  */

static tree
canonicalize_bool (tree expr, bool invert)
{
  if (!expr)
    return NULL_TREE;
  if (invert)
    {
      if (integer_nonzerop (expr))
	return boolean_false_node;
      else if (integer_zerop (expr))
	return boolean_true_node;
      else if (TREE_CODE (expr) == SSA_NAME)
	return fold_build2 (EQ_EXPR, boolean_type_node, expr,
			    build_int_cst (TREE_TYPE (expr), 0));
      else if (COMPARISON_CLASS_P (expr))
	return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
			    boolean_type_node,
			    TREE_OPERAND (expr, 0),
			    TREE_OPERAND (expr, 1));
      else
	return NULL_TREE;
    }
  else
    {
      if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
	return expr;
      if (integer_nonzerop (expr))
	return boolean_true_node;
      else if (integer_zerop (expr))
	return boolean_false_node;
      else if (TREE_CODE (expr) == SSA_NAME)
	return fold_build2 (NE_EXPR, boolean_type_node, expr,
			    build_int_cst (TREE_TYPE (expr), 0));
      else if (COMPARISON_CLASS_P (expr))
	return fold_build2 (TREE_CODE (expr),
			    boolean_type_node,
			    TREE_OPERAND (expr, 0),
			    TREE_OPERAND (expr, 1));
      else
	return NULL_TREE;
    }
}
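
/* Illustrative sketch (editorial addition, not from the original sources):
   canonicalize_bool (a < b, /-*invert=*-/true) yields the boolean
   comparison a >= b, while an SSA name n_1 of non-boolean type becomes
   n_1 != 0 (or n_1 == 0 when inverted).  */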
/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  */

static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
			const_tree op1, const_tree op2)
{
  gimple *s;

  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      if (operand_equal_p (expr, op1, 0))
	return ((code == NE_EXPR && integer_zerop (op2))
		|| (code == EQ_EXPR && integer_nonzerop (op2)));
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
	  && gimple_assign_rhs_code (s) == code
	  && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
	  && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
	return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
	{
	  enum tree_code c = gimple_assign_rhs_code (s);
	  if ((c == NE_EXPR && integer_zerop (op2))
	      || (c == EQ_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr, c,
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	  if ((c == EQ_EXPR && integer_zerop (op2))
	      || (c == NE_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr,
					   invert_tree_comparison (c, false),
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	}
    }
  return false;
}
/* Check to see if two boolean expressions OP1 and OP2 are logically
   equivalent.  */

static bool
same_bool_result_p (const_tree op1, const_tree op2)
{
  /* Simple cases first.  */
  if (operand_equal_p (op1, op2, 0))
    return true;

  /* Check the cases where at least one of the operands is a comparison.
     These are a bit smarter than operand_equal_p in that they apply some
     identities on SSA_NAMEs.  */
  if (COMPARISON_CLASS_P (op2)
      && same_bool_comparison_p (op1, TREE_CODE (op2),
				 TREE_OPERAND (op2, 0),
				 TREE_OPERAND (op2, 1)))
    return true;
  if (COMPARISON_CLASS_P (op1)
      && same_bool_comparison_p (op2, TREE_CODE (op1),
				 TREE_OPERAND (op1, 0),
				 TREE_OPERAND (op1, 1)))
    return true;

  return false;
}
/* Forward declarations for some mutually recursive functions.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison (tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison_1 (gimple *stmt,
			   enum tree_code code2, tree op2a, tree op2b);
static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison (tree var, bool invert,
			enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison_1 (gimple *stmt,
			  enum tree_code code2, tree op2a, tree op2b);
/* Helper function for and_comparisons_1:  try to simplify the AND of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of the VAR before doing the AND.
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison (tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b)
{
  tree t;
  gimple *stmt = SSA_NAME_DEF_STMT (var);

  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.  */
  if (invert)
    t = or_var_with_comparison_1 (stmt,
				  invert_tree_comparison (code2, false),
				  op2a, op2b);
  else
    t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
  return canonicalize_bool (t, invert);
}
/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison_1 (gimple *stmt,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_false_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (innercode,
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt),
				  code2,
				  op2a,
				  op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple *s;
      tree t;
      tree partial = NULL_TREE;
      bool is_and = (innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
	 inner1 AND (inner1 OR inner2) => inner1
	 !inner1 AND (inner1 AND inner2) => false
	 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
	 Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
	 Compute the first partial result, (inner1 AND (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (t AND inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner2;
	      else if (integer_zerop (t))
		return boolean_false_node;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner2 AND (op2a code2 op2b)))  */
	  else if (integer_onep (t))
	    return boolean_true_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (inner1 AND t)  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner1;
	      else if (integer_zerop (t))
		return boolean_false_node;
	      /* If both are the same, we can apply the identity
		 (x AND x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner1 AND (op2a code2 op2b)))
	     => (t OR partial)  */
	  else
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed OR expression.  The
		     interesting case is when at least one is false.
		     Or, if both are the same, we can apply the identity
		     (x OR x) == x.  */
		  if (integer_zerop (partial))
		    return t;
		  else if (integer_zerop (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
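
/* Illustrative sketch (editorial addition, not from the original sources)
   of the reassociation above: for var = (x > 0) & (x > 2), simplifying
   var AND (x > 5) computes the partial results
   (x > 0) AND (x > 5) => x > 5 and (x > 2) AND (x > 5) => x > 5;
   both agree, so the identity (t AND t) == t yields x > 5 for the whole
   expression.  */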
/* Try to simplify the AND of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) AND (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a == op1b), we should either be able to
	 return that or FALSE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	      else
		return boolean_false_node;
	    }
	}
      /* Likewise if the second comparison is an == comparison.  */
      else if (code2 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	      else
		return boolean_false_node;
	    }
	}

      /* Same business with inequality tests.  */
      else if (code1 == NE_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp != 0); break;
	    case NE_EXPR: val = (cmp == 0); break;
	    case LT_EXPR: val = (cmp >= 0); break;
	    case GT_EXPR: val = (cmp <= 0); break;
	    case LE_EXPR: val = (cmp > 0); break;
	    case GE_EXPR: val = (cmp < 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == NE_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp <= 0); break;
	    case GT_EXPR: val = (cmp >= 0); break;
	    case LE_EXPR: val = (cmp < 0); break;
	    case GE_EXPR: val = (cmp > 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Choose the more restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Likewise choose the more restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Check for singleton ranges.  */
      else if (cmp == 0
	       && ((code1 == LE_EXPR && code2 == GE_EXPR)
		   || (code1 == GE_EXPR && code2 == LE_EXPR)))
	return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);

      /* Check for disjoint ranges.  */
      else if (cmp <= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_false_node;
      else if (cmp >= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_false_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple *stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return and_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ANDed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     return that.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
			{
			  if (!result)
			    result = boolean_false_node;
			  else if (!integer_zerop (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = and_var_with_comparison (arg, invert, code2,
						      op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}
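
/* Illustrative examples for the merging logic above (added commentary;
   operands are hypothetical):
     and_comparisons_1 (LE_EXPR, x, 5, GE_EXPR, x, 5)  =>  x == 5
       (singleton range, cmp == 0)
     and_comparisons_1 (LT_EXPR, x, 3, GT_EXPR, x, 7)  =>  false
       (disjoint ranges)
     and_comparisons_1 (LT_EXPR, x, 3, LT_EXPR, x, 7)  =>  x < 3
       (the more restrictive of two < comparisons).  */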
/* Try to simplify the AND of two comparisons, specified by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be simplified to a single expression (without requiring
   introducing more SSA variables to hold intermediate values),
   return the resulting tree.  Otherwise return NULL_TREE.
   If the result expression is non-null, it has boolean type.  */

tree
maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
			    enum tree_code code2, tree op2a, tree op2b)
{
  tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
  if (t)
    return t;
  else
    return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
}
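
/* Usage note (added commentary): because and_comparisons_1 only follows
   the SSA definition of its first comparison, this wrapper retries with
   the operands swapped, so a foldable definition reachable only through
   OP2A is still found by the second call.  */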
/* Helper function for or_comparisons_1:  try to simplify the OR of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of VAR before doing the OR.
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison (tree var, bool invert,
			enum tree_code code2, tree op2a, tree op2b)
{
  tree t;
  gimple *stmt = SSA_NAME_DEF_STMT (var);

  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.  */
  if (invert)
    t = and_var_with_comparison_1 (stmt,
				   invert_tree_comparison (code2, false),
				   op2a, op2b);
  else
    t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
  return canonicalize_bool (t, invert);
}
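
/* Example of the DeMorgan rewrite above (added commentary; the SSA
   names are hypothetical): !v_1 OR (x_2 < 5) is computed as the
   inverted AND !(v_1 AND (x_2 >= 5)).  invert_tree_comparison is
   passed false for HONOR_NANS here, and canonicalize_bool re-applies
   the inversion to whatever simplification comes back.  */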
/* Try to simplify the OR of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison_1 (gimple *stmt,
			  enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var OR (var != 0)) => true.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_true_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = or_comparisons_1 (innercode,
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 code2, op2a, op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple *s;
      tree t;
      tree partial = NULL_TREE;
      bool is_or = (innercode == BIT_IOR_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
	 inner1 OR (inner1 AND inner2) => inner1
	 !inner1 OR (inner1 OR inner2) => true
	 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
      */
      if (inner1 == true_test_var)
	return (is_or ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_or ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the OR across the inner tests.
	 Compute the first partial result, (inner1 OR (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (t OR inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_or)
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (integer_zerop (t))
		return inner2;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner2 OR (op2a code op2b)))  */
	  else if (integer_zerop (t))
	    return boolean_false_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 OR (op2a code op2b))  */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (inner1 OR t)
	     => (t OR partial)  */
	  if (is_or)
	    {
	      if (integer_zerop (t))
		return inner1;
	      else if (integer_onep (t))
		return boolean_true_node;
	      /* If both are the same, we can apply the identity
		 (x OR x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner1 OR (op2a code2 op2b)))
	     => (t AND partial)  */
	  else
	    {
	      if (integer_zerop (t))
		return boolean_false_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed AND expression.  The
		     interesting case is when at least one is true.
		     Or, if both are the same, we can apply the identity
		     (x AND x) == x.  */
		  if (integer_onep (partial))
		    return t;
		  else if (integer_onep (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
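
/* Illustration (added commentary; SSA names are hypothetical): for
   var_3 = a_1 | b_2 with a_1 = x_4 > 7, ORing var_3 with x_4 <= 7
   yields the partial result t = (x_4 > 7) OR (x_4 <= 7) = true, so the
   function returns boolean_true_node immediately.  */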
/* Try to simplify the OR of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) OR (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a != op1b), we should either be able to
	 return that or TRUE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	    }
	}
      /* Likewise if the second comparison is a != comparison.  */
      else if (code2 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	    }
	}

      /* See if an equality test is redundant with the other comparison.  */
      else if (code1 == EQ_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == EQ_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Choose the less restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Likewise choose the less restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Check for singleton ranges.  */
      else if (cmp == 0
	       && ((code1 == LT_EXPR && code2 == GT_EXPR)
		   || (code1 == GT_EXPR && code2 == LT_EXPR)))
	return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);

      /* Check for less/greater pairs that don't restrict the range at all.  */
      else if (cmp >= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_true_node;
      else if (cmp <= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_true_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple *stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return or_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ORed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     return that.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
			{
			  if (!result)
			    result = boolean_true_node;
			  else if (!integer_onep (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = or_var_with_comparison (arg, invert, code2,
						     op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}
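
/* Illustrative examples for the merging logic above (added commentary;
   operands are hypothetical):
     or_comparisons_1 (LT_EXPR, x, 3, GT_EXPR, x, 3)  =>  x != 3
       (singleton gap, cmp == 0)
     or_comparisons_1 (LT_EXPR, x, 5, GE_EXPR, x, 2)  =>  true
       (the two ranges cover everything)
     or_comparisons_1 (LT_EXPR, x, 3, LT_EXPR, x, 7)  =>  x < 7
       (the less restrictive of two < comparisons).  */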
/* Try to simplify the OR of two comparisons, specified by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be simplified to a single expression (without requiring
   introducing more SSA variables to hold intermediate values),
   return the resulting tree.  Otherwise return NULL_TREE.
   If the result expression is non-null, it has boolean type.  */

tree
maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
  if (t)
    return t;
  else
    return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
}
/* Fold STMT to a constant using VALUEIZE to valueize SSA names.

   Either NULL_TREE, a simplified but non-constant, or a constant
   is returned.

   ???  This should go into a gimple-fold-inline.h file to be eventually
   privatized with the single valueize function used in the various TUs
   to avoid the indirect function call overhead.  */

tree
gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
				tree (*gvalueize) (tree))
{
  code_helper rcode;
  tree ops[3] = {};
  /* ???  The SSA propagators do not correctly deal with following SSA use-def
     edges if there are intermediate VARYING defs.  For this reason
     do not follow SSA edges here even though SCCVN can technically
     just deal fine with that.  */
  if (gimple_simplify (stmt, &rcode, ops, NULL, gvalueize, valueize))
    {
      tree res = NULL_TREE;
      if (gimple_simplified_result_is_gimple_val (rcode, ops))
	res = ops[0];
      else if (mprts_hook)
	res = mprts_hook (rcode, gimple_expr_type (stmt), ops);
      if (res)
	{
	  if (dump_file && dump_flags & TDF_DETAILS)
	    {
	      fprintf (dump_file, "Match-and-simplified ");
	      print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, res, 0);
	      fprintf (dump_file, "\n");
	    }
	  return res;
	}
    }

  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	enum tree_code subcode = gimple_assign_rhs_code (stmt);

	switch (get_gimple_rhs_class (subcode))
	  {
	  case GIMPLE_SINGLE_RHS:
	    {
	      tree rhs = gimple_assign_rhs1 (stmt);
	      enum tree_code_class kind = TREE_CODE_CLASS (subcode);

	      if (TREE_CODE (rhs) == SSA_NAME)
		{
		  /* If the RHS is an SSA_NAME, return its known constant
		     value, if any.  */
		  return (*valueize) (rhs);
		}
	      /* Handle propagating invariant addresses into address
		 operations.  */
	      else if (TREE_CODE (rhs) == ADDR_EXPR
		       && !is_gimple_min_invariant (rhs))
		{
		  HOST_WIDE_INT offset = 0;
		  tree base;
		  base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
							  &offset,
							  valueize);
		  if (base
		      && (CONSTANT_CLASS_P (base)
			  || decl_address_invariant_p (base)))
		    return build_invariant_address (TREE_TYPE (rhs),
						    base, offset);
		}
	      else if (TREE_CODE (rhs) == CONSTRUCTOR
		       && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		       && (CONSTRUCTOR_NELTS (rhs)
			   == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
		{
		  unsigned i;
		  tree val, *vec;

		  vec = XALLOCAVEC (tree,
				    TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
		  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
		    {
		      val = (*valueize) (val);
		      if (TREE_CODE (val) == INTEGER_CST
			  || TREE_CODE (val) == REAL_CST
			  || TREE_CODE (val) == FIXED_CST)
			vec[i] = val;
		      else
			return NULL_TREE;
		    }

		  return build_vector (TREE_TYPE (rhs), vec);
		}
	      if (subcode == OBJ_TYPE_REF)
		{
		  tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
		  /* If callee is constant, we can fold away the wrapper.  */
		  if (is_gimple_min_invariant (val))
		    return val;
		}

	      if (kind == tcc_reference)
		{
		  if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
		       || TREE_CODE (rhs) == REALPART_EXPR
		       || TREE_CODE (rhs) == IMAGPART_EXPR)
		      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      return fold_unary_loc (EXPR_LOCATION (rhs),
					     TREE_CODE (rhs),
					     TREE_TYPE (rhs), val);
		    }
		  else if (TREE_CODE (rhs) == BIT_FIELD_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      return fold_ternary_loc (EXPR_LOCATION (rhs),
					       TREE_CODE (rhs),
					       TREE_TYPE (rhs), val,
					       TREE_OPERAND (rhs, 1),
					       TREE_OPERAND (rhs, 2));
		    }
		  else if (TREE_CODE (rhs) == MEM_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      if (TREE_CODE (val) == ADDR_EXPR
			  && is_gimple_min_invariant (val))
			{
			  tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
						  unshare_expr (val),
						  TREE_OPERAND (rhs, 1));
			  if (tem)
			    rhs = tem;
			}
		    }
		  return fold_const_aggregate_ref_1 (rhs, valueize);
		}
	      else if (kind == tcc_declaration)
		return get_symbol_constant_value (rhs);
	      return rhs;
	    }

	  case GIMPLE_UNARY_RHS:
	    return NULL_TREE;

	  case GIMPLE_BINARY_RHS:
	    /* Translate &x + CST into an invariant form suitable for
	       further propagation.  */
	    if (subcode == POINTER_PLUS_EXPR)
	      {
		tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
		tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
		if (TREE_CODE (op0) == ADDR_EXPR
		    && TREE_CODE (op1) == INTEGER_CST)
		  {
		    tree off = fold_convert (ptr_type_node, op1);
		    return build_fold_addr_expr_loc
			(loc,
			 fold_build2 (MEM_REF,
				      TREE_TYPE (TREE_TYPE (op0)),
				      unshare_expr (op0), off));
		  }
	      }
	    /* Canonicalize bool != 0 and bool == 0 appearing after
	       valueization.  While gimple_simplify handles this
	       it can get confused by the ~X == 1 -> X == 0 transform
	       which we can't reduce to an SSA name or a constant
	       (and we have no way to tell gimple_simplify to not
	       consider those transforms in the first place).  */
	    else if (subcode == EQ_EXPR
		     || subcode == NE_EXPR)
	      {
		tree lhs = gimple_assign_lhs (stmt);
		tree op0 = gimple_assign_rhs1 (stmt);
		if (useless_type_conversion_p (TREE_TYPE (lhs),
					       TREE_TYPE (op0)))
		  {
		    tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
		    op0 = (*valueize) (op0);
		    if (TREE_CODE (op0) == INTEGER_CST)
		      std::swap (op0, op1);
		    if (TREE_CODE (op1) == INTEGER_CST
			&& ((subcode == NE_EXPR && integer_zerop (op1))
			    || (subcode == EQ_EXPR && integer_onep (op1))))
		      return op0;
		  }
	      }
	    return NULL_TREE;

	  case GIMPLE_TERNARY_RHS:
	    {
	      /* Handle ternary operators that can appear in GIMPLE form.  */
	      tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
	      tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
	      tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
	      return fold_ternary_loc (loc, subcode,
				       gimple_expr_type (stmt), op0, op1, op2);
	    }

	  default:
	    gcc_unreachable ();
	  }
      }

    case GIMPLE_CALL:
      {
	tree fn;
	gcall *call_stmt = as_a <gcall *> (stmt);

	if (gimple_call_internal_p (stmt))
	  {
	    enum tree_code subcode = ERROR_MARK;
	    switch (gimple_call_internal_fn (stmt))
	      {
	      case IFN_UBSAN_CHECK_ADD:
		subcode = PLUS_EXPR;
		break;
	      case IFN_UBSAN_CHECK_SUB:
		subcode = MINUS_EXPR;
		break;
	      case IFN_UBSAN_CHECK_MUL:
		subcode = MULT_EXPR;
		break;
	      default:
		return NULL_TREE;
	      }

	    tree arg0 = gimple_call_arg (stmt, 0);
	    tree arg1 = gimple_call_arg (stmt, 1);
	    tree op0 = (*valueize) (arg0);
	    tree op1 = (*valueize) (arg1);

	    if (TREE_CODE (op0) != INTEGER_CST
		|| TREE_CODE (op1) != INTEGER_CST)
	      {
		switch (subcode)
		  {
		  case MULT_EXPR:
		    /* x * 0 = 0 * x = 0 without overflow.  */
		    if (integer_zerop (op0) || integer_zerop (op1))
		      return build_zero_cst (TREE_TYPE (arg0));
		    break;
		  case MINUS_EXPR:
		    /* y - y = 0 without overflow.  */
		    if (operand_equal_p (op0, op1, 0))
		      return build_zero_cst (TREE_TYPE (arg0));
		    break;
		  default:
		    break;
		  }
	      }
	    tree res
	      = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
	    if (res
		&& TREE_CODE (res) == INTEGER_CST
		&& !TREE_OVERFLOW (res))
	      return res;
	    return NULL_TREE;
	  }

	fn = (*valueize) (gimple_call_fn (stmt));
	if (TREE_CODE (fn) == ADDR_EXPR
	    && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
	    && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
	    && gimple_builtin_call_types_compatible_p (stmt,
						       TREE_OPERAND (fn, 0)))
	  {
	    tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
	    tree retval;
	    unsigned i;
	    for (i = 0; i < gimple_call_num_args (stmt); ++i)
	      args[i] = (*valueize) (gimple_call_arg (stmt, i));
	    retval = fold_builtin_call_array (loc,
					      gimple_call_return_type (call_stmt),
					      fn, gimple_call_num_args (stmt),
					      args);
	    if (retval)
	      {
		/* fold_call_expr wraps the result inside a NOP_EXPR.  */
		STRIP_NOPS (retval);
		retval = fold_convert (gimple_call_return_type (call_stmt),
				       retval);
	      }
	    return retval;
	  }
	return NULL_TREE;
      }

    default:
      return NULL_TREE;
    }
}
/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
   Returns NULL_TREE if folding to a constant is not possible, otherwise
   returns a constant according to is_gimple_min_invariant.  */

tree
gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
{
  tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
  if (res && is_gimple_min_invariant (res))
    return res;
  return NULL_TREE;
}
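
/* Usage sketch (added commentary, with a hypothetical valueization
   callback): given a statement y_2 = x_1 * 3 and a VALUEIZE that maps
   x_1 to the constant 7, gimple_fold_stmt_to_constant returns the
   INTEGER_CST 21.  Had the fold produced something simplified but not
   gimple_min_invariant, NULL_TREE would be returned instead.  */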
/* The following set of functions are supposed to fold references using
   their constant initializers.  */

/* See if we can find a constructor defining the value of BASE.
   When we know the constructor with constant offset (such as
   base is array[40] and we do know the constructor of array), then
   BIT_OFFSET is adjusted accordingly.

   As a special case, return error_mark_node when the constructor
   is not explicitly available, but it is known to be zero
   such as 'static const int a;'.  */

static tree
get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
		      tree (*valueize)(tree))
{
  HOST_WIDE_INT bit_offset2, size, max_size;
  if (TREE_CODE (base) == MEM_REF)
    {
      if (!integer_zerop (TREE_OPERAND (base, 1)))
	{
	  if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
	    return NULL_TREE;
	  *bit_offset += (mem_ref_offset (base).to_short_addr ()
			  * BITS_PER_UNIT);
	}

      if (valueize
	  && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
	base = valueize (TREE_OPERAND (base, 0));
      if (!base || TREE_CODE (base) != ADDR_EXPR)
	return NULL_TREE;
      base = TREE_OPERAND (base, 0);
    }

  /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
     DECL_INITIAL.  If BASE is a nested reference into another
     ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
     the inner reference.  */
  switch (TREE_CODE (base))
    {
    case VAR_DECL:
    case CONST_DECL:
      {
	tree init = ctor_for_folding (base);

	/* Our semantic is exact opposite of ctor_for_folding;
	   NULL means unknown, while error_mark_node is 0.  */
	if (init == error_mark_node)
	  return NULL_TREE;
	if (!init)
	  return error_mark_node;
	return init;
      }

    case ARRAY_REF:
    case COMPONENT_REF:
      base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
      if (max_size == -1 || size != max_size)
	return NULL_TREE;
      *bit_offset += bit_offset2;
      return get_base_constructor (base, bit_offset, valueize);

    case STRING_CST:
    case CONSTRUCTOR:
      return base;

    default:
      if (CONSTANT_CLASS_P (base))
	return base;

      return NULL_TREE;
    }
}
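
/* Example (added commentary; the declarations are hypothetical): for
     static const int a[3] = {1, 2, 3};
   and BASE being MEM_REF[&a, 4], the 4-byte MEM_REF offset is folded
   into *BIT_OFFSET (adding 32 bits on a target with 8-bit units) and
   the CONSTRUCTOR of 'a' is returned, so a later fold_ctor_reference
   reads the value 2.  */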
/* CTOR is CONSTRUCTOR of an array type.  Fold reference of type TYPE and size
   SIZE to the memory at bit OFFSET.  */

static tree
fold_array_ctor_reference (tree type, tree ctor,
			   unsigned HOST_WIDE_INT offset,
			   unsigned HOST_WIDE_INT size,
			   tree from_decl)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;
  offset_int low_bound;
  offset_int elt_size;
  offset_int index, max_index;
  offset_int access_index;
  tree domain_type = NULL_TREE, index_type = NULL_TREE;
  HOST_WIDE_INT inner_offset;

  /* Compute low bound and elt size.  */
  if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
    domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    {
      /* Static constructors for variably sized objects make no sense.  */
      gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
      index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
      low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
    }
  else
    low_bound = 0;
  /* Static constructors for variably sized objects make no sense.  */
  gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
	      == INTEGER_CST);
  elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));

  /* We can handle only constantly sized accesses that are known to not
     be larger than size of array element.  */
  if (!TYPE_SIZE_UNIT (type)
      || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
      || wi::lts_p (elt_size, wi::to_offset (TYPE_SIZE_UNIT (type)))
      || elt_size == 0)
    return NULL_TREE;

  /* Compute the array index we look for.  */
  access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
				 elt_size);
  access_index += low_bound;
  if (index_type)
    access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
			    TYPE_SIGN (index_type));

  /* And offset within the access.  */
  inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);

  /* See if the array field is large enough to span whole access.  We do not
     care to fold accesses spanning multiple array indexes.  */
  if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
    return NULL_TREE;

  index = low_bound - 1;
  if (index_type)
    index = wi::ext (index, TYPE_PRECISION (index_type),
		     TYPE_SIGN (index_type));

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
    {
      /* Array constructor might explicitly set index, or specify a range,
	 or leave index NULL meaning that it is next index after previous
	 one.  */
      if (cfield)
	{
	  if (TREE_CODE (cfield) == INTEGER_CST)
	    max_index = index = wi::to_offset (cfield);
	  else
	    {
	      gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
	      index = wi::to_offset (TREE_OPERAND (cfield, 0));
	      max_index = wi::to_offset (TREE_OPERAND (cfield, 1));
	    }
	}
      else
	{
	  index += 1;
	  if (index_type)
	    index = wi::ext (index, TYPE_PRECISION (index_type),
			     TYPE_SIGN (index_type));
	  max_index = index;
	}

      /* Do we have match?  */
      if (wi::cmpu (access_index, index) >= 0
	  && wi::cmpu (access_index, max_index) <= 0)
	return fold_ctor_reference (type, cval, inner_offset, size,
				    from_decl);
    }
  /* When memory is not explicitly mentioned in constructor,
     it is 0 (or out of range).  */
  return build_zero_cst (type);
}
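
/* Worked example (added commentary; the ctor is hypothetical): with
     static const short a[4] = {10, 20, 30, 40};
   a read of TYPE short, OFFSET 32 and SIZE 16 computes
   access_index = (32 / 8) / 2 = 2 and inner_offset = 0, matching the
   constructor element 30.  A read straddling two elements (say OFFSET 8,
   SIZE 16) fails the inner_offset + size check and returns NULL_TREE.  */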
/* CTOR is CONSTRUCTOR of an aggregate or vector.
   Fold reference of type TYPE and size SIZE to the memory at bit OFFSET.  */

static tree
fold_nonarray_ctor_reference (tree type, tree ctor,
			      unsigned HOST_WIDE_INT offset,
			      unsigned HOST_WIDE_INT size,
			      tree from_decl)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
			    cval)
    {
      tree byte_offset = DECL_FIELD_OFFSET (cfield);
      tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
      tree field_size = DECL_SIZE (cfield);
      offset_int bitoffset;
      offset_int bitoffset_end, access_end;

      /* Variable sized objects in static constructors make no sense,
	 but field_size can be NULL for flexible array members.  */
      gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
		  && TREE_CODE (byte_offset) == INTEGER_CST
		  && (field_size != NULL_TREE
		      ? TREE_CODE (field_size) == INTEGER_CST
		      : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));

      /* Compute bit offset of the field.  */
      bitoffset = (wi::to_offset (field_offset)
		   + wi::lshift (wi::to_offset (byte_offset),
				 LOG2_BITS_PER_UNIT));
      /* Compute bit offset where the field ends.  */
      if (field_size != NULL_TREE)
	bitoffset_end = bitoffset + wi::to_offset (field_size);
      else
	bitoffset_end = 0;

      access_end = offset_int (offset) + size;

      /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
	 [BITOFFSET, BITOFFSET_END)?  */
      if (wi::cmps (access_end, bitoffset) > 0
	  && (field_size == NULL_TREE
	      || wi::lts_p (offset, bitoffset_end)))
	{
	  offset_int inner_offset = offset_int (offset) - bitoffset;
	  /* We do have overlap.  Now see if the field is large enough to
	     cover the access.  Give up for accesses spanning multiple
	     fields.  */
	  if (wi::cmps (access_end, bitoffset_end) > 0)
	    return NULL_TREE;
	  if (wi::lts_p (offset, bitoffset))
	    return NULL_TREE;
	  return fold_ctor_reference (type, cval,
				      inner_offset.to_uhwi (), size,
				      from_decl);
	}
    }
  /* When memory is not explicitly mentioned in constructor, it is 0.  */
  return build_zero_cst (type);
}
/* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
   to the memory at bit OFFSET.  */

tree
fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
		     unsigned HOST_WIDE_INT size, tree from_decl)
{
  tree ret;

  /* We found the field with exact match.  */
  if (useless_type_conversion_p (type, TREE_TYPE (ctor))
      && !offset)
    return canonicalize_constructor_val (unshare_expr (ctor), from_decl);

  /* We are at the end of walk, see if we can view convert the
     result.  */
  if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
      /* VIEW_CONVERT_EXPR is defined only for matching sizes.  */
      && !compare_tree_int (TYPE_SIZE (type), size)
      && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
    {
      ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
      ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
      if (ret)
	STRIP_USELESS_TYPE_CONVERSION (ret);
      return ret;
    }
  /* For constants and byte-aligned/sized reads try to go through
     native_encode/interpret.  */
  if (CONSTANT_CLASS_P (ctor)
      && BITS_PER_UNIT == 8
      && offset % BITS_PER_UNIT == 0
      && size % BITS_PER_UNIT == 0
      && size <= MAX_BITSIZE_MODE_ANY_MODE)
    {
      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
      if (native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
			      offset / BITS_PER_UNIT) > 0)
	return native_interpret_expr (type, buf, size / BITS_PER_UNIT);
    }
  if (TREE_CODE (ctor) == CONSTRUCTOR)
    {
      if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
	  || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
	return fold_array_ctor_reference (type, ctor, offset, size,
					  from_decl);
      else
	return fold_nonarray_ctor_reference (type, ctor, offset, size,
					     from_decl);
    }

  return NULL_TREE;
}
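
/* Example of the native_encode/interpret path above (added commentary;
   the declaration is hypothetical): reading a 4-byte unsigned int at
   offset 0 from the REAL_CST initializer of
     static const float f = 1.0f;
   encodes the float's bytes into BUF and reinterprets them, yielding
   0x3f800000 on an IEEE single-precision target.  */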
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates valueizing SSA
   names using VALUEIZE.  Return NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
{
  tree ctor, idx, base;
  HOST_WIDE_INT offset, size, max_size;
  tree tem;

  if (TREE_THIS_VOLATILE (t))
    return NULL_TREE;

  if (DECL_P (t))
    return get_symbol_constant_value (t);

  tem = fold_read_from_constant_string (t);
  if (tem)
    return tem;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Constant indexes are handled well by get_base_constructor.
	 Only special case variable offsets.
	 FIXME: This code can't handle nested references with variable indexes
	 (they will be handled only by iteration of ccp).  Perhaps we can bring
	 get_ref_base_and_extent here and make it use a valueize callback.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
	  && valueize
	  && (idx = (*valueize) (TREE_OPERAND (t, 1)))
	  && TREE_CODE (idx) == INTEGER_CST)
	{
	  tree low_bound, unit_size;

	  /* If the resulting bit-offset is constant, track it.  */
	  if ((low_bound = array_ref_low_bound (t),
	       TREE_CODE (low_bound) == INTEGER_CST)
	      && (unit_size = array_ref_element_size (t),
		  tree_fits_uhwi_p (unit_size)))
	    {
	      offset_int woffset
		= wi::sext (wi::to_offset (idx) - wi::to_offset (low_bound),
			    TYPE_PRECISION (TREE_TYPE (idx)));

	      if (wi::fits_shwi_p (woffset))
		{
		  offset = woffset.to_shwi ();
		  /* TODO: This code seems wrong, multiply then check
		     to see if it fits.  */
		  offset *= tree_to_uhwi (unit_size);
		  offset *= BITS_PER_UNIT;

		  base = TREE_OPERAND (t, 0);
		  ctor = get_base_constructor (base, &offset, valueize);
		  /* Empty constructor.  Always fold to 0.  */
		  if (ctor == error_mark_node)
		    return build_zero_cst (TREE_TYPE (t));
		  /* Out of bound array access.  Value is undefined,
		     but don't fold.  */
		  if (offset < 0)
		    return NULL_TREE;
		  /* We can not determine ctor.  */
		  if (!ctor)
		    return NULL_TREE;
		  return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
					      tree_to_uhwi (unit_size)
					      * BITS_PER_UNIT,
					      base);
		}
	    }
	}
      /* Fallthru.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case TARGET_MEM_REF:
    case MEM_REF:
      base = get_ref_base_and_extent (t, &offset, &size, &max_size);
      ctor = get_base_constructor (base, &offset, valueize);

      /* Empty constructor.  Always fold to 0.  */
      if (ctor == error_mark_node)
	return build_zero_cst (TREE_TYPE (t));
      /* We do not know precise address.  */
      if (max_size == -1 || max_size != size)
	return NULL_TREE;
      /* We can not determine ctor.  */
      if (!ctor)
	return NULL_TREE;

      /* Out of bound array access.  Value is undefined, but don't fold.  */
      if (offset < 0)
	return NULL_TREE;

      return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
				  base);

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
	if (c && TREE_CODE (c) == COMPLEX_CST)
	  return fold_build1_loc (EXPR_LOCATION (t),
				  TREE_CODE (t), TREE_TYPE (t), c);
	break;
      }

    default:
      break;
    }

  return NULL_TREE;
}

tree
fold_const_aggregate_ref (tree t)
{
  return fold_const_aggregate_ref_1 (t, NULL);
}
/* Lookup virtual method with index TOKEN in a virtual table V
   at OFFSET.
   Set CAN_REFER if non-NULL to false if the method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by a non-C++-produced symbol).  Otherwise just return NULL in that case.  */

tree
gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
				   tree v,
				   unsigned HOST_WIDE_INT offset,
				   bool *can_refer)
{
  tree vtable = v, init, fn;
  unsigned HOST_WIDE_INT size;
  unsigned HOST_WIDE_INT elt_size, access_index;
  tree domain_type;

  if (can_refer)
    *can_refer = true;

  /* First of all double check that we have a virtual table.  */
  if (TREE_CODE (v) != VAR_DECL
      || !DECL_VIRTUAL_P (v))
    {
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }

  init = ctor_for_folding (v);

  /* The virtual tables should always be born with constructors
     and we always should assume that they are available for
     folding.  At the moment we do not stream them in all cases,
     but it should never happen that the ctor seems unreachable.  */
  gcc_assert (init);
  if (init == error_mark_node)
    {
      gcc_assert (in_lto_p);
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }
  gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
  size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
  offset *= BITS_PER_UNIT;
  offset += token * size;

  /* Lookup the value in the constructor that is assumed to be an array.
     This is equivalent to
     fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
			       offset, size, NULL);
     but in constant time.  We expect that the frontend produced a simple
     array without indexed initializers.  */

  gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
  domain_type = TYPE_DOMAIN (TREE_TYPE (init));
  gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
  elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));

  access_index = offset / BITS_PER_UNIT / elt_size;
  gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);

  /* This code makes the assumption that there are no
     indexed fields produced by the C++ FE, so we can directly index
     the array.  */
  if (access_index < CONSTRUCTOR_NELTS (init))
    {
      fn = CONSTRUCTOR_ELT (init, access_index)->value;
      gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
      STRIP_NOPS (fn);
    }
  else
    fn = NULL;

  /* For a type-inconsistent program we may end up looking up a virtual method
     in a virtual table that does not contain TOKEN entries.  We may overrun
     the virtual table and pick up a constant or RTTI info pointer.
     In any case the call is undefined.  */
  if (!fn
      || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
      || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
    fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
  else
    {
      fn = TREE_OPERAND (fn, 0);

      /* When the cgraph node is missing and the function is not public, we
	 cannot devirtualize.  This can happen in WHOPR when the actual method
	 ends up in another partition, because we found the devirtualization
	 possibility too late.  */
      if (!can_refer_decl_in_current_unit_p (fn, vtable))
	{
	  if (can_refer)
	    {
	      *can_refer = false;
	      return fn;
	    }
	  return NULL_TREE;
	}
    }

  /* Make sure we create a cgraph node for functions we'll reference.
     They can be non-existent if the reference comes from an entry
     of an external vtable for example.  */
  cgraph_node::get_create (fn);

  return fn;
}
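
/* Index computation example (added commentary; values are
   illustrative): with 64-bit vtable slots (SIZE = 64, ELT_SIZE = 8),
   TOKEN 2 and a vtable OFFSET of 16 bytes, we get
   offset = 16 * 8 + 2 * 64 = 256 bits and
   access_index = 256 / 8 / 8 = 4, i.e. the fifth constructor
   element.  */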
/* Return a declaration of a function which an OBJ_TYPE_REF references.  TOKEN
   is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
   KNOWN_BINFO carries the binfo describing the true type of
   OBJ_TYPE_REF_OBJECT(REF).
   Set CAN_REFER if non-NULL to false if the method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by a non-C++-produced symbol).  Otherwise just return NULL in that case.  */

tree
gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
				  bool *can_refer)
{
  unsigned HOST_WIDE_INT offset;
  tree v;

  v = BINFO_VTABLE (known_binfo);
  /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone.  */
  if (!v)
    return NULL_TREE;

  if (!vtable_pointer_value_to_vtable (v, &v, &offset))
    {
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }
  return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
}
/* Given a pointer value OP0, return a simplified version of an
   indirection through OP0, or NULL_TREE if no simplification is
   possible.  Note that the resulting type may be different from
   the type pointed to in the sense that it is still compatible
   from the langhooks point of view.  */

tree
gimple_fold_indirect_ref (tree t)
{
  tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
  tree sub = t;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (useless_type_conversion_p (type, optype))
	return op;

      /* *(foo *)&fooarray => fooarray[0] */
      if (TREE_CODE (optype) == ARRAY_TYPE
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
	  && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  if (TREE_CODE (min_val) == INTEGER_CST)
	    return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
	}
    }

  /* *(p + CST) -> ...  */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree addr = TREE_OPERAND (sub, 0);
      tree off = TREE_OPERAND (sub, 1);
      tree addrtype;

      STRIP_NOPS (addr);
      addrtype = TREE_TYPE (addr);

      /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
	  && tree_fits_uhwi_p (off))
	{
	  unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
	  tree part_width = TYPE_SIZE (type);
	  unsigned HOST_WIDE_INT part_widthi
	    = tree_to_shwi (part_width) / BITS_PER_UNIT;
	  unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
	  tree index = bitsize_int (indexi);
	  if (offset / part_widthi
	      < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
	    return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
				part_width, index);
	}

      /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
	{
	  tree size = TYPE_SIZE_UNIT (type);
	  if (tree_int_cst_equal (size, off))
	    return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
	}

      /* *(p + CST) -> MEM_REF <p, CST>.  */
      if (TREE_CODE (addr) != ADDR_EXPR
	  || DECL_P (TREE_OPERAND (addr, 0)))
	return fold_build2 (MEM_REF, type,
			    addr,
			    wide_int_to_tree (ptype, off));
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
      && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      tree osub = sub;
      sub = gimple_fold_indirect_ref (sub);
      if (! sub)
	sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      if (TREE_CODE (min_val) == INTEGER_CST)
	return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
/* Return true if CODE is an operation that when operating on signed
   integer types involves undefined behavior on overflow and the
   operation can be expressed with unsigned arithmetic.  */

bool
arith_code_with_undefined_signed_overflow (tree_code code)
{
  switch (code)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case NEGATE_EXPR:
    case POINTER_PLUS_EXPR:
      return true;
    default:
      return false;
    }
}
/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
   operation that can be transformed to unsigned arithmetic by converting
   its operand, carrying out the operation in the corresponding unsigned
   type and converting the result back to the original type.

   Returns a sequence of statements that replace STMT and also contain
   a modified form of STMT itself.  */

gimple_seq
rewrite_to_defined_overflow (gimple *stmt)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "rewriting stmt with undefined signed "
	       "overflow ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
    }

  tree lhs = gimple_assign_lhs (stmt);
  tree type = unsigned_type_for (TREE_TYPE (lhs));
  gimple_seq stmts = NULL;
  for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
    {
      tree op = gimple_op (stmt, i);
      op = gimple_convert (&stmts, type, op);
      gimple_set_op (stmt, i, op);
    }
  gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
  gimple_seq_add_stmt (&stmts, stmt);
  gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
  gimple_seq_add_stmt (&stmts, cvt);

  return stmts;
}
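
/* The rewrite above turns, e.g. (SSA names illustrative):
     _1 = a_2 + b_3;            signed int PLUS_EXPR
   into the returned sequence
     _4 = (unsigned int) a_2;
     _5 = (unsigned int) b_3;
     _6 = _4 + _5;              wraps, no undefined behavior
     _1 = (int) _6;
   so that overflow in the addition is well-defined.  */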
/* The valueization hook we use for the gimple_build API simplification.
   This makes us match fold_buildN behavior by only combining with
   statements in the sequence(s) we are currently building.  */

static tree
gimple_build_valueize (tree op)
{
  if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
    return op;
  return NULL_TREE;
}
/* Build the expression CODE OP0 of type TYPE with location LOC,
   simplifying it first if possible.  Returns the built
   expression value and appends statements possibly defining it
   to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum tree_code code, tree type, tree op0)
{
  tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
  if (!res)
    {
      if (gimple_in_ssa_p (cfun))
	res = make_ssa_name (type);
      else
	res = create_tmp_reg (type);
      gimple *stmt;
      if (code == REALPART_EXPR
	  || code == IMAGPART_EXPR
	  || code == VIEW_CONVERT_EXPR)
	stmt = gimple_build_assign (res, code, build1 (code, type, op0));
      else
	stmt = gimple_build_assign (res, code, op0);
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
/* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
   simplifying it first if possible.  Returns the built
   expression value and appends statements possibly defining it
   to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum tree_code code, tree type, tree op0, tree op1)
{
  tree res = gimple_simplify (code, type, op0, op1, seq,
			      gimple_build_valueize);
  if (!res)
    {
      if (gimple_in_ssa_p (cfun))
	res = make_ssa_name (type);
      else
	res = create_tmp_reg (type);
      gimple *stmt = gimple_build_assign (res, code, op0, op1);
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
/* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
   simplifying it first if possible.  Returns the built
   expression value and appends statements possibly defining it
   to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum tree_code code, tree type, tree op0, tree op1, tree op2)
{
  tree res = gimple_simplify (code, type, op0, op1, op2,
			      seq, gimple_build_valueize);
  if (!res)
    {
      if (gimple_in_ssa_p (cfun))
	res = make_ssa_name (type);
      else
	res = create_tmp_reg (type);
      gimple *stmt;
      if (code == BIT_FIELD_REF)
	stmt = gimple_build_assign (res, code,
				    build3 (code, type, op0, op1, op2));
      else
	stmt = gimple_build_assign (res, code, op0, op1, op2);
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
/* Build the call FN (ARG0) with a result of type TYPE
   (or no result if TYPE is void) with location LOC,
   simplifying it first if possible.  Returns the built
   expression value (or NULL_TREE if TYPE is void) and appends
   statements possibly defining it to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum built_in_function fn, tree type, tree arg0)
{
  tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
  if (!res)
    {
      tree decl = builtin_decl_implicit (fn);
      gimple *stmt = gimple_build_call (decl, 1, arg0);
      if (!VOID_TYPE_P (type))
	{
	  if (gimple_in_ssa_p (cfun))
	    res = make_ssa_name (type);
	  else
	    res = create_tmp_reg (type);
	  gimple_call_set_lhs (stmt, res);
	}
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
/* Build the call FN (ARG0, ARG1) with a result of type TYPE
   (or no result if TYPE is void) with location LOC,
   simplifying it first if possible.  Returns the built
   expression value (or NULL_TREE if TYPE is void) and appends
   statements possibly defining it to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum built_in_function fn, tree type, tree arg0, tree arg1)
{
  tree res = gimple_simplify (fn, type, arg0, arg1, seq,
			      gimple_build_valueize);
  if (!res)
    {
      tree decl = builtin_decl_implicit (fn);
      gimple *stmt = gimple_build_call (decl, 2, arg0, arg1);
      if (!VOID_TYPE_P (type))
	{
	  if (gimple_in_ssa_p (cfun))
	    res = make_ssa_name (type);
	  else
	    res = create_tmp_reg (type);
	  gimple_call_set_lhs (stmt, res);
	}
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
/* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
   (or no result if TYPE is void) with location LOC,
   simplifying it first if possible.  Returns the built
   expression value (or NULL_TREE if TYPE is void) and appends
   statements possibly defining it to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum built_in_function fn, tree type,
	      tree arg0, tree arg1, tree arg2)
{
  tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
			      seq, gimple_build_valueize);
  if (!res)
    {
      tree decl = builtin_decl_implicit (fn);
      gimple *stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
      if (!VOID_TYPE_P (type))
	{
	  if (gimple_in_ssa_p (cfun))
	    res = make_ssa_name (type);
	  else
	    res = create_tmp_reg (type);
	  gimple_call_set_lhs (stmt, res);
	}
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
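
/* Usage sketch for the gimple_build overloads above (added commentary;
   the call is illustrative):
     gimple_seq seq = NULL;
     tree r = gimple_build (&seq, loc, BUILT_IN_SQRT,
			    double_type_node, arg);
   either folds the call via gimple_simplify (e.g. for a constant ARG)
   and returns the folded value, or appends a call statement defining R
   to SEQ, which the caller is responsible for inserting.  */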
/* Build the conversion (TYPE) OP with a result of type TYPE
   with location LOC if such conversion is necessary in GIMPLE,
   simplifying it first.
   Returns the built expression value and appends
   statements possibly defining it to SEQ.  */

tree
gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
{
  if (useless_type_conversion_p (type, TREE_TYPE (op)))
    return op;
  return gimple_build (seq, loc, NOP_EXPR, type, op);
}
/* Build the conversion (ptrofftype) OP with a result of a type
   compatible with ptrofftype with location LOC if such conversion
   is necessary in GIMPLE, simplifying it first.
   Returns the built expression value and appends
   statements possibly defining it to SEQ.  */

tree
gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
{
  if (ptrofftype_p (TREE_TYPE (op)))
    return op;
  return gimple_convert (seq, loc, sizetype, op);
}
/* Return true if the result of assignment STMT is known to be non-negative.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

static bool
gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
				   int depth)
{
  enum tree_code code = gimple_assign_rhs_code (stmt);
  switch (get_gimple_rhs_class (code))
    {
    case GIMPLE_UNARY_RHS:
      return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
					     gimple_expr_type (stmt),
					     gimple_assign_rhs1 (stmt),
					     strict_overflow_p, depth);
    case GIMPLE_BINARY_RHS:
      return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
					      gimple_expr_type (stmt),
					      gimple_assign_rhs1 (stmt),
					      gimple_assign_rhs2 (stmt),
					      strict_overflow_p, depth);
    case GIMPLE_TERNARY_RHS:
      return false;
    case GIMPLE_SINGLE_RHS:
      return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
					      strict_overflow_p, depth);
    case GIMPLE_INVALID_RHS:
      break;
    }
  gcc_unreachable ();
}
/* Return true if return value of call STMT is known to be non-negative.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

static bool
gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
				 int depth)
{
  tree arg0 = gimple_call_num_args (stmt) > 0 ?
    gimple_call_arg (stmt, 0) : NULL_TREE;
  tree arg1 = gimple_call_num_args (stmt) > 1 ?
    gimple_call_arg (stmt, 1) : NULL_TREE;

  return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
					gimple_call_fndecl (stmt),
					arg0,
					arg1,
					strict_overflow_p, depth);
}
/* Return true if the result of the PHI node STMT is known to be non-negative.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

static bool
gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
				int depth)
{
  for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
    {
      tree arg = gimple_phi_arg_def (stmt, i);
      if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
	return false;
    }
  return true;
}
/* Return true if STMT is known to compute a non-negative value.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

bool
gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
				 int depth)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
						depth);
    case GIMPLE_CALL:
      return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
					      depth);
    case GIMPLE_PHI:
      return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
					     depth);
    default:
      return false;
    }
}
,