/* Statement simplification on GIMPLE.
   Copyright (C) 2010-2015 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.c.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
23 #include "coretypes.h"
32 #include "gimple-pretty-print.h"
33 #include "fold-const.h"
36 #include "stor-layout.h"
38 #include "gimple-fold.h"
40 #include "gimple-iterator.h"
41 #include "tree-into-ssa.h"
44 #include "tree-ssa-propagate.h"
45 #include "ipa-utils.h"
46 #include "tree-ssa-address.h"
47 #include "langhooks.h"
48 #include "gimplify-me.h"
52 #include "gimple-match.h"
53 #include "gomp-constants.h"
54 #include "optabs-query.h"
58 /* Return true when DECL can be referenced from current unit.
59 FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
60 We can get declarations that are not possible to reference for various
63 1) When analyzing C++ virtual tables.
64 C++ virtual tables do have known constructors even
65 when they are keyed to other compilation unit.
66 Those tables can contain pointers to methods and vars
67 in other units. Those methods have both STATIC and EXTERNAL
69 2) In WHOPR mode devirtualization might lead to reference
70 to method that was partitioned elsehwere.
71 In this case we have static VAR_DECL or FUNCTION_DECL
72 that has no corresponding callgraph/varpool node
74 3) COMDAT functions referred by external vtables that
75 we devirtualize only during final compilation stage.
76 At this time we already decided that we will not output
77 the function body and thus we can't reference the symbol
81 can_refer_decl_in_current_unit_p (tree decl
, tree from_decl
)
84 struct cgraph_node
*node
;
87 if (DECL_ABSTRACT_P (decl
))
90 /* We are concerned only about static/external vars and functions. */
91 if ((!TREE_STATIC (decl
) && !DECL_EXTERNAL (decl
))
92 || (TREE_CODE (decl
) != VAR_DECL
&& TREE_CODE (decl
) != FUNCTION_DECL
))
95 /* Static objects can be referred only if they was not optimized out yet. */
96 if (!TREE_PUBLIC (decl
) && !DECL_EXTERNAL (decl
))
98 /* Before we start optimizing unreachable code we can be sure all
99 static objects are defined. */
100 if (symtab
->function_flags_ready
)
102 snode
= symtab_node::get (decl
);
103 if (!snode
|| !snode
->definition
)
105 node
= dyn_cast
<cgraph_node
*> (snode
);
106 return !node
|| !node
->global
.inlined_to
;
109 /* We will later output the initializer, so we can refer to it.
110 So we are concerned only when DECL comes from initializer of
111 external var or var that has been optimized out. */
113 || TREE_CODE (from_decl
) != VAR_DECL
114 || (!DECL_EXTERNAL (from_decl
)
115 && (vnode
= varpool_node::get (from_decl
)) != NULL
116 && vnode
->definition
)
118 && (vnode
= varpool_node::get (from_decl
)) != NULL
119 && vnode
->in_other_partition
))
121 /* We are folding reference from external vtable. The vtable may reffer
122 to a symbol keyed to other compilation unit. The other compilation
123 unit may be in separate DSO and the symbol may be hidden. */
124 if (DECL_VISIBILITY_SPECIFIED (decl
)
125 && DECL_EXTERNAL (decl
)
126 && DECL_VISIBILITY (decl
) != VISIBILITY_DEFAULT
127 && (!(snode
= symtab_node::get (decl
)) || !snode
->in_other_partition
))
129 /* When function is public, we always can introduce new reference.
130 Exception are the COMDAT functions where introducing a direct
131 reference imply need to include function body in the curren tunit. */
132 if (TREE_PUBLIC (decl
) && !DECL_COMDAT (decl
))
134 /* We have COMDAT. We are going to check if we still have definition
135 or if the definition is going to be output in other partition.
136 Bypass this when gimplifying; all needed functions will be produced.
138 As observed in PR20991 for already optimized out comdat virtual functions
139 it may be tempting to not necessarily give up because the copy will be
140 output elsewhere when corresponding vtable is output.
141 This is however not possible - ABI specify that COMDATs are output in
142 units where they are used and when the other unit was compiled with LTO
143 it is possible that vtable was kept public while the function itself
145 if (!symtab
->function_flags_ready
)
148 snode
= symtab_node::get (decl
);
150 || ((!snode
->definition
|| DECL_EXTERNAL (decl
))
151 && (!snode
->in_other_partition
152 || (!snode
->forced_by_abi
&& !snode
->force_output
))))
154 node
= dyn_cast
<cgraph_node
*> (snode
);
155 return !node
|| !node
->global
.inlined_to
;
158 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
159 acceptable form for is_gimple_min_invariant.
160 FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL. */
163 canonicalize_constructor_val (tree cval
, tree from_decl
)
165 tree orig_cval
= cval
;
167 if (TREE_CODE (cval
) == POINTER_PLUS_EXPR
168 && TREE_CODE (TREE_OPERAND (cval
, 1)) == INTEGER_CST
)
170 tree ptr
= TREE_OPERAND (cval
, 0);
171 if (is_gimple_min_invariant (ptr
))
172 cval
= build1_loc (EXPR_LOCATION (cval
),
173 ADDR_EXPR
, TREE_TYPE (ptr
),
174 fold_build2 (MEM_REF
, TREE_TYPE (TREE_TYPE (ptr
)),
176 fold_convert (ptr_type_node
,
177 TREE_OPERAND (cval
, 1))));
179 if (TREE_CODE (cval
) == ADDR_EXPR
)
181 tree base
= NULL_TREE
;
182 if (TREE_CODE (TREE_OPERAND (cval
, 0)) == COMPOUND_LITERAL_EXPR
)
184 base
= COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval
, 0));
186 TREE_OPERAND (cval
, 0) = base
;
189 base
= get_base_address (TREE_OPERAND (cval
, 0));
193 if ((TREE_CODE (base
) == VAR_DECL
194 || TREE_CODE (base
) == FUNCTION_DECL
)
195 && !can_refer_decl_in_current_unit_p (base
, from_decl
))
197 if (TREE_CODE (base
) == VAR_DECL
)
198 TREE_ADDRESSABLE (base
) = 1;
199 else if (TREE_CODE (base
) == FUNCTION_DECL
)
201 /* Make sure we create a cgraph node for functions we'll reference.
202 They can be non-existent if the reference comes from an entry
203 of an external vtable for example. */
204 cgraph_node::get_create (base
);
206 /* Fixup types in global initializers. */
207 if (TREE_TYPE (TREE_TYPE (cval
)) != TREE_TYPE (TREE_OPERAND (cval
, 0)))
208 cval
= build_fold_addr_expr (TREE_OPERAND (cval
, 0));
210 if (!useless_type_conversion_p (TREE_TYPE (orig_cval
), TREE_TYPE (cval
)))
211 cval
= fold_convert (TREE_TYPE (orig_cval
), cval
);
214 if (TREE_OVERFLOW_P (cval
))
215 return drop_tree_overflow (cval
);
219 /* If SYM is a constant variable with known value, return the value.
220 NULL_TREE is returned otherwise. */
223 get_symbol_constant_value (tree sym
)
225 tree val
= ctor_for_folding (sym
);
226 if (val
!= error_mark_node
)
230 val
= canonicalize_constructor_val (unshare_expr (val
), sym
);
231 if (val
&& is_gimple_min_invariant (val
))
236 /* Variables declared 'const' without an initializer
237 have zero as the initializer if they may not be
238 overridden at link or run time. */
240 && is_gimple_reg_type (TREE_TYPE (sym
)))
241 return build_zero_cst (TREE_TYPE (sym
));
249 /* Subroutine of fold_stmt. We perform several simplifications of the
250 memory reference tree EXPR and make sure to re-gimplify them properly
251 after propagation of constant addresses. IS_LHS is true if the
252 reference is supposed to be an lvalue. */
255 maybe_fold_reference (tree expr
, bool is_lhs
)
259 if ((TREE_CODE (expr
) == VIEW_CONVERT_EXPR
260 || TREE_CODE (expr
) == REALPART_EXPR
261 || TREE_CODE (expr
) == IMAGPART_EXPR
)
262 && CONSTANT_CLASS_P (TREE_OPERAND (expr
, 0)))
263 return fold_unary_loc (EXPR_LOCATION (expr
),
266 TREE_OPERAND (expr
, 0));
267 else if (TREE_CODE (expr
) == BIT_FIELD_REF
268 && CONSTANT_CLASS_P (TREE_OPERAND (expr
, 0)))
269 return fold_ternary_loc (EXPR_LOCATION (expr
),
272 TREE_OPERAND (expr
, 0),
273 TREE_OPERAND (expr
, 1),
274 TREE_OPERAND (expr
, 2));
277 && (result
= fold_const_aggregate_ref (expr
))
278 && is_gimple_min_invariant (result
))
285 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
286 replacement rhs for the statement or NULL_TREE if no simplification
287 could be made. It is assumed that the operands have been previously
291 fold_gimple_assign (gimple_stmt_iterator
*si
)
293 gimple
*stmt
= gsi_stmt (*si
);
294 enum tree_code subcode
= gimple_assign_rhs_code (stmt
);
295 location_t loc
= gimple_location (stmt
);
297 tree result
= NULL_TREE
;
299 switch (get_gimple_rhs_class (subcode
))
301 case GIMPLE_SINGLE_RHS
:
303 tree rhs
= gimple_assign_rhs1 (stmt
);
305 if (TREE_CLOBBER_P (rhs
))
308 if (REFERENCE_CLASS_P (rhs
))
309 return maybe_fold_reference (rhs
, false);
311 else if (TREE_CODE (rhs
) == OBJ_TYPE_REF
)
313 tree val
= OBJ_TYPE_REF_EXPR (rhs
);
314 if (is_gimple_min_invariant (val
))
316 else if (flag_devirtualize
&& virtual_method_call_p (rhs
))
319 vec
<cgraph_node
*>targets
320 = possible_polymorphic_call_targets (rhs
, stmt
, &final
);
321 if (final
&& targets
.length () <= 1 && dbg_cnt (devirt
))
323 if (dump_enabled_p ())
325 location_t loc
= gimple_location_safe (stmt
);
326 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS
, loc
,
327 "resolving virtual function address "
328 "reference to function %s\n",
329 targets
.length () == 1
330 ? targets
[0]->name ()
333 if (targets
.length () == 1)
335 val
= fold_convert (TREE_TYPE (val
),
336 build_fold_addr_expr_loc
337 (loc
, targets
[0]->decl
));
338 STRIP_USELESS_TYPE_CONVERSION (val
);
341 /* We can not use __builtin_unreachable here because it
342 can not have address taken. */
343 val
= build_int_cst (TREE_TYPE (val
), 0);
349 else if (TREE_CODE (rhs
) == ADDR_EXPR
)
351 tree ref
= TREE_OPERAND (rhs
, 0);
352 tree tem
= maybe_fold_reference (ref
, true);
354 && TREE_CODE (tem
) == MEM_REF
355 && integer_zerop (TREE_OPERAND (tem
, 1)))
356 result
= fold_convert (TREE_TYPE (rhs
), TREE_OPERAND (tem
, 0));
358 result
= fold_convert (TREE_TYPE (rhs
),
359 build_fold_addr_expr_loc (loc
, tem
));
360 else if (TREE_CODE (ref
) == MEM_REF
361 && integer_zerop (TREE_OPERAND (ref
, 1)))
362 result
= fold_convert (TREE_TYPE (rhs
), TREE_OPERAND (ref
, 0));
366 /* Strip away useless type conversions. Both the
367 NON_LVALUE_EXPR that may have been added by fold, and
368 "useless" type conversions that might now be apparent
369 due to propagation. */
370 STRIP_USELESS_TYPE_CONVERSION (result
);
372 if (result
!= rhs
&& valid_gimple_rhs_p (result
))
377 else if (TREE_CODE (rhs
) == CONSTRUCTOR
378 && TREE_CODE (TREE_TYPE (rhs
)) == VECTOR_TYPE
)
380 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
384 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs
), i
, val
)
385 if (! CONSTANT_CLASS_P (val
))
388 return build_vector_from_ctor (TREE_TYPE (rhs
),
389 CONSTRUCTOR_ELTS (rhs
));
392 else if (DECL_P (rhs
))
393 return get_symbol_constant_value (rhs
);
397 case GIMPLE_UNARY_RHS
:
400 case GIMPLE_BINARY_RHS
:
403 case GIMPLE_TERNARY_RHS
:
404 result
= fold_ternary_loc (loc
, subcode
,
405 TREE_TYPE (gimple_assign_lhs (stmt
)),
406 gimple_assign_rhs1 (stmt
),
407 gimple_assign_rhs2 (stmt
),
408 gimple_assign_rhs3 (stmt
));
412 STRIP_USELESS_TYPE_CONVERSION (result
);
413 if (valid_gimple_rhs_p (result
))
418 case GIMPLE_INVALID_RHS
:
426 /* Replace a statement at *SI_P with a sequence of statements in STMTS,
427 adjusting the replacement stmts location and virtual operands.
428 If the statement has a lhs the last stmt in the sequence is expected
429 to assign to that lhs. */
432 gsi_replace_with_seq_vops (gimple_stmt_iterator
*si_p
, gimple_seq stmts
)
434 gimple
*stmt
= gsi_stmt (*si_p
);
436 if (gimple_has_location (stmt
))
437 annotate_all_with_location (stmts
, gimple_location (stmt
));
439 /* First iterate over the replacement statements backward, assigning
440 virtual operands to their defining statements. */
441 gimple
*laststore
= NULL
;
442 for (gimple_stmt_iterator i
= gsi_last (stmts
);
443 !gsi_end_p (i
); gsi_prev (&i
))
445 gimple
*new_stmt
= gsi_stmt (i
);
446 if ((gimple_assign_single_p (new_stmt
)
447 && !is_gimple_reg (gimple_assign_lhs (new_stmt
)))
448 || (is_gimple_call (new_stmt
)
449 && (gimple_call_flags (new_stmt
)
450 & (ECF_NOVOPS
| ECF_PURE
| ECF_CONST
| ECF_NORETURN
)) == 0))
454 vdef
= gimple_vdef (stmt
);
456 vdef
= make_ssa_name (gimple_vop (cfun
), new_stmt
);
457 gimple_set_vdef (new_stmt
, vdef
);
458 if (vdef
&& TREE_CODE (vdef
) == SSA_NAME
)
459 SSA_NAME_DEF_STMT (vdef
) = new_stmt
;
460 laststore
= new_stmt
;
464 /* Second iterate over the statements forward, assigning virtual
465 operands to their uses. */
466 tree reaching_vuse
= gimple_vuse (stmt
);
467 for (gimple_stmt_iterator i
= gsi_start (stmts
);
468 !gsi_end_p (i
); gsi_next (&i
))
470 gimple
*new_stmt
= gsi_stmt (i
);
471 /* If the new statement possibly has a VUSE, update it with exact SSA
472 name we know will reach this one. */
473 if (gimple_has_mem_ops (new_stmt
))
474 gimple_set_vuse (new_stmt
, reaching_vuse
);
475 gimple_set_modified (new_stmt
, true);
476 if (gimple_vdef (new_stmt
))
477 reaching_vuse
= gimple_vdef (new_stmt
);
480 /* If the new sequence does not do a store release the virtual
481 definition of the original statement. */
483 && reaching_vuse
== gimple_vuse (stmt
))
485 tree vdef
= gimple_vdef (stmt
);
487 && TREE_CODE (vdef
) == SSA_NAME
)
489 unlink_stmt_vdef (stmt
);
490 release_ssa_name (vdef
);
494 /* Finally replace the original statement with the sequence. */
495 gsi_replace_with_seq (si_p
, stmts
, false);
498 /* Convert EXPR into a GIMPLE value suitable for substitution on the
499 RHS of an assignment. Insert the necessary statements before
500 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
501 is replaced. If the call is expected to produces a result, then it
502 is replaced by an assignment of the new RHS to the result variable.
503 If the result is to be ignored, then the call is replaced by a
504 GIMPLE_NOP. A proper VDEF chain is retained by making the first
505 VUSE and the last VDEF of the whole sequence be the same as the replaced
506 statement and using new SSA names for stores in between. */
509 gimplify_and_update_call_from_tree (gimple_stmt_iterator
*si_p
, tree expr
)
512 gimple
*stmt
, *new_stmt
;
513 gimple_stmt_iterator i
;
514 gimple_seq stmts
= NULL
;
516 stmt
= gsi_stmt (*si_p
);
518 gcc_assert (is_gimple_call (stmt
));
520 push_gimplify_context (gimple_in_ssa_p (cfun
));
522 lhs
= gimple_call_lhs (stmt
);
523 if (lhs
== NULL_TREE
)
525 gimplify_and_add (expr
, &stmts
);
526 /* We can end up with folding a memcpy of an empty class assignment
527 which gets optimized away by C++ gimplification. */
528 if (gimple_seq_empty_p (stmts
))
530 pop_gimplify_context (NULL
);
531 if (gimple_in_ssa_p (cfun
))
533 unlink_stmt_vdef (stmt
);
536 gsi_replace (si_p
, gimple_build_nop (), false);
542 tree tmp
= get_initialized_tmp_var (expr
, &stmts
, NULL
);
543 new_stmt
= gimple_build_assign (lhs
, tmp
);
544 i
= gsi_last (stmts
);
545 gsi_insert_after_without_update (&i
, new_stmt
,
546 GSI_CONTINUE_LINKING
);
549 pop_gimplify_context (NULL
);
551 gsi_replace_with_seq_vops (si_p
, stmts
);
555 /* Replace the call at *GSI with the gimple value VAL. */
558 replace_call_with_value (gimple_stmt_iterator
*gsi
, tree val
)
560 gimple
*stmt
= gsi_stmt (*gsi
);
561 tree lhs
= gimple_call_lhs (stmt
);
565 if (!useless_type_conversion_p (TREE_TYPE (lhs
), TREE_TYPE (val
)))
566 val
= fold_convert (TREE_TYPE (lhs
), val
);
567 repl
= gimple_build_assign (lhs
, val
);
570 repl
= gimple_build_nop ();
571 tree vdef
= gimple_vdef (stmt
);
572 if (vdef
&& TREE_CODE (vdef
) == SSA_NAME
)
574 unlink_stmt_vdef (stmt
);
575 release_ssa_name (vdef
);
577 gsi_replace (gsi
, repl
, false);
580 /* Replace the call at *GSI with the new call REPL and fold that
584 replace_call_with_call_and_fold (gimple_stmt_iterator
*gsi
, gimple
*repl
)
586 gimple
*stmt
= gsi_stmt (*gsi
);
587 gimple_call_set_lhs (repl
, gimple_call_lhs (stmt
));
588 gimple_set_location (repl
, gimple_location (stmt
));
589 if (gimple_vdef (stmt
)
590 && TREE_CODE (gimple_vdef (stmt
)) == SSA_NAME
)
592 gimple_set_vdef (repl
, gimple_vdef (stmt
));
593 gimple_set_vuse (repl
, gimple_vuse (stmt
));
594 SSA_NAME_DEF_STMT (gimple_vdef (repl
)) = repl
;
596 gsi_replace (gsi
, repl
, false);
600 /* Return true if VAR is a VAR_DECL or a component thereof. */
603 var_decl_component_p (tree var
)
606 while (handled_component_p (inner
))
607 inner
= TREE_OPERAND (inner
, 0);
608 return SSA_VAR_P (inner
);
611 /* Fold function call to builtin mem{{,p}cpy,move}. Return
612 false if no simplification can be made.
613 If ENDP is 0, return DEST (like memcpy).
614 If ENDP is 1, return DEST+LEN (like mempcpy).
615 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
616 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
620 gimple_fold_builtin_memory_op (gimple_stmt_iterator
*gsi
,
621 tree dest
, tree src
, int endp
)
623 gimple
*stmt
= gsi_stmt (*gsi
);
624 tree lhs
= gimple_call_lhs (stmt
);
625 tree len
= gimple_call_arg (stmt
, 2);
626 tree destvar
, srcvar
;
627 location_t loc
= gimple_location (stmt
);
629 /* If the LEN parameter is zero, return DEST. */
630 if (integer_zerop (len
))
633 if (gimple_call_lhs (stmt
))
634 repl
= gimple_build_assign (gimple_call_lhs (stmt
), dest
);
636 repl
= gimple_build_nop ();
637 tree vdef
= gimple_vdef (stmt
);
638 if (vdef
&& TREE_CODE (vdef
) == SSA_NAME
)
640 unlink_stmt_vdef (stmt
);
641 release_ssa_name (vdef
);
643 gsi_replace (gsi
, repl
, false);
647 /* If SRC and DEST are the same (and not volatile), return
648 DEST{,+LEN,+LEN-1}. */
649 if (operand_equal_p (src
, dest
, 0))
651 unlink_stmt_vdef (stmt
);
652 if (gimple_vdef (stmt
) && TREE_CODE (gimple_vdef (stmt
)) == SSA_NAME
)
653 release_ssa_name (gimple_vdef (stmt
));
656 gsi_replace (gsi
, gimple_build_nop (), false);
663 tree srctype
, desttype
;
664 unsigned int src_align
, dest_align
;
667 /* Build accesses at offset zero with a ref-all character type. */
668 off0
= build_int_cst (build_pointer_type_for_mode (char_type_node
,
671 /* If we can perform the copy efficiently with first doing all loads
672 and then all stores inline it that way. Currently efficiently
673 means that we can load all the memory into a single integer
674 register which is what MOVE_MAX gives us. */
675 src_align
= get_pointer_alignment (src
);
676 dest_align
= get_pointer_alignment (dest
);
677 if (tree_fits_uhwi_p (len
)
678 && compare_tree_int (len
, MOVE_MAX
) <= 0
679 /* ??? Don't transform copies from strings with known length this
680 confuses the tree-ssa-strlen.c. This doesn't handle
681 the case in gcc.dg/strlenopt-8.c which is XFAILed for that
683 && !c_strlen (src
, 2))
685 unsigned ilen
= tree_to_uhwi (len
);
686 if (exact_log2 (ilen
) != -1)
688 tree type
= lang_hooks
.types
.type_for_size (ilen
* 8, 1);
690 && TYPE_MODE (type
) != BLKmode
691 && (GET_MODE_SIZE (TYPE_MODE (type
)) * BITS_PER_UNIT
693 /* If the destination pointer is not aligned we must be able
694 to emit an unaligned store. */
695 && (dest_align
>= GET_MODE_ALIGNMENT (TYPE_MODE (type
))
696 || !SLOW_UNALIGNED_ACCESS (TYPE_MODE (type
), dest_align
)
697 || (optab_handler (movmisalign_optab
, TYPE_MODE (type
))
698 != CODE_FOR_nothing
)))
701 tree desttype
= type
;
702 if (src_align
< GET_MODE_ALIGNMENT (TYPE_MODE (type
)))
703 srctype
= build_aligned_type (type
, src_align
);
704 tree srcmem
= fold_build2 (MEM_REF
, srctype
, src
, off0
);
705 tree tem
= fold_const_aggregate_ref (srcmem
);
708 else if (src_align
< GET_MODE_ALIGNMENT (TYPE_MODE (type
))
709 && SLOW_UNALIGNED_ACCESS (TYPE_MODE (type
),
711 && (optab_handler (movmisalign_optab
,
713 == CODE_FOR_nothing
))
718 if (is_gimple_reg_type (TREE_TYPE (srcmem
)))
720 new_stmt
= gimple_build_assign (NULL_TREE
, srcmem
);
721 if (gimple_in_ssa_p (cfun
))
722 srcmem
= make_ssa_name (TREE_TYPE (srcmem
),
725 srcmem
= create_tmp_reg (TREE_TYPE (srcmem
));
726 gimple_assign_set_lhs (new_stmt
, srcmem
);
727 gimple_set_vuse (new_stmt
, gimple_vuse (stmt
));
728 gsi_insert_before (gsi
, new_stmt
, GSI_SAME_STMT
);
730 if (dest_align
< GET_MODE_ALIGNMENT (TYPE_MODE (type
)))
731 desttype
= build_aligned_type (type
, dest_align
);
733 = gimple_build_assign (fold_build2 (MEM_REF
, desttype
,
736 gimple_set_vuse (new_stmt
, gimple_vuse (stmt
));
737 gimple_set_vdef (new_stmt
, gimple_vdef (stmt
));
738 if (gimple_vdef (new_stmt
)
739 && TREE_CODE (gimple_vdef (new_stmt
)) == SSA_NAME
)
740 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt
)) = new_stmt
;
743 gsi_replace (gsi
, new_stmt
, false);
746 gsi_insert_before (gsi
, new_stmt
, GSI_SAME_STMT
);
755 /* Both DEST and SRC must be pointer types.
756 ??? This is what old code did. Is the testing for pointer types
759 If either SRC is readonly or length is 1, we can use memcpy. */
760 if (!dest_align
|| !src_align
)
762 if (readonly_data_expr (src
)
763 || (tree_fits_uhwi_p (len
)
764 && (MIN (src_align
, dest_align
) / BITS_PER_UNIT
765 >= tree_to_uhwi (len
))))
767 tree fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
770 gimple_call_set_fndecl (stmt
, fn
);
771 gimple_call_set_arg (stmt
, 0, dest
);
772 gimple_call_set_arg (stmt
, 1, src
);
777 /* If *src and *dest can't overlap, optimize into memcpy as well. */
778 if (TREE_CODE (src
) == ADDR_EXPR
779 && TREE_CODE (dest
) == ADDR_EXPR
)
781 tree src_base
, dest_base
, fn
;
782 HOST_WIDE_INT src_offset
= 0, dest_offset
= 0;
783 HOST_WIDE_INT size
= -1;
784 HOST_WIDE_INT maxsize
= -1;
786 srcvar
= TREE_OPERAND (src
, 0);
787 src_base
= get_ref_base_and_extent (srcvar
, &src_offset
,
789 destvar
= TREE_OPERAND (dest
, 0);
790 dest_base
= get_ref_base_and_extent (destvar
, &dest_offset
,
792 if (tree_fits_uhwi_p (len
))
793 maxsize
= tree_to_uhwi (len
);
796 src_offset
/= BITS_PER_UNIT
;
797 dest_offset
/= BITS_PER_UNIT
;
798 if (SSA_VAR_P (src_base
)
799 && SSA_VAR_P (dest_base
))
801 if (operand_equal_p (src_base
, dest_base
, 0)
802 && ranges_overlap_p (src_offset
, maxsize
,
803 dest_offset
, maxsize
))
806 else if (TREE_CODE (src_base
) == MEM_REF
807 && TREE_CODE (dest_base
) == MEM_REF
)
809 if (! operand_equal_p (TREE_OPERAND (src_base
, 0),
810 TREE_OPERAND (dest_base
, 0), 0))
812 offset_int off
= mem_ref_offset (src_base
) + src_offset
;
813 if (!wi::fits_shwi_p (off
))
815 src_offset
= off
.to_shwi ();
817 off
= mem_ref_offset (dest_base
) + dest_offset
;
818 if (!wi::fits_shwi_p (off
))
820 dest_offset
= off
.to_shwi ();
821 if (ranges_overlap_p (src_offset
, maxsize
,
822 dest_offset
, maxsize
))
828 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
831 gimple_call_set_fndecl (stmt
, fn
);
832 gimple_call_set_arg (stmt
, 0, dest
);
833 gimple_call_set_arg (stmt
, 1, src
);
838 /* If the destination and source do not alias optimize into
840 if ((is_gimple_min_invariant (dest
)
841 || TREE_CODE (dest
) == SSA_NAME
)
842 && (is_gimple_min_invariant (src
)
843 || TREE_CODE (src
) == SSA_NAME
))
846 ao_ref_init_from_ptr_and_size (&destr
, dest
, len
);
847 ao_ref_init_from_ptr_and_size (&srcr
, src
, len
);
848 if (!refs_may_alias_p_1 (&destr
, &srcr
, false))
851 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
854 gimple_call_set_fndecl (stmt
, fn
);
855 gimple_call_set_arg (stmt
, 0, dest
);
856 gimple_call_set_arg (stmt
, 1, src
);
865 if (!tree_fits_shwi_p (len
))
868 This logic lose for arguments like (type *)malloc (sizeof (type)),
869 since we strip the casts of up to VOID return value from malloc.
870 Perhaps we ought to inherit type from non-VOID argument here? */
873 if (!POINTER_TYPE_P (TREE_TYPE (src
))
874 || !POINTER_TYPE_P (TREE_TYPE (dest
)))
876 /* In the following try to find a type that is most natural to be
877 used for the memcpy source and destination and that allows
878 the most optimization when memcpy is turned into a plain assignment
879 using that type. In theory we could always use a char[len] type
880 but that only gains us that the destination and source possibly
881 no longer will have their address taken. */
882 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
883 if (TREE_CODE (src
) == POINTER_PLUS_EXPR
)
885 tree tem
= TREE_OPERAND (src
, 0);
887 if (tem
!= TREE_OPERAND (src
, 0))
888 src
= build1 (NOP_EXPR
, TREE_TYPE (tem
), src
);
890 if (TREE_CODE (dest
) == POINTER_PLUS_EXPR
)
892 tree tem
= TREE_OPERAND (dest
, 0);
894 if (tem
!= TREE_OPERAND (dest
, 0))
895 dest
= build1 (NOP_EXPR
, TREE_TYPE (tem
), dest
);
897 srctype
= TREE_TYPE (TREE_TYPE (src
));
898 if (TREE_CODE (srctype
) == ARRAY_TYPE
899 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype
), len
))
901 srctype
= TREE_TYPE (srctype
);
903 src
= build1 (NOP_EXPR
, build_pointer_type (srctype
), src
);
905 desttype
= TREE_TYPE (TREE_TYPE (dest
));
906 if (TREE_CODE (desttype
) == ARRAY_TYPE
907 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype
), len
))
909 desttype
= TREE_TYPE (desttype
);
911 dest
= build1 (NOP_EXPR
, build_pointer_type (desttype
), dest
);
913 if (TREE_ADDRESSABLE (srctype
)
914 || TREE_ADDRESSABLE (desttype
))
917 /* Make sure we are not copying using a floating-point mode or
918 a type whose size possibly does not match its precision. */
919 if (FLOAT_MODE_P (TYPE_MODE (desttype
))
920 || TREE_CODE (desttype
) == BOOLEAN_TYPE
921 || TREE_CODE (desttype
) == ENUMERAL_TYPE
)
922 desttype
= bitwise_type_for_mode (TYPE_MODE (desttype
));
923 if (FLOAT_MODE_P (TYPE_MODE (srctype
))
924 || TREE_CODE (srctype
) == BOOLEAN_TYPE
925 || TREE_CODE (srctype
) == ENUMERAL_TYPE
)
926 srctype
= bitwise_type_for_mode (TYPE_MODE (srctype
));
934 src_align
= get_pointer_alignment (src
);
935 dest_align
= get_pointer_alignment (dest
);
936 if (dest_align
< TYPE_ALIGN (desttype
)
937 || src_align
< TYPE_ALIGN (srctype
))
941 STRIP_NOPS (destvar
);
942 if (TREE_CODE (destvar
) == ADDR_EXPR
943 && var_decl_component_p (TREE_OPERAND (destvar
, 0))
944 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype
), len
))
945 destvar
= fold_build2 (MEM_REF
, desttype
, destvar
, off0
);
951 if (TREE_CODE (srcvar
) == ADDR_EXPR
952 && var_decl_component_p (TREE_OPERAND (srcvar
, 0))
953 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype
), len
))
956 || src_align
>= TYPE_ALIGN (desttype
))
957 srcvar
= fold_build2 (MEM_REF
, destvar
? desttype
: srctype
,
959 else if (!STRICT_ALIGNMENT
)
961 srctype
= build_aligned_type (TYPE_MAIN_VARIANT (desttype
),
963 srcvar
= fold_build2 (MEM_REF
, srctype
, srcvar
, off0
);
971 if (srcvar
== NULL_TREE
&& destvar
== NULL_TREE
)
974 if (srcvar
== NULL_TREE
)
977 if (src_align
>= TYPE_ALIGN (desttype
))
978 srcvar
= fold_build2 (MEM_REF
, desttype
, src
, off0
);
981 if (STRICT_ALIGNMENT
)
983 srctype
= build_aligned_type (TYPE_MAIN_VARIANT (desttype
),
985 srcvar
= fold_build2 (MEM_REF
, srctype
, src
, off0
);
988 else if (destvar
== NULL_TREE
)
991 if (dest_align
>= TYPE_ALIGN (srctype
))
992 destvar
= fold_build2 (MEM_REF
, srctype
, dest
, off0
);
995 if (STRICT_ALIGNMENT
)
997 desttype
= build_aligned_type (TYPE_MAIN_VARIANT (srctype
),
999 destvar
= fold_build2 (MEM_REF
, desttype
, dest
, off0
);
1004 if (is_gimple_reg_type (TREE_TYPE (srcvar
)))
1006 new_stmt
= gimple_build_assign (NULL_TREE
, srcvar
);
1007 if (gimple_in_ssa_p (cfun
))
1008 srcvar
= make_ssa_name (TREE_TYPE (srcvar
), new_stmt
);
1010 srcvar
= create_tmp_reg (TREE_TYPE (srcvar
));
1011 gimple_assign_set_lhs (new_stmt
, srcvar
);
1012 gimple_set_vuse (new_stmt
, gimple_vuse (stmt
));
1013 gsi_insert_before (gsi
, new_stmt
, GSI_SAME_STMT
);
1015 new_stmt
= gimple_build_assign (destvar
, srcvar
);
1016 gimple_set_vuse (new_stmt
, gimple_vuse (stmt
));
1017 gimple_set_vdef (new_stmt
, gimple_vdef (stmt
));
1018 if (gimple_vdef (new_stmt
)
1019 && TREE_CODE (gimple_vdef (new_stmt
)) == SSA_NAME
)
1020 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt
)) = new_stmt
;
1023 gsi_replace (gsi
, new_stmt
, false);
1026 gsi_insert_before (gsi
, new_stmt
, GSI_SAME_STMT
);
1030 gimple_seq stmts
= NULL
;
1031 if (endp
== 0 || endp
== 3)
1034 len
= gimple_build (&stmts
, loc
, MINUS_EXPR
, TREE_TYPE (len
), len
,
1036 if (endp
== 2 || endp
== 1)
1038 len
= gimple_convert_to_ptrofftype (&stmts
, loc
, len
);
1039 dest
= gimple_build (&stmts
, loc
, POINTER_PLUS_EXPR
,
1040 TREE_TYPE (dest
), dest
, len
);
1043 gsi_insert_seq_before (gsi
, stmts
, GSI_SAME_STMT
);
1044 gimple
*repl
= gimple_build_assign (lhs
, dest
);
1045 gsi_replace (gsi
, repl
, false);
1049 /* Fold function call to builtin memset or bzero at *GSI setting the
1050 memory of size LEN to VAL. Return whether a simplification was made. */
1053 gimple_fold_builtin_memset (gimple_stmt_iterator
*gsi
, tree c
, tree len
)
1055 gimple
*stmt
= gsi_stmt (*gsi
);
1057 unsigned HOST_WIDE_INT length
, cval
;
1059 /* If the LEN parameter is zero, return DEST. */
1060 if (integer_zerop (len
))
1062 replace_call_with_value (gsi
, gimple_call_arg (stmt
, 0));
1066 if (! tree_fits_uhwi_p (len
))
1069 if (TREE_CODE (c
) != INTEGER_CST
)
1072 tree dest
= gimple_call_arg (stmt
, 0);
1074 if (TREE_CODE (var
) != ADDR_EXPR
)
1077 var
= TREE_OPERAND (var
, 0);
1078 if (TREE_THIS_VOLATILE (var
))
1081 etype
= TREE_TYPE (var
);
1082 if (TREE_CODE (etype
) == ARRAY_TYPE
)
1083 etype
= TREE_TYPE (etype
);
1085 if (!INTEGRAL_TYPE_P (etype
)
1086 && !POINTER_TYPE_P (etype
))
1089 if (! var_decl_component_p (var
))
1092 length
= tree_to_uhwi (len
);
1093 if (GET_MODE_SIZE (TYPE_MODE (etype
)) != length
1094 || get_pointer_alignment (dest
) / BITS_PER_UNIT
< length
)
1097 if (length
> HOST_BITS_PER_WIDE_INT
/ BITS_PER_UNIT
)
1100 if (integer_zerop (c
))
1104 if (CHAR_BIT
!= 8 || BITS_PER_UNIT
!= 8 || HOST_BITS_PER_WIDE_INT
> 64)
1107 cval
= TREE_INT_CST_LOW (c
);
1111 cval
|= (cval
<< 31) << 1;
1114 var
= fold_build2 (MEM_REF
, etype
, dest
, build_int_cst (ptr_type_node
, 0));
1115 gimple
*store
= gimple_build_assign (var
, build_int_cst_type (etype
, cval
));
1116 gimple_set_vuse (store
, gimple_vuse (stmt
));
1117 tree vdef
= gimple_vdef (stmt
);
1118 if (vdef
&& TREE_CODE (vdef
) == SSA_NAME
)
1120 gimple_set_vdef (store
, gimple_vdef (stmt
));
1121 SSA_NAME_DEF_STMT (gimple_vdef (stmt
)) = store
;
1123 gsi_insert_before (gsi
, store
, GSI_SAME_STMT
);
1124 if (gimple_call_lhs (stmt
))
1126 gimple
*asgn
= gimple_build_assign (gimple_call_lhs (stmt
), dest
);
1127 gsi_replace (gsi
, asgn
, false);
1131 gimple_stmt_iterator gsi2
= *gsi
;
1133 gsi_remove (&gsi2
, true);
1140 /* Return the string length, maximum string length or maximum value of
1142 If ARG is an SSA name variable, follow its use-def chains. If LENGTH
1143 is not NULL and, for TYPE == 0, its value is not equal to the length
1144 we determine or if we are unable to determine the length or value,
1145 return false. VISITED is a bitmap of visited variables.
1146 TYPE is 0 if string length should be returned, 1 for maximum string
1147 length and 2 for maximum value ARG can have. */
1150 get_maxval_strlen (tree arg
, tree
*length
, bitmap
*visited
, int type
)
1155 if (TREE_CODE (arg
) != SSA_NAME
)
1157 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
1158 if (TREE_CODE (arg
) == ADDR_EXPR
1159 && TREE_CODE (TREE_OPERAND (arg
, 0)) == ARRAY_REF
1160 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg
, 0), 1)))
1162 tree aop0
= TREE_OPERAND (TREE_OPERAND (arg
, 0), 0);
1163 if (TREE_CODE (aop0
) == INDIRECT_REF
1164 && TREE_CODE (TREE_OPERAND (aop0
, 0)) == SSA_NAME
)
1165 return get_maxval_strlen (TREE_OPERAND (aop0
, 0),
1166 length
, visited
, type
);
1172 if (TREE_CODE (val
) != INTEGER_CST
1173 || tree_int_cst_sgn (val
) < 0)
1177 val
= c_strlen (arg
, 1);
1185 if (TREE_CODE (*length
) != INTEGER_CST
1186 || TREE_CODE (val
) != INTEGER_CST
)
1189 if (tree_int_cst_lt (*length
, val
))
1193 else if (simple_cst_equal (val
, *length
) != 1)
1201 /* If ARG is registered for SSA update we cannot look at its defining
1203 if (name_registered_for_update_p (arg
))
1206 /* If we were already here, break the infinite cycle. */
1208 *visited
= BITMAP_ALLOC (NULL
);
1209 if (!bitmap_set_bit (*visited
, SSA_NAME_VERSION (arg
)))
1213 def_stmt
= SSA_NAME_DEF_STMT (var
);
1215 switch (gimple_code (def_stmt
))
1218 /* The RHS of the statement defining VAR must either have a
1219 constant length or come from another SSA_NAME with a constant
1221 if (gimple_assign_single_p (def_stmt
)
1222 || gimple_assign_unary_nop_p (def_stmt
))
1224 tree rhs
= gimple_assign_rhs1 (def_stmt
);
1225 return get_maxval_strlen (rhs
, length
, visited
, type
);
1227 else if (gimple_assign_rhs_code (def_stmt
) == COND_EXPR
)
1229 tree op2
= gimple_assign_rhs2 (def_stmt
);
1230 tree op3
= gimple_assign_rhs3 (def_stmt
);
1231 return get_maxval_strlen (op2
, length
, visited
, type
)
1232 && get_maxval_strlen (op3
, length
, visited
, type
);
1238 /* All the arguments of the PHI node must have the same constant
1242 for (i
= 0; i
< gimple_phi_num_args (def_stmt
); i
++)
1244 tree arg
= gimple_phi_arg (def_stmt
, i
)->def
;
1246 /* If this PHI has itself as an argument, we cannot
1247 determine the string length of this argument. However,
1248 if we can find a constant string length for the other
1249 PHI args then we can still be sure that this is a
1250 constant string length. So be optimistic and just
1251 continue with the next argument. */
1252 if (arg
== gimple_phi_result (def_stmt
))
1255 if (!get_maxval_strlen (arg
, length
, visited
, type
))
1267 get_maxval_strlen (tree arg
, int type
)
1269 bitmap visited
= NULL
;
1270 tree len
= NULL_TREE
;
1271 if (!get_maxval_strlen (arg
, &len
, &visited
, type
))
1274 BITMAP_FREE (visited
);
1280 /* Fold function call to builtin strcpy with arguments DEST and SRC.
1281 If LEN is not NULL, it represents the length of the string to be
1282 copied. Return NULL_TREE if no simplification can be made. */
1285 gimple_fold_builtin_strcpy (gimple_stmt_iterator
*gsi
,
1286 tree dest
, tree src
)
1288 location_t loc
= gimple_location (gsi_stmt (*gsi
));
1291 /* If SRC and DEST are the same (and not volatile), return DEST. */
1292 if (operand_equal_p (src
, dest
, 0))
1294 replace_call_with_value (gsi
, dest
);
1298 if (optimize_function_for_size_p (cfun
))
1301 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
1305 tree len
= get_maxval_strlen (src
, 0);
1309 len
= fold_convert_loc (loc
, size_type_node
, len
);
1310 len
= size_binop_loc (loc
, PLUS_EXPR
, len
, build_int_cst (size_type_node
, 1));
1311 len
= force_gimple_operand_gsi (gsi
, len
, true,
1312 NULL_TREE
, true, GSI_SAME_STMT
);
1313 gimple
*repl
= gimple_build_call (fn
, 3, dest
, src
, len
);
1314 replace_call_with_call_and_fold (gsi
, repl
);
1318 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
1319 If SLEN is not NULL, it represents the length of the source string.
1320 Return NULL_TREE if no simplification can be made. */
1323 gimple_fold_builtin_strncpy (gimple_stmt_iterator
*gsi
,
1324 tree dest
, tree src
, tree len
)
1326 location_t loc
= gimple_location (gsi_stmt (*gsi
));
1329 /* If the LEN parameter is zero, return DEST. */
1330 if (integer_zerop (len
))
1332 replace_call_with_value (gsi
, dest
);
1336 /* We can't compare slen with len as constants below if len is not a
1338 if (TREE_CODE (len
) != INTEGER_CST
)
1341 /* Now, we must be passed a constant src ptr parameter. */
1342 tree slen
= get_maxval_strlen (src
, 0);
1343 if (!slen
|| TREE_CODE (slen
) != INTEGER_CST
)
1346 slen
= size_binop_loc (loc
, PLUS_EXPR
, slen
, ssize_int (1));
1348 /* We do not support simplification of this case, though we do
1349 support it when expanding trees into RTL. */
1350 /* FIXME: generate a call to __builtin_memset. */
1351 if (tree_int_cst_lt (slen
, len
))
1354 /* OK transform into builtin memcpy. */
1355 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
1359 len
= fold_convert_loc (loc
, size_type_node
, len
);
1360 len
= force_gimple_operand_gsi (gsi
, len
, true,
1361 NULL_TREE
, true, GSI_SAME_STMT
);
1362 gimple
*repl
= gimple_build_call (fn
, 3, dest
, src
, len
);
1363 replace_call_with_call_and_fold (gsi
, repl
);
1367 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
1370 Return NULL_TREE if no simplification was possible, otherwise return the
1371 simplified form of the call as a tree.
1373 The simplified form may be a constant or other expression which
1374 computes the same value, but in a more efficient manner (including
1375 calls to other builtin functions).
1377 The call may contain arguments which need to be evaluated, but
1378 which are not useful to determine the result of the call. In
1379 this case we return a chain of COMPOUND_EXPRs. The LHS of each
1380 COMPOUND_EXPR will be an argument which must be evaluated.
1381 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
1382 COMPOUND_EXPR in the chain will contain the tree for the simplified
1383 form of the builtin function call. */
1386 gimple_fold_builtin_strcat (gimple_stmt_iterator
*gsi
, tree dst
, tree src
)
1388 gimple
*stmt
= gsi_stmt (*gsi
);
1389 location_t loc
= gimple_location (stmt
);
1391 const char *p
= c_getstr (src
);
1393 /* If the string length is zero, return the dst parameter. */
1394 if (p
&& *p
== '\0')
1396 replace_call_with_value (gsi
, dst
);
1400 if (!optimize_bb_for_speed_p (gimple_bb (stmt
)))
1403 /* See if we can store by pieces into (dst + strlen(dst)). */
1405 tree strlen_fn
= builtin_decl_implicit (BUILT_IN_STRLEN
);
1406 tree memcpy_fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
1408 if (!strlen_fn
|| !memcpy_fn
)
1411 /* If the length of the source string isn't computable don't
1412 split strcat into strlen and memcpy. */
1413 tree len
= get_maxval_strlen (src
, 0);
1417 /* Create strlen (dst). */
1418 gimple_seq stmts
= NULL
, stmts2
;
1419 gimple
*repl
= gimple_build_call (strlen_fn
, 1, dst
);
1420 gimple_set_location (repl
, loc
);
1421 if (gimple_in_ssa_p (cfun
))
1422 newdst
= make_ssa_name (size_type_node
);
1424 newdst
= create_tmp_reg (size_type_node
);
1425 gimple_call_set_lhs (repl
, newdst
);
1426 gimple_seq_add_stmt_without_update (&stmts
, repl
);
1428 /* Create (dst p+ strlen (dst)). */
1429 newdst
= fold_build_pointer_plus_loc (loc
, dst
, newdst
);
1430 newdst
= force_gimple_operand (newdst
, &stmts2
, true, NULL_TREE
);
1431 gimple_seq_add_seq_without_update (&stmts
, stmts2
);
1433 len
= fold_convert_loc (loc
, size_type_node
, len
);
1434 len
= size_binop_loc (loc
, PLUS_EXPR
, len
,
1435 build_int_cst (size_type_node
, 1));
1436 len
= force_gimple_operand (len
, &stmts2
, true, NULL_TREE
);
1437 gimple_seq_add_seq_without_update (&stmts
, stmts2
);
1439 repl
= gimple_build_call (memcpy_fn
, 3, newdst
, src
, len
);
1440 gimple_seq_add_stmt_without_update (&stmts
, repl
);
1441 if (gimple_call_lhs (stmt
))
1443 repl
= gimple_build_assign (gimple_call_lhs (stmt
), dst
);
1444 gimple_seq_add_stmt_without_update (&stmts
, repl
);
1445 gsi_replace_with_seq_vops (gsi
, stmts
);
1446 /* gsi now points at the assignment to the lhs, get a
1447 stmt iterator to the memcpy call.
1448 ??? We can't use gsi_for_stmt as that doesn't work when the
1449 CFG isn't built yet. */
1450 gimple_stmt_iterator gsi2
= *gsi
;
1456 gsi_replace_with_seq_vops (gsi
, stmts
);
1462 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
1463 are the arguments to the call. */
1466 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator
*gsi
)
1468 gimple
*stmt
= gsi_stmt (*gsi
);
1469 tree dest
= gimple_call_arg (stmt
, 0);
1470 tree src
= gimple_call_arg (stmt
, 1);
1471 tree size
= gimple_call_arg (stmt
, 2);
1477 /* If the SRC parameter is "", return DEST. */
1478 if (p
&& *p
== '\0')
1480 replace_call_with_value (gsi
, dest
);
1484 if (! tree_fits_uhwi_p (size
) || ! integer_all_onesp (size
))
1487 /* If __builtin_strcat_chk is used, assume strcat is available. */
1488 fn
= builtin_decl_explicit (BUILT_IN_STRCAT
);
1492 gimple
*repl
= gimple_build_call (fn
, 2, dest
, src
);
1493 replace_call_with_call_and_fold (gsi
, repl
);
1497 /* Simplify a call to the strncat builtin. */
1500 gimple_fold_builtin_strncat (gimple_stmt_iterator
*gsi
)
1502 gcall
*stmt
= as_a
<gcall
*> (gsi_stmt (*gsi
));
1503 tree dst
= gimple_call_arg (stmt
, 0);
1504 tree src
= gimple_call_arg (stmt
, 1);
1505 tree len
= gimple_call_arg (stmt
, 2);
1507 const char *p
= c_getstr (src
);
1509 /* If the requested length is zero, or the src parameter string
1510 length is zero, return the dst parameter. */
1511 if (integer_zerop (len
) || (p
&& *p
== '\0'))
1513 replace_call_with_value (gsi
, dst
);
1517 /* If the requested len is greater than or equal to the string
1518 length, call strcat. */
1519 if (TREE_CODE (len
) == INTEGER_CST
&& p
1520 && compare_tree_int (len
, strlen (p
)) >= 0)
1522 tree fn
= builtin_decl_implicit (BUILT_IN_STRCAT
);
1524 /* If the replacement _DECL isn't initialized, don't do the
1529 gcall
*repl
= gimple_build_call (fn
, 2, dst
, src
);
1530 replace_call_with_call_and_fold (gsi
, repl
);
1537 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
1541 gimple_fold_builtin_strncat_chk (gimple_stmt_iterator
*gsi
)
1543 gimple
*stmt
= gsi_stmt (*gsi
);
1544 tree dest
= gimple_call_arg (stmt
, 0);
1545 tree src
= gimple_call_arg (stmt
, 1);
1546 tree len
= gimple_call_arg (stmt
, 2);
1547 tree size
= gimple_call_arg (stmt
, 3);
1552 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
1553 if ((p
&& *p
== '\0')
1554 || integer_zerop (len
))
1556 replace_call_with_value (gsi
, dest
);
1560 if (! tree_fits_uhwi_p (size
))
1563 if (! integer_all_onesp (size
))
1565 tree src_len
= c_strlen (src
, 1);
1567 && tree_fits_uhwi_p (src_len
)
1568 && tree_fits_uhwi_p (len
)
1569 && ! tree_int_cst_lt (len
, src_len
))
1571 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
1572 fn
= builtin_decl_explicit (BUILT_IN_STRCAT_CHK
);
1576 gimple
*repl
= gimple_build_call (fn
, 3, dest
, src
, size
);
1577 replace_call_with_call_and_fold (gsi
, repl
);
1583 /* If __builtin_strncat_chk is used, assume strncat is available. */
1584 fn
= builtin_decl_explicit (BUILT_IN_STRNCAT
);
1588 gimple
*repl
= gimple_build_call (fn
, 3, dest
, src
, len
);
1589 replace_call_with_call_and_fold (gsi
, repl
);
1593 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
1594 to the call. IGNORE is true if the value returned
1595 by the builtin will be ignored. UNLOCKED is true is true if this
1596 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
1597 the known length of the string. Return NULL_TREE if no simplification
1601 gimple_fold_builtin_fputs (gimple_stmt_iterator
*gsi
,
1602 tree arg0
, tree arg1
,
1605 gimple
*stmt
= gsi_stmt (*gsi
);
1607 /* If we're using an unlocked function, assume the other unlocked
1608 functions exist explicitly. */
1609 tree
const fn_fputc
= (unlocked
1610 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED
)
1611 : builtin_decl_implicit (BUILT_IN_FPUTC
));
1612 tree
const fn_fwrite
= (unlocked
1613 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED
)
1614 : builtin_decl_implicit (BUILT_IN_FWRITE
));
1616 /* If the return value is used, don't do the transformation. */
1617 if (gimple_call_lhs (stmt
))
1620 /* Get the length of the string passed to fputs. If the length
1621 can't be determined, punt. */
1622 tree len
= get_maxval_strlen (arg0
, 0);
1624 || TREE_CODE (len
) != INTEGER_CST
)
1627 switch (compare_tree_int (len
, 1))
1629 case -1: /* length is 0, delete the call entirely . */
1630 replace_call_with_value (gsi
, integer_zero_node
);
1633 case 0: /* length is 1, call fputc. */
1635 const char *p
= c_getstr (arg0
);
1641 gimple
*repl
= gimple_build_call (fn_fputc
, 2,
1643 (integer_type_node
, p
[0]), arg1
);
1644 replace_call_with_call_and_fold (gsi
, repl
);
1649 case 1: /* length is greater than 1, call fwrite. */
1651 /* If optimizing for size keep fputs. */
1652 if (optimize_function_for_size_p (cfun
))
1654 /* New argument list transforming fputs(string, stream) to
1655 fwrite(string, 1, len, stream). */
1659 gimple
*repl
= gimple_build_call (fn_fwrite
, 4, arg0
,
1660 size_one_node
, len
, arg1
);
1661 replace_call_with_call_and_fold (gsi
, repl
);
1670 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
1671 DEST, SRC, LEN, and SIZE are the arguments to the call.
1672 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
1673 code of the builtin. If MAXLEN is not NULL, it is maximum length
1674 passed as third argument. */
1677 gimple_fold_builtin_memory_chk (gimple_stmt_iterator
*gsi
,
1678 tree dest
, tree src
, tree len
, tree size
,
1679 enum built_in_function fcode
)
1681 gimple
*stmt
= gsi_stmt (*gsi
);
1682 location_t loc
= gimple_location (stmt
);
1683 bool ignore
= gimple_call_lhs (stmt
) == NULL_TREE
;
1686 /* If SRC and DEST are the same (and not volatile), return DEST
1687 (resp. DEST+LEN for __mempcpy_chk). */
1688 if (fcode
!= BUILT_IN_MEMSET_CHK
&& operand_equal_p (src
, dest
, 0))
1690 if (fcode
!= BUILT_IN_MEMPCPY_CHK
)
1692 replace_call_with_value (gsi
, dest
);
1697 gimple_seq stmts
= NULL
;
1698 len
= gimple_convert_to_ptrofftype (&stmts
, loc
, len
);
1699 tree temp
= gimple_build (&stmts
, loc
, POINTER_PLUS_EXPR
, dest
, len
);
1700 gsi_insert_seq_before (gsi
, stmts
, GSI_SAME_STMT
);
1701 replace_call_with_value (gsi
, temp
);
1706 if (! tree_fits_uhwi_p (size
))
1709 tree maxlen
= get_maxval_strlen (len
, 2);
1710 if (! integer_all_onesp (size
))
1712 if (! tree_fits_uhwi_p (len
))
1714 /* If LEN is not constant, try MAXLEN too.
1715 For MAXLEN only allow optimizing into non-_ocs function
1716 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
1717 if (maxlen
== NULL_TREE
|| ! tree_fits_uhwi_p (maxlen
))
1719 if (fcode
== BUILT_IN_MEMPCPY_CHK
&& ignore
)
1721 /* (void) __mempcpy_chk () can be optimized into
1722 (void) __memcpy_chk (). */
1723 fn
= builtin_decl_explicit (BUILT_IN_MEMCPY_CHK
);
1727 gimple
*repl
= gimple_build_call (fn
, 4, dest
, src
, len
, size
);
1728 replace_call_with_call_and_fold (gsi
, repl
);
1737 if (tree_int_cst_lt (size
, maxlen
))
1742 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
1743 mem{cpy,pcpy,move,set} is available. */
1746 case BUILT_IN_MEMCPY_CHK
:
1747 fn
= builtin_decl_explicit (BUILT_IN_MEMCPY
);
1749 case BUILT_IN_MEMPCPY_CHK
:
1750 fn
= builtin_decl_explicit (BUILT_IN_MEMPCPY
);
1752 case BUILT_IN_MEMMOVE_CHK
:
1753 fn
= builtin_decl_explicit (BUILT_IN_MEMMOVE
);
1755 case BUILT_IN_MEMSET_CHK
:
1756 fn
= builtin_decl_explicit (BUILT_IN_MEMSET
);
1765 gimple
*repl
= gimple_build_call (fn
, 3, dest
, src
, len
);
1766 replace_call_with_call_and_fold (gsi
, repl
);
1770 /* Fold a call to the __st[rp]cpy_chk builtin.
1771 DEST, SRC, and SIZE are the arguments to the call.
1772 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
1773 code of the builtin. If MAXLEN is not NULL, it is maximum length of
1774 strings passed as second argument. */
1777 gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator
*gsi
,
1779 tree src
, tree size
,
1780 enum built_in_function fcode
)
1782 gimple
*stmt
= gsi_stmt (*gsi
);
1783 location_t loc
= gimple_location (stmt
);
1784 bool ignore
= gimple_call_lhs (stmt
) == NULL_TREE
;
1787 /* If SRC and DEST are the same (and not volatile), return DEST. */
1788 if (fcode
== BUILT_IN_STRCPY_CHK
&& operand_equal_p (src
, dest
, 0))
1790 replace_call_with_value (gsi
, dest
);
1794 if (! tree_fits_uhwi_p (size
))
1797 tree maxlen
= get_maxval_strlen (src
, 1);
1798 if (! integer_all_onesp (size
))
1800 len
= c_strlen (src
, 1);
1801 if (! len
|| ! tree_fits_uhwi_p (len
))
1803 /* If LEN is not constant, try MAXLEN too.
1804 For MAXLEN only allow optimizing into non-_ocs function
1805 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
1806 if (maxlen
== NULL_TREE
|| ! tree_fits_uhwi_p (maxlen
))
1808 if (fcode
== BUILT_IN_STPCPY_CHK
)
1813 /* If return value of __stpcpy_chk is ignored,
1814 optimize into __strcpy_chk. */
1815 fn
= builtin_decl_explicit (BUILT_IN_STRCPY_CHK
);
1819 gimple
*repl
= gimple_build_call (fn
, 3, dest
, src
, size
);
1820 replace_call_with_call_and_fold (gsi
, repl
);
1824 if (! len
|| TREE_SIDE_EFFECTS (len
))
1827 /* If c_strlen returned something, but not a constant,
1828 transform __strcpy_chk into __memcpy_chk. */
1829 fn
= builtin_decl_explicit (BUILT_IN_MEMCPY_CHK
);
1833 gimple_seq stmts
= NULL
;
1834 len
= gimple_convert (&stmts
, loc
, size_type_node
, len
);
1835 len
= gimple_build (&stmts
, loc
, PLUS_EXPR
, size_type_node
, len
,
1836 build_int_cst (size_type_node
, 1));
1837 gsi_insert_seq_before (gsi
, stmts
, GSI_SAME_STMT
);
1838 gimple
*repl
= gimple_build_call (fn
, 4, dest
, src
, len
, size
);
1839 replace_call_with_call_and_fold (gsi
, repl
);
1846 if (! tree_int_cst_lt (maxlen
, size
))
1850 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
1851 fn
= builtin_decl_explicit (fcode
== BUILT_IN_STPCPY_CHK
1852 ? BUILT_IN_STPCPY
: BUILT_IN_STRCPY
);
1856 gimple
*repl
= gimple_build_call (fn
, 2, dest
, src
);
1857 replace_call_with_call_and_fold (gsi
, repl
);
1861 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
1862 are the arguments to the call. If MAXLEN is not NULL, it is maximum
1863 length passed as third argument. IGNORE is true if return value can be
1864 ignored. FCODE is the BUILT_IN_* code of the builtin. */
1867 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator
*gsi
,
1868 tree dest
, tree src
,
1869 tree len
, tree size
,
1870 enum built_in_function fcode
)
1872 gimple
*stmt
= gsi_stmt (*gsi
);
1873 bool ignore
= gimple_call_lhs (stmt
) == NULL_TREE
;
1876 if (fcode
== BUILT_IN_STPNCPY_CHK
&& ignore
)
1878 /* If return value of __stpncpy_chk is ignored,
1879 optimize into __strncpy_chk. */
1880 fn
= builtin_decl_explicit (BUILT_IN_STRNCPY_CHK
);
1883 gimple
*repl
= gimple_build_call (fn
, 4, dest
, src
, len
, size
);
1884 replace_call_with_call_and_fold (gsi
, repl
);
1889 if (! tree_fits_uhwi_p (size
))
1892 tree maxlen
= get_maxval_strlen (len
, 2);
1893 if (! integer_all_onesp (size
))
1895 if (! tree_fits_uhwi_p (len
))
1897 /* If LEN is not constant, try MAXLEN too.
1898 For MAXLEN only allow optimizing into non-_ocs function
1899 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
1900 if (maxlen
== NULL_TREE
|| ! tree_fits_uhwi_p (maxlen
))
1906 if (tree_int_cst_lt (size
, maxlen
))
1910 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
1911 fn
= builtin_decl_explicit (fcode
== BUILT_IN_STPNCPY_CHK
1912 ? BUILT_IN_STPNCPY
: BUILT_IN_STRNCPY
);
1916 gimple
*repl
= gimple_build_call (fn
, 3, dest
, src
, len
);
1917 replace_call_with_call_and_fold (gsi
, repl
);
1921 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
1922 Return NULL_TREE if no simplification can be made. */
1925 gimple_fold_builtin_stpcpy (gimple_stmt_iterator
*gsi
)
1927 gcall
*stmt
= as_a
<gcall
*> (gsi_stmt (*gsi
));
1928 location_t loc
= gimple_location (stmt
);
1929 tree dest
= gimple_call_arg (stmt
, 0);
1930 tree src
= gimple_call_arg (stmt
, 1);
1931 tree fn
, len
, lenp1
;
1933 /* If the result is unused, replace stpcpy with strcpy. */
1934 if (gimple_call_lhs (stmt
) == NULL_TREE
)
1936 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
1939 gimple_call_set_fndecl (stmt
, fn
);
1944 len
= c_strlen (src
, 1);
1946 || TREE_CODE (len
) != INTEGER_CST
)
1949 if (optimize_function_for_size_p (cfun
)
1950 /* If length is zero it's small enough. */
1951 && !integer_zerop (len
))
1954 /* If the source has a known length replace stpcpy with memcpy. */
1955 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
1959 gimple_seq stmts
= NULL
;
1960 tree tem
= gimple_convert (&stmts
, loc
, size_type_node
, len
);
1961 lenp1
= gimple_build (&stmts
, loc
, PLUS_EXPR
, size_type_node
,
1962 tem
, build_int_cst (size_type_node
, 1));
1963 gsi_insert_seq_before (gsi
, stmts
, GSI_SAME_STMT
);
1964 gcall
*repl
= gimple_build_call (fn
, 3, dest
, src
, lenp1
);
1965 gimple_set_vuse (repl
, gimple_vuse (stmt
));
1966 gimple_set_vdef (repl
, gimple_vdef (stmt
));
1967 if (gimple_vdef (repl
)
1968 && TREE_CODE (gimple_vdef (repl
)) == SSA_NAME
)
1969 SSA_NAME_DEF_STMT (gimple_vdef (repl
)) = repl
;
1970 gsi_insert_before (gsi
, repl
, GSI_SAME_STMT
);
1971 /* Replace the result with dest + len. */
1973 tem
= gimple_convert (&stmts
, loc
, sizetype
, len
);
1974 gsi_insert_seq_before (gsi
, stmts
, GSI_SAME_STMT
);
1975 gassign
*ret
= gimple_build_assign (gimple_call_lhs (stmt
),
1976 POINTER_PLUS_EXPR
, dest
, tem
);
1977 gsi_replace (gsi
, ret
, false);
1978 /* Finally fold the memcpy call. */
1979 gimple_stmt_iterator gsi2
= *gsi
;
1985 /* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS. Return
1986 NULL_TREE if a normal call should be emitted rather than expanding
1987 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
1988 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
1989 passed as second argument. */
1992 gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator
*gsi
,
1993 enum built_in_function fcode
)
1995 gcall
*stmt
= as_a
<gcall
*> (gsi_stmt (*gsi
));
1996 tree dest
, size
, len
, fn
, fmt
, flag
;
1997 const char *fmt_str
;
1999 /* Verify the required arguments in the original call. */
2000 if (gimple_call_num_args (stmt
) < 5)
2003 dest
= gimple_call_arg (stmt
, 0);
2004 len
= gimple_call_arg (stmt
, 1);
2005 flag
= gimple_call_arg (stmt
, 2);
2006 size
= gimple_call_arg (stmt
, 3);
2007 fmt
= gimple_call_arg (stmt
, 4);
2009 if (! tree_fits_uhwi_p (size
))
2012 if (! integer_all_onesp (size
))
2014 tree maxlen
= get_maxval_strlen (len
, 2);
2015 if (! tree_fits_uhwi_p (len
))
2017 /* If LEN is not constant, try MAXLEN too.
2018 For MAXLEN only allow optimizing into non-_ocs function
2019 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2020 if (maxlen
== NULL_TREE
|| ! tree_fits_uhwi_p (maxlen
))
2026 if (tree_int_cst_lt (size
, maxlen
))
2030 if (!init_target_chars ())
2033 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
2034 or if format doesn't contain % chars or is "%s". */
2035 if (! integer_zerop (flag
))
2037 fmt_str
= c_getstr (fmt
);
2038 if (fmt_str
== NULL
)
2040 if (strchr (fmt_str
, target_percent
) != NULL
2041 && strcmp (fmt_str
, target_percent_s
))
2045 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
2047 fn
= builtin_decl_explicit (fcode
== BUILT_IN_VSNPRINTF_CHK
2048 ? BUILT_IN_VSNPRINTF
: BUILT_IN_SNPRINTF
);
2052 /* Replace the called function and the first 5 argument by 3 retaining
2053 trailing varargs. */
2054 gimple_call_set_fndecl (stmt
, fn
);
2055 gimple_call_set_fntype (stmt
, TREE_TYPE (fn
));
2056 gimple_call_set_arg (stmt
, 0, dest
);
2057 gimple_call_set_arg (stmt
, 1, len
);
2058 gimple_call_set_arg (stmt
, 2, fmt
);
2059 for (unsigned i
= 3; i
< gimple_call_num_args (stmt
) - 2; ++i
)
2060 gimple_call_set_arg (stmt
, i
, gimple_call_arg (stmt
, i
+ 2));
2061 gimple_set_num_ops (stmt
, gimple_num_ops (stmt
) - 2);
2066 /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
2067 Return NULL_TREE if a normal call should be emitted rather than
2068 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
2069 or BUILT_IN_VSPRINTF_CHK. */
2072 gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator
*gsi
,
2073 enum built_in_function fcode
)
2075 gcall
*stmt
= as_a
<gcall
*> (gsi_stmt (*gsi
));
2076 tree dest
, size
, len
, fn
, fmt
, flag
;
2077 const char *fmt_str
;
2078 unsigned nargs
= gimple_call_num_args (stmt
);
2080 /* Verify the required arguments in the original call. */
2083 dest
= gimple_call_arg (stmt
, 0);
2084 flag
= gimple_call_arg (stmt
, 1);
2085 size
= gimple_call_arg (stmt
, 2);
2086 fmt
= gimple_call_arg (stmt
, 3);
2088 if (! tree_fits_uhwi_p (size
))
2093 if (!init_target_chars ())
2096 /* Check whether the format is a literal string constant. */
2097 fmt_str
= c_getstr (fmt
);
2098 if (fmt_str
!= NULL
)
2100 /* If the format doesn't contain % args or %%, we know the size. */
2101 if (strchr (fmt_str
, target_percent
) == 0)
2103 if (fcode
!= BUILT_IN_SPRINTF_CHK
|| nargs
== 4)
2104 len
= build_int_cstu (size_type_node
, strlen (fmt_str
));
2106 /* If the format is "%s" and first ... argument is a string literal,
2107 we know the size too. */
2108 else if (fcode
== BUILT_IN_SPRINTF_CHK
2109 && strcmp (fmt_str
, target_percent_s
) == 0)
2115 arg
= gimple_call_arg (stmt
, 4);
2116 if (POINTER_TYPE_P (TREE_TYPE (arg
)))
2118 len
= c_strlen (arg
, 1);
2119 if (! len
|| ! tree_fits_uhwi_p (len
))
2126 if (! integer_all_onesp (size
))
2128 if (! len
|| ! tree_int_cst_lt (len
, size
))
2132 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
2133 or if format doesn't contain % chars or is "%s". */
2134 if (! integer_zerop (flag
))
2136 if (fmt_str
== NULL
)
2138 if (strchr (fmt_str
, target_percent
) != NULL
2139 && strcmp (fmt_str
, target_percent_s
))
2143 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
2144 fn
= builtin_decl_explicit (fcode
== BUILT_IN_VSPRINTF_CHK
2145 ? BUILT_IN_VSPRINTF
: BUILT_IN_SPRINTF
);
2149 /* Replace the called function and the first 4 argument by 2 retaining
2150 trailing varargs. */
2151 gimple_call_set_fndecl (stmt
, fn
);
2152 gimple_call_set_fntype (stmt
, TREE_TYPE (fn
));
2153 gimple_call_set_arg (stmt
, 0, dest
);
2154 gimple_call_set_arg (stmt
, 1, fmt
);
2155 for (unsigned i
= 2; i
< gimple_call_num_args (stmt
) - 2; ++i
)
2156 gimple_call_set_arg (stmt
, i
, gimple_call_arg (stmt
, i
+ 2));
2157 gimple_set_num_ops (stmt
, gimple_num_ops (stmt
) - 2);
2162 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
2163 ORIG may be null if this is a 2-argument call. We don't attempt to
2164 simplify calls with more than 3 arguments.
2166 Return NULL_TREE if no simplification was possible, otherwise return the
2167 simplified form of the call as a tree. If IGNORED is true, it means that
2168 the caller does not use the returned value of the function. */
2171 gimple_fold_builtin_sprintf (gimple_stmt_iterator
*gsi
)
2173 gimple
*stmt
= gsi_stmt (*gsi
);
2174 tree dest
= gimple_call_arg (stmt
, 0);
2175 tree fmt
= gimple_call_arg (stmt
, 1);
2176 tree orig
= NULL_TREE
;
2177 const char *fmt_str
= NULL
;
2179 /* Verify the required arguments in the original call. We deal with two
2180 types of sprintf() calls: 'sprintf (str, fmt)' and
2181 'sprintf (dest, "%s", orig)'. */
2182 if (gimple_call_num_args (stmt
) > 3)
2185 if (gimple_call_num_args (stmt
) == 3)
2186 orig
= gimple_call_arg (stmt
, 2);
2188 /* Check whether the format is a literal string constant. */
2189 fmt_str
= c_getstr (fmt
);
2190 if (fmt_str
== NULL
)
2193 if (!init_target_chars ())
2196 /* If the format doesn't contain % args or %%, use strcpy. */
2197 if (strchr (fmt_str
, target_percent
) == NULL
)
2199 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
2204 /* Don't optimize sprintf (buf, "abc", ptr++). */
2208 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
2209 'format' is known to contain no % formats. */
2210 gimple_seq stmts
= NULL
;
2211 gimple
*repl
= gimple_build_call (fn
, 2, dest
, fmt
);
2212 gimple_seq_add_stmt_without_update (&stmts
, repl
);
2213 if (gimple_call_lhs (stmt
))
2215 repl
= gimple_build_assign (gimple_call_lhs (stmt
),
2216 build_int_cst (integer_type_node
,
2218 gimple_seq_add_stmt_without_update (&stmts
, repl
);
2219 gsi_replace_with_seq_vops (gsi
, stmts
);
2220 /* gsi now points at the assignment to the lhs, get a
2221 stmt iterator to the memcpy call.
2222 ??? We can't use gsi_for_stmt as that doesn't work when the
2223 CFG isn't built yet. */
2224 gimple_stmt_iterator gsi2
= *gsi
;
2230 gsi_replace_with_seq_vops (gsi
, stmts
);
2236 /* If the format is "%s", use strcpy if the result isn't used. */
2237 else if (fmt_str
&& strcmp (fmt_str
, target_percent_s
) == 0)
2240 fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
2245 /* Don't crash on sprintf (str1, "%s"). */
2249 tree orig_len
= NULL_TREE
;
2250 if (gimple_call_lhs (stmt
))
2252 orig_len
= get_maxval_strlen (orig
, 0);
2257 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
2258 gimple_seq stmts
= NULL
;
2259 gimple
*repl
= gimple_build_call (fn
, 2, dest
, orig
);
2260 gimple_seq_add_stmt_without_update (&stmts
, repl
);
2261 if (gimple_call_lhs (stmt
))
2263 if (!useless_type_conversion_p (integer_type_node
,
2264 TREE_TYPE (orig_len
)))
2265 orig_len
= fold_convert (integer_type_node
, orig_len
);
2266 repl
= gimple_build_assign (gimple_call_lhs (stmt
), orig_len
);
2267 gimple_seq_add_stmt_without_update (&stmts
, repl
);
2268 gsi_replace_with_seq_vops (gsi
, stmts
);
2269 /* gsi now points at the assignment to the lhs, get a
2270 stmt iterator to the memcpy call.
2271 ??? We can't use gsi_for_stmt as that doesn't work when the
2272 CFG isn't built yet. */
2273 gimple_stmt_iterator gsi2
= *gsi
;
2279 gsi_replace_with_seq_vops (gsi
, stmts
);
2287 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
2288 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
2289 attempt to simplify calls with more than 4 arguments.
2291 Return NULL_TREE if no simplification was possible, otherwise return the
2292 simplified form of the call as a tree. If IGNORED is true, it means that
2293 the caller does not use the returned value of the function. */
2296 gimple_fold_builtin_snprintf (gimple_stmt_iterator
*gsi
)
2298 gcall
*stmt
= as_a
<gcall
*> (gsi_stmt (*gsi
));
2299 tree dest
= gimple_call_arg (stmt
, 0);
2300 tree destsize
= gimple_call_arg (stmt
, 1);
2301 tree fmt
= gimple_call_arg (stmt
, 2);
2302 tree orig
= NULL_TREE
;
2303 const char *fmt_str
= NULL
;
2305 if (gimple_call_num_args (stmt
) > 4)
2308 if (gimple_call_num_args (stmt
) == 4)
2309 orig
= gimple_call_arg (stmt
, 3);
2311 if (!tree_fits_uhwi_p (destsize
))
2313 unsigned HOST_WIDE_INT destlen
= tree_to_uhwi (destsize
);
2315 /* Check whether the format is a literal string constant. */
2316 fmt_str
= c_getstr (fmt
);
2317 if (fmt_str
== NULL
)
2320 if (!init_target_chars ())
2323 /* If the format doesn't contain % args or %%, use strcpy. */
2324 if (strchr (fmt_str
, target_percent
) == NULL
)
2326 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
2330 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
2334 /* We could expand this as
2335 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
2337 memcpy (str, fmt_with_nul_at_cstm1, cst);
2338 but in the former case that might increase code size
2339 and in the latter case grow .rodata section too much.
2341 size_t len
= strlen (fmt_str
);
2345 gimple_seq stmts
= NULL
;
2346 gimple
*repl
= gimple_build_call (fn
, 2, dest
, fmt
);
2347 gimple_seq_add_stmt_without_update (&stmts
, repl
);
2348 if (gimple_call_lhs (stmt
))
2350 repl
= gimple_build_assign (gimple_call_lhs (stmt
),
2351 build_int_cst (integer_type_node
, len
));
2352 gimple_seq_add_stmt_without_update (&stmts
, repl
);
2353 gsi_replace_with_seq_vops (gsi
, stmts
);
2354 /* gsi now points at the assignment to the lhs, get a
2355 stmt iterator to the memcpy call.
2356 ??? We can't use gsi_for_stmt as that doesn't work when the
2357 CFG isn't built yet. */
2358 gimple_stmt_iterator gsi2
= *gsi
;
2364 gsi_replace_with_seq_vops (gsi
, stmts
);
2370 /* If the format is "%s", use strcpy if the result isn't used. */
2371 else if (fmt_str
&& strcmp (fmt_str
, target_percent_s
) == 0)
2373 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
2377 /* Don't crash on snprintf (str1, cst, "%s"). */
2381 tree orig_len
= get_maxval_strlen (orig
, 0);
2382 if (!orig_len
|| TREE_CODE (orig_len
) != INTEGER_CST
)
2385 /* We could expand this as
2386 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
2388 memcpy (str1, str2_with_nul_at_cstm1, cst);
2389 but in the former case that might increase code size
2390 and in the latter case grow .rodata section too much.
2392 if (compare_tree_int (orig_len
, destlen
) >= 0)
2395 /* Convert snprintf (str1, cst, "%s", str2) into
2396 strcpy (str1, str2) if strlen (str2) < cst. */
2397 gimple_seq stmts
= NULL
;
2398 gimple
*repl
= gimple_build_call (fn
, 2, dest
, orig
);
2399 gimple_seq_add_stmt_without_update (&stmts
, repl
);
2400 if (gimple_call_lhs (stmt
))
2402 if (!useless_type_conversion_p (integer_type_node
,
2403 TREE_TYPE (orig_len
)))
2404 orig_len
= fold_convert (integer_type_node
, orig_len
);
2405 repl
= gimple_build_assign (gimple_call_lhs (stmt
), orig_len
);
2406 gimple_seq_add_stmt_without_update (&stmts
, repl
);
2407 gsi_replace_with_seq_vops (gsi
, stmts
);
2408 /* gsi now points at the assignment to the lhs, get a
2409 stmt iterator to the memcpy call.
2410 ??? We can't use gsi_for_stmt as that doesn't work when the
2411 CFG isn't built yet. */
2412 gimple_stmt_iterator gsi2
= *gsi
;
2418 gsi_replace_with_seq_vops (gsi
, stmts
);
2426 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
2427 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
2428 more than 3 arguments, and ARG may be null in the 2-argument case.
2430 Return NULL_TREE if no simplification was possible, otherwise return the
2431 simplified form of the call as a tree. FCODE is the BUILT_IN_*
2432 code of the function to be simplified. */
2435 gimple_fold_builtin_fprintf (gimple_stmt_iterator
*gsi
,
2436 tree fp
, tree fmt
, tree arg
,
2437 enum built_in_function fcode
)
2439 gcall
*stmt
= as_a
<gcall
*> (gsi_stmt (*gsi
));
2440 tree fn_fputc
, fn_fputs
;
2441 const char *fmt_str
= NULL
;
2443 /* If the return value is used, don't do the transformation. */
2444 if (gimple_call_lhs (stmt
) != NULL_TREE
)
2447 /* Check whether the format is a literal string constant. */
2448 fmt_str
= c_getstr (fmt
);
2449 if (fmt_str
== NULL
)
2452 if (fcode
== BUILT_IN_FPRINTF_UNLOCKED
)
2454 /* If we're using an unlocked function, assume the other
2455 unlocked functions exist explicitly. */
2456 fn_fputc
= builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED
);
2457 fn_fputs
= builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED
);
2461 fn_fputc
= builtin_decl_implicit (BUILT_IN_FPUTC
);
2462 fn_fputs
= builtin_decl_implicit (BUILT_IN_FPUTS
);
2465 if (!init_target_chars ())
2468 /* If the format doesn't contain % args or %%, use strcpy. */
2469 if (strchr (fmt_str
, target_percent
) == NULL
)
2471 if (fcode
!= BUILT_IN_VFPRINTF
&& fcode
!= BUILT_IN_VFPRINTF_CHK
2475 /* If the format specifier was "", fprintf does nothing. */
2476 if (fmt_str
[0] == '\0')
2478 replace_call_with_value (gsi
, NULL_TREE
);
2482 /* When "string" doesn't contain %, replace all cases of
2483 fprintf (fp, string) with fputs (string, fp). The fputs
2484 builtin will take care of special cases like length == 1. */
2487 gcall
*repl
= gimple_build_call (fn_fputs
, 2, fmt
, fp
);
2488 replace_call_with_call_and_fold (gsi
, repl
);
2493 /* The other optimizations can be done only on the non-va_list variants. */
2494 else if (fcode
== BUILT_IN_VFPRINTF
|| fcode
== BUILT_IN_VFPRINTF_CHK
)
2497 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
2498 else if (strcmp (fmt_str
, target_percent_s
) == 0)
2500 if (!arg
|| ! POINTER_TYPE_P (TREE_TYPE (arg
)))
2504 gcall
*repl
= gimple_build_call (fn_fputs
, 2, arg
, fp
);
2505 replace_call_with_call_and_fold (gsi
, repl
);
2510 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
2511 else if (strcmp (fmt_str
, target_percent_c
) == 0)
2514 || ! useless_type_conversion_p (integer_type_node
, TREE_TYPE (arg
)))
2518 gcall
*repl
= gimple_build_call (fn_fputc
, 2, arg
, fp
);
2519 replace_call_with_call_and_fold (gsi
, repl
);
2527 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
2528 FMT and ARG are the arguments to the call; we don't fold cases with
2529 more than 2 arguments, and ARG may be null if this is a 1-argument case.
2531 Return NULL_TREE if no simplification was possible, otherwise return the
2532 simplified form of the call as a tree. FCODE is the BUILT_IN_*
2533 code of the function to be simplified. */
2536 gimple_fold_builtin_printf (gimple_stmt_iterator
*gsi
, tree fmt
,
2537 tree arg
, enum built_in_function fcode
)
2539 gcall
*stmt
= as_a
<gcall
*> (gsi_stmt (*gsi
));
2540 tree fn_putchar
, fn_puts
, newarg
;
2541 const char *fmt_str
= NULL
;
2543 /* If the return value is used, don't do the transformation. */
2544 if (gimple_call_lhs (stmt
) != NULL_TREE
)
2547 /* Check whether the format is a literal string constant. */
2548 fmt_str
= c_getstr (fmt
);
2549 if (fmt_str
== NULL
)
2552 if (fcode
== BUILT_IN_PRINTF_UNLOCKED
)
2554 /* If we're using an unlocked function, assume the other
2555 unlocked functions exist explicitly. */
2556 fn_putchar
= builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED
);
2557 fn_puts
= builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED
);
2561 fn_putchar
= builtin_decl_implicit (BUILT_IN_PUTCHAR
);
2562 fn_puts
= builtin_decl_implicit (BUILT_IN_PUTS
);
2565 if (!init_target_chars ())
2568 if (strcmp (fmt_str
, target_percent_s
) == 0
2569 || strchr (fmt_str
, target_percent
) == NULL
)
2573 if (strcmp (fmt_str
, target_percent_s
) == 0)
2575 if (fcode
== BUILT_IN_VPRINTF
|| fcode
== BUILT_IN_VPRINTF_CHK
)
2578 if (!arg
|| ! POINTER_TYPE_P (TREE_TYPE (arg
)))
2581 str
= c_getstr (arg
);
2587 /* The format specifier doesn't contain any '%' characters. */
2588 if (fcode
!= BUILT_IN_VPRINTF
&& fcode
!= BUILT_IN_VPRINTF_CHK
2594 /* If the string was "", printf does nothing. */
2597 replace_call_with_value (gsi
, NULL_TREE
);
2601 /* If the string has length of 1, call putchar. */
2604 /* Given printf("c"), (where c is any one character,)
2605 convert "c"[0] to an int and pass that to the replacement
2607 newarg
= build_int_cst (integer_type_node
, str
[0]);
2610 gcall
*repl
= gimple_build_call (fn_putchar
, 1, newarg
);
2611 replace_call_with_call_and_fold (gsi
, repl
);
2617 /* If the string was "string\n", call puts("string"). */
2618 size_t len
= strlen (str
);
2619 if ((unsigned char)str
[len
- 1] == target_newline
2620 && (size_t) (int) len
== len
2624 tree offset_node
, string_cst
;
2626 /* Create a NUL-terminated string that's one char shorter
2627 than the original, stripping off the trailing '\n'. */
2628 newarg
= build_string_literal (len
, str
);
2629 string_cst
= string_constant (newarg
, &offset_node
);
2630 gcc_checking_assert (string_cst
2631 && (TREE_STRING_LENGTH (string_cst
)
2633 && integer_zerop (offset_node
)
2635 TREE_STRING_POINTER (string_cst
)[len
- 1]
2637 /* build_string_literal creates a new STRING_CST,
2638 modify it in place to avoid double copying. */
2639 newstr
= CONST_CAST (char *, TREE_STRING_POINTER (string_cst
));
2640 newstr
[len
- 1] = '\0';
2643 gcall
*repl
= gimple_build_call (fn_puts
, 1, newarg
);
2644 replace_call_with_call_and_fold (gsi
, repl
);
2649 /* We'd like to arrange to call fputs(string,stdout) here,
2650 but we need stdout and don't have a way to get it yet. */
2655 /* The other optimizations can be done only on the non-va_list variants. */
2656 else if (fcode
== BUILT_IN_VPRINTF
|| fcode
== BUILT_IN_VPRINTF_CHK
)
2659 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
2660 else if (strcmp (fmt_str
, target_percent_s_newline
) == 0)
2662 if (!arg
|| ! POINTER_TYPE_P (TREE_TYPE (arg
)))
2666 gcall
*repl
= gimple_build_call (fn_puts
, 1, arg
);
2667 replace_call_with_call_and_fold (gsi
, repl
);
2672 /* If the format specifier was "%c", call __builtin_putchar(arg). */
2673 else if (strcmp (fmt_str
, target_percent_c
) == 0)
2675 if (!arg
|| ! useless_type_conversion_p (integer_type_node
,
2680 gcall
*repl
= gimple_build_call (fn_putchar
, 1, arg
);
2681 replace_call_with_call_and_fold (gsi
, repl
);
2691 /* Fold a call to __builtin_strlen with known length LEN. */
2694 gimple_fold_builtin_strlen (gimple_stmt_iterator
*gsi
)
2696 gimple
*stmt
= gsi_stmt (*gsi
);
2697 tree len
= get_maxval_strlen (gimple_call_arg (stmt
, 0), 0);
2700 len
= force_gimple_operand_gsi (gsi
, len
, true, NULL
, true, GSI_SAME_STMT
);
2701 replace_call_with_value (gsi
, len
);
2705 /* Fold a call to __builtin_acc_on_device. */
2708 gimple_fold_builtin_acc_on_device (gimple_stmt_iterator
*gsi
, tree arg0
)
2710 /* Defer folding until we know which compiler we're in. */
2711 if (symtab
->state
!= EXPANSION
)
2714 unsigned val_host
= GOMP_DEVICE_HOST
;
2715 unsigned val_dev
= GOMP_DEVICE_NONE
;
2717 #ifdef ACCEL_COMPILER
2718 val_host
= GOMP_DEVICE_NOT_HOST
;
2719 val_dev
= ACCEL_COMPILER_acc_device
;
2722 location_t loc
= gimple_location (gsi_stmt (*gsi
));
2724 tree host_eq
= make_ssa_name (boolean_type_node
);
2725 gimple
*host_ass
= gimple_build_assign
2726 (host_eq
, EQ_EXPR
, arg0
, build_int_cst (TREE_TYPE (arg0
), val_host
));
2727 gimple_set_location (host_ass
, loc
);
2728 gsi_insert_before (gsi
, host_ass
, GSI_SAME_STMT
);
2730 tree dev_eq
= make_ssa_name (boolean_type_node
);
2731 gimple
*dev_ass
= gimple_build_assign
2732 (dev_eq
, EQ_EXPR
, arg0
, build_int_cst (TREE_TYPE (arg0
), val_dev
));
2733 gimple_set_location (dev_ass
, loc
);
2734 gsi_insert_before (gsi
, dev_ass
, GSI_SAME_STMT
);
2736 tree result
= make_ssa_name (boolean_type_node
);
2737 gimple
*result_ass
= gimple_build_assign
2738 (result
, BIT_IOR_EXPR
, host_eq
, dev_eq
);
2739 gimple_set_location (result_ass
, loc
);
2740 gsi_insert_before (gsi
, result_ass
, GSI_SAME_STMT
);
2742 replace_call_with_value (gsi
, result
);
2747 /* Fold the non-target builtin at *GSI and return whether any simplification
2751 gimple_fold_builtin (gimple_stmt_iterator
*gsi
)
2753 gcall
*stmt
= as_a
<gcall
*>(gsi_stmt (*gsi
));
2754 tree callee
= gimple_call_fndecl (stmt
);
2756 /* Give up for always_inline inline builtins until they are
2758 if (avoid_folding_inline_builtin (callee
))
2761 unsigned n
= gimple_call_num_args (stmt
);
2762 enum built_in_function fcode
= DECL_FUNCTION_CODE (callee
);
2765 case BUILT_IN_BZERO
:
2766 return gimple_fold_builtin_memset (gsi
, integer_zero_node
,
2767 gimple_call_arg (stmt
, 1));
2768 case BUILT_IN_MEMSET
:
2769 return gimple_fold_builtin_memset (gsi
,
2770 gimple_call_arg (stmt
, 1),
2771 gimple_call_arg (stmt
, 2));
2772 case BUILT_IN_BCOPY
:
2773 return gimple_fold_builtin_memory_op (gsi
, gimple_call_arg (stmt
, 1),
2774 gimple_call_arg (stmt
, 0), 3);
2775 case BUILT_IN_MEMCPY
:
2776 return gimple_fold_builtin_memory_op (gsi
, gimple_call_arg (stmt
, 0),
2777 gimple_call_arg (stmt
, 1), 0);
2778 case BUILT_IN_MEMPCPY
:
2779 return gimple_fold_builtin_memory_op (gsi
, gimple_call_arg (stmt
, 0),
2780 gimple_call_arg (stmt
, 1), 1);
2781 case BUILT_IN_MEMMOVE
:
2782 return gimple_fold_builtin_memory_op (gsi
, gimple_call_arg (stmt
, 0),
2783 gimple_call_arg (stmt
, 1), 3);
2784 case BUILT_IN_SPRINTF_CHK
:
2785 case BUILT_IN_VSPRINTF_CHK
:
2786 return gimple_fold_builtin_sprintf_chk (gsi
, fcode
);
2787 case BUILT_IN_STRCAT_CHK
:
2788 return gimple_fold_builtin_strcat_chk (gsi
);
2789 case BUILT_IN_STRNCAT_CHK
:
2790 return gimple_fold_builtin_strncat_chk (gsi
);
2791 case BUILT_IN_STRLEN
:
2792 return gimple_fold_builtin_strlen (gsi
);
2793 case BUILT_IN_STRCPY
:
2794 return gimple_fold_builtin_strcpy (gsi
,
2795 gimple_call_arg (stmt
, 0),
2796 gimple_call_arg (stmt
, 1));
2797 case BUILT_IN_STRNCPY
:
2798 return gimple_fold_builtin_strncpy (gsi
,
2799 gimple_call_arg (stmt
, 0),
2800 gimple_call_arg (stmt
, 1),
2801 gimple_call_arg (stmt
, 2));
2802 case BUILT_IN_STRCAT
:
2803 return gimple_fold_builtin_strcat (gsi
, gimple_call_arg (stmt
, 0),
2804 gimple_call_arg (stmt
, 1));
2805 case BUILT_IN_STRNCAT
:
2806 return gimple_fold_builtin_strncat (gsi
);
2807 case BUILT_IN_FPUTS
:
2808 return gimple_fold_builtin_fputs (gsi
, gimple_call_arg (stmt
, 0),
2809 gimple_call_arg (stmt
, 1), false);
2810 case BUILT_IN_FPUTS_UNLOCKED
:
2811 return gimple_fold_builtin_fputs (gsi
, gimple_call_arg (stmt
, 0),
2812 gimple_call_arg (stmt
, 1), true);
2813 case BUILT_IN_MEMCPY_CHK
:
2814 case BUILT_IN_MEMPCPY_CHK
:
2815 case BUILT_IN_MEMMOVE_CHK
:
2816 case BUILT_IN_MEMSET_CHK
:
2817 return gimple_fold_builtin_memory_chk (gsi
,
2818 gimple_call_arg (stmt
, 0),
2819 gimple_call_arg (stmt
, 1),
2820 gimple_call_arg (stmt
, 2),
2821 gimple_call_arg (stmt
, 3),
2823 case BUILT_IN_STPCPY
:
2824 return gimple_fold_builtin_stpcpy (gsi
);
2825 case BUILT_IN_STRCPY_CHK
:
2826 case BUILT_IN_STPCPY_CHK
:
2827 return gimple_fold_builtin_stxcpy_chk (gsi
,
2828 gimple_call_arg (stmt
, 0),
2829 gimple_call_arg (stmt
, 1),
2830 gimple_call_arg (stmt
, 2),
2832 case BUILT_IN_STRNCPY_CHK
:
2833 case BUILT_IN_STPNCPY_CHK
:
2834 return gimple_fold_builtin_stxncpy_chk (gsi
,
2835 gimple_call_arg (stmt
, 0),
2836 gimple_call_arg (stmt
, 1),
2837 gimple_call_arg (stmt
, 2),
2838 gimple_call_arg (stmt
, 3),
2840 case BUILT_IN_SNPRINTF_CHK
:
2841 case BUILT_IN_VSNPRINTF_CHK
:
2842 return gimple_fold_builtin_snprintf_chk (gsi
, fcode
);
2843 case BUILT_IN_SNPRINTF
:
2844 return gimple_fold_builtin_snprintf (gsi
);
2845 case BUILT_IN_SPRINTF
:
2846 return gimple_fold_builtin_sprintf (gsi
);
2847 case BUILT_IN_FPRINTF
:
2848 case BUILT_IN_FPRINTF_UNLOCKED
:
2849 case BUILT_IN_VFPRINTF
:
2850 if (n
== 2 || n
== 3)
2851 return gimple_fold_builtin_fprintf (gsi
,
2852 gimple_call_arg (stmt
, 0),
2853 gimple_call_arg (stmt
, 1),
2855 ? gimple_call_arg (stmt
, 2)
2859 case BUILT_IN_FPRINTF_CHK
:
2860 case BUILT_IN_VFPRINTF_CHK
:
2861 if (n
== 3 || n
== 4)
2862 return gimple_fold_builtin_fprintf (gsi
,
2863 gimple_call_arg (stmt
, 0),
2864 gimple_call_arg (stmt
, 2),
2866 ? gimple_call_arg (stmt
, 3)
2870 case BUILT_IN_PRINTF
:
2871 case BUILT_IN_PRINTF_UNLOCKED
:
2872 case BUILT_IN_VPRINTF
:
2873 if (n
== 1 || n
== 2)
2874 return gimple_fold_builtin_printf (gsi
, gimple_call_arg (stmt
, 0),
2876 ? gimple_call_arg (stmt
, 1)
2877 : NULL_TREE
, fcode
);
2879 case BUILT_IN_PRINTF_CHK
:
2880 case BUILT_IN_VPRINTF_CHK
:
2881 if (n
== 2 || n
== 3)
2882 return gimple_fold_builtin_printf (gsi
, gimple_call_arg (stmt
, 1),
2884 ? gimple_call_arg (stmt
, 2)
2885 : NULL_TREE
, fcode
);
2887 case BUILT_IN_ACC_ON_DEVICE
:
2888 return gimple_fold_builtin_acc_on_device (gsi
,
2889 gimple_call_arg (stmt
, 0));
2893 /* Try the generic builtin folder. */
2894 bool ignore
= (gimple_call_lhs (stmt
) == NULL
);
2895 tree result
= fold_call_stmt (stmt
, ignore
);
2899 STRIP_NOPS (result
);
2901 result
= fold_convert (gimple_call_return_type (stmt
), result
);
2902 if (!update_call_from_tree (gsi
, result
))
2903 gimplify_and_update_call_from_tree (gsi
, result
);
2910 /* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
2911 function calls to constants, where possible. */
2914 fold_internal_goacc_dim (const gimple
*call
)
2916 int axis
= get_oacc_ifn_dim_arg (call
);
2917 int size
= get_oacc_fn_dim_size (current_function_decl
, axis
);
2918 bool is_pos
= gimple_call_internal_fn (call
) == IFN_GOACC_DIM_POS
;
2919 tree result
= NULL_TREE
;
2921 /* If the size is 1, or we only want the size and it is not dynamic,
2922 we know the answer. */
2923 if (size
== 1 || (!is_pos
&& size
))
2925 tree type
= TREE_TYPE (gimple_call_lhs (call
));
2926 result
= build_int_cst (type
, size
- is_pos
);
2932 /* Return true if ARG0 CODE ARG1 in infinite signed precision operation
2933 doesn't fit into TYPE. The test for overflow should be regardless of
2934 -fwrapv, and even for unsigned types. */
2937 arith_overflowed_p (enum tree_code code
, const_tree type
,
2938 const_tree arg0
, const_tree arg1
)
2940 typedef FIXED_WIDE_INT (WIDE_INT_MAX_PRECISION
* 2) widest2_int
;
2941 typedef generic_wide_int
<wi::extended_tree
<WIDE_INT_MAX_PRECISION
* 2> >
2943 widest2_int warg0
= widest2_int_cst (arg0
);
2944 widest2_int warg1
= widest2_int_cst (arg1
);
2948 case PLUS_EXPR
: wres
= wi::add (warg0
, warg1
); break;
2949 case MINUS_EXPR
: wres
= wi::sub (warg0
, warg1
); break;
2950 case MULT_EXPR
: wres
= wi::mul (warg0
, warg1
); break;
2951 default: gcc_unreachable ();
2953 signop sign
= TYPE_SIGN (type
);
2954 if (sign
== UNSIGNED
&& wi::neg_p (wres
))
2956 return wi::min_precision (wres
, sign
) > TYPE_PRECISION (type
);
2959 /* Attempt to fold a call statement referenced by the statement iterator GSI.
2960 The statement may be replaced by another statement, e.g., if the call
2961 simplifies to a constant value. Return true if any changes were made.
2962 It is assumed that the operands have been previously folded. */
2965 gimple_fold_call (gimple_stmt_iterator
*gsi
, bool inplace
)
2967 gcall
*stmt
= as_a
<gcall
*> (gsi_stmt (*gsi
));
2969 bool changed
= false;
2972 /* Fold *& in call arguments. */
2973 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
2974 if (REFERENCE_CLASS_P (gimple_call_arg (stmt
, i
)))
2976 tree tmp
= maybe_fold_reference (gimple_call_arg (stmt
, i
), false);
2979 gimple_call_set_arg (stmt
, i
, tmp
);
2984 /* Check for virtual calls that became direct calls. */
2985 callee
= gimple_call_fn (stmt
);
2986 if (callee
&& TREE_CODE (callee
) == OBJ_TYPE_REF
)
2988 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee
)) != NULL_TREE
)
2990 if (dump_file
&& virtual_method_call_p (callee
)
2991 && !possible_polymorphic_call_target_p
2992 (callee
, stmt
, cgraph_node::get (gimple_call_addr_fndecl
2993 (OBJ_TYPE_REF_EXPR (callee
)))))
2996 "Type inheritance inconsistent devirtualization of ");
2997 print_gimple_stmt (dump_file
, stmt
, 0, TDF_SLIM
);
2998 fprintf (dump_file
, " to ");
2999 print_generic_expr (dump_file
, callee
, TDF_SLIM
);
3000 fprintf (dump_file
, "\n");
3003 gimple_call_set_fn (stmt
, OBJ_TYPE_REF_EXPR (callee
));
3006 else if (flag_devirtualize
&& !inplace
&& virtual_method_call_p (callee
))
3009 vec
<cgraph_node
*>targets
3010 = possible_polymorphic_call_targets (callee
, stmt
, &final
);
3011 if (final
&& targets
.length () <= 1 && dbg_cnt (devirt
))
3013 tree lhs
= gimple_call_lhs (stmt
);
3014 if (dump_enabled_p ())
3016 location_t loc
= gimple_location_safe (stmt
);
3017 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS
, loc
,
3018 "folding virtual function call to %s\n",
3019 targets
.length () == 1
3020 ? targets
[0]->name ()
3021 : "__builtin_unreachable");
3023 if (targets
.length () == 1)
3025 gimple_call_set_fndecl (stmt
, targets
[0]->decl
);
3027 /* If the call becomes noreturn, remove the lhs. */
3028 if (lhs
&& (gimple_call_flags (stmt
) & ECF_NORETURN
))
3030 if (TREE_CODE (lhs
) == SSA_NAME
)
3032 tree var
= create_tmp_var (TREE_TYPE (lhs
));
3033 tree def
= get_or_create_ssa_default_def (cfun
, var
);
3034 gimple
*new_stmt
= gimple_build_assign (lhs
, def
);
3035 gsi_insert_before (gsi
, new_stmt
, GSI_SAME_STMT
);
3037 gimple_call_set_lhs (stmt
, NULL_TREE
);
3039 maybe_remove_unused_call_args (cfun
, stmt
);
3043 tree fndecl
= builtin_decl_implicit (BUILT_IN_UNREACHABLE
);
3044 gimple
*new_stmt
= gimple_build_call (fndecl
, 0);
3045 gimple_set_location (new_stmt
, gimple_location (stmt
));
3046 if (lhs
&& TREE_CODE (lhs
) == SSA_NAME
)
3048 tree var
= create_tmp_var (TREE_TYPE (lhs
));
3049 tree def
= get_or_create_ssa_default_def (cfun
, var
);
3051 /* To satisfy condition for
3052 cgraph_update_edges_for_call_stmt_node,
3053 we need to preserve GIMPLE_CALL statement
3054 at position of GSI iterator. */
3055 update_call_from_tree (gsi
, def
);
3056 gsi_insert_before (gsi
, new_stmt
, GSI_NEW_STMT
);
3060 gimple_set_vuse (new_stmt
, gimple_vuse (stmt
));
3061 gimple_set_vdef (new_stmt
, gimple_vdef (stmt
));
3062 gsi_replace (gsi
, new_stmt
, false);
3070 /* Check for indirect calls that became direct calls, and then
3071 no longer require a static chain. */
3072 if (gimple_call_chain (stmt
))
3074 tree fn
= gimple_call_fndecl (stmt
);
3075 if (fn
&& !DECL_STATIC_CHAIN (fn
))
3077 gimple_call_set_chain (stmt
, NULL
);
3082 tree tmp
= maybe_fold_reference (gimple_call_chain (stmt
), false);
3085 gimple_call_set_chain (stmt
, tmp
);
3094 /* Check for builtins that CCP can handle using information not
3095 available in the generic fold routines. */
3096 if (gimple_call_builtin_p (stmt
, BUILT_IN_NORMAL
))
3098 if (gimple_fold_builtin (gsi
))
3101 else if (gimple_call_builtin_p (stmt
, BUILT_IN_MD
))
3103 changed
|= targetm
.gimple_fold_builtin (gsi
);
3105 else if (gimple_call_internal_p (stmt
))
3107 enum tree_code subcode
= ERROR_MARK
;
3108 tree result
= NULL_TREE
;
3109 bool cplx_result
= false;
3110 tree overflow
= NULL_TREE
;
3111 switch (gimple_call_internal_fn (stmt
))
3113 case IFN_BUILTIN_EXPECT
:
3114 result
= fold_builtin_expect (gimple_location (stmt
),
3115 gimple_call_arg (stmt
, 0),
3116 gimple_call_arg (stmt
, 1),
3117 gimple_call_arg (stmt
, 2));
3119 case IFN_UBSAN_OBJECT_SIZE
:
3120 if (integer_all_onesp (gimple_call_arg (stmt
, 2))
3121 || (TREE_CODE (gimple_call_arg (stmt
, 1)) == INTEGER_CST
3122 && TREE_CODE (gimple_call_arg (stmt
, 2)) == INTEGER_CST
3123 && tree_int_cst_le (gimple_call_arg (stmt
, 1),
3124 gimple_call_arg (stmt
, 2))))
3126 gsi_replace (gsi
, gimple_build_nop (), false);
3127 unlink_stmt_vdef (stmt
);
3128 release_defs (stmt
);
3132 case IFN_GOACC_DIM_SIZE
:
3133 case IFN_GOACC_DIM_POS
:
3134 result
= fold_internal_goacc_dim (stmt
);
3136 case IFN_UBSAN_CHECK_ADD
:
3137 subcode
= PLUS_EXPR
;
3139 case IFN_UBSAN_CHECK_SUB
:
3140 subcode
= MINUS_EXPR
;
3142 case IFN_UBSAN_CHECK_MUL
:
3143 subcode
= MULT_EXPR
;
3145 case IFN_ADD_OVERFLOW
:
3146 subcode
= PLUS_EXPR
;
3149 case IFN_SUB_OVERFLOW
:
3150 subcode
= MINUS_EXPR
;
3153 case IFN_MUL_OVERFLOW
:
3154 subcode
= MULT_EXPR
;
3160 if (subcode
!= ERROR_MARK
)
3162 tree arg0
= gimple_call_arg (stmt
, 0);
3163 tree arg1
= gimple_call_arg (stmt
, 1);
3164 tree type
= TREE_TYPE (arg0
);
3167 tree lhs
= gimple_call_lhs (stmt
);
3168 if (lhs
== NULL_TREE
)
3171 type
= TREE_TYPE (TREE_TYPE (lhs
));
3173 if (type
== NULL_TREE
)
3175 /* x = y + 0; x = y - 0; x = y * 0; */
3176 else if (integer_zerop (arg1
))
3177 result
= subcode
== MULT_EXPR
? integer_zero_node
: arg0
;
3178 /* x = 0 + y; x = 0 * y; */
3179 else if (subcode
!= MINUS_EXPR
&& integer_zerop (arg0
))
3180 result
= subcode
== MULT_EXPR
? integer_zero_node
: arg1
;
3182 else if (subcode
== MINUS_EXPR
&& operand_equal_p (arg0
, arg1
, 0))
3183 result
= integer_zero_node
;
3184 /* x = y * 1; x = 1 * y; */
3185 else if (subcode
== MULT_EXPR
&& integer_onep (arg1
))
3187 else if (subcode
== MULT_EXPR
&& integer_onep (arg0
))
3189 else if (TREE_CODE (arg0
) == INTEGER_CST
3190 && TREE_CODE (arg1
) == INTEGER_CST
)
3193 result
= int_const_binop (subcode
, fold_convert (type
, arg0
),
3194 fold_convert (type
, arg1
));
3196 result
= int_const_binop (subcode
, arg0
, arg1
);
3197 if (result
&& arith_overflowed_p (subcode
, type
, arg0
, arg1
))
3200 overflow
= build_one_cst (type
);
3207 if (result
== integer_zero_node
)
3208 result
= build_zero_cst (type
);
3209 else if (cplx_result
&& TREE_TYPE (result
) != type
)
3211 if (TREE_CODE (result
) == INTEGER_CST
)
3213 if (arith_overflowed_p (PLUS_EXPR
, type
, result
,
3215 overflow
= build_one_cst (type
);
3217 else if ((!TYPE_UNSIGNED (TREE_TYPE (result
))
3218 && TYPE_UNSIGNED (type
))
3219 || (TYPE_PRECISION (type
)
3220 < (TYPE_PRECISION (TREE_TYPE (result
))
3221 + (TYPE_UNSIGNED (TREE_TYPE (result
))
3222 && !TYPE_UNSIGNED (type
)))))
3225 result
= fold_convert (type
, result
);
3232 if (TREE_CODE (result
) == INTEGER_CST
&& TREE_OVERFLOW (result
))
3233 result
= drop_tree_overflow (result
);
3236 if (overflow
== NULL_TREE
)
3237 overflow
= build_zero_cst (TREE_TYPE (result
));
3238 tree ctype
= build_complex_type (TREE_TYPE (result
));
3239 if (TREE_CODE (result
) == INTEGER_CST
3240 && TREE_CODE (overflow
) == INTEGER_CST
)
3241 result
= build_complex (ctype
, result
, overflow
);
3243 result
= build2_loc (gimple_location (stmt
), COMPLEX_EXPR
,
3244 ctype
, result
, overflow
);
3246 if (!update_call_from_tree (gsi
, result
))
3247 gimplify_and_update_call_from_tree (gsi
, result
);
3256 /* Return true whether NAME has a use on STMT. */
3259 has_use_on_stmt (tree name
, gimple
*stmt
)
3261 imm_use_iterator iter
;
3262 use_operand_p use_p
;
3263 FOR_EACH_IMM_USE_FAST (use_p
, iter
, name
)
3264 if (USE_STMT (use_p
) == stmt
)
3269 /* Worker for fold_stmt_1 dispatch to pattern based folding with
3272 Replaces *GSI with the simplification result in RCODE and OPS
3273 and the associated statements in *SEQ. Does the replacement
3274 according to INPLACE and returns true if the operation succeeded. */
3277 replace_stmt_with_simplification (gimple_stmt_iterator
*gsi
,
3278 code_helper rcode
, tree
*ops
,
3279 gimple_seq
*seq
, bool inplace
)
3281 gimple
*stmt
= gsi_stmt (*gsi
);
3283 /* Play safe and do not allow abnormals to be mentioned in
3284 newly created statements. See also maybe_push_res_to_seq.
3285 As an exception allow such uses if there was a use of the
3286 same SSA name on the old stmt. */
3287 if ((TREE_CODE (ops
[0]) == SSA_NAME
3288 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops
[0])
3289 && !has_use_on_stmt (ops
[0], stmt
))
3291 && TREE_CODE (ops
[1]) == SSA_NAME
3292 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops
[1])
3293 && !has_use_on_stmt (ops
[1], stmt
))
3295 && TREE_CODE (ops
[2]) == SSA_NAME
3296 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops
[2])
3297 && !has_use_on_stmt (ops
[2], stmt
)))
3300 /* Don't insert new statements when INPLACE is true, even if we could
3301 reuse STMT for the final statement. */
3302 if (inplace
&& !gimple_seq_empty_p (*seq
))
3305 if (gcond
*cond_stmt
= dyn_cast
<gcond
*> (stmt
))
3307 gcc_assert (rcode
.is_tree_code ());
3308 if (TREE_CODE_CLASS ((enum tree_code
)rcode
) == tcc_comparison
3309 /* GIMPLE_CONDs condition may not throw. */
3310 && (!flag_exceptions
3311 || !cfun
->can_throw_non_call_exceptions
3312 || !operation_could_trap_p (rcode
,
3313 FLOAT_TYPE_P (TREE_TYPE (ops
[0])),
3315 gimple_cond_set_condition (cond_stmt
, rcode
, ops
[0], ops
[1]);
3316 else if (rcode
== SSA_NAME
)
3317 gimple_cond_set_condition (cond_stmt
, NE_EXPR
, ops
[0],
3318 build_zero_cst (TREE_TYPE (ops
[0])));
3319 else if (rcode
== INTEGER_CST
)
3321 if (integer_zerop (ops
[0]))
3322 gimple_cond_make_false (cond_stmt
);
3324 gimple_cond_make_true (cond_stmt
);
3328 tree res
= maybe_push_res_to_seq (rcode
, boolean_type_node
,
3332 gimple_cond_set_condition (cond_stmt
, NE_EXPR
, res
,
3333 build_zero_cst (TREE_TYPE (res
)));
3337 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3339 fprintf (dump_file
, "gimple_simplified to ");
3340 if (!gimple_seq_empty_p (*seq
))
3341 print_gimple_seq (dump_file
, *seq
, 0, TDF_SLIM
);
3342 print_gimple_stmt (dump_file
, gsi_stmt (*gsi
),
3345 gsi_insert_seq_before (gsi
, *seq
, GSI_SAME_STMT
);
3348 else if (is_gimple_assign (stmt
)
3349 && rcode
.is_tree_code ())
3352 || gimple_num_ops (stmt
) > get_gimple_rhs_num_ops (rcode
))
3354 maybe_build_generic_op (rcode
,
3355 TREE_TYPE (gimple_assign_lhs (stmt
)),
3356 &ops
[0], ops
[1], ops
[2]);
3357 gimple_assign_set_rhs_with_ops (gsi
, rcode
, ops
[0], ops
[1], ops
[2]);
3358 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3360 fprintf (dump_file
, "gimple_simplified to ");
3361 if (!gimple_seq_empty_p (*seq
))
3362 print_gimple_seq (dump_file
, *seq
, 0, TDF_SLIM
);
3363 print_gimple_stmt (dump_file
, gsi_stmt (*gsi
),
3366 gsi_insert_seq_before (gsi
, *seq
, GSI_SAME_STMT
);
3370 else if (rcode
.is_fn_code ()
3371 && gimple_call_builtin_p (stmt
, rcode
))
3374 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
3376 gcc_assert (ops
[i
] != NULL_TREE
);
3377 gimple_call_set_arg (stmt
, i
, ops
[i
]);
3380 gcc_assert (ops
[i
] == NULL_TREE
);
3381 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3383 fprintf (dump_file
, "gimple_simplified to ");
3384 if (!gimple_seq_empty_p (*seq
))
3385 print_gimple_seq (dump_file
, *seq
, 0, TDF_SLIM
);
3386 print_gimple_stmt (dump_file
, gsi_stmt (*gsi
), 0, TDF_SLIM
);
3388 gsi_insert_seq_before (gsi
, *seq
, GSI_SAME_STMT
);
3393 if (gimple_has_lhs (stmt
))
3395 tree lhs
= gimple_get_lhs (stmt
);
3396 if (!maybe_push_res_to_seq (rcode
, TREE_TYPE (lhs
),
3399 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3401 fprintf (dump_file
, "gimple_simplified to ");
3402 print_gimple_seq (dump_file
, *seq
, 0, TDF_SLIM
);
3404 gsi_replace_with_seq_vops (gsi
, *seq
);
3414 /* Canonicalize MEM_REFs invariant address operand after propagation. */
3417 maybe_canonicalize_mem_ref_addr (tree
*t
)
3421 if (TREE_CODE (*t
) == ADDR_EXPR
)
3422 t
= &TREE_OPERAND (*t
, 0);
3424 while (handled_component_p (*t
))
3425 t
= &TREE_OPERAND (*t
, 0);
3427 /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
3428 of invariant addresses into a SSA name MEM_REF address. */
3429 if (TREE_CODE (*t
) == MEM_REF
3430 || TREE_CODE (*t
) == TARGET_MEM_REF
)
3432 tree addr
= TREE_OPERAND (*t
, 0);
3433 if (TREE_CODE (addr
) == ADDR_EXPR
3434 && (TREE_CODE (TREE_OPERAND (addr
, 0)) == MEM_REF
3435 || handled_component_p (TREE_OPERAND (addr
, 0))))
3438 HOST_WIDE_INT coffset
;
3439 base
= get_addr_base_and_unit_offset (TREE_OPERAND (addr
, 0),
3444 TREE_OPERAND (*t
, 0) = build_fold_addr_expr (base
);
3445 TREE_OPERAND (*t
, 1) = int_const_binop (PLUS_EXPR
,
3446 TREE_OPERAND (*t
, 1),
3447 size_int (coffset
));
3450 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t
, 0)) == DEBUG_EXPR_DECL
3451 || is_gimple_mem_ref_addr (TREE_OPERAND (*t
, 0)));
3454 /* Canonicalize back MEM_REFs to plain reference trees if the object
3455 accessed is a decl that has the same access semantics as the MEM_REF. */
3456 if (TREE_CODE (*t
) == MEM_REF
3457 && TREE_CODE (TREE_OPERAND (*t
, 0)) == ADDR_EXPR
3458 && integer_zerop (TREE_OPERAND (*t
, 1))
3459 && MR_DEPENDENCE_CLIQUE (*t
) == 0)
3461 tree decl
= TREE_OPERAND (TREE_OPERAND (*t
, 0), 0);
3462 tree alias_type
= TREE_TYPE (TREE_OPERAND (*t
, 1));
3463 if (/* Same volatile qualification. */
3464 TREE_THIS_VOLATILE (*t
) == TREE_THIS_VOLATILE (decl
)
3465 /* Same TBAA behavior with -fstrict-aliasing. */
3466 && !TYPE_REF_CAN_ALIAS_ALL (alias_type
)
3467 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl
))
3468 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type
)))
3469 /* Same alignment. */
3470 && TYPE_ALIGN (TREE_TYPE (decl
)) == TYPE_ALIGN (TREE_TYPE (*t
))
3471 /* We have to look out here to not drop a required conversion
3472 from the rhs to the lhs if *t appears on the lhs or vice-versa
3473 if it appears on the rhs. Thus require strict type
3475 && types_compatible_p (TREE_TYPE (*t
), TREE_TYPE (decl
)))
3477 *t
= TREE_OPERAND (TREE_OPERAND (*t
, 0), 0);
3482 /* Canonicalize TARGET_MEM_REF in particular with respect to
3483 the indexes becoming constant. */
3484 else if (TREE_CODE (*t
) == TARGET_MEM_REF
)
3486 tree tem
= maybe_fold_tmr (*t
);
3497 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
3498 distinguishes both cases. */
3501 fold_stmt_1 (gimple_stmt_iterator
*gsi
, bool inplace
, tree (*valueize
) (tree
))
3503 bool changed
= false;
3504 gimple
*stmt
= gsi_stmt (*gsi
);
3507 /* First do required canonicalization of [TARGET_]MEM_REF addresses
3509 ??? This shouldn't be done in generic folding but in the
3510 propagation helpers which also know whether an address was
3512 Also canonicalize operand order. */
3513 switch (gimple_code (stmt
))
3516 if (gimple_assign_rhs_class (stmt
) == GIMPLE_SINGLE_RHS
)
3518 tree
*rhs
= gimple_assign_rhs1_ptr (stmt
);
3519 if ((REFERENCE_CLASS_P (*rhs
)
3520 || TREE_CODE (*rhs
) == ADDR_EXPR
)
3521 && maybe_canonicalize_mem_ref_addr (rhs
))
3523 tree
*lhs
= gimple_assign_lhs_ptr (stmt
);
3524 if (REFERENCE_CLASS_P (*lhs
)
3525 && maybe_canonicalize_mem_ref_addr (lhs
))
3530 /* Canonicalize operand order. */
3531 enum tree_code code
= gimple_assign_rhs_code (stmt
);
3532 if (TREE_CODE_CLASS (code
) == tcc_comparison
3533 || commutative_tree_code (code
)
3534 || commutative_ternary_tree_code (code
))
3536 tree rhs1
= gimple_assign_rhs1 (stmt
);
3537 tree rhs2
= gimple_assign_rhs2 (stmt
);
3538 if (tree_swap_operands_p (rhs1
, rhs2
, false))
3540 gimple_assign_set_rhs1 (stmt
, rhs2
);
3541 gimple_assign_set_rhs2 (stmt
, rhs1
);
3542 if (TREE_CODE_CLASS (code
) == tcc_comparison
)
3543 gimple_assign_set_rhs_code (stmt
,
3544 swap_tree_comparison (code
));
3552 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
3554 tree
*arg
= gimple_call_arg_ptr (stmt
, i
);
3555 if (REFERENCE_CLASS_P (*arg
)
3556 && maybe_canonicalize_mem_ref_addr (arg
))
3559 tree
*lhs
= gimple_call_lhs_ptr (stmt
);
3561 && REFERENCE_CLASS_P (*lhs
)
3562 && maybe_canonicalize_mem_ref_addr (lhs
))
3568 gasm
*asm_stmt
= as_a
<gasm
*> (stmt
);
3569 for (i
= 0; i
< gimple_asm_noutputs (asm_stmt
); ++i
)
3571 tree link
= gimple_asm_output_op (asm_stmt
, i
);
3572 tree op
= TREE_VALUE (link
);
3573 if (REFERENCE_CLASS_P (op
)
3574 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link
)))
3577 for (i
= 0; i
< gimple_asm_ninputs (asm_stmt
); ++i
)
3579 tree link
= gimple_asm_input_op (asm_stmt
, i
);
3580 tree op
= TREE_VALUE (link
);
3581 if ((REFERENCE_CLASS_P (op
)
3582 || TREE_CODE (op
) == ADDR_EXPR
)
3583 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link
)))
3589 if (gimple_debug_bind_p (stmt
))
3591 tree
*val
= gimple_debug_bind_get_value_ptr (stmt
);
3593 && (REFERENCE_CLASS_P (*val
)
3594 || TREE_CODE (*val
) == ADDR_EXPR
)
3595 && maybe_canonicalize_mem_ref_addr (val
))
3601 /* Canonicalize operand order. */
3602 tree lhs
= gimple_cond_lhs (stmt
);
3603 tree rhs
= gimple_cond_rhs (stmt
);
3604 if (tree_swap_operands_p (lhs
, rhs
, false))
3606 gcond
*gc
= as_a
<gcond
*> (stmt
);
3607 gimple_cond_set_lhs (gc
, rhs
);
3608 gimple_cond_set_rhs (gc
, lhs
);
3609 gimple_cond_set_code (gc
,
3610 swap_tree_comparison (gimple_cond_code (gc
)));
3617 /* Dispatch to pattern-based folding. */
3619 || is_gimple_assign (stmt
)
3620 || gimple_code (stmt
) == GIMPLE_COND
)
3622 gimple_seq seq
= NULL
;
3625 if (gimple_simplify (stmt
, &rcode
, ops
, inplace
? NULL
: &seq
,
3626 valueize
, valueize
))
3628 if (replace_stmt_with_simplification (gsi
, rcode
, ops
, &seq
, inplace
))
3631 gimple_seq_discard (seq
);
3635 stmt
= gsi_stmt (*gsi
);
3637 /* Fold the main computation performed by the statement. */
3638 switch (gimple_code (stmt
))
3642 /* Try to canonicalize for boolean-typed X the comparisons
3643 X == 0, X == 1, X != 0, and X != 1. */
3644 if (gimple_assign_rhs_code (stmt
) == EQ_EXPR
3645 || gimple_assign_rhs_code (stmt
) == NE_EXPR
)
3647 tree lhs
= gimple_assign_lhs (stmt
);
3648 tree op1
= gimple_assign_rhs1 (stmt
);
3649 tree op2
= gimple_assign_rhs2 (stmt
);
3650 tree type
= TREE_TYPE (op1
);
3652 /* Check whether the comparison operands are of the same boolean
3653 type as the result type is.
3654 Check that second operand is an integer-constant with value
3656 if (TREE_CODE (op2
) == INTEGER_CST
3657 && (integer_zerop (op2
) || integer_onep (op2
))
3658 && useless_type_conversion_p (TREE_TYPE (lhs
), type
))
3660 enum tree_code cmp_code
= gimple_assign_rhs_code (stmt
);
3661 bool is_logical_not
= false;
3663 /* X == 0 and X != 1 is a logical-not.of X
3664 X == 1 and X != 0 is X */
3665 if ((cmp_code
== EQ_EXPR
&& integer_zerop (op2
))
3666 || (cmp_code
== NE_EXPR
&& integer_onep (op2
)))
3667 is_logical_not
= true;
3669 if (is_logical_not
== false)
3670 gimple_assign_set_rhs_with_ops (gsi
, TREE_CODE (op1
), op1
);
3671 /* Only for one-bit precision typed X the transformation
3672 !X -> ~X is valied. */
3673 else if (TYPE_PRECISION (type
) == 1)
3674 gimple_assign_set_rhs_with_ops (gsi
, BIT_NOT_EXPR
, op1
);
3675 /* Otherwise we use !X -> X ^ 1. */
3677 gimple_assign_set_rhs_with_ops (gsi
, BIT_XOR_EXPR
, op1
,
3678 build_int_cst (type
, 1));
3684 unsigned old_num_ops
= gimple_num_ops (stmt
);
3685 tree lhs
= gimple_assign_lhs (stmt
);
3686 tree new_rhs
= fold_gimple_assign (gsi
);
3688 && !useless_type_conversion_p (TREE_TYPE (lhs
),
3689 TREE_TYPE (new_rhs
)))
3690 new_rhs
= fold_convert (TREE_TYPE (lhs
), new_rhs
);
3693 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs
)) < old_num_ops
))
3695 gimple_assign_set_rhs_from_tree (gsi
, new_rhs
);
3702 changed
|= gimple_fold_call (gsi
, inplace
);
3706 /* Fold *& in asm operands. */
3708 gasm
*asm_stmt
= as_a
<gasm
*> (stmt
);
3710 const char **oconstraints
;
3711 const char *constraint
;
3712 bool allows_mem
, allows_reg
;
3714 noutputs
= gimple_asm_noutputs (asm_stmt
);
3715 oconstraints
= XALLOCAVEC (const char *, noutputs
);
3717 for (i
= 0; i
< gimple_asm_noutputs (asm_stmt
); ++i
)
3719 tree link
= gimple_asm_output_op (asm_stmt
, i
);
3720 tree op
= TREE_VALUE (link
);
3722 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link
)));
3723 if (REFERENCE_CLASS_P (op
)
3724 && (op
= maybe_fold_reference (op
, true)) != NULL_TREE
)
3726 TREE_VALUE (link
) = op
;
3730 for (i
= 0; i
< gimple_asm_ninputs (asm_stmt
); ++i
)
3732 tree link
= gimple_asm_input_op (asm_stmt
, i
);
3733 tree op
= TREE_VALUE (link
);
3735 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link
)));
3736 parse_input_constraint (&constraint
, 0, 0, noutputs
, 0,
3737 oconstraints
, &allows_mem
, &allows_reg
);
3738 if (REFERENCE_CLASS_P (op
)
3739 && (op
= maybe_fold_reference (op
, !allows_reg
&& allows_mem
))
3742 TREE_VALUE (link
) = op
;
3750 if (gimple_debug_bind_p (stmt
))
3752 tree val
= gimple_debug_bind_get_value (stmt
);
3754 && REFERENCE_CLASS_P (val
))
3756 tree tem
= maybe_fold_reference (val
, false);
3759 gimple_debug_bind_set_value (stmt
, tem
);
3764 && TREE_CODE (val
) == ADDR_EXPR
)
3766 tree ref
= TREE_OPERAND (val
, 0);
3767 tree tem
= maybe_fold_reference (ref
, false);
3770 tem
= build_fold_addr_expr_with_type (tem
, TREE_TYPE (val
));
3771 gimple_debug_bind_set_value (stmt
, tem
);
3781 stmt
= gsi_stmt (*gsi
);
3783 /* Fold *& on the lhs. */
3784 if (gimple_has_lhs (stmt
))
3786 tree lhs
= gimple_get_lhs (stmt
);
3787 if (lhs
&& REFERENCE_CLASS_P (lhs
))
3789 tree new_lhs
= maybe_fold_reference (lhs
, true);
3792 gimple_set_lhs (stmt
, new_lhs
);
3801 /* Valueziation callback that ends up not following SSA edges. */
3804 no_follow_ssa_edges (tree
)
3809 /* Valueization callback that ends up following single-use SSA edges only. */
3812 follow_single_use_edges (tree val
)
3814 if (TREE_CODE (val
) == SSA_NAME
3815 && !has_single_use (val
))
3820 /* Fold the statement pointed to by GSI. In some cases, this function may
3821 replace the whole statement with a new one. Returns true iff folding
3823 The statement pointed to by GSI should be in valid gimple form but may
3824 be in unfolded state as resulting from for example constant propagation
3825 which can produce *&x = 0. */
3828 fold_stmt (gimple_stmt_iterator
*gsi
)
3830 return fold_stmt_1 (gsi
, false, no_follow_ssa_edges
);
3834 fold_stmt (gimple_stmt_iterator
*gsi
, tree (*valueize
) (tree
))
3836 return fold_stmt_1 (gsi
, false, valueize
);
3839 /* Perform the minimal folding on statement *GSI. Only operations like
3840 *&x created by constant propagation are handled. The statement cannot
3841 be replaced with a new one. Return true if the statement was
3842 changed, false otherwise.
3843 The statement *GSI should be in valid gimple form but may
3844 be in unfolded state as resulting from for example constant propagation
3845 which can produce *&x = 0. */
3848 fold_stmt_inplace (gimple_stmt_iterator
*gsi
)
3850 gimple
*stmt
= gsi_stmt (*gsi
);
3851 bool changed
= fold_stmt_1 (gsi
, true, no_follow_ssa_edges
);
3852 gcc_assert (gsi_stmt (*gsi
) == stmt
);
3856 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
3857 if EXPR is null or we don't know how.
3858 If non-null, the result always has boolean type. */
3861 canonicalize_bool (tree expr
, bool invert
)
3867 if (integer_nonzerop (expr
))
3868 return boolean_false_node
;
3869 else if (integer_zerop (expr
))
3870 return boolean_true_node
;
3871 else if (TREE_CODE (expr
) == SSA_NAME
)
3872 return fold_build2 (EQ_EXPR
, boolean_type_node
, expr
,
3873 build_int_cst (TREE_TYPE (expr
), 0));
3874 else if (COMPARISON_CLASS_P (expr
))
3875 return fold_build2 (invert_tree_comparison (TREE_CODE (expr
), false),
3877 TREE_OPERAND (expr
, 0),
3878 TREE_OPERAND (expr
, 1));
3884 if (TREE_CODE (TREE_TYPE (expr
)) == BOOLEAN_TYPE
)
3886 if (integer_nonzerop (expr
))
3887 return boolean_true_node
;
3888 else if (integer_zerop (expr
))
3889 return boolean_false_node
;
3890 else if (TREE_CODE (expr
) == SSA_NAME
)
3891 return fold_build2 (NE_EXPR
, boolean_type_node
, expr
,
3892 build_int_cst (TREE_TYPE (expr
), 0));
3893 else if (COMPARISON_CLASS_P (expr
))
3894 return fold_build2 (TREE_CODE (expr
),
3896 TREE_OPERAND (expr
, 0),
3897 TREE_OPERAND (expr
, 1));
3903 /* Check to see if a boolean expression EXPR is logically equivalent to the
3904 comparison (OP1 CODE OP2). Check for various identities involving
3908 same_bool_comparison_p (const_tree expr
, enum tree_code code
,
3909 const_tree op1
, const_tree op2
)
3913 /* The obvious case. */
3914 if (TREE_CODE (expr
) == code
3915 && operand_equal_p (TREE_OPERAND (expr
, 0), op1
, 0)
3916 && operand_equal_p (TREE_OPERAND (expr
, 1), op2
, 0))
3919 /* Check for comparing (name, name != 0) and the case where expr
3920 is an SSA_NAME with a definition matching the comparison. */
3921 if (TREE_CODE (expr
) == SSA_NAME
3922 && TREE_CODE (TREE_TYPE (expr
)) == BOOLEAN_TYPE
)
3924 if (operand_equal_p (expr
, op1
, 0))
3925 return ((code
== NE_EXPR
&& integer_zerop (op2
))
3926 || (code
== EQ_EXPR
&& integer_nonzerop (op2
)));
3927 s
= SSA_NAME_DEF_STMT (expr
);
3928 if (is_gimple_assign (s
)
3929 && gimple_assign_rhs_code (s
) == code
3930 && operand_equal_p (gimple_assign_rhs1 (s
), op1
, 0)
3931 && operand_equal_p (gimple_assign_rhs2 (s
), op2
, 0))
3935 /* If op1 is of the form (name != 0) or (name == 0), and the definition
3936 of name is a comparison, recurse. */
3937 if (TREE_CODE (op1
) == SSA_NAME
3938 && TREE_CODE (TREE_TYPE (op1
)) == BOOLEAN_TYPE
)
3940 s
= SSA_NAME_DEF_STMT (op1
);
3941 if (is_gimple_assign (s
)
3942 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
)
3944 enum tree_code c
= gimple_assign_rhs_code (s
);
3945 if ((c
== NE_EXPR
&& integer_zerop (op2
))
3946 || (c
== EQ_EXPR
&& integer_nonzerop (op2
)))
3947 return same_bool_comparison_p (expr
, c
,
3948 gimple_assign_rhs1 (s
),
3949 gimple_assign_rhs2 (s
));
3950 if ((c
== EQ_EXPR
&& integer_zerop (op2
))
3951 || (c
== NE_EXPR
&& integer_nonzerop (op2
)))
3952 return same_bool_comparison_p (expr
,
3953 invert_tree_comparison (c
, false),
3954 gimple_assign_rhs1 (s
),
3955 gimple_assign_rhs2 (s
));
3961 /* Check to see if two boolean expressions OP1 and OP2 are logically
3965 same_bool_result_p (const_tree op1
, const_tree op2
)
3967 /* Simple cases first. */
3968 if (operand_equal_p (op1
, op2
, 0))
3971 /* Check the cases where at least one of the operands is a comparison.
3972 These are a bit smarter than operand_equal_p in that they apply some
3973 identifies on SSA_NAMEs. */
3974 if (COMPARISON_CLASS_P (op2
)
3975 && same_bool_comparison_p (op1
, TREE_CODE (op2
),
3976 TREE_OPERAND (op2
, 0),
3977 TREE_OPERAND (op2
, 1)))
3979 if (COMPARISON_CLASS_P (op1
)
3980 && same_bool_comparison_p (op2
, TREE_CODE (op1
),
3981 TREE_OPERAND (op1
, 0),
3982 TREE_OPERAND (op1
, 1)))
3989 /* Forward declarations for some mutually recursive functions. */
3992 and_comparisons_1 (enum tree_code code1
, tree op1a
, tree op1b
,
3993 enum tree_code code2
, tree op2a
, tree op2b
);
3995 and_var_with_comparison (tree var
, bool invert
,
3996 enum tree_code code2
, tree op2a
, tree op2b
);
3998 and_var_with_comparison_1 (gimple
*stmt
,
3999 enum tree_code code2
, tree op2a
, tree op2b
);
4001 or_comparisons_1 (enum tree_code code1
, tree op1a
, tree op1b
,
4002 enum tree_code code2
, tree op2a
, tree op2b
);
4004 or_var_with_comparison (tree var
, bool invert
,
4005 enum tree_code code2
, tree op2a
, tree op2b
);
4007 or_var_with_comparison_1 (gimple
*stmt
,
4008 enum tree_code code2
, tree op2a
, tree op2b
);
4010 /* Helper function for and_comparisons_1: try to simplify the AND of the
4011 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
4012 If INVERT is true, invert the value of the VAR before doing the AND.
4013 Return NULL_EXPR if we can't simplify this to a single expression. */
4016 and_var_with_comparison (tree var
, bool invert
,
4017 enum tree_code code2
, tree op2a
, tree op2b
)
4020 gimple
*stmt
= SSA_NAME_DEF_STMT (var
);
4022 /* We can only deal with variables whose definitions are assignments. */
4023 if (!is_gimple_assign (stmt
))
4026 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
4027 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
4028 Then we only have to consider the simpler non-inverted cases. */
4030 t
= or_var_with_comparison_1 (stmt
,
4031 invert_tree_comparison (code2
, false),
4034 t
= and_var_with_comparison_1 (stmt
, code2
, op2a
, op2b
);
4035 return canonicalize_bool (t
, invert
);
4038 /* Try to simplify the AND of the ssa variable defined by the assignment
4039 STMT with the comparison specified by (OP2A CODE2 OP2B).
4040 Return NULL_EXPR if we can't simplify this to a single expression. */
4043 and_var_with_comparison_1 (gimple
*stmt
,
4044 enum tree_code code2
, tree op2a
, tree op2b
)
4046 tree var
= gimple_assign_lhs (stmt
);
4047 tree true_test_var
= NULL_TREE
;
4048 tree false_test_var
= NULL_TREE
;
4049 enum tree_code innercode
= gimple_assign_rhs_code (stmt
);
4051 /* Check for identities like (var AND (var == 0)) => false. */
4052 if (TREE_CODE (op2a
) == SSA_NAME
4053 && TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
)
4055 if ((code2
== NE_EXPR
&& integer_zerop (op2b
))
4056 || (code2
== EQ_EXPR
&& integer_nonzerop (op2b
)))
4058 true_test_var
= op2a
;
4059 if (var
== true_test_var
)
4062 else if ((code2
== EQ_EXPR
&& integer_zerop (op2b
))
4063 || (code2
== NE_EXPR
&& integer_nonzerop (op2b
)))
4065 false_test_var
= op2a
;
4066 if (var
== false_test_var
)
4067 return boolean_false_node
;
4071 /* If the definition is a comparison, recurse on it. */
4072 if (TREE_CODE_CLASS (innercode
) == tcc_comparison
)
4074 tree t
= and_comparisons_1 (innercode
,
4075 gimple_assign_rhs1 (stmt
),
4076 gimple_assign_rhs2 (stmt
),
4084 /* If the definition is an AND or OR expression, we may be able to
4085 simplify by reassociating. */
4086 if (TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
4087 && (innercode
== BIT_AND_EXPR
|| innercode
== BIT_IOR_EXPR
))
4089 tree inner1
= gimple_assign_rhs1 (stmt
);
4090 tree inner2
= gimple_assign_rhs2 (stmt
);
4093 tree partial
= NULL_TREE
;
4094 bool is_and
= (innercode
== BIT_AND_EXPR
);
4096 /* Check for boolean identities that don't require recursive examination
4098 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
4099 inner1 AND (inner1 OR inner2) => inner1
4100 !inner1 AND (inner1 AND inner2) => false
4101 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
4102 Likewise for similar cases involving inner2. */
4103 if (inner1
== true_test_var
)
4104 return (is_and
? var
: inner1
);
4105 else if (inner2
== true_test_var
)
4106 return (is_and
? var
: inner2
);
4107 else if (inner1
== false_test_var
)
4109 ? boolean_false_node
4110 : and_var_with_comparison (inner2
, false, code2
, op2a
, op2b
));
4111 else if (inner2
== false_test_var
)
4113 ? boolean_false_node
4114 : and_var_with_comparison (inner1
, false, code2
, op2a
, op2b
));
4116 /* Next, redistribute/reassociate the AND across the inner tests.
4117 Compute the first partial result, (inner1 AND (op2a code op2b)) */
4118 if (TREE_CODE (inner1
) == SSA_NAME
4119 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner1
))
4120 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
4121 && (t
= maybe_fold_and_comparisons (gimple_assign_rhs_code (s
),
4122 gimple_assign_rhs1 (s
),
4123 gimple_assign_rhs2 (s
),
4124 code2
, op2a
, op2b
)))
4126 /* Handle the AND case, where we are reassociating:
4127 (inner1 AND inner2) AND (op2a code2 op2b)
4129 If the partial result t is a constant, we win. Otherwise
4130 continue on to try reassociating with the other inner test. */
4133 if (integer_onep (t
))
4135 else if (integer_zerop (t
))
4136 return boolean_false_node
;
4139 /* Handle the OR case, where we are redistributing:
4140 (inner1 OR inner2) AND (op2a code2 op2b)
4141 => (t OR (inner2 AND (op2a code2 op2b))) */
4142 else if (integer_onep (t
))
4143 return boolean_true_node
;
4145 /* Save partial result for later. */
4149 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
4150 if (TREE_CODE (inner2
) == SSA_NAME
4151 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner2
))
4152 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
4153 && (t
= maybe_fold_and_comparisons (gimple_assign_rhs_code (s
),
4154 gimple_assign_rhs1 (s
),
4155 gimple_assign_rhs2 (s
),
4156 code2
, op2a
, op2b
)))
4158 /* Handle the AND case, where we are reassociating:
4159 (inner1 AND inner2) AND (op2a code2 op2b)
4160 => (inner1 AND t) */
4163 if (integer_onep (t
))
4165 else if (integer_zerop (t
))
4166 return boolean_false_node
;
4167 /* If both are the same, we can apply the identity
4169 else if (partial
&& same_bool_result_p (t
, partial
))
4173 /* Handle the OR case. where we are redistributing:
4174 (inner1 OR inner2) AND (op2a code2 op2b)
4175 => (t OR (inner1 AND (op2a code2 op2b)))
4176 => (t OR partial) */
4179 if (integer_onep (t
))
4180 return boolean_true_node
;
4183 /* We already got a simplification for the other
4184 operand to the redistributed OR expression. The
4185 interesting case is when at least one is false.
4186 Or, if both are the same, we can apply the identity
4188 if (integer_zerop (partial
))
4190 else if (integer_zerop (t
))
4192 else if (same_bool_result_p (t
, partial
))
4201 /* Try to simplify the AND of two comparisons defined by
4202 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4203 If this can be done without constructing an intermediate value,
4204 return the resulting tree; otherwise NULL_TREE is returned.
4205 This function is deliberately asymmetric as it recurses on SSA_DEFs
4206 in the first comparison but not the second. */
4209 and_comparisons_1 (enum tree_code code1
, tree op1a
, tree op1b
,
4210 enum tree_code code2
, tree op2a
, tree op2b
)
4212 tree truth_type
= truth_type_for (TREE_TYPE (op1a
));
4214 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
4215 if (operand_equal_p (op1a
, op2a
, 0)
4216 && operand_equal_p (op1b
, op2b
, 0))
4218 /* Result will be either NULL_TREE, or a combined comparison. */
4219 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
4220 TRUTH_ANDIF_EXPR
, code1
, code2
,
4221 truth_type
, op1a
, op1b
);
4226 /* Likewise the swapped case of the above. */
4227 if (operand_equal_p (op1a
, op2b
, 0)
4228 && operand_equal_p (op1b
, op2a
, 0))
4230 /* Result will be either NULL_TREE, or a combined comparison. */
4231 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
4232 TRUTH_ANDIF_EXPR
, code1
,
4233 swap_tree_comparison (code2
),
4234 truth_type
, op1a
, op1b
);
4239 /* If both comparisons are of the same value against constants, we might
4240 be able to merge them. */
4241 if (operand_equal_p (op1a
, op2a
, 0)
4242 && TREE_CODE (op1b
) == INTEGER_CST
4243 && TREE_CODE (op2b
) == INTEGER_CST
)
4245 int cmp
= tree_int_cst_compare (op1b
, op2b
);
4247 /* If we have (op1a == op1b), we should either be able to
4248 return that or FALSE, depending on whether the constant op1b
4249 also satisfies the other comparison against op2b. */
4250 if (code1
== EQ_EXPR
)
4256 case EQ_EXPR
: val
= (cmp
== 0); break;
4257 case NE_EXPR
: val
= (cmp
!= 0); break;
4258 case LT_EXPR
: val
= (cmp
< 0); break;
4259 case GT_EXPR
: val
= (cmp
> 0); break;
4260 case LE_EXPR
: val
= (cmp
<= 0); break;
4261 case GE_EXPR
: val
= (cmp
>= 0); break;
4262 default: done
= false;
4267 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
4269 return boolean_false_node
;
4272 /* Likewise if the second comparison is an == comparison. */
4273 else if (code2
== EQ_EXPR
)
4279 case EQ_EXPR
: val
= (cmp
== 0); break;
4280 case NE_EXPR
: val
= (cmp
!= 0); break;
4281 case LT_EXPR
: val
= (cmp
> 0); break;
4282 case GT_EXPR
: val
= (cmp
< 0); break;
4283 case LE_EXPR
: val
= (cmp
>= 0); break;
4284 case GE_EXPR
: val
= (cmp
<= 0); break;
4285 default: done
= false;
4290 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
4292 return boolean_false_node
;
4296 /* Same business with inequality tests. */
4297 else if (code1
== NE_EXPR
)
4302 case EQ_EXPR
: val
= (cmp
!= 0); break;
4303 case NE_EXPR
: val
= (cmp
== 0); break;
4304 case LT_EXPR
: val
= (cmp
>= 0); break;
4305 case GT_EXPR
: val
= (cmp
<= 0); break;
4306 case LE_EXPR
: val
= (cmp
> 0); break;
4307 case GE_EXPR
: val
= (cmp
< 0); break;
4312 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
4314 else if (code2
== NE_EXPR
)
4319 case EQ_EXPR
: val
= (cmp
== 0); break;
4320 case NE_EXPR
: val
= (cmp
!= 0); break;
4321 case LT_EXPR
: val
= (cmp
<= 0); break;
4322 case GT_EXPR
: val
= (cmp
>= 0); break;
4323 case LE_EXPR
: val
= (cmp
< 0); break;
4324 case GE_EXPR
: val
= (cmp
> 0); break;
4329 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
4332 /* Chose the more restrictive of two < or <= comparisons. */
4333 else if ((code1
== LT_EXPR
|| code1
== LE_EXPR
)
4334 && (code2
== LT_EXPR
|| code2
== LE_EXPR
))
4336 if ((cmp
< 0) || (cmp
== 0 && code1
== LT_EXPR
))
4337 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
4339 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
4342 /* Likewise chose the more restrictive of two > or >= comparisons. */
4343 else if ((code1
== GT_EXPR
|| code1
== GE_EXPR
)
4344 && (code2
== GT_EXPR
|| code2
== GE_EXPR
))
4346 if ((cmp
> 0) || (cmp
== 0 && code1
== GT_EXPR
))
4347 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
4349 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
4352 /* Check for singleton ranges. */
4354 && ((code1
== LE_EXPR
&& code2
== GE_EXPR
)
4355 || (code1
== GE_EXPR
&& code2
== LE_EXPR
)))
4356 return fold_build2 (EQ_EXPR
, boolean_type_node
, op1a
, op2b
);
4358 /* Check for disjoint ranges. */
4360 && (code1
== LT_EXPR
|| code1
== LE_EXPR
)
4361 && (code2
== GT_EXPR
|| code2
== GE_EXPR
))
4362 return boolean_false_node
;
4364 && (code1
== GT_EXPR
|| code1
== GE_EXPR
)
4365 && (code2
== LT_EXPR
|| code2
== LE_EXPR
))
4366 return boolean_false_node
;
4369 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
4370 NAME's definition is a truth value. See if there are any simplifications
4371 that can be done against the NAME's definition. */
4372 if (TREE_CODE (op1a
) == SSA_NAME
4373 && (code1
== NE_EXPR
|| code1
== EQ_EXPR
)
4374 && (integer_zerop (op1b
) || integer_onep (op1b
)))
4376 bool invert
= ((code1
== EQ_EXPR
&& integer_zerop (op1b
))
4377 || (code1
== NE_EXPR
&& integer_onep (op1b
)));
4378 gimple
*stmt
= SSA_NAME_DEF_STMT (op1a
);
4379 switch (gimple_code (stmt
))
4382 /* Try to simplify by copy-propagating the definition. */
4383 return and_var_with_comparison (op1a
, invert
, code2
, op2a
, op2b
);
4386 /* If every argument to the PHI produces the same result when
4387 ANDed with the second comparison, we win.
4388 Do not do this unless the type is bool since we need a bool
4389 result here anyway. */
4390 if (TREE_CODE (TREE_TYPE (op1a
)) == BOOLEAN_TYPE
)
4392 tree result
= NULL_TREE
;
4394 for (i
= 0; i
< gimple_phi_num_args (stmt
); i
++)
4396 tree arg
= gimple_phi_arg_def (stmt
, i
);
4398 /* If this PHI has itself as an argument, ignore it.
4399 If all the other args produce the same result,
4401 if (arg
== gimple_phi_result (stmt
))
4403 else if (TREE_CODE (arg
) == INTEGER_CST
)
4405 if (invert
? integer_nonzerop (arg
) : integer_zerop (arg
))
4408 result
= boolean_false_node
;
4409 else if (!integer_zerop (result
))
4413 result
= fold_build2 (code2
, boolean_type_node
,
4415 else if (!same_bool_comparison_p (result
,
4419 else if (TREE_CODE (arg
) == SSA_NAME
4420 && !SSA_NAME_IS_DEFAULT_DEF (arg
))
4423 gimple
*def_stmt
= SSA_NAME_DEF_STMT (arg
);
4424 /* In simple cases we can look through PHI nodes,
4425 but we have to be careful with loops.
4427 if (! dom_info_available_p (CDI_DOMINATORS
)
4428 || gimple_bb (def_stmt
) == gimple_bb (stmt
)
4429 || dominated_by_p (CDI_DOMINATORS
,
4430 gimple_bb (def_stmt
),
4433 temp
= and_var_with_comparison (arg
, invert
, code2
,
4439 else if (!same_bool_result_p (result
, temp
))
4455 /* Try to simplify the AND of two comparisons, specified by
4456 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
4457 If this can be simplified to a single expression (without requiring
4458 introducing more SSA variables to hold intermediate values),
4459 return the resulting tree. Otherwise return NULL_TREE.
4460 If the result expression is non-null, it has boolean type. */
4463 maybe_fold_and_comparisons (enum tree_code code1
, tree op1a
, tree op1b
,
4464 enum tree_code code2
, tree op2a
, tree op2b
)
4466 tree t
= and_comparisons_1 (code1
, op1a
, op1b
, code2
, op2a
, op2b
);
4470 return and_comparisons_1 (code2
, op2a
, op2b
, code1
, op1a
, op1b
);
4473 /* Helper function for or_comparisons_1: try to simplify the OR of the
4474 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
4475 If INVERT is true, invert the value of VAR before doing the OR.
4476 Return NULL_EXPR if we can't simplify this to a single expression. */
4479 or_var_with_comparison (tree var
, bool invert
,
4480 enum tree_code code2
, tree op2a
, tree op2b
)
4483 gimple
*stmt
= SSA_NAME_DEF_STMT (var
);
4485 /* We can only deal with variables whose definitions are assignments. */
4486 if (!is_gimple_assign (stmt
))
4489 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
4490 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
4491 Then we only have to consider the simpler non-inverted cases. */
4493 t
= and_var_with_comparison_1 (stmt
,
4494 invert_tree_comparison (code2
, false),
4497 t
= or_var_with_comparison_1 (stmt
, code2
, op2a
, op2b
);
4498 return canonicalize_bool (t
, invert
);
4501 /* Try to simplify the OR of the ssa variable defined by the assignment
4502 STMT with the comparison specified by (OP2A CODE2 OP2B).
4503 Return NULL_EXPR if we can't simplify this to a single expression. */
4506 or_var_with_comparison_1 (gimple
*stmt
,
4507 enum tree_code code2
, tree op2a
, tree op2b
)
4509 tree var
= gimple_assign_lhs (stmt
);
4510 tree true_test_var
= NULL_TREE
;
4511 tree false_test_var
= NULL_TREE
;
4512 enum tree_code innercode
= gimple_assign_rhs_code (stmt
);
4514 /* Check for identities like (var OR (var != 0)) => true . */
4515 if (TREE_CODE (op2a
) == SSA_NAME
4516 && TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
)
4518 if ((code2
== NE_EXPR
&& integer_zerop (op2b
))
4519 || (code2
== EQ_EXPR
&& integer_nonzerop (op2b
)))
4521 true_test_var
= op2a
;
4522 if (var
== true_test_var
)
4525 else if ((code2
== EQ_EXPR
&& integer_zerop (op2b
))
4526 || (code2
== NE_EXPR
&& integer_nonzerop (op2b
)))
4528 false_test_var
= op2a
;
4529 if (var
== false_test_var
)
4530 return boolean_true_node
;
4534 /* If the definition is a comparison, recurse on it. */
4535 if (TREE_CODE_CLASS (innercode
) == tcc_comparison
)
4537 tree t
= or_comparisons_1 (innercode
,
4538 gimple_assign_rhs1 (stmt
),
4539 gimple_assign_rhs2 (stmt
),
4547 /* If the definition is an AND or OR expression, we may be able to
4548 simplify by reassociating. */
4549 if (TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
4550 && (innercode
== BIT_AND_EXPR
|| innercode
== BIT_IOR_EXPR
))
4552 tree inner1
= gimple_assign_rhs1 (stmt
);
4553 tree inner2
= gimple_assign_rhs2 (stmt
);
4556 tree partial
= NULL_TREE
;
4557 bool is_or
= (innercode
== BIT_IOR_EXPR
);
4559 /* Check for boolean identities that don't require recursive examination
4561 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
4562 inner1 OR (inner1 AND inner2) => inner1
4563 !inner1 OR (inner1 OR inner2) => true
4564 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
4566 if (inner1
== true_test_var
)
4567 return (is_or
? var
: inner1
);
4568 else if (inner2
== true_test_var
)
4569 return (is_or
? var
: inner2
);
4570 else if (inner1
== false_test_var
)
4573 : or_var_with_comparison (inner2
, false, code2
, op2a
, op2b
));
4574 else if (inner2
== false_test_var
)
4577 : or_var_with_comparison (inner1
, false, code2
, op2a
, op2b
));
4579 /* Next, redistribute/reassociate the OR across the inner tests.
4580 Compute the first partial result, (inner1 OR (op2a code op2b)) */
4581 if (TREE_CODE (inner1
) == SSA_NAME
4582 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner1
))
4583 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
4584 && (t
= maybe_fold_or_comparisons (gimple_assign_rhs_code (s
),
4585 gimple_assign_rhs1 (s
),
4586 gimple_assign_rhs2 (s
),
4587 code2
, op2a
, op2b
)))
4589 /* Handle the OR case, where we are reassociating:
4590 (inner1 OR inner2) OR (op2a code2 op2b)
4592 If the partial result t is a constant, we win. Otherwise
4593 continue on to try reassociating with the other inner test. */
4596 if (integer_onep (t
))
4597 return boolean_true_node
;
4598 else if (integer_zerop (t
))
4602 /* Handle the AND case, where we are redistributing:
4603 (inner1 AND inner2) OR (op2a code2 op2b)
4604 => (t AND (inner2 OR (op2a code op2b))) */
4605 else if (integer_zerop (t
))
4606 return boolean_false_node
;
4608 /* Save partial result for later. */
4612 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
4613 if (TREE_CODE (inner2
) == SSA_NAME
4614 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner2
))
4615 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
4616 && (t
= maybe_fold_or_comparisons (gimple_assign_rhs_code (s
),
4617 gimple_assign_rhs1 (s
),
4618 gimple_assign_rhs2 (s
),
4619 code2
, op2a
, op2b
)))
4621 /* Handle the OR case, where we are reassociating:
4622 (inner1 OR inner2) OR (op2a code2 op2b)
4624 => (t OR partial) */
4627 if (integer_zerop (t
))
4629 else if (integer_onep (t
))
4630 return boolean_true_node
;
4631 /* If both are the same, we can apply the identity
4633 else if (partial
&& same_bool_result_p (t
, partial
))
4637 /* Handle the AND case, where we are redistributing:
4638 (inner1 AND inner2) OR (op2a code2 op2b)
4639 => (t AND (inner1 OR (op2a code2 op2b)))
4640 => (t AND partial) */
4643 if (integer_zerop (t
))
4644 return boolean_false_node
;
4647 /* We already got a simplification for the other
4648 operand to the redistributed AND expression. The
4649 interesting case is when at least one is true.
4650 Or, if both are the same, we can apply the identity
4652 if (integer_onep (partial
))
4654 else if (integer_onep (t
))
4656 else if (same_bool_result_p (t
, partial
))
4665 /* Try to simplify the OR of two comparisons defined by
4666 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4667 If this can be done without constructing an intermediate value,
4668 return the resulting tree; otherwise NULL_TREE is returned.
4669 This function is deliberately asymmetric as it recurses on SSA_DEFs
4670 in the first comparison but not the second. */
4673 or_comparisons_1 (enum tree_code code1
, tree op1a
, tree op1b
,
4674 enum tree_code code2
, tree op2a
, tree op2b
)
4676 tree truth_type
= truth_type_for (TREE_TYPE (op1a
));
4678 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
4679 if (operand_equal_p (op1a
, op2a
, 0)
4680 && operand_equal_p (op1b
, op2b
, 0))
4682 /* Result will be either NULL_TREE, or a combined comparison. */
4683 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
4684 TRUTH_ORIF_EXPR
, code1
, code2
,
4685 truth_type
, op1a
, op1b
);
4690 /* Likewise the swapped case of the above. */
4691 if (operand_equal_p (op1a
, op2b
, 0)
4692 && operand_equal_p (op1b
, op2a
, 0))
4694 /* Result will be either NULL_TREE, or a combined comparison. */
4695 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
4696 TRUTH_ORIF_EXPR
, code1
,
4697 swap_tree_comparison (code2
),
4698 truth_type
, op1a
, op1b
);
4703 /* If both comparisons are of the same value against constants, we might
4704 be able to merge them. */
4705 if (operand_equal_p (op1a
, op2a
, 0)
4706 && TREE_CODE (op1b
) == INTEGER_CST
4707 && TREE_CODE (op2b
) == INTEGER_CST
)
4709 int cmp
= tree_int_cst_compare (op1b
, op2b
);
4711 /* If we have (op1a != op1b), we should either be able to
4712 return that or TRUE, depending on whether the constant op1b
4713 also satisfies the other comparison against op2b. */
4714 if (code1
== NE_EXPR
)
4720 case EQ_EXPR
: val
= (cmp
== 0); break;
4721 case NE_EXPR
: val
= (cmp
!= 0); break;
4722 case LT_EXPR
: val
= (cmp
< 0); break;
4723 case GT_EXPR
: val
= (cmp
> 0); break;
4724 case LE_EXPR
: val
= (cmp
<= 0); break;
4725 case GE_EXPR
: val
= (cmp
>= 0); break;
4726 default: done
= false;
4731 return boolean_true_node
;
4733 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
4736 /* Likewise if the second comparison is a != comparison. */
4737 else if (code2
== NE_EXPR
)
4743 case EQ_EXPR
: val
= (cmp
== 0); break;
4744 case NE_EXPR
: val
= (cmp
!= 0); break;
4745 case LT_EXPR
: val
= (cmp
> 0); break;
4746 case GT_EXPR
: val
= (cmp
< 0); break;
4747 case LE_EXPR
: val
= (cmp
>= 0); break;
4748 case GE_EXPR
: val
= (cmp
<= 0); break;
4749 default: done
= false;
4754 return boolean_true_node
;
4756 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
4760 /* See if an equality test is redundant with the other comparison. */
4761 else if (code1
== EQ_EXPR
)
4766 case EQ_EXPR
: val
= (cmp
== 0); break;
4767 case NE_EXPR
: val
= (cmp
!= 0); break;
4768 case LT_EXPR
: val
= (cmp
< 0); break;
4769 case GT_EXPR
: val
= (cmp
> 0); break;
4770 case LE_EXPR
: val
= (cmp
<= 0); break;
4771 case GE_EXPR
: val
= (cmp
>= 0); break;
4776 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
4778 else if (code2
== EQ_EXPR
)
4783 case EQ_EXPR
: val
= (cmp
== 0); break;
4784 case NE_EXPR
: val
= (cmp
!= 0); break;
4785 case LT_EXPR
: val
= (cmp
> 0); break;
4786 case GT_EXPR
: val
= (cmp
< 0); break;
4787 case LE_EXPR
: val
= (cmp
>= 0); break;
4788 case GE_EXPR
: val
= (cmp
<= 0); break;
4793 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
4796 /* Chose the less restrictive of two < or <= comparisons. */
4797 else if ((code1
== LT_EXPR
|| code1
== LE_EXPR
)
4798 && (code2
== LT_EXPR
|| code2
== LE_EXPR
))
4800 if ((cmp
< 0) || (cmp
== 0 && code1
== LT_EXPR
))
4801 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
4803 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
4806 /* Likewise chose the less restrictive of two > or >= comparisons. */
4807 else if ((code1
== GT_EXPR
|| code1
== GE_EXPR
)
4808 && (code2
== GT_EXPR
|| code2
== GE_EXPR
))
4810 if ((cmp
> 0) || (cmp
== 0 && code1
== GT_EXPR
))
4811 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
4813 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
4816 /* Check for singleton ranges. */
4818 && ((code1
== LT_EXPR
&& code2
== GT_EXPR
)
4819 || (code1
== GT_EXPR
&& code2
== LT_EXPR
)))
4820 return fold_build2 (NE_EXPR
, boolean_type_node
, op1a
, op2b
);
4822 /* Check for less/greater pairs that don't restrict the range at all. */
4824 && (code1
== LT_EXPR
|| code1
== LE_EXPR
)
4825 && (code2
== GT_EXPR
|| code2
== GE_EXPR
))
4826 return boolean_true_node
;
4828 && (code1
== GT_EXPR
|| code1
== GE_EXPR
)
4829 && (code2
== LT_EXPR
|| code2
== LE_EXPR
))
4830 return boolean_true_node
;
4833 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
4834 NAME's definition is a truth value. See if there are any simplifications
4835 that can be done against the NAME's definition. */
4836 if (TREE_CODE (op1a
) == SSA_NAME
4837 && (code1
== NE_EXPR
|| code1
== EQ_EXPR
)
4838 && (integer_zerop (op1b
) || integer_onep (op1b
)))
4840 bool invert
= ((code1
== EQ_EXPR
&& integer_zerop (op1b
))
4841 || (code1
== NE_EXPR
&& integer_onep (op1b
)));
4842 gimple
*stmt
= SSA_NAME_DEF_STMT (op1a
);
4843 switch (gimple_code (stmt
))
4846 /* Try to simplify by copy-propagating the definition. */
4847 return or_var_with_comparison (op1a
, invert
, code2
, op2a
, op2b
);
4850 /* If every argument to the PHI produces the same result when
4851 ORed with the second comparison, we win.
4852 Do not do this unless the type is bool since we need a bool
4853 result here anyway. */
4854 if (TREE_CODE (TREE_TYPE (op1a
)) == BOOLEAN_TYPE
)
4856 tree result
= NULL_TREE
;
4858 for (i
= 0; i
< gimple_phi_num_args (stmt
); i
++)
4860 tree arg
= gimple_phi_arg_def (stmt
, i
);
4862 /* If this PHI has itself as an argument, ignore it.
4863 If all the other args produce the same result,
4865 if (arg
== gimple_phi_result (stmt
))
4867 else if (TREE_CODE (arg
) == INTEGER_CST
)
4869 if (invert
? integer_zerop (arg
) : integer_nonzerop (arg
))
4872 result
= boolean_true_node
;
4873 else if (!integer_onep (result
))
4877 result
= fold_build2 (code2
, boolean_type_node
,
4879 else if (!same_bool_comparison_p (result
,
4883 else if (TREE_CODE (arg
) == SSA_NAME
4884 && !SSA_NAME_IS_DEFAULT_DEF (arg
))
4887 gimple
*def_stmt
= SSA_NAME_DEF_STMT (arg
);
4888 /* In simple cases we can look through PHI nodes,
4889 but we have to be careful with loops.
4891 if (! dom_info_available_p (CDI_DOMINATORS
)
4892 || gimple_bb (def_stmt
) == gimple_bb (stmt
)
4893 || dominated_by_p (CDI_DOMINATORS
,
4894 gimple_bb (def_stmt
),
4897 temp
= or_var_with_comparison (arg
, invert
, code2
,
4903 else if (!same_bool_result_p (result
, temp
))
4919 /* Try to simplify the OR of two comparisons, specified by
4920 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
4921 If this can be simplified to a single expression (without requiring
4922 introducing more SSA variables to hold intermediate values),
4923 return the resulting tree. Otherwise return NULL_TREE.
4924 If the result expression is non-null, it has boolean type. */
4927 maybe_fold_or_comparisons (enum tree_code code1
, tree op1a
, tree op1b
,
4928 enum tree_code code2
, tree op2a
, tree op2b
)
4930 tree t
= or_comparisons_1 (code1
, op1a
, op1b
, code2
, op2a
, op2b
);
4934 return or_comparisons_1 (code2
, op2a
, op2b
, code1
, op1a
, op1b
);
4938 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
4940 Either NULL_TREE, a simplified but non-constant or a constant
4943 ??? This should go into a gimple-fold-inline.h file to be eventually
4944 privatized with the single valueize function used in the various TUs
4945 to avoid the indirect function call overhead. */
4948 gimple_fold_stmt_to_constant_1 (gimple
*stmt
, tree (*valueize
) (tree
),
4949 tree (*gvalueize
) (tree
))
4953 /* ??? The SSA propagators do not correctly deal with following SSA use-def
4954 edges if there are intermediate VARYING defs. For this reason
4955 do not follow SSA edges here even though SCCVN can technically
4956 just deal fine with that. */
4957 if (gimple_simplify (stmt
, &rcode
, ops
, NULL
, gvalueize
, valueize
))
4959 tree res
= NULL_TREE
;
4960 if (gimple_simplified_result_is_gimple_val (rcode
, ops
))
4962 else if (mprts_hook
)
4963 res
= mprts_hook (rcode
, gimple_expr_type (stmt
), ops
);
4966 if (dump_file
&& dump_flags
& TDF_DETAILS
)
4968 fprintf (dump_file
, "Match-and-simplified ");
4969 print_gimple_expr (dump_file
, stmt
, 0, TDF_SLIM
);
4970 fprintf (dump_file
, " to ");
4971 print_generic_expr (dump_file
, res
, 0);
4972 fprintf (dump_file
, "\n");
4978 location_t loc
= gimple_location (stmt
);
4979 switch (gimple_code (stmt
))
4983 enum tree_code subcode
= gimple_assign_rhs_code (stmt
);
4985 switch (get_gimple_rhs_class (subcode
))
4987 case GIMPLE_SINGLE_RHS
:
4989 tree rhs
= gimple_assign_rhs1 (stmt
);
4990 enum tree_code_class kind
= TREE_CODE_CLASS (subcode
);
4992 if (TREE_CODE (rhs
) == SSA_NAME
)
4994 /* If the RHS is an SSA_NAME, return its known constant value,
4996 return (*valueize
) (rhs
);
4998 /* Handle propagating invariant addresses into address
5000 else if (TREE_CODE (rhs
) == ADDR_EXPR
5001 && !is_gimple_min_invariant (rhs
))
5003 HOST_WIDE_INT offset
= 0;
5005 base
= get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs
, 0),
5009 && (CONSTANT_CLASS_P (base
)
5010 || decl_address_invariant_p (base
)))
5011 return build_invariant_address (TREE_TYPE (rhs
),
5014 else if (TREE_CODE (rhs
) == CONSTRUCTOR
5015 && TREE_CODE (TREE_TYPE (rhs
)) == VECTOR_TYPE
5016 && (CONSTRUCTOR_NELTS (rhs
)
5017 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs
))))
5022 vec
= XALLOCAVEC (tree
,
5023 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs
)));
5024 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs
), i
, val
)
5026 val
= (*valueize
) (val
);
5027 if (TREE_CODE (val
) == INTEGER_CST
5028 || TREE_CODE (val
) == REAL_CST
5029 || TREE_CODE (val
) == FIXED_CST
)
5035 return build_vector (TREE_TYPE (rhs
), vec
);
5037 if (subcode
== OBJ_TYPE_REF
)
5039 tree val
= (*valueize
) (OBJ_TYPE_REF_EXPR (rhs
));
5040 /* If callee is constant, we can fold away the wrapper. */
5041 if (is_gimple_min_invariant (val
))
5045 if (kind
== tcc_reference
)
5047 if ((TREE_CODE (rhs
) == VIEW_CONVERT_EXPR
5048 || TREE_CODE (rhs
) == REALPART_EXPR
5049 || TREE_CODE (rhs
) == IMAGPART_EXPR
)
5050 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == SSA_NAME
)
5052 tree val
= (*valueize
) (TREE_OPERAND (rhs
, 0));
5053 return fold_unary_loc (EXPR_LOCATION (rhs
),
5055 TREE_TYPE (rhs
), val
);
5057 else if (TREE_CODE (rhs
) == BIT_FIELD_REF
5058 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == SSA_NAME
)
5060 tree val
= (*valueize
) (TREE_OPERAND (rhs
, 0));
5061 return fold_ternary_loc (EXPR_LOCATION (rhs
),
5063 TREE_TYPE (rhs
), val
,
5064 TREE_OPERAND (rhs
, 1),
5065 TREE_OPERAND (rhs
, 2));
5067 else if (TREE_CODE (rhs
) == MEM_REF
5068 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == SSA_NAME
)
5070 tree val
= (*valueize
) (TREE_OPERAND (rhs
, 0));
5071 if (TREE_CODE (val
) == ADDR_EXPR
5072 && is_gimple_min_invariant (val
))
5074 tree tem
= fold_build2 (MEM_REF
, TREE_TYPE (rhs
),
5076 TREE_OPERAND (rhs
, 1));
5081 return fold_const_aggregate_ref_1 (rhs
, valueize
);
5083 else if (kind
== tcc_declaration
)
5084 return get_symbol_constant_value (rhs
);
5088 case GIMPLE_UNARY_RHS
:
5091 case GIMPLE_BINARY_RHS
:
5092 /* Translate &x + CST into an invariant form suitable for
5093 further propagation. */
5094 if (subcode
== POINTER_PLUS_EXPR
)
5096 tree op0
= (*valueize
) (gimple_assign_rhs1 (stmt
));
5097 tree op1
= (*valueize
) (gimple_assign_rhs2 (stmt
));
5098 if (TREE_CODE (op0
) == ADDR_EXPR
5099 && TREE_CODE (op1
) == INTEGER_CST
)
5101 tree off
= fold_convert (ptr_type_node
, op1
);
5102 return build_fold_addr_expr_loc
5104 fold_build2 (MEM_REF
,
5105 TREE_TYPE (TREE_TYPE (op0
)),
5106 unshare_expr (op0
), off
));
5109 /* Canonicalize bool != 0 and bool == 0 appearing after
5110 valueization. While gimple_simplify handles this
5111 it can get confused by the ~X == 1 -> X == 0 transform
5112 which we cant reduce to a SSA name or a constant
5113 (and we have no way to tell gimple_simplify to not
5114 consider those transforms in the first place). */
5115 else if (subcode
== EQ_EXPR
5116 || subcode
== NE_EXPR
)
5118 tree lhs
= gimple_assign_lhs (stmt
);
5119 tree op0
= gimple_assign_rhs1 (stmt
);
5120 if (useless_type_conversion_p (TREE_TYPE (lhs
),
5123 tree op1
= (*valueize
) (gimple_assign_rhs2 (stmt
));
5124 op0
= (*valueize
) (op0
);
5125 if (TREE_CODE (op0
) == INTEGER_CST
)
5126 std::swap (op0
, op1
);
5127 if (TREE_CODE (op1
) == INTEGER_CST
5128 && ((subcode
== NE_EXPR
&& integer_zerop (op1
))
5129 || (subcode
== EQ_EXPR
&& integer_onep (op1
))))
5135 case GIMPLE_TERNARY_RHS
:
5137 /* Handle ternary operators that can appear in GIMPLE form. */
5138 tree op0
= (*valueize
) (gimple_assign_rhs1 (stmt
));
5139 tree op1
= (*valueize
) (gimple_assign_rhs2 (stmt
));
5140 tree op2
= (*valueize
) (gimple_assign_rhs3 (stmt
));
5141 return fold_ternary_loc (loc
, subcode
,
5142 gimple_expr_type (stmt
), op0
, op1
, op2
);
5153 gcall
*call_stmt
= as_a
<gcall
*> (stmt
);
5155 if (gimple_call_internal_p (stmt
))
5157 enum tree_code subcode
= ERROR_MARK
;
5158 switch (gimple_call_internal_fn (stmt
))
5160 case IFN_UBSAN_CHECK_ADD
:
5161 subcode
= PLUS_EXPR
;
5163 case IFN_UBSAN_CHECK_SUB
:
5164 subcode
= MINUS_EXPR
;
5166 case IFN_UBSAN_CHECK_MUL
:
5167 subcode
= MULT_EXPR
;
5172 tree arg0
= gimple_call_arg (stmt
, 0);
5173 tree arg1
= gimple_call_arg (stmt
, 1);
5174 tree op0
= (*valueize
) (arg0
);
5175 tree op1
= (*valueize
) (arg1
);
5177 if (TREE_CODE (op0
) != INTEGER_CST
5178 || TREE_CODE (op1
) != INTEGER_CST
)
5183 /* x * 0 = 0 * x = 0 without overflow. */
5184 if (integer_zerop (op0
) || integer_zerop (op1
))
5185 return build_zero_cst (TREE_TYPE (arg0
));
5188 /* y - y = 0 without overflow. */
5189 if (operand_equal_p (op0
, op1
, 0))
5190 return build_zero_cst (TREE_TYPE (arg0
));
5197 = fold_binary_loc (loc
, subcode
, TREE_TYPE (arg0
), op0
, op1
);
5199 && TREE_CODE (res
) == INTEGER_CST
5200 && !TREE_OVERFLOW (res
))
5205 fn
= (*valueize
) (gimple_call_fn (stmt
));
5206 if (TREE_CODE (fn
) == ADDR_EXPR
5207 && TREE_CODE (TREE_OPERAND (fn
, 0)) == FUNCTION_DECL
5208 && DECL_BUILT_IN (TREE_OPERAND (fn
, 0))
5209 && gimple_builtin_call_types_compatible_p (stmt
,
5210 TREE_OPERAND (fn
, 0)))
5212 tree
*args
= XALLOCAVEC (tree
, gimple_call_num_args (stmt
));
5215 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
5216 args
[i
] = (*valueize
) (gimple_call_arg (stmt
, i
));
5217 retval
= fold_builtin_call_array (loc
,
5218 gimple_call_return_type (call_stmt
),
5219 fn
, gimple_call_num_args (stmt
), args
);
5222 /* fold_call_expr wraps the result inside a NOP_EXPR. */
5223 STRIP_NOPS (retval
);
5224 retval
= fold_convert (gimple_call_return_type (call_stmt
),
5237 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
5238 Returns NULL_TREE if folding to a constant is not possible, otherwise
5239 returns a constant according to is_gimple_min_invariant. */
5242 gimple_fold_stmt_to_constant (gimple
*stmt
, tree (*valueize
) (tree
))
5244 tree res
= gimple_fold_stmt_to_constant_1 (stmt
, valueize
);
5245 if (res
&& is_gimple_min_invariant (res
))
5251 /* The following set of functions are supposed to fold references using
5252 their constant initializers. */
5254 /* See if we can find constructor defining value of BASE.
5255 When we know the consructor with constant offset (such as
5256 base is array[40] and we do know constructor of array), then
5257 BIT_OFFSET is adjusted accordingly.
5259 As a special case, return error_mark_node when constructor
5260 is not explicitly available, but it is known to be zero
5261 such as 'static const int a;'. */
5263 get_base_constructor (tree base
, HOST_WIDE_INT
*bit_offset
,
5264 tree (*valueize
)(tree
))
5266 HOST_WIDE_INT bit_offset2
, size
, max_size
;
5267 if (TREE_CODE (base
) == MEM_REF
)
5269 if (!integer_zerop (TREE_OPERAND (base
, 1)))
5271 if (!tree_fits_shwi_p (TREE_OPERAND (base
, 1)))
5273 *bit_offset
+= (mem_ref_offset (base
).to_short_addr ()
5278 && TREE_CODE (TREE_OPERAND (base
, 0)) == SSA_NAME
)
5279 base
= valueize (TREE_OPERAND (base
, 0));
5280 if (!base
|| TREE_CODE (base
) != ADDR_EXPR
)
5282 base
= TREE_OPERAND (base
, 0);
5285 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
5286 DECL_INITIAL. If BASE is a nested reference into another
5287 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
5288 the inner reference. */
5289 switch (TREE_CODE (base
))
5294 tree init
= ctor_for_folding (base
);
5296 /* Our semantic is exact opposite of ctor_for_folding;
5297 NULL means unknown, while error_mark_node is 0. */
5298 if (init
== error_mark_node
)
5301 return error_mark_node
;
5307 base
= get_ref_base_and_extent (base
, &bit_offset2
, &size
, &max_size
);
5308 if (max_size
== -1 || size
!= max_size
)
5310 *bit_offset
+= bit_offset2
;
5311 return get_base_constructor (base
, bit_offset
, valueize
);
5322 /* CTOR is CONSTRUCTOR of an array type. Fold reference of type TYPE and size
5323 SIZE to the memory at bit OFFSET. */
5326 fold_array_ctor_reference (tree type
, tree ctor
,
5327 unsigned HOST_WIDE_INT offset
,
5328 unsigned HOST_WIDE_INT size
,
5331 offset_int low_bound
;
5332 offset_int elt_size
;
5333 offset_int access_index
;
5334 tree domain_type
= NULL_TREE
;
5335 HOST_WIDE_INT inner_offset
;
5337 /* Compute low bound and elt size. */
5338 if (TREE_CODE (TREE_TYPE (ctor
)) == ARRAY_TYPE
)
5339 domain_type
= TYPE_DOMAIN (TREE_TYPE (ctor
));
5340 if (domain_type
&& TYPE_MIN_VALUE (domain_type
))
5342 /* Static constructors for variably sized objects makes no sense. */
5343 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type
)) == INTEGER_CST
);
5344 low_bound
= wi::to_offset (TYPE_MIN_VALUE (domain_type
));
5348 /* Static constructors for variably sized objects makes no sense. */
5349 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor
))))
5351 elt_size
= wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor
))));
5353 /* We can handle only constantly sized accesses that are known to not
5354 be larger than size of array element. */
5355 if (!TYPE_SIZE_UNIT (type
)
5356 || TREE_CODE (TYPE_SIZE_UNIT (type
)) != INTEGER_CST
5357 || wi::lts_p (elt_size
, wi::to_offset (TYPE_SIZE_UNIT (type
)))
5361 /* Compute the array index we look for. */
5362 access_index
= wi::udiv_trunc (offset_int (offset
/ BITS_PER_UNIT
),
5364 access_index
+= low_bound
;
5366 /* And offset within the access. */
5367 inner_offset
= offset
% (elt_size
.to_uhwi () * BITS_PER_UNIT
);
5369 /* See if the array field is large enough to span whole access. We do not
5370 care to fold accesses spanning multiple array indexes. */
5371 if (inner_offset
+ size
> elt_size
.to_uhwi () * BITS_PER_UNIT
)
5373 if (tree val
= get_array_ctor_element_at_index (ctor
, access_index
))
5374 return fold_ctor_reference (type
, val
, inner_offset
, size
, from_decl
);
5376 /* When memory is not explicitely mentioned in constructor,
5377 it is 0 (or out of range). */
5378 return build_zero_cst (type
);
5381 /* CTOR is CONSTRUCTOR of an aggregate or vector.
5382 Fold reference of type TYPE and size SIZE to the memory at bit OFFSET. */
5385 fold_nonarray_ctor_reference (tree type
, tree ctor
,
5386 unsigned HOST_WIDE_INT offset
,
5387 unsigned HOST_WIDE_INT size
,
5390 unsigned HOST_WIDE_INT cnt
;
5393 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor
), cnt
, cfield
,
5396 tree byte_offset
= DECL_FIELD_OFFSET (cfield
);
5397 tree field_offset
= DECL_FIELD_BIT_OFFSET (cfield
);
5398 tree field_size
= DECL_SIZE (cfield
);
5399 offset_int bitoffset
;
5400 offset_int bitoffset_end
, access_end
;
5402 /* Variable sized objects in static constructors makes no sense,
5403 but field_size can be NULL for flexible array members. */
5404 gcc_assert (TREE_CODE (field_offset
) == INTEGER_CST
5405 && TREE_CODE (byte_offset
) == INTEGER_CST
5406 && (field_size
!= NULL_TREE
5407 ? TREE_CODE (field_size
) == INTEGER_CST
5408 : TREE_CODE (TREE_TYPE (cfield
)) == ARRAY_TYPE
));
5410 /* Compute bit offset of the field. */
5411 bitoffset
= (wi::to_offset (field_offset
)
5412 + wi::lshift (wi::to_offset (byte_offset
),
5413 LOG2_BITS_PER_UNIT
));
5414 /* Compute bit offset where the field ends. */
5415 if (field_size
!= NULL_TREE
)
5416 bitoffset_end
= bitoffset
+ wi::to_offset (field_size
);
5420 access_end
= offset_int (offset
) + size
;
5422 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
5423 [BITOFFSET, BITOFFSET_END)? */
5424 if (wi::cmps (access_end
, bitoffset
) > 0
5425 && (field_size
== NULL_TREE
5426 || wi::lts_p (offset
, bitoffset_end
)))
5428 offset_int inner_offset
= offset_int (offset
) - bitoffset
;
5429 /* We do have overlap. Now see if field is large enough to
5430 cover the access. Give up for accesses spanning multiple
5432 if (wi::cmps (access_end
, bitoffset_end
) > 0)
5434 if (wi::lts_p (offset
, bitoffset
))
5436 return fold_ctor_reference (type
, cval
,
5437 inner_offset
.to_uhwi (), size
,
5441 /* When memory is not explicitely mentioned in constructor, it is 0. */
5442 return build_zero_cst (type
);
5445 /* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
5446 to the memory at bit OFFSET. */
5449 fold_ctor_reference (tree type
, tree ctor
, unsigned HOST_WIDE_INT offset
,
5450 unsigned HOST_WIDE_INT size
, tree from_decl
)
5454 /* We found the field with exact match. */
5455 if (useless_type_conversion_p (type
, TREE_TYPE (ctor
))
5457 return canonicalize_constructor_val (unshare_expr (ctor
), from_decl
);
5459 /* We are at the end of walk, see if we can view convert the
5461 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor
)) && !offset
5462 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
5463 && !compare_tree_int (TYPE_SIZE (type
), size
)
5464 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor
)), size
))
5466 ret
= canonicalize_constructor_val (unshare_expr (ctor
), from_decl
);
5467 ret
= fold_unary (VIEW_CONVERT_EXPR
, type
, ret
);
5469 STRIP_USELESS_TYPE_CONVERSION (ret
);
5472 /* For constants and byte-aligned/sized reads try to go through
5473 native_encode/interpret. */
5474 if (CONSTANT_CLASS_P (ctor
)
5475 && BITS_PER_UNIT
== 8
5476 && offset
% BITS_PER_UNIT
== 0
5477 && size
% BITS_PER_UNIT
== 0
5478 && size
<= MAX_BITSIZE_MODE_ANY_MODE
)
5480 unsigned char buf
[MAX_BITSIZE_MODE_ANY_MODE
/ BITS_PER_UNIT
];
5481 if (native_encode_expr (ctor
, buf
, size
/ BITS_PER_UNIT
,
5482 offset
/ BITS_PER_UNIT
) > 0)
5483 return native_interpret_expr (type
, buf
, size
/ BITS_PER_UNIT
);
5485 if (TREE_CODE (ctor
) == CONSTRUCTOR
)
5488 if (TREE_CODE (TREE_TYPE (ctor
)) == ARRAY_TYPE
5489 || TREE_CODE (TREE_TYPE (ctor
)) == VECTOR_TYPE
)
5490 return fold_array_ctor_reference (type
, ctor
, offset
, size
,
5493 return fold_nonarray_ctor_reference (type
, ctor
, offset
, size
,
5500 /* Return the tree representing the element referenced by T if T is an
5501 ARRAY_REF or COMPONENT_REF into constant aggregates valuezing SSA
5502 names using VALUEIZE. Return NULL_TREE otherwise. */
5505 fold_const_aggregate_ref_1 (tree t
, tree (*valueize
) (tree
))
5507 tree ctor
, idx
, base
;
5508 HOST_WIDE_INT offset
, size
, max_size
;
5511 if (TREE_THIS_VOLATILE (t
))
5515 return get_symbol_constant_value (t
);
5517 tem
= fold_read_from_constant_string (t
);
5521 switch (TREE_CODE (t
))
5524 case ARRAY_RANGE_REF
:
5525 /* Constant indexes are handled well by get_base_constructor.
5526 Only special case variable offsets.
5527 FIXME: This code can't handle nested references with variable indexes
5528 (they will be handled only by iteration of ccp). Perhaps we can bring
5529 get_ref_base_and_extent here and make it use a valueize callback. */
5530 if (TREE_CODE (TREE_OPERAND (t
, 1)) == SSA_NAME
5532 && (idx
= (*valueize
) (TREE_OPERAND (t
, 1)))
5533 && TREE_CODE (idx
) == INTEGER_CST
)
5535 tree low_bound
, unit_size
;
5537 /* If the resulting bit-offset is constant, track it. */
5538 if ((low_bound
= array_ref_low_bound (t
),
5539 TREE_CODE (low_bound
) == INTEGER_CST
)
5540 && (unit_size
= array_ref_element_size (t
),
5541 tree_fits_uhwi_p (unit_size
)))
5544 = wi::sext (wi::to_offset (idx
) - wi::to_offset (low_bound
),
5545 TYPE_PRECISION (TREE_TYPE (idx
)));
5547 if (wi::fits_shwi_p (woffset
))
5549 offset
= woffset
.to_shwi ();
5550 /* TODO: This code seems wrong, multiply then check
5551 to see if it fits. */
5552 offset
*= tree_to_uhwi (unit_size
);
5553 offset
*= BITS_PER_UNIT
;
5555 base
= TREE_OPERAND (t
, 0);
5556 ctor
= get_base_constructor (base
, &offset
, valueize
);
5557 /* Empty constructor. Always fold to 0. */
5558 if (ctor
== error_mark_node
)
5559 return build_zero_cst (TREE_TYPE (t
));
5560 /* Out of bound array access. Value is undefined,
5564 /* We can not determine ctor. */
5567 return fold_ctor_reference (TREE_TYPE (t
), ctor
, offset
,
5568 tree_to_uhwi (unit_size
)
5578 case TARGET_MEM_REF
:
5580 base
= get_ref_base_and_extent (t
, &offset
, &size
, &max_size
);
5581 ctor
= get_base_constructor (base
, &offset
, valueize
);
5583 /* Empty constructor. Always fold to 0. */
5584 if (ctor
== error_mark_node
)
5585 return build_zero_cst (TREE_TYPE (t
));
5586 /* We do not know precise address. */
5587 if (max_size
== -1 || max_size
!= size
)
5589 /* We can not determine ctor. */
5593 /* Out of bound array access. Value is undefined, but don't fold. */
5597 return fold_ctor_reference (TREE_TYPE (t
), ctor
, offset
, size
,
5603 tree c
= fold_const_aggregate_ref_1 (TREE_OPERAND (t
, 0), valueize
);
5604 if (c
&& TREE_CODE (c
) == COMPLEX_CST
)
5605 return fold_build1_loc (EXPR_LOCATION (t
),
5606 TREE_CODE (t
), TREE_TYPE (t
), c
);
5618 fold_const_aggregate_ref (tree t
)
5620 return fold_const_aggregate_ref_1 (t
, NULL
);
5623 /* Lookup virtual method with index TOKEN in a virtual table V
5625 Set CAN_REFER if non-NULL to false if method
5626 is not referable or if the virtual table is ill-formed (such as rewriten
5627 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
5630 gimple_get_virt_method_for_vtable (HOST_WIDE_INT token
,
5632 unsigned HOST_WIDE_INT offset
,
5635 tree vtable
= v
, init
, fn
;
5636 unsigned HOST_WIDE_INT size
;
5637 unsigned HOST_WIDE_INT elt_size
, access_index
;
5643 /* First of all double check we have virtual table. */
5644 if (TREE_CODE (v
) != VAR_DECL
5645 || !DECL_VIRTUAL_P (v
))
5647 /* Pass down that we lost track of the target. */
5653 init
= ctor_for_folding (v
);
5655 /* The virtual tables should always be born with constructors
5656 and we always should assume that they are avaialble for
5657 folding. At the moment we do not stream them in all cases,
5658 but it should never happen that ctor seem unreachable. */
5660 if (init
== error_mark_node
)
5662 gcc_assert (in_lto_p
);
5663 /* Pass down that we lost track of the target. */
5668 gcc_checking_assert (TREE_CODE (TREE_TYPE (v
)) == ARRAY_TYPE
);
5669 size
= tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v
))));
5670 offset
*= BITS_PER_UNIT
;
5671 offset
+= token
* size
;
5673 /* Lookup the value in the constructor that is assumed to be array.
5674 This is equivalent to
5675 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
5676 offset, size, NULL);
5677 but in a constant time. We expect that frontend produced a simple
5678 array without indexed initializers. */
5680 gcc_checking_assert (TREE_CODE (TREE_TYPE (init
)) == ARRAY_TYPE
);
5681 domain_type
= TYPE_DOMAIN (TREE_TYPE (init
));
5682 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type
)));
5683 elt_size
= tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init
))));
5685 access_index
= offset
/ BITS_PER_UNIT
/ elt_size
;
5686 gcc_checking_assert (offset
% (elt_size
* BITS_PER_UNIT
) == 0);
5688 /* This code makes an assumption that there are no
5689 indexed fileds produced by C++ FE, so we can directly index the array. */
5690 if (access_index
< CONSTRUCTOR_NELTS (init
))
5692 fn
= CONSTRUCTOR_ELT (init
, access_index
)->value
;
5693 gcc_checking_assert (!CONSTRUCTOR_ELT (init
, access_index
)->index
);
5699 /* For type inconsistent program we may end up looking up virtual method
5700 in virtual table that does not contain TOKEN entries. We may overrun
5701 the virtual table and pick up a constant or RTTI info pointer.
5702 In any case the call is undefined. */
5704 || (TREE_CODE (fn
) != ADDR_EXPR
&& TREE_CODE (fn
) != FDESC_EXPR
)
5705 || TREE_CODE (TREE_OPERAND (fn
, 0)) != FUNCTION_DECL
)
5706 fn
= builtin_decl_implicit (BUILT_IN_UNREACHABLE
);
5709 fn
= TREE_OPERAND (fn
, 0);
5711 /* When cgraph node is missing and function is not public, we cannot
5712 devirtualize. This can happen in WHOPR when the actual method
5713 ends up in other partition, because we found devirtualization
5714 possibility too late. */
5715 if (!can_refer_decl_in_current_unit_p (fn
, vtable
))
5726 /* Make sure we create a cgraph node for functions we'll reference.
5727 They can be non-existent if the reference comes from an entry
5728 of an external vtable for example. */
5729 cgraph_node::get_create (fn
);
5734 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
5735 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
5736 KNOWN_BINFO carries the binfo describing the true type of
5737 OBJ_TYPE_REF_OBJECT(REF).
5738 Set CAN_REFER if non-NULL to false if method
5739 is not referable or if the virtual table is ill-formed (such as rewriten
5740 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
5743 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token
, tree known_binfo
,
5746 unsigned HOST_WIDE_INT offset
;
5749 v
= BINFO_VTABLE (known_binfo
);
5750 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
5754 if (!vtable_pointer_value_to_vtable (v
, &v
, &offset
))
5760 return gimple_get_virt_method_for_vtable (token
, v
, offset
, can_refer
);
5763 /* Given a pointer value OP0, return a simplified version of an
5764 indirection through OP0, or NULL_TREE if no simplification is
5765 possible. Note that the resulting type may be different from
5766 the type pointed to in the sense that it is still compatible
5767 from the langhooks point of view. */
5770 gimple_fold_indirect_ref (tree t
)
5772 tree ptype
= TREE_TYPE (t
), type
= TREE_TYPE (ptype
);
5777 subtype
= TREE_TYPE (sub
);
5778 if (!POINTER_TYPE_P (subtype
))
5781 if (TREE_CODE (sub
) == ADDR_EXPR
)
5783 tree op
= TREE_OPERAND (sub
, 0);
5784 tree optype
= TREE_TYPE (op
);
5786 if (useless_type_conversion_p (type
, optype
))
5789 /* *(foo *)&fooarray => fooarray[0] */
5790 if (TREE_CODE (optype
) == ARRAY_TYPE
5791 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype
))) == INTEGER_CST
5792 && useless_type_conversion_p (type
, TREE_TYPE (optype
)))
5794 tree type_domain
= TYPE_DOMAIN (optype
);
5795 tree min_val
= size_zero_node
;
5796 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
5797 min_val
= TYPE_MIN_VALUE (type_domain
);
5798 if (TREE_CODE (min_val
) == INTEGER_CST
)
5799 return build4 (ARRAY_REF
, type
, op
, min_val
, NULL_TREE
, NULL_TREE
);
5801 /* *(foo *)&complexfoo => __real__ complexfoo */
5802 else if (TREE_CODE (optype
) == COMPLEX_TYPE
5803 && useless_type_conversion_p (type
, TREE_TYPE (optype
)))
5804 return fold_build1 (REALPART_EXPR
, type
, op
);
5805 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
5806 else if (TREE_CODE (optype
) == VECTOR_TYPE
5807 && useless_type_conversion_p (type
, TREE_TYPE (optype
)))
5809 tree part_width
= TYPE_SIZE (type
);
5810 tree index
= bitsize_int (0);
5811 return fold_build3 (BIT_FIELD_REF
, type
, op
, part_width
, index
);
5815 /* *(p + CST) -> ... */
5816 if (TREE_CODE (sub
) == POINTER_PLUS_EXPR
5817 && TREE_CODE (TREE_OPERAND (sub
, 1)) == INTEGER_CST
)
5819 tree addr
= TREE_OPERAND (sub
, 0);
5820 tree off
= TREE_OPERAND (sub
, 1);
5824 addrtype
= TREE_TYPE (addr
);
5826 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
5827 if (TREE_CODE (addr
) == ADDR_EXPR
5828 && TREE_CODE (TREE_TYPE (addrtype
)) == VECTOR_TYPE
5829 && useless_type_conversion_p (type
, TREE_TYPE (TREE_TYPE (addrtype
)))
5830 && tree_fits_uhwi_p (off
))
5832 unsigned HOST_WIDE_INT offset
= tree_to_uhwi (off
);
5833 tree part_width
= TYPE_SIZE (type
);
5834 unsigned HOST_WIDE_INT part_widthi
5835 = tree_to_shwi (part_width
) / BITS_PER_UNIT
;
5836 unsigned HOST_WIDE_INT indexi
= offset
* BITS_PER_UNIT
;
5837 tree index
= bitsize_int (indexi
);
5838 if (offset
/ part_widthi
5839 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype
)))
5840 return fold_build3 (BIT_FIELD_REF
, type
, TREE_OPERAND (addr
, 0),
5844 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
5845 if (TREE_CODE (addr
) == ADDR_EXPR
5846 && TREE_CODE (TREE_TYPE (addrtype
)) == COMPLEX_TYPE
5847 && useless_type_conversion_p (type
, TREE_TYPE (TREE_TYPE (addrtype
))))
5849 tree size
= TYPE_SIZE_UNIT (type
);
5850 if (tree_int_cst_equal (size
, off
))
5851 return fold_build1 (IMAGPART_EXPR
, type
, TREE_OPERAND (addr
, 0));
5854 /* *(p + CST) -> MEM_REF <p, CST>. */
5855 if (TREE_CODE (addr
) != ADDR_EXPR
5856 || DECL_P (TREE_OPERAND (addr
, 0)))
5857 return fold_build2 (MEM_REF
, type
,
5859 wide_int_to_tree (ptype
, off
));
5862 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
5863 if (TREE_CODE (TREE_TYPE (subtype
)) == ARRAY_TYPE
5864 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype
)))) == INTEGER_CST
5865 && useless_type_conversion_p (type
, TREE_TYPE (TREE_TYPE (subtype
))))
5868 tree min_val
= size_zero_node
;
5870 sub
= gimple_fold_indirect_ref (sub
);
5872 sub
= build1 (INDIRECT_REF
, TREE_TYPE (subtype
), osub
);
5873 type_domain
= TYPE_DOMAIN (TREE_TYPE (sub
));
5874 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
5875 min_val
= TYPE_MIN_VALUE (type_domain
);
5876 if (TREE_CODE (min_val
) == INTEGER_CST
)
5877 return build4 (ARRAY_REF
, type
, sub
, min_val
, NULL_TREE
, NULL_TREE
);
5883 /* Return true if CODE is an operation that when operating on signed
5884 integer types involves undefined behavior on overflow and the
5885 operation can be expressed with unsigned arithmetic. */
5888 arith_code_with_undefined_signed_overflow (tree_code code
)
5896 case POINTER_PLUS_EXPR
:
5903 /* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
5904 operation that can be transformed to unsigned arithmetic by converting
5905 its operand, carrying out the operation in the corresponding unsigned
5906 type and converting the result back to the original type.
5908 Returns a sequence of statements that replace STMT and also contain
5909 a modified form of STMT itself. */
5912 rewrite_to_defined_overflow (gimple
*stmt
)
5914 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
5916 fprintf (dump_file
, "rewriting stmt with undefined signed "
5918 print_gimple_stmt (dump_file
, stmt
, 0, TDF_SLIM
);
5921 tree lhs
= gimple_assign_lhs (stmt
);
5922 tree type
= unsigned_type_for (TREE_TYPE (lhs
));
5923 gimple_seq stmts
= NULL
;
5924 for (unsigned i
= 1; i
< gimple_num_ops (stmt
); ++i
)
5926 tree op
= gimple_op (stmt
, i
);
5927 op
= gimple_convert (&stmts
, type
, op
);
5928 gimple_set_op (stmt
, i
, op
);
5930 gimple_assign_set_lhs (stmt
, make_ssa_name (type
, stmt
));
5931 if (gimple_assign_rhs_code (stmt
) == POINTER_PLUS_EXPR
)
5932 gimple_assign_set_rhs_code (stmt
, PLUS_EXPR
);
5933 gimple_seq_add_stmt (&stmts
, stmt
);
5934 gimple
*cvt
= gimple_build_assign (lhs
, NOP_EXPR
, gimple_assign_lhs (stmt
));
5935 gimple_seq_add_stmt (&stmts
, cvt
);
5941 /* The valueization hook we use for the gimple_build API simplification.
5942 This makes us match fold_buildN behavior by only combining with
5943 statements in the sequence(s) we are currently building. */
5946 gimple_build_valueize (tree op
)
5948 if (gimple_bb (SSA_NAME_DEF_STMT (op
)) == NULL
)
5953 /* Build the expression CODE OP0 of type TYPE with location LOC,
5954 simplifying it first if possible. Returns the built
5955 expression value and appends statements possibly defining it
5959 gimple_build (gimple_seq
*seq
, location_t loc
,
5960 enum tree_code code
, tree type
, tree op0
)
5962 tree res
= gimple_simplify (code
, type
, op0
, seq
, gimple_build_valueize
);
5965 if (gimple_in_ssa_p (cfun
))
5966 res
= make_ssa_name (type
);
5968 res
= create_tmp_reg (type
);
5970 if (code
== REALPART_EXPR
5971 || code
== IMAGPART_EXPR
5972 || code
== VIEW_CONVERT_EXPR
)
5973 stmt
= gimple_build_assign (res
, code
, build1 (code
, type
, op0
));
5975 stmt
= gimple_build_assign (res
, code
, op0
);
5976 gimple_set_location (stmt
, loc
);
5977 gimple_seq_add_stmt_without_update (seq
, stmt
);
5982 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
5983 simplifying it first if possible. Returns the built
5984 expression value and appends statements possibly defining it
5988 gimple_build (gimple_seq
*seq
, location_t loc
,
5989 enum tree_code code
, tree type
, tree op0
, tree op1
)
5991 tree res
= gimple_simplify (code
, type
, op0
, op1
, seq
, gimple_build_valueize
);
5994 if (gimple_in_ssa_p (cfun
))
5995 res
= make_ssa_name (type
);
5997 res
= create_tmp_reg (type
);
5998 gimple
*stmt
= gimple_build_assign (res
, code
, op0
, op1
);
5999 gimple_set_location (stmt
, loc
);
6000 gimple_seq_add_stmt_without_update (seq
, stmt
);
6005 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
6006 simplifying it first if possible. Returns the built
6007 expression value and appends statements possibly defining it
6011 gimple_build (gimple_seq
*seq
, location_t loc
,
6012 enum tree_code code
, tree type
, tree op0
, tree op1
, tree op2
)
6014 tree res
= gimple_simplify (code
, type
, op0
, op1
, op2
,
6015 seq
, gimple_build_valueize
);
6018 if (gimple_in_ssa_p (cfun
))
6019 res
= make_ssa_name (type
);
6021 res
= create_tmp_reg (type
);
6023 if (code
== BIT_FIELD_REF
)
6024 stmt
= gimple_build_assign (res
, code
,
6025 build3 (code
, type
, op0
, op1
, op2
));
6027 stmt
= gimple_build_assign (res
, code
, op0
, op1
, op2
);
6028 gimple_set_location (stmt
, loc
);
6029 gimple_seq_add_stmt_without_update (seq
, stmt
);
6034 /* Build the call FN (ARG0) with a result of type TYPE
6035 (or no result if TYPE is void) with location LOC,
6036 simplifying it first if possible. Returns the built
6037 expression value (or NULL_TREE if TYPE is void) and appends
6038 statements possibly defining it to SEQ. */
6041 gimple_build (gimple_seq
*seq
, location_t loc
,
6042 enum built_in_function fn
, tree type
, tree arg0
)
6044 tree res
= gimple_simplify (fn
, type
, arg0
, seq
, gimple_build_valueize
);
6047 tree decl
= builtin_decl_implicit (fn
);
6048 gimple
*stmt
= gimple_build_call (decl
, 1, arg0
);
6049 if (!VOID_TYPE_P (type
))
6051 if (gimple_in_ssa_p (cfun
))
6052 res
= make_ssa_name (type
);
6054 res
= create_tmp_reg (type
);
6055 gimple_call_set_lhs (stmt
, res
);
6057 gimple_set_location (stmt
, loc
);
6058 gimple_seq_add_stmt_without_update (seq
, stmt
);
6063 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
6064 (or no result if TYPE is void) with location LOC,
6065 simplifying it first if possible. Returns the built
6066 expression value (or NULL_TREE if TYPE is void) and appends
6067 statements possibly defining it to SEQ. */
6070 gimple_build (gimple_seq
*seq
, location_t loc
,
6071 enum built_in_function fn
, tree type
, tree arg0
, tree arg1
)
6073 tree res
= gimple_simplify (fn
, type
, arg0
, arg1
, seq
, gimple_build_valueize
);
6076 tree decl
= builtin_decl_implicit (fn
);
6077 gimple
*stmt
= gimple_build_call (decl
, 2, arg0
, arg1
);
6078 if (!VOID_TYPE_P (type
))
6080 if (gimple_in_ssa_p (cfun
))
6081 res
= make_ssa_name (type
);
6083 res
= create_tmp_reg (type
);
6084 gimple_call_set_lhs (stmt
, res
);
6086 gimple_set_location (stmt
, loc
);
6087 gimple_seq_add_stmt_without_update (seq
, stmt
);
6092 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
6093 (or no result if TYPE is void) with location LOC,
6094 simplifying it first if possible. Returns the built
6095 expression value (or NULL_TREE if TYPE is void) and appends
6096 statements possibly defining it to SEQ. */
6099 gimple_build (gimple_seq
*seq
, location_t loc
,
6100 enum built_in_function fn
, tree type
,
6101 tree arg0
, tree arg1
, tree arg2
)
6103 tree res
= gimple_simplify (fn
, type
, arg0
, arg1
, arg2
,
6104 seq
, gimple_build_valueize
);
6107 tree decl
= builtin_decl_implicit (fn
);
6108 gimple
*stmt
= gimple_build_call (decl
, 3, arg0
, arg1
, arg2
);
6109 if (!VOID_TYPE_P (type
))
6111 if (gimple_in_ssa_p (cfun
))
6112 res
= make_ssa_name (type
);
6114 res
= create_tmp_reg (type
);
6115 gimple_call_set_lhs (stmt
, res
);
6117 gimple_set_location (stmt
, loc
);
6118 gimple_seq_add_stmt_without_update (seq
, stmt
);
6123 /* Build the conversion (TYPE) OP with a result of type TYPE
6124 with location LOC if such conversion is neccesary in GIMPLE,
6125 simplifying it first.
6126 Returns the built expression value and appends
6127 statements possibly defining it to SEQ. */
6130 gimple_convert (gimple_seq
*seq
, location_t loc
, tree type
, tree op
)
6132 if (useless_type_conversion_p (type
, TREE_TYPE (op
)))
6134 return gimple_build (seq
, loc
, NOP_EXPR
, type
, op
);
6137 /* Build the conversion (ptrofftype) OP with a result of a type
6138 compatible with ptrofftype with location LOC if such conversion
6139 is neccesary in GIMPLE, simplifying it first.
6140 Returns the built expression value and appends
6141 statements possibly defining it to SEQ. */
6144 gimple_convert_to_ptrofftype (gimple_seq
*seq
, location_t loc
, tree op
)
6146 if (ptrofftype_p (TREE_TYPE (op
)))
6148 return gimple_convert (seq
, loc
, sizetype
, op
);
6151 /* Return true if the result of assignment STMT is known to be non-negative.
6152 If the return value is based on the assumption that signed overflow is
6153 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6154 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6157 gimple_assign_nonnegative_warnv_p (gimple
*stmt
, bool *strict_overflow_p
,
6160 enum tree_code code
= gimple_assign_rhs_code (stmt
);
6161 switch (get_gimple_rhs_class (code
))
6163 case GIMPLE_UNARY_RHS
:
6164 return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt
),
6165 gimple_expr_type (stmt
),
6166 gimple_assign_rhs1 (stmt
),
6167 strict_overflow_p
, depth
);
6168 case GIMPLE_BINARY_RHS
:
6169 return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt
),
6170 gimple_expr_type (stmt
),
6171 gimple_assign_rhs1 (stmt
),
6172 gimple_assign_rhs2 (stmt
),
6173 strict_overflow_p
, depth
);
6174 case GIMPLE_TERNARY_RHS
:
6176 case GIMPLE_SINGLE_RHS
:
6177 return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt
),
6178 strict_overflow_p
, depth
);
6179 case GIMPLE_INVALID_RHS
:
6185 /* Return true if return value of call STMT is known to be non-negative.
6186 If the return value is based on the assumption that signed overflow is
6187 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6188 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6191 gimple_call_nonnegative_warnv_p (gimple
*stmt
, bool *strict_overflow_p
,
6194 tree arg0
= gimple_call_num_args (stmt
) > 0 ?
6195 gimple_call_arg (stmt
, 0) : NULL_TREE
;
6196 tree arg1
= gimple_call_num_args (stmt
) > 1 ?
6197 gimple_call_arg (stmt
, 1) : NULL_TREE
;
6199 return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt
),
6200 gimple_call_fndecl (stmt
),
6203 strict_overflow_p
, depth
);
6206 /* Return true if return value of call STMT is known to be non-negative.
6207 If the return value is based on the assumption that signed overflow is
6208 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6209 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6212 gimple_phi_nonnegative_warnv_p (gimple
*stmt
, bool *strict_overflow_p
,
6215 for (unsigned i
= 0; i
< gimple_phi_num_args (stmt
); ++i
)
6217 tree arg
= gimple_phi_arg_def (stmt
, i
);
6218 if (!tree_single_nonnegative_warnv_p (arg
, strict_overflow_p
, depth
+ 1))
6224 /* Return true if STMT is known to compute a non-negative value.
6225 If the return value is based on the assumption that signed overflow is
6226 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
6227 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
6230 gimple_stmt_nonnegative_warnv_p (gimple
*stmt
, bool *strict_overflow_p
,
6233 switch (gimple_code (stmt
))
6236 return gimple_assign_nonnegative_warnv_p (stmt
, strict_overflow_p
,
6239 return gimple_call_nonnegative_warnv_p (stmt
, strict_overflow_p
,
6242 return gimple_phi_nonnegative_warnv_p (stmt
, strict_overflow_p
,
6249 /* Return true if the floating-point value computed by assignment STMT
6250 is known to have an integer value. We also allow +Inf, -Inf and NaN
6251 to be considered integer values.
6253 DEPTH is the current nesting depth of the query. */
6256 gimple_assign_integer_valued_real_p (gimple
*stmt
, int depth
)
6258 enum tree_code code
= gimple_assign_rhs_code (stmt
);
6259 switch (get_gimple_rhs_class (code
))
6261 case GIMPLE_UNARY_RHS
:
6262 return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt
),
6263 gimple_assign_rhs1 (stmt
), depth
);
6264 case GIMPLE_BINARY_RHS
:
6265 return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt
),
6266 gimple_assign_rhs1 (stmt
),
6267 gimple_assign_rhs2 (stmt
), depth
);
6268 case GIMPLE_TERNARY_RHS
:
6270 case GIMPLE_SINGLE_RHS
:
6271 return integer_valued_real_single_p (gimple_assign_rhs1 (stmt
), depth
);
6272 case GIMPLE_INVALID_RHS
:
6278 /* Return true if the floating-point value computed by call STMT is known
6279 to have an integer value. We also allow +Inf, -Inf and NaN to be
6280 considered integer values.
6282 DEPTH is the current nesting depth of the query. */
6285 gimple_call_integer_valued_real_p (gimple
*stmt
, int depth
)
6287 tree arg0
= (gimple_call_num_args (stmt
) > 0
6288 ? gimple_call_arg (stmt
, 0)
6290 tree arg1
= (gimple_call_num_args (stmt
) > 1
6291 ? gimple_call_arg (stmt
, 1)
6293 return integer_valued_real_call_p (gimple_call_fndecl (stmt
),
6297 /* Return true if the floating-point result of phi STMT is known to have
6298 an integer value. We also allow +Inf, -Inf and NaN to be considered
6301 DEPTH is the current nesting depth of the query. */
6304 gimple_phi_integer_valued_real_p (gimple
*stmt
, int depth
)
6306 for (unsigned i
= 0; i
< gimple_phi_num_args (stmt
); ++i
)
6308 tree arg
= gimple_phi_arg_def (stmt
, i
);
6309 if (!integer_valued_real_single_p (arg
, depth
+ 1))
6315 /* Return true if the floating-point value computed by STMT is known
6316 to have an integer value. We also allow +Inf, -Inf and NaN to be
6317 considered integer values.
6319 DEPTH is the current nesting depth of the query. */
6322 gimple_stmt_integer_valued_real_p (gimple
*stmt
, int depth
)
6324 switch (gimple_code (stmt
))
6327 return gimple_assign_integer_valued_real_p (stmt
, depth
);
6329 return gimple_call_integer_valued_real_p (stmt
, depth
);
6331 return gimple_phi_integer_valued_real_p (stmt
, depth
);