/* Statement simplification on GIMPLE.
   Copyright (C) 2010-2017 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.c.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "ssa.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "gimple-fold.h"
#include "gimple-iterator.h"
#include "tree-into-ssa.h"
#include "tree-ssa-propagate.h"
#include "ipa-utils.h"
#include "tree-ssa-address.h"
#include "langhooks.h"
#include "gimplify-me.h"
#include "gimple-match.h"
#include "gomp-constants.h"
#include "optabs-query.h"
#include "omp-general.h"
#include "fold-const-call.h"
#include "stringpool.h"
/* Return true when DECL can be referenced from current unit.
   FROM_DECL (if non-null) specifies the constructor of the variable DECL
   was taken from.
   We can get declarations that are not possible to reference for various
   reasons:

     1) When analyzing C++ virtual tables.
	C++ virtual tables do have known constructors even
	when they are keyed to other compilation unit.
	Those tables can contain pointers to methods and vars
	in other units.  Those methods have both STATIC and EXTERNAL
	set.
     2) In WHOPR mode devirtualization might lead to reference
	to method that was partitioned elsewhere.
	In this case we have static VAR_DECL or FUNCTION_DECL
	that has no corresponding callgraph/varpool node
	declaring the body.
     3) COMDAT functions referred by external vtables that
	we devirtualize only during final compilation stage.
	At this time we already decided that we will not output
	the function body and thus we can't reference the symbol
	directly.  */

static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  if (DECL_ABSTRACT_P (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || !VAR_OR_FUNCTION_DECL_P (decl))
    return true;

  /* Static objects can be referred only if they were not optimized out yet.  */
  if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
    {
      /* Before we start optimizing unreachable code we can be sure all
	 static objects are defined.  */
      if (symtab->function_flags_ready)
	return true;
      snode = symtab_node::get (decl);
      if (!snode || !snode->definition)
	return false;
      node = dyn_cast <cgraph_node *> (snode);
      return !node || !node->global.inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from initializer of
     external var or var that has been optimized out.  */
  if (!from_decl
      || !VAR_P (from_decl)
      || (!DECL_EXTERNAL (from_decl)
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->definition)
      || (flag_ltrans
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->in_other_partition))
    return true;
  /* We are folding reference from external vtable.  The vtable may refer
     to a symbol keyed to other compilation unit.  The other compilation
     unit may be in separate DSO and the symbol may be hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
    return false;
  /* When a function is public, we can always introduce a new reference.
     Exceptions are the COMDAT functions where introducing a direct
     reference implies the need to include the function body in the
     current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We have COMDAT.  We are going to check if we still have definition
     or if the definition is going to be output in other partition.
     Bypass this when gimplifying; all needed functions will be produced.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting to not necessarily give up because the copy will be
     output elsewhere when corresponding vtable is output.
     This is however not possible - the ABI specifies that COMDATs are output
     in units where they are used and when the other unit was compiled with LTO
     it is possible that vtable was kept public while the function itself
     was privatized.  */
  if (!symtab->function_flags_ready)
    return true;

  snode = symtab_node::get (decl);
  if (!snode
      || ((!snode->definition || DECL_EXTERNAL (decl))
	  && (!snode->in_other_partition
	      || (!snode->forced_by_abi && !snode->force_output))))
    return false;
  node = dyn_cast <cgraph_node *> (snode);
  return !node || !node->global.inlined_to;
}
/* Create a temporary for TYPE for a statement STMT.  If the current function
   is in SSA form, a SSA name is created.  Otherwise a temporary register
   is made.  */

static tree
create_tmp_reg_or_ssa_name (tree type, gimple *stmt = NULL)
{
  if (gimple_in_ssa_p (cfun))
    return make_ssa_name (type, stmt);
  else
    return create_tmp_reg (type);
}
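
/* A typical use, sketched here for illustration (not part of the
   original source): build a statement with a NULL lhs first, then
   create the lhs with the statement as its definition:

     gassign *g = gimple_build_assign (NULL_TREE, rhs);
     tree lhs = create_tmp_reg_or_ssa_name (TREE_TYPE (rhs), g);
     gimple_assign_set_lhs (g, lhs);

   This keeps the same folding code usable both before and after the
   function has been rewritten into SSA form.  */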
/* CVAL is value taken from DECL_INITIAL of variable.  Try to transform it into
   acceptable form for is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specifies the variable whose constructor
   contains CVAL.  */

tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  tree orig_cval = cval;
  STRIP_NOPS (cval);
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
	cval = build1_loc (EXPR_LOCATION (cval),
			   ADDR_EXPR, TREE_TYPE (ptr),
			   fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
					ptr,
					fold_convert (ptr_type_node,
						      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
	{
	  base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
	  if (base)
	    TREE_OPERAND (cval, 0) = base;
	}
      else
	base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
	return NULL_TREE;

      if (VAR_OR_FUNCTION_DECL_P (base)
	  && !can_refer_decl_in_current_unit_p (base, from_decl))
	return NULL_TREE;
      if (TREE_TYPE (base) == error_mark_node)
	return NULL_TREE;
      if (VAR_P (base))
	TREE_ADDRESSABLE (base) = 1;
      else if (TREE_CODE (base) == FUNCTION_DECL)
	{
	  /* Make sure we create a cgraph node for functions we'll reference.
	     They can be non-existent if the reference comes from an entry
	     of an external vtable for example.  */
	  cgraph_node::get_create (base);
	}
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  if (TREE_OVERFLOW_P (cval))
    return drop_tree_overflow (cval);
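  return orig_cval;
}

/* Illustrative example (not part of the original source): a DECL_INITIAL
   value such as

     (char *) &var + 4		(a POINTER_PLUS_EXPR)

   is rewritten above into the equivalent &MEM[(char *)&var + 4]
   ADDR_EXPR, a form that is_gimple_min_invariant accepts.  */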
/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

tree
get_symbol_constant_value (tree sym)
{
  tree val = ctor_for_folding (sym);
  if (val != error_mark_node)
    {
      if (val)
	{
	  val = canonicalize_constructor_val (unshare_expr (val), sym);
	  if (val && is_gimple_min_invariant (val))
	    return val;
	  else
	    return NULL_TREE;
	}
      /* Variables declared 'const' without an initializer
	 have zero as the initializer if they may not be
	 overridden at link or run time.  */
      if (!val
	  && is_gimple_reg_type (TREE_TYPE (sym)))
	return build_zero_cst (TREE_TYPE (sym));
    }

  return NULL_TREE;
}
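
/* Illustrative example (not part of the original source): given

     static const int x = 42;

   a use of 'x' folds to 42 via ctor_for_folding; a 'const' variable
   with no initializer that cannot be overridden at link or run time
   folds to 0 via build_zero_cst.  */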
/* Subroutine of fold_stmt.  We perform several simplifications of the
   memory reference tree EXPR and make sure to re-gimplify them properly
   after propagation of constant addresses.  IS_LHS is true if the
   reference is supposed to be an lvalue.  */

static tree
maybe_fold_reference (tree expr, bool is_lhs)
{
  tree result;

  if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
       || TREE_CODE (expr) == REALPART_EXPR
       || TREE_CODE (expr) == IMAGPART_EXPR)
      && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_unary_loc (EXPR_LOCATION (expr),
			   TREE_CODE (expr),
			   TREE_TYPE (expr),
			   TREE_OPERAND (expr, 0));
  else if (TREE_CODE (expr) == BIT_FIELD_REF
	   && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_ternary_loc (EXPR_LOCATION (expr),
			     TREE_CODE (expr),
			     TREE_TYPE (expr),
			     TREE_OPERAND (expr, 0),
			     TREE_OPERAND (expr, 1),
			     TREE_OPERAND (expr, 2));

  if (!is_lhs
      && (result = fold_const_aggregate_ref (expr))
      && is_gimple_min_invariant (result))
    return result;

  return NULL_TREE;
}

/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple *stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	if (TREE_CLOBBER_P (rhs))
	  return NULL_TREE;

	if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs, false);

	else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
	  {
	    tree val = OBJ_TYPE_REF_EXPR (rhs);
	    if (is_gimple_min_invariant (val))
	      return val;
	    else if (flag_devirtualize && virtual_method_call_p (rhs))
	      {
		bool final;
		vec <cgraph_node *>targets
		  = possible_polymorphic_call_targets (rhs, stmt, &final);
		if (final && targets.length () <= 1 && dbg_cnt (devirt))
		  {
		    if (dump_enabled_p ())
		      {
			location_t loc = gimple_location_safe (stmt);
			dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
					 "resolving virtual function address "
					 "reference to function %s\n",
					 targets.length () == 1
					 ? targets[0]->name ()
					 : "NULL");
		      }
		    if (targets.length () == 1)
		      {
			val = fold_convert (TREE_TYPE (val),
					    build_fold_addr_expr_loc
					      (loc, targets[0]->decl));
			STRIP_USELESS_TYPE_CONVERSION (val);
		      }
		    else
		      /* We cannot use __builtin_unreachable here because it
			 cannot have its address taken.  */
		      val = build_int_cst (TREE_TYPE (val), 0);
		    return val;
		  }
	      }
	  }

	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    tree ref = TREE_OPERAND (rhs, 0);
	    tree tem = maybe_fold_reference (ref, true);
	    if (tem
		&& TREE_CODE (tem) == MEM_REF
		&& integer_zerop (TREE_OPERAND (tem, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
	    else if (tem)
	      result = fold_convert (TREE_TYPE (rhs),
				     build_fold_addr_expr_loc (loc, tem));
	    else if (TREE_CODE (ref) == MEM_REF
		     && integer_zerop (TREE_OPERAND (ref, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));

	    if (result)
	      {
		/* Strip away useless type conversions.  Both the
		   NON_LVALUE_EXPR that may have been added by fold, and
		   "useless" type conversions that might now be apparent
		   due to propagation.  */
		STRIP_USELESS_TYPE_CONVERSION (result);

		if (result != rhs && valid_gimple_rhs_p (result))
		  return result;
	      }
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (! CONSTANT_CLASS_P (val))
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs))
	  return get_symbol_constant_value (rhs);
      }
      break;

    case GIMPLE_UNARY_RHS:
      break;

    case GIMPLE_BINARY_RHS:
      break;

    case GIMPLE_TERNARY_RHS:
      result = fold_ternary_loc (loc, subcode,
				 TREE_TYPE (gimple_assign_lhs (stmt)),
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 gimple_assign_rhs3 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
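
/* Illustrative devirtualization (not part of the original source): when
   possible_polymorphic_call_targets proves exactly one target, a load
   of a virtual function address through an OBJ_TYPE_REF is folded to a
   constant address such as &Derived::foo; with zero possible targets
   the value is folded to 0, as the use site must be unreachable.  */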
/* Replace a statement at *SI_P with a sequence of statements in STMTS,
   adjusting the replacement stmts location and virtual operands.
   If the statement has a lhs the last stmt in the sequence is expected
   to assign to that lhs.  */

static void
gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
{
  gimple *stmt = gsi_stmt (*si_p);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  gimple *laststore = NULL;
  for (gimple_stmt_iterator i = gsi_last (stmts);
       !gsi_end_p (i); gsi_prev (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      if ((gimple_assign_single_p (new_stmt)
	   && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	  || (is_gimple_call (new_stmt)
	      && (gimple_call_flags (new_stmt)
		  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
	{
	  tree vdef;
	  if (!laststore)
	    vdef = gimple_vdef (stmt);
	  else
	    vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
	  gimple_set_vdef (new_stmt, vdef);
	  if (vdef && TREE_CODE (vdef) == SSA_NAME)
	    SSA_NAME_DEF_STMT (vdef) = new_stmt;
	  laststore = new_stmt;
	}
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  tree reaching_vuse = gimple_vuse (stmt);
  for (gimple_stmt_iterator i = gsi_start (stmts);
       !gsi_end_p (i); gsi_next (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with exact SSA
	 name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
	gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
	reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
	  && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}
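
/* Illustrative virtual-operand threading (not part of the original
   source): replacing a call carrying .MEM_3 = VDEF <.MEM_1> by a
   load/store pair yields

     # VUSE <.MEM_1>
     tmp_4 = *src_6;
     # .MEM_3 = VDEF <.MEM_1>
     *dst_7 = tmp_4;

   The last store keeps the original VDEF, so no statement downstream
   of the replacement needs updating.  */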
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple *stmt, *new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  push_gimplify_context (gimple_in_ssa_p (cfun));

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  pop_gimplify_context (NULL);
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  gsi_replace (si_p, gimple_build_nop (), false);
	  return;
	}
    }
  else
    {
      tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
				       GSI_CONTINUE_LINKING);
    }

  pop_gimplify_context (NULL);

  gsi_replace_with_seq_vops (si_p, stmts);
}
/* Replace the call at *GSI with the gimple value VAL.  */

static void
replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  gimple *repl;
  if (lhs)
    {
      if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
	val = fold_convert (TREE_TYPE (lhs), val);
      repl = gimple_build_assign (lhs, val);
    }
  else
    repl = gimple_build_nop ();
  tree vdef = gimple_vdef (stmt);
  if (vdef && TREE_CODE (vdef) == SSA_NAME)
    {
      unlink_stmt_vdef (stmt);
      release_ssa_name (vdef);
    }
  gsi_replace (gsi, repl, false);
}
/* Replace the call at *GSI with the new call REPL and fold that
   afterwards.  */

static void
replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
{
  gimple *stmt = gsi_stmt (*gsi);
  gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
  gimple_set_location (repl, gimple_location (stmt));
  if (gimple_vdef (stmt)
      && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
    {
      gimple_set_vdef (repl, gimple_vdef (stmt));
      SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
    }
  if (gimple_vuse (stmt))
    gimple_set_vuse (repl, gimple_vuse (stmt));
  gsi_replace (gsi, repl, false);
  fold_stmt (gsi);
}
/* Return true if VAR is a VAR_DECL or a component thereof.  */

static bool
var_decl_component_p (tree var)
{
  tree inner = var;
  while (handled_component_p (inner))
    inner = TREE_OPERAND (inner, 0);
  return SSA_VAR_P (inner);
}
/* If the SIZE argument representing the size of an object is in a range
   of values of which exactly one is valid (and that is zero), return
   true, otherwise false.  */

static bool
size_must_be_zero_p (tree size)
{
  if (integer_zerop (size))
    return true;

  if (TREE_CODE (size) != SSA_NAME)
    return false;

  wide_int min, max;
  enum value_range_type rtype = get_range_info (size, &min, &max);
  if (rtype != VR_ANTI_RANGE)
    return false;

  tree type = TREE_TYPE (size);
  int prec = TYPE_PRECISION (type);

  wide_int wone = wi::one (prec);

  /* Compute the value of SSIZE_MAX, the largest positive value that
     can be stored in ssize_t, the signed counterpart of size_t.  */
  wide_int ssize_max = wi::lshift (wi::one (prec), prec - 1) - 1;

  return wi::eq_p (min, wone) && wi::geu_p (max, ssize_max);
}
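
/* Illustrative case (not part of the original source): if VRP recorded
   the anti-range ~[1, SSIZE_MAX] for a size_t value N, then N is either
   zero or larger than the largest valid object size, so a call like
   memcpy (d, s, N) may be folded as if N were zero.  */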
/* Fold function call to builtin mem{{,p}cpy,move}.  Return
   false if no simplification can be made.
   If ENDP is 0, return DEST (like memcpy).
   If ENDP is 1, return DEST+LEN (like mempcpy).
   If ENDP is 2, return DEST+LEN-1 (like stpcpy).
   If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
   (memmove).  */

static bool
gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
			       tree dest, tree src, int endp)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  tree len = gimple_call_arg (stmt, 2);
  tree destvar, srcvar;
  location_t loc = gimple_location (stmt);

  /* If the LEN parameter is a constant zero or in range where
     the only valid value is zero, return DEST.  */
  if (size_must_be_zero_p (len))
    {
      gimple *repl;
      if (gimple_call_lhs (stmt))
	repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
      else
	repl = gimple_build_nop ();
      tree vdef = gimple_vdef (stmt);
      if (vdef && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
      gsi_replace (gsi, repl, false);
      return true;
    }

  /* If SRC and DEST are the same (and not volatile), return
     DEST{,+LEN,+LEN-1}.  */
  if (operand_equal_p (src, dest, 0))
    {
      unlink_stmt_vdef (stmt);
      if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	release_ssa_name (gimple_vdef (stmt));
      if (!lhs)
	{
	  gsi_replace (gsi, gimple_build_nop (), false);
	  return true;
	}
      goto done;
    }
  else
    {
      tree srctype, desttype;
      unsigned int src_align, dest_align;
      tree off0;

      /* Inlining of memcpy/memmove may cause bounds to be lost (if we copy
	 pointers as wide integer) and also may result in huge function
	 size because of inlined bounds copy.  Thus don't inline for
	 functions we want to instrument.  */
      if (flag_check_pointer_bounds
	  && chkp_instrumentable_p (cfun->decl)
	  /* Even if data may contain pointers we can inline if copy
	     less than a pointer size.  */
	  && (!tree_fits_uhwi_p (len)
	      || compare_tree_int (len, POINTER_SIZE_UNITS) >= 0))
	return false;

      /* Build accesses at offset zero with a ref-all character type.  */
      off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
							 ptr_mode, true), 0);

      /* If we can perform the copy efficiently with first doing all loads
	 and then all stores inline it that way.  Currently efficiently
	 means that we can load all the memory into a single integer
	 register which is what MOVE_MAX gives us.  */
      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (tree_fits_uhwi_p (len)
	  && compare_tree_int (len, MOVE_MAX) <= 0
	  /* ???  Don't transform copies from strings with known length this
	     confuses the tree-ssa-strlen.c.  This doesn't handle
	     the case in gcc.dg/strlenopt-8.c which is XFAILed for that
	     reason.  */
	  && !c_strlen (src, 2))
	{
	  unsigned ilen = tree_to_uhwi (len);
	  if (pow2p_hwi (ilen))
	    {
	      scalar_int_mode mode;
	      tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
	      if (type
		  && is_a <scalar_int_mode> (TYPE_MODE (type), &mode)
		  && GET_MODE_SIZE (mode) * BITS_PER_UNIT == ilen * 8
		  /* If the destination pointer is not aligned we must be able
		     to emit an unaligned store.  */
		  && (dest_align >= GET_MODE_ALIGNMENT (mode)
		      || !targetm.slow_unaligned_access (mode, dest_align)
		      || (optab_handler (movmisalign_optab, mode)
			  != CODE_FOR_nothing)))
		{
		  tree srctype = type;
		  tree desttype = type;
		  if (src_align < GET_MODE_ALIGNMENT (mode))
		    srctype = build_aligned_type (type, src_align);
		  tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
		  tree tem = fold_const_aggregate_ref (srcmem);
		  if (tem)
		    srcmem = tem;
		  else if (src_align < GET_MODE_ALIGNMENT (mode)
			   && targetm.slow_unaligned_access (mode, src_align)
			   && (optab_handler (movmisalign_optab, mode)
			       == CODE_FOR_nothing))
		    srcmem = NULL_TREE;
		  if (srcmem)
		    {
		      gimple *new_stmt;
		      if (is_gimple_reg_type (TREE_TYPE (srcmem)))
			{
			  new_stmt = gimple_build_assign (NULL_TREE, srcmem);
			  srcmem
			    = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem),
							  new_stmt);
			  gimple_assign_set_lhs (new_stmt, srcmem);
			  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
			}
		      if (dest_align < GET_MODE_ALIGNMENT (mode))
			desttype = build_aligned_type (type, dest_align);
		      new_stmt
			= gimple_build_assign (fold_build2 (MEM_REF, desttype,
							    dest, off0),
					       srcmem);
		      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
		      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
		      if (gimple_vdef (new_stmt)
			  && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
			SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
		      if (!lhs)
			{
			  gsi_replace (gsi, new_stmt, false);
			  return true;
			}
		      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
		      goto done;
		    }
		}
	    }
	}

      if (endp == 3)
	{
	  /* Both DEST and SRC must be pointer types.
	     ??? This is what old code did.  Is the testing for pointer types
	     really mandatory?

	     If either SRC is readonly or length is 1, we can use memcpy.  */
	  if (!dest_align || !src_align)
	    return false;
	  if (readonly_data_expr (src)
	      || (tree_fits_uhwi_p (len)
		  && (MIN (src_align, dest_align) / BITS_PER_UNIT
		      >= tree_to_uhwi (len))))
	    {
	      tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
	      if (!fn)
		return false;
	      gimple_call_set_fndecl (stmt, fn);
	      gimple_call_set_arg (stmt, 0, dest);
	      gimple_call_set_arg (stmt, 1, src);
	      fold_stmt (gsi);
	      return true;
	    }

	  /* If *src and *dest can't overlap, optimize into memcpy as well.  */
	  if (TREE_CODE (src) == ADDR_EXPR
	      && TREE_CODE (dest) == ADDR_EXPR)
	    {
	      tree src_base, dest_base, fn;
	      HOST_WIDE_INT src_offset = 0, dest_offset = 0;
	      HOST_WIDE_INT maxsize;

	      srcvar = TREE_OPERAND (src, 0);
	      src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
	      if (src_base == NULL)
		src_base = srcvar;
	      destvar = TREE_OPERAND (dest, 0);
	      dest_base = get_addr_base_and_unit_offset (destvar,
							 &dest_offset);
	      if (dest_base == NULL)
		dest_base = destvar;
	      if (tree_fits_uhwi_p (len))
		maxsize = tree_to_uhwi (len);
	      else
		maxsize = -1;
	      if (SSA_VAR_P (src_base)
		  && SSA_VAR_P (dest_base))
		{
		  if (operand_equal_p (src_base, dest_base, 0)
		      && ranges_overlap_p (src_offset, maxsize,
					   dest_offset, maxsize))
		    return false;
		}
	      else if (TREE_CODE (src_base) == MEM_REF
		       && TREE_CODE (dest_base) == MEM_REF)
		{
		  if (! operand_equal_p (TREE_OPERAND (src_base, 0),
					 TREE_OPERAND (dest_base, 0), 0))
		    return false;
		  offset_int off = mem_ref_offset (src_base) + src_offset;
		  if (!wi::fits_shwi_p (off))
		    return false;
		  src_offset = off.to_shwi ();

		  off = mem_ref_offset (dest_base) + dest_offset;
		  if (!wi::fits_shwi_p (off))
		    return false;
		  dest_offset = off.to_shwi ();
		  if (ranges_overlap_p (src_offset, maxsize,
					dest_offset, maxsize))
		    return false;
		}
	      else
		return false;

	      fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
	      if (!fn)
		return false;
	      gimple_call_set_fndecl (stmt, fn);
	      gimple_call_set_arg (stmt, 0, dest);
	      gimple_call_set_arg (stmt, 1, src);
	      fold_stmt (gsi);
	      return true;
	    }

	  /* If the destination and source do not alias optimize into
	     memcpy as well.  */
	  if ((is_gimple_min_invariant (dest)
	       || TREE_CODE (dest) == SSA_NAME)
	      && (is_gimple_min_invariant (src)
		  || TREE_CODE (src) == SSA_NAME))
	    {
	      ao_ref destr, srcr;
	      ao_ref_init_from_ptr_and_size (&destr, dest, len);
	      ao_ref_init_from_ptr_and_size (&srcr, src, len);
	      if (!refs_may_alias_p_1 (&destr, &srcr, false))
		{
		  tree fn;
		  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
		  if (!fn)
		    return false;
		  gimple_call_set_fndecl (stmt, fn);
		  gimple_call_set_arg (stmt, 0, dest);
		  gimple_call_set_arg (stmt, 1, src);
		  fold_stmt (gsi);
		  return true;
		}
	    }

	  return false;
	}

      if (!tree_fits_shwi_p (len))
	return false;
      /* FIXME:
	 This logic loses for arguments like (type *)malloc (sizeof (type)),
	 since we strip the casts of up to VOID return value from malloc.
	 Perhaps we ought to inherit type from non-VOID argument here?  */
      STRIP_NOPS (src);
      STRIP_NOPS (dest);
      if (!POINTER_TYPE_P (TREE_TYPE (src))
	  || !POINTER_TYPE_P (TREE_TYPE (dest)))
	return false;
      /* In the following try to find a type that is most natural to be
	 used for the memcpy source and destination and that allows
	 the most optimization when memcpy is turned into a plain assignment
	 using that type.  In theory we could always use a char[len] type
	 but that only gains us that the destination and source possibly
	 no longer will have their address taken.  */
      /* As we fold (void *)(p + CST) to (void *)p + CST undo this here.  */
      if (TREE_CODE (src) == POINTER_PLUS_EXPR)
	{
	  tree tem = TREE_OPERAND (src, 0);
	  STRIP_NOPS (tem);
	  if (tem != TREE_OPERAND (src, 0))
	    src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
	}
      if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
	{
	  tree tem = TREE_OPERAND (dest, 0);
	  STRIP_NOPS (tem);
	  if (tem != TREE_OPERAND (dest, 0))
	    dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
	}
      srctype = TREE_TYPE (TREE_TYPE (src));
      if (TREE_CODE (srctype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
	{
	  srctype = TREE_TYPE (srctype);
	  STRIP_NOPS (src);
	  src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
	}
      desttype = TREE_TYPE (TREE_TYPE (dest));
      if (TREE_CODE (desttype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
	{
	  desttype = TREE_TYPE (desttype);
	  STRIP_NOPS (dest);
	  dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
	}
      if (TREE_ADDRESSABLE (srctype)
	  || TREE_ADDRESSABLE (desttype))
	return false;

      /* Make sure we are not copying using a floating-point mode or
	 a type whose size possibly does not match its precision.  */
      if (FLOAT_MODE_P (TYPE_MODE (desttype))
	  || TREE_CODE (desttype) == BOOLEAN_TYPE
	  || TREE_CODE (desttype) == ENUMERAL_TYPE)
	desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
      if (FLOAT_MODE_P (TYPE_MODE (srctype))
	  || TREE_CODE (srctype) == BOOLEAN_TYPE
	  || TREE_CODE (srctype) == ENUMERAL_TYPE)
	srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
      if (!srctype)
	srctype = desttype;
      if (!desttype)
	desttype = srctype;
      if (!srctype)
	return false;

      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (dest_align < TYPE_ALIGN (desttype)
	  || src_align < TYPE_ALIGN (srctype))
	return false;

      destvar = dest;
      STRIP_NOPS (destvar);
      if (TREE_CODE (destvar) == ADDR_EXPR
	  && var_decl_component_p (TREE_OPERAND (destvar, 0))
	  && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
	destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
      else
	destvar = NULL_TREE;

      srcvar = src;
      STRIP_NOPS (srcvar);
      if (TREE_CODE (srcvar) == ADDR_EXPR
	  && var_decl_component_p (TREE_OPERAND (srcvar, 0))
	  && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
	{
	  if (!destvar
	      || src_align >= TYPE_ALIGN (desttype))
	    srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
				  srcvar, off0);
	  else if (!STRICT_ALIGNMENT)
	    {
	      srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
					    src_align);
	      srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
	    }
	  else
	    srcvar = NULL_TREE;
	}
      else
	srcvar = NULL_TREE;

      if (srcvar == NULL_TREE && destvar == NULL_TREE)
	return false;

      if (srcvar == NULL_TREE)
	{
	  STRIP_NOPS (src);
	  if (src_align >= TYPE_ALIGN (desttype))
	    srcvar = fold_build2 (MEM_REF, desttype, src, off0);
	  else
	    {
	      if (STRICT_ALIGNMENT)
		return false;
	      srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
					    src_align);
	      srcvar = fold_build2 (MEM_REF, srctype, src, off0);
	    }
	}
      else if (destvar == NULL_TREE)
	{
	  STRIP_NOPS (dest);
	  if (dest_align >= TYPE_ALIGN (srctype))
	    destvar = fold_build2 (MEM_REF, srctype, dest, off0);
	  else
	    {
	      if (STRICT_ALIGNMENT)
		return false;
	      desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
					     dest_align);
	      destvar = fold_build2 (MEM_REF, desttype, dest, off0);
	    }
	}

      gimple *new_stmt;
      if (is_gimple_reg_type (TREE_TYPE (srcvar)))
	{
	  tree tem = fold_const_aggregate_ref (srcvar);
	  if (tem)
	    srcvar = tem;
	  if (! is_gimple_min_invariant (srcvar))
	    {
	      new_stmt = gimple_build_assign (NULL_TREE, srcvar);
	      srcvar = create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar),
						   new_stmt);
	      gimple_assign_set_lhs (new_stmt, srcvar);
	      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
	      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
	    }
	}
      new_stmt = gimple_build_assign (destvar, srcvar);
      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
      if (gimple_vdef (new_stmt)
	  && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
	SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
      if (!lhs)
	{
	  gsi_replace (gsi, new_stmt, false);
	  return true;
	}
      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
    }

done:
  gimple_seq stmts = NULL;
  if (endp == 0 || endp == 3)
    len = NULL_TREE;
  else if (endp == 2)
    len = gimple_build (&stmts, loc, MINUS_EXPR, TREE_TYPE (len), len,
			ssize_int (1));
  if (endp == 2 || endp == 1)
    {
      len = gimple_convert_to_ptrofftype (&stmts, loc, len);
      dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
			   TREE_TYPE (dest), dest, len);
    }

  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gimple *repl = gimple_build_assign (lhs, dest);
  gsi_replace (gsi, repl, false);
  return true;
}
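
/* Illustrative folds this function performs (not part of the original
   source): on a target where MOVE_MAX >= 4 and unaligned accesses are
   cheap, memcpy (&d, &s, 4) becomes a single word-sized load/store
   pair; memmove (endp == 3) is rewritten to memcpy once the source and
   destination are proven not to overlap; and for mempcpy (endp == 1)
   the lhs is replaced by dest p+ len.  */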
/* Transform a call to built-in bcmp(a, b, len) at *GSI into one
   to built-in memcmp (a, b, len).  */

static bool
gimple_fold_builtin_bcmp (gimple_stmt_iterator *gsi)
{
  tree fn = builtin_decl_implicit (BUILT_IN_MEMCMP);

  if (!fn)
    return false;

  /* Transform bcmp (a, b, len) into memcmp (a, b, len).  */

  gimple *stmt = gsi_stmt (*gsi);
  tree a = gimple_call_arg (stmt, 0);
  tree b = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  gimple *repl = gimple_build_call (fn, 3, a, b, len);
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}
/* Transform a call to built-in bcopy (src, dest, len) at *GSI into one
   to built-in memmove (dest, src, len).  */

static bool
gimple_fold_builtin_bcopy (gimple_stmt_iterator *gsi)
{
  tree fn = builtin_decl_implicit (BUILT_IN_MEMMOVE);

  if (!fn)
    return false;

  /* bcopy has been removed from POSIX in Issue 7 but Issue 6 specifies
     it's equivalent to memmove (not memcpy).  Transform bcopy (src, dest,
     len) into memmove (dest, src, len).  */

  gimple *stmt = gsi_stmt (*gsi);
  tree src = gimple_call_arg (stmt, 0);
  tree dest = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  gimple_call_set_fntype (as_a <gcall *> (stmt), TREE_TYPE (fn));
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}
/* Transform a call to built-in bzero (dest, len) at *GSI into one
   to built-in memset (dest, 0, len).  */

static bool
gimple_fold_builtin_bzero (gimple_stmt_iterator *gsi)
{
  tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);

  if (!fn)
    return false;

  /* Transform bzero (dest, len) into memset (dest, 0, len).  */

  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree len = gimple_call_arg (stmt, 1);

  gimple_seq seq = NULL;
  gimple *repl = gimple_build_call (fn, 3, dest, integer_zero_node, len);
  gimple_seq_add_stmt_without_update (&seq, repl);
  gsi_replace_with_seq_vops (gsi, seq);
  fold_stmt (gsi);

  return true;
}
/* Fold function call to builtin memset or bzero at *GSI setting the
   memory of size LEN to VAL.  Return whether a simplification was made.  */

static bool
gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree etype;
  unsigned HOST_WIDE_INT length, cval;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
      return true;
    }

  if (! tree_fits_uhwi_p (len))
    return false;

  if (TREE_CODE (c) != INTEGER_CST)
    return false;

  tree dest = gimple_call_arg (stmt, 0);
  tree var = dest;
  if (TREE_CODE (var) != ADDR_EXPR)
    return false;

  var = TREE_OPERAND (var, 0);
  if (TREE_THIS_VOLATILE (var))
    return false;

  etype = TREE_TYPE (var);
  if (TREE_CODE (etype) == ARRAY_TYPE)
    etype = TREE_TYPE (etype);

  if (!INTEGRAL_TYPE_P (etype)
      && !POINTER_TYPE_P (etype))
    return false;

  if (! var_decl_component_p (var))
    return false;

  length = tree_to_uhwi (len);
  if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (etype)) != length
      || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
    return false;

  if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
    return false;

  if (integer_zerop (c))
    cval = 0;
  else
    {
      if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
	return false;

      cval = TREE_INT_CST_LOW (c);
      cval &= 0xff;
      cval |= cval << 8;
      cval |= cval << 16;
      cval |= (cval << 31) << 1;
    }

  var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
  gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
  gimple_set_vuse (store, gimple_vuse (stmt));
  tree vdef = gimple_vdef (stmt);
  if (vdef && TREE_CODE (vdef) == SSA_NAME)
    {
      gimple_set_vdef (store, gimple_vdef (stmt));
      SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
    }
  gsi_insert_before (gsi, store, GSI_SAME_STMT);
  if (gimple_call_lhs (stmt))
    {
      gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
      gsi_replace (gsi, asgn, false);
    }
  else
    {
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (gsi);
      gsi_remove (&gsi2, true);
    }

  return true;
}
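
/* Illustrative fold (not part of the original source): for

     int i;
     memset (&i, 0x41, 4);

   the byte is replicated by the shift steps above into 0x41414141 and
   the call becomes the single store i = 0x41414141 (assuming 32-bit
   int and sufficient alignment).  */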
/* Obtain the minimum and maximum string length or minimum and maximum
   value of ARG in LENGTH[0] and LENGTH[1], respectively.
   If ARG is an SSA name variable, follow its use-def chains.  When
   TYPE == 0, if LENGTH[1] is not equal to the length we determine or
   if we are unable to determine the length or value, return false.
   VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be obtained, 1 for maximum string
   length and 2 for maximum value ARG can have.
   When FUZZY is set and the length of a string cannot be determined,
   the function instead considers as the maximum possible length the
   size of a character array it may refer to.
   Set *FLEXP to true if the range of the string lengths has been
   obtained from the upper bound of an array at the end of a struct.
   Such an array may hold a string that's longer than its upper bound
   due to it being used as a poor-man's flexible array member.  */

static bool
get_range_strlen (tree arg, tree length[2], bitmap *visited, int type,
		  bool fuzzy, bool *flexp)
{
  tree var, val;
  gimple *def_stmt;

  /* The minimum and maximum length.  The MAXLEN pointer stays unchanged
     but MINLEN may be cleared during the execution of the function.  */
  tree *minlen = length;
  tree *const maxlen = length + 1;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      if (TREE_CODE (arg) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
	  && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
	{
	  tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_range_strlen (TREE_OPERAND (aop0, 0),
				     length, visited, type, fuzzy, flexp);
	}

      if (type == 2)
	{
	  val = arg;
	  if (TREE_CODE (val) != INTEGER_CST
	      || tree_int_cst_sgn (val) < 0)
	    return false;
	}
      else
	val = c_strlen (arg, 1);

      if (!val && fuzzy)
	{
	  if (TREE_CODE (arg) == ADDR_EXPR)
	    return get_range_strlen (TREE_OPERAND (arg, 0), length,
				     visited, type, fuzzy, flexp);

	  if (TREE_CODE (arg) == COMPONENT_REF
	      && TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1))) == ARRAY_TYPE)
	    {
	      /* Use the type of the member array to determine the upper
		 bound on the length of the array.  This may be overly
		 optimistic if the array itself isn't NUL-terminated and
		 the caller relies on the subsequent member to contain
		 the NUL.
		 Set *FLEXP to true if the array whose bound is being
		 used is at the end of a struct.  */
	      if (array_at_struct_end_p (arg))
		*flexp = true;

	      arg = TREE_OPERAND (arg, 1);
	      val = TYPE_SIZE_UNIT (TREE_TYPE (arg));
	      if (!val || integer_zerop (val))
		return false;
	      val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
				 integer_one_node);
	      /* Set the minimum size to zero since the string in
		 the array could have zero length.  */
	      *minlen = ssize_int (0);
	    }
	}

      if (!val)
	return false;

      if (minlen
	  && (!*minlen
	      || (type > 0
		  && TREE_CODE (*minlen) == INTEGER_CST
		  && TREE_CODE (val) == INTEGER_CST
		  && tree_int_cst_lt (val, *minlen))))
	*minlen = val;

      if (*maxlen)
	{
	  if (type > 0)
	    {
	      if (TREE_CODE (*maxlen) != INTEGER_CST
		  || TREE_CODE (val) != INTEGER_CST)
		return false;

	      if (tree_int_cst_lt (*maxlen, val))
		*maxlen = val;
	      return true;
	    }
	  else if (simple_cst_equal (val, *maxlen) != 1)
	    return false;
	}

      *maxlen = val;
      return true;
    }

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  */
  if (!*visited)
    *visited = BITMAP_ALLOC (NULL);
  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
      case GIMPLE_ASSIGN:
	/* The RHS of the statement defining VAR must either have a
	   constant length or come from another SSA_NAME with a constant
	   length.  */
	if (gimple_assign_single_p (def_stmt)
	    || gimple_assign_unary_nop_p (def_stmt))
	  {
	    tree rhs = gimple_assign_rhs1 (def_stmt);
	    return get_range_strlen (rhs, length, visited, type, fuzzy, flexp);
	  }
	else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
	  {
	    tree op2 = gimple_assign_rhs2 (def_stmt);
	    tree op3 = gimple_assign_rhs3 (def_stmt);
	    return get_range_strlen (op2, length, visited, type, fuzzy, flexp)
	      && get_range_strlen (op3, length, visited, type, fuzzy, flexp);
	  }
	return false;

      case GIMPLE_PHI:
	{
	  /* All the arguments of the PHI node must have the same constant
	     length.  */
	  unsigned i;

	  for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
	    {
	      tree arg = gimple_phi_arg (def_stmt, i)->def;

	      /* If this PHI has itself as an argument, we cannot
		 determine the string length of this argument.  However,
		 if we can find a constant string length for the other
		 PHI args then we can still be sure that this is a
		 constant string length.  So be optimistic and just
		 continue with the next argument.  */
	      if (arg == gimple_phi_result (def_stmt))
		continue;

	      if (!get_range_strlen (arg, length, visited, type, fuzzy, flexp))
		{
		  if (fuzzy)
		    *maxlen = build_all_ones_cst (size_type_node);
		  else
		    return false;
		}
	    }
	}
	return true;

      default:
	return false;
    }
}
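
/* Illustrative query (not part of the original source): for

     const char *p = cond ? "ab" : "wxyz";

   the GIMPLE_PHI case above visits both arguments, so a fuzzy query
   ends with LENGTH[0] == 2 and LENGTH[1] == 4.  */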
/* Determine the minimum and maximum value or string length that ARG
   refers to and store each in the first two elements of MINMAXLEN.
   For expressions that point to strings of unknown lengths that are
   character arrays, use the upper bound of the array as the maximum
   length.  For example, given an expression like 'x ? array : "xyz"'
   and array declared as 'char array[8]', MINMAXLEN[0] will be set
   to 3 and MINMAXLEN[1] to 7, the longest string that could be
   stored in array.
   Return true if the range of the string lengths has been obtained
   from the upper bound of an array at the end of a struct.  Such
   an array may hold a string that's longer than its upper bound
   due to it being used as a poor-man's flexible array member.  */

bool
get_range_strlen (tree arg, tree minmaxlen[2])
{
  bitmap visited = NULL;

  minmaxlen[0] = NULL_TREE;
  minmaxlen[1] = NULL_TREE;

  bool flexarray = false;
  get_range_strlen (arg, minmaxlen, &visited, 1, true, &flexarray);

  if (visited)
    BITMAP_FREE (visited);

  return flexarray;
}
tree
get_maxval_strlen (tree arg, int type)
{
  bitmap visited = NULL;
  tree len[2] = { NULL_TREE, NULL_TREE };

  bool dummy = false;
  if (!get_range_strlen (arg, len, &visited, type, false, &dummy))
    len[1] = NULL_TREE;
  if (visited)
    BITMAP_FREE (visited);

  return len[1];
}
/* Fold function call to builtin strcpy with arguments DEST and SRC.
   If LEN is not NULL, it represents the length of the string to be
   copied.  Return false if no simplification can be made.  */

static bool
gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
			    tree dest, tree src)
{
  location_t loc = gimple_location (gsi_stmt (*gsi));
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (operand_equal_p (src, dest, 0))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (optimize_function_for_size_p (cfun))
    return false;

  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  tree len = get_maxval_strlen (src, 0);
  if (!len)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
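
/* Illustrative fold (not part of the original source): with a constant
   source, strcpy (d, "abc") becomes memcpy (d, "abc", 4), copying the
   terminating NUL as part of the fixed-size block.  */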
/* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
   If SLEN is not NULL, it represents the length of the source string.
   Return false if no simplification can be made.  */

static bool
gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
			     tree dest, tree src, tree len)
{
  location_t loc = gimple_location (gsi_stmt (*gsi));
  tree fn;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  /* We can't compare slen with len as constants below if len is not a
     constant.  */
  if (TREE_CODE (len) != INTEGER_CST)
    return false;

  /* Now, we must be passed a constant src ptr parameter.  */
  tree slen = get_maxval_strlen (src, 0);
  if (!slen || TREE_CODE (slen) != INTEGER_CST)
    return false;

  slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

  /* We do not support simplification of this case, though we do
     support it when expanding trees into RTL.  */
  /* FIXME: generate a call to __builtin_memset.  */
  if (tree_int_cst_lt (slen, len))
    return false;

  /* OK transform into builtin memcpy.  */
  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
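
/* Illustrative fold (not part of the original source): with a constant
   LEN no larger than the source length, strncpy (d, "abcdef", 4)
   becomes memcpy (d, "abcdef", 4); no NUL padding is required because
   LEN does not exceed strlen of the source.  */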
/* Fold function call to builtin strchr or strrchr.
   If both arguments are constant, evaluate and fold the result,
   otherwise simplify str(r)chr (str, 0) into str + strlen (str).
   In general strlen is significantly faster than strchr
   due to being a simpler operation.  */
static bool
gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree str = gimple_call_arg (stmt, 0);
  tree c = gimple_call_arg (stmt, 1);
  location_t loc = gimple_location (stmt);
  const char *p;
  char ch;

  if (!gimple_call_lhs (stmt))
    return false;

  if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
    {
      const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);

      if (p1 == NULL)
	{
	  replace_call_with_value (gsi, integer_zero_node);
	  return true;
	}

      tree len = build_int_cst (size_type_node, p1 - p);
      gimple_seq stmts = NULL;
      gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
					      POINTER_PLUS_EXPR, str, len);
      gimple_seq_add_stmt_without_update (&stmts, new_stmt);
      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  if (!integer_zerop (c))
    return false;

  /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size.  */
  if (is_strrchr && optimize_function_for_size_p (cfun))
    {
      tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);

      if (strchr_fn)
	{
	  gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}

      return false;
    }

  tree len;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);

  if (!strlen_fn)
    return false;

  /* Create newstr = strlen (str).  */
  gimple_seq stmts = NULL;
  gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
  gimple_set_location (new_stmt, loc);
  len = create_tmp_reg_or_ssa_name (size_type_node);
  gimple_call_set_lhs (new_stmt, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);

  /* Create (str p+ strlen (str)).  */
  new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
				  POINTER_PLUS_EXPR, str, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);
  gsi_replace_with_seq_vops (gsi, stmts);

  /* gsi now points at the assignment to the lhs, get a
     stmt iterator to the strlen.
     ???  We can't use gsi_for_stmt as that doesn't work when the
     CFG isn't built yet.  */
  gimple_stmt_iterator gsi2 = *gsi;
  gsi_prev (&gsi2);
  fold_stmt (&gsi2);
  return true;
}
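
/* Illustrative fold (not part of the original source): strchr (s, 0)
   becomes

     len_1 = strlen (s);
     lhs_2 = s p+ len_1;

   i.e. a pointer to the terminating NUL; strrchr (s, 0) folds the same
   way since the first NUL is also the last.  */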
/* Fold function call to builtin strstr.
   If both arguments are constant, evaluate and fold the result,
   additionally fold strstr (x, "") into x and strstr (x, "c")
   into strchr (x, 'c').  */
static bool
gimple_fold_builtin_strstr (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree haystack = gimple_call_arg (stmt, 0);
  tree needle = gimple_call_arg (stmt, 1);
  const char *p, *q;

  if (!gimple_call_lhs (stmt))
    return false;

  q = c_getstr (needle);
  if (q == NULL)
    return false;

  if ((p = c_getstr (haystack)))
    {
      const char *r = strstr (p, q);

      if (r == NULL)
	{
	  replace_call_with_value (gsi, integer_zero_node);
	  return true;
	}

      tree len = build_int_cst (size_type_node, r - p);
      gimple_seq stmts = NULL;
      gimple *new_stmt
	= gimple_build_assign (gimple_call_lhs (stmt), POINTER_PLUS_EXPR,
			       haystack, len);
      gimple_seq_add_stmt_without_update (&stmts, new_stmt);
      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* For strstr (x, "") return x.  */
  if (q[0] == '\0')
    {
      replace_call_with_value (gsi, haystack);
      return true;
    }

  /* Transform strstr (x, "c") into strchr (x, 'c').  */
  if (q[1] == '\0')
    {
      tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
      if (strchr_fn)
	{
	  tree c = build_int_cst (integer_type_node, q[0]);
	  gimple *repl = gimple_build_call (strchr_fn, 2, haystack, c);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  return false;
}
/* Simplify a call to the strcat builtin.  DST and SRC are the arguments
   to the call.

   Return false if no simplification was possible.  Otherwise replace
   the call with a more efficient equivalent (a constant, a simpler
   expression, or calls to other builtin functions) and return true.  */

static bool
gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);

  const char *p = c_getstr (src);

  /* If the string length is zero, return the dst parameter.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
    return false;

  /* See if we can store by pieces into (dst + strlen(dst)).  */
  tree newdst;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
  tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);

  if (!strlen_fn || !memcpy_fn)
    return false;

  /* If the length of the source string isn't computable don't
     split strcat into strlen and memcpy.  */
  tree len = get_maxval_strlen (src, 0);
  if (! len)
    return false;

  /* Create strlen (dst).  */
  gimple_seq stmts = NULL, stmts2;
  gimple *repl = gimple_build_call (strlen_fn, 1, dst);
  gimple_set_location (repl, loc);
  newdst = create_tmp_reg_or_ssa_name (size_type_node);
  gimple_call_set_lhs (repl, newdst);
  gimple_seq_add_stmt_without_update (&stmts, repl);

  /* Create (dst p+ strlen (dst)).  */
  newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
  newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len,
			build_int_cst (size_type_node, 1));
  len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
  gimple_seq_add_stmt_without_update (&stmts, repl);
  if (gimple_call_lhs (stmt))
    {
      repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      gsi_replace_with_seq_vops (gsi, stmts);
      /* gsi now points at the assignment to the lhs, get a
	 stmt iterator to the memcpy call.
	 ???  We can't use gsi_for_stmt as that doesn't work when the
	 CFG isn't built yet.  */
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (&gsi2);
      fold_stmt (&gsi2);
    }
  else
    {
      gsi_replace_with_seq_vops (gsi, stmts);
      fold_stmt (gsi);
    }
  return true;
}
/* Fold a call to the __strcat_chk builtin FNDECL.  DEST, SRC, and SIZE
   are the arguments to the call.  */

static bool
gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree size = gimple_call_arg (stmt, 2);
  tree fn;
  const char *p;

  p = c_getstr (src);
  /* If the SRC parameter is "", return DEST.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
    return false;

  /* If __builtin_strcat_chk is used, assume strcat is available.  */
  fn = builtin_decl_explicit (BUILT_IN_STRCAT);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 2, dest, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Simplify a call to the strncat builtin.  */

static bool
gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dst = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  const char *p = c_getstr (src);

  /* If the requested length is zero, or the src parameter string
     length is zero, return the dst parameter.  */
  if (integer_zerop (len) || (p && *p == '\0'))
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  /* If the requested len is greater than or equal to the string
     length, call strcat.  */
  if (TREE_CODE (len) == INTEGER_CST && p
      && compare_tree_int (len, strlen (p)) >= 0)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);

      /* If the replacement _DECL isn't initialized, don't do the
	 transformation.  */
      if (!fn)
	return false;

      gcall *repl = gimple_build_call (fn, 2, dst, src);
      replace_call_with_call_and_fold (gsi, repl);
      return true;
    }

  return false;
}
/* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
   LEN, and SIZE.  */

static bool
gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);
  tree size = gimple_call_arg (stmt, 3);
  tree fn;
  const char *p;

  p = c_getstr (src);
  /* If the SRC parameter is "" or if LEN is 0, return DEST.  */
  if ((p && *p == '\0')
      || integer_zerop (len))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  if (! integer_all_onesp (size))
    {
      tree src_len = c_strlen (src, 1);
      if (src_len
	  && tree_fits_uhwi_p (src_len)
	  && tree_fits_uhwi_p (len)
	  && ! tree_int_cst_lt (len, src_len))
	{
	  /* If LEN >= strlen (SRC), optimize into __strcat_chk.  */
	  fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
	  if (!fn)
	    return false;

	  gimple *repl = gimple_build_call (fn, 3, dest, src, size);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
      return false;
    }

  /* If __builtin_strncat_chk is used, assume strncat is available.  */
  fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Build and append gimple statements to STMTS that would load a first
   character of a memory location identified by STR.  LOC is location
   of the statement.  */

static tree
gimple_load_first_char (location_t loc, tree str, gimple_seq *stmts)
{
  tree var;

  tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
  tree cst_uchar_ptr_node
    = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
  tree off0 = build_int_cst (cst_uchar_ptr_node, 0);

  tree temp = fold_build2_loc (loc, MEM_REF, cst_uchar_node, str, off0);
  gassign *stmt = gimple_build_assign (NULL_TREE, temp);
  var = create_tmp_reg_or_ssa_name (cst_uchar_node, stmt);

  gimple_assign_set_lhs (stmt, var);
  gimple_seq_add_stmt_without_update (stmts, stmt);

  return var;
}
/* Fold a call to the str{n}{case}cmp builtin pointed to by the GSI
   iterator.  Return true if simplification was made.  */

static bool
gimple_fold_builtin_string_compare (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree callee = gimple_call_fndecl (stmt);
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);

  tree type = integer_type_node;
  tree str1 = gimple_call_arg (stmt, 0);
  tree str2 = gimple_call_arg (stmt, 1);
  tree lhs = gimple_call_lhs (stmt);
  HOST_WIDE_INT length = -1;

  /* Handle strncmp and strncasecmp functions.  */
  if (gimple_call_num_args (stmt) == 3)
    {
      tree len = gimple_call_arg (stmt, 2);
      if (tree_fits_uhwi_p (len))
        length = tree_to_uhwi (len);
    }

  /* If the LEN parameter is zero, return zero.  */
  if (length == 0)
    {
      replace_call_with_value (gsi, integer_zero_node);
      return true;
    }

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (str1, str2, 0))
    {
      replace_call_with_value (gsi, integer_zero_node);
      return true;
    }

  const char *p1 = c_getstr (str1);
  const char *p2 = c_getstr (str2);

  /* For known strings, return an immediate value.  */
  if (p1 && p2)
    {
      int r = 0;
      bool known_result = false;

      switch (fcode)
        {
        case BUILT_IN_STRCMP:
          {
            r = strcmp (p1, p2);
            known_result = true;
            break;
          }
        case BUILT_IN_STRNCMP:
          {
            if (length == -1)
              break;
            r = strncmp (p1, p2, length);
            known_result = true;
            break;
          }
        /* The only handleable situation is where the strings are equal
           (result 0), which is already covered by the operand_equal_p
           case above.  */
        case BUILT_IN_STRCASECMP:
          break;
        case BUILT_IN_STRNCASECMP:
          {
            if (length == -1)
              break;
            r = strncmp (p1, p2, length);
            if (r == 0)
              known_result = true;
            break;
          }
        default:
          gcc_unreachable ();
        }

      if (known_result)
        {
          replace_call_with_value (gsi, build_cmp_result (type, r));
          return true;
        }
    }

  bool nonzero_length = length >= 1
    || fcode == BUILT_IN_STRCMP
    || fcode == BUILT_IN_STRCASECMP;

  location_t loc = gimple_location (stmt);

  /* If the second arg is "", return *(const unsigned char*)arg1.  */
  if (p2 && *p2 == '\0' && nonzero_length)
    {
      gimple_seq stmts = NULL;
      tree var = gimple_load_first_char (loc, str1, &stmts);
      if (lhs)
        {
          stmt = gimple_build_assign (lhs, NOP_EXPR, var);
          gimple_seq_add_stmt_without_update (&stmts, stmt);
        }

      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* If the first arg is "", return -*(const unsigned char*)arg2.  */
  if (p1 && *p1 == '\0' && nonzero_length)
    {
      gimple_seq stmts = NULL;
      tree var = gimple_load_first_char (loc, str2, &stmts);

      if (lhs)
        {
          tree c = create_tmp_reg_or_ssa_name (integer_type_node);
          stmt = gimple_build_assign (c, NOP_EXPR, var);
          gimple_seq_add_stmt_without_update (&stmts, stmt);

          stmt = gimple_build_assign (lhs, NEGATE_EXPR, c);
          gimple_seq_add_stmt_without_update (&stmts, stmt);
        }

      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* If the len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - *(const unsigned char*)arg2), which
     matches the MINUS_EXPR built below.  */
  if (fcode == BUILT_IN_STRNCMP && length == 1)
    {
      gimple_seq stmts = NULL;
      tree temp1 = gimple_load_first_char (loc, str1, &stmts);
      tree temp2 = gimple_load_first_char (loc, str2, &stmts);

      if (lhs)
        {
          tree c1 = create_tmp_reg_or_ssa_name (integer_type_node);
          gassign *convert1 = gimple_build_assign (c1, NOP_EXPR, temp1);
          gimple_seq_add_stmt_without_update (&stmts, convert1);

          tree c2 = create_tmp_reg_or_ssa_name (integer_type_node);
          gassign *convert2 = gimple_build_assign (c2, NOP_EXPR, temp2);
          gimple_seq_add_stmt_without_update (&stmts, convert2);

          stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2);
          gimple_seq_add_stmt_without_update (&stmts, stmt);
        }

      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  return false;
}
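
/* As a user-level sketch of the folds above (s and t stand for arbitrary
   char pointers; the exact nonzero return values are unspecified):

     strcmp (s, s)      =>  0
     strcmp (s, "")     =>  *(const unsigned char *) s
     strcmp ("", t)     =>  -*(const unsigned char *) t
     strncmp (s, t, 1)  =>  *(const unsigned char *) s
                            - *(const unsigned char *) t  */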
/* Fold a call to the memchr pointed to by the GSI iterator.  */

static bool
gimple_fold_builtin_memchr (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  tree arg1 = gimple_call_arg (stmt, 0);
  tree arg2 = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
      return true;
    }

  char c;
  if (TREE_CODE (arg2) != INTEGER_CST
      || !tree_fits_uhwi_p (len)
      || !target_char_cst_p (arg2, &c))
    return false;

  unsigned HOST_WIDE_INT length = tree_to_uhwi (len);
  unsigned HOST_WIDE_INT string_length;
  const char *p1 = c_getstr (arg1, &string_length);

  if (p1)
    {
      const char *r = (const char *)memchr (p1, c, MIN (length, string_length));
      if (r == NULL)
        {
          if (length <= string_length)
            {
              replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
              return true;
            }
        }
      else
        {
          unsigned HOST_WIDE_INT offset = r - p1;
          gimple_seq stmts = NULL;
          if (lhs != NULL_TREE)
            {
              tree offset_cst = build_int_cst (TREE_TYPE (len), offset);
              gassign *stmt = gimple_build_assign (lhs, POINTER_PLUS_EXPR,
                                                   arg1, offset_cst);
              gimple_seq_add_stmt_without_update (&stmts, stmt);
            }
          else
            gimple_seq_add_stmt_without_update (&stmts,
                                                gimple_build_nop ());

          gsi_replace_with_seq_vops (gsi, stmts);
          return true;
        }
    }

  return false;
}
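
/* For instance, with a constant haystack the folding above yields
   (a sketch; p is an arbitrary pointer):

     memchr ("abcd", 'c', 4)  =>  "abcd" + 2
     memchr ("abcd", 'x', 4)  =>  NULL
     memchr (p, c, 0)         =>  NULL for any p and c  */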
/* Fold a call to the fputs builtin.  ARG0 and ARG1 are the arguments
   to the call.  UNLOCKED is true if this is actually a call to
   fputs_unlocked.  Return true if simplification was made.  */

static bool
gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
                           tree arg0, tree arg1,
                           bool unlocked)
{
  gimple *stmt = gsi_stmt (*gsi);

  /* If we're using an unlocked function, assume the other unlocked
     functions exist explicitly.  */
  tree const fn_fputc = (unlocked
                         ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
                         : builtin_decl_implicit (BUILT_IN_FPUTC));
  tree const fn_fwrite = (unlocked
                          ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
                          : builtin_decl_implicit (BUILT_IN_FWRITE));

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt))
    return false;

  /* Get the length of the string passed to fputs.  If the length
     can't be determined, punt.  */
  tree len = get_maxval_strlen (arg0, 0);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return false;

  switch (compare_tree_int (len, 1))
    {
    case -1: /* length is 0, delete the call entirely.  */
      replace_call_with_value (gsi, integer_zero_node);
      return true;

    case 0: /* length is 1, call fputc.  */
      {
        const char *p = c_getstr (arg0);
        if (p != NULL)
          {
            if (!fn_fputc)
              return false;

            gimple *repl = gimple_build_call (fn_fputc, 2,
                                              build_int_cst
                                              (integer_type_node, p[0]), arg1);
            replace_call_with_call_and_fold (gsi, repl);
            return true;
          }
      }
      /* FALLTHROUGH */
    case 1: /* length is greater than 1, call fwrite.  */
      {
        /* If optimizing for size keep fputs.  */
        if (optimize_function_for_size_p (cfun))
          return false;
        /* New argument list transforming fputs(string, stream) to
           fwrite(string, 1, len, stream).  */
        if (!fn_fwrite)
          return false;

        gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
                                          size_one_node, len, arg1);
        replace_call_with_call_and_fold (gsi, repl);
        return true;
      }
    default:
      gcc_unreachable ();
    }
  return false;
}
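
/* E.g., assuming fp is a valid FILE pointer and the result is unused,
   the length-based dispatch above gives (sketch):

     fputs ("", fp)    =>  call removed
     fputs ("a", fp)   =>  fputc ('a', fp)
     fputs ("ab", fp)  =>  fwrite ("ab", 1, 2, fp), unless optimizing
                           for size  */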
/* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   DEST, SRC, LEN, and SIZE are the arguments to the call.
   FCODE is the BUILT_IN_* code of the builtin.  Return true if
   simplification was made.  */

static bool
gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
                                tree dest, tree src, tree len, tree size,
                                enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST
     (resp. DEST+LEN for __mempcpy_chk).  */
  if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
    {
      if (fcode != BUILT_IN_MEMPCPY_CHK)
        {
          replace_call_with_value (gsi, dest);
          return true;
        }
      else
        {
          gimple_seq stmts = NULL;
          len = gimple_convert_to_ptrofftype (&stmts, loc, len);
          tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
                                    TREE_TYPE (dest), dest, len);
          gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
          replace_call_with_value (gsi, temp);
          return true;
        }
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (len, 2);
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
        {
          /* If LEN is not constant, try MAXLEN too.
             For MAXLEN only allow optimizing into non-_ocs function
             if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
          if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
            {
              if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
                {
                  /* (void) __mempcpy_chk () can be optimized into
                     (void) __memcpy_chk ().  */
                  fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
                  if (!fn)
                    return false;

                  gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
                  replace_call_with_call_and_fold (gsi, repl);
                  return true;
                }
              return false;
            }
        }
      else
        maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
        return false;
    }

  fn = NULL_TREE;
  /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
     mem{cpy,pcpy,move,set} is available.  */
  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
      break;
    case BUILT_IN_MEMPCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
      break;
    case BUILT_IN_MEMMOVE_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
      break;
    case BUILT_IN_MEMSET_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMSET);
      break;
    default:
      break;
    }

  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
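
/* E.g., when the object size is known and large enough, the checked
   variants degrade to the plain calls (sketch):

     __builtin___memcpy_chk (d, s, 32, 64)  =>  memcpy (d, s, 32)
     __builtin___mempcpy_chk (d, d, n, sz)  =>  d + n
     (void) __builtin___mempcpy_chk (...)   =>  (void) __builtin___memcpy_chk (...)  */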
/* Fold a call to the __st[rp]cpy_chk builtin.
   DEST, SRC, and SIZE are the arguments to the call.
   FCODE is the BUILT_IN_* code of the builtin.  Return true if
   simplification was made.  */

static bool
gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
                                tree dest,
                                tree src, tree size,
                                enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree len, fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (src, 1);
  if (! integer_all_onesp (size))
    {
      len = c_strlen (src, 1);
      if (! len || ! tree_fits_uhwi_p (len))
        {
          /* If LEN is not constant, try MAXLEN too.
             For MAXLEN only allow optimizing into non-_ocs function
             if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
          if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
            {
              if (fcode == BUILT_IN_STPCPY_CHK)
                {
                  if (! ignore)
                    return false;

                  /* If return value of __stpcpy_chk is ignored,
                     optimize into __strcpy_chk.  */
                  fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
                  if (!fn)
                    return false;

                  gimple *repl = gimple_build_call (fn, 3, dest, src, size);
                  replace_call_with_call_and_fold (gsi, repl);
                  return true;
                }

              if (! len || TREE_SIDE_EFFECTS (len))
                return false;

              /* If c_strlen returned something, but not a constant,
                 transform __strcpy_chk into __memcpy_chk.  */
              fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
              if (!fn)
                return false;

              gimple_seq stmts = NULL;
              len = gimple_convert (&stmts, loc, size_type_node, len);
              len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
                                  build_int_cst (size_type_node, 1));
              gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
              gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
              replace_call_with_call_and_fold (gsi, repl);
              return true;
            }
        }
      else
        maxlen = len;

      if (! tree_int_cst_lt (maxlen, size))
        return false;
    }

  /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
                              ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 2, dest, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
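
/* E.g. (sketch; sz is the compile-time object size, len the non-constant
   but side-effect-free string length returned by c_strlen):

     __builtin___strcpy_chk (d, "abc", 8)  =>  strcpy (d, "abc")
     __builtin___strcpy_chk (d, s, sz)     =>  __builtin___memcpy_chk
                                               (d, s, len + 1, sz)
     (void) __builtin___stpcpy_chk (...)   =>  (void) __builtin___strcpy_chk (...)  */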
/* Fold a call to the __st{r,p}ncpy_chk builtin.  DEST, SRC, LEN, and SIZE
   are the arguments to the call.  FCODE is the BUILT_IN_* code of the
   builtin.  Return true if simplification was made.  */

static bool
gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
                                 tree dest, tree src,
                                 tree len, tree size,
                                 enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
    {
      /* If return value of __stpncpy_chk is ignored,
         optimize into __strncpy_chk.  */
      fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
      if (fn)
        {
          gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
          replace_call_with_call_and_fold (gsi, repl);
          return true;
        }
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (len, 2);
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
        {
          /* If LEN is not constant, try MAXLEN too.
             For MAXLEN only allow optimizing into non-_ocs function
             if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
          if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
            return false;
        }
      else
        maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
        return false;
    }

  /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
                              ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold function call to builtin stpcpy with arguments DEST and SRC.
   Return true if simplification was made.  */

static bool
gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  location_t loc = gimple_location (stmt);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree fn, len, lenp1;

  /* If the result is unused, replace stpcpy with strcpy.  */
  if (gimple_call_lhs (stmt) == NULL_TREE)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
        return false;
      gimple_call_set_fndecl (stmt, fn);
      fold_stmt (gsi);
      return true;
    }

  len = c_strlen (src, 1);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return false;

  if (optimize_function_for_size_p (cfun)
      /* If length is zero it's small enough.  */
      && !integer_zerop (len))
    return false;

  /* If the source has a known length replace stpcpy with memcpy.  */
  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  gimple_seq stmts = NULL;
  tree tem = gimple_convert (&stmts, loc, size_type_node, len);
  lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
                        tem, build_int_cst (size_type_node, 1));
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
  gimple_set_vuse (repl, gimple_vuse (stmt));
  gimple_set_vdef (repl, gimple_vdef (stmt));
  if (gimple_vdef (repl)
      && TREE_CODE (gimple_vdef (repl)) == SSA_NAME)
    SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
  gsi_insert_before (gsi, repl, GSI_SAME_STMT);
  /* Replace the result with dest + len.  */
  stmts = NULL;
  tem = gimple_convert (&stmts, loc, sizetype, len);
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
                                      POINTER_PLUS_EXPR, dest, tem);
  gsi_replace (gsi, ret, false);
  /* Finally fold the memcpy call.  */
  gimple_stmt_iterator gsi2 = *gsi;
  gsi_prev (&gsi2);
  fold_stmt (&gsi2);
  return true;
}
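
/* E.g., when strlen (src) is known to be 3, the sequence built above is
   (sketch):

     res = stpcpy (dest, src)
   =>
     memcpy (dest, src, 4);   copies the terminating NUL as well
     res = dest + 3;

   while a stpcpy whose result is unused simply becomes strcpy.  */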
/* Fold a call to the __{,v}snprintf_chk builtin pointed to by the GSI
   iterator.  FCODE is either BUILT_IN_SNPRINTF_CHK or
   BUILT_IN_VSNPRINTF_CHK.  Return true if simplification was made.  */

static bool
gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
                                  enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call.  */
  if (gimple_call_num_args (stmt) < 5)
    return false;

  dest = gimple_call_arg (stmt, 0);
  len = gimple_call_arg (stmt, 1);
  flag = gimple_call_arg (stmt, 2);
  size = gimple_call_arg (stmt, 3);
  fmt = gimple_call_arg (stmt, 4);

  if (! tree_fits_uhwi_p (size))
    return false;

  if (! integer_all_onesp (size))
    {
      tree maxlen = get_maxval_strlen (len, 2);
      if (! tree_fits_uhwi_p (len))
        {
          /* If LEN is not constant, try MAXLEN too.
             For MAXLEN only allow optimizing into non-_ocs function
             if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
          if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
            return false;
        }
      else
        maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
        return false;
    }

  if (!init_target_chars ())
    return false;

  /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      fmt_str = c_getstr (fmt);
      if (fmt_str == NULL)
        return false;
      if (strchr (fmt_str, target_percent) != NULL
          && strcmp (fmt_str, target_percent_s))
        return false;
    }

  /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
     available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
                              ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 5 arguments by 3,
     retaining trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, len);
  gimple_call_set_arg (stmt, 2, fmt);
  for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
/* Fold a call to the __{,v}sprintf_chk builtin pointed to by the GSI
   iterator.  FCODE is either BUILT_IN_SPRINTF_CHK or
   BUILT_IN_VSPRINTF_CHK.  Return true if simplification was made.  */

static bool
gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
                                 enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;
  unsigned nargs = gimple_call_num_args (stmt);

  /* Verify the required arguments in the original call.  */
  if (nargs < 4)
    return false;
  dest = gimple_call_arg (stmt, 0);
  flag = gimple_call_arg (stmt, 1);
  size = gimple_call_arg (stmt, 2);
  fmt = gimple_call_arg (stmt, 3);

  if (! tree_fits_uhwi_p (size))
    return false;

  len = NULL_TREE;

  if (!init_target_chars ())
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str != NULL)
    {
      /* If the format doesn't contain % args or %%, we know the size.  */
      if (strchr (fmt_str, target_percent) == 0)
        {
          if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
            len = build_int_cstu (size_type_node, strlen (fmt_str));
        }
      /* If the format is "%s" and first ... argument is a string literal,
         we know the size too.  */
      else if (fcode == BUILT_IN_SPRINTF_CHK
               && strcmp (fmt_str, target_percent_s) == 0)
        {
          tree arg;

          if (nargs == 5)
            {
              arg = gimple_call_arg (stmt, 4);
              if (POINTER_TYPE_P (TREE_TYPE (arg)))
                {
                  len = c_strlen (arg, 1);
                  if (! len || ! tree_fits_uhwi_p (len))
                    len = NULL_TREE;
                }
            }
        }
    }

  if (! integer_all_onesp (size))
    {
      if (! len || ! tree_int_cst_lt (len, size))
        return false;
    }

  /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      if (fmt_str == NULL)
        return false;
      if (strchr (fmt_str, target_percent) != NULL
          && strcmp (fmt_str, target_percent_s))
        return false;
    }

  /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
                              ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 4 arguments by 2,
     retaining trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, fmt);
  for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
/* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
   ORIG may be null if this is a 2-argument call.  We don't attempt to
   simplify calls with more than 3 arguments.

   Return true if simplification was possible, otherwise false.  */

static bool
gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree fmt = gimple_call_arg (stmt, 1);
  tree orig = NULL_TREE;
  const char *fmt_str = NULL;

  /* Verify the required arguments in the original call.  We deal with two
     types of sprintf() calls: 'sprintf (str, fmt)' and
     'sprintf (dest, "%s", orig)'.  */
  if (gimple_call_num_args (stmt) > 3)
    return false;

  if (gimple_call_num_args (stmt) == 3)
    orig = gimple_call_arg (stmt, 2);

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);

      if (!fn)
        return false;

      /* Don't optimize sprintf (buf, "abc", ptr++).  */
      if (orig)
        return false;

      /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
         'format' is known to contain no % formats.  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, fmt);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
        {
          repl = gimple_build_assign (gimple_call_lhs (stmt),
                                      build_int_cst (integer_type_node,
                                                     strlen (fmt_str)));
          gimple_seq_add_stmt_without_update (&stmts, repl);
          gsi_replace_with_seq_vops (gsi, stmts);
          /* gsi now points at the assignment to the lhs, get a
             stmt iterator to the memcpy call.
             ???  We can't use gsi_for_stmt as that doesn't work when the
             CFG isn't built yet.  */
          gimple_stmt_iterator gsi2 = *gsi;
          gsi_prev (&gsi2);
          fold_stmt (&gsi2);
        }
      else
        {
          gsi_replace_with_seq_vops (gsi, stmts);
          fold_stmt (gsi);
        }
      return true;
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn;
      fn = builtin_decl_implicit (BUILT_IN_STRCPY);

      if (!fn)
        return false;

      /* Don't crash on sprintf (str1, "%s").  */
      if (!orig)
        return false;

      tree orig_len = NULL_TREE;
      if (gimple_call_lhs (stmt))
        {
          orig_len = get_maxval_strlen (orig, 0);
          if (!orig_len)
            return false;
        }

      /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2).  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, orig);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
        {
          if (!useless_type_conversion_p (integer_type_node,
                                          TREE_TYPE (orig_len)))
            orig_len = fold_convert (integer_type_node, orig_len);
          repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
          gimple_seq_add_stmt_without_update (&stmts, repl);
          gsi_replace_with_seq_vops (gsi, stmts);
          /* gsi now points at the assignment to the lhs, get a
             stmt iterator to the memcpy call.
             ???  We can't use gsi_for_stmt as that doesn't work when the
             CFG isn't built yet.  */
          gimple_stmt_iterator gsi2 = *gsi;
          gsi_prev (&gsi2);
          fold_stmt (&gsi2);
        }
      else
        {
          gsi_replace_with_seq_vops (gsi, stmts);
          fold_stmt (gsi);
        }
      return true;
    }
  return false;
}
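
/* E.g. (sketch; n is the otherwise-computed return value):

     n = sprintf (d, "hello")  =>  strcpy (d, "hello"); n = 5;
     n = sprintf (d, "%s", s)  =>  strcpy (d, s); n = length of s,
                                   when that length is known  */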
/* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
   FMT, and ORIG.  ORIG may be null if this is a 3-argument call.  We don't
   attempt to simplify calls with more than 4 arguments.

   Return true if simplification was possible, otherwise false.  */

static bool
gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest = gimple_call_arg (stmt, 0);
  tree destsize = gimple_call_arg (stmt, 1);
  tree fmt = gimple_call_arg (stmt, 2);
  tree orig = NULL_TREE;
  const char *fmt_str = NULL;

  if (gimple_call_num_args (stmt) > 4)
    return false;

  if (gimple_call_num_args (stmt) == 4)
    orig = gimple_call_arg (stmt, 3);

  if (!tree_fits_uhwi_p (destsize))
    return false;
  unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
        return false;

      /* Don't optimize snprintf (buf, 4, "abc", ptr++).  */
      if (orig)
        return false;

      /* We could expand this as
         memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
         or to
         memcpy (str, fmt_with_nul_at_cstm1, cst);
         but in the former case that might increase code size
         and in the latter case grow .rodata section too much.
         So punt for now.  */
      size_t len = strlen (fmt_str);
      if (len >= destlen)
        return false;

      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, fmt);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
        {
          repl = gimple_build_assign (gimple_call_lhs (stmt),
                                      build_int_cst (integer_type_node, len));
          gimple_seq_add_stmt_without_update (&stmts, repl);
          gsi_replace_with_seq_vops (gsi, stmts);
          /* gsi now points at the assignment to the lhs, get a
             stmt iterator to the memcpy call.
             ???  We can't use gsi_for_stmt as that doesn't work when the
             CFG isn't built yet.  */
          gimple_stmt_iterator gsi2 = *gsi;
          gsi_prev (&gsi2);
          fold_stmt (&gsi2);
        }
      else
        {
          gsi_replace_with_seq_vops (gsi, stmts);
          fold_stmt (gsi);
        }
      return true;
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
        return false;

      /* Don't crash on snprintf (str1, cst, "%s").  */
      if (!orig)
        return false;

      tree orig_len = get_maxval_strlen (orig, 0);
      if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
        return false;

      /* We could expand this as
         memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
         or to
         memcpy (str1, str2_with_nul_at_cstm1, cst);
         but in the former case that might increase code size
         and in the latter case grow .rodata section too much.
         So punt for now.  */
      if (compare_tree_int (orig_len, destlen) >= 0)
        return false;

      /* Convert snprintf (str1, cst, "%s", str2) into
         strcpy (str1, str2) if strlen (str2) < cst.  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, orig);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
        {
          if (!useless_type_conversion_p (integer_type_node,
                                          TREE_TYPE (orig_len)))
            orig_len = fold_convert (integer_type_node, orig_len);
          repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
          gimple_seq_add_stmt_without_update (&stmts, repl);
          gsi_replace_with_seq_vops (gsi, stmts);
          /* gsi now points at the assignment to the lhs, get a
             stmt iterator to the memcpy call.
             ???  We can't use gsi_for_stmt as that doesn't work when the
             CFG isn't built yet.  */
          gimple_stmt_iterator gsi2 = *gsi;
          gsi_prev (&gsi2);
          fold_stmt (&gsi2);
        }
      else
        {
          gsi_replace_with_seq_vops (gsi, stmts);
          fold_stmt (gsi);
        }
      return true;
    }
  return false;
}
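
/* E.g., with a 16-byte destination (sketch):

     snprintf (d, 16, "hello")  =>  strcpy (d, "hello"), result 5
     snprintf (d, 16, "%s", s)  =>  strcpy (d, s), when the maximum
                                    strlen of s is known to be < 16
     snprintf (d, 4, "hello")   =>  left alone; it would truncate  */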
/* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
   FP, FMT, and ARG are the arguments to the call.  We don't fold calls with
   more than 3 arguments, and ARG may be null in the 2-argument case.
   FCODE is the BUILT_IN_* code of the function to be simplified.
   Return true if simplification was made.  */

static bool
gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
                             tree fp, tree fmt, tree arg,
                             enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree fn_fputc, fn_fputs;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt) != NULL_TREE)
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
         unlocked functions exist explicitly.  */
      fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
      fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
    }
  else
    {
      fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
      fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
    }

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use fputs.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
          && arg)
        return false;

      /* If the format specifier was "", fprintf does nothing.  */
      if (fmt_str[0] == '\0')
        {
          replace_call_with_value (gsi, NULL_TREE);
          return true;
        }

      /* When "string" doesn't contain %, replace all cases of
         fprintf (fp, string) with fputs (string, fp).  The fputs
         builtin will take care of special cases like length == 1.  */
      if (fn_fputs)
        {
          gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
          replace_call_with_call_and_fold (gsi, repl);
          return true;
        }
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
    return false;

  /* If the format specifier was "%s", call __builtin_fputs (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_s) == 0)
    {
      if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
        return false;
      if (fn_fputs)
        {
          gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
          replace_call_with_call_and_fold (gsi, repl);
          return true;
        }
    }

  /* If the format specifier was "%c", call __builtin_fputc (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg
          || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
        return false;
      if (fn_fputc)
        {
          gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
          replace_call_with_call_and_fold (gsi, repl);
          return true;
        }
    }

  return false;
}
/* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
   FMT and ARG are the arguments to the call; we don't fold cases with
   more than 2 arguments, and ARG may be null if this is a 1-argument case.
   FCODE is the BUILT_IN_* code of the function to be simplified.
   Return true if simplification was made.  */

static bool
gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
                            tree arg, enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree fn_putchar, fn_puts, newarg;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt) != NULL_TREE)
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (fcode == BUILT_IN_PRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
         unlocked functions exist explicitly.  */
      fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
      fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
    }
  else
    {
      fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
      fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
    }

  if (!init_target_chars ())
    return false;

  if (strcmp (fmt_str, target_percent_s) == 0
      || strchr (fmt_str, target_percent) == NULL)
    {
      const char *str;

      if (strcmp (fmt_str, target_percent_s) == 0)
        {
          if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
            return false;

          if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
            return false;

          str = c_getstr (arg);
          if (str == NULL)
            return false;
        }
      else
        {
          /* The format specifier doesn't contain any '%' characters.  */
          if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
              && arg)
            return false;
          str = fmt_str;
        }

      /* If the string was "", printf does nothing.  */
      if (str[0] == '\0')
        {
          replace_call_with_value (gsi, NULL_TREE);
          return true;
        }

      /* If the string has length of 1, call putchar.  */
      if (str[1] == '\0')
        {
          /* Given printf("c"), (where c is any one character,)
             convert "c"[0] to an int and pass that to the replacement
             function.  */
          newarg = build_int_cst (integer_type_node, str[0]);
          if (fn_putchar)
            {
              gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
              replace_call_with_call_and_fold (gsi, repl);
              return true;
            }
        }
      else
        {
          /* If the string was "string\n", call puts("string").  */
          size_t len = strlen (str);
          if ((unsigned char)str[len - 1] == target_newline
              && (size_t) (int) len == len)
            {
              char *newstr;
              tree offset_node, string_cst;

              /* Create a NUL-terminated string that's one char shorter
                 than the original, stripping off the trailing '\n'.  */
              newarg = build_string_literal (len, str);
              string_cst = string_constant (newarg, &offset_node);
              gcc_checking_assert (string_cst
                                   && (TREE_STRING_LENGTH (string_cst)
                                       == (int) len)
                                   && integer_zerop (offset_node)
                                   && (unsigned char)
                                      TREE_STRING_POINTER (string_cst)[len - 1]
                                      == target_newline);
              /* build_string_literal creates a new STRING_CST,
                 modify it in place to avoid double copying.  */
              newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
              newstr[len - 1] = '\0';
              if (fn_puts)
                {
                  gcall *repl = gimple_build_call (fn_puts, 1, newarg);
                  replace_call_with_call_and_fold (gsi, repl);
                  return true;
                }
            }
          else
            /* We'd like to arrange to call fputs(string,stdout) here,
               but we need stdout and don't have a way to get it yet.  */
            return false;
        }
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
    return false;

  /* If the format specifier was "%s\n", call __builtin_puts(arg).  */
  else if (strcmp (fmt_str, target_percent_s_newline) == 0)
    {
      if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
        return false;
      if (fn_puts)
        {
          gcall *repl = gimple_build_call (fn_puts, 1, arg);
          replace_call_with_call_and_fold (gsi, repl);
          return true;
        }
    }

  /* If the format specifier was "%c", call __builtin_putchar(arg).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg || ! useless_type_conversion_p (integer_type_node,
                                               TREE_TYPE (arg)))
        return false;
      if (fn_putchar)
        {
          gcall *repl = gimple_build_call (fn_putchar, 1, arg);
          replace_call_with_call_and_fold (gsi, repl);
          return true;
        }
    }

  return false;
}
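
/* E.g. (sketch):

     printf ("")         =>  call removed
     printf ("x")        =>  putchar ('x')
     printf ("abc\n")    =>  puts ("abc")
     printf ("%s\n", s)  =>  puts (s)
     printf ("%c", c)    =>  putchar (c)

   all only when the printf result is unused.  */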
/* Fold a call to __builtin_strlen when the length of the argument is
   known.  */

static bool
gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree len = get_maxval_strlen (gimple_call_arg (stmt, 0), 0);
  if (!len)
    return false;
  len = force_gimple_operand_gsi (gsi, len, true, NULL, true, GSI_SAME_STMT);
  replace_call_with_value (gsi, len);
  return true;
}
/* Fold a call to __builtin_acc_on_device.  */

static bool
gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
{
  /* Defer folding until we know which compiler we're in.  */
  if (symtab->state != EXPANSION)
    return false;

  unsigned val_host = GOMP_DEVICE_HOST;
  unsigned val_dev = GOMP_DEVICE_NONE;

#ifdef ACCEL_COMPILER
  val_host = GOMP_DEVICE_NOT_HOST;
  val_dev = ACCEL_COMPILER_acc_device;
#endif

  location_t loc = gimple_location (gsi_stmt (*gsi));

  tree host_eq = make_ssa_name (boolean_type_node);
  gimple *host_ass = gimple_build_assign
    (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
  gimple_set_location (host_ass, loc);
  gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);

  tree dev_eq = make_ssa_name (boolean_type_node);
  gimple *dev_ass = gimple_build_assign
    (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
  gimple_set_location (dev_ass, loc);
  gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);

  tree result = make_ssa_name (boolean_type_node);
  gimple *result_ass = gimple_build_assign
    (result, BIT_IOR_EXPR, host_eq, dev_eq);
  gimple_set_location (result_ass, loc);
  gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);

  replace_call_with_value (gsi, result);

  return true;
}
/* Fold realloc (0, n) -> malloc (n).  */

static bool
gimple_fold_builtin_realloc (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree arg = gimple_call_arg (stmt, 0);
  tree size = gimple_call_arg (stmt, 1);

  if (operand_equal_p (arg, null_pointer_node, 0))
    {
      tree fn_malloc = builtin_decl_implicit (BUILT_IN_MALLOC);
      if (fn_malloc)
        {
          gcall *repl = gimple_build_call (fn_malloc, 1, size);
          replace_call_with_call_and_fold (gsi, repl);
          return true;
        }
    }
  return false;
}
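
/* C guarantees that realloc (NULL, n) behaves as malloc (n), so e.g.

     p = realloc (0, n)  =>  p = malloc (n)

   which in turn exposes the malloc to further optimizations.  */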
/* Fold the non-target builtin at *GSI and return whether any simplification
   was made.  */

static bool
gimple_fold_builtin (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
  tree callee = gimple_call_fndecl (stmt);

  /* Give up for always_inline inline builtins until they are
     inlined.  */
  if (avoid_folding_inline_builtin (callee))
    return false;

  unsigned n = gimple_call_num_args (stmt);
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
  switch (fcode)
    {
    case BUILT_IN_BCMP:
      return gimple_fold_builtin_bcmp (gsi);
    case BUILT_IN_BCOPY:
      return gimple_fold_builtin_bcopy (gsi);
    case BUILT_IN_BZERO:
      return gimple_fold_builtin_bzero (gsi);

    case BUILT_IN_MEMSET:
      return gimple_fold_builtin_memset (gsi,
                                         gimple_call_arg (stmt, 1),
                                         gimple_call_arg (stmt, 2));
    case BUILT_IN_MEMCPY:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
                                            gimple_call_arg (stmt, 1), 0);
    case BUILT_IN_MEMPCPY:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
                                            gimple_call_arg (stmt, 1), 1);
    case BUILT_IN_MEMMOVE:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
                                            gimple_call_arg (stmt, 1), 3);
    case BUILT_IN_SPRINTF_CHK:
    case BUILT_IN_VSPRINTF_CHK:
      return gimple_fold_builtin_sprintf_chk (gsi, fcode);
    case BUILT_IN_STRCAT_CHK:
      return gimple_fold_builtin_strcat_chk (gsi);
    case BUILT_IN_STRNCAT_CHK:
      return gimple_fold_builtin_strncat_chk (gsi);
    case BUILT_IN_STRLEN:
      return gimple_fold_builtin_strlen (gsi);
    case BUILT_IN_STRCPY:
      return gimple_fold_builtin_strcpy (gsi,
                                         gimple_call_arg (stmt, 0),
                                         gimple_call_arg (stmt, 1));
    case BUILT_IN_STRNCPY:
      return gimple_fold_builtin_strncpy (gsi,
                                          gimple_call_arg (stmt, 0),
                                          gimple_call_arg (stmt, 1),
                                          gimple_call_arg (stmt, 2));
    case BUILT_IN_STRCAT:
      return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
                                         gimple_call_arg (stmt, 1));
    case BUILT_IN_STRNCAT:
      return gimple_fold_builtin_strncat (gsi);
    case BUILT_IN_INDEX:
    case BUILT_IN_STRCHR:
      return gimple_fold_builtin_strchr (gsi, false);
    case BUILT_IN_RINDEX:
    case BUILT_IN_STRRCHR:
      return gimple_fold_builtin_strchr (gsi, true);
    case BUILT_IN_STRSTR:
      return gimple_fold_builtin_strstr (gsi);
    case BUILT_IN_STRCMP:
    case BUILT_IN_STRCASECMP:
    case BUILT_IN_STRNCMP:
    case BUILT_IN_STRNCASECMP:
      return gimple_fold_builtin_string_compare (gsi);
    case BUILT_IN_MEMCHR:
      return gimple_fold_builtin_memchr (gsi);
    case BUILT_IN_FPUTS:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
                                        gimple_call_arg (stmt, 1), false);
    case BUILT_IN_FPUTS_UNLOCKED:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
                                        gimple_call_arg (stmt, 1), true);
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      return gimple_fold_builtin_memory_chk (gsi,
                                             gimple_call_arg (stmt, 0),
                                             gimple_call_arg (stmt, 1),
                                             gimple_call_arg (stmt, 2),
                                             gimple_call_arg (stmt, 3),
                                             fcode);
    case BUILT_IN_STPCPY:
      return gimple_fold_builtin_stpcpy (gsi);
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      return gimple_fold_builtin_stxcpy_chk (gsi,
                                             gimple_call_arg (stmt, 0),
                                             gimple_call_arg (stmt, 1),
                                             gimple_call_arg (stmt, 2),
                                             fcode);
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      return gimple_fold_builtin_stxncpy_chk (gsi,
                                              gimple_call_arg (stmt, 0),
                                              gimple_call_arg (stmt, 1),
                                              gimple_call_arg (stmt, 2),
                                              gimple_call_arg (stmt, 3),
                                              fcode);
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      return gimple_fold_builtin_snprintf_chk (gsi, fcode);

    case BUILT_IN_FPRINTF:
    case BUILT_IN_FPRINTF_UNLOCKED:
    case BUILT_IN_VFPRINTF:
      if (n == 2 || n == 3)
        return gimple_fold_builtin_fprintf (gsi,
                                            gimple_call_arg (stmt, 0),
                                            gimple_call_arg (stmt, 1),
                                            n == 3
                                            ? gimple_call_arg (stmt, 2)
                                            : NULL_TREE,
                                            fcode);
      break;
    case BUILT_IN_FPRINTF_CHK:
    case BUILT_IN_VFPRINTF_CHK:
      if (n == 3 || n == 4)
        return gimple_fold_builtin_fprintf (gsi,
                                            gimple_call_arg (stmt, 0),
                                            gimple_call_arg (stmt, 2),
                                            n == 4
                                            ? gimple_call_arg (stmt, 3)
                                            : NULL_TREE,
                                            fcode);
      break;
    case BUILT_IN_PRINTF:
    case BUILT_IN_PRINTF_UNLOCKED:
    case BUILT_IN_VPRINTF:
      if (n == 1 || n == 2)
        return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
                                           n == 2
                                           ? gimple_call_arg (stmt, 1)
                                           : NULL_TREE, fcode);
      break;
    case BUILT_IN_PRINTF_CHK:
    case BUILT_IN_VPRINTF_CHK:
      if (n == 2 || n == 3)
        return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
                                           n == 3
                                           ? gimple_call_arg (stmt, 2)
                                           : NULL_TREE, fcode);
      break;
    case BUILT_IN_ACC_ON_DEVICE:
      return gimple_fold_builtin_acc_on_device (gsi,
                                                gimple_call_arg (stmt, 0));
    case BUILT_IN_REALLOC:
      return gimple_fold_builtin_realloc (gsi);

    default:;
    }

  /* Try the generic builtin folder.  */
  bool ignore = (gimple_call_lhs (stmt) == NULL);
  tree result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
        STRIP_NOPS (result);
      else
        result = fold_convert (gimple_call_return_type (stmt), result);
      if (!update_call_from_tree (gsi, result))
        gimplify_and_update_call_from_tree (gsi, result);
      return true;
    }

  return false;
}
/* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
   function calls to constants, where possible.  */

static tree
fold_internal_goacc_dim (const gimple *call)
{
  int axis = oacc_get_ifn_dim_arg (call);
  int size = oacc_get_fn_dim_size (current_function_decl, axis);
  bool is_pos = gimple_call_internal_fn (call) == IFN_GOACC_DIM_POS;
  tree result = NULL_TREE;

  /* If the size is 1, or we only want the size and it is not dynamic,
     we know the answer.  */
  if (size == 1 || (!is_pos && size))
    {
      tree type = TREE_TYPE (gimple_call_lhs (call));
      result = build_int_cst (type, size - is_pos);
    }

  return result;
}
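
/* E.g., if the offloaded function's axis is statically sized to 32,
   then (sketch)

     IFN_GOACC_DIM_SIZE (axis)  =>  32
     IFN_GOACC_DIM_POS (axis)   =>  0 whenever that axis has size 1,
                                    since the only position is 0  */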
/* Return true if stmt is __atomic_compare_exchange_N call which is suitable
   for conversion into ATOMIC_COMPARE_EXCHANGE if the second argument is
   &var where var is only addressable because of such calls.  */

bool
optimize_atomic_compare_exchange_p (gimple *stmt)
{
  if (gimple_call_num_args (stmt) != 6
      || !flag_inline_atomics
      || !optimize
      || sanitize_flags_p (SANITIZE_THREAD | SANITIZE_ADDRESS)
      || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
      || !gimple_vdef (stmt)
      || !gimple_vuse (stmt))
    return false;

  tree fndecl = gimple_call_fndecl (stmt);
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
      break;
    default:
      return false;
    }

  tree expected = gimple_call_arg (stmt, 1);
  if (TREE_CODE (expected) != ADDR_EXPR
      || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
    return false;

  tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
  if (!is_gimple_reg_type (etype)
      || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
      || TREE_THIS_VOLATILE (etype)
      || VECTOR_TYPE_P (etype)
      || TREE_CODE (etype) == COMPLEX_TYPE
      /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
         might not preserve all the bits.  See PR71716.  */
      || SCALAR_FLOAT_TYPE_P (etype)
      || TYPE_PRECISION (etype) != GET_MODE_BITSIZE (TYPE_MODE (etype)))
    return false;

  tree weak = gimple_call_arg (stmt, 3);
  if (!integer_zerop (weak) && !integer_onep (weak))
    return false;

  tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
  machine_mode mode = TYPE_MODE (itype);

  if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
      == CODE_FOR_nothing
      && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
    return false;

  if (int_size_in_bytes (etype) != GET_MODE_SIZE (mode))
    return false;

  return true;
}
/* Fold
     r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
   into
     _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
     i = IMAGPART_EXPR <t>;
     r = (_Bool) i;
     e = REALPART_EXPR <t>;  */

void
fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree fndecl = gimple_call_fndecl (stmt);
  tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
  tree ctype = build_complex_type (itype);
  tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
  bool throws = false;
  edge e = NULL;
  gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
                                   expected);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);
  gimple_stmt_iterator gsiret = gsi_for_stmt (g);
  if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
    {
      g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
                               build1 (VIEW_CONVERT_EXPR, itype,
                                       gimple_assign_lhs (g)));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }
  int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
             + int_size_in_bytes (itype);
  g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
                                  gimple_call_arg (stmt, 0),
                                  gimple_assign_lhs (g),
                                  gimple_call_arg (stmt, 2),
                                  build_int_cst (integer_type_node, flag),
                                  gimple_call_arg (stmt, 4),
                                  gimple_call_arg (stmt, 5));
  tree lhs = make_ssa_name (ctype);
  gimple_call_set_lhs (g, lhs);
  gimple_set_vdef (g, gimple_vdef (stmt));
  gimple_set_vuse (g, gimple_vuse (stmt));
  SSA_NAME_DEF_STMT (gimple_vdef (g)) = g;
  tree oldlhs = gimple_call_lhs (stmt);
  if (stmt_can_throw_internal (stmt))
    {
      throws = true;
      e = find_fallthru_edge (gsi_bb (*gsi)->succs);
    }
  gimple_call_set_nothrow (as_a <gcall *> (g),
                           gimple_call_nothrow_p (as_a <gcall *> (stmt)));
  gimple_call_set_lhs (stmt, NULL_TREE);
  gsi_replace (gsi, g, true);
  if (oldlhs)
    {
      g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
                               build1 (IMAGPART_EXPR, itype, lhs));
      if (throws)
        {
          gsi_insert_on_edge_immediate (e, g);
          *gsi = gsi_for_stmt (g);
        }
      else
        gsi_insert_after (gsi, g, GSI_NEW_STMT);
      g = gimple_build_assign (oldlhs, NOP_EXPR, gimple_assign_lhs (g));
      gsi_insert_after (gsi, g, GSI_NEW_STMT);
    }
  g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
                           build1 (REALPART_EXPR, itype, lhs));
  if (throws && oldlhs == NULL_TREE)
    {
      gsi_insert_on_edge_immediate (e, g);
      *gsi = gsi_for_stmt (g);
    }
  else
    gsi_insert_after (gsi, g, GSI_NEW_STMT);
  if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
    {
      g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
                               VIEW_CONVERT_EXPR,
                               build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
                                       gimple_assign_lhs (g)));
      gsi_insert_after (gsi, g, GSI_NEW_STMT);
    }
  g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
  gsi_insert_after (gsi, g, GSI_NEW_STMT);
  *gsi = gsiret;
}
/* Return true if ARG0 CODE ARG1 in infinite signed precision operation
   doesn't fit into TYPE.  The test for overflow should be regardless of
   -fwrapv, and even for unsigned types.  */

bool
arith_overflowed_p (enum tree_code code, const_tree type,
                    const_tree arg0, const_tree arg1)
{
  typedef FIXED_WIDE_INT (WIDE_INT_MAX_PRECISION * 2) widest2_int;
  typedef generic_wide_int <wi::extended_tree <WIDE_INT_MAX_PRECISION * 2> >
    widest2_int_cst;
  widest2_int warg0 = widest2_int_cst (arg0);
  widest2_int warg1 = widest2_int_cst (arg1);
  widest2_int wres;
  switch (code)
    {
    case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
    case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
    case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
    default: gcc_unreachable ();
    }
  signop sign = TYPE_SIGN (type);
  if (sign == UNSIGNED && wi::neg_p (wres))
    return true;
  return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
}
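
/* E.g., for a 32-bit unsigned TYPE the double-width computation above
   flags (sketch):

     0xffffffff + 1  =>  needs 33 bits           =>  overflow
     0 - 1           =>  negative, unsigned      =>  overflow
     0x7fffffff * 2  =>  fits in 32 unsigned bits =>  no overflow  */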
/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.  */

static bool
gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree callee;
  bool changed = false;
  unsigned i;

  /* Fold *& in call arguments.  */
  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
      {
        tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
        if (tmp)
          {
            gimple_call_set_arg (stmt, i, tmp);
            changed = true;
          }
      }

  /* Check for virtual calls that became direct calls.  */
  callee = gimple_call_fn (stmt);
  if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
    {
      if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
        {
          if (dump_file && virtual_method_call_p (callee)
              && !possible_polymorphic_call_target_p
                   (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
                                                    (OBJ_TYPE_REF_EXPR (callee)))))
            {
              fprintf (dump_file,
                       "Type inheritance inconsistent devirtualization of ");
              print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
              fprintf (dump_file, " to ");
              print_generic_expr (dump_file, callee, TDF_SLIM);
              fprintf (dump_file, "\n");
            }

          gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
          changed = true;
        }
      else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
        {
          bool final;
          vec <cgraph_node *>targets
            = possible_polymorphic_call_targets (callee, stmt, &final);
          if (final && targets.length () <= 1 && dbg_cnt (devirt))
            {
              tree lhs = gimple_call_lhs (stmt);
              if (dump_enabled_p ())
                {
                  location_t loc = gimple_location_safe (stmt);
                  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
                                   "folding virtual function call to %s\n",
                                   targets.length () == 1
                                   ? targets[0]->name ()
                                   : "__builtin_unreachable");
                }
              if (targets.length () == 1)
                {
                  tree fndecl = targets[0]->decl;
                  gimple_call_set_fndecl (stmt, fndecl);
                  changed = true;
                  /* If changing the call to __cxa_pure_virtual
                     or similar noreturn function, adjust gimple_call_fntype
                     too.  */
                  if (gimple_call_noreturn_p (stmt)
                      && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
                      && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
                      && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
                          == void_type_node))
                    gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
                  /* If the call becomes noreturn, remove the lhs.  */
                  if (lhs
                      && gimple_call_noreturn_p (stmt)
                      && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
                          || should_remove_lhs_p (lhs)))
                    {
                      if (TREE_CODE (lhs) == SSA_NAME)
                        {
                          tree var = create_tmp_var (TREE_TYPE (lhs));
                          tree def = get_or_create_ssa_default_def (cfun, var);
                          gimple *new_stmt = gimple_build_assign (lhs, def);
                          gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
                        }
                      gimple_call_set_lhs (stmt, NULL_TREE);
                    }
                  maybe_remove_unused_call_args (cfun, stmt);
                }
              else
                {
                  tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
                  gimple *new_stmt = gimple_build_call (fndecl, 0);
                  gimple_set_location (new_stmt, gimple_location (stmt));
                  if (lhs && TREE_CODE (lhs) == SSA_NAME)
                    {
                      tree var = create_tmp_var (TREE_TYPE (lhs));
                      tree def = get_or_create_ssa_default_def (cfun, var);

                      /* To satisfy condition for
                         cgraph_update_edges_for_call_stmt_node,
                         we need to preserve GIMPLE_CALL statement
                         at position of GSI iterator.  */
                      update_call_from_tree (gsi, def);
                      gsi_insert_before (gsi, new_stmt, GSI_NEW_STMT);
                    }
                  else
                    {
                      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
                      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
                      gsi_replace (gsi, new_stmt, false);
                    }
                  return true;
                }
            }
        }
    }

  /* Check for indirect calls that became direct calls, and then
     no longer require a static chain.  */
  if (gimple_call_chain (stmt))
    {
      tree fn = gimple_call_fndecl (stmt);
      if (fn && !DECL_STATIC_CHAIN (fn))
        {
          gimple_call_set_chain (stmt, NULL);
          changed = true;
        }
      else
        {
          tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
          if (tmp)
            {
              gimple_call_set_chain (stmt, tmp);
              changed = true;
            }
        }
    }

  if (inplace)
    return changed;

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    {
      if (gimple_fold_builtin (gsi))
        changed = true;
    }
  else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
    {
      changed |= targetm.gimple_fold_builtin (gsi);
    }
  else if (gimple_call_internal_p (stmt))
    {
      enum tree_code subcode = ERROR_MARK;
      tree result = NULL_TREE;
      bool cplx_result = false;
      tree overflow = NULL_TREE;
      switch (gimple_call_internal_fn (stmt))
        {
        case IFN_BUILTIN_EXPECT:
          result = fold_builtin_expect (gimple_location (stmt),
                                        gimple_call_arg (stmt, 0),
                                        gimple_call_arg (stmt, 1),
                                        gimple_call_arg (stmt, 2));
          break;
        case IFN_UBSAN_OBJECT_SIZE:
          {
            tree offset = gimple_call_arg (stmt, 1);
            tree objsize = gimple_call_arg (stmt, 2);
            if (integer_all_onesp (objsize)
                || (TREE_CODE (offset) == INTEGER_CST
                    && TREE_CODE (objsize) == INTEGER_CST
                    && tree_int_cst_le (offset, objsize)))
              {
                replace_call_with_value (gsi, NULL_TREE);
                return true;
              }
          }
          break;
        case IFN_UBSAN_PTR:
          if (integer_zerop (gimple_call_arg (stmt, 1)))
            {
              replace_call_with_value (gsi, NULL_TREE);
              return true;
            }
          break;
        case IFN_UBSAN_BOUNDS:
          {
            tree index = gimple_call_arg (stmt, 1);
            tree bound = gimple_call_arg (stmt, 2);
            if (TREE_CODE (index) == INTEGER_CST
                && TREE_CODE (bound) == INTEGER_CST)
              {
                index = fold_convert (TREE_TYPE (bound), index);
                if (TREE_CODE (index) == INTEGER_CST
                    && tree_int_cst_le (index, bound))
                  {
                    replace_call_with_value (gsi, NULL_TREE);
                    return true;
                  }
              }
          }
          break;
        case IFN_GOACC_DIM_SIZE:
        case IFN_GOACC_DIM_POS:
          result = fold_internal_goacc_dim (stmt);
          break;
        case IFN_UBSAN_CHECK_ADD:
          subcode = PLUS_EXPR;
          break;
        case IFN_UBSAN_CHECK_SUB:
          subcode = MINUS_EXPR;
          break;
        case IFN_UBSAN_CHECK_MUL:
          subcode = MULT_EXPR;
          break;
        case IFN_ADD_OVERFLOW:
          subcode = PLUS_EXPR;
          cplx_result = true;
          break;
        case IFN_SUB_OVERFLOW:
          subcode = MINUS_EXPR;
          cplx_result = true;
          break;
        case IFN_MUL_OVERFLOW:
          subcode = MULT_EXPR;
          cplx_result = true;
          break;
        default:
          break;
        }
      if (subcode != ERROR_MARK)
        {
          tree arg0 = gimple_call_arg (stmt, 0);
          tree arg1 = gimple_call_arg (stmt, 1);
          tree type = TREE_TYPE (arg0);
          if (cplx_result)
            {
              tree lhs = gimple_call_lhs (stmt);
              if (lhs == NULL_TREE)
                type = NULL_TREE;
              else
                type = TREE_TYPE (TREE_TYPE (lhs));
            }
          if (type == NULL_TREE)
            ;
          /* x = y + 0; x = y - 0; x = y * 0; */
          else if (integer_zerop (arg1))
            result = subcode == MULT_EXPR ? integer_zero_node : arg0;
          /* x = 0 + y; x = 0 * y; */
          else if (subcode != MINUS_EXPR && integer_zerop (arg0))
            result = subcode == MULT_EXPR ? integer_zero_node : arg1;
          /* x = y - y; */
          else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
            result = integer_zero_node;
          /* x = y * 1; x = 1 * y; */
          else if (subcode == MULT_EXPR && integer_onep (arg1))
            result = arg0;
          else if (subcode == MULT_EXPR && integer_onep (arg0))
            result = arg1;
          else if (TREE_CODE (arg0) == INTEGER_CST
                   && TREE_CODE (arg1) == INTEGER_CST)
            {
              if (cplx_result)
                result = int_const_binop (subcode, fold_convert (type, arg0),
                                          fold_convert (type, arg1));
              else
                result = int_const_binop (subcode, arg0, arg1);
              if (result && arith_overflowed_p (subcode, type, arg0, arg1))
                {
                  if (cplx_result)
                    overflow = build_one_cst (type);
                  else
                    result = NULL_TREE;
                }
            }
          if (result)
            {
              if (result == integer_zero_node)
                result = build_zero_cst (type);
              else if (cplx_result && TREE_TYPE (result) != type)
                {
                  if (TREE_CODE (result) == INTEGER_CST)
                    {
                      if (arith_overflowed_p (PLUS_EXPR, type, result,
                                              integer_zero_node))
                        overflow = build_one_cst (type);
                    }
                  else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
                            && TYPE_UNSIGNED (type))
                           || (TYPE_PRECISION (type)
                               < (TYPE_PRECISION (TREE_TYPE (result))
                                  + (TYPE_UNSIGNED (TREE_TYPE (result))
                                     && !TYPE_UNSIGNED (type)))))
                    result = NULL_TREE;
                  if (result)
                    result = fold_convert (type, result);
                }
            }
        }

      if (result)
        {
          if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
            result = drop_tree_overflow (result);
          if (cplx_result)
            {
              if (overflow == NULL_TREE)
                overflow = build_zero_cst (TREE_TYPE (result));
              tree ctype = build_complex_type (TREE_TYPE (result));
              if (TREE_CODE (result) == INTEGER_CST
                  && TREE_CODE (overflow) == INTEGER_CST)
                result = build_complex (ctype, result, overflow);
              else
                result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
                                     ctype, result, overflow);
            }
          if (!update_call_from_tree (gsi, result))
            gimplify_and_update_call_from_tree (gsi, result);
          changed = true;
        }
    }

  return changed;
}
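
/* E.g., the internal-function arithmetic above folds (sketch):

     __builtin_add_overflow (x, 0, &r)  =>  r = x,  overflow flag 0
     __builtin_mul_overflow (7, 6, &r)  =>  r = 42, overflow flag 0
                                            for a 32-bit r  */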
/* Return true if NAME has a use on STMT.  */

static bool
has_use_on_stmt (tree name, gimple *stmt)
{
  imm_use_iterator iter;
  use_operand_p use_p;
  FOR_EACH_IMM_USE_FAST (use_p, iter, name)
    if (USE_STMT (use_p) == stmt)
      return true;
  return false;
}
4113 /* Worker for fold_stmt_1 dispatch to pattern based folding with
4116 Replaces *GSI with the simplification result in RCODE and OPS
4117 and the associated statements in *SEQ. Does the replacement
4118 according to INPLACE and returns true if the operation succeeded. */
static bool
replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
				  code_helper rcode, tree *ops,
				  gimple_seq *seq, bool inplace)
{
  gimple *stmt = gsi_stmt (*gsi);

  /* Play safe and do not allow abnormals to be mentioned in
     newly created statements.  See also maybe_push_res_to_seq.
     As an exception allow such uses if there was a use of the
     same SSA name on the old stmt.  */
  if ((TREE_CODE (ops[0]) == SSA_NAME
       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0])
       && !has_use_on_stmt (ops[0], stmt))
      || (ops[1]
	  && TREE_CODE (ops[1]) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1])
	  && !has_use_on_stmt (ops[1], stmt))
      || (ops[2]
	  && TREE_CODE (ops[2]) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2])
	  && !has_use_on_stmt (ops[2], stmt))
      || (COMPARISON_CLASS_P (ops[0])
	  && ((TREE_CODE (TREE_OPERAND (ops[0], 0)) == SSA_NAME
	       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 0))
	       && !has_use_on_stmt (TREE_OPERAND (ops[0], 0), stmt))
	      || (TREE_CODE (TREE_OPERAND (ops[0], 1)) == SSA_NAME
		  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 1))
		  && !has_use_on_stmt (TREE_OPERAND (ops[0], 1), stmt)))))
    return false;

  /* Don't insert new statements when INPLACE is true, even if we could
     reuse STMT for the final statement.  */
  if (inplace && !gimple_seq_empty_p (*seq))
    return false;

  if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
    {
      gcc_assert (rcode.is_tree_code ());
      if (TREE_CODE_CLASS ((enum tree_code) rcode) == tcc_comparison
	  /* GIMPLE_CONDs condition may not throw.  */
	  && (!flag_exceptions
	      || !cfun->can_throw_non_call_exceptions
	      || !operation_could_trap_p (rcode,
					  FLOAT_TYPE_P (TREE_TYPE (ops[0])),
					  false, NULL_TREE)))
	gimple_cond_set_condition (cond_stmt, rcode, ops[0], ops[1]);
      else if (rcode == SSA_NAME)
	gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
				   build_zero_cst (TREE_TYPE (ops[0])));
      else if (rcode == INTEGER_CST)
	{
	  if (integer_zerop (ops[0]))
	    gimple_cond_make_false (cond_stmt);
	  else
	    gimple_cond_make_true (cond_stmt);
	}
      else if (!inplace)
	{
	  tree res = maybe_push_res_to_seq (rcode, boolean_type_node,
					    ops, seq);
	  if (!res)
	    return false;
	  gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
				     build_zero_cst (TREE_TYPE (res)));
	}
      else
	return false;
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (is_gimple_assign (stmt)
	   && rcode.is_tree_code ())
    {
      if (!inplace
	  || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (rcode))
	{
	  maybe_build_generic_op (rcode,
				  TREE_TYPE (gimple_assign_lhs (stmt)), ops);
	  gimple_assign_set_rhs_with_ops (gsi, rcode, ops[0], ops[1], ops[2]);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      if (!gimple_seq_empty_p (*seq))
		print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	      print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
	    }
	  gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
	  return true;
	}
    }
  else if (rcode.is_fn_code ()
	   && gimple_call_combined_fn (stmt) == rcode)
    {
      unsigned i;
      for (i = 0; i < gimple_call_num_args (stmt); ++i)
	{
	  gcc_assert (ops[i] != NULL_TREE);
	  gimple_call_set_arg (stmt, i, ops[i]);
	}
      if (i < 3)
	gcc_assert (ops[i] == NULL_TREE);
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (!inplace)
    {
      if (gimple_has_lhs (stmt))
	{
	  tree lhs = gimple_get_lhs (stmt);
	  if (!maybe_push_res_to_seq (rcode, TREE_TYPE (lhs),
				      ops, seq, lhs))
	    return false;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	    }
	  gsi_replace_with_seq_vops (gsi, *seq);
	  return true;
	}
      else
	gcc_unreachable ();
    }

  return false;
}
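/* An illustrative sketch (editorial, not part of the original sources):
   if gimple_simplify reduced the condition of

     if (x_1 != x_1)

   for integral x_1 to the constant 0 (rcode == INTEGER_CST, ops[0] == 0),
   the INTEGER_CST arm above rewrites the statement via
   gimple_cond_make_false, leaving a trivially false GIMPLE_COND that
   CFG cleanup can remove.  */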
/* Canonicalize MEM_REFs invariant address operand after propagation.  */

static bool
maybe_canonicalize_mem_ref_addr (tree *t)
{
  bool res = false;
4271 if (TREE_CODE (*t
) == ADDR_EXPR
)
4272 t
= &TREE_OPERAND (*t
, 0);
  /* The C and C++ frontends use an ARRAY_REF for indexing with their
     generic vector extension.  The actual vector referenced is
     view-converted to an array type for this purpose.  If the index
     is constant the canonical representation in the middle-end is a
     BIT_FIELD_REF so rewrite the former to the latter here.  */
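  /* An illustrative example (editorial, not from the original sources):
     with GCC's generic vector extension

       typedef int v4si __attribute__ ((vector_size (16)));
       v4si v;
       ... v[2] ...

     the frontends emit VIEW_CONVERT_EXPR<int[4]>(v)[2], which the code
     below rewrites to BIT_FIELD_REF <v, 32, 64>, i.e. 32 bits at bit
     offset 2 * 32 == 64.  */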
4279 if (TREE_CODE (*t
) == ARRAY_REF
4280 && TREE_CODE (TREE_OPERAND (*t
, 0)) == VIEW_CONVERT_EXPR
4281 && TREE_CODE (TREE_OPERAND (*t
, 1)) == INTEGER_CST
4282 && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t
, 0), 0))))
4284 tree vtype
= TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t
, 0), 0));
4285 if (VECTOR_TYPE_P (vtype
))
4287 tree low
= array_ref_low_bound (*t
);
4288 if (TREE_CODE (low
) == INTEGER_CST
)
4290 if (tree_int_cst_le (low
, TREE_OPERAND (*t
, 1)))
4292 widest_int idx
= wi::sub (wi::to_widest (TREE_OPERAND (*t
, 1)),
4293 wi::to_widest (low
));
4294 idx
= wi::mul (idx
, wi::to_widest
4295 (TYPE_SIZE (TREE_TYPE (*t
))));
4297 = wi::add (idx
, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t
))));
4298 if (wi::les_p (ext
, wi::to_widest (TYPE_SIZE (vtype
))))
4300 *t
= build3_loc (EXPR_LOCATION (*t
), BIT_FIELD_REF
,
4302 TREE_OPERAND (TREE_OPERAND (*t
, 0), 0),
4303 TYPE_SIZE (TREE_TYPE (*t
)),
4304 wide_int_to_tree (bitsizetype
, idx
));
4312 while (handled_component_p (*t
))
4313 t
= &TREE_OPERAND (*t
, 0);
  /* Canonicalize MEM [&foo.bar, 0] which appears after propagation
     of invariant addresses into an SSA name MEM_REF address.  */
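  /* For instance (editorial sketch): if propagation turned MEM[p_1, 4]
     into MEM[&foo.bar, 4] and bar sits at byte offset 8 within foo, the
     code below rebuilds the reference as MEM[&foo, 12], making the base
     address a plain declaration again.  */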
4317 if (TREE_CODE (*t
) == MEM_REF
4318 || TREE_CODE (*t
) == TARGET_MEM_REF
)
4320 tree addr
= TREE_OPERAND (*t
, 0);
4321 if (TREE_CODE (addr
) == ADDR_EXPR
4322 && (TREE_CODE (TREE_OPERAND (addr
, 0)) == MEM_REF
4323 || handled_component_p (TREE_OPERAND (addr
, 0))))
4326 HOST_WIDE_INT coffset
;
4327 base
= get_addr_base_and_unit_offset (TREE_OPERAND (addr
, 0),
4332 TREE_OPERAND (*t
, 0) = build_fold_addr_expr (base
);
4333 TREE_OPERAND (*t
, 1) = int_const_binop (PLUS_EXPR
,
4334 TREE_OPERAND (*t
, 1),
4335 size_int (coffset
));
4338 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t
, 0)) == DEBUG_EXPR_DECL
4339 || is_gimple_mem_ref_addr (TREE_OPERAND (*t
, 0)));
4342 /* Canonicalize back MEM_REFs to plain reference trees if the object
4343 accessed is a decl that has the same access semantics as the MEM_REF. */
4344 if (TREE_CODE (*t
) == MEM_REF
4345 && TREE_CODE (TREE_OPERAND (*t
, 0)) == ADDR_EXPR
4346 && integer_zerop (TREE_OPERAND (*t
, 1))
4347 && MR_DEPENDENCE_CLIQUE (*t
) == 0)
4349 tree decl
= TREE_OPERAND (TREE_OPERAND (*t
, 0), 0);
4350 tree alias_type
= TREE_TYPE (TREE_OPERAND (*t
, 1));
4351 if (/* Same volatile qualification. */
4352 TREE_THIS_VOLATILE (*t
) == TREE_THIS_VOLATILE (decl
)
4353 /* Same TBAA behavior with -fstrict-aliasing. */
4354 && !TYPE_REF_CAN_ALIAS_ALL (alias_type
)
4355 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl
))
4356 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type
)))
4357 /* Same alignment. */
4358 && TYPE_ALIGN (TREE_TYPE (decl
)) == TYPE_ALIGN (TREE_TYPE (*t
))
	 /* We have to look out here to not drop a required conversion
	    from the rhs to the lhs if *t appears on the lhs or vice-versa
	    if it appears on the rhs.  Thus require strict type
	    compatibility.  */
4363 && types_compatible_p (TREE_TYPE (*t
), TREE_TYPE (decl
)))
4365 *t
= TREE_OPERAND (TREE_OPERAND (*t
, 0), 0);
4370 /* Canonicalize TARGET_MEM_REF in particular with respect to
4371 the indexes becoming constant. */
4372 else if (TREE_CODE (*t
) == TARGET_MEM_REF
)
4374 tree tem
= maybe_fold_tmr (*t
);
/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases.  */

static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
{
  bool changed = false;
  gimple *stmt = gsi_stmt (*gsi);
  bool nowarning = gimple_no_warning_p (stmt);

  fold_defer_overflow_warnings ();
  /* First do required canonicalization of [TARGET_]MEM_REF addresses
     after propagation.
     ???  This shouldn't be done in generic folding but in the
     propagation helpers which also know whether an address was
     propagated.
     Also canonicalize operand order.  */
4403 switch (gimple_code (stmt
))
4406 if (gimple_assign_rhs_class (stmt
) == GIMPLE_SINGLE_RHS
)
4408 tree
*rhs
= gimple_assign_rhs1_ptr (stmt
);
4409 if ((REFERENCE_CLASS_P (*rhs
)
4410 || TREE_CODE (*rhs
) == ADDR_EXPR
)
4411 && maybe_canonicalize_mem_ref_addr (rhs
))
4413 tree
*lhs
= gimple_assign_lhs_ptr (stmt
);
4414 if (REFERENCE_CLASS_P (*lhs
)
4415 && maybe_canonicalize_mem_ref_addr (lhs
))
4420 /* Canonicalize operand order. */
4421 enum tree_code code
= gimple_assign_rhs_code (stmt
);
4422 if (TREE_CODE_CLASS (code
) == tcc_comparison
4423 || commutative_tree_code (code
)
4424 || commutative_ternary_tree_code (code
))
4426 tree rhs1
= gimple_assign_rhs1 (stmt
);
4427 tree rhs2
= gimple_assign_rhs2 (stmt
);
4428 if (tree_swap_operands_p (rhs1
, rhs2
))
4430 gimple_assign_set_rhs1 (stmt
, rhs2
);
4431 gimple_assign_set_rhs2 (stmt
, rhs1
);
4432 if (TREE_CODE_CLASS (code
) == tcc_comparison
)
4433 gimple_assign_set_rhs_code (stmt
,
4434 swap_tree_comparison (code
));
4442 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
4444 tree
*arg
= gimple_call_arg_ptr (stmt
, i
);
4445 if (REFERENCE_CLASS_P (*arg
)
4446 && maybe_canonicalize_mem_ref_addr (arg
))
4449 tree
*lhs
= gimple_call_lhs_ptr (stmt
);
4451 && REFERENCE_CLASS_P (*lhs
)
4452 && maybe_canonicalize_mem_ref_addr (lhs
))
4458 gasm
*asm_stmt
= as_a
<gasm
*> (stmt
);
4459 for (i
= 0; i
< gimple_asm_noutputs (asm_stmt
); ++i
)
4461 tree link
= gimple_asm_output_op (asm_stmt
, i
);
4462 tree op
= TREE_VALUE (link
);
4463 if (REFERENCE_CLASS_P (op
)
4464 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link
)))
4467 for (i
= 0; i
< gimple_asm_ninputs (asm_stmt
); ++i
)
4469 tree link
= gimple_asm_input_op (asm_stmt
, i
);
4470 tree op
= TREE_VALUE (link
);
4471 if ((REFERENCE_CLASS_P (op
)
4472 || TREE_CODE (op
) == ADDR_EXPR
)
4473 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link
)))
4479 if (gimple_debug_bind_p (stmt
))
4481 tree
*val
= gimple_debug_bind_get_value_ptr (stmt
);
4483 && (REFERENCE_CLASS_P (*val
)
4484 || TREE_CODE (*val
) == ADDR_EXPR
)
4485 && maybe_canonicalize_mem_ref_addr (val
))
4491 /* Canonicalize operand order. */
4492 tree lhs
= gimple_cond_lhs (stmt
);
4493 tree rhs
= gimple_cond_rhs (stmt
);
4494 if (tree_swap_operands_p (lhs
, rhs
))
4496 gcond
*gc
= as_a
<gcond
*> (stmt
);
4497 gimple_cond_set_lhs (gc
, rhs
);
4498 gimple_cond_set_rhs (gc
, lhs
);
4499 gimple_cond_set_code (gc
,
4500 swap_tree_comparison (gimple_cond_code (gc
)));
4507 /* Dispatch to pattern-based folding. */
4509 || is_gimple_assign (stmt
)
4510 || gimple_code (stmt
) == GIMPLE_COND
)
4512 gimple_seq seq
= NULL
;
4515 if (gimple_simplify (stmt
, &rcode
, ops
, inplace
? NULL
: &seq
,
4516 valueize
, valueize
))
4518 if (replace_stmt_with_simplification (gsi
, rcode
, ops
, &seq
, inplace
))
4521 gimple_seq_discard (seq
);
4525 stmt
= gsi_stmt (*gsi
);
4527 /* Fold the main computation performed by the statement. */
4528 switch (gimple_code (stmt
))
4532 /* Try to canonicalize for boolean-typed X the comparisons
4533 X == 0, X == 1, X != 0, and X != 1. */
4534 if (gimple_assign_rhs_code (stmt
) == EQ_EXPR
4535 || gimple_assign_rhs_code (stmt
) == NE_EXPR
)
4537 tree lhs
= gimple_assign_lhs (stmt
);
4538 tree op1
= gimple_assign_rhs1 (stmt
);
4539 tree op2
= gimple_assign_rhs2 (stmt
);
4540 tree type
= TREE_TYPE (op1
);
	    /* Check whether the comparison operands have the same boolean
	       type as the result type.
	       Check that the second operand is an integer constant with
	       value one or zero.  */
4546 if (TREE_CODE (op2
) == INTEGER_CST
4547 && (integer_zerop (op2
) || integer_onep (op2
))
4548 && useless_type_conversion_p (TREE_TYPE (lhs
), type
))
4550 enum tree_code cmp_code
= gimple_assign_rhs_code (stmt
);
4551 bool is_logical_not
= false;
		/* X == 0 and X != 1 is a logical-not of X;
		   X == 1 and X != 0 is X.  */
4555 if ((cmp_code
== EQ_EXPR
&& integer_zerop (op2
))
4556 || (cmp_code
== NE_EXPR
&& integer_onep (op2
)))
4557 is_logical_not
= true;
4559 if (is_logical_not
== false)
4560 gimple_assign_set_rhs_with_ops (gsi
, TREE_CODE (op1
), op1
);
		/* Only for one-bit precision typed X is the transformation
		   !X -> ~X valid.  */
4563 else if (TYPE_PRECISION (type
) == 1)
4564 gimple_assign_set_rhs_with_ops (gsi
, BIT_NOT_EXPR
, op1
);
4565 /* Otherwise we use !X -> X ^ 1. */
4567 gimple_assign_set_rhs_with_ops (gsi
, BIT_XOR_EXPR
, op1
,
4568 build_int_cst (type
, 1));
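	    /* Illustrative GIMPLE (editorial sketch): for _Bool x_1,

		 tem_2 = x_1 == 0;   becomes   tem_2 = ~x_1;
		 tem_2 = x_1 != 0;   becomes   tem_2 = x_1;

	       while for a boolean-typed X of precision greater than one
	       the logical-not form becomes tem_2 = x_1 ^ 1 instead.  */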
4574 unsigned old_num_ops
= gimple_num_ops (stmt
);
4575 tree lhs
= gimple_assign_lhs (stmt
);
4576 tree new_rhs
= fold_gimple_assign (gsi
);
4578 && !useless_type_conversion_p (TREE_TYPE (lhs
),
4579 TREE_TYPE (new_rhs
)))
4580 new_rhs
= fold_convert (TREE_TYPE (lhs
), new_rhs
);
4583 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs
)) < old_num_ops
))
4585 gimple_assign_set_rhs_from_tree (gsi
, new_rhs
);
4592 changed
|= gimple_fold_call (gsi
, inplace
);
4596 /* Fold *& in asm operands. */
4598 gasm
*asm_stmt
= as_a
<gasm
*> (stmt
);
4600 const char **oconstraints
;
4601 const char *constraint
;
4602 bool allows_mem
, allows_reg
;
4604 noutputs
= gimple_asm_noutputs (asm_stmt
);
4605 oconstraints
= XALLOCAVEC (const char *, noutputs
);
4607 for (i
= 0; i
< gimple_asm_noutputs (asm_stmt
); ++i
)
4609 tree link
= gimple_asm_output_op (asm_stmt
, i
);
4610 tree op
= TREE_VALUE (link
);
4612 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link
)));
4613 if (REFERENCE_CLASS_P (op
)
4614 && (op
= maybe_fold_reference (op
, true)) != NULL_TREE
)
4616 TREE_VALUE (link
) = op
;
4620 for (i
= 0; i
< gimple_asm_ninputs (asm_stmt
); ++i
)
4622 tree link
= gimple_asm_input_op (asm_stmt
, i
);
4623 tree op
= TREE_VALUE (link
);
4625 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link
)));
4626 parse_input_constraint (&constraint
, 0, 0, noutputs
, 0,
4627 oconstraints
, &allows_mem
, &allows_reg
);
4628 if (REFERENCE_CLASS_P (op
)
4629 && (op
= maybe_fold_reference (op
, !allows_reg
&& allows_mem
))
4632 TREE_VALUE (link
) = op
;
4640 if (gimple_debug_bind_p (stmt
))
4642 tree val
= gimple_debug_bind_get_value (stmt
);
4644 && REFERENCE_CLASS_P (val
))
4646 tree tem
= maybe_fold_reference (val
, false);
4649 gimple_debug_bind_set_value (stmt
, tem
);
4654 && TREE_CODE (val
) == ADDR_EXPR
)
4656 tree ref
= TREE_OPERAND (val
, 0);
4657 tree tem
= maybe_fold_reference (ref
, false);
4660 tem
= build_fold_addr_expr_with_type (tem
, TREE_TYPE (val
));
4661 gimple_debug_bind_set_value (stmt
, tem
);
4670 greturn
*ret_stmt
= as_a
<greturn
*> (stmt
);
4671 tree ret
= gimple_return_retval(ret_stmt
);
4673 if (ret
&& TREE_CODE (ret
) == SSA_NAME
&& valueize
)
4675 tree val
= valueize (ret
);
4676 if (val
&& val
!= ret
4677 && may_propagate_copy (ret
, val
))
4679 gimple_return_set_retval (ret_stmt
, val
);
  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  */
  if (gimple_has_lhs (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
	{
	  tree new_lhs = maybe_fold_reference (lhs, true);
	  if (new_lhs)
	    {
	      gimple_set_lhs (stmt, new_lhs);
	      changed = true;
	    }
	}
    }

  fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);
  return changed;
}
/* Valueization callback that ends up not following SSA edges.  */

tree
no_follow_ssa_edges (tree)
{
  return NULL_TREE;
}

/* Valueization callback that ends up following single-use SSA edges only.  */

tree
follow_single_use_edges (tree val)
{
  if (TREE_CODE (val) == SSA_NAME
      && !has_single_use (val))
    return NULL_TREE;
  return val;
}
/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.
   The statement pointed to by GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
}

bool
fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
{
  return fold_stmt_1 (gsi, false, valueize);
}

/* Perform the minimal folding on statement *GSI.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.
   The statement *GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt_inplace (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
  gcc_assert (gsi_stmt (*gsi) == stmt);
  return changed;
}
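/* Illustrative usage sketch (editorial, not from the original sources):
   a pass might fold every statement of a function FN, a hypothetical
   struct function *, like so:

     basic_block bb;
     FOR_EACH_BB_FN (bb, fn)
       for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
	    !gsi_end_p (gsi); gsi_next (&gsi))
	 fold_stmt (&gsi);
*/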
/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
   if EXPR is null or we don't know how.
   If non-null, the result always has boolean type.  */

static tree
canonicalize_bool (tree expr, bool invert)
{
  if (!expr)
    return NULL_TREE;

  if (invert)
    {
      if (integer_nonzerop (expr))
	return boolean_false_node;
      else if (integer_zerop (expr))
	return boolean_true_node;
      else if (TREE_CODE (expr) == SSA_NAME)
	return fold_build2 (EQ_EXPR, boolean_type_node, expr,
			    build_int_cst (TREE_TYPE (expr), 0));
      else if (COMPARISON_CLASS_P (expr))
	return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
			    boolean_type_node,
			    TREE_OPERAND (expr, 0),
			    TREE_OPERAND (expr, 1));
      else
	return NULL_TREE;
    }
  else
    {
      if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
	return expr;
      if (integer_nonzerop (expr))
	return boolean_true_node;
      else if (integer_zerop (expr))
	return boolean_false_node;
      else if (TREE_CODE (expr) == SSA_NAME)
	return fold_build2 (NE_EXPR, boolean_type_node, expr,
			    build_int_cst (TREE_TYPE (expr), 0));
      else if (COMPARISON_CLASS_P (expr))
	return fold_build2 (TREE_CODE (expr),
			    boolean_type_node,
			    TREE_OPERAND (expr, 0),
			    TREE_OPERAND (expr, 1));
      else
	return NULL_TREE;
    }
}
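/* Behavior sketch (editorial, not from the original sources):

     canonicalize_bool (integer 1, true)  => boolean_false_node
     canonicalize_bool (a_1 < b_2, true)  => a_1 >= b_2  (integral operands)
     canonicalize_bool (x_1, false)       => x_1 if already boolean-typed,
					     otherwise x_1 != 0
     canonicalize_bool (NULL_TREE, ...)   => NULL_TREE  */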
/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  */

static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
			const_tree op1, const_tree op2)
{
  gimple *s;

  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      if (operand_equal_p (expr, op1, 0))
	return ((code == NE_EXPR && integer_zerop (op2))
		|| (code == EQ_EXPR && integer_nonzerop (op2)));
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
	  && gimple_assign_rhs_code (s) == code
	  && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
	  && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
	return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
	{
	  enum tree_code c = gimple_assign_rhs_code (s);
	  if ((c == NE_EXPR && integer_zerop (op2))
	      || (c == EQ_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr, c,
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	  if ((c == EQ_EXPR && integer_zerop (op2))
	      || (c == NE_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr,
					   invert_tree_comparison (c, false),
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	}
    }

  return false;
}
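/* Example (editorial sketch): given the GIMPLE definition

     t_1 = a_2 < b_3;

   same_bool_comparison_p (t_1, LT_EXPR, a_2, b_3) returns true via the
   SSA_NAME definition check above.  */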
/* Check to see if two boolean expressions OP1 and OP2 are logically
   equivalent.  */

static bool
same_bool_result_p (const_tree op1, const_tree op2)
{
  /* Simple cases first.  */
  if (operand_equal_p (op1, op2, 0))
    return true;

  /* Check the cases where at least one of the operands is a comparison.
     These are a bit smarter than operand_equal_p in that they apply some
     identities on SSA_NAMEs.  */
  if (COMPARISON_CLASS_P (op2)
      && same_bool_comparison_p (op1, TREE_CODE (op2),
				 TREE_OPERAND (op2, 0),
				 TREE_OPERAND (op2, 1)))
    return true;
  if (COMPARISON_CLASS_P (op1)
      && same_bool_comparison_p (op2, TREE_CODE (op1),
				 TREE_OPERAND (op1, 0),
				 TREE_OPERAND (op1, 1)))
    return true;

  return false;
}
/* Forward declarations for some mutually recursive functions.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison (tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison_1 (gimple *stmt,
			   enum tree_code code2, tree op2a, tree op2b);
static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison (tree var, bool invert,
			enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison_1 (gimple *stmt,
			  enum tree_code code2, tree op2a, tree op2b);
/* Helper function for and_comparisons_1:  try to simplify the AND of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of the VAR before doing the AND.
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison (tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b)
{
  tree t;
  gimple *stmt = SSA_NAME_DEF_STMT (var);

  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.  */
  if (invert)
    t = or_var_with_comparison_1 (stmt,
				  invert_tree_comparison (code2, false),
				  op2a, op2b);
  else
    t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
  return canonicalize_bool (t, invert);
}
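/* For example (editorial sketch): with INVERT true and the query
   !v_1 AND (a_2 > b_3), the code above computes
   t = v_1 OR (a_2 <= b_3) and canonicalize_bool then inverts T,
   which matches !(v_1 OR !(a_2 > b_3)) by DeMorgan's law.  */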
/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_TREE if we can't simplify this to a single expression.  */
4952 and_var_with_comparison_1 (gimple
*stmt
,
4953 enum tree_code code2
, tree op2a
, tree op2b
)
4955 tree var
= gimple_assign_lhs (stmt
);
4956 tree true_test_var
= NULL_TREE
;
4957 tree false_test_var
= NULL_TREE
;
4958 enum tree_code innercode
= gimple_assign_rhs_code (stmt
);
4960 /* Check for identities like (var AND (var == 0)) => false. */
4961 if (TREE_CODE (op2a
) == SSA_NAME
4962 && TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
)
4964 if ((code2
== NE_EXPR
&& integer_zerop (op2b
))
4965 || (code2
== EQ_EXPR
&& integer_nonzerop (op2b
)))
4967 true_test_var
= op2a
;
4968 if (var
== true_test_var
)
4971 else if ((code2
== EQ_EXPR
&& integer_zerop (op2b
))
4972 || (code2
== NE_EXPR
&& integer_nonzerop (op2b
)))
4974 false_test_var
= op2a
;
4975 if (var
== false_test_var
)
4976 return boolean_false_node
;
4980 /* If the definition is a comparison, recurse on it. */
4981 if (TREE_CODE_CLASS (innercode
) == tcc_comparison
)
4983 tree t
= and_comparisons_1 (innercode
,
4984 gimple_assign_rhs1 (stmt
),
4985 gimple_assign_rhs2 (stmt
),
4993 /* If the definition is an AND or OR expression, we may be able to
4994 simplify by reassociating. */
4995 if (TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
4996 && (innercode
== BIT_AND_EXPR
|| innercode
== BIT_IOR_EXPR
))
4998 tree inner1
= gimple_assign_rhs1 (stmt
);
4999 tree inner2
= gimple_assign_rhs2 (stmt
);
5002 tree partial
= NULL_TREE
;
5003 bool is_and
= (innercode
== BIT_AND_EXPR
);
5005 /* Check for boolean identities that don't require recursive examination
5007 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
5008 inner1 AND (inner1 OR inner2) => inner1
5009 !inner1 AND (inner1 AND inner2) => false
5010 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
5011 Likewise for similar cases involving inner2. */
5012 if (inner1
== true_test_var
)
5013 return (is_and
? var
: inner1
);
5014 else if (inner2
== true_test_var
)
5015 return (is_and
? var
: inner2
);
5016 else if (inner1
== false_test_var
)
5018 ? boolean_false_node
5019 : and_var_with_comparison (inner2
, false, code2
, op2a
, op2b
));
5020 else if (inner2
== false_test_var
)
5022 ? boolean_false_node
5023 : and_var_with_comparison (inner1
, false, code2
, op2a
, op2b
));
5025 /* Next, redistribute/reassociate the AND across the inner tests.
5026 Compute the first partial result, (inner1 AND (op2a code op2b)) */
5027 if (TREE_CODE (inner1
) == SSA_NAME
5028 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner1
))
5029 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
5030 && (t
= maybe_fold_and_comparisons (gimple_assign_rhs_code (s
),
5031 gimple_assign_rhs1 (s
),
5032 gimple_assign_rhs2 (s
),
5033 code2
, op2a
, op2b
)))
5035 /* Handle the AND case, where we are reassociating:
5036 (inner1 AND inner2) AND (op2a code2 op2b)
5038 If the partial result t is a constant, we win. Otherwise
5039 continue on to try reassociating with the other inner test. */
5042 if (integer_onep (t
))
5044 else if (integer_zerop (t
))
5045 return boolean_false_node
;
5048 /* Handle the OR case, where we are redistributing:
5049 (inner1 OR inner2) AND (op2a code2 op2b)
5050 => (t OR (inner2 AND (op2a code2 op2b))) */
5051 else if (integer_onep (t
))
5052 return boolean_true_node
;
5054 /* Save partial result for later. */
5058 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
5059 if (TREE_CODE (inner2
) == SSA_NAME
5060 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner2
))
5061 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
5062 && (t
= maybe_fold_and_comparisons (gimple_assign_rhs_code (s
),
5063 gimple_assign_rhs1 (s
),
5064 gimple_assign_rhs2 (s
),
5065 code2
, op2a
, op2b
)))
5067 /* Handle the AND case, where we are reassociating:
5068 (inner1 AND inner2) AND (op2a code2 op2b)
5069 => (inner1 AND t) */
5072 if (integer_onep (t
))
5074 else if (integer_zerop (t
))
5075 return boolean_false_node
;
5076 /* If both are the same, we can apply the identity
5078 else if (partial
&& same_bool_result_p (t
, partial
))
5082 /* Handle the OR case. where we are redistributing:
5083 (inner1 OR inner2) AND (op2a code2 op2b)
5084 => (t OR (inner1 AND (op2a code2 op2b)))
5085 => (t OR partial) */
5088 if (integer_onep (t
))
5089 return boolean_true_node
;
5092 /* We already got a simplification for the other
5093 operand to the redistributed OR expression. The
5094 interesting case is when at least one is false.
5095 Or, if both are the same, we can apply the identity
5097 if (integer_zerop (partial
))
5099 else if (integer_zerop (t
))
5101 else if (same_bool_result_p (t
, partial
))
/* Try to simplify the AND of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));
5123 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
5124 if (operand_equal_p (op1a
, op2a
, 0)
5125 && operand_equal_p (op1b
, op2b
, 0))
5127 /* Result will be either NULL_TREE, or a combined comparison. */
5128 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
5129 TRUTH_ANDIF_EXPR
, code1
, code2
,
5130 truth_type
, op1a
, op1b
);
5135 /* Likewise the swapped case of the above. */
5136 if (operand_equal_p (op1a
, op2b
, 0)
5137 && operand_equal_p (op1b
, op2a
, 0))
5139 /* Result will be either NULL_TREE, or a combined comparison. */
5140 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
5141 TRUTH_ANDIF_EXPR
, code1
,
5142 swap_tree_comparison (code2
),
5143 truth_type
, op1a
, op1b
);
5148 /* If both comparisons are of the same value against constants, we might
5149 be able to merge them. */
5150 if (operand_equal_p (op1a
, op2a
, 0)
5151 && TREE_CODE (op1b
) == INTEGER_CST
5152 && TREE_CODE (op2b
) == INTEGER_CST
)
5154 int cmp
= tree_int_cst_compare (op1b
, op2b
);
5156 /* If we have (op1a == op1b), we should either be able to
5157 return that or FALSE, depending on whether the constant op1b
5158 also satisfies the other comparison against op2b. */
5159 if (code1
== EQ_EXPR
)
5165 case EQ_EXPR
: val
= (cmp
== 0); break;
5166 case NE_EXPR
: val
= (cmp
!= 0); break;
5167 case LT_EXPR
: val
= (cmp
< 0); break;
5168 case GT_EXPR
: val
= (cmp
> 0); break;
5169 case LE_EXPR
: val
= (cmp
<= 0); break;
5170 case GE_EXPR
: val
= (cmp
>= 0); break;
5171 default: done
= false;
5176 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
5178 return boolean_false_node
;
5181 /* Likewise if the second comparison is an == comparison. */
5182 else if (code2
== EQ_EXPR
)
5188 case EQ_EXPR
: val
= (cmp
== 0); break;
5189 case NE_EXPR
: val
= (cmp
!= 0); break;
5190 case LT_EXPR
: val
= (cmp
> 0); break;
5191 case GT_EXPR
: val
= (cmp
< 0); break;
5192 case LE_EXPR
: val
= (cmp
>= 0); break;
5193 case GE_EXPR
: val
= (cmp
<= 0); break;
5194 default: done
= false;
5199 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
5201 return boolean_false_node
;
5205 /* Same business with inequality tests. */
5206 else if (code1
== NE_EXPR
)
5211 case EQ_EXPR
: val
= (cmp
!= 0); break;
5212 case NE_EXPR
: val
= (cmp
== 0); break;
5213 case LT_EXPR
: val
= (cmp
>= 0); break;
5214 case GT_EXPR
: val
= (cmp
<= 0); break;
5215 case LE_EXPR
: val
= (cmp
> 0); break;
5216 case GE_EXPR
: val
= (cmp
< 0); break;
5221 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
5223 else if (code2
== NE_EXPR
)
5228 case EQ_EXPR
: val
= (cmp
== 0); break;
5229 case NE_EXPR
: val
= (cmp
!= 0); break;
5230 case LT_EXPR
: val
= (cmp
<= 0); break;
5231 case GT_EXPR
: val
= (cmp
>= 0); break;
5232 case LE_EXPR
: val
= (cmp
< 0); break;
5233 case GE_EXPR
: val
= (cmp
> 0); break;
5238 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
      /* Choose the more restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      /* Likewise choose the more restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      /* Check for singleton ranges.  */
      else if (cmp == 0
	       && ((code1 == LE_EXPR && code2 == GE_EXPR)
		   || (code1 == GE_EXPR && code2 == LE_EXPR)))
	return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
      /* Check for disjoint ranges.  */
      else if (cmp <= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_false_node;
      else if (cmp >= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_false_node;
    }
5278 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
5279 NAME's definition is a truth value. See if there are any simplifications
5280 that can be done against the NAME's definition. */
5281 if (TREE_CODE (op1a
) == SSA_NAME
5282 && (code1
== NE_EXPR
|| code1
== EQ_EXPR
)
5283 && (integer_zerop (op1b
) || integer_onep (op1b
)))
5285 bool invert
= ((code1
== EQ_EXPR
&& integer_zerop (op1b
))
5286 || (code1
== NE_EXPR
&& integer_onep (op1b
)));
5287 gimple
*stmt
= SSA_NAME_DEF_STMT (op1a
);
5288 switch (gimple_code (stmt
))
5291 /* Try to simplify by copy-propagating the definition. */
5292 return and_var_with_comparison (op1a
, invert
, code2
, op2a
, op2b
);
5295 /* If every argument to the PHI produces the same result when
5296 ANDed with the second comparison, we win.
5297 Do not do this unless the type is bool since we need a bool
5298 result here anyway. */
5299 if (TREE_CODE (TREE_TYPE (op1a
)) == BOOLEAN_TYPE
)
5301 tree result
= NULL_TREE
;
5303 for (i
= 0; i
< gimple_phi_num_args (stmt
); i
++)
5305 tree arg
= gimple_phi_arg_def (stmt
, i
);
5307 /* If this PHI has itself as an argument, ignore it.
5308 If all the other args produce the same result,
5310 if (arg
== gimple_phi_result (stmt
))
5312 else if (TREE_CODE (arg
) == INTEGER_CST
)
5314 if (invert
? integer_nonzerop (arg
) : integer_zerop (arg
))
5317 result
= boolean_false_node
;
5318 else if (!integer_zerop (result
))
5322 result
= fold_build2 (code2
, boolean_type_node
,
5324 else if (!same_bool_comparison_p (result
,
5328 else if (TREE_CODE (arg
) == SSA_NAME
5329 && !SSA_NAME_IS_DEFAULT_DEF (arg
))
5332 gimple
*def_stmt
= SSA_NAME_DEF_STMT (arg
);
5333 /* In simple cases we can look through PHI nodes,
5334 but we have to be careful with loops.
5336 if (! dom_info_available_p (CDI_DOMINATORS
)
5337 || gimple_bb (def_stmt
) == gimple_bb (stmt
)
5338 || dominated_by_p (CDI_DOMINATORS
,
5339 gimple_bb (def_stmt
),
5342 temp
= and_var_with_comparison (arg
, invert
, code2
,
5348 else if (!same_bool_result_p (result
, temp
))
/* Try to simplify the AND of two comparisons, specified by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be simplified to a single expression (without requiring
   introducing more SSA variables to hold intermediate values),
   return the resulting tree.  Otherwise return NULL_TREE.
   If the result expression is non-null, it has boolean type.  */

tree
maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
			    enum tree_code code2, tree op2a, tree op2b)
{
  tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
  if (t)
    return t;
  else
    return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
}
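/* Worked examples (editorial sketch, for an integral x_1):

     maybe_fold_and_comparisons (GE_EXPR, x_1, 5, LE_EXPR, x_1, 5)
       => x_1 == 5		(singleton range)
     maybe_fold_and_comparisons (LT_EXPR, x_1, 5, GT_EXPR, x_1, 7)
       => boolean_false_node	(disjoint ranges)
     maybe_fold_and_comparisons (LT_EXPR, x_1, 5, LT_EXPR, x_1, 7)
       => x_1 < 5		(the more restrictive bound)  */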
/* Helper function for or_comparisons_1:  try to simplify the OR of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of VAR before doing the OR.
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison (tree var, bool invert,
			enum tree_code code2, tree op2a, tree op2b)
{
  tree t;
  gimple *stmt = SSA_NAME_DEF_STMT (var);

  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.  */
  if (invert)
    t = and_var_with_comparison_1 (stmt,
				   invert_tree_comparison (code2, false),
				   op2a, op2b);
  else
    t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
  return canonicalize_bool (t, invert);
}
/* Try to simplify the OR of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_TREE if we can't simplify this to a single expression.  */
5415 or_var_with_comparison_1 (gimple
*stmt
,
5416 enum tree_code code2
, tree op2a
, tree op2b
)
5418 tree var
= gimple_assign_lhs (stmt
);
5419 tree true_test_var
= NULL_TREE
;
5420 tree false_test_var
= NULL_TREE
;
5421 enum tree_code innercode
= gimple_assign_rhs_code (stmt
);
5423 /* Check for identities like (var OR (var != 0)) => true . */
5424 if (TREE_CODE (op2a
) == SSA_NAME
5425 && TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
)
5427 if ((code2
== NE_EXPR
&& integer_zerop (op2b
))
5428 || (code2
== EQ_EXPR
&& integer_nonzerop (op2b
)))
5430 true_test_var
= op2a
;
5431 if (var
== true_test_var
)
5434 else if ((code2
== EQ_EXPR
&& integer_zerop (op2b
))
5435 || (code2
== NE_EXPR
&& integer_nonzerop (op2b
)))
5437 false_test_var
= op2a
;
5438 if (var
== false_test_var
)
5439 return boolean_true_node
;
5443 /* If the definition is a comparison, recurse on it. */
5444 if (TREE_CODE_CLASS (innercode
) == tcc_comparison
)
5446 tree t
= or_comparisons_1 (innercode
,
5447 gimple_assign_rhs1 (stmt
),
5448 gimple_assign_rhs2 (stmt
),
5456 /* If the definition is an AND or OR expression, we may be able to
5457 simplify by reassociating. */
5458 if (TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
5459 && (innercode
== BIT_AND_EXPR
|| innercode
== BIT_IOR_EXPR
))
5461 tree inner1
= gimple_assign_rhs1 (stmt
);
5462 tree inner2
= gimple_assign_rhs2 (stmt
);
5465 tree partial
= NULL_TREE
;
5466 bool is_or
= (innercode
== BIT_IOR_EXPR
);
5468 /* Check for boolean identities that don't require recursive examination
5470 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
5471 inner1 OR (inner1 AND inner2) => inner1
5472 !inner1 OR (inner1 OR inner2) => true
5473 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
5475 if (inner1
== true_test_var
)
5476 return (is_or
? var
: inner1
);
5477 else if (inner2
== true_test_var
)
5478 return (is_or
? var
: inner2
);
5479 else if (inner1
== false_test_var
)
5482 : or_var_with_comparison (inner2
, false, code2
, op2a
, op2b
));
5483 else if (inner2
== false_test_var
)
5486 : or_var_with_comparison (inner1
, false, code2
, op2a
, op2b
));
5488 /* Next, redistribute/reassociate the OR across the inner tests.
5489 Compute the first partial result, (inner1 OR (op2a code op2b)) */
5490 if (TREE_CODE (inner1
) == SSA_NAME
5491 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner1
))
5492 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
5493 && (t
= maybe_fold_or_comparisons (gimple_assign_rhs_code (s
),
5494 gimple_assign_rhs1 (s
),
5495 gimple_assign_rhs2 (s
),
5496 code2
, op2a
, op2b
)))
5498 /* Handle the OR case, where we are reassociating:
5499 (inner1 OR inner2) OR (op2a code2 op2b)
5501 If the partial result t is a constant, we win. Otherwise
5502 continue on to try reassociating with the other inner test. */
5505 if (integer_onep (t
))
5506 return boolean_true_node
;
5507 else if (integer_zerop (t
))
5511 /* Handle the AND case, where we are redistributing:
5512 (inner1 AND inner2) OR (op2a code2 op2b)
5513 => (t AND (inner2 OR (op2a code op2b))) */
5514 else if (integer_zerop (t
))
5515 return boolean_false_node
;
5517 /* Save partial result for later. */
5521 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
5522 if (TREE_CODE (inner2
) == SSA_NAME
5523 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner2
))
5524 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
5525 && (t
= maybe_fold_or_comparisons (gimple_assign_rhs_code (s
),
5526 gimple_assign_rhs1 (s
),
5527 gimple_assign_rhs2 (s
),
5528 code2
, op2a
, op2b
)))
5530 /* Handle the OR case, where we are reassociating:
5531 (inner1 OR inner2) OR (op2a code2 op2b)
5533 => (t OR partial) */
5536 if (integer_zerop (t
))
5538 else if (integer_onep (t
))
5539 return boolean_true_node
;
5540 /* If both are the same, we can apply the identity
5542 else if (partial
&& same_bool_result_p (t
, partial
))
5546 /* Handle the AND case, where we are redistributing:
5547 (inner1 AND inner2) OR (op2a code2 op2b)
5548 => (t AND (inner1 OR (op2a code2 op2b)))
5549 => (t AND partial) */
5552 if (integer_zerop (t
))
5553 return boolean_false_node
;
5556 /* We already got a simplification for the other
5557 operand to the redistributed AND expression. The
5558 interesting case is when at least one is true.
5559 Or, if both are the same, we can apply the identity
5561 if (integer_onep (partial
))
5563 else if (integer_onep (t
))
5565 else if (same_bool_result_p (t
, partial
))
/* Try to simplify the OR of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));
5587 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
5588 if (operand_equal_p (op1a
, op2a
, 0)
5589 && operand_equal_p (op1b
, op2b
, 0))
5591 /* Result will be either NULL_TREE, or a combined comparison. */
5592 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
5593 TRUTH_ORIF_EXPR
, code1
, code2
,
5594 truth_type
, op1a
, op1b
);
5599 /* Likewise the swapped case of the above. */
5600 if (operand_equal_p (op1a
, op2b
, 0)
5601 && operand_equal_p (op1b
, op2a
, 0))
5603 /* Result will be either NULL_TREE, or a combined comparison. */
5604 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
5605 TRUTH_ORIF_EXPR
, code1
,
5606 swap_tree_comparison (code2
),
5607 truth_type
, op1a
, op1b
);
5612 /* If both comparisons are of the same value against constants, we might
5613 be able to merge them. */
5614 if (operand_equal_p (op1a
, op2a
, 0)
5615 && TREE_CODE (op1b
) == INTEGER_CST
5616 && TREE_CODE (op2b
) == INTEGER_CST
)
5618 int cmp
= tree_int_cst_compare (op1b
, op2b
);
5620 /* If we have (op1a != op1b), we should either be able to
5621 return that or TRUE, depending on whether the constant op1b
5622 also satisfies the other comparison against op2b. */
5623 if (code1
== NE_EXPR
)
5629 case EQ_EXPR
: val
= (cmp
== 0); break;
5630 case NE_EXPR
: val
= (cmp
!= 0); break;
5631 case LT_EXPR
: val
= (cmp
< 0); break;
5632 case GT_EXPR
: val
= (cmp
> 0); break;
5633 case LE_EXPR
: val
= (cmp
<= 0); break;
5634 case GE_EXPR
: val
= (cmp
>= 0); break;
5635 default: done
= false;
5640 return boolean_true_node
;
5642 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
5645 /* Likewise if the second comparison is a != comparison. */
5646 else if (code2
== NE_EXPR
)
5652 case EQ_EXPR
: val
= (cmp
== 0); break;
5653 case NE_EXPR
: val
= (cmp
!= 0); break;
5654 case LT_EXPR
: val
= (cmp
> 0); break;
5655 case GT_EXPR
: val
= (cmp
< 0); break;
5656 case LE_EXPR
: val
= (cmp
>= 0); break;
5657 case GE_EXPR
: val
= (cmp
<= 0); break;
5658 default: done
= false;
5663 return boolean_true_node
;
5665 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
5669 /* See if an equality test is redundant with the other comparison. */
5670 else if (code1
== EQ_EXPR
)
5675 case EQ_EXPR
: val
= (cmp
== 0); break;
5676 case NE_EXPR
: val
= (cmp
!= 0); break;
5677 case LT_EXPR
: val
= (cmp
< 0); break;
5678 case GT_EXPR
: val
= (cmp
> 0); break;
5679 case LE_EXPR
: val
= (cmp
<= 0); break;
5680 case GE_EXPR
: val
= (cmp
>= 0); break;
5685 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
5687 else if (code2
== EQ_EXPR
)
5692 case EQ_EXPR
: val
= (cmp
== 0); break;
5693 case NE_EXPR
: val
= (cmp
!= 0); break;
5694 case LT_EXPR
: val
= (cmp
> 0); break;
5695 case GT_EXPR
: val
= (cmp
< 0); break;
5696 case LE_EXPR
: val
= (cmp
>= 0); break;
5697 case GE_EXPR
: val
= (cmp
<= 0); break;
5702 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
      /* Choose the less restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}
      /* Likewise choose the less restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}
      /* Check for singleton ranges.  */
      else if (cmp == 0
	       && ((code1 == LT_EXPR && code2 == GT_EXPR)
		   || (code1 == GT_EXPR && code2 == LT_EXPR)))
	return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
      /* Check for less/greater pairs that don't restrict the range at all.  */
      else if (cmp >= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_true_node;
      else if (cmp <= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_true_node;
    }
5742 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
5743 NAME's definition is a truth value. See if there are any simplifications
5744 that can be done against the NAME's definition. */
5745 if (TREE_CODE (op1a
) == SSA_NAME
5746 && (code1
== NE_EXPR
|| code1
== EQ_EXPR
)
5747 && (integer_zerop (op1b
) || integer_onep (op1b
)))
5749 bool invert
= ((code1
== EQ_EXPR
&& integer_zerop (op1b
))
5750 || (code1
== NE_EXPR
&& integer_onep (op1b
)));
5751 gimple
*stmt
= SSA_NAME_DEF_STMT (op1a
);
5752 switch (gimple_code (stmt
))
5755 /* Try to simplify by copy-propagating the definition. */
5756 return or_var_with_comparison (op1a
, invert
, code2
, op2a
, op2b
);
5759 /* If every argument to the PHI produces the same result when
5760 ORed with the second comparison, we win.
5761 Do not do this unless the type is bool since we need a bool
5762 result here anyway. */
5763 if (TREE_CODE (TREE_TYPE (op1a
)) == BOOLEAN_TYPE
)
5765 tree result
= NULL_TREE
;
5767 for (i
= 0; i
< gimple_phi_num_args (stmt
); i
++)
5769 tree arg
= gimple_phi_arg_def (stmt
, i
);
5771 /* If this PHI has itself as an argument, ignore it.
5772 If all the other args produce the same result,
5774 if (arg
== gimple_phi_result (stmt
))
5776 else if (TREE_CODE (arg
) == INTEGER_CST
)
5778 if (invert
? integer_zerop (arg
) : integer_nonzerop (arg
))
5781 result
= boolean_true_node
;
5782 else if (!integer_onep (result
))
5786 result
= fold_build2 (code2
, boolean_type_node
,
5788 else if (!same_bool_comparison_p (result
,
5792 else if (TREE_CODE (arg
) == SSA_NAME
5793 && !SSA_NAME_IS_DEFAULT_DEF (arg
))
5796 gimple
*def_stmt
= SSA_NAME_DEF_STMT (arg
);
5797 /* In simple cases we can look through PHI nodes,
5798 but we have to be careful with loops.
5800 if (! dom_info_available_p (CDI_DOMINATORS
)
5801 || gimple_bb (def_stmt
) == gimple_bb (stmt
)
5802 || dominated_by_p (CDI_DOMINATORS
,
5803 gimple_bb (def_stmt
),
5806 temp
= or_var_with_comparison (arg
, invert
, code2
,
5812 else if (!same_bool_result_p (result
, temp
))
/* Try to simplify the OR of two comparisons, specified by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be simplified to a single expression (without requiring
   introducing more SSA variables to hold intermediate values),
   return the resulting tree.  Otherwise return NULL_TREE.
   If the result expression is non-null, it has boolean type.  */

tree
maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
  if (t)
    return t;
  else
    return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
}
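/* Worked examples (editorial sketch, for an integral x_1):

     maybe_fold_or_comparisons (LT_EXPR, x_1, 5, GT_EXPR, x_1, 5)
       => x_1 != 5		(singleton gap)
     maybe_fold_or_comparisons (LT_EXPR, x_1, 5, GE_EXPR, x_1, 5)
       => boolean_true_node	(the bounds cover everything)
     maybe_fold_or_comparisons (LT_EXPR, x_1, 7, LT_EXPR, x_1, 5)
       => x_1 < 7		(the less restrictive bound)  */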
/* Fold STMT to a constant using VALUEIZE to valueize SSA names.

   Either NULL_TREE, a simplified but non-constant or a constant
   is returned.

   ???  This should go into a gimple-fold-inline.h file to be eventually
   privatized with the single valueize function used in the various TUs
   to avoid the indirect function call overhead.  */
5857 gimple_fold_stmt_to_constant_1 (gimple
*stmt
, tree (*valueize
) (tree
),
5858 tree (*gvalueize
) (tree
))
5862 /* ??? The SSA propagators do not correctly deal with following SSA use-def
5863 edges if there are intermediate VARYING defs. For this reason
5864 do not follow SSA edges here even though SCCVN can technically
5865 just deal fine with that. */
5866 if (gimple_simplify (stmt
, &rcode
, ops
, NULL
, gvalueize
, valueize
))
5868 tree res
= NULL_TREE
;
5869 if (gimple_simplified_result_is_gimple_val (rcode
, ops
))
5871 else if (mprts_hook
)
5872 res
= mprts_hook (rcode
, gimple_expr_type (stmt
), ops
);
5875 if (dump_file
&& dump_flags
& TDF_DETAILS
)
5877 fprintf (dump_file
, "Match-and-simplified ");
5878 print_gimple_expr (dump_file
, stmt
, 0, TDF_SLIM
);
5879 fprintf (dump_file
, " to ");
5880 print_generic_expr (dump_file
, res
);
5881 fprintf (dump_file
, "\n");
5887 location_t loc
= gimple_location (stmt
);
5888 switch (gimple_code (stmt
))
5892 enum tree_code subcode
= gimple_assign_rhs_code (stmt
);
5894 switch (get_gimple_rhs_class (subcode
))
5896 case GIMPLE_SINGLE_RHS
:
5898 tree rhs
= gimple_assign_rhs1 (stmt
);
5899 enum tree_code_class kind
= TREE_CODE_CLASS (subcode
);
5901 if (TREE_CODE (rhs
) == SSA_NAME
)
5903 /* If the RHS is an SSA_NAME, return its known constant value,
5905 return (*valueize
) (rhs
);
5907 /* Handle propagating invariant addresses into address
5909 else if (TREE_CODE (rhs
) == ADDR_EXPR
5910 && !is_gimple_min_invariant (rhs
))
5912 HOST_WIDE_INT offset
= 0;
5914 base
= get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs
, 0),
5918 && (CONSTANT_CLASS_P (base
)
5919 || decl_address_invariant_p (base
)))
5920 return build_invariant_address (TREE_TYPE (rhs
),
5923 else if (TREE_CODE (rhs
) == CONSTRUCTOR
5924 && TREE_CODE (TREE_TYPE (rhs
)) == VECTOR_TYPE
5925 && (CONSTRUCTOR_NELTS (rhs
)
5926 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs
))))
5931 vec
= XALLOCAVEC (tree
,
5932 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs
)));
5933 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs
), i
, val
)
5935 val
= (*valueize
) (val
);
5936 if (TREE_CODE (val
) == INTEGER_CST
5937 || TREE_CODE (val
) == REAL_CST
5938 || TREE_CODE (val
) == FIXED_CST
)
5944 return build_vector (TREE_TYPE (rhs
), vec
);
5946 if (subcode
== OBJ_TYPE_REF
)
5948 tree val
= (*valueize
) (OBJ_TYPE_REF_EXPR (rhs
));
5949 /* If callee is constant, we can fold away the wrapper. */
5950 if (is_gimple_min_invariant (val
))
5954 if (kind
== tcc_reference
)
5956 if ((TREE_CODE (rhs
) == VIEW_CONVERT_EXPR
5957 || TREE_CODE (rhs
) == REALPART_EXPR
5958 || TREE_CODE (rhs
) == IMAGPART_EXPR
)
5959 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == SSA_NAME
)
5961 tree val
= (*valueize
) (TREE_OPERAND (rhs
, 0));
5962 return fold_unary_loc (EXPR_LOCATION (rhs
),
5964 TREE_TYPE (rhs
), val
);
5966 else if (TREE_CODE (rhs
) == BIT_FIELD_REF
5967 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == SSA_NAME
)
5969 tree val
= (*valueize
) (TREE_OPERAND (rhs
, 0));
5970 return fold_ternary_loc (EXPR_LOCATION (rhs
),
5972 TREE_TYPE (rhs
), val
,
5973 TREE_OPERAND (rhs
, 1),
5974 TREE_OPERAND (rhs
, 2));
5976 else if (TREE_CODE (rhs
) == MEM_REF
5977 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == SSA_NAME
)
5979 tree val
= (*valueize
) (TREE_OPERAND (rhs
, 0));
5980 if (TREE_CODE (val
) == ADDR_EXPR
5981 && is_gimple_min_invariant (val
))
5983 tree tem
= fold_build2 (MEM_REF
, TREE_TYPE (rhs
),
5985 TREE_OPERAND (rhs
, 1));
5990 return fold_const_aggregate_ref_1 (rhs
, valueize
);
5992 else if (kind
== tcc_declaration
)
5993 return get_symbol_constant_value (rhs
);
5997 case GIMPLE_UNARY_RHS
:
6000 case GIMPLE_BINARY_RHS
:
6001 /* Translate &x + CST into an invariant form suitable for
6002 further propagation. */
6003 if (subcode
== POINTER_PLUS_EXPR
)
6005 tree op0
= (*valueize
) (gimple_assign_rhs1 (stmt
));
6006 tree op1
= (*valueize
) (gimple_assign_rhs2 (stmt
));
6007 if (TREE_CODE (op0
) == ADDR_EXPR
6008 && TREE_CODE (op1
) == INTEGER_CST
)
6010 tree off
= fold_convert (ptr_type_node
, op1
);
6011 return build_fold_addr_expr_loc
6013 fold_build2 (MEM_REF
,
6014 TREE_TYPE (TREE_TYPE (op0
)),
6015 unshare_expr (op0
), off
));
	  /* Canonicalize bool != 0 and bool == 0 appearing after
	     valueization.  While gimple_simplify handles this
	     it can get confused by the ~X == 1 -> X == 0 transform
	     which we can't reduce to an SSA name or a constant
	     (and we have no way to tell gimple_simplify to not
	     consider those transforms in the first place).  */
6024 else if (subcode
== EQ_EXPR
6025 || subcode
== NE_EXPR
)
6027 tree lhs
= gimple_assign_lhs (stmt
);
6028 tree op0
= gimple_assign_rhs1 (stmt
);
6029 if (useless_type_conversion_p (TREE_TYPE (lhs
),
6032 tree op1
= (*valueize
) (gimple_assign_rhs2 (stmt
));
6033 op0
= (*valueize
) (op0
);
6034 if (TREE_CODE (op0
) == INTEGER_CST
)
6035 std::swap (op0
, op1
);
6036 if (TREE_CODE (op1
) == INTEGER_CST
6037 && ((subcode
== NE_EXPR
&& integer_zerop (op1
))
6038 || (subcode
== EQ_EXPR
&& integer_onep (op1
))))
6044 case GIMPLE_TERNARY_RHS
:
6046 /* Handle ternary operators that can appear in GIMPLE form. */
6047 tree op0
= (*valueize
) (gimple_assign_rhs1 (stmt
));
6048 tree op1
= (*valueize
) (gimple_assign_rhs2 (stmt
));
6049 tree op2
= (*valueize
) (gimple_assign_rhs3 (stmt
));
6050 return fold_ternary_loc (loc
, subcode
,
6051 gimple_expr_type (stmt
), op0
, op1
, op2
);
6062 gcall
*call_stmt
= as_a
<gcall
*> (stmt
);
6064 if (gimple_call_internal_p (stmt
))
6066 enum tree_code subcode
= ERROR_MARK
;
6067 switch (gimple_call_internal_fn (stmt
))
6069 case IFN_UBSAN_CHECK_ADD
:
6070 subcode
= PLUS_EXPR
;
6072 case IFN_UBSAN_CHECK_SUB
:
6073 subcode
= MINUS_EXPR
;
6075 case IFN_UBSAN_CHECK_MUL
:
6076 subcode
= MULT_EXPR
;
6078 case IFN_BUILTIN_EXPECT
:
6080 tree arg0
= gimple_call_arg (stmt
, 0);
6081 tree op0
= (*valueize
) (arg0
);
6082 if (TREE_CODE (op0
) == INTEGER_CST
)
6089 tree arg0
= gimple_call_arg (stmt
, 0);
6090 tree arg1
= gimple_call_arg (stmt
, 1);
6091 tree op0
= (*valueize
) (arg0
);
6092 tree op1
= (*valueize
) (arg1
);
6094 if (TREE_CODE (op0
) != INTEGER_CST
6095 || TREE_CODE (op1
) != INTEGER_CST
)
6100 /* x * 0 = 0 * x = 0 without overflow. */
6101 if (integer_zerop (op0
) || integer_zerop (op1
))
6102 return build_zero_cst (TREE_TYPE (arg0
));
6105 /* y - y = 0 without overflow. */
6106 if (operand_equal_p (op0
, op1
, 0))
6107 return build_zero_cst (TREE_TYPE (arg0
));
6114 = fold_binary_loc (loc
, subcode
, TREE_TYPE (arg0
), op0
, op1
);
6116 && TREE_CODE (res
) == INTEGER_CST
6117 && !TREE_OVERFLOW (res
))
6122 fn
= (*valueize
) (gimple_call_fn (stmt
));
6123 if (TREE_CODE (fn
) == ADDR_EXPR
6124 && TREE_CODE (TREE_OPERAND (fn
, 0)) == FUNCTION_DECL
6125 && DECL_BUILT_IN (TREE_OPERAND (fn
, 0))
6126 && gimple_builtin_call_types_compatible_p (stmt
,
6127 TREE_OPERAND (fn
, 0)))
6129 tree
*args
= XALLOCAVEC (tree
, gimple_call_num_args (stmt
));
6132 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
6133 args
[i
] = (*valueize
) (gimple_call_arg (stmt
, i
));
6134 retval
= fold_builtin_call_array (loc
,
6135 gimple_call_return_type (call_stmt
),
6136 fn
, gimple_call_num_args (stmt
), args
);
6139 /* fold_call_expr wraps the result inside a NOP_EXPR. */
6140 STRIP_NOPS (retval
);
6141 retval
= fold_convert (gimple_call_return_type (call_stmt
),
/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
   Returns NULL_TREE if folding to a constant is not possible, otherwise
   returns a constant according to is_gimple_min_invariant.  */

tree
gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
{
  tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
  if (res && is_gimple_min_invariant (res))
    return res;
  return NULL_TREE;
}
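/* Illustrative use (editorial sketch, not from the original sources):
   with a trivial valueizer that just returns its argument, e.g.

     static tree
     identity_valueize (tree t)
     {
       return t;
     }

   a propagated statement like  _2 = 3 + 4  folds through
   gimple_fold_stmt_to_constant (stmt, identity_valueize) to the
   INTEGER_CST 7, while statements that do not reduce to an
   is_gimple_min_invariant value yield NULL_TREE.  */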
/* The following set of functions are supposed to fold references using
   their constant initializers.  */

/* See if we can find a constructor defining the value of BASE.
   When we know the constructor with a constant offset (such as when
   BASE is array[40] and we do know the constructor of the array),
   BIT_OFFSET is adjusted accordingly.

   As a special case, return error_mark_node when the constructor
   is not explicitly available, but it is known to be zero
   such as 'static const int a;'.  */
6180 get_base_constructor (tree base
, HOST_WIDE_INT
*bit_offset
,
6181 tree (*valueize
)(tree
))
6183 HOST_WIDE_INT bit_offset2
, size
, max_size
;
6186 if (TREE_CODE (base
) == MEM_REF
)
6188 if (!integer_zerop (TREE_OPERAND (base
, 1)))
6190 if (!tree_fits_shwi_p (TREE_OPERAND (base
, 1)))
6192 *bit_offset
+= (mem_ref_offset (base
).to_short_addr ()
6197 && TREE_CODE (TREE_OPERAND (base
, 0)) == SSA_NAME
)
6198 base
= valueize (TREE_OPERAND (base
, 0));
6199 if (!base
|| TREE_CODE (base
) != ADDR_EXPR
)
6201 base
= TREE_OPERAND (base
, 0);
6204 && TREE_CODE (base
) == SSA_NAME
)
6205 base
= valueize (base
);
6207 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
6208 DECL_INITIAL. If BASE is a nested reference into another
6209 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
6210 the inner reference. */
6211 switch (TREE_CODE (base
))
6216 tree init
= ctor_for_folding (base
);
6218 /* Our semantic is exact opposite of ctor_for_folding;
6219 NULL means unknown, while error_mark_node is 0. */
6220 if (init
== error_mark_node
)
6223 return error_mark_node
;
6227 case VIEW_CONVERT_EXPR
:
6228 return get_base_constructor (TREE_OPERAND (base
, 0),
6229 bit_offset
, valueize
);
6233 base
= get_ref_base_and_extent (base
, &bit_offset2
, &size
, &max_size
,
6235 if (max_size
== -1 || size
!= max_size
)
6237 *bit_offset
+= bit_offset2
;
6238 return get_base_constructor (base
, bit_offset
, valueize
);
6244 if (CONSTANT_CLASS_P (base
))
6251 /* CTOR is CONSTRUCTOR of an array type. Fold reference of type TYPE and size
6252 SIZE to the memory at bit OFFSET. */
6255 fold_array_ctor_reference (tree type
, tree ctor
,
6256 unsigned HOST_WIDE_INT offset
,
6257 unsigned HOST_WIDE_INT size
,
6260 offset_int low_bound
;
6261 offset_int elt_size
;
6262 offset_int access_index
;
6263 tree domain_type
= NULL_TREE
;
6264 HOST_WIDE_INT inner_offset
;
6266 /* Compute low bound and elt size. */
6267 if (TREE_CODE (TREE_TYPE (ctor
)) == ARRAY_TYPE
)
6268 domain_type
= TYPE_DOMAIN (TREE_TYPE (ctor
));
6269 if (domain_type
&& TYPE_MIN_VALUE (domain_type
))
      /* Static constructors for variably sized objects make no sense.  */
6272 if (TREE_CODE (TYPE_MIN_VALUE (domain_type
)) != INTEGER_CST
)
6274 low_bound
= wi::to_offset (TYPE_MIN_VALUE (domain_type
));
  /* Static constructors for variably sized objects make no sense.  */
6279 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor
)))) != INTEGER_CST
)
6281 elt_size
= wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor
))));
6283 /* We can handle only constantly sized accesses that are known to not
6284 be larger than size of array element. */
6285 if (!TYPE_SIZE_UNIT (type
)
6286 || TREE_CODE (TYPE_SIZE_UNIT (type
)) != INTEGER_CST
6287 || elt_size
< wi::to_offset (TYPE_SIZE_UNIT (type
))
6291 /* Compute the array index we look for. */
6292 access_index
= wi::udiv_trunc (offset_int (offset
/ BITS_PER_UNIT
),
6294 access_index
+= low_bound
;
6296 /* And offset within the access. */
6297 inner_offset
= offset
% (elt_size
.to_uhwi () * BITS_PER_UNIT
);
6299 /* See if the array field is large enough to span whole access. We do not
6300 care to fold accesses spanning multiple array indexes. */
6301 if (inner_offset
+ size
> elt_size
.to_uhwi () * BITS_PER_UNIT
)
6303 if (tree val
= get_array_ctor_element_at_index (ctor
, access_index
))
6304 return fold_ctor_reference (type
, val
, inner_offset
, size
, from_decl
);
6306 /* When memory is not explicitely mentioned in constructor,
6307 it is 0 (or out of range). */
6308 return build_zero_cst (type
);
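
/* Worked example (illustrative): for

     static const int a[4] = { 10, 20, 30, 40 };

   a read of type int at bit OFFSET 64 with SIZE 32 gives elt_size = 4,
   access_index = 64 / BITS_PER_UNIT / 4 = 2 and inner_offset = 0, so the
   CONSTRUCTOR element 30 is returned.  A 32-bit read at OFFSET 48 spans
   two elements (inner_offset 16 + size 32 > 32) and is not folded.  */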
/* CTOR is CONSTRUCTOR of an aggregate or vector.
   Fold a reference of type TYPE and size SIZE to the memory at bit
   OFFSET.  */

static tree
fold_nonarray_ctor_reference (tree type, tree ctor,
                              unsigned HOST_WIDE_INT offset,
                              unsigned HOST_WIDE_INT size,
                              tree from_decl)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
                            cval)
    {
      tree byte_offset = DECL_FIELD_OFFSET (cfield);
      tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
      tree field_size = DECL_SIZE (cfield);
      offset_int bitoffset;
      offset_int bitoffset_end, access_end;

      /* Variable sized objects in static constructors make no sense,
         but field_size can be NULL for flexible array members.  */
      gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
                  && TREE_CODE (byte_offset) == INTEGER_CST
                  && (field_size != NULL_TREE
                      ? TREE_CODE (field_size) == INTEGER_CST
                      : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));

      /* Compute bit offset of the field.  */
      bitoffset = (wi::to_offset (field_offset)
                   + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
      /* Compute bit offset where the field ends.  */
      if (field_size != NULL_TREE)
        bitoffset_end = bitoffset + wi::to_offset (field_size);
      else
        bitoffset_end = 0;

      access_end = offset_int (offset) + size;

      /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
         [BITOFFSET, BITOFFSET_END)?  */
      if (wi::cmps (access_end, bitoffset) > 0
          && (field_size == NULL_TREE
              || wi::lts_p (offset, bitoffset_end)))
        {
          offset_int inner_offset = offset_int (offset) - bitoffset;
          /* We do have overlap.  Now see if the field is large enough to
             cover the access.  Give up for accesses spanning multiple
             fields.  */
          if (wi::cmps (access_end, bitoffset_end) > 0)
            return NULL_TREE;
          if (offset < bitoffset)
            return NULL_TREE;
          return fold_ctor_reference (type, cval,
                                      inner_offset.to_uhwi (), size,
                                      from_decl);
        }
    }
  /* When memory is not explicitly mentioned in the constructor, it is 0.  */
  return build_zero_cst (type);
}
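
/* Worked example (illustrative): for

     struct S { char c; int i; };
     static const struct S s = { 'x', 42 };

   a read of type int at the bit offset of field `i' with SIZE 32 overlaps
   exactly one field and folds to 42, while a 64-bit read starting at bit 0
   spans both fields (and padding) and is rejected.  */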
/* CTOR is a value initializing memory.  Fold a reference of type TYPE and
   size SIZE to the memory at bit OFFSET.  */

tree
fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
                     unsigned HOST_WIDE_INT size, tree from_decl)
{
  tree ret;

  /* We found the field with exact match.  */
  if (useless_type_conversion_p (type, TREE_TYPE (ctor))
      && !offset)
    return canonicalize_constructor_val (unshare_expr (ctor), from_decl);

  /* We are at the end of walk, see if we can view convert the
     result.  */
  if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
      /* VIEW_CONVERT_EXPR is defined only for matching sizes.  */
      && !compare_tree_int (TYPE_SIZE (type), size)
      && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
    {
      ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
      if (ret)
        {
          ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
          if (ret)
            STRIP_USELESS_TYPE_CONVERSION (ret);
        }
      return ret;
    }
  /* For constants and byte-aligned/sized reads try to go through
     native_encode/interpret.  */
  if (CONSTANT_CLASS_P (ctor)
      && BITS_PER_UNIT == 8
      && offset % BITS_PER_UNIT == 0
      && size % BITS_PER_UNIT == 0
      && size <= MAX_BITSIZE_MODE_ANY_MODE)
    {
      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
      int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
                                    offset / BITS_PER_UNIT);
      if (len > 0)
        return native_interpret_expr (type, buf, len);
    }
  if (TREE_CODE (ctor) == CONSTRUCTOR)
    {
      if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
          || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
        return fold_array_ctor_reference (type, ctor, offset, size,
                                          from_decl);
      else
        return fold_nonarray_ctor_reference (type, ctor, offset, size,
                                             from_decl);
    }

  return NULL_TREE;
}
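
/* Illustrative sketch of the native_encode path above: assuming a
   little-endian target with BITS_PER_UNIT == 8 and a 32-bit constant CTOR
   of value 0x01020304, a char read at OFFSET 8 encodes one byte of the
   object starting at byte 1 of its memory image { 04, 03, 02, 01 } and
   reinterprets it, yielding (char) 0x03.  */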
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates, valueizing SSA
   names using VALUEIZE.  Return NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
{
  tree ctor, idx, base;
  HOST_WIDE_INT offset, size, max_size;
  tree tem;
  bool reverse;

  if (TREE_THIS_VOLATILE (t))
    return NULL_TREE;

  if (DECL_P (t))
    return get_symbol_constant_value (t);

  tem = fold_read_from_constant_string (t);
  if (tem)
    return tem;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Constant indexes are handled well by get_base_constructor.
         Only special case variable offsets.
         FIXME: This code can't handle nested references with variable indexes
         (they will be handled only by iteration of ccp).  Perhaps we can bring
         get_ref_base_and_extent here and make it use a valueize callback.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
          && valueize
          && (idx = (*valueize) (TREE_OPERAND (t, 1)))
          && TREE_CODE (idx) == INTEGER_CST)
        {
          tree low_bound, unit_size;

          /* If the resulting bit-offset is constant, track it.  */
          if ((low_bound = array_ref_low_bound (t),
               TREE_CODE (low_bound) == INTEGER_CST)
              && (unit_size = array_ref_element_size (t),
                  tree_fits_uhwi_p (unit_size)))
            {
              offset_int woffset
                = wi::sext (wi::to_offset (idx) - wi::to_offset (low_bound),
                            TYPE_PRECISION (TREE_TYPE (idx)));

              if (wi::fits_shwi_p (woffset))
                {
                  offset = woffset.to_shwi ();
                  /* TODO: This code seems wrong, multiply then check
                     to see if it fits.  */
                  offset *= tree_to_uhwi (unit_size);
                  offset *= BITS_PER_UNIT;

                  base = TREE_OPERAND (t, 0);
                  ctor = get_base_constructor (base, &offset, valueize);
                  /* Empty constructor.  Always fold to 0.  */
                  if (ctor == error_mark_node)
                    return build_zero_cst (TREE_TYPE (t));
                  /* Out of bound array access.  Value is undefined,
                     but don't fold.  */
                  if (offset < 0)
                    return NULL_TREE;
                  /* We cannot determine the ctor.  */
                  if (!ctor)
                    return NULL_TREE;
                  return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
                                              tree_to_uhwi (unit_size)
                                              * BITS_PER_UNIT,
                                              base);
                }
            }
        }
      /* Fallthru.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case TARGET_MEM_REF:
    case MEM_REF:
      base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
      ctor = get_base_constructor (base, &offset, valueize);

      /* Empty constructor.  Always fold to 0.  */
      if (ctor == error_mark_node)
        return build_zero_cst (TREE_TYPE (t));
      /* We do not know precise address.  */
      if (max_size == -1 || max_size != size)
        return NULL_TREE;
      /* We cannot determine the ctor.  */
      if (!ctor)
        return NULL_TREE;

      /* Out of bound array access.  Value is undefined, but don't fold.  */
      if (offset < 0)
        return NULL_TREE;

      return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
                                  base);

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
        tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
        if (c && TREE_CODE (c) == COMPLEX_CST)
          return fold_build1_loc (EXPR_LOCATION (t),
                                  TREE_CODE (t), TREE_TYPE (t), c);
        break;
      }

    default:
      break;
    }

  return NULL_TREE;
}

/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates.  Return
   NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref (tree t)
{
  return fold_const_aggregate_ref_1 (t, NULL);
}
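
/* Illustrative use (not from the original sources): when folding a load
   statement a caller might try

     if (gimple_assign_single_p (stmt))
       {
         tree val = fold_const_aggregate_ref (gimple_assign_rhs1 (stmt));
         if (val && is_gimple_min_invariant (val))
           ... substitute VAL for the load ...
       }

   so that e.g. a use of a[1] with `static const int a[2] = { 1, 2 };'
   becomes the constant 2.  */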
/* Look up the virtual method with index TOKEN in virtual table V
   at OFFSET.
   Set CAN_REFER (if non-NULL) to false if the method is not referable
   or if the virtual table is ill-formed (such as rewritten by a
   non-C++-produced symbol); in that case just return NULL.  */

tree
gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
                                   tree v,
                                   unsigned HOST_WIDE_INT offset,
                                   bool *can_refer)
{
  tree vtable = v, init, fn;
  unsigned HOST_WIDE_INT size;
  unsigned HOST_WIDE_INT elt_size, access_index;
  tree domain_type;

  if (can_refer)
    *can_refer = true;

  /* First of all double check we have virtual table.  */
  if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
    {
      /* Pass down that we lost track of the target.  */
      if (can_refer)
        *can_refer = false;
      return NULL_TREE;
    }

  init = ctor_for_folding (v);

  /* The virtual tables should always be born with constructors
     and we always should assume that they are available for
     folding.  At the moment we do not stream them in all cases,
     but it should never happen that the ctor seems unreachable.  */
  gcc_assert (init);
  if (init == error_mark_node)
    {
      gcc_assert (in_lto_p);
      /* Pass down that we lost track of the target.  */
      if (can_refer)
        *can_refer = false;
      return NULL_TREE;
    }

  gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
  size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
  offset *= BITS_PER_UNIT;
  offset += token * size;

  /* Look up the value in the constructor that is assumed to be an array.
     This is equivalent to
     fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
                               offset, size, NULL);
     but in constant time.  We expect that the frontend produced a simple
     array without indexed initializers.  */
  gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
  domain_type = TYPE_DOMAIN (TREE_TYPE (init));
  gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
  elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
  access_index = offset / BITS_PER_UNIT / elt_size;
  gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);

  /* This code makes an assumption that there are no
     indexed fields produced by the C++ FE, so we can directly index
     the array.  */
  if (access_index < CONSTRUCTOR_NELTS (init))
    {
      fn = CONSTRUCTOR_ELT (init, access_index)->value;
      gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
      STRIP_NOPS (fn);
    }
  else
    fn = NULL;

  /* For a type-inconsistent program we may end up looking up a virtual
     method in a virtual table that does not contain TOKEN entries.  We may
     overrun the virtual table and pick up a constant or RTTI info pointer.
     In any case the call is undefined.  */
  if (!fn
      || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
      || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
    fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
  else
    {
      fn = TREE_OPERAND (fn, 0);

      /* When the cgraph node is missing and the function is not public,
         we cannot devirtualize.  This can happen in WHOPR when the actual
         method ends up in another partition, because we found the
         devirtualization possibility too late.  */
      if (!can_refer_decl_in_current_unit_p (fn, vtable))
        {
          if (can_refer)
            {
              *can_refer = false;
              return fn;
            }
          return NULL_TREE;
        }
    }

  /* Make sure we create a cgraph node for functions we'll reference.
     They can be non-existent if the reference comes from an entry
     of an external vtable for example.  */
  cgraph_node::get_create (fn);

  return fn;
}
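
/* Worked example (illustrative): on a hypothetical 64-bit target with
   vtable slots 8 bytes wide, TOKEN 2 and a byte OFFSET of 16 into the
   vtable give

     offset = 16 * BITS_PER_UNIT + 2 * 64 = 256 bits
     access_index = 256 / BITS_PER_UNIT / 8 = 4

   so the fifth element of the vtable CONSTRUCTOR is inspected.  */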
/* Return a declaration of the function which an OBJ_TYPE_REF references.
   TOKEN is the integer form of OBJ_TYPE_REF_TOKEN of the reference
   expression.  KNOWN_BINFO carries the binfo describing the true type of
   OBJ_TYPE_REF_OBJECT(REF).
   Set CAN_REFER (if non-NULL) to false if the method is not referable or
   if the virtual table is ill-formed (such as rewritten by a
   non-C++-produced symbol); in that case just return NULL.  */

tree
gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
                                  bool *can_refer)
{
  unsigned HOST_WIDE_INT offset;
  tree v;

  v = BINFO_VTABLE (known_binfo);
  /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone.  */
  if (!v)
    return NULL_TREE;

  if (!vtable_pointer_value_to_vtable (v, &v, &offset))
    {
      if (can_refer)
        *can_refer = false;
      return NULL_TREE;
    }
  return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
}
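
/* Illustrative use (not from the original sources): a devirtualizing
   caller might do

     bool can_refer;
     tree fndecl
       = gimple_get_virt_method_for_binfo (token, binfo, &can_refer);
     if (fndecl && can_refer)
       gimple_call_set_fndecl (call, fndecl);

   where TOKEN comes from OBJ_TYPE_REF_TOKEN of the call's function
   expression and BINFO describes the known dynamic type.  */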
/* Given a pointer value T, return a simplified version of an
   indirection through T, or NULL_TREE if no simplification is
   possible.  Note that the resulting type may be different from
   the type pointed to, in the sense that it is still compatible
   from the langhooks point of view.  */

tree
gimple_fold_indirect_ref (tree t)
{
  tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
  tree sub = t;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype)
      || TYPE_REF_CAN_ALIAS_ALL (ptype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (useless_type_conversion_p (type, optype))
        return op;

      /* *(foo *)&fooarray => fooarray[0] */
      if (TREE_CODE (optype) == ARRAY_TYPE
          && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
          && useless_type_conversion_p (type, TREE_TYPE (optype)))
        {
          tree type_domain = TYPE_DOMAIN (optype);
          tree min_val = size_zero_node;
          if (type_domain && TYPE_MIN_VALUE (type_domain))
            min_val = TYPE_MIN_VALUE (type_domain);
          if (TREE_CODE (min_val) == INTEGER_CST)
            return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
        }
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
               && useless_type_conversion_p (type, TREE_TYPE (optype)))
        return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
               && useless_type_conversion_p (type, TREE_TYPE (optype)))
        {
          tree part_width = TYPE_SIZE (type);
          tree index = bitsize_int (0);
          return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
        }
    }

  /* *(p + CST) -> ...  */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree addr = TREE_OPERAND (sub, 0);
      tree off = TREE_OPERAND (sub, 1);
      tree addrtype;

      STRIP_NOPS (addr);
      addrtype = TREE_TYPE (addr);

      /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
      if (TREE_CODE (addr) == ADDR_EXPR
          && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
          && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
          && tree_fits_uhwi_p (off))
        {
          unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
          tree part_width = TYPE_SIZE (type);
          unsigned HOST_WIDE_INT part_widthi
            = tree_to_shwi (part_width) / BITS_PER_UNIT;
          unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
          tree index = bitsize_int (indexi);
          if (offset / part_widthi
              < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
            return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
                                part_width, index);
        }

      /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
      if (TREE_CODE (addr) == ADDR_EXPR
          && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
          && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
        {
          tree size = TYPE_SIZE_UNIT (type);
          if (tree_int_cst_equal (size, off))
            return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
        }

      /* *(p + CST) -> MEM_REF <p, CST>.  */
      if (TREE_CODE (addr) != ADDR_EXPR
          || DECL_P (TREE_OPERAND (addr, 0)))
        return fold_build2 (MEM_REF, type,
                            addr,
                            wide_int_to_tree (ptype, off));
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
      && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      tree osub = sub;
      sub = gimple_fold_indirect_ref (sub);
      if (!sub)
        sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
        min_val = TYPE_MIN_VALUE (type_domain);
      if (TREE_CODE (min_val) == INTEGER_CST)
        return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
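
/* Illustrative use (not from the original sources): for a pointer value T
   such as (int *) &a with `int a[4]', gimple_fold_indirect_ref (T) yields
   the array reference a[0]; when none of the patterns above match, it
   returns NULL_TREE and the caller keeps the plain dereference.  */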
/* Return true if CODE is an operation that, when operating on signed
   integer types, involves undefined behavior on overflow and the
   operation can be expressed with unsigned arithmetic.  */

bool
arith_code_with_undefined_signed_overflow (tree_code code)
{
  switch (code)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case NEGATE_EXPR:
    case POINTER_PLUS_EXPR:
      return true;
    default:
      return false;
    }
}
/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
   operation that can be transformed to unsigned arithmetic, by converting
   its operands, carrying out the operation in the corresponding unsigned
   type and converting the result back to the original type.

   Returns a sequence of statements that replace STMT and also contain
   a modified form of STMT itself.  */

gimple_seq
rewrite_to_defined_overflow (gimple *stmt)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "rewriting stmt with undefined signed "
               "overflow ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
    }

  tree lhs = gimple_assign_lhs (stmt);
  tree type = unsigned_type_for (TREE_TYPE (lhs));
  gimple_seq stmts = NULL;
  for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
    {
      tree op = gimple_op (stmt, i);
      op = gimple_convert (&stmts, type, op);
      gimple_set_op (stmt, i, op);
    }
  gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
  gimple_seq_add_stmt (&stmts, stmt);
  gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
  gimple_seq_add_stmt (&stmts, cvt);

  return stmts;
}
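
/* For example (illustrative), for a signed int addition

     a_1 = b_2 + c_3;

   the returned sequence is

     _4 = (unsigned int) b_2;
     _5 = (unsigned int) c_3;
     _6 = _4 + _5;
     a_1 = (int) _6;

   where the third statement is STMT itself, rewritten in place.  */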
/* The valueization hook we use for the gimple_build API simplification.
   This makes us match fold_buildN behavior by only combining with
   statements in the sequence(s) we are currently building.  */

static tree
gimple_build_valueize (tree op)
{
  if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
    return op;
  return NULL_TREE;
}
/* Build the expression CODE OP0 of type TYPE with location LOC,
   simplifying it first if possible.  Returns the built
   expression value and appends statements possibly defining it
   to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
              enum tree_code code, tree type, tree op0)
{
  tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
  if (!res)
    {
      res = create_tmp_reg_or_ssa_name (type);
      gimple *stmt;
      if (code == REALPART_EXPR
          || code == IMAGPART_EXPR
          || code == VIEW_CONVERT_EXPR)
        stmt = gimple_build_assign (res, code, build1 (code, type, op0));
      else
        stmt = gimple_build_assign (res, code, op0);
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
/* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
   simplifying it first if possible.  Returns the built
   expression value and appends statements possibly defining it
   to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
              enum tree_code code, tree type, tree op0, tree op1)
{
  tree res = gimple_simplify (code, type, op0, op1, seq,
                              gimple_build_valueize);
  if (!res)
    {
      res = create_tmp_reg_or_ssa_name (type);
      gimple *stmt = gimple_build_assign (res, code, op0, op1);
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
/* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
   simplifying it first if possible.  Returns the built
   expression value and appends statements possibly defining it
   to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
              enum tree_code code, tree type, tree op0, tree op1, tree op2)
{
  tree res = gimple_simplify (code, type, op0, op1, op2,
                              seq, gimple_build_valueize);
  if (!res)
    {
      res = create_tmp_reg_or_ssa_name (type);
      gimple *stmt;
      if (code == BIT_FIELD_REF)
        stmt = gimple_build_assign (res, code,
                                    build3 (code, type, op0, op1, op2));
      else
        stmt = gimple_build_assign (res, code, op0, op1, op2);
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
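
/* Illustrative use of the gimple_build overloads above (not from the
   original sources):

     gimple_seq stmts = NULL;
     tree sum = gimple_build (&stmts, loc, PLUS_EXPR, type, a, b);
     tree neg = gimple_build (&stmts, loc, NEGATE_EXPR, type, sum);
     gsi_insert_seq_before (&gsi, stmts, GSI_SAME_STMT);

   If the expressions simplify to already-available values, STMTS may stay
   empty and no temporaries are created.  */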
/* Build the call FN (ARG0) with a result of type TYPE
   (or no result if TYPE is void) with location LOC,
   simplifying it first if possible.  Returns the built
   expression value (or NULL_TREE if TYPE is void) and appends
   statements possibly defining it to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
              enum built_in_function fn, tree type, tree arg0)
{
  tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
  if (!res)
    {
      tree decl = builtin_decl_implicit (fn);
      gimple *stmt = gimple_build_call (decl, 1, arg0);
      if (!VOID_TYPE_P (type))
        {
          res = create_tmp_reg_or_ssa_name (type);
          gimple_call_set_lhs (stmt, res);
        }
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
/* Build the call FN (ARG0, ARG1) with a result of type TYPE
   (or no result if TYPE is void) with location LOC,
   simplifying it first if possible.  Returns the built
   expression value (or NULL_TREE if TYPE is void) and appends
   statements possibly defining it to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
              enum built_in_function fn, tree type, tree arg0, tree arg1)
{
  tree res = gimple_simplify (fn, type, arg0, arg1, seq,
                              gimple_build_valueize);
  if (!res)
    {
      tree decl = builtin_decl_implicit (fn);
      gimple *stmt = gimple_build_call (decl, 2, arg0, arg1);
      if (!VOID_TYPE_P (type))
        {
          res = create_tmp_reg_or_ssa_name (type);
          gimple_call_set_lhs (stmt, res);
        }
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
/* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
   (or no result if TYPE is void) with location LOC,
   simplifying it first if possible.  Returns the built
   expression value (or NULL_TREE if TYPE is void) and appends
   statements possibly defining it to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
              enum built_in_function fn, tree type,
              tree arg0, tree arg1, tree arg2)
{
  tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
                              seq, gimple_build_valueize);
  if (!res)
    {
      tree decl = builtin_decl_implicit (fn);
      gimple *stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
      if (!VOID_TYPE_P (type))
        {
          res = create_tmp_reg_or_ssa_name (type);
          gimple_call_set_lhs (stmt, res);
        }
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
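
/* Illustrative use (not from the original sources): emitting
   r = fma (a, b, c) while folding constant operands when possible:

     gimple_seq stmts = NULL;
     tree r = gimple_build (&stmts, loc, BUILT_IN_FMA, double_type_node,
                            a, b, c);

   When TYPE is void_type_node, no LHS is created and NULL_TREE is
   returned.  */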
/* Build the conversion (TYPE) OP with a result of type TYPE
   with location LOC if such a conversion is necessary in GIMPLE,
   simplifying it first.
   Returns the built expression value and appends
   statements possibly defining it to SEQ.  */

tree
gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
{
  if (useless_type_conversion_p (type, TREE_TYPE (op)))
    return op;
  return gimple_build (seq, loc, NOP_EXPR, type, op);
}
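
/* Illustrative use (not from the original sources):

     gimple_seq stmts = NULL;
     op = gimple_convert (&stmts, loc, sizetype, op);
     gsi_insert_seq_before (&gsi, stmts, GSI_SAME_STMT);

   No statement is emitted when OP's type already converts uselessly
   to the requested type.  */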
/* Build the conversion (ptrofftype) OP with a result of a type
   compatible with ptrofftype with location LOC if such a conversion
   is necessary in GIMPLE, simplifying it first.
   Returns the built expression value and appends
   statements possibly defining it to SEQ.  */

tree
gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
{
  if (ptrofftype_p (TREE_TYPE (op)))
    return op;
  return gimple_convert (seq, loc, sizetype, op);
}
/* Return true if the result of assignment STMT is known to be non-negative.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

static bool
gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
                                   int depth)
{
  enum tree_code code = gimple_assign_rhs_code (stmt);
  switch (get_gimple_rhs_class (code))
    {
    case GIMPLE_UNARY_RHS:
      return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
                                             gimple_expr_type (stmt),
                                             gimple_assign_rhs1 (stmt),
                                             strict_overflow_p, depth);
    case GIMPLE_BINARY_RHS:
      return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
                                              gimple_expr_type (stmt),
                                              gimple_assign_rhs1 (stmt),
                                              gimple_assign_rhs2 (stmt),
                                              strict_overflow_p, depth);
    case GIMPLE_TERNARY_RHS:
      return false;
    case GIMPLE_SINGLE_RHS:
      return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
                                              strict_overflow_p, depth);
    case GIMPLE_INVALID_RHS:
      break;
    }
  gcc_unreachable ();
}
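
/* For example (illustrative), for `x_1 = y_2 * y_2' on a signed type this
   returns true and sets *STRICT_OVERFLOW_P, since the result is only known
   to be non-negative under the assumption that the multiplication does not
   overflow.  */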
/* Return true if the return value of call STMT is known to be non-negative.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

static bool
gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
                                 int depth)
{
  tree arg0 = gimple_call_num_args (stmt) > 0 ?
    gimple_call_arg (stmt, 0) : NULL_TREE;
  tree arg1 = gimple_call_num_args (stmt) > 1 ?
    gimple_call_arg (stmt, 1) : NULL_TREE;

  return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
                                        gimple_call_combined_fn (stmt),
                                        arg0, arg1,
                                        strict_overflow_p, depth);
}
/* Return true if the result of PHI node STMT is known to be non-negative.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

static bool
gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
                                int depth)
{
  for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
    {
      tree arg = gimple_phi_arg_def (stmt, i);
      if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p,
                                            depth + 1))
        return false;
    }
  return true;
}
/* Return true if STMT is known to compute a non-negative value.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

bool
gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
                                 int depth)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
                                                depth);
    case GIMPLE_CALL:
      return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
                                              depth);
    case GIMPLE_PHI:
      return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
                                             depth);
    default:
      return false;
    }
}
/* Return true if the floating-point value computed by assignment STMT
   is known to have an integer value.  We also allow +Inf, -Inf and NaN
   to be considered integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

static bool
gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
{
  enum tree_code code = gimple_assign_rhs_code (stmt);
  switch (get_gimple_rhs_class (code))
    {
    case GIMPLE_UNARY_RHS:
      return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
                                          gimple_assign_rhs1 (stmt), depth);
    case GIMPLE_BINARY_RHS:
      return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
                                           gimple_assign_rhs1 (stmt),
                                           gimple_assign_rhs2 (stmt), depth);
    case GIMPLE_TERNARY_RHS:
      return false;
    case GIMPLE_SINGLE_RHS:
      return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
    case GIMPLE_INVALID_RHS:
      break;
    }
  gcc_unreachable ();
}
/* Return true if the floating-point value computed by call STMT is known
   to have an integer value.  We also allow +Inf, -Inf and NaN to be
   considered integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

static bool
gimple_call_integer_valued_real_p (gimple *stmt, int depth)
{
  tree arg0 = (gimple_call_num_args (stmt) > 0
               ? gimple_call_arg (stmt, 0)
               : NULL_TREE);
  tree arg1 = (gimple_call_num_args (stmt) > 1
               ? gimple_call_arg (stmt, 1)
               : NULL_TREE);
  return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
                                     arg0, arg1, depth);
}
/* Return true if the floating-point result of phi STMT is known to have
   an integer value.  We also allow +Inf, -Inf and NaN to be considered
   integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

static bool
gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
{
  for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
    {
      tree arg = gimple_phi_arg_def (stmt, i);
      if (!integer_valued_real_single_p (arg, depth + 1))
        return false;
    }
  return true;
}
/* Return true if the floating-point value computed by STMT is known
   to have an integer value.  We also allow +Inf, -Inf and NaN to be
   considered integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

bool
gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      return gimple_assign_integer_valued_real_p (stmt, depth);
    case GIMPLE_CALL:
      return gimple_call_integer_valued_real_p (stmt, depth);
    case GIMPLE_PHI:
      return gimple_phi_integer_valued_real_p (stmt, depth);