/* Statement simplification on GIMPLE.
   Copyright (C) 2010-2020 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.c.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "predict.h"
#include "ssa.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "gimple-ssa-warn-restrict.h"
#include "fold-const.h"
#include "stmt.h"
#include "expr.h"
#include "stor-layout.h"
#include "dumpfile.h"
#include "gimple-fold.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-object-size.h"
#include "tree-ssa.h"
#include "tree-ssa-propagate.h"
#include "ipa-utils.h"
#include "tree-ssa-address.h"
#include "langhooks.h"
#include "gimplify-me.h"
#include "dbgcnt.h"
#include "builtins.h"
#include "tree-eh.h"
#include "gimple-match.h"
#include "gomp-constants.h"
#include "optabs-query.h"
#include "omp-general.h"
#include "tree-cfg.h"
#include "fold-const-call.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "diagnostic-core.h"
#include "intl.h"
#include "calls.h"
#include "tree-vector-builder.h"
#include "tree-ssa-strlen.h"
#include "varasm.h"
70 enum strlen_range_kind
{
71 /* Compute the exact constant string length. */
73 /* Compute the maximum constant string length. */
75 /* Compute a range of string lengths bounded by object sizes. When
76 the length of a string cannot be determined, consider as the upper
77 bound the size of the enclosing object the string may be a member
78 or element of. Also determine the size of the largest character
79 array the string may refer to. */
81 /* Determine the integer value of the argument (not string length). */
86 get_range_strlen (tree
, bitmap
*, strlen_range_kind
, c_strlen_data
*, unsigned);
88 /* Return true when DECL can be referenced from current unit.
89 FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
90 We can get declarations that are not possible to reference for various
93 1) When analyzing C++ virtual tables.
94 C++ virtual tables do have known constructors even
95 when they are keyed to other compilation unit.
96 Those tables can contain pointers to methods and vars
97 in other units. Those methods have both STATIC and EXTERNAL
99 2) In WHOPR mode devirtualization might lead to reference
100 to method that was partitioned elsehwere.
101 In this case we have static VAR_DECL or FUNCTION_DECL
102 that has no corresponding callgraph/varpool node
104 3) COMDAT functions referred by external vtables that
105 we devirtualize only during final compilation stage.
106 At this time we already decided that we will not output
107 the function body and thus we can't reference the symbol
111 can_refer_decl_in_current_unit_p (tree decl
, tree from_decl
)
114 struct cgraph_node
*node
;
117 if (DECL_ABSTRACT_P (decl
))
120 /* We are concerned only about static/external vars and functions. */
121 if ((!TREE_STATIC (decl
) && !DECL_EXTERNAL (decl
))
122 || !VAR_OR_FUNCTION_DECL_P (decl
))
125 /* Static objects can be referred only if they are defined and not optimized
127 if (!TREE_PUBLIC (decl
))
129 if (DECL_EXTERNAL (decl
))
131 /* Before we start optimizing unreachable code we can be sure all
132 static objects are defined. */
133 if (symtab
->function_flags_ready
)
135 snode
= symtab_node::get (decl
);
136 if (!snode
|| !snode
->definition
)
138 node
= dyn_cast
<cgraph_node
*> (snode
);
139 return !node
|| !node
->inlined_to
;
142 /* We will later output the initializer, so we can refer to it.
143 So we are concerned only when DECL comes from initializer of
144 external var or var that has been optimized out. */
146 || !VAR_P (from_decl
)
147 || (!DECL_EXTERNAL (from_decl
)
148 && (vnode
= varpool_node::get (from_decl
)) != NULL
149 && vnode
->definition
)
151 && (vnode
= varpool_node::get (from_decl
)) != NULL
152 && vnode
->in_other_partition
))
154 /* We are folding reference from external vtable. The vtable may reffer
155 to a symbol keyed to other compilation unit. The other compilation
156 unit may be in separate DSO and the symbol may be hidden. */
157 if (DECL_VISIBILITY_SPECIFIED (decl
)
158 && DECL_EXTERNAL (decl
)
159 && DECL_VISIBILITY (decl
) != VISIBILITY_DEFAULT
160 && (!(snode
= symtab_node::get (decl
)) || !snode
->in_other_partition
))
162 /* When function is public, we always can introduce new reference.
163 Exception are the COMDAT functions where introducing a direct
164 reference imply need to include function body in the curren tunit. */
165 if (TREE_PUBLIC (decl
) && !DECL_COMDAT (decl
))
167 /* We have COMDAT. We are going to check if we still have definition
168 or if the definition is going to be output in other partition.
169 Bypass this when gimplifying; all needed functions will be produced.
171 As observed in PR20991 for already optimized out comdat virtual functions
172 it may be tempting to not necessarily give up because the copy will be
173 output elsewhere when corresponding vtable is output.
174 This is however not possible - ABI specify that COMDATs are output in
175 units where they are used and when the other unit was compiled with LTO
176 it is possible that vtable was kept public while the function itself
178 if (!symtab
->function_flags_ready
)
181 snode
= symtab_node::get (decl
);
183 || ((!snode
->definition
|| DECL_EXTERNAL (decl
))
184 && (!snode
->in_other_partition
185 || (!snode
->forced_by_abi
&& !snode
->force_output
))))
187 node
= dyn_cast
<cgraph_node
*> (snode
);
188 return !node
|| !node
->inlined_to
;
191 /* Create a temporary for TYPE for a statement STMT. If the current function
192 is in SSA form, a SSA name is created. Otherwise a temporary register
196 create_tmp_reg_or_ssa_name (tree type
, gimple
*stmt
)
198 if (gimple_in_ssa_p (cfun
))
199 return make_ssa_name (type
, stmt
);
201 return create_tmp_reg (type
);
204 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
205 acceptable form for is_gimple_min_invariant.
206 FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL. */
209 canonicalize_constructor_val (tree cval
, tree from_decl
)
211 if (CONSTANT_CLASS_P (cval
))
214 tree orig_cval
= cval
;
216 if (TREE_CODE (cval
) == POINTER_PLUS_EXPR
217 && TREE_CODE (TREE_OPERAND (cval
, 1)) == INTEGER_CST
)
219 tree ptr
= TREE_OPERAND (cval
, 0);
220 if (is_gimple_min_invariant (ptr
))
221 cval
= build1_loc (EXPR_LOCATION (cval
),
222 ADDR_EXPR
, TREE_TYPE (ptr
),
223 fold_build2 (MEM_REF
, TREE_TYPE (TREE_TYPE (ptr
)),
225 fold_convert (ptr_type_node
,
226 TREE_OPERAND (cval
, 1))));
228 if (TREE_CODE (cval
) == ADDR_EXPR
)
230 tree base
= NULL_TREE
;
231 if (TREE_CODE (TREE_OPERAND (cval
, 0)) == COMPOUND_LITERAL_EXPR
)
233 base
= COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval
, 0));
235 TREE_OPERAND (cval
, 0) = base
;
238 base
= get_base_address (TREE_OPERAND (cval
, 0));
242 if (VAR_OR_FUNCTION_DECL_P (base
)
243 && !can_refer_decl_in_current_unit_p (base
, from_decl
))
245 if (TREE_TYPE (base
) == error_mark_node
)
248 TREE_ADDRESSABLE (base
) = 1;
249 else if (TREE_CODE (base
) == FUNCTION_DECL
)
251 /* Make sure we create a cgraph node for functions we'll reference.
252 They can be non-existent if the reference comes from an entry
253 of an external vtable for example. */
254 cgraph_node::get_create (base
);
256 /* Fixup types in global initializers. */
257 if (TREE_TYPE (TREE_TYPE (cval
)) != TREE_TYPE (TREE_OPERAND (cval
, 0)))
258 cval
= build_fold_addr_expr (TREE_OPERAND (cval
, 0));
260 if (!useless_type_conversion_p (TREE_TYPE (orig_cval
), TREE_TYPE (cval
)))
261 cval
= fold_convert (TREE_TYPE (orig_cval
), cval
);
264 /* In CONSTRUCTORs we may see unfolded constants like (int (*) ()) 0. */
265 if (TREE_CODE (cval
) == INTEGER_CST
)
267 if (TREE_OVERFLOW_P (cval
))
268 cval
= drop_tree_overflow (cval
);
269 if (!useless_type_conversion_p (TREE_TYPE (orig_cval
), TREE_TYPE (cval
)))
270 cval
= fold_convert (TREE_TYPE (orig_cval
), cval
);
276 /* If SYM is a constant variable with known value, return the value.
277 NULL_TREE is returned otherwise. */
280 get_symbol_constant_value (tree sym
)
282 tree val
= ctor_for_folding (sym
);
283 if (val
!= error_mark_node
)
287 val
= canonicalize_constructor_val (unshare_expr (val
), sym
);
288 if (val
&& is_gimple_min_invariant (val
))
293 /* Variables declared 'const' without an initializer
294 have zero as the initializer if they may not be
295 overridden at link or run time. */
297 && is_gimple_reg_type (TREE_TYPE (sym
)))
298 return build_zero_cst (TREE_TYPE (sym
));
306 /* Subroutine of fold_stmt. We perform several simplifications of the
307 memory reference tree EXPR and make sure to re-gimplify them properly
308 after propagation of constant addresses. IS_LHS is true if the
309 reference is supposed to be an lvalue. */
312 maybe_fold_reference (tree expr
, bool is_lhs
)
316 if ((TREE_CODE (expr
) == VIEW_CONVERT_EXPR
317 || TREE_CODE (expr
) == REALPART_EXPR
318 || TREE_CODE (expr
) == IMAGPART_EXPR
)
319 && CONSTANT_CLASS_P (TREE_OPERAND (expr
, 0)))
320 return fold_unary_loc (EXPR_LOCATION (expr
),
323 TREE_OPERAND (expr
, 0));
324 else if (TREE_CODE (expr
) == BIT_FIELD_REF
325 && CONSTANT_CLASS_P (TREE_OPERAND (expr
, 0)))
326 return fold_ternary_loc (EXPR_LOCATION (expr
),
329 TREE_OPERAND (expr
, 0),
330 TREE_OPERAND (expr
, 1),
331 TREE_OPERAND (expr
, 2));
334 && (result
= fold_const_aggregate_ref (expr
))
335 && is_gimple_min_invariant (result
))
342 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
343 replacement rhs for the statement or NULL_TREE if no simplification
344 could be made. It is assumed that the operands have been previously
348 fold_gimple_assign (gimple_stmt_iterator
*si
)
350 gimple
*stmt
= gsi_stmt (*si
);
351 enum tree_code subcode
= gimple_assign_rhs_code (stmt
);
352 location_t loc
= gimple_location (stmt
);
354 tree result
= NULL_TREE
;
356 switch (get_gimple_rhs_class (subcode
))
358 case GIMPLE_SINGLE_RHS
:
360 tree rhs
= gimple_assign_rhs1 (stmt
);
362 if (TREE_CLOBBER_P (rhs
))
365 if (REFERENCE_CLASS_P (rhs
))
366 return maybe_fold_reference (rhs
, false);
368 else if (TREE_CODE (rhs
) == OBJ_TYPE_REF
)
370 tree val
= OBJ_TYPE_REF_EXPR (rhs
);
371 if (is_gimple_min_invariant (val
))
373 else if (flag_devirtualize
&& virtual_method_call_p (rhs
))
376 vec
<cgraph_node
*>targets
377 = possible_polymorphic_call_targets (rhs
, stmt
, &final
);
378 if (final
&& targets
.length () <= 1 && dbg_cnt (devirt
))
380 if (dump_enabled_p ())
382 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS
, stmt
,
383 "resolving virtual function address "
384 "reference to function %s\n",
385 targets
.length () == 1
386 ? targets
[0]->name ()
389 if (targets
.length () == 1)
391 val
= fold_convert (TREE_TYPE (val
),
392 build_fold_addr_expr_loc
393 (loc
, targets
[0]->decl
));
394 STRIP_USELESS_TYPE_CONVERSION (val
);
397 /* We cannot use __builtin_unreachable here because it
398 cannot have address taken. */
399 val
= build_int_cst (TREE_TYPE (val
), 0);
405 else if (TREE_CODE (rhs
) == ADDR_EXPR
)
407 tree ref
= TREE_OPERAND (rhs
, 0);
408 tree tem
= maybe_fold_reference (ref
, true);
410 && TREE_CODE (tem
) == MEM_REF
411 && integer_zerop (TREE_OPERAND (tem
, 1)))
412 result
= fold_convert (TREE_TYPE (rhs
), TREE_OPERAND (tem
, 0));
414 result
= fold_convert (TREE_TYPE (rhs
),
415 build_fold_addr_expr_loc (loc
, tem
));
416 else if (TREE_CODE (ref
) == MEM_REF
417 && integer_zerop (TREE_OPERAND (ref
, 1)))
418 result
= fold_convert (TREE_TYPE (rhs
), TREE_OPERAND (ref
, 0));
422 /* Strip away useless type conversions. Both the
423 NON_LVALUE_EXPR that may have been added by fold, and
424 "useless" type conversions that might now be apparent
425 due to propagation. */
426 STRIP_USELESS_TYPE_CONVERSION (result
);
428 if (result
!= rhs
&& valid_gimple_rhs_p (result
))
433 else if (TREE_CODE (rhs
) == CONSTRUCTOR
434 && TREE_CODE (TREE_TYPE (rhs
)) == VECTOR_TYPE
)
436 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
440 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs
), i
, val
)
441 if (! CONSTANT_CLASS_P (val
))
444 return build_vector_from_ctor (TREE_TYPE (rhs
),
445 CONSTRUCTOR_ELTS (rhs
));
448 else if (DECL_P (rhs
))
449 return get_symbol_constant_value (rhs
);
453 case GIMPLE_UNARY_RHS
:
456 case GIMPLE_BINARY_RHS
:
459 case GIMPLE_TERNARY_RHS
:
460 result
= fold_ternary_loc (loc
, subcode
,
461 TREE_TYPE (gimple_assign_lhs (stmt
)),
462 gimple_assign_rhs1 (stmt
),
463 gimple_assign_rhs2 (stmt
),
464 gimple_assign_rhs3 (stmt
));
468 STRIP_USELESS_TYPE_CONVERSION (result
);
469 if (valid_gimple_rhs_p (result
))
474 case GIMPLE_INVALID_RHS
:
482 /* Replace a statement at *SI_P with a sequence of statements in STMTS,
483 adjusting the replacement stmts location and virtual operands.
484 If the statement has a lhs the last stmt in the sequence is expected
485 to assign to that lhs. */
488 gsi_replace_with_seq_vops (gimple_stmt_iterator
*si_p
, gimple_seq stmts
)
490 gimple
*stmt
= gsi_stmt (*si_p
);
492 if (gimple_has_location (stmt
))
493 annotate_all_with_location (stmts
, gimple_location (stmt
));
495 /* First iterate over the replacement statements backward, assigning
496 virtual operands to their defining statements. */
497 gimple
*laststore
= NULL
;
498 for (gimple_stmt_iterator i
= gsi_last (stmts
);
499 !gsi_end_p (i
); gsi_prev (&i
))
501 gimple
*new_stmt
= gsi_stmt (i
);
502 if ((gimple_assign_single_p (new_stmt
)
503 && !is_gimple_reg (gimple_assign_lhs (new_stmt
)))
504 || (is_gimple_call (new_stmt
)
505 && (gimple_call_flags (new_stmt
)
506 & (ECF_NOVOPS
| ECF_PURE
| ECF_CONST
| ECF_NORETURN
)) == 0))
510 vdef
= gimple_vdef (stmt
);
512 vdef
= make_ssa_name (gimple_vop (cfun
), new_stmt
);
513 gimple_set_vdef (new_stmt
, vdef
);
514 if (vdef
&& TREE_CODE (vdef
) == SSA_NAME
)
515 SSA_NAME_DEF_STMT (vdef
) = new_stmt
;
516 laststore
= new_stmt
;
520 /* Second iterate over the statements forward, assigning virtual
521 operands to their uses. */
522 tree reaching_vuse
= gimple_vuse (stmt
);
523 for (gimple_stmt_iterator i
= gsi_start (stmts
);
524 !gsi_end_p (i
); gsi_next (&i
))
526 gimple
*new_stmt
= gsi_stmt (i
);
527 /* If the new statement possibly has a VUSE, update it with exact SSA
528 name we know will reach this one. */
529 if (gimple_has_mem_ops (new_stmt
))
530 gimple_set_vuse (new_stmt
, reaching_vuse
);
531 gimple_set_modified (new_stmt
, true);
532 if (gimple_vdef (new_stmt
))
533 reaching_vuse
= gimple_vdef (new_stmt
);
536 /* If the new sequence does not do a store release the virtual
537 definition of the original statement. */
539 && reaching_vuse
== gimple_vuse (stmt
))
541 tree vdef
= gimple_vdef (stmt
);
543 && TREE_CODE (vdef
) == SSA_NAME
)
545 unlink_stmt_vdef (stmt
);
546 release_ssa_name (vdef
);
550 /* Finally replace the original statement with the sequence. */
551 gsi_replace_with_seq (si_p
, stmts
, false);
554 /* Convert EXPR into a GIMPLE value suitable for substitution on the
555 RHS of an assignment. Insert the necessary statements before
556 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
557 is replaced. If the call is expected to produces a result, then it
558 is replaced by an assignment of the new RHS to the result variable.
559 If the result is to be ignored, then the call is replaced by a
560 GIMPLE_NOP. A proper VDEF chain is retained by making the first
561 VUSE and the last VDEF of the whole sequence be the same as the replaced
562 statement and using new SSA names for stores in between. */
565 gimplify_and_update_call_from_tree (gimple_stmt_iterator
*si_p
, tree expr
)
568 gimple
*stmt
, *new_stmt
;
569 gimple_stmt_iterator i
;
570 gimple_seq stmts
= NULL
;
572 stmt
= gsi_stmt (*si_p
);
574 gcc_assert (is_gimple_call (stmt
));
576 push_gimplify_context (gimple_in_ssa_p (cfun
));
578 lhs
= gimple_call_lhs (stmt
);
579 if (lhs
== NULL_TREE
)
581 gimplify_and_add (expr
, &stmts
);
582 /* We can end up with folding a memcpy of an empty class assignment
583 which gets optimized away by C++ gimplification. */
584 if (gimple_seq_empty_p (stmts
))
586 pop_gimplify_context (NULL
);
587 if (gimple_in_ssa_p (cfun
))
589 unlink_stmt_vdef (stmt
);
592 gsi_replace (si_p
, gimple_build_nop (), false);
598 tree tmp
= force_gimple_operand (expr
, &stmts
, false, NULL_TREE
);
599 new_stmt
= gimple_build_assign (lhs
, tmp
);
600 i
= gsi_last (stmts
);
601 gsi_insert_after_without_update (&i
, new_stmt
,
602 GSI_CONTINUE_LINKING
);
605 pop_gimplify_context (NULL
);
607 gsi_replace_with_seq_vops (si_p
, stmts
);
611 /* Replace the call at *GSI with the gimple value VAL. */
614 replace_call_with_value (gimple_stmt_iterator
*gsi
, tree val
)
616 gimple
*stmt
= gsi_stmt (*gsi
);
617 tree lhs
= gimple_call_lhs (stmt
);
621 if (!useless_type_conversion_p (TREE_TYPE (lhs
), TREE_TYPE (val
)))
622 val
= fold_convert (TREE_TYPE (lhs
), val
);
623 repl
= gimple_build_assign (lhs
, val
);
626 repl
= gimple_build_nop ();
627 tree vdef
= gimple_vdef (stmt
);
628 if (vdef
&& TREE_CODE (vdef
) == SSA_NAME
)
630 unlink_stmt_vdef (stmt
);
631 release_ssa_name (vdef
);
633 gsi_replace (gsi
, repl
, false);
636 /* Replace the call at *GSI with the new call REPL and fold that
640 replace_call_with_call_and_fold (gimple_stmt_iterator
*gsi
, gimple
*repl
)
642 gimple
*stmt
= gsi_stmt (*gsi
);
643 gimple_call_set_lhs (repl
, gimple_call_lhs (stmt
));
644 gimple_set_location (repl
, gimple_location (stmt
));
645 gimple_move_vops (repl
, stmt
);
646 gsi_replace (gsi
, repl
, false);
650 /* Return true if VAR is a VAR_DECL or a component thereof. */
653 var_decl_component_p (tree var
)
656 while (handled_component_p (inner
))
657 inner
= TREE_OPERAND (inner
, 0);
658 return (DECL_P (inner
)
659 || (TREE_CODE (inner
) == MEM_REF
660 && TREE_CODE (TREE_OPERAND (inner
, 0)) == ADDR_EXPR
));
663 /* Return TRUE if the SIZE argument, representing the size of an
664 object, is in a range of values of which exactly zero is valid. */
667 size_must_be_zero_p (tree size
)
669 if (integer_zerop (size
))
672 if (TREE_CODE (size
) != SSA_NAME
|| !INTEGRAL_TYPE_P (TREE_TYPE (size
)))
675 tree type
= TREE_TYPE (size
);
676 int prec
= TYPE_PRECISION (type
);
678 /* Compute the value of SSIZE_MAX, the largest positive value that
679 can be stored in ssize_t, the signed counterpart of size_t. */
680 wide_int ssize_max
= wi::lshift (wi::one (prec
), prec
- 1) - 1;
681 value_range
valid_range (build_int_cst (type
, 0),
682 wide_int_to_tree (type
, ssize_max
));
684 get_range_info (size
, vr
);
685 vr
.intersect (&valid_range
);
689 /* Fold function call to builtin mem{{,p}cpy,move}. Try to detect and
690 diagnose (otherwise undefined) overlapping copies without preventing
691 folding. When folded, GCC guarantees that overlapping memcpy has
692 the same semantics as memmove. Call to the library memcpy need not
693 provide the same guarantee. Return false if no simplification can
697 gimple_fold_builtin_memory_op (gimple_stmt_iterator
*gsi
,
698 tree dest
, tree src
, enum built_in_function code
)
700 gimple
*stmt
= gsi_stmt (*gsi
);
701 tree lhs
= gimple_call_lhs (stmt
);
702 tree len
= gimple_call_arg (stmt
, 2);
703 tree destvar
, srcvar
;
704 location_t loc
= gimple_location (stmt
);
706 /* If the LEN parameter is a constant zero or in range where
707 the only valid value is zero, return DEST. */
708 if (size_must_be_zero_p (len
))
711 if (gimple_call_lhs (stmt
))
712 repl
= gimple_build_assign (gimple_call_lhs (stmt
), dest
);
714 repl
= gimple_build_nop ();
715 tree vdef
= gimple_vdef (stmt
);
716 if (vdef
&& TREE_CODE (vdef
) == SSA_NAME
)
718 unlink_stmt_vdef (stmt
);
719 release_ssa_name (vdef
);
721 gsi_replace (gsi
, repl
, false);
725 /* If SRC and DEST are the same (and not volatile), return
726 DEST{,+LEN,+LEN-1}. */
727 if (operand_equal_p (src
, dest
, 0))
729 /* Avoid diagnosing exact overlap in calls to __builtin_memcpy.
730 It's safe and may even be emitted by GCC itself (see bug
732 unlink_stmt_vdef (stmt
);
733 if (gimple_vdef (stmt
) && TREE_CODE (gimple_vdef (stmt
)) == SSA_NAME
)
734 release_ssa_name (gimple_vdef (stmt
));
737 gsi_replace (gsi
, gimple_build_nop (), false);
744 tree srctype
, desttype
;
745 unsigned int src_align
, dest_align
;
748 unsigned HOST_WIDE_INT tmp_len
;
750 /* Build accesses at offset zero with a ref-all character type. */
751 off0
= build_int_cst (build_pointer_type_for_mode (char_type_node
,
754 /* If we can perform the copy efficiently with first doing all loads
755 and then all stores inline it that way. Currently efficiently
756 means that we can load all the memory into a single integer
757 register which is what MOVE_MAX gives us. */
758 src_align
= get_pointer_alignment (src
);
759 dest_align
= get_pointer_alignment (dest
);
760 if (tree_fits_uhwi_p (len
)
761 && compare_tree_int (len
, MOVE_MAX
) <= 0
762 /* FIXME: Don't transform copies from strings with known length.
763 Until GCC 9 this prevented a case in gcc.dg/strlenopt-8.c
764 from being handled, and the case was XFAILed for that reason.
765 Now that it is handled and the XFAIL removed, as soon as other
766 strlenopt tests that rely on it for passing are adjusted, this
767 hack can be removed. */
768 && !c_strlen (src
, 1)
769 && !((tmp_str
= c_getstr (src
, &tmp_len
)) != NULL
770 && memchr (tmp_str
, 0, tmp_len
) == NULL
))
772 unsigned ilen
= tree_to_uhwi (len
);
773 if (pow2p_hwi (ilen
))
775 /* Detect out-of-bounds accesses without issuing warnings.
776 Avoid folding out-of-bounds copies but to avoid false
777 positives for unreachable code defer warning until after
778 DCE has worked its magic.
779 -Wrestrict is still diagnosed. */
780 if (int warning
= check_bounds_or_overlap (as_a
<gcall
*>(stmt
),
783 if (warning
!= OPT_Wrestrict
)
786 scalar_int_mode mode
;
787 tree type
= lang_hooks
.types
.type_for_size (ilen
* 8, 1);
789 && is_a
<scalar_int_mode
> (TYPE_MODE (type
), &mode
)
790 && GET_MODE_SIZE (mode
) * BITS_PER_UNIT
== ilen
* 8
791 /* If the destination pointer is not aligned we must be able
792 to emit an unaligned store. */
793 && (dest_align
>= GET_MODE_ALIGNMENT (mode
)
794 || !targetm
.slow_unaligned_access (mode
, dest_align
)
795 || (optab_handler (movmisalign_optab
, mode
)
796 != CODE_FOR_nothing
)))
799 tree desttype
= type
;
800 if (src_align
< GET_MODE_ALIGNMENT (mode
))
801 srctype
= build_aligned_type (type
, src_align
);
802 tree srcmem
= fold_build2 (MEM_REF
, srctype
, src
, off0
);
803 tree tem
= fold_const_aggregate_ref (srcmem
);
806 else if (src_align
< GET_MODE_ALIGNMENT (mode
)
807 && targetm
.slow_unaligned_access (mode
, src_align
)
808 && (optab_handler (movmisalign_optab
, mode
)
809 == CODE_FOR_nothing
))
814 if (is_gimple_reg_type (TREE_TYPE (srcmem
)))
816 new_stmt
= gimple_build_assign (NULL_TREE
, srcmem
);
818 = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem
),
820 gimple_assign_set_lhs (new_stmt
, srcmem
);
821 gimple_set_vuse (new_stmt
, gimple_vuse (stmt
));
822 gsi_insert_before (gsi
, new_stmt
, GSI_SAME_STMT
);
824 if (dest_align
< GET_MODE_ALIGNMENT (mode
))
825 desttype
= build_aligned_type (type
, dest_align
);
827 = gimple_build_assign (fold_build2 (MEM_REF
, desttype
,
830 gimple_move_vops (new_stmt
, stmt
);
833 gsi_replace (gsi
, new_stmt
, false);
836 gsi_insert_before (gsi
, new_stmt
, GSI_SAME_STMT
);
843 if (code
== BUILT_IN_MEMMOVE
)
845 /* Both DEST and SRC must be pointer types.
846 ??? This is what old code did. Is the testing for pointer types
849 If either SRC is readonly or length is 1, we can use memcpy. */
850 if (!dest_align
|| !src_align
)
852 if (readonly_data_expr (src
)
853 || (tree_fits_uhwi_p (len
)
854 && (MIN (src_align
, dest_align
) / BITS_PER_UNIT
855 >= tree_to_uhwi (len
))))
857 tree fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
860 gimple_call_set_fndecl (stmt
, fn
);
861 gimple_call_set_arg (stmt
, 0, dest
);
862 gimple_call_set_arg (stmt
, 1, src
);
867 /* If *src and *dest can't overlap, optimize into memcpy as well. */
868 if (TREE_CODE (src
) == ADDR_EXPR
869 && TREE_CODE (dest
) == ADDR_EXPR
)
871 tree src_base
, dest_base
, fn
;
872 poly_int64 src_offset
= 0, dest_offset
= 0;
875 srcvar
= TREE_OPERAND (src
, 0);
876 src_base
= get_addr_base_and_unit_offset (srcvar
, &src_offset
);
877 if (src_base
== NULL
)
879 destvar
= TREE_OPERAND (dest
, 0);
880 dest_base
= get_addr_base_and_unit_offset (destvar
,
882 if (dest_base
== NULL
)
884 if (!poly_int_tree_p (len
, &maxsize
))
886 if (SSA_VAR_P (src_base
)
887 && SSA_VAR_P (dest_base
))
889 if (operand_equal_p (src_base
, dest_base
, 0)
890 && ranges_maybe_overlap_p (src_offset
, maxsize
,
891 dest_offset
, maxsize
))
894 else if (TREE_CODE (src_base
) == MEM_REF
895 && TREE_CODE (dest_base
) == MEM_REF
)
897 if (! operand_equal_p (TREE_OPERAND (src_base
, 0),
898 TREE_OPERAND (dest_base
, 0), 0))
900 poly_offset_int full_src_offset
901 = mem_ref_offset (src_base
) + src_offset
;
902 poly_offset_int full_dest_offset
903 = mem_ref_offset (dest_base
) + dest_offset
;
904 if (ranges_maybe_overlap_p (full_src_offset
, maxsize
,
905 full_dest_offset
, maxsize
))
911 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
914 gimple_call_set_fndecl (stmt
, fn
);
915 gimple_call_set_arg (stmt
, 0, dest
);
916 gimple_call_set_arg (stmt
, 1, src
);
921 /* If the destination and source do not alias optimize into
923 if ((is_gimple_min_invariant (dest
)
924 || TREE_CODE (dest
) == SSA_NAME
)
925 && (is_gimple_min_invariant (src
)
926 || TREE_CODE (src
) == SSA_NAME
))
929 ao_ref_init_from_ptr_and_size (&destr
, dest
, len
);
930 ao_ref_init_from_ptr_and_size (&srcr
, src
, len
);
931 if (!refs_may_alias_p_1 (&destr
, &srcr
, false))
934 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
937 gimple_call_set_fndecl (stmt
, fn
);
938 gimple_call_set_arg (stmt
, 0, dest
);
939 gimple_call_set_arg (stmt
, 1, src
);
948 if (!tree_fits_shwi_p (len
))
950 if (!POINTER_TYPE_P (TREE_TYPE (src
))
951 || !POINTER_TYPE_P (TREE_TYPE (dest
)))
953 /* In the following try to find a type that is most natural to be
954 used for the memcpy source and destination and that allows
955 the most optimization when memcpy is turned into a plain assignment
956 using that type. In theory we could always use a char[len] type
957 but that only gains us that the destination and source possibly
958 no longer will have their address taken. */
959 srctype
= TREE_TYPE (TREE_TYPE (src
));
960 if (TREE_CODE (srctype
) == ARRAY_TYPE
961 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype
), len
))
962 srctype
= TREE_TYPE (srctype
);
963 desttype
= TREE_TYPE (TREE_TYPE (dest
));
964 if (TREE_CODE (desttype
) == ARRAY_TYPE
965 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype
), len
))
966 desttype
= TREE_TYPE (desttype
);
967 if (TREE_ADDRESSABLE (srctype
)
968 || TREE_ADDRESSABLE (desttype
))
971 /* Make sure we are not copying using a floating-point mode or
972 a type whose size possibly does not match its precision. */
973 if (FLOAT_MODE_P (TYPE_MODE (desttype
))
974 || TREE_CODE (desttype
) == BOOLEAN_TYPE
975 || TREE_CODE (desttype
) == ENUMERAL_TYPE
)
976 desttype
= bitwise_type_for_mode (TYPE_MODE (desttype
));
977 if (FLOAT_MODE_P (TYPE_MODE (srctype
))
978 || TREE_CODE (srctype
) == BOOLEAN_TYPE
979 || TREE_CODE (srctype
) == ENUMERAL_TYPE
)
980 srctype
= bitwise_type_for_mode (TYPE_MODE (srctype
));
988 src_align
= get_pointer_alignment (src
);
989 dest_align
= get_pointer_alignment (dest
);
991 /* Choose between src and destination type for the access based
992 on alignment, whether the access constitutes a register access
993 and whether it may actually expose a declaration for SSA rewrite
994 or SRA decomposition. */
997 if (TREE_CODE (dest
) == ADDR_EXPR
998 && var_decl_component_p (TREE_OPERAND (dest
, 0))
999 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype
), len
)
1000 && dest_align
>= TYPE_ALIGN (desttype
)
1001 && (is_gimple_reg_type (desttype
)
1002 || src_align
>= TYPE_ALIGN (desttype
)))
1003 destvar
= fold_build2 (MEM_REF
, desttype
, dest
, off0
);
1004 else if (TREE_CODE (src
) == ADDR_EXPR
1005 && var_decl_component_p (TREE_OPERAND (src
, 0))
1006 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype
), len
)
1007 && src_align
>= TYPE_ALIGN (srctype
)
1008 && (is_gimple_reg_type (srctype
)
1009 || dest_align
>= TYPE_ALIGN (srctype
)))
1010 srcvar
= fold_build2 (MEM_REF
, srctype
, src
, off0
);
1011 if (srcvar
== NULL_TREE
&& destvar
== NULL_TREE
)
1014 /* Now that we chose an access type express the other side in
1015 terms of it if the target allows that with respect to alignment
1017 if (srcvar
== NULL_TREE
)
1019 if (src_align
>= TYPE_ALIGN (desttype
))
1020 srcvar
= fold_build2 (MEM_REF
, desttype
, src
, off0
);
1023 if (STRICT_ALIGNMENT
)
1025 srctype
= build_aligned_type (TYPE_MAIN_VARIANT (desttype
),
1027 srcvar
= fold_build2 (MEM_REF
, srctype
, src
, off0
);
1030 else if (destvar
== NULL_TREE
)
1032 if (dest_align
>= TYPE_ALIGN (srctype
))
1033 destvar
= fold_build2 (MEM_REF
, srctype
, dest
, off0
);
1036 if (STRICT_ALIGNMENT
)
1038 desttype
= build_aligned_type (TYPE_MAIN_VARIANT (srctype
),
1040 destvar
= fold_build2 (MEM_REF
, desttype
, dest
, off0
);
1044 /* Same as above, detect out-of-bounds accesses without issuing
1045 warnings. Avoid folding out-of-bounds copies but to avoid
1046 false positives for unreachable code defer warning until
1047 after DCE has worked its magic.
1048 -Wrestrict is still diagnosed. */
1049 if (int warning
= check_bounds_or_overlap (as_a
<gcall
*>(stmt
),
1050 dest
, src
, len
, len
,
1052 if (warning
!= OPT_Wrestrict
)
1056 if (is_gimple_reg_type (TREE_TYPE (srcvar
)))
1058 tree tem
= fold_const_aggregate_ref (srcvar
);
1061 if (! is_gimple_min_invariant (srcvar
))
1063 new_stmt
= gimple_build_assign (NULL_TREE
, srcvar
);
1064 srcvar
= create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar
),
1066 gimple_assign_set_lhs (new_stmt
, srcvar
);
1067 gimple_set_vuse (new_stmt
, gimple_vuse (stmt
));
1068 gsi_insert_before (gsi
, new_stmt
, GSI_SAME_STMT
);
1070 new_stmt
= gimple_build_assign (destvar
, srcvar
);
1071 goto set_vop_and_replace
;
1074 /* We get an aggregate copy. Use an unsigned char[] type to
1075 perform the copying to preserve padding and to avoid any issues
1076 with TREE_ADDRESSABLE types or float modes behavior on copying. */
1077 desttype
= build_array_type_nelts (unsigned_char_type_node
,
1078 tree_to_uhwi (len
));
1080 if (src_align
> TYPE_ALIGN (srctype
))
1081 srctype
= build_aligned_type (srctype
, src_align
);
1082 if (dest_align
> TYPE_ALIGN (desttype
))
1083 desttype
= build_aligned_type (desttype
, dest_align
);
1085 = gimple_build_assign (fold_build2 (MEM_REF
, desttype
, dest
, off0
),
1086 fold_build2 (MEM_REF
, srctype
, src
, off0
));
1087 set_vop_and_replace
:
1088 gimple_move_vops (new_stmt
, stmt
);
1091 gsi_replace (gsi
, new_stmt
, false);
1094 gsi_insert_before (gsi
, new_stmt
, GSI_SAME_STMT
);
1098 gimple_seq stmts
= NULL
;
1099 if (code
== BUILT_IN_MEMCPY
|| code
== BUILT_IN_MEMMOVE
)
1101 else if (code
== BUILT_IN_MEMPCPY
)
1103 len
= gimple_convert_to_ptrofftype (&stmts
, loc
, len
);
1104 dest
= gimple_build (&stmts
, loc
, POINTER_PLUS_EXPR
,
1105 TREE_TYPE (dest
), dest
, len
);
1110 gsi_insert_seq_before (gsi
, stmts
, GSI_SAME_STMT
);
1111 gimple
*repl
= gimple_build_assign (lhs
, dest
);
1112 gsi_replace (gsi
, repl
, false);
1116 /* Transform a call to built-in bcmp(a, b, len) at *GSI into one
1117 to built-in memcmp (a, b, len). */
1120 gimple_fold_builtin_bcmp (gimple_stmt_iterator
*gsi
)
1122 tree fn
= builtin_decl_implicit (BUILT_IN_MEMCMP
);
1127 /* Transform bcmp (a, b, len) into memcmp (a, b, len). */
1129 gimple
*stmt
= gsi_stmt (*gsi
);
1130 tree a
= gimple_call_arg (stmt
, 0);
1131 tree b
= gimple_call_arg (stmt
, 1);
1132 tree len
= gimple_call_arg (stmt
, 2);
1134 gimple
*repl
= gimple_build_call (fn
, 3, a
, b
, len
);
1135 replace_call_with_call_and_fold (gsi
, repl
);
1140 /* Transform a call to built-in bcopy (src, dest, len) at *GSI into one
1141 to built-in memmove (dest, src, len). */
1144 gimple_fold_builtin_bcopy (gimple_stmt_iterator
*gsi
)
1146 tree fn
= builtin_decl_implicit (BUILT_IN_MEMMOVE
);
1151 /* bcopy has been removed from POSIX in Issue 7 but Issue 6 specifies
1152 it's quivalent to memmove (not memcpy). Transform bcopy (src, dest,
1153 len) into memmove (dest, src, len). */
1155 gimple
*stmt
= gsi_stmt (*gsi
);
1156 tree src
= gimple_call_arg (stmt
, 0);
1157 tree dest
= gimple_call_arg (stmt
, 1);
1158 tree len
= gimple_call_arg (stmt
, 2);
1160 gimple
*repl
= gimple_build_call (fn
, 3, dest
, src
, len
);
1161 gimple_call_set_fntype (as_a
<gcall
*> (stmt
), TREE_TYPE (fn
));
1162 replace_call_with_call_and_fold (gsi
, repl
);
1167 /* Transform a call to built-in bzero (dest, len) at *GSI into one
1168 to built-in memset (dest, 0, len). */
1171 gimple_fold_builtin_bzero (gimple_stmt_iterator
*gsi
)
1173 tree fn
= builtin_decl_implicit (BUILT_IN_MEMSET
);
1178 /* Transform bzero (dest, len) into memset (dest, 0, len). */
1180 gimple
*stmt
= gsi_stmt (*gsi
);
1181 tree dest
= gimple_call_arg (stmt
, 0);
1182 tree len
= gimple_call_arg (stmt
, 1);
1184 gimple_seq seq
= NULL
;
1185 gimple
*repl
= gimple_build_call (fn
, 3, dest
, integer_zero_node
, len
);
1186 gimple_seq_add_stmt_without_update (&seq
, repl
);
1187 gsi_replace_with_seq_vops (gsi
, seq
);
1193 /* Fold function call to builtin memset or bzero at *GSI setting the
1194 memory of size LEN to VAL. Return whether a simplification was made. */
1197 gimple_fold_builtin_memset (gimple_stmt_iterator
*gsi
, tree c
, tree len
)
1199 gimple
*stmt
= gsi_stmt (*gsi
);
1201 unsigned HOST_WIDE_INT length
, cval
;
1203 /* If the LEN parameter is zero, return DEST. */
1204 if (integer_zerop (len
))
1206 replace_call_with_value (gsi
, gimple_call_arg (stmt
, 0));
1210 if (! tree_fits_uhwi_p (len
))
1213 if (TREE_CODE (c
) != INTEGER_CST
)
1216 tree dest
= gimple_call_arg (stmt
, 0);
1218 if (TREE_CODE (var
) != ADDR_EXPR
)
1221 var
= TREE_OPERAND (var
, 0);
1222 if (TREE_THIS_VOLATILE (var
))
1225 etype
= TREE_TYPE (var
);
1226 if (TREE_CODE (etype
) == ARRAY_TYPE
)
1227 etype
= TREE_TYPE (etype
);
1229 if (!INTEGRAL_TYPE_P (etype
)
1230 && !POINTER_TYPE_P (etype
))
1233 if (! var_decl_component_p (var
))
1236 length
= tree_to_uhwi (len
);
1237 if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (etype
)) != length
1238 || get_pointer_alignment (dest
) / BITS_PER_UNIT
< length
)
1241 if (length
> HOST_BITS_PER_WIDE_INT
/ BITS_PER_UNIT
)
1244 if (integer_zerop (c
))
1248 if (CHAR_BIT
!= 8 || BITS_PER_UNIT
!= 8 || HOST_BITS_PER_WIDE_INT
> 64)
1251 cval
= TREE_INT_CST_LOW (c
);
1255 cval
|= (cval
<< 31) << 1;
1258 var
= fold_build2 (MEM_REF
, etype
, dest
, build_int_cst (ptr_type_node
, 0));
1259 gimple
*store
= gimple_build_assign (var
, build_int_cst_type (etype
, cval
));
1260 gimple_move_vops (store
, stmt
);
1261 gsi_insert_before (gsi
, store
, GSI_SAME_STMT
);
1262 if (gimple_call_lhs (stmt
))
1264 gimple
*asgn
= gimple_build_assign (gimple_call_lhs (stmt
), dest
);
1265 gsi_replace (gsi
, asgn
, false);
1269 gimple_stmt_iterator gsi2
= *gsi
;
1271 gsi_remove (&gsi2
, true);
1277 /* Helper of get_range_strlen for ARG that is not an SSA_NAME. */
1280 get_range_strlen_tree (tree arg
, bitmap
*visited
, strlen_range_kind rkind
,
1281 c_strlen_data
*pdata
, unsigned eltsize
)
1283 gcc_assert (TREE_CODE (arg
) != SSA_NAME
);
1285 /* The length computed by this invocation of the function. */
1286 tree val
= NULL_TREE
;
1288 /* True if VAL is an optimistic (tight) bound determined from
1289 the size of the character array in which the string may be
1290 stored. In that case, the computed VAL is used to set
1292 bool tight_bound
= false;
1294 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
1295 if (TREE_CODE (arg
) == ADDR_EXPR
1296 && TREE_CODE (TREE_OPERAND (arg
, 0)) == ARRAY_REF
)
1298 tree op
= TREE_OPERAND (arg
, 0);
1299 if (integer_zerop (TREE_OPERAND (op
, 1)))
1301 tree aop0
= TREE_OPERAND (op
, 0);
1302 if (TREE_CODE (aop0
) == INDIRECT_REF
1303 && TREE_CODE (TREE_OPERAND (aop0
, 0)) == SSA_NAME
)
1304 return get_range_strlen (TREE_OPERAND (aop0
, 0), visited
, rkind
,
1307 else if (TREE_CODE (TREE_OPERAND (op
, 0)) == COMPONENT_REF
1308 && rkind
== SRK_LENRANGE
)
1310 /* Fail if an array is the last member of a struct object
1311 since it could be treated as a (fake) flexible array
1313 tree idx
= TREE_OPERAND (op
, 1);
1315 arg
= TREE_OPERAND (op
, 0);
1316 tree optype
= TREE_TYPE (arg
);
1317 if (tree dom
= TYPE_DOMAIN (optype
))
1318 if (tree bound
= TYPE_MAX_VALUE (dom
))
1319 if (TREE_CODE (bound
) == INTEGER_CST
1320 && TREE_CODE (idx
) == INTEGER_CST
1321 && tree_int_cst_lt (bound
, idx
))
1326 if (rkind
== SRK_INT_VALUE
)
1328 /* We are computing the maximum value (not string length). */
1330 if (TREE_CODE (val
) != INTEGER_CST
1331 || tree_int_cst_sgn (val
) < 0)
1336 c_strlen_data lendata
= { };
1337 val
= c_strlen (arg
, 1, &lendata
, eltsize
);
1339 if (!val
&& lendata
.decl
)
1341 /* ARG refers to an unterminated const character array.
1342 DATA.DECL with size DATA.LEN. */
1343 val
= lendata
.minlen
;
1344 pdata
->decl
= lendata
.decl
;
1348 /* Set if VAL represents the maximum length based on array size (set
1349 when exact length cannot be determined). */
1350 bool maxbound
= false;
1352 if (!val
&& rkind
== SRK_LENRANGE
)
1354 if (TREE_CODE (arg
) == ADDR_EXPR
)
1355 return get_range_strlen (TREE_OPERAND (arg
, 0), visited
, rkind
,
1358 if (TREE_CODE (arg
) == ARRAY_REF
)
1360 tree optype
= TREE_TYPE (TREE_OPERAND (arg
, 0));
1362 /* Determine the "innermost" array type. */
1363 while (TREE_CODE (optype
) == ARRAY_TYPE
1364 && TREE_CODE (TREE_TYPE (optype
)) == ARRAY_TYPE
)
1365 optype
= TREE_TYPE (optype
);
1367 /* Avoid arrays of pointers. */
1368 tree eltype
= TREE_TYPE (optype
);
1369 if (TREE_CODE (optype
) != ARRAY_TYPE
1370 || !INTEGRAL_TYPE_P (eltype
))
1373 /* Fail when the array bound is unknown or zero. */
1374 val
= TYPE_SIZE_UNIT (optype
);
1375 if (!val
|| integer_zerop (val
))
1378 val
= fold_build2 (MINUS_EXPR
, TREE_TYPE (val
), val
,
1381 /* Set the minimum size to zero since the string in
1382 the array could have zero length. */
1383 pdata
->minlen
= ssize_int (0);
1387 else if (TREE_CODE (arg
) == COMPONENT_REF
1388 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (arg
, 1)))
1391 /* Use the type of the member array to determine the upper
1392 bound on the length of the array. This may be overly
1393 optimistic if the array itself isn't NUL-terminated and
1394 the caller relies on the subsequent member to contain
1395 the NUL but that would only be considered valid if
1396 the array were the last member of a struct. */
1398 tree fld
= TREE_OPERAND (arg
, 1);
1400 tree optype
= TREE_TYPE (fld
);
1402 /* Determine the "innermost" array type. */
1403 while (TREE_CODE (optype
) == ARRAY_TYPE
1404 && TREE_CODE (TREE_TYPE (optype
)) == ARRAY_TYPE
)
1405 optype
= TREE_TYPE (optype
);
1407 /* Fail when the array bound is unknown or zero. */
1408 val
= TYPE_SIZE_UNIT (optype
);
1409 if (!val
|| integer_zerop (val
))
1411 val
= fold_build2 (MINUS_EXPR
, TREE_TYPE (val
), val
,
1414 /* Set the minimum size to zero since the string in
1415 the array could have zero length. */
1416 pdata
->minlen
= ssize_int (0);
1418 /* The array size determined above is an optimistic bound
1419 on the length. If the array isn't nul-terminated the
1420 length computed by the library function would be greater.
1421 Even though using strlen to cross the subobject boundary
1422 is undefined, avoid drawing conclusions from the member
1423 type about the length here. */
1426 else if (TREE_CODE (arg
) == MEM_REF
1427 && TREE_CODE (TREE_TYPE (arg
)) == ARRAY_TYPE
1428 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg
))) == INTEGER_TYPE
1429 && TREE_CODE (TREE_OPERAND (arg
, 0)) == ADDR_EXPR
)
1431 /* Handle a MEM_REF into a DECL accessing an array of integers,
1432 being conservative about references to extern structures with
1433 flexible array members that can be initialized to arbitrary
1434 numbers of elements as an extension (static structs are okay).
1435 FIXME: Make this less conservative -- see
1436 component_ref_size in tree.c. */
1437 tree ref
= TREE_OPERAND (TREE_OPERAND (arg
, 0), 0);
1438 if ((TREE_CODE (ref
) == PARM_DECL
|| VAR_P (ref
))
1439 && (decl_binds_to_current_def_p (ref
)
1440 || !array_at_struct_end_p (arg
)))
1442 /* Fail if the offset is out of bounds. Such accesses
1443 should be diagnosed at some point. */
1444 val
= DECL_SIZE_UNIT (ref
);
1445 if (!val
|| integer_zerop (val
))
1448 poly_offset_int psiz
= wi::to_offset (val
);
1449 poly_offset_int poff
= mem_ref_offset (arg
);
1450 if (known_le (psiz
, poff
))
1453 pdata
->minlen
= ssize_int (0);
1455 /* Subtract the offset and one for the terminating nul. */
1458 val
= wide_int_to_tree (TREE_TYPE (val
), psiz
);
1459 /* Since VAL reflects the size of a declared object
1460 rather the type of the access it is not a tight bound. */
1463 else if (TREE_CODE (arg
) == PARM_DECL
|| VAR_P (arg
))
1465 /* Avoid handling pointers to arrays. GCC might misuse
1466 a pointer to an array of one bound to point to an array
1467 object of a greater bound. */
1468 tree argtype
= TREE_TYPE (arg
);
1469 if (TREE_CODE (argtype
) == ARRAY_TYPE
)
1471 val
= TYPE_SIZE_UNIT (argtype
);
1473 || TREE_CODE (val
) != INTEGER_CST
1474 || integer_zerop (val
))
1476 val
= wide_int_to_tree (TREE_TYPE (val
),
1477 wi::sub (wi::to_wide (val
), 1));
1479 /* Set the minimum size to zero since the string in
1480 the array could have zero length. */
1481 pdata
->minlen
= ssize_int (0);
1490 /* Adjust the lower bound on the string length as necessary. */
1492 || (rkind
!= SRK_STRLEN
1493 && TREE_CODE (pdata
->minlen
) == INTEGER_CST
1494 && TREE_CODE (val
) == INTEGER_CST
1495 && tree_int_cst_lt (val
, pdata
->minlen
)))
1496 pdata
->minlen
= val
;
1498 if (pdata
->maxbound
&& TREE_CODE (pdata
->maxbound
) == INTEGER_CST
)
1500 /* Adjust the tighter (more optimistic) string length bound
1501 if necessary and proceed to adjust the more conservative
1503 if (TREE_CODE (val
) == INTEGER_CST
)
1505 if (tree_int_cst_lt (pdata
->maxbound
, val
))
1506 pdata
->maxbound
= val
;
1509 pdata
->maxbound
= val
;
1511 else if (pdata
->maxbound
|| maxbound
)
1512 /* Set PDATA->MAXBOUND only if it either isn't INTEGER_CST or
1513 if VAL corresponds to the maximum length determined based
1514 on the type of the object. */
1515 pdata
->maxbound
= val
;
1519 /* VAL computed above represents an optimistically tight bound
1520 on the length of the string based on the referenced object's
1521 or subobject's type. Determine the conservative upper bound
1522 based on the enclosing object's size if possible. */
1523 if (rkind
== SRK_LENRANGE
)
1526 tree base
= get_addr_base_and_unit_offset (arg
, &offset
);
1529 /* When the call above fails due to a non-constant offset
1530 assume the offset is zero and use the size of the whole
1531 enclosing object instead. */
1532 base
= get_base_address (arg
);
1535 /* If the base object is a pointer no upper bound on the length
1536 can be determined. Otherwise the maximum length is equal to
1537 the size of the enclosing object minus the offset of
1538 the referenced subobject minus 1 (for the terminating nul). */
1539 tree type
= TREE_TYPE (base
);
1540 if (TREE_CODE (type
) == POINTER_TYPE
1541 || (TREE_CODE (base
) != PARM_DECL
&& !VAR_P (base
))
1542 || !(val
= DECL_SIZE_UNIT (base
)))
1543 val
= build_all_ones_cst (size_type_node
);
1546 val
= DECL_SIZE_UNIT (base
);
1547 val
= fold_build2 (MINUS_EXPR
, TREE_TYPE (val
), val
,
1548 size_int (offset
+ 1));
1557 /* Adjust the more conservative bound if possible/necessary
1558 and fail otherwise. */
1559 if (rkind
!= SRK_STRLEN
)
1561 if (TREE_CODE (pdata
->maxlen
) != INTEGER_CST
1562 || TREE_CODE (val
) != INTEGER_CST
)
1565 if (tree_int_cst_lt (pdata
->maxlen
, val
))
1566 pdata
->maxlen
= val
;
1569 else if (simple_cst_equal (val
, pdata
->maxlen
) != 1)
1571 /* Fail if the length of this ARG is different from that
1572 previously determined from another ARG. */
1577 pdata
->maxlen
= val
;
1578 return rkind
== SRK_LENRANGE
|| !integer_all_onesp (val
);
1581 /* For an ARG referencing one or more strings, try to obtain the range
1582 of their lengths, or the size of the largest array ARG referes to if
1583 the range of lengths cannot be determined, and store all in *PDATA.
1584 For an integer ARG (when RKIND == SRK_INT_VALUE), try to determine
1585 the maximum constant value.
1586 If ARG is an SSA_NAME, follow its use-def chains. When RKIND ==
1587 SRK_STRLEN, then if PDATA->MAXLEN is not equal to the determined
1588 length or if we are unable to determine the length, return false.
1589 VISITED is a bitmap of visited variables.
1590 RKIND determines the kind of value or range to obtain (see
1592 Set PDATA->DECL if ARG refers to an unterminated constant array.
1593 On input, set ELTSIZE to 1 for normal single byte character strings,
1594 and either 2 or 4 for wide characer strings (the size of wchar_t).
1595 Return true if *PDATA was successfully populated and false otherwise. */
1598 get_range_strlen (tree arg
, bitmap
*visited
,
1599 strlen_range_kind rkind
,
1600 c_strlen_data
*pdata
, unsigned eltsize
)
1603 if (TREE_CODE (arg
) != SSA_NAME
)
1604 return get_range_strlen_tree (arg
, visited
, rkind
, pdata
, eltsize
);
1606 /* If ARG is registered for SSA update we cannot look at its defining
1608 if (name_registered_for_update_p (arg
))
1611 /* If we were already here, break the infinite cycle. */
1613 *visited
= BITMAP_ALLOC (NULL
);
1614 if (!bitmap_set_bit (*visited
, SSA_NAME_VERSION (arg
)))
1618 gimple
*def_stmt
= SSA_NAME_DEF_STMT (var
);
1620 switch (gimple_code (def_stmt
))
1623 /* The RHS of the statement defining VAR must either have a
1624 constant length or come from another SSA_NAME with a constant
1626 if (gimple_assign_single_p (def_stmt
)
1627 || gimple_assign_unary_nop_p (def_stmt
))
1629 tree rhs
= gimple_assign_rhs1 (def_stmt
);
1630 return get_range_strlen (rhs
, visited
, rkind
, pdata
, eltsize
);
1632 else if (gimple_assign_rhs_code (def_stmt
) == COND_EXPR
)
1634 tree ops
[2] = { gimple_assign_rhs2 (def_stmt
),
1635 gimple_assign_rhs3 (def_stmt
) };
1637 for (unsigned int i
= 0; i
< 2; i
++)
1638 if (!get_range_strlen (ops
[i
], visited
, rkind
, pdata
, eltsize
))
1640 if (rkind
!= SRK_LENRANGE
)
1642 /* Set the upper bound to the maximum to prevent
1643 it from being adjusted in the next iteration but
1644 leave MINLEN and the more conservative MAXBOUND
1645 determined so far alone (or leave them null if
1646 they haven't been set yet). That the MINLEN is
1647 in fact zero can be determined from MAXLEN being
1648 unbounded but the discovered minimum is used for
1650 pdata
->maxlen
= build_all_ones_cst (size_type_node
);
1657 /* Unless RKIND == SRK_LENRANGE, all arguments of the PHI node
1658 must have a constant length. */
1659 for (unsigned i
= 0; i
< gimple_phi_num_args (def_stmt
); i
++)
1661 tree arg
= gimple_phi_arg (def_stmt
, i
)->def
;
1663 /* If this PHI has itself as an argument, we cannot
1664 determine the string length of this argument. However,
1665 if we can find a constant string length for the other
1666 PHI args then we can still be sure that this is a
1667 constant string length. So be optimistic and just
1668 continue with the next argument. */
1669 if (arg
== gimple_phi_result (def_stmt
))
1672 if (!get_range_strlen (arg
, visited
, rkind
, pdata
, eltsize
))
1674 if (rkind
!= SRK_LENRANGE
)
1676 /* Set the upper bound to the maximum to prevent
1677 it from being adjusted in the next iteration but
1678 leave MINLEN and the more conservative MAXBOUND
1679 determined so far alone (or leave them null if
1680 they haven't been set yet). That the MINLEN is
1681 in fact zero can be determined from MAXLEN being
1682 unbounded but the discovered minimum is used for
1684 pdata
->maxlen
= build_all_ones_cst (size_type_node
);
1694 /* Try to obtain the range of the lengths of the string(s) referenced
1695 by ARG, or the size of the largest array ARG refers to if the range
1696 of lengths cannot be determined, and store all in *PDATA which must
1697 be zero-initialized on input except PDATA->MAXBOUND may be set to
1698 a non-null tree node other than INTEGER_CST to request to have it
1699 set to the length of the longest string in a PHI. ELTSIZE is
1700 the expected size of the string element in bytes: 1 for char and
1701 some power of 2 for wide characters.
1702 Return true if the range [PDATA->MINLEN, PDATA->MAXLEN] is suitable
1703 for optimization. Returning false means that a nonzero PDATA->MINLEN
1704 doesn't reflect the true lower bound of the range when PDATA->MAXLEN
1705 is -1 (in that case, the actual range is indeterminate, i.e.,
1706 [0, PTRDIFF_MAX - 2]. */
1709 get_range_strlen (tree arg
, c_strlen_data
*pdata
, unsigned eltsize
)
1711 bitmap visited
= NULL
;
1712 tree maxbound
= pdata
->maxbound
;
1714 if (!get_range_strlen (arg
, &visited
, SRK_LENRANGE
, pdata
, eltsize
))
1716 /* On failure extend the length range to an impossible maximum
1717 (a valid MAXLEN must be less than PTRDIFF_MAX - 1). Other
1718 members can stay unchanged regardless. */
1719 pdata
->minlen
= ssize_int (0);
1720 pdata
->maxlen
= build_all_ones_cst (size_type_node
);
1722 else if (!pdata
->minlen
)
1723 pdata
->minlen
= ssize_int (0);
1725 /* If it's unchanged from it initial non-null value, set the conservative
1726 MAXBOUND to SIZE_MAX. Otherwise leave it null (if it is null). */
1727 if (maxbound
&& pdata
->maxbound
== maxbound
)
1728 pdata
->maxbound
= build_all_ones_cst (size_type_node
);
1731 BITMAP_FREE (visited
);
1733 return !integer_all_onesp (pdata
->maxlen
);
1736 /* Return the maximum value for ARG given RKIND (see strlen_range_kind).
1737 For ARG of pointer types, NONSTR indicates if the caller is prepared
1738 to handle unterminated strings. For integer ARG and when RKIND ==
1739 SRK_INT_VALUE, NONSTR must be null.
1741 If an unterminated array is discovered and our caller handles
1742 unterminated arrays, then bubble up the offending DECL and
1743 return the maximum size. Otherwise return NULL. */
1746 get_maxval_strlen (tree arg
, strlen_range_kind rkind
, tree
*nonstr
= NULL
)
1748 /* A non-null NONSTR is meaningless when determining the maximum
1749 value of an integer ARG. */
1750 gcc_assert (rkind
!= SRK_INT_VALUE
|| nonstr
== NULL
);
1751 /* ARG must have an integral type when RKIND says so. */
1752 gcc_assert (rkind
!= SRK_INT_VALUE
|| INTEGRAL_TYPE_P (TREE_TYPE (arg
)));
1754 bitmap visited
= NULL
;
1756 /* Reset DATA.MAXLEN if the call fails or when DATA.MAXLEN
1758 c_strlen_data lendata
= { };
1759 if (!get_range_strlen (arg
, &visited
, rkind
, &lendata
, /* eltsize = */1))
1760 lendata
.maxlen
= NULL_TREE
;
1761 else if (lendata
.maxlen
&& integer_all_onesp (lendata
.maxlen
))
1762 lendata
.maxlen
= NULL_TREE
;
1765 BITMAP_FREE (visited
);
1769 /* For callers prepared to handle unterminated arrays set
1770 *NONSTR to point to the declaration of the array and return
1771 the maximum length/size. */
1772 *nonstr
= lendata
.decl
;
1773 return lendata
.maxlen
;
1776 /* Fail if the constant array isn't nul-terminated. */
1777 return lendata
.decl
? NULL_TREE
: lendata
.maxlen
;
1781 /* Fold function call to builtin strcpy with arguments DEST and SRC.
1782 If LEN is not NULL, it represents the length of the string to be
1783 copied. Return NULL_TREE if no simplification can be made. */
1786 gimple_fold_builtin_strcpy (gimple_stmt_iterator
*gsi
,
1787 tree dest
, tree src
)
1789 gimple
*stmt
= gsi_stmt (*gsi
);
1790 location_t loc
= gimple_location (stmt
);
1793 /* If SRC and DEST are the same (and not volatile), return DEST. */
1794 if (operand_equal_p (src
, dest
, 0))
1796 /* Issue -Wrestrict unless the pointers are null (those do
1797 not point to objects and so do not indicate an overlap;
1798 such calls could be the result of sanitization and jump
1800 if (!integer_zerop (dest
) && !gimple_no_warning_p (stmt
))
1802 tree func
= gimple_call_fndecl (stmt
);
1804 warning_at (loc
, OPT_Wrestrict
,
1805 "%qD source argument is the same as destination",
1809 replace_call_with_value (gsi
, dest
);
1813 if (optimize_function_for_size_p (cfun
))
1816 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
1820 /* Set to non-null if ARG refers to an unterminated array. */
1822 tree len
= get_maxval_strlen (src
, SRK_STRLEN
, &nonstr
);
1826 /* Avoid folding calls with unterminated arrays. */
1827 if (!gimple_no_warning_p (stmt
))
1828 warn_string_no_nul (loc
, "strcpy", src
, nonstr
);
1829 gimple_set_no_warning (stmt
, true);
1836 len
= fold_convert_loc (loc
, size_type_node
, len
);
1837 len
= size_binop_loc (loc
, PLUS_EXPR
, len
, build_int_cst (size_type_node
, 1));
1838 len
= force_gimple_operand_gsi (gsi
, len
, true,
1839 NULL_TREE
, true, GSI_SAME_STMT
);
1840 gimple
*repl
= gimple_build_call (fn
, 3, dest
, src
, len
);
1841 replace_call_with_call_and_fold (gsi
, repl
);
1845 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
1846 If SLEN is not NULL, it represents the length of the source string.
1847 Return NULL_TREE if no simplification can be made. */
1850 gimple_fold_builtin_strncpy (gimple_stmt_iterator
*gsi
,
1851 tree dest
, tree src
, tree len
)
1853 gimple
*stmt
= gsi_stmt (*gsi
);
1854 location_t loc
= gimple_location (stmt
);
1855 bool nonstring
= get_attr_nonstring_decl (dest
) != NULL_TREE
;
1857 /* If the LEN parameter is zero, return DEST. */
1858 if (integer_zerop (len
))
1860 /* Avoid warning if the destination refers to a an array/pointer
1861 decorate with attribute nonstring. */
1864 tree fndecl
= gimple_call_fndecl (stmt
);
1866 /* Warn about the lack of nul termination: the result is not
1867 a (nul-terminated) string. */
1868 tree slen
= get_maxval_strlen (src
, SRK_STRLEN
);
1869 if (slen
&& !integer_zerop (slen
))
1870 warning_at (loc
, OPT_Wstringop_truncation
,
1871 "%G%qD destination unchanged after copying no bytes "
1872 "from a string of length %E",
1873 stmt
, fndecl
, slen
);
1875 warning_at (loc
, OPT_Wstringop_truncation
,
1876 "%G%qD destination unchanged after copying no bytes",
1880 replace_call_with_value (gsi
, dest
);
1884 /* We can't compare slen with len as constants below if len is not a
1886 if (TREE_CODE (len
) != INTEGER_CST
)
1889 /* Now, we must be passed a constant src ptr parameter. */
1890 tree slen
= get_maxval_strlen (src
, SRK_STRLEN
);
1891 if (!slen
|| TREE_CODE (slen
) != INTEGER_CST
)
1894 /* The size of the source string including the terminating nul. */
1895 tree ssize
= size_binop_loc (loc
, PLUS_EXPR
, slen
, ssize_int (1));
1897 /* We do not support simplification of this case, though we do
1898 support it when expanding trees into RTL. */
1899 /* FIXME: generate a call to __builtin_memset. */
1900 if (tree_int_cst_lt (ssize
, len
))
1903 /* Diagnose truncation that leaves the copy unterminated. */
1904 maybe_diag_stxncpy_trunc (*gsi
, src
, len
);
1906 /* OK transform into builtin memcpy. */
1907 tree fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
1911 len
= fold_convert_loc (loc
, size_type_node
, len
);
1912 len
= force_gimple_operand_gsi (gsi
, len
, true,
1913 NULL_TREE
, true, GSI_SAME_STMT
);
1914 gimple
*repl
= gimple_build_call (fn
, 3, dest
, src
, len
);
1915 replace_call_with_call_and_fold (gsi
, repl
);
1920 /* Fold function call to builtin strchr or strrchr.
1921 If both arguments are constant, evaluate and fold the result,
1922 otherwise simplify str(r)chr (str, 0) into str + strlen (str).
1923 In general strlen is significantly faster than strchr
1924 due to being a simpler operation. */
1926 gimple_fold_builtin_strchr (gimple_stmt_iterator
*gsi
, bool is_strrchr
)
1928 gimple
*stmt
= gsi_stmt (*gsi
);
1929 tree str
= gimple_call_arg (stmt
, 0);
1930 tree c
= gimple_call_arg (stmt
, 1);
1931 location_t loc
= gimple_location (stmt
);
1935 if (!gimple_call_lhs (stmt
))
1938 /* Avoid folding if the first argument is not a nul-terminated array.
1939 Defer warning until later. */
1940 if (!check_nul_terminated_array (NULL_TREE
, str
))
1943 if ((p
= c_getstr (str
)) && target_char_cst_p (c
, &ch
))
1945 const char *p1
= is_strrchr
? strrchr (p
, ch
) : strchr (p
, ch
);
1949 replace_call_with_value (gsi
, integer_zero_node
);
1953 tree len
= build_int_cst (size_type_node
, p1
- p
);
1954 gimple_seq stmts
= NULL
;
1955 gimple
*new_stmt
= gimple_build_assign (gimple_call_lhs (stmt
),
1956 POINTER_PLUS_EXPR
, str
, len
);
1957 gimple_seq_add_stmt_without_update (&stmts
, new_stmt
);
1958 gsi_replace_with_seq_vops (gsi
, stmts
);
1962 if (!integer_zerop (c
))
1965 /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size. */
1966 if (is_strrchr
&& optimize_function_for_size_p (cfun
))
1968 tree strchr_fn
= builtin_decl_implicit (BUILT_IN_STRCHR
);
1972 gimple
*repl
= gimple_build_call (strchr_fn
, 2, str
, c
);
1973 replace_call_with_call_and_fold (gsi
, repl
);
1981 tree strlen_fn
= builtin_decl_implicit (BUILT_IN_STRLEN
);
1986 /* Create newstr = strlen (str). */
1987 gimple_seq stmts
= NULL
;
1988 gimple
*new_stmt
= gimple_build_call (strlen_fn
, 1, str
);
1989 gimple_set_location (new_stmt
, loc
);
1990 len
= create_tmp_reg_or_ssa_name (size_type_node
);
1991 gimple_call_set_lhs (new_stmt
, len
);
1992 gimple_seq_add_stmt_without_update (&stmts
, new_stmt
);
1994 /* Create (str p+ strlen (str)). */
1995 new_stmt
= gimple_build_assign (gimple_call_lhs (stmt
),
1996 POINTER_PLUS_EXPR
, str
, len
);
1997 gimple_seq_add_stmt_without_update (&stmts
, new_stmt
);
1998 gsi_replace_with_seq_vops (gsi
, stmts
);
1999 /* gsi now points at the assignment to the lhs, get a
2000 stmt iterator to the strlen.
2001 ??? We can't use gsi_for_stmt as that doesn't work when the
2002 CFG isn't built yet. */
2003 gimple_stmt_iterator gsi2
= *gsi
;
2009 /* Fold function call to builtin strstr.
2010 If both arguments are constant, evaluate and fold the result,
2011 additionally fold strstr (x, "") into x and strstr (x, "c")
2012 into strchr (x, 'c'). */
2014 gimple_fold_builtin_strstr (gimple_stmt_iterator
*gsi
)
2016 gimple
*stmt
= gsi_stmt (*gsi
);
2017 if (!gimple_call_lhs (stmt
))
2020 tree haystack
= gimple_call_arg (stmt
, 0);
2021 tree needle
= gimple_call_arg (stmt
, 1);
2023 /* Avoid folding if either argument is not a nul-terminated array.
2024 Defer warning until later. */
2025 if (!check_nul_terminated_array (NULL_TREE
, haystack
)
2026 || !check_nul_terminated_array (NULL_TREE
, needle
))
2029 const char *q
= c_getstr (needle
);
2033 if (const char *p
= c_getstr (haystack
))
2035 const char *r
= strstr (p
, q
);
2039 replace_call_with_value (gsi
, integer_zero_node
);
2043 tree len
= build_int_cst (size_type_node
, r
- p
);
2044 gimple_seq stmts
= NULL
;
2046 = gimple_build_assign (gimple_call_lhs (stmt
), POINTER_PLUS_EXPR
,
2048 gimple_seq_add_stmt_without_update (&stmts
, new_stmt
);
2049 gsi_replace_with_seq_vops (gsi
, stmts
);
2053 /* For strstr (x, "") return x. */
2056 replace_call_with_value (gsi
, haystack
);
2060 /* Transform strstr (x, "c") into strchr (x, 'c'). */
2063 tree strchr_fn
= builtin_decl_implicit (BUILT_IN_STRCHR
);
2066 tree c
= build_int_cst (integer_type_node
, q
[0]);
2067 gimple
*repl
= gimple_build_call (strchr_fn
, 2, haystack
, c
);
2068 replace_call_with_call_and_fold (gsi
, repl
);
2076 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
2079 Return NULL_TREE if no simplification was possible, otherwise return the
2080 simplified form of the call as a tree.
2082 The simplified form may be a constant or other expression which
2083 computes the same value, but in a more efficient manner (including
2084 calls to other builtin functions).
2086 The call may contain arguments which need to be evaluated, but
2087 which are not useful to determine the result of the call. In
2088 this case we return a chain of COMPOUND_EXPRs. The LHS of each
2089 COMPOUND_EXPR will be an argument which must be evaluated.
2090 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
2091 COMPOUND_EXPR in the chain will contain the tree for the simplified
2092 form of the builtin function call. */
2095 gimple_fold_builtin_strcat (gimple_stmt_iterator
*gsi
, tree dst
, tree src
)
2097 gimple
*stmt
= gsi_stmt (*gsi
);
2098 location_t loc
= gimple_location (stmt
);
2100 const char *p
= c_getstr (src
);
2102 /* If the string length is zero, return the dst parameter. */
2103 if (p
&& *p
== '\0')
2105 replace_call_with_value (gsi
, dst
);
2109 if (!optimize_bb_for_speed_p (gimple_bb (stmt
)))
2112 /* See if we can store by pieces into (dst + strlen(dst)). */
2114 tree strlen_fn
= builtin_decl_implicit (BUILT_IN_STRLEN
);
2115 tree memcpy_fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
2117 if (!strlen_fn
|| !memcpy_fn
)
2120 /* If the length of the source string isn't computable don't
2121 split strcat into strlen and memcpy. */
2122 tree len
= get_maxval_strlen (src
, SRK_STRLEN
);
2126 /* Create strlen (dst). */
2127 gimple_seq stmts
= NULL
, stmts2
;
2128 gimple
*repl
= gimple_build_call (strlen_fn
, 1, dst
);
2129 gimple_set_location (repl
, loc
);
2130 newdst
= create_tmp_reg_or_ssa_name (size_type_node
);
2131 gimple_call_set_lhs (repl
, newdst
);
2132 gimple_seq_add_stmt_without_update (&stmts
, repl
);
2134 /* Create (dst p+ strlen (dst)). */
2135 newdst
= fold_build_pointer_plus_loc (loc
, dst
, newdst
);
2136 newdst
= force_gimple_operand (newdst
, &stmts2
, true, NULL_TREE
);
2137 gimple_seq_add_seq_without_update (&stmts
, stmts2
);
2139 len
= fold_convert_loc (loc
, size_type_node
, len
);
2140 len
= size_binop_loc (loc
, PLUS_EXPR
, len
,
2141 build_int_cst (size_type_node
, 1));
2142 len
= force_gimple_operand (len
, &stmts2
, true, NULL_TREE
);
2143 gimple_seq_add_seq_without_update (&stmts
, stmts2
);
2145 repl
= gimple_build_call (memcpy_fn
, 3, newdst
, src
, len
);
2146 gimple_seq_add_stmt_without_update (&stmts
, repl
);
2147 if (gimple_call_lhs (stmt
))
2149 repl
= gimple_build_assign (gimple_call_lhs (stmt
), dst
);
2150 gimple_seq_add_stmt_without_update (&stmts
, repl
);
2151 gsi_replace_with_seq_vops (gsi
, stmts
);
2152 /* gsi now points at the assignment to the lhs, get a
2153 stmt iterator to the memcpy call.
2154 ??? We can't use gsi_for_stmt as that doesn't work when the
2155 CFG isn't built yet. */
2156 gimple_stmt_iterator gsi2
= *gsi
;
2162 gsi_replace_with_seq_vops (gsi
, stmts
);
2168 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
2169 are the arguments to the call. */
2172 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator
*gsi
)
2174 gimple
*stmt
= gsi_stmt (*gsi
);
2175 tree dest
= gimple_call_arg (stmt
, 0);
2176 tree src
= gimple_call_arg (stmt
, 1);
2177 tree size
= gimple_call_arg (stmt
, 2);
2183 /* If the SRC parameter is "", return DEST. */
2184 if (p
&& *p
== '\0')
2186 replace_call_with_value (gsi
, dest
);
2190 if (! tree_fits_uhwi_p (size
) || ! integer_all_onesp (size
))
2193 /* If __builtin_strcat_chk is used, assume strcat is available. */
2194 fn
= builtin_decl_explicit (BUILT_IN_STRCAT
);
2198 gimple
*repl
= gimple_build_call (fn
, 2, dest
, src
);
2199 replace_call_with_call_and_fold (gsi
, repl
);
2203 /* Simplify a call to the strncat builtin. */
2206 gimple_fold_builtin_strncat (gimple_stmt_iterator
*gsi
)
2208 gimple
*stmt
= gsi_stmt (*gsi
);
2209 tree dst
= gimple_call_arg (stmt
, 0);
2210 tree src
= gimple_call_arg (stmt
, 1);
2211 tree len
= gimple_call_arg (stmt
, 2);
2213 const char *p
= c_getstr (src
);
2215 /* If the requested length is zero, or the src parameter string
2216 length is zero, return the dst parameter. */
2217 if (integer_zerop (len
) || (p
&& *p
== '\0'))
2219 replace_call_with_value (gsi
, dst
);
2223 if (TREE_CODE (len
) != INTEGER_CST
|| !p
)
2226 unsigned srclen
= strlen (p
);
2228 int cmpsrc
= compare_tree_int (len
, srclen
);
2230 /* Return early if the requested len is less than the string length.
2231 Warnings will be issued elsewhere later. */
2235 unsigned HOST_WIDE_INT dstsize
;
2237 bool nowarn
= gimple_no_warning_p (stmt
);
2239 if (!nowarn
&& compute_builtin_object_size (dst
, 1, &dstsize
))
2241 int cmpdst
= compare_tree_int (len
, dstsize
);
2245 tree fndecl
= gimple_call_fndecl (stmt
);
2247 /* Strncat copies (at most) LEN bytes and always appends
2248 the terminating NUL so the specified bound should never
2249 be equal to (or greater than) the size of the destination.
2250 If it is, the copy could overflow. */
2251 location_t loc
= gimple_location (stmt
);
2252 nowarn
= warning_at (loc
, OPT_Wstringop_overflow_
,
2254 ? G_("%G%qD specified bound %E equals "
2256 : G_("%G%qD specified bound %E exceeds "
2257 "destination size %wu"),
2258 stmt
, fndecl
, len
, dstsize
);
2260 gimple_set_no_warning (stmt
, true);
2264 if (!nowarn
&& cmpsrc
== 0)
2266 tree fndecl
= gimple_call_fndecl (stmt
);
2267 location_t loc
= gimple_location (stmt
);
2269 /* To avoid possible overflow the specified bound should also
2270 not be equal to the length of the source, even when the size
2271 of the destination is unknown (it's not an uncommon mistake
2272 to specify as the bound to strncpy the length of the source). */
2273 if (warning_at (loc
, OPT_Wstringop_overflow_
,
2274 "%G%qD specified bound %E equals source length",
2276 gimple_set_no_warning (stmt
, true);
2279 tree fn
= builtin_decl_implicit (BUILT_IN_STRCAT
);
2281 /* If the replacement _DECL isn't initialized, don't do the
2286 /* Otherwise, emit a call to strcat. */
2287 gcall
*repl
= gimple_build_call (fn
, 2, dst
, src
);
2288 replace_call_with_call_and_fold (gsi
, repl
);
2292 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
2296 gimple_fold_builtin_strncat_chk (gimple_stmt_iterator
*gsi
)
2298 gimple
*stmt
= gsi_stmt (*gsi
);
2299 tree dest
= gimple_call_arg (stmt
, 0);
2300 tree src
= gimple_call_arg (stmt
, 1);
2301 tree len
= gimple_call_arg (stmt
, 2);
2302 tree size
= gimple_call_arg (stmt
, 3);
2307 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
2308 if ((p
&& *p
== '\0')
2309 || integer_zerop (len
))
2311 replace_call_with_value (gsi
, dest
);
2315 if (! tree_fits_uhwi_p (size
))
2318 if (! integer_all_onesp (size
))
2320 tree src_len
= c_strlen (src
, 1);
2322 && tree_fits_uhwi_p (src_len
)
2323 && tree_fits_uhwi_p (len
)
2324 && ! tree_int_cst_lt (len
, src_len
))
2326 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
2327 fn
= builtin_decl_explicit (BUILT_IN_STRCAT_CHK
);
2331 gimple
*repl
= gimple_build_call (fn
, 3, dest
, src
, size
);
2332 replace_call_with_call_and_fold (gsi
, repl
);
2338 /* If __builtin_strncat_chk is used, assume strncat is available. */
2339 fn
= builtin_decl_explicit (BUILT_IN_STRNCAT
);
2343 gimple
*repl
= gimple_build_call (fn
, 3, dest
, src
, len
);
2344 replace_call_with_call_and_fold (gsi
, repl
);
2348 /* Build and append gimple statements to STMTS that would load a first
2349 character of a memory location identified by STR. LOC is location
2350 of the statement. */
2353 gimple_load_first_char (location_t loc
, tree str
, gimple_seq
*stmts
)
2357 tree cst_uchar_node
= build_type_variant (unsigned_char_type_node
, 1, 0);
2358 tree cst_uchar_ptr_node
2359 = build_pointer_type_for_mode (cst_uchar_node
, ptr_mode
, true);
2360 tree off0
= build_int_cst (cst_uchar_ptr_node
, 0);
2362 tree temp
= fold_build2_loc (loc
, MEM_REF
, cst_uchar_node
, str
, off0
);
2363 gassign
*stmt
= gimple_build_assign (NULL_TREE
, temp
);
2364 var
= create_tmp_reg_or_ssa_name (cst_uchar_node
, stmt
);
2366 gimple_assign_set_lhs (stmt
, var
);
2367 gimple_seq_add_stmt_without_update (stmts
, stmt
);
2372 /* Fold a call to the str{n}{case}cmp builtin pointed by GSI iterator. */
2375 gimple_fold_builtin_string_compare (gimple_stmt_iterator
*gsi
)
2377 gimple
*stmt
= gsi_stmt (*gsi
);
2378 tree callee
= gimple_call_fndecl (stmt
);
2379 enum built_in_function fcode
= DECL_FUNCTION_CODE (callee
);
2381 tree type
= integer_type_node
;
2382 tree str1
= gimple_call_arg (stmt
, 0);
2383 tree str2
= gimple_call_arg (stmt
, 1);
2384 tree lhs
= gimple_call_lhs (stmt
);
2386 tree bound_node
= NULL_TREE
;
2387 unsigned HOST_WIDE_INT bound
= HOST_WIDE_INT_M1U
;
2389 /* Handle strncmp and strncasecmp functions. */
2390 if (gimple_call_num_args (stmt
) == 3)
2392 bound_node
= gimple_call_arg (stmt
, 2);
2393 if (tree_fits_uhwi_p (bound_node
))
2394 bound
= tree_to_uhwi (bound_node
);
2397 /* If the BOUND parameter is zero, return zero. */
2400 replace_call_with_value (gsi
, integer_zero_node
);
2404 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
2405 if (operand_equal_p (str1
, str2
, 0))
2407 replace_call_with_value (gsi
, integer_zero_node
);
2411 /* Initially set to the number of characters, including the terminating
2412 nul if each array has one. LENx == strnlen (Sx, LENx) implies that
2413 the array Sx is not terminated by a nul.
2414 For nul-terminated strings then adjusted to their length so that
2415 LENx == NULPOSx holds. */
2416 unsigned HOST_WIDE_INT len1
= HOST_WIDE_INT_MAX
, len2
= len1
;
2417 const char *p1
= c_getstr (str1
, &len1
);
2418 const char *p2
= c_getstr (str2
, &len2
);
2420 /* The position of the terminating nul character if one exists, otherwise
2421 a value greater than LENx. */
2422 unsigned HOST_WIDE_INT nulpos1
= HOST_WIDE_INT_MAX
, nulpos2
= nulpos1
;
2426 size_t n
= strnlen (p1
, len1
);
2433 size_t n
= strnlen (p2
, len2
);
2438 /* For known strings, return an immediate value. */
2442 bool known_result
= false;
2446 case BUILT_IN_STRCMP
:
2447 case BUILT_IN_STRCMP_EQ
:
2448 if (len1
!= nulpos1
|| len2
!= nulpos2
)
2451 r
= strcmp (p1
, p2
);
2452 known_result
= true;
2455 case BUILT_IN_STRNCMP
:
2456 case BUILT_IN_STRNCMP_EQ
:
2458 if (bound
== HOST_WIDE_INT_M1U
)
2461 /* Reduce the bound to be no more than the length
2462 of the shorter of the two strings, or the sizes
2463 of the unterminated arrays. */
2464 unsigned HOST_WIDE_INT n
= bound
;
2466 if (len1
== nulpos1
&& len1
< n
)
2468 if (len2
== nulpos2
&& len2
< n
)
2471 if (MIN (nulpos1
, nulpos2
) + 1 < n
)
2474 r
= strncmp (p1
, p2
, n
);
2475 known_result
= true;
2478 /* Only handleable situation is where the string are equal (result 0),
2479 which is already handled by operand_equal_p case. */
2480 case BUILT_IN_STRCASECMP
:
2482 case BUILT_IN_STRNCASECMP
:
2484 if (bound
== HOST_WIDE_INT_M1U
)
2486 r
= strncmp (p1
, p2
, bound
);
2488 known_result
= true;
2497 replace_call_with_value (gsi
, build_cmp_result (type
, r
));
2502 bool nonzero_bound
= (bound
>= 1 && bound
< HOST_WIDE_INT_M1U
)
2503 || fcode
== BUILT_IN_STRCMP
2504 || fcode
== BUILT_IN_STRCMP_EQ
2505 || fcode
== BUILT_IN_STRCASECMP
;
2507 location_t loc
= gimple_location (stmt
);
2509 /* If the second arg is "", return *(const unsigned char*)arg1. */
2510 if (p2
&& *p2
== '\0' && nonzero_bound
)
2512 gimple_seq stmts
= NULL
;
2513 tree var
= gimple_load_first_char (loc
, str1
, &stmts
);
2516 stmt
= gimple_build_assign (lhs
, NOP_EXPR
, var
);
2517 gimple_seq_add_stmt_without_update (&stmts
, stmt
);
2520 gsi_replace_with_seq_vops (gsi
, stmts
);
2524 /* If the first arg is "", return -*(const unsigned char*)arg2. */
2525 if (p1
&& *p1
== '\0' && nonzero_bound
)
2527 gimple_seq stmts
= NULL
;
2528 tree var
= gimple_load_first_char (loc
, str2
, &stmts
);
2532 tree c
= create_tmp_reg_or_ssa_name (integer_type_node
);
2533 stmt
= gimple_build_assign (c
, NOP_EXPR
, var
);
2534 gimple_seq_add_stmt_without_update (&stmts
, stmt
);
2536 stmt
= gimple_build_assign (lhs
, NEGATE_EXPR
, c
);
2537 gimple_seq_add_stmt_without_update (&stmts
, stmt
);
2540 gsi_replace_with_seq_vops (gsi
, stmts
);
2544 /* If BOUND is one, return an expression corresponding to
2545 (*(const unsigned char*)arg2 - *(const unsigned char*)arg1). */
2546 if (fcode
== BUILT_IN_STRNCMP
&& bound
== 1)
2548 gimple_seq stmts
= NULL
;
2549 tree temp1
= gimple_load_first_char (loc
, str1
, &stmts
);
2550 tree temp2
= gimple_load_first_char (loc
, str2
, &stmts
);
2554 tree c1
= create_tmp_reg_or_ssa_name (integer_type_node
);
2555 gassign
*convert1
= gimple_build_assign (c1
, NOP_EXPR
, temp1
);
2556 gimple_seq_add_stmt_without_update (&stmts
, convert1
);
2558 tree c2
= create_tmp_reg_or_ssa_name (integer_type_node
);
2559 gassign
*convert2
= gimple_build_assign (c2
, NOP_EXPR
, temp2
);
2560 gimple_seq_add_stmt_without_update (&stmts
, convert2
);
2562 stmt
= gimple_build_assign (lhs
, MINUS_EXPR
, c1
, c2
);
2563 gimple_seq_add_stmt_without_update (&stmts
, stmt
);
2566 gsi_replace_with_seq_vops (gsi
, stmts
);
2570 /* If BOUND is greater than the length of one constant string,
2571 and the other argument is also a nul-terminated string, replace
2572 strncmp with strcmp. */
2573 if (fcode
== BUILT_IN_STRNCMP
2574 && bound
> 0 && bound
< HOST_WIDE_INT_M1U
2575 && ((p2
&& len2
< bound
&& len2
== nulpos2
)
2576 || (p1
&& len1
< bound
&& len1
== nulpos1
)))
2578 tree fn
= builtin_decl_implicit (BUILT_IN_STRCMP
);
2581 gimple
*repl
= gimple_build_call (fn
, 2, str1
, str2
);
2582 replace_call_with_call_and_fold (gsi
, repl
);
2589 /* Fold a call to the memchr pointed by GSI iterator. */
2592 gimple_fold_builtin_memchr (gimple_stmt_iterator
*gsi
)
2594 gimple
*stmt
= gsi_stmt (*gsi
);
2595 tree lhs
= gimple_call_lhs (stmt
);
2596 tree arg1
= gimple_call_arg (stmt
, 0);
2597 tree arg2
= gimple_call_arg (stmt
, 1);
2598 tree len
= gimple_call_arg (stmt
, 2);
2600 /* If the LEN parameter is zero, return zero. */
2601 if (integer_zerop (len
))
2603 replace_call_with_value (gsi
, build_int_cst (ptr_type_node
, 0));
2608 if (TREE_CODE (arg2
) != INTEGER_CST
2609 || !tree_fits_uhwi_p (len
)
2610 || !target_char_cst_p (arg2
, &c
))
2613 unsigned HOST_WIDE_INT length
= tree_to_uhwi (len
);
2614 unsigned HOST_WIDE_INT string_length
;
2615 const char *p1
= c_getstr (arg1
, &string_length
);
2619 const char *r
= (const char *)memchr (p1
, c
, MIN (length
, string_length
));
2622 tree mem_size
, offset_node
;
2623 string_constant (arg1
, &offset_node
, &mem_size
, NULL
);
2624 unsigned HOST_WIDE_INT offset
= (offset_node
== NULL_TREE
)
2625 ? 0 : tree_to_uhwi (offset_node
);
2626 /* MEM_SIZE is the size of the array the string literal
2628 unsigned HOST_WIDE_INT string_size
= tree_to_uhwi (mem_size
) - offset
;
2629 gcc_checking_assert (string_length
<= string_size
);
2630 if (length
<= string_size
)
2632 replace_call_with_value (gsi
, build_int_cst (ptr_type_node
, 0));
2638 unsigned HOST_WIDE_INT offset
= r
- p1
;
2639 gimple_seq stmts
= NULL
;
2640 if (lhs
!= NULL_TREE
)
2642 tree offset_cst
= build_int_cst (TREE_TYPE (len
), offset
);
2643 gassign
*stmt
= gimple_build_assign (lhs
, POINTER_PLUS_EXPR
,
2645 gimple_seq_add_stmt_without_update (&stmts
, stmt
);
2648 gimple_seq_add_stmt_without_update (&stmts
,
2649 gimple_build_nop ());
2651 gsi_replace_with_seq_vops (gsi
, stmts
);
2659 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
2660 to the call. IGNORE is true if the value returned
2661 by the builtin will be ignored. UNLOCKED is true is true if this
2662 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
2663 the known length of the string. Return NULL_TREE if no simplification
2667 gimple_fold_builtin_fputs (gimple_stmt_iterator
*gsi
,
2668 tree arg0
, tree arg1
,
2671 gimple
*stmt
= gsi_stmt (*gsi
);
2673 /* If we're using an unlocked function, assume the other unlocked
2674 functions exist explicitly. */
2675 tree
const fn_fputc
= (unlocked
2676 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED
)
2677 : builtin_decl_implicit (BUILT_IN_FPUTC
));
2678 tree
const fn_fwrite
= (unlocked
2679 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED
)
2680 : builtin_decl_implicit (BUILT_IN_FWRITE
));
2682 /* If the return value is used, don't do the transformation. */
2683 if (gimple_call_lhs (stmt
))
2686 /* Get the length of the string passed to fputs. If the length
2687 can't be determined, punt. */
2688 tree len
= get_maxval_strlen (arg0
, SRK_STRLEN
);
2690 || TREE_CODE (len
) != INTEGER_CST
)
2693 switch (compare_tree_int (len
, 1))
2695 case -1: /* length is 0, delete the call entirely . */
2696 replace_call_with_value (gsi
, integer_zero_node
);
2699 case 0: /* length is 1, call fputc. */
2701 const char *p
= c_getstr (arg0
);
2707 gimple
*repl
= gimple_build_call (fn_fputc
, 2,
2709 (integer_type_node
, p
[0]), arg1
);
2710 replace_call_with_call_and_fold (gsi
, repl
);
2715 case 1: /* length is greater than 1, call fwrite. */
2717 /* If optimizing for size keep fputs. */
2718 if (optimize_function_for_size_p (cfun
))
2720 /* New argument list transforming fputs(string, stream) to
2721 fwrite(string, 1, len, stream). */
2725 gimple
*repl
= gimple_build_call (fn_fwrite
, 4, arg0
,
2726 size_one_node
, len
, arg1
);
2727 replace_call_with_call_and_fold (gsi
, repl
);
2736 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
2737 DEST, SRC, LEN, and SIZE are the arguments to the call.
2738 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
2739 code of the builtin. If MAXLEN is not NULL, it is maximum length
2740 passed as third argument. */
2743 gimple_fold_builtin_memory_chk (gimple_stmt_iterator
*gsi
,
2744 tree dest
, tree src
, tree len
, tree size
,
2745 enum built_in_function fcode
)
2747 gimple
*stmt
= gsi_stmt (*gsi
);
2748 location_t loc
= gimple_location (stmt
);
2749 bool ignore
= gimple_call_lhs (stmt
) == NULL_TREE
;
2752 /* If SRC and DEST are the same (and not volatile), return DEST
2753 (resp. DEST+LEN for __mempcpy_chk). */
2754 if (fcode
!= BUILT_IN_MEMSET_CHK
&& operand_equal_p (src
, dest
, 0))
2756 if (fcode
!= BUILT_IN_MEMPCPY_CHK
)
2758 replace_call_with_value (gsi
, dest
);
2763 gimple_seq stmts
= NULL
;
2764 len
= gimple_convert_to_ptrofftype (&stmts
, loc
, len
);
2765 tree temp
= gimple_build (&stmts
, loc
, POINTER_PLUS_EXPR
,
2766 TREE_TYPE (dest
), dest
, len
);
2767 gsi_insert_seq_before (gsi
, stmts
, GSI_SAME_STMT
);
2768 replace_call_with_value (gsi
, temp
);
2773 if (! tree_fits_uhwi_p (size
))
2776 tree maxlen
= get_maxval_strlen (len
, SRK_INT_VALUE
);
2777 if (! integer_all_onesp (size
))
2779 if (! tree_fits_uhwi_p (len
))
2781 /* If LEN is not constant, try MAXLEN too.
2782 For MAXLEN only allow optimizing into non-_ocs function
2783 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2784 if (maxlen
== NULL_TREE
|| ! tree_fits_uhwi_p (maxlen
))
2786 if (fcode
== BUILT_IN_MEMPCPY_CHK
&& ignore
)
2788 /* (void) __mempcpy_chk () can be optimized into
2789 (void) __memcpy_chk (). */
2790 fn
= builtin_decl_explicit (BUILT_IN_MEMCPY_CHK
);
2794 gimple
*repl
= gimple_build_call (fn
, 4, dest
, src
, len
, size
);
2795 replace_call_with_call_and_fold (gsi
, repl
);
2804 if (tree_int_cst_lt (size
, maxlen
))
2809 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
2810 mem{cpy,pcpy,move,set} is available. */
2813 case BUILT_IN_MEMCPY_CHK
:
2814 fn
= builtin_decl_explicit (BUILT_IN_MEMCPY
);
2816 case BUILT_IN_MEMPCPY_CHK
:
2817 fn
= builtin_decl_explicit (BUILT_IN_MEMPCPY
);
2819 case BUILT_IN_MEMMOVE_CHK
:
2820 fn
= builtin_decl_explicit (BUILT_IN_MEMMOVE
);
2822 case BUILT_IN_MEMSET_CHK
:
2823 fn
= builtin_decl_explicit (BUILT_IN_MEMSET
);
2832 gimple
*repl
= gimple_build_call (fn
, 3, dest
, src
, len
);
2833 replace_call_with_call_and_fold (gsi
, repl
);
2837 /* Fold a call to the __st[rp]cpy_chk builtin.
2838 DEST, SRC, and SIZE are the arguments to the call.
2839 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
2840 code of the builtin. If MAXLEN is not NULL, it is maximum length of
2841 strings passed as second argument. */
2844 gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator
*gsi
,
2846 tree src
, tree size
,
2847 enum built_in_function fcode
)
2849 gimple
*stmt
= gsi_stmt (*gsi
);
2850 location_t loc
= gimple_location (stmt
);
2851 bool ignore
= gimple_call_lhs (stmt
) == NULL_TREE
;
2854 /* If SRC and DEST are the same (and not volatile), return DEST. */
2855 if (fcode
== BUILT_IN_STRCPY_CHK
&& operand_equal_p (src
, dest
, 0))
2857 /* Issue -Wrestrict unless the pointers are null (those do
2858 not point to objects and so do not indicate an overlap;
2859 such calls could be the result of sanitization and jump
2861 if (!integer_zerop (dest
) && !gimple_no_warning_p (stmt
))
2863 tree func
= gimple_call_fndecl (stmt
);
2865 warning_at (loc
, OPT_Wrestrict
,
2866 "%qD source argument is the same as destination",
2870 replace_call_with_value (gsi
, dest
);
2874 if (! tree_fits_uhwi_p (size
))
2877 tree maxlen
= get_maxval_strlen (src
, SRK_STRLENMAX
);
2878 if (! integer_all_onesp (size
))
2880 len
= c_strlen (src
, 1);
2881 if (! len
|| ! tree_fits_uhwi_p (len
))
2883 /* If LEN is not constant, try MAXLEN too.
2884 For MAXLEN only allow optimizing into non-_ocs function
2885 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2886 if (maxlen
== NULL_TREE
|| ! tree_fits_uhwi_p (maxlen
))
2888 if (fcode
== BUILT_IN_STPCPY_CHK
)
2893 /* If return value of __stpcpy_chk is ignored,
2894 optimize into __strcpy_chk. */
2895 fn
= builtin_decl_explicit (BUILT_IN_STRCPY_CHK
);
2899 gimple
*repl
= gimple_build_call (fn
, 3, dest
, src
, size
);
2900 replace_call_with_call_and_fold (gsi
, repl
);
2904 if (! len
|| TREE_SIDE_EFFECTS (len
))
2907 /* If c_strlen returned something, but not a constant,
2908 transform __strcpy_chk into __memcpy_chk. */
2909 fn
= builtin_decl_explicit (BUILT_IN_MEMCPY_CHK
);
2913 gimple_seq stmts
= NULL
;
2914 len
= force_gimple_operand (len
, &stmts
, true, NULL_TREE
);
2915 len
= gimple_convert (&stmts
, loc
, size_type_node
, len
);
2916 len
= gimple_build (&stmts
, loc
, PLUS_EXPR
, size_type_node
, len
,
2917 build_int_cst (size_type_node
, 1));
2918 gsi_insert_seq_before (gsi
, stmts
, GSI_SAME_STMT
);
2919 gimple
*repl
= gimple_build_call (fn
, 4, dest
, src
, len
, size
);
2920 replace_call_with_call_and_fold (gsi
, repl
);
2927 if (! tree_int_cst_lt (maxlen
, size
))
2931 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
2932 fn
= builtin_decl_explicit (fcode
== BUILT_IN_STPCPY_CHK
2933 ? BUILT_IN_STPCPY
: BUILT_IN_STRCPY
);
2937 gimple
*repl
= gimple_build_call (fn
, 2, dest
, src
);
2938 replace_call_with_call_and_fold (gsi
, repl
);
2942 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
2943 are the arguments to the call. If MAXLEN is not NULL, it is maximum
2944 length passed as third argument. IGNORE is true if return value can be
2945 ignored. FCODE is the BUILT_IN_* code of the builtin. */
2948 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator
*gsi
,
2949 tree dest
, tree src
,
2950 tree len
, tree size
,
2951 enum built_in_function fcode
)
2953 gimple
*stmt
= gsi_stmt (*gsi
);
2954 bool ignore
= gimple_call_lhs (stmt
) == NULL_TREE
;
2957 if (fcode
== BUILT_IN_STPNCPY_CHK
&& ignore
)
2959 /* If return value of __stpncpy_chk is ignored,
2960 optimize into __strncpy_chk. */
2961 fn
= builtin_decl_explicit (BUILT_IN_STRNCPY_CHK
);
2964 gimple
*repl
= gimple_build_call (fn
, 4, dest
, src
, len
, size
);
2965 replace_call_with_call_and_fold (gsi
, repl
);
2970 if (! tree_fits_uhwi_p (size
))
2973 tree maxlen
= get_maxval_strlen (len
, SRK_INT_VALUE
);
2974 if (! integer_all_onesp (size
))
2976 if (! tree_fits_uhwi_p (len
))
2978 /* If LEN is not constant, try MAXLEN too.
2979 For MAXLEN only allow optimizing into non-_ocs function
2980 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2981 if (maxlen
== NULL_TREE
|| ! tree_fits_uhwi_p (maxlen
))
2987 if (tree_int_cst_lt (size
, maxlen
))
2991 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
2992 fn
= builtin_decl_explicit (fcode
== BUILT_IN_STPNCPY_CHK
2993 ? BUILT_IN_STPNCPY
: BUILT_IN_STRNCPY
);
2997 gimple
*repl
= gimple_build_call (fn
, 3, dest
, src
, len
);
2998 replace_call_with_call_and_fold (gsi
, repl
);
3002 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
3003 Return NULL_TREE if no simplification can be made. */
3006 gimple_fold_builtin_stpcpy (gimple_stmt_iterator
*gsi
)
3008 gcall
*stmt
= as_a
<gcall
*> (gsi_stmt (*gsi
));
3009 location_t loc
= gimple_location (stmt
);
3010 tree dest
= gimple_call_arg (stmt
, 0);
3011 tree src
= gimple_call_arg (stmt
, 1);
3014 /* If the result is unused, replace stpcpy with strcpy. */
3015 if (gimple_call_lhs (stmt
) == NULL_TREE
)
3017 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
3020 gimple_call_set_fndecl (stmt
, fn
);
3025 /* Set to non-null if ARG refers to an unterminated array. */
3026 c_strlen_data data
= { };
3027 tree len
= c_strlen (src
, 1, &data
, 1);
3029 || TREE_CODE (len
) != INTEGER_CST
)
3031 data
.decl
= unterminated_array (src
);
3038 /* Avoid folding calls with unterminated arrays. */
3039 if (!gimple_no_warning_p (stmt
))
3040 warn_string_no_nul (loc
, "stpcpy", src
, data
.decl
);
3041 gimple_set_no_warning (stmt
, true);
3045 if (optimize_function_for_size_p (cfun
)
3046 /* If length is zero it's small enough. */
3047 && !integer_zerop (len
))
3050 /* If the source has a known length replace stpcpy with memcpy. */
3051 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
3055 gimple_seq stmts
= NULL
;
3056 tree tem
= gimple_convert (&stmts
, loc
, size_type_node
, len
);
3057 lenp1
= gimple_build (&stmts
, loc
, PLUS_EXPR
, size_type_node
,
3058 tem
, build_int_cst (size_type_node
, 1));
3059 gsi_insert_seq_before (gsi
, stmts
, GSI_SAME_STMT
);
3060 gcall
*repl
= gimple_build_call (fn
, 3, dest
, src
, lenp1
);
3061 gimple_move_vops (repl
, stmt
);
3062 gsi_insert_before (gsi
, repl
, GSI_SAME_STMT
);
3063 /* Replace the result with dest + len. */
3065 tem
= gimple_convert (&stmts
, loc
, sizetype
, len
);
3066 gsi_insert_seq_before (gsi
, stmts
, GSI_SAME_STMT
);
3067 gassign
*ret
= gimple_build_assign (gimple_call_lhs (stmt
),
3068 POINTER_PLUS_EXPR
, dest
, tem
);
3069 gsi_replace (gsi
, ret
, false);
3070 /* Finally fold the memcpy call. */
3071 gimple_stmt_iterator gsi2
= *gsi
;
3077 /* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS. Return
3078 NULL_TREE if a normal call should be emitted rather than expanding
3079 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
3080 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
3081 passed as second argument. */
3084 gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator
*gsi
,
3085 enum built_in_function fcode
)
3087 gcall
*stmt
= as_a
<gcall
*> (gsi_stmt (*gsi
));
3088 tree dest
, size
, len
, fn
, fmt
, flag
;
3089 const char *fmt_str
;
3091 /* Verify the required arguments in the original call. */
3092 if (gimple_call_num_args (stmt
) < 5)
3095 dest
= gimple_call_arg (stmt
, 0);
3096 len
= gimple_call_arg (stmt
, 1);
3097 flag
= gimple_call_arg (stmt
, 2);
3098 size
= gimple_call_arg (stmt
, 3);
3099 fmt
= gimple_call_arg (stmt
, 4);
3101 if (! tree_fits_uhwi_p (size
))
3104 if (! integer_all_onesp (size
))
3106 tree maxlen
= get_maxval_strlen (len
, SRK_INT_VALUE
);
3107 if (! tree_fits_uhwi_p (len
))
3109 /* If LEN is not constant, try MAXLEN too.
3110 For MAXLEN only allow optimizing into non-_ocs function
3111 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
3112 if (maxlen
== NULL_TREE
|| ! tree_fits_uhwi_p (maxlen
))
3118 if (tree_int_cst_lt (size
, maxlen
))
3122 if (!init_target_chars ())
3125 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
3126 or if format doesn't contain % chars or is "%s". */
3127 if (! integer_zerop (flag
))
3129 fmt_str
= c_getstr (fmt
);
3130 if (fmt_str
== NULL
)
3132 if (strchr (fmt_str
, target_percent
) != NULL
3133 && strcmp (fmt_str
, target_percent_s
))
3137 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
3139 fn
= builtin_decl_explicit (fcode
== BUILT_IN_VSNPRINTF_CHK
3140 ? BUILT_IN_VSNPRINTF
: BUILT_IN_SNPRINTF
);
3144 /* Replace the called function and the first 5 argument by 3 retaining
3145 trailing varargs. */
3146 gimple_call_set_fndecl (stmt
, fn
);
3147 gimple_call_set_fntype (stmt
, TREE_TYPE (fn
));
3148 gimple_call_set_arg (stmt
, 0, dest
);
3149 gimple_call_set_arg (stmt
, 1, len
);
3150 gimple_call_set_arg (stmt
, 2, fmt
);
3151 for (unsigned i
= 3; i
< gimple_call_num_args (stmt
) - 2; ++i
)
3152 gimple_call_set_arg (stmt
, i
, gimple_call_arg (stmt
, i
+ 2));
3153 gimple_set_num_ops (stmt
, gimple_num_ops (stmt
) - 2);
3158 /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
3159 Return NULL_TREE if a normal call should be emitted rather than
3160 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
3161 or BUILT_IN_VSPRINTF_CHK. */
3164 gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator
*gsi
,
3165 enum built_in_function fcode
)
3167 gcall
*stmt
= as_a
<gcall
*> (gsi_stmt (*gsi
));
3168 tree dest
, size
, len
, fn
, fmt
, flag
;
3169 const char *fmt_str
;
3170 unsigned nargs
= gimple_call_num_args (stmt
);
3172 /* Verify the required arguments in the original call. */
3175 dest
= gimple_call_arg (stmt
, 0);
3176 flag
= gimple_call_arg (stmt
, 1);
3177 size
= gimple_call_arg (stmt
, 2);
3178 fmt
= gimple_call_arg (stmt
, 3);
3180 if (! tree_fits_uhwi_p (size
))
3185 if (!init_target_chars ())
3188 /* Check whether the format is a literal string constant. */
3189 fmt_str
= c_getstr (fmt
);
3190 if (fmt_str
!= NULL
)
3192 /* If the format doesn't contain % args or %%, we know the size. */
3193 if (strchr (fmt_str
, target_percent
) == 0)
3195 if (fcode
!= BUILT_IN_SPRINTF_CHK
|| nargs
== 4)
3196 len
= build_int_cstu (size_type_node
, strlen (fmt_str
));
3198 /* If the format is "%s" and first ... argument is a string literal,
3199 we know the size too. */
3200 else if (fcode
== BUILT_IN_SPRINTF_CHK
3201 && strcmp (fmt_str
, target_percent_s
) == 0)
3207 arg
= gimple_call_arg (stmt
, 4);
3208 if (POINTER_TYPE_P (TREE_TYPE (arg
)))
3210 len
= c_strlen (arg
, 1);
3211 if (! len
|| ! tree_fits_uhwi_p (len
))
3218 if (! integer_all_onesp (size
))
3220 if (! len
|| ! tree_int_cst_lt (len
, size
))
3224 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
3225 or if format doesn't contain % chars or is "%s". */
3226 if (! integer_zerop (flag
))
3228 if (fmt_str
== NULL
)
3230 if (strchr (fmt_str
, target_percent
) != NULL
3231 && strcmp (fmt_str
, target_percent_s
))
3235 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
3236 fn
= builtin_decl_explicit (fcode
== BUILT_IN_VSPRINTF_CHK
3237 ? BUILT_IN_VSPRINTF
: BUILT_IN_SPRINTF
);
3241 /* Replace the called function and the first 4 argument by 2 retaining
3242 trailing varargs. */
3243 gimple_call_set_fndecl (stmt
, fn
);
3244 gimple_call_set_fntype (stmt
, TREE_TYPE (fn
));
3245 gimple_call_set_arg (stmt
, 0, dest
);
3246 gimple_call_set_arg (stmt
, 1, fmt
);
3247 for (unsigned i
= 2; i
< gimple_call_num_args (stmt
) - 2; ++i
)
3248 gimple_call_set_arg (stmt
, i
, gimple_call_arg (stmt
, i
+ 2));
3249 gimple_set_num_ops (stmt
, gimple_num_ops (stmt
) - 2);
3254 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
3255 ORIG may be null if this is a 2-argument call. We don't attempt to
3256 simplify calls with more than 3 arguments.
3258 Return true if simplification was possible, otherwise false. */
3261 gimple_fold_builtin_sprintf (gimple_stmt_iterator
*gsi
)
3263 gimple
*stmt
= gsi_stmt (*gsi
);
3264 tree dest
= gimple_call_arg (stmt
, 0);
3265 tree fmt
= gimple_call_arg (stmt
, 1);
3266 tree orig
= NULL_TREE
;
3267 const char *fmt_str
= NULL
;
3269 /* Verify the required arguments in the original call. We deal with two
3270 types of sprintf() calls: 'sprintf (str, fmt)' and
3271 'sprintf (dest, "%s", orig)'. */
3272 if (gimple_call_num_args (stmt
) > 3)
3275 if (gimple_call_num_args (stmt
) == 3)
3276 orig
= gimple_call_arg (stmt
, 2);
3278 /* Check whether the format is a literal string constant. */
3279 fmt_str
= c_getstr (fmt
);
3280 if (fmt_str
== NULL
)
3283 if (!init_target_chars ())
3286 /* If the format doesn't contain % args or %%, use strcpy. */
3287 if (strchr (fmt_str
, target_percent
) == NULL
)
3289 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
3294 /* Don't optimize sprintf (buf, "abc", ptr++). */
3298 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
3299 'format' is known to contain no % formats. */
3300 gimple_seq stmts
= NULL
;
3301 gimple
*repl
= gimple_build_call (fn
, 2, dest
, fmt
);
3303 /* Propagate the NO_WARNING bit to avoid issuing the same
3304 warning more than once. */
3305 if (gimple_no_warning_p (stmt
))
3306 gimple_set_no_warning (repl
, true);
3308 gimple_seq_add_stmt_without_update (&stmts
, repl
);
3309 if (tree lhs
= gimple_call_lhs (stmt
))
3311 repl
= gimple_build_assign (lhs
, build_int_cst (TREE_TYPE (lhs
),
3313 gimple_seq_add_stmt_without_update (&stmts
, repl
);
3314 gsi_replace_with_seq_vops (gsi
, stmts
);
3315 /* gsi now points at the assignment to the lhs, get a
3316 stmt iterator to the memcpy call.
3317 ??? We can't use gsi_for_stmt as that doesn't work when the
3318 CFG isn't built yet. */
3319 gimple_stmt_iterator gsi2
= *gsi
;
3325 gsi_replace_with_seq_vops (gsi
, stmts
);
3331 /* If the format is "%s", use strcpy if the result isn't used. */
3332 else if (fmt_str
&& strcmp (fmt_str
, target_percent_s
) == 0)
3335 fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
3340 /* Don't crash on sprintf (str1, "%s"). */
3344 tree orig_len
= NULL_TREE
;
3345 if (gimple_call_lhs (stmt
))
3347 orig_len
= get_maxval_strlen (orig
, SRK_STRLEN
);
3352 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
3353 gimple_seq stmts
= NULL
;
3354 gimple
*repl
= gimple_build_call (fn
, 2, dest
, orig
);
3356 /* Propagate the NO_WARNING bit to avoid issuing the same
3357 warning more than once. */
3358 if (gimple_no_warning_p (stmt
))
3359 gimple_set_no_warning (repl
, true);
3361 gimple_seq_add_stmt_without_update (&stmts
, repl
);
3362 if (tree lhs
= gimple_call_lhs (stmt
))
3364 if (!useless_type_conversion_p (TREE_TYPE (lhs
),
3365 TREE_TYPE (orig_len
)))
3366 orig_len
= fold_convert (TREE_TYPE (lhs
), orig_len
);
3367 repl
= gimple_build_assign (lhs
, orig_len
);
3368 gimple_seq_add_stmt_without_update (&stmts
, repl
);
3369 gsi_replace_with_seq_vops (gsi
, stmts
);
3370 /* gsi now points at the assignment to the lhs, get a
3371 stmt iterator to the memcpy call.
3372 ??? We can't use gsi_for_stmt as that doesn't work when the
3373 CFG isn't built yet. */
3374 gimple_stmt_iterator gsi2
= *gsi
;
3380 gsi_replace_with_seq_vops (gsi
, stmts
);
3388 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
3389 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
3390 attempt to simplify calls with more than 4 arguments.
3392 Return true if simplification was possible, otherwise false. */
/* NOTE(review): this chunk is a damaged extraction -- the embedded upstream
   line numbers skip values (e.g. 3404 -> 3407), so braces, returns and some
   statements are missing.  Code text kept byte-identical; comments only.
   Per the surviving comments: folds snprintf (dest, destsize, fmt[, orig])
   into strcpy when the literal format contains no '%' or is exactly "%s"
   and the known string length fits below DESTSIZE; when the call's lhs is
   used, also emits an assignment of the known length to the lhs.  */
3395 gimple_fold_builtin_snprintf (gimple_stmt_iterator
*gsi
)
3397 gcall
*stmt
= as_a
<gcall
*> (gsi_stmt (*gsi
));
3398 tree dest
= gimple_call_arg (stmt
, 0);
3399 tree destsize
= gimple_call_arg (stmt
, 1);
3400 tree fmt
= gimple_call_arg (stmt
, 2);
3401 tree orig
= NULL_TREE
;
3402 const char *fmt_str
= NULL
;
/* Calls with more than 4 arguments are not simplified (see the block
   comment above this function).  */
3404 if (gimple_call_num_args (stmt
) > 4)
3407 if (gimple_call_num_args (stmt
) == 4)
3408 orig
= gimple_call_arg (stmt
, 3);
/* DESTSIZE must be a compile-time constant for any of the folds below.  */
3410 if (!tree_fits_uhwi_p (destsize
))
3412 unsigned HOST_WIDE_INT destlen
= tree_to_uhwi (destsize
);
3414 /* Check whether the format is a literal string constant. */
3415 fmt_str
= c_getstr (fmt
);
3416 if (fmt_str
== NULL
)
3419 if (!init_target_chars ())
3422 /* If the format doesn't contain % args or %%, use strcpy. */
3423 if (strchr (fmt_str
, target_percent
) == NULL
)
3425 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
3429 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
3433 /* We could expand this as
3434 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
3436 memcpy (str, fmt_with_nul_at_cstm1, cst);
3437 but in the former case that might increase code size
3438 and in the latter case grow .rodata section too much.
3440 size_t len
= strlen (fmt_str
);
3444 gimple_seq stmts
= NULL
;
3445 gimple
*repl
= gimple_build_call (fn
, 2, dest
, fmt
);
3446 gimple_seq_add_stmt_without_update (&stmts
, repl
);
/* snprintf returns the number of characters that would have been
   written; when the lhs is live, materialize that constant.  */
3447 if (tree lhs
= gimple_call_lhs (stmt
))
3449 repl
= gimple_build_assign (lhs
,
3450 build_int_cst (TREE_TYPE (lhs
), len
));
3451 gimple_seq_add_stmt_without_update (&stmts
, repl
);
3452 gsi_replace_with_seq_vops (gsi
, stmts
);
3453 /* gsi now points at the assignment to the lhs, get a
3454 stmt iterator to the memcpy call.
3455 ??? We can't use gsi_for_stmt as that doesn't work when the
3456 CFG isn't built yet. */
3457 gimple_stmt_iterator gsi2
= *gsi
;
3463 gsi_replace_with_seq_vops (gsi
, stmts
);
3469 /* If the format is "%s", use strcpy if the result isn't used. */
3470 else if (fmt_str
&& strcmp (fmt_str
, target_percent_s
) == 0)
3472 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
3476 /* Don't crash on snprintf (str1, cst, "%s"). */
/* ORIG's maximum length must be a known constant strictly below
   DESTLEN for the strcpy replacement to be safe.  */
3480 tree orig_len
= get_maxval_strlen (orig
, SRK_STRLEN
);
3481 if (!orig_len
|| TREE_CODE (orig_len
) != INTEGER_CST
)
3484 /* We could expand this as
3485 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
3487 memcpy (str1, str2_with_nul_at_cstm1, cst);
3488 but in the former case that might increase code size
3489 and in the latter case grow .rodata section too much.
3491 if (compare_tree_int (orig_len
, destlen
) >= 0)
3494 /* Convert snprintf (str1, cst, "%s", str2) into
3495 strcpy (str1, str2) if strlen (str2) < cst. */
3496 gimple_seq stmts
= NULL
;
3497 gimple
*repl
= gimple_build_call (fn
, 2, dest
, orig
);
3498 gimple_seq_add_stmt_without_update (&stmts
, repl
);
3499 if (tree lhs
= gimple_call_lhs (stmt
))
3501 if (!useless_type_conversion_p (TREE_TYPE (lhs
),
3502 TREE_TYPE (orig_len
)))
3503 orig_len
= fold_convert (TREE_TYPE (lhs
), orig_len
);
3504 repl
= gimple_build_assign (lhs
, orig_len
);
3505 gimple_seq_add_stmt_without_update (&stmts
, repl
);
3506 gsi_replace_with_seq_vops (gsi
, stmts
);
3507 /* gsi now points at the assignment to the lhs, get a
3508 stmt iterator to the memcpy call.
3509 ??? We can't use gsi_for_stmt as that doesn't work when the
3510 CFG isn't built yet. */
3511 gimple_stmt_iterator gsi2
= *gsi
;
3517 gsi_replace_with_seq_vops (gsi
, stmts
);
3525 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
3526 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
3527 more than 3 arguments, and ARG may be null in the 2-argument case.
3529 Return NULL_TREE if no simplification was possible, otherwise return the
3530 simplified form of the call as a tree. FCODE is the BUILT_IN_*
3531 code of the function to be simplified. */
/* NOTE(review): damaged extraction -- upstream line numbers jump, so some
   statements/braces are elided.  Text kept byte-identical; comments only.
   Folds fprintf-family calls (FCODE selects the variant) whose format FMT
   is a string literal:  "" -> no-op, no-'%' -> fputs (fmt, fp),
   "%s" -> fputs (arg, fp), "%c" -> fputc (arg, fp).  Only done when the
   call's return value is unused.  */
3534 gimple_fold_builtin_fprintf (gimple_stmt_iterator
*gsi
,
3535 tree fp
, tree fmt
, tree arg
,
3536 enum built_in_function fcode
)
3538 gcall
*stmt
= as_a
<gcall
*> (gsi_stmt (*gsi
));
3539 tree fn_fputc
, fn_fputs
;
3540 const char *fmt_str
= NULL
;
3542 /* If the return value is used, don't do the transformation. */
3543 if (gimple_call_lhs (stmt
) != NULL_TREE
)
3546 /* Check whether the format is a literal string constant. */
3547 fmt_str
= c_getstr (fmt
);
3548 if (fmt_str
== NULL
)
3551 if (fcode
== BUILT_IN_FPRINTF_UNLOCKED
)
3553 /* If we're using an unlocked function, assume the other
3554 unlocked functions exist explicitly. */
3555 fn_fputc
= builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED
);
3556 fn_fputs
= builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED
);
3560 fn_fputc
= builtin_decl_implicit (BUILT_IN_FPUTC
);
3561 fn_fputs
= builtin_decl_implicit (BUILT_IN_FPUTS
);
3564 if (!init_target_chars ())
3567 /* If the format doesn't contain % args or %%, use strcpy. */
3568 if (strchr (fmt_str
, target_percent
) == NULL
)
/* va_list variants can't be folded this way (trailing args unknown).  */
3570 if (fcode
!= BUILT_IN_VFPRINTF
&& fcode
!= BUILT_IN_VFPRINTF_CHK
3574 /* If the format specifier was "", fprintf does nothing. */
3575 if (fmt_str
[0] == '\0')
3577 replace_call_with_value (gsi
, NULL_TREE
);
3581 /* When "string" doesn't contain %, replace all cases of
3582 fprintf (fp, string) with fputs (string, fp). The fputs
3583 builtin will take care of special cases like length == 1. */
3586 gcall
*repl
= gimple_build_call (fn_fputs
, 2, fmt
, fp
);
3587 replace_call_with_call_and_fold (gsi
, repl
);
3592 /* The other optimizations can be done only on the non-va_list variants. */
3593 else if (fcode
== BUILT_IN_VFPRINTF
|| fcode
== BUILT_IN_VFPRINTF_CHK
)
3596 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
3597 else if (strcmp (fmt_str
, target_percent_s
) == 0)
3599 if (!arg
|| ! POINTER_TYPE_P (TREE_TYPE (arg
)))
3603 gcall
*repl
= gimple_build_call (fn_fputs
, 2, arg
, fp
);
3604 replace_call_with_call_and_fold (gsi
, repl
);
3609 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
3610 else if (strcmp (fmt_str
, target_percent_c
) == 0)
3613 || ! useless_type_conversion_p (integer_type_node
, TREE_TYPE (arg
)))
3617 gcall
*repl
= gimple_build_call (fn_fputc
, 2, arg
, fp
);
3618 replace_call_with_call_and_fold (gsi
, repl
);
3626 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
3627 FMT and ARG are the arguments to the call; we don't fold cases with
3628 more than 2 arguments, and ARG may be null if this is a 1-argument case.
3630 Return NULL_TREE if no simplification was possible, otherwise return the
3631 simplified form of the call as a tree. FCODE is the BUILT_IN_*
3632 code of the function to be simplified. */
/* NOTE(review): damaged extraction -- upstream line numbers jump, so some
   statements/braces are elided.  Text kept byte-identical; comments only.
   Folds printf-family calls with a literal format FMT when the return value
   is unused:  "" -> no-op, single char -> putchar, "str\n" -> puts ("str"),
   "%s\n" -> puts (arg), "%c" -> putchar (arg).  */
3635 gimple_fold_builtin_printf (gimple_stmt_iterator
*gsi
, tree fmt
,
3636 tree arg
, enum built_in_function fcode
)
3638 gcall
*stmt
= as_a
<gcall
*> (gsi_stmt (*gsi
));
3639 tree fn_putchar
, fn_puts
, newarg
;
3640 const char *fmt_str
= NULL
;
3642 /* If the return value is used, don't do the transformation. */
3643 if (gimple_call_lhs (stmt
) != NULL_TREE
)
3646 /* Check whether the format is a literal string constant. */
3647 fmt_str
= c_getstr (fmt
);
3648 if (fmt_str
== NULL
)
3651 if (fcode
== BUILT_IN_PRINTF_UNLOCKED
)
3653 /* If we're using an unlocked function, assume the other
3654 unlocked functions exist explicitly. */
3655 fn_putchar
= builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED
);
3656 fn_puts
= builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED
);
3660 fn_putchar
= builtin_decl_implicit (BUILT_IN_PUTCHAR
);
3661 fn_puts
= builtin_decl_implicit (BUILT_IN_PUTS
);
3664 if (!init_target_chars ())
/* Handle the "%s" format and formats with no '%' at all together:
   either way STR below ends up being the literal text to print.  */
3667 if (strcmp (fmt_str
, target_percent_s
) == 0
3668 || strchr (fmt_str
, target_percent
) == NULL
)
3672 if (strcmp (fmt_str
, target_percent_s
) == 0)
3674 if (fcode
== BUILT_IN_VPRINTF
|| fcode
== BUILT_IN_VPRINTF_CHK
)
3677 if (!arg
|| ! POINTER_TYPE_P (TREE_TYPE (arg
)))
3680 str
= c_getstr (arg
);
3686 /* The format specifier doesn't contain any '%' characters. */
3687 if (fcode
!= BUILT_IN_VPRINTF
&& fcode
!= BUILT_IN_VPRINTF_CHK
3693 /* If the string was "", printf does nothing. */
3696 replace_call_with_value (gsi
, NULL_TREE
);
3700 /* If the string has length of 1, call putchar. */
3703 /* Given printf("c"), (where c is any one character,)
3704 convert "c"[0] to an int and pass that to the replacement
3706 newarg
= build_int_cst (integer_type_node
, str
[0]);
3709 gcall
*repl
= gimple_build_call (fn_putchar
, 1, newarg
);
3710 replace_call_with_call_and_fold (gsi
, repl
);
3716 /* If the string was "string\n", call puts("string"). */
3717 size_t len
= strlen (str
);
/* Only fold when the length also fits in an int (puts/printf ABI).  */
3718 if ((unsigned char)str
[len
- 1] == target_newline
3719 && (size_t) (int) len
== len
3724 /* Create a NUL-terminated string that's one char shorter
3725 than the original, stripping off the trailing '\n'. */
3726 newstr
= xstrdup (str
);
3727 newstr
[len
- 1] = '\0';
3728 newarg
= build_string_literal (len
, newstr
);
3732 gcall
*repl
= gimple_build_call (fn_puts
, 1, newarg
);
3733 replace_call_with_call_and_fold (gsi
, repl
);
3738 /* We'd like to arrange to call fputs(string,stdout) here,
3739 but we need stdout and don't have a way to get it yet. */
3744 /* The other optimizations can be done only on the non-va_list variants. */
3745 else if (fcode
== BUILT_IN_VPRINTF
|| fcode
== BUILT_IN_VPRINTF_CHK
)
3748 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
3749 else if (strcmp (fmt_str
, target_percent_s_newline
) == 0)
3751 if (!arg
|| ! POINTER_TYPE_P (TREE_TYPE (arg
)))
3755 gcall
*repl
= gimple_build_call (fn_puts
, 1, arg
);
3756 replace_call_with_call_and_fold (gsi
, repl
);
3761 /* If the format specifier was "%c", call __builtin_putchar(arg). */
3762 else if (strcmp (fmt_str
, target_percent_c
) == 0)
3764 if (!arg
|| ! useless_type_conversion_p (integer_type_node
,
3769 gcall
*repl
= gimple_build_call (fn_putchar
, 1, arg
);
3770 replace_call_with_call_and_fold (gsi
, repl
);
3780 /* Fold a call to __builtin_strlen with known length LEN. */
/* NOTE(review): damaged extraction -- upstream line numbers jump, so some
   statements/braces are elided.  Text kept byte-identical; comments only.
   Computes a [minlen, maxlen] range for strlen's argument via
   get_range_strlen; when the range collapses to a single value the call is
   replaced by that constant, otherwise the range is recorded on the lhs
   with set_strlen_range.  */
3783 gimple_fold_builtin_strlen (gimple_stmt_iterator
*gsi
)
3785 gimple
*stmt
= gsi_stmt (*gsi
);
3786 tree arg
= gimple_call_arg (stmt
, 0);
3791 c_strlen_data lendata
= { };
3792 if (get_range_strlen (arg
, &lendata
, /* eltsize = */ 1)
3794 && lendata
.minlen
&& TREE_CODE (lendata
.minlen
) == INTEGER_CST
3795 && lendata
.maxlen
&& TREE_CODE (lendata
.maxlen
) == INTEGER_CST
)
3797 /* The range of lengths refers to either a single constant
3798 string or to the longest and shortest constant string
3799 referenced by the argument of the strlen() call, or to
3800 the strings that can possibly be stored in the arrays
3801 the argument refers to. */
3802 minlen
= wi::to_wide (lendata
.minlen
);
3803 maxlen
= wi::to_wide (lendata
.maxlen
);
/* Fallback range when no constant bounds were found:
   [0, max_object_size () - 2] at sizetype precision.  */
3807 unsigned prec
= TYPE_PRECISION (sizetype
);
3809 minlen
= wi::shwi (0, prec
);
3810 maxlen
= wi::to_wide (max_object_size (), prec
) - 2;
3813 if (minlen
== maxlen
)
3815 /* Fold the strlen call to a constant. */
3816 tree type
= TREE_TYPE (lendata
.minlen
);
3817 tree len
= force_gimple_operand_gsi (gsi
,
3818 wide_int_to_tree (type
, minlen
),
3819 true, NULL
, true, GSI_SAME_STMT
);
3820 replace_call_with_value (gsi
, len
);
3824 /* Set the strlen() range to [0, MAXLEN]. */
3825 if (tree lhs
= gimple_call_lhs (stmt
))
3826 set_strlen_range (lhs
, minlen
, maxlen
);
3831 /* Fold a call to __builtin_acc_on_device. */
/* NOTE(review): damaged extraction -- some upstream lines elided (numbers
   jump).  Text kept byte-identical; comments only.
   Lowers __builtin_acc_on_device (arg0) to
   (arg0 == val_host) | (arg0 == val_dev), where the two values depend on
   whether this is the host or the accelerator compiler (ACCEL_COMPILER).  */
3834 gimple_fold_builtin_acc_on_device (gimple_stmt_iterator
*gsi
, tree arg0
)
3836 /* Defer folding until we know which compiler we're in. */
3837 if (symtab
->state
!= EXPANSION
)
3840 unsigned val_host
= GOMP_DEVICE_HOST
;
3841 unsigned val_dev
= GOMP_DEVICE_NONE
;
3843 #ifdef ACCEL_COMPILER
3844 val_host
= GOMP_DEVICE_NOT_HOST
;
3845 val_dev
= ACCEL_COMPILER_acc_device
;
3848 location_t loc
= gimple_location (gsi_stmt (*gsi
));
/* host_eq = (arg0 == val_host)  */
3850 tree host_eq
= make_ssa_name (boolean_type_node
);
3851 gimple
*host_ass
= gimple_build_assign
3852 (host_eq
, EQ_EXPR
, arg0
, build_int_cst (TREE_TYPE (arg0
), val_host
));
3853 gimple_set_location (host_ass
, loc
);
3854 gsi_insert_before (gsi
, host_ass
, GSI_SAME_STMT
);
/* dev_eq = (arg0 == val_dev)  */
3856 tree dev_eq
= make_ssa_name (boolean_type_node
);
3857 gimple
*dev_ass
= gimple_build_assign
3858 (dev_eq
, EQ_EXPR
, arg0
, build_int_cst (TREE_TYPE (arg0
), val_dev
));
3859 gimple_set_location (dev_ass
, loc
);
3860 gsi_insert_before (gsi
, dev_ass
, GSI_SAME_STMT
);
/* result = host_eq | dev_eq  */
3862 tree result
= make_ssa_name (boolean_type_node
);
3863 gimple
*result_ass
= gimple_build_assign
3864 (result
, BIT_IOR_EXPR
, host_eq
, dev_eq
);
3865 gimple_set_location (result_ass
, loc
);
3866 gsi_insert_before (gsi
, result_ass
, GSI_SAME_STMT
);
3868 replace_call_with_value (gsi
, result
);
3873 /* Fold realloc (0, n) -> malloc (n). */
/* NOTE(review): damaged extraction -- some upstream lines elided (numbers
   jump).  Text kept byte-identical; comments only.
   Folds realloc (0, n) into malloc (n), per the comment at 3873.  */
3876 gimple_fold_builtin_realloc (gimple_stmt_iterator
*gsi
)
3878 gimple
*stmt
= gsi_stmt (*gsi
);
3879 tree arg
= gimple_call_arg (stmt
, 0);
3880 tree size
= gimple_call_arg (stmt
, 1);
/* Only the NULL-pointer first argument is handled.  */
3882 if (operand_equal_p (arg
, null_pointer_node
, 0))
3884 tree fn_malloc
= builtin_decl_implicit (BUILT_IN_MALLOC
);
3887 gcall
*repl
= gimple_build_call (fn_malloc
, 1, size
);
3888 replace_call_with_call_and_fold (gsi
, repl
);
3895 /* Fold the non-target builtin at *GSI and return whether any simplification
/* NOTE(review): damaged extraction -- the opening of the switch, some case
   labels and the closing of the function are elided (upstream numbers jump).
   Text kept byte-identical; comments only.
   Central dispatcher: folds the normal (non-target) builtin at *GSI by
   routing on DECL_FUNCTION_CODE to the per-builtin folders above, and
   finally falls back to the generic tree-level folder fold_call_stmt.  */
3899 gimple_fold_builtin (gimple_stmt_iterator
*gsi
)
3901 gcall
*stmt
= as_a
<gcall
*>(gsi_stmt (*gsi
));
3902 tree callee
= gimple_call_fndecl (stmt
);
3904 /* Give up for always_inline inline builtins until they are
3906 if (avoid_folding_inline_builtin (callee
))
3909 unsigned n
= gimple_call_num_args (stmt
);
3910 enum built_in_function fcode
= DECL_FUNCTION_CODE (callee
);
3914 return gimple_fold_builtin_bcmp (gsi
);
3915 case BUILT_IN_BCOPY
:
3916 return gimple_fold_builtin_bcopy (gsi
);
3917 case BUILT_IN_BZERO
:
3918 return gimple_fold_builtin_bzero (gsi
);
3920 case BUILT_IN_MEMSET
:
3921 return gimple_fold_builtin_memset (gsi
,
3922 gimple_call_arg (stmt
, 1),
3923 gimple_call_arg (stmt
, 2));
3924 case BUILT_IN_MEMCPY
:
3925 case BUILT_IN_MEMPCPY
:
3926 case BUILT_IN_MEMMOVE
:
3927 return gimple_fold_builtin_memory_op (gsi
, gimple_call_arg (stmt
, 0),
3928 gimple_call_arg (stmt
, 1), fcode
);
3929 case BUILT_IN_SPRINTF_CHK
:
3930 case BUILT_IN_VSPRINTF_CHK
:
3931 return gimple_fold_builtin_sprintf_chk (gsi
, fcode
);
3932 case BUILT_IN_STRCAT_CHK
:
3933 return gimple_fold_builtin_strcat_chk (gsi
);
3934 case BUILT_IN_STRNCAT_CHK
:
3935 return gimple_fold_builtin_strncat_chk (gsi
);
3936 case BUILT_IN_STRLEN
:
3937 return gimple_fold_builtin_strlen (gsi
);
3938 case BUILT_IN_STRCPY
:
3939 return gimple_fold_builtin_strcpy (gsi
,
3940 gimple_call_arg (stmt
, 0),
3941 gimple_call_arg (stmt
, 1));
3942 case BUILT_IN_STRNCPY
:
3943 return gimple_fold_builtin_strncpy (gsi
,
3944 gimple_call_arg (stmt
, 0),
3945 gimple_call_arg (stmt
, 1),
3946 gimple_call_arg (stmt
, 2));
3947 case BUILT_IN_STRCAT
:
3948 return gimple_fold_builtin_strcat (gsi
, gimple_call_arg (stmt
, 0),
3949 gimple_call_arg (stmt
, 1));
3950 case BUILT_IN_STRNCAT
:
3951 return gimple_fold_builtin_strncat (gsi
);
/* index/strchr and rindex/strrchr share one folder; the bool selects
   forward (false) vs. reverse (true) search.  */
3952 case BUILT_IN_INDEX
:
3953 case BUILT_IN_STRCHR
:
3954 return gimple_fold_builtin_strchr (gsi
, false);
3955 case BUILT_IN_RINDEX
:
3956 case BUILT_IN_STRRCHR
:
3957 return gimple_fold_builtin_strchr (gsi
, true);
3958 case BUILT_IN_STRSTR
:
3959 return gimple_fold_builtin_strstr (gsi
);
3960 case BUILT_IN_STRCMP
:
3961 case BUILT_IN_STRCMP_EQ
:
3962 case BUILT_IN_STRCASECMP
:
3963 case BUILT_IN_STRNCMP
:
3964 case BUILT_IN_STRNCMP_EQ
:
3965 case BUILT_IN_STRNCASECMP
:
3966 return gimple_fold_builtin_string_compare (gsi
);
3967 case BUILT_IN_MEMCHR
:
3968 return gimple_fold_builtin_memchr (gsi
);
3969 case BUILT_IN_FPUTS
:
3970 return gimple_fold_builtin_fputs (gsi
, gimple_call_arg (stmt
, 0),
3971 gimple_call_arg (stmt
, 1), false);
3972 case BUILT_IN_FPUTS_UNLOCKED
:
3973 return gimple_fold_builtin_fputs (gsi
, gimple_call_arg (stmt
, 0),
3974 gimple_call_arg (stmt
, 1), true);
3975 case BUILT_IN_MEMCPY_CHK
:
3976 case BUILT_IN_MEMPCPY_CHK
:
3977 case BUILT_IN_MEMMOVE_CHK
:
3978 case BUILT_IN_MEMSET_CHK
:
3979 return gimple_fold_builtin_memory_chk (gsi
,
3980 gimple_call_arg (stmt
, 0),
3981 gimple_call_arg (stmt
, 1),
3982 gimple_call_arg (stmt
, 2),
3983 gimple_call_arg (stmt
, 3),
3985 case BUILT_IN_STPCPY
:
3986 return gimple_fold_builtin_stpcpy (gsi
);
3987 case BUILT_IN_STRCPY_CHK
:
3988 case BUILT_IN_STPCPY_CHK
:
3989 return gimple_fold_builtin_stxcpy_chk (gsi
,
3990 gimple_call_arg (stmt
, 0),
3991 gimple_call_arg (stmt
, 1),
3992 gimple_call_arg (stmt
, 2),
3994 case BUILT_IN_STRNCPY_CHK
:
3995 case BUILT_IN_STPNCPY_CHK
:
3996 return gimple_fold_builtin_stxncpy_chk (gsi
,
3997 gimple_call_arg (stmt
, 0),
3998 gimple_call_arg (stmt
, 1),
3999 gimple_call_arg (stmt
, 2),
4000 gimple_call_arg (stmt
, 3),
4002 case BUILT_IN_SNPRINTF_CHK
:
4003 case BUILT_IN_VSNPRINTF_CHK
:
4004 return gimple_fold_builtin_snprintf_chk (gsi
, fcode
);
/* printf/fprintf families: argument counts are validated here (N) and
   the optional data argument is passed through when present.  */
4006 case BUILT_IN_FPRINTF
:
4007 case BUILT_IN_FPRINTF_UNLOCKED
:
4008 case BUILT_IN_VFPRINTF
:
4009 if (n
== 2 || n
== 3)
4010 return gimple_fold_builtin_fprintf (gsi
,
4011 gimple_call_arg (stmt
, 0),
4012 gimple_call_arg (stmt
, 1),
4014 ? gimple_call_arg (stmt
, 2)
4018 case BUILT_IN_FPRINTF_CHK
:
4019 case BUILT_IN_VFPRINTF_CHK
:
4020 if (n
== 3 || n
== 4)
4021 return gimple_fold_builtin_fprintf (gsi
,
4022 gimple_call_arg (stmt
, 0),
4023 gimple_call_arg (stmt
, 2),
4025 ? gimple_call_arg (stmt
, 3)
4029 case BUILT_IN_PRINTF
:
4030 case BUILT_IN_PRINTF_UNLOCKED
:
4031 case BUILT_IN_VPRINTF
:
4032 if (n
== 1 || n
== 2)
4033 return gimple_fold_builtin_printf (gsi
, gimple_call_arg (stmt
, 0),
4035 ? gimple_call_arg (stmt
, 1)
4036 : NULL_TREE
, fcode
);
4038 case BUILT_IN_PRINTF_CHK
:
4039 case BUILT_IN_VPRINTF_CHK
:
4040 if (n
== 2 || n
== 3)
4041 return gimple_fold_builtin_printf (gsi
, gimple_call_arg (stmt
, 1),
4043 ? gimple_call_arg (stmt
, 2)
4044 : NULL_TREE
, fcode
);
4046 case BUILT_IN_ACC_ON_DEVICE
:
4047 return gimple_fold_builtin_acc_on_device (gsi
,
4048 gimple_call_arg (stmt
, 0));
4049 case BUILT_IN_REALLOC
:
4050 return gimple_fold_builtin_realloc (gsi
);
4055 /* Try the generic builtin folder. */
4056 bool ignore
= (gimple_call_lhs (stmt
) == NULL
);
4057 tree result
= fold_call_stmt (stmt
, ignore
);
4061 STRIP_NOPS (result
);
4063 result
= fold_convert (gimple_call_return_type (stmt
), result
);
4064 if (!update_call_from_tree (gsi
, result
))
4065 gimplify_and_update_call_from_tree (gsi
, result
);
4072 /* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
4073 function calls to constants, where possible. */
/* NOTE(review): damaged extraction -- switch braces, the size conditions
   and the return are elided (upstream numbers jump).  Text kept
   byte-identical; comments only.
   Folds IFN_GOACC_DIM_POS / IFN_GOACC_DIM_SIZE to a constant when the
   OpenACC dimension size for AXIS is statically known.  */
4076 fold_internal_goacc_dim (const gimple
*call
)
4078 int axis
= oacc_get_ifn_dim_arg (call
);
4079 int size
= oacc_get_fn_dim_size (current_function_decl
, axis
);
4080 tree result
= NULL_TREE
;
4081 tree type
= TREE_TYPE (gimple_call_lhs (call
));
4083 switch (gimple_call_internal_fn (call
))
4085 case IFN_GOACC_DIM_POS
:
4086 /* If the size is 1, we know the answer. */
4088 result
= build_int_cst (type
, 0);
4090 case IFN_GOACC_DIM_SIZE
:
4091 /* If the size is not dynamic, we know the answer. */
4093 result
= build_int_cst (type
, size
)
;
4102 /* Return true if stmt is __atomic_compare_exchange_N call which is suitable
4103 for conversion into ATOMIC_COMPARE_EXCHANGE if the second argument is
4104 &var where var is only addressable because of such calls. */
/* NOTE(review): damaged extraction -- early returns and the final return
   are elided (upstream numbers jump).  Text kept byte-identical;
   comments only.
   Predicate guarding the __atomic_compare_exchange_N -> internal
   ATOMIC_COMPARE_EXCHANGE rewrite: checks argument count, sanitizers,
   virtual operands, the builtin code, the shape and type of the
   'expected' argument, the weak flag, and optab support for MODE.  */
4107 optimize_atomic_compare_exchange_p (gimple
*stmt
)
4109 if (gimple_call_num_args (stmt
) != 6
4110 || !flag_inline_atomics
4112 || sanitize_flags_p (SANITIZE_THREAD
| SANITIZE_ADDRESS
)
4113 || !gimple_call_builtin_p (stmt
, BUILT_IN_NORMAL
)
4114 || !gimple_vdef (stmt
)
4115 || !gimple_vuse (stmt
))
4118 tree fndecl
= gimple_call_fndecl (stmt
);
4119 switch (DECL_FUNCTION_CODE (fndecl
))
4121 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
:
4122 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2
:
4123 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4
:
4124 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8
:
4125 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16
:
/* Arg 1 must be '&var' for an auto SSA-variable of a plain integral
   register type (the checks below reject anything else).  */
4131 tree expected
= gimple_call_arg (stmt
, 1);
4132 if (TREE_CODE (expected
) != ADDR_EXPR
4133 || !SSA_VAR_P (TREE_OPERAND (expected
, 0)))
4136 tree etype
= TREE_TYPE (TREE_OPERAND (expected
, 0));
4137 if (!is_gimple_reg_type (etype
)
4138 || !auto_var_in_fn_p (TREE_OPERAND (expected
, 0), current_function_decl
)
4139 || TREE_THIS_VOLATILE (etype
)
4140 || VECTOR_TYPE_P (etype
)
4141 || TREE_CODE (etype
) == COMPLEX_TYPE
4142 /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
4143 might not preserve all the bits. See PR71716. */
4144 || SCALAR_FLOAT_TYPE_P (etype
)
4145 || maybe_ne (TYPE_PRECISION (etype
),
4146 GET_MODE_BITSIZE (TYPE_MODE (etype
))))
/* The weak flag (arg 3) must be a literal 0 or 1.  */
4149 tree weak
= gimple_call_arg (stmt
, 3);
4150 if (!integer_zerop (weak
) && !integer_onep (weak
))
4153 tree parmt
= TYPE_ARG_TYPES (TREE_TYPE (fndecl
));
4154 tree itype
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt
)));
4155 machine_mode mode
= TYPE_MODE (itype
);
4157 if (direct_optab_handler (atomic_compare_and_swap_optab
, mode
)
4159 && optab_handler (sync_compare_and_swap_optab
, mode
) == CODE_FOR_nothing
)
4162 if (maybe_ne (int_size_in_bytes (etype
), GET_MODE_SIZE (mode
)))
4169 r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
4171 _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
4172 i = IMAGPART_EXPR <t>;
4174 e = REALPART_EXPR <t>; */
/* NOTE(review): damaged extraction -- several statements, braces and
   else-branches are elided (upstream numbers jump).  Text kept
   byte-identical; comments only.
   Rewrites r = __atomic_compare_exchange_N (p, &e, d, w, s, f) into the
   _Complex-valued internal IFN_ATOMIC_COMPARE_EXCHANGE (see the worked
   example in the comment at 4169-4174): the IMAGPART becomes the boolean
   result, the REALPART is stored back into 'e'.  EH edges are handled
   specially when the original statement could throw.  */
4177 fold_builtin_atomic_compare_exchange (gimple_stmt_iterator
*gsi
)
4179 gimple
*stmt
= gsi_stmt (*gsi
);
4180 tree fndecl
= gimple_call_fndecl (stmt
);
4181 tree parmt
= TYPE_ARG_TYPES (TREE_TYPE (fndecl
));
4182 tree itype
= TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt
)));
4183 tree ctype
= build_complex_type (itype
);
4184 tree expected
= TREE_OPERAND (gimple_call_arg (stmt
, 1), 0);
4185 bool throws
= false;
/* Load the current value of 'expected' into an SSA name, converting
   to ITYPE via VIEW_CONVERT_EXPR if the types differ.  */
4187 gimple
*g
= gimple_build_assign (make_ssa_name (TREE_TYPE (expected
)),
4189 gsi_insert_before (gsi
, g
, GSI_SAME_STMT
);
4190 gimple_stmt_iterator gsiret
= gsi_for_stmt (g
);
4191 if (!useless_type_conversion_p (itype
, TREE_TYPE (expected
)))
4193 g
= gimple_build_assign (make_ssa_name (itype
), VIEW_CONVERT_EXPR
,
4194 build1 (VIEW_CONVERT_EXPR
, itype
,
4195 gimple_assign_lhs (g
)));
4196 gsi_insert_before (gsi
, g
, GSI_SAME_STMT
);
/* Encode weak flag and byte size into one integer: w * 256 + N.  */
4198 int flag
= (integer_onep (gimple_call_arg (stmt
, 3)) ? 256 : 0)
4199 + int_size_in_bytes (itype
);
4200 g
= gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE
, 6,
4201 gimple_call_arg (stmt
, 0),
4202 gimple_assign_lhs (g
),
4203 gimple_call_arg (stmt
, 2),
4204 build_int_cst (integer_type_node
, flag
),
4205 gimple_call_arg (stmt
, 4),
4206 gimple_call_arg (stmt
, 5));
4207 tree lhs
= make_ssa_name (ctype
);
4208 gimple_call_set_lhs (g
, lhs
);
4209 gimple_move_vops (g
, stmt
);
4210 tree oldlhs
= gimple_call_lhs (stmt
);
4211 if (stmt_can_throw_internal (cfun
, stmt
))
4214 e
= find_fallthru_edge (gsi_bb (*gsi
)->succs
);
4216 gimple_call_set_nothrow (as_a
<gcall
*> (g
),
4217 gimple_call_nothrow_p (as_a
<gcall
*> (stmt
)));
4218 gimple_call_set_lhs (stmt
, NULL_TREE
);
4219 gsi_replace (gsi
, g
, true);
/* Extract the success flag: oldlhs = (T) IMAGPART_EXPR <t>.  */
4222 g
= gimple_build_assign (make_ssa_name (itype
), IMAGPART_EXPR
,
4223 build1 (IMAGPART_EXPR
, itype
, lhs
));
4226 gsi_insert_on_edge_immediate (e
, g
);
4227 *gsi
= gsi_for_stmt (g
);
4230 gsi_insert_after (gsi
, g
, GSI_NEW_STMT
);
4231 g
= gimple_build_assign (oldlhs
, NOP_EXPR
, gimple_assign_lhs (g
));
4232 gsi_insert_after (gsi
, g
, GSI_NEW_STMT
);
/* Extract the old memory value: expected = REALPART_EXPR <t>,
   again via VIEW_CONVERT_EXPR when the types differ.  */
4234 g
= gimple_build_assign (make_ssa_name (itype
), REALPART_EXPR
,
4235 build1 (REALPART_EXPR
, itype
, lhs
));
4236 if (throws
&& oldlhs
== NULL_TREE
)
4238 gsi_insert_on_edge_immediate (e
, g
);
4239 *gsi
= gsi_for_stmt (g
);
4242 gsi_insert_after (gsi
, g
, GSI_NEW_STMT
);
4243 if (!useless_type_conversion_p (TREE_TYPE (expected
), itype
))
4245 g
= gimple_build_assign (make_ssa_name (TREE_TYPE (expected
)),
4247 build1 (VIEW_CONVERT_EXPR
, TREE_TYPE (expected
),
4248 gimple_assign_lhs (g
)));
4249 gsi_insert_after (gsi
, g
, GSI_NEW_STMT
);
4251 g
= gimple_build_assign (expected
, SSA_NAME
, gimple_assign_lhs (g
));
4252 gsi_insert_after (gsi
, g
, GSI_NEW_STMT
);
4256 /* Return true if ARG0 CODE ARG1 in infinite signed precision operation
4257 doesn't fit into TYPE. The test for overflow should be regardless of
4258 -fwrapv, and even for unsigned types. */
/* NOTE(review): damaged extraction -- the 'wres' declaration, switch
   braces and the unsigned early-return body are elided (upstream numbers
   jump).  Text kept byte-identical; comments only.
   Tests whether ARG0 CODE ARG1, computed at double-widest precision,
   fails to fit in TYPE -- independent of -fwrapv, for signed and
   unsigned types alike (see the comment at 4256-4258).  */
4261 arith_overflowed_p (enum tree_code code
, const_tree type
,
4262 const_tree arg0
, const_tree arg1
)
4264 widest2_int warg0
= widest2_int_cst (arg0
);
4265 widest2_int warg1
= widest2_int_cst (arg1
);
4269 case PLUS_EXPR
: wres
= wi::add (warg0
, warg1
); break;
4270 case MINUS_EXPR
: wres
= wi::sub (warg0
, warg1
); break;
4271 case MULT_EXPR
: wres
= wi::mul (warg0
, warg1
); break;
4272 default: gcc_unreachable ();
/* A negative infinite-precision result can never fit an unsigned type.  */
4274 signop sign
= TYPE_SIGN (type
);
4275 if (sign
== UNSIGNED
&& wi::neg_p (wres
))
4277 return wi::min_precision (wres
, sign
) > TYPE_PRECISION (type
);
4280 /* If IFN_MASK_LOAD/STORE call CALL is unconditional, return a MEM_REF
4281 for the memory it references, otherwise return null. VECTYPE is the
4282 type of the memory vector. */
/* NOTE(review): damaged extraction -- the early 'return NULL' body is
   elided (upstream numbers jump 4290 -> 4293).  Text kept byte-identical;
   comments only.
   For an IFN_MASK_LOAD/STORE whose mask is all-ones (i.e. unconditional),
   builds the equivalent MEM_REF of VECTYPE, re-aligned to the alias
   alignment recorded in arg 1; otherwise NULL (per the comment above).  */
4285 gimple_fold_mask_load_store_mem_ref (gcall
*call
, tree vectype
)
4287 tree ptr
= gimple_call_arg (call
, 0);
4288 tree alias_align
= gimple_call_arg (call
, 1);
4289 tree mask
= gimple_call_arg (call
, 2);
4290 if (!tree_fits_uhwi_p (alias_align
) || !integer_all_onesp (mask
))
/* alias_align is in bytes; TYPE_ALIGN wants bits.  */
4293 unsigned HOST_WIDE_INT align
= tree_to_uhwi (alias_align
) * BITS_PER_UNIT
;
4294 if (TYPE_ALIGN (vectype
) != align
)
4295 vectype
= build_aligned_type (vectype
, align
);
4296 tree offset
= build_zero_cst (TREE_TYPE (alias_align
));
4297 return fold_build2 (MEM_REF
, vectype
, ptr
, offset
);
4300 /* Try to fold IFN_MASK_LOAD call CALL. Return true on success. */
/* NOTE(review): damaged extraction -- an lhs guard and the returns are
   elided (upstream numbers jump 4305 -> 4309).  Text kept byte-identical;
   comments only.
   Replaces an unconditional IFN_MASK_LOAD with a plain assignment
   lhs = MEM_REF, preserving location and virtual operands.  */
4303 gimple_fold_mask_load (gimple_stmt_iterator
*gsi
, gcall
*call
)
4305 tree lhs
= gimple_call_lhs (call
);
4309 if (tree rhs
= gimple_fold_mask_load_store_mem_ref (call
, TREE_TYPE (lhs
)))
4311 gassign
*new_stmt
= gimple_build_assign (lhs
, rhs
);
4312 gimple_set_location (new_stmt
, gimple_location (call
));
4313 gimple_move_vops (new_stmt
, call
);
4314 gsi_replace (gsi
, new_stmt
, false);
4320 /* Try to fold IFN_MASK_STORE call CALL. Return true on success. */
/* NOTE(review): damaged extraction -- the returns are elided (upstream
   numbers jump past 4332).  Text kept byte-identical; comments only.
   Mirror of gimple_fold_mask_load: replaces an unconditional
   IFN_MASK_STORE with MEM_REF = rhs (the stored value is arg 3).  */
4323 gimple_fold_mask_store (gimple_stmt_iterator
*gsi
, gcall
*call
)
4325 tree rhs
= gimple_call_arg (call
, 3);
4326 if (tree lhs
= gimple_fold_mask_load_store_mem_ref (call
, TREE_TYPE (rhs
)))
4328 gassign
*new_stmt
= gimple_build_assign (lhs
, rhs
);
4329 gimple_set_location (new_stmt
, gimple_location (call
));
4330 gimple_move_vops (new_stmt
, call
);
4331 gsi_replace (gsi
, new_stmt
, false);
4337 /* Attempt to fold a call statement referenced by the statement iterator GSI.
4338 The statement may be replaced by another statement, e.g., if the call
4339 simplifies to a constant value. Return true if any changes were made.
4340 It is assumed that the operands have been previously folded. */
4343 gimple_fold_call (gimple_stmt_iterator
*gsi
, bool inplace
)
4345 gcall
*stmt
= as_a
<gcall
*> (gsi_stmt (*gsi
));
4347 bool changed
= false;
4350 /* Fold *& in call arguments. */
4351 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
4352 if (REFERENCE_CLASS_P (gimple_call_arg (stmt
, i
)))
4354 tree tmp
= maybe_fold_reference (gimple_call_arg (stmt
, i
), false);
4357 gimple_call_set_arg (stmt
, i
, tmp
);
4362 /* Check for virtual calls that became direct calls. */
4363 callee
= gimple_call_fn (stmt
);
4364 if (callee
&& TREE_CODE (callee
) == OBJ_TYPE_REF
)
4366 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee
)) != NULL_TREE
)
4368 if (dump_file
&& virtual_method_call_p (callee
)
4369 && !possible_polymorphic_call_target_p
4370 (callee
, stmt
, cgraph_node::get (gimple_call_addr_fndecl
4371 (OBJ_TYPE_REF_EXPR (callee
)))))
4374 "Type inheritance inconsistent devirtualization of ");
4375 print_gimple_stmt (dump_file
, stmt
, 0, TDF_SLIM
);
4376 fprintf (dump_file
, " to ");
4377 print_generic_expr (dump_file
, callee
, TDF_SLIM
);
4378 fprintf (dump_file
, "\n");
4381 gimple_call_set_fn (stmt
, OBJ_TYPE_REF_EXPR (callee
));
4384 else if (flag_devirtualize
&& !inplace
&& virtual_method_call_p (callee
))
4387 vec
<cgraph_node
*>targets
4388 = possible_polymorphic_call_targets (callee
, stmt
, &final
);
4389 if (final
&& targets
.length () <= 1 && dbg_cnt (devirt
))
4391 tree lhs
= gimple_call_lhs (stmt
);
4392 if (dump_enabled_p ())
4394 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS
, stmt
,
4395 "folding virtual function call to %s\n",
4396 targets
.length () == 1
4397 ? targets
[0]->name ()
4398 : "__builtin_unreachable");
4400 if (targets
.length () == 1)
4402 tree fndecl
= targets
[0]->decl
;
4403 gimple_call_set_fndecl (stmt
, fndecl
);
4405 /* If changing the call to __cxa_pure_virtual
4406 or similar noreturn function, adjust gimple_call_fntype
4408 if (gimple_call_noreturn_p (stmt
)
4409 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl
)))
4410 && TYPE_ARG_TYPES (TREE_TYPE (fndecl
))
4411 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl
)))
4413 gimple_call_set_fntype (stmt
, TREE_TYPE (fndecl
));
4414 /* If the call becomes noreturn, remove the lhs. */
4416 && gimple_call_noreturn_p (stmt
)
4417 && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt
)))
4418 || should_remove_lhs_p (lhs
)))
4420 if (TREE_CODE (lhs
) == SSA_NAME
)
4422 tree var
= create_tmp_var (TREE_TYPE (lhs
));
4423 tree def
= get_or_create_ssa_default_def (cfun
, var
);
4424 gimple
*new_stmt
= gimple_build_assign (lhs
, def
);
4425 gsi_insert_before (gsi
, new_stmt
, GSI_SAME_STMT
);
4427 gimple_call_set_lhs (stmt
, NULL_TREE
);
4429 maybe_remove_unused_call_args (cfun
, stmt
);
4433 tree fndecl
= builtin_decl_implicit (BUILT_IN_UNREACHABLE
);
4434 gimple
*new_stmt
= gimple_build_call (fndecl
, 0);
4435 gimple_set_location (new_stmt
, gimple_location (stmt
));
4436 /* If the call had a SSA name as lhs morph that into
4437 an uninitialized value. */
4438 if (lhs
&& TREE_CODE (lhs
) == SSA_NAME
)
4440 tree var
= create_tmp_var (TREE_TYPE (lhs
));
4441 SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs
, var
);
4442 SSA_NAME_DEF_STMT (lhs
) = gimple_build_nop ();
4443 set_ssa_default_def (cfun
, var
, lhs
);
4445 gimple_move_vops (new_stmt
, stmt
);
4446 gsi_replace (gsi
, new_stmt
, false);
4453 /* Check for indirect calls that became direct calls, and then
4454 no longer require a static chain. */
4455 if (gimple_call_chain (stmt
))
4457 tree fn
= gimple_call_fndecl (stmt
);
4458 if (fn
&& !DECL_STATIC_CHAIN (fn
))
4460 gimple_call_set_chain (stmt
, NULL
);
4465 tree tmp
= maybe_fold_reference (gimple_call_chain (stmt
), false);
4468 gimple_call_set_chain (stmt
, tmp
);
4477 /* Check for builtins that CCP can handle using information not
4478 available in the generic fold routines. */
4479 if (gimple_call_builtin_p (stmt
, BUILT_IN_NORMAL
))
4481 if (gimple_fold_builtin (gsi
))
4484 else if (gimple_call_builtin_p (stmt
, BUILT_IN_MD
))
4486 changed
|= targetm
.gimple_fold_builtin (gsi
);
4488 else if (gimple_call_internal_p (stmt
))
4490 enum tree_code subcode
= ERROR_MARK
;
4491 tree result
= NULL_TREE
;
4492 bool cplx_result
= false;
4493 tree overflow
= NULL_TREE
;
4494 switch (gimple_call_internal_fn (stmt
))
4496 case IFN_BUILTIN_EXPECT
:
4497 result
= fold_builtin_expect (gimple_location (stmt
),
4498 gimple_call_arg (stmt
, 0),
4499 gimple_call_arg (stmt
, 1),
4500 gimple_call_arg (stmt
, 2),
4503 case IFN_UBSAN_OBJECT_SIZE
:
4505 tree offset
= gimple_call_arg (stmt
, 1);
4506 tree objsize
= gimple_call_arg (stmt
, 2);
4507 if (integer_all_onesp (objsize
)
4508 || (TREE_CODE (offset
) == INTEGER_CST
4509 && TREE_CODE (objsize
) == INTEGER_CST
4510 && tree_int_cst_le (offset
, objsize
)))
4512 replace_call_with_value (gsi
, NULL_TREE
);
4518 if (integer_zerop (gimple_call_arg (stmt
, 1)))
4520 replace_call_with_value (gsi
, NULL_TREE
);
4524 case IFN_UBSAN_BOUNDS
:
4526 tree index
= gimple_call_arg (stmt
, 1);
4527 tree bound
= gimple_call_arg (stmt
, 2);
4528 if (TREE_CODE (index
) == INTEGER_CST
4529 && TREE_CODE (bound
) == INTEGER_CST
)
4531 index
= fold_convert (TREE_TYPE (bound
), index
);
4532 if (TREE_CODE (index
) == INTEGER_CST
4533 && tree_int_cst_le (index
, bound
))
4535 replace_call_with_value (gsi
, NULL_TREE
);
4541 case IFN_GOACC_DIM_SIZE
:
4542 case IFN_GOACC_DIM_POS
:
4543 result
= fold_internal_goacc_dim (stmt
);
4545 case IFN_UBSAN_CHECK_ADD
:
4546 subcode
= PLUS_EXPR
;
4548 case IFN_UBSAN_CHECK_SUB
:
4549 subcode
= MINUS_EXPR
;
4551 case IFN_UBSAN_CHECK_MUL
:
4552 subcode
= MULT_EXPR
;
4554 case IFN_ADD_OVERFLOW
:
4555 subcode
= PLUS_EXPR
;
4558 case IFN_SUB_OVERFLOW
:
4559 subcode
= MINUS_EXPR
;
4562 case IFN_MUL_OVERFLOW
:
4563 subcode
= MULT_EXPR
;
4567 changed
|= gimple_fold_mask_load (gsi
, stmt
);
4569 case IFN_MASK_STORE
:
4570 changed
|= gimple_fold_mask_store (gsi
, stmt
);
4575 if (subcode
!= ERROR_MARK
)
4577 tree arg0
= gimple_call_arg (stmt
, 0);
4578 tree arg1
= gimple_call_arg (stmt
, 1);
4579 tree type
= TREE_TYPE (arg0
);
4582 tree lhs
= gimple_call_lhs (stmt
);
4583 if (lhs
== NULL_TREE
)
4586 type
= TREE_TYPE (TREE_TYPE (lhs
));
4588 if (type
== NULL_TREE
)
4590 /* x = y + 0; x = y - 0; x = y * 0; */
4591 else if (integer_zerop (arg1
))
4592 result
= subcode
== MULT_EXPR
? integer_zero_node
: arg0
;
4593 /* x = 0 + y; x = 0 * y; */
4594 else if (subcode
!= MINUS_EXPR
&& integer_zerop (arg0
))
4595 result
= subcode
== MULT_EXPR
? integer_zero_node
: arg1
;
4597 else if (subcode
== MINUS_EXPR
&& operand_equal_p (arg0
, arg1
, 0))
4598 result
= integer_zero_node
;
4599 /* x = y * 1; x = 1 * y; */
4600 else if (subcode
== MULT_EXPR
&& integer_onep (arg1
))
4602 else if (subcode
== MULT_EXPR
&& integer_onep (arg0
))
4604 else if (TREE_CODE (arg0
) == INTEGER_CST
4605 && TREE_CODE (arg1
) == INTEGER_CST
)
4608 result
= int_const_binop (subcode
, fold_convert (type
, arg0
),
4609 fold_convert (type
, arg1
));
4611 result
= int_const_binop (subcode
, arg0
, arg1
);
4612 if (result
&& arith_overflowed_p (subcode
, type
, arg0
, arg1
))
4615 overflow
= build_one_cst (type
);
4622 if (result
== integer_zero_node
)
4623 result
= build_zero_cst (type
);
4624 else if (cplx_result
&& TREE_TYPE (result
) != type
)
4626 if (TREE_CODE (result
) == INTEGER_CST
)
4628 if (arith_overflowed_p (PLUS_EXPR
, type
, result
,
4630 overflow
= build_one_cst (type
);
4632 else if ((!TYPE_UNSIGNED (TREE_TYPE (result
))
4633 && TYPE_UNSIGNED (type
))
4634 || (TYPE_PRECISION (type
)
4635 < (TYPE_PRECISION (TREE_TYPE (result
))
4636 + (TYPE_UNSIGNED (TREE_TYPE (result
))
4637 && !TYPE_UNSIGNED (type
)))))
4640 result
= fold_convert (type
, result
);
4647 if (TREE_CODE (result
) == INTEGER_CST
&& TREE_OVERFLOW (result
))
4648 result
= drop_tree_overflow (result
);
4651 if (overflow
== NULL_TREE
)
4652 overflow
= build_zero_cst (TREE_TYPE (result
));
4653 tree ctype
= build_complex_type (TREE_TYPE (result
));
4654 if (TREE_CODE (result
) == INTEGER_CST
4655 && TREE_CODE (overflow
) == INTEGER_CST
)
4656 result
= build_complex (ctype
, result
, overflow
);
4658 result
= build2_loc (gimple_location (stmt
), COMPLEX_EXPR
,
4659 ctype
, result
, overflow
);
4661 if (!update_call_from_tree (gsi
, result
))
4662 gimplify_and_update_call_from_tree (gsi
, result
);
4671 /* Return true whether NAME has a use on STMT. */
/* Walks NAME's immediate-use list and succeeds as soon as one use's
   using statement is STMT.  Callers (replace_stmt_with_simplification)
   use this to permit abnormal SSA names that were already mentioned on
   the statement being replaced.
   NOTE(review): this excerpt has elided lines — the explicit return
   statements are not visible here.  */
4674 has_use_on_stmt (tree name
, gimple
*stmt
)
4676 imm_use_iterator iter
;
4677 use_operand_p use_p
;
/* FOR_EACH_IMM_USE_FAST iterates without allowing stmt modification,
   which is fine for this read-only scan.  */
4678 FOR_EACH_IMM_USE_FAST (use_p
, iter
, name
)
4679 if (USE_STMT (use_p
) == stmt
)
4684 /* Worker for fold_stmt_1 dispatch to pattern based folding with
4687 Replaces *GSI with the simplification result in RES_OP and OPS
4688 and the associated statements in *SEQ. Does the replacement
4689 according to INPLACE and returns true if the operation succeeded. */
4692 replace_stmt_with_simplification (gimple_stmt_iterator
*gsi
,
4693 gimple_match_op
*res_op
,
4694 gimple_seq
*seq
, bool inplace
)
4696 gimple
*stmt
= gsi_stmt (*gsi
);
4697 tree
*ops
= res_op
->ops
;
4698 unsigned int num_ops
= res_op
->num_ops
;
4700 /* Play safe and do not allow abnormals to be mentioned in
4701 newly created statements. See also maybe_push_res_to_seq.
4702 As an exception allow such uses if there was a use of the
4703 same SSA name on the old stmt. */
4704 for (unsigned int i
= 0; i
< num_ops
; ++i
)
4705 if (TREE_CODE (ops
[i
]) == SSA_NAME
4706 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops
[i
])
4707 && !has_use_on_stmt (ops
[i
], stmt
))
/* A comparison as first operand wraps two more operands; check those
   for abnormals the same way.  */
4710 if (num_ops
> 0 && COMPARISON_CLASS_P (ops
[0]))
4711 for (unsigned int i
= 0; i
< 2; ++i
)
4712 if (TREE_CODE (TREE_OPERAND (ops
[0], i
)) == SSA_NAME
4713 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops
[0], i
))
4714 && !has_use_on_stmt (TREE_OPERAND (ops
[0], i
), stmt
))
4717 /* Don't insert new statements when INPLACE is true, even if we could
4718 reuse STMT for the final statement. */
4719 if (inplace
&& !gimple_seq_empty_p (*seq
))
/* Case 1: STMT is a GIMPLE_COND — rewrite its condition directly.  */
4722 if (gcond
*cond_stmt
= dyn_cast
<gcond
*> (stmt
))
4724 gcc_assert (res_op
->code
.is_tree_code ());
4725 if (TREE_CODE_CLASS ((enum tree_code
) res_op
->code
) == tcc_comparison
4726 /* GIMPLE_CONDs condition may not throw. */
4727 && (!flag_exceptions
4728 || !cfun
->can_throw_non_call_exceptions
4729 || !operation_could_trap_p (res_op
->code
,
4730 FLOAT_TYPE_P (TREE_TYPE (ops
[0])),
4732 gimple_cond_set_condition (cond_stmt
, res_op
->code
, ops
[0], ops
[1]);
4733 else if (res_op
->code
== SSA_NAME
)
4734 gimple_cond_set_condition (cond_stmt
, NE_EXPR
, ops
[0],
4735 build_zero_cst (TREE_TYPE (ops
[0])));
4736 else if (res_op
->code
== INTEGER_CST
)
4738 if (integer_zerop (ops
[0]))
4739 gimple_cond_make_false (cond_stmt
);
4741 gimple_cond_make_true (cond_stmt
);
/* Otherwise materialize the simplified value into SEQ and compare it
   against zero.  */
4745 tree res
= maybe_push_res_to_seq (res_op
, seq
);
4748 gimple_cond_set_condition (cond_stmt
, NE_EXPR
, res
,
4749 build_zero_cst (TREE_TYPE (res
)));
4753 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4755 fprintf (dump_file
, "gimple_simplified to ");
4756 if (!gimple_seq_empty_p (*seq
))
4757 print_gimple_seq (dump_file
, *seq
, 0, TDF_SLIM
);
4758 print_gimple_stmt (dump_file
, gsi_stmt (*gsi
),
4761 gsi_insert_seq_before (gsi
, *seq
, GSI_SAME_STMT
);
/* Case 2: STMT is an assignment and the result is a tree code —
   replace the RHS in place.  */
4764 else if (is_gimple_assign (stmt
)
4765 && res_op
->code
.is_tree_code ())
4768 || gimple_num_ops (stmt
) > get_gimple_rhs_num_ops (res_op
->code
))
4770 maybe_build_generic_op (res_op
);
4771 gimple_assign_set_rhs_with_ops (gsi
, res_op
->code
,
4772 res_op
->op_or_null (0),
4773 res_op
->op_or_null (1),
4774 res_op
->op_or_null (2));
4775 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4777 fprintf (dump_file
, "gimple_simplified to ");
4778 if (!gimple_seq_empty_p (*seq
))
4779 print_gimple_seq (dump_file
, *seq
, 0, TDF_SLIM
);
4780 print_gimple_stmt (dump_file
, gsi_stmt (*gsi
),
4783 gsi_insert_seq_before (gsi
, *seq
, GSI_SAME_STMT
);
/* Case 3: the result is a (combined) function code matching the call —
   only the arguments changed, update them in place.  */
4787 else if (res_op
->code
.is_fn_code ()
4788 && gimple_call_combined_fn (stmt
) == res_op
->code
)
4790 gcc_assert (num_ops
== gimple_call_num_args (stmt
));
4791 for (unsigned int i
= 0; i
< num_ops
; ++i
)
4792 gimple_call_set_arg (stmt
, i
, ops
[i
]);
4793 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4795 fprintf (dump_file
, "gimple_simplified to ");
4796 if (!gimple_seq_empty_p (*seq
))
4797 print_gimple_seq (dump_file
, *seq
, 0, TDF_SLIM
);
4798 print_gimple_stmt (dump_file
, gsi_stmt (*gsi
), 0, TDF_SLIM
);
4800 gsi_insert_seq_before (gsi
, *seq
, GSI_SAME_STMT
);
/* Fallback: push the result to SEQ targeting the statement's LHS and
   replace the whole statement with SEQ (keeping virtual operands).  */
4805 if (gimple_has_lhs (stmt
))
4807 tree lhs
= gimple_get_lhs (stmt
);
4808 if (!maybe_push_res_to_seq (res_op
, seq
, lhs
))
4810 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4812 fprintf (dump_file
, "gimple_simplified to ");
4813 print_gimple_seq (dump_file
, *seq
, 0, TDF_SLIM
);
4815 gsi_replace_with_seq_vops (gsi
, *seq
);
4825 /* Canonicalize MEM_REFs invariant address operand after propagation. */
/* T points at a reference tree (possibly wrapped in an ADDR_EXPR);
   rewrites *T in place into the middle-end canonical form.  Returns
   whether a change was made (return statements are elided in this
   excerpt).  */
4828 maybe_canonicalize_mem_ref_addr (tree
*t
)
/* Look through an outer ADDR_EXPR to the referenced object.  */
4832 if (TREE_CODE (*t
) == ADDR_EXPR
)
4833 t
= &TREE_OPERAND (*t
, 0);
4835 /* The C and C++ frontends use an ARRAY_REF for indexing with their
4836 generic vector extension. The actual vector referenced is
4837 view-converted to an array type for this purpose. If the index
4838 is constant the canonical representation in the middle-end is a
4839 BIT_FIELD_REF so re-write the former to the latter here. */
4840 if (TREE_CODE (*t
) == ARRAY_REF
4841 && TREE_CODE (TREE_OPERAND (*t
, 0)) == VIEW_CONVERT_EXPR
4842 && TREE_CODE (TREE_OPERAND (*t
, 1)) == INTEGER_CST
4843 && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t
, 0), 0))))
4845 tree vtype
= TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t
, 0), 0));
4846 if (VECTOR_TYPE_P (vtype
))
4848 tree low
= array_ref_low_bound (*t
);
4849 if (TREE_CODE (low
) == INTEGER_CST
)
4851 if (tree_int_cst_le (low
, TREE_OPERAND (*t
, 1)))
/* Compute the bit offset of the element: (index - low) * elt-size,
   then verify the accessed extent stays within the vector.  */
4853 widest_int idx
= wi::sub (wi::to_widest (TREE_OPERAND (*t
, 1)),
4854 wi::to_widest (low
));
4855 idx
= wi::mul (idx
, wi::to_widest
4856 (TYPE_SIZE (TREE_TYPE (*t
))));
4858 = wi::add (idx
, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t
))));
4859 if (wi::les_p (ext
, wi::to_widest (TYPE_SIZE (vtype
))))
4861 *t
= build3_loc (EXPR_LOCATION (*t
), BIT_FIELD_REF
,
4863 TREE_OPERAND (TREE_OPERAND (*t
, 0), 0),
4864 TYPE_SIZE (TREE_TYPE (*t
)),
4865 wide_int_to_tree (bitsizetype
, idx
));
/* Strip component references to get at the base MEM_REF, if any.  */
4873 while (handled_component_p (*t
))
4874 t
= &TREE_OPERAND (*t
, 0);
4876 /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
4877 of invariant addresses into a SSA name MEM_REF address. */
4878 if (TREE_CODE (*t
) == MEM_REF
4879 || TREE_CODE (*t
) == TARGET_MEM_REF
)
4881 tree addr
= TREE_OPERAND (*t
, 0);
4882 if (TREE_CODE (addr
) == ADDR_EXPR
4883 && (TREE_CODE (TREE_OPERAND (addr
, 0)) == MEM_REF
4884 || handled_component_p (TREE_OPERAND (addr
, 0))))
/* Fold the component offset of the address into the MEM_REF's
   constant offset operand.  */
4888 base
= get_addr_base_and_unit_offset (TREE_OPERAND (addr
, 0),
4893 TREE_OPERAND (*t
, 0) = build_fold_addr_expr (base
);
4894 TREE_OPERAND (*t
, 1) = int_const_binop (PLUS_EXPR
,
4895 TREE_OPERAND (*t
, 1),
4896 size_int (coffset
));
4899 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t
, 0)) == DEBUG_EXPR_DECL
4900 || is_gimple_mem_ref_addr (TREE_OPERAND (*t
, 0)));
4903 /* Canonicalize back MEM_REFs to plain reference trees if the object
4904 accessed is a decl that has the same access semantics as the MEM_REF. */
4905 if (TREE_CODE (*t
) == MEM_REF
4906 && TREE_CODE (TREE_OPERAND (*t
, 0)) == ADDR_EXPR
4907 && integer_zerop (TREE_OPERAND (*t
, 1))
4908 && MR_DEPENDENCE_CLIQUE (*t
) == 0)
4910 tree decl
= TREE_OPERAND (TREE_OPERAND (*t
, 0), 0);
4911 tree alias_type
= TREE_TYPE (TREE_OPERAND (*t
, 1));
4912 if (/* Same volatile qualification. */
4913 TREE_THIS_VOLATILE (*t
) == TREE_THIS_VOLATILE (decl
)
4914 /* Same TBAA behavior with -fstrict-aliasing. */
4915 && !TYPE_REF_CAN_ALIAS_ALL (alias_type
)
4916 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl
))
4917 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type
)))
4918 /* Same alignment. */
4919 && TYPE_ALIGN (TREE_TYPE (decl
)) == TYPE_ALIGN (TREE_TYPE (*t
))
4920 /* We have to look out here to not drop a required conversion
4921 from the rhs to the lhs if *t appears on the lhs or vice-versa
4922 if it appears on the rhs. Thus require strict type
4924 && types_compatible_p (TREE_TYPE (*t
), TREE_TYPE (decl
)))
4926 *t
= TREE_OPERAND (TREE_OPERAND (*t
, 0), 0);
4931 /* Canonicalize TARGET_MEM_REF in particular with respect to
4932 the indexes becoming constant. */
4933 else if (TREE_CODE (*t
) == TARGET_MEM_REF
)
4935 tree tem
= maybe_fold_tmr (*t
);
4946 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
4947 distinguishes both cases. */
/* VALUEIZE is consulted to look up SSA-name values during pattern
   folding; INPLACE forbids replacing STMT with a different statement.
   Returns whether anything changed (return elided in this excerpt).  */
4950 fold_stmt_1 (gimple_stmt_iterator
*gsi
, bool inplace
, tree (*valueize
) (tree
))
4952 bool changed
= false;
4953 gimple
*stmt
= gsi_stmt (*gsi
);
4954 bool nowarning
= gimple_no_warning_p (stmt
);
/* Overflow warnings triggered while folding are deferred and emitted
   (or dropped) at the end, depending on whether folding changed STMT.  */
4956 fold_defer_overflow_warnings ();
4958 /* First do required canonicalization of [TARGET_]MEM_REF addresses
4960 ??? This shouldn't be done in generic folding but in the
4961 propagation helpers which also know whether an address was
4963 Also canonicalize operand order. */
4964 switch (gimple_code (stmt
))
/* GIMPLE_ASSIGN: canonicalize memory references on both sides.  */
4967 if (gimple_assign_rhs_class (stmt
) == GIMPLE_SINGLE_RHS
)
4969 tree
*rhs
= gimple_assign_rhs1_ptr (stmt
);
4970 if ((REFERENCE_CLASS_P (*rhs
)
4971 || TREE_CODE (*rhs
) == ADDR_EXPR
)
4972 && maybe_canonicalize_mem_ref_addr (rhs
))
4974 tree
*lhs
= gimple_assign_lhs_ptr (stmt
);
4975 if (REFERENCE_CLASS_P (*lhs
)
4976 && maybe_canonicalize_mem_ref_addr (lhs
))
4981 /* Canonicalize operand order. */
4982 enum tree_code code
= gimple_assign_rhs_code (stmt
);
4983 if (TREE_CODE_CLASS (code
) == tcc_comparison
4984 || commutative_tree_code (code
)
4985 || commutative_ternary_tree_code (code
))
4987 tree rhs1
= gimple_assign_rhs1 (stmt
);
4988 tree rhs2
= gimple_assign_rhs2 (stmt
);
4989 if (tree_swap_operands_p (rhs1
, rhs2
))
4991 gimple_assign_set_rhs1 (stmt
, rhs2
);
4992 gimple_assign_set_rhs2 (stmt
, rhs1
);
/* Swapping comparison operands requires mirroring the code too.  */
4993 if (TREE_CODE_CLASS (code
) == tcc_comparison
)
4994 gimple_assign_set_rhs_code (stmt
,
4995 swap_tree_comparison (code
));
/* GIMPLE_CALL: canonicalize memory references in arguments and LHS.  */
5003 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
5005 tree
*arg
= gimple_call_arg_ptr (stmt
, i
);
5006 if (REFERENCE_CLASS_P (*arg
)
5007 && maybe_canonicalize_mem_ref_addr (arg
))
5010 tree
*lhs
= gimple_call_lhs_ptr (stmt
);
5012 && REFERENCE_CLASS_P (*lhs
)
5013 && maybe_canonicalize_mem_ref_addr (lhs
))
/* GIMPLE_ASM: canonicalize memory references in output/input operands.  */
5019 gasm
*asm_stmt
= as_a
<gasm
*> (stmt
);
5020 for (i
= 0; i
< gimple_asm_noutputs (asm_stmt
); ++i
)
5022 tree link
= gimple_asm_output_op (asm_stmt
, i
);
5023 tree op
= TREE_VALUE (link
);
5024 if (REFERENCE_CLASS_P (op
)
5025 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link
)))
5028 for (i
= 0; i
< gimple_asm_ninputs (asm_stmt
); ++i
)
5030 tree link
= gimple_asm_input_op (asm_stmt
, i
);
5031 tree op
= TREE_VALUE (link
);
5032 if ((REFERENCE_CLASS_P (op
)
5033 || TREE_CODE (op
) == ADDR_EXPR
)
5034 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link
)))
/* GIMPLE_DEBUG: canonicalize the bound value of debug binds.  */
5040 if (gimple_debug_bind_p (stmt
))
5042 tree
*val
= gimple_debug_bind_get_value_ptr (stmt
);
5044 && (REFERENCE_CLASS_P (*val
)
5045 || TREE_CODE (*val
) == ADDR_EXPR
)
5046 && maybe_canonicalize_mem_ref_addr (val
))
/* GIMPLE_COND: canonicalize operand order of the condition.  */
5052 /* Canonicalize operand order. */
5053 tree lhs
= gimple_cond_lhs (stmt
);
5054 tree rhs
= gimple_cond_rhs (stmt
);
5055 if (tree_swap_operands_p (lhs
, rhs
))
5057 gcond
*gc
= as_a
<gcond
*> (stmt
);
5058 gimple_cond_set_lhs (gc
, rhs
);
5059 gimple_cond_set_rhs (gc
, lhs
);
5060 gimple_cond_set_code (gc
,
5061 swap_tree_comparison (gimple_cond_code (gc
)));
5068 /* Dispatch to pattern-based folding. */
5070 || is_gimple_assign (stmt
)
5071 || gimple_code (stmt
) == GIMPLE_COND
)
5073 gimple_seq seq
= NULL
;
5074 gimple_match_op res_op
;
/* When INPLACE, pass no sequence so gimple_simplify cannot create new
   statements; otherwise collect them in SEQ.  */
5075 if (gimple_simplify (stmt
, &res_op
, inplace
? NULL
: &seq
,
5076 valueize
, valueize
))
5078 if (replace_stmt_with_simplification (gsi
, &res_op
, &seq
, inplace
))
5081 gimple_seq_discard (seq
);
/* Re-fetch STMT — the simplification above may have replaced it.  */
5085 stmt
= gsi_stmt (*gsi
);
5087 /* Fold the main computation performed by the statement. */
5088 switch (gimple_code (stmt
))
5092 /* Try to canonicalize for boolean-typed X the comparisons
5093 X == 0, X == 1, X != 0, and X != 1. */
5094 if (gimple_assign_rhs_code (stmt
) == EQ_EXPR
5095 || gimple_assign_rhs_code (stmt
) == NE_EXPR
)
5097 tree lhs
= gimple_assign_lhs (stmt
);
5098 tree op1
= gimple_assign_rhs1 (stmt
);
5099 tree op2
= gimple_assign_rhs2 (stmt
);
5100 tree type
= TREE_TYPE (op1
);
5102 /* Check whether the comparison operands are of the same boolean
5103 type as the result type is.
5104 Check that second operand is an integer-constant with value
5106 if (TREE_CODE (op2
) == INTEGER_CST
5107 && (integer_zerop (op2
) || integer_onep (op2
))
5108 && useless_type_conversion_p (TREE_TYPE (lhs
), type
))
5110 enum tree_code cmp_code
= gimple_assign_rhs_code (stmt
);
5111 bool is_logical_not
= false;
5113 /* X == 0 and X != 1 is a logical-not of X
5114 X == 1 and X != 0 is X */
5115 if ((cmp_code
== EQ_EXPR
&& integer_zerop (op2
))
5116 || (cmp_code
== NE_EXPR
&& integer_onep (op2
)))
5117 is_logical_not
= true;
5119 if (is_logical_not
== false)
5120 gimple_assign_set_rhs_with_ops (gsi
, TREE_CODE (op1
), op1
);
5121 /* Only for one-bit precision typed X the transformation
5122 !X -> ~X is valid. */
5123 else if (TYPE_PRECISION (type
) == 1)
5124 gimple_assign_set_rhs_with_ops (gsi
, BIT_NOT_EXPR
, op1
);
5125 /* Otherwise we use !X -> X ^ 1. */
5127 gimple_assign_set_rhs_with_ops (gsi
, BIT_XOR_EXPR
, op1
,
5128 build_int_cst (type
, 1));
/* Generic assignment folding via fold_gimple_assign; only accept the
   new RHS when it does not grow the number of operands (or INPLACE
   constraints permit).  */
5134 unsigned old_num_ops
= gimple_num_ops (stmt
);
5135 tree lhs
= gimple_assign_lhs (stmt
);
5136 tree new_rhs
= fold_gimple_assign (gsi
);
5138 && !useless_type_conversion_p (TREE_TYPE (lhs
),
5139 TREE_TYPE (new_rhs
)))
5140 new_rhs
= fold_convert (TREE_TYPE (lhs
), new_rhs
);
5143 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs
)) < old_num_ops
))
5145 gimple_assign_set_rhs_from_tree (gsi
, new_rhs
);
/* Calls are folded by the call-specific worker.  */
5152 changed
|= gimple_fold_call (gsi
, inplace
);
5156 /* Fold *& in asm operands. */
5158 gasm
*asm_stmt
= as_a
<gasm
*> (stmt
);
5160 const char **oconstraints
;
5161 const char *constraint
;
5162 bool allows_mem
, allows_reg
;
5164 noutputs
= gimple_asm_noutputs (asm_stmt
);
5165 oconstraints
= XALLOCAVEC (const char *, noutputs
);
5167 for (i
= 0; i
< gimple_asm_noutputs (asm_stmt
); ++i
)
5169 tree link
= gimple_asm_output_op (asm_stmt
, i
);
5170 tree op
= TREE_VALUE (link
);
5172 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link
)));
5173 if (REFERENCE_CLASS_P (op
)
5174 && (op
= maybe_fold_reference (op
, true)) != NULL_TREE
)
5176 TREE_VALUE (link
) = op
;
5180 for (i
= 0; i
< gimple_asm_ninputs (asm_stmt
); ++i
)
5182 tree link
= gimple_asm_input_op (asm_stmt
, i
);
5183 tree op
= TREE_VALUE (link
);
5185 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link
)));
/* Inputs may only be folded to memory when the constraint allows
   memory but not a register.  */
5186 parse_input_constraint (&constraint
, 0, 0, noutputs
, 0,
5187 oconstraints
, &allows_mem
, &allows_reg
);
5188 if (REFERENCE_CLASS_P (op
)
5189 && (op
= maybe_fold_reference (op
, !allows_reg
&& allows_mem
))
5192 TREE_VALUE (link
) = op
;
/* Debug binds: fold references (and references behind ADDR_EXPR) in
   the bound value.  */
5200 if (gimple_debug_bind_p (stmt
))
5202 tree val
= gimple_debug_bind_get_value (stmt
);
5204 && REFERENCE_CLASS_P (val
))
5206 tree tem
= maybe_fold_reference (val
, false);
5209 gimple_debug_bind_set_value (stmt
, tem
);
5214 && TREE_CODE (val
) == ADDR_EXPR
)
5216 tree ref
= TREE_OPERAND (val
, 0);
5217 tree tem
= maybe_fold_reference (ref
, false);
5220 tem
= build_fold_addr_expr_with_type (tem
, TREE_TYPE (val
));
5221 gimple_debug_bind_set_value (stmt
, tem
);
/* GIMPLE_RETURN: propagate the valueized return value when copy
   propagation is allowed.  */
5230 greturn
*ret_stmt
= as_a
<greturn
*> (stmt
);
5231 tree ret
= gimple_return_retval(ret_stmt
);
5233 if (ret
&& TREE_CODE (ret
) == SSA_NAME
&& valueize
)
5235 tree val
= valueize (ret
);
5236 if (val
&& val
!= ret
5237 && may_propagate_copy (ret
, val
))
5239 gimple_return_set_retval (ret_stmt
, val
);
5249 stmt
= gsi_stmt (*gsi
);
5251 /* Fold *& on the lhs. */
5252 if (gimple_has_lhs (stmt
))
5254 tree lhs
= gimple_get_lhs (stmt
);
5255 if (lhs
&& REFERENCE_CLASS_P (lhs
))
5257 tree new_lhs
= maybe_fold_reference (lhs
, true);
5260 gimple_set_lhs (stmt
, new_lhs
);
/* Emit the deferred overflow warnings only if something changed and
   warnings were not suppressed on the original statement.  */
5266 fold_undefer_overflow_warnings (changed
&& !nowarning
, stmt
, 0);
5270 /* Valueization callback that ends up not following SSA edges. */
/* Handed to fold_stmt_1 by fold_stmt (gsi) below to keep folding
   strictly local to the statement.  */
5273 no_follow_ssa_edges (tree
)
5278 /* Valueization callback that ends up following single-use SSA edges only. */
/* Rejects (does not look through) SSA names with more than one use,
   limiting folding to cases where the def can die afterwards.  */
5281 follow_single_use_edges (tree val
)
5283 if (TREE_CODE (val
) == SSA_NAME
5284 && !has_single_use (val
))
5289 /* Valueization callback that follows all SSA edges. */
/* Identity valueization: every SSA name may be looked through.  */
5292 follow_all_ssa_edges (tree val
)
5297 /* Fold the statement pointed to by GSI. In some cases, this function may
5298 replace the whole statement with a new one. Returns true iff folding
5300 The statement pointed to by GSI should be in valid gimple form but may
5301 be in unfolded state as resulting from for example constant propagation
5302 which can produce *&x = 0. */
/* Convenience wrapper: non-inplace folding without following SSA edges.  */
5305 fold_stmt (gimple_stmt_iterator
*gsi
)
5307 return fold_stmt_1 (gsi
, false, no_follow_ssa_edges
);
/* Overload of fold_stmt taking an explicit VALUEIZE callback that is
   consulted to look up SSA-name values while folding.  */
5311 fold_stmt (gimple_stmt_iterator
*gsi
, tree (*valueize
) (tree
))
5313 return fold_stmt_1 (gsi
, false, valueize
);
5316 /* Perform the minimal folding on statement *GSI. Only operations like
5317 *&x created by constant propagation are handled. The statement cannot
5318 be replaced with a new one. Return true if the statement was
5319 changed, false otherwise.
5320 The statement *GSI should be in valid gimple form but may
5321 be in unfolded state as resulting from for example constant propagation
5322 which can produce *&x = 0. */
5325 fold_stmt_inplace (gimple_stmt_iterator
*gsi
)
5327 gimple
*stmt
= gsi_stmt (*gsi
);
5328 bool changed
= fold_stmt_1 (gsi
, true, no_follow_ssa_edges
);
/* INPLACE folding must never replace the statement object itself.  */
5329 gcc_assert (gsi_stmt (*gsi
) == stmt
)
;
5333 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
5334 if EXPR is null or we don't know how.
5335 If non-null, the result always has boolean type. */
5338 canonicalize_bool (tree expr
, bool invert
)
/* Inverted branch: constants flip, an SSA name becomes (expr == 0),
   and a comparison gets its code inverted.  */
5344 if (integer_nonzerop (expr
))
5345 return boolean_false_node
;
5346 else if (integer_zerop (expr
))
5347 return boolean_true_node
;
5348 else if (TREE_CODE (expr
) == SSA_NAME
)
5349 return fold_build2 (EQ_EXPR
, boolean_type_node
, expr
,
5350 build_int_cst (TREE_TYPE (expr
), 0));
5351 else if (COMPARISON_CLASS_P (expr
))
5352 return fold_build2 (invert_tree_comparison (TREE_CODE (expr
), false),
5354 TREE_OPERAND (expr
, 0),
5355 TREE_OPERAND (expr
, 1));
/* Non-inverted branch: already-boolean trees pass through; an SSA name
   becomes (expr != 0) so the result carries boolean type.  */
5361 if (TREE_CODE (TREE_TYPE (expr
)) == BOOLEAN_TYPE
)
5363 if (integer_nonzerop (expr
))
5364 return boolean_true_node
;
5365 else if (integer_zerop (expr
))
5366 return boolean_false_node
;
5367 else if (TREE_CODE (expr
) == SSA_NAME
)
5368 return fold_build2 (NE_EXPR
, boolean_type_node
, expr
,
5369 build_int_cst (TREE_TYPE (expr
), 0));
5370 else if (COMPARISON_CLASS_P (expr
))
5371 return fold_build2 (TREE_CODE (expr
),
5373 TREE_OPERAND (expr
, 0),
5374 TREE_OPERAND (expr
, 1));
5380 /* Check to see if a boolean expression EXPR is logically equivalent to the
5381 comparison (OP1 CODE OP2). Check for various identities involving
5385 same_bool_comparison_p (const_tree expr
, enum tree_code code
,
5386 const_tree op1
, const_tree op2
)
5390 /* The obvious case. */
5391 if (TREE_CODE (expr
) == code
5392 && operand_equal_p (TREE_OPERAND (expr
, 0), op1
, 0)
5393 && operand_equal_p (TREE_OPERAND (expr
, 1), op2
, 0))
5396 /* Check for comparing (name, name != 0) and the case where expr
5397 is an SSA_NAME with a definition matching the comparison. */
5398 if (TREE_CODE (expr
) == SSA_NAME
5399 && TREE_CODE (TREE_TYPE (expr
)) == BOOLEAN_TYPE
)
5401 if (operand_equal_p (expr
, op1
, 0))
5402 return ((code
== NE_EXPR
&& integer_zerop (op2
))
5403 || (code
== EQ_EXPR
&& integer_nonzerop (op2
)));
/* Otherwise look at EXPR's defining statement for a matching
   comparison.  */
5404 s
= SSA_NAME_DEF_STMT (expr
);
5405 if (is_gimple_assign (s
)
5406 && gimple_assign_rhs_code (s
) == code
5407 && operand_equal_p (gimple_assign_rhs1 (s
), op1
, 0)
5408 && operand_equal_p (gimple_assign_rhs2 (s
), op2
, 0))
5412 /* If op1 is of the form (name != 0) or (name == 0), and the definition
5413 of name is a comparison, recurse. */
5414 if (TREE_CODE (op1
) == SSA_NAME
5415 && TREE_CODE (TREE_TYPE (op1
)) == BOOLEAN_TYPE
)
5417 s
= SSA_NAME_DEF_STMT (op1
);
5418 if (is_gimple_assign (s
)
5419 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
)
5421 enum tree_code c
= gimple_assign_rhs_code (s
);
/* (name != 0) / (name == 1) keeps the comparison's sense ...  */
5422 if ((c
== NE_EXPR
&& integer_zerop (op2
))
5423 || (c
== EQ_EXPR
&& integer_nonzerop (op2
)))
5424 return same_bool_comparison_p (expr
, c
,
5425 gimple_assign_rhs1 (s
),
5426 gimple_assign_rhs2 (s
));
/* ... while (name == 0) / (name != 1) inverts it.  */
5427 if ((c
== EQ_EXPR
&& integer_zerop (op2
))
5428 || (c
== NE_EXPR
&& integer_nonzerop (op2
)))
5429 return same_bool_comparison_p (expr
,
5430 invert_tree_comparison (c
, false),
5431 gimple_assign_rhs1 (s
),
5432 gimple_assign_rhs2 (s
));
5438 /* Check to see if two boolean expressions OP1 and OP2 are logically
5442 same_bool_result_p (const_tree op1
, const_tree op2
)
5444 /* Simple cases first. */
5445 if (operand_equal_p (op1
, op2
, 0))
5448 /* Check the cases where at least one of the operands is a comparison.
5449 These are a bit smarter than operand_equal_p in that they apply some
5450 identities on SSA_NAMEs. */
5451 if (COMPARISON_CLASS_P (op2
)
5452 && same_bool_comparison_p (op1
, TREE_CODE (op2
),
5453 TREE_OPERAND (op2
, 0),
5454 TREE_OPERAND (op2
, 1)))
/* And symmetrically with OP1 as the comparison.  */
5456 if (COMPARISON_CLASS_P (op1
)
5457 && same_bool_comparison_p (op2
, TREE_CODE (op1
),
5458 TREE_OPERAND (op1
, 0),
5459 TREE_OPERAND (op1
, 1)))
5466 /* Forward declarations for some mutually recursive functions. */
/* The and_* / or_* families are dual: each and_* helper may rewrite an
   inverted operand in terms of its or_* counterpart (DeMorgan) and
   vice versa, hence the mutual recursion declared here.  */
5469 and_comparisons_1 (tree type
, enum tree_code code1
, tree op1a
, tree op1b
,
5470 enum tree_code code2
, tree op2a
, tree op2b
);
5472 and_var_with_comparison (tree type
, tree var
, bool invert
,
5473 enum tree_code code2
, tree op2a
, tree op2b
);
5475 and_var_with_comparison_1 (tree type
, gimple
*stmt
,
5476 enum tree_code code2
, tree op2a
, tree op2b
);
5478 or_comparisons_1 (tree
, enum tree_code code1
, tree op1a
, tree op1b
,
5479 enum tree_code code2
, tree op2a
, tree op2b
);
5481 or_var_with_comparison (tree
, tree var
, bool invert
,
5482 enum tree_code code2
, tree op2a
, tree op2b
);
5484 or_var_with_comparison_1 (tree
, gimple
*stmt
,
5485 enum tree_code code2
, tree op2a
, tree op2b
);
5487 /* Helper function for and_comparisons_1: try to simplify the AND of the
5488 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5489 If INVERT is true, invert the value of the VAR before doing the AND.
5490 Return NULL_EXPR if we can't simplify this to a single expression. */
5493 and_var_with_comparison (tree type
, tree var
, bool invert
,
5494 enum tree_code code2
, tree op2a
, tree op2b
)
5497 gimple
*stmt
= SSA_NAME_DEF_STMT (var
);
5499 /* We can only deal with variables whose definitions are assignments. */
5500 if (!is_gimple_assign (stmt
))
5503 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5504 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
5505 Then we only have to consider the simpler non-inverted cases. */
5507 t
= or_var_with_comparison_1 (type
, stmt
,
5508 invert_tree_comparison (code2
, false),
5511 t
= and_var_with_comparison_1 (type
, stmt
, code2
, op2a
, op2b
);
/* canonicalize_bool applies the pending inversion (for the DeMorgan
   path) and guarantees a boolean-typed result or NULL_TREE.  */
5512 return canonicalize_bool (t
, invert
);
5515 /* Try to simplify the AND of the ssa variable defined by the assignment
5516 STMT with the comparison specified by (OP2A CODE2 OP2B).
5517 Return NULL_EXPR if we can't simplify this to a single expression. */
5520 and_var_with_comparison_1 (tree type
, gimple
*stmt
,
5521 enum tree_code code2
, tree op2a
, tree op2b
)
5523 tree var
= gimple_assign_lhs (stmt
);
5524 tree true_test_var
= NULL_TREE
;
5525 tree false_test_var
= NULL_TREE
;
5526 enum tree_code innercode
= gimple_assign_rhs_code (stmt
);
5528 /* Check for identities like (var AND (var == 0)) => false. */
5529 if (TREE_CODE (op2a
) == SSA_NAME
5530 && TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
)
/* (op2a != 0) / (op2a == 1) tests op2a for truth ...  */
5532 if ((code2
== NE_EXPR
&& integer_zerop (op2b
))
5533 || (code2
== EQ_EXPR
&& integer_nonzerop (op2b
)))
5535 true_test_var
= op2a
;
5536 if (var
== true_test_var
)
/* ... while (op2a == 0) / (op2a != 1) tests it for falsehood.  */
5539 else if ((code2
== EQ_EXPR
&& integer_zerop (op2b
))
5540 || (code2
== NE_EXPR
&& integer_nonzerop (op2b
)))
5542 false_test_var
= op2a
;
5543 if (var
== false_test_var
)
5544 return boolean_false_node
;
5548 /* If the definition is a comparison, recurse on it. */
5549 if (TREE_CODE_CLASS (innercode
) == tcc_comparison
)
5551 tree t
= and_comparisons_1 (type
, innercode
,
5552 gimple_assign_rhs1 (stmt
),
5553 gimple_assign_rhs2 (stmt
),
5561 /* If the definition is an AND or OR expression, we may be able to
5562 simplify by reassociating. */
5563 if (TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
5564 && (innercode
== BIT_AND_EXPR
|| innercode
== BIT_IOR_EXPR
))
5566 tree inner1
= gimple_assign_rhs1 (stmt
);
5567 tree inner2
= gimple_assign_rhs2 (stmt
);
5570 tree partial
= NULL_TREE
;
5571 bool is_and
= (innercode
== BIT_AND_EXPR
);
5573 /* Check for boolean identities that don't require recursive examination
5575 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
5576 inner1 AND (inner1 OR inner2) => inner1
5577 !inner1 AND (inner1 AND inner2) => false
5578 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
5579 Likewise for similar cases involving inner2. */
5580 if (inner1
== true_test_var
)
5581 return (is_and
? var
: inner1
);
5582 else if (inner2
== true_test_var
)
5583 return (is_and
? var
: inner2
);
5584 else if (inner1
== false_test_var
)
5586 ? boolean_false_node
5587 : and_var_with_comparison (type
, inner2
, false, code2
, op2a
,
5589 else if (inner2
== false_test_var
)
5591 ? boolean_false_node
5592 : and_var_with_comparison (type
, inner1
, false, code2
, op2a
,
5595 /* Next, redistribute/reassociate the AND across the inner tests.
5596 Compute the first partial result, (inner1 AND (op2a code op2b)) */
5597 if (TREE_CODE (inner1
) == SSA_NAME
5598 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner1
))
5599 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
5600 && (t
= maybe_fold_and_comparisons (type
, gimple_assign_rhs_code (s
),
5601 gimple_assign_rhs1 (s
),
5602 gimple_assign_rhs2 (s
),
5603 code2
, op2a
, op2b
)))
5605 /* Handle the AND case, where we are reassociating:
5606 (inner1 AND inner2) AND (op2a code2 op2b)
5608 If the partial result t is a constant, we win. Otherwise
5609 continue on to try reassociating with the other inner test. */
5612 if (integer_onep (t
))
5614 else if (integer_zerop (t
))
5615 return boolean_false_node
;
5618 /* Handle the OR case, where we are redistributing:
5619 (inner1 OR inner2) AND (op2a code2 op2b)
5620 => (t OR (inner2 AND (op2a code2 op2b))) */
5621 else if (integer_onep (t
))
5622 return boolean_true_node
;
5624 /* Save partial result for later. */
5628 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
5629 if (TREE_CODE (inner2
) == SSA_NAME
5630 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner2
))
5631 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
5632 && (t
= maybe_fold_and_comparisons (type
, gimple_assign_rhs_code (s
),
5633 gimple_assign_rhs1 (s
),
5634 gimple_assign_rhs2 (s
),
5635 code2
, op2a
, op2b
)))
5637 /* Handle the AND case, where we are reassociating:
5638 (inner1 AND inner2) AND (op2a code2 op2b)
5639 => (inner1 AND t) */
5642 if (integer_onep (t
))
5644 else if (integer_zerop (t
))
5645 return boolean_false_node
;
5646 /* If both are the same, we can apply the identity
5648 else if (partial
&& same_bool_result_p (t
, partial
))
5652 /* Handle the OR case. where we are redistributing:
5653 (inner1 OR inner2) AND (op2a code2 op2b)
5654 => (t OR (inner1 AND (op2a code2 op2b)))
5655 => (t OR partial) */
5658 if (integer_onep (t
))
5659 return boolean_true_node
;
5662 /* We already got a simplification for the other
5663 operand to the redistributed OR expression. The
5664 interesting case is when at least one is false.
5665 Or, if both are the same, we can apply the identity
5667 if (integer_zerop (partial
))
5669 else if (integer_zerop (t
))
5671 else if (same_bool_result_p (t
, partial
))
5680 /* Try to simplify the AND of two comparisons defined by
5681 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
5682 If this can be done without constructing an intermediate value,
5683 return the resulting tree; otherwise NULL_TREE is returned.
5684 This function is deliberately asymmetric as it recurses on SSA_DEFs
5685 in the first comparison but not the second. */
5688 and_comparisons_1 (tree type
, enum tree_code code1
, tree op1a
, tree op1b
,
5689 enum tree_code code2
, tree op2a
, tree op2b
)
5691 tree truth_type
= truth_type_for (TREE_TYPE (op1a
));
5693 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
5694 if (operand_equal_p (op1a
, op2a
, 0)
5695 && operand_equal_p (op1b
, op2b
, 0))
5697 /* Result will be either NULL_TREE, or a combined comparison. */
5698 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
5699 TRUTH_ANDIF_EXPR
, code1
, code2
,
5700 truth_type
, op1a
, op1b
);
5705 /* Likewise the swapped case of the above. */
5706 if (operand_equal_p (op1a
, op2b
, 0)
5707 && operand_equal_p (op1b
, op2a
, 0))
5709 /* Result will be either NULL_TREE, or a combined comparison. */
5710 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
5711 TRUTH_ANDIF_EXPR
, code1
,
5712 swap_tree_comparison (code2
),
5713 truth_type
, op1a
, op1b
);
5718 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
5719 NAME's definition is a truth value. See if there are any simplifications
5720 that can be done against the NAME's definition. */
5721 if (TREE_CODE (op1a
) == SSA_NAME
5722 && (code1
== NE_EXPR
|| code1
== EQ_EXPR
)
5723 && (integer_zerop (op1b
) || integer_onep (op1b
)))
/* (NAME == 0) and (NAME != 1) mean the first operand is the logical
   negation of NAME.  */
5725 bool invert
= ((code1
== EQ_EXPR
&& integer_zerop (op1b
))
5726 || (code1
== NE_EXPR
&& integer_onep (op1b
)));
5727 gimple
*stmt
= SSA_NAME_DEF_STMT (op1a
);
5728 switch (gimple_code (stmt
))
5731 /* Try to simplify by copy-propagating the definition. */
5732 return and_var_with_comparison (type
, op1a
, invert
, code2
, op2a
,
5736 /* If every argument to the PHI produces the same result when
5737 ANDed with the second comparison, we win.
5738 Do not do this unless the type is bool since we need a bool
5739 result here anyway. */
5740 if (TREE_CODE (TREE_TYPE (op1a
)) == BOOLEAN_TYPE
)
5742 tree result
= NULL_TREE
;
5744 for (i
= 0; i
= 0; i
< gimple_phi_num_args (stmt
); i
++)
5746 tree arg
= gimple_phi_arg_def (stmt
, i
);
5748 /* If this PHI has itself as an argument, ignore it.
5749 If all the other args produce the same result,
5751 if (arg
== gimple_phi_result (stmt
))
5753 else if (TREE_CODE (arg
) == INTEGER_CST
)
5755 if (invert
? integer_nonzerop (arg
) : integer_zerop (arg
))
5758 result
= boolean_false_node
;
5759 else if (!integer_zerop (result
))
5763 result
= fold_build2 (code2
, boolean_type_node
,
5765 else if (!same_bool_comparison_p (result
,
5769 else if (TREE_CODE (arg
) == SSA_NAME
5770 && !SSA_NAME_IS_DEFAULT_DEF (arg
))
5773 gimple
*def_stmt
= SSA_NAME_DEF_STMT (arg
);
5774 /* In simple cases we can look through PHI nodes,
5775 but we have to be careful with loops.
5777 if (! dom_info_available_p (CDI_DOMINATORS
)
5778 || gimple_bb (def_stmt
) == gimple_bb (stmt
)
5779 || dominated_by_p (CDI_DOMINATORS
,
5780 gimple_bb (def_stmt
),
5783 temp
= and_var_with_comparison (type
, arg
, invert
, code2
,
5789 else if (!same_bool_result_p (result
, temp
))
5805 /* Helper function for maybe_fold_and_comparisons and maybe_fold_or_comparisons
5806 : try to simplify the AND/OR of the ssa variable VAR with the comparison
5807 specified by (OP2A CODE2 OP2B) from match.pd. Return NULL_EXPR if we can't
5808 simplify this to a single expression. As we are going to lower the cost
5809 of building SSA names / gimple stmts significantly, we need to allocate
5810 them ont the stack. This will cause the code to be a bit ugly. */
5813 maybe_fold_comparisons_from_match_pd (tree type
, enum tree_code code
,
5814 enum tree_code code1
,
5815 tree op1a
, tree op1b
,
5816 enum tree_code code2
, tree op2a
,
5819 /* Allocate gimple stmt1 on the stack. */
5821 = (gassign
*) XALLOCAVEC (char, gimple_size (GIMPLE_ASSIGN
, 3));
5822 gimple_init (stmt1
, GIMPLE_ASSIGN
, 3);
5823 gimple_assign_set_rhs_code (stmt1
, code1
);
5824 gimple_assign_set_rhs1 (stmt1
, op1a
);
5825 gimple_assign_set_rhs2 (stmt1
, op1b
);
5827 /* Allocate gimple stmt2 on the stack. */
5829 = (gassign
*) XALLOCAVEC (char, gimple_size (GIMPLE_ASSIGN
, 3));
5830 gimple_init (stmt2
, GIMPLE_ASSIGN
, 3);
5831 gimple_assign_set_rhs_code (stmt2
, code2
);
5832 gimple_assign_set_rhs1 (stmt2
, op2a
);
5833 gimple_assign_set_rhs2 (stmt2
, op2b
);
5835 /* Allocate SSA names(lhs1) on the stack. */
5836 tree lhs1
= (tree
)XALLOCA (tree_ssa_name
);
5837 memset (lhs1
, 0, sizeof (tree_ssa_name
));
5838 TREE_SET_CODE (lhs1
, SSA_NAME
);
5839 TREE_TYPE (lhs1
) = type
;
5840 init_ssa_name_imm_use (lhs1
);
5842 /* Allocate SSA names(lhs2) on the stack. */
5843 tree lhs2
= (tree
)XALLOCA (tree_ssa_name
);
5844 memset (lhs2
, 0, sizeof (tree_ssa_name
));
5845 TREE_SET_CODE (lhs2
, SSA_NAME
);
5846 TREE_TYPE (lhs2
) = type
;
5847 init_ssa_name_imm_use (lhs2
);
5849 gimple_assign_set_lhs (stmt1
, lhs1
);
5850 gimple_assign_set_lhs (stmt2
, lhs2
);
5852 gimple_match_op
op (gimple_match_cond::UNCOND
, code
,
5853 type
, gimple_assign_lhs (stmt1
),
5854 gimple_assign_lhs (stmt2
));
5855 if (op
.resimplify (NULL
, follow_all_ssa_edges
))
5857 if (gimple_simplified_result_is_gimple_val (&op
))
5859 tree res
= op
.ops
[0];
5861 return build2 (code1
, type
, op1a
, op1b
);
5862 else if (res
== lhs2
)
5863 return build2 (code2
, type
, op2a
, op2b
);
5867 else if (op
.code
.is_tree_code ()
5868 && TREE_CODE_CLASS ((tree_code
)op
.code
) == tcc_comparison
)
5870 tree op0
= op
.ops
[0];
5871 tree op1
= op
.ops
[1];
5872 if (op0
== lhs1
|| op0
== lhs2
|| op1
== lhs1
|| op1
== lhs2
)
5873 return NULL_TREE
; /* not simple */
5875 return build2 ((enum tree_code
)op
.code
, op
.type
, op0
, op1
);
5882 /* Try to simplify the AND of two comparisons, specified by
5883 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
5884 If this can be simplified to a single expression (without requiring
5885 introducing more SSA variables to hold intermediate values),
5886 return the resulting tree. Otherwise return NULL_TREE.
5887 If the result expression is non-null, it has boolean type. */
5890 maybe_fold_and_comparisons (tree type
,
5891 enum tree_code code1
, tree op1a
, tree op1b
,
5892 enum tree_code code2
, tree op2a
, tree op2b
)
5894 if (tree t
= and_comparisons_1 (type
, code1
, op1a
, op1b
, code2
, op2a
, op2b
))
5897 if (tree t
= and_comparisons_1 (type
, code2
, op2a
, op2b
, code1
, op1a
, op1b
))
5900 if (tree t
= maybe_fold_comparisons_from_match_pd (type
, BIT_AND_EXPR
, code1
,
5901 op1a
, op1b
, code2
, op2a
,
5908 /* Helper function for or_comparisons_1: try to simplify the OR of the
5909 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
5910 If INVERT is true, invert the value of VAR before doing the OR.
5911 Return NULL_EXPR if we can't simplify this to a single expression. */
5914 or_var_with_comparison (tree type
, tree var
, bool invert
,
5915 enum tree_code code2
, tree op2a
, tree op2b
)
5918 gimple
*stmt
= SSA_NAME_DEF_STMT (var
);
5920 /* We can only deal with variables whose definitions are assignments. */
5921 if (!is_gimple_assign (stmt
))
5924 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
5925 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
5926 Then we only have to consider the simpler non-inverted cases. */
5928 t
= and_var_with_comparison_1 (type
, stmt
,
5929 invert_tree_comparison (code2
, false),
5932 t
= or_var_with_comparison_1 (type
, stmt
, code2
, op2a
, op2b
);
5933 return canonicalize_bool (t
, invert
);
5936 /* Try to simplify the OR of the ssa variable defined by the assignment
5937 STMT with the comparison specified by (OP2A CODE2 OP2B).
5938 Return NULL_EXPR if we can't simplify this to a single expression. */
5941 or_var_with_comparison_1 (tree type
, gimple
*stmt
,
5942 enum tree_code code2
, tree op2a
, tree op2b
)
5944 tree var
= gimple_assign_lhs (stmt
);
5945 tree true_test_var
= NULL_TREE
;
5946 tree false_test_var
= NULL_TREE
;
5947 enum tree_code innercode
= gimple_assign_rhs_code (stmt
);
5949 /* Check for identities like (var OR (var != 0)) => true . */
5950 if (TREE_CODE (op2a
) == SSA_NAME
5951 && TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
)
5953 if ((code2
== NE_EXPR
&& integer_zerop (op2b
))
5954 || (code2
== EQ_EXPR
&& integer_nonzerop (op2b
)))
5956 true_test_var
= op2a
;
5957 if (var
== true_test_var
)
5960 else if ((code2
== EQ_EXPR
&& integer_zerop (op2b
))
5961 || (code2
== NE_EXPR
&& integer_nonzerop (op2b
)))
5963 false_test_var
= op2a
;
5964 if (var
== false_test_var
)
5965 return boolean_true_node
;
5969 /* If the definition is a comparison, recurse on it. */
5970 if (TREE_CODE_CLASS (innercode
) == tcc_comparison
)
5972 tree t
= or_comparisons_1 (type
, innercode
,
5973 gimple_assign_rhs1 (stmt
),
5974 gimple_assign_rhs2 (stmt
),
5982 /* If the definition is an AND or OR expression, we may be able to
5983 simplify by reassociating. */
5984 if (TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
5985 && (innercode
== BIT_AND_EXPR
|| innercode
== BIT_IOR_EXPR
))
5987 tree inner1
= gimple_assign_rhs1 (stmt
);
5988 tree inner2
= gimple_assign_rhs2 (stmt
);
5991 tree partial
= NULL_TREE
;
5992 bool is_or
= (innercode
== BIT_IOR_EXPR
);
5994 /* Check for boolean identities that don't require recursive examination
5996 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
5997 inner1 OR (inner1 AND inner2) => inner1
5998 !inner1 OR (inner1 OR inner2) => true
5999 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
6001 if (inner1
== true_test_var
)
6002 return (is_or
? var
: inner1
);
6003 else if (inner2
== true_test_var
)
6004 return (is_or
? var
: inner2
);
6005 else if (inner1
== false_test_var
)
6008 : or_var_with_comparison (type
, inner2
, false, code2
, op2a
,
6010 else if (inner2
== false_test_var
)
6013 : or_var_with_comparison (type
, inner1
, false, code2
, op2a
,
6016 /* Next, redistribute/reassociate the OR across the inner tests.
6017 Compute the first partial result, (inner1 OR (op2a code op2b)) */
6018 if (TREE_CODE (inner1
) == SSA_NAME
6019 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner1
))
6020 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
6021 && (t
= maybe_fold_or_comparisons (type
, gimple_assign_rhs_code (s
),
6022 gimple_assign_rhs1 (s
),
6023 gimple_assign_rhs2 (s
),
6024 code2
, op2a
, op2b
)))
6026 /* Handle the OR case, where we are reassociating:
6027 (inner1 OR inner2) OR (op2a code2 op2b)
6029 If the partial result t is a constant, we win. Otherwise
6030 continue on to try reassociating with the other inner test. */
6033 if (integer_onep (t
))
6034 return boolean_true_node
;
6035 else if (integer_zerop (t
))
6039 /* Handle the AND case, where we are redistributing:
6040 (inner1 AND inner2) OR (op2a code2 op2b)
6041 => (t AND (inner2 OR (op2a code op2b))) */
6042 else if (integer_zerop (t
))
6043 return boolean_false_node
;
6045 /* Save partial result for later. */
6049 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
6050 if (TREE_CODE (inner2
) == SSA_NAME
6051 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner2
))
6052 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
6053 && (t
= maybe_fold_or_comparisons (type
, gimple_assign_rhs_code (s
),
6054 gimple_assign_rhs1 (s
),
6055 gimple_assign_rhs2 (s
),
6056 code2
, op2a
, op2b
)))
6058 /* Handle the OR case, where we are reassociating:
6059 (inner1 OR inner2) OR (op2a code2 op2b)
6061 => (t OR partial) */
6064 if (integer_zerop (t
))
6066 else if (integer_onep (t
))
6067 return boolean_true_node
;
6068 /* If both are the same, we can apply the identity
6070 else if (partial
&& same_bool_result_p (t
, partial
))
6074 /* Handle the AND case, where we are redistributing:
6075 (inner1 AND inner2) OR (op2a code2 op2b)
6076 => (t AND (inner1 OR (op2a code2 op2b)))
6077 => (t AND partial) */
6080 if (integer_zerop (t
))
6081 return boolean_false_node
;
6084 /* We already got a simplification for the other
6085 operand to the redistributed AND expression. The
6086 interesting case is when at least one is true.
6087 Or, if both are the same, we can apply the identity
6089 if (integer_onep (partial
))
6091 else if (integer_onep (t
))
6093 else if (same_bool_result_p (t
, partial
))
6102 /* Try to simplify the OR of two comparisons defined by
6103 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
6104 If this can be done without constructing an intermediate value,
6105 return the resulting tree; otherwise NULL_TREE is returned.
6106 This function is deliberately asymmetric as it recurses on SSA_DEFs
6107 in the first comparison but not the second. */
6110 or_comparisons_1 (tree type
, enum tree_code code1
, tree op1a
, tree op1b
,
6111 enum tree_code code2
, tree op2a
, tree op2b
)
6113 tree truth_type
= truth_type_for (TREE_TYPE (op1a
));
6115 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
6116 if (operand_equal_p (op1a
, op2a
, 0)
6117 && operand_equal_p (op1b
, op2b
, 0))
6119 /* Result will be either NULL_TREE, or a combined comparison. */
6120 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
6121 TRUTH_ORIF_EXPR
, code1
, code2
,
6122 truth_type
, op1a
, op1b
);
6127 /* Likewise the swapped case of the above. */
6128 if (operand_equal_p (op1a
, op2b
, 0)
6129 && operand_equal_p (op1b
, op2a
, 0))
6131 /* Result will be either NULL_TREE, or a combined comparison. */
6132 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
6133 TRUTH_ORIF_EXPR
, code1
,
6134 swap_tree_comparison (code2
),
6135 truth_type
, op1a
, op1b
);
6140 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
6141 NAME's definition is a truth value. See if there are any simplifications
6142 that can be done against the NAME's definition. */
6143 if (TREE_CODE (op1a
) == SSA_NAME
6144 && (code1
== NE_EXPR
|| code1
== EQ_EXPR
)
6145 && (integer_zerop (op1b
) || integer_onep (op1b
)))
6147 bool invert
= ((code1
== EQ_EXPR
&& integer_zerop (op1b
))
6148 || (code1
== NE_EXPR
&& integer_onep (op1b
)));
6149 gimple
*stmt
= SSA_NAME_DEF_STMT (op1a
);
6150 switch (gimple_code (stmt
))
6153 /* Try to simplify by copy-propagating the definition. */
6154 return or_var_with_comparison (type
, op1a
, invert
, code2
, op2a
,
6158 /* If every argument to the PHI produces the same result when
6159 ORed with the second comparison, we win.
6160 Do not do this unless the type is bool since we need a bool
6161 result here anyway. */
6162 if (TREE_CODE (TREE_TYPE (op1a
)) == BOOLEAN_TYPE
)
6164 tree result
= NULL_TREE
;
6166 for (i
= 0; i
< gimple_phi_num_args (stmt
); i
++)
6168 tree arg
= gimple_phi_arg_def (stmt
, i
);
6170 /* If this PHI has itself as an argument, ignore it.
6171 If all the other args produce the same result,
6173 if (arg
== gimple_phi_result (stmt
))
6175 else if (TREE_CODE (arg
) == INTEGER_CST
)
6177 if (invert
? integer_zerop (arg
) : integer_nonzerop (arg
))
6180 result
= boolean_true_node
;
6181 else if (!integer_onep (result
))
6185 result
= fold_build2 (code2
, boolean_type_node
,
6187 else if (!same_bool_comparison_p (result
,
6191 else if (TREE_CODE (arg
) == SSA_NAME
6192 && !SSA_NAME_IS_DEFAULT_DEF (arg
))
6195 gimple
*def_stmt
= SSA_NAME_DEF_STMT (arg
);
6196 /* In simple cases we can look through PHI nodes,
6197 but we have to be careful with loops.
6199 if (! dom_info_available_p (CDI_DOMINATORS
)
6200 || gimple_bb (def_stmt
) == gimple_bb (stmt
)
6201 || dominated_by_p (CDI_DOMINATORS
,
6202 gimple_bb (def_stmt
),
6205 temp
= or_var_with_comparison (type
, arg
, invert
, code2
,
6211 else if (!same_bool_result_p (result
, temp
))
6227 /* Try to simplify the OR of two comparisons, specified by
6228 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
6229 If this can be simplified to a single expression (without requiring
6230 introducing more SSA variables to hold intermediate values),
6231 return the resulting tree. Otherwise return NULL_TREE.
6232 If the result expression is non-null, it has boolean type. */
6235 maybe_fold_or_comparisons (tree type
,
6236 enum tree_code code1
, tree op1a
, tree op1b
,
6237 enum tree_code code2
, tree op2a
, tree op2b
)
6239 if (tree t
= or_comparisons_1 (type
, code1
, op1a
, op1b
, code2
, op2a
, op2b
))
6242 if (tree t
= or_comparisons_1 (type
, code2
, op2a
, op2b
, code1
, op1a
, op1b
))
6245 if (tree t
= maybe_fold_comparisons_from_match_pd (type
, BIT_IOR_EXPR
, code1
,
6246 op1a
, op1b
, code2
, op2a
,
6253 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6255 Either NULL_TREE, a simplified but non-constant or a constant
6258 ??? This should go into a gimple-fold-inline.h file to be eventually
6259 privatized with the single valueize function used in the various TUs
6260 to avoid the indirect function call overhead. */
6263 gimple_fold_stmt_to_constant_1 (gimple
*stmt
, tree (*valueize
) (tree
),
6264 tree (*gvalueize
) (tree
))
6266 gimple_match_op res_op
;
6267 /* ??? The SSA propagators do not correctly deal with following SSA use-def
6268 edges if there are intermediate VARYING defs. For this reason
6269 do not follow SSA edges here even though SCCVN can technically
6270 just deal fine with that. */
6271 if (gimple_simplify (stmt
, &res_op
, NULL
, gvalueize
, valueize
))
6273 tree res
= NULL_TREE
;
6274 if (gimple_simplified_result_is_gimple_val (&res_op
))
6275 res
= res_op
.ops
[0];
6276 else if (mprts_hook
)
6277 res
= mprts_hook (&res_op
);
6280 if (dump_file
&& dump_flags
& TDF_DETAILS
)
6282 fprintf (dump_file
, "Match-and-simplified ");
6283 print_gimple_expr (dump_file
, stmt
, 0, TDF_SLIM
);
6284 fprintf (dump_file
, " to ");
6285 print_generic_expr (dump_file
, res
);
6286 fprintf (dump_file
, "\n");
6292 location_t loc
= gimple_location (stmt
);
6293 switch (gimple_code (stmt
))
6297 enum tree_code subcode
= gimple_assign_rhs_code (stmt
);
6299 switch (get_gimple_rhs_class (subcode
))
6301 case GIMPLE_SINGLE_RHS
:
6303 tree rhs
= gimple_assign_rhs1 (stmt
);
6304 enum tree_code_class kind
= TREE_CODE_CLASS (subcode
);
6306 if (TREE_CODE (rhs
) == SSA_NAME
)
6308 /* If the RHS is an SSA_NAME, return its known constant value,
6310 return (*valueize
) (rhs
);
6312 /* Handle propagating invariant addresses into address
6314 else if (TREE_CODE (rhs
) == ADDR_EXPR
6315 && !is_gimple_min_invariant (rhs
))
6317 poly_int64 offset
= 0;
6319 base
= get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs
, 0),
6323 && (CONSTANT_CLASS_P (base
)
6324 || decl_address_invariant_p (base
)))
6325 return build_invariant_address (TREE_TYPE (rhs
),
6328 else if (TREE_CODE (rhs
) == CONSTRUCTOR
6329 && TREE_CODE (TREE_TYPE (rhs
)) == VECTOR_TYPE
6330 && known_eq (CONSTRUCTOR_NELTS (rhs
),
6331 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs
))))
6336 nelts
= CONSTRUCTOR_NELTS (rhs
);
6337 tree_vector_builder
vec (TREE_TYPE (rhs
), nelts
, 1);
6338 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs
), i
, val
)
6340 val
= (*valueize
) (val
);
6341 if (TREE_CODE (val
) == INTEGER_CST
6342 || TREE_CODE (val
) == REAL_CST
6343 || TREE_CODE (val
) == FIXED_CST
)
6344 vec
.quick_push (val
);
6349 return vec
.build ();
6351 if (subcode
== OBJ_TYPE_REF
)
6353 tree val
= (*valueize
) (OBJ_TYPE_REF_EXPR (rhs
));
6354 /* If callee is constant, we can fold away the wrapper. */
6355 if (is_gimple_min_invariant (val
))
6359 if (kind
== tcc_reference
)
6361 if ((TREE_CODE (rhs
) == VIEW_CONVERT_EXPR
6362 || TREE_CODE (rhs
) == REALPART_EXPR
6363 || TREE_CODE (rhs
) == IMAGPART_EXPR
)
6364 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == SSA_NAME
)
6366 tree val
= (*valueize
) (TREE_OPERAND (rhs
, 0));
6367 return fold_unary_loc (EXPR_LOCATION (rhs
),
6369 TREE_TYPE (rhs
), val
);
6371 else if (TREE_CODE (rhs
) == BIT_FIELD_REF
6372 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == SSA_NAME
)
6374 tree val
= (*valueize
) (TREE_OPERAND (rhs
, 0));
6375 return fold_ternary_loc (EXPR_LOCATION (rhs
),
6377 TREE_TYPE (rhs
), val
,
6378 TREE_OPERAND (rhs
, 1),
6379 TREE_OPERAND (rhs
, 2));
6381 else if (TREE_CODE (rhs
) == MEM_REF
6382 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == SSA_NAME
)
6384 tree val
= (*valueize
) (TREE_OPERAND (rhs
, 0));
6385 if (TREE_CODE (val
) == ADDR_EXPR
6386 && is_gimple_min_invariant (val
))
6388 tree tem
= fold_build2 (MEM_REF
, TREE_TYPE (rhs
),
6390 TREE_OPERAND (rhs
, 1));
6395 return fold_const_aggregate_ref_1 (rhs
, valueize
);
6397 else if (kind
== tcc_declaration
)
6398 return get_symbol_constant_value (rhs
);
6402 case GIMPLE_UNARY_RHS
:
6405 case GIMPLE_BINARY_RHS
:
6406 /* Translate &x + CST into an invariant form suitable for
6407 further propagation. */
6408 if (subcode
== POINTER_PLUS_EXPR
)
6410 tree op0
= (*valueize
) (gimple_assign_rhs1 (stmt
));
6411 tree op1
= (*valueize
) (gimple_assign_rhs2 (stmt
));
6412 if (TREE_CODE (op0
) == ADDR_EXPR
6413 && TREE_CODE (op1
) == INTEGER_CST
)
6415 tree off
= fold_convert (ptr_type_node
, op1
);
6416 return build_fold_addr_expr_loc
6418 fold_build2 (MEM_REF
,
6419 TREE_TYPE (TREE_TYPE (op0
)),
6420 unshare_expr (op0
), off
));
6423 /* Canonicalize bool != 0 and bool == 0 appearing after
6424 valueization. While gimple_simplify handles this
6425 it can get confused by the ~X == 1 -> X == 0 transform
6426 which we cant reduce to a SSA name or a constant
6427 (and we have no way to tell gimple_simplify to not
6428 consider those transforms in the first place). */
6429 else if (subcode
== EQ_EXPR
6430 || subcode
== NE_EXPR
)
6432 tree lhs
= gimple_assign_lhs (stmt
);
6433 tree op0
= gimple_assign_rhs1 (stmt
);
6434 if (useless_type_conversion_p (TREE_TYPE (lhs
),
6437 tree op1
= (*valueize
) (gimple_assign_rhs2 (stmt
));
6438 op0
= (*valueize
) (op0
);
6439 if (TREE_CODE (op0
) == INTEGER_CST
)
6440 std::swap (op0
, op1
);
6441 if (TREE_CODE (op1
) == INTEGER_CST
6442 && ((subcode
== NE_EXPR
&& integer_zerop (op1
))
6443 || (subcode
== EQ_EXPR
&& integer_onep (op1
))))
6449 case GIMPLE_TERNARY_RHS
:
6451 /* Handle ternary operators that can appear in GIMPLE form. */
6452 tree op0
= (*valueize
) (gimple_assign_rhs1 (stmt
));
6453 tree op1
= (*valueize
) (gimple_assign_rhs2 (stmt
));
6454 tree op2
= (*valueize
) (gimple_assign_rhs3 (stmt
));
6455 return fold_ternary_loc (loc
, subcode
,
6456 gimple_expr_type (stmt
), op0
, op1
, op2
);
6467 gcall
*call_stmt
= as_a
<gcall
*> (stmt
);
6469 if (gimple_call_internal_p (stmt
))
6471 enum tree_code subcode
= ERROR_MARK
;
6472 switch (gimple_call_internal_fn (stmt
))
6474 case IFN_UBSAN_CHECK_ADD
:
6475 subcode
= PLUS_EXPR
;
6477 case IFN_UBSAN_CHECK_SUB
:
6478 subcode
= MINUS_EXPR
;
6480 case IFN_UBSAN_CHECK_MUL
:
6481 subcode
= MULT_EXPR
;
6483 case IFN_BUILTIN_EXPECT
:
6485 tree arg0
= gimple_call_arg (stmt
, 0);
6486 tree op0
= (*valueize
) (arg0
);
6487 if (TREE_CODE (op0
) == INTEGER_CST
)
6494 tree arg0
= gimple_call_arg (stmt
, 0);
6495 tree arg1
= gimple_call_arg (stmt
, 1);
6496 tree op0
= (*valueize
) (arg0
);
6497 tree op1
= (*valueize
) (arg1
);
6499 if (TREE_CODE (op0
) != INTEGER_CST
6500 || TREE_CODE (op1
) != INTEGER_CST
)
6505 /* x * 0 = 0 * x = 0 without overflow. */
6506 if (integer_zerop (op0
) || integer_zerop (op1
))
6507 return build_zero_cst (TREE_TYPE (arg0
));
6510 /* y - y = 0 without overflow. */
6511 if (operand_equal_p (op0
, op1
, 0))
6512 return build_zero_cst (TREE_TYPE (arg0
));
6519 = fold_binary_loc (loc
, subcode
, TREE_TYPE (arg0
), op0
, op1
);
6521 && TREE_CODE (res
) == INTEGER_CST
6522 && !TREE_OVERFLOW (res
))
6527 fn
= (*valueize
) (gimple_call_fn (stmt
));
6528 if (TREE_CODE (fn
) == ADDR_EXPR
6529 && TREE_CODE (TREE_OPERAND (fn
, 0)) == FUNCTION_DECL
6530 && fndecl_built_in_p (TREE_OPERAND (fn
, 0))
6531 && gimple_builtin_call_types_compatible_p (stmt
,
6532 TREE_OPERAND (fn
, 0)))
6534 tree
*args
= XALLOCAVEC (tree
, gimple_call_num_args (stmt
));
6537 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
6538 args
[i
] = (*valueize
) (gimple_call_arg (stmt
, i
));
6539 retval
= fold_builtin_call_array (loc
,
6540 gimple_call_return_type (call_stmt
),
6541 fn
, gimple_call_num_args (stmt
), args
);
6544 /* fold_call_expr wraps the result inside a NOP_EXPR. */
6545 STRIP_NOPS (retval
);
6546 retval
= fold_convert (gimple_call_return_type (call_stmt
),
6559 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
6560 Returns NULL_TREE if folding to a constant is not possible, otherwise
6561 returns a constant according to is_gimple_min_invariant. */
6564 gimple_fold_stmt_to_constant (gimple
*stmt
, tree (*valueize
) (tree
))
6566 tree res
= gimple_fold_stmt_to_constant_1 (stmt
, valueize
);
6567 if (res
&& is_gimple_min_invariant (res
))
6573 /* The following set of functions are supposed to fold references using
6574 their constant initializers. */
6576 /* See if we can find constructor defining value of BASE.
6577 When we know the consructor with constant offset (such as
6578 base is array[40] and we do know constructor of array), then
6579 BIT_OFFSET is adjusted accordingly.
6581 As a special case, return error_mark_node when constructor
6582 is not explicitly available, but it is known to be zero
6583 such as 'static const int a;'. */
6585 get_base_constructor (tree base
, poly_int64_pod
*bit_offset
,
6586 tree (*valueize
)(tree
))
6588 poly_int64 bit_offset2
, size
, max_size
;
6591 if (TREE_CODE (base
) == MEM_REF
)
6593 poly_offset_int boff
= *bit_offset
+ mem_ref_offset (base
) * BITS_PER_UNIT
;
6594 if (!boff
.to_shwi (bit_offset
))
6598 && TREE_CODE (TREE_OPERAND (base
, 0)) == SSA_NAME
)
6599 base
= valueize (TREE_OPERAND (base
, 0));
6600 if (!base
|| TREE_CODE (base
) != ADDR_EXPR
)
6602 base
= TREE_OPERAND (base
, 0);
6605 && TREE_CODE (base
) == SSA_NAME
)
6606 base
= valueize (base
);
6608 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
6609 DECL_INITIAL. If BASE is a nested reference into another
6610 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
6611 the inner reference. */
6612 switch (TREE_CODE (base
))
6617 tree init
= ctor_for_folding (base
);
6619 /* Our semantic is exact opposite of ctor_for_folding;
6620 NULL means unknown, while error_mark_node is 0. */
6621 if (init
== error_mark_node
)
6624 return error_mark_node
;
6628 case VIEW_CONVERT_EXPR
:
6629 return get_base_constructor (TREE_OPERAND (base
, 0),
6630 bit_offset
, valueize
);
6634 base
= get_ref_base_and_extent (base
, &bit_offset2
, &size
, &max_size
,
6636 if (!known_size_p (max_size
) || maybe_ne (size
, max_size
))
6638 *bit_offset
+= bit_offset2
;
6639 return get_base_constructor (base
, bit_offset
, valueize
);
6645 if (CONSTANT_CLASS_P (base
))
6652 /* CTOR is CONSTRUCTOR of an array type. Fold a reference of SIZE bits
6653 to the memory at bit OFFSET. When non-null, TYPE is the expected
6654 type of the reference; otherwise the type of the referenced element
6655 is used instead. When SIZE is zero, attempt to fold a reference to
6656 the entire element which OFFSET refers to. Increment *SUBOFF by
6657 the bit offset of the accessed element. */
6660 fold_array_ctor_reference (tree type
, tree ctor
,
6661 unsigned HOST_WIDE_INT offset
,
6662 unsigned HOST_WIDE_INT size
,
6664 unsigned HOST_WIDE_INT
*suboff
)
6666 offset_int low_bound
;
6667 offset_int elt_size
;
6668 offset_int access_index
;
6669 tree domain_type
= NULL_TREE
;
6670 HOST_WIDE_INT inner_offset
;
6672 /* Compute low bound and elt size. */
6673 if (TREE_CODE (TREE_TYPE (ctor
)) == ARRAY_TYPE
)
6674 domain_type
= TYPE_DOMAIN (TREE_TYPE (ctor
));
6675 if (domain_type
&& TYPE_MIN_VALUE (domain_type
))
6677 /* Static constructors for variably sized objects make no sense. */
6678 if (TREE_CODE (TYPE_MIN_VALUE (domain_type
)) != INTEGER_CST
)
6680 low_bound
= wi::to_offset (TYPE_MIN_VALUE (domain_type
));
6684 /* Static constructors for variably sized objects make no sense. */
6685 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor
)))) != INTEGER_CST
)
6687 elt_size
= wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor
))));
6689 /* When TYPE is non-null, verify that it specifies a constant-sized
6690 access of a multiple of the array element size. Avoid division
6691 by zero below when ELT_SIZE is zero, such as with the result of
6692 an initializer for a zero-length array or an empty struct. */
6695 && (!TYPE_SIZE_UNIT (type
)
6696 || TREE_CODE (TYPE_SIZE_UNIT (type
)) != INTEGER_CST
)))
6699 /* Compute the array index we look for. */
6700 access_index
= wi::udiv_trunc (offset_int (offset
/ BITS_PER_UNIT
),
6702 access_index
+= low_bound
;
6704 /* And offset within the access. */
6705 inner_offset
= offset
% (elt_size
.to_uhwi () * BITS_PER_UNIT
);
6707 unsigned HOST_WIDE_INT elt_sz
= elt_size
.to_uhwi ();
6708 if (size
> elt_sz
* BITS_PER_UNIT
)
6710 /* native_encode_expr constraints. */
6711 if (size
> MAX_BITSIZE_MODE_ANY_MODE
6712 || size
% BITS_PER_UNIT
!= 0
6713 || inner_offset
% BITS_PER_UNIT
!= 0
6714 || elt_sz
> MAX_BITSIZE_MODE_ANY_MODE
/ BITS_PER_UNIT
)
6718 tree val
= get_array_ctor_element_at_index (ctor
, access_index
,
6720 if (!val
&& ctor_idx
>= CONSTRUCTOR_NELTS (ctor
))
6721 return build_zero_cst (type
);
6723 /* native-encode adjacent ctor elements. */
6724 unsigned char buf
[MAX_BITSIZE_MODE_ANY_MODE
/ BITS_PER_UNIT
];
6725 unsigned bufoff
= 0;
6726 offset_int index
= 0;
6727 offset_int max_index
= access_index
;
6728 constructor_elt
*elt
= CONSTRUCTOR_ELT (ctor
, ctor_idx
);
6730 val
= build_zero_cst (TREE_TYPE (TREE_TYPE (ctor
)));
6731 else if (!CONSTANT_CLASS_P (val
))
6735 else if (TREE_CODE (elt
->index
) == RANGE_EXPR
)
6737 index
= wi::to_offset (TREE_OPERAND (elt
->index
, 0));
6738 max_index
= wi::to_offset (TREE_OPERAND (elt
->index
, 1));
6741 index
= max_index
= wi::to_offset (elt
->index
);
6742 index
= wi::umax (index
, access_index
);
6745 if (bufoff
+ elt_sz
> sizeof (buf
))
6746 elt_sz
= sizeof (buf
) - bufoff
;
6747 int len
= native_encode_expr (val
, buf
+ bufoff
, elt_sz
,
6748 inner_offset
/ BITS_PER_UNIT
);
6749 if (len
!= (int) elt_sz
- inner_offset
/ BITS_PER_UNIT
)
6755 if (wi::cmpu (access_index
, index
) == 0)
6757 else if (wi::cmpu (access_index
, max_index
) > 0)
6760 if (ctor_idx
>= CONSTRUCTOR_NELTS (ctor
))
6762 val
= build_zero_cst (TREE_TYPE (TREE_TYPE (ctor
)));
6767 elt
= CONSTRUCTOR_ELT (ctor
, ctor_idx
);
6769 max_index
= access_index
;
6772 else if (TREE_CODE (elt
->index
) == RANGE_EXPR
)
6774 index
= wi::to_offset (TREE_OPERAND (elt
->index
, 0));
6775 max_index
= wi::to_offset (TREE_OPERAND (elt
->index
, 1));
6778 index
= max_index
= wi::to_offset (elt
->index
);
6779 index
= wi::umax (index
, access_index
);
6780 if (wi::cmpu (access_index
, index
) == 0)
6783 val
= build_zero_cst (TREE_TYPE (TREE_TYPE (ctor
)));
6787 while (bufoff
< size
/ BITS_PER_UNIT
);
6789 return native_interpret_expr (type
, buf
, size
/ BITS_PER_UNIT
);
6792 if (tree val
= get_array_ctor_element_at_index (ctor
, access_index
))
6794 if (!size
&& TREE_CODE (val
) != CONSTRUCTOR
)
6796 /* For the final reference to the entire accessed element
6797 (SIZE is zero), reset INNER_OFFSET, disegard TYPE (which
6798 may be null) in favor of the type of the element, and set
6799 SIZE to the size of the accessed element. */
6801 type
= TREE_TYPE (val
);
6802 size
= elt_size
.to_uhwi () * BITS_PER_UNIT
;
6805 *suboff
+= (access_index
* elt_size
* BITS_PER_UNIT
).to_uhwi ();
6806 return fold_ctor_reference (type
, val
, inner_offset
, size
, from_decl
,
6810 /* Memory not explicitly mentioned in constructor is 0 (or
6811 the reference is out of range). */
6812 return type
? build_zero_cst (type
) : NULL_TREE
;
6815 /* CTOR is CONSTRUCTOR of an aggregate or vector. Fold a reference
6816 of SIZE bits to the memory at bit OFFSET. When non-null, TYPE
/* NOTE(review): this block was garbled during extraction -- tokens are split
   across lines and some original lines are missing.  Code tokens were left
   byte-identical; only comments were edited.  */
6817 is the expected type of the reference; otherwise the type of
6818 the referenced member is used instead. When SIZE is zero,
6819 attempt to fold a reference to the entire member which OFFSET
6820 refers to; in this case, increment *SUBOFF by the bit offset
6821 of the accessed member. */
6824 fold_nonarray_ctor_reference (tree type
, tree ctor
,
6825 unsigned HOST_WIDE_INT offset
,
6826 unsigned HOST_WIDE_INT size
,
6828 unsigned HOST_WIDE_INT
*suboff
)
6830 unsigned HOST_WIDE_INT cnt
;
/* Walk every initialized field of the RECORD/UNION constructor looking
   for one that overlaps the requested bit range.  */
6833 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor
), cnt
, cfield
,
6836 tree byte_offset
= DECL_FIELD_OFFSET (cfield
);
6837 tree field_offset
= DECL_FIELD_BIT_OFFSET (cfield
);
6838 tree field_size
= DECL_SIZE (cfield
);
6842 /* Determine the size of the flexible array member from
6843 the size of the initializer provided for it. */
6844 field_size
= TYPE_SIZE (TREE_TYPE (cval
));
6847 /* Variable sized objects in static constructors makes no sense,
6848 but field_size can be NULL for flexible array members. */
6849 gcc_assert (TREE_CODE (field_offset
) == INTEGER_CST
6850 && TREE_CODE (byte_offset
) == INTEGER_CST
6851 && (field_size
!= NULL_TREE
6852 ? TREE_CODE (field_size
) == INTEGER_CST
6853 : TREE_CODE (TREE_TYPE (cfield
)) == ARRAY_TYPE
));
6855 /* Compute bit offset of the field. */
6856 offset_int bitoffset
6857 = (wi::to_offset (field_offset
)
6858 + (wi::to_offset (byte_offset
) << LOG2_BITS_PER_UNIT
));
6859 /* Compute bit offset where the field ends. */
6860 offset_int bitoffset_end
;
6861 if (field_size
!= NULL_TREE
)
6862 bitoffset_end
= bitoffset
+ wi::to_offset (field_size
);
6866 /* Compute the bit offset of the end of the desired access.
6867 As a special case, if the size of the desired access is
6868 zero, assume the access is to the entire field (and let
6869 the caller make any necessary adjustments by storing
6870 the actual bounds of the field in FIELDBOUNDS). */
6871 offset_int access_end
= offset_int (offset
);
6875 access_end
= bitoffset_end
;
6877 /* Is there any overlap between the desired access at
6878 [OFFSET, OFFSET+SIZE) and the offset of the field within
6879 the object at [BITOFFSET, BITOFFSET_END)? */
6880 if (wi::cmps (access_end
, bitoffset
) > 0
6881 && (field_size
== NULL_TREE
6882 || wi::lts_p (offset
, bitoffset_end
)))
6884 *suboff
+= bitoffset
.to_uhwi ();
6886 if (!size
&& TREE_CODE (cval
) != CONSTRUCTOR
)
6888 /* For the final reference to the entire accessed member
6889 (SIZE is zero), reset OFFSET, disregard TYPE (which may
6890 be null) in favor of the type of the member, and set
6891 SIZE to the size of the accessed member. */
6892 offset
= bitoffset
.to_uhwi ();
6893 type
= TREE_TYPE (cval
);
6894 size
= (bitoffset_end
- bitoffset
).to_uhwi ();
6897 /* We do have overlap. Now see if the field is large enough
6898 to cover the access. Give up for accesses that extend
6899 beyond the end of the object or that span multiple fields. */
6900 if (wi::cmps (access_end
, bitoffset_end
) > 0)
6902 if (offset
< bitoffset
)
/* Recurse into the field's own initializer with the offset rebased
   to the start of the field.  */
6905 offset_int inner_offset
= offset_int (offset
) - bitoffset
;
6906 return fold_ctor_reference (type
, cval
,
6907 inner_offset
.to_uhwi (), size
,
/* No initialized field overlaps the access: memory is value-initialized,
   so fold to zero of the requested type.  */
6915 return build_zero_cst (type
);
/* NOTE(review): this block was garbled during extraction -- tokens are split
   across lines and some original lines are missing.  Code tokens were left
   byte-identical; only comments were edited.  */
6918 /* CTOR is value initializing memory. Fold a reference of TYPE and
6919 bit size POLY_SIZE to the memory at bit POLY_OFFSET. When POLY_SIZE
6920 is zero, attempt to fold a reference to the entire subobject
6921 which OFFSET refers to. This is used when folding accesses to
6922 string members of aggregates. When non-null, set *SUBOFF to
6923 the bit offset of the accessed subobject. */
6926 fold_ctor_reference (tree type
, tree ctor
, const poly_uint64
&poly_offset
,
6927 const poly_uint64
&poly_size
, tree from_decl
,
6928 unsigned HOST_WIDE_INT
*suboff
/* = NULL */)
6932 /* We found the field with exact match. */
6934 && useless_type_conversion_p (type
, TREE_TYPE (ctor
))
6935 && known_eq (poly_offset
, 0U))
6936 return canonicalize_constructor_val (unshare_expr (ctor
), from_decl
);
6938 /* The remaining optimizations need a constant size and offset. */
6939 unsigned HOST_WIDE_INT size
, offset
;
6940 if (!poly_size
.is_constant (&size
) || !poly_offset
.is_constant (&offset
))
6943 /* We are at the end of walk, see if we can view convert the result. */
6945 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor
)) && !offset
6946 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
6947 && !compare_tree_int (TYPE_SIZE (type
), size
)
6948 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor
)), size
))
6950 ret
= canonicalize_constructor_val (unshare_expr (ctor
), from_decl
);
6953 ret
= fold_unary (VIEW_CONVERT_EXPR
, type
, ret
);
6955 STRIP_USELESS_TYPE_CONVERSION (ret
);
6959 /* For constants and byte-aligned/sized reads try to go through
6960 native_encode/interpret. */
6961 if (CONSTANT_CLASS_P (ctor
)
6962 && BITS_PER_UNIT
== 8
6963 && offset
% BITS_PER_UNIT
== 0
6964 && offset
/ BITS_PER_UNIT
<= INT_MAX
6965 && size
% BITS_PER_UNIT
== 0
6966 && size
<= MAX_BITSIZE_MODE_ANY_MODE
6967 && can_native_interpret_type_p (type
))
6969 unsigned char buf
[MAX_BITSIZE_MODE_ANY_MODE
/ BITS_PER_UNIT
];
6970 int len
= native_encode_expr (ctor
, buf
, size
/ BITS_PER_UNIT
,
6971 offset
/ BITS_PER_UNIT
);
6973 return native_interpret_expr (type
, buf
, len
);
/* Dispatch CONSTRUCTOR initializers to the array or the record/union
   worker depending on the constructed type.  */
6975 if (TREE_CODE (ctor
) == CONSTRUCTOR
)
6977 unsigned HOST_WIDE_INT dummy
= 0;
6982 if (TREE_CODE (TREE_TYPE (ctor
)) == ARRAY_TYPE
6983 || TREE_CODE (TREE_TYPE (ctor
)) == VECTOR_TYPE
)
6984 ret
= fold_array_ctor_reference (type
, ctor
, offset
, size
,
6987 ret
= fold_nonarray_ctor_reference (type
, ctor
, offset
, size
,
6990 /* Fall back to native_encode_initializer. Needs to be done
6991 only in the outermost fold_ctor_reference call (because it itself
6992 recurses into CONSTRUCTORs) and doesn't update suboff. */
6993 if (ret
== NULL_TREE
6995 && BITS_PER_UNIT
== 8
6996 && offset
% BITS_PER_UNIT
== 0
6997 && offset
/ BITS_PER_UNIT
<= INT_MAX
6998 && size
% BITS_PER_UNIT
== 0
6999 && size
<= MAX_BITSIZE_MODE_ANY_MODE
7000 && can_native_interpret_type_p (type
))
7002 unsigned char buf
[MAX_BITSIZE_MODE_ANY_MODE
/ BITS_PER_UNIT
];
7003 int len
= native_encode_initializer (ctor
, buf
, size
/ BITS_PER_UNIT
,
7004 offset
/ BITS_PER_UNIT
);
7006 return native_interpret_expr (type
, buf
, len
);
/* NOTE(review): this block was garbled during extraction -- tokens are split
   across lines and some original lines are missing.  Code tokens were left
   byte-identical; only comments were edited.  */
7015 /* Return the tree representing the element referenced by T if T is an
7016 ARRAY_REF or COMPONENT_REF into constant aggregates valueizing SSA
7017 names using VALUEIZE. Return NULL_TREE otherwise. */
7020 fold_const_aggregate_ref_1 (tree t
, tree (*valueize
) (tree
))
7022 tree ctor
, idx
, base
;
7023 poly_int64 offset
, size
, max_size
;
/* Never fold through a volatile access.  */
7027 if (TREE_THIS_VOLATILE (t
))
7031 return get_symbol_constant_value (t
);
7033 tem
= fold_read_from_constant_string (t
);
7037 switch (TREE_CODE (t
))
7040 case ARRAY_RANGE_REF
:
7041 /* Constant indexes are handled well by get_base_constructor.
7042 Only special case variable offsets.
7043 FIXME: This code can't handle nested references with variable indexes
7044 (they will be handled only by iteration of ccp). Perhaps we can bring
7045 get_ref_base_and_extent here and make it use a valueize callback. */
7046 if (TREE_CODE (TREE_OPERAND (t
, 1)) == SSA_NAME
7048 && (idx
= (*valueize
) (TREE_OPERAND (t
, 1)))
7049 && poly_int_tree_p (idx
))
7051 tree low_bound
, unit_size
;
7053 /* If the resulting bit-offset is constant, track it. */
7054 if ((low_bound
= array_ref_low_bound (t
),
7055 poly_int_tree_p (low_bound
))
7056 && (unit_size
= array_ref_element_size (t
),
7057 tree_fits_uhwi_p (unit_size
)))
7059 poly_offset_int woffset
7060 = wi::sext (wi::to_poly_offset (idx
)
7061 - wi::to_poly_offset (low_bound
),
7062 TYPE_PRECISION (TREE_TYPE (idx
)));
7063 woffset
*= tree_to_uhwi (unit_size
);
7064 woffset
*= BITS_PER_UNIT
;
7065 if (woffset
.to_shwi (&offset
))
7067 base
= TREE_OPERAND (t
, 0);
7068 ctor
= get_base_constructor (base
, &offset
, valueize
);
7069 /* Empty constructor. Always fold to 0. */
7070 if (ctor
== error_mark_node
)
7071 return build_zero_cst (TREE_TYPE (t
));
7072 /* Out of bound array access. Value is undefined, but don't fold. */
7074 if (maybe_lt (offset
, 0))
7076 /* We cannot determine ctor. */
7079 return fold_ctor_reference (TREE_TYPE (t
), ctor
, offset
,
7080 tree_to_uhwi (unit_size
)
7090 case TARGET_MEM_REF
:
7092 base
= get_ref_base_and_extent (t
, &offset
, &size
, &max_size
, &reverse
);
7093 ctor
= get_base_constructor (base
, &offset
, valueize
);
7095 /* Empty constructor. Always fold to 0. */
7096 if (ctor
== error_mark_node
)
7097 return build_zero_cst (TREE_TYPE (t
));
7098 /* We do not know precise address. */
7099 if (!known_size_p (max_size
) || maybe_ne (max_size
, size
))
7101 /* We cannot determine ctor. */
7105 /* Out of bound array access. Value is undefined, but don't fold. */
7106 if (maybe_lt (offset
, 0))
7109 return fold_ctor_reference (TREE_TYPE (t
), ctor
, offset
, size
,
/* REALPART_EXPR/IMAGPART_EXPR: fold the operand and project out
   the requested part of a COMPLEX_CST.  */
7115 tree c
= fold_const_aggregate_ref_1 (TREE_OPERAND (t
, 0), valueize
);
7116 if (c
&& TREE_CODE (c
) == COMPLEX_CST
)
7117 return fold_build1_loc (EXPR_LOCATION (t
),
7118 TREE_CODE (t
), TREE_TYPE (t
), c
);
/* Convenience wrapper: fold a reference into constant aggregates without
   any SSA-name valueization (NULL callback).  */
7130 fold_const_aggregate_ref (tree t
)
7132 return fold_const_aggregate_ref_1 (t
, NULL
);
/* NOTE(review): this block was garbled during extraction -- tokens are split
   across lines and some original lines are missing.  Code tokens were left
   byte-identical; only comments were edited.  */
7135 /* Lookup virtual method with index TOKEN in a virtual table V
7137 Set CAN_REFER if non-NULL to false if method
7138 is not referable or if the virtual table is ill-formed (such as rewritten
7139 by non-C++ produced symbol). Otherwise just return NULL in that case. */
7142 gimple_get_virt_method_for_vtable (HOST_WIDE_INT token
,
7144 unsigned HOST_WIDE_INT offset
,
7147 tree vtable
= v
, init
, fn
;
7148 unsigned HOST_WIDE_INT size
;
7149 unsigned HOST_WIDE_INT elt_size
, access_index
;
7155 /* First of all double check we have virtual table. */
7156 if (!VAR_P (v
) || !DECL_VIRTUAL_P (v
))
7158 /* Pass down that we lost track of the target. */
7164 init
= ctor_for_folding (v
);
7166 /* The virtual tables should always be born with constructors
7167 and we always should assume that they are available for
7168 folding. At the moment we do not stream them in all cases,
7169 but it should never happen that ctor seem unreachable. */
7171 if (init
== error_mark_node
)
7173 /* Pass down that we lost track of the target. */
7178 gcc_checking_assert (TREE_CODE (TREE_TYPE (v
)) == ARRAY_TYPE
);
7179 size
= tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v
))));
7180 offset
*= BITS_PER_UNIT
;
7181 offset
+= token
* size
;
7183 /* Lookup the value in the constructor that is assumed to be array.
7184 This is equivalent to
7185 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
7186 offset, size, NULL);
7187 but in a constant time. We expect that frontend produced a simple
7188 array without indexed initializers. */
7190 gcc_checking_assert (TREE_CODE (TREE_TYPE (init
)) == ARRAY_TYPE
);
7191 domain_type
= TYPE_DOMAIN (TREE_TYPE (init
));
7192 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type
)));
7193 elt_size
= tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init
))));
7195 access_index
= offset
/ BITS_PER_UNIT
/ elt_size
;
7196 gcc_checking_assert (offset
% (elt_size
* BITS_PER_UNIT
) == 0);
7198 /* The C++ FE can now produce indexed fields, and we check if the indexes
   match.  */
7200 if (access_index
< CONSTRUCTOR_NELTS (init
))
7202 fn
= CONSTRUCTOR_ELT (init
, access_index
)->value
;
7203 tree idx
= CONSTRUCTOR_ELT (init
, access_index
)->index
;
7204 gcc_checking_assert (!idx
|| tree_to_uhwi (idx
) == access_index
);
7210 /* For type inconsistent program we may end up looking up virtual method
7211 in virtual table that does not contain TOKEN entries. We may overrun
7212 the virtual table and pick up a constant or RTTI info pointer.
7213 In any case the call is undefined. */
7215 || (TREE_CODE (fn
) != ADDR_EXPR
&& TREE_CODE (fn
) != FDESC_EXPR
)
7216 || TREE_CODE (TREE_OPERAND (fn
, 0)) != FUNCTION_DECL
)
7217 fn
= builtin_decl_implicit (BUILT_IN_UNREACHABLE
);
7220 fn
= TREE_OPERAND (fn
, 0);
7222 /* When cgraph node is missing and function is not public, we cannot
7223 devirtualize. This can happen in WHOPR when the actual method
7224 ends up in other partition, because we found devirtualization
7225 possibility too late. */
7226 if (!can_refer_decl_in_current_unit_p (fn
, vtable
))
7237 /* Make sure we create a cgraph node for functions we'll reference.
7238 They can be non-existent if the reference comes from an entry
7239 of an external vtable for example. */
7240 cgraph_node::get_create (fn
);
/* NOTE(review): this block was garbled during extraction -- some original
   lines are missing.  Code tokens were left byte-identical; only comments
   were edited.  */
7245 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
7246 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
7247 KNOWN_BINFO carries the binfo describing the true type of
7248 OBJ_TYPE_REF_OBJECT(REF).
7249 Set CAN_REFER if non-NULL to false if method
7250 is not referable or if the virtual table is ill-formed (such as rewritten
7251 by non-C++ produced symbol). Otherwise just return NULL in that case. */
7254 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token
, tree known_binfo
,
7257 unsigned HOST_WIDE_INT offset
;
7260 v
= BINFO_VTABLE (known_binfo
);
7261 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
7265 if (!vtable_pointer_value_to_vtable (v
, &v
, &offset
))
/* Delegate the actual vtable entry lookup to the vtable worker.  */
7271 return gimple_get_virt_method_for_vtable (token
, v
, offset
, can_refer
);
/* NOTE(review): this block was garbled during extraction -- tokens are split
   across lines and some original lines are missing.  Code tokens were left
   byte-identical; only comments were edited.  */
7274 /* Given a pointer value T, return a simplified version of an
7275 indirection through T, or NULL_TREE if no simplification is
7276 possible. Note that the resulting type may be different from
7277 the type pointed to in the sense that it is still compatible
7278 from the langhooks point of view. */
7281 gimple_fold_indirect_ref (tree t
)
7283 tree ptype
= TREE_TYPE (t
), type
= TREE_TYPE (ptype
);
7288 subtype
= TREE_TYPE (sub
);
7289 if (!POINTER_TYPE_P (subtype
)
7290 || TYPE_REF_CAN_ALIAS_ALL (ptype
))
/* Case 1: dereference of the address of an object.  */
7293 if (TREE_CODE (sub
) == ADDR_EXPR
)
7295 tree op
= TREE_OPERAND (sub
, 0);
7296 tree optype
= TREE_TYPE (op
);
7298 if (useless_type_conversion_p (type
, optype
))
7301 /* *(foo *)&fooarray => fooarray[0] */
7302 if (TREE_CODE (optype
) == ARRAY_TYPE
7303 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype
))) == INTEGER_CST
7304 && useless_type_conversion_p (type
, TREE_TYPE (optype
)))
7306 tree type_domain
= TYPE_DOMAIN (optype
);
7307 tree min_val
= size_zero_node
;
7308 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
7309 min_val
= TYPE_MIN_VALUE (type_domain
);
7310 if (TREE_CODE (min_val
) == INTEGER_CST
)
7311 return build4 (ARRAY_REF
, type
, op
, min_val
, NULL_TREE
, NULL_TREE
);
7313 /* *(foo *)&complexfoo => __real__ complexfoo */
7314 else if (TREE_CODE (optype
) == COMPLEX_TYPE
7315 && useless_type_conversion_p (type
, TREE_TYPE (optype
)))
7316 return fold_build1 (REALPART_EXPR
, type
, op
);
7317 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
7318 else if (TREE_CODE (optype
) == VECTOR_TYPE
7319 && useless_type_conversion_p (type
, TREE_TYPE (optype
)))
7321 tree part_width
= TYPE_SIZE (type
);
7322 tree index
= bitsize_int (0);
7323 return fold_build3 (BIT_FIELD_REF
, type
, op
, part_width
, index
);
/* Case 2: dereference of a pointer plus a constant offset.  */
7327 /* *(p + CST) -> ... */
7328 if (TREE_CODE (sub
) == POINTER_PLUS_EXPR
7329 && TREE_CODE (TREE_OPERAND (sub
, 1)) == INTEGER_CST
)
7331 tree addr
= TREE_OPERAND (sub
, 0);
7332 tree off
= TREE_OPERAND (sub
, 1);
7336 addrtype
= TREE_TYPE (addr
);
7338 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
7339 if (TREE_CODE (addr
) == ADDR_EXPR
7340 && TREE_CODE (TREE_TYPE (addrtype
)) == VECTOR_TYPE
7341 && useless_type_conversion_p (type
, TREE_TYPE (TREE_TYPE (addrtype
)))
7342 && tree_fits_uhwi_p (off
))
7344 unsigned HOST_WIDE_INT offset
= tree_to_uhwi (off
);
7345 tree part_width
= TYPE_SIZE (type
);
7346 unsigned HOST_WIDE_INT part_widthi
7347 = tree_to_shwi (part_width
) / BITS_PER_UNIT
;
7348 unsigned HOST_WIDE_INT indexi
= offset
* BITS_PER_UNIT
;
7349 tree index
= bitsize_int (indexi
);
7350 if (known_lt (offset
/ part_widthi
,
7351 TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype
))))
7352 return fold_build3 (BIT_FIELD_REF
, type
, TREE_OPERAND (addr
, 0),
7356 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
7357 if (TREE_CODE (addr
) == ADDR_EXPR
7358 && TREE_CODE (TREE_TYPE (addrtype
)) == COMPLEX_TYPE
7359 && useless_type_conversion_p (type
, TREE_TYPE (TREE_TYPE (addrtype
))))
7361 tree size
= TYPE_SIZE_UNIT (type
);
7362 if (tree_int_cst_equal (size
, off
))
7363 return fold_build1 (IMAGPART_EXPR
, type
, TREE_OPERAND (addr
, 0));
7366 /* *(p + CST) -> MEM_REF <p, CST>. */
7367 if (TREE_CODE (addr
) != ADDR_EXPR
7368 || DECL_P (TREE_OPERAND (addr
, 0)))
7369 return fold_build2 (MEM_REF
, type
,
7371 wide_int_to_tree (ptype
, wi::to_wide (off
)));
7374 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
7375 if (TREE_CODE (TREE_TYPE (subtype
)) == ARRAY_TYPE
7376 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype
)))) == INTEGER_CST
7377 && useless_type_conversion_p (type
, TREE_TYPE (TREE_TYPE (subtype
))))
7380 tree min_val
= size_zero_node
;
7382 sub
= gimple_fold_indirect_ref (sub
);
7384 sub
= build1 (INDIRECT_REF
, TREE_TYPE (subtype
), osub
);
7385 type_domain
= TYPE_DOMAIN (TREE_TYPE (sub
));
7386 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
7387 min_val
= TYPE_MIN_VALUE (type_domain
);
7388 if (TREE_CODE (min_val
) == INTEGER_CST
)
7389 return build4 (ARRAY_REF
, type
, sub
, min_val
, NULL_TREE
, NULL_TREE
);
7395 /* Return true if CODE is an operation that when operating on signed
7396 integer types involves undefined behavior on overflow and the
7397 operation can be expressed with unsigned arithmetic. */
7400 arith_code_with_undefined_signed_overflow (tree_code code
)
/* NOTE(review): only a fragment of this function's switch survived
   extraction -- presumably the PLUS/MINUS/MULT/NEGATE cases and the
   default were dropped; verify against the original source.  */
7409 case POINTER_PLUS_EXPR
:
/* NOTE(review): this block was garbled during extraction -- tokens are split
   across lines and some original lines are missing.  Code tokens were left
   byte-identical; only comments were edited.  */
7416 /* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
7417 operation that can be transformed to unsigned arithmetic by converting
7418 its operand, carrying out the operation in the corresponding unsigned
7419 type and converting the result back to the original type.
7421 Returns a sequence of statements that replace STMT and also contain
7422 a modified form of STMT itself. */
7425 rewrite_to_defined_overflow (gimple
*stmt
)
7427 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
7429 fprintf (dump_file
, "rewriting stmt with undefined signed "
7431 print_gimple_stmt (dump_file
, stmt
, 0, TDF_SLIM
);
7434 tree lhs
= gimple_assign_lhs (stmt
);
7435 tree type
= unsigned_type_for (TREE_TYPE (lhs
));
7436 gimple_seq stmts
= NULL
;
/* ABS on a signed type becomes ABSU in the unsigned domain.  */
7437 if (gimple_assign_rhs_code (stmt
) == ABS_EXPR
)
7438 gimple_assign_set_rhs_code (stmt
, ABSU_EXPR
);
/* Convert each operand to the unsigned type, emitting the conversion
   statements into STMTS.  */
7440 for (unsigned i
= 1; i
< gimple_num_ops (stmt
); ++i
)
7442 tree op
= gimple_op (stmt
, i
);
7443 op
= gimple_convert (&stmts
, type
, op
);
7444 gimple_set_op (stmt
, i
, op
);
7446 gimple_assign_set_lhs (stmt
, make_ssa_name (type
, stmt
));
7447 if (gimple_assign_rhs_code (stmt
) == POINTER_PLUS_EXPR
)
7448 gimple_assign_set_rhs_code (stmt
, PLUS_EXPR
);
7449 gimple_set_modified (stmt
, true);
7450 gimple_seq_add_stmt (&stmts
, stmt
);
/* Convert the unsigned result back to the original LHS type.  */
7451 gimple
*cvt
= gimple_build_assign (lhs
, NOP_EXPR
, gimple_assign_lhs (stmt
));
7452 gimple_seq_add_stmt (&stmts
, cvt
);
/* NOTE(review): extraction dropped some lines of this function (the return
   statements are not visible); code tokens left byte-identical.  */
7458 /* The valueization hook we use for the gimple_build API simplification.
7459 This makes us match fold_buildN behavior by only combining with
7460 statements in the sequence(s) we are currently building. */
7463 gimple_build_valueize (tree op
)
7465 if (gimple_bb (SSA_NAME_DEF_STMT (op
)) == NULL
)
/* NOTE(review): this block was garbled during extraction -- some original
   lines are missing.  Code tokens left byte-identical; only comments
   were edited.  */
7470 /* Build the expression CODE OP0 of type TYPE with location LOC,
7471 simplifying it first if possible. Returns the built
7472 expression value and appends statements possibly defining it to SEQ. */
7476 gimple_build (gimple_seq
*seq
, location_t loc
,
7477 enum tree_code code
, tree type
, tree op0
)
7479 tree res
= gimple_simplify (code
, type
, op0
, seq
, gimple_build_valueize
);
7482 res
= create_tmp_reg_or_ssa_name (type
);
/* These codes need their operand wrapped in the corresponding
   unary tree rather than given directly as an assign operand.  */
7484 if (code
== REALPART_EXPR
7485 || code
== IMAGPART_EXPR
7486 || code
== VIEW_CONVERT_EXPR
)
7487 stmt
= gimple_build_assign (res
, code
, build1 (code
, type
, op0
));
7489 stmt
= gimple_build_assign (res
, code
, op0
);
7490 gimple_set_location (stmt
, loc
);
7491 gimple_seq_add_stmt_without_update (seq
, stmt
);
/* NOTE(review): this block was garbled during extraction -- some original
   lines are missing.  Code tokens left byte-identical; only comments
   were edited.  */
7496 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
7497 simplifying it first if possible. Returns the built
7498 expression value and appends statements possibly defining it to SEQ. */
7502 gimple_build (gimple_seq
*seq
, location_t loc
,
7503 enum tree_code code
, tree type
, tree op0
, tree op1
)
7505 tree res
= gimple_simplify (code
, type
, op0
, op1
, seq
, gimple_build_valueize
);
/* No simplification was possible -- emit an explicit assignment.  */
7508 res
= create_tmp_reg_or_ssa_name (type
);
7509 gimple
*stmt
= gimple_build_assign (res
, code
, op0
, op1
);
7510 gimple_set_location (stmt
, loc
);
7511 gimple_seq_add_stmt_without_update (seq
, stmt
);
/* NOTE(review): this block was garbled during extraction -- some original
   lines are missing.  Code tokens left byte-identical; only comments
   were edited.  */
7516 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
7517 simplifying it first if possible. Returns the built
7518 expression value and appends statements possibly defining it to SEQ. */
7522 gimple_build (gimple_seq
*seq
, location_t loc
,
7523 enum tree_code code
, tree type
, tree op0
, tree op1
, tree op2
)
7525 tree res
= gimple_simplify (code
, type
, op0
, op1
, op2
,
7526 seq
, gimple_build_valueize
);
7529 res
= create_tmp_reg_or_ssa_name (type
);
/* BIT_FIELD_REF needs its operands wrapped in the ternary tree rather
   than passed directly as assign operands.  */
7531 if (code
== BIT_FIELD_REF
)
7532 stmt
= gimple_build_assign (res
, code
,
7533 build3 (code
, type
, op0
, op1
, op2
));
7535 stmt
= gimple_build_assign (res
, code
, op0
, op1
, op2
);
7536 gimple_set_location (stmt
, loc
);
7537 gimple_seq_add_stmt_without_update (seq
, stmt
);
/* NOTE(review): this block was garbled during extraction -- some original
   lines are missing.  Code tokens left byte-identical; only comments
   were edited.  */
7542 /* Build the call FN (ARG0) with a result of type TYPE
7543 (or no result if TYPE is void) with location LOC,
7544 simplifying it first if possible. Returns the built
7545 expression value (or NULL_TREE if TYPE is void) and appends
7546 statements possibly defining it to SEQ. */
7549 gimple_build (gimple_seq
*seq
, location_t loc
, combined_fn fn
,
7550 tree type
, tree arg0
)
7552 tree res
= gimple_simplify (fn
, type
, arg0
, seq
, gimple_build_valueize
);
/* Internal functions get a direct internal call; built-ins go through
   their implicit declaration.  */
7556 if (internal_fn_p (fn
))
7557 stmt
= gimple_build_call_internal (as_internal_fn (fn
), 1, arg0
);
7560 tree decl
= builtin_decl_implicit (as_builtin_fn (fn
));
7561 stmt
= gimple_build_call (decl
, 1, arg0
);
7563 if (!VOID_TYPE_P (type
))
7565 res
= create_tmp_reg_or_ssa_name (type
);
7566 gimple_call_set_lhs (stmt
, res
);
7568 gimple_set_location (stmt
, loc
);
7569 gimple_seq_add_stmt_without_update (seq
, stmt
);
/* NOTE(review): this block was garbled during extraction -- some original
   lines are missing.  Code tokens left byte-identical; only comments
   were edited.  */
7574 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
7575 (or no result if TYPE is void) with location LOC,
7576 simplifying it first if possible. Returns the built
7577 expression value (or NULL_TREE if TYPE is void) and appends
7578 statements possibly defining it to SEQ. */
7581 gimple_build (gimple_seq
*seq
, location_t loc
, combined_fn fn
,
7582 tree type
, tree arg0
, tree arg1
)
7584 tree res
= gimple_simplify (fn
, type
, arg0
, arg1
, seq
, gimple_build_valueize
);
/* Internal functions get a direct internal call; built-ins go through
   their implicit declaration.  */
7588 if (internal_fn_p (fn
))
7589 stmt
= gimple_build_call_internal (as_internal_fn (fn
), 2, arg0
, arg1
);
7592 tree decl
= builtin_decl_implicit (as_builtin_fn (fn
));
7593 stmt
= gimple_build_call (decl
, 2, arg0
, arg1
);
7595 if (!VOID_TYPE_P (type
))
7597 res
= create_tmp_reg_or_ssa_name (type
);
7598 gimple_call_set_lhs (stmt
, res
);
7600 gimple_set_location (stmt
, loc
);
7601 gimple_seq_add_stmt_without_update (seq
, stmt
);
/* NOTE(review): this block was garbled during extraction -- some original
   lines are missing.  Code tokens left byte-identical; only comments
   were edited.  */
7606 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
7607 (or no result if TYPE is void) with location LOC,
7608 simplifying it first if possible. Returns the built
7609 expression value (or NULL_TREE if TYPE is void) and appends
7610 statements possibly defining it to SEQ. */
7613 gimple_build (gimple_seq
*seq
, location_t loc
, combined_fn fn
,
7614 tree type
, tree arg0
, tree arg1
, tree arg2
)
7616 tree res
= gimple_simplify (fn
, type
, arg0
, arg1
, arg2
,
7617 seq
, gimple_build_valueize
);
/* Internal functions get a direct internal call; built-ins go through
   their implicit declaration.  */
7621 if (internal_fn_p (fn
))
7622 stmt
= gimple_build_call_internal (as_internal_fn (fn
),
7623 3, arg0
, arg1
, arg2
);
7626 tree decl
= builtin_decl_implicit (as_builtin_fn (fn
));
7627 stmt
= gimple_build_call (decl
, 3, arg0
, arg1
, arg2
);
7629 if (!VOID_TYPE_P (type
))
7631 res
= create_tmp_reg_or_ssa_name (type
);
7632 gimple_call_set_lhs (stmt
, res
);
7634 gimple_set_location (stmt
, loc
);
7635 gimple_seq_add_stmt_without_update (seq
, stmt
);
/* NOTE(review): extraction dropped some lines here (the no-op return is not
   visible); code tokens left byte-identical.  */
7640 /* Build the conversion (TYPE) OP with a result of type TYPE
7641 with location LOC if such conversion is necessary in GIMPLE,
7642 simplifying it first.
7643 Returns the built expression value and appends
7644 statements possibly defining it to SEQ. */
7647 gimple_convert (gimple_seq
*seq
, location_t loc
, tree type
, tree op
)
7649 if (useless_type_conversion_p (type
, TREE_TYPE (op
)))
7651 return gimple_build (seq
, loc
, NOP_EXPR
, type
, op
);
/* NOTE(review): extraction dropped some lines here (the no-op return is not
   visible); code tokens left byte-identical.  */
7654 /* Build the conversion (ptrofftype) OP with a result of a type
7655 compatible with ptrofftype with location LOC if such conversion
7656 is necessary in GIMPLE, simplifying it first.
7657 Returns the built expression value and appends
7658 statements possibly defining it to SEQ. */
7661 gimple_convert_to_ptrofftype (gimple_seq
*seq
, location_t loc
, tree op
)
7663 if (ptrofftype_p (TREE_TYPE (op
)))
7665 return gimple_convert (seq
, loc
, sizetype
, op
);
/* NOTE(review): this block was garbled during extraction -- some original
   lines are missing.  Code tokens left byte-identical; only comments
   were edited.  */
7668 /* Build a vector of type TYPE in which each element has the value OP.
7669 Return a gimple value for the result, appending any new statements to SEQ. */
7673 gimple_build_vector_from_val (gimple_seq
*seq
, location_t loc
, tree type
,
/* Variable-length vectors with a non-constant element must go through
   VEC_DUPLICATE_EXPR.  */
7676 if (!TYPE_VECTOR_SUBPARTS (type
).is_constant ()
7677 && !CONSTANT_CLASS_P (op
))
7678 return gimple_build (seq
, loc
, VEC_DUPLICATE_EXPR
, type
, op
);
7680 tree res
, vec
= build_vector_from_val (type
, op
);
7681 if (is_gimple_val (vec
))
7683 if (gimple_in_ssa_p (cfun
))
7684 res
= make_ssa_name (type
);
7686 res
= create_tmp_reg (type
);
7687 gimple
*stmt
= gimple_build_assign (res
, vec
);
7688 gimple_set_location (stmt
, loc
);
7689 gimple_seq_add_stmt_without_update (seq
, stmt
);
/* NOTE(review): this block was garbled during extraction -- some original
   lines are missing.  Code tokens left byte-identical; only comments
   were edited.  */
7693 /* Build a vector from BUILDER, handling the case in which some elements
7694 are non-constant. Return a gimple value for the result, appending any
7695 new instructions to SEQ.
7697 BUILDER must not have a stepped encoding on entry. This is because
7698 the function is not geared up to handle the arithmetic that would
7699 be needed in the variable case, and any code building a vector that
7700 is known to be constant should use BUILDER->build () directly. */
7703 gimple_build_vector (gimple_seq
*seq
, location_t loc
,
7704 tree_vector_builder
*builder
)
7706 gcc_assert (builder
->nelts_per_pattern () <= 2);
7707 unsigned int encoded_nelts
= builder
->encoded_nelts ();
/* If any encoded element is non-constant, fall back to emitting an
   explicit CONSTRUCTOR assignment.  */
7708 for (unsigned int i
= 0; i
< encoded_nelts
; ++i
)
7709 if (!TREE_CONSTANT ((*builder
)[i
]))
7711 tree type
= builder
->type ();
7712 unsigned int nelts
= TYPE_VECTOR_SUBPARTS (type
).to_constant ();
7713 vec
<constructor_elt
, va_gc
> *v
;
7714 vec_alloc (v
, nelts
);
7715 for (i
= 0; i
< nelts
; ++i
)
7716 CONSTRUCTOR_APPEND_ELT (v
, NULL_TREE
, builder
->elt (i
));
7719 if (gimple_in_ssa_p (cfun
))
7720 res
= make_ssa_name (type
);
7722 res
= create_tmp_reg (type
);
7723 gimple
*stmt
= gimple_build_assign (res
, build_constructor (type
, v
));
7724 gimple_set_location (stmt
, loc
);
7725 gimple_seq_add_stmt_without_update (seq
, stmt
);
/* All elements constant: build the VECTOR_CST directly.  */
7728 return builder
->build ();
/* NOTE(review): this block was garbled during extraction -- some original
   lines are missing.  Code tokens left byte-identical; only comments
   were edited.  */
7731 /* Return true if the result of assignment STMT is known to be non-negative.
7732 If the return value is based on the assumption that signed overflow is
7733 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7734 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7737 gimple_assign_nonnegative_warnv_p (gimple
*stmt
, bool *strict_overflow_p
,
/* Dispatch to the tree-level predicate matching the RHS class.  */
7740 enum tree_code code
= gimple_assign_rhs_code (stmt
);
7741 switch (get_gimple_rhs_class (code
))
7743 case GIMPLE_UNARY_RHS
:
7744 return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt
),
7745 gimple_expr_type (stmt
),
7746 gimple_assign_rhs1 (stmt
),
7747 strict_overflow_p
, depth
);
7748 case GIMPLE_BINARY_RHS
:
7749 return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt
),
7750 gimple_expr_type (stmt
),
7751 gimple_assign_rhs1 (stmt
),
7752 gimple_assign_rhs2 (stmt
),
7753 strict_overflow_p
, depth
);
7754 case GIMPLE_TERNARY_RHS
:
7756 case GIMPLE_SINGLE_RHS
:
7757 return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt
),
7758 strict_overflow_p
, depth
);
7759 case GIMPLE_INVALID_RHS
:
/* NOTE(review): this block was garbled during extraction -- some original
   lines are missing.  Code tokens left byte-identical; only comments
   were edited.  */
7765 /* Return true if return value of call STMT is known to be non-negative.
7766 If the return value is based on the assumption that signed overflow is
7767 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7768 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7771 gimple_call_nonnegative_warnv_p (gimple
*stmt
, bool *strict_overflow_p
,
/* Extract up to two arguments (NULL_TREE when absent) for the
   combined-fn predicate.  */
7774 tree arg0
= gimple_call_num_args (stmt
) > 0 ?
7775 gimple_call_arg (stmt
, 0) : NULL_TREE
;
7776 tree arg1
= gimple_call_num_args (stmt
) > 1 ?
7777 gimple_call_arg (stmt
, 1) : NULL_TREE
;
7779 return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt
),
7780 gimple_call_combined_fn (stmt
),
7783 strict_overflow_p
, depth
);
/* NOTE(review): this block was garbled during extraction -- some original
   lines are missing.  Code tokens left byte-identical; only comments
   were edited.  */
7786 /* Return true if the result of the PHI node STMT is known to be non-negative.
7787 If the return value is based on the assumption that signed overflow is
7788 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7789 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7792 gimple_phi_nonnegative_warnv_p (gimple
*stmt
, bool *strict_overflow_p
,
/* The PHI result is non-negative only if every incoming argument is.  */
7795 for (unsigned i
= 0; i
< gimple_phi_num_args (stmt
); ++i
)
7797 tree arg
= gimple_phi_arg_def (stmt
, i
);
7798 if (!tree_single_nonnegative_warnv_p (arg
, strict_overflow_p
, depth
+ 1))
/* NOTE(review): this block was garbled during extraction -- some original
   lines are missing.  Code tokens left byte-identical; only comments
   were edited.  */
7804 /* Return true if STMT is known to compute a non-negative value.
7805 If the return value is based on the assumption that signed overflow is
7806 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
7807 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
7810 gimple_stmt_nonnegative_warnv_p (gimple
*stmt
, bool *strict_overflow_p
,
/* Dispatch on the statement kind to the assign/call/phi helper.  */
7813 switch (gimple_code (stmt
))
7816 return gimple_assign_nonnegative_warnv_p (stmt
, strict_overflow_p
,
7819 return gimple_call_nonnegative_warnv_p (stmt
, strict_overflow_p
,
7822 return gimple_phi_nonnegative_warnv_p (stmt
, strict_overflow_p
,
/* NOTE(review): this block was garbled during extraction -- some original
   lines are missing.  Code tokens left byte-identical; only comments
   were edited.  */
7829 /* Return true if the floating-point value computed by assignment STMT
7830 is known to have an integer value. We also allow +Inf, -Inf and NaN
7831 to be considered integer values. Return false for signaling NaN.
7833 DEPTH is the current nesting depth of the query. */
7836 gimple_assign_integer_valued_real_p (gimple
*stmt
, int depth
)
/* Dispatch to the tree-level predicate matching the RHS class.  */
7838 enum tree_code code
= gimple_assign_rhs_code (stmt
);
7839 switch (get_gimple_rhs_class (code
))
7841 case GIMPLE_UNARY_RHS
:
7842 return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt
),
7843 gimple_assign_rhs1 (stmt
), depth
);
7844 case GIMPLE_BINARY_RHS
:
7845 return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt
),
7846 gimple_assign_rhs1 (stmt
),
7847 gimple_assign_rhs2 (stmt
), depth
);
7848 case GIMPLE_TERNARY_RHS
:
7850 case GIMPLE_SINGLE_RHS
:
7851 return integer_valued_real_single_p (gimple_assign_rhs1 (stmt
), depth
);
7852 case GIMPLE_INVALID_RHS
:
/* NOTE(review): this block was garbled during extraction -- some original
   lines are missing.  Code tokens left byte-identical; only comments
   were edited.  */
7858 /* Return true if the floating-point value computed by call STMT is known
7859 to have an integer value. We also allow +Inf, -Inf and NaN to be
7860 considered integer values. Return false for signaling NaN.
7862 DEPTH is the current nesting depth of the query. */
7865 gimple_call_integer_valued_real_p (gimple
*stmt
, int depth
)
/* Extract up to two arguments (NULL_TREE when absent) for the
   combined-fn predicate.  */
7867 tree arg0
= (gimple_call_num_args (stmt
) > 0
7868 ? gimple_call_arg (stmt
, 0)
7870 tree arg1
= (gimple_call_num_args (stmt
) > 1
7871 ? gimple_call_arg (stmt
, 1)
7873 return integer_valued_real_call_p (gimple_call_combined_fn (stmt
),
/* NOTE(review): this block was garbled during extraction -- some original
   lines are missing.  Code tokens left byte-identical; only comments
   were edited.  */
7877 /* Return true if the floating-point result of phi STMT is known to have
7878 an integer value. We also allow +Inf, -Inf and NaN to be considered
7879 integer values. Return false for signaling NaN.
7881 DEPTH is the current nesting depth of the query. */
7884 gimple_phi_integer_valued_real_p (gimple
*stmt
, int depth
)
/* The PHI result is integer-valued only if every incoming argument is.  */
7886 for (unsigned i
= 0; i
< gimple_phi_num_args (stmt
); ++i
)
7888 tree arg
= gimple_phi_arg_def (stmt
, i
);
7889 if (!integer_valued_real_single_p (arg
, depth
+ 1))
7895 /* Return true if the floating-point value computed by STMT is known
7896 to have an integer value. We also allow +Inf, -Inf and NaN to be
7897 considered integer values. Return false for signaling NaN.
7899 DEPTH is the current nesting depth of the query. */
7902 gimple_stmt_integer_valued_real_p (gimple
*stmt
, int depth
)
7904 switch (gimple_code (stmt
))
7907 return gimple_assign_integer_valued_real_p (stmt
, depth
);
7909 return gimple_call_integer_valued_real_p (stmt
, depth
);
7911 return gimple_phi_integer_valued_real_p (stmt
, depth
);