/* Statement simplification on GIMPLE.
   Copyright (C) 2010-2016 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.c.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "gimple-fold.h"
#include "gimple-iterator.h"
#include "tree-into-ssa.h"
#include "tree-ssa-propagate.h"
#include "ipa-utils.h"
#include "tree-ssa-address.h"
#include "langhooks.h"
#include "gimplify-me.h"
#include "gimple-match.h"
#include "gomp-constants.h"
#include "optabs-query.h"
/* Return true if T is a constant and the value cast to a target char
   can be represented by a host char.
   Store the casted char constant in *P if so.  */

static bool
target_char_cst_p (tree t, char *p)
{
  if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
    return false;

  *p = (char)tree_to_uhwi (t);
  return true;
}
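/* Illustrative example (not from the original source): for a call
   strchr (s, 'a') the character argument arrives here as the INTEGER_CST
   97; when target and host char widths agree, (char) 97 is stored in *P
   and the function returns true.  A non-constant or unrepresentable
   argument makes it return false.  */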
/* Return true when DECL can be referenced from current unit.
   FROM_DECL (if non-null) specifies the constructor the variable DECL
   was taken from.
   We can get declarations that are not possible to reference for various
   reasons:

     1) When analyzing C++ virtual tables.
	C++ virtual tables do have known constructors even
	when they are keyed to other compilation unit.
	Those tables can contain pointers to methods and vars
	in other units.  Those methods have both STATIC and EXTERNAL
	set.
     2) In WHOPR mode devirtualization might lead to reference
	to method that was partitioned elsewhere.
	In this case we have static VAR_DECL or FUNCTION_DECL
	that has no corresponding callgraph/varpool node
	declaring the body.
     3) COMDAT functions referred by external vtables that
	we devirtualize only during final compilation stage.
	At this time we already decided that we will not output
	the function body and thus we can't reference the symbol
	directly.  */
static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  if (DECL_ABSTRACT_P (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || !VAR_OR_FUNCTION_DECL_P (decl))
    return true;

  /* Static objects can be referred only if they were not optimized out yet.  */
  if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
    {
      /* Before we start optimizing unreachable code we can be sure all
	 static objects are defined.  */
      if (symtab->function_flags_ready)
	return true;
      snode = symtab_node::get (decl);
      if (!snode || !snode->definition)
	return false;
      node = dyn_cast <cgraph_node *> (snode);
      return !node || !node->global.inlined_to;
    }
  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from initializer of
     external var or var that has been optimized out.  */
  if (!from_decl
      || !VAR_P (from_decl)
      || (!DECL_EXTERNAL (from_decl)
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->definition)
      || (flag_ltrans
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->in_other_partition))
    return true;
  /* We are folding reference from external vtable.  The vtable may refer
     to a symbol keyed to other compilation unit.  The other compilation
     unit may be in separate DSO and the symbol may be hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
    return false;
  /* When function is public, we always can introduce new reference.
     Exception are the COMDAT functions where introducing a direct
     reference implies the need to include the function body in the
     current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We have COMDAT.  We are going to check if we still have definition
     or if the definition is going to be output in other partition.
     Bypass this when gimplifying; all needed functions will be produced.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting to not necessarily give up because the copy will be
     output elsewhere when corresponding vtable is output.
     This is however not possible - the ABI specifies that COMDATs are output
     in units where they are used and when the other unit was compiled with LTO
     it is possible that vtable was kept public while the function itself
     was privatized.  */
  if (!symtab->function_flags_ready)
    return true;

  snode = symtab_node::get (decl);
  if (!snode
      || ((!snode->definition || DECL_EXTERNAL (decl))
	  && (!snode->in_other_partition
	      || (!snode->forced_by_abi && !snode->force_output))))
    return false;
  node = dyn_cast <cgraph_node *> (snode);
  return !node || !node->global.inlined_to;
}
/* Create a temporary for TYPE for a statement STMT.  If the current function
   is in SSA form, a SSA name is created.  Otherwise a temporary register
   is created.  */

static tree
create_tmp_reg_or_ssa_name (tree type, gimple *stmt = NULL)
{
  if (gimple_in_ssa_p (cfun))
    return make_ssa_name (type, stmt);
  else
    return create_tmp_reg (type);
}
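/* Usage sketch (illustrative only): folding code that needs a fresh value
   of a given type writes e.g.

     tree len = create_tmp_reg_or_ssa_name (size_type_node);
     gimple_call_set_lhs (new_stmt, len);

   and transparently gets an SSA name inside SSA passes or a temporary
   register declaration when invoked from the gimplifier.  */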
/* CVAL is value taken from DECL_INITIAL of variable.  Try to transform it into
   acceptable form for is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specifies the variable whose constructor contains
   CVAL.  */

tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  tree orig_cval = cval;
  STRIP_NOPS (cval);
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
	cval = build1_loc (EXPR_LOCATION (cval),
			   ADDR_EXPR, TREE_TYPE (ptr),
			   fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
					ptr,
					fold_convert (ptr_type_node,
						      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
	{
	  base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
	  if (base)
	    TREE_OPERAND (cval, 0) = base;
	}
      else
	base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
	return NULL_TREE;

      if (VAR_OR_FUNCTION_DECL_P (base)
	  && !can_refer_decl_in_current_unit_p (base, from_decl))
	return NULL_TREE;
      if (TREE_TYPE (base) == error_mark_node)
	return NULL_TREE;
      if (VAR_P (base))
	TREE_ADDRESSABLE (base) = 1;
      else if (TREE_CODE (base) == FUNCTION_DECL)
	{
	  /* Make sure we create a cgraph node for functions we'll reference.
	     They can be non-existent if the reference comes from an entry
	     of an external vtable for example.  */
	  cgraph_node::get_create (base);
	}
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  if (TREE_OVERFLOW_P (cval))
    return drop_tree_overflow (cval);
  return orig_cval;
}
/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

tree
get_symbol_constant_value (tree sym)
{
  tree val = ctor_for_folding (sym);
  if (val != error_mark_node)
    {
      if (val)
	{
	  val = canonicalize_constructor_val (unshare_expr (val), sym);
	  if (val && is_gimple_min_invariant (val))
	    return val;
	  else
	    return NULL_TREE;
	}
      /* Variables declared 'const' without an initializer
	 have zero as the initializer if they may not be
	 overridden at link or run time.  */
      if (!val
	  && is_gimple_reg_type (TREE_TYPE (sym)))
	return build_zero_cst (TREE_TYPE (sym));
    }

  return NULL_TREE;
}
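/* Illustrative example (not from the original source): given

     static const int answer = 42;

   a load of 'answer' folds to the INTEGER_CST 42 here, and a 'const'
   register-type variable that lacks an initializer and may not be
   overridden folds to zero.  */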
/* Subroutine of fold_stmt.  We perform several simplifications of the
   memory reference tree EXPR and make sure to re-gimplify them properly
   after propagation of constant addresses.  IS_LHS is true if the
   reference is supposed to be an lvalue.  */

static tree
maybe_fold_reference (tree expr, bool is_lhs)
{
  tree result;

  if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
       || TREE_CODE (expr) == REALPART_EXPR
       || TREE_CODE (expr) == IMAGPART_EXPR)
      && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_unary_loc (EXPR_LOCATION (expr),
			   TREE_CODE (expr),
			   TREE_TYPE (expr),
			   TREE_OPERAND (expr, 0));
  else if (TREE_CODE (expr) == BIT_FIELD_REF
	   && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_ternary_loc (EXPR_LOCATION (expr),
			     TREE_CODE (expr),
			     TREE_TYPE (expr),
			     TREE_OPERAND (expr, 0),
			     TREE_OPERAND (expr, 1),
			     TREE_OPERAND (expr, 2));

  if (!is_lhs
      && (result = fold_const_aggregate_ref (expr))
      && is_gimple_min_invariant (result))
    return result;

  return NULL_TREE;
}
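/* Illustrative examples (not from the original source):
   REALPART_EXPR <__complex__ (1.0, 2.0)> folds to the constant 1.0, and
   with 'static const int a[2] = { 3, 4 };' a read of a[1] folds to 4 via
   fold_const_aggregate_ref.  */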
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple *stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	if (TREE_CLOBBER_P (rhs))
	  return NULL_TREE;

	if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs, false);

	else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
	  {
	    tree val = OBJ_TYPE_REF_EXPR (rhs);
	    if (is_gimple_min_invariant (val))
	      return val;
	    else if (flag_devirtualize && virtual_method_call_p (rhs))
	      {
		bool final;
		vec <cgraph_node *>targets
		  = possible_polymorphic_call_targets (rhs, stmt, &final);
		if (final && targets.length () <= 1 && dbg_cnt (devirt))
		  {
		    if (dump_enabled_p ())
		      {
			location_t loc = gimple_location_safe (stmt);
			dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
					 "resolving virtual function address "
					 "reference to function %s\n",
					 targets.length () == 1
					 ? targets[0]->name ()
					 : "NULL");
		      }
		    if (targets.length () == 1)
		      {
			val = fold_convert (TREE_TYPE (val),
					    build_fold_addr_expr_loc
					      (loc, targets[0]->decl));
			STRIP_USELESS_TYPE_CONVERSION (val);
		      }
		    else
		      /* We cannot use __builtin_unreachable here because it
			 cannot have its address taken.  */
		      val = build_int_cst (TREE_TYPE (val), 0);
		    return val;
		  }
	      }
	  }

	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    tree ref = TREE_OPERAND (rhs, 0);
	    tree tem = maybe_fold_reference (ref, true);
	    if (tem
		&& TREE_CODE (tem) == MEM_REF
		&& integer_zerop (TREE_OPERAND (tem, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
	    else if (tem)
	      result = fold_convert (TREE_TYPE (rhs),
				     build_fold_addr_expr_loc (loc, tem));
	    else if (TREE_CODE (ref) == MEM_REF
		     && integer_zerop (TREE_OPERAND (ref, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));

	    if (result)
	      {
		/* Strip away useless type conversions.  Both the
		   NON_LVALUE_EXPR that may have been added by fold, and
		   "useless" type conversions that might now be apparent
		   due to propagation.  */
		STRIP_USELESS_TYPE_CONVERSION (result);

		if (result != rhs && valid_gimple_rhs_p (result))
		  return result;
	      }
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (! CONSTANT_CLASS_P (val))
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs))
	  return get_symbol_constant_value (rhs);
      }
      break;

    case GIMPLE_UNARY_RHS:
      break;

    case GIMPLE_BINARY_RHS:
      break;

    case GIMPLE_TERNARY_RHS:
      result = fold_ternary_loc (loc, subcode,
				 TREE_TYPE (gimple_assign_lhs (stmt)),
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 gimple_assign_rhs3 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
/* Replace a statement at *SI_P with a sequence of statements in STMTS,
   adjusting the replacement stmts location and virtual operands.
   If the statement has a lhs the last stmt in the sequence is expected
   to assign to that lhs.  */

static void
gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
{
  gimple *stmt = gsi_stmt (*si_p);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  gimple *laststore = NULL;
  for (gimple_stmt_iterator i = gsi_last (stmts);
       !gsi_end_p (i); gsi_prev (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      if ((gimple_assign_single_p (new_stmt)
	   && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	  || (is_gimple_call (new_stmt)
	      && (gimple_call_flags (new_stmt)
		  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
	{
	  tree vdef;
	  if (!laststore)
	    vdef = gimple_vdef (stmt);
	  else
	    vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
	  gimple_set_vdef (new_stmt, vdef);
	  if (vdef && TREE_CODE (vdef) == SSA_NAME)
	    SSA_NAME_DEF_STMT (vdef) = new_stmt;
	  laststore = new_stmt;
	}
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  tree reaching_vuse = gimple_vuse (stmt);
  for (gimple_stmt_iterator i = gsi_start (stmts);
       !gsi_end_p (i); gsi_next (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with exact SSA
	 name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
	gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
	reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
	  && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}
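/* Sketch of the virtual-operand rewiring (illustrative, not from the
   original source): replacing one store with a two-store sequence S1; S2
   yields

     original:  # .MEM_2 = VDEF <.MEM_1>  stmt
     S1:        # .MEM_3 = VDEF <.MEM_1>  (fresh SSA name)
     S2:        # .MEM_2 = VDEF <.MEM_3>  (reuses the original VDEF)

   so statements downstream of the replacement need no updating.  */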
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL,
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple *stmt, *new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  push_gimplify_context (gimple_in_ssa_p (cfun));

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  pop_gimplify_context (NULL);
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  gsi_replace (si_p, gimple_build_nop (), false);
	  return;
	}
    }
  else
    {
      tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
				       GSI_CONTINUE_LINKING);
    }

  pop_gimplify_context (NULL);

  gsi_replace_with_seq_vops (si_p, stmts);
}
/* Replace the call at *GSI with the gimple value VAL.  */

static void
replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  gimple *repl;
  if (lhs)
    {
      if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
	val = fold_convert (TREE_TYPE (lhs), val);
      repl = gimple_build_assign (lhs, val);
    }
  else
    repl = gimple_build_nop ();
  tree vdef = gimple_vdef (stmt);
  if (vdef && TREE_CODE (vdef) == SSA_NAME)
    {
      unlink_stmt_vdef (stmt);
      release_ssa_name (vdef);
    }
  gsi_replace (gsi, repl, false);
}
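/* Illustrative example (not from the original source): folding
   memset (d, c, 0) calls replace_call_with_value (gsi, d), turning

     tmp_1 = memset (d_2, c_3, 0);   into   tmp_1 = d_2;

   or into a GIMPLE_NOP when the result is unused.  */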
/* Replace the call at *GSI with the new call REPL and fold that
   again.  */

static void
replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
{
  gimple *stmt = gsi_stmt (*gsi);
  gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
  gimple_set_location (repl, gimple_location (stmt));
  if (gimple_vdef (stmt)
      && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
    {
      gimple_set_vdef (repl, gimple_vdef (stmt));
      gimple_set_vuse (repl, gimple_vuse (stmt));
      SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
    }
  gsi_replace (gsi, repl, false);
  fold_stmt (gsi);
}
/* Return true if VAR is a VAR_DECL or a component thereof.  */

static bool
var_decl_component_p (tree var)
{
  tree inner = var;
  while (handled_component_p (inner))
    inner = TREE_OPERAND (inner, 0);
  return SSA_VAR_P (inner);
}
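/* Illustrative (not from the original source): for 'x.a[3].b' the loop
   strips the COMPONENT_REFs and the ARRAY_REF down to the VAR_DECL 'x',
   so the predicate returns true; a MEM_REF based on an arbitrary pointer
   is not a handled component and yields false.  */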
/* Fold function call to builtin mem{{,p}cpy,move}.  Return
   false if no simplification can be made.
   If ENDP is 0, return DEST (like memcpy).
   If ENDP is 1, return DEST+LEN (like mempcpy).
   If ENDP is 2, return DEST+LEN-1 (like stpcpy).
   If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
   (memmove).  */

static bool
gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
			       tree dest, tree src, int endp)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  tree len = gimple_call_arg (stmt, 2);
  tree destvar, srcvar;
  location_t loc = gimple_location (stmt);

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      gimple *repl;
      if (gimple_call_lhs (stmt))
	repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
      else
	repl = gimple_build_nop ();
      tree vdef = gimple_vdef (stmt);
      if (vdef && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
      gsi_replace (gsi, repl, false);
      return true;
    }

  /* If SRC and DEST are the same (and not volatile), return
     DEST{,+LEN,+LEN-1}.  */
  if (operand_equal_p (src, dest, 0))
    {
      unlink_stmt_vdef (stmt);
      if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	release_ssa_name (gimple_vdef (stmt));
      if (!lhs)
	{
	  gsi_replace (gsi, gimple_build_nop (), false);
	  return true;
	}
      goto done;
    }
  else
    {
      tree srctype, desttype;
      unsigned int src_align, dest_align;
      tree off0;

      /* Inlining of memcpy/memmove may cause bounds to be lost (if we copy
	 pointers as wide integer) and also may result in huge function
	 size because of inlined bounds copy.  Thus don't inline for
	 functions we want to instrument.  */
      if (flag_check_pointer_bounds
	  && chkp_instrumentable_p (cfun->decl)
	  /* Even if data may contain pointers we can inline if copy
	     less than a pointer size.  */
	  && (!tree_fits_uhwi_p (len)
	      || compare_tree_int (len, POINTER_SIZE_UNITS) >= 0))
	return false;

      /* Build accesses at offset zero with a ref-all character type.  */
      off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
							 ptr_mode, true), 0);

      /* If we can perform the copy efficiently with first doing all loads
	 and then all stores inline it that way.  Currently efficiently
	 means that we can load all the memory into a single integer
	 register which is what MOVE_MAX gives us.  */
      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (tree_fits_uhwi_p (len)
	  && compare_tree_int (len, MOVE_MAX) <= 0
	  /* ???  Don't transform copies from strings with known length this
	     confuses the tree-ssa-strlen.c.  This doesn't handle
	     the case in gcc.dg/strlenopt-8.c which is XFAILed for that
	     reason.  */
	  && !c_strlen (src, 2))
	{
	  unsigned ilen = tree_to_uhwi (len);
	  if (pow2p_hwi (ilen))
	    {
	      tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
	      if (type
		  && TYPE_MODE (type) != BLKmode
		  && (GET_MODE_SIZE (TYPE_MODE (type)) * BITS_PER_UNIT
		      == ilen * 8)
		  /* If the destination pointer is not aligned we must be able
		     to emit an unaligned store.  */
		  && (dest_align >= GET_MODE_ALIGNMENT (TYPE_MODE (type))
		      || !SLOW_UNALIGNED_ACCESS (TYPE_MODE (type), dest_align)
		      || (optab_handler (movmisalign_optab, TYPE_MODE (type))
			  != CODE_FOR_nothing)))
		{
		  tree srctype = type;
		  tree desttype = type;
		  if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
		    srctype = build_aligned_type (type, src_align);
		  tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
		  tree tem = fold_const_aggregate_ref (srcmem);
		  if (tem)
		    srcmem = tem;
		  else if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type))
			   && SLOW_UNALIGNED_ACCESS (TYPE_MODE (type),
						     src_align)
			   && (optab_handler (movmisalign_optab,
					      TYPE_MODE (type))
			       == CODE_FOR_nothing))
		    srcmem = NULL_TREE;
		  if (srcmem)
		    {
		      gimple *new_stmt;
		      if (is_gimple_reg_type (TREE_TYPE (srcmem)))
			{
			  new_stmt = gimple_build_assign (NULL_TREE, srcmem);
			  srcmem
			    = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem),
							  new_stmt);
			  gimple_assign_set_lhs (new_stmt, srcmem);
			  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
			}
		      if (dest_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
			desttype = build_aligned_type (type, dest_align);
		      new_stmt
			= gimple_build_assign (fold_build2 (MEM_REF, desttype,
							    dest, off0),
					       srcmem);
		      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
		      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
		      if (gimple_vdef (new_stmt)
			  && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
			SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
		      if (!lhs)
			{
			  gsi_replace (gsi, new_stmt, false);
			  return true;
			}
		      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
		      goto done;
		    }
		}
	    }
	}

      if (endp == 3)
	{
	  /* Both DEST and SRC must be pointer types.
	     ??? This is what old code did.  Is the testing for pointer types
	     really mandatory?

	     If either SRC is readonly or length is 1, we can use memcpy.  */
	  if (!dest_align || !src_align)
	    return false;
	  if (readonly_data_expr (src)
	      || (tree_fits_uhwi_p (len)
		  && (MIN (src_align, dest_align) / BITS_PER_UNIT
		      >= tree_to_uhwi (len))))
	    {
	      tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
	      if (!fn)
		return false;
	      gimple_call_set_fndecl (stmt, fn);
	      gimple_call_set_arg (stmt, 0, dest);
	      gimple_call_set_arg (stmt, 1, src);
	      fold_stmt (gsi);
	      return true;
	    }

	  /* If *src and *dest can't overlap, optimize into memcpy as well.  */
	  if (TREE_CODE (src) == ADDR_EXPR
	      && TREE_CODE (dest) == ADDR_EXPR)
	    {
	      tree src_base, dest_base, fn;
	      HOST_WIDE_INT src_offset = 0, dest_offset = 0;
	      HOST_WIDE_INT maxsize;

	      srcvar = TREE_OPERAND (src, 0);
	      src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
	      if (src_base == NULL)
		src_base = srcvar;
	      destvar = TREE_OPERAND (dest, 0);
	      dest_base = get_addr_base_and_unit_offset (destvar,
							 &dest_offset);
	      if (dest_base == NULL)
		dest_base = destvar;
	      if (tree_fits_uhwi_p (len))
		maxsize = tree_to_uhwi (len);
	      else
		maxsize = -1;
	      if (SSA_VAR_P (src_base)
		  && SSA_VAR_P (dest_base))
		{
		  if (operand_equal_p (src_base, dest_base, 0)
		      && ranges_overlap_p (src_offset, maxsize,
					   dest_offset, maxsize))
		    return false;
		}
	      else if (TREE_CODE (src_base) == MEM_REF
		       && TREE_CODE (dest_base) == MEM_REF)
		{
		  if (! operand_equal_p (TREE_OPERAND (src_base, 0),
					 TREE_OPERAND (dest_base, 0), 0))
		    return false;
		  offset_int off = mem_ref_offset (src_base) + src_offset;
		  if (!wi::fits_shwi_p (off))
		    return false;
		  src_offset = off.to_shwi ();

		  off = mem_ref_offset (dest_base) + dest_offset;
		  if (!wi::fits_shwi_p (off))
		    return false;
		  dest_offset = off.to_shwi ();
		  if (ranges_overlap_p (src_offset, maxsize,
					dest_offset, maxsize))
		    return false;
		}
	      else
		return false;

	      fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
	      if (!fn)
		return false;
	      gimple_call_set_fndecl (stmt, fn);
	      gimple_call_set_arg (stmt, 0, dest);
	      gimple_call_set_arg (stmt, 1, src);
	      fold_stmt (gsi);
	      return true;
	    }

	  /* If the destination and source do not alias optimize into
	     memcpy as well.  */
	  if ((is_gimple_min_invariant (dest)
	       || TREE_CODE (dest) == SSA_NAME)
	      && (is_gimple_min_invariant (src)
		  || TREE_CODE (src) == SSA_NAME))
	    {
	      ao_ref destr, srcr;
	      ao_ref_init_from_ptr_and_size (&destr, dest, len);
	      ao_ref_init_from_ptr_and_size (&srcr, src, len);
	      if (!refs_may_alias_p_1 (&destr, &srcr, false))
		{
		  tree fn;
		  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
		  if (!fn)
		    return false;
		  gimple_call_set_fndecl (stmt, fn);
		  gimple_call_set_arg (stmt, 0, dest);
		  gimple_call_set_arg (stmt, 1, src);
		  fold_stmt (gsi);
		  return true;
		}
	    }

	  return false;
	}
      if (!tree_fits_shwi_p (len))
	return false;
      /* FIXME:
	 This logic loses for arguments like (type *)malloc (sizeof (type)),
	 since we strip the casts of up to VOID return value from malloc.
	 Perhaps we ought to inherit type from non-VOID argument here?  */
      STRIP_NOPS (src);
      STRIP_NOPS (dest);
      if (!POINTER_TYPE_P (TREE_TYPE (src))
	  || !POINTER_TYPE_P (TREE_TYPE (dest)))
	return false;
      /* In the following try to find a type that is most natural to be
	 used for the memcpy source and destination and that allows
	 the most optimization when memcpy is turned into a plain assignment
	 using that type.  In theory we could always use a char[len] type
	 but that only gains us that the destination and source possibly
	 no longer will have their address taken.  */
      /* As we fold (void *)(p + CST) to (void *)p + CST undo this here.  */
      if (TREE_CODE (src) == POINTER_PLUS_EXPR)
	{
	  tree tem = TREE_OPERAND (src, 0);
	  STRIP_NOPS (tem);
	  if (tem != TREE_OPERAND (src, 0))
	    src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
	}
      if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
	{
	  tree tem = TREE_OPERAND (dest, 0);
	  STRIP_NOPS (tem);
	  if (tem != TREE_OPERAND (dest, 0))
	    dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
	}
      srctype = TREE_TYPE (TREE_TYPE (src));
      if (TREE_CODE (srctype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
	{
	  srctype = TREE_TYPE (srctype);
	  STRIP_NOPS (src);
	  src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
	}
      desttype = TREE_TYPE (TREE_TYPE (dest));
      if (TREE_CODE (desttype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
	{
	  desttype = TREE_TYPE (desttype);
	  STRIP_NOPS (dest);
	  dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
	}
      if (TREE_ADDRESSABLE (srctype)
	  || TREE_ADDRESSABLE (desttype))
	return false;

      /* Make sure we are not copying using a floating-point mode or
	 a type whose size possibly does not match its precision.  */
      if (FLOAT_MODE_P (TYPE_MODE (desttype))
	  || TREE_CODE (desttype) == BOOLEAN_TYPE
	  || TREE_CODE (desttype) == ENUMERAL_TYPE)
	desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
      if (FLOAT_MODE_P (TYPE_MODE (srctype))
	  || TREE_CODE (srctype) == BOOLEAN_TYPE
	  || TREE_CODE (srctype) == ENUMERAL_TYPE)
	srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
      if (!srctype)
	srctype = desttype;
      if (!desttype)
	desttype = srctype;
      if (!srctype)
	return false;

      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (dest_align < TYPE_ALIGN (desttype)
	  || src_align < TYPE_ALIGN (srctype))
	return false;

      destvar = dest;
      STRIP_NOPS (destvar);
      if (TREE_CODE (destvar) == ADDR_EXPR
	  && var_decl_component_p (TREE_OPERAND (destvar, 0))
	  && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
	destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
      else
	destvar = NULL_TREE;

      srcvar = src;
      STRIP_NOPS (srcvar);
      if (TREE_CODE (srcvar) == ADDR_EXPR
	  && var_decl_component_p (TREE_OPERAND (srcvar, 0))
	  && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
	{
	  if (!destvar
	      || src_align >= TYPE_ALIGN (desttype))
	    srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
				  srcvar, off0);
	  else if (!STRICT_ALIGNMENT)
	    {
	      srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
					    src_align);
	      srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
	    }
	  else
	    srcvar = NULL_TREE;
	}
      else
	srcvar = NULL_TREE;

      if (srcvar == NULL_TREE && destvar == NULL_TREE)
	return false;

      if (srcvar == NULL_TREE)
	{
	  STRIP_NOPS (src);
	  if (src_align >= TYPE_ALIGN (desttype))
	    srcvar = fold_build2 (MEM_REF, desttype, src, off0);
	  else
	    {
	      if (STRICT_ALIGNMENT)
		return false;
	      srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
					    src_align);
	      srcvar = fold_build2 (MEM_REF, srctype, src, off0);
	    }
	}
      else if (destvar == NULL_TREE)
	{
	  STRIP_NOPS (dest);
	  if (dest_align >= TYPE_ALIGN (srctype))
	    destvar = fold_build2 (MEM_REF, srctype, dest, off0);
	  else
	    {
	      if (STRICT_ALIGNMENT)
		return false;
	      desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
					     dest_align);
	      destvar = fold_build2 (MEM_REF, desttype, dest, off0);
	    }
	}

      gimple *new_stmt;
      if (is_gimple_reg_type (TREE_TYPE (srcvar)))
	{
	  tree tem = fold_const_aggregate_ref (srcvar);
	  if (tem)
	    srcvar = tem;
	  if (! is_gimple_min_invariant (srcvar))
	    {
	      new_stmt = gimple_build_assign (NULL_TREE, srcvar);
	      srcvar = create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar),
						   new_stmt);
	      gimple_assign_set_lhs (new_stmt, srcvar);
	      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
	      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
	    }
	}
      new_stmt = gimple_build_assign (destvar, srcvar);
      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
      if (gimple_vdef (new_stmt)
	  && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
	SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
      if (!lhs)
	{
	  gsi_replace (gsi, new_stmt, false);
	  return true;
	}
      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
    }

done:
  gimple_seq stmts = NULL;
  if (endp == 0 || endp == 3)
    len = NULL_TREE;
  else if (endp == 2)
    len = gimple_build (&stmts, loc, MINUS_EXPR, TREE_TYPE (len), len,
			ssize_int (1));
  if (endp == 2 || endp == 1)
    {
      len = gimple_convert_to_ptrofftype (&stmts, loc, len);
      dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
			   TREE_TYPE (dest), dest, len);
    }

  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gimple *repl = gimple_build_assign (lhs, dest);
  gsi_replace (gsi, repl, false);
  return true;
}
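/* Illustrative example (not from the original source): with 4-byte ints,
   suitable alignment and MOVE_MAX >= 4, memcpy (&a, &b, 4) is inlined as

     _1 = MEM[(char * {ref-all})&b];
     MEM[(char * {ref-all})&a] = _1;

   while ENDP still decides what the lhs receives: DEST for memcpy (0),
   DEST + LEN for mempcpy (1), DEST + LEN - 1 for stpcpy (2).  */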
/* Fold function call to builtin memset or bzero at *GSI setting the
   memory of size LEN to VAL.  Return whether a simplification was made.  */

static bool
gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree etype;
  unsigned HOST_WIDE_INT length, cval;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
      return true;
    }

  if (! tree_fits_uhwi_p (len))
    return false;

  if (TREE_CODE (c) != INTEGER_CST)
    return false;

  tree dest = gimple_call_arg (stmt, 0);
  tree var = dest;
  if (TREE_CODE (var) != ADDR_EXPR)
    return false;

  var = TREE_OPERAND (var, 0);
  if (TREE_THIS_VOLATILE (var))
    return false;

  etype = TREE_TYPE (var);
  if (TREE_CODE (etype) == ARRAY_TYPE)
    etype = TREE_TYPE (etype);

  if (!INTEGRAL_TYPE_P (etype)
      && !POINTER_TYPE_P (etype))
    return false;

  if (! var_decl_component_p (var))
    return false;

  length = tree_to_uhwi (len);
  if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
      || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
    return false;

  if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
    return false;

  if (integer_zerop (c))
    cval = 0;
  else
    {
      if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
	return false;

      cval = TREE_INT_CST_LOW (c);
      cval &= 0xff;
      cval |= cval << 8;
      cval |= cval << 16;
      cval |= (cval << 31) << 1;
    }

  var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
  gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
  gimple_set_vuse (store, gimple_vuse (stmt));
  tree vdef = gimple_vdef (stmt);
  if (vdef && TREE_CODE (vdef) == SSA_NAME)
    {
      gimple_set_vdef (store, gimple_vdef (stmt));
      SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
    }
  gsi_insert_before (gsi, store, GSI_SAME_STMT);
  if (gimple_call_lhs (stmt))
    {
      gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
      gsi_replace (gsi, asgn, false);
    }
  else
    {
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (gsi);
      gsi_remove (&gsi2, true);
    }

  return true;
}
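/* Worked example (illustrative, not from the original source):
   memset (&i, 0xab, sizeof (int)) with a 4-byte int replicates the byte
   through cval: 0xab -> 0xabab -> 0xabababab, and the call becomes the
   single store

     MEM[(int *)&i] = 0xabababab;  */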
/* Obtain the minimum and maximum string length or minimum and maximum
   value of ARG in LENGTH[0] and LENGTH[1], respectively.
   If ARG is an SSA name variable, follow its use-def chains.  When
   TYPE == 0, if LENGTH[1] is not equal to the length we determine or
   if we are unable to determine the length or value, return false.
   VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be obtained, 1 for maximum string
   length and 2 for maximum value ARG can have.
   When FUZZY is set and the length of a string cannot be determined,
   the function instead considers as the maximum possible length the
   size of a character array it may refer to.  */

static bool
get_range_strlen (tree arg, tree length[2], bitmap *visited, int type,
		  bool fuzzy)
{
  tree var, val;
  gimple *def_stmt;

  /* The minimum and maximum length.  The MAXLEN pointer stays unchanged
     but MINLEN may be cleared during the execution of the function.  */
  tree *minlen = length;
  tree *const maxlen = length + 1;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      if (TREE_CODE (arg) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
	  && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
	{
	  tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_range_strlen (TREE_OPERAND (aop0, 0),
				     length, visited, type, fuzzy);
	}

      if (type == 2)
	{
	  val = arg;
	  if (TREE_CODE (val) != INTEGER_CST
	      || tree_int_cst_sgn (val) < 0)
	    return false;
	}
      else
	val = c_strlen (arg, 1);

      if (!val && fuzzy)
	{
	  if (TREE_CODE (arg) == ADDR_EXPR)
	    return get_range_strlen (TREE_OPERAND (arg, 0), length,
				     visited, type, fuzzy);

	  if (TREE_CODE (arg) == COMPONENT_REF
	      && TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1))) == ARRAY_TYPE)
	    {
	      /* Use the type of the member array to determine the upper
		 bound on the length of the array.  This may be overly
		 optimistic if the array itself isn't NUL-terminated and
		 the caller relies on the subsequent member to contain
		 the NUL.  */
	      arg = TREE_OPERAND (arg, 1);
	      val = TYPE_SIZE_UNIT (TREE_TYPE (arg));
	      if (!val || integer_zerop (val))
		return false;
	      val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
				 integer_one_node);
	      /* Avoid using the array size as the minimum.  */
	      minlen = NULL;
	    }
	}

      if (!val)
	return false;

      if (minlen
	  && (!*minlen
	      || (type > 0
		  && TREE_CODE (*minlen) == INTEGER_CST
		  && TREE_CODE (val) == INTEGER_CST
		  && tree_int_cst_lt (val, *minlen))))
	*minlen = val;

      if (*maxlen)
	{
	  if (type > 0)
	    {
	      if (TREE_CODE (*maxlen) != INTEGER_CST
		  || TREE_CODE (val) != INTEGER_CST)
		return false;

	      if (tree_int_cst_lt (*maxlen, val))
		*maxlen = val;
	      return true;
	    }
	  else if (simple_cst_equal (val, *maxlen) != 1)
	    return false;
	}

      *maxlen = val;
      return true;
    }

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  */
  if (!*visited)
    *visited = BITMAP_ALLOC (NULL);
  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
    case GIMPLE_ASSIGN:
      /* The RHS of the statement defining VAR must either have a
	 constant length or come from another SSA_NAME with a constant
	 length.  */
      if (gimple_assign_single_p (def_stmt)
	  || gimple_assign_unary_nop_p (def_stmt))
	{
	  tree rhs = gimple_assign_rhs1 (def_stmt);
	  return get_range_strlen (rhs, length, visited, type, fuzzy);
	}
      else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
	{
	  tree op2 = gimple_assign_rhs2 (def_stmt);
	  tree op3 = gimple_assign_rhs3 (def_stmt);
	  return get_range_strlen (op2, length, visited, type, fuzzy)
	    && get_range_strlen (op3, length, visited, type, fuzzy);
	}
      return false;

    case GIMPLE_PHI:
      {
	/* All the arguments of the PHI node must have the same constant
	   length.  */
	unsigned i;

	for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
	  {
	    tree arg = gimple_phi_arg (def_stmt, i)->def;

	    /* If this PHI has itself as an argument, we cannot
	       determine the string length of this argument.  However,
	       if we can find a constant string length for the other
	       PHI args then we can still be sure that this is a
	       constant string length.  So be optimistic and just
	       continue with the next argument.  */
	    if (arg == gimple_phi_result (def_stmt))
	      continue;

	    if (!get_range_strlen (arg, length, visited, type, fuzzy))
	      {
		if (fuzzy)
		  *maxlen = build_all_ones_cst (size_type_node);
		else
		  return false;
	      }
	  }
      }
      return true;

    default:
      return false;
    }
}
/* Determine the minimum and maximum value or string length that ARG
   refers to and store each in the first two elements of MINMAXLEN.
   For expressions that point to strings of unknown lengths that are
   character arrays, use the upper bound of the array as the maximum
   length.  For example, given an expression like 'x ? array : "xyz"'
   and array declared as 'char array[8]', MINMAXLEN[0] will be set
   to 3 and MINMAXLEN[1] to 7, the longest string that could be
   stored in array.  */

void get_range_strlen (tree arg, tree minmaxlen[2])
{
  bitmap visited = NULL;

  minmaxlen[0] = NULL_TREE;
  minmaxlen[1] = NULL_TREE;

  get_range_strlen (arg, minmaxlen, &visited, 1, true);

  if (visited)
    BITMAP_FREE (visited);
}
tree
get_maxval_strlen (tree arg, int type)
{
  bitmap visited = NULL;
  tree len[2] = { NULL_TREE, NULL_TREE };
  if (!get_range_strlen (arg, len, &visited, type, false))
    len[1] = NULL_TREE;
  if (visited)
    BITMAP_FREE (visited);

  return len[1];
}
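/* Illustrative (not from the original source): for

     char *p = cond ? "hi" : "world";

   get_maxval_strlen (p, 0) fails because the two branch lengths differ,
   while type 1 (maximum string length) returns 5.  */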
/* Fold function call to builtin strcpy with arguments DEST and SRC.
   If LEN is not NULL, it represents the length of the string to be
   copied.  Return false if no simplification can be made.  */

static bool
gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
			    tree dest, tree src)
{
  location_t loc = gimple_location (gsi_stmt (*gsi));
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (operand_equal_p (src, dest, 0))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (optimize_function_for_size_p (cfun))
    return false;

  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  tree len = get_maxval_strlen (src, 0);
  if (!len)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
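/* Illustrative: when SRC folds to the literal "abc", strcpy (d, "abc")
   becomes memcpy (d, "abc", 4); the added 1 accounts for the terminating
   NUL.  (Example not from the original source.)  */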
/* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
   If SLEN is not NULL, it represents the length of the source string.
   Return false if no simplification can be made.  */

static bool
gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
			     tree dest, tree src, tree len)
{
  location_t loc = gimple_location (gsi_stmt (*gsi));
  tree fn;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  /* We can't compare slen with len as constants below if len is not a
     constant.  */
  if (TREE_CODE (len) != INTEGER_CST)
    return false;

  /* Now, we must be passed a constant src ptr parameter.  */
  tree slen = get_maxval_strlen (src, 0);
  if (!slen || TREE_CODE (slen) != INTEGER_CST)
    return false;

  slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

  /* We do not support simplification of this case, though we do
     support it when expanding trees into RTL.  */
  /* FIXME: generate a call to __builtin_memset.  */
  if (tree_int_cst_lt (slen, len))
    return false;

  /* OK transform into builtin memcpy.  */
  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold function call to builtin strchr or strrchr.
   If both arguments are constant, evaluate and fold the result,
   otherwise simplify str(r)chr (str, 0) into str + strlen (str).
   In general strlen is significantly faster than strchr
   due to being a simpler operation.  */
static bool
gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree str = gimple_call_arg (stmt, 0);
  tree c = gimple_call_arg (stmt, 1);
  location_t loc = gimple_location (stmt);
  const char *p;
  char ch;

  if (!gimple_call_lhs (stmt))
    return false;

  if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
    {
      const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);

      if (p1 == NULL)
	{
	  replace_call_with_value (gsi, integer_zero_node);
	  return true;
	}

      tree len = build_int_cst (size_type_node, p1 - p);
      gimple_seq stmts = NULL;
      gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
					      POINTER_PLUS_EXPR, str, len);
      gimple_seq_add_stmt_without_update (&stmts, new_stmt);
      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  if (!integer_zerop (c))
    return false;

  /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size.  */
  if (optimize_function_for_size_p (cfun))
    {
      tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);

      if (is_strrchr && strchr_fn)
	{
	  gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}

      return false;
    }

  tree len;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);

  if (!strlen_fn)
    return false;

  /* Create newstr = strlen (str).  */
  gimple_seq stmts = NULL;
  gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
  gimple_set_location (new_stmt, loc);
  len = create_tmp_reg_or_ssa_name (size_type_node);
  gimple_call_set_lhs (new_stmt, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);

  /* Create (str p+ strlen (str)).  */
  new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
				  POINTER_PLUS_EXPR, str, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);
  gsi_replace_with_seq_vops (gsi, stmts);
  /* gsi now points at the assignment to the lhs, get a
     stmt iterator to the strlen.
     ???  We can't use gsi_for_stmt as that doesn't work when the
     CFG isn't built yet.  */
  gimple_stmt_iterator gsi2 = *gsi;
  gsi_prev (&gsi2);
  fold_stmt (&gsi2);
  return true;
}
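/* Illustrative: with a non-constant S, strchr (s, 0) is rewritten to

     len_1 = strlen (s);
     lhs_2 = s p+ len_1;

   trading the character search for the usually faster strlen.  */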
/* Simplify a call to the strcat builtin.  DST and SRC are the arguments
   to the call.

   Return false if no simplification was possible, otherwise return true.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static bool
gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);

  const char *p = c_getstr (src);

  /* If the string length is zero, return the dst parameter.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
    return false;

  /* See if we can store by pieces into (dst + strlen(dst)).  */
  tree newdst;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
  tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);

  if (!strlen_fn || !memcpy_fn)
    return false;

  /* If the length of the source string isn't computable don't
     split strcat into strlen and memcpy.  */
  tree len = get_maxval_strlen (src, 0);
  if (! len)
    return false;

  /* Create strlen (dst).  */
  gimple_seq stmts = NULL, stmts2;
  gimple *repl = gimple_build_call (strlen_fn, 1, dst);
  gimple_set_location (repl, loc);
  newdst = create_tmp_reg_or_ssa_name (size_type_node);
  gimple_call_set_lhs (repl, newdst);
  gimple_seq_add_stmt_without_update (&stmts, repl);

  /* Create (dst p+ strlen (dst)).  */
  newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
  newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len,
			build_int_cst (size_type_node, 1));
  len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
  gimple_seq_add_stmt_without_update (&stmts, repl);
  if (gimple_call_lhs (stmt))
    {
      repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      gsi_replace_with_seq_vops (gsi, stmts);
      /* gsi now points at the assignment to the lhs, get a
	 stmt iterator to the memcpy call.
	 ???  We can't use gsi_for_stmt as that doesn't work when the
	 CFG isn't built yet.  */
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (&gsi2);
      fold_stmt (&gsi2);
    }
  else
    {
      gsi_replace_with_seq_vops (gsi, stmts);
      fold_stmt (gsi);
    }
  return true;
}
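/* Illustrative sketch (not from the original source): in a block optimized
   for speed, strcat (d, s) with a source of known length 3 is split into

     tmp_1 = strlen (d);
     newdst_2 = d p+ tmp_1;
     memcpy (newdst_2, s, 4);  */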
/* Fold a call to the __strcat_chk builtin FNDECL.  DEST, SRC, and SIZE
   are the arguments to the call.  */

static bool
gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree size = gimple_call_arg (stmt, 2);
  tree fn;
  const char *p;

  p = c_getstr (src);
  /* If the SRC parameter is "", return DEST.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
    return false;

  /* If __builtin_strcat_chk is used, assume strcat is available.  */
  fn = builtin_decl_explicit (BUILT_IN_STRCAT);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 2, dest, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Simplify a call to the strncat builtin.  */

static bool
gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dst = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  const char *p = c_getstr (src);

  /* If the requested length is zero, or the src parameter string
     length is zero, return the dst parameter.  */
  if (integer_zerop (len) || (p && *p == '\0'))
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  /* If the requested len is greater than or equal to the string
     length, call strcat.  */
  if (TREE_CODE (len) == INTEGER_CST && p
      && compare_tree_int (len, strlen (p)) >= 0)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);

      /* If the replacement _DECL isn't initialized, don't do the
	 transformation.  */
      if (!fn)
	return false;

      gcall *repl = gimple_build_call (fn, 2, dst, src);
      replace_call_with_call_and_fold (gsi, repl);
      return true;
    }

  return false;
}
/* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
   LEN, and SIZE.  */

static bool
gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);
  tree size = gimple_call_arg (stmt, 3);
  tree fn;
  const char *p;

  p = c_getstr (src);
  /* If the SRC parameter is "" or if LEN is 0, return DEST.  */
  if ((p && *p == '\0')
      || integer_zerop (len))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  if (! integer_all_onesp (size))
    {
      tree src_len = c_strlen (src, 1);
      if (src_len
	  && tree_fits_uhwi_p (src_len)
	  && tree_fits_uhwi_p (len)
	  && ! tree_int_cst_lt (len, src_len))
	{
	  /* If LEN >= strlen (SRC), optimize into __strcat_chk.  */
	  fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
	  if (!fn)
	    return false;

	  gimple *repl = gimple_build_call (fn, 3, dest, src, size);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
      return false;
    }

  /* If __builtin_strncat_chk is used, assume strncat is available.  */
  fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold a call to the fputs builtin.  ARG0 and ARG1 are the arguments
   to the call.  IGNORE is true if the value returned
   by the builtin will be ignored.  UNLOCKED is true if this is
   actually a call to fputs_unlocked.  If LEN is non-NULL, it represents
   the known length of the string.  Return false if no simplification
   was possible.  */

static bool
gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
			   tree arg0, tree arg1,
			   bool unlocked)
{
  gimple *stmt = gsi_stmt (*gsi);

  /* If we're using an unlocked function, assume the other unlocked
     functions exist explicitly.  */
  tree const fn_fputc = (unlocked
			 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
			 : builtin_decl_implicit (BUILT_IN_FPUTC));
  tree const fn_fwrite = (unlocked
			  ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
			  : builtin_decl_implicit (BUILT_IN_FWRITE));

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt))
    return false;

  /* Get the length of the string passed to fputs.  If the length
     can't be determined, punt.  */
  tree len = get_maxval_strlen (arg0, 0);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return false;

  switch (compare_tree_int (len, 1))
    {
    case -1: /* length is 0, delete the call entirely.  */
      replace_call_with_value (gsi, integer_zero_node);
      return true;

    case 0: /* length is 1, call fputc.  */
      {
	const char *p = c_getstr (arg0);
	if (p != NULL)
	  {
	    if (!fn_fputc)
	      return false;

	    gimple *repl = gimple_build_call (fn_fputc, 2,
					      build_int_cst
					        (integer_type_node, p[0]), arg1);
	    replace_call_with_call_and_fold (gsi, repl);
	    return true;
	  }
      }
      /* FALLTHROUGH */
    case 1: /* length is greater than 1, call fwrite.  */
      {
	/* If optimizing for size keep fputs.  */
	if (optimize_function_for_size_p (cfun))
	  return false;
	/* New argument list transforming fputs(string, stream) to
	   fwrite(string, 1, len, stream).  */
	if (!fn_fwrite)
	  return false;

	gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
					  size_one_node, len, arg1);
	replace_call_with_call_and_fold (gsi, repl);
	return true;
      }
    default:
      gcc_unreachable ();
    }
}
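/* Illustrative: with the return value unused, fputs ("x", f) becomes
   fputc ('x', f), and fputs ("hello", f) becomes
   fwrite ("hello", 1, 5, f) when not optimizing for size.  */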
/* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   DEST, SRC, LEN, and SIZE are the arguments to the call.
   IGNORE is true if the return value can be ignored.  FCODE is the BUILT_IN_*
   code of the builtin.  If MAXLEN is not NULL, it is maximum length
   passed as third argument.  */

static bool
gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
				tree dest, tree src, tree len, tree size,
				enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST
     (resp. DEST+LEN for __mempcpy_chk).  */
  if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
    {
      if (fcode != BUILT_IN_MEMPCPY_CHK)
	{
	  replace_call_with_value (gsi, dest);
	  return true;
	}
      else
	{
	  gimple_seq stmts = NULL;
	  len = gimple_convert_to_ptrofftype (&stmts, loc, len);
	  tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
				    TREE_TYPE (dest), dest, len);
	  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
	  replace_call_with_value (gsi, temp);
	  return true;
	}
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (len, 2);
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
		{
		  /* (void) __mempcpy_chk () can be optimized into
		     (void) __memcpy_chk ().  */
		  fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
		  if (!fn)
		    return false;

		  gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}
	      return false;
	    }
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  fn = NULL_TREE;
  /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
     mem{cpy,pcpy,move,set} is available.  */
  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
      break;
    case BUILT_IN_MEMPCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
      break;
    case BUILT_IN_MEMMOVE_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
      break;
    case BUILT_IN_MEMSET_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMSET);
      break;
    default:
      break;
    }

  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
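/* Illustrative: __memcpy_chk (d, s, n, (size_t) -1) carries no usable
   object-size information and is folded straight to memcpy (d, s, n);
   with a known SIZE the fold only happens when N (or its maximum) is
   proven to fit.  */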
/* Fold a call to the __st[rp]cpy_chk builtin.
   DEST, SRC, and SIZE are the arguments to the call.
   IGNORE is true if the return value can be ignored.  FCODE is the BUILT_IN_*
   code of the builtin.  If MAXLEN is not NULL, it is maximum length of
   strings passed as second argument.  */

static bool
gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
				tree dest,
				tree src, tree size,
				enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree len, fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (src, 1);
  if (! integer_all_onesp (size))
    {
      len = c_strlen (src, 1);
      if (! len || ! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_STPCPY_CHK)
		{
		  if (! ignore)
		    return false;

		  /* If return value of __stpcpy_chk is ignored,
		     optimize into __strcpy_chk.  */
		  fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
		  if (!fn)
		    return false;

		  gimple *repl = gimple_build_call (fn, 3, dest, src, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}

	      if (! len || TREE_SIDE_EFFECTS (len))
		return false;

	      /* If c_strlen returned something, but not a constant,
		 transform __strcpy_chk into __memcpy_chk.  */
	      fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	      if (!fn)
		return false;

	      gimple_seq stmts = NULL;
	      len = gimple_convert (&stmts, loc, size_type_node, len);
	      len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
				  build_int_cst (size_type_node, 1));
	      gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
	      gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }
	}
      else
	maxlen = len;

      if (! tree_int_cst_lt (maxlen, size))
	return false;
    }

  /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
			      ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 2, dest, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold a call to the __st{r,p}ncpy_chk builtin.  DEST, SRC, LEN, and SIZE
   are the arguments to the call.  If MAXLEN is not NULL, it is maximum
   length passed as third argument.  IGNORE is true if return value can be
   ignored.  FCODE is the BUILT_IN_* code of the builtin.  */

static bool
gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
				 tree dest, tree src,
				 tree len, tree size,
				 enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
    {
      /* If return value of __stpncpy_chk is ignored,
	 optimize into __strncpy_chk.  */
      fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
      if (fn)
	{
	  gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (len, 2);
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    return false;
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
			      ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold function call to builtin stpcpy with arguments DEST and SRC.
   Return true if a simplification was made, false otherwise.  */

static bool
gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  location_t loc = gimple_location (stmt);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree fn, len, lenp1;

  /* If the result is unused, replace stpcpy with strcpy.  */
  if (gimple_call_lhs (stmt) == NULL_TREE)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;
      gimple_call_set_fndecl (stmt, fn);
      fold_stmt (gsi);
      return true;
    }

  len = c_strlen (src, 1);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return false;

  if (optimize_function_for_size_p (cfun)
      /* If length is zero it's small enough.  */
      && !integer_zerop (len))
    return false;

  /* If the source has a known length replace stpcpy with memcpy.  */
  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  gimple_seq stmts = NULL;
  tree tem = gimple_convert (&stmts, loc, size_type_node, len);
  lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
			tem, build_int_cst (size_type_node, 1));
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
  gimple_set_vuse (repl, gimple_vuse (stmt));
  gimple_set_vdef (repl, gimple_vdef (stmt));
  if (gimple_vdef (repl)
      && TREE_CODE (gimple_vdef (repl)) == SSA_NAME)
    SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
  gsi_insert_before (gsi, repl, GSI_SAME_STMT);
  /* Replace the result with dest + len.  */
  stmts = NULL;
  tem = gimple_convert (&stmts, loc, sizetype, len);
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
				      POINTER_PLUS_EXPR, dest, tem);
  gsi_replace (gsi, ret, false);
  /* Finally fold the memcpy call.  */
  gimple_stmt_iterator gsi2 = *gsi;
  gsi_prev (&gsi2);
  fold_stmt (&gsi2);
  return true;
}
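
/* As a sketch, for a constant-length source the code above turns

     p = __builtin_stpcpy (dst, "abc");

   into roughly

     memcpy (dst, "abc", 4);
     p = dst + 3;

   copying the terminating NUL explicitly and computing the returned
   end pointer directly from the known length.  */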
/* Fold a call to __{,v}snprintf_chk referenced by GSI.  Return true if the
   call was simplified into a plain {,v}snprintf call, false if a normal
   call should be emitted instead.  FCODE is either BUILT_IN_SNPRINTF_CHK
   or BUILT_IN_VSNPRINTF_CHK.  */

static bool
gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
				  enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call.  */
  if (gimple_call_num_args (stmt) < 5)
    return false;

  dest = gimple_call_arg (stmt, 0);
  len = gimple_call_arg (stmt, 1);
  flag = gimple_call_arg (stmt, 2);
  size = gimple_call_arg (stmt, 3);
  fmt = gimple_call_arg (stmt, 4);

  if (! tree_fits_uhwi_p (size))
    return false;

  if (! integer_all_onesp (size))
    {
      tree maxlen = get_maxval_strlen (len, 2);
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    return false;
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  if (!init_target_chars ())
    return false;

  /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      fmt_str = c_getstr (fmt);
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
     available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
			      ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 5 arguments by 3,
     retaining trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, len);
  gimple_call_set_arg (stmt, 2, fmt);
  for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
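
/* E.g. __builtin___snprintf_chk (buf, n, 0, bufsize, "%d", i) with a
   sufficiently large constant BUFSIZE simply drops the FLAG and SIZE
   arguments and becomes snprintf (buf, n, "%d", i).  */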
/* Fold a call to __{,v}sprintf_chk referenced by GSI.  Return true if the
   call was simplified into a plain {,v}sprintf call, false if a normal
   call should be emitted instead.  FCODE is either BUILT_IN_SPRINTF_CHK
   or BUILT_IN_VSPRINTF_CHK.  */

static bool
gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
				 enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;
  unsigned nargs = gimple_call_num_args (stmt);

  /* Verify the required arguments in the original call.  */
  if (nargs < 4)
    return false;
  dest = gimple_call_arg (stmt, 0);
  flag = gimple_call_arg (stmt, 1);
  size = gimple_call_arg (stmt, 2);
  fmt = gimple_call_arg (stmt, 3);

  if (! tree_fits_uhwi_p (size))
    return false;

  len = NULL_TREE;

  if (!init_target_chars ())
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str != NULL)
    {
      /* If the format doesn't contain % args or %%, we know the size.  */
      if (strchr (fmt_str, target_percent) == 0)
	{
	  if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
	    len = build_int_cstu (size_type_node, strlen (fmt_str));
	}
      /* If the format is "%s" and first ... argument is a string literal,
	 we know the size too.  */
      else if (fcode == BUILT_IN_SPRINTF_CHK
	       && strcmp (fmt_str, target_percent_s) == 0)
	{
	  tree arg;

	  if (nargs == 5)
	    {
	      arg = gimple_call_arg (stmt, 4);
	      if (POINTER_TYPE_P (TREE_TYPE (arg)))
		{
		  len = c_strlen (arg, 1);
		  if (! len || ! tree_fits_uhwi_p (len))
		    len = NULL_TREE;
		}
	    }
	}
    }

  if (! integer_all_onesp (size))
    {
      if (! len || ! tree_int_cst_lt (len, size))
	return false;
    }

  /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
			      ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 4 arguments by 2,
     retaining trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, fmt);
  for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
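
/* Likewise __builtin___sprintf_chk (buf, 0, bufsize, "%s", s) becomes
   sprintf (buf, "%s", s) when the length of S is known to fit within
   BUFSIZE, dropping the FLAG and SIZE arguments.  */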
/* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
   ORIG may be null if this is a 2-argument call.  We don't attempt to
   simplify calls with more than 3 arguments.

   Return true if a simplification was made, false otherwise.  */

static bool
gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree fmt = gimple_call_arg (stmt, 1);
  tree orig = NULL_TREE;
  const char *fmt_str = NULL;

  /* Verify the required arguments in the original call.  We deal with two
     types of sprintf() calls: 'sprintf (str, fmt)' and
     'sprintf (dest, "%s", orig)'.  */
  if (gimple_call_num_args (stmt) > 3)
    return false;

  if (gimple_call_num_args (stmt) == 3)
    orig = gimple_call_arg (stmt, 2);

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);

      if (!fn)
	return false;

      /* Don't optimize sprintf (buf, "abc", ptr++).  */
      if (orig)
	return false;

      /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
	 'format' is known to contain no % formats.  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, fmt);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  repl = gimple_build_assign (gimple_call_lhs (stmt),
				      build_int_cst (integer_type_node,
						     strlen (fmt_str)));
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the strcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn;
      fn = builtin_decl_implicit (BUILT_IN_STRCPY);

      if (!fn)
	return false;

      /* Don't crash on sprintf (str1, "%s").  */
      if (!orig)
	return false;

      tree orig_len = NULL_TREE;
      if (gimple_call_lhs (stmt))
	{
	  orig_len = get_maxval_strlen (orig, 0);
	  if (!orig_len)
	    return false;
	}

      /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2).  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, orig);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  if (!useless_type_conversion_p (integer_type_node,
					  TREE_TYPE (orig_len)))
	    orig_len = fold_convert (integer_type_node, orig_len);
	  repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the strcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }
  return false;
}
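
/* Source-level view of the two folds above:

     sprintf (buf, "hello");     =>  strcpy (buf, "hello");  [result 5]
     sprintf (buf, "%s", str);   =>  strcpy (buf, str);

   where a used return value is replaced by the known result length.  */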
/* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
   FMT, and ORIG.  ORIG may be null if this is a 3-argument call.  We don't
   attempt to simplify calls with more than 4 arguments.

   Return true if a simplification was made, false otherwise.  */

static bool
gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest = gimple_call_arg (stmt, 0);
  tree destsize = gimple_call_arg (stmt, 1);
  tree fmt = gimple_call_arg (stmt, 2);
  tree orig = NULL_TREE;
  const char *fmt_str = NULL;

  if (gimple_call_num_args (stmt) > 4)
    return false;

  if (gimple_call_num_args (stmt) == 4)
    orig = gimple_call_arg (stmt, 3);

  if (!tree_fits_uhwi_p (destsize))
    return false;
  unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;

      /* Don't optimize snprintf (buf, 4, "abc", ptr++).  */
      if (orig)
	return false;

      /* We could expand this as
	 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
	 or to
	 memcpy (str, fmt_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      size_t len = strlen (fmt_str);
      if (len >= destlen)
	return false;

      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, fmt);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  repl = gimple_build_assign (gimple_call_lhs (stmt),
				      build_int_cst (integer_type_node, len));
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the strcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;

      /* Don't crash on snprintf (str1, cst, "%s").  */
      if (!orig)
	return false;

      tree orig_len = get_maxval_strlen (orig, 0);
      if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
	return false;

      /* We could expand this as
	 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
	 or to
	 memcpy (str1, str2_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      if (compare_tree_int (orig_len, destlen) >= 0)
	return false;

      /* Convert snprintf (str1, cst, "%s", str2) into
	 strcpy (str1, str2) if strlen (str2) < cst.  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, orig);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  if (!useless_type_conversion_p (integer_type_node,
					  TREE_TYPE (orig_len)))
	    orig_len = fold_convert (integer_type_node, orig_len);
	  repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the strcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }
  return false;
}
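
/* E.g. snprintf (buf, 16, "hello") becomes strcpy (buf, "hello") with a
   result of 5 since 5 < 16; snprintf (buf, 16, "%s", str) is handled the
   same way when the maximal length of STR is known to be below 16.  */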
/* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
   FP, FMT, and ARG are the arguments to the call.  We don't fold calls with
   more than 3 arguments, and ARG may be null in the 2-argument case.

   Return true if a simplification was made, false otherwise.  FCODE is the
   BUILT_IN_* code of the function to be simplified.  */

static bool
gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
			     tree fp, tree fmt, tree arg,
			     enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree fn_fputc, fn_fputs;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt) != NULL_TREE)
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
      fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
    }
  else
    {
      fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
      fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
    }

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use fputs.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
	  && arg)
	return false;

      /* If the format specifier was "", fprintf does nothing.  */
      if (fmt_str[0] == '\0')
	{
	  replace_call_with_value (gsi, NULL_TREE);
	  return true;
	}

      /* When "string" doesn't contain %, replace all cases of
	 fprintf (fp, string) with fputs (string, fp).  The fputs
	 builtin will take care of special cases like length == 1.  */
      if (fn_fputs)
	{
	  gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
    return false;

  /* If the format specifier was "%s", call __builtin_fputs (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_s) == 0)
    {
      if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	return false;
      if (fn_fputs)
	{
	  gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* If the format specifier was "%c", call __builtin_fputc (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg
	  || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
	return false;
      if (fn_fputc)
	{
	  gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  return false;
}
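
/* The net effect at the source level:

     fprintf (fp, "plain text");  =>  fputs ("plain text", fp);
     fprintf (fp, "%s", str);     =>  fputs (str, fp);
     fprintf (fp, "%c", c);       =>  fputc (c, fp);

   in each case only when the fprintf return value is unused.  */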
/* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
   FMT and ARG are the arguments to the call; we don't fold cases with
   more than 2 arguments, and ARG may be null if this is a 1-argument case.

   Return true if a simplification was made, false otherwise.  FCODE is the
   BUILT_IN_* code of the function to be simplified.  */

static bool
gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
			    tree arg, enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree fn_putchar, fn_puts, newarg;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt) != NULL_TREE)
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (fcode == BUILT_IN_PRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
      fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
    }
  else
    {
      fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
      fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
    }

  if (!init_target_chars ())
    return false;

  if (strcmp (fmt_str, target_percent_s) == 0
      || strchr (fmt_str, target_percent) == NULL)
    {
      const char *str;

      if (strcmp (fmt_str, target_percent_s) == 0)
	{
	  if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
	    return false;

	  if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	    return false;

	  str = c_getstr (arg);
	  if (str == NULL)
	    return false;
	}
      else
	{
	  /* The format specifier doesn't contain any '%' characters.  */
	  if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
	      && arg)
	    return false;
	  str = fmt_str;
	}

      /* If the string was "", printf does nothing.  */
      if (str[0] == '\0')
	{
	  replace_call_with_value (gsi, NULL_TREE);
	  return true;
	}

      /* If the string has length of 1, call putchar.  */
      if (str[1] == '\0')
	{
	  /* Given printf("c"), (where c is any one character,)
	     convert "c"[0] to an int and pass that to the replacement
	     function.  */
	  newarg = build_int_cst (integer_type_node, str[0]);
	  if (fn_putchar)
	    {
	      gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }
	}
      else
	{
	  /* If the string was "string\n", call puts("string").  */
	  size_t len = strlen (str);
	  if ((unsigned char)str[len - 1] == target_newline
	      && (size_t) (int) len == len)
	    {
	      char *newstr;
	      tree offset_node, string_cst;

	      /* Create a NUL-terminated string that's one char shorter
		 than the original, stripping off the trailing '\n'.  */
	      newarg = build_string_literal (len, str);
	      string_cst = string_constant (newarg, &offset_node);
	      gcc_checking_assert (string_cst
				   && (TREE_STRING_LENGTH (string_cst)
				       == (int) len)
				   && integer_zerop (offset_node)
				   && (unsigned char)
				      TREE_STRING_POINTER (string_cst)[len - 1]
				      == target_newline);
	      /* build_string_literal creates a new STRING_CST,
		 modify it in place to avoid double copying.  */
	      newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
	      newstr[len - 1] = '\0';
	      if (fn_puts)
		{
		  gcall *repl = gimple_build_call (fn_puts, 1, newarg);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}
	    }
	  else
	    /* We'd like to arrange to call fputs(string,stdout) here,
	       but we need stdout and don't have a way to get it yet.  */
	    return false;
	}
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
    return false;

  /* If the format specifier was "%s\n", call __builtin_puts(arg).  */
  else if (strcmp (fmt_str, target_percent_s_newline) == 0)
    {
      if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	return false;
      if (fn_puts)
	{
	  gcall *repl = gimple_build_call (fn_puts, 1, arg);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* If the format specifier was "%c", call __builtin_putchar(arg).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg || ! useless_type_conversion_p (integer_type_node,
					       TREE_TYPE (arg)))
	return false;
      if (fn_putchar)
	{
	  gcall *repl = gimple_build_call (fn_putchar, 1, arg);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  return false;
}
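
/* The printf counterparts of the above:

     printf ("x");           =>  putchar ('x');
     printf ("line\n");      =>  puts ("line");
     printf ("%s\n", str);   =>  puts (str);
     printf ("%c", c);       =>  putchar (c);

   again only when the return value of the call is ignored.  */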
/* Fold a call to __builtin_strlen when the length of its argument is
   known.  */

static bool
gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree len = get_maxval_strlen (gimple_call_arg (stmt, 0), 0);
  if (!len)
    return false;
  len = force_gimple_operand_gsi (gsi, len, true, NULL, true, GSI_SAME_STMT);
  replace_call_with_value (gsi, len);
  return true;
}
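
/* E.g. given

     char *s = "abcd";  n = strlen (s);

   the call folds directly to the constant 4 recorded for S.  */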
/* Fold a call to __builtin_acc_on_device.  */

static bool
gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
{
  /* Defer folding until we know which compiler we're in.  */
  if (symtab->state != EXPANSION)
    return false;

  unsigned val_host = GOMP_DEVICE_HOST;
  unsigned val_dev = GOMP_DEVICE_NONE;

#ifdef ACCEL_COMPILER
  val_host = GOMP_DEVICE_NOT_HOST;
  val_dev = ACCEL_COMPILER_acc_device;
#endif

  location_t loc = gimple_location (gsi_stmt (*gsi));

  tree host_eq = make_ssa_name (boolean_type_node);
  gimple *host_ass = gimple_build_assign
    (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
  gimple_set_location (host_ass, loc);
  gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);

  tree dev_eq = make_ssa_name (boolean_type_node);
  gimple *dev_ass = gimple_build_assign
    (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
  gimple_set_location (dev_ass, loc);
  gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);

  tree result = make_ssa_name (boolean_type_node);
  gimple *result_ass = gimple_build_assign
    (result, BIT_IOR_EXPR, host_eq, dev_eq);
  gimple_set_location (result_ass, loc);
  gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);

  replace_call_with_value (gsi, result);

  return true;
}
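
/* The call collapses to a pair of comparisons; e.g. on the host compiler
   acc_on_device (d) becomes, in pseudo-GIMPLE,

     host_eq = d == GOMP_DEVICE_HOST;
     dev_eq  = d == GOMP_DEVICE_NONE;
     result  = host_eq | dev_eq;

   which folds further to a constant whenever D is itself constant.  */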
/* Fold the non-target builtin at *GSI and return whether any simplification
   was made.  */

static bool
gimple_fold_builtin (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
  tree callee = gimple_call_fndecl (stmt);

  /* Give up for always_inline inline builtins until they are
     inlined.  */
  if (avoid_folding_inline_builtin (callee))
    return false;

  unsigned n = gimple_call_num_args (stmt);
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
  switch (fcode)
    {
    case BUILT_IN_BZERO:
      return gimple_fold_builtin_memset (gsi, integer_zero_node,
					 gimple_call_arg (stmt, 1));
    case BUILT_IN_MEMSET:
      return gimple_fold_builtin_memset (gsi,
					 gimple_call_arg (stmt, 1),
					 gimple_call_arg (stmt, 2));
    case BUILT_IN_BCOPY:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 1),
					    gimple_call_arg (stmt, 0), 3);
    case BUILT_IN_MEMCPY:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1), 0);
    case BUILT_IN_MEMPCPY:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1), 1);
    case BUILT_IN_MEMMOVE:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1), 3);
    case BUILT_IN_SPRINTF_CHK:
    case BUILT_IN_VSPRINTF_CHK:
      return gimple_fold_builtin_sprintf_chk (gsi, fcode);
    case BUILT_IN_STRCAT_CHK:
      return gimple_fold_builtin_strcat_chk (gsi);
    case BUILT_IN_STRNCAT_CHK:
      return gimple_fold_builtin_strncat_chk (gsi);
    case BUILT_IN_STRLEN:
      return gimple_fold_builtin_strlen (gsi);
    case BUILT_IN_STRCPY:
      return gimple_fold_builtin_strcpy (gsi,
					 gimple_call_arg (stmt, 0),
					 gimple_call_arg (stmt, 1));
    case BUILT_IN_STRNCPY:
      return gimple_fold_builtin_strncpy (gsi,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2));
    case BUILT_IN_STRCAT:
      return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
					 gimple_call_arg (stmt, 1));
    case BUILT_IN_STRNCAT:
      return gimple_fold_builtin_strncat (gsi);
    case BUILT_IN_INDEX:
    case BUILT_IN_STRCHR:
      return gimple_fold_builtin_strchr (gsi, false);
    case BUILT_IN_RINDEX:
    case BUILT_IN_STRRCHR:
      return gimple_fold_builtin_strchr (gsi, true);
    case BUILT_IN_FPUTS:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1), false);
    case BUILT_IN_FPUTS_UNLOCKED:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1), true);
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      return gimple_fold_builtin_memory_chk (gsi,
					     gimple_call_arg (stmt, 0),
					     gimple_call_arg (stmt, 1),
					     gimple_call_arg (stmt, 2),
					     gimple_call_arg (stmt, 3),
					     fcode);
    case BUILT_IN_STPCPY:
      return gimple_fold_builtin_stpcpy (gsi);
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      return gimple_fold_builtin_stxcpy_chk (gsi,
					     gimple_call_arg (stmt, 0),
					     gimple_call_arg (stmt, 1),
					     gimple_call_arg (stmt, 2),
					     fcode);
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      return gimple_fold_builtin_stxncpy_chk (gsi,
					      gimple_call_arg (stmt, 0),
					      gimple_call_arg (stmt, 1),
					      gimple_call_arg (stmt, 2),
					      gimple_call_arg (stmt, 3),
					      fcode);
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      return gimple_fold_builtin_snprintf_chk (gsi, fcode);
    case BUILT_IN_SNPRINTF:
      return gimple_fold_builtin_snprintf (gsi);
    case BUILT_IN_SPRINTF:
      return gimple_fold_builtin_sprintf (gsi);
    case BUILT_IN_FPRINTF:
    case BUILT_IN_FPRINTF_UNLOCKED:
    case BUILT_IN_VFPRINTF:
      if (n == 2 || n == 3)
	return gimple_fold_builtin_fprintf (gsi,
					    gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1),
					    n == 3
					    ? gimple_call_arg (stmt, 2)
					    : NULL_TREE,
					    fcode);
      break;
    case BUILT_IN_FPRINTF_CHK:
    case BUILT_IN_VFPRINTF_CHK:
      if (n == 3 || n == 4)
	return gimple_fold_builtin_fprintf (gsi,
					    gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 2),
					    n == 4
					    ? gimple_call_arg (stmt, 3)
					    : NULL_TREE,
					    fcode);
      break;
    case BUILT_IN_PRINTF:
    case BUILT_IN_PRINTF_UNLOCKED:
    case BUILT_IN_VPRINTF:
      if (n == 1 || n == 2)
	return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
					   n == 2
					   ? gimple_call_arg (stmt, 1)
					   : NULL_TREE, fcode);
      break;
    case BUILT_IN_PRINTF_CHK:
    case BUILT_IN_VPRINTF_CHK:
      if (n == 2 || n == 3)
	return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
					   n == 3
					   ? gimple_call_arg (stmt, 2)
					   : NULL_TREE, fcode);
      break;
    case BUILT_IN_ACC_ON_DEVICE:
      return gimple_fold_builtin_acc_on_device (gsi,
						gimple_call_arg (stmt, 0));
    default:;
    }

  /* Try the generic builtin folder.  */
  bool ignore = (gimple_call_lhs (stmt) == NULL);
  tree result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
	STRIP_NOPS (result);
      else
	result = fold_convert (gimple_call_return_type (stmt), result);
      if (!update_call_from_tree (gsi, result))
	gimplify_and_update_call_from_tree (gsi, result);
      return true;
    }

  return false;
}
/* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
   function calls to constants, where possible.  */

static tree
fold_internal_goacc_dim (const gimple *call)
{
  int axis = get_oacc_ifn_dim_arg (call);
  int size = get_oacc_fn_dim_size (current_function_decl, axis);
  bool is_pos = gimple_call_internal_fn (call) == IFN_GOACC_DIM_POS;
  tree result = NULL_TREE;

  /* If the size is 1, or we only want the size and it is not dynamic,
     we know the answer.  */
  if (size == 1 || (!is_pos && size))
    {
      tree type = TREE_TYPE (gimple_call_lhs (call));
      result = build_int_cst (type, size - is_pos);
    }

  return result;
}
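
/* For instance, if an axis of the current offloaded function was compiled
   with a static length of 1, both GOACC_DIM_SIZE and GOACC_DIM_POS for
   that axis fold to constants: the size to 1 and the position to 0
   (SIZE - IS_POS).  */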
/* Return true if stmt is __atomic_compare_exchange_N call which is suitable
   for conversion into ATOMIC_COMPARE_EXCHANGE if the second argument is
   &var where var is only addressable because of such calls.  */

static bool
optimize_atomic_compare_exchange_p (gimple *stmt)
{
  if (gimple_call_num_args (stmt) != 6
      || !flag_inline_atomics
      || !optimize
      || (flag_sanitize & (SANITIZE_THREAD | SANITIZE_ADDRESS)) != 0
      || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
      || !gimple_vdef (stmt)
      || !gimple_vuse (stmt))
    return false;

  tree fndecl = gimple_call_fndecl (stmt);
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
      break;
    default:
      return false;
    }

  tree expected = gimple_call_arg (stmt, 1);
  if (TREE_CODE (expected) != ADDR_EXPR
      || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
    return false;

  tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
  if (!is_gimple_reg_type (etype)
      || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
      || TREE_THIS_VOLATILE (etype)
      || VECTOR_TYPE_P (etype)
      || TREE_CODE (etype) == COMPLEX_TYPE
      /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
	 might not preserve all the bits.  See PR71716.  */
      || SCALAR_FLOAT_TYPE_P (etype)
      || TYPE_PRECISION (etype) != GET_MODE_BITSIZE (TYPE_MODE (etype)))
    return false;

  tree weak = gimple_call_arg (stmt, 3);
  if (!integer_zerop (weak) && !integer_onep (weak))
    return false;

  tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
  machine_mode mode = TYPE_MODE (itype);

  if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
      == CODE_FOR_nothing
      && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
    return false;

  if (int_size_in_bytes (etype) != GET_MODE_SIZE (mode))
    return false;

  return true;
}
/* Fold
     r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
   into
     _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
     i = IMAGPART_EXPR <t>;
     r = (_Bool) i;
     e = REALPART_EXPR <t>;  */

static void
fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree fndecl = gimple_call_fndecl (stmt);
  tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
  tree ctype = build_complex_type (itype);
  tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
  gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
				   expected);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);
  gimple_stmt_iterator gsiret = gsi_for_stmt (g);
  if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
    {
      g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
			       build1 (VIEW_CONVERT_EXPR, itype,
				       gimple_assign_lhs (g)));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }
  int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
	     + int_size_in_bytes (itype);
  g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
				  gimple_call_arg (stmt, 0),
				  gimple_assign_lhs (g),
				  gimple_call_arg (stmt, 2),
				  build_int_cst (integer_type_node, flag),
				  gimple_call_arg (stmt, 4),
				  gimple_call_arg (stmt, 5));
  tree lhs = make_ssa_name (ctype);
  gimple_call_set_lhs (g, lhs);
  gimple_set_vdef (g, gimple_vdef (stmt));
  gimple_set_vuse (g, gimple_vuse (stmt));
  SSA_NAME_DEF_STMT (gimple_vdef (g)) = g;
  if (gimple_call_lhs (stmt))
    {
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
      g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
			       build1 (IMAGPART_EXPR, itype, lhs));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
      g = gimple_build_assign (gimple_call_lhs (stmt), NOP_EXPR,
			       gimple_assign_lhs (g));
    }
  gsi_replace (gsi, g, true);
  g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
			   build1 (REALPART_EXPR, itype, lhs));
  gsi_insert_after (gsi, g, GSI_NEW_STMT);
  if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
    {
      g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
			       VIEW_CONVERT_EXPR,
			       build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
				       gimple_assign_lhs (g)));
      gsi_insert_after (gsi, g, GSI_NEW_STMT);
    }
  g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
  gsi_insert_after (gsi, g, GSI_NEW_STMT);
  *gsi = gsiret;
}
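
/* Note that the trailing REALPART_EXPR store back into EXPECTED replaces
   the store the library call would have done through its second argument;
   this is what lets EXPECTED stop being addressable and become a plain
   register.  */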
/* Return true if ARG0 CODE ARG1 in infinite signed precision operation
   doesn't fit into TYPE.  The test for overflow should be regardless of
   -fwrapv, and even for unsigned types.  */

bool
arith_overflowed_p (enum tree_code code, const_tree type,
		    const_tree arg0, const_tree arg1)
{
  typedef FIXED_WIDE_INT (WIDE_INT_MAX_PRECISION * 2) widest2_int;
  typedef generic_wide_int <wi::extended_tree <WIDE_INT_MAX_PRECISION * 2> >
    widest2_int_cst;
  widest2_int warg0 = widest2_int_cst (arg0);
  widest2_int warg1 = widest2_int_cst (arg1);
  widest2_int wres;
  switch (code)
    {
    case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
    case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
    case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
    default: gcc_unreachable ();
    }
  signop sign = TYPE_SIGN (type);
  if (sign == UNSIGNED && wi::neg_p (wres))
    return true;
  return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
}
/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.  */

static bool
gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree callee;
  bool changed = false;
  unsigned i;

  /* Fold *& in call arguments.  */
  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
      {
	tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
	if (tmp)
	  {
	    gimple_call_set_arg (stmt, i, tmp);
	    changed = true;
	  }
      }

  /* Check for virtual calls that became direct calls.  */
  callee = gimple_call_fn (stmt);
  if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
    {
      if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
	{
	  if (dump_file && virtual_method_call_p (callee)
	      && !possible_polymorphic_call_target_p
		    (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
						     (OBJ_TYPE_REF_EXPR (callee)))))
	    {
	      fprintf (dump_file,
		       "Type inheritance inconsistent devirtualization of ");
	      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, callee, TDF_SLIM);
	      fprintf (dump_file, "\n");
	    }

	  gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
	  changed = true;
	}
      else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
	{
	  bool final;
	  vec <cgraph_node *>targets
	    = possible_polymorphic_call_targets (callee, stmt, &final);
	  if (final && targets.length () <= 1 && dbg_cnt (devirt))
	    {
	      tree lhs = gimple_call_lhs (stmt);
	      if (dump_enabled_p ())
		{
		  location_t loc = gimple_location_safe (stmt);
		  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
				   "folding virtual function call to %s\n",
				   targets.length () == 1
				   ? targets[0]->name ()
				   : "__builtin_unreachable");
		}
	      if (targets.length () == 1)
		{
		  tree fndecl = targets[0]->decl;
		  gimple_call_set_fndecl (stmt, fndecl);
		  changed = true;
		  /* If changing the call to __cxa_pure_virtual
		     or similar noreturn function, adjust gimple_call_fntype
		     too.  */
		  if (gimple_call_noreturn_p (stmt)
		      && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
		      && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
		      && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
			  == void_type_node))
		    gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
		  /* If the call becomes noreturn, remove the lhs.  */
		  if (lhs
		      && gimple_call_noreturn_p (stmt)
		      && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
			  || should_remove_lhs_p (lhs)))
		    {
		      if (TREE_CODE (lhs) == SSA_NAME)
			{
			  tree var = create_tmp_var (TREE_TYPE (lhs));
			  tree def = get_or_create_ssa_default_def (cfun, var);
			  gimple *new_stmt = gimple_build_assign (lhs, def);
			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
			}
		      gimple_call_set_lhs (stmt, NULL_TREE);
		    }
		  maybe_remove_unused_call_args (cfun, stmt);
		}
	      else
		{
		  tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
		  gimple *new_stmt = gimple_build_call (fndecl, 0);
		  gimple_set_location (new_stmt, gimple_location (stmt));
		  if (lhs && TREE_CODE (lhs) == SSA_NAME)
		    {
		      tree var = create_tmp_var (TREE_TYPE (lhs));
		      tree def = get_or_create_ssa_default_def (cfun, var);

		      /* To satisfy condition for
			 cgraph_update_edges_for_call_stmt_node,
			 we need to preserve GIMPLE_CALL statement
			 at position of GSI iterator.  */
		      update_call_from_tree (gsi, def);
		      gsi_insert_before (gsi, new_stmt, GSI_NEW_STMT);
		    }
		  else
		    {
		      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
		      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
		      gsi_replace (gsi, new_stmt, false);
		    }
		  return true;
		}
	    }
	}
    }

  /* Check for indirect calls that became direct calls, and then
     no longer require a static chain.  */
  if (gimple_call_chain (stmt))
    {
      tree fn = gimple_call_fndecl (stmt);
      if (fn && !DECL_STATIC_CHAIN (fn))
	{
	  gimple_call_set_chain (stmt, NULL);
	  changed = true;
	}
      else
	{
	  tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
	  if (tmp)
	    {
	      gimple_call_set_chain (stmt, tmp);
	      changed = true;
	    }
	}
    }

  if (inplace)
    return changed;

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    {
      if (gimple_fold_builtin (gsi))
	changed = true;
    }
  else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
    {
      changed |= targetm.gimple_fold_builtin (gsi);
    }
  else if (gimple_call_internal_p (stmt))
    {
      enum tree_code subcode = ERROR_MARK;
      tree result = NULL_TREE;
      bool cplx_result = false;
      tree overflow = NULL_TREE;
      switch (gimple_call_internal_fn (stmt))
	{
	case IFN_BUILTIN_EXPECT:
	  result = fold_builtin_expect (gimple_location (stmt),
					gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1),
					gimple_call_arg (stmt, 2));
	  break;
	case IFN_UBSAN_OBJECT_SIZE:
	  if (integer_all_onesp (gimple_call_arg (stmt, 2))
	      || (TREE_CODE (gimple_call_arg (stmt, 1)) == INTEGER_CST
		  && TREE_CODE (gimple_call_arg (stmt, 2)) == INTEGER_CST
		  && tree_int_cst_le (gimple_call_arg (stmt, 1),
				      gimple_call_arg (stmt, 2))))
	    {
	      gsi_replace (gsi, gimple_build_nop (), false);
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	      return true;
	    }
	  break;
	case IFN_GOACC_DIM_SIZE:
	case IFN_GOACC_DIM_POS:
	  result = fold_internal_goacc_dim (stmt);
	  break;
	case IFN_UBSAN_CHECK_ADD:
	  subcode = PLUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_SUB:
	  subcode = MINUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_MUL:
	  subcode = MULT_EXPR;
	  break;
	case IFN_ADD_OVERFLOW:
	  subcode = PLUS_EXPR;
	  cplx_result = true;
	  break;
	case IFN_SUB_OVERFLOW:
	  subcode = MINUS_EXPR;
	  cplx_result = true;
	  break;
	case IFN_MUL_OVERFLOW:
	  subcode = MULT_EXPR;
	  cplx_result = true;
	  break;
	default:
	  break;
	}
      if (subcode != ERROR_MARK)
	{
	  tree arg0 = gimple_call_arg (stmt, 0);
	  tree arg1 = gimple_call_arg (stmt, 1);
	  tree type = TREE_TYPE (arg0);
	  if (cplx_result)
	    {
	      tree lhs = gimple_call_lhs (stmt);
	      if (lhs == NULL_TREE)
		type = NULL_TREE;
	      else
		type = TREE_TYPE (TREE_TYPE (lhs));
	    }
	  if (type == NULL_TREE)
	    ;
	  /* x = y + 0; x = y - 0; x = y * 0; */
	  else if (integer_zerop (arg1))
	    result = subcode == MULT_EXPR ? integer_zero_node : arg0;
	  /* x = 0 + y; x = 0 * y; */
	  else if (subcode != MINUS_EXPR && integer_zerop (arg0))
	    result = subcode == MULT_EXPR ? integer_zero_node : arg1;
	  /* x = y - y; */
	  else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
	    result = integer_zero_node;
	  /* x = y * 1; x = 1 * y; */
	  else if (subcode == MULT_EXPR && integer_onep (arg1))
	    result = arg0;
	  else if (subcode == MULT_EXPR && integer_onep (arg0))
	    result = arg1;
	  else if (TREE_CODE (arg0) == INTEGER_CST
		   && TREE_CODE (arg1) == INTEGER_CST)
	    {
	      if (cplx_result)
		result = int_const_binop (subcode, fold_convert (type, arg0),
					  fold_convert (type, arg1));
	      else
		result = int_const_binop (subcode, arg0, arg1);
	      if (result && arith_overflowed_p (subcode, type, arg0, arg1))
		{
		  if (cplx_result)
		    overflow = build_one_cst (type);
		  else
		    result = NULL_TREE;
		}
	    }
	  if (result)
	    {
	      if (result == integer_zero_node)
		result = build_zero_cst (type);
	      else if (cplx_result && TREE_TYPE (result) != type)
		{
		  if (TREE_CODE (result) == INTEGER_CST)
		    {
		      if (arith_overflowed_p (PLUS_EXPR, type, result,
					      integer_zero_node))
			overflow = build_one_cst (type);
		    }
		  else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
			    && TYPE_UNSIGNED (type))
			   || (TYPE_PRECISION (type)
			       < (TYPE_PRECISION (TREE_TYPE (result))
				  + (TYPE_UNSIGNED (TREE_TYPE (result))
				     && !TYPE_UNSIGNED (type)))))
		    result = NULL_TREE;
		  if (result)
		    result = fold_convert (type, result);
		}
	    }
	}

      if (result)
	{
	  if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
	    result = drop_tree_overflow (result);
	  if (cplx_result)
	    {
	      if (overflow == NULL_TREE)
		overflow = build_zero_cst (TREE_TYPE (result));
	      tree ctype = build_complex_type (TREE_TYPE (result));
	      if (TREE_CODE (result) == INTEGER_CST
		  && TREE_CODE (overflow) == INTEGER_CST)
		result = build_complex (ctype, result, overflow);
	      else
		result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
				     ctype, result, overflow);
	    }
	  if (!update_call_from_tree (gsi, result))
	    gimplify_and_update_call_from_tree (gsi, result);
	  changed = true;
	}
    }

  return changed;
}
/* Return true if NAME has a use on STMT.  */

static bool
has_use_on_stmt (tree name, gimple *stmt)
{
  imm_use_iterator iter;
  use_operand_p use_p;
  FOR_EACH_IMM_USE_FAST (use_p, iter, name)
    if (USE_STMT (use_p) == stmt)
      return true;
  return false;
}
/* Worker for fold_stmt_1 dispatch to pattern based folding with
   gimple_simplify.

   Replaces *GSI with the simplification result in RCODE and OPS
   and the associated statements in *SEQ.  Does the replacement
   according to INPLACE and returns true if the operation succeeded.  */

static bool
replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
				  code_helper rcode, tree *ops,
				  gimple_seq *seq, bool inplace)
{
  gimple *stmt = gsi_stmt (*gsi);

  /* Play safe and do not allow abnormals to be mentioned in
     newly created statements.  See also maybe_push_res_to_seq.
     As an exception allow such uses if there was a use of the
     same SSA name on the old stmt.  */
  if ((TREE_CODE (ops[0]) == SSA_NAME
       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0])
       && !has_use_on_stmt (ops[0], stmt))
      || (ops[1]
	  && TREE_CODE (ops[1]) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1])
	  && !has_use_on_stmt (ops[1], stmt))
      || (ops[2]
	  && TREE_CODE (ops[2]) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2])
	  && !has_use_on_stmt (ops[2], stmt))
      || (COMPARISON_CLASS_P (ops[0])
	  && ((TREE_CODE (TREE_OPERAND (ops[0], 0)) == SSA_NAME
	       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 0))
	       && !has_use_on_stmt (TREE_OPERAND (ops[0], 0), stmt))
	      || (TREE_CODE (TREE_OPERAND (ops[0], 1)) == SSA_NAME
		  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 1))
		  && !has_use_on_stmt (TREE_OPERAND (ops[0], 1), stmt)))))
    return false;

  /* Don't insert new statements when INPLACE is true, even if we could
     reuse STMT for the final statement.  */
  if (inplace && !gimple_seq_empty_p (*seq))
    return false;

  if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
    {
      gcc_assert (rcode.is_tree_code ());
      if (TREE_CODE_CLASS ((enum tree_code) rcode) == tcc_comparison
	  /* GIMPLE_CONDs condition may not throw.  */
	  && (!flag_exceptions
	      || !cfun->can_throw_non_call_exceptions
	      || !operation_could_trap_p (rcode,
					  FLOAT_TYPE_P (TREE_TYPE (ops[0])),
					  false, NULL_TREE)))
	gimple_cond_set_condition (cond_stmt, rcode, ops[0], ops[1]);
      else if (rcode == SSA_NAME)
	gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
				   build_zero_cst (TREE_TYPE (ops[0])));
      else if (rcode == INTEGER_CST)
	{
	  if (integer_zerop (ops[0]))
	    gimple_cond_make_false (cond_stmt);
	  else
	    gimple_cond_make_true (cond_stmt);
	}
      else if (!inplace)
	{
	  tree res = maybe_push_res_to_seq (rcode, boolean_type_node,
					    ops, seq);
	  if (!res)
	    return false;
	  gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
				     build_zero_cst (TREE_TYPE (res)));
	}
      else
	return false;
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi),
			     0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (is_gimple_assign (stmt)
	   && rcode.is_tree_code ())
    {
      if (!inplace
	  || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (rcode))
	{
	  maybe_build_generic_op (rcode,
				  TREE_TYPE (gimple_assign_lhs (stmt)), ops);
	  gimple_assign_set_rhs_with_ops (gsi, rcode, ops[0], ops[1], ops[2]);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      if (!gimple_seq_empty_p (*seq))
		print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	      print_gimple_stmt (dump_file, gsi_stmt (*gsi),
				 0, TDF_SLIM);
	    }
	  gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
	  return true;
	}
    }
  else if (rcode.is_fn_code ()
	   && gimple_call_combined_fn (stmt) == rcode)
    {
      unsigned i;
      for (i = 0; i < gimple_call_num_args (stmt); ++i)
	{
	  gcc_assert (ops[i] != NULL_TREE);
	  gimple_call_set_arg (stmt, i, ops[i]);
	}
      if (i < 3)
	gcc_assert (ops[i] == NULL_TREE);
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (!inplace)
    {
      if (gimple_has_lhs (stmt))
	{
	  tree lhs = gimple_get_lhs (stmt);
	  if (!maybe_push_res_to_seq (rcode, TREE_TYPE (lhs),
				      ops, seq, lhs))
	    return false;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	    }
	  gsi_replace_with_seq_vops (gsi, *seq);
	  return true;
	}
      else
	gcc_unreachable ();
    }

  return false;
}
/* Canonicalize MEM_REFs invariant address operand after propagation.  */

static bool
maybe_canonicalize_mem_ref_addr (tree *t)
{
  bool res = false;

  if (TREE_CODE (*t) == ADDR_EXPR)
    t = &TREE_OPERAND (*t, 0);

  /* The C and C++ frontends use an ARRAY_REF for indexing with their
     generic vector extension.  The actual vector referenced is
     view-converted to an array type for this purpose.  If the index
     is constant the canonical representation in the middle-end is a
     BIT_FIELD_REF so re-write the former to the latter here.  */
  if (TREE_CODE (*t) == ARRAY_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
      && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
      && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
    {
      tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
      if (VECTOR_TYPE_P (vtype))
	{
	  tree low = array_ref_low_bound (*t);
	  if (TREE_CODE (low) == INTEGER_CST)
	    {
	      if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
		{
		  widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
					    wi::to_widest (low));
		  idx = wi::mul (idx, wi::to_widest
					(TYPE_SIZE (TREE_TYPE (*t))));
		  widest_int ext
		    = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
		  if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
		    {
		      *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
				       TREE_TYPE (*t),
				       TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
				       TYPE_SIZE (TREE_TYPE (*t)),
				       wide_int_to_tree (sizetype, idx));
		      res = true;
		    }
		}
	    }
	}
    }

  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
     of invariant addresses into a SSA name MEM_REF address.  */
  if (TREE_CODE (*t) == MEM_REF
      || TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (*t, 0);
      if (TREE_CODE (addr) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
	      || handled_component_p (TREE_OPERAND (addr, 0))))
	{
	  tree base;
	  HOST_WIDE_INT coffset;
	  base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
						&coffset);
	  if (!base)
	    gcc_unreachable ();

	  TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
	  TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
						  TREE_OPERAND (*t, 1),
						  size_int (coffset));
	  res = true;
	}
      gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
			   || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
    }

  /* Canonicalize back MEM_REFs to plain reference trees if the object
     accessed is a decl that has the same access semantics as the MEM_REF.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1))
      && MR_DEPENDENCE_CLIQUE (*t) == 0)
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
      if (/* Same volatile qualification.  */
	  TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
	  /* Same TBAA behavior with -fstrict-aliasing.  */
	  && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
	  && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
	      == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
	  /* Same alignment.  */
	  && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
	  /* We have to look out here to not drop a required conversion
	     from the rhs to the lhs if *t appears on the lhs or vice-versa
	     if it appears on the rhs.  Thus require strict type
	     compatibility.  */
	  && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
	{
	  *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
	  res = true;
	}
    }

  /* Canonicalize TARGET_MEM_REF in particular with respect to
     the indexes becoming constant.  */
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
	{
	  *t = tem;
	  res = true;
	}
    }

  return res;
}
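
/* E.g. after propagation of a constant address, MEM[&a.b.c, 8] is rebased
   to MEM[&a, <offset of b.c> + 8], and a MEM_REF of the form MEM[&decl, 0]
   whose type and alignment match DECL is turned back into plain 'decl'.  */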
3890 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
3891 distinguishes both cases. */
3894 fold_stmt_1 (gimple_stmt_iterator
*gsi
, bool inplace
, tree (*valueize
) (tree
))
3896 bool changed
= false;
3897 gimple
*stmt
= gsi_stmt (*gsi
);
3898 bool nowarning
= gimple_no_warning_p (stmt
);
3900 fold_defer_overflow_warnings ();
3902 /* First do required canonicalization of [TARGET_]MEM_REF addresses
3904 ??? This shouldn't be done in generic folding but in the
3905 propagation helpers which also know whether an address was
3907 Also canonicalize operand order. */
3908 switch (gimple_code (stmt
))
3911 if (gimple_assign_rhs_class (stmt
) == GIMPLE_SINGLE_RHS
)
3913 tree
*rhs
= gimple_assign_rhs1_ptr (stmt
);
3914 if ((REFERENCE_CLASS_P (*rhs
)
3915 || TREE_CODE (*rhs
) == ADDR_EXPR
)
3916 && maybe_canonicalize_mem_ref_addr (rhs
))
3918 tree
*lhs
= gimple_assign_lhs_ptr (stmt
);
3919 if (REFERENCE_CLASS_P (*lhs
)
3920 && maybe_canonicalize_mem_ref_addr (lhs
))
3925 /* Canonicalize operand order. */
3926 enum tree_code code
= gimple_assign_rhs_code (stmt
);
3927 if (TREE_CODE_CLASS (code
) == tcc_comparison
3928 || commutative_tree_code (code
)
3929 || commutative_ternary_tree_code (code
))
3931 tree rhs1
= gimple_assign_rhs1 (stmt
);
3932 tree rhs2
= gimple_assign_rhs2 (stmt
);
3933 if (tree_swap_operands_p (rhs1
, rhs2
, false))
3935 gimple_assign_set_rhs1 (stmt
, rhs2
);
3936 gimple_assign_set_rhs2 (stmt
, rhs1
);
3937 if (TREE_CODE_CLASS (code
) == tcc_comparison
)
3938 gimple_assign_set_rhs_code (stmt
,
3939 swap_tree_comparison (code
));
3947 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
3949 tree
*arg
= gimple_call_arg_ptr (stmt
, i
);
3950 if (REFERENCE_CLASS_P (*arg
)
3951 && maybe_canonicalize_mem_ref_addr (arg
))
3954 tree
*lhs
= gimple_call_lhs_ptr (stmt
);
3956 && REFERENCE_CLASS_P (*lhs
)
3957 && maybe_canonicalize_mem_ref_addr (lhs
))
3963 gasm
*asm_stmt
= as_a
<gasm
*> (stmt
);
3964 for (i
= 0; i
< gimple_asm_noutputs (asm_stmt
); ++i
)
3966 tree link
= gimple_asm_output_op (asm_stmt
, i
);
3967 tree op
= TREE_VALUE (link
);
3968 if (REFERENCE_CLASS_P (op
)
3969 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link
)))
3972 for (i
= 0; i
< gimple_asm_ninputs (asm_stmt
); ++i
)
3974 tree link
= gimple_asm_input_op (asm_stmt
, i
);
3975 tree op
= TREE_VALUE (link
);
3976 if ((REFERENCE_CLASS_P (op
)
3977 || TREE_CODE (op
) == ADDR_EXPR
)
3978 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link
)))
3984 if (gimple_debug_bind_p (stmt
))
3986 tree
*val
= gimple_debug_bind_get_value_ptr (stmt
);
3988 && (REFERENCE_CLASS_P (*val
)
3989 || TREE_CODE (*val
) == ADDR_EXPR
)
3990 && maybe_canonicalize_mem_ref_addr (val
))
3996 /* Canonicalize operand order. */
3997 tree lhs
= gimple_cond_lhs (stmt
);
3998 tree rhs
= gimple_cond_rhs (stmt
);
3999 if (tree_swap_operands_p (lhs
, rhs
, false))
4001 gcond
*gc
= as_a
<gcond
*> (stmt
);
4002 gimple_cond_set_lhs (gc
, rhs
);
4003 gimple_cond_set_rhs (gc
, lhs
);
4004 gimple_cond_set_code (gc
,
4005 swap_tree_comparison (gimple_cond_code (gc
)));
4012 /* Dispatch to pattern-based folding. */
4014 || is_gimple_assign (stmt
)
4015 || gimple_code (stmt
) == GIMPLE_COND
)
4017 gimple_seq seq
= NULL
;
4020 if (gimple_simplify (stmt
, &rcode
, ops
, inplace
? NULL
: &seq
,
4021 valueize
, valueize
))
4023 if (replace_stmt_with_simplification (gsi
, rcode
, ops
, &seq
, inplace
))
4026 gimple_seq_discard (seq
);
4030 stmt
= gsi_stmt (*gsi
);
4032 /* Fold the main computation performed by the statement. */
4033 switch (gimple_code (stmt
))
4037 /* Try to canonicalize for boolean-typed X the comparisons
4038 X == 0, X == 1, X != 0, and X != 1. */
4039 if (gimple_assign_rhs_code (stmt
) == EQ_EXPR
4040 || gimple_assign_rhs_code (stmt
) == NE_EXPR
)
4042 tree lhs
= gimple_assign_lhs (stmt
);
4043 tree op1
= gimple_assign_rhs1 (stmt
);
4044 tree op2
= gimple_assign_rhs2 (stmt
);
4045 tree type
= TREE_TYPE (op1
);
4047 /* Check whether the comparison operands are of the same boolean
4048 type as the result type is.
4049 Check that second operand is an integer-constant with value
4051 if (TREE_CODE (op2
) == INTEGER_CST
4052 && (integer_zerop (op2
) || integer_onep (op2
))
4053 && useless_type_conversion_p (TREE_TYPE (lhs
), type
))
4055 enum tree_code cmp_code
= gimple_assign_rhs_code (stmt
);
4056 bool is_logical_not
= false;
4058 /* X == 0 and X != 1 is a logical-not.of X
4059 X == 1 and X != 0 is X */
4060 if ((cmp_code
== EQ_EXPR
&& integer_zerop (op2
))
4061 || (cmp_code
== NE_EXPR
&& integer_onep (op2
)))
4062 is_logical_not
= true;
4064 if (is_logical_not
== false)
4065 gimple_assign_set_rhs_with_ops (gsi
, TREE_CODE (op1
), op1
);
4066 /* Only for one-bit precision typed X the transformation
4067 !X -> ~X is valied. */
4068 else if (TYPE_PRECISION (type
) == 1)
4069 gimple_assign_set_rhs_with_ops (gsi
, BIT_NOT_EXPR
, op1
);
4070 /* Otherwise we use !X -> X ^ 1. */
4072 gimple_assign_set_rhs_with_ops (gsi
, BIT_XOR_EXPR
, op1
,
4073 build_int_cst (type
, 1));
4079 unsigned old_num_ops
= gimple_num_ops (stmt
);
4080 tree lhs
= gimple_assign_lhs (stmt
);
4081 tree new_rhs
= fold_gimple_assign (gsi
);
4083 && !useless_type_conversion_p (TREE_TYPE (lhs
),
4084 TREE_TYPE (new_rhs
)))
4085 new_rhs
= fold_convert (TREE_TYPE (lhs
), new_rhs
);
4088 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs
)) < old_num_ops
))
4090 gimple_assign_set_rhs_from_tree (gsi
, new_rhs
);
4097 changed
|= gimple_fold_call (gsi
, inplace
);
    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      {
        gasm *asm_stmt = as_a <gasm *> (stmt);
        size_t noutputs;
        const char **oconstraints;
        const char *constraint;
        bool allows_mem, allows_reg;

        noutputs = gimple_asm_noutputs (asm_stmt);
        oconstraints = XALLOCAVEC (const char *, noutputs);

        for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
          {
            tree link = gimple_asm_output_op (asm_stmt, i);
            tree op = TREE_VALUE (link);
            oconstraints[i]
              = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
            if (REFERENCE_CLASS_P (op)
                && (op = maybe_fold_reference (op, true)) != NULL_TREE)
              {
                TREE_VALUE (link) = op;
                changed = true;
              }
          }
        for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
          {
            tree link = gimple_asm_input_op (asm_stmt, i);
            tree op = TREE_VALUE (link);
            constraint
              = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
            parse_input_constraint (&constraint, 0, 0, noutputs, 0,
                                    oconstraints, &allows_mem, &allows_reg);
            if (REFERENCE_CLASS_P (op)
                && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
                   != NULL_TREE)
              {
                TREE_VALUE (link) = op;
                changed = true;
              }
          }
      }
      break;
    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
        {
          tree val = gimple_debug_bind_get_value (stmt);
          if (val
              && REFERENCE_CLASS_P (val))
            {
              tree tem = maybe_fold_reference (val, false);
              if (tem)
                {
                  gimple_debug_bind_set_value (stmt, tem);
                  changed = true;
                }
            }
          else if (val
                   && TREE_CODE (val) == ADDR_EXPR)
            {
              tree ref = TREE_OPERAND (val, 0);
              tree tem = maybe_fold_reference (ref, false);
              if (tem)
                {
                  tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
                  gimple_debug_bind_set_value (stmt, tem);
                  changed = true;
                }
            }
        }
      break;

    default:;
    }

  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  */
  if (gimple_has_lhs (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
        {
          tree new_lhs = maybe_fold_reference (lhs, true);
          if (new_lhs)
            {
              gimple_set_lhs (stmt, new_lhs);
              changed = true;
            }
        }
    }

  fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);

  return changed;
}
/* Valueization callback that ends up not following SSA edges.  */

tree
no_follow_ssa_edges (tree)
{
  return NULL_TREE;
}

/* Valueization callback that ends up following single-use SSA edges only.  */

tree
follow_single_use_edges (tree val)
{
  if (TREE_CODE (val) == SSA_NAME
      && !has_single_use (val))
    return NULL_TREE;
  return val;
}
/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.
   The statement pointed to by GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
}

bool
fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
{
  return fold_stmt_1 (gsi, false, valueize);
}
/* Perform the minimal folding on statement *GSI.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.
   The statement *GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt_inplace (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
  gcc_assert (gsi_stmt (*gsi) == stmt);
  return changed;
}
/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
   if EXPR is null or we don't know how.
   If non-null, the result always has boolean type.  */

static tree
canonicalize_bool (tree expr, bool invert)
{
  if (!expr)
    return NULL_TREE;
  if (invert)
    {
      if (integer_nonzerop (expr))
        return boolean_false_node;
      else if (integer_zerop (expr))
        return boolean_true_node;
      else if (TREE_CODE (expr) == SSA_NAME)
        return fold_build2 (EQ_EXPR, boolean_type_node, expr,
                            build_int_cst (TREE_TYPE (expr), 0));
      else if (COMPARISON_CLASS_P (expr))
        return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
                            boolean_type_node,
                            TREE_OPERAND (expr, 0),
                            TREE_OPERAND (expr, 1));
      else
        return NULL_TREE;
    }
  else
    {
      if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
        return expr;
      if (integer_nonzerop (expr))
        return boolean_true_node;
      else if (integer_zerop (expr))
        return boolean_false_node;
      else if (TREE_CODE (expr) == SSA_NAME)
        return fold_build2 (NE_EXPR, boolean_type_node, expr,
                            build_int_cst (TREE_TYPE (expr), 0));
      else if (COMPARISON_CLASS_P (expr))
        return fold_build2 (TREE_CODE (expr),
                            boolean_type_node,
                            TREE_OPERAND (expr, 0),
                            TREE_OPERAND (expr, 1));
      else
        return NULL_TREE;
    }
}
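
/* Illustrative examples (added commentary, not part of the original
   source): canonicalize_bool (a < b, true) yields a >= b via
   invert_tree_comparison; for a boolean SSA name x,
   canonicalize_bool (x, false) yields x itself, while
   canonicalize_bool (x, true) yields x == 0.  */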
/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  */

static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
                        const_tree op1, const_tree op2)
{
  gimple *s;

  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      if (operand_equal_p (expr, op1, 0))
        return ((code == NE_EXPR && integer_zerop (op2))
                || (code == EQ_EXPR && integer_nonzerop (op2)));
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
          && gimple_assign_rhs_code (s) == code
          && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
          && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
        return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
          && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
        {
          enum tree_code c = gimple_assign_rhs_code (s);
          if ((c == NE_EXPR && integer_zerop (op2))
              || (c == EQ_EXPR && integer_nonzerop (op2)))
            return same_bool_comparison_p (expr, c,
                                           gimple_assign_rhs1 (s),
                                           gimple_assign_rhs2 (s));
          if ((c == EQ_EXPR && integer_zerop (op2))
              || (c == NE_EXPR && integer_nonzerop (op2)))
            return same_bool_comparison_p (expr,
                                           invert_tree_comparison (c, false),
                                           gimple_assign_rhs1 (s),
                                           gimple_assign_rhs2 (s));
        }
    }

  return false;
}
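
/* Example (added commentary, not part of the original source): given
     _1 = a < b;
   same_bool_comparison_p (_1, LT_EXPR, a, b) returns true through the
   SSA definition of _1, and a query of the form
   same_bool_comparison_p (expr, NE_EXPR, _1, 0) recurses into _1's
   defining comparison to reach the same answer.  */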
/* Check to see if two boolean expressions OP1 and OP2 are logically
   equivalent.  */

static bool
same_bool_result_p (const_tree op1, const_tree op2)
{
  /* Simple cases first.  */
  if (operand_equal_p (op1, op2, 0))
    return true;

  /* Check the cases where at least one of the operands is a comparison.
     These are a bit smarter than operand_equal_p in that they apply some
     identities on SSA_NAMEs.  */
  if (COMPARISON_CLASS_P (op2)
      && same_bool_comparison_p (op1, TREE_CODE (op2),
                                 TREE_OPERAND (op2, 0),
                                 TREE_OPERAND (op2, 1)))
    return true;
  if (COMPARISON_CLASS_P (op1)
      && same_bool_comparison_p (op2, TREE_CODE (op1),
                                 TREE_OPERAND (op1, 0),
                                 TREE_OPERAND (op1, 1)))
    return true;

  return false;
}
/* Forward declarations for some mutually recursive functions.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
                   enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison (tree var, bool invert,
                         enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison_1 (gimple *stmt,
                           enum tree_code code2, tree op2a, tree op2b);
static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
                  enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison (tree var, bool invert,
                        enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison_1 (gimple *stmt,
                          enum tree_code code2, tree op2a, tree op2b);
/* Helper function for and_comparisons_1:  try to simplify the AND of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of the VAR before doing the AND.
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison (tree var, bool invert,
                         enum tree_code code2, tree op2a, tree op2b)
{
  tree t;
  gimple *stmt = SSA_NAME_DEF_STMT (var);

  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.  */
  if (invert)
    t = or_var_with_comparison_1 (stmt,
                                  invert_tree_comparison (code2, false),
                                  op2a, op2b);
  else
    t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
  return canonicalize_bool (t, invert);
}
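
/* Example (added commentary, not part of the original source): for var
   defined as a < b, and_var_with_comparison (var, true, GE_EXPR, a, b)
   asks for !var AND (a >= b); per the DeMorgan rewrite above it computes
   var OR (a < b) via or_var_with_comparison_1 and then inverts the
   canonicalized result.  */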
/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison_1 (gimple *stmt,
                           enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
          || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
        {
          true_test_var = op2a;
          if (var == true_test_var)
            return var;
        }
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
               || (code2 == NE_EXPR && integer_nonzerop (op2b)))
        {
          false_test_var = op2a;
          if (var == false_test_var)
            return boolean_false_node;
        }
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (innercode,
                                  gimple_assign_rhs1 (stmt),
                                  gimple_assign_rhs2 (stmt),
                                  code2, op2a, op2b);
      if (t)
        return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple *s;
      tree t;
      tree partial = NULL_TREE;
      bool is_and = (innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
         of inner1/inner2:
         inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
         inner1 AND (inner1 OR inner2) => inner1
         !inner1 AND (inner1 AND inner2) => false
         !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
         Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
        return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
        return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
        return (is_and
                ? boolean_false_node
                : and_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
        return (is_and
                ? boolean_false_node
                : and_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
         Compute the first partial result, (inner1 AND (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
          && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
          && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
          && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
                                              gimple_assign_rhs1 (s),
                                              gimple_assign_rhs2 (s),
                                              code2, op2a, op2b)))
        {
          /* Handle the AND case, where we are reassociating:
             (inner1 AND inner2) AND (op2a code2 op2b)
             => (t AND inner2)
             If the partial result t is a constant, we win.  Otherwise
             continue on to try reassociating with the other inner test.  */
          if (is_and)
            {
              if (integer_onep (t))
                return inner2;
              else if (integer_zerop (t))
                return boolean_false_node;
            }

          /* Handle the OR case, where we are redistributing:
             (inner1 OR inner2) AND (op2a code2 op2b)
             => (t OR (inner2 AND (op2a code2 op2b)))  */
          else if (integer_onep (t))
            return boolean_true_node;

          /* Save partial result for later.  */
          partial = t;
        }

      /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
          && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
          && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
          && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
                                              gimple_assign_rhs1 (s),
                                              gimple_assign_rhs2 (s),
                                              code2, op2a, op2b)))
        {
          /* Handle the AND case, where we are reassociating:
             (inner1 AND inner2) AND (op2a code2 op2b)
             => (inner1 AND t)  */
          if (is_and)
            {
              if (integer_onep (t))
                return inner1;
              else if (integer_zerop (t))
                return boolean_false_node;
              /* If both are the same, we can apply the identity
                 (x AND x) == x.  */
              else if (partial && same_bool_result_p (t, partial))
                return t;
            }

          /* Handle the OR case, where we are redistributing:
             (inner1 OR inner2) AND (op2a code2 op2b)
             => (t OR (inner1 AND (op2a code2 op2b)))
             => (t OR partial)  */
          else
            {
              if (integer_onep (t))
                return boolean_true_node;
              else if (partial)
                {
                  /* We already got a simplification for the other
                     operand to the redistributed OR expression.  The
                     interesting case is when at least one is false.
                     Or, if both are the same, we can apply the identity
                     (x OR x) == x.  */
                  if (integer_zerop (partial))
                    return t;
                  else if (integer_zerop (t))
                    return partial;
                  else if (same_bool_result_p (t, partial))
                    return t;
                }
            }
        }
    }
  return NULL_TREE;
}
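
/* Worked example (added commentary, not part of the original source):
   with inner1 = (y != 0), inner2 = (x < 10) and var = inner1 & inner2,
   simplifying var AND (x >= 10) finds no partial result for inner1,
   but the second partial result folds (x < 10) AND (x >= 10) to false
   via disjoint ranges, so the whole expression collapses to false.  */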
/* Try to simplify the AND of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
                   enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) AND (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
                                    TRUTH_ANDIF_EXPR, code1, code2,
                                    truth_type, op1a, op1b);
      if (t)
        return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
                                    TRUTH_ANDIF_EXPR, code1,
                                    swap_tree_comparison (code2),
                                    truth_type, op1a, op1b);
      if (t)
        return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a == op1b), we should either be able to
         return that or FALSE, depending on whether the constant op1b
         also satisfies the other comparison against op2b.  */
      if (code1 == EQ_EXPR)
        {
          bool done = true;
          bool val;
          switch (code2)
            {
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp < 0); break;
            case GT_EXPR: val = (cmp > 0); break;
            case LE_EXPR: val = (cmp <= 0); break;
            case GE_EXPR: val = (cmp >= 0); break;
            default: done = false;
            }
          if (done)
            {
              if (val)
                return fold_build2 (code1, boolean_type_node, op1a, op1b);
              else
                return boolean_false_node;
            }
        }
      /* Likewise if the second comparison is an == comparison.  */
      else if (code2 == EQ_EXPR)
        {
          bool done = true;
          bool val;
          switch (code1)
            {
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp > 0); break;
            case GT_EXPR: val = (cmp < 0); break;
            case LE_EXPR: val = (cmp >= 0); break;
            case GE_EXPR: val = (cmp <= 0); break;
            default: done = false;
            }
          if (done)
            {
              if (val)
                return fold_build2 (code2, boolean_type_node, op2a, op2b);
              else
                return boolean_false_node;
            }
        }

      /* Same business with inequality tests.  */
      else if (code1 == NE_EXPR)
        {
          bool val;
          switch (code2)
            {
            case EQ_EXPR: val = (cmp != 0); break;
            case NE_EXPR: val = (cmp == 0); break;
            case LT_EXPR: val = (cmp >= 0); break;
            case GT_EXPR: val = (cmp <= 0); break;
            case LE_EXPR: val = (cmp > 0); break;
            case GE_EXPR: val = (cmp < 0); break;
            default:
              val = false;
            }
          if (val)
            return fold_build2 (code2, boolean_type_node, op2a, op2b);
        }
      else if (code2 == NE_EXPR)
        {
          bool val;
          switch (code1)
            {
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp <= 0); break;
            case GT_EXPR: val = (cmp >= 0); break;
            case LE_EXPR: val = (cmp < 0); break;
            case GE_EXPR: val = (cmp > 0); break;
            default:
              val = false;
            }
          if (val)
            return fold_build2 (code1, boolean_type_node, op1a, op1b);
        }

      /* Choose the more restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
               && (code2 == LT_EXPR || code2 == LE_EXPR))
        {
          if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
            return fold_build2 (code1, boolean_type_node, op1a, op1b);
          else
            return fold_build2 (code2, boolean_type_node, op2a, op2b);
        }

      /* Likewise choose the more restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
               && (code2 == GT_EXPR || code2 == GE_EXPR))
        {
          if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
            return fold_build2 (code1, boolean_type_node, op1a, op1b);
          else
            return fold_build2 (code2, boolean_type_node, op2a, op2b);
        }

      /* Check for singleton ranges.  */
      else if (cmp == 0
               && ((code1 == LE_EXPR && code2 == GE_EXPR)
                   || (code1 == GE_EXPR && code2 == LE_EXPR)))
        return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);

      /* Check for disjoint ranges.  */
      else if (cmp <= 0
               && (code1 == LT_EXPR || code1 == LE_EXPR)
               && (code2 == GT_EXPR || code2 == GE_EXPR))
        return boolean_false_node;
      else if (cmp >= 0
               && (code1 == GT_EXPR || code1 == GE_EXPR)
               && (code2 == LT_EXPR || code2 == LE_EXPR))
        return boolean_false_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
                     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple *stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
        {
        case GIMPLE_ASSIGN:
          /* Try to simplify by copy-propagating the definition.  */
          return and_var_with_comparison (op1a, invert, code2, op2a, op2b);

        case GIMPLE_PHI:
          /* If every argument to the PHI produces the same result when
             ANDed with the second comparison, we win.
             Do not do this unless the type is bool since we need a bool
             result here anyway.  */
          if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
            {
              tree result = NULL_TREE;
              unsigned i;
              for (i = 0; i < gimple_phi_num_args (stmt); i++)
                {
                  tree arg = gimple_phi_arg_def (stmt, i);

                  /* If this PHI has itself as an argument, ignore it.
                     If all the other args produce the same result,
                     return that.  */
                  if (arg == gimple_phi_result (stmt))
                    continue;
                  else if (TREE_CODE (arg) == INTEGER_CST)
                    {
                      if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
                        {
                          if (!result)
                            result = boolean_false_node;
                          else if (!integer_zerop (result))
                            return NULL_TREE;
                        }
                      else if (!result)
                        result = fold_build2 (code2, boolean_type_node,
                                              op2a, op2b);
                      else if (!same_bool_comparison_p (result,
                                                        code2, op2a, op2b))
                        return NULL_TREE;
                    }
                  else if (TREE_CODE (arg) == SSA_NAME
                           && !SSA_NAME_IS_DEFAULT_DEF (arg))
                    {
                      tree temp;
                      gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
                      /* In simple cases we can look through PHI nodes,
                         but we have to be careful with loops.  */
                      if (! dom_info_available_p (CDI_DOMINATORS)
                          || gimple_bb (def_stmt) == gimple_bb (stmt)
                          || dominated_by_p (CDI_DOMINATORS,
                                             gimple_bb (def_stmt),
                                             gimple_bb (stmt)))
                        return NULL_TREE;
                      temp = and_var_with_comparison (arg, invert, code2,
                                                      op2a, op2b);
                      if (!temp)
                        return NULL_TREE;
                      else if (!result)
                        result = temp;
                      else if (!same_bool_result_p (result, temp))
                        return NULL_TREE;
                    }
                  else
                    return NULL_TREE;
                }
              return result;
            }

        default:
          break;
        }
    }
  return NULL_TREE;
}
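
/* Examples of the constant merging above (added commentary, not part
   of the original source):
     (x == 5) AND (x < 3)   => false   (the EQ case)
     (x > 5)  AND (x > 3)   => x > 5   (more restrictive > test)
     (x <= 5) AND (x >= 5)  => x == 5  (singleton range)
     (x < 3)  AND (x > 5)   => false   (disjoint ranges)  */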
/* Try to simplify the AND of two comparisons, specified by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be simplified to a single expression (without requiring
   introducing more SSA variables to hold intermediate values),
   return the resulting tree.  Otherwise return NULL_TREE.
   If the result expression is non-null, it has boolean type.  */

tree
maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
                            enum tree_code code2, tree op2a, tree op2b)
{
  tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
  if (t)
    return t;
  else
    return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
}
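
/* Note (added commentary, not part of the original source): since
   and_comparisons_1 only follows SSA definitions reachable from its
   first comparison, this wrapper retries with the two comparisons
   swapped, making the combined query effectively symmetric.  */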
/* Helper function for or_comparisons_1:  try to simplify the OR of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of VAR before doing the OR.
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison (tree var, bool invert,
                        enum tree_code code2, tree op2a, tree op2b)
{
  tree t;
  gimple *stmt = SSA_NAME_DEF_STMT (var);

  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.  */
  if (invert)
    t = and_var_with_comparison_1 (stmt,
                                   invert_tree_comparison (code2, false),
                                   op2a, op2b);
  else
    t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
  return canonicalize_bool (t, invert);
}
/* Try to simplify the OR of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison_1 (gimple *stmt,
                          enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var OR (var != 0)) => true.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
          || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
        {
          true_test_var = op2a;
          if (var == true_test_var)
            return var;
        }
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
               || (code2 == NE_EXPR && integer_nonzerop (op2b)))
        {
          false_test_var = op2a;
          if (var == false_test_var)
            return boolean_true_node;
        }
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = or_comparisons_1 (innercode,
                                 gimple_assign_rhs1 (stmt),
                                 gimple_assign_rhs2 (stmt),
                                 code2, op2a, op2b);
      if (t)
        return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple *s;
      tree t;
      tree partial = NULL_TREE;
      bool is_or = (innercode == BIT_IOR_EXPR);

      /* Check for boolean identities that don't require recursive examination
         of inner1/inner2:
         inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
         inner1 OR (inner1 AND inner2) => inner1
         !inner1 OR (inner1 OR inner2) => true
         !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
         Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
        return (is_or ? var : inner1);
      else if (inner2 == true_test_var)
        return (is_or ? var : inner2);
      else if (inner1 == false_test_var)
        return (is_or
                ? boolean_true_node
                : or_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
        return (is_or
                ? boolean_true_node
                : or_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the OR across the inner tests.
         Compute the first partial result, (inner1 OR (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
          && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
          && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
          && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
                                             gimple_assign_rhs1 (s),
                                             gimple_assign_rhs2 (s),
                                             code2, op2a, op2b)))
        {
          /* Handle the OR case, where we are reassociating:
             (inner1 OR inner2) OR (op2a code2 op2b)
             => (t OR inner2)
             If the partial result t is a constant, we win.  Otherwise
             continue on to try reassociating with the other inner test.  */
          if (is_or)
            {
              if (integer_onep (t))
                return boolean_true_node;
              else if (integer_zerop (t))
                return inner2;
            }

          /* Handle the AND case, where we are redistributing:
             (inner1 AND inner2) OR (op2a code2 op2b)
             => (t AND (inner2 OR (op2a code op2b)))  */
          else if (integer_zerop (t))
            return boolean_false_node;

          /* Save partial result for later.  */
          partial = t;
        }

      /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
          && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
          && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
          && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
                                             gimple_assign_rhs1 (s),
                                             gimple_assign_rhs2 (s),
                                             code2, op2a, op2b)))
        {
          /* Handle the OR case, where we are reassociating:
             (inner1 OR inner2) OR (op2a code2 op2b)
             => (inner1 OR t)
             => (t OR partial)  */
          if (is_or)
            {
              if (integer_zerop (t))
                return inner1;
              else if (integer_onep (t))
                return boolean_true_node;
              /* If both are the same, we can apply the identity
                 (x OR x) == x.  */
              else if (partial && same_bool_result_p (t, partial))
                return t;
            }

          /* Handle the AND case, where we are redistributing:
             (inner1 AND inner2) OR (op2a code2 op2b)
             => (t AND (inner1 OR (op2a code2 op2b)))
             => (t AND partial)  */
          else
            {
              if (integer_zerop (t))
                return boolean_false_node;
              else if (partial)
                {
                  /* We already got a simplification for the other
                     operand to the redistributed AND expression.  The
                     interesting case is when at least one is true.
                     Or, if both are the same, we can apply the identity
                     (x AND x) == x.  */
                  if (integer_onep (partial))
                    return t;
                  else if (integer_onep (t))
                    return partial;
                  else if (same_bool_result_p (t, partial))
                    return t;
                }
            }
        }
    }
  return NULL_TREE;
}
/* Try to simplify the OR of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
                  enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) OR (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
                                    TRUTH_ORIF_EXPR, code1, code2,
                                    truth_type, op1a, op1b);
      if (t)
        return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
                                    TRUTH_ORIF_EXPR, code1,
                                    swap_tree_comparison (code2),
                                    truth_type, op1a, op1b);
      if (t)
        return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a != op1b), we should either be able to
         return that or TRUE, depending on whether the constant op1b
         also satisfies the other comparison against op2b.  */
      if (code1 == NE_EXPR)
        {
          bool done = true;
          bool val;
          switch (code2)
            {
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp < 0); break;
            case GT_EXPR: val = (cmp > 0); break;
            case LE_EXPR: val = (cmp <= 0); break;
            case GE_EXPR: val = (cmp >= 0); break;
            default: done = false;
            }
          if (done)
            {
              if (val)
                return boolean_true_node;
              else
                return fold_build2 (code1, boolean_type_node, op1a, op1b);
            }
        }
      /* Likewise if the second comparison is a != comparison.  */
      else if (code2 == NE_EXPR)
        {
          bool done = true;
          bool val;
          switch (code1)
            {
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp > 0); break;
            case GT_EXPR: val = (cmp < 0); break;
            case LE_EXPR: val = (cmp >= 0); break;
            case GE_EXPR: val = (cmp <= 0); break;
            default: done = false;
            }
          if (done)
            {
              if (val)
                return boolean_true_node;
              else
                return fold_build2 (code2, boolean_type_node, op2a, op2b);
            }
        }

      /* See if an equality test is redundant with the other comparison.  */
      else if (code1 == EQ_EXPR)
        {
          bool val;
          switch (code2)
            {
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp < 0); break;
            case GT_EXPR: val = (cmp > 0); break;
            case LE_EXPR: val = (cmp <= 0); break;
            case GE_EXPR: val = (cmp >= 0); break;
            default:
              val = false;
            }
          if (val)
            return fold_build2 (code2, boolean_type_node, op2a, op2b);
        }
      else if (code2 == EQ_EXPR)
        {
          bool val;
          switch (code1)
            {
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp > 0); break;
            case GT_EXPR: val = (cmp < 0); break;
            case LE_EXPR: val = (cmp >= 0); break;
            case GE_EXPR: val = (cmp <= 0); break;
            default:
              val = false;
            }
          if (val)
            return fold_build2 (code1, boolean_type_node, op1a, op1b);
        }

      /* Choose the less restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
               && (code2 == LT_EXPR || code2 == LE_EXPR))
        {
          if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
            return fold_build2 (code2, boolean_type_node, op2a, op2b);
          else
            return fold_build2 (code1, boolean_type_node, op1a, op1b);
        }

      /* Likewise choose the less restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
               && (code2 == GT_EXPR || code2 == GE_EXPR))
        {
          if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
            return fold_build2 (code2, boolean_type_node, op2a, op2b);
          else
            return fold_build2 (code1, boolean_type_node, op1a, op1b);
        }

      /* Check for singleton ranges.  */
      else if (cmp == 0
               && ((code1 == LT_EXPR && code2 == GT_EXPR)
                   || (code1 == GT_EXPR && code2 == LT_EXPR)))
        return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);

      /* Check for less/greater pairs that don't restrict the range at all.  */
      else if (cmp >= 0
               && (code1 == LT_EXPR || code1 == LE_EXPR)
               && (code2 == GT_EXPR || code2 == GE_EXPR))
        return boolean_true_node;
      else if (cmp <= 0
               && (code1 == GT_EXPR || code1 == GE_EXPR)
               && (code2 == LT_EXPR || code2 == LE_EXPR))
        return boolean_true_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
                     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple *stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
        {
        case GIMPLE_ASSIGN:
          /* Try to simplify by copy-propagating the definition.  */
          return or_var_with_comparison (op1a, invert, code2, op2a, op2b);

        case GIMPLE_PHI:
          /* If every argument to the PHI produces the same result when
             ORed with the second comparison, we win.
             Do not do this unless the type is bool since we need a bool
             result here anyway.  */
          if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
            {
              tree result = NULL_TREE;
              unsigned i;
              for (i = 0; i < gimple_phi_num_args (stmt); i++)
                {
                  tree arg = gimple_phi_arg_def (stmt, i);

                  /* If this PHI has itself as an argument, ignore it.
                     If all the other args produce the same result,
                     return that.  */
                  if (arg == gimple_phi_result (stmt))
                    continue;
                  else if (TREE_CODE (arg) == INTEGER_CST)
                    {
                      if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
                        {
                          if (!result)
                            result = boolean_true_node;
                          else if (!integer_onep (result))
                            return NULL_TREE;
                        }
                      else if (!result)
                        result = fold_build2 (code2, boolean_type_node,
                                              op2a, op2b);
                      else if (!same_bool_comparison_p (result,
                                                        code2, op2a, op2b))
                        return NULL_TREE;
                    }
                  else if (TREE_CODE (arg) == SSA_NAME
                           && !SSA_NAME_IS_DEFAULT_DEF (arg))
                    {
                      tree temp;
                      gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
                      /* In simple cases we can look through PHI nodes,
                         but we have to be careful with loops.  */
                      if (! dom_info_available_p (CDI_DOMINATORS)
                          || gimple_bb (def_stmt) == gimple_bb (stmt)
                          || dominated_by_p (CDI_DOMINATORS,
                                             gimple_bb (def_stmt),
                                             gimple_bb (stmt)))
                        return NULL_TREE;
                      temp = or_var_with_comparison (arg, invert, code2,
                                                     op2a, op2b);
                      if (!temp)
                        return NULL_TREE;
                      else if (!result)
                        result = temp;
                      else if (!same_bool_result_p (result, temp))
                        return NULL_TREE;
                    }
                  else
                    return NULL_TREE;
                }
              return result;
            }

        default:
          break;
        }
    }
  return NULL_TREE;
}
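
/* Examples of the constant merging above (added commentary, not part
   of the original source):
     (x != 5) OR (x > 3)   => true    (5 satisfies x > 3)
     (x < 5)  OR (x < 3)   => x < 5   (less restrictive < test)
     (x < 3)  OR (x > 3)   => x != 3  (singleton gap)
     (x <= 5) OR (x >= 3)  => true    (no range restriction)  */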
/* Try to simplify the OR of two comparisons, specified by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be simplified to a single expression (without requiring
   introducing more SSA variables to hold intermediate values),
   return the resulting tree.  Otherwise return NULL_TREE.
   If the result expression is non-null, it has boolean type.  */

tree
maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
                           enum tree_code code2, tree op2a, tree op2b)
{
  tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
  if (t)
    return t;
  else
    return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
}
/* Fold STMT to a constant using VALUEIZE to valueize SSA names.

   Either NULL_TREE, a simplified but non-constant or a constant
   is returned.

   ???  This should go into a gimple-fold-inline.h file to be eventually
   privatized with the single valueize function used in the various TUs
   to avoid the indirect function call overhead.  */

tree
gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
                                tree (*gvalueize) (tree))
{
  code_helper rcode;
  tree ops[3] = {};

  /* ???  The SSA propagators do not correctly deal with following SSA use-def
     edges if there are intermediate VARYING defs.  For this reason
     do not follow SSA edges here even though SCCVN can technically
     just deal fine with that.  */
  if (gimple_simplify (stmt, &rcode, ops, NULL, gvalueize, valueize))
    {
      tree res = NULL_TREE;
      if (gimple_simplified_result_is_gimple_val (rcode, ops))
        res = ops[0];
      else if (mprts_hook)
        res = mprts_hook (rcode, gimple_expr_type (stmt), ops);
      if (res)
        {
          if (dump_file && dump_flags & TDF_DETAILS)
            {
              fprintf (dump_file, "Match-and-simplified ");
              print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
              fprintf (dump_file, " to ");
              print_generic_expr (dump_file, res, 0);
              fprintf (dump_file, "\n");
            }
          return res;
        }
    }

  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        enum tree_code subcode = gimple_assign_rhs_code (stmt);

        switch (get_gimple_rhs_class (subcode))
          {
          case GIMPLE_SINGLE_RHS:
            {
              tree rhs = gimple_assign_rhs1 (stmt);
              enum tree_code_class kind = TREE_CODE_CLASS (subcode);

              if (TREE_CODE (rhs) == SSA_NAME)
                {
                  /* If the RHS is an SSA_NAME, return its known constant value,
                     if any.  */
                  return (*valueize) (rhs);
                }
              /* Handle propagating invariant addresses into address
                 operations.  */
              else if (TREE_CODE (rhs) == ADDR_EXPR
                       && !is_gimple_min_invariant (rhs))
                {
                  HOST_WIDE_INT offset = 0;
                  tree base;
                  base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
                                                          &offset,
                                                          valueize);
                  if (base
                      && (CONSTANT_CLASS_P (base)
                          || decl_address_invariant_p (base)))
                    return build_invariant_address (TREE_TYPE (rhs),
                                                    base, offset);
                }
              else if (TREE_CODE (rhs) == CONSTRUCTOR
                       && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
                       && (CONSTRUCTOR_NELTS (rhs)
                           == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
                {
                  unsigned i;
                  tree val, *vec;

                  vec = XALLOCAVEC (tree,
                                    TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
                  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
                    {
                      val = (*valueize) (val);
                      if (TREE_CODE (val) == INTEGER_CST
                          || TREE_CODE (val) == REAL_CST
                          || TREE_CODE (val) == FIXED_CST)
                        vec[i] = val;
                      else
                        return NULL_TREE;
                    }

                  return build_vector (TREE_TYPE (rhs), vec);
                }
              if (subcode == OBJ_TYPE_REF)
                {
                  tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
                  /* If callee is constant, we can fold away the wrapper.  */
                  if (is_gimple_min_invariant (val))
                    return val;
                }

              if (kind == tcc_reference)
                {
                  if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
                       || TREE_CODE (rhs) == REALPART_EXPR
                       || TREE_CODE (rhs) == IMAGPART_EXPR)
                      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
                    {
                      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
                      return fold_unary_loc (EXPR_LOCATION (rhs),
                                             TREE_CODE (rhs),
                                             TREE_TYPE (rhs), val);
                    }
                  else if (TREE_CODE (rhs) == BIT_FIELD_REF
                           && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
                    {
                      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
                      return fold_ternary_loc (EXPR_LOCATION (rhs),
                                               TREE_CODE (rhs),
                                               TREE_TYPE (rhs), val,
                                               TREE_OPERAND (rhs, 1),
                                               TREE_OPERAND (rhs, 2));
                    }
                  else if (TREE_CODE (rhs) == MEM_REF
                           && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
                    {
                      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
                      if (TREE_CODE (val) == ADDR_EXPR
                          && is_gimple_min_invariant (val))
                        {
                          tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
                                                  unshare_expr (val),
                                                  TREE_OPERAND (rhs, 1));
                          if (tem)
                            rhs = tem;
                        }
                    }
                  return fold_const_aggregate_ref_1 (rhs, valueize);
                }
              else if (kind == tcc_declaration)
                return get_symbol_constant_value (rhs);
              return rhs;
            }

          case GIMPLE_UNARY_RHS:
            return NULL_TREE;

          case GIMPLE_BINARY_RHS:
            /* Translate &x + CST into an invariant form suitable for
               further propagation.  */
            if (subcode == POINTER_PLUS_EXPR)
              {
                tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
                tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
                if (TREE_CODE (op0) == ADDR_EXPR
                    && TREE_CODE (op1) == INTEGER_CST)
                  {
                    tree off = fold_convert (ptr_type_node, op1);
                    return build_fold_addr_expr_loc
                        (loc,
                         fold_build2 (MEM_REF,
                                      TREE_TYPE (TREE_TYPE (op0)),
                                      unshare_expr (op0), off));
                  }
              }
            /* Canonicalize bool != 0 and bool == 0 appearing after
               valueization.  While gimple_simplify handles this
               it can get confused by the ~X == 1 -> X == 0 transform
               which we can't reduce to a SSA name or a constant
               (and we have no way to tell gimple_simplify to not
               consider those transforms in the first place).  */
            else if (subcode == EQ_EXPR
                     || subcode == NE_EXPR)
              {
                tree lhs = gimple_assign_lhs (stmt);
                tree op0 = gimple_assign_rhs1 (stmt);
                if (useless_type_conversion_p (TREE_TYPE (lhs),
                                               TREE_TYPE (op0)))
                  {
                    tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
                    op0 = (*valueize) (op0);
                    if (TREE_CODE (op0) == INTEGER_CST)
                      std::swap (op0, op1);
                    if (TREE_CODE (op1) == INTEGER_CST
                        && ((subcode == NE_EXPR && integer_zerop (op1))
                            || (subcode == EQ_EXPR && integer_onep (op1))))
                      return op0;
                  }
              }
            return NULL_TREE;

          case GIMPLE_TERNARY_RHS:
            {
              /* Handle ternary operators that can appear in GIMPLE form.  */
              tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
              tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
              tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
              return fold_ternary_loc (loc, subcode,
                                       gimple_expr_type (stmt), op0, op1, op2);
            }

          default:
            gcc_unreachable ();
          }
      }

    case GIMPLE_CALL:
      {
        tree fn;
        gcall *call_stmt = as_a <gcall *> (stmt);

        if (gimple_call_internal_p (stmt))
          {
            enum tree_code subcode = ERROR_MARK;
            switch (gimple_call_internal_fn (stmt))
              {
              case IFN_UBSAN_CHECK_ADD:
                subcode = PLUS_EXPR;
                break;
              case IFN_UBSAN_CHECK_SUB:
                subcode = MINUS_EXPR;
                break;
              case IFN_UBSAN_CHECK_MUL:
                subcode = MULT_EXPR;
                break;
              case IFN_BUILTIN_EXPECT:
                {
                  tree arg0 = gimple_call_arg (stmt, 0);
                  tree op0 = (*valueize) (arg0);
                  if (TREE_CODE (op0) == INTEGER_CST)
                    return op0;
                  return NULL_TREE;
                }
              default:
                return NULL_TREE;
              }
            tree arg0 = gimple_call_arg (stmt, 0);
            tree arg1 = gimple_call_arg (stmt, 1);
            tree op0 = (*valueize) (arg0);
            tree op1 = (*valueize) (arg1);

            if (TREE_CODE (op0) != INTEGER_CST
                || TREE_CODE (op1) != INTEGER_CST)
              {
                switch (subcode)
                  {
                  case MULT_EXPR:
                    /* x * 0 = 0 * x = 0 without overflow.  */
                    if (integer_zerop (op0) || integer_zerop (op1))
                      return build_zero_cst (TREE_TYPE (arg0));
                    break;
                  case MINUS_EXPR:
                    /* y - y = 0 without overflow.  */
                    if (operand_equal_p (op0, op1, 0))
                      return build_zero_cst (TREE_TYPE (arg0));
                    break;
                  default:
                    break;
                  }
                return NULL_TREE;
              }

            tree res
              = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
            if (res
                && TREE_CODE (res) == INTEGER_CST
                && !TREE_OVERFLOW (res))
              return res;
            return NULL_TREE;
          }

        fn = (*valueize) (gimple_call_fn (stmt));
        if (TREE_CODE (fn) == ADDR_EXPR
            && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
            && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
            && gimple_builtin_call_types_compatible_p (stmt,
                                                       TREE_OPERAND (fn, 0)))
          {
            tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
            tree retval;
            unsigned i;
            for (i = 0; i < gimple_call_num_args (stmt); ++i)
              args[i] = (*valueize) (gimple_call_arg (stmt, i));
            retval = fold_builtin_call_array (loc,
                                              gimple_call_return_type (call_stmt),
                                              fn, gimple_call_num_args (stmt),
                                              args);
            if (retval)
              {
                /* fold_call_expr wraps the result inside a NOP_EXPR.  */
                STRIP_NOPS (retval);
                retval = fold_convert (gimple_call_return_type (call_stmt),
                                       retval);
              }
            return retval;
          }
        return NULL_TREE;
      }

    default:
      return NULL_TREE;
    }
}
/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
   Returns NULL_TREE if folding to a constant is not possible, otherwise
   returns a constant according to is_gimple_min_invariant.  */

tree
gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
{
  tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
  if (res && is_gimple_min_invariant (res))
    return res;
  return NULL_TREE;
}
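
/* Usage sketch (added commentary, not part of the original source):
   a caller with no lattice can use the trivial valueization callback,

     tree cst = gimple_fold_stmt_to_constant (stmt, no_follow_ssa_edges);

   whereas CCP-style propagators pass a callback that returns the
   lattice value recorded for each SSA name.  */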
/* The following set of functions are supposed to fold references using
   their constant initializers.  */

/* See if we can find constructor defining value of BASE.
   When we know the constructor with constant offset (such as
   base is array[40] and we do know constructor of array), then
   BIT_OFFSET is adjusted accordingly.

   As a special case, return error_mark_node when constructor
   is not explicitly available, but it is known to be zero
   such as 'static const int a;'.  */

static tree
get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
                      tree (*valueize)(tree))
{
  HOST_WIDE_INT bit_offset2, size, max_size;
  bool reverse;

  if (TREE_CODE (base) == MEM_REF)
    {
      if (!integer_zerop (TREE_OPERAND (base, 1)))
        {
          if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
            return NULL_TREE;
          *bit_offset += (mem_ref_offset (base).to_short_addr ()
                          * BITS_PER_UNIT);
        }

      if (valueize
          && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
        base = valueize (TREE_OPERAND (base, 0));
      if (!base || TREE_CODE (base) != ADDR_EXPR)
        return NULL_TREE;
      base = TREE_OPERAND (base, 0);
    }
  else if (valueize
           && TREE_CODE (base) == SSA_NAME)
    base = valueize (base);

  /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
     DECL_INITIAL.  If BASE is a nested reference into another
     ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
     the inner reference.  */
  switch (TREE_CODE (base))
    {
    case VAR_DECL:
    case CONST_DECL:
      {
        tree init = ctor_for_folding (base);

        /* Our semantic is exact opposite of ctor_for_folding;
           NULL means unknown, while error_mark_node is 0.  */
        if (init == error_mark_node)
          return NULL_TREE;
        if (!init)
          return error_mark_node;
        return init;
      }

    case VIEW_CONVERT_EXPR:
      return get_base_constructor (TREE_OPERAND (base, 0),
                                   bit_offset, valueize);

    case ARRAY_REF:
    case COMPONENT_REF:
      base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
                                      &reverse);
      if (max_size == -1 || size != max_size)
        return NULL_TREE;
      *bit_offset += bit_offset2;
      return get_base_constructor (base, bit_offset, valueize);

    case STRING_CST:
    case CONSTRUCTOR:
      return base;

    default:
      if (CONSTANT_CLASS_P (base))
        return base;

      return NULL_TREE;
    }
}
/* CTOR is CONSTRUCTOR of an array type.  Fold reference of type TYPE and size
   SIZE to the memory at bit OFFSET.  */

static tree
fold_array_ctor_reference (tree type, tree ctor,
                           unsigned HOST_WIDE_INT offset,
                           unsigned HOST_WIDE_INT size,
                           tree from_decl)
{
  offset_int low_bound;
  offset_int elt_size;
  offset_int access_index;
  tree domain_type = NULL_TREE;
  HOST_WIDE_INT inner_offset;

  /* Compute low bound and elt size.  */
  if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
    domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    {
      /* Static constructors for variably sized objects make no sense.  */
      if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
        return NULL_TREE;
      low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
    }
  else
    low_bound = 0;
  /* Static constructors for variably sized objects make no sense.  */
  if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
    return NULL_TREE;
  elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));

  /* We can handle only constantly sized accesses that are known to not
     be larger than size of array element.  */
  if (!TYPE_SIZE_UNIT (type)
      || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
      || elt_size < wi::to_offset (TYPE_SIZE_UNIT (type))
      || elt_size == 0)
    return NULL_TREE;

  /* Compute the array index we look for.  */
  access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
                                 elt_size);
  access_index += low_bound;

  /* And offset within the access.  */
  inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);

  /* See if the array field is large enough to span whole access.  We do not
     care to fold accesses spanning multiple array indexes.  */
  if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
    return NULL_TREE;
  if (tree val = get_array_ctor_element_at_index (ctor, access_index))
    return fold_ctor_reference (type, val, inner_offset, size, from_decl);

  /* When memory is not explicitly mentioned in constructor,
     it is 0 (or out of range).  */
  return build_zero_cst (type);
}
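
/* Worked example (added commentary, not part of the original source):
   for a constructor of int a[8] (elt_size 4 bytes) and a 32-bit read
   at bit OFFSET 96, access_index = (96 / BITS_PER_UNIT) / 4 = 3 and
   inner_offset = 0, so the reference folds to the constructor element
   at index 3, or to zero if that element was omitted.  */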
/* CTOR is CONSTRUCTOR of an aggregate or vector.
   Fold reference of type TYPE and size SIZE to the memory at bit OFFSET.  */

static tree
fold_nonarray_ctor_reference (tree type, tree ctor,
                              unsigned HOST_WIDE_INT offset,
                              unsigned HOST_WIDE_INT size,
                              tree from_decl)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
                            cval)
    {
      tree byte_offset = DECL_FIELD_OFFSET (cfield);
      tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
      tree field_size = DECL_SIZE (cfield);
      offset_int bitoffset;
      offset_int bitoffset_end, access_end;

      /* Variable sized objects in static constructors make no sense,
         but field_size can be NULL for flexible array members.  */
      gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
                  && TREE_CODE (byte_offset) == INTEGER_CST
                  && (field_size != NULL_TREE
                      ? TREE_CODE (field_size) == INTEGER_CST
                      : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));

      /* Compute bit offset of the field.  */
      bitoffset = (wi::to_offset (field_offset)
                   + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
      /* Compute bit offset where the field ends.  */
      if (field_size != NULL_TREE)
        bitoffset_end = bitoffset + wi::to_offset (field_size);
      else
        bitoffset_end = 0;

      access_end = offset_int (offset) + size;

      /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
         [BITOFFSET, BITOFFSET_END)?  */
      if (wi::cmps (access_end, bitoffset) > 0
          && (field_size == NULL_TREE
              || wi::lts_p (offset, bitoffset_end)))
        {
          offset_int inner_offset = offset_int (offset) - bitoffset;
          /* We do have overlap.  Now see if field is large enough to
             cover the access.  Give up for accesses spanning multiple
             fields.  */
          if (wi::cmps (access_end, bitoffset_end) > 0)
            return NULL_TREE;
          if (offset < bitoffset)
            return NULL_TREE;
          return fold_ctor_reference (type, cval,
                                      inner_offset.to_uhwi (), size,
                                      from_decl);
        }
    }
  /* When memory is not explicitly mentioned in constructor, it is 0.  */
  return build_zero_cst (type);
}
/* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
   to the memory at bit OFFSET.  */

tree
fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
                     unsigned HOST_WIDE_INT size, tree from_decl)
{
  tree ret;

  /* We found the field with exact match.  */
  if (useless_type_conversion_p (type, TREE_TYPE (ctor))
      && !offset)
    return canonicalize_constructor_val (unshare_expr (ctor), from_decl);

  /* We are at the end of walk, see if we can view convert the
     result.  */
  if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
      /* VIEW_CONVERT_EXPR is defined only for matching sizes.  */
      && !compare_tree_int (TYPE_SIZE (type), size)
      && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
    {
      ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
      ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
      if (ret)
        STRIP_USELESS_TYPE_CONVERSION (ret);
      return ret;
    }
  /* For constants and byte-aligned/sized reads try to go through
     native_encode/interpret.  */
  if (CONSTANT_CLASS_P (ctor)
      && BITS_PER_UNIT == 8
      && offset % BITS_PER_UNIT == 0
      && size % BITS_PER_UNIT == 0
      && size <= MAX_BITSIZE_MODE_ANY_MODE)
    {
      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
      int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
                                    offset / BITS_PER_UNIT);
      if (len > 0)
        return native_interpret_expr (type, buf, len);
    }
  if (TREE_CODE (ctor) == CONSTRUCTOR)
    {
      if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
          || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
        return fold_array_ctor_reference (type, ctor, offset, size,
                                          from_decl);
      else
        return fold_nonarray_ctor_reference (type, ctor, offset, size,
                                             from_decl);
    }

  return NULL_TREE;
}
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates valueizing SSA
   names using VALUEIZE.  Return NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
{
  tree ctor, idx, base;
  HOST_WIDE_INT offset, size, max_size;
  tree tem;
  bool reverse;

  if (TREE_THIS_VOLATILE (t))
    return NULL_TREE;

  if (DECL_P (t))
    return get_symbol_constant_value (t);

  tem = fold_read_from_constant_string (t);
  if (tem)
    return tem;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Constant indexes are handled well by get_base_constructor.
         Only special case variable offsets.
         FIXME: This code can't handle nested references with variable indexes
         (they will be handled only by iteration of ccp).  Perhaps we can bring
         get_ref_base_and_extent here and make it use a valueize callback.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
          && valueize
          && (idx = (*valueize) (TREE_OPERAND (t, 1)))
          && TREE_CODE (idx) == INTEGER_CST)
        {
          tree low_bound, unit_size;

          /* If the resulting bit-offset is constant, track it.  */
          if ((low_bound = array_ref_low_bound (t),
               TREE_CODE (low_bound) == INTEGER_CST)
              && (unit_size = array_ref_element_size (t),
                  tree_fits_uhwi_p (unit_size)))
            {
              offset_int woffset
                = wi::sext (wi::to_offset (idx) - wi::to_offset (low_bound),
                            TYPE_PRECISION (TREE_TYPE (idx)));

              if (wi::fits_shwi_p (woffset))
                {
                  offset = woffset.to_shwi ();
                  /* TODO: This code seems wrong, multiply then check
                     to see if it fits.  */
                  offset *= tree_to_uhwi (unit_size);
                  offset *= BITS_PER_UNIT;

                  base = TREE_OPERAND (t, 0);
                  ctor = get_base_constructor (base, &offset, valueize);
                  /* Empty constructor.  Always fold to 0.  */
                  if (ctor == error_mark_node)
                    return build_zero_cst (TREE_TYPE (t));
                  /* Out of bound array access.  Value is undefined,
                     but don't fold.  */
                  if (offset < 0)
                    return NULL_TREE;
                  /* We cannot determine ctor.  */
                  if (!ctor)
                    return NULL_TREE;
                  return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
                                              tree_to_uhwi (unit_size)
                                              * BITS_PER_UNIT,
                                              base);
                }
            }
        }
      /* Fallthru.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case TARGET_MEM_REF:
    case MEM_REF:
      base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
      ctor = get_base_constructor (base, &offset, valueize);

      /* Empty constructor.  Always fold to 0.  */
      if (ctor == error_mark_node)
        return build_zero_cst (TREE_TYPE (t));
      /* We do not know precise address.  */
      if (max_size == -1 || max_size != size)
        return NULL_TREE;
      /* We cannot determine ctor.  */
      if (!ctor)
        return NULL_TREE;

      /* Out of bound array access.  Value is undefined, but don't fold.  */
      if (offset < 0)
        return NULL_TREE;

      return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
                                  base);

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
        tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
        if (c && TREE_CODE (c) == COMPLEX_CST)
          return fold_build1_loc (EXPR_LOCATION (t),
                                  TREE_CODE (t), TREE_TYPE (t), c);
        break;
      }

    default:
      break;
    }

  return NULL_TREE;
}

tree
fold_const_aggregate_ref (tree t)
{
  return fold_const_aggregate_ref_1 (t, NULL);
}
/* Lookup virtual method with index TOKEN in a virtual table V
   at OFFSET.
   Set CAN_REFER if non-NULL to false if method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by non-C++ produced symbol).  Otherwise just return NULL in that case.  */

tree
gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
                                   tree v,
                                   unsigned HOST_WIDE_INT offset,
                                   bool *can_refer)
{
  tree vtable = v, init, fn;
  unsigned HOST_WIDE_INT size;
  unsigned HOST_WIDE_INT elt_size, access_index;
  tree domain_type;

  if (can_refer)
    *can_refer = true;

  /* First of all double check we have virtual table.  */
  if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
    {
      /* Pass down that we lost track of the target.  */
      if (can_refer)
        *can_refer = false;
      return NULL_TREE;
    }

  init = ctor_for_folding (v);

  /* The virtual tables should always be born with constructors
     and we always should assume that they are available for
     folding.  At the moment we do not stream them in all cases,
     but it should never happen that ctor seem unreachable.  */
  gcc_assert (init);
  if (init == error_mark_node)
    {
      gcc_assert (in_lto_p);
      /* Pass down that we lost track of the target.  */
      if (can_refer)
        *can_refer = false;
      return NULL_TREE;
    }
  gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
  size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
  offset *= BITS_PER_UNIT;
  offset += token * size;

  /* Lookup the value in the constructor that is assumed to be array.
     This is equivalent to
     fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
                               offset, size, NULL);
     but in a constant time.  We expect that frontend produced a simple
     array without indexed initializers.  */

  gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
  domain_type = TYPE_DOMAIN (TREE_TYPE (init));
  gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
  elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));

  access_index = offset / BITS_PER_UNIT / elt_size;
  gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);

  /* This code makes an assumption that there are no
     indexed fields produced by C++ FE, so we can directly index the array.  */
  if (access_index < CONSTRUCTOR_NELTS (init))
    {
      fn = CONSTRUCTOR_ELT (init, access_index)->value;
      gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
      STRIP_NOPS (fn);
    }
  else
    fn = NULL;

  /* For type inconsistent program we may end up looking up virtual method
     in virtual table that does not contain TOKEN entries.  We may overrun
     the virtual table and pick up a constant or RTTI info pointer.
     In any case the call is undefined.  */
  if (!fn
      || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
      || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
    fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
  else
    {
      fn = TREE_OPERAND (fn, 0);

      /* When cgraph node is missing and function is not public, we cannot
         devirtualize.  This can happen in WHOPR when the actual method
         ends up in other partition, because we found devirtualization
         possibility too late.  */
      if (!can_refer_decl_in_current_unit_p (fn, vtable))
        {
          if (can_refer)
            {
              *can_refer = false;
              return fn;
            }
          return NULL_TREE;
        }
    }

  /* Make sure we create a cgraph node for functions we'll reference.
     They can be non-existent if the reference comes from an entry
     of an external vtable for example.  */
  cgraph_node::get_create (fn);

  return fn;
}
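
/* Worked example (added commentary, not part of the original source,
   assuming 8-byte vtable slots): for token 2 at byte OFFSET 16 the
   code computes offset = 16*8 + 2*64 = 256 bits, hence
   access_index = 256 / 8 / 8 = 4, i.e. the fifth element of the
   vtable's constructor array.  */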
/* Return a declaration of a function which an OBJ_TYPE_REF references.  TOKEN
   is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
   KNOWN_BINFO carries the binfo describing the true type of
   OBJ_TYPE_REF_OBJECT(REF).
   Set CAN_REFER if non-NULL to false if method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by non-C++ produced symbol).  Otherwise just return NULL in that case.  */

tree
gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
                                  bool *can_refer)
{
  unsigned HOST_WIDE_INT offset;
  tree v;

  v = BINFO_VTABLE (known_binfo);
  /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone.  */
  if (!v)
    return NULL_TREE;

  if (!vtable_pointer_value_to_vtable (v, &v, &offset))
    {
      if (can_refer)
        *can_refer = false;
      return NULL_TREE;
    }
  return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
}
/* Given a pointer value T, return a simplified version of an
   indirection through T, or NULL_TREE if no simplification is
   possible.  Note that the resulting type may be different from
   the type pointed to in the sense that it is still compatible
   from the langhooks point of view.  */

tree
gimple_fold_indirect_ref (tree t)
{
  tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
  tree sub = t;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (useless_type_conversion_p (type, optype))
        return op;

      /* *(foo *)&fooarray => fooarray[0] */
      if (TREE_CODE (optype) == ARRAY_TYPE
          && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
          && useless_type_conversion_p (type, TREE_TYPE (optype)))
        {
          tree type_domain = TYPE_DOMAIN (optype);
          tree min_val = size_zero_node;
          if (type_domain && TYPE_MIN_VALUE (type_domain))
            min_val = TYPE_MIN_VALUE (type_domain);
          if (TREE_CODE (min_val) == INTEGER_CST)
            return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
        }
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
               && useless_type_conversion_p (type, TREE_TYPE (optype)))
        return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
               && useless_type_conversion_p (type, TREE_TYPE (optype)))
        {
          tree part_width = TYPE_SIZE (type);
          tree index = bitsize_int (0);
          return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
        }
    }
  /* *(p + CST) -> ...  */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree addr = TREE_OPERAND (sub, 0);
      tree off = TREE_OPERAND (sub, 1);
      tree addrtype;

      STRIP_NOPS (addr);
      addrtype = TREE_TYPE (addr);

      /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
      if (TREE_CODE (addr) == ADDR_EXPR
          && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
          && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
          && tree_fits_uhwi_p (off))
        {
          unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
          tree part_width = TYPE_SIZE (type);
          unsigned HOST_WIDE_INT part_widthi
            = tree_to_shwi (part_width) / BITS_PER_UNIT;
          unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
          tree index = bitsize_int (indexi);
          if (offset / part_widthi
              < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
            return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
                                part_width, index);
        }
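
      /* Worked example (assumed numbers, for illustration only): with a
         V4SF vector V, TYPE == float and OFF == 4 we get
         part_widthi == 32 / 8 == 4, indexi == 4 * 8 == 32 and
         offset / part_widthi == 1 < 4, so the result is
         BIT_FIELD_REF <V, 32, 32>, i.e. V's second element.  */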
      /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
      if (TREE_CODE (addr) == ADDR_EXPR
          && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
          && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
        {
          tree size = TYPE_SIZE_UNIT (type);
          if (tree_int_cst_equal (size, off))
            return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
        }

      /* *(p + CST) -> MEM_REF <p, CST>.  */
      if (TREE_CODE (addr) != ADDR_EXPR
          || DECL_P (TREE_OPERAND (addr, 0)))
        return fold_build2 (MEM_REF, type,
                            addr,
                            wide_int_to_tree (ptype, off));
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
      && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      tree osub = sub;
      sub = gimple_fold_indirect_ref (sub);
      if (!sub)
        sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
        min_val = TYPE_MIN_VALUE (type_domain);
      if (TREE_CODE (min_val) == INTEGER_CST)
        return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
/* Return true if CODE is an operation that, when operating on signed
   integer types, involves undefined behavior on overflow and the
   operation can be expressed with unsigned arithmetic.  */

bool
arith_code_with_undefined_signed_overflow (tree_code code)
{
  switch (code)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case NEGATE_EXPR:
    case POINTER_PLUS_EXPR:
      return true;
    default:
      return false;
    }
}
/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
   operation that can be transformed to unsigned arithmetic by converting
   its operands, carrying out the operation in the corresponding unsigned
   type and converting the result back to the original type.

   Returns a sequence of statements that replace STMT and also contain
   a modified form of STMT itself.  */

gimple_seq
rewrite_to_defined_overflow (gimple *stmt)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "rewriting stmt with undefined signed "
               "overflow ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
    }

  tree lhs = gimple_assign_lhs (stmt);
  tree type = unsigned_type_for (TREE_TYPE (lhs));
  gimple_seq stmts = NULL;
  for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
    {
      tree op = gimple_op (stmt, i);
      op = gimple_convert (&stmts, type, op);
      gimple_set_op (stmt, i, op);
    }
  gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
  gimple_seq_add_stmt (&stmts, stmt);
  gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
  gimple_seq_add_stmt (&stmts, cvt);

  return stmts;
}
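
/* For illustration (a sketch of the effect, not verbatim compiler
   output): a signed addition

     res_1 = a_2 + b_3;

   whose overflow would invoke undefined behavior comes back as the
   sequence

     _4 = (unsigned int) a_2;
     _5 = (unsigned int) b_3;
     _6 = _4 + _5;
     res_1 = (int) _6;

   where the arithmetic is carried out in the wrapping unsigned type
   and only the final conversion reuses the original LHS.  */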
/* The valueization hook we use for the gimple_build API simplification.
   This makes us match fold_buildN behavior by only combining with
   statements in the sequence(s) we are currently building.  */

static tree
gimple_build_valueize (tree op)
{
  if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
    return op;
  return NULL_TREE;
}
/* Build the expression CODE OP0 of type TYPE with location LOC,
   simplifying it first if possible.  Returns the built
   expression value and appends statements possibly defining it
   to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
              enum tree_code code, tree type, tree op0)
{
  tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
  if (!res)
    {
      res = create_tmp_reg_or_ssa_name (type);
      gimple *stmt;
      if (code == REALPART_EXPR
          || code == IMAGPART_EXPR
          || code == VIEW_CONVERT_EXPR)
        stmt = gimple_build_assign (res, code, build1 (code, type, op0));
      else
        stmt = gimple_build_assign (res, code, op0);
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
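
/* A minimal usage sketch (GSI, LOC and VAL stand for assumed caller
   state):

     gimple_seq seq = NULL;
     tree neg = gimple_build (&seq, loc, NEGATE_EXPR, TREE_TYPE (val), val);
     gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);

   If the negation simplifies, for example because VAL is a constant,
   SEQ stays empty and NEG is the folded value.  */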
/* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
   simplifying it first if possible.  Returns the built
   expression value and appends statements possibly defining it
   to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
              enum tree_code code, tree type, tree op0, tree op1)
{
  tree res = gimple_simplify (code, type, op0, op1, seq,
                              gimple_build_valueize);
  if (!res)
    {
      res = create_tmp_reg_or_ssa_name (type);
      gimple *stmt = gimple_build_assign (res, code, op0, op1);
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
/* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
   simplifying it first if possible.  Returns the built
   expression value and appends statements possibly defining it
   to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
              enum tree_code code, tree type, tree op0, tree op1, tree op2)
{
  tree res = gimple_simplify (code, type, op0, op1, op2,
                              seq, gimple_build_valueize);
  if (!res)
    {
      res = create_tmp_reg_or_ssa_name (type);
      gimple *stmt;
      if (code == BIT_FIELD_REF)
        stmt = gimple_build_assign (res, code,
                                    build3 (code, type, op0, op1, op2));
      else
        stmt = gimple_build_assign (res, code, op0, op1, op2);
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
/* Build the call FN (ARG0) with a result of type TYPE
   (or no result if TYPE is void) with location LOC,
   simplifying it first if possible.  Returns the built
   expression value (or NULL_TREE if TYPE is void) and appends
   statements possibly defining it to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
              enum built_in_function fn, tree type, tree arg0)
{
  tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
  if (!res)
    {
      tree decl = builtin_decl_implicit (fn);
      gimple *stmt = gimple_build_call (decl, 1, arg0);
      if (!VOID_TYPE_P (type))
        {
          res = create_tmp_reg_or_ssa_name (type);
          gimple_call_set_lhs (stmt, res);
        }
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
/* Build the call FN (ARG0, ARG1) with a result of type TYPE
   (or no result if TYPE is void) with location LOC,
   simplifying it first if possible.  Returns the built
   expression value (or NULL_TREE if TYPE is void) and appends
   statements possibly defining it to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
              enum built_in_function fn, tree type, tree arg0, tree arg1)
{
  tree res = gimple_simplify (fn, type, arg0, arg1, seq,
                              gimple_build_valueize);
  if (!res)
    {
      tree decl = builtin_decl_implicit (fn);
      gimple *stmt = gimple_build_call (decl, 2, arg0, arg1);
      if (!VOID_TYPE_P (type))
        {
          res = create_tmp_reg_or_ssa_name (type);
          gimple_call_set_lhs (stmt, res);
        }
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
/* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
   (or no result if TYPE is void) with location LOC,
   simplifying it first if possible.  Returns the built
   expression value (or NULL_TREE if TYPE is void) and appends
   statements possibly defining it to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
              enum built_in_function fn, tree type,
              tree arg0, tree arg1, tree arg2)
{
  tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
                              seq, gimple_build_valueize);
  if (!res)
    {
      tree decl = builtin_decl_implicit (fn);
      gimple *stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
      if (!VOID_TYPE_P (type))
        {
          res = create_tmp_reg_or_ssa_name (type);
          gimple_call_set_lhs (stmt, res);
        }
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
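
/* A minimal usage sketch for the built-in overloads (X and LOC stand
   for assumed caller state):

     gimple_seq seq = NULL;
     tree r = gimple_build (&seq, loc, BUILT_IN_SQRT, TREE_TYPE (x), x);

   As with the tree_code overloads, SEQ receives a call statement only
   when gimple_simplify could not fold the computation outright.  */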
/* Build the conversion (TYPE) OP with a result of type TYPE
   with location LOC if such conversion is necessary in GIMPLE,
   simplifying it first.
   Returns the built expression value and appends
   statements possibly defining it to SEQ.  */

tree
gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
{
  if (useless_type_conversion_p (type, TREE_TYPE (op)))
    return op;
  return gimple_build (seq, loc, NOP_EXPR, type, op);
}
/* Build the conversion (ptrofftype) OP with a result of a type
   compatible with ptrofftype with location LOC if such conversion
   is necessary in GIMPLE, simplifying it first.
   Returns the built expression value and appends
   statements possibly defining it to SEQ.  */

tree
gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
{
  if (ptrofftype_p (TREE_TYPE (op)))
    return op;
  return gimple_convert (seq, loc, sizetype, op);
}
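
/* A minimal combined usage sketch (PTR, OFF and LOC stand for assumed
   caller state):

     gimple_seq seq = NULL;
     tree o = gimple_convert_to_ptrofftype (&seq, loc, off);
     tree p = gimple_build (&seq, loc, POINTER_PLUS_EXPR,
                            TREE_TYPE (ptr), ptr, o);

   POINTER_PLUS_EXPR requires its offset operand to be of a type
   compatible with sizetype, which is exactly what the conversion
   above guarantees.  */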
/* Return true if the result of assignment STMT is known to be non-negative.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

static bool
gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
                                   int depth)
{
  enum tree_code code = gimple_assign_rhs_code (stmt);
  switch (get_gimple_rhs_class (code))
    {
    case GIMPLE_UNARY_RHS:
      return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
                                             gimple_expr_type (stmt),
                                             gimple_assign_rhs1 (stmt),
                                             strict_overflow_p, depth);
    case GIMPLE_BINARY_RHS:
      return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
                                              gimple_expr_type (stmt),
                                              gimple_assign_rhs1 (stmt),
                                              gimple_assign_rhs2 (stmt),
                                              strict_overflow_p, depth);
    case GIMPLE_TERNARY_RHS:
      return false;
    case GIMPLE_SINGLE_RHS:
      return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
                                              strict_overflow_p, depth);
    case GIMPLE_INVALID_RHS:
      break;
    }
  gcc_unreachable ();
}
/* Return true if the return value of call STMT is known to be non-negative.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

static bool
gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
                                 int depth)
{
  tree arg0 = gimple_call_num_args (stmt) > 0 ?
    gimple_call_arg (stmt, 0) : NULL_TREE;
  tree arg1 = gimple_call_num_args (stmt) > 1 ?
    gimple_call_arg (stmt, 1) : NULL_TREE;

  return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
                                        gimple_call_combined_fn (stmt),
                                        arg0, arg1,
                                        strict_overflow_p, depth);
}
/* Return true if the result of PHI node STMT is known to be non-negative.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

static bool
gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
                                int depth)
{
  for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
    {
      tree arg = gimple_phi_arg_def (stmt, i);
      if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
        return false;
    }
  return true;
}
/* Return true if STMT is known to compute a non-negative value.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

bool
gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
                                 int depth)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
                                                depth);
    case GIMPLE_CALL:
      return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
                                              depth);
    case GIMPLE_PHI:
      return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
                                             depth);
    default:
      return false;
    }
}
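
/* A minimal usage sketch (DEF is an assumed SSA name): a caller that
   must not rely on undefined signed overflow would check

     bool strict_ovf = false;
     if (gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (def),
                                          &strict_ovf, 0)
         && !strict_ovf)
       ...

   starting the recursion at DEPTH == 0.  */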
/* Return true if the floating-point value computed by assignment STMT
   is known to have an integer value.  We also allow +Inf, -Inf and NaN
   to be considered integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

static bool
gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
{
  enum tree_code code = gimple_assign_rhs_code (stmt);
  switch (get_gimple_rhs_class (code))
    {
    case GIMPLE_UNARY_RHS:
      return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
                                          gimple_assign_rhs1 (stmt), depth);
    case GIMPLE_BINARY_RHS:
      return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
                                           gimple_assign_rhs1 (stmt),
                                           gimple_assign_rhs2 (stmt), depth);
    case GIMPLE_TERNARY_RHS:
      return false;
    case GIMPLE_SINGLE_RHS:
      return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
    case GIMPLE_INVALID_RHS:
      break;
    }
  gcc_unreachable ();
}
/* Return true if the floating-point value computed by call STMT is known
   to have an integer value.  We also allow +Inf, -Inf and NaN to be
   considered integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

static bool
gimple_call_integer_valued_real_p (gimple *stmt, int depth)
{
  tree arg0 = (gimple_call_num_args (stmt) > 0
               ? gimple_call_arg (stmt, 0)
               : NULL_TREE);
  tree arg1 = (gimple_call_num_args (stmt) > 1
               ? gimple_call_arg (stmt, 1)
               : NULL_TREE);
  return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
                                     arg0, arg1, depth);
}
/* Return true if the floating-point result of phi STMT is known to have
   an integer value.  We also allow +Inf, -Inf and NaN to be considered
   integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

static bool
gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
{
  for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
    {
      tree arg = gimple_phi_arg_def (stmt, i);
      if (!integer_valued_real_single_p (arg, depth + 1))
        return false;
    }
  return true;
}
/* Return true if the floating-point value computed by STMT is known
   to have an integer value.  We also allow +Inf, -Inf and NaN to be
   considered integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

bool
gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      return gimple_assign_integer_valued_real_p (stmt, depth);
    case GIMPLE_CALL:
      return gimple_call_integer_valued_real_p (stmt, depth);
    case GIMPLE_PHI:
      return gimple_phi_integer_valued_real_p (stmt, depth);
    default:
      return false;
    }
}
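
/* A minimal usage sketch (X is an assumed SSA name of floating-point
   type): a caller wanting to drop a redundant trunc () could check

     if (gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (x), 0))
       ...

   starting the recursion at DEPTH == 0.  */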