/* Statement simplification on GIMPLE.
   Copyright (C) 2010-2015 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.c.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
23 #include "coretypes.h"
31 #include "fold-const.h"
33 #include "insn-config.h"
42 #include "stor-layout.h"
44 #include "dominance.h"
45 #include "internal-fn.h"
46 #include "gimple-fold.h"
48 #include "gimple-iterator.h"
49 #include "tree-into-ssa.h"
52 #include "tree-ssa-propagate.h"
55 #include "ipa-utils.h"
56 #include "gimple-pretty-print.h"
57 #include "tree-ssa-address.h"
58 #include "langhooks.h"
59 #include "gimplify-me.h"
64 #include "gimple-match.h"
/* Return true when DECL can be referenced from current unit.
   FROM_DECL (if non-null) specifies constructor of variable DECL was taken from.
   We can get declarations that are not possible to reference for various
   reasons:

     1) When analyzing C++ virtual tables.
	C++ virtual tables do have known constructors even
	when they are keyed to other compilation unit.
	Those tables can contain pointers to methods and vars
	in other units.  Those methods have both STATIC and EXTERNAL
	set.
     2) In WHOPR mode devirtualization might lead to reference
	to method that was partitioned elsewhere.
	In this case we have static VAR_DECL or FUNCTION_DECL
	that has no corresponding callgraph/varpool node
	duplicated.
     3) COMDAT functions referred by external vtables that
	we devirtualize only during final compilation stage.
	At this time we already decided that we will not output
	the function body and thus we can't reference the symbol
	directly.  */

static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  if (DECL_ABSTRACT_P (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL))
    return true;

  /* Static objects can be referred only if they were not optimized out yet.  */
  if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
    {
      /* Before we start optimizing unreachable code we can be sure all
	 static objects are defined.  */
      if (symtab->function_flags_ready)
	return true;
      snode = symtab_node::get (decl);
      if (!snode || !snode->definition)
	return false;
      node = dyn_cast <cgraph_node *> (snode);
      return !node || !node->global.inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from initializer of
     external var or var that has been optimized out.  */
  if (!from_decl
      || TREE_CODE (from_decl) != VAR_DECL
      || (!DECL_EXTERNAL (from_decl)
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->definition)
      || (flag_ltrans
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->in_other_partition))
    return true;
  /* We are folding reference from external vtable.  The vtable may refer
     to a symbol keyed to other compilation unit.  The other compilation
     unit may be in separate DSO and the symbol may be hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
    return false;
  /* When function is public, we always can introduce new reference.
     Exception are the COMDAT functions where introducing a direct
     reference implies the need to include the function body in the
     current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We have COMDAT.  We are going to check if we still have definition
     or if the definition is going to be output in other partition.
     Bypass this when gimplifying; all needed functions will be produced.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting to not necessarily give up because the copy will be
     output elsewhere when corresponding vtable is output.
     This is however not possible - the ABI specifies that COMDATs are output
     in units where they are used and when the other unit was compiled with
     LTO it is possible that vtable was kept public while the function itself
     was privatized.  */
  if (!symtab->function_flags_ready)
    return true;

  snode = symtab_node::get (decl);
  if (!snode
      || ((!snode->definition || DECL_EXTERNAL (decl))
	  && (!snode->in_other_partition
	      || (!snode->forced_by_abi && !snode->force_output))))
    return false;
  node = dyn_cast <cgraph_node *> (snode);
  return !node || !node->global.inlined_to;
}
/* CVAL is value taken from DECL_INITIAL of variable.  Try to transform it into
   acceptable form for is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specifies variable whose constructor contains CVAL.  */

tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  tree orig_cval = cval;
  STRIP_NOPS (cval);
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
	cval = build1_loc (EXPR_LOCATION (cval),
			   ADDR_EXPR, TREE_TYPE (ptr),
			   fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
					ptr,
					fold_convert (ptr_type_node,
						      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
	{
	  base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
	  if (base)
	    TREE_OPERAND (cval, 0) = base;
	}
      else
	base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
	return NULL_TREE;

      if ((TREE_CODE (base) == VAR_DECL
	   || TREE_CODE (base) == FUNCTION_DECL)
	  && !can_refer_decl_in_current_unit_p (base, from_decl))
	return NULL_TREE;
      if (TREE_CODE (base) == VAR_DECL)
	TREE_ADDRESSABLE (base) = 1;
      else if (TREE_CODE (base) == FUNCTION_DECL)
	{
	  /* Make sure we create a cgraph node for functions we'll reference.
	     They can be non-existent if the reference comes from an entry
	     of an external vtable for example.  */
	  cgraph_node::get_create (base);
	}
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  if (TREE_OVERFLOW_P (cval))
    return drop_tree_overflow (cval);
  return orig_cval;
}
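
/* For example (illustrative only, not from the original sources), a
   DECL_INITIAL value of the form

     &a p+ 4

   with A a referable global is rewritten into the invariant address

     &MEM[(char *)&a + 4B]

   which is_gimple_min_invariant accepts.  */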
/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

tree
get_symbol_constant_value (tree sym)
{
  tree val = ctor_for_folding (sym);
  if (val != error_mark_node)
    {
      if (val)
	{
	  val = canonicalize_constructor_val (unshare_expr (val), sym);
	  if (val && is_gimple_min_invariant (val))
	    return val;
	  else
	    return NULL_TREE;
	}
      /* Variables declared 'const' without an initializer
	 have zero as the initializer if they may not be
	 overridden at link or run time.  */
      if (!val
	  && is_gimple_reg_type (TREE_TYPE (sym)))
	return build_zero_cst (TREE_TYPE (sym));
    }

  return NULL_TREE;
}
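
/* For example (illustrative), given

     static const int answer = 42;

   a use of ANSWER folds to the constant 42, and a 'const' qualified
   register-type variable without an initializer that may not be
   overridden at link or run time folds to zero of its type.  */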
/* Subroutine of fold_stmt.  We perform several simplifications of the
   memory reference tree EXPR and make sure to re-gimplify them properly
   after propagation of constant addresses.  IS_LHS is true if the
   reference is supposed to be an lvalue.  */

static tree
maybe_fold_reference (tree expr, bool is_lhs)
{
  tree result;

  if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
       || TREE_CODE (expr) == REALPART_EXPR
       || TREE_CODE (expr) == IMAGPART_EXPR)
      && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_unary_loc (EXPR_LOCATION (expr),
			   TREE_CODE (expr),
			   TREE_TYPE (expr),
			   TREE_OPERAND (expr, 0));
  else if (TREE_CODE (expr) == BIT_FIELD_REF
	   && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_ternary_loc (EXPR_LOCATION (expr),
			     TREE_CODE (expr),
			     TREE_TYPE (expr),
			     TREE_OPERAND (expr, 0),
			     TREE_OPERAND (expr, 1),
			     TREE_OPERAND (expr, 2));

  if (!is_lhs
      && (result = fold_const_aggregate_ref (expr))
      && is_gimple_min_invariant (result))
    return result;

  return NULL_TREE;
}
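
/* For example (illustrative), REALPART_EXPR of the complex constant
   __complex__ (2.0, 3.0) folds via fold_unary_loc to 2.0, and a
   BIT_FIELD_REF of a constant folds via fold_ternary_loc.  */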
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	if (TREE_CLOBBER_P (rhs))
	  return NULL_TREE;

	if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs, false);

	else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
	  {
	    tree val = OBJ_TYPE_REF_EXPR (rhs);
	    if (is_gimple_min_invariant (val))
	      return val;
	    else if (flag_devirtualize && virtual_method_call_p (rhs))
	      {
		bool final;
		vec <cgraph_node *>targets
		  = possible_polymorphic_call_targets (rhs, stmt, &final);
		if (final && targets.length () <= 1 && dbg_cnt (devirt))
		  {
		    if (dump_enabled_p ())
		      {
			location_t loc = gimple_location_safe (stmt);
			dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
					 "resolving virtual function address "
					 "reference to function %s\n",
					 targets.length () == 1
					 ? targets[0]->name ()
					 : "NULL");
		      }
		    if (targets.length () == 1)
		      {
			val = fold_convert (TREE_TYPE (val),
					    build_fold_addr_expr_loc
					      (loc, targets[0]->decl));
			STRIP_USELESS_TYPE_CONVERSION (val);
		      }
		    else
		      /* We can not use __builtin_unreachable here because it
			 can not have address taken.  */
		      val = build_int_cst (TREE_TYPE (val), 0);
		    return val;
		  }
	      }
	  }

	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    tree ref = TREE_OPERAND (rhs, 0);
	    tree tem = maybe_fold_reference (ref, true);
	    if (tem
		&& TREE_CODE (tem) == MEM_REF
		&& integer_zerop (TREE_OPERAND (tem, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
	    else if (tem)
	      result = fold_convert (TREE_TYPE (rhs),
				     build_fold_addr_expr_loc (loc, tem));
	    else if (TREE_CODE (ref) == MEM_REF
		     && integer_zerop (TREE_OPERAND (ref, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		 && (CONSTRUCTOR_NELTS (rhs)
		     == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (TREE_CODE (val) != INTEGER_CST
		  && TREE_CODE (val) != REAL_CST
		  && TREE_CODE (val) != FIXED_CST)
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs))
	  return get_symbol_constant_value (rhs);

	/* If we couldn't fold the RHS, hand over to the generic
	   fold routines.  */
	if (result == NULL_TREE)
	  result = fold (rhs);

	/* Strip away useless type conversions.  Both the NON_LVALUE_EXPR
	   that may have been added by fold, and "useless" type
	   conversions that might now be apparent due to propagation.  */
	STRIP_USELESS_TYPE_CONVERSION (result);

	if (result != rhs && valid_gimple_rhs_p (result))
	  return result;

	return NULL_TREE;
      }

    case GIMPLE_UNARY_RHS:
      break;

    case GIMPLE_BINARY_RHS:
      break;

    case GIMPLE_TERNARY_RHS:
      result = fold_ternary_loc (loc, subcode,
				 TREE_TYPE (gimple_assign_lhs (stmt)),
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 gimple_assign_rhs3 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
/* Replace a statement at *SI_P with a sequence of statements in STMTS,
   adjusting the replacement stmts location and virtual operands.
   If the statement has a lhs the last stmt in the sequence is expected
   to assign to that lhs.  */

static void
gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
{
  gimple stmt = gsi_stmt (*si_p);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  gimple laststore = NULL;
  for (gimple_stmt_iterator i = gsi_last (stmts);
       !gsi_end_p (i); gsi_prev (&i))
    {
      gimple new_stmt = gsi_stmt (i);
      if ((gimple_assign_single_p (new_stmt)
	   && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	  || (is_gimple_call (new_stmt)
	      && (gimple_call_flags (new_stmt)
		  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
	{
	  tree vdef;
	  if (!laststore)
	    vdef = gimple_vdef (stmt);
	  else
	    vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
	  gimple_set_vdef (new_stmt, vdef);
	  if (vdef && TREE_CODE (vdef) == SSA_NAME)
	    SSA_NAME_DEF_STMT (vdef) = new_stmt;
	  laststore = new_stmt;
	}
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  tree reaching_vuse = gimple_vuse (stmt);
  for (gimple_stmt_iterator i = gsi_start (stmts);
       !gsi_end_p (i); gsi_next (&i))
    {
      gimple new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with exact SSA
	 name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
	gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
	reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
	  && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}
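
/* For example (illustrative), when a statement with virtual operands

     # .MEM_3 = VDEF <.MEM_1>
     foo (...);

   is replaced by a sequence of two stores, the earlier store receives
   VUSE .MEM_1 and a freshly created SSA name as its VDEF, while the
   last store inherits the original .MEM_3 as its VDEF, so the virtual
   use-def chain around the replacement stays intact.  */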
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple stmt, new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  push_gimplify_context (gimple_in_ssa_p (cfun));

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  pop_gimplify_context (NULL);
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  gsi_replace (si_p, gimple_build_nop (), true);
	  return;
	}
    }
  else
    {
      tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
				       GSI_CONTINUE_LINKING);
    }

  pop_gimplify_context (NULL);

  gsi_replace_with_seq_vops (si_p, stmts);
}
/* Replace the call at *GSI with the gimple value VAL.  */

static void
replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
{
  gimple stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  gimple repl;
  if (lhs)
    {
      if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
	val = fold_convert (TREE_TYPE (lhs), val);
      repl = gimple_build_assign (lhs, val);
    }
  else
    repl = gimple_build_nop ();
  tree vdef = gimple_vdef (stmt);
  if (vdef && TREE_CODE (vdef) == SSA_NAME)
    {
      unlink_stmt_vdef (stmt);
      release_ssa_name (vdef);
    }
  gsi_replace (gsi, repl, true);
}
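
/* For example (illustrative), folding

     _5 = strlen ("abc");

   with VAL == 3 replaces the call by '_5 = 3;', converting VAL to the
   type of the lhs if needed; with no lhs the call simply becomes a
   GIMPLE_NOP and its virtual definition is released.  */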
/* Replace the call at *GSI with the new call REPL and fold that
   again.  */

static void
replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple repl)
{
  gimple stmt = gsi_stmt (*gsi);
  gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
  gimple_set_location (repl, gimple_location (stmt));
  if (gimple_vdef (stmt)
      && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
    {
      gimple_set_vdef (repl, gimple_vdef (stmt));
      gimple_set_vuse (repl, gimple_vuse (stmt));
      SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
    }
  gsi_replace (gsi, repl, true);
  fold_stmt (gsi);
}
/* Return true if VAR is a VAR_DECL or a component thereof.  */

static bool
var_decl_component_p (tree var)
{
  tree inner = var;
  while (handled_component_p (inner))
    inner = TREE_OPERAND (inner, 0);
  return SSA_VAR_P (inner);
}
/* Fold function call to builtin mem{{,p}cpy,move}.  Return
   false if no simplification can be made.
   If ENDP is 0, return DEST (like memcpy).
   If ENDP is 1, return DEST+LEN (like mempcpy).
   If ENDP is 2, return DEST+LEN-1 (like stpcpy).
   If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
   (memmove).  */

static bool
gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
			       tree dest, tree src, int endp)
{
  gimple stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  tree len = gimple_call_arg (stmt, 2);
  tree destvar, srcvar;
  location_t loc = gimple_location (stmt);

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      gimple repl;
      if (gimple_call_lhs (stmt))
	repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
      else
	repl = gimple_build_nop ();
      tree vdef = gimple_vdef (stmt);
      if (vdef && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
      gsi_replace (gsi, repl, true);
      return true;
    }

  /* If SRC and DEST are the same (and not volatile), return
     DEST{,+LEN,+LEN-1}.  */
  if (operand_equal_p (src, dest, 0))
    {
      unlink_stmt_vdef (stmt);
      if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	release_ssa_name (gimple_vdef (stmt));
      if (!lhs)
	{
	  gsi_replace (gsi, gimple_build_nop (), true);
	  return true;
	}
      goto done;
    }
  else
    {
      tree srctype, desttype;
      unsigned int src_align, dest_align;
      tree off0;

      /* Build accesses at offset zero with a ref-all character type.  */
      off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
							 ptr_mode, true), 0);

      /* If we can perform the copy efficiently with first doing all loads
	 and then all stores inline it that way.  Currently efficiently
	 means that we can load all the memory into a single integer
	 register which is what MOVE_MAX gives us.  */
      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (tree_fits_uhwi_p (len)
	  && compare_tree_int (len, MOVE_MAX) <= 0
	  /* ???  Don't transform copies from strings with known length;
	     this confuses tree-ssa-strlen.c.  This doesn't handle
	     the case in gcc.dg/strlenopt-8.c which is XFAILed for that
	     reason.  */
	  && !c_strlen (src, 2))
	{
	  unsigned ilen = tree_to_uhwi (len);
	  if (exact_log2 (ilen) != -1)
	    {
	      tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
	      if (type
		  && TYPE_MODE (type) != BLKmode
		  && (GET_MODE_SIZE (TYPE_MODE (type)) * BITS_PER_UNIT
		      == ilen * 8)
		  /* If the destination pointer is not aligned we must be able
		     to emit an unaligned store.  */
		  && (dest_align >= GET_MODE_ALIGNMENT (TYPE_MODE (type))
		      || !SLOW_UNALIGNED_ACCESS (TYPE_MODE (type), dest_align)))
		{
		  tree srctype = type;
		  tree desttype = type;
		  if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
		    srctype = build_aligned_type (type, src_align);
		  tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
		  tree tem = fold_const_aggregate_ref (srcmem);
		  if (tem)
		    srcmem = tem;
		  else if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type))
			   && SLOW_UNALIGNED_ACCESS (TYPE_MODE (type),
						     src_align))
		    srcmem = NULL_TREE;
		  if (srcmem)
		    {
		      gimple new_stmt;
		      if (is_gimple_reg_type (TREE_TYPE (srcmem)))
			{
			  new_stmt = gimple_build_assign (NULL_TREE, srcmem);
			  if (gimple_in_ssa_p (cfun))
			    srcmem = make_ssa_name (TREE_TYPE (srcmem),
						    new_stmt);
			  else
			    srcmem = create_tmp_reg (TREE_TYPE (srcmem));
			  gimple_assign_set_lhs (new_stmt, srcmem);
			  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
			}
		      if (dest_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
			desttype = build_aligned_type (type, dest_align);
		      new_stmt
			= gimple_build_assign (fold_build2 (MEM_REF, desttype,
							    dest, off0),
					       srcmem);
		      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
		      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
		      if (gimple_vdef (new_stmt)
			  && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
			SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
		      if (!lhs)
			{
			  gsi_replace (gsi, new_stmt, true);
			  return true;
			}
		      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
		      goto done;
		    }
		}
	    }
	}

      if (endp == 3)
	{
	  /* Both DEST and SRC must be pointer types.
	     ??? This is what old code did.  Is the testing for pointer types
	     really mandatory?

	     If either SRC is readonly or length is 1, we can use memcpy.  */
	  if (!dest_align || !src_align)
	    return false;
	  if (readonly_data_expr (src)
	      || (tree_fits_uhwi_p (len)
		  && (MIN (src_align, dest_align) / BITS_PER_UNIT
		      >= tree_to_uhwi (len))))
	    {
	      tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
	      if (!fn)
		return false;
	      gimple_call_set_fndecl (stmt, fn);
	      gimple_call_set_arg (stmt, 0, dest);
	      gimple_call_set_arg (stmt, 1, src);
	      fold_stmt (gsi);
	      return true;
	    }

	  /* If *src and *dest can't overlap, optimize into memcpy as well.  */
	  if (TREE_CODE (src) == ADDR_EXPR
	      && TREE_CODE (dest) == ADDR_EXPR)
	    {
	      tree src_base, dest_base, fn;
	      HOST_WIDE_INT src_offset = 0, dest_offset = 0;
	      HOST_WIDE_INT size = -1;
	      HOST_WIDE_INT maxsize = -1;

	      srcvar = TREE_OPERAND (src, 0);
	      src_base = get_ref_base_and_extent (srcvar, &src_offset,
						  &size, &maxsize);
	      destvar = TREE_OPERAND (dest, 0);
	      dest_base = get_ref_base_and_extent (destvar, &dest_offset,
						   &size, &maxsize);
	      if (tree_fits_uhwi_p (len))
		maxsize = tree_to_uhwi (len);
	      else
		maxsize = -1;
	      src_offset /= BITS_PER_UNIT;
	      dest_offset /= BITS_PER_UNIT;
	      if (SSA_VAR_P (src_base)
		  && SSA_VAR_P (dest_base))
		{
		  if (operand_equal_p (src_base, dest_base, 0)
		      && ranges_overlap_p (src_offset, maxsize,
					   dest_offset, maxsize))
		    return false;
		}
	      else if (TREE_CODE (src_base) == MEM_REF
		       && TREE_CODE (dest_base) == MEM_REF)
		{
		  if (! operand_equal_p (TREE_OPERAND (src_base, 0),
					 TREE_OPERAND (dest_base, 0), 0))
		    return false;
		  offset_int off = mem_ref_offset (src_base) + src_offset;
		  if (!wi::fits_shwi_p (off))
		    return false;
		  src_offset = off.to_shwi ();

		  off = mem_ref_offset (dest_base) + dest_offset;
		  if (!wi::fits_shwi_p (off))
		    return false;
		  dest_offset = off.to_shwi ();
		  if (ranges_overlap_p (src_offset, maxsize,
					dest_offset, maxsize))
		    return false;
		}
	      else
		return false;

	      fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
	      if (!fn)
		return false;
	      gimple_call_set_fndecl (stmt, fn);
	      gimple_call_set_arg (stmt, 0, dest);
	      gimple_call_set_arg (stmt, 1, src);
	      fold_stmt (gsi);
	      return true;
	    }

	  /* If the destination and source do not alias optimize into
	     memcpy as well.  */
	  if ((is_gimple_min_invariant (dest)
	       || TREE_CODE (dest) == SSA_NAME)
	      && (is_gimple_min_invariant (src)
		  || TREE_CODE (src) == SSA_NAME))
	    {
	      ao_ref destr, srcr;
	      ao_ref_init_from_ptr_and_size (&destr, dest, len);
	      ao_ref_init_from_ptr_and_size (&srcr, src, len);
	      if (!refs_may_alias_p_1 (&destr, &srcr, false))
		{
		  tree fn;
		  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
		  if (!fn)
		    return false;
		  gimple_call_set_fndecl (stmt, fn);
		  gimple_call_set_arg (stmt, 0, dest);
		  gimple_call_set_arg (stmt, 1, src);
		  fold_stmt (gsi);
		  return true;
		}
	    }

	  return false;
	}

      if (!tree_fits_shwi_p (len))
	return false;
      /* FIXME:
	 This logic loses for arguments like (type *)malloc (sizeof (type)),
	 since we strip the casts of up to VOID return value from malloc.
	 Perhaps we ought to inherit type from non-VOID argument here?  */
      STRIP_NOPS (src);
      STRIP_NOPS (dest);
      if (!POINTER_TYPE_P (TREE_TYPE (src))
	  || !POINTER_TYPE_P (TREE_TYPE (dest)))
	return false;
      /* In the following try to find a type that is most natural to be
	 used for the memcpy source and destination and that allows
	 the most optimization when memcpy is turned into a plain assignment
	 using that type.  In theory we could always use a char[len] type
	 but that only gains us that the destination and source possibly
	 no longer will have their address taken.  */
      /* As we fold (void *)(p + CST) to (void *)p + CST undo this here.  */
      if (TREE_CODE (src) == POINTER_PLUS_EXPR)
	{
	  tree tem = TREE_OPERAND (src, 0);
	  STRIP_NOPS (tem);
	  if (tem != TREE_OPERAND (src, 0))
	    src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
	}
      if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
	{
	  tree tem = TREE_OPERAND (dest, 0);
	  STRIP_NOPS (tem);
	  if (tem != TREE_OPERAND (dest, 0))
	    dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
	}
      srctype = TREE_TYPE (TREE_TYPE (src));
      if (TREE_CODE (srctype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
	{
	  srctype = TREE_TYPE (srctype);
	  STRIP_NOPS (src);
	  src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
	}
      desttype = TREE_TYPE (TREE_TYPE (dest));
      if (TREE_CODE (desttype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
	{
	  desttype = TREE_TYPE (desttype);
	  STRIP_NOPS (dest);
	  dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
	}
      if (TREE_ADDRESSABLE (srctype)
	  || TREE_ADDRESSABLE (desttype))
	return false;

      /* Make sure we are not copying using a floating-point mode or
	 a type whose size possibly does not match its precision.  */
      if (FLOAT_MODE_P (TYPE_MODE (desttype))
	  || TREE_CODE (desttype) == BOOLEAN_TYPE
	  || TREE_CODE (desttype) == ENUMERAL_TYPE)
	desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
      if (FLOAT_MODE_P (TYPE_MODE (srctype))
	  || TREE_CODE (srctype) == BOOLEAN_TYPE
	  || TREE_CODE (srctype) == ENUMERAL_TYPE)
	srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
      if (!srctype)
	srctype = desttype;
      if (!desttype)
	desttype = srctype;
      if (!srctype)
	return false;

      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (dest_align < TYPE_ALIGN (desttype)
	  || src_align < TYPE_ALIGN (srctype))
	return false;

      destvar = dest;
      STRIP_NOPS (destvar);
      if (TREE_CODE (destvar) == ADDR_EXPR
	  && var_decl_component_p (TREE_OPERAND (destvar, 0))
	  && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
	destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
      else
	destvar = NULL_TREE;

      srcvar = src;
      STRIP_NOPS (srcvar);
      if (TREE_CODE (srcvar) == ADDR_EXPR
	  && var_decl_component_p (TREE_OPERAND (srcvar, 0))
	  && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
	{
	  if (!destvar
	      || src_align >= TYPE_ALIGN (desttype))
	    srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
				  srcvar, off0);
	  else if (!STRICT_ALIGNMENT)
	    {
	      srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
					    src_align);
	      srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
	    }
	  else
	    srcvar = NULL_TREE;
	}
      else
	srcvar = NULL_TREE;

      if (srcvar == NULL_TREE && destvar == NULL_TREE)
	return false;

      if (srcvar == NULL_TREE)
	{
	  STRIP_NOPS (src);
	  if (src_align >= TYPE_ALIGN (desttype))
	    srcvar = fold_build2 (MEM_REF, desttype, src, off0);
	  else
	    {
	      if (STRICT_ALIGNMENT)
		return false;
	      srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
					    src_align);
	      srcvar = fold_build2 (MEM_REF, srctype, src, off0);
	    }
	}
      else if (destvar == NULL_TREE)
	{
	  STRIP_NOPS (dest);
	  if (dest_align >= TYPE_ALIGN (srctype))
	    destvar = fold_build2 (MEM_REF, srctype, dest, off0);
	  else
	    {
	      if (STRICT_ALIGNMENT)
		return false;
	      desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
					     dest_align);
	      destvar = fold_build2 (MEM_REF, desttype, dest, off0);
	    }
	}

      gimple new_stmt;
      if (is_gimple_reg_type (TREE_TYPE (srcvar)))
	{
	  new_stmt = gimple_build_assign (NULL_TREE, srcvar);
	  if (gimple_in_ssa_p (cfun))
	    srcvar = make_ssa_name (TREE_TYPE (srcvar), new_stmt);
	  else
	    srcvar = create_tmp_reg (TREE_TYPE (srcvar));
	  gimple_assign_set_lhs (new_stmt, srcvar);
	  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
	  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
	}
      new_stmt = gimple_build_assign (destvar, srcvar);
      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
      if (gimple_vdef (new_stmt)
	  && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
	SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
      if (!lhs)
	{
	  gsi_replace (gsi, new_stmt, true);
	  return true;
	}
      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
    }

done:
  if (endp == 0 || endp == 3)
    len = NULL_TREE;
  else if (endp == 2)
    len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
			   ssize_int (1));
  if (endp == 2 || endp == 1)
    dest = fold_build_pointer_plus_loc (loc, dest, len);

  dest = force_gimple_operand_gsi (gsi, dest, false, NULL_TREE, true,
				   GSI_SAME_STMT);
  gimple repl = gimple_build_assign (lhs, dest);
  gsi_replace (gsi, repl, true);
  return true;
}
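
/* For example (illustrative), on a target where MOVE_MAX >= 4, a call

     memcpy (&d, &s, 4)

   with D and S sufficiently aligned 4-byte objects is inlined as a
   single 32-bit load followed by a single 32-bit store, and a memmove
   whose arguments provably cannot overlap is rewritten into a plain
   memcpy call.  */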
/* Fold function call to builtin memset or bzero at *GSI setting the
   memory of size LEN to VAL.  Return whether a simplification was made.  */

static bool
gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
{
  gimple stmt = gsi_stmt (*gsi);
  tree etype;
  unsigned HOST_WIDE_INT length, cval;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
      return true;
    }

  if (! tree_fits_uhwi_p (len))
    return false;

  if (TREE_CODE (c) != INTEGER_CST)
    return false;

  tree dest = gimple_call_arg (stmt, 0);
  tree var = dest;
  if (TREE_CODE (var) != ADDR_EXPR)
    return false;

  var = TREE_OPERAND (var, 0);
  if (TREE_THIS_VOLATILE (var))
    return false;

  etype = TREE_TYPE (var);
  if (TREE_CODE (etype) == ARRAY_TYPE)
    etype = TREE_TYPE (etype);

  if (!INTEGRAL_TYPE_P (etype)
      && !POINTER_TYPE_P (etype))
    return false;

  if (! var_decl_component_p (var))
    return false;

  length = tree_to_uhwi (len);
  if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
      || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
    return false;

  if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
    return false;

  if (integer_zerop (c))
    cval = 0;
  else
    {
      if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
	return false;

      cval = TREE_INT_CST_LOW (c);
      cval &= 0xff;
      cval |= cval << 8;
      cval |= cval << 16;
      cval |= (cval << 31) << 1;
    }

  var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
  gimple store = gimple_build_assign (var, build_int_cst_type (etype, cval));
  gimple_set_vuse (store, gimple_vuse (stmt));
  tree vdef = gimple_vdef (stmt);
  if (vdef && TREE_CODE (vdef) == SSA_NAME)
    {
      gimple_set_vdef (store, gimple_vdef (stmt));
      SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
    }
  gsi_insert_before (gsi, store, GSI_SAME_STMT);
  if (gimple_call_lhs (stmt))
    {
      gimple asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
      gsi_replace (gsi, asgn, true);
    }
  else
    {
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (gsi);
      gsi_remove (&gsi2, true);
    }

  return true;
}
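
/* For example (illustrative), with 8-bit units a call

     memset (&i, 0x41, 4)

   storing to a suitably aligned 4-byte integer I is folded to a single
   store of the replicated byte value

     i = 0x41414141;

   followed, if the result is used, by an assignment of &i to the lhs.  */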
/* Return the string length, maximum string length or maximum value of
   ARG in LENGTH.
   If ARG is an SSA name variable, follow its use-def chains.  If LENGTH
   is not NULL and, for TYPE == 0, its value is not equal to the length
   we determine or if we are unable to determine the length or value,
   return false.  VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be returned, 1 for maximum string
   length and 2 for maximum value ARG can have.  */

static bool
get_maxval_strlen (tree arg, tree *length, bitmap *visited, int type)
{
  tree var, val;
  gimple def_stmt;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      if (TREE_CODE (arg) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
	  && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
	{
	  tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_maxval_strlen (TREE_OPERAND (aop0, 0),
				      length, visited, type);
	}

      if (type == 2)
	{
	  val = arg;
	  if (TREE_CODE (val) != INTEGER_CST
	      || tree_int_cst_sgn (val) < 0)
	    return false;
	}
      else
	val = c_strlen (arg, 1);
      if (!val)
	return false;

      if (*length)
	{
	  /* If we already have a length, it must be the same value.  */
	  if (type > 0)
	    {
	      if (TREE_CODE (*length) != INTEGER_CST
		  || TREE_CODE (val) != INTEGER_CST)
		return false;

	      if (tree_int_cst_lt (*length, val))
		*length = val;
	      return true;
	    }
	  else if (simple_cst_equal (val, *length) != 1)
	    return false;
	}

      *length = val;
      return true;
    }

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  */
  if (!*visited)
    *visited = BITMAP_ALLOC (NULL);
  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
    case GIMPLE_ASSIGN:
      /* The RHS of the statement defining VAR must either have a
	 constant length or come from another SSA_NAME with a constant
	 length.  */
      if (gimple_assign_single_p (def_stmt)
	  || gimple_assign_unary_nop_p (def_stmt))
	{
	  tree rhs = gimple_assign_rhs1 (def_stmt);
	  return get_maxval_strlen (rhs, length, visited, type);
	}
      else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
	{
	  tree op2 = gimple_assign_rhs2 (def_stmt);
	  tree op3 = gimple_assign_rhs3 (def_stmt);
	  return get_maxval_strlen (op2, length, visited, type)
	    && get_maxval_strlen (op3, length, visited, type);
	}
      return false;

    case GIMPLE_PHI:
      {
	/* All the arguments of the PHI node must have the same constant
	   length.  */
	unsigned i;

	for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
	  {
	    tree arg = gimple_phi_arg (def_stmt, i)->def;

	    /* If this PHI has itself as an argument, we cannot
	       determine the string length of this argument.  However,
	       if we can find a constant string length for the other
	       PHI args then we can still be sure that this is a
	       constant string length.  So be optimistic and just
	       continue with the next argument.  */
	    if (arg == gimple_phi_result (def_stmt))
	      continue;

	    if (!get_maxval_strlen (arg, length, visited, type))
	      return false;
	  }
      }
      return true;

    default:
      return false;
    }
}
tree
get_maxval_strlen (tree arg, int type)
{
  bitmap visited = NULL;
  tree len = NULL_TREE;
  if (!get_maxval_strlen (arg, &len, &visited, type))
    len = NULL_TREE;
  if (visited)
    BITMAP_FREE (visited);

  return len;
}
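
/* For example (illustrative), for

     s_3 = PHI <"ab", "cdef">

   get_maxval_strlen (s_3, 1) walks both PHI arguments and returns 4,
   the maximum string length, while TYPE == 0 would fail because the
   two lengths differ.  */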
/* Fold function call to builtin strcpy with arguments DEST and SRC.
   If LEN is not NULL, it represents the length of the string to be
   copied.  Return false if no simplification can be made.  */

static bool
gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
			    tree dest, tree src)
{
  location_t loc = gimple_location (gsi_stmt (*gsi));
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (operand_equal_p (src, dest, 0))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (optimize_function_for_size_p (cfun))
    return false;

  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  tree len = get_maxval_strlen (src, 0);
  if (!len)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
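
/* For example (illustrative), when SRC has known length 3 and we are
   not optimizing for size,

     strcpy (dst, "abc")

   becomes 'memcpy (dst, "abc", 4)', copying the terminating NUL as
   part of a fixed-size copy.  */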
/* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
   If SLEN is not NULL, it represents the length of the source string.
   Return false if no simplification can be made.  */

static bool
gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
			     tree dest, tree src, tree len)
{
  location_t loc = gimple_location (gsi_stmt (*gsi));
  tree fn;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  /* We can't compare slen with len as constants below if len is not a
     constant.  */
  if (TREE_CODE (len) != INTEGER_CST)
    return false;

  /* Now, we must be passed a constant src ptr parameter.  */
  tree slen = get_maxval_strlen (src, 0);
  if (!slen || TREE_CODE (slen) != INTEGER_CST)
    return false;

  slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

  /* We do not support simplification of this case, though we do
     support it when expanding trees into RTL.  */
  /* FIXME: generate a call to __builtin_memset.  */
  if (tree_int_cst_lt (slen, len))
    return false;

  /* OK transform into builtin memcpy.  */
  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Simplify a call to the strcat builtin.  DST and SRC are the arguments
   to the call.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static bool
gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);

  const char *p = c_getstr (src);

  /* If the string length is zero, return the dst parameter.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
    return false;

  /* See if we can store by pieces into (dst + strlen(dst)).  */
  tree newdst;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
  tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);

  if (!strlen_fn || !memcpy_fn)
    return false;

  /* If the length of the source string isn't computable don't
     split strcat into strlen and memcpy.  */
  tree len = get_maxval_strlen (src, 0);
  if (! len)
    return false;

  /* Create strlen (dst).  */
  gimple_seq stmts = NULL, stmts2;
  gimple repl = gimple_build_call (strlen_fn, 1, dst);
  gimple_set_location (repl, loc);
  if (gimple_in_ssa_p (cfun))
    newdst = make_ssa_name (size_type_node);
  else
    newdst = create_tmp_reg (size_type_node);
  gimple_call_set_lhs (repl, newdst);
  gimple_seq_add_stmt_without_update (&stmts, repl);

  /* Create (dst p+ strlen (dst)).  */
  newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
  newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len,
			build_int_cst (size_type_node, 1));
  len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
  gimple_seq_add_stmt_without_update (&stmts, repl);
  if (gimple_call_lhs (stmt))
    {
      repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      gsi_replace_with_seq_vops (gsi, stmts);
      /* gsi now points at the assignment to the lhs, get a
	 stmt iterator to the memcpy call.
	 ???  We can't use gsi_for_stmt as that doesn't work when the
	 CFG isn't built yet.  */
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (&gsi2);
      fold_stmt (&gsi2);
    }
  else
    {
      gsi_replace_with_seq_vops (gsi, stmts);
      fold_stmt (gsi);
    }
  return true;
}
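
/* For example (illustrative), when SRC has known length 3 and the
   block is optimized for speed,

     strcat (dst, "abc")

   is expanded into

     _1 = strlen (dst);
     _2 = dst + _1;
     memcpy (_2, "abc", 4);

   so the destination string is scanned only once.  */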
/* Fold a call to the __strcat_chk builtin FNDECL.  DEST, SRC, and SIZE
   are the arguments to the call.  */

static bool
gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree size = gimple_call_arg (stmt, 2);
  tree fn;
  const char *p;

  p = c_getstr (src);
  /* If the SRC parameter is "", return DEST.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
    return false;

  /* If __builtin_strcat_chk is used, assume strcat is available.  */
  fn = builtin_decl_explicit (BUILT_IN_STRCAT);
  if (!fn)
    return false;

  gimple repl = gimple_build_call (fn, 2, dest, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Simplify a call to the strncat builtin.  */

static bool
gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dst = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  const char *p = c_getstr (src);

  /* If the requested length is zero, or the src parameter string
     length is zero, return the dst parameter.  */
  if (integer_zerop (len) || (p && *p == '\0'))
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  /* If the requested len is greater than or equal to the string
     length, call strcat.  */
  if (TREE_CODE (len) == INTEGER_CST && p
      && compare_tree_int (len, strlen (p)) >= 0)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);

      /* If the replacement _DECL isn't initialized, don't do the
	 transformation.  */
      if (!fn)
	return false;

      gcall *repl = gimple_build_call (fn, 2, dst, src);
      replace_call_with_call_and_fold (gsi, repl);
      return true;
    }

  return false;
}
/* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
   LEN, and SIZE.  */

static bool
gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);
  tree size = gimple_call_arg (stmt, 3);
  tree fn;
  const char *p;

  p = c_getstr (src);
  /* If the SRC parameter is "" or if LEN is 0, return DEST.  */
  if ((p && *p == '\0')
      || integer_zerop (len))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  if (! integer_all_onesp (size))
    {
      tree src_len = c_strlen (src, 1);
      if (src_len
	  && tree_fits_uhwi_p (src_len)
	  && tree_fits_uhwi_p (len)
	  && ! tree_int_cst_lt (len, src_len))
	{
	  /* If LEN >= strlen (SRC), optimize into __strcat_chk.  */
	  fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
	  if (!fn)
	    return false;

	  gimple repl = gimple_build_call (fn, 3, dest, src, size);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
      return false;
    }

  /* If __builtin_strncat_chk is used, assume strncat is available.  */
  fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
  if (!fn)
    return false;

  gimple repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold a call to the fputs builtin.  ARG0 and ARG1 are the arguments
   to the call.  IGNORE is true if the value returned
   by the builtin will be ignored.  UNLOCKED is true if this is
   actually a call to fputs_unlocked.  If LEN is non-NULL, it represents
   the known length of the string.  Return false if no simplification
   was possible.  */

static bool
gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
			   tree arg0, tree arg1,
			   bool unlocked)
{
  gimple stmt = gsi_stmt (*gsi);

  /* If we're using an unlocked function, assume the other unlocked
     functions exist explicitly.  */
  tree const fn_fputc = (unlocked
			 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
			 : builtin_decl_implicit (BUILT_IN_FPUTC));
  tree const fn_fwrite = (unlocked
			  ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
			  : builtin_decl_implicit (BUILT_IN_FWRITE));

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt))
    return false;

  /* Get the length of the string passed to fputs.  If the length
     can't be determined, punt.  */
  tree len = get_maxval_strlen (arg0, 0);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return false;

  switch (compare_tree_int (len, 1))
    {
    case -1: /* length is 0, delete the call entirely.  */
      replace_call_with_value (gsi, integer_zero_node);
      return true;

    case 0: /* length is 1, call fputc.  */
      {
	const char *p = c_getstr (arg0);
	if (p != NULL)
	  {
	    if (!fn_fputc)
	      return false;

	    gimple repl = gimple_build_call (fn_fputc, 2,
					     build_int_cst
					       (integer_type_node, p[0]), arg1);
	    replace_call_with_call_and_fold (gsi, repl);
	    return true;
	  }
      }
      /* FALLTHROUGH */
    case 1: /* length is greater than 1, call fwrite.  */
      {
	/* If optimizing for size keep fputs.  */
	if (optimize_function_for_size_p (cfun))
	  return false;
	/* New argument list transforming fputs(string, stream) to
	   fwrite(string, 1, len, stream).  */
	if (!fn_fwrite)
	  return false;

	gimple repl = gimple_build_call (fn_fwrite, 4, arg0,
					 size_one_node, len, arg1);
	replace_call_with_call_and_fold (gsi, repl);
	return true;
      }
    default:
      gcc_unreachable ();
    }
  return false;
}
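
/* For example (illustrative), 'fputs ("x", f)' with an unused result
   becomes 'fputc (120, f)' (120 being 'x'), and 'fputs ("hello", f)'
   becomes 'fwrite ("hello", 1, 5, f)' when not optimizing for size.  */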
/* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   DEST, SRC, LEN, and SIZE are the arguments to the call.
   IGNORE is true, if return value can be ignored.  FCODE is the BUILT_IN_*
   code of the builtin.  If MAXLEN is not NULL, it is maximum length
   passed as third argument.  */

static bool
gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
				tree dest, tree src, tree len, tree size,
				enum built_in_function fcode)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST
     (resp. DEST+LEN for __mempcpy_chk).  */
  if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
    {
      if (fcode != BUILT_IN_MEMPCPY_CHK)
	{
	  replace_call_with_value (gsi, dest);
	  return true;
	}
      else
	{
	  tree temp = fold_build_pointer_plus_loc (loc, dest, len);
	  temp = force_gimple_operand_gsi (gsi, temp,
					   false, NULL_TREE, true,
					   GSI_SAME_STMT);
	  replace_call_with_value (gsi, temp);
	  return true;
	}
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (len, 2);
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
		{
		  /* (void) __mempcpy_chk () can be optimized into
		     (void) __memcpy_chk ().  */
		  fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
		  if (!fn)
		    return false;

		  gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}
	      return false;
	    }
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  fn = NULL_TREE;
  /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
     mem{cpy,pcpy,move,set} is available.  */
  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
      break;
    case BUILT_IN_MEMPCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
      break;
    case BUILT_IN_MEMMOVE_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
      break;
    case BUILT_IN_MEMSET_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMSET);
      break;
    default:
      break;
    }

  if (!fn)
    return false;

  gimple repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
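
/* For example (illustrative), a checked call

     __memcpy_chk (dst, src, len, size)

   with SIZE == (size_t) -1, or with a known LEN (or MAXLEN) that does
   not exceed SIZE, is folded to the plain 'memcpy (dst, src, len)';
   otherwise the checking variant is kept.  */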
/* Fold a call to the __st[rp]cpy_chk builtin.
   DEST, SRC, and SIZE are the arguments to the call.
   IGNORE is true if return value can be ignored.  FCODE is the BUILT_IN_*
   code of the builtin.  If MAXLEN is not NULL, it is maximum length of
   strings passed as second argument.  */

static bool
gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
				tree dest,
				tree src, tree size,
				enum built_in_function fcode)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree len, fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (src, 1);
  if (! integer_all_onesp (size))
    {
      len = c_strlen (src, 1);
      if (! len || ! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_STPCPY_CHK)
		{
		  if (! ignore)
		    return false;

		  /* If return value of __stpcpy_chk is ignored,
		     optimize into __strcpy_chk.  */
		  fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
		  if (!fn)
		    return false;

		  gimple repl = gimple_build_call (fn, 3, dest, src, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}

	      if (! len || TREE_SIDE_EFFECTS (len))
		return false;

	      /* If c_strlen returned something, but not a constant,
		 transform __strcpy_chk into __memcpy_chk.  */
	      fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	      if (!fn)
		return false;

	      len = fold_convert_loc (loc, size_type_node, len);
	      len = size_binop_loc (loc, PLUS_EXPR, len,
				    build_int_cst (size_type_node, 1));
	      len = force_gimple_operand_gsi (gsi, len, true, NULL_TREE,
					      true, GSI_SAME_STMT);
	      gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }
	}
      else
	maxlen = len;

      if (! tree_int_cst_lt (maxlen, size))
	return false;
    }

  /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
			      ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
  if (!fn)
    return false;

  gimple repl = gimple_build_call (fn, 2, dest, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold a call to the __st{r,p}ncpy_chk builtin.  DEST, SRC, LEN, and SIZE
   are the arguments to the call.  If MAXLEN is not NULL, it is maximum
   length passed as third argument.  IGNORE is true if return value can be
   ignored.  FCODE is the BUILT_IN_* code of the builtin.  */

static bool
gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
				 tree dest, tree src,
				 tree len, tree size,
				 enum built_in_function fcode)
{
  gimple stmt = gsi_stmt (*gsi);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
    {
      /* If return value of __stpncpy_chk is ignored,
	 optimize into __strncpy_chk.  */
      fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
      if (fn)
	{
	  gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (len, 2);
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    return false;
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
			      ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
  if (!fn)
    return false;

  gimple repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold function call to builtin stpcpy with arguments DEST and SRC.
   Return false if no simplification can be made.  */

static bool
gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  location_t loc = gimple_location (stmt);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree fn, len, lenp1;

  /* If the result is unused, replace stpcpy with strcpy.  */
  if (gimple_call_lhs (stmt) == NULL_TREE)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;
      gimple_call_set_fndecl (stmt, fn);
      fold_stmt (gsi);
      return true;
    }

  len = c_strlen (src, 1);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return false;

  if (optimize_function_for_size_p (cfun)
      /* If length is zero it's small enough.  */
      && !integer_zerop (len))
    return false;

  /* If the source has a known length replace stpcpy with memcpy.  */
  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  gimple_seq stmts = NULL;
  tree tem = gimple_convert (&stmts, loc, size_type_node, len);
  lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
			tem, build_int_cst (size_type_node, 1));
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
  gimple_set_vuse (repl, gimple_vuse (stmt));
  gimple_set_vdef (repl, gimple_vdef (stmt));
  if (gimple_vdef (repl)
      && TREE_CODE (gimple_vdef (repl)) == SSA_NAME)
    SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
  gsi_insert_before (gsi, repl, GSI_SAME_STMT);
  /* Replace the result with dest + len.  */
  stmts = NULL;
  tem = gimple_convert (&stmts, loc, sizetype, len);
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
				      POINTER_PLUS_EXPR, dest, tem);
  gsi_replace (gsi, ret, true);
  /* Finally fold the memcpy call.  */
  gimple_stmt_iterator gsi2 = *gsi;
  gsi_prev (&gsi2);
  fold_stmt (&gsi2);
  return true;
}
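
/* For example (illustrative), when the result is used and SRC has
   known length 3,

     p_1 = stpcpy (dst, "abc");

   becomes 'memcpy (dst, "abc", 4); p_1 = dst + 3;', and with an
   unused result stpcpy simply degrades to strcpy.  */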
/* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS.  Return
   NULL_TREE if a normal call should be emitted rather than expanding
   the function inline.  FCODE is either BUILT_IN_SNPRINTF_CHK or
   BUILT_IN_VSNPRINTF_CHK.  If MAXLEN is not NULL, it is maximum length
   passed as second argument.  */

static bool
gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
				  enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call.  */
  if (gimple_call_num_args (stmt) < 5)
    return false;

  dest = gimple_call_arg (stmt, 0);
  len = gimple_call_arg (stmt, 1);
  flag = gimple_call_arg (stmt, 2);
  size = gimple_call_arg (stmt, 3);
  fmt = gimple_call_arg (stmt, 4);

  if (! tree_fits_uhwi_p (size))
    return false;

  if (! integer_all_onesp (size))
    {
      tree maxlen = get_maxval_strlen (len, 2);
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    return false;
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  if (!init_target_chars ())
    return false;

  /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      fmt_str = c_getstr (fmt);
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
     available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
			      ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 5 arguments by 3,
     retaining trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, len);
  gimple_call_set_arg (stmt, 2, fmt);
  for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
/* Fold a call to __{,v}sprintf_chk at *GSI.  Return true if the call was
   simplified in place, false if a normal call should be emitted instead.
   FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK.  */

static bool
gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
                                 enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;
  unsigned nargs = gimple_call_num_args (stmt);

  /* Verify the required arguments in the original call.  */
  if (nargs < 4)
    return false;
  dest = gimple_call_arg (stmt, 0);
  flag = gimple_call_arg (stmt, 1);
  size = gimple_call_arg (stmt, 2);
  fmt = gimple_call_arg (stmt, 3);

  if (! tree_fits_uhwi_p (size))
    return false;

  len = NULL_TREE;

  if (!init_target_chars ())
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str != NULL)
    {
      /* If the format doesn't contain % args or %%, we know the size.  */
      if (strchr (fmt_str, target_percent) == 0)
        {
          if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
            len = build_int_cstu (size_type_node, strlen (fmt_str));
        }
      /* If the format is "%s" and first ... argument is a string literal,
         we know the size too.  */
      else if (fcode == BUILT_IN_SPRINTF_CHK
               && strcmp (fmt_str, target_percent_s) == 0)
        {
          tree arg;

          if (nargs == 5)
            {
              arg = gimple_call_arg (stmt, 4);
              if (POINTER_TYPE_P (TREE_TYPE (arg)))
                {
                  len = c_strlen (arg, 1);
                  if (! len || ! tree_fits_uhwi_p (len))
                    len = NULL_TREE;
                }
            }
        }
    }

  if (! integer_all_onesp (size))
    {
      if (! len || ! tree_int_cst_lt (len, size))
        return false;
    }

  /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      if (fmt_str == NULL)
        return false;
      if (strchr (fmt_str, target_percent) != NULL
          && strcmp (fmt_str, target_percent_s))
        return false;
    }

  /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
                              ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 4 arguments by 2,
     retaining the trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, fmt);
  for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
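
/* Illustrative sketch: with a constant format containing no '%',

       __sprintf_chk (buf, 0, BOS, "hello")

   writes a known number of bytes, so once that is proven to fit the
   object size it becomes

       sprintf (buf, "hello")

   (BOS again names the hypothetical object-size argument).  */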
/* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
   ORIG may be null if this is a 2-argument call.  We don't attempt to
   simplify calls with more than 3 arguments.

   Return true if the call was simplified in place, false otherwise.  */

static bool
gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree fmt = gimple_call_arg (stmt, 1);
  tree orig = NULL_TREE;
  const char *fmt_str = NULL;

  /* Verify the required arguments in the original call.  We deal with two
     types of sprintf() calls: 'sprintf (str, fmt)' and
     'sprintf (dest, "%s", orig)'.  */
  if (gimple_call_num_args (stmt) > 3)
    return false;

  if (gimple_call_num_args (stmt) == 3)
    orig = gimple_call_arg (stmt, 2);

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);

      if (!fn)
        return false;

      /* Don't optimize sprintf (buf, "abc", ptr++).  */
      if (orig)
        return false;

      /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
         'format' is known to contain no % formats.  */
      gimple_seq stmts = NULL;
      gimple repl = gimple_build_call (fn, 2, dest, fmt);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
        {
          repl = gimple_build_assign (gimple_call_lhs (stmt),
                                      build_int_cst (integer_type_node,
                                                     strlen (fmt_str)));
          gimple_seq_add_stmt_without_update (&stmts, repl);
          gsi_replace_with_seq_vops (gsi, stmts);
          /* gsi now points at the assignment to the lhs, get a
             stmt iterator to the memcpy call.
             ???  We can't use gsi_for_stmt as that doesn't work when the
             CFG isn't built yet.  */
          gimple_stmt_iterator gsi2 = *gsi;
          gsi_prev (&gsi2);
          fold_stmt (&gsi2);
        }
      else
        {
          gsi_replace_with_seq_vops (gsi, stmts);
          fold_stmt (gsi);
        }
      return true;
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);

      if (!fn)
        return false;

      /* Don't crash on sprintf (str1, "%s").  */
      if (!orig)
        return false;

      tree orig_len = NULL_TREE;
      if (gimple_call_lhs (stmt))
        {
          orig_len = get_maxval_strlen (orig, 0);
          if (!orig_len)
            return false;
        }

      /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2).  */
      gimple_seq stmts = NULL;
      gimple repl = gimple_build_call (fn, 2, dest, orig);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
        {
          if (!useless_type_conversion_p (integer_type_node,
                                          TREE_TYPE (orig_len)))
            orig_len = fold_convert (integer_type_node, orig_len);
          repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
          gimple_seq_add_stmt_without_update (&stmts, repl);
          gsi_replace_with_seq_vops (gsi, stmts);
          /* gsi now points at the assignment to the lhs, get a
             stmt iterator to the memcpy call.
             ???  We can't use gsi_for_stmt as that doesn't work when the
             CFG isn't built yet.  */
          gimple_stmt_iterator gsi2 = *gsi;
          gsi_prev (&gsi2);
          fold_stmt (&gsi2);
        }
      else
        {
          gsi_replace_with_seq_vops (gsi, stmts);
          fold_stmt (gsi);
        }
      return true;
    }
  return false;
}
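
/* The effect of the above at the source level, as a sketch:

       sprintf (buf, "hello")     -> strcpy (buf, "hello"), lhs = 5
       sprintf (buf, "%s", str2)  -> strcpy (buf, str2),    lhs = strlen

   where the assignment to the lhs is only materialized when the call's
   return value is used and the copied length is known.  */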
/* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
   FMT, and ORIG.  ORIG may be null if this is a 3-argument call.  We don't
   attempt to simplify calls with more than 4 arguments.

   Return true if the call was simplified in place, false otherwise.  */

static bool
gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest = gimple_call_arg (stmt, 0);
  tree destsize = gimple_call_arg (stmt, 1);
  tree fmt = gimple_call_arg (stmt, 2);
  tree orig = NULL_TREE;
  const char *fmt_str = NULL;

  if (gimple_call_num_args (stmt) > 4)
    return false;

  if (gimple_call_num_args (stmt) == 4)
    orig = gimple_call_arg (stmt, 3);

  if (!tree_fits_uhwi_p (destsize))
    return false;
  unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
        return false;

      /* Don't optimize snprintf (buf, 4, "abc", ptr++).  */
      if (orig)
        return false;

      /* We could expand this as
         memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
         or to
         memcpy (str, fmt_with_nul_at_cstm1, cst);
         but in the former case that might increase code size
         and in the latter case grow .rodata section too much.
         So punt for now.  */
      size_t len = strlen (fmt_str);
      if (len >= destlen)
        return false;

      gimple_seq stmts = NULL;
      gimple repl = gimple_build_call (fn, 2, dest, fmt);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
        {
          repl = gimple_build_assign (gimple_call_lhs (stmt),
                                      build_int_cst (integer_type_node, len));
          gimple_seq_add_stmt_without_update (&stmts, repl);
          gsi_replace_with_seq_vops (gsi, stmts);
          /* gsi now points at the assignment to the lhs, get a
             stmt iterator to the memcpy call.
             ???  We can't use gsi_for_stmt as that doesn't work when the
             CFG isn't built yet.  */
          gimple_stmt_iterator gsi2 = *gsi;
          gsi_prev (&gsi2);
          fold_stmt (&gsi2);
        }
      else
        {
          gsi_replace_with_seq_vops (gsi, stmts);
          fold_stmt (gsi);
        }
      return true;
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
        return false;

      /* Don't crash on snprintf (str1, cst, "%s").  */
      if (!orig)
        return false;

      tree orig_len = get_maxval_strlen (orig, 0);
      if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
        return false;

      /* We could expand this as
         memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
         or to
         memcpy (str1, str2_with_nul_at_cstm1, cst);
         but in the former case that might increase code size
         and in the latter case grow .rodata section too much.
         So punt for now.  */
      if (compare_tree_int (orig_len, destlen) >= 0)
        return false;

      /* Convert snprintf (str1, cst, "%s", str2) into
         strcpy (str1, str2) if strlen (str2) < cst.  */
      gimple_seq stmts = NULL;
      gimple repl = gimple_build_call (fn, 2, dest, orig);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
        {
          if (!useless_type_conversion_p (integer_type_node,
                                          TREE_TYPE (orig_len)))
            orig_len = fold_convert (integer_type_node, orig_len);
          repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
          gimple_seq_add_stmt_without_update (&stmts, repl);
          gsi_replace_with_seq_vops (gsi, stmts);
          /* gsi now points at the assignment to the lhs, get a
             stmt iterator to the memcpy call.
             ???  We can't use gsi_for_stmt as that doesn't work when the
             CFG isn't built yet.  */
          gimple_stmt_iterator gsi2 = *gsi;
          gsi_prev (&gsi2);
          fold_stmt (&gsi2);
        }
      else
        {
          gsi_replace_with_seq_vops (gsi, stmts);
          fold_stmt (gsi);
        }
      return true;
    }
  return false;
}
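
/* Sketch of the transformation, for a destination of size 8:

       snprintf (buf, 8, "abc")       -> strcpy (buf, "abc"), lhs = 3
       snprintf (buf, 8, "%s", str2)  -> strcpy (buf, str2)
                                         if maximum strlen (str2) < 8

   Calls whose format or source might not fit the destination are left
   untouched, for the code-size reasons given above.  */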
/* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
   FP, FMT, and ARG are the arguments to the call.  We don't fold calls with
   more than 3 arguments, and ARG may be null in the 2-argument case.

   Return true if the call was simplified in place, false otherwise.
   FCODE is the BUILT_IN_* code of the function to be simplified.  */

static bool
gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
                             tree fp, tree fmt, tree arg,
                             enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree fn_fputc, fn_fputs;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt) != NULL_TREE)
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
         unlocked functions exist explicitly.  */
      fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
      fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
    }
  else
    {
      fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
      fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
    }

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
          && arg)
        return false;

      /* If the format specifier was "", fprintf does nothing.  */
      if (fmt_str[0] == '\0')
        {
          replace_call_with_value (gsi, NULL_TREE);
          return true;
        }

      /* When "string" doesn't contain %, replace all cases of
         fprintf (fp, string) with fputs (string, fp).  The fputs
         builtin will take care of special cases like length == 1.  */
      if (fn_fputs)
        {
          gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
          replace_call_with_call_and_fold (gsi, repl);
          return true;
        }
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
    return false;

  /* If the format specifier was "%s", call __builtin_fputs (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_s) == 0)
    {
      if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
        return false;
      if (fn_fputs)
        {
          gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
          replace_call_with_call_and_fold (gsi, repl);
          return true;
        }
    }

  /* If the format specifier was "%c", call __builtin_fputc (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg
          || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
        return false;
      if (fn_fputc)
        {
          gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
          replace_call_with_call_and_fold (gsi, repl);
          return true;
        }
    }

  return false;
}
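
/* Illustrative examples of the rewrites done here (return value unused):

       fprintf (fp, "")       -> call removed
       fprintf (fp, "hi")     -> fputs ("hi", fp)
       fprintf (fp, "%s", s)  -> fputs (s, fp)
       fprintf (fp, "%c", c)  -> fputc (c, fp)  */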
/* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
   FMT and ARG are the arguments to the call; we don't fold cases with
   more than 2 arguments, and ARG may be null if this is a 1-argument case.

   Return true if the call was simplified in place, false otherwise.
   FCODE is the BUILT_IN_* code of the function to be simplified.  */

static bool
gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
                            tree arg, enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree fn_putchar, fn_puts, newarg;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt) != NULL_TREE)
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (fcode == BUILT_IN_PRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
         unlocked functions exist explicitly.  */
      fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
      fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
    }
  else
    {
      fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
      fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
    }

  if (!init_target_chars ())
    return false;

  if (strcmp (fmt_str, target_percent_s) == 0
      || strchr (fmt_str, target_percent) == NULL)
    {
      const char *str;

      if (strcmp (fmt_str, target_percent_s) == 0)
        {
          if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
            return false;

          if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
            return false;

          str = c_getstr (arg);
          if (str == NULL)
            return false;
        }
      else
        {
          /* The format specifier doesn't contain any '%' characters.  */
          if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
              && arg)
            return false;
          str = fmt_str;
        }

      /* If the string was "", printf does nothing.  */
      if (str[0] == '\0')
        {
          replace_call_with_value (gsi, NULL_TREE);
          return true;
        }

      /* If the string has length of 1, call putchar.  */
      if (str[1] == '\0')
        {
          /* Given printf("c"), (where c is any one character,)
             convert "c"[0] to an int and pass that to the replacement
             function.  */
          newarg = build_int_cst (integer_type_node, str[0]);
          if (fn_putchar)
            {
              gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
              replace_call_with_call_and_fold (gsi, repl);
              return true;
            }
        }
      else
        {
          /* If the string was "string\n", call puts("string").  */
          size_t len = strlen (str);
          if ((unsigned char)str[len - 1] == target_newline
              && (size_t) (int) len == len)
            {
              char *newstr;
              tree offset_node, string_cst;

              /* Create a NUL-terminated string that's one char shorter
                 than the original, stripping off the trailing '\n'.  */
              newarg = build_string_literal (len, str);
              string_cst = string_constant (newarg, &offset_node);
              gcc_checking_assert (string_cst
                                   && (TREE_STRING_LENGTH (string_cst)
                                       == (int) len)
                                   && integer_zerop (offset_node)
                                   && (unsigned char)
                                      TREE_STRING_POINTER (string_cst)[len - 1]
                                      == target_newline);
              /* build_string_literal creates a new STRING_CST,
                 modify it in place to avoid double copying.  */
              newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
              newstr[len - 1] = '\0';
              if (fn_puts)
                {
                  gcall *repl = gimple_build_call (fn_puts, 1, newarg);
                  replace_call_with_call_and_fold (gsi, repl);
                  return true;
                }
            }
          else
            /* We'd like to arrange to call fputs(string,stdout) here,
               but we need stdout and don't have a way to get it yet.  */
            return false;
        }
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
    return false;

  /* If the format specifier was "%s\n", call __builtin_puts(arg).  */
  else if (strcmp (fmt_str, target_percent_s_newline) == 0)
    {
      if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
        return false;
      if (fn_puts)
        {
          gcall *repl = gimple_build_call (fn_puts, 1, arg);
          replace_call_with_call_and_fold (gsi, repl);
          return true;
        }
    }

  /* If the format specifier was "%c", call __builtin_putchar(arg).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg || ! useless_type_conversion_p (integer_type_node,
                                               TREE_TYPE (arg)))
        return false;
      if (fn_putchar)
        {
          gcall *repl = gimple_build_call (fn_putchar, 1, arg);
          replace_call_with_call_and_fold (gsi, repl);
          return true;
        }
    }

  return false;
}
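
/* Illustrative examples of the rewrites done here (return value unused):

       printf ("")         -> call removed
       printf ("x")        -> putchar ('x')
       printf ("foo\n")    -> puts ("foo")
       printf ("%s\n", s)  -> puts (s)
       printf ("%c", c)    -> putchar (c)  */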
/* Fold a call to __builtin_strlen when the length of the argument is
   known.  */

static bool
gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree len = get_maxval_strlen (gimple_call_arg (stmt, 0), 0);
  if (!len)
    return false;
  len = force_gimple_operand_gsi (gsi, len, true, NULL, true, GSI_SAME_STMT);
  replace_call_with_value (gsi, len);
  return true;
}
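
/* For instance, given

       const char *p = "hello";  n = strlen (p);

   get_maxval_strlen determines the length 5 and the call collapses to
   that constant (a sketch; the argument's length must be known exactly
   for this to trigger).  */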
/* Fold the non-target builtin at *GSI and return whether any simplification
   was made.  */

static bool
gimple_fold_builtin (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
  tree callee = gimple_call_fndecl (stmt);

  /* Give up for always_inline inline builtins until they are
     inlined.  */
  if (avoid_folding_inline_builtin (callee))
    return false;

  unsigned n = gimple_call_num_args (stmt);
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
  switch (fcode)
    {
    case BUILT_IN_BZERO:
      return gimple_fold_builtin_memset (gsi, integer_zero_node,
                                         gimple_call_arg (stmt, 1));
    case BUILT_IN_MEMSET:
      return gimple_fold_builtin_memset (gsi,
                                         gimple_call_arg (stmt, 1),
                                         gimple_call_arg (stmt, 2));
    case BUILT_IN_BCOPY:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 1),
                                            gimple_call_arg (stmt, 0), 3);
    case BUILT_IN_MEMCPY:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
                                            gimple_call_arg (stmt, 1), 0);
    case BUILT_IN_MEMPCPY:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
                                            gimple_call_arg (stmt, 1), 1);
    case BUILT_IN_MEMMOVE:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
                                            gimple_call_arg (stmt, 1), 3);
    case BUILT_IN_SPRINTF_CHK:
    case BUILT_IN_VSPRINTF_CHK:
      return gimple_fold_builtin_sprintf_chk (gsi, fcode);
    case BUILT_IN_STRCAT_CHK:
      return gimple_fold_builtin_strcat_chk (gsi);
    case BUILT_IN_STRNCAT_CHK:
      return gimple_fold_builtin_strncat_chk (gsi);
    case BUILT_IN_STRLEN:
      return gimple_fold_builtin_strlen (gsi);
    case BUILT_IN_STRCPY:
      return gimple_fold_builtin_strcpy (gsi,
                                         gimple_call_arg (stmt, 0),
                                         gimple_call_arg (stmt, 1));
    case BUILT_IN_STRNCPY:
      return gimple_fold_builtin_strncpy (gsi,
                                          gimple_call_arg (stmt, 0),
                                          gimple_call_arg (stmt, 1),
                                          gimple_call_arg (stmt, 2));
    case BUILT_IN_STRCAT:
      return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
                                         gimple_call_arg (stmt, 1));
    case BUILT_IN_STRNCAT:
      return gimple_fold_builtin_strncat (gsi);
    case BUILT_IN_FPUTS:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
                                        gimple_call_arg (stmt, 1), false);
    case BUILT_IN_FPUTS_UNLOCKED:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
                                        gimple_call_arg (stmt, 1), true);
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      return gimple_fold_builtin_memory_chk (gsi,
                                             gimple_call_arg (stmt, 0),
                                             gimple_call_arg (stmt, 1),
                                             gimple_call_arg (stmt, 2),
                                             gimple_call_arg (stmt, 3),
                                             fcode);
    case BUILT_IN_STPCPY:
      return gimple_fold_builtin_stpcpy (gsi);
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      return gimple_fold_builtin_stxcpy_chk (gsi,
                                             gimple_call_arg (stmt, 0),
                                             gimple_call_arg (stmt, 1),
                                             gimple_call_arg (stmt, 2),
                                             fcode);
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      return gimple_fold_builtin_stxncpy_chk (gsi,
                                              gimple_call_arg (stmt, 0),
                                              gimple_call_arg (stmt, 1),
                                              gimple_call_arg (stmt, 2),
                                              gimple_call_arg (stmt, 3),
                                              fcode);
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      return gimple_fold_builtin_snprintf_chk (gsi, fcode);
    case BUILT_IN_SNPRINTF:
      return gimple_fold_builtin_snprintf (gsi);
    case BUILT_IN_SPRINTF:
      return gimple_fold_builtin_sprintf (gsi);
    case BUILT_IN_FPRINTF:
    case BUILT_IN_FPRINTF_UNLOCKED:
    case BUILT_IN_VFPRINTF:
      if (n == 2 || n == 3)
        return gimple_fold_builtin_fprintf (gsi,
                                            gimple_call_arg (stmt, 0),
                                            gimple_call_arg (stmt, 1),
                                            n == 3
                                            ? gimple_call_arg (stmt, 2)
                                            : NULL_TREE,
                                            fcode);
      break;
    case BUILT_IN_FPRINTF_CHK:
    case BUILT_IN_VFPRINTF_CHK:
      if (n == 3 || n == 4)
        return gimple_fold_builtin_fprintf (gsi,
                                            gimple_call_arg (stmt, 0),
                                            gimple_call_arg (stmt, 2),
                                            n == 4
                                            ? gimple_call_arg (stmt, 3)
                                            : NULL_TREE,
                                            fcode);
      break;
    case BUILT_IN_PRINTF:
    case BUILT_IN_PRINTF_UNLOCKED:
    case BUILT_IN_VPRINTF:
      if (n == 1 || n == 2)
        return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
                                           n == 2
                                           ? gimple_call_arg (stmt, 1)
                                           : NULL_TREE, fcode);
      break;
    case BUILT_IN_PRINTF_CHK:
    case BUILT_IN_VPRINTF_CHK:
      if (n == 2 || n == 3)
        return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
                                           n == 3
                                           ? gimple_call_arg (stmt, 2)
                                           : NULL_TREE, fcode);
      break;
    default:;
    }

  /* Try the generic builtin folder.  */
  bool ignore = (gimple_call_lhs (stmt) == NULL);
  tree result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
        STRIP_NOPS (result);
      else
        result = fold_convert (gimple_call_return_type (stmt), result);
      if (!update_call_from_tree (gsi, result))
        gimplify_and_update_call_from_tree (gsi, result);
      return true;
    }

  return false;
}
/* Return true if ARG0 CODE ARG1 in infinite signed precision operation
   doesn't fit into TYPE.  The test for overflow should be regardless of
   -fwrapv, and even for unsigned types.  */

bool
arith_overflowed_p (enum tree_code code, const_tree type,
                    const_tree arg0, const_tree arg1)
{
  typedef FIXED_WIDE_INT (WIDE_INT_MAX_PRECISION * 2) widest2_int;
  typedef generic_wide_int <wi::extended_tree <WIDE_INT_MAX_PRECISION * 2> >
    widest2_int_cst;
  widest2_int warg0 = widest2_int_cst (arg0);
  widest2_int warg1 = widest2_int_cst (arg1);
  widest2_int wres;
  switch (code)
    {
    case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
    case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
    case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
    default: gcc_unreachable ();
    }
  signop sign = TYPE_SIGN (type);
  if (sign == UNSIGNED && wi::neg_p (wres))
    return true;
  return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
}
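
/* Worked example: for an 8-bit unsigned TYPE, 200 + 100 evaluates to 300
   in the double-width arithmetic above; wi::min_precision (300, UNSIGNED)
   is 9, which exceeds the type's precision of 8, so the addition is
   reported as overflowing.  Likewise 0 - 1 yields a negative value,
   which no unsigned type can represent.  */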
/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.  */

static bool
gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree callee;
  bool changed = false;
  unsigned i;

  /* Fold *& in call arguments.  */
  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
      {
        tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
        if (tmp)
          {
            gimple_call_set_arg (stmt, i, tmp);
            changed = true;
          }
      }

  /* Check for virtual calls that became direct calls.  */
  callee = gimple_call_fn (stmt);
  if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
    {
      if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
        {
          if (dump_file && virtual_method_call_p (callee)
              && !possible_polymorphic_call_target_p
                   (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
                                                    (OBJ_TYPE_REF_EXPR (callee)))))
            {
              fprintf (dump_file,
                       "Type inheritance inconsistent devirtualization of ");
              print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
              fprintf (dump_file, " to ");
              print_generic_expr (dump_file, callee, TDF_SLIM);
              fprintf (dump_file, "\n");
            }

          gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
          changed = true;
        }
      else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
        {
          bool final;
          vec <cgraph_node *>targets
            = possible_polymorphic_call_targets (callee, stmt, &final);
          if (final && targets.length () <= 1 && dbg_cnt (devirt))
            {
              tree lhs = gimple_call_lhs (stmt);
              if (dump_enabled_p ())
                {
                  location_t loc = gimple_location_safe (stmt);
                  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
                                   "folding virtual function call to %s\n",
                                   targets.length () == 1
                                   ? targets[0]->name ()
                                   : "__builtin_unreachable");
                }
              if (targets.length () == 1)
                {
                  gimple_call_set_fndecl (stmt, targets[0]->decl);
                  changed = true;
                  /* If the call becomes noreturn, remove the lhs.  */
                  if (lhs && (gimple_call_flags (stmt) & ECF_NORETURN))
                    {
                      if (TREE_CODE (lhs) == SSA_NAME)
                        {
                          tree var = create_tmp_var (TREE_TYPE (lhs));
                          tree def = get_or_create_ssa_default_def (cfun, var);
                          gimple new_stmt = gimple_build_assign (lhs, def);
                          gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
                        }
                      gimple_call_set_lhs (stmt, NULL_TREE);
                    }
                  maybe_remove_unused_call_args (cfun, stmt);
                }
              else
                {
                  tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
                  gimple new_stmt = gimple_build_call (fndecl, 0);
                  gimple_set_location (new_stmt, gimple_location (stmt));
                  if (lhs && TREE_CODE (lhs) == SSA_NAME)
                    {
                      tree var = create_tmp_var (TREE_TYPE (lhs));
                      tree def = get_or_create_ssa_default_def (cfun, var);

                      /* To satisfy condition for
                         cgraph_update_edges_for_call_stmt_node,
                         we need to preserve GIMPLE_CALL statement
                         at position of GSI iterator.  */
                      update_call_from_tree (gsi, def);
                      gsi_insert_before (gsi, new_stmt, GSI_NEW_STMT);
                    }
                  else
                    {
                      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
                      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
                      gsi_replace (gsi, new_stmt, false);
                    }
                  return true;
                }
            }
        }
    }

  /* Check for indirect calls that became direct calls, and then
     no longer require a static chain.  */
  if (gimple_call_chain (stmt))
    {
      tree fn = gimple_call_fndecl (stmt);
      if (fn && !DECL_STATIC_CHAIN (fn))
        {
          gimple_call_set_chain (stmt, NULL);
          changed = true;
        }
      else
        {
          tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
          if (tmp)
            {
              gimple_call_set_chain (stmt, tmp);
              changed = true;
            }
        }
    }

  if (inplace)
    return changed;

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    {
      if (gimple_fold_builtin (gsi))
        changed = true;
    }
  else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
    {
      changed |= targetm.gimple_fold_builtin (gsi);
    }
  else if (gimple_call_internal_p (stmt))
    {
      enum tree_code subcode = ERROR_MARK;
      tree result = NULL_TREE;
      bool cplx_result = false;
      tree overflow = NULL_TREE;
      switch (gimple_call_internal_fn (stmt))
        {
        case IFN_BUILTIN_EXPECT:
          result = fold_builtin_expect (gimple_location (stmt),
                                        gimple_call_arg (stmt, 0),
                                        gimple_call_arg (stmt, 1),
                                        gimple_call_arg (stmt, 2));
          break;
        case IFN_UBSAN_OBJECT_SIZE:
          if (integer_all_onesp (gimple_call_arg (stmt, 2))
              || (TREE_CODE (gimple_call_arg (stmt, 1)) == INTEGER_CST
                  && TREE_CODE (gimple_call_arg (stmt, 2)) == INTEGER_CST
                  && tree_int_cst_le (gimple_call_arg (stmt, 1),
                                      gimple_call_arg (stmt, 2))))
            {
              gsi_replace (gsi, gimple_build_nop (), true);
              unlink_stmt_vdef (stmt);
              release_defs (stmt);
              return true;
            }
          break;
        case IFN_UBSAN_CHECK_ADD:
          subcode = PLUS_EXPR;
          break;
        case IFN_UBSAN_CHECK_SUB:
          subcode = MINUS_EXPR;
          break;
        case IFN_UBSAN_CHECK_MUL:
          subcode = MULT_EXPR;
          break;
        case IFN_ADD_OVERFLOW:
          subcode = PLUS_EXPR;
          cplx_result = true;
          break;
        case IFN_SUB_OVERFLOW:
          subcode = MINUS_EXPR;
          cplx_result = true;
          break;
        case IFN_MUL_OVERFLOW:
          subcode = MULT_EXPR;
          cplx_result = true;
          break;
        default:
          break;
        }
      if (subcode != ERROR_MARK)
        {
          tree arg0 = gimple_call_arg (stmt, 0);
          tree arg1 = gimple_call_arg (stmt, 1);
          tree type = TREE_TYPE (arg0);
          if (cplx_result)
            {
              tree lhs = gimple_call_lhs (stmt);
              if (lhs == NULL_TREE)
                type = NULL_TREE;
              else
                type = TREE_TYPE (TREE_TYPE (lhs));
            }
          if (type == NULL_TREE)
            ;
          /* x = y + 0; x = y - 0; x = y * 0; */
          else if (integer_zerop (arg1))
            result = subcode == MULT_EXPR ? integer_zero_node : arg0;
          /* x = 0 + y; x = 0 * y; */
          else if (subcode != MINUS_EXPR && integer_zerop (arg0))
            result = subcode == MULT_EXPR ? integer_zero_node : arg1;
          /* x = y - y; */
          else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
            result = integer_zero_node;
          /* x = y * 1; x = 1 * y; */
          else if (subcode == MULT_EXPR && integer_onep (arg1))
            result = arg0;
          else if (subcode == MULT_EXPR && integer_onep (arg0))
            result = arg1;
          else if (TREE_CODE (arg0) == INTEGER_CST
                   && TREE_CODE (arg1) == INTEGER_CST)
            {
              if (cplx_result)
                result = int_const_binop (subcode, fold_convert (type, arg0),
                                          fold_convert (type, arg1));
              else
                result = int_const_binop (subcode, arg0, arg1);
              if (result && arith_overflowed_p (subcode, type, arg0, arg1))
                {
                  if (cplx_result)
                    overflow = build_one_cst (type);
                  else
                    result = NULL_TREE;
                }
            }
          if (result)
            {
              if (result == integer_zero_node)
                result = build_zero_cst (type);
              else if (cplx_result && TREE_TYPE (result) != type)
                {
                  if (TREE_CODE (result) == INTEGER_CST)
                    {
                      if (arith_overflowed_p (PLUS_EXPR, type, result,
                                              integer_zero_node))
                        overflow = build_one_cst (type);
                    }
                  else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
                            && TYPE_UNSIGNED (type))
                           || (TYPE_PRECISION (type)
                               < (TYPE_PRECISION (TREE_TYPE (result))
                                  + (TYPE_UNSIGNED (TREE_TYPE (result))
                                     && !TYPE_UNSIGNED (type)))))
                    result = NULL_TREE;
                  if (result)
                    result = fold_convert (type, result);
                }
            }
        }

      if (result)
        {
          if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
            result = drop_tree_overflow (result);
          if (cplx_result)
            {
              if (overflow == NULL_TREE)
                overflow = build_zero_cst (TREE_TYPE (result));
              tree ctype = build_complex_type (TREE_TYPE (result));
              if (TREE_CODE (result) == INTEGER_CST
                  && TREE_CODE (overflow) == INTEGER_CST)
                result = build_complex (ctype, result, overflow);
              else
                result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
                                     ctype, result, overflow);
            }
          if (!update_call_from_tree (gsi, result))
            gimplify_and_update_call_from_tree (gsi, result);
          changed = true;
        }
    }

  return changed;
}
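
/* As an example of the internal-function folding above (a sketch):

       res = ADD_OVERFLOW (200, 100)

   with an 8-bit unsigned element type constant-folds to the complex
   constant {44, 1} -- the wrapped sum plus an overflow flag that is set
   because arith_overflowed_p found that 300 does not fit.  */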
/* Return true if NAME has a use on STMT.  */

static bool
has_use_on_stmt (tree name, gimple stmt)
{
  imm_use_iterator iter;
  use_operand_p use_p;
  FOR_EACH_IMM_USE_FAST (use_p, iter, name)
    if (USE_STMT (use_p) == stmt)
      return true;
  return false;
}
/* Worker for fold_stmt_1 dispatch to pattern based folding with
   gimple_simplify.

   Replaces *GSI with the simplification result in RCODE and OPS
   and the associated statements in *SEQ.  Does the replacement
   according to INPLACE and returns true if the operation succeeded.  */

static bool
replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
                                  code_helper rcode, tree *ops,
                                  gimple_seq *seq, bool inplace)
{
  gimple stmt = gsi_stmt (*gsi);

  /* Play safe and do not allow abnormals to be mentioned in
     newly created statements.  See also maybe_push_res_to_seq.
     As an exception allow such uses if there was a use of the
     same SSA name on the old stmt.  */
  if ((TREE_CODE (ops[0]) == SSA_NAME
       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0])
       && !has_use_on_stmt (ops[0], stmt))
      || (ops[1]
          && TREE_CODE (ops[1]) == SSA_NAME
          && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1])
          && !has_use_on_stmt (ops[1], stmt))
      || (ops[2]
          && TREE_CODE (ops[2]) == SSA_NAME
          && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2])
          && !has_use_on_stmt (ops[2], stmt)))
    return false;

  if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
    {
      gcc_assert (rcode.is_tree_code ());
      if (TREE_CODE_CLASS ((enum tree_code)rcode) == tcc_comparison
          /* GIMPLE_CONDs condition may not throw.  */
          && (!flag_exceptions
              || !cfun->can_throw_non_call_exceptions
              || !operation_could_trap_p (rcode,
                                          FLOAT_TYPE_P (TREE_TYPE (ops[0])),
                                          false, NULL_TREE)))
        gimple_cond_set_condition (cond_stmt, rcode, ops[0], ops[1]);
      else if (rcode == SSA_NAME)
        gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
                                   build_zero_cst (TREE_TYPE (ops[0])));
      else if (rcode == INTEGER_CST)
        {
          if (integer_zerop (ops[0]))
            gimple_cond_make_false (cond_stmt);
          else
            gimple_cond_make_true (cond_stmt);
        }
      else if (!inplace)
        {
          tree res = maybe_push_res_to_seq (rcode, boolean_type_node,
                                            ops, seq);
          if (!res)
            return false;
          gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
                                     build_zero_cst (TREE_TYPE (res)));
        }
      else
        return false;
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "gimple_simplified to ");
          if (!gimple_seq_empty_p (*seq))
            print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
          print_gimple_stmt (dump_file, gsi_stmt (*gsi),
                             0, TDF_SLIM);
        }
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (is_gimple_assign (stmt)
           && rcode.is_tree_code ())
    {
      if (!inplace
          || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (rcode))
        {
          maybe_build_generic_op (rcode,
                                  TREE_TYPE (gimple_assign_lhs (stmt)),
                                  &ops[0], ops[1], ops[2]);
          gimple_assign_set_rhs_with_ops (gsi, rcode, ops[0], ops[1], ops[2]);
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "gimple_simplified to ");
              if (!gimple_seq_empty_p (*seq))
                print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
              print_gimple_stmt (dump_file, gsi_stmt (*gsi),
                                 0, TDF_SLIM);
            }
          gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
          return true;
        }
    }
  else if (rcode.is_fn_code ()
           && gimple_call_builtin_p (stmt, rcode))
    {
      unsigned i;
      for (i = 0; i < gimple_call_num_args (stmt); ++i)
        {
          gcc_assert (ops[i] != NULL_TREE);
          gimple_call_set_arg (stmt, i, ops[i]);
        }
      if (i < 3)
        gcc_assert (ops[i] == NULL_TREE);
      return true;
    }
  else if (!inplace)
    {
      if (gimple_has_lhs (stmt))
        {
          tree lhs = gimple_get_lhs (stmt);
          if (!maybe_push_res_to_seq (rcode, TREE_TYPE (lhs),
                                      ops, seq, lhs))
            return false;
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "gimple_simplified to ");
              print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
            }
          gsi_replace_with_seq_vops (gsi, *seq);
          return true;
        }
      else
        gcc_unreachable ();
    }

  return false;
}
/* Canonicalize MEM_REFs invariant address operand after propagation.  */

static bool
maybe_canonicalize_mem_ref_addr (tree *t)
{
  bool res = false;

  if (TREE_CODE (*t) == ADDR_EXPR)
    t = &TREE_OPERAND (*t, 0);

  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
     of invariant addresses into a SSA name MEM_REF address.  */
  if (TREE_CODE (*t) == MEM_REF
      || TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (*t, 0);
      if (TREE_CODE (addr) == ADDR_EXPR
          && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
              || handled_component_p (TREE_OPERAND (addr, 0))))
        {
          tree base;
          HOST_WIDE_INT coffset;
          base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
                                                &coffset);
          if (!base)
            gcc_unreachable ();

          TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
          TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
                                                  TREE_OPERAND (*t, 1),
                                                  size_int (coffset));
          res = true;
        }
      gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
                           || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
    }

  /* Canonicalize back MEM_REFs to plain reference trees if the object
     accessed is a decl that has the same access semantics as the MEM_REF.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1))
      && MR_DEPENDENCE_CLIQUE (*t) == 0)
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
      if (/* Same volatile qualification.  */
          TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
          /* Same TBAA behavior with -fstrict-aliasing.  */
          && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
          && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
              == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
          /* Same alignment.  */
          && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
          /* We have to look out here to not drop a required conversion
             from the rhs to the lhs if *t appears on the lhs or vice-versa
             if it appears on the rhs.  Thus require strict type
             compatibility.  */
          && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
        {
          *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
          res = true;
        }
    }

  /* Canonicalize TARGET_MEM_REF in particular with respect to
     the indexes becoming constant.  */
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
        {
          *t = tem;
          res = true;
        }
    }

  return res;
}
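
/* Two sample canonicalizations performed here, in rough C notation:

       MEM[&a.b, 4]   ->  MEM[&a, <unit offset of b> + 4]
       MEM[&decl, 0]  ->  decl   (when volatility, TBAA, alignment and
                                  type all match)

   The offset in the first form is computed by
   get_addr_base_and_unit_offset, not spelled literally as shown.  */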
/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases.  */

static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
{
  bool changed = false;
  gimple stmt = gsi_stmt (*gsi);
  unsigned i;

  /* First do required canonicalization of [TARGET_]MEM_REF addresses
     after propagation.
     ???  This shouldn't be done in generic folding but in the
     propagation helpers which also know whether an address was
     propagated.
     Also canonicalize operand order.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
        {
          tree *rhs = gimple_assign_rhs1_ptr (stmt);
          if ((REFERENCE_CLASS_P (*rhs)
               || TREE_CODE (*rhs) == ADDR_EXPR)
              && maybe_canonicalize_mem_ref_addr (rhs))
            changed = true;
          tree *lhs = gimple_assign_lhs_ptr (stmt);
          if (REFERENCE_CLASS_P (*lhs)
              && maybe_canonicalize_mem_ref_addr (lhs))
            changed = true;
        }
      else
        {
          /* Canonicalize operand order.  */
          enum tree_code code = gimple_assign_rhs_code (stmt);
          if (TREE_CODE_CLASS (code) == tcc_comparison
              || commutative_tree_code (code)
              || commutative_ternary_tree_code (code))
            {
              tree rhs1 = gimple_assign_rhs1 (stmt);
              tree rhs2 = gimple_assign_rhs2 (stmt);
              if (tree_swap_operands_p (rhs1, rhs2, false))
                {
                  gimple_assign_set_rhs1 (stmt, rhs2);
                  gimple_assign_set_rhs2 (stmt, rhs1);
                  if (TREE_CODE_CLASS (code) == tcc_comparison)
                    gimple_assign_set_rhs_code (stmt,
                                                swap_tree_comparison (code));
                  changed = true;
                }
            }
        }
      break;
    case GIMPLE_CALL:
      {
        for (i = 0; i < gimple_call_num_args (stmt); ++i)
          {
            tree *arg = gimple_call_arg_ptr (stmt, i);
            if (REFERENCE_CLASS_P (*arg)
                && maybe_canonicalize_mem_ref_addr (arg))
              changed = true;
          }
        tree *lhs = gimple_call_lhs_ptr (stmt);
        if (*lhs
            && REFERENCE_CLASS_P (*lhs)
            && maybe_canonicalize_mem_ref_addr (lhs))
          changed = true;
        break;
      }
    case GIMPLE_ASM:
      {
        gasm *asm_stmt = as_a <gasm *> (stmt);
        for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
          {
            tree link = gimple_asm_output_op (asm_stmt, i);
            tree op = TREE_VALUE (link);
            if (REFERENCE_CLASS_P (op)
                && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
              changed = true;
          }
        for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
          {
            tree link = gimple_asm_input_op (asm_stmt, i);
            tree op = TREE_VALUE (link);
            if ((REFERENCE_CLASS_P (op)
                 || TREE_CODE (op) == ADDR_EXPR)
                && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
              changed = true;
          }
      }
      break;
    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
        {
          tree *val = gimple_debug_bind_get_value_ptr (stmt);
          if (*val
              && (REFERENCE_CLASS_P (*val)
                  || TREE_CODE (*val) == ADDR_EXPR)
              && maybe_canonicalize_mem_ref_addr (val))
            changed = true;
        }
      break;
    case GIMPLE_COND:
      {
        /* Canonicalize operand order.  */
        tree lhs = gimple_cond_lhs (stmt);
        tree rhs = gimple_cond_rhs (stmt);
        if (tree_swap_operands_p (lhs, rhs, false))
          {
            gcond *gc = as_a <gcond *> (stmt);
            gimple_cond_set_lhs (gc, rhs);
            gimple_cond_set_rhs (gc, lhs);
            gimple_cond_set_code (gc,
                                  swap_tree_comparison (gimple_cond_code (gc)));
            changed = true;
          }
      }
      break;
    default:;
    }

  /* Dispatch to pattern-based folding.  */
  if (!inplace
      || is_gimple_assign (stmt)
      || gimple_code (stmt) == GIMPLE_COND)
    {
      gimple_seq seq = NULL;
      code_helper rcode;
      tree ops[3] = {};
      if (gimple_simplify (stmt, &rcode, ops, inplace ? NULL : &seq,
                           valueize, valueize))
        {
          if (replace_stmt_with_simplification (gsi, rcode, ops, &seq, inplace))
            changed = true;
          else
            gimple_seq_discard (seq);
        }
    }

  stmt = gsi_stmt (*gsi);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        /* Try to canonicalize for boolean-typed X the comparisons
           X == 0, X == 1, X != 0, and X != 1.  */
        if (gimple_assign_rhs_code (stmt) == EQ_EXPR
            || gimple_assign_rhs_code (stmt) == NE_EXPR)
          {
            tree lhs = gimple_assign_lhs (stmt);
            tree op1 = gimple_assign_rhs1 (stmt);
            tree op2 = gimple_assign_rhs2 (stmt);
            tree type = TREE_TYPE (op1);

            /* Check whether the comparison operands are of the same boolean
               type as the result type is.
               Check that second operand is an integer-constant with value
               one or zero.  */
            if (TREE_CODE (op2) == INTEGER_CST
                && (integer_zerop (op2) || integer_onep (op2))
                && useless_type_conversion_p (TREE_TYPE (lhs), type))
              {
                enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
                bool is_logical_not = false;

                /* X == 0 and X != 1 is a logical-not of X,
                   X == 1 and X != 0 is X.  */
                if ((cmp_code == EQ_EXPR && integer_zerop (op2))
                    || (cmp_code == NE_EXPR && integer_onep (op2)))
                  is_logical_not = true;

                if (is_logical_not == false)
                  gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
                /* Only for one-bit precision typed X the transformation
                   !X -> ~X is valid.  */
                else if (TYPE_PRECISION (type) == 1)
                  gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
                /* Otherwise we use !X -> X ^ 1.  */
                else
                  gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
                                                  build_int_cst (type, 1));
                changed = true;
                break;
              }
          }

        unsigned old_num_ops = gimple_num_ops (stmt);
        tree lhs = gimple_assign_lhs (stmt);
        tree new_rhs = fold_gimple_assign (gsi);
        if (new_rhs
            && !useless_type_conversion_p (TREE_TYPE (lhs),
                                           TREE_TYPE (new_rhs)))
          new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
        if (new_rhs
            && (!inplace
                || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
          {
            gimple_assign_set_rhs_from_tree (gsi, new_rhs);
            changed = true;
          }
        break;
      }

    case GIMPLE_CALL:
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      {
        gasm *asm_stmt = as_a <gasm *> (stmt);
        size_t noutputs;
        const char **oconstraints;
        const char *constraint;
        bool allows_mem, allows_reg;

        noutputs = gimple_asm_noutputs (asm_stmt);
        oconstraints = XALLOCAVEC (const char *, noutputs);

        for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
          {
            tree link = gimple_asm_output_op (asm_stmt, i);
            tree op = TREE_VALUE (link);
            oconstraints[i]
              = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
            if (REFERENCE_CLASS_P (op)
                && (op = maybe_fold_reference (op, true)) != NULL_TREE)
              {
                TREE_VALUE (link) = op;
                changed = true;
              }
          }
        for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
          {
            tree link = gimple_asm_input_op (asm_stmt, i);
            tree op = TREE_VALUE (link);
            constraint
              = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
            parse_input_constraint (&constraint, 0, 0, noutputs, 0,
                                    oconstraints, &allows_mem, &allows_reg);
            if (REFERENCE_CLASS_P (op)
                && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
                   != NULL_TREE)
              {
                TREE_VALUE (link) = op;
                changed = true;
              }
          }
      }
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
        {
          tree val = gimple_debug_bind_get_value (stmt);
          if (val
              && REFERENCE_CLASS_P (val))
            {
              tree tem = maybe_fold_reference (val, false);
              if (tem)
                {
                  gimple_debug_bind_set_value (stmt, tem);
                  changed = true;
                }
            }
          else if (val
                   && TREE_CODE (val) == ADDR_EXPR)
            {
              tree ref = TREE_OPERAND (val, 0);
              tree tem = maybe_fold_reference (ref, false);
              if (tem)
                {
                  tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
                  gimple_debug_bind_set_value (stmt, tem);
                  changed = true;
                }
            }
        }
      break;

    default:;
    }

  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  */
  if (gimple_has_lhs (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
        {
          tree new_lhs = maybe_fold_reference (lhs, true);
          if (new_lhs)
            {
              gimple_set_lhs (stmt, new_lhs);
              changed = true;
            }
        }
    }

  return changed;
}
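
/* The boolean canonicalization above, in source terms: for boolean X,

       X == 1  ->  X
       X != 0  ->  X
       X == 0  ->  ~X  (one-bit precision)   or   X ^ 1  (wider types)
       X != 1  ->  likewise a logical not

   so later passes only ever see the direct or the negated form.  */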
/* Valueization callback that ends up not following SSA edges.  */

tree
no_follow_ssa_edges (tree)
{
  return NULL_TREE;
}

/* Valueization callback that ends up following single-use SSA edges only.  */

tree
follow_single_use_edges (tree val)
{
  if (TREE_CODE (val) == SSA_NAME
      && !has_single_use (val))
    return NULL_TREE;
  return val;
}

/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.
   The statement pointed to by GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
}

bool
fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
{
  return fold_stmt_1 (gsi, false, valueize);
}

/* Perform the minimal folding on statement *GSI.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.
   The statement *GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt_inplace (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
  gcc_assert (gsi_stmt (*gsi) == stmt);
  return changed;
}
/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
   if EXPR is null or we don't know how.
   If non-null, the result always has boolean type.  */

static tree
canonicalize_bool (tree expr, bool invert)
{
  if (!expr)
    return NULL_TREE;
  else if (invert)
    {
      if (integer_nonzerop (expr))
        return boolean_false_node;
      else if (integer_zerop (expr))
        return boolean_true_node;
      else if (TREE_CODE (expr) == SSA_NAME)
        return fold_build2 (EQ_EXPR, boolean_type_node, expr,
                            build_int_cst (TREE_TYPE (expr), 0));
      else if (COMPARISON_CLASS_P (expr))
        return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
                            boolean_type_node,
                            TREE_OPERAND (expr, 0),
                            TREE_OPERAND (expr, 1));
      else
        return NULL_TREE;
    }
  else
    {
      if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
        return expr;
      if (integer_nonzerop (expr))
        return boolean_true_node;
      else if (integer_zerop (expr))
        return boolean_false_node;
      else if (TREE_CODE (expr) == SSA_NAME)
        return fold_build2 (NE_EXPR, boolean_type_node, expr,
                            build_int_cst (TREE_TYPE (expr), 0));
      else if (COMPARISON_CLASS_P (expr))
        return fold_build2 (TREE_CODE (expr),
                            boolean_type_node,
                            TREE_OPERAND (expr, 0),
                            TREE_OPERAND (expr, 1));
      else
        return NULL_TREE;
    }
}
/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  */

static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
                        const_tree op1, const_tree op2)
{
  gimple s;

  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      if (operand_equal_p (expr, op1, 0))
        return ((code == NE_EXPR && integer_zerop (op2))
                || (code == EQ_EXPR && integer_nonzerop (op2)));
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
          && gimple_assign_rhs_code (s) == code
          && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
          && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
        return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
          && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
        {
          enum tree_code c = gimple_assign_rhs_code (s);
          if ((c == NE_EXPR && integer_zerop (op2))
              || (c == EQ_EXPR && integer_nonzerop (op2)))
            return same_bool_comparison_p (expr, c,
                                           gimple_assign_rhs1 (s),
                                           gimple_assign_rhs2 (s));
          if ((c == EQ_EXPR && integer_zerop (op2))
              || (c == NE_EXPR && integer_nonzerop (op2)))
            return same_bool_comparison_p (expr,
                                           invert_tree_comparison (c, false),
                                           gimple_assign_rhs1 (s),
                                           gimple_assign_rhs2 (s));
        }
    }
  return false;
}
/* Check to see if two boolean expressions OP1 and OP2 are logically
   equivalent.  */

static bool
same_bool_result_p (const_tree op1, const_tree op2)
{
  /* Simple cases first.  */
  if (operand_equal_p (op1, op2, 0))
    return true;

  /* Check the cases where at least one of the operands is a comparison.
     These are a bit smarter than operand_equal_p in that they apply some
     identities on SSA_NAMEs.  */
  if (COMPARISON_CLASS_P (op2)
      && same_bool_comparison_p (op1, TREE_CODE (op2),
                                 TREE_OPERAND (op2, 0),
                                 TREE_OPERAND (op2, 1)))
    return true;
  if (COMPARISON_CLASS_P (op1)
      && same_bool_comparison_p (op2, TREE_CODE (op1),
                                 TREE_OPERAND (op1, 0),
                                 TREE_OPERAND (op1, 1)))
    return true;

  /* Default case.  */
  return false;
}
/* Forward declarations for some mutually recursive functions.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
                   enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison (tree var, bool invert,
                         enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison_1 (gimple stmt,
                           enum tree_code code2, tree op2a, tree op2b);
static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
                  enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison (tree var, bool invert,
                        enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison_1 (gimple stmt,
                          enum tree_code code2, tree op2a, tree op2b);
/* Helper function for and_comparisons_1: try to simplify the AND of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of the VAR before doing the AND.
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison (tree var, bool invert,
                         enum tree_code code2, tree op2a, tree op2b)
{
  tree t;
  gimple stmt = SSA_NAME_DEF_STMT (var);

  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.  */
  if (invert)
    t = or_var_with_comparison_1 (stmt,
                                  invert_tree_comparison (code2, false),
                                  op2a, op2b);
  else
    t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
  return canonicalize_bool (t, invert);
}
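
/* E.g. simplifying !var AND (x < y) first rewrites it, per DeMorgan as
   above, to !(var OR (x >= y)); the OR is then handled by the simpler
   non-inverted helper and canonicalize_bool re-applies the inversion
   to whatever it returns.  */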
/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison_1 (gimple stmt,
                           enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
          || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
        {
          true_test_var = op2a;
          if (var == true_test_var)
            return var;
        }
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
               || (code2 == NE_EXPR && integer_nonzerop (op2b)))
        {
          false_test_var = op2a;
          if (var == false_test_var)
            return boolean_false_node;
        }
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (innercode,
                                  gimple_assign_rhs1 (stmt),
                                  gimple_assign_rhs2 (stmt),
                                  code2, op2a, op2b);
      if (t)
        return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple s;
      tree t;
      tree partial = NULL_TREE;
      bool is_and = (innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
         of inner1/inner2:
         inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
         inner1 AND (inner1 OR inner2) => inner1
         !inner1 AND (inner1 AND inner2) => false
         !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
         Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
        return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
        return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
        return (is_and
                ? boolean_false_node
                : and_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
        return (is_and
                ? boolean_false_node
                : and_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
         Compute the first partial result, (inner1 AND (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
          && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
          && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
          && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
                                              gimple_assign_rhs1 (s),
                                              gimple_assign_rhs2 (s),
                                              code2, op2a, op2b)))
        {
          /* Handle the AND case, where we are reassociating:
             (inner1 AND inner2) AND (op2a code2 op2b)
             => (t AND inner2)
             If the partial result t is a constant, we win.  Otherwise
             continue on to try reassociating with the other inner test.  */
          if (is_and)
            {
              if (integer_onep (t))
                return inner2;
              else if (integer_zerop (t))
                return boolean_false_node;
            }

          /* Handle the OR case, where we are redistributing:
             (inner1 OR inner2) AND (op2a code2 op2b)
             => (t OR (inner2 AND (op2a code2 op2b)))  */
          else if (integer_onep (t))
            return boolean_true_node;

          /* Save partial result for later.  */
          partial = t;
        }

      /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
          && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
          && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
          && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
                                              gimple_assign_rhs1 (s),
                                              gimple_assign_rhs2 (s),
                                              code2, op2a, op2b)))
        {
          /* Handle the AND case, where we are reassociating:
             (inner1 AND inner2) AND (op2a code2 op2b)
             => (inner1 AND t)  */
          if (is_and)
            {
              if (integer_onep (t))
                return inner1;
              else if (integer_zerop (t))
                return boolean_false_node;
              /* If both are the same, we can apply the identity
                 (x AND x) == x.  */
              else if (partial && same_bool_result_p (t, partial))
                return t;
            }

          /* Handle the OR case, where we are redistributing:
             (inner1 OR inner2) AND (op2a code2 op2b)
             => (t OR (inner1 AND (op2a code2 op2b)))
             => (t OR partial)  */
          else
            {
              if (integer_onep (t))
                return boolean_true_node;
              else if (partial)
                {
                  /* We already got a simplification for the other
                     operand to the redistributed OR expression.  The
                     interesting case is when at least one is false.
                     Or, if both are the same, we can apply the identity
                     (x OR x) == x.  */
                  if (integer_zerop (partial))
                    return t;
                  else if (integer_zerop (t))
                    return partial;
                  else if (same_bool_result_p (t, partial))
                    return t;
                }
            }
        }
    }
  return NULL_TREE;
}
4123 /* Try to simplify the AND of two comparisons defined by
4124 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4125 If this can be done without constructing an intermediate value,
4126 return the resulting tree; otherwise NULL_TREE is returned.
4127 This function is deliberately asymmetric as it recurses on SSA_DEFs
4128 in the first comparison but not the second. */
static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) AND (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a == op1b), we should either be able to
	 return that or FALSE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	      else
		return boolean_false_node;
	    }
	}
      /* Likewise if the second comparison is an == comparison.  */
      else if (code2 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	      else
		return boolean_false_node;
	    }
	}

      /* Same business with inequality tests.  */
      else if (code1 == NE_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp != 0); break;
	    case NE_EXPR: val = (cmp == 0); break;
	    case LT_EXPR: val = (cmp >= 0); break;
	    case GT_EXPR: val = (cmp <= 0); break;
	    case LE_EXPR: val = (cmp > 0); break;
	    case GE_EXPR: val = (cmp < 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == NE_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp <= 0); break;
	    case GT_EXPR: val = (cmp >= 0); break;
	    case LE_EXPR: val = (cmp < 0); break;
	    case GE_EXPR: val = (cmp > 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Choose the more restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Likewise choose the more restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Check for singleton ranges.  */
      else if (cmp == 0
	       && ((code1 == LE_EXPR && code2 == GE_EXPR)
		   || (code1 == GE_EXPR && code2 == LE_EXPR)))
	return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);

      /* Check for disjoint ranges.  */
      else if (cmp <= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_false_node;
      else if (cmp >= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_false_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return and_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ANDed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     return that.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
			{
			  if (!result)
			    result = boolean_false_node;
			  else if (!integer_zerop (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = and_var_with_comparison (arg, invert, code2,
						      op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }
	  break;

	default:
	  break;
	}
    }
  return NULL_TREE;
}
/* Try to simplify the AND of two comparisons, specified by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be simplified to a single expression (without requiring
   introducing more SSA variables to hold intermediate values),
   return the resulting tree.  Otherwise return NULL_TREE.
   If the result expression is non-null, it has boolean type.  */
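/* Because and_comparisons_1 only follows SSA definitions reached
   through its first comparison, this wrapper retries with the operands
   swapped; e.g. if (a > 0) AND (b_1 != 0) does not simplify directly,
   (b_1 != 0) AND (a > 0) still gets a chance to look through the
   definition of b_1.  */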
tree
maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
			    enum tree_code code2, tree op2a, tree op2b)
{
  tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
  if (t)
    return t;
  else
    return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
}
/* Helper function for or_comparisons_1:  try to simplify the OR of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of VAR before doing the OR.
   Return NULL_TREE if we can't simplify this to a single expression.  */
static tree
or_var_with_comparison (tree var, bool invert,
			enum tree_code code2, tree op2a, tree op2b)
{
  tree t;
  gimple stmt = SSA_NAME_DEF_STMT (var);

  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.  */
  if (invert)
    t = and_var_with_comparison_1 (stmt,
				   invert_tree_comparison (code2, false),
				   op2a, op2b);
  else
    t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
  return canonicalize_bool (t, invert);
}
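/* E.g. with INVERT set, !v OR (a < b) is rewritten by DeMorgan's law
   to !(v AND !(a < b)), where the inner negation is computed by
   invert_tree_comparison (here a >= b, ignoring NaNs); the AND helper's
   result is then negated again by canonicalize_bool.  */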
/* Try to simplify the OR of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_TREE if we can't simplify this to a single expression.  */
static tree
or_var_with_comparison_1 (gimple stmt,
			  enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var OR (var != 0)) => true.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_true_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = or_comparisons_1 (innercode,
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 code2, op2a, op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple s;
      tree t;
      tree partial = NULL_TREE;
      bool is_or = (innercode == BIT_IOR_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
	 inner1 OR (inner1 AND inner2) => inner1
	 !inner1 OR (inner1 OR inner2) => true
	 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
      */
      if (inner1 == true_test_var)
	return (is_or ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_or ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the OR across the inner tests.
	 Compute the first partial result, (inner1 OR (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (t OR inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_or)
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (integer_zerop (t))
		return inner2;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner2 OR (op2a code op2b)))  */
	  else if (integer_zerop (t))
	    return boolean_false_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (inner1 OR t)
	     => (t OR partial)  */
	  if (is_or)
	    {
	      if (integer_zerop (t))
		return inner1;
	      else if (integer_onep (t))
		return boolean_true_node;
	      /* If both are the same, we can apply the identity
		 (x OR x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner1 OR (op2a code2 op2b)))
	     => (t AND partial)  */
	  else
	    {
	      if (integer_zerop (t))
		return boolean_false_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed AND expression.  The
		     interesting case is when at least one is true.
		     Or, if both are the same, we can apply the identity
		     (x AND x) == x.  */
		  if (integer_onep (partial))
		    return t;
		  else if (integer_onep (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
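/* As a concrete case of the redistribution above: if var is
   (t_1 | t_2) with t_1 defined as (x < y) for integral x and y, then
   var OR (x >= y) first folds t_1 OR (x >= y) to true via
   maybe_fold_or_comparisons, so the whole expression is true.  */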
/* Try to simplify the OR of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */
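/* For instance, with integral X the pair (X < 5) OR (X < 10) merges to
   X < 10 (the less restrictive bound), (X < 4) OR (X > 4) to X != 4
   (everything but the singleton), and (X <= 7) OR (X >= 2) to true
   (the two ranges cover everything).  */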
static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) OR (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a != op1b), we should either be able to
	 return that or TRUE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	    }
	}
      /* Likewise if the second comparison is a != comparison.  */
      else if (code2 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	    }
	}

      /* See if an equality test is redundant with the other comparison.  */
      else if (code1 == EQ_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == EQ_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Choose the less restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Likewise choose the less restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Check for singleton ranges.  */
      else if (cmp == 0
	       && ((code1 == LT_EXPR && code2 == GT_EXPR)
		   || (code1 == GT_EXPR && code2 == LT_EXPR)))
	return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);

      /* Check for less/greater pairs that don't restrict the range at all.  */
      else if (cmp >= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_true_node;
      else if (cmp <= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_true_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return or_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ORed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     return that.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
			{
			  if (!result)
			    result = boolean_true_node;
			  else if (!integer_onep (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = or_var_with_comparison (arg, invert, code2,
						     op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }
	  break;

	default:
	  break;
	}
    }
  return NULL_TREE;
}
/* Try to simplify the OR of two comparisons, specified by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be simplified to a single expression (without requiring
   introducing more SSA variables to hold intermediate values),
   return the resulting tree.  Otherwise return NULL_TREE.
   If the result expression is non-null, it has boolean type.  */
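/* As with maybe_fold_and_comparisons above, the swapped retry lets the
   helper follow the SSA definition of either operand; e.g. if b_1 is
   defined by a comparison, (a > 0) OR (b_1 != 0) can still simplify
   via the second or_comparisons_1 call.  */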
tree
maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
  if (t)
    return t;
  else
    return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
}
/* Fold STMT to a constant using VALUEIZE to valueize SSA names.

   Either NULL_TREE, a simplified but non-constant or a constant
   is returned.

   ???  This should go into a gimple-fold-inline.h file to be eventually
   privatized with the single valueize function used in the various TUs
   to avoid the indirect function call overhead.  */
tree
gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree),
				tree (*gvalueize) (tree))
{
  code_helper rcode;
  tree ops[3] = {};

  /* ???  The SSA propagators do not correctly deal with following SSA use-def
     edges if there are intermediate VARYING defs.  For this reason
     do not follow SSA edges here even though SCCVN can technically
     just deal fine with that.  */
  if (gimple_simplify (stmt, &rcode, ops, NULL, gvalueize, valueize)
      && rcode.is_tree_code ()
      && (TREE_CODE_LENGTH ((tree_code) rcode) == 0
	  || ((tree_code) rcode) == ADDR_EXPR)
      && is_gimple_val (ops[0]))
    {
      tree res = ops[0];
      if (dump_file && dump_flags & TDF_DETAILS)
	{
	  fprintf (dump_file, "Match-and-simplified ");
	  print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
	  fprintf (dump_file, " to ");
	  print_generic_expr (dump_file, res, 0);
	  fprintf (dump_file, "\n");
	}
      return res;
    }
  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	enum tree_code subcode = gimple_assign_rhs_code (stmt);

	switch (get_gimple_rhs_class (subcode))
	  {
	  case GIMPLE_SINGLE_RHS:
	    {
	      tree rhs = gimple_assign_rhs1 (stmt);
	      enum tree_code_class kind = TREE_CODE_CLASS (subcode);

	      if (TREE_CODE (rhs) == SSA_NAME)
		{
		  /* If the RHS is an SSA_NAME, return its known constant value,
		     if any.  */
		  return (*valueize) (rhs);
		}
	      /* Handle propagating invariant addresses into address
		 operations.  */
	      else if (TREE_CODE (rhs) == ADDR_EXPR
		       && !is_gimple_min_invariant (rhs))
		{
		  HOST_WIDE_INT offset = 0;
		  tree base;
		  base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
							  &offset,
							  valueize);
		  if (base
		      && (CONSTANT_CLASS_P (base)
			  || decl_address_invariant_p (base)))
		    return build_invariant_address (TREE_TYPE (rhs),
						    base, offset);
		}
	      else if (TREE_CODE (rhs) == CONSTRUCTOR
		       && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		       && (CONSTRUCTOR_NELTS (rhs)
			   == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
		{
		  unsigned i;
		  tree val, *vec;

		  vec = XALLOCAVEC (tree,
				    TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
		  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
		    {
		      val = (*valueize) (val);
		      if (TREE_CODE (val) == INTEGER_CST
			  || TREE_CODE (val) == REAL_CST
			  || TREE_CODE (val) == FIXED_CST)
			vec[i] = val;
		      else
			return NULL_TREE;
		    }

		  return build_vector (TREE_TYPE (rhs), vec);
		}
	      if (subcode == OBJ_TYPE_REF)
		{
		  tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
		  /* If callee is constant, we can fold away the wrapper.  */
		  if (is_gimple_min_invariant (val))
		    return val;
		}

	      if (kind == tcc_reference)
		{
		  if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
		       || TREE_CODE (rhs) == REALPART_EXPR
		       || TREE_CODE (rhs) == IMAGPART_EXPR)
		      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      return fold_unary_loc (EXPR_LOCATION (rhs),
					     TREE_CODE (rhs),
					     TREE_TYPE (rhs), val);
		    }
		  else if (TREE_CODE (rhs) == BIT_FIELD_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      return fold_ternary_loc (EXPR_LOCATION (rhs),
					       TREE_CODE (rhs),
					       TREE_TYPE (rhs), val,
					       TREE_OPERAND (rhs, 1),
					       TREE_OPERAND (rhs, 2));
		    }
		  else if (TREE_CODE (rhs) == MEM_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      if (TREE_CODE (val) == ADDR_EXPR
			  && is_gimple_min_invariant (val))
			{
			  tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
						  unshare_expr (val),
						  TREE_OPERAND (rhs, 1));
			  if (tem)
			    rhs = tem;
			}
		    }
		  return fold_const_aggregate_ref_1 (rhs, valueize);
		}
	      else if (kind == tcc_declaration)
		return get_symbol_constant_value (rhs);
	      return rhs;
	    }
	  case GIMPLE_UNARY_RHS:
	    return NULL_TREE;

	  case GIMPLE_BINARY_RHS:
	    /* Translate &x + CST into an invariant form suitable for
	       further propagation.  */
	    if (subcode == POINTER_PLUS_EXPR)
	      {
		tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
		tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
		if (TREE_CODE (op0) == ADDR_EXPR
		    && TREE_CODE (op1) == INTEGER_CST)
		  {
		    tree off = fold_convert (ptr_type_node, op1);
		    return build_fold_addr_expr_loc
			(loc,
			 fold_build2 (MEM_REF,
				      TREE_TYPE (TREE_TYPE (op0)),
				      unshare_expr (op0), off));
		  }
	      }
	    /* Canonicalize bool != 0 and bool == 0 appearing after
	       valueization.  While gimple_simplify handles this
	       it can get confused by the ~X == 1 -> X == 0 transform
	       which we can't reduce to a SSA name or a constant
	       (and we have no way to tell gimple_simplify to not
	       consider those transforms in the first place).  */
	    else if (subcode == EQ_EXPR
		     || subcode == NE_EXPR)
	      {
		tree lhs = gimple_assign_lhs (stmt);
		tree op0 = gimple_assign_rhs1 (stmt);
		if (useless_type_conversion_p (TREE_TYPE (lhs),
					       TREE_TYPE (op0)))
		  {
		    tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
		    op0 = (*valueize) (op0);
		    if (TREE_CODE (op0) == INTEGER_CST)
		      std::swap (op0, op1);
		    if (TREE_CODE (op1) == INTEGER_CST
			&& ((subcode == NE_EXPR && integer_zerop (op1))
			    || (subcode == EQ_EXPR && integer_onep (op1))))
		      return op0;
		  }
	      }
	    return NULL_TREE;
	  case GIMPLE_TERNARY_RHS:
	    {
	      /* Handle ternary operators that can appear in GIMPLE form.  */
	      tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
	      tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
	      tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
	      return fold_ternary_loc (loc, subcode,
				       gimple_expr_type (stmt), op0, op1, op2);
	    }

	  default:
	    gcc_unreachable ();
	  }
      }
    case GIMPLE_CALL:
      {
	tree fn;
	gcall *call_stmt = as_a <gcall *> (stmt);

	if (gimple_call_internal_p (stmt))
	  {
	    enum tree_code subcode = ERROR_MARK;
	    switch (gimple_call_internal_fn (stmt))
	      {
	      case IFN_UBSAN_CHECK_ADD:
		subcode = PLUS_EXPR;
		break;
	      case IFN_UBSAN_CHECK_SUB:
		subcode = MINUS_EXPR;
		break;
	      case IFN_UBSAN_CHECK_MUL:
		subcode = MULT_EXPR;
		break;
	      default:
		return NULL_TREE;
	      }

	    tree arg0 = gimple_call_arg (stmt, 0);
	    tree arg1 = gimple_call_arg (stmt, 1);
	    tree op0 = (*valueize) (arg0);
	    tree op1 = (*valueize) (arg1);

	    if (TREE_CODE (op0) != INTEGER_CST
		|| TREE_CODE (op1) != INTEGER_CST)
	      {
		switch (subcode)
		  {
		  case MULT_EXPR:
		    /* x * 0 = 0 * x = 0 without overflow.  */
		    if (integer_zerop (op0) || integer_zerop (op1))
		      return build_zero_cst (TREE_TYPE (arg0));
		    break;
		  case MINUS_EXPR:
		    /* y - y = 0 without overflow.  */
		    if (operand_equal_p (op0, op1, 0))
		      return build_zero_cst (TREE_TYPE (arg0));
		    break;
		  default:
		    break;
		  }
	      }
	    tree res
	      = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
	    if (res
		&& TREE_CODE (res) == INTEGER_CST
		&& !TREE_OVERFLOW (res))
	      return res;
	    return NULL_TREE;
	  }

	fn = (*valueize) (gimple_call_fn (stmt));
	if (TREE_CODE (fn) == ADDR_EXPR
	    && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
	    && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
	    && gimple_builtin_call_types_compatible_p (stmt,
						       TREE_OPERAND (fn, 0)))
	  {
	    tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
	    tree retval;
	    unsigned i;
	    for (i = 0; i < gimple_call_num_args (stmt); ++i)
	      args[i] = (*valueize) (gimple_call_arg (stmt, i));
	    retval = fold_builtin_call_array (loc,
					      gimple_call_return_type (call_stmt),
					      fn, gimple_call_num_args (stmt),
					      args);
	    if (retval)
	      {
		/* fold_call_expr wraps the result inside a NOP_EXPR.  */
		STRIP_NOPS (retval);
		retval = fold_convert (gimple_call_return_type (call_stmt),
				       retval);
	      }
	    return retval;
	  }
	return NULL_TREE;
      }

    default:
      return NULL_TREE;
    }
}
/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
   Returns NULL_TREE if folding to a constant is not possible, otherwise
   returns a constant according to is_gimple_min_invariant.  */

tree
gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
{
  tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
  if (res && is_gimple_min_invariant (res))
    return res;
  return NULL_TREE;
}
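/* A typical caller valueizes SSA names through a propagation lattice.
   A minimal sketch (get_constant_value here stands for a hypothetical
   lattice lookup in the style of CCP's):

     static tree
     ccp_like_valueize (tree name)
     {
       tree cst = get_constant_value (name);  // hypothetical lookup
       return cst ? cst : name;
     }
     ...
     tree val = gimple_fold_stmt_to_constant (stmt, ccp_like_valueize);

   Callbacks used in this file are expected to return either a constant
   replacement for NAME or NAME itself, not NULL_TREE.  */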
/* The following set of functions are supposed to fold references using
   their constant initializers.  */

/* See if we can find constructor defining value of BASE.
   When we know the constructor with constant offset (such as
   base is array[40] and we do know constructor of array), then
   BIT_OFFSET is adjusted accordingly.

   As a special case, return error_mark_node when constructor
   is not explicitly available, but it is known to be zero
   such as 'static const int a;'.  */
static tree
get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
		      tree (*valueize)(tree))
{
  HOST_WIDE_INT bit_offset2, size, max_size;
  if (TREE_CODE (base) == MEM_REF)
    {
      if (!integer_zerop (TREE_OPERAND (base, 1)))
	{
	  if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
	    return NULL_TREE;
	  *bit_offset += (mem_ref_offset (base).to_short_addr ()
			  * BITS_PER_UNIT);
	}

      if (valueize
	  && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
	base = valueize (TREE_OPERAND (base, 0));
      if (!base || TREE_CODE (base) != ADDR_EXPR)
	return NULL_TREE;
      base = TREE_OPERAND (base, 0);
    }

  /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
     DECL_INITIAL.  If BASE is a nested reference into another
     ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
     the inner reference.  */
  switch (TREE_CODE (base))
    {
    case VAR_DECL:
    case CONST_DECL:
      {
	tree init = ctor_for_folding (base);

	/* Our semantic is exact opposite of ctor_for_folding;
	   NULL means unknown, while error_mark_node is 0.  */
	if (init == error_mark_node)
	  return NULL_TREE;
	if (!init)
	  return error_mark_node;
	return init;
      }

    case ARRAY_REF:
    case COMPONENT_REF:
      base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
      if (max_size == -1 || size != max_size)
	return NULL_TREE;
      *bit_offset += bit_offset2;
      return get_base_constructor (base, bit_offset, valueize);

    case STRING_CST:
    case CONSTRUCTOR:
      return base;

    default:
      return NULL_TREE;
    }
}
/* CTOR is CONSTRUCTOR of an array type.  Fold reference of type TYPE and size
   SIZE to the memory at bit OFFSET.  */

static tree
fold_array_ctor_reference (tree type, tree ctor,
			   unsigned HOST_WIDE_INT offset,
			   unsigned HOST_WIDE_INT size,
			   tree from_decl)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;
  offset_int low_bound;
  offset_int elt_size;
  offset_int index, max_index;
  offset_int access_index;
  tree domain_type = NULL_TREE, index_type = NULL_TREE;
  HOST_WIDE_INT inner_offset;

  /* Compute low bound and elt size.  */
  if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
    domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    {
      /* Static constructors for variably sized objects make no sense.  */
      gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
      index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
      low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
    }
  else
    low_bound = 0;
  /* Static constructors for variably sized objects make no sense.  */
  gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
	      == INTEGER_CST);
  elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));

  /* We can handle only constantly sized accesses that are known to not
     be larger than size of array element.  */
  if (!TYPE_SIZE_UNIT (type)
      || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
      || wi::lts_p (elt_size, wi::to_offset (TYPE_SIZE_UNIT (type)))
      || elt_size == 0)
    return NULL_TREE;

  /* Compute the array index we look for.  */
  access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
				 elt_size);
  access_index += low_bound;
  if (index_type)
    access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
			    TYPE_SIGN (index_type));

  /* And offset within the access.  */
  inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);

  /* See if the array field is large enough to span whole access.  We do not
     care to fold accesses spanning multiple array indexes.  */
  if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
    return NULL_TREE;

  index = low_bound - 1;
  if (index_type)
    index = wi::ext (index, TYPE_PRECISION (index_type),
		     TYPE_SIGN (index_type));

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
    {
      /* Array constructor might explicitly set index, or specify range,
	 or leave index NULL meaning that it is next index after previous
	 one.  */
      if (cfield)
	{
	  if (TREE_CODE (cfield) == INTEGER_CST)
	    max_index = index = wi::to_offset (cfield);
	  else
	    {
	      gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
	      index = wi::to_offset (TREE_OPERAND (cfield, 0));
	      max_index = wi::to_offset (TREE_OPERAND (cfield, 1));
	    }
	}
      else
	{
	  index += 1;
	  if (index_type)
	    index = wi::ext (index, TYPE_PRECISION (index_type),
			     TYPE_SIGN (index_type));
	  max_index = index;
	}

      /* Do we have a match?  */
      if (wi::cmpu (access_index, index) >= 0
	  && wi::cmpu (access_index, max_index) <= 0)
	return fold_ctor_reference (type, cval, inner_offset, size,
				    from_decl);
    }
  /* When memory is not explicitly mentioned in constructor,
     it is 0 (or out of range).  */
  return build_zero_cst (type);
}
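/* For example, given
     static const int a[3] = { 10, 20, 30 };
   a 32-bit read at bit offset 32 computes access_index 1 and folds to
   the INTEGER_CST 20, while an index whose initializer is not present
   in the constructor (e.g. a trailing zero-initialized element) folds
   to build_zero_cst of the access type.  */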
/* CTOR is CONSTRUCTOR of an aggregate or vector.
   Fold reference of type TYPE and size SIZE to the memory at bit OFFSET.  */

static tree
fold_nonarray_ctor_reference (tree type, tree ctor,
			      unsigned HOST_WIDE_INT offset,
			      unsigned HOST_WIDE_INT size,
			      tree from_decl)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
			    cval)
    {
      tree byte_offset = DECL_FIELD_OFFSET (cfield);
      tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
      tree field_size = DECL_SIZE (cfield);
      offset_int bitoffset;
      offset_int bitoffset_end, access_end;

      /* Variable sized objects in static constructors make no sense,
	 but field_size can be NULL for flexible array members.  */
      gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
		  && TREE_CODE (byte_offset) == INTEGER_CST
		  && (field_size != NULL_TREE
		      ? TREE_CODE (field_size) == INTEGER_CST
		      : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));

      /* Compute bit offset of the field.  */
      bitoffset = (wi::to_offset (field_offset)
		   + wi::lshift (wi::to_offset (byte_offset),
				 LOG2_BITS_PER_UNIT));
      /* Compute bit offset where the field ends.  */
      if (field_size != NULL_TREE)
	bitoffset_end = bitoffset + wi::to_offset (field_size);
      else
	bitoffset_end = 0;

      access_end = offset_int (offset) + size;

      /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
	 [BITOFFSET, BITOFFSET_END)?  */
      if (wi::cmps (access_end, bitoffset) > 0
	  && (field_size == NULL_TREE
	      || wi::lts_p (offset, bitoffset_end)))
	{
	  offset_int inner_offset = offset_int (offset) - bitoffset;
	  /* We do have overlap.  Now see if the field is large enough to
	     cover the access.  Give up for accesses spanning multiple
	     fields.  */
	  if (wi::cmps (access_end, bitoffset_end) > 0)
	    return NULL_TREE;
	  if (wi::lts_p (offset, bitoffset))
	    return NULL_TREE;
	  return fold_ctor_reference (type, cval,
				      inner_offset.to_uhwi (), size,
				      from_decl);
	}
    }
  /* When memory is not explicitly mentioned in constructor, it is 0.  */
  return build_zero_cst (type);
}
/* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
   to the memory at bit OFFSET.  */

tree
fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
		     unsigned HOST_WIDE_INT size, tree from_decl)
{
  tree ret;

  /* We found the field with exact match.  */
  if (useless_type_conversion_p (type, TREE_TYPE (ctor))
      && !offset)
    return canonicalize_constructor_val (unshare_expr (ctor), from_decl);

  /* We are at the end of the walk; see if we can view-convert the
     result.  */
  if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
      /* VIEW_CONVERT_EXPR is defined only for matching sizes.  */
      && !compare_tree_int (TYPE_SIZE (type), size)
      && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
    {
      ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
      ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
      if (ret)
	STRIP_USELESS_TYPE_CONVERSION (ret);
      return ret;
    }
  /* For constants and byte-aligned/sized reads try to go through
     native_encode/interpret.  */
  if (CONSTANT_CLASS_P (ctor)
      && BITS_PER_UNIT == 8
      && offset % BITS_PER_UNIT == 0
      && size % BITS_PER_UNIT == 0
      && size <= MAX_BITSIZE_MODE_ANY_MODE)
    {
      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
      if (native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
			      offset / BITS_PER_UNIT) > 0)
	return native_interpret_expr (type, buf, size / BITS_PER_UNIT);
    }
  if (TREE_CODE (ctor) == CONSTRUCTOR)
    {
      if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
	  || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
	return fold_array_ctor_reference (type, ctor, offset, size,
					  from_decl);
      else
	return fold_nonarray_ctor_reference (type, ctor, offset, size,
					     from_decl);
    }

  return NULL_TREE;
}
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates valueizing SSA
   names using VALUEIZE.  Return NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
{
  tree ctor, idx, base;
  HOST_WIDE_INT offset, size, max_size;
  tree tem;

  if (TREE_THIS_VOLATILE (t))
    return NULL_TREE;

  if (DECL_P (t))
    return get_symbol_constant_value (t);

  tem = fold_read_from_constant_string (t);
  if (tem)
    return tem;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Constant indexes are handled well by get_base_constructor.
	 Only special case variable offsets.
	 FIXME: This code can't handle nested references with variable indexes
	 (they will be handled only by iteration of ccp).  Perhaps we can bring
	 get_ref_base_and_extent here and make it use a valueize callback.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
	  && valueize
	  && (idx = (*valueize) (TREE_OPERAND (t, 1)))
	  && TREE_CODE (idx) == INTEGER_CST)
	{
	  tree low_bound, unit_size;

	  /* If the resulting bit-offset is constant, track it.  */
	  if ((low_bound = array_ref_low_bound (t),
	       TREE_CODE (low_bound) == INTEGER_CST)
	      && (unit_size = array_ref_element_size (t),
		  tree_fits_uhwi_p (unit_size)))
	    {
	      offset_int woffset
		= wi::sext (wi::to_offset (idx) - wi::to_offset (low_bound),
			    TYPE_PRECISION (TREE_TYPE (idx)));

	      if (wi::fits_shwi_p (woffset))
		{
		  offset = woffset.to_shwi ();
		  /* TODO: This code seems wrong, multiply then check
		     to see if it fits.  */
		  offset *= tree_to_uhwi (unit_size);
		  offset *= BITS_PER_UNIT;

		  base = TREE_OPERAND (t, 0);
		  ctor = get_base_constructor (base, &offset, valueize);
		  /* Empty constructor.  Always fold to 0.  */
		  if (ctor == error_mark_node)
		    return build_zero_cst (TREE_TYPE (t));
		  /* Out of bound array access.  Value is undefined,
		     but don't fold.  */
		  if (offset < 0)
		    return NULL_TREE;
		  /* We cannot determine the ctor.  */
		  if (!ctor)
		    return NULL_TREE;
		  return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
					      tree_to_uhwi (unit_size)
					      * BITS_PER_UNIT,
					      base);
		}
	    }
	}
      /* Fallthru.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case TARGET_MEM_REF:
    case MEM_REF:
      base = get_ref_base_and_extent (t, &offset, &size, &max_size);
      ctor = get_base_constructor (base, &offset, valueize);

      /* Empty constructor.  Always fold to 0.  */
      if (ctor == error_mark_node)
	return build_zero_cst (TREE_TYPE (t));
      /* We do not know precise address.  */
      if (max_size == -1 || max_size != size)
	return NULL_TREE;
      /* We cannot determine the ctor.  */
      if (!ctor)
	return NULL_TREE;

      /* Out of bound array access.  Value is undefined, but don't fold.  */
      if (offset < 0)
	return NULL_TREE;

      return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
				  base);

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
	if (c && TREE_CODE (c) == COMPLEX_CST)
	  return fold_build1_loc (EXPR_LOCATION (t),
				  TREE_CODE (t), TREE_TYPE (t), c);
	break;
      }

    default:
      break;
    }

  return NULL_TREE;
}

/* Fold the reference in expression T to a constant if possible or
   return NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref (tree t)
{
  return fold_const_aggregate_ref_1 (t, NULL);
}
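/* For example, with
     static const struct { int x; int y; } p = { 1, 2 };
   folding the COMPONENT_REF p.y returns the INTEGER_CST 2, going
   through get_base_constructor and fold_nonarray_ctor_reference
   above.  */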
/* Lookup virtual method with index TOKEN in a virtual table V
   at OFFSET.
   Set CAN_REFER if non-NULL to false if the method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by a non-C++ produced symbol).  Otherwise just return NULL in that case.  */

tree
gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
				   tree v,
				   unsigned HOST_WIDE_INT offset,
				   bool *can_refer)
{
  tree vtable = v, init, fn;
  unsigned HOST_WIDE_INT size;
  unsigned HOST_WIDE_INT elt_size, access_index;
  tree domain_type;

  if (can_refer)
    *can_refer = true;

  /* First of all double check we have virtual table.  */
  if (TREE_CODE (v) != VAR_DECL
      || !DECL_VIRTUAL_P (v))
    {
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }

  init = ctor_for_folding (v);

  /* The virtual tables should always be born with constructors
     and we always should assume that they are available for
     folding.  At the moment we do not stream them in all cases,
     but it should never happen that the ctor seems unreachable.  */
  gcc_assert (init);
  if (init == error_mark_node)
    {
      gcc_assert (in_lto_p);
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }
  gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
  size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
  offset *= BITS_PER_UNIT;
  offset += token * size;

  /* Lookup the value in the constructor that is assumed to be array.
     This is equivalent to
     fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
			       offset, size, NULL);
     but in a constant time.  We expect that frontend produced a simple
     array without indexed initializers.  */

  gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
  domain_type = TYPE_DOMAIN (TREE_TYPE (init));
  gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
  elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));

  access_index = offset / BITS_PER_UNIT / elt_size;
  gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);

  /* This code makes an assumption that there are no
     indexed fields produced by C++ FE, so we can directly index the array.  */
  if (access_index < CONSTRUCTOR_NELTS (init))
    {
      fn = CONSTRUCTOR_ELT (init, access_index)->value;
      gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
      STRIP_NOPS (fn);
    }
  else
    fn = NULL;

  /* For type inconsistent program we may end up looking up virtual method
     in virtual table that does not contain TOKEN entries.  We may overrun
     the virtual table and pick up a constant or RTTI info pointer.
     In any case the call is undefined.  */
  if (!fn
      || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
      || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
    fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
  else
    {
      fn = TREE_OPERAND (fn, 0);

      /* When cgraph node is missing and function is not public, we cannot
	 devirtualize.  This can happen in WHOPR when the actual method
	 ends up in other partition, because we found devirtualization
	 possibility too late.  */
      if (!can_refer_decl_in_current_unit_p (fn, vtable))
	{
	  if (can_refer)
	    {
	      *can_refer = false;
	      return fn;
	    }
	  return NULL_TREE;
	}
    }

  /* Make sure we create a cgraph node for functions we'll reference.
     They can be non-existent if the reference comes from an entry
     of an external vtable for example.  */
  cgraph_node::get_create (fn);

  return fn;
}
/* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
   is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
   KNOWN_BINFO carries the binfo describing the true type of
   OBJ_TYPE_REF_OBJECT(REF).
   Set CAN_REFER if non-NULL to false if the method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by a non-C++ produced symbol).  Otherwise just return NULL in that case.  */

tree
gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
				  bool *can_refer)
{
  unsigned HOST_WIDE_INT offset;
  tree v;

  v = BINFO_VTABLE (known_binfo);
  /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone.  */
  if (!v)
    return NULL_TREE;

  if (!vtable_pointer_value_to_vtable (v, &v, &offset))
    {
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }
  return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
}
/* Return true iff VAL is a gimple expression that is known to be
   non-negative.  Restricted to floating-point inputs.  */

bool
gimple_val_nonnegative_real_p (tree val)
{
  gimple def_stmt;

  gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));

  /* Use existing logic for non-gimple trees.  */
  if (tree_expr_nonnegative_p (val))
    return true;

  if (TREE_CODE (val) != SSA_NAME)
    return false;

  /* Currently we look only at the immediately defining statement
     to make this determination, since recursion on defining
     statements of operands can lead to quadratic behavior in the
     worst case.  This is expected to catch almost all occurrences
     in practice.  It would be possible to implement limited-depth
     recursion if important cases are lost.  Alternatively, passes
     that need this information (such as the pow/powi lowering code
     in the cse_sincos pass) could be revised to provide it through
     dataflow propagation.  */

  def_stmt = SSA_NAME_DEF_STMT (val);

  if (is_gimple_assign (def_stmt))
    {
      tree op0, op1;

      /* See fold-const.c:tree_expr_nonnegative_p for additional
	 cases that could be handled with recursion.  */

      switch (gimple_assign_rhs_code (def_stmt))
	{
	case ABS_EXPR:
	  /* Always true for floating-point operands.  */
	  return true;

	case MULT_EXPR:
	  /* True if the two operands are identical (since we are
	     restricted to floating-point inputs).  */
	  op0 = gimple_assign_rhs1 (def_stmt);
	  op1 = gimple_assign_rhs2 (def_stmt);

	  if (op0 == op1
	      || operand_equal_p (op0, op1, 0))
	    return true;

	default:
	  break;
	}
    }
  else if (is_gimple_call (def_stmt))
    {
      tree fndecl = gimple_call_fndecl (def_stmt);
      if (fndecl
	  && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	{
	  tree arg1;

	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_ACOS):
	    CASE_FLT_FN (BUILT_IN_ACOSH):
	    CASE_FLT_FN (BUILT_IN_CABS):
	    CASE_FLT_FN (BUILT_IN_COSH):
	    CASE_FLT_FN (BUILT_IN_ERFC):
	    CASE_FLT_FN (BUILT_IN_EXP):
	    CASE_FLT_FN (BUILT_IN_EXP10):
	    CASE_FLT_FN (BUILT_IN_EXP2):
	    CASE_FLT_FN (BUILT_IN_FABS):
	    CASE_FLT_FN (BUILT_IN_FDIM):
	    CASE_FLT_FN (BUILT_IN_HYPOT):
	    CASE_FLT_FN (BUILT_IN_POW10):
	      return true;

	    CASE_FLT_FN (BUILT_IN_SQRT):
	      /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
		 nonnegative inputs.  */
	      if (!HONOR_SIGNED_ZEROS (val))
		return true;

	      break;

	    CASE_FLT_FN (BUILT_IN_POWI):
	      /* True if the second argument is an even integer.  */
	      arg1 = gimple_call_arg (def_stmt, 1);

	      if (TREE_CODE (arg1) == INTEGER_CST
		  && (TREE_INT_CST_LOW (arg1) & 1) == 0)
		return true;

	      break;

	    CASE_FLT_FN (BUILT_IN_POW):
	      /* True if the second argument is an even integer-valued
		 real.  */
	      arg1 = gimple_call_arg (def_stmt, 1);

	      if (TREE_CODE (arg1) == REAL_CST)
		{
		  REAL_VALUE_TYPE c;
		  HOST_WIDE_INT n;

		  c = TREE_REAL_CST (arg1);
		  n = real_to_integer (&c);

		  if ((n & 1) == 0)
		    {
		      REAL_VALUE_TYPE cint;
		      real_from_integer (&cint, VOIDmode, n, SIGNED);
		      if (real_identical (&c, &cint))
			return true;
		    }
		}

	      break;

	    default:
	      break;
	    }
	}
    }

  return false;
}
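/* E.g. a value defined as __builtin_pow (x, 2.0) is recognized as
   nonnegative (even integer-valued exponent), pow (x, 3.0) is not,
   and t * t is accepted because both operands are identical.  */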
/* Given a pointer value T, return a simplified version of an
   indirection through T, or NULL_TREE if no simplification is
   possible.  Note that the resulting type may be different from
   the type pointed to in the sense that it is still compatible
   from the langhooks point of view.  */

tree
gimple_fold_indirect_ref (tree t)
{
  tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
  tree sub = t;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (useless_type_conversion_p (type, optype))
	return op;

      /* *(foo *)&fooarray => fooarray[0] */
      if (TREE_CODE (optype) == ARRAY_TYPE
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
	  && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  if (TREE_CODE (min_val) == INTEGER_CST)
	    return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
	}
    }

  /* *(p + CST) -> ...  */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree addr = TREE_OPERAND (sub, 0);
      tree off = TREE_OPERAND (sub, 1);
      tree addrtype;

      STRIP_NOPS (addr);
      addrtype = TREE_TYPE (addr);

      /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
	  && tree_fits_uhwi_p (off))
	{
	  unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
	  tree part_width = TYPE_SIZE (type);
	  unsigned HOST_WIDE_INT part_widthi
	    = tree_to_shwi (part_width) / BITS_PER_UNIT;
	  unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
	  tree index = bitsize_int (indexi);
	  if (offset / part_widthi
	      < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
	    return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
				part_width, index);
	}

      /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
	{
	  tree size = TYPE_SIZE_UNIT (type);
	  if (tree_int_cst_equal (size, off))
	    return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
	}

      /* *(p + CST) -> MEM_REF <p, CST>.  */
      if (TREE_CODE (addr) != ADDR_EXPR
	  || DECL_P (TREE_OPERAND (addr, 0)))
	return fold_build2 (MEM_REF, type,
			    addr,
			    wide_int_to_tree (ptype, off));
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
      && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      tree osub = sub;
      sub = gimple_fold_indirect_ref (sub);
      if (! sub)
	sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      if (TREE_CODE (min_val) == INTEGER_CST)
	return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
/* Return true if CODE is an operation that when operating on signed
   integer types involves undefined behavior on overflow and the
   operation can be expressed with unsigned arithmetic.  */

bool
arith_code_with_undefined_signed_overflow (tree_code code)
{
  switch (code)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case NEGATE_EXPR:
    case POINTER_PLUS_EXPR:
      return true;
    default:
      return false;
    }
}
/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
   operation that can be transformed to unsigned arithmetic by converting
   its operand, carrying out the operation in the corresponding unsigned
   type and converting the result back to the original type.

   Returns a sequence of statements that replace STMT and also contain
   a modified form of STMT itself.  */
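/* For instance, the assignment
     i_3 = i_1 + i_2;             (signed int, overflow undefined)
   becomes the four-statement sequence
     _4 = (unsigned int) i_1;
     _5 = (unsigned int) i_2;
     _6 = _4 + _5;                (unsigned, wraps)
     i_3 = (int) _6;
   all of which are returned in the new sequence.  */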
gimple_seq
rewrite_to_defined_overflow (gimple stmt)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "rewriting stmt with undefined signed "
	       "overflow ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
    }

  tree lhs = gimple_assign_lhs (stmt);
  tree type = unsigned_type_for (TREE_TYPE (lhs));
  gimple_seq stmts = NULL;
  for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
    {
      gimple_seq stmts2 = NULL;
      gimple_set_op (stmt, i,
		     force_gimple_operand (fold_convert (type,
							 gimple_op (stmt, i)),
					   &stmts2, true, NULL_TREE));
      gimple_seq_add_seq (&stmts, stmts2);
    }
  gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
  gimple_seq_add_stmt (&stmts, stmt);
  gimple cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
  gimple_seq_add_stmt (&stmts, cvt);

  return stmts;
}
/* The valueization hook we use for the gimple_build API simplification.
   This makes us match fold_buildN behavior by only combining with
   statements in the sequence(s) we are currently building.  */

static tree
gimple_build_valueize (tree op)
{
  if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
    return op;
  return NULL_TREE;
}
/* Build the expression CODE OP0 of type TYPE with location LOC,
   simplifying it first if possible.  Returns the built
   expression value and appends statements possibly defining it
   to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum tree_code code, tree type, tree op0)
{
  tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
  if (!res)
    {
      if (gimple_in_ssa_p (cfun))
	res = make_ssa_name (type);
      else
	res = create_tmp_reg (type);
      gimple stmt;
      if (code == REALPART_EXPR
	  || code == IMAGPART_EXPR
	  || code == VIEW_CONVERT_EXPR)
	stmt = gimple_build_assign (res, code, build1 (code, type, op0));
      else
	stmt = gimple_build_assign (res, code, op0);
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}

/* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
   simplifying it first if possible.  Returns the built
   expression value and appends statements possibly defining it
   to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum tree_code code, tree type, tree op0, tree op1)
{
  tree res = gimple_simplify (code, type, op0, op1, seq,
			      gimple_build_valueize);
  if (!res)
    {
      if (gimple_in_ssa_p (cfun))
	res = make_ssa_name (type);
      else
	res = create_tmp_reg (type);
      gimple stmt = gimple_build_assign (res, code, op0, op1);
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}

/* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
   simplifying it first if possible.  Returns the built
   expression value and appends statements possibly defining it
   to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum tree_code code, tree type, tree op0, tree op1, tree op2)
{
  tree res = gimple_simplify (code, type, op0, op1, op2,
			      seq, gimple_build_valueize);
  if (!res)
    {
      if (gimple_in_ssa_p (cfun))
	res = make_ssa_name (type);
      else
	res = create_tmp_reg (type);
      gimple stmt;
      if (code == BIT_FIELD_REF)
	stmt = gimple_build_assign (res, code,
				    build3 (code, type, op0, op1, op2));
      else
	stmt = gimple_build_assign (res, code, op0, op1, op2);
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}

/* Build the call FN (ARG0) with a result of type TYPE
   (or no result if TYPE is void) with location LOC,
   simplifying it first if possible.  Returns the built
   expression value (or NULL_TREE if TYPE is void) and appends
   statements possibly defining it to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum built_in_function fn, tree type, tree arg0)
{
  tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
  if (!res)
    {
      tree decl = builtin_decl_implicit (fn);
      gimple stmt = gimple_build_call (decl, 1, arg0);
      if (!VOID_TYPE_P (type))
	{
	  if (gimple_in_ssa_p (cfun))
	    res = make_ssa_name (type);
	  else
	    res = create_tmp_reg (type);
	  gimple_call_set_lhs (stmt, res);
	}
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}

/* Build the call FN (ARG0, ARG1) with a result of type TYPE
   (or no result if TYPE is void) with location LOC,
   simplifying it first if possible.  Returns the built
   expression value (or NULL_TREE if TYPE is void) and appends
   statements possibly defining it to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum built_in_function fn, tree type, tree arg0, tree arg1)
{
  tree res = gimple_simplify (fn, type, arg0, arg1, seq,
			      gimple_build_valueize);
  if (!res)
    {
      tree decl = builtin_decl_implicit (fn);
      gimple stmt = gimple_build_call (decl, 2, arg0, arg1);
      if (!VOID_TYPE_P (type))
	{
	  if (gimple_in_ssa_p (cfun))
	    res = make_ssa_name (type);
	  else
	    res = create_tmp_reg (type);
	  gimple_call_set_lhs (stmt, res);
	}
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}

/* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
   (or no result if TYPE is void) with location LOC,
   simplifying it first if possible.  Returns the built
   expression value (or NULL_TREE if TYPE is void) and appends
   statements possibly defining it to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum built_in_function fn, tree type,
	      tree arg0, tree arg1, tree arg2)
{
  tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
			      seq, gimple_build_valueize);
  if (!res)
    {
      tree decl = builtin_decl_implicit (fn);
      gimple stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
      if (!VOID_TYPE_P (type))
	{
	  if (gimple_in_ssa_p (cfun))
	    res = make_ssa_name (type);
	  else
	    res = create_tmp_reg (type);
	  gimple_call_set_lhs (stmt, res);
	}
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
/* Build the conversion (TYPE) OP with a result of type TYPE
   with location LOC if such conversion is necessary in GIMPLE,
   simplifying it first.
   Returns the built expression value and appends
   statements possibly defining it to SEQ.  */

tree
gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
{
  if (useless_type_conversion_p (type, TREE_TYPE (op)))
    return op;
  return gimple_build (seq, loc, NOP_EXPR, type, op);
}
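/* A short usage sketch for the gimple_build API (illustrative; gsi is
   assumed to be a gimple_stmt_iterator at the insertion point): to
   compute (int) (a + 1) while folding eagerly,

     gimple_seq stmts = NULL;
     tree sum = gimple_build (&stmts, loc, PLUS_EXPR, TREE_TYPE (a),
			      a, build_one_cst (TREE_TYPE (a)));
     tree res = gimple_convert (&stmts, loc, integer_type_node, sum);
     gsi_insert_seq_before (&gsi, stmts, GSI_SAME_STMT);

   Statements are only emitted for the parts gimple_simplify could not
   fold; if A is itself constant, STMTS may remain empty.  */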