1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010-2015 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
29 #include "fold-const.h"
30 #include "stringpool.h"
31 #include "hard-reg-set.h"
35 #include "insn-config.h"
44 #include "stor-layout.h"
48 #include "dominance.h"
49 #include "basic-block.h"
50 #include "tree-ssa-alias.h"
51 #include "internal-fn.h"
52 #include "gimple-fold.h"
53 #include "gimple-expr.h"
57 #include "gimple-iterator.h"
58 #include "gimple-ssa.h"
59 #include "tree-ssanames.h"
60 #include "tree-into-ssa.h"
63 #include "tree-ssa-propagate.h"
65 #include "plugin-api.h"
68 #include "ipa-utils.h"
69 #include "gimple-pretty-print.h"
70 #include "tree-ssa-address.h"
71 #include "langhooks.h"
72 #include "gimplify-me.h"
77 #include "gimple-match.h"
78 #include "tree-phinodes.h"
79 #include "ssa-iterators.h"
81 /* Return true when DECL can be referenced from current unit.
82 FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
83 We can get declarations that are not possible to reference for various
86 1) When analyzing C++ virtual tables.
87 C++ virtual tables do have known constructors even
88 when they are keyed to other compilation unit.
89 Those tables can contain pointers to methods and vars
90 in other units. Those methods have both STATIC and EXTERNAL
92 2) In WHOPR mode devirtualization might lead to reference
93 to method that was partitioned elsehwere.
94 In this case we have static VAR_DECL or FUNCTION_DECL
95 that has no corresponding callgraph/varpool node
97 3) COMDAT functions referred by external vtables that
98 we devirtualize only during final compilation stage.
99 At this time we already decided that we will not output
100 the function body and thus we can't reference the symbol
104 can_refer_decl_in_current_unit_p (tree decl
, tree from_decl
)
107 struct cgraph_node
*node
;
110 if (DECL_ABSTRACT_P (decl
))
113 /* We are concerned only about static/external vars and functions. */
114 if ((!TREE_STATIC (decl
) && !DECL_EXTERNAL (decl
))
115 || (TREE_CODE (decl
) != VAR_DECL
&& TREE_CODE (decl
) != FUNCTION_DECL
))
118 /* Static objects can be referred only if they was not optimized out yet. */
119 if (!TREE_PUBLIC (decl
) && !DECL_EXTERNAL (decl
))
121 /* Before we start optimizing unreachable code we can be sure all
122 static objects are defined. */
123 if (symtab
->function_flags_ready
)
125 snode
= symtab_node::get (decl
);
126 if (!snode
|| !snode
->definition
)
128 node
= dyn_cast
<cgraph_node
*> (snode
);
129 return !node
|| !node
->global
.inlined_to
;
132 /* We will later output the initializer, so we can refer to it.
133 So we are concerned only when DECL comes from initializer of
134 external var or var that has been optimized out. */
136 || TREE_CODE (from_decl
) != VAR_DECL
137 || (!DECL_EXTERNAL (from_decl
)
138 && (vnode
= varpool_node::get (from_decl
)) != NULL
139 && vnode
->definition
)
141 && (vnode
= varpool_node::get (from_decl
)) != NULL
142 && vnode
->in_other_partition
))
144 /* We are folding reference from external vtable. The vtable may reffer
145 to a symbol keyed to other compilation unit. The other compilation
146 unit may be in separate DSO and the symbol may be hidden. */
147 if (DECL_VISIBILITY_SPECIFIED (decl
)
148 && DECL_EXTERNAL (decl
)
149 && DECL_VISIBILITY (decl
) != VISIBILITY_DEFAULT
150 && (!(snode
= symtab_node::get (decl
)) || !snode
->in_other_partition
))
152 /* When function is public, we always can introduce new reference.
153 Exception are the COMDAT functions where introducing a direct
154 reference imply need to include function body in the curren tunit. */
155 if (TREE_PUBLIC (decl
) && !DECL_COMDAT (decl
))
157 /* We have COMDAT. We are going to check if we still have definition
158 or if the definition is going to be output in other partition.
159 Bypass this when gimplifying; all needed functions will be produced.
161 As observed in PR20991 for already optimized out comdat virtual functions
162 it may be tempting to not necessarily give up because the copy will be
163 output elsewhere when corresponding vtable is output.
164 This is however not possible - ABI specify that COMDATs are output in
165 units where they are used and when the other unit was compiled with LTO
166 it is possible that vtable was kept public while the function itself
168 if (!symtab
->function_flags_ready
)
171 snode
= symtab_node::get (decl
);
173 || ((!snode
->definition
|| DECL_EXTERNAL (decl
))
174 && (!snode
->in_other_partition
175 || (!snode
->forced_by_abi
&& !snode
->force_output
))))
177 node
= dyn_cast
<cgraph_node
*> (snode
);
178 return !node
|| !node
->global
.inlined_to
;
181 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
182 acceptable form for is_gimple_min_invariant.
183 FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL. */
186 canonicalize_constructor_val (tree cval
, tree from_decl
)
188 tree orig_cval
= cval
;
190 if (TREE_CODE (cval
) == POINTER_PLUS_EXPR
191 && TREE_CODE (TREE_OPERAND (cval
, 1)) == INTEGER_CST
)
193 tree ptr
= TREE_OPERAND (cval
, 0);
194 if (is_gimple_min_invariant (ptr
))
195 cval
= build1_loc (EXPR_LOCATION (cval
),
196 ADDR_EXPR
, TREE_TYPE (ptr
),
197 fold_build2 (MEM_REF
, TREE_TYPE (TREE_TYPE (ptr
)),
199 fold_convert (ptr_type_node
,
200 TREE_OPERAND (cval
, 1))));
202 if (TREE_CODE (cval
) == ADDR_EXPR
)
204 tree base
= NULL_TREE
;
205 if (TREE_CODE (TREE_OPERAND (cval
, 0)) == COMPOUND_LITERAL_EXPR
)
207 base
= COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval
, 0));
209 TREE_OPERAND (cval
, 0) = base
;
212 base
= get_base_address (TREE_OPERAND (cval
, 0));
216 if ((TREE_CODE (base
) == VAR_DECL
217 || TREE_CODE (base
) == FUNCTION_DECL
)
218 && !can_refer_decl_in_current_unit_p (base
, from_decl
))
220 if (TREE_CODE (base
) == VAR_DECL
)
221 TREE_ADDRESSABLE (base
) = 1;
222 else if (TREE_CODE (base
) == FUNCTION_DECL
)
224 /* Make sure we create a cgraph node for functions we'll reference.
225 They can be non-existent if the reference comes from an entry
226 of an external vtable for example. */
227 cgraph_node::get_create (base
);
229 /* Fixup types in global initializers. */
230 if (TREE_TYPE (TREE_TYPE (cval
)) != TREE_TYPE (TREE_OPERAND (cval
, 0)))
231 cval
= build_fold_addr_expr (TREE_OPERAND (cval
, 0));
233 if (!useless_type_conversion_p (TREE_TYPE (orig_cval
), TREE_TYPE (cval
)))
234 cval
= fold_convert (TREE_TYPE (orig_cval
), cval
);
237 if (TREE_OVERFLOW_P (cval
))
238 return drop_tree_overflow (cval
);
242 /* If SYM is a constant variable with known value, return the value.
243 NULL_TREE is returned otherwise. */
246 get_symbol_constant_value (tree sym
)
248 tree val
= ctor_for_folding (sym
);
249 if (val
!= error_mark_node
)
253 val
= canonicalize_constructor_val (unshare_expr (val
), sym
);
254 if (val
&& is_gimple_min_invariant (val
))
259 /* Variables declared 'const' without an initializer
260 have zero as the initializer if they may not be
261 overridden at link or run time. */
263 && is_gimple_reg_type (TREE_TYPE (sym
)))
264 return build_zero_cst (TREE_TYPE (sym
));
272 /* Subroutine of fold_stmt. We perform several simplifications of the
273 memory reference tree EXPR and make sure to re-gimplify them properly
274 after propagation of constant addresses. IS_LHS is true if the
275 reference is supposed to be an lvalue. */
278 maybe_fold_reference (tree expr
, bool is_lhs
)
282 if ((TREE_CODE (expr
) == VIEW_CONVERT_EXPR
283 || TREE_CODE (expr
) == REALPART_EXPR
284 || TREE_CODE (expr
) == IMAGPART_EXPR
)
285 && CONSTANT_CLASS_P (TREE_OPERAND (expr
, 0)))
286 return fold_unary_loc (EXPR_LOCATION (expr
),
289 TREE_OPERAND (expr
, 0));
290 else if (TREE_CODE (expr
) == BIT_FIELD_REF
291 && CONSTANT_CLASS_P (TREE_OPERAND (expr
, 0)))
292 return fold_ternary_loc (EXPR_LOCATION (expr
),
295 TREE_OPERAND (expr
, 0),
296 TREE_OPERAND (expr
, 1),
297 TREE_OPERAND (expr
, 2));
300 && (result
= fold_const_aggregate_ref (expr
))
301 && is_gimple_min_invariant (result
))
308 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
309 replacement rhs for the statement or NULL_TREE if no simplification
310 could be made. It is assumed that the operands have been previously
314 fold_gimple_assign (gimple_stmt_iterator
*si
)
316 gimple stmt
= gsi_stmt (*si
);
317 enum tree_code subcode
= gimple_assign_rhs_code (stmt
);
318 location_t loc
= gimple_location (stmt
);
320 tree result
= NULL_TREE
;
322 switch (get_gimple_rhs_class (subcode
))
324 case GIMPLE_SINGLE_RHS
:
326 tree rhs
= gimple_assign_rhs1 (stmt
);
328 if (TREE_CLOBBER_P (rhs
))
331 if (REFERENCE_CLASS_P (rhs
))
332 return maybe_fold_reference (rhs
, false);
334 else if (TREE_CODE (rhs
) == OBJ_TYPE_REF
)
336 tree val
= OBJ_TYPE_REF_EXPR (rhs
);
337 if (is_gimple_min_invariant (val
))
339 else if (flag_devirtualize
&& virtual_method_call_p (rhs
))
342 vec
<cgraph_node
*>targets
343 = possible_polymorphic_call_targets (rhs
, stmt
, &final
);
344 if (final
&& targets
.length () <= 1 && dbg_cnt (devirt
))
346 if (dump_enabled_p ())
348 location_t loc
= gimple_location_safe (stmt
);
349 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS
, loc
,
350 "resolving virtual function address "
351 "reference to function %s\n",
352 targets
.length () == 1
353 ? targets
[0]->name ()
356 if (targets
.length () == 1)
358 val
= fold_convert (TREE_TYPE (val
),
359 build_fold_addr_expr_loc
360 (loc
, targets
[0]->decl
));
361 STRIP_USELESS_TYPE_CONVERSION (val
);
364 /* We can not use __builtin_unreachable here because it
365 can not have address taken. */
366 val
= build_int_cst (TREE_TYPE (val
), 0);
372 else if (TREE_CODE (rhs
) == ADDR_EXPR
)
374 tree ref
= TREE_OPERAND (rhs
, 0);
375 tree tem
= maybe_fold_reference (ref
, true);
377 && TREE_CODE (tem
) == MEM_REF
378 && integer_zerop (TREE_OPERAND (tem
, 1)))
379 result
= fold_convert (TREE_TYPE (rhs
), TREE_OPERAND (tem
, 0));
381 result
= fold_convert (TREE_TYPE (rhs
),
382 build_fold_addr_expr_loc (loc
, tem
));
383 else if (TREE_CODE (ref
) == MEM_REF
384 && integer_zerop (TREE_OPERAND (ref
, 1)))
385 result
= fold_convert (TREE_TYPE (rhs
), TREE_OPERAND (ref
, 0));
388 else if (TREE_CODE (rhs
) == CONSTRUCTOR
389 && TREE_CODE (TREE_TYPE (rhs
)) == VECTOR_TYPE
390 && (CONSTRUCTOR_NELTS (rhs
)
391 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs
))))
393 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
397 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs
), i
, val
)
398 if (TREE_CODE (val
) != INTEGER_CST
399 && TREE_CODE (val
) != REAL_CST
400 && TREE_CODE (val
) != FIXED_CST
)
403 return build_vector_from_ctor (TREE_TYPE (rhs
),
404 CONSTRUCTOR_ELTS (rhs
));
407 else if (DECL_P (rhs
))
408 return get_symbol_constant_value (rhs
);
410 /* If we couldn't fold the RHS, hand over to the generic
412 if (result
== NULL_TREE
)
415 /* Strip away useless type conversions. Both the NON_LVALUE_EXPR
416 that may have been added by fold, and "useless" type
417 conversions that might now be apparent due to propagation. */
418 STRIP_USELESS_TYPE_CONVERSION (result
);
420 if (result
!= rhs
&& valid_gimple_rhs_p (result
))
427 case GIMPLE_UNARY_RHS
:
430 case GIMPLE_BINARY_RHS
:
431 /* Try to canonicalize for boolean-typed X the comparisons
432 X == 0, X == 1, X != 0, and X != 1. */
433 if (gimple_assign_rhs_code (stmt
) == EQ_EXPR
434 || gimple_assign_rhs_code (stmt
) == NE_EXPR
)
436 tree lhs
= gimple_assign_lhs (stmt
);
437 tree op1
= gimple_assign_rhs1 (stmt
);
438 tree op2
= gimple_assign_rhs2 (stmt
);
439 tree type
= TREE_TYPE (op1
);
441 /* Check whether the comparison operands are of the same boolean
442 type as the result type is.
443 Check that second operand is an integer-constant with value
445 if (TREE_CODE (op2
) == INTEGER_CST
446 && (integer_zerop (op2
) || integer_onep (op2
))
447 && useless_type_conversion_p (TREE_TYPE (lhs
), type
))
449 enum tree_code cmp_code
= gimple_assign_rhs_code (stmt
);
450 bool is_logical_not
= false;
452 /* X == 0 and X != 1 is a logical-not.of X
453 X == 1 and X != 0 is X */
454 if ((cmp_code
== EQ_EXPR
&& integer_zerop (op2
))
455 || (cmp_code
== NE_EXPR
&& integer_onep (op2
)))
456 is_logical_not
= true;
458 if (is_logical_not
== false)
460 /* Only for one-bit precision typed X the transformation
461 !X -> ~X is valied. */
462 else if (TYPE_PRECISION (type
) == 1)
463 result
= build1_loc (gimple_location (stmt
), BIT_NOT_EXPR
,
465 /* Otherwise we use !X -> X ^ 1. */
467 result
= build2_loc (gimple_location (stmt
), BIT_XOR_EXPR
,
468 type
, op1
, build_int_cst (type
, 1));
474 result
= fold_binary_loc (loc
, subcode
,
475 TREE_TYPE (gimple_assign_lhs (stmt
)),
476 gimple_assign_rhs1 (stmt
),
477 gimple_assign_rhs2 (stmt
));
481 STRIP_USELESS_TYPE_CONVERSION (result
);
482 if (valid_gimple_rhs_p (result
))
487 case GIMPLE_TERNARY_RHS
:
488 /* Try to fold a conditional expression. */
489 if (gimple_assign_rhs_code (stmt
) == COND_EXPR
)
491 tree op0
= gimple_assign_rhs1 (stmt
);
494 location_t cond_loc
= gimple_location (stmt
);
496 if (COMPARISON_CLASS_P (op0
))
498 fold_defer_overflow_warnings ();
499 tem
= fold_binary_loc (cond_loc
,
500 TREE_CODE (op0
), TREE_TYPE (op0
),
501 TREE_OPERAND (op0
, 0),
502 TREE_OPERAND (op0
, 1));
503 /* This is actually a conditional expression, not a GIMPLE
504 conditional statement, however, the valid_gimple_rhs_p
505 test still applies. */
506 set
= (tem
&& is_gimple_condexpr (tem
)
507 && valid_gimple_rhs_p (tem
));
508 fold_undefer_overflow_warnings (set
, stmt
, 0);
510 else if (is_gimple_min_invariant (op0
))
519 result
= fold_build3_loc (cond_loc
, COND_EXPR
,
520 TREE_TYPE (gimple_assign_lhs (stmt
)), tem
,
521 gimple_assign_rhs2 (stmt
),
522 gimple_assign_rhs3 (stmt
));
526 result
= fold_ternary_loc (loc
, subcode
,
527 TREE_TYPE (gimple_assign_lhs (stmt
)),
528 gimple_assign_rhs1 (stmt
),
529 gimple_assign_rhs2 (stmt
),
530 gimple_assign_rhs3 (stmt
));
534 STRIP_USELESS_TYPE_CONVERSION (result
);
535 if (valid_gimple_rhs_p (result
))
540 case GIMPLE_INVALID_RHS
:
547 /* Attempt to fold a conditional statement. Return true if any changes were
548 made. We only attempt to fold the condition expression, and do not perform
549 any transformation that would require alteration of the cfg. It is
550 assumed that the operands have been previously folded. */
553 fold_gimple_cond (gcond
*stmt
)
555 tree result
= fold_binary_loc (gimple_location (stmt
),
556 gimple_cond_code (stmt
),
558 gimple_cond_lhs (stmt
),
559 gimple_cond_rhs (stmt
));
563 STRIP_USELESS_TYPE_CONVERSION (result
);
564 if (is_gimple_condexpr (result
) && valid_gimple_rhs_p (result
))
566 gimple_cond_set_condition_from_tree (stmt
, result
);
575 /* Replace a statement at *SI_P with a sequence of statements in STMTS,
576 adjusting the replacement stmts location and virtual operands.
577 If the statement has a lhs the last stmt in the sequence is expected
578 to assign to that lhs. */
581 gsi_replace_with_seq_vops (gimple_stmt_iterator
*si_p
, gimple_seq stmts
)
583 gimple stmt
= gsi_stmt (*si_p
);
585 if (gimple_has_location (stmt
))
586 annotate_all_with_location (stmts
, gimple_location (stmt
));
588 /* First iterate over the replacement statements backward, assigning
589 virtual operands to their defining statements. */
590 gimple laststore
= NULL
;
591 for (gimple_stmt_iterator i
= gsi_last (stmts
);
592 !gsi_end_p (i
); gsi_prev (&i
))
594 gimple new_stmt
= gsi_stmt (i
);
595 if ((gimple_assign_single_p (new_stmt
)
596 && !is_gimple_reg (gimple_assign_lhs (new_stmt
)))
597 || (is_gimple_call (new_stmt
)
598 && (gimple_call_flags (new_stmt
)
599 & (ECF_NOVOPS
| ECF_PURE
| ECF_CONST
| ECF_NORETURN
)) == 0))
603 vdef
= gimple_vdef (stmt
);
605 vdef
= make_ssa_name (gimple_vop (cfun
), new_stmt
);
606 gimple_set_vdef (new_stmt
, vdef
);
607 if (vdef
&& TREE_CODE (vdef
) == SSA_NAME
)
608 SSA_NAME_DEF_STMT (vdef
) = new_stmt
;
609 laststore
= new_stmt
;
613 /* Second iterate over the statements forward, assigning virtual
614 operands to their uses. */
615 tree reaching_vuse
= gimple_vuse (stmt
);
616 for (gimple_stmt_iterator i
= gsi_start (stmts
);
617 !gsi_end_p (i
); gsi_next (&i
))
619 gimple new_stmt
= gsi_stmt (i
);
620 /* If the new statement possibly has a VUSE, update it with exact SSA
621 name we know will reach this one. */
622 if (gimple_has_mem_ops (new_stmt
))
623 gimple_set_vuse (new_stmt
, reaching_vuse
);
624 gimple_set_modified (new_stmt
, true);
625 if (gimple_vdef (new_stmt
))
626 reaching_vuse
= gimple_vdef (new_stmt
);
629 /* If the new sequence does not do a store release the virtual
630 definition of the original statement. */
632 && reaching_vuse
== gimple_vuse (stmt
))
634 tree vdef
= gimple_vdef (stmt
);
636 && TREE_CODE (vdef
) == SSA_NAME
)
638 unlink_stmt_vdef (stmt
);
639 release_ssa_name (vdef
);
643 /* Finally replace the original statement with the sequence. */
644 gsi_replace_with_seq (si_p
, stmts
, false);
647 /* Convert EXPR into a GIMPLE value suitable for substitution on the
648 RHS of an assignment. Insert the necessary statements before
649 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
650 is replaced. If the call is expected to produces a result, then it
651 is replaced by an assignment of the new RHS to the result variable.
652 If the result is to be ignored, then the call is replaced by a
653 GIMPLE_NOP. A proper VDEF chain is retained by making the first
654 VUSE and the last VDEF of the whole sequence be the same as the replaced
655 statement and using new SSA names for stores in between. */
658 gimplify_and_update_call_from_tree (gimple_stmt_iterator
*si_p
, tree expr
)
661 gimple stmt
, new_stmt
;
662 gimple_stmt_iterator i
;
663 gimple_seq stmts
= NULL
;
665 stmt
= gsi_stmt (*si_p
);
667 gcc_assert (is_gimple_call (stmt
));
669 push_gimplify_context (gimple_in_ssa_p (cfun
));
671 lhs
= gimple_call_lhs (stmt
);
672 if (lhs
== NULL_TREE
)
674 gimplify_and_add (expr
, &stmts
);
675 /* We can end up with folding a memcpy of an empty class assignment
676 which gets optimized away by C++ gimplification. */
677 if (gimple_seq_empty_p (stmts
))
679 pop_gimplify_context (NULL
);
680 if (gimple_in_ssa_p (cfun
))
682 unlink_stmt_vdef (stmt
);
685 gsi_replace (si_p
, gimple_build_nop (), true);
691 tree tmp
= get_initialized_tmp_var (expr
, &stmts
, NULL
);
692 new_stmt
= gimple_build_assign (lhs
, tmp
);
693 i
= gsi_last (stmts
);
694 gsi_insert_after_without_update (&i
, new_stmt
,
695 GSI_CONTINUE_LINKING
);
698 pop_gimplify_context (NULL
);
700 gsi_replace_with_seq_vops (si_p
, stmts
);
704 /* Replace the call at *GSI with the gimple value VAL. */
707 replace_call_with_value (gimple_stmt_iterator
*gsi
, tree val
)
709 gimple stmt
= gsi_stmt (*gsi
);
710 tree lhs
= gimple_call_lhs (stmt
);
714 if (!useless_type_conversion_p (TREE_TYPE (lhs
), TREE_TYPE (val
)))
715 val
= fold_convert (TREE_TYPE (lhs
), val
);
716 repl
= gimple_build_assign (lhs
, val
);
719 repl
= gimple_build_nop ();
720 tree vdef
= gimple_vdef (stmt
);
721 if (vdef
&& TREE_CODE (vdef
) == SSA_NAME
)
723 unlink_stmt_vdef (stmt
);
724 release_ssa_name (vdef
);
726 gsi_replace (gsi
, repl
, true);
729 /* Replace the call at *GSI with the new call REPL and fold that
733 replace_call_with_call_and_fold (gimple_stmt_iterator
*gsi
, gimple repl
)
735 gimple stmt
= gsi_stmt (*gsi
);
736 gimple_call_set_lhs (repl
, gimple_call_lhs (stmt
));
737 gimple_set_location (repl
, gimple_location (stmt
));
738 if (gimple_vdef (stmt
)
739 && TREE_CODE (gimple_vdef (stmt
)) == SSA_NAME
)
741 gimple_set_vdef (repl
, gimple_vdef (stmt
));
742 gimple_set_vuse (repl
, gimple_vuse (stmt
));
743 SSA_NAME_DEF_STMT (gimple_vdef (repl
)) = repl
;
745 gsi_replace (gsi
, repl
, true);
749 /* Return true if VAR is a VAR_DECL or a component thereof. */
752 var_decl_component_p (tree var
)
755 while (handled_component_p (inner
))
756 inner
= TREE_OPERAND (inner
, 0);
757 return SSA_VAR_P (inner
);
760 /* Fold function call to builtin mem{{,p}cpy,move}. Return
761 false if no simplification can be made.
762 If ENDP is 0, return DEST (like memcpy).
763 If ENDP is 1, return DEST+LEN (like mempcpy).
764 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
765 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
769 gimple_fold_builtin_memory_op (gimple_stmt_iterator
*gsi
,
770 tree dest
, tree src
, int endp
)
772 gimple stmt
= gsi_stmt (*gsi
);
773 tree lhs
= gimple_call_lhs (stmt
);
774 tree len
= gimple_call_arg (stmt
, 2);
775 tree destvar
, srcvar
;
776 location_t loc
= gimple_location (stmt
);
778 /* If the LEN parameter is zero, return DEST. */
779 if (integer_zerop (len
))
782 if (gimple_call_lhs (stmt
))
783 repl
= gimple_build_assign (gimple_call_lhs (stmt
), dest
);
785 repl
= gimple_build_nop ();
786 tree vdef
= gimple_vdef (stmt
);
787 if (vdef
&& TREE_CODE (vdef
) == SSA_NAME
)
789 unlink_stmt_vdef (stmt
);
790 release_ssa_name (vdef
);
792 gsi_replace (gsi
, repl
, true);
796 /* If SRC and DEST are the same (and not volatile), return
797 DEST{,+LEN,+LEN-1}. */
798 if (operand_equal_p (src
, dest
, 0))
800 unlink_stmt_vdef (stmt
);
801 if (gimple_vdef (stmt
) && TREE_CODE (gimple_vdef (stmt
)) == SSA_NAME
)
802 release_ssa_name (gimple_vdef (stmt
));
805 gsi_replace (gsi
, gimple_build_nop (), true);
812 tree srctype
, desttype
;
813 unsigned int src_align
, dest_align
;
816 /* Build accesses at offset zero with a ref-all character type. */
817 off0
= build_int_cst (build_pointer_type_for_mode (char_type_node
,
820 /* If we can perform the copy efficiently with first doing all loads
821 and then all stores inline it that way. Currently efficiently
822 means that we can load all the memory into a single integer
823 register which is what MOVE_MAX gives us. */
824 src_align
= get_pointer_alignment (src
);
825 dest_align
= get_pointer_alignment (dest
);
826 if (tree_fits_uhwi_p (len
)
827 && compare_tree_int (len
, MOVE_MAX
) <= 0
828 /* ??? Don't transform copies from strings with known length this
829 confuses the tree-ssa-strlen.c. This doesn't handle
830 the case in gcc.dg/strlenopt-8.c which is XFAILed for that
832 && !c_strlen (src
, 2))
834 unsigned ilen
= tree_to_uhwi (len
);
835 if (exact_log2 (ilen
) != -1)
837 tree type
= lang_hooks
.types
.type_for_size (ilen
* 8, 1);
839 && TYPE_MODE (type
) != BLKmode
840 && (GET_MODE_SIZE (TYPE_MODE (type
)) * BITS_PER_UNIT
842 /* If the destination pointer is not aligned we must be able
843 to emit an unaligned store. */
844 && (dest_align
>= GET_MODE_ALIGNMENT (TYPE_MODE (type
))
845 || !SLOW_UNALIGNED_ACCESS (TYPE_MODE (type
), dest_align
)))
848 tree desttype
= type
;
849 if (src_align
< GET_MODE_ALIGNMENT (TYPE_MODE (type
)))
850 srctype
= build_aligned_type (type
, src_align
);
851 tree srcmem
= fold_build2 (MEM_REF
, srctype
, src
, off0
);
852 tree tem
= fold_const_aggregate_ref (srcmem
);
855 else if (src_align
< GET_MODE_ALIGNMENT (TYPE_MODE (type
))
856 && SLOW_UNALIGNED_ACCESS (TYPE_MODE (type
),
862 if (is_gimple_reg_type (TREE_TYPE (srcmem
)))
864 new_stmt
= gimple_build_assign (NULL_TREE
, srcmem
);
865 if (gimple_in_ssa_p (cfun
))
866 srcmem
= make_ssa_name (TREE_TYPE (srcmem
),
869 srcmem
= create_tmp_reg (TREE_TYPE (srcmem
));
870 gimple_assign_set_lhs (new_stmt
, srcmem
);
871 gimple_set_vuse (new_stmt
, gimple_vuse (stmt
));
872 gsi_insert_before (gsi
, new_stmt
, GSI_SAME_STMT
);
874 if (dest_align
< GET_MODE_ALIGNMENT (TYPE_MODE (type
)))
875 desttype
= build_aligned_type (type
, dest_align
);
877 = gimple_build_assign (fold_build2 (MEM_REF
, desttype
,
880 gimple_set_vuse (new_stmt
, gimple_vuse (stmt
));
881 gimple_set_vdef (new_stmt
, gimple_vdef (stmt
));
882 if (gimple_vdef (new_stmt
)
883 && TREE_CODE (gimple_vdef (new_stmt
)) == SSA_NAME
)
884 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt
)) = new_stmt
;
887 gsi_replace (gsi
, new_stmt
, true);
890 gsi_insert_before (gsi
, new_stmt
, GSI_SAME_STMT
);
899 /* Both DEST and SRC must be pointer types.
900 ??? This is what old code did. Is the testing for pointer types
903 If either SRC is readonly or length is 1, we can use memcpy. */
904 if (!dest_align
|| !src_align
)
906 if (readonly_data_expr (src
)
907 || (tree_fits_uhwi_p (len
)
908 && (MIN (src_align
, dest_align
) / BITS_PER_UNIT
909 >= tree_to_uhwi (len
))))
911 tree fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
914 gimple_call_set_fndecl (stmt
, fn
);
915 gimple_call_set_arg (stmt
, 0, dest
);
916 gimple_call_set_arg (stmt
, 1, src
);
921 /* If *src and *dest can't overlap, optimize into memcpy as well. */
922 if (TREE_CODE (src
) == ADDR_EXPR
923 && TREE_CODE (dest
) == ADDR_EXPR
)
925 tree src_base
, dest_base
, fn
;
926 HOST_WIDE_INT src_offset
= 0, dest_offset
= 0;
927 HOST_WIDE_INT size
= -1;
928 HOST_WIDE_INT maxsize
= -1;
930 srcvar
= TREE_OPERAND (src
, 0);
931 src_base
= get_ref_base_and_extent (srcvar
, &src_offset
,
933 destvar
= TREE_OPERAND (dest
, 0);
934 dest_base
= get_ref_base_and_extent (destvar
, &dest_offset
,
936 if (tree_fits_uhwi_p (len
))
937 maxsize
= tree_to_uhwi (len
);
940 src_offset
/= BITS_PER_UNIT
;
941 dest_offset
/= BITS_PER_UNIT
;
942 if (SSA_VAR_P (src_base
)
943 && SSA_VAR_P (dest_base
))
945 if (operand_equal_p (src_base
, dest_base
, 0)
946 && ranges_overlap_p (src_offset
, maxsize
,
947 dest_offset
, maxsize
))
950 else if (TREE_CODE (src_base
) == MEM_REF
951 && TREE_CODE (dest_base
) == MEM_REF
)
953 if (! operand_equal_p (TREE_OPERAND (src_base
, 0),
954 TREE_OPERAND (dest_base
, 0), 0))
956 offset_int off
= mem_ref_offset (src_base
) + src_offset
;
957 if (!wi::fits_shwi_p (off
))
959 src_offset
= off
.to_shwi ();
961 off
= mem_ref_offset (dest_base
) + dest_offset
;
962 if (!wi::fits_shwi_p (off
))
964 dest_offset
= off
.to_shwi ();
965 if (ranges_overlap_p (src_offset
, maxsize
,
966 dest_offset
, maxsize
))
972 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
975 gimple_call_set_fndecl (stmt
, fn
);
976 gimple_call_set_arg (stmt
, 0, dest
);
977 gimple_call_set_arg (stmt
, 1, src
);
982 /* If the destination and source do not alias optimize into
984 if ((is_gimple_min_invariant (dest
)
985 || TREE_CODE (dest
) == SSA_NAME
)
986 && (is_gimple_min_invariant (src
)
987 || TREE_CODE (src
) == SSA_NAME
))
990 ao_ref_init_from_ptr_and_size (&destr
, dest
, len
);
991 ao_ref_init_from_ptr_and_size (&srcr
, src
, len
);
992 if (!refs_may_alias_p_1 (&destr
, &srcr
, false))
995 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
998 gimple_call_set_fndecl (stmt
, fn
);
999 gimple_call_set_arg (stmt
, 0, dest
);
1000 gimple_call_set_arg (stmt
, 1, src
);
1009 if (!tree_fits_shwi_p (len
))
1012 This logic lose for arguments like (type *)malloc (sizeof (type)),
1013 since we strip the casts of up to VOID return value from malloc.
1014 Perhaps we ought to inherit type from non-VOID argument here? */
1017 if (!POINTER_TYPE_P (TREE_TYPE (src
))
1018 || !POINTER_TYPE_P (TREE_TYPE (dest
)))
1020 /* In the following try to find a type that is most natural to be
1021 used for the memcpy source and destination and that allows
1022 the most optimization when memcpy is turned into a plain assignment
1023 using that type. In theory we could always use a char[len] type
1024 but that only gains us that the destination and source possibly
1025 no longer will have their address taken. */
1026 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
1027 if (TREE_CODE (src
) == POINTER_PLUS_EXPR
)
1029 tree tem
= TREE_OPERAND (src
, 0);
1031 if (tem
!= TREE_OPERAND (src
, 0))
1032 src
= build1 (NOP_EXPR
, TREE_TYPE (tem
), src
);
1034 if (TREE_CODE (dest
) == POINTER_PLUS_EXPR
)
1036 tree tem
= TREE_OPERAND (dest
, 0);
1038 if (tem
!= TREE_OPERAND (dest
, 0))
1039 dest
= build1 (NOP_EXPR
, TREE_TYPE (tem
), dest
);
1041 srctype
= TREE_TYPE (TREE_TYPE (src
));
1042 if (TREE_CODE (srctype
) == ARRAY_TYPE
1043 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype
), len
))
1045 srctype
= TREE_TYPE (srctype
);
1047 src
= build1 (NOP_EXPR
, build_pointer_type (srctype
), src
);
1049 desttype
= TREE_TYPE (TREE_TYPE (dest
));
1050 if (TREE_CODE (desttype
) == ARRAY_TYPE
1051 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype
), len
))
1053 desttype
= TREE_TYPE (desttype
);
1055 dest
= build1 (NOP_EXPR
, build_pointer_type (desttype
), dest
);
1057 if (TREE_ADDRESSABLE (srctype
)
1058 || TREE_ADDRESSABLE (desttype
))
1061 /* Make sure we are not copying using a floating-point mode or
1062 a type whose size possibly does not match its precision. */
1063 if (FLOAT_MODE_P (TYPE_MODE (desttype
))
1064 || TREE_CODE (desttype
) == BOOLEAN_TYPE
1065 || TREE_CODE (desttype
) == ENUMERAL_TYPE
)
1066 desttype
= bitwise_type_for_mode (TYPE_MODE (desttype
));
1067 if (FLOAT_MODE_P (TYPE_MODE (srctype
))
1068 || TREE_CODE (srctype
) == BOOLEAN_TYPE
1069 || TREE_CODE (srctype
) == ENUMERAL_TYPE
)
1070 srctype
= bitwise_type_for_mode (TYPE_MODE (srctype
));
1078 src_align
= get_pointer_alignment (src
);
1079 dest_align
= get_pointer_alignment (dest
);
1080 if (dest_align
< TYPE_ALIGN (desttype
)
1081 || src_align
< TYPE_ALIGN (srctype
))
1085 STRIP_NOPS (destvar
);
1086 if (TREE_CODE (destvar
) == ADDR_EXPR
1087 && var_decl_component_p (TREE_OPERAND (destvar
, 0))
1088 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype
), len
))
1089 destvar
= fold_build2 (MEM_REF
, desttype
, destvar
, off0
);
1091 destvar
= NULL_TREE
;
1094 STRIP_NOPS (srcvar
);
1095 if (TREE_CODE (srcvar
) == ADDR_EXPR
1096 && var_decl_component_p (TREE_OPERAND (srcvar
, 0))
1097 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype
), len
))
1100 || src_align
>= TYPE_ALIGN (desttype
))
1101 srcvar
= fold_build2 (MEM_REF
, destvar
? desttype
: srctype
,
1103 else if (!STRICT_ALIGNMENT
)
1105 srctype
= build_aligned_type (TYPE_MAIN_VARIANT (desttype
),
1107 srcvar
= fold_build2 (MEM_REF
, srctype
, srcvar
, off0
);
1115 if (srcvar
== NULL_TREE
&& destvar
== NULL_TREE
)
1118 if (srcvar
== NULL_TREE
)
1121 if (src_align
>= TYPE_ALIGN (desttype
))
1122 srcvar
= fold_build2 (MEM_REF
, desttype
, src
, off0
);
1125 if (STRICT_ALIGNMENT
)
1127 srctype
= build_aligned_type (TYPE_MAIN_VARIANT (desttype
),
1129 srcvar
= fold_build2 (MEM_REF
, srctype
, src
, off0
);
1132 else if (destvar
== NULL_TREE
)
1135 if (dest_align
>= TYPE_ALIGN (srctype
))
1136 destvar
= fold_build2 (MEM_REF
, srctype
, dest
, off0
);
1139 if (STRICT_ALIGNMENT
)
1141 desttype
= build_aligned_type (TYPE_MAIN_VARIANT (srctype
),
1143 destvar
= fold_build2 (MEM_REF
, desttype
, dest
, off0
);
1148 if (is_gimple_reg_type (TREE_TYPE (srcvar
)))
1150 new_stmt
= gimple_build_assign (NULL_TREE
, srcvar
);
1151 if (gimple_in_ssa_p (cfun
))
1152 srcvar
= make_ssa_name (TREE_TYPE (srcvar
), new_stmt
);
1154 srcvar
= create_tmp_reg (TREE_TYPE (srcvar
));
1155 gimple_assign_set_lhs (new_stmt
, srcvar
);
1156 gimple_set_vuse (new_stmt
, gimple_vuse (stmt
));
1157 gsi_insert_before (gsi
, new_stmt
, GSI_SAME_STMT
);
1159 new_stmt
= gimple_build_assign (destvar
, srcvar
);
1160 gimple_set_vuse (new_stmt
, gimple_vuse (stmt
));
1161 gimple_set_vdef (new_stmt
, gimple_vdef (stmt
));
1162 if (gimple_vdef (new_stmt
)
1163 && TREE_CODE (gimple_vdef (new_stmt
)) == SSA_NAME
)
1164 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt
)) = new_stmt
;
1167 gsi_replace (gsi
, new_stmt
, true);
1170 gsi_insert_before (gsi
, new_stmt
, GSI_SAME_STMT
);
1174 if (endp
== 0 || endp
== 3)
1177 len
= fold_build2_loc (loc
, MINUS_EXPR
, TREE_TYPE (len
), len
,
1179 if (endp
== 2 || endp
== 1)
1180 dest
= fold_build_pointer_plus_loc (loc
, dest
, len
);
1182 dest
= force_gimple_operand_gsi (gsi
, dest
, false, NULL_TREE
, true,
1184 gimple repl
= gimple_build_assign (lhs
, dest
);
1185 gsi_replace (gsi
, repl
, true);
1189 /* Fold function call to builtin memset or bzero at *GSI setting the
1190 memory of size LEN to VAL. Return whether a simplification was made. */
1193 gimple_fold_builtin_memset (gimple_stmt_iterator
*gsi
, tree c
, tree len
)
1195 gimple stmt
= gsi_stmt (*gsi
);
1197 unsigned HOST_WIDE_INT length
, cval
;
1199 /* If the LEN parameter is zero, return DEST. */
1200 if (integer_zerop (len
))
1202 replace_call_with_value (gsi
, gimple_call_arg (stmt
, 0));
1206 if (! tree_fits_uhwi_p (len
))
1209 if (TREE_CODE (c
) != INTEGER_CST
)
1212 tree dest
= gimple_call_arg (stmt
, 0);
1214 if (TREE_CODE (var
) != ADDR_EXPR
)
1217 var
= TREE_OPERAND (var
, 0);
1218 if (TREE_THIS_VOLATILE (var
))
1221 etype
= TREE_TYPE (var
);
1222 if (TREE_CODE (etype
) == ARRAY_TYPE
)
1223 etype
= TREE_TYPE (etype
);
1225 if (!INTEGRAL_TYPE_P (etype
)
1226 && !POINTER_TYPE_P (etype
))
1229 if (! var_decl_component_p (var
))
1232 length
= tree_to_uhwi (len
);
1233 if (GET_MODE_SIZE (TYPE_MODE (etype
)) != length
1234 || get_pointer_alignment (dest
) / BITS_PER_UNIT
< length
)
1237 if (length
> HOST_BITS_PER_WIDE_INT
/ BITS_PER_UNIT
)
1240 if (integer_zerop (c
))
1244 if (CHAR_BIT
!= 8 || BITS_PER_UNIT
!= 8 || HOST_BITS_PER_WIDE_INT
> 64)
1247 cval
= TREE_INT_CST_LOW (c
);
1251 cval
|= (cval
<< 31) << 1;
1254 var
= fold_build2 (MEM_REF
, etype
, dest
, build_int_cst (ptr_type_node
, 0));
1255 gimple store
= gimple_build_assign (var
, build_int_cst_type (etype
, cval
));
1256 gimple_set_vuse (store
, gimple_vuse (stmt
));
1257 tree vdef
= gimple_vdef (stmt
);
1258 if (vdef
&& TREE_CODE (vdef
) == SSA_NAME
)
1260 gimple_set_vdef (store
, gimple_vdef (stmt
));
1261 SSA_NAME_DEF_STMT (gimple_vdef (stmt
)) = store
;
1263 gsi_insert_before (gsi
, store
, GSI_SAME_STMT
);
1264 if (gimple_call_lhs (stmt
))
1266 gimple asgn
= gimple_build_assign (gimple_call_lhs (stmt
), dest
);
1267 gsi_replace (gsi
, asgn
, true);
1271 gimple_stmt_iterator gsi2
= *gsi
;
1273 gsi_remove (&gsi2
, true);
1280 /* Return the string length, maximum string length or maximum value of
1282 If ARG is an SSA name variable, follow its use-def chains. If LENGTH
1283 is not NULL and, for TYPE == 0, its value is not equal to the length
1284 we determine or if we are unable to determine the length or value,
1285 return false. VISITED is a bitmap of visited variables.
1286 TYPE is 0 if string length should be returned, 1 for maximum string
1287 length and 2 for maximum value ARG can have. */
1290 get_maxval_strlen (tree arg
, tree
*length
, bitmap
*visited
, int type
)
1295 if (TREE_CODE (arg
) != SSA_NAME
)
1297 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
1298 if (TREE_CODE (arg
) == ADDR_EXPR
1299 && TREE_CODE (TREE_OPERAND (arg
, 0)) == ARRAY_REF
1300 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg
, 0), 1)))
1302 tree aop0
= TREE_OPERAND (TREE_OPERAND (arg
, 0), 0);
1303 if (TREE_CODE (aop0
) == INDIRECT_REF
1304 && TREE_CODE (TREE_OPERAND (aop0
, 0)) == SSA_NAME
)
1305 return get_maxval_strlen (TREE_OPERAND (aop0
, 0),
1306 length
, visited
, type
);
1312 if (TREE_CODE (val
) != INTEGER_CST
1313 || tree_int_cst_sgn (val
) < 0)
1317 val
= c_strlen (arg
, 1);
1325 if (TREE_CODE (*length
) != INTEGER_CST
1326 || TREE_CODE (val
) != INTEGER_CST
)
1329 if (tree_int_cst_lt (*length
, val
))
1333 else if (simple_cst_equal (val
, *length
) != 1)
1341 /* If ARG is registered for SSA update we cannot look at its defining
1343 if (name_registered_for_update_p (arg
))
1346 /* If we were already here, break the infinite cycle. */
1348 *visited
= BITMAP_ALLOC (NULL
);
1349 if (!bitmap_set_bit (*visited
, SSA_NAME_VERSION (arg
)))
1353 def_stmt
= SSA_NAME_DEF_STMT (var
);
1355 switch (gimple_code (def_stmt
))
1358 /* The RHS of the statement defining VAR must either have a
1359 constant length or come from another SSA_NAME with a constant
1361 if (gimple_assign_single_p (def_stmt
)
1362 || gimple_assign_unary_nop_p (def_stmt
))
1364 tree rhs
= gimple_assign_rhs1 (def_stmt
);
1365 return get_maxval_strlen (rhs
, length
, visited
, type
);
1367 else if (gimple_assign_rhs_code (def_stmt
) == COND_EXPR
)
1369 tree op2
= gimple_assign_rhs2 (def_stmt
);
1370 tree op3
= gimple_assign_rhs3 (def_stmt
);
1371 return get_maxval_strlen (op2
, length
, visited
, type
)
1372 && get_maxval_strlen (op3
, length
, visited
, type
);
1378 /* All the arguments of the PHI node must have the same constant
1382 for (i
= 0; i
< gimple_phi_num_args (def_stmt
); i
++)
1384 tree arg
= gimple_phi_arg (def_stmt
, i
)->def
;
1386 /* If this PHI has itself as an argument, we cannot
1387 determine the string length of this argument. However,
1388 if we can find a constant string length for the other
1389 PHI args then we can still be sure that this is a
1390 constant string length. So be optimistic and just
1391 continue with the next argument. */
1392 if (arg
== gimple_phi_result (def_stmt
))
1395 if (!get_maxval_strlen (arg
, length
, visited
, type
))
1407 get_maxval_strlen (tree arg
, int type
)
1409 bitmap visited
= NULL
;
1410 tree len
= NULL_TREE
;
1411 if (!get_maxval_strlen (arg
, &len
, &visited
, type
))
1414 BITMAP_FREE (visited
);
1420 /* Fold function call to builtin strcpy with arguments DEST and SRC.
1421 If LEN is not NULL, it represents the length of the string to be
1422 copied. Return NULL_TREE if no simplification can be made. */
1425 gimple_fold_builtin_strcpy (gimple_stmt_iterator
*gsi
,
1426 tree dest
, tree src
)
1428 location_t loc
= gimple_location (gsi_stmt (*gsi
));
1431 /* If SRC and DEST are the same (and not volatile), return DEST. */
1432 if (operand_equal_p (src
, dest
, 0))
1434 replace_call_with_value (gsi
, dest
);
1438 if (optimize_function_for_size_p (cfun
))
1441 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
1445 tree len
= get_maxval_strlen (src
, 0);
1449 len
= fold_convert_loc (loc
, size_type_node
, len
);
1450 len
= size_binop_loc (loc
, PLUS_EXPR
, len
, build_int_cst (size_type_node
, 1));
1451 len
= force_gimple_operand_gsi (gsi
, len
, true,
1452 NULL_TREE
, true, GSI_SAME_STMT
);
1453 gimple repl
= gimple_build_call (fn
, 3, dest
, src
, len
);
1454 replace_call_with_call_and_fold (gsi
, repl
);
1458 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
1459 If SLEN is not NULL, it represents the length of the source string.
1460 Return NULL_TREE if no simplification can be made. */
1463 gimple_fold_builtin_strncpy (gimple_stmt_iterator
*gsi
,
1464 tree dest
, tree src
, tree len
)
1466 location_t loc
= gimple_location (gsi_stmt (*gsi
));
1469 /* If the LEN parameter is zero, return DEST. */
1470 if (integer_zerop (len
))
1472 replace_call_with_value (gsi
, dest
);
1476 /* We can't compare slen with len as constants below if len is not a
1478 if (TREE_CODE (len
) != INTEGER_CST
)
1481 /* Now, we must be passed a constant src ptr parameter. */
1482 tree slen
= get_maxval_strlen (src
, 0);
1483 if (!slen
|| TREE_CODE (slen
) != INTEGER_CST
)
1486 slen
= size_binop_loc (loc
, PLUS_EXPR
, slen
, ssize_int (1));
1488 /* We do not support simplification of this case, though we do
1489 support it when expanding trees into RTL. */
1490 /* FIXME: generate a call to __builtin_memset. */
1491 if (tree_int_cst_lt (slen
, len
))
1494 /* OK transform into builtin memcpy. */
1495 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
1499 len
= fold_convert_loc (loc
, size_type_node
, len
);
1500 len
= force_gimple_operand_gsi (gsi
, len
, true,
1501 NULL_TREE
, true, GSI_SAME_STMT
);
1502 gimple repl
= gimple_build_call (fn
, 3, dest
, src
, len
);
1503 replace_call_with_call_and_fold (gsi
, repl
);
1507 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
1510 Return NULL_TREE if no simplification was possible, otherwise return the
1511 simplified form of the call as a tree.
1513 The simplified form may be a constant or other expression which
1514 computes the same value, but in a more efficient manner (including
1515 calls to other builtin functions).
1517 The call may contain arguments which need to be evaluated, but
1518 which are not useful to determine the result of the call. In
1519 this case we return a chain of COMPOUND_EXPRs. The LHS of each
1520 COMPOUND_EXPR will be an argument which must be evaluated.
1521 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
1522 COMPOUND_EXPR in the chain will contain the tree for the simplified
1523 form of the builtin function call. */
1526 gimple_fold_builtin_strcat (gimple_stmt_iterator
*gsi
, tree dst
, tree src
)
1528 gimple stmt
= gsi_stmt (*gsi
);
1529 location_t loc
= gimple_location (stmt
);
1531 const char *p
= c_getstr (src
);
1533 /* If the string length is zero, return the dst parameter. */
1534 if (p
&& *p
== '\0')
1536 replace_call_with_value (gsi
, dst
);
1540 if (!optimize_bb_for_speed_p (gimple_bb (stmt
)))
1543 /* See if we can store by pieces into (dst + strlen(dst)). */
1545 tree strlen_fn
= builtin_decl_implicit (BUILT_IN_STRLEN
);
1546 tree memcpy_fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
1548 if (!strlen_fn
|| !memcpy_fn
)
1551 /* If the length of the source string isn't computable don't
1552 split strcat into strlen and memcpy. */
1553 tree len
= get_maxval_strlen (src
, 0);
1557 /* Create strlen (dst). */
1558 gimple_seq stmts
= NULL
, stmts2
;
1559 gimple repl
= gimple_build_call (strlen_fn
, 1, dst
);
1560 gimple_set_location (repl
, loc
);
1561 if (gimple_in_ssa_p (cfun
))
1562 newdst
= make_ssa_name (size_type_node
);
1564 newdst
= create_tmp_reg (size_type_node
);
1565 gimple_call_set_lhs (repl
, newdst
);
1566 gimple_seq_add_stmt_without_update (&stmts
, repl
);
1568 /* Create (dst p+ strlen (dst)). */
1569 newdst
= fold_build_pointer_plus_loc (loc
, dst
, newdst
);
1570 newdst
= force_gimple_operand (newdst
, &stmts2
, true, NULL_TREE
);
1571 gimple_seq_add_seq_without_update (&stmts
, stmts2
);
1573 len
= fold_convert_loc (loc
, size_type_node
, len
);
1574 len
= size_binop_loc (loc
, PLUS_EXPR
, len
,
1575 build_int_cst (size_type_node
, 1));
1576 len
= force_gimple_operand (len
, &stmts2
, true, NULL_TREE
);
1577 gimple_seq_add_seq_without_update (&stmts
, stmts2
);
1579 repl
= gimple_build_call (memcpy_fn
, 3, newdst
, src
, len
);
1580 gimple_seq_add_stmt_without_update (&stmts
, repl
);
1581 if (gimple_call_lhs (stmt
))
1583 repl
= gimple_build_assign (gimple_call_lhs (stmt
), dst
);
1584 gimple_seq_add_stmt_without_update (&stmts
, repl
);
1585 gsi_replace_with_seq_vops (gsi
, stmts
);
1586 /* gsi now points at the assignment to the lhs, get a
1587 stmt iterator to the memcpy call.
1588 ??? We can't use gsi_for_stmt as that doesn't work when the
1589 CFG isn't built yet. */
1590 gimple_stmt_iterator gsi2
= *gsi
;
1596 gsi_replace_with_seq_vops (gsi
, stmts
);
1602 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
1603 are the arguments to the call. */
1606 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator
*gsi
)
1608 gimple stmt
= gsi_stmt (*gsi
);
1609 tree dest
= gimple_call_arg (stmt
, 0);
1610 tree src
= gimple_call_arg (stmt
, 1);
1611 tree size
= gimple_call_arg (stmt
, 2);
1617 /* If the SRC parameter is "", return DEST. */
1618 if (p
&& *p
== '\0')
1620 replace_call_with_value (gsi
, dest
);
1624 if (! tree_fits_uhwi_p (size
) || ! integer_all_onesp (size
))
1627 /* If __builtin_strcat_chk is used, assume strcat is available. */
1628 fn
= builtin_decl_explicit (BUILT_IN_STRCAT
);
1632 gimple repl
= gimple_build_call (fn
, 2, dest
, src
);
1633 replace_call_with_call_and_fold (gsi
, repl
);
1637 /* Simplify a call to the strncat builtin. */
1640 gimple_fold_builtin_strncat (gimple_stmt_iterator
*gsi
)
1642 gcall
*stmt
= as_a
<gcall
*> (gsi_stmt (*gsi
));
1643 tree dst
= gimple_call_arg (stmt
, 0);
1644 tree src
= gimple_call_arg (stmt
, 1);
1645 tree len
= gimple_call_arg (stmt
, 2);
1647 const char *p
= c_getstr (src
);
1649 /* If the requested length is zero, or the src parameter string
1650 length is zero, return the dst parameter. */
1651 if (integer_zerop (len
) || (p
&& *p
== '\0'))
1653 replace_call_with_value (gsi
, dst
);
1657 /* If the requested len is greater than or equal to the string
1658 length, call strcat. */
1659 if (TREE_CODE (len
) == INTEGER_CST
&& p
1660 && compare_tree_int (len
, strlen (p
)) >= 0)
1662 tree fn
= builtin_decl_implicit (BUILT_IN_STRCAT
);
1664 /* If the replacement _DECL isn't initialized, don't do the
1669 gcall
*repl
= gimple_build_call (fn
, 2, dst
, src
);
1670 replace_call_with_call_and_fold (gsi
, repl
);
1677 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
1681 gimple_fold_builtin_strncat_chk (gimple_stmt_iterator
*gsi
)
1683 gimple stmt
= gsi_stmt (*gsi
);
1684 tree dest
= gimple_call_arg (stmt
, 0);
1685 tree src
= gimple_call_arg (stmt
, 1);
1686 tree len
= gimple_call_arg (stmt
, 2);
1687 tree size
= gimple_call_arg (stmt
, 3);
1692 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
1693 if ((p
&& *p
== '\0')
1694 || integer_zerop (len
))
1696 replace_call_with_value (gsi
, dest
);
1700 if (! tree_fits_uhwi_p (size
))
1703 if (! integer_all_onesp (size
))
1705 tree src_len
= c_strlen (src
, 1);
1707 && tree_fits_uhwi_p (src_len
)
1708 && tree_fits_uhwi_p (len
)
1709 && ! tree_int_cst_lt (len
, src_len
))
1711 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
1712 fn
= builtin_decl_explicit (BUILT_IN_STRCAT_CHK
);
1716 gimple repl
= gimple_build_call (fn
, 3, dest
, src
, size
);
1717 replace_call_with_call_and_fold (gsi
, repl
);
1723 /* If __builtin_strncat_chk is used, assume strncat is available. */
1724 fn
= builtin_decl_explicit (BUILT_IN_STRNCAT
);
1728 gimple repl
= gimple_build_call (fn
, 3, dest
, src
, len
);
1729 replace_call_with_call_and_fold (gsi
, repl
);
1733 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
1734 to the call. IGNORE is true if the value returned
1735 by the builtin will be ignored. UNLOCKED is true is true if this
1736 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
1737 the known length of the string. Return NULL_TREE if no simplification
1741 gimple_fold_builtin_fputs (gimple_stmt_iterator
*gsi
,
1742 tree arg0
, tree arg1
,
1745 gimple stmt
= gsi_stmt (*gsi
);
1747 /* If we're using an unlocked function, assume the other unlocked
1748 functions exist explicitly. */
1749 tree
const fn_fputc
= (unlocked
1750 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED
)
1751 : builtin_decl_implicit (BUILT_IN_FPUTC
));
1752 tree
const fn_fwrite
= (unlocked
1753 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED
)
1754 : builtin_decl_implicit (BUILT_IN_FWRITE
));
1756 /* If the return value is used, don't do the transformation. */
1757 if (gimple_call_lhs (stmt
))
1760 /* Get the length of the string passed to fputs. If the length
1761 can't be determined, punt. */
1762 tree len
= get_maxval_strlen (arg0
, 0);
1764 || TREE_CODE (len
) != INTEGER_CST
)
1767 switch (compare_tree_int (len
, 1))
1769 case -1: /* length is 0, delete the call entirely . */
1770 replace_call_with_value (gsi
, integer_zero_node
);
1773 case 0: /* length is 1, call fputc. */
1775 const char *p
= c_getstr (arg0
);
1781 gimple repl
= gimple_build_call (fn_fputc
, 2,
1783 (integer_type_node
, p
[0]), arg1
);
1784 replace_call_with_call_and_fold (gsi
, repl
);
1789 case 1: /* length is greater than 1, call fwrite. */
1791 /* If optimizing for size keep fputs. */
1792 if (optimize_function_for_size_p (cfun
))
1794 /* New argument list transforming fputs(string, stream) to
1795 fwrite(string, 1, len, stream). */
1799 gimple repl
= gimple_build_call (fn_fwrite
, 4, arg0
,
1800 size_one_node
, len
, arg1
);
1801 replace_call_with_call_and_fold (gsi
, repl
);
1810 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
1811 DEST, SRC, LEN, and SIZE are the arguments to the call.
1812 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
1813 code of the builtin. If MAXLEN is not NULL, it is maximum length
1814 passed as third argument. */
1817 gimple_fold_builtin_memory_chk (gimple_stmt_iterator
*gsi
,
1818 tree dest
, tree src
, tree len
, tree size
,
1819 enum built_in_function fcode
)
1821 gimple stmt
= gsi_stmt (*gsi
);
1822 location_t loc
= gimple_location (stmt
);
1823 bool ignore
= gimple_call_lhs (stmt
) == NULL_TREE
;
1826 /* If SRC and DEST are the same (and not volatile), return DEST
1827 (resp. DEST+LEN for __mempcpy_chk). */
1828 if (fcode
!= BUILT_IN_MEMSET_CHK
&& operand_equal_p (src
, dest
, 0))
1830 if (fcode
!= BUILT_IN_MEMPCPY_CHK
)
1832 replace_call_with_value (gsi
, dest
);
1837 tree temp
= fold_build_pointer_plus_loc (loc
, dest
, len
);
1838 temp
= force_gimple_operand_gsi (gsi
, temp
,
1839 false, NULL_TREE
, true,
1841 replace_call_with_value (gsi
, temp
);
1846 if (! tree_fits_uhwi_p (size
))
1849 tree maxlen
= get_maxval_strlen (len
, 2);
1850 if (! integer_all_onesp (size
))
1852 if (! tree_fits_uhwi_p (len
))
1854 /* If LEN is not constant, try MAXLEN too.
1855 For MAXLEN only allow optimizing into non-_ocs function
1856 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
1857 if (maxlen
== NULL_TREE
|| ! tree_fits_uhwi_p (maxlen
))
1859 if (fcode
== BUILT_IN_MEMPCPY_CHK
&& ignore
)
1861 /* (void) __mempcpy_chk () can be optimized into
1862 (void) __memcpy_chk (). */
1863 fn
= builtin_decl_explicit (BUILT_IN_MEMCPY_CHK
);
1867 gimple repl
= gimple_build_call (fn
, 4, dest
, src
, len
, size
);
1868 replace_call_with_call_and_fold (gsi
, repl
);
1877 if (tree_int_cst_lt (size
, maxlen
))
1882 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
1883 mem{cpy,pcpy,move,set} is available. */
1886 case BUILT_IN_MEMCPY_CHK
:
1887 fn
= builtin_decl_explicit (BUILT_IN_MEMCPY
);
1889 case BUILT_IN_MEMPCPY_CHK
:
1890 fn
= builtin_decl_explicit (BUILT_IN_MEMPCPY
);
1892 case BUILT_IN_MEMMOVE_CHK
:
1893 fn
= builtin_decl_explicit (BUILT_IN_MEMMOVE
);
1895 case BUILT_IN_MEMSET_CHK
:
1896 fn
= builtin_decl_explicit (BUILT_IN_MEMSET
);
1905 gimple repl
= gimple_build_call (fn
, 3, dest
, src
, len
);
1906 replace_call_with_call_and_fold (gsi
, repl
);
1910 /* Fold a call to the __st[rp]cpy_chk builtin.
1911 DEST, SRC, and SIZE are the arguments to the call.
1912 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
1913 code of the builtin. If MAXLEN is not NULL, it is maximum length of
1914 strings passed as second argument. */
1917 gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator
*gsi
,
1919 tree src
, tree size
,
1920 enum built_in_function fcode
)
1922 gimple stmt
= gsi_stmt (*gsi
);
1923 location_t loc
= gimple_location (stmt
);
1924 bool ignore
= gimple_call_lhs (stmt
) == NULL_TREE
;
1927 /* If SRC and DEST are the same (and not volatile), return DEST. */
1928 if (fcode
== BUILT_IN_STRCPY_CHK
&& operand_equal_p (src
, dest
, 0))
1930 replace_call_with_value (gsi
, dest
);
1934 if (! tree_fits_uhwi_p (size
))
1937 tree maxlen
= get_maxval_strlen (src
, 1);
1938 if (! integer_all_onesp (size
))
1940 len
= c_strlen (src
, 1);
1941 if (! len
|| ! tree_fits_uhwi_p (len
))
1943 /* If LEN is not constant, try MAXLEN too.
1944 For MAXLEN only allow optimizing into non-_ocs function
1945 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
1946 if (maxlen
== NULL_TREE
|| ! tree_fits_uhwi_p (maxlen
))
1948 if (fcode
== BUILT_IN_STPCPY_CHK
)
1953 /* If return value of __stpcpy_chk is ignored,
1954 optimize into __strcpy_chk. */
1955 fn
= builtin_decl_explicit (BUILT_IN_STRCPY_CHK
);
1959 gimple repl
= gimple_build_call (fn
, 3, dest
, src
, size
);
1960 replace_call_with_call_and_fold (gsi
, repl
);
1964 if (! len
|| TREE_SIDE_EFFECTS (len
))
1967 /* If c_strlen returned something, but not a constant,
1968 transform __strcpy_chk into __memcpy_chk. */
1969 fn
= builtin_decl_explicit (BUILT_IN_MEMCPY_CHK
);
1973 len
= fold_convert_loc (loc
, size_type_node
, len
);
1974 len
= size_binop_loc (loc
, PLUS_EXPR
, len
,
1975 build_int_cst (size_type_node
, 1));
1976 len
= force_gimple_operand_gsi (gsi
, len
, true, NULL_TREE
,
1977 true, GSI_SAME_STMT
);
1978 gimple repl
= gimple_build_call (fn
, 4, dest
, src
, len
, size
);
1979 replace_call_with_call_and_fold (gsi
, repl
);
1986 if (! tree_int_cst_lt (maxlen
, size
))
1990 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
1991 fn
= builtin_decl_explicit (fcode
== BUILT_IN_STPCPY_CHK
1992 ? BUILT_IN_STPCPY
: BUILT_IN_STRCPY
);
1996 gimple repl
= gimple_build_call (fn
, 2, dest
, src
);
1997 replace_call_with_call_and_fold (gsi
, repl
);
2001 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
2002 are the arguments to the call. If MAXLEN is not NULL, it is maximum
2003 length passed as third argument. IGNORE is true if return value can be
2004 ignored. FCODE is the BUILT_IN_* code of the builtin. */
2007 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator
*gsi
,
2008 tree dest
, tree src
,
2009 tree len
, tree size
,
2010 enum built_in_function fcode
)
2012 gimple stmt
= gsi_stmt (*gsi
);
2013 bool ignore
= gimple_call_lhs (stmt
) == NULL_TREE
;
2016 if (fcode
== BUILT_IN_STPNCPY_CHK
&& ignore
)
2018 /* If return value of __stpncpy_chk is ignored,
2019 optimize into __strncpy_chk. */
2020 fn
= builtin_decl_explicit (BUILT_IN_STRNCPY_CHK
);
2023 gimple repl
= gimple_build_call (fn
, 4, dest
, src
, len
, size
);
2024 replace_call_with_call_and_fold (gsi
, repl
);
2029 if (! tree_fits_uhwi_p (size
))
2032 tree maxlen
= get_maxval_strlen (len
, 2);
2033 if (! integer_all_onesp (size
))
2035 if (! tree_fits_uhwi_p (len
))
2037 /* If LEN is not constant, try MAXLEN too.
2038 For MAXLEN only allow optimizing into non-_ocs function
2039 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2040 if (maxlen
== NULL_TREE
|| ! tree_fits_uhwi_p (maxlen
))
2046 if (tree_int_cst_lt (size
, maxlen
))
2050 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
2051 fn
= builtin_decl_explicit (fcode
== BUILT_IN_STPNCPY_CHK
2052 ? BUILT_IN_STPNCPY
: BUILT_IN_STRNCPY
);
2056 gimple repl
= gimple_build_call (fn
, 3, dest
, src
, len
);
2057 replace_call_with_call_and_fold (gsi
, repl
);
2061 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
2062 Return NULL_TREE if no simplification can be made. */
2065 gimple_fold_builtin_stpcpy (gimple_stmt_iterator
*gsi
)
2067 gcall
*stmt
= as_a
<gcall
*> (gsi_stmt (*gsi
));
2068 location_t loc
= gimple_location (stmt
);
2069 tree dest
= gimple_call_arg (stmt
, 0);
2070 tree src
= gimple_call_arg (stmt
, 1);
2071 tree fn
, len
, lenp1
;
2073 /* If the result is unused, replace stpcpy with strcpy. */
2074 if (gimple_call_lhs (stmt
) == NULL_TREE
)
2076 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
2079 gimple_call_set_fndecl (stmt
, fn
);
2084 len
= c_strlen (src
, 1);
2086 || TREE_CODE (len
) != INTEGER_CST
)
2089 if (optimize_function_for_size_p (cfun
)
2090 /* If length is zero it's small enough. */
2091 && !integer_zerop (len
))
2094 /* If the source has a known length replace stpcpy with memcpy. */
2095 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
2099 gimple_seq stmts
= NULL
;
2100 tree tem
= gimple_convert (&stmts
, loc
, size_type_node
, len
);
2101 lenp1
= gimple_build (&stmts
, loc
, PLUS_EXPR
, size_type_node
,
2102 tem
, build_int_cst (size_type_node
, 1));
2103 gsi_insert_seq_before (gsi
, stmts
, GSI_SAME_STMT
);
2104 gcall
*repl
= gimple_build_call (fn
, 3, dest
, src
, lenp1
);
2105 gimple_set_vuse (repl
, gimple_vuse (stmt
));
2106 gimple_set_vdef (repl
, gimple_vdef (stmt
));
2107 if (gimple_vdef (repl
)
2108 && TREE_CODE (gimple_vdef (repl
)) == SSA_NAME
)
2109 SSA_NAME_DEF_STMT (gimple_vdef (repl
)) = repl
;
2110 gsi_insert_before (gsi
, repl
, GSI_SAME_STMT
);
2111 /* Replace the result with dest + len. */
2113 tem
= gimple_convert (&stmts
, loc
, sizetype
, len
);
2114 gsi_insert_seq_before (gsi
, stmts
, GSI_SAME_STMT
);
2115 gassign
*ret
= gimple_build_assign (gimple_call_lhs (stmt
),
2116 POINTER_PLUS_EXPR
, dest
, tem
);
2117 gsi_replace (gsi
, ret
, true);
2118 /* Finally fold the memcpy call. */
2119 gimple_stmt_iterator gsi2
= *gsi
;
/* Fold a call to __{,v}snprintf_chk at *GSI.  Return true if the call was
   simplified, false if a normal call should be emitted instead.  FCODE is
   either BUILT_IN_SNPRINTF_CHK or BUILT_IN_VSNPRINTF_CHK.  */

static bool
gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
                                  enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call.  */
  if (gimple_call_num_args (stmt) < 5)
    return false;

  dest = gimple_call_arg (stmt, 0);
  len = gimple_call_arg (stmt, 1);
  flag = gimple_call_arg (stmt, 2);
  size = gimple_call_arg (stmt, 3);
  fmt = gimple_call_arg (stmt, 4);

  if (! tree_fits_uhwi_p (size))
    return false;

  if (! integer_all_onesp (size))
    {
      tree maxlen = get_maxval_strlen (len, 2);
      if (! tree_fits_uhwi_p (len))
        {
          /* If LEN is not constant, try MAXLEN too.
             For MAXLEN only allow optimizing into non-_ocs function
             if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
          if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
            return false;
        }
      else
        maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
        return false;
    }

  if (!init_target_chars ())
    return false;

  /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      fmt_str = c_getstr (fmt);
      if (fmt_str == NULL)
        return false;
      if (strchr (fmt_str, target_percent) != NULL
          && strcmp (fmt_str, target_percent_s))
        return false;
    }

  /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
     available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
                              ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 5 arguments by 3,
     retaining trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, len);
  gimple_call_set_arg (stmt, 2, fmt);
  for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
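
/* Illustrative sketch (hypothetical operands):
     __snprintf_chk (d, n, 0, os, "%d", i)
   becomes, once the size checks pass,
     snprintf (d, n, "%d", i)
   i.e. the flag and object-size arguments are dropped while the trailing
   varargs are kept in place.  */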
/* Fold a call to __{,v}sprintf_chk at *GSI.  Return true if the call was
   simplified, false if a normal call should be emitted instead.  FCODE is
   either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK.  */

static bool
gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
                                 enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;
  unsigned nargs = gimple_call_num_args (stmt);

  /* Verify the required arguments in the original call.  */
  if (nargs < 4)
    return false;
  dest = gimple_call_arg (stmt, 0);
  flag = gimple_call_arg (stmt, 1);
  size = gimple_call_arg (stmt, 2);
  fmt = gimple_call_arg (stmt, 3);

  if (! tree_fits_uhwi_p (size))
    return false;

  len = NULL_TREE;

  if (!init_target_chars ())
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str != NULL)
    {
      /* If the format doesn't contain % args or %%, we know the size.  */
      if (strchr (fmt_str, target_percent) == 0)
        {
          if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
            len = build_int_cstu (size_type_node, strlen (fmt_str));
        }
      /* If the format is "%s" and first ... argument is a string literal,
         we know the size too.  */
      else if (fcode == BUILT_IN_SPRINTF_CHK
               && strcmp (fmt_str, target_percent_s) == 0)
        {
          tree arg;

          if (nargs == 5)
            {
              arg = gimple_call_arg (stmt, 4);
              if (POINTER_TYPE_P (TREE_TYPE (arg)))
                {
                  len = c_strlen (arg, 1);
                  if (! len || ! tree_fits_uhwi_p (len))
                    len = NULL_TREE;
                }
            }
        }
    }

  if (! integer_all_onesp (size))
    {
      if (! len || ! tree_int_cst_lt (len, size))
        return false;
    }

  /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      if (fmt_str == NULL)
        return false;
      if (strchr (fmt_str, target_percent) != NULL
          && strcmp (fmt_str, target_percent_s))
        return false;
    }

  /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
                              ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 4 arguments by 2,
     retaining trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, fmt);
  for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
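
/* Illustrative sketch (hypothetical operands):
     __sprintf_chk (d, 0, os, "%s", s)  ->  sprintf (d, "%s", s)
   is done when the object size OS is a known constant and either the
   copied length is provably smaller than OS or OS is unknown (all ones).  */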
/* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
   ORIG may be null if this is a 2-argument call.  We don't attempt to
   simplify calls with more than 3 arguments.

   Return true if a simplification was made, false otherwise.  */

static bool
gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree fmt = gimple_call_arg (stmt, 1);
  tree orig = NULL_TREE;
  const char *fmt_str = NULL;

  /* Verify the required arguments in the original call.  We deal with two
     types of sprintf() calls: 'sprintf (str, fmt)' and
     'sprintf (dest, "%s", orig)'.  */
  if (gimple_call_num_args (stmt) > 3)
    return false;

  if (gimple_call_num_args (stmt) == 3)
    orig = gimple_call_arg (stmt, 2);

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);

      if (!fn)
        return false;

      /* Don't optimize sprintf (buf, "abc", ptr++).  */
      if (orig)
        return false;

      /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
         'format' is known to contain no % formats.  */
      gimple_seq stmts = NULL;
      gimple repl = gimple_build_call (fn, 2, dest, fmt);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
        {
          repl = gimple_build_assign (gimple_call_lhs (stmt),
                                      build_int_cst (integer_type_node,
                                                     strlen (fmt_str)));
          gimple_seq_add_stmt_without_update (&stmts, repl);
          gsi_replace_with_seq_vops (gsi, stmts);
          /* gsi now points at the assignment to the lhs, get a
             stmt iterator to the memcpy call.
             ???  We can't use gsi_for_stmt as that doesn't work when the
             CFG isn't built yet.  */
          gimple_stmt_iterator gsi2 = *gsi;
          gsi_prev (&gsi2);
          fold_stmt (&gsi2);
        }
      else
        {
          gsi_replace_with_seq_vops (gsi, stmts);
          fold_stmt (gsi);
        }
      return true;
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn;
      fn = builtin_decl_implicit (BUILT_IN_STRCPY);

      if (!fn)
        return false;

      /* Don't crash on sprintf (str1, "%s").  */
      if (!orig)
        return false;

      tree orig_len = NULL_TREE;
      if (gimple_call_lhs (stmt))
        {
          orig_len = get_maxval_strlen (orig, 0);
          if (!orig_len)
            return false;
        }

      /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2).  */
      gimple_seq stmts = NULL;
      gimple repl = gimple_build_call (fn, 2, dest, orig);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
        {
          if (!useless_type_conversion_p (integer_type_node,
                                          TREE_TYPE (orig_len)))
            orig_len = fold_convert (integer_type_node, orig_len);
          repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
          gimple_seq_add_stmt_without_update (&stmts, repl);
          gsi_replace_with_seq_vops (gsi, stmts);
          /* gsi now points at the assignment to the lhs, get a
             stmt iterator to the memcpy call.
             ???  We can't use gsi_for_stmt as that doesn't work when the
             CFG isn't built yet.  */
          gimple_stmt_iterator gsi2 = *gsi;
          gsi_prev (&gsi2);
          fold_stmt (&gsi2);
        }
      else
        {
          gsi_replace_with_seq_vops (gsi, stmts);
          fold_stmt (gsi);
        }
      return true;
    }
  return false;
}
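
/* Illustrative sketch (hypothetical operands):
     sprintf (d, "hello");         ->  strcpy (d, "hello");
     r = sprintf (d, "hello");     ->  strcpy (d, "hello");  r = 5;
     r = sprintf (d, "%s", s);     ->  strcpy (d, s);  r = strlen-of-S
   where the last form requires the length of S to be a known constant.  */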
/* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
   FMT, and ORIG.  ORIG may be null if this is a 3-argument call.  We don't
   attempt to simplify calls with more than 4 arguments.

   Return true if a simplification was made, false otherwise.  */

static bool
gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest = gimple_call_arg (stmt, 0);
  tree destsize = gimple_call_arg (stmt, 1);
  tree fmt = gimple_call_arg (stmt, 2);
  tree orig = NULL_TREE;
  const char *fmt_str = NULL;

  if (gimple_call_num_args (stmt) > 4)
    return false;

  if (gimple_call_num_args (stmt) == 4)
    orig = gimple_call_arg (stmt, 3);

  if (!tree_fits_uhwi_p (destsize))
    return false;
  unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
        return false;

      /* Don't optimize snprintf (buf, 4, "abc", ptr++).  */
      if (orig)
        return false;

      /* We could expand this as
         memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
         or to
         memcpy (str, fmt_with_nul_at_cstm1, cst);
         but in the former case that might increase code size
         and in the latter case grow .rodata section too much.
         So punt for now.  */
      size_t len = strlen (fmt_str);
      if (len >= destlen)
        return false;

      gimple_seq stmts = NULL;
      gimple repl = gimple_build_call (fn, 2, dest, fmt);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
        {
          repl = gimple_build_assign (gimple_call_lhs (stmt),
                                      build_int_cst (integer_type_node, len));
          gimple_seq_add_stmt_without_update (&stmts, repl);
          gsi_replace_with_seq_vops (gsi, stmts);
          /* gsi now points at the assignment to the lhs, get a
             stmt iterator to the memcpy call.
             ???  We can't use gsi_for_stmt as that doesn't work when the
             CFG isn't built yet.  */
          gimple_stmt_iterator gsi2 = *gsi;
          gsi_prev (&gsi2);
          fold_stmt (&gsi2);
        }
      else
        {
          gsi_replace_with_seq_vops (gsi, stmts);
          fold_stmt (gsi);
        }
      return true;
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
        return false;

      /* Don't crash on snprintf (str1, cst, "%s").  */
      if (!orig)
        return false;

      tree orig_len = get_maxval_strlen (orig, 0);
      if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
        return false;

      /* We could expand this as
         memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
         or to
         memcpy (str1, str2_with_nul_at_cstm1, cst);
         but in the former case that might increase code size
         and in the latter case grow .rodata section too much.
         So punt for now.  */
      if (compare_tree_int (orig_len, destlen) >= 0)
        return false;

      /* Convert snprintf (str1, cst, "%s", str2) into
         strcpy (str1, str2) if strlen (str2) < cst.  */
      gimple_seq stmts = NULL;
      gimple repl = gimple_build_call (fn, 2, dest, orig);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
        {
          if (!useless_type_conversion_p (integer_type_node,
                                          TREE_TYPE (orig_len)))
            orig_len = fold_convert (integer_type_node, orig_len);
          repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
          gimple_seq_add_stmt_without_update (&stmts, repl);
          gsi_replace_with_seq_vops (gsi, stmts);
          /* gsi now points at the assignment to the lhs, get a
             stmt iterator to the memcpy call.
             ???  We can't use gsi_for_stmt as that doesn't work when the
             CFG isn't built yet.  */
          gimple_stmt_iterator gsi2 = *gsi;
          gsi_prev (&gsi2);
          fold_stmt (&gsi2);
        }
      else
        {
          gsi_replace_with_seq_vops (gsi, stmts);
          fold_stmt (gsi);
        }
      return true;
    }
  return false;
}
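
/* Illustrative sketch (hypothetical operands; destination size 16):
     snprintf (d, 16, "abc");         ->  strcpy (d, "abc");      (3 < 16)
     r = snprintf (d, 16, "%s", s);   ->  strcpy (d, s);  r = len-of-S;
   provided the length of the copied string is a known constant smaller
   than the destination size.  */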
/* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
   FP, FMT, and ARG are the arguments to the call.  We don't fold calls with
   more than 3 arguments, and ARG may be null in the 2-argument case.

   Return true if a simplification was made, false otherwise.  FCODE is the
   BUILT_IN_* code of the function to be simplified.  */

static bool
gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
                             tree fp, tree fmt, tree arg,
                             enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree fn_fputc, fn_fputs;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt) != NULL_TREE)
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
         unlocked functions exist explicitly.  */
      fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
      fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
    }
  else
    {
      fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
      fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
    }

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use fputs.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
          && arg)
        return false;

      /* If the format specifier was "", fprintf does nothing.  */
      if (fmt_str[0] == '\0')
        {
          replace_call_with_value (gsi, NULL_TREE);
          return true;
        }

      /* When "string" doesn't contain %, replace all cases of
         fprintf (fp, string) with fputs (string, fp).  The fputs
         builtin will take care of special cases like length == 1.  */
      if (fn_fputs)
        {
          gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
          replace_call_with_call_and_fold (gsi, repl);
          return true;
        }
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
    return false;

  /* If the format specifier was "%s", call __builtin_fputs (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_s) == 0)
    {
      if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
        return false;
      if (fn_fputs)
        {
          gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
          replace_call_with_call_and_fold (gsi, repl);
          return true;
        }
    }

  /* If the format specifier was "%c", call __builtin_fputc (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg
          || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
        return false;
      if (fn_fputc)
        {
          gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
          replace_call_with_call_and_fold (gsi, repl);
          return true;
        }
    }

  return false;
}
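
/* Illustrative sketch (results unused in all cases):
     fprintf (fp, "plain text")  ->  fputs ("plain text", fp)
     fprintf (fp, "%s", s)       ->  fputs (s, fp)
     fprintf (fp, "%c", c)       ->  fputc (c, fp)
     fprintf (fp, "")            ->  (call removed)  */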
/* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
   FMT and ARG are the arguments to the call; we don't fold cases with
   more than 2 arguments, and ARG may be null if this is a 1-argument case.

   Return true if a simplification was made, false otherwise.  FCODE is the
   BUILT_IN_* code of the function to be simplified.  */

static bool
gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
                            tree arg, enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree fn_putchar, fn_puts, newarg;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt) != NULL_TREE)
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (fcode == BUILT_IN_PRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
         unlocked functions exist explicitly.  */
      fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
      fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
    }
  else
    {
      fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
      fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
    }

  if (!init_target_chars ())
    return false;

  if (strcmp (fmt_str, target_percent_s) == 0
      || strchr (fmt_str, target_percent) == NULL)
    {
      const char *str;

      if (strcmp (fmt_str, target_percent_s) == 0)
        {
          if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
            return false;

          if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
            return false;

          str = c_getstr (arg);
          if (str == NULL)
            return false;
        }
      else
        {
          /* The format specifier doesn't contain any '%' characters.  */
          if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
              && arg)
            return false;
          str = fmt_str;
        }

      /* If the string was "", printf does nothing.  */
      if (str[0] == '\0')
        {
          replace_call_with_value (gsi, NULL_TREE);
          return true;
        }

      /* If the string has length of 1, call putchar.  */
      if (str[1] == '\0')
        {
          /* Given printf("c"), (where c is any one character,)
             convert "c"[0] to an int and pass that to the replacement
             function.  */
          newarg = build_int_cst (integer_type_node, str[0]);
          if (fn_putchar)
            {
              gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
              replace_call_with_call_and_fold (gsi, repl);
              return true;
            }
        }
      else
        {
          /* If the string was "string\n", call puts("string").  */
          size_t len = strlen (str);
          if ((unsigned char)str[len - 1] == target_newline
              && (size_t) (int) len == len)
            {
              char *newstr;
              tree offset_node, string_cst;

              /* Create a NUL-terminated string that's one char shorter
                 than the original, stripping off the trailing '\n'.  */
              newarg = build_string_literal (len, str);
              string_cst = string_constant (newarg, &offset_node);
              gcc_checking_assert (string_cst
                                   && (TREE_STRING_LENGTH (string_cst)
                                       == (int) len)
                                   && integer_zerop (offset_node)
                                   && (unsigned char)
                                      TREE_STRING_POINTER (string_cst)[len - 1]
                                      == target_newline);
              /* build_string_literal creates a new STRING_CST,
                 modify it in place to avoid double copying.  */
              newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
              newstr[len - 1] = '\0';
              if (fn_puts)
                {
                  gcall *repl = gimple_build_call (fn_puts, 1, newarg);
                  replace_call_with_call_and_fold (gsi, repl);
                  return true;
                }
            }
          else
            /* We'd like to arrange to call fputs(string,stdout) here,
               but we need stdout and don't have a way to get it yet.  */
            return false;
        }
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
    return false;

  /* If the format specifier was "%s\n", call __builtin_puts(arg).  */
  else if (strcmp (fmt_str, target_percent_s_newline) == 0)
    {
      if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
        return false;
      if (fn_puts)
        {
          gcall *repl = gimple_build_call (fn_puts, 1, arg);
          replace_call_with_call_and_fold (gsi, repl);
          return true;
        }
    }

  /* If the format specifier was "%c", call __builtin_putchar(arg).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg || ! useless_type_conversion_p (integer_type_node,
                                               TREE_TYPE (arg)))
        return false;
      if (fn_putchar)
        {
          gcall *repl = gimple_build_call (fn_putchar, 1, arg);
          replace_call_with_call_and_fold (gsi, repl);
          return true;
        }
    }

  return false;
}
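
/* Illustrative sketch (results unused in all cases):
     printf ("x")         ->  putchar ('x')
     printf ("line\n")    ->  puts ("line")
     printf ("%s\n", s)   ->  puts (s)
     printf ("%c", c)     ->  putchar (c)
     printf ("")          ->  (call removed)  */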
/* Fold a call to __builtin_strlen with known length LEN.  */

static bool
gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree len = get_maxval_strlen (gimple_call_arg (stmt, 0), 0);
  if (!len)
    return false;
  len = force_gimple_operand_gsi (gsi, len, true, NULL, true, GSI_SAME_STMT);
  replace_call_with_value (gsi, len);
  return true;
}
/* Fold the non-target builtin at *GSI and return whether any simplification
   was made.  */

static bool
gimple_fold_builtin (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
  tree callee = gimple_call_fndecl (stmt);

  /* Give up for always_inline inline builtins until they are
     inlined.  */
  if (avoid_folding_inline_builtin (callee))
    return false;

  unsigned n = gimple_call_num_args (stmt);
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
  switch (fcode)
    {
    case BUILT_IN_BZERO:
      return gimple_fold_builtin_memset (gsi, integer_zero_node,
                                         gimple_call_arg (stmt, 1));
    case BUILT_IN_MEMSET:
      return gimple_fold_builtin_memset (gsi,
                                         gimple_call_arg (stmt, 1),
                                         gimple_call_arg (stmt, 2));
    case BUILT_IN_BCOPY:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 1),
                                            gimple_call_arg (stmt, 0), 3);
    case BUILT_IN_MEMCPY:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
                                            gimple_call_arg (stmt, 1), 0);
    case BUILT_IN_MEMPCPY:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
                                            gimple_call_arg (stmt, 1), 1);
    case BUILT_IN_MEMMOVE:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
                                            gimple_call_arg (stmt, 1), 3);
    case BUILT_IN_SPRINTF_CHK:
    case BUILT_IN_VSPRINTF_CHK:
      return gimple_fold_builtin_sprintf_chk (gsi, fcode);
    case BUILT_IN_STRCAT_CHK:
      return gimple_fold_builtin_strcat_chk (gsi);
    case BUILT_IN_STRNCAT_CHK:
      return gimple_fold_builtin_strncat_chk (gsi);
    case BUILT_IN_STRLEN:
      return gimple_fold_builtin_strlen (gsi);
    case BUILT_IN_STRCPY:
      return gimple_fold_builtin_strcpy (gsi,
                                         gimple_call_arg (stmt, 0),
                                         gimple_call_arg (stmt, 1));
    case BUILT_IN_STRNCPY:
      return gimple_fold_builtin_strncpy (gsi,
                                          gimple_call_arg (stmt, 0),
                                          gimple_call_arg (stmt, 1),
                                          gimple_call_arg (stmt, 2));
    case BUILT_IN_STRCAT:
      return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
                                         gimple_call_arg (stmt, 1));
    case BUILT_IN_STRNCAT:
      return gimple_fold_builtin_strncat (gsi);
    case BUILT_IN_FPUTS:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
                                        gimple_call_arg (stmt, 1), false);
    case BUILT_IN_FPUTS_UNLOCKED:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
                                        gimple_call_arg (stmt, 1), true);
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      return gimple_fold_builtin_memory_chk (gsi,
                                             gimple_call_arg (stmt, 0),
                                             gimple_call_arg (stmt, 1),
                                             gimple_call_arg (stmt, 2),
                                             gimple_call_arg (stmt, 3),
                                             fcode);
    case BUILT_IN_STPCPY:
      return gimple_fold_builtin_stpcpy (gsi);
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      return gimple_fold_builtin_stxcpy_chk (gsi,
                                             gimple_call_arg (stmt, 0),
                                             gimple_call_arg (stmt, 1),
                                             gimple_call_arg (stmt, 2),
                                             fcode);
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      return gimple_fold_builtin_stxncpy_chk (gsi,
                                              gimple_call_arg (stmt, 0),
                                              gimple_call_arg (stmt, 1),
                                              gimple_call_arg (stmt, 2),
                                              gimple_call_arg (stmt, 3),
                                              fcode);
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      return gimple_fold_builtin_snprintf_chk (gsi, fcode);
    case BUILT_IN_SNPRINTF:
      return gimple_fold_builtin_snprintf (gsi);
    case BUILT_IN_SPRINTF:
      return gimple_fold_builtin_sprintf (gsi);
    case BUILT_IN_FPRINTF:
    case BUILT_IN_FPRINTF_UNLOCKED:
    case BUILT_IN_VFPRINTF:
      if (n == 2 || n == 3)
        return gimple_fold_builtin_fprintf (gsi,
                                            gimple_call_arg (stmt, 0),
                                            gimple_call_arg (stmt, 1),
                                            n == 3
                                            ? gimple_call_arg (stmt, 2)
                                            : NULL_TREE,
                                            fcode);
      break;
    case BUILT_IN_FPRINTF_CHK:
    case BUILT_IN_VFPRINTF_CHK:
      if (n == 3 || n == 4)
        return gimple_fold_builtin_fprintf (gsi,
                                            gimple_call_arg (stmt, 0),
                                            gimple_call_arg (stmt, 2),
                                            n == 4
                                            ? gimple_call_arg (stmt, 3)
                                            : NULL_TREE,
                                            fcode);
      break;
    case BUILT_IN_PRINTF:
    case BUILT_IN_PRINTF_UNLOCKED:
    case BUILT_IN_VPRINTF:
      if (n == 1 || n == 2)
        return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
                                           n == 2
                                           ? gimple_call_arg (stmt, 1)
                                           : NULL_TREE, fcode);
      break;
    case BUILT_IN_PRINTF_CHK:
    case BUILT_IN_VPRINTF_CHK:
      if (n == 2 || n == 3)
        return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
                                           n == 3
                                           ? gimple_call_arg (stmt, 2)
                                           : NULL_TREE, fcode);
      break;
    default:;
    }

  /* Try the generic builtin folder.  */
  bool ignore = (gimple_call_lhs (stmt) == NULL);
  tree result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
        STRIP_NOPS (result);
      else
        result = fold_convert (gimple_call_return_type (stmt), result);
      if (!update_call_from_tree (gsi, result))
        gimplify_and_update_call_from_tree (gsi, result);
      return true;
    }

  return false;
}
/* Return true if ARG0 CODE ARG1 in infinite signed precision operation
   doesn't fit into TYPE.  The test for overflow should be regardless of
   -fwrapv, and even for unsigned types.  */

static bool
arith_overflowed_p (enum tree_code code, const_tree type,
                    const_tree arg0, const_tree arg1)
{
  typedef FIXED_WIDE_INT (WIDE_INT_MAX_PRECISION * 2) widest2_int;
  typedef generic_wide_int <wi::extended_tree <WIDE_INT_MAX_PRECISION * 2> >
    widest2_int_cst;
  widest2_int warg0 = widest2_int_cst (arg0);
  widest2_int warg1 = widest2_int_cst (arg1);
  widest2_int wres;
  switch (code)
    {
    case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
    case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
    case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
    default: gcc_unreachable ();
    }
  signop sign = TYPE_SIGN (type);
  if (sign == UNSIGNED && wi::neg_p (wres))
    return true;
  return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
}
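
/* Illustrative sketch: for a 32-bit unsigned TYPE, 0xffffffff + 1 is
   computed exactly in the double-width type as 0x100000000, whose minimum
   unsigned precision (33 bits) exceeds TYPE_PRECISION (32), so overflow is
   reported; likewise 0 - 1 is negative, which can never fit an UNSIGNED
   type.  */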
/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.  */

static bool
gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree callee;
  bool changed = false;
  unsigned i;

  /* Fold *& in call arguments.  */
  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
      {
        tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
        if (tmp)
          {
            gimple_call_set_arg (stmt, i, tmp);
            changed = true;
          }
      }

  /* Check for virtual calls that became direct calls.  */
  callee = gimple_call_fn (stmt);
  if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
    {
      if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
        {
          if (dump_file && virtual_method_call_p (callee)
              && !possible_polymorphic_call_target_p
                    (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
                                                     (OBJ_TYPE_REF_EXPR (callee)))))
            {
              fprintf (dump_file,
                       "Type inheritance inconsistent devirtualization of ");
              print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
              fprintf (dump_file, " to ");
              print_generic_expr (dump_file, callee, TDF_SLIM);
              fprintf (dump_file, "\n");
            }

          gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
          changed = true;
        }
      else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
        {
          bool final;
          vec <cgraph_node *>targets
            = possible_polymorphic_call_targets (callee, stmt, &final);
          if (final && targets.length () <= 1 && dbg_cnt (devirt))
            {
              tree lhs = gimple_call_lhs (stmt);
              if (dump_enabled_p ())
                {
                  location_t loc = gimple_location_safe (stmt);
                  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
                                   "folding virtual function call to %s\n",
                                   targets.length () == 1
                                   ? targets[0]->name ()
                                   : "__builtin_unreachable");
                }
              if (targets.length () == 1)
                {
                  gimple_call_set_fndecl (stmt, targets[0]->decl);
                  changed = true;
                  /* If the call becomes noreturn, remove the lhs.  */
                  if (lhs && (gimple_call_flags (stmt) & ECF_NORETURN))
                    {
                      if (TREE_CODE (lhs) == SSA_NAME)
                        {
                          tree var = create_tmp_var (TREE_TYPE (lhs));
                          tree def = get_or_create_ssa_default_def (cfun, var);
                          gimple new_stmt = gimple_build_assign (lhs, def);
                          gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
                        }
                      gimple_call_set_lhs (stmt, NULL_TREE);
                    }
                  maybe_remove_unused_call_args (cfun, stmt);
                }
              else
                {
                  tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
                  gimple new_stmt = gimple_build_call (fndecl, 0);
                  gimple_set_location (new_stmt, gimple_location (stmt));
                  if (lhs && TREE_CODE (lhs) == SSA_NAME)
                    {
                      tree var = create_tmp_var (TREE_TYPE (lhs));
                      tree def = get_or_create_ssa_default_def (cfun, var);

                      /* To satisfy condition for
                         cgraph_update_edges_for_call_stmt_node,
                         we need to preserve GIMPLE_CALL statement
                         at position of GSI iterator.  */
                      update_call_from_tree (gsi, def);
                      gsi_insert_before (gsi, new_stmt, GSI_NEW_STMT);
                    }
                  else
                    {
                      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
                      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
                      gsi_replace (gsi, new_stmt, false);
                    }
                  return true;
                }
            }
        }
    }

  /* Check for indirect calls that became direct calls, and then
     no longer require a static chain.  */
  if (gimple_call_chain (stmt))
    {
      tree fn = gimple_call_fndecl (stmt);
      if (fn && !DECL_STATIC_CHAIN (fn))
        {
          gimple_call_set_chain (stmt, NULL);
          changed = true;
        }
      else
        {
          tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
          if (tmp)
            {
              gimple_call_set_chain (stmt, tmp);
              changed = true;
            }
        }
    }

  if (inplace)
    return changed;

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    {
      if (gimple_fold_builtin (gsi))
        changed = true;
    }
  else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
    {
      changed |= targetm.gimple_fold_builtin (gsi);
    }
  else if (gimple_call_internal_p (stmt))
    {
      enum tree_code subcode = ERROR_MARK;
      tree result = NULL_TREE;
      bool cplx_result = false;
      tree overflow = NULL_TREE;
      switch (gimple_call_internal_fn (stmt))
        {
        case IFN_BUILTIN_EXPECT:
          result = fold_builtin_expect (gimple_location (stmt),
                                        gimple_call_arg (stmt, 0),
                                        gimple_call_arg (stmt, 1),
                                        gimple_call_arg (stmt, 2));
          break;
        case IFN_UBSAN_OBJECT_SIZE:
          if (integer_all_onesp (gimple_call_arg (stmt, 2))
              || (TREE_CODE (gimple_call_arg (stmt, 1)) == INTEGER_CST
                  && TREE_CODE (gimple_call_arg (stmt, 2)) == INTEGER_CST
                  && tree_int_cst_le (gimple_call_arg (stmt, 1),
                                      gimple_call_arg (stmt, 2))))
            {
              gsi_replace (gsi, gimple_build_nop (), true);
              unlink_stmt_vdef (stmt);
              release_defs (stmt);
              return true;
            }
          break;
        case IFN_UBSAN_CHECK_ADD:
          subcode = PLUS_EXPR;
          break;
        case IFN_UBSAN_CHECK_SUB:
          subcode = MINUS_EXPR;
          break;
        case IFN_UBSAN_CHECK_MUL:
          subcode = MULT_EXPR;
          break;
        case IFN_ADD_OVERFLOW:
          subcode = PLUS_EXPR;
          cplx_result = true;
          break;
        case IFN_SUB_OVERFLOW:
          subcode = MINUS_EXPR;
          cplx_result = true;
          break;
        case IFN_MUL_OVERFLOW:
          subcode = MULT_EXPR;
          cplx_result = true;
          break;
        default:
          break;
        }
      if (subcode != ERROR_MARK)
        {
          tree arg0 = gimple_call_arg (stmt, 0);
          tree arg1 = gimple_call_arg (stmt, 1);
          tree type = TREE_TYPE (arg0);
          if (cplx_result)
            {
              tree lhs = gimple_call_lhs (stmt);
              if (lhs == NULL_TREE)
                type = NULL_TREE;
              else
                type = TREE_TYPE (TREE_TYPE (lhs));
            }
          if (type == NULL_TREE)
            ;
          /* x = y + 0; x = y - 0; x = y * 0; */
          else if (integer_zerop (arg1))
            result = subcode == MULT_EXPR ? integer_zero_node : arg0;
          /* x = 0 + y; x = 0 * y; */
          else if (subcode != MINUS_EXPR && integer_zerop (arg0))
            result = subcode == MULT_EXPR ? integer_zero_node : arg1;
          /* x = y - y; */
          else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
            result = integer_zero_node;
          /* x = y * 1; x = 1 * y; */
          else if (subcode == MULT_EXPR && integer_onep (arg1))
            result = arg0;
          else if (subcode == MULT_EXPR && integer_onep (arg0))
            result = arg1;
          else if (TREE_CODE (arg0) == INTEGER_CST
                   && TREE_CODE (arg1) == INTEGER_CST)
            {
              if (cplx_result)
                result = int_const_binop (subcode, fold_convert (type, arg0),
                                          fold_convert (type, arg1));
              else
                result = int_const_binop (subcode, arg0, arg1);
              if (result && arith_overflowed_p (subcode, type, arg0, arg1))
                {
                  if (cplx_result)
                    overflow = build_one_cst (type);
                  else
                    result = NULL_TREE;
                }
            }
          if (result)
            {
              if (result == integer_zero_node)
                result = build_zero_cst (type);
              else if (cplx_result && TREE_TYPE (result) != type)
                {
                  if (TREE_CODE (result) == INTEGER_CST)
                    {
                      if (arith_overflowed_p (PLUS_EXPR, type, result,
                                              integer_zero_node))
                        overflow = build_one_cst (type);
                    }
                  else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
                            && TYPE_UNSIGNED (type))
                           || (TYPE_PRECISION (type)
                               < (TYPE_PRECISION (TREE_TYPE (result))
                                  + (TYPE_UNSIGNED (TREE_TYPE (result))
                                     && !TYPE_UNSIGNED (type)))))
                    result = NULL_TREE;
                  if (result)
                    result = fold_convert (type, result);
                }
            }
        }

      if (result)
        {
          if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
            result = drop_tree_overflow (result);
          if (cplx_result)
            {
              if (overflow == NULL_TREE)
                overflow = build_zero_cst (TREE_TYPE (result));
              tree ctype = build_complex_type (TREE_TYPE (result));
              if (TREE_CODE (result) == INTEGER_CST
                  && TREE_CODE (overflow) == INTEGER_CST)
                result = build_complex (ctype, result, overflow);
              else
                result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
                                     ctype, result, overflow);
            }
          if (!update_call_from_tree (gsi, result))
            gimplify_and_update_call_from_tree (gsi, result);
          changed = true;
        }
    }

  return changed;
}
/* Worker for fold_stmt_1 dispatch to pattern based folding with
   gimple_simplify.

   Replaces *GSI with the simplification result in RCODE and OPS
   and the associated statements in *SEQ.  Does the replacement
   according to INPLACE and returns true if the operation succeeded.  */

static bool
replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
                                  code_helper rcode, tree *ops,
                                  gimple_seq *seq, bool inplace)
{
  gimple stmt = gsi_stmt (*gsi);

  /* Play safe and do not allow abnormals to be mentioned in
     newly created statements.  See also maybe_push_res_to_seq.  */
  if ((TREE_CODE (ops[0]) == SSA_NAME
       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0]))
      || (ops[1]
          && TREE_CODE (ops[1]) == SSA_NAME
          && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1]))
      || (ops[2]
          && TREE_CODE (ops[2]) == SSA_NAME
          && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2])))
    return false;

  if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
    {
      gcc_assert (rcode.is_tree_code ());
      if (TREE_CODE_CLASS ((enum tree_code)rcode) == tcc_comparison
          /* GIMPLE_CONDs condition may not throw.  */
          && (!flag_exceptions
              || !cfun->can_throw_non_call_exceptions
              || !operation_could_trap_p (rcode,
                                          FLOAT_TYPE_P (TREE_TYPE (ops[0])),
                                          false, NULL_TREE)))
        gimple_cond_set_condition (cond_stmt, rcode, ops[0], ops[1]);
      else if (rcode == SSA_NAME)
        gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
                                   build_zero_cst (TREE_TYPE (ops[0])));
      else if (rcode == INTEGER_CST)
        {
          if (integer_zerop (ops[0]))
            gimple_cond_make_false (cond_stmt);
          else
            gimple_cond_make_true (cond_stmt);
        }
      else if (!inplace)
        {
          tree res = maybe_push_res_to_seq (rcode, boolean_type_node,
                                            ops, seq);
          if (!res)
            return false;
          gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
                                     build_zero_cst (TREE_TYPE (res)));
        }
      else
        return false;
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "gimple_simplified to ");
          if (!gimple_seq_empty_p (*seq))
            print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
          print_gimple_stmt (dump_file, gsi_stmt (*gsi),
                             0, TDF_SLIM);
        }
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (is_gimple_assign (stmt)
           && rcode.is_tree_code ())
    {
      if (!inplace
          || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (rcode))
        {
          maybe_build_generic_op (rcode,
                                  TREE_TYPE (gimple_assign_lhs (stmt)),
                                  &ops[0], ops[1], ops[2]);
          gimple_assign_set_rhs_with_ops (gsi, rcode, ops[0], ops[1], ops[2]);
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "gimple_simplified to ");
              if (!gimple_seq_empty_p (*seq))
                print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
              print_gimple_stmt (dump_file, gsi_stmt (*gsi),
                                 0, TDF_SLIM);
            }
          gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
          return true;
        }
    }
  else if (!inplace)
    {
      if (gimple_has_lhs (stmt))
        {
          tree lhs = gimple_get_lhs (stmt);
          if (!maybe_push_res_to_seq (rcode, TREE_TYPE (lhs),
                                      ops, seq, lhs))
            return false;
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "gimple_simplified to ");
              print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
            }
          gsi_replace_with_seq_vops (gsi, *seq);
          return true;
        }
      else
        gcc_unreachable ();
    }

  return false;
}
/* Canonicalize MEM_REFs invariant address operand after propagation.  */

static bool
maybe_canonicalize_mem_ref_addr (tree *t)
{
  bool res = false;

  if (TREE_CODE (*t) == ADDR_EXPR)
    t = &TREE_OPERAND (*t, 0);

  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
     of invariant addresses into a SSA name MEM_REF address.  */
  if (TREE_CODE (*t) == MEM_REF
      || TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (*t, 0);
      if (TREE_CODE (addr) == ADDR_EXPR
          && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
              || handled_component_p (TREE_OPERAND (addr, 0))))
        {
          tree base;
          HOST_WIDE_INT coffset;
          base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
                                                &coffset);
          if (base)
            {
              TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
              TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
                                                      TREE_OPERAND (*t, 1),
                                                      size_int (coffset));
              res = true;
            }
        }

      gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
                           || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
    }

  /* Canonicalize back MEM_REFs to plain reference trees if the object
     accessed is a decl that has the same access semantics as the MEM_REF.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1))
      && MR_DEPENDENCE_CLIQUE (*t) == 0)
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
      if (/* Same volatile qualification.  */
          TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
          /* Same TBAA behavior with -fstrict-aliasing.  */
          && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
          && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
              == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
          /* Same alignment.  */
          && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
          /* We have to look out here to not drop a required conversion
             from the rhs to the lhs if *t appears on the lhs or vice-versa
             if it appears on the rhs.  Thus require strict type
             compatibility.  */
          && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
        {
          *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
          res = true;
        }
    }

  /* Canonicalize TARGET_MEM_REF in particular with respect to
     the indexes becoming constant.  */
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
        {
          *t = tem;
          res = true;
        }
    }

  return res;
}
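
/* Illustrative sketch (hypothetical decl foo with field bar): the first
   canonicalization turns
     MEM[(&foo.bar), 4]   into   MEM[(&foo), offset-of-bar + 4]
   while the second turns a degenerate  MEM[(&decl), 0]  whose type,
   alignment and volatility match DECL back into the plain reference
   decl.  */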
/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases.  */

static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
{
  bool changed = false;
  gimple stmt = gsi_stmt (*gsi);
  unsigned i;

  /* First do required canonicalization of [TARGET_]MEM_REF addresses
     after propagation.
     ???  This shouldn't be done in generic folding but in the
     propagation helpers which also know whether an address was
     propagated.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
        {
          tree *rhs = gimple_assign_rhs1_ptr (stmt);
          if ((REFERENCE_CLASS_P (*rhs)
               || TREE_CODE (*rhs) == ADDR_EXPR)
              && maybe_canonicalize_mem_ref_addr (rhs))
            changed = true;
          tree *lhs = gimple_assign_lhs_ptr (stmt);
          if (REFERENCE_CLASS_P (*lhs)
              && maybe_canonicalize_mem_ref_addr (lhs))
            changed = true;
        }
      break;
    case GIMPLE_CALL:
      {
        for (i = 0; i < gimple_call_num_args (stmt); ++i)
          {
            tree *arg = gimple_call_arg_ptr (stmt, i);
            if (REFERENCE_CLASS_P (*arg)
                && maybe_canonicalize_mem_ref_addr (arg))
              changed = true;
          }
        tree *lhs = gimple_call_lhs_ptr (stmt);
        if (*lhs
            && REFERENCE_CLASS_P (*lhs)
            && maybe_canonicalize_mem_ref_addr (lhs))
          changed = true;
        break;
      }
    case GIMPLE_ASM:
      {
        gasm *asm_stmt = as_a <gasm *> (stmt);
        for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
          {
            tree link = gimple_asm_output_op (asm_stmt, i);
            tree op = TREE_VALUE (link);
            if (REFERENCE_CLASS_P (op)
                && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
              changed = true;
          }
        for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
          {
            tree link = gimple_asm_input_op (asm_stmt, i);
            tree op = TREE_VALUE (link);
            if ((REFERENCE_CLASS_P (op)
                 || TREE_CODE (op) == ADDR_EXPR)
                && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
              changed = true;
          }
      }
      break;
    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
        {
          tree *val = gimple_debug_bind_get_value_ptr (stmt);
          if (*val
              && (REFERENCE_CLASS_P (*val)
                  || TREE_CODE (*val) == ADDR_EXPR)
              && maybe_canonicalize_mem_ref_addr (val))
            changed = true;
        }
      break;
    default:;
    }

  /* Dispatch to pattern-based folding.  */
  if (!inplace
      || is_gimple_assign (stmt)
      || gimple_code (stmt) == GIMPLE_COND)
    {
      gimple_seq seq = NULL;
      code_helper rcode;
      tree ops[3] = {};
      if (gimple_simplify (stmt, &rcode, ops, inplace ? NULL : &seq,
                           valueize, valueize))
        {
          if (replace_stmt_with_simplification (gsi, rcode, ops, &seq, inplace))
            changed = true;
          else
            gimple_seq_discard (seq);
        }
    }

  stmt = gsi_stmt (*gsi);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        unsigned old_num_ops = gimple_num_ops (stmt);
        enum tree_code subcode = gimple_assign_rhs_code (stmt);
        tree lhs = gimple_assign_lhs (stmt);
        tree new_rhs;
        /* First canonicalize operand order.  This avoids building new
           trees if this is the only thing fold would later do.  */
        if ((commutative_tree_code (subcode)
             || commutative_ternary_tree_code (subcode))
            && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
                                     gimple_assign_rhs2 (stmt), false))
          {
            tree tem = gimple_assign_rhs1 (stmt);
            gimple_assign_set_rhs1 (stmt, gimple_assign_rhs2 (stmt));
            gimple_assign_set_rhs2 (stmt, tem);
            changed = true;
          }
        new_rhs = fold_gimple_assign (gsi);
        if (new_rhs
            && !useless_type_conversion_p (TREE_TYPE (lhs),
                                           TREE_TYPE (new_rhs)))
          new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
        if (new_rhs
            && (!inplace
                || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
          {
            gimple_assign_set_rhs_from_tree (gsi, new_rhs);
            changed = true;
          }
        break;
      }

    case GIMPLE_COND:
      changed |= fold_gimple_cond (as_a <gcond *> (stmt));
      break;

    case GIMPLE_CALL:
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      {
        gasm *asm_stmt = as_a <gasm *> (stmt);
        size_t noutputs;
        const char **oconstraints;
        const char *constraint;
        bool allows_mem, allows_reg;

        noutputs = gimple_asm_noutputs (asm_stmt);
        oconstraints = XALLOCAVEC (const char *, noutputs);

        for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
          {
            tree link = gimple_asm_output_op (asm_stmt, i);
            tree op = TREE_VALUE (link);
            oconstraints[i]
              = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
            if (REFERENCE_CLASS_P (op)
                && (op = maybe_fold_reference (op, true)) != NULL_TREE)
              {
                TREE_VALUE (link) = op;
                changed = true;
              }
          }
        for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
          {
            tree link = gimple_asm_input_op (asm_stmt, i);
            tree op = TREE_VALUE (link);
            constraint
              = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
            parse_input_constraint (&constraint, 0, 0, noutputs, 0,
                                    oconstraints, &allows_mem, &allows_reg);
            if (REFERENCE_CLASS_P (op)
                && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
                   != NULL_TREE)
              {
                TREE_VALUE (link) = op;
                changed = true;
              }
          }
      }
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
        {
          tree val = gimple_debug_bind_get_value (stmt);
          if (val
              && REFERENCE_CLASS_P (val))
            {
              tree tem = maybe_fold_reference (val, false);
              if (tem)
                {
                  gimple_debug_bind_set_value (stmt, tem);
                  changed = true;
                }
            }
          else if (val
                   && TREE_CODE (val) == ADDR_EXPR)
            {
              tree ref = TREE_OPERAND (val, 0);
              tree tem = maybe_fold_reference (ref, false);
              if (tem)
                {
                  tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
                  gimple_debug_bind_set_value (stmt, tem);
                  changed = true;
                }
            }
        }
      break;

    default:;
    }

  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  */
  if (gimple_has_lhs (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
        {
          tree new_lhs = maybe_fold_reference (lhs, true);
          if (new_lhs)
            {
              gimple_set_lhs (stmt, new_lhs);
              changed = true;
            }
        }
    }

  return changed;
}
/* Valueization callback that ends up not following SSA edges.  */

tree
no_follow_ssa_edges (tree)
{
  return NULL_TREE;
}

/* Valueization callback that ends up following single-use SSA edges only.  */

tree
follow_single_use_edges (tree val)
{
  if (TREE_CODE (val) == SSA_NAME
      && !has_single_use (val))
    return NULL_TREE;
  return val;
}

/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.
   The statement pointed to by GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
}

bool
fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
{
  return fold_stmt_1 (gsi, false, valueize);
}

/* Perform the minimal folding on statement *GSI.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.
   The statement *GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt_inplace (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
  gcc_assert (gsi_stmt (*gsi) == stmt);
  return changed;
}
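
/* Typical use (a sketch; BB is a hypothetical basic block in scope):

     gimple_stmt_iterator gsi;
     for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
       fold_stmt (&gsi);

   Propagation engines that must keep the statement at its position use
   fold_stmt_inplace instead.  */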
/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
   if EXPR is null or we don't know how.
   If non-null, the result always has boolean type.  */

static tree
canonicalize_bool (tree expr, bool invert)
{
  if (!expr)
    return NULL_TREE;

  if (invert)
    {
      if (integer_nonzerop (expr))
        return boolean_false_node;
      else if (integer_zerop (expr))
        return boolean_true_node;
      else if (TREE_CODE (expr) == SSA_NAME)
        return fold_build2 (EQ_EXPR, boolean_type_node, expr,
                            build_int_cst (TREE_TYPE (expr), 0));
      else if (COMPARISON_CLASS_P (expr))
        return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
                            boolean_type_node,
                            TREE_OPERAND (expr, 0),
                            TREE_OPERAND (expr, 1));
      else
        return NULL_TREE;
    }
  else
    {
      if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
        return expr;
      if (integer_nonzerop (expr))
        return boolean_true_node;
      else if (integer_zerop (expr))
        return boolean_false_node;
      else if (TREE_CODE (expr) == SSA_NAME)
        return fold_build2 (NE_EXPR, boolean_type_node, expr,
                            build_int_cst (TREE_TYPE (expr), 0));
      else if (COMPARISON_CLASS_P (expr))
        return fold_build2 (TREE_CODE (expr),
                            boolean_type_node,
                            TREE_OPERAND (expr, 0),
                            TREE_OPERAND (expr, 1));
      else
        return NULL_TREE;
    }
}
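
/* Illustrative sketch: canonicalize_bool (x == y, true) yields the inverted
   comparison  x != y  with boolean type, while canonicalize_bool on a
   non-boolean SSA name N with INVERT false yields  N != 0.  */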
/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  */

static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
                        const_tree op1, const_tree op2)
{
  gimple s;

  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      if (operand_equal_p (expr, op1, 0))
        return ((code == NE_EXPR && integer_zerop (op2))
                || (code == EQ_EXPR && integer_nonzerop (op2)));
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
          && gimple_assign_rhs_code (s) == code
          && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
          && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
        return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
          && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
        {
          enum tree_code c = gimple_assign_rhs_code (s);
          if ((c == NE_EXPR && integer_zerop (op2))
              || (c == EQ_EXPR && integer_nonzerop (op2)))
            return same_bool_comparison_p (expr, c,
                                           gimple_assign_rhs1 (s),
                                           gimple_assign_rhs2 (s));
          if ((c == EQ_EXPR && integer_zerop (op2))
              || (c == NE_EXPR && integer_nonzerop (op2)))
            return same_bool_comparison_p (expr,
                                           invert_tree_comparison (c, false),
                                           gimple_assign_rhs1 (s),
                                           gimple_assign_rhs2 (s));
        }
    }
  return false;
}
/* Check to see if two boolean expressions OP1 and OP2 are logically
   equivalent.  */

static bool
same_bool_result_p (const_tree op1, const_tree op2)
{
  /* Simple cases first.  */
  if (operand_equal_p (op1, op2, 0))
    return true;

  /* Check the cases where at least one of the operands is a comparison.
     These are a bit smarter than operand_equal_p in that they apply some
     identities on SSA_NAMEs.  */
  if (COMPARISON_CLASS_P (op2)
      && same_bool_comparison_p (op1, TREE_CODE (op2),
                                 TREE_OPERAND (op2, 0),
                                 TREE_OPERAND (op2, 1)))
    return true;
  if (COMPARISON_CLASS_P (op1)
      && same_bool_comparison_p (op2, TREE_CODE (op1),
                                 TREE_OPERAND (op1, 0),
                                 TREE_OPERAND (op1, 1)))
    return true;

  return false;
}
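
/* Illustrative sketch: if the boolean SSA name _1 is defined by
   _1 = a < b, then same_bool_result_p (_1, a < b) returns true even
   though operand_equal_p on the two trees would not.  */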
/* Forward declarations for some mutually recursive functions.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
                   enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison (tree var, bool invert,
                         enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison_1 (gimple stmt,
                           enum tree_code code2, tree op2a, tree op2b);
static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
                  enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison (tree var, bool invert,
                        enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison_1 (gimple stmt,
                          enum tree_code code2, tree op2a, tree op2b);
/* Helper function for and_comparisons_1: try to simplify the AND of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of the VAR before doing the AND.
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison (tree var, bool invert,
                         enum tree_code code2, tree op2a, tree op2b)
{
  tree t;
  gimple stmt = SSA_NAME_DEF_STMT (var);

  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.  */
  if (invert)
    t = or_var_with_comparison_1 (stmt,
                                  invert_tree_comparison (code2, false),
                                  op2a, op2b);
  else
    t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
  return canonicalize_bool (t, invert);
}
/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison_1 (gimple stmt,
                           enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
          || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
        {
          true_test_var = op2a;
          if (var == true_test_var)
            return var;
        }
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
               || (code2 == NE_EXPR && integer_nonzerop (op2b)))
        {
          false_test_var = op2a;
          if (var == false_test_var)
            return boolean_false_node;
        }
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (innercode,
                                  gimple_assign_rhs1 (stmt),
                                  gimple_assign_rhs2 (stmt),
                                  code2, op2a, op2b);
      if (t)
        return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple s;
      tree t;
      tree partial = NULL_TREE;
      bool is_and = (innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
         of inner1/inner2:
         inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
         inner1 AND (inner1 OR inner2) => inner1
         !inner1 AND (inner1 AND inner2) => false
         !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
         Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
        return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
        return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
        return (is_and
                ? boolean_false_node
                : and_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
        return (is_and
                ? boolean_false_node
                : and_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
         Compute the first partial result, (inner1 AND (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
          && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
          && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
          && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
                                              gimple_assign_rhs1 (s),
                                              gimple_assign_rhs2 (s),
                                              code2, op2a, op2b)))
        {
          /* Handle the AND case, where we are reassociating:
             (inner1 AND inner2) AND (op2a code2 op2b)
             => (t AND inner2)
             If the partial result t is a constant, we win.  Otherwise
             continue on to try reassociating with the other inner test.  */
          if (is_and)
            {
              if (integer_onep (t))
                return inner2;
              else if (integer_zerop (t))
                return boolean_false_node;
            }

          /* Handle the OR case, where we are redistributing:
             (inner1 OR inner2) AND (op2a code2 op2b)
             => (t OR (inner2 AND (op2a code2 op2b)))  */
          else if (integer_onep (t))
            return boolean_true_node;

          /* Save partial result for later.  */
          partial = t;
        }

      /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
          && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
          && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
          && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
                                              gimple_assign_rhs1 (s),
                                              gimple_assign_rhs2 (s),
                                              code2, op2a, op2b)))
        {
          /* Handle the AND case, where we are reassociating:
             (inner1 AND inner2) AND (op2a code2 op2b)
             => (inner1 AND t)  */
          if (is_and)
            {
              if (integer_onep (t))
                return inner1;
              else if (integer_zerop (t))
                return boolean_false_node;
              /* If both are the same, we can apply the identity
                 (x AND x) == x.  */
              else if (partial && same_bool_result_p (t, partial))
                return t;
            }

          /* Handle the OR case. where we are redistributing:
             (inner1 OR inner2) AND (op2a code2 op2b)
             => (t OR (inner1 AND (op2a code2 op2b)))
             => (t OR partial)  */
          else
            {
              if (integer_onep (t))
                return boolean_true_node;
              else if (partial)
                {
                  /* We already got a simplification for the other
                     operand to the redistributed OR expression.  The
                     interesting case is when at least one is false.
                     Or, if both are the same, we can apply the identity
                     (x OR x) == x.  */
                  if (integer_zerop (partial))
                    return t;
                  else if (integer_zerop (t))
                    return partial;
                  else if (same_bool_result_p (t, partial))
                    return t;
                }
            }
        }
    }
  return NULL_TREE;
}
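
/* Illustrative sketch (hypothetical names): given  var = t1 & t2  where
   t1 = (a > 0), ANDing VAR with the comparison (a < 0) folds to false,
   because the partial result  (a > 0) && (a < 0)  is itself false and
   false AND anything is false.  */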
/* Try to simplify the AND of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) AND (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a == op1b), we should either be able to
	 return that or FALSE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	      else
		return boolean_false_node;
	    }
	}
      /* Likewise if the second comparison is an == comparison.  */
      else if (code2 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	      else
		return boolean_false_node;
	    }
	}

      /* Same business with inequality tests.  */
      else if (code1 == NE_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp != 0); break;
	    case NE_EXPR: val = (cmp == 0); break;
	    case LT_EXPR: val = (cmp >= 0); break;
	    case GT_EXPR: val = (cmp <= 0); break;
	    case LE_EXPR: val = (cmp > 0); break;
	    case GE_EXPR: val = (cmp < 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == NE_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp <= 0); break;
	    case GT_EXPR: val = (cmp >= 0); break;
	    case LE_EXPR: val = (cmp < 0); break;
	    case GE_EXPR: val = (cmp > 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Choose the more restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Likewise choose the more restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Check for singleton ranges.  */
      else if (cmp == 0
	       && ((code1 == LE_EXPR && code2 == GE_EXPR)
		   || (code1 == GE_EXPR && code2 == LE_EXPR)))
	return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);

      /* Check for disjoint ranges.  */
      else if (cmp <= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_false_node;
      else if (cmp >= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_false_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return and_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ANDed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     return that.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
			{
			  if (!result)
			    result = boolean_false_node;
			  else if (!integer_zerop (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = and_var_with_comparison (arg, invert, code2,
						      op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}
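
/* Illustrative examples of the constant-merging logic above, with
   hypothetical operands (x an SSA name, cstN INTEGER_CSTs; not part
   of GCC itself):

     and_comparisons_1 (EQ_EXPR, x, cst5, LE_EXPR, x, cst7)
       => x == 5		(5 also satisfies x <= 7, so the EQ wins)
     and_comparisons_1 (LT_EXPR, x, cst3, GT_EXPR, x, cst7)
       => boolean_false_node	(disjoint ranges)
     and_comparisons_1 (LE_EXPR, x, cst5, GE_EXPR, x, cst5)
       => x == 5		(singleton range)  */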
/* Try to simplify the AND of two comparisons, specified by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be simplified to a single expression (without requiring
   introducing more SSA variables to hold intermediate values),
   return the resulting tree.  Otherwise return NULL_TREE.
   If the result expression is non-null, it has boolean type.  */

tree
maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
			    enum tree_code code2, tree op2a, tree op2b)
{
  tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
  if (t)
    return t;
  else
    return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
}
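
/* Since and_comparisons_1 only follows SSA defs of its first comparison,
   this wrapper retries with the operands swapped when the first attempt
   fails.  A caller sketch with hypothetical trees a and b:

     tree t = maybe_fold_and_comparisons (NE_EXPR, a, integer_zero_node,
					  NE_EXPR, b, integer_zero_node);
     if (t)
       ... use the single combined comparison instead of the AND ...  */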
/* Helper function for or_comparisons_1:  try to simplify the OR of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of VAR before doing the OR.
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison (tree var, bool invert,
			enum tree_code code2, tree op2a, tree op2b)
{
  tree t;
  gimple stmt = SSA_NAME_DEF_STMT (var);

  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.  */
  if (invert)
    t = and_var_with_comparison_1 (stmt,
				   invert_tree_comparison (code2, false),
				   op2a, op2b);
  else
    t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
  return canonicalize_bool (t, invert);
}
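
/* For example (hypothetical operands): with INVERT set,
   !var | (x < 5) is rewritten via DeMorgan's law as !(var & !(x < 5)),
   i.e. and_var_with_comparison_1 is called with the inverted comparison
   GE_EXPR, and canonicalize_bool re-applies the outer negation to
   whatever simplification comes back.  */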
/* Try to simplify the OR of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison_1 (gimple stmt,
			  enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var OR (var != 0)) => true .  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_true_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = or_comparisons_1 (innercode,
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 code2, op2a, op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple s;
      tree t;
      tree partial = NULL_TREE;
      bool is_or = (innercode == BIT_IOR_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
	 inner1 OR (inner1 AND inner2) => inner1
	 !inner1 OR (inner1 OR inner2) => true
	 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
      */
      if (inner1 == true_test_var)
	return (is_or ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_or ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the OR across the inner tests.
	 Compute the first partial result, (inner1 OR (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (t OR inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_or)
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (integer_zerop (t))
		return inner2;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner2 OR (op2a code op2b)))  */
	  else if (integer_zerop (t))
	    return boolean_false_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (inner1 OR t)
	     => (t OR partial)  */
	  if (is_or)
	    {
	      if (integer_zerop (t))
		return inner1;
	      else if (integer_onep (t))
		return boolean_true_node;
	      /* If both are the same, we can apply the identity
		 (x OR x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner1 OR (op2a code2 op2b)))
	     => (t AND partial)  */
	  else
	    {
	      if (integer_zerop (t))
		return boolean_false_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed AND expression.  The
		     interesting case is when at least one is true.
		     Or, if both are the same, we can apply the identity
		     (x AND x) == x.  */
		  if (integer_onep (partial))
		    return t;
		  else if (integer_onep (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
/* Try to simplify the OR of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) OR (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a != op1b), we should either be able to
	 return that or TRUE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	    }
	}
      /* Likewise if the second comparison is a != comparison.  */
      else if (code2 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	    }
	}

      /* See if an equality test is redundant with the other comparison.  */
      else if (code1 == EQ_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == EQ_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Choose the less restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Likewise choose the less restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Check for singleton ranges.  */
      else if (cmp == 0
	       && ((code1 == LT_EXPR && code2 == GT_EXPR)
		   || (code1 == GT_EXPR && code2 == LT_EXPR)))
	return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);

      /* Check for less/greater pairs that don't restrict the range at all.  */
      else if (cmp >= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_true_node;
      else if (cmp <= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_true_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return or_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ORed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     return that.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
			{
			  if (!result)
			    result = boolean_true_node;
			  else if (!integer_onep (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = or_var_with_comparison (arg, invert, code2,
						     op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}
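
/* Illustrative examples of the OR merging above, with hypothetical
   operands (x an SSA name, cstN INTEGER_CSTs; not part of GCC itself):

     or_comparisons_1 (LT_EXPR, x, cst5, GT_EXPR, x, cst5)
       => x != 5		(only the single point 5 is excluded)
     or_comparisons_1 (LE_EXPR, x, cst7, GE_EXPR, x, cst3)
       => boolean_true_node	(the two ranges cover every value)  */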
/* Try to simplify the OR of two comparisons, specified by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be simplified to a single expression (without requiring
   introducing more SSA variables to hold intermediate values),
   return the resulting tree.  Otherwise return NULL_TREE.
   If the result expression is non-null, it has boolean type.  */

tree
maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
  if (t)
    return t;
  else
    return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
}
/* Fold STMT to a constant using VALUEIZE to valueize SSA names.

   Either NULL_TREE, a simplified but non-constant or a constant
   is returned.

   ???  This should go into a gimple-fold-inline.h file to be eventually
   privatized with the single valueize function used in the various TUs
   to avoid the indirect function call overhead.  */

tree
gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree),
				tree (*gvalueize) (tree))
{
  code_helper rcode;
  tree ops[3] = {};
  /* ???  The SSA propagators do not correctly deal with following SSA use-def
     edges if there are intermediate VARYING defs.  For this reason
     do not follow SSA edges here even though SCCVN can technically
     just deal fine with that.  */
  if (gimple_simplify (stmt, &rcode, ops, NULL, gvalueize, valueize)
      && rcode.is_tree_code ()
      && (TREE_CODE_LENGTH ((tree_code) rcode) == 0
	  || ((tree_code) rcode) == ADDR_EXPR)
      && is_gimple_val (ops[0]))
    {
      tree res = ops[0];
      if (dump_file && dump_flags & TDF_DETAILS)
	{
	  fprintf (dump_file, "Match-and-simplified ");
	  print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
	  fprintf (dump_file, " to ");
	  print_generic_expr (dump_file, res, 0);
	  fprintf (dump_file, "\n");
	}
      return res;
    }

  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	enum tree_code subcode = gimple_assign_rhs_code (stmt);

	switch (get_gimple_rhs_class (subcode))
	  {
	  case GIMPLE_SINGLE_RHS:
	    {
	      tree rhs = gimple_assign_rhs1 (stmt);
	      enum tree_code_class kind = TREE_CODE_CLASS (subcode);

	      if (TREE_CODE (rhs) == SSA_NAME)
		{
		  /* If the RHS is an SSA_NAME, return its known constant value,
		     if any.  */
		  return (*valueize) (rhs);
		}
	      /* Handle propagating invariant addresses into address
		 operations.  */
	      else if (TREE_CODE (rhs) == ADDR_EXPR
		       && !is_gimple_min_invariant (rhs))
		{
		  HOST_WIDE_INT offset = 0;
		  tree base;
		  base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
							  &offset,
							  valueize);
		  if (base
		      && (CONSTANT_CLASS_P (base)
			  || decl_address_invariant_p (base)))
		    return build_invariant_address (TREE_TYPE (rhs),
						    base, offset);
		}
	      else if (TREE_CODE (rhs) == CONSTRUCTOR
		       && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		       && (CONSTRUCTOR_NELTS (rhs)
			   == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
		{
		  unsigned i;
		  tree val, *vec;

		  vec = XALLOCAVEC (tree,
				    TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
		  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
		    {
		      val = (*valueize) (val);
		      if (TREE_CODE (val) == INTEGER_CST
			  || TREE_CODE (val) == REAL_CST
			  || TREE_CODE (val) == FIXED_CST)
			vec[i] = val;
		      else
			return NULL_TREE;
		    }

		  return build_vector (TREE_TYPE (rhs), vec);
		}
	      if (subcode == OBJ_TYPE_REF)
		{
		  tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
		  /* If callee is constant, we can fold away the wrapper.  */
		  if (is_gimple_min_invariant (val))
		    return val;
		}

	      if (kind == tcc_reference)
		{
		  if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
		       || TREE_CODE (rhs) == REALPART_EXPR
		       || TREE_CODE (rhs) == IMAGPART_EXPR)
		      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      return fold_unary_loc (EXPR_LOCATION (rhs),
					     TREE_CODE (rhs),
					     TREE_TYPE (rhs), val);
		    }
		  else if (TREE_CODE (rhs) == BIT_FIELD_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      return fold_ternary_loc (EXPR_LOCATION (rhs),
					       TREE_CODE (rhs),
					       TREE_TYPE (rhs), val,
					       TREE_OPERAND (rhs, 1),
					       TREE_OPERAND (rhs, 2));
		    }
		  else if (TREE_CODE (rhs) == MEM_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      if (TREE_CODE (val) == ADDR_EXPR
			  && is_gimple_min_invariant (val))
			{
			  tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
						  unshare_expr (val),
						  TREE_OPERAND (rhs, 1));
			  if (tem)
			    rhs = tem;
			}
		    }
		  return fold_const_aggregate_ref_1 (rhs, valueize);
		}
	      else if (kind == tcc_declaration)
		return get_symbol_constant_value (rhs);
	      return rhs;
	    }

	  case GIMPLE_UNARY_RHS:
	    return NULL_TREE;

	  case GIMPLE_BINARY_RHS:
	    {
	      /* Handle binary operators that can appear in GIMPLE form.  */
	      tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
	      tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));

	      /* Translate &x + CST into an invariant form suitable for
		 further propagation.  */
	      if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
		  && TREE_CODE (op0) == ADDR_EXPR
		  && TREE_CODE (op1) == INTEGER_CST)
		{
		  tree off = fold_convert (ptr_type_node, op1);
		  return build_fold_addr_expr_loc
			   (loc,
			    fold_build2 (MEM_REF,
					 TREE_TYPE (TREE_TYPE (op0)),
					 unshare_expr (op0), off));
		}

	      return fold_binary_loc (loc, subcode,
				      gimple_expr_type (stmt), op0, op1);
	    }

	  case GIMPLE_TERNARY_RHS:
	    {
	      /* Handle ternary operators that can appear in GIMPLE form.  */
	      tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
	      tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
	      tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));

	      /* Fold embedded expressions in ternary codes.  */
	      if ((subcode == COND_EXPR
		   || subcode == VEC_COND_EXPR)
		  && COMPARISON_CLASS_P (op0))
		{
		  tree op00 = (*valueize) (TREE_OPERAND (op0, 0));
		  tree op01 = (*valueize) (TREE_OPERAND (op0, 1));
		  tree tem = fold_binary_loc (loc, TREE_CODE (op0),
					      TREE_TYPE (op0), op00, op01);
		  if (tem)
		    op0 = tem;
		}

	      return fold_ternary_loc (loc, subcode,
				       gimple_expr_type (stmt), op0, op1, op2);
	    }

	  default:
	    gcc_unreachable ();
	  }
      }

    case GIMPLE_CALL:
      {
	tree fn;
	gcall *call_stmt = as_a <gcall *> (stmt);

	if (gimple_call_internal_p (stmt))
	  {
	    enum tree_code subcode = ERROR_MARK;
	    switch (gimple_call_internal_fn (stmt))
	      {
	      case IFN_UBSAN_CHECK_ADD:
		subcode = PLUS_EXPR;
		break;
	      case IFN_UBSAN_CHECK_SUB:
		subcode = MINUS_EXPR;
		break;
	      case IFN_UBSAN_CHECK_MUL:
		subcode = MULT_EXPR;
		break;
	      default:
		return NULL_TREE;
	      }
	    tree arg0 = gimple_call_arg (stmt, 0);
	    tree arg1 = gimple_call_arg (stmt, 1);
	    tree op0 = (*valueize) (arg0);
	    tree op1 = (*valueize) (arg1);

	    if (TREE_CODE (op0) != INTEGER_CST
		|| TREE_CODE (op1) != INTEGER_CST)
	      {
		switch (subcode)
		  {
		  case MULT_EXPR:
		    /* x * 0 = 0 * x = 0 without overflow.  */
		    if (integer_zerop (op0) || integer_zerop (op1))
		      return build_zero_cst (TREE_TYPE (arg0));
		    break;
		  case MINUS_EXPR:
		    /* y - y = 0 without overflow.  */
		    if (operand_equal_p (op0, op1, 0))
		      return build_zero_cst (TREE_TYPE (arg0));
		    break;
		  default:
		    break;
		  }
	      }
	    tree res
	      = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
	    if (res
		&& TREE_CODE (res) == INTEGER_CST
		&& !TREE_OVERFLOW (res))
	      return res;
	    return NULL_TREE;
	  }

	fn = (*valueize) (gimple_call_fn (stmt));
	if (TREE_CODE (fn) == ADDR_EXPR
	    && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
	    && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
	    && gimple_builtin_call_types_compatible_p (stmt,
						       TREE_OPERAND (fn, 0)))
	  {
	    tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
	    tree retval;
	    unsigned i;
	    for (i = 0; i < gimple_call_num_args (stmt); ++i)
	      args[i] = (*valueize) (gimple_call_arg (stmt, i));
	    retval = fold_builtin_call_array (loc,
					      gimple_call_return_type (call_stmt),
					      fn, gimple_call_num_args (stmt),
					      args);
	    if (retval)
	      {
		/* fold_call_expr wraps the result inside a NOP_EXPR.  */
		STRIP_NOPS (retval);
		retval = fold_convert (gimple_call_return_type (call_stmt),
				       retval);
	      }
	    return retval;
	  }
	return NULL_TREE;
      }

    default:
      return NULL_TREE;
    }
}
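
/* As a hypothetical sketch of the POINTER_PLUS_EXPR case above: for a
   statement p_1 = &a + 4 whose operands valueize to those constants,
   the function returns &MEM[(char *)&a + 4B], an invariant address
   that later propagation can consume directly.  */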
/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
   Returns NULL_TREE if folding to a constant is not possible, otherwise
   returns a constant according to is_gimple_min_invariant.  */

tree
gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
{
  tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
  if (res && is_gimple_min_invariant (res))
    return res;
  return NULL_TREE;
}
/* The following set of functions are supposed to fold references using
   their constant initializers.  */

/* See if we can find a constructor defining the value of BASE.
   When we know the constructor with constant offset (such as
   base is array[40] and we do know constructor of array), then
   BIT_OFFSET is adjusted accordingly.

   As a special case, return error_mark_node when constructor
   is not explicitly available, but it is known to be zero
   such as 'static const int a;'.  */

static tree
get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
		      tree (*valueize)(tree))
{
  HOST_WIDE_INT bit_offset2, size, max_size;
  if (TREE_CODE (base) == MEM_REF)
    {
      if (!integer_zerop (TREE_OPERAND (base, 1)))
	{
	  if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
	    return NULL_TREE;
	  *bit_offset += (mem_ref_offset (base).to_short_addr ()
			  * BITS_PER_UNIT);
	}

      if (valueize
	  && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
	base = valueize (TREE_OPERAND (base, 0));
      if (!base || TREE_CODE (base) != ADDR_EXPR)
	return NULL_TREE;
      base = TREE_OPERAND (base, 0);
    }

  /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
     DECL_INITIAL.  If BASE is a nested reference into another
     ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
     the inner reference.  */
  switch (TREE_CODE (base))
    {
    case VAR_DECL:
    case CONST_DECL:
      {
	tree init = ctor_for_folding (base);

	/* Our semantic is exact opposite of ctor_for_folding;
	   NULL means unknown, while error_mark_node is 0.  */
	if (init == error_mark_node)
	  return NULL_TREE;
	if (!init)
	  return error_mark_node;
	return init;
      }

    case ARRAY_REF:
    case COMPONENT_REF:
      base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
      if (max_size == -1 || size != max_size)
	return NULL_TREE;
      *bit_offset += bit_offset2;
      return get_base_constructor (base, bit_offset, valueize);

    case STRING_CST:
    case CONSTRUCTOR:
      return base;

    default:
      return NULL_TREE;
    }
}
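
/* For example (hypothetical declarations): for
     static const int a[2] = { 1, 2 };
   a MEM_REF base &a with a byte offset of 4 yields the CONSTRUCTOR of
   `a' and bumps *BIT_OFFSET by 32, while `static const int b;' with no
   explicit initializer yields error_mark_node, meaning "known zero".  */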
/* CTOR is CONSTRUCTOR of an array type.  Fold reference of type TYPE and size
   SIZE to the memory at bit OFFSET.  */

static tree
fold_array_ctor_reference (tree type, tree ctor,
			   unsigned HOST_WIDE_INT offset,
			   unsigned HOST_WIDE_INT size,
			   tree from_decl)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;
  offset_int low_bound;
  offset_int elt_size;
  offset_int index, max_index;
  offset_int access_index;
  tree domain_type = NULL_TREE, index_type = NULL_TREE;
  HOST_WIDE_INT inner_offset;

  /* Compute low bound and elt size.  */
  if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
    domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    {
      /* Static constructors for variably sized objects make no sense.  */
      gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
      index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
      low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
    }
  else
    low_bound = 0;
  /* Static constructors for variably sized objects make no sense.  */
  gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
	      == INTEGER_CST);
  elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));

  /* We can handle only constantly sized accesses that are known to not
     be larger than size of array element.  */
  if (!TYPE_SIZE_UNIT (type)
      || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
      || wi::lts_p (elt_size, wi::to_offset (TYPE_SIZE_UNIT (type))))
    return NULL_TREE;

  /* Compute the array index we look for.  */
  access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
				 elt_size);
  access_index += low_bound;
  if (index_type)
    access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
			    TYPE_SIGN (index_type));

  /* And offset within the access.  */
  inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);

  /* See if the array field is large enough to span whole access.  We do not
     care to fold accesses spanning multiple array indexes.  */
  if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
    return NULL_TREE;

  index = low_bound - 1;
  if (index_type)
    index = wi::ext (index, TYPE_PRECISION (index_type),
		     TYPE_SIGN (index_type));

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
    {
      /* Array constructor might explicitly set index, or specify range
	 or leave index NULL meaning that it is next index after previous
	 one.  */
      if (cfield)
	{
	  if (TREE_CODE (cfield) == INTEGER_CST)
	    max_index = index = wi::to_offset (cfield);
	  else
	    {
	      gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
	      index = wi::to_offset (TREE_OPERAND (cfield, 0));
	      max_index = wi::to_offset (TREE_OPERAND (cfield, 1));
	    }
	}
      else
	{
	  index += 1;
	  if (index_type)
	    index = wi::ext (index, TYPE_PRECISION (index_type),
			     TYPE_SIGN (index_type));
	  max_index = index;
	}

      /* Do we have match?  */
      if (wi::cmpu (access_index, index) >= 0
	  && wi::cmpu (access_index, max_index) <= 0)
	return fold_ctor_reference (type, cval, inner_offset, size,
				    from_decl);
    }
  /* When memory is not explicitly mentioned in constructor,
     it is 0 (or out of range).  */
  return build_zero_cst (type);
}
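
/* A worked example of the index computation above (hypothetical array):
   for int a[4] with elt_size = 4 bytes, an access at bit OFFSET = 96
   with SIZE = 32 gives access_index = 96/8/4 = 3 and inner_offset = 0,
   so the initializer element for a[3] is returned (or zero if the
   constructor does not mention index 3).  */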
/* CTOR is CONSTRUCTOR of an aggregate or vector.
   Fold reference of type TYPE and size SIZE to the memory at bit OFFSET.  */

static tree
fold_nonarray_ctor_reference (tree type, tree ctor,
			      unsigned HOST_WIDE_INT offset,
			      unsigned HOST_WIDE_INT size,
			      tree from_decl)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
			    cval)
    {
      tree byte_offset = DECL_FIELD_OFFSET (cfield);
      tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
      tree field_size = DECL_SIZE (cfield);
      offset_int bitoffset;
      offset_int bitoffset_end, access_end;

      /* Variable sized objects in static constructors make no sense,
	 but field_size can be NULL for flexible array members.  */
      gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
		  && TREE_CODE (byte_offset) == INTEGER_CST
		  && (field_size != NULL_TREE
		      ? TREE_CODE (field_size) == INTEGER_CST
		      : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));

      /* Compute bit offset of the field.  */
      bitoffset = (wi::to_offset (field_offset)
		   + wi::lshift (wi::to_offset (byte_offset),
				 LOG2_BITS_PER_UNIT));
      /* Compute bit offset where the field ends.  */
      if (field_size != NULL_TREE)
	bitoffset_end = bitoffset + wi::to_offset (field_size);
      else
	bitoffset_end = 0;

      access_end = offset_int (offset) + size;

      /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
	 [BITOFFSET, BITOFFSET_END)?  */
      if (wi::cmps (access_end, bitoffset) > 0
	  && (field_size == NULL_TREE
	      || wi::lts_p (offset, bitoffset_end)))
	{
	  offset_int inner_offset = offset_int (offset) - bitoffset;
	  /* We do have overlap.  Now see if the field is large enough to
	     cover the access.  Give up for accesses spanning multiple
	     fields.  */
	  if (wi::cmps (access_end, bitoffset_end) > 0)
	    return NULL_TREE;
	  if (wi::lts_p (offset, bitoffset))
	    return NULL_TREE;
	  return fold_ctor_reference (type, cval,
				      inner_offset.to_uhwi (), size,
				      from_decl);
	}
    }
  /* When memory is not explicitly mentioned in constructor, it is 0.  */
  return build_zero_cst (type);
}
/* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
   to the memory at bit OFFSET.  */

tree
fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
		     unsigned HOST_WIDE_INT size, tree from_decl)
{
  tree ret;

  /* We found the field with exact match.  */
  if (useless_type_conversion_p (type, TREE_TYPE (ctor))
      && !offset)
    return canonicalize_constructor_val (unshare_expr (ctor), from_decl);

  /* We are at the end of walk, see if we can view convert the
     result.  */
  if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
      /* VIEW_CONVERT_EXPR is defined only for matching sizes.  */
      && !compare_tree_int (TYPE_SIZE (type), size)
      && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
    {
      ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
      ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
      if (ret)
	STRIP_USELESS_TYPE_CONVERSION (ret);
      return ret;
    }
  /* For constants and byte-aligned/sized reads try to go through
     native_encode/interpret.  */
  if (CONSTANT_CLASS_P (ctor)
      && BITS_PER_UNIT == 8
      && offset % BITS_PER_UNIT == 0
      && size % BITS_PER_UNIT == 0
      && size <= MAX_BITSIZE_MODE_ANY_MODE)
    {
      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
      if (native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
			      offset / BITS_PER_UNIT) > 0)
	return native_interpret_expr (type, buf, size / BITS_PER_UNIT);
    }
  if (TREE_CODE (ctor) == CONSTRUCTOR)
    {
      if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
	  || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
	return fold_array_ctor_reference (type, ctor, offset, size,
					  from_decl);
      else
	return fold_nonarray_ctor_reference (type, ctor, offset, size,
					     from_decl);
    }

  return NULL_TREE;
}
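
/* For instance (hypothetical values): reading 8 bits at bit offset 8
   from an INTEGER_CST initializer such as 0x12345678 goes through the
   native_encode_expr/native_interpret_expr pair above and yields the
   byte at offset 1 in target byte order.  */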
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates valueizing SSA
   names using VALUEIZE.  Return NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
{
  tree ctor, idx, base;
  HOST_WIDE_INT offset, size, max_size;
  tree tem;

  if (TREE_THIS_VOLATILE (t))
    return NULL_TREE;

  if (DECL_P (t))
    return get_symbol_constant_value (t);

  tem = fold_read_from_constant_string (t);
  if (tem)
    return tem;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Constant indexes are handled well by get_base_constructor.
	 Only special case variable offsets.
	 FIXME: This code can't handle nested references with variable indexes
	 (they will be handled only by iteration of ccp).  Perhaps we can bring
	 get_ref_base_and_extent here and make it use a valueize callback.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
	  && valueize
	  && (idx = (*valueize) (TREE_OPERAND (t, 1)))
	  && TREE_CODE (idx) == INTEGER_CST)
	{
	  tree low_bound, unit_size;

	  /* If the resulting bit-offset is constant, track it.  */
	  if ((low_bound = array_ref_low_bound (t),
	       TREE_CODE (low_bound) == INTEGER_CST)
	      && (unit_size = array_ref_element_size (t),
		  tree_fits_uhwi_p (unit_size)))
	    {
	      offset_int woffset
		= wi::sext (wi::to_offset (idx) - wi::to_offset (low_bound),
			    TYPE_PRECISION (TREE_TYPE (idx)));

	      if (wi::fits_shwi_p (woffset))
		{
		  offset = woffset.to_shwi ();
		  /* TODO: This code seems wrong, multiply then check
		     to see if it fits.  */
		  offset *= tree_to_uhwi (unit_size);
		  offset *= BITS_PER_UNIT;

		  base = TREE_OPERAND (t, 0);
		  ctor = get_base_constructor (base, &offset, valueize);
		  /* Empty constructor.  Always fold to 0.  */
		  if (ctor == error_mark_node)
		    return build_zero_cst (TREE_TYPE (t));
		  /* Out of bound array access.  Value is undefined,
		     but don't fold.  */
		  if (offset < 0)
		    return NULL_TREE;
		  /* We cannot determine ctor.  */
		  if (!ctor)
		    return NULL_TREE;
		  return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
					      tree_to_uhwi (unit_size)
					      * BITS_PER_UNIT,
					      base);
		}
	    }
	}
      /* Fallthru.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case TARGET_MEM_REF:
    case MEM_REF:
      base = get_ref_base_and_extent (t, &offset, &size, &max_size);
      ctor = get_base_constructor (base, &offset, valueize);

      /* Empty constructor.  Always fold to 0.  */
      if (ctor == error_mark_node)
	return build_zero_cst (TREE_TYPE (t));
      /* We do not know precise address.  */
      if (max_size == -1 || max_size != size)
	return NULL_TREE;
      /* We cannot determine ctor.  */
      if (!ctor)
	return NULL_TREE;

      /* Out of bound array access.  Value is undefined, but don't fold.  */
      if (offset < 0)
	return NULL_TREE;

      return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
				  base);

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
	if (c && TREE_CODE (c) == COMPLEX_CST)
	  return fold_build1_loc (EXPR_LOCATION (t),
				  TREE_CODE (t), TREE_TYPE (t), c);
	break;
      }

    default:
      break;
    }

  return NULL_TREE;
}

tree
fold_const_aggregate_ref (tree t)
{
  return fold_const_aggregate_ref_1 (t, NULL);
}
/* Lookup virtual method with index TOKEN in a virtual table V
   at OFFSET.
   Set CAN_REFER if non-NULL to false if method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by a non-C++-produced symbol).  Otherwise just return NULL in that case.  */

tree
gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
				   tree v,
				   unsigned HOST_WIDE_INT offset,
				   bool *can_refer)
{
  tree vtable = v, init, fn;
  unsigned HOST_WIDE_INT size;
  unsigned HOST_WIDE_INT elt_size, access_index;
  tree domain_type;

  if (can_refer)
    *can_refer = true;

  /* First of all double check we have virtual table.  */
  if (TREE_CODE (v) != VAR_DECL
      || !DECL_VIRTUAL_P (v))
    {
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }

  init = ctor_for_folding (v);

  /* The virtual tables should always be born with constructors
     and we always should assume that they are available for
     folding.  At the moment we do not stream them in all cases,
     but it should never happen that the ctor seems unreachable.  */
  gcc_assert (init);
  if (init == error_mark_node)
    {
      gcc_assert (in_lto_p);
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }
  gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
  size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
  offset *= BITS_PER_UNIT;
  offset += token * size;

  /* Lookup the value in the constructor that is assumed to be array.
     This is equivalent to
     fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
			       offset, size, NULL);
     but in a constant time.  We expect that frontend produced a simple
     array without indexed initializers.  */

  gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
  domain_type = TYPE_DOMAIN (TREE_TYPE (init));
  gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
  elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));

  access_index = offset / BITS_PER_UNIT / elt_size;
  gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);

  /* This code makes an assumption that there are no
     indexed fields produced by the C++ FE, so we can directly index the
     array.  */
  if (access_index < CONSTRUCTOR_NELTS (init))
    {
      fn = CONSTRUCTOR_ELT (init, access_index)->value;
      gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
      STRIP_NOPS (fn);
    }
  else
    fn = NULL;

  /* For type inconsistent program we may end up looking up virtual method
     in virtual table that does not contain TOKEN entries.  We may overrun
     the virtual table and pick up a constant or RTTI info pointer.
     In any case the call is undefined.  */
  if (!fn
      || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
      || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
    fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
  else
    {
      fn = TREE_OPERAND (fn, 0);

      /* When cgraph node is missing and function is not public, we cannot
	 devirtualize.  This can happen in WHOPR when the actual method
	 ends up in other partition, because we found devirtualization
	 possibility too late.  */
      if (!can_refer_decl_in_current_unit_p (fn, vtable))
	{
	  if (can_refer)
	    {
	      *can_refer = false;
	      return fn;
	    }
	  return NULL_TREE;
	}
    }

  /* Make sure we create a cgraph node for functions we'll reference.
     They can be non-existent if the reference comes from an entry
     of an external vtable for example.  */
  cgraph_node::get_create (fn);

  return fn;
}
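
/* A hypothetical sketch of the lookup arithmetic above: with 64-bit
   vtable slots (size = 64 bits, elt_size = 8 bytes), TOKEN = 2 and a
   byte OFFSET of 16 give access_index = (16*8 + 2*64) / 8 / 8 = 4, so
   the fifth constructor element is taken as the method address.  */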
/* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
   is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
   KNOWN_BINFO carries the binfo describing the true type of
   OBJ_TYPE_REF_OBJECT(REF).
   Set CAN_REFER if non-NULL to false if method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by a non-C++-produced symbol).  Otherwise just return NULL in that case.  */

tree
gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
				  bool *can_refer)
{
  unsigned HOST_WIDE_INT offset;
  tree v;

  v = BINFO_VTABLE (known_binfo);
  /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone.  */
  if (!v)
    return NULL_TREE;

  if (!vtable_pointer_value_to_vtable (v, &v, &offset))
    {
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }
  return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
}
/* Return true iff VAL is a gimple expression that is known to be
   non-negative.  Restricted to floating-point inputs.  */

bool
gimple_val_nonnegative_real_p (tree val)
{
  gimple def_stmt;

  gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));

  /* Use existing logic for non-gimple trees.  */
  if (tree_expr_nonnegative_p (val))
    return true;

  if (TREE_CODE (val) != SSA_NAME)
    return false;

  /* Currently we look only at the immediately defining statement
     to make this determination, since recursion on defining
     statements of operands can lead to quadratic behavior in the
     worst case.  This is expected to catch almost all occurrences
     in practice.  It would be possible to implement limited-depth
     recursion if important cases are lost.  Alternatively, passes
     that need this information (such as the pow/powi lowering code
     in the cse_sincos pass) could be revised to provide it through
     dataflow propagation.  */

  def_stmt = SSA_NAME_DEF_STMT (val);

  if (is_gimple_assign (def_stmt))
    {
      tree op0, op1;

      /* See fold-const.c:tree_expr_nonnegative_p for additional
	 cases that could be handled with recursion.  */

      switch (gimple_assign_rhs_code (def_stmt))
	{
	case ABS_EXPR:
	  /* Always true for floating-point operands.  */
	  return true;

	case MULT_EXPR:
	  /* True if the two operands are identical (since we are
	     restricted to floating-point inputs).  */
	  op0 = gimple_assign_rhs1 (def_stmt);
	  op1 = gimple_assign_rhs2 (def_stmt);

	  if (op0 == op1
	      || operand_equal_p (op0, op1, 0))
	    return true;

	default:
	  break;
	}
    }
  else if (is_gimple_call (def_stmt))
    {
      tree fndecl = gimple_call_fndecl (def_stmt);
      if (fndecl
	  && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	{
	  tree arg1;

	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_ACOS):
	    CASE_FLT_FN (BUILT_IN_ACOSH):
	    CASE_FLT_FN (BUILT_IN_CABS):
	    CASE_FLT_FN (BUILT_IN_COSH):
	    CASE_FLT_FN (BUILT_IN_ERFC):
	    CASE_FLT_FN (BUILT_IN_EXP):
	    CASE_FLT_FN (BUILT_IN_EXP10):
	    CASE_FLT_FN (BUILT_IN_EXP2):
	    CASE_FLT_FN (BUILT_IN_FABS):
	    CASE_FLT_FN (BUILT_IN_FDIM):
	    CASE_FLT_FN (BUILT_IN_HYPOT):
	    CASE_FLT_FN (BUILT_IN_POW10):
	      return true;

	    CASE_FLT_FN (BUILT_IN_SQRT):
	      /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
		 nonnegative inputs.  */
	      if (!HONOR_SIGNED_ZEROS (val))
		return true;

	      break;

	    CASE_FLT_FN (BUILT_IN_POWI):
	      /* True if the second argument is an even integer.  */
	      arg1 = gimple_call_arg (def_stmt, 1);

	      if (TREE_CODE (arg1) == INTEGER_CST
		  && (TREE_INT_CST_LOW (arg1) & 1) == 0)
		return true;

	      break;

	    CASE_FLT_FN (BUILT_IN_POW):
	      /* True if the second argument is an even integer-valued
		 real.  */
	      arg1 = gimple_call_arg (def_stmt, 1);

	      if (TREE_CODE (arg1) == REAL_CST)
		{
		  REAL_VALUE_TYPE c;
		  HOST_WIDE_INT n;

		  c = TREE_REAL_CST (arg1);
		  n = real_to_integer (&c);

		  if ((n & 1) == 0)
		    {
		      REAL_VALUE_TYPE cint;
		      real_from_integer (&cint, VOIDmode, n, SIGNED);
		      if (real_identical (&c, &cint))
			return true;
		    }
		}

	      break;

	    default:
	      break;
	    }
	}
    }

  return false;
}
/* Given a pointer value OP0, return a simplified version of an
   indirection through OP0, or NULL_TREE if no simplification is
   possible.  Note that the resulting type may be different from
   the type pointed to in the sense that it is still compatible
   from the langhooks point of view.  */

tree
gimple_fold_indirect_ref (tree t)
{
  tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
  tree sub = t;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (useless_type_conversion_p (type, optype))
	return op;

      /* *(foo *)&fooarray => fooarray[0] */
      if (TREE_CODE (optype) == ARRAY_TYPE
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
	  && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  if (TREE_CODE (min_val) == INTEGER_CST)
	    return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
	}
    }

  /* *(p + CST) -> ...  */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree addr = TREE_OPERAND (sub, 0);
      tree off = TREE_OPERAND (sub, 1);
      tree addrtype;

      STRIP_NOPS (addr);
      addrtype = TREE_TYPE (addr);

      /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
	  && tree_fits_uhwi_p (off))
	{
	  unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
	  tree part_width = TYPE_SIZE (type);
	  unsigned HOST_WIDE_INT part_widthi
	    = tree_to_shwi (part_width) / BITS_PER_UNIT;
	  unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
	  tree index = bitsize_int (indexi);
	  if (offset / part_widthi
	      < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
	    return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
				part_width, index);
	}

      /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
	{
	  tree size = TYPE_SIZE_UNIT (type);
	  if (tree_int_cst_equal (size, off))
	    return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
	}

      /* *(p + CST) -> MEM_REF <p, CST>.  */
      if (TREE_CODE (addr) != ADDR_EXPR
	  || DECL_P (TREE_OPERAND (addr, 0)))
	return fold_build2 (MEM_REF, type,
			    addr,
			    wide_int_to_tree (ptype, off));
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
      && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      tree osub = sub;
      sub = gimple_fold_indirect_ref (sub);
      if (! sub)
	sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      if (TREE_CODE (min_val) == INTEGER_CST)
	return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
/* Return true if CODE is an operation that when operating on signed
   integer types involves undefined behavior on overflow and the
   operation can be expressed with unsigned arithmetic.  */

bool
arith_code_with_undefined_signed_overflow (tree_code code)
{
  switch (code)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case NEGATE_EXPR:
    case POINTER_PLUS_EXPR:
      return true;
    default:
      return false;
    }
}
/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
   operation that can be transformed to unsigned arithmetic by converting
   its operand, carrying out the operation in the corresponding unsigned
   type and converting the result back to the original type.

   Returns a sequence of statements that replace STMT and also contain
   a modified form of STMT itself.  */

gimple_seq
rewrite_to_defined_overflow (gimple stmt)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "rewriting stmt with undefined signed "
	       "overflow ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
    }

  tree lhs = gimple_assign_lhs (stmt);
  tree type = unsigned_type_for (TREE_TYPE (lhs));
  gimple_seq stmts = NULL;
  for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
    {
      gimple_seq stmts2 = NULL;
      gimple_set_op (stmt, i,
		     force_gimple_operand (fold_convert (type,
							 gimple_op (stmt, i)),
					   &stmts2, true, NULL_TREE));
      gimple_seq_add_seq (&stmts, stmts2);
    }
  gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
  gimple_seq_add_stmt (&stmts, stmt);
  gimple cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
  gimple_seq_add_stmt (&stmts, cvt);

  return stmts;
}
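
/* For example (hypothetical SSA names): the signed statement
     i_2 = i_1 + 1;
   is rewritten into the sequence
     i.u_3 = (unsigned int) i_1;
     i.u_4 = i.u_3 + 1;
     i_2 = (int) i.u_4;
   where the addition is carried out in unsigned arithmetic and thus
   has defined wrap-around behavior.  */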
/* The valueization hook we use for the gimple_build API simplification.
   This makes us match fold_buildN behavior by only combining with
   statements in the sequence(s) we are currently building.  */

static tree
gimple_build_valueize (tree op)
{
  if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
    return op;
  return NULL_TREE;
}
/* Build the expression CODE OP0 of type TYPE with location LOC,
   simplifying it first if possible.  Returns the built
   expression value and appends statements possibly defining it
   to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum tree_code code, tree type, tree op0)
{
  tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
  if (!res)
    {
      if (gimple_in_ssa_p (cfun))
	res = make_ssa_name (type);
      else
	res = create_tmp_reg (type);
      gimple stmt;
      if (code == REALPART_EXPR
	  || code == IMAGPART_EXPR
	  || code == VIEW_CONVERT_EXPR)
	stmt = gimple_build_assign (res, code, build1 (code, type, op0));
      else
	stmt = gimple_build_assign (res, code, op0);
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
/* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
   simplifying it first if possible.  Returns the built
   expression value and appends statements possibly defining it
   to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum tree_code code, tree type, tree op0, tree op1)
{
  tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
  if (!res)
    {
      if (gimple_in_ssa_p (cfun))
	res = make_ssa_name (type);
      else
	res = create_tmp_reg (type);
      gimple stmt = gimple_build_assign (res, code, op0, op1);
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
/* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
   simplifying it first if possible.  Returns the built
   expression value and appends statements possibly defining it
   to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum tree_code code, tree type, tree op0, tree op1, tree op2)
{
  tree res = gimple_simplify (code, type, op0, op1, op2,
			      seq, gimple_build_valueize);
  if (!res)
    {
      if (gimple_in_ssa_p (cfun))
	res = make_ssa_name (type);
      else
	res = create_tmp_reg (type);
      gimple stmt;
      if (code == BIT_FIELD_REF)
	stmt = gimple_build_assign (res, code,
				    build3 (code, type, op0, op1, op2));
      else
	stmt = gimple_build_assign (res, code, op0, op1, op2);
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
/* Build the call FN (ARG0) with a result of type TYPE
   (or no result if TYPE is void) with location LOC,
   simplifying it first if possible.  Returns the built
   expression value (or NULL_TREE if TYPE is void) and appends
   statements possibly defining it to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum built_in_function fn, tree type, tree arg0)
{
  tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
  if (!res)
    {
      tree decl = builtin_decl_implicit (fn);
      gimple stmt = gimple_build_call (decl, 1, arg0);
      if (!VOID_TYPE_P (type))
	{
	  if (gimple_in_ssa_p (cfun))
	    res = make_ssa_name (type);
	  else
	    res = create_tmp_reg (type);
	  gimple_call_set_lhs (stmt, res);
	}
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
/* Build the call FN (ARG0, ARG1) with a result of type TYPE
   (or no result if TYPE is void) with location LOC,
   simplifying it first if possible.  Returns the built
   expression value (or NULL_TREE if TYPE is void) and appends
   statements possibly defining it to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum built_in_function fn, tree type, tree arg0, tree arg1)
{
  tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
  if (!res)
    {
      tree decl = builtin_decl_implicit (fn);
      gimple stmt = gimple_build_call (decl, 2, arg0, arg1);
      if (!VOID_TYPE_P (type))
	{
	  if (gimple_in_ssa_p (cfun))
	    res = make_ssa_name (type);
	  else
	    res = create_tmp_reg (type);
	  gimple_call_set_lhs (stmt, res);
	}
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
/* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
   (or no result if TYPE is void) with location LOC,
   simplifying it first if possible.  Returns the built
   expression value (or NULL_TREE if TYPE is void) and appends
   statements possibly defining it to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum built_in_function fn, tree type,
	      tree arg0, tree arg1, tree arg2)
{
  tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
			      seq, gimple_build_valueize);
  if (!res)
    {
      tree decl = builtin_decl_implicit (fn);
      gimple stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
      if (!VOID_TYPE_P (type))
	{
	  if (gimple_in_ssa_p (cfun))
	    res = make_ssa_name (type);
	  else
	    res = create_tmp_reg (type);
	  gimple_call_set_lhs (stmt, res);
	}
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
/* Build the conversion (TYPE) OP with a result of type TYPE
   with location LOC if such conversion is necessary in GIMPLE,
   simplifying it first.
   Returns the built expression value and appends
   statements possibly defining it to SEQ.  */

tree
gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
{
  if (useless_type_conversion_p (type, TREE_TYPE (op)))
    return op;
  return gimple_build (seq, loc, NOP_EXPR, type, op);
}
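
/* A usage sketch for the gimple_build/gimple_convert API (hypothetical
   caller state, not part of GCC itself): to emit
   x' = (unsigned int) x; y = x' * 2 into SEQ:

     tree xu = gimple_convert (&seq, loc, unsigned_type_node, x);
     tree y = gimple_build (&seq, loc, MULT_EXPR, unsigned_type_node,
			    xu, build_int_cst (unsigned_type_node, 2));

   Each call first tries gimple_simplify and only appends a new
   statement to SEQ when no simplification applies.  */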