/* Statement simplification on GIMPLE.
   Copyright (C) 2010-2014 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.c.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "stringpool.h"
#include "stor-layout.h"
#include "hard-reg-set.h"
#include "dominance.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimple-expr.h"
#include "gimple-iterator.h"
#include "gimple-ssa.h"
#include "tree-ssanames.h"
#include "tree-into-ssa.h"
#include "tree-ssa-propagate.h"
#include "plugin-api.h"
#include "ipa-utils.h"
#include "gimple-pretty-print.h"
#include "tree-ssa-address.h"
#include "langhooks.h"
#include "gimplify-me.h"
#include "gimple-match.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
/* Return true when DECL can be referenced from current unit.
   FROM_DECL (if non-null) specifies the constructor of the variable
   DECL was taken from.
   We can get declarations that are not possible to reference for various
   reasons:

     1) When analyzing C++ virtual tables.
	C++ virtual tables do have known constructors even
	when they are keyed to other compilation unit.
	Those tables can contain pointers to methods and vars
	in other units.  Those methods have both STATIC and EXTERNAL
	set.
     2) In WHOPR mode devirtualization might lead to reference
	to method that was partitioned elsewhere.
	In this case we have static VAR_DECL or FUNCTION_DECL
	that has no corresponding callgraph/varpool node
	declaring the body.
     3) COMDAT functions referred by external vtables that
	we devirtualize only during final compilation stage.
	At this time we already decided that we will not output
	the function body and thus we can't reference the symbol
	directly.  */
static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  if (DECL_ABSTRACT_P (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || (TREE_CODE (decl) != VAR_DECL
	  && TREE_CODE (decl) != FUNCTION_DECL))
    return true;

  /* Static objects can be referred only if they were not optimized out yet.  */
  if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
    {
      /* Before we start optimizing unreachable code we can be sure all
	 static objects are defined.  */
      if (symtab->function_flags_ready)
	return true;
      snode = symtab_node::get (decl);
      if (!snode || !snode->definition)
	return false;
      node = dyn_cast <cgraph_node *> (snode);
      return !node || !node->global.inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from initializer of
     external var or var that has been optimized out.  */
  if (!from_decl
      || TREE_CODE (from_decl) != VAR_DECL
      || (!DECL_EXTERNAL (from_decl)
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->definition)
      || (flag_ltrans
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->in_other_partition))
    return true;

  /* We are folding reference from external vtable.  The vtable may refer
     to a symbol keyed to other compilation unit.  The other compilation
     unit may be in separate DSO and the symbol may be hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
    return false;

  /* When function is public, we always can introduce new reference.
     Exception are the COMDAT functions where introducing a direct
     reference implies the need to include the function body in the
     current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;

  /* We have COMDAT.  We are going to check if we still have definition
     or if the definition is going to be output in other partition.
     Bypass this when gimplifying; all needed functions will be produced.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting to not necessarily give up because the copy will be
     output elsewhere when corresponding vtable is output.
     This is however not possible - ABI specifies that COMDATs are output in
     units where they are used and when the other unit was compiled with LTO
     it is possible that vtable was kept public while the function itself
     was privatized.  */
  if (!symtab->function_flags_ready)
    return true;

  snode = symtab_node::get (decl);
  if (!snode
      || ((!snode->definition || DECL_EXTERNAL (decl))
	  && (!snode->in_other_partition
	      || (!snode->forced_by_abi && !snode->force_output))))
    return false;
  node = dyn_cast <cgraph_node *> (snode);
  return !node || !node->global.inlined_to;
}
/* CVAL is value taken from DECL_INITIAL of variable.  Try to transform it into
   acceptable form for is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specifies the variable whose constructor
   contains CVAL.  */

tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  tree orig_cval = cval;
  STRIP_NOPS (cval);
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
	cval = build1_loc (EXPR_LOCATION (cval),
			   ADDR_EXPR, TREE_TYPE (ptr),
			   fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
					ptr,
					fold_convert (ptr_type_node,
						      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
	{
	  base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
	  if (base)
	    TREE_OPERAND (cval, 0) = base;
	}
      else
	base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
	return NULL_TREE;

      if ((TREE_CODE (base) == VAR_DECL
	   || TREE_CODE (base) == FUNCTION_DECL)
	  && !can_refer_decl_in_current_unit_p (base, from_decl))
	return NULL_TREE;
      if (TREE_CODE (base) == VAR_DECL)
	TREE_ADDRESSABLE (base) = 1;
      else if (TREE_CODE (base) == FUNCTION_DECL)
	{
	  /* Make sure we create a cgraph node for functions we'll reference.
	     They can be non-existent if the reference comes from an entry
	     of an external vtable for example.  */
	  cgraph_node::get_create (base);
	}
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  if (TREE_OVERFLOW_P (cval))
    return drop_tree_overflow (cval);
  return orig_cval;
}
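
/* As an example of the POINTER_PLUS_EXPR case above (editor's sketch):
   an initializer value of the form  &a p+ 4  (address plus constant
   offset) is rewritten into the equivalent of  &MEM[(char *)&a + 4],
   an ADDR_EXPR of a MEM_REF, which is_gimple_min_invariant accepts.  */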
/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

tree
get_symbol_constant_value (tree sym)
{
  tree val = ctor_for_folding (sym);
  if (val != error_mark_node)
    {
      if (val)
	{
	  val = canonicalize_constructor_val (unshare_expr (val), sym);
	  if (val && is_gimple_min_invariant (val))
	    return val;
	  else
	    return NULL_TREE;
	}
      /* Variables declared 'const' without an initializer
	 have zero as the initializer if they may not be
	 overridden at link or run time.  */
      if (!val
	  && is_gimple_reg_type (TREE_TYPE (sym)))
	return build_zero_cst (TREE_TYPE (sym));
    }

  return NULL_TREE;
}
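
/* For instance (editor's sketch, not part of the original sources):

     static const int answer = 42;
     ... = answer;          // the load folds to the constant 42

   and a read of a 'const'-qualified register-type variable that has no
   initializer and cannot be overridden at link or run time folds to
   build_zero_cst, i.e. to 0.  */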
/* Subroutine of fold_stmt.  We perform several simplifications of the
   memory reference tree EXPR and make sure to re-gimplify them properly
   after propagation of constant addresses.  IS_LHS is true if the
   reference is supposed to be an lvalue.  */

static tree
maybe_fold_reference (tree expr, bool is_lhs)
{
  tree result;

  if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
       || TREE_CODE (expr) == REALPART_EXPR
       || TREE_CODE (expr) == IMAGPART_EXPR)
      && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_unary_loc (EXPR_LOCATION (expr),
			   TREE_CODE (expr),
			   TREE_TYPE (expr),
			   TREE_OPERAND (expr, 0));
  else if (TREE_CODE (expr) == BIT_FIELD_REF
	   && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_ternary_loc (EXPR_LOCATION (expr),
			     TREE_CODE (expr),
			     TREE_TYPE (expr),
			     TREE_OPERAND (expr, 0),
			     TREE_OPERAND (expr, 1),
			     TREE_OPERAND (expr, 2));

  if (!is_lhs
      && (result = fold_const_aggregate_ref (expr))
      && is_gimple_min_invariant (result))
    return result;

  return NULL_TREE;
}
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	if (TREE_CLOBBER_P (rhs))
	  return NULL_TREE;

	if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs, false);

	else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
	  {
	    tree val = OBJ_TYPE_REF_EXPR (rhs);
	    if (is_gimple_min_invariant (val))
	      return val;
	    else if (flag_devirtualize && virtual_method_call_p (rhs))
	      {
		bool final;
		vec <cgraph_node *>targets
		  = possible_polymorphic_call_targets (rhs, stmt, &final);
		if (final && targets.length () <= 1 && dbg_cnt (devirt))
		  {
		    if (dump_enabled_p ())
		      {
			location_t loc = gimple_location_safe (stmt);
			dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
					 "resolving virtual function address "
					 "reference to function %s\n",
					 targets.length () == 1
					 ? targets[0]->name ()
					 : "NULL");
		      }
		    if (targets.length () == 1)
		      {
			val = fold_convert (TREE_TYPE (val),
					    build_fold_addr_expr_loc
					      (loc, targets[0]->decl));
			STRIP_USELESS_TYPE_CONVERSION (val);
		      }
		    else
		      /* We can not use __builtin_unreachable here because it
			 can not have address taken.  */
		      val = build_int_cst (TREE_TYPE (val), 0);
		    return val;
		  }
	      }
	  }

	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    tree ref = TREE_OPERAND (rhs, 0);
	    tree tem = maybe_fold_reference (ref, true);
	    if (tem
		&& TREE_CODE (tem) == MEM_REF
		&& integer_zerop (TREE_OPERAND (tem, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
	    else if (tem)
	      result = fold_convert (TREE_TYPE (rhs),
				     build_fold_addr_expr_loc (loc, tem));
	    else if (TREE_CODE (ref) == MEM_REF
		     && integer_zerop (TREE_OPERAND (ref, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		 && (CONSTRUCTOR_NELTS (rhs)
		     == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (TREE_CODE (val) != INTEGER_CST
		  && TREE_CODE (val) != REAL_CST
		  && TREE_CODE (val) != FIXED_CST)
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs))
	  return get_symbol_constant_value (rhs);

	/* If we couldn't fold the RHS, hand over to the generic
	   fold routines.  */
	if (result == NULL_TREE)
	  result = fold (rhs);

	/* Strip away useless type conversions.  Both the NON_LVALUE_EXPR
	   that may have been added by fold, and "useless" type
	   conversions that might now be apparent due to propagation.  */
	STRIP_USELESS_TYPE_CONVERSION (result);

	if (result != rhs && valid_gimple_rhs_p (result))
	  return result;

	return NULL_TREE;
      }
      break;

    case GIMPLE_UNARY_RHS:
      break;

    case GIMPLE_BINARY_RHS:
      /* Try to canonicalize for boolean-typed X the comparisons
	 X == 0, X == 1, X != 0, and X != 1.  */
      if (gimple_assign_rhs_code (stmt) == EQ_EXPR
	  || gimple_assign_rhs_code (stmt) == NE_EXPR)
	{
	  tree lhs = gimple_assign_lhs (stmt);
	  tree op1 = gimple_assign_rhs1 (stmt);
	  tree op2 = gimple_assign_rhs2 (stmt);
	  tree type = TREE_TYPE (op1);

	  /* Check whether the comparison operands are of the same boolean
	     type as the result type is.
	     Check that second operand is an integer-constant with value
	     one or zero.  */
	  if (TREE_CODE (op2) == INTEGER_CST
	      && (integer_zerop (op2) || integer_onep (op2))
	      && useless_type_conversion_p (TREE_TYPE (lhs), type))
	    {
	      enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
	      bool is_logical_not = false;

	      /* X == 0 and X != 1 is a logical-not of X;
		 X == 1 and X != 0 is X.  */
	      if ((cmp_code == EQ_EXPR && integer_zerop (op2))
		  || (cmp_code == NE_EXPR && integer_onep (op2)))
		is_logical_not = true;

	      if (is_logical_not == false)
		result = op1;
	      /* Only for one-bit precision typed X the transformation
		 !X -> ~X is valid.  */
	      else if (TYPE_PRECISION (type) == 1)
		result = build1_loc (gimple_location (stmt), BIT_NOT_EXPR,
				     type, op1);
	      /* Otherwise we use !X -> X ^ 1.  */
	      else
		result = build2_loc (gimple_location (stmt), BIT_XOR_EXPR,
				     type, op1, build_int_cst (type, 1));
	    }
	}

      if (!result)
	result = fold_binary_loc (loc, subcode,
				  TREE_TYPE (gimple_assign_lhs (stmt)),
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_TERNARY_RHS:
      /* Try to fold a conditional expression.  */
      if (gimple_assign_rhs_code (stmt) == COND_EXPR)
	{
	  tree op0 = gimple_assign_rhs1 (stmt);
	  tree tem;
	  bool set = false;
	  location_t cond_loc = gimple_location (stmt);

	  if (COMPARISON_CLASS_P (op0))
	    {
	      fold_defer_overflow_warnings ();
	      tem = fold_binary_loc (cond_loc,
				     TREE_CODE (op0), TREE_TYPE (op0),
				     TREE_OPERAND (op0, 0),
				     TREE_OPERAND (op0, 1));
	      /* This is actually a conditional expression, not a GIMPLE
		 conditional statement, however, the valid_gimple_rhs_p
		 test still applies.  */
	      set = (tem && is_gimple_condexpr (tem)
		     && valid_gimple_rhs_p (tem));
	      fold_undefer_overflow_warnings (set, stmt, 0);
	    }
	  else if (is_gimple_min_invariant (op0))
	    {
	      tem = op0;
	      set = true;
	    }
	  else
	    return NULL_TREE;

	  if (set)
	    result = fold_build3_loc (cond_loc, COND_EXPR,
				      TREE_TYPE (gimple_assign_lhs (stmt)), tem,
				      gimple_assign_rhs2 (stmt),
				      gimple_assign_rhs3 (stmt));
	}

      if (!result)
	result = fold_ternary_loc (loc, subcode,
				   TREE_TYPE (gimple_assign_lhs (stmt)),
				   gimple_assign_rhs1 (stmt),
				   gimple_assign_rhs2 (stmt),
				   gimple_assign_rhs3 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
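
/* A sketch of the GIMPLE_BINARY_RHS canonicalization above, for a
   boolean-typed x (editor's example, not from the original sources):

     _1 = x_2 == 0;    =>    _1 = ~x_2;       (TYPE_PRECISION == 1)
     _1 = x_2 == 0;    =>    _1 = x_2 ^ 1;    (wider boolean-compatible type)
     _1 = x_2 != 0;    =>    _1 = x_2;  */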
/* Attempt to fold a conditional statement.  Return true if any changes were
   made.  We only attempt to fold the condition expression, and do not perform
   any transformation that would require alteration of the cfg.  It is
   assumed that the operands have been previously folded.  */

static bool
fold_gimple_cond (gcond *stmt)
{
  tree result = fold_binary_loc (gimple_location (stmt),
				 gimple_cond_code (stmt),
				 boolean_type_node,
				 gimple_cond_lhs (stmt),
				 gimple_cond_rhs (stmt));

  if (result)
    {
      STRIP_USELESS_TYPE_CONVERSION (result);
      if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
	{
	  gimple_cond_set_condition_from_tree (stmt, result);
	  return true;
	}
    }

  return false;
}
/* Replace a statement at *SI_P with a sequence of statements in STMTS,
   adjusting the replacement stmts location and virtual operands.
   If the statement has a lhs the last stmt in the sequence is expected
   to assign to that lhs.  */

static void
gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
{
  gimple stmt = gsi_stmt (*si_p);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  gimple laststore = NULL;
  for (gimple_stmt_iterator i = gsi_last (stmts);
       !gsi_end_p (i); gsi_prev (&i))
    {
      gimple new_stmt = gsi_stmt (i);
      if ((gimple_assign_single_p (new_stmt)
	   && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	  || (is_gimple_call (new_stmt)
	      && (gimple_call_flags (new_stmt)
		  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
	{
	  tree vdef;
	  if (!laststore)
	    vdef = gimple_vdef (stmt);
	  else
	    vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
	  gimple_set_vdef (new_stmt, vdef);
	  if (vdef && TREE_CODE (vdef) == SSA_NAME)
	    SSA_NAME_DEF_STMT (vdef) = new_stmt;
	  laststore = new_stmt;
	}
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  tree reaching_vuse = gimple_vuse (stmt);
  for (gimple_stmt_iterator i = gsi_start (stmts);
       !gsi_end_p (i); gsi_next (&i))
    {
      gimple new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with exact SSA
	 name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
	gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
	reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
	  && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}
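
/* A sketch of the virtual-operand rewiring performed above (editor's
   example).  Replacing one store by a two-store sequence turns

     # .MEM_3 = VDEF <.MEM_1>      original statement

   into

     # .MEM_4 = VDEF <.MEM_1>      first replacement store
     # .MEM_3 = VDEF <.MEM_4>      last replacement store

   so the first VUSE (.MEM_1) and the final VDEF (.MEM_3) of the sequence
   stay identical to those of the replaced statement, with fresh SSA names
   for the stores in between.  */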
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL,
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple stmt, new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  push_gimplify_context (gimple_in_ssa_p (cfun));

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  pop_gimplify_context (NULL);
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  gsi_replace (si_p, gimple_build_nop (), true);
	  return;
	}
    }
  else
    {
      tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
				       GSI_CONTINUE_LINKING);
    }

  pop_gimplify_context (NULL);

  gsi_replace_with_seq_vops (si_p, stmts);
}
/* Replace the call at *GSI with the gimple value VAL.  */

static void
replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
{
  gimple stmt = gsi_stmt (*gsi);
  gimple repl;
  tree lhs = gimple_call_lhs (stmt);
  if (lhs)
    {
      if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
	val = fold_convert (TREE_TYPE (lhs), val);
      repl = gimple_build_assign (lhs, val);
    }
  else
    repl = gimple_build_nop ();
  tree vdef = gimple_vdef (stmt);
  if (vdef && TREE_CODE (vdef) == SSA_NAME)
    {
      unlink_stmt_vdef (stmt);
      release_ssa_name (vdef);
    }
  gsi_replace (gsi, repl, true);
}
/* Replace the call at *GSI with the new call REPL and fold that
   afterwards.  */

static void
replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple repl)
{
  gimple stmt = gsi_stmt (*gsi);
  gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
  gimple_set_location (repl, gimple_location (stmt));
  if (gimple_vdef (stmt)
      && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
    {
      gimple_set_vdef (repl, gimple_vdef (stmt));
      gimple_set_vuse (repl, gimple_vuse (stmt));
      SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
    }
  gsi_replace (gsi, repl, true);
  fold_stmt (gsi);
}
/* Return true if VAR is a VAR_DECL or a component thereof.  */

static bool
var_decl_component_p (tree var)
{
  tree inner = var;
  while (handled_component_p (inner))
    inner = TREE_OPERAND (inner, 0);
  return SSA_VAR_P (inner);
}
/* Fold function call to builtin mem{{,p}cpy,move}.  Return
   false if no simplification can be made.
   If ENDP is 0, return DEST (like memcpy).
   If ENDP is 1, return DEST+LEN (like mempcpy).
   If ENDP is 2, return DEST+LEN-1 (like stpcpy).
   If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
   (memmove).  */
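
/* Concretely (editor's sketch of the ENDP encoding above):

     memcpy (d, s, n)    ENDP == 0   result is D
     mempcpy (d, s, n)   ENDP == 1   result is D + N
     stpcpy (d, s)       ENDP == 2   result is D + N - 1
     memmove (d, s, n)   ENDP == 3   result is D, operands may overlap  */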
static bool
gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
			       tree dest, tree src, int endp)
{
  gimple stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  tree len = gimple_call_arg (stmt, 2);
  tree destvar, srcvar;
  location_t loc = gimple_location (stmt);

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      gimple repl;
      if (gimple_call_lhs (stmt))
	repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
      else
	repl = gimple_build_nop ();
      tree vdef = gimple_vdef (stmt);
      if (vdef && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
      gsi_replace (gsi, repl, true);
      return true;
    }

  /* If SRC and DEST are the same (and not volatile), return
     DEST{,+LEN,+LEN-1}.  */
  if (operand_equal_p (src, dest, 0))
    {
      unlink_stmt_vdef (stmt);
      if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	release_ssa_name (gimple_vdef (stmt));
      if (!lhs)
	{
	  gsi_replace (gsi, gimple_build_nop (), true);
	  return true;
	}
      goto done;
    }
  else
    {
      tree srctype, desttype;
      unsigned int src_align, dest_align;
      tree off0;

      /* Build accesses at offset zero with a ref-all character type.  */
      off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
							 ptr_mode, true), 0);

      /* If we can perform the copy efficiently with first doing all loads
	 and then all stores inline it that way.  Currently efficiently
	 means that we can load all the memory into a single integer
	 register which is what MOVE_MAX gives us.  */
      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (tree_fits_uhwi_p (len)
	  && compare_tree_int (len, MOVE_MAX) <= 0
	  /* ???  Don't transform copies from strings with known length as
	     this confuses tree-ssa-strlen.c.  This doesn't handle
	     the case in gcc.dg/strlenopt-8.c which is XFAILed for that
	     reason.  */
	  && !c_strlen (src, 2))
	{
	  unsigned ilen = tree_to_uhwi (len);
	  if (exact_log2 (ilen) != -1)
	    {
	      tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
	      if (type
		  && TYPE_MODE (type) != BLKmode
		  && (GET_MODE_SIZE (TYPE_MODE (type)) * BITS_PER_UNIT
		      == ilen * 8)
		  /* If the destination pointer is not aligned we must be able
		     to emit an unaligned store.  */
		  && (dest_align >= GET_MODE_ALIGNMENT (TYPE_MODE (type))
		      || !SLOW_UNALIGNED_ACCESS (TYPE_MODE (type), dest_align)))
		{
		  tree srctype = type;
		  tree desttype = type;
		  if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
		    srctype = build_aligned_type (type, src_align);
		  tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
		  tree tem = fold_const_aggregate_ref (srcmem);
		  if (tem)
		    srcmem = tem;
		  else if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type))
			   && SLOW_UNALIGNED_ACCESS (TYPE_MODE (type),
						     src_align))
		    srcmem = NULL_TREE;
		  if (srcmem)
		    {
		      gimple new_stmt;
		      if (is_gimple_reg_type (TREE_TYPE (srcmem)))
			{
			  new_stmt = gimple_build_assign (NULL_TREE, srcmem);
			  if (gimple_in_ssa_p (cfun))
			    srcmem = make_ssa_name (TREE_TYPE (srcmem),
						    new_stmt);
			  else
			    srcmem = create_tmp_reg (TREE_TYPE (srcmem));
			  gimple_assign_set_lhs (new_stmt, srcmem);
			  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
			}
		      if (dest_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
			desttype = build_aligned_type (type, dest_align);
		      new_stmt
			= gimple_build_assign (fold_build2 (MEM_REF, desttype,
							    dest, off0),
					       srcmem);
		      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
		      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
		      if (gimple_vdef (new_stmt)
			  && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
			SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
		      if (!lhs)
			{
			  gsi_replace (gsi, new_stmt, true);
			  return true;
			}
		      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
		      goto done;
		    }
		}
	    }
	}

      if (endp == 3)
	{
	  /* Both DEST and SRC must be pointer types.
	     ??? This is what old code did.  Is the testing for pointer types
	     really mandatory?

	     If either SRC is readonly or length is 1, we can use memcpy.  */
	  if (!dest_align || !src_align)
	    return false;
	  if (readonly_data_expr (src)
	      || (tree_fits_uhwi_p (len)
		  && (MIN (src_align, dest_align) / BITS_PER_UNIT
		      >= tree_to_uhwi (len))))
	    {
	      tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
	      if (!fn)
		return false;
	      gimple_call_set_fndecl (stmt, fn);
	      gimple_call_set_arg (stmt, 0, dest);
	      gimple_call_set_arg (stmt, 1, src);
	      fold_stmt (gsi);
	      return true;
	    }

	  /* If *src and *dest can't overlap, optimize into memcpy as well.  */
	  if (TREE_CODE (src) == ADDR_EXPR
	      && TREE_CODE (dest) == ADDR_EXPR)
	    {
	      tree src_base, dest_base, fn;
	      HOST_WIDE_INT src_offset = 0, dest_offset = 0;
	      HOST_WIDE_INT size = -1;
	      HOST_WIDE_INT maxsize = -1;

	      srcvar = TREE_OPERAND (src, 0);
	      src_base = get_ref_base_and_extent (srcvar, &src_offset,
						  &size, &maxsize);
	      destvar = TREE_OPERAND (dest, 0);
	      dest_base = get_ref_base_and_extent (destvar, &dest_offset,
						   &size, &maxsize);
	      if (tree_fits_uhwi_p (len))
		maxsize = tree_to_uhwi (len);
	      else
		maxsize = -1;
	      src_offset /= BITS_PER_UNIT;
	      dest_offset /= BITS_PER_UNIT;
	      if (SSA_VAR_P (src_base)
		  && SSA_VAR_P (dest_base))
		{
		  if (operand_equal_p (src_base, dest_base, 0)
		      && ranges_overlap_p (src_offset, maxsize,
					   dest_offset, maxsize))
		    return false;
		}
	      else if (TREE_CODE (src_base) == MEM_REF
		       && TREE_CODE (dest_base) == MEM_REF)
		{
		  if (! operand_equal_p (TREE_OPERAND (src_base, 0),
					 TREE_OPERAND (dest_base, 0), 0))
		    return false;
		  offset_int off = mem_ref_offset (src_base) + src_offset;
		  if (!wi::fits_shwi_p (off))
		    return false;
		  src_offset = off.to_shwi ();

		  off = mem_ref_offset (dest_base) + dest_offset;
		  if (!wi::fits_shwi_p (off))
		    return false;
		  dest_offset = off.to_shwi ();
		  if (ranges_overlap_p (src_offset, maxsize,
					dest_offset, maxsize))
		    return false;
		}
	      else
		return false;

	      fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
	      if (!fn)
		return false;
	      gimple_call_set_fndecl (stmt, fn);
	      gimple_call_set_arg (stmt, 0, dest);
	      gimple_call_set_arg (stmt, 1, src);
	      fold_stmt (gsi);
	      return true;
	    }

	  /* If the destination and source do not alias optimize into
	     memcpy as well.  */
	  if ((is_gimple_min_invariant (dest)
	       || TREE_CODE (dest) == SSA_NAME)
	      && (is_gimple_min_invariant (src)
		  || TREE_CODE (src) == SSA_NAME))
	    {
	      ao_ref destr, srcr;
	      ao_ref_init_from_ptr_and_size (&destr, dest, len);
	      ao_ref_init_from_ptr_and_size (&srcr, src, len);
	      if (!refs_may_alias_p_1 (&destr, &srcr, false))
		{
		  tree fn;
		  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
		  if (!fn)
		    return false;
		  gimple_call_set_fndecl (stmt, fn);
		  gimple_call_set_arg (stmt, 0, dest);
		  gimple_call_set_arg (stmt, 1, src);
		  fold_stmt (gsi);
		  return true;
		}
	    }

	  return false;
	}

      if (!tree_fits_shwi_p (len))
	return false;
      /* FIXME:
	 This logic loses for arguments like (type *)malloc (sizeof (type)),
	 since we strip the casts of up to VOID return value from malloc.
	 Perhaps we ought to inherit type from non-VOID argument here?  */
      STRIP_NOPS (src);
      STRIP_NOPS (dest);
      if (!POINTER_TYPE_P (TREE_TYPE (src))
	  || !POINTER_TYPE_P (TREE_TYPE (dest)))
	return false;
      /* In the following try to find a type that is most natural to be
	 used for the memcpy source and destination and that allows
	 the most optimization when memcpy is turned into a plain assignment
	 using that type.  In theory we could always use a char[len] type
	 but that only gains us that the destination and source possibly
	 no longer will have their address taken.  */
      /* As we fold (void *)(p + CST) to (void *)p + CST undo this here.  */
      if (TREE_CODE (src) == POINTER_PLUS_EXPR)
	{
	  tree tem = TREE_OPERAND (src, 0);
	  STRIP_NOPS (tem);
	  if (tem != TREE_OPERAND (src, 0))
	    src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
	}
      if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
	{
	  tree tem = TREE_OPERAND (dest, 0);
	  STRIP_NOPS (tem);
	  if (tem != TREE_OPERAND (dest, 0))
	    dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
	}
      srctype = TREE_TYPE (TREE_TYPE (src));
      if (TREE_CODE (srctype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
	{
	  srctype = TREE_TYPE (srctype);
	  STRIP_NOPS (src);
	  src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
	}
      desttype = TREE_TYPE (TREE_TYPE (dest));
      if (TREE_CODE (desttype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
	{
	  desttype = TREE_TYPE (desttype);
	  STRIP_NOPS (dest);
	  dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
	}
      if (TREE_ADDRESSABLE (srctype)
	  || TREE_ADDRESSABLE (desttype))
	return false;

      /* Make sure we are not copying using a floating-point mode or
	 a type whose size possibly does not match its precision.  */
      if (FLOAT_MODE_P (TYPE_MODE (desttype))
	  || TREE_CODE (desttype) == BOOLEAN_TYPE
	  || TREE_CODE (desttype) == ENUMERAL_TYPE)
	desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
      if (FLOAT_MODE_P (TYPE_MODE (srctype))
	  || TREE_CODE (srctype) == BOOLEAN_TYPE
	  || TREE_CODE (srctype) == ENUMERAL_TYPE)
	srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
      if (!srctype)
	srctype = desttype;
      if (!desttype)
	desttype = srctype;
      if (!srctype)
	return false;

      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (dest_align < TYPE_ALIGN (desttype)
	  || src_align < TYPE_ALIGN (srctype))
	return false;

      destvar = dest;
      STRIP_NOPS (destvar);
      if (TREE_CODE (destvar) == ADDR_EXPR
	  && var_decl_component_p (TREE_OPERAND (destvar, 0))
	  && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
	destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
      else
	destvar = NULL_TREE;

      srcvar = src;
      STRIP_NOPS (srcvar);
      if (TREE_CODE (srcvar) == ADDR_EXPR
	  && var_decl_component_p (TREE_OPERAND (srcvar, 0))
	  && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
	{
	  if (!destvar
	      || src_align >= TYPE_ALIGN (desttype))
	    srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
				  srcvar, off0);
	  else if (!STRICT_ALIGNMENT)
	    {
	      srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
					    src_align);
	      srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
	    }
	  else
	    srcvar = NULL_TREE;
	}
      else
	srcvar = NULL_TREE;

      if (srcvar == NULL_TREE && destvar == NULL_TREE)
	return false;

      if (srcvar == NULL_TREE)
	{
	  STRIP_NOPS (src);
	  if (src_align >= TYPE_ALIGN (desttype))
	    srcvar = fold_build2 (MEM_REF, desttype, src, off0);
	  else
	    {
	      if (STRICT_ALIGNMENT)
		return false;
	      srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
					    src_align);
	      srcvar = fold_build2 (MEM_REF, srctype, src, off0);
	    }
	}
      else if (destvar == NULL_TREE)
	{
	  STRIP_NOPS (dest);
	  if (dest_align >= TYPE_ALIGN (srctype))
	    destvar = fold_build2 (MEM_REF, srctype, dest, off0);
	  else
	    {
	      if (STRICT_ALIGNMENT)
		return false;
	      desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
					     dest_align);
	      destvar = fold_build2 (MEM_REF, desttype, dest, off0);
	    }
	}

      gimple new_stmt;
      if (is_gimple_reg_type (TREE_TYPE (srcvar)))
	{
	  new_stmt = gimple_build_assign (NULL_TREE, srcvar);
	  if (gimple_in_ssa_p (cfun))
	    srcvar = make_ssa_name (TREE_TYPE (srcvar), new_stmt);
	  else
	    srcvar = create_tmp_reg (TREE_TYPE (srcvar));
	  gimple_assign_set_lhs (new_stmt, srcvar);
	  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
	  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
	}
      new_stmt = gimple_build_assign (destvar, srcvar);
      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
      if (gimple_vdef (new_stmt)
	  && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
	SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
      if (!lhs)
	{
	  gsi_replace (gsi, new_stmt, true);
	  return true;
	}
      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
    }

done:
  if (endp == 0 || endp == 3)
    len = NULL_TREE;
  else if (endp == 2)
    len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
			   ssize_int (1));
  if (endp == 2 || endp == 1)
    dest = fold_build_pointer_plus_loc (loc, dest, len);

  dest = force_gimple_operand_gsi (gsi, dest, false, NULL_TREE, true,
				   GSI_SAME_STMT);
  gimple repl = gimple_build_assign (lhs, dest);
  gsi_replace (gsi, repl, true);
  return true;
}
/* Fold function call to builtin memset or bzero at *GSI setting the
   memory of size LEN to VAL.  Return whether a simplification was made.  */

static bool
gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
{
  gimple stmt = gsi_stmt (*gsi);
  tree etype;
  unsigned HOST_WIDE_INT length, cval;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
      return true;
    }

  if (! tree_fits_uhwi_p (len))
    return false;

  if (TREE_CODE (c) != INTEGER_CST)
    return false;

  tree dest = gimple_call_arg (stmt, 0);
  tree var = dest;
  if (TREE_CODE (var) != ADDR_EXPR)
    return false;

  var = TREE_OPERAND (var, 0);
  if (TREE_THIS_VOLATILE (var))
    return false;

  etype = TREE_TYPE (var);
  if (TREE_CODE (etype) == ARRAY_TYPE)
    etype = TREE_TYPE (etype);

  if (!INTEGRAL_TYPE_P (etype)
      && !POINTER_TYPE_P (etype))
    return false;

  if (! var_decl_component_p (var))
    return false;

  length = tree_to_uhwi (len);
  if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
      || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
    return false;

  if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
    return false;

  if (integer_zerop (c))
    cval = 0;
  else
    {
      if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
	return false;

      cval = TREE_INT_CST_LOW (c);
      cval &= 0xff;
      cval |= cval << 8;
      cval |= cval << 16;
      cval |= (cval << 31) << 1;
    }

  var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
  gimple store = gimple_build_assign (var, build_int_cst_type (etype, cval));
  gimple_set_vuse (store, gimple_vuse (stmt));
  tree vdef = gimple_vdef (stmt);
  if (vdef && TREE_CODE (vdef) == SSA_NAME)
    {
      gimple_set_vdef (store, gimple_vdef (stmt));
      SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
    }
  gsi_insert_before (gsi, store, GSI_SAME_STMT);
  if (gimple_call_lhs (stmt))
    {
      gimple asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
      gsi_replace (gsi, asgn, true);
    }
  else
    {
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_next (&gsi2);
      gsi_remove (&gsi2, true);
    }

  return true;
}
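
/* The shifts above broadcast the byte value into CVAL; e.g. (editor's
   sketch, assuming 32-bit int and 8-bit bytes)

     memset (&i, 0x2a, sizeof (int));

   builds CVAL = 0x2a2a2a2a via the << 8 and << 16 steps and emits the
   single store  MEM[(int *)&i] = 0x2a2a2a2a;.  The final
   (cval << 31) << 1 fills bytes four to seven for an 8-byte store while
   staying well defined even when HOST_WIDE_INT is only 32 bits wide,
   where a direct shift by 32 would be undefined.  */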
/* Return the string length, maximum string length or maximum value of
   ARG in LENGTH.
   If ARG is an SSA name variable, follow its use-def chains.  If LENGTH
   is not NULL and, for TYPE == 0, its value is not equal to the length
   we determine or if we are unable to determine the length or value,
   return false.  VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be returned, 1 for maximum string
   length and 2 for maximum value ARG can have.  */

static bool
get_maxval_strlen (tree arg, tree *length, bitmap *visited, int type)
{
  tree var, val;
  gimple def_stmt;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      if (TREE_CODE (arg) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
	  && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
	{
	  tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_maxval_strlen (TREE_OPERAND (aop0, 0),
				      length, visited, type);
	}

      if (type == 2)
	{
	  val = arg;
	  if (TREE_CODE (val) != INTEGER_CST
	      || tree_int_cst_sgn (val) < 0)
	    return false;
	}
      else
	val = c_strlen (arg, 1);
      if (!val)
	return false;

      if (*length)
	{
	  if (type > 0)
	    {
	      if (TREE_CODE (*length) != INTEGER_CST
		  || TREE_CODE (val) != INTEGER_CST)
		return false;

	      if (tree_int_cst_lt (*length, val))
		*length = val;
	      return true;
	    }
	  else if (simple_cst_equal (val, *length) != 1)
	    return false;
	}

      *length = val;
      return true;
    }

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  */
  if (!*visited)
    *visited = BITMAP_ALLOC (NULL);
  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
      case GIMPLE_ASSIGN:
	/* The RHS of the statement defining VAR must either have a
	   constant length or come from another SSA_NAME with a constant
	   length.  */
	if (gimple_assign_single_p (def_stmt)
	    || gimple_assign_unary_nop_p (def_stmt))
	  {
	    tree rhs = gimple_assign_rhs1 (def_stmt);
	    return get_maxval_strlen (rhs, length, visited, type);
	  }
	else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
	  {
	    tree op2 = gimple_assign_rhs2 (def_stmt);
	    tree op3 = gimple_assign_rhs3 (def_stmt);
	    return get_maxval_strlen (op2, length, visited, type)
		   && get_maxval_strlen (op3, length, visited, type);
	  }
	return false;

      case GIMPLE_PHI:
	{
	  /* All the arguments of the PHI node must have the same constant
	     length.  */
	  unsigned i;

	  for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
	    {
	      tree arg = gimple_phi_arg (def_stmt, i)->def;

	      /* If this PHI has itself as an argument, we cannot
		 determine the string length of this argument.  However,
		 if we can find a constant string length for the other
		 PHI args then we can still be sure that this is a
		 constant string length.  So be optimistic and just
		 continue with the next argument.  */
	      if (arg == gimple_phi_result (def_stmt))
		continue;

	      if (!get_maxval_strlen (arg, length, visited, type))
		return false;
	    }
	}
	return true;

      default:
	return false;
    }
}

tree
get_maxval_strlen (tree arg, int type)
{
  bitmap visited = NULL;
  tree len = NULL_TREE;
  if (!get_maxval_strlen (arg, &len, &visited, type))
    len = NULL_TREE;
  if (visited)
    BITMAP_FREE (visited);

  return len;
}
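
/* For instance (editor's sketch): with TYPE == 0,
   get_maxval_strlen ("abc", 0) yields 3; with TYPE == 1 and an argument
   defined by  x_1 = PHI <"ab", "wxyz">  the walk above returns the
   maximum string length 4; and TYPE == 2 tracks the maximum constant
   value ARG can take instead of a string length.  */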
/* Fold function call to builtin strcpy with arguments DEST and SRC.
   If LEN is not NULL, it represents the length of the string to be
   copied.  Return false if no simplification can be made.  */

static bool
gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
			    tree dest, tree src)
{
  location_t loc = gimple_location (gsi_stmt (*gsi));
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (operand_equal_p (src, dest, 0))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (optimize_function_for_size_p (cfun))
    return false;

  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  tree len = get_maxval_strlen (src, 0);
  if (!len)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
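
/* E.g. (editor's sketch): when SRC has a known length,

     strcpy (dst, "hi");    becomes    memcpy (dst, "hi", 3);

   copying LEN + 1 bytes so the terminating NUL is included.  */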
/* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
   If SLEN is not NULL, it represents the length of the source string.
   Return false if no simplification can be made.  */

static bool
gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
			     tree dest, tree src, tree len)
{
  location_t loc = gimple_location (gsi_stmt (*gsi));
  tree fn;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  /* We can't compare slen with len as constants below if len is not a
     constant.  */
  if (TREE_CODE (len) != INTEGER_CST)
    return false;

  /* Now, we must be passed a constant src ptr parameter.  */
  tree slen = get_maxval_strlen (src, 0);
  if (!slen || TREE_CODE (slen) != INTEGER_CST)
    return false;

  slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

  /* We do not support simplification of this case, though we do
     support it when expanding trees into RTL.  */
  /* FIXME: generate a call to __builtin_memset.  */
  if (tree_int_cst_lt (slen, len))
    return false;

  /* OK transform into builtin memcpy.  */
  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Simplify a call to the strcat builtin.  DST and SRC are the arguments
   to the call.

   Return false if no simplification was possible, otherwise return true.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static bool
gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);

  const char *p = c_getstr (src);

  /* If the string length is zero, return the dst parameter.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
    return false;

  /* See if we can store by pieces into (dst + strlen(dst)).  */
  tree newdst;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
  tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);

  if (!strlen_fn || !memcpy_fn)
    return false;

  /* If the length of the source string isn't computable don't
     split strcat into strlen and memcpy.  */
  tree len = get_maxval_strlen (src, 0);
  if (! len)
    return false;

  /* Create strlen (dst).  */
  gimple_seq stmts = NULL, stmts2;
  gimple repl = gimple_build_call (strlen_fn, 1, dst);
  gimple_set_location (repl, loc);
  if (gimple_in_ssa_p (cfun))
    newdst = make_ssa_name (size_type_node);
  else
    newdst = create_tmp_reg (size_type_node);
  gimple_call_set_lhs (repl, newdst);
  gimple_seq_add_stmt_without_update (&stmts, repl);

  /* Create (dst p+ strlen (dst)).  */
  newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
  newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len,
			build_int_cst (size_type_node, 1));
  len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
  gimple_seq_add_stmt_without_update (&stmts, repl);
  if (gimple_call_lhs (stmt))
    {
      repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      gsi_replace_with_seq_vops (gsi, stmts);
      /* gsi now points at the assignment to the lhs, get a
	 stmt iterator to the memcpy call.
	 ???  We can't use gsi_for_stmt as that doesn't work when the
	 CFG isn't built yet.  */
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (&gsi2);
      fold_stmt (&gsi2);
    }
  else
    {
      gsi_replace_with_seq_vops (gsi, stmts);
      fold_stmt (gsi);
    }
  return true;
}
/* Fold a call to the __strcat_chk builtin FNDECL.  DEST, SRC, and SIZE
   are the arguments to the call.  */

static bool
gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree size = gimple_call_arg (stmt, 2);
  tree fn;
  const char *p;

  p = c_getstr (src);
  /* If the SRC parameter is "", return DEST.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
    return false;

  /* If __builtin_strcat_chk is used, assume strcat is available.  */
  fn = builtin_decl_explicit (BUILT_IN_STRCAT);
  if (!fn)
    return false;

  gimple repl = gimple_build_call (fn, 2, dest, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Simplify a call to the strncat builtin.  */

static bool
gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dst = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  const char *p = c_getstr (src);

  /* If the requested length is zero, or the src parameter string
     length is zero, return the dst parameter.  */
  if (integer_zerop (len) || (p && *p == '\0'))
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  /* If the requested len is greater than or equal to the string
     length, call strcat.  */
  if (TREE_CODE (len) == INTEGER_CST && p
      && compare_tree_int (len, strlen (p)) >= 0)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);

      /* If the replacement _DECL isn't initialized, don't do the
	 transformation.  */
      if (!fn)
	return false;

      gcall *repl = gimple_build_call (fn, 2, dst, src);
      replace_call_with_call_and_fold (gsi, repl);
      return true;
    }

  return false;
}
/* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
   LEN, and SIZE.  */

static bool
gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);
  tree size = gimple_call_arg (stmt, 3);
  tree fn;
  const char *p;

  p = c_getstr (src);
  /* If the SRC parameter is "" or if LEN is 0, return DEST.  */
  if ((p && *p == '\0')
      || integer_zerop (len))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  if (! integer_all_onesp (size))
    {
      tree src_len = c_strlen (src, 1);
      if (src_len
	  && tree_fits_uhwi_p (src_len)
	  && tree_fits_uhwi_p (len)
	  && ! tree_int_cst_lt (len, src_len))
	{
	  /* If LEN >= strlen (SRC), optimize into __strcat_chk.  */
	  fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
	  if (!fn)
	    return false;

	  gimple repl = gimple_build_call (fn, 3, dest, src, size);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
      return false;
    }

  /* If __builtin_strncat_chk is used, assume strncat is available.  */
  fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
  if (!fn)
    return false;

  gimple repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold a call to the fputs builtin.  ARG0 and ARG1 are the arguments
   to the call.  IGNORE is true if the value returned
   by the builtin will be ignored.  UNLOCKED is true if this is
   actually a call to fputs_unlocked.  If LEN is non-NULL, it represents
   the known length of the string.  Return false if no simplification
   was possible.  */

static bool
gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
			   tree arg0, tree arg1,
			   bool unlocked)
{
  gimple stmt = gsi_stmt (*gsi);

  /* If we're using an unlocked function, assume the other unlocked
     functions exist explicitly.  */
  tree const fn_fputc = (unlocked
			 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
			 : builtin_decl_implicit (BUILT_IN_FPUTC));
  tree const fn_fwrite = (unlocked
			  ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
			  : builtin_decl_implicit (BUILT_IN_FWRITE));

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt))
    return false;

  /* Get the length of the string passed to fputs.  If the length
     can't be determined, punt.  */
  tree len = get_maxval_strlen (arg0, 0);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return false;

  switch (compare_tree_int (len, 1))
    {
    case -1: /* length is 0, delete the call entirely.  */
      replace_call_with_value (gsi, integer_zero_node);
      return true;

    case 0: /* length is 1, call fputc.  */
      {
	const char *p = c_getstr (arg0);
	if (p != NULL)
	  {
	    if (!fn_fputc)
	      return false;

	    gimple repl = gimple_build_call (fn_fputc, 2,
					     build_int_cst
					       (integer_type_node, p[0]), arg1);
	    replace_call_with_call_and_fold (gsi, repl);
	    return true;
	  }
      }
      /* FALLTHROUGH */
    case 1: /* length is greater than 1, call fwrite.  */
      {
	/* If optimizing for size keep fputs.  */
	if (optimize_function_for_size_p (cfun))
	  return false;
	/* New argument list transforming fputs(string, stream) to
	   fwrite(string, 1, len, stream).  */
	if (!fn_fwrite)
	  return false;

	gimple repl = gimple_build_call (fn_fwrite, 4, arg0,
					 size_one_node, len, arg1);
	replace_call_with_call_and_fold (gsi, repl);
	return true;
      }
    default:
      gcc_unreachable ();
    }
  return false;
}
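
/* Summary of the length-based dispatch above (editor's sketch), for a
   call whose result is unused:

     fputs ("", f)      call deleted
     fputs ("a", f)     becomes fputc ('a', f)
     fputs ("ab", f)    becomes fwrite ("ab", 1, 2, f)   (not at -Os)  */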
/* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   DEST, SRC, LEN, and SIZE are the arguments to the call.
   IGNORE is true, if return value can be ignored.  FCODE is the BUILT_IN_*
   code of the builtin.  If MAXLEN is not NULL, it is maximum length
   passed as third argument.  */

static bool
gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
				tree dest, tree src, tree len, tree size,
				enum built_in_function fcode)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST
     (resp. DEST+LEN for __mempcpy_chk).  */
  if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
    {
      if (fcode != BUILT_IN_MEMPCPY_CHK)
	{
	  replace_call_with_value (gsi, dest);
	  return true;
	}
      else
	{
	  tree temp = fold_build_pointer_plus_loc (loc, dest, len);
	  temp = force_gimple_operand_gsi (gsi, temp,
					   false, NULL_TREE, true,
					   GSI_SAME_STMT);
	  replace_call_with_value (gsi, temp);
	  return true;
	}
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (len, 2);
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
		{
		  /* (void) __mempcpy_chk () can be optimized into
		     (void) __memcpy_chk ().  */
		  fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
		  if (!fn)
		    return false;

		  gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}
	      return false;
	    }
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  fn = NULL_TREE;
  /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
     mem{cpy,pcpy,move,set} is available.  */
  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
      break;
    case BUILT_IN_MEMPCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
      break;
    case BUILT_IN_MEMMOVE_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
      break;
    case BUILT_IN_MEMSET_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMSET);
      break;
    default:
      break;
    }

  if (!fn)
    return false;

  gimple repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
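
/* For example (editor's sketch): with a known object size,

     __memcpy_chk (d, s, 16, 32);    becomes    memcpy (d, s, 16);

   because LEN (16) provably fits in SIZE (32); when the bound might be
   exceeded the checked call is left alone.  */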
/* Fold a call to the __st[rp]cpy_chk builtin.
   DEST, SRC, and SIZE are the arguments to the call.
   IGNORE is true if return value can be ignored.  FCODE is the BUILT_IN_*
   code of the builtin.  If MAXLEN is not NULL, it is maximum length of
   strings passed as second argument.  */

static bool
gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
				tree dest,
				tree src, tree size,
				enum built_in_function fcode)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree len, fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (src, 1);
  if (! integer_all_onesp (size))
    {
      len = c_strlen (src, 1);
      if (! len || ! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_STPCPY_CHK)
		{
		  if (! ignore)
		    return false;

		  /* If return value of __stpcpy_chk is ignored,
		     optimize into __strcpy_chk.  */
		  fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
		  if (!fn)
		    return false;

		  gimple repl = gimple_build_call (fn, 3, dest, src, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}

	      if (! len || TREE_SIDE_EFFECTS (len))
		return false;

	      /* If c_strlen returned something, but not a constant,
		 transform __strcpy_chk into __memcpy_chk.  */
	      fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	      if (!fn)
		return false;

	      len = fold_convert_loc (loc, size_type_node, len);
	      len = size_binop_loc (loc, PLUS_EXPR, len,
				    build_int_cst (size_type_node, 1));
	      len = force_gimple_operand_gsi (gsi, len, true, NULL_TREE,
					      true, GSI_SAME_STMT);
	      gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }
	}
      else
	maxlen = len;

      if (! tree_int_cst_lt (maxlen, size))
	return false;
    }

  /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
			      ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
  if (!fn)
    return false;

  gimple repl = gimple_build_call (fn, 2, dest, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold a call to the __st{r,p}ncpy_chk builtin.  DEST, SRC, LEN, and SIZE
   are the arguments to the call.  If MAXLEN is not NULL, it is maximum
   length passed as third argument.  IGNORE is true if return value can be
   ignored.  FCODE is the BUILT_IN_* code of the builtin.  */

static bool
gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
				 tree dest, tree src,
				 tree len, tree size,
				 enum built_in_function fcode)
{
  gimple stmt = gsi_stmt (*gsi);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
    {
      /* If return value of __stpncpy_chk is ignored,
	 optimize into __strncpy_chk.  */
      fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
      if (fn)
	{
	  gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (len, 2);
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    return false;
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
			      ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
  if (!fn)
    return false;

  gimple repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold function call to builtin stpcpy with arguments DEST and SRC.
   Return false if no simplification can be made.  */

static bool
gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  location_t loc = gimple_location (stmt);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree fn, len, lenp1;

  /* If the result is unused, replace stpcpy with strcpy.  */
  if (gimple_call_lhs (stmt) == NULL_TREE)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;
      gimple_call_set_fndecl (stmt, fn);
      fold_stmt (gsi);
      return true;
    }

  len = c_strlen (src, 1);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return false;

  if (optimize_function_for_size_p (cfun)
      /* If length is zero it's small enough.  */
      && !integer_zerop (len))
    return false;

  /* If the source has a known length replace stpcpy with memcpy.  */
  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  gimple_seq stmts = NULL;
  tree tem = gimple_convert (&stmts, loc, size_type_node, len);
  lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
			tem, build_int_cst (size_type_node, 1));
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
  gimple_set_vuse (repl, gimple_vuse (stmt));
  gimple_set_vdef (repl, gimple_vdef (stmt));
  if (gimple_vdef (repl)
      && TREE_CODE (gimple_vdef (repl)) == SSA_NAME)
    SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
  gsi_insert_before (gsi, repl, GSI_SAME_STMT);
  /* Replace the result with dest + len.  */
  stmts = NULL;
  tem = gimple_convert (&stmts, loc, sizetype, len);
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
				      POINTER_PLUS_EXPR, dest, tem);
  gsi_replace (gsi, ret, true);
  /* Finally fold the memcpy call.  */
  gimple_stmt_iterator gsi2 = *gsi;
  gsi_prev (&gsi2);
  fold_stmt (&gsi2);
  return true;
}
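
/* E.g. (editor's sketch): for a source of known length,

     p = stpcpy (d, "abc");

   becomes

     memcpy (d, "abc", 4);
     p = d + 3;

   while an unused result degrades stpcpy to plain strcpy.  */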
/* Fold a call to __snprintf_chk or __vsnprintf_chk at *GSI.  Return
   false if a normal call should be emitted rather than expanding
   the function inline.  FCODE is either BUILT_IN_SNPRINTF_CHK or
   BUILT_IN_VSNPRINTF_CHK.  */

static bool
gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
				  enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call.  */
  if (gimple_call_num_args (stmt) < 5)
    return false;

  dest = gimple_call_arg (stmt, 0);
  len = gimple_call_arg (stmt, 1);
  flag = gimple_call_arg (stmt, 2);
  size = gimple_call_arg (stmt, 3);
  fmt = gimple_call_arg (stmt, 4);

  if (! tree_fits_uhwi_p (size))
    return false;

  if (! integer_all_onesp (size))
    {
      tree maxlen = get_maxval_strlen (len, 2);
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    return false;
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  if (!init_target_chars ())
    return false;

  /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      fmt_str = c_getstr (fmt);
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
     available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
			      ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 5 arguments by 3, retaining
     trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, len);
  gimple_call_set_arg (stmt, 2, fmt);
  for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
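
/* Illustrative example (hypothetical names): with FLAG 0 and an
   unknown object size of (size_t) -1,

       __builtin___snprintf_chk (s, n, 0, -1, "%d", i)

   is folded to

       snprintf (s, n, "%d", i)

   dropping the two checking arguments.  */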
/* Fold a call to __sprintf_chk or __vsprintf_chk at *GSI.
   Return false if a normal call should be emitted rather than
   expanding the function inline.  FCODE is either BUILT_IN_SPRINTF_CHK
   or BUILT_IN_VSPRINTF_CHK.  */

static bool
gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
				 enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;
  unsigned nargs = gimple_call_num_args (stmt);

  /* Verify the required arguments in the original call.  */
  if (nargs < 4)
    return false;
  dest = gimple_call_arg (stmt, 0);
  flag = gimple_call_arg (stmt, 1);
  size = gimple_call_arg (stmt, 2);
  fmt = gimple_call_arg (stmt, 3);

  if (! tree_fits_uhwi_p (size))
    return false;

  len = NULL_TREE;

  if (!init_target_chars ())
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str != NULL)
    {
      /* If the format doesn't contain % args or %%, we know the size.  */
      if (strchr (fmt_str, target_percent) == 0)
	{
	  if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
	    len = build_int_cstu (size_type_node, strlen (fmt_str));
	}
      /* If the format is "%s" and first ... argument is a string literal,
	 we know the size too.  */
      else if (fcode == BUILT_IN_SPRINTF_CHK
	       && strcmp (fmt_str, target_percent_s) == 0)
	{
	  tree arg;

	  if (nargs == 5)
	    {
	      arg = gimple_call_arg (stmt, 4);
	      if (POINTER_TYPE_P (TREE_TYPE (arg)))
		{
		  len = c_strlen (arg, 1);
		  if (! len || ! tree_fits_uhwi_p (len))
		    len = NULL_TREE;
		}
	    }
	}
    }

  if (! integer_all_onesp (size))
    {
      if (! len || ! tree_int_cst_lt (len, size))
	return false;
    }

  /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
			      ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 4 arguments by 2, retaining
     trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, fmt);
  for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
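
/* Illustrative example (hypothetical names): with a constant format
   whose output is known to fit in the destination,

       __builtin___sprintf_chk (s, 0, 16, "abc")

   is folded to

       sprintf (s, "abc")

   since the three characters plus the terminating NUL fit in 16 bytes.  */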
/* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
   ORIG may be null if this is a 2-argument call.  We don't attempt to
   simplify calls with more than 3 arguments.

   Return true if simplification was possible, otherwise false.  */

static bool
gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree fmt = gimple_call_arg (stmt, 1);
  tree orig = NULL_TREE;
  const char *fmt_str = NULL;

  /* Verify the required arguments in the original call.  We deal with two
     types of sprintf() calls: 'sprintf (str, fmt)' and
     'sprintf (dest, "%s", orig)'.  */
  if (gimple_call_num_args (stmt) > 3)
    return false;

  if (gimple_call_num_args (stmt) == 3)
    orig = gimple_call_arg (stmt, 2);

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);

      if (!fn)
	return false;

      /* Don't optimize sprintf (buf, "abc", ptr++).  */
      if (orig)
	return false;

      /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
	 'format' is known to contain no % formats.  */
      gimple_seq stmts = NULL;
      gimple repl = gimple_build_call (fn, 2, dest, fmt);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  repl = gimple_build_assign (gimple_call_lhs (stmt),
				      build_int_cst (integer_type_node,
						     strlen (fmt_str)));
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn;
      fn = builtin_decl_implicit (BUILT_IN_STRCPY);

      if (!fn)
	return false;

      /* Don't crash on sprintf (str1, "%s").  */
      if (!orig)
	return false;

      tree orig_len = NULL_TREE;
      if (gimple_call_lhs (stmt))
	{
	  orig_len = get_maxval_strlen (orig, 0);
	  if (!orig_len)
	    return false;
	}

      /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2).  */
      gimple_seq stmts = NULL;
      gimple repl = gimple_build_call (fn, 2, dest, orig);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  if (!useless_type_conversion_p (integer_type_node,
					  TREE_TYPE (orig_len)))
	    orig_len = fold_convert (integer_type_node, orig_len);
	  repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }
  return false;
}
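
/* Illustrative example: a format without '%' directives is copied
   verbatim, so

       n = sprintf (buf, "hello");

   becomes

       strcpy (buf, "hello");
       n = 5;

   where BUF and N are hypothetical names.  */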
/* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
   FMT, and ORIG.  ORIG may be null if this is a 3-argument call.  We don't
   attempt to simplify calls with more than 4 arguments.

   Return true if simplification was possible, otherwise false.  */

static bool
gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest = gimple_call_arg (stmt, 0);
  tree destsize = gimple_call_arg (stmt, 1);
  tree fmt = gimple_call_arg (stmt, 2);
  tree orig = NULL_TREE;
  const char *fmt_str = NULL;

  if (gimple_call_num_args (stmt) > 4)
    return false;

  if (gimple_call_num_args (stmt) == 4)
    orig = gimple_call_arg (stmt, 3);

  if (!tree_fits_uhwi_p (destsize))
    return false;
  unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;

      /* Don't optimize snprintf (buf, 4, "abc", ptr++).  */
      if (orig)
	return false;

      /* We could expand this as
	 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
	 or to
	 memcpy (str, fmt_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      size_t len = strlen (fmt_str);
      if (len >= destlen)
	return false;

      gimple_seq stmts = NULL;
      gimple repl = gimple_build_call (fn, 2, dest, fmt);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  repl = gimple_build_assign (gimple_call_lhs (stmt),
				      build_int_cst (integer_type_node, len));
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;

      /* Don't crash on snprintf (str1, cst, "%s").  */
      if (!orig)
	return false;

      tree orig_len = get_maxval_strlen (orig, 0);
      if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
	return false;

      /* We could expand this as
	 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
	 or to
	 memcpy (str1, str2_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      if (compare_tree_int (orig_len, destlen) >= 0)
	return false;

      /* Convert snprintf (str1, cst, "%s", str2) into
	 strcpy (str1, str2) if strlen (str2) < cst.  */
      gimple_seq stmts = NULL;
      gimple repl = gimple_build_call (fn, 2, dest, orig);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  if (!useless_type_conversion_p (integer_type_node,
					  TREE_TYPE (orig_len)))
	    orig_len = fold_convert (integer_type_node, orig_len);
	  repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the memcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }
  return false;
}
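
/* Illustrative example (hypothetical names): when the source is known
   to fit,

       snprintf (buf, 32, "%s", "abcdef")

   becomes

       strcpy (buf, "abcdef")

   because strlen ("abcdef") < 32, so no truncation can occur.  */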
/* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
   FP, FMT, and ARG are the arguments to the call.  We don't fold calls with
   more than 3 arguments, and ARG may be null in the 2-argument case.

   Return true if simplification was possible, otherwise false.  FCODE is
   the BUILT_IN_* code of the function to be simplified.  */

static bool
gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
			     tree fp, tree fmt, tree arg,
			     enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree fn_fputc, fn_fputs;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt) != NULL_TREE)
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
      fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
    }
  else
    {
      fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
      fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
    }

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use fputs.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
	  && arg)
	return false;

      /* If the format specifier was "", fprintf does nothing.  */
      if (fmt_str[0] == '\0')
	{
	  replace_call_with_value (gsi, NULL_TREE);
	  return true;
	}

      /* When "string" doesn't contain %, replace all cases of
	 fprintf (fp, string) with fputs (string, fp).  The fputs
	 builtin will take care of special cases like length == 1.  */
      if (fn_fputs)
	{
	  gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
    return false;

  /* If the format specifier was "%s", call __builtin_fputs (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_s) == 0)
    {
      if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	return false;
      if (fn_fputs)
	{
	  gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* If the format specifier was "%c", call __builtin_fputc (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg
	  || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
	return false;
      if (fn_fputc)
	{
	  gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  return false;
}
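
/* Illustrative examples (hypothetical names), valid only when the
   result of the call is unused:

       fprintf (fp, "hello")  ->  fputs ("hello", fp)
       fprintf (fp, "%s", s)  ->  fputs (s, fp)
       fprintf (fp, "%c", c)  ->  fputc (c, fp)  */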
/* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
   FMT and ARG are the arguments to the call; we don't fold cases with
   more than 2 arguments, and ARG may be null if this is a 1-argument case.

   Return true if simplification was possible, otherwise false.  FCODE is
   the BUILT_IN_* code of the function to be simplified.  */

static bool
gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
			    tree arg, enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree fn_putchar, fn_puts, newarg;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt) != NULL_TREE)
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (fcode == BUILT_IN_PRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
	 unlocked functions exist explicitly.  */
      fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
      fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
    }
  else
    {
      fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
      fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
    }

  if (!init_target_chars ())
    return false;

  if (strcmp (fmt_str, target_percent_s) == 0
      || strchr (fmt_str, target_percent) == NULL)
    {
      const char *str;

      if (strcmp (fmt_str, target_percent_s) == 0)
	{
	  if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
	    return false;

	  if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	    return false;

	  str = c_getstr (arg);
	  if (str == NULL)
	    return false;
	}
      else
	{
	  /* The format specifier doesn't contain any '%' characters.  */
	  if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
	      && arg)
	    return false;
	  str = fmt_str;
	}

      /* If the string was "", printf does nothing.  */
      if (str[0] == '\0')
	{
	  replace_call_with_value (gsi, NULL_TREE);
	  return true;
	}

      /* If the string has length of 1, call putchar.  */
      if (str[1] == '\0')
	{
	  /* Given printf("c"), (where c is any one character,)
	     convert "c"[0] to an int and pass that to the replacement
	     function.  */
	  newarg = build_int_cst (integer_type_node, str[0]);
	  if (fn_putchar)
	    {
	      gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }
	}
      else
	{
	  /* If the string was "string\n", call puts("string").  */
	  size_t len = strlen (str);
	  if ((unsigned char)str[len - 1] == target_newline
	      && (size_t) (int) len == len
	      && (int) len > 0)
	    {
	      char *newstr;
	      tree offset_node, string_cst;

	      /* Create a NUL-terminated string that's one char shorter
		 than the original, stripping off the trailing '\n'.  */
	      newarg = build_string_literal (len, str);
	      string_cst = string_constant (newarg, &offset_node);
	      gcc_checking_assert (string_cst
				   && (TREE_STRING_LENGTH (string_cst)
				       == (int) len)
				   && integer_zerop (offset_node)
				   && (unsigned char)
				      TREE_STRING_POINTER (string_cst)[len - 1]
				      == target_newline);
	      /* build_string_literal creates a new STRING_CST,
		 modify it in place to avoid double copying.  */
	      newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
	      newstr[len - 1] = '\0';
	      if (fn_puts)
		{
		  gcall *repl = gimple_build_call (fn_puts, 1, newarg);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}
	    }
	  else
	    /* We'd like to arrange to call fputs(string,stdout) here,
	       but we need stdout and don't have a way to get it yet.  */
	    return false;
	}
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
    return false;

  /* If the format specifier was "%s\n", call __builtin_puts(arg).  */
  else if (strcmp (fmt_str, target_percent_s_newline) == 0)
    {
      if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
	return false;
      if (fn_puts)
	{
	  gcall *repl = gimple_build_call (fn_puts, 1, arg);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  /* If the format specifier was "%c", call __builtin_putchar(arg).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg || ! useless_type_conversion_p (integer_type_node,
					       TREE_TYPE (arg)))
	return false;
      if (fn_putchar)
	{
	  gcall *repl = gimple_build_call (fn_putchar, 1, arg);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  return false;
}
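
/* Illustrative examples (hypothetical names), valid only when the
   result of the call is unused:

       printf ("")           ->  (nothing)
       printf ("x")          ->  putchar ('x')
       printf ("hello\n")    ->  puts ("hello")
       printf ("%s\n", str)  ->  puts (str)
       printf ("%c", c)      ->  putchar (c)  */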
/* Fold a call to __builtin_strlen whose argument has a known maximum
   string length.  */

static bool
gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree len = get_maxval_strlen (gimple_call_arg (stmt, 0), 0);
  if (!len)
    return false;
  len = force_gimple_operand_gsi (gsi, len, true, NULL, true, GSI_SAME_STMT);
  replace_call_with_value (gsi, len);
  return true;
}
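
/* Illustrative example: once get_maxval_strlen determines the length,
   a call like n = strlen ("abc") is replaced by n = 3.  */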
/* Fold the non-target builtin at *GSI and return whether any simplification
   was made.  */

static bool
gimple_fold_builtin (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
  tree callee = gimple_call_fndecl (stmt);

  /* Give up for always_inline inline builtins until they are
     inlined.  */
  if (avoid_folding_inline_builtin (callee))
    return false;

  unsigned n = gimple_call_num_args (stmt);
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
  switch (fcode)
    {
    case BUILT_IN_BZERO:
      return gimple_fold_builtin_memset (gsi, integer_zero_node,
					 gimple_call_arg (stmt, 1));
    case BUILT_IN_MEMSET:
      return gimple_fold_builtin_memset (gsi,
					 gimple_call_arg (stmt, 1),
					 gimple_call_arg (stmt, 2));
    case BUILT_IN_BCOPY:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 1),
					    gimple_call_arg (stmt, 0), 3);
    case BUILT_IN_MEMCPY:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1), 0);
    case BUILT_IN_MEMPCPY:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1), 1);
    case BUILT_IN_MEMMOVE:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1), 3);
    case BUILT_IN_SPRINTF_CHK:
    case BUILT_IN_VSPRINTF_CHK:
      return gimple_fold_builtin_sprintf_chk (gsi, fcode);
    case BUILT_IN_STRCAT_CHK:
      return gimple_fold_builtin_strcat_chk (gsi);
    case BUILT_IN_STRNCAT_CHK:
      return gimple_fold_builtin_strncat_chk (gsi);
    case BUILT_IN_STRLEN:
      return gimple_fold_builtin_strlen (gsi);
    case BUILT_IN_STRCPY:
      return gimple_fold_builtin_strcpy (gsi,
					 gimple_call_arg (stmt, 0),
					 gimple_call_arg (stmt, 1));
    case BUILT_IN_STRNCPY:
      return gimple_fold_builtin_strncpy (gsi,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2));
    case BUILT_IN_STRCAT:
      return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
					 gimple_call_arg (stmt, 1));
    case BUILT_IN_STRNCAT:
      return gimple_fold_builtin_strncat (gsi);
    case BUILT_IN_FPUTS:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1), false);
    case BUILT_IN_FPUTS_UNLOCKED:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1), true);
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      return gimple_fold_builtin_memory_chk (gsi,
					     gimple_call_arg (stmt, 0),
					     gimple_call_arg (stmt, 1),
					     gimple_call_arg (stmt, 2),
					     gimple_call_arg (stmt, 3),
					     fcode);
    case BUILT_IN_STPCPY:
      return gimple_fold_builtin_stpcpy (gsi);
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      return gimple_fold_builtin_stxcpy_chk (gsi,
					     gimple_call_arg (stmt, 0),
					     gimple_call_arg (stmt, 1),
					     gimple_call_arg (stmt, 2),
					     fcode);
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      return gimple_fold_builtin_stxncpy_chk (gsi,
					      gimple_call_arg (stmt, 0),
					      gimple_call_arg (stmt, 1),
					      gimple_call_arg (stmt, 2),
					      gimple_call_arg (stmt, 3),
					      fcode);
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      return gimple_fold_builtin_snprintf_chk (gsi, fcode);
    case BUILT_IN_SNPRINTF:
      return gimple_fold_builtin_snprintf (gsi);
    case BUILT_IN_SPRINTF:
      return gimple_fold_builtin_sprintf (gsi);
    case BUILT_IN_FPRINTF:
    case BUILT_IN_FPRINTF_UNLOCKED:
    case BUILT_IN_VFPRINTF:
      if (n == 2 || n == 3)
	return gimple_fold_builtin_fprintf (gsi,
					    gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1),
					    n == 3
					    ? gimple_call_arg (stmt, 2)
					    : NULL_TREE,
					    fcode);
      break;
    case BUILT_IN_FPRINTF_CHK:
    case BUILT_IN_VFPRINTF_CHK:
      if (n == 3 || n == 4)
	return gimple_fold_builtin_fprintf (gsi,
					    gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 2),
					    n == 4
					    ? gimple_call_arg (stmt, 3)
					    : NULL_TREE,
					    fcode);
      break;
    case BUILT_IN_PRINTF:
    case BUILT_IN_PRINTF_UNLOCKED:
    case BUILT_IN_VPRINTF:
      if (n == 1 || n == 2)
	return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
					   n == 2
					   ? gimple_call_arg (stmt, 1)
					   : NULL_TREE, fcode);
      break;
    case BUILT_IN_PRINTF_CHK:
    case BUILT_IN_VPRINTF_CHK:
      if (n == 2 || n == 3)
	return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
					   n == 3
					   ? gimple_call_arg (stmt, 2)
					   : NULL_TREE, fcode);
      break;
    default:;
    }

  /* Try the generic builtin folder.  */
  bool ignore = (gimple_call_lhs (stmt) == NULL);
  tree result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
	STRIP_NOPS (result);
      else
	result = fold_convert (gimple_call_return_type (stmt), result);
      if (!update_call_from_tree (gsi, result))
	gimplify_and_update_call_from_tree (gsi, result);
      return true;
    }

  return false;
}
/* Return true if ARG0 CODE ARG1, computed in infinite signed precision,
   doesn't fit into TYPE.  The overflow test is independent of -fwrapv
   and applies even to unsigned types.  */

static bool
arith_overflowed_p (enum tree_code code, const_tree type,
		    const_tree arg0, const_tree arg1)
{
  typedef FIXED_WIDE_INT (WIDE_INT_MAX_PRECISION * 2) widest2_int;
  typedef generic_wide_int <wi::extended_tree <WIDE_INT_MAX_PRECISION * 2> >
    widest2_int_cst;
  widest2_int warg0 = widest2_int_cst (arg0);
  widest2_int warg1 = widest2_int_cst (arg1);
  widest2_int wres;
  switch (code)
    {
    case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
    case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
    case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
    default: gcc_unreachable ();
    }
  signop sign = TYPE_SIGN (type);
  if (sign == UNSIGNED && wi::neg_p (wres))
    return true;
  return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
}
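
/* Illustrative example: for an 8-bit unsigned TYPE, 200 + 100 computed
   in the double-width type yields 300, which needs 9 bits, so overflow
   is reported; 200 - 100 yields 100, which fits in 8 bits, so it is
   not.  */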
/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.  */

static bool
gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree callee;
  bool changed = false;
  unsigned i;

  /* Fold *& in call arguments.  */
  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
      {
	tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
	if (tmp)
	  {
	    gimple_call_set_arg (stmt, i, tmp);
	    changed = true;
	  }
      }

  /* Check for virtual calls that became direct calls.  */
  callee = gimple_call_fn (stmt);
  if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
    {
      if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
	{
	  if (dump_file && virtual_method_call_p (callee)
	      && !possible_polymorphic_call_target_p
		    (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
						 (OBJ_TYPE_REF_EXPR (callee)))))
	    {
	      fprintf (dump_file,
		       "Type inheritance inconsistent devirtualization of ");
	      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, callee, TDF_SLIM);
	      fprintf (dump_file, "\n");
	    }

	  gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
	  changed = true;
	}
      else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
	{
	  bool final;
	  vec <cgraph_node *>targets
	    = possible_polymorphic_call_targets (callee, stmt, &final);
	  if (final && targets.length () <= 1 && dbg_cnt (devirt))
	    {
	      tree lhs = gimple_call_lhs (stmt);
	      if (dump_enabled_p ())
		{
		  location_t loc = gimple_location_safe (stmt);
		  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
				   "folding virtual function call to %s\n",
				   targets.length () == 1
				   ? targets[0]->name ()
				   : "__builtin_unreachable");
		}
	      if (targets.length () == 1)
		{
		  gimple_call_set_fndecl (stmt, targets[0]->decl);
		  changed = true;
		  /* If the call becomes noreturn, remove the lhs.  */
		  if (lhs && (gimple_call_flags (stmt) & ECF_NORETURN))
		    {
		      if (TREE_CODE (lhs) == SSA_NAME)
			{
			  tree var = create_tmp_var (TREE_TYPE (lhs));
			  tree def = get_or_create_ssa_default_def (cfun, var);
			  gimple new_stmt = gimple_build_assign (lhs, def);
			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
			}
		      gimple_call_set_lhs (stmt, NULL_TREE);
		    }
		}
	      else
		{
		  tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
		  gimple new_stmt = gimple_build_call (fndecl, 0);
		  gimple_set_location (new_stmt, gimple_location (stmt));
		  if (lhs && TREE_CODE (lhs) == SSA_NAME)
		    {
		      tree var = create_tmp_var (TREE_TYPE (lhs));
		      tree def = get_or_create_ssa_default_def (cfun, var);

		      /* To satisfy condition for
			 cgraph_update_edges_for_call_stmt_node,
			 we need to preserve GIMPLE_CALL statement
			 at position of GSI iterator.  */
		      update_call_from_tree (gsi, def);
		      gsi_insert_before (gsi, new_stmt, GSI_NEW_STMT);
		    }
		  else
		    {
		      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
		      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
		      gsi_replace (gsi, new_stmt, false);
		    }
		  return true;
		}
	    }
	}
    }

  /* Check for indirect calls that became direct calls, and then
     no longer require a static chain.  */
  if (gimple_call_chain (stmt))
    {
      tree fn = gimple_call_fndecl (stmt);
      if (fn && !DECL_STATIC_CHAIN (fn))
	{
	  gimple_call_set_chain (stmt, NULL);
	  changed = true;
	}
      else
	{
	  tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
	  if (tmp)
	    {
	      gimple_call_set_chain (stmt, tmp);
	      changed = true;
	    }
	}
    }

  if (inplace)
    return changed;

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    {
      if (gimple_fold_builtin (gsi))
	changed = true;
    }
  else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
    changed |= targetm.gimple_fold_builtin (gsi);
  else if (gimple_call_internal_p (stmt))
    {
      enum tree_code subcode = ERROR_MARK;
      tree result = NULL_TREE;
      bool cplx_result = false;
      tree overflow = NULL_TREE;
      switch (gimple_call_internal_fn (stmt))
	{
	case IFN_BUILTIN_EXPECT:
	  result = fold_builtin_expect (gimple_location (stmt),
					gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1),
					gimple_call_arg (stmt, 2));
	  break;
	case IFN_UBSAN_OBJECT_SIZE:
	  if (integer_all_onesp (gimple_call_arg (stmt, 2))
	      || (TREE_CODE (gimple_call_arg (stmt, 1)) == INTEGER_CST
		  && TREE_CODE (gimple_call_arg (stmt, 2)) == INTEGER_CST
		  && tree_int_cst_le (gimple_call_arg (stmt, 1),
				      gimple_call_arg (stmt, 2))))
	    {
	      gsi_replace (gsi, gimple_build_nop (), true);
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	      return true;
	    }
	  break;
	case IFN_UBSAN_CHECK_ADD:
	  subcode = PLUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_SUB:
	  subcode = MINUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_MUL:
	  subcode = MULT_EXPR;
	  break;
	case IFN_ADD_OVERFLOW:
	  subcode = PLUS_EXPR;
	  cplx_result = true;
	  break;
	case IFN_SUB_OVERFLOW:
	  subcode = MINUS_EXPR;
	  cplx_result = true;
	  break;
	case IFN_MUL_OVERFLOW:
	  subcode = MULT_EXPR;
	  cplx_result = true;
	  break;
	default:
	  break;
	}
      if (subcode != ERROR_MARK)
	{
	  tree arg0 = gimple_call_arg (stmt, 0);
	  tree arg1 = gimple_call_arg (stmt, 1);
	  tree type = TREE_TYPE (arg0);
	  if (cplx_result)
	    {
	      tree lhs = gimple_call_lhs (stmt);
	      if (lhs == NULL_TREE)
		type = NULL_TREE;
	      else
		type = TREE_TYPE (TREE_TYPE (lhs));
	    }
	  if (type == NULL_TREE)
	    ;
	  /* x = y + 0; x = y - 0; x = y * 0; */
	  else if (integer_zerop (arg1))
	    result = subcode == MULT_EXPR ? integer_zero_node : arg0;
	  /* x = 0 + y; x = 0 * y; */
	  else if (subcode != MINUS_EXPR && integer_zerop (arg0))
	    result = subcode == MULT_EXPR ? integer_zero_node : arg1;
	  /* x = y - y; */
	  else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
	    result = integer_zero_node;
	  /* x = y * 1; x = 1 * y; */
	  else if (subcode == MULT_EXPR && integer_onep (arg1))
	    result = arg0;
	  else if (subcode == MULT_EXPR && integer_onep (arg0))
	    result = arg1;
	  else if (TREE_CODE (arg0) == INTEGER_CST
		   && TREE_CODE (arg1) == INTEGER_CST)
	    {
	      if (cplx_result)
		result = int_const_binop (subcode, fold_convert (type, arg0),
					  fold_convert (type, arg1));
	      else
		result = int_const_binop (subcode, arg0, arg1);
	      if (result && arith_overflowed_p (subcode, type, arg0, arg1))
		{
		  if (cplx_result)
		    overflow = build_one_cst (type);
		  else
		    result = NULL_TREE;
		}
	    }
	  if (result)
	    {
	      if (result == integer_zero_node)
		result = build_zero_cst (type);
	      else if (cplx_result && TREE_TYPE (result) != type)
		{
		  if (TREE_CODE (result) == INTEGER_CST)
		    {
		      if (arith_overflowed_p (PLUS_EXPR, type, result,
					      integer_zero_node))
			overflow = build_one_cst (type);
		    }
		  else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
			    && TYPE_UNSIGNED (type))
			   || (TYPE_PRECISION (type)
			       < (TYPE_PRECISION (TREE_TYPE (result))
				  + (TYPE_UNSIGNED (TREE_TYPE (result))
				     && !TYPE_UNSIGNED (type)))))
		    result = NULL_TREE;
		  if (result)
		    result = fold_convert (type, result);
		}
	    }
	}

      if (result)
	{
	  if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
	    result = drop_tree_overflow (result);
	  if (cplx_result)
	    {
	      if (overflow == NULL_TREE)
		overflow = build_zero_cst (TREE_TYPE (result));
	      tree ctype = build_complex_type (TREE_TYPE (result));
	      if (TREE_CODE (result) == INTEGER_CST
		  && TREE_CODE (overflow) == INTEGER_CST)
		result = build_complex (ctype, result, overflow);
	      else
		result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
				     ctype, result, overflow);
	    }
	  if (!update_call_from_tree (gsi, result))
	    gimplify_and_update_call_from_tree (gsi, result);
	  changed = true;
	}
    }

  return changed;
}
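
/* Illustrative example for the internal-function folding above: an
   IFN_ADD_OVERFLOW of the unsigned char constants 200 and 100 becomes
   the complex constant {44, 1}, the wrapped result paired with a set
   overflow flag.  */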
/* Worker for fold_stmt_1 dispatch to pattern-based folding with
   gimple_simplify.

   Replaces *GSI with the simplification result in RCODE and OPS
   and the associated statements in *SEQ.  Does the replacement
   according to INPLACE and returns true if the operation succeeded.  */

static bool
replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
				  code_helper rcode, tree *ops,
				  gimple_seq *seq, bool inplace)
{
  gimple stmt = gsi_stmt (*gsi);

  /* Play safe and do not allow abnormals to be mentioned in
     newly created statements.  See also maybe_push_res_to_seq.  */
  if ((TREE_CODE (ops[0]) == SSA_NAME
       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0]))
      || (ops[1]
	  && TREE_CODE (ops[1]) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1]))
      || (ops[2]
	  && TREE_CODE (ops[2]) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2])))
    return false;

  if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
    {
      gcc_assert (rcode.is_tree_code ());
      if (TREE_CODE_CLASS ((enum tree_code) rcode) == tcc_comparison
	  /* GIMPLE_CONDs condition may not throw.  */
	  && (!flag_exceptions
	      || !cfun->can_throw_non_call_exceptions
	      || !operation_could_trap_p (rcode,
					  FLOAT_TYPE_P (TREE_TYPE (ops[0])),
					  false, NULL_TREE)))
	gimple_cond_set_condition (cond_stmt, rcode, ops[0], ops[1]);
      else if (rcode == SSA_NAME)
	gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
				   build_zero_cst (TREE_TYPE (ops[0])));
      else if (rcode == INTEGER_CST)
	{
	  if (integer_zerop (ops[0]))
	    gimple_cond_make_false (cond_stmt);
	  else
	    gimple_cond_make_true (cond_stmt);
	}
      else if (!inplace)
	{
	  tree res = maybe_push_res_to_seq (rcode, boolean_type_node,
					    ops, seq);
	  if (!res)
	    return false;
	  gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
				     build_zero_cst (TREE_TYPE (res)));
	}
      else
	return false;
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi),
			     0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (is_gimple_assign (stmt)
	   && rcode.is_tree_code ())
    {
      if (!inplace
	  || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (rcode))
	{
	  maybe_build_generic_op (rcode,
				  TREE_TYPE (gimple_assign_lhs (stmt)),
				  &ops[0], ops[1], ops[2]);
	  gimple_assign_set_rhs_with_ops (gsi, rcode, ops[0], ops[1], ops[2]);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      if (!gimple_seq_empty_p (*seq))
		print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	      print_gimple_stmt (dump_file, gsi_stmt (*gsi),
				 0, TDF_SLIM);
	    }
	  gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
	  return true;
	}
    }
  else if (!inplace)
    {
      if (gimple_has_lhs (stmt))
	{
	  tree lhs = gimple_get_lhs (stmt);
	  if (!maybe_push_res_to_seq (rcode, TREE_TYPE (lhs),
				      ops, seq, lhs))
	    return false;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	    }
	  gsi_replace_with_seq_vops (gsi, *seq);
	  return true;
	}
      else
	gcc_unreachable ();
    }

  return false;
}
/* Canonicalize MEM_REFs invariant address operand after propagation.  */

static bool
maybe_canonicalize_mem_ref_addr (tree *t)
{
  bool res = false;

  if (TREE_CODE (*t) == ADDR_EXPR)
    t = &TREE_OPERAND (*t, 0);

  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
     of invariant addresses into a SSA name MEM_REF address.  */
  if (TREE_CODE (*t) == MEM_REF
      || TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (*t, 0);
      if (TREE_CODE (addr) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
	      || handled_component_p (TREE_OPERAND (addr, 0))))
	{
	  tree base;
	  HOST_WIDE_INT coffset;
	  base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
						&coffset);
	  if (!base)
	    gcc_unreachable ();

	  TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
	  TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
						  TREE_OPERAND (*t, 1),
						  size_int (coffset));
	  res = true;
	}
      gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
			   || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
    }

  /* Canonicalize back MEM_REFs to plain reference trees if the object
     accessed is a decl that has the same access semantics as the MEM_REF.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1))
      && MR_DEPENDENCE_CLIQUE (*t) == 0)
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
      if (/* Same volatile qualification.  */
	  TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
	  /* Same TBAA behavior with -fstrict-aliasing.  */
	  && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
	  && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
	      == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
	  /* Same alignment.  */
	  && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
	  /* We have to look out here to not drop a required conversion
	     from the rhs to the lhs if *t appears on the lhs or vice-versa
	     if it appears on the rhs.  Thus require strict type
	     compatibility.  */
	  && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
	{
	  *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
	  res = true;
	}
    }

  /* Canonicalize TARGET_MEM_REF in particular with respect to
     the indexes becoming constant.  */
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
	{
	  *t = tem;
	  res = true;
	}
    }

  return res;
}
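
/* Illustrative example (hypothetical names): after propagation of the
   invariant address &s.f, MEM[&s.f, 4] is rewritten as MEM[&s, off + 4]
   with OFF the constant byte offset of F within S; conversely MEM[&d, 0]
   whose type, alignment and qualification match the decl D is turned
   back into the plain reference D.  */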
/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases.  */

static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
{
  bool changed = false;
  gimple stmt = gsi_stmt (*gsi);
  unsigned i;

  /* First do required canonicalization of [TARGET_]MEM_REF addresses
     after propagation.
     ???  This shouldn't be done in generic folding but in the
     propagation helpers which also know whether an address was
     propagated.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
	{
	  tree *rhs = gimple_assign_rhs1_ptr (stmt);
	  if ((REFERENCE_CLASS_P (*rhs)
	       || TREE_CODE (*rhs) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (rhs))
	    changed = true;
	  tree *lhs = gimple_assign_lhs_ptr (stmt);
	  if (REFERENCE_CLASS_P (*lhs)
	      && maybe_canonicalize_mem_ref_addr (lhs))
	    changed = true;
	}
      break;
    case GIMPLE_CALL:
      {
	for (i = 0; i < gimple_call_num_args (stmt); ++i)
	  {
	    tree *arg = gimple_call_arg_ptr (stmt, i);
	    if (REFERENCE_CLASS_P (*arg)
		&& maybe_canonicalize_mem_ref_addr (arg))
	      changed = true;
	  }
	tree *lhs = gimple_call_lhs_ptr (stmt);
	if (*lhs
	    && REFERENCE_CLASS_P (*lhs)
	    && maybe_canonicalize_mem_ref_addr (lhs))
	  changed = true;
	break;
      }
    case GIMPLE_ASM:
      {
	gasm *asm_stmt = as_a <gasm *> (stmt);
	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if (REFERENCE_CLASS_P (op)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if ((REFERENCE_CLASS_P (op)
		 || TREE_CODE (op) == ADDR_EXPR)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
      }
      break;
    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree *val = gimple_debug_bind_get_value_ptr (stmt);
	  if (*val
	      && (REFERENCE_CLASS_P (*val)
		  || TREE_CODE (*val) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (val))
	    changed = true;
	}
      break;
    default:;
    }

  /* Dispatch to pattern-based folding.  */
  if (!inplace
      || is_gimple_assign (stmt)
      || gimple_code (stmt) == GIMPLE_COND)
    {
      gimple_seq seq = NULL;
      code_helper rcode;
      tree ops[3] = {};
      if (gimple_simplify (stmt, &rcode, ops, inplace ? NULL : &seq, valueize))
	{
	  if (replace_stmt_with_simplification (gsi, rcode, ops, &seq, inplace))
	    changed = true;
	  else
	    gimple_seq_discard (seq);
	}
    }

  stmt = gsi_stmt (*gsi);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	unsigned old_num_ops = gimple_num_ops (stmt);
	enum tree_code subcode = gimple_assign_rhs_code (stmt);
	tree lhs = gimple_assign_lhs (stmt);
	tree new_rhs;
	/* First canonicalize operand order.  This avoids building new
	   trees if this is the only thing fold would later do.  */
	if ((commutative_tree_code (subcode)
	     || commutative_ternary_tree_code (subcode))
	    && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
				     gimple_assign_rhs2 (stmt), false))
	  {
	    tree tem = gimple_assign_rhs1 (stmt);
	    gimple_assign_set_rhs1 (stmt, gimple_assign_rhs2 (stmt));
	    gimple_assign_set_rhs2 (stmt, tem);
	    changed = true;
	  }
	new_rhs = fold_gimple_assign (gsi);
	if (new_rhs
	    && !useless_type_conversion_p (TREE_TYPE (lhs),
					   TREE_TYPE (new_rhs)))
	  new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	if (new_rhs
	    && (!inplace
		|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	break;
      }

    case GIMPLE_COND:
      changed |= fold_gimple_cond (as_a <gcond *> (stmt));
      break;

    case GIMPLE_CALL:
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      {
	gasm *asm_stmt = as_a <gasm *> (stmt);
	size_t noutputs;
	const char **oconstraints;
	const char *constraint;
	bool allows_mem, allows_reg;

	noutputs = gimple_asm_noutputs (asm_stmt);
	oconstraints = XALLOCAVEC (const char *, noutputs);

	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    oconstraints[i]
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, true)) != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    constraint
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    parse_input_constraint (&constraint, 0, 0, noutputs, 0,
				    oconstraints, &allows_mem, &allows_reg);
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, !allows_reg && allows_mem))
		   != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
      }
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree val = gimple_debug_bind_get_value (stmt);
	  if (val
	      && REFERENCE_CLASS_P (val))
	    {
	      tree tem = maybe_fold_reference (val, false);
	      if (tem)
		{
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	  else if (val
		   && TREE_CODE (val) == ADDR_EXPR)
	    {
	      tree ref = TREE_OPERAND (val, 0);
	      tree tem = maybe_fold_reference (ref, false);
	      if (tem)
		{
		  tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	}
      break;

    default:;
    }

  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  */
  if (gimple_has_lhs (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
	{
	  tree new_lhs = maybe_fold_reference (lhs, true);
	  if (new_lhs)
	    {
	      gimple_set_lhs (stmt, new_lhs);
	      changed = true;
	    }
	}
    }

  return changed;
}
/* Valueization callback that ends up not following SSA edges.  */

tree
no_follow_ssa_edges (tree)
{
  return NULL_TREE;
}

/* Valueization callback that ends up following single-use SSA edges only.  */

tree
follow_single_use_edges (tree val)
{
  if (TREE_CODE (val) == SSA_NAME
      && !has_single_use (val))
    return NULL_TREE;
  return val;
}
/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.
   The statement pointed to by GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
}

/* Like above, but valueize operands through the callback VALUEIZE.  */

bool
fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
{
  return fold_stmt_1 (gsi, false, valueize);
}

/* Perform the minimal folding on statement *GSI.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.
   The statement *GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt_inplace (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
  gcc_assert (gsi_stmt (*gsi) == stmt);
  return changed;
}
/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
   if EXPR is null or we don't know how.
   If non-null, the result always has boolean type.  */

static tree
canonicalize_bool (tree expr, bool invert)
{
  if (!expr)
    return NULL_TREE;
  if (invert)
    {
      if (integer_nonzerop (expr))
	return boolean_false_node;
      else if (integer_zerop (expr))
	return boolean_true_node;
      else if (TREE_CODE (expr) == SSA_NAME)
	return fold_build2 (EQ_EXPR, boolean_type_node, expr,
			    build_int_cst (TREE_TYPE (expr), 0));
      else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
	return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
			    boolean_type_node,
			    TREE_OPERAND (expr, 0),
			    TREE_OPERAND (expr, 1));
      else
	return NULL_TREE;
    }
  else
    {
      if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
	return expr;
      if (integer_nonzerop (expr))
	return boolean_true_node;
      else if (integer_zerop (expr))
	return boolean_false_node;
      else if (TREE_CODE (expr) == SSA_NAME)
	return fold_build2 (NE_EXPR, boolean_type_node, expr,
			    build_int_cst (TREE_TYPE (expr), 0));
      else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
	return fold_build2 (TREE_CODE (expr),
			    boolean_type_node,
			    TREE_OPERAND (expr, 0),
			    TREE_OPERAND (expr, 1));
      else
	return NULL_TREE;
    }
}
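
/* Illustrative example: canonicalize_bool (a < b, true) yields a >= b,
   and canonicalize_bool (x_1, false) for a non-boolean SSA name yields
   x_1 != 0; both results have boolean type.  */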
/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  */

static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
			const_tree op1, const_tree op2)
{
  gimple s;

  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      if (operand_equal_p (expr, op1, 0))
	return ((code == NE_EXPR && integer_zerop (op2))
		|| (code == EQ_EXPR && integer_nonzerop (op2)));
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
	  && gimple_assign_rhs_code (s) == code
	  && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
	  && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
	return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
	{
	  enum tree_code c = gimple_assign_rhs_code (s);
	  if ((c == NE_EXPR && integer_zerop (op2))
	      || (c == EQ_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr, c,
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	  if ((c == EQ_EXPR && integer_zerop (op2))
	      || (c == NE_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr,
					   invert_tree_comparison (c, false),
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	}
    }

  return false;
}
/* Check to see if two boolean expressions OP1 and OP2 are logically
   equivalent.  */

static bool
same_bool_result_p (const_tree op1, const_tree op2)
{
  /* Simple cases first.  */
  if (operand_equal_p (op1, op2, 0))
    return true;

  /* Check the cases where at least one of the operands is a comparison.
     These are a bit smarter than operand_equal_p in that they apply some
     identities on SSA_NAMEs.  */
  if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
      && same_bool_comparison_p (op1, TREE_CODE (op2),
				 TREE_OPERAND (op2, 0),
				 TREE_OPERAND (op2, 1)))
    return true;
  if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
      && same_bool_comparison_p (op2, TREE_CODE (op1),
				 TREE_OPERAND (op1, 0),
				 TREE_OPERAND (op1, 1)))
    return true;

  return false;
}
/* Forward declarations for some mutually recursive functions.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison (tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison_1 (gimple stmt,
			   enum tree_code code2, tree op2a, tree op2b);
static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison (tree var, bool invert,
			enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison_1 (gimple stmt,
			  enum tree_code code2, tree op2a, tree op2b);
/* Helper function for and_comparisons_1: try to simplify the AND of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of VAR before doing the AND.
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison (tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b)
{
  tree t;
  gimple stmt = SSA_NAME_DEF_STMT (var);

  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.  */
  if (invert)
    t = or_var_with_comparison_1 (stmt,
				  invert_tree_comparison (code2, false),
				  op2a, op2b);
  else
    t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
  return canonicalize_bool (t, invert);
}
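
/* Illustrative example: simplifying !var AND (a < b) proceeds by
   simplifying var OR (a >= b) and inverting the result, so only the
   non-inverted forms need dedicated handling below.  */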
/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison_1 (gimple stmt,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_false_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (innercode,
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt),
				  code2, op2a, op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple s;
      tree t;
      tree partial = NULL_TREE;
      bool is_and = (innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
	 inner1 AND (inner1 OR inner2) => inner1
	 !inner1 AND (inner1 AND inner2) => false
	 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
	 Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
	 Compute the first partial result, (inner1 AND (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (t AND inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner2;
	      else if (integer_zerop (t))
		return boolean_false_node;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner2 AND (op2a code2 op2b)))  */
	  else if (integer_onep (t))
	    return boolean_true_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (inner1 AND t)  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner1;
	      else if (integer_zerop (t))
		return boolean_false_node;
	      /* If both are the same, we can apply the identity
		 (x AND x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner1 AND (op2a code2 op2b)))
	     => (t OR partial)  */
	  else
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed OR expression.  The
		     interesting case is when at least one is false.
		     Or, if both are the same, we can apply the identity
		     (x OR x) == x.  */
		  if (integer_zerop (partial))
		    return t;
		  else if (integer_zerop (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
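
/* Illustrative example: given the boolean definition _1 = _2 & _3,
   the AND of _1 with the test (_2 != 0) simplifies to just _1, since
   the second test is already implied by the first conjunct.  */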
/* Try to simplify the AND of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) AND (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a == op1b), we should either be able to
	 return that or FALSE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	      else
		return boolean_false_node;
	    }
	}
      /* Likewise if the second comparison is an == comparison.  */
      else if (code2 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	      else
		return boolean_false_node;
	    }
	}

      /* Same business with inequality tests.  */
      else if (code1 == NE_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp != 0); break;
	    case NE_EXPR: val = (cmp == 0); break;
	    case LT_EXPR: val = (cmp >= 0); break;
	    case GT_EXPR: val = (cmp <= 0); break;
	    case LE_EXPR: val = (cmp > 0); break;
	    case GE_EXPR: val = (cmp < 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == NE_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp <= 0); break;
	    case GT_EXPR: val = (cmp >= 0); break;
	    case LE_EXPR: val = (cmp < 0); break;
	    case GE_EXPR: val = (cmp > 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Choose the more restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Likewise choose the more restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Check for singleton ranges.  */
      else if (cmp == 0
	       && ((code1 == LE_EXPR && code2 == GE_EXPR)
		   || (code1 == GE_EXPR && code2 == LE_EXPR)))
	return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);

      /* Check for disjoint ranges.  */
      else if (cmp <= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_false_node;
      else if (cmp >= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_false_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return and_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ANDed with the second comparison, we win.
4344 Do not do this unless the type is bool since we need a bool
4345 result here anyway. */
4346 if (TREE_CODE (TREE_TYPE (op1a
)) == BOOLEAN_TYPE
)
4348 tree result
= NULL_TREE
;
4350 for (i
= 0; i
< gimple_phi_num_args (stmt
); i
++)
4352 tree arg
= gimple_phi_arg_def (stmt
, i
);
4354 /* If this PHI has itself as an argument, ignore it.
4355 If all the other args produce the same result,
4357 if (arg
== gimple_phi_result (stmt
))
4359 else if (TREE_CODE (arg
) == INTEGER_CST
)
4361 if (invert
? integer_nonzerop (arg
) : integer_zerop (arg
))
4364 result
= boolean_false_node
;
4365 else if (!integer_zerop (result
))
4369 result
= fold_build2 (code2
, boolean_type_node
,
4371 else if (!same_bool_comparison_p (result
,
4375 else if (TREE_CODE (arg
) == SSA_NAME
4376 && !SSA_NAME_IS_DEFAULT_DEF (arg
))
4379 gimple def_stmt
= SSA_NAME_DEF_STMT (arg
);
4380 /* In simple cases we can look through PHI nodes,
4381 but we have to be careful with loops.
4383 if (! dom_info_available_p (CDI_DOMINATORS
)
4384 || gimple_bb (def_stmt
) == gimple_bb (stmt
)
4385 || dominated_by_p (CDI_DOMINATORS
,
4386 gimple_bb (def_stmt
),
4389 temp
= and_var_with_comparison (arg
, invert
, code2
,
4395 else if (!same_bool_result_p (result
, temp
))
4411 /* Try to simplify the AND of two comparisons, specified by
4412 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
4413 If this can be simplified to a single expression (without requiring
4414 introducing more SSA variables to hold intermediate values),
4415 return the resulting tree. Otherwise return NULL_TREE.
4416 If the result expression is non-null, it has boolean type. */
4419 maybe_fold_and_comparisons (enum tree_code code1
, tree op1a
, tree op1b
,
4420 enum tree_code code2
, tree op2a
, tree op2b
)
4422 tree t
= and_comparisons_1 (code1
, op1a
, op1b
, code2
, op2a
, op2b
);
4426 return and_comparisons_1 (code2
, op2a
, op2b
, code1
, op1a
, op1b
);
4429 /* Helper function for or_comparisons_1: try to simplify the OR of the
4430 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
4431 If INVERT is true, invert the value of VAR before doing the OR.
4432 Return NULL_EXPR if we can't simplify this to a single expression. */
4435 or_var_with_comparison (tree var
, bool invert
,
4436 enum tree_code code2
, tree op2a
, tree op2b
)
4439 gimple stmt
= SSA_NAME_DEF_STMT (var
);
4441 /* We can only deal with variables whose definitions are assignments. */
4442 if (!is_gimple_assign (stmt
))
4445 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
4446 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
4447 Then we only have to consider the simpler non-inverted cases. */
4449 t
= and_var_with_comparison_1 (stmt
,
4450 invert_tree_comparison (code2
, false),
4453 t
= or_var_with_comparison_1 (stmt
, code2
, op2a
, op2b
);
4454 return canonicalize_bool (t
, invert
);
4457 /* Try to simplify the OR of the ssa variable defined by the assignment
4458 STMT with the comparison specified by (OP2A CODE2 OP2B).
4459 Return NULL_EXPR if we can't simplify this to a single expression. */
4462 or_var_with_comparison_1 (gimple stmt
,
4463 enum tree_code code2
, tree op2a
, tree op2b
)
4465 tree var
= gimple_assign_lhs (stmt
);
4466 tree true_test_var
= NULL_TREE
;
4467 tree false_test_var
= NULL_TREE
;
4468 enum tree_code innercode
= gimple_assign_rhs_code (stmt
);
4470 /* Check for identities like (var OR (var != 0)) => true . */
4471 if (TREE_CODE (op2a
) == SSA_NAME
4472 && TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
)
4474 if ((code2
== NE_EXPR
&& integer_zerop (op2b
))
4475 || (code2
== EQ_EXPR
&& integer_nonzerop (op2b
)))
4477 true_test_var
= op2a
;
4478 if (var
== true_test_var
)
4481 else if ((code2
== EQ_EXPR
&& integer_zerop (op2b
))
4482 || (code2
== NE_EXPR
&& integer_nonzerop (op2b
)))
4484 false_test_var
= op2a
;
4485 if (var
== false_test_var
)
4486 return boolean_true_node
;
4490 /* If the definition is a comparison, recurse on it. */
4491 if (TREE_CODE_CLASS (innercode
) == tcc_comparison
)
4493 tree t
= or_comparisons_1 (innercode
,
4494 gimple_assign_rhs1 (stmt
),
4495 gimple_assign_rhs2 (stmt
),
4503 /* If the definition is an AND or OR expression, we may be able to
4504 simplify by reassociating. */
4505 if (TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
4506 && (innercode
== BIT_AND_EXPR
|| innercode
== BIT_IOR_EXPR
))
4508 tree inner1
= gimple_assign_rhs1 (stmt
);
4509 tree inner2
= gimple_assign_rhs2 (stmt
);
4512 tree partial
= NULL_TREE
;
4513 bool is_or
= (innercode
== BIT_IOR_EXPR
);
4515 /* Check for boolean identities that don't require recursive examination
4517 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
4518 inner1 OR (inner1 AND inner2) => inner1
4519 !inner1 OR (inner1 OR inner2) => true
4520 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
4522 if (inner1
== true_test_var
)
4523 return (is_or
? var
: inner1
);
4524 else if (inner2
== true_test_var
)
4525 return (is_or
? var
: inner2
);
4526 else if (inner1
== false_test_var
)
4529 : or_var_with_comparison (inner2
, false, code2
, op2a
, op2b
));
4530 else if (inner2
== false_test_var
)
4533 : or_var_with_comparison (inner1
, false, code2
, op2a
, op2b
));
4535 /* Next, redistribute/reassociate the OR across the inner tests.
4536 Compute the first partial result, (inner1 OR (op2a code op2b)) */
4537 if (TREE_CODE (inner1
) == SSA_NAME
4538 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner1
))
4539 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
4540 && (t
= maybe_fold_or_comparisons (gimple_assign_rhs_code (s
),
4541 gimple_assign_rhs1 (s
),
4542 gimple_assign_rhs2 (s
),
4543 code2
, op2a
, op2b
)))
4545 /* Handle the OR case, where we are reassociating:
4546 (inner1 OR inner2) OR (op2a code2 op2b)
4548 If the partial result t is a constant, we win. Otherwise
4549 continue on to try reassociating with the other inner test. */
4552 if (integer_onep (t
))
4553 return boolean_true_node
;
4554 else if (integer_zerop (t
))
4558 /* Handle the AND case, where we are redistributing:
4559 (inner1 AND inner2) OR (op2a code2 op2b)
4560 => (t AND (inner2 OR (op2a code op2b))) */
4561 else if (integer_zerop (t
))
4562 return boolean_false_node
;
4564 /* Save partial result for later. */
4568 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
4569 if (TREE_CODE (inner2
) == SSA_NAME
4570 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner2
))
4571 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
4572 && (t
= maybe_fold_or_comparisons (gimple_assign_rhs_code (s
),
4573 gimple_assign_rhs1 (s
),
4574 gimple_assign_rhs2 (s
),
4575 code2
, op2a
, op2b
)))
4577 /* Handle the OR case, where we are reassociating:
4578 (inner1 OR inner2) OR (op2a code2 op2b)
4580 => (t OR partial) */
4583 if (integer_zerop (t
))
4585 else if (integer_onep (t
))
4586 return boolean_true_node
;
4587 /* If both are the same, we can apply the identity
4589 else if (partial
&& same_bool_result_p (t
, partial
))
4593 /* Handle the AND case, where we are redistributing:
4594 (inner1 AND inner2) OR (op2a code2 op2b)
4595 => (t AND (inner1 OR (op2a code2 op2b)))
4596 => (t AND partial) */
4599 if (integer_zerop (t
))
4600 return boolean_false_node
;
4603 /* We already got a simplification for the other
4604 operand to the redistributed AND expression. The
4605 interesting case is when at least one is true.
4606 Or, if both are the same, we can apply the identity
4608 if (integer_onep (partial
))
4610 else if (integer_onep (t
))
4612 else if (same_bool_result_p (t
, partial
))
4621 /* Try to simplify the OR of two comparisons defined by
4622 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4623 If this can be done without constructing an intermediate value,
4624 return the resulting tree; otherwise NULL_TREE is returned.
4625 This function is deliberately asymmetric as it recurses on SSA_DEFs
4626 in the first comparison but not the second. */
4629 or_comparisons_1 (enum tree_code code1
, tree op1a
, tree op1b
,
4630 enum tree_code code2
, tree op2a
, tree op2b
)
4632 tree truth_type
= truth_type_for (TREE_TYPE (op1a
));
4634 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
4635 if (operand_equal_p (op1a
, op2a
, 0)
4636 && operand_equal_p (op1b
, op2b
, 0))
4638 /* Result will be either NULL_TREE, or a combined comparison. */
4639 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
4640 TRUTH_ORIF_EXPR
, code1
, code2
,
4641 truth_type
, op1a
, op1b
);
4646 /* Likewise the swapped case of the above. */
4647 if (operand_equal_p (op1a
, op2b
, 0)
4648 && operand_equal_p (op1b
, op2a
, 0))
4650 /* Result will be either NULL_TREE, or a combined comparison. */
4651 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
4652 TRUTH_ORIF_EXPR
, code1
,
4653 swap_tree_comparison (code2
),
4654 truth_type
, op1a
, op1b
);
4659 /* If both comparisons are of the same value against constants, we might
4660 be able to merge them. */
4661 if (operand_equal_p (op1a
, op2a
, 0)
4662 && TREE_CODE (op1b
) == INTEGER_CST
4663 && TREE_CODE (op2b
) == INTEGER_CST
)
4665 int cmp
= tree_int_cst_compare (op1b
, op2b
);
4667 /* If we have (op1a != op1b), we should either be able to
4668 return that or TRUE, depending on whether the constant op1b
4669 also satisfies the other comparison against op2b. */
4670 if (code1
== NE_EXPR
)
4676 case EQ_EXPR
: val
= (cmp
== 0); break;
4677 case NE_EXPR
: val
= (cmp
!= 0); break;
4678 case LT_EXPR
: val
= (cmp
< 0); break;
4679 case GT_EXPR
: val
= (cmp
> 0); break;
4680 case LE_EXPR
: val
= (cmp
<= 0); break;
4681 case GE_EXPR
: val
= (cmp
>= 0); break;
4682 default: done
= false;
4687 return boolean_true_node
;
4689 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
4692 /* Likewise if the second comparison is a != comparison. */
4693 else if (code2
== NE_EXPR
)
4699 case EQ_EXPR
: val
= (cmp
== 0); break;
4700 case NE_EXPR
: val
= (cmp
!= 0); break;
4701 case LT_EXPR
: val
= (cmp
> 0); break;
4702 case GT_EXPR
: val
= (cmp
< 0); break;
4703 case LE_EXPR
: val
= (cmp
>= 0); break;
4704 case GE_EXPR
: val
= (cmp
<= 0); break;
4705 default: done
= false;
4710 return boolean_true_node
;
4712 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
4716 /* See if an equality test is redundant with the other comparison. */
4717 else if (code1
== EQ_EXPR
)
4722 case EQ_EXPR
: val
= (cmp
== 0); break;
4723 case NE_EXPR
: val
= (cmp
!= 0); break;
4724 case LT_EXPR
: val
= (cmp
< 0); break;
4725 case GT_EXPR
: val
= (cmp
> 0); break;
4726 case LE_EXPR
: val
= (cmp
<= 0); break;
4727 case GE_EXPR
: val
= (cmp
>= 0); break;
4732 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
4734 else if (code2
== EQ_EXPR
)
4739 case EQ_EXPR
: val
= (cmp
== 0); break;
4740 case NE_EXPR
: val
= (cmp
!= 0); break;
4741 case LT_EXPR
: val
= (cmp
> 0); break;
4742 case GT_EXPR
: val
= (cmp
< 0); break;
4743 case LE_EXPR
: val
= (cmp
>= 0); break;
4744 case GE_EXPR
: val
= (cmp
<= 0); break;
4749 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
4752 /* Chose the less restrictive of two < or <= comparisons. */
4753 else if ((code1
== LT_EXPR
|| code1
== LE_EXPR
)
4754 && (code2
== LT_EXPR
|| code2
== LE_EXPR
))
4756 if ((cmp
< 0) || (cmp
== 0 && code1
== LT_EXPR
))
4757 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
4759 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
4762 /* Likewise chose the less restrictive of two > or >= comparisons. */
4763 else if ((code1
== GT_EXPR
|| code1
== GE_EXPR
)
4764 && (code2
== GT_EXPR
|| code2
== GE_EXPR
))
4766 if ((cmp
> 0) || (cmp
== 0 && code1
== GT_EXPR
))
4767 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
4769 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
4772 /* Check for singleton ranges. */
4774 && ((code1
== LT_EXPR
&& code2
== GT_EXPR
)
4775 || (code1
== GT_EXPR
&& code2
== LT_EXPR
)))
4776 return fold_build2 (NE_EXPR
, boolean_type_node
, op1a
, op2b
);
4778 /* Check for less/greater pairs that don't restrict the range at all. */
4780 && (code1
== LT_EXPR
|| code1
== LE_EXPR
)
4781 && (code2
== GT_EXPR
|| code2
== GE_EXPR
))
4782 return boolean_true_node
;
4784 && (code1
== GT_EXPR
|| code1
== GE_EXPR
)
4785 && (code2
== LT_EXPR
|| code2
== LE_EXPR
))
4786 return boolean_true_node
;
4789 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
4790 NAME's definition is a truth value. See if there are any simplifications
4791 that can be done against the NAME's definition. */
4792 if (TREE_CODE (op1a
) == SSA_NAME
4793 && (code1
== NE_EXPR
|| code1
== EQ_EXPR
)
4794 && (integer_zerop (op1b
) || integer_onep (op1b
)))
4796 bool invert
= ((code1
== EQ_EXPR
&& integer_zerop (op1b
))
4797 || (code1
== NE_EXPR
&& integer_onep (op1b
)));
4798 gimple stmt
= SSA_NAME_DEF_STMT (op1a
);
4799 switch (gimple_code (stmt
))
4802 /* Try to simplify by copy-propagating the definition. */
4803 return or_var_with_comparison (op1a
, invert
, code2
, op2a
, op2b
);
4806 /* If every argument to the PHI produces the same result when
4807 ORed with the second comparison, we win.
4808 Do not do this unless the type is bool since we need a bool
4809 result here anyway. */
4810 if (TREE_CODE (TREE_TYPE (op1a
)) == BOOLEAN_TYPE
)
4812 tree result
= NULL_TREE
;
4814 for (i
= 0; i
< gimple_phi_num_args (stmt
); i
++)
4816 tree arg
= gimple_phi_arg_def (stmt
, i
);
4818 /* If this PHI has itself as an argument, ignore it.
4819 If all the other args produce the same result,
4821 if (arg
== gimple_phi_result (stmt
))
4823 else if (TREE_CODE (arg
) == INTEGER_CST
)
4825 if (invert
? integer_zerop (arg
) : integer_nonzerop (arg
))
4828 result
= boolean_true_node
;
4829 else if (!integer_onep (result
))
4833 result
= fold_build2 (code2
, boolean_type_node
,
4835 else if (!same_bool_comparison_p (result
,
4839 else if (TREE_CODE (arg
) == SSA_NAME
4840 && !SSA_NAME_IS_DEFAULT_DEF (arg
))
4843 gimple def_stmt
= SSA_NAME_DEF_STMT (arg
);
4844 /* In simple cases we can look through PHI nodes,
4845 but we have to be careful with loops.
4847 if (! dom_info_available_p (CDI_DOMINATORS
)
4848 || gimple_bb (def_stmt
) == gimple_bb (stmt
)
4849 || dominated_by_p (CDI_DOMINATORS
,
4850 gimple_bb (def_stmt
),
4853 temp
= or_var_with_comparison (arg
, invert
, code2
,
4859 else if (!same_bool_result_p (result
, temp
))
4875 /* Try to simplify the OR of two comparisons, specified by
4876 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
4877 If this can be simplified to a single expression (without requiring
4878 introducing more SSA variables to hold intermediate values),
4879 return the resulting tree. Otherwise return NULL_TREE.
4880 If the result expression is non-null, it has boolean type. */
4883 maybe_fold_or_comparisons (enum tree_code code1
, tree op1a
, tree op1b
,
4884 enum tree_code code2
, tree op2a
, tree op2b
)
4886 tree t
= or_comparisons_1 (code1
, op1a
, op1b
, code2
, op2a
, op2b
);
4890 return or_comparisons_1 (code2
, op2a
, op2b
, code1
, op1a
, op1b
);
4894 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
4896 Either NULL_TREE, a simplified but non-constant or a constant
4899 ??? This should go into a gimple-fold-inline.h file to be eventually
4900 privatized with the single valueize function used in the various TUs
4901 to avoid the indirect function call overhead. */
4904 gimple_fold_stmt_to_constant_1 (gimple stmt
, tree (*valueize
) (tree
),
4905 tree (*gvalueize
) (tree
))
4909 /* ??? The SSA propagators do not correctly deal with following SSA use-def
4910 edges if there are intermediate VARYING defs. For this reason
4911 do not follow SSA edges here even though SCCVN can technically
4912 just deal fine with that. */
4913 if (gimple_simplify (stmt
, &rcode
, ops
, NULL
, gvalueize
)
4914 && rcode
.is_tree_code ()
4915 && (TREE_CODE_LENGTH ((tree_code
) rcode
) == 0
4916 || ((tree_code
) rcode
) == ADDR_EXPR
)
4917 && is_gimple_val (ops
[0]))
4920 if (dump_file
&& dump_flags
& TDF_DETAILS
)
4922 fprintf (dump_file
, "Match-and-simplified ");
4923 print_gimple_expr (dump_file
, stmt
, 0, TDF_SLIM
);
4924 fprintf (dump_file
, " to ");
4925 print_generic_expr (dump_file
, res
, 0);
4926 fprintf (dump_file
, "\n");
4931 location_t loc
= gimple_location (stmt
);
4932 switch (gimple_code (stmt
))
4936 enum tree_code subcode
= gimple_assign_rhs_code (stmt
);
4938 switch (get_gimple_rhs_class (subcode
))
4940 case GIMPLE_SINGLE_RHS
:
4942 tree rhs
= gimple_assign_rhs1 (stmt
);
4943 enum tree_code_class kind
= TREE_CODE_CLASS (subcode
);
4945 if (TREE_CODE (rhs
) == SSA_NAME
)
4947 /* If the RHS is an SSA_NAME, return its known constant value,
4949 return (*valueize
) (rhs
);
4951 /* Handle propagating invariant addresses into address
4953 else if (TREE_CODE (rhs
) == ADDR_EXPR
4954 && !is_gimple_min_invariant (rhs
))
4956 HOST_WIDE_INT offset
= 0;
4958 base
= get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs
, 0),
4962 && (CONSTANT_CLASS_P (base
)
4963 || decl_address_invariant_p (base
)))
4964 return build_invariant_address (TREE_TYPE (rhs
),
4967 else if (TREE_CODE (rhs
) == CONSTRUCTOR
4968 && TREE_CODE (TREE_TYPE (rhs
)) == VECTOR_TYPE
4969 && (CONSTRUCTOR_NELTS (rhs
)
4970 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs
))))
4975 vec
= XALLOCAVEC (tree
,
4976 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs
)));
4977 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs
), i
, val
)
4979 val
= (*valueize
) (val
);
4980 if (TREE_CODE (val
) == INTEGER_CST
4981 || TREE_CODE (val
) == REAL_CST
4982 || TREE_CODE (val
) == FIXED_CST
)
4988 return build_vector (TREE_TYPE (rhs
), vec
);
4990 if (subcode
== OBJ_TYPE_REF
)
4992 tree val
= (*valueize
) (OBJ_TYPE_REF_EXPR (rhs
));
4993 /* If callee is constant, we can fold away the wrapper. */
4994 if (is_gimple_min_invariant (val
))
4998 if (kind
== tcc_reference
)
5000 if ((TREE_CODE (rhs
) == VIEW_CONVERT_EXPR
5001 || TREE_CODE (rhs
) == REALPART_EXPR
5002 || TREE_CODE (rhs
) == IMAGPART_EXPR
)
5003 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == SSA_NAME
)
5005 tree val
= (*valueize
) (TREE_OPERAND (rhs
, 0));
5006 return fold_unary_loc (EXPR_LOCATION (rhs
),
5008 TREE_TYPE (rhs
), val
);
5010 else if (TREE_CODE (rhs
) == BIT_FIELD_REF
5011 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == SSA_NAME
)
5013 tree val
= (*valueize
) (TREE_OPERAND (rhs
, 0));
5014 return fold_ternary_loc (EXPR_LOCATION (rhs
),
5016 TREE_TYPE (rhs
), val
,
5017 TREE_OPERAND (rhs
, 1),
5018 TREE_OPERAND (rhs
, 2));
5020 else if (TREE_CODE (rhs
) == MEM_REF
5021 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == SSA_NAME
)
5023 tree val
= (*valueize
) (TREE_OPERAND (rhs
, 0));
5024 if (TREE_CODE (val
) == ADDR_EXPR
5025 && is_gimple_min_invariant (val
))
5027 tree tem
= fold_build2 (MEM_REF
, TREE_TYPE (rhs
),
5029 TREE_OPERAND (rhs
, 1));
5034 return fold_const_aggregate_ref_1 (rhs
, valueize
);
5036 else if (kind
== tcc_declaration
)
5037 return get_symbol_constant_value (rhs
);
5041 case GIMPLE_UNARY_RHS
:
5044 case GIMPLE_BINARY_RHS
:
5046 /* Handle binary operators that can appear in GIMPLE form. */
5047 tree op0
= (*valueize
) (gimple_assign_rhs1 (stmt
));
5048 tree op1
= (*valueize
) (gimple_assign_rhs2 (stmt
));
5050 /* Translate &x + CST into an invariant form suitable for
5051 further propagation. */
5052 if (gimple_assign_rhs_code (stmt
) == POINTER_PLUS_EXPR
5053 && TREE_CODE (op0
) == ADDR_EXPR
5054 && TREE_CODE (op1
) == INTEGER_CST
)
5056 tree off
= fold_convert (ptr_type_node
, op1
);
5057 return build_fold_addr_expr_loc
5059 fold_build2 (MEM_REF
,
5060 TREE_TYPE (TREE_TYPE (op0
)),
5061 unshare_expr (op0
), off
));
5064 return fold_binary_loc (loc
, subcode
,
5065 gimple_expr_type (stmt
), op0
, op1
);
5068 case GIMPLE_TERNARY_RHS
:
5070 /* Handle ternary operators that can appear in GIMPLE form. */
5071 tree op0
= (*valueize
) (gimple_assign_rhs1 (stmt
));
5072 tree op1
= (*valueize
) (gimple_assign_rhs2 (stmt
));
5073 tree op2
= (*valueize
) (gimple_assign_rhs3 (stmt
));
5075 /* Fold embedded expressions in ternary codes. */
5076 if ((subcode
== COND_EXPR
5077 || subcode
== VEC_COND_EXPR
)
5078 && COMPARISON_CLASS_P (op0
))
5080 tree op00
= (*valueize
) (TREE_OPERAND (op0
, 0));
5081 tree op01
= (*valueize
) (TREE_OPERAND (op0
, 1));
5082 tree tem
= fold_binary_loc (loc
, TREE_CODE (op0
),
5083 TREE_TYPE (op0
), op00
, op01
);
5088 return fold_ternary_loc (loc
, subcode
,
5089 gimple_expr_type (stmt
), op0
, op1
, op2
);
5100 gcall
*call_stmt
= as_a
<gcall
*> (stmt
);
5102 if (gimple_call_internal_p (stmt
))
5104 enum tree_code subcode
= ERROR_MARK
;
5105 switch (gimple_call_internal_fn (stmt
))
5107 case IFN_UBSAN_CHECK_ADD
:
5108 subcode
= PLUS_EXPR
;
5110 case IFN_UBSAN_CHECK_SUB
:
5111 subcode
= MINUS_EXPR
;
5113 case IFN_UBSAN_CHECK_MUL
:
5114 subcode
= MULT_EXPR
;
5119 tree arg0
= gimple_call_arg (stmt
, 0);
5120 tree arg1
= gimple_call_arg (stmt
, 1);
5121 tree op0
= (*valueize
) (arg0
);
5122 tree op1
= (*valueize
) (arg1
);
5124 if (TREE_CODE (op0
) != INTEGER_CST
5125 || TREE_CODE (op1
) != INTEGER_CST
)
5130 /* x * 0 = 0 * x = 0 without overflow. */
5131 if (integer_zerop (op0
) || integer_zerop (op1
))
5132 return build_zero_cst (TREE_TYPE (arg0
));
5135 /* y - y = 0 without overflow. */
5136 if (operand_equal_p (op0
, op1
, 0))
5137 return build_zero_cst (TREE_TYPE (arg0
));
5144 = fold_binary_loc (loc
, subcode
, TREE_TYPE (arg0
), op0
, op1
);
5146 && TREE_CODE (res
) == INTEGER_CST
5147 && !TREE_OVERFLOW (res
))
5152 fn
= (*valueize
) (gimple_call_fn (stmt
));
5153 if (TREE_CODE (fn
) == ADDR_EXPR
5154 && TREE_CODE (TREE_OPERAND (fn
, 0)) == FUNCTION_DECL
5155 && DECL_BUILT_IN (TREE_OPERAND (fn
, 0))
5156 && gimple_builtin_call_types_compatible_p (stmt
,
5157 TREE_OPERAND (fn
, 0)))
5159 tree
*args
= XALLOCAVEC (tree
, gimple_call_num_args (stmt
));
5162 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
5163 args
[i
] = (*valueize
) (gimple_call_arg (stmt
, i
));
5164 retval
= fold_builtin_call_array (loc
,
5165 gimple_call_return_type (call_stmt
),
5166 fn
, gimple_call_num_args (stmt
), args
);
5169 /* fold_call_expr wraps the result inside a NOP_EXPR. */
5170 STRIP_NOPS (retval
);
5171 retval
= fold_convert (gimple_call_return_type (call_stmt
),
5184 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
5185 Returns NULL_TREE if folding to a constant is not possible, otherwise
5186 returns a constant according to is_gimple_min_invariant. */
5189 gimple_fold_stmt_to_constant (gimple stmt
, tree (*valueize
) (tree
))
5191 tree res
= gimple_fold_stmt_to_constant_1 (stmt
, valueize
);
5192 if (res
&& is_gimple_min_invariant (res
))
5198 /* The following set of functions are supposed to fold references using
5199 their constant initializers. */
5201 /* See if we can find constructor defining value of BASE.
5202 When we know the consructor with constant offset (such as
5203 base is array[40] and we do know constructor of array), then
5204 BIT_OFFSET is adjusted accordingly.
5206 As a special case, return error_mark_node when constructor
5207 is not explicitly available, but it is known to be zero
5208 such as 'static const int a;'. */
5210 get_base_constructor (tree base
, HOST_WIDE_INT
*bit_offset
,
5211 tree (*valueize
)(tree
))
5213 HOST_WIDE_INT bit_offset2
, size
, max_size
;
5214 if (TREE_CODE (base
) == MEM_REF
)
5216 if (!integer_zerop (TREE_OPERAND (base
, 1)))
5218 if (!tree_fits_shwi_p (TREE_OPERAND (base
, 1)))
5220 *bit_offset
+= (mem_ref_offset (base
).to_short_addr ()
5225 && TREE_CODE (TREE_OPERAND (base
, 0)) == SSA_NAME
)
5226 base
= valueize (TREE_OPERAND (base
, 0));
5227 if (!base
|| TREE_CODE (base
) != ADDR_EXPR
)
5229 base
= TREE_OPERAND (base
, 0);
5232 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
5233 DECL_INITIAL. If BASE is a nested reference into another
5234 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
5235 the inner reference. */
5236 switch (TREE_CODE (base
))
5241 tree init
= ctor_for_folding (base
);
5243 /* Our semantic is exact opposite of ctor_for_folding;
5244 NULL means unknown, while error_mark_node is 0. */
5245 if (init
== error_mark_node
)
5248 return error_mark_node
;
5254 base
= get_ref_base_and_extent (base
, &bit_offset2
, &size
, &max_size
);
5255 if (max_size
== -1 || size
!= max_size
)
5257 *bit_offset
+= bit_offset2
;
5258 return get_base_constructor (base
, bit_offset
, valueize
);
5269 /* CTOR is CONSTRUCTOR of an array type. Fold reference of type TYPE and size
5270 SIZE to the memory at bit OFFSET. */
5273 fold_array_ctor_reference (tree type
, tree ctor
,
5274 unsigned HOST_WIDE_INT offset
,
5275 unsigned HOST_WIDE_INT size
,
5278 unsigned HOST_WIDE_INT cnt
;
5280 offset_int low_bound
;
5281 offset_int elt_size
;
5282 offset_int index
, max_index
;
5283 offset_int access_index
;
5284 tree domain_type
= NULL_TREE
, index_type
= NULL_TREE
;
5285 HOST_WIDE_INT inner_offset
;
5287 /* Compute low bound and elt size. */
5288 if (TREE_CODE (TREE_TYPE (ctor
)) == ARRAY_TYPE
)
5289 domain_type
= TYPE_DOMAIN (TREE_TYPE (ctor
));
5290 if (domain_type
&& TYPE_MIN_VALUE (domain_type
))
5292 /* Static constructors for variably sized objects makes no sense. */
5293 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type
)) == INTEGER_CST
);
5294 index_type
= TREE_TYPE (TYPE_MIN_VALUE (domain_type
));
5295 low_bound
= wi::to_offset (TYPE_MIN_VALUE (domain_type
));
5299 /* Static constructors for variably sized objects makes no sense. */
5300 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor
))))
5302 elt_size
= wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor
))));
5304 /* We can handle only constantly sized accesses that are known to not
5305 be larger than size of array element. */
5306 if (!TYPE_SIZE_UNIT (type
)
5307 || TREE_CODE (TYPE_SIZE_UNIT (type
)) != INTEGER_CST
5308 || wi::lts_p (elt_size
, wi::to_offset (TYPE_SIZE_UNIT (type
)))
5312 /* Compute the array index we look for. */
5313 access_index
= wi::udiv_trunc (offset_int (offset
/ BITS_PER_UNIT
),
5315 access_index
+= low_bound
;
5317 access_index
= wi::ext (access_index
, TYPE_PRECISION (index_type
),
5318 TYPE_SIGN (index_type
));
5320 /* And offset within the access. */
5321 inner_offset
= offset
% (elt_size
.to_uhwi () * BITS_PER_UNIT
);
5323 /* See if the array field is large enough to span whole access. We do not
5324 care to fold accesses spanning multiple array indexes. */
5325 if (inner_offset
+ size
> elt_size
.to_uhwi () * BITS_PER_UNIT
)
5328 index
= low_bound
- 1;
5330 index
= wi::ext (index
, TYPE_PRECISION (index_type
),
5331 TYPE_SIGN (index_type
));
5333 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor
), cnt
, cfield
, cval
)
5335 /* Array constructor might explicitely set index, or specify range
5336 or leave index NULL meaning that it is next index after previous
5340 if (TREE_CODE (cfield
) == INTEGER_CST
)
5341 max_index
= index
= wi::to_offset (cfield
);
5344 gcc_assert (TREE_CODE (cfield
) == RANGE_EXPR
);
5345 index
= wi::to_offset (TREE_OPERAND (cfield
, 0));
5346 max_index
= wi::to_offset (TREE_OPERAND (cfield
, 1));
5353 index
= wi::ext (index
, TYPE_PRECISION (index_type
),
5354 TYPE_SIGN (index_type
));
5358 /* Do we have match? */
5359 if (wi::cmpu (access_index
, index
) >= 0
5360 && wi::cmpu (access_index
, max_index
) <= 0)
5361 return fold_ctor_reference (type
, cval
, inner_offset
, size
,
5364 /* When memory is not explicitely mentioned in constructor,
5365 it is 0 (or out of range). */
5366 return build_zero_cst (type
);
5369 /* CTOR is CONSTRUCTOR of an aggregate or vector.
5370 Fold reference of type TYPE and size SIZE to the memory at bit OFFSET. */
5373 fold_nonarray_ctor_reference (tree type
, tree ctor
,
5374 unsigned HOST_WIDE_INT offset
,
5375 unsigned HOST_WIDE_INT size
,
5378 unsigned HOST_WIDE_INT cnt
;
5381 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor
), cnt
, cfield
,
5384 tree byte_offset
= DECL_FIELD_OFFSET (cfield
);
5385 tree field_offset
= DECL_FIELD_BIT_OFFSET (cfield
);
5386 tree field_size
= DECL_SIZE (cfield
);
5387 offset_int bitoffset
;
5388 offset_int bitoffset_end
, access_end
;
5390 /* Variable sized objects in static constructors makes no sense,
5391 but field_size can be NULL for flexible array members. */
5392 gcc_assert (TREE_CODE (field_offset
) == INTEGER_CST
5393 && TREE_CODE (byte_offset
) == INTEGER_CST
5394 && (field_size
!= NULL_TREE
5395 ? TREE_CODE (field_size
) == INTEGER_CST
5396 : TREE_CODE (TREE_TYPE (cfield
)) == ARRAY_TYPE
));
5398 /* Compute bit offset of the field. */
5399 bitoffset
= (wi::to_offset (field_offset
)
5400 + wi::lshift (wi::to_offset (byte_offset
),
5401 LOG2_BITS_PER_UNIT
));
5402 /* Compute bit offset where the field ends. */
5403 if (field_size
!= NULL_TREE
)
5404 bitoffset_end
= bitoffset
+ wi::to_offset (field_size
);
5408 access_end
= offset_int (offset
) + size
;
5410 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
5411 [BITOFFSET, BITOFFSET_END)? */
5412 if (wi::cmps (access_end
, bitoffset
) > 0
5413 && (field_size
== NULL_TREE
5414 || wi::lts_p (offset
, bitoffset_end
)))
5416 offset_int inner_offset
= offset_int (offset
) - bitoffset
;
5417 /* We do have overlap. Now see if field is large enough to
5418 cover the access. Give up for accesses spanning multiple
5420 if (wi::cmps (access_end
, bitoffset_end
) > 0)
5422 if (wi::lts_p (offset
, bitoffset
))
5424 return fold_ctor_reference (type
, cval
,
5425 inner_offset
.to_uhwi (), size
,
5429 /* When memory is not explicitely mentioned in constructor, it is 0. */
5430 return build_zero_cst (type
);
5433 /* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
5434 to the memory at bit OFFSET. */
5437 fold_ctor_reference (tree type
, tree ctor
, unsigned HOST_WIDE_INT offset
,
5438 unsigned HOST_WIDE_INT size
, tree from_decl
)
5442 /* We found the field with exact match. */
5443 if (useless_type_conversion_p (type
, TREE_TYPE (ctor
))
5445 return canonicalize_constructor_val (unshare_expr (ctor
), from_decl
);
5447 /* We are at the end of walk, see if we can view convert the
5449 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor
)) && !offset
5450 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
5451 && !compare_tree_int (TYPE_SIZE (type
), size
)
5452 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor
)), size
))
5454 ret
= canonicalize_constructor_val (unshare_expr (ctor
), from_decl
);
5455 ret
= fold_unary (VIEW_CONVERT_EXPR
, type
, ret
);
5460 /* For constants and byte-aligned/sized reads try to go through
5461 native_encode/interpret. */
5462 if (CONSTANT_CLASS_P (ctor
)
5463 && BITS_PER_UNIT
== 8
5464 && offset
% BITS_PER_UNIT
== 0
5465 && size
% BITS_PER_UNIT
== 0
5466 && size
<= MAX_BITSIZE_MODE_ANY_MODE
)
5468 unsigned char buf
[MAX_BITSIZE_MODE_ANY_MODE
/ BITS_PER_UNIT
];
5469 if (native_encode_expr (ctor
, buf
, size
/ BITS_PER_UNIT
,
5470 offset
/ BITS_PER_UNIT
) > 0)
5471 return native_interpret_expr (type
, buf
, size
/ BITS_PER_UNIT
);
5473 if (TREE_CODE (ctor
) == CONSTRUCTOR
)
5476 if (TREE_CODE (TREE_TYPE (ctor
)) == ARRAY_TYPE
5477 || TREE_CODE (TREE_TYPE (ctor
)) == VECTOR_TYPE
)
5478 return fold_array_ctor_reference (type
, ctor
, offset
, size
,
5481 return fold_nonarray_ctor_reference (type
, ctor
, offset
, size
,
5488 /* Return the tree representing the element referenced by T if T is an
5489 ARRAY_REF or COMPONENT_REF into constant aggregates valuezing SSA
5490 names using VALUEIZE. Return NULL_TREE otherwise. */
5493 fold_const_aggregate_ref_1 (tree t
, tree (*valueize
) (tree
))
5495 tree ctor
, idx
, base
;
5496 HOST_WIDE_INT offset
, size
, max_size
;
5499 if (TREE_THIS_VOLATILE (t
))
5502 if (TREE_CODE_CLASS (TREE_CODE (t
)) == tcc_declaration
)
5503 return get_symbol_constant_value (t
);
5505 tem
= fold_read_from_constant_string (t
);
5509 switch (TREE_CODE (t
))
5512 case ARRAY_RANGE_REF
:
5513 /* Constant indexes are handled well by get_base_constructor.
5514 Only special case variable offsets.
5515 FIXME: This code can't handle nested references with variable indexes
5516 (they will be handled only by iteration of ccp). Perhaps we can bring
5517 get_ref_base_and_extent here and make it use a valueize callback. */
5518 if (TREE_CODE (TREE_OPERAND (t
, 1)) == SSA_NAME
5520 && (idx
= (*valueize
) (TREE_OPERAND (t
, 1)))
5521 && TREE_CODE (idx
) == INTEGER_CST
)
5523 tree low_bound
, unit_size
;
5525 /* If the resulting bit-offset is constant, track it. */
5526 if ((low_bound
= array_ref_low_bound (t
),
5527 TREE_CODE (low_bound
) == INTEGER_CST
)
5528 && (unit_size
= array_ref_element_size (t
),
5529 tree_fits_uhwi_p (unit_size
)))
5532 = wi::sext (wi::to_offset (idx
) - wi::to_offset (low_bound
),
5533 TYPE_PRECISION (TREE_TYPE (idx
)));
5535 if (wi::fits_shwi_p (woffset
))
5537 offset
= woffset
.to_shwi ();
5538 /* TODO: This code seems wrong, multiply then check
5539 to see if it fits. */
5540 offset
*= tree_to_uhwi (unit_size
);
5541 offset
*= BITS_PER_UNIT
;
5543 base
= TREE_OPERAND (t
, 0);
5544 ctor
= get_base_constructor (base
, &offset
, valueize
);
5545 /* Empty constructor. Always fold to 0. */
5546 if (ctor
== error_mark_node
)
5547 return build_zero_cst (TREE_TYPE (t
));
5548 /* Out of bound array access. Value is undefined,
5552 /* We can not determine ctor. */
5555 return fold_ctor_reference (TREE_TYPE (t
), ctor
, offset
,
5556 tree_to_uhwi (unit_size
)
5566 case TARGET_MEM_REF
:
5568 base
= get_ref_base_and_extent (t
, &offset
, &size
, &max_size
);
5569 ctor
= get_base_constructor (base
, &offset
, valueize
);
5571 /* Empty constructor. Always fold to 0. */
5572 if (ctor
== error_mark_node
)
5573 return build_zero_cst (TREE_TYPE (t
));
5574 /* We do not know precise address. */
5575 if (max_size
== -1 || max_size
!= size
)
5577 /* We can not determine ctor. */
5581 /* Out of bound array access. Value is undefined, but don't fold. */
5585 return fold_ctor_reference (TREE_TYPE (t
), ctor
, offset
, size
,
5591 tree c
= fold_const_aggregate_ref_1 (TREE_OPERAND (t
, 0), valueize
);
5592 if (c
&& TREE_CODE (c
) == COMPLEX_CST
)
5593 return fold_build1_loc (EXPR_LOCATION (t
),
5594 TREE_CODE (t
), TREE_TYPE (t
), c
);
5606 fold_const_aggregate_ref (tree t
)
5608 return fold_const_aggregate_ref_1 (t
, NULL
);
5611 /* Lookup virtual method with index TOKEN in a virtual table V
5613 Set CAN_REFER if non-NULL to false if method
5614 is not referable or if the virtual table is ill-formed (such as rewriten
5615 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
5618 gimple_get_virt_method_for_vtable (HOST_WIDE_INT token
,
5620 unsigned HOST_WIDE_INT offset
,
5623 tree vtable
= v
, init
, fn
;
5624 unsigned HOST_WIDE_INT size
;
5625 unsigned HOST_WIDE_INT elt_size
, access_index
;
5631 /* First of all double check we have virtual table. */
5632 if (TREE_CODE (v
) != VAR_DECL
5633 || !DECL_VIRTUAL_P (v
))
5635 gcc_assert (in_lto_p
);
5636 /* Pass down that we lost track of the target. */
5642 init
= ctor_for_folding (v
);
5644 /* The virtual tables should always be born with constructors
5645 and we always should assume that they are avaialble for
5646 folding. At the moment we do not stream them in all cases,
5647 but it should never happen that ctor seem unreachable. */
5649 if (init
== error_mark_node
)
5651 gcc_assert (in_lto_p
);
5652 /* Pass down that we lost track of the target. */
5657 gcc_checking_assert (TREE_CODE (TREE_TYPE (v
)) == ARRAY_TYPE
);
5658 size
= tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v
))));
5659 offset
*= BITS_PER_UNIT
;
5660 offset
+= token
* size
;
5662 /* Lookup the value in the constructor that is assumed to be array.
5663 This is equivalent to
5664 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
5665 offset, size, NULL);
5666 but in a constant time. We expect that frontend produced a simple
5667 array without indexed initializers. */
5669 gcc_checking_assert (TREE_CODE (TREE_TYPE (init
)) == ARRAY_TYPE
);
5670 domain_type
= TYPE_DOMAIN (TREE_TYPE (init
));
5671 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type
)));
5672 elt_size
= tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init
))));
5674 access_index
= offset
/ BITS_PER_UNIT
/ elt_size
;
5675 gcc_checking_assert (offset
% (elt_size
* BITS_PER_UNIT
) == 0);
5677 /* This code makes an assumption that there are no
5678 indexed fileds produced by C++ FE, so we can directly index the array. */
5679 if (access_index
< CONSTRUCTOR_NELTS (init
))
5681 fn
= CONSTRUCTOR_ELT (init
, access_index
)->value
;
5682 gcc_checking_assert (!CONSTRUCTOR_ELT (init
, access_index
)->index
);
5688 /* For type inconsistent program we may end up looking up virtual method
5689 in virtual table that does not contain TOKEN entries. We may overrun
5690 the virtual table and pick up a constant or RTTI info pointer.
5691 In any case the call is undefined. */
5693 || (TREE_CODE (fn
) != ADDR_EXPR
&& TREE_CODE (fn
) != FDESC_EXPR
)
5694 || TREE_CODE (TREE_OPERAND (fn
, 0)) != FUNCTION_DECL
)
5695 fn
= builtin_decl_implicit (BUILT_IN_UNREACHABLE
);
5698 fn
= TREE_OPERAND (fn
, 0);
5700 /* When cgraph node is missing and function is not public, we cannot
5701 devirtualize. This can happen in WHOPR when the actual method
5702 ends up in other partition, because we found devirtualization
5703 possibility too late. */
5704 if (!can_refer_decl_in_current_unit_p (fn
, vtable
))
5715 /* Make sure we create a cgraph node for functions we'll reference.
5716 They can be non-existent if the reference comes from an entry
5717 of an external vtable for example. */
5718 cgraph_node::get_create (fn
);
5723 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
5724 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
5725 KNOWN_BINFO carries the binfo describing the true type of
5726 OBJ_TYPE_REF_OBJECT(REF).
5727 Set CAN_REFER if non-NULL to false if method
5728 is not referable or if the virtual table is ill-formed (such as rewriten
5729 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
5732 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token
, tree known_binfo
,
5735 unsigned HOST_WIDE_INT offset
;
5738 v
= BINFO_VTABLE (known_binfo
);
5739 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
5743 if (!vtable_pointer_value_to_vtable (v
, &v
, &offset
))
5749 return gimple_get_virt_method_for_vtable (token
, v
, offset
, can_refer
);
5752 /* Return true iff VAL is a gimple expression that is known to be
5753 non-negative. Restricted to floating-point inputs. */
5756 gimple_val_nonnegative_real_p (tree val
)
5760 gcc_assert (val
&& SCALAR_FLOAT_TYPE_P (TREE_TYPE (val
)));
5762 /* Use existing logic for non-gimple trees. */
5763 if (tree_expr_nonnegative_p (val
))
5766 if (TREE_CODE (val
) != SSA_NAME
)
5769 /* Currently we look only at the immediately defining statement
5770 to make this determination, since recursion on defining
5771 statements of operands can lead to quadratic behavior in the
5772 worst case. This is expected to catch almost all occurrences
5773 in practice. It would be possible to implement limited-depth
5774 recursion if important cases are lost. Alternatively, passes
5775 that need this information (such as the pow/powi lowering code
5776 in the cse_sincos pass) could be revised to provide it through
5777 dataflow propagation. */
5779 def_stmt
= SSA_NAME_DEF_STMT (val
);
5781 if (is_gimple_assign (def_stmt
))
5785 /* See fold-const.c:tree_expr_nonnegative_p for additional
5786 cases that could be handled with recursion. */
5788 switch (gimple_assign_rhs_code (def_stmt
))
5791 /* Always true for floating-point operands. */
5795 /* True if the two operands are identical (since we are
5796 restricted to floating-point inputs). */
5797 op0
= gimple_assign_rhs1 (def_stmt
);
5798 op1
= gimple_assign_rhs2 (def_stmt
);
5801 || operand_equal_p (op0
, op1
, 0))
5808 else if (is_gimple_call (def_stmt
))
5810 tree fndecl
= gimple_call_fndecl (def_stmt
);
5812 && DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
)
5816 switch (DECL_FUNCTION_CODE (fndecl
))
5818 CASE_FLT_FN (BUILT_IN_ACOS
):
5819 CASE_FLT_FN (BUILT_IN_ACOSH
):
5820 CASE_FLT_FN (BUILT_IN_CABS
):
5821 CASE_FLT_FN (BUILT_IN_COSH
):
5822 CASE_FLT_FN (BUILT_IN_ERFC
):
5823 CASE_FLT_FN (BUILT_IN_EXP
):
5824 CASE_FLT_FN (BUILT_IN_EXP10
):
5825 CASE_FLT_FN (BUILT_IN_EXP2
):
5826 CASE_FLT_FN (BUILT_IN_FABS
):
5827 CASE_FLT_FN (BUILT_IN_FDIM
):
5828 CASE_FLT_FN (BUILT_IN_HYPOT
):
5829 CASE_FLT_FN (BUILT_IN_POW10
):
5832 CASE_FLT_FN (BUILT_IN_SQRT
):
5833 /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
5834 nonnegative inputs. */
5835 if (!HONOR_SIGNED_ZEROS (val
))
5840 CASE_FLT_FN (BUILT_IN_POWI
):
5841 /* True if the second argument is an even integer. */
5842 arg1
= gimple_call_arg (def_stmt
, 1);
5844 if (TREE_CODE (arg1
) == INTEGER_CST
5845 && (TREE_INT_CST_LOW (arg1
) & 1) == 0)
5850 CASE_FLT_FN (BUILT_IN_POW
):
5851 /* True if the second argument is an even integer-valued
5853 arg1
= gimple_call_arg (def_stmt
, 1);
5855 if (TREE_CODE (arg1
) == REAL_CST
)
5860 c
= TREE_REAL_CST (arg1
);
5861 n
= real_to_integer (&c
);
5865 REAL_VALUE_TYPE cint
;
5866 real_from_integer (&cint
, VOIDmode
, n
, SIGNED
);
5867 if (real_identical (&c
, &cint
))
5883 /* Given a pointer value OP0, return a simplified version of an
5884 indirection through OP0, or NULL_TREE if no simplification is
5885 possible. Note that the resulting type may be different from
5886 the type pointed to in the sense that it is still compatible
5887 from the langhooks point of view. */
5890 gimple_fold_indirect_ref (tree t
)
5892 tree ptype
= TREE_TYPE (t
), type
= TREE_TYPE (ptype
);
5897 subtype
= TREE_TYPE (sub
);
5898 if (!POINTER_TYPE_P (subtype
))
5901 if (TREE_CODE (sub
) == ADDR_EXPR
)
5903 tree op
= TREE_OPERAND (sub
, 0);
5904 tree optype
= TREE_TYPE (op
);
5906 if (useless_type_conversion_p (type
, optype
))
5909 /* *(foo *)&fooarray => fooarray[0] */
5910 if (TREE_CODE (optype
) == ARRAY_TYPE
5911 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype
))) == INTEGER_CST
5912 && useless_type_conversion_p (type
, TREE_TYPE (optype
)))
5914 tree type_domain
= TYPE_DOMAIN (optype
);
5915 tree min_val
= size_zero_node
;
5916 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
5917 min_val
= TYPE_MIN_VALUE (type_domain
);
5918 if (TREE_CODE (min_val
) == INTEGER_CST
)
5919 return build4 (ARRAY_REF
, type
, op
, min_val
, NULL_TREE
, NULL_TREE
);
5921 /* *(foo *)&complexfoo => __real__ complexfoo */
5922 else if (TREE_CODE (optype
) == COMPLEX_TYPE
5923 && useless_type_conversion_p (type
, TREE_TYPE (optype
)))
5924 return fold_build1 (REALPART_EXPR
, type
, op
);
5925 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
5926 else if (TREE_CODE (optype
) == VECTOR_TYPE
5927 && useless_type_conversion_p (type
, TREE_TYPE (optype
)))
5929 tree part_width
= TYPE_SIZE (type
);
5930 tree index
= bitsize_int (0);
5931 return fold_build3 (BIT_FIELD_REF
, type
, op
, part_width
, index
);
5935 /* *(p + CST) -> ... */
5936 if (TREE_CODE (sub
) == POINTER_PLUS_EXPR
5937 && TREE_CODE (TREE_OPERAND (sub
, 1)) == INTEGER_CST
)
5939 tree addr
= TREE_OPERAND (sub
, 0);
5940 tree off
= TREE_OPERAND (sub
, 1);
5944 addrtype
= TREE_TYPE (addr
);
5946 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
5947 if (TREE_CODE (addr
) == ADDR_EXPR
5948 && TREE_CODE (TREE_TYPE (addrtype
)) == VECTOR_TYPE
5949 && useless_type_conversion_p (type
, TREE_TYPE (TREE_TYPE (addrtype
)))
5950 && tree_fits_uhwi_p (off
))
5952 unsigned HOST_WIDE_INT offset
= tree_to_uhwi (off
);
5953 tree part_width
= TYPE_SIZE (type
);
5954 unsigned HOST_WIDE_INT part_widthi
5955 = tree_to_shwi (part_width
) / BITS_PER_UNIT
;
5956 unsigned HOST_WIDE_INT indexi
= offset
* BITS_PER_UNIT
;
5957 tree index
= bitsize_int (indexi
);
5958 if (offset
/ part_widthi
5959 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype
)))
5960 return fold_build3 (BIT_FIELD_REF
, type
, TREE_OPERAND (addr
, 0),
5964 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
5965 if (TREE_CODE (addr
) == ADDR_EXPR
5966 && TREE_CODE (TREE_TYPE (addrtype
)) == COMPLEX_TYPE
5967 && useless_type_conversion_p (type
, TREE_TYPE (TREE_TYPE (addrtype
))))
5969 tree size
= TYPE_SIZE_UNIT (type
);
5970 if (tree_int_cst_equal (size
, off
))
5971 return fold_build1 (IMAGPART_EXPR
, type
, TREE_OPERAND (addr
, 0));
5974 /* *(p + CST) -> MEM_REF <p, CST>. */
5975 if (TREE_CODE (addr
) != ADDR_EXPR
5976 || DECL_P (TREE_OPERAND (addr
, 0)))
5977 return fold_build2 (MEM_REF
, type
,
5979 wide_int_to_tree (ptype
, off
));
5982 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
5983 if (TREE_CODE (TREE_TYPE (subtype
)) == ARRAY_TYPE
5984 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype
)))) == INTEGER_CST
5985 && useless_type_conversion_p (type
, TREE_TYPE (TREE_TYPE (subtype
))))
5988 tree min_val
= size_zero_node
;
5990 sub
= gimple_fold_indirect_ref (sub
);
5992 sub
= build1 (INDIRECT_REF
, TREE_TYPE (subtype
), osub
);
5993 type_domain
= TYPE_DOMAIN (TREE_TYPE (sub
));
5994 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
5995 min_val
= TYPE_MIN_VALUE (type_domain
);
5996 if (TREE_CODE (min_val
) == INTEGER_CST
)
5997 return build4 (ARRAY_REF
, type
, sub
, min_val
, NULL_TREE
, NULL_TREE
);
6003 /* Return true if CODE is an operation that when operating on signed
6004 integer types involves undefined behavior on overflow and the
6005 operation can be expressed with unsigned arithmetic. */
6008 arith_code_with_undefined_signed_overflow (tree_code code
)
6016 case POINTER_PLUS_EXPR
:
6023 /* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
6024 operation that can be transformed to unsigned arithmetic by converting
6025 its operand, carrying out the operation in the corresponding unsigned
6026 type and converting the result back to the original type.
6028 Returns a sequence of statements that replace STMT and also contain
6029 a modified form of STMT itself. */
6032 rewrite_to_defined_overflow (gimple stmt
)
6034 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
6036 fprintf (dump_file
, "rewriting stmt with undefined signed "
6038 print_gimple_stmt (dump_file
, stmt
, 0, TDF_SLIM
);
6041 tree lhs
= gimple_assign_lhs (stmt
);
6042 tree type
= unsigned_type_for (TREE_TYPE (lhs
));
6043 gimple_seq stmts
= NULL
;
6044 for (unsigned i
= 1; i
< gimple_num_ops (stmt
); ++i
)
6046 gimple_seq stmts2
= NULL
;
6047 gimple_set_op (stmt
, i
,
6048 force_gimple_operand (fold_convert (type
,
6049 gimple_op (stmt
, i
)),
6050 &stmts2
, true, NULL_TREE
));
6051 gimple_seq_add_seq (&stmts
, stmts2
);
6053 gimple_assign_set_lhs (stmt
, make_ssa_name (type
, stmt
));
6054 if (gimple_assign_rhs_code (stmt
) == POINTER_PLUS_EXPR
)
6055 gimple_assign_set_rhs_code (stmt
, PLUS_EXPR
);
6056 gimple_seq_add_stmt (&stmts
, stmt
);
6057 gimple cvt
= gimple_build_assign (lhs
, NOP_EXPR
, gimple_assign_lhs (stmt
));
6058 gimple_seq_add_stmt (&stmts
, cvt
);
6064 /* Build the expression CODE OP0 of type TYPE with location LOC,
6065 simplifying it first if possible using VALUEIZE if not NULL.
6066 OP0 is expected to be valueized already. Returns the built
6067 expression value and appends statements possibly defining it
6071 gimple_build (gimple_seq
*seq
, location_t loc
,
6072 enum tree_code code
, tree type
, tree op0
,
6073 tree (*valueize
)(tree
))
6075 tree res
= gimple_simplify (code
, type
, op0
, seq
, valueize
);
6078 if (gimple_in_ssa_p (cfun
))
6079 res
= make_ssa_name (type
);
6081 res
= create_tmp_reg (type
);
6083 if (code
== REALPART_EXPR
6084 || code
== IMAGPART_EXPR
6085 || code
== VIEW_CONVERT_EXPR
)
6086 stmt
= gimple_build_assign (res
, code
, build1 (code
, type
, op0
));
6088 stmt
= gimple_build_assign (res
, code
, op0
);
6089 gimple_set_location (stmt
, loc
);
6090 gimple_seq_add_stmt_without_update (seq
, stmt
);
6095 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
6096 simplifying it first if possible using VALUEIZE if not NULL.
6097 OP0 and OP1 are expected to be valueized already. Returns the built
6098 expression value and appends statements possibly defining it
6102 gimple_build (gimple_seq
*seq
, location_t loc
,
6103 enum tree_code code
, tree type
, tree op0
, tree op1
,
6104 tree (*valueize
)(tree
))
6106 tree res
= gimple_simplify (code
, type
, op0
, op1
, seq
, valueize
);
6109 if (gimple_in_ssa_p (cfun
))
6110 res
= make_ssa_name (type
);
6112 res
= create_tmp_reg (type
);
6113 gimple stmt
= gimple_build_assign (res
, code
, op0
, op1
);
6114 gimple_set_location (stmt
, loc
);
6115 gimple_seq_add_stmt_without_update (seq
, stmt
);
6120 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
6121 simplifying it first if possible using VALUEIZE if not NULL.
6122 OP0, OP1 and OP2 are expected to be valueized already. Returns the built
6123 expression value and appends statements possibly defining it
6127 gimple_build (gimple_seq
*seq
, location_t loc
,
6128 enum tree_code code
, tree type
, tree op0
, tree op1
, tree op2
,
6129 tree (*valueize
)(tree
))
6131 tree res
= gimple_simplify (code
, type
, op0
, op1
, op2
,
6135 if (gimple_in_ssa_p (cfun
))
6136 res
= make_ssa_name (type
);
6138 res
= create_tmp_reg (type
);
6140 if (code
== BIT_FIELD_REF
)
6141 stmt
= gimple_build_assign (res
, code
,
6142 build3 (code
, type
, op0
, op1
, op2
));
6144 stmt
= gimple_build_assign (res
, code
, op0
, op1
, op2
);
6145 gimple_set_location (stmt
, loc
);
6146 gimple_seq_add_stmt_without_update (seq
, stmt
);
6151 /* Build the call FN (ARG0) with a result of type TYPE
6152 (or no result if TYPE is void) with location LOC,
6153 simplifying it first if possible using VALUEIZE if not NULL.
6154 ARG0 is expected to be valueized already. Returns the built
6155 expression value (or NULL_TREE if TYPE is void) and appends
6156 statements possibly defining it to SEQ. */
6159 gimple_build (gimple_seq
*seq
, location_t loc
,
6160 enum built_in_function fn
, tree type
, tree arg0
,
6161 tree (*valueize
)(tree
))
6163 tree res
= gimple_simplify (fn
, type
, arg0
, seq
, valueize
);
6166 tree decl
= builtin_decl_implicit (fn
);
6167 gimple stmt
= gimple_build_call (decl
, 1, arg0
);
6168 if (!VOID_TYPE_P (type
))
6170 if (gimple_in_ssa_p (cfun
))
6171 res
= make_ssa_name (type
);
6173 res
= create_tmp_reg (type
);
6174 gimple_call_set_lhs (stmt
, res
);
6176 gimple_set_location (stmt
, loc
);
6177 gimple_seq_add_stmt_without_update (seq
, stmt
);
6182 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
6183 (or no result if TYPE is void) with location LOC,
6184 simplifying it first if possible using VALUEIZE if not NULL.
6185 ARG0 is expected to be valueized already. Returns the built
6186 expression value (or NULL_TREE if TYPE is void) and appends
6187 statements possibly defining it to SEQ. */
6190 gimple_build (gimple_seq
*seq
, location_t loc
,
6191 enum built_in_function fn
, tree type
, tree arg0
, tree arg1
,
6192 tree (*valueize
)(tree
))
6194 tree res
= gimple_simplify (fn
, type
, arg0
, arg1
, seq
, valueize
);
6197 tree decl
= builtin_decl_implicit (fn
);
6198 gimple stmt
= gimple_build_call (decl
, 2, arg0
, arg1
);
6199 if (!VOID_TYPE_P (type
))
6201 if (gimple_in_ssa_p (cfun
))
6202 res
= make_ssa_name (type
);
6204 res
= create_tmp_reg (type
);
6205 gimple_call_set_lhs (stmt
, res
);
6207 gimple_set_location (stmt
, loc
);
6208 gimple_seq_add_stmt_without_update (seq
, stmt
);
6213 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
6214 (or no result if TYPE is void) with location LOC,
6215 simplifying it first if possible using VALUEIZE if not NULL.
6216 ARG0 is expected to be valueized already. Returns the built
6217 expression value (or NULL_TREE if TYPE is void) and appends
6218 statements possibly defining it to SEQ. */
6221 gimple_build (gimple_seq
*seq
, location_t loc
,
6222 enum built_in_function fn
, tree type
,
6223 tree arg0
, tree arg1
, tree arg2
,
6224 tree (*valueize
)(tree
))
6226 tree res
= gimple_simplify (fn
, type
, arg0
, arg1
, arg2
, seq
, valueize
);
6229 tree decl
= builtin_decl_implicit (fn
);
6230 gimple stmt
= gimple_build_call (decl
, 3, arg0
, arg1
, arg2
);
6231 if (!VOID_TYPE_P (type
))
6233 if (gimple_in_ssa_p (cfun
))
6234 res
= make_ssa_name (type
);
6236 res
= create_tmp_reg (type
);
6237 gimple_call_set_lhs (stmt
, res
);
6239 gimple_set_location (stmt
, loc
);
6240 gimple_seq_add_stmt_without_update (seq
, stmt
);
6245 /* Build the conversion (TYPE) OP with a result of type TYPE
6246 with location LOC if such conversion is neccesary in GIMPLE,
6247 simplifying it first.
6248 Returns the built expression value and appends
6249 statements possibly defining it to SEQ. */
6252 gimple_convert (gimple_seq
*seq
, location_t loc
, tree type
, tree op
)
6254 if (useless_type_conversion_p (type
, TREE_TYPE (op
)))
6256 return gimple_build (seq
, loc
, NOP_EXPR
, type
, op
);