/* Statement simplification on GIMPLE.
   Copyright (C) 2010-2014 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.c.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "stringpool.h"
#include "stor-layout.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimple-expr.h"
#include "gimple-iterator.h"
#include "gimple-ssa.h"
#include "tree-ssanames.h"
#include "tree-into-ssa.h"
#include "tree-ssa-propagate.h"
#include "ipa-utils.h"
#include "gimple-pretty-print.h"
#include "tree-ssa-address.h"
#include "langhooks.h"
#include "gimplify-me.h"
/* Return true when DECL can be referenced from current unit.
   FROM_DECL (if non-null) specifies the variable whose constructor
   DECL was taken from.
   We can get declarations that are not possible to reference for various
   reasons:

     1) When analyzing C++ virtual tables.
        C++ virtual tables do have known constructors even
        when they are keyed to other compilation unit.
        Those tables can contain pointers to methods and vars
        in other units.  Those methods have both STATIC and EXTERNAL
        set.
     2) In WHOPR mode devirtualization might lead to reference
        to method that was partitioned elsewhere.
        In this case we have static VAR_DECL or FUNCTION_DECL
        that has no corresponding callgraph/varpool node
        declaring the body.
     3) COMDAT functions referred by external vtables that
        we devirtualize only during final compilation stage.
        At this time we already decided that we will not output
        the function body and thus we can't reference the symbol
        directly.  */
static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  if (DECL_ABSTRACT_P (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL))
    return true;

  /* Static objects can be referred only if they were not optimized out yet.  */
  if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
    {
      /* Before we start optimizing unreachable code we can be sure all
         static objects are defined.  */
      if (symtab->function_flags_ready)
        return true;
      snode = symtab_node::get (decl);
      if (!snode || !snode->definition)
        return false;
      node = dyn_cast <cgraph_node *> (snode);
      return !node || !node->global.inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from initializer of
     external var or var that has been optimized out.  */
  if (!from_decl
      || TREE_CODE (from_decl) != VAR_DECL
      || (!DECL_EXTERNAL (from_decl)
          && (vnode = varpool_node::get (from_decl)) != NULL
          && vnode->definition)
      || (flag_ltrans
          && (vnode = varpool_node::get (from_decl)) != NULL
          && vnode->in_other_partition))
    return true;
  /* We are folding reference from external vtable.  The vtable may refer
     to a symbol keyed to other compilation unit.  The other compilation
     unit may be in separate DSO and the symbol may be hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
    return false;
  /* When function is public, we always can introduce new reference.
     Exception are the COMDAT functions where introducing a direct
     reference imply need to include function body in the current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We have COMDAT.  We are going to check if we still have definition
     or if the definition is going to be output in other partition.
     Bypass this when gimplifying; all needed functions will be produced.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting to not necessarily give up because the copy will be
     output elsewhere when corresponding vtable is output.
     This is however not possible - the ABI specifies that COMDATs are output
     in units where they are used and when the other unit was compiled with
     LTO it is possible that vtable was kept public while the function itself
     was privatized.  */
  if (!symtab->function_flags_ready)
    return true;

  snode = symtab_node::get (decl);
  if (!snode
      || ((!snode->definition || DECL_EXTERNAL (decl))
          && (!snode->in_other_partition
              || (!snode->forced_by_abi && !snode->force_output))))
    return false;
  node = dyn_cast <cgraph_node *> (snode);
  return !node || !node->global.inlined_to;
}
/* CVAL is value taken from DECL_INITIAL of variable.  Try to transform it into
   acceptable form for is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specifies the variable whose constructor contains
   CVAL.  */

tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  tree orig_cval = cval;
  STRIP_NOPS (cval);
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
        cval = build1_loc (EXPR_LOCATION (cval),
                           ADDR_EXPR, TREE_TYPE (ptr),
                           fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
                                        ptr,
                                        fold_convert (ptr_type_node,
                                                      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
        {
          base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
          if (base)
            TREE_OPERAND (cval, 0) = base;
        }
      else
        base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
        return NULL_TREE;

      if ((TREE_CODE (base) == VAR_DECL
           || TREE_CODE (base) == FUNCTION_DECL)
          && !can_refer_decl_in_current_unit_p (base, from_decl))
        return NULL_TREE;
      if (TREE_CODE (base) == VAR_DECL)
        TREE_ADDRESSABLE (base) = 1;
      else if (TREE_CODE (base) == FUNCTION_DECL)
        {
          /* Make sure we create a cgraph node for functions we'll reference.
             They can be non-existent if the reference comes from an entry
             of an external vtable for example.  */
          cgraph_node::get_create (base);
        }
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
        cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
        cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  if (TREE_OVERFLOW_P (cval))
    return drop_tree_overflow (cval);
  return cval;
}
/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

tree
get_symbol_constant_value (tree sym)
{
  tree val = ctor_for_folding (sym);
  if (val != error_mark_node)
    {
      if (val)
        {
          val = canonicalize_constructor_val (unshare_expr (val), sym);
          if (val && is_gimple_min_invariant (val))
            return val;
          else
            return NULL_TREE;
        }
      /* Variables declared 'const' without an initializer
         have zero as the initializer if they may not be
         overridden at link or run time.  */
      if (!val
          && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
              || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
        return build_zero_cst (TREE_TYPE (sym));
    }

  return NULL_TREE;
}
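
/* For example, given

     static const int answer = 42;

   a load of ANSWER folds to the constant 42, and a 'const' variable
   without an initializer that cannot be overridden at link or run time
   folds to zero of its type.  */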
/* Subroutine of fold_stmt.  We perform several simplifications of the
   memory reference tree EXPR and make sure to re-gimplify them properly
   after propagation of constant addresses.  IS_LHS is true if the
   reference is supposed to be an lvalue.  */

static tree
maybe_fold_reference (tree expr, bool is_lhs)
{
  tree result;

  if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
       || TREE_CODE (expr) == REALPART_EXPR
       || TREE_CODE (expr) == IMAGPART_EXPR)
      && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_unary_loc (EXPR_LOCATION (expr),
                           TREE_CODE (expr),
                           TREE_TYPE (expr),
                           TREE_OPERAND (expr, 0));
  else if (TREE_CODE (expr) == BIT_FIELD_REF
           && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_ternary_loc (EXPR_LOCATION (expr),
                             TREE_CODE (expr),
                             TREE_TYPE (expr),
                             TREE_OPERAND (expr, 0),
                             TREE_OPERAND (expr, 1),
                             TREE_OPERAND (expr, 2));

  if (!is_lhs
      && (result = fold_const_aggregate_ref (expr))
      && is_gimple_min_invariant (result))
    return result;

  return NULL_TREE;
}
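
/* For instance, REALPART_EXPR of a COMPLEX_CST folds to the constant
   real part via fold_unary_loc, while loads from constant aggregates
   such as const arrays are handled by fold_const_aggregate_ref.  */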
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
        tree rhs = gimple_assign_rhs1 (stmt);

        if (REFERENCE_CLASS_P (rhs))
          return maybe_fold_reference (rhs, false);

        else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
          {
            tree val = OBJ_TYPE_REF_EXPR (rhs);
            if (is_gimple_min_invariant (val))
              return val;
            else if (flag_devirtualize && virtual_method_call_p (rhs))
              {
                bool final;
                vec <cgraph_node *>targets
                  = possible_polymorphic_call_targets (rhs, stmt, &final);
                if (final && targets.length () <= 1 && dbg_cnt (devirt))
                  {
                    if (dump_enabled_p ())
                      {
                        location_t loc = gimple_location_safe (stmt);
                        dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
                                         "resolving virtual function address "
                                         "reference to function %s\n",
                                         targets.length () == 1
                                         ? targets[0]->name ()
                                         : "NULL");
                      }
                    if (targets.length () == 1)
                      {
                        val = fold_convert (TREE_TYPE (val),
                                            build_fold_addr_expr_loc
                                              (loc, targets[0]->decl));
                        STRIP_USELESS_TYPE_CONVERSION (val);
                      }
                    else
                      /* We cannot use __builtin_unreachable here because it
                         cannot have its address taken.  */
                      val = build_int_cst (TREE_TYPE (val), 0);
                    return val;
                  }
              }
          }
        else if (TREE_CODE (rhs) == ADDR_EXPR)
          {
            tree ref = TREE_OPERAND (rhs, 0);
            tree tem = maybe_fold_reference (ref, true);
            if (tem
                && TREE_CODE (tem) == MEM_REF
                && integer_zerop (TREE_OPERAND (tem, 1)))
              result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
            else if (tem)
              result = fold_convert (TREE_TYPE (rhs),
                                     build_fold_addr_expr_loc (loc, tem));
            else if (TREE_CODE (ref) == MEM_REF
                     && integer_zerop (TREE_OPERAND (ref, 1)))
              result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
          }

        else if (TREE_CODE (rhs) == CONSTRUCTOR
                 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
                 && (CONSTRUCTOR_NELTS (rhs)
                     == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
          {
            /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
            unsigned i;
            tree val;

            FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
              if (TREE_CODE (val) != INTEGER_CST
                  && TREE_CODE (val) != REAL_CST
                  && TREE_CODE (val) != FIXED_CST)
                return NULL_TREE;

            return build_vector_from_ctor (TREE_TYPE (rhs),
                                           CONSTRUCTOR_ELTS (rhs));
          }

        else if (DECL_P (rhs))
          return get_symbol_constant_value (rhs);

        /* If we couldn't fold the RHS, hand over to the generic
           fold routines.  */
        if (result == NULL_TREE)
          result = fold (rhs);

        /* Strip away useless type conversions.  Both the NON_LVALUE_EXPR
           that may have been added by fold, and "useless" type
           conversions that might now be apparent due to propagation.  */
        STRIP_USELESS_TYPE_CONVERSION (result);

        if (result != rhs && valid_gimple_rhs_p (result))
          return result;

        return NULL_TREE;
      }
      break;

    case GIMPLE_UNARY_RHS:
      {
        tree rhs = gimple_assign_rhs1 (stmt);

        result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
        if (result)
          {
            /* If the operation was a conversion do _not_ mark a
               resulting constant with TREE_OVERFLOW if the original
               constant was not.  These conversions have implementation
               defined behavior and retaining the TREE_OVERFLOW flag
               here would confuse later passes such as VRP.  */
            if (CONVERT_EXPR_CODE_P (subcode)
                && TREE_CODE (result) == INTEGER_CST
                && TREE_CODE (rhs) == INTEGER_CST)
              TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);

            STRIP_USELESS_TYPE_CONVERSION (result);
            if (valid_gimple_rhs_p (result))
              return result;
          }
      }
      break;

    case GIMPLE_BINARY_RHS:
      /* Try to canonicalize for boolean-typed X the comparisons
         X == 0, X == 1, X != 0, and X != 1.  */
      if (gimple_assign_rhs_code (stmt) == EQ_EXPR
          || gimple_assign_rhs_code (stmt) == NE_EXPR)
        {
          tree lhs = gimple_assign_lhs (stmt);
          tree op1 = gimple_assign_rhs1 (stmt);
          tree op2 = gimple_assign_rhs2 (stmt);
          tree type = TREE_TYPE (op1);

          /* Check whether the comparison operands are of the same boolean
             type as the result type is.
             Check that second operand is an integer-constant with value
             one or zero.  */
          if (TREE_CODE (op2) == INTEGER_CST
              && (integer_zerop (op2) || integer_onep (op2))
              && useless_type_conversion_p (TREE_TYPE (lhs), type))
            {
              enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
              bool is_logical_not = false;

              /* X == 0 and X != 1 is a logical-not of X;
                 X == 1 and X != 0 is X.  */
              if ((cmp_code == EQ_EXPR && integer_zerop (op2))
                  || (cmp_code == NE_EXPR && integer_onep (op2)))
                is_logical_not = true;

              if (is_logical_not == false)
                result = op1;
              /* Only for one-bit precision typed X the transformation
                 !X -> ~X is valid.  */
              else if (TYPE_PRECISION (type) == 1)
                result = build1_loc (gimple_location (stmt), BIT_NOT_EXPR,
                                     type, op1);
              /* Otherwise we use !X -> X ^ 1.  */
              else
                result = build2_loc (gimple_location (stmt), BIT_XOR_EXPR,
                                     type, op1, build_int_cst (type, 1));
            }
        }

      if (!result)
        result = fold_binary_loc (loc, subcode,
                                  TREE_TYPE (gimple_assign_lhs (stmt)),
                                  gimple_assign_rhs1 (stmt),
                                  gimple_assign_rhs2 (stmt));

      if (result)
        {
          STRIP_USELESS_TYPE_CONVERSION (result);
          if (valid_gimple_rhs_p (result))
            return result;
        }
      break;

    case GIMPLE_TERNARY_RHS:
      /* Try to fold a conditional expression.  */
      if (gimple_assign_rhs_code (stmt) == COND_EXPR)
        {
          tree op0 = gimple_assign_rhs1 (stmt);
          tree tem;
          bool set = false;
          location_t cond_loc = gimple_location (stmt);

          if (COMPARISON_CLASS_P (op0))
            {
              fold_defer_overflow_warnings ();
              tem = fold_binary_loc (cond_loc,
                                     TREE_CODE (op0), TREE_TYPE (op0),
                                     TREE_OPERAND (op0, 0),
                                     TREE_OPERAND (op0, 1));
              /* This is actually a conditional expression, not a GIMPLE
                 conditional statement, however, the valid_gimple_rhs_p
                 test still applies.  */
              set = (tem && is_gimple_condexpr (tem)
                     && valid_gimple_rhs_p (tem));
              fold_undefer_overflow_warnings (set, stmt, 0);
            }
          else if (is_gimple_min_invariant (op0))
            {
              tem = op0;
              set = true;
            }
          else
            return NULL_TREE;

          if (set)
            result = fold_build3_loc (cond_loc, COND_EXPR,
                                      TREE_TYPE (gimple_assign_lhs (stmt)), tem,
                                      gimple_assign_rhs2 (stmt),
                                      gimple_assign_rhs3 (stmt));
        }

      if (!result)
        result = fold_ternary_loc (loc, subcode,
                                   TREE_TYPE (gimple_assign_lhs (stmt)),
                                   gimple_assign_rhs1 (stmt),
                                   gimple_assign_rhs2 (stmt),
                                   gimple_assign_rhs3 (stmt));

      if (result)
        {
          STRIP_USELESS_TYPE_CONVERSION (result);
          if (valid_gimple_rhs_p (result))
            return result;
        }
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
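
/* Example of the boolean canonicalization above: for a boolean-typed X,
   X == 0 and X != 1 are a logical not and become X ^ 1 (or ~X for
   one-bit precision types), while X == 1 and X != 0 are just X.  */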
/* Attempt to fold a conditional statement.  Return true if any changes were
   made.  We only attempt to fold the condition expression, and do not perform
   any transformation that would require alteration of the cfg.  It is
   assumed that the operands have been previously folded.  */

static bool
fold_gimple_cond (gimple stmt)
{
  tree result = fold_binary_loc (gimple_location (stmt),
                                 gimple_cond_code (stmt),
                                 boolean_type_node,
                                 gimple_cond_lhs (stmt),
                                 gimple_cond_rhs (stmt));

  if (result)
    {
      STRIP_USELESS_TYPE_CONVERSION (result);
      if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
        {
          gimple_cond_set_condition_from_tree (stmt, result);
          return true;
        }
    }

  return false;
}
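
/* E.g. once propagation has rewritten 'if (a_1 > b_2)' into 'if (3 > 2)',
   fold_binary_loc yields boolean_true_node and the condition is replaced
   by that constant, making the branch trivially resolvable.  */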
/* Replace a statement at *SI_P with a sequence of statements in STMTS,
   adjusting the replacement stmts location and virtual operands.
   If the statement has a lhs the last stmt in the sequence is expected
   to assign to that lhs.  */

static void
gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
{
  gimple stmt = gsi_stmt (*si_p);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  gimple laststore = NULL;
  for (gimple_stmt_iterator i = gsi_last (stmts);
       !gsi_end_p (i); gsi_prev (&i))
    {
      gimple new_stmt = gsi_stmt (i);
      if ((gimple_assign_single_p (new_stmt)
           && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
          || (is_gimple_call (new_stmt)
              && (gimple_call_flags (new_stmt)
                  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
        {
          tree vdef;
          if (!laststore)
            vdef = gimple_vdef (stmt);
          else
            vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
          gimple_set_vdef (new_stmt, vdef);
          if (vdef && TREE_CODE (vdef) == SSA_NAME)
            SSA_NAME_DEF_STMT (vdef) = new_stmt;
          laststore = new_stmt;
        }
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  tree reaching_vuse = gimple_vuse (stmt);
  for (gimple_stmt_iterator i = gsi_start (stmts);
       !gsi_end_p (i); gsi_next (&i))
    {
      gimple new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with exact SSA
         name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
        gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
        reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store release the virtual
     definition of the original statement.  */
  if (laststore
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
          && TREE_CODE (vdef) == SSA_NAME)
        {
          unlink_stmt_vdef (stmt);
          release_ssa_name (vdef);
        }
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}
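
/* The net effect is that the sequence reuses the original statement's
   incoming VUSE for its first load and its outgoing VDEF for the last
   store, with fresh SSA names threaded through any stores in between,
   so the virtual operand chain stays valid without a full SSA update.  */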
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple stmt, new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  push_gimplify_context (gimple_in_ssa_p (cfun));

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
         which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
        {
          pop_gimplify_context (NULL);
          if (gimple_in_ssa_p (cfun))
            {
              unlink_stmt_vdef (stmt);
              release_defs (stmt);
            }
          gsi_replace (si_p, gimple_build_nop (), true);
          return;
        }
    }
  else
    {
      tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
                                       GSI_CONTINUE_LINKING);
    }

  pop_gimplify_context (NULL);

  gsi_replace_with_seq_vops (si_p, stmts);
}
/* Replace the call at *GSI with the gimple value VAL.  */

static void
replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
{
  gimple stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  gimple repl;
  if (lhs)
    {
      if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
        val = fold_convert (TREE_TYPE (lhs), val);
      repl = gimple_build_assign (lhs, val);
    }
  else
    repl = gimple_build_nop ();
  tree vdef = gimple_vdef (stmt);
  if (vdef && TREE_CODE (vdef) == SSA_NAME)
    {
      unlink_stmt_vdef (stmt);
      release_ssa_name (vdef);
    }
  gsi_replace (gsi, repl, true);
}
/* Replace the call at *GSI with the new call REPL and fold that
   again.  */

static void
replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple repl)
{
  gimple stmt = gsi_stmt (*gsi);
  gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
  gimple_set_location (repl, gimple_location (stmt));
  if (gimple_vdef (stmt)
      && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
    {
      gimple_set_vdef (repl, gimple_vdef (stmt));
      gimple_set_vuse (repl, gimple_vuse (stmt));
      SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
    }
  gsi_replace (gsi, repl, true);
  fold_stmt (gsi);
}
/* Return true if VAR is a VAR_DECL or a component thereof.  */

static bool
var_decl_component_p (tree var)
{
  tree inner = var;
  while (handled_component_p (inner))
    inner = TREE_OPERAND (inner, 0);
  return SSA_VAR_P (inner);
}
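
/* E.g. for VAR being a.b[2].c this strips the handled components down
   to the base declaration A and checks that it is an SSA variable.  */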
/* Fold function call to builtin mem{{,p}cpy,move}.  Return
   false if no simplification can be made.
   If ENDP is 0, return DEST (like memcpy).
   If ENDP is 1, return DEST+LEN (like mempcpy).
   If ENDP is 2, return DEST+LEN-1 (like stpcpy).
   If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
   (memmove).  */

static bool
gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
                               tree dest, tree src, int endp)
{
  gimple stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  tree len = gimple_call_arg (stmt, 2);
  tree destvar, srcvar;
  location_t loc = gimple_location (stmt);

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      gimple repl;
      if (gimple_call_lhs (stmt))
        repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
      else
        repl = gimple_build_nop ();
      tree vdef = gimple_vdef (stmt);
      if (vdef && TREE_CODE (vdef) == SSA_NAME)
        {
          unlink_stmt_vdef (stmt);
          release_ssa_name (vdef);
        }
      gsi_replace (gsi, repl, true);
      return true;
    }

  /* If SRC and DEST are the same (and not volatile), return
     DEST{,+LEN,+LEN-1}.  */
  if (operand_equal_p (src, dest, 0))
    {
      unlink_stmt_vdef (stmt);
      if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
        release_ssa_name (gimple_vdef (stmt));
      if (!lhs)
        {
          gsi_replace (gsi, gimple_build_nop (), true);
          return true;
        }
      goto done;
    }
  else
    {
      tree srctype, desttype;
      unsigned int src_align, dest_align;
      tree off0;

      /* Build accesses at offset zero with a ref-all character type.  */
      off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
                                                         ptr_mode, true), 0);

      /* If we can perform the copy efficiently with first doing all loads
         and then all stores inline it that way.  Currently efficiently
         means that we can load all the memory into a single integer
         register which is what MOVE_MAX gives us.  */
      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (tree_fits_uhwi_p (len)
          && compare_tree_int (len, MOVE_MAX) <= 0
          /* ???  Don't transform copies from strings with known length as
             this confuses the tree-ssa-strlen.c.  This doesn't handle
             the case in gcc.dg/strlenopt-8.c which is XFAILed for that
             reason.  */
          && !c_strlen (src, 2))
        {
          unsigned ilen = tree_to_uhwi (len);
          if (exact_log2 (ilen) != -1)
            {
              tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
              if (type
                  && TYPE_MODE (type) != BLKmode
                  && (GET_MODE_SIZE (TYPE_MODE (type)) * BITS_PER_UNIT
                      == ilen * 8)
                  /* If the destination pointer is not aligned we must be able
                     to emit an unaligned store.  */
                  && (dest_align >= GET_MODE_ALIGNMENT (TYPE_MODE (type))
                      || !SLOW_UNALIGNED_ACCESS (TYPE_MODE (type),
                                                 dest_align)))
                {
                  tree srctype = type;
                  tree desttype = type;
                  if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
                    srctype = build_aligned_type (type, src_align);
                  tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
                  tree tem = fold_const_aggregate_ref (srcmem);
                  if (tem)
                    srcmem = tem;
                  else if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type))
                           && SLOW_UNALIGNED_ACCESS (TYPE_MODE (type),
                                                     src_align))
                    srcmem = NULL_TREE;
                  if (srcmem)
                    {
                      gimple new_stmt;
                      if (is_gimple_reg_type (TREE_TYPE (srcmem)))
                        {
                          new_stmt = gimple_build_assign (NULL_TREE, srcmem);
                          if (gimple_in_ssa_p (cfun))
                            srcmem = make_ssa_name (TREE_TYPE (srcmem),
                                                    new_stmt);
                          else
                            srcmem = create_tmp_reg (TREE_TYPE (srcmem),
                                                     NULL);
                          gimple_assign_set_lhs (new_stmt, srcmem);
                          gimple_set_vuse (new_stmt, gimple_vuse (stmt));
                          gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
                        }
                      if (dest_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
                        desttype = build_aligned_type (type, dest_align);
                      new_stmt
                        = gimple_build_assign (fold_build2 (MEM_REF, desttype,
                                                            dest, off0),
                                               srcmem);
                      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
                      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
                      if (gimple_vdef (new_stmt)
                          && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
                        SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
                      if (!lhs)
                        {
                          gsi_replace (gsi, new_stmt, true);
                          return true;
                        }
                      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
                      goto done;
                    }
                }
            }
        }

      if (endp == 3)
        {
          /* Both DEST and SRC must be pointer types.
             ??? This is what old code did.  Is the testing for pointer types
             really mandatory?

             If either SRC is readonly or length is 1, we can use memcpy.  */
          if (!dest_align || !src_align)
            return false;
          if (readonly_data_expr (src)
              || (tree_fits_uhwi_p (len)
                  && (MIN (src_align, dest_align) / BITS_PER_UNIT
                      >= tree_to_uhwi (len))))
            {
              tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
              if (!fn)
                return false;
              gimple_call_set_fndecl (stmt, fn);
              gimple_call_set_arg (stmt, 0, dest);
              gimple_call_set_arg (stmt, 1, src);
              fold_stmt (gsi);
              return true;
            }

          /* If *src and *dest can't overlap, optimize into memcpy as well.  */
          if (TREE_CODE (src) == ADDR_EXPR
              && TREE_CODE (dest) == ADDR_EXPR)
            {
              tree src_base, dest_base, fn;
              HOST_WIDE_INT src_offset = 0, dest_offset = 0;
              HOST_WIDE_INT size = -1;
              HOST_WIDE_INT maxsize = -1;

              srcvar = TREE_OPERAND (src, 0);
              src_base = get_ref_base_and_extent (srcvar, &src_offset,
                                                  &size, &maxsize);
              destvar = TREE_OPERAND (dest, 0);
              dest_base = get_ref_base_and_extent (destvar, &dest_offset,
                                                   &size, &maxsize);
              if (tree_fits_uhwi_p (len))
                maxsize = tree_to_uhwi (len);
              else
                maxsize = -1;
              src_offset /= BITS_PER_UNIT;
              dest_offset /= BITS_PER_UNIT;
              if (SSA_VAR_P (src_base)
                  && SSA_VAR_P (dest_base))
                {
                  if (operand_equal_p (src_base, dest_base, 0)
                      && ranges_overlap_p (src_offset, maxsize,
                                           dest_offset, maxsize))
                    return false;
                }
              else if (TREE_CODE (src_base) == MEM_REF
                       && TREE_CODE (dest_base) == MEM_REF)
                {
                  if (! operand_equal_p (TREE_OPERAND (src_base, 0),
                                         TREE_OPERAND (dest_base, 0), 0))
                    return false;
                  offset_int off = mem_ref_offset (src_base) + src_offset;
                  if (!wi::fits_shwi_p (off))
                    return false;
                  src_offset = off.to_shwi ();

                  off = mem_ref_offset (dest_base) + dest_offset;
                  if (!wi::fits_shwi_p (off))
                    return false;
                  dest_offset = off.to_shwi ();
                  if (ranges_overlap_p (src_offset, maxsize,
                                        dest_offset, maxsize))
                    return false;
                }
              else
                return false;

              fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
              if (!fn)
                return false;
              gimple_call_set_fndecl (stmt, fn);
              gimple_call_set_arg (stmt, 0, dest);
              gimple_call_set_arg (stmt, 1, src);
              fold_stmt (gsi);
              return true;
            }

          /* If the destination and source do not alias optimize into
             memcpy as well.  */
          if ((is_gimple_min_invariant (dest)
               || TREE_CODE (dest) == SSA_NAME)
              && (is_gimple_min_invariant (src)
                  || TREE_CODE (src) == SSA_NAME))
            {
              ao_ref destr, srcr;
              ao_ref_init_from_ptr_and_size (&destr, dest, len);
              ao_ref_init_from_ptr_and_size (&srcr, src, len);
              if (!refs_may_alias_p_1 (&destr, &srcr, false))
                {
                  tree fn;
                  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
                  if (!fn)
                    return false;
                  gimple_call_set_fndecl (stmt, fn);
                  gimple_call_set_arg (stmt, 0, dest);
                  gimple_call_set_arg (stmt, 1, src);
                  fold_stmt (gsi);
                  return true;
                }
            }

          return false;
        }

      if (!tree_fits_shwi_p (len))
        return false;
      /* FIXME:
         This logic loses for arguments like (type *)malloc (sizeof (type)),
         since we strip the casts of up to VOID return value from malloc.
         Perhaps we ought to inherit type from non-VOID argument here?  */
      STRIP_NOPS (src);
      STRIP_NOPS (dest);
      if (!POINTER_TYPE_P (TREE_TYPE (src))
          || !POINTER_TYPE_P (TREE_TYPE (dest)))
        return false;
      /* In the following try to find a type that is most natural to be
         used for the memcpy source and destination and that allows
         the most optimization when memcpy is turned into a plain assignment
         using that type.  In theory we could always use a char[len] type
         but that only gains us that the destination and source possibly
         no longer will have their address taken.  */
      /* As we fold (void *)(p + CST) to (void *)p + CST undo this here.  */
      if (TREE_CODE (src) == POINTER_PLUS_EXPR)
        {
          tree tem = TREE_OPERAND (src, 0);
          STRIP_NOPS (tem);
          if (tem != TREE_OPERAND (src, 0))
            src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
        }
      if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
        {
          tree tem = TREE_OPERAND (dest, 0);
          STRIP_NOPS (tem);
          if (tem != TREE_OPERAND (dest, 0))
            dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
        }
      srctype = TREE_TYPE (TREE_TYPE (src));
      if (TREE_CODE (srctype) == ARRAY_TYPE
          && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
        {
          srctype = TREE_TYPE (srctype);
          STRIP_NOPS (src);
          src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
        }
      desttype = TREE_TYPE (TREE_TYPE (dest));
      if (TREE_CODE (desttype) == ARRAY_TYPE
          && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
        {
          desttype = TREE_TYPE (desttype);
          STRIP_NOPS (dest);
          dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
        }
      if (TREE_ADDRESSABLE (srctype)
          || TREE_ADDRESSABLE (desttype))
        return false;

      /* Make sure we are not copying using a floating-point mode or
         a type whose size possibly does not match its precision.  */
      if (FLOAT_MODE_P (TYPE_MODE (desttype))
          || TREE_CODE (desttype) == BOOLEAN_TYPE
          || TREE_CODE (desttype) == ENUMERAL_TYPE)
        desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
      if (FLOAT_MODE_P (TYPE_MODE (srctype))
          || TREE_CODE (srctype) == BOOLEAN_TYPE
          || TREE_CODE (srctype) == ENUMERAL_TYPE)
        srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
      if (!srctype)
        srctype = desttype;
      if (!desttype)
        desttype = srctype;
      if (!srctype)
        return false;

      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (dest_align < TYPE_ALIGN (desttype)
          || src_align < TYPE_ALIGN (srctype))
        return false;

      destvar = dest;
      STRIP_NOPS (destvar);
      if (TREE_CODE (destvar) == ADDR_EXPR
          && var_decl_component_p (TREE_OPERAND (destvar, 0))
          && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
        destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
      else
        destvar = NULL_TREE;

      srcvar = src;
      STRIP_NOPS (srcvar);
      if (TREE_CODE (srcvar) == ADDR_EXPR
          && var_decl_component_p (TREE_OPERAND (srcvar, 0))
          && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
        {
          if (!destvar
              || src_align >= TYPE_ALIGN (desttype))
            srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
                                  srcvar, off0);
          else if (!STRICT_ALIGNMENT)
            {
              srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
                                            src_align);
              srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
            }
          else
            srcvar = NULL_TREE;
        }
      else
        srcvar = NULL_TREE;

      if (srcvar == NULL_TREE && destvar == NULL_TREE)
        return false;

      if (srcvar == NULL_TREE)
        {
          STRIP_NOPS (src);
          if (src_align >= TYPE_ALIGN (desttype))
            srcvar = fold_build2 (MEM_REF, desttype, src, off0);
          else
            {
              if (STRICT_ALIGNMENT)
                return false;
              srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
                                            src_align);
              srcvar = fold_build2 (MEM_REF, srctype, src, off0);
            }
        }
      else if (destvar == NULL_TREE)
        {
          STRIP_NOPS (dest);
          if (dest_align >= TYPE_ALIGN (srctype))
            destvar = fold_build2 (MEM_REF, srctype, dest, off0);
          else
            {
              if (STRICT_ALIGNMENT)
                return false;
              desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
                                             dest_align);
              destvar = fold_build2 (MEM_REF, desttype, dest, off0);
            }
        }

      gimple new_stmt;
      if (is_gimple_reg_type (TREE_TYPE (srcvar)))
        {
          new_stmt = gimple_build_assign (NULL_TREE, srcvar);
          if (gimple_in_ssa_p (cfun))
            srcvar = make_ssa_name (TREE_TYPE (srcvar), new_stmt);
          else
            srcvar = create_tmp_reg (TREE_TYPE (srcvar), NULL);
          gimple_assign_set_lhs (new_stmt, srcvar);
          gimple_set_vuse (new_stmt, gimple_vuse (stmt));
          gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
        }
      new_stmt = gimple_build_assign (destvar, srcvar);
      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
      if (gimple_vdef (new_stmt)
          && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
        SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
      if (!lhs)
        {
          gsi_replace (gsi, new_stmt, true);
          return true;
        }
      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
    }

done:
  if (endp == 0 || endp == 3)
    len = NULL_TREE;
  else if (endp == 2)
    len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
                           ssize_int (1));
  if (endp == 2 || endp == 1)
    dest = fold_build_pointer_plus_loc (loc, dest, len);

  dest = force_gimple_operand_gsi (gsi, dest, false, NULL_TREE, true,
                                   GSI_SAME_STMT);
  gimple repl = gimple_build_assign (lhs, dest);
  gsi_replace (gsi, repl, true);
  return true;
}
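
/* For example, memcpy (&a, &b, 4) with suitably aligned int-sized
   operands is inlined above as a single load/store pair,

     _1 = MEM_REF[&b, 0];
     MEM_REF[&a, 0] = _1;

   while mempcpy additionally rewrites the LHS to DEST + LEN.  */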
/* Fold function call to builtin memset or bzero at *GSI setting the
   memory of size LEN to VAL.  Return whether a simplification was made.  */

static bool
gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
{
  gimple stmt = gsi_stmt (*gsi);
  tree etype;
  unsigned HOST_WIDE_INT length, cval;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
      return true;
    }

  if (! tree_fits_uhwi_p (len))
    return false;

  if (TREE_CODE (c) != INTEGER_CST)
    return false;

  tree dest = gimple_call_arg (stmt, 0);
  tree var = dest;
  if (TREE_CODE (var) != ADDR_EXPR)
    return false;

  var = TREE_OPERAND (var, 0);
  if (TREE_THIS_VOLATILE (var))
    return false;

  etype = TREE_TYPE (var);
  if (TREE_CODE (etype) == ARRAY_TYPE)
    etype = TREE_TYPE (etype);

  if (!INTEGRAL_TYPE_P (etype)
      && !POINTER_TYPE_P (etype))
    return false;

  if (! var_decl_component_p (var))
    return false;

  length = tree_to_uhwi (len);
  if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
      || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
    return false;

  if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
    return false;

  if (integer_zerop (c))
    cval = 0;
  else
    {
      if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
        return false;

      cval = TREE_INT_CST_LOW (c);
      cval &= 0xff;
      cval |= cval << 8;
      cval |= cval << 16;
      cval |= (cval << 31) << 1;
    }

  var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
  gimple store = gimple_build_assign (var, build_int_cst_type (etype, cval));
  gimple_set_vuse (store, gimple_vuse (stmt));
  tree vdef = gimple_vdef (stmt);
  if (vdef && TREE_CODE (vdef) == SSA_NAME)
    {
      gimple_set_vdef (store, gimple_vdef (stmt));
      SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
    }
  gsi_insert_before (gsi, store, GSI_SAME_STMT);
  if (gimple_call_lhs (stmt))
    {
      gimple asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
      gsi_replace (gsi, asgn, true);
    }
  else
    {
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_next (&gsi2);
      gsi_remove (&gsi2, true);
    }

  return true;
}
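
/* For example, memset (p, 1, 4) with P four-byte aligned becomes the
   single store MEM_REF[p] = 0x01010101: CVAL replicates the byte value
   across the whole stored word via the shift-or cascade above.  */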
/* Return the string length, maximum string length or maximum value of
   ARG in LENGTH.
   If ARG is an SSA name variable, follow its use-def chains.  If LENGTH
   is not NULL and, for TYPE == 0, its value is not equal to the length
   we determine or if we are unable to determine the length or value,
   return false.  VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be returned, 1 for maximum string
   length and 2 for maximum value ARG can have.  */

static bool
get_maxval_strlen (tree arg, tree *length, bitmap *visited, int type)
{
  tree var, val;
  gimple def_stmt;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      if (TREE_CODE (arg) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
          && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
        {
          tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
          if (TREE_CODE (aop0) == INDIRECT_REF
              && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
            return get_maxval_strlen (TREE_OPERAND (aop0, 0),
                                      length, visited, type);
        }

      if (type == 2)
        {
          val = arg;
          if (TREE_CODE (val) != INTEGER_CST
              || tree_int_cst_sgn (val) < 0)
            return false;
        }
      else
        val = c_strlen (arg, 1);
      if (!val)
        return false;

      if (*length)
        {
          if (type > 0)
            {
              if (TREE_CODE (*length) != INTEGER_CST
                  || TREE_CODE (val) != INTEGER_CST)
                return false;

              if (tree_int_cst_lt (*length, val))
                *length = val;
              return true;
            }
          else if (simple_cst_equal (val, *length) != 1)
            return false;
        }

      *length = val;
      return true;
    }

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  */
  if (!*visited)
    *visited = BITMAP_ALLOC (NULL);
  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
      case GIMPLE_ASSIGN:
        /* The RHS of the statement defining VAR must either have a
           constant length or come from another SSA_NAME with a constant
           length.  */
        if (gimple_assign_single_p (def_stmt)
            || gimple_assign_unary_nop_p (def_stmt))
          {
            tree rhs = gimple_assign_rhs1 (def_stmt);
            return get_maxval_strlen (rhs, length, visited, type);
          }
        else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
          {
            tree op2 = gimple_assign_rhs2 (def_stmt);
            tree op3 = gimple_assign_rhs3 (def_stmt);
            return get_maxval_strlen (op2, length, visited, type)
                   && get_maxval_strlen (op3, length, visited, type);
          }
        return false;

      case GIMPLE_PHI:
        {
          /* All the arguments of the PHI node must have the same constant
             length.  */
          unsigned i;

          for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
            {
              tree arg = gimple_phi_arg (def_stmt, i)->def;

              /* If this PHI has itself as an argument, we cannot
                 determine the string length of this argument.  However,
                 if we can find a constant string length for the other
                 PHI args then we can still be sure that this is a
                 constant string length.  So be optimistic and just
                 continue with the next argument.  */
              if (arg == gimple_phi_result (def_stmt))
                continue;

              if (!get_maxval_strlen (arg, length, visited, type))
                return false;
            }
        }
        return true;

      default:
        return false;
    }
}

tree
get_maxval_strlen (tree arg, int type)
{
  bitmap visited = NULL;
  tree len = NULL_TREE;
  if (!get_maxval_strlen (arg, &len, &visited, type))
    len = NULL_TREE;
  if (visited)
    BITMAP_FREE (visited);

  return len;
}
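
/* E.g. for s_3 = PHI <"ab", "cdef"> a query with TYPE == 1 walks both
   PHI arguments and returns 4, the maximum string length; with TYPE == 0
   it fails because the lengths 2 and 4 disagree.  */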
/* Fold function call to builtin strcpy with arguments DEST and SRC.
   Return false if no simplification can be made.  */

static bool
gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
                            tree dest, tree src)
{
  location_t loc = gimple_location (gsi_stmt (*gsi));
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (operand_equal_p (src, dest, 0))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (optimize_function_for_size_p (cfun))
    return false;

  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  tree len = get_maxval_strlen (src, 0);
  if (!len)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
  len = force_gimple_operand_gsi (gsi, len, true,
                                  NULL_TREE, true, GSI_SAME_STMT);
  gimple repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
   Return false if no simplification can be made.  */

static bool
gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
                             tree dest, tree src, tree len)
{
  location_t loc = gimple_location (gsi_stmt (*gsi));
  tree fn;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  /* We can't compare slen with len as constants below if len is not a
     constant.  */
  if (TREE_CODE (len) != INTEGER_CST)
    return false;

  /* Now, we must be passed a constant src ptr parameter.  */
  tree slen = get_maxval_strlen (src, 0);
  if (!slen || TREE_CODE (slen) != INTEGER_CST)
    return false;

  slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

  /* We do not support simplification of this case, though we do
     support it when expanding trees into RTL.  */
  /* FIXME: generate a call to __builtin_memset.  */
  if (tree_int_cst_lt (slen, len))
    return false;

  /* OK transform into builtin memcpy.  */
  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = force_gimple_operand_gsi (gsi, len, true,
                                  NULL_TREE, true, GSI_SAME_STMT);
  gimple repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Simplify a call to the strcat builtin.  DST and SRC are the arguments
   to the call.

   Return true if a simplification was made, false otherwise.

   The simplified form replaces the call with an equivalent sequence
   which computes the same value, but in a more efficient manner
   (including calls to other builtin functions).  */

static bool
gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);

  const char *p = c_getstr (src);

  /* If the string length is zero, return the dst parameter.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
    return false;

  /* See if we can store by pieces into (dst + strlen(dst)).  */
  tree newdst;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
  tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);

  if (!strlen_fn || !memcpy_fn)
    return false;

  /* If the length of the source string isn't computable don't
     split strcat into strlen and memcpy.  */
  tree len = get_maxval_strlen (src, 0);
  if (! len)
    return false;

  /* Create strlen (dst).  */
  gimple_seq stmts = NULL, stmts2;
  gimple repl = gimple_build_call (strlen_fn, 1, dst);
  gimple_set_location (repl, loc);
  if (gimple_in_ssa_p (cfun))
    newdst = make_ssa_name (size_type_node, NULL);
  else
    newdst = create_tmp_reg (size_type_node, NULL);
  gimple_call_set_lhs (repl, newdst);
  gimple_seq_add_stmt_without_update (&stmts, repl);

  /* Create (dst p+ strlen (dst)).  */
  newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
  newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len,
                        build_int_cst (size_type_node, 1));
  len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
  gimple_seq_add_stmt_without_update (&stmts, repl);
  if (gimple_call_lhs (stmt))
    {
      repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      gsi_replace_with_seq_vops (gsi, stmts);
      /* gsi now points at the assignment to the lhs, get a
         stmt iterator to the memcpy call.
         ???  We can't use gsi_for_stmt as that doesn't work when the
         CFG isn't built yet.  */
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (&gsi2);
      fold_stmt (&gsi2);
    }
  else
    {
      gsi_replace_with_seq_vops (gsi, stmts);
      fold_stmt (gsi);
    }
  return true;
}
/* Fold a call to the __strcat_chk builtin FNDECL.  DEST, SRC, and SIZE
   are the arguments to the call.  */

static bool
gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree size = gimple_call_arg (stmt, 2);
  tree fn;
  const char *p;

  p = c_getstr (src);
  /* If the SRC parameter is "", return DEST.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
    return false;

  /* If __builtin_strcat_chk is used, assume strcat is available.  */
  fn = builtin_decl_explicit (BUILT_IN_STRCAT);
  if (!fn)
    return false;

  gimple repl = gimple_build_call (fn, 2, dest, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold a call to the fputs builtin.  ARG0 and ARG1 are the arguments
   to the call.  UNLOCKED is true if this is actually a call to
   fputs_unlocked.  Return false if no simplification was possible.  */

static bool
gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
                           tree arg0, tree arg1,
                           bool unlocked)
{
  gimple stmt = gsi_stmt (*gsi);

  /* If we're using an unlocked function, assume the other unlocked
     functions exist explicitly.  */
  tree const fn_fputc = (unlocked
                         ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
                         : builtin_decl_implicit (BUILT_IN_FPUTC));
  tree const fn_fwrite = (unlocked
                          ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
                          : builtin_decl_implicit (BUILT_IN_FWRITE));

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt))
    return false;

  /* Get the length of the string passed to fputs.  If the length
     can't be determined, punt.  */
  tree len = get_maxval_strlen (arg0, 0);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return false;

  switch (compare_tree_int (len, 1))
    {
    case -1: /* length is 0, delete the call entirely.  */
      replace_call_with_value (gsi, integer_zero_node);
      return true;

    case 0: /* length is 1, call fputc.  */
      {
        const char *p = c_getstr (arg0);
        if (p != NULL)
          {
            if (!fn_fputc)
              return false;

            gimple repl = gimple_build_call (fn_fputc, 2,
                                             build_int_cst
                                               (integer_type_node, p[0]), arg1);
            replace_call_with_call_and_fold (gsi, repl);
            return true;
          }
      }
      /* FALLTHROUGH */
    case 1: /* length is greater than 1, call fwrite.  */
      {
        /* If optimizing for size keep fputs.  */
        if (optimize_function_for_size_p (cfun))
          return false;
        /* New argument list transforming fputs(string, stream) to
           fwrite(string, 1, len, stream).  */
        if (!fn_fwrite)
          return false;

        gimple repl = gimple_build_call (fn_fwrite, 4, arg0,
                                         size_one_node, len, arg1);
        replace_call_with_call_and_fold (gsi, repl);
        return true;
      }
    default:
      gcc_unreachable ();
    }
}
/* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   DEST, SRC, LEN, and SIZE are the arguments to the call.
   FCODE is the BUILT_IN_* code of the builtin.  */

static bool
gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
                                tree dest, tree src, tree len, tree size,
                                enum built_in_function fcode)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST
     (resp. DEST+LEN for __mempcpy_chk).  */
  if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
    {
      if (fcode != BUILT_IN_MEMPCPY_CHK)
        {
          replace_call_with_value (gsi, dest);
          return true;
        }
      else
        {
          tree temp = fold_build_pointer_plus_loc (loc, dest, len);
          temp = force_gimple_operand_gsi (gsi, temp,
                                           false, NULL_TREE, true,
                                           GSI_SAME_STMT);
          replace_call_with_value (gsi, temp);
          return true;
        }
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (len, 2);
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
        {
          /* If LEN is not constant, try MAXLEN too.
             For MAXLEN only allow optimizing into non-_ocs function
             if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
          if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
            {
              if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
                {
                  /* (void) __mempcpy_chk () can be optimized into
                     (void) __memcpy_chk ().  */
                  fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
                  if (!fn)
                    return false;

                  gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
                  replace_call_with_call_and_fold (gsi, repl);
                  return true;
                }
              return false;
            }
        }
      else
        maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
        return false;
    }

  fn = NULL_TREE;
  /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
     mem{cpy,pcpy,move,set} is available.  */
  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
      break;
    case BUILT_IN_MEMPCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
      break;
    case BUILT_IN_MEMMOVE_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
      break;
    case BUILT_IN_MEMSET_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMSET);
      break;
    default:
      break;
    }

  if (!fn)
    return false;

  gimple repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold a call to the __st[rp]cpy_chk builtin.
   DEST, SRC, and SIZE are the arguments to the call.
   FCODE is the BUILT_IN_* code of the builtin.  */

static bool
gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
                                tree dest,
                                tree src, tree size,
                                enum built_in_function fcode)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree len, fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (src, 1);
  if (! integer_all_onesp (size))
    {
      len = c_strlen (src, 1);
      if (! len || ! tree_fits_uhwi_p (len))
        {
          /* If LEN is not constant, try MAXLEN too.
             For MAXLEN only allow optimizing into non-_ocs function
             if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
          if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
            {
              if (fcode == BUILT_IN_STPCPY_CHK)
                {
                  if (! ignore)
                    return false;

                  /* If return value of __stpcpy_chk is ignored,
                     optimize into __strcpy_chk.  */
                  fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
                  if (!fn)
                    return false;

                  gimple repl = gimple_build_call (fn, 3, dest, src, size);
                  replace_call_with_call_and_fold (gsi, repl);
                  return true;
                }

              if (! len || TREE_SIDE_EFFECTS (len))
                return false;

              /* If c_strlen returned something, but not a constant,
                 transform __strcpy_chk into __memcpy_chk.  */
              fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
              if (!fn)
                return false;

              len = fold_convert_loc (loc, size_type_node, len);
              len = size_binop_loc (loc, PLUS_EXPR, len,
                                    build_int_cst (size_type_node, 1));
              len = force_gimple_operand_gsi (gsi, len, true, NULL_TREE,
                                              true, GSI_SAME_STMT);
              gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
              replace_call_with_call_and_fold (gsi, repl);
              return true;
            }
        }
      else
        maxlen = len;

      if (! tree_int_cst_lt (maxlen, size))
        return false;
    }

  /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
                              ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
  if (!fn)
    return false;

  gimple repl = gimple_build_call (fn, 2, dest, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold a call to the __st{r,p}ncpy_chk builtin.  DEST, SRC, LEN, and SIZE
   are the arguments to the call.  FCODE is the BUILT_IN_* code of the
   builtin.  */

static bool
gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
                                 tree dest, tree src,
                                 tree len, tree size,
                                 enum built_in_function fcode)
{
  gimple stmt = gsi_stmt (*gsi);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
    {
      /* If return value of __stpncpy_chk is ignored,
         optimize into __strncpy_chk.  */
      fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
      if (fn)
        {
          gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
          replace_call_with_call_and_fold (gsi, repl);
          return true;
        }
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (len, 2);
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
        {
          /* If LEN is not constant, try MAXLEN too.
             For MAXLEN only allow optimizing into non-_ocs function
             if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
          if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
            return false;
        }
      else
        maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
        return false;
    }

  /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
                              ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
  if (!fn)
    return false;

  gimple repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold a call to __{,v}snprintf_chk.  Return false if a normal call
   should be emitted rather than expanding the function inline.  FCODE
   is either BUILT_IN_SNPRINTF_CHK or BUILT_IN_VSNPRINTF_CHK.  */

static bool
gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
                                  enum built_in_function fcode)
{
  gimple stmt = gsi_stmt (*gsi);
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call.  */
  if (gimple_call_num_args (stmt) < 5)
    return false;

  dest = gimple_call_arg (stmt, 0);
  len = gimple_call_arg (stmt, 1);
  flag = gimple_call_arg (stmt, 2);
  size = gimple_call_arg (stmt, 3);
  fmt = gimple_call_arg (stmt, 4);

  if (! tree_fits_uhwi_p (size))
    return false;

  if (! integer_all_onesp (size))
    {
      tree maxlen = get_maxval_strlen (len, 2);
      if (! tree_fits_uhwi_p (len))
        {
          /* If LEN is not constant, try MAXLEN too.
             For MAXLEN only allow optimizing into non-_ocs function
             if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
          if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
            return false;
        }
      else
        maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
        return false;
    }

  if (!init_target_chars ())
    return false;

  /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      fmt_str = c_getstr (fmt);
      if (fmt_str == NULL)
        return false;
      if (strchr (fmt_str, target_percent) != NULL
          && strcmp (fmt_str, target_percent_s))
        return false;
    }

  /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
     available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
                              ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 5 arguments by 3 retaining
     trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, len);
  gimple_call_set_arg (stmt, 2, fmt);
  for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
/* Fold a call to __{,v}sprintf_chk at *GSI.  Return true if the call
   was folded into a plain {,v}sprintf call, false if a normal call
   should be emitted instead.  FCODE is either BUILT_IN_SPRINTF_CHK
   or BUILT_IN_VSPRINTF_CHK.  */

static bool
gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
				 enum built_in_function fcode)
{
  gimple stmt = gsi_stmt (*gsi);
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;
  unsigned nargs = gimple_call_num_args (stmt);

  /* Verify the required arguments in the original call.  */
  if (nargs < 4)
    return false;
  dest = gimple_call_arg (stmt, 0);
  flag = gimple_call_arg (stmt, 1);
  size = gimple_call_arg (stmt, 2);
  fmt = gimple_call_arg (stmt, 3);

  if (! tree_fits_uhwi_p (size))
    return false;

  len = NULL_TREE;

  if (!init_target_chars ())
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str != NULL)
    {
      /* If the format doesn't contain % args or %%, we know the size.  */
      if (strchr (fmt_str, target_percent) == 0)
	{
	  if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
	    len = build_int_cstu (size_type_node, strlen (fmt_str));
	}
      /* If the format is "%s" and the first ... argument is a string
	 literal, we know the size too.  */
      else if (fcode == BUILT_IN_SPRINTF_CHK
	       && strcmp (fmt_str, target_percent_s) == 0)
	{
	  tree arg;

	  if (nargs == 5)
	    {
	      arg = gimple_call_arg (stmt, 4);
	      if (POINTER_TYPE_P (TREE_TYPE (arg)))
		{
		  len = c_strlen (arg, 1);
		  if (! len || ! tree_fits_uhwi_p (len))
		    len = NULL_TREE;
		}
	    }
	}
    }

  if (! integer_all_onesp (size))
    {
      if (! len || ! tree_int_cst_lt (len, size))
	return false;
    }

  /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
			      ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 4 arguments by 2,
     retaining the trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, fmt);
  for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
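
/* An illustrative sketch (hypothetical operands): with a known 16-byte
   destination, the transformation above rewrites

     __builtin_sprintf_chk (buf, 0, 16, "hello");

   into

     sprintf (buf, "hello");

   since the format contains no '%' and its length 5 is less than 16.  */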
/* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
   ORIG may be null if this is a 2-argument call.  We don't attempt to
   simplify calls with more than 3 arguments.

   Return true if a simplification was made, false otherwise.  */

static bool
gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree fmt = gimple_call_arg (stmt, 1);
  tree orig = NULL_TREE;
  const char *fmt_str = NULL;

  /* Verify the required arguments in the original call.  We deal with two
     types of sprintf() calls: 'sprintf (str, fmt)' and
     'sprintf (dest, "%s", orig)'.  */
  if (gimple_call_num_args (stmt) > 3)
    return false;

  if (gimple_call_num_args (stmt) == 3)
    orig = gimple_call_arg (stmt, 2);

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);

      if (!fn)
	return false;

      /* Don't optimize sprintf (buf, "abc", ptr++).  */
      if (orig)
	return false;

      /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
	 'format' is known to contain no % formats.  */
      gimple_seq stmts = NULL;
      gimple repl = gimple_build_call (fn, 2, dest, fmt);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  repl = gimple_build_assign (gimple_call_lhs (stmt),
				      build_int_cst (integer_type_node,
						     strlen (fmt_str)));
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the strcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn;
      fn = builtin_decl_implicit (BUILT_IN_STRCPY);

      if (!fn)
	return false;

      /* Don't crash on sprintf (str1, "%s").  */
      if (!orig)
	return false;

      tree orig_len = NULL_TREE;
      if (gimple_call_lhs (stmt))
	{
	  orig_len = get_maxval_strlen (orig, 0);
	  if (!orig_len)
	    return false;
	}

      /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2).  */
      gimple_seq stmts = NULL;
      gimple repl = gimple_build_call (fn, 2, dest, orig);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  if (!useless_type_conversion_p (integer_type_node,
					  TREE_TYPE (orig_len)))
	    orig_len = fold_convert (integer_type_node, orig_len);
	  repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the strcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }
  return false;
}
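
/* An illustrative sketch (hypothetical operands): with a '%'-free format
   the code above rewrites

     n = sprintf (buf, "abc");

   into

     strcpy (buf, "abc");
     n = 3;

   using the compile-time length of the format string for the result.  */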
/* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
   FMT, and ORIG.  ORIG may be null if this is a 3-argument call.  We don't
   attempt to simplify calls with more than 4 arguments.

   Return true if a simplification was made, false otherwise.  */

static bool
gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree destsize = gimple_call_arg (stmt, 1);
  tree fmt = gimple_call_arg (stmt, 2);
  tree orig = NULL_TREE;
  const char *fmt_str = NULL;

  if (gimple_call_num_args (stmt) > 4)
    return false;

  if (gimple_call_num_args (stmt) == 4)
    orig = gimple_call_arg (stmt, 3);

  if (!tree_fits_uhwi_p (destsize))
    return false;
  unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;

      /* Don't optimize snprintf (buf, 4, "abc", ptr++).  */
      if (orig)
	return false;

      /* We could expand this as
	 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
	 or to
	 memcpy (str, fmt_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      size_t len = strlen (fmt_str);
      if (len >= destlen)
	return false;

      gimple_seq stmts = NULL;
      gimple repl = gimple_build_call (fn, 2, dest, fmt);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  repl = gimple_build_assign (gimple_call_lhs (stmt),
				      build_int_cst (integer_type_node, len));
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the strcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;

      /* Don't crash on snprintf (str1, cst, "%s").  */
      if (!orig)
	return false;

      tree orig_len = get_maxval_strlen (orig, 0);
      if (!orig_len)
	return false;

      /* We could expand this as
	 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
	 or to
	 memcpy (str1, str2_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      if (compare_tree_int (orig_len, destlen) >= 0)
	return false;

      /* Convert snprintf (str1, cst, "%s", str2) into
	 strcpy (str1, str2) if strlen (str2) < cst.  */
      gimple_seq stmts = NULL;
      gimple repl = gimple_build_call (fn, 2, dest, orig);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  if (!useless_type_conversion_p (integer_type_node,
					  TREE_TYPE (orig_len)))
	    orig_len = fold_convert (integer_type_node, orig_len);
	  repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the strcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }
  return false;
}
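
/* An illustrative sketch (hypothetical operands): when the source length
   is known to be smaller than the constant buffer size, the code above
   rewrites

     snprintf (buf, 32, "%s", str2);

   into

     strcpy (buf, str2);

   because no truncation can occur.  */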
/* Fold a call to __builtin_strlen with known length LEN.  */

static bool
gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree len = get_maxval_strlen (gimple_call_arg (stmt, 0), 0);
  if (!len)
    return false;
  len = force_gimple_operand_gsi (gsi, len, true, NULL, true, GSI_SAME_STMT);
  replace_call_with_value (gsi, len);
  return true;
}
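
/* An illustrative sketch: if the argument is known to hold the literal
   "abcd", the strlen call is replaced by the constant 4.  */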
/* Fold the non-target builtin at *GSI and return whether any simplification
   was made.  */

static bool
gimple_fold_builtin (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree callee = gimple_call_fndecl (stmt);

  /* Give up for always_inline inline builtins until they are
     inlined.  */
  if (avoid_folding_inline_builtin (callee))
    return false;

  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_BZERO:
      return gimple_fold_builtin_memset (gsi, integer_zero_node,
					 gimple_call_arg (stmt, 1));
    case BUILT_IN_MEMSET:
      return gimple_fold_builtin_memset (gsi,
					 gimple_call_arg (stmt, 1),
					 gimple_call_arg (stmt, 2));
    case BUILT_IN_BCOPY:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 1),
					    gimple_call_arg (stmt, 0), 3);
    case BUILT_IN_MEMCPY:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1), 0);
    case BUILT_IN_MEMPCPY:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1), 1);
    case BUILT_IN_MEMMOVE:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1), 3);
    case BUILT_IN_SPRINTF_CHK:
    case BUILT_IN_VSPRINTF_CHK:
      return gimple_fold_builtin_sprintf_chk (gsi, DECL_FUNCTION_CODE (callee));
    case BUILT_IN_STRCAT_CHK:
      return gimple_fold_builtin_strcat_chk (gsi);
    case BUILT_IN_STRLEN:
      return gimple_fold_builtin_strlen (gsi);
    case BUILT_IN_STRCPY:
      return gimple_fold_builtin_strcpy (gsi,
					 gimple_call_arg (stmt, 0),
					 gimple_call_arg (stmt, 1));
    case BUILT_IN_STRNCPY:
      return gimple_fold_builtin_strncpy (gsi,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2));
    case BUILT_IN_STRCAT:
      return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
					 gimple_call_arg (stmt, 1));
    case BUILT_IN_FPUTS:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1), false);
    case BUILT_IN_FPUTS_UNLOCKED:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1), true);
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      return gimple_fold_builtin_memory_chk (gsi,
					     gimple_call_arg (stmt, 0),
					     gimple_call_arg (stmt, 1),
					     gimple_call_arg (stmt, 2),
					     gimple_call_arg (stmt, 3),
					     DECL_FUNCTION_CODE (callee));
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      return gimple_fold_builtin_stxcpy_chk (gsi,
					     gimple_call_arg (stmt, 0),
					     gimple_call_arg (stmt, 1),
					     gimple_call_arg (stmt, 2),
					     DECL_FUNCTION_CODE (callee));
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      return gimple_fold_builtin_stxncpy_chk (gsi,
					      gimple_call_arg (stmt, 0),
					      gimple_call_arg (stmt, 1),
					      gimple_call_arg (stmt, 2),
					      gimple_call_arg (stmt, 3),
					      DECL_FUNCTION_CODE (callee));
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      return gimple_fold_builtin_snprintf_chk (gsi,
					       DECL_FUNCTION_CODE (callee));
    case BUILT_IN_SNPRINTF:
      return gimple_fold_builtin_snprintf (gsi);
    case BUILT_IN_SPRINTF:
      return gimple_fold_builtin_sprintf (gsi);
    default:;
    }

  /* Try the generic builtin folder.  */
  bool ignore = (gimple_call_lhs (stmt) == NULL);
  tree result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
	STRIP_NOPS (result);
      else
	result = fold_convert (gimple_call_return_type (stmt), result);
      if (!update_call_from_tree (gsi, result))
	gimplify_and_update_call_from_tree (gsi, result);
      return true;
    }

  return false;
}
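
/* An illustrative note: the dispatch above also maps the BSD functions
   onto their ISO C counterparts, e.g. bzero (p, n) is folded like
   memset (p, 0, n) and bcopy (src, dst, n) like memmove (dst, src, n);
   note the swapped pointer arguments in the bcopy case.  */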
/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.  */

static bool
gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
{
  gimple stmt = gsi_stmt (*gsi);
  tree callee;
  bool changed = false;
  unsigned i;

  /* Fold *& in call arguments.  */
  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
      {
	tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
	if (tmp)
	  {
	    gimple_call_set_arg (stmt, i, tmp);
	    changed = true;
	  }
      }

  /* Check for virtual calls that became direct calls.  */
  callee = gimple_call_fn (stmt);
  if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
    {
      if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
	{
	  if (dump_file && virtual_method_call_p (callee)
	      && !possible_polymorphic_call_target_p
		    (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
						     (OBJ_TYPE_REF_EXPR (callee)))))
	    {
	      fprintf (dump_file,
		       "Type inheritance inconsistent devirtualization of ");
	      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, callee, TDF_SLIM);
	      fprintf (dump_file, "\n");
	    }

	  gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
	  changed = true;
	}
      else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
	{
	  bool final;
	  vec <cgraph_node *>targets
	    = possible_polymorphic_call_targets (callee, stmt, &final);
	  if (final && targets.length () <= 1 && dbg_cnt (devirt))
	    {
	      tree lhs = gimple_call_lhs (stmt);
	      if (dump_enabled_p ())
		{
		  location_t loc = gimple_location_safe (stmt);
		  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
				   "folding virtual function call to %s\n",
				   targets.length () == 1
				   ? targets[0]->name ()
				   : "__builtin_unreachable");
		}
	      if (targets.length () == 1)
		{
		  gimple_call_set_fndecl (stmt, targets[0]->decl);
		  changed = true;
		  /* If the call becomes noreturn, remove the lhs.  */
		  if (lhs && (gimple_call_flags (stmt) & ECF_NORETURN))
		    {
		      if (TREE_CODE (lhs) == SSA_NAME)
			{
			  tree var = create_tmp_var (TREE_TYPE (lhs), NULL);
			  tree def = get_or_create_ssa_default_def (cfun, var);
			  gimple new_stmt = gimple_build_assign (lhs, def);
			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
			}
		      gimple_call_set_lhs (stmt, NULL_TREE);
		    }
		}
	      else
		{
		  tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
		  gimple new_stmt = gimple_build_call (fndecl, 0);
		  gimple_set_location (new_stmt, gimple_location (stmt));
		  if (lhs && TREE_CODE (lhs) == SSA_NAME)
		    {
		      tree var = create_tmp_var (TREE_TYPE (lhs), NULL);
		      tree def = get_or_create_ssa_default_def (cfun, var);

		      /* To satisfy the condition for
			 cgraph_update_edges_for_call_stmt_node,
			 we need to preserve the GIMPLE_CALL statement
			 at the position of the GSI iterator.  */
		      update_call_from_tree (gsi, def);
		      gsi_insert_before (gsi, new_stmt, GSI_NEW_STMT);
		    }
		  else
		    gsi_replace (gsi, new_stmt, true);
		  return true;
		}
	    }
	}
    }

  if (inplace)
    return changed;

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    {
      if (gimple_fold_builtin (gsi))
	changed = true;
    }
  else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
    {
      changed |= targetm.gimple_fold_builtin (gsi);
    }
  else if (gimple_call_internal_p (stmt))
    {
      enum tree_code subcode = ERROR_MARK;
      tree result = NULL_TREE;
      switch (gimple_call_internal_fn (stmt))
	{
	case IFN_BUILTIN_EXPECT:
	  result = fold_builtin_expect (gimple_location (stmt),
					gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1),
					gimple_call_arg (stmt, 2));
	  break;
	case IFN_UBSAN_CHECK_ADD:
	  subcode = PLUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_SUB:
	  subcode = MINUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_MUL:
	  subcode = MULT_EXPR;
	  break;
	default:
	  break;
	}
      if (subcode != ERROR_MARK)
	{
	  tree arg0 = gimple_call_arg (stmt, 0);
	  tree arg1 = gimple_call_arg (stmt, 1);
	  /* x = y + 0; x = y - 0; x = y * 0; */
	  if (integer_zerop (arg1))
	    result = subcode == MULT_EXPR
		     ? build_zero_cst (TREE_TYPE (arg0))
		     : arg0;
	  /* x = 0 + y; x = 0 * y; */
	  else if (subcode != MINUS_EXPR && integer_zerop (arg0))
	    result = subcode == MULT_EXPR
		     ? build_zero_cst (TREE_TYPE (arg0))
		     : arg1;
	  /* x = y - y; */
	  else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
	    result = build_zero_cst (TREE_TYPE (arg0));
	  /* x = y * 1; x = 1 * y; */
	  else if (subcode == MULT_EXPR)
	    {
	      if (integer_onep (arg1))
		result = arg0;
	      else if (integer_onep (arg0))
		result = arg1;
	    }
	}
      if (result)
	{
	  if (!update_call_from_tree (gsi, result))
	    gimplify_and_update_call_from_tree (gsi, result);
	  changed = true;
	}
    }

  return changed;
}
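
/* An illustrative sketch: for the internal UBSAN checks the identities
   above fold

     x = UBSAN_CHECK_ADD (y, 0);   into   x = y;
     x = UBSAN_CHECK_MUL (y, 1);   into   x = y;
     x = UBSAN_CHECK_SUB (y, y);   into   x = 0;

   since none of these operations can overflow.  */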
/* Canonicalize MEM_REFs invariant address operand after propagation.  */

static bool
maybe_canonicalize_mem_ref_addr (tree *t)
{
  bool res = false;

  if (TREE_CODE (*t) == ADDR_EXPR)
    t = &TREE_OPERAND (*t, 0);

  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
     of invariant addresses into a SSA name MEM_REF address.  */
  if (TREE_CODE (*t) == MEM_REF
      || TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (*t, 0);
      if (TREE_CODE (addr) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
	      || handled_component_p (TREE_OPERAND (addr, 0))))
	{
	  tree base;
	  HOST_WIDE_INT coffset;
	  base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
						&coffset);
	  if (!base)
	    gcc_unreachable ();

	  TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
	  TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
						  TREE_OPERAND (*t, 1),
						  size_int (coffset));
	  res = true;
	}
      gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
			   || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
    }

  /* Canonicalize back MEM_REFs to plain reference trees if the object
     accessed is a decl that has the same access semantics as the MEM_REF.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1)))
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
      if (/* Same volatile qualification.  */
	  TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
	  /* Same TBAA behavior with -fstrict-aliasing.  */
	  && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
	  && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
	      == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
	  /* Same alignment.  */
	  && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
	  /* We have to look out here to not drop a required conversion
	     from the rhs to the lhs if *t appears on the lhs or vice-versa
	     if it appears on the rhs.  Thus require strict type
	     compatibility.  */
	  && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
	{
	  *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
	  res = true;
	}
    }

  /* Canonicalize TARGET_MEM_REF in particular with respect to
     the indexes becoming constant.  */
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
	{
	  *t = tem;
	  res = true;
	}
    }

  return res;
}
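
/* An illustrative sketch: after propagating an invariant address, a
   reference such as MEM[&a.b, 4] is rewritten to MEM[&a, C + 4], where C
   is the constant byte offset of field b within a; conversely a
   zero-offset MEM[&i, 0] whose type matches the decl is canonicalized
   back to the plain reference i.  */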
/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases.  */

static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
{
  bool changed = false;
  gimple stmt = gsi_stmt (*gsi);
  unsigned i;

  /* First do required canonicalization of [TARGET_]MEM_REF addresses
     after propagation.
     ???  This shouldn't be done in generic folding but in the
     propagation helpers which also know whether an address was
     propagated.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
	{
	  tree *rhs = gimple_assign_rhs1_ptr (stmt);
	  if ((REFERENCE_CLASS_P (*rhs)
	       || TREE_CODE (*rhs) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (rhs))
	    changed = true;
	  tree *lhs = gimple_assign_lhs_ptr (stmt);
	  if (REFERENCE_CLASS_P (*lhs)
	      && maybe_canonicalize_mem_ref_addr (lhs))
	    changed = true;
	}
      break;
    case GIMPLE_CALL:
      {
	for (i = 0; i < gimple_call_num_args (stmt); ++i)
	  {
	    tree *arg = gimple_call_arg_ptr (stmt, i);
	    if (REFERENCE_CLASS_P (*arg)
		&& maybe_canonicalize_mem_ref_addr (arg))
	      changed = true;
	  }
	tree *lhs = gimple_call_lhs_ptr (stmt);
	if (*lhs
	    && REFERENCE_CLASS_P (*lhs)
	    && maybe_canonicalize_mem_ref_addr (lhs))
	  changed = true;
	break;
      }
    case GIMPLE_ASM:
      {
	for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (stmt, i);
	    tree op = TREE_VALUE (link);
	    if (REFERENCE_CLASS_P (op)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
	for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (stmt, i);
	    tree op = TREE_VALUE (link);
	    if ((REFERENCE_CLASS_P (op)
		 || TREE_CODE (op) == ADDR_EXPR)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
      }
      break;
    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree *val = gimple_debug_bind_get_value_ptr (stmt);
	  if (*val
	      && (REFERENCE_CLASS_P (*val)
		  || TREE_CODE (*val) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (val))
	    changed = true;
	}
      break;
    default:;
    }

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	unsigned old_num_ops = gimple_num_ops (stmt);
	enum tree_code subcode = gimple_assign_rhs_code (stmt);
	tree lhs = gimple_assign_lhs (stmt);
	tree new_rhs;
	/* First canonicalize operand order.  This avoids building new
	   trees if this is the only thing fold would later do.  */
	if ((commutative_tree_code (subcode)
	     || commutative_ternary_tree_code (subcode))
	    && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
				     gimple_assign_rhs2 (stmt), false))
	  {
	    tree tem = gimple_assign_rhs1 (stmt);
	    gimple_assign_set_rhs1 (stmt, gimple_assign_rhs2 (stmt));
	    gimple_assign_set_rhs2 (stmt, tem);
	    changed = true;
	  }
	new_rhs = fold_gimple_assign (gsi);
	if (new_rhs
	    && !useless_type_conversion_p (TREE_TYPE (lhs),
					   TREE_TYPE (new_rhs)))
	  new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	if (new_rhs
	    && (!inplace
		|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	break;
      }

    case GIMPLE_COND:
      changed |= fold_gimple_cond (stmt);
      break;

    case GIMPLE_CALL:
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      {
	size_t noutputs;
	const char **oconstraints;
	const char *constraint;
	bool allows_mem, allows_reg;

	noutputs = gimple_asm_noutputs (stmt);
	oconstraints = XALLOCAVEC (const char *, noutputs);

	for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (stmt, i);
	    tree op = TREE_VALUE (link);
	    oconstraints[i]
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, true)) != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
	for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (stmt, i);
	    tree op = TREE_VALUE (link);
	    constraint
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    parse_input_constraint (&constraint, 0, 0, noutputs, 0,
				    oconstraints, &allows_mem, &allows_reg);
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, !allows_reg && allows_mem))
		   != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
      }
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree val = gimple_debug_bind_get_value (stmt);
	  if (val
	      && REFERENCE_CLASS_P (val))
	    {
	      tree tem = maybe_fold_reference (val, false);
	      if (tem)
		{
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	  else if (val
		   && TREE_CODE (val) == ADDR_EXPR)
	    {
	      tree ref = TREE_OPERAND (val, 0);
	      tree tem = maybe_fold_reference (ref, false);
	      if (tem)
		{
		  tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	}
      break;

    default:;
    }

  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  */
  if (gimple_has_lhs (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
	{
	  tree new_lhs = maybe_fold_reference (lhs, true);
	  if (new_lhs)
	    {
	      gimple_set_lhs (stmt, new_lhs);
	      changed = true;
	    }
	}
    }

  return changed;
}
/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.
   The statement pointed to by GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  return fold_stmt_1 (gsi, false);
}

/* Perform the minimal folding on statement *GSI.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.
   The statement *GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt_inplace (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  bool changed = fold_stmt_1 (gsi, true);
  gcc_assert (gsi_stmt (*gsi) == stmt);
  return changed;
}
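
/* An illustrative sketch: after constant propagation substitutes &x into
   a dereference, fold_stmt cleans up the result, e.g. *&x = 0 becomes
   x = 0.  fold_stmt may replace the statement entirely, while
   fold_stmt_inplace only simplifies it in place.  */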
/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
   if EXPR is null or we don't know how.
   If non-null, the result always has boolean type.  */

static tree
canonicalize_bool (tree expr, bool invert)
{
  if (!expr)
    return NULL_TREE;

  if (invert)
    {
      if (integer_nonzerop (expr))
	return boolean_false_node;
      else if (integer_zerop (expr))
	return boolean_true_node;
      else if (TREE_CODE (expr) == SSA_NAME)
	return fold_build2 (EQ_EXPR, boolean_type_node, expr,
			    build_int_cst (TREE_TYPE (expr), 0));
      else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
	return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
			    boolean_type_node,
			    TREE_OPERAND (expr, 0),
			    TREE_OPERAND (expr, 1));
      else
	return NULL_TREE;
    }
  else
    {
      if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
	return expr;
      if (integer_nonzerop (expr))
	return boolean_true_node;
      else if (integer_zerop (expr))
	return boolean_false_node;
      else if (TREE_CODE (expr) == SSA_NAME)
	return fold_build2 (NE_EXPR, boolean_type_node, expr,
			    build_int_cst (TREE_TYPE (expr), 0));
      else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
	return fold_build2 (TREE_CODE (expr),
			    boolean_type_node,
			    TREE_OPERAND (expr, 0),
			    TREE_OPERAND (expr, 1));
      else
	return NULL_TREE;
    }
}
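
/* An illustrative sketch: canonicalize_bool (x, true) for an SSA name x
   yields the boolean comparison x == 0, while for the comparison a < b it
   yields the inverted comparison a >= b.  */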
/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  */

static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
			const_tree op1, const_tree op2)
{
  gimple s;

  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      if (operand_equal_p (expr, op1, 0))
	return ((code == NE_EXPR && integer_zerop (op2))
		|| (code == EQ_EXPR && integer_nonzerop (op2)));
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
	  && gimple_assign_rhs_code (s) == code
	  && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
	  && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
	return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
	{
	  enum tree_code c = gimple_assign_rhs_code (s);
	  if ((c == NE_EXPR && integer_zerop (op2))
	      || (c == EQ_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr, c,
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	  if ((c == EQ_EXPR && integer_zerop (op2))
	      || (c == NE_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr,
					   invert_tree_comparison (c, false),
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	}
    }
  return false;
}
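
/* An illustrative sketch: given the GIMPLE definition t = (a < b), the
   expression t is recognized as equivalent to the comparison (a < b).  */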
/* Check to see if two boolean expressions OP1 and OP2 are logically
   equivalent.  */

static bool
same_bool_result_p (const_tree op1, const_tree op2)
{
  /* Simple cases first.  */
  if (operand_equal_p (op1, op2, 0))
    return true;

  /* Check the cases where at least one of the operands is a comparison.
     These are a bit smarter than operand_equal_p in that they apply some
     identities on SSA_NAMEs.  */
  if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
      && same_bool_comparison_p (op1, TREE_CODE (op2),
				 TREE_OPERAND (op2, 0),
				 TREE_OPERAND (op2, 1)))
    return true;
  if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
      && same_bool_comparison_p (op2, TREE_CODE (op1),
				 TREE_OPERAND (op1, 0),
				 TREE_OPERAND (op1, 1)))
    return true;

  /* Default case.  */
  return false;
}
/* Forward declarations for some mutually recursive functions.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison (tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison_1 (gimple stmt,
			   enum tree_code code2, tree op2a, tree op2b);
static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison (tree var, bool invert,
			enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison_1 (gimple stmt,
			  enum tree_code code2, tree op2a, tree op2b);
/* Helper function for and_comparisons_1: try to simplify the AND of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of the VAR before doing the AND.
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison (tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b)
{
  tree t;
  gimple stmt = SSA_NAME_DEF_STMT (var);

  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.  */
  if (invert)
    t = or_var_with_comparison_1 (stmt,
				  invert_tree_comparison (code2, false),
				  op2a, op2b);
  else
    t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
  return canonicalize_bool (t, invert);
}
/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison_1 (gimple stmt,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_false_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (innercode,
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt),
				  code2, op2a, op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple s;
      tree t;
      tree partial = NULL_TREE;
      bool is_and = (innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
	 inner1 AND (inner1 OR inner2) => inner1
	 !inner1 AND (inner1 AND inner2) => false
	 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
	 Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
	 Compute the first partial result, (inner1 AND (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (t AND inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner2;
	      else if (integer_zerop (t))
		return boolean_false_node;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner2 AND (op2a code2 op2b)))  */
	  else if (integer_onep (t))
	    return boolean_true_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (inner1 AND t)  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner1;
	      else if (integer_zerop (t))
		return boolean_false_node;
	      /* If both are the same, we can apply the identity
		 (x AND x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner1 AND (op2a code2 op2b)))
	     => (t OR partial)  */
	  else
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed OR expression.  The
		     interesting case is when at least one is false.
		     Or, if both are the same, we can apply the identity
		     (x OR x) == x.  */
		  if (integer_zerop (partial))
		    return t;
		  else if (integer_zerop (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
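
/* An illustrative sketch: for var = t1 & t2 with t1 = (x > 0) and
   t2 = (x > 3), simplifying var AND (x > 5) folds each inner comparison
   with (x > 5); both partial results are (x > 5), so the whole
   expression simplifies to x > 5.  */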
/* Try to simplify the AND of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) AND (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a == op1b), we should either be able to
	 return that or FALSE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	      else
		return boolean_false_node;
	    }
	}
      /* Likewise if the second comparison is an == comparison.  */
      else if (code2 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	      else
		return boolean_false_node;
	    }
	}

      /* Same business with inequality tests.  */
      else if (code1 == NE_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp != 0); break;
	    case NE_EXPR: val = (cmp == 0); break;
	    case LT_EXPR: val = (cmp >= 0); break;
	    case GT_EXPR: val = (cmp <= 0); break;
	    case LE_EXPR: val = (cmp > 0); break;
	    case GE_EXPR: val = (cmp < 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == NE_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp <= 0); break;
	    case GT_EXPR: val = (cmp >= 0); break;
	    case LE_EXPR: val = (cmp < 0); break;
	    case GE_EXPR: val = (cmp > 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Choose the more restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Likewise choose the more restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Check for singleton ranges.  */
      else if (cmp == 0
	       && ((code1 == LE_EXPR && code2 == GE_EXPR)
		   || (code1 == GE_EXPR && code2 == LE_EXPR)))
	return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);

      /* Check for disjoint ranges.  */
      else if (cmp <= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_false_node;
      else if (cmp >= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_false_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return and_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ANDed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
			{
			  if (!result)
			    result = boolean_false_node;
			  else if (!integer_zerop (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = and_var_with_comparison (arg, invert, code2,
						      op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}
/* Try to simplify the AND of two comparisons, specified by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be simplified to a single expression (without requiring
   introducing more SSA variables to hold intermediate values),
   return the resulting tree.  Otherwise return NULL_TREE.
   If the result expression is non-null, it has boolean type.  */

tree
maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
			    enum tree_code code2, tree op2a, tree op2b)
{
  tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
  if (t)
    return t;
  else
    return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
}
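
/* Illustrative sketches of the merges performed above:

     (x > 5) AND (x > 3)    yields   x > 5    (more restrictive bound)
     (x >= 7) AND (x <= 7)  yields   x == 7   (singleton range)
     (x < 3) AND (x > 7)    yields   false    (disjoint ranges)  */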
/* Helper function for or_comparisons_1: try to simplify the OR of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of VAR before doing the OR.
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison (tree var, bool invert,
			enum tree_code code2, tree op2a, tree op2b)
{
  tree t;
  gimple stmt = SSA_NAME_DEF_STMT (var);

  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.  */
  if (invert)
    t = and_var_with_comparison_1 (stmt,
				   invert_tree_comparison (code2, false),
				   op2a, op2b);
  else
    t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
  return canonicalize_bool (t, invert);
}
/* Try to simplify the OR of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison_1 (gimple stmt,
			  enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var OR (var != 0)) => true.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_true_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = or_comparisons_1 (innercode,
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 code2, op2a, op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple s;
      tree t;
      tree partial = NULL_TREE;
      bool is_or = (innercode == BIT_IOR_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
	 inner1 OR (inner1 AND inner2) => inner1
	 !inner1 OR (inner1 OR inner2) => true
	 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
	 Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_or ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_or ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the OR across the inner tests.
	 Compute the first partial result, (inner1 OR (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (t OR inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_or)
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (integer_zerop (t))
		return inner2;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner2 OR (op2a code op2b)))  */
	  else if (integer_zerop (t))
	    return boolean_false_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (inner1 OR t)
	     => (t OR partial)  */
	  if (is_or)
	    {
	      if (integer_zerop (t))
		return inner1;
	      else if (integer_onep (t))
		return boolean_true_node;
	      /* If both are the same, we can apply the identity
		 (x OR x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner1 OR (op2a code2 op2b)))
	     => (t AND partial)  */
	  else
	    {
	      if (integer_zerop (t))
		return boolean_false_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed AND expression.  The
		     interesting case is when at least one is true.
		     Or, if both are the same, we can apply the identity
		     (x AND x) == x.  */
		  if (integer_onep (partial))
		    return t;
		  else if (integer_onep (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
/* Try to simplify the OR of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) OR (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a != op1b), we should either be able to
	 return that or TRUE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	    }
	}
      /* Likewise if the second comparison is a != comparison.  */
      else if (code2 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	    }
	}

      /* See if an equality test is redundant with the other comparison.  */
      else if (code1 == EQ_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == EQ_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Choose the less restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Likewise choose the less restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Check for singleton ranges.  */
      else if (cmp == 0
	       && ((code1 == LT_EXPR && code2 == GT_EXPR)
		   || (code1 == GT_EXPR && code2 == LT_EXPR)))
	return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);

      /* Check for less/greater pairs that don't restrict the range at all.  */
      else if (cmp >= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_true_node;
      else if (cmp <= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_true_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return or_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ORed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
			{
			  if (!result)
			    result = boolean_true_node;
			  else if (!integer_onep (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = or_var_with_comparison (arg, invert, code2,
						     op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}
/* Try to simplify the OR of two comparisons, specified by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be simplified to a single expression (without requiring
   introducing more SSA variables to hold intermediate values),
   return the resulting tree.  Otherwise return NULL_TREE.
   If the result expression is non-null, it has boolean type.  */

tree
maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
  if (t)
    return t;
  else
    return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
}
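
/* Illustrative sketches of the merges performed above:

     (x < 3) OR (x < 5)     yields   x < 5    (less restrictive bound)
     (x < 7) OR (x > 7)     yields   x != 7   (all but the singleton)
     (x <= 4) OR (x >= 2)   yields   true     (ranges cover everything)  */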
4129 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
4131 Either NULL_TREE, a simplified but non-constant or a constant
4134 ??? This should go into a gimple-fold-inline.h file to be eventually
4135 privatized with the single valueize function used in the various TUs
4136 to avoid the indirect function call overhead. */
tree
gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree))
{
  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        enum tree_code subcode = gimple_assign_rhs_code (stmt);

        switch (get_gimple_rhs_class (subcode))
          {
          case GIMPLE_SINGLE_RHS:
            {
              tree rhs = gimple_assign_rhs1 (stmt);
              enum tree_code_class kind = TREE_CODE_CLASS (subcode);

              if (TREE_CODE (rhs) == SSA_NAME)
                {
                  /* If the RHS is an SSA_NAME, return its known constant
                     value, if any.  */
                  return (*valueize) (rhs);
                }
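              /* E.g. for "x_2 = y_3;" where the lattice knows y_3 to be
                 the constant 5, this returns 5 (illustrative example).  */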
              /* Handle propagating invariant addresses into address
                 operations.  */
              else if (TREE_CODE (rhs) == ADDR_EXPR
                       && !is_gimple_min_invariant (rhs))
                {
                  HOST_WIDE_INT offset = 0;
                  tree base;
                  base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
                                                          &offset,
                                                          valueize);
                  if (base
                      && (CONSTANT_CLASS_P (base)
                          || decl_address_invariant_p (base)))
                    return build_invariant_address (TREE_TYPE (rhs),
                                                    base, offset);
                }
              else if (TREE_CODE (rhs) == CONSTRUCTOR
                       && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
                       && (CONSTRUCTOR_NELTS (rhs)
                           == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
                {
                  unsigned i;
                  tree val, *vec;

                  vec = XALLOCAVEC (tree,
                                    TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
                  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
                    {
                      val = (*valueize) (val);
                      if (TREE_CODE (val) == INTEGER_CST
                          || TREE_CODE (val) == REAL_CST
                          || TREE_CODE (val) == FIXED_CST)
                        vec[i] = val;
                      else
                        return NULL_TREE;
                    }

                  return build_vector (TREE_TYPE (rhs), vec);
                }
              if (subcode == OBJ_TYPE_REF)
                {
                  tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
                  /* If callee is constant, we can fold away the wrapper.  */
                  if (is_gimple_min_invariant (val))
                    return val;
                }
              if (kind == tcc_reference)
                {
                  if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
                       || TREE_CODE (rhs) == REALPART_EXPR
                       || TREE_CODE (rhs) == IMAGPART_EXPR)
                      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
                    {
                      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
                      return fold_unary_loc (EXPR_LOCATION (rhs),
                                             TREE_CODE (rhs),
                                             TREE_TYPE (rhs), val);
                    }
                  else if (TREE_CODE (rhs) == BIT_FIELD_REF
                           && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
                    {
                      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
                      return fold_ternary_loc (EXPR_LOCATION (rhs),
                                               TREE_CODE (rhs),
                                               TREE_TYPE (rhs), val,
                                               TREE_OPERAND (rhs, 1),
                                               TREE_OPERAND (rhs, 2));
                    }
                  else if (TREE_CODE (rhs) == MEM_REF
                           && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
                    {
                      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
                      if (TREE_CODE (val) == ADDR_EXPR
                          && is_gimple_min_invariant (val))
                        {
                          tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
                                                  unshare_expr (val),
                                                  TREE_OPERAND (rhs, 1));
                          if (tem)
                            rhs = tem;
                        }
                    }
                  return fold_const_aggregate_ref_1 (rhs, valueize);
                }
              else if (kind == tcc_declaration)
                return get_symbol_constant_value (rhs);
              return rhs;
            }
          case GIMPLE_UNARY_RHS:
            {
              /* Handle unary operators that can appear in GIMPLE form.
                 Note that we know the single operand must be a constant,
                 so this should almost always return a simplified RHS.  */
              tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));

              return
                fold_unary_ignore_overflow_loc (loc, subcode,
                                                gimple_expr_type (stmt), op0);
            }
          case GIMPLE_BINARY_RHS:
            {
              /* Handle binary operators that can appear in GIMPLE form.  */
              tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
              tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));

              /* Translate &x + CST into an invariant form suitable for
                 further propagation.  */
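              /* E.g. "&x p+ 8" can become "&MEM[(T *)&x + 8B]" in dump
                 notation (an illustrative rendering of the MEM_REF
                 form).  */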
              if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
                  && TREE_CODE (op0) == ADDR_EXPR
                  && TREE_CODE (op1) == INTEGER_CST)
                {
                  tree off = fold_convert (ptr_type_node, op1);
                  return build_fold_addr_expr_loc
                           (loc,
                            fold_build2 (MEM_REF,
                                         TREE_TYPE (TREE_TYPE (op0)),
                                         unshare_expr (op0), off));
                }

              return fold_binary_loc (loc, subcode,
                                      gimple_expr_type (stmt), op0, op1);
            }
          case GIMPLE_TERNARY_RHS:
            {
              /* Handle ternary operators that can appear in GIMPLE form.  */
              tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
              tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
              tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));

              /* Fold embedded expressions in ternary codes.  */
              if ((subcode == COND_EXPR
                   || subcode == VEC_COND_EXPR)
                  && COMPARISON_CLASS_P (op0))
                {
                  tree op00 = (*valueize) (TREE_OPERAND (op0, 0));
                  tree op01 = (*valueize) (TREE_OPERAND (op0, 1));
                  tree tem = fold_binary_loc (loc, TREE_CODE (op0),
                                              TREE_TYPE (op0), op00, op01);
                  if (tem)
                    op0 = tem;
                }

              return fold_ternary_loc (loc, subcode,
                                       gimple_expr_type (stmt), op0, op1, op2);
            }

          default:
            gcc_unreachable ();
          }
      }

    case GIMPLE_CALL:
      {
        tree fn;
        if (gimple_call_internal_p (stmt))
          {
            enum tree_code subcode = ERROR_MARK;
            switch (gimple_call_internal_fn (stmt))
              {
              case IFN_UBSAN_CHECK_ADD:
                subcode = PLUS_EXPR;
                break;
              case IFN_UBSAN_CHECK_SUB:
                subcode = MINUS_EXPR;
                break;
              case IFN_UBSAN_CHECK_MUL:
                subcode = MULT_EXPR;
                break;
              default:
                return NULL_TREE;
              }
            tree arg0 = gimple_call_arg (stmt, 0);
            tree arg1 = gimple_call_arg (stmt, 1);
            tree op0 = (*valueize) (arg0);
            tree op1 = (*valueize) (arg1);

            if (TREE_CODE (op0) != INTEGER_CST
                || TREE_CODE (op1) != INTEGER_CST)
              {
                switch (subcode)
                  {
                  case MULT_EXPR:
                    /* x * 0 = 0 * x = 0 without overflow.  */
                    if (integer_zerop (op0) || integer_zerop (op1))
                      return build_zero_cst (TREE_TYPE (arg0));
                    break;
                  case MINUS_EXPR:
                    /* y - y = 0 without overflow.  */
                    if (operand_equal_p (op0, op1, 0))
                      return build_zero_cst (TREE_TYPE (arg0));
                    break;
                  default:
                    break;
                  }
                return NULL_TREE;
              }
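            /* Both operands are constants here, so fold the operation
               directly; e.g. IFN_UBSAN_CHECK_ADD (2, 3) folds to 5, since
               2 + 3 does not overflow (illustrative example).  */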
            tree res
              = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
            if (res
                && TREE_CODE (res) == INTEGER_CST
                && !TREE_OVERFLOW (res))
              return res;
            return NULL_TREE;
          }
        fn = (*valueize) (gimple_call_fn (stmt));
        if (TREE_CODE (fn) == ADDR_EXPR
            && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
            && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
            && gimple_builtin_call_types_compatible_p (stmt,
                                                       TREE_OPERAND (fn, 0)))
          {
            tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
            tree call, retval;
            unsigned i;
            for (i = 0; i < gimple_call_num_args (stmt); ++i)
              args[i] = (*valueize) (gimple_call_arg (stmt, i));
            call = build_call_array_loc (loc,
                                         gimple_call_return_type (stmt),
                                         fn, gimple_call_num_args (stmt), args);
            retval = fold_call_expr (EXPR_LOCATION (call), call, false);
            if (retval)
              {
                /* fold_call_expr wraps the result inside a NOP_EXPR.  */
                STRIP_NOPS (retval);
                retval = fold_convert (gimple_call_return_type (stmt), retval);
              }
            return retval;
          }
        return NULL_TREE;
      }

    default:
      return NULL_TREE;
    }
}
/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
   Returns NULL_TREE if folding to a constant is not possible, otherwise
   returns a constant according to is_gimple_min_invariant.  */

tree
gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
{
  tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
  if (res && is_gimple_min_invariant (res))
    return res;
  return NULL_TREE;
}
/* The following set of functions are supposed to fold references using
   their constant initializers.  */

static tree fold_ctor_reference (tree type, tree ctor,
                                 unsigned HOST_WIDE_INT offset,
                                 unsigned HOST_WIDE_INT size, tree);
/* See if we can find a constructor defining the value of BASE.
   When we know the constructor with constant offset (such as
   base is array[40] and we do know the constructor of array), then
   BIT_OFFSET is adjusted accordingly.

   As a special case, return error_mark_node when the constructor
   is not explicitly available, but it is known to be zero,
   such as 'static const int a;'.  */
static tree
get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
                      tree (*valueize)(tree))
{
  HOST_WIDE_INT bit_offset2, size, max_size;
  if (TREE_CODE (base) == MEM_REF)
    {
      if (!integer_zerop (TREE_OPERAND (base, 1)))
        {
          if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
            return NULL_TREE;
          *bit_offset += (mem_ref_offset (base).to_short_addr ()
                          * BITS_PER_UNIT);
        }

      if (valueize
          && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
        base = valueize (TREE_OPERAND (base, 0));
      if (!base || TREE_CODE (base) != ADDR_EXPR)
        return NULL_TREE;
      base = TREE_OPERAND (base, 0);
    }
  /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
     DECL_INITIAL.  If BASE is a nested reference into another
     ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
     the inner reference.  */
  switch (TREE_CODE (base))
    {
    case VAR_DECL:
    case CONST_DECL:
      {
        tree init = ctor_for_folding (base);

        /* Our semantic is exact opposite of ctor_for_folding;
           NULL means unknown, while error_mark_node is 0.  */
        if (init == error_mark_node)
          return NULL_TREE;
        if (!init)
          return error_mark_node;
        return init;
      }

    case ARRAY_REF:
    case COMPONENT_REF:
      base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
      if (max_size == -1 || size != max_size)
        return NULL_TREE;
      *bit_offset += bit_offset2;
      return get_base_constructor (base, bit_offset, valueize);

    case STRING_CST:
    case CONSTRUCTOR:
      return base;

    default:
      return NULL_TREE;
    }
}
/* CTOR is a CONSTRUCTOR of an array type.  Fold a reference of type TYPE
   and size SIZE to the memory at bit OFFSET.  */
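/* For instance, given "static const int a[3] = {1, 2, 3};" and 32-bit int,
   a read of type int with OFFSET 64 and SIZE 32 selects index 2 and yields
   the constant 3 (illustrative example).  */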
static tree
fold_array_ctor_reference (tree type, tree ctor,
                           unsigned HOST_WIDE_INT offset,
                           unsigned HOST_WIDE_INT size,
                           tree from_decl)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;
  offset_int low_bound;
  offset_int elt_size;
  offset_int index, max_index;
  offset_int access_index;
  tree domain_type = NULL_TREE, index_type = NULL_TREE;
  HOST_WIDE_INT inner_offset;

  /* Compute low bound and elt size.  */
  if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
    domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    {
      /* Static constructors for variably sized objects make no sense.  */
      gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
      index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
      low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
    }
  else
    low_bound = 0;
  /* Static constructors for variably sized objects make no sense.  */
  gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
              == INTEGER_CST);
  elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));

  /* We can handle only constantly sized accesses that are known to not
     be larger than size of array element.  */
  if (!TYPE_SIZE_UNIT (type)
      || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
      || wi::lts_p (elt_size, wi::to_offset (TYPE_SIZE_UNIT (type))))
    return NULL_TREE;

  /* Compute the array index we look for.  */
  access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
                                 elt_size);
  access_index += low_bound;
  if (index_type)
    access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
                            TYPE_SIGN (index_type));

  /* And offset within the access.  */
  inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);

  /* See if the array field is large enough to span whole access.  We do not
     care to fold accesses spanning multiple array indexes.  */
  if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
    return NULL_TREE;

  index = low_bound - 1;
  if (index_type)
    index = wi::ext (index, TYPE_PRECISION (index_type),
                     TYPE_SIGN (index_type));

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
    {
      /* An array constructor might explicitly set the index, specify a
         range, or leave the index NULL, meaning that it is the next index
         after the previous one.  */
      if (cfield)
        {
          if (TREE_CODE (cfield) == INTEGER_CST)
            max_index = index = wi::to_offset (cfield);
          else
            {
              gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
              index = wi::to_offset (TREE_OPERAND (cfield, 0));
              max_index = wi::to_offset (TREE_OPERAND (cfield, 1));
            }
        }
      else
        {
          index += 1;
          if (index_type)
            index = wi::ext (index, TYPE_PRECISION (index_type),
                             TYPE_SIGN (index_type));
          max_index = index;
        }

      /* Do we have a match?  */
      if (wi::cmpu (access_index, index) >= 0
          && wi::cmpu (access_index, max_index) <= 0)
        return fold_ctor_reference (type, cval, inner_offset, size,
                                    from_decl);
    }
  /* When memory is not explicitly mentioned in the constructor,
     it is 0 (or out of range).  */
  return build_zero_cst (type);
}
/* CTOR is a CONSTRUCTOR of an aggregate or vector type.  Fold a reference
   of type TYPE and size SIZE to the memory at bit OFFSET.  */
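/* For instance, given "static const struct { int x; int y; } c = {1, 2};"
   and 32-bit int, a read of type int with OFFSET 32 and SIZE 32 overlaps
   field y exactly and yields the constant 2 (illustrative example).  */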
static tree
fold_nonarray_ctor_reference (tree type, tree ctor,
                              unsigned HOST_WIDE_INT offset,
                              unsigned HOST_WIDE_INT size,
                              tree from_decl)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
                            cval)
    {
      tree byte_offset = DECL_FIELD_OFFSET (cfield);
      tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
      tree field_size = DECL_SIZE (cfield);
      offset_int bitoffset;
      offset_int bitoffset_end, access_end;

      /* Variable sized objects in static constructors make no sense,
         but field_size can be NULL for flexible array members.  */
      gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
                  && TREE_CODE (byte_offset) == INTEGER_CST
                  && (field_size != NULL_TREE
                      ? TREE_CODE (field_size) == INTEGER_CST
                      : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));

      /* Compute bit offset of the field.  */
      bitoffset = (wi::to_offset (field_offset)
                   + wi::lshift (wi::to_offset (byte_offset),
                                 LOG2_BITS_PER_UNIT));
      /* Compute bit offset where the field ends.  */
      if (field_size != NULL_TREE)
        bitoffset_end = bitoffset + wi::to_offset (field_size);
      else
        bitoffset_end = 0;

      access_end = offset_int (offset) + size;

      /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
         [BITOFFSET, BITOFFSET_END)?  */
      if (wi::cmps (access_end, bitoffset) > 0
          && (field_size == NULL_TREE
              || wi::lts_p (offset, bitoffset_end)))
        {
          offset_int inner_offset = offset_int (offset) - bitoffset;
          /* We do have overlap.  Now see if the field is large enough to
             cover the access.  Give up for accesses spanning multiple
             fields.  */
          if (wi::cmps (access_end, bitoffset_end) > 0)
            return NULL_TREE;
          if (wi::lts_p (offset, bitoffset))
            return NULL_TREE;
          return fold_ctor_reference (type, cval,
                                      inner_offset.to_uhwi (), size,
                                      from_decl);
        }
    }
  /* When memory is not explicitly mentioned in the constructor, it is 0.  */
  return build_zero_cst (type);
}
/* CTOR is a value initializing memory.  Fold a reference of type TYPE
   and size SIZE to the memory at bit OFFSET.  */
static tree
fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
                     unsigned HOST_WIDE_INT size, tree from_decl)
{
  tree ret;

  /* We found the field with exact match.  */
  if (useless_type_conversion_p (type, TREE_TYPE (ctor))
      && !offset)
    return canonicalize_constructor_val (unshare_expr (ctor), from_decl);

  /* We are at the end of walk, see if we can view convert the
     result.  */
  if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
      /* VIEW_CONVERT_EXPR is defined only for matching sizes.  */
      && !compare_tree_int (TYPE_SIZE (type), size)
      && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
    {
      ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
      ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
      if (ret)
        STRIP_USELESS_TYPE_CONVERSION (ret);
      return ret;
    }
  /* For constants and byte-aligned/sized reads try to go through
     native_encode/interpret.  */
  if (CONSTANT_CLASS_P (ctor)
      && BITS_PER_UNIT == 8
      && offset % BITS_PER_UNIT == 0
      && size % BITS_PER_UNIT == 0
      && size <= MAX_BITSIZE_MODE_ANY_MODE)
    {
      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
      if (native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
                              offset / BITS_PER_UNIT) > 0)
        return native_interpret_expr (type, buf, size / BITS_PER_UNIT);
    }
  if (TREE_CODE (ctor) == CONSTRUCTOR)
    {
      if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
          || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
        return fold_array_ctor_reference (type, ctor, offset, size,
                                          from_decl);
      else
        return fold_nonarray_ctor_reference (type, ctor, offset, size,
                                             from_decl);
    }

  return NULL_TREE;
}
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates, valueizing SSA
   names using VALUEIZE.  Return NULL_TREE otherwise.  */
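/* E.g. for "static const int a[2] = {10, 20};" and T being a[i_1], where
   VALUEIZE maps i_1 to 1, this returns the constant 20 (illustrative
   example).  */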
static tree
fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
{
  tree ctor, idx, base;
  HOST_WIDE_INT offset, size, max_size;
  tree tem;

  if (TREE_THIS_VOLATILE (t))
    return NULL_TREE;

  if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
    return get_symbol_constant_value (t);

  tem = fold_read_from_constant_string (t);
  if (tem)
    return tem;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Constant indexes are handled well by get_base_constructor.
         Only special case variable offsets.
         FIXME: This code can't handle nested references with variable indexes
         (they will be handled only by iteration of ccp).  Perhaps we can bring
         get_ref_base_and_extent here and make it use a valueize callback.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
          && valueize
          && (idx = (*valueize) (TREE_OPERAND (t, 1)))
          && TREE_CODE (idx) == INTEGER_CST)
        {
          tree low_bound, unit_size;

          /* If the resulting bit-offset is constant, track it.  */
          if ((low_bound = array_ref_low_bound (t),
               TREE_CODE (low_bound) == INTEGER_CST)
              && (unit_size = array_ref_element_size (t),
                  tree_fits_uhwi_p (unit_size)))
            {
              offset_int woffset
                = wi::sext (wi::to_offset (idx) - wi::to_offset (low_bound),
                            TYPE_PRECISION (TREE_TYPE (idx)));

              if (wi::fits_shwi_p (woffset))
                {
                  offset = woffset.to_shwi ();
                  /* TODO: This code seems wrong, multiply then check
                     to see if it fits.  */
                  offset *= tree_to_uhwi (unit_size);
                  offset *= BITS_PER_UNIT;

                  base = TREE_OPERAND (t, 0);
                  ctor = get_base_constructor (base, &offset, valueize);
                  /* Empty constructor.  Always fold to 0.  */
                  if (ctor == error_mark_node)
                    return build_zero_cst (TREE_TYPE (t));
                  /* Out of bound array access.  Value is undefined,
                     but don't fold.  */
                  if (offset < 0)
                    return NULL_TREE;
                  /* We can not determine ctor.  */
                  if (!ctor)
                    return NULL_TREE;
                  return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
                                              tree_to_uhwi (unit_size)
                                              * BITS_PER_UNIT,
                                              base);
                }
            }
        }
      /* Fallthru.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case TARGET_MEM_REF:
    case MEM_REF:
      base = get_ref_base_and_extent (t, &offset, &size, &max_size);
      ctor = get_base_constructor (base, &offset, valueize);

      /* Empty constructor.  Always fold to 0.  */
      if (ctor == error_mark_node)
        return build_zero_cst (TREE_TYPE (t));
      /* We do not know precise address.  */
      if (max_size == -1 || max_size != size)
        return NULL_TREE;
      /* We can not determine ctor.  */
      if (!ctor)
        return NULL_TREE;

      /* Out of bound array access.  Value is undefined, but don't fold.  */
      if (offset < 0)
        return NULL_TREE;

      return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
                                  base);

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
        tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
        if (c && TREE_CODE (c) == COMPLEX_CST)
          return fold_build1_loc (EXPR_LOCATION (t),
                                  TREE_CODE (t), TREE_TYPE (t), c);
        break;
      }

    default:
      break;
    }

  return NULL_TREE;
}
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates.  Return
   NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref (tree t)
{
  return fold_const_aggregate_ref_1 (t, NULL);
}
/* Lookup the virtual method with index TOKEN in virtual table V
   at OFFSET.
   Set CAN_REFER (if non-NULL) to false if the method is not referable
   or if the virtual table is ill-formed (such as rewritten by a
   non-C++-produced symbol); otherwise just return NULL in that case.  */
tree
gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
                                   tree v,
                                   unsigned HOST_WIDE_INT offset,
                                   bool *can_refer)
{
  tree vtable = v, init, fn;
  unsigned HOST_WIDE_INT size;
  unsigned HOST_WIDE_INT elt_size, access_index;
  tree domain_type;

  if (can_refer)
    *can_refer = true;

  /* First of all double check we have virtual table.  */
  if (TREE_CODE (v) != VAR_DECL
      || !DECL_VIRTUAL_P (v))
    {
      gcc_assert (in_lto_p);
      /* Pass down that we lost track of the target.  */
      if (can_refer)
        *can_refer = false;
      return NULL_TREE;
    }
= ctor_for_folding (v
);
4866 /* The virtual tables should always be born with constructors
4867 and we always should assume that they are avaialble for
4868 folding. At the moment we do not stream them in all cases,
4869 but it should never happen that ctor seem unreachable. */
4871 if (init
== error_mark_node
)
4873 gcc_assert (in_lto_p
);
4874 /* Pass down that we lost track of the target. */
4879 gcc_checking_assert (TREE_CODE (TREE_TYPE (v
)) == ARRAY_TYPE
);
4880 size
= tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v
))));
4881 offset
*= BITS_PER_UNIT
;
4882 offset
+= token
* size
;
  /* Lookup the value in the constructor that is assumed to be array.
     This is equivalent to
     fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
                               offset, size, NULL);
     but in constant time.  We expect that frontend produced a simple
     array without indexed initializers.  */
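  /* E.g. with 64-bit vtable entries (SIZE == 64, so elt_size == 8),
     TOKEN 3 and byte OFFSET 16, the computation below gives
     access_index == (16 * 8 + 3 * 64) / 8 / 8 == 5 (an illustrative
     walk through the arithmetic).  */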
  gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
  domain_type = TYPE_DOMAIN (TREE_TYPE (init));
  gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
  elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));

  access_index = offset / BITS_PER_UNIT / elt_size;
  gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);

  /* This code makes an assumption that there are no
     indexed fields produced by C++ FE, so we can directly index the array.  */
  if (access_index < CONSTRUCTOR_NELTS (init))
    {
      fn = CONSTRUCTOR_ELT (init, access_index)->value;
      gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
      STRIP_NOPS (fn);
    }
  else
    fn = NULL;
  /* For a type-inconsistent program we may end up looking up a virtual
     method in a virtual table that does not contain TOKEN entries.  We may
     overrun the virtual table and pick up a constant or RTTI info pointer.
     In any case the call is undefined.  */
  if (!fn
      || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
      || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
    fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
  else
    {
      fn = TREE_OPERAND (fn, 0);

      /* When the cgraph node is missing and the function is not public,
         we cannot devirtualize.  This can happen in WHOPR when the actual
         method ends up in another partition, because we found the
         devirtualization possibility too late.  */
      if (!can_refer_decl_in_current_unit_p (fn, vtable))
        {
          if (can_refer)
            {
              *can_refer = false;
              return fn;
            }
          return NULL_TREE;
        }
    }

  /* Make sure we create a cgraph node for functions we'll reference.
     They can be non-existent if the reference comes from an entry
     of an external vtable for example.  */
  cgraph_node::get_create (fn);

  return fn;
}
/* Return a declaration of a function which an OBJ_TYPE_REF references.  TOKEN
   is the integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
   KNOWN_BINFO carries the binfo describing the true type of
   OBJ_TYPE_REF_OBJECT(REF).
   Set CAN_REFER (if non-NULL) to false if the method is not referable
   or if the virtual table is ill-formed (such as rewritten by a
   non-C++-produced symbol); otherwise just return NULL in that case.  */
tree
gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
                                  bool *can_refer)
{
  unsigned HOST_WIDE_INT offset;
  tree v;

  v = BINFO_VTABLE (known_binfo);
  /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone.  */
  if (!v)
    return NULL_TREE;

  if (!vtable_pointer_value_to_vtable (v, &v, &offset))
    {
      if (can_refer)
        *can_refer = false;
      return NULL_TREE;
    }
  return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
}
/* Return true iff VAL is a gimple expression that is known to be
   non-negative.  Restricted to floating-point inputs.  */
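/* E.g. an SSA name defined by "x_2 = y_1 * y_1" or by a call to fabs is
   recognized as non-negative (illustrative examples of the cases handled
   below).  */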
bool
gimple_val_nonnegative_real_p (tree val)
{
  gimple def_stmt;

  gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));

  /* Use existing logic for non-gimple trees.  */
  if (tree_expr_nonnegative_p (val))
    return true;

  if (TREE_CODE (val) != SSA_NAME)
    return false;

  /* Currently we look only at the immediately defining statement
     to make this determination, since recursion on defining
     statements of operands can lead to quadratic behavior in the
     worst case.  This is expected to catch almost all occurrences
     in practice.  It would be possible to implement limited-depth
     recursion if important cases are lost.  Alternatively, passes
     that need this information (such as the pow/powi lowering code
     in the cse_sincos pass) could be revised to provide it through
     dataflow propagation.  */

  def_stmt = SSA_NAME_DEF_STMT (val);

  if (is_gimple_assign (def_stmt))
    {
      tree op0, op1;

      /* See fold-const.c:tree_expr_nonnegative_p for additional
         cases that could be handled with recursion.  */

      switch (gimple_assign_rhs_code (def_stmt))
        {
        case ABS_EXPR:
          /* Always true for floating-point operands.  */
          return true;

        case MULT_EXPR:
          /* True if the two operands are identical (since we are
             restricted to floating-point inputs).  */
          op0 = gimple_assign_rhs1 (def_stmt);
          op1 = gimple_assign_rhs2 (def_stmt);

          if (op0 == op1
              || operand_equal_p (op0, op1, 0))
            return true;

        default:
          break;
        }
    }
  else if (is_gimple_call (def_stmt))
    {
      tree fndecl = gimple_call_fndecl (def_stmt);
      if (fndecl
          && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
        {
          tree arg1;

          switch (DECL_FUNCTION_CODE (fndecl))
            {
            CASE_FLT_FN (BUILT_IN_ACOS):
            CASE_FLT_FN (BUILT_IN_ACOSH):
            CASE_FLT_FN (BUILT_IN_CABS):
            CASE_FLT_FN (BUILT_IN_COSH):
            CASE_FLT_FN (BUILT_IN_ERFC):
            CASE_FLT_FN (BUILT_IN_EXP):
            CASE_FLT_FN (BUILT_IN_EXP10):
            CASE_FLT_FN (BUILT_IN_EXP2):
            CASE_FLT_FN (BUILT_IN_FABS):
            CASE_FLT_FN (BUILT_IN_FDIM):
            CASE_FLT_FN (BUILT_IN_HYPOT):
            CASE_FLT_FN (BUILT_IN_POW10):
              return true;

            CASE_FLT_FN (BUILT_IN_SQRT):
              /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
                 negative inputs.  */
              if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (val))))
                return true;

              break;

            CASE_FLT_FN (BUILT_IN_POWI):
              /* True if the second argument is an even integer.  */
              arg1 = gimple_call_arg (def_stmt, 1);

              if (TREE_CODE (arg1) == INTEGER_CST
                  && (TREE_INT_CST_LOW (arg1) & 1) == 0)
                return true;

              break;

            CASE_FLT_FN (BUILT_IN_POW):
              /* True if the second argument is an even integer-valued
                 real.  */
              arg1 = gimple_call_arg (def_stmt, 1);

              if (TREE_CODE (arg1) == REAL_CST)
                {
                  REAL_VALUE_TYPE c;
                  HOST_WIDE_INT n;

                  c = TREE_REAL_CST (arg1);
                  n = real_to_integer (&c);

                  if ((n & 1) == 0)
                    {
                      REAL_VALUE_TYPE cint;
                      real_from_integer (&cint, VOIDmode, n, SIGNED);
                      if (real_identical (&c, &cint))
                        return true;
                    }
                }

              break;

            default:
              break;
            }
        }
    }

  return false;
}
/* Given a pointer value OP0, return a simplified version of an
   indirection through OP0, or NULL_TREE if no simplification is
   possible.  Note that the resulting type may be different from
   the type pointed to in the sense that it is still compatible
   from the langhooks point of view.  */
tree
gimple_fold_indirect_ref (tree t)
{
  tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
  tree sub = t;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (useless_type_conversion_p (type, optype))
        return op;

      /* *(foo *)&fooarray => fooarray[0] */
      if (TREE_CODE (optype) == ARRAY_TYPE
          && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
          && useless_type_conversion_p (type, TREE_TYPE (optype)))
        {
          tree type_domain = TYPE_DOMAIN (optype);
          tree min_val = size_zero_node;
          if (type_domain && TYPE_MIN_VALUE (type_domain))
            min_val = TYPE_MIN_VALUE (type_domain);
          if (TREE_CODE (min_val) == INTEGER_CST)
            return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
        }
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
               && useless_type_conversion_p (type, TREE_TYPE (optype)))
        return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
               && useless_type_conversion_p (type, TREE_TYPE (optype)))
        {
          tree part_width = TYPE_SIZE (type);
          tree index = bitsize_int (0);
          return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
        }
    }

  /* *(p + CST) -> ...  */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree addr = TREE_OPERAND (sub, 0);
      tree off = TREE_OPERAND (sub, 1);
      tree addrtype;

      STRIP_NOPS (addr);
      addrtype = TREE_TYPE (addr);

      /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
      if (TREE_CODE (addr) == ADDR_EXPR
          && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
          && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
          && tree_fits_uhwi_p (off))
        {
          unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
          tree part_width = TYPE_SIZE (type);
          unsigned HOST_WIDE_INT part_widthi
            = tree_to_shwi (part_width) / BITS_PER_UNIT;
          unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
          tree index = bitsize_int (indexi);
          if (offset / part_widthi
              < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
            return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
                                part_width, index);
        }

      /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
      if (TREE_CODE (addr) == ADDR_EXPR
          && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
          && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
        {
          tree size = TYPE_SIZE_UNIT (type);
          if (tree_int_cst_equal (size, off))
            return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
        }

      /* *(p + CST) -> MEM_REF <p, CST>.  */
      if (TREE_CODE (addr) != ADDR_EXPR
          || DECL_P (TREE_OPERAND (addr, 0)))
        return fold_build2 (MEM_REF, type,
                            addr,
                            wide_int_to_tree (ptype, off));
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
      && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      tree osub = sub;
      sub = gimple_fold_indirect_ref (sub);
      if (! sub)
        sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
        min_val = TYPE_MIN_VALUE (type_domain);
      if (TREE_CODE (min_val) == INTEGER_CST)
        return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
/* Return true if CODE is an operation that when operating on signed
   integer types involves undefined behavior on overflow and the
   operation can be expressed with unsigned arithmetic.  */

bool
arith_code_with_undefined_signed_overflow (tree_code code)
{
  switch (code)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case NEGATE_EXPR:
    case POINTER_PLUS_EXPR:
      return true;
    default:
      return false;
    }
}
/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
   operation that can be transformed to unsigned arithmetic, by converting
   its operands, carrying out the operation in the corresponding unsigned
   type and converting the result back to the original type.

   Returns a sequence of statements that replace STMT and also contain
   a modified form of STMT itself.  */
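/* E.g. a signed addition "lhs_1 = a_2 + b_3" is rewritten to the sequence

     _4 = (unsigned int) a_2;
     _5 = (unsigned int) b_3;
     _6 = _4 + _5;
     lhs_1 = (int) _6;

   (an illustrative sketch of the emitted statements).  */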
gimple_seq
rewrite_to_defined_overflow (gimple stmt)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "rewriting stmt with undefined signed "
               "overflow ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
    }

  tree lhs = gimple_assign_lhs (stmt);
  tree type = unsigned_type_for (TREE_TYPE (lhs));
  gimple_seq stmts = NULL;
  for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
    {
      gimple_seq stmts2 = NULL;
      gimple_set_op (stmt, i,
                     force_gimple_operand (fold_convert (type,
                                                         gimple_op (stmt, i)),
                                           &stmts2, true, NULL_TREE));
      gimple_seq_add_seq (&stmts, stmts2);
    }
  gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
  gimple_seq_add_stmt (&stmts, stmt);
  gimple cvt = gimple_build_assign_with_ops
      (NOP_EXPR, lhs, gimple_assign_lhs (stmt), NULL_TREE);
  gimple_seq_add_stmt (&stmts, cvt);

  return stmts;
}