1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010-2014 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
26 #include "stringpool.h"
29 #include "stor-layout.h"
34 #include "basic-block.h"
35 #include "tree-ssa-alias.h"
36 #include "internal-fn.h"
37 #include "gimple-fold.h"
38 #include "gimple-expr.h"
42 #include "gimple-iterator.h"
43 #include "gimple-ssa.h"
44 #include "tree-ssanames.h"
45 #include "tree-into-ssa.h"
48 #include "tree-ssa-propagate.h"
50 #include "ipa-utils.h"
51 #include "gimple-pretty-print.h"
52 #include "tree-ssa-address.h"
53 #include "langhooks.h"
54 #include "gimplify-me.h"
59 /* Return true when DECL can be referenced from current unit.
60 FROM_DECL (if non-null) specifies the constructor that variable DECL was taken from.
61 We can get declarations that are not possible to reference for various
64 1) When analyzing C++ virtual tables.
65 C++ virtual tables do have known constructors even
66 when they are keyed to other compilation unit.
67 Those tables can contain pointers to methods and vars
68 in other units. Those methods have both STATIC and EXTERNAL
70 2) In WHOPR mode devirtualization might lead to reference
71 to method that was partitioned elsewhere.
72 In this case we have static VAR_DECL or FUNCTION_DECL
73 that has no corresponding callgraph/varpool node
75 3) COMDAT functions referred by external vtables that
76 we devirtualize only during final compilation stage.
77 At this time we already decided that we will not output
78 the function body and thus we can't reference the symbol
82 can_refer_decl_in_current_unit_p (tree decl
, tree from_decl
)
85 struct cgraph_node
*node
;
88 if (DECL_ABSTRACT (decl
))
91 /* We are concerned only about static/external vars and functions. */
92 if ((!TREE_STATIC (decl
) && !DECL_EXTERNAL (decl
))
93 || (TREE_CODE (decl
) != VAR_DECL
&& TREE_CODE (decl
) != FUNCTION_DECL
))
96 /* Static objects can be referred only if they were not optimized out yet. */
97 if (!TREE_PUBLIC (decl
) && !DECL_EXTERNAL (decl
))
99 /* Before we start optimizing unreachable code we can be sure all
100 static objects are defined. */
101 if (symtab
->function_flags_ready
)
103 snode
= symtab_node::get (decl
);
104 if (!snode
|| !snode
->definition
)
106 node
= dyn_cast
<cgraph_node
*> (snode
);
107 return !node
|| !node
->global
.inlined_to
;
110 /* We will later output the initializer, so we can refer to it.
111 So we are concerned only when DECL comes from initializer of
112 external var or var that has been optimized out. */
114 || TREE_CODE (from_decl
) != VAR_DECL
115 || (!DECL_EXTERNAL (from_decl
)
116 && (vnode
= varpool_node::get (from_decl
)) != NULL
117 && vnode
->definition
)
119 && (vnode
= varpool_node::get (from_decl
)) != NULL
120 && vnode
->in_other_partition
))
122 /* We are folding a reference from an external vtable. The vtable may refer
123 to a symbol keyed to other compilation unit. The other compilation
124 unit may be in separate DSO and the symbol may be hidden. */
125 if (DECL_VISIBILITY_SPECIFIED (decl
)
126 && DECL_EXTERNAL (decl
)
127 && DECL_VISIBILITY (decl
) != VISIBILITY_DEFAULT
128 && (!(snode
= symtab_node::get (decl
)) || !snode
->in_other_partition
))
130 /* When function is public, we always can introduce new reference.
131 Exception are the COMDAT functions where introducing a direct
132 reference implies a need to include the function body in the current unit. */
133 if (TREE_PUBLIC (decl
) && !DECL_COMDAT (decl
))
135 /* We have COMDAT. We are going to check if we still have definition
136 or if the definition is going to be output in other partition.
137 Bypass this when gimplifying; all needed functions will be produced.
139 As observed in PR20991 for already optimized out comdat virtual functions
140 it may be tempting to not necessarily give up because the copy will be
141 output elsewhere when corresponding vtable is output.
142 This is however not possible - ABI specify that COMDATs are output in
143 units where they are used and when the other unit was compiled with LTO
144 it is possible that vtable was kept public while the function itself
146 if (!symtab
->function_flags_ready
)
149 snode
= symtab_node::get (decl
);
151 || ((!snode
->definition
|| DECL_EXTERNAL (decl
))
152 && (!snode
->in_other_partition
153 || (!snode
->forced_by_abi
&& !snode
->force_output
))))
155 node
= dyn_cast
<cgraph_node
*> (snode
);
156 return !node
|| !node
->global
.inlined_to
;
159 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
160 acceptable form for is_gimple_min_invariant.
161 FROM_DECL (if non-NULL) specifies the variable whose constructor contains CVAL. */
164 canonicalize_constructor_val (tree cval
, tree from_decl
)
166 tree orig_cval
= cval
;
168 if (TREE_CODE (cval
) == POINTER_PLUS_EXPR
169 && TREE_CODE (TREE_OPERAND (cval
, 1)) == INTEGER_CST
)
171 tree ptr
= TREE_OPERAND (cval
, 0);
172 if (is_gimple_min_invariant (ptr
))
173 cval
= build1_loc (EXPR_LOCATION (cval
),
174 ADDR_EXPR
, TREE_TYPE (ptr
),
175 fold_build2 (MEM_REF
, TREE_TYPE (TREE_TYPE (ptr
)),
177 fold_convert (ptr_type_node
,
178 TREE_OPERAND (cval
, 1))));
180 if (TREE_CODE (cval
) == ADDR_EXPR
)
182 tree base
= NULL_TREE
;
183 if (TREE_CODE (TREE_OPERAND (cval
, 0)) == COMPOUND_LITERAL_EXPR
)
185 base
= COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval
, 0));
187 TREE_OPERAND (cval
, 0) = base
;
190 base
= get_base_address (TREE_OPERAND (cval
, 0));
194 if ((TREE_CODE (base
) == VAR_DECL
195 || TREE_CODE (base
) == FUNCTION_DECL
)
196 && !can_refer_decl_in_current_unit_p (base
, from_decl
))
198 if (TREE_CODE (base
) == VAR_DECL
)
199 TREE_ADDRESSABLE (base
) = 1;
200 else if (TREE_CODE (base
) == FUNCTION_DECL
)
202 /* Make sure we create a cgraph node for functions we'll reference.
203 They can be non-existent if the reference comes from an entry
204 of an external vtable for example. */
205 cgraph_node::get_create (base
);
207 /* Fixup types in global initializers. */
208 if (TREE_TYPE (TREE_TYPE (cval
)) != TREE_TYPE (TREE_OPERAND (cval
, 0)))
209 cval
= build_fold_addr_expr (TREE_OPERAND (cval
, 0));
211 if (!useless_type_conversion_p (TREE_TYPE (orig_cval
), TREE_TYPE (cval
)))
212 cval
= fold_convert (TREE_TYPE (orig_cval
), cval
);
215 if (TREE_OVERFLOW_P (cval
))
216 return drop_tree_overflow (cval
);
220 /* If SYM is a constant variable with known value, return the value.
221 NULL_TREE is returned otherwise. */
224 get_symbol_constant_value (tree sym
)
226 tree val
= ctor_for_folding (sym
);
227 if (val
!= error_mark_node
)
231 val
= canonicalize_constructor_val (unshare_expr (val
), sym
);
232 if (val
&& is_gimple_min_invariant (val
))
237 /* Variables declared 'const' without an initializer
238 have zero as the initializer if they may not be
239 overridden at link or run time. */
241 && (INTEGRAL_TYPE_P (TREE_TYPE (sym
))
242 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym
))))
243 return build_zero_cst (TREE_TYPE (sym
));
251 /* Subroutine of fold_stmt. We perform several simplifications of the
252 memory reference tree EXPR and make sure to re-gimplify them properly
253 after propagation of constant addresses. IS_LHS is true if the
254 reference is supposed to be an lvalue. */
257 maybe_fold_reference (tree expr
, bool is_lhs
)
261 if ((TREE_CODE (expr
) == VIEW_CONVERT_EXPR
262 || TREE_CODE (expr
) == REALPART_EXPR
263 || TREE_CODE (expr
) == IMAGPART_EXPR
)
264 && CONSTANT_CLASS_P (TREE_OPERAND (expr
, 0)))
265 return fold_unary_loc (EXPR_LOCATION (expr
),
268 TREE_OPERAND (expr
, 0));
269 else if (TREE_CODE (expr
) == BIT_FIELD_REF
270 && CONSTANT_CLASS_P (TREE_OPERAND (expr
, 0)))
271 return fold_ternary_loc (EXPR_LOCATION (expr
),
274 TREE_OPERAND (expr
, 0),
275 TREE_OPERAND (expr
, 1),
276 TREE_OPERAND (expr
, 2));
279 && (result
= fold_const_aggregate_ref (expr
))
280 && is_gimple_min_invariant (result
))
287 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
288 replacement rhs for the statement or NULL_TREE if no simplification
289 could be made. It is assumed that the operands have been previously
293 fold_gimple_assign (gimple_stmt_iterator
*si
)
295 gimple stmt
= gsi_stmt (*si
);
296 enum tree_code subcode
= gimple_assign_rhs_code (stmt
);
297 location_t loc
= gimple_location (stmt
);
299 tree result
= NULL_TREE
;
301 switch (get_gimple_rhs_class (subcode
))
303 case GIMPLE_SINGLE_RHS
:
305 tree rhs
= gimple_assign_rhs1 (stmt
);
307 if (REFERENCE_CLASS_P (rhs
))
308 return maybe_fold_reference (rhs
, false);
310 else if (TREE_CODE (rhs
) == OBJ_TYPE_REF
)
312 tree val
= OBJ_TYPE_REF_EXPR (rhs
);
313 if (is_gimple_min_invariant (val
))
315 else if (flag_devirtualize
&& virtual_method_call_p (rhs
))
318 vec
<cgraph_node
*>targets
319 = possible_polymorphic_call_targets (rhs
, stmt
, &final
);
320 if (final
&& targets
.length () <= 1 && dbg_cnt (devirt
))
322 if (dump_enabled_p ())
324 location_t loc
= gimple_location_safe (stmt
);
325 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS
, loc
,
326 "resolving virtual function address "
327 "reference to function %s\n",
328 targets
.length () == 1
329 ? targets
[0]->name ()
332 if (targets
.length () == 1)
334 val
= fold_convert (TREE_TYPE (val
),
335 build_fold_addr_expr_loc
336 (loc
, targets
[0]->decl
));
337 STRIP_USELESS_TYPE_CONVERSION (val
);
340 /* We can not use __builtin_unreachable here because it
341 can not have address taken. */
342 val
= build_int_cst (TREE_TYPE (val
), 0);
348 else if (TREE_CODE (rhs
) == ADDR_EXPR
)
350 tree ref
= TREE_OPERAND (rhs
, 0);
351 tree tem
= maybe_fold_reference (ref
, true);
353 && TREE_CODE (tem
) == MEM_REF
354 && integer_zerop (TREE_OPERAND (tem
, 1)))
355 result
= fold_convert (TREE_TYPE (rhs
), TREE_OPERAND (tem
, 0));
357 result
= fold_convert (TREE_TYPE (rhs
),
358 build_fold_addr_expr_loc (loc
, tem
));
359 else if (TREE_CODE (ref
) == MEM_REF
360 && integer_zerop (TREE_OPERAND (ref
, 1)))
361 result
= fold_convert (TREE_TYPE (rhs
), TREE_OPERAND (ref
, 0));
364 else if (TREE_CODE (rhs
) == CONSTRUCTOR
365 && TREE_CODE (TREE_TYPE (rhs
)) == VECTOR_TYPE
366 && (CONSTRUCTOR_NELTS (rhs
)
367 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs
))))
369 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
373 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs
), i
, val
)
374 if (TREE_CODE (val
) != INTEGER_CST
375 && TREE_CODE (val
) != REAL_CST
376 && TREE_CODE (val
) != FIXED_CST
)
379 return build_vector_from_ctor (TREE_TYPE (rhs
),
380 CONSTRUCTOR_ELTS (rhs
));
383 else if (DECL_P (rhs
))
384 return get_symbol_constant_value (rhs
);
386 /* If we couldn't fold the RHS, hand over to the generic
388 if (result
== NULL_TREE
)
391 /* Strip away useless type conversions. Both the NON_LVALUE_EXPR
392 that may have been added by fold, and "useless" type
393 conversions that might now be apparent due to propagation. */
394 STRIP_USELESS_TYPE_CONVERSION (result
);
396 if (result
!= rhs
&& valid_gimple_rhs_p (result
))
403 case GIMPLE_UNARY_RHS
:
405 tree rhs
= gimple_assign_rhs1 (stmt
);
407 result
= fold_unary_loc (loc
, subcode
, gimple_expr_type (stmt
), rhs
);
410 /* If the operation was a conversion do _not_ mark a
411 resulting constant with TREE_OVERFLOW if the original
412 constant was not. These conversions have implementation
413 defined behavior and retaining the TREE_OVERFLOW flag
414 here would confuse later passes such as VRP. */
415 if (CONVERT_EXPR_CODE_P (subcode
)
416 && TREE_CODE (result
) == INTEGER_CST
417 && TREE_CODE (rhs
) == INTEGER_CST
)
418 TREE_OVERFLOW (result
) = TREE_OVERFLOW (rhs
);
420 STRIP_USELESS_TYPE_CONVERSION (result
);
421 if (valid_gimple_rhs_p (result
))
427 case GIMPLE_BINARY_RHS
:
428 /* Try to canonicalize for boolean-typed X the comparisons
429 X == 0, X == 1, X != 0, and X != 1. */
430 if (gimple_assign_rhs_code (stmt
) == EQ_EXPR
431 || gimple_assign_rhs_code (stmt
) == NE_EXPR
)
433 tree lhs
= gimple_assign_lhs (stmt
);
434 tree op1
= gimple_assign_rhs1 (stmt
);
435 tree op2
= gimple_assign_rhs2 (stmt
);
436 tree type
= TREE_TYPE (op1
);
438 /* Check whether the comparison operands are of the same boolean
439 type as the result type is.
440 Check that second operand is an integer-constant with value
442 if (TREE_CODE (op2
) == INTEGER_CST
443 && (integer_zerop (op2
) || integer_onep (op2
))
444 && useless_type_conversion_p (TREE_TYPE (lhs
), type
))
446 enum tree_code cmp_code
= gimple_assign_rhs_code (stmt
);
447 bool is_logical_not
= false;
449 /* X == 0 and X != 1 is a logical-not of X
450 X == 1 and X != 0 is X */
451 if ((cmp_code
== EQ_EXPR
&& integer_zerop (op2
))
452 || (cmp_code
== NE_EXPR
&& integer_onep (op2
)))
453 is_logical_not
= true;
455 if (is_logical_not
== false)
457 /* Only for one-bit precision typed X the transformation
458 !X -> ~X is valid. */
459 else if (TYPE_PRECISION (type
) == 1)
460 result
= build1_loc (gimple_location (stmt
), BIT_NOT_EXPR
,
462 /* Otherwise we use !X -> X ^ 1. */
464 result
= build2_loc (gimple_location (stmt
), BIT_XOR_EXPR
,
465 type
, op1
, build_int_cst (type
, 1));
471 result
= fold_binary_loc (loc
, subcode
,
472 TREE_TYPE (gimple_assign_lhs (stmt
)),
473 gimple_assign_rhs1 (stmt
),
474 gimple_assign_rhs2 (stmt
));
478 STRIP_USELESS_TYPE_CONVERSION (result
);
479 if (valid_gimple_rhs_p (result
))
484 case GIMPLE_TERNARY_RHS
:
485 /* Try to fold a conditional expression. */
486 if (gimple_assign_rhs_code (stmt
) == COND_EXPR
)
488 tree op0
= gimple_assign_rhs1 (stmt
);
491 location_t cond_loc
= gimple_location (stmt
);
493 if (COMPARISON_CLASS_P (op0
))
495 fold_defer_overflow_warnings ();
496 tem
= fold_binary_loc (cond_loc
,
497 TREE_CODE (op0
), TREE_TYPE (op0
),
498 TREE_OPERAND (op0
, 0),
499 TREE_OPERAND (op0
, 1));
500 /* This is actually a conditional expression, not a GIMPLE
501 conditional statement, however, the valid_gimple_rhs_p
502 test still applies. */
503 set
= (tem
&& is_gimple_condexpr (tem
)
504 && valid_gimple_rhs_p (tem
));
505 fold_undefer_overflow_warnings (set
, stmt
, 0);
507 else if (is_gimple_min_invariant (op0
))
516 result
= fold_build3_loc (cond_loc
, COND_EXPR
,
517 TREE_TYPE (gimple_assign_lhs (stmt
)), tem
,
518 gimple_assign_rhs2 (stmt
),
519 gimple_assign_rhs3 (stmt
));
523 result
= fold_ternary_loc (loc
, subcode
,
524 TREE_TYPE (gimple_assign_lhs (stmt
)),
525 gimple_assign_rhs1 (stmt
),
526 gimple_assign_rhs2 (stmt
),
527 gimple_assign_rhs3 (stmt
));
531 STRIP_USELESS_TYPE_CONVERSION (result
);
532 if (valid_gimple_rhs_p (result
))
537 case GIMPLE_INVALID_RHS
:
544 /* Attempt to fold a conditional statement. Return true if any changes were
545 made. We only attempt to fold the condition expression, and do not perform
546 any transformation that would require alteration of the cfg. It is
547 assumed that the operands have been previously folded. */
550 fold_gimple_cond (gimple stmt
)
552 tree result
= fold_binary_loc (gimple_location (stmt
),
553 gimple_cond_code (stmt
),
555 gimple_cond_lhs (stmt
),
556 gimple_cond_rhs (stmt
));
560 STRIP_USELESS_TYPE_CONVERSION (result
);
561 if (is_gimple_condexpr (result
) && valid_gimple_rhs_p (result
))
563 gimple_cond_set_condition_from_tree (stmt
, result
);
572 /* Replace a statement at *SI_P with a sequence of statements in STMTS,
573 adjusting the replacement stmts location and virtual operands.
574 If the statement has a lhs the last stmt in the sequence is expected
575 to assign to that lhs. */
578 gsi_replace_with_seq_vops (gimple_stmt_iterator
*si_p
, gimple_seq stmts
)
580 gimple stmt
= gsi_stmt (*si_p
);
582 if (gimple_has_location (stmt
))
583 annotate_all_with_location (stmts
, gimple_location (stmt
));
585 /* First iterate over the replacement statements backward, assigning
586 virtual operands to their defining statements. */
587 gimple laststore
= NULL
;
588 for (gimple_stmt_iterator i
= gsi_last (stmts
);
589 !gsi_end_p (i
); gsi_prev (&i
))
591 gimple new_stmt
= gsi_stmt (i
);
592 if ((gimple_assign_single_p (new_stmt
)
593 && !is_gimple_reg (gimple_assign_lhs (new_stmt
)))
594 || (is_gimple_call (new_stmt
)
595 && (gimple_call_flags (new_stmt
)
596 & (ECF_NOVOPS
| ECF_PURE
| ECF_CONST
| ECF_NORETURN
)) == 0))
600 vdef
= gimple_vdef (stmt
);
602 vdef
= make_ssa_name (gimple_vop (cfun
), new_stmt
);
603 gimple_set_vdef (new_stmt
, vdef
);
604 if (vdef
&& TREE_CODE (vdef
) == SSA_NAME
)
605 SSA_NAME_DEF_STMT (vdef
) = new_stmt
;
606 laststore
= new_stmt
;
610 /* Second iterate over the statements forward, assigning virtual
611 operands to their uses. */
612 tree reaching_vuse
= gimple_vuse (stmt
);
613 for (gimple_stmt_iterator i
= gsi_start (stmts
);
614 !gsi_end_p (i
); gsi_next (&i
))
616 gimple new_stmt
= gsi_stmt (i
);
617 /* If the new statement possibly has a VUSE, update it with exact SSA
618 name we know will reach this one. */
619 if (gimple_has_mem_ops (new_stmt
))
620 gimple_set_vuse (new_stmt
, reaching_vuse
);
621 gimple_set_modified (new_stmt
, true);
622 if (gimple_vdef (new_stmt
))
623 reaching_vuse
= gimple_vdef (new_stmt
);
626 /* If the new sequence does not do a store release the virtual
627 definition of the original statement. */
629 && reaching_vuse
== gimple_vuse (stmt
))
631 tree vdef
= gimple_vdef (stmt
);
633 && TREE_CODE (vdef
) == SSA_NAME
)
635 unlink_stmt_vdef (stmt
);
636 release_ssa_name (vdef
);
640 /* Finally replace the original statement with the sequence. */
641 gsi_replace_with_seq (si_p
, stmts
, false);
644 /* Convert EXPR into a GIMPLE value suitable for substitution on the
645 RHS of an assignment. Insert the necessary statements before
646 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
647 is replaced. If the call is expected to produce a result, then it
648 is replaced by an assignment of the new RHS to the result variable.
649 If the result is to be ignored, then the call is replaced by a
650 GIMPLE_NOP. A proper VDEF chain is retained by making the first
651 VUSE and the last VDEF of the whole sequence be the same as the replaced
652 statement and using new SSA names for stores in between. */
655 gimplify_and_update_call_from_tree (gimple_stmt_iterator
*si_p
, tree expr
)
658 gimple stmt
, new_stmt
;
659 gimple_stmt_iterator i
;
660 gimple_seq stmts
= NULL
;
662 stmt
= gsi_stmt (*si_p
);
664 gcc_assert (is_gimple_call (stmt
));
666 push_gimplify_context (gimple_in_ssa_p (cfun
));
668 lhs
= gimple_call_lhs (stmt
);
669 if (lhs
== NULL_TREE
)
671 gimplify_and_add (expr
, &stmts
);
672 /* We can end up with folding a memcpy of an empty class assignment
673 which gets optimized away by C++ gimplification. */
674 if (gimple_seq_empty_p (stmts
))
676 pop_gimplify_context (NULL
);
677 if (gimple_in_ssa_p (cfun
))
679 unlink_stmt_vdef (stmt
);
682 gsi_replace (si_p
, gimple_build_nop (), true);
688 tree tmp
= get_initialized_tmp_var (expr
, &stmts
, NULL
);
689 new_stmt
= gimple_build_assign (lhs
, tmp
);
690 i
= gsi_last (stmts
);
691 gsi_insert_after_without_update (&i
, new_stmt
,
692 GSI_CONTINUE_LINKING
);
695 pop_gimplify_context (NULL
);
697 gsi_replace_with_seq_vops (si_p
, stmts
);
701 /* Replace the call at *GSI with the gimple value VAL. */
704 replace_call_with_value (gimple_stmt_iterator
*gsi
, tree val
)
706 gimple stmt
= gsi_stmt (*gsi
);
707 tree lhs
= gimple_call_lhs (stmt
);
711 if (!useless_type_conversion_p (TREE_TYPE (lhs
), TREE_TYPE (val
)))
712 val
= fold_convert (TREE_TYPE (lhs
), val
);
713 repl
= gimple_build_assign (lhs
, val
);
716 repl
= gimple_build_nop ();
717 tree vdef
= gimple_vdef (stmt
);
718 if (vdef
&& TREE_CODE (vdef
) == SSA_NAME
)
720 unlink_stmt_vdef (stmt
);
721 release_ssa_name (vdef
);
723 gsi_replace (gsi
, repl
, true);
726 /* Replace the call at *GSI with the new call REPL and fold that
730 replace_call_with_call_and_fold (gimple_stmt_iterator
*gsi
, gimple repl
)
732 gimple stmt
= gsi_stmt (*gsi
);
733 gimple_call_set_lhs (repl
, gimple_call_lhs (stmt
));
734 gimple_set_location (repl
, gimple_location (stmt
));
735 if (gimple_vdef (stmt
)
736 && TREE_CODE (gimple_vdef (stmt
)) == SSA_NAME
)
738 gimple_set_vdef (repl
, gimple_vdef (stmt
));
739 gimple_set_vuse (repl
, gimple_vuse (stmt
));
740 SSA_NAME_DEF_STMT (gimple_vdef (repl
)) = repl
;
742 gsi_replace (gsi
, repl
, true);
746 /* Return true if VAR is a VAR_DECL or a component thereof. */
749 var_decl_component_p (tree var
)
752 while (handled_component_p (inner
))
753 inner
= TREE_OPERAND (inner
, 0);
754 return SSA_VAR_P (inner
);
757 /* Fold function call to builtin mem{{,p}cpy,move}. Return
758 NULL_TREE if no simplification can be made.
759 If ENDP is 0, return DEST (like memcpy).
760 If ENDP is 1, return DEST+LEN (like mempcpy).
761 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
762 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
766 gimple_fold_builtin_memory_op (gimple_stmt_iterator
*gsi
,
767 tree dest
, tree src
, int endp
)
769 gimple stmt
= gsi_stmt (*gsi
);
770 tree lhs
= gimple_call_lhs (stmt
);
771 tree len
= gimple_call_arg (stmt
, 2);
772 tree destvar
, srcvar
;
773 location_t loc
= gimple_location (stmt
);
775 /* If the LEN parameter is zero, return DEST. */
776 if (integer_zerop (len
))
779 if (gimple_call_lhs (stmt
))
780 repl
= gimple_build_assign (gimple_call_lhs (stmt
), dest
);
782 repl
= gimple_build_nop ();
783 tree vdef
= gimple_vdef (stmt
);
784 if (vdef
&& TREE_CODE (vdef
) == SSA_NAME
)
786 unlink_stmt_vdef (stmt
);
787 release_ssa_name (vdef
);
789 gsi_replace (gsi
, repl
, true);
793 /* If SRC and DEST are the same (and not volatile), return
794 DEST{,+LEN,+LEN-1}. */
795 if (operand_equal_p (src
, dest
, 0))
797 unlink_stmt_vdef (stmt
);
798 if (gimple_vdef (stmt
) && TREE_CODE (gimple_vdef (stmt
)) == SSA_NAME
)
799 release_ssa_name (gimple_vdef (stmt
));
802 gsi_replace (gsi
, gimple_build_nop (), true);
809 tree srctype
, desttype
;
810 unsigned int src_align
, dest_align
;
813 /* Build accesses at offset zero with a ref-all character type. */
814 off0
= build_int_cst (build_pointer_type_for_mode (char_type_node
,
817 /* If we can perform the copy efficiently with first doing all loads
818 and then all stores inline it that way. Currently efficiently
819 means that we can load all the memory into a single integer
820 register which is what MOVE_MAX gives us. */
821 src_align
= get_pointer_alignment (src
);
822 dest_align
= get_pointer_alignment (dest
);
823 if (tree_fits_uhwi_p (len
)
824 && compare_tree_int (len
, MOVE_MAX
) <= 0
825 /* ??? Don't transform copies from strings with known length this
826 confuses the tree-ssa-strlen.c. This doesn't handle
827 the case in gcc.dg/strlenopt-8.c which is XFAILed for that
829 && !c_strlen (src
, 2))
831 unsigned ilen
= tree_to_uhwi (len
);
832 if (exact_log2 (ilen
) != -1)
834 tree type
= lang_hooks
.types
.type_for_size (ilen
* 8, 1);
836 && TYPE_MODE (type
) != BLKmode
837 && (GET_MODE_SIZE (TYPE_MODE (type
)) * BITS_PER_UNIT
839 /* If the destination pointer is not aligned we must be able
840 to emit an unaligned store. */
841 && (dest_align
>= GET_MODE_ALIGNMENT (TYPE_MODE (type
))
842 || !SLOW_UNALIGNED_ACCESS (TYPE_MODE (type
), dest_align
)))
845 tree desttype
= type
;
846 if (src_align
< GET_MODE_ALIGNMENT (TYPE_MODE (type
)))
847 srctype
= build_aligned_type (type
, src_align
);
848 tree srcmem
= fold_build2 (MEM_REF
, srctype
, src
, off0
);
849 tree tem
= fold_const_aggregate_ref (srcmem
);
852 else if (src_align
< GET_MODE_ALIGNMENT (TYPE_MODE (type
))
853 && SLOW_UNALIGNED_ACCESS (TYPE_MODE (type
),
859 if (is_gimple_reg_type (TREE_TYPE (srcmem
)))
861 new_stmt
= gimple_build_assign (NULL_TREE
, srcmem
);
862 if (gimple_in_ssa_p (cfun
))
863 srcmem
= make_ssa_name (TREE_TYPE (srcmem
),
866 srcmem
= create_tmp_reg (TREE_TYPE (srcmem
),
868 gimple_assign_set_lhs (new_stmt
, srcmem
);
869 gimple_set_vuse (new_stmt
, gimple_vuse (stmt
));
870 gsi_insert_before (gsi
, new_stmt
, GSI_SAME_STMT
);
872 if (dest_align
< GET_MODE_ALIGNMENT (TYPE_MODE (type
)))
873 desttype
= build_aligned_type (type
, dest_align
);
875 = gimple_build_assign (fold_build2 (MEM_REF
, desttype
,
878 gimple_set_vuse (new_stmt
, gimple_vuse (stmt
));
879 gimple_set_vdef (new_stmt
, gimple_vdef (stmt
));
880 if (gimple_vdef (new_stmt
)
881 && TREE_CODE (gimple_vdef (new_stmt
)) == SSA_NAME
)
882 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt
)) = new_stmt
;
885 gsi_replace (gsi
, new_stmt
, true);
888 gsi_insert_before (gsi
, new_stmt
, GSI_SAME_STMT
);
897 /* Both DEST and SRC must be pointer types.
898 ??? This is what old code did. Is the testing for pointer types
901 If either SRC is readonly or length is 1, we can use memcpy. */
902 if (!dest_align
|| !src_align
)
904 if (readonly_data_expr (src
)
905 || (tree_fits_uhwi_p (len
)
906 && (MIN (src_align
, dest_align
) / BITS_PER_UNIT
907 >= tree_to_uhwi (len
))))
909 tree fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
912 gimple_call_set_fndecl (stmt
, fn
);
913 gimple_call_set_arg (stmt
, 0, dest
);
914 gimple_call_set_arg (stmt
, 1, src
);
919 /* If *src and *dest can't overlap, optimize into memcpy as well. */
920 if (TREE_CODE (src
) == ADDR_EXPR
921 && TREE_CODE (dest
) == ADDR_EXPR
)
923 tree src_base
, dest_base
, fn
;
924 HOST_WIDE_INT src_offset
= 0, dest_offset
= 0;
925 HOST_WIDE_INT size
= -1;
926 HOST_WIDE_INT maxsize
= -1;
928 srcvar
= TREE_OPERAND (src
, 0);
929 src_base
= get_ref_base_and_extent (srcvar
, &src_offset
,
931 destvar
= TREE_OPERAND (dest
, 0);
932 dest_base
= get_ref_base_and_extent (destvar
, &dest_offset
,
934 if (tree_fits_uhwi_p (len
))
935 maxsize
= tree_to_uhwi (len
);
938 src_offset
/= BITS_PER_UNIT
;
939 dest_offset
/= BITS_PER_UNIT
;
940 if (SSA_VAR_P (src_base
)
941 && SSA_VAR_P (dest_base
))
943 if (operand_equal_p (src_base
, dest_base
, 0)
944 && ranges_overlap_p (src_offset
, maxsize
,
945 dest_offset
, maxsize
))
948 else if (TREE_CODE (src_base
) == MEM_REF
949 && TREE_CODE (dest_base
) == MEM_REF
)
951 if (! operand_equal_p (TREE_OPERAND (src_base
, 0),
952 TREE_OPERAND (dest_base
, 0), 0))
954 offset_int off
= mem_ref_offset (src_base
) + src_offset
;
955 if (!wi::fits_shwi_p (off
))
957 src_offset
= off
.to_shwi ();
959 off
= mem_ref_offset (dest_base
) + dest_offset
;
960 if (!wi::fits_shwi_p (off
))
962 dest_offset
= off
.to_shwi ();
963 if (ranges_overlap_p (src_offset
, maxsize
,
964 dest_offset
, maxsize
))
970 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
973 gimple_call_set_fndecl (stmt
, fn
);
974 gimple_call_set_arg (stmt
, 0, dest
);
975 gimple_call_set_arg (stmt
, 1, src
);
980 /* If the destination and source do not alias optimize into
982 if ((is_gimple_min_invariant (dest
)
983 || TREE_CODE (dest
) == SSA_NAME
)
984 && (is_gimple_min_invariant (src
)
985 || TREE_CODE (src
) == SSA_NAME
))
988 ao_ref_init_from_ptr_and_size (&destr
, dest
, len
);
989 ao_ref_init_from_ptr_and_size (&srcr
, src
, len
);
990 if (!refs_may_alias_p_1 (&destr
, &srcr
, false))
993 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
996 gimple_call_set_fndecl (stmt
, fn
);
997 gimple_call_set_arg (stmt
, 0, dest
);
998 gimple_call_set_arg (stmt
, 1, src
);
1007 if (!tree_fits_shwi_p (len
))
1010 This logic loses for arguments like (type *)malloc (sizeof (type)),
1011 since we strip the casts of up to VOID return value from malloc.
1012 Perhaps we ought to inherit type from non-VOID argument here? */
1015 if (!POINTER_TYPE_P (TREE_TYPE (src
))
1016 || !POINTER_TYPE_P (TREE_TYPE (dest
)))
1018 /* In the following try to find a type that is most natural to be
1019 used for the memcpy source and destination and that allows
1020 the most optimization when memcpy is turned into a plain assignment
1021 using that type. In theory we could always use a char[len] type
1022 but that only gains us that the destination and source possibly
1023 no longer will have their address taken. */
1024 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
1025 if (TREE_CODE (src
) == POINTER_PLUS_EXPR
)
1027 tree tem
= TREE_OPERAND (src
, 0);
1029 if (tem
!= TREE_OPERAND (src
, 0))
1030 src
= build1 (NOP_EXPR
, TREE_TYPE (tem
), src
);
1032 if (TREE_CODE (dest
) == POINTER_PLUS_EXPR
)
1034 tree tem
= TREE_OPERAND (dest
, 0);
1036 if (tem
!= TREE_OPERAND (dest
, 0))
1037 dest
= build1 (NOP_EXPR
, TREE_TYPE (tem
), dest
);
1039 srctype
= TREE_TYPE (TREE_TYPE (src
));
1040 if (TREE_CODE (srctype
) == ARRAY_TYPE
1041 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype
), len
))
1043 srctype
= TREE_TYPE (srctype
);
1045 src
= build1 (NOP_EXPR
, build_pointer_type (srctype
), src
);
1047 desttype
= TREE_TYPE (TREE_TYPE (dest
));
1048 if (TREE_CODE (desttype
) == ARRAY_TYPE
1049 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype
), len
))
1051 desttype
= TREE_TYPE (desttype
);
1053 dest
= build1 (NOP_EXPR
, build_pointer_type (desttype
), dest
);
1055 if (TREE_ADDRESSABLE (srctype
)
1056 || TREE_ADDRESSABLE (desttype
))
1059 /* Make sure we are not copying using a floating-point mode or
1060 a type whose size possibly does not match its precision. */
1061 if (FLOAT_MODE_P (TYPE_MODE (desttype
))
1062 || TREE_CODE (desttype
) == BOOLEAN_TYPE
1063 || TREE_CODE (desttype
) == ENUMERAL_TYPE
)
1064 desttype
= bitwise_type_for_mode (TYPE_MODE (desttype
));
1065 if (FLOAT_MODE_P (TYPE_MODE (srctype
))
1066 || TREE_CODE (srctype
) == BOOLEAN_TYPE
1067 || TREE_CODE (srctype
) == ENUMERAL_TYPE
)
1068 srctype
= bitwise_type_for_mode (TYPE_MODE (srctype
));
1076 src_align
= get_pointer_alignment (src
);
1077 dest_align
= get_pointer_alignment (dest
);
1078 if (dest_align
< TYPE_ALIGN (desttype
)
1079 || src_align
< TYPE_ALIGN (srctype
))
1083 STRIP_NOPS (destvar
);
1084 if (TREE_CODE (destvar
) == ADDR_EXPR
1085 && var_decl_component_p (TREE_OPERAND (destvar
, 0))
1086 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype
), len
))
1087 destvar
= fold_build2 (MEM_REF
, desttype
, destvar
, off0
);
1089 destvar
= NULL_TREE
;
1092 STRIP_NOPS (srcvar
);
1093 if (TREE_CODE (srcvar
) == ADDR_EXPR
1094 && var_decl_component_p (TREE_OPERAND (srcvar
, 0))
1095 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype
), len
))
1098 || src_align
>= TYPE_ALIGN (desttype
))
1099 srcvar
= fold_build2 (MEM_REF
, destvar
? desttype
: srctype
,
1101 else if (!STRICT_ALIGNMENT
)
1103 srctype
= build_aligned_type (TYPE_MAIN_VARIANT (desttype
),
1105 srcvar
= fold_build2 (MEM_REF
, srctype
, srcvar
, off0
);
1113 if (srcvar
== NULL_TREE
&& destvar
== NULL_TREE
)
1116 if (srcvar
== NULL_TREE
)
1119 if (src_align
>= TYPE_ALIGN (desttype
))
1120 srcvar
= fold_build2 (MEM_REF
, desttype
, src
, off0
);
1123 if (STRICT_ALIGNMENT
)
1125 srctype
= build_aligned_type (TYPE_MAIN_VARIANT (desttype
),
1127 srcvar
= fold_build2 (MEM_REF
, srctype
, src
, off0
);
1130 else if (destvar
== NULL_TREE
)
1133 if (dest_align
>= TYPE_ALIGN (srctype
))
1134 destvar
= fold_build2 (MEM_REF
, srctype
, dest
, off0
);
1137 if (STRICT_ALIGNMENT
)
1139 desttype
= build_aligned_type (TYPE_MAIN_VARIANT (srctype
),
1141 destvar
= fold_build2 (MEM_REF
, desttype
, dest
, off0
);
1146 if (is_gimple_reg_type (TREE_TYPE (srcvar
)))
1148 new_stmt
= gimple_build_assign (NULL_TREE
, srcvar
);
1149 if (gimple_in_ssa_p (cfun
))
1150 srcvar
= make_ssa_name (TREE_TYPE (srcvar
), new_stmt
);
1152 srcvar
= create_tmp_reg (TREE_TYPE (srcvar
), NULL
);
1153 gimple_assign_set_lhs (new_stmt
, srcvar
);
1154 gimple_set_vuse (new_stmt
, gimple_vuse (stmt
));
1155 gsi_insert_before (gsi
, new_stmt
, GSI_SAME_STMT
);
1157 new_stmt
= gimple_build_assign (destvar
, srcvar
);
1158 gimple_set_vuse (new_stmt
, gimple_vuse (stmt
));
1159 gimple_set_vdef (new_stmt
, gimple_vdef (stmt
));
1160 if (gimple_vdef (new_stmt
)
1161 && TREE_CODE (gimple_vdef (new_stmt
)) == SSA_NAME
)
1162 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt
)) = new_stmt
;
1165 gsi_replace (gsi
, new_stmt
, true);
1168 gsi_insert_before (gsi
, new_stmt
, GSI_SAME_STMT
);
1172 if (endp
== 0 || endp
== 3)
1175 len
= fold_build2_loc (loc
, MINUS_EXPR
, TREE_TYPE (len
), len
,
1177 if (endp
== 2 || endp
== 1)
1178 dest
= fold_build_pointer_plus_loc (loc
, dest
, len
);
1180 dest
= force_gimple_operand_gsi (gsi
, dest
, false, NULL_TREE
, true,
1182 gimple repl
= gimple_build_assign (lhs
, dest
);
1183 gsi_replace (gsi
, repl
, true);
1187 /* Fold function call to builtin memset or bzero at *GSI setting the
1188 memory of size LEN to VAL. Return whether a simplification was made. */
1191 gimple_fold_builtin_memset (gimple_stmt_iterator
*gsi
, tree c
, tree len
)
1193 gimple stmt
= gsi_stmt (*gsi
);
1195 unsigned HOST_WIDE_INT length
, cval
;
1197 /* If the LEN parameter is zero, return DEST. */
1198 if (integer_zerop (len
))
1200 replace_call_with_value (gsi
, gimple_call_arg (stmt
, 0));
1204 if (! tree_fits_uhwi_p (len
))
1207 if (TREE_CODE (c
) != INTEGER_CST
)
1210 tree dest
= gimple_call_arg (stmt
, 0);
1212 if (TREE_CODE (var
) != ADDR_EXPR
)
1215 var
= TREE_OPERAND (var
, 0);
1216 if (TREE_THIS_VOLATILE (var
))
1219 etype
= TREE_TYPE (var
);
1220 if (TREE_CODE (etype
) == ARRAY_TYPE
)
1221 etype
= TREE_TYPE (etype
);
1223 if (!INTEGRAL_TYPE_P (etype
)
1224 && !POINTER_TYPE_P (etype
))
1227 if (! var_decl_component_p (var
))
1230 length
= tree_to_uhwi (len
);
1231 if (GET_MODE_SIZE (TYPE_MODE (etype
)) != length
1232 || get_pointer_alignment (dest
) / BITS_PER_UNIT
< length
)
1235 if (length
> HOST_BITS_PER_WIDE_INT
/ BITS_PER_UNIT
)
1238 if (integer_zerop (c
))
1242 if (CHAR_BIT
!= 8 || BITS_PER_UNIT
!= 8 || HOST_BITS_PER_WIDE_INT
> 64)
1245 cval
= TREE_INT_CST_LOW (c
);
1249 cval
|= (cval
<< 31) << 1;
1252 var
= fold_build2 (MEM_REF
, etype
, dest
, build_int_cst (ptr_type_node
, 0));
1253 gimple store
= gimple_build_assign (var
, build_int_cst_type (etype
, cval
));
1254 gimple_set_vuse (store
, gimple_vuse (stmt
));
1255 tree vdef
= gimple_vdef (stmt
);
1256 if (vdef
&& TREE_CODE (vdef
) == SSA_NAME
)
1258 gimple_set_vdef (store
, gimple_vdef (stmt
));
1259 SSA_NAME_DEF_STMT (gimple_vdef (stmt
)) = store
;
1261 gsi_insert_before (gsi
, store
, GSI_SAME_STMT
);
1262 if (gimple_call_lhs (stmt
))
1264 gimple asgn
= gimple_build_assign (gimple_call_lhs (stmt
), dest
);
1265 gsi_replace (gsi
, asgn
, true);
1269 gimple_stmt_iterator gsi2
= *gsi
;
1271 gsi_remove (&gsi2
, true);
1278 /* Return the string length, maximum string length or maximum value of
1280 If ARG is an SSA name variable, follow its use-def chains. If LENGTH
1281 is not NULL and, for TYPE == 0, its value is not equal to the length
1282 we determine or if we are unable to determine the length or value,
1283 return false. VISITED is a bitmap of visited variables.
1284 TYPE is 0 if string length should be returned, 1 for maximum string
1285 length and 2 for maximum value ARG can have. */
1288 get_maxval_strlen (tree arg
, tree
*length
, bitmap
*visited
, int type
)
1293 if (TREE_CODE (arg
) != SSA_NAME
)
1295 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
1296 if (TREE_CODE (arg
) == ADDR_EXPR
1297 && TREE_CODE (TREE_OPERAND (arg
, 0)) == ARRAY_REF
1298 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg
, 0), 1)))
1300 tree aop0
= TREE_OPERAND (TREE_OPERAND (arg
, 0), 0);
1301 if (TREE_CODE (aop0
) == INDIRECT_REF
1302 && TREE_CODE (TREE_OPERAND (aop0
, 0)) == SSA_NAME
)
1303 return get_maxval_strlen (TREE_OPERAND (aop0
, 0),
1304 length
, visited
, type
);
1310 if (TREE_CODE (val
) != INTEGER_CST
1311 || tree_int_cst_sgn (val
) < 0)
1315 val
= c_strlen (arg
, 1);
1323 if (TREE_CODE (*length
) != INTEGER_CST
1324 || TREE_CODE (val
) != INTEGER_CST
)
1327 if (tree_int_cst_lt (*length
, val
))
1331 else if (simple_cst_equal (val
, *length
) != 1)
1339 /* If ARG is registered for SSA update we cannot look at its defining
1341 if (name_registered_for_update_p (arg
))
1344 /* If we were already here, break the infinite cycle. */
1346 *visited
= BITMAP_ALLOC (NULL
);
1347 if (!bitmap_set_bit (*visited
, SSA_NAME_VERSION (arg
)))
1351 def_stmt
= SSA_NAME_DEF_STMT (var
);
1353 switch (gimple_code (def_stmt
))
1356 /* The RHS of the statement defining VAR must either have a
1357 constant length or come from another SSA_NAME with a constant
1359 if (gimple_assign_single_p (def_stmt
)
1360 || gimple_assign_unary_nop_p (def_stmt
))
1362 tree rhs
= gimple_assign_rhs1 (def_stmt
);
1363 return get_maxval_strlen (rhs
, length
, visited
, type
);
1365 else if (gimple_assign_rhs_code (def_stmt
) == COND_EXPR
)
1367 tree op2
= gimple_assign_rhs2 (def_stmt
);
1368 tree op3
= gimple_assign_rhs3 (def_stmt
);
1369 return get_maxval_strlen (op2
, length
, visited
, type
)
1370 && get_maxval_strlen (op3
, length
, visited
, type
);
1376 /* All the arguments of the PHI node must have the same constant
1380 for (i
= 0; i
< gimple_phi_num_args (def_stmt
); i
++)
1382 tree arg
= gimple_phi_arg (def_stmt
, i
)->def
;
1384 /* If this PHI has itself as an argument, we cannot
1385 determine the string length of this argument. However,
1386 if we can find a constant string length for the other
1387 PHI args then we can still be sure that this is a
1388 constant string length. So be optimistic and just
1389 continue with the next argument. */
1390 if (arg
== gimple_phi_result (def_stmt
))
1393 if (!get_maxval_strlen (arg
, length
, visited
, type
))
1405 get_maxval_strlen (tree arg
, int type
)
1407 bitmap visited
= NULL
;
1408 tree len
= NULL_TREE
;
1409 if (!get_maxval_strlen (arg
, &len
, &visited
, type
))
1412 BITMAP_FREE (visited
);
1418 /* Fold function call to builtin strcpy with arguments DEST and SRC.
1419 If LEN is not NULL, it represents the length of the string to be
1420 copied. Return NULL_TREE if no simplification can be made. */
1423 gimple_fold_builtin_strcpy (gimple_stmt_iterator
*gsi
,
1424 tree dest
, tree src
)
1426 location_t loc
= gimple_location (gsi_stmt (*gsi
));
1429 /* If SRC and DEST are the same (and not volatile), return DEST. */
1430 if (operand_equal_p (src
, dest
, 0))
1432 replace_call_with_value (gsi
, dest
);
1436 if (optimize_function_for_size_p (cfun
))
1439 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
1443 tree len
= get_maxval_strlen (src
, 0);
1447 len
= fold_convert_loc (loc
, size_type_node
, len
);
1448 len
= size_binop_loc (loc
, PLUS_EXPR
, len
, build_int_cst (size_type_node
, 1));
1449 len
= force_gimple_operand_gsi (gsi
, len
, true,
1450 NULL_TREE
, true, GSI_SAME_STMT
);
1451 gimple repl
= gimple_build_call (fn
, 3, dest
, src
, len
);
1452 replace_call_with_call_and_fold (gsi
, repl
);
1456 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
1457 If SLEN is not NULL, it represents the length of the source string.
1458 Return NULL_TREE if no simplification can be made. */
1461 gimple_fold_builtin_strncpy (gimple_stmt_iterator
*gsi
,
1462 tree dest
, tree src
, tree len
)
1464 location_t loc
= gimple_location (gsi_stmt (*gsi
));
1467 /* If the LEN parameter is zero, return DEST. */
1468 if (integer_zerop (len
))
1470 replace_call_with_value (gsi
, dest
);
1474 /* We can't compare slen with len as constants below if len is not a
1476 if (TREE_CODE (len
) != INTEGER_CST
)
1479 /* Now, we must be passed a constant src ptr parameter. */
1480 tree slen
= get_maxval_strlen (src
, 0);
1481 if (!slen
|| TREE_CODE (slen
) != INTEGER_CST
)
1484 slen
= size_binop_loc (loc
, PLUS_EXPR
, slen
, ssize_int (1));
1486 /* We do not support simplification of this case, though we do
1487 support it when expanding trees into RTL. */
1488 /* FIXME: generate a call to __builtin_memset. */
1489 if (tree_int_cst_lt (slen
, len
))
1492 /* OK transform into builtin memcpy. */
1493 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
1497 len
= fold_convert_loc (loc
, size_type_node
, len
);
1498 len
= force_gimple_operand_gsi (gsi
, len
, true,
1499 NULL_TREE
, true, GSI_SAME_STMT
);
1500 gimple repl
= gimple_build_call (fn
, 3, dest
, src
, len
);
1501 replace_call_with_call_and_fold (gsi
, repl
);
1505 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
1508 Return NULL_TREE if no simplification was possible, otherwise return the
1509 simplified form of the call as a tree.
1511 The simplified form may be a constant or other expression which
1512 computes the same value, but in a more efficient manner (including
1513 calls to other builtin functions).
1515 The call may contain arguments which need to be evaluated, but
1516 which are not useful to determine the result of the call. In
1517 this case we return a chain of COMPOUND_EXPRs. The LHS of each
1518 COMPOUND_EXPR will be an argument which must be evaluated.
1519 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
1520 COMPOUND_EXPR in the chain will contain the tree for the simplified
1521 form of the builtin function call. */
1524 gimple_fold_builtin_strcat (gimple_stmt_iterator
*gsi
, tree dst
, tree src
)
1526 gimple stmt
= gsi_stmt (*gsi
);
1527 location_t loc
= gimple_location (stmt
);
1529 const char *p
= c_getstr (src
);
1531 /* If the string length is zero, return the dst parameter. */
1532 if (p
&& *p
== '\0')
1534 replace_call_with_value (gsi
, dst
);
1538 if (!optimize_bb_for_speed_p (gimple_bb (stmt
)))
1541 /* See if we can store by pieces into (dst + strlen(dst)). */
1543 tree strlen_fn
= builtin_decl_implicit (BUILT_IN_STRLEN
);
1544 tree memcpy_fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
1546 if (!strlen_fn
|| !memcpy_fn
)
1549 /* If the length of the source string isn't computable don't
1550 split strcat into strlen and memcpy. */
1551 tree len
= get_maxval_strlen (src
, 0);
1555 /* Create strlen (dst). */
1556 gimple_seq stmts
= NULL
, stmts2
;
1557 gimple repl
= gimple_build_call (strlen_fn
, 1, dst
);
1558 gimple_set_location (repl
, loc
);
1559 if (gimple_in_ssa_p (cfun
))
1560 newdst
= make_ssa_name (size_type_node
, NULL
);
1562 newdst
= create_tmp_reg (size_type_node
, NULL
);
1563 gimple_call_set_lhs (repl
, newdst
);
1564 gimple_seq_add_stmt_without_update (&stmts
, repl
);
1566 /* Create (dst p+ strlen (dst)). */
1567 newdst
= fold_build_pointer_plus_loc (loc
, dst
, newdst
);
1568 newdst
= force_gimple_operand (newdst
, &stmts2
, true, NULL_TREE
);
1569 gimple_seq_add_seq_without_update (&stmts
, stmts2
);
1571 len
= fold_convert_loc (loc
, size_type_node
, len
);
1572 len
= size_binop_loc (loc
, PLUS_EXPR
, len
,
1573 build_int_cst (size_type_node
, 1));
1574 len
= force_gimple_operand (len
, &stmts2
, true, NULL_TREE
);
1575 gimple_seq_add_seq_without_update (&stmts
, stmts2
);
1577 repl
= gimple_build_call (memcpy_fn
, 3, newdst
, src
, len
);
1578 gimple_seq_add_stmt_without_update (&stmts
, repl
);
1579 if (gimple_call_lhs (stmt
))
1581 repl
= gimple_build_assign (gimple_call_lhs (stmt
), dst
);
1582 gimple_seq_add_stmt_without_update (&stmts
, repl
);
1583 gsi_replace_with_seq_vops (gsi
, stmts
);
1584 /* gsi now points at the assignment to the lhs, get a
1585 stmt iterator to the memcpy call.
1586 ??? We can't use gsi_for_stmt as that doesn't work when the
1587 CFG isn't built yet. */
1588 gimple_stmt_iterator gsi2
= *gsi
;
1594 gsi_replace_with_seq_vops (gsi
, stmts
);
1600 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
1601 are the arguments to the call. */
1604 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator
*gsi
)
1606 gimple stmt
= gsi_stmt (*gsi
);
1607 tree dest
= gimple_call_arg (stmt
, 0);
1608 tree src
= gimple_call_arg (stmt
, 1);
1609 tree size
= gimple_call_arg (stmt
, 2);
1615 /* If the SRC parameter is "", return DEST. */
1616 if (p
&& *p
== '\0')
1618 replace_call_with_value (gsi
, dest
);
1622 if (! tree_fits_uhwi_p (size
) || ! integer_all_onesp (size
))
1625 /* If __builtin_strcat_chk is used, assume strcat is available. */
1626 fn
= builtin_decl_explicit (BUILT_IN_STRCAT
);
1630 gimple repl
= gimple_build_call (fn
, 2, dest
, src
);
1631 replace_call_with_call_and_fold (gsi
, repl
);
1635 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
1636 to the call. IGNORE is true if the value returned
1637 by the builtin will be ignored. UNLOCKED is true is true if this
1638 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
1639 the known length of the string. Return NULL_TREE if no simplification
1643 gimple_fold_builtin_fputs (gimple_stmt_iterator
*gsi
,
1644 tree arg0
, tree arg1
,
1647 gimple stmt
= gsi_stmt (*gsi
);
1649 /* If we're using an unlocked function, assume the other unlocked
1650 functions exist explicitly. */
1651 tree
const fn_fputc
= (unlocked
1652 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED
)
1653 : builtin_decl_implicit (BUILT_IN_FPUTC
));
1654 tree
const fn_fwrite
= (unlocked
1655 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED
)
1656 : builtin_decl_implicit (BUILT_IN_FWRITE
));
1658 /* If the return value is used, don't do the transformation. */
1659 if (gimple_call_lhs (stmt
))
1662 /* Get the length of the string passed to fputs. If the length
1663 can't be determined, punt. */
1664 tree len
= get_maxval_strlen (arg0
, 0);
1666 || TREE_CODE (len
) != INTEGER_CST
)
1669 switch (compare_tree_int (len
, 1))
1671 case -1: /* length is 0, delete the call entirely . */
1672 replace_call_with_value (gsi
, integer_zero_node
);
1675 case 0: /* length is 1, call fputc. */
1677 const char *p
= c_getstr (arg0
);
1683 gimple repl
= gimple_build_call (fn_fputc
, 2,
1685 (integer_type_node
, p
[0]), arg1
);
1686 replace_call_with_call_and_fold (gsi
, repl
);
1691 case 1: /* length is greater than 1, call fwrite. */
1693 /* If optimizing for size keep fputs. */
1694 if (optimize_function_for_size_p (cfun
))
1696 /* New argument list transforming fputs(string, stream) to
1697 fwrite(string, 1, len, stream). */
1701 gimple repl
= gimple_build_call (fn_fwrite
, 4, arg0
,
1702 size_one_node
, len
, arg1
);
1703 replace_call_with_call_and_fold (gsi
, repl
);
1712 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
1713 DEST, SRC, LEN, and SIZE are the arguments to the call.
1714 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
1715 code of the builtin. If MAXLEN is not NULL, it is maximum length
1716 passed as third argument. */
1719 gimple_fold_builtin_memory_chk (gimple_stmt_iterator
*gsi
,
1720 tree dest
, tree src
, tree len
, tree size
,
1721 enum built_in_function fcode
)
1723 gimple stmt
= gsi_stmt (*gsi
);
1724 location_t loc
= gimple_location (stmt
);
1725 bool ignore
= gimple_call_lhs (stmt
) == NULL_TREE
;
1728 /* If SRC and DEST are the same (and not volatile), return DEST
1729 (resp. DEST+LEN for __mempcpy_chk). */
1730 if (fcode
!= BUILT_IN_MEMSET_CHK
&& operand_equal_p (src
, dest
, 0))
1732 if (fcode
!= BUILT_IN_MEMPCPY_CHK
)
1734 replace_call_with_value (gsi
, dest
);
1739 tree temp
= fold_build_pointer_plus_loc (loc
, dest
, len
);
1740 temp
= force_gimple_operand_gsi (gsi
, temp
,
1741 false, NULL_TREE
, true,
1743 replace_call_with_value (gsi
, temp
);
1748 if (! tree_fits_uhwi_p (size
))
1751 tree maxlen
= get_maxval_strlen (len
, 2);
1752 if (! integer_all_onesp (size
))
1754 if (! tree_fits_uhwi_p (len
))
1756 /* If LEN is not constant, try MAXLEN too.
1757 For MAXLEN only allow optimizing into non-_ocs function
1758 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
1759 if (maxlen
== NULL_TREE
|| ! tree_fits_uhwi_p (maxlen
))
1761 if (fcode
== BUILT_IN_MEMPCPY_CHK
&& ignore
)
1763 /* (void) __mempcpy_chk () can be optimized into
1764 (void) __memcpy_chk (). */
1765 fn
= builtin_decl_explicit (BUILT_IN_MEMCPY_CHK
);
1769 gimple repl
= gimple_build_call (fn
, 4, dest
, src
, len
, size
);
1770 replace_call_with_call_and_fold (gsi
, repl
);
1779 if (tree_int_cst_lt (size
, maxlen
))
1784 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
1785 mem{cpy,pcpy,move,set} is available. */
1788 case BUILT_IN_MEMCPY_CHK
:
1789 fn
= builtin_decl_explicit (BUILT_IN_MEMCPY
);
1791 case BUILT_IN_MEMPCPY_CHK
:
1792 fn
= builtin_decl_explicit (BUILT_IN_MEMPCPY
);
1794 case BUILT_IN_MEMMOVE_CHK
:
1795 fn
= builtin_decl_explicit (BUILT_IN_MEMMOVE
);
1797 case BUILT_IN_MEMSET_CHK
:
1798 fn
= builtin_decl_explicit (BUILT_IN_MEMSET
);
1807 gimple repl
= gimple_build_call (fn
, 3, dest
, src
, len
);
1808 replace_call_with_call_and_fold (gsi
, repl
);
1812 /* Fold a call to the __st[rp]cpy_chk builtin.
1813 DEST, SRC, and SIZE are the arguments to the call.
1814 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
1815 code of the builtin. If MAXLEN is not NULL, it is maximum length of
1816 strings passed as second argument. */
1819 gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator
*gsi
,
1821 tree src
, tree size
,
1822 enum built_in_function fcode
)
1824 gimple stmt
= gsi_stmt (*gsi
);
1825 location_t loc
= gimple_location (stmt
);
1826 bool ignore
= gimple_call_lhs (stmt
) == NULL_TREE
;
1829 /* If SRC and DEST are the same (and not volatile), return DEST. */
1830 if (fcode
== BUILT_IN_STRCPY_CHK
&& operand_equal_p (src
, dest
, 0))
1832 replace_call_with_value (gsi
, dest
);
1836 if (! tree_fits_uhwi_p (size
))
1839 tree maxlen
= get_maxval_strlen (src
, 1);
1840 if (! integer_all_onesp (size
))
1842 len
= c_strlen (src
, 1);
1843 if (! len
|| ! tree_fits_uhwi_p (len
))
1845 /* If LEN is not constant, try MAXLEN too.
1846 For MAXLEN only allow optimizing into non-_ocs function
1847 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
1848 if (maxlen
== NULL_TREE
|| ! tree_fits_uhwi_p (maxlen
))
1850 if (fcode
== BUILT_IN_STPCPY_CHK
)
1855 /* If return value of __stpcpy_chk is ignored,
1856 optimize into __strcpy_chk. */
1857 fn
= builtin_decl_explicit (BUILT_IN_STRCPY_CHK
);
1861 gimple repl
= gimple_build_call (fn
, 3, dest
, src
, size
);
1862 replace_call_with_call_and_fold (gsi
, repl
);
1866 if (! len
|| TREE_SIDE_EFFECTS (len
))
1869 /* If c_strlen returned something, but not a constant,
1870 transform __strcpy_chk into __memcpy_chk. */
1871 fn
= builtin_decl_explicit (BUILT_IN_MEMCPY_CHK
);
1875 len
= fold_convert_loc (loc
, size_type_node
, len
);
1876 len
= size_binop_loc (loc
, PLUS_EXPR
, len
,
1877 build_int_cst (size_type_node
, 1));
1878 len
= force_gimple_operand_gsi (gsi
, len
, true, NULL_TREE
,
1879 true, GSI_SAME_STMT
);
1880 gimple repl
= gimple_build_call (fn
, 4, dest
, src
, len
, size
);
1881 replace_call_with_call_and_fold (gsi
, repl
);
1888 if (! tree_int_cst_lt (maxlen
, size
))
1892 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
1893 fn
= builtin_decl_explicit (fcode
== BUILT_IN_STPCPY_CHK
1894 ? BUILT_IN_STPCPY
: BUILT_IN_STRCPY
);
1898 gimple repl
= gimple_build_call (fn
, 2, dest
, src
);
1899 replace_call_with_call_and_fold (gsi
, repl
);
1903 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
1904 are the arguments to the call. If MAXLEN is not NULL, it is maximum
1905 length passed as third argument. IGNORE is true if return value can be
1906 ignored. FCODE is the BUILT_IN_* code of the builtin. */
1909 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator
*gsi
,
1910 tree dest
, tree src
,
1911 tree len
, tree size
,
1912 enum built_in_function fcode
)
1914 gimple stmt
= gsi_stmt (*gsi
);
1915 bool ignore
= gimple_call_lhs (stmt
) == NULL_TREE
;
1918 if (fcode
== BUILT_IN_STPNCPY_CHK
&& ignore
)
1920 /* If return value of __stpncpy_chk is ignored,
1921 optimize into __strncpy_chk. */
1922 fn
= builtin_decl_explicit (BUILT_IN_STRNCPY_CHK
);
1925 gimple repl
= gimple_build_call (fn
, 4, dest
, src
, len
, size
);
1926 replace_call_with_call_and_fold (gsi
, repl
);
1931 if (! tree_fits_uhwi_p (size
))
1934 tree maxlen
= get_maxval_strlen (len
, 2);
1935 if (! integer_all_onesp (size
))
1937 if (! tree_fits_uhwi_p (len
))
1939 /* If LEN is not constant, try MAXLEN too.
1940 For MAXLEN only allow optimizing into non-_ocs function
1941 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
1942 if (maxlen
== NULL_TREE
|| ! tree_fits_uhwi_p (maxlen
))
1948 if (tree_int_cst_lt (size
, maxlen
))
1952 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
1953 fn
= builtin_decl_explicit (fcode
== BUILT_IN_STPNCPY_CHK
1954 ? BUILT_IN_STPNCPY
: BUILT_IN_STRNCPY
);
1958 gimple repl
= gimple_build_call (fn
, 3, dest
, src
, len
);
1959 replace_call_with_call_and_fold (gsi
, repl
);
1963 /* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS. Return
1964 NULL_TREE if a normal call should be emitted rather than expanding
1965 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
1966 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
1967 passed as second argument. */
1970 gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator
*gsi
,
1971 enum built_in_function fcode
)
1973 gimple stmt
= gsi_stmt (*gsi
);
1974 tree dest
, size
, len
, fn
, fmt
, flag
;
1975 const char *fmt_str
;
1977 /* Verify the required arguments in the original call. */
1978 if (gimple_call_num_args (stmt
) < 5)
1981 dest
= gimple_call_arg (stmt
, 0);
1982 len
= gimple_call_arg (stmt
, 1);
1983 flag
= gimple_call_arg (stmt
, 2);
1984 size
= gimple_call_arg (stmt
, 3);
1985 fmt
= gimple_call_arg (stmt
, 4);
1987 if (! tree_fits_uhwi_p (size
))
1990 if (! integer_all_onesp (size
))
1992 tree maxlen
= get_maxval_strlen (len
, 2);
1993 if (! tree_fits_uhwi_p (len
))
1995 /* If LEN is not constant, try MAXLEN too.
1996 For MAXLEN only allow optimizing into non-_ocs function
1997 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
1998 if (maxlen
== NULL_TREE
|| ! tree_fits_uhwi_p (maxlen
))
2004 if (tree_int_cst_lt (size
, maxlen
))
2008 if (!init_target_chars ())
2011 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
2012 or if format doesn't contain % chars or is "%s". */
2013 if (! integer_zerop (flag
))
2015 fmt_str
= c_getstr (fmt
);
2016 if (fmt_str
== NULL
)
2018 if (strchr (fmt_str
, target_percent
) != NULL
2019 && strcmp (fmt_str
, target_percent_s
))
2023 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
2025 fn
= builtin_decl_explicit (fcode
== BUILT_IN_VSNPRINTF_CHK
2026 ? BUILT_IN_VSNPRINTF
: BUILT_IN_SNPRINTF
);
2030 /* Replace the called function and the first 5 argument by 3 retaining
2031 trailing varargs. */
2032 gimple_call_set_fndecl (stmt
, fn
);
2033 gimple_call_set_fntype (stmt
, TREE_TYPE (fn
));
2034 gimple_call_set_arg (stmt
, 0, dest
);
2035 gimple_call_set_arg (stmt
, 1, len
);
2036 gimple_call_set_arg (stmt
, 2, fmt
);
2037 for (unsigned i
= 3; i
< gimple_call_num_args (stmt
) - 2; ++i
)
2038 gimple_call_set_arg (stmt
, i
, gimple_call_arg (stmt
, i
+ 2));
2039 gimple_set_num_ops (stmt
, gimple_num_ops (stmt
) - 2);
2044 /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
2045 Return NULL_TREE if a normal call should be emitted rather than
2046 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
2047 or BUILT_IN_VSPRINTF_CHK. */
2050 gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator
*gsi
,
2051 enum built_in_function fcode
)
2053 gimple stmt
= gsi_stmt (*gsi
);
2054 tree dest
, size
, len
, fn
, fmt
, flag
;
2055 const char *fmt_str
;
2056 unsigned nargs
= gimple_call_num_args (stmt
);
2058 /* Verify the required arguments in the original call. */
2061 dest
= gimple_call_arg (stmt
, 0);
2062 flag
= gimple_call_arg (stmt
, 1);
2063 size
= gimple_call_arg (stmt
, 2);
2064 fmt
= gimple_call_arg (stmt
, 3);
2066 if (! tree_fits_uhwi_p (size
))
2071 if (!init_target_chars ())
2074 /* Check whether the format is a literal string constant. */
2075 fmt_str
= c_getstr (fmt
);
2076 if (fmt_str
!= NULL
)
2078 /* If the format doesn't contain % args or %%, we know the size. */
2079 if (strchr (fmt_str
, target_percent
) == 0)
2081 if (fcode
!= BUILT_IN_SPRINTF_CHK
|| nargs
== 4)
2082 len
= build_int_cstu (size_type_node
, strlen (fmt_str
));
2084 /* If the format is "%s" and first ... argument is a string literal,
2085 we know the size too. */
2086 else if (fcode
== BUILT_IN_SPRINTF_CHK
2087 && strcmp (fmt_str
, target_percent_s
) == 0)
2093 arg
= gimple_call_arg (stmt
, 4);
2094 if (POINTER_TYPE_P (TREE_TYPE (arg
)))
2096 len
= c_strlen (arg
, 1);
2097 if (! len
|| ! tree_fits_uhwi_p (len
))
2104 if (! integer_all_onesp (size
))
2106 if (! len
|| ! tree_int_cst_lt (len
, size
))
2110 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
2111 or if format doesn't contain % chars or is "%s". */
2112 if (! integer_zerop (flag
))
2114 if (fmt_str
== NULL
)
2116 if (strchr (fmt_str
, target_percent
) != NULL
2117 && strcmp (fmt_str
, target_percent_s
))
2121 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
2122 fn
= builtin_decl_explicit (fcode
== BUILT_IN_VSPRINTF_CHK
2123 ? BUILT_IN_VSPRINTF
: BUILT_IN_SPRINTF
);
2127 /* Replace the called function and the first 4 argument by 2 retaining
2128 trailing varargs. */
2129 gimple_call_set_fndecl (stmt
, fn
);
2130 gimple_call_set_fntype (stmt
, TREE_TYPE (fn
));
2131 gimple_call_set_arg (stmt
, 0, dest
);
2132 gimple_call_set_arg (stmt
, 1, fmt
);
2133 for (unsigned i
= 2; i
< gimple_call_num_args (stmt
) - 2; ++i
)
2134 gimple_call_set_arg (stmt
, i
, gimple_call_arg (stmt
, i
+ 2));
2135 gimple_set_num_ops (stmt
, gimple_num_ops (stmt
) - 2);
2140 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
2141 ORIG may be null if this is a 2-argument call. We don't attempt to
2142 simplify calls with more than 3 arguments.
2144 Return NULL_TREE if no simplification was possible, otherwise return the
2145 simplified form of the call as a tree. If IGNORED is true, it means that
2146 the caller does not use the returned value of the function. */
2149 gimple_fold_builtin_sprintf (gimple_stmt_iterator
*gsi
)
2151 gimple stmt
= gsi_stmt (*gsi
);
2152 tree dest
= gimple_call_arg (stmt
, 0);
2153 tree fmt
= gimple_call_arg (stmt
, 1);
2154 tree orig
= NULL_TREE
;
2155 const char *fmt_str
= NULL
;
2157 /* Verify the required arguments in the original call. We deal with two
2158 types of sprintf() calls: 'sprintf (str, fmt)' and
2159 'sprintf (dest, "%s", orig)'. */
2160 if (gimple_call_num_args (stmt
) > 3)
2163 if (gimple_call_num_args (stmt
) == 3)
2164 orig
= gimple_call_arg (stmt
, 2);
2166 /* Check whether the format is a literal string constant. */
2167 fmt_str
= c_getstr (fmt
);
2168 if (fmt_str
== NULL
)
2171 if (!init_target_chars ())
2174 /* If the format doesn't contain % args or %%, use strcpy. */
2175 if (strchr (fmt_str
, target_percent
) == NULL
)
2177 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
2182 /* Don't optimize sprintf (buf, "abc", ptr++). */
2186 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
2187 'format' is known to contain no % formats. */
2188 gimple_seq stmts
= NULL
;
2189 gimple repl
= gimple_build_call (fn
, 2, dest
, fmt
);
2190 gimple_seq_add_stmt_without_update (&stmts
, repl
);
2191 if (gimple_call_lhs (stmt
))
2193 repl
= gimple_build_assign (gimple_call_lhs (stmt
),
2194 build_int_cst (integer_type_node
,
2196 gimple_seq_add_stmt_without_update (&stmts
, repl
);
2197 gsi_replace_with_seq_vops (gsi
, stmts
);
2198 /* gsi now points at the assignment to the lhs, get a
2199 stmt iterator to the memcpy call.
2200 ??? We can't use gsi_for_stmt as that doesn't work when the
2201 CFG isn't built yet. */
2202 gimple_stmt_iterator gsi2
= *gsi
;
2208 gsi_replace_with_seq_vops (gsi
, stmts
);
2214 /* If the format is "%s", use strcpy if the result isn't used. */
2215 else if (fmt_str
&& strcmp (fmt_str
, target_percent_s
) == 0)
2218 fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
2223 /* Don't crash on sprintf (str1, "%s"). */
2227 tree orig_len
= NULL_TREE
;
2228 if (gimple_call_lhs (stmt
))
2230 orig_len
= get_maxval_strlen (orig
, 0);
2235 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
2236 gimple_seq stmts
= NULL
;
2237 gimple repl
= gimple_build_call (fn
, 2, dest
, orig
);
2238 gimple_seq_add_stmt_without_update (&stmts
, repl
);
2239 if (gimple_call_lhs (stmt
))
2241 if (!useless_type_conversion_p (integer_type_node
,
2242 TREE_TYPE (orig_len
)))
2243 orig_len
= fold_convert (integer_type_node
, orig_len
);
2244 repl
= gimple_build_assign (gimple_call_lhs (stmt
), orig_len
);
2245 gimple_seq_add_stmt_without_update (&stmts
, repl
);
2246 gsi_replace_with_seq_vops (gsi
, stmts
);
2247 /* gsi now points at the assignment to the lhs, get a
2248 stmt iterator to the memcpy call.
2249 ??? We can't use gsi_for_stmt as that doesn't work when the
2250 CFG isn't built yet. */
2251 gimple_stmt_iterator gsi2
= *gsi
;
2257 gsi_replace_with_seq_vops (gsi
, stmts
);
2265 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
2266 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
2267 attempt to simplify calls with more than 4 arguments.
2269 Return NULL_TREE if no simplification was possible, otherwise return the
2270 simplified form of the call as a tree. If IGNORED is true, it means that
2271 the caller does not use the returned value of the function. */
2274 gimple_fold_builtin_snprintf (gimple_stmt_iterator
*gsi
)
2276 gimple stmt
= gsi_stmt (*gsi
);
2277 tree dest
= gimple_call_arg (stmt
, 0);
2278 tree destsize
= gimple_call_arg (stmt
, 1);
2279 tree fmt
= gimple_call_arg (stmt
, 2);
2280 tree orig
= NULL_TREE
;
2281 const char *fmt_str
= NULL
;
2283 if (gimple_call_num_args (stmt
) > 4)
2286 if (gimple_call_num_args (stmt
) == 4)
2287 orig
= gimple_call_arg (stmt
, 3);
2289 if (!tree_fits_uhwi_p (destsize
))
2291 unsigned HOST_WIDE_INT destlen
= tree_to_uhwi (destsize
);
2293 /* Check whether the format is a literal string constant. */
2294 fmt_str
= c_getstr (fmt
);
2295 if (fmt_str
== NULL
)
2298 if (!init_target_chars ())
2301 /* If the format doesn't contain % args or %%, use strcpy. */
2302 if (strchr (fmt_str
, target_percent
) == NULL
)
2304 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
2308 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
2312 /* We could expand this as
2313 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
2315 memcpy (str, fmt_with_nul_at_cstm1, cst);
2316 but in the former case that might increase code size
2317 and in the latter case grow .rodata section too much.
2319 size_t len
= strlen (fmt_str
);
2323 gimple_seq stmts
= NULL
;
2324 gimple repl
= gimple_build_call (fn
, 2, dest
, fmt
);
2325 gimple_seq_add_stmt_without_update (&stmts
, repl
);
2326 if (gimple_call_lhs (stmt
))
2328 repl
= gimple_build_assign (gimple_call_lhs (stmt
),
2329 build_int_cst (integer_type_node
, len
));
2330 gimple_seq_add_stmt_without_update (&stmts
, repl
);
2331 gsi_replace_with_seq_vops (gsi
, stmts
);
2332 /* gsi now points at the assignment to the lhs, get a
2333 stmt iterator to the memcpy call.
2334 ??? We can't use gsi_for_stmt as that doesn't work when the
2335 CFG isn't built yet. */
2336 gimple_stmt_iterator gsi2
= *gsi
;
2342 gsi_replace_with_seq_vops (gsi
, stmts
);
2348 /* If the format is "%s", use strcpy if the result isn't used. */
2349 else if (fmt_str
&& strcmp (fmt_str
, target_percent_s
) == 0)
2351 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
2355 /* Don't crash on snprintf (str1, cst, "%s"). */
2359 tree orig_len
= get_maxval_strlen (orig
, 0);
2363 /* We could expand this as
2364 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
2366 memcpy (str1, str2_with_nul_at_cstm1, cst);
2367 but in the former case that might increase code size
2368 and in the latter case grow .rodata section too much.
2370 if (compare_tree_int (orig_len
, destlen
) >= 0)
2373 /* Convert snprintf (str1, cst, "%s", str2) into
2374 strcpy (str1, str2) if strlen (str2) < cst. */
2375 gimple_seq stmts
= NULL
;
2376 gimple repl
= gimple_build_call (fn
, 2, dest
, orig
);
2377 gimple_seq_add_stmt_without_update (&stmts
, repl
);
2378 if (gimple_call_lhs (stmt
))
2380 if (!useless_type_conversion_p (integer_type_node
,
2381 TREE_TYPE (orig_len
)))
2382 orig_len
= fold_convert (integer_type_node
, orig_len
);
2383 repl
= gimple_build_assign (gimple_call_lhs (stmt
), orig_len
);
2384 gimple_seq_add_stmt_without_update (&stmts
, repl
);
2385 gsi_replace_with_seq_vops (gsi
, stmts
);
2386 /* gsi now points at the assignment to the lhs, get a
2387 stmt iterator to the memcpy call.
2388 ??? We can't use gsi_for_stmt as that doesn't work when the
2389 CFG isn't built yet. */
2390 gimple_stmt_iterator gsi2
= *gsi
;
2396 gsi_replace_with_seq_vops (gsi
, stmts
);
2405 /* Fold a call to __builtin_strlen with known length LEN. */
2408 gimple_fold_builtin_strlen (gimple_stmt_iterator
*gsi
)
2410 gimple stmt
= gsi_stmt (*gsi
);
2411 tree len
= get_maxval_strlen (gimple_call_arg (stmt
, 0), 0);
2414 replace_call_with_value (gsi
, len
);
2419 /* Fold the non-target builtin at *GSI and return whether any simplification
2423 gimple_fold_builtin (gimple_stmt_iterator
*gsi
)
2425 gimple stmt
= gsi_stmt (*gsi
);
2426 tree callee
= gimple_call_fndecl (stmt
);
2428 /* Give up for always_inline inline builtins until they are
2430 if (avoid_folding_inline_builtin (callee
))
2433 switch (DECL_FUNCTION_CODE (callee
))
2435 case BUILT_IN_BZERO
:
2436 return gimple_fold_builtin_memset (gsi
, integer_zero_node
,
2437 gimple_call_arg (stmt
, 1));
2438 case BUILT_IN_MEMSET
:
2439 return gimple_fold_builtin_memset (gsi
,
2440 gimple_call_arg (stmt
, 1),
2441 gimple_call_arg (stmt
, 2));
2442 case BUILT_IN_BCOPY
:
2443 return gimple_fold_builtin_memory_op (gsi
, gimple_call_arg (stmt
, 1),
2444 gimple_call_arg (stmt
, 0), 3);
2445 case BUILT_IN_MEMCPY
:
2446 return gimple_fold_builtin_memory_op (gsi
, gimple_call_arg (stmt
, 0),
2447 gimple_call_arg (stmt
, 1), 0);
2448 case BUILT_IN_MEMPCPY
:
2449 return gimple_fold_builtin_memory_op (gsi
, gimple_call_arg (stmt
, 0),
2450 gimple_call_arg (stmt
, 1), 1);
2451 case BUILT_IN_MEMMOVE
:
2452 return gimple_fold_builtin_memory_op (gsi
, gimple_call_arg (stmt
, 0),
2453 gimple_call_arg (stmt
, 1), 3);
2454 case BUILT_IN_SPRINTF_CHK
:
2455 case BUILT_IN_VSPRINTF_CHK
:
2456 return gimple_fold_builtin_sprintf_chk (gsi
, DECL_FUNCTION_CODE (callee
));
2457 case BUILT_IN_STRCAT_CHK
:
2458 return gimple_fold_builtin_strcat_chk (gsi
);
2459 case BUILT_IN_STRLEN
:
2460 return gimple_fold_builtin_strlen (gsi
);
2461 case BUILT_IN_STRCPY
:
2462 return gimple_fold_builtin_strcpy (gsi
,
2463 gimple_call_arg (stmt
, 0),
2464 gimple_call_arg (stmt
, 1));
2465 case BUILT_IN_STRNCPY
:
2466 return gimple_fold_builtin_strncpy (gsi
,
2467 gimple_call_arg (stmt
, 0),
2468 gimple_call_arg (stmt
, 1),
2469 gimple_call_arg (stmt
, 2));
2470 case BUILT_IN_STRCAT
:
2471 return gimple_fold_builtin_strcat (gsi
, gimple_call_arg (stmt
, 0),
2472 gimple_call_arg (stmt
, 1));
2473 case BUILT_IN_FPUTS
:
2474 return gimple_fold_builtin_fputs (gsi
, gimple_call_arg (stmt
, 0),
2475 gimple_call_arg (stmt
, 1), false);
2476 case BUILT_IN_FPUTS_UNLOCKED
:
2477 return gimple_fold_builtin_fputs (gsi
, gimple_call_arg (stmt
, 0),
2478 gimple_call_arg (stmt
, 1), true);
2479 case BUILT_IN_MEMCPY_CHK
:
2480 case BUILT_IN_MEMPCPY_CHK
:
2481 case BUILT_IN_MEMMOVE_CHK
:
2482 case BUILT_IN_MEMSET_CHK
:
2483 return gimple_fold_builtin_memory_chk (gsi
,
2484 gimple_call_arg (stmt
, 0),
2485 gimple_call_arg (stmt
, 1),
2486 gimple_call_arg (stmt
, 2),
2487 gimple_call_arg (stmt
, 3),
2488 DECL_FUNCTION_CODE (callee
));
2489 case BUILT_IN_STRCPY_CHK
:
2490 case BUILT_IN_STPCPY_CHK
:
2491 return gimple_fold_builtin_stxcpy_chk (gsi
,
2492 gimple_call_arg (stmt
, 0),
2493 gimple_call_arg (stmt
, 1),
2494 gimple_call_arg (stmt
, 2),
2495 DECL_FUNCTION_CODE (callee
));
2496 case BUILT_IN_STRNCPY_CHK
:
2497 case BUILT_IN_STPNCPY_CHK
:
2498 return gimple_fold_builtin_stxncpy_chk (gsi
,
2499 gimple_call_arg (stmt
, 0),
2500 gimple_call_arg (stmt
, 1),
2501 gimple_call_arg (stmt
, 2),
2502 gimple_call_arg (stmt
, 3),
2503 DECL_FUNCTION_CODE (callee
));
2504 case BUILT_IN_SNPRINTF_CHK
:
2505 case BUILT_IN_VSNPRINTF_CHK
:
2506 return gimple_fold_builtin_snprintf_chk (gsi
,
2507 DECL_FUNCTION_CODE (callee
));
2508 case BUILT_IN_SNPRINTF
:
2509 return gimple_fold_builtin_snprintf (gsi
);
2510 case BUILT_IN_SPRINTF
:
2511 return gimple_fold_builtin_sprintf (gsi
);
2515 /* Try the generic builtin folder. */
2516 bool ignore
= (gimple_call_lhs (stmt
) == NULL
);
2517 tree result
= fold_call_stmt (stmt
, ignore
);
2521 STRIP_NOPS (result
);
2523 result
= fold_convert (gimple_call_return_type (stmt
), result
);
2524 if (!update_call_from_tree (gsi
, result
))
2525 gimplify_and_update_call_from_tree (gsi
, result
);
2532 /* Attempt to fold a call statement referenced by the statement iterator GSI.
2533 The statement may be replaced by another statement, e.g., if the call
2534 simplifies to a constant value. Return true if any changes were made.
2535 It is assumed that the operands have been previously folded. */
2538 gimple_fold_call (gimple_stmt_iterator
*gsi
, bool inplace
)
2540 gimple stmt
= gsi_stmt (*gsi
);
2542 bool changed
= false;
2545 /* Fold *& in call arguments. */
2546 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
2547 if (REFERENCE_CLASS_P (gimple_call_arg (stmt
, i
)))
2549 tree tmp
= maybe_fold_reference (gimple_call_arg (stmt
, i
), false);
2552 gimple_call_set_arg (stmt
, i
, tmp
);
2557 /* Check for virtual calls that became direct calls. */
2558 callee
= gimple_call_fn (stmt
);
2559 if (callee
&& TREE_CODE (callee
) == OBJ_TYPE_REF
)
2561 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee
)) != NULL_TREE
)
2563 if (dump_file
&& virtual_method_call_p (callee
)
2564 && !possible_polymorphic_call_target_p
2565 (callee
, cgraph_node::get (gimple_call_addr_fndecl
2566 (OBJ_TYPE_REF_EXPR (callee
)))))
2569 "Type inheritance inconsistent devirtualization of ");
2570 print_gimple_stmt (dump_file
, stmt
, 0, TDF_SLIM
);
2571 fprintf (dump_file
, " to ");
2572 print_generic_expr (dump_file
, callee
, TDF_SLIM
);
2573 fprintf (dump_file
, "\n");
2576 gimple_call_set_fn (stmt
, OBJ_TYPE_REF_EXPR (callee
));
2579 else if (flag_devirtualize
&& !inplace
&& virtual_method_call_p (callee
))
2582 vec
<cgraph_node
*>targets
2583 = possible_polymorphic_call_targets (callee
, stmt
, &final
);
2584 if (final
&& targets
.length () <= 1 && dbg_cnt (devirt
))
2586 tree lhs
= gimple_call_lhs (stmt
);
2587 if (dump_enabled_p ())
2589 location_t loc
= gimple_location_safe (stmt
);
2590 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS
, loc
,
2591 "folding virtual function call to %s\n",
2592 targets
.length () == 1
2593 ? targets
[0]->name ()
2594 : "__builtin_unreachable");
2596 if (targets
.length () == 1)
2598 gimple_call_set_fndecl (stmt
, targets
[0]->decl
);
2600 /* If the call becomes noreturn, remove the lhs. */
2601 if (lhs
&& (gimple_call_flags (stmt
) & ECF_NORETURN
))
2603 if (TREE_CODE (lhs
) == SSA_NAME
)
2605 tree var
= create_tmp_var (TREE_TYPE (lhs
), NULL
);
2606 tree def
= get_or_create_ssa_default_def (cfun
, var
);
2607 gimple new_stmt
= gimple_build_assign (lhs
, def
);
2608 gsi_insert_before (gsi
, new_stmt
, GSI_SAME_STMT
);
2610 gimple_call_set_lhs (stmt
, NULL_TREE
);
2615 tree fndecl
= builtin_decl_implicit (BUILT_IN_UNREACHABLE
);
2616 gimple new_stmt
= gimple_build_call (fndecl
, 0);
2617 gimple_set_location (new_stmt
, gimple_location (stmt
));
2618 if (lhs
&& TREE_CODE (lhs
) == SSA_NAME
)
2620 tree var
= create_tmp_var (TREE_TYPE (lhs
), NULL
);
2621 tree def
= get_or_create_ssa_default_def (cfun
, var
);
2623 /* To satisfy condition for
2624 cgraph_update_edges_for_call_stmt_node,
2625 we need to preserve GIMPLE_CALL statement
2626 at position of GSI iterator. */
2627 update_call_from_tree (gsi
, def
);
2628 gsi_insert_before (gsi
, new_stmt
, GSI_NEW_STMT
);
2631 gsi_replace (gsi
, new_stmt
, true);
2641 /* Check for builtins that CCP can handle using information not
2642 available in the generic fold routines. */
2643 if (gimple_call_builtin_p (stmt
, BUILT_IN_NORMAL
))
2645 if (gimple_fold_builtin (gsi
))
2648 else if (gimple_call_builtin_p (stmt
, BUILT_IN_MD
))
2650 changed
|= targetm
.gimple_fold_builtin (gsi
);
2652 else if (gimple_call_internal_p (stmt
))
2654 enum tree_code subcode
= ERROR_MARK
;
2655 tree result
= NULL_TREE
;
2656 switch (gimple_call_internal_fn (stmt
))
2658 case IFN_BUILTIN_EXPECT
:
2659 result
= fold_builtin_expect (gimple_location (stmt
),
2660 gimple_call_arg (stmt
, 0),
2661 gimple_call_arg (stmt
, 1),
2662 gimple_call_arg (stmt
, 2));
2664 case IFN_UBSAN_CHECK_ADD
:
2665 subcode
= PLUS_EXPR
;
2667 case IFN_UBSAN_CHECK_SUB
:
2668 subcode
= MINUS_EXPR
;
2670 case IFN_UBSAN_CHECK_MUL
:
2671 subcode
= MULT_EXPR
;
2676 if (subcode
!= ERROR_MARK
)
2678 tree arg0
= gimple_call_arg (stmt
, 0);
2679 tree arg1
= gimple_call_arg (stmt
, 1);
2680 /* x = y + 0; x = y - 0; x = y * 0; */
2681 if (integer_zerop (arg1
))
2682 result
= subcode
== MULT_EXPR
2683 ? build_zero_cst (TREE_TYPE (arg0
))
2685 /* x = 0 + y; x = 0 * y; */
2686 else if (subcode
!= MINUS_EXPR
&& integer_zerop (arg0
))
2687 result
= subcode
== MULT_EXPR
2688 ? build_zero_cst (TREE_TYPE (arg0
))
2691 else if (subcode
== MINUS_EXPR
&& operand_equal_p (arg0
, arg1
, 0))
2692 result
= build_zero_cst (TREE_TYPE (arg0
));
2693 /* x = y * 1; x = 1 * y; */
2694 else if (subcode
== MULT_EXPR
)
2696 if (integer_onep (arg1
))
2698 else if (integer_onep (arg0
))
2704 if (!update_call_from_tree (gsi
, result
))
2705 gimplify_and_update_call_from_tree (gsi
, result
);
2713 /* Canonicalize MEM_REFs invariant address operand after propagation. */
2716 maybe_canonicalize_mem_ref_addr (tree
*t
)
2720 if (TREE_CODE (*t
) == ADDR_EXPR
)
2721 t
= &TREE_OPERAND (*t
, 0);
2723 while (handled_component_p (*t
))
2724 t
= &TREE_OPERAND (*t
, 0);
2726 /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
2727 of invariant addresses into a SSA name MEM_REF address. */
2728 if (TREE_CODE (*t
) == MEM_REF
2729 || TREE_CODE (*t
) == TARGET_MEM_REF
)
2731 tree addr
= TREE_OPERAND (*t
, 0);
2732 if (TREE_CODE (addr
) == ADDR_EXPR
2733 && (TREE_CODE (TREE_OPERAND (addr
, 0)) == MEM_REF
2734 || handled_component_p (TREE_OPERAND (addr
, 0))))
2737 HOST_WIDE_INT coffset
;
2738 base
= get_addr_base_and_unit_offset (TREE_OPERAND (addr
, 0),
2743 TREE_OPERAND (*t
, 0) = build_fold_addr_expr (base
);
2744 TREE_OPERAND (*t
, 1) = int_const_binop (PLUS_EXPR
,
2745 TREE_OPERAND (*t
, 1),
2746 size_int (coffset
));
2749 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t
, 0)) == DEBUG_EXPR_DECL
2750 || is_gimple_mem_ref_addr (TREE_OPERAND (*t
, 0)));
2753 /* Canonicalize back MEM_REFs to plain reference trees if the object
2754 accessed is a decl that has the same access semantics as the MEM_REF. */
2755 if (TREE_CODE (*t
) == MEM_REF
2756 && TREE_CODE (TREE_OPERAND (*t
, 0)) == ADDR_EXPR
2757 && integer_zerop (TREE_OPERAND (*t
, 1)))
2759 tree decl
= TREE_OPERAND (TREE_OPERAND (*t
, 0), 0);
2760 tree alias_type
= TREE_TYPE (TREE_OPERAND (*t
, 1));
2761 if (/* Same volatile qualification. */
2762 TREE_THIS_VOLATILE (*t
) == TREE_THIS_VOLATILE (decl
)
2763 /* Same TBAA behavior with -fstrict-aliasing. */
2764 && !TYPE_REF_CAN_ALIAS_ALL (alias_type
)
2765 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl
))
2766 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type
)))
2767 /* Same alignment. */
2768 && TYPE_ALIGN (TREE_TYPE (decl
)) == TYPE_ALIGN (TREE_TYPE (*t
))
2769 /* We have to look out here to not drop a required conversion
2770 from the rhs to the lhs if *t appears on the lhs or vice-versa
2771 if it appears on the rhs. Thus require strict type
2773 && types_compatible_p (TREE_TYPE (*t
), TREE_TYPE (decl
)))
2775 *t
= TREE_OPERAND (TREE_OPERAND (*t
, 0), 0);
2780 /* Canonicalize TARGET_MEM_REF in particular with respect to
2781 the indexes becoming constant. */
2782 else if (TREE_CODE (*t
) == TARGET_MEM_REF
)
2784 tree tem
= maybe_fold_tmr (*t
);
2795 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
2796 distinguishes both cases. */
2799 fold_stmt_1 (gimple_stmt_iterator
*gsi
, bool inplace
)
2801 bool changed
= false;
2802 gimple stmt
= gsi_stmt (*gsi
);
2805 /* First do required canonicalization of [TARGET_]MEM_REF addresses
2807 ??? This shouldn't be done in generic folding but in the
2808 propagation helpers which also know whether an address was
2810 switch (gimple_code (stmt
))
2813 if (gimple_assign_rhs_class (stmt
) == GIMPLE_SINGLE_RHS
)
2815 tree
*rhs
= gimple_assign_rhs1_ptr (stmt
);
2816 if ((REFERENCE_CLASS_P (*rhs
)
2817 || TREE_CODE (*rhs
) == ADDR_EXPR
)
2818 && maybe_canonicalize_mem_ref_addr (rhs
))
2820 tree
*lhs
= gimple_assign_lhs_ptr (stmt
);
2821 if (REFERENCE_CLASS_P (*lhs
)
2822 && maybe_canonicalize_mem_ref_addr (lhs
))
2828 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
2830 tree
*arg
= gimple_call_arg_ptr (stmt
, i
);
2831 if (REFERENCE_CLASS_P (*arg
)
2832 && maybe_canonicalize_mem_ref_addr (arg
))
2835 tree
*lhs
= gimple_call_lhs_ptr (stmt
);
2837 && REFERENCE_CLASS_P (*lhs
)
2838 && maybe_canonicalize_mem_ref_addr (lhs
))
2844 for (i
= 0; i
< gimple_asm_noutputs (stmt
); ++i
)
2846 tree link
= gimple_asm_output_op (stmt
, i
);
2847 tree op
= TREE_VALUE (link
);
2848 if (REFERENCE_CLASS_P (op
)
2849 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link
)))
2852 for (i
= 0; i
< gimple_asm_ninputs (stmt
); ++i
)
2854 tree link
= gimple_asm_input_op (stmt
, i
);
2855 tree op
= TREE_VALUE (link
);
2856 if ((REFERENCE_CLASS_P (op
)
2857 || TREE_CODE (op
) == ADDR_EXPR
)
2858 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link
)))
2864 if (gimple_debug_bind_p (stmt
))
2866 tree
*val
= gimple_debug_bind_get_value_ptr (stmt
);
2868 && (REFERENCE_CLASS_P (*val
)
2869 || TREE_CODE (*val
) == ADDR_EXPR
)
2870 && maybe_canonicalize_mem_ref_addr (val
))
2877 /* Fold the main computation performed by the statement. */
2878 switch (gimple_code (stmt
))
2882 unsigned old_num_ops
= gimple_num_ops (stmt
);
2883 enum tree_code subcode
= gimple_assign_rhs_code (stmt
);
2884 tree lhs
= gimple_assign_lhs (stmt
);
2886 /* First canonicalize operand order. This avoids building new
2887 trees if this is the only thing fold would later do. */
2888 if ((commutative_tree_code (subcode
)
2889 || commutative_ternary_tree_code (subcode
))
2890 && tree_swap_operands_p (gimple_assign_rhs1 (stmt
),
2891 gimple_assign_rhs2 (stmt
), false))
2893 tree tem
= gimple_assign_rhs1 (stmt
);
2894 gimple_assign_set_rhs1 (stmt
, gimple_assign_rhs2 (stmt
));
2895 gimple_assign_set_rhs2 (stmt
, tem
);
2898 new_rhs
= fold_gimple_assign (gsi
);
2900 && !useless_type_conversion_p (TREE_TYPE (lhs
),
2901 TREE_TYPE (new_rhs
)))
2902 new_rhs
= fold_convert (TREE_TYPE (lhs
), new_rhs
);
2905 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs
)) < old_num_ops
))
2907 gimple_assign_set_rhs_from_tree (gsi
, new_rhs
);
2914 changed
|= fold_gimple_cond (stmt
);
2918 changed
|= gimple_fold_call (gsi
, inplace
);
2922 /* Fold *& in asm operands. */
2925 const char **oconstraints
;
2926 const char *constraint
;
2927 bool allows_mem
, allows_reg
;
2929 noutputs
= gimple_asm_noutputs (stmt
);
2930 oconstraints
= XALLOCAVEC (const char *, noutputs
);
2932 for (i
= 0; i
< gimple_asm_noutputs (stmt
); ++i
)
2934 tree link
= gimple_asm_output_op (stmt
, i
);
2935 tree op
= TREE_VALUE (link
);
2937 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link
)));
2938 if (REFERENCE_CLASS_P (op
)
2939 && (op
= maybe_fold_reference (op
, true)) != NULL_TREE
)
2941 TREE_VALUE (link
) = op
;
2945 for (i
= 0; i
< gimple_asm_ninputs (stmt
); ++i
)
2947 tree link
= gimple_asm_input_op (stmt
, i
);
2948 tree op
= TREE_VALUE (link
);
2950 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link
)));
2951 parse_input_constraint (&constraint
, 0, 0, noutputs
, 0,
2952 oconstraints
, &allows_mem
, &allows_reg
);
2953 if (REFERENCE_CLASS_P (op
)
2954 && (op
= maybe_fold_reference (op
, !allows_reg
&& allows_mem
))
2957 TREE_VALUE (link
) = op
;
2965 if (gimple_debug_bind_p (stmt
))
2967 tree val
= gimple_debug_bind_get_value (stmt
);
2969 && REFERENCE_CLASS_P (val
))
2971 tree tem
= maybe_fold_reference (val
, false);
2974 gimple_debug_bind_set_value (stmt
, tem
);
2979 && TREE_CODE (val
) == ADDR_EXPR
)
2981 tree ref
= TREE_OPERAND (val
, 0);
2982 tree tem
= maybe_fold_reference (ref
, false);
2985 tem
= build_fold_addr_expr_with_type (tem
, TREE_TYPE (val
));
2986 gimple_debug_bind_set_value (stmt
, tem
);
2996 stmt
= gsi_stmt (*gsi
);
2998 /* Fold *& on the lhs. */
2999 if (gimple_has_lhs (stmt
))
3001 tree lhs
= gimple_get_lhs (stmt
);
3002 if (lhs
&& REFERENCE_CLASS_P (lhs
))
3004 tree new_lhs
= maybe_fold_reference (lhs
, true);
3007 gimple_set_lhs (stmt
, new_lhs
);
3016 /* Fold the statement pointed to by GSI. In some cases, this function may
3017 replace the whole statement with a new one. Returns true iff folding
3019 The statement pointed to by GSI should be in valid gimple form but may
3020 be in unfolded state as resulting from for example constant propagation
3021 which can produce *&x = 0. */
3024 fold_stmt (gimple_stmt_iterator
*gsi
)
3026 return fold_stmt_1 (gsi
, false);
3029 /* Perform the minimal folding on statement *GSI. Only operations like
3030 *&x created by constant propagation are handled. The statement cannot
3031 be replaced with a new one. Return true if the statement was
3032 changed, false otherwise.
3033 The statement *GSI should be in valid gimple form but may
3034 be in unfolded state as resulting from for example constant propagation
3035 which can produce *&x = 0. */
3038 fold_stmt_inplace (gimple_stmt_iterator
*gsi
)
3040 gimple stmt
= gsi_stmt (*gsi
);
3041 bool changed
= fold_stmt_1 (gsi
, true);
3042 gcc_assert (gsi_stmt (*gsi
) == stmt
);
3046 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
3047 if EXPR is null or we don't know how.
3048 If non-null, the result always has boolean type. */
3051 canonicalize_bool (tree expr
, bool invert
)
3057 if (integer_nonzerop (expr
))
3058 return boolean_false_node
;
3059 else if (integer_zerop (expr
))
3060 return boolean_true_node
;
3061 else if (TREE_CODE (expr
) == SSA_NAME
)
3062 return fold_build2 (EQ_EXPR
, boolean_type_node
, expr
,
3063 build_int_cst (TREE_TYPE (expr
), 0));
3064 else if (TREE_CODE_CLASS (TREE_CODE (expr
)) == tcc_comparison
)
3065 return fold_build2 (invert_tree_comparison (TREE_CODE (expr
), false),
3067 TREE_OPERAND (expr
, 0),
3068 TREE_OPERAND (expr
, 1));
3074 if (TREE_CODE (TREE_TYPE (expr
)) == BOOLEAN_TYPE
)
3076 if (integer_nonzerop (expr
))
3077 return boolean_true_node
;
3078 else if (integer_zerop (expr
))
3079 return boolean_false_node
;
3080 else if (TREE_CODE (expr
) == SSA_NAME
)
3081 return fold_build2 (NE_EXPR
, boolean_type_node
, expr
,
3082 build_int_cst (TREE_TYPE (expr
), 0));
3083 else if (TREE_CODE_CLASS (TREE_CODE (expr
)) == tcc_comparison
)
3084 return fold_build2 (TREE_CODE (expr
),
3086 TREE_OPERAND (expr
, 0),
3087 TREE_OPERAND (expr
, 1));
3093 /* Check to see if a boolean expression EXPR is logically equivalent to the
3094 comparison (OP1 CODE OP2). Check for various identities involving
3098 same_bool_comparison_p (const_tree expr
, enum tree_code code
,
3099 const_tree op1
, const_tree op2
)
3103 /* The obvious case. */
3104 if (TREE_CODE (expr
) == code
3105 && operand_equal_p (TREE_OPERAND (expr
, 0), op1
, 0)
3106 && operand_equal_p (TREE_OPERAND (expr
, 1), op2
, 0))
3109 /* Check for comparing (name, name != 0) and the case where expr
3110 is an SSA_NAME with a definition matching the comparison. */
3111 if (TREE_CODE (expr
) == SSA_NAME
3112 && TREE_CODE (TREE_TYPE (expr
)) == BOOLEAN_TYPE
)
3114 if (operand_equal_p (expr
, op1
, 0))
3115 return ((code
== NE_EXPR
&& integer_zerop (op2
))
3116 || (code
== EQ_EXPR
&& integer_nonzerop (op2
)));
3117 s
= SSA_NAME_DEF_STMT (expr
);
3118 if (is_gimple_assign (s
)
3119 && gimple_assign_rhs_code (s
) == code
3120 && operand_equal_p (gimple_assign_rhs1 (s
), op1
, 0)
3121 && operand_equal_p (gimple_assign_rhs2 (s
), op2
, 0))
3125 /* If op1 is of the form (name != 0) or (name == 0), and the definition
3126 of name is a comparison, recurse. */
3127 if (TREE_CODE (op1
) == SSA_NAME
3128 && TREE_CODE (TREE_TYPE (op1
)) == BOOLEAN_TYPE
)
3130 s
= SSA_NAME_DEF_STMT (op1
);
3131 if (is_gimple_assign (s
)
3132 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
)
3134 enum tree_code c
= gimple_assign_rhs_code (s
);
3135 if ((c
== NE_EXPR
&& integer_zerop (op2
))
3136 || (c
== EQ_EXPR
&& integer_nonzerop (op2
)))
3137 return same_bool_comparison_p (expr
, c
,
3138 gimple_assign_rhs1 (s
),
3139 gimple_assign_rhs2 (s
));
3140 if ((c
== EQ_EXPR
&& integer_zerop (op2
))
3141 || (c
== NE_EXPR
&& integer_nonzerop (op2
)))
3142 return same_bool_comparison_p (expr
,
3143 invert_tree_comparison (c
, false),
3144 gimple_assign_rhs1 (s
),
3145 gimple_assign_rhs2 (s
));
3151 /* Check to see if two boolean expressions OP1 and OP2 are logically
3155 same_bool_result_p (const_tree op1
, const_tree op2
)
3157 /* Simple cases first. */
3158 if (operand_equal_p (op1
, op2
, 0))
3161 /* Check the cases where at least one of the operands is a comparison.
3162 These are a bit smarter than operand_equal_p in that they apply some
3163 identifies on SSA_NAMEs. */
3164 if (TREE_CODE_CLASS (TREE_CODE (op2
)) == tcc_comparison
3165 && same_bool_comparison_p (op1
, TREE_CODE (op2
),
3166 TREE_OPERAND (op2
, 0),
3167 TREE_OPERAND (op2
, 1)))
3169 if (TREE_CODE_CLASS (TREE_CODE (op1
)) == tcc_comparison
3170 && same_bool_comparison_p (op2
, TREE_CODE (op1
),
3171 TREE_OPERAND (op1
, 0),
3172 TREE_OPERAND (op1
, 1)))
3179 /* Forward declarations for some mutually recursive functions. */
3182 and_comparisons_1 (enum tree_code code1
, tree op1a
, tree op1b
,
3183 enum tree_code code2
, tree op2a
, tree op2b
);
3185 and_var_with_comparison (tree var
, bool invert
,
3186 enum tree_code code2
, tree op2a
, tree op2b
);
3188 and_var_with_comparison_1 (gimple stmt
,
3189 enum tree_code code2
, tree op2a
, tree op2b
);
3191 or_comparisons_1 (enum tree_code code1
, tree op1a
, tree op1b
,
3192 enum tree_code code2
, tree op2a
, tree op2b
);
3194 or_var_with_comparison (tree var
, bool invert
,
3195 enum tree_code code2
, tree op2a
, tree op2b
);
3197 or_var_with_comparison_1 (gimple stmt
,
3198 enum tree_code code2
, tree op2a
, tree op2b
);
3200 /* Helper function for and_comparisons_1: try to simplify the AND of the
3201 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
3202 If INVERT is true, invert the value of the VAR before doing the AND.
3203 Return NULL_EXPR if we can't simplify this to a single expression. */
3206 and_var_with_comparison (tree var
, bool invert
,
3207 enum tree_code code2
, tree op2a
, tree op2b
)
3210 gimple stmt
= SSA_NAME_DEF_STMT (var
);
3212 /* We can only deal with variables whose definitions are assignments. */
3213 if (!is_gimple_assign (stmt
))
3216 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
3217 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
3218 Then we only have to consider the simpler non-inverted cases. */
3220 t
= or_var_with_comparison_1 (stmt
,
3221 invert_tree_comparison (code2
, false),
3224 t
= and_var_with_comparison_1 (stmt
, code2
, op2a
, op2b
);
3225 return canonicalize_bool (t
, invert
);
3228 /* Try to simplify the AND of the ssa variable defined by the assignment
3229 STMT with the comparison specified by (OP2A CODE2 OP2B).
3230 Return NULL_EXPR if we can't simplify this to a single expression. */
3233 and_var_with_comparison_1 (gimple stmt
,
3234 enum tree_code code2
, tree op2a
, tree op2b
)
3236 tree var
= gimple_assign_lhs (stmt
);
3237 tree true_test_var
= NULL_TREE
;
3238 tree false_test_var
= NULL_TREE
;
3239 enum tree_code innercode
= gimple_assign_rhs_code (stmt
);
3241 /* Check for identities like (var AND (var == 0)) => false. */
3242 if (TREE_CODE (op2a
) == SSA_NAME
3243 && TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
)
3245 if ((code2
== NE_EXPR
&& integer_zerop (op2b
))
3246 || (code2
== EQ_EXPR
&& integer_nonzerop (op2b
)))
3248 true_test_var
= op2a
;
3249 if (var
== true_test_var
)
3252 else if ((code2
== EQ_EXPR
&& integer_zerop (op2b
))
3253 || (code2
== NE_EXPR
&& integer_nonzerop (op2b
)))
3255 false_test_var
= op2a
;
3256 if (var
== false_test_var
)
3257 return boolean_false_node
;
3261 /* If the definition is a comparison, recurse on it. */
3262 if (TREE_CODE_CLASS (innercode
) == tcc_comparison
)
3264 tree t
= and_comparisons_1 (innercode
,
3265 gimple_assign_rhs1 (stmt
),
3266 gimple_assign_rhs2 (stmt
),
3274 /* If the definition is an AND or OR expression, we may be able to
3275 simplify by reassociating. */
3276 if (TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
3277 && (innercode
== BIT_AND_EXPR
|| innercode
== BIT_IOR_EXPR
))
3279 tree inner1
= gimple_assign_rhs1 (stmt
);
3280 tree inner2
= gimple_assign_rhs2 (stmt
);
3283 tree partial
= NULL_TREE
;
3284 bool is_and
= (innercode
== BIT_AND_EXPR
);
3286 /* Check for boolean identities that don't require recursive examination
3288 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
3289 inner1 AND (inner1 OR inner2) => inner1
3290 !inner1 AND (inner1 AND inner2) => false
3291 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
3292 Likewise for similar cases involving inner2. */
3293 if (inner1
== true_test_var
)
3294 return (is_and
? var
: inner1
);
3295 else if (inner2
== true_test_var
)
3296 return (is_and
? var
: inner2
);
3297 else if (inner1
== false_test_var
)
3299 ? boolean_false_node
3300 : and_var_with_comparison (inner2
, false, code2
, op2a
, op2b
));
3301 else if (inner2
== false_test_var
)
3303 ? boolean_false_node
3304 : and_var_with_comparison (inner1
, false, code2
, op2a
, op2b
));
3306 /* Next, redistribute/reassociate the AND across the inner tests.
3307 Compute the first partial result, (inner1 AND (op2a code op2b)) */
3308 if (TREE_CODE (inner1
) == SSA_NAME
3309 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner1
))
3310 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
3311 && (t
= maybe_fold_and_comparisons (gimple_assign_rhs_code (s
),
3312 gimple_assign_rhs1 (s
),
3313 gimple_assign_rhs2 (s
),
3314 code2
, op2a
, op2b
)))
3316 /* Handle the AND case, where we are reassociating:
3317 (inner1 AND inner2) AND (op2a code2 op2b)
3319 If the partial result t is a constant, we win. Otherwise
3320 continue on to try reassociating with the other inner test. */
3323 if (integer_onep (t
))
3325 else if (integer_zerop (t
))
3326 return boolean_false_node
;
3329 /* Handle the OR case, where we are redistributing:
3330 (inner1 OR inner2) AND (op2a code2 op2b)
3331 => (t OR (inner2 AND (op2a code2 op2b))) */
3332 else if (integer_onep (t
))
3333 return boolean_true_node
;
3335 /* Save partial result for later. */
3339 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
3340 if (TREE_CODE (inner2
) == SSA_NAME
3341 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner2
))
3342 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
3343 && (t
= maybe_fold_and_comparisons (gimple_assign_rhs_code (s
),
3344 gimple_assign_rhs1 (s
),
3345 gimple_assign_rhs2 (s
),
3346 code2
, op2a
, op2b
)))
3348 /* Handle the AND case, where we are reassociating:
3349 (inner1 AND inner2) AND (op2a code2 op2b)
3350 => (inner1 AND t) */
3353 if (integer_onep (t
))
3355 else if (integer_zerop (t
))
3356 return boolean_false_node
;
3357 /* If both are the same, we can apply the identity
3359 else if (partial
&& same_bool_result_p (t
, partial
))
3363 /* Handle the OR case. where we are redistributing:
3364 (inner1 OR inner2) AND (op2a code2 op2b)
3365 => (t OR (inner1 AND (op2a code2 op2b)))
3366 => (t OR partial) */
3369 if (integer_onep (t
))
3370 return boolean_true_node
;
3373 /* We already got a simplification for the other
3374 operand to the redistributed OR expression. The
3375 interesting case is when at least one is false.
3376 Or, if both are the same, we can apply the identity
3378 if (integer_zerop (partial
))
3380 else if (integer_zerop (t
))
3382 else if (same_bool_result_p (t
, partial
))
3391 /* Try to simplify the AND of two comparisons defined by
3392 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
3393 If this can be done without constructing an intermediate value,
3394 return the resulting tree; otherwise NULL_TREE is returned.
3395 This function is deliberately asymmetric as it recurses on SSA_DEFs
3396 in the first comparison but not the second. */
3399 and_comparisons_1 (enum tree_code code1
, tree op1a
, tree op1b
,
3400 enum tree_code code2
, tree op2a
, tree op2b
)
3402 tree truth_type
= truth_type_for (TREE_TYPE (op1a
));
3404 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
3405 if (operand_equal_p (op1a
, op2a
, 0)
3406 && operand_equal_p (op1b
, op2b
, 0))
3408 /* Result will be either NULL_TREE, or a combined comparison. */
3409 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
3410 TRUTH_ANDIF_EXPR
, code1
, code2
,
3411 truth_type
, op1a
, op1b
);
3416 /* Likewise the swapped case of the above. */
3417 if (operand_equal_p (op1a
, op2b
, 0)
3418 && operand_equal_p (op1b
, op2a
, 0))
3420 /* Result will be either NULL_TREE, or a combined comparison. */
3421 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
3422 TRUTH_ANDIF_EXPR
, code1
,
3423 swap_tree_comparison (code2
),
3424 truth_type
, op1a
, op1b
);
3429 /* If both comparisons are of the same value against constants, we might
3430 be able to merge them. */
3431 if (operand_equal_p (op1a
, op2a
, 0)
3432 && TREE_CODE (op1b
) == INTEGER_CST
3433 && TREE_CODE (op2b
) == INTEGER_CST
)
3435 int cmp
= tree_int_cst_compare (op1b
, op2b
);
3437 /* If we have (op1a == op1b), we should either be able to
3438 return that or FALSE, depending on whether the constant op1b
3439 also satisfies the other comparison against op2b. */
3440 if (code1
== EQ_EXPR
)
3446 case EQ_EXPR
: val
= (cmp
== 0); break;
3447 case NE_EXPR
: val
= (cmp
!= 0); break;
3448 case LT_EXPR
: val
= (cmp
< 0); break;
3449 case GT_EXPR
: val
= (cmp
> 0); break;
3450 case LE_EXPR
: val
= (cmp
<= 0); break;
3451 case GE_EXPR
: val
= (cmp
>= 0); break;
3452 default: done
= false;
3457 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
3459 return boolean_false_node
;
3462 /* Likewise if the second comparison is an == comparison. */
3463 else if (code2
== EQ_EXPR
)
3469 case EQ_EXPR
: val
= (cmp
== 0); break;
3470 case NE_EXPR
: val
= (cmp
!= 0); break;
3471 case LT_EXPR
: val
= (cmp
> 0); break;
3472 case GT_EXPR
: val
= (cmp
< 0); break;
3473 case LE_EXPR
: val
= (cmp
>= 0); break;
3474 case GE_EXPR
: val
= (cmp
<= 0); break;
3475 default: done
= false;
3480 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
3482 return boolean_false_node
;
3486 /* Same business with inequality tests. */
3487 else if (code1
== NE_EXPR
)
3492 case EQ_EXPR
: val
= (cmp
!= 0); break;
3493 case NE_EXPR
: val
= (cmp
== 0); break;
3494 case LT_EXPR
: val
= (cmp
>= 0); break;
3495 case GT_EXPR
: val
= (cmp
<= 0); break;
3496 case LE_EXPR
: val
= (cmp
> 0); break;
3497 case GE_EXPR
: val
= (cmp
< 0); break;
3502 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
3504 else if (code2
== NE_EXPR
)
3509 case EQ_EXPR
: val
= (cmp
== 0); break;
3510 case NE_EXPR
: val
= (cmp
!= 0); break;
3511 case LT_EXPR
: val
= (cmp
<= 0); break;
3512 case GT_EXPR
: val
= (cmp
>= 0); break;
3513 case LE_EXPR
: val
= (cmp
< 0); break;
3514 case GE_EXPR
: val
= (cmp
> 0); break;
3519 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
3522 /* Chose the more restrictive of two < or <= comparisons. */
3523 else if ((code1
== LT_EXPR
|| code1
== LE_EXPR
)
3524 && (code2
== LT_EXPR
|| code2
== LE_EXPR
))
3526 if ((cmp
< 0) || (cmp
== 0 && code1
== LT_EXPR
))
3527 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
3529 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
3532 /* Likewise chose the more restrictive of two > or >= comparisons. */
3533 else if ((code1
== GT_EXPR
|| code1
== GE_EXPR
)
3534 && (code2
== GT_EXPR
|| code2
== GE_EXPR
))
3536 if ((cmp
> 0) || (cmp
== 0 && code1
== GT_EXPR
))
3537 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
3539 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
3542 /* Check for singleton ranges. */
3544 && ((code1
== LE_EXPR
&& code2
== GE_EXPR
)
3545 || (code1
== GE_EXPR
&& code2
== LE_EXPR
)))
3546 return fold_build2 (EQ_EXPR
, boolean_type_node
, op1a
, op2b
);
3548 /* Check for disjoint ranges. */
3550 && (code1
== LT_EXPR
|| code1
== LE_EXPR
)
3551 && (code2
== GT_EXPR
|| code2
== GE_EXPR
))
3552 return boolean_false_node
;
3554 && (code1
== GT_EXPR
|| code1
== GE_EXPR
)
3555 && (code2
== LT_EXPR
|| code2
== LE_EXPR
))
3556 return boolean_false_node
;
3559 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
3560 NAME's definition is a truth value. See if there are any simplifications
3561 that can be done against the NAME's definition. */
3562 if (TREE_CODE (op1a
) == SSA_NAME
3563 && (code1
== NE_EXPR
|| code1
== EQ_EXPR
)
3564 && (integer_zerop (op1b
) || integer_onep (op1b
)))
3566 bool invert
= ((code1
== EQ_EXPR
&& integer_zerop (op1b
))
3567 || (code1
== NE_EXPR
&& integer_onep (op1b
)));
3568 gimple stmt
= SSA_NAME_DEF_STMT (op1a
);
3569 switch (gimple_code (stmt
))
3572 /* Try to simplify by copy-propagating the definition. */
3573 return and_var_with_comparison (op1a
, invert
, code2
, op2a
, op2b
);
3576 /* If every argument to the PHI produces the same result when
3577 ANDed with the second comparison, we win.
3578 Do not do this unless the type is bool since we need a bool
3579 result here anyway. */
3580 if (TREE_CODE (TREE_TYPE (op1a
)) == BOOLEAN_TYPE
)
3582 tree result
= NULL_TREE
;
3584 for (i
= 0; i
< gimple_phi_num_args (stmt
); i
++)
3586 tree arg
= gimple_phi_arg_def (stmt
, i
);
3588 /* If this PHI has itself as an argument, ignore it.
3589 If all the other args produce the same result,
3591 if (arg
== gimple_phi_result (stmt
))
3593 else if (TREE_CODE (arg
) == INTEGER_CST
)
3595 if (invert
? integer_nonzerop (arg
) : integer_zerop (arg
))
3598 result
= boolean_false_node
;
3599 else if (!integer_zerop (result
))
3603 result
= fold_build2 (code2
, boolean_type_node
,
3605 else if (!same_bool_comparison_p (result
,
3609 else if (TREE_CODE (arg
) == SSA_NAME
3610 && !SSA_NAME_IS_DEFAULT_DEF (arg
))
3613 gimple def_stmt
= SSA_NAME_DEF_STMT (arg
);
3614 /* In simple cases we can look through PHI nodes,
3615 but we have to be careful with loops.
3617 if (! dom_info_available_p (CDI_DOMINATORS
)
3618 || gimple_bb (def_stmt
) == gimple_bb (stmt
)
3619 || dominated_by_p (CDI_DOMINATORS
,
3620 gimple_bb (def_stmt
),
3623 temp
= and_var_with_comparison (arg
, invert
, code2
,
3629 else if (!same_bool_result_p (result
, temp
))
3645 /* Try to simplify the AND of two comparisons, specified by
3646 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
3647 If this can be simplified to a single expression (without requiring
3648 introducing more SSA variables to hold intermediate values),
3649 return the resulting tree. Otherwise return NULL_TREE.
3650 If the result expression is non-null, it has boolean type. */
3653 maybe_fold_and_comparisons (enum tree_code code1
, tree op1a
, tree op1b
,
3654 enum tree_code code2
, tree op2a
, tree op2b
)
3656 tree t
= and_comparisons_1 (code1
, op1a
, op1b
, code2
, op2a
, op2b
);
3660 return and_comparisons_1 (code2
, op2a
, op2b
, code1
, op1a
, op1b
);
3663 /* Helper function for or_comparisons_1: try to simplify the OR of the
3664 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
3665 If INVERT is true, invert the value of VAR before doing the OR.
3666 Return NULL_EXPR if we can't simplify this to a single expression. */
3669 or_var_with_comparison (tree var
, bool invert
,
3670 enum tree_code code2
, tree op2a
, tree op2b
)
3673 gimple stmt
= SSA_NAME_DEF_STMT (var
);
3675 /* We can only deal with variables whose definitions are assignments. */
3676 if (!is_gimple_assign (stmt
))
3679 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
3680 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
3681 Then we only have to consider the simpler non-inverted cases. */
3683 t
= and_var_with_comparison_1 (stmt
,
3684 invert_tree_comparison (code2
, false),
3687 t
= or_var_with_comparison_1 (stmt
, code2
, op2a
, op2b
);
3688 return canonicalize_bool (t
, invert
);
3691 /* Try to simplify the OR of the ssa variable defined by the assignment
3692 STMT with the comparison specified by (OP2A CODE2 OP2B).
3693 Return NULL_EXPR if we can't simplify this to a single expression. */
3696 or_var_with_comparison_1 (gimple stmt
,
3697 enum tree_code code2
, tree op2a
, tree op2b
)
3699 tree var
= gimple_assign_lhs (stmt
);
3700 tree true_test_var
= NULL_TREE
;
3701 tree false_test_var
= NULL_TREE
;
3702 enum tree_code innercode
= gimple_assign_rhs_code (stmt
);
3704 /* Check for identities like (var OR (var != 0)) => true . */
3705 if (TREE_CODE (op2a
) == SSA_NAME
3706 && TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
)
3708 if ((code2
== NE_EXPR
&& integer_zerop (op2b
))
3709 || (code2
== EQ_EXPR
&& integer_nonzerop (op2b
)))
3711 true_test_var
= op2a
;
3712 if (var
== true_test_var
)
3715 else if ((code2
== EQ_EXPR
&& integer_zerop (op2b
))
3716 || (code2
== NE_EXPR
&& integer_nonzerop (op2b
)))
3718 false_test_var
= op2a
;
3719 if (var
== false_test_var
)
3720 return boolean_true_node
;
3724 /* If the definition is a comparison, recurse on it. */
3725 if (TREE_CODE_CLASS (innercode
) == tcc_comparison
)
3727 tree t
= or_comparisons_1 (innercode
,
3728 gimple_assign_rhs1 (stmt
),
3729 gimple_assign_rhs2 (stmt
),
3737 /* If the definition is an AND or OR expression, we may be able to
3738 simplify by reassociating. */
3739 if (TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
3740 && (innercode
== BIT_AND_EXPR
|| innercode
== BIT_IOR_EXPR
))
3742 tree inner1
= gimple_assign_rhs1 (stmt
);
3743 tree inner2
= gimple_assign_rhs2 (stmt
);
3746 tree partial
= NULL_TREE
;
3747 bool is_or
= (innercode
== BIT_IOR_EXPR
);
3749 /* Check for boolean identities that don't require recursive examination
3751 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
3752 inner1 OR (inner1 AND inner2) => inner1
3753 !inner1 OR (inner1 OR inner2) => true
3754 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
3756 if (inner1
== true_test_var
)
3757 return (is_or
? var
: inner1
);
3758 else if (inner2
== true_test_var
)
3759 return (is_or
? var
: inner2
);
3760 else if (inner1
== false_test_var
)
3763 : or_var_with_comparison (inner2
, false, code2
, op2a
, op2b
));
3764 else if (inner2
== false_test_var
)
3767 : or_var_with_comparison (inner1
, false, code2
, op2a
, op2b
));
3769 /* Next, redistribute/reassociate the OR across the inner tests.
3770 Compute the first partial result, (inner1 OR (op2a code op2b)) */
3771 if (TREE_CODE (inner1
) == SSA_NAME
3772 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner1
))
3773 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
3774 && (t
= maybe_fold_or_comparisons (gimple_assign_rhs_code (s
),
3775 gimple_assign_rhs1 (s
),
3776 gimple_assign_rhs2 (s
),
3777 code2
, op2a
, op2b
)))
3779 /* Handle the OR case, where we are reassociating:
3780 (inner1 OR inner2) OR (op2a code2 op2b)
3782 If the partial result t is a constant, we win. Otherwise
3783 continue on to try reassociating with the other inner test. */
3786 if (integer_onep (t
))
3787 return boolean_true_node
;
3788 else if (integer_zerop (t
))
3792 /* Handle the AND case, where we are redistributing:
3793 (inner1 AND inner2) OR (op2a code2 op2b)
3794 => (t AND (inner2 OR (op2a code op2b))) */
3795 else if (integer_zerop (t
))
3796 return boolean_false_node
;
3798 /* Save partial result for later. */
3802 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
3803 if (TREE_CODE (inner2
) == SSA_NAME
3804 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner2
))
3805 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
3806 && (t
= maybe_fold_or_comparisons (gimple_assign_rhs_code (s
),
3807 gimple_assign_rhs1 (s
),
3808 gimple_assign_rhs2 (s
),
3809 code2
, op2a
, op2b
)))
3811 /* Handle the OR case, where we are reassociating:
3812 (inner1 OR inner2) OR (op2a code2 op2b)
3814 => (t OR partial) */
3817 if (integer_zerop (t
))
3819 else if (integer_onep (t
))
3820 return boolean_true_node
;
3821 /* If both are the same, we can apply the identity
3823 else if (partial
&& same_bool_result_p (t
, partial
))
3827 /* Handle the AND case, where we are redistributing:
3828 (inner1 AND inner2) OR (op2a code2 op2b)
3829 => (t AND (inner1 OR (op2a code2 op2b)))
3830 => (t AND partial) */
3833 if (integer_zerop (t
))
3834 return boolean_false_node
;
3837 /* We already got a simplification for the other
3838 operand to the redistributed AND expression. The
3839 interesting case is when at least one is true.
3840 Or, if both are the same, we can apply the identity
3842 if (integer_onep (partial
))
3844 else if (integer_onep (t
))
3846 else if (same_bool_result_p (t
, partial
))
3855 /* Try to simplify the OR of two comparisons defined by
3856 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
3857 If this can be done without constructing an intermediate value,
3858 return the resulting tree; otherwise NULL_TREE is returned.
3859 This function is deliberately asymmetric as it recurses on SSA_DEFs
3860 in the first comparison but not the second. */
3863 or_comparisons_1 (enum tree_code code1
, tree op1a
, tree op1b
,
3864 enum tree_code code2
, tree op2a
, tree op2b
)
3866 tree truth_type
= truth_type_for (TREE_TYPE (op1a
));
3868 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
3869 if (operand_equal_p (op1a
, op2a
, 0)
3870 && operand_equal_p (op1b
, op2b
, 0))
3872 /* Result will be either NULL_TREE, or a combined comparison. */
3873 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
3874 TRUTH_ORIF_EXPR
, code1
, code2
,
3875 truth_type
, op1a
, op1b
);
3880 /* Likewise the swapped case of the above. */
3881 if (operand_equal_p (op1a
, op2b
, 0)
3882 && operand_equal_p (op1b
, op2a
, 0))
3884 /* Result will be either NULL_TREE, or a combined comparison. */
3885 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
3886 TRUTH_ORIF_EXPR
, code1
,
3887 swap_tree_comparison (code2
),
3888 truth_type
, op1a
, op1b
);
3893 /* If both comparisons are of the same value against constants, we might
3894 be able to merge them. */
3895 if (operand_equal_p (op1a
, op2a
, 0)
3896 && TREE_CODE (op1b
) == INTEGER_CST
3897 && TREE_CODE (op2b
) == INTEGER_CST
)
3899 int cmp
= tree_int_cst_compare (op1b
, op2b
);
3901 /* If we have (op1a != op1b), we should either be able to
3902 return that or TRUE, depending on whether the constant op1b
3903 also satisfies the other comparison against op2b. */
3904 if (code1
== NE_EXPR
)
3910 case EQ_EXPR
: val
= (cmp
== 0); break;
3911 case NE_EXPR
: val
= (cmp
!= 0); break;
3912 case LT_EXPR
: val
= (cmp
< 0); break;
3913 case GT_EXPR
: val
= (cmp
> 0); break;
3914 case LE_EXPR
: val
= (cmp
<= 0); break;
3915 case GE_EXPR
: val
= (cmp
>= 0); break;
3916 default: done
= false;
3921 return boolean_true_node
;
3923 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
3926 /* Likewise if the second comparison is a != comparison. */
3927 else if (code2
== NE_EXPR
)
3933 case EQ_EXPR
: val
= (cmp
== 0); break;
3934 case NE_EXPR
: val
= (cmp
!= 0); break;
3935 case LT_EXPR
: val
= (cmp
> 0); break;
3936 case GT_EXPR
: val
= (cmp
< 0); break;
3937 case LE_EXPR
: val
= (cmp
>= 0); break;
3938 case GE_EXPR
: val
= (cmp
<= 0); break;
3939 default: done
= false;
3944 return boolean_true_node
;
3946 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
3950 /* See if an equality test is redundant with the other comparison. */
3951 else if (code1
== EQ_EXPR
)
3956 case EQ_EXPR
: val
= (cmp
== 0); break;
3957 case NE_EXPR
: val
= (cmp
!= 0); break;
3958 case LT_EXPR
: val
= (cmp
< 0); break;
3959 case GT_EXPR
: val
= (cmp
> 0); break;
3960 case LE_EXPR
: val
= (cmp
<= 0); break;
3961 case GE_EXPR
: val
= (cmp
>= 0); break;
3966 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
3968 else if (code2
== EQ_EXPR
)
3973 case EQ_EXPR
: val
= (cmp
== 0); break;
3974 case NE_EXPR
: val
= (cmp
!= 0); break;
3975 case LT_EXPR
: val
= (cmp
> 0); break;
3976 case GT_EXPR
: val
= (cmp
< 0); break;
3977 case LE_EXPR
: val
= (cmp
>= 0); break;
3978 case GE_EXPR
: val
= (cmp
<= 0); break;
3983 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
3986 /* Chose the less restrictive of two < or <= comparisons. */
3987 else if ((code1
== LT_EXPR
|| code1
== LE_EXPR
)
3988 && (code2
== LT_EXPR
|| code2
== LE_EXPR
))
3990 if ((cmp
< 0) || (cmp
== 0 && code1
== LT_EXPR
))
3991 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
3993 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
3996 /* Likewise chose the less restrictive of two > or >= comparisons. */
3997 else if ((code1
== GT_EXPR
|| code1
== GE_EXPR
)
3998 && (code2
== GT_EXPR
|| code2
== GE_EXPR
))
4000 if ((cmp
> 0) || (cmp
== 0 && code1
== GT_EXPR
))
4001 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
4003 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
4006 /* Check for singleton ranges. */
4008 && ((code1
== LT_EXPR
&& code2
== GT_EXPR
)
4009 || (code1
== GT_EXPR
&& code2
== LT_EXPR
)))
4010 return fold_build2 (NE_EXPR
, boolean_type_node
, op1a
, op2b
);
4012 /* Check for less/greater pairs that don't restrict the range at all. */
4014 && (code1
== LT_EXPR
|| code1
== LE_EXPR
)
4015 && (code2
== GT_EXPR
|| code2
== GE_EXPR
))
4016 return boolean_true_node
;
4018 && (code1
== GT_EXPR
|| code1
== GE_EXPR
)
4019 && (code2
== LT_EXPR
|| code2
== LE_EXPR
))
4020 return boolean_true_node
;
4023 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
4024 NAME's definition is a truth value. See if there are any simplifications
4025 that can be done against the NAME's definition. */
4026 if (TREE_CODE (op1a
) == SSA_NAME
4027 && (code1
== NE_EXPR
|| code1
== EQ_EXPR
)
4028 && (integer_zerop (op1b
) || integer_onep (op1b
)))
4030 bool invert
= ((code1
== EQ_EXPR
&& integer_zerop (op1b
))
4031 || (code1
== NE_EXPR
&& integer_onep (op1b
)));
4032 gimple stmt
= SSA_NAME_DEF_STMT (op1a
);
4033 switch (gimple_code (stmt
))
4036 /* Try to simplify by copy-propagating the definition. */
4037 return or_var_with_comparison (op1a
, invert
, code2
, op2a
, op2b
);
4040 /* If every argument to the PHI produces the same result when
4041 ORed with the second comparison, we win.
4042 Do not do this unless the type is bool since we need a bool
4043 result here anyway. */
4044 if (TREE_CODE (TREE_TYPE (op1a
)) == BOOLEAN_TYPE
)
4046 tree result
= NULL_TREE
;
4048 for (i
= 0; i
< gimple_phi_num_args (stmt
); i
++)
4050 tree arg
= gimple_phi_arg_def (stmt
, i
);
4052 /* If this PHI has itself as an argument, ignore it.
4053 If all the other args produce the same result,
4055 if (arg
== gimple_phi_result (stmt
))
4057 else if (TREE_CODE (arg
) == INTEGER_CST
)
4059 if (invert
? integer_zerop (arg
) : integer_nonzerop (arg
))
4062 result
= boolean_true_node
;
4063 else if (!integer_onep (result
))
4067 result
= fold_build2 (code2
, boolean_type_node
,
4069 else if (!same_bool_comparison_p (result
,
4073 else if (TREE_CODE (arg
) == SSA_NAME
4074 && !SSA_NAME_IS_DEFAULT_DEF (arg
))
4077 gimple def_stmt
= SSA_NAME_DEF_STMT (arg
);
4078 /* In simple cases we can look through PHI nodes,
4079 but we have to be careful with loops.
4081 if (! dom_info_available_p (CDI_DOMINATORS
)
4082 || gimple_bb (def_stmt
) == gimple_bb (stmt
)
4083 || dominated_by_p (CDI_DOMINATORS
,
4084 gimple_bb (def_stmt
),
4087 temp
= or_var_with_comparison (arg
, invert
, code2
,
4093 else if (!same_bool_result_p (result
, temp
))
4109 /* Try to simplify the OR of two comparisons, specified by
4110 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
4111 If this can be simplified to a single expression (without requiring
4112 introducing more SSA variables to hold intermediate values),
4113 return the resulting tree. Otherwise return NULL_TREE.
4114 If the result expression is non-null, it has boolean type. */
4117 maybe_fold_or_comparisons (enum tree_code code1
, tree op1a
, tree op1b
,
4118 enum tree_code code2
, tree op2a
, tree op2b
)
4120 tree t
= or_comparisons_1 (code1
, op1a
, op1b
, code2
, op2a
, op2b
);
4124 return or_comparisons_1 (code2
, op2a
, op2b
, code1
, op1a
, op1b
);
4128 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
4130 Either NULL_TREE, a simplified but non-constant or a constant
4133 ??? This should go into a gimple-fold-inline.h file to be eventually
4134 privatized with the single valueize function used in the various TUs
4135 to avoid the indirect function call overhead. */
4138 gimple_fold_stmt_to_constant_1 (gimple stmt
, tree (*valueize
) (tree
))
4140 location_t loc
= gimple_location (stmt
);
4141 switch (gimple_code (stmt
))
4145 enum tree_code subcode
= gimple_assign_rhs_code (stmt
);
4147 switch (get_gimple_rhs_class (subcode
))
4149 case GIMPLE_SINGLE_RHS
:
4151 tree rhs
= gimple_assign_rhs1 (stmt
);
4152 enum tree_code_class kind
= TREE_CODE_CLASS (subcode
);
4154 if (TREE_CODE (rhs
) == SSA_NAME
)
4156 /* If the RHS is an SSA_NAME, return its known constant value,
4158 return (*valueize
) (rhs
);
4160 /* Handle propagating invariant addresses into address
4162 else if (TREE_CODE (rhs
) == ADDR_EXPR
4163 && !is_gimple_min_invariant (rhs
))
4165 HOST_WIDE_INT offset
= 0;
4167 base
= get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs
, 0),
4171 && (CONSTANT_CLASS_P (base
)
4172 || decl_address_invariant_p (base
)))
4173 return build_invariant_address (TREE_TYPE (rhs
),
4176 else if (TREE_CODE (rhs
) == CONSTRUCTOR
4177 && TREE_CODE (TREE_TYPE (rhs
)) == VECTOR_TYPE
4178 && (CONSTRUCTOR_NELTS (rhs
)
4179 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs
))))
4184 vec
= XALLOCAVEC (tree
,
4185 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs
)));
4186 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs
), i
, val
)
4188 val
= (*valueize
) (val
);
4189 if (TREE_CODE (val
) == INTEGER_CST
4190 || TREE_CODE (val
) == REAL_CST
4191 || TREE_CODE (val
) == FIXED_CST
)
4197 return build_vector (TREE_TYPE (rhs
), vec
);
4199 if (subcode
== OBJ_TYPE_REF
)
4201 tree val
= (*valueize
) (OBJ_TYPE_REF_EXPR (rhs
));
4202 /* If callee is constant, we can fold away the wrapper. */
4203 if (is_gimple_min_invariant (val
))
4207 if (kind
== tcc_reference
)
4209 if ((TREE_CODE (rhs
) == VIEW_CONVERT_EXPR
4210 || TREE_CODE (rhs
) == REALPART_EXPR
4211 || TREE_CODE (rhs
) == IMAGPART_EXPR
)
4212 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == SSA_NAME
)
4214 tree val
= (*valueize
) (TREE_OPERAND (rhs
, 0));
4215 return fold_unary_loc (EXPR_LOCATION (rhs
),
4217 TREE_TYPE (rhs
), val
);
4219 else if (TREE_CODE (rhs
) == BIT_FIELD_REF
4220 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == SSA_NAME
)
4222 tree val
= (*valueize
) (TREE_OPERAND (rhs
, 0));
4223 return fold_ternary_loc (EXPR_LOCATION (rhs
),
4225 TREE_TYPE (rhs
), val
,
4226 TREE_OPERAND (rhs
, 1),
4227 TREE_OPERAND (rhs
, 2));
4229 else if (TREE_CODE (rhs
) == MEM_REF
4230 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == SSA_NAME
)
4232 tree val
= (*valueize
) (TREE_OPERAND (rhs
, 0));
4233 if (TREE_CODE (val
) == ADDR_EXPR
4234 && is_gimple_min_invariant (val
))
4236 tree tem
= fold_build2 (MEM_REF
, TREE_TYPE (rhs
),
4238 TREE_OPERAND (rhs
, 1));
4243 return fold_const_aggregate_ref_1 (rhs
, valueize
);
4245 else if (kind
== tcc_declaration
)
4246 return get_symbol_constant_value (rhs
);
4250 case GIMPLE_UNARY_RHS
:
4252 /* Handle unary operators that can appear in GIMPLE form.
4253 Note that we know the single operand must be a constant,
4254 so this should almost always return a simplified RHS. */
4255 tree op0
= (*valueize
) (gimple_assign_rhs1 (stmt
));
4258 fold_unary_ignore_overflow_loc (loc
, subcode
,
4259 gimple_expr_type (stmt
), op0
);
4262 case GIMPLE_BINARY_RHS
:
4264 /* Handle binary operators that can appear in GIMPLE form. */
4265 tree op0
= (*valueize
) (gimple_assign_rhs1 (stmt
));
4266 tree op1
= (*valueize
) (gimple_assign_rhs2 (stmt
));
4268 /* Translate &x + CST into an invariant form suitable for
4269 further propagation. */
4270 if (gimple_assign_rhs_code (stmt
) == POINTER_PLUS_EXPR
4271 && TREE_CODE (op0
) == ADDR_EXPR
4272 && TREE_CODE (op1
) == INTEGER_CST
)
4274 tree off
= fold_convert (ptr_type_node
, op1
);
4275 return build_fold_addr_expr_loc
4277 fold_build2 (MEM_REF
,
4278 TREE_TYPE (TREE_TYPE (op0
)),
4279 unshare_expr (op0
), off
));
4282 return fold_binary_loc (loc
, subcode
,
4283 gimple_expr_type (stmt
), op0
, op1
);
4286 case GIMPLE_TERNARY_RHS
:
4288 /* Handle ternary operators that can appear in GIMPLE form. */
4289 tree op0
= (*valueize
) (gimple_assign_rhs1 (stmt
));
4290 tree op1
= (*valueize
) (gimple_assign_rhs2 (stmt
));
4291 tree op2
= (*valueize
) (gimple_assign_rhs3 (stmt
));
4293 /* Fold embedded expressions in ternary codes. */
4294 if ((subcode
== COND_EXPR
4295 || subcode
== VEC_COND_EXPR
)
4296 && COMPARISON_CLASS_P (op0
))
4298 tree op00
= (*valueize
) (TREE_OPERAND (op0
, 0));
4299 tree op01
= (*valueize
) (TREE_OPERAND (op0
, 1));
4300 tree tem
= fold_binary_loc (loc
, TREE_CODE (op0
),
4301 TREE_TYPE (op0
), op00
, op01
);
4306 return fold_ternary_loc (loc
, subcode
,
4307 gimple_expr_type (stmt
), op0
, op1
, op2
);
4319 if (gimple_call_internal_p (stmt
))
4321 enum tree_code subcode
= ERROR_MARK
;
4322 switch (gimple_call_internal_fn (stmt
))
4324 case IFN_UBSAN_CHECK_ADD
:
4325 subcode
= PLUS_EXPR
;
4327 case IFN_UBSAN_CHECK_SUB
:
4328 subcode
= MINUS_EXPR
;
4330 case IFN_UBSAN_CHECK_MUL
:
4331 subcode
= MULT_EXPR
;
4336 tree arg0
= gimple_call_arg (stmt
, 0);
4337 tree arg1
= gimple_call_arg (stmt
, 1);
4338 tree op0
= (*valueize
) (arg0
);
4339 tree op1
= (*valueize
) (arg1
);
4341 if (TREE_CODE (op0
) != INTEGER_CST
4342 || TREE_CODE (op1
) != INTEGER_CST
)
4347 /* x * 0 = 0 * x = 0 without overflow. */
4348 if (integer_zerop (op0
) || integer_zerop (op1
))
4349 return build_zero_cst (TREE_TYPE (arg0
));
4352 /* y - y = 0 without overflow. */
4353 if (operand_equal_p (op0
, op1
, 0))
4354 return build_zero_cst (TREE_TYPE (arg0
));
4361 = fold_binary_loc (loc
, subcode
, TREE_TYPE (arg0
), op0
, op1
);
4363 && TREE_CODE (res
) == INTEGER_CST
4364 && !TREE_OVERFLOW (res
))
4369 fn
= (*valueize
) (gimple_call_fn (stmt
));
4370 if (TREE_CODE (fn
) == ADDR_EXPR
4371 && TREE_CODE (TREE_OPERAND (fn
, 0)) == FUNCTION_DECL
4372 && DECL_BUILT_IN (TREE_OPERAND (fn
, 0))
4373 && gimple_builtin_call_types_compatible_p (stmt
,
4374 TREE_OPERAND (fn
, 0)))
4376 tree
*args
= XALLOCAVEC (tree
, gimple_call_num_args (stmt
));
4379 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
4380 args
[i
] = (*valueize
) (gimple_call_arg (stmt
, i
));
4381 call
= build_call_array_loc (loc
,
4382 gimple_call_return_type (stmt
),
4383 fn
, gimple_call_num_args (stmt
), args
);
4384 retval
= fold_call_expr (EXPR_LOCATION (call
), call
, false);
4387 /* fold_call_expr wraps the result inside a NOP_EXPR. */
4388 STRIP_NOPS (retval
);
4389 retval
= fold_convert (gimple_call_return_type (stmt
), retval
);
4401 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
4402 Returns NULL_TREE if folding to a constant is not possible, otherwise
4403 returns a constant according to is_gimple_min_invariant. */
4406 gimple_fold_stmt_to_constant (gimple stmt
, tree (*valueize
) (tree
))
4408 tree res
= gimple_fold_stmt_to_constant_1 (stmt
, valueize
);
4409 if (res
&& is_gimple_min_invariant (res
))
4415 /* The following set of functions are supposed to fold references using
4416 their constant initializers. */
4418 static tree
fold_ctor_reference (tree type
, tree ctor
,
4419 unsigned HOST_WIDE_INT offset
,
4420 unsigned HOST_WIDE_INT size
, tree
);
4422 /* See if we can find constructor defining value of BASE.
4423 When we know the consructor with constant offset (such as
4424 base is array[40] and we do know constructor of array), then
4425 BIT_OFFSET is adjusted accordingly.
4427 As a special case, return error_mark_node when constructor
4428 is not explicitly available, but it is known to be zero
4429 such as 'static const int a;'. */
4431 get_base_constructor (tree base
, HOST_WIDE_INT
*bit_offset
,
4432 tree (*valueize
)(tree
))
4434 HOST_WIDE_INT bit_offset2
, size
, max_size
;
4435 if (TREE_CODE (base
) == MEM_REF
)
4437 if (!integer_zerop (TREE_OPERAND (base
, 1)))
4439 if (!tree_fits_shwi_p (TREE_OPERAND (base
, 1)))
4441 *bit_offset
+= (mem_ref_offset (base
).to_short_addr ()
4446 && TREE_CODE (TREE_OPERAND (base
, 0)) == SSA_NAME
)
4447 base
= valueize (TREE_OPERAND (base
, 0));
4448 if (!base
|| TREE_CODE (base
) != ADDR_EXPR
)
4450 base
= TREE_OPERAND (base
, 0);
4453 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
4454 DECL_INITIAL. If BASE is a nested reference into another
4455 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
4456 the inner reference. */
4457 switch (TREE_CODE (base
))
4462 tree init
= ctor_for_folding (base
);
4464 /* Our semantic is exact opposite of ctor_for_folding;
4465 NULL means unknown, while error_mark_node is 0. */
4466 if (init
== error_mark_node
)
4469 return error_mark_node
;
4475 base
= get_ref_base_and_extent (base
, &bit_offset2
, &size
, &max_size
);
4476 if (max_size
== -1 || size
!= max_size
)
4478 *bit_offset
+= bit_offset2
;
4479 return get_base_constructor (base
, bit_offset
, valueize
);
4490 /* CTOR is CONSTRUCTOR of an array type. Fold reference of type TYPE and size
4491 SIZE to the memory at bit OFFSET. */
4494 fold_array_ctor_reference (tree type
, tree ctor
,
4495 unsigned HOST_WIDE_INT offset
,
4496 unsigned HOST_WIDE_INT size
,
4499 unsigned HOST_WIDE_INT cnt
;
4501 offset_int low_bound
;
4502 offset_int elt_size
;
4503 offset_int index
, max_index
;
4504 offset_int access_index
;
4505 tree domain_type
= NULL_TREE
, index_type
= NULL_TREE
;
4506 HOST_WIDE_INT inner_offset
;
4508 /* Compute low bound and elt size. */
4509 if (TREE_CODE (TREE_TYPE (ctor
)) == ARRAY_TYPE
)
4510 domain_type
= TYPE_DOMAIN (TREE_TYPE (ctor
));
4511 if (domain_type
&& TYPE_MIN_VALUE (domain_type
))
4513 /* Static constructors for variably sized objects makes no sense. */
4514 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type
)) == INTEGER_CST
);
4515 index_type
= TREE_TYPE (TYPE_MIN_VALUE (domain_type
));
4516 low_bound
= wi::to_offset (TYPE_MIN_VALUE (domain_type
));
4520 /* Static constructors for variably sized objects makes no sense. */
4521 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor
))))
4523 elt_size
= wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor
))));
4525 /* We can handle only constantly sized accesses that are known to not
4526 be larger than size of array element. */
4527 if (!TYPE_SIZE_UNIT (type
)
4528 || TREE_CODE (TYPE_SIZE_UNIT (type
)) != INTEGER_CST
4529 || wi::lts_p (elt_size
, wi::to_offset (TYPE_SIZE_UNIT (type
)))
4533 /* Compute the array index we look for. */
4534 access_index
= wi::udiv_trunc (offset_int (offset
/ BITS_PER_UNIT
),
4536 access_index
+= low_bound
;
4538 access_index
= wi::ext (access_index
, TYPE_PRECISION (index_type
),
4539 TYPE_SIGN (index_type
));
4541 /* And offset within the access. */
4542 inner_offset
= offset
% (elt_size
.to_uhwi () * BITS_PER_UNIT
);
4544 /* See if the array field is large enough to span whole access. We do not
4545 care to fold accesses spanning multiple array indexes. */
4546 if (inner_offset
+ size
> elt_size
.to_uhwi () * BITS_PER_UNIT
)
4549 index
= low_bound
- 1;
4551 index
= wi::ext (index
, TYPE_PRECISION (index_type
),
4552 TYPE_SIGN (index_type
));
4554 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor
), cnt
, cfield
, cval
)
4556 /* Array constructor might explicitely set index, or specify range
4557 or leave index NULL meaning that it is next index after previous
4561 if (TREE_CODE (cfield
) == INTEGER_CST
)
4562 max_index
= index
= wi::to_offset (cfield
);
4565 gcc_assert (TREE_CODE (cfield
) == RANGE_EXPR
);
4566 index
= wi::to_offset (TREE_OPERAND (cfield
, 0));
4567 max_index
= wi::to_offset (TREE_OPERAND (cfield
, 1));
4574 index
= wi::ext (index
, TYPE_PRECISION (index_type
),
4575 TYPE_SIGN (index_type
));
4579 /* Do we have match? */
4580 if (wi::cmpu (access_index
, index
) >= 0
4581 && wi::cmpu (access_index
, max_index
) <= 0)
4582 return fold_ctor_reference (type
, cval
, inner_offset
, size
,
4585 /* When memory is not explicitely mentioned in constructor,
4586 it is 0 (or out of range). */
4587 return build_zero_cst (type
);
4590 /* CTOR is CONSTRUCTOR of an aggregate or vector.
4591 Fold reference of type TYPE and size SIZE to the memory at bit OFFSET. */
4594 fold_nonarray_ctor_reference (tree type
, tree ctor
,
4595 unsigned HOST_WIDE_INT offset
,
4596 unsigned HOST_WIDE_INT size
,
4599 unsigned HOST_WIDE_INT cnt
;
4602 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor
), cnt
, cfield
,
4605 tree byte_offset
= DECL_FIELD_OFFSET (cfield
);
4606 tree field_offset
= DECL_FIELD_BIT_OFFSET (cfield
);
4607 tree field_size
= DECL_SIZE (cfield
);
4608 offset_int bitoffset
;
4609 offset_int bitoffset_end
, access_end
;
4611 /* Variable sized objects in static constructors makes no sense,
4612 but field_size can be NULL for flexible array members. */
4613 gcc_assert (TREE_CODE (field_offset
) == INTEGER_CST
4614 && TREE_CODE (byte_offset
) == INTEGER_CST
4615 && (field_size
!= NULL_TREE
4616 ? TREE_CODE (field_size
) == INTEGER_CST
4617 : TREE_CODE (TREE_TYPE (cfield
)) == ARRAY_TYPE
));
4619 /* Compute bit offset of the field. */
4620 bitoffset
= (wi::to_offset (field_offset
)
4621 + wi::lshift (wi::to_offset (byte_offset
),
4622 LOG2_BITS_PER_UNIT
));
4623 /* Compute bit offset where the field ends. */
4624 if (field_size
!= NULL_TREE
)
4625 bitoffset_end
= bitoffset
+ wi::to_offset (field_size
);
4629 access_end
= offset_int (offset
) + size
;
4631 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
4632 [BITOFFSET, BITOFFSET_END)? */
4633 if (wi::cmps (access_end
, bitoffset
) > 0
4634 && (field_size
== NULL_TREE
4635 || wi::lts_p (offset
, bitoffset_end
)))
4637 offset_int inner_offset
= offset_int (offset
) - bitoffset
;
4638 /* We do have overlap. Now see if field is large enough to
4639 cover the access. Give up for accesses spanning multiple
4641 if (wi::cmps (access_end
, bitoffset_end
) > 0)
4643 if (wi::lts_p (offset
, bitoffset
))
4645 return fold_ctor_reference (type
, cval
,
4646 inner_offset
.to_uhwi (), size
,
4650 /* When memory is not explicitely mentioned in constructor, it is 0. */
4651 return build_zero_cst (type
);
4654 /* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
4655 to the memory at bit OFFSET. */
4658 fold_ctor_reference (tree type
, tree ctor
, unsigned HOST_WIDE_INT offset
,
4659 unsigned HOST_WIDE_INT size
, tree from_decl
)
4663 /* We found the field with exact match. */
4664 if (useless_type_conversion_p (type
, TREE_TYPE (ctor
))
4666 return canonicalize_constructor_val (unshare_expr (ctor
), from_decl
);
4668 /* We are at the end of walk, see if we can view convert the
4670 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor
)) && !offset
4671 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
4672 && !compare_tree_int (TYPE_SIZE (type
), size
)
4673 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor
)), size
))
4675 ret
= canonicalize_constructor_val (unshare_expr (ctor
), from_decl
);
4676 ret
= fold_unary (VIEW_CONVERT_EXPR
, type
, ret
);
4681 /* For constants and byte-aligned/sized reads try to go through
4682 native_encode/interpret. */
4683 if (CONSTANT_CLASS_P (ctor
)
4684 && BITS_PER_UNIT
== 8
4685 && offset
% BITS_PER_UNIT
== 0
4686 && size
% BITS_PER_UNIT
== 0
4687 && size
<= MAX_BITSIZE_MODE_ANY_MODE
)
4689 unsigned char buf
[MAX_BITSIZE_MODE_ANY_MODE
/ BITS_PER_UNIT
];
4690 if (native_encode_expr (ctor
, buf
, size
/ BITS_PER_UNIT
,
4691 offset
/ BITS_PER_UNIT
) > 0)
4692 return native_interpret_expr (type
, buf
, size
/ BITS_PER_UNIT
);
4694 if (TREE_CODE (ctor
) == CONSTRUCTOR
)
4697 if (TREE_CODE (TREE_TYPE (ctor
)) == ARRAY_TYPE
4698 || TREE_CODE (TREE_TYPE (ctor
)) == VECTOR_TYPE
)
4699 return fold_array_ctor_reference (type
, ctor
, offset
, size
,
4702 return fold_nonarray_ctor_reference (type
, ctor
, offset
, size
,
4709 /* Return the tree representing the element referenced by T if T is an
4710 ARRAY_REF or COMPONENT_REF into constant aggregates valueizing SSA
4711 names using VALUEIZE. Return NULL_TREE otherwise. */
4714 fold_const_aggregate_ref_1 (tree t
, tree (*valueize
) (tree
))
4716 tree ctor
, idx
, base
;
4717 HOST_WIDE_INT offset
, size
, max_size
;
4720 if (TREE_THIS_VOLATILE (t
))
4723 if (TREE_CODE_CLASS (TREE_CODE (t
)) == tcc_declaration
)
4724 return get_symbol_constant_value (t
);
4726 tem
= fold_read_from_constant_string (t
);
4730 switch (TREE_CODE (t
))
4733 case ARRAY_RANGE_REF
:
4734 /* Constant indexes are handled well by get_base_constructor.
4735 Only special case variable offsets.
4736 FIXME: This code can't handle nested references with variable indexes
4737 (they will be handled only by iteration of ccp). Perhaps we can bring
4738 get_ref_base_and_extent here and make it use a valueize callback. */
4739 if (TREE_CODE (TREE_OPERAND (t
, 1)) == SSA_NAME
4741 && (idx
= (*valueize
) (TREE_OPERAND (t
, 1)))
4742 && TREE_CODE (idx
) == INTEGER_CST
)
4744 tree low_bound
, unit_size
;
4746 /* If the resulting bit-offset is constant, track it. */
4747 if ((low_bound
= array_ref_low_bound (t
),
4748 TREE_CODE (low_bound
) == INTEGER_CST
)
4749 && (unit_size
= array_ref_element_size (t
),
4750 tree_fits_uhwi_p (unit_size
)))
4753 = wi::sext (wi::to_offset (idx
) - wi::to_offset (low_bound
),
4754 TYPE_PRECISION (TREE_TYPE (idx
)));
4756 if (wi::fits_shwi_p (woffset
))
4758 offset
= woffset
.to_shwi ();
4759 /* TODO: This code seems wrong, multiply then check
4760 to see if it fits. */
4761 offset
*= tree_to_uhwi (unit_size
);
4762 offset
*= BITS_PER_UNIT
;
4764 base
= TREE_OPERAND (t
, 0);
4765 ctor
= get_base_constructor (base
, &offset
, valueize
);
4766 /* Empty constructor. Always fold to 0. */
4767 if (ctor
== error_mark_node
)
4768 return build_zero_cst (TREE_TYPE (t
));
4769 /* Out of bound array access. Value is undefined,
4773 /* We can not determine ctor. */
4776 return fold_ctor_reference (TREE_TYPE (t
), ctor
, offset
,
4777 tree_to_uhwi (unit_size
)
4787 case TARGET_MEM_REF
:
4789 base
= get_ref_base_and_extent (t
, &offset
, &size
, &max_size
);
4790 ctor
= get_base_constructor (base
, &offset
, valueize
);
4792 /* Empty constructor. Always fold to 0. */
4793 if (ctor
== error_mark_node
)
4794 return build_zero_cst (TREE_TYPE (t
));
4795 /* We do not know precise address. */
4796 if (max_size
== -1 || max_size
!= size
)
4798 /* We can not determine ctor. */
4802 /* Out of bound array access. Value is undefined, but don't fold. */
4806 return fold_ctor_reference (TREE_TYPE (t
), ctor
, offset
, size
,
4812 tree c
= fold_const_aggregate_ref_1 (TREE_OPERAND (t
, 0), valueize
);
4813 if (c
&& TREE_CODE (c
) == COMPLEX_CST
)
4814 return fold_build1_loc (EXPR_LOCATION (t
),
4815 TREE_CODE (t
), TREE_TYPE (t
), c
);
4827 fold_const_aggregate_ref (tree t
)
4829 return fold_const_aggregate_ref_1 (t
, NULL
);
4832 /* Lookup virtual method with index TOKEN in a virtual table V
4834 Set CAN_REFER if non-NULL to false if method
4835 is not referable or if the virtual table is ill-formed (such as rewritten
4836 by non-C++ produced symbol). Otherwise just return NULL in that case. */
4839 gimple_get_virt_method_for_vtable (HOST_WIDE_INT token
,
4841 unsigned HOST_WIDE_INT offset
,
4844 tree vtable
= v
, init
, fn
;
4845 unsigned HOST_WIDE_INT size
;
4846 unsigned HOST_WIDE_INT elt_size
, access_index
;
4852 /* First of all double check we have virtual table. */
4853 if (TREE_CODE (v
) != VAR_DECL
4854 || !DECL_VIRTUAL_P (v
))
4856 gcc_assert (in_lto_p
);
4857 /* Pass down that we lost track of the target. */
4863 init
= ctor_for_folding (v
);
4865 /* The virtual tables should always be born with constructors
4866 and we always should assume that they are avaialble for
4867 folding. At the moment we do not stream them in all cases,
4868 but it should never happen that ctor seem unreachable. */
4870 if (init
== error_mark_node
)
4872 gcc_assert (in_lto_p
);
4873 /* Pass down that we lost track of the target. */
4878 gcc_checking_assert (TREE_CODE (TREE_TYPE (v
)) == ARRAY_TYPE
);
4879 size
= tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v
))));
4880 offset
*= BITS_PER_UNIT
;
4881 offset
+= token
* size
;
4883 /* Lookup the value in the constructor that is assumed to be array.
4884 This is equivalent to
4885 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
4886 offset, size, NULL);
4887 but in a constant time. We expect that frontend produced a simple
4888 array without indexed initializers. */
4890 gcc_checking_assert (TREE_CODE (TREE_TYPE (init
)) == ARRAY_TYPE
);
4891 domain_type
= TYPE_DOMAIN (TREE_TYPE (init
));
4892 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type
)));
4893 elt_size
= tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init
))));
4895 access_index
= offset
/ BITS_PER_UNIT
/ elt_size
;
4896 gcc_checking_assert (offset
% (elt_size
* BITS_PER_UNIT
) == 0);
4898 /* This code makes an assumption that there are no
4899 indexed fileds produced by C++ FE, so we can directly index the array. */
4900 if (access_index
< CONSTRUCTOR_NELTS (init
))
4902 fn
= CONSTRUCTOR_ELT (init
, access_index
)->value
;
4903 gcc_checking_assert (!CONSTRUCTOR_ELT (init
, access_index
)->index
);
4909 /* For type inconsistent program we may end up looking up virtual method
4910 in virtual table that does not contain TOKEN entries. We may overrun
4911 the virtual table and pick up a constant or RTTI info pointer.
4912 In any case the call is undefined. */
4914 || (TREE_CODE (fn
) != ADDR_EXPR
&& TREE_CODE (fn
) != FDESC_EXPR
)
4915 || TREE_CODE (TREE_OPERAND (fn
, 0)) != FUNCTION_DECL
)
4916 fn
= builtin_decl_implicit (BUILT_IN_UNREACHABLE
);
4919 fn
= TREE_OPERAND (fn
, 0);
4921 /* When cgraph node is missing and function is not public, we cannot
4922 devirtualize. This can happen in WHOPR when the actual method
4923 ends up in other partition, because we found devirtualization
4924 possibility too late. */
4925 if (!can_refer_decl_in_current_unit_p (fn
, vtable
))
4936 /* Make sure we create a cgraph node for functions we'll reference.
4937 They can be non-existent if the reference comes from an entry
4938 of an external vtable for example. */
4939 cgraph_node::get_create (fn
);
4944 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
4945 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
4946 KNOWN_BINFO carries the binfo describing the true type of
4947 OBJ_TYPE_REF_OBJECT(REF).
4948 Set CAN_REFER if non-NULL to false if method
4949 is not referable or if the virtual table is ill-formed (such as rewritten
4950 by non-C++ produced symbol). Otherwise just return NULL in that case. */
4953 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token
, tree known_binfo
,
4956 unsigned HOST_WIDE_INT offset
;
4959 v
= BINFO_VTABLE (known_binfo
);
4960 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
4964 if (!vtable_pointer_value_to_vtable (v
, &v
, &offset
))
4970 return gimple_get_virt_method_for_vtable (token
, v
, offset
, can_refer
);
4973 /* Return true iff VAL is a gimple expression that is known to be
4974 non-negative. Restricted to floating-point inputs. */
4977 gimple_val_nonnegative_real_p (tree val
)
4981 gcc_assert (val
&& SCALAR_FLOAT_TYPE_P (TREE_TYPE (val
)));
4983 /* Use existing logic for non-gimple trees. */
4984 if (tree_expr_nonnegative_p (val
))
4987 if (TREE_CODE (val
) != SSA_NAME
)
4990 /* Currently we look only at the immediately defining statement
4991 to make this determination, since recursion on defining
4992 statements of operands can lead to quadratic behavior in the
4993 worst case. This is expected to catch almost all occurrences
4994 in practice. It would be possible to implement limited-depth
4995 recursion if important cases are lost. Alternatively, passes
4996 that need this information (such as the pow/powi lowering code
4997 in the cse_sincos pass) could be revised to provide it through
4998 dataflow propagation. */
5000 def_stmt
= SSA_NAME_DEF_STMT (val
);
5002 if (is_gimple_assign (def_stmt
))
5006 /* See fold-const.c:tree_expr_nonnegative_p for additional
5007 cases that could be handled with recursion. */
5009 switch (gimple_assign_rhs_code (def_stmt
))
5012 /* Always true for floating-point operands. */
5016 /* True if the two operands are identical (since we are
5017 restricted to floating-point inputs). */
5018 op0
= gimple_assign_rhs1 (def_stmt
);
5019 op1
= gimple_assign_rhs2 (def_stmt
);
5022 || operand_equal_p (op0
, op1
, 0))
5029 else if (is_gimple_call (def_stmt
))
5031 tree fndecl
= gimple_call_fndecl (def_stmt
);
5033 && DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
)
5037 switch (DECL_FUNCTION_CODE (fndecl
))
5039 CASE_FLT_FN (BUILT_IN_ACOS
):
5040 CASE_FLT_FN (BUILT_IN_ACOSH
):
5041 CASE_FLT_FN (BUILT_IN_CABS
):
5042 CASE_FLT_FN (BUILT_IN_COSH
):
5043 CASE_FLT_FN (BUILT_IN_ERFC
):
5044 CASE_FLT_FN (BUILT_IN_EXP
):
5045 CASE_FLT_FN (BUILT_IN_EXP10
):
5046 CASE_FLT_FN (BUILT_IN_EXP2
):
5047 CASE_FLT_FN (BUILT_IN_FABS
):
5048 CASE_FLT_FN (BUILT_IN_FDIM
):
5049 CASE_FLT_FN (BUILT_IN_HYPOT
):
5050 CASE_FLT_FN (BUILT_IN_POW10
):
5053 CASE_FLT_FN (BUILT_IN_SQRT
):
5054 /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
5055 nonnegative inputs. */
5056 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (val
))))
5061 CASE_FLT_FN (BUILT_IN_POWI
):
5062 /* True if the second argument is an even integer. */
5063 arg1
= gimple_call_arg (def_stmt
, 1);
5065 if (TREE_CODE (arg1
) == INTEGER_CST
5066 && (TREE_INT_CST_LOW (arg1
) & 1) == 0)
5071 CASE_FLT_FN (BUILT_IN_POW
):
5072 /* True if the second argument is an even integer-valued
5074 arg1
= gimple_call_arg (def_stmt
, 1);
5076 if (TREE_CODE (arg1
) == REAL_CST
)
5081 c
= TREE_REAL_CST (arg1
);
5082 n
= real_to_integer (&c
);
5086 REAL_VALUE_TYPE cint
;
5087 real_from_integer (&cint
, VOIDmode
, n
, SIGNED
);
5088 if (real_identical (&c
, &cint
))
5104 /* Given a pointer value OP0, return a simplified version of an
5105 indirection through OP0, or NULL_TREE if no simplification is
5106 possible. Note that the resulting type may be different from
5107 the type pointed to in the sense that it is still compatible
5108 from the langhooks point of view. */
5111 gimple_fold_indirect_ref (tree t
)
5113 tree ptype
= TREE_TYPE (t
), type
= TREE_TYPE (ptype
);
5118 subtype
= TREE_TYPE (sub
);
5119 if (!POINTER_TYPE_P (subtype
))
5122 if (TREE_CODE (sub
) == ADDR_EXPR
)
5124 tree op
= TREE_OPERAND (sub
, 0);
5125 tree optype
= TREE_TYPE (op
);
5127 if (useless_type_conversion_p (type
, optype
))
5130 /* *(foo *)&fooarray => fooarray[0] */
5131 if (TREE_CODE (optype
) == ARRAY_TYPE
5132 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype
))) == INTEGER_CST
5133 && useless_type_conversion_p (type
, TREE_TYPE (optype
)))
5135 tree type_domain
= TYPE_DOMAIN (optype
);
5136 tree min_val
= size_zero_node
;
5137 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
5138 min_val
= TYPE_MIN_VALUE (type_domain
);
5139 if (TREE_CODE (min_val
) == INTEGER_CST
)
5140 return build4 (ARRAY_REF
, type
, op
, min_val
, NULL_TREE
, NULL_TREE
);
5142 /* *(foo *)&complexfoo => __real__ complexfoo */
5143 else if (TREE_CODE (optype
) == COMPLEX_TYPE
5144 && useless_type_conversion_p (type
, TREE_TYPE (optype
)))
5145 return fold_build1 (REALPART_EXPR
, type
, op
);
5146 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
5147 else if (TREE_CODE (optype
) == VECTOR_TYPE
5148 && useless_type_conversion_p (type
, TREE_TYPE (optype
)))
5150 tree part_width
= TYPE_SIZE (type
);
5151 tree index
= bitsize_int (0);
5152 return fold_build3 (BIT_FIELD_REF
, type
, op
, part_width
, index
);
5156 /* *(p + CST) -> ... */
5157 if (TREE_CODE (sub
) == POINTER_PLUS_EXPR
5158 && TREE_CODE (TREE_OPERAND (sub
, 1)) == INTEGER_CST
)
5160 tree addr
= TREE_OPERAND (sub
, 0);
5161 tree off
= TREE_OPERAND (sub
, 1);
5165 addrtype
= TREE_TYPE (addr
);
5167 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
5168 if (TREE_CODE (addr
) == ADDR_EXPR
5169 && TREE_CODE (TREE_TYPE (addrtype
)) == VECTOR_TYPE
5170 && useless_type_conversion_p (type
, TREE_TYPE (TREE_TYPE (addrtype
)))
5171 && tree_fits_uhwi_p (off
))
5173 unsigned HOST_WIDE_INT offset
= tree_to_uhwi (off
);
5174 tree part_width
= TYPE_SIZE (type
);
5175 unsigned HOST_WIDE_INT part_widthi
5176 = tree_to_shwi (part_width
) / BITS_PER_UNIT
;
5177 unsigned HOST_WIDE_INT indexi
= offset
* BITS_PER_UNIT
;
5178 tree index
= bitsize_int (indexi
);
5179 if (offset
/ part_widthi
5180 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype
)))
5181 return fold_build3 (BIT_FIELD_REF
, type
, TREE_OPERAND (addr
, 0),
5185 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
5186 if (TREE_CODE (addr
) == ADDR_EXPR
5187 && TREE_CODE (TREE_TYPE (addrtype
)) == COMPLEX_TYPE
5188 && useless_type_conversion_p (type
, TREE_TYPE (TREE_TYPE (addrtype
))))
5190 tree size
= TYPE_SIZE_UNIT (type
);
5191 if (tree_int_cst_equal (size
, off
))
5192 return fold_build1 (IMAGPART_EXPR
, type
, TREE_OPERAND (addr
, 0));
5195 /* *(p + CST) -> MEM_REF <p, CST>. */
5196 if (TREE_CODE (addr
) != ADDR_EXPR
5197 || DECL_P (TREE_OPERAND (addr
, 0)))
5198 return fold_build2 (MEM_REF
, type
,
5200 wide_int_to_tree (ptype
, off
));
5203 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
5204 if (TREE_CODE (TREE_TYPE (subtype
)) == ARRAY_TYPE
5205 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype
)))) == INTEGER_CST
5206 && useless_type_conversion_p (type
, TREE_TYPE (TREE_TYPE (subtype
))))
5209 tree min_val
= size_zero_node
;
5211 sub
= gimple_fold_indirect_ref (sub
);
5213 sub
= build1 (INDIRECT_REF
, TREE_TYPE (subtype
), osub
);
5214 type_domain
= TYPE_DOMAIN (TREE_TYPE (sub
));
5215 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
5216 min_val
= TYPE_MIN_VALUE (type_domain
);
5217 if (TREE_CODE (min_val
) == INTEGER_CST
)
5218 return build4 (ARRAY_REF
, type
, sub
, min_val
, NULL_TREE
, NULL_TREE
);
5224 /* Return true if CODE is an operation that when operating on signed
5225 integer types involves undefined behavior on overflow and the
5226 operation can be expressed with unsigned arithmetic. */
5229 arith_code_with_undefined_signed_overflow (tree_code code
)
5237 case POINTER_PLUS_EXPR
:
5244 /* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
5245 operation that can be transformed to unsigned arithmetic by converting
5246 its operand, carrying out the operation in the corresponding unsigned
5247 type and converting the result back to the original type.
5249 Returns a sequence of statements that replace STMT and also contain
5250 a modified form of STMT itself. */
5253 rewrite_to_defined_overflow (gimple stmt
)
5255 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
5257 fprintf (dump_file
, "rewriting stmt with undefined signed "
5259 print_gimple_stmt (dump_file
, stmt
, 0, TDF_SLIM
);
5262 tree lhs
= gimple_assign_lhs (stmt
);
5263 tree type
= unsigned_type_for (TREE_TYPE (lhs
));
5264 gimple_seq stmts
= NULL
;
5265 for (unsigned i
= 1; i
< gimple_num_ops (stmt
); ++i
)
5267 gimple_seq stmts2
= NULL
;
5268 gimple_set_op (stmt
, i
,
5269 force_gimple_operand (fold_convert (type
,
5270 gimple_op (stmt
, i
)),
5271 &stmts2
, true, NULL_TREE
));
5272 gimple_seq_add_seq (&stmts
, stmts2
);
5274 gimple_assign_set_lhs (stmt
, make_ssa_name (type
, stmt
));
5275 if (gimple_assign_rhs_code (stmt
) == POINTER_PLUS_EXPR
)
5276 gimple_assign_set_rhs_code (stmt
, PLUS_EXPR
);
5277 gimple_seq_add_stmt (&stmts
, stmt
);
5278 gimple cvt
= gimple_build_assign_with_ops
5279 (NOP_EXPR
, lhs
, gimple_assign_lhs (stmt
), NULL_TREE
);
5280 gimple_seq_add_stmt (&stmts
, cvt
);