/* Statement simplification on GIMPLE.
   Copyright (C) 2010-2014 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.c.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "stringpool.h"
#include "stor-layout.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimple-expr.h"
#include "gimple-iterator.h"
#include "gimple-ssa.h"
#include "tree-ssanames.h"
#include "tree-into-ssa.h"
#include "tree-ssa-propagate.h"
#include "ipa-utils.h"
#include "gimple-pretty-print.h"
#include "tree-ssa-address.h"
#include "langhooks.h"
#include "gimplify-me.h"
/* Return true when DECL can be referenced from the current unit.
   FROM_DECL (if non-null) specifies the constructor of the variable
   DECL was taken from.
   We can get declarations that are not possible to reference for
   various reasons:

     1) When analyzing C++ virtual tables.
	C++ virtual tables do have known constructors even
	when they are keyed to other compilation units.
	Those tables can contain pointers to methods and vars
	in other units.  Those methods have both STATIC and EXTERNAL
	set.
     2) In WHOPR mode devirtualization might lead to a reference
	to a method that was partitioned elsewhere.
	In this case we have a static VAR_DECL or FUNCTION_DECL
	that has no corresponding callgraph/varpool node
	declaring the body.
     3) COMDAT functions referred to by external vtables that
	we devirtualize only during the final compilation stage.
	At this time we have already decided that we will not output
	the function body and thus we can't reference the symbol
	directly.  */
static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  if (DECL_ABSTRACT_P (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL))
    return true;

  /* Static objects can be referred to only if they were not optimized
     out yet.  */
  if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
    {
      /* Before we start optimizing unreachable code we can be sure all
	 static objects are defined.  */
      if (symtab->function_flags_ready)
	return true;
      snode = symtab_node::get (decl);
      if (!snode || !snode->definition)
	return false;
      node = dyn_cast <cgraph_node *> (snode);
      return !node || !node->global.inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from the initializer of
     an external var or a var that has been optimized out.  */
  if (!from_decl
      || TREE_CODE (from_decl) != VAR_DECL
      || (!DECL_EXTERNAL (from_decl)
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->definition)
      || (flag_ltrans
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->in_other_partition))
    return true;
  /* We are folding a reference from an external vtable.  The vtable may
     refer to a symbol keyed to another compilation unit.  The other
     compilation unit may be in a separate DSO and the symbol may be
     hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
    return false;
  /* When a function is public, we can always introduce a new reference.
     The exceptions are COMDAT functions, where introducing a direct
     reference implies the need to include the function body in the
     current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We have a COMDAT.  We are going to check if we still have a definition
     or if the definition is going to be output in another partition.
     Bypass this when gimplifying; all needed functions will be produced.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting to not necessarily give up, because the copy will be
     output elsewhere when the corresponding vtable is output.
     This is however not possible - the ABI specifies that COMDATs are output
     in units where they are used, and when the other unit was compiled with
     LTO it is possible that the vtable was kept public while the function
     itself was privatized.  */
  if (!symtab->function_flags_ready)
    return true;

  snode = symtab_node::get (decl);
  if (!snode
      || ((!snode->definition || DECL_EXTERNAL (decl))
	  && (!snode->in_other_partition
	      || (!snode->forced_by_abi && !snode->force_output))))
    return false;
  node = dyn_cast <cgraph_node *> (snode);
  return !node || !node->global.inlined_to;
}
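
/* Illustrative example (not part of the original source): when folding a
   load from an external vtable such as

     struct A { virtual void f (); };   // vtable emitted in another unit

   the vtable entry for A::f may name a method with both TREE_STATIC and
   DECL_EXTERNAL set; can_refer_decl_in_current_unit_p tells the folders
   whether materializing a direct reference to it here is safe.  */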
/* CVAL is a value taken from DECL_INITIAL of a variable.  Try to transform
   it into an acceptable form for is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specifies the variable whose constructor
   contains CVAL.  */

tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  tree orig_cval = cval;
  STRIP_NOPS (cval);
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
	cval = build1_loc (EXPR_LOCATION (cval),
			   ADDR_EXPR, TREE_TYPE (ptr),
			   fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
					ptr,
					fold_convert (ptr_type_node,
						      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
	{
	  base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
	  if (base)
	    TREE_OPERAND (cval, 0) = base;
	}
      else
	base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
	return NULL_TREE;

      if ((TREE_CODE (base) == VAR_DECL
	   || TREE_CODE (base) == FUNCTION_DECL)
	  && !can_refer_decl_in_current_unit_p (base, from_decl))
	return NULL_TREE;
      if (TREE_CODE (base) == VAR_DECL)
	TREE_ADDRESSABLE (base) = 1;
      else if (TREE_CODE (base) == FUNCTION_DECL)
	{
	  /* Make sure we create a cgraph node for functions we'll reference.
	     They can be non-existent if the reference comes from an entry
	     of an external vtable for example.  */
	  cgraph_node::get_create (base);
	}
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  if (TREE_OVERFLOW_P (cval))
    return drop_tree_overflow (cval);
  return orig_cval;
}
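
/* Illustrative example (not part of the original source): an initializer
   written as the POINTER_PLUS_EXPR  &arr p+ 4  is canonicalized to the
   form  &MEM[(char *)&arr + 4],  which is_gimple_min_invariant accepts.  */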
/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

tree
get_symbol_constant_value (tree sym)
{
  tree val = ctor_for_folding (sym);
  if (val != error_mark_node)
    {
      if (val)
	{
	  val = canonicalize_constructor_val (unshare_expr (val), sym);
	  if (val && is_gimple_min_invariant (val))
	    return val;
	  else
	    return NULL_TREE;
	}
      /* Variables declared 'const' without an initializer
	 have zero as the initializer if they may not be
	 overridden at link or run time.  */
      if (!val
	  && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
	      || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
	return build_zero_cst (TREE_TYPE (sym));
    }

  return NULL_TREE;
}
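
/* Illustrative example (not part of the original source):

     static const int answer = 42;
     ... = answer;     // folds to  ... = 42

   and a 'const int' with no initializer that cannot be overridden at
   link or run time folds to 0.  */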
/* Subroutine of fold_stmt.  We perform several simplifications of the
   memory reference tree EXPR and make sure to re-gimplify them properly
   after propagation of constant addresses.  IS_LHS is true if the
   reference is supposed to be an lvalue.  */

static tree
maybe_fold_reference (tree expr, bool is_lhs)
{
  tree result;

  if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
       || TREE_CODE (expr) == REALPART_EXPR
       || TREE_CODE (expr) == IMAGPART_EXPR)
      && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_unary_loc (EXPR_LOCATION (expr),
			   TREE_CODE (expr),
			   TREE_TYPE (expr),
			   TREE_OPERAND (expr, 0));
  else if (TREE_CODE (expr) == BIT_FIELD_REF
	   && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_ternary_loc (EXPR_LOCATION (expr),
			     TREE_CODE (expr),
			     TREE_TYPE (expr),
			     TREE_OPERAND (expr, 0),
			     TREE_OPERAND (expr, 1),
			     TREE_OPERAND (expr, 2));

  if (!is_lhs
      && (result = fold_const_aggregate_ref (expr))
      && is_gimple_min_invariant (result))
    return result;

  return NULL_TREE;
}
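
/* Illustrative example (not part of the original source):
   REALPART_EXPR <__complex__ (3.0, 4.0)> folds to 3.0, and a
   BIT_FIELD_REF of an integer constant folds to the extracted bits.  */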
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs, false);

	else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
	  {
	    tree val = OBJ_TYPE_REF_EXPR (rhs);
	    if (is_gimple_min_invariant (val))
	      return val;
	    else if (flag_devirtualize && virtual_method_call_p (rhs))
	      {
		bool final;
		vec <cgraph_node *>targets
		  = possible_polymorphic_call_targets (rhs, stmt, &final);
		if (final && targets.length () <= 1 && dbg_cnt (devirt))
		  {
		    if (dump_enabled_p ())
		      {
			location_t loc = gimple_location_safe (stmt);
			dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
					 "resolving virtual function address "
					 "reference to function %s\n",
					 targets.length () == 1
					 ? targets[0]->name ()
					 : "NULL");
		      }
		    if (targets.length () == 1)
		      {
			val = fold_convert (TREE_TYPE (val),
					    build_fold_addr_expr_loc
					      (loc, targets[0]->decl));
			STRIP_USELESS_TYPE_CONVERSION (val);
		      }
		    else
		      /* We can not use __builtin_unreachable here because it
			 can not have address taken.  */
		      val = build_int_cst (TREE_TYPE (val), 0);
		    return val;
		  }
	      }
	  }
	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    tree ref = TREE_OPERAND (rhs, 0);
	    tree tem = maybe_fold_reference (ref, true);
	    if (tem
		&& TREE_CODE (tem) == MEM_REF
		&& integer_zerop (TREE_OPERAND (tem, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
	    else if (tem)
	      result = fold_convert (TREE_TYPE (rhs),
				     build_fold_addr_expr_loc (loc, tem));
	    else if (TREE_CODE (ref) == MEM_REF
		     && integer_zerop (TREE_OPERAND (ref, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		 && (CONSTRUCTOR_NELTS (rhs)
		     == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (TREE_CODE (val) != INTEGER_CST
		  && TREE_CODE (val) != REAL_CST
		  && TREE_CODE (val) != FIXED_CST)
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs))
	  return get_symbol_constant_value (rhs);

	/* If we couldn't fold the RHS, hand over to the generic
	   fold routines.  */
	if (result == NULL_TREE)
	  result = fold (rhs);

	/* Strip away useless type conversions.  Both the NON_LVALUE_EXPR
	   that may have been added by fold, and "useless" type
	   conversions that might now be apparent due to propagation.  */
	STRIP_USELESS_TYPE_CONVERSION (result);

	if (result != rhs && valid_gimple_rhs_p (result))
	  return result;

	return NULL_TREE;
      }
      break;

    case GIMPLE_UNARY_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
	if (result)
	  {
	    /* If the operation was a conversion do _not_ mark a
	       resulting constant with TREE_OVERFLOW if the original
	       constant was not.  These conversions have implementation
	       defined behavior and retaining the TREE_OVERFLOW flag
	       here would confuse later passes such as VRP.  */
	    if (CONVERT_EXPR_CODE_P (subcode)
		&& TREE_CODE (result) == INTEGER_CST
		&& TREE_CODE (rhs) == INTEGER_CST)
	      TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);

	    STRIP_USELESS_TYPE_CONVERSION (result);
	    if (valid_gimple_rhs_p (result))
	      return result;
	  }
      }
      break;

    case GIMPLE_BINARY_RHS:
      /* Try to canonicalize for boolean-typed X the comparisons
	 X == 0, X == 1, X != 0, and X != 1.  */
      if (gimple_assign_rhs_code (stmt) == EQ_EXPR
	  || gimple_assign_rhs_code (stmt) == NE_EXPR)
	{
	  tree lhs = gimple_assign_lhs (stmt);
	  tree op1 = gimple_assign_rhs1 (stmt);
	  tree op2 = gimple_assign_rhs2 (stmt);
	  tree type = TREE_TYPE (op1);

	  /* Check whether the comparison operands are of the same boolean
	     type as the result type is.
	     Check that second operand is an integer-constant with value
	     one or zero.  */
	  if (TREE_CODE (op2) == INTEGER_CST
	      && (integer_zerop (op2) || integer_onep (op2))
	      && useless_type_conversion_p (TREE_TYPE (lhs), type))
	    {
	      enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
	      bool is_logical_not = false;

	      /* X == 0 and X != 1 is a logical-not of X
		 X == 1 and X != 0 is X  */
	      if ((cmp_code == EQ_EXPR && integer_zerop (op2))
		  || (cmp_code == NE_EXPR && integer_onep (op2)))
		is_logical_not = true;

	      if (is_logical_not == false)
		result = op1;
	      /* Only for one-bit precision typed X the transformation
		 !X -> ~X is valid.  */
	      else if (TYPE_PRECISION (type) == 1)
		result = build1_loc (gimple_location (stmt), BIT_NOT_EXPR,
				     type, op1);
	      /* Otherwise we use !X -> X ^ 1.  */
	      else
		result = build2_loc (gimple_location (stmt), BIT_XOR_EXPR,
				     type, op1, build_int_cst (type, 1));
	    }
	}

      if (!result)
	result = fold_binary_loc (loc, subcode,
				  TREE_TYPE (gimple_assign_lhs (stmt)),
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_TERNARY_RHS:
      /* Try to fold a conditional expression.  */
      if (gimple_assign_rhs_code (stmt) == COND_EXPR)
	{
	  tree op0 = gimple_assign_rhs1 (stmt);
	  tree tem;
	  bool set = false;
	  location_t cond_loc = gimple_location (stmt);

	  if (COMPARISON_CLASS_P (op0))
	    {
	      fold_defer_overflow_warnings ();
	      tem = fold_binary_loc (cond_loc,
				     TREE_CODE (op0), TREE_TYPE (op0),
				     TREE_OPERAND (op0, 0),
				     TREE_OPERAND (op0, 1));
	      /* This is actually a conditional expression, not a GIMPLE
		 conditional statement, however, the valid_gimple_rhs_p
		 test still applies.  */
	      set = (tem && is_gimple_condexpr (tem)
		     && valid_gimple_rhs_p (tem));
	      fold_undefer_overflow_warnings (set, stmt, 0);
	    }
	  else if (is_gimple_min_invariant (op0))
	    {
	      tem = op0;
	      set = true;
	    }
	  else
	    return NULL_TREE;

	  if (set)
	    result = fold_build3_loc (cond_loc, COND_EXPR,
				      TREE_TYPE (gimple_assign_lhs (stmt)), tem,
				      gimple_assign_rhs2 (stmt),
				      gimple_assign_rhs3 (stmt));
	}

      if (!result)
	result = fold_ternary_loc (loc, subcode,
				   TREE_TYPE (gimple_assign_lhs (stmt)),
				   gimple_assign_rhs1 (stmt),
				   gimple_assign_rhs2 (stmt),
				   gimple_assign_rhs3 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
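
/* Illustrative example (not part of the original source): with a
   boolean-typed _1, the statement  _2 = _1 == 0;  is canonicalized to
   _2 = ~_1;  when the type has one-bit precision, or to  _2 = _1 ^ 1;
   otherwise.  */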
/* Attempt to fold a conditional statement.  Return true if any changes were
   made.  We only attempt to fold the condition expression, and do not perform
   any transformation that would require alteration of the cfg.  It is
   assumed that the operands have been previously folded.  */

static bool
fold_gimple_cond (gimple stmt)
{
  tree result = fold_binary_loc (gimple_location (stmt),
				 gimple_cond_code (stmt),
				 boolean_type_node,
				 gimple_cond_lhs (stmt),
				 gimple_cond_rhs (stmt));

  if (result)
    {
      STRIP_USELESS_TYPE_CONVERSION (result);
      if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
	{
	  gimple_cond_set_condition_from_tree (stmt, result);
	  return true;
	}
    }

  return false;
}
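
/* Illustrative example (not part of the original source): after constant
   propagation the condition  if (5 > 3)  folds to an always-true
   GIMPLE_COND such as  if (1 != 0);  CFG cleanup removes the dead arm
   later.  */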
/* Replace a statement at *SI_P with a sequence of statements in STMTS,
   adjusting the replacement stmts location and virtual operands.
   If the statement has a lhs the last stmt in the sequence is expected
   to assign to that lhs.  */

static void
gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
{
  gimple stmt = gsi_stmt (*si_p);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  gimple laststore = NULL;
  for (gimple_stmt_iterator i = gsi_last (stmts);
       !gsi_end_p (i); gsi_prev (&i))
    {
      gimple new_stmt = gsi_stmt (i);
      if ((gimple_assign_single_p (new_stmt)
	   && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	  || (is_gimple_call (new_stmt)
	      && (gimple_call_flags (new_stmt)
		  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
	{
	  tree vdef;
	  if (!laststore)
	    vdef = gimple_vdef (stmt);
	  else
	    vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
	  gimple_set_vdef (new_stmt, vdef);
	  if (vdef && TREE_CODE (vdef) == SSA_NAME)
	    SSA_NAME_DEF_STMT (vdef) = new_stmt;
	  laststore = new_stmt;
	}
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  tree reaching_vuse = gimple_vuse (stmt);
  for (gimple_stmt_iterator i = gsi_start (stmts);
       !gsi_end_p (i); gsi_next (&i))
    {
      gimple new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with exact SSA
	 name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
	gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
	reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
	  && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}
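
/* Illustrative example (not part of the original source): when a call
   carrying virtual operands, say  .MEM_2 = call (...),  is replaced by a
   sequence of two stores, the first store takes the call's VUSE, an
   intermediate SSA name links the two, and the last store reuses .MEM_2
   so that downstream users need no renaming.  */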
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL,
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple stmt, new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  push_gimplify_context (gimple_in_ssa_p (cfun));

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  pop_gimplify_context (NULL);
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  gsi_replace (si_p, gimple_build_nop (), true);
	  return;
	}
    }
  else
    {
      tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
				       GSI_CONTINUE_LINKING);
    }

  pop_gimplify_context (NULL);

  gsi_replace_with_seq_vops (si_p, stmts);
}
/* Replace the call at *GSI with the gimple value VAL.  */

static void
replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
{
  gimple stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  gimple repl;
  if (lhs)
    {
      if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
	val = fold_convert (TREE_TYPE (lhs), val);
      repl = gimple_build_assign (lhs, val);
    }
  else
    repl = gimple_build_nop ();
  tree vdef = gimple_vdef (stmt);
  if (vdef && TREE_CODE (vdef) == SSA_NAME)
    {
      unlink_stmt_vdef (stmt);
      release_ssa_name (vdef);
    }
  gsi_replace (gsi, repl, true);
}
/* Replace the call at *GSI with the new call REPL and fold that
   again.  */

static void
replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple repl)
{
  gimple stmt = gsi_stmt (*gsi);
  gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
  gimple_set_location (repl, gimple_location (stmt));
  if (gimple_vdef (stmt)
      && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
    {
      gimple_set_vdef (repl, gimple_vdef (stmt));
      gimple_set_vuse (repl, gimple_vuse (stmt));
      SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
    }
  gsi_replace (gsi, repl, true);
  fold_stmt (gsi);
}
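
/* Illustrative usage (not part of the original source): the strcpy folder
   below builds  repl = gimple_build_call (fn, 3, dest, src, len)  for
   memcpy and hands it to replace_call_with_call_and_fold, so the new call
   inherits the lhs, location and virtual operands of the old one and is
   immediately refolded.  */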
/* Return true if VAR is a VAR_DECL or a component thereof.  */

static bool
var_decl_component_p (tree var)
{
  tree inner = var;
  while (handled_component_p (inner))
    inner = TREE_OPERAND (inner, 0);
  return SSA_VAR_P (inner);
}
/* Fold function call to builtin mem{{,p}cpy,move}.  Return
   false if no simplification can be made.
   If ENDP is 0, return DEST (like memcpy).
   If ENDP is 1, return DEST+LEN (like mempcpy).
   If ENDP is 2, return DEST+LEN-1 (like stpcpy).
   If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
   (memmove).  */

static bool
gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
			       tree dest, tree src, int endp)
{
  gimple stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  tree len = gimple_call_arg (stmt, 2);
  tree destvar, srcvar;
  location_t loc = gimple_location (stmt);

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      gimple repl;
      if (gimple_call_lhs (stmt))
	repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
      else
	repl = gimple_build_nop ();
      tree vdef = gimple_vdef (stmt);
      if (vdef && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
      gsi_replace (gsi, repl, true);
      return true;
    }

  /* If SRC and DEST are the same (and not volatile), return
     DEST{,+LEN,+LEN-1}.  */
  if (operand_equal_p (src, dest, 0))
    {
      unlink_stmt_vdef (stmt);
      if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	release_ssa_name (gimple_vdef (stmt));
      if (!lhs)
	{
	  gsi_replace (gsi, gimple_build_nop (), true);
	  return true;
	}
      goto done;
    }
  else
    {
      tree srctype, desttype;
      unsigned int src_align, dest_align;
      tree off0;

      /* Build accesses at offset zero with a ref-all character type.  */
      off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
							 ptr_mode, true), 0);

      /* If we can perform the copy efficiently with first doing all loads
	 and then all stores inline it that way.  Currently efficiently
	 means that we can load all the memory into a single integer
	 register which is what MOVE_MAX gives us.  */
      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (tree_fits_uhwi_p (len)
	  && compare_tree_int (len, MOVE_MAX) <= 0
	  /* ???  Don't transform copies from strings with known length as
	     this confuses tree-ssa-strlen.c.  This doesn't handle
	     the case in gcc.dg/strlenopt-8.c which is XFAILed for that
	     reason.  */
	  && !c_strlen (src, 2))
	{
	  unsigned ilen = tree_to_uhwi (len);
	  if (exact_log2 (ilen) != -1)
	    {
	      tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
	      if (type
		  && TYPE_MODE (type) != BLKmode
		  && (GET_MODE_SIZE (TYPE_MODE (type)) * BITS_PER_UNIT
		      == ilen * 8)
		  /* If the destination pointer is not aligned we must be able
		     to emit an unaligned store.  */
		  && (dest_align >= GET_MODE_ALIGNMENT (TYPE_MODE (type))
		      || !SLOW_UNALIGNED_ACCESS (TYPE_MODE (type), dest_align)))
		{
		  tree srctype = type;
		  tree desttype = type;
		  if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
		    srctype = build_aligned_type (type, src_align);
		  tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
		  tree tem = fold_const_aggregate_ref (srcmem);
		  if (tem)
		    srcmem = tem;
		  else if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type))
			   && SLOW_UNALIGNED_ACCESS (TYPE_MODE (type),
						     src_align))
		    srcmem = NULL_TREE;
		  if (srcmem)
		    {
		      gimple new_stmt;
		      if (is_gimple_reg_type (TREE_TYPE (srcmem)))
			{
			  new_stmt = gimple_build_assign (NULL_TREE, srcmem);
			  if (gimple_in_ssa_p (cfun))
			    srcmem = make_ssa_name (TREE_TYPE (srcmem),
						    new_stmt);
			  else
			    srcmem = create_tmp_reg (TREE_TYPE (srcmem),
						     NULL);
			  gimple_assign_set_lhs (new_stmt, srcmem);
			  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
			}
		      if (dest_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
			desttype = build_aligned_type (type, dest_align);
		      new_stmt
			= gimple_build_assign (fold_build2 (MEM_REF, desttype,
							    dest, off0),
					       srcmem);
		      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
		      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
		      if (gimple_vdef (new_stmt)
			  && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
			SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
		      if (!lhs)
			{
			  gsi_replace (gsi, new_stmt, true);
			  return true;
			}
		      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
		      goto done;
		    }
		}
	    }
	}

      if (endp == 3)
	{
	  /* Both DEST and SRC must be pointer types.
	     ??? This is what old code did.  Is the testing for pointer types
	     really mandatory?

	     If either SRC is readonly or length is 1, we can use memcpy.  */
	  if (!dest_align || !src_align)
	    return false;
	  if (readonly_data_expr (src)
	      || (tree_fits_uhwi_p (len)
		  && (MIN (src_align, dest_align) / BITS_PER_UNIT
		      >= tree_to_uhwi (len))))
	    {
	      tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
	      if (!fn)
		return false;
	      gimple_call_set_fndecl (stmt, fn);
	      gimple_call_set_arg (stmt, 0, dest);
	      gimple_call_set_arg (stmt, 1, src);
	      fold_stmt (gsi);
	      return true;
	    }

	  /* If *src and *dest can't overlap, optimize into memcpy as well.  */
	  if (TREE_CODE (src) == ADDR_EXPR
	      && TREE_CODE (dest) == ADDR_EXPR)
	    {
	      tree src_base, dest_base, fn;
	      HOST_WIDE_INT src_offset = 0, dest_offset = 0;
	      HOST_WIDE_INT size = -1;
	      HOST_WIDE_INT maxsize = -1;

	      srcvar = TREE_OPERAND (src, 0);
	      src_base = get_ref_base_and_extent (srcvar, &src_offset,
						  &size, &maxsize);
	      destvar = TREE_OPERAND (dest, 0);
	      dest_base = get_ref_base_and_extent (destvar, &dest_offset,
						   &size, &maxsize);
	      if (tree_fits_uhwi_p (len))
		maxsize = tree_to_uhwi (len);
	      else
		maxsize = -1;
	      src_offset /= BITS_PER_UNIT;
	      dest_offset /= BITS_PER_UNIT;
	      if (SSA_VAR_P (src_base)
		  && SSA_VAR_P (dest_base))
		{
		  if (operand_equal_p (src_base, dest_base, 0)
		      && ranges_overlap_p (src_offset, maxsize,
					   dest_offset, maxsize))
		    return false;
		}
	      else if (TREE_CODE (src_base) == MEM_REF
		       && TREE_CODE (dest_base) == MEM_REF)
		{
		  if (! operand_equal_p (TREE_OPERAND (src_base, 0),
					 TREE_OPERAND (dest_base, 0), 0))
		    return false;
		  offset_int off = mem_ref_offset (src_base) + src_offset;
		  if (!wi::fits_shwi_p (off))
		    return false;
		  src_offset = off.to_shwi ();

		  off = mem_ref_offset (dest_base) + dest_offset;
		  if (!wi::fits_shwi_p (off))
		    return false;
		  dest_offset = off.to_shwi ();
		  if (ranges_overlap_p (src_offset, maxsize,
					dest_offset, maxsize))
		    return false;
		}
	      else
		return false;

	      fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
	      if (!fn)
		return false;
	      gimple_call_set_fndecl (stmt, fn);
	      gimple_call_set_arg (stmt, 0, dest);
	      gimple_call_set_arg (stmt, 1, src);
	      fold_stmt (gsi);
	      return true;
	    }

	  /* If the destination and source do not alias optimize into
	     memcpy as well.  */
	  if ((is_gimple_min_invariant (dest)
	       || TREE_CODE (dest) == SSA_NAME)
	      && (is_gimple_min_invariant (src)
		  || TREE_CODE (src) == SSA_NAME))
	    {
	      ao_ref destr, srcr;
	      ao_ref_init_from_ptr_and_size (&destr, dest, len);
	      ao_ref_init_from_ptr_and_size (&srcr, src, len);
	      if (!refs_may_alias_p_1 (&destr, &srcr, false))
		{
		  tree fn;
		  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
		  if (!fn)
		    return false;
		  gimple_call_set_fndecl (stmt, fn);
		  gimple_call_set_arg (stmt, 0, dest);
		  gimple_call_set_arg (stmt, 1, src);
		  fold_stmt (gsi);
		  return true;
		}
	    }

	  return false;
	}

      if (!tree_fits_shwi_p (len))
	return false;
      /* FIXME:
	 This logic loses for arguments like (type *)malloc (sizeof (type)),
	 since we strip the casts of up to VOID return value from malloc.
	 Perhaps we ought to inherit type from non-VOID argument here?  */
      STRIP_NOPS (src);
      STRIP_NOPS (dest);
      if (!POINTER_TYPE_P (TREE_TYPE (src))
	  || !POINTER_TYPE_P (TREE_TYPE (dest)))
	return false;
      /* In the following try to find a type that is most natural to be
	 used for the memcpy source and destination and that allows
	 the most optimization when memcpy is turned into a plain assignment
	 using that type.  In theory we could always use a char[len] type
	 but that only gains us that the destination and source possibly
	 no longer will have their address taken.  */
      /* As we fold (void *)(p + CST) to (void *)p + CST undo this here.  */
      if (TREE_CODE (src) == POINTER_PLUS_EXPR)
	{
	  tree tem = TREE_OPERAND (src, 0);
	  STRIP_NOPS (tem);
	  if (tem != TREE_OPERAND (src, 0))
	    src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
	}
      if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
	{
	  tree tem = TREE_OPERAND (dest, 0);
	  STRIP_NOPS (tem);
	  if (tem != TREE_OPERAND (dest, 0))
	    dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
	}
      srctype = TREE_TYPE (TREE_TYPE (src));
      if (TREE_CODE (srctype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
	{
	  srctype = TREE_TYPE (srctype);
	  STRIP_NOPS (src);
	  src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
	}
      desttype = TREE_TYPE (TREE_TYPE (dest));
      if (TREE_CODE (desttype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
	{
	  desttype = TREE_TYPE (desttype);
	  STRIP_NOPS (dest);
	  dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
	}
      if (TREE_ADDRESSABLE (srctype)
	  || TREE_ADDRESSABLE (desttype))
	return false;

      /* Make sure we are not copying using a floating-point mode or
	 a type whose size possibly does not match its precision.  */
      if (FLOAT_MODE_P (TYPE_MODE (desttype))
	  || TREE_CODE (desttype) == BOOLEAN_TYPE
	  || TREE_CODE (desttype) == ENUMERAL_TYPE)
	desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
      if (FLOAT_MODE_P (TYPE_MODE (srctype))
	  || TREE_CODE (srctype) == BOOLEAN_TYPE
	  || TREE_CODE (srctype) == ENUMERAL_TYPE)
	srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
      if (!srctype)
	srctype = desttype;
      if (!desttype)
	desttype = srctype;
      if (!srctype)
	return false;

      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (dest_align < TYPE_ALIGN (desttype)
	  || src_align < TYPE_ALIGN (srctype))
	return false;

      destvar = dest;
      STRIP_NOPS (destvar);
      if (TREE_CODE (destvar) == ADDR_EXPR
	  && var_decl_component_p (TREE_OPERAND (destvar, 0))
	  && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
	destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
      else
	destvar = NULL_TREE;

      srcvar = src;
      STRIP_NOPS (srcvar);
      if (TREE_CODE (srcvar) == ADDR_EXPR
	  && var_decl_component_p (TREE_OPERAND (srcvar, 0))
	  && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
	{
	  if (!destvar
	      || src_align >= TYPE_ALIGN (desttype))
	    srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
				  srcvar, off0);
	  else if (!STRICT_ALIGNMENT)
	    {
	      srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
					    src_align);
	      srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
	    }
	  else
	    srcvar = NULL_TREE;
	}
      else
	srcvar = NULL_TREE;

      if (srcvar == NULL_TREE && destvar == NULL_TREE)
	return false;

      if (srcvar == NULL_TREE)
	{
	  STRIP_NOPS (src);
	  if (src_align >= TYPE_ALIGN (desttype))
	    srcvar = fold_build2 (MEM_REF, desttype, src, off0);
	  else
	    {
	      if (STRICT_ALIGNMENT)
		return false;
	      srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
					    src_align);
	      srcvar = fold_build2 (MEM_REF, srctype, src, off0);
	    }
	}
      else if (destvar == NULL_TREE)
	{
	  STRIP_NOPS (dest);
	  if (dest_align >= TYPE_ALIGN (srctype))
	    destvar = fold_build2 (MEM_REF, srctype, dest, off0);
	  else
	    {
	      if (STRICT_ALIGNMENT)
		return false;
	      desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
					     dest_align);
	      destvar = fold_build2 (MEM_REF, desttype, dest, off0);
	    }
	}

      gimple new_stmt;
      if (is_gimple_reg_type (TREE_TYPE (srcvar)))
	{
	  new_stmt = gimple_build_assign (NULL_TREE, srcvar);
	  if (gimple_in_ssa_p (cfun))
	    srcvar = make_ssa_name (TREE_TYPE (srcvar), new_stmt);
	  else
	    srcvar = create_tmp_reg (TREE_TYPE (srcvar), NULL);
	  gimple_assign_set_lhs (new_stmt, srcvar);
	  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
	  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
	}
      new_stmt = gimple_build_assign (destvar, srcvar);
      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
      if (gimple_vdef (new_stmt)
	  && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
	SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
      if (!lhs)
	{
	  gsi_replace (gsi, new_stmt, true);
	  return true;
	}
      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
    }

done:
  if (endp == 0 || endp == 3)
    len = NULL_TREE;
  else if (endp == 2)
    len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
			   ssize_int (1));
  if (endp == 2 || endp == 1)
    dest = fold_build_pointer_plus_loc (loc, dest, len);

  dest = force_gimple_operand_gsi (gsi, dest, false, NULL_TREE, true,
				   GSI_SAME_STMT);
  gimple repl = gimple_build_assign (lhs, dest);
  gsi_replace (gsi, repl, true);
  return true;
}
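
/* Illustrative example (not part of the original source): on a target
   where MOVE_MAX >= 4, the call  memcpy (&d, &s, 4)  with suitably
   aligned operands is inlined as a single 32-bit load/store pair:

     _1 = MEM[(unsigned int *)&s];
     MEM[(unsigned int *)&d] = _1;  */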
/* Fold function call to builtin memset or bzero at *GSI setting the
   memory of size LEN to VAL.  Return whether a simplification was made.  */

static bool
gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
{
  gimple stmt = gsi_stmt (*gsi);
  tree etype;
  unsigned HOST_WIDE_INT length, cval;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
      return true;
    }

  if (! tree_fits_uhwi_p (len))
    return false;

  if (TREE_CODE (c) != INTEGER_CST)
    return false;

  tree dest = gimple_call_arg (stmt, 0);
  tree var = dest;
  if (TREE_CODE (var) != ADDR_EXPR)
    return false;

  var = TREE_OPERAND (var, 0);
  if (TREE_THIS_VOLATILE (var))
    return false;

  etype = TREE_TYPE (var);
  if (TREE_CODE (etype) == ARRAY_TYPE)
    etype = TREE_TYPE (etype);

  if (!INTEGRAL_TYPE_P (etype)
      && !POINTER_TYPE_P (etype))
    return false;

  if (! var_decl_component_p (var))
    return false;

  length = tree_to_uhwi (len);
  if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
      || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
    return false;

  if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
    return false;

  if (integer_zerop (c))
    cval = 0;
  else
    {
      if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
	return false;

      cval = TREE_INT_CST_LOW (c);
      cval &= 0xff;
      cval |= cval << 8;
      cval |= cval << 16;
      cval |= (cval << 31) << 1;
    }

  var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
  gimple store = gimple_build_assign (var, build_int_cst_type (etype, cval));
  gimple_set_vuse (store, gimple_vuse (stmt));
  tree vdef = gimple_vdef (stmt);
  if (vdef && TREE_CODE (vdef) == SSA_NAME)
    {
      gimple_set_vdef (store, gimple_vdef (stmt));
      SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
    }
  gsi_insert_before (gsi, store, GSI_SAME_STMT);
  if (gimple_call_lhs (stmt))
    {
      gimple asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
      gsi_replace (gsi, asgn, true);
    }
  else
    {
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (gsi);
      gsi_remove (&gsi2, true);
    }

  return true;
}
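
/* Illustrative example (not part of the original source): with a 4-byte
   int at full alignment,  memset (&i, 1, 4)  becomes the single store
   MEM[(int *)&i] = 0x01010101, i.e. the byte value replicated into each
   byte of the word.  */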
/* Return the string length, maximum string length or maximum value of
   ARG in LENGTH.
   If ARG is an SSA name variable, follow its use-def chains.  If LENGTH
   is not NULL and, for TYPE == 0, its value is not equal to the length
   we determine or if we are unable to determine the length or value,
   return false.  VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be returned, 1 for maximum string
   length and 2 for maximum value ARG can have.  */

static bool
get_maxval_strlen (tree arg, tree *length, bitmap *visited, int type)
{
  tree var, val;
  gimple def_stmt;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      if (TREE_CODE (arg) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
	  && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
	{
	  tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_maxval_strlen (TREE_OPERAND (aop0, 0),
				      length, visited, type);
	}

      if (type == 2)
	{
	  val = arg;
	  if (TREE_CODE (val) != INTEGER_CST
	      || tree_int_cst_sgn (val) < 0)
	    return false;
	}
      else
	val = c_strlen (arg, 1);
      if (!val)
	return false;

      if (*length)
	{
	  if (type > 0)
	    {
	      if (TREE_CODE (*length) != INTEGER_CST
		  || TREE_CODE (val) != INTEGER_CST)
		return false;

	      if (tree_int_cst_lt (*length, val))
		*length = val;
	      return true;
	    }
	  else if (simple_cst_equal (val, *length) != 1)
	    return false;
	}

      *length = val;
      return true;
    }

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  */
  if (!*visited)
    *visited = BITMAP_ALLOC (NULL);
  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
      case GIMPLE_ASSIGN:
	/* The RHS of the statement defining VAR must either have a
	   constant length or come from another SSA_NAME with a constant
	   length.  */
	if (gimple_assign_single_p (def_stmt)
	    || gimple_assign_unary_nop_p (def_stmt))
	  {
	    tree rhs = gimple_assign_rhs1 (def_stmt);
	    return get_maxval_strlen (rhs, length, visited, type);
	  }
	else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
	  {
	    tree op2 = gimple_assign_rhs2 (def_stmt);
	    tree op3 = gimple_assign_rhs3 (def_stmt);
	    return get_maxval_strlen (op2, length, visited, type)
	      && get_maxval_strlen (op3, length, visited, type);
	  }
	return false;

      case GIMPLE_PHI:
	{
	  /* All the arguments of the PHI node must have the same constant
	     length.  */
	  unsigned i;

	  for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
	    {
	      tree arg = gimple_phi_arg (def_stmt, i)->def;

	      /* If this PHI has itself as an argument, we cannot
		 determine the string length of this argument.  However,
		 if we can find a constant string length for the other
		 PHI args then we can still be sure that this is a
		 constant string length.  So be optimistic and just
		 continue with the next argument.  */
	      if (arg == gimple_phi_result (def_stmt))
		continue;

	      if (!get_maxval_strlen (arg, length, visited, type))
		return false;
	    }
	}
	return true;

      default:
	return false;
    }
}

tree
get_maxval_strlen (tree arg, int type)
{
  bitmap visited = NULL;
  tree len = NULL_TREE;
  if (!get_maxval_strlen (arg, &len, &visited, type))
    len = NULL_TREE;
  if (visited)
    BITMAP_FREE (visited);

  return len;
}
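
/* Illustrative example (not part of the original source): for

     p_1 = PHI <"foo"(bb2), "quux"(bb3)>

   get_maxval_strlen (p_1, 0) fails because the two strings have
   different lengths, while get_maxval_strlen (p_1, 1) returns 4, the
   maximum string length over all PHI arguments.  */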
/* Fold function call to builtin strcpy with arguments DEST and SRC.
   If LEN is not NULL, it represents the length of the string to be
   copied.  Return false if no simplification can be made.  */

static bool
gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
			    tree dest, tree src)
{
  location_t loc = gimple_location (gsi_stmt (*gsi));
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (operand_equal_p (src, dest, 0))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (optimize_function_for_size_p (cfun))
    return false;

  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  tree len = get_maxval_strlen (src, 0);
  if (!len)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
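
/* Illustrative example (not part of the original source): when the source
   length is known,  strcpy (d, s)  with strlen (s) == 3 becomes
   memcpy (d, s, 4),  copying the terminating NUL as well.  */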
/* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
   If SLEN is not NULL, it represents the length of the source string.
   Return false if no simplification can be made.  */

static bool
gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
			     tree dest, tree src, tree len)
{
  location_t loc = gimple_location (gsi_stmt (*gsi));
  tree fn;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  /* We can't compare slen with len as constants below if len is not a
     constant.  */
  if (TREE_CODE (len) != INTEGER_CST)
    return false;

  /* Now, we must be passed a constant src ptr parameter.  */
  tree slen = get_maxval_strlen (src, 0);
  if (!slen || TREE_CODE (slen) != INTEGER_CST)
    return false;

  slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

  /* We do not support simplification of this case, though we do
     support it when expanding trees into RTL.  */
  /* FIXME: generate a call to __builtin_memset.  */
  if (tree_int_cst_lt (slen, len))
    return false;

  /* OK transform into builtin memcpy.  */
  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Simplify a call to the strcat builtin.  DST and SRC are the arguments
   to the call.

   Return false if no simplification was possible, otherwise return true
   and replace the call with a more efficient equivalent, possibly using
   calls to other builtin functions.  */

static bool
gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);

  const char *p = c_getstr (src);

  /* If the string length is zero, return the dst parameter.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
    return false;

  /* See if we can store by pieces into (dst + strlen(dst)).  */
  tree newdst;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
  tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);

  if (!strlen_fn || !memcpy_fn)
    return false;

  /* If the length of the source string isn't computable don't
     split strcat into strlen and memcpy.  */
  tree len = get_maxval_strlen (src, 0);
  if (!len)
    return false;

  /* Create strlen (dst).  */
  gimple_seq stmts = NULL, stmts2;
  gimple repl = gimple_build_call (strlen_fn, 1, dst);
  gimple_set_location (repl, loc);
  if (gimple_in_ssa_p (cfun))
    newdst = make_ssa_name (size_type_node, NULL);
  else
    newdst = create_tmp_reg (size_type_node, NULL);
  gimple_call_set_lhs (repl, newdst);
  gimple_seq_add_stmt_without_update (&stmts, repl);

  /* Create (dst p+ strlen (dst)).  */
  newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
  newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len,
			build_int_cst (size_type_node, 1));
  len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
  gimple_seq_add_stmt_without_update (&stmts, repl);
  if (gimple_call_lhs (stmt))
    {
      repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      gsi_replace_with_seq_vops (gsi, stmts);
      /* gsi now points at the assignment to the lhs, get a
	 stmt iterator to the memcpy call.
	 ???  We can't use gsi_for_stmt as that doesn't work when the
	 CFG isn't built yet.  */
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (&gsi2);
      fold_stmt (&gsi2);
    }
  else
    {
      gsi_replace_with_seq_vops (gsi, stmts);
      fold_stmt (gsi);
    }
  return true;
}
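
/* Illustrative example (not part of the original source): with a source
   of known length,  strcat (dst, "ab")  is split when optimizing for
   speed into

     _1 = strlen (dst);
     _2 = dst + _1;
     memcpy (_2, "ab", 3);  */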
/* Fold a call to the __strcat_chk builtin FNDECL.  DEST, SRC, and SIZE
   are the arguments to the call.  */

static bool
gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree size = gimple_call_arg (stmt, 2);
  tree fn;
  const char *p;

  p = c_getstr (src);
  /* If the SRC parameter is "", return DEST.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
    return false;

  /* If __builtin_strcat_chk is used, assume strcat is available.  */
  fn = builtin_decl_explicit (BUILT_IN_STRCAT);
  if (!fn)
    return false;

  gimple repl = gimple_build_call (fn, 2, dest, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold a call to the fputs builtin.  ARG0 and ARG1 are the arguments
   to the call.  UNLOCKED is true if this is actually a call to
   fputs_unlocked.  Return false if no simplification was possible.  */

static bool
gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
			   tree arg0, tree arg1,
			   bool unlocked)
{
  gimple stmt = gsi_stmt (*gsi);

  /* If we're using an unlocked function, assume the other unlocked
     functions exist explicitly.  */
  tree const fn_fputc = (unlocked
			 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
			 : builtin_decl_implicit (BUILT_IN_FPUTC));
  tree const fn_fwrite = (unlocked
			  ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
			  : builtin_decl_implicit (BUILT_IN_FWRITE));

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt))
    return false;

  /* Get the length of the string passed to fputs.  If the length
     can't be determined, punt.  */
  tree len = get_maxval_strlen (arg0, 0);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return false;

  switch (compare_tree_int (len, 1))
    {
    case -1: /* length is 0, delete the call entirely.  */
      replace_call_with_value (gsi, integer_zero_node);
      return true;

    case 0: /* length is 1, call fputc.  */
      {
	const char *p = c_getstr (arg0);
	if (p != NULL)
	  {
	    if (!fn_fputc)
	      return false;

	    gimple repl = gimple_build_call (fn_fputc, 2,
					     build_int_cst
					       (integer_type_node, p[0]), arg1);
	    replace_call_with_call_and_fold (gsi, repl);
	    return true;
	  }
      }
      /* FALLTHROUGH */
    case 1: /* length is greater than 1, call fwrite.  */
      {
	/* If optimizing for size keep fputs.  */
	if (optimize_function_for_size_p (cfun))
	  return false;
	/* New argument list transforming fputs(string, stream) to
	   fwrite(string, 1, len, stream).  */
	if (!fn_fwrite)
	  return false;

	gimple repl = gimple_build_call (fn_fwrite, 4, arg0,
					 size_one_node, len, arg1);
	replace_call_with_call_and_fold (gsi, repl);
	return true;
      }
    default:
      gcc_unreachable ();
    }
}
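
/* Illustrative example (not part of the original source): with the
   result unused,  fputs ("x", f)  becomes  fputc ('x', f),  and
   fputs ("abc", f)  becomes  fwrite ("abc", 1, 3, f)  when optimizing
   for speed.  */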
/* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   DEST, SRC, LEN, and SIZE are the arguments to the call.
   IGNORE is true if the return value can be ignored.  FCODE is the
   BUILT_IN_* code of the builtin.  If MAXLEN is not NULL, it is the
   maximum length passed as third argument.  */

static bool
gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
				tree dest, tree src, tree len, tree size,
				enum built_in_function fcode)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST
     (resp. DEST+LEN for __mempcpy_chk).  */
  if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
    {
      if (fcode != BUILT_IN_MEMPCPY_CHK)
	{
	  replace_call_with_value (gsi, dest);
	  return true;
	}
      else
	{
	  tree temp = fold_build_pointer_plus_loc (loc, dest, len);
	  temp = force_gimple_operand_gsi (gsi, temp,
					   false, NULL_TREE, true,
					   GSI_SAME_STMT);
	  replace_call_with_value (gsi, temp);
	  return true;
	}
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (len, 2);
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
		{
		  /* (void) __mempcpy_chk () can be optimized into
		     (void) __memcpy_chk ().  */
		  fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
		  if (!fn)
		    return false;

		  gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}
	      return false;
	    }
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  fn = NULL_TREE;
  /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
     mem{cpy,pcpy,move,set} is available.  */
  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
      break;
    case BUILT_IN_MEMPCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
      break;
    case BUILT_IN_MEMMOVE_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
      break;
    case BUILT_IN_MEMSET_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMSET);
      break;
    default:
      break;
    }

  if (!fn)
    return false;

  gimple repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
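
/* Illustrative example (not part of the original source): when the
   object size is unknown at compile time (SIZE == (size_t) -1),
   __memcpy_chk (d, s, n, -1)  is lowered to the plain
   memcpy (d, s, n)  since the check can never fail.  */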
/* Fold a call to the __st[rp]cpy_chk builtin.
   DEST, SRC, and SIZE are the arguments to the call.
   IGNORE is true if the return value can be ignored.  FCODE is the
   BUILT_IN_* code of the builtin.  If MAXLEN is not NULL, it is the
   maximum length of strings passed as second argument.  */

static bool
gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
				tree dest,
				tree src, tree size,
				enum built_in_function fcode)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree len, fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (src, 1);
  if (! integer_all_onesp (size))
    {
      len = c_strlen (src, 1);
      if (! len || ! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_STPCPY_CHK)
		{
		  if (! ignore)
		    return false;

		  /* If return value of __stpcpy_chk is ignored,
		     optimize into __strcpy_chk.  */
		  fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
		  if (!fn)
		    return false;

		  gimple repl = gimple_build_call (fn, 3, dest, src, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}

	      if (! len || TREE_SIDE_EFFECTS (len))
		return false;

	      /* If c_strlen returned something, but not a constant,
		 transform __strcpy_chk into __memcpy_chk.  */
	      fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	      if (!fn)
		return false;

	      len = fold_convert_loc (loc, size_type_node, len);
	      len = size_binop_loc (loc, PLUS_EXPR, len,
				    build_int_cst (size_type_node, 1));
	      len = force_gimple_operand_gsi (gsi, len, true, NULL_TREE,
					      true, GSI_SAME_STMT);
	      gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }
	}
      else
	maxlen = len;

      if (! tree_int_cst_lt (maxlen, size))
	return false;
    }

  /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
			      ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
  if (!fn)
    return false;

  gimple repl = gimple_build_call (fn, 2, dest, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold a call to the __st{r,p}ncpy_chk builtin.  DEST, SRC, LEN, and SIZE
   are the arguments to the call.  If MAXLEN is not NULL, it is the maximum
   length passed as third argument.  IGNORE is true if the return value can
   be ignored.  FCODE is the BUILT_IN_* code of the builtin.  */

static bool
gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
				 tree dest, tree src,
				 tree len, tree size,
				 enum built_in_function fcode)
{
  gimple stmt = gsi_stmt (*gsi);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
    {
      /* If return value of __stpncpy_chk is ignored,
	 optimize into __strncpy_chk.  */
      fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
      if (fn)
	{
	  gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (len, 2);
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    return false;
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
			      ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
  if (!fn)
    return false;

  gimple repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold a call to {,v}snprintf_chk at *GSI.  Return false if a normal
   call should be emitted rather than expanding the function inline.
   FCODE is either BUILT_IN_SNPRINTF_CHK or BUILT_IN_VSNPRINTF_CHK.
   If MAXLEN is not NULL, it is the maximum length passed as second
   argument.  */

static bool
gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
				  enum built_in_function fcode)
{
  gimple stmt = gsi_stmt (*gsi);
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call.  */
  if (gimple_call_num_args (stmt) < 5)
    return false;

  dest = gimple_call_arg (stmt, 0);
  len = gimple_call_arg (stmt, 1);
  flag = gimple_call_arg (stmt, 2);
  size = gimple_call_arg (stmt, 3);
  fmt = gimple_call_arg (stmt, 4);

  if (! tree_fits_uhwi_p (size))
    return false;

  if (! integer_all_onesp (size))
    {
      tree maxlen = get_maxval_strlen (len, 2);
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    return false;
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  if (!init_target_chars ())
    return false;

  /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      fmt_str = c_getstr (fmt);
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
     available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
			      ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 5 arguments by 3 retaining
     trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, len);
  gimple_call_set_arg (stmt, 2, fmt);
  for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
/* Fold a call to __{,v}sprintf_chk at *GSI.  Return true if the call was
   simplified in place, false if a normal call should be emitted instead.
   FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK.  */

static bool
gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
                                 enum built_in_function fcode)
{
  gimple stmt = gsi_stmt (*gsi);
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;
  unsigned nargs = gimple_call_num_args (stmt);

  /* Verify the required arguments in the original call.  */
  if (nargs < 4)
    return false;
  dest = gimple_call_arg (stmt, 0);
  flag = gimple_call_arg (stmt, 1);
  size = gimple_call_arg (stmt, 2);
  fmt = gimple_call_arg (stmt, 3);

  if (! tree_fits_uhwi_p (size))
    return false;

  len = NULL_TREE;

  if (!init_target_chars ())
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str != NULL)
    {
      /* If the format doesn't contain % args or %%, we know the size.  */
      if (strchr (fmt_str, target_percent) == 0)
        {
          if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
            len = build_int_cstu (size_type_node, strlen (fmt_str));
        }
      /* If the format is "%s" and first ... argument is a string literal,
         we know the size too.  */
      else if (fcode == BUILT_IN_SPRINTF_CHK
               && strcmp (fmt_str, target_percent_s) == 0)
        {
          tree arg;

          if (nargs == 5)
            {
              arg = gimple_call_arg (stmt, 4);
              if (POINTER_TYPE_P (TREE_TYPE (arg)))
                {
                  len = c_strlen (arg, 1);
                  if (! len || ! tree_fits_uhwi_p (len))
                    len = NULL_TREE;
                }
            }
        }
    }

  if (! integer_all_onesp (size))
    {
      if (! len || ! tree_int_cst_lt (len, size))
        return false;
    }

  /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      if (fmt_str == NULL)
        return false;
      if (strchr (fmt_str, target_percent) != NULL
          && strcmp (fmt_str, target_percent_s))
        return false;
    }

  /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
                              ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 4 arguments by 2, retaining
     trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, fmt);
  for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
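
/* For illustration (a sketch of the source-level effect): the rewrite is

     __builtin___sprintf_chk (buf, 0, __builtin_object_size (buf, 0),
                              "%s", s);
   =>
     sprintf (buf, "%s", s);

   which is valid here because the length of the "%s" argument was shown
   to be strictly smaller than the object size.  */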
/* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
   ORIG may be null if this is a 2-argument call.  We don't attempt to
   simplify calls with more than 3 arguments.

   Return true if simplification was possible, otherwise false.  */

static bool
gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree fmt = gimple_call_arg (stmt, 1);
  tree orig = NULL_TREE;
  const char *fmt_str = NULL;

  /* Verify the required arguments in the original call.  We deal with two
     types of sprintf() calls: 'sprintf (str, fmt)' and
     'sprintf (dest, "%s", orig)'.  */
  if (gimple_call_num_args (stmt) > 3)
    return false;

  if (gimple_call_num_args (stmt) == 3)
    orig = gimple_call_arg (stmt, 2);

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);

      if (!fn)
        return false;

      /* Don't optimize sprintf (buf, "abc", ptr++).  */
      if (orig)
        return false;

      /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
         'format' is known to contain no % formats.  */
      gimple_seq stmts = NULL;
      gimple repl = gimple_build_call (fn, 2, dest, fmt);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
        {
          repl = gimple_build_assign (gimple_call_lhs (stmt),
                                      build_int_cst (integer_type_node,
                                                     strlen (fmt_str)));
          gimple_seq_add_stmt_without_update (&stmts, repl);
          gsi_replace_with_seq_vops (gsi, stmts);
          /* gsi now points at the assignment to the lhs, get a
             stmt iterator to the strcpy call.
             ???  We can't use gsi_for_stmt as that doesn't work when the
             CFG isn't built yet.  */
          gimple_stmt_iterator gsi2 = *gsi;
          gsi_prev (&gsi2);
          fold_stmt (&gsi2);
        }
      else
        {
          gsi_replace_with_seq_vops (gsi, stmts);
          fold_stmt (gsi);
        }
      return true;
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn;
      fn = builtin_decl_implicit (BUILT_IN_STRCPY);

      if (!fn)
        return false;

      /* Don't crash on sprintf (str1, "%s").  */
      if (!orig)
        return false;

      tree orig_len = NULL_TREE;
      if (gimple_call_lhs (stmt))
        {
          orig_len = get_maxval_strlen (orig, 0);
          if (!orig_len)
            return false;
        }

      /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2).  */
      gimple_seq stmts = NULL;
      gimple repl = gimple_build_call (fn, 2, dest, orig);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
        {
          if (!useless_type_conversion_p (integer_type_node,
                                          TREE_TYPE (orig_len)))
            orig_len = fold_convert (integer_type_node, orig_len);
          repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
          gimple_seq_add_stmt_without_update (&stmts, repl);
          gsi_replace_with_seq_vops (gsi, stmts);
          /* gsi now points at the assignment to the lhs, get a
             stmt iterator to the strcpy call.
             ???  We can't use gsi_for_stmt as that doesn't work when the
             CFG isn't built yet.  */
          gimple_stmt_iterator gsi2 = *gsi;
          gsi_prev (&gsi2);
          fold_stmt (&gsi2);
        }
      else
        {
          gsi_replace_with_seq_vops (gsi, stmts);
          fold_stmt (gsi);
        }
      return true;
    }
  return false;
}
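
/* For illustration (a sketch): the two rewrites performed above are

     sprintf (str, "hello");      =>  strcpy (str, "hello");
     sprintf (str, "%s", src);    =>  strcpy (str, src);

   with the call's lhs, if any, replaced by the known result length
   (strlen of the format, resp. the maximal string length of SRC).  */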
/* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
   FMT, and ORIG.  ORIG may be null if this is a 3-argument call.  We don't
   attempt to simplify calls with more than 4 arguments.

   Return true if simplification was possible, otherwise false.  */

static bool
gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree destsize = gimple_call_arg (stmt, 1);
  tree fmt = gimple_call_arg (stmt, 2);
  tree orig = NULL_TREE;
  const char *fmt_str = NULL;

  if (gimple_call_num_args (stmt) > 4)
    return false;

  if (gimple_call_num_args (stmt) == 4)
    orig = gimple_call_arg (stmt, 3);

  if (!tree_fits_uhwi_p (destsize))
    return false;
  unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
        return false;

      /* Don't optimize snprintf (buf, 4, "abc", ptr++).  */
      if (orig)
        return false;

      /* We could expand this as
         memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
         or to
         memcpy (str, fmt_with_nul_at_cstm1, cst);
         but in the former case that might increase code size
         and in the latter case grow .rodata section too much.
         So punt for now.  */
      size_t len = strlen (fmt_str);
      if (len >= destlen)
        return false;

      gimple_seq stmts = NULL;
      gimple repl = gimple_build_call (fn, 2, dest, fmt);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
        {
          repl = gimple_build_assign (gimple_call_lhs (stmt),
                                      build_int_cst (integer_type_node, len));
          gimple_seq_add_stmt_without_update (&stmts, repl);
          gsi_replace_with_seq_vops (gsi, stmts);
          /* gsi now points at the assignment to the lhs, get a
             stmt iterator to the strcpy call.
             ???  We can't use gsi_for_stmt as that doesn't work when the
             CFG isn't built yet.  */
          gimple_stmt_iterator gsi2 = *gsi;
          gsi_prev (&gsi2);
          fold_stmt (&gsi2);
        }
      else
        {
          gsi_replace_with_seq_vops (gsi, stmts);
          fold_stmt (gsi);
        }
      return true;
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
        return false;

      /* Don't crash on snprintf (str1, cst, "%s").  */
      if (!orig)
        return false;

      tree orig_len = get_maxval_strlen (orig, 0);
      if (!orig_len)
        return false;

      /* We could expand this as
         memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
         or to
         memcpy (str1, str2_with_nul_at_cstm1, cst);
         but in the former case that might increase code size
         and in the latter case grow .rodata section too much.
         So punt for now.  */
      if (compare_tree_int (orig_len, destlen) >= 0)
        return false;

      /* Convert snprintf (str1, cst, "%s", str2) into
         strcpy (str1, str2) if strlen (str2) < cst.  */
      gimple_seq stmts = NULL;
      gimple repl = gimple_build_call (fn, 2, dest, orig);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
        {
          if (!useless_type_conversion_p (integer_type_node,
                                          TREE_TYPE (orig_len)))
            orig_len = fold_convert (integer_type_node, orig_len);
          repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
          gimple_seq_add_stmt_without_update (&stmts, repl);
          gsi_replace_with_seq_vops (gsi, stmts);
          /* gsi now points at the assignment to the lhs, get a
             stmt iterator to the strcpy call.
             ???  We can't use gsi_for_stmt as that doesn't work when the
             CFG isn't built yet.  */
          gimple_stmt_iterator gsi2 = *gsi;
          gsi_prev (&gsi2);
          fold_stmt (&gsi2);
        }
      else
        {
          gsi_replace_with_seq_vops (gsi, stmts);
          fold_stmt (gsi);
        }
      return true;
    }
  return false;
}
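
/* For illustration (a sketch):

     snprintf (str, 16, "hello");     =>  strcpy (str, "hello");   (5 < 16)
     snprintf (str, cst, "%s", src);  =>  strcpy (str, src);  (strlen (src) < cst)

   The destination size must be a known constant strictly larger than the
   copied length, so no truncation can occur.  */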
/* Fold a call to __builtin_strlen with known length LEN.  */

static bool
gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree len = get_maxval_strlen (gimple_call_arg (stmt, 0), 0);
  if (!len)
    return false;
  len = force_gimple_operand_gsi (gsi, len, true, NULL, true, GSI_SAME_STMT);
  replace_call_with_value (gsi, len);
  return true;
}
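
/* For illustration (a sketch): when the string length of the argument is
   known, e.g.

     n = __builtin_strlen ("abcd");   =>  n = 4;

   the call is replaced by that value (forced into gimple form first if it
   is not a simple constant).  */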
/* Fold the non-target builtin at *GSI and return whether any simplification
   was made.  */

static bool
gimple_fold_builtin (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree callee = gimple_call_fndecl (stmt);

  /* Give up for always_inline inline builtins until they are
     inlined.  */
  if (avoid_folding_inline_builtin (callee))
    return false;

  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_BZERO:
      return gimple_fold_builtin_memset (gsi, integer_zero_node,
                                         gimple_call_arg (stmt, 1));
    case BUILT_IN_MEMSET:
      return gimple_fold_builtin_memset (gsi,
                                         gimple_call_arg (stmt, 1),
                                         gimple_call_arg (stmt, 2));
    case BUILT_IN_BCOPY:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 1),
                                            gimple_call_arg (stmt, 0), 3);
    case BUILT_IN_MEMCPY:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
                                            gimple_call_arg (stmt, 1), 0);
    case BUILT_IN_MEMPCPY:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
                                            gimple_call_arg (stmt, 1), 1);
    case BUILT_IN_MEMMOVE:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
                                            gimple_call_arg (stmt, 1), 3);
    case BUILT_IN_SPRINTF_CHK:
    case BUILT_IN_VSPRINTF_CHK:
      return gimple_fold_builtin_sprintf_chk (gsi, DECL_FUNCTION_CODE (callee));
    case BUILT_IN_STRCAT_CHK:
      return gimple_fold_builtin_strcat_chk (gsi);
    case BUILT_IN_STRLEN:
      return gimple_fold_builtin_strlen (gsi);
    case BUILT_IN_STRCPY:
      return gimple_fold_builtin_strcpy (gsi,
                                         gimple_call_arg (stmt, 0),
                                         gimple_call_arg (stmt, 1));
    case BUILT_IN_STRNCPY:
      return gimple_fold_builtin_strncpy (gsi,
                                          gimple_call_arg (stmt, 0),
                                          gimple_call_arg (stmt, 1),
                                          gimple_call_arg (stmt, 2));
    case BUILT_IN_STRCAT:
      return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
                                         gimple_call_arg (stmt, 1));
    case BUILT_IN_FPUTS:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
                                        gimple_call_arg (stmt, 1), false);
    case BUILT_IN_FPUTS_UNLOCKED:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
                                        gimple_call_arg (stmt, 1), true);
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      return gimple_fold_builtin_memory_chk (gsi,
                                             gimple_call_arg (stmt, 0),
                                             gimple_call_arg (stmt, 1),
                                             gimple_call_arg (stmt, 2),
                                             gimple_call_arg (stmt, 3),
                                             DECL_FUNCTION_CODE (callee));
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      return gimple_fold_builtin_stxcpy_chk (gsi,
                                             gimple_call_arg (stmt, 0),
                                             gimple_call_arg (stmt, 1),
                                             gimple_call_arg (stmt, 2),
                                             DECL_FUNCTION_CODE (callee));
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      return gimple_fold_builtin_stxncpy_chk (gsi,
                                              gimple_call_arg (stmt, 0),
                                              gimple_call_arg (stmt, 1),
                                              gimple_call_arg (stmt, 2),
                                              gimple_call_arg (stmt, 3),
                                              DECL_FUNCTION_CODE (callee));
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      return gimple_fold_builtin_snprintf_chk (gsi,
                                               DECL_FUNCTION_CODE (callee));
    case BUILT_IN_SNPRINTF:
      return gimple_fold_builtin_snprintf (gsi);
    case BUILT_IN_SPRINTF:
      return gimple_fold_builtin_sprintf (gsi);
    default:;
    }

  /* Try the generic builtin folder.  */
  bool ignore = (gimple_call_lhs (stmt) == NULL);
  tree result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
        STRIP_NOPS (result);
      else
        result = fold_convert (gimple_call_return_type (stmt), result);
      if (!update_call_from_tree (gsi, result))
        gimplify_and_update_call_from_tree (gsi, result);
      return true;
    }

  return false;
}
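
/* For illustration (a sketch): besides dispatching to the specialized
   folders above, this canonicalizes the legacy BSD entry points, e.g.

     bzero (p, n);         =>  memset (p, 0, n);
     bcopy (src, dst, n);  =>  memmove (dst, src, n);

   note the swapped argument order in the bcopy case.  */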
/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.  */

static bool
gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
{
  gimple stmt = gsi_stmt (*gsi);
  tree callee;
  bool changed = false;
  unsigned i;

  /* Fold *& in call arguments.  */
  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
      {
        tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
        if (tmp)
          {
            gimple_call_set_arg (stmt, i, tmp);
            changed = true;
          }
      }

  /* Check for virtual calls that became direct calls.  */
  callee = gimple_call_fn (stmt);
  if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
    {
      if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
        {
          if (dump_file && virtual_method_call_p (callee)
              && !possible_polymorphic_call_target_p
                    (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
                                                      (OBJ_TYPE_REF_EXPR (callee)))))
            {
              fprintf (dump_file,
                       "Type inheritance inconsistent devirtualization of ");
              print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
              fprintf (dump_file, " to ");
              print_generic_expr (dump_file, callee, TDF_SLIM);
              fprintf (dump_file, "\n");
            }

          gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
          changed = true;
        }
      else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
        {
          bool final;
          vec <cgraph_node *>targets
            = possible_polymorphic_call_targets (callee, stmt, &final);
          if (final && targets.length () <= 1 && dbg_cnt (devirt))
            {
              tree lhs = gimple_call_lhs (stmt);
              if (dump_enabled_p ())
                {
                  location_t loc = gimple_location_safe (stmt);
                  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
                                   "folding virtual function call to %s\n",
                                   targets.length () == 1
                                   ? targets[0]->name ()
                                   : "__builtin_unreachable");
                }
              if (targets.length () == 1)
                {
                  gimple_call_set_fndecl (stmt, targets[0]->decl);
                  changed = true;
                  /* If the call becomes noreturn, remove the lhs.  */
                  if (lhs && (gimple_call_flags (stmt) & ECF_NORETURN))
                    {
                      if (TREE_CODE (lhs) == SSA_NAME)
                        {
                          tree var = create_tmp_var (TREE_TYPE (lhs), NULL);
                          tree def = get_or_create_ssa_default_def (cfun, var);
                          gimple new_stmt = gimple_build_assign (lhs, def);
                          gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
                        }
                      gimple_call_set_lhs (stmt, NULL_TREE);
                    }
                }
              else
                {
                  tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
                  gimple new_stmt = gimple_build_call (fndecl, 0);
                  gimple_set_location (new_stmt, gimple_location (stmt));
                  if (lhs && TREE_CODE (lhs) == SSA_NAME)
                    {
                      tree var = create_tmp_var (TREE_TYPE (lhs), NULL);
                      tree def = get_or_create_ssa_default_def (cfun, var);

                      /* To satisfy condition for
                         cgraph_update_edges_for_call_stmt_node,
                         we need to preserve GIMPLE_CALL statement
                         at position of GSI iterator.  */
                      update_call_from_tree (gsi, def);
                      gsi_insert_before (gsi, new_stmt, GSI_NEW_STMT);
                    }
                  else
                    gsi_replace (gsi, new_stmt, true);
                  changed = true;
                }
            }
        }
    }

  if (inplace)
    return changed;

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    {
      if (gimple_fold_builtin (gsi))
        changed = true;
    }
  else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
    {
      changed |= targetm.gimple_fold_builtin (gsi);
    }
  else if (gimple_call_internal_p (stmt))
    {
      enum tree_code subcode = ERROR_MARK;
      tree result = NULL_TREE;
      switch (gimple_call_internal_fn (stmt))
        {
        case IFN_BUILTIN_EXPECT:
          result = fold_builtin_expect (gimple_location (stmt),
                                        gimple_call_arg (stmt, 0),
                                        gimple_call_arg (stmt, 1),
                                        gimple_call_arg (stmt, 2));
          break;
        case IFN_UBSAN_OBJECT_SIZE:
          if (integer_all_onesp (gimple_call_arg (stmt, 2))
              || (TREE_CODE (gimple_call_arg (stmt, 1)) == INTEGER_CST
                  && TREE_CODE (gimple_call_arg (stmt, 2)) == INTEGER_CST
                  && tree_int_cst_le (gimple_call_arg (stmt, 1),
                                      gimple_call_arg (stmt, 2))))
            {
              gsi_replace (gsi, gimple_build_nop (), true);
              unlink_stmt_vdef (stmt);
              release_defs (stmt);
              return true;
            }
          break;
        case IFN_UBSAN_CHECK_ADD:
          subcode = PLUS_EXPR;
          break;
        case IFN_UBSAN_CHECK_SUB:
          subcode = MINUS_EXPR;
          break;
        case IFN_UBSAN_CHECK_MUL:
          subcode = MULT_EXPR;
          break;
        default:
          break;
        }
      if (subcode != ERROR_MARK)
        {
          tree arg0 = gimple_call_arg (stmt, 0);
          tree arg1 = gimple_call_arg (stmt, 1);
          /* x = y + 0; x = y - 0; x = y * 0; */
          if (integer_zerop (arg1))
            result = subcode == MULT_EXPR
                     ? build_zero_cst (TREE_TYPE (arg0))
                     : arg0;
          /* x = 0 + y; x = 0 * y; */
          else if (subcode != MINUS_EXPR && integer_zerop (arg0))
            result = subcode == MULT_EXPR
                     ? build_zero_cst (TREE_TYPE (arg0))
                     : arg1;
          /* x = y - y; */
          else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
            result = build_zero_cst (TREE_TYPE (arg0));
          /* x = y * 1; x = 1 * y; */
          else if (subcode == MULT_EXPR)
            {
              if (integer_onep (arg1))
                result = arg0;
              else if (integer_onep (arg0))
                result = arg1;
            }
        }
      if (result)
        {
          if (!update_call_from_tree (gsi, result))
            gimplify_and_update_call_from_tree (gsi, result);
          changed = true;
        }
    }

  return changed;
}
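
/* For illustration (added commentary): the devirtualization case above
   turns a virtual call through an OBJ_TYPE_REF into a direct call to the
   single possible method when IPA type analysis resolves the target set,
   or into __builtin_unreachable () when the target set is provably empty;
   the lhs is detached first so the noreturn replacement stays valid.  */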
/* Canonicalize MEM_REFs invariant address operand after propagation.  */

static bool
maybe_canonicalize_mem_ref_addr (tree *t)
{
  bool res = false;

  if (TREE_CODE (*t) == ADDR_EXPR)
    t = &TREE_OPERAND (*t, 0);

  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
     of invariant addresses into a SSA name MEM_REF address.  */
  if (TREE_CODE (*t) == MEM_REF
      || TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (*t, 0);
      if (TREE_CODE (addr) == ADDR_EXPR
          && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
              || handled_component_p (TREE_OPERAND (addr, 0))))
        {
          tree base;
          HOST_WIDE_INT coffset;
          base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
                                                &coffset);
          if (!base)
            gcc_unreachable ();

          TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
          TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
                                                  TREE_OPERAND (*t, 1),
                                                  size_int (coffset));
          res = true;
        }
      gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
                           || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
    }

  /* Canonicalize back MEM_REFs to plain reference trees if the object
     accessed is a decl that has the same access semantics as the MEM_REF.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1)))
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
      if (/* Same volatile qualification.  */
          TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
          /* Same TBAA behavior with -fstrict-aliasing.  */
          && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
          && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
              == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
          /* Same alignment.  */
          && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
          /* We have to look out here to not drop a required conversion
             from the rhs to the lhs if *t appears on the lhs or vice-versa
             if it appears on the rhs.  Thus require strict type
             compatibility.  */
          && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
        {
          *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
          res = true;
        }
    }

  /* Canonicalize TARGET_MEM_REF in particular with respect to
     the indexes becoming constant.  */
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
        {
          *t = tem;
          res = true;
        }
    }

  return res;
}
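
/* For illustration (a rough sketch): two typical canonicalizations are

     MEM[&a.b, 4]   =>  MEM[&a, <offset of b> + 4]

   folding the component reference into the constant offset, and

     MEM[&x, 0]     =>  x

   when the access has the same volatility, TBAA type, and alignment as
   the decl itself.  */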
/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases.  */

static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
{
  bool changed = false;
  gimple stmt = gsi_stmt (*gsi);
  unsigned i;

  /* First do required canonicalization of [TARGET_]MEM_REF addresses
     after propagation.
     ???  This shouldn't be done in generic folding but in the
     propagation helpers which also know whether an address was
     propagated.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
        {
          tree *rhs = gimple_assign_rhs1_ptr (stmt);
          if ((REFERENCE_CLASS_P (*rhs)
               || TREE_CODE (*rhs) == ADDR_EXPR)
              && maybe_canonicalize_mem_ref_addr (rhs))
            changed = true;
          tree *lhs = gimple_assign_lhs_ptr (stmt);
          if (REFERENCE_CLASS_P (*lhs)
              && maybe_canonicalize_mem_ref_addr (lhs))
            changed = true;
        }
      break;
    case GIMPLE_CALL:
      {
        for (i = 0; i < gimple_call_num_args (stmt); ++i)
          {
            tree *arg = gimple_call_arg_ptr (stmt, i);
            if (REFERENCE_CLASS_P (*arg)
                && maybe_canonicalize_mem_ref_addr (arg))
              changed = true;
          }
        tree *lhs = gimple_call_lhs_ptr (stmt);
        if (*lhs
            && REFERENCE_CLASS_P (*lhs)
            && maybe_canonicalize_mem_ref_addr (lhs))
          changed = true;
        break;
      }
    case GIMPLE_ASM:
      {
        for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
          {
            tree link = gimple_asm_output_op (stmt, i);
            tree op = TREE_VALUE (link);
            if (REFERENCE_CLASS_P (op)
                && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
              changed = true;
          }
        for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
          {
            tree link = gimple_asm_input_op (stmt, i);
            tree op = TREE_VALUE (link);
            if ((REFERENCE_CLASS_P (op)
                 || TREE_CODE (op) == ADDR_EXPR)
                && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
              changed = true;
          }
      }
      break;
    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
        {
          tree *val = gimple_debug_bind_get_value_ptr (stmt);
          if (*val
              && (REFERENCE_CLASS_P (*val)
                  || TREE_CODE (*val) == ADDR_EXPR)
              && maybe_canonicalize_mem_ref_addr (val))
            changed = true;
        }
      break;
    default:;
    }

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        unsigned old_num_ops = gimple_num_ops (stmt);
        enum tree_code subcode = gimple_assign_rhs_code (stmt);
        tree lhs = gimple_assign_lhs (stmt);
        tree new_rhs;
        /* First canonicalize operand order.  This avoids building new
           trees if this is the only thing fold would later do.  */
        if ((commutative_tree_code (subcode)
             || commutative_ternary_tree_code (subcode))
            && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
                                     gimple_assign_rhs2 (stmt), false))
          {
            tree tem = gimple_assign_rhs1 (stmt);
            gimple_assign_set_rhs1 (stmt, gimple_assign_rhs2 (stmt));
            gimple_assign_set_rhs2 (stmt, tem);
            changed = true;
          }
        new_rhs = fold_gimple_assign (gsi);
        if (new_rhs
            && !useless_type_conversion_p (TREE_TYPE (lhs),
                                           TREE_TYPE (new_rhs)))
          new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
        if (new_rhs
            && (!inplace
                || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
          {
            gimple_assign_set_rhs_from_tree (gsi, new_rhs);
            changed = true;
          }
        break;
      }

    case GIMPLE_COND:
      changed |= fold_gimple_cond (stmt);
      break;

    case GIMPLE_CALL:
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      {
        size_t noutputs;
        const char **oconstraints;
        const char *constraint;
        bool allows_mem, allows_reg;

        noutputs = gimple_asm_noutputs (stmt);
        oconstraints = XALLOCAVEC (const char *, noutputs);

        for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
          {
            tree link = gimple_asm_output_op (stmt, i);
            tree op = TREE_VALUE (link);
            oconstraints[i]
              = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
            if (REFERENCE_CLASS_P (op)
                && (op = maybe_fold_reference (op, true)) != NULL_TREE)
              {
                TREE_VALUE (link) = op;
                changed = true;
              }
          }
        for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
          {
            tree link = gimple_asm_input_op (stmt, i);
            tree op = TREE_VALUE (link);
            constraint
              = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
            parse_input_constraint (&constraint, 0, 0, noutputs, 0,
                                    oconstraints, &allows_mem, &allows_reg);
            if (REFERENCE_CLASS_P (op)
                && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
                   != NULL_TREE)
              {
                TREE_VALUE (link) = op;
                changed = true;
              }
          }
      }
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
        {
          tree val = gimple_debug_bind_get_value (stmt);
          if (val
              && REFERENCE_CLASS_P (val))
            {
              tree tem = maybe_fold_reference (val, false);
              if (tem)
                {
                  gimple_debug_bind_set_value (stmt, tem);
                  changed = true;
                }
            }
          else if (val
                   && TREE_CODE (val) == ADDR_EXPR)
            {
              tree ref = TREE_OPERAND (val, 0);
              tree tem = maybe_fold_reference (ref, false);
              if (tem)
                {
                  tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
                  gimple_debug_bind_set_value (stmt, tem);
                  changed = true;
                }
            }
        }
      break;

    default:;
    }

  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  */
  if (gimple_has_lhs (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
        {
          tree new_lhs = maybe_fold_reference (lhs, true);
          if (new_lhs)
            {
              gimple_set_lhs (stmt, new_lhs);
              changed = true;
            }
        }
    }

  return changed;
}
/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.
   The statement pointed to by GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  return fold_stmt_1 (gsi, false);
}

/* Perform the minimal folding on statement *GSI.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.
   The statement *GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt_inplace (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  bool changed = fold_stmt_1 (gsi, true);
  gcc_assert (gsi_stmt (*gsi) == stmt);
  return changed;
}
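
/* For illustration (a sketch): the canonical artifact both entry points
   must handle is the propagation result

     *&x = 0;   =>   x = 0;

   fold_stmt may also replace the statement entirely, while
   fold_stmt_inplace guarantees that the statement object stays the same,
   which the assertion above checks.  */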
/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
   if EXPR is null or we don't know how.
   If non-null, the result always has boolean type.  */

static tree
canonicalize_bool (tree expr, bool invert)
{
  if (!expr)
    return NULL_TREE;

  if (invert)
    {
      if (integer_nonzerop (expr))
        return boolean_false_node;
      else if (integer_zerop (expr))
        return boolean_true_node;
      else if (TREE_CODE (expr) == SSA_NAME)
        return fold_build2 (EQ_EXPR, boolean_type_node, expr,
                            build_int_cst (TREE_TYPE (expr), 0));
      else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
        return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
                            boolean_type_node,
                            TREE_OPERAND (expr, 0),
                            TREE_OPERAND (expr, 1));
      else
        return NULL_TREE;
    }
  else
    {
      if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
        return expr;
      if (integer_nonzerop (expr))
        return boolean_true_node;
      else if (integer_zerop (expr))
        return boolean_false_node;
      else if (TREE_CODE (expr) == SSA_NAME)
        return fold_build2 (NE_EXPR, boolean_type_node, expr,
                            build_int_cst (TREE_TYPE (expr), 0));
      else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
        return fold_build2 (TREE_CODE (expr),
                            boolean_type_node,
                            TREE_OPERAND (expr, 0),
                            TREE_OPERAND (expr, 1));
      else
        return NULL_TREE;
    }
}
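
/* For illustration (a sketch): canonicalize_bool (x < y, true) yields
   x >= y, and canonicalize_bool (name_1, true) for a non-boolean SSA name
   yields  name_1 == 0;  either way the result has boolean type.  */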
/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  */

static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
                        const_tree op1, const_tree op2)
{
  gimple s;

  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      if (operand_equal_p (expr, op1, 0))
        return ((code == NE_EXPR && integer_zerop (op2))
                || (code == EQ_EXPR && integer_nonzerop (op2)));
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
          && gimple_assign_rhs_code (s) == code
          && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
          && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
        return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
          && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
        {
          enum tree_code c = gimple_assign_rhs_code (s);
          if ((c == NE_EXPR && integer_zerop (op2))
              || (c == EQ_EXPR && integer_nonzerop (op2)))
            return same_bool_comparison_p (expr, c,
                                           gimple_assign_rhs1 (s),
                                           gimple_assign_rhs2 (s));
          if ((c == EQ_EXPR && integer_zerop (op2))
              || (c == NE_EXPR && integer_nonzerop (op2)))
            return same_bool_comparison_p (expr,
                                           invert_tree_comparison (c, false),
                                           gimple_assign_rhs1 (s),
                                           gimple_assign_rhs2 (s));
        }
    }
  return false;
}
/* Check to see if two boolean expressions OP1 and OP2 are logically
   equivalent.  */

static bool
same_bool_result_p (const_tree op1, const_tree op2)
{
  /* Simple cases first.  */
  if (operand_equal_p (op1, op2, 0))
    return true;

  /* Check the cases where at least one of the operands is a comparison.
     These are a bit smarter than operand_equal_p in that they apply some
     identities on SSA_NAMEs.  */
  if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
      && same_bool_comparison_p (op1, TREE_CODE (op2),
                                 TREE_OPERAND (op2, 0),
                                 TREE_OPERAND (op2, 1)))
    return true;
  if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
      && same_bool_comparison_p (op2, TREE_CODE (op1),
                                 TREE_OPERAND (op1, 0),
                                 TREE_OPERAND (op1, 1)))
    return true;

  /* Default case.  */
  return false;
}
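
/* For illustration (a sketch): same_bool_result_p recognizes, e.g., that a
   boolean  name_1  defined by  name_1 = a < b  gives the same result as the
   comparison  a < b  itself, beyond what plain operand_equal_p can see.  */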
/* Forward declarations for some mutually recursive functions.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
                   enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison (tree var, bool invert,
                         enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison_1 (gimple stmt,
                           enum tree_code code2, tree op2a, tree op2b);
static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
                  enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison (tree var, bool invert,
                        enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison_1 (gimple stmt,
                          enum tree_code code2, tree op2a, tree op2b);
/* Helper function for and_comparisons_1:  try to simplify the AND of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of the VAR before doing the AND.
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison (tree var, bool invert,
                         enum tree_code code2, tree op2a, tree op2b)
{
  tree t;
  gimple stmt = SSA_NAME_DEF_STMT (var);

  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.  */
  if (invert)
    t = or_var_with_comparison_1 (stmt,
                                  invert_tree_comparison (code2, false),
                                  op2a, op2b);
  else
    t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
  return canonicalize_bool (t, invert);
}
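
/* For illustration (a sketch): the DeMorgan rewrite used above is

     !var & (a < b)  ==  !(var | !(a < b))  ==  !(var | (a >= b))

   so the inverted AND query is answered by the non-inverted OR helper and
   the result is re-inverted by canonicalize_bool.  */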
/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison_1 (gimple stmt,
                           enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
          || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
        {
          true_test_var = op2a;
          if (var == true_test_var)
            return var;
        }
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
               || (code2 == NE_EXPR && integer_nonzerop (op2b)))
        {
          false_test_var = op2a;
          if (var == false_test_var)
            return boolean_false_node;
        }
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (innercode,
                                  gimple_assign_rhs1 (stmt),
                                  gimple_assign_rhs2 (stmt),
                                  code2, op2a, op2b);
      if (t)
        return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple s;
      tree t;
      tree partial = NULL_TREE;
      bool is_and = (innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
         of inner1/inner2:
         inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
         inner1 AND (inner1 OR inner2) => inner1
         !inner1 AND (inner1 AND inner2) => false
         !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
         Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
        return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
        return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
        return (is_and
                ? boolean_false_node
                : and_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
        return (is_and
                ? boolean_false_node
                : and_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
         Compute the first partial result, (inner1 AND (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
          && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
          && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
          && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
                                              gimple_assign_rhs1 (s),
                                              gimple_assign_rhs2 (s),
                                              code2, op2a, op2b)))
        {
          /* Handle the AND case, where we are reassociating:
             (inner1 AND inner2) AND (op2a code2 op2b)
             => (t AND inner2)
             If the partial result t is a constant, we win.  Otherwise
             continue on to try reassociating with the other inner test.  */
          if (is_and)
            {
              if (integer_onep (t))
                return inner2;
              else if (integer_zerop (t))
                return boolean_false_node;
            }

          /* Handle the OR case, where we are redistributing:
             (inner1 OR inner2) AND (op2a code2 op2b)
             => (t OR (inner2 AND (op2a code2 op2b)))  */
          else if (integer_onep (t))
            return boolean_true_node;

          /* Save partial result for later.  */
          partial = t;
        }

      /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
          && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
          && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
          && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
                                              gimple_assign_rhs1 (s),
                                              gimple_assign_rhs2 (s),
                                              code2, op2a, op2b)))
        {
          /* Handle the AND case, where we are reassociating:
             (inner1 AND inner2) AND (op2a code2 op2b)
             => (inner1 AND t)  */
          if (is_and)
            {
              if (integer_onep (t))
                return inner1;
              else if (integer_zerop (t))
                return boolean_false_node;
              /* If both are the same, we can apply the identity
                 (x AND x) == x.  */
              else if (partial && same_bool_result_p (t, partial))
                return t;
            }

          /* Handle the OR case, where we are redistributing:
             (inner1 OR inner2) AND (op2a code2 op2b)
             => (t OR (inner1 AND (op2a code2 op2b)))
             => (t OR partial)  */
          else
            {
              if (integer_onep (t))
                return boolean_true_node;
              else if (partial)
                {
                  /* We already got a simplification for the other
                     operand to the redistributed OR expression.  The
                     interesting case is when at least one is false.
                     Or, if both are the same, we can apply the identity
                     (x OR x) == x.  */
                  if (integer_zerop (partial))
                    return t;
                  else if (integer_zerop (t))
                    return partial;
                  else if (same_bool_result_p (t, partial))
                    return t;
                }
            }
        }
    }
  return NULL_TREE;
}
/* Try to simplify the AND of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
                   enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) AND (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
                                    TRUTH_ANDIF_EXPR, code1, code2,
                                    truth_type, op1a, op1b);
      if (t)
        return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
                                    TRUTH_ANDIF_EXPR, code1,
                                    swap_tree_comparison (code2),
                                    truth_type, op1a, op1b);
      if (t)
        return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a == op1b), we should either be able to
         return that or FALSE, depending on whether the constant op1b
         also satisfies the other comparison against op2b.  */
      if (code1 == EQ_EXPR)
        {
          bool done = true;
          bool val;
          switch (code2)
            {
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp < 0); break;
            case GT_EXPR: val = (cmp > 0); break;
            case LE_EXPR: val = (cmp <= 0); break;
            case GE_EXPR: val = (cmp >= 0); break;
            default: done = false;
            }
          if (done)
            {
              if (val)
                return fold_build2 (code1, boolean_type_node, op1a, op1b);
              else
                return boolean_false_node;
            }
        }
      /* Likewise if the second comparison is an == comparison.  */
      else if (code2 == EQ_EXPR)
        {
          bool done = true;
          bool val;
          switch (code1)
            {
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp > 0); break;
            case GT_EXPR: val = (cmp < 0); break;
            case LE_EXPR: val = (cmp >= 0); break;
            case GE_EXPR: val = (cmp <= 0); break;
            default: done = false;
            }
          if (done)
            {
              if (val)
                return fold_build2 (code2, boolean_type_node, op2a, op2b);
              else
                return boolean_false_node;
            }
        }

      /* Same business with inequality tests.  */
      else if (code1 == NE_EXPR)
        {
          bool val;
          switch (code2)
            {
            case EQ_EXPR: val = (cmp != 0); break;
            case NE_EXPR: val = (cmp == 0); break;
            case LT_EXPR: val = (cmp >= 0); break;
            case GT_EXPR: val = (cmp <= 0); break;
            case LE_EXPR: val = (cmp > 0); break;
            case GE_EXPR: val = (cmp < 0); break;
            default:
              val = false;
            }
          if (val)
            return fold_build2 (code2, boolean_type_node, op2a, op2b);
        }
      else if (code2 == NE_EXPR)
        {
          bool val;
          switch (code1)
            {
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp <= 0); break;
            case GT_EXPR: val = (cmp >= 0); break;
            case LE_EXPR: val = (cmp < 0); break;
            case GE_EXPR: val = (cmp > 0); break;
            default:
              val = false;
            }
          if (val)
            return fold_build2 (code1, boolean_type_node, op1a, op1b);
        }

      /* Choose the more restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
               && (code2 == LT_EXPR || code2 == LE_EXPR))
        {
          if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
            return fold_build2 (code1, boolean_type_node, op1a, op1b);
          else
            return fold_build2 (code2, boolean_type_node, op2a, op2b);
        }

      /* Likewise choose the more restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
               && (code2 == GT_EXPR || code2 == GE_EXPR))
        {
          if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
            return fold_build2 (code1, boolean_type_node, op1a, op1b);
          else
            return fold_build2 (code2, boolean_type_node, op2a, op2b);
        }

      /* Check for singleton ranges.  */
      else if (cmp == 0
               && ((code1 == LE_EXPR && code2 == GE_EXPR)
                   || (code1 == GE_EXPR && code2 == LE_EXPR)))
        return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);

      /* Check for disjoint ranges.  */
      else if (cmp <= 0
               && (code1 == LT_EXPR || code1 == LE_EXPR)
               && (code2 == GT_EXPR || code2 == GE_EXPR))
        return boolean_false_node;
      else if (cmp >= 0
               && (code1 == GT_EXPR || code1 == GE_EXPR)
               && (code2 == LT_EXPR || code2 == LE_EXPR))
        return boolean_false_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
                     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
        {
        case GIMPLE_ASSIGN:
          /* Try to simplify by copy-propagating the definition.  */
          return and_var_with_comparison (op1a, invert, code2, op2a, op2b);

        case GIMPLE_PHI:
          /* If every argument to the PHI produces the same result when
             ANDed with the second comparison, we win.
             Do not do this unless the type is bool since we need a bool
             result here anyway.  */
          if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
            {
              tree result = NULL_TREE;
              unsigned i;
              for (i = 0; i < gimple_phi_num_args (stmt); i++)
                {
                  tree arg = gimple_phi_arg_def (stmt, i);

                  /* If this PHI has itself as an argument, ignore it.
                     If all the other args produce the same result,
                     we're still OK.  */
                  if (arg == gimple_phi_result (stmt))
                    continue;
                  else if (TREE_CODE (arg) == INTEGER_CST)
                    {
                      if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
                        {
                          if (!result)
                            result = boolean_false_node;
                          else if (!integer_zerop (result))
                            return NULL_TREE;
                        }
                      else if (!result)
                        result = fold_build2 (code2, boolean_type_node,
                                              op2a, op2b);
                      else if (!same_bool_comparison_p (result,
                                                        code2, op2a, op2b))
                        return NULL_TREE;
                    }
                  else if (TREE_CODE (arg) == SSA_NAME
                           && !SSA_NAME_IS_DEFAULT_DEF (arg))
                    {
                      tree temp;
                      gimple def_stmt = SSA_NAME_DEF_STMT (arg);
                      /* In simple cases we can look through PHI nodes,
                         but we have to be careful with loops.
                         See PR49073.  */
                      if (! dom_info_available_p (CDI_DOMINATORS)
                          || gimple_bb (def_stmt) == gimple_bb (stmt)
                          || dominated_by_p (CDI_DOMINATORS,
                                             gimple_bb (def_stmt),
                                             gimple_bb (stmt)))
                        return NULL_TREE;
                      temp = and_var_with_comparison (arg, invert, code2,
                                                      op2a, op2b);
                      if (!temp)
                        return NULL_TREE;
                      else if (!result)
                        result = temp;
                      else if (!same_bool_result_p (result, temp))
                        return NULL_TREE;
                    }
                  else
                    return NULL_TREE;
                }
              return result;
            }
          break;

        default:
          break;
        }
    }
  return NULL_TREE;
}
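
/* For illustration (a sketch): typical merges performed above for a common
   operand X and constant bounds are

     X == 3 && X <  5   =>  X == 3
     X <  3 && X <= 5   =>  X <  3      (more restrictive bound)
     X <= 3 && X >= 3   =>  X == 3      (singleton range)
     X <  3 && X >  5   =>  false       (disjoint ranges)
*/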
/* Try to simplify the AND of two comparisons, specified by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be simplified to a single expression (without requiring
   introducing more SSA variables to hold intermediate values),
   return the resulting tree.  Otherwise return NULL_TREE.
   If the result expression is non-null, it has boolean type.  */

tree
maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
                            enum tree_code code2, tree op2a, tree op2b)
{
  tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
  if (t)
    return t;
  else
    return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
}
/* Helper function for or_comparisons_1:  try to simplify the OR of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of VAR before doing the OR.
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison (tree var, bool invert,
                        enum tree_code code2, tree op2a, tree op2b)
{
  tree t;
  gimple stmt = SSA_NAME_DEF_STMT (var);

  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.  */
  if (invert)
    t = and_var_with_comparison_1 (stmt,
                                   invert_tree_comparison (code2, false),
                                   op2a, op2b);
  else
    t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
  return canonicalize_bool (t, invert);
}
/* Try to simplify the OR of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison_1 (gimple stmt,
                          enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var OR (var != 0)) => true.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
          || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
        {
          true_test_var = op2a;
          if (var == true_test_var)
            return var;
        }
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
               || (code2 == NE_EXPR && integer_nonzerop (op2b)))
        {
          false_test_var = op2a;
          if (var == false_test_var)
            return boolean_true_node;
        }
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = or_comparisons_1 (innercode,
                                 gimple_assign_rhs1 (stmt),
                                 gimple_assign_rhs2 (stmt),
                                 code2, op2a, op2b);
      if (t)
        return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple s;
      tree t;
      tree partial = NULL_TREE;
      bool is_or = (innercode == BIT_IOR_EXPR);

      /* Check for boolean identities that don't require recursive examination
         of inner1/inner2:
         inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
         inner1 OR (inner1 AND inner2) => inner1
         !inner1 OR (inner1 OR inner2) => true
         !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
         Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
        return (is_or ? var : inner1);
      else if (inner2 == true_test_var)
        return (is_or ? var : inner2);
      else if (inner1 == false_test_var)
        return (is_or
                ? boolean_true_node
                : or_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
        return (is_or
                ? boolean_true_node
                : or_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the OR across the inner tests.
         Compute the first partial result, (inner1 OR (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
          && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
          && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
          && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
                                             gimple_assign_rhs1 (s),
                                             gimple_assign_rhs2 (s),
                                             code2, op2a, op2b)))
        {
          /* Handle the OR case, where we are reassociating:
             (inner1 OR inner2) OR (op2a code2 op2b)
             => (t OR inner2)
             If the partial result t is a constant, we win.  Otherwise
             continue on to try reassociating with the other inner test.  */
          if (is_or)
            {
              if (integer_onep (t))
                return boolean_true_node;
              else if (integer_zerop (t))
                return inner2;
            }

          /* Handle the AND case, where we are redistributing:
             (inner1 AND inner2) OR (op2a code2 op2b)
             => (t AND (inner2 OR (op2a code op2b)))  */
          else if (integer_zerop (t))
            return boolean_false_node;

          /* Save partial result for later.  */
          partial = t;
        }

      /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
          && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
          && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
          && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
                                             gimple_assign_rhs1 (s),
                                             gimple_assign_rhs2 (s),
                                             code2, op2a, op2b)))
        {
          /* Handle the OR case, where we are reassociating:
             (inner1 OR inner2) OR (op2a code2 op2b)
             => (inner1 OR t)
             => (t OR partial)  */
          if (is_or)
            {
              if (integer_zerop (t))
                return inner1;
              else if (integer_onep (t))
                return boolean_true_node;
              /* If both are the same, we can apply the identity
                 (x OR x) == x.  */
              else if (partial && same_bool_result_p (t, partial))
                return t;
            }

          /* Handle the AND case, where we are redistributing:
             (inner1 AND inner2) OR (op2a code2 op2b)
             => (t AND (inner1 OR (op2a code2 op2b)))
             => (t AND partial)  */
          else
            {
              if (integer_zerop (t))
                return boolean_false_node;
              else if (partial)
                {
                  /* We already got a simplification for the other
                     operand to the redistributed AND expression.  The
                     interesting case is when at least one is true.
                     Or, if both are the same, we can apply the identity
                     (x AND x) == x.  */
                  if (integer_onep (partial))
                    return t;
                  else if (integer_onep (t))
                    return partial;
                  else if (same_bool_result_p (t, partial))
                    return t;
                }
            }
        }
    }
  return NULL_TREE;
}
/* Try to simplify the OR of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
                  enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) OR (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
                                    TRUTH_ORIF_EXPR, code1, code2,
                                    truth_type, op1a, op1b);
      if (t)
        return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
                                    TRUTH_ORIF_EXPR, code1,
                                    swap_tree_comparison (code2),
                                    truth_type, op1a, op1b);
      if (t)
        return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a != op1b), we should either be able to
         return that or TRUE, depending on whether the constant op1b
         also satisfies the other comparison against op2b.  */
      if (code1 == NE_EXPR)
        {
          bool done = true;
          bool val;
          switch (code2)
            {
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp < 0); break;
            case GT_EXPR: val = (cmp > 0); break;
            case LE_EXPR: val = (cmp <= 0); break;
            case GE_EXPR: val = (cmp >= 0); break;
            default: done = false;
            }
          if (done)
            {
              if (val)
                return boolean_true_node;
              else
                return fold_build2 (code1, boolean_type_node, op1a, op1b);
            }
        }
      /* Likewise if the second comparison is a != comparison.  */
      else if (code2 == NE_EXPR)
        {
          bool done = true;
          bool val;
          switch (code1)
            {
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp > 0); break;
            case GT_EXPR: val = (cmp < 0); break;
            case LE_EXPR: val = (cmp >= 0); break;
            case GE_EXPR: val = (cmp <= 0); break;
            default: done = false;
            }
          if (done)
            {
              if (val)
                return boolean_true_node;
              else
                return fold_build2 (code2, boolean_type_node, op2a, op2b);
            }
        }

      /* See if an equality test is redundant with the other comparison.  */
      else if (code1 == EQ_EXPR)
        {
          bool val;
          switch (code2)
            {
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp < 0); break;
            case GT_EXPR: val = (cmp > 0); break;
            case LE_EXPR: val = (cmp <= 0); break;
            case GE_EXPR: val = (cmp >= 0); break;
            default:
              val = false;
            }
          if (val)
            return fold_build2 (code2, boolean_type_node, op2a, op2b);
        }
      else if (code2 == EQ_EXPR)
        {
          bool val;
          switch (code1)
            {
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp > 0); break;
            case GT_EXPR: val = (cmp < 0); break;
            case LE_EXPR: val = (cmp >= 0); break;
            case GE_EXPR: val = (cmp <= 0); break;
            default:
              val = false;
            }
          if (val)
            return fold_build2 (code1, boolean_type_node, op1a, op1b);
        }

      /* Choose the less restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
               && (code2 == LT_EXPR || code2 == LE_EXPR))
        {
          if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
            return fold_build2 (code2, boolean_type_node, op2a, op2b);
          else
            return fold_build2 (code1, boolean_type_node, op1a, op1b);
        }

      /* Likewise choose the less restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
               && (code2 == GT_EXPR || code2 == GE_EXPR))
        {
          if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
            return fold_build2 (code2, boolean_type_node, op2a, op2b);
          else
            return fold_build2 (code1, boolean_type_node, op1a, op1b);
        }

      /* Check for singleton ranges.  */
      else if (cmp == 0
               && ((code1 == LT_EXPR && code2 == GT_EXPR)
                   || (code1 == GT_EXPR && code2 == LT_EXPR)))
        return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);

      /* Check for less/greater pairs that don't restrict the range at all.  */
      else if (cmp >= 0
               && (code1 == LT_EXPR || code1 == LE_EXPR)
               && (code2 == GT_EXPR || code2 == GE_EXPR))
        return boolean_true_node;
      else if (cmp <= 0
               && (code1 == GT_EXPR || code1 == GE_EXPR)
               && (code2 == LT_EXPR || code2 == LE_EXPR))
        return boolean_true_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
                     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
        {
        case GIMPLE_ASSIGN:
          /* Try to simplify by copy-propagating the definition.  */
          return or_var_with_comparison (op1a, invert, code2, op2a, op2b);

        case GIMPLE_PHI:
          /* If every argument to the PHI produces the same result when
             ORed with the second comparison, we win.
             Do not do this unless the type is bool since we need a bool
             result here anyway.  */
          if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
            {
              tree result = NULL_TREE;
              unsigned i;
              for (i = 0; i < gimple_phi_num_args (stmt); i++)
                {
                  tree arg = gimple_phi_arg_def (stmt, i);

                  /* If this PHI has itself as an argument, ignore it.
                     If all the other args produce the same result,
                     we're still OK.  */
                  if (arg == gimple_phi_result (stmt))
                    continue;
                  else if (TREE_CODE (arg) == INTEGER_CST)
                    {
                      if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
                        {
                          if (!result)
                            result = boolean_true_node;
                          else if (!integer_onep (result))
                            return NULL_TREE;
                        }
                      else if (!result)
                        result = fold_build2 (code2, boolean_type_node,
                                              op2a, op2b);
                      else if (!same_bool_comparison_p (result,
                                                        code2, op2a, op2b))
                        return NULL_TREE;
                    }
                  else if (TREE_CODE (arg) == SSA_NAME
                           && !SSA_NAME_IS_DEFAULT_DEF (arg))
                    {
                      tree temp;
                      gimple def_stmt = SSA_NAME_DEF_STMT (arg);
                      /* In simple cases we can look through PHI nodes,
                         but we have to be careful with loops.
                         See PR49073.  */
                      if (! dom_info_available_p (CDI_DOMINATORS)
                          || gimple_bb (def_stmt) == gimple_bb (stmt)
                          || dominated_by_p (CDI_DOMINATORS,
                                             gimple_bb (def_stmt),
                                             gimple_bb (stmt)))
                        return NULL_TREE;
                      temp = or_var_with_comparison (arg, invert, code2,
                                                     op2a, op2b);
                      if (!temp)
                        return NULL_TREE;
                      else if (!result)
                        result = temp;
                      else if (!same_bool_result_p (result, temp))
                        return NULL_TREE;
                    }
                  else
                    return NULL_TREE;
                }
              return result;
            }
          break;

        default:
          break;
        }
    }
  return NULL_TREE;
}
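
/* For illustration (a sketch): the OR counterparts of the AND merges are

     X != 3 || X <  5   =>  true        (3 satisfies X < 5)
     X <  3 || X <= 5   =>  X <= 5      (less restrictive bound)
     X <  3 || X >  3   =>  X != 3      (singleton gap)
     X <= 3 || X >= 3   =>  true        (covers the whole range)
*/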
/* Try to simplify the OR of two comparisons, specified by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be simplified to a single expression (without requiring
   introducing more SSA variables to hold intermediate values),
   return the resulting tree.  Otherwise return NULL_TREE.
   If the result expression is non-null, it has boolean type.  */

tree
maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
                           enum tree_code code2, tree op2a, tree op2b)
{
  tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
  if (t)
    return t;
  else
    return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
}
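
/* For illustration, a minimal usage sketch from a hypothetical client
   (X and TYPE are assumed to be an SSA name and its integer type):

     tree t = maybe_fold_or_comparisons (LT_EXPR, x, build_int_cst (type, 3),
                                         GT_EXPR, x, build_int_cst (type, 3));
     the result t would be  x != 3  here; NULL_TREE would mean that no
     single-expression fold was found.  */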
4142 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
4144 Either NULL_TREE, a simplified but non-constant or a constant
4147 ??? This should go into a gimple-fold-inline.h file to be eventually
4148 privatized with the single valueize function used in the various TUs
4149 to avoid the indirect function call overhead. */
tree
gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree))
{
  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	enum tree_code subcode = gimple_assign_rhs_code (stmt);

	switch (get_gimple_rhs_class (subcode))
	  {
	  case GIMPLE_SINGLE_RHS:
	    {
	      tree rhs = gimple_assign_rhs1 (stmt);
	      enum tree_code_class kind = TREE_CODE_CLASS (subcode);

	      if (TREE_CODE (rhs) == SSA_NAME)
		{
		  /* If the RHS is an SSA_NAME, return its known constant value,
		     if any.  */
		  return (*valueize) (rhs);
		}
	      /* Handle propagating invariant addresses into address
		 operations.  */
	      else if (TREE_CODE (rhs) == ADDR_EXPR
		       && !is_gimple_min_invariant (rhs))
		{
		  HOST_WIDE_INT offset = 0;
		  tree base;
		  base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
							  &offset,
							  valueize);
		  if (base
		      && (CONSTANT_CLASS_P (base)
			  || decl_address_invariant_p (base)))
		    return build_invariant_address (TREE_TYPE (rhs),
						    base, offset);
		}
	      else if (TREE_CODE (rhs) == CONSTRUCTOR
		       && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		       && (CONSTRUCTOR_NELTS (rhs)
			   == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
		{
		  unsigned i;
		  tree val, *vec;

		  vec = XALLOCAVEC (tree,
				    TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
		  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
		    {
		      val = (*valueize) (val);
		      if (TREE_CODE (val) == INTEGER_CST
			  || TREE_CODE (val) == REAL_CST
			  || TREE_CODE (val) == FIXED_CST)
			vec[i] = val;
		      else
			return NULL_TREE;
		    }

		  return build_vector (TREE_TYPE (rhs), vec);
		}
	      if (subcode == OBJ_TYPE_REF)
		{
		  tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
		  /* If callee is constant, we can fold away the wrapper.  */
		  if (is_gimple_min_invariant (val))
		    return val;
		}

	      if (kind == tcc_reference)
		{
		  if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
		       || TREE_CODE (rhs) == REALPART_EXPR
		       || TREE_CODE (rhs) == IMAGPART_EXPR)
		      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      return fold_unary_loc (EXPR_LOCATION (rhs),
					     TREE_CODE (rhs),
					     TREE_TYPE (rhs), val);
		    }
		  else if (TREE_CODE (rhs) == BIT_FIELD_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      return fold_ternary_loc (EXPR_LOCATION (rhs),
					       TREE_CODE (rhs),
					       TREE_TYPE (rhs), val,
					       TREE_OPERAND (rhs, 1),
					       TREE_OPERAND (rhs, 2));
		    }
		  else if (TREE_CODE (rhs) == MEM_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      if (TREE_CODE (val) == ADDR_EXPR
			  && is_gimple_min_invariant (val))
			{
			  tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
						  unshare_expr (val),
						  TREE_OPERAND (rhs, 1));
			  if (tem)
			    rhs = tem;
			}
		    }
		  return fold_const_aggregate_ref_1 (rhs, valueize);
		}
	      else if (kind == tcc_declaration)
		return get_symbol_constant_value (rhs);
	      return rhs;
	    }

	  case GIMPLE_UNARY_RHS:
	    {
	      /* Handle unary operators that can appear in GIMPLE form.
		 Note that we know the single operand must be a constant,
		 so this should almost always return a simplified RHS.  */
	      tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));

	      return
		fold_unary_ignore_overflow_loc (loc, subcode,
						gimple_expr_type (stmt), op0);
	    }

	  case GIMPLE_BINARY_RHS:
	    {
	      /* Handle binary operators that can appear in GIMPLE form.  */
	      tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
	      tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));

	      /* Translate &x + CST into an invariant form suitable for
		 further propagation.  */
	      if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
		  && TREE_CODE (op0) == ADDR_EXPR
		  && TREE_CODE (op1) == INTEGER_CST)
		{
		  tree off = fold_convert (ptr_type_node, op1);
		  return build_fold_addr_expr_loc
			   (loc,
			    fold_build2 (MEM_REF,
					 TREE_TYPE (TREE_TYPE (op0)),
					 unshare_expr (op0), off));
		}

	      return fold_binary_loc (loc, subcode,
				      gimple_expr_type (stmt), op0, op1);
	    }

	  case GIMPLE_TERNARY_RHS:
	    {
	      /* Handle ternary operators that can appear in GIMPLE form.  */
	      tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
	      tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
	      tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));

	      /* Fold embedded expressions in ternary codes.  */
	      if ((subcode == COND_EXPR
		   || subcode == VEC_COND_EXPR)
		  && COMPARISON_CLASS_P (op0))
		{
		  tree op00 = (*valueize) (TREE_OPERAND (op0, 0));
		  tree op01 = (*valueize) (TREE_OPERAND (op0, 1));
		  tree tem = fold_binary_loc (loc, TREE_CODE (op0),
					      TREE_TYPE (op0), op00, op01);
		  if (tem)
		    op0 = tem;
		}

	      return fold_ternary_loc (loc, subcode,
				       gimple_expr_type (stmt), op0, op1, op2);
	    }

	  default:
	    gcc_unreachable ();
	  }
      }

    case GIMPLE_CALL:
      {
	tree fn;
	if (gimple_call_internal_p (stmt))
	  {
	    enum tree_code subcode = ERROR_MARK;
	    switch (gimple_call_internal_fn (stmt))
	      {
	      case IFN_UBSAN_CHECK_ADD:
		subcode = PLUS_EXPR;
		break;
	      case IFN_UBSAN_CHECK_SUB:
		subcode = MINUS_EXPR;
		break;
	      case IFN_UBSAN_CHECK_MUL:
		subcode = MULT_EXPR;
		break;
	      default:
		return NULL_TREE;
	      }
	    tree arg0 = gimple_call_arg (stmt, 0);
	    tree arg1 = gimple_call_arg (stmt, 1);
	    tree op0 = (*valueize) (arg0);
	    tree op1 = (*valueize) (arg1);

	    if (TREE_CODE (op0) != INTEGER_CST
		|| TREE_CODE (op1) != INTEGER_CST)
	      {
		switch (subcode)
		  {
		  case MULT_EXPR:
		    /* x * 0 = 0 * x = 0 without overflow.  */
		    if (integer_zerop (op0) || integer_zerop (op1))
		      return build_zero_cst (TREE_TYPE (arg0));
		    break;
		  case MINUS_EXPR:
		    /* y - y = 0 without overflow.  */
		    if (operand_equal_p (op0, op1, 0))
		      return build_zero_cst (TREE_TYPE (arg0));
		    break;
		  default:
		    break;
		  }
		return NULL_TREE;
	      }
	    tree res
	      = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
	    if (res
		&& TREE_CODE (res) == INTEGER_CST
		&& !TREE_OVERFLOW (res))
	      return res;
	    return NULL_TREE;
	  }

	fn = (*valueize) (gimple_call_fn (stmt));
	if (TREE_CODE (fn) == ADDR_EXPR
	    && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
	    && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
	    && gimple_builtin_call_types_compatible_p (stmt,
						       TREE_OPERAND (fn, 0)))
	  {
	    tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
	    tree call, retval;
	    unsigned i;
	    for (i = 0; i < gimple_call_num_args (stmt); ++i)
	      args[i] = (*valueize) (gimple_call_arg (stmt, i));
	    call = build_call_array_loc (loc,
					 gimple_call_return_type (stmt),
					 fn, gimple_call_num_args (stmt), args);
	    retval = fold_call_expr (EXPR_LOCATION (call), call, false);
	    if (retval)
	      {
		/* fold_call_expr wraps the result inside a NOP_EXPR.  */
		STRIP_NOPS (retval);
		retval = fold_convert (gimple_call_return_type (stmt), retval);
	      }
	    return retval;
	  }
	return NULL_TREE;
      }

    default:
      return NULL_TREE;
    }
}
/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
   Returns NULL_TREE if folding to a constant is not possible, otherwise
   returns a constant according to is_gimple_min_invariant.  */

tree
gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
{
  tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
  if (res && is_gimple_min_invariant (res))
    return res;
  return NULL_TREE;
}
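/* Usage sketch (editor's note, not from the original sources): a caller
   without its own lattice can pass an identity valueizer, in which case
   only statements whose operands are already constant fold.  The helper
   name below is hypothetical.

     static tree
     identity_valueize (tree t)	  // hypothetical helper
     {
       return t;
     }

     tree cst = gimple_fold_stmt_to_constant (stmt, identity_valueize);
     if (cst)
       ... replace uses of the LHS of STMT by CST ...  */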
/* The following set of functions are supposed to fold references using
   their constant initializers.  */

static tree fold_ctor_reference (tree type, tree ctor,
				 unsigned HOST_WIDE_INT offset,
				 unsigned HOST_WIDE_INT size, tree);

/* See if we can find a constructor defining the value of BASE.
   When we know the constructor with constant offset (such as
   base is array[40] and we do know the constructor of array), then
   BIT_OFFSET is adjusted accordingly.

   As a special case, return error_mark_node when the constructor
   is not explicitly available, but it is known to be zero,
   such as 'static const int a;'.  */
static tree
get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
		      tree (*valueize)(tree))
{
  HOST_WIDE_INT bit_offset2, size, max_size;
  if (TREE_CODE (base) == MEM_REF)
    {
      if (!integer_zerop (TREE_OPERAND (base, 1)))
	{
	  if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
	    return NULL_TREE;
	  *bit_offset += (mem_ref_offset (base).to_short_addr ()
			  * BITS_PER_UNIT);
	}

      if (valueize
	  && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
	base = valueize (TREE_OPERAND (base, 0));
      if (!base || TREE_CODE (base) != ADDR_EXPR)
	return NULL_TREE;
      base = TREE_OPERAND (base, 0);
    }

  /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
     DECL_INITIAL.  If BASE is a nested reference into another
     ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
     the inner reference.  */
  switch (TREE_CODE (base))
    {
    case VAR_DECL:
    case CONST_DECL:
      {
	tree init = ctor_for_folding (base);

	/* Our semantic is exact opposite of ctor_for_folding;
	   NULL means unknown, while error_mark_node is 0.  */
	if (init == error_mark_node)
	  return NULL_TREE;
	if (!init)
	  return error_mark_node;
	return init;
      }

    case ARRAY_REF:
    case COMPONENT_REF:
      base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
      if (max_size == -1 || size != max_size)
	return NULL_TREE;
      *bit_offset += bit_offset2;
      return get_base_constructor (base, bit_offset, valueize);

    case STRING_CST:
    case CONSTRUCTOR:
      return base;

    default:
      return NULL_TREE;
    }
}
/* CTOR is CONSTRUCTOR of an array type.  Fold a reference of type TYPE and
   size SIZE to the memory at bit OFFSET.  */

static tree
fold_array_ctor_reference (tree type, tree ctor,
			   unsigned HOST_WIDE_INT offset,
			   unsigned HOST_WIDE_INT size,
			   tree from_decl)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;
  offset_int low_bound;
  offset_int elt_size;
  offset_int index, max_index;
  offset_int access_index;
  tree domain_type = NULL_TREE, index_type = NULL_TREE;
  HOST_WIDE_INT inner_offset;

  /* Compute low bound and elt size.  */
  if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
    domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    {
      /* Static constructors for variably sized objects make no sense.  */
      gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
      index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
      low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
    }
  else
    low_bound = 0;
  /* Static constructors for variably sized objects make no sense.  */
  gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
	      == INTEGER_CST);
  elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));

  /* We can handle only constantly sized accesses that are known to not
     be larger than the size of an array element.  */
  if (!TYPE_SIZE_UNIT (type)
      || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
      || wi::lts_p (elt_size, wi::to_offset (TYPE_SIZE_UNIT (type))))
    return NULL_TREE;

  /* Compute the array index we look for.  */
  access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
				 elt_size);
  access_index += low_bound;
  if (index_type)
    access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
			    TYPE_SIGN (index_type));

  /* And offset within the access.  */
  inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);

  /* See if the array field is large enough to span the whole access.  We do
     not care to fold accesses spanning multiple array indexes.  */
  if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
    return NULL_TREE;

  index = low_bound - 1;
  if (index_type)
    index = wi::ext (index, TYPE_PRECISION (index_type),
		     TYPE_SIGN (index_type));

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
    {
      /* An array constructor might explicitly set the index, specify a range,
	 or leave the index NULL, meaning it is the next index after the
	 previous one.  */
      if (cfield)
	{
	  if (TREE_CODE (cfield) == INTEGER_CST)
	    max_index = index = wi::to_offset (cfield);
	  else
	    {
	      gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
	      index = wi::to_offset (TREE_OPERAND (cfield, 0));
	      max_index = wi::to_offset (TREE_OPERAND (cfield, 1));
	    }
	}
      else
	{
	  index += 1;
	  if (index_type)
	    index = wi::ext (index, TYPE_PRECISION (index_type),
			     TYPE_SIGN (index_type));
	  max_index = index;
	}

      /* Do we have a match?  */
      if (wi::cmpu (access_index, index) >= 0
	  && wi::cmpu (access_index, max_index) <= 0)
	return fold_ctor_reference (type, cval, inner_offset, size,
				    from_decl);
    }
  /* When memory is not explicitly mentioned in the constructor,
     it is 0 (or out of range).  */
  return build_zero_cst (type);
}
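/* Worked example (editor's note, not from the original sources): for

     static const int a[4] = { 1, 2, 3, 4 };

   a read of TYPE int (32 bits, assuming 8-bit units and 4-byte elements)
   at bit OFFSET 64 gives access_index = 64/8/4 = 2 and inner_offset = 0,
   so the reference folds to the constructor element 3.  A read at OFFSET
   80 gives inner_offset = 16, and 16 + 32 exceeds the 32-bit element, so
   the access spans two elements and NULL_TREE is returned.  */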
/* CTOR is CONSTRUCTOR of an aggregate or vector.
   Fold a reference of type TYPE and size SIZE to the memory at bit
   OFFSET.  */

static tree
fold_nonarray_ctor_reference (tree type, tree ctor,
			      unsigned HOST_WIDE_INT offset,
			      unsigned HOST_WIDE_INT size,
			      tree from_decl)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
			    cval)
    {
      tree byte_offset = DECL_FIELD_OFFSET (cfield);
      tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
      tree field_size = DECL_SIZE (cfield);
      offset_int bitoffset;
      offset_int bitoffset_end, access_end;

      /* Variable sized objects in static constructors make no sense,
	 but field_size can be NULL for flexible array members.  */
      gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
		  && TREE_CODE (byte_offset) == INTEGER_CST
		  && (field_size != NULL_TREE
		      ? TREE_CODE (field_size) == INTEGER_CST
		      : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));

      /* Compute bit offset of the field.  */
      bitoffset = (wi::to_offset (field_offset)
		   + wi::lshift (wi::to_offset (byte_offset),
				 LOG2_BITS_PER_UNIT));
      /* Compute bit offset where the field ends.  */
      if (field_size != NULL_TREE)
	bitoffset_end = bitoffset + wi::to_offset (field_size);
      else
	bitoffset_end = 0;

      access_end = offset_int (offset) + size;

      /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
	 [BITOFFSET, BITOFFSET_END)?  */
      if (wi::cmps (access_end, bitoffset) > 0
	  && (field_size == NULL_TREE
	      || wi::lts_p (offset, bitoffset_end)))
	{
	  offset_int inner_offset = offset_int (offset) - bitoffset;
	  /* We do have overlap.  Now see if the field is large enough to
	     cover the access.  Give up for accesses spanning multiple
	     fields.  */
	  if (wi::cmps (access_end, bitoffset_end) > 0)
	    return NULL_TREE;
	  if (wi::lts_p (offset, bitoffset))
	    return NULL_TREE;
	  return fold_ctor_reference (type, cval,
				      inner_offset.to_uhwi (), size,
				      from_decl);
	}
    }
  /* When memory is not explicitly mentioned in the constructor, it is 0.  */
  return build_zero_cst (type);
}
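/* Worked example (editor's note, not from the original sources): for

     static const struct { char c; int i; } s = { 'x', 7 };

   assuming 8-bit units and I placed at byte offset 4, a read of TYPE int
   with OFFSET 32 and SIZE 32 overlaps [32, 64) of field I exactly and
   folds to the INTEGER_CST 7.  A read with OFFSET 8 and SIZE 32 starts
   before I (inside the padding), overlaps it only partially, and yields
   NULL_TREE.  */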
/* CTOR is a value initializing memory.  Fold a reference of type TYPE and
   size SIZE to the memory at bit OFFSET.  */

static tree
fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
		     unsigned HOST_WIDE_INT size, tree from_decl)
{
  tree ret;

  /* We found the field with exact match.  */
  if (useless_type_conversion_p (type, TREE_TYPE (ctor))
      && !offset)
    return canonicalize_constructor_val (unshare_expr (ctor), from_decl);

  /* We are at the end of walk, see if we can view convert the
     result.  */
  if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
      /* VIEW_CONVERT_EXPR is defined only for matching sizes.  */
      && !compare_tree_int (TYPE_SIZE (type), size)
      && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
    {
      ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
      ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
      return ret;
    }
  /* For constants and byte-aligned/sized reads try to go through
     native_encode/interpret.  */
  if (CONSTANT_CLASS_P (ctor)
      && BITS_PER_UNIT == 8
      && offset % BITS_PER_UNIT == 0
      && size % BITS_PER_UNIT == 0
      && size <= MAX_BITSIZE_MODE_ANY_MODE)
    {
      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
      if (native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
			      offset / BITS_PER_UNIT) > 0)
	return native_interpret_expr (type, buf, size / BITS_PER_UNIT);
    }
  if (TREE_CODE (ctor) == CONSTRUCTOR)
    {
      if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
	  || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
	return fold_array_ctor_reference (type, ctor, offset, size,
					  from_decl);
      else
	return fold_nonarray_ctor_reference (type, ctor, offset, size,
					     from_decl);
    }

  return NULL_TREE;
}
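/* Illustration (editor's note, not from the original sources): for

     static const char msg[] = "hi";

   a read of TYPE char with OFFSET 8 and SIZE 8 does not match the
   STRING_CST's type exactly, but the ctor is a constant and the access
   is byte aligned, so the native_encode/native_interpret path above
   folds it to the INTEGER_CST 'i' (assuming 8-bit units).  */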
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates, valueizing SSA
   names using VALUEIZE.  Return NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
{
  tree ctor, idx, base;
  HOST_WIDE_INT offset, size, max_size;
  tree tem;

  if (TREE_THIS_VOLATILE (t))
    return NULL_TREE;

  if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
    return get_symbol_constant_value (t);

  tem = fold_read_from_constant_string (t);
  if (tem)
    return tem;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Constant indexes are handled well by get_base_constructor.
	 Only special case variable offsets.
	 FIXME: This code can't handle nested references with variable indexes
	 (they will be handled only by iteration of ccp).  Perhaps we can bring
	 get_ref_base_and_extent here and make it use a valueize callback.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
	  && valueize
	  && (idx = (*valueize) (TREE_OPERAND (t, 1)))
	  && TREE_CODE (idx) == INTEGER_CST)
	{
	  tree low_bound, unit_size;

	  /* If the resulting bit-offset is constant, track it.  */
	  if ((low_bound = array_ref_low_bound (t),
	       TREE_CODE (low_bound) == INTEGER_CST)
	      && (unit_size = array_ref_element_size (t),
		  tree_fits_uhwi_p (unit_size)))
	    {
	      offset_int woffset
		= wi::sext (wi::to_offset (idx) - wi::to_offset (low_bound),
			    TYPE_PRECISION (TREE_TYPE (idx)));

	      if (wi::fits_shwi_p (woffset))
		{
		  offset = woffset.to_shwi ();
		  /* TODO: This code seems wrong, multiply then check
		     to see if it fits.  */
		  offset *= tree_to_uhwi (unit_size);
		  offset *= BITS_PER_UNIT;

		  base = TREE_OPERAND (t, 0);
		  ctor = get_base_constructor (base, &offset, valueize);
		  /* Empty constructor.  Always fold to 0.  */
		  if (ctor == error_mark_node)
		    return build_zero_cst (TREE_TYPE (t));
		  /* Out of bound array access.  Value is undefined,
		     but don't fold.  */
		  if (offset < 0)
		    return NULL_TREE;
		  /* We cannot determine ctor.  */
		  if (!ctor)
		    return NULL_TREE;
		  return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
					      tree_to_uhwi (unit_size)
					      * BITS_PER_UNIT,
					      base);
		}
	    }
	}
      /* Fallthru.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case TARGET_MEM_REF:
    case MEM_REF:
      base = get_ref_base_and_extent (t, &offset, &size, &max_size);
      ctor = get_base_constructor (base, &offset, valueize);

      /* Empty constructor.  Always fold to 0.  */
      if (ctor == error_mark_node)
	return build_zero_cst (TREE_TYPE (t));
      /* We do not know precise address.  */
      if (max_size == -1 || max_size != size)
	return NULL_TREE;
      /* We cannot determine ctor.  */
      if (!ctor)
	return NULL_TREE;

      /* Out of bound array access.  Value is undefined, but don't fold.  */
      if (offset < 0)
	return NULL_TREE;

      return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
				  base);

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
	if (c && TREE_CODE (c) == COMPLEX_CST)
	  return fold_build1_loc (EXPR_LOCATION (t),
				  TREE_CODE (t), TREE_TYPE (t), c);
	break;
      }

    default:
      break;
    }

  return NULL_TREE;
}

/* Like fold_const_aggregate_ref_1, but without valueization of SSA names.  */

tree
fold_const_aggregate_ref (tree t)
{
  return fold_const_aggregate_ref_1 (t, NULL);
}
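/* Illustration (editor's note, not from the original sources): given

     static const int a[2] = { 10, 20 };

   folding the reference a[1] through fold_const_aggregate_ref returns
   the INTEGER_CST 20.  Since the valueize callback is NULL here, an
   ARRAY_REF whose index is an SSA name is not resolved even if it has a
   known constant value; such callers want fold_const_aggregate_ref_1.  */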
/* Lookup virtual method with index TOKEN in a virtual table V
   at OFFSET.
   Set CAN_REFER if non-NULL to false if the method is not referable or if
   the virtual table is ill-formed (such as rewritten by a non-C++ produced
   symbol).  Otherwise just return NULL in that case.  */

tree
gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
				   tree v,
				   unsigned HOST_WIDE_INT offset,
				   bool *can_refer)
{
  tree vtable = v, init, fn;
  unsigned HOST_WIDE_INT size;
  unsigned HOST_WIDE_INT elt_size, access_index;
  tree domain_type;

  if (can_refer)
    *can_refer = true;

  /* First of all double-check that we have a virtual table.  */
  if (TREE_CODE (v) != VAR_DECL
      || !DECL_VIRTUAL_P (v))
    {
      gcc_assert (in_lto_p);
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }

  init = ctor_for_folding (v);

  /* The virtual tables should always be born with constructors
     and we always should assume that they are available for
     folding.  At the moment we do not stream them in all cases,
     but it should never happen that the ctor seems unreachable.  */
  if (init == error_mark_node)
    {
      gcc_assert (in_lto_p);
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }
  gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
  size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
  offset *= BITS_PER_UNIT;
  offset += token * size;

  /* Lookup the value in the constructor that is assumed to be an array.
     This is equivalent to
     fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
			       offset, size, NULL);
     but in constant time.  We expect that the frontend produced a simple
     array without indexed initializers.  */

  gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
  domain_type = TYPE_DOMAIN (TREE_TYPE (init));
  gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
  elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));

  access_index = offset / BITS_PER_UNIT / elt_size;
  gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);

  /* This code makes an assumption that there are no indexed fields
     produced by the C++ FE, so we can directly index the array.  */
  if (access_index < CONSTRUCTOR_NELTS (init))
    {
      fn = CONSTRUCTOR_ELT (init, access_index)->value;
      gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
      STRIP_NOPS (fn);
    }
  else
    fn = NULL;

  /* For a type-inconsistent program we may end up looking up a virtual
     method in a virtual table that does not contain TOKEN entries.  We may
     overrun the virtual table and pick up a constant or RTTI info pointer.
     In any case the call is undefined.  */
  if (!fn
      || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
      || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
    fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
  else
    {
      fn = TREE_OPERAND (fn, 0);

      /* When the cgraph node is missing and the function is not public, we
	 cannot devirtualize.  This can happen in WHOPR when the actual
	 method ends up in another partition, because we found the
	 devirtualization possibility too late.  */
      if (!can_refer_decl_in_current_unit_p (fn, vtable))
	{
	  if (can_refer)
	    {
	      *can_refer = false;
	      return fn;
	    }
	  return NULL_TREE;
	}
    }

  /* Make sure we create a cgraph node for functions we'll reference.
     They can be non-existent if the reference comes from an entry
     of an external vtable for example.  */
  cgraph_node::get_create (fn);

  return fn;
}
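/* Worked example (editor's note, not from the original sources): on a
   target with 64-bit vtable slots (8-byte elt_size), TOKEN 2 and a byte
   OFFSET of 16 give offset = 16*8 + 2*64 = 256 bits, hence
   access_index = 256/8/8 = 4, i.e. the fifth element of the vtable
   array constructor.  */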
/* Return a declaration of a function which an OBJ_TYPE_REF references.  TOKEN
   is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
   KNOWN_BINFO carries the binfo describing the true type of
   OBJ_TYPE_REF_OBJECT(REF).
   Set CAN_REFER if non-NULL to false if the method is not referable or if
   the virtual table is ill-formed (such as rewritten by a non-C++ produced
   symbol).  Otherwise just return NULL in that case.  */

tree
gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
				  bool *can_refer)
{
  unsigned HOST_WIDE_INT offset;
  tree v;

  v = BINFO_VTABLE (known_binfo);
  /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone.  */
  if (!v)
    return NULL_TREE;

  if (!vtable_pointer_value_to_vtable (v, &v, &offset))
    {
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }
  return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
}
/* Return true iff VAL is a gimple expression that is known to be
   non-negative.  Restricted to floating-point inputs.  */

bool
gimple_val_nonnegative_real_p (tree val)
{
  gimple def_stmt;

  gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));

  /* Use existing logic for non-gimple trees.  */
  if (tree_expr_nonnegative_p (val))
    return true;

  if (TREE_CODE (val) != SSA_NAME)
    return false;

  /* Currently we look only at the immediately defining statement
     to make this determination, since recursion on defining
     statements of operands can lead to quadratic behavior in the
     worst case.  This is expected to catch almost all occurrences
     in practice.  It would be possible to implement limited-depth
     recursion if important cases are lost.  Alternatively, passes
     that need this information (such as the pow/powi lowering code
     in the cse_sincos pass) could be revised to provide it through
     dataflow propagation.  */

  def_stmt = SSA_NAME_DEF_STMT (val);

  if (is_gimple_assign (def_stmt))
    {
      tree op0, op1;

      /* See fold-const.c:tree_expr_nonnegative_p for additional
	 cases that could be handled with recursion.  */

      switch (gimple_assign_rhs_code (def_stmt))
	{
	case ABS_EXPR:
	  /* Always true for floating-point operands.  */
	  return true;

	case MULT_EXPR:
	  /* True if the two operands are identical (since we are
	     restricted to floating-point inputs).  */
	  op0 = gimple_assign_rhs1 (def_stmt);
	  op1 = gimple_assign_rhs2 (def_stmt);

	  if (op0 == op1
	      || operand_equal_p (op0, op1, 0))
	    return true;
	  break;

	default:
	  break;
	}
    }
  else if (is_gimple_call (def_stmt))
    {
      tree fndecl = gimple_call_fndecl (def_stmt);
      if (fndecl
	  && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	{
	  tree arg1;

	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_ACOS):
	    CASE_FLT_FN (BUILT_IN_ACOSH):
	    CASE_FLT_FN (BUILT_IN_CABS):
	    CASE_FLT_FN (BUILT_IN_COSH):
	    CASE_FLT_FN (BUILT_IN_ERFC):
	    CASE_FLT_FN (BUILT_IN_EXP):
	    CASE_FLT_FN (BUILT_IN_EXP10):
	    CASE_FLT_FN (BUILT_IN_EXP2):
	    CASE_FLT_FN (BUILT_IN_FABS):
	    CASE_FLT_FN (BUILT_IN_FDIM):
	    CASE_FLT_FN (BUILT_IN_HYPOT):
	    CASE_FLT_FN (BUILT_IN_POW10):
	      return true;

	    CASE_FLT_FN (BUILT_IN_SQRT):
	      /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
		 nonnegative inputs.  */
	      if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (val))))
		return true;

	      break;

	    CASE_FLT_FN (BUILT_IN_POWI):
	      /* True if the second argument is an even integer.  */
	      arg1 = gimple_call_arg (def_stmt, 1);

	      if (TREE_CODE (arg1) == INTEGER_CST
		  && (TREE_INT_CST_LOW (arg1) & 1) == 0)
		return true;

	      break;

	    CASE_FLT_FN (BUILT_IN_POW):
	      /* True if the second argument is an even integer-valued
		 real.  */
	      arg1 = gimple_call_arg (def_stmt, 1);

	      if (TREE_CODE (arg1) == REAL_CST)
		{
		  REAL_VALUE_TYPE c;
		  HOST_WIDE_INT n;

		  c = TREE_REAL_CST (arg1);
		  n = real_to_integer (&c);

		  if ((n & 1) == 0)
		    {
		      REAL_VALUE_TYPE cint;
		      real_from_integer (&cint, VOIDmode, n, SIGNED);
		      if (real_identical (&c, &cint))
			return true;
		    }
		}

	      break;

	    default:
	      break;
	    }
	}
    }

  return false;
}
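/* Illustration (editor's note, not from the original sources): if the
   defining statement of VAL is "x_2 = x_1 * x_1", the MULT_EXPR case
   applies and true is returned; "y_2 = __builtin_pow (x_1, 2.0)" is
   caught by the BUILT_IN_POW case since 2.0 is an even integer-valued
   real.  "x_2 = x_1 * x_3" yields false even if both operands happen to
   be nonnegative, because only the immediately defining statement is
   inspected.  */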
/* Given a pointer value T, return a simplified version of an
   indirection through T, or NULL_TREE if no simplification is
   possible.  Note that the resulting type may be different from
   the type pointed to in the sense that it is still compatible
   from the langhooks point of view.  */

tree
gimple_fold_indirect_ref (tree t)
{
  tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
  tree sub = t;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (useless_type_conversion_p (type, optype))
	return op;

      /* *(foo *)&fooarray => fooarray[0] */
      if (TREE_CODE (optype) == ARRAY_TYPE
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
	  && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  if (TREE_CODE (min_val) == INTEGER_CST)
	    return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
	}
    }

  /* *(p + CST) -> ...  */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree addr = TREE_OPERAND (sub, 0);
      tree off = TREE_OPERAND (sub, 1);
      tree addrtype;

      STRIP_NOPS (addr);
      addrtype = TREE_TYPE (addr);

      /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
	  && tree_fits_uhwi_p (off))
	{
	  unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
	  tree part_width = TYPE_SIZE (type);
	  unsigned HOST_WIDE_INT part_widthi
	    = tree_to_shwi (part_width) / BITS_PER_UNIT;
	  unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
	  tree index = bitsize_int (indexi);
	  if (offset / part_widthi
	      < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
	    return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
				part_width, index);
	}

      /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
	{
	  tree size = TYPE_SIZE_UNIT (type);
	  if (tree_int_cst_equal (size, off))
	    return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
	}

      /* *(p + CST) -> MEM_REF <p, CST>.  */
      if (TREE_CODE (addr) != ADDR_EXPR
	  || DECL_P (TREE_OPERAND (addr, 0)))
	return fold_build2 (MEM_REF, type,
			    addr,
			    wide_int_to_tree (ptype, off));
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
      && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      tree osub = sub;
      sub = gimple_fold_indirect_ref (sub);
      if (! sub)
	sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      if (TREE_CODE (min_val) == INTEGER_CST)
	return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
/* Return true if CODE is an operation that when operating on signed
   integer types involves undefined behavior on overflow and the
   operation can be expressed with unsigned arithmetic.  */

bool
arith_code_with_undefined_signed_overflow (tree_code code)
{
  switch (code)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case NEGATE_EXPR:
    case POINTER_PLUS_EXPR:
      return true;
    default:
      return false;
    }
}
/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
   operation that can be transformed to unsigned arithmetic by converting
   its operand, carrying out the operation in the corresponding unsigned
   type and converting the result back to the original type.

   Returns a sequence of statements that replace STMT and also contain
   a modified form of STMT itself.  */

gimple_seq
rewrite_to_defined_overflow (gimple stmt)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "rewriting stmt with undefined signed "
	       "overflow ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
    }

  tree lhs = gimple_assign_lhs (stmt);
  tree type = unsigned_type_for (TREE_TYPE (lhs));
  gimple_seq stmts = NULL;
  for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
    {
      gimple_seq stmts2 = NULL;
      gimple_set_op (stmt, i,
		     force_gimple_operand (fold_convert (type,
							 gimple_op (stmt, i)),
					   &stmts2, true, NULL_TREE));
      gimple_seq_add_seq (&stmts, stmts2);
    }
  gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
  gimple_seq_add_stmt (&stmts, stmt);
  gimple cvt = gimple_build_assign_with_ops
      (NOP_EXPR, lhs, gimple_assign_lhs (stmt), NULL_TREE);
  gimple_seq_add_stmt (&stmts, cvt);

  return stmts;
}
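/* Example (editor's note, not from the original sources): given the
   signed-integer assignment

     c_3 = a_1 + b_2;

   the returned sequence looks like

     _4 = (unsigned int) a_1;
     _5 = (unsigned int) b_2;
     _6 = _4 + _5;	      <-- STMT itself, recycled; overflow now wraps
     c_3 = (int) _6;

   with the original LHS restored by the trailing conversion.  */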
/* Return OP converted to TYPE by emitting a conversion statement on SEQ
   if required using location LOC.  Note that OP will be returned
   unmodified if GIMPLE does not require an explicit conversion between
   its type and TYPE.  */

tree
gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
{
  if (useless_type_conversion_p (type, TREE_TYPE (op)))
    return op;
  op = fold_convert_loc (loc, type, op);
  gimple_seq stmts = NULL;
  op = force_gimple_operand (op, &stmts, true, NULL_TREE);
  gimple_seq_add_seq_without_update (seq, stmts);
  return op;
}
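/* Usage sketch (editor's note, not from the original sources):

     gimple_seq stmts = NULL;
     op = gimple_convert (&stmts, loc, sizetype, op);
     ... insert STMTS before the statement that consumes OP ...

   If OP already has a type GIMPLE considers compatible with the
   requested one, no statement is emitted and OP is returned as-is.  */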