/* Statement simplification on GIMPLE.
   Copyright (C) 2010-2015 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.c.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "fold-const.h"
#include "stringpool.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "stor-layout.h"
#include "dominance.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimple-expr.h"
#include "gimple-iterator.h"
#include "gimple-ssa.h"
#include "tree-ssanames.h"
#include "tree-into-ssa.h"
#include "tree-ssa-propagate.h"
#include "ipa-utils.h"
#include "gimple-pretty-print.h"
#include "tree-ssa-address.h"
#include "langhooks.h"
#include "gimplify-me.h"
#include "gimple-match.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
/* Return true when DECL can be referenced from current unit.
   FROM_DECL (if non-null) specifies the constructor of the variable
   DECL was taken from.
   We can get declarations that are not possible to reference for various
   reasons:

     1) When analyzing C++ virtual tables.
	C++ virtual tables do have known constructors even
	when they are keyed to other compilation unit.
	Those tables can contain pointers to methods and vars
	in other units.  Those methods have both STATIC and EXTERNAL
	set.
     2) In WHOPR mode devirtualization might lead to reference
	to method that was partitioned elsewhere.
	In this case we have static VAR_DECL or FUNCTION_DECL
	that has no corresponding callgraph/varpool node
	defined.
     3) COMDAT functions referred by external vtables that
	we devirtualize only during final compilation stage.
	At this time we already decided that we will not output
	the function body and thus we can't reference the symbol
	directly.  */
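/* For illustration (a hypothetical example, not from the original
   sources): with -fdevirtualize, folding a load from an external
   vtable such as

     // translation unit A
     struct S { virtual int f (); };   // f defined and keyed in unit B
     int g (S *p) { return p->f (); }  // candidate for a direct call

   must first ask can_refer_decl_in_current_unit_p for B's method; if
   the COMDAT body was already optimized away here, the fold is
   refused rather than emitting a dangling reference.  */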
static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  if (DECL_ABSTRACT_P (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || (TREE_CODE (decl) != VAR_DECL
	  && TREE_CODE (decl) != FUNCTION_DECL))
    return true;

  /* Static objects can be referred to only if they were not optimized
     out yet.  */
  if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
    {
      /* Before we start optimizing unreachable code we can be sure all
	 static objects are defined.  */
      if (symtab->function_flags_ready)
	return true;
      snode = symtab_node::get (decl);
      if (!snode || !snode->definition)
	return false;
      node = dyn_cast <cgraph_node *> (snode);
      return !node || !node->global.inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from initializer of
     external var or var that has been optimized out.  */
  if (!from_decl
      || TREE_CODE (from_decl) != VAR_DECL
      || (!DECL_EXTERNAL (from_decl)
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->definition)
      || (flag_ltrans
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->in_other_partition))
    return true;
  /* We are folding reference from external vtable.  The vtable may refer
     to a symbol keyed to other compilation unit.  The other compilation
     unit may be in separate DSO and the symbol may be hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
    return false;
  /* When a function is public, we can always introduce a new reference.
     The exception are COMDAT functions, where introducing a direct
     reference implies the need to include the function body in the
     current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;

  /* We have a COMDAT.  We are going to check whether we still have a
     definition or whether the definition is going to be output in another
     partition.  Bypass this when gimplifying; all needed functions will
     be produced.

     As observed in PR20991 for already optimized out comdat virtual
     functions, it may be tempting not to give up, because the copy will
     be output elsewhere when the corresponding vtable is output.
     This is however not possible - the ABI specifies that COMDATs are
     output in units where they are used, and when the other unit was
     compiled with LTO it is possible that the vtable was kept public
     while the function itself was privatized.  */
  if (!symtab->function_flags_ready)
    return true;

  snode = symtab_node::get (decl);
  if (!snode
      || ((!snode->definition || DECL_EXTERNAL (decl))
	  && (!snode->in_other_partition
	      || (!snode->forced_by_abi && !snode->force_output))))
    return false;
  node = dyn_cast <cgraph_node *> (snode);
  return !node || !node->global.inlined_to;
}

/* CVAL is a value taken from DECL_INITIAL of a variable.  Try to transform
   it into an acceptable form for is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specifies the variable whose constructor
   contains CVAL.  */
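/* A sketch of the canonicalization (hypothetical input, not from the
   original sources): an initializer value of the form

     &arr p+ 4          (POINTER_PLUS_EXPR of an invariant address)

   is rewritten below into the equivalent

     &MEM[&arr + 4]     (ADDR_EXPR of a MEM_REF at a constant offset)

   which is_gimple_min_invariant accepts.  */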
tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  tree orig_cval = cval;
  STRIP_NOPS (cval);
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
	cval = build1_loc (EXPR_LOCATION (cval),
			   ADDR_EXPR, TREE_TYPE (ptr),
			   fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
					ptr,
					fold_convert (ptr_type_node,
						      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
	{
	  base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
	  if (base)
	    TREE_OPERAND (cval, 0) = base;
	}
      else
	base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
	return NULL_TREE;

      if ((TREE_CODE (base) == VAR_DECL
	   || TREE_CODE (base) == FUNCTION_DECL)
	  && !can_refer_decl_in_current_unit_p (base, from_decl))
	return NULL_TREE;
      if (TREE_CODE (base) == VAR_DECL)
	TREE_ADDRESSABLE (base) = 1;
      else if (TREE_CODE (base) == FUNCTION_DECL)
	{
	  /* Make sure we create a cgraph node for functions we'll reference.
	     They can be non-existent if the reference comes from an entry
	     of an external vtable for example.  */
	  cgraph_node::get_create (base);
	}
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  if (TREE_OVERFLOW_P (cval))
    return drop_tree_overflow (cval);
  return orig_cval;
}

/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */
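/* For example (hypothetical, for illustration only): given

     static const int answer = 42;

   get_symbol_constant_value (answer) yields the INTEGER_CST 42, and a
   'const' declaration without an initializer that cannot be overridden
   at link or run time yields a zero constant of the variable's type.  */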
tree
get_symbol_constant_value (tree sym)
{
  tree val = ctor_for_folding (sym);
  if (val != error_mark_node)
    {
      if (val)
	{
	  val = canonicalize_constructor_val (unshare_expr (val), sym);
	  if (val && is_gimple_min_invariant (val))
	    return val;
	  else
	    return NULL_TREE;
	}
      /* Variables declared 'const' without an initializer
	 have zero as the initializer if they may not be
	 overridden at link or run time.  */
      if (!val
	  && is_gimple_reg_type (TREE_TYPE (sym)))
	return build_zero_cst (TREE_TYPE (sym));
    }

  return NULL_TREE;
}

/* Subroutine of fold_stmt.  We perform several simplifications of the
   memory reference tree EXPR and make sure to re-gimplify them properly
   after propagation of constant addresses.  IS_LHS is true if the
   reference is supposed to be an lvalue.  */
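/* A sketch (hypothetical input): REALPART_EXPR of a constant complex
   value, say REALPART_EXPR <__complex__ (1.0, 2.0)>, folds below to
   the constant 1.0 via fold_unary_loc, and a BIT_FIELD_REF of a
   constant likewise folds via fold_ternary_loc.  */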
static tree
maybe_fold_reference (tree expr, bool is_lhs)
{
  tree result;

  if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
       || TREE_CODE (expr) == REALPART_EXPR
       || TREE_CODE (expr) == IMAGPART_EXPR)
      && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_unary_loc (EXPR_LOCATION (expr),
			   TREE_CODE (expr),
			   TREE_TYPE (expr),
			   TREE_OPERAND (expr, 0));
  else if (TREE_CODE (expr) == BIT_FIELD_REF
	   && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_ternary_loc (EXPR_LOCATION (expr),
			     TREE_CODE (expr),
			     TREE_TYPE (expr),
			     TREE_OPERAND (expr, 0),
			     TREE_OPERAND (expr, 1),
			     TREE_OPERAND (expr, 2));

  if (!is_lhs
      && (result = fold_const_aggregate_ref (expr))
      && is_gimple_min_invariant (result))
    return result;

  return NULL_TREE;
}

/* Attempt to fold an assignment statement pointed to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */
static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	if (TREE_CLOBBER_P (rhs))
	  return NULL_TREE;

	if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs, false);

	else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
	  {
	    tree val = OBJ_TYPE_REF_EXPR (rhs);
	    if (is_gimple_min_invariant (val))
	      return val;
	    else if (flag_devirtualize && virtual_method_call_p (rhs))
	      {
		bool final;
		vec <cgraph_node *>targets
		  = possible_polymorphic_call_targets (rhs, stmt, &final);
		if (final && targets.length () <= 1 && dbg_cnt (devirt))
		  {
		    if (dump_enabled_p ())
		      {
			location_t loc = gimple_location_safe (stmt);
			dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
					 "resolving virtual function address "
					 "reference to function %s\n",
					 targets.length () == 1
					 ? targets[0]->name ()
					 : "NULL");
		      }
		    if (targets.length () == 1)
		      {
			val = fold_convert (TREE_TYPE (val),
					    build_fold_addr_expr_loc
					      (loc, targets[0]->decl));
			STRIP_USELESS_TYPE_CONVERSION (val);
		      }
		    else
		      /* We can not use __builtin_unreachable here because it
			 can not have address taken.  */
		      val = build_int_cst (TREE_TYPE (val), 0);
		    return val;
		  }
	      }
	  }

	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    tree ref = TREE_OPERAND (rhs, 0);
	    tree tem = maybe_fold_reference (ref, true);
	    if (tem
		&& TREE_CODE (tem) == MEM_REF
		&& integer_zerop (TREE_OPERAND (tem, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
	    else if (tem)
	      result = fold_convert (TREE_TYPE (rhs),
				     build_fold_addr_expr_loc (loc, tem));
	    else if (TREE_CODE (ref) == MEM_REF
		     && integer_zerop (TREE_OPERAND (ref, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		 && (CONSTRUCTOR_NELTS (rhs)
		     == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (TREE_CODE (val) != INTEGER_CST
		  && TREE_CODE (val) != REAL_CST
		  && TREE_CODE (val) != FIXED_CST)
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs))
	  return get_symbol_constant_value (rhs);

	/* If we couldn't fold the RHS, hand over to the generic
	   fold routines.  */
	if (result == NULL_TREE)
	  result = fold (rhs);

	/* Strip away useless type conversions.  Both the NON_LVALUE_EXPR
	   that may have been added by fold, and "useless" type
	   conversions that might now be apparent due to propagation.  */
	STRIP_USELESS_TYPE_CONVERSION (result);

	if (result != rhs && valid_gimple_rhs_p (result))
	  return result;

	return NULL_TREE;
      }
      break;
    case GIMPLE_UNARY_RHS:
      break;

    case GIMPLE_BINARY_RHS:
      /* Try to canonicalize for boolean-typed X the comparisons
	 X == 0, X == 1, X != 0, and X != 1.  */
      if (gimple_assign_rhs_code (stmt) == EQ_EXPR
	  || gimple_assign_rhs_code (stmt) == NE_EXPR)
	{
	  tree lhs = gimple_assign_lhs (stmt);
	  tree op1 = gimple_assign_rhs1 (stmt);
	  tree op2 = gimple_assign_rhs2 (stmt);
	  tree type = TREE_TYPE (op1);

	  /* Check whether the comparison operands are of the same boolean
	     type as the result type is.
	     Check that second operand is an integer-constant with value
	     one or zero.  */
	  if (TREE_CODE (op2) == INTEGER_CST
	      && (integer_zerop (op2) || integer_onep (op2))
	      && useless_type_conversion_p (TREE_TYPE (lhs), type))
	    {
	      enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
	      bool is_logical_not = false;

	      /* X == 0 and X != 1 is a logical-not of X.
		 X == 1 and X != 0 is X.  */
	      if ((cmp_code == EQ_EXPR && integer_zerop (op2))
		  || (cmp_code == NE_EXPR && integer_onep (op2)))
		is_logical_not = true;

	      if (is_logical_not == false)
		result = op1;
	      /* Only for one-bit precision typed X the transformation
		 !X -> ~X is valid.  */
	      else if (TYPE_PRECISION (type) == 1)
		result = build1_loc (gimple_location (stmt), BIT_NOT_EXPR,
				     type, op1);
	      /* Otherwise we use !X -> X ^ 1.  */
	      else
		result = build2_loc (gimple_location (stmt), BIT_XOR_EXPR,
				     type, op1, build_int_cst (type, 1));
	    }
	}

      if (!result)
	result = fold_binary_loc (loc, subcode,
				  TREE_TYPE (gimple_assign_lhs (stmt)),
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;
    case GIMPLE_TERNARY_RHS:
      /* Try to fold a conditional expression.  */
      if (gimple_assign_rhs_code (stmt) == COND_EXPR)
	{
	  tree op0 = gimple_assign_rhs1 (stmt);
	  tree tem;
	  bool set = false;
	  location_t cond_loc = gimple_location (stmt);

	  if (COMPARISON_CLASS_P (op0))
	    {
	      fold_defer_overflow_warnings ();
	      tem = fold_binary_loc (cond_loc,
				     TREE_CODE (op0), TREE_TYPE (op0),
				     TREE_OPERAND (op0, 0),
				     TREE_OPERAND (op0, 1));
	      /* This is actually a conditional expression, not a GIMPLE
		 conditional statement, however, the valid_gimple_rhs_p
		 test still applies.  */
	      set = (tem && is_gimple_condexpr (tem)
		     && valid_gimple_rhs_p (tem));
	      fold_undefer_overflow_warnings (set, stmt, 0);
	    }
	  else if (is_gimple_min_invariant (op0))
	    {
	      tem = op0;
	      set = true;
	    }
	  else
	    return NULL_TREE;

	  if (set)
	    result = fold_build3_loc (cond_loc, COND_EXPR,
				      TREE_TYPE (gimple_assign_lhs (stmt)), tem,
				      gimple_assign_rhs2 (stmt),
				      gimple_assign_rhs3 (stmt));
	}

      if (!result)
	result = fold_ternary_loc (loc, subcode,
				   TREE_TYPE (gimple_assign_lhs (stmt)),
				   gimple_assign_rhs1 (stmt),
				   gimple_assign_rhs2 (stmt),
				   gimple_assign_rhs3 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}

/* Attempt to fold a conditional statement.  Return true if any changes were
   made.  We only attempt to fold the condition expression, and do not perform
   any transformation that would require alteration of the cfg.  It is
   assumed that the operands have been previously folded.  */
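/* For example (hypothetical GIMPLE): a condition such as

     if (7 > 3) ...

   folds via fold_binary_loc to the constant 1, and the gcond below is
   rewritten to the canonical form 'if (1 != 0)'.  */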
static bool
fold_gimple_cond (gcond *stmt)
{
  tree result = fold_binary_loc (gimple_location (stmt),
				 gimple_cond_code (stmt),
				 boolean_type_node,
				 gimple_cond_lhs (stmt),
				 gimple_cond_rhs (stmt));

  if (result)
    {
      STRIP_USELESS_TYPE_CONVERSION (result);
      if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
	{
	  gimple_cond_set_condition_from_tree (stmt, result);
	  return true;
	}
    }

  return false;
}

/* Replace a statement at *SI_P with a sequence of statements in STMTS,
   adjusting the replacement stmts location and virtual operands.
   If the statement has a lhs the last stmt in the sequence is expected
   to assign to that lhs.  */
static void
gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
{
  gimple stmt = gsi_stmt (*si_p);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  gimple laststore = NULL;
  for (gimple_stmt_iterator i = gsi_last (stmts);
       !gsi_end_p (i); gsi_prev (&i))
    {
      gimple new_stmt = gsi_stmt (i);
      if ((gimple_assign_single_p (new_stmt)
	   && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	  || (is_gimple_call (new_stmt)
	      && (gimple_call_flags (new_stmt)
		  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
	{
	  tree vdef;
	  if (!laststore)
	    vdef = gimple_vdef (stmt);
	  else
	    vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
	  gimple_set_vdef (new_stmt, vdef);
	  if (vdef && TREE_CODE (vdef) == SSA_NAME)
	    SSA_NAME_DEF_STMT (vdef) = new_stmt;
	  laststore = new_stmt;
	}
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  tree reaching_vuse = gimple_vuse (stmt);
  for (gimple_stmt_iterator i = gsi_start (stmts);
       !gsi_end_p (i); gsi_next (&i))
    {
      gimple new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with exact SSA
	 name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
	gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
	reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
	  && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}

/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL,
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */
void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple stmt, new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  push_gimplify_context (gimple_in_ssa_p (cfun));

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  pop_gimplify_context (NULL);
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  gsi_replace (si_p, gimple_build_nop (), true);
	  return;
	}
    }
  else
    {
      tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
				       GSI_CONTINUE_LINKING);
    }

  pop_gimplify_context (NULL);

  gsi_replace_with_seq_vops (si_p, stmts);
}

/* Replace the call at *GSI with the gimple value VAL.  */
static void
replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
{
  gimple stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  gimple repl;
  if (lhs)
    {
      if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
	val = fold_convert (TREE_TYPE (lhs), val);
      repl = gimple_build_assign (lhs, val);
    }
  else
    repl = gimple_build_nop ();
  tree vdef = gimple_vdef (stmt);
  if (vdef && TREE_CODE (vdef) == SSA_NAME)
    {
      unlink_stmt_vdef (stmt);
      release_ssa_name (vdef);
    }
  gsi_replace (gsi, repl, true);
}

/* Replace the call at *GSI with the new call REPL and fold that
   again.  */
static void
replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple repl)
{
  gimple stmt = gsi_stmt (*gsi);
  gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
  gimple_set_location (repl, gimple_location (stmt));
  if (gimple_vdef (stmt)
      && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
    {
      gimple_set_vdef (repl, gimple_vdef (stmt));
      gimple_set_vuse (repl, gimple_vuse (stmt));
      SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
    }
  gsi_replace (gsi, repl, true);
  fold_stmt (gsi);
}

/* Return true if VAR is a VAR_DECL or a component thereof.  */
static bool
var_decl_component_p (tree var)
{
  tree inner = var;
  while (handled_component_p (inner))
    inner = TREE_OPERAND (inner, 0);
  return SSA_VAR_P (inner);
}

/* Fold function call to builtin mem{{,p}cpy,move}.  Return
   false if no simplification can be made.
   If ENDP is 0, return DEST (like memcpy).
   If ENDP is 1, return DEST+LEN (like mempcpy).
   If ENDP is 2, return DEST+LEN-1 (like stpcpy).
   If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
   (memmove).  */
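/* A sketch of the main transformation (hypothetical C input):

     char buf[4];
     memcpy (buf, &i, 4);      // i is a 4-byte int

   becomes, when a suitably aligned 4-byte integer mode exists,

     MEM[(int *)buf] = MEM[(int *)&i];

   a single load/store pair instead of a library call; the ENDP
   variants then differ only in what value is returned.  */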
static bool
gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
			       tree dest, tree src, int endp)
{
  gimple stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  tree len = gimple_call_arg (stmt, 2);
  tree destvar, srcvar;
  location_t loc = gimple_location (stmt);

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      gimple repl;
      if (gimple_call_lhs (stmt))
	repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
      else
	repl = gimple_build_nop ();
      tree vdef = gimple_vdef (stmt);
      if (vdef && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
      gsi_replace (gsi, repl, true);
      return true;
    }

  /* If SRC and DEST are the same (and not volatile), return
     DEST{,+LEN,+LEN-1}.  */
  if (operand_equal_p (src, dest, 0))
    {
      unlink_stmt_vdef (stmt);
      if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	release_ssa_name (gimple_vdef (stmt));
      if (!lhs)
	{
	  gsi_replace (gsi, gimple_build_nop (), true);
	  return true;
	}
      goto done;
    }
  else
    {
      tree srctype, desttype;
      unsigned int src_align, dest_align;
      tree off0;

      /* Build accesses at offset zero with a ref-all character type.  */
      off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
							 ptr_mode, true), 0);

      /* If we can perform the copy efficiently with first doing all loads
	 and then all stores inline it that way.  Currently efficiently
	 means that we can load all the memory into a single integer
	 register which is what MOVE_MAX gives us.  */
      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (tree_fits_uhwi_p (len)
	  && compare_tree_int (len, MOVE_MAX) <= 0
	  /* ???  Don't transform copies from strings with known length;
	     this confuses tree-ssa-strlen.c.  This doesn't handle
	     the case in gcc.dg/strlenopt-8.c which is XFAILed for that
	     reason.  */
	  && !c_strlen (src, 2))
	{
	  unsigned ilen = tree_to_uhwi (len);
	  if (exact_log2 (ilen) != -1)
	    {
	      tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
	      if (type
		  && TYPE_MODE (type) != BLKmode
		  && (GET_MODE_SIZE (TYPE_MODE (type)) * BITS_PER_UNIT
		      == ilen * 8)
		  /* If the destination pointer is not aligned we must be able
		     to emit an unaligned store.  */
		  && (dest_align >= GET_MODE_ALIGNMENT (TYPE_MODE (type))
		      || !SLOW_UNALIGNED_ACCESS (TYPE_MODE (type),
						 dest_align)))
		{
		  tree srctype = type;
		  tree desttype = type;
		  if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
		    srctype = build_aligned_type (type, src_align);
		  tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
		  tree tem = fold_const_aggregate_ref (srcmem);
		  if (tem)
		    srcmem = tem;
		  else if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type))
			   && SLOW_UNALIGNED_ACCESS (TYPE_MODE (type),
						     src_align))
		    srcmem = NULL_TREE;
		  if (srcmem)
		    {
		      gimple new_stmt;
		      if (is_gimple_reg_type (TREE_TYPE (srcmem)))
			{
			  new_stmt = gimple_build_assign (NULL_TREE, srcmem);
			  if (gimple_in_ssa_p (cfun))
			    srcmem = make_ssa_name (TREE_TYPE (srcmem),
						    new_stmt);
			  else
			    srcmem = create_tmp_reg (TREE_TYPE (srcmem));
			  gimple_assign_set_lhs (new_stmt, srcmem);
			  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
			}
		      if (dest_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
			desttype = build_aligned_type (type, dest_align);
		      new_stmt
			= gimple_build_assign (fold_build2 (MEM_REF, desttype,
							    dest, off0),
					       srcmem);
		      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
		      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
		      if (gimple_vdef (new_stmt)
			  && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
			SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
		      if (!lhs)
			{
			  gsi_replace (gsi, new_stmt, true);
			  return true;
			}
		      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
		      goto done;
		    }
		}
	    }
	}
      if (endp == 3)
	{
	  /* Both DEST and SRC must be pointer types.
	     ??? This is what old code did.  Is the testing for pointer types
	     really mandatory?

	     If either SRC is readonly or length is 1, we can use memcpy.  */
	  if (!dest_align || !src_align)
	    return false;
	  if (readonly_data_expr (src)
	      || (tree_fits_uhwi_p (len)
		  && (MIN (src_align, dest_align) / BITS_PER_UNIT
		      >= tree_to_uhwi (len))))
	    {
	      tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
	      if (!fn)
		return false;
	      gimple_call_set_fndecl (stmt, fn);
	      gimple_call_set_arg (stmt, 0, dest);
	      gimple_call_set_arg (stmt, 1, src);
	      fold_stmt (gsi);
	      return true;
	    }
	  /* If *src and *dest can't overlap, optimize into memcpy as well.  */
	  if (TREE_CODE (src) == ADDR_EXPR
	      && TREE_CODE (dest) == ADDR_EXPR)
	    {
	      tree src_base, dest_base, fn;
	      HOST_WIDE_INT src_offset = 0, dest_offset = 0;
	      HOST_WIDE_INT size = -1;
	      HOST_WIDE_INT maxsize = -1;

	      srcvar = TREE_OPERAND (src, 0);
	      src_base = get_ref_base_and_extent (srcvar, &src_offset,
						  &size, &maxsize);
	      destvar = TREE_OPERAND (dest, 0);
	      dest_base = get_ref_base_and_extent (destvar, &dest_offset,
						   &size, &maxsize);
	      if (tree_fits_uhwi_p (len))
		maxsize = tree_to_uhwi (len);
	      else
		maxsize = -1;
	      src_offset /= BITS_PER_UNIT;
	      dest_offset /= BITS_PER_UNIT;
	      if (SSA_VAR_P (src_base)
		  && SSA_VAR_P (dest_base))
		{
		  if (operand_equal_p (src_base, dest_base, 0)
		      && ranges_overlap_p (src_offset, maxsize,
					   dest_offset, maxsize))
		    return false;
		}
	      else if (TREE_CODE (src_base) == MEM_REF
		       && TREE_CODE (dest_base) == MEM_REF)
		{
		  if (! operand_equal_p (TREE_OPERAND (src_base, 0),
					 TREE_OPERAND (dest_base, 0), 0))
		    return false;
		  offset_int off = mem_ref_offset (src_base) + src_offset;
		  if (!wi::fits_shwi_p (off))
		    return false;
		  src_offset = off.to_shwi ();

		  off = mem_ref_offset (dest_base) + dest_offset;
		  if (!wi::fits_shwi_p (off))
		    return false;
		  dest_offset = off.to_shwi ();
		  if (ranges_overlap_p (src_offset, maxsize,
					dest_offset, maxsize))
		    return false;
		}
	      else
		return false;

	      fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
	      if (!fn)
		return false;
	      gimple_call_set_fndecl (stmt, fn);
	      gimple_call_set_arg (stmt, 0, dest);
	      gimple_call_set_arg (stmt, 1, src);
	      fold_stmt (gsi);
	      return true;
	    }
	  /* If the destination and source do not alias optimize into
	     memcpy as well.  */
	  if ((is_gimple_min_invariant (dest)
	       || TREE_CODE (dest) == SSA_NAME)
	      && (is_gimple_min_invariant (src)
		  || TREE_CODE (src) == SSA_NAME))
	    {
	      ao_ref destr, srcr;
	      ao_ref_init_from_ptr_and_size (&destr, dest, len);
	      ao_ref_init_from_ptr_and_size (&srcr, src, len);
	      if (!refs_may_alias_p_1 (&destr, &srcr, false))
		{
		  tree fn;
		  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
		  if (!fn)
		    return false;
		  gimple_call_set_fndecl (stmt, fn);
		  gimple_call_set_arg (stmt, 0, dest);
		  gimple_call_set_arg (stmt, 1, src);
		  fold_stmt (gsi);
		  return true;
		}
	    }

	  return false;
	}
      if (!tree_fits_shwi_p (len))
	return false;
      /* FIXME:
	 This logic loses for arguments like (type *)malloc (sizeof (type)),
	 since we strip the casts of up to VOID return value from malloc.
	 Perhaps we ought to inherit type from non-VOID argument here?  */
      STRIP_NOPS (src);
      STRIP_NOPS (dest);
      if (!POINTER_TYPE_P (TREE_TYPE (src))
	  || !POINTER_TYPE_P (TREE_TYPE (dest)))
	return false;
      /* In the following try to find a type that is most natural to be
	 used for the memcpy source and destination and that allows
	 the most optimization when memcpy is turned into a plain assignment
	 using that type.  In theory we could always use a char[len] type
	 but that only gains us that the destination and source possibly
	 no longer will have their address taken.  */
      /* As we fold (void *)(p + CST) to (void *)p + CST undo this here.  */
      if (TREE_CODE (src) == POINTER_PLUS_EXPR)
	{
	  tree tem = TREE_OPERAND (src, 0);
	  STRIP_NOPS (tem);
	  if (tem != TREE_OPERAND (src, 0))
	    src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
	}
      if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
	{
	  tree tem = TREE_OPERAND (dest, 0);
	  STRIP_NOPS (tem);
	  if (tem != TREE_OPERAND (dest, 0))
	    dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
	}
      srctype = TREE_TYPE (TREE_TYPE (src));
      if (TREE_CODE (srctype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
	{
	  srctype = TREE_TYPE (srctype);
	  STRIP_NOPS (src);
	  src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
	}
      desttype = TREE_TYPE (TREE_TYPE (dest));
      if (TREE_CODE (desttype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
	{
	  desttype = TREE_TYPE (desttype);
	  STRIP_NOPS (dest);
	  dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
	}
      if (TREE_ADDRESSABLE (srctype)
	  || TREE_ADDRESSABLE (desttype))
	return false;

      /* Make sure we are not copying using a floating-point mode or
	 a type whose size possibly does not match its precision.  */
      if (FLOAT_MODE_P (TYPE_MODE (desttype))
	  || TREE_CODE (desttype) == BOOLEAN_TYPE
	  || TREE_CODE (desttype) == ENUMERAL_TYPE)
	desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
      if (FLOAT_MODE_P (TYPE_MODE (srctype))
	  || TREE_CODE (srctype) == BOOLEAN_TYPE
	  || TREE_CODE (srctype) == ENUMERAL_TYPE)
	srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
      if (!srctype)
	srctype = desttype;
      if (!desttype)
	desttype = srctype;
      if (!srctype)
	return false;
      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (dest_align < TYPE_ALIGN (desttype)
	  || src_align < TYPE_ALIGN (srctype))
	return false;

      destvar = dest;
      STRIP_NOPS (destvar);
      if (TREE_CODE (destvar) == ADDR_EXPR
	  && var_decl_component_p (TREE_OPERAND (destvar, 0))
	  && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
	destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
      else
	destvar = NULL_TREE;

      srcvar = src;
      STRIP_NOPS (srcvar);
      if (TREE_CODE (srcvar) == ADDR_EXPR
	  && var_decl_component_p (TREE_OPERAND (srcvar, 0))
	  && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
	{
	  if (!destvar
	      || src_align >= TYPE_ALIGN (desttype))
	    srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
				  srcvar, off0);
	  else if (!STRICT_ALIGNMENT)
	    {
	      srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
					    src_align);
	      srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
	    }
	  else
	    srcvar = NULL_TREE;
	}
      else
	srcvar = NULL_TREE;

      if (srcvar == NULL_TREE && destvar == NULL_TREE)
	return false;

      if (srcvar == NULL_TREE)
	{
	  STRIP_NOPS (src);
	  if (src_align >= TYPE_ALIGN (desttype))
	    srcvar = fold_build2 (MEM_REF, desttype, src, off0);
	  else
	    {
	      if (STRICT_ALIGNMENT)
		return false;
	      srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
					    src_align);
	      srcvar = fold_build2 (MEM_REF, srctype, src, off0);
	    }
	}
      else if (destvar == NULL_TREE)
	{
	  STRIP_NOPS (dest);
	  if (dest_align >= TYPE_ALIGN (srctype))
	    destvar = fold_build2 (MEM_REF, srctype, dest, off0);
	  else
	    {
	      if (STRICT_ALIGNMENT)
		return false;
	      desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
					     dest_align);
	      destvar = fold_build2 (MEM_REF, desttype, dest, off0);
	    }
	}
      gimple new_stmt;
      if (is_gimple_reg_type (TREE_TYPE (srcvar)))
	{
	  new_stmt = gimple_build_assign (NULL_TREE, srcvar);
	  if (gimple_in_ssa_p (cfun))
	    srcvar = make_ssa_name (TREE_TYPE (srcvar), new_stmt);
	  else
	    srcvar = create_tmp_reg (TREE_TYPE (srcvar));
	  gimple_assign_set_lhs (new_stmt, srcvar);
	  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
	  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
	}
      new_stmt = gimple_build_assign (destvar, srcvar);
      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
      if (gimple_vdef (new_stmt)
	  && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
	SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
      if (!lhs)
	{
	  gsi_replace (gsi, new_stmt, true);
	  return true;
	}
      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
    }
done:
  if (endp == 0 || endp == 3)
    len = NULL_TREE;
  else if (endp == 2)
    len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
			   ssize_int (1));
  if (endp == 2 || endp == 1)
    dest = fold_build_pointer_plus_loc (loc, dest, len);

  dest = force_gimple_operand_gsi (gsi, dest, false, NULL_TREE, true,
				   GSI_SAME_STMT);
  gimple repl = gimple_build_assign (lhs, dest);
  gsi_replace (gsi, repl, true);
  return true;
}

/* Fold function call to builtin memset or bzero at *GSI setting the
   memory of size LEN to VAL.  Return whether a simplification was made.  */
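/* For example (hypothetical input): with a 4-byte int type available,

     memset (&i, 0, 4);    // i is a suitably aligned 4-byte int

   is folded below into the plain store 'i = 0;', and a nonzero byte
   value c is first broadcast to all bytes (0x01010101 * c) before
   being stored.  */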
static bool
gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
{
  gimple stmt = gsi_stmt (*gsi);
  tree etype;
  unsigned HOST_WIDE_INT length, cval;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
      return true;
    }

  if (! tree_fits_uhwi_p (len))
    return false;

  if (TREE_CODE (c) != INTEGER_CST)
    return false;

  tree dest = gimple_call_arg (stmt, 0);
  tree var = dest;
  if (TREE_CODE (var) != ADDR_EXPR)
    return false;

  var = TREE_OPERAND (var, 0);
  if (TREE_THIS_VOLATILE (var))
    return false;

  etype = TREE_TYPE (var);
  if (TREE_CODE (etype) == ARRAY_TYPE)
    etype = TREE_TYPE (etype);

  if (!INTEGRAL_TYPE_P (etype)
      && !POINTER_TYPE_P (etype))
    return false;

  if (! var_decl_component_p (var))
    return false;

  length = tree_to_uhwi (len);
  if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
      || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
    return false;

  if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
    return false;

  if (integer_zerop (c))
    cval = 0;
  else
    {
      if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
	return false;

      cval = TREE_INT_CST_LOW (c);
      cval &= 0xff;
      cval |= cval << 8;
      cval |= cval << 16;
      cval |= (cval << 31) << 1;
    }

  var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
  gimple store = gimple_build_assign (var, build_int_cst_type (etype, cval));
  gimple_set_vuse (store, gimple_vuse (stmt));
  tree vdef = gimple_vdef (stmt);
  if (vdef && TREE_CODE (vdef) == SSA_NAME)
    {
      gimple_set_vdef (store, gimple_vdef (stmt));
      SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
    }
  gsi_insert_before (gsi, store, GSI_SAME_STMT);
  if (gimple_call_lhs (stmt))
    {
      gimple asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
      gsi_replace (gsi, asgn, true);
    }
  else
    {
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (gsi);
      gsi_remove (&gsi2, true);
    }

  return true;
}

/* Return the string length, maximum string length or maximum value of
   ARG in LENGTH.
   If ARG is an SSA name variable, follow its use-def chains.  If LENGTH
   is not NULL and, for TYPE == 0, its value is not equal to the length
   we determine or if we are unable to determine the length or value,
   return false.  VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be returned, 1 for maximum string
   length and 2 for maximum value ARG can have.  */
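/* For instance (hypothetical SSA input): for

     p_1 = PHI <"foo"(bb2), "ab"(bb3)>

   get_maxval_strlen (p_1, &len, &visited, 1) walks both PHI arguments
   and yields 3, the maximum string length reaching the use, while
   TYPE == 0 would fail because the two lengths disagree.  */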
static bool
get_maxval_strlen (tree arg, tree *length, bitmap *visited, int type)
{
  tree var, val;
  gimple def_stmt;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      if (TREE_CODE (arg) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
	  && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
	{
	  tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_maxval_strlen (TREE_OPERAND (aop0, 0),
				      length, visited, type);
	}

      if (type == 2)
	{
	  val = arg;
	  if (TREE_CODE (val) != INTEGER_CST
	      || tree_int_cst_sgn (val) < 0)
	    return false;
	}
      else
	val = c_strlen (arg, 1);
      if (!val)
	return false;

      if (*length)
	{
	  if (type > 0)
	    {
	      if (TREE_CODE (*length) != INTEGER_CST
		  || TREE_CODE (val) != INTEGER_CST)
		return false;

	      if (tree_int_cst_lt (*length, val))
		*length = val;
	      return true;
	    }
	  else if (simple_cst_equal (val, *length) != 1)
	    return false;
	}

      *length = val;
      return true;
    }

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  */
  if (!*visited)
    *visited = BITMAP_ALLOC (NULL);
  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
      case GIMPLE_ASSIGN:
	/* The RHS of the statement defining VAR must either have a
	   constant length or come from another SSA_NAME with a constant
	   length.  */
	if (gimple_assign_single_p (def_stmt)
	    || gimple_assign_unary_nop_p (def_stmt))
	  {
	    tree rhs = gimple_assign_rhs1 (def_stmt);
	    return get_maxval_strlen (rhs, length, visited, type);
	  }
	else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
	  {
	    tree op2 = gimple_assign_rhs2 (def_stmt);
	    tree op3 = gimple_assign_rhs3 (def_stmt);
	    return get_maxval_strlen (op2, length, visited, type)
		   && get_maxval_strlen (op3, length, visited, type);
	  }
	return false;

      case GIMPLE_PHI:
	{
	  /* All the arguments of the PHI node must have the same constant
	     length.  */
	  unsigned i;

	  for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
	    {
	      tree arg = gimple_phi_arg (def_stmt, i)->def;

	      /* If this PHI has itself as an argument, we cannot
		 determine the string length of this argument.  However,
		 if we can find a constant string length for the other
		 PHI args then we can still be sure that this is a
		 constant string length.  So be optimistic and just
		 continue with the next argument.  */
	      if (arg == gimple_phi_result (def_stmt))
		continue;

	      if (!get_maxval_strlen (arg, length, visited, type))
		return false;
	    }
	}
	return true;

      default:
	return false;
    }
}

tree
get_maxval_strlen (tree arg, int type)
{
  bitmap visited = NULL;
  tree len = NULL_TREE;
  if (!get_maxval_strlen (arg, &len, &visited, type))
    len = NULL_TREE;
  if (visited)
    BITMAP_FREE (visited);

  return len;
}

/* Fold function call to builtin strcpy with arguments DEST and SRC.
   If LEN is not NULL, it represents the length of the string to be
   copied.  Return NULL_TREE if no simplification can be made.  */
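/* A sketch (hypothetical input): when the source length is known,

     strcpy (d, "abc");

   is folded below into memcpy (d, "abc", 4), copying the terminating
   NUL as part of a fixed-size block.  */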
static bool
gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
			    tree dest, tree src)
{
  location_t loc = gimple_location (gsi_stmt (*gsi));
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (operand_equal_p (src, dest, 0))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (optimize_function_for_size_p (cfun))
    return false;

  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  tree len = get_maxval_strlen (src, 0);
  if (!len)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}

/* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
   If SLEN is not NULL, it represents the length of the source string.
   Return NULL_TREE if no simplification can be made.  */
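/* For example (hypothetical input): with a known source,

     strncpy (d, "abcdef", 4);

   becomes memcpy (d, "abcdef", 4) below, while

     strncpy (d, "ab", 8);

   is left alone, since its zero-padding semantics would need an extra
   memset.  */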
static bool
gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
			     tree dest, tree src, tree len)
{
  location_t loc = gimple_location (gsi_stmt (*gsi));
  tree fn;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  /* We can't compare slen with len as constants below if len is not a
     constant.  */
  if (TREE_CODE (len) != INTEGER_CST)
    return false;

  /* Now, we must be passed a constant src ptr parameter.  */
  tree slen = get_maxval_strlen (src, 0);
  if (!slen || TREE_CODE (slen) != INTEGER_CST)
    return false;

  slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

  /* We do not support simplification of this case, though we do
     support it when expanding trees into RTL.  */
  /* FIXME: generate a call to __builtin_memset.  */
  if (tree_int_cst_lt (slen, len))
    return false;

  /* OK transform into builtin memcpy.  */
  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}

/* Simplify a call to the strcat builtin.  DST and SRC are the arguments
   to the call.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */
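/* A sketch of the store-by-pieces split (hypothetical input): when the
   source length is known and we optimize for speed,

     strcat (d, "xy");

   becomes

     tmp = strlen (d);
     memcpy (d + tmp, "xy", 3);

   so the copy size is a compile-time constant.  */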
static bool
gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);

  const char *p = c_getstr (src);

  /* If the string length is zero, return the dst parameter.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
    return false;

  /* See if we can store by pieces into (dst + strlen(dst)).  */
  tree newdst;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
  tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);

  if (!strlen_fn || !memcpy_fn)
    return false;

  /* If the length of the source string isn't computable don't
     split strcat into strlen and memcpy.  */
  tree len = get_maxval_strlen (src, 0);
  if (! len)
    return false;

  /* Create strlen (dst).  */
  gimple_seq stmts = NULL, stmts2;
  gimple repl = gimple_build_call (strlen_fn, 1, dst);
  gimple_set_location (repl, loc);
  if (gimple_in_ssa_p (cfun))
    newdst = make_ssa_name (size_type_node);
  else
    newdst = create_tmp_reg (size_type_node);
  gimple_call_set_lhs (repl, newdst);
  gimple_seq_add_stmt_without_update (&stmts, repl);

  /* Create (dst p+ strlen (dst)).  */
  newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
  newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len,
			build_int_cst (size_type_node, 1));
  len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
  gimple_seq_add_stmt_without_update (&stmts, repl);
  if (gimple_call_lhs (stmt))
    {
      repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      gsi_replace_with_seq_vops (gsi, stmts);
      /* gsi now points at the assignment to the lhs, get a
	 stmt iterator to the memcpy call.
	 ???  We can't use gsi_for_stmt as that doesn't work when the
	 CFG isn't built yet.  */
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (&gsi2);
      fold_stmt (&gsi2);
    }
  else
    {
      gsi_replace_with_seq_vops (gsi, stmts);
      fold_stmt (gsi);
    }
  return true;
}

/* Fold a call to the __strcat_chk builtin FNDECL.  DEST, SRC, and SIZE
   are the arguments to the call.  */
static bool
gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree size = gimple_call_arg (stmt, 2);
  tree fn;
  const char *p;

  p = c_getstr (src);
  /* If the SRC parameter is "", return DEST.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
    return false;

  /* If __builtin_strcat_chk is used, assume strcat is available.  */
  fn = builtin_decl_explicit (BUILT_IN_STRCAT);
  if (!fn)
    return false;

  gimple repl = gimple_build_call (fn, 2, dest, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}

/* Simplify a call to the strncat builtin.  */
static bool
gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dst = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  const char *p = c_getstr (src);

  /* If the requested length is zero, or the src parameter string
     length is zero, return the dst parameter.  */
  if (integer_zerop (len) || (p && *p == '\0'))
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  /* If the requested len is greater than or equal to the string
     length, call strcat.  */
  if (TREE_CODE (len) == INTEGER_CST && p
      && compare_tree_int (len, strlen (p)) >= 0)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);

      /* If the replacement _DECL isn't initialized, don't do the
	 transformation.  */
      if (!fn)
	return false;

      gcall *repl = gimple_build_call (fn, 2, dst, src);
      replace_call_with_call_and_fold (gsi, repl);
      return true;
    }

  return false;
}

/* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
   LEN, and SIZE.  */
static bool
gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);
  tree size = gimple_call_arg (stmt, 3);
  tree fn;
  const char *p;

  p = c_getstr (src);
  /* If the SRC parameter is "" or if LEN is 0, return DEST.  */
  if ((p && *p == '\0')
      || integer_zerop (len))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  if (! integer_all_onesp (size))
    {
      tree src_len = c_strlen (src, 1);
      if (src_len
	  && tree_fits_uhwi_p (src_len)
	  && tree_fits_uhwi_p (len)
	  && ! tree_int_cst_lt (len, src_len))
	{
	  /* If LEN >= strlen (SRC), optimize into __strcat_chk.  */
	  fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
	  if (!fn)
	    return false;

	  gimple repl = gimple_build_call (fn, 3, dest, src, size);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
      return false;
    }

  /* If __builtin_strncat_chk is used, assume strncat is available.  */
  fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
  if (!fn)
    return false;

  gimple repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}

/* Fold a call to the fputs builtin.  ARG0 and ARG1 are the arguments
   to the call.  IGNORE is true if the value returned
   by the builtin will be ignored.  UNLOCKED is true if this is
   actually a call to fputs_unlocked.  If LEN is non-NULL, it represents
   the known length of the string.  Return NULL_TREE if no simplification
   can be made.  */
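/* For example (hypothetical input, return value unused):

     fputs ("", f);     // folded away entirely
     fputs ("x", f);    // becomes fputc ('x', f)
     fputs ("abc", f);  // becomes fwrite ("abc", 1, 3, f)  */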
static bool
gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
			   tree arg0, tree arg1,
			   bool unlocked)
{
  gimple stmt = gsi_stmt (*gsi);

  /* If we're using an unlocked function, assume the other unlocked
     functions exist explicitly.  */
  tree const fn_fputc = (unlocked
			 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
			 : builtin_decl_implicit (BUILT_IN_FPUTC));
  tree const fn_fwrite = (unlocked
			  ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
			  : builtin_decl_implicit (BUILT_IN_FWRITE));

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt))
    return false;

  /* Get the length of the string passed to fputs.  If the length
     can't be determined, punt.  */
  tree len = get_maxval_strlen (arg0, 0);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return false;

  switch (compare_tree_int (len, 1))
    {
    case -1: /* length is 0, delete the call entirely.  */
      replace_call_with_value (gsi, integer_zero_node);
      return true;

    case 0: /* length is 1, call fputc.  */
      {
	const char *p = c_getstr (arg0);
	if (p != NULL)
	  {
	    if (!fn_fputc)
	      return false;

	    gimple repl = gimple_build_call (fn_fputc, 2,
					     build_int_cst
					       (integer_type_node, p[0]), arg1);
	    replace_call_with_call_and_fold (gsi, repl);
	    return true;
	  }
      }
      /* FALLTHROUGH */
    case 1: /* length is greater than 1, call fwrite.  */
      {
	/* If optimizing for size keep fputs.  */
	if (optimize_function_for_size_p (cfun))
	  return false;
	/* New argument list transforming fputs(string, stream) to
	   fwrite(string, 1, len, stream).  */
	if (!fn_fwrite)
	  return false;

	gimple repl = gimple_build_call (fn_fwrite, 4, arg0,
					 size_one_node, len, arg1);
	replace_call_with_call_and_fold (gsi, repl);
	return true;
      }
    default:
      gcc_unreachable ();
    }
  return false;
}

/* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   DEST, SRC, LEN, and SIZE are the arguments to the call.
   IGNORE is true, if return value can be ignored.  FCODE is the BUILT_IN_*
   code of the builtin.  If MAXLEN is not NULL, it is maximum length
   passed as third argument.  */
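/* For example (hypothetical input): when the object size is known and
   provably large enough,

     __memcpy_chk (d, s, 16, 32);

   is folded below into plain memcpy (d, s, 16); when the check cannot
   be discharged at compile time, the _chk call is kept.  */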
static bool
gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
				tree dest, tree src, tree len, tree size,
				enum built_in_function fcode)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST
     (resp. DEST+LEN for __mempcpy_chk).  */
  if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
    {
      if (fcode != BUILT_IN_MEMPCPY_CHK)
	{
	  replace_call_with_value (gsi, dest);
	  return true;
	}
      else
	{
	  tree temp = fold_build_pointer_plus_loc (loc, dest, len);
	  temp = force_gimple_operand_gsi (gsi, temp,
					   false, NULL_TREE, true,
					   GSI_SAME_STMT);
	  replace_call_with_value (gsi, temp);
	  return true;
	}
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (len, 2);
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
		{
		  /* (void) __mempcpy_chk () can be optimized into
		     (void) __memcpy_chk ().  */
		  fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
		  if (!fn)
		    return false;

		  gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}
	      return false;
	    }
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  fn = NULL_TREE;
  /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
     mem{cpy,pcpy,move,set} is available.  */
  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
      break;
    case BUILT_IN_MEMPCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
      break;
    case BUILT_IN_MEMMOVE_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
      break;
    case BUILT_IN_MEMSET_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMSET);
      break;
    default:
      break;
    }

  if (!fn)
    return false;

  gimple repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}

/* Fold a call to the __st[rp]cpy_chk builtin.
   DEST, SRC, and SIZE are the arguments to the call.
   IGNORE is true if return value can be ignored.  FCODE is the BUILT_IN_*
   code of the builtin.  If MAXLEN is not NULL, it is maximum length of
   strings passed as second argument.  */
static bool
gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
				tree dest,
				tree src, tree size,
				enum built_in_function fcode)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree len, fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (src, 1);
  if (! integer_all_onesp (size))
    {
      len = c_strlen (src, 1);
      if (! len || ! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_STPCPY_CHK)
		{
		  if (! ignore)
		    return false;

		  /* If return value of __stpcpy_chk is ignored,
		     optimize into __strcpy_chk.  */
		  fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
		  if (!fn)
		    return false;

		  gimple repl = gimple_build_call (fn, 3, dest, src, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}

	      if (! len || TREE_SIDE_EFFECTS (len))
		return false;

	      /* If c_strlen returned something, but not a constant,
		 transform __strcpy_chk into __memcpy_chk.  */
	      fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	      if (!fn)
		return false;

	      len = fold_convert_loc (loc, size_type_node, len);
	      len = size_binop_loc (loc, PLUS_EXPR, len,
				    build_int_cst (size_type_node, 1));
	      len = force_gimple_operand_gsi (gsi, len, true, NULL_TREE,
					      true, GSI_SAME_STMT);
	      gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }
	}
      else
	maxlen = len;

      if (! tree_int_cst_lt (maxlen, size))
	return false;
    }

  /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
			      ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
  if (!fn)
    return false;

  gimple repl = gimple_build_call (fn, 2, dest, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}

/* Fold a call to the __st{r,p}ncpy_chk builtin.  DEST, SRC, LEN, and SIZE
   are the arguments to the call.  If MAXLEN is not NULL, it is maximum
   length passed as third argument.  IGNORE is true if return value can be
   ignored.  FCODE is the BUILT_IN_* code of the builtin.  */
static bool
gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
				 tree dest, tree src,
				 tree len, tree size,
				 enum built_in_function fcode)
{
  gimple stmt = gsi_stmt (*gsi);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
    {
      /* If return value of __stpncpy_chk is ignored,
	 optimize into __strncpy_chk.  */
      fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
      if (fn)
	{
	  gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (len, 2);
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    return false;
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
			      ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
  if (!fn)
    return false;

  gimple repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}

/* Fold function call to builtin stpcpy with arguments DEST and SRC.
   Return NULL_TREE if no simplification can be made.  */
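/* A sketch (hypothetical input): with a known source length,

     r = stpcpy (d, "abc");

   is rewritten below as

     memcpy (d, "abc", 4);
     r = d + 3;

   and a stpcpy whose result is unused degrades to plain strcpy.  */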
static bool
gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  location_t loc = gimple_location (stmt);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree fn, len, lenp1;

  /* If the result is unused, replace stpcpy with strcpy.  */
  if (gimple_call_lhs (stmt) == NULL_TREE)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;
      gimple_call_set_fndecl (stmt, fn);
      fold_stmt (gsi);
      return true;
    }

  len = c_strlen (src, 1);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return false;

  if (optimize_function_for_size_p (cfun)
      /* If length is zero it's small enough.  */
      && !integer_zerop (len))
    return false;

  /* If the source has a known length replace stpcpy with memcpy.  */
  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  gimple_seq stmts = NULL;
  tree tem = gimple_convert (&stmts, loc, size_type_node, len);
  lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
			tem, build_int_cst (size_type_node, 1));
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
  gimple_set_vuse (repl, gimple_vuse (stmt));
  gimple_set_vdef (repl, gimple_vdef (stmt));
  if (gimple_vdef (repl)
      && TREE_CODE (gimple_vdef (repl)) == SSA_NAME)
    SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
  gsi_insert_before (gsi, repl, GSI_SAME_STMT);
  /* Replace the result with dest + len.  */
  stmts = NULL;
  tem = gimple_convert (&stmts, loc, sizetype, len);
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
				      POINTER_PLUS_EXPR, dest, tem);
  gsi_replace (gsi, ret, true);
  /* Finally fold the memcpy call.  */
  gimple_stmt_iterator gsi2 = *gsi;
  gsi_prev (&gsi2);
  fold_stmt (&gsi2);
  return true;
}

/* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS.  Return
   NULL_TREE if a normal call should be emitted rather than expanding
   the function inline.  FCODE is either BUILT_IN_SNPRINTF_CHK or
   BUILT_IN_VSNPRINTF_CHK.  If MAXLEN is not NULL, it is maximum length
   passed as second argument.  */
2128 gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator
*gsi
,
2129 enum built_in_function fcode
)
2131 gcall
*stmt
= as_a
<gcall
*> (gsi_stmt (*gsi
));
2132 tree dest
, size
, len
, fn
, fmt
, flag
;
2133 const char *fmt_str
;
2135 /* Verify the required arguments in the original call. */
2136 if (gimple_call_num_args (stmt
) < 5)
2139 dest
= gimple_call_arg (stmt
, 0);
2140 len
= gimple_call_arg (stmt
, 1);
2141 flag
= gimple_call_arg (stmt
, 2);
2142 size
= gimple_call_arg (stmt
, 3);
2143 fmt
= gimple_call_arg (stmt
, 4);
2145 if (! tree_fits_uhwi_p (size
))
2148 if (! integer_all_onesp (size
))
2150 tree maxlen
= get_maxval_strlen (len
, 2);
2151 if (! tree_fits_uhwi_p (len
))
2153 /* If LEN is not constant, try MAXLEN too.
2154 For MAXLEN only allow optimizing into non-_ocs function
2155 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2156 if (maxlen
== NULL_TREE
|| ! tree_fits_uhwi_p (maxlen
))
2162 if (tree_int_cst_lt (size
, maxlen
))
2166 if (!init_target_chars ())
2169 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
2170 or if format doesn't contain % chars or is "%s". */
2171 if (! integer_zerop (flag
))
2173 fmt_str
= c_getstr (fmt
);
2174 if (fmt_str
== NULL
)
2176 if (strchr (fmt_str
, target_percent
) != NULL
2177 && strcmp (fmt_str
, target_percent_s
))
2181 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
2183 fn
= builtin_decl_explicit (fcode
== BUILT_IN_VSNPRINTF_CHK
2184 ? BUILT_IN_VSNPRINTF
: BUILT_IN_SNPRINTF
);
2188 /* Replace the called function and the first 5 argument by 3 retaining
2189 trailing varargs. */
2190 gimple_call_set_fndecl (stmt
, fn
);
2191 gimple_call_set_fntype (stmt
, TREE_TYPE (fn
));
2192 gimple_call_set_arg (stmt
, 0, dest
);
2193 gimple_call_set_arg (stmt
, 1, len
);
2194 gimple_call_set_arg (stmt
, 2, fmt
);
2195 for (unsigned i
= 3; i
< gimple_call_num_args (stmt
) - 2; ++i
)
2196 gimple_call_set_arg (stmt
, i
, gimple_call_arg (stmt
, i
+ 2));
2197 gimple_set_num_ops (stmt
, gimple_num_ops (stmt
) - 2);
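
/* Illustration: once SIZE is a known constant that is either (size_t)-1
   or at least the bound on LEN, the checked call

     __builtin_snprintf_chk (buf, len, 0, size, "%d", i);
   becomes the plain
     snprintf (buf, len, "%d", i);

   with any trailing varargs shifted down in place.  */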
/* Fold a call to __{,v}sprintf_chk at *GSI.  Return true if the call
   was simplified, false if a normal call should be emitted instead.
   FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK.  */

static bool
gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
                                 enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;
  unsigned nargs = gimple_call_num_args (stmt);

  /* Verify the required arguments in the original call.  */
  if (nargs < 4)
    return false;
  dest = gimple_call_arg (stmt, 0);
  flag = gimple_call_arg (stmt, 1);
  size = gimple_call_arg (stmt, 2);
  fmt = gimple_call_arg (stmt, 3);

  if (! tree_fits_uhwi_p (size))
    return false;

  len = NULL_TREE;

  if (!init_target_chars ())
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str != NULL)
    {
      /* If the format doesn't contain % args or %%, we know the size.  */
      if (strchr (fmt_str, target_percent) == 0)
        {
          if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
            len = build_int_cstu (size_type_node, strlen (fmt_str));
        }
      /* If the format is "%s" and first ... argument is a string literal,
         we know the size too.  */
      else if (fcode == BUILT_IN_SPRINTF_CHK
               && strcmp (fmt_str, target_percent_s) == 0)
        {
          tree arg;

          if (nargs == 5)
            {
              arg = gimple_call_arg (stmt, 4);
              if (POINTER_TYPE_P (TREE_TYPE (arg)))
                {
                  len = c_strlen (arg, 1);
                  if (! len || ! tree_fits_uhwi_p (len))
                    len = NULL_TREE;
                }
            }
        }
    }

  if (! integer_all_onesp (size))
    {
      if (! len || ! tree_int_cst_lt (len, size))
        return false;
    }

  /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      if (fmt_str == NULL)
        return false;
      if (strchr (fmt_str, target_percent) != NULL
          && strcmp (fmt_str, target_percent_s))
        return false;
    }

  /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
                              ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first four arguments by two,
     retaining trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, fmt);
  for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
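
/* Illustration: when the emitted length is provably below SIZE,

     __builtin_sprintf_chk (buf, 0, 8, "abc");
   becomes
     sprintf (buf, "abc");

   because strlen ("abc") == 3 < 8; with SIZE == (size_t)-1 the length
   check is skipped entirely.  */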
/* Simplify a call to the sprintf builtin at *GSI with arguments DEST,
   FMT, and possibly ORIG.  ORIG may be null if this is a 2-argument
   call.  We don't attempt to simplify calls with more than 3 arguments.

   Return true if a simplification was made, false otherwise.  */

static bool
gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree fmt = gimple_call_arg (stmt, 1);
  tree orig = NULL_TREE;
  const char *fmt_str = NULL;

  /* Verify the required arguments in the original call.  We deal with two
     types of sprintf() calls: 'sprintf (str, fmt)' and
     'sprintf (dest, "%s", orig)'.  */
  if (gimple_call_num_args (stmt) > 3)
    return false;

  if (gimple_call_num_args (stmt) == 3)
    orig = gimple_call_arg (stmt, 2);

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);

      if (!fn)
        return false;

      /* Don't optimize sprintf (buf, "abc", ptr++).  */
      if (orig)
        return false;

      /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
         'format' is known to contain no % formats.  */
      gimple_seq stmts = NULL;
      gimple repl = gimple_build_call (fn, 2, dest, fmt);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
        {
          repl = gimple_build_assign (gimple_call_lhs (stmt),
                                      build_int_cst (integer_type_node,
                                                     strlen (fmt_str)));
          gimple_seq_add_stmt_without_update (&stmts, repl);
          gsi_replace_with_seq_vops (gsi, stmts);
          /* gsi now points at the assignment to the lhs, get a
             stmt iterator to the strcpy call.
             ???  We can't use gsi_for_stmt as that doesn't work when the
             CFG isn't built yet.  */
          gimple_stmt_iterator gsi2 = *gsi;
          gsi_prev (&gsi2);
          fold_stmt (&gsi2);
        }
      else
        {
          gsi_replace_with_seq_vops (gsi, stmts);
          fold_stmt (gsi);
        }
      return true;
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);

      if (!fn)
        return false;

      /* Don't crash on sprintf (str1, "%s").  */
      if (!orig)
        return false;

      tree orig_len = NULL_TREE;
      if (gimple_call_lhs (stmt))
        {
          orig_len = get_maxval_strlen (orig, 0);
          if (!orig_len)
            return false;
        }

      /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2).  */
      gimple_seq stmts = NULL;
      gimple repl = gimple_build_call (fn, 2, dest, orig);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
        {
          if (!useless_type_conversion_p (integer_type_node,
                                          TREE_TYPE (orig_len)))
            orig_len = fold_convert (integer_type_node, orig_len);
          repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
          gimple_seq_add_stmt_without_update (&stmts, repl);
          gsi_replace_with_seq_vops (gsi, stmts);
          /* gsi now points at the assignment to the lhs, get a
             stmt iterator to the strcpy call.
             ???  We can't use gsi_for_stmt as that doesn't work when the
             CFG isn't built yet.  */
          gimple_stmt_iterator gsi2 = *gsi;
          gsi_prev (&gsi2);
          fold_stmt (&gsi2);
        }
      else
        {
          gsi_replace_with_seq_vops (gsi, stmts);
          fold_stmt (gsi);
        }
      return true;
    }
  return false;
}
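
/* Illustration (user-level view of the two cases above):

     sprintf (buf, "hello");      ->  strcpy (buf, "hello");
     n = sprintf (buf, "hello");  ->  strcpy (buf, "hello"); n = 5;
     sprintf (buf, "%s", s);      ->  strcpy (buf, s);

   In the "%s" form a used result additionally requires the length of S
   to be known so it can be substituted for N.  */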
/* Simplify a call to the snprintf builtin at *GSI with arguments DEST,
   DESTSIZE, FMT, and possibly ORIG.  ORIG may be null if this is a
   3-argument call.  We don't attempt to simplify calls with more than
   4 arguments.

   Return true if a simplification was made, false otherwise.  */

static bool
gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest = gimple_call_arg (stmt, 0);
  tree destsize = gimple_call_arg (stmt, 1);
  tree fmt = gimple_call_arg (stmt, 2);
  tree orig = NULL_TREE;
  const char *fmt_str = NULL;

  if (gimple_call_num_args (stmt) > 4)
    return false;

  if (gimple_call_num_args (stmt) == 4)
    orig = gimple_call_arg (stmt, 3);

  if (!tree_fits_uhwi_p (destsize))
    return false;
  unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
        return false;

      /* Don't optimize snprintf (buf, 4, "abc", ptr++).  */
      if (orig)
        return false;

      /* We could expand this as
         memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
         or to
         memcpy (str, fmt_with_nul_at_cstm1, cst);
         but in the former case that might increase code size
         and in the latter case grow .rodata section too much.
         So punt for now.  */
      size_t len = strlen (fmt_str);
      if (len >= destlen)
        return false;

      gimple_seq stmts = NULL;
      gimple repl = gimple_build_call (fn, 2, dest, fmt);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
        {
          repl = gimple_build_assign (gimple_call_lhs (stmt),
                                      build_int_cst (integer_type_node, len));
          gimple_seq_add_stmt_without_update (&stmts, repl);
          gsi_replace_with_seq_vops (gsi, stmts);
          /* gsi now points at the assignment to the lhs, get a
             stmt iterator to the strcpy call.
             ???  We can't use gsi_for_stmt as that doesn't work when the
             CFG isn't built yet.  */
          gimple_stmt_iterator gsi2 = *gsi;
          gsi_prev (&gsi2);
          fold_stmt (&gsi2);
        }
      else
        {
          gsi_replace_with_seq_vops (gsi, stmts);
          fold_stmt (gsi);
        }
      return true;
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
        return false;

      /* Don't crash on snprintf (str1, cst, "%s").  */
      if (!orig)
        return false;

      tree orig_len = get_maxval_strlen (orig, 0);
      if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
        return false;

      /* We could expand this as
         memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
         or to
         memcpy (str1, str2_with_nul_at_cstm1, cst);
         but in the former case that might increase code size
         and in the latter case grow .rodata section too much.
         So punt for now.  */
      if (compare_tree_int (orig_len, destlen) >= 0)
        return false;

      /* Convert snprintf (str1, cst, "%s", str2) into
         strcpy (str1, str2) if strlen (str2) < cst.  */
      gimple_seq stmts = NULL;
      gimple repl = gimple_build_call (fn, 2, dest, orig);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
        {
          if (!useless_type_conversion_p (integer_type_node,
                                          TREE_TYPE (orig_len)))
            orig_len = fold_convert (integer_type_node, orig_len);
          repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
          gimple_seq_add_stmt_without_update (&stmts, repl);
          gsi_replace_with_seq_vops (gsi, stmts);
          /* gsi now points at the assignment to the lhs, get a
             stmt iterator to the strcpy call.
             ???  We can't use gsi_for_stmt as that doesn't work when the
             CFG isn't built yet.  */
          gimple_stmt_iterator gsi2 = *gsi;
          gsi_prev (&gsi2);
          fold_stmt (&gsi2);
        }
      else
        {
          gsi_replace_with_seq_vops (gsi, stmts);
          fold_stmt (gsi);
        }
      return true;
    }
  return false;
}
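
/* Illustration: with a constant destination size that provably fits,

     snprintf (buf, 16, "abc");     ->  strcpy (buf, "abc");   result 3
     snprintf (buf, 16, "%s", s);   ->  strcpy (buf, s);

   where the "%s" form needs an upper bound on strlen (s) below 16.  */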
/* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk
   builtins at *GSI.  FP, FMT, and ARG are the arguments to the call.
   We don't fold calls with more than 3 arguments, and ARG may be null
   in the 2-argument case.

   Return true if a simplification was made, false otherwise.  FCODE is
   the BUILT_IN_* code of the function to be simplified.  */

static bool
gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
                             tree fp, tree fmt, tree arg,
                             enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree fn_fputc, fn_fputs;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt) != NULL_TREE)
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
         unlocked functions exist explicitly.  */
      fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
      fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
    }
  else
    {
      fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
      fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
    }

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use fputs.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
          && arg)
        return false;

      /* If the format specifier was "", fprintf does nothing.  */
      if (fmt_str[0] == '\0')
        {
          replace_call_with_value (gsi, NULL_TREE);
          return true;
        }

      /* When "string" doesn't contain %, replace all cases of
         fprintf (fp, string) with fputs (string, fp).  The fputs
         builtin will take care of special cases like length == 1.  */
      if (fn_fputs)
        {
          gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
          replace_call_with_call_and_fold (gsi, repl);
          return true;
        }
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
    return false;

  /* If the format specifier was "%s", call __builtin_fputs (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_s) == 0)
    {
      if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
        return false;
      if (fn_fputs)
        {
          gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
          replace_call_with_call_and_fold (gsi, repl);
          return true;
        }
    }

  /* If the format specifier was "%c", call __builtin_fputc (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg
          || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
        return false;
      if (fn_fputc)
        {
          gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
          replace_call_with_call_and_fold (gsi, repl);
          return true;
        }
    }

  return false;
}
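
/* Illustration (each requires the call's result to be unused):

     fprintf (fp, "plain text");  ->  fputs ("plain text", fp);
     fprintf (fp, "%s", s);       ->  fputs (s, fp);
     fprintf (fp, "%c", c);       ->  fputc (c, fp);
     fprintf (fp, "");            ->  (removed)  */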
/* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk
   builtins at *GSI.  FMT and ARG are the arguments to the call; we
   don't fold cases with more than 2 arguments, and ARG may be null if
   this is a 1-argument case.

   Return true if a simplification was made, false otherwise.  FCODE is
   the BUILT_IN_* code of the function to be simplified.  */

static bool
gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
                            tree arg, enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree fn_putchar, fn_puts, newarg;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt) != NULL_TREE)
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (fcode == BUILT_IN_PRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
         unlocked functions exist explicitly.  */
      fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
      fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
    }
  else
    {
      fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
      fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
    }

  if (!init_target_chars ())
    return false;

  if (strcmp (fmt_str, target_percent_s) == 0
      || strchr (fmt_str, target_percent) == NULL)
    {
      const char *str;

      if (strcmp (fmt_str, target_percent_s) == 0)
        {
          if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
            return false;

          if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
            return false;

          str = c_getstr (arg);
          if (str == NULL)
            return false;
        }
      else
        {
          /* The format specifier doesn't contain any '%' characters.  */
          if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
              && arg)
            return false;
          str = fmt_str;
        }

      /* If the string was "", printf does nothing.  */
      if (str[0] == '\0')
        {
          replace_call_with_value (gsi, NULL_TREE);
          return true;
        }

      /* If the string has length of 1, call putchar.  */
      if (str[1] == '\0')
        {
          /* Given printf("c"), (where c is any one character,)
             convert "c"[0] to an int and pass that to the replacement
             function.  */
          newarg = build_int_cst (integer_type_node, str[0]);
          if (fn_putchar)
            {
              gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
              replace_call_with_call_and_fold (gsi, repl);
              return true;
            }
        }
      else
        {
          /* If the string was "string\n", call puts("string").  */
          size_t len = strlen (str);
          if ((unsigned char)str[len - 1] == target_newline
              && (size_t) (int) len == len
              && (int) len > 0)
            {
              char *newstr;
              tree offset_node, string_cst;

              /* Create a NUL-terminated string that's one char shorter
                 than the original, stripping off the trailing '\n'.  */
              newarg = build_string_literal (len, str);
              string_cst = string_constant (newarg, &offset_node);
              gcc_checking_assert (string_cst
                                   && (TREE_STRING_LENGTH (string_cst)
                                       == (int) len)
                                   && integer_zerop (offset_node)
                                   && (unsigned char)
                                      TREE_STRING_POINTER (string_cst)[len - 1]
                                      == target_newline);
              /* build_string_literal creates a new STRING_CST,
                 modify it in place to avoid double copying.  */
              newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
              newstr[len - 1] = '\0';
              if (fn_puts)
                {
                  gcall *repl = gimple_build_call (fn_puts, 1, newarg);
                  replace_call_with_call_and_fold (gsi, repl);
                  return true;
                }
            }
          else
            /* We'd like to arrange to call fputs(string,stdout) here,
               but we need stdout and don't have a way to get it yet.  */
            return false;
        }
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
    return false;

  /* If the format specifier was "%s\n", call __builtin_puts(arg).  */
  else if (strcmp (fmt_str, target_percent_s_newline) == 0)
    {
      if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
        return false;
      if (fn_puts)
        {
          gcall *repl = gimple_build_call (fn_puts, 1, arg);
          replace_call_with_call_and_fold (gsi, repl);
          return true;
        }
    }

  /* If the format specifier was "%c", call __builtin_putchar(arg).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg || ! useless_type_conversion_p (integer_type_node,
                                               TREE_TYPE (arg)))
        return false;
      if (fn_putchar)
        {
          gcall *repl = gimple_build_call (fn_putchar, 1, arg);
          replace_call_with_call_and_fold (gsi, repl);
          return true;
        }
    }

  return false;
}
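
/* Illustration (result unused in each case):

     printf ("x");         ->  putchar ('x');
     printf ("abc\n");     ->  puts ("abc");
     printf ("%s\n", s);   ->  puts (s);
     printf ("%c", c);     ->  putchar (c);
     printf ("");          ->  (removed)  */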
/* Fold a call to __builtin_strlen at *GSI if the length of the argument
   can be computed.  */

static bool
gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree len = get_maxval_strlen (gimple_call_arg (stmt, 0), 0);
  if (!len)
    return false;
  len = force_gimple_operand_gsi (gsi, len, true, NULL, true, GSI_SAME_STMT);
  replace_call_with_value (gsi, len);
  return true;
}
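
/* Illustration: when the argument's length is known, e.g.

     size_t n = strlen ("abcd");

   the call disappears and N is set to the constant 4 directly.  */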
/* Fold the non-target builtin at *GSI and return whether any simplification
   was made.  */

static bool
gimple_fold_builtin (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
  tree callee = gimple_call_fndecl (stmt);

  /* Give up for always_inline inline builtins until they are
     inlined.  */
  if (avoid_folding_inline_builtin (callee))
    return false;

  unsigned n = gimple_call_num_args (stmt);
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
  switch (fcode)
    {
    case BUILT_IN_BZERO:
      return gimple_fold_builtin_memset (gsi, integer_zero_node,
                                         gimple_call_arg (stmt, 1));
    case BUILT_IN_MEMSET:
      return gimple_fold_builtin_memset (gsi,
                                         gimple_call_arg (stmt, 1),
                                         gimple_call_arg (stmt, 2));
    case BUILT_IN_BCOPY:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 1),
                                            gimple_call_arg (stmt, 0), 3);
    case BUILT_IN_MEMCPY:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
                                            gimple_call_arg (stmt, 1), 0);
    case BUILT_IN_MEMPCPY:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
                                            gimple_call_arg (stmt, 1), 1);
    case BUILT_IN_MEMMOVE:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
                                            gimple_call_arg (stmt, 1), 3);
    case BUILT_IN_SPRINTF_CHK:
    case BUILT_IN_VSPRINTF_CHK:
      return gimple_fold_builtin_sprintf_chk (gsi, fcode);
    case BUILT_IN_STRCAT_CHK:
      return gimple_fold_builtin_strcat_chk (gsi);
    case BUILT_IN_STRNCAT_CHK:
      return gimple_fold_builtin_strncat_chk (gsi);
    case BUILT_IN_STRLEN:
      return gimple_fold_builtin_strlen (gsi);
    case BUILT_IN_STRCPY:
      return gimple_fold_builtin_strcpy (gsi,
                                         gimple_call_arg (stmt, 0),
                                         gimple_call_arg (stmt, 1));
    case BUILT_IN_STRNCPY:
      return gimple_fold_builtin_strncpy (gsi,
                                          gimple_call_arg (stmt, 0),
                                          gimple_call_arg (stmt, 1),
                                          gimple_call_arg (stmt, 2));
    case BUILT_IN_STRCAT:
      return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
                                         gimple_call_arg (stmt, 1));
    case BUILT_IN_STRNCAT:
      return gimple_fold_builtin_strncat (gsi);
    case BUILT_IN_FPUTS:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
                                        gimple_call_arg (stmt, 1), false);
    case BUILT_IN_FPUTS_UNLOCKED:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
                                        gimple_call_arg (stmt, 1), true);
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      return gimple_fold_builtin_memory_chk (gsi,
                                             gimple_call_arg (stmt, 0),
                                             gimple_call_arg (stmt, 1),
                                             gimple_call_arg (stmt, 2),
                                             gimple_call_arg (stmt, 3),
                                             fcode);
    case BUILT_IN_STPCPY:
      return gimple_fold_builtin_stpcpy (gsi);
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      return gimple_fold_builtin_stxcpy_chk (gsi,
                                             gimple_call_arg (stmt, 0),
                                             gimple_call_arg (stmt, 1),
                                             gimple_call_arg (stmt, 2),
                                             fcode);
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      return gimple_fold_builtin_stxncpy_chk (gsi,
                                              gimple_call_arg (stmt, 0),
                                              gimple_call_arg (stmt, 1),
                                              gimple_call_arg (stmt, 2),
                                              gimple_call_arg (stmt, 3),
                                              fcode);
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      return gimple_fold_builtin_snprintf_chk (gsi, fcode);
    case BUILT_IN_SNPRINTF:
      return gimple_fold_builtin_snprintf (gsi);
    case BUILT_IN_SPRINTF:
      return gimple_fold_builtin_sprintf (gsi);
    case BUILT_IN_FPRINTF:
    case BUILT_IN_FPRINTF_UNLOCKED:
    case BUILT_IN_VFPRINTF:
      if (n == 2 || n == 3)
        return gimple_fold_builtin_fprintf (gsi,
                                            gimple_call_arg (stmt, 0),
                                            gimple_call_arg (stmt, 1),
                                            n == 3
                                            ? gimple_call_arg (stmt, 2)
                                            : NULL_TREE,
                                            fcode);
      break;
    case BUILT_IN_FPRINTF_CHK:
    case BUILT_IN_VFPRINTF_CHK:
      if (n == 3 || n == 4)
        return gimple_fold_builtin_fprintf (gsi,
                                            gimple_call_arg (stmt, 0),
                                            gimple_call_arg (stmt, 2),
                                            n == 4
                                            ? gimple_call_arg (stmt, 3)
                                            : NULL_TREE,
                                            fcode);
      break;
    case BUILT_IN_PRINTF:
    case BUILT_IN_PRINTF_UNLOCKED:
    case BUILT_IN_VPRINTF:
      if (n == 1 || n == 2)
        return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
                                           n == 2
                                           ? gimple_call_arg (stmt, 1)
                                           : NULL_TREE, fcode);
      break;
    case BUILT_IN_PRINTF_CHK:
    case BUILT_IN_VPRINTF_CHK:
      if (n == 2 || n == 3)
        return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
                                           n == 3
                                           ? gimple_call_arg (stmt, 2)
                                           : NULL_TREE, fcode);
      break;
    default:;
    }

  /* Try the generic builtin folder.  */
  bool ignore = (gimple_call_lhs (stmt) == NULL);
  tree result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
        STRIP_NOPS (result);
      else
        result = fold_convert (gimple_call_return_type (stmt), result);
      if (!update_call_from_tree (gsi, result))
        gimplify_and_update_call_from_tree (gsi, result);
      return true;
    }

  return false;
}
/* Return true if the result of ARG0 CODE ARG1, computed in infinite
   signed precision, does not fit into TYPE.  The overflow test is made
   regardless of -fwrapv, and even for unsigned types.  */

bool
arith_overflowed_p (enum tree_code code, const_tree type,
                    const_tree arg0, const_tree arg1)
{
  typedef FIXED_WIDE_INT (WIDE_INT_MAX_PRECISION * 2) widest2_int;
  typedef generic_wide_int <wi::extended_tree <WIDE_INT_MAX_PRECISION * 2> >
    widest2_int_cst;
  widest2_int warg0 = widest2_int_cst (arg0);
  widest2_int warg1 = widest2_int_cst (arg1);
  widest2_int wres;
  switch (code)
    {
    case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
    case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
    case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
    default: gcc_unreachable ();
    }
  signop sign = TYPE_SIGN (type);
  if (sign == UNSIGNED && wi::neg_p (wres))
    return true;
  return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
}
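
/* Example: for unsigned char operands, 200 + 100 computes to 300 in the
   double-width arithmetic above; representing 300 needs more than the
   type's 8 bits, so the addition overflowed.  Likewise 10 - 20 in any
   unsigned type overflows because the infinite-precision result is
   negative.  */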
/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.  */

static bool
gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree callee;
  bool changed = false;
  unsigned i;

  /* Fold *& in call arguments.  */
  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
      {
        tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
        if (tmp)
          {
            gimple_call_set_arg (stmt, i, tmp);
            changed = true;
          }
      }

  /* Check for virtual calls that became direct calls.  */
  callee = gimple_call_fn (stmt);
  if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
    {
      if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
        {
          if (dump_file && virtual_method_call_p (callee)
              && !possible_polymorphic_call_target_p
                    (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
                                                 (OBJ_TYPE_REF_EXPR (callee)))))
            {
              fprintf (dump_file,
                       "Type inheritance inconsistent devirtualization of ");
              print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
              fprintf (dump_file, " to ");
              print_generic_expr (dump_file, callee, TDF_SLIM);
              fprintf (dump_file, "\n");
            }

          gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
          changed = true;
        }
      else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
        {
          bool final;
          vec <cgraph_node *>targets
            = possible_polymorphic_call_targets (callee, stmt, &final);
          if (final && targets.length () <= 1 && dbg_cnt (devirt))
            {
              tree lhs = gimple_call_lhs (stmt);
              if (dump_enabled_p ())
                {
                  location_t loc = gimple_location_safe (stmt);
                  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
                                   "folding virtual function call to %s\n",
                                   targets.length () == 1
                                   ? targets[0]->name ()
                                   : "__builtin_unreachable");
                }
              if (targets.length () == 1)
                {
                  gimple_call_set_fndecl (stmt, targets[0]->decl);
                  changed = true;
                  /* If the call becomes noreturn, remove the lhs.  */
                  if (lhs && (gimple_call_flags (stmt) & ECF_NORETURN))
                    {
                      if (TREE_CODE (lhs) == SSA_NAME)
                        {
                          tree var = create_tmp_var (TREE_TYPE (lhs));
                          tree def = get_or_create_ssa_default_def (cfun, var);
                          gimple new_stmt = gimple_build_assign (lhs, def);
                          gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
                        }
                      gimple_call_set_lhs (stmt, NULL_TREE);
                    }
                  maybe_remove_unused_call_args (cfun, stmt);
                }
              else
                {
                  tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
                  gimple new_stmt = gimple_build_call (fndecl, 0);
                  gimple_set_location (new_stmt, gimple_location (stmt));
                  if (lhs && TREE_CODE (lhs) == SSA_NAME)
                    {
                      tree var = create_tmp_var (TREE_TYPE (lhs));
                      tree def = get_or_create_ssa_default_def (cfun, var);

                      /* To satisfy condition for
                         cgraph_update_edges_for_call_stmt_node,
                         we need to preserve GIMPLE_CALL statement
                         at position of GSI iterator.  */
                      update_call_from_tree (gsi, def);
                      gsi_insert_before (gsi, new_stmt, GSI_NEW_STMT);
                    }
                  else
                    {
                      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
                      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
                      gsi_replace (gsi, new_stmt, false);
                    }
                  return true;
                }
            }
        }
    }

  /* Check for indirect calls that became direct calls, and then
     no longer require a static chain.  */
  if (gimple_call_chain (stmt))
    {
      tree fn = gimple_call_fndecl (stmt);
      if (fn && !DECL_STATIC_CHAIN (fn))
        {
          gimple_call_set_chain (stmt, NULL);
          changed = true;
        }
      else
        {
          tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
          if (tmp)
            {
              gimple_call_set_chain (stmt, tmp);
              changed = true;
            }
        }
    }

  if (inplace)
    return changed;

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    {
      if (gimple_fold_builtin (gsi))
        changed = true;
    }
  else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
    {
      changed |= targetm.gimple_fold_builtin (gsi);
    }
  else if (gimple_call_internal_p (stmt))
    {
      enum tree_code subcode = ERROR_MARK;
      tree result = NULL_TREE;
      bool cplx_result = false;
      tree overflow = NULL_TREE;
      switch (gimple_call_internal_fn (stmt))
        {
        case IFN_BUILTIN_EXPECT:
          result = fold_builtin_expect (gimple_location (stmt),
                                        gimple_call_arg (stmt, 0),
                                        gimple_call_arg (stmt, 1),
                                        gimple_call_arg (stmt, 2));
          break;
        case IFN_UBSAN_OBJECT_SIZE:
          if (integer_all_onesp (gimple_call_arg (stmt, 2))
              || (TREE_CODE (gimple_call_arg (stmt, 1)) == INTEGER_CST
                  && TREE_CODE (gimple_call_arg (stmt, 2)) == INTEGER_CST
                  && tree_int_cst_le (gimple_call_arg (stmt, 1),
                                      gimple_call_arg (stmt, 2))))
            {
              gsi_replace (gsi, gimple_build_nop (), true);
              unlink_stmt_vdef (stmt);
              release_defs (stmt);
              return true;
            }
          break;
        case IFN_UBSAN_CHECK_ADD:
          subcode = PLUS_EXPR;
          break;
        case IFN_UBSAN_CHECK_SUB:
          subcode = MINUS_EXPR;
          break;
        case IFN_UBSAN_CHECK_MUL:
          subcode = MULT_EXPR;
          break;
        case IFN_ADD_OVERFLOW:
          subcode = PLUS_EXPR;
          cplx_result = true;
          break;
        case IFN_SUB_OVERFLOW:
          subcode = MINUS_EXPR;
          cplx_result = true;
          break;
        case IFN_MUL_OVERFLOW:
          subcode = MULT_EXPR;
          cplx_result = true;
          break;
        default:
          break;
        }
      if (subcode != ERROR_MARK)
        {
          tree arg0 = gimple_call_arg (stmt, 0);
          tree arg1 = gimple_call_arg (stmt, 1);
          tree type = TREE_TYPE (arg0);
          if (cplx_result)
            {
              tree lhs = gimple_call_lhs (stmt);
              if (lhs == NULL_TREE)
                type = NULL_TREE;
              else
                type = TREE_TYPE (TREE_TYPE (lhs));
            }
          if (type == NULL_TREE)
            ;
          /* x = y + 0; x = y - 0; x = y * 0; */
          else if (integer_zerop (arg1))
            result = subcode == MULT_EXPR ? integer_zero_node : arg0;
          /* x = 0 + y; x = 0 * y; */
          else if (subcode != MINUS_EXPR && integer_zerop (arg0))
            result = subcode == MULT_EXPR ? integer_zero_node : arg1;
          /* x = y - y; */
          else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
            result = integer_zero_node;
          /* x = y * 1; x = 1 * y; */
          else if (subcode == MULT_EXPR && integer_onep (arg1))
            result = arg0;
          else if (subcode == MULT_EXPR && integer_onep (arg0))
            result = arg1;
          else if (TREE_CODE (arg0) == INTEGER_CST
                   && TREE_CODE (arg1) == INTEGER_CST)
            {
              if (cplx_result)
                result = int_const_binop (subcode, fold_convert (type, arg0),
                                          fold_convert (type, arg1));
              else
                result = int_const_binop (subcode, arg0, arg1);
              if (result && arith_overflowed_p (subcode, type, arg0, arg1))
                {
                  if (cplx_result)
                    overflow = build_one_cst (type);
                  else
                    result = NULL_TREE;
                }
            }
          if (result)
            {
              if (result == integer_zero_node)
                result = build_zero_cst (type);
              else if (cplx_result && TREE_TYPE (result) != type)
                {
                  if (TREE_CODE (result) == INTEGER_CST)
                    {
                      if (arith_overflowed_p (PLUS_EXPR, type, result,
                                              integer_zero_node))
                        overflow = build_one_cst (type);
                    }
                  else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
                            && TYPE_UNSIGNED (type))
                           || (TYPE_PRECISION (type)
                               < (TYPE_PRECISION (TREE_TYPE (result))
                                  + (TYPE_UNSIGNED (TREE_TYPE (result))
                                     && !TYPE_UNSIGNED (type)))))
                    result = NULL_TREE;
                  if (result)
                    result = fold_convert (type, result);
                }
            }
        }

      if (result)
        {
          if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
            result = drop_tree_overflow (result);
          if (cplx_result)
            {
              if (overflow == NULL_TREE)
                overflow = build_zero_cst (TREE_TYPE (result));
              tree ctype = build_complex_type (TREE_TYPE (result));
              if (TREE_CODE (result) == INTEGER_CST
                  && TREE_CODE (overflow) == INTEGER_CST)
                result = build_complex (ctype, result, overflow);
              else
                result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
                                     ctype, result, overflow);
            }
          if (!update_call_from_tree (gsi, result))
            gimplify_and_update_call_from_tree (gsi, result);
          changed = true;
        }
    }

  return changed;
}
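
/* Illustration of the internal-function folding above: with constant
   operands,

     res = IFN_ADD_OVERFLOW (2, 3);

   becomes the complex constant {5, 0} (value and overflow flag), and
   IFN_UBSAN_CHECK_MUL (x, 1) is replaced by plain x with no runtime
   check left behind.  */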
/* Worker for fold_stmt_1 dispatch to pattern based folding with
   gimple_simplify.

   Replaces *GSI with the simplification result in RCODE and OPS
   and the associated statements in *SEQ.  Does the replacement
   according to INPLACE and returns true if the operation succeeded.  */

static bool
replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
                                  code_helper rcode, tree *ops,
                                  gimple_seq *seq, bool inplace)
{
  gimple stmt = gsi_stmt (*gsi);

  /* Play safe and do not allow abnormals to be mentioned in
     newly created statements.  See also maybe_push_res_to_seq.  */
  if ((TREE_CODE (ops[0]) == SSA_NAME
       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0]))
      || (ops[1]
          && TREE_CODE (ops[1]) == SSA_NAME
          && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1]))
      || (ops[2]
          && TREE_CODE (ops[2]) == SSA_NAME
          && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2])))
    return false;

  if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
    {
      gcc_assert (rcode.is_tree_code ());
      if (TREE_CODE_CLASS ((enum tree_code)rcode) == tcc_comparison
          /* GIMPLE_CONDs condition may not throw.  */
          && (!flag_exceptions
              || !cfun->can_throw_non_call_exceptions
              || !operation_could_trap_p (rcode,
                                          FLOAT_TYPE_P (TREE_TYPE (ops[0])),
                                          false, NULL_TREE)))
        gimple_cond_set_condition (cond_stmt, rcode, ops[0], ops[1]);
      else if (rcode == SSA_NAME)
        gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
                                   build_zero_cst (TREE_TYPE (ops[0])));
      else if (rcode == INTEGER_CST)
        {
          if (integer_zerop (ops[0]))
            gimple_cond_make_false (cond_stmt);
          else
            gimple_cond_make_true (cond_stmt);
        }
      else if (!inplace)
        {
          tree res = maybe_push_res_to_seq (rcode, boolean_type_node,
                                            ops, seq);
          if (!res)
            return false;
          gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
                                     build_zero_cst (TREE_TYPE (res)));
        }
      else
        return false;
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "gimple_simplified to ");
          if (!gimple_seq_empty_p (*seq))
            print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
          print_gimple_stmt (dump_file, gsi_stmt (*gsi),
                             0, TDF_SLIM);
        }
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (is_gimple_assign (stmt)
           && rcode.is_tree_code ())
    {
      if (!inplace
          || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (rcode))
        {
          maybe_build_generic_op (rcode,
                                  TREE_TYPE (gimple_assign_lhs (stmt)),
                                  &ops[0], ops[1], ops[2]);
          gimple_assign_set_rhs_with_ops (gsi, rcode, ops[0], ops[1], ops[2]);
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "gimple_simplified to ");
              if (!gimple_seq_empty_p (*seq))
                print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
              print_gimple_stmt (dump_file, gsi_stmt (*gsi),
                                 0, TDF_SLIM);
            }
          gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
          return true;
        }
    }
  else if (!inplace)
    {
      if (gimple_has_lhs (stmt))
        {
          tree lhs = gimple_get_lhs (stmt);
          if (!maybe_push_res_to_seq (rcode, TREE_TYPE (lhs),
                                      ops, seq, lhs))
            return false;
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "gimple_simplified to ");
              print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
            }
          gsi_replace_with_seq_vops (gsi, *seq);
          return true;
        }
      else
        gcc_unreachable ();
    }

  return false;
}
/* Canonicalize MEM_REFs invariant address operand after propagation.  */

static bool
maybe_canonicalize_mem_ref_addr (tree *t)
{
  bool res = false;

  if (TREE_CODE (*t) == ADDR_EXPR)
    t = &TREE_OPERAND (*t, 0);

  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
     of invariant addresses into a SSA name MEM_REF address.  */
  if (TREE_CODE (*t) == MEM_REF
      || TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (*t, 0);
      if (TREE_CODE (addr) == ADDR_EXPR
          && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
              || handled_component_p (TREE_OPERAND (addr, 0))))
        {
          tree base;
          HOST_WIDE_INT coffset;
          base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
                                                &coffset);
          if (!base)
            gcc_unreachable ();

          TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
          TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
                                                  TREE_OPERAND (*t, 1),
                                                  size_int (coffset));
          res = true;
        }
      gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
                           || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
    }

  /* Canonicalize back MEM_REFs to plain reference trees if the object
     accessed is a decl that has the same access semantics as the MEM_REF.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1))
      && MR_DEPENDENCE_CLIQUE (*t) == 0)
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
      if (/* Same volatile qualification.  */
          TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
          /* Same TBAA behavior with -fstrict-aliasing.  */
          && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
          && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
              == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
          /* Same alignment.  */
          && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
          /* We have to look out here to not drop a required conversion
             from the rhs to the lhs if *t appears on the lhs or vice-versa
             if it appears on the rhs.  Thus require strict type
             compatibility.  */
          && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
        {
          *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
          res = true;
        }
    }

  /* Canonicalize TARGET_MEM_REF in particular with respect to
     the indexes becoming constant.  */
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
        {
          *t = tem;
          res = true;
        }
    }

  return res;
}
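
/* Illustration: after constant propagation turned an SSA address into
   an invariant one, the first canonicalization rewrites, e.g.,

     MEM[&a.b, 4]   into   MEM[&a, offset-of-b-within-a + 4]

   and the second turns a degenerate MEM[&decl, 0] whose volatility,
   TBAA type and alignment match DECL back into the plain reference
   'decl'.  */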
/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases.  */

static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
{
  bool changed = false;
  gimple stmt = gsi_stmt (*gsi);
  unsigned i;

  /* First do required canonicalization of [TARGET_]MEM_REF addresses
     after propagation.
     ???  This shouldn't be done in generic folding but in the
     propagation helpers which also know whether an address was
     propagated.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
        {
          tree *rhs = gimple_assign_rhs1_ptr (stmt);
          if ((REFERENCE_CLASS_P (*rhs)
               || TREE_CODE (*rhs) == ADDR_EXPR)
              && maybe_canonicalize_mem_ref_addr (rhs))
            changed = true;
          tree *lhs = gimple_assign_lhs_ptr (stmt);
          if (REFERENCE_CLASS_P (*lhs)
              && maybe_canonicalize_mem_ref_addr (lhs))
            changed = true;
        }
      break;
    case GIMPLE_CALL:
      {
        for (i = 0; i < gimple_call_num_args (stmt); ++i)
          {
            tree *arg = gimple_call_arg_ptr (stmt, i);
            if (REFERENCE_CLASS_P (*arg)
                && maybe_canonicalize_mem_ref_addr (arg))
              changed = true;
          }
        tree *lhs = gimple_call_lhs_ptr (stmt);
        if (*lhs
            && REFERENCE_CLASS_P (*lhs)
            && maybe_canonicalize_mem_ref_addr (lhs))
          changed = true;
        break;
      }
    case GIMPLE_ASM:
      {
        gasm *asm_stmt = as_a <gasm *> (stmt);
        for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
          {
            tree link = gimple_asm_output_op (asm_stmt, i);
            tree op = TREE_VALUE (link);
            if (REFERENCE_CLASS_P (op)
                && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
              changed = true;
          }
        for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
          {
            tree link = gimple_asm_input_op (asm_stmt, i);
            tree op = TREE_VALUE (link);
            if ((REFERENCE_CLASS_P (op)
                 || TREE_CODE (op) == ADDR_EXPR)
                && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
              changed = true;
          }
      }
      break;
    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
        {
          tree *val = gimple_debug_bind_get_value_ptr (stmt);
          if (*val
              && (REFERENCE_CLASS_P (*val)
                  || TREE_CODE (*val) == ADDR_EXPR)
              && maybe_canonicalize_mem_ref_addr (val))
            changed = true;
        }
      break;
    default:;
    }

  /* Dispatch to pattern-based folding.  */
  if (!inplace
      || is_gimple_assign (stmt)
      || gimple_code (stmt) == GIMPLE_COND)
    {
      gimple_seq seq = NULL;
      code_helper rcode;
      tree ops[3] = {};
      if (gimple_simplify (stmt, &rcode, ops, inplace ? NULL : &seq,
                           valueize, valueize))
        {
          if (replace_stmt_with_simplification (gsi, rcode, ops, &seq, inplace))
            changed = true;
          else
            gimple_seq_discard (seq);
        }
    }

  stmt = gsi_stmt (*gsi);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        unsigned old_num_ops = gimple_num_ops (stmt);
        enum tree_code subcode = gimple_assign_rhs_code (stmt);
        tree lhs = gimple_assign_lhs (stmt);
        tree new_rhs;
        /* First canonicalize operand order.  This avoids building new
           trees if this is the only thing fold would later do.  */
        if ((commutative_tree_code (subcode)
             || commutative_ternary_tree_code (subcode))
            && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
                                     gimple_assign_rhs2 (stmt), false))
          {
            tree tem = gimple_assign_rhs1 (stmt);
            gimple_assign_set_rhs1 (stmt, gimple_assign_rhs2 (stmt));
            gimple_assign_set_rhs2 (stmt, tem);
            changed = true;
          }
        new_rhs = fold_gimple_assign (gsi);
        if (new_rhs
            && !useless_type_conversion_p (TREE_TYPE (lhs),
                                           TREE_TYPE (new_rhs)))
          new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
        if (new_rhs
            && (!inplace
                || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
          {
            gimple_assign_set_rhs_from_tree (gsi, new_rhs);
            changed = true;
          }
        break;
      }

    case GIMPLE_COND:
      changed |= fold_gimple_cond (as_a <gcond *> (stmt));
      break;

    case GIMPLE_CALL:
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      {
        gasm *asm_stmt = as_a <gasm *> (stmt);
        size_t noutputs;
        const char **oconstraints;
        const char *constraint;
        bool allows_mem, allows_reg;

        noutputs = gimple_asm_noutputs (asm_stmt);
        oconstraints = XALLOCAVEC (const char *, noutputs);

        for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
          {
            tree link = gimple_asm_output_op (asm_stmt, i);
            tree op = TREE_VALUE (link);
            oconstraints[i]
              = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
            if (REFERENCE_CLASS_P (op)
                && (op = maybe_fold_reference (op, true)) != NULL_TREE)
              {
                TREE_VALUE (link) = op;
                changed = true;
              }
          }
        for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
          {
            tree link = gimple_asm_input_op (asm_stmt, i);
            tree op = TREE_VALUE (link);
            constraint
              = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
            parse_input_constraint (&constraint, 0, 0, noutputs, 0,
                                    oconstraints, &allows_mem, &allows_reg);
            if (REFERENCE_CLASS_P (op)
                && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
                   != NULL_TREE)
              {
                TREE_VALUE (link) = op;
                changed = true;
              }
          }
      }
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
        {
          tree val = gimple_debug_bind_get_value (stmt);
          if (val
              && REFERENCE_CLASS_P (val))
            {
              tree tem = maybe_fold_reference (val, false);
              if (tem)
                {
                  gimple_debug_bind_set_value (stmt, tem);
                  changed = true;
                }
            }
          else if (val
                   && TREE_CODE (val) == ADDR_EXPR)
            {
              tree ref = TREE_OPERAND (val, 0);
              tree tem = maybe_fold_reference (ref, false);
              if (tem)
                {
                  tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
                  gimple_debug_bind_set_value (stmt, tem);
                  changed = true;
                }
            }
        }
      break;

    default:;
    }

  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  */
  if (gimple_has_lhs (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
        {
          tree new_lhs = maybe_fold_reference (lhs, true);
          if (new_lhs)
            {
              gimple_set_lhs (stmt, new_lhs);
              changed = true;
            }
        }
    }

  return changed;
}
/* Valueization callback that ends up not following SSA edges.  */

tree
no_follow_ssa_edges (tree)
{
  return NULL_TREE;
}

/* Valueization callback that ends up following single-use SSA edges only.  */

tree
follow_single_use_edges (tree val)
{
  if (TREE_CODE (val) == SSA_NAME
      && !has_single_use (val))
    return NULL_TREE;
  return val;
}
/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.
   The statement pointed to by GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
}

/* Like the above, but follow SSA edges using the callback VALUEIZE.  */

bool
fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
{
  return fold_stmt_1 (gsi, false, valueize);
}

/* Perform the minimal folding on statement *GSI.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.
   The statement *GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt_inplace (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
  gcc_assert (gsi_stmt (*gsi) == stmt);
  return changed;
}
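
/* Usage sketch (hypothetical caller, for illustration): a pass that has
   rewritten operands of STMT and wants it refolded would do

     gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
     if (fold_stmt (&gsi))
       update_stmt (gsi_stmt (gsi));

   while propagation engines pass their own valueization callback to the
   two-argument overload instead of no_follow_ssa_edges.  */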
/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
   if EXPR is null or we don't know how.
   If non-null, the result always has boolean type.  */

static tree
canonicalize_bool (tree expr, bool invert)
{
  if (!expr)
    return NULL_TREE;

  if (invert)
    {
      if (integer_nonzerop (expr))
        return boolean_false_node;
      else if (integer_zerop (expr))
        return boolean_true_node;
      else if (TREE_CODE (expr) == SSA_NAME)
        return fold_build2 (EQ_EXPR, boolean_type_node, expr,
                            build_int_cst (TREE_TYPE (expr), 0));
      else if (COMPARISON_CLASS_P (expr))
        return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
                            boolean_type_node,
                            TREE_OPERAND (expr, 0),
                            TREE_OPERAND (expr, 1));
      else
        return NULL_TREE;
    }
  else
    {
      if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
        return expr;
      if (integer_nonzerop (expr))
        return boolean_true_node;
      else if (integer_zerop (expr))
        return boolean_false_node;
      else if (TREE_CODE (expr) == SSA_NAME)
        return fold_build2 (NE_EXPR, boolean_type_node, expr,
                            build_int_cst (TREE_TYPE (expr), 0));
      else if (COMPARISON_CLASS_P (expr))
        return fold_build2 (TREE_CODE (expr),
                            boolean_type_node,
                            TREE_OPERAND (expr, 0),
                            TREE_OPERAND (expr, 1));
      else
        return NULL_TREE;
    }
}
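
/* Example: canonicalize_bool (x < y, true) yields the boolean-typed
   comparison x >= y, and canonicalize_bool (name, false) for a
   non-boolean SSA name yields name != 0; a name that already has
   boolean type is returned unchanged.  */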
/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  */

static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
                        const_tree op1, const_tree op2)
{
  gimple s;

  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      if (operand_equal_p (expr, op1, 0))
        return ((code == NE_EXPR && integer_zerop (op2))
                || (code == EQ_EXPR && integer_nonzerop (op2)));
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
          && gimple_assign_rhs_code (s) == code
          && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
          && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
        return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
          && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
        {
          enum tree_code c = gimple_assign_rhs_code (s);
          if ((c == NE_EXPR && integer_zerop (op2))
              || (c == EQ_EXPR && integer_nonzerop (op2)))
            return same_bool_comparison_p (expr, c,
                                           gimple_assign_rhs1 (s),
                                           gimple_assign_rhs2 (s));
          if ((c == EQ_EXPR && integer_zerop (op2))
              || (c == NE_EXPR && integer_nonzerop (op2)))
            return same_bool_comparison_p (expr,
                                           invert_tree_comparison (c, false),
                                           gimple_assign_rhs1 (s),
                                           gimple_assign_rhs2 (s));
        }
    }
  return false;
}
/* Check to see if two boolean expressions OP1 and OP2 are logically
   equivalent.  */

static bool
same_bool_result_p (const_tree op1, const_tree op2)
{
  /* Simple cases first.  */
  if (operand_equal_p (op1, op2, 0))
    return true;

  /* Check the cases where at least one of the operands is a comparison.
     These are a bit smarter than operand_equal_p in that they apply some
     identities on SSA_NAMEs.  */
  if (COMPARISON_CLASS_P (op2)
      && same_bool_comparison_p (op1, TREE_CODE (op2),
                                 TREE_OPERAND (op2, 0),
                                 TREE_OPERAND (op2, 1)))
    return true;
  if (COMPARISON_CLASS_P (op1)
      && same_bool_comparison_p (op2, TREE_CODE (op1),
                                 TREE_OPERAND (op1, 0),
                                 TREE_OPERAND (op1, 1)))
    return true;

  return false;
}
/* Forward declarations for some mutually recursive functions.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
                   enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison (tree var, bool invert,
                         enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison_1 (gimple stmt,
                           enum tree_code code2, tree op2a, tree op2b);
static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
                  enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison (tree var, bool invert,
                        enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison_1 (gimple stmt,
                          enum tree_code code2, tree op2a, tree op2b);
/* Helper function for and_comparisons_1:  try to simplify the AND of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of the VAR before doing the AND.
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison (tree var, bool invert,
                         enum tree_code code2, tree op2a, tree op2b)
{
  tree t;
  gimple stmt = SSA_NAME_DEF_STMT (var);

  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.  */
  if (invert)
    t = or_var_with_comparison_1 (stmt,
                                  invert_tree_comparison (code2, false),
                                  op2a, op2b);
  else
    t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
  return canonicalize_bool (t, invert);
}
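
/* Example of the DeMorgan rewrite above: to simplify
   !v AND (a < b) we compute v OR (a >= b) via or_var_with_comparison_1
   and invert the canonicalized result, so only the non-inverted forms
   need dedicated handling.  */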
/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison_1 (gimple stmt,
                           enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
          || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
        {
          true_test_var = op2a;
          if (var == true_test_var)
            return var;
        }
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
               || (code2 == NE_EXPR && integer_nonzerop (op2b)))
        {
          false_test_var = op2a;
          if (var == false_test_var)
            return boolean_false_node;
        }
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (innercode,
                                  gimple_assign_rhs1 (stmt),
                                  gimple_assign_rhs2 (stmt),
                                  code2, op2a, op2b);
      if (t)
        return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple s;
      tree t;
      tree partial = NULL_TREE;
      bool is_and = (innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
         of inner1/inner2:
         inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
         inner1 AND (inner1 OR inner2) => inner1
         !inner1 AND (inner1 AND inner2) => false
         !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
         Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
        return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
        return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
        return (is_and
                ? boolean_false_node
                : and_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
        return (is_and
                ? boolean_false_node
                : and_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
         Compute the first partial result, (inner1 AND (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
          && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
          && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
          && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
                                              gimple_assign_rhs1 (s),
                                              gimple_assign_rhs2 (s),
                                              code2, op2a, op2b)))
        {
          /* Handle the AND case, where we are reassociating:
             (inner1 AND inner2) AND (op2a code2 op2b)
             => (t AND inner2)
             If the partial result t is a constant, we win.  Otherwise
             continue on to try reassociating with the other inner test.  */
          if (is_and)
            {
              if (integer_onep (t))
                return inner2;
              else if (integer_zerop (t))
                return boolean_false_node;
            }

          /* Handle the OR case, where we are redistributing:
             (inner1 OR inner2) AND (op2a code2 op2b)
             => (t OR (inner2 AND (op2a code2 op2b)))  */
          else if (integer_onep (t))
            return boolean_true_node;

          /* Save partial result for later.  */
          partial = t;
        }

      /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
          && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
          && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
          && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
                                              gimple_assign_rhs1 (s),
                                              gimple_assign_rhs2 (s),
                                              code2, op2a, op2b)))
        {
          /* Handle the AND case, where we are reassociating:
             (inner1 AND inner2) AND (op2a code2 op2b)
             => (inner1 AND t)  */
          if (is_and)
            {
              if (integer_onep (t))
                return inner1;
              else if (integer_zerop (t))
                return boolean_false_node;
              /* If both are the same, we can apply the identity
                 (x AND x) == x.  */
              else if (partial && same_bool_result_p (t, partial))
                return t;
            }

          /* Handle the OR case, where we are redistributing:
             (inner1 OR inner2) AND (op2a code2 op2b)
             => (t OR (inner1 AND (op2a code2 op2b)))
             => (t OR partial)  */
          else
            {
              if (integer_onep (t))
                return boolean_true_node;
              else if (partial)
                {
                  /* We already got a simplification for the other
                     operand to the redistributed OR expression.  The
                     interesting case is when at least one is false.
                     Or, if both are the same, we can apply the identity
                     (x OR x) == x.  */
                  if (integer_zerop (partial))
                    return t;
                  else if (integer_zerop (t))
                    return partial;
                  else if (same_bool_result_p (t, partial))
                    return t;
                }
            }
        }
    }
  return NULL_TREE;
}
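
/* Example of the redistribution above: for v = t1 AND t2 with
   t1 = (x < 10), simplifying v AND (x < 5) first folds
   (x < 10) AND (x < 5) into the partial result (x < 5); since that is
   not a constant, the same combination is attempted with t2, and the
   two partial results are compared before giving up.  */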
4161 /* Try to simplify the AND of two comparisons defined by
4162 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4163 If this can be done without constructing an intermediate value,
4164 return the resulting tree; otherwise NULL_TREE is returned.
4165 This function is deliberately asymmetric as it recurses on SSA_DEFs
4166 in the first comparison but not the second. */
4169 and_comparisons_1 (enum tree_code code1
, tree op1a
, tree op1b
,
4170 enum tree_code code2
, tree op2a
, tree op2b
)
4172 tree truth_type
= truth_type_for (TREE_TYPE (op1a
));
4174 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
4175 if (operand_equal_p (op1a
, op2a
, 0)
4176 && operand_equal_p (op1b
, op2b
, 0))
4178 /* Result will be either NULL_TREE, or a combined comparison. */
4179 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
4180 TRUTH_ANDIF_EXPR
, code1
, code2
,
4181 truth_type
, op1a
, op1b
);
4186 /* Likewise the swapped case of the above. */
4187 if (operand_equal_p (op1a
, op2b
, 0)
4188 && operand_equal_p (op1b
, op2a
, 0))
4190 /* Result will be either NULL_TREE, or a combined comparison. */
4191 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
4192 TRUTH_ANDIF_EXPR
, code1
,
4193 swap_tree_comparison (code2
),
4194 truth_type
, op1a
, op1b
);
4199 /* If both comparisons are of the same value against constants, we might
4200 be able to merge them. */
4201 if (operand_equal_p (op1a
, op2a
, 0)
4202 && TREE_CODE (op1b
) == INTEGER_CST
4203 && TREE_CODE (op2b
) == INTEGER_CST
)
4205 int cmp
= tree_int_cst_compare (op1b
, op2b
);
4207 /* If we have (op1a == op1b), we should either be able to
4208 return that or FALSE, depending on whether the constant op1b
4209 also satisfies the other comparison against op2b. */
      if (code1 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	      else
		return boolean_false_node;
	    }
	}
      /* Likewise if the second comparison is an == comparison.  */
      else if (code2 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	      else
		return boolean_false_node;
	    }
	}
      /* Same business with inequality tests.  */
      else if (code1 == NE_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp != 0); break;
	    case NE_EXPR: val = (cmp == 0); break;
	    case LT_EXPR: val = (cmp >= 0); break;
	    case GT_EXPR: val = (cmp <= 0); break;
	    case LE_EXPR: val = (cmp > 0); break;
	    case GE_EXPR: val = (cmp < 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == NE_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp <= 0); break;
	    case GT_EXPR: val = (cmp >= 0); break;
	    case LE_EXPR: val = (cmp < 0); break;
	    case GE_EXPR: val = (cmp > 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}
      /* Choose the more restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Likewise choose the more restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Check for singleton ranges.  */
      else if (cmp == 0
	       && ((code1 == LE_EXPR && code2 == GE_EXPR)
		   || (code1 == GE_EXPR && code2 == LE_EXPR)))
	return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
      /* Check for disjoint ranges.  */
      else if (cmp <= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_false_node;
      else if (cmp >= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_false_node;
    }
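  /* Illustrative examples for the range cases above (not from the
     original sources): (x <= 4) AND (x >= 4) is the singleton range
     folded to x == 4, while (x < 2) AND (x > 5) describes disjoint
     ranges and folds to boolean_false_node.  */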
  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ANDed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     return that.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
			{
			  if (!result)
			    result = boolean_false_node;
			  else if (!integer_zerop (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = and_var_with_comparison (arg, invert, code2,
						      op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}
/* Try to simplify the AND of two comparisons, specified by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be simplified to a single expression (without requiring
   introducing more SSA variables to hold intermediate values),
   return the resulting tree.  Otherwise return NULL_TREE.
   If the result expression is non-null, it has boolean type.  */
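/* Usage sketch (hypothetical caller, for illustration only): a pass
   that wants to combine two guarding conditions might do

     tree t = maybe_fold_and_comparisons (LT_EXPR, a, b, LT_EXPR, a, c);
     if (t)
       ... replace the two tests with T ...

   where a NULL_TREE result means the conjunction must be kept as two
   separate comparisons.  */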
tree
maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
			    enum tree_code code2, tree op2a, tree op2b)
{
  tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
  if (t)
    return t;
  else
    return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
}
/* Helper function for or_comparisons_1:  try to simplify the OR of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of VAR before doing the OR.
   Return NULL_TREE if we can't simplify this to a single expression.  */
static tree
or_var_with_comparison (tree var, bool invert,
			enum tree_code code2, tree op2a, tree op2b)
{
  tree t;
  gimple stmt = SSA_NAME_DEF_STMT (var);

  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.  */
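  /* Example of the DeMorgan rewrite (illustrative, assuming an integer
     comparison where the inversion is exact): with INVERT set,
     !var OR (x < y) is handled as !(var AND (x >= y)), using
     invert_tree_comparison on the second test, so only the AND helper
     needs to know comparison semantics; canonicalize_bool then applies
     the outer negation to the combined result.  */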
  if (invert)
    t = and_var_with_comparison_1 (stmt,
				   invert_tree_comparison (code2, false),
				   op2a, op2b);
  else
    t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
  return canonicalize_bool (t, invert);
}
/* Try to simplify the OR of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_TREE if we can't simplify this to a single expression.  */
static tree
or_var_with_comparison_1 (gimple stmt,
			  enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var OR (var != 0)) => true .  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_true_node;
	}
    }
  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = or_comparisons_1 (innercode,
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 code2, op2a, op2b);
      if (t)
	return t;
    }
  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple s;
      tree t;
      tree partial = NULL_TREE;
      bool is_or = (innercode == BIT_IOR_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
	 inner1 OR (inner1 AND inner2) => inner1
	 !inner1 OR (inner1 OR inner2) => true
	 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
      */
      if (inner1 == true_test_var)
	return (is_or ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_or ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner1, false, code2, op2a, op2b));
      /* Next, redistribute/reassociate the OR across the inner tests.
	 Compute the first partial result, (inner1 OR (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (t OR inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_or)
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (integer_zerop (t))
		return inner2;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner2 OR (op2a code op2b)))  */
	  else if (integer_zerop (t))
	    return boolean_false_node;

	  /* Save partial result for later.  */
	  partial = t;
	}
      /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (inner1 OR t)
	     => (t OR partial)  */
	  if (is_or)
	    {
	      if (integer_zerop (t))
		return inner1;
	      else if (integer_onep (t))
		return boolean_true_node;
	      /* If both are the same, we can apply the identity
		 (x OR x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner1 OR (op2a code2 op2b)))
	     => (t AND partial)  */
	  else
	    {
	      if (integer_zerop (t))
		return boolean_false_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed AND expression.  The
		     interesting case is when at least one is true.
		     Or, if both are the same, we can apply the identity
		     (x AND x) == x.  */
		  if (integer_onep (partial))
		    return t;
		  else if (integer_onep (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
/* Try to simplify the OR of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */
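/* Illustrative example (not part of the original sources): calling
   or_comparisons_1 (LT_EXPR, x, y, EQ_EXPR, x, y) matches the
   identical-operand check below, and combine_comparisons merges
   LT and EQ under TRUTH_ORIF_EXPR into the single test x <= y.  */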
static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) OR (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }
  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a != op1b), we should either be able to
	 return that or TRUE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
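      /* Worked example (illustrative): for (x != 3) OR (x < 5) we get
	 cmp = tree_int_cst_compare (3, 5) < 0, so the constant 3 itself
	 satisfies x < 5 and the union covers every value: the result is
	 boolean_true_node.  For (x != 3) OR (x < 2) the constant does
	 not satisfy the second test and the disjunction folds to
	 x != 3.  */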
      if (code1 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	    }
	}
      /* Likewise if the second comparison is a != comparison.  */
      else if (code2 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	    }
	}
      /* See if an equality test is redundant with the other comparison.  */
      else if (code1 == EQ_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == EQ_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}
      /* Choose the less restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Likewise choose the less restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Check for singleton ranges.  */
      else if (cmp == 0
	       && ((code1 == LT_EXPR && code2 == GT_EXPR)
		   || (code1 == GT_EXPR && code2 == LT_EXPR)))
	return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);

      /* Check for less/greater pairs that don't restrict the range at all.  */
      else if (cmp >= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_true_node;
      else if (cmp <= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_true_node;
    }
  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ORed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     return that.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
			{
			  if (!result)
			    result = boolean_true_node;
			  else if (!integer_onep (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = or_var_with_comparison (arg, invert, code2,
						     op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}
/* Try to simplify the OR of two comparisons, specified by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be simplified to a single expression (without requiring
   introducing more SSA variables to hold intermediate values),
   return the resulting tree.  Otherwise return NULL_TREE.
   If the result expression is non-null, it has boolean type.  */
tree
maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
  if (t)
    return t;
  else
    return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
}
/* Fold STMT to a constant using VALUEIZE to valueize SSA names.

   Either NULL_TREE, a simplified but non-constant or a constant
   is returned.

   ???  This should go into a gimple-fold-inline.h file to be eventually
   privatized with the single valueize function used in the various TUs
   to avoid the indirect function call overhead.  */
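/* Usage sketch (hypothetical, for illustration only): a propagator
   typically passes its lattice lookup as the valueize callback:

     static tree
     my_valueize (tree name)
     {
       // Return the lattice constant for NAME, or NAME itself
       // when nothing better is known.
       return name;
     }
     ...
     tree val = gimple_fold_stmt_to_constant_1 (stmt, my_valueize,
						my_valueize);

   The VALUEIZE callback must hand back a usable tree for every SSA
   name it is given; returning the name unchanged means "no better
   value known".  */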
tree
gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree),
				tree (*gvalueize) (tree))
{
  code_helper rcode;
  tree ops[3] = {};
  /* ???  The SSA propagators do not correctly deal with following SSA use-def
     edges if there are intermediate VARYING defs.  For this reason
     do not follow SSA edges here even though SCCVN can technically
     just deal fine with that.  */
  if (gimple_simplify (stmt, &rcode, ops, NULL, gvalueize, valueize)
      && rcode.is_tree_code ()
      && (TREE_CODE_LENGTH ((tree_code) rcode) == 0
	  || ((tree_code) rcode) == ADDR_EXPR)
      && is_gimple_val (ops[0]))
    {
      tree res = ops[0];
      if (dump_file && dump_flags & TDF_DETAILS)
	{
	  fprintf (dump_file, "Match-and-simplified ");
	  print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
	  fprintf (dump_file, " to ");
	  print_generic_expr (dump_file, res, 0);
	  fprintf (dump_file, "\n");
	}
      return res;
    }
  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	enum tree_code subcode = gimple_assign_rhs_code (stmt);

	switch (get_gimple_rhs_class (subcode))
	  {
	  case GIMPLE_SINGLE_RHS:
	    {
	      tree rhs = gimple_assign_rhs1 (stmt);
	      enum tree_code_class kind = TREE_CODE_CLASS (subcode);

	      if (TREE_CODE (rhs) == SSA_NAME)
		{
		  /* If the RHS is an SSA_NAME, return its known constant value,
		     if any.  */
		  return (*valueize) (rhs);
		}
	      /* Handle propagating invariant addresses into address
		 operations.  */
	      else if (TREE_CODE (rhs) == ADDR_EXPR
		       && !is_gimple_min_invariant (rhs))
		{
		  HOST_WIDE_INT offset = 0;
		  tree base;
		  base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
							  &offset,
							  valueize);
		  if (base
		      && (CONSTANT_CLASS_P (base)
			  || decl_address_invariant_p (base)))
		    return build_invariant_address (TREE_TYPE (rhs),
						    base, offset);
		}
	      else if (TREE_CODE (rhs) == CONSTRUCTOR
		       && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		       && (CONSTRUCTOR_NELTS (rhs)
			   == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
		{
		  unsigned i;
		  tree val, *vec;

		  vec = XALLOCAVEC (tree,
				    TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
		  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
		    {
		      val = (*valueize) (val);
		      if (TREE_CODE (val) == INTEGER_CST
			  || TREE_CODE (val) == REAL_CST
			  || TREE_CODE (val) == FIXED_CST)
			vec[i] = val;
		      else
			return NULL_TREE;
		    }

		  return build_vector (TREE_TYPE (rhs), vec);
		}
	      if (subcode == OBJ_TYPE_REF)
		{
		  tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
		  /* If callee is constant, we can fold away the wrapper.  */
		  if (is_gimple_min_invariant (val))
		    return val;
		}

	      if (kind == tcc_reference)
		{
		  if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
		       || TREE_CODE (rhs) == REALPART_EXPR
		       || TREE_CODE (rhs) == IMAGPART_EXPR)
		      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      return fold_unary_loc (EXPR_LOCATION (rhs),
					     TREE_CODE (rhs),
					     TREE_TYPE (rhs), val);
		    }
		  else if (TREE_CODE (rhs) == BIT_FIELD_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      return fold_ternary_loc (EXPR_LOCATION (rhs),
					       TREE_CODE (rhs),
					       TREE_TYPE (rhs), val,
					       TREE_OPERAND (rhs, 1),
					       TREE_OPERAND (rhs, 2));
		    }
		  else if (TREE_CODE (rhs) == MEM_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      if (TREE_CODE (val) == ADDR_EXPR
			  && is_gimple_min_invariant (val))
			{
			  tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
						  unshare_expr (val),
						  TREE_OPERAND (rhs, 1));
			  if (tem)
			    rhs = tem;
			}
		    }
		  return fold_const_aggregate_ref_1 (rhs, valueize);
		}
	      else if (kind == tcc_declaration)
		return get_symbol_constant_value (rhs);
	      return rhs;
	    }
	  case GIMPLE_UNARY_RHS:
	    return NULL_TREE;

	  case GIMPLE_BINARY_RHS:
	    {
	      /* Handle binary operators that can appear in GIMPLE form.  */
	      tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
	      tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));

	      /* Translate &x + CST into an invariant form suitable for
		 further propagation.  */
	      if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
		  && TREE_CODE (op0) == ADDR_EXPR
		  && TREE_CODE (op1) == INTEGER_CST)
		{
		  tree off = fold_convert (ptr_type_node, op1);
		  return build_fold_addr_expr_loc
			   (loc,
			    fold_build2 (MEM_REF,
					 TREE_TYPE (TREE_TYPE (op0)),
					 unshare_expr (op0), off));
		}

	      return fold_binary_loc (loc, subcode,
				      gimple_expr_type (stmt), op0, op1);
	    }
	  case GIMPLE_TERNARY_RHS:
	    {
	      /* Handle ternary operators that can appear in GIMPLE form.  */
	      tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
	      tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
	      tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));

	      /* Fold embedded expressions in ternary codes.  */
	      if ((subcode == COND_EXPR
		   || subcode == VEC_COND_EXPR)
		  && COMPARISON_CLASS_P (op0))
		{
		  tree op00 = (*valueize) (TREE_OPERAND (op0, 0));
		  tree op01 = (*valueize) (TREE_OPERAND (op0, 1));
		  tree tem = fold_binary_loc (loc, TREE_CODE (op0),
					      TREE_TYPE (op0), op00, op01);
		  if (tem)
		    op0 = tem;
		}

	      return fold_ternary_loc (loc, subcode,
				       gimple_expr_type (stmt), op0, op1, op2);
	    }

	  default:
	    gcc_unreachable ();
	  }
      }
    case GIMPLE_CALL:
      {
	tree fn;
	gcall *call_stmt = as_a <gcall *> (stmt);

	if (gimple_call_internal_p (stmt))
	  {
	    enum tree_code subcode = ERROR_MARK;
	    switch (gimple_call_internal_fn (stmt))
	      {
	      case IFN_UBSAN_CHECK_ADD:
		subcode = PLUS_EXPR;
		break;
	      case IFN_UBSAN_CHECK_SUB:
		subcode = MINUS_EXPR;
		break;
	      case IFN_UBSAN_CHECK_MUL:
		subcode = MULT_EXPR;
		break;
	      default:
		return NULL_TREE;
	      }
	    tree arg0 = gimple_call_arg (stmt, 0);
	    tree arg1 = gimple_call_arg (stmt, 1);
	    tree op0 = (*valueize) (arg0);
	    tree op1 = (*valueize) (arg1);

	    if (TREE_CODE (op0) != INTEGER_CST
		|| TREE_CODE (op1) != INTEGER_CST)
	      {
		switch (subcode)
		  {
		  case MULT_EXPR:
		    /* x * 0 = 0 * x = 0 without overflow.  */
		    if (integer_zerop (op0) || integer_zerop (op1))
		      return build_zero_cst (TREE_TYPE (arg0));
		    break;
		  case MINUS_EXPR:
		    /* y - y = 0 without overflow.  */
		    if (operand_equal_p (op0, op1, 0))
		      return build_zero_cst (TREE_TYPE (arg0));
		    break;
		  default:
		    break;
		  }
	      }
	    tree res
	      = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
	    if (res
		&& TREE_CODE (res) == INTEGER_CST
		&& !TREE_OVERFLOW (res))
	      return res;
	    return NULL_TREE;
	  }

	fn = (*valueize) (gimple_call_fn (stmt));
	if (TREE_CODE (fn) == ADDR_EXPR
	    && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
	    && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
	    && gimple_builtin_call_types_compatible_p (stmt,
						       TREE_OPERAND (fn, 0)))
	  {
	    tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
	    tree retval;
	    unsigned i;
	    for (i = 0; i < gimple_call_num_args (stmt); ++i)
	      args[i] = (*valueize) (gimple_call_arg (stmt, i));
	    retval = fold_builtin_call_array (loc,
					      gimple_call_return_type (call_stmt),
					      fn, gimple_call_num_args (stmt),
					      args);
	    if (retval)
	      {
		/* fold_call_expr wraps the result inside a NOP_EXPR.  */
		STRIP_NOPS (retval);
		retval = fold_convert (gimple_call_return_type (call_stmt),
				       retval);
	      }
	    return retval;
	  }
	return NULL_TREE;
      }

    default:
      return NULL_TREE;
    }
}
/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
   Returns NULL_TREE if folding to a constant is not possible, otherwise
   returns a constant according to is_gimple_min_invariant.  */

tree
gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
{
  tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
  if (res && is_gimple_min_invariant (res))
    return res;
  return NULL_TREE;
}
/* The following set of functions are supposed to fold references using
   their constant initializers.  */

/* See if we can find constructor defining value of BASE.
   When we know the constructor with constant offset (such as
   base is array[40] and we do know constructor of array), then
   BIT_OFFSET is adjusted accordingly.

   As a special case, return error_mark_node when constructor
   is not explicitly available, but it is known to be zero
   such as 'static const int a;'.  */
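/* Example (illustrative): for 'static const int a;' the variable is
   known to be zero-initialized but has no explicit CONSTRUCTOR, so
   this function returns error_mark_node and callers such as
   fold_const_aggregate_ref_1 turn the read into build_zero_cst.  */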
static tree
get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
		      tree (*valueize)(tree))
{
  HOST_WIDE_INT bit_offset2, size, max_size;
  if (TREE_CODE (base) == MEM_REF)
    {
      if (!integer_zerop (TREE_OPERAND (base, 1)))
	{
	  if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
	    return NULL_TREE;
	  *bit_offset += (mem_ref_offset (base).to_short_addr ()
			  * BITS_PER_UNIT);
	}

      if (valueize
	  && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
	base = valueize (TREE_OPERAND (base, 0));
      if (!base || TREE_CODE (base) != ADDR_EXPR)
	return NULL_TREE;
      base = TREE_OPERAND (base, 0);
    }

  /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
     DECL_INITIAL.  If BASE is a nested reference into another
     ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
     the inner reference.  */
  switch (TREE_CODE (base))
    {
    case VAR_DECL:
    case CONST_DECL:
      {
	tree init = ctor_for_folding (base);

	/* Our semantic is exact opposite of ctor_for_folding;
	   NULL means unknown, while error_mark_node is 0.  */
	if (init == error_mark_node)
	  return NULL_TREE;
	if (!init)
	  return error_mark_node;
	return init;
      }

    case ARRAY_REF:
    case COMPONENT_REF:
      base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
      if (max_size == -1 || size != max_size)
	return NULL_TREE;
      *bit_offset += bit_offset2;
      return get_base_constructor (base, bit_offset, valueize);

    case STRING_CST:
    case CONSTRUCTOR:
      return base;

    default:
      return NULL_TREE;
    }
}
/* CTOR is CONSTRUCTOR of an array type.  Fold reference of type TYPE and size
   SIZE to the memory at bit OFFSET.  */
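/* Worked example of the index arithmetic below (illustrative):
   reading a 32-bit element at bit OFFSET 96 from a constructor of
   'int[10]' gives elt_size == 4, access_index == 96/8/4 == 3 and
   inner_offset == 0, i.e. the CONSTRUCTOR element with index 3.  */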
static tree
fold_array_ctor_reference (tree type, tree ctor,
			   unsigned HOST_WIDE_INT offset,
			   unsigned HOST_WIDE_INT size,
			   tree from_decl)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;
  offset_int low_bound;
  offset_int elt_size;
  offset_int index, max_index;
  offset_int access_index;
  tree domain_type = NULL_TREE, index_type = NULL_TREE;
  HOST_WIDE_INT inner_offset;

  /* Compute low bound and elt size.  */
  if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
    domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    {
      /* Static constructors for variably sized objects make no sense.  */
      gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
      index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
      low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
    }
  else
    low_bound = 0;
  /* Static constructors for variably sized objects make no sense.  */
  gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
	      == INTEGER_CST);
  elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));

  /* We can handle only constantly sized accesses that are known to not
     be larger than size of array element.  */
  if (!TYPE_SIZE_UNIT (type)
      || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
      || wi::lts_p (elt_size, wi::to_offset (TYPE_SIZE_UNIT (type))))
    return NULL_TREE;

  /* Compute the array index we look for.  */
  access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
				 elt_size);
  access_index += low_bound;
  if (index_type)
    access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
			    TYPE_SIGN (index_type));

  /* And offset within the access.  */
  inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);

  /* See if the array field is large enough to span whole access.  We do not
     care to fold accesses spanning multiple array indexes.  */
  if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
    return NULL_TREE;

  index = low_bound - 1;
  if (index_type)
    index = wi::ext (index, TYPE_PRECISION (index_type),
		     TYPE_SIGN (index_type));

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
    {
      /* Array constructor might explicitly set index, or specify range
	 or leave index NULL meaning that it is next index after previous
	 one.  */
      if (cfield)
	{
	  if (TREE_CODE (cfield) == INTEGER_CST)
	    max_index = index = wi::to_offset (cfield);
	  else
	    {
	      gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
	      index = wi::to_offset (TREE_OPERAND (cfield, 0));
	      max_index = wi::to_offset (TREE_OPERAND (cfield, 1));
	    }
	}
      else
	{
	  index += 1;
	  if (index_type)
	    index = wi::ext (index, TYPE_PRECISION (index_type),
			     TYPE_SIGN (index_type));
	  max_index = index;
	}

      /* Do we have match?  */
      if (wi::cmpu (access_index, index) >= 0
	  && wi::cmpu (access_index, max_index) <= 0)
	return fold_ctor_reference (type, cval, inner_offset, size,
				    from_decl);
    }
  /* When memory is not explicitly mentioned in constructor,
     it is 0 (or out of range).  */
  return build_zero_cst (type);
}
/* CTOR is CONSTRUCTOR of an aggregate or vector.
   Fold reference of type TYPE and size SIZE to the memory at bit OFFSET.  */

static tree
fold_nonarray_ctor_reference (tree type, tree ctor,
			      unsigned HOST_WIDE_INT offset,
			      unsigned HOST_WIDE_INT size,
			      tree from_decl)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
			    cval)
    {
      tree byte_offset = DECL_FIELD_OFFSET (cfield);
      tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
      tree field_size = DECL_SIZE (cfield);
      offset_int bitoffset;
      offset_int bitoffset_end, access_end;

      /* Variable sized objects in static constructors make no sense,
	 but field_size can be NULL for flexible array members.  */
      gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
		  && TREE_CODE (byte_offset) == INTEGER_CST
		  && (field_size != NULL_TREE
		      ? TREE_CODE (field_size) == INTEGER_CST
		      : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));

      /* Compute bit offset of the field.  */
      bitoffset = (wi::to_offset (field_offset)
		   + wi::lshift (wi::to_offset (byte_offset),
				 LOG2_BITS_PER_UNIT));
      /* Compute bit offset where the field ends.  */
      if (field_size != NULL_TREE)
	bitoffset_end = bitoffset + wi::to_offset (field_size);
      else
	bitoffset_end = 0;

      access_end = offset_int (offset) + size;

      /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
	 [BITOFFSET, BITOFFSET_END)?  */
      if (wi::cmps (access_end, bitoffset) > 0
	  && (field_size == NULL_TREE
	      || wi::lts_p (offset, bitoffset_end)))
	{
	  offset_int inner_offset = offset_int (offset) - bitoffset;
	  /* We do have overlap.  Now see if field is large enough to
	     cover the access.  Give up for accesses spanning multiple
	     fields.  */
	  if (wi::cmps (access_end, bitoffset_end) > 0)
	    return NULL_TREE;
	  if (wi::lts_p (offset, bitoffset))
	    return NULL_TREE;
	  return fold_ctor_reference (type, cval,
				      inner_offset.to_uhwi (), size,
				      from_decl);
	}
    }
  /* When memory is not explicitly mentioned in constructor, it is 0.  */
  return build_zero_cst (type);
}
/* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
   to the memory at bit OFFSET.  */

tree
fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
		     unsigned HOST_WIDE_INT size, tree from_decl)
{
  tree ret;

  /* We found the field with exact match.  */
  if (useless_type_conversion_p (type, TREE_TYPE (ctor))
      && !offset)
    return canonicalize_constructor_val (unshare_expr (ctor), from_decl);

  /* We are at the end of walk, see if we can view convert the
     result.  */
  if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
      /* VIEW_CONVERT_EXPR is defined only for matching sizes.  */
      && !compare_tree_int (TYPE_SIZE (type), size)
      && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
    {
      ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
      ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
      if (ret)
	STRIP_USELESS_TYPE_CONVERSION (ret);
      return ret;
    }
  /* For constants and byte-aligned/sized reads try to go through
     native_encode/interpret.  */
  if (CONSTANT_CLASS_P (ctor)
      && BITS_PER_UNIT == 8
      && offset % BITS_PER_UNIT == 0
      && size % BITS_PER_UNIT == 0
      && size <= MAX_BITSIZE_MODE_ANY_MODE)
    {
      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
      if (native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
			      offset / BITS_PER_UNIT) > 0)
	return native_interpret_expr (type, buf, size / BITS_PER_UNIT);
    }
  if (TREE_CODE (ctor) == CONSTRUCTOR)
    {
      if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
	  || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
	return fold_array_ctor_reference (type, ctor, offset, size,
					  from_decl);
      else
	return fold_nonarray_ctor_reference (type, ctor, offset, size,
					     from_decl);
    }

  return NULL_TREE;
}
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates valueizing SSA
   names using VALUEIZE.  Return NULL_TREE otherwise.  */
tree
fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
{
  tree ctor, idx, base;
  HOST_WIDE_INT offset, size, max_size;
  tree tem;

  if (TREE_THIS_VOLATILE (t))
    return NULL_TREE;

  if (DECL_P (t))
    return get_symbol_constant_value (t);

  tem = fold_read_from_constant_string (t);
  if (tem)
    return tem;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Constant indexes are handled well by get_base_constructor.
	 Only special case variable offsets.
	 FIXME: This code can't handle nested references with variable indexes
	 (they will be handled only by iteration of ccp).  Perhaps we can bring
	 get_ref_base_and_extent here and make it use a valueize callback.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
	  && valueize
	  && (idx = (*valueize) (TREE_OPERAND (t, 1)))
	  && TREE_CODE (idx) == INTEGER_CST)
	{
	  tree low_bound, unit_size;

	  /* If the resulting bit-offset is constant, track it.  */
	  if ((low_bound = array_ref_low_bound (t),
	       TREE_CODE (low_bound) == INTEGER_CST)
	      && (unit_size = array_ref_element_size (t),
		  tree_fits_uhwi_p (unit_size)))
	    {
	      offset_int woffset
		= wi::sext (wi::to_offset (idx) - wi::to_offset (low_bound),
			    TYPE_PRECISION (TREE_TYPE (idx)));

	      if (wi::fits_shwi_p (woffset))
		{
		  offset = woffset.to_shwi ();
		  /* TODO: This code seems wrong, multiply then check
		     to see if it fits.  */
		  offset *= tree_to_uhwi (unit_size);
		  offset *= BITS_PER_UNIT;

		  base = TREE_OPERAND (t, 0);
		  ctor = get_base_constructor (base, &offset, valueize);
		  /* Empty constructor.  Always fold to 0.  */
		  if (ctor == error_mark_node)
		    return build_zero_cst (TREE_TYPE (t));
		  /* Out of bound array access.  Value is undefined,
		     but don't fold.  */
		  if (offset < 0)
		    return NULL_TREE;
		  /* We can not determine ctor.  */
		  if (!ctor)
		    return NULL_TREE;
		  return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
					      tree_to_uhwi (unit_size)
					      * BITS_PER_UNIT,
					      base);
		}
	    }
	}
      /* Fallthru.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case TARGET_MEM_REF:
    case MEM_REF:
      base = get_ref_base_and_extent (t, &offset, &size, &max_size);
      ctor = get_base_constructor (base, &offset, valueize);

      /* Empty constructor.  Always fold to 0.  */
      if (ctor == error_mark_node)
	return build_zero_cst (TREE_TYPE (t));
      /* We do not know precise address.  */
      if (max_size == -1 || max_size != size)
	return NULL_TREE;
      /* We can not determine ctor.  */
      if (!ctor)
	return NULL_TREE;

      /* Out of bound array access.  Value is undefined, but don't fold.  */
      if (offset < 0)
	return NULL_TREE;

      return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
				  base);

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
	if (c && TREE_CODE (c) == COMPLEX_CST)
	  return fold_build1_loc (EXPR_LOCATION (t),
				  TREE_CODE (t), TREE_TYPE (t), c);
	break;
      }

    default:
      break;
    }

  return NULL_TREE;
}
tree
fold_const_aggregate_ref (tree t)
{
  return fold_const_aggregate_ref_1 (t, NULL);
}
/* Lookup virtual method with index TOKEN in a virtual table V
   at OFFSET.
   Set CAN_REFER if non-NULL to false if method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by non-C++ produced symbol). Otherwise just return NULL in that case.  */
static tree
gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
				   tree v,
				   unsigned HOST_WIDE_INT offset,
				   bool *can_refer)
{
  tree vtable = v, init, fn;
  unsigned HOST_WIDE_INT size;
  unsigned HOST_WIDE_INT elt_size, access_index;
  tree domain_type;

  if (can_refer)
    *can_refer = true;

  /* First of all double check we have virtual table.  */
  if (TREE_CODE (v) != VAR_DECL
      || !DECL_VIRTUAL_P (v))
    {
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }

  init = ctor_for_folding (v);

  /* The virtual tables should always be born with constructors
     and we always should assume that they are available for
     folding.  At the moment we do not stream them in all cases,
     but it should never happen that ctor seem unreachable.  */
  gcc_assert (init);
  if (init == error_mark_node)
    {
      gcc_assert (in_lto_p);
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }
  gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
  size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
  offset *= BITS_PER_UNIT;
  offset += token * size;

  /* Lookup the value in the constructor that is assumed to be array.
     This is equivalent to
     fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
			       offset, size, NULL);
     but in a constant time.  We expect that frontend produced a simple
     array without indexed initializers.  */
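  /* Worked example (illustrative): on a target where each vtable slot
     is a 64-bit pointer, size == 64, so TOKEN 2 at byte OFFSET 16
     yields offset == 16*8 + 2*64 == 256 bits and access_index ==
     256/8/8 == 4, i.e. the fifth element of the initializer array.  */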
  gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
  domain_type = TYPE_DOMAIN (TREE_TYPE (init));
  gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
  elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));

  access_index = offset / BITS_PER_UNIT / elt_size;
  gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);

  /* This code makes an assumption that there are no
     indexed fields produced by C++ FE, so we can directly index the array.  */
  if (access_index < CONSTRUCTOR_NELTS (init))
    {
      fn = CONSTRUCTOR_ELT (init, access_index)->value;
      gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
      STRIP_NOPS (fn);
    }
  else
    fn = NULL;

  /* For type inconsistent program we may end up looking up virtual method
     in virtual table that does not contain TOKEN entries.  We may overrun
     the virtual table and pick up a constant or RTTI info pointer.
     In any case the call is undefined.  */
  if (!fn
      || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
      || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
    fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
  else
    {
      fn = TREE_OPERAND (fn, 0);

      /* When cgraph node is missing and function is not public, we cannot
	 devirtualize.  This can happen in WHOPR when the actual method
	 ends up in other partition, because we found devirtualization
	 possibility too late.  */
      if (!can_refer_decl_in_current_unit_p (fn, vtable))
	{
	  if (can_refer)
	    {
	      *can_refer = false;
	      return fn;
	    }
	  return NULL_TREE;
	}
    }

  /* Make sure we create a cgraph node for functions we'll reference.
     They can be non-existent if the reference comes from an entry
     of an external vtable for example.  */
  cgraph_node::get_create (fn);

  return fn;
}
/* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
   is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
   KNOWN_BINFO carries the binfo describing the true type of
   OBJ_TYPE_REF_OBJECT(REF).
   Set CAN_REFER if non-NULL to false if method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by non-C++ produced symbol). Otherwise just return NULL in that case.  */
tree
gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
				  bool *can_refer)
{
  unsigned HOST_WIDE_INT offset;
  tree v;

  v = BINFO_VTABLE (known_binfo);
  /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone.  */
  if (!v)
    return NULL_TREE;

  if (!vtable_pointer_value_to_vtable (v, &v, &offset))
    {
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }
  return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
}
/* Return true iff VAL is a gimple expression that is known to be
   non-negative.  Restricted to floating-point inputs.  */

bool
gimple_val_nonnegative_real_p (tree val)
{
  gimple def_stmt;

  gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));

  /* Use existing logic for non-gimple trees.  */
  if (tree_expr_nonnegative_p (val))
    return true;

  if (TREE_CODE (val) != SSA_NAME)
    return false;

  /* Currently we look only at the immediately defining statement
     to make this determination, since recursion on defining
     statements of operands can lead to quadratic behavior in the
     worst case.  This is expected to catch almost all occurrences
     in practice.  It would be possible to implement limited-depth
     recursion if important cases are lost.  Alternatively, passes
     that need this information (such as the pow/powi lowering code
     in the cse_sincos pass) could be revised to provide it through
     dataflow propagation.  */

  def_stmt = SSA_NAME_DEF_STMT (val);

  if (is_gimple_assign (def_stmt))
    {
      tree op0, op1;

      /* See fold-const.c:tree_expr_nonnegative_p for additional
	 cases that could be handled with recursion.  */

      switch (gimple_assign_rhs_code (def_stmt))
	{
	case ABS_EXPR:
	  /* Always true for floating-point operands.  */
	  return true;

	case MULT_EXPR:
	  /* True if the two operands are identical (since we are
	     restricted to floating-point inputs).  */
	  op0 = gimple_assign_rhs1 (def_stmt);
	  op1 = gimple_assign_rhs2 (def_stmt);

	  if (op0 == op1
	      || operand_equal_p (op0, op1, 0))
	    return true;

	default:
	  break;
	}
    }
  else if (is_gimple_call (def_stmt))
    {
      tree fndecl = gimple_call_fndecl (def_stmt);
      if (fndecl
	  && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	{
	  tree arg1;

	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_ACOS):
	    CASE_FLT_FN (BUILT_IN_ACOSH):
	    CASE_FLT_FN (BUILT_IN_CABS):
	    CASE_FLT_FN (BUILT_IN_COSH):
	    CASE_FLT_FN (BUILT_IN_ERFC):
	    CASE_FLT_FN (BUILT_IN_EXP):
	    CASE_FLT_FN (BUILT_IN_EXP10):
	    CASE_FLT_FN (BUILT_IN_EXP2):
	    CASE_FLT_FN (BUILT_IN_FABS):
	    CASE_FLT_FN (BUILT_IN_FDIM):
	    CASE_FLT_FN (BUILT_IN_HYPOT):
	    CASE_FLT_FN (BUILT_IN_POW10):
	      return true;

	    CASE_FLT_FN (BUILT_IN_SQRT):
	      /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
		 nonnegative inputs.  */
	      if (!HONOR_SIGNED_ZEROS (val))
		return true;

	      break;

	    CASE_FLT_FN (BUILT_IN_POWI):
	      /* True if the second argument is an even integer.  */
	      arg1 = gimple_call_arg (def_stmt, 1);

	      if (TREE_CODE (arg1) == INTEGER_CST
		  && (TREE_INT_CST_LOW (arg1) & 1) == 0)
		return true;

	      break;

	    CASE_FLT_FN (BUILT_IN_POW):
	      /* True if the second argument is an even integer-valued
		 real.  */
	      arg1 = gimple_call_arg (def_stmt, 1);

	      if (TREE_CODE (arg1) == REAL_CST)
		{
		  REAL_VALUE_TYPE c;
		  HOST_WIDE_INT n;

		  c = TREE_REAL_CST (arg1);
		  n = real_to_integer (&c);

		  if ((n & 1) == 0)
		    {
		      REAL_VALUE_TYPE cint;
		      real_from_integer (&cint, VOIDmode, n, SIGNED);
		      if (real_identical (&c, &cint))
			return true;
		    }
		}

	      break;

	    default:
	      break;
	    }
	}
    }

  return false;
}
/* Given a pointer value OP0, return a simplified version of an
   indirection through OP0, or NULL_TREE if no simplification is
   possible.  Note that the resulting type may be different from
   the type pointed to in the sense that it is still compatible
   from the langhooks point of view.  */

tree
gimple_fold_indirect_ref (tree t)
{
  tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
  tree sub = t;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (useless_type_conversion_p (type, optype))
	return op;

      /* *(foo *)&fooarray => fooarray[0] */
      if (TREE_CODE (optype) == ARRAY_TYPE
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
	  && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  if (TREE_CODE (min_val) == INTEGER_CST)
	    return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
	}
    }

  /* *(p + CST) -> ...  */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree addr = TREE_OPERAND (sub, 0);
      tree off = TREE_OPERAND (sub, 1);
      tree addrtype;

      STRIP_NOPS (addr);
      addrtype = TREE_TYPE (addr);

      /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
	  && tree_fits_uhwi_p (off))
	{
	  unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
	  tree part_width = TYPE_SIZE (type);
	  unsigned HOST_WIDE_INT part_widthi
	    = tree_to_shwi (part_width) / BITS_PER_UNIT;
	  unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
	  tree index = bitsize_int (indexi);
	  if (offset / part_widthi
	      < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
	    return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
				part_width, index);
	}

      /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
	{
	  tree size = TYPE_SIZE_UNIT (type);
	  if (tree_int_cst_equal (size, off))
	    return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
	}

      /* *(p + CST) -> MEM_REF <p, CST>.  */
      if (TREE_CODE (addr) != ADDR_EXPR
	  || DECL_P (TREE_OPERAND (addr, 0)))
	return fold_build2 (MEM_REF, type,
			    addr,
			    wide_int_to_tree (ptype, off));
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
      && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      tree osub = sub;
      sub = gimple_fold_indirect_ref (sub);
      if (! sub)
	sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      if (TREE_CODE (min_val) == INTEGER_CST)
	return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
/* Return true if CODE is an operation that when operating on signed
   integer types involves undefined behavior on overflow and the
   operation can be expressed with unsigned arithmetic.  */

bool
arith_code_with_undefined_signed_overflow (tree_code code)
{
  switch (code)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case NEGATE_EXPR:
    case POINTER_PLUS_EXPR:
      return true;
    default:
      return false;
    }
}
/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
   operation that can be transformed to unsigned arithmetic by converting
   its operand, carrying out the operation in the corresponding unsigned
   type and converting the result back to the original type.

   Returns a sequence of statements that replace STMT and also contain
   a modified form of STMT itself.  */
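/* Example of the rewrite (illustrative, assuming 32-bit int):

     a_2 = b_1 + c_3;        // signed, overflow is undefined

   becomes

     _4 = (unsigned int) b_1;
     _5 = (unsigned int) c_3;
     _6 = _4 + _5;           // unsigned, wraps safely
     a_2 = (int) _6;

   so later passes may move or duplicate the addition without
   introducing new undefined behavior.  */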
gimple_seq
rewrite_to_defined_overflow (gimple stmt)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "rewriting stmt with undefined signed "
	       "overflow ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
    }

  tree lhs = gimple_assign_lhs (stmt);
  tree type = unsigned_type_for (TREE_TYPE (lhs));
  gimple_seq stmts = NULL;
  for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
    {
      gimple_seq stmts2 = NULL;
      gimple_set_op (stmt, i,
		     force_gimple_operand (fold_convert (type,
							 gimple_op (stmt, i)),
					   &stmts2, true, NULL_TREE));
      gimple_seq_add_seq (&stmts, stmts2);
    }
  gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
  gimple_seq_add_stmt (&stmts, stmt);
  gimple cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
  gimple_seq_add_stmt (&stmts, cvt);

  return stmts;
}
/* The valueization hook we use for the gimple_build API simplification.
   This makes us match fold_buildN behavior by only combining with
   statements in the sequence(s) we are currently building.  */

static tree
gimple_build_valueize (tree op)
{
  if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
    return op;
  return NULL_TREE;
}
/* Build the expression CODE OP0 of type TYPE with location LOC,
   simplifying it first if possible.  Returns the built
   expression value and appends statements possibly defining it
   to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum tree_code code, tree type, tree op0)
{
  tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
  if (!res)
    {
      if (gimple_in_ssa_p (cfun))
	res = make_ssa_name (type);
      else
	res = create_tmp_reg (type);
      gimple stmt;
      if (code == REALPART_EXPR
	  || code == IMAGPART_EXPR
	  || code == VIEW_CONVERT_EXPR)
	stmt = gimple_build_assign (res, code, build1 (code, type, op0));
      else
	stmt = gimple_build_assign (res, code, op0);
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
/* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
   simplifying it first if possible.  Returns the built
   expression value and appends statements possibly defining it
   to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum tree_code code, tree type, tree op0, tree op1)
{
  tree res = gimple_simplify (code, type, op0, op1, seq,
			      gimple_build_valueize);
  if (!res)
    {
      if (gimple_in_ssa_p (cfun))
	res = make_ssa_name (type);
      else
	res = create_tmp_reg (type);
      gimple stmt = gimple_build_assign (res, code, op0, op1);
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
/* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
   simplifying it first if possible.  Returns the built
   expression value and appends statements possibly defining it
   to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum tree_code code, tree type, tree op0, tree op1, tree op2)
{
  tree res = gimple_simplify (code, type, op0, op1, op2,
			      seq, gimple_build_valueize);
  if (!res)
    {
      if (gimple_in_ssa_p (cfun))
	res = make_ssa_name (type);
      else
	res = create_tmp_reg (type);
      gimple stmt;
      if (code == BIT_FIELD_REF)
	stmt = gimple_build_assign (res, code,
				    build3 (code, type, op0, op1, op2));
      else
	stmt = gimple_build_assign (res, code, op0, op1, op2);
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
/* Build the call FN (ARG0) with a result of type TYPE
   (or no result if TYPE is void) with location LOC,
   simplifying it first if possible.  Returns the built
   expression value (or NULL_TREE if TYPE is void) and appends
   statements possibly defining it to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum built_in_function fn, tree type, tree arg0)
{
  tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
  if (!res)
    {
      tree decl = builtin_decl_implicit (fn);
      gimple stmt = gimple_build_call (decl, 1, arg0);
      if (!VOID_TYPE_P (type))
	{
	  if (gimple_in_ssa_p (cfun))
	    res = make_ssa_name (type);
	  else
	    res = create_tmp_reg (type);
	  gimple_call_set_lhs (stmt, res);
	}
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
/* Build the call FN (ARG0, ARG1) with a result of type TYPE
   (or no result if TYPE is void) with location LOC,
   simplifying it first if possible.  Returns the built
   expression value (or NULL_TREE if TYPE is void) and appends
   statements possibly defining it to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum built_in_function fn, tree type, tree arg0, tree arg1)
{
  tree res = gimple_simplify (fn, type, arg0, arg1, seq,
			      gimple_build_valueize);
  if (!res)
    {
      tree decl = builtin_decl_implicit (fn);
      gimple stmt = gimple_build_call (decl, 2, arg0, arg1);
      if (!VOID_TYPE_P (type))
	{
	  if (gimple_in_ssa_p (cfun))
	    res = make_ssa_name (type);
	  else
	    res = create_tmp_reg (type);
	  gimple_call_set_lhs (stmt, res);
	}
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
/* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
   (or no result if TYPE is void) with location LOC,
   simplifying it first if possible.  Returns the built
   expression value (or NULL_TREE if TYPE is void) and appends
   statements possibly defining it to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum built_in_function fn, tree type,
	      tree arg0, tree arg1, tree arg2)
{
  tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
			      seq, gimple_build_valueize);
  if (!res)
    {
      tree decl = builtin_decl_implicit (fn);
      gimple stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
      if (!VOID_TYPE_P (type))
	{
	  if (gimple_in_ssa_p (cfun))
	    res = make_ssa_name (type);
	  else
	    res = create_tmp_reg (type);
	  gimple_call_set_lhs (stmt, res);
	}
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
/* Build the conversion (TYPE) OP with a result of type TYPE
   with location LOC if such conversion is necessary in GIMPLE,
   simplifying it first.
   Returns the built expression value and appends
   statements possibly defining it to SEQ.  */
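/* Usage sketch (hypothetical, for illustration only): building
   (int) x + 1 while folding on the fly:

     gimple_seq seq = NULL;
     tree t = gimple_convert (&seq, loc, integer_type_node, x);
     t = gimple_build (&seq, loc, PLUS_EXPR, integer_type_node,
		       t, build_int_cst (integer_type_node, 1));

   If x already has type int the conversion is elided, and SEQ may
   even stay empty when everything simplifies to a constant.  */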
tree
gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
{
  if (useless_type_conversion_p (type, TREE_TYPE (op)))
    return op;
  return gimple_build (seq, loc, NOP_EXPR, type, op);
}