/* Statement simplification on GIMPLE.
   Copyright (C) 2010-2014 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.c.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "stringpool.h"
#include "stor-layout.h"
#include "hard-reg-set.h"
#include "dominance.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimple-expr.h"
#include "gimple-iterator.h"
#include "gimple-ssa.h"
#include "tree-ssanames.h"
#include "tree-into-ssa.h"
#include "tree-ssa-propagate.h"
#include "plugin-api.h"
#include "ipa-utils.h"
#include "gimple-pretty-print.h"
#include "tree-ssa-address.h"
#include "langhooks.h"
#include "gimplify-me.h"
#include "gimple-match.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
/* Return true when DECL can be referenced from current unit.
   FROM_DECL (if non-null) specifies constructor of variable DECL was taken from.
   We can get declarations that are not possible to reference for various
   reasons:

     1) When analyzing C++ virtual tables.
	C++ virtual tables do have known constructors even
	when they are keyed to other compilation unit.
	Those tables can contain pointers to methods and vars
	in other units.  Those methods have both STATIC and EXTERNAL
	set.
     2) In WHOPR mode devirtualization might lead to reference
	to method that was partitioned elsewhere.
	In this case we have static VAR_DECL or FUNCTION_DECL
	that has no corresponding callgraph/varpool node
	declaring the body.
     3) COMDAT functions referred by external vtables that
	we devirtualize only during final compilation stage.
	At this time we already decided that we will not output
	the function body and thus we can't reference the symbol
	directly.  */
static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  if (DECL_ABSTRACT_P (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL))
    return true;

  /* Static objects can be referred only if they were not optimized out yet.  */
  if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
    {
      /* Before we start optimizing unreachable code we can be sure all
	 static objects are defined.  */
      if (symtab->function_flags_ready)
	return true;
      snode = symtab_node::get (decl);
      if (!snode || !snode->definition)
	return false;
      node = dyn_cast <cgraph_node *> (snode);
      return !node || !node->global.inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from initializer of
     external var or var that has been optimized out.  */
  if (!from_decl
      || TREE_CODE (from_decl) != VAR_DECL
      || (!DECL_EXTERNAL (from_decl)
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->definition)
      || (flag_ltrans
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->in_other_partition))
    return true;
  /* We are folding reference from external vtable.  The vtable may refer
     to a symbol keyed to other compilation unit.  The other compilation
     unit may be in separate DSO and the symbol may be hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
    return false;
  /* When function is public, we always can introduce new reference.
     Exception are the COMDAT functions where introducing a direct
     reference implies the need to include the function body in the
     current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;

  /* We have COMDAT.  We are going to check if we still have definition
     or if the definition is going to be output in other partition.
     Bypass this when gimplifying; all needed functions will be produced.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting to not necessarily give up because the copy will be
     output elsewhere when corresponding vtable is output.
     This is however not possible - the ABI specifies that COMDATs are output
     in units where they are used and when the other unit was compiled with LTO
     it is possible that vtable was kept public while the function itself
     was privatized.  */
  if (!symtab->function_flags_ready)
    return true;

  snode = symtab_node::get (decl);
  if (!snode
      || ((!snode->definition || DECL_EXTERNAL (decl))
	  && (!snode->in_other_partition
	      || (!snode->forced_by_abi && !snode->force_output))))
    return false;
  node = dyn_cast <cgraph_node *> (snode);
  return !node || !node->global.inlined_to;
}
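
/* Illustrative sketch (not part of the original sources): for a C++
   hierarchy split across units,

     // unit1.C
     struct S { virtual void f (); };
     void S::f () {}            // body emitted only in unit1

     // unit2.C folds a load from S's vtable and would reference &S::f

   devirtualization in unit2 may produce a reference to S::f even though
   unit2 never emits its body; can_refer_decl_in_current_unit_p is the
   predicate that rejects such references when the symbol is unreachable.  */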
/* CVAL is value taken from DECL_INITIAL of variable.  Try to transform it into
   acceptable form for is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specifies variable whose constructor contains CVAL.  */
tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  tree orig_cval = cval;
  STRIP_NOPS (cval);
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
	cval = build1_loc (EXPR_LOCATION (cval),
			   ADDR_EXPR, TREE_TYPE (ptr),
			   fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
					ptr,
					fold_convert (ptr_type_node,
						      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
	{
	  base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
	  if (base)
	    TREE_OPERAND (cval, 0) = base;
	}
      else
	base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
	return NULL_TREE;

      if ((TREE_CODE (base) == VAR_DECL
	   || TREE_CODE (base) == FUNCTION_DECL)
	  && !can_refer_decl_in_current_unit_p (base, from_decl))
	return NULL_TREE;
      if (TREE_CODE (base) == VAR_DECL)
	TREE_ADDRESSABLE (base) = 1;
      else if (TREE_CODE (base) == FUNCTION_DECL)
	{
	  /* Make sure we create a cgraph node for functions we'll reference.
	     They can be non-existent if the reference comes from an entry
	     of an external vtable for example.  */
	  cgraph_node::get_create (base);
	}
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  if (TREE_OVERFLOW_P (cval))
    return drop_tree_overflow (cval);
  return orig_cval;
}
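
/* Illustrative sketch (not from the original sources): a constructor
   value of the form

     (char *) &var + 4

   is canonicalized into the is_gimple_min_invariant-friendly form

     &MEM[(char *) &var + 4B]

   so later folding can treat it as a plain address constant.  */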
/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

tree
get_symbol_constant_value (tree sym)
{
  tree val = ctor_for_folding (sym);
  if (val != error_mark_node)
    {
      if (val)
	{
	  val = canonicalize_constructor_val (unshare_expr (val), sym);
	  if (val && is_gimple_min_invariant (val))
	    return val;
	  else
	    return NULL_TREE;
	}
      /* Variables declared 'const' without an initializer
	 have zero as the initializer if they may not be
	 overridden at link or run time.  */
      if (!val
	  && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
	      || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
	return build_zero_cst (TREE_TYPE (sym));
    }

  return NULL_TREE;
}
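
/* Illustrative sketch (not from the original sources):

     static const int answer = 42;
     ... x_1 = answer; ...

   get_symbol_constant_value (answer) yields 42; a 'const' integral
   variable without an initializer that cannot be overridden at link or
   run time yields build_zero_cst, i.e. 0.  */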
/* Subroutine of fold_stmt.  We perform several simplifications of the
   memory reference tree EXPR and make sure to re-gimplify them properly
   after propagation of constant addresses.  IS_LHS is true if the
   reference is supposed to be an lvalue.  */

static tree
maybe_fold_reference (tree expr, bool is_lhs)
{
  tree result;

  if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
       || TREE_CODE (expr) == REALPART_EXPR
       || TREE_CODE (expr) == IMAGPART_EXPR)
      && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_unary_loc (EXPR_LOCATION (expr),
			   TREE_CODE (expr),
			   TREE_TYPE (expr),
			   TREE_OPERAND (expr, 0));
  else if (TREE_CODE (expr) == BIT_FIELD_REF
	   && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_ternary_loc (EXPR_LOCATION (expr),
			     TREE_CODE (expr),
			     TREE_TYPE (expr),
			     TREE_OPERAND (expr, 0),
			     TREE_OPERAND (expr, 1),
			     TREE_OPERAND (expr, 2));

  if (!is_lhs
      && (result = fold_const_aggregate_ref (expr))
      && is_gimple_min_invariant (result))
    return result;

  return NULL_TREE;
}
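
/* Illustrative sketch (not from the original sources): for a constant
   _Complex operand CST,

     REALPART_EXPR <CST>

   folds through fold_unary_loc to the constant real component, and
   BIT_FIELD_REF <CST, 8, 0> likewise folds through fold_ternary_loc.  */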
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	if (TREE_CLOBBER_P (rhs))
	  return NULL_TREE;

	if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs, false);

	else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
	  {
	    tree val = OBJ_TYPE_REF_EXPR (rhs);
	    if (is_gimple_min_invariant (val))
	      return val;
	    else if (flag_devirtualize && virtual_method_call_p (rhs))
	      {
		bool final;
		vec <cgraph_node *>targets
		  = possible_polymorphic_call_targets (rhs, stmt, &final);
		if (final && targets.length () <= 1 && dbg_cnt (devirt))
		  {
		    if (dump_enabled_p ())
		      {
			location_t loc = gimple_location_safe (stmt);
			dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
					 "resolving virtual function address "
					 "reference to function %s\n",
					 targets.length () == 1
					 ? targets[0]->name ()
					 : "NULL");
		      }
		    if (targets.length () == 1)
		      {
			val = fold_convert (TREE_TYPE (val),
					    build_fold_addr_expr_loc
					      (loc, targets[0]->decl));
			STRIP_USELESS_TYPE_CONVERSION (val);
		      }
		    else
		      /* We can not use __builtin_unreachable here because it
			 can not have address taken.  */
		      val = build_int_cst (TREE_TYPE (val), 0);
		    return val;
		  }
	      }
	  }
	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    tree ref = TREE_OPERAND (rhs, 0);
	    tree tem = maybe_fold_reference (ref, true);
	    if (tem
		&& TREE_CODE (tem) == MEM_REF
		&& integer_zerop (TREE_OPERAND (tem, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
	    else if (tem)
	      result = fold_convert (TREE_TYPE (rhs),
				     build_fold_addr_expr_loc (loc, tem));
	    else if (TREE_CODE (ref) == MEM_REF
		     && integer_zerop (TREE_OPERAND (ref, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		 && (CONSTRUCTOR_NELTS (rhs)
		     == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (TREE_CODE (val) != INTEGER_CST
		  && TREE_CODE (val) != REAL_CST
		  && TREE_CODE (val) != FIXED_CST)
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs))
	  return get_symbol_constant_value (rhs);

	/* If we couldn't fold the RHS, hand over to the generic
	   fold routines.  */
	if (result == NULL_TREE)
	  result = fold (rhs);

	/* Strip away useless type conversions.  Both the NON_LVALUE_EXPR
	   that may have been added by fold, and "useless" type
	   conversions that might now be apparent due to propagation.  */
	STRIP_USELESS_TYPE_CONVERSION (result);

	if (result != rhs && valid_gimple_rhs_p (result))
	  return result;

	return NULL_TREE;
      }

    case GIMPLE_UNARY_RHS:
      break;

    case GIMPLE_BINARY_RHS:
      /* Try to canonicalize for boolean-typed X the comparisons
	 X == 0, X == 1, X != 0, and X != 1.  */
      if (gimple_assign_rhs_code (stmt) == EQ_EXPR
	  || gimple_assign_rhs_code (stmt) == NE_EXPR)
	{
	  tree lhs = gimple_assign_lhs (stmt);
	  tree op1 = gimple_assign_rhs1 (stmt);
	  tree op2 = gimple_assign_rhs2 (stmt);
	  tree type = TREE_TYPE (op1);

	  /* Check whether the comparison operands are of the same boolean
	     type as the result type is.
	     Check that second operand is an integer-constant with value
	     one or zero.  */
	  if (TREE_CODE (op2) == INTEGER_CST
	      && (integer_zerop (op2) || integer_onep (op2))
	      && useless_type_conversion_p (TREE_TYPE (lhs), type))
	    {
	      enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
	      bool is_logical_not = false;

	      /* X == 0 and X != 1 is a logical-not of X
		 X == 1 and X != 0 is X  */
	      if ((cmp_code == EQ_EXPR && integer_zerop (op2))
		  || (cmp_code == NE_EXPR && integer_onep (op2)))
		is_logical_not = true;

	      if (is_logical_not == false)
		result = op1;
	      /* Only for one-bit precision typed X the transformation
		 !X -> ~X is valid.  */
	      else if (TYPE_PRECISION (type) == 1)
		result = build1_loc (gimple_location (stmt), BIT_NOT_EXPR,
				     type, op1);
	      /* Otherwise we use !X -> X ^ 1.  */
	      else
		result = build2_loc (gimple_location (stmt), BIT_XOR_EXPR,
				     type, op1, build_int_cst (type, 1));
	    }
	}

      if (!result)
	result = fold_binary_loc (loc, subcode,
				  TREE_TYPE (gimple_assign_lhs (stmt)),
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_TERNARY_RHS:
      /* Try to fold a conditional expression.  */
      if (gimple_assign_rhs_code (stmt) == COND_EXPR)
	{
	  tree op0 = gimple_assign_rhs1 (stmt);
	  tree tem;
	  bool set = false;
	  location_t cond_loc = gimple_location (stmt);

	  if (COMPARISON_CLASS_P (op0))
	    {
	      fold_defer_overflow_warnings ();
	      tem = fold_binary_loc (cond_loc,
				     TREE_CODE (op0), TREE_TYPE (op0),
				     TREE_OPERAND (op0, 0),
				     TREE_OPERAND (op0, 1));
	      /* This is actually a conditional expression, not a GIMPLE
		 conditional statement, however, the valid_gimple_rhs_p
		 test still applies.  */
	      set = (tem && is_gimple_condexpr (tem)
		     && valid_gimple_rhs_p (tem));
	      fold_undefer_overflow_warnings (set, stmt, 0);
	    }
	  else if (is_gimple_min_invariant (op0))
	    {
	      tem = op0;
	      set = true;
	    }
	  else
	    return NULL_TREE;

	  if (set)
	    result = fold_build3_loc (cond_loc, COND_EXPR,
				      TREE_TYPE (gimple_assign_lhs (stmt)), tem,
				      gimple_assign_rhs2 (stmt),
				      gimple_assign_rhs3 (stmt));
	}

      if (!result)
	result = fold_ternary_loc (loc, subcode,
				   TREE_TYPE (gimple_assign_lhs (stmt)),
				   gimple_assign_rhs1 (stmt),
				   gimple_assign_rhs2 (stmt),
				   gimple_assign_rhs3 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
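
/* Illustrative sketch (not from the original sources) of the boolean
   canonicalization above, for one-bit precision _Bool X:

     tmp = (X == 0);   -->   tmp = ~X;
     tmp = (X != 0);   -->   tmp = X;

   and for a wider boolean-typed X, tmp = (X == 0) becomes tmp = X ^ 1.  */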
/* Attempt to fold a conditional statement.  Return true if any changes were
   made.  We only attempt to fold the condition expression, and do not perform
   any transformation that would require alteration of the cfg.  It is
   assumed that the operands have been previously folded.  */

static bool
fold_gimple_cond (gcond *stmt)
{
  tree result = fold_binary_loc (gimple_location (stmt),
				 gimple_cond_code (stmt),
				 boolean_type_node,
				 gimple_cond_lhs (stmt),
				 gimple_cond_rhs (stmt));

  if (result)
    {
      STRIP_USELESS_TYPE_CONVERSION (result);
      if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
	{
	  gimple_cond_set_condition_from_tree (stmt, result);
	  return true;
	}
    }

  return false;
}
/* Replace a statement at *SI_P with a sequence of statements in STMTS,
   adjusting the replacement stmts location and virtual operands.
   If the statement has a lhs the last stmt in the sequence is expected
   to assign to that lhs.  */

static void
gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
{
  gimple stmt = gsi_stmt (*si_p);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  gimple laststore = NULL;
  for (gimple_stmt_iterator i = gsi_last (stmts);
       !gsi_end_p (i); gsi_prev (&i))
    {
      gimple new_stmt = gsi_stmt (i);
      if ((gimple_assign_single_p (new_stmt)
	   && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	  || (is_gimple_call (new_stmt)
	      && (gimple_call_flags (new_stmt)
		  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
	{
	  tree vdef;
	  if (!laststore)
	    vdef = gimple_vdef (stmt);
	  else
	    vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
	  gimple_set_vdef (new_stmt, vdef);
	  if (vdef && TREE_CODE (vdef) == SSA_NAME)
	    SSA_NAME_DEF_STMT (vdef) = new_stmt;
	  laststore = new_stmt;
	}
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  tree reaching_vuse = gimple_vuse (stmt);
  for (gimple_stmt_iterator i = gsi_start (stmts);
       !gsi_end_p (i); gsi_next (&i))
    {
      gimple new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with exact SSA
	 name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
	gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
	reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
	  && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}
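
/* Illustrative sketch (not from the original sources): replacing one
   store A carrying .MEM_3 = VDEF <.MEM_1> with two stores B; C threads
   the virtual operands roughly as

     B: VUSE <.MEM_1>  .MEM_4 = VDEF   (fresh SSA name)
     C: VUSE <.MEM_4>  .MEM_3 = VDEF   (reuses A's VDEF)

   so that downstream users of .MEM_3 need no updating.  */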
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple stmt, new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  push_gimplify_context (gimple_in_ssa_p (cfun));

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  pop_gimplify_context (NULL);
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  gsi_replace (si_p, gimple_build_nop (), true);
	  return;
	}
    }
  else
    {
      tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
				       GSI_CONTINUE_LINKING);
    }

  pop_gimplify_context (NULL);

  gsi_replace_with_seq_vops (si_p, stmts);
}
/* Replace the call at *GSI with the gimple value VAL.  */

static void
replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
{
  gimple stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  gimple repl;
  if (lhs)
    {
      if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
	val = fold_convert (TREE_TYPE (lhs), val);
      repl = gimple_build_assign (lhs, val);
    }
  else
    repl = gimple_build_nop ();
  tree vdef = gimple_vdef (stmt);
  if (vdef && TREE_CODE (vdef) == SSA_NAME)
    {
      unlink_stmt_vdef (stmt);
      release_ssa_name (vdef);
    }
  gsi_replace (gsi, repl, true);
}
/* Replace the call at *GSI with the new call REPL and fold that
   again.  */

static void
replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple repl)
{
  gimple stmt = gsi_stmt (*gsi);
  gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
  gimple_set_location (repl, gimple_location (stmt));
  if (gimple_vdef (stmt)
      && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
    {
      gimple_set_vdef (repl, gimple_vdef (stmt));
      gimple_set_vuse (repl, gimple_vuse (stmt));
      SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
    }
  gsi_replace (gsi, repl, true);
  fold_stmt (gsi);
}
/* Return true if VAR is a VAR_DECL or a component thereof.  */

static bool
var_decl_component_p (tree var)
{
  tree inner = var;
  while (handled_component_p (inner))
    inner = TREE_OPERAND (inner, 0);
  return SSA_VAR_P (inner);
}
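
/* Illustrative sketch (not from the original sources):
   var_decl_component_p holds for 'v', 'v.a' and 'v.a[2].b' when 'v' is
   a VAR_DECL, but not for '*p' with 'p' a pointer, because the
   innermost object there is a dereference, not a variable.  */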
/* Fold function call to builtin mem{{,p}cpy,move}.  Return
   false if no simplification can be made.
   If ENDP is 0, return DEST (like memcpy).
   If ENDP is 1, return DEST+LEN (like mempcpy).
   If ENDP is 2, return DEST+LEN-1 (like stpcpy).
   If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
   (like memmove).  */

static bool
gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
			       tree dest, tree src, int endp)
{
  gimple stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  tree len = gimple_call_arg (stmt, 2);
  tree destvar, srcvar;
  location_t loc = gimple_location (stmt);

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      gimple repl;
      if (gimple_call_lhs (stmt))
	repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
      else
	repl = gimple_build_nop ();
      tree vdef = gimple_vdef (stmt);
      if (vdef && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
      gsi_replace (gsi, repl, true);
      return true;
    }

  /* If SRC and DEST are the same (and not volatile), return
     DEST{,+LEN,+LEN-1}.  */
  if (operand_equal_p (src, dest, 0))
    {
      unlink_stmt_vdef (stmt);
      if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	release_ssa_name (gimple_vdef (stmt));
      if (!lhs)
	{
	  gsi_replace (gsi, gimple_build_nop (), true);
	  return true;
	}
      goto done;
    }
  else
    {
      tree srctype, desttype;
      unsigned int src_align, dest_align;
      tree off0;

      /* Build accesses at offset zero with a ref-all character type.  */
      off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
							 ptr_mode, true), 0);

      /* If we can perform the copy efficiently with first doing all loads
	 and then all stores inline it that way.  Currently efficiently
	 means that we can load all the memory into a single integer
	 register which is what MOVE_MAX gives us.  */
      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (tree_fits_uhwi_p (len)
	  && compare_tree_int (len, MOVE_MAX) <= 0
	  /* ???  Don't transform copies from strings with known length this
	     confuses the tree-ssa-strlen.c.  This doesn't handle
	     the case in gcc.dg/strlenopt-8.c which is XFAILed for that
	     reason.  */
	  && !c_strlen (src, 2))
	{
	  unsigned ilen = tree_to_uhwi (len);
	  if (exact_log2 (ilen) != -1)
	    {
	      tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
	      if (type
		  && TYPE_MODE (type) != BLKmode
		  && (GET_MODE_SIZE (TYPE_MODE (type)) * BITS_PER_UNIT
		      == ilen * 8)
		  /* If the destination pointer is not aligned we must be able
		     to emit an unaligned store.  */
		  && (dest_align >= GET_MODE_ALIGNMENT (TYPE_MODE (type))
		      || !SLOW_UNALIGNED_ACCESS (TYPE_MODE (type), dest_align)))
		{
		  tree srctype = type;
		  tree desttype = type;
		  if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
		    srctype = build_aligned_type (type, src_align);
		  tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
		  tree tem = fold_const_aggregate_ref (srcmem);
		  if (tem)
		    srcmem = tem;
		  else if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type))
			   && SLOW_UNALIGNED_ACCESS (TYPE_MODE (type),
						     src_align))
		    srcmem = NULL_TREE;
		  if (srcmem)
		    {
		      gimple new_stmt;
		      if (is_gimple_reg_type (TREE_TYPE (srcmem)))
			{
			  new_stmt = gimple_build_assign (NULL_TREE, srcmem);
			  if (gimple_in_ssa_p (cfun))
			    srcmem = make_ssa_name (TREE_TYPE (srcmem),
						    new_stmt);
			  else
			    srcmem = create_tmp_reg (TREE_TYPE (srcmem),
						     NULL);
			  gimple_assign_set_lhs (new_stmt, srcmem);
			  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
			}
		      if (dest_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
			desttype = build_aligned_type (type, dest_align);
		      new_stmt
			= gimple_build_assign (fold_build2 (MEM_REF, desttype,
							    dest, off0),
					       srcmem);
		      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
		      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
		      if (gimple_vdef (new_stmt)
			  && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
			SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
		      if (!lhs)
			{
			  gsi_replace (gsi, new_stmt, true);
			  return true;
			}
		      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
		      goto done;
		    }
		}
	    }
	}
      if (endp == 3)
	{
	  /* Both DEST and SRC must be pointer types.
	     ??? This is what old code did.  Is the testing for pointer types
	     really mandatory?

	     If either SRC is readonly or length is 1, we can use memcpy.  */
	  if (!dest_align || !src_align)
	    return false;
	  if (readonly_data_expr (src)
	      || (tree_fits_uhwi_p (len)
		  && (MIN (src_align, dest_align) / BITS_PER_UNIT
		      >= tree_to_uhwi (len))))
	    {
	      tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
	      if (!fn)
		return false;
	      gimple_call_set_fndecl (stmt, fn);
	      gimple_call_set_arg (stmt, 0, dest);
	      gimple_call_set_arg (stmt, 1, src);
	      fold_stmt (gsi);
	      return true;
	    }

	  /* If *src and *dest can't overlap, optimize into memcpy as well.  */
	  if (TREE_CODE (src) == ADDR_EXPR
	      && TREE_CODE (dest) == ADDR_EXPR)
	    {
	      tree src_base, dest_base, fn;
	      HOST_WIDE_INT src_offset = 0, dest_offset = 0;
	      HOST_WIDE_INT size = -1;
	      HOST_WIDE_INT maxsize = -1;

	      srcvar = TREE_OPERAND (src, 0);
	      src_base = get_ref_base_and_extent (srcvar, &src_offset,
						  &size, &maxsize);
	      destvar = TREE_OPERAND (dest, 0);
	      dest_base = get_ref_base_and_extent (destvar, &dest_offset,
						   &size, &maxsize);
	      if (tree_fits_uhwi_p (len))
		maxsize = tree_to_uhwi (len);
	      else
		maxsize = -1;
	      src_offset /= BITS_PER_UNIT;
	      dest_offset /= BITS_PER_UNIT;
	      if (SSA_VAR_P (src_base)
		  && SSA_VAR_P (dest_base))
		{
		  if (operand_equal_p (src_base, dest_base, 0)
		      && ranges_overlap_p (src_offset, maxsize,
					   dest_offset, maxsize))
		    return false;
		}
	      else if (TREE_CODE (src_base) == MEM_REF
		       && TREE_CODE (dest_base) == MEM_REF)
		{
		  if (! operand_equal_p (TREE_OPERAND (src_base, 0),
					 TREE_OPERAND (dest_base, 0), 0))
		    return false;
		  offset_int off = mem_ref_offset (src_base) + src_offset;
		  if (!wi::fits_shwi_p (off))
		    return false;
		  src_offset = off.to_shwi ();

		  off = mem_ref_offset (dest_base) + dest_offset;
		  if (!wi::fits_shwi_p (off))
		    return false;
		  dest_offset = off.to_shwi ();
		  if (ranges_overlap_p (src_offset, maxsize,
					dest_offset, maxsize))
		    return false;
		}
	      else
		return false;

	      fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
	      if (!fn)
		return false;
	      gimple_call_set_fndecl (stmt, fn);
	      gimple_call_set_arg (stmt, 0, dest);
	      gimple_call_set_arg (stmt, 1, src);
	      fold_stmt (gsi);
	      return true;
	    }

	  /* If the destination and source do not alias optimize into
	     memcpy as well.  */
	  if ((is_gimple_min_invariant (dest)
	       || TREE_CODE (dest) == SSA_NAME)
	      && (is_gimple_min_invariant (src)
		  || TREE_CODE (src) == SSA_NAME))
	    {
	      ao_ref destr, srcr;
	      ao_ref_init_from_ptr_and_size (&destr, dest, len);
	      ao_ref_init_from_ptr_and_size (&srcr, src, len);
	      if (!refs_may_alias_p_1 (&destr, &srcr, false))
		{
		  tree fn;
		  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
		  if (!fn)
		    return false;
		  gimple_call_set_fndecl (stmt, fn);
		  gimple_call_set_arg (stmt, 0, dest);
		  gimple_call_set_arg (stmt, 1, src);
		  fold_stmt (gsi);
		  return true;
		}
	    }

	  return false;
	}
      if (!tree_fits_shwi_p (len))
	return false;
      /* FIXME:
	 This logic loses for arguments like (type *)malloc (sizeof (type)),
	 since we strip the casts of up to VOID return value from malloc.
	 Perhaps we ought to inherit type from non-VOID argument here?  */
      STRIP_NOPS (src);
      STRIP_NOPS (dest);
      if (!POINTER_TYPE_P (TREE_TYPE (src))
	  || !POINTER_TYPE_P (TREE_TYPE (dest)))
	return false;
      /* In the following try to find a type that is most natural to be
	 used for the memcpy source and destination and that allows
	 the most optimization when memcpy is turned into a plain assignment
	 using that type.  In theory we could always use a char[len] type
	 but that only gains us that the destination and source possibly
	 no longer will have their address taken.  */
      /* As we fold (void *)(p + CST) to (void *)p + CST undo this here.  */
      if (TREE_CODE (src) == POINTER_PLUS_EXPR)
	{
	  tree tem = TREE_OPERAND (src, 0);
	  STRIP_NOPS (tem);
	  if (tem != TREE_OPERAND (src, 0))
	    src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
	}
      if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
	{
	  tree tem = TREE_OPERAND (dest, 0);
	  STRIP_NOPS (tem);
	  if (tem != TREE_OPERAND (dest, 0))
	    dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
	}
      srctype = TREE_TYPE (TREE_TYPE (src));
      if (TREE_CODE (srctype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
	{
	  srctype = TREE_TYPE (srctype);
	  STRIP_NOPS (src);
	  src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
	}
      desttype = TREE_TYPE (TREE_TYPE (dest));
      if (TREE_CODE (desttype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
	{
	  desttype = TREE_TYPE (desttype);
	  STRIP_NOPS (dest);
	  dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
	}
      if (TREE_ADDRESSABLE (srctype)
	  || TREE_ADDRESSABLE (desttype))
	return false;

      /* Make sure we are not copying using a floating-point mode or
	 a type whose size possibly does not match its precision.  */
      if (FLOAT_MODE_P (TYPE_MODE (desttype))
	  || TREE_CODE (desttype) == BOOLEAN_TYPE
	  || TREE_CODE (desttype) == ENUMERAL_TYPE)
	desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
      if (FLOAT_MODE_P (TYPE_MODE (srctype))
	  || TREE_CODE (srctype) == BOOLEAN_TYPE
	  || TREE_CODE (srctype) == ENUMERAL_TYPE)
	srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
      if (!srctype)
	srctype = desttype;
      if (!desttype)
	desttype = srctype;
      if (!srctype)
	return false;

      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (dest_align < TYPE_ALIGN (desttype)
	  || src_align < TYPE_ALIGN (srctype))
	return false;

      destvar = dest;
      STRIP_NOPS (destvar);
      if (TREE_CODE (destvar) == ADDR_EXPR
	  && var_decl_component_p (TREE_OPERAND (destvar, 0))
	  && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
	destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
      else
	destvar = NULL_TREE;

      srcvar = src;
      STRIP_NOPS (srcvar);
      if (TREE_CODE (srcvar) == ADDR_EXPR
	  && var_decl_component_p (TREE_OPERAND (srcvar, 0))
	  && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
	{
	  if (!destvar
	      || src_align >= TYPE_ALIGN (desttype))
	    srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
				  srcvar, off0);
	  else if (!STRICT_ALIGNMENT)
	    {
	      srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
					    src_align);
	      srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
	    }
	  else
	    srcvar = NULL_TREE;
	}
      else
	srcvar = NULL_TREE;

      if (srcvar == NULL_TREE && destvar == NULL_TREE)
	return false;

      if (srcvar == NULL_TREE)
	{
	  STRIP_NOPS (src);
	  if (src_align >= TYPE_ALIGN (desttype))
	    srcvar = fold_build2 (MEM_REF, desttype, src, off0);
	  else
	    {
	      if (STRICT_ALIGNMENT)
		return false;
	      srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
					    src_align);
	      srcvar = fold_build2 (MEM_REF, srctype, src, off0);
	    }
	}
      else if (destvar == NULL_TREE)
	{
	  STRIP_NOPS (dest);
	  if (dest_align >= TYPE_ALIGN (srctype))
	    destvar = fold_build2 (MEM_REF, srctype, dest, off0);
	  else
	    {
	      if (STRICT_ALIGNMENT)
		return false;
	      desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
					     dest_align);
	      destvar = fold_build2 (MEM_REF, desttype, dest, off0);
	    }
	}

      gimple new_stmt;
      if (is_gimple_reg_type (TREE_TYPE (srcvar)))
	{
	  new_stmt = gimple_build_assign (NULL_TREE, srcvar);
	  if (gimple_in_ssa_p (cfun))
	    srcvar = make_ssa_name (TREE_TYPE (srcvar), new_stmt);
	  else
	    srcvar = create_tmp_reg (TREE_TYPE (srcvar), NULL);
	  gimple_assign_set_lhs (new_stmt, srcvar);
	  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
	  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
	}
      new_stmt = gimple_build_assign (destvar, srcvar);
      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
      if (gimple_vdef (new_stmt)
	  && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
	SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
      if (!lhs)
	{
	  gsi_replace (gsi, new_stmt, true);
	  return true;
	}
      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
    }

done:
  if (endp == 0 || endp == 3)
    len = NULL_TREE;
  else if (endp == 2)
    len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
			   ssize_int (1));
  if (endp == 2 || endp == 1)
    dest = fold_build_pointer_plus_loc (loc, dest, len);

  dest = force_gimple_operand_gsi (gsi, dest, false, NULL_TREE, true,
				   GSI_SAME_STMT);
  gimple repl = gimple_build_assign (lhs, dest);
  gsi_replace (gsi, repl, true);
  return true;
}
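
/* Illustrative sketch (not from the original sources): via the inline
   load/store path above, a 4-byte copy

     memcpy (&dst, &src, 4);

   becomes a single pair of moves through a 32-bit integer type

     tmp_1 = MEM <unsigned int> [&src];
     MEM <unsigned int> [&dst] = tmp_1;

   while mempcpy (ENDP == 1) additionally assigns DEST + LEN to the lhs.  */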
/* Fold function call to builtin memset or bzero at *GSI setting the
   memory of size LEN to VAL.  Return whether a simplification was made.  */

static bool
gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
{
  gimple stmt = gsi_stmt (*gsi);
  tree etype;
  unsigned HOST_WIDE_INT length, cval;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
      return true;
    }

  if (! tree_fits_uhwi_p (len))
    return false;

  if (TREE_CODE (c) != INTEGER_CST)
    return false;

  tree dest = gimple_call_arg (stmt, 0);
  tree var = dest;
  if (TREE_CODE (var) != ADDR_EXPR)
    return false;

  var = TREE_OPERAND (var, 0);
  if (TREE_THIS_VOLATILE (var))
    return false;

  etype = TREE_TYPE (var);
  if (TREE_CODE (etype) == ARRAY_TYPE)
    etype = TREE_TYPE (etype);

  if (!INTEGRAL_TYPE_P (etype)
      && !POINTER_TYPE_P (etype))
    return false;

  if (! var_decl_component_p (var))
    return false;

  length = tree_to_uhwi (len);
  if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
      || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
    return false;

  if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
    return false;

  if (integer_zerop (c))
    cval = 0;
  else
    {
      if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
	return false;

      cval = TREE_INT_CST_LOW (c);
      cval &= 0xff;
      cval |= cval << 8;
      cval |= cval << 16;
      cval |= (cval << 31) << 1;
    }

  var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
  gimple store = gimple_build_assign (var, build_int_cst_type (etype, cval));
  gimple_set_vuse (store, gimple_vuse (stmt));
  tree vdef = gimple_vdef (stmt);
  if (vdef && TREE_CODE (vdef) == SSA_NAME)
    {
      gimple_set_vdef (store, gimple_vdef (stmt));
      SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
    }
  gsi_insert_before (gsi, store, GSI_SAME_STMT);
  if (gimple_call_lhs (stmt))
    {
      gimple asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
      gsi_replace (gsi, asgn, true);
    }
  else
    {
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (gsi);
      gsi_remove (&gsi2, true);
    }

  return true;
}
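
/* Illustrative sketch (not from the original sources) of the byte
   replication above: for memset (p, 0xAB, 4) with a suitably typed and
   aligned destination, cval becomes 0xABABABAB and the call is
   rewritten as the single store

     MEM <unsigned int> [p] = 0xABABABAB;  */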
/* Return the string length, maximum string length or maximum value of
   ARG in LENGTH.
   If ARG is an SSA name variable, follow its use-def chains.  If LENGTH
   is not NULL and, for TYPE == 0, its value is not equal to the length
   we determine or if we are unable to determine the length or value,
   return false.  VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be returned, 1 for maximum string
   length and 2 for maximum value ARG can have.  */

static bool
get_maxval_strlen (tree arg, tree *length, bitmap *visited, int type)
{
  tree var, val;
  gimple def_stmt;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      if (TREE_CODE (arg) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
	  && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
	{
	  tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_maxval_strlen (TREE_OPERAND (aop0, 0),
				      length, visited, type);
	}

      if (type == 2)
	{
	  val = arg;
	  if (TREE_CODE (val) != INTEGER_CST
	      || tree_int_cst_sgn (val) < 0)
	    return false;
	}
      else
	val = c_strlen (arg, 1);
      if (!val)
	return false;

      if (*length)
	{
	  if (type > 0)
	    {
	      if (TREE_CODE (*length) != INTEGER_CST
		  || TREE_CODE (val) != INTEGER_CST)
		return false;

	      if (tree_int_cst_lt (*length, val))
		*length = val;
	      return true;
	    }
	  else if (simple_cst_equal (val, *length) != 1)
	    return false;
	}

      *length = val;
      return true;
    }

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  */
  if (!*visited)
    *visited = BITMAP_ALLOC (NULL);
  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
      case GIMPLE_ASSIGN:
	/* The RHS of the statement defining VAR must either have a
	   constant length or come from another SSA_NAME with a constant
	   length.  */
	if (gimple_assign_single_p (def_stmt)
	    || gimple_assign_unary_nop_p (def_stmt))
	  {
	    tree rhs = gimple_assign_rhs1 (def_stmt);
	    return get_maxval_strlen (rhs, length, visited, type);
	  }
	else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
	  {
	    tree op2 = gimple_assign_rhs2 (def_stmt);
	    tree op3 = gimple_assign_rhs3 (def_stmt);
	    return get_maxval_strlen (op2, length, visited, type)
		   && get_maxval_strlen (op3, length, visited, type);
	  }
	return false;

      case GIMPLE_PHI:
	{
	  /* All the arguments of the PHI node must have the same constant
	     length.  */
	  unsigned i;

	  for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
	    {
	      tree arg = gimple_phi_arg (def_stmt, i)->def;

	      /* If this PHI has itself as an argument, we cannot
		 determine the string length of this argument.  However,
		 if we can find a constant string length for the other
		 PHI args then we can still be sure that this is a
		 constant string length.  So be optimistic and just
		 continue with the next argument.  */
	      if (arg == gimple_phi_result (def_stmt))
		continue;

	      if (!get_maxval_strlen (arg, length, visited, type))
		return false;
	    }
	}
	return true;

      default:
	return false;
    }
}

tree
get_maxval_strlen (tree arg, int type)
{
  bitmap visited = NULL;
  tree len = NULL_TREE;
  if (!get_maxval_strlen (arg, &len, &visited, type))
    len = NULL_TREE;
  if (visited)
    BITMAP_FREE (visited);

  return len;
}
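
/* Illustrative sketch (not from the original sources):

     s_1 = PHI <"ab", "wxyz">
     n_2 = strlen (s_1);

   get_maxval_strlen (s_1, 1) walks both PHI arguments and yields the
   maximum string length 4, while TYPE == 0 would fail here because the
   two candidate lengths differ.  */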
/* Fold function call to builtin strcpy with arguments DEST and SRC.
   If LEN is not NULL, it represents the length of the string to be
   copied.  Return whether a simplification was made.  */

static bool
gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
			    tree dest, tree src)
{
  location_t loc = gimple_location (gsi_stmt (*gsi));
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (operand_equal_p (src, dest, 0))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (optimize_function_for_size_p (cfun))
    return false;

  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  tree len = get_maxval_strlen (src, 0);
  if (!len)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
   If SLEN is not NULL, it represents the length of the source string.
   Return whether a simplification was made.  */

static bool
gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
			     tree dest, tree src, tree len)
{
  location_t loc = gimple_location (gsi_stmt (*gsi));
  tree fn;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  /* We can't compare slen with len as constants below if len is not a
     constant.  */
  if (TREE_CODE (len) != INTEGER_CST)
    return false;

  /* Now, we must be passed a constant src ptr parameter.  */
  tree slen = get_maxval_strlen (src, 0);
  if (!slen || TREE_CODE (slen) != INTEGER_CST)
    return false;

  slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

  /* We do not support simplification of this case, though we do
     support it when expanding trees into RTL.  */
  /* FIXME: generate a call to __builtin_memset.  */
  if (tree_int_cst_lt (slen, len))
    return false;

  /* OK transform into builtin memcpy.  */
  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Simplify a call to the strcat builtin.  DST and SRC are the arguments
   to the call.

   Return true if any simplification was made.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static bool
gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);

  const char *p = c_getstr (src);

  /* If the string length is zero, return the dst parameter.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
    return false;

  /* See if we can store by pieces into (dst + strlen(dst)).  */
  tree newdst;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
  tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);

  if (!strlen_fn || !memcpy_fn)
    return false;

  /* If the length of the source string isn't computable don't
     split strcat into strlen and memcpy.  */
  tree len = get_maxval_strlen (src, 0);
  if (! len)
    return false;

  /* Create strlen (dst).  */
  gimple_seq stmts = NULL, stmts2;
  gimple repl = gimple_build_call (strlen_fn, 1, dst);
  gimple_set_location (repl, loc);
  if (gimple_in_ssa_p (cfun))
    newdst = make_ssa_name (size_type_node, NULL);
  else
    newdst = create_tmp_reg (size_type_node, NULL);
  gimple_call_set_lhs (repl, newdst);
  gimple_seq_add_stmt_without_update (&stmts, repl);

  /* Create (dst p+ strlen (dst)).  */
  newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
  newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len,
			build_int_cst (size_type_node, 1));
  len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
  gimple_seq_add_stmt_without_update (&stmts, repl);
  if (gimple_call_lhs (stmt))
    {
      repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      gsi_replace_with_seq_vops (gsi, stmts);
      /* gsi now points at the assignment to the lhs, get a
	 stmt iterator to the memcpy call.
	 ???  We can't use gsi_for_stmt as that doesn't work when the
	 CFG isn't built yet.  */
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (&gsi2);
      fold_stmt (&gsi2);
    }
  else
    {
      gsi_replace_with_seq_vops (gsi, stmts);
      fold_stmt (gsi);
    }
  return true;
}
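
/* Illustrative sketch (not from the original sources): when the source
   length is known, strcat (dst, "ab") is split as

     tmp_1 = strlen (dst);
     tmp_2 = dst + tmp_1;
     memcpy (tmp_2, "ab", 3);   // 3 == strlen ("ab") + 1

   which exposes the copy length to later passes.  */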
/* Fold a call to the __strcat_chk builtin FNDECL.  DEST, SRC, and SIZE
   are the arguments to the call.  */

static bool
gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree size = gimple_call_arg (stmt, 2);
  tree fn;
  const char *p;

  p = c_getstr (src);
  /* If the SRC parameter is "", return DEST.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
    return false;

  /* If __builtin_strcat_chk is used, assume strcat is available.  */
  fn = builtin_decl_explicit (BUILT_IN_STRCAT);
  if (!fn)
    return false;

  gimple repl = gimple_build_call (fn, 2, dest, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
   LEN, and SIZE.  */

static bool
gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);
  tree size = gimple_call_arg (stmt, 3);
  tree fn;
  const char *p;

  p = c_getstr (src);
  /* If the SRC parameter is "" or if LEN is 0, return DEST.  */
  if ((p && *p == '\0')
      || integer_zerop (len))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  if (! integer_all_onesp (size))
    {
      tree src_len = c_strlen (src, 1);
      if (src_len
	  && tree_fits_uhwi_p (src_len)
	  && tree_fits_uhwi_p (len)
	  && ! tree_int_cst_lt (len, src_len))
	{
	  /* If LEN >= strlen (SRC), optimize into __strcat_chk.  */
	  fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
	  if (!fn)
	    return false;

	  gimple repl = gimple_build_call (fn, 3, dest, src, size);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
      return false;
    }

  /* If __builtin_strncat_chk is used, assume strncat is available.  */
  fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
  if (!fn)
    return false;

  gimple repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold a call to the fputs builtin.  ARG0 and ARG1 are the arguments
   to the call.  IGNORE is true if the value returned
   by the builtin will be ignored.  UNLOCKED is true if this is
   actually a call to fputs_unlocked.  If LEN is non-NULL, it represents
   the known length of the string.  Return whether a simplification
   was made.  */

static bool
gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
			   tree arg0, tree arg1,
			   bool unlocked)
{
  gimple stmt = gsi_stmt (*gsi);

  /* If we're using an unlocked function, assume the other unlocked
     functions exist explicitly.  */
  tree const fn_fputc = (unlocked
			 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
			 : builtin_decl_implicit (BUILT_IN_FPUTC));
  tree const fn_fwrite = (unlocked
			  ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
			  : builtin_decl_implicit (BUILT_IN_FWRITE));

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt))
    return false;

  /* Get the length of the string passed to fputs.  If the length
     can't be determined, punt.  */
  tree len = get_maxval_strlen (arg0, 0);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return false;

  switch (compare_tree_int (len, 1))
    {
    case -1: /* length is 0, delete the call entirely.  */
      replace_call_with_value (gsi, integer_zero_node);
      return true;

    case 0: /* length is 1, call fputc.  */
      {
	const char *p = c_getstr (arg0);
	if (p != NULL)
	  {
	    if (!fn_fputc)
	      return false;

	    gimple repl = gimple_build_call (fn_fputc, 2,
					     build_int_cst
					       (integer_type_node, p[0]), arg1);
	    replace_call_with_call_and_fold (gsi, repl);
	    return true;
	  }
      }
      /* FALLTHROUGH */
    case 1: /* length is greater than 1, call fwrite.  */
      {
	/* If optimizing for size keep fputs.  */
	if (optimize_function_for_size_p (cfun))
	  return false;
	/* New argument list transforming fputs(string, stream) to
	   fwrite(string, 1, len, stream).  */
	if (!fn_fwrite)
	  return false;

	gimple repl = gimple_build_call (fn_fwrite, 4, arg0,
					 size_one_node, len, arg1);
	replace_call_with_call_and_fold (gsi, repl);
	return true;
      }
    default:
      gcc_unreachable ();
    }
}
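
/* Illustrative sketch (not from the original sources), assuming the
   return value is unused:

     fputs ("x", f);       -->  fputc ('x', f);
     fputs ("hello", f);   -->  fwrite ("hello", 1, 5, f);

   the latter only when not optimizing for size.  */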
/* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   DEST, SRC, LEN, and SIZE are the arguments to the call.
   IGNORE is true, if return value can be ignored.  FCODE is the BUILT_IN_*
   code of the builtin.  If MAXLEN is not NULL, it is maximum length
   passed as third argument.  */

static bool
gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
				tree dest, tree src, tree len, tree size,
				enum built_in_function fcode)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST
     (resp. DEST+LEN for __mempcpy_chk).  */
  if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
    {
      if (fcode != BUILT_IN_MEMPCPY_CHK)
	{
	  replace_call_with_value (gsi, dest);
	  return true;
	}
      else
	{
	  tree temp = fold_build_pointer_plus_loc (loc, dest, len);
	  temp = force_gimple_operand_gsi (gsi, temp,
					   false, NULL_TREE, true,
					   GSI_SAME_STMT);
	  replace_call_with_value (gsi, temp);
	  return true;
	}
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (len, 2);
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
		{
		  /* (void) __mempcpy_chk () can be optimized into
		     (void) __memcpy_chk ().  */
		  fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
		  if (!fn)
		    return false;

		  gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}
	      return false;
	    }
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  fn = NULL_TREE;
  /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
     mem{cpy,pcpy,move,set} is available.  */
  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
      break;
    case BUILT_IN_MEMPCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
      break;
    case BUILT_IN_MEMMOVE_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
      break;
    case BUILT_IN_MEMSET_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMSET);
      break;
    default:
      break;
    }

  if (!fn)
    return false;

  gimple repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
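
/* Illustrative sketch (not from the original sources): with a known
   object size, the checked call

     __builtin___memcpy_chk (d, s, 16, 32);

   satisfies LEN <= SIZE and is rewritten to plain memcpy (d, s, 16);
   with an unknown (all-ones) SIZE the rewrite happens unconditionally.  */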
/* Fold a call to the __st[rp]cpy_chk builtin.
   DEST, SRC, and SIZE are the arguments to the call.
   IGNORE is true if return value can be ignored.  FCODE is the BUILT_IN_*
   code of the builtin.  If MAXLEN is not NULL, it is maximum length of
   strings passed as second argument.  */

static bool
gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
				tree dest,
				tree src, tree size,
				enum built_in_function fcode)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree len, fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (src, 1);
  if (! integer_all_onesp (size))
    {
      len = c_strlen (src, 1);
      if (! len || ! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_STPCPY_CHK)
		{
		  if (! ignore)
		    return false;

		  /* If return value of __stpcpy_chk is ignored,
		     optimize into __strcpy_chk.  */
		  fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
		  if (!fn)
		    return false;

		  gimple repl = gimple_build_call (fn, 3, dest, src, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}

	      if (! len || TREE_SIDE_EFFECTS (len))
		return false;

	      /* If c_strlen returned something, but not a constant,
		 transform __strcpy_chk into __memcpy_chk.  */
	      fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	      if (!fn)
		return false;

	      len = fold_convert_loc (loc, size_type_node, len);
	      len = size_binop_loc (loc, PLUS_EXPR, len,
				    build_int_cst (size_type_node, 1));
	      len = force_gimple_operand_gsi (gsi, len, true, NULL_TREE,
					      true, GSI_SAME_STMT);
	      gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }
	}
      else
	maxlen = len;

      if (! tree_int_cst_lt (maxlen, size))
	return false;
    }

  /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
			      ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
  if (!fn)
    return false;

  gimple repl = gimple_build_call (fn, 2, dest, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold a call to the __st{r,p}ncpy_chk builtin.  DEST, SRC, LEN, and SIZE
   are the arguments to the call.  If MAXLEN is not NULL, it is maximum
   length passed as third argument.  IGNORE is true if return value can be
   ignored.  FCODE is the BUILT_IN_* code of the builtin.  */

static bool
gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
				 tree dest, tree src,
				 tree len, tree size,
				 enum built_in_function fcode)
{
  gimple stmt = gsi_stmt (*gsi);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
    {
      /* If return value of __stpncpy_chk is ignored,
	 optimize into __strncpy_chk.  */
      fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
      if (fn)
	{
	  gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (len, 2);
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    return false;
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
			      ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
  if (!fn)
    return false;

  gimple repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS.  Return
   false if a normal call should be emitted rather than expanding
   the function inline.  FCODE is either BUILT_IN_SNPRINTF_CHK or
   BUILT_IN_VSNPRINTF_CHK.  If MAXLEN is not NULL, it is maximum length
   passed as second argument.  */

static bool
gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
				  enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call.  */
  if (gimple_call_num_args (stmt) < 5)
    return false;

  dest = gimple_call_arg (stmt, 0);
  len = gimple_call_arg (stmt, 1);
  flag = gimple_call_arg (stmt, 2);
  size = gimple_call_arg (stmt, 3);
  fmt = gimple_call_arg (stmt, 4);

  if (! tree_fits_uhwi_p (size))
    return false;

  if (! integer_all_onesp (size))
    {
      tree maxlen = get_maxval_strlen (len, 2);
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    return false;
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  if (!init_target_chars ())
    return false;

  /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      fmt_str = c_getstr (fmt);
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
     available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
			      ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 5 arguments by 3 retaining
     trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, len);
  gimple_call_set_arg (stmt, 2, fmt);
  for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
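
/* Illustrative sketch (not from the original sources): with flag 0 and
   an unlimited object size,

     __builtin___snprintf_chk (buf, 10, 0, (size_t) -1, "%s", s);

   drops its flag and size arguments and becomes

     snprintf (buf, 10, "%s", s);  */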
/* Fold a call to __{,v}sprintf_chk at *GSI.  Return true if the call
   was simplified into a plain {,v}sprintf call, false if a normal
   call should be emitted instead.  FCODE is either BUILT_IN_SPRINTF_CHK
   or BUILT_IN_VSPRINTF_CHK.  */

static bool
gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
				 enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;
  unsigned nargs = gimple_call_num_args (stmt);

  /* Verify the required arguments in the original call.  */
  if (nargs < 4)
    return false;
  dest = gimple_call_arg (stmt, 0);
  flag = gimple_call_arg (stmt, 1);
  size = gimple_call_arg (stmt, 2);
  fmt = gimple_call_arg (stmt, 3);

  if (! tree_fits_uhwi_p (size))
    return false;

  len = NULL_TREE;

  if (!init_target_chars ())
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str != NULL)
    {
      /* If the format doesn't contain % args or %%, we know the size.  */
      if (strchr (fmt_str, target_percent) == 0)
	{
	  if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
	    len = build_int_cstu (size_type_node, strlen (fmt_str));
	}
      /* If the format is "%s" and first ... argument is a string literal,
	 we know the size too.  */
      else if (fcode == BUILT_IN_SPRINTF_CHK
	       && strcmp (fmt_str, target_percent_s) == 0)
	{
	  tree arg;

	  if (nargs == 5)
	    {
	      arg = gimple_call_arg (stmt, 4);
	      if (POINTER_TYPE_P (TREE_TYPE (arg)))
		{
		  len = c_strlen (arg, 1);
		  if (! len || ! tree_fits_uhwi_p (len))
		    len = NULL_TREE;
		}
	    }
	}
    }

  if (! integer_all_onesp (size))
    {
      if (! len || ! tree_int_cst_lt (len, size))
	return false;
    }

  /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
			      ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 4 arguments by 2, retaining
     trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, fmt);
  for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
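/* An illustrative example (hypothetical input): with a known object
   size of 8,
       __builtin___sprintf_chk (buf, 0, 8, "pi");
   has a known result length of 2 < 8 and a zero flag, so it becomes
       sprintf (buf, "pi");  */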
/* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
   ORIG may be null if this is a 2-argument call.  We don't attempt to
   simplify calls with more than 3 arguments.

   Return true if the call was simplified in place, false if no
   simplification was possible.  */

static bool
gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree fmt = gimple_call_arg (stmt, 1);
  tree orig = NULL_TREE;
  const char *fmt_str = NULL;

  /* Verify the required arguments in the original call.  We deal with two
     types of sprintf() calls: 'sprintf (str, fmt)' and
     'sprintf (dest, "%s", orig)'.  */
  if (gimple_call_num_args (stmt) > 3)
    return false;

  if (gimple_call_num_args (stmt) == 3)
    orig = gimple_call_arg (stmt, 2);

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);

      if (!fn)
	return false;

      /* Don't optimize sprintf (buf, "abc", ptr++).  */
      if (orig)
	return false;

      /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
	 'format' is known to contain no % formats.  */
      gimple_seq stmts = NULL;
      gimple repl = gimple_build_call (fn, 2, dest, fmt);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  repl = gimple_build_assign (gimple_call_lhs (stmt),
				      build_int_cst (integer_type_node,
						     strlen (fmt_str)));
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the strcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;

      /* Don't crash on sprintf (str1, "%s").  */
      if (!orig)
	return false;

      tree orig_len = NULL_TREE;
      if (gimple_call_lhs (stmt))
	{
	  orig_len = get_maxval_strlen (orig, 0);
	  if (!orig_len)
	    return false;
	}

      /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2).  */
      gimple_seq stmts = NULL;
      gimple repl = gimple_build_call (fn, 2, dest, orig);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  if (!useless_type_conversion_p (integer_type_node,
					  TREE_TYPE (orig_len)))
	    orig_len = fold_convert (integer_type_node, orig_len);
	  repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the strcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }
  return false;
}
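/* Illustrative examples (hypothetical inputs):
       sprintf (d, "hello");   -> strcpy (d, "hello"), lhs replaced by 5
       sprintf (d, "%s", s);   -> strcpy (d, s)
   The "%s" form keeps the lhs only when get_maxval_strlen can bound
   the length of S; otherwise the call is left alone.  */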
/* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
   FMT, and ORIG.  ORIG may be null if this is a 3-argument call.  We don't
   attempt to simplify calls with more than 4 arguments.

   Return true if the call was simplified in place, false if no
   simplification was possible.  */

static bool
gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest = gimple_call_arg (stmt, 0);
  tree destsize = gimple_call_arg (stmt, 1);
  tree fmt = gimple_call_arg (stmt, 2);
  tree orig = NULL_TREE;
  const char *fmt_str = NULL;

  if (gimple_call_num_args (stmt) > 4)
    return false;

  if (gimple_call_num_args (stmt) == 4)
    orig = gimple_call_arg (stmt, 3);

  if (!tree_fits_uhwi_p (destsize))
    return false;
  unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;

      /* Don't optimize snprintf (buf, 4, "abc", ptr++).  */
      if (orig)
	return false;

      /* We could expand this as
	 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
	 or to
	 memcpy (str, fmt_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      size_t len = strlen (fmt_str);
      if (len >= destlen)
	return false;

      gimple_seq stmts = NULL;
      gimple repl = gimple_build_call (fn, 2, dest, fmt);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  repl = gimple_build_assign (gimple_call_lhs (stmt),
				      build_int_cst (integer_type_node, len));
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the strcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;

      /* Don't crash on snprintf (str1, cst, "%s").  */
      if (!orig)
	return false;

      tree orig_len = get_maxval_strlen (orig, 0);
      if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
	return false;

      /* We could expand this as
	 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
	 or to
	 memcpy (str1, str2_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      if (compare_tree_int (orig_len, destlen) >= 0)
	return false;

      /* Convert snprintf (str1, cst, "%s", str2) into
	 strcpy (str1, str2) if strlen (str2) < cst.  */
      gimple_seq stmts = NULL;
      gimple repl = gimple_build_call (fn, 2, dest, orig);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  if (!useless_type_conversion_p (integer_type_node,
					  TREE_TYPE (orig_len)))
	    orig_len = fold_convert (integer_type_node, orig_len);
	  repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the strcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }
  return false;
}
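/* An illustrative example (hypothetical input): with a big enough
   destination,
       snprintf (buf, 32, "abc");
   becomes strcpy (buf, "abc") with any lhs replaced by the constant 3;
   the transformation is refused when strlen (fmt) >= destsize, since
   snprintf would have truncated.  */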
/* Fold a call to __builtin_strlen when the length of the argument is
   known.  */

static bool
gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree len = get_maxval_strlen (gimple_call_arg (stmt, 0), 0);
  if (!len)
    return false;
  len = force_gimple_operand_gsi (gsi, len, true, NULL, true, GSI_SAME_STMT);
  replace_call_with_value (gsi, len);
  return true;
}
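/* An illustrative example (hypothetical input): if get_maxval_strlen
   can compute a known length, e.g. for
       n = strlen ("abc");
   the whole call is replaced by the constant 3.  */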
/* Fold the non-target builtin at *GSI and return whether any simplification
   was made.  */

static bool
gimple_fold_builtin (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
  tree callee = gimple_call_fndecl (stmt);

  /* Give up for always_inline inline builtins until they are
     inlined.  */
  if (avoid_folding_inline_builtin (callee))
    return false;

  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_BZERO:
      return gimple_fold_builtin_memset (gsi, integer_zero_node,
					 gimple_call_arg (stmt, 1));
    case BUILT_IN_MEMSET:
      return gimple_fold_builtin_memset (gsi,
					 gimple_call_arg (stmt, 1),
					 gimple_call_arg (stmt, 2));
    case BUILT_IN_BCOPY:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 1),
					    gimple_call_arg (stmt, 0), 3);
    case BUILT_IN_MEMCPY:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1), 0);
    case BUILT_IN_MEMPCPY:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1), 1);
    case BUILT_IN_MEMMOVE:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1), 3);
    case BUILT_IN_SPRINTF_CHK:
    case BUILT_IN_VSPRINTF_CHK:
      return gimple_fold_builtin_sprintf_chk (gsi, DECL_FUNCTION_CODE (callee));
    case BUILT_IN_STRCAT_CHK:
      return gimple_fold_builtin_strcat_chk (gsi);
    case BUILT_IN_STRNCAT_CHK:
      return gimple_fold_builtin_strncat_chk (gsi);
    case BUILT_IN_STRLEN:
      return gimple_fold_builtin_strlen (gsi);
    case BUILT_IN_STRCPY:
      return gimple_fold_builtin_strcpy (gsi,
					 gimple_call_arg (stmt, 0),
					 gimple_call_arg (stmt, 1));
    case BUILT_IN_STRNCPY:
      return gimple_fold_builtin_strncpy (gsi,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2));
    case BUILT_IN_STRCAT:
      return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
					 gimple_call_arg (stmt, 1));
    case BUILT_IN_FPUTS:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1), false);
    case BUILT_IN_FPUTS_UNLOCKED:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1), true);
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      return gimple_fold_builtin_memory_chk (gsi,
					     gimple_call_arg (stmt, 0),
					     gimple_call_arg (stmt, 1),
					     gimple_call_arg (stmt, 2),
					     gimple_call_arg (stmt, 3),
					     DECL_FUNCTION_CODE (callee));
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      return gimple_fold_builtin_stxcpy_chk (gsi,
					     gimple_call_arg (stmt, 0),
					     gimple_call_arg (stmt, 1),
					     gimple_call_arg (stmt, 2),
					     DECL_FUNCTION_CODE (callee));
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      return gimple_fold_builtin_stxncpy_chk (gsi,
					      gimple_call_arg (stmt, 0),
					      gimple_call_arg (stmt, 1),
					      gimple_call_arg (stmt, 2),
					      gimple_call_arg (stmt, 3),
					      DECL_FUNCTION_CODE (callee));
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      return gimple_fold_builtin_snprintf_chk (gsi,
					       DECL_FUNCTION_CODE (callee));
    case BUILT_IN_SNPRINTF:
      return gimple_fold_builtin_snprintf (gsi);
    case BUILT_IN_SPRINTF:
      return gimple_fold_builtin_sprintf (gsi);
    default:;
    }

  /* Try the generic builtin folder.  */
  bool ignore = (gimple_call_lhs (stmt) == NULL);
  tree result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
	STRIP_NOPS (result);
      else
	result = fold_convert (gimple_call_return_type (stmt), result);
      if (!update_call_from_tree (gsi, result))
	gimplify_and_update_call_from_tree (gsi, result);
      return true;
    }

  return false;
}
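/* Note how the legacy BSD entry points in the dispatcher above reuse
   the memory folders: bzero (p, n) is dispatched as memset (p, 0, n),
   and bcopy (src, dst, n) as the memmove-style memory op with the
   source and destination arguments swapped.  */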
/* Return true if ARG0 CODE ARG1 in infinite signed precision operation
   doesn't fit into TYPE.  The test for overflow should be regardless of
   -fwrapv, and even for unsigned types.  */

static bool
arith_overflowed_p (enum tree_code code, const_tree type,
		    const_tree arg0, const_tree arg1)
{
  typedef FIXED_WIDE_INT (WIDE_INT_MAX_PRECISION * 2) widest2_int;
  typedef generic_wide_int <wi::extended_tree <WIDE_INT_MAX_PRECISION * 2> >
    widest2_int_cst;
  widest2_int warg0 = widest2_int_cst (arg0);
  widest2_int warg1 = widest2_int_cst (arg1);
  widest2_int wres;
  switch (code)
    {
    case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
    case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
    case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
    default: gcc_unreachable ();
    }
  signop sign = TYPE_SIGN (type);
  if (sign == UNSIGNED && wi::neg_p (wres))
    return true;
  return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
}
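/* For instance, for an 8-bit unsigned TYPE the sum 200 + 100 = 300
   needs nine bits, so arith_overflowed_p (PLUS_EXPR, type, 200, 100)
   is true; computing in twice the maximal precision makes the check
   independent of -fwrapv and of the signedness of the arguments.  */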
/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.  */

static bool
gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree callee;
  bool changed = false;
  unsigned i;

  /* Fold *& in call arguments.  */
  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
      {
	tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
	if (tmp)
	  {
	    gimple_call_set_arg (stmt, i, tmp);
	    changed = true;
	  }
      }

  /* Check for virtual calls that became direct calls.  */
  callee = gimple_call_fn (stmt);
  if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
    {
      if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
	{
	  if (dump_file && virtual_method_call_p (callee)
	      && !possible_polymorphic_call_target_p
		    (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
						  (OBJ_TYPE_REF_EXPR (callee)))))
	    {
	      fprintf (dump_file,
		       "Type inheritance inconsistent devirtualization of ");
	      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, callee, TDF_SLIM);
	      fprintf (dump_file, "\n");
	    }

	  gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
	  changed = true;
	}
      else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
	{
	  bool final;
	  vec <cgraph_node *> targets
	    = possible_polymorphic_call_targets (callee, stmt, &final);
	  if (final && targets.length () <= 1 && dbg_cnt (devirt))
	    {
	      tree lhs = gimple_call_lhs (stmt);
	      if (dump_enabled_p ())
		{
		  location_t loc = gimple_location_safe (stmt);
		  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
				   "folding virtual function call to %s\n",
				   targets.length () == 1
				   ? targets[0]->name ()
				   : "__builtin_unreachable");
		}
	      if (targets.length () == 1)
		{
		  gimple_call_set_fndecl (stmt, targets[0]->decl);
		  changed = true;
		  /* If the call becomes noreturn, remove the lhs.  */
		  if (lhs && (gimple_call_flags (stmt) & ECF_NORETURN))
		    {
		      if (TREE_CODE (lhs) == SSA_NAME)
			{
			  tree var = create_tmp_var (TREE_TYPE (lhs), NULL);
			  tree def = get_or_create_ssa_default_def (cfun, var);
			  gimple new_stmt = gimple_build_assign (lhs, def);
			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
			}
		      gimple_call_set_lhs (stmt, NULL_TREE);
		    }
		}
	      else
		{
		  tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
		  gimple new_stmt = gimple_build_call (fndecl, 0);
		  gimple_set_location (new_stmt, gimple_location (stmt));
		  if (lhs && TREE_CODE (lhs) == SSA_NAME)
		    {
		      tree var = create_tmp_var (TREE_TYPE (lhs), NULL);
		      tree def = get_or_create_ssa_default_def (cfun, var);

		      /* To satisfy condition for
			 cgraph_update_edges_for_call_stmt_node,
			 we need to preserve GIMPLE_CALL statement
			 at position of GSI iterator.  */
		      update_call_from_tree (gsi, def);
		      gsi_insert_before (gsi, new_stmt, GSI_NEW_STMT);
		    }
		  else
		    {
		      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
		      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
		      gsi_replace (gsi, new_stmt, false);
		    }
		  return true;
		}
	    }
	}
    }

  /* Check for indirect calls that became direct calls, and then
     no longer require a static chain.  */
  if (gimple_call_chain (stmt))
    {
      tree fn = gimple_call_fndecl (stmt);
      if (fn && !DECL_STATIC_CHAIN (fn))
	{
	  gimple_call_set_chain (stmt, NULL);
	  changed = true;
	}
      else
	{
	  tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
	  if (tmp)
	    {
	      gimple_call_set_chain (stmt, tmp);
	      changed = true;
	    }
	}
    }

  if (inplace)
    return changed;

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    {
      if (gimple_fold_builtin (gsi))
	changed = true;
    }
  else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
    {
      changed |= targetm.gimple_fold_builtin (gsi);
    }
  else if (gimple_call_internal_p (stmt))
    {
      enum tree_code subcode = ERROR_MARK;
      tree result = NULL_TREE;
      bool cplx_result = false;
      tree overflow = NULL_TREE;
      switch (gimple_call_internal_fn (stmt))
	{
	case IFN_BUILTIN_EXPECT:
	  result = fold_builtin_expect (gimple_location (stmt),
					gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1),
					gimple_call_arg (stmt, 2));
	  break;
	case IFN_UBSAN_OBJECT_SIZE:
	  if (integer_all_onesp (gimple_call_arg (stmt, 2))
	      || (TREE_CODE (gimple_call_arg (stmt, 1)) == INTEGER_CST
		  && TREE_CODE (gimple_call_arg (stmt, 2)) == INTEGER_CST
		  && tree_int_cst_le (gimple_call_arg (stmt, 1),
				      gimple_call_arg (stmt, 2))))
	    {
	      gsi_replace (gsi, gimple_build_nop (), true);
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	      return true;
	    }
	  break;
	case IFN_UBSAN_CHECK_ADD:
	  subcode = PLUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_SUB:
	  subcode = MINUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_MUL:
	  subcode = MULT_EXPR;
	  break;
	case IFN_ADD_OVERFLOW:
	  subcode = PLUS_EXPR;
	  cplx_result = true;
	  break;
	case IFN_SUB_OVERFLOW:
	  subcode = MINUS_EXPR;
	  cplx_result = true;
	  break;
	case IFN_MUL_OVERFLOW:
	  subcode = MULT_EXPR;
	  cplx_result = true;
	  break;
	default:
	  break;
	}
      if (subcode != ERROR_MARK)
	{
	  tree arg0 = gimple_call_arg (stmt, 0);
	  tree arg1 = gimple_call_arg (stmt, 1);
	  tree type = TREE_TYPE (arg0);
	  if (cplx_result)
	    {
	      tree lhs = gimple_call_lhs (stmt);
	      if (lhs == NULL_TREE)
		type = NULL_TREE;
	      else
		type = TREE_TYPE (TREE_TYPE (lhs));
	    }
	  if (type == NULL_TREE)
	    ;
	  /* x = y + 0; x = y - 0; x = y * 0; */
	  else if (integer_zerop (arg1))
	    result = subcode == MULT_EXPR ? integer_zero_node : arg0;
	  /* x = 0 + y; x = 0 * y; */
	  else if (subcode != MINUS_EXPR && integer_zerop (arg0))
	    result = subcode == MULT_EXPR ? integer_zero_node : arg1;
	  /* x = y - y; */
	  else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
	    result = integer_zero_node;
	  /* x = y * 1; x = 1 * y; */
	  else if (subcode == MULT_EXPR && integer_onep (arg1))
	    result = arg0;
	  else if (subcode == MULT_EXPR && integer_onep (arg0))
	    result = arg1;
	  else if (TREE_CODE (arg0) == INTEGER_CST
		   && TREE_CODE (arg1) == INTEGER_CST)
	    {
	      if (cplx_result)
		result = int_const_binop (subcode, fold_convert (type, arg0),
					  fold_convert (type, arg1));
	      else
		result = int_const_binop (subcode, arg0, arg1);
	      if (result && arith_overflowed_p (subcode, type, arg0, arg1))
		{
		  if (cplx_result)
		    overflow = build_one_cst (type);
		  else
		    result = NULL_TREE;
		}
	    }
	  if (result)
	    {
	      if (result == integer_zero_node)
		result = build_zero_cst (type);
	      else if (cplx_result && TREE_TYPE (result) != type)
		{
		  if (TREE_CODE (result) == INTEGER_CST)
		    {
		      if (arith_overflowed_p (PLUS_EXPR, type, result,
					      integer_zero_node))
			overflow = build_one_cst (type);
		    }
		  else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
			    && TYPE_UNSIGNED (type))
			   || (TYPE_PRECISION (type)
			       < (TYPE_PRECISION (TREE_TYPE (result))
				  + (TYPE_UNSIGNED (TREE_TYPE (result))
				     && !TYPE_UNSIGNED (type)))))
		    result = NULL_TREE;
		  if (result)
		    result = fold_convert (type, result);
		}
	    }
	}

      if (result)
	{
	  if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
	    result = drop_tree_overflow (result);
	  if (cplx_result)
	    {
	      if (overflow == NULL_TREE)
		overflow = build_zero_cst (TREE_TYPE (result));
	      tree ctype = build_complex_type (TREE_TYPE (result));
	      if (TREE_CODE (result) == INTEGER_CST
		  && TREE_CODE (overflow) == INTEGER_CST)
		result = build_complex (ctype, result, overflow);
	      else
		result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
				     ctype, result, overflow);
	    }
	  if (!update_call_from_tree (gsi, result))
	    gimplify_and_update_call_from_tree (gsi, result);
	  changed = true;
	}
    }

  return changed;
}
/* Worker for fold_stmt_1 dispatch to pattern based folding with
   gimple_simplify.

   Replaces *GSI with the simplification result in RCODE and OPS
   and the associated statements in *SEQ.  Does the replacement
   according to INPLACE and returns true if the operation succeeded.  */

static bool
replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
				  code_helper rcode, tree *ops,
				  gimple_seq *seq, bool inplace)
{
  gimple stmt = gsi_stmt (*gsi);

  /* Play safe and do not allow abnormals to be mentioned in
     newly created statements.  See also maybe_push_res_to_seq.  */
  if ((TREE_CODE (ops[0]) == SSA_NAME
       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0]))
      || (ops[1]
	  && TREE_CODE (ops[1]) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1]))
      || (ops[2]
	  && TREE_CODE (ops[2]) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2])))
    return false;

  if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
    {
      gcc_assert (rcode.is_tree_code ());
      if (TREE_CODE_CLASS ((enum tree_code) rcode) == tcc_comparison
	  /* GIMPLE_CONDs condition may not throw.  */
	  && (!flag_exceptions
	      || !cfun->can_throw_non_call_exceptions
	      || !operation_could_trap_p (rcode,
					  FLOAT_TYPE_P (TREE_TYPE (ops[0])),
					  false, NULL_TREE)))
	gimple_cond_set_condition (cond_stmt, rcode, ops[0], ops[1]);
      else if (rcode == SSA_NAME)
	gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
				   build_zero_cst (TREE_TYPE (ops[0])));
      else if (rcode == INTEGER_CST)
	{
	  if (integer_zerop (ops[0]))
	    gimple_cond_make_false (cond_stmt);
	  else
	    gimple_cond_make_true (cond_stmt);
	}
      else if (!inplace)
	{
	  tree res = maybe_push_res_to_seq (rcode, boolean_type_node,
					    ops, seq);
	  if (!res)
	    return false;
	  gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
				     build_zero_cst (TREE_TYPE (res)));
	}
      else
	return false;
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (is_gimple_assign (stmt)
	   && rcode.is_tree_code ())
    {
      if (!inplace
	  || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (rcode))
	{
	  maybe_build_generic_op (rcode,
				  TREE_TYPE (gimple_assign_lhs (stmt)),
				  &ops[0], ops[1], ops[2]);
	  gimple_assign_set_rhs_with_ops (gsi, rcode, ops[0], ops[1], ops[2]);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      if (!gimple_seq_empty_p (*seq))
		print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	      print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
	    }
	  gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
	  return true;
	}
    }
  else if (!inplace)
    {
      if (gimple_has_lhs (stmt))
	{
	  tree lhs = gimple_get_lhs (stmt);
	  if (!maybe_push_res_to_seq (rcode, TREE_TYPE (lhs),
				      ops, seq, lhs))
	    return false;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	    }
	  gsi_replace_with_seq_vops (gsi, *seq);
	  return true;
	}
      else
	gcc_unreachable ();
    }

  return false;
}
/* Canonicalize MEM_REFs invariant address operand after propagation.  */

static bool
maybe_canonicalize_mem_ref_addr (tree *t)
{
  bool res = false;

  if (TREE_CODE (*t) == ADDR_EXPR)
    t = &TREE_OPERAND (*t, 0);

  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
     of invariant addresses into a SSA name MEM_REF address.  */
  if (TREE_CODE (*t) == MEM_REF
      || TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (*t, 0);
      if (TREE_CODE (addr) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
	      || handled_component_p (TREE_OPERAND (addr, 0))))
	{
	  tree base;
	  HOST_WIDE_INT coffset;
	  base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
						&coffset);
	  if (!base)
	    gcc_unreachable ();

	  TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
	  TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
						  TREE_OPERAND (*t, 1),
						  size_int (coffset));
	  res = true;
	}
      gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
			   || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
    }

  /* Canonicalize back MEM_REFs to plain reference trees if the object
     accessed is a decl that has the same access semantics as the MEM_REF.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1)))
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
      if (/* Same volatile qualification.  */
	  TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
	  /* Same TBAA behavior with -fstrict-aliasing.  */
	  && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
	  && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
	      == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
	  /* Same alignment.  */
	  && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
	  /* We have to look out here to not drop a required conversion
	     from the rhs to the lhs if *t appears on the lhs or vice-versa
	     if it appears on the rhs.  Thus require strict type
	     compatibility.  */
	  && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
	{
	  *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
	  res = true;
	}
    }

  /* Canonicalize TARGET_MEM_REF in particular with respect to
     the indexes becoming constant.  */
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
	{
	  *t = tem;
	  res = true;
	}
    }

  return res;
}
/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases.  */

static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
{
  bool changed = false;
  gimple stmt = gsi_stmt (*gsi);
  unsigned i;

  /* First do required canonicalization of [TARGET_]MEM_REF addresses
     after propagation.
     ???  This shouldn't be done in generic folding but in the
     propagation helpers which also know whether an address was
     propagated.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
	{
	  tree *rhs = gimple_assign_rhs1_ptr (stmt);
	  if ((REFERENCE_CLASS_P (*rhs)
	       || TREE_CODE (*rhs) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (rhs))
	    changed = true;
	  tree *lhs = gimple_assign_lhs_ptr (stmt);
	  if (REFERENCE_CLASS_P (*lhs)
	      && maybe_canonicalize_mem_ref_addr (lhs))
	    changed = true;
	}
      break;
    case GIMPLE_CALL:
      {
	for (i = 0; i < gimple_call_num_args (stmt); ++i)
	  {
	    tree *arg = gimple_call_arg_ptr (stmt, i);
	    if (REFERENCE_CLASS_P (*arg)
		&& maybe_canonicalize_mem_ref_addr (arg))
	      changed = true;
	  }
	tree *lhs = gimple_call_lhs_ptr (stmt);
	if (*lhs
	    && REFERENCE_CLASS_P (*lhs)
	    && maybe_canonicalize_mem_ref_addr (lhs))
	  changed = true;
	break;
      }
    case GIMPLE_ASM:
      {
	gasm *asm_stmt = as_a <gasm *> (stmt);
	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if (REFERENCE_CLASS_P (op)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if ((REFERENCE_CLASS_P (op)
		 || TREE_CODE (op) == ADDR_EXPR)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
      }
      break;
    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree *val = gimple_debug_bind_get_value_ptr (stmt);
	  if (*val
	      && (REFERENCE_CLASS_P (*val)
		  || TREE_CODE (*val) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (val))
	    changed = true;
	}
      break;
    default:;
    }

  /* Dispatch to pattern-based folding.  */
  if (!inplace
      || is_gimple_assign (stmt)
      || gimple_code (stmt) == GIMPLE_COND)
    {
      gimple_seq seq = NULL;
      code_helper rcode;
      tree ops[3] = {};
      if (gimple_simplify (stmt, &rcode, ops, inplace ? NULL : &seq, valueize))
	{
	  if (replace_stmt_with_simplification (gsi, rcode, ops, &seq, inplace))
	    changed = true;
	  else
	    gimple_seq_discard (seq);
	}
    }

  stmt = gsi_stmt (*gsi);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	unsigned old_num_ops = gimple_num_ops (stmt);
	enum tree_code subcode = gimple_assign_rhs_code (stmt);
	tree lhs = gimple_assign_lhs (stmt);
	tree new_rhs;
	/* First canonicalize operand order.  This avoids building new
	   trees if this is the only thing fold would later do.  */
	if ((commutative_tree_code (subcode)
	     || commutative_ternary_tree_code (subcode))
	    && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
				     gimple_assign_rhs2 (stmt), false))
	  {
	    tree tem = gimple_assign_rhs1 (stmt);
	    gimple_assign_set_rhs1 (stmt, gimple_assign_rhs2 (stmt));
	    gimple_assign_set_rhs2 (stmt, tem);
	    changed = true;
	  }
	new_rhs = fold_gimple_assign (gsi);
	if (new_rhs
	    && !useless_type_conversion_p (TREE_TYPE (lhs),
					   TREE_TYPE (new_rhs)))
	  new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	if (new_rhs
	    && (!inplace
		|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	break;
      }

    case GIMPLE_COND:
      changed |= fold_gimple_cond (as_a <gcond *> (stmt));
      break;

    case GIMPLE_CALL:
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      {
	gasm *asm_stmt = as_a <gasm *> (stmt);
	size_t noutputs;
	const char **oconstraints;
	const char *constraint;
	bool allows_mem, allows_reg;

	noutputs = gimple_asm_noutputs (asm_stmt);
	oconstraints = XALLOCAVEC (const char *, noutputs);

	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    oconstraints[i]
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, true)) != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    constraint
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    parse_input_constraint (&constraint, 0, 0, noutputs, 0,
				    oconstraints, &allows_mem, &allows_reg);
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, !allows_reg && allows_mem))
		   != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
      }
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree val = gimple_debug_bind_get_value (stmt);
	  if (val
	      && REFERENCE_CLASS_P (val))
	    {
	      tree tem = maybe_fold_reference (val, false);
	      if (tem)
		{
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	  else if (val
		   && TREE_CODE (val) == ADDR_EXPR)
	    {
	      tree ref = TREE_OPERAND (val, 0);
	      tree tem = maybe_fold_reference (ref, false);
	      if (tem)
		{
		  tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	}
      break;

    default:;
    }

  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  */
  if (gimple_has_lhs (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
	{
	  tree new_lhs = maybe_fold_reference (lhs, true);
	  if (new_lhs)
	    {
	      gimple_set_lhs (stmt, new_lhs);
	      changed = true;
	    }
	}
    }

  return changed;
}
/* Valueization callback that ends up not following SSA edges.  */

tree
no_follow_ssa_edges (tree)
{
  return NULL_TREE;
}

/* Valueization callback that ends up following single-use SSA edges only.  */

tree
follow_single_use_edges (tree val)
{
  if (TREE_CODE (val) == SSA_NAME
      && !has_single_use (val))
    return NULL_TREE;
  return val;
}
/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.
   The statement pointed to by GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
}

bool
fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
{
  return fold_stmt_1 (gsi, false, valueize);
}
/* Perform the minimal folding on statement *GSI.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.
   The statement *GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt_inplace (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
  gcc_assert (gsi_stmt (*gsi) == stmt);
  return changed;
}
/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
   if EXPR is null or we don't know how.
   If non-null, the result always has boolean type.  */

static tree
canonicalize_bool (tree expr, bool invert)
{
  if (!expr)
    return NULL_TREE;
  if (invert)
    {
      if (integer_nonzerop (expr))
	return boolean_false_node;
      else if (integer_zerop (expr))
	return boolean_true_node;
      else if (TREE_CODE (expr) == SSA_NAME)
	return fold_build2 (EQ_EXPR, boolean_type_node, expr,
			    build_int_cst (TREE_TYPE (expr), 0));
      else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
	return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
			    boolean_type_node,
			    TREE_OPERAND (expr, 0),
			    TREE_OPERAND (expr, 1));
      else
	return NULL_TREE;
    }
  else
    {
      if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
	return expr;
      if (integer_nonzerop (expr))
	return boolean_true_node;
      else if (integer_zerop (expr))
	return boolean_false_node;
      else if (TREE_CODE (expr) == SSA_NAME)
	return fold_build2 (NE_EXPR, boolean_type_node, expr,
			    build_int_cst (TREE_TYPE (expr), 0));
      else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
	return fold_build2 (TREE_CODE (expr),
			    boolean_type_node,
			    TREE_OPERAND (expr, 0),
			    TREE_OPERAND (expr, 1));
      else
	return NULL_TREE;
    }
}
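/* For example (hypothetical operands), canonicalize_bool (x < y, true)
   yields the boolean-typed comparison x >= y, and
   canonicalize_bool (name, false) for a non-boolean SSA name yields
   name != 0.  */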
/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  */

static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
			const_tree op1, const_tree op2)
{
  gimple s;

  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      if (operand_equal_p (expr, op1, 0))
	return ((code == NE_EXPR && integer_zerop (op2))
		|| (code == EQ_EXPR && integer_nonzerop (op2)));
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
	  && gimple_assign_rhs_code (s) == code
	  && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
	  && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
	return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
	{
	  enum tree_code c = gimple_assign_rhs_code (s);
	  if ((c == NE_EXPR && integer_zerop (op2))
	      || (c == EQ_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr, c,
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	  if ((c == EQ_EXPR && integer_zerop (op2))
	      || (c == NE_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr,
					   invert_tree_comparison (c, false),
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	}
    }
  return false;
}
/* Check to see if two boolean expressions OP1 and OP2 are logically
   equivalent.  */

static bool
same_bool_result_p (const_tree op1, const_tree op2)
{
  /* Simple cases first.  */
  if (operand_equal_p (op1, op2, 0))
    return true;

  /* Check the cases where at least one of the operands is a comparison.
     These are a bit smarter than operand_equal_p in that they apply some
     identities on SSA_NAMEs.  */
  if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
      && same_bool_comparison_p (op1, TREE_CODE (op2),
				 TREE_OPERAND (op2, 0),
				 TREE_OPERAND (op2, 1)))
    return true;
  if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
      && same_bool_comparison_p (op2, TREE_CODE (op1),
				 TREE_OPERAND (op1, 0),
				 TREE_OPERAND (op1, 1)))
    return true;
  return false;
}
/* Forward declarations for some mutually recursive functions.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison (tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison_1 (gimple stmt,
			   enum tree_code code2, tree op2a, tree op2b);
static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison (tree var, bool invert,
			enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison_1 (gimple stmt,
			  enum tree_code code2, tree op2a, tree op2b);
/* Helper function for and_comparisons_1:  try to simplify the AND of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of the VAR before doing the AND.
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison (tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b)
{
  tree t;
  gimple stmt = SSA_NAME_DEF_STMT (var);

  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.  */
  if (invert)
    t = or_var_with_comparison_1 (stmt,
				  invert_tree_comparison (code2, false),
				  op2a, op2b);
  else
    t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
  return canonicalize_bool (t, invert);
}
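/* For example (hypothetical operands), with INVERT set, !var AND (x < y)
   is handled as !(var OR (x >= y)): the inner OR is simplified first and
   canonicalize_bool re-applies the final inversion.  */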
/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison_1 (gimple stmt,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_false_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (innercode,
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt),
				  code2, op2a, op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple s;
      tree t;
      tree partial = NULL_TREE;
      bool is_and = (innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
	 inner1 AND (inner1 OR inner2) => inner1
	 !inner1 AND (inner1 AND inner2) => false
	 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
         Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
	 Compute the first partial result, (inner1 AND (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (t AND inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner2;
	      else if (integer_zerop (t))
		return boolean_false_node;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner2 AND (op2a code2 op2b)))  */
	  else if (integer_onep (t))
	    return boolean_true_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (inner1 AND t)  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner1;
	      else if (integer_zerop (t))
		return boolean_false_node;
	      /* If both are the same, we can apply the identity
		 (x AND x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the OR case. where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner1 AND (op2a code2 op2b)))
	     => (t OR partial)  */
	  else
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed OR expression.  The
		     interesting case is when at least one is false.
		     Or, if both are the same, we can apply the identity
		     (x OR x) == x.  */
		  if (integer_zerop (partial))
		    return t;
		  else if (integer_zerop (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
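/* An illustrative example (hypothetical SSA names): if var = a & b,
   then var AND (a != 0) simplifies to var via the true_test_var
   identity; if var = a | b, then var AND (a == 0) reduces to the
   recursive simplification of b AND (a == 0), when one exists.  */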
/* Try to simplify the AND of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) AND (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a == op1b), we should either be able to
	 return that or FALSE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	      else
		return boolean_false_node;
	    }
	}
      /* Likewise if the second comparison is an == comparison.  */
      else if (code2 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	      else
		return boolean_false_node;
	    }
	}

      /* Same business with inequality tests.  */
      else if (code1 == NE_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp != 0); break;
	    case NE_EXPR: val = (cmp == 0); break;
	    case LT_EXPR: val = (cmp >= 0); break;
	    case GT_EXPR: val = (cmp <= 0); break;
	    case LE_EXPR: val = (cmp > 0); break;
	    case GE_EXPR: val = (cmp < 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == NE_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp <= 0); break;
	    case GT_EXPR: val = (cmp >= 0); break;
	    case LE_EXPR: val = (cmp < 0); break;
	    case GE_EXPR: val = (cmp > 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Choose the more restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Likewise choose the more restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Check for singleton ranges.  */
      else if (cmp == 0
	       && ((code1 == LE_EXPR && code2 == GE_EXPR)
		   || (code1 == GE_EXPR && code2 == LE_EXPR)))
	return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);

      /* Check for disjoint ranges.  */
      else if (cmp <= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_false_node;
      else if (cmp >= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_false_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return and_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ANDed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
			{
			  if (!result)
			    result = boolean_false_node;
			  else if (!integer_zerop (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = and_var_with_comparison (arg, invert, code2,
						      op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }
	  break;

	default:
	  break;
	}
    }
  return NULL_TREE;
}
/* Try to simplify the AND of two comparisons, specified by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be simplified to a single expression (without requiring
   introducing more SSA variables to hold intermediate values),
   return the resulting tree.  Otherwise return NULL_TREE.
   If the result expression is non-null, it has boolean type.  */

tree
maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
			    enum tree_code code2, tree op2a, tree op2b)
{
  tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
  if (t)
    return t;
  else
    return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
}
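/* An illustrative example (hypothetical operands): for an integer x,
       maybe_fold_and_comparisons (LE_EXPR, x, 5, LT_EXPR, x, 3)
   returns the tree for x < 3 (the more restrictive bound), while
   (x < 3) AND (x > 5) folds to boolean_false_node as a disjoint
   range.  */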
/* Helper function for or_comparisons_1:  try to simplify the OR of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of VAR before doing the OR.
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison (tree var, bool invert,
			enum tree_code code2, tree op2a, tree op2b)
{
  tree t;
  gimple stmt = SSA_NAME_DEF_STMT (var);

  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.  */
  if (invert)
    t = and_var_with_comparison_1 (stmt,
				   invert_tree_comparison (code2, false),
				   op2a, op2b);
  else
    t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
  return canonicalize_bool (t, invert);
}
4045 /* Try to simplify the OR of the ssa variable defined by the assignment
4046 STMT with the comparison specified by (OP2A CODE2 OP2B).
4047 Return NULL_EXPR if we can't simplify this to a single expression. */
4050 or_var_with_comparison_1 (gimple stmt
,
4051 enum tree_code code2
, tree op2a
, tree op2b
)
4053 tree var
= gimple_assign_lhs (stmt
);
4054 tree true_test_var
= NULL_TREE
;
4055 tree false_test_var
= NULL_TREE
;
4056 enum tree_code innercode
= gimple_assign_rhs_code (stmt
);
4058 /* Check for identities like (var OR (var != 0)) => true . */
4059 if (TREE_CODE (op2a
) == SSA_NAME
4060 && TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
)
4062 if ((code2
== NE_EXPR
&& integer_zerop (op2b
))
4063 || (code2
== EQ_EXPR
&& integer_nonzerop (op2b
)))
4065 true_test_var
= op2a
;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_true_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = or_comparisons_1 (innercode,
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 code2, op2a, op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple s;
      tree t;
      tree partial = NULL_TREE;
      bool is_or = (innercode == BIT_IOR_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
	 inner1 OR (inner1 AND inner2) => inner1
	 !inner1 OR (inner1 OR inner2) => true
	 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2  */
      if (inner1 == true_test_var)
	return (is_or ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_or ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the OR across the inner tests.
	 Compute the first partial result, (inner1 OR (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (t OR inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_or)
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (integer_zerop (t))
		return inner2;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner2 OR (op2a code op2b)))  */
	  else if (integer_zerop (t))
	    return boolean_false_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (inner1 OR t)
	     => (t OR partial)  */
	  if (is_or)
	    {
	      if (integer_zerop (t))
		return inner1;
	      else if (integer_onep (t))
		return boolean_true_node;
	      /* If both are the same, we can apply the identity
		 (x OR x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner1 OR (op2a code2 op2b)))
	     => (t AND partial)  */
	  else
	    {
	      if (integer_zerop (t))
		return boolean_false_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed AND expression.  The
		     interesting case is when at least one is true.
		     Or, if both are the same, we can apply the identity
		     (x AND x) == x.  */
		  if (integer_onep (partial))
		    return t;
		  else if (integer_onep (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
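/* Illustrative example (not part of the original file): with _Bool
   inner1, inner2 and var defined as var = inner1 | inner2, the
   identities above fold

     var | (inner1 != 0)   =>  var     (inner1 == true_test_var)
     var | (inner1 == 0)   =>  true    (inner1 == false_test_var)

   without recursing into the definitions of inner1 or inner2.  */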
/* Try to simplify the OR of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) OR (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a != op1b), we should either be able to
	 return that or TRUE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	    }
	}
      /* Likewise if the second comparison is a != comparison.  */
      else if (code2 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	    }
	}

      /* See if an equality test is redundant with the other comparison.  */
      else if (code1 == EQ_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == EQ_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Choose the less restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Likewise choose the less restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Check for singleton ranges.  */
      else if (cmp == 0
	       && ((code1 == LT_EXPR && code2 == GT_EXPR)
		   || (code1 == GT_EXPR && code2 == LT_EXPR)))
	return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);

      /* Check for less/greater pairs that don't restrict the range at all.  */
      else if (cmp >= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_true_node;
      else if (cmp <= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_true_node;
    }
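  /* Worked example (illustrative, not part of the original file): for an
     SSA name x compared against integer constants, the merging above
     yields

       x < 3  || x <= 5  =>  x <= 5   (cmp < 0: keep the less restrictive)
       x != 3 || x == 3  =>  true     (code1 == NE_EXPR, cmp == 0)
       x < 3  || x > 3   =>  x != 3   (singleton range)
       x <= 3 || x >= 3  =>  true     (pair does not restrict the range)  */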
  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return or_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ORed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
			{
			  if (!result)
			    result = boolean_true_node;
			  else if (!integer_onep (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = or_var_with_comparison (arg, invert, code2,
						     op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}

/* Try to simplify the OR of two comparisons, specified by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be simplified to a single expression (without requiring
   introducing more SSA variables to hold intermediate values),
   return the resulting tree.  Otherwise return NULL_TREE.
   If the result expression is non-null, it has boolean type.  */

tree
maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
  if (t)
    return t;
  else
    return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
}
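/* Usage sketch (hypothetical caller, not part of the original file):

     tree three = build_int_cst (TREE_TYPE (x), 3);
     tree t = maybe_fold_or_comparisons (LT_EXPR, x, three,
					 GE_EXPR, x, three);

   Here t is boolean_true_node, since x < 3 || x >= 3 always holds.
   A NULL_TREE result means the two comparisons did not combine; note
   the second call above retries with the operands swapped, so callers
   get the benefit of the asymmetric recursion in both directions.  */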
/* Fold STMT to a constant using VALUEIZE to valueize SSA names.

   Either NULL_TREE, a simplified but non-constant or a constant
   is returned.

   ???  This should go into a gimple-fold-inline.h file to be eventually
   privatized with the single valueize function used in the various TUs
   to avoid the indirect function call overhead.  */

tree
gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree),
				tree (*gvalueize) (tree))
{
  code_helper rcode;
  tree ops[3] = {};
  /* ???  The SSA propagators do not correctly deal with following SSA use-def
     edges if there are intermediate VARYING defs.  For this reason
     do not follow SSA edges here even though SCCVN can technically
     just deal fine with that.  */
  if (gimple_simplify (stmt, &rcode, ops, NULL, gvalueize)
      && rcode.is_tree_code ()
      && (TREE_CODE_LENGTH ((tree_code) rcode) == 0
	  || ((tree_code) rcode) == ADDR_EXPR)
      && is_gimple_val (ops[0]))
    {
      tree res = ops[0];
      if (dump_file && dump_flags & TDF_DETAILS)
	{
	  fprintf (dump_file, "Match-and-simplified ");
	  print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
	  fprintf (dump_file, " to ");
	  print_generic_expr (dump_file, res, 0);
	  fprintf (dump_file, "\n");
	}
      return res;
    }

  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	enum tree_code subcode = gimple_assign_rhs_code (stmt);

	switch (get_gimple_rhs_class (subcode))
	  {
	  case GIMPLE_SINGLE_RHS:
	    {
	      tree rhs = gimple_assign_rhs1 (stmt);
	      enum tree_code_class kind = TREE_CODE_CLASS (subcode);

	      if (TREE_CODE (rhs) == SSA_NAME)
		{
		  /* If the RHS is an SSA_NAME, return its known constant value,
		     if any.  */
		  return (*valueize) (rhs);
		}
	      /* Handle propagating invariant addresses into address
		 operations.  */
	      else if (TREE_CODE (rhs) == ADDR_EXPR
		       && !is_gimple_min_invariant (rhs))
		{
		  HOST_WIDE_INT offset = 0;
		  tree base;
		  base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
							  &offset,
							  valueize);
		  if (base
		      && (CONSTANT_CLASS_P (base)
			  || decl_address_invariant_p (base)))
		    return build_invariant_address (TREE_TYPE (rhs),
						    base, offset);
		}
	      else if (TREE_CODE (rhs) == CONSTRUCTOR
		       && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		       && (CONSTRUCTOR_NELTS (rhs)
			   == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
		{
		  unsigned i;
		  tree val, *vec;

		  vec = XALLOCAVEC (tree,
				    TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
		  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
		    {
		      val = (*valueize) (val);
		      if (TREE_CODE (val) == INTEGER_CST
			  || TREE_CODE (val) == REAL_CST
			  || TREE_CODE (val) == FIXED_CST)
			vec[i] = val;
		      else
			return NULL_TREE;
		    }

		  return build_vector (TREE_TYPE (rhs), vec);
		}
	      if (subcode == OBJ_TYPE_REF)
		{
		  tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
		  /* If callee is constant, we can fold away the wrapper.  */
		  if (is_gimple_min_invariant (val))
		    return val;
		}

	      if (kind == tcc_reference)
		{
		  if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
		       || TREE_CODE (rhs) == REALPART_EXPR
		       || TREE_CODE (rhs) == IMAGPART_EXPR)
		      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      return fold_unary_loc (EXPR_LOCATION (rhs),
					     TREE_CODE (rhs),
					     TREE_TYPE (rhs), val);
		    }
		  else if (TREE_CODE (rhs) == BIT_FIELD_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      return fold_ternary_loc (EXPR_LOCATION (rhs),
					       TREE_CODE (rhs),
					       TREE_TYPE (rhs), val,
					       TREE_OPERAND (rhs, 1),
					       TREE_OPERAND (rhs, 2));
		    }
		  else if (TREE_CODE (rhs) == MEM_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      if (TREE_CODE (val) == ADDR_EXPR
			  && is_gimple_min_invariant (val))
			{
			  tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
						  unshare_expr (val),
						  TREE_OPERAND (rhs, 1));
			  if (tem)
			    rhs = tem;
			}
		    }
		  return fold_const_aggregate_ref_1 (rhs, valueize);
		}
	      else if (kind == tcc_declaration)
		return get_symbol_constant_value (rhs);
	      return rhs;
	    }

	  case GIMPLE_UNARY_RHS:
	    return NULL_TREE;

	  case GIMPLE_BINARY_RHS:
	    {
	      /* Handle binary operators that can appear in GIMPLE form.  */
	      tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
	      tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));

	      /* Translate &x + CST into an invariant form suitable for
		 further propagation.  */
	      if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
		  && TREE_CODE (op0) == ADDR_EXPR
		  && TREE_CODE (op1) == INTEGER_CST)
		{
		  tree off = fold_convert (ptr_type_node, op1);
		  return build_fold_addr_expr_loc
			   (loc,
			    fold_build2 (MEM_REF,
					 TREE_TYPE (TREE_TYPE (op0)),
					 unshare_expr (op0), off));
		}

	      return fold_binary_loc (loc, subcode,
				      gimple_expr_type (stmt), op0, op1);
	    }

	  case GIMPLE_TERNARY_RHS:
	    {
	      /* Handle ternary operators that can appear in GIMPLE form.  */
	      tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
	      tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
	      tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));

	      /* Fold embedded expressions in ternary codes.  */
	      if ((subcode == COND_EXPR
		   || subcode == VEC_COND_EXPR)
		  && COMPARISON_CLASS_P (op0))
		{
		  tree op00 = (*valueize) (TREE_OPERAND (op0, 0));
		  tree op01 = (*valueize) (TREE_OPERAND (op0, 1));
		  tree tem = fold_binary_loc (loc, TREE_CODE (op0),
					      TREE_TYPE (op0), op00, op01);
		  if (tem)
		    op0 = tem;
		}

	      return fold_ternary_loc (loc, subcode,
				       gimple_expr_type (stmt), op0, op1, op2);
	    }

	  default:
	    gcc_unreachable ();
	  }
      }

    case GIMPLE_CALL:
      {
	tree fn;
	gcall *call_stmt = as_a <gcall *> (stmt);

	if (gimple_call_internal_p (stmt))
	  {
	    enum tree_code subcode = ERROR_MARK;
	    switch (gimple_call_internal_fn (stmt))
	      {
	      case IFN_UBSAN_CHECK_ADD:
		subcode = PLUS_EXPR;
		break;
	      case IFN_UBSAN_CHECK_SUB:
		subcode = MINUS_EXPR;
		break;
	      case IFN_UBSAN_CHECK_MUL:
		subcode = MULT_EXPR;
		break;
	      default:
		return NULL_TREE;
	      }

	    tree arg0 = gimple_call_arg (stmt, 0);
	    tree arg1 = gimple_call_arg (stmt, 1);
	    tree op0 = (*valueize) (arg0);
	    tree op1 = (*valueize) (arg1);

	    if (TREE_CODE (op0) != INTEGER_CST
		|| TREE_CODE (op1) != INTEGER_CST)
	      {
		switch (subcode)
		  {
		  case MULT_EXPR:
		    /* x * 0 = 0 * x = 0 without overflow.  */
		    if (integer_zerop (op0) || integer_zerop (op1))
		      return build_zero_cst (TREE_TYPE (arg0));
		    break;
		  case MINUS_EXPR:
		    /* y - y = 0 without overflow.  */
		    if (operand_equal_p (op0, op1, 0))
		      return build_zero_cst (TREE_TYPE (arg0));
		    break;
		  default:
		    break;
		  }
	      }
	    tree res
	      = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
	    if (res
		&& TREE_CODE (res) == INTEGER_CST
		&& !TREE_OVERFLOW (res))
	      return res;
	    return NULL_TREE;
	  }

	fn = (*valueize) (gimple_call_fn (stmt));
	if (TREE_CODE (fn) == ADDR_EXPR
	    && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
	    && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
	    && gimple_builtin_call_types_compatible_p (stmt,
						       TREE_OPERAND (fn, 0)))
	  {
	    tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
	    tree call, retval;
	    unsigned i;
	    for (i = 0; i < gimple_call_num_args (stmt); ++i)
	      args[i] = (*valueize) (gimple_call_arg (stmt, i));
	    call = build_call_array_loc (loc,
					 gimple_call_return_type (call_stmt),
					 fn, gimple_call_num_args (stmt), args);
	    retval = fold_call_expr (EXPR_LOCATION (call), call, false);
	    if (retval)
	      {
		/* fold_call_expr wraps the result inside a NOP_EXPR.  */
		STRIP_NOPS (retval);
		retval = fold_convert (gimple_call_return_type (call_stmt),
				       retval);
	      }
	    return retval;
	  }
	return NULL_TREE;
      }

    default:
      return NULL_TREE;
    }
}

/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
   Returns NULL_TREE if folding to a constant is not possible, otherwise
   returns a constant according to is_gimple_min_invariant.  */

tree
gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
{
  tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
  if (res && is_gimple_min_invariant (res))
    return res;
  return NULL_TREE;
}
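/* Usage sketch (hypothetical, not part of the original file): a pass
   carrying a constant lattice can fold statements through it.
   lookup_lattice_value below is a hypothetical helper standing in for
   the pass's own lookup (CCP in tree-ssa-ccp.c follows this pattern
   with its lattice):

     static tree
     valueize_op (tree op)
     {
       if (TREE_CODE (op) == SSA_NAME)
	 {
	   tree val = lookup_lattice_value (op);
	   if (val && is_gimple_min_invariant (val))
	     return val;
	 }
       return op;
     }

     tree folded = gimple_fold_stmt_to_constant (stmt, valueize_op);  */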
/* The following set of functions are supposed to fold references using
   their constant initializers.  */

static tree fold_ctor_reference (tree type, tree ctor,
				 unsigned HOST_WIDE_INT offset,
				 unsigned HOST_WIDE_INT size, tree);

/* See if we can find constructor defining value of BASE.
   When we know the constructor with constant offset (such as
   base is array[40] and we do know constructor of array), then
   BIT_OFFSET is adjusted accordingly.

   As a special case, return error_mark_node when constructor
   is not explicitly available, but it is known to be zero
   such as 'static const int a;'.  */

static tree
get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
		      tree (*valueize)(tree))
{
  HOST_WIDE_INT bit_offset2, size, max_size;
  if (TREE_CODE (base) == MEM_REF)
    {
      if (!integer_zerop (TREE_OPERAND (base, 1)))
	{
	  if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
	    return NULL_TREE;
	  *bit_offset += (mem_ref_offset (base).to_short_addr ()
			  * BITS_PER_UNIT);
	}

      if (valueize
	  && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
	base = valueize (TREE_OPERAND (base, 0));
      if (!base || TREE_CODE (base) != ADDR_EXPR)
	return NULL_TREE;
      base = TREE_OPERAND (base, 0);
    }

  /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
     DECL_INITIAL.  If BASE is a nested reference into another
     ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
     the inner reference.  */
  switch (TREE_CODE (base))
    {
    case VAR_DECL:
    case CONST_DECL:
      {
	tree init = ctor_for_folding (base);

	/* Our semantic is exact opposite of ctor_for_folding;
	   NULL means unknown, while error_mark_node is 0.  */
	if (init == error_mark_node)
	  return NULL_TREE;
	if (!init)
	  return error_mark_node;
	return init;
      }

    case ARRAY_REF:
    case COMPONENT_REF:
      base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
      if (max_size == -1 || size != max_size)
	return NULL_TREE;
      *bit_offset += bit_offset2;
      return get_base_constructor (base, bit_offset, valueize);

    case STRING_CST:
    case CONSTRUCTOR:
      return base;

    default:
      return NULL_TREE;
    }
}
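/* Example (illustrative, not part of the original file): for

     static const int a[40] = { 1, 2, 3 };

   a MEM_REF of &a with constant byte offset 16 resolves to the
   CONSTRUCTOR of a with *BIT_OFFSET advanced by 16 * BITS_PER_UNIT,
   whereas 'static const int b;' with no explicit initializer yields
   error_mark_node, i.e. a value known to be zero.  */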
/* CTOR is CONSTRUCTOR of an array type.  Fold reference of type TYPE and size
   SIZE to the memory at bit OFFSET.  */

static tree
fold_array_ctor_reference (tree type, tree ctor,
			   unsigned HOST_WIDE_INT offset,
			   unsigned HOST_WIDE_INT size,
			   tree from_decl)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;
  offset_int low_bound;
  offset_int elt_size;
  offset_int index, max_index;
  offset_int access_index;
  tree domain_type = NULL_TREE, index_type = NULL_TREE;
  HOST_WIDE_INT inner_offset;

  /* Compute low bound and elt size.  */
  if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
    domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    {
      /* Static constructors for variably sized objects make no sense.  */
      gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
      index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
      low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
    }
  else
    low_bound = 0;
  /* Static constructors for variably sized objects make no sense.  */
  gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
	      == INTEGER_CST);
  elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));

  /* We can handle only constantly sized accesses that are known to not
     be larger than size of array element.  */
  if (!TYPE_SIZE_UNIT (type)
      || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
      || wi::lts_p (elt_size, wi::to_offset (TYPE_SIZE_UNIT (type)))
      || elt_size == 0)
    return NULL_TREE;

  /* Compute the array index we look for.  */
  access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
				 elt_size);
  access_index += low_bound;
  if (index_type)
    access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
			    TYPE_SIGN (index_type));

  /* And offset within the access.  */
  inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);

  /* See if the array field is large enough to span whole access.  We do not
     care to fold accesses spanning multiple array indexes.  */
  if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
    return NULL_TREE;

  index = low_bound - 1;
  if (index_type)
    index = wi::ext (index, TYPE_PRECISION (index_type),
		     TYPE_SIGN (index_type));

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
    {
      /* Array constructor might explicitly set index, or specify range,
	 or leave index NULL meaning that it is next index after previous
	 one.  */
      if (cfield)
	{
	  if (TREE_CODE (cfield) == INTEGER_CST)
	    max_index = index = wi::to_offset (cfield);
	  else
	    {
	      gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
	      index = wi::to_offset (TREE_OPERAND (cfield, 0));
	      max_index = wi::to_offset (TREE_OPERAND (cfield, 1));
	    }
	}
      else
	{
	  index += 1;
	  if (index_type)
	    index = wi::ext (index, TYPE_PRECISION (index_type),
			     TYPE_SIGN (index_type));
	  max_index = index;
	}

      /* Do we have match?  */
      if (wi::cmpu (access_index, index) >= 0
	  && wi::cmpu (access_index, max_index) <= 0)
	return fold_ctor_reference (type, cval, inner_offset, size,
				    from_decl);
    }
  /* When memory is not explicitly mentioned in constructor,
     it is 0 (or out of range).  */
  return build_zero_cst (type);
}
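/* Example of the folding this enables (illustrative):

     static const int a[4] = { 10, 20, 30, 40 };
     int f (void) { return a[2]; }    // the read folds to 30

   An access that does not fit within a single element, e.g. a 64-bit
   read starting inside a[1], fails the inner_offset + size check and
   is left alone.  */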
/* CTOR is CONSTRUCTOR of an aggregate or vector.
   Fold reference of type TYPE and size SIZE to the memory at bit OFFSET.  */

static tree
fold_nonarray_ctor_reference (tree type, tree ctor,
			      unsigned HOST_WIDE_INT offset,
			      unsigned HOST_WIDE_INT size,
			      tree from_decl)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
			    cval)
    {
      tree byte_offset = DECL_FIELD_OFFSET (cfield);
      tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
      tree field_size = DECL_SIZE (cfield);
      offset_int bitoffset;
      offset_int bitoffset_end, access_end;

      /* Variable sized objects in static constructors make no sense,
	 but field_size can be NULL for flexible array members.  */
      gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
		  && TREE_CODE (byte_offset) == INTEGER_CST
		  && (field_size != NULL_TREE
		      ? TREE_CODE (field_size) == INTEGER_CST
		      : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));

      /* Compute bit offset of the field.  */
      bitoffset = (wi::to_offset (field_offset)
		   + wi::lshift (wi::to_offset (byte_offset),
				 LOG2_BITS_PER_UNIT));
      /* Compute bit offset where the field ends.  */
      if (field_size != NULL_TREE)
	bitoffset_end = bitoffset + wi::to_offset (field_size);
      else
	bitoffset_end = 0;

      access_end = offset_int (offset) + size;

      /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
	 [BITOFFSET, BITOFFSET_END)?  */
      if (wi::cmps (access_end, bitoffset) > 0
	  && (field_size == NULL_TREE
	      || wi::lts_p (offset, bitoffset_end)))
	{
	  offset_int inner_offset = offset_int (offset) - bitoffset;
	  /* We do have overlap.  Now see if field is large enough to
	     cover the access.  Give up for accesses spanning multiple
	     fields.  */
	  if (wi::cmps (access_end, bitoffset_end) > 0)
	    return NULL_TREE;
	  if (wi::lts_p (offset, bitoffset))
	    return NULL_TREE;
	  return fold_ctor_reference (type, cval,
				      inner_offset.to_uhwi (), size,
				      from_decl);
	}
    }
  /* When memory is not explicitly mentioned in constructor, it is 0.  */
  return build_zero_cst (type);
}
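/* Example (illustrative):

     static const struct { char c; int i; } s = { 'x', 42 };
     int g (void) { return s.i; }    // the read folds to 42

   A field the initializer does not mention reads as zero, which is
   what the build_zero_cst fallback above implements.  */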
/* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
   to the memory at bit OFFSET.  */

static tree
fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
		     unsigned HOST_WIDE_INT size, tree from_decl)
{
  tree ret;

  /* We found the field with exact match.  */
  if (useless_type_conversion_p (type, TREE_TYPE (ctor))
      && !offset)
    return canonicalize_constructor_val (unshare_expr (ctor), from_decl);

  /* We are at the end of walk, see if we can view convert the
     result.  */
  if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
      /* VIEW_CONVERT_EXPR is defined only for matching sizes.  */
      && !compare_tree_int (TYPE_SIZE (type), size)
      && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
    {
      ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
      ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
      if (ret)
	STRIP_USELESS_TYPE_CONVERSION (ret);
      return ret;
    }
  /* For constants and byte-aligned/sized reads try to go through
     native_encode/interpret.  */
  if (CONSTANT_CLASS_P (ctor)
      && BITS_PER_UNIT == 8
      && offset % BITS_PER_UNIT == 0
      && size % BITS_PER_UNIT == 0
      && size <= MAX_BITSIZE_MODE_ANY_MODE)
    {
      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
      if (native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
			      offset / BITS_PER_UNIT) > 0)
	return native_interpret_expr (type, buf, size / BITS_PER_UNIT);
    }
  if (TREE_CODE (ctor) == CONSTRUCTOR)
    {
      if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
	  || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
	return fold_array_ctor_reference (type, ctor, offset, size,
					  from_decl);
      else
	return fold_nonarray_ctor_reference (type, ctor, offset, size,
					     from_decl);
    }

  return NULL_TREE;
}
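/* The native_encode/interpret path above also covers type-punning reads
   from scalar constants, e.g. (illustrative, for a 32-bit IEEE target)

     static const float f = 1.0f;
     ... *(const int *) &f ...       // folds to 0x3f800000

   provided the access is byte aligned and byte sized.  */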
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates valueizing SSA
   names using VALUEIZE.  Return NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
{
  tree ctor, idx, base;
  HOST_WIDE_INT offset, size, max_size;
  tree tem;

  if (TREE_THIS_VOLATILE (t))
    return NULL_TREE;

  if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
    return get_symbol_constant_value (t);

  tem = fold_read_from_constant_string (t);
  if (tem)
    return tem;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Constant indexes are handled well by get_base_constructor.
	 Only special case variable offsets.
	 FIXME: This code can't handle nested references with variable indexes
	 (they will be handled only by iteration of ccp).  Perhaps we can bring
	 get_ref_base_and_extent here and make it use a valueize callback.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
	  && valueize
	  && (idx = (*valueize) (TREE_OPERAND (t, 1)))
	  && TREE_CODE (idx) == INTEGER_CST)
	{
	  tree low_bound, unit_size;

	  /* If the resulting bit-offset is constant, track it.  */
	  if ((low_bound = array_ref_low_bound (t),
	       TREE_CODE (low_bound) == INTEGER_CST)
	      && (unit_size = array_ref_element_size (t),
		  tree_fits_uhwi_p (unit_size)))
	    {
	      offset_int woffset
		= wi::sext (wi::to_offset (idx) - wi::to_offset (low_bound),
			    TYPE_PRECISION (TREE_TYPE (idx)));

	      if (wi::fits_shwi_p (woffset))
		{
		  offset = woffset.to_shwi ();
		  /* TODO: This code seems wrong, multiply then check
		     to see if it fits.  */
		  offset *= tree_to_uhwi (unit_size);
		  offset *= BITS_PER_UNIT;

		  base = TREE_OPERAND (t, 0);
		  ctor = get_base_constructor (base, &offset, valueize);
		  /* Empty constructor.  Always fold to 0.  */
		  if (ctor == error_mark_node)
		    return build_zero_cst (TREE_TYPE (t));
		  /* Out of bound array access.  Value is undefined,
		     but don't fold.  */
		  if (offset < 0)
		    return NULL_TREE;
		  /* We cannot determine ctor.  */
		  if (!ctor)
		    return NULL_TREE;
		  return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
					      tree_to_uhwi (unit_size)
					      * BITS_PER_UNIT,
					      base);
		}
	    }
	}
      /* Fallthru.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case TARGET_MEM_REF:
    case MEM_REF:
      base = get_ref_base_and_extent (t, &offset, &size, &max_size);
      ctor = get_base_constructor (base, &offset, valueize);

      /* Empty constructor.  Always fold to 0.  */
      if (ctor == error_mark_node)
	return build_zero_cst (TREE_TYPE (t));
      /* We do not know precise address.  */
      if (max_size == -1 || max_size != size)
	return NULL_TREE;
      /* We cannot determine ctor.  */
      if (!ctor)
	return NULL_TREE;

      /* Out of bound array access.  Value is undefined, but don't fold.  */
      if (offset < 0)
	return NULL_TREE;

      return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
				  base);

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
	if (c && TREE_CODE (c) == COMPLEX_CST)
	  return fold_build1_loc (EXPR_LOCATION (t),
				  TREE_CODE (t), TREE_TYPE (t), c);
	break;
      }

    default:
      break;
    }

  return NULL_TREE;
}

/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates.  Return
   NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref (tree t)
{
  return fold_const_aggregate_ref_1 (t, NULL);
}
/* Lookup virtual method with index TOKEN in a virtual table V
   at OFFSET.
   Set CAN_REFER if non-NULL to false if method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by a non-C++ produced symbol).  Otherwise just return NULL in that case.  */

tree
gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
				   tree v,
				   unsigned HOST_WIDE_INT offset,
				   bool *can_refer)
{
  tree vtable = v, init, fn;
  unsigned HOST_WIDE_INT size;
  unsigned HOST_WIDE_INT elt_size, access_index;
  tree domain_type;

  if (can_refer)
    *can_refer = true;

  /* First of all double check we have virtual table.  */
  if (TREE_CODE (v) != VAR_DECL
      || !DECL_VIRTUAL_P (v))
    {
      gcc_assert (in_lto_p);
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }

  init = ctor_for_folding (v);

  /* The virtual tables should always be born with constructors
     and we always should assume that they are available for
     folding.  At the moment we do not stream them in all cases,
     but it should never happen that ctor seem unreachable.  */
  gcc_assert (init);
  if (init == error_mark_node)
    {
      gcc_assert (in_lto_p);
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }
  gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
  size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
  offset *= BITS_PER_UNIT;
  offset += token * size;

  /* Lookup the value in the constructor that is assumed to be array.
     This is equivalent to
     fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
			       offset, size, NULL);
     but in a constant time.  We expect that frontend produced a simple
     array without indexed initializers.  */

  gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
  domain_type = TYPE_DOMAIN (TREE_TYPE (init));
  gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
  elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));

  access_index = offset / BITS_PER_UNIT / elt_size;
  gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);

  /* This code makes an assumption that there are no
     indexed fields produced by C++ FE, so we can directly index the array.  */
  if (access_index < CONSTRUCTOR_NELTS (init))
    {
      fn = CONSTRUCTOR_ELT (init, access_index)->value;
      gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
      STRIP_NOPS (fn);
    }
  else
    fn = NULL;

  /* For type inconsistent program we may end up looking up virtual method
     in virtual table that does not contain TOKEN entries.  We may overrun
     the virtual table and pick up a constant or RTTI info pointer.
     In any case the call is undefined.  */
  if (!fn
      || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
      || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
    fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
  else
    {
      fn = TREE_OPERAND (fn, 0);

      /* When cgraph node is missing and function is not public, we cannot
	 devirtualize.  This can happen in WHOPR when the actual method
	 ends up in other partition, because we found devirtualization
	 possibility too late.  */
      if (!can_refer_decl_in_current_unit_p (fn, vtable))
	{
	  if (can_refer)
	    {
	      *can_refer = false;
	      return fn;
	    }
	  return NULL_TREE;
	}
    }

  /* Make sure we create a cgraph node for functions we'll reference.
     They can be non-existent if the reference comes from an entry
     of an external vtable for example.  */
  cgraph_node::get_create (fn);

  return fn;
}
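/* Example of the lookup (illustrative C++ source, not part of this file):

     struct A { virtual int f (); };
     struct B : A { virtual int f (); };
     int call (B *b) { return b->f (); }   // OBJ_TYPE_REF, token 0

   With B's virtual table as V, token 0 indexes the vtable array
   constructor and yields the FUNCTION_DECL for B::f, which then
   replaces the indirect call.  */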
/* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
   is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
   KNOWN_BINFO carries the binfo describing the true type of
   OBJ_TYPE_REF_OBJECT(REF).
   Set CAN_REFER if non-NULL to false if method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by a non-C++ produced symbol).  Otherwise just return NULL in that case.  */

tree
gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
				  bool *can_refer)
{
  unsigned HOST_WIDE_INT offset;
  tree v;

  v = BINFO_VTABLE (known_binfo);
  /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone.  */
  if (!v)
    return NULL_TREE;

  if (!vtable_pointer_value_to_vtable (v, &v, &offset))
    {
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }
  return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
}
/* Return true iff VAL is a gimple expression that is known to be
   non-negative.  Restricted to floating-point inputs.  */

bool
gimple_val_nonnegative_real_p (tree val)
{
  gimple def_stmt;

  gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));

  /* Use existing logic for non-gimple trees.  */
  if (tree_expr_nonnegative_p (val))
    return true;

  if (TREE_CODE (val) != SSA_NAME)
    return false;

  /* Currently we look only at the immediately defining statement
     to make this determination, since recursion on defining
     statements of operands can lead to quadratic behavior in the
     worst case.  This is expected to catch almost all occurrences
     in practice.  It would be possible to implement limited-depth
     recursion if important cases are lost.  Alternatively, passes
     that need this information (such as the pow/powi lowering code
     in the cse_sincos pass) could be revised to provide it through
     dataflow propagation.  */

  def_stmt = SSA_NAME_DEF_STMT (val);

  if (is_gimple_assign (def_stmt))
    {
      tree op0, op1;

      /* See fold-const.c:tree_expr_nonnegative_p for additional
	 cases that could be handled with recursion.  */

      switch (gimple_assign_rhs_code (def_stmt))
	{
	case ABS_EXPR:
	  /* Always true for floating-point operands.  */
	  return true;

	case MULT_EXPR:
	  /* True if the two operands are identical (since we are
	     restricted to floating-point inputs).  */
	  op0 = gimple_assign_rhs1 (def_stmt);
	  op1 = gimple_assign_rhs2 (def_stmt);

	  if (op0 == op1
	      || operand_equal_p (op0, op1, 0))
	    return true;

	default:
	  break;
	}
    }
  else if (is_gimple_call (def_stmt))
    {
      tree fndecl = gimple_call_fndecl (def_stmt);
      if (fndecl
	  && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	{
	  tree arg1;

	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_ACOS):
	    CASE_FLT_FN (BUILT_IN_ACOSH):
	    CASE_FLT_FN (BUILT_IN_CABS):
	    CASE_FLT_FN (BUILT_IN_COSH):
	    CASE_FLT_FN (BUILT_IN_ERFC):
	    CASE_FLT_FN (BUILT_IN_EXP):
	    CASE_FLT_FN (BUILT_IN_EXP10):
	    CASE_FLT_FN (BUILT_IN_EXP2):
	    CASE_FLT_FN (BUILT_IN_FABS):
	    CASE_FLT_FN (BUILT_IN_FDIM):
	    CASE_FLT_FN (BUILT_IN_HYPOT):
	    CASE_FLT_FN (BUILT_IN_POW10):
	      return true;

	    CASE_FLT_FN (BUILT_IN_SQRT):
	      /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
		 nonnegative inputs.  */
	      if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (val))))
		return true;

	      break;

	    CASE_FLT_FN (BUILT_IN_POWI):
	      /* True if the second argument is an even integer.  */
	      arg1 = gimple_call_arg (def_stmt, 1);

	      if (TREE_CODE (arg1) == INTEGER_CST
		  && (TREE_INT_CST_LOW (arg1) & 1) == 0)
		return true;

	      break;

	    CASE_FLT_FN (BUILT_IN_POW):
	      /* True if the second argument is an even integer-valued
		 real.  */
	      arg1 = gimple_call_arg (def_stmt, 1);

	      if (TREE_CODE (arg1) == REAL_CST)
		{
		  REAL_VALUE_TYPE c;
		  HOST_WIDE_INT n;

		  c = TREE_REAL_CST (arg1);
		  n = real_to_integer (&c);

		  if ((n & 1) == 0)
		    {
		      REAL_VALUE_TYPE cint;
		      real_from_integer (&cint, VOIDmode, n, SIGNED);
		      if (real_identical (&c, &cint))
			return true;
		    }
		}

	      break;

	    default:
	      break;
	    }
	}
    }

  return false;
}
/* Given a pointer value OP0, return a simplified version of an
   indirection through OP0, or NULL_TREE if no simplification is
   possible.  Note that the resulting type may be different from
   the type pointed to in the sense that it is still compatible
   from the langhooks point of view. */

tree
gimple_fold_indirect_ref (tree t)
{
  tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
  tree sub = t;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (useless_type_conversion_p (type, optype))
	return op;

      /* *(foo *)&fooarray => fooarray[0] */
      if (TREE_CODE (optype) == ARRAY_TYPE
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
	  && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  if (TREE_CODE (min_val) == INTEGER_CST)
	    return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
	}
    }

  /* *(p + CST) -> ...  */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree addr = TREE_OPERAND (sub, 0);
      tree off = TREE_OPERAND (sub, 1);
      tree addrtype;

      STRIP_NOPS (addr);
      addrtype = TREE_TYPE (addr);

      /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
	  && tree_fits_uhwi_p (off))
	{
	  unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
	  tree part_width = TYPE_SIZE (type);
	  unsigned HOST_WIDE_INT part_widthi
	    = tree_to_shwi (part_width) / BITS_PER_UNIT;
	  unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
	  tree index = bitsize_int (indexi);
	  if (offset / part_widthi
	      < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
	    return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
				part_width, index);
	}

      /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
	{
	  tree size = TYPE_SIZE_UNIT (type);
	  if (tree_int_cst_equal (size, off))
	    return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
	}

      /* *(p + CST) -> MEM_REF <p, CST>.  */
      if (TREE_CODE (addr) != ADDR_EXPR
	  || DECL_P (TREE_OPERAND (addr, 0)))
	return fold_build2 (MEM_REF, type,
			    addr,
			    wide_int_to_tree (ptype, off));
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
      && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      tree osub = sub;
      sub = gimple_fold_indirect_ref (sub);
      if (! sub)
	sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      if (TREE_CODE (min_val) == INTEGER_CST)
	return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
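/* Concrete instance (illustrative): for

     _Complex double c;
     ... *(double *) &c ...

   the COMPLEX_TYPE case above folds the dereference to __real__ c,
   and a dereference at byte offset sizeof (double) folds to
   __imag__ c.  */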
/* Return true if CODE is an operation that when operating on signed
   integer types involves undefined behavior on overflow and the
   operation can be expressed with unsigned arithmetic.  */

bool
arith_code_with_undefined_signed_overflow (tree_code code)
{
  switch (code)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case NEGATE_EXPR:
    case POINTER_PLUS_EXPR:
      return true;
    default:
      return false;
    }
}

/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
   operation that can be transformed to unsigned arithmetic by converting
   its operand, carrying out the operation in the corresponding unsigned
   type and converting the result back to the original type.

   Returns a sequence of statements that replace STMT and also contain
   a modified form of STMT itself.  */

gimple_seq
rewrite_to_defined_overflow (gimple stmt)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "rewriting stmt with undefined signed "
	       "overflow ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
    }

  tree lhs = gimple_assign_lhs (stmt);
  tree type = unsigned_type_for (TREE_TYPE (lhs));
  gimple_seq stmts = NULL;
  for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
    {
      gimple_seq stmts2 = NULL;
      gimple_set_op (stmt, i,
		     force_gimple_operand (fold_convert (type,
							 gimple_op (stmt, i)),
					   &stmts2, true, NULL_TREE));
      gimple_seq_add_seq (&stmts, stmts2);
    }
  gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
  gimple_seq_add_stmt (&stmts, stmt);
  gimple cvt = gimple_build_assign_with_ops (NOP_EXPR, lhs,
					     gimple_assign_lhs (stmt));
  gimple_seq_add_stmt (&stmts, cvt);

  return stmts;
}
/* Build the expression CODE OP0 of type TYPE with location LOC,
   simplifying it first if possible using VALUEIZE if not NULL.
   OP0 is expected to be valueized already.  Returns the built
   expression value and appends statements possibly defining it
   to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum tree_code code, tree type, tree op0,
	      tree (*valueize)(tree))
{
  tree res = gimple_simplify (code, type, op0, seq, valueize);
  if (!res)
    {
      if (gimple_in_ssa_p (cfun))
	res = make_ssa_name (type, NULL);
      else
	res = create_tmp_reg (type, NULL);
      gimple stmt;
      if (code == REALPART_EXPR
	  || code == IMAGPART_EXPR
	  || code == VIEW_CONVERT_EXPR)
	stmt = gimple_build_assign_with_ops (code, res,
					     build1 (code, type, op0));
      else
	stmt = gimple_build_assign_with_ops (code, res, op0);
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}

/* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
   simplifying it first if possible using VALUEIZE if not NULL.
   OP0 and OP1 are expected to be valueized already.  Returns the built
   expression value and appends statements possibly defining it
   to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum tree_code code, tree type, tree op0, tree op1,
	      tree (*valueize)(tree))
{
  tree res = gimple_simplify (code, type, op0, op1, seq, valueize);
  if (!res)
    {
      if (gimple_in_ssa_p (cfun))
	res = make_ssa_name (type, NULL);
      else
	res = create_tmp_reg (type, NULL);
      gimple stmt = gimple_build_assign_with_ops (code, res, op0, op1);
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}

/* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
   simplifying it first if possible using VALUEIZE if not NULL.
   OP0, OP1 and OP2 are expected to be valueized already.  Returns the built
   expression value and appends statements possibly defining it
   to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum tree_code code, tree type, tree op0, tree op1, tree op2,
	      tree (*valueize)(tree))
{
  tree res = gimple_simplify (code, type, op0, op1, op2,
			      seq, valueize);
  if (!res)
    {
      if (gimple_in_ssa_p (cfun))
	res = make_ssa_name (type, NULL);
      else
	res = create_tmp_reg (type, NULL);
      gimple stmt;
      if (code == BIT_FIELD_REF)
	stmt = gimple_build_assign_with_ops (code, res,
					     build3 (BIT_FIELD_REF, type,
						     op0, op1, op2));
      else
	stmt = gimple_build_assign_with_ops (code, res, op0, op1, op2);
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
/* Build the call FN (ARG0) with a result of type TYPE
   (or no result if TYPE is void) with location LOC,
   simplifying it first if possible using VALUEIZE if not NULL.
   ARG0 is expected to be valueized already.  Returns the built
   expression value (or NULL_TREE if TYPE is void) and appends
   statements possibly defining it to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum built_in_function fn, tree type, tree arg0,
	      tree (*valueize)(tree))
{
  tree res = gimple_simplify (fn, type, arg0, seq, valueize);
  if (!res)
    {
      tree decl = builtin_decl_implicit (fn);
      gimple stmt = gimple_build_call (decl, 1, arg0);
      if (!VOID_TYPE_P (type))
	{
	  if (gimple_in_ssa_p (cfun))
	    res = make_ssa_name (type, NULL);
	  else
	    res = create_tmp_reg (type, NULL);
	  gimple_call_set_lhs (stmt, res);
	}
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}

/* Build the call FN (ARG0, ARG1) with a result of type TYPE
   (or no result if TYPE is void) with location LOC,
   simplifying it first if possible using VALUEIZE if not NULL.
   ARG0 and ARG1 are expected to be valueized already.  Returns the built
   expression value (or NULL_TREE if TYPE is void) and appends
   statements possibly defining it to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum built_in_function fn, tree type, tree arg0, tree arg1,
	      tree (*valueize)(tree))
{
  tree res = gimple_simplify (fn, type, arg0, arg1, seq, valueize);
  if (!res)
    {
      tree decl = builtin_decl_implicit (fn);
      gimple stmt = gimple_build_call (decl, 2, arg0, arg1);
      if (!VOID_TYPE_P (type))
	{
	  if (gimple_in_ssa_p (cfun))
	    res = make_ssa_name (type, NULL);
	  else
	    res = create_tmp_reg (type, NULL);
	  gimple_call_set_lhs (stmt, res);
	}
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}

/* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
   (or no result if TYPE is void) with location LOC,
   simplifying it first if possible using VALUEIZE if not NULL.
   ARG0, ARG1 and ARG2 are expected to be valueized already.  Returns the
   built expression value (or NULL_TREE if TYPE is void) and appends
   statements possibly defining it to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum built_in_function fn, tree type,
	      tree arg0, tree arg1, tree arg2,
	      tree (*valueize)(tree))
{
  tree res = gimple_simplify (fn, type, arg0, arg1, arg2, seq, valueize);
  if (!res)
    {
      tree decl = builtin_decl_implicit (fn);
      gimple stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
      if (!VOID_TYPE_P (type))
	{
	  if (gimple_in_ssa_p (cfun))
	    res = make_ssa_name (type, NULL);
	  else
	    res = create_tmp_reg (type, NULL);
	  gimple_call_set_lhs (stmt, res);
	}
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
/* Build the conversion (TYPE) OP with a result of type TYPE
   with location LOC if such conversion is necessary in GIMPLE,
   simplifying it first.
   Returns the built expression value and appends
   statements possibly defining it to SEQ.  */

tree
gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
{
  if (useless_type_conversion_p (type, TREE_TYPE (op)))
    return op;
  return gimple_build (seq, loc, NOP_EXPR, type, op);
}
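/* Usage sketch (hypothetical caller, not part of the original file):
   build fabs (x_1) followed by a conversion to float, inserting the
   sequence before an iterator gsi the caller already holds:

     gimple_seq seq = NULL;
     tree a = gimple_build (&seq, loc, ABS_EXPR, double_type_node,
			    x_1, NULL);
     tree r = gimple_convert (&seq, loc, float_type_node, a);
     gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);

   Statements are only appended to SEQ when gimple_simplify cannot fold
   the expression to an existing value, so the sequence may come back
   empty.  */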