/* Statement simplification on GIMPLE.
   Copyright (C) 2010-2014 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.c.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "stringpool.h"
#include "stor-layout.h"
#include "hard-reg-set.h"
#include "dominance.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimple-expr.h"
#include "gimple.h"
#include "gimple-iterator.h"
#include "gimple-ssa.h"
#include "tree-ssanames.h"
#include "tree-into-ssa.h"
#include "tree-ssa-propagate.h"
#include "plugin-api.h"
#include "ipa-utils.h"
#include "gimple-pretty-print.h"
#include "tree-ssa-address.h"
#include "langhooks.h"
#include "gimplify-me.h"
#include "gimple-match.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
/* Return true when DECL can be referenced from current unit.
   FROM_DECL (if non-null) specifies constructor of variable DECL was taken from.
   We can get declarations that are not possible to reference for various
   reasons:

     1) When analyzing C++ virtual tables.
	C++ virtual tables do have known constructors even
	when they are keyed to other compilation unit.
	Those tables can contain pointers to methods and vars
	in other units.  Those methods have both STATIC and EXTERNAL
	set.
     2) In WHOPR mode devirtualization might lead to reference
	to method that was partitioned elsewhere.
	In this case we have static VAR_DECL or FUNCTION_DECL
	that has no corresponding callgraph/varpool node
	declaring the body.
     3) COMDAT functions referred by external vtables that
	we devirtualize only during final compilation stage.
	At this time we already decided that we will not output
	the function body and thus we can't reference the symbol
	directly.  */

static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  if (DECL_ABSTRACT_P (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL))
    return true;

  /* Static objects can be referred only if they were not optimized out yet.  */
  if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
    {
      /* Before we start optimizing unreachable code we can be sure all
	 static objects are defined.  */
      if (symtab->function_flags_ready)
	return true;
      snode = symtab_node::get (decl);
      if (!snode || !snode->definition)
	return false;
      node = dyn_cast <cgraph_node *> (snode);
      return !node || !node->global.inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from initializer of
     external var or var that has been optimized out.  */
  if (!from_decl
      || TREE_CODE (from_decl) != VAR_DECL
      || (!DECL_EXTERNAL (from_decl)
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->definition)
      || (flag_ltrans
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->in_other_partition))
    return true;
  /* We are folding reference from external vtable.  The vtable may refer
     to a symbol keyed to other compilation unit.  The other compilation
     unit may be in separate DSO and the symbol may be hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
    return false;
  /* When function is public, we always can introduce new reference.
     Exception are the COMDAT functions where introducing a direct
     reference implies need to include function body in the current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We have COMDAT.  We are going to check if we still have definition
     or if the definition is going to be output in other partition.
     Bypass this when gimplifying; all needed functions will be produced.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting to not necessarily give up because the copy will be
     output elsewhere when the corresponding vtable is output.
     This is however not possible - ABI specify that COMDATs are output in
     units where they are used and when the other unit was compiled with LTO
     it is possible that vtable was kept public while the function itself
     was privatized.  */
  if (!symtab->function_flags_ready)
    return true;

  snode = symtab_node::get (decl);
  if (!snode
      || ((!snode->definition || DECL_EXTERNAL (decl))
	  && (!snode->in_other_partition
	      || (!snode->forced_by_abi && !snode->force_output))))
    return false;
  node = dyn_cast <cgraph_node *> (snode);
  return !node || !node->global.inlined_to;
}
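
/* Illustrative scenario (not part of the original file): when
   devirtualization resolves a call through an external vtable to a
   COMDAT method whose body was already optimized out in this unit,
   the predicate above returns false, and the folder must keep the
   indirect reference rather than introduce a direct one.  */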
/* CVAL is value taken from DECL_INITIAL of variable.  Try to transform it into
   acceptable form for is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specifies variable whose constructor contains CVAL.  */

tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  tree orig_cval = cval;
  STRIP_NOPS (cval);
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
	cval = build1_loc (EXPR_LOCATION (cval),
			   ADDR_EXPR, TREE_TYPE (ptr),
			   fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
					ptr,
					fold_convert (ptr_type_node,
						      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
	{
	  base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
	  if (base)
	    TREE_OPERAND (cval, 0) = base;
	}
      else
	base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
	return NULL_TREE;

      if ((TREE_CODE (base) == VAR_DECL
	   || TREE_CODE (base) == FUNCTION_DECL)
	  && !can_refer_decl_in_current_unit_p (base, from_decl))
	return NULL_TREE;
      if (TREE_CODE (base) == VAR_DECL)
	TREE_ADDRESSABLE (base) = 1;
      else if (TREE_CODE (base) == FUNCTION_DECL)
	{
	  /* Make sure we create a cgraph node for functions we'll reference.
	     They can be non-existent if the reference comes from an entry
	     of an external vtable for example.  */
	  cgraph_node::get_create (base);
	}
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  if (TREE_OVERFLOW_P (cval))
    return drop_tree_overflow (cval);
  return orig_cval;
}
/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

tree
get_symbol_constant_value (tree sym)
{
  tree val = ctor_for_folding (sym);
  if (val != error_mark_node)
    {
      if (val)
	{
	  val = canonicalize_constructor_val (unshare_expr (val), sym);
	  if (val && is_gimple_min_invariant (val))
	    return val;
	  else
	    return NULL_TREE;
	}
      /* Variables declared 'const' without an initializer
	 have zero as the initializer if they may not be
	 overridden at link or run time.  */
      if (!val
	  && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
	      || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
	return build_zero_cst (TREE_TYPE (sym));
    }

  return NULL_TREE;
}
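
/* Illustrative example (not part of the original file): given

     static const int answer = 42;

   a load of 'answer' folds to 42 via its DECL_INITIAL, while a 'const'
   integral variable with no initializer that cannot be overridden at
   link or run time folds to zero per the comment above.  */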
/* Subroutine of fold_stmt.  We perform several simplifications of the
   memory reference tree EXPR and make sure to re-gimplify them properly
   after propagation of constant addresses.  IS_LHS is true if the
   reference is supposed to be an lvalue.  */

static tree
maybe_fold_reference (tree expr, bool is_lhs)
{
  tree result;

  if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
       || TREE_CODE (expr) == REALPART_EXPR
       || TREE_CODE (expr) == IMAGPART_EXPR)
      && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_unary_loc (EXPR_LOCATION (expr),
			   TREE_CODE (expr),
			   TREE_TYPE (expr),
			   TREE_OPERAND (expr, 0));
  else if (TREE_CODE (expr) == BIT_FIELD_REF
	   && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_ternary_loc (EXPR_LOCATION (expr),
			     TREE_CODE (expr),
			     TREE_TYPE (expr),
			     TREE_OPERAND (expr, 0),
			     TREE_OPERAND (expr, 1),
			     TREE_OPERAND (expr, 2));

  if (!is_lhs
      && (result = fold_const_aggregate_ref (expr))
      && is_gimple_min_invariant (result))
    return result;

  return NULL_TREE;
}
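
/* Illustrative example (not part of the original file): after constant
   propagation a reference such as

     x_1 = REALPART_EXPR <__complex__ (3.0, 4.0)>;

   folds to the constant 3.0 through fold_unary_loc, and BIT_FIELD_REFs
   on constants are handled the same way through fold_ternary_loc.  */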
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs, false);

	else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
	  {
	    tree val = OBJ_TYPE_REF_EXPR (rhs);
	    if (is_gimple_min_invariant (val))
	      return val;
	    else if (flag_devirtualize && virtual_method_call_p (rhs))
	      {
		bool final;
		vec <cgraph_node *>targets
		  = possible_polymorphic_call_targets (rhs, stmt, &final);
		if (final && targets.length () <= 1 && dbg_cnt (devirt))
		  {
		    if (dump_enabled_p ())
		      {
			location_t loc = gimple_location_safe (stmt);
			dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
					 "resolving virtual function address "
					 "reference to function %s\n",
					 targets.length () == 1
					 ? targets[0]->name ()
					 : "NULL");
		      }
		    if (targets.length () == 1)
		      {
			val = fold_convert (TREE_TYPE (val),
					    build_fold_addr_expr_loc
					      (loc, targets[0]->decl));
			STRIP_USELESS_TYPE_CONVERSION (val);
		      }
		    else
		      /* We can not use __builtin_unreachable here because it
			 can not have address taken.  */
		      val = build_int_cst (TREE_TYPE (val), 0);
		    return val;
		  }
	      }
	  }
	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    tree ref = TREE_OPERAND (rhs, 0);
	    tree tem = maybe_fold_reference (ref, true);
	    if (tem
		&& TREE_CODE (tem) == MEM_REF
		&& integer_zerop (TREE_OPERAND (tem, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
	    else if (tem)
	      result = fold_convert (TREE_TYPE (rhs),
				     build_fold_addr_expr_loc (loc, tem));
	    else if (TREE_CODE (ref) == MEM_REF
		     && integer_zerop (TREE_OPERAND (ref, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		 && (CONSTRUCTOR_NELTS (rhs)
		     == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (TREE_CODE (val) != INTEGER_CST
		  && TREE_CODE (val) != REAL_CST
		  && TREE_CODE (val) != FIXED_CST)
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs))
	  return get_symbol_constant_value (rhs);

	/* If we couldn't fold the RHS, hand over to the generic
	   fold routines.  */
	if (result == NULL_TREE)
	  result = fold (rhs);

	/* Strip away useless type conversions.  Both the NON_LVALUE_EXPR
	   that may have been added by fold, and "useless" type
	   conversions that might now be apparent due to propagation.  */
	STRIP_USELESS_TYPE_CONVERSION (result);

	if (result != rhs && valid_gimple_rhs_p (result))
	  return result;

	return NULL_TREE;
      }

    case GIMPLE_UNARY_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
	if (result)
	  {
	    /* If the operation was a conversion do _not_ mark a
	       resulting constant with TREE_OVERFLOW if the original
	       constant was not.  These conversions have implementation
	       defined behavior and retaining the TREE_OVERFLOW flag
	       here would confuse later passes such as VRP.  */
	    if (CONVERT_EXPR_CODE_P (subcode)
		&& TREE_CODE (result) == INTEGER_CST
		&& TREE_CODE (rhs) == INTEGER_CST)
	      TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);

	    STRIP_USELESS_TYPE_CONVERSION (result);
	    if (valid_gimple_rhs_p (result))
	      return result;
	  }
      }
      break;

    case GIMPLE_BINARY_RHS:
      /* Try to canonicalize for boolean-typed X the comparisons
	 X == 0, X == 1, X != 0, and X != 1.  */
      if (gimple_assign_rhs_code (stmt) == EQ_EXPR
	  || gimple_assign_rhs_code (stmt) == NE_EXPR)
	{
	  tree lhs = gimple_assign_lhs (stmt);
	  tree op1 = gimple_assign_rhs1 (stmt);
	  tree op2 = gimple_assign_rhs2 (stmt);
	  tree type = TREE_TYPE (op1);

	  /* Check whether the comparison operands are of the same boolean
	     type as the result type is.
	     Check that second operand is an integer-constant with value
	     one or zero.  */
	  if (TREE_CODE (op2) == INTEGER_CST
	      && (integer_zerop (op2) || integer_onep (op2))
	      && useless_type_conversion_p (TREE_TYPE (lhs), type))
	    {
	      enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
	      bool is_logical_not = false;

	      /* X == 0 and X != 1 is a logical-not of X
		 X == 1 and X != 0 is X  */
	      if ((cmp_code == EQ_EXPR && integer_zerop (op2))
		  || (cmp_code == NE_EXPR && integer_onep (op2)))
		is_logical_not = true;

	      if (is_logical_not == false)
		result = op1;
	      /* Only for one-bit precision typed X the transformation
		 !X -> ~X is valid.  */
	      else if (TYPE_PRECISION (type) == 1)
		result = build1_loc (gimple_location (stmt), BIT_NOT_EXPR,
				     type, op1);
	      /* Otherwise we use !X -> X ^ 1.  */
	      else
		result = build2_loc (gimple_location (stmt), BIT_XOR_EXPR,
				     type, op1, build_int_cst (type, 1));
	    }
	}

      if (!result)
	result = fold_binary_loc (loc, subcode,
				  TREE_TYPE (gimple_assign_lhs (stmt)),
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_TERNARY_RHS:
      /* Try to fold a conditional expression.  */
      if (gimple_assign_rhs_code (stmt) == COND_EXPR)
	{
	  tree op0 = gimple_assign_rhs1 (stmt);
	  tree tem;
	  bool set = false;
	  location_t cond_loc = gimple_location (stmt);

	  if (COMPARISON_CLASS_P (op0))
	    {
	      fold_defer_overflow_warnings ();
	      tem = fold_binary_loc (cond_loc,
				     TREE_CODE (op0), TREE_TYPE (op0),
				     TREE_OPERAND (op0, 0),
				     TREE_OPERAND (op0, 1));
	      /* This is actually a conditional expression, not a GIMPLE
		 conditional statement, however, the valid_gimple_rhs_p
		 test still applies.  */
	      set = (tem && is_gimple_condexpr (tem)
		     && valid_gimple_rhs_p (tem));
	      fold_undefer_overflow_warnings (set, stmt, 0);
	    }
	  else if (is_gimple_min_invariant (op0))
	    {
	      tem = op0;
	      set = true;
	    }
	  else
	    return NULL_TREE;

	  if (set)
	    result = fold_build3_loc (cond_loc, COND_EXPR,
				      TREE_TYPE (gimple_assign_lhs (stmt)), tem,
				      gimple_assign_rhs2 (stmt),
				      gimple_assign_rhs3 (stmt));
	}

      if (!result)
	result = fold_ternary_loc (loc, subcode,
				   TREE_TYPE (gimple_assign_lhs (stmt)),
				   gimple_assign_rhs1 (stmt),
				   gimple_assign_rhs2 (stmt),
				   gimple_assign_rhs3 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
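
/* Illustrative example of the boolean canonicalization above (not part
   of the original file): for _Bool-typed x_1,

     y_2 = x_1 != 0;   becomes   y_2 = x_1;
     y_2 = x_1 == 0;   becomes   y_2 = ~x_1;      (one-bit precision)

   while for a wider boolean-like type the logical-not form is
   y_2 = x_1 ^ 1 instead.  */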
/* Attempt to fold a conditional statement.  Return true if any changes were
   made.  We only attempt to fold the condition expression, and do not perform
   any transformation that would require alteration of the cfg.  It is
   assumed that the operands have been previously folded.  */

static bool
fold_gimple_cond (gimple stmt)
{
  tree result = fold_binary_loc (gimple_location (stmt),
				 gimple_cond_code (stmt),
				 boolean_type_node,
				 gimple_cond_lhs (stmt),
				 gimple_cond_rhs (stmt));

  if (result)
    {
      STRIP_USELESS_TYPE_CONVERSION (result);
      if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
	{
	  gimple_cond_set_condition_from_tree (stmt, result);
	  return true;
	}
    }

  return false;
}
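
/* Illustrative example (not part of the original file): a condition
   with constant operands such as

     if (2 > 1) goto <bb 3>; else goto <bb 4>;

   folds to a constant, which gimple_cond_set_condition_from_tree
   rewrites into a canonical always-true test; the edges themselves are
   left for CFG cleanup to remove.  */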
/* Replace a statement at *SI_P with a sequence of statements in STMTS,
   adjusting the replacement stmts' location and virtual operands.
   If the statement has a lhs the last stmt in the sequence is expected
   to assign to that lhs.  */

static void
gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
{
  gimple stmt = gsi_stmt (*si_p);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  gimple laststore = NULL;
  for (gimple_stmt_iterator i = gsi_last (stmts);
       !gsi_end_p (i); gsi_prev (&i))
    {
      gimple new_stmt = gsi_stmt (i);
      if ((gimple_assign_single_p (new_stmt)
	   && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	  || (is_gimple_call (new_stmt)
	      && (gimple_call_flags (new_stmt)
		  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
	{
	  tree vdef;
	  if (!laststore)
	    vdef = gimple_vdef (stmt);
	  else
	    vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
	  gimple_set_vdef (new_stmt, vdef);
	  if (vdef && TREE_CODE (vdef) == SSA_NAME)
	    SSA_NAME_DEF_STMT (vdef) = new_stmt;
	  laststore = new_stmt;
	}
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  tree reaching_vuse = gimple_vuse (stmt);
  for (gimple_stmt_iterator i = gsi_start (stmts);
       !gsi_end_p (i); gsi_next (&i))
    {
      gimple new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with exact SSA
	 name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
	gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
	reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
	  && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}
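
/* Illustrative example (not part of the original file): replacing one
   store with a two-store sequence,

     original:      *p_1 = x_2;        VUSE .MEM_1, VDEF .MEM_2
     replacement:   *p_1 = lo_3;       VUSE .MEM_1, fresh VDEF .MEM_4
		    *(p_1 + 4) = hi_5; VUSE .MEM_4, original VDEF .MEM_2

   so the first VUSE and the last VDEF of the sequence match the
   replaced statement, exactly as the two passes above arrange.  */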
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple stmt, new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  push_gimplify_context (gimple_in_ssa_p (cfun));

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  pop_gimplify_context (NULL);
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  gsi_replace (si_p, gimple_build_nop (), true);
	  return;
	}
    }
  else
    {
      tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
				       GSI_CONTINUE_LINKING);
    }

  pop_gimplify_context (NULL);

  gsi_replace_with_seq_vops (si_p, stmts);
}
/* Replace the call at *GSI with the gimple value VAL.  */

static void
replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
{
  gimple stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  gimple repl;
  if (lhs)
    {
      if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
	val = fold_convert (TREE_TYPE (lhs), val);
      repl = gimple_build_assign (lhs, val);
    }
  else
    repl = gimple_build_nop ();
  tree vdef = gimple_vdef (stmt);
  if (vdef && TREE_CODE (vdef) == SSA_NAME)
    {
      unlink_stmt_vdef (stmt);
      release_ssa_name (vdef);
    }
  gsi_replace (gsi, repl, true);
}
/* Replace the call at *GSI with the new call REPL and fold that
   again.  */

static void
replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple repl)
{
  gimple stmt = gsi_stmt (*gsi);
  gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
  gimple_set_location (repl, gimple_location (stmt));
  if (gimple_vdef (stmt)
      && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
    {
      gimple_set_vdef (repl, gimple_vdef (stmt));
      gimple_set_vuse (repl, gimple_vuse (stmt));
      SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
    }
  gsi_replace (gsi, repl, true);
  fold_stmt (gsi);
}
/* Return true if VAR is a VAR_DECL or a component thereof.  */

static bool
var_decl_component_p (tree var)
{
  tree inner = var;
  while (handled_component_p (inner))
    inner = TREE_OPERAND (inner, 0);
  return SSA_VAR_P (inner);
}
/* Fold function call to builtin mem{{,p}cpy,move}.  Return
   false if no simplification can be made.
   If ENDP is 0, return DEST (like memcpy).
   If ENDP is 1, return DEST+LEN (like mempcpy).
   If ENDP is 2, return DEST+LEN-1 (like stpcpy).
   If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
   (memmove).  */

static bool
gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
			       tree dest, tree src, int endp)
{
  gimple stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  tree len = gimple_call_arg (stmt, 2);
  tree destvar, srcvar;
  location_t loc = gimple_location (stmt);

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      gimple repl;
      if (gimple_call_lhs (stmt))
	repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
      else
	repl = gimple_build_nop ();
      tree vdef = gimple_vdef (stmt);
      if (vdef && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
      gsi_replace (gsi, repl, true);
      return true;
    }

  /* If SRC and DEST are the same (and not volatile), return
     DEST{,+LEN,+LEN-1}.  */
  if (operand_equal_p (src, dest, 0))
    {
      unlink_stmt_vdef (stmt);
      if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	release_ssa_name (gimple_vdef (stmt));
      if (!lhs)
	{
	  gsi_replace (gsi, gimple_build_nop (), true);
	  return true;
	}
      goto done;
    }
  else
    {
      tree srctype, desttype;
      unsigned int src_align, dest_align;
      tree off0;

      /* Build accesses at offset zero with a ref-all character type.  */
      off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
							 ptr_mode, true), 0);

      /* If we can perform the copy efficiently with first doing all loads
	 and then all stores inline it that way.  Currently efficiently
	 means that we can load all the memory into a single integer
	 register which is what MOVE_MAX gives us.  */
      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (tree_fits_uhwi_p (len)
	  && compare_tree_int (len, MOVE_MAX) <= 0
	  /* ???  Don't transform copies from strings with known length this
	     confuses the tree-ssa-strlen.c.  This doesn't handle
	     the case in gcc.dg/strlenopt-8.c which is XFAILed for that
	     reason.  */
	  && !c_strlen (src, 2))
	{
	  unsigned ilen = tree_to_uhwi (len);
	  if (exact_log2 (ilen) != -1)
	    {
	      tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
	      if (type
		  && TYPE_MODE (type) != BLKmode
		  && (GET_MODE_SIZE (TYPE_MODE (type)) * BITS_PER_UNIT
		      == ilen * 8)
		  /* If the destination pointer is not aligned we must be able
		     to emit an unaligned store.  */
		  && (dest_align >= GET_MODE_ALIGNMENT (TYPE_MODE (type))
		      || !SLOW_UNALIGNED_ACCESS (TYPE_MODE (type), dest_align)))
		{
		  tree srctype = type;
		  tree desttype = type;
		  if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
		    srctype = build_aligned_type (type, src_align);
		  tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
		  tree tem = fold_const_aggregate_ref (srcmem);
		  if (tem)
		    srcmem = tem;
		  else if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type))
			   && SLOW_UNALIGNED_ACCESS (TYPE_MODE (type),
						     src_align))
		    srcmem = NULL_TREE;
		  if (srcmem)
		    {
		      gimple new_stmt;
		      if (is_gimple_reg_type (TREE_TYPE (srcmem)))
			{
			  new_stmt = gimple_build_assign (NULL_TREE, srcmem);
			  if (gimple_in_ssa_p (cfun))
			    srcmem = make_ssa_name (TREE_TYPE (srcmem),
						    new_stmt);
			  else
			    srcmem = create_tmp_reg (TREE_TYPE (srcmem),
						     NULL);
			  gimple_assign_set_lhs (new_stmt, srcmem);
			  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
			}
		      if (dest_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
			desttype = build_aligned_type (type, dest_align);
		      new_stmt
			= gimple_build_assign (fold_build2 (MEM_REF, desttype,
							    dest, off0),
					       srcmem);
		      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
		      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
		      if (gimple_vdef (new_stmt)
			  && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
			SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
		      if (!lhs)
			{
			  gsi_replace (gsi, new_stmt, true);
			  return true;
			}
		      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
		      goto done;
		    }
		}
	    }
	}

      if (endp == 3)
	{
	  /* Both DEST and SRC must be pointer types.
	     ??? This is what old code did.  Is the testing for pointer types
	     really mandatory?

	     If either SRC is readonly or length is 1, we can use memcpy.  */
	  if (!dest_align || !src_align)
	    return false;
	  if (readonly_data_expr (src)
	      || (tree_fits_uhwi_p (len)
		  && (MIN (src_align, dest_align) / BITS_PER_UNIT
		      >= tree_to_uhwi (len))))
	    {
	      tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
	      if (!fn)
		return false;
	      gimple_call_set_fndecl (stmt, fn);
	      gimple_call_set_arg (stmt, 0, dest);
	      gimple_call_set_arg (stmt, 1, src);
	      fold_stmt (gsi);
	      return true;
	    }

	  /* If *src and *dest can't overlap, optimize into memcpy as well.  */
	  if (TREE_CODE (src) == ADDR_EXPR
	      && TREE_CODE (dest) == ADDR_EXPR)
	    {
	      tree src_base, dest_base, fn;
	      HOST_WIDE_INT src_offset = 0, dest_offset = 0;
	      HOST_WIDE_INT size = -1;
	      HOST_WIDE_INT maxsize = -1;

	      srcvar = TREE_OPERAND (src, 0);
	      src_base = get_ref_base_and_extent (srcvar, &src_offset,
						  &size, &maxsize);
	      destvar = TREE_OPERAND (dest, 0);
	      dest_base = get_ref_base_and_extent (destvar, &dest_offset,
						   &size, &maxsize);
	      if (tree_fits_uhwi_p (len))
		maxsize = tree_to_uhwi (len);
	      else
		maxsize = -1;
	      src_offset /= BITS_PER_UNIT;
	      dest_offset /= BITS_PER_UNIT;
	      if (SSA_VAR_P (src_base)
		  && SSA_VAR_P (dest_base))
		{
		  if (operand_equal_p (src_base, dest_base, 0)
		      && ranges_overlap_p (src_offset, maxsize,
					   dest_offset, maxsize))
		    return false;
		}
	      else if (TREE_CODE (src_base) == MEM_REF
		       && TREE_CODE (dest_base) == MEM_REF)
		{
		  if (! operand_equal_p (TREE_OPERAND (src_base, 0),
					 TREE_OPERAND (dest_base, 0), 0))
		    return false;
		  offset_int off = mem_ref_offset (src_base) + src_offset;
		  if (!wi::fits_shwi_p (off))
		    return false;
		  src_offset = off.to_shwi ();

		  off = mem_ref_offset (dest_base) + dest_offset;
		  if (!wi::fits_shwi_p (off))
		    return false;
		  dest_offset = off.to_shwi ();
		  if (ranges_overlap_p (src_offset, maxsize,
					dest_offset, maxsize))
		    return false;
		}
	      else
		return false;

	      fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
	      if (!fn)
		return false;
	      gimple_call_set_fndecl (stmt, fn);
	      gimple_call_set_arg (stmt, 0, dest);
	      gimple_call_set_arg (stmt, 1, src);
	      fold_stmt (gsi);
	      return true;
	    }

	  /* If the destination and source do not alias optimize into
	     memcpy as well.  */
	  if ((is_gimple_min_invariant (dest)
	       || TREE_CODE (dest) == SSA_NAME)
	      && (is_gimple_min_invariant (src)
		  || TREE_CODE (src) == SSA_NAME))
	    {
	      ao_ref destr, srcr;
	      ao_ref_init_from_ptr_and_size (&destr, dest, len);
	      ao_ref_init_from_ptr_and_size (&srcr, src, len);
	      if (!refs_may_alias_p_1 (&destr, &srcr, false))
		{
		  tree fn;
		  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
		  if (!fn)
		    return false;
		  gimple_call_set_fndecl (stmt, fn);
		  gimple_call_set_arg (stmt, 0, dest);
		  gimple_call_set_arg (stmt, 1, src);
		  fold_stmt (gsi);
		  return true;
		}
	    }

	  return false;
	}

      if (!tree_fits_shwi_p (len))
	return false;
      /* FIXME:
	 This logic loses for arguments like (type *)malloc (sizeof (type)),
	 since we strip the casts of up to VOID return value from malloc.
	 Perhaps we ought to inherit type from non-VOID argument here?  */
      STRIP_NOPS (src);
      STRIP_NOPS (dest);
      if (!POINTER_TYPE_P (TREE_TYPE (src))
	  || !POINTER_TYPE_P (TREE_TYPE (dest)))
	return false;
      /* In the following try to find a type that is most natural to be
	 used for the memcpy source and destination and that allows
	 the most optimization when memcpy is turned into a plain assignment
	 using that type.  In theory we could always use a char[len] type
	 but that only gains us that the destination and source possibly
	 no longer will have their address taken.  */
      /* As we fold (void *)(p + CST) to (void *)p + CST undo this here.  */
      if (TREE_CODE (src) == POINTER_PLUS_EXPR)
	{
	  tree tem = TREE_OPERAND (src, 0);
	  STRIP_NOPS (tem);
	  if (tem != TREE_OPERAND (src, 0))
	    src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
	}
      if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
	{
	  tree tem = TREE_OPERAND (dest, 0);
	  STRIP_NOPS (tem);
	  if (tem != TREE_OPERAND (dest, 0))
	    dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
	}
      srctype = TREE_TYPE (TREE_TYPE (src));
      if (TREE_CODE (srctype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
	{
	  srctype = TREE_TYPE (srctype);
	  STRIP_NOPS (src);
	  src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
	}
      desttype = TREE_TYPE (TREE_TYPE (dest));
      if (TREE_CODE (desttype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
	{
	  desttype = TREE_TYPE (desttype);
	  STRIP_NOPS (dest);
	  dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
	}
      if (TREE_ADDRESSABLE (srctype)
	  || TREE_ADDRESSABLE (desttype))
	return false;

      /* Make sure we are not copying using a floating-point mode or
	 a type whose size possibly does not match its precision.  */
      if (FLOAT_MODE_P (TYPE_MODE (desttype))
	  || TREE_CODE (desttype) == BOOLEAN_TYPE
	  || TREE_CODE (desttype) == ENUMERAL_TYPE)
	desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
      if (FLOAT_MODE_P (TYPE_MODE (srctype))
	  || TREE_CODE (srctype) == BOOLEAN_TYPE
	  || TREE_CODE (srctype) == ENUMERAL_TYPE)
	srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
      if (!srctype)
	srctype = desttype;
      if (!desttype)
	desttype = srctype;
      if (!srctype)
	return false;

      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (dest_align < TYPE_ALIGN (desttype)
	  || src_align < TYPE_ALIGN (srctype))
	return false;

      destvar = dest;
      STRIP_NOPS (destvar);
      if (TREE_CODE (destvar) == ADDR_EXPR
	  && var_decl_component_p (TREE_OPERAND (destvar, 0))
	  && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
	destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
      else
	destvar = NULL_TREE;

      srcvar = src;
      STRIP_NOPS (srcvar);
      if (TREE_CODE (srcvar) == ADDR_EXPR
	  && var_decl_component_p (TREE_OPERAND (srcvar, 0))
	  && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
	{
	  if (!destvar
	      || src_align >= TYPE_ALIGN (desttype))
	    srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
				  srcvar, off0);
	  else if (!STRICT_ALIGNMENT)
	    {
	      srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
					    src_align);
	      srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
	    }
	  else
	    srcvar = NULL_TREE;
	}
      else
	srcvar = NULL_TREE;

      if (srcvar == NULL_TREE && destvar == NULL_TREE)
	return false;

      if (srcvar == NULL_TREE)
	{
	  STRIP_NOPS (src);
	  if (src_align >= TYPE_ALIGN (desttype))
	    srcvar = fold_build2 (MEM_REF, desttype, src, off0);
	  else
	    {
	      if (STRICT_ALIGNMENT)
		return false;
	      srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
					    src_align);
	      srcvar = fold_build2 (MEM_REF, srctype, src, off0);
	    }
	}
      else if (destvar == NULL_TREE)
	{
	  STRIP_NOPS (dest);
	  if (dest_align >= TYPE_ALIGN (srctype))
	    destvar = fold_build2 (MEM_REF, srctype, dest, off0);
	  else
	    {
	      if (STRICT_ALIGNMENT)
		return false;
	      desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
					     dest_align);
	      destvar = fold_build2 (MEM_REF, desttype, dest, off0);
	    }
	}

      gimple new_stmt;
      if (is_gimple_reg_type (TREE_TYPE (srcvar)))
	{
	  new_stmt = gimple_build_assign (NULL_TREE, srcvar);
	  if (gimple_in_ssa_p (cfun))
	    srcvar = make_ssa_name (TREE_TYPE (srcvar), new_stmt);
	  else
	    srcvar = create_tmp_reg (TREE_TYPE (srcvar), NULL);
	  gimple_assign_set_lhs (new_stmt, srcvar);
	  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
	  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
	}
      new_stmt = gimple_build_assign (destvar, srcvar);
      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
      if (gimple_vdef (new_stmt)
	  && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
	SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
      if (!lhs)
	{
	  gsi_replace (gsi, new_stmt, true);
	  return true;
	}
      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
    }

done:
  if (endp == 0 || endp == 3)
    len = NULL_TREE;
  else if (endp == 2)
    len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
			   ssize_int (1));
  if (endp == 2 || endp == 1)
    dest = fold_build_pointer_plus_loc (loc, dest, len);

  dest = force_gimple_operand_gsi (gsi, dest, false, NULL_TREE, true,
				   GSI_SAME_STMT);
  gimple repl = gimple_build_assign (lhs, dest);
  gsi_replace (gsi, repl, true);
  return true;
}
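
/* Illustrative example (not part of the original file): on a target
   where MOVE_MAX is at least 4,

     memcpy (&dst, &src, 4);

   is inlined above as a single load followed by a single store,

     tmp_1 = MEM[(unsigned int *)&src];
     MEM[(unsigned int *)&dst] = tmp_1;

   while the mempcpy/stpcpy forms (ENDP 1 and 2) additionally assign
   DEST+LEN resp. DEST+LEN-1 to the lhs at the 'done' label.  */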
/* Fold function call to builtin memset or bzero at *GSI setting the
   memory of size LEN to VAL.  Return whether a simplification was made.  */

static bool
gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
{
  gimple stmt = gsi_stmt (*gsi);
  tree etype;
  unsigned HOST_WIDE_INT length, cval;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
      return true;
    }

  if (! tree_fits_uhwi_p (len))
    return false;

  if (TREE_CODE (c) != INTEGER_CST)
    return false;

  tree dest = gimple_call_arg (stmt, 0);
  tree var = dest;
  if (TREE_CODE (var) != ADDR_EXPR)
    return false;

  var = TREE_OPERAND (var, 0);
  if (TREE_THIS_VOLATILE (var))
    return false;

  etype = TREE_TYPE (var);
  if (TREE_CODE (etype) == ARRAY_TYPE)
    etype = TREE_TYPE (etype);

  if (!INTEGRAL_TYPE_P (etype)
      && !POINTER_TYPE_P (etype))
    return false;

  if (! var_decl_component_p (var))
    return false;

  length = tree_to_uhwi (len);
  if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
      || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
    return false;

  if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
    return false;

  if (integer_zerop (c))
    cval = 0;
  else
    {
      if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
	return false;

      cval = TREE_INT_CST_LOW (c);
      cval &= 0xff;
      cval |= cval << 8;
      cval |= cval << 16;
      cval |= (cval << 31) << 1;
    }

  var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
  gimple store = gimple_build_assign (var, build_int_cst_type (etype, cval));
  gimple_set_vuse (store, gimple_vuse (stmt));
  tree vdef = gimple_vdef (stmt);
  if (vdef && TREE_CODE (vdef) == SSA_NAME)
    {
      gimple_set_vdef (store, gimple_vdef (stmt));
      SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
    }
  gsi_insert_before (gsi, store, GSI_SAME_STMT);
  if (gimple_call_lhs (stmt))
    {
      gimple asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
      gsi_replace (gsi, asgn, true);
    }
  else
    {
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_next (&gsi2);
      gsi_remove (&gsi2, true);
    }

  return true;
}
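
/* Illustrative example (not part of the original file): for a 4-byte,
   suitably aligned destination,

     memset (&i, 0x2a, 4);

   becomes the single store

     MEM[(int *)&i] = 0x2a2a2a2a;

   where the replicated byte is built by the shift cascade on CVAL.  */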
/* Return the string length, maximum string length or maximum value of
   ARG in LENGTH.
   If ARG is an SSA name variable, follow its use-def chains.  If LENGTH
   is not NULL and, for TYPE == 0, its value is not equal to the length
   we determine or if we are unable to determine the length or value,
   return false.  VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be returned, 1 for maximum string
   length and 2 for maximum value ARG can have.  */

static bool
get_maxval_strlen (tree arg, tree *length, bitmap *visited, int type)
{
  tree var, val;
  gimple def_stmt;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      if (TREE_CODE (arg) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
	  && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
	{
	  tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_maxval_strlen (TREE_OPERAND (aop0, 0),
				      length, visited, type);
	}

      if (type == 2)
	{
	  val = arg;
	  if (TREE_CODE (val) != INTEGER_CST
	      || tree_int_cst_sgn (val) < 0)
	    return false;
	}
      else
	val = c_strlen (arg, 1);
      if (!val)
	return false;

      if (*length)
	{
	  if (type > 0)
	    {
	      /* If we already have a length, it must be the same.  */
	      if (TREE_CODE (*length) != INTEGER_CST
		  || TREE_CODE (val) != INTEGER_CST)
		return false;

	      if (tree_int_cst_lt (*length, val))
		*length = val;
	      return true;
	    }
	  else if (simple_cst_equal (val, *length) != 1)
	    return false;
	}

      *length = val;
      return true;
    }

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  */
  if (!*visited)
    *visited = BITMAP_ALLOC (NULL);
  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
      case GIMPLE_ASSIGN:
	/* The RHS of the statement defining VAR must either have a
	   constant length or come from another SSA_NAME with a constant
	   length.  */
	if (gimple_assign_single_p (def_stmt)
	    || gimple_assign_unary_nop_p (def_stmt))
	  {
	    tree rhs = gimple_assign_rhs1 (def_stmt);
	    return get_maxval_strlen (rhs, length, visited, type);
	  }
	else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
	  {
	    tree op2 = gimple_assign_rhs2 (def_stmt);
	    tree op3 = gimple_assign_rhs3 (def_stmt);
	    return get_maxval_strlen (op2, length, visited, type)
		   && get_maxval_strlen (op3, length, visited, type);
	  }
	return false;

      case GIMPLE_PHI:
	{
	  /* All the arguments of the PHI node must have the same constant
	     length.  */
	  unsigned i;

	  for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
	    {
	      tree arg = gimple_phi_arg (def_stmt, i)->def;

	      /* If this PHI has itself as an argument, we cannot
		 determine the string length of this argument.  However,
		 if we can find a constant string length for the other
		 PHI args then we can still be sure that this is a
		 constant string length.  So be optimistic and just
		 continue with the next argument.  */
	      if (arg == gimple_phi_result (def_stmt))
		continue;

	      if (!get_maxval_strlen (arg, length, visited, type))
		return false;
	    }
	}
	return true;

      default:
	return false;
    }
}
tree
get_maxval_strlen (tree arg, int type)
{
  bitmap visited = NULL;
  tree len = NULL_TREE;
  if (!get_maxval_strlen (arg, &len, &visited, type))
    len = NULL_TREE;
  if (visited)
    BITMAP_FREE (visited);

  return len;
}
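
/* Illustrative example (not part of the original file): for

     s_3 = PHI <"ab", "cdef">

   get_maxval_strlen (s_3, 0) fails because the two lengths 2 and 4
   differ, while type 1 (maximum string length) walks both PHI
   arguments and returns 4.  */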
/* Fold function call to builtin strcpy with arguments DEST and SRC.
   If LEN is not NULL, it represents the length of the string to be
   copied.  Return false if no simplification can be made.  */

static bool
gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
			    tree dest, tree src)
{
  location_t loc = gimple_location (gsi_stmt (*gsi));
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (operand_equal_p (src, dest, 0))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (optimize_function_for_size_p (cfun))
    return false;

  /* If length is not known, do not transform.  */
  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  tree len = get_maxval_strlen (src, 0);
  if (!len)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
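
/* Illustrative example (not part of the original file): with a known
   source length,

     strcpy (dest, "abc");

   becomes memcpy (dest, "abc", 4), the length above being computed as
   strlen ("abc") + 1 so the NUL byte is copied too.  */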
/* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
   If SLEN is not NULL, it represents the length of the source string.
   Return false if no simplification can be made.  */

static bool
gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
			     tree dest, tree src, tree len)
{
  location_t loc = gimple_location (gsi_stmt (*gsi));
  tree fn;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  /* We can't compare slen with len as constants below if len is not a
     constant.  */
  if (TREE_CODE (len) != INTEGER_CST)
    return false;

  /* Now, we must be passed a constant src ptr parameter.  */
  tree slen = get_maxval_strlen (src, 0);
  if (!slen || TREE_CODE (slen) != INTEGER_CST)
    return false;

  slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

  /* We do not support simplification of this case, though we do
     support it when expanding trees into RTL.  */
  /* FIXME: generate a call to __builtin_memset.  */
  if (tree_int_cst_lt (slen, len))
    return false;

  /* OK transform into builtin memcpy.  */
  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
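
/* Illustrative example (not part of the original file): since
   strlen ("abc") + 1 == 4 is not smaller than the bound,

     strncpy (dest, "abc", 3);

   becomes memcpy (dest, "abc", 3); a bound larger than the source
   length is left alone because the trailing NUL padding would need a
   memset, per the FIXME above.  */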
/* Simplify a call to the strcat builtin.  DST and SRC are the arguments
   to the call.

   Return false if no simplification was possible, otherwise transform
   the call in place and return true.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).  */

static bool
gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);

  const char *p = c_getstr (src);

  /* If the string length is zero, return the dst parameter.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
    return false;

  /* See if we can store by pieces into (dst + strlen(dst)).  */
  tree newdst;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
  tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);

  if (!strlen_fn || !memcpy_fn)
    return false;

  /* If the length of the source string isn't computable don't
     split strcat into strlen and memcpy.  */
  tree len = get_maxval_strlen (src, 0);
  if (! len)
    return false;

  /* Create strlen (dst).  */
  gimple_seq stmts = NULL, stmts2;
  gimple repl = gimple_build_call (strlen_fn, 1, dst);
  gimple_set_location (repl, loc);
  if (gimple_in_ssa_p (cfun))
    newdst = make_ssa_name (size_type_node, NULL);
  else
    newdst = create_tmp_reg (size_type_node, NULL);
  gimple_call_set_lhs (repl, newdst);
  gimple_seq_add_stmt_without_update (&stmts, repl);

  /* Create (dst p+ strlen (dst)).  */
  newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
  newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len,
			build_int_cst (size_type_node, 1));
  len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
  gimple_seq_add_stmt_without_update (&stmts, repl);
  if (gimple_call_lhs (stmt))
    {
      repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      gsi_replace_with_seq_vops (gsi, stmts);
      /* gsi now points at the assignment to the lhs, get a
	 stmt iterator to the memcpy call.
	 ???  We can't use gsi_for_stmt as that doesn't work when the
	 CFG isn't built yet.  */
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (&gsi2);
      fold_stmt (&gsi2);
    }
  else
    {
      gsi_replace_with_seq_vops (gsi, stmts);
      fold_stmt (gsi);
    }
  return true;
}
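
/* Illustrative example (not part of the original file): when the block
   is optimized for speed,

     strcat (dst, "xy");

   is expanded above into the sequence

     _1 = strlen (dst);
     _2 = dst + _1;
     memcpy (_2, "xy", 3);

   with dst itself assigned to the original call's lhs, if any.  */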
/* Fold a call to the __strcat_chk builtin FNDECL.  DEST, SRC, and SIZE
   are the arguments to the call.  */

static bool
gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree size = gimple_call_arg (stmt, 2);
  tree fn;
  const char *p;

  p = c_getstr (src);
  /* If the SRC parameter is "", return DEST.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
    return false;

  /* If __builtin_strcat_chk is used, assume strcat is available.  */
  fn = builtin_decl_explicit (BUILT_IN_STRCAT);
  if (!fn)
    return false;

  gimple repl = gimple_build_call (fn, 2, dest, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
   LEN, and SIZE.  */

static bool
gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);
  tree size = gimple_call_arg (stmt, 3);
  tree fn;
  const char *p;

  p = c_getstr (src);
  /* If the SRC parameter is "" or if LEN is 0, return DEST.  */
  if ((p && *p == '\0')
      || integer_zerop (len))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  if (! integer_all_onesp (size))
    {
      tree src_len = c_strlen (src, 1);
      if (src_len
	  && tree_fits_uhwi_p (src_len)
	  && tree_fits_uhwi_p (len)
	  && ! tree_int_cst_lt (len, src_len))
	{
	  /* If LEN >= strlen (SRC), optimize into __strcat_chk.  */
	  fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
	  if (!fn)
	    return false;

	  gimple repl = gimple_build_call (fn, 3, dest, src, size);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
      return false;
    }

  /* If __builtin_strncat_chk is used, assume strncat is available.  */
  fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
  if (!fn)
    return false;

  gimple repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold a call to the fputs builtin.  ARG0 and ARG1 are the arguments
   to the call.  IGNORE is true if the value returned
   by the builtin will be ignored.  UNLOCKED is true if this
   is actually a call to fputs_unlocked.  If LEN is non-NULL, it represents
   the known length of the string.  Return false if no simplification
   can be made.  */

static bool
gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
			   tree arg0, tree arg1,
			   bool unlocked)
{
  gimple stmt = gsi_stmt (*gsi);

  /* If we're using an unlocked function, assume the other unlocked
     functions exist explicitly.  */
  tree const fn_fputc = (unlocked
			 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
			 : builtin_decl_implicit (BUILT_IN_FPUTC));
  tree const fn_fwrite = (unlocked
			  ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
			  : builtin_decl_implicit (BUILT_IN_FWRITE));

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt))
    return false;

  /* Get the length of the string passed to fputs.  If the length
     can't be determined, punt.  */
  tree len = get_maxval_strlen (arg0, 0);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return false;

  switch (compare_tree_int (len, 1))
    {
    case -1: /* length is 0, delete the call entirely.  */
      replace_call_with_value (gsi, integer_zero_node);
      return true;

    case 0: /* length is 1, call fputc.  */
      {
	const char *p = c_getstr (arg0);
	if (p != NULL)
	  {
	    if (!fn_fputc)
	      return false;

	    gimple repl = gimple_build_call (fn_fputc, 2,
					     build_int_cst
					       (integer_type_node, p[0]), arg1);
	    replace_call_with_call_and_fold (gsi, repl);
	    return true;
	  }
      }
      /* FALLTHROUGH */
    case 1: /* length is greater than 1, call fwrite.  */
      {
	/* If optimizing for size keep fputs.  */
	if (optimize_function_for_size_p (cfun))
	  return false;
	/* New argument list transforming fputs(string, stream) to
	   fwrite(string, 1, len, stream).  */
	if (!fn_fwrite)
	  return false;

	gimple repl = gimple_build_call (fn_fwrite, 4, arg0,
					 size_one_node, len, arg1);
	replace_call_with_call_and_fold (gsi, repl);
	return true;
      }
    default:
      gcc_unreachable ();
    }
  return false;
}
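
/* Illustrative examples of the three cases above, when the fputs
   result is unused (not part of the original file):

     fputs ("", f);       becomes the constant 0 (call deleted)
     fputs ("h", f);      becomes fputc ('h', f)
     fputs ("hello", f);  becomes fwrite ("hello", 1, 5, f)

   the fwrite form being skipped when optimizing for size.  */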
/* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   DEST, SRC, LEN, and SIZE are the arguments to the call.
   IGNORE is true if return value can be ignored.  FCODE is the BUILT_IN_*
   code of the builtin.  If MAXLEN is not NULL, it is maximum length
   passed as third argument.  */

static bool
gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
				tree dest, tree src, tree len, tree size,
				enum built_in_function fcode)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST
     (resp. DEST+LEN for __mempcpy_chk).  */
  if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
    {
      if (fcode != BUILT_IN_MEMPCPY_CHK)
	{
	  replace_call_with_value (gsi, dest);
	  return true;
	}
      else
	{
	  tree temp = fold_build_pointer_plus_loc (loc, dest, len);
	  temp = force_gimple_operand_gsi (gsi, temp,
					   false, NULL_TREE, true,
					   GSI_SAME_STMT);
	  replace_call_with_value (gsi, temp);
	  return true;
	}
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (len, 2);
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
		{
		  /* (void) __mempcpy_chk () can be optimized into
		     (void) __memcpy_chk ().  */
		  fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
		  if (!fn)
		    return false;

		  gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}
	      return false;
	    }
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  fn = NULL_TREE;
  /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
     mem{cpy,pcpy,move,set} is available.  */
  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
      break;
    case BUILT_IN_MEMPCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
      break;
    case BUILT_IN_MEMMOVE_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
      break;
    case BUILT_IN_MEMSET_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMSET);
      break;
    default:
      break;
    }

  if (!fn)
    return false;

  gimple repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
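
/* Illustrative example (not part of the original file): with a known
   object size,

     __memcpy_chk (dst, src, 10, 16);

   passes the check above (16 is not less than 10) and becomes
   memcpy (dst, src, 10); a SIZE of (size_t)-1 means "unknown" and is
   accepted unconditionally.  */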
/* Fold a call to the __st[rp]cpy_chk builtin.
   DEST, SRC, and SIZE are the arguments to the call.
   IGNORE is true if return value can be ignored.  FCODE is the BUILT_IN_*
   code of the builtin.  If MAXLEN is not NULL, it is maximum length of
   strings passed as second argument.  */

static bool
gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
				tree dest,
				tree src, tree size,
				enum built_in_function fcode)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree len, fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (src, 1);
  if (! integer_all_onesp (size))
    {
      len = c_strlen (src, 1);
      if (! len || ! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_STPCPY_CHK)
		{
		  if (! ignore)
		    return false;

		  /* If return value of __stpcpy_chk is ignored,
		     optimize into __strcpy_chk.  */
		  fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
		  if (!fn)
		    return false;

		  gimple repl = gimple_build_call (fn, 3, dest, src, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}

	      if (! len || TREE_SIDE_EFFECTS (len))
		return false;

	      /* If c_strlen returned something, but not a constant,
		 transform __strcpy_chk into __memcpy_chk.  */
	      fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	      if (!fn)
		return false;

	      len = fold_convert_loc (loc, size_type_node, len);
	      len = size_binop_loc (loc, PLUS_EXPR, len,
				    build_int_cst (size_type_node, 1));
	      len = force_gimple_operand_gsi (gsi, len, true, NULL_TREE,
					      true, GSI_SAME_STMT);
	      gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }
	}
      else
	maxlen = len;

      if (! tree_int_cst_lt (maxlen, size))
	return false;
    }

  /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
			      ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
  if (!fn)
    return false;

  gimple repl = gimple_build_call (fn, 2, dest, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold a call to the __st{r,p}ncpy_chk builtin.  DEST, SRC, LEN, and SIZE
   are the arguments to the call.  If MAXLEN is not NULL, it is maximum
   length passed as third argument.  IGNORE is true if return value can be
   ignored.  FCODE is the BUILT_IN_* code of the builtin.  */

static bool
gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
				 tree dest, tree src,
				 tree len, tree size,
				 enum built_in_function fcode)
{
  gimple stmt = gsi_stmt (*gsi);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
    {
      /* If return value of __stpncpy_chk is ignored,
	 optimize into __strncpy_chk.  */
      fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
      if (fn)
	{
	  gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (len, 2);
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    return false;
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
			      ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
  if (!fn)
    return false;

  gimple repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS.  Return
   false if a normal call should be emitted rather than expanding
   the function inline.  FCODE is either BUILT_IN_SNPRINTF_CHK or
   BUILT_IN_VSNPRINTF_CHK.  If MAXLEN is not NULL, it is maximum length
   passed as second argument.  */

static bool
gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
				  enum built_in_function fcode)
{
  gimple stmt = gsi_stmt (*gsi);
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call.  */
  if (gimple_call_num_args (stmt) < 5)
    return false;

  dest = gimple_call_arg (stmt, 0);
  len = gimple_call_arg (stmt, 1);
  flag = gimple_call_arg (stmt, 2);
  size = gimple_call_arg (stmt, 3);
  fmt = gimple_call_arg (stmt, 4);

  if (! tree_fits_uhwi_p (size))
    return false;

  if (! integer_all_onesp (size))
    {
      tree maxlen = get_maxval_strlen (len, 2);
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    return false;
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  if (!init_target_chars ())
    return false;

  /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      fmt_str = c_getstr (fmt);
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
     available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
			      ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 5 argument by 3 retaining
     trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, len);
  gimple_call_set_arg (stmt, 2, fmt);
  for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
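
/* Illustrative example (not part of the original file): with a
   constant bound not exceeding the known object size,

     __snprintf_chk (buf, 8, 0, 16, "%d", x);

   becomes snprintf (buf, 8, "%d", x), keeping the trailing varargs
   while dropping the flag and size arguments as described above.  */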

/* Fold a call to __{,v}sprintf_chk at *GSI.  Return true if the call
   was simplified in place, false if a normal call should be emitted
   instead.  FCODE is either BUILT_IN_SPRINTF_CHK or
   BUILT_IN_VSPRINTF_CHK.  */

static bool
gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
				 enum built_in_function fcode)
{
  gimple stmt = gsi_stmt (*gsi);
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;
  unsigned nargs = gimple_call_num_args (stmt);

  /* Verify the required arguments in the original call.  */
  if (nargs < 4)
    return false;
  dest = gimple_call_arg (stmt, 0);
  flag = gimple_call_arg (stmt, 1);
  size = gimple_call_arg (stmt, 2);
  fmt = gimple_call_arg (stmt, 3);

  if (! tree_fits_uhwi_p (size))
    return false;

  len = NULL_TREE;

  if (!init_target_chars ())
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str != NULL)
    {
      /* If the format doesn't contain % args or %%, we know the size.  */
      if (strchr (fmt_str, target_percent) == 0)
	{
	  if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
	    len = build_int_cstu (size_type_node, strlen (fmt_str));
	}
      /* If the format is "%s" and first ... argument is a string literal,
	 we know the size too.  */
      else if (fcode == BUILT_IN_SPRINTF_CHK
	       && strcmp (fmt_str, target_percent_s) == 0)
	{
	  tree arg;

	  if (nargs == 5)
	    {
	      arg = gimple_call_arg (stmt, 4);
	      if (POINTER_TYPE_P (TREE_TYPE (arg)))
		{
		  len = c_strlen (arg, 1);
		  if (! len || ! tree_fits_uhwi_p (len))
		    len = NULL_TREE;
		}
	    }
	}
    }

  if (! integer_all_onesp (size))
    {
      if (! len || ! tree_int_cst_lt (len, size))
	return false;
    }

  /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      if (fmt_str == NULL)
	return false;
      if (strchr (fmt_str, target_percent) != NULL
	  && strcmp (fmt_str, target_percent_s))
	return false;
    }

  /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
			      ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 4 arguments with 2,
     retaining trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, fmt);
  for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
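
/* Editorial illustration (a sketch added for this revision, not part of
   the original source):

     __builtin___sprintf_chk (buf, 0, 16, "hello");

   is rewritten in place to

     sprintf (buf, "hello");

   because the format contains no '%' and the computed length (5) is
   provably smaller than the object size (16).  */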

/* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
   ORIG may be null if this is a 2-argument call.  We don't attempt to
   simplify calls with more than 3 arguments.

   Return true if the call was simplified in place, false if a normal
   call should be emitted instead.  */

static bool
gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree fmt = gimple_call_arg (stmt, 1);
  tree orig = NULL_TREE;
  const char *fmt_str = NULL;

  /* Verify the required arguments in the original call.  We deal with two
     types of sprintf() calls: 'sprintf (str, fmt)' and
     'sprintf (dest, "%s", orig)'.  */
  if (gimple_call_num_args (stmt) > 3)
    return false;

  if (gimple_call_num_args (stmt) == 3)
    orig = gimple_call_arg (stmt, 2);

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);

      if (!fn)
	return false;

      /* Don't optimize sprintf (buf, "abc", ptr++).  */
      if (orig)
	return false;

      /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
	 'format' is known to contain no % formats.  */
      gimple_seq stmts = NULL;
      gimple repl = gimple_build_call (fn, 2, dest, fmt);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  repl = gimple_build_assign (gimple_call_lhs (stmt),
				      build_int_cst (integer_type_node,
						     strlen (fmt_str)));
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the strcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn;
      fn = builtin_decl_implicit (BUILT_IN_STRCPY);

      if (!fn)
	return false;

      /* Don't crash on sprintf (str1, "%s").  */
      if (!orig)
	return false;

      tree orig_len = NULL_TREE;
      if (gimple_call_lhs (stmt))
	{
	  orig_len = get_maxval_strlen (orig, 0);
	  if (!orig_len)
	    return false;
	}

      /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2).  */
      gimple_seq stmts = NULL;
      gimple repl = gimple_build_call (fn, 2, dest, orig);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  if (!useless_type_conversion_p (integer_type_node,
					  TREE_TYPE (orig_len)))
	    orig_len = fold_convert (integer_type_node, orig_len);
	  repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the strcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }
  return false;
}
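
/* Editorial illustration (a sketch added for this revision, not part of
   the original source):

     n = sprintf (buf, "hello");     becomes   strcpy (buf, "hello"); n = 5;
     n = sprintf (buf, "%s", str);   becomes   strcpy (buf, str); n = len;

   where the second form is only used when the length of STR is known,
   so the return value can be materialized as a constant.  */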

/* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
   FMT, and ORIG.  ORIG may be null if this is a 3-argument call.  We don't
   attempt to simplify calls with more than 4 arguments.

   Return true if the call was simplified in place, false if a normal
   call should be emitted instead.  */

static bool
gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree destsize = gimple_call_arg (stmt, 1);
  tree fmt = gimple_call_arg (stmt, 2);
  tree orig = NULL_TREE;
  const char *fmt_str = NULL;

  if (gimple_call_num_args (stmt) > 4)
    return false;

  if (gimple_call_num_args (stmt) == 4)
    orig = gimple_call_arg (stmt, 3);

  if (!tree_fits_uhwi_p (destsize))
    return false;
  unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;

      /* Don't optimize snprintf (buf, 4, "abc", ptr++).  */
      if (orig)
	return false;

      /* We could expand this as
	 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
	 or to
	 memcpy (str, fmt_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      size_t len = strlen (fmt_str);
      if (len >= destlen)
	return false;

      gimple_seq stmts = NULL;
      gimple repl = gimple_build_call (fn, 2, dest, fmt);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  repl = gimple_build_assign (gimple_call_lhs (stmt),
				      build_int_cst (integer_type_node, len));
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the strcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
	return false;

      /* Don't crash on snprintf (str1, cst, "%s").  */
      if (!orig)
	return false;

      tree orig_len = get_maxval_strlen (orig, 0);
      if (!orig_len)
	return false;

      /* We could expand this as
	 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
	 or to
	 memcpy (str1, str2_with_nul_at_cstm1, cst);
	 but in the former case that might increase code size
	 and in the latter case grow .rodata section too much.
	 So punt for now.  */
      if (compare_tree_int (orig_len, destlen) >= 0)
	return false;

      /* Convert snprintf (str1, cst, "%s", str2) into
	 strcpy (str1, str2) if strlen (str2) < cst.  */
      gimple_seq stmts = NULL;
      gimple repl = gimple_build_call (fn, 2, dest, orig);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
	{
	  if (!useless_type_conversion_p (integer_type_node,
					  TREE_TYPE (orig_len)))
	    orig_len = fold_convert (integer_type_node, orig_len);
	  repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
	  gimple_seq_add_stmt_without_update (&stmts, repl);
	  gsi_replace_with_seq_vops (gsi, stmts);
	  /* gsi now points at the assignment to the lhs, get a
	     stmt iterator to the strcpy call.
	     ???  We can't use gsi_for_stmt as that doesn't work when the
	     CFG isn't built yet.  */
	  gimple_stmt_iterator gsi2 = *gsi;
	  gsi_prev (&gsi2);
	  fold_stmt (&gsi2);
	}
      else
	{
	  gsi_replace_with_seq_vops (gsi, stmts);
	  fold_stmt (gsi);
	}
      return true;
    }
  return false;
}
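
/* Editorial illustration (a sketch added for this revision, not part of
   the original source): when the destination size is a known constant
   larger than the copied string,

     snprintf (buf, 16, "abc");        becomes   strcpy (buf, "abc");
     snprintf (buf, 16, "%s", str);    becomes   strcpy (buf, str);

   the latter only if strlen (str) is known to be smaller than 16, so
   that truncation provably cannot happen.  */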

/* Fold a call to __builtin_strlen at *GSI if the length of its argument
   can be computed.  */

static bool
gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree len = get_maxval_strlen (gimple_call_arg (stmt, 0), 0);
  if (!len)
    return false;
  len = force_gimple_operand_gsi (gsi, len, true, NULL, true, GSI_SAME_STMT);
  replace_call_with_value (gsi, len);
  return true;
}
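
/* Editorial illustration (a sketch added for this revision, not part of
   the original source):

     n = __builtin_strlen ("abcd");

   is replaced by the constant assignment

     n = 4;

   via replace_call_with_value once get_maxval_strlen determines the
   length of the argument.  */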

/* Fold the non-target builtin at *GSI and return whether any simplification
   was made.  */

static bool
gimple_fold_builtin (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree callee = gimple_call_fndecl (stmt);

  /* Give up for always_inline inline builtins until they are
     inlined.  */
  if (avoid_folding_inline_builtin (callee))
    return false;

  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_BZERO:
      return gimple_fold_builtin_memset (gsi, integer_zero_node,
					 gimple_call_arg (stmt, 1));
    case BUILT_IN_MEMSET:
      return gimple_fold_builtin_memset (gsi,
					 gimple_call_arg (stmt, 1),
					 gimple_call_arg (stmt, 2));
    case BUILT_IN_BCOPY:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 1),
					    gimple_call_arg (stmt, 0), 3);
    case BUILT_IN_MEMCPY:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1), 0);
    case BUILT_IN_MEMPCPY:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1), 1);
    case BUILT_IN_MEMMOVE:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1), 3);
    case BUILT_IN_SPRINTF_CHK:
    case BUILT_IN_VSPRINTF_CHK:
      return gimple_fold_builtin_sprintf_chk (gsi, DECL_FUNCTION_CODE (callee));
    case BUILT_IN_STRCAT_CHK:
      return gimple_fold_builtin_strcat_chk (gsi);
    case BUILT_IN_STRNCAT_CHK:
      return gimple_fold_builtin_strncat_chk (gsi);
    case BUILT_IN_STRLEN:
      return gimple_fold_builtin_strlen (gsi);
    case BUILT_IN_STRCPY:
      return gimple_fold_builtin_strcpy (gsi,
					 gimple_call_arg (stmt, 0),
					 gimple_call_arg (stmt, 1));
    case BUILT_IN_STRNCPY:
      return gimple_fold_builtin_strncpy (gsi,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2));
    case BUILT_IN_STRCAT:
      return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
					 gimple_call_arg (stmt, 1));
    case BUILT_IN_FPUTS:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1), false);
    case BUILT_IN_FPUTS_UNLOCKED:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1), true);
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      return gimple_fold_builtin_memory_chk (gsi,
					     gimple_call_arg (stmt, 0),
					     gimple_call_arg (stmt, 1),
					     gimple_call_arg (stmt, 2),
					     gimple_call_arg (stmt, 3),
					     DECL_FUNCTION_CODE (callee));
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      return gimple_fold_builtin_stxcpy_chk (gsi,
					     gimple_call_arg (stmt, 0),
					     gimple_call_arg (stmt, 1),
					     gimple_call_arg (stmt, 2),
					     DECL_FUNCTION_CODE (callee));
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      return gimple_fold_builtin_stxncpy_chk (gsi,
					      gimple_call_arg (stmt, 0),
					      gimple_call_arg (stmt, 1),
					      gimple_call_arg (stmt, 2),
					      gimple_call_arg (stmt, 3),
					      DECL_FUNCTION_CODE (callee));
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      return gimple_fold_builtin_snprintf_chk (gsi,
					       DECL_FUNCTION_CODE (callee));
    case BUILT_IN_SNPRINTF:
      return gimple_fold_builtin_snprintf (gsi);
    case BUILT_IN_SPRINTF:
      return gimple_fold_builtin_sprintf (gsi);
    default:;
    }

  /* Try the generic builtin folder.  */
  bool ignore = (gimple_call_lhs (stmt) == NULL);
  tree result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
	STRIP_NOPS (result);
      else
	result = fold_convert (gimple_call_return_type (stmt), result);
      if (!update_call_from_tree (gsi, result))
	gimplify_and_update_call_from_tree (gsi, result);
      return true;
    }

  return false;
}
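
/* Editorial note (added for this revision, not part of the original
   source): the dispatch above also canonicalizes legacy BSD interfaces.
   For instance bcopy (src, dest, n) goes through
   gimple_fold_builtin_memory_op with its first two arguments swapped,
   giving it the same treatment as memmove (dest, src, n).  */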

/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.  */

static bool
gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
{
  gimple stmt = gsi_stmt (*gsi);
  tree callee;
  bool changed = false;
  unsigned i;

  /* Fold *& in call arguments.  */
  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
      {
	tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
	if (tmp)
	  {
	    gimple_call_set_arg (stmt, i, tmp);
	    changed = true;
	  }
      }

  /* Check for virtual calls that became direct calls.  */
  callee = gimple_call_fn (stmt);
  if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
    {
      if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
	{
	  if (dump_file && virtual_method_call_p (callee)
	      && !possible_polymorphic_call_target_p
		    (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
						 (OBJ_TYPE_REF_EXPR (callee)))))
	    {
	      fprintf (dump_file,
		       "Type inheritance inconsistent devirtualization of ");
	      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, callee, TDF_SLIM);
	      fprintf (dump_file, "\n");
	    }

	  gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
	  changed = true;
	}
      else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
	{
	  bool final;
	  vec <cgraph_node *>targets
	    = possible_polymorphic_call_targets (callee, stmt, &final);
	  if (final && targets.length () <= 1 && dbg_cnt (devirt))
	    {
	      tree lhs = gimple_call_lhs (stmt);
	      if (dump_enabled_p ())
		{
		  location_t loc = gimple_location_safe (stmt);
		  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
				   "folding virtual function call to %s\n",
				   targets.length () == 1
				   ? targets[0]->name ()
				   : "__builtin_unreachable");
		}
	      if (targets.length () == 1)
		{
		  gimple_call_set_fndecl (stmt, targets[0]->decl);
		  changed = true;
		  /* If the call becomes noreturn, remove the lhs.  */
		  if (lhs && (gimple_call_flags (stmt) & ECF_NORETURN))
		    {
		      if (TREE_CODE (lhs) == SSA_NAME)
			{
			  tree var = create_tmp_var (TREE_TYPE (lhs), NULL);
			  tree def = get_or_create_ssa_default_def (cfun, var);
			  gimple new_stmt = gimple_build_assign (lhs, def);
			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
			}
		      gimple_call_set_lhs (stmt, NULL_TREE);
		    }
		}
	      else
		{
		  tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
		  gimple new_stmt = gimple_build_call (fndecl, 0);
		  gimple_set_location (new_stmt, gimple_location (stmt));
		  if (lhs && TREE_CODE (lhs) == SSA_NAME)
		    {
		      tree var = create_tmp_var (TREE_TYPE (lhs), NULL);
		      tree def = get_or_create_ssa_default_def (cfun, var);

		      /* To satisfy condition for
			 cgraph_update_edges_for_call_stmt_node,
			 we need to preserve GIMPLE_CALL statement
			 at position of GSI iterator.  */
		      update_call_from_tree (gsi, def);
		      gsi_insert_before (gsi, new_stmt, GSI_NEW_STMT);
		    }
		  else
		    {
		      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
		      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
		      gsi_replace (gsi, new_stmt, false);
		    }
		  return true;
		}
	    }
	}
    }

  if (inplace)
    return changed;

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    {
      if (gimple_fold_builtin (gsi))
	changed = true;
    }
  else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
    {
      changed |= targetm.gimple_fold_builtin (gsi);
    }
  else if (gimple_call_internal_p (stmt))
    {
      enum tree_code subcode = ERROR_MARK;
      tree result = NULL_TREE;
      switch (gimple_call_internal_fn (stmt))
	{
	case IFN_BUILTIN_EXPECT:
	  result = fold_builtin_expect (gimple_location (stmt),
					gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1),
					gimple_call_arg (stmt, 2));
	  break;
	case IFN_UBSAN_OBJECT_SIZE:
	  if (integer_all_onesp (gimple_call_arg (stmt, 2))
	      || (TREE_CODE (gimple_call_arg (stmt, 1)) == INTEGER_CST
		  && TREE_CODE (gimple_call_arg (stmt, 2)) == INTEGER_CST
		  && tree_int_cst_le (gimple_call_arg (stmt, 1),
				      gimple_call_arg (stmt, 2))))
	    {
	      gsi_replace (gsi, gimple_build_nop (), true);
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	      return true;
	    }
	  break;
	case IFN_UBSAN_CHECK_ADD:
	  subcode = PLUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_SUB:
	  subcode = MINUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_MUL:
	  subcode = MULT_EXPR;
	  break;
	default:
	  break;
	}
      if (subcode != ERROR_MARK)
	{
	  tree arg0 = gimple_call_arg (stmt, 0);
	  tree arg1 = gimple_call_arg (stmt, 1);
	  /* x = y + 0; x = y - 0; x = y * 0; */
	  if (integer_zerop (arg1))
	    result = subcode == MULT_EXPR
		     ? build_zero_cst (TREE_TYPE (arg0))
		     : arg0;
	  /* x = 0 + y; x = 0 * y; */
	  else if (subcode != MINUS_EXPR && integer_zerop (arg0))
	    result = subcode == MULT_EXPR
		     ? build_zero_cst (TREE_TYPE (arg0))
		     : arg1;
	  /* x = y - y; */
	  else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
	    result = build_zero_cst (TREE_TYPE (arg0));
	  /* x = y * 1; x = 1 * y; */
	  else if (subcode == MULT_EXPR)
	    {
	      if (integer_onep (arg1))
		result = arg0;
	      else if (integer_onep (arg0))
		result = arg1;
	    }
	}
      if (result)
	{
	  if (!update_call_from_tree (gsi, result))
	    gimplify_and_update_call_from_tree (gsi, result);
	  changed = true;
	}
    }

  return changed;
}
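
/* Editorial illustration (a sketch added for this revision, not part of
   the original source): if type analysis proves exactly one possible
   target, a polymorphic call through an OBJ_TYPE_REF is devirtualized
   to a direct call to that target; with zero possible targets the call
   is replaced by __builtin_unreachable (), and any SSA lhs is first
   rewritten to an artificial default definition so the IL stays
   valid.  */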

/* Worker for fold_stmt_1 dispatch to pattern based folding with
   gimple_simplify.

   Replaces *GSI with the simplification result in RCODE and OPS
   and the associated statements in *SEQ.  Does the replacement
   according to INPLACE and returns true if the operation succeeded.  */

static bool
replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
				  code_helper rcode, tree *ops,
				  gimple_seq *seq, bool inplace)
{
  gimple stmt = gsi_stmt (*gsi);

  /* Play safe and do not allow abnormals to be mentioned in
     newly created statements.  See also maybe_push_res_to_seq.  */
  if ((TREE_CODE (ops[0]) == SSA_NAME
       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0]))
      || (ops[1]
	  && TREE_CODE (ops[1]) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1]))
      || (ops[2]
	  && TREE_CODE (ops[2]) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2])))
    return false;

  if (gimple_code (stmt) == GIMPLE_COND)
    {
      gcc_assert (rcode.is_tree_code ());
      if (TREE_CODE_CLASS ((enum tree_code) rcode) == tcc_comparison
	  /* GIMPLE_CONDs condition may not throw.  */
	  && (!flag_exceptions
	      || !cfun->can_throw_non_call_exceptions
	      || !operation_could_trap_p (rcode,
					  FLOAT_TYPE_P (TREE_TYPE (ops[0])),
					  false, NULL_TREE)))
	gimple_cond_set_condition (stmt, rcode, ops[0], ops[1]);
      else if (rcode == SSA_NAME)
	gimple_cond_set_condition (stmt, NE_EXPR, ops[0],
				   build_zero_cst (TREE_TYPE (ops[0])));
      else if (rcode == INTEGER_CST)
	{
	  if (integer_zerop (ops[0]))
	    gimple_cond_make_false (stmt);
	  else
	    gimple_cond_make_true (stmt);
	}
      else if (!inplace)
	{
	  tree res = maybe_push_res_to_seq (rcode, boolean_type_node,
					    ops, seq);
	  if (!res)
	    return false;
	  gimple_cond_set_condition (stmt, NE_EXPR, res,
				     build_zero_cst (TREE_TYPE (res)));
	}
      else
	return false;
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi),
			     0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (is_gimple_assign (stmt)
	   && rcode.is_tree_code ())
    {
      if (!inplace
	  || gimple_num_ops (stmt) <= get_gimple_rhs_num_ops (rcode))
	{
	  maybe_build_generic_op (rcode,
				  TREE_TYPE (gimple_assign_lhs (stmt)),
				  &ops[0], ops[1], ops[2]);
	  gimple_assign_set_rhs_with_ops_1 (gsi, rcode,
					    ops[0], ops[1], ops[2]);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      if (!gimple_seq_empty_p (*seq))
		print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	      print_gimple_stmt (dump_file, gsi_stmt (*gsi),
				 0, TDF_SLIM);
	    }
	  gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
	  return true;
	}
    }
  else if (!inplace)
    {
      if (gimple_has_lhs (stmt))
	{
	  tree lhs = gimple_get_lhs (stmt);
	  maybe_push_res_to_seq (rcode, TREE_TYPE (lhs),
				 ops, seq, lhs);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	    }
	  gsi_replace_with_seq_vops (gsi, *seq);
	  return true;
	}
      else
	gcc_unreachable ();
    }

  return false;
}
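
/* Editorial illustration (a sketch added for this revision, not part of
   the original source): when gimple_simplify reduces the condition of

     if (a_1 != a_1) ...

   to the constant 0, the INTEGER_CST arm above rewrites the GIMPLE_COND
   via gimple_cond_make_false rather than building a new statement.  */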

/* Canonicalize MEM_REFs invariant address operand after propagation.  */

static bool
maybe_canonicalize_mem_ref_addr (tree *t)
{
  bool res = false;

  if (TREE_CODE (*t) == ADDR_EXPR)
    t = &TREE_OPERAND (*t, 0);

  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
     of invariant addresses into a SSA name MEM_REF address.  */
  if (TREE_CODE (*t) == MEM_REF
      || TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (*t, 0);
      if (TREE_CODE (addr) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
	      || handled_component_p (TREE_OPERAND (addr, 0))))
	{
	  tree base;
	  HOST_WIDE_INT coffset;
	  base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
						&coffset);
	  if (!base)
	    gcc_unreachable ();

	  TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
	  TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
						  TREE_OPERAND (*t, 1),
						  size_int (coffset));
	  res = true;
	}
      gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
			   || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
    }

  /* Canonicalize back MEM_REFs to plain reference trees if the object
     accessed is a decl that has the same access semantics as the MEM_REF.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1)))
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
      if (/* Same volatile qualification.  */
	  TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
	  /* Same TBAA behavior with -fstrict-aliasing.  */
	  && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
	  && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
	      == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
	  /* Same alignment.  */
	  && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
	  /* We have to look out here to not drop a required conversion
	     from the rhs to the lhs if *t appears on the lhs or vice-versa
	     if it appears on the rhs.  Thus require strict type
	     compatibility.  */
	  && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
	{
	  *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
	  res = true;
	}
    }

  /* Canonicalize TARGET_MEM_REF in particular with respect to
     the indexes becoming constant.  */
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
	{
	  *t = tem;
	  res = true;
	}
    }

  return res;
}
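
/* Editorial illustration (a sketch added for this revision, not part of
   the original source; the decls are hypothetical): after propagating
   &a.b into a dereference we may see

     MEM[&a.b, 4]

   which the first case above rewrites so the base is the outermost decl,
   MEM[&a, offset-of-b + 4].  Conversely, a semantically equivalent
   MEM[&d, 0] is turned back into the plain decl reference d when the
   volatility, TBAA type, and alignment all match.  */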

/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases.  */

static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
{
  bool changed = false;
  gimple stmt = gsi_stmt (*gsi);
  unsigned i;

  /* First do required canonicalization of [TARGET_]MEM_REF addresses
     after propagation.
     ???  This shouldn't be done in generic folding but in the
     propagation helpers which also know whether an address was
     propagated.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
	{
	  tree *rhs = gimple_assign_rhs1_ptr (stmt);
	  if ((REFERENCE_CLASS_P (*rhs)
	       || TREE_CODE (*rhs) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (rhs))
	    changed = true;
	  tree *lhs = gimple_assign_lhs_ptr (stmt);
	  if (REFERENCE_CLASS_P (*lhs)
	      && maybe_canonicalize_mem_ref_addr (lhs))
	    changed = true;
	}
      break;
    case GIMPLE_CALL:
      {
	for (i = 0; i < gimple_call_num_args (stmt); ++i)
	  {
	    tree *arg = gimple_call_arg_ptr (stmt, i);
	    if (REFERENCE_CLASS_P (*arg)
		&& maybe_canonicalize_mem_ref_addr (arg))
	      changed = true;
	  }
	tree *lhs = gimple_call_lhs_ptr (stmt);
	if (*lhs
	    && REFERENCE_CLASS_P (*lhs)
	    && maybe_canonicalize_mem_ref_addr (lhs))
	  changed = true;
	break;
      }
    case GIMPLE_ASM:
      {
	for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (stmt, i);
	    tree op = TREE_VALUE (link);
	    if (REFERENCE_CLASS_P (op)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
	for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (stmt, i);
	    tree op = TREE_VALUE (link);
	    if ((REFERENCE_CLASS_P (op)
		 || TREE_CODE (op) == ADDR_EXPR)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
      }
      break;
    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree *val = gimple_debug_bind_get_value_ptr (stmt);
	  if (*val
	      && (REFERENCE_CLASS_P (*val)
		  || TREE_CODE (*val) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (val))
	    changed = true;
	}
      break;
    default:;
    }

  /* Dispatch to pattern-based folding.  */
  if (!inplace
      || is_gimple_assign (stmt)
      || gimple_code (stmt) == GIMPLE_COND)
    {
      gimple_seq seq = NULL;
      code_helper rcode;
      tree ops[3] = {};
      if (gimple_simplify (stmt, &rcode, ops, inplace ? NULL : &seq, valueize))
	{
	  if (replace_stmt_with_simplification (gsi, rcode, ops, &seq, inplace))
	    changed = true;
	  else
	    gimple_seq_discard (seq);
	}
    }

  stmt = gsi_stmt (*gsi);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	unsigned old_num_ops = gimple_num_ops (stmt);
	enum tree_code subcode = gimple_assign_rhs_code (stmt);
	tree lhs = gimple_assign_lhs (stmt);
	tree new_rhs;
	/* First canonicalize operand order.  This avoids building new
	   trees if this is the only thing fold would later do.  */
	if ((commutative_tree_code (subcode)
	     || commutative_ternary_tree_code (subcode))
	    && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
				     gimple_assign_rhs2 (stmt), false))
	  {
	    tree tem = gimple_assign_rhs1 (stmt);
	    gimple_assign_set_rhs1 (stmt, gimple_assign_rhs2 (stmt));
	    gimple_assign_set_rhs2 (stmt, tem);
	    changed = true;
	  }

	new_rhs = fold_gimple_assign (gsi);
	if (new_rhs
	    && !useless_type_conversion_p (TREE_TYPE (lhs),
					   TREE_TYPE (new_rhs)))
	  new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	if (new_rhs
	    && (!inplace
		|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	break;
      }

    case GIMPLE_COND:
      changed |= fold_gimple_cond (stmt);
      break;

    case GIMPLE_CALL:
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      {
	size_t noutputs;
	const char **oconstraints;
	const char *constraint;
	bool allows_mem, allows_reg;

	noutputs = gimple_asm_noutputs (stmt);
	oconstraints = XALLOCAVEC (const char *, noutputs);

	for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (stmt, i);
	    tree op = TREE_VALUE (link);
	    oconstraints[i]
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, true)) != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
	for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (stmt, i);
	    tree op = TREE_VALUE (link);
	    constraint
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    parse_input_constraint (&constraint, 0, 0, noutputs, 0,
				    oconstraints, &allows_mem, &allows_reg);
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, !allows_reg && allows_mem))
		   != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
      }
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree val = gimple_debug_bind_get_value (stmt);
	  if (val
	      && REFERENCE_CLASS_P (val))
	    {
	      tree tem = maybe_fold_reference (val, false);
	      if (tem)
		{
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	  else if (val
		   && TREE_CODE (val) == ADDR_EXPR)
	    {
	      tree ref = TREE_OPERAND (val, 0);
	      tree tem = maybe_fold_reference (ref, false);
	      if (tem)
		{
		  tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	}
      break;

    default:;
    }

  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  */
  if (gimple_has_lhs (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
	{
	  tree new_lhs = maybe_fold_reference (lhs, true);
	  if (new_lhs)
	    {
	      gimple_set_lhs (stmt, new_lhs);
	      changed = true;
	    }
	}
    }

  return changed;
}

/* Valueization callback that ends up not following SSA edges.  */

tree
no_follow_ssa_edges (tree)
{
  return NULL_TREE;
}

/* Valueization callback that ends up following single-use SSA edges only.  */

tree
follow_single_use_edges (tree val)
{
  if (TREE_CODE (val) == SSA_NAME
      && !has_single_use (val))
    return NULL_TREE;
  return val;
}

/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.
   The statement pointed to by GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
}

/* Like above, but additionally valueize SSA names with VALUEIZE while
   folding.  */

bool
fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
{
  return fold_stmt_1 (gsi, false, valueize);
}

/* Perform the minimal folding on statement *GSI.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.
   The statement *GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt_inplace (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
  gcc_assert (gsi_stmt (*gsi) == stmt);
  return changed;
}
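
/* Usage sketch (editorial, not part of the original source): a pass
   walking a function would typically fold each statement like so:

     gimple_stmt_iterator gsi;
     basic_block bb;
     FOR_EACH_BB_FN (bb, cfun)
       for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	 fold_stmt (&gsi);

   using fold_stmt_inplace instead when the statement must not be
   reallocated or replaced.  */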

/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
   if EXPR is null or we don't know how.
   If non-null, the result always has boolean type.  */

static tree
canonicalize_bool (tree expr, bool invert)
{
  if (!expr)
    return NULL_TREE;
  if (invert)
    {
      if (integer_nonzerop (expr))
	return boolean_false_node;
      else if (integer_zerop (expr))
	return boolean_true_node;
      else if (TREE_CODE (expr) == SSA_NAME)
	return fold_build2 (EQ_EXPR, boolean_type_node, expr,
			    build_int_cst (TREE_TYPE (expr), 0));
      else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
	return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
			    boolean_type_node,
			    TREE_OPERAND (expr, 0),
			    TREE_OPERAND (expr, 1));
      else
	return NULL_TREE;
    }
  else
    {
      if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
	return expr;
      if (integer_nonzerop (expr))
	return boolean_true_node;
      else if (integer_zerop (expr))
	return boolean_false_node;
      else if (TREE_CODE (expr) == SSA_NAME)
	return fold_build2 (NE_EXPR, boolean_type_node, expr,
			    build_int_cst (TREE_TYPE (expr), 0));
      else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
	return fold_build2 (TREE_CODE (expr),
			    boolean_type_node,
			    TREE_OPERAND (expr, 0),
			    TREE_OPERAND (expr, 1));
      else
	return NULL_TREE;
    }
}
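
/* Editorial illustration (a sketch added for this revision, not part of
   the original source): canonicalize_bool (x < y, true) yields the
   inverted comparison x >= y of boolean type, while
   canonicalize_bool (name_1, false) for a non-boolean SSA name yields
   name_1 != 0.  */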

/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  */

static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
			const_tree op1, const_tree op2)
{
  gimple s;

  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      if (operand_equal_p (expr, op1, 0))
	return ((code == NE_EXPR && integer_zerop (op2))
		|| (code == EQ_EXPR && integer_nonzerop (op2)));
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
	  && gimple_assign_rhs_code (s) == code
	  && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
	  && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
	return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
	{
	  enum tree_code c = gimple_assign_rhs_code (s);
	  if ((c == NE_EXPR && integer_zerop (op2))
	      || (c == EQ_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr, c,
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	  if ((c == EQ_EXPR && integer_zerop (op2))
	      || (c == NE_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr,
					   invert_tree_comparison (c, false),
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	}
    }
  return false;
}
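
/* Editorial illustration (a sketch added for this revision, not part of
   the original source): given the definition  t_2 = a_1 < b_1;  the
   predicate above treats the pairs (t_2, a_1 < b_1) and
   (t_2 != 0, a_1 < b_1) as logically equivalent.  */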

/* Check to see if two boolean expressions OP1 and OP2 are logically
   equivalent.  */

static bool
same_bool_result_p (const_tree op1, const_tree op2)
{
  /* Simple cases first.  */
  if (operand_equal_p (op1, op2, 0))
    return true;

  /* Check the cases where at least one of the operands is a comparison.
     These are a bit smarter than operand_equal_p in that they apply some
     identities on SSA_NAMEs.  */
  if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
      && same_bool_comparison_p (op1, TREE_CODE (op2),
				 TREE_OPERAND (op2, 0),
				 TREE_OPERAND (op2, 1)))
    return true;
  if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
      && same_bool_comparison_p (op2, TREE_CODE (op1),
				 TREE_OPERAND (op1, 0),
				 TREE_OPERAND (op1, 1)))
    return true;

  /* Default case.  */
  return false;
}

/* Forward declarations for some mutually recursive functions.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison (tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison_1 (gimple stmt,
			   enum tree_code code2, tree op2a, tree op2b);
static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison (tree var, bool invert,
			enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison_1 (gimple stmt,
			  enum tree_code code2, tree op2a, tree op2b);

/* Helper function for and_comparisons_1:  try to simplify the AND of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of the VAR before doing the AND.
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison (tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b)
{
  tree t;
  gimple stmt = SSA_NAME_DEF_STMT (var);

  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.  */
  if (invert)
    t = or_var_with_comparison_1 (stmt,
				  invert_tree_comparison (code2, false),
				  op2a, op2b);
  else
    t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
  return canonicalize_bool (t, invert);
}

/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison_1 (gimple stmt,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_false_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (innercode,
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt),
				  code2, op2a, op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple s;
      tree t;
      tree partial = NULL_TREE;
      bool is_and = (innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
	 inner1 AND (inner1 OR inner2) => inner1
	 !inner1 AND (inner1 AND inner2) => false
	 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
	 Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
	 Compute the first partial result, (inner1 AND (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (t AND inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner2;
	      else if (integer_zerop (t))
		return boolean_false_node;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner2 AND (op2a code2 op2b)))  */
	  else if (integer_onep (t))
	    return boolean_true_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (inner1 AND t)  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner1;
	      else if (integer_zerop (t))
		return boolean_false_node;
	      /* If both are the same, we can apply the identity
		 (x AND x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the OR case. where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner1 AND (op2a code2 op2b)))
	     => (t OR partial)  */
	  else
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed OR expression.  The
		     interesting case is when at least one is false.
		     Or, if both are the same, we can apply the identity
		     (x OR x) == x.  */
		  if (integer_zerop (partial))
		    return t;
		  else if (integer_zerop (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
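
/* Editorial illustration (a sketch added for this revision, not part of
   the original source): for the definition  var_3 = inner1_1 & inner2_2;
   the identities above give, without any recursion,

     var_3 AND (inner1_1 != 0)  =>  var_3
     var_3 AND (inner1_1 == 0)  =>  false

   before the redistribution over the inner tests is attempted.  */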

/* Try to simplify the AND of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) AND (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a == op1b), we should either be able to
	 return that or FALSE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	      else
		return boolean_false_node;
	    }
	}
      /* Likewise if the second comparison is an == comparison.  */
      else if (code2 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	      else
		return boolean_false_node;
	    }
	}

      /* Same business with inequality tests.  */
      else if (code1 == NE_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp != 0); break;
	    case NE_EXPR: val = (cmp == 0); break;
	    case LT_EXPR: val = (cmp >= 0); break;
	    case GT_EXPR: val = (cmp <= 0); break;
	    case LE_EXPR: val = (cmp > 0); break;
	    case GE_EXPR: val = (cmp < 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == NE_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp <= 0); break;
	    case GT_EXPR: val = (cmp >= 0); break;
	    case LE_EXPR: val = (cmp < 0); break;
	    case GE_EXPR: val = (cmp > 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Choose the more restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Likewise choose the more restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Check for singleton ranges.  */
      else if (cmp == 0
	       && ((code1 == LE_EXPR && code2 == GE_EXPR)
		   || (code1 == GE_EXPR && code2 == LE_EXPR)))
	return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);

      /* Check for disjoint ranges.  */
      else if (cmp <= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_false_node;
      else if (cmp >= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_false_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return and_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ANDed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
			{
			  if (!result)
			    result = boolean_false_node;
			  else if (!integer_zerop (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = and_var_with_comparison (arg, invert, code2,
						      op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }
	  break;

	default:
	  break;
	}
    }
  return NULL_TREE;
}
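
/* Editorial illustration (a sketch added for this revision, not part of
   the original source), with x an SSA name compared against constants:

     (x == 3) AND (x < 5)   =>  x == 3
     (x < 3)  AND (x < 5)   =>  x < 3      (more restrictive bound)
     (x <= 3) AND (x >= 3)  =>  x == 3     (singleton range)
     (x < 3)  AND (x > 5)   =>  false      (disjoint ranges)  */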

/* Try to simplify the AND of two comparisons, specified by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be simplified to a single expression (without requiring
   introducing more SSA variables to hold intermediate values),
   return the resulting tree.  Otherwise return NULL_TREE.
   If the result expression is non-null, it has boolean type.  */

tree
maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
			    enum tree_code code2, tree op2a, tree op2b)
{
  tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
  if (t)
    return t;
  else
    return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
}
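
/* Usage sketch (editorial, not part of the original source; TYPE is a
   hypothetical integer type):

     tree t = maybe_fold_and_comparisons (LT_EXPR, x, build_int_cst (type, 3),
					  LT_EXPR, x, build_int_cst (type, 5));

   would, per the tables above, yield the tree for x < 3.  Callers such
   as the ifcombine pass use this to merge guarding conditions without
   creating temporaries.  */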

/* Helper function for or_comparisons_1:  try to simplify the OR of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of VAR before doing the OR.
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison (tree var, bool invert,
			enum tree_code code2, tree op2a, tree op2b)
{
  tree t;
  gimple stmt = SSA_NAME_DEF_STMT (var);

  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.  */
  if (invert)
    t = and_var_with_comparison_1 (stmt,
				   invert_tree_comparison (code2, false),
				   op2a, op2b);
  else
    t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
  return canonicalize_bool (t, invert);
}

/* Try to simplify the OR of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison_1 (gimple stmt,
			  enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var OR (var != 0)) => true.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_true_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = or_comparisons_1 (innercode,
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 code2, op2a, op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple s;
      tree t;
      tree partial = NULL_TREE;
      bool is_or = (innercode == BIT_IOR_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
	 inner1 OR (inner1 AND inner2) => inner1
	 !inner1 OR (inner1 OR inner2) => true
	 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
	 Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_or ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_or ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the OR across the inner tests.
	 Compute the first partial result, (inner1 OR (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (t OR inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_or)
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (integer_zerop (t))
		return inner2;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner2 OR (op2a code op2b)))  */
	  else if (integer_zerop (t))
	    return boolean_false_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (inner1 OR t)
	     => (t OR partial)  */
	  if (is_or)
	    {
	      if (integer_zerop (t))
		return inner1;
	      else if (integer_onep (t))
		return boolean_true_node;
	      /* If both are the same, we can apply the identity
		 (x OR x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner1 OR (op2a code2 op2b)))
	     => (t AND partial)  */
	  else
	    {
	      if (integer_zerop (t))
		return boolean_false_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed AND expression.  The
		     interesting case is when at least one is true.
		     Or, if both are the same, we can apply the identity
		     (x AND x) == x.  */
		  if (integer_onep (partial))
		    return t;
		  else if (integer_onep (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
4106 /* Try to simplify the OR of two comparisons defined by
4107 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4108 If this can be done without constructing an intermediate value,
4109 return the resulting tree; otherwise NULL_TREE is returned.
4110 This function is deliberately asymmetric as it recurses on SSA_DEFs
4111 in the first comparison but not the second. */
4114 or_comparisons_1 (enum tree_code code1
, tree op1a
, tree op1b
,
4115 enum tree_code code2
, tree op2a
, tree op2b
)
4117 tree truth_type
= truth_type_for (TREE_TYPE (op1a
));
4119 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
4120 if (operand_equal_p (op1a
, op2a
, 0)
4121 && operand_equal_p (op1b
, op2b
, 0))
4123 /* Result will be either NULL_TREE, or a combined comparison. */
4124 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
4125 TRUTH_ORIF_EXPR
, code1
, code2
,
4126 truth_type
, op1a
, op1b
);
4131 /* Likewise the swapped case of the above. */
4132 if (operand_equal_p (op1a
, op2b
, 0)
4133 && operand_equal_p (op1b
, op2a
, 0))
4135 /* Result will be either NULL_TREE, or a combined comparison. */
4136 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
4137 TRUTH_ORIF_EXPR
, code1
,
4138 swap_tree_comparison (code2
),
4139 truth_type
, op1a
, op1b
);
4144 /* If both comparisons are of the same value against constants, we might
4145 be able to merge them. */
4146 if (operand_equal_p (op1a
, op2a
, 0)
4147 && TREE_CODE (op1b
) == INTEGER_CST
4148 && TREE_CODE (op2b
) == INTEGER_CST
)
4150 int cmp
= tree_int_cst_compare (op1b
, op2b
);
4152 /* If we have (op1a != op1b), we should either be able to
4153 return that or TRUE, depending on whether the constant op1b
4154 also satisfies the other comparison against op2b. */
4155 if (code1
== NE_EXPR
)
4161 case EQ_EXPR
: val
= (cmp
== 0); break;
4162 case NE_EXPR
: val
= (cmp
!= 0); break;
4163 case LT_EXPR
: val
= (cmp
< 0); break;
4164 case GT_EXPR
: val
= (cmp
> 0); break;
4165 case LE_EXPR
: val
= (cmp
<= 0); break;
4166 case GE_EXPR
: val
= (cmp
>= 0); break;
4167 default: done
= false;
4172 return boolean_true_node
;
4174 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);

      /* Likewise if the second comparison is a != comparison.  */
      else if (code2 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	    }
	}

      /* See if an equality test is redundant with the other comparison.  */
      else if (code1 == EQ_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == EQ_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}
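
      /* For instance, (x == 3) || (x < 5) folds to x < 5 because the
	 equality is subsumed by the inequality; the symmetric case
	 folds (x < 5) || (x == 3) the same way (illustrative
	 examples).  */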

      /* Choose the less restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Likewise choose the less restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}
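
      /* For instance, (x < 5) || (x <= 7) keeps the weaker bound and
	 folds to x <= 7; likewise (x > 5) || (x >= 5) folds to x >= 5
	 (illustrative examples).  */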

      /* Check for singleton ranges.  */
      else if (cmp == 0
	       && ((code1 == LT_EXPR && code2 == GT_EXPR)
		   || (code1 == GT_EXPR && code2 == LT_EXPR)))
	return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);

      /* Check for less/greater pairs that don't restrict the range at all.  */
      else if (cmp >= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_true_node;
      else if (cmp <= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_true_node;
    }
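
  /* For instance, (x < 5) || (x > 5) becomes x != 5 via the singleton
     range check, and (x < 5) || (x > 3) is always true (illustrative
     examples).  */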

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return or_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ORed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
			{
			  if (!result)
			    result = boolean_true_node;
			  else if (!integer_onep (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = or_var_with_comparison (arg, invert, code2,
						     op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}

/* Try to simplify the OR of two comparisons, specified by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be simplified to a single expression (without requiring
   introducing more SSA variables to hold intermediate values),
   return the resulting tree.  Otherwise return NULL_TREE.
   If the result expression is non-null, it has boolean type.  */

tree
maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
  if (t)
    return t;
  else
    return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
}
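
/* Since || is commutative, the retry with swapped operands above lets
   or_comparisons_1 recurse into the SSA definition of whichever
   comparison happens to come second.  For instance (illustrative),
   maybe_fold_or_comparisons (LT_EXPR, a, b, GE_EXPR, a, b) folds
   (a < b) || (a >= b) to boolean_true_node for integer operands.  */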

/* Fold STMT to a constant using VALUEIZE to valueize SSA names.

   Either NULL_TREE, a simplified but non-constant or a constant
   is returned.

   ???  This should go into a gimple-fold-inline.h file to be eventually
   privatized with the single valueize function used in the various TUs
   to avoid the indirect function call overhead.  */

tree
gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree))
{
  code_helper rcode;
  tree ops[3] = {};
  /* ???  The SSA propagators do not correctly deal with following SSA use-def
     edges if there are intermediate VARYING defs.  For this reason
     do not follow SSA edges here even though SCCVN can technically
     just deal fine with that.  */
  if (gimple_simplify (stmt, &rcode, ops, NULL, no_follow_ssa_edges)
      && rcode.is_tree_code ()
      && (TREE_CODE_LENGTH ((tree_code) rcode) == 0
	  || ((tree_code) rcode) == ADDR_EXPR)
      && is_gimple_val (ops[0]))
    {
      tree res = ops[0];
      if (dump_file && dump_flags & TDF_DETAILS)
	{
	  fprintf (dump_file, "Match-and-simplified ");
	  print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
	  fprintf (dump_file, " to ");
	  print_generic_expr (dump_file, res, 0);
	  fprintf (dump_file, "\n");
	}
      return res;
    }

  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	enum tree_code subcode = gimple_assign_rhs_code (stmt);

	switch (get_gimple_rhs_class (subcode))
	  {
	  case GIMPLE_SINGLE_RHS:
	    {
	      tree rhs = gimple_assign_rhs1 (stmt);
	      enum tree_code_class kind = TREE_CODE_CLASS (subcode);

	      if (TREE_CODE (rhs) == SSA_NAME)
		{
		  /* If the RHS is an SSA_NAME, return its known constant value,
		     if any.  */
		  return (*valueize) (rhs);
		}
	      /* Handle propagating invariant addresses into address
		 operations.  */
	      else if (TREE_CODE (rhs) == ADDR_EXPR
		       && !is_gimple_min_invariant (rhs))
		{
		  HOST_WIDE_INT offset = 0;
		  tree base;
		  base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
							  &offset,
							  valueize);
		  if (base
		      && (CONSTANT_CLASS_P (base)
			  || decl_address_invariant_p (base)))
		    return build_invariant_address (TREE_TYPE (rhs),
						    base, offset);
		}
	      else if (TREE_CODE (rhs) == CONSTRUCTOR
		       && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		       && (CONSTRUCTOR_NELTS (rhs)
			   == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
		{
		  unsigned i;
		  tree val, *vec;

		  vec = XALLOCAVEC (tree,
				    TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
		  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
		    {
		      val = (*valueize) (val);
		      if (TREE_CODE (val) == INTEGER_CST
			  || TREE_CODE (val) == REAL_CST
			  || TREE_CODE (val) == FIXED_CST)
			vec[i] = val;
		      else
			return NULL_TREE;
		    }

		  return build_vector (TREE_TYPE (rhs), vec);
		}
	      if (subcode == OBJ_TYPE_REF)
		{
		  tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
		  /* If callee is constant, we can fold away the wrapper.  */
		  if (is_gimple_min_invariant (val))
		    return val;
		}

	      if (kind == tcc_reference)
		{
		  if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
		       || TREE_CODE (rhs) == REALPART_EXPR
		       || TREE_CODE (rhs) == IMAGPART_EXPR)
		      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      return fold_unary_loc (EXPR_LOCATION (rhs),
					     TREE_CODE (rhs),
					     TREE_TYPE (rhs), val);
		    }
		  else if (TREE_CODE (rhs) == BIT_FIELD_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      return fold_ternary_loc (EXPR_LOCATION (rhs),
					       TREE_CODE (rhs),
					       TREE_TYPE (rhs), val,
					       TREE_OPERAND (rhs, 1),
					       TREE_OPERAND (rhs, 2));
		    }
		  else if (TREE_CODE (rhs) == MEM_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      if (TREE_CODE (val) == ADDR_EXPR
			  && is_gimple_min_invariant (val))
			{
			  tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
						  unshare_expr (val),
						  TREE_OPERAND (rhs, 1));
			  if (tem)
			    rhs = tem;
			}
		    }
		  return fold_const_aggregate_ref_1 (rhs, valueize);
		}
	      else if (kind == tcc_declaration)
		return get_symbol_constant_value (rhs);
	      return rhs;
	    }

	  case GIMPLE_UNARY_RHS:
	    {
	      /* Handle unary operators that can appear in GIMPLE form.
		 Note that we know the single operand must be a constant,
		 so this should almost always return a simplified RHS.  */
	      tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));

	      return
		fold_unary_ignore_overflow_loc (loc, subcode,
						gimple_expr_type (stmt), op0);
	    }

	  case GIMPLE_BINARY_RHS:
	    {
	      /* Handle binary operators that can appear in GIMPLE form.  */
	      tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
	      tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));

	      /* Translate &x + CST into an invariant form suitable for
		 further propagation.  */
	      if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
		  && TREE_CODE (op0) == ADDR_EXPR
		  && TREE_CODE (op1) == INTEGER_CST)
		{
		  tree off = fold_convert (ptr_type_node, op1);
		  return build_fold_addr_expr_loc
			   (loc,
			    fold_build2 (MEM_REF,
					 TREE_TYPE (TREE_TYPE (op0)),
					 unshare_expr (op0), off));
		}
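
	      /* E.g. the address computation &a + 4 becomes, roughly,
		 &MEM[&a + 4] in dump notation, which is a gimple
		 invariant that later propagation can consume
		 (illustrative example).  */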
	      return fold_binary_loc (loc, subcode,
				      gimple_expr_type (stmt), op0, op1);
	    }

	  case GIMPLE_TERNARY_RHS:
	    {
	      /* Handle ternary operators that can appear in GIMPLE form.  */
	      tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
	      tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
	      tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));

	      /* Fold embedded expressions in ternary codes.  */
	      if ((subcode == COND_EXPR
		   || subcode == VEC_COND_EXPR)
		  && COMPARISON_CLASS_P (op0))
		{
		  tree op00 = (*valueize) (TREE_OPERAND (op0, 0));
		  tree op01 = (*valueize) (TREE_OPERAND (op0, 1));
		  tree tem = fold_binary_loc (loc, TREE_CODE (op0),
					      TREE_TYPE (op0), op00, op01);
		  if (tem)
		    op0 = tem;
		}

	      return fold_ternary_loc (loc, subcode,
				       gimple_expr_type (stmt), op0, op1, op2);
	    }

	  default:
	    gcc_unreachable ();
	  }
      }

    case GIMPLE_CALL:
      {
	tree fn;

	if (gimple_call_internal_p (stmt))
	  {
	    enum tree_code subcode = ERROR_MARK;
	    switch (gimple_call_internal_fn (stmt))
	      {
	      case IFN_UBSAN_CHECK_ADD:
		subcode = PLUS_EXPR;
		break;
	      case IFN_UBSAN_CHECK_SUB:
		subcode = MINUS_EXPR;
		break;
	      case IFN_UBSAN_CHECK_MUL:
		subcode = MULT_EXPR;
		break;
	      default:
		return NULL_TREE;
	      }
	    tree arg0 = gimple_call_arg (stmt, 0);
	    tree arg1 = gimple_call_arg (stmt, 1);
	    tree op0 = (*valueize) (arg0);
	    tree op1 = (*valueize) (arg1);

	    if (TREE_CODE (op0) != INTEGER_CST
		|| TREE_CODE (op1) != INTEGER_CST)
	      {
		switch (subcode)
		  {
		  case MULT_EXPR:
		    /* x * 0 = 0 * x = 0 without overflow.  */
		    if (integer_zerop (op0) || integer_zerop (op1))
		      return build_zero_cst (TREE_TYPE (arg0));
		    break;
		  case MINUS_EXPR:
		    /* y - y = 0 without overflow.  */
		    if (operand_equal_p (op0, op1, 0))
		      return build_zero_cst (TREE_TYPE (arg0));
		    break;
		  default:
		    break;
		  }
		return NULL_TREE;
	      }
	    tree res
	      = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
	    if (res
		&& TREE_CODE (res) == INTEGER_CST
		&& !TREE_OVERFLOW (res))
	      return res;
	    return NULL_TREE;
	  }

	fn = (*valueize) (gimple_call_fn (stmt));
	if (TREE_CODE (fn) == ADDR_EXPR
	    && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
	    && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
	    && gimple_builtin_call_types_compatible_p (stmt,
						       TREE_OPERAND (fn, 0)))
	  {
	    tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
	    tree call, retval;
	    unsigned i;
	    for (i = 0; i < gimple_call_num_args (stmt); ++i)
	      args[i] = (*valueize) (gimple_call_arg (stmt, i));
	    call = build_call_array_loc (loc,
					 gimple_call_return_type (stmt),
					 fn, gimple_call_num_args (stmt), args);
	    retval = fold_call_expr (EXPR_LOCATION (call), call, false);
	    if (retval)
	      {
		/* fold_call_expr wraps the result inside a NOP_EXPR.  */
		STRIP_NOPS (retval);
		retval = fold_convert (gimple_call_return_type (stmt), retval);
	      }
	    return retval;
	  }
	return NULL_TREE;
      }

    default:
      return NULL_TREE;
    }
}

/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
   Returns NULL_TREE if folding to a constant is not possible, otherwise
   returns a constant according to is_gimple_min_invariant.  */

tree
gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
{
  tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
  if (res && is_gimple_min_invariant (res))
    return res;
  return NULL_TREE;
}
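
/* Usage sketch (NO_FOLLOW is a hypothetical identity callback; real
   callers such as the CCP and SCCVN propagators pass their lattice
   lookup instead):

     static tree
     no_follow (tree name)
     {
       return name;
     }

     tree cst = gimple_fold_stmt_to_constant (stmt, no_follow);

   CST is either NULL_TREE or satisfies is_gimple_min_invariant.  */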

/* The following set of functions are supposed to fold references using
   their constant initializers.  */

static tree fold_ctor_reference (tree type, tree ctor,
				 unsigned HOST_WIDE_INT offset,
				 unsigned HOST_WIDE_INT size, tree);

/* See if we can find a constructor defining the value of BASE.
   When we know the constructor with constant offset (such as
   base is array[40] and we do know the constructor of the array),
   then BIT_OFFSET is adjusted accordingly.

   As a special case, return error_mark_node when the constructor
   is not explicitly available, but it is known to be zero
   such as 'static const int a;'.  */

static tree
get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
		      tree (*valueize)(tree))
{
  HOST_WIDE_INT bit_offset2, size, max_size;
  if (TREE_CODE (base) == MEM_REF)
    {
      if (!integer_zerop (TREE_OPERAND (base, 1)))
	{
	  if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
	    return NULL_TREE;
	  *bit_offset += (mem_ref_offset (base).to_short_addr ()
			  * BITS_PER_UNIT);
	}

      if (valueize
	  && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
	base = valueize (TREE_OPERAND (base, 0));
      if (!base || TREE_CODE (base) != ADDR_EXPR)
	return NULL_TREE;
      base = TREE_OPERAND (base, 0);
    }

  /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
     DECL_INITIAL.  If BASE is a nested reference into another
     ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
     the inner reference.  */
  switch (TREE_CODE (base))
    {
    case VAR_DECL:
    case CONST_DECL:
      {
	tree init = ctor_for_folding (base);

	/* Our semantic is exact opposite of ctor_for_folding;
	   NULL means unknown, while error_mark_node is 0.  */
	if (init == error_mark_node)
	  return NULL_TREE;
	if (!init)
	  return error_mark_node;
	return init;
      }

    case ARRAY_REF:
    case COMPONENT_REF:
      base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
      if (max_size == -1 || size != max_size)
	return NULL_TREE;
      *bit_offset += bit_offset2;
      return get_base_constructor (base, bit_offset, valueize);

    case STRING_CST:
    case CONSTRUCTOR:
      return base;

    default:
      return NULL_TREE;
    }
}
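
/* For example, for 'static const int a;' with no explicit initializer
   the VAR_DECL case above returns error_mark_node, which callers fold
   to a zero constant, while a MEM_REF base such as MEM[&arr + 8] adds
   8 * BITS_PER_UNIT to *BIT_OFFSET before resolving arr itself
   (illustrative examples).  */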

/* CTOR is CONSTRUCTOR of an array type.  Fold reference of type TYPE and size
   SIZE to the memory at bit OFFSET.  */

static tree
fold_array_ctor_reference (tree type, tree ctor,
			   unsigned HOST_WIDE_INT offset,
			   unsigned HOST_WIDE_INT size,
			   tree from_decl)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;
  offset_int low_bound;
  offset_int elt_size;
  offset_int index, max_index;
  offset_int access_index;
  tree domain_type = NULL_TREE, index_type = NULL_TREE;
  HOST_WIDE_INT inner_offset;

  /* Compute low bound and elt size.  */
  if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
    domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    {
      /* Static constructors for variably sized objects make no sense.  */
      gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
      index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
      low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
    }
  else
    low_bound = 0;
  /* Static constructors for variably sized objects make no sense.  */
  gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
	      == INTEGER_CST);
  elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));

  /* We can handle only constantly sized accesses that are known to not
     be larger than the size of the array element.  */
  if (!TYPE_SIZE_UNIT (type)
      || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
      || wi::lts_p (elt_size, wi::to_offset (TYPE_SIZE_UNIT (type))))
    return NULL_TREE;

  /* Compute the array index we look for.  */
  access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
				 elt_size);
  access_index += low_bound;
  if (index_type)
    access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
			    TYPE_SIGN (index_type));

  /* And offset within the access.  */
  inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);

  /* See if the array field is large enough to span the whole access.  We do
     not care to fold accesses spanning multiple array indexes.  */
  if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
    return NULL_TREE;

  index = low_bound - 1;
  if (index_type)
    index = wi::ext (index, TYPE_PRECISION (index_type),
		     TYPE_SIGN (index_type));

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
    {
      /* Array constructor might explicitly set index, or specify a range,
	 or leave index NULL meaning that it is next index after previous
	 one.  */
      if (cfield)
	{
	  if (TREE_CODE (cfield) == INTEGER_CST)
	    max_index = index = wi::to_offset (cfield);
	  else
	    {
	      gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
	      index = wi::to_offset (TREE_OPERAND (cfield, 0));
	      max_index = wi::to_offset (TREE_OPERAND (cfield, 1));
	    }
	}
      else
	{
	  index += 1;
	  if (index_type)
	    index = wi::ext (index, TYPE_PRECISION (index_type),
			     TYPE_SIGN (index_type));
	  max_index = index;
	}

      /* Do we have match?  */
      if (wi::cmpu (access_index, index) >= 0
	  && wi::cmpu (access_index, max_index) <= 0)
	return fold_ctor_reference (type, cval, inner_offset, size,
				    from_decl);
    }
  /* When memory is not explicitly mentioned in the constructor,
     it is 0 (or out of range).  */
  return build_zero_cst (type);
}
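
/* For example, with 32-bit ints, reading the element at bit offset 64
   from the initializer {1, 2, 3} computes access_index 2 and returns
   the constant 3 (illustrative example).  */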

/* CTOR is CONSTRUCTOR of an aggregate or vector.
   Fold reference of type TYPE and size SIZE to the memory at bit OFFSET.  */

static tree
fold_nonarray_ctor_reference (tree type, tree ctor,
			      unsigned HOST_WIDE_INT offset,
			      unsigned HOST_WIDE_INT size,
			      tree from_decl)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
			    cval)
    {
      tree byte_offset = DECL_FIELD_OFFSET (cfield);
      tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
      tree field_size = DECL_SIZE (cfield);
      offset_int bitoffset;
      offset_int bitoffset_end, access_end;

      /* Variable sized objects in static constructors make no sense,
	 but field_size can be NULL for flexible array members.  */
      gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
		  && TREE_CODE (byte_offset) == INTEGER_CST
		  && (field_size != NULL_TREE
		      ? TREE_CODE (field_size) == INTEGER_CST
		      : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));

      /* Compute bit offset of the field.  */
      bitoffset = (wi::to_offset (field_offset)
		   + wi::lshift (wi::to_offset (byte_offset),
				 LOG2_BITS_PER_UNIT));
      /* Compute bit offset where the field ends.  */
      if (field_size != NULL_TREE)
	bitoffset_end = bitoffset + wi::to_offset (field_size);
      else
	bitoffset_end = 0;

      access_end = offset_int (offset) + size;

      /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
	 [BITOFFSET, BITOFFSET_END)?  */
      if (wi::cmps (access_end, bitoffset) > 0
	  && (field_size == NULL_TREE
	      || wi::lts_p (offset, bitoffset_end)))
	{
	  offset_int inner_offset = offset_int (offset) - bitoffset;
	  /* We do have overlap.  Now see if the field is large enough to
	     cover the access.  Give up for accesses spanning multiple
	     fields.  */
	  if (wi::cmps (access_end, bitoffset_end) > 0)
	    return NULL_TREE;
	  if (wi::lts_p (offset, bitoffset))
	    return NULL_TREE;
	  return fold_ctor_reference (type, cval,
				      inner_offset.to_uhwi (), size,
				      from_decl);
	}
    }
  /* When memory is not explicitly mentioned in the constructor, it is 0.  */
  return build_zero_cst (type);
}
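
/* For example, reading the 32-bit field at bits [32, 64) of a struct
   initialized with {.a = 1, .b = 2} finds the overlapping FIELD_DECL
   for b and recurses on the constant 2, while a read of a field the
   initializer never mentions folds to zero (illustrative example,
   assuming 32-bit fields laid out back to back).  */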

/* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
   to the memory at bit OFFSET.  */

static tree
fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
		     unsigned HOST_WIDE_INT size, tree from_decl)
{
  tree ret;

  /* We found the field with exact match.  */
  if (useless_type_conversion_p (type, TREE_TYPE (ctor))
      && !offset)
    return canonicalize_constructor_val (unshare_expr (ctor), from_decl);

  /* We are at the end of walk, see if we can view convert the
     result.  */
  if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
      /* VIEW_CONVERT_EXPR is defined only for matching sizes.  */
      && !compare_tree_int (TYPE_SIZE (type), size)
      && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
    {
      ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
      ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
      if (ret)
	STRIP_USELESS_TYPE_CONVERSION (ret);
      return ret;
    }
  /* For constants and byte-aligned/sized reads try to go through
     native_encode/interpret.  */
  if (CONSTANT_CLASS_P (ctor)
      && BITS_PER_UNIT == 8
      && offset % BITS_PER_UNIT == 0
      && size % BITS_PER_UNIT == 0
      && size <= MAX_BITSIZE_MODE_ANY_MODE)
    {
      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
      if (native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
			      offset / BITS_PER_UNIT) > 0)
	return native_interpret_expr (type, buf, size / BITS_PER_UNIT);
    }
  if (TREE_CODE (ctor) == CONSTRUCTOR)
    {
      if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
	  || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
	return fold_array_ctor_reference (type, ctor, offset, size,
					  from_decl);
      else
	return fold_nonarray_ctor_reference (type, ctor, offset, size,
					     from_decl);
    }

  return NULL_TREE;
}
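
/* For example, a byte-aligned 8-bit read at offset 0 from the 32-bit
   constant 0x01020304 goes through native_encode_expr and
   native_interpret_expr above, yielding 0x04 on little-endian and
   0x01 on big-endian targets (illustrative example).  */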

/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates valueizing SSA
   names using VALUEIZE.  Return NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
{
  tree ctor, idx, base;
  HOST_WIDE_INT offset, size, max_size;
  tree tem;

  if (TREE_THIS_VOLATILE (t))
    return NULL_TREE;

  if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
    return get_symbol_constant_value (t);

  tem = fold_read_from_constant_string (t);
  if (tem)
    return tem;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Constant indexes are handled well by get_base_constructor.
	 Only special case variable offsets.
	 FIXME: This code can't handle nested references with variable indexes
	 (they will be handled only by iteration of ccp).  Perhaps we can bring
	 get_ref_base_and_extent here and make it use a valueize callback.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
	  && valueize
	  && (idx = (*valueize) (TREE_OPERAND (t, 1)))
	  && TREE_CODE (idx) == INTEGER_CST)
	{
	  tree low_bound, unit_size;

	  /* If the resulting bit-offset is constant, track it.  */
	  if ((low_bound = array_ref_low_bound (t),
	       TREE_CODE (low_bound) == INTEGER_CST)
	      && (unit_size = array_ref_element_size (t),
		  tree_fits_uhwi_p (unit_size)))
	    {
	      offset_int woffset
		= wi::sext (wi::to_offset (idx) - wi::to_offset (low_bound),
			    TYPE_PRECISION (TREE_TYPE (idx)));

	      if (wi::fits_shwi_p (woffset))
		{
		  offset = woffset.to_shwi ();
		  /* TODO: This code seems wrong, multiply then check
		     to see if it fits.  */
		  offset *= tree_to_uhwi (unit_size);
		  offset *= BITS_PER_UNIT;

		  base = TREE_OPERAND (t, 0);
		  ctor = get_base_constructor (base, &offset, valueize);
		  /* Empty constructor.  Always fold to 0.  */
		  if (ctor == error_mark_node)
		    return build_zero_cst (TREE_TYPE (t));
		  /* Out of bound array access.  Value is undefined,
		     but don't fold.  */
		  if (offset < 0)
		    return NULL_TREE;
		  /* We cannot determine ctor.  */
		  if (!ctor)
		    return NULL_TREE;
		  return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
					      tree_to_uhwi (unit_size)
					      * BITS_PER_UNIT,
					      base);
		}
	    }
	}
      /* Fallthru.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case TARGET_MEM_REF:
    case MEM_REF:
      base = get_ref_base_and_extent (t, &offset, &size, &max_size);
      ctor = get_base_constructor (base, &offset, valueize);

      /* Empty constructor.  Always fold to 0.  */
      if (ctor == error_mark_node)
	return build_zero_cst (TREE_TYPE (t));
      /* We do not know precise address.  */
      if (max_size == -1 || max_size != size)
	return NULL_TREE;
      /* We cannot determine ctor.  */
      if (!ctor)
	return NULL_TREE;

      /* Out of bound array access.  Value is undefined, but don't fold.  */
      if (offset < 0)
	return NULL_TREE;

      return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
				  base);

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
	if (c && TREE_CODE (c) == COMPLEX_CST)
	  return fold_build1_loc (EXPR_LOCATION (t),
				  TREE_CODE (t), TREE_TYPE (t), c);
	break;
      }

    default:
      break;
    }

  return NULL_TREE;
}

tree
fold_const_aggregate_ref (tree t)
{
  return fold_const_aggregate_ref_1 (t, NULL);
}

/* Lookup virtual method with index TOKEN in a virtual table V
   at OFFSET.
   Set CAN_REFER if non-NULL to false if the method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by a non-C++ produced symbol).  Otherwise just return NULL in that case.  */

tree
gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
				   tree v,
				   unsigned HOST_WIDE_INT offset,
				   bool *can_refer)
{
  tree vtable = v, init, fn;
  unsigned HOST_WIDE_INT size;
  unsigned HOST_WIDE_INT elt_size, access_index;
  tree domain_type;

  if (can_refer)
    *can_refer = true;

  /* First of all double check we have virtual table.  */
  if (TREE_CODE (v) != VAR_DECL
      || !DECL_VIRTUAL_P (v))
    {
      gcc_assert (in_lto_p);
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }

  init = ctor_for_folding (v);

  /* The virtual tables should always be born with constructors
     and we always should assume that they are available for
     folding.  At the moment we do not stream them in all cases,
     but it should never happen that the ctor seems unreachable.  */
  gcc_assert (init);
  if (init == error_mark_node)
    {
      gcc_assert (in_lto_p);
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }
  gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
  size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
  offset *= BITS_PER_UNIT;
  offset += token * size;

  /* Lookup the value in the constructor that is assumed to be an array.
     This is equivalent to
     fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
			       offset, size, NULL);
     but in constant time.  We expect that the frontend produced a simple
     array without indexed initializers.  */

  gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
  domain_type = TYPE_DOMAIN (TREE_TYPE (init));
  gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
  elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));

  access_index = offset / BITS_PER_UNIT / elt_size;
  gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);

  /* This code makes an assumption that there are no
     indexed fields produced by the C++ FE, so we can directly index
     the array.  */
  if (access_index < CONSTRUCTOR_NELTS (init))
    {
      fn = CONSTRUCTOR_ELT (init, access_index)->value;
      gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
      STRIP_NOPS (fn);
    }
  else
    fn = NULL;

  /* For a type-inconsistent program we may end up looking up a virtual
     method in a virtual table that does not contain TOKEN entries.
     We may overrun the virtual table and pick up a constant or RTTI
     info pointer.  In any case the call is undefined.  */
  if (!fn
      || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
      || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
    fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
  else
    {
      fn = TREE_OPERAND (fn, 0);

      /* When the cgraph node is missing and the function is not public,
	 we cannot devirtualize.  This can happen in WHOPR when the actual
	 method ends up in another partition, because we found the
	 devirtualization possibility too late.  */
      if (!can_refer_decl_in_current_unit_p (fn, vtable))
	{
	  if (can_refer)
	    {
	      *can_refer = false;
	      return fn;
	    }
	  return NULL_TREE;
	}
    }

  /* Make sure we create a cgraph node for functions we'll reference.
     They can be non-existent if the reference comes from an entry
     of an external vtable for example.  */
  cgraph_node::get_create (fn);

  return fn;
}
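
/* For example, with 8-byte vtable slots (elt_size == 8) and OFFSET 0,
   TOKEN 2 computes access_index 2 and returns the FUNCTION_DECL stored
   in the third constructor element, falling back to
   BUILT_IN_UNREACHABLE when the slot holds RTTI data or a constant
   instead (illustrative example).  */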

/* Return a declaration of a function which an OBJ_TYPE_REF references.  TOKEN
   is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
   KNOWN_BINFO carries the binfo describing the true type of
   OBJ_TYPE_REF_OBJECT(REF).
   Set CAN_REFER if non-NULL to false if the method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by a non-C++ produced symbol).  Otherwise just return NULL in that case.  */

tree
gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
				  bool *can_refer)
{
  unsigned HOST_WIDE_INT offset;
  tree v;

  v = BINFO_VTABLE (known_binfo);
  /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone.  */
  if (!v)
    return NULL_TREE;

  if (!vtable_pointer_value_to_vtable (v, &v, &offset))
    {
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }
  return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
}

/* Return true iff VAL is a gimple expression that is known to be
   non-negative.  Restricted to floating-point inputs.  */

bool
gimple_val_nonnegative_real_p (tree val)
{
  gimple def_stmt;

  gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));

  /* Use existing logic for non-gimple trees.  */
  if (tree_expr_nonnegative_p (val))
    return true;

  if (TREE_CODE (val) != SSA_NAME)
    return false;

  /* Currently we look only at the immediately defining statement
     to make this determination, since recursion on defining
     statements of operands can lead to quadratic behavior in the
     worst case.  This is expected to catch almost all occurrences
     in practice.  It would be possible to implement limited-depth
     recursion if important cases are lost.  Alternatively, passes
     that need this information (such as the pow/powi lowering code
     in the cse_sincos pass) could be revised to provide it through
     dataflow propagation.  */

  def_stmt = SSA_NAME_DEF_STMT (val);

  if (is_gimple_assign (def_stmt))
    {
      tree op0, op1;

      /* See fold-const.c:tree_expr_nonnegative_p for additional
	 cases that could be handled with recursion.  */

      switch (gimple_assign_rhs_code (def_stmt))
	{
	case ABS_EXPR:
	  /* Always true for floating-point operands.  */
	  return true;

	case MULT_EXPR:
	  /* True if the two operands are identical (since we are
	     restricted to floating-point inputs).  */
	  op0 = gimple_assign_rhs1 (def_stmt);
	  op1 = gimple_assign_rhs2 (def_stmt);

	  if (op0 == op1
	      || operand_equal_p (op0, op1, 0))
	    return true;

	default:
	  break;
	}
    }
  else if (is_gimple_call (def_stmt))
    {
      tree fndecl = gimple_call_fndecl (def_stmt);
      if (fndecl
	  && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	{
	  tree arg1;

	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_ACOS):
	    CASE_FLT_FN (BUILT_IN_ACOSH):
	    CASE_FLT_FN (BUILT_IN_CABS):
	    CASE_FLT_FN (BUILT_IN_COSH):
	    CASE_FLT_FN (BUILT_IN_ERFC):
	    CASE_FLT_FN (BUILT_IN_EXP):
	    CASE_FLT_FN (BUILT_IN_EXP10):
	    CASE_FLT_FN (BUILT_IN_EXP2):
	    CASE_FLT_FN (BUILT_IN_FABS):
	    CASE_FLT_FN (BUILT_IN_FDIM):
	    CASE_FLT_FN (BUILT_IN_HYPOT):
	    CASE_FLT_FN (BUILT_IN_POW10):
	      return true;

	    CASE_FLT_FN (BUILT_IN_SQRT):
	      /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
		 nonnegative inputs.  */
	      if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (val))))
		return true;

	      break;

	    CASE_FLT_FN (BUILT_IN_POWI):
	      /* True if the second argument is an even integer.  */
	      arg1 = gimple_call_arg (def_stmt, 1);

	      if (TREE_CODE (arg1) == INTEGER_CST
		  && (TREE_INT_CST_LOW (arg1) & 1) == 0)
		return true;

	      break;

	    CASE_FLT_FN (BUILT_IN_POW):
	      /* True if the second argument is an even integer-valued
		 real.  */
	      arg1 = gimple_call_arg (def_stmt, 1);

	      if (TREE_CODE (arg1) == REAL_CST)
		{
		  REAL_VALUE_TYPE c;
		  HOST_WIDE_INT n;

		  c = TREE_REAL_CST (arg1);
		  n = real_to_integer (&c);

		  if ((n & 1) == 0)
		    {
		      REAL_VALUE_TYPE cint;
		      real_from_integer (&cint, VOIDmode, n, SIGNED);
		      if (real_identical (&c, &cint))
			return true;
		    }
		}

	      break;

	    default:
	      return false;
	    }
	}
    }

  return false;
}
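
/* For example, y = x * x is recognized through the MULT_EXPR case
   above, and a call such as __builtin_pow (x, 2.0) through the
   even-integer-valued REAL_CST check (illustrative examples).  */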

/* Given a pointer value OP0, return a simplified version of an
   indirection through OP0, or NULL_TREE if no simplification is
   possible.  Note that the resulting type may be different from
   the type pointed to in the sense that it is still compatible
   from the langhooks point of view.  */

tree
gimple_fold_indirect_ref (tree t)
{
  tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
  tree sub = t;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (useless_type_conversion_p (type, optype))
	return op;

      /* *(foo *)&fooarray => fooarray[0] */
      if (TREE_CODE (optype) == ARRAY_TYPE
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
	  && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  if (TREE_CODE (min_val) == INTEGER_CST)
	    return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
	}
    }

  /* *(p + CST) -> ...  */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree addr = TREE_OPERAND (sub, 0);
      tree off = TREE_OPERAND (sub, 1);
      tree addrtype;

      STRIP_NOPS (addr);
      addrtype = TREE_TYPE (addr);

      /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
	  && tree_fits_uhwi_p (off))
	{
	  unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
	  tree part_width = TYPE_SIZE (type);
	  unsigned HOST_WIDE_INT part_widthi
	    = tree_to_shwi (part_width) / BITS_PER_UNIT;
	  unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
	  tree index = bitsize_int (indexi);
	  if (offset / part_widthi
	      < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
	    return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
				part_width, index);
	}

      /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
	{
	  tree size = TYPE_SIZE_UNIT (type);
	  if (tree_int_cst_equal (size, off))
	    return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
	}

      /* *(p + CST) -> MEM_REF <p, CST>.  */
      if (TREE_CODE (addr) != ADDR_EXPR
	  || DECL_P (TREE_OPERAND (addr, 0)))
	return fold_build2 (MEM_REF, type,
			    addr,
			    wide_int_to_tree (ptype, off));
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
      && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      tree osub = sub;
      sub = gimple_fold_indirect_ref (sub);
      if (! sub)
	sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      if (TREE_CODE (min_val) == INTEGER_CST)
	return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}

/* Return true if CODE is an operation that when operating on signed
   integer types involves undefined behavior on overflow and the
   operation can be expressed with unsigned arithmetic.  */

bool
arith_code_with_undefined_signed_overflow (tree_code code)
{
  switch (code)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case NEGATE_EXPR:
    case POINTER_PLUS_EXPR:
      return true;
    default:
      return false;
    }
}

/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
   operation that can be transformed to unsigned arithmetic by converting
   its operand, carrying out the operation in the corresponding unsigned
   type and converting the result back to the original type.

   Returns a sequence of statements that replace STMT and also contain
   a modified form of STMT itself.  */

gimple_seq
rewrite_to_defined_overflow (gimple stmt)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "rewriting stmt with undefined signed "
	       "overflow ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
    }

  tree lhs = gimple_assign_lhs (stmt);
  tree type = unsigned_type_for (TREE_TYPE (lhs));
  gimple_seq stmts = NULL;
  for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
    {
      gimple_seq stmts2 = NULL;
      gimple_set_op (stmt, i,
		     force_gimple_operand (fold_convert (type,
							 gimple_op (stmt, i)),
					   &stmts2, true, NULL_TREE));
      gimple_seq_add_seq (&stmts, stmts2);
    }
  gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
  gimple_seq_add_stmt (&stmts, stmt);
  gimple cvt = gimple_build_assign_with_ops
      (NOP_EXPR, lhs, gimple_assign_lhs (stmt), NULL_TREE);
  gimple_seq_add_stmt (&stmts, cvt);

  return stmts;
}
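
/* For example (SSA names illustrative), the signed addition

     x_1 = a_2 + b_3;

   is rewritten into the returned sequence

     u1_4 = (unsigned int) a_2;
     u2_5 = (unsigned int) b_3;
     u3_6 = u1_4 + u2_5;
     x_1 = (int) u3_6;

   where the unsigned addition wraps instead of invoking undefined
   behavior.  */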

/* Build the expression CODE OP0 of type TYPE with location LOC,
   simplifying it first if possible using VALUEIZE if not NULL.
   OP0 is expected to be valueized already.  Returns the built
   expression value and appends statements possibly defining it
   to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum tree_code code, tree type, tree op0,
	      tree (*valueize)(tree))
{
  tree res = gimple_simplify (code, type, op0, seq, valueize);
  if (!res)
    {
      if (gimple_in_ssa_p (cfun))
	res = make_ssa_name (type, NULL);
      else
	res = create_tmp_reg (type, NULL);
      gimple stmt;
      if (code == REALPART_EXPR
	  || code == IMAGPART_EXPR
	  || code == VIEW_CONVERT_EXPR)
	stmt = gimple_build_assign_with_ops (code, res,
					     build1 (code, type, op0),
					     NULL_TREE);
      else
	stmt = gimple_build_assign_with_ops (code, res, op0, NULL_TREE);
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}

/* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
   simplifying it first if possible using VALUEIZE if not NULL.
   OP0 and OP1 are expected to be valueized already.  Returns the built
   expression value and appends statements possibly defining it
   to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum tree_code code, tree type, tree op0, tree op1,
	      tree (*valueize)(tree))
{
  tree res = gimple_simplify (code, type, op0, op1, seq, valueize);
  if (!res)
    {
      if (gimple_in_ssa_p (cfun))
	res = make_ssa_name (type, NULL);
      else
	res = create_tmp_reg (type, NULL);
      gimple stmt = gimple_build_assign_with_ops (code, res, op0, op1);
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}

/* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
   simplifying it first if possible using VALUEIZE if not NULL.
   OP0, OP1 and OP2 are expected to be valueized already.  Returns the built
   expression value and appends statements possibly defining it
   to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum tree_code code, tree type, tree op0, tree op1, tree op2,
	      tree (*valueize)(tree))
{
  tree res = gimple_simplify (code, type, op0, op1, op2,
			      seq, valueize);
  if (!res)
    {
      if (gimple_in_ssa_p (cfun))
	res = make_ssa_name (type, NULL);
      else
	res = create_tmp_reg (type, NULL);
      gimple stmt;
      if (code == BIT_FIELD_REF)
	stmt = gimple_build_assign_with_ops (code, res,
					     build3 (BIT_FIELD_REF, type,
						     op0, op1, op2),
					     NULL_TREE);
      else
	stmt = gimple_build_assign_with_ops (code, res, op0, op1, op2);
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}

/* Build the call FN (ARG0) with a result of type TYPE
   (or no result if TYPE is void) with location LOC,
   simplifying it first if possible using VALUEIZE if not NULL.
   ARG0 is expected to be valueized already.  Returns the built
   expression value (or NULL_TREE if TYPE is void) and appends
   statements possibly defining it to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum built_in_function fn, tree type, tree arg0,
	      tree (*valueize)(tree))
{
  tree res = gimple_simplify (fn, type, arg0, seq, valueize);
  if (!res)
    {
      tree decl = builtin_decl_implicit (fn);
      gimple stmt = gimple_build_call (decl, 1, arg0);
      if (!VOID_TYPE_P (type))
	{
	  if (gimple_in_ssa_p (cfun))
	    res = make_ssa_name (type, NULL);
	  else
	    res = create_tmp_reg (type, NULL);
	  gimple_call_set_lhs (stmt, res);
	}
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}

/* Build the call FN (ARG0, ARG1) with a result of type TYPE
   (or no result if TYPE is void) with location LOC,
   simplifying it first if possible using VALUEIZE if not NULL.
   ARG0 and ARG1 are expected to be valueized already.  Returns the built
   expression value (or NULL_TREE if TYPE is void) and appends
   statements possibly defining it to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum built_in_function fn, tree type, tree arg0, tree arg1,
	      tree (*valueize)(tree))
{
  tree res = gimple_simplify (fn, type, arg0, arg1, seq, valueize);
  if (!res)
    {
      tree decl = builtin_decl_implicit (fn);
      gimple stmt = gimple_build_call (decl, 2, arg0, arg1);
      if (!VOID_TYPE_P (type))
	{
	  if (gimple_in_ssa_p (cfun))
	    res = make_ssa_name (type, NULL);
	  else
	    res = create_tmp_reg (type, NULL);
	  gimple_call_set_lhs (stmt, res);
	}
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}

/* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
   (or no result if TYPE is void) with location LOC,
   simplifying it first if possible using VALUEIZE if not NULL.
   ARG0, ARG1 and ARG2 are expected to be valueized already.  Returns the
   built expression value (or NULL_TREE if TYPE is void) and appends
   statements possibly defining it to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum built_in_function fn, tree type,
	      tree arg0, tree arg1, tree arg2,
	      tree (*valueize)(tree))
{
  tree res = gimple_simplify (fn, type, arg0, arg1, arg2, seq, valueize);
  if (!res)
    {
      tree decl = builtin_decl_implicit (fn);
      gimple stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
      if (!VOID_TYPE_P (type))
	{
	  if (gimple_in_ssa_p (cfun))
	    res = make_ssa_name (type, NULL);
	  else
	    res = create_tmp_reg (type, NULL);
	  gimple_call_set_lhs (stmt, res);
	}
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}

/* Build the conversion (TYPE) OP with a result of type TYPE
   with location LOC if such conversion is necessary in GIMPLE,
   simplifying it first.
   Returns the built expression value and appends
   statements possibly defining it to SEQ.  */

tree
gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
{
  if (useless_type_conversion_p (type, TREE_TYPE (op)))
    return op;
  return gimple_build (seq, loc, NOP_EXPR, type, op);
}
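
/* Usage sketch (illustrative; assumes A and B are valueized SSA names
   and GSI is a statement iterator): to materialize (int) (a + b)

     gimple_seq seq = NULL;
     tree sum = gimple_build (&seq, loc, PLUS_EXPR, TREE_TYPE (a),
			      a, b, NULL);
     tree res = gimple_convert (&seq, loc, integer_type_node, sum);
     gsi_insert_seq_before (&gsi, seq, GSI_SAME_STMT);

   Statements are emitted only for the parts gimple_simplify cannot
   fold away.  */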