1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010-2014 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
26 #include "stringpool.h"
29 #include "stor-layout.h"
35 #include "hard-reg-set.h"
41 #include "dominance.h"
42 #include "basic-block.h"
43 #include "tree-ssa-alias.h"
44 #include "internal-fn.h"
45 #include "gimple-fold.h"
46 #include "gimple-expr.h"
50 #include "gimple-iterator.h"
51 #include "gimple-ssa.h"
52 #include "tree-ssanames.h"
53 #include "tree-into-ssa.h"
56 #include "tree-ssa-propagate.h"
58 #include "ipa-utils.h"
59 #include "gimple-pretty-print.h"
60 #include "tree-ssa-address.h"
61 #include "langhooks.h"
62 #include "gimplify-me.h"
67 #include "gimple-match.h"
68 #include "tree-phinodes.h"
69 #include "ssa-iterators.h"
71 /* Return true when DECL can be referenced from current unit.
72 FROM_DECL (if non-null) specifies the constructor of variable DECL was taken from.
73 We can get declarations that are not possible to reference for various
76 1) When analyzing C++ virtual tables.
77 C++ virtual tables do have known constructors even
78 when they are keyed to other compilation unit.
79 Those tables can contain pointers to methods and vars
80 in other units. Those methods have both STATIC and EXTERNAL
82 2) In WHOPR mode devirtualization might lead to reference
83 to method that was partitioned elsewhere.
84 In this case we have static VAR_DECL or FUNCTION_DECL
85 that has no corresponding callgraph/varpool node
87 3) COMDAT functions referred by external vtables that
88 we devirtualize only during final compilation stage.
89 At this time we already decided that we will not output
90 the function body and thus we can't reference the symbol
/* NOTE(review): this extraction is mangled -- statements are split across
   physical lines and interior lines (including braces and returns) are
   missing, as the embedded original line numbers jump.  Code is kept
   byte-identical; only comments are added/fixed.  Visible intent: decide
   whether DECL (a static/external VAR_DECL or FUNCTION_DECL) may be
   referenced from the current unit, consulting the symbol table
   (symtab_node::get) and, for functions, the cgraph node's inlined_to
   state.  Confirm details against upstream gimple-fold.c.  */
94 can_refer_decl_in_current_unit_p (tree decl
, tree from_decl
)
97 struct cgraph_node
*node
;
100 if (DECL_ABSTRACT_P (decl
))
103 /* We are concerned only about static/external vars and functions. */
104 if ((!TREE_STATIC (decl
) && !DECL_EXTERNAL (decl
))
105 || (TREE_CODE (decl
) != VAR_DECL
&& TREE_CODE (decl
) != FUNCTION_DECL
))
108 /* Static objects can be referred only if they were not optimized out yet. */
109 if (!TREE_PUBLIC (decl
) && !DECL_EXTERNAL (decl
))
111 /* Before we start optimizing unreachable code we can be sure all
112 static objects are defined. */
113 if (symtab
->function_flags_ready
)
115 snode
= symtab_node::get (decl
);
116 if (!snode
|| !snode
->definition
)
118 node
= dyn_cast
<cgraph_node
*> (snode
);
/* NOTE(review): a function symbol is referable only if it was not
   inlined away (global.inlined_to unset) -- presumably; confirm.  */
119 return !node
|| !node
->global
.inlined_to
;
122 /* We will later output the initializer, so we can refer to it.
123 So we are concerned only when DECL comes from initializer of
124 external var or var that has been optimized out. */
126 || TREE_CODE (from_decl
) != VAR_DECL
127 || (!DECL_EXTERNAL (from_decl
)
128 && (vnode
= varpool_node::get (from_decl
)) != NULL
129 && vnode
->definition
)
131 && (vnode
= varpool_node::get (from_decl
)) != NULL
132 && vnode
->in_other_partition
))
134 /* We are folding reference from external vtable. The vtable may refer
135 to a symbol keyed to other compilation unit. The other compilation
136 unit may be in separate DSO and the symbol may be hidden. */
137 if (DECL_VISIBILITY_SPECIFIED (decl
)
138 && DECL_EXTERNAL (decl
)
139 && DECL_VISIBILITY (decl
) != VISIBILITY_DEFAULT
140 && (!(snode
= symtab_node::get (decl
)) || !snode
->in_other_partition
))
142 /* When function is public, we always can introduce new reference.
143 Exception are the COMDAT functions where introducing a direct
144 reference imply need to include function body in the current unit. */
145 if (TREE_PUBLIC (decl
) && !DECL_COMDAT (decl
))
147 /* We have COMDAT. We are going to check if we still have definition
148 or if the definition is going to be output in other partition.
149 Bypass this when gimplifying; all needed functions will be produced.
151 As observed in PR20991 for already optimized out comdat virtual functions
152 it may be tempting to not necessarily give up because the copy will be
153 output elsewhere when corresponding vtable is output.
154 This is however not possible - the ABI specifies that COMDATs are output in
155 units where they are used and when the other unit was compiled with LTO
156 it is possible that vtable was kept public while the function itself
158 if (!symtab
->function_flags_ready
)
161 snode
= symtab_node::get (decl
);
163 || ((!snode
->definition
|| DECL_EXTERNAL (decl
))
164 && (!snode
->in_other_partition
165 || (!snode
->forced_by_abi
&& !snode
->force_output
))))
167 node
= dyn_cast
<cgraph_node
*> (snode
);
168 return !node
|| !node
->global
.inlined_to
;
171 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
172 acceptable form for is_gimple_min_invariant.
173 FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL. */
/* NOTE(review): mangled extraction (split statements, missing interior
   lines/braces); code kept byte-identical, comments only.  Visible intent:
   canonicalize CVAL, a value from a DECL_INITIAL, into a form acceptable
   to is_gimple_min_invariant: rewrite POINTER_PLUS_EXPR of an invariant
   into &MEM_REF, resolve compound literals under ADDR_EXPR, make
   referenced decls referable (TREE_ADDRESSABLE / cgraph node creation),
   and fix up types.  Confirm against upstream gimple-fold.c.  */
176 canonicalize_constructor_val (tree cval
, tree from_decl
)
178 tree orig_cval
= cval
;
180 if (TREE_CODE (cval
) == POINTER_PLUS_EXPR
181 && TREE_CODE (TREE_OPERAND (cval
, 1)) == INTEGER_CST
)
183 tree ptr
= TREE_OPERAND (cval
, 0);
184 if (is_gimple_min_invariant (ptr
))
185 cval
= build1_loc (EXPR_LOCATION (cval
),
186 ADDR_EXPR
, TREE_TYPE (ptr
),
187 fold_build2 (MEM_REF
, TREE_TYPE (TREE_TYPE (ptr
)),
189 fold_convert (ptr_type_node
,
190 TREE_OPERAND (cval
, 1))));
192 if (TREE_CODE (cval
) == ADDR_EXPR
)
194 tree base
= NULL_TREE
;
195 if (TREE_CODE (TREE_OPERAND (cval
, 0)) == COMPOUND_LITERAL_EXPR
)
197 base
= COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval
, 0));
199 TREE_OPERAND (cval
, 0) = base
;
202 base
= get_base_address (TREE_OPERAND (cval
, 0));
206 if ((TREE_CODE (base
) == VAR_DECL
207 || TREE_CODE (base
) == FUNCTION_DECL
)
208 && !can_refer_decl_in_current_unit_p (base
, from_decl
))
210 if (TREE_CODE (base
) == VAR_DECL
)
211 TREE_ADDRESSABLE (base
) = 1;
212 else if (TREE_CODE (base
) == FUNCTION_DECL
)
214 /* Make sure we create a cgraph node for functions we'll reference.
215 They can be non-existent if the reference comes from an entry
216 of an external vtable for example. */
217 cgraph_node::get_create (base
);
219 /* Fixup types in global initializers. */
220 if (TREE_TYPE (TREE_TYPE (cval
)) != TREE_TYPE (TREE_OPERAND (cval
, 0)))
221 cval
= build_fold_addr_expr (TREE_OPERAND (cval
, 0));
223 if (!useless_type_conversion_p (TREE_TYPE (orig_cval
), TREE_TYPE (cval
)))
224 cval
= fold_convert (TREE_TYPE (orig_cval
), cval
);
/* Drop TREE_OVERFLOW: overflow flags from folding must not leak into
   the canonical constant.  */
227 if (TREE_OVERFLOW_P (cval
))
228 return drop_tree_overflow (cval
);
232 /* If SYM is a constant variable with known value, return the value.
233 NULL_TREE is returned otherwise. */
/* NOTE(review): mangled extraction (missing interior lines); code kept
   byte-identical, comments only.  Visible intent: return the known
   constant value of variable SYM via ctor_for_folding +
   canonicalize_constructor_val, or a zero constant for 'const' scalars
   with no initializer; presumably returns NULL_TREE otherwise (the
   fallthrough return is among the dropped lines) -- confirm upstream.  */
236 get_symbol_constant_value (tree sym
)
238 tree val
= ctor_for_folding (sym
);
239 if (val
!= error_mark_node
)
243 val
= canonicalize_constructor_val (unshare_expr (val
), sym
);
244 if (val
&& is_gimple_min_invariant (val
))
249 /* Variables declared 'const' without an initializer
250 have zero as the initializer if they may not be
251 overridden at link or run time. */
253 && (INTEGRAL_TYPE_P (TREE_TYPE (sym
))
254 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym
))))
255 return build_zero_cst (TREE_TYPE (sym
));
263 /* Subroutine of fold_stmt. We perform several simplifications of the
264 memory reference tree EXPR and make sure to re-gimplify them properly
265 after propagation of constant addresses. IS_LHS is true if the
266 reference is supposed to be an lvalue. */
/* NOTE(review): mangled extraction (missing interior lines); code kept
   byte-identical, comments only.  Visible intent: fold memory reference
   EXPR after constant-address propagation -- unary folds for
   VIEW_CONVERT/REALPART/IMAGPART of constants, ternary fold for
   BIT_FIELD_REF of constants, then fold_const_aggregate_ref; the role of
   IS_LHS and the final return are in the dropped lines.  */
269 maybe_fold_reference (tree expr
, bool is_lhs
)
273 if ((TREE_CODE (expr
) == VIEW_CONVERT_EXPR
274 || TREE_CODE (expr
) == REALPART_EXPR
275 || TREE_CODE (expr
) == IMAGPART_EXPR
)
276 && CONSTANT_CLASS_P (TREE_OPERAND (expr
, 0)))
277 return fold_unary_loc (EXPR_LOCATION (expr
),
280 TREE_OPERAND (expr
, 0));
281 else if (TREE_CODE (expr
) == BIT_FIELD_REF
282 && CONSTANT_CLASS_P (TREE_OPERAND (expr
, 0)))
283 return fold_ternary_loc (EXPR_LOCATION (expr
),
286 TREE_OPERAND (expr
, 0),
287 TREE_OPERAND (expr
, 1),
288 TREE_OPERAND (expr
, 2));
291 && (result
= fold_const_aggregate_ref (expr
))
292 && is_gimple_min_invariant (result
))
299 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
300 replacement rhs for the statement or NULL_TREE if no simplification
301 could be made. It is assumed that the operands have been previously
/* NOTE(review): mangled extraction -- statements split across lines and
   many interior lines (braces, returns, several operands) missing; code
   kept byte-identical, comments only.  Visible intent: fold the assignment
   at *SI and return a replacement RHS or NULL_TREE, dispatching on the
   GIMPLE RHS class: SINGLE (references, OBJ_TYPE_REF devirtualization,
   ADDR_EXPR, vector CONSTRUCTOR, decls), UNARY, BINARY (boolean
   comparison canonicalization), TERNARY (COND_EXPR).  Confirm against
   upstream gimple-fold.c before relying on any detail.  */
305 fold_gimple_assign (gimple_stmt_iterator
*si
)
307 gimple stmt
= gsi_stmt (*si
);
308 enum tree_code subcode
= gimple_assign_rhs_code (stmt
);
309 location_t loc
= gimple_location (stmt
);
311 tree result
= NULL_TREE
;
313 switch (get_gimple_rhs_class (subcode
))
315 case GIMPLE_SINGLE_RHS
:
317 tree rhs
= gimple_assign_rhs1 (stmt
);
319 if (REFERENCE_CLASS_P (rhs
))
320 return maybe_fold_reference (rhs
, false);
322 else if (TREE_CODE (rhs
) == OBJ_TYPE_REF
)
324 tree val
= OBJ_TYPE_REF_EXPR (rhs
);
325 if (is_gimple_min_invariant (val
))
327 else if (flag_devirtualize
&& virtual_method_call_p (rhs
))
330 vec
<cgraph_node
*>targets
331 = possible_polymorphic_call_targets (rhs
, stmt
, &final
);
332 if (final
&& targets
.length () <= 1 && dbg_cnt (devirt
))
334 if (dump_enabled_p ())
336 location_t loc
= gimple_location_safe (stmt
);
337 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS
, loc
,
338 "resolving virtual function address "
339 "reference to function %s\n",
340 targets
.length () == 1
341 ? targets
[0]->name ()
344 if (targets
.length () == 1)
346 val
= fold_convert (TREE_TYPE (val
),
347 build_fold_addr_expr_loc
348 (loc
, targets
[0]->decl
));
349 STRIP_USELESS_TYPE_CONVERSION (val
);
352 /* We can not use __builtin_unreachable here because it
353 can not have address taken. */
354 val
= build_int_cst (TREE_TYPE (val
), 0);
360 else if (TREE_CODE (rhs
) == ADDR_EXPR
)
362 tree ref
= TREE_OPERAND (rhs
, 0);
363 tree tem
= maybe_fold_reference (ref
, true);
365 && TREE_CODE (tem
) == MEM_REF
366 && integer_zerop (TREE_OPERAND (tem
, 1)))
367 result
= fold_convert (TREE_TYPE (rhs
), TREE_OPERAND (tem
, 0));
369 result
= fold_convert (TREE_TYPE (rhs
),
370 build_fold_addr_expr_loc (loc
, tem
));
371 else if (TREE_CODE (ref
) == MEM_REF
372 && integer_zerop (TREE_OPERAND (ref
, 1)))
373 result
= fold_convert (TREE_TYPE (rhs
), TREE_OPERAND (ref
, 0));
376 else if (TREE_CODE (rhs
) == CONSTRUCTOR
377 && TREE_CODE (TREE_TYPE (rhs
)) == VECTOR_TYPE
378 && (CONSTRUCTOR_NELTS (rhs
)
379 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs
))))
381 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
385 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs
), i
, val
)
386 if (TREE_CODE (val
) != INTEGER_CST
387 && TREE_CODE (val
) != REAL_CST
388 && TREE_CODE (val
) != FIXED_CST
)
391 return build_vector_from_ctor (TREE_TYPE (rhs
),
392 CONSTRUCTOR_ELTS (rhs
));
395 else if (DECL_P (rhs
))
396 return get_symbol_constant_value (rhs
);
398 /* If we couldn't fold the RHS, hand over to the generic
400 if (result
== NULL_TREE
)
403 /* Strip away useless type conversions. Both the NON_LVALUE_EXPR
404 that may have been added by fold, and "useless" type
405 conversions that might now be apparent due to propagation. */
406 STRIP_USELESS_TYPE_CONVERSION (result
);
408 if (result
!= rhs
&& valid_gimple_rhs_p (result
))
415 case GIMPLE_UNARY_RHS
:
417 tree rhs
= gimple_assign_rhs1 (stmt
);
419 result
= fold_unary_loc (loc
, subcode
, gimple_expr_type (stmt
), rhs
);
422 /* If the operation was a conversion do _not_ mark a
423 resulting constant with TREE_OVERFLOW if the original
424 constant was not. These conversions have implementation
425 defined behavior and retaining the TREE_OVERFLOW flag
426 here would confuse later passes such as VRP. */
427 if (CONVERT_EXPR_CODE_P (subcode
)
428 && TREE_CODE (result
) == INTEGER_CST
429 && TREE_CODE (rhs
) == INTEGER_CST
)
430 TREE_OVERFLOW (result
) = TREE_OVERFLOW (rhs
);
432 STRIP_USELESS_TYPE_CONVERSION (result
);
433 if (valid_gimple_rhs_p (result
))
439 case GIMPLE_BINARY_RHS
:
440 /* Try to canonicalize for boolean-typed X the comparisons
441 X == 0, X == 1, X != 0, and X != 1. */
442 if (gimple_assign_rhs_code (stmt
) == EQ_EXPR
443 || gimple_assign_rhs_code (stmt
) == NE_EXPR
)
445 tree lhs
= gimple_assign_lhs (stmt
);
446 tree op1
= gimple_assign_rhs1 (stmt
);
447 tree op2
= gimple_assign_rhs2 (stmt
);
448 tree type
= TREE_TYPE (op1
);
450 /* Check whether the comparison operands are of the same boolean
451 type as the result type is.
452 Check that second operand is an integer-constant with value
454 if (TREE_CODE (op2
) == INTEGER_CST
455 && (integer_zerop (op2
) || integer_onep (op2
))
456 && useless_type_conversion_p (TREE_TYPE (lhs
), type
))
458 enum tree_code cmp_code
= gimple_assign_rhs_code (stmt
);
459 bool is_logical_not
= false;
461 /* X == 0 and X != 1 is a logical-not of X
462 X == 1 and X != 0 is X */
463 if ((cmp_code
== EQ_EXPR
&& integer_zerop (op2
))
464 || (cmp_code
== NE_EXPR
&& integer_onep (op2
)))
465 is_logical_not
= true;
467 if (is_logical_not
== false)
469 /* Only for one-bit precision typed X the transformation
470 !X -> ~X is valid. */
471 else if (TYPE_PRECISION (type
) == 1)
472 result
= build1_loc (gimple_location (stmt
), BIT_NOT_EXPR
,
474 /* Otherwise we use !X -> X ^ 1. */
476 result
= build2_loc (gimple_location (stmt
), BIT_XOR_EXPR
,
477 type
, op1
, build_int_cst (type
, 1));
483 result
= fold_binary_loc (loc
, subcode
,
484 TREE_TYPE (gimple_assign_lhs (stmt
)),
485 gimple_assign_rhs1 (stmt
),
486 gimple_assign_rhs2 (stmt
));
490 STRIP_USELESS_TYPE_CONVERSION (result
);
491 if (valid_gimple_rhs_p (result
))
496 case GIMPLE_TERNARY_RHS
:
497 /* Try to fold a conditional expression. */
498 if (gimple_assign_rhs_code (stmt
) == COND_EXPR
)
500 tree op0
= gimple_assign_rhs1 (stmt
);
503 location_t cond_loc
= gimple_location (stmt
);
505 if (COMPARISON_CLASS_P (op0
))
507 fold_defer_overflow_warnings ();
508 tem
= fold_binary_loc (cond_loc
,
509 TREE_CODE (op0
), TREE_TYPE (op0
),
510 TREE_OPERAND (op0
, 0),
511 TREE_OPERAND (op0
, 1));
512 /* This is actually a conditional expression, not a GIMPLE
513 conditional statement, however, the valid_gimple_rhs_p
514 test still applies. */
515 set
= (tem
&& is_gimple_condexpr (tem
)
516 && valid_gimple_rhs_p (tem
));
517 fold_undefer_overflow_warnings (set
, stmt
, 0);
519 else if (is_gimple_min_invariant (op0
))
528 result
= fold_build3_loc (cond_loc
, COND_EXPR
,
529 TREE_TYPE (gimple_assign_lhs (stmt
)), tem
,
530 gimple_assign_rhs2 (stmt
),
531 gimple_assign_rhs3 (stmt
));
535 result
= fold_ternary_loc (loc
, subcode
,
536 TREE_TYPE (gimple_assign_lhs (stmt
)),
537 gimple_assign_rhs1 (stmt
),
538 gimple_assign_rhs2 (stmt
),
539 gimple_assign_rhs3 (stmt
));
543 STRIP_USELESS_TYPE_CONVERSION (result
);
544 if (valid_gimple_rhs_p (result
))
549 case GIMPLE_INVALID_RHS
:
556 /* Attempt to fold a conditional statement. Return true if any changes were
557 made. We only attempt to fold the condition expression, and do not perform
558 any transformation that would require alteration of the cfg. It is
559 assumed that the operands have been previously folded. */
/* NOTE(review): mangled extraction (missing interior lines, e.g. the
   returns and one fold_binary_loc operand); code kept byte-identical,
   comments only.  Visible intent: fold the GIMPLE_COND's comparison and,
   if the folded tree is a valid GIMPLE condition, write it back into the
   statement; presumably returns whether a change was made.  */
562 fold_gimple_cond (gimple stmt
)
564 tree result
= fold_binary_loc (gimple_location (stmt
),
565 gimple_cond_code (stmt
),
567 gimple_cond_lhs (stmt
),
568 gimple_cond_rhs (stmt
));
572 STRIP_USELESS_TYPE_CONVERSION (result
);
573 if (is_gimple_condexpr (result
) && valid_gimple_rhs_p (result
))
575 gimple_cond_set_condition_from_tree (stmt
, result
);
584 /* Replace a statement at *SI_P with a sequence of statements in STMTS,
585 adjusting the replacement stmts location and virtual operands.
586 If the statement has a lhs the last stmt in the sequence is expected
587 to assign to that lhs. */
/* NOTE(review): mangled extraction (missing interior lines/braces); code
   kept byte-identical, comments only.  Visible intent: replace the stmt
   at *SI_P with sequence STMTS, rethreading virtual operands -- a
   backward pass assigns VDEFs (the last store reuses the original stmt's
   VDEF, earlier ones get fresh SSA names), a forward pass assigns VUSEs,
   and the original VDEF is released if the sequence does no store.  */
590 gsi_replace_with_seq_vops (gimple_stmt_iterator
*si_p
, gimple_seq stmts
)
592 gimple stmt
= gsi_stmt (*si_p
);
594 if (gimple_has_location (stmt
))
595 annotate_all_with_location (stmts
, gimple_location (stmt
));
597 /* First iterate over the replacement statements backward, assigning
598 virtual operands to their defining statements. */
599 gimple laststore
= NULL
;
600 for (gimple_stmt_iterator i
= gsi_last (stmts
);
601 !gsi_end_p (i
); gsi_prev (&i
))
603 gimple new_stmt
= gsi_stmt (i
);
604 if ((gimple_assign_single_p (new_stmt
)
605 && !is_gimple_reg (gimple_assign_lhs (new_stmt
)))
606 || (is_gimple_call (new_stmt
)
607 && (gimple_call_flags (new_stmt
)
608 & (ECF_NOVOPS
| ECF_PURE
| ECF_CONST
| ECF_NORETURN
)) == 0))
612 vdef
= gimple_vdef (stmt
);
614 vdef
= make_ssa_name (gimple_vop (cfun
), new_stmt
);
615 gimple_set_vdef (new_stmt
, vdef
);
616 if (vdef
&& TREE_CODE (vdef
) == SSA_NAME
)
617 SSA_NAME_DEF_STMT (vdef
) = new_stmt
;
618 laststore
= new_stmt
;
622 /* Second iterate over the statements forward, assigning virtual
623 operands to their uses. */
624 tree reaching_vuse
= gimple_vuse (stmt
);
625 for (gimple_stmt_iterator i
= gsi_start (stmts
);
626 !gsi_end_p (i
); gsi_next (&i
))
628 gimple new_stmt
= gsi_stmt (i
);
629 /* If the new statement possibly has a VUSE, update it with exact SSA
630 name we know will reach this one. */
631 if (gimple_has_mem_ops (new_stmt
))
632 gimple_set_vuse (new_stmt
, reaching_vuse
);
633 gimple_set_modified (new_stmt
, true);
634 if (gimple_vdef (new_stmt
))
635 reaching_vuse
= gimple_vdef (new_stmt
);
638 /* If the new sequence does not do a store release the virtual
639 definition of the original statement. */
641 && reaching_vuse
== gimple_vuse (stmt
))
643 tree vdef
= gimple_vdef (stmt
);
645 && TREE_CODE (vdef
) == SSA_NAME
)
647 unlink_stmt_vdef (stmt
);
648 release_ssa_name (vdef
);
652 /* Finally replace the original statement with the sequence. */
653 gsi_replace_with_seq (si_p
, stmts
, false);
656 /* Convert EXPR into a GIMPLE value suitable for substitution on the
657 RHS of an assignment. Insert the necessary statements before
658 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
659 is replaced. If the call is expected to produce a result, then it
660 is replaced by an assignment of the new RHS to the result variable.
661 If the result is to be ignored, then the call is replaced by a
662 GIMPLE_NOP. A proper VDEF chain is retained by making the first
663 VUSE and the last VDEF of the whole sequence be the same as the replaced
664 statement and using new SSA names for stores in between. */
/* NOTE(review): mangled extraction (missing interior lines/braces); code
   kept byte-identical, comments only.  Visible intent: gimplify EXPR and
   replace the GIMPLE_CALL at *SI_P with the resulting sequence via
   gsi_replace_with_seq_vops; a call with no LHS whose gimplification is
   empty becomes a GIMPLE_NOP, otherwise the last statement assigns the
   gimplified value to the call's LHS.  */
667 gimplify_and_update_call_from_tree (gimple_stmt_iterator
*si_p
, tree expr
)
670 gimple stmt
, new_stmt
;
671 gimple_stmt_iterator i
;
672 gimple_seq stmts
= NULL
;
674 stmt
= gsi_stmt (*si_p
);
676 gcc_assert (is_gimple_call (stmt
));
678 push_gimplify_context (gimple_in_ssa_p (cfun
));
680 lhs
= gimple_call_lhs (stmt
);
681 if (lhs
== NULL_TREE
)
683 gimplify_and_add (expr
, &stmts
);
684 /* We can end up with folding a memcpy of an empty class assignment
685 which gets optimized away by C++ gimplification. */
686 if (gimple_seq_empty_p (stmts
))
688 pop_gimplify_context (NULL
);
689 if (gimple_in_ssa_p (cfun
))
691 unlink_stmt_vdef (stmt
);
694 gsi_replace (si_p
, gimple_build_nop (), true);
700 tree tmp
= get_initialized_tmp_var (expr
, &stmts
, NULL
);
701 new_stmt
= gimple_build_assign (lhs
, tmp
);
702 i
= gsi_last (stmts
);
703 gsi_insert_after_without_update (&i
, new_stmt
,
704 GSI_CONTINUE_LINKING
);
707 pop_gimplify_context (NULL
);
709 gsi_replace_with_seq_vops (si_p
, stmts
);
713 /* Replace the call at *GSI with the gimple value VAL. */
/* NOTE(review): mangled extraction (missing interior lines/braces); code
   kept byte-identical, comments only.  Visible intent: replace the call
   at *GSI with value VAL -- an assignment to the call's LHS (converted
   if needed) when one exists, otherwise a GIMPLE_NOP -- and release the
   call's virtual definition.  */
716 replace_call_with_value (gimple_stmt_iterator
*gsi
, tree val
)
718 gimple stmt
= gsi_stmt (*gsi
);
719 tree lhs
= gimple_call_lhs (stmt
);
723 if (!useless_type_conversion_p (TREE_TYPE (lhs
), TREE_TYPE (val
)))
724 val
= fold_convert (TREE_TYPE (lhs
), val
);
725 repl
= gimple_build_assign (lhs
, val
);
728 repl
= gimple_build_nop ();
729 tree vdef
= gimple_vdef (stmt
);
730 if (vdef
&& TREE_CODE (vdef
) == SSA_NAME
)
732 unlink_stmt_vdef (stmt
);
733 release_ssa_name (vdef
);
735 gsi_replace (gsi
, repl
, true);
738 /* Replace the call at *GSI with the new call REPL and fold that
/* NOTE(review): mangled extraction (missing interior lines/braces); code
   kept byte-identical, comments only.  Visible intent: replace the call
   at *GSI with call REPL, transferring LHS, location and virtual
   operands (VDEF/VUSE) from the old statement; presumably the new call
   is then re-folded (the fold step is in the dropped lines).  */
742 replace_call_with_call_and_fold (gimple_stmt_iterator
*gsi
, gimple repl
)
744 gimple stmt
= gsi_stmt (*gsi
);
745 gimple_call_set_lhs (repl
, gimple_call_lhs (stmt
));
746 gimple_set_location (repl
, gimple_location (stmt
));
747 if (gimple_vdef (stmt
)
748 && TREE_CODE (gimple_vdef (stmt
)) == SSA_NAME
)
750 gimple_set_vdef (repl
, gimple_vdef (stmt
));
751 gimple_set_vuse (repl
, gimple_vuse (stmt
));
752 SSA_NAME_DEF_STMT (gimple_vdef (repl
)) = repl
;
754 gsi_replace (gsi
, repl
, true);
758 /* Return true if VAR is a VAR_DECL or a component thereof. */
/* NOTE(review): mangled extraction (the declaration/initialization of
   'inner' is among the dropped lines); code kept byte-identical, comments
   only.  Visible intent: strip handled components (ARRAY_REF,
   COMPONENT_REF, ...) from VAR and test whether the base is an SSA
   variable.  */
761 var_decl_component_p (tree var
)
764 while (handled_component_p (inner
))
765 inner
= TREE_OPERAND (inner
, 0);
766 return SSA_VAR_P (inner
);
769 /* Fold function call to builtin mem{{,p}cpy,move}. Return
770 NULL_TREE if no simplification can be made.
771 If ENDP is 0, return DEST (like memcpy).
772 If ENDP is 1, return DEST+LEN (like mempcpy).
773 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
774 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
/* NOTE(review): mangled extraction -- statements split across lines and
   many interior lines (braces, returns, early-exit paths) missing; code
   kept byte-identical, comments only.  Visible intent: fold
   mem{cpy,pcpy,move} calls at *GSI: zero-length and src==dest become
   nops/plain DEST; small power-of-two-length copies become a single
   load/store pair; provably non-overlapping memmove becomes memcpy;
   otherwise a single aggregate MEM_REF assignment is attempted, with
   the LHS rewritten per ENDP (0/3: DEST, 1: DEST+LEN, 2: DEST+LEN-1).
   Confirm details against upstream gimple-fold.c.  */
778 gimple_fold_builtin_memory_op (gimple_stmt_iterator
*gsi
,
779 tree dest
, tree src
, int endp
)
781 gimple stmt
= gsi_stmt (*gsi
);
782 tree lhs
= gimple_call_lhs (stmt
);
783 tree len
= gimple_call_arg (stmt
, 2);
784 tree destvar
, srcvar
;
785 location_t loc
= gimple_location (stmt
);
787 /* If the LEN parameter is zero, return DEST. */
788 if (integer_zerop (len
))
791 if (gimple_call_lhs (stmt
))
792 repl
= gimple_build_assign (gimple_call_lhs (stmt
), dest
);
794 repl
= gimple_build_nop ();
795 tree vdef
= gimple_vdef (stmt
);
796 if (vdef
&& TREE_CODE (vdef
) == SSA_NAME
)
798 unlink_stmt_vdef (stmt
);
799 release_ssa_name (vdef
);
801 gsi_replace (gsi
, repl
, true);
805 /* If SRC and DEST are the same (and not volatile), return
806 DEST{,+LEN,+LEN-1}. */
807 if (operand_equal_p (src
, dest
, 0))
809 unlink_stmt_vdef (stmt
);
810 if (gimple_vdef (stmt
) && TREE_CODE (gimple_vdef (stmt
)) == SSA_NAME
)
811 release_ssa_name (gimple_vdef (stmt
));
814 gsi_replace (gsi
, gimple_build_nop (), true);
821 tree srctype
, desttype
;
822 unsigned int src_align
, dest_align
;
825 /* Build accesses at offset zero with a ref-all character type. */
826 off0
= build_int_cst (build_pointer_type_for_mode (char_type_node
,
829 /* If we can perform the copy efficiently with first doing all loads
830 and then all stores inline it that way. Currently efficiently
831 means that we can load all the memory into a single integer
832 register which is what MOVE_MAX gives us. */
833 src_align
= get_pointer_alignment (src
);
834 dest_align
= get_pointer_alignment (dest
);
835 if (tree_fits_uhwi_p (len
)
836 && compare_tree_int (len
, MOVE_MAX
) <= 0
837 /* ??? Don't transform copies from strings with known length this
838 confuses the tree-ssa-strlen.c. This doesn't handle
839 the case in gcc.dg/strlenopt-8.c which is XFAILed for that
841 && !c_strlen (src
, 2))
843 unsigned ilen
= tree_to_uhwi (len
);
844 if (exact_log2 (ilen
) != -1)
846 tree type
= lang_hooks
.types
.type_for_size (ilen
* 8, 1);
848 && TYPE_MODE (type
) != BLKmode
849 && (GET_MODE_SIZE (TYPE_MODE (type
)) * BITS_PER_UNIT
851 /* If the destination pointer is not aligned we must be able
852 to emit an unaligned store. */
853 && (dest_align
>= GET_MODE_ALIGNMENT (TYPE_MODE (type
))
854 || !SLOW_UNALIGNED_ACCESS (TYPE_MODE (type
), dest_align
)))
857 tree desttype
= type
;
858 if (src_align
< GET_MODE_ALIGNMENT (TYPE_MODE (type
)))
859 srctype
= build_aligned_type (type
, src_align
);
860 tree srcmem
= fold_build2 (MEM_REF
, srctype
, src
, off0
);
861 tree tem
= fold_const_aggregate_ref (srcmem
);
864 else if (src_align
< GET_MODE_ALIGNMENT (TYPE_MODE (type
))
865 && SLOW_UNALIGNED_ACCESS (TYPE_MODE (type
),
871 if (is_gimple_reg_type (TREE_TYPE (srcmem
)))
873 new_stmt
= gimple_build_assign (NULL_TREE
, srcmem
);
874 if (gimple_in_ssa_p (cfun
))
875 srcmem
= make_ssa_name (TREE_TYPE (srcmem
),
878 srcmem
= create_tmp_reg (TREE_TYPE (srcmem
),
880 gimple_assign_set_lhs (new_stmt
, srcmem
);
881 gimple_set_vuse (new_stmt
, gimple_vuse (stmt
));
882 gsi_insert_before (gsi
, new_stmt
, GSI_SAME_STMT
);
884 if (dest_align
< GET_MODE_ALIGNMENT (TYPE_MODE (type
)))
885 desttype
= build_aligned_type (type
, dest_align
);
887 = gimple_build_assign (fold_build2 (MEM_REF
, desttype
,
890 gimple_set_vuse (new_stmt
, gimple_vuse (stmt
));
891 gimple_set_vdef (new_stmt
, gimple_vdef (stmt
));
892 if (gimple_vdef (new_stmt
)
893 && TREE_CODE (gimple_vdef (new_stmt
)) == SSA_NAME
)
894 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt
)) = new_stmt
;
897 gsi_replace (gsi
, new_stmt
, true);
900 gsi_insert_before (gsi
, new_stmt
, GSI_SAME_STMT
);
909 /* Both DEST and SRC must be pointer types.
910 ??? This is what old code did. Is the testing for pointer types
913 If either SRC is readonly or length is 1, we can use memcpy. */
914 if (!dest_align
|| !src_align
)
916 if (readonly_data_expr (src
)
917 || (tree_fits_uhwi_p (len
)
918 && (MIN (src_align
, dest_align
) / BITS_PER_UNIT
919 >= tree_to_uhwi (len
))))
921 tree fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
924 gimple_call_set_fndecl (stmt
, fn
);
925 gimple_call_set_arg (stmt
, 0, dest
);
926 gimple_call_set_arg (stmt
, 1, src
);
931 /* If *src and *dest can't overlap, optimize into memcpy as well. */
932 if (TREE_CODE (src
) == ADDR_EXPR
933 && TREE_CODE (dest
) == ADDR_EXPR
)
935 tree src_base
, dest_base
, fn
;
936 HOST_WIDE_INT src_offset
= 0, dest_offset
= 0;
937 HOST_WIDE_INT size
= -1;
938 HOST_WIDE_INT maxsize
= -1;
940 srcvar
= TREE_OPERAND (src
, 0);
941 src_base
= get_ref_base_and_extent (srcvar
, &src_offset
,
943 destvar
= TREE_OPERAND (dest
, 0);
944 dest_base
= get_ref_base_and_extent (destvar
, &dest_offset
,
946 if (tree_fits_uhwi_p (len
))
947 maxsize
= tree_to_uhwi (len
);
950 src_offset
/= BITS_PER_UNIT
;
951 dest_offset
/= BITS_PER_UNIT
;
952 if (SSA_VAR_P (src_base
)
953 && SSA_VAR_P (dest_base
))
955 if (operand_equal_p (src_base
, dest_base
, 0)
956 && ranges_overlap_p (src_offset
, maxsize
,
957 dest_offset
, maxsize
))
960 else if (TREE_CODE (src_base
) == MEM_REF
961 && TREE_CODE (dest_base
) == MEM_REF
)
963 if (! operand_equal_p (TREE_OPERAND (src_base
, 0),
964 TREE_OPERAND (dest_base
, 0), 0))
966 offset_int off
= mem_ref_offset (src_base
) + src_offset
;
967 if (!wi::fits_shwi_p (off
))
969 src_offset
= off
.to_shwi ();
971 off
= mem_ref_offset (dest_base
) + dest_offset
;
972 if (!wi::fits_shwi_p (off
))
974 dest_offset
= off
.to_shwi ();
975 if (ranges_overlap_p (src_offset
, maxsize
,
976 dest_offset
, maxsize
))
982 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
985 gimple_call_set_fndecl (stmt
, fn
);
986 gimple_call_set_arg (stmt
, 0, dest
);
987 gimple_call_set_arg (stmt
, 1, src
);
992 /* If the destination and source do not alias optimize into
994 if ((is_gimple_min_invariant (dest
)
995 || TREE_CODE (dest
) == SSA_NAME
)
996 && (is_gimple_min_invariant (src
)
997 || TREE_CODE (src
) == SSA_NAME
))
1000 ao_ref_init_from_ptr_and_size (&destr
, dest
, len
);
1001 ao_ref_init_from_ptr_and_size (&srcr
, src
, len
);
1002 if (!refs_may_alias_p_1 (&destr
, &srcr
, false))
1005 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
1008 gimple_call_set_fndecl (stmt
, fn
);
1009 gimple_call_set_arg (stmt
, 0, dest
);
1010 gimple_call_set_arg (stmt
, 1, src
);
1019 if (!tree_fits_shwi_p (len
))
1022 This logic loses for arguments like (type *)malloc (sizeof (type)),
1023 since we strip the casts of up to VOID return value from malloc.
1024 Perhaps we ought to inherit type from non-VOID argument here? */
1027 if (!POINTER_TYPE_P (TREE_TYPE (src
))
1028 || !POINTER_TYPE_P (TREE_TYPE (dest
)))
1030 /* In the following try to find a type that is most natural to be
1031 used for the memcpy source and destination and that allows
1032 the most optimization when memcpy is turned into a plain assignment
1033 using that type. In theory we could always use a char[len] type
1034 but that only gains us that the destination and source possibly
1035 no longer will have their address taken. */
1036 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
1037 if (TREE_CODE (src
) == POINTER_PLUS_EXPR
)
1039 tree tem
= TREE_OPERAND (src
, 0);
1041 if (tem
!= TREE_OPERAND (src
, 0))
1042 src
= build1 (NOP_EXPR
, TREE_TYPE (tem
), src
);
1044 if (TREE_CODE (dest
) == POINTER_PLUS_EXPR
)
1046 tree tem
= TREE_OPERAND (dest
, 0);
1048 if (tem
!= TREE_OPERAND (dest
, 0))
1049 dest
= build1 (NOP_EXPR
, TREE_TYPE (tem
), dest
);
1051 srctype
= TREE_TYPE (TREE_TYPE (src
));
1052 if (TREE_CODE (srctype
) == ARRAY_TYPE
1053 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype
), len
))
1055 srctype
= TREE_TYPE (srctype
);
1057 src
= build1 (NOP_EXPR
, build_pointer_type (srctype
), src
);
1059 desttype
= TREE_TYPE (TREE_TYPE (dest
));
1060 if (TREE_CODE (desttype
) == ARRAY_TYPE
1061 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype
), len
))
1063 desttype
= TREE_TYPE (desttype
);
1065 dest
= build1 (NOP_EXPR
, build_pointer_type (desttype
), dest
);
1067 if (TREE_ADDRESSABLE (srctype
)
1068 || TREE_ADDRESSABLE (desttype
))
1071 /* Make sure we are not copying using a floating-point mode or
1072 a type whose size possibly does not match its precision. */
1073 if (FLOAT_MODE_P (TYPE_MODE (desttype
))
1074 || TREE_CODE (desttype
) == BOOLEAN_TYPE
1075 || TREE_CODE (desttype
) == ENUMERAL_TYPE
)
1076 desttype
= bitwise_type_for_mode (TYPE_MODE (desttype
));
1077 if (FLOAT_MODE_P (TYPE_MODE (srctype
))
1078 || TREE_CODE (srctype
) == BOOLEAN_TYPE
1079 || TREE_CODE (srctype
) == ENUMERAL_TYPE
)
1080 srctype
= bitwise_type_for_mode (TYPE_MODE (srctype
));
1088 src_align
= get_pointer_alignment (src
);
1089 dest_align
= get_pointer_alignment (dest
);
1090 if (dest_align
< TYPE_ALIGN (desttype
)
1091 || src_align
< TYPE_ALIGN (srctype
))
1095 STRIP_NOPS (destvar
);
1096 if (TREE_CODE (destvar
) == ADDR_EXPR
1097 && var_decl_component_p (TREE_OPERAND (destvar
, 0))
1098 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype
), len
))
1099 destvar
= fold_build2 (MEM_REF
, desttype
, destvar
, off0
);
1101 destvar
= NULL_TREE
;
1104 STRIP_NOPS (srcvar
);
1105 if (TREE_CODE (srcvar
) == ADDR_EXPR
1106 && var_decl_component_p (TREE_OPERAND (srcvar
, 0))
1107 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype
), len
))
1110 || src_align
>= TYPE_ALIGN (desttype
))
1111 srcvar
= fold_build2 (MEM_REF
, destvar
? desttype
: srctype
,
1113 else if (!STRICT_ALIGNMENT
)
1115 srctype
= build_aligned_type (TYPE_MAIN_VARIANT (desttype
),
1117 srcvar
= fold_build2 (MEM_REF
, srctype
, srcvar
, off0
);
1125 if (srcvar
== NULL_TREE
&& destvar
== NULL_TREE
)
1128 if (srcvar
== NULL_TREE
)
1131 if (src_align
>= TYPE_ALIGN (desttype
))
1132 srcvar
= fold_build2 (MEM_REF
, desttype
, src
, off0
);
1135 if (STRICT_ALIGNMENT
)
1137 srctype
= build_aligned_type (TYPE_MAIN_VARIANT (desttype
),
1139 srcvar
= fold_build2 (MEM_REF
, srctype
, src
, off0
);
1142 else if (destvar
== NULL_TREE
)
1145 if (dest_align
>= TYPE_ALIGN (srctype
))
1146 destvar
= fold_build2 (MEM_REF
, srctype
, dest
, off0
);
1149 if (STRICT_ALIGNMENT
)
1151 desttype
= build_aligned_type (TYPE_MAIN_VARIANT (srctype
),
1153 destvar
= fold_build2 (MEM_REF
, desttype
, dest
, off0
);
1158 if (is_gimple_reg_type (TREE_TYPE (srcvar
)))
1160 new_stmt
= gimple_build_assign (NULL_TREE
, srcvar
);
1161 if (gimple_in_ssa_p (cfun
))
1162 srcvar
= make_ssa_name (TREE_TYPE (srcvar
), new_stmt
);
1164 srcvar
= create_tmp_reg (TREE_TYPE (srcvar
), NULL
);
1165 gimple_assign_set_lhs (new_stmt
, srcvar
);
1166 gimple_set_vuse (new_stmt
, gimple_vuse (stmt
));
1167 gsi_insert_before (gsi
, new_stmt
, GSI_SAME_STMT
);
1169 new_stmt
= gimple_build_assign (destvar
, srcvar
);
1170 gimple_set_vuse (new_stmt
, gimple_vuse (stmt
));
1171 gimple_set_vdef (new_stmt
, gimple_vdef (stmt
));
1172 if (gimple_vdef (new_stmt
)
1173 && TREE_CODE (gimple_vdef (new_stmt
)) == SSA_NAME
)
1174 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt
)) = new_stmt
;
1177 gsi_replace (gsi
, new_stmt
, true);
1180 gsi_insert_before (gsi
, new_stmt
, GSI_SAME_STMT
);
1184 if (endp
== 0 || endp
== 3)
1187 len
= fold_build2_loc (loc
, MINUS_EXPR
, TREE_TYPE (len
), len
,
1189 if (endp
== 2 || endp
== 1)
1190 dest
= fold_build_pointer_plus_loc (loc
, dest
, len
);
1192 dest
= force_gimple_operand_gsi (gsi
, dest
, false, NULL_TREE
, true,
1194 gimple repl
= gimple_build_assign (lhs
, dest
);
1195 gsi_replace (gsi
, repl
, true);
1199 /* Fold function call to builtin memset or bzero at *GSI setting the
1200 memory of size LEN to VAL. Return whether a simplification was made. */
1203 gimple_fold_builtin_memset (gimple_stmt_iterator
*gsi
, tree c
, tree len
)
1205 gimple stmt
= gsi_stmt (*gsi
);
1207 unsigned HOST_WIDE_INT length
, cval
;
1209 /* If the LEN parameter is zero, return DEST. */
1210 if (integer_zerop (len
))
1212 replace_call_with_value (gsi
, gimple_call_arg (stmt
, 0));
1216 if (! tree_fits_uhwi_p (len
))
1219 if (TREE_CODE (c
) != INTEGER_CST
)
1222 tree dest
= gimple_call_arg (stmt
, 0);
1224 if (TREE_CODE (var
) != ADDR_EXPR
)
1227 var
= TREE_OPERAND (var
, 0);
1228 if (TREE_THIS_VOLATILE (var
))
1231 etype
= TREE_TYPE (var
);
1232 if (TREE_CODE (etype
) == ARRAY_TYPE
)
1233 etype
= TREE_TYPE (etype
);
1235 if (!INTEGRAL_TYPE_P (etype
)
1236 && !POINTER_TYPE_P (etype
))
1239 if (! var_decl_component_p (var
))
1242 length
= tree_to_uhwi (len
);
1243 if (GET_MODE_SIZE (TYPE_MODE (etype
)) != length
1244 || get_pointer_alignment (dest
) / BITS_PER_UNIT
< length
)
1247 if (length
> HOST_BITS_PER_WIDE_INT
/ BITS_PER_UNIT
)
1250 if (integer_zerop (c
))
1254 if (CHAR_BIT
!= 8 || BITS_PER_UNIT
!= 8 || HOST_BITS_PER_WIDE_INT
> 64)
1257 cval
= TREE_INT_CST_LOW (c
);
1261 cval
|= (cval
<< 31) << 1;
1264 var
= fold_build2 (MEM_REF
, etype
, dest
, build_int_cst (ptr_type_node
, 0));
1265 gimple store
= gimple_build_assign (var
, build_int_cst_type (etype
, cval
));
1266 gimple_set_vuse (store
, gimple_vuse (stmt
));
1267 tree vdef
= gimple_vdef (stmt
);
1268 if (vdef
&& TREE_CODE (vdef
) == SSA_NAME
)
1270 gimple_set_vdef (store
, gimple_vdef (stmt
));
1271 SSA_NAME_DEF_STMT (gimple_vdef (stmt
)) = store
;
1273 gsi_insert_before (gsi
, store
, GSI_SAME_STMT
);
1274 if (gimple_call_lhs (stmt
))
1276 gimple asgn
= gimple_build_assign (gimple_call_lhs (stmt
), dest
);
1277 gsi_replace (gsi
, asgn
, true);
1281 gimple_stmt_iterator gsi2
= *gsi
;
1283 gsi_remove (&gsi2
, true);
1290 /* Return the string length, maximum string length or maximum value of
1292 If ARG is an SSA name variable, follow its use-def chains. If LENGTH
1293 is not NULL and, for TYPE == 0, its value is not equal to the length
1294 we determine or if we are unable to determine the length or value,
1295 return false. VISITED is a bitmap of visited variables.
1296 TYPE is 0 if string length should be returned, 1 for maximum string
1297 length and 2 for maximum value ARG can have. */
1300 get_maxval_strlen (tree arg
, tree
*length
, bitmap
*visited
, int type
)
1305 if (TREE_CODE (arg
) != SSA_NAME
)
1307 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
1308 if (TREE_CODE (arg
) == ADDR_EXPR
1309 && TREE_CODE (TREE_OPERAND (arg
, 0)) == ARRAY_REF
1310 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg
, 0), 1)))
1312 tree aop0
= TREE_OPERAND (TREE_OPERAND (arg
, 0), 0);
1313 if (TREE_CODE (aop0
) == INDIRECT_REF
1314 && TREE_CODE (TREE_OPERAND (aop0
, 0)) == SSA_NAME
)
1315 return get_maxval_strlen (TREE_OPERAND (aop0
, 0),
1316 length
, visited
, type
);
1322 if (TREE_CODE (val
) != INTEGER_CST
1323 || tree_int_cst_sgn (val
) < 0)
1327 val
= c_strlen (arg
, 1);
1335 if (TREE_CODE (*length
) != INTEGER_CST
1336 || TREE_CODE (val
) != INTEGER_CST
)
1339 if (tree_int_cst_lt (*length
, val
))
1343 else if (simple_cst_equal (val
, *length
) != 1)
1351 /* If ARG is registered for SSA update we cannot look at its defining
1353 if (name_registered_for_update_p (arg
))
1356 /* If we were already here, break the infinite cycle. */
1358 *visited
= BITMAP_ALLOC (NULL
);
1359 if (!bitmap_set_bit (*visited
, SSA_NAME_VERSION (arg
)))
1363 def_stmt
= SSA_NAME_DEF_STMT (var
);
1365 switch (gimple_code (def_stmt
))
1368 /* The RHS of the statement defining VAR must either have a
1369 constant length or come from another SSA_NAME with a constant
1371 if (gimple_assign_single_p (def_stmt
)
1372 || gimple_assign_unary_nop_p (def_stmt
))
1374 tree rhs
= gimple_assign_rhs1 (def_stmt
);
1375 return get_maxval_strlen (rhs
, length
, visited
, type
);
1377 else if (gimple_assign_rhs_code (def_stmt
) == COND_EXPR
)
1379 tree op2
= gimple_assign_rhs2 (def_stmt
);
1380 tree op3
= gimple_assign_rhs3 (def_stmt
);
1381 return get_maxval_strlen (op2
, length
, visited
, type
)
1382 && get_maxval_strlen (op3
, length
, visited
, type
);
1388 /* All the arguments of the PHI node must have the same constant
1392 for (i
= 0; i
< gimple_phi_num_args (def_stmt
); i
++)
1394 tree arg
= gimple_phi_arg (def_stmt
, i
)->def
;
1396 /* If this PHI has itself as an argument, we cannot
1397 determine the string length of this argument. However,
1398 if we can find a constant string length for the other
1399 PHI args then we can still be sure that this is a
1400 constant string length. So be optimistic and just
1401 continue with the next argument. */
1402 if (arg
== gimple_phi_result (def_stmt
))
1405 if (!get_maxval_strlen (arg
, length
, visited
, type
))
1417 get_maxval_strlen (tree arg
, int type
)
1419 bitmap visited
= NULL
;
1420 tree len
= NULL_TREE
;
1421 if (!get_maxval_strlen (arg
, &len
, &visited
, type
))
1424 BITMAP_FREE (visited
);
1430 /* Fold function call to builtin strcpy with arguments DEST and SRC.
1431 If LEN is not NULL, it represents the length of the string to be
1432 copied. Return NULL_TREE if no simplification can be made. */
1435 gimple_fold_builtin_strcpy (gimple_stmt_iterator
*gsi
,
1436 tree dest
, tree src
)
1438 location_t loc
= gimple_location (gsi_stmt (*gsi
));
1441 /* If SRC and DEST are the same (and not volatile), return DEST. */
1442 if (operand_equal_p (src
, dest
, 0))
1444 replace_call_with_value (gsi
, dest
);
1448 if (optimize_function_for_size_p (cfun
))
1451 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
1455 tree len
= get_maxval_strlen (src
, 0);
1459 len
= fold_convert_loc (loc
, size_type_node
, len
);
1460 len
= size_binop_loc (loc
, PLUS_EXPR
, len
, build_int_cst (size_type_node
, 1));
1461 len
= force_gimple_operand_gsi (gsi
, len
, true,
1462 NULL_TREE
, true, GSI_SAME_STMT
);
1463 gimple repl
= gimple_build_call (fn
, 3, dest
, src
, len
);
1464 replace_call_with_call_and_fold (gsi
, repl
);
1468 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
1469 If SLEN is not NULL, it represents the length of the source string.
1470 Return NULL_TREE if no simplification can be made. */
1473 gimple_fold_builtin_strncpy (gimple_stmt_iterator
*gsi
,
1474 tree dest
, tree src
, tree len
)
1476 location_t loc
= gimple_location (gsi_stmt (*gsi
));
1479 /* If the LEN parameter is zero, return DEST. */
1480 if (integer_zerop (len
))
1482 replace_call_with_value (gsi
, dest
);
1486 /* We can't compare slen with len as constants below if len is not a
1488 if (TREE_CODE (len
) != INTEGER_CST
)
1491 /* Now, we must be passed a constant src ptr parameter. */
1492 tree slen
= get_maxval_strlen (src
, 0);
1493 if (!slen
|| TREE_CODE (slen
) != INTEGER_CST
)
1496 slen
= size_binop_loc (loc
, PLUS_EXPR
, slen
, ssize_int (1));
1498 /* We do not support simplification of this case, though we do
1499 support it when expanding trees into RTL. */
1500 /* FIXME: generate a call to __builtin_memset. */
1501 if (tree_int_cst_lt (slen
, len
))
1504 /* OK transform into builtin memcpy. */
1505 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
1509 len
= fold_convert_loc (loc
, size_type_node
, len
);
1510 len
= force_gimple_operand_gsi (gsi
, len
, true,
1511 NULL_TREE
, true, GSI_SAME_STMT
);
1512 gimple repl
= gimple_build_call (fn
, 3, dest
, src
, len
);
1513 replace_call_with_call_and_fold (gsi
, repl
);
1517 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
1520 Return NULL_TREE if no simplification was possible, otherwise return the
1521 simplified form of the call as a tree.
1523 The simplified form may be a constant or other expression which
1524 computes the same value, but in a more efficient manner (including
1525 calls to other builtin functions).
1527 The call may contain arguments which need to be evaluated, but
1528 which are not useful to determine the result of the call. In
1529 this case we return a chain of COMPOUND_EXPRs. The LHS of each
1530 COMPOUND_EXPR will be an argument which must be evaluated.
1531 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
1532 COMPOUND_EXPR in the chain will contain the tree for the simplified
1533 form of the builtin function call. */
1536 gimple_fold_builtin_strcat (gimple_stmt_iterator
*gsi
, tree dst
, tree src
)
1538 gimple stmt
= gsi_stmt (*gsi
);
1539 location_t loc
= gimple_location (stmt
);
1541 const char *p
= c_getstr (src
);
1543 /* If the string length is zero, return the dst parameter. */
1544 if (p
&& *p
== '\0')
1546 replace_call_with_value (gsi
, dst
);
1550 if (!optimize_bb_for_speed_p (gimple_bb (stmt
)))
1553 /* See if we can store by pieces into (dst + strlen(dst)). */
1555 tree strlen_fn
= builtin_decl_implicit (BUILT_IN_STRLEN
);
1556 tree memcpy_fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
1558 if (!strlen_fn
|| !memcpy_fn
)
1561 /* If the length of the source string isn't computable don't
1562 split strcat into strlen and memcpy. */
1563 tree len
= get_maxval_strlen (src
, 0);
1567 /* Create strlen (dst). */
1568 gimple_seq stmts
= NULL
, stmts2
;
1569 gimple repl
= gimple_build_call (strlen_fn
, 1, dst
);
1570 gimple_set_location (repl
, loc
);
1571 if (gimple_in_ssa_p (cfun
))
1572 newdst
= make_ssa_name (size_type_node
, NULL
);
1574 newdst
= create_tmp_reg (size_type_node
, NULL
);
1575 gimple_call_set_lhs (repl
, newdst
);
1576 gimple_seq_add_stmt_without_update (&stmts
, repl
);
1578 /* Create (dst p+ strlen (dst)). */
1579 newdst
= fold_build_pointer_plus_loc (loc
, dst
, newdst
);
1580 newdst
= force_gimple_operand (newdst
, &stmts2
, true, NULL_TREE
);
1581 gimple_seq_add_seq_without_update (&stmts
, stmts2
);
1583 len
= fold_convert_loc (loc
, size_type_node
, len
);
1584 len
= size_binop_loc (loc
, PLUS_EXPR
, len
,
1585 build_int_cst (size_type_node
, 1));
1586 len
= force_gimple_operand (len
, &stmts2
, true, NULL_TREE
);
1587 gimple_seq_add_seq_without_update (&stmts
, stmts2
);
1589 repl
= gimple_build_call (memcpy_fn
, 3, newdst
, src
, len
);
1590 gimple_seq_add_stmt_without_update (&stmts
, repl
);
1591 if (gimple_call_lhs (stmt
))
1593 repl
= gimple_build_assign (gimple_call_lhs (stmt
), dst
);
1594 gimple_seq_add_stmt_without_update (&stmts
, repl
);
1595 gsi_replace_with_seq_vops (gsi
, stmts
);
1596 /* gsi now points at the assignment to the lhs, get a
1597 stmt iterator to the memcpy call.
1598 ??? We can't use gsi_for_stmt as that doesn't work when the
1599 CFG isn't built yet. */
1600 gimple_stmt_iterator gsi2
= *gsi
;
1606 gsi_replace_with_seq_vops (gsi
, stmts
);
1612 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
1613 are the arguments to the call. */
1616 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator
*gsi
)
1618 gimple stmt
= gsi_stmt (*gsi
);
1619 tree dest
= gimple_call_arg (stmt
, 0);
1620 tree src
= gimple_call_arg (stmt
, 1);
1621 tree size
= gimple_call_arg (stmt
, 2);
1627 /* If the SRC parameter is "", return DEST. */
1628 if (p
&& *p
== '\0')
1630 replace_call_with_value (gsi
, dest
);
1634 if (! tree_fits_uhwi_p (size
) || ! integer_all_onesp (size
))
1637 /* If __builtin_strcat_chk is used, assume strcat is available. */
1638 fn
= builtin_decl_explicit (BUILT_IN_STRCAT
);
1642 gimple repl
= gimple_build_call (fn
, 2, dest
, src
);
1643 replace_call_with_call_and_fold (gsi
, repl
);
1647 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
1651 gimple_fold_builtin_strncat_chk (gimple_stmt_iterator
*gsi
)
1653 gimple stmt
= gsi_stmt (*gsi
);
1654 tree dest
= gimple_call_arg (stmt
, 0);
1655 tree src
= gimple_call_arg (stmt
, 1);
1656 tree len
= gimple_call_arg (stmt
, 2);
1657 tree size
= gimple_call_arg (stmt
, 3);
1662 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
1663 if ((p
&& *p
== '\0')
1664 || integer_zerop (len
))
1666 replace_call_with_value (gsi
, dest
);
1670 if (! tree_fits_uhwi_p (size
))
1673 if (! integer_all_onesp (size
))
1675 tree src_len
= c_strlen (src
, 1);
1677 && tree_fits_uhwi_p (src_len
)
1678 && tree_fits_uhwi_p (len
)
1679 && ! tree_int_cst_lt (len
, src_len
))
1681 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
1682 fn
= builtin_decl_explicit (BUILT_IN_STRCAT_CHK
);
1686 gimple repl
= gimple_build_call (fn
, 3, dest
, src
, size
);
1687 replace_call_with_call_and_fold (gsi
, repl
);
1693 /* If __builtin_strncat_chk is used, assume strncat is available. */
1694 fn
= builtin_decl_explicit (BUILT_IN_STRNCAT
);
1698 gimple repl
= gimple_build_call (fn
, 3, dest
, src
, len
);
1699 replace_call_with_call_and_fold (gsi
, repl
);
1703 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
1704 to the call. IGNORE is true if the value returned
1705 by the builtin will be ignored. UNLOCKED is true is true if this
1706 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
1707 the known length of the string. Return NULL_TREE if no simplification
1711 gimple_fold_builtin_fputs (gimple_stmt_iterator
*gsi
,
1712 tree arg0
, tree arg1
,
1715 gimple stmt
= gsi_stmt (*gsi
);
1717 /* If we're using an unlocked function, assume the other unlocked
1718 functions exist explicitly. */
1719 tree
const fn_fputc
= (unlocked
1720 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED
)
1721 : builtin_decl_implicit (BUILT_IN_FPUTC
));
1722 tree
const fn_fwrite
= (unlocked
1723 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED
)
1724 : builtin_decl_implicit (BUILT_IN_FWRITE
));
1726 /* If the return value is used, don't do the transformation. */
1727 if (gimple_call_lhs (stmt
))
1730 /* Get the length of the string passed to fputs. If the length
1731 can't be determined, punt. */
1732 tree len
= get_maxval_strlen (arg0
, 0);
1734 || TREE_CODE (len
) != INTEGER_CST
)
1737 switch (compare_tree_int (len
, 1))
1739 case -1: /* length is 0, delete the call entirely . */
1740 replace_call_with_value (gsi
, integer_zero_node
);
1743 case 0: /* length is 1, call fputc. */
1745 const char *p
= c_getstr (arg0
);
1751 gimple repl
= gimple_build_call (fn_fputc
, 2,
1753 (integer_type_node
, p
[0]), arg1
);
1754 replace_call_with_call_and_fold (gsi
, repl
);
1759 case 1: /* length is greater than 1, call fwrite. */
1761 /* If optimizing for size keep fputs. */
1762 if (optimize_function_for_size_p (cfun
))
1764 /* New argument list transforming fputs(string, stream) to
1765 fwrite(string, 1, len, stream). */
1769 gimple repl
= gimple_build_call (fn_fwrite
, 4, arg0
,
1770 size_one_node
, len
, arg1
);
1771 replace_call_with_call_and_fold (gsi
, repl
);
1780 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
1781 DEST, SRC, LEN, and SIZE are the arguments to the call.
1782 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
1783 code of the builtin. If MAXLEN is not NULL, it is maximum length
1784 passed as third argument. */
1787 gimple_fold_builtin_memory_chk (gimple_stmt_iterator
*gsi
,
1788 tree dest
, tree src
, tree len
, tree size
,
1789 enum built_in_function fcode
)
1791 gimple stmt
= gsi_stmt (*gsi
);
1792 location_t loc
= gimple_location (stmt
);
1793 bool ignore
= gimple_call_lhs (stmt
) == NULL_TREE
;
1796 /* If SRC and DEST are the same (and not volatile), return DEST
1797 (resp. DEST+LEN for __mempcpy_chk). */
1798 if (fcode
!= BUILT_IN_MEMSET_CHK
&& operand_equal_p (src
, dest
, 0))
1800 if (fcode
!= BUILT_IN_MEMPCPY_CHK
)
1802 replace_call_with_value (gsi
, dest
);
1807 tree temp
= fold_build_pointer_plus_loc (loc
, dest
, len
);
1808 temp
= force_gimple_operand_gsi (gsi
, temp
,
1809 false, NULL_TREE
, true,
1811 replace_call_with_value (gsi
, temp
);
1816 if (! tree_fits_uhwi_p (size
))
1819 tree maxlen
= get_maxval_strlen (len
, 2);
1820 if (! integer_all_onesp (size
))
1822 if (! tree_fits_uhwi_p (len
))
1824 /* If LEN is not constant, try MAXLEN too.
1825 For MAXLEN only allow optimizing into non-_ocs function
1826 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
1827 if (maxlen
== NULL_TREE
|| ! tree_fits_uhwi_p (maxlen
))
1829 if (fcode
== BUILT_IN_MEMPCPY_CHK
&& ignore
)
1831 /* (void) __mempcpy_chk () can be optimized into
1832 (void) __memcpy_chk (). */
1833 fn
= builtin_decl_explicit (BUILT_IN_MEMCPY_CHK
);
1837 gimple repl
= gimple_build_call (fn
, 4, dest
, src
, len
, size
);
1838 replace_call_with_call_and_fold (gsi
, repl
);
1847 if (tree_int_cst_lt (size
, maxlen
))
1852 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
1853 mem{cpy,pcpy,move,set} is available. */
1856 case BUILT_IN_MEMCPY_CHK
:
1857 fn
= builtin_decl_explicit (BUILT_IN_MEMCPY
);
1859 case BUILT_IN_MEMPCPY_CHK
:
1860 fn
= builtin_decl_explicit (BUILT_IN_MEMPCPY
);
1862 case BUILT_IN_MEMMOVE_CHK
:
1863 fn
= builtin_decl_explicit (BUILT_IN_MEMMOVE
);
1865 case BUILT_IN_MEMSET_CHK
:
1866 fn
= builtin_decl_explicit (BUILT_IN_MEMSET
);
1875 gimple repl
= gimple_build_call (fn
, 3, dest
, src
, len
);
1876 replace_call_with_call_and_fold (gsi
, repl
);
1880 /* Fold a call to the __st[rp]cpy_chk builtin.
1881 DEST, SRC, and SIZE are the arguments to the call.
1882 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
1883 code of the builtin. If MAXLEN is not NULL, it is maximum length of
1884 strings passed as second argument. */
1887 gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator
*gsi
,
1889 tree src
, tree size
,
1890 enum built_in_function fcode
)
1892 gimple stmt
= gsi_stmt (*gsi
);
1893 location_t loc
= gimple_location (stmt
);
1894 bool ignore
= gimple_call_lhs (stmt
) == NULL_TREE
;
1897 /* If SRC and DEST are the same (and not volatile), return DEST. */
1898 if (fcode
== BUILT_IN_STRCPY_CHK
&& operand_equal_p (src
, dest
, 0))
1900 replace_call_with_value (gsi
, dest
);
1904 if (! tree_fits_uhwi_p (size
))
1907 tree maxlen
= get_maxval_strlen (src
, 1);
1908 if (! integer_all_onesp (size
))
1910 len
= c_strlen (src
, 1);
1911 if (! len
|| ! tree_fits_uhwi_p (len
))
1913 /* If LEN is not constant, try MAXLEN too.
1914 For MAXLEN only allow optimizing into non-_ocs function
1915 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
1916 if (maxlen
== NULL_TREE
|| ! tree_fits_uhwi_p (maxlen
))
1918 if (fcode
== BUILT_IN_STPCPY_CHK
)
1923 /* If return value of __stpcpy_chk is ignored,
1924 optimize into __strcpy_chk. */
1925 fn
= builtin_decl_explicit (BUILT_IN_STRCPY_CHK
);
1929 gimple repl
= gimple_build_call (fn
, 3, dest
, src
, size
);
1930 replace_call_with_call_and_fold (gsi
, repl
);
1934 if (! len
|| TREE_SIDE_EFFECTS (len
))
1937 /* If c_strlen returned something, but not a constant,
1938 transform __strcpy_chk into __memcpy_chk. */
1939 fn
= builtin_decl_explicit (BUILT_IN_MEMCPY_CHK
);
1943 len
= fold_convert_loc (loc
, size_type_node
, len
);
1944 len
= size_binop_loc (loc
, PLUS_EXPR
, len
,
1945 build_int_cst (size_type_node
, 1));
1946 len
= force_gimple_operand_gsi (gsi
, len
, true, NULL_TREE
,
1947 true, GSI_SAME_STMT
);
1948 gimple repl
= gimple_build_call (fn
, 4, dest
, src
, len
, size
);
1949 replace_call_with_call_and_fold (gsi
, repl
);
1956 if (! tree_int_cst_lt (maxlen
, size
))
1960 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
1961 fn
= builtin_decl_explicit (fcode
== BUILT_IN_STPCPY_CHK
1962 ? BUILT_IN_STPCPY
: BUILT_IN_STRCPY
);
1966 gimple repl
= gimple_build_call (fn
, 2, dest
, src
);
1967 replace_call_with_call_and_fold (gsi
, repl
);
1971 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
1972 are the arguments to the call. If MAXLEN is not NULL, it is maximum
1973 length passed as third argument. IGNORE is true if return value can be
1974 ignored. FCODE is the BUILT_IN_* code of the builtin. */
1977 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator
*gsi
,
1978 tree dest
, tree src
,
1979 tree len
, tree size
,
1980 enum built_in_function fcode
)
1982 gimple stmt
= gsi_stmt (*gsi
);
1983 bool ignore
= gimple_call_lhs (stmt
) == NULL_TREE
;
1986 if (fcode
== BUILT_IN_STPNCPY_CHK
&& ignore
)
1988 /* If return value of __stpncpy_chk is ignored,
1989 optimize into __strncpy_chk. */
1990 fn
= builtin_decl_explicit (BUILT_IN_STRNCPY_CHK
);
1993 gimple repl
= gimple_build_call (fn
, 4, dest
, src
, len
, size
);
1994 replace_call_with_call_and_fold (gsi
, repl
);
1999 if (! tree_fits_uhwi_p (size
))
2002 tree maxlen
= get_maxval_strlen (len
, 2);
2003 if (! integer_all_onesp (size
))
2005 if (! tree_fits_uhwi_p (len
))
2007 /* If LEN is not constant, try MAXLEN too.
2008 For MAXLEN only allow optimizing into non-_ocs function
2009 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2010 if (maxlen
== NULL_TREE
|| ! tree_fits_uhwi_p (maxlen
))
2016 if (tree_int_cst_lt (size
, maxlen
))
2020 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
2021 fn
= builtin_decl_explicit (fcode
== BUILT_IN_STPNCPY_CHK
2022 ? BUILT_IN_STPNCPY
: BUILT_IN_STRNCPY
);
2026 gimple repl
= gimple_build_call (fn
, 3, dest
, src
, len
);
2027 replace_call_with_call_and_fold (gsi
, repl
);
2031 /* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS. Return
2032 NULL_TREE if a normal call should be emitted rather than expanding
2033 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
2034 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
2035 passed as second argument. */
2038 gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator
*gsi
,
2039 enum built_in_function fcode
)
2041 gimple stmt
= gsi_stmt (*gsi
);
2042 tree dest
, size
, len
, fn
, fmt
, flag
;
2043 const char *fmt_str
;
2045 /* Verify the required arguments in the original call. */
2046 if (gimple_call_num_args (stmt
) < 5)
2049 dest
= gimple_call_arg (stmt
, 0);
2050 len
= gimple_call_arg (stmt
, 1);
2051 flag
= gimple_call_arg (stmt
, 2);
2052 size
= gimple_call_arg (stmt
, 3);
2053 fmt
= gimple_call_arg (stmt
, 4);
2055 if (! tree_fits_uhwi_p (size
))
2058 if (! integer_all_onesp (size
))
2060 tree maxlen
= get_maxval_strlen (len
, 2);
2061 if (! tree_fits_uhwi_p (len
))
2063 /* If LEN is not constant, try MAXLEN too.
2064 For MAXLEN only allow optimizing into non-_ocs function
2065 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2066 if (maxlen
== NULL_TREE
|| ! tree_fits_uhwi_p (maxlen
))
2072 if (tree_int_cst_lt (size
, maxlen
))
2076 if (!init_target_chars ())
2079 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
2080 or if format doesn't contain % chars or is "%s". */
2081 if (! integer_zerop (flag
))
2083 fmt_str
= c_getstr (fmt
);
2084 if (fmt_str
== NULL
)
2086 if (strchr (fmt_str
, target_percent
) != NULL
2087 && strcmp (fmt_str
, target_percent_s
))
2091 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
2093 fn
= builtin_decl_explicit (fcode
== BUILT_IN_VSNPRINTF_CHK
2094 ? BUILT_IN_VSNPRINTF
: BUILT_IN_SNPRINTF
);
2098 /* Replace the called function and the first 5 argument by 3 retaining
2099 trailing varargs. */
2100 gimple_call_set_fndecl (stmt
, fn
);
2101 gimple_call_set_fntype (stmt
, TREE_TYPE (fn
));
2102 gimple_call_set_arg (stmt
, 0, dest
);
2103 gimple_call_set_arg (stmt
, 1, len
);
2104 gimple_call_set_arg (stmt
, 2, fmt
);
2105 for (unsigned i
= 3; i
< gimple_call_num_args (stmt
) - 2; ++i
)
2106 gimple_call_set_arg (stmt
, i
, gimple_call_arg (stmt
, i
+ 2));
2107 gimple_set_num_ops (stmt
, gimple_num_ops (stmt
) - 2);
2112 /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
2113 Return NULL_TREE if a normal call should be emitted rather than
2114 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
2115 or BUILT_IN_VSPRINTF_CHK. */
2118 gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator
*gsi
,
2119 enum built_in_function fcode
)
2121 gimple stmt
= gsi_stmt (*gsi
);
2122 tree dest
, size
, len
, fn
, fmt
, flag
;
2123 const char *fmt_str
;
2124 unsigned nargs
= gimple_call_num_args (stmt
);
2126 /* Verify the required arguments in the original call. */
2129 dest
= gimple_call_arg (stmt
, 0);
2130 flag
= gimple_call_arg (stmt
, 1);
2131 size
= gimple_call_arg (stmt
, 2);
2132 fmt
= gimple_call_arg (stmt
, 3);
2134 if (! tree_fits_uhwi_p (size
))
2139 if (!init_target_chars ())
2142 /* Check whether the format is a literal string constant. */
2143 fmt_str
= c_getstr (fmt
);
2144 if (fmt_str
!= NULL
)
2146 /* If the format doesn't contain % args or %%, we know the size. */
2147 if (strchr (fmt_str
, target_percent
) == 0)
2149 if (fcode
!= BUILT_IN_SPRINTF_CHK
|| nargs
== 4)
2150 len
= build_int_cstu (size_type_node
, strlen (fmt_str
));
2152 /* If the format is "%s" and first ... argument is a string literal,
2153 we know the size too. */
2154 else if (fcode
== BUILT_IN_SPRINTF_CHK
2155 && strcmp (fmt_str
, target_percent_s
) == 0)
2161 arg
= gimple_call_arg (stmt
, 4);
2162 if (POINTER_TYPE_P (TREE_TYPE (arg
)))
2164 len
= c_strlen (arg
, 1);
2165 if (! len
|| ! tree_fits_uhwi_p (len
))
2172 if (! integer_all_onesp (size
))
2174 if (! len
|| ! tree_int_cst_lt (len
, size
))
2178 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
2179 or if format doesn't contain % chars or is "%s". */
2180 if (! integer_zerop (flag
))
2182 if (fmt_str
== NULL
)
2184 if (strchr (fmt_str
, target_percent
) != NULL
2185 && strcmp (fmt_str
, target_percent_s
))
2189 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
2190 fn
= builtin_decl_explicit (fcode
== BUILT_IN_VSPRINTF_CHK
2191 ? BUILT_IN_VSPRINTF
: BUILT_IN_SPRINTF
);
2195 /* Replace the called function and the first 4 argument by 2 retaining
2196 trailing varargs. */
2197 gimple_call_set_fndecl (stmt
, fn
);
2198 gimple_call_set_fntype (stmt
, TREE_TYPE (fn
));
2199 gimple_call_set_arg (stmt
, 0, dest
);
2200 gimple_call_set_arg (stmt
, 1, fmt
);
2201 for (unsigned i
= 2; i
< gimple_call_num_args (stmt
) - 2; ++i
)
2202 gimple_call_set_arg (stmt
, i
, gimple_call_arg (stmt
, i
+ 2));
2203 gimple_set_num_ops (stmt
, gimple_num_ops (stmt
) - 2);
2208 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
2209 ORIG may be null if this is a 2-argument call. We don't attempt to
2210 simplify calls with more than 3 arguments.
2212 Return NULL_TREE if no simplification was possible, otherwise return the
2213 simplified form of the call as a tree. If IGNORED is true, it means that
2214 the caller does not use the returned value of the function. */
2217 gimple_fold_builtin_sprintf (gimple_stmt_iterator
*gsi
)
2219 gimple stmt
= gsi_stmt (*gsi
);
2220 tree dest
= gimple_call_arg (stmt
, 0);
2221 tree fmt
= gimple_call_arg (stmt
, 1);
2222 tree orig
= NULL_TREE
;
2223 const char *fmt_str
= NULL
;
2225 /* Verify the required arguments in the original call. We deal with two
2226 types of sprintf() calls: 'sprintf (str, fmt)' and
2227 'sprintf (dest, "%s", orig)'. */
2228 if (gimple_call_num_args (stmt
) > 3)
2231 if (gimple_call_num_args (stmt
) == 3)
2232 orig
= gimple_call_arg (stmt
, 2);
2234 /* Check whether the format is a literal string constant. */
2235 fmt_str
= c_getstr (fmt
);
2236 if (fmt_str
== NULL
)
2239 if (!init_target_chars ())
2242 /* If the format doesn't contain % args or %%, use strcpy. */
2243 if (strchr (fmt_str
, target_percent
) == NULL
)
2245 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
2250 /* Don't optimize sprintf (buf, "abc", ptr++). */
2254 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
2255 'format' is known to contain no % formats. */
2256 gimple_seq stmts
= NULL
;
2257 gimple repl
= gimple_build_call (fn
, 2, dest
, fmt
);
2258 gimple_seq_add_stmt_without_update (&stmts
, repl
);
2259 if (gimple_call_lhs (stmt
))
2261 repl
= gimple_build_assign (gimple_call_lhs (stmt
),
2262 build_int_cst (integer_type_node
,
2264 gimple_seq_add_stmt_without_update (&stmts
, repl
);
2265 gsi_replace_with_seq_vops (gsi
, stmts
);
2266 /* gsi now points at the assignment to the lhs, get a
2267 stmt iterator to the memcpy call.
2268 ??? We can't use gsi_for_stmt as that doesn't work when the
2269 CFG isn't built yet. */
2270 gimple_stmt_iterator gsi2
= *gsi
;
2276 gsi_replace_with_seq_vops (gsi
, stmts
);
2282 /* If the format is "%s", use strcpy if the result isn't used. */
2283 else if (fmt_str
&& strcmp (fmt_str
, target_percent_s
) == 0)
2286 fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
2291 /* Don't crash on sprintf (str1, "%s"). */
2295 tree orig_len
= NULL_TREE
;
2296 if (gimple_call_lhs (stmt
))
2298 orig_len
= get_maxval_strlen (orig
, 0);
2303 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
2304 gimple_seq stmts
= NULL
;
2305 gimple repl
= gimple_build_call (fn
, 2, dest
, orig
);
2306 gimple_seq_add_stmt_without_update (&stmts
, repl
);
2307 if (gimple_call_lhs (stmt
))
2309 if (!useless_type_conversion_p (integer_type_node
,
2310 TREE_TYPE (orig_len
)))
2311 orig_len
= fold_convert (integer_type_node
, orig_len
);
2312 repl
= gimple_build_assign (gimple_call_lhs (stmt
), orig_len
);
2313 gimple_seq_add_stmt_without_update (&stmts
, repl
);
2314 gsi_replace_with_seq_vops (gsi
, stmts
);
2315 /* gsi now points at the assignment to the lhs, get a
2316 stmt iterator to the memcpy call.
2317 ??? We can't use gsi_for_stmt as that doesn't work when the
2318 CFG isn't built yet. */
2319 gimple_stmt_iterator gsi2
= *gsi
;
2325 gsi_replace_with_seq_vops (gsi
, stmts
);
2333 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
2334 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
2335 attempt to simplify calls with more than 4 arguments.
2337 Return NULL_TREE if no simplification was possible, otherwise return the
2338 simplified form of the call as a tree. If IGNORED is true, it means that
2339 the caller does not use the returned value of the function. */
2342 gimple_fold_builtin_snprintf (gimple_stmt_iterator
*gsi
)
2344 gimple stmt
= gsi_stmt (*gsi
);
2345 tree dest
= gimple_call_arg (stmt
, 0);
2346 tree destsize
= gimple_call_arg (stmt
, 1);
2347 tree fmt
= gimple_call_arg (stmt
, 2);
2348 tree orig
= NULL_TREE
;
2349 const char *fmt_str
= NULL
;
2351 if (gimple_call_num_args (stmt
) > 4)
2354 if (gimple_call_num_args (stmt
) == 4)
2355 orig
= gimple_call_arg (stmt
, 3);
2357 if (!tree_fits_uhwi_p (destsize
))
2359 unsigned HOST_WIDE_INT destlen
= tree_to_uhwi (destsize
);
2361 /* Check whether the format is a literal string constant. */
2362 fmt_str
= c_getstr (fmt
);
2363 if (fmt_str
== NULL
)
2366 if (!init_target_chars ())
2369 /* If the format doesn't contain % args or %%, use strcpy. */
2370 if (strchr (fmt_str
, target_percent
) == NULL
)
2372 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
2376 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
2380 /* We could expand this as
2381 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
2383 memcpy (str, fmt_with_nul_at_cstm1, cst);
2384 but in the former case that might increase code size
2385 and in the latter case grow .rodata section too much.
2387 size_t len
= strlen (fmt_str
);
2391 gimple_seq stmts
= NULL
;
2392 gimple repl
= gimple_build_call (fn
, 2, dest
, fmt
);
2393 gimple_seq_add_stmt_without_update (&stmts
, repl
);
2394 if (gimple_call_lhs (stmt
))
2396 repl
= gimple_build_assign (gimple_call_lhs (stmt
),
2397 build_int_cst (integer_type_node
, len
));
2398 gimple_seq_add_stmt_without_update (&stmts
, repl
);
2399 gsi_replace_with_seq_vops (gsi
, stmts
);
2400 /* gsi now points at the assignment to the lhs, get a
2401 stmt iterator to the memcpy call.
2402 ??? We can't use gsi_for_stmt as that doesn't work when the
2403 CFG isn't built yet. */
2404 gimple_stmt_iterator gsi2
= *gsi
;
2410 gsi_replace_with_seq_vops (gsi
, stmts
);
2416 /* If the format is "%s", use strcpy if the result isn't used. */
2417 else if (fmt_str
&& strcmp (fmt_str
, target_percent_s
) == 0)
2419 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
2423 /* Don't crash on snprintf (str1, cst, "%s"). */
2427 tree orig_len
= get_maxval_strlen (orig
, 0);
2431 /* We could expand this as
2432 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
2434 memcpy (str1, str2_with_nul_at_cstm1, cst);
2435 but in the former case that might increase code size
2436 and in the latter case grow .rodata section too much.
2438 if (compare_tree_int (orig_len
, destlen
) >= 0)
2441 /* Convert snprintf (str1, cst, "%s", str2) into
2442 strcpy (str1, str2) if strlen (str2) < cst. */
2443 gimple_seq stmts
= NULL
;
2444 gimple repl
= gimple_build_call (fn
, 2, dest
, orig
);
2445 gimple_seq_add_stmt_without_update (&stmts
, repl
);
2446 if (gimple_call_lhs (stmt
))
2448 if (!useless_type_conversion_p (integer_type_node
,
2449 TREE_TYPE (orig_len
)))
2450 orig_len
= fold_convert (integer_type_node
, orig_len
);
2451 repl
= gimple_build_assign (gimple_call_lhs (stmt
), orig_len
);
2452 gimple_seq_add_stmt_without_update (&stmts
, repl
);
2453 gsi_replace_with_seq_vops (gsi
, stmts
);
2454 /* gsi now points at the assignment to the lhs, get a
2455 stmt iterator to the memcpy call.
2456 ??? We can't use gsi_for_stmt as that doesn't work when the
2457 CFG isn't built yet. */
2458 gimple_stmt_iterator gsi2
= *gsi
;
2464 gsi_replace_with_seq_vops (gsi
, stmts
);
2473 /* Fold a call to __builtin_strlen with known length LEN. */
2476 gimple_fold_builtin_strlen (gimple_stmt_iterator
*gsi
)
2478 gimple stmt
= gsi_stmt (*gsi
);
2479 tree len
= get_maxval_strlen (gimple_call_arg (stmt
, 0), 0);
2482 len
= force_gimple_operand_gsi (gsi
, len
, true, NULL
, true, GSI_SAME_STMT
);
2483 replace_call_with_value (gsi
, len
);
2488 /* Fold the non-target builtin at *GSI and return whether any simplification
2492 gimple_fold_builtin (gimple_stmt_iterator
*gsi
)
2494 gimple stmt
= gsi_stmt (*gsi
);
2495 tree callee
= gimple_call_fndecl (stmt
);
2497 /* Give up for always_inline inline builtins until they are
2499 if (avoid_folding_inline_builtin (callee
))
2502 switch (DECL_FUNCTION_CODE (callee
))
2504 case BUILT_IN_BZERO
:
2505 return gimple_fold_builtin_memset (gsi
, integer_zero_node
,
2506 gimple_call_arg (stmt
, 1));
2507 case BUILT_IN_MEMSET
:
2508 return gimple_fold_builtin_memset (gsi
,
2509 gimple_call_arg (stmt
, 1),
2510 gimple_call_arg (stmt
, 2));
2511 case BUILT_IN_BCOPY
:
2512 return gimple_fold_builtin_memory_op (gsi
, gimple_call_arg (stmt
, 1),
2513 gimple_call_arg (stmt
, 0), 3);
2514 case BUILT_IN_MEMCPY
:
2515 return gimple_fold_builtin_memory_op (gsi
, gimple_call_arg (stmt
, 0),
2516 gimple_call_arg (stmt
, 1), 0);
2517 case BUILT_IN_MEMPCPY
:
2518 return gimple_fold_builtin_memory_op (gsi
, gimple_call_arg (stmt
, 0),
2519 gimple_call_arg (stmt
, 1), 1);
2520 case BUILT_IN_MEMMOVE
:
2521 return gimple_fold_builtin_memory_op (gsi
, gimple_call_arg (stmt
, 0),
2522 gimple_call_arg (stmt
, 1), 3);
2523 case BUILT_IN_SPRINTF_CHK
:
2524 case BUILT_IN_VSPRINTF_CHK
:
2525 return gimple_fold_builtin_sprintf_chk (gsi
, DECL_FUNCTION_CODE (callee
));
2526 case BUILT_IN_STRCAT_CHK
:
2527 return gimple_fold_builtin_strcat_chk (gsi
);
2528 case BUILT_IN_STRNCAT_CHK
:
2529 return gimple_fold_builtin_strncat_chk (gsi
);
2530 case BUILT_IN_STRLEN
:
2531 return gimple_fold_builtin_strlen (gsi
);
2532 case BUILT_IN_STRCPY
:
2533 return gimple_fold_builtin_strcpy (gsi
,
2534 gimple_call_arg (stmt
, 0),
2535 gimple_call_arg (stmt
, 1));
2536 case BUILT_IN_STRNCPY
:
2537 return gimple_fold_builtin_strncpy (gsi
,
2538 gimple_call_arg (stmt
, 0),
2539 gimple_call_arg (stmt
, 1),
2540 gimple_call_arg (stmt
, 2));
2541 case BUILT_IN_STRCAT
:
2542 return gimple_fold_builtin_strcat (gsi
, gimple_call_arg (stmt
, 0),
2543 gimple_call_arg (stmt
, 1));
2544 case BUILT_IN_FPUTS
:
2545 return gimple_fold_builtin_fputs (gsi
, gimple_call_arg (stmt
, 0),
2546 gimple_call_arg (stmt
, 1), false);
2547 case BUILT_IN_FPUTS_UNLOCKED
:
2548 return gimple_fold_builtin_fputs (gsi
, gimple_call_arg (stmt
, 0),
2549 gimple_call_arg (stmt
, 1), true);
2550 case BUILT_IN_MEMCPY_CHK
:
2551 case BUILT_IN_MEMPCPY_CHK
:
2552 case BUILT_IN_MEMMOVE_CHK
:
2553 case BUILT_IN_MEMSET_CHK
:
2554 return gimple_fold_builtin_memory_chk (gsi
,
2555 gimple_call_arg (stmt
, 0),
2556 gimple_call_arg (stmt
, 1),
2557 gimple_call_arg (stmt
, 2),
2558 gimple_call_arg (stmt
, 3),
2559 DECL_FUNCTION_CODE (callee
));
2560 case BUILT_IN_STRCPY_CHK
:
2561 case BUILT_IN_STPCPY_CHK
:
2562 return gimple_fold_builtin_stxcpy_chk (gsi
,
2563 gimple_call_arg (stmt
, 0),
2564 gimple_call_arg (stmt
, 1),
2565 gimple_call_arg (stmt
, 2),
2566 DECL_FUNCTION_CODE (callee
));
2567 case BUILT_IN_STRNCPY_CHK
:
2568 case BUILT_IN_STPNCPY_CHK
:
2569 return gimple_fold_builtin_stxncpy_chk (gsi
,
2570 gimple_call_arg (stmt
, 0),
2571 gimple_call_arg (stmt
, 1),
2572 gimple_call_arg (stmt
, 2),
2573 gimple_call_arg (stmt
, 3),
2574 DECL_FUNCTION_CODE (callee
));
2575 case BUILT_IN_SNPRINTF_CHK
:
2576 case BUILT_IN_VSNPRINTF_CHK
:
2577 return gimple_fold_builtin_snprintf_chk (gsi
,
2578 DECL_FUNCTION_CODE (callee
));
2579 case BUILT_IN_SNPRINTF
:
2580 return gimple_fold_builtin_snprintf (gsi
);
2581 case BUILT_IN_SPRINTF
:
2582 return gimple_fold_builtin_sprintf (gsi
);
2586 /* Try the generic builtin folder. */
2587 bool ignore
= (gimple_call_lhs (stmt
) == NULL
);
2588 tree result
= fold_call_stmt (stmt
, ignore
);
2592 STRIP_NOPS (result
);
2594 result
= fold_convert (gimple_call_return_type (stmt
), result
);
2595 if (!update_call_from_tree (gsi
, result
))
2596 gimplify_and_update_call_from_tree (gsi
, result
);
2603 /* Attempt to fold a call statement referenced by the statement iterator GSI.
2604 The statement may be replaced by another statement, e.g., if the call
2605 simplifies to a constant value. Return true if any changes were made.
2606 It is assumed that the operands have been previously folded. */
2609 gimple_fold_call (gimple_stmt_iterator
*gsi
, bool inplace
)
2611 gimple stmt
= gsi_stmt (*gsi
);
2613 bool changed
= false;
2616 /* Fold *& in call arguments. */
2617 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
2618 if (REFERENCE_CLASS_P (gimple_call_arg (stmt
, i
)))
2620 tree tmp
= maybe_fold_reference (gimple_call_arg (stmt
, i
), false);
2623 gimple_call_set_arg (stmt
, i
, tmp
);
2628 /* Check for virtual calls that became direct calls. */
2629 callee
= gimple_call_fn (stmt
);
2630 if (callee
&& TREE_CODE (callee
) == OBJ_TYPE_REF
)
2632 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee
)) != NULL_TREE
)
2634 if (dump_file
&& virtual_method_call_p (callee
)
2635 && !possible_polymorphic_call_target_p
2636 (callee
, stmt
, cgraph_node::get (gimple_call_addr_fndecl
2637 (OBJ_TYPE_REF_EXPR (callee
)))))
2640 "Type inheritance inconsistent devirtualization of ");
2641 print_gimple_stmt (dump_file
, stmt
, 0, TDF_SLIM
);
2642 fprintf (dump_file
, " to ");
2643 print_generic_expr (dump_file
, callee
, TDF_SLIM
);
2644 fprintf (dump_file
, "\n");
2647 gimple_call_set_fn (stmt
, OBJ_TYPE_REF_EXPR (callee
));
2650 else if (flag_devirtualize
&& !inplace
&& virtual_method_call_p (callee
))
2653 vec
<cgraph_node
*>targets
2654 = possible_polymorphic_call_targets (callee
, stmt
, &final
);
2655 if (final
&& targets
.length () <= 1 && dbg_cnt (devirt
))
2657 tree lhs
= gimple_call_lhs (stmt
);
2658 if (dump_enabled_p ())
2660 location_t loc
= gimple_location_safe (stmt
);
2661 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS
, loc
,
2662 "folding virtual function call to %s\n",
2663 targets
.length () == 1
2664 ? targets
[0]->name ()
2665 : "__builtin_unreachable");
2667 if (targets
.length () == 1)
2669 gimple_call_set_fndecl (stmt
, targets
[0]->decl
);
2671 /* If the call becomes noreturn, remove the lhs. */
2672 if (lhs
&& (gimple_call_flags (stmt
) & ECF_NORETURN
))
2674 if (TREE_CODE (lhs
) == SSA_NAME
)
2676 tree var
= create_tmp_var (TREE_TYPE (lhs
), NULL
);
2677 tree def
= get_or_create_ssa_default_def (cfun
, var
);
2678 gimple new_stmt
= gimple_build_assign (lhs
, def
);
2679 gsi_insert_before (gsi
, new_stmt
, GSI_SAME_STMT
);
2681 gimple_call_set_lhs (stmt
, NULL_TREE
);
2686 tree fndecl
= builtin_decl_implicit (BUILT_IN_UNREACHABLE
);
2687 gimple new_stmt
= gimple_build_call (fndecl
, 0);
2688 gimple_set_location (new_stmt
, gimple_location (stmt
));
2689 if (lhs
&& TREE_CODE (lhs
) == SSA_NAME
)
2691 tree var
= create_tmp_var (TREE_TYPE (lhs
), NULL
);
2692 tree def
= get_or_create_ssa_default_def (cfun
, var
);
2694 /* To satisfy condition for
2695 cgraph_update_edges_for_call_stmt_node,
2696 we need to preserve GIMPLE_CALL statement
2697 at position of GSI iterator. */
2698 update_call_from_tree (gsi
, def
);
2699 gsi_insert_before (gsi
, new_stmt
, GSI_NEW_STMT
);
2703 gimple_set_vuse (new_stmt
, gimple_vuse (stmt
));
2704 gimple_set_vdef (new_stmt
, gimple_vdef (stmt
));
2705 gsi_replace (gsi
, new_stmt
, false);
2716 /* Check for builtins that CCP can handle using information not
2717 available in the generic fold routines. */
2718 if (gimple_call_builtin_p (stmt
, BUILT_IN_NORMAL
))
2720 if (gimple_fold_builtin (gsi
))
2723 else if (gimple_call_builtin_p (stmt
, BUILT_IN_MD
))
2725 changed
|= targetm
.gimple_fold_builtin (gsi
);
2727 else if (gimple_call_internal_p (stmt
))
2729 enum tree_code subcode
= ERROR_MARK
;
2730 tree result
= NULL_TREE
;
2731 switch (gimple_call_internal_fn (stmt
))
2733 case IFN_BUILTIN_EXPECT
:
2734 result
= fold_builtin_expect (gimple_location (stmt
),
2735 gimple_call_arg (stmt
, 0),
2736 gimple_call_arg (stmt
, 1),
2737 gimple_call_arg (stmt
, 2));
2739 case IFN_UBSAN_OBJECT_SIZE
:
2740 if (integer_all_onesp (gimple_call_arg (stmt
, 2))
2741 || (TREE_CODE (gimple_call_arg (stmt
, 1)) == INTEGER_CST
2742 && TREE_CODE (gimple_call_arg (stmt
, 2)) == INTEGER_CST
2743 && tree_int_cst_le (gimple_call_arg (stmt
, 1),
2744 gimple_call_arg (stmt
, 2))))
2746 gsi_replace (gsi
, gimple_build_nop (), true);
2747 unlink_stmt_vdef (stmt
);
2748 release_defs (stmt
);
2752 case IFN_UBSAN_CHECK_ADD
:
2753 subcode
= PLUS_EXPR
;
2755 case IFN_UBSAN_CHECK_SUB
:
2756 subcode
= MINUS_EXPR
;
2758 case IFN_UBSAN_CHECK_MUL
:
2759 subcode
= MULT_EXPR
;
2764 if (subcode
!= ERROR_MARK
)
2766 tree arg0
= gimple_call_arg (stmt
, 0);
2767 tree arg1
= gimple_call_arg (stmt
, 1);
2768 /* x = y + 0; x = y - 0; x = y * 0; */
2769 if (integer_zerop (arg1
))
2770 result
= subcode
== MULT_EXPR
2771 ? build_zero_cst (TREE_TYPE (arg0
))
2773 /* x = 0 + y; x = 0 * y; */
2774 else if (subcode
!= MINUS_EXPR
&& integer_zerop (arg0
))
2775 result
= subcode
== MULT_EXPR
2776 ? build_zero_cst (TREE_TYPE (arg0
))
2779 else if (subcode
== MINUS_EXPR
&& operand_equal_p (arg0
, arg1
, 0))
2780 result
= build_zero_cst (TREE_TYPE (arg0
));
2781 /* x = y * 1; x = 1 * y; */
2782 else if (subcode
== MULT_EXPR
)
2784 if (integer_onep (arg1
))
2786 else if (integer_onep (arg0
))
2792 if (!update_call_from_tree (gsi
, result
))
2793 gimplify_and_update_call_from_tree (gsi
, result
);
2802 /* Worker for fold_stmt_1 dispatch to pattern based folding with
2805 Replaces *GSI with the simplification result in RCODE and OPS
2806 and the associated statements in *SEQ. Does the replacement
2807 according to INPLACE and returns true if the operation succeeded. */
2810 replace_stmt_with_simplification (gimple_stmt_iterator
*gsi
,
2811 code_helper rcode
, tree
*ops
,
2812 gimple_seq
*seq
, bool inplace
)
2814 gimple stmt
= gsi_stmt (*gsi
);
2816 /* Play safe and do not allow abnormals to be mentioned in
2817 newly created statements. See also maybe_push_res_to_seq. */
2818 if ((TREE_CODE (ops
[0]) == SSA_NAME
2819 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops
[0]))
2821 && TREE_CODE (ops
[1]) == SSA_NAME
2822 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops
[1]))
2824 && TREE_CODE (ops
[2]) == SSA_NAME
2825 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops
[2])))
2828 if (gimple_code (stmt
) == GIMPLE_COND
)
2830 gcc_assert (rcode
.is_tree_code ());
2831 if (TREE_CODE_CLASS ((enum tree_code
)rcode
) == tcc_comparison
2832 /* GIMPLE_CONDs condition may not throw. */
2833 && (!flag_exceptions
2834 || !cfun
->can_throw_non_call_exceptions
2835 || !operation_could_trap_p (rcode
,
2836 FLOAT_TYPE_P (TREE_TYPE (ops
[0])),
2838 gimple_cond_set_condition (stmt
, rcode
, ops
[0], ops
[1]);
2839 else if (rcode
== SSA_NAME
)
2840 gimple_cond_set_condition (stmt
, NE_EXPR
, ops
[0],
2841 build_zero_cst (TREE_TYPE (ops
[0])));
2842 else if (rcode
== INTEGER_CST
)
2844 if (integer_zerop (ops
[0]))
2845 gimple_cond_make_false (stmt
);
2847 gimple_cond_make_true (stmt
);
2851 tree res
= maybe_push_res_to_seq (rcode
, boolean_type_node
,
2855 gimple_cond_set_condition (stmt
, NE_EXPR
, res
,
2856 build_zero_cst (TREE_TYPE (res
)));
2860 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2862 fprintf (dump_file
, "gimple_simplified to ");
2863 if (!gimple_seq_empty_p (*seq
))
2864 print_gimple_seq (dump_file
, *seq
, 0, TDF_SLIM
);
2865 print_gimple_stmt (dump_file
, gsi_stmt (*gsi
),
2868 gsi_insert_seq_before (gsi
, *seq
, GSI_SAME_STMT
);
2871 else if (is_gimple_assign (stmt
)
2872 && rcode
.is_tree_code ())
2875 || gimple_num_ops (stmt
) <= get_gimple_rhs_num_ops (rcode
))
2877 maybe_build_generic_op (rcode
,
2878 TREE_TYPE (gimple_assign_lhs (stmt
)),
2879 &ops
[0], ops
[1], ops
[2]);
2880 gimple_assign_set_rhs_with_ops_1 (gsi
, rcode
,
2881 ops
[0], ops
[1], ops
[2]);
2882 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2884 fprintf (dump_file
, "gimple_simplified to ");
2885 if (!gimple_seq_empty_p (*seq
))
2886 print_gimple_seq (dump_file
, *seq
, 0, TDF_SLIM
);
2887 print_gimple_stmt (dump_file
, gsi_stmt (*gsi
),
2890 gsi_insert_seq_before (gsi
, *seq
, GSI_SAME_STMT
);
2896 if (gimple_has_lhs (stmt
))
2898 tree lhs
= gimple_get_lhs (stmt
);
2899 maybe_push_res_to_seq (rcode
, TREE_TYPE (lhs
),
2901 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2903 fprintf (dump_file
, "gimple_simplified to ");
2904 print_gimple_seq (dump_file
, *seq
, 0, TDF_SLIM
);
2906 gsi_replace_with_seq_vops (gsi
, *seq
);
2916 /* Canonicalize MEM_REFs invariant address operand after propagation. */
2919 maybe_canonicalize_mem_ref_addr (tree
*t
)
2923 if (TREE_CODE (*t
) == ADDR_EXPR
)
2924 t
= &TREE_OPERAND (*t
, 0);
2926 while (handled_component_p (*t
))
2927 t
= &TREE_OPERAND (*t
, 0);
2929 /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
2930 of invariant addresses into a SSA name MEM_REF address. */
2931 if (TREE_CODE (*t
) == MEM_REF
2932 || TREE_CODE (*t
) == TARGET_MEM_REF
)
2934 tree addr
= TREE_OPERAND (*t
, 0);
2935 if (TREE_CODE (addr
) == ADDR_EXPR
2936 && (TREE_CODE (TREE_OPERAND (addr
, 0)) == MEM_REF
2937 || handled_component_p (TREE_OPERAND (addr
, 0))))
2940 HOST_WIDE_INT coffset
;
2941 base
= get_addr_base_and_unit_offset (TREE_OPERAND (addr
, 0),
2946 TREE_OPERAND (*t
, 0) = build_fold_addr_expr (base
);
2947 TREE_OPERAND (*t
, 1) = int_const_binop (PLUS_EXPR
,
2948 TREE_OPERAND (*t
, 1),
2949 size_int (coffset
));
2952 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t
, 0)) == DEBUG_EXPR_DECL
2953 || is_gimple_mem_ref_addr (TREE_OPERAND (*t
, 0)));
2956 /* Canonicalize back MEM_REFs to plain reference trees if the object
2957 accessed is a decl that has the same access semantics as the MEM_REF. */
2958 if (TREE_CODE (*t
) == MEM_REF
2959 && TREE_CODE (TREE_OPERAND (*t
, 0)) == ADDR_EXPR
2960 && integer_zerop (TREE_OPERAND (*t
, 1)))
2962 tree decl
= TREE_OPERAND (TREE_OPERAND (*t
, 0), 0);
2963 tree alias_type
= TREE_TYPE (TREE_OPERAND (*t
, 1));
2964 if (/* Same volatile qualification. */
2965 TREE_THIS_VOLATILE (*t
) == TREE_THIS_VOLATILE (decl
)
2966 /* Same TBAA behavior with -fstrict-aliasing. */
2967 && !TYPE_REF_CAN_ALIAS_ALL (alias_type
)
2968 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl
))
2969 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type
)))
2970 /* Same alignment. */
2971 && TYPE_ALIGN (TREE_TYPE (decl
)) == TYPE_ALIGN (TREE_TYPE (*t
))
2972 /* We have to look out here to not drop a required conversion
2973 from the rhs to the lhs if *t appears on the lhs or vice-versa
2974 if it appears on the rhs. Thus require strict type
2976 && types_compatible_p (TREE_TYPE (*t
), TREE_TYPE (decl
)))
2978 *t
= TREE_OPERAND (TREE_OPERAND (*t
, 0), 0);
2983 /* Canonicalize TARGET_MEM_REF in particular with respect to
2984 the indexes becoming constant. */
2985 else if (TREE_CODE (*t
) == TARGET_MEM_REF
)
2987 tree tem
= maybe_fold_tmr (*t
);
2998 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
2999 distinguishes both cases. */
3002 fold_stmt_1 (gimple_stmt_iterator
*gsi
, bool inplace
, tree (*valueize
) (tree
))
3004 bool changed
= false;
3005 gimple stmt
= gsi_stmt (*gsi
);
3008 /* First do required canonicalization of [TARGET_]MEM_REF addresses
3010 ??? This shouldn't be done in generic folding but in the
3011 propagation helpers which also know whether an address was
3013 switch (gimple_code (stmt
))
3016 if (gimple_assign_rhs_class (stmt
) == GIMPLE_SINGLE_RHS
)
3018 tree
*rhs
= gimple_assign_rhs1_ptr (stmt
);
3019 if ((REFERENCE_CLASS_P (*rhs
)
3020 || TREE_CODE (*rhs
) == ADDR_EXPR
)
3021 && maybe_canonicalize_mem_ref_addr (rhs
))
3023 tree
*lhs
= gimple_assign_lhs_ptr (stmt
);
3024 if (REFERENCE_CLASS_P (*lhs
)
3025 && maybe_canonicalize_mem_ref_addr (lhs
))
3031 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
3033 tree
*arg
= gimple_call_arg_ptr (stmt
, i
);
3034 if (REFERENCE_CLASS_P (*arg
)
3035 && maybe_canonicalize_mem_ref_addr (arg
))
3038 tree
*lhs
= gimple_call_lhs_ptr (stmt
);
3040 && REFERENCE_CLASS_P (*lhs
)
3041 && maybe_canonicalize_mem_ref_addr (lhs
))
3047 for (i
= 0; i
< gimple_asm_noutputs (stmt
); ++i
)
3049 tree link
= gimple_asm_output_op (stmt
, i
);
3050 tree op
= TREE_VALUE (link
);
3051 if (REFERENCE_CLASS_P (op
)
3052 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link
)))
3055 for (i
= 0; i
< gimple_asm_ninputs (stmt
); ++i
)
3057 tree link
= gimple_asm_input_op (stmt
, i
);
3058 tree op
= TREE_VALUE (link
);
3059 if ((REFERENCE_CLASS_P (op
)
3060 || TREE_CODE (op
) == ADDR_EXPR
)
3061 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link
)))
3067 if (gimple_debug_bind_p (stmt
))
3069 tree
*val
= gimple_debug_bind_get_value_ptr (stmt
);
3071 && (REFERENCE_CLASS_P (*val
)
3072 || TREE_CODE (*val
) == ADDR_EXPR
)
3073 && maybe_canonicalize_mem_ref_addr (val
))
3080 /* Dispatch to pattern-based folding. */
3082 || is_gimple_assign (stmt
)
3083 || gimple_code (stmt
) == GIMPLE_COND
)
3085 gimple_seq seq
= NULL
;
3088 if (gimple_simplify (stmt
, &rcode
, ops
, inplace
? NULL
: &seq
, valueize
))
3090 if (replace_stmt_with_simplification (gsi
, rcode
, ops
, &seq
, inplace
))
3093 gimple_seq_discard (seq
);
3097 stmt
= gsi_stmt (*gsi
);
3099 /* Fold the main computation performed by the statement. */
3100 switch (gimple_code (stmt
))
3104 unsigned old_num_ops
= gimple_num_ops (stmt
);
3105 enum tree_code subcode
= gimple_assign_rhs_code (stmt
);
3106 tree lhs
= gimple_assign_lhs (stmt
);
3108 /* First canonicalize operand order. This avoids building new
3109 trees if this is the only thing fold would later do. */
3110 if ((commutative_tree_code (subcode
)
3111 || commutative_ternary_tree_code (subcode
))
3112 && tree_swap_operands_p (gimple_assign_rhs1 (stmt
),
3113 gimple_assign_rhs2 (stmt
), false))
3115 tree tem
= gimple_assign_rhs1 (stmt
);
3116 gimple_assign_set_rhs1 (stmt
, gimple_assign_rhs2 (stmt
));
3117 gimple_assign_set_rhs2 (stmt
, tem
);
3120 new_rhs
= fold_gimple_assign (gsi
);
3122 && !useless_type_conversion_p (TREE_TYPE (lhs
),
3123 TREE_TYPE (new_rhs
)))
3124 new_rhs
= fold_convert (TREE_TYPE (lhs
), new_rhs
);
3127 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs
)) < old_num_ops
))
3129 gimple_assign_set_rhs_from_tree (gsi
, new_rhs
);
3136 changed
|= fold_gimple_cond (stmt
);
3140 changed
|= gimple_fold_call (gsi
, inplace
);
3144 /* Fold *& in asm operands. */
3147 const char **oconstraints
;
3148 const char *constraint
;
3149 bool allows_mem
, allows_reg
;
3151 noutputs
= gimple_asm_noutputs (stmt
);
3152 oconstraints
= XALLOCAVEC (const char *, noutputs
);
3154 for (i
= 0; i
< gimple_asm_noutputs (stmt
); ++i
)
3156 tree link
= gimple_asm_output_op (stmt
, i
);
3157 tree op
= TREE_VALUE (link
);
3159 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link
)));
3160 if (REFERENCE_CLASS_P (op
)
3161 && (op
= maybe_fold_reference (op
, true)) != NULL_TREE
)
3163 TREE_VALUE (link
) = op
;
3167 for (i
= 0; i
< gimple_asm_ninputs (stmt
); ++i
)
3169 tree link
= gimple_asm_input_op (stmt
, i
);
3170 tree op
= TREE_VALUE (link
);
3172 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link
)));
3173 parse_input_constraint (&constraint
, 0, 0, noutputs
, 0,
3174 oconstraints
, &allows_mem
, &allows_reg
);
3175 if (REFERENCE_CLASS_P (op
)
3176 && (op
= maybe_fold_reference (op
, !allows_reg
&& allows_mem
))
3179 TREE_VALUE (link
) = op
;
3187 if (gimple_debug_bind_p (stmt
))
3189 tree val
= gimple_debug_bind_get_value (stmt
);
3191 && REFERENCE_CLASS_P (val
))
3193 tree tem
= maybe_fold_reference (val
, false);
3196 gimple_debug_bind_set_value (stmt
, tem
);
3201 && TREE_CODE (val
) == ADDR_EXPR
)
3203 tree ref
= TREE_OPERAND (val
, 0);
3204 tree tem
= maybe_fold_reference (ref
, false);
3207 tem
= build_fold_addr_expr_with_type (tem
, TREE_TYPE (val
));
3208 gimple_debug_bind_set_value (stmt
, tem
);
3218 stmt
= gsi_stmt (*gsi
);
3220 /* Fold *& on the lhs. */
3221 if (gimple_has_lhs (stmt
))
3223 tree lhs
= gimple_get_lhs (stmt
);
3224 if (lhs
&& REFERENCE_CLASS_P (lhs
))
3226 tree new_lhs
= maybe_fold_reference (lhs
, true);
3229 gimple_set_lhs (stmt
, new_lhs
);
3238 /* Valueziation callback that ends up not following SSA edges. */
3241 no_follow_ssa_edges (tree
)
3246 /* Valueization callback that ends up following single-use SSA edges only. */
3249 follow_single_use_edges (tree val
)
3251 if (TREE_CODE (val
) == SSA_NAME
3252 && !has_single_use (val
))
3257 /* Fold the statement pointed to by GSI. In some cases, this function may
3258 replace the whole statement with a new one. Returns true iff folding
3260 The statement pointed to by GSI should be in valid gimple form but may
3261 be in unfolded state as resulting from for example constant propagation
3262 which can produce *&x = 0. */
3265 fold_stmt (gimple_stmt_iterator
*gsi
)
3267 return fold_stmt_1 (gsi
, false, no_follow_ssa_edges
);
3271 fold_stmt (gimple_stmt_iterator
*gsi
, tree (*valueize
) (tree
))
3273 return fold_stmt_1 (gsi
, false, valueize
);
3276 /* Perform the minimal folding on statement *GSI. Only operations like
3277 *&x created by constant propagation are handled. The statement cannot
3278 be replaced with a new one. Return true if the statement was
3279 changed, false otherwise.
3280 The statement *GSI should be in valid gimple form but may
3281 be in unfolded state as resulting from for example constant propagation
3282 which can produce *&x = 0. */
3285 fold_stmt_inplace (gimple_stmt_iterator
*gsi
)
3287 gimple stmt
= gsi_stmt (*gsi
);
3288 bool changed
= fold_stmt_1 (gsi
, true, no_follow_ssa_edges
);
3289 gcc_assert (gsi_stmt (*gsi
) == stmt
);
3293 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
3294 if EXPR is null or we don't know how.
3295 If non-null, the result always has boolean type. */
3298 canonicalize_bool (tree expr
, bool invert
)
3304 if (integer_nonzerop (expr
))
3305 return boolean_false_node
;
3306 else if (integer_zerop (expr
))
3307 return boolean_true_node
;
3308 else if (TREE_CODE (expr
) == SSA_NAME
)
3309 return fold_build2 (EQ_EXPR
, boolean_type_node
, expr
,
3310 build_int_cst (TREE_TYPE (expr
), 0));
3311 else if (TREE_CODE_CLASS (TREE_CODE (expr
)) == tcc_comparison
)
3312 return fold_build2 (invert_tree_comparison (TREE_CODE (expr
), false),
3314 TREE_OPERAND (expr
, 0),
3315 TREE_OPERAND (expr
, 1));
3321 if (TREE_CODE (TREE_TYPE (expr
)) == BOOLEAN_TYPE
)
3323 if (integer_nonzerop (expr
))
3324 return boolean_true_node
;
3325 else if (integer_zerop (expr
))
3326 return boolean_false_node
;
3327 else if (TREE_CODE (expr
) == SSA_NAME
)
3328 return fold_build2 (NE_EXPR
, boolean_type_node
, expr
,
3329 build_int_cst (TREE_TYPE (expr
), 0));
3330 else if (TREE_CODE_CLASS (TREE_CODE (expr
)) == tcc_comparison
)
3331 return fold_build2 (TREE_CODE (expr
),
3333 TREE_OPERAND (expr
, 0),
3334 TREE_OPERAND (expr
, 1));
3340 /* Check to see if a boolean expression EXPR is logically equivalent to the
3341 comparison (OP1 CODE OP2). Check for various identities involving
3345 same_bool_comparison_p (const_tree expr
, enum tree_code code
,
3346 const_tree op1
, const_tree op2
)
3350 /* The obvious case. */
3351 if (TREE_CODE (expr
) == code
3352 && operand_equal_p (TREE_OPERAND (expr
, 0), op1
, 0)
3353 && operand_equal_p (TREE_OPERAND (expr
, 1), op2
, 0))
3356 /* Check for comparing (name, name != 0) and the case where expr
3357 is an SSA_NAME with a definition matching the comparison. */
3358 if (TREE_CODE (expr
) == SSA_NAME
3359 && TREE_CODE (TREE_TYPE (expr
)) == BOOLEAN_TYPE
)
3361 if (operand_equal_p (expr
, op1
, 0))
3362 return ((code
== NE_EXPR
&& integer_zerop (op2
))
3363 || (code
== EQ_EXPR
&& integer_nonzerop (op2
)));
3364 s
= SSA_NAME_DEF_STMT (expr
);
3365 if (is_gimple_assign (s
)
3366 && gimple_assign_rhs_code (s
) == code
3367 && operand_equal_p (gimple_assign_rhs1 (s
), op1
, 0)
3368 && operand_equal_p (gimple_assign_rhs2 (s
), op2
, 0))
3372 /* If op1 is of the form (name != 0) or (name == 0), and the definition
3373 of name is a comparison, recurse. */
3374 if (TREE_CODE (op1
) == SSA_NAME
3375 && TREE_CODE (TREE_TYPE (op1
)) == BOOLEAN_TYPE
)
3377 s
= SSA_NAME_DEF_STMT (op1
);
3378 if (is_gimple_assign (s
)
3379 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
)
3381 enum tree_code c
= gimple_assign_rhs_code (s
);
3382 if ((c
== NE_EXPR
&& integer_zerop (op2
))
3383 || (c
== EQ_EXPR
&& integer_nonzerop (op2
)))
3384 return same_bool_comparison_p (expr
, c
,
3385 gimple_assign_rhs1 (s
),
3386 gimple_assign_rhs2 (s
));
3387 if ((c
== EQ_EXPR
&& integer_zerop (op2
))
3388 || (c
== NE_EXPR
&& integer_nonzerop (op2
)))
3389 return same_bool_comparison_p (expr
,
3390 invert_tree_comparison (c
, false),
3391 gimple_assign_rhs1 (s
),
3392 gimple_assign_rhs2 (s
));
3398 /* Check to see if two boolean expressions OP1 and OP2 are logically
3402 same_bool_result_p (const_tree op1
, const_tree op2
)
3404 /* Simple cases first. */
3405 if (operand_equal_p (op1
, op2
, 0))
3408 /* Check the cases where at least one of the operands is a comparison.
3409 These are a bit smarter than operand_equal_p in that they apply some
3410 identifies on SSA_NAMEs. */
3411 if (TREE_CODE_CLASS (TREE_CODE (op2
)) == tcc_comparison
3412 && same_bool_comparison_p (op1
, TREE_CODE (op2
),
3413 TREE_OPERAND (op2
, 0),
3414 TREE_OPERAND (op2
, 1)))
3416 if (TREE_CODE_CLASS (TREE_CODE (op1
)) == tcc_comparison
3417 && same_bool_comparison_p (op2
, TREE_CODE (op1
),
3418 TREE_OPERAND (op1
, 0),
3419 TREE_OPERAND (op1
, 1)))
3426 /* Forward declarations for some mutually recursive functions. */
3429 and_comparisons_1 (enum tree_code code1
, tree op1a
, tree op1b
,
3430 enum tree_code code2
, tree op2a
, tree op2b
);
3432 and_var_with_comparison (tree var
, bool invert
,
3433 enum tree_code code2
, tree op2a
, tree op2b
);
3435 and_var_with_comparison_1 (gimple stmt
,
3436 enum tree_code code2
, tree op2a
, tree op2b
);
3438 or_comparisons_1 (enum tree_code code1
, tree op1a
, tree op1b
,
3439 enum tree_code code2
, tree op2a
, tree op2b
);
3441 or_var_with_comparison (tree var
, bool invert
,
3442 enum tree_code code2
, tree op2a
, tree op2b
);
3444 or_var_with_comparison_1 (gimple stmt
,
3445 enum tree_code code2
, tree op2a
, tree op2b
);
3447 /* Helper function for and_comparisons_1: try to simplify the AND of the
3448 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
3449 If INVERT is true, invert the value of the VAR before doing the AND.
3450 Return NULL_EXPR if we can't simplify this to a single expression. */
3453 and_var_with_comparison (tree var
, bool invert
,
3454 enum tree_code code2
, tree op2a
, tree op2b
)
3457 gimple stmt
= SSA_NAME_DEF_STMT (var
);
3459 /* We can only deal with variables whose definitions are assignments. */
3460 if (!is_gimple_assign (stmt
))
3463 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
3464 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
3465 Then we only have to consider the simpler non-inverted cases. */
3467 t
= or_var_with_comparison_1 (stmt
,
3468 invert_tree_comparison (code2
, false),
3471 t
= and_var_with_comparison_1 (stmt
, code2
, op2a
, op2b
);
3472 return canonicalize_bool (t
, invert
);
3475 /* Try to simplify the AND of the ssa variable defined by the assignment
3476 STMT with the comparison specified by (OP2A CODE2 OP2B).
3477 Return NULL_EXPR if we can't simplify this to a single expression. */
3480 and_var_with_comparison_1 (gimple stmt
,
3481 enum tree_code code2
, tree op2a
, tree op2b
)
3483 tree var
= gimple_assign_lhs (stmt
);
3484 tree true_test_var
= NULL_TREE
;
3485 tree false_test_var
= NULL_TREE
;
3486 enum tree_code innercode
= gimple_assign_rhs_code (stmt
);
3488 /* Check for identities like (var AND (var == 0)) => false. */
3489 if (TREE_CODE (op2a
) == SSA_NAME
3490 && TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
)
3492 if ((code2
== NE_EXPR
&& integer_zerop (op2b
))
3493 || (code2
== EQ_EXPR
&& integer_nonzerop (op2b
)))
3495 true_test_var
= op2a
;
3496 if (var
== true_test_var
)
3499 else if ((code2
== EQ_EXPR
&& integer_zerop (op2b
))
3500 || (code2
== NE_EXPR
&& integer_nonzerop (op2b
)))
3502 false_test_var
= op2a
;
3503 if (var
== false_test_var
)
3504 return boolean_false_node
;
3508 /* If the definition is a comparison, recurse on it. */
3509 if (TREE_CODE_CLASS (innercode
) == tcc_comparison
)
3511 tree t
= and_comparisons_1 (innercode
,
3512 gimple_assign_rhs1 (stmt
),
3513 gimple_assign_rhs2 (stmt
),
3521 /* If the definition is an AND or OR expression, we may be able to
3522 simplify by reassociating. */
3523 if (TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
3524 && (innercode
== BIT_AND_EXPR
|| innercode
== BIT_IOR_EXPR
))
3526 tree inner1
= gimple_assign_rhs1 (stmt
);
3527 tree inner2
= gimple_assign_rhs2 (stmt
);
3530 tree partial
= NULL_TREE
;
3531 bool is_and
= (innercode
== BIT_AND_EXPR
);
3533 /* Check for boolean identities that don't require recursive examination
3535 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
3536 inner1 AND (inner1 OR inner2) => inner1
3537 !inner1 AND (inner1 AND inner2) => false
3538 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
3539 Likewise for similar cases involving inner2. */
3540 if (inner1
== true_test_var
)
3541 return (is_and
? var
: inner1
);
3542 else if (inner2
== true_test_var
)
3543 return (is_and
? var
: inner2
);
3544 else if (inner1
== false_test_var
)
3546 ? boolean_false_node
3547 : and_var_with_comparison (inner2
, false, code2
, op2a
, op2b
));
3548 else if (inner2
== false_test_var
)
3550 ? boolean_false_node
3551 : and_var_with_comparison (inner1
, false, code2
, op2a
, op2b
));
3553 /* Next, redistribute/reassociate the AND across the inner tests.
3554 Compute the first partial result, (inner1 AND (op2a code op2b)) */
3555 if (TREE_CODE (inner1
) == SSA_NAME
3556 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner1
))
3557 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
3558 && (t
= maybe_fold_and_comparisons (gimple_assign_rhs_code (s
),
3559 gimple_assign_rhs1 (s
),
3560 gimple_assign_rhs2 (s
),
3561 code2
, op2a
, op2b
)))
3563 /* Handle the AND case, where we are reassociating:
3564 (inner1 AND inner2) AND (op2a code2 op2b)
3566 If the partial result t is a constant, we win. Otherwise
3567 continue on to try reassociating with the other inner test. */
3570 if (integer_onep (t
))
3572 else if (integer_zerop (t
))
3573 return boolean_false_node
;
3576 /* Handle the OR case, where we are redistributing:
3577 (inner1 OR inner2) AND (op2a code2 op2b)
3578 => (t OR (inner2 AND (op2a code2 op2b))) */
3579 else if (integer_onep (t
))
3580 return boolean_true_node
;
3582 /* Save partial result for later. */
3586 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
3587 if (TREE_CODE (inner2
) == SSA_NAME
3588 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner2
))
3589 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
3590 && (t
= maybe_fold_and_comparisons (gimple_assign_rhs_code (s
),
3591 gimple_assign_rhs1 (s
),
3592 gimple_assign_rhs2 (s
),
3593 code2
, op2a
, op2b
)))
3595 /* Handle the AND case, where we are reassociating:
3596 (inner1 AND inner2) AND (op2a code2 op2b)
3597 => (inner1 AND t) */
3600 if (integer_onep (t
))
3602 else if (integer_zerop (t
))
3603 return boolean_false_node
;
3604 /* If both are the same, we can apply the identity
3606 else if (partial
&& same_bool_result_p (t
, partial
))
3610 /* Handle the OR case. where we are redistributing:
3611 (inner1 OR inner2) AND (op2a code2 op2b)
3612 => (t OR (inner1 AND (op2a code2 op2b)))
3613 => (t OR partial) */
3616 if (integer_onep (t
))
3617 return boolean_true_node
;
3620 /* We already got a simplification for the other
3621 operand to the redistributed OR expression. The
3622 interesting case is when at least one is false.
3623 Or, if both are the same, we can apply the identity
3625 if (integer_zerop (partial
))
3627 else if (integer_zerop (t
))
3629 else if (same_bool_result_p (t
, partial
))
3638 /* Try to simplify the AND of two comparisons defined by
3639 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
3640 If this can be done without constructing an intermediate value,
3641 return the resulting tree; otherwise NULL_TREE is returned.
3642 This function is deliberately asymmetric as it recurses on SSA_DEFs
3643 in the first comparison but not the second. */
3646 and_comparisons_1 (enum tree_code code1
, tree op1a
, tree op1b
,
3647 enum tree_code code2
, tree op2a
, tree op2b
)
3649 tree truth_type
= truth_type_for (TREE_TYPE (op1a
));
3651 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
3652 if (operand_equal_p (op1a
, op2a
, 0)
3653 && operand_equal_p (op1b
, op2b
, 0))
3655 /* Result will be either NULL_TREE, or a combined comparison. */
3656 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
3657 TRUTH_ANDIF_EXPR
, code1
, code2
,
3658 truth_type
, op1a
, op1b
);
3663 /* Likewise the swapped case of the above. */
3664 if (operand_equal_p (op1a
, op2b
, 0)
3665 && operand_equal_p (op1b
, op2a
, 0))
3667 /* Result will be either NULL_TREE, or a combined comparison. */
3668 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
3669 TRUTH_ANDIF_EXPR
, code1
,
3670 swap_tree_comparison (code2
),
3671 truth_type
, op1a
, op1b
);
3676 /* If both comparisons are of the same value against constants, we might
3677 be able to merge them. */
3678 if (operand_equal_p (op1a
, op2a
, 0)
3679 && TREE_CODE (op1b
) == INTEGER_CST
3680 && TREE_CODE (op2b
) == INTEGER_CST
)
3682 int cmp
= tree_int_cst_compare (op1b
, op2b
);
3684 /* If we have (op1a == op1b), we should either be able to
3685 return that or FALSE, depending on whether the constant op1b
3686 also satisfies the other comparison against op2b. */
3687 if (code1
== EQ_EXPR
)
3693 case EQ_EXPR
: val
= (cmp
== 0); break;
3694 case NE_EXPR
: val
= (cmp
!= 0); break;
3695 case LT_EXPR
: val
= (cmp
< 0); break;
3696 case GT_EXPR
: val
= (cmp
> 0); break;
3697 case LE_EXPR
: val
= (cmp
<= 0); break;
3698 case GE_EXPR
: val
= (cmp
>= 0); break;
3699 default: done
= false;
3704 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
3706 return boolean_false_node
;
3709 /* Likewise if the second comparison is an == comparison. */
3710 else if (code2
== EQ_EXPR
)
3716 case EQ_EXPR
: val
= (cmp
== 0); break;
3717 case NE_EXPR
: val
= (cmp
!= 0); break;
3718 case LT_EXPR
: val
= (cmp
> 0); break;
3719 case GT_EXPR
: val
= (cmp
< 0); break;
3720 case LE_EXPR
: val
= (cmp
>= 0); break;
3721 case GE_EXPR
: val
= (cmp
<= 0); break;
3722 default: done
= false;
3727 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
3729 return boolean_false_node
;
3733 /* Same business with inequality tests. */
3734 else if (code1
== NE_EXPR
)
3739 case EQ_EXPR
: val
= (cmp
!= 0); break;
3740 case NE_EXPR
: val
= (cmp
== 0); break;
3741 case LT_EXPR
: val
= (cmp
>= 0); break;
3742 case GT_EXPR
: val
= (cmp
<= 0); break;
3743 case LE_EXPR
: val
= (cmp
> 0); break;
3744 case GE_EXPR
: val
= (cmp
< 0); break;
3749 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
3751 else if (code2
== NE_EXPR
)
3756 case EQ_EXPR
: val
= (cmp
== 0); break;
3757 case NE_EXPR
: val
= (cmp
!= 0); break;
3758 case LT_EXPR
: val
= (cmp
<= 0); break;
3759 case GT_EXPR
: val
= (cmp
>= 0); break;
3760 case LE_EXPR
: val
= (cmp
< 0); break;
3761 case GE_EXPR
: val
= (cmp
> 0); break;
3766 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
3769 /* Chose the more restrictive of two < or <= comparisons. */
3770 else if ((code1
== LT_EXPR
|| code1
== LE_EXPR
)
3771 && (code2
== LT_EXPR
|| code2
== LE_EXPR
))
3773 if ((cmp
< 0) || (cmp
== 0 && code1
== LT_EXPR
))
3774 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
3776 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
3779 /* Likewise chose the more restrictive of two > or >= comparisons. */
3780 else if ((code1
== GT_EXPR
|| code1
== GE_EXPR
)
3781 && (code2
== GT_EXPR
|| code2
== GE_EXPR
))
3783 if ((cmp
> 0) || (cmp
== 0 && code1
== GT_EXPR
))
3784 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
3786 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
3789 /* Check for singleton ranges. */
3791 && ((code1
== LE_EXPR
&& code2
== GE_EXPR
)
3792 || (code1
== GE_EXPR
&& code2
== LE_EXPR
)))
3793 return fold_build2 (EQ_EXPR
, boolean_type_node
, op1a
, op2b
);
3795 /* Check for disjoint ranges. */
3797 && (code1
== LT_EXPR
|| code1
== LE_EXPR
)
3798 && (code2
== GT_EXPR
|| code2
== GE_EXPR
))
3799 return boolean_false_node
;
3801 && (code1
== GT_EXPR
|| code1
== GE_EXPR
)
3802 && (code2
== LT_EXPR
|| code2
== LE_EXPR
))
3803 return boolean_false_node
;
3806 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
3807 NAME's definition is a truth value. See if there are any simplifications
3808 that can be done against the NAME's definition. */
3809 if (TREE_CODE (op1a
) == SSA_NAME
3810 && (code1
== NE_EXPR
|| code1
== EQ_EXPR
)
3811 && (integer_zerop (op1b
) || integer_onep (op1b
)))
3813 bool invert
= ((code1
== EQ_EXPR
&& integer_zerop (op1b
))
3814 || (code1
== NE_EXPR
&& integer_onep (op1b
)));
3815 gimple stmt
= SSA_NAME_DEF_STMT (op1a
);
3816 switch (gimple_code (stmt
))
3819 /* Try to simplify by copy-propagating the definition. */
3820 return and_var_with_comparison (op1a
, invert
, code2
, op2a
, op2b
);
3823 /* If every argument to the PHI produces the same result when
3824 ANDed with the second comparison, we win.
3825 Do not do this unless the type is bool since we need a bool
3826 result here anyway. */
3827 if (TREE_CODE (TREE_TYPE (op1a
)) == BOOLEAN_TYPE
)
3829 tree result
= NULL_TREE
;
3831 for (i
= 0; i
< gimple_phi_num_args (stmt
); i
++)
3833 tree arg
= gimple_phi_arg_def (stmt
, i
);
3835 /* If this PHI has itself as an argument, ignore it.
3836 If all the other args produce the same result,
3838 if (arg
== gimple_phi_result (stmt
))
3840 else if (TREE_CODE (arg
) == INTEGER_CST
)
3842 if (invert
? integer_nonzerop (arg
) : integer_zerop (arg
))
3845 result
= boolean_false_node
;
3846 else if (!integer_zerop (result
))
3850 result
= fold_build2 (code2
, boolean_type_node
,
3852 else if (!same_bool_comparison_p (result
,
3856 else if (TREE_CODE (arg
) == SSA_NAME
3857 && !SSA_NAME_IS_DEFAULT_DEF (arg
))
3860 gimple def_stmt
= SSA_NAME_DEF_STMT (arg
);
3861 /* In simple cases we can look through PHI nodes,
3862 but we have to be careful with loops.
3864 if (! dom_info_available_p (CDI_DOMINATORS
)
3865 || gimple_bb (def_stmt
) == gimple_bb (stmt
)
3866 || dominated_by_p (CDI_DOMINATORS
,
3867 gimple_bb (def_stmt
),
3870 temp
= and_var_with_comparison (arg
, invert
, code2
,
3876 else if (!same_bool_result_p (result
, temp
))
3892 /* Try to simplify the AND of two comparisons, specified by
3893 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
3894 If this can be simplified to a single expression (without requiring
3895 introducing more SSA variables to hold intermediate values),
3896 return the resulting tree. Otherwise return NULL_TREE.
3897 If the result expression is non-null, it has boolean type. */
3900 maybe_fold_and_comparisons (enum tree_code code1
, tree op1a
, tree op1b
,
3901 enum tree_code code2
, tree op2a
, tree op2b
)
3903 tree t
= and_comparisons_1 (code1
, op1a
, op1b
, code2
, op2a
, op2b
);
3907 return and_comparisons_1 (code2
, op2a
, op2b
, code1
, op1a
, op1b
);
3910 /* Helper function for or_comparisons_1: try to simplify the OR of the
3911 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
3912 If INVERT is true, invert the value of VAR before doing the OR.
3913 Return NULL_EXPR if we can't simplify this to a single expression. */
3916 or_var_with_comparison (tree var
, bool invert
,
3917 enum tree_code code2
, tree op2a
, tree op2b
)
3920 gimple stmt
= SSA_NAME_DEF_STMT (var
);
3922 /* We can only deal with variables whose definitions are assignments. */
3923 if (!is_gimple_assign (stmt
))
3926 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
3927 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
3928 Then we only have to consider the simpler non-inverted cases. */
3930 t
= and_var_with_comparison_1 (stmt
,
3931 invert_tree_comparison (code2
, false),
3934 t
= or_var_with_comparison_1 (stmt
, code2
, op2a
, op2b
);
3935 return canonicalize_bool (t
, invert
);
3938 /* Try to simplify the OR of the ssa variable defined by the assignment
3939 STMT with the comparison specified by (OP2A CODE2 OP2B).
3940 Return NULL_EXPR if we can't simplify this to a single expression. */
3943 or_var_with_comparison_1 (gimple stmt
,
3944 enum tree_code code2
, tree op2a
, tree op2b
)
3946 tree var
= gimple_assign_lhs (stmt
);
3947 tree true_test_var
= NULL_TREE
;
3948 tree false_test_var
= NULL_TREE
;
3949 enum tree_code innercode
= gimple_assign_rhs_code (stmt
);
3951 /* Check for identities like (var OR (var != 0)) => true . */
3952 if (TREE_CODE (op2a
) == SSA_NAME
3953 && TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
)
3955 if ((code2
== NE_EXPR
&& integer_zerop (op2b
))
3956 || (code2
== EQ_EXPR
&& integer_nonzerop (op2b
)))
3958 true_test_var
= op2a
;
3959 if (var
== true_test_var
)
3962 else if ((code2
== EQ_EXPR
&& integer_zerop (op2b
))
3963 || (code2
== NE_EXPR
&& integer_nonzerop (op2b
)))
3965 false_test_var
= op2a
;
3966 if (var
== false_test_var
)
3967 return boolean_true_node
;
3971 /* If the definition is a comparison, recurse on it. */
3972 if (TREE_CODE_CLASS (innercode
) == tcc_comparison
)
3974 tree t
= or_comparisons_1 (innercode
,
3975 gimple_assign_rhs1 (stmt
),
3976 gimple_assign_rhs2 (stmt
),
3984 /* If the definition is an AND or OR expression, we may be able to
3985 simplify by reassociating. */
3986 if (TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
3987 && (innercode
== BIT_AND_EXPR
|| innercode
== BIT_IOR_EXPR
))
3989 tree inner1
= gimple_assign_rhs1 (stmt
);
3990 tree inner2
= gimple_assign_rhs2 (stmt
);
3993 tree partial
= NULL_TREE
;
3994 bool is_or
= (innercode
== BIT_IOR_EXPR
);
3996 /* Check for boolean identities that don't require recursive examination
3998 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
3999 inner1 OR (inner1 AND inner2) => inner1
4000 !inner1 OR (inner1 OR inner2) => true
4001 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
4003 if (inner1
== true_test_var
)
4004 return (is_or
? var
: inner1
);
4005 else if (inner2
== true_test_var
)
4006 return (is_or
? var
: inner2
);
4007 else if (inner1
== false_test_var
)
4010 : or_var_with_comparison (inner2
, false, code2
, op2a
, op2b
));
4011 else if (inner2
== false_test_var
)
4014 : or_var_with_comparison (inner1
, false, code2
, op2a
, op2b
));
4016 /* Next, redistribute/reassociate the OR across the inner tests.
4017 Compute the first partial result, (inner1 OR (op2a code op2b)) */
4018 if (TREE_CODE (inner1
) == SSA_NAME
4019 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner1
))
4020 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
4021 && (t
= maybe_fold_or_comparisons (gimple_assign_rhs_code (s
),
4022 gimple_assign_rhs1 (s
),
4023 gimple_assign_rhs2 (s
),
4024 code2
, op2a
, op2b
)))
4026 /* Handle the OR case, where we are reassociating:
4027 (inner1 OR inner2) OR (op2a code2 op2b)
4029 If the partial result t is a constant, we win. Otherwise
4030 continue on to try reassociating with the other inner test. */
4033 if (integer_onep (t
))
4034 return boolean_true_node
;
4035 else if (integer_zerop (t
))
4039 /* Handle the AND case, where we are redistributing:
4040 (inner1 AND inner2) OR (op2a code2 op2b)
4041 => (t AND (inner2 OR (op2a code op2b))) */
4042 else if (integer_zerop (t
))
4043 return boolean_false_node
;
4045 /* Save partial result for later. */
4049 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
4050 if (TREE_CODE (inner2
) == SSA_NAME
4051 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner2
))
4052 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
4053 && (t
= maybe_fold_or_comparisons (gimple_assign_rhs_code (s
),
4054 gimple_assign_rhs1 (s
),
4055 gimple_assign_rhs2 (s
),
4056 code2
, op2a
, op2b
)))
4058 /* Handle the OR case, where we are reassociating:
4059 (inner1 OR inner2) OR (op2a code2 op2b)
4061 => (t OR partial) */
4064 if (integer_zerop (t
))
4066 else if (integer_onep (t
))
4067 return boolean_true_node
;
4068 /* If both are the same, we can apply the identity
4070 else if (partial
&& same_bool_result_p (t
, partial
))
4074 /* Handle the AND case, where we are redistributing:
4075 (inner1 AND inner2) OR (op2a code2 op2b)
4076 => (t AND (inner1 OR (op2a code2 op2b)))
4077 => (t AND partial) */
4080 if (integer_zerop (t
))
4081 return boolean_false_node
;
4084 /* We already got a simplification for the other
4085 operand to the redistributed AND expression. The
4086 interesting case is when at least one is true.
4087 Or, if both are the same, we can apply the identity
4089 if (integer_onep (partial
))
4091 else if (integer_onep (t
))
4093 else if (same_bool_result_p (t
, partial
))
4102 /* Try to simplify the OR of two comparisons defined by
4103 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4104 If this can be done without constructing an intermediate value,
4105 return the resulting tree; otherwise NULL_TREE is returned.
4106 This function is deliberately asymmetric as it recurses on SSA_DEFs
4107 in the first comparison but not the second. */
4110 or_comparisons_1 (enum tree_code code1
, tree op1a
, tree op1b
,
4111 enum tree_code code2
, tree op2a
, tree op2b
)
4113 tree truth_type
= truth_type_for (TREE_TYPE (op1a
));
4115 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
4116 if (operand_equal_p (op1a
, op2a
, 0)
4117 && operand_equal_p (op1b
, op2b
, 0))
4119 /* Result will be either NULL_TREE, or a combined comparison. */
4120 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
4121 TRUTH_ORIF_EXPR
, code1
, code2
,
4122 truth_type
, op1a
, op1b
);
4127 /* Likewise the swapped case of the above. */
4128 if (operand_equal_p (op1a
, op2b
, 0)
4129 && operand_equal_p (op1b
, op2a
, 0))
4131 /* Result will be either NULL_TREE, or a combined comparison. */
4132 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
4133 TRUTH_ORIF_EXPR
, code1
,
4134 swap_tree_comparison (code2
),
4135 truth_type
, op1a
, op1b
);
4140 /* If both comparisons are of the same value against constants, we might
4141 be able to merge them. */
4142 if (operand_equal_p (op1a
, op2a
, 0)
4143 && TREE_CODE (op1b
) == INTEGER_CST
4144 && TREE_CODE (op2b
) == INTEGER_CST
)
4146 int cmp
= tree_int_cst_compare (op1b
, op2b
);
4148 /* If we have (op1a != op1b), we should either be able to
4149 return that or TRUE, depending on whether the constant op1b
4150 also satisfies the other comparison against op2b. */
4151 if (code1
== NE_EXPR
)
4157 case EQ_EXPR
: val
= (cmp
== 0); break;
4158 case NE_EXPR
: val
= (cmp
!= 0); break;
4159 case LT_EXPR
: val
= (cmp
< 0); break;
4160 case GT_EXPR
: val
= (cmp
> 0); break;
4161 case LE_EXPR
: val
= (cmp
<= 0); break;
4162 case GE_EXPR
: val
= (cmp
>= 0); break;
4163 default: done
= false;
4168 return boolean_true_node
;
4170 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
4173 /* Likewise if the second comparison is a != comparison. */
4174 else if (code2
== NE_EXPR
)
4180 case EQ_EXPR
: val
= (cmp
== 0); break;
4181 case NE_EXPR
: val
= (cmp
!= 0); break;
4182 case LT_EXPR
: val
= (cmp
> 0); break;
4183 case GT_EXPR
: val
= (cmp
< 0); break;
4184 case LE_EXPR
: val
= (cmp
>= 0); break;
4185 case GE_EXPR
: val
= (cmp
<= 0); break;
4186 default: done
= false;
4191 return boolean_true_node
;
4193 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
4197 /* See if an equality test is redundant with the other comparison. */
4198 else if (code1
== EQ_EXPR
)
4203 case EQ_EXPR
: val
= (cmp
== 0); break;
4204 case NE_EXPR
: val
= (cmp
!= 0); break;
4205 case LT_EXPR
: val
= (cmp
< 0); break;
4206 case GT_EXPR
: val
= (cmp
> 0); break;
4207 case LE_EXPR
: val
= (cmp
<= 0); break;
4208 case GE_EXPR
: val
= (cmp
>= 0); break;
4213 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
4215 else if (code2
== EQ_EXPR
)
4220 case EQ_EXPR
: val
= (cmp
== 0); break;
4221 case NE_EXPR
: val
= (cmp
!= 0); break;
4222 case LT_EXPR
: val
= (cmp
> 0); break;
4223 case GT_EXPR
: val
= (cmp
< 0); break;
4224 case LE_EXPR
: val
= (cmp
>= 0); break;
4225 case GE_EXPR
: val
= (cmp
<= 0); break;
4230 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
4233 /* Chose the less restrictive of two < or <= comparisons. */
4234 else if ((code1
== LT_EXPR
|| code1
== LE_EXPR
)
4235 && (code2
== LT_EXPR
|| code2
== LE_EXPR
))
4237 if ((cmp
< 0) || (cmp
== 0 && code1
== LT_EXPR
))
4238 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
4240 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
4243 /* Likewise chose the less restrictive of two > or >= comparisons. */
4244 else if ((code1
== GT_EXPR
|| code1
== GE_EXPR
)
4245 && (code2
== GT_EXPR
|| code2
== GE_EXPR
))
4247 if ((cmp
> 0) || (cmp
== 0 && code1
== GT_EXPR
))
4248 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
4250 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
4253 /* Check for singleton ranges. */
4255 && ((code1
== LT_EXPR
&& code2
== GT_EXPR
)
4256 || (code1
== GT_EXPR
&& code2
== LT_EXPR
)))
4257 return fold_build2 (NE_EXPR
, boolean_type_node
, op1a
, op2b
);
4259 /* Check for less/greater pairs that don't restrict the range at all. */
4261 && (code1
== LT_EXPR
|| code1
== LE_EXPR
)
4262 && (code2
== GT_EXPR
|| code2
== GE_EXPR
))
4263 return boolean_true_node
;
4265 && (code1
== GT_EXPR
|| code1
== GE_EXPR
)
4266 && (code2
== LT_EXPR
|| code2
== LE_EXPR
))
4267 return boolean_true_node
;
4270 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
4271 NAME's definition is a truth value. See if there are any simplifications
4272 that can be done against the NAME's definition. */
4273 if (TREE_CODE (op1a
) == SSA_NAME
4274 && (code1
== NE_EXPR
|| code1
== EQ_EXPR
)
4275 && (integer_zerop (op1b
) || integer_onep (op1b
)))
4277 bool invert
= ((code1
== EQ_EXPR
&& integer_zerop (op1b
))
4278 || (code1
== NE_EXPR
&& integer_onep (op1b
)));
4279 gimple stmt
= SSA_NAME_DEF_STMT (op1a
);
4280 switch (gimple_code (stmt
))
4283 /* Try to simplify by copy-propagating the definition. */
4284 return or_var_with_comparison (op1a
, invert
, code2
, op2a
, op2b
);
4287 /* If every argument to the PHI produces the same result when
4288 ORed with the second comparison, we win.
4289 Do not do this unless the type is bool since we need a bool
4290 result here anyway. */
4291 if (TREE_CODE (TREE_TYPE (op1a
)) == BOOLEAN_TYPE
)
4293 tree result
= NULL_TREE
;
4295 for (i
= 0; i
< gimple_phi_num_args (stmt
); i
++)
4297 tree arg
= gimple_phi_arg_def (stmt
, i
);
4299 /* If this PHI has itself as an argument, ignore it.
4300 If all the other args produce the same result,
4302 if (arg
== gimple_phi_result (stmt
))
4304 else if (TREE_CODE (arg
) == INTEGER_CST
)
4306 if (invert
? integer_zerop (arg
) : integer_nonzerop (arg
))
4309 result
= boolean_true_node
;
4310 else if (!integer_onep (result
))
4314 result
= fold_build2 (code2
, boolean_type_node
,
4316 else if (!same_bool_comparison_p (result
,
4320 else if (TREE_CODE (arg
) == SSA_NAME
4321 && !SSA_NAME_IS_DEFAULT_DEF (arg
))
4324 gimple def_stmt
= SSA_NAME_DEF_STMT (arg
);
4325 /* In simple cases we can look through PHI nodes,
4326 but we have to be careful with loops.
4328 if (! dom_info_available_p (CDI_DOMINATORS
)
4329 || gimple_bb (def_stmt
) == gimple_bb (stmt
)
4330 || dominated_by_p (CDI_DOMINATORS
,
4331 gimple_bb (def_stmt
),
4334 temp
= or_var_with_comparison (arg
, invert
, code2
,
4340 else if (!same_bool_result_p (result
, temp
))
4356 /* Try to simplify the OR of two comparisons, specified by
4357 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
4358 If this can be simplified to a single expression (without requiring
4359 introducing more SSA variables to hold intermediate values),
4360 return the resulting tree. Otherwise return NULL_TREE.
4361 If the result expression is non-null, it has boolean type. */
4364 maybe_fold_or_comparisons (enum tree_code code1
, tree op1a
, tree op1b
,
4365 enum tree_code code2
, tree op2a
, tree op2b
)
4367 tree t
= or_comparisons_1 (code1
, op1a
, op1b
, code2
, op2a
, op2b
);
4371 return or_comparisons_1 (code2
, op2a
, op2b
, code1
, op1a
, op1b
);
4375 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
4377 Either NULL_TREE, a simplified but non-constant or a constant
4380 ??? This should go into a gimple-fold-inline.h file to be eventually
4381 privatized with the single valueize function used in the various TUs
4382 to avoid the indirect function call overhead. */
4385 gimple_fold_stmt_to_constant_1 (gimple stmt
, tree (*valueize
) (tree
))
4389 /* ??? The SSA propagators do not correctly deal with following SSA use-def
4390 edges if there are intermediate VARYING defs. For this reason
4391 do not follow SSA edges here even though SCCVN can technically
4392 just deal fine with that. */
4393 if (gimple_simplify (stmt
, &rcode
, ops
, NULL
, no_follow_ssa_edges
)
4394 && rcode
.is_tree_code ()
4395 && (TREE_CODE_LENGTH ((tree_code
) rcode
) == 0
4396 || ((tree_code
) rcode
) == ADDR_EXPR
)
4397 && is_gimple_val (ops
[0]))
4400 if (dump_file
&& dump_flags
& TDF_DETAILS
)
4402 fprintf (dump_file
, "Match-and-simplified ");
4403 print_gimple_expr (dump_file
, stmt
, 0, TDF_SLIM
);
4404 fprintf (dump_file
, " to ");
4405 print_generic_expr (dump_file
, res
, 0);
4406 fprintf (dump_file
, "\n");
4411 location_t loc
= gimple_location (stmt
);
4412 switch (gimple_code (stmt
))
4416 enum tree_code subcode
= gimple_assign_rhs_code (stmt
);
4418 switch (get_gimple_rhs_class (subcode
))
4420 case GIMPLE_SINGLE_RHS
:
4422 tree rhs
= gimple_assign_rhs1 (stmt
);
4423 enum tree_code_class kind
= TREE_CODE_CLASS (subcode
);
4425 if (TREE_CODE (rhs
) == SSA_NAME
)
4427 /* If the RHS is an SSA_NAME, return its known constant value,
4429 return (*valueize
) (rhs
);
4431 /* Handle propagating invariant addresses into address
4433 else if (TREE_CODE (rhs
) == ADDR_EXPR
4434 && !is_gimple_min_invariant (rhs
))
4436 HOST_WIDE_INT offset
= 0;
4438 base
= get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs
, 0),
4442 && (CONSTANT_CLASS_P (base
)
4443 || decl_address_invariant_p (base
)))
4444 return build_invariant_address (TREE_TYPE (rhs
),
4447 else if (TREE_CODE (rhs
) == CONSTRUCTOR
4448 && TREE_CODE (TREE_TYPE (rhs
)) == VECTOR_TYPE
4449 && (CONSTRUCTOR_NELTS (rhs
)
4450 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs
))))
4455 vec
= XALLOCAVEC (tree
,
4456 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs
)));
4457 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs
), i
, val
)
4459 val
= (*valueize
) (val
);
4460 if (TREE_CODE (val
) == INTEGER_CST
4461 || TREE_CODE (val
) == REAL_CST
4462 || TREE_CODE (val
) == FIXED_CST
)
4468 return build_vector (TREE_TYPE (rhs
), vec
);
4470 if (subcode
== OBJ_TYPE_REF
)
4472 tree val
= (*valueize
) (OBJ_TYPE_REF_EXPR (rhs
));
4473 /* If callee is constant, we can fold away the wrapper. */
4474 if (is_gimple_min_invariant (val
))
4478 if (kind
== tcc_reference
)
4480 if ((TREE_CODE (rhs
) == VIEW_CONVERT_EXPR
4481 || TREE_CODE (rhs
) == REALPART_EXPR
4482 || TREE_CODE (rhs
) == IMAGPART_EXPR
)
4483 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == SSA_NAME
)
4485 tree val
= (*valueize
) (TREE_OPERAND (rhs
, 0));
4486 return fold_unary_loc (EXPR_LOCATION (rhs
),
4488 TREE_TYPE (rhs
), val
);
4490 else if (TREE_CODE (rhs
) == BIT_FIELD_REF
4491 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == SSA_NAME
)
4493 tree val
= (*valueize
) (TREE_OPERAND (rhs
, 0));
4494 return fold_ternary_loc (EXPR_LOCATION (rhs
),
4496 TREE_TYPE (rhs
), val
,
4497 TREE_OPERAND (rhs
, 1),
4498 TREE_OPERAND (rhs
, 2));
4500 else if (TREE_CODE (rhs
) == MEM_REF
4501 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == SSA_NAME
)
4503 tree val
= (*valueize
) (TREE_OPERAND (rhs
, 0));
4504 if (TREE_CODE (val
) == ADDR_EXPR
4505 && is_gimple_min_invariant (val
))
4507 tree tem
= fold_build2 (MEM_REF
, TREE_TYPE (rhs
),
4509 TREE_OPERAND (rhs
, 1));
4514 return fold_const_aggregate_ref_1 (rhs
, valueize
);
4516 else if (kind
== tcc_declaration
)
4517 return get_symbol_constant_value (rhs
);
4521 case GIMPLE_UNARY_RHS
:
4523 /* Handle unary operators that can appear in GIMPLE form.
4524 Note that we know the single operand must be a constant,
4525 so this should almost always return a simplified RHS. */
4526 tree op0
= (*valueize
) (gimple_assign_rhs1 (stmt
));
4529 fold_unary_ignore_overflow_loc (loc
, subcode
,
4530 gimple_expr_type (stmt
), op0
);
4533 case GIMPLE_BINARY_RHS
:
4535 /* Handle binary operators that can appear in GIMPLE form. */
4536 tree op0
= (*valueize
) (gimple_assign_rhs1 (stmt
));
4537 tree op1
= (*valueize
) (gimple_assign_rhs2 (stmt
));
4539 /* Translate &x + CST into an invariant form suitable for
4540 further propagation. */
4541 if (gimple_assign_rhs_code (stmt
) == POINTER_PLUS_EXPR
4542 && TREE_CODE (op0
) == ADDR_EXPR
4543 && TREE_CODE (op1
) == INTEGER_CST
)
4545 tree off
= fold_convert (ptr_type_node
, op1
);
4546 return build_fold_addr_expr_loc
4548 fold_build2 (MEM_REF
,
4549 TREE_TYPE (TREE_TYPE (op0
)),
4550 unshare_expr (op0
), off
));
4553 return fold_binary_loc (loc
, subcode
,
4554 gimple_expr_type (stmt
), op0
, op1
);
4557 case GIMPLE_TERNARY_RHS
:
4559 /* Handle ternary operators that can appear in GIMPLE form. */
4560 tree op0
= (*valueize
) (gimple_assign_rhs1 (stmt
));
4561 tree op1
= (*valueize
) (gimple_assign_rhs2 (stmt
));
4562 tree op2
= (*valueize
) (gimple_assign_rhs3 (stmt
));
4564 /* Fold embedded expressions in ternary codes. */
4565 if ((subcode
== COND_EXPR
4566 || subcode
== VEC_COND_EXPR
)
4567 && COMPARISON_CLASS_P (op0
))
4569 tree op00
= (*valueize
) (TREE_OPERAND (op0
, 0));
4570 tree op01
= (*valueize
) (TREE_OPERAND (op0
, 1));
4571 tree tem
= fold_binary_loc (loc
, TREE_CODE (op0
),
4572 TREE_TYPE (op0
), op00
, op01
);
4577 return fold_ternary_loc (loc
, subcode
,
4578 gimple_expr_type (stmt
), op0
, op1
, op2
);
4590 if (gimple_call_internal_p (stmt
))
4592 enum tree_code subcode
= ERROR_MARK
;
4593 switch (gimple_call_internal_fn (stmt
))
4595 case IFN_UBSAN_CHECK_ADD
:
4596 subcode
= PLUS_EXPR
;
4598 case IFN_UBSAN_CHECK_SUB
:
4599 subcode
= MINUS_EXPR
;
4601 case IFN_UBSAN_CHECK_MUL
:
4602 subcode
= MULT_EXPR
;
4607 tree arg0
= gimple_call_arg (stmt
, 0);
4608 tree arg1
= gimple_call_arg (stmt
, 1);
4609 tree op0
= (*valueize
) (arg0
);
4610 tree op1
= (*valueize
) (arg1
);
4612 if (TREE_CODE (op0
) != INTEGER_CST
4613 || TREE_CODE (op1
) != INTEGER_CST
)
4618 /* x * 0 = 0 * x = 0 without overflow. */
4619 if (integer_zerop (op0
) || integer_zerop (op1
))
4620 return build_zero_cst (TREE_TYPE (arg0
));
4623 /* y - y = 0 without overflow. */
4624 if (operand_equal_p (op0
, op1
, 0))
4625 return build_zero_cst (TREE_TYPE (arg0
));
4632 = fold_binary_loc (loc
, subcode
, TREE_TYPE (arg0
), op0
, op1
);
4634 && TREE_CODE (res
) == INTEGER_CST
4635 && !TREE_OVERFLOW (res
))
4640 fn
= (*valueize
) (gimple_call_fn (stmt
));
4641 if (TREE_CODE (fn
) == ADDR_EXPR
4642 && TREE_CODE (TREE_OPERAND (fn
, 0)) == FUNCTION_DECL
4643 && DECL_BUILT_IN (TREE_OPERAND (fn
, 0))
4644 && gimple_builtin_call_types_compatible_p (stmt
,
4645 TREE_OPERAND (fn
, 0)))
4647 tree
*args
= XALLOCAVEC (tree
, gimple_call_num_args (stmt
));
4650 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
4651 args
[i
] = (*valueize
) (gimple_call_arg (stmt
, i
));
4652 call
= build_call_array_loc (loc
,
4653 gimple_call_return_type (stmt
),
4654 fn
, gimple_call_num_args (stmt
), args
);
4655 retval
= fold_call_expr (EXPR_LOCATION (call
), call
, false);
4658 /* fold_call_expr wraps the result inside a NOP_EXPR. */
4659 STRIP_NOPS (retval
);
4660 retval
= fold_convert (gimple_call_return_type (stmt
), retval
);
4672 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
4673 Returns NULL_TREE if folding to a constant is not possible, otherwise
4674 returns a constant according to is_gimple_min_invariant. */
4677 gimple_fold_stmt_to_constant (gimple stmt
, tree (*valueize
) (tree
))
4679 tree res
= gimple_fold_stmt_to_constant_1 (stmt
, valueize
);
4680 if (res
&& is_gimple_min_invariant (res
))
4686 /* The following set of functions are supposed to fold references using
4687 their constant initializers. */
4689 static tree
fold_ctor_reference (tree type
, tree ctor
,
4690 unsigned HOST_WIDE_INT offset
,
4691 unsigned HOST_WIDE_INT size
, tree
);
4693 /* See if we can find constructor defining value of BASE.
4694 When we know the consructor with constant offset (such as
4695 base is array[40] and we do know constructor of array), then
4696 BIT_OFFSET is adjusted accordingly.
4698 As a special case, return error_mark_node when constructor
4699 is not explicitly available, but it is known to be zero
4700 such as 'static const int a;'. */
4702 get_base_constructor (tree base
, HOST_WIDE_INT
*bit_offset
,
4703 tree (*valueize
)(tree
))
4705 HOST_WIDE_INT bit_offset2
, size
, max_size
;
4706 if (TREE_CODE (base
) == MEM_REF
)
4708 if (!integer_zerop (TREE_OPERAND (base
, 1)))
4710 if (!tree_fits_shwi_p (TREE_OPERAND (base
, 1)))
4712 *bit_offset
+= (mem_ref_offset (base
).to_short_addr ()
4717 && TREE_CODE (TREE_OPERAND (base
, 0)) == SSA_NAME
)
4718 base
= valueize (TREE_OPERAND (base
, 0));
4719 if (!base
|| TREE_CODE (base
) != ADDR_EXPR
)
4721 base
= TREE_OPERAND (base
, 0);
4724 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
4725 DECL_INITIAL. If BASE is a nested reference into another
4726 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
4727 the inner reference. */
4728 switch (TREE_CODE (base
))
4733 tree init
= ctor_for_folding (base
);
4735 /* Our semantic is exact opposite of ctor_for_folding;
4736 NULL means unknown, while error_mark_node is 0. */
4737 if (init
== error_mark_node
)
4740 return error_mark_node
;
4746 base
= get_ref_base_and_extent (base
, &bit_offset2
, &size
, &max_size
);
4747 if (max_size
== -1 || size
!= max_size
)
4749 *bit_offset
+= bit_offset2
;
4750 return get_base_constructor (base
, bit_offset
, valueize
);
4761 /* CTOR is CONSTRUCTOR of an array type. Fold reference of type TYPE and size
4762 SIZE to the memory at bit OFFSET. */
4765 fold_array_ctor_reference (tree type
, tree ctor
,
4766 unsigned HOST_WIDE_INT offset
,
4767 unsigned HOST_WIDE_INT size
,
4770 unsigned HOST_WIDE_INT cnt
;
4772 offset_int low_bound
;
4773 offset_int elt_size
;
4774 offset_int index
, max_index
;
4775 offset_int access_index
;
4776 tree domain_type
= NULL_TREE
, index_type
= NULL_TREE
;
4777 HOST_WIDE_INT inner_offset
;
4779 /* Compute low bound and elt size. */
4780 if (TREE_CODE (TREE_TYPE (ctor
)) == ARRAY_TYPE
)
4781 domain_type
= TYPE_DOMAIN (TREE_TYPE (ctor
));
4782 if (domain_type
&& TYPE_MIN_VALUE (domain_type
))
4784 /* Static constructors for variably sized objects makes no sense. */
4785 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type
)) == INTEGER_CST
);
4786 index_type
= TREE_TYPE (TYPE_MIN_VALUE (domain_type
));
4787 low_bound
= wi::to_offset (TYPE_MIN_VALUE (domain_type
));
4791 /* Static constructors for variably sized objects makes no sense. */
4792 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor
))))
4794 elt_size
= wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor
))));
4796 /* We can handle only constantly sized accesses that are known to not
4797 be larger than size of array element. */
4798 if (!TYPE_SIZE_UNIT (type
)
4799 || TREE_CODE (TYPE_SIZE_UNIT (type
)) != INTEGER_CST
4800 || wi::lts_p (elt_size
, wi::to_offset (TYPE_SIZE_UNIT (type
)))
4804 /* Compute the array index we look for. */
4805 access_index
= wi::udiv_trunc (offset_int (offset
/ BITS_PER_UNIT
),
4807 access_index
+= low_bound
;
4809 access_index
= wi::ext (access_index
, TYPE_PRECISION (index_type
),
4810 TYPE_SIGN (index_type
));
4812 /* And offset within the access. */
4813 inner_offset
= offset
% (elt_size
.to_uhwi () * BITS_PER_UNIT
);
4815 /* See if the array field is large enough to span whole access. We do not
4816 care to fold accesses spanning multiple array indexes. */
4817 if (inner_offset
+ size
> elt_size
.to_uhwi () * BITS_PER_UNIT
)
4820 index
= low_bound
- 1;
4822 index
= wi::ext (index
, TYPE_PRECISION (index_type
),
4823 TYPE_SIGN (index_type
));
4825 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor
), cnt
, cfield
, cval
)
4827 /* Array constructor might explicitely set index, or specify range
4828 or leave index NULL meaning that it is next index after previous
4832 if (TREE_CODE (cfield
) == INTEGER_CST
)
4833 max_index
= index
= wi::to_offset (cfield
);
4836 gcc_assert (TREE_CODE (cfield
) == RANGE_EXPR
);
4837 index
= wi::to_offset (TREE_OPERAND (cfield
, 0));
4838 max_index
= wi::to_offset (TREE_OPERAND (cfield
, 1));
4845 index
= wi::ext (index
, TYPE_PRECISION (index_type
),
4846 TYPE_SIGN (index_type
));
4850 /* Do we have match? */
4851 if (wi::cmpu (access_index
, index
) >= 0
4852 && wi::cmpu (access_index
, max_index
) <= 0)
4853 return fold_ctor_reference (type
, cval
, inner_offset
, size
,
4856 /* When memory is not explicitely mentioned in constructor,
4857 it is 0 (or out of range). */
4858 return build_zero_cst (type
);
4861 /* CTOR is CONSTRUCTOR of an aggregate or vector.
4862 Fold reference of type TYPE and size SIZE to the memory at bit OFFSET. */
4865 fold_nonarray_ctor_reference (tree type
, tree ctor
,
4866 unsigned HOST_WIDE_INT offset
,
4867 unsigned HOST_WIDE_INT size
,
4870 unsigned HOST_WIDE_INT cnt
;
4873 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor
), cnt
, cfield
,
4876 tree byte_offset
= DECL_FIELD_OFFSET (cfield
);
4877 tree field_offset
= DECL_FIELD_BIT_OFFSET (cfield
);
4878 tree field_size
= DECL_SIZE (cfield
);
4879 offset_int bitoffset
;
4880 offset_int bitoffset_end
, access_end
;
4882 /* Variable sized objects in static constructors makes no sense,
4883 but field_size can be NULL for flexible array members. */
4884 gcc_assert (TREE_CODE (field_offset
) == INTEGER_CST
4885 && TREE_CODE (byte_offset
) == INTEGER_CST
4886 && (field_size
!= NULL_TREE
4887 ? TREE_CODE (field_size
) == INTEGER_CST
4888 : TREE_CODE (TREE_TYPE (cfield
)) == ARRAY_TYPE
));
4890 /* Compute bit offset of the field. */
4891 bitoffset
= (wi::to_offset (field_offset
)
4892 + wi::lshift (wi::to_offset (byte_offset
),
4893 LOG2_BITS_PER_UNIT
));
4894 /* Compute bit offset where the field ends. */
4895 if (field_size
!= NULL_TREE
)
4896 bitoffset_end
= bitoffset
+ wi::to_offset (field_size
);
4900 access_end
= offset_int (offset
) + size
;
4902 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
4903 [BITOFFSET, BITOFFSET_END)? */
4904 if (wi::cmps (access_end
, bitoffset
) > 0
4905 && (field_size
== NULL_TREE
4906 || wi::lts_p (offset
, bitoffset_end
)))
4908 offset_int inner_offset
= offset_int (offset
) - bitoffset
;
4909 /* We do have overlap. Now see if field is large enough to
4910 cover the access. Give up for accesses spanning multiple
4912 if (wi::cmps (access_end
, bitoffset_end
) > 0)
4914 if (wi::lts_p (offset
, bitoffset
))
4916 return fold_ctor_reference (type
, cval
,
4917 inner_offset
.to_uhwi (), size
,
4921 /* When memory is not explicitely mentioned in constructor, it is 0. */
4922 return build_zero_cst (type
);
4925 /* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
4926 to the memory at bit OFFSET. */
4929 fold_ctor_reference (tree type
, tree ctor
, unsigned HOST_WIDE_INT offset
,
4930 unsigned HOST_WIDE_INT size
, tree from_decl
)
4934 /* We found the field with exact match. */
4935 if (useless_type_conversion_p (type
, TREE_TYPE (ctor
))
4937 return canonicalize_constructor_val (unshare_expr (ctor
), from_decl
);
4939 /* We are at the end of walk, see if we can view convert the
4941 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor
)) && !offset
4942 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
4943 && !compare_tree_int (TYPE_SIZE (type
), size
)
4944 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor
)), size
))
4946 ret
= canonicalize_constructor_val (unshare_expr (ctor
), from_decl
);
4947 ret
= fold_unary (VIEW_CONVERT_EXPR
, type
, ret
);
4952 /* For constants and byte-aligned/sized reads try to go through
4953 native_encode/interpret. */
4954 if (CONSTANT_CLASS_P (ctor
)
4955 && BITS_PER_UNIT
== 8
4956 && offset
% BITS_PER_UNIT
== 0
4957 && size
% BITS_PER_UNIT
== 0
4958 && size
<= MAX_BITSIZE_MODE_ANY_MODE
)
4960 unsigned char buf
[MAX_BITSIZE_MODE_ANY_MODE
/ BITS_PER_UNIT
];
4961 if (native_encode_expr (ctor
, buf
, size
/ BITS_PER_UNIT
,
4962 offset
/ BITS_PER_UNIT
) > 0)
4963 return native_interpret_expr (type
, buf
, size
/ BITS_PER_UNIT
);
4965 if (TREE_CODE (ctor
) == CONSTRUCTOR
)
4968 if (TREE_CODE (TREE_TYPE (ctor
)) == ARRAY_TYPE
4969 || TREE_CODE (TREE_TYPE (ctor
)) == VECTOR_TYPE
)
4970 return fold_array_ctor_reference (type
, ctor
, offset
, size
,
4973 return fold_nonarray_ctor_reference (type
, ctor
, offset
, size
,
4980 /* Return the tree representing the element referenced by T if T is an
4981 ARRAY_REF or COMPONENT_REF into constant aggregates valuezing SSA
4982 names using VALUEIZE. Return NULL_TREE otherwise. */
4985 fold_const_aggregate_ref_1 (tree t
, tree (*valueize
) (tree
))
4987 tree ctor
, idx
, base
;
4988 HOST_WIDE_INT offset
, size
, max_size
;
4991 if (TREE_THIS_VOLATILE (t
))
4994 if (TREE_CODE_CLASS (TREE_CODE (t
)) == tcc_declaration
)
4995 return get_symbol_constant_value (t
);
4997 tem
= fold_read_from_constant_string (t
);
5001 switch (TREE_CODE (t
))
5004 case ARRAY_RANGE_REF
:
5005 /* Constant indexes are handled well by get_base_constructor.
5006 Only special case variable offsets.
5007 FIXME: This code can't handle nested references with variable indexes
5008 (they will be handled only by iteration of ccp). Perhaps we can bring
5009 get_ref_base_and_extent here and make it use a valueize callback. */
5010 if (TREE_CODE (TREE_OPERAND (t
, 1)) == SSA_NAME
5012 && (idx
= (*valueize
) (TREE_OPERAND (t
, 1)))
5013 && TREE_CODE (idx
) == INTEGER_CST
)
5015 tree low_bound
, unit_size
;
5017 /* If the resulting bit-offset is constant, track it. */
5018 if ((low_bound
= array_ref_low_bound (t
),
5019 TREE_CODE (low_bound
) == INTEGER_CST
)
5020 && (unit_size
= array_ref_element_size (t
),
5021 tree_fits_uhwi_p (unit_size
)))
5024 = wi::sext (wi::to_offset (idx
) - wi::to_offset (low_bound
),
5025 TYPE_PRECISION (TREE_TYPE (idx
)));
5027 if (wi::fits_shwi_p (woffset
))
5029 offset
= woffset
.to_shwi ();
5030 /* TODO: This code seems wrong, multiply then check
5031 to see if it fits. */
5032 offset
*= tree_to_uhwi (unit_size
);
5033 offset
*= BITS_PER_UNIT
;
5035 base
= TREE_OPERAND (t
, 0);
5036 ctor
= get_base_constructor (base
, &offset
, valueize
);
5037 /* Empty constructor. Always fold to 0. */
5038 if (ctor
== error_mark_node
)
5039 return build_zero_cst (TREE_TYPE (t
));
5040 /* Out of bound array access. Value is undefined,
5044 /* We can not determine ctor. */
5047 return fold_ctor_reference (TREE_TYPE (t
), ctor
, offset
,
5048 tree_to_uhwi (unit_size
)
5058 case TARGET_MEM_REF
:
5060 base
= get_ref_base_and_extent (t
, &offset
, &size
, &max_size
);
5061 ctor
= get_base_constructor (base
, &offset
, valueize
);
5063 /* Empty constructor. Always fold to 0. */
5064 if (ctor
== error_mark_node
)
5065 return build_zero_cst (TREE_TYPE (t
));
5066 /* We do not know precise address. */
5067 if (max_size
== -1 || max_size
!= size
)
5069 /* We can not determine ctor. */
5073 /* Out of bound array access. Value is undefined, but don't fold. */
5077 return fold_ctor_reference (TREE_TYPE (t
), ctor
, offset
, size
,
5083 tree c
= fold_const_aggregate_ref_1 (TREE_OPERAND (t
, 0), valueize
);
5084 if (c
&& TREE_CODE (c
) == COMPLEX_CST
)
5085 return fold_build1_loc (EXPR_LOCATION (t
),
5086 TREE_CODE (t
), TREE_TYPE (t
), c
);
5098 fold_const_aggregate_ref (tree t
)
5100 return fold_const_aggregate_ref_1 (t
, NULL
);
5103 /* Lookup virtual method with index TOKEN in a virtual table V
5105 Set CAN_REFER if non-NULL to false if method
5106 is not referable or if the virtual table is ill-formed (such as rewriten
5107 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
5110 gimple_get_virt_method_for_vtable (HOST_WIDE_INT token
,
5112 unsigned HOST_WIDE_INT offset
,
5115 tree vtable
= v
, init
, fn
;
5116 unsigned HOST_WIDE_INT size
;
5117 unsigned HOST_WIDE_INT elt_size
, access_index
;
5123 /* First of all double check we have virtual table. */
5124 if (TREE_CODE (v
) != VAR_DECL
5125 || !DECL_VIRTUAL_P (v
))
5127 gcc_assert (in_lto_p
);
5128 /* Pass down that we lost track of the target. */
5134 init
= ctor_for_folding (v
);
5136 /* The virtual tables should always be born with constructors
5137 and we always should assume that they are avaialble for
5138 folding. At the moment we do not stream them in all cases,
5139 but it should never happen that ctor seem unreachable. */
5141 if (init
== error_mark_node
)
5143 gcc_assert (in_lto_p
);
5144 /* Pass down that we lost track of the target. */
5149 gcc_checking_assert (TREE_CODE (TREE_TYPE (v
)) == ARRAY_TYPE
);
5150 size
= tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v
))));
5151 offset
*= BITS_PER_UNIT
;
5152 offset
+= token
* size
;
5154 /* Lookup the value in the constructor that is assumed to be array.
5155 This is equivalent to
5156 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
5157 offset, size, NULL);
5158 but in a constant time. We expect that frontend produced a simple
5159 array without indexed initializers. */
5161 gcc_checking_assert (TREE_CODE (TREE_TYPE (init
)) == ARRAY_TYPE
);
5162 domain_type
= TYPE_DOMAIN (TREE_TYPE (init
));
5163 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type
)));
5164 elt_size
= tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init
))));
5166 access_index
= offset
/ BITS_PER_UNIT
/ elt_size
;
5167 gcc_checking_assert (offset
% (elt_size
* BITS_PER_UNIT
) == 0);
5169 /* This code makes an assumption that there are no
5170 indexed fileds produced by C++ FE, so we can directly index the array. */
5171 if (access_index
< CONSTRUCTOR_NELTS (init
))
5173 fn
= CONSTRUCTOR_ELT (init
, access_index
)->value
;
5174 gcc_checking_assert (!CONSTRUCTOR_ELT (init
, access_index
)->index
);
5180 /* For type inconsistent program we may end up looking up virtual method
5181 in virtual table that does not contain TOKEN entries. We may overrun
5182 the virtual table and pick up a constant or RTTI info pointer.
5183 In any case the call is undefined. */
5185 || (TREE_CODE (fn
) != ADDR_EXPR
&& TREE_CODE (fn
) != FDESC_EXPR
)
5186 || TREE_CODE (TREE_OPERAND (fn
, 0)) != FUNCTION_DECL
)
5187 fn
= builtin_decl_implicit (BUILT_IN_UNREACHABLE
);
5190 fn
= TREE_OPERAND (fn
, 0);
5192 /* When cgraph node is missing and function is not public, we cannot
5193 devirtualize. This can happen in WHOPR when the actual method
5194 ends up in other partition, because we found devirtualization
5195 possibility too late. */
5196 if (!can_refer_decl_in_current_unit_p (fn
, vtable
))
5207 /* Make sure we create a cgraph node for functions we'll reference.
5208 They can be non-existent if the reference comes from an entry
5209 of an external vtable for example. */
5210 cgraph_node::get_create (fn
);
5215 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
5216 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
5217 KNOWN_BINFO carries the binfo describing the true type of
5218 OBJ_TYPE_REF_OBJECT(REF).
5219 Set CAN_REFER if non-NULL to false if method
5220 is not referable or if the virtual table is ill-formed (such as rewriten
5221 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
5224 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token
, tree known_binfo
,
5227 unsigned HOST_WIDE_INT offset
;
5230 v
= BINFO_VTABLE (known_binfo
);
5231 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
5235 if (!vtable_pointer_value_to_vtable (v
, &v
, &offset
))
5241 return gimple_get_virt_method_for_vtable (token
, v
, offset
, can_refer
);
5244 /* Return true iff VAL is a gimple expression that is known to be
5245 non-negative. Restricted to floating-point inputs. */
5248 gimple_val_nonnegative_real_p (tree val
)
5252 gcc_assert (val
&& SCALAR_FLOAT_TYPE_P (TREE_TYPE (val
)));
5254 /* Use existing logic for non-gimple trees. */
5255 if (tree_expr_nonnegative_p (val
))
5258 if (TREE_CODE (val
) != SSA_NAME
)
5261 /* Currently we look only at the immediately defining statement
5262 to make this determination, since recursion on defining
5263 statements of operands can lead to quadratic behavior in the
5264 worst case. This is expected to catch almost all occurrences
5265 in practice. It would be possible to implement limited-depth
5266 recursion if important cases are lost. Alternatively, passes
5267 that need this information (such as the pow/powi lowering code
5268 in the cse_sincos pass) could be revised to provide it through
5269 dataflow propagation. */
5271 def_stmt
= SSA_NAME_DEF_STMT (val
);
5273 if (is_gimple_assign (def_stmt
))
5277 /* See fold-const.c:tree_expr_nonnegative_p for additional
5278 cases that could be handled with recursion. */
5280 switch (gimple_assign_rhs_code (def_stmt
))
5283 /* Always true for floating-point operands. */
5287 /* True if the two operands are identical (since we are
5288 restricted to floating-point inputs). */
5289 op0
= gimple_assign_rhs1 (def_stmt
);
5290 op1
= gimple_assign_rhs2 (def_stmt
);
5293 || operand_equal_p (op0
, op1
, 0))
5300 else if (is_gimple_call (def_stmt
))
5302 tree fndecl
= gimple_call_fndecl (def_stmt
);
5304 && DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
)
5308 switch (DECL_FUNCTION_CODE (fndecl
))
5310 CASE_FLT_FN (BUILT_IN_ACOS
):
5311 CASE_FLT_FN (BUILT_IN_ACOSH
):
5312 CASE_FLT_FN (BUILT_IN_CABS
):
5313 CASE_FLT_FN (BUILT_IN_COSH
):
5314 CASE_FLT_FN (BUILT_IN_ERFC
):
5315 CASE_FLT_FN (BUILT_IN_EXP
):
5316 CASE_FLT_FN (BUILT_IN_EXP10
):
5317 CASE_FLT_FN (BUILT_IN_EXP2
):
5318 CASE_FLT_FN (BUILT_IN_FABS
):
5319 CASE_FLT_FN (BUILT_IN_FDIM
):
5320 CASE_FLT_FN (BUILT_IN_HYPOT
):
5321 CASE_FLT_FN (BUILT_IN_POW10
):
5324 CASE_FLT_FN (BUILT_IN_SQRT
):
5325 /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
5326 nonnegative inputs. */
5327 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (val
))))
5332 CASE_FLT_FN (BUILT_IN_POWI
):
5333 /* True if the second argument is an even integer. */
5334 arg1
= gimple_call_arg (def_stmt
, 1);
5336 if (TREE_CODE (arg1
) == INTEGER_CST
5337 && (TREE_INT_CST_LOW (arg1
) & 1) == 0)
5342 CASE_FLT_FN (BUILT_IN_POW
):
5343 /* True if the second argument is an even integer-valued
5345 arg1
= gimple_call_arg (def_stmt
, 1);
5347 if (TREE_CODE (arg1
) == REAL_CST
)
5352 c
= TREE_REAL_CST (arg1
);
5353 n
= real_to_integer (&c
);
5357 REAL_VALUE_TYPE cint
;
5358 real_from_integer (&cint
, VOIDmode
, n
, SIGNED
);
5359 if (real_identical (&c
, &cint
))
5375 /* Given a pointer value OP0, return a simplified version of an
5376 indirection through OP0, or NULL_TREE if no simplification is
5377 possible. Note that the resulting type may be different from
5378 the type pointed to in the sense that it is still compatible
5379 from the langhooks point of view. */
5382 gimple_fold_indirect_ref (tree t
)
5384 tree ptype
= TREE_TYPE (t
), type
= TREE_TYPE (ptype
);
5389 subtype
= TREE_TYPE (sub
);
5390 if (!POINTER_TYPE_P (subtype
))
5393 if (TREE_CODE (sub
) == ADDR_EXPR
)
5395 tree op
= TREE_OPERAND (sub
, 0);
5396 tree optype
= TREE_TYPE (op
);
5398 if (useless_type_conversion_p (type
, optype
))
5401 /* *(foo *)&fooarray => fooarray[0] */
5402 if (TREE_CODE (optype
) == ARRAY_TYPE
5403 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype
))) == INTEGER_CST
5404 && useless_type_conversion_p (type
, TREE_TYPE (optype
)))
5406 tree type_domain
= TYPE_DOMAIN (optype
);
5407 tree min_val
= size_zero_node
;
5408 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
5409 min_val
= TYPE_MIN_VALUE (type_domain
);
5410 if (TREE_CODE (min_val
) == INTEGER_CST
)
5411 return build4 (ARRAY_REF
, type
, op
, min_val
, NULL_TREE
, NULL_TREE
);
5413 /* *(foo *)&complexfoo => __real__ complexfoo */
5414 else if (TREE_CODE (optype
) == COMPLEX_TYPE
5415 && useless_type_conversion_p (type
, TREE_TYPE (optype
)))
5416 return fold_build1 (REALPART_EXPR
, type
, op
);
5417 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
5418 else if (TREE_CODE (optype
) == VECTOR_TYPE
5419 && useless_type_conversion_p (type
, TREE_TYPE (optype
)))
5421 tree part_width
= TYPE_SIZE (type
);
5422 tree index
= bitsize_int (0);
5423 return fold_build3 (BIT_FIELD_REF
, type
, op
, part_width
, index
);
5427 /* *(p + CST) -> ... */
5428 if (TREE_CODE (sub
) == POINTER_PLUS_EXPR
5429 && TREE_CODE (TREE_OPERAND (sub
, 1)) == INTEGER_CST
)
5431 tree addr
= TREE_OPERAND (sub
, 0);
5432 tree off
= TREE_OPERAND (sub
, 1);
5436 addrtype
= TREE_TYPE (addr
);
5438 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
5439 if (TREE_CODE (addr
) == ADDR_EXPR
5440 && TREE_CODE (TREE_TYPE (addrtype
)) == VECTOR_TYPE
5441 && useless_type_conversion_p (type
, TREE_TYPE (TREE_TYPE (addrtype
)))
5442 && tree_fits_uhwi_p (off
))
5444 unsigned HOST_WIDE_INT offset
= tree_to_uhwi (off
);
5445 tree part_width
= TYPE_SIZE (type
);
5446 unsigned HOST_WIDE_INT part_widthi
5447 = tree_to_shwi (part_width
) / BITS_PER_UNIT
;
5448 unsigned HOST_WIDE_INT indexi
= offset
* BITS_PER_UNIT
;
5449 tree index
= bitsize_int (indexi
);
5450 if (offset
/ part_widthi
5451 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype
)))
5452 return fold_build3 (BIT_FIELD_REF
, type
, TREE_OPERAND (addr
, 0),
5456 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
5457 if (TREE_CODE (addr
) == ADDR_EXPR
5458 && TREE_CODE (TREE_TYPE (addrtype
)) == COMPLEX_TYPE
5459 && useless_type_conversion_p (type
, TREE_TYPE (TREE_TYPE (addrtype
))))
5461 tree size
= TYPE_SIZE_UNIT (type
);
5462 if (tree_int_cst_equal (size
, off
))
5463 return fold_build1 (IMAGPART_EXPR
, type
, TREE_OPERAND (addr
, 0));
5466 /* *(p + CST) -> MEM_REF <p, CST>. */
5467 if (TREE_CODE (addr
) != ADDR_EXPR
5468 || DECL_P (TREE_OPERAND (addr
, 0)))
5469 return fold_build2 (MEM_REF
, type
,
5471 wide_int_to_tree (ptype
, off
));
5474 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
5475 if (TREE_CODE (TREE_TYPE (subtype
)) == ARRAY_TYPE
5476 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype
)))) == INTEGER_CST
5477 && useless_type_conversion_p (type
, TREE_TYPE (TREE_TYPE (subtype
))))
5480 tree min_val
= size_zero_node
;
5482 sub
= gimple_fold_indirect_ref (sub
);
5484 sub
= build1 (INDIRECT_REF
, TREE_TYPE (subtype
), osub
);
5485 type_domain
= TYPE_DOMAIN (TREE_TYPE (sub
));
5486 if (type_domain
&& TYPE_MIN_VALUE (type_domain
))
5487 min_val
= TYPE_MIN_VALUE (type_domain
);
5488 if (TREE_CODE (min_val
) == INTEGER_CST
)
5489 return build4 (ARRAY_REF
, type
, sub
, min_val
, NULL_TREE
, NULL_TREE
);
5495 /* Return true if CODE is an operation that when operating on signed
5496 integer types involves undefined behavior on overflow and the
5497 operation can be expressed with unsigned arithmetic. */
5500 arith_code_with_undefined_signed_overflow (tree_code code
)
5508 case POINTER_PLUS_EXPR
:
5515 /* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
5516 operation that can be transformed to unsigned arithmetic by converting
5517 its operand, carrying out the operation in the corresponding unsigned
5518 type and converting the result back to the original type.
5520 Returns a sequence of statements that replace STMT and also contain
5521 a modified form of STMT itself. */
5524 rewrite_to_defined_overflow (gimple stmt
)
5526 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
5528 fprintf (dump_file
, "rewriting stmt with undefined signed "
5530 print_gimple_stmt (dump_file
, stmt
, 0, TDF_SLIM
);
5533 tree lhs
= gimple_assign_lhs (stmt
);
5534 tree type
= unsigned_type_for (TREE_TYPE (lhs
));
5535 gimple_seq stmts
= NULL
;
5536 for (unsigned i
= 1; i
< gimple_num_ops (stmt
); ++i
)
5538 gimple_seq stmts2
= NULL
;
5539 gimple_set_op (stmt
, i
,
5540 force_gimple_operand (fold_convert (type
,
5541 gimple_op (stmt
, i
)),
5542 &stmts2
, true, NULL_TREE
));
5543 gimple_seq_add_seq (&stmts
, stmts2
);
5545 gimple_assign_set_lhs (stmt
, make_ssa_name (type
, stmt
));
5546 if (gimple_assign_rhs_code (stmt
) == POINTER_PLUS_EXPR
)
5547 gimple_assign_set_rhs_code (stmt
, PLUS_EXPR
);
5548 gimple_seq_add_stmt (&stmts
, stmt
);
5549 gimple cvt
= gimple_build_assign_with_ops
5550 (NOP_EXPR
, lhs
, gimple_assign_lhs (stmt
), NULL_TREE
);
5551 gimple_seq_add_stmt (&stmts
, cvt
);
5557 /* Build the expression CODE OP0 of type TYPE with location LOC,
5558 simplifying it first if possible using VALUEIZE if not NULL.
5559 OP0 is expected to be valueized already. Returns the built
5560 expression value and appends statements possibly defining it
5564 gimple_build (gimple_seq
*seq
, location_t loc
,
5565 enum tree_code code
, tree type
, tree op0
,
5566 tree (*valueize
)(tree
))
5568 tree res
= gimple_simplify (code
, type
, op0
, seq
, valueize
);
5571 if (gimple_in_ssa_p (cfun
))
5572 res
= make_ssa_name (type
, NULL
);
5574 res
= create_tmp_reg (type
, NULL
);
5576 if (code
== REALPART_EXPR
5577 || code
== IMAGPART_EXPR
5578 || code
== VIEW_CONVERT_EXPR
)
5579 stmt
= gimple_build_assign_with_ops (code
, res
,
5583 stmt
= gimple_build_assign_with_ops (code
, res
, op0
, NULL_TREE
);
5584 gimple_set_location (stmt
, loc
);
5585 gimple_seq_add_stmt_without_update (seq
, stmt
);
5590 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
5591 simplifying it first if possible using VALUEIZE if not NULL.
5592 OP0 and OP1 are expected to be valueized already. Returns the built
5593 expression value and appends statements possibly defining it
5597 gimple_build (gimple_seq
*seq
, location_t loc
,
5598 enum tree_code code
, tree type
, tree op0
, tree op1
,
5599 tree (*valueize
)(tree
))
5601 tree res
= gimple_simplify (code
, type
, op0
, op1
, seq
, valueize
);
5604 if (gimple_in_ssa_p (cfun
))
5605 res
= make_ssa_name (type
, NULL
);
5607 res
= create_tmp_reg (type
, NULL
);
5608 gimple stmt
= gimple_build_assign_with_ops (code
, res
, op0
, op1
);
5609 gimple_set_location (stmt
, loc
);
5610 gimple_seq_add_stmt_without_update (seq
, stmt
);
5615 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
5616 simplifying it first if possible using VALUEIZE if not NULL.
5617 OP0, OP1 and OP2 are expected to be valueized already. Returns the built
5618 expression value and appends statements possibly defining it
5622 gimple_build (gimple_seq
*seq
, location_t loc
,
5623 enum tree_code code
, tree type
, tree op0
, tree op1
, tree op2
,
5624 tree (*valueize
)(tree
))
5626 tree res
= gimple_simplify (code
, type
, op0
, op1
, op2
,
5630 if (gimple_in_ssa_p (cfun
))
5631 res
= make_ssa_name (type
, NULL
);
5633 res
= create_tmp_reg (type
, NULL
);
5635 if (code
== BIT_FIELD_REF
)
5636 stmt
= gimple_build_assign_with_ops (code
, res
,
5637 build3 (BIT_FIELD_REF
, type
,
5641 stmt
= gimple_build_assign_with_ops (code
, res
, op0
, op1
, op2
);
5642 gimple_set_location (stmt
, loc
);
5643 gimple_seq_add_stmt_without_update (seq
, stmt
);
5648 /* Build the call FN (ARG0) with a result of type TYPE
5649 (or no result if TYPE is void) with location LOC,
5650 simplifying it first if possible using VALUEIZE if not NULL.
5651 ARG0 is expected to be valueized already. Returns the built
5652 expression value (or NULL_TREE if TYPE is void) and appends
5653 statements possibly defining it to SEQ. */
5656 gimple_build (gimple_seq
*seq
, location_t loc
,
5657 enum built_in_function fn
, tree type
, tree arg0
,
5658 tree (*valueize
)(tree
))
5660 tree res
= gimple_simplify (fn
, type
, arg0
, seq
, valueize
);
5663 tree decl
= builtin_decl_implicit (fn
);
5664 gimple stmt
= gimple_build_call (decl
, 1, arg0
);
5665 if (!VOID_TYPE_P (type
))
5667 if (gimple_in_ssa_p (cfun
))
5668 res
= make_ssa_name (type
, NULL
);
5670 res
= create_tmp_reg (type
, NULL
);
5671 gimple_call_set_lhs (stmt
, res
);
5673 gimple_set_location (stmt
, loc
);
5674 gimple_seq_add_stmt_without_update (seq
, stmt
);
5679 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
5680 (or no result if TYPE is void) with location LOC,
5681 simplifying it first if possible using VALUEIZE if not NULL.
5682 ARG0 is expected to be valueized already. Returns the built
5683 expression value (or NULL_TREE if TYPE is void) and appends
5684 statements possibly defining it to SEQ. */
5687 gimple_build (gimple_seq
*seq
, location_t loc
,
5688 enum built_in_function fn
, tree type
, tree arg0
, tree arg1
,
5689 tree (*valueize
)(tree
))
5691 tree res
= gimple_simplify (fn
, type
, arg0
, arg1
, seq
, valueize
);
5694 tree decl
= builtin_decl_implicit (fn
);
5695 gimple stmt
= gimple_build_call (decl
, 2, arg0
, arg1
);
5696 if (!VOID_TYPE_P (type
))
5698 if (gimple_in_ssa_p (cfun
))
5699 res
= make_ssa_name (type
, NULL
);
5701 res
= create_tmp_reg (type
, NULL
);
5702 gimple_call_set_lhs (stmt
, res
);
5704 gimple_set_location (stmt
, loc
);
5705 gimple_seq_add_stmt_without_update (seq
, stmt
);
5710 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
5711 (or no result if TYPE is void) with location LOC,
5712 simplifying it first if possible using VALUEIZE if not NULL.
5713 ARG0 is expected to be valueized already. Returns the built
5714 expression value (or NULL_TREE if TYPE is void) and appends
5715 statements possibly defining it to SEQ. */
5718 gimple_build (gimple_seq
*seq
, location_t loc
,
5719 enum built_in_function fn
, tree type
,
5720 tree arg0
, tree arg1
, tree arg2
,
5721 tree (*valueize
)(tree
))
5723 tree res
= gimple_simplify (fn
, type
, arg0
, arg1
, arg2
, seq
, valueize
);
5726 tree decl
= builtin_decl_implicit (fn
);
5727 gimple stmt
= gimple_build_call (decl
, 3, arg0
, arg1
, arg2
);
5728 if (!VOID_TYPE_P (type
))
5730 if (gimple_in_ssa_p (cfun
))
5731 res
= make_ssa_name (type
, NULL
);
5733 res
= create_tmp_reg (type
, NULL
);
5734 gimple_call_set_lhs (stmt
, res
);
5736 gimple_set_location (stmt
, loc
);
5737 gimple_seq_add_stmt_without_update (seq
, stmt
);
5742 /* Build the conversion (TYPE) OP with a result of type TYPE
5743 with location LOC if such conversion is neccesary in GIMPLE,
5744 simplifying it first.
5745 Returns the built expression value and appends
5746 statements possibly defining it to SEQ. */
5749 gimple_convert (gimple_seq
*seq
, location_t loc
, tree type
, tree op
)
5751 if (useless_type_conversion_p (type
, TREE_TYPE (op
)))
5753 return gimple_build (seq
, loc
, NOP_EXPR
, type
, op
);