/* Statement simplification on GIMPLE.
   Copyright (C) 2010-2015 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.c.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "double-int.h"
#include "fold-const.h"
#include "stringpool.h"
#include "hard-reg-set.h"
#include "statistics.h"
#include "fixed-value.h"
#include "insn-config.h"
#include "stor-layout.h"
#include "dominance.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimple-expr.h"
#include "gimple-iterator.h"
#include "gimple-ssa.h"
#include "tree-ssanames.h"
#include "tree-into-ssa.h"
#include "tree-ssa-propagate.h"
#include "plugin-api.h"
#include "ipa-utils.h"
#include "gimple-pretty-print.h"
#include "tree-ssa-address.h"
#include "langhooks.h"
#include "gimplify-me.h"
#include "gimple-match.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
/* Return true when DECL can be referenced from current unit.
   FROM_DECL (if non-null) specifies the constructor of the variable
   DECL was taken from.
   We can get declarations that are not possible to reference for various
   reasons:

     1) When analyzing C++ virtual tables.
	C++ virtual tables do have known constructors even
	when they are keyed to other compilation unit.
	Those tables can contain pointers to methods and vars
	in other units.  Those methods have both STATIC and EXTERNAL
	set.
     2) In WHOPR mode devirtualization might lead to reference
	to method that was partitioned elsewhere.
	In this case we have static VAR_DECL or FUNCTION_DECL
	that has no corresponding callgraph/varpool node
	defined.
     3) COMDAT functions referred to by external vtables that
	we devirtualize only during final compilation stage.
	At this time we already decided that we will not output
	the function body and thus we can't reference the symbol
	directly.  */
static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  if (DECL_ABSTRACT_P (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL))
    return true;

  /* Static objects can be referred to only if they were not optimized
     out yet.  */
  if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
    {
      /* Before we start optimizing unreachable code we can be sure all
	 static objects are defined.  */
      if (symtab->function_flags_ready)
	return true;
      snode = symtab_node::get (decl);
      if (!snode || !snode->definition)
	return false;
      node = dyn_cast <cgraph_node *> (snode);
      return !node || !node->global.inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from initializer of
     external var or var that has been optimized out.  */
  if (!from_decl
      || TREE_CODE (from_decl) != VAR_DECL
      || (!DECL_EXTERNAL (from_decl)
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->definition)
      || (flag_ltrans
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->in_other_partition))
    return true;
  /* We are folding reference from external vtable.  The vtable may refer
     to a symbol keyed to other compilation unit.  The other compilation
     unit may be in separate DSO and the symbol may be hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
    return false;
  /* When function is public, we always can introduce new reference.
     Exceptions are the COMDAT functions where introducing a direct
     reference implies the need to include the function body in the
     current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We have COMDAT.  We are going to check if we still have definition
     or if the definition is going to be output in other partition.
     Bypass this when gimplifying; all needed functions will be produced.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting to not necessarily give up because the copy will be
     output elsewhere when corresponding vtable is output.
     This is however not possible - the ABI specifies that COMDATs are output
     in units where they are used and when the other unit was compiled with
     LTO it is possible that the vtable was kept public while the function
     itself was privatized.  */
  if (!symtab->function_flags_ready)
    return true;

  snode = symtab_node::get (decl);
  if (!snode
      || ((!snode->definition || DECL_EXTERNAL (decl))
	  && (!snode->in_other_partition
	      || (!snode->forced_by_abi && !snode->force_output))))
    return false;
  node = dyn_cast <cgraph_node *> (snode);
  return !node || !node->global.inlined_to;
}
/* CVAL is value taken from DECL_INITIAL of variable.  Try to transform it
   into acceptable form for is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specifies the variable whose constructor
   contains CVAL.  */
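/* Illustrative example (not part of the original sources): an initializer
   value of the form "&var p+ 4" is rewritten below into the address of a
   MEM_REF, roughly "&MEM[(void *)&var + 4]", so that
   is_gimple_min_invariant accepts it.  */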
tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  tree orig_cval = cval;
  STRIP_NOPS (cval);
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
	cval = build1_loc (EXPR_LOCATION (cval),
			   ADDR_EXPR, TREE_TYPE (ptr),
			   fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
					ptr,
					fold_convert (ptr_type_node,
						      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
	{
	  base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
	  if (base)
	    TREE_OPERAND (cval, 0) = base;
	}
      else
	base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
	return NULL_TREE;

      if ((TREE_CODE (base) == VAR_DECL
	   || TREE_CODE (base) == FUNCTION_DECL)
	  && !can_refer_decl_in_current_unit_p (base, from_decl))
	return NULL_TREE;
      if (TREE_CODE (base) == VAR_DECL)
	TREE_ADDRESSABLE (base) = 1;
      else if (TREE_CODE (base) == FUNCTION_DECL)
	{
	  /* Make sure we create a cgraph node for functions we'll reference.
	     They can be non-existent if the reference comes from an entry
	     of an external vtable for example.  */
	  cgraph_node::get_create (base);
	}
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  if (TREE_OVERFLOW_P (cval))
    return drop_tree_overflow (cval);
  return orig_cval;
}
/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

tree
get_symbol_constant_value (tree sym)
{
  tree val = ctor_for_folding (sym);
  if (val != error_mark_node)
    {
      if (val)
	{
	  val = canonicalize_constructor_val (unshare_expr (val), sym);
	  if (val && is_gimple_min_invariant (val))
	    return val;
	  else
	    return NULL_TREE;
	}
      /* Variables declared 'const' without an initializer
	 have zero as the initializer if they may not be
	 overridden at link or run time.  */
      if (!val
	  && is_gimple_reg_type (TREE_TYPE (sym)))
	return build_zero_cst (TREE_TYPE (sym));
    }

  return NULL_TREE;
}
/* Subroutine of fold_stmt.  We perform several simplifications of the
   memory reference tree EXPR and make sure to re-gimplify them properly
   after propagation of constant addresses.  IS_LHS is true if the
   reference is supposed to be an lvalue.  */
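/* Illustrative example (not part of the original sources):
   REALPART_EXPR <__complex__ (1.0, 2.0)> folds to the constant 1.0,
   and a BIT_FIELD_REF of a VECTOR_CST folds to the selected element.  */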
static tree
maybe_fold_reference (tree expr, bool is_lhs)
{
  tree result;

  if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
       || TREE_CODE (expr) == REALPART_EXPR
       || TREE_CODE (expr) == IMAGPART_EXPR)
      && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_unary_loc (EXPR_LOCATION (expr),
			   TREE_CODE (expr),
			   TREE_TYPE (expr),
			   TREE_OPERAND (expr, 0));
  else if (TREE_CODE (expr) == BIT_FIELD_REF
	   && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_ternary_loc (EXPR_LOCATION (expr),
			     TREE_CODE (expr),
			     TREE_TYPE (expr),
			     TREE_OPERAND (expr, 0),
			     TREE_OPERAND (expr, 1),
			     TREE_OPERAND (expr, 2));

  if (!is_lhs
      && (result = fold_const_aggregate_ref (expr))
      && is_gimple_min_invariant (result))
    return result;

  return NULL_TREE;
}
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	if (TREE_CLOBBER_P (rhs))
	  return NULL_TREE;

	if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs, false);

	else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
	  {
	    tree val = OBJ_TYPE_REF_EXPR (rhs);
	    if (is_gimple_min_invariant (val))
	      return val;
	    else if (flag_devirtualize && virtual_method_call_p (rhs))
	      {
		bool final;
		vec <cgraph_node *>targets
		  = possible_polymorphic_call_targets (rhs, stmt, &final);
		if (final && targets.length () <= 1 && dbg_cnt (devirt))
		  {
		    if (dump_enabled_p ())
		      {
			location_t loc = gimple_location_safe (stmt);
			dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
					 "resolving virtual function address "
					 "reference to function %s\n",
					 targets.length () == 1
					 ? targets[0]->name ()
					 : "NULL");
		      }
		    if (targets.length () == 1)
		      {
			val = fold_convert (TREE_TYPE (val),
					    build_fold_addr_expr_loc
					      (loc, targets[0]->decl));
			STRIP_USELESS_TYPE_CONVERSION (val);
		      }
		    else
		      /* We can not use __builtin_unreachable here because it
			 can not have address taken.  */
		      val = build_int_cst (TREE_TYPE (val), 0);
		    return val;
		  }
	      }
	  }

	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    tree ref = TREE_OPERAND (rhs, 0);
	    tree tem = maybe_fold_reference (ref, true);
	    if (tem
		&& TREE_CODE (tem) == MEM_REF
		&& integer_zerop (TREE_OPERAND (tem, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
	    else if (tem)
	      result = fold_convert (TREE_TYPE (rhs),
				     build_fold_addr_expr_loc (loc, tem));
	    else if (TREE_CODE (ref) == MEM_REF
		     && integer_zerop (TREE_OPERAND (ref, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		 && (CONSTRUCTOR_NELTS (rhs)
		     == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (TREE_CODE (val) != INTEGER_CST
		  && TREE_CODE (val) != REAL_CST
		  && TREE_CODE (val) != FIXED_CST)
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs))
	  return get_symbol_constant_value (rhs);

	/* If we couldn't fold the RHS, hand over to the generic
	   fold routines.  */
	if (result == NULL_TREE)
	  result = fold (rhs);

	/* Strip away useless type conversions.  Both the NON_LVALUE_EXPR
	   that may have been added by fold, and "useless" type
	   conversions that might now be apparent due to propagation.  */
	STRIP_USELESS_TYPE_CONVERSION (result);

	if (result != rhs && valid_gimple_rhs_p (result))
	  return result;

	return NULL_TREE;
      }
      break;

    case GIMPLE_UNARY_RHS:
      break;

    case GIMPLE_BINARY_RHS:
      /* Try to canonicalize for boolean-typed X the comparisons
	 X == 0, X == 1, X != 0, and X != 1.  */
      if (gimple_assign_rhs_code (stmt) == EQ_EXPR
	  || gimple_assign_rhs_code (stmt) == NE_EXPR)
	{
	  tree lhs = gimple_assign_lhs (stmt);
	  tree op1 = gimple_assign_rhs1 (stmt);
	  tree op2 = gimple_assign_rhs2 (stmt);
	  tree type = TREE_TYPE (op1);

	  /* Check whether the comparison operands are of the same boolean
	     type as the result type is.
	     Check that second operand is an integer-constant with value
	     one or zero.  */
	  if (TREE_CODE (op2) == INTEGER_CST
	      && (integer_zerop (op2) || integer_onep (op2))
	      && useless_type_conversion_p (TREE_TYPE (lhs), type))
	    {
	      enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
	      bool is_logical_not = false;

	      /* X == 0 and X != 1 is a logical-not of X
		 X == 1 and X != 0 is X  */
	      if ((cmp_code == EQ_EXPR && integer_zerop (op2))
		  || (cmp_code == NE_EXPR && integer_onep (op2)))
		is_logical_not = true;

	      if (is_logical_not == false)
		result = op1;
	      /* Only for one-bit precision typed X the transformation
		 !X -> ~X is valid.  */
	      else if (TYPE_PRECISION (type) == 1)
		result = build1_loc (gimple_location (stmt), BIT_NOT_EXPR,
				     type, op1);
	      /* Otherwise we use !X -> X ^ 1.  */
	      else
		result = build2_loc (gimple_location (stmt), BIT_XOR_EXPR,
				     type, op1, build_int_cst (type, 1));
	    }
	}

      if (!result)
	result = fold_binary_loc (loc, subcode,
				  TREE_TYPE (gimple_assign_lhs (stmt)),
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_TERNARY_RHS:
      /* Try to fold a conditional expression.  */
      if (gimple_assign_rhs_code (stmt) == COND_EXPR)
	{
	  tree op0 = gimple_assign_rhs1 (stmt);
	  tree tem;
	  bool set = false;
	  location_t cond_loc = gimple_location (stmt);

	  if (COMPARISON_CLASS_P (op0))
	    {
	      fold_defer_overflow_warnings ();
	      tem = fold_binary_loc (cond_loc,
				     TREE_CODE (op0), TREE_TYPE (op0),
				     TREE_OPERAND (op0, 0),
				     TREE_OPERAND (op0, 1));
	      /* This is actually a conditional expression, not a GIMPLE
		 conditional statement, however, the valid_gimple_rhs_p
		 test still applies.  */
	      set = (tem && is_gimple_condexpr (tem)
		     && valid_gimple_rhs_p (tem));
	      fold_undefer_overflow_warnings (set, stmt, 0);
	    }
	  else if (is_gimple_min_invariant (op0))
	    {
	      tem = op0;
	      set = true;
	    }
	  else
	    return NULL_TREE;

	  if (set)
	    result = fold_build3_loc (cond_loc, COND_EXPR,
				      TREE_TYPE (gimple_assign_lhs (stmt)), tem,
				      gimple_assign_rhs2 (stmt),
				      gimple_assign_rhs3 (stmt));
	}

      if (!result)
	result = fold_ternary_loc (loc, subcode,
				   TREE_TYPE (gimple_assign_lhs (stmt)),
				   gimple_assign_rhs1 (stmt),
				   gimple_assign_rhs2 (stmt),
				   gimple_assign_rhs3 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
/* Attempt to fold a conditional statement.  Return true if any changes were
   made.  We only attempt to fold the condition expression, and do not perform
   any transformation that would require alteration of the cfg.  It is
   assumed that the operands have been previously folded.  */
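/* Illustrative example (not part of the original sources): for an
   integral comparison such as "if (x_1 == x_1)", fold_binary_loc below
   yields the constant 1, valid_gimple_rhs_p accepts it, and the condition
   is rewritten via gimple_cond_set_condition_from_tree.  */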
static bool
fold_gimple_cond (gcond *stmt)
{
  tree result = fold_binary_loc (gimple_location (stmt),
				 gimple_cond_code (stmt),
				 boolean_type_node,
				 gimple_cond_lhs (stmt),
				 gimple_cond_rhs (stmt));

  if (result)
    {
      STRIP_USELESS_TYPE_CONVERSION (result);
      if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
	{
	  gimple_cond_set_condition_from_tree (stmt, result);
	  return true;
	}
    }

  return false;
}
/* Replace a statement at *SI_P with a sequence of statements in STMTS,
   adjusting the replacement stmts location and virtual operands.
   If the statement has a lhs the last stmt in the sequence is expected
   to assign to that lhs.  */

static void
gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
{
  gimple stmt = gsi_stmt (*si_p);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  gimple laststore = NULL;
  for (gimple_stmt_iterator i = gsi_last (stmts);
       !gsi_end_p (i); gsi_prev (&i))
    {
      gimple new_stmt = gsi_stmt (i);
      if ((gimple_assign_single_p (new_stmt)
	   && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	  || (is_gimple_call (new_stmt)
	      && (gimple_call_flags (new_stmt)
		  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
	{
	  tree vdef;
	  if (!laststore)
	    vdef = gimple_vdef (stmt);
	  else
	    vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
	  gimple_set_vdef (new_stmt, vdef);
	  if (vdef && TREE_CODE (vdef) == SSA_NAME)
	    SSA_NAME_DEF_STMT (vdef) = new_stmt;
	  laststore = new_stmt;
	}
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  tree reaching_vuse = gimple_vuse (stmt);
  for (gimple_stmt_iterator i = gsi_start (stmts);
       !gsi_end_p (i); gsi_next (&i))
    {
      gimple new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with exact SSA
	 name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
	gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
	reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
	  && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL,
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple stmt, new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  push_gimplify_context (gimple_in_ssa_p (cfun));

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  pop_gimplify_context (NULL);
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  gsi_replace (si_p, gimple_build_nop (), false);
	  return;
	}
    }
  else
    {
      tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
				       GSI_CONTINUE_LINKING);
    }

  pop_gimplify_context (NULL);

  gsi_replace_with_seq_vops (si_p, stmts);
}
/* Replace the call at *GSI with the gimple value VAL.  */

static void
replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
{
  gimple stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  gimple repl;
  if (lhs)
    {
      if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
	val = fold_convert (TREE_TYPE (lhs), val);
      repl = gimple_build_assign (lhs, val);
    }
  else
    repl = gimple_build_nop ();
  tree vdef = gimple_vdef (stmt);
  if (vdef && TREE_CODE (vdef) == SSA_NAME)
    {
      unlink_stmt_vdef (stmt);
      release_ssa_name (vdef);
    }
  gsi_replace (gsi, repl, false);
}
/* Replace the call at *GSI with the new call REPL and fold that
   again.  */

static void
replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple repl)
{
  gimple stmt = gsi_stmt (*gsi);
  gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
  gimple_set_location (repl, gimple_location (stmt));
  if (gimple_vdef (stmt)
      && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
    {
      gimple_set_vdef (repl, gimple_vdef (stmt));
      gimple_set_vuse (repl, gimple_vuse (stmt));
      SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
    }
  gsi_replace (gsi, repl, false);
  fold_stmt (gsi);
}
/* Return true if VAR is a VAR_DECL or a component thereof.  */

static bool
var_decl_component_p (tree var)
{
  tree inner = var;
  while (handled_component_p (inner))
    inner = TREE_OPERAND (inner, 0);
  return SSA_VAR_P (inner);
}
/* Fold function call to builtin mem{{,p}cpy,move}.  Return
   false if no simplification can be made.
   If ENDP is 0, return DEST (like memcpy).
   If ENDP is 1, return DEST+LEN (like mempcpy).
   If ENDP is 2, return DEST+LEN-1 (like stpcpy).
   If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
   (memmove).  */
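/* Illustrative sketch (not from the original sources): with LEN == 4,
   suitable alignment and non-overlapping operands, the call
       memcpy (dest, src, 4)
   is inlined below as a single load/store pair, roughly
       tmp_1 = MEM[src];
       MEM[dest] = tmp_1;
   using a 4-byte integer type; for ENDP == 1 (mempcpy) the call result
   is then replaced by dest + 4.  */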
static bool
gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
			       tree dest, tree src, int endp)
{
  gimple stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  tree len = gimple_call_arg (stmt, 2);
  tree destvar, srcvar;
  location_t loc = gimple_location (stmt);

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      gimple repl;
      if (gimple_call_lhs (stmt))
	repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
      else
	repl = gimple_build_nop ();
      tree vdef = gimple_vdef (stmt);
      if (vdef && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
      gsi_replace (gsi, repl, false);
      return true;
    }

  /* If SRC and DEST are the same (and not volatile), return
     DEST{,+LEN,+LEN-1}.  */
  if (operand_equal_p (src, dest, 0))
    {
      unlink_stmt_vdef (stmt);
      if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	release_ssa_name (gimple_vdef (stmt));
      if (!lhs)
	{
	  gsi_replace (gsi, gimple_build_nop (), false);
	  return true;
	}
      goto done;
    }
  else
    {
      tree srctype, desttype;
      unsigned int src_align, dest_align;
      tree off0;

      /* Inlining of memcpy/memmove may cause bounds to be lost (if we copy
	 pointers as wide integer) and also may result in huge function
	 size because of inlined bounds copy.  Thus don't inline for
	 functions we want to instrument.  */
      if (flag_check_pointer_bounds
	  && chkp_instrumentable_p (cfun->decl)
	  /* Even if data may contain pointers we can inline if copy
	     less than a pointer size.  */
	  && (!tree_fits_uhwi_p (len)
	      || compare_tree_int (len, POINTER_SIZE_UNITS) >= 0))
	return false;

      /* Build accesses at offset zero with a ref-all character type.  */
      off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
							 ptr_mode, true), 0);

      /* If we can perform the copy efficiently with first doing all loads
	 and then all stores inline it that way.  Currently efficiently
	 means that we can load all the memory into a single integer
	 register which is what MOVE_MAX gives us.  */
      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (tree_fits_uhwi_p (len)
	  && compare_tree_int (len, MOVE_MAX) <= 0
	  /* ???  Don't transform copies from strings with known length this
	     confuses the tree-ssa-strlen.c.  This doesn't handle
	     the case in gcc.dg/strlenopt-8.c which is XFAILed for that
	     reason.  */
	  && !c_strlen (src, 2))
	{
	  unsigned ilen = tree_to_uhwi (len);
	  if (exact_log2 (ilen) != -1)
	    {
	      tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
	      if (type
		  && TYPE_MODE (type) != BLKmode
		  && (GET_MODE_SIZE (TYPE_MODE (type)) * BITS_PER_UNIT
		      == ilen * 8)
		  /* If the destination pointer is not aligned we must be able
		     to emit an unaligned store.  */
		  && (dest_align >= GET_MODE_ALIGNMENT (TYPE_MODE (type))
		      || !SLOW_UNALIGNED_ACCESS (TYPE_MODE (type),
						 dest_align)))
		{
		  tree srctype = type;
		  tree desttype = type;
		  if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
		    srctype = build_aligned_type (type, src_align);
		  tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
		  tree tem = fold_const_aggregate_ref (srcmem);
		  if (tem)
		    srcmem = tem;
		  else if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type))
			   && SLOW_UNALIGNED_ACCESS (TYPE_MODE (type),
						     src_align))
		    srcmem = NULL_TREE;
		  if (srcmem)
		    {
		      gimple new_stmt;
		      if (is_gimple_reg_type (TREE_TYPE (srcmem)))
			{
			  new_stmt = gimple_build_assign (NULL_TREE, srcmem);
			  if (gimple_in_ssa_p (cfun))
			    srcmem = make_ssa_name (TREE_TYPE (srcmem),
						    new_stmt);
			  else
			    srcmem = create_tmp_reg (TREE_TYPE (srcmem));
			  gimple_assign_set_lhs (new_stmt, srcmem);
			  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
			}
		      if (dest_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
			desttype = build_aligned_type (type, dest_align);
		      new_stmt
			= gimple_build_assign (fold_build2 (MEM_REF, desttype,
							    dest, off0),
					       srcmem);
		      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
		      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
		      if (gimple_vdef (new_stmt)
			  && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
			SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
		      if (!lhs)
			{
			  gsi_replace (gsi, new_stmt, false);
			  return true;
			}
		      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
		      goto done;
		    }
		}
	    }
	}
      if (endp == 3)
	{
	  /* Both DEST and SRC must be pointer types.
	     ??? This is what old code did.  Is the testing for pointer types
	     really mandatory?

	     If either SRC is readonly or length is 1, we can use memcpy.  */
	  if (!dest_align || !src_align)
	    return false;
	  if (readonly_data_expr (src)
	      || (tree_fits_uhwi_p (len)
		  && (MIN (src_align, dest_align) / BITS_PER_UNIT
		      >= tree_to_uhwi (len))))
	    {
	      tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
	      if (!fn)
		return false;
	      gimple_call_set_fndecl (stmt, fn);
	      gimple_call_set_arg (stmt, 0, dest);
	      gimple_call_set_arg (stmt, 1, src);
	      fold_stmt (gsi);
	      return true;
	    }

	  /* If *src and *dest can't overlap, optimize into memcpy as well.  */
	  if (TREE_CODE (src) == ADDR_EXPR
	      && TREE_CODE (dest) == ADDR_EXPR)
	    {
	      tree src_base, dest_base, fn;
	      HOST_WIDE_INT src_offset = 0, dest_offset = 0;
	      HOST_WIDE_INT maxsize;

	      srcvar = TREE_OPERAND (src, 0);
	      src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
	      if (src_base == NULL)
		src_base = srcvar;
	      destvar = TREE_OPERAND (dest, 0);
	      dest_base = get_addr_base_and_unit_offset (destvar,
							 &dest_offset);
	      if (dest_base == NULL)
		dest_base = destvar;
	      if (tree_fits_uhwi_p (len))
		maxsize = tree_to_uhwi (len);
	      else
		maxsize = -1;
	      if (SSA_VAR_P (src_base)
		  && SSA_VAR_P (dest_base))
		{
		  if (operand_equal_p (src_base, dest_base, 0)
		      && ranges_overlap_p (src_offset, maxsize,
					   dest_offset, maxsize))
		    return false;
		}
	      else if (TREE_CODE (src_base) == MEM_REF
		       && TREE_CODE (dest_base) == MEM_REF)
		{
		  if (! operand_equal_p (TREE_OPERAND (src_base, 0),
					 TREE_OPERAND (dest_base, 0), 0))
		    return false;
		  offset_int off = mem_ref_offset (src_base) + src_offset;
		  if (!wi::fits_shwi_p (off))
		    return false;
		  src_offset = off.to_shwi ();

		  off = mem_ref_offset (dest_base) + dest_offset;
		  if (!wi::fits_shwi_p (off))
		    return false;
		  dest_offset = off.to_shwi ();
		  if (ranges_overlap_p (src_offset, maxsize,
					dest_offset, maxsize))
		    return false;
		}
	      else
		return false;

	      fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
	      if (!fn)
		return false;
	      gimple_call_set_fndecl (stmt, fn);
	      gimple_call_set_arg (stmt, 0, dest);
	      gimple_call_set_arg (stmt, 1, src);
	      fold_stmt (gsi);
	      return true;
	    }

	  /* If the destination and source do not alias optimize into
	     memcpy as well.  */
	  if ((is_gimple_min_invariant (dest)
	       || TREE_CODE (dest) == SSA_NAME)
	      && (is_gimple_min_invariant (src)
		  || TREE_CODE (src) == SSA_NAME))
	    {
	      ao_ref destr, srcr;
	      ao_ref_init_from_ptr_and_size (&destr, dest, len);
	      ao_ref_init_from_ptr_and_size (&srcr, src, len);
	      if (!refs_may_alias_p_1 (&destr, &srcr, false))
		{
		  tree fn;
		  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
		  if (!fn)
		    return false;
		  gimple_call_set_fndecl (stmt, fn);
		  gimple_call_set_arg (stmt, 0, dest);
		  gimple_call_set_arg (stmt, 1, src);
		  fold_stmt (gsi);
		  return true;
		}
	    }

	  return false;
	}

      if (!tree_fits_shwi_p (len))
	return false;
      /* FIXME:
	 This logic loses for arguments like (type *)malloc (sizeof (type)),
	 since we strip the casts of up to VOID return value from malloc.
	 Perhaps we ought to inherit type from non-VOID argument here?  */
      STRIP_NOPS (src);
      STRIP_NOPS (dest);
      if (!POINTER_TYPE_P (TREE_TYPE (src))
	  || !POINTER_TYPE_P (TREE_TYPE (dest)))
	return false;
      /* In the following try to find a type that is most natural to be
	 used for the memcpy source and destination and that allows
	 the most optimization when memcpy is turned into a plain assignment
	 using that type.  In theory we could always use a char[len] type
	 but that only gains us that the destination and source possibly
	 no longer will have their address taken.  */
      /* As we fold (void *)(p + CST) to (void *)p + CST undo this here.  */
      if (TREE_CODE (src) == POINTER_PLUS_EXPR)
	{
	  tree tem = TREE_OPERAND (src, 0);
	  STRIP_NOPS (tem);
	  if (tem != TREE_OPERAND (src, 0))
	    src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
	}
      if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
	{
	  tree tem = TREE_OPERAND (dest, 0);
	  STRIP_NOPS (tem);
	  if (tem != TREE_OPERAND (dest, 0))
	    dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
	}
      srctype = TREE_TYPE (TREE_TYPE (src));
      if (TREE_CODE (srctype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
	{
	  srctype = TREE_TYPE (srctype);
	  STRIP_NOPS (src);
	  src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
	}
      desttype = TREE_TYPE (TREE_TYPE (dest));
      if (TREE_CODE (desttype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
	{
	  desttype = TREE_TYPE (desttype);
	  STRIP_NOPS (dest);
	  dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
	}
      if (TREE_ADDRESSABLE (srctype)
	  || TREE_ADDRESSABLE (desttype))
	return false;

      /* Make sure we are not copying using a floating-point mode or
	 a type whose size possibly does not match its precision.  */
      if (FLOAT_MODE_P (TYPE_MODE (desttype))
	  || TREE_CODE (desttype) == BOOLEAN_TYPE
	  || TREE_CODE (desttype) == ENUMERAL_TYPE)
	desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
      if (FLOAT_MODE_P (TYPE_MODE (srctype))
	  || TREE_CODE (srctype) == BOOLEAN_TYPE
	  || TREE_CODE (srctype) == ENUMERAL_TYPE)
	srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
      if (!srctype)
	srctype = desttype;
      if (!desttype)
	desttype = srctype;
      if (!srctype)
	return false;

      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (dest_align < TYPE_ALIGN (desttype)
	  || src_align < TYPE_ALIGN (srctype))
	return false;

      destvar = dest;
      STRIP_NOPS (destvar);
      if (TREE_CODE (destvar) == ADDR_EXPR
	  && var_decl_component_p (TREE_OPERAND (destvar, 0))
	  && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
	destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
      else
	destvar = NULL_TREE;

      srcvar = src;
      STRIP_NOPS (srcvar);
      if (TREE_CODE (srcvar) == ADDR_EXPR
	  && var_decl_component_p (TREE_OPERAND (srcvar, 0))
	  && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
	{
	  if (!destvar
	      || src_align >= TYPE_ALIGN (desttype))
	    srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
				  srcvar, off0);
	  else if (!STRICT_ALIGNMENT)
	    {
	      srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
					    src_align);
	      srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
	    }
	  else
	    srcvar = NULL_TREE;
	}
      else
	srcvar = NULL_TREE;

      if (srcvar == NULL_TREE && destvar == NULL_TREE)
	return false;

      if (srcvar == NULL_TREE)
	{
	  STRIP_NOPS (src);
	  if (src_align >= TYPE_ALIGN (desttype))
	    srcvar = fold_build2 (MEM_REF, desttype, src, off0);
	  else
	    {
	      if (STRICT_ALIGNMENT)
		return false;
	      srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
					    src_align);
	      srcvar = fold_build2 (MEM_REF, srctype, src, off0);
	    }
	}
      else if (destvar == NULL_TREE)
	{
	  STRIP_NOPS (dest);
	  if (dest_align >= TYPE_ALIGN (srctype))
	    destvar = fold_build2 (MEM_REF, srctype, dest, off0);
	  else
	    {
	      if (STRICT_ALIGNMENT)
		return false;
	      desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
					     dest_align);
	      destvar = fold_build2 (MEM_REF, desttype, dest, off0);
	    }
	}

      gimple new_stmt;
      if (is_gimple_reg_type (TREE_TYPE (srcvar)))
	{
	  new_stmt = gimple_build_assign (NULL_TREE, srcvar);
	  if (gimple_in_ssa_p (cfun))
	    srcvar = make_ssa_name (TREE_TYPE (srcvar), new_stmt);
	  else
	    srcvar = create_tmp_reg (TREE_TYPE (srcvar));
	  gimple_assign_set_lhs (new_stmt, srcvar);
	  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
	  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
	}
      new_stmt = gimple_build_assign (destvar, srcvar);
      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
      if (gimple_vdef (new_stmt)
	  && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
	SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
      if (!lhs)
	{
	  gsi_replace (gsi, new_stmt, false);
	  return true;
	}
      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
    }

done:
  if (endp == 0 || endp == 3)
    len = NULL_TREE;
  else if (endp == 2)
    len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
			   ssize_int (1));
  if (endp == 2 || endp == 1)
    dest = fold_build_pointer_plus_loc (loc, dest, len);

  dest = force_gimple_operand_gsi (gsi, dest, false, NULL_TREE, true,
				   GSI_SAME_STMT);
  gimple repl = gimple_build_assign (lhs, dest);
  gsi_replace (gsi, repl, false);
  return true;
}
/* Fold function call to builtin memset or bzero at *GSI setting the
   memory of size LEN to VAL.  Return whether a simplification was made.  */
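/* Illustrative example (not from the original sources): for a 4-byte
   destination with suitable alignment, memset (&i, 0xab, 4) is folded
   below into the single store MEM[&i] = 0xabababab, the byte value being
   replicated into every byte of the stored word.  */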
static bool
gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
{
  gimple stmt = gsi_stmt (*gsi);
  tree etype;
  unsigned HOST_WIDE_INT length, cval;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
      return true;
    }

  if (! tree_fits_uhwi_p (len))
    return false;

  if (TREE_CODE (c) != INTEGER_CST)
    return false;

  tree dest = gimple_call_arg (stmt, 0);
  tree var = dest;
  if (TREE_CODE (var) != ADDR_EXPR)
    return false;

  var = TREE_OPERAND (var, 0);
  if (TREE_THIS_VOLATILE (var))
    return false;

  etype = TREE_TYPE (var);
  if (TREE_CODE (etype) == ARRAY_TYPE)
    etype = TREE_TYPE (etype);

  if (!INTEGRAL_TYPE_P (etype)
      && !POINTER_TYPE_P (etype))
    return false;

  if (! var_decl_component_p (var))
    return false;

  length = tree_to_uhwi (len);
  if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
      || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
    return false;

  if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
    return false;

  if (integer_zerop (c))
    cval = 0;
  else
    {
      if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
	return false;

      cval = TREE_INT_CST_LOW (c);
      cval &= 0xff;
      cval |= cval << 8;
      cval |= cval << 16;
      cval |= (cval << 31) << 1;
    }

  var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
  gimple store = gimple_build_assign (var, build_int_cst_type (etype, cval));
  gimple_set_vuse (store, gimple_vuse (stmt));
  tree vdef = gimple_vdef (stmt);
  if (vdef && TREE_CODE (vdef) == SSA_NAME)
    {
      gimple_set_vdef (store, gimple_vdef (stmt));
      SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
    }
  gsi_insert_before (gsi, store, GSI_SAME_STMT);
  if (gimple_call_lhs (stmt))
    {
      gimple asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
      gsi_replace (gsi, asgn, false);
    }
  else
    {
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (gsi);
      gsi_remove (&gsi2, true);
    }

  return true;
}
/* Return the string length, maximum string length or maximum value of
   ARG in LENGTH.
   If ARG is an SSA name variable, follow its use-def chains.  If LENGTH
   is not NULL and, for TYPE == 0, its value is not equal to the length
   we determine or if we are unable to determine the length or value,
   return false.  VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be returned, 1 for maximum string
   length and 2 for maximum value ARG can have.  */
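/* Illustrative example (not from the original sources): for
   arg_1 = PHI <"ab", "cdef">, a TYPE == 0 query fails because the two
   string lengths differ, while TYPE == 1 succeeds and returns the
   maximum string length 4.  */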
static bool
get_maxval_strlen (tree arg, tree *length, bitmap *visited, int type)
{
  tree var, val;
  gimple def_stmt;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      if (TREE_CODE (arg) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
	  && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
	{
	  tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_maxval_strlen (TREE_OPERAND (aop0, 0),
				      length, visited, type);
	}

      if (type == 2)
	{
	  val = arg;
	  if (TREE_CODE (val) != INTEGER_CST
	      || tree_int_cst_sgn (val) < 0)
	    return false;
	}
      else
	val = c_strlen (arg, 1);
      if (!val)
	return false;

      if (*length)
	{
	  if (type > 0)
	    {
	      if (TREE_CODE (*length) != INTEGER_CST
		  || TREE_CODE (val) != INTEGER_CST)
		return false;

	      if (tree_int_cst_lt (*length, val))
		*length = val;
	      return true;
	    }
	  else if (simple_cst_equal (val, *length) != 1)
	    return false;
	}

      *length = val;
      return true;
    }

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  */
  if (!*visited)
    *visited = BITMAP_ALLOC (NULL);
  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
    case GIMPLE_ASSIGN:
      /* The RHS of the statement defining VAR must either have a
	 constant length or come from another SSA_NAME with a constant
	 length.  */
      if (gimple_assign_single_p (def_stmt)
	  || gimple_assign_unary_nop_p (def_stmt))
	{
	  tree rhs = gimple_assign_rhs1 (def_stmt);
	  return get_maxval_strlen (rhs, length, visited, type);
	}
      else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
	{
	  tree op2 = gimple_assign_rhs2 (def_stmt);
	  tree op3 = gimple_assign_rhs3 (def_stmt);
	  return get_maxval_strlen (op2, length, visited, type)
		 && get_maxval_strlen (op3, length, visited, type);
	}
      return false;

    case GIMPLE_PHI:
      {
	/* All the arguments of the PHI node must have the same constant
	   length.  */
	unsigned i;

	for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
	  {
	    tree arg = gimple_phi_arg (def_stmt, i)->def;

	    /* If this PHI has itself as an argument, we cannot
	       determine the string length of this argument.  However,
	       if we can find a constant string length for the other
	       PHI args then we can still be sure that this is a
	       constant string length.  So be optimistic and just
	       continue with the next argument.  */
	    if (arg == gimple_phi_result (def_stmt))
	      continue;

	    if (!get_maxval_strlen (arg, length, visited, type))
	      return false;
	  }
      }
      return true;

    default:
      return false;
    }
}

tree
get_maxval_strlen (tree arg, int type)
{
  bitmap visited = NULL;
  tree len = NULL_TREE;
  if (!get_maxval_strlen (arg, &len, &visited, type))
    len = NULL_TREE;
  if (visited)
    BITMAP_FREE (visited);

  return len;
}
/* Fold function call to builtin strcpy with arguments DEST and SRC.
   If LEN is not NULL, it represents the length of the string to be
   copied.  Return false if no simplification can be made.  */
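/* Illustrative example (not from the original sources): if
   get_maxval_strlen determines strlen (src) == 3, the call
   strcpy (d, src) is folded below into memcpy (d, src, 4), copying the
   terminating NUL as well.  */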
static bool
gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
			    tree dest, tree src)
{
  location_t loc = gimple_location (gsi_stmt (*gsi));
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (operand_equal_p (src, dest, 0))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (optimize_function_for_size_p (cfun))
    return false;

  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  tree len = get_maxval_strlen (src, 0);
  if (!len)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
   If SLEN is not NULL, it represents the length of the source string.
   Return false if no simplification can be made.  */

static bool
gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
			     tree dest, tree src, tree len)
{
  location_t loc = gimple_location (gsi_stmt (*gsi));
  tree fn;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  /* We can't compare slen with len as constants below if len is not a
     constant.  */
  if (TREE_CODE (len) != INTEGER_CST)
    return false;

  /* Now, we must be passed a constant src ptr parameter.  */
  tree slen = get_maxval_strlen (src, 0);
  if (!slen || TREE_CODE (slen) != INTEGER_CST)
    return false;

  slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

  /* We do not support simplification of this case, though we do
     support it when expanding trees into RTL.  */
  /* FIXME: generate a call to __builtin_memset.  */
  if (tree_int_cst_lt (slen, len))
    return false;

  /* OK transform into builtin memcpy.  */
  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Simplify a call to the strcat builtin.  DST and SRC are the arguments
   to the call.

   Return NULL_TREE if no simplification was possible, otherwise return the
   simplified form of the call as a tree.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */
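/* Illustrative sketch (not from the original sources) of the split
   performed below when the source length LEN is known:
       strcat (dst, src);
   becomes
       tmp_1 = strlen (dst);
       newdst_2 = dst + tmp_1;
       memcpy (newdst_2, src, LEN + 1);  */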
static bool
gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);

  const char *p = c_getstr (src);

  /* If the string length is zero, return the dst parameter.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
    return false;

  /* See if we can store by pieces into (dst + strlen(dst)).  */
  tree newdst;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
  tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);

  if (!strlen_fn || !memcpy_fn)
    return false;

  /* If the length of the source string isn't computable don't
     split strcat into strlen and memcpy.  */
  tree len = get_maxval_strlen (src, 0);
  if (! len)
    return false;

  /* Create strlen (dst).  */
  gimple_seq stmts = NULL, stmts2;
  gimple repl = gimple_build_call (strlen_fn, 1, dst);
  gimple_set_location (repl, loc);
  if (gimple_in_ssa_p (cfun))
    newdst = make_ssa_name (size_type_node);
  else
    newdst = create_tmp_reg (size_type_node);
  gimple_call_set_lhs (repl, newdst);
  gimple_seq_add_stmt_without_update (&stmts, repl);

  /* Create (dst p+ strlen (dst)).  */
  newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
  newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len,
			build_int_cst (size_type_node, 1));
  len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
  gimple_seq_add_stmt_without_update (&stmts, repl);
  if (gimple_call_lhs (stmt))
    {
      repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      gsi_replace_with_seq_vops (gsi, stmts);
      /* gsi now points at the assignment to the lhs, get a
	 stmt iterator to the memcpy call.
	 ???  We can't use gsi_for_stmt as that doesn't work when the
	 CFG isn't built yet.  */
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (&gsi2);
      fold_stmt (&gsi2);
    }
  else
    {
      gsi_replace_with_seq_vops (gsi, stmts);
      fold_stmt (gsi);
    }
  return true;
}
/* Fold a call to the __strcat_chk builtin FNDECL.  DEST, SRC, and SIZE
   are the arguments to the call.  */

static bool
gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree size = gimple_call_arg (stmt, 2);
  tree fn;
  const char *p;

  p = c_getstr (src);
  /* If the SRC parameter is "", return DEST.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
    return false;

  /* If __builtin_strcat_chk is used, assume strcat is available.  */
  fn = builtin_decl_explicit (BUILT_IN_STRCAT);
  if (!fn)
    return false;

  gimple repl = gimple_build_call (fn, 2, dest, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Simplify a call to the strncat builtin.  */

static bool
gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dst = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  const char *p = c_getstr (src);

  /* If the requested length is zero, or the src parameter string
     length is zero, return the dst parameter.  */
  if (integer_zerop (len) || (p && *p == '\0'))
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  /* If the requested len is greater than or equal to the string
     length, call strcat.  */
  if (TREE_CODE (len) == INTEGER_CST && p
      && compare_tree_int (len, strlen (p)) >= 0)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);

      /* If the replacement _DECL isn't initialized, don't do the
	 transformation.  */
      if (!fn)
	return false;

      gcall *repl = gimple_build_call (fn, 2, dst, src);
      replace_call_with_call_and_fold (gsi, repl);
      return true;
    }

  return false;
}
/* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
   LEN, and SIZE.  */

static bool
gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);
  tree size = gimple_call_arg (stmt, 3);
  tree fn;
  const char *p;

  p = c_getstr (src);
  /* If the SRC parameter is "" or if LEN is 0, return DEST.  */
  if ((p && *p == '\0')
      || integer_zerop (len))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  if (! integer_all_onesp (size))
    {
      tree src_len = c_strlen (src, 1);
      if (src_len
	  && tree_fits_uhwi_p (src_len)
	  && tree_fits_uhwi_p (len)
	  && ! tree_int_cst_lt (len, src_len))
	{
	  /* If LEN >= strlen (SRC), optimize into __strcat_chk.  */
	  fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
	  if (!fn)
	    return false;

	  gimple repl = gimple_build_call (fn, 3, dest, src, size);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
      return false;
    }

  /* If __builtin_strncat_chk is used, assume strncat is available.  */
  fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
  if (!fn)
    return false;

  gimple repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold a call to the fputs builtin.  ARG0 and ARG1 are the arguments
   to the call.  IGNORE is true if the value returned
   by the builtin will be ignored.  UNLOCKED is true if this is actually
   a call to fputs_unlocked.  If LEN is non-NULL, it represents
   the known length of the string.  Return false if no simplification
   was possible.  */

static bool
gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
			   tree arg0, tree arg1,
			   bool unlocked)
{
  gimple stmt = gsi_stmt (*gsi);

  /* If we're using an unlocked function, assume the other unlocked
     functions exist explicitly.  */
  tree const fn_fputc = (unlocked
			 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
			 : builtin_decl_implicit (BUILT_IN_FPUTC));
  tree const fn_fwrite = (unlocked
			  ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
			  : builtin_decl_implicit (BUILT_IN_FWRITE));

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt))
    return false;

  /* Get the length of the string passed to fputs.  If the length
     can't be determined, punt.  */
  tree len = get_maxval_strlen (arg0, 0);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return false;

  switch (compare_tree_int (len, 1))
    {
    case -1: /* length is 0, delete the call entirely.  */
      replace_call_with_value (gsi, integer_zero_node);
      return true;

    case 0: /* length is 1, call fputc.  */
      {
	const char *p = c_getstr (arg0);
	if (p != NULL)
	  {
	    if (!fn_fputc)
	      return false;

	    gimple repl = gimple_build_call (fn_fputc, 2,
					     build_int_cst
					       (integer_type_node, p[0]), arg1);
	    replace_call_with_call_and_fold (gsi, repl);
	    return true;
	  }
      }
      /* FALLTHROUGH */
    case 1: /* length is greater than 1, call fwrite.  */
      {
	/* If optimizing for size keep fputs.  */
	if (optimize_function_for_size_p (cfun))
	  return false;
	/* New argument list transforming fputs(string, stream) to
	   fwrite(string, 1, len, stream).  */
	if (!fn_fwrite)
	  return false;

	gimple repl = gimple_build_call (fn_fwrite, 4, arg0,
					 size_one_node, len, arg1);
	replace_call_with_call_and_fold (gsi, repl);
	return true;
      }
    default:
      gcc_unreachable ();
    }
  return false;
}
/* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   DEST, SRC, LEN, and SIZE are the arguments to the call.
   IGNORE is true if the return value can be ignored.  FCODE is the
   BUILT_IN_* code of the builtin.  If MAXLEN is not NULL, it is maximum
   length passed as third argument.  */
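/* Illustrative example (not from the original sources): when the
   destination object size is known to be large enough, e.g.
       __memcpy_chk (d, s, n, sz)  with constant n <= sz,
   the checking variant is folded below into the plain memcpy (d, s, n);
   a SIZE of (size_t)-1 means the object size is unknown and the check
   always passes.  */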
static bool
gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
				tree dest, tree src, tree len, tree size,
				enum built_in_function fcode)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST
     (resp. DEST+LEN for __mempcpy_chk).  */
  if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
    {
      if (fcode != BUILT_IN_MEMPCPY_CHK)
	{
	  replace_call_with_value (gsi, dest);
	  return true;
	}
      else
	{
	  tree temp = fold_build_pointer_plus_loc (loc, dest, len);
	  temp = force_gimple_operand_gsi (gsi, temp,
					   false, NULL_TREE, true,
					   GSI_SAME_STMT);
	  replace_call_with_value (gsi, temp);
	  return true;
	}
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (len, 2);
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
		{
		  /* (void) __mempcpy_chk () can be optimized into
		     (void) __memcpy_chk ().  */
		  fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
		  if (!fn)
		    return false;

		  gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}
	      return false;
	    }
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  fn = NULL_TREE;
  /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
     mem{cpy,pcpy,move,set} is available.  */
  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
      break;
    case BUILT_IN_MEMPCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
      break;
    case BUILT_IN_MEMMOVE_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
      break;
    case BUILT_IN_MEMSET_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMSET);
      break;
    default:
      break;
    }

  if (!fn)
    return false;

  gimple repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold a call to the __st[rp]cpy_chk builtin.
   DEST, SRC, and SIZE are the arguments to the call.
   IGNORE is true if the return value can be ignored.  FCODE is the
   BUILT_IN_* code of the builtin.  If MAXLEN is not NULL, it is maximum
   length of strings passed as second argument.  */

static bool
gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
				tree dest,
				tree src, tree size,
				enum built_in_function fcode)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree len, fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (src, 1);
  if (! integer_all_onesp (size))
    {
      len = c_strlen (src, 1);
      if (! len || ! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    {
	      if (fcode == BUILT_IN_STPCPY_CHK)
		{
		  if (! ignore)
		    return false;

		  /* If return value of __stpcpy_chk is ignored,
		     optimize into __strcpy_chk.  */
		  fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
		  if (!fn)
		    return false;

		  gimple repl = gimple_build_call (fn, 3, dest, src, size);
		  replace_call_with_call_and_fold (gsi, repl);
		  return true;
		}

	      if (! len || TREE_SIDE_EFFECTS (len))
		return false;

	      /* If c_strlen returned something, but not a constant,
		 transform __strcpy_chk into __memcpy_chk.  */
	      fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
	      if (!fn)
		return false;

	      len = fold_convert_loc (loc, size_type_node, len);
	      len = size_binop_loc (loc, PLUS_EXPR, len,
				    build_int_cst (size_type_node, 1));
	      len = force_gimple_operand_gsi (gsi, len, true, NULL_TREE,
					      true, GSI_SAME_STMT);
	      gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
	      replace_call_with_call_and_fold (gsi, repl);
	      return true;
	    }
	}
      else
	maxlen = len;

      if (! tree_int_cst_lt (maxlen, size))
	return false;
    }

  /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
			      ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
  if (!fn)
    return false;

  gimple repl = gimple_build_call (fn, 2, dest, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold a call to the __st{r,p}ncpy_chk builtin.  DEST, SRC, LEN, and SIZE
   are the arguments to the call.  If MAXLEN is not NULL, it is maximum
   length passed as third argument.  IGNORE is true if return value can be
   ignored.  FCODE is the BUILT_IN_* code of the builtin.  */

static bool
gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
				 tree dest, tree src,
				 tree len, tree size,
				 enum built_in_function fcode)
{
  gimple stmt = gsi_stmt (*gsi);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
    {
      /* If return value of __stpncpy_chk is ignored,
	 optimize into __strncpy_chk.  */
      fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
      if (fn)
	{
	  gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (len, 2);
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
	{
	  /* If LEN is not constant, try MAXLEN too.
	     For MAXLEN only allow optimizing into non-_ocs function
	     if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
	  if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
	    return false;
	}
      else
	maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
	return false;
    }

  /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
			      ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
  if (!fn)
    return false;

  gimple repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold function call to builtin stpcpy with arguments DEST and SRC.
   Return false if no simplification can be made.  */
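/* Illustrative example (not from the original sources): with
   strlen (src) known to be 2, "p_1 = stpcpy (d, src)" is rewritten
   below into
       memcpy (d, src, 3);
       p_1 = d + 2;  */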
2089 gimple_fold_builtin_stpcpy (gimple_stmt_iterator
*gsi
)
2091 gcall
*stmt
= as_a
<gcall
*> (gsi_stmt (*gsi
));
2092 location_t loc
= gimple_location (stmt
);
2093 tree dest
= gimple_call_arg (stmt
, 0);
2094 tree src
= gimple_call_arg (stmt
, 1);
2095 tree fn
, len
, lenp1
;
2097 /* If the result is unused, replace stpcpy with strcpy. */
2098 if (gimple_call_lhs (stmt
) == NULL_TREE
)
2100 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
2103 gimple_call_set_fndecl (stmt
, fn
);
2108 len
= c_strlen (src
, 1);
2110 || TREE_CODE (len
) != INTEGER_CST
)
2113 if (optimize_function_for_size_p (cfun
)
2114 /* If length is zero it's small enough. */
2115 && !integer_zerop (len
))
2118 /* If the source has a known length replace stpcpy with memcpy. */
2119 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
2123 gimple_seq stmts
= NULL
;
2124 tree tem
= gimple_convert (&stmts
, loc
, size_type_node
, len
);
2125 lenp1
= gimple_build (&stmts
, loc
, PLUS_EXPR
, size_type_node
,
2126 tem
, build_int_cst (size_type_node
, 1));
2127 gsi_insert_seq_before (gsi
, stmts
, GSI_SAME_STMT
);
2128 gcall
*repl
= gimple_build_call (fn
, 3, dest
, src
, lenp1
);
2129 gimple_set_vuse (repl
, gimple_vuse (stmt
));
2130 gimple_set_vdef (repl
, gimple_vdef (stmt
));
2131 if (gimple_vdef (repl
)
2132 && TREE_CODE (gimple_vdef (repl
)) == SSA_NAME
)
2133 SSA_NAME_DEF_STMT (gimple_vdef (repl
)) = repl
;
2134 gsi_insert_before (gsi
, repl
, GSI_SAME_STMT
);
2135 /* Replace the result with dest + len. */
2137 tem
= gimple_convert (&stmts
, loc
, sizetype
, len
);
2138 gsi_insert_seq_before (gsi
, stmts
, GSI_SAME_STMT
);
2139 gassign
*ret
= gimple_build_assign (gimple_call_lhs (stmt
),
2140 POINTER_PLUS_EXPR
, dest
, tem
);
2141 gsi_replace (gsi
, ret
, false);
2142 /* Finally fold the memcpy call. */
2143 gimple_stmt_iterator gsi2
= *gsi
;
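
/* Illustrative sketch (hypothetical variables d and s): with a used
   result and a source of known constant length, e.g. s = "abc",

     p = __builtin_stpcpy (d, s);

   becomes the pair

     __builtin_memcpy (d, s, 4);   copy strlen + 1 bytes
     p = d + 3;                    dest + len replaces the call result  */
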
/* Fold a call to the __{,v}snprintf_chk builtin at *GSI.  FCODE is either
   BUILT_IN_SNPRINTF_CHK or BUILT_IN_VSNPRINTF_CHK.  Return true if a
   simplification was made.  */

static bool
gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
                                  enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call.  */
  if (gimple_call_num_args (stmt) < 5)
    return false;

  dest = gimple_call_arg (stmt, 0);
  len = gimple_call_arg (stmt, 1);
  flag = gimple_call_arg (stmt, 2);
  size = gimple_call_arg (stmt, 3);
  fmt = gimple_call_arg (stmt, 4);

  if (! tree_fits_uhwi_p (size))
    return false;

  if (! integer_all_onesp (size))
    {
      tree maxlen = get_maxval_strlen (len, 2);
      if (! tree_fits_uhwi_p (len))
        {
          /* If LEN is not constant, try MAXLEN too.
             For MAXLEN only allow optimizing into non-_ocs function
             if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
          if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
            return false;
        }
      else
        maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
        return false;
    }

  if (!init_target_chars ())
    return false;

  /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      fmt_str = c_getstr (fmt);
      if (fmt_str == NULL)
        return false;
      if (strchr (fmt_str, target_percent) != NULL
          && strcmp (fmt_str, target_percent_s))
        return false;
    }

  /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
     available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
                              ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 5 arguments by 3, retaining
     trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, len);
  gimple_call_set_arg (stmt, 2, fmt);
  for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
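
/* Illustrative sketch (hypothetical 32-byte buffer buf): with a zero
   flag, the argument shuffle above rewrites

     __builtin___snprintf_chk (buf, 32, 0, 32, "%d-%d", x, y);

   into the plain varargs call

     snprintf (buf, 32, "%d-%d", x, y);

   dropping the flag and object-size arguments while keeping the
   trailing varargs in place.  */
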
/* Fold a call to the __{,v}sprintf_chk builtin at *GSI.  FCODE is either
   BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK.  Return true if a
   simplification was made.  */

static bool
gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
                                 enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;
  unsigned nargs = gimple_call_num_args (stmt);

  /* Verify the required arguments in the original call.  */
  if (nargs < 4)
    return false;
  dest = gimple_call_arg (stmt, 0);
  flag = gimple_call_arg (stmt, 1);
  size = gimple_call_arg (stmt, 2);
  fmt = gimple_call_arg (stmt, 3);

  if (! tree_fits_uhwi_p (size))
    return false;

  len = NULL_TREE;

  if (!init_target_chars ())
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str != NULL)
    {
      /* If the format doesn't contain % args or %%, we know the size.  */
      if (strchr (fmt_str, target_percent) == 0)
        {
          if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
            len = build_int_cstu (size_type_node, strlen (fmt_str));
        }
      /* If the format is "%s" and first ... argument is a string literal,
         we know the size too.  */
      else if (fcode == BUILT_IN_SPRINTF_CHK
               && strcmp (fmt_str, target_percent_s) == 0)
        {
          tree arg;

          if (nargs == 5)
            {
              arg = gimple_call_arg (stmt, 4);
              if (POINTER_TYPE_P (TREE_TYPE (arg)))
                {
                  len = c_strlen (arg, 1);
                  if (! len || ! tree_fits_uhwi_p (len))
                    len = NULL_TREE;
                }
            }
        }
    }

  if (! integer_all_onesp (size))
    {
      if (! len || ! tree_int_cst_lt (len, size))
        return false;
    }

  /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      if (fmt_str == NULL)
        return false;
      if (strchr (fmt_str, target_percent) != NULL
          && strcmp (fmt_str, target_percent_s))
        return false;
    }

  /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
                              ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 4 arguments by 2, retaining
     trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, fmt);
  for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
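
/* Illustrative sketch (hypothetical 16-byte buffer buf): because the
   format "abc" contains no '%' and its length 3 is below the object
   size, the folder rewrites

     __builtin___sprintf_chk (buf, 0, 16, "abc");

   into sprintf (buf, "abc"), which later folding can turn into a
   plain strcpy.  */
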
/* Simplify a call to the sprintf builtin with arguments DEST, FMT, and
   ORIG.  ORIG may be null if this is a 2-argument call.  We don't attempt
   to simplify calls with more than 3 arguments.  Return true if a
   simplification was made.  */

static bool
gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree fmt = gimple_call_arg (stmt, 1);
  tree orig = NULL_TREE;
  const char *fmt_str = NULL;

  /* Verify the required arguments in the original call.  We deal with two
     types of sprintf() calls: 'sprintf (str, fmt)' and
     'sprintf (dest, "%s", orig)'.  */
  if (gimple_call_num_args (stmt) > 3)
    return false;

  if (gimple_call_num_args (stmt) == 3)
    orig = gimple_call_arg (stmt, 2);

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);

      if (!fn)
        return false;

      /* Don't optimize sprintf (buf, "abc", ptr++).  */
      if (orig)
        return false;

      /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
         'format' is known to contain no % formats.  */
      gimple_seq stmts = NULL;
      gimple repl = gimple_build_call (fn, 2, dest, fmt);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
        {
          repl = gimple_build_assign (gimple_call_lhs (stmt),
                                      build_int_cst (integer_type_node,
                                                     strlen (fmt_str)));
          gimple_seq_add_stmt_without_update (&stmts, repl);
          gsi_replace_with_seq_vops (gsi, stmts);
          /* gsi now points at the assignment to the lhs, get a
             stmt iterator to the strcpy call.
             ???  We can't use gsi_for_stmt as that doesn't work when the
             CFG isn't built yet.  */
          gimple_stmt_iterator gsi2 = *gsi;
          gsi_prev (&gsi2);
          fold_stmt (&gsi2);
        }
      else
        {
          gsi_replace_with_seq_vops (gsi, stmts);
          fold_stmt (gsi);
        }
      return true;
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);

      if (!fn)
        return false;

      /* Don't crash on sprintf (str1, "%s").  */
      if (!orig)
        return false;

      tree orig_len = NULL_TREE;
      if (gimple_call_lhs (stmt))
        {
          orig_len = get_maxval_strlen (orig, 0);
          if (!orig_len)
            return false;
        }

      /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2).  */
      gimple_seq stmts = NULL;
      gimple repl = gimple_build_call (fn, 2, dest, orig);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
        {
          if (!useless_type_conversion_p (integer_type_node,
                                          TREE_TYPE (orig_len)))
            orig_len = fold_convert (integer_type_node, orig_len);
          repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
          gimple_seq_add_stmt_without_update (&stmts, repl);
          gsi_replace_with_seq_vops (gsi, stmts);
          /* gsi now points at the assignment to the lhs, get a
             stmt iterator to the strcpy call.
             ???  We can't use gsi_for_stmt as that doesn't work when the
             CFG isn't built yet.  */
          gimple_stmt_iterator gsi2 = *gsi;
          gsi_prev (&gsi2);
          fold_stmt (&gsi2);
        }
      else
        {
          gsi_replace_with_seq_vops (gsi, stmts);
          fold_stmt (gsi);
        }
      return true;
    }
  return false;
}
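
/* Illustrative sketch (hypothetical variables): the two shapes handled
   above are

     sprintf (d, "hello");        =>  strcpy (d, "hello");  lhs = 5
     r = sprintf (d, "%s", s);    =>  strcpy (d, s);        r = strlen of s

   where the second form is only used when get_maxval_strlen can
   determine a constant length for s.  */
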
/* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
   FMT, and ORIG.  ORIG may be null if this is a 3-argument call.  We don't
   attempt to simplify calls with more than 4 arguments.  Return true if a
   simplification was made.  */

static bool
gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest = gimple_call_arg (stmt, 0);
  tree destsize = gimple_call_arg (stmt, 1);
  tree fmt = gimple_call_arg (stmt, 2);
  tree orig = NULL_TREE;
  const char *fmt_str = NULL;

  if (gimple_call_num_args (stmt) > 4)
    return false;

  if (gimple_call_num_args (stmt) == 4)
    orig = gimple_call_arg (stmt, 3);

  if (!tree_fits_uhwi_p (destsize))
    return false;
  unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
        return false;

      /* Don't optimize snprintf (buf, 4, "abc", ptr++).  */
      if (orig)
        return false;

      /* We could expand this as
         memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
         or to
         memcpy (str, fmt_with_nul_at_cstm1, cst);
         but in the former case that might increase code size
         and in the latter case grow .rodata section too much.
         So punt for now.  */
      size_t len = strlen (fmt_str);
      if (len >= destlen)
        return false;

      gimple_seq stmts = NULL;
      gimple repl = gimple_build_call (fn, 2, dest, fmt);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
        {
          repl = gimple_build_assign (gimple_call_lhs (stmt),
                                      build_int_cst (integer_type_node, len));
          gimple_seq_add_stmt_without_update (&stmts, repl);
          gsi_replace_with_seq_vops (gsi, stmts);
          /* gsi now points at the assignment to the lhs, get a
             stmt iterator to the strcpy call.
             ???  We can't use gsi_for_stmt as that doesn't work when the
             CFG isn't built yet.  */
          gimple_stmt_iterator gsi2 = *gsi;
          gsi_prev (&gsi2);
          fold_stmt (&gsi2);
        }
      else
        {
          gsi_replace_with_seq_vops (gsi, stmts);
          fold_stmt (gsi);
        }
      return true;
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
        return false;

      /* Don't crash on snprintf (str1, cst, "%s").  */
      if (!orig)
        return false;

      tree orig_len = get_maxval_strlen (orig, 0);
      if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
        return false;

      /* We could expand this as
         memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
         or to
         memcpy (str1, str2_with_nul_at_cstm1, cst);
         but in the former case that might increase code size
         and in the latter case grow .rodata section too much.
         So punt for now.  */
      if (compare_tree_int (orig_len, destlen) >= 0)
        return false;

      /* Convert snprintf (str1, cst, "%s", str2) into
         strcpy (str1, str2) if strlen (str2) < cst.  */
      gimple_seq stmts = NULL;
      gimple repl = gimple_build_call (fn, 2, dest, orig);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
        {
          if (!useless_type_conversion_p (integer_type_node,
                                          TREE_TYPE (orig_len)))
            orig_len = fold_convert (integer_type_node, orig_len);
          repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
          gimple_seq_add_stmt_without_update (&stmts, repl);
          gsi_replace_with_seq_vops (gsi, stmts);
          /* gsi now points at the assignment to the lhs, get a
             stmt iterator to the strcpy call.
             ???  We can't use gsi_for_stmt as that doesn't work when the
             CFG isn't built yet.  */
          gimple_stmt_iterator gsi2 = *gsi;
          gsi_prev (&gsi2);
          fold_stmt (&gsi2);
        }
      else
        {
          gsi_replace_with_seq_vops (gsi, stmts);
          fold_stmt (gsi);
        }
      return true;
    }
  return false;
}
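
/* Illustrative sketch (hypothetical 8-byte destination d): since
   strlen ("abc") == 3 < 8, the folder rewrites

     snprintf (d, 8, "abc");        =>  strcpy (d, "abc");
     r = snprintf (d, 8, "%s", s);  =>  strcpy (d, s);  r = strlen of s

   but punts when the known source length does not fit the destination
   size, leaving the truncating call alone.  */
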
/* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk
   builtins.  FP, FMT, and ARG are the arguments to the call.  We don't
   fold calls with more than 3 arguments, and ARG may be null in the
   2-argument case.  FCODE is the BUILT_IN_* code of the function to be
   simplified.  Return true if a simplification was made.  */

static bool
gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
                             tree fp, tree fmt, tree arg,
                             enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree fn_fputc, fn_fputs;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt) != NULL_TREE)
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
         unlocked functions exist explicitly.  */
      fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
      fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
    }
  else
    {
      fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
      fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
    }

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use fputs.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
          && arg)
        return false;

      /* If the format specifier was "", fprintf does nothing.  */
      if (fmt_str[0] == '\0')
        {
          replace_call_with_value (gsi, NULL_TREE);
          return true;
        }

      /* When "string" doesn't contain %, replace all cases of
         fprintf (fp, string) with fputs (string, fp).  The fputs
         builtin will take care of special cases like length == 1.  */
      if (fn_fputs)
        {
          gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
          replace_call_with_call_and_fold (gsi, repl);
          return true;
        }
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
    return false;

  /* If the format specifier was "%s", call __builtin_fputs (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_s) == 0)
    {
      if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
        return false;
      if (fn_fputs)
        {
          gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
          replace_call_with_call_and_fold (gsi, repl);
          return true;
        }
    }

  /* If the format specifier was "%c", call __builtin_fputc (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg
          || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
        return false;
      if (fn_fputc)
        {
          gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
          replace_call_with_call_and_fold (gsi, repl);
          return true;
        }
    }

  return false;
}
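
/* Illustrative sketch (hypothetical FILE *fp): with an unused result,

     fprintf (fp, "no-percent");   =>  fputs ("no-percent", fp);
     fprintf (fp, "%s", str);      =>  fputs (str, fp);
     fprintf (fp, "%c", ch);       =>  fputc (ch, fp);

   and fprintf (fp, "") is removed entirely.  */
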
/* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
   FMT and ARG are the arguments to the call; we don't fold cases with
   more than 2 arguments, and ARG may be null if this is a 1-argument case.
   FCODE is the BUILT_IN_* code of the function to be simplified.  Return
   true if a simplification was made.  */

static bool
gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
                            tree arg, enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree fn_putchar, fn_puts, newarg;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt) != NULL_TREE)
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (fcode == BUILT_IN_PRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
         unlocked functions exist explicitly.  */
      fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
      fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
    }
  else
    {
      fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
      fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
    }

  if (!init_target_chars ())
    return false;

  if (strcmp (fmt_str, target_percent_s) == 0
      || strchr (fmt_str, target_percent) == NULL)
    {
      const char *str;

      if (strcmp (fmt_str, target_percent_s) == 0)
        {
          if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
            return false;

          if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
            return false;

          str = c_getstr (arg);
          if (str == NULL)
            return false;
        }
      else
        {
          /* The format specifier doesn't contain any '%' characters.  */
          if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
              && arg)
            return false;
          str = fmt_str;
        }

      /* If the string was "", printf does nothing.  */
      if (str[0] == '\0')
        {
          replace_call_with_value (gsi, NULL_TREE);
          return true;
        }

      /* If the string has length of 1, call putchar.  */
      if (str[1] == '\0')
        {
          /* Given printf("c"), (where c is any one character,)
             convert "c"[0] to an int and pass that to the replacement
             function.  */
          newarg = build_int_cst (integer_type_node, str[0]);
          if (fn_putchar)
            {
              gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
              replace_call_with_call_and_fold (gsi, repl);
              return true;
            }
        }
      else
        {
          /* If the string was "string\n", call puts("string").  */
          size_t len = strlen (str);
          if ((unsigned char)str[len - 1] == target_newline
              && (size_t) (int) len == len)
            {
              char *newstr;
              tree offset_node, string_cst;

              /* Create a NUL-terminated string that's one char shorter
                 than the original, stripping off the trailing '\n'.  */
              newarg = build_string_literal (len, str);
              string_cst = string_constant (newarg, &offset_node);
              gcc_checking_assert (string_cst
                                   && (TREE_STRING_LENGTH (string_cst)
                                       == (int) len)
                                   && integer_zerop (offset_node)
                                   && (unsigned char)
                                      TREE_STRING_POINTER (string_cst)[len - 1]
                                      == target_newline);
              /* build_string_literal creates a new STRING_CST,
                 modify it in place to avoid double copying.  */
              newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
              newstr[len - 1] = '\0';
              if (fn_puts)
                {
                  gcall *repl = gimple_build_call (fn_puts, 1, newarg);
                  replace_call_with_call_and_fold (gsi, repl);
                  return true;
                }
            }
          else
            /* We'd like to arrange to call fputs(string,stdout) here,
               but we need stdout and don't have a way to get it yet.  */
            return false;
        }
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
    return false;

  /* If the format specifier was "%s\n", call __builtin_puts(arg).  */
  else if (strcmp (fmt_str, target_percent_s_newline) == 0)
    {
      if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
        return false;
      if (fn_puts)
        {
          gcall *repl = gimple_build_call (fn_puts, 1, arg);
          replace_call_with_call_and_fold (gsi, repl);
          return true;
        }
    }

  /* If the format specifier was "%c", call __builtin_putchar(arg).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg || ! useless_type_conversion_p (integer_type_node,
                                               TREE_TYPE (arg)))
        return false;
      if (fn_putchar)
        {
          gcall *repl = gimple_build_call (fn_putchar, 1, arg);
          replace_call_with_call_and_fold (gsi, repl);
          return true;
        }
    }

  return false;
}
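
/* Illustrative sketch: with an unused result,

     printf ("x");        =>  putchar ('x');
     printf ("hi\n");     =>  puts ("hi");
     printf ("%s\n", s);  =>  puts (s);
     printf ("%c", c);    =>  putchar (c);

   while printf ("hi") without a trailing newline is left alone, since
   there is no way to refer to stdout for an fputs replacement here.  */
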
/* Fold a call to __builtin_strlen when the length of the argument can be
   computed.  */

static bool
gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree len = get_maxval_strlen (gimple_call_arg (stmt, 0), 0);
  if (!len)
    return false;
  len = force_gimple_operand_gsi (gsi, len, true, NULL, true, GSI_SAME_STMT);
  replace_call_with_value (gsi, len);
  return true;
}
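
/* Illustrative sketch: if get_maxval_strlen can prove the argument
   always has length 3 (e.g. it is PHI <"foo", "bar">), then

     n = __builtin_strlen (p);   =>  n = 3;

   via replace_call_with_value.  */
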
/* Fold the non-target builtin at *GSI and return whether any simplification
   was made.  */

static bool
gimple_fold_builtin (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
  tree callee = gimple_call_fndecl (stmt);

  /* Give up for always_inline inline builtins until they are
     inlined.  */
  if (avoid_folding_inline_builtin (callee))
    return false;

  unsigned n = gimple_call_num_args (stmt);
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
  switch (fcode)
    {
    case BUILT_IN_BZERO:
      return gimple_fold_builtin_memset (gsi, integer_zero_node,
                                         gimple_call_arg (stmt, 1));
    case BUILT_IN_MEMSET:
      return gimple_fold_builtin_memset (gsi,
                                         gimple_call_arg (stmt, 1),
                                         gimple_call_arg (stmt, 2));
    case BUILT_IN_BCOPY:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 1),
                                            gimple_call_arg (stmt, 0), 3);
    case BUILT_IN_MEMCPY:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
                                            gimple_call_arg (stmt, 1), 0);
    case BUILT_IN_MEMPCPY:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
                                            gimple_call_arg (stmt, 1), 1);
    case BUILT_IN_MEMMOVE:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
                                            gimple_call_arg (stmt, 1), 3);
    case BUILT_IN_SPRINTF_CHK:
    case BUILT_IN_VSPRINTF_CHK:
      return gimple_fold_builtin_sprintf_chk (gsi, fcode);
    case BUILT_IN_STRCAT_CHK:
      return gimple_fold_builtin_strcat_chk (gsi);
    case BUILT_IN_STRNCAT_CHK:
      return gimple_fold_builtin_strncat_chk (gsi);
    case BUILT_IN_STRLEN:
      return gimple_fold_builtin_strlen (gsi);
    case BUILT_IN_STRCPY:
      return gimple_fold_builtin_strcpy (gsi,
                                         gimple_call_arg (stmt, 0),
                                         gimple_call_arg (stmt, 1));
    case BUILT_IN_STRNCPY:
      return gimple_fold_builtin_strncpy (gsi,
                                          gimple_call_arg (stmt, 0),
                                          gimple_call_arg (stmt, 1),
                                          gimple_call_arg (stmt, 2));
    case BUILT_IN_STRCAT:
      return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
                                         gimple_call_arg (stmt, 1));
    case BUILT_IN_STRNCAT:
      return gimple_fold_builtin_strncat (gsi);
    case BUILT_IN_FPUTS:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
                                        gimple_call_arg (stmt, 1), false);
    case BUILT_IN_FPUTS_UNLOCKED:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
                                        gimple_call_arg (stmt, 1), true);
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      return gimple_fold_builtin_memory_chk (gsi,
                                             gimple_call_arg (stmt, 0),
                                             gimple_call_arg (stmt, 1),
                                             gimple_call_arg (stmt, 2),
                                             gimple_call_arg (stmt, 3),
                                             fcode);
    case BUILT_IN_STPCPY:
      return gimple_fold_builtin_stpcpy (gsi);
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      return gimple_fold_builtin_stxcpy_chk (gsi,
                                             gimple_call_arg (stmt, 0),
                                             gimple_call_arg (stmt, 1),
                                             gimple_call_arg (stmt, 2),
                                             fcode);
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      return gimple_fold_builtin_stxncpy_chk (gsi,
                                              gimple_call_arg (stmt, 0),
                                              gimple_call_arg (stmt, 1),
                                              gimple_call_arg (stmt, 2),
                                              gimple_call_arg (stmt, 3),
                                              fcode);
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      return gimple_fold_builtin_snprintf_chk (gsi, fcode);
    case BUILT_IN_SNPRINTF:
      return gimple_fold_builtin_snprintf (gsi);
    case BUILT_IN_SPRINTF:
      return gimple_fold_builtin_sprintf (gsi);
    case BUILT_IN_FPRINTF:
    case BUILT_IN_FPRINTF_UNLOCKED:
    case BUILT_IN_VFPRINTF:
      if (n == 2 || n == 3)
        return gimple_fold_builtin_fprintf (gsi,
                                            gimple_call_arg (stmt, 0),
                                            gimple_call_arg (stmt, 1),
                                            n == 3
                                            ? gimple_call_arg (stmt, 2)
                                            : NULL_TREE,
                                            fcode);
      break;
    case BUILT_IN_FPRINTF_CHK:
    case BUILT_IN_VFPRINTF_CHK:
      if (n == 3 || n == 4)
        return gimple_fold_builtin_fprintf (gsi,
                                            gimple_call_arg (stmt, 0),
                                            gimple_call_arg (stmt, 2),
                                            n == 4
                                            ? gimple_call_arg (stmt, 3)
                                            : NULL_TREE,
                                            fcode);
      break;
    case BUILT_IN_PRINTF:
    case BUILT_IN_PRINTF_UNLOCKED:
    case BUILT_IN_VPRINTF:
      if (n == 1 || n == 2)
        return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
                                           n == 2
                                           ? gimple_call_arg (stmt, 1)
                                           : NULL_TREE, fcode);
      break;
    case BUILT_IN_PRINTF_CHK:
    case BUILT_IN_VPRINTF_CHK:
      if (n == 2 || n == 3)
        return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
                                           n == 3
                                           ? gimple_call_arg (stmt, 2)
                                           : NULL_TREE, fcode);
      break;
    default:;
    }

  /* Try the generic builtin folder.  */
  bool ignore = (gimple_call_lhs (stmt) == NULL);
  tree result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
        STRIP_NOPS (result);
      else
        result = fold_convert (gimple_call_return_type (stmt), result);
      if (!update_call_from_tree (gsi, result))
        gimplify_and_update_call_from_tree (gsi, result);
      return true;
    }

  return false;
}
/* Return true if ARG0 CODE ARG1 in infinite signed precision operation
   doesn't fit into TYPE.  The test for overflow should be regardless of
   -fwrapv, and even for unsigned types.  */

bool
arith_overflowed_p (enum tree_code code, const_tree type,
                    const_tree arg0, const_tree arg1)
{
  typedef FIXED_WIDE_INT (WIDE_INT_MAX_PRECISION * 2) widest2_int;
  typedef generic_wide_int <wi::extended_tree <WIDE_INT_MAX_PRECISION * 2> >
    widest2_int_cst;
  widest2_int warg0 = widest2_int_cst (arg0);
  widest2_int warg1 = widest2_int_cst (arg1);
  widest2_int wres;
  switch (code)
    {
    case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
    case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
    case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
    default: gcc_unreachable ();
    }
  signop sign = TYPE_SIGN (type);
  if (sign == UNSIGNED && wi::neg_p (wres))
    return true;
  return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
}
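
/* Worked example (illustrative): for an 8-bit unsigned type, PLUS_EXPR
   of 200 and 100 yields 300 in the double-width arithmetic above;
   wi::min_precision (300, UNSIGNED) is 9 > 8, so overflow is reported.
   For an 8-bit signed type, 100 + 100 = 200 needs 9 bits including the
   sign, so it overflows too, while 100 + 27 = 127 fits exactly and
   does not.  */
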
/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.  */

static bool
gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree callee;
  bool changed = false;
  unsigned i;

  /* Fold *& in call arguments.  */
  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
      {
        tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
        if (tmp)
          {
            gimple_call_set_arg (stmt, i, tmp);
            changed = true;
          }
      }

  /* Check for virtual calls that became direct calls.  */
  callee = gimple_call_fn (stmt);
  if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
    {
      if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
        {
          if (dump_file && virtual_method_call_p (callee)
              && !possible_polymorphic_call_target_p
                   (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
                                                (OBJ_TYPE_REF_EXPR (callee)))))
            {
              fprintf (dump_file,
                       "Type inheritance inconsistent devirtualization of ");
              print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
              fprintf (dump_file, " to ");
              print_generic_expr (dump_file, callee, TDF_SLIM);
              fprintf (dump_file, "\n");
            }

          gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
          changed = true;
        }
      else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
        {
          bool final;
          vec <cgraph_node *>targets
            = possible_polymorphic_call_targets (callee, stmt, &final);
          if (final && targets.length () <= 1 && dbg_cnt (devirt))
            {
              tree lhs = gimple_call_lhs (stmt);
              if (dump_enabled_p ())
                {
                  location_t loc = gimple_location_safe (stmt);
                  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
                                   "folding virtual function call to %s\n",
                                   targets.length () == 1
                                   ? targets[0]->name ()
                                   : "__builtin_unreachable");
                }
              if (targets.length () == 1)
                {
                  gimple_call_set_fndecl (stmt, targets[0]->decl);
                  changed = true;
                  /* If the call becomes noreturn, remove the lhs.  */
                  if (lhs && (gimple_call_flags (stmt) & ECF_NORETURN))
                    {
                      if (TREE_CODE (lhs) == SSA_NAME)
                        {
                          tree var = create_tmp_var (TREE_TYPE (lhs));
                          tree def = get_or_create_ssa_default_def (cfun, var);
                          gimple new_stmt = gimple_build_assign (lhs, def);
                          gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
                        }
                      gimple_call_set_lhs (stmt, NULL_TREE);
                    }
                  maybe_remove_unused_call_args (cfun, stmt);
                }
              else
                {
                  tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
                  gimple new_stmt = gimple_build_call (fndecl, 0);
                  gimple_set_location (new_stmt, gimple_location (stmt));
                  if (lhs && TREE_CODE (lhs) == SSA_NAME)
                    {
                      tree var = create_tmp_var (TREE_TYPE (lhs));
                      tree def = get_or_create_ssa_default_def (cfun, var);

                      /* To satisfy condition for
                         cgraph_update_edges_for_call_stmt_node,
                         we need to preserve GIMPLE_CALL statement
                         at position of GSI iterator.  */
                      update_call_from_tree (gsi, def);
                      gsi_insert_before (gsi, new_stmt, GSI_NEW_STMT);
                    }
                  else
                    {
                      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
                      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
                      gsi_replace (gsi, new_stmt, false);
                    }
                  return true;
                }
            }
        }
    }

  /* Check for indirect calls that became direct calls, and then
     no longer require a static chain.  */
  if (gimple_call_chain (stmt))
    {
      tree fn = gimple_call_fndecl (stmt);
      if (fn && !DECL_STATIC_CHAIN (fn))
        {
          gimple_call_set_chain (stmt, NULL);
          changed = true;
        }
      else
        {
          tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
          if (tmp)
            {
              gimple_call_set_chain (stmt, tmp);
              changed = true;
            }
        }
    }

  if (inplace)
    return changed;

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    {
      if (gimple_fold_builtin (gsi))
        changed = true;
    }
  else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
    changed |= targetm.gimple_fold_builtin (gsi);
  else if (gimple_call_internal_p (stmt))
    {
      enum tree_code subcode = ERROR_MARK;
      tree result = NULL_TREE;
      bool cplx_result = false;
      tree overflow = NULL_TREE;
      switch (gimple_call_internal_fn (stmt))
        {
        case IFN_BUILTIN_EXPECT:
          result = fold_builtin_expect (gimple_location (stmt),
                                        gimple_call_arg (stmt, 0),
                                        gimple_call_arg (stmt, 1),
                                        gimple_call_arg (stmt, 2));
          break;
        case IFN_UBSAN_OBJECT_SIZE:
          if (integer_all_onesp (gimple_call_arg (stmt, 2))
              || (TREE_CODE (gimple_call_arg (stmt, 1)) == INTEGER_CST
                  && TREE_CODE (gimple_call_arg (stmt, 2)) == INTEGER_CST
                  && tree_int_cst_le (gimple_call_arg (stmt, 1),
                                      gimple_call_arg (stmt, 2))))
            {
              gsi_replace (gsi, gimple_build_nop (), false);
              unlink_stmt_vdef (stmt);
              release_defs (stmt);
              return true;
            }
          break;
        case IFN_UBSAN_CHECK_ADD:
          subcode = PLUS_EXPR;
          break;
        case IFN_UBSAN_CHECK_SUB:
          subcode = MINUS_EXPR;
          break;
        case IFN_UBSAN_CHECK_MUL:
          subcode = MULT_EXPR;
          break;
        case IFN_ADD_OVERFLOW:
          subcode = PLUS_EXPR;
          cplx_result = true;
          break;
        case IFN_SUB_OVERFLOW:
          subcode = MINUS_EXPR;
          cplx_result = true;
          break;
        case IFN_MUL_OVERFLOW:
          subcode = MULT_EXPR;
          cplx_result = true;
          break;
        default:
          break;
        }
      if (subcode != ERROR_MARK)
        {
          tree arg0 = gimple_call_arg (stmt, 0);
          tree arg1 = gimple_call_arg (stmt, 1);
          tree type = TREE_TYPE (arg0);
          if (cplx_result)
            {
              tree lhs = gimple_call_lhs (stmt);
              if (lhs == NULL_TREE)
                type = NULL_TREE;
              else
                type = TREE_TYPE (TREE_TYPE (lhs));
            }
          if (type == NULL_TREE)
            ;
          /* x = y + 0; x = y - 0; x = y * 0; */
          else if (integer_zerop (arg1))
            result = subcode == MULT_EXPR ? integer_zero_node : arg0;
          /* x = 0 + y; x = 0 * y; */
          else if (subcode != MINUS_EXPR && integer_zerop (arg0))
            result = subcode == MULT_EXPR ? integer_zero_node : arg1;
          /* x = y - y; */
          else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
            result = integer_zero_node;
          /* x = y * 1; x = 1 * y; */
          else if (subcode == MULT_EXPR && integer_onep (arg1))
            result = arg0;
          else if (subcode == MULT_EXPR && integer_onep (arg0))
            result = arg1;
          else if (TREE_CODE (arg0) == INTEGER_CST
                   && TREE_CODE (arg1) == INTEGER_CST)
            {
              if (cplx_result)
                result = int_const_binop (subcode, fold_convert (type, arg0),
                                          fold_convert (type, arg1));
              else
                result = int_const_binop (subcode, arg0, arg1);
              if (result && arith_overflowed_p (subcode, type, arg0, arg1))
                {
                  if (cplx_result)
                    overflow = build_one_cst (type);
                  else
                    result = NULL_TREE;
                }
            }
          if (result)
            {
              if (result == integer_zero_node)
                result = build_zero_cst (type);
              else if (cplx_result && TREE_TYPE (result) != type)
                {
                  if (TREE_CODE (result) == INTEGER_CST)
                    {
                      if (arith_overflowed_p (PLUS_EXPR, type, result,
                                              integer_zero_node))
                        overflow = build_one_cst (type);
                    }
                  else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
                            && TYPE_UNSIGNED (type))
                           || (TYPE_PRECISION (type)
                               < (TYPE_PRECISION (TREE_TYPE (result))
                                  + (TYPE_UNSIGNED (TREE_TYPE (result))
                                     && !TYPE_UNSIGNED (type)))))
                    result = NULL_TREE;
                  if (result)
                    result = fold_convert (type, result);
                }
            }
        }

      if (result)
        {
          if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
            result = drop_tree_overflow (result);
          if (cplx_result)
            {
              if (overflow == NULL_TREE)
                overflow = build_zero_cst (TREE_TYPE (result));
              tree ctype = build_complex_type (TREE_TYPE (result));
              if (TREE_CODE (result) == INTEGER_CST
                  && TREE_CODE (overflow) == INTEGER_CST)
                result = build_complex (ctype, result, overflow);
              else
                result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
                                     ctype, result, overflow);
            }
          if (!update_call_from_tree (gsi, result))
            gimplify_and_update_call_from_tree (gsi, result);
          changed = true;
        }
    }

  return changed;
}
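
/* Illustrative sketch of the internal-fn folding above (hypothetical
   SSA names, 32-bit int):

     _1 = ADD_OVERFLOW (x_2, 0);   =>  _1 = COMPLEX_EXPR <x_2, 0>;

   and for two constant operands the real part is the wrapped result
   while the imaginary part is 1 exactly when arith_overflowed_p
   reports that the infinite-precision result does not fit the type.  */
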
/* Worker for fold_stmt_1 dispatch to pattern based folding with
   gimple_simplify.

   Replaces *GSI with the simplification result in RCODE and OPS
   and the associated statements in *SEQ.  Does the replacement
   according to INPLACE and returns true if the operation succeeded.  */

static bool
replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
                                  code_helper rcode, tree *ops,
                                  gimple_seq *seq, bool inplace)
{
  gimple stmt = gsi_stmt (*gsi);

  /* Play safe and do not allow abnormals to be mentioned in
     newly created statements.  See also maybe_push_res_to_seq.  */
  if ((TREE_CODE (ops[0]) == SSA_NAME
       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0]))
      || (ops[1]
          && TREE_CODE (ops[1]) == SSA_NAME
          && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1]))
      || (ops[2]
          && TREE_CODE (ops[2]) == SSA_NAME
          && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2])))
    return false;

  if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
    {
      gcc_assert (rcode.is_tree_code ());
      if (TREE_CODE_CLASS ((enum tree_code)rcode) == tcc_comparison
          /* GIMPLE_CONDs condition may not throw.  */
          && (!flag_exceptions
              || !cfun->can_throw_non_call_exceptions
              || !operation_could_trap_p (rcode,
                                          FLOAT_TYPE_P (TREE_TYPE (ops[0])),
                                          false, NULL_TREE)))
        gimple_cond_set_condition (cond_stmt, rcode, ops[0], ops[1]);
      else if (rcode == SSA_NAME)
        gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
                                   build_zero_cst (TREE_TYPE (ops[0])));
      else if (rcode == INTEGER_CST)
        {
          if (integer_zerop (ops[0]))
            gimple_cond_make_false (cond_stmt);
          else
            gimple_cond_make_true (cond_stmt);
        }
      else if (!inplace)
        {
          tree res = maybe_push_res_to_seq (rcode, boolean_type_node,
                                            ops, seq);
          if (!res)
            return false;
          gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
                                     build_zero_cst (TREE_TYPE (res)));
        }
      else
        return false;
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "gimple_simplified to ");
          if (!gimple_seq_empty_p (*seq))
            print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
          print_gimple_stmt (dump_file, gsi_stmt (*gsi),
                             0, TDF_SLIM);
        }
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (is_gimple_assign (stmt)
           && rcode.is_tree_code ())
    {
      if (!inplace
          || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (rcode))
        {
          maybe_build_generic_op (rcode,
                                  TREE_TYPE (gimple_assign_lhs (stmt)),
                                  &ops[0], ops[1], ops[2]);
          gimple_assign_set_rhs_with_ops (gsi, rcode, ops[0], ops[1], ops[2]);
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "gimple_simplified to ");
              if (!gimple_seq_empty_p (*seq))
                print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
              print_gimple_stmt (dump_file, gsi_stmt (*gsi),
                                 0, TDF_SLIM);
            }
          gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
          return true;
        }
    }
  else if (!inplace)
    {
      if (gimple_has_lhs (stmt))
        {
          tree lhs = gimple_get_lhs (stmt);
          if (!maybe_push_res_to_seq (rcode, TREE_TYPE (lhs),
                                      ops, seq, lhs))
            return false;
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "gimple_simplified to ");
              print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
            }
          gsi_replace_with_seq_vops (gsi, *seq);
          return true;
        }
      else
        gcc_unreachable ();
    }

  return false;
}
/* Canonicalize MEM_REFs invariant address operand after propagation.  */

static bool
maybe_canonicalize_mem_ref_addr (tree *t)
{
  bool res = false;

  if (TREE_CODE (*t) == ADDR_EXPR)
    t = &TREE_OPERAND (*t, 0);

  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
     of invariant addresses into a SSA name MEM_REF address.  */
  if (TREE_CODE (*t) == MEM_REF
      || TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (*t, 0);
      if (TREE_CODE (addr) == ADDR_EXPR
          && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
              || handled_component_p (TREE_OPERAND (addr, 0))))
        {
          tree base;
          HOST_WIDE_INT coffset;
          base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
                                                &coffset);
          if (base)
            {
              TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
              TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
                                                      TREE_OPERAND (*t, 1),
                                                      size_int (coffset));
              res = true;
            }
        }
      gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
                           || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
    }

  /* Canonicalize back MEM_REFs to plain reference trees if the object
     accessed is a decl that has the same access semantics as the MEM_REF.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1))
      && MR_DEPENDENCE_CLIQUE (*t) == 0)
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
      if (/* Same volatile qualification.  */
          TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
          /* Same TBAA behavior with -fstrict-aliasing.  */
          && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
          && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
              == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
          /* Same alignment.  */
          && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
          /* We have to look out here to not drop a required conversion
             from the rhs to the lhs if *t appears on the lhs or vice-versa
             if it appears on the rhs.  Thus require strict type
             compatibility.  */
          && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
        {
          *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
          res = true;
        }
    }

  /* Canonicalize TARGET_MEM_REF in particular with respect to
     the indexes becoming constant.  */
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
        {
          *t = tem;
          res = true;
        }
    }

  return res;
}
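
/* Illustrative sketch (hypothetical struct variable a with int member
   f at offset 4): propagation can leave

     MEM[(int *)&a.f, 8]   =>  MEM[(int *)&a, 12]

   by rebasing the address on the underlying decl and folding the
   component offset into the constant, and a zero-offset

     MEM[(int *)&i, 0]     =>  i

   when volatility, TBAA type, and alignment all match the decl.  */
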
/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases.  */

static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
{
  bool changed = false;
  gimple stmt = gsi_stmt (*gsi);
  unsigned i;

  /* First do required canonicalization of [TARGET_]MEM_REF addresses
     after propagation.
     ???  This shouldn't be done in generic folding but in the
     propagation helpers which also know whether an address was
     propagated.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
        {
          tree *rhs = gimple_assign_rhs1_ptr (stmt);
          if ((REFERENCE_CLASS_P (*rhs)
               || TREE_CODE (*rhs) == ADDR_EXPR)
              && maybe_canonicalize_mem_ref_addr (rhs))
            changed = true;
          tree *lhs = gimple_assign_lhs_ptr (stmt);
          if (REFERENCE_CLASS_P (*lhs)
              && maybe_canonicalize_mem_ref_addr (lhs))
            changed = true;
        }
      break;
    case GIMPLE_CALL:
      {
        for (i = 0; i < gimple_call_num_args (stmt); ++i)
          {
            tree *arg = gimple_call_arg_ptr (stmt, i);
            if (REFERENCE_CLASS_P (*arg)
                && maybe_canonicalize_mem_ref_addr (arg))
              changed = true;
          }
        tree *lhs = gimple_call_lhs_ptr (stmt);
        if (*lhs
            && REFERENCE_CLASS_P (*lhs)
            && maybe_canonicalize_mem_ref_addr (lhs))
          changed = true;
        break;
      }
    case GIMPLE_ASM:
      {
        gasm *asm_stmt = as_a <gasm *> (stmt);
        for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
          {
            tree link = gimple_asm_output_op (asm_stmt, i);
            tree op = TREE_VALUE (link);
            if (REFERENCE_CLASS_P (op)
                && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
              changed = true;
          }
        for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
          {
            tree link = gimple_asm_input_op (asm_stmt, i);
            tree op = TREE_VALUE (link);
            if ((REFERENCE_CLASS_P (op)
                 || TREE_CODE (op) == ADDR_EXPR)
                && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
              changed = true;
          }
      }
      break;
    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
        {
          tree *val = gimple_debug_bind_get_value_ptr (stmt);
          if (*val
              && (REFERENCE_CLASS_P (*val)
                  || TREE_CODE (*val) == ADDR_EXPR)
              && maybe_canonicalize_mem_ref_addr (val))
            changed = true;
        }
      break;
    default:;
    }

  /* Dispatch to pattern-based folding.  */
  if (!inplace
      || is_gimple_assign (stmt)
      || gimple_code (stmt) == GIMPLE_COND)
    {
      gimple_seq seq = NULL;
      code_helper rcode;
      tree ops[3] = {};
      if (gimple_simplify (stmt, &rcode, ops, inplace ? NULL : &seq, valueize))
        {
          if (replace_stmt_with_simplification (gsi, rcode, ops, &seq, inplace))
            changed = true;
          else
            gimple_seq_discard (seq);
        }
    }

  stmt = gsi_stmt (*gsi);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        unsigned old_num_ops = gimple_num_ops (stmt);
        enum tree_code subcode = gimple_assign_rhs_code (stmt);
        tree lhs = gimple_assign_lhs (stmt);
        tree new_rhs;
        /* First canonicalize operand order.  This avoids building new
           trees if this is the only thing fold would later do.  */
        if ((commutative_tree_code (subcode)
             || commutative_ternary_tree_code (subcode))
            && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
                                     gimple_assign_rhs2 (stmt), false))
          {
            tree tem = gimple_assign_rhs1 (stmt);
            gimple_assign_set_rhs1 (stmt, gimple_assign_rhs2 (stmt));
            gimple_assign_set_rhs2 (stmt, tem);
            changed = true;
          }

        new_rhs = fold_gimple_assign (gsi);
        if (new_rhs
            && !useless_type_conversion_p (TREE_TYPE (lhs),
                                           TREE_TYPE (new_rhs)))
          new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
        if (new_rhs
            && (!inplace
                || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
          {
            gimple_assign_set_rhs_from_tree (gsi, new_rhs);
            changed = true;
          }
        break;
      }

    case GIMPLE_COND:
      changed |= fold_gimple_cond (as_a <gcond *> (stmt));
      break;

    case GIMPLE_CALL:
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      {
        gasm *asm_stmt = as_a <gasm *> (stmt);
        size_t noutputs;
        const char **oconstraints;
        const char *constraint;
        bool allows_mem, allows_reg;

        noutputs = gimple_asm_noutputs (asm_stmt);
        oconstraints = XALLOCAVEC (const char *, noutputs);

        for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
          {
            tree link = gimple_asm_output_op (asm_stmt, i);
            tree op = TREE_VALUE (link);
            oconstraints[i]
              = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
            if (REFERENCE_CLASS_P (op)
                && (op = maybe_fold_reference (op, true)) != NULL_TREE)
              {
                TREE_VALUE (link) = op;
                changed = true;
              }
          }
        for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
          {
            tree link = gimple_asm_input_op (asm_stmt, i);
            tree op = TREE_VALUE (link);
            constraint
              = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
            parse_input_constraint (&constraint, 0, 0, noutputs, 0,
                                    oconstraints, &allows_mem, &allows_reg);
            if (REFERENCE_CLASS_P (op)
                && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
                   != NULL_TREE)
              {
                TREE_VALUE (link) = op;
                changed = true;
              }
          }
      }
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
        {
          tree val = gimple_debug_bind_get_value (stmt);
          if (val
              && REFERENCE_CLASS_P (val))
            {
              tree tem = maybe_fold_reference (val, false);
              if (tem)
                {
                  gimple_debug_bind_set_value (stmt, tem);
                  changed = true;
                }
            }
          else if (val
                   && TREE_CODE (val) == ADDR_EXPR)
            {
              tree ref = TREE_OPERAND (val, 0);
              tree tem = maybe_fold_reference (ref, false);
              if (tem)
                {
                  tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
                  gimple_debug_bind_set_value (stmt, tem);
                  changed = true;
                }
            }
        }
      break;

    default:;
    }

  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  */
  if (gimple_has_lhs (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
        {
          tree new_lhs = maybe_fold_reference (lhs, true);
          if (new_lhs)
            {
              gimple_set_lhs (stmt, new_lhs);
              changed = true;
            }
        }
    }

  return changed;
}
/* Valueization callback that ends up not following SSA edges.  */

tree
no_follow_ssa_edges (tree)
{
  return NULL_TREE;
}

/* Valueization callback that ends up following single-use SSA edges only.  */

tree
follow_single_use_edges (tree val)
{
  if (TREE_CODE (val) == SSA_NAME
      && !has_single_use (val))
    return NULL_TREE;
  return val;
}
/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.
   The statement pointed to by GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
}

bool
fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
{
  return fold_stmt_1 (gsi, false, valueize);
}
/* Perform the minimal folding on statement *GSI.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.
   The statement *GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt_inplace (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
  gcc_assert (gsi_stmt (*gsi) == stmt);
  return changed;
}
/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
   if EXPR is null or we don't know how.
   If non-null, the result always has boolean type.  */

static tree
canonicalize_bool (tree expr, bool invert)
{
  if (!expr)
    return NULL_TREE;
  if (invert)
    {
      if (integer_nonzerop (expr))
        return boolean_false_node;
      else if (integer_zerop (expr))
        return boolean_true_node;
      else if (TREE_CODE (expr) == SSA_NAME)
        return fold_build2 (EQ_EXPR, boolean_type_node, expr,
                            build_int_cst (TREE_TYPE (expr), 0));
      else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
        return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
                            boolean_type_node,
                            TREE_OPERAND (expr, 0),
                            TREE_OPERAND (expr, 1));
      else
        return NULL_TREE;
    }
  else
    {
      if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
        return expr;
      if (integer_nonzerop (expr))
        return boolean_true_node;
      else if (integer_zerop (expr))
        return boolean_false_node;
      else if (TREE_CODE (expr) == SSA_NAME)
        return fold_build2 (NE_EXPR, boolean_type_node, expr,
                            build_int_cst (TREE_TYPE (expr), 0));
      else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
        return fold_build2 (TREE_CODE (expr),
                            boolean_type_node,
                            TREE_OPERAND (expr, 0),
                            TREE_OPERAND (expr, 1));
      else
        return NULL_TREE;
    }
}
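
/* Illustrative sketch: for a non-boolean SSA name x_1,
   canonicalize_bool (x_1, false) builds x_1 != 0 and
   canonicalize_bool (x_1, true) builds x_1 == 0, while a comparison
   such as a < b is returned as-is or inverted to a >= b (valid only
   because invert_tree_comparison is called with honor_nans false).  */
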
/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  */

static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
                        const_tree op1, const_tree op2)
{
  gimple s;

  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      if (operand_equal_p (expr, op1, 0))
        return ((code == NE_EXPR && integer_zerop (op2))
                || (code == EQ_EXPR && integer_nonzerop (op2)));
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
          && gimple_assign_rhs_code (s) == code
          && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
          && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
        return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
          && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
        {
          enum tree_code c = gimple_assign_rhs_code (s);
          if ((c == NE_EXPR && integer_zerop (op2))
              || (c == EQ_EXPR && integer_nonzerop (op2)))
            return same_bool_comparison_p (expr, c,
                                           gimple_assign_rhs1 (s),
                                           gimple_assign_rhs2 (s));
          if ((c == EQ_EXPR && integer_zerop (op2))
              || (c == NE_EXPR && integer_nonzerop (op2)))
            return same_bool_comparison_p (expr,
                                           invert_tree_comparison (c, false),
                                           gimple_assign_rhs1 (s),
                                           gimple_assign_rhs2 (s));
        }
    }
  return false;
}
/* Check to see if two boolean expressions OP1 and OP2 are logically
   equivalent.  */

static bool
same_bool_result_p (const_tree op1, const_tree op2)
{
  /* Simple cases first.  */
  if (operand_equal_p (op1, op2, 0))
    return true;

  /* Check the cases where at least one of the operands is a comparison.
     These are a bit smarter than operand_equal_p in that they apply some
     identities on SSA_NAMEs.  */
  if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
      && same_bool_comparison_p (op1, TREE_CODE (op2),
                                 TREE_OPERAND (op2, 0),
                                 TREE_OPERAND (op2, 1)))
    return true;
  if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
      && same_bool_comparison_p (op2, TREE_CODE (op1),
                                 TREE_OPERAND (op1, 0),
                                 TREE_OPERAND (op1, 1)))
    return true;

  /* Default case.  */
  return false;
}
/* Forward declarations for some mutually recursive functions.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
                   enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison (tree var, bool invert,
                         enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison_1 (gimple stmt,
                           enum tree_code code2, tree op2a, tree op2b);
static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
                  enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison (tree var, bool invert,
                        enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison_1 (gimple stmt,
                          enum tree_code code2, tree op2a, tree op2b);
/* Helper function for and_comparisons_1:  try to simplify the AND of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of the VAR before doing the AND.
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison (tree var, bool invert,
                         enum tree_code code2, tree op2a, tree op2b)
{
  tree t;
  gimple stmt = SSA_NAME_DEF_STMT (var);

  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.  */
  if (invert)
    t = or_var_with_comparison_1 (stmt,
                                  invert_tree_comparison (code2, false),
                                  op2a, op2b);
  else
    t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
  return canonicalize_bool (t, invert);
}
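
/* Illustrative sketch: for !var AND (a < b) the DeMorgan rewrite above
   asks or_var_with_comparison_1 for var OR (a >= b) and then lets
   canonicalize_bool invert the answer, so e.g. a constant true from
   the OR query comes back as boolean_false_node here.  */
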
/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison_1 (gimple stmt,
                           enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
          || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
        {
          true_test_var = op2a;
          if (var == true_test_var)
            return var;
        }
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
               || (code2 == NE_EXPR && integer_nonzerop (op2b)))
        {
          false_test_var = op2a;
          if (var == false_test_var)
            return boolean_false_node;
        }
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (innercode,
                                  gimple_assign_rhs1 (stmt),
                                  gimple_assign_rhs2 (stmt),
                                  code2, op2a, op2b);
      if (t)
        return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple s;
      tree t;
      tree partial = NULL_TREE;
      bool is_and = (innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
         of inner1/inner2:
         inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
         inner1 AND (inner1 OR inner2) => inner1
         !inner1 AND (inner1 AND inner2) => false
         !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
         Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
        return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
        return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
        return (is_and
                ? boolean_false_node
                : and_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
        return (is_and
                ? boolean_false_node
                : and_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
         Compute the first partial result, (inner1 AND (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
          && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
          && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
          && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
                                              gimple_assign_rhs1 (s),
                                              gimple_assign_rhs2 (s),
                                              code2, op2a, op2b)))
        {
          /* Handle the AND case, where we are reassociating:
             (inner1 AND inner2) AND (op2a code2 op2b)
             => (t AND inner2)
             If the partial result t is a constant, we win.  Otherwise
             continue on to try reassociating with the other inner test.  */
          if (is_and)
            {
              if (integer_onep (t))
                return inner2;
              else if (integer_zerop (t))
                return boolean_false_node;
            }

          /* Handle the OR case, where we are redistributing:
             (inner1 OR inner2) AND (op2a code2 op2b)
             => (t OR (inner2 AND (op2a code2 op2b)))  */
          else if (integer_onep (t))
            return boolean_true_node;

          /* Save partial result for later.  */
          partial = t;
        }

      /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
          && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
          && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
          && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
                                              gimple_assign_rhs1 (s),
                                              gimple_assign_rhs2 (s),
                                              code2, op2a, op2b)))
        {
          /* Handle the AND case, where we are reassociating:
             (inner1 AND inner2) AND (op2a code2 op2b)
             => (inner1 AND t)  */
          if (is_and)
            {
              if (integer_onep (t))
                return inner1;
              else if (integer_zerop (t))
                return boolean_false_node;
              /* If both are the same, we can apply the identity
                 (x AND x) == x.  */
              else if (partial && same_bool_result_p (t, partial))
                return t;
            }

          /* Handle the OR case, where we are redistributing:
             (inner1 OR inner2) AND (op2a code2 op2b)
             => (t OR (inner1 AND (op2a code2 op2b)))
             => (t OR partial)  */
          else
            {
              if (integer_onep (t))
                return boolean_true_node;
              else if (partial)
                {
                  /* We already got a simplification for the other
                     operand to the redistributed OR expression.  The
                     interesting case is when at least one is false.
                     Or, if both are the same, we can apply the identity
                     (x OR x) == x.  */
                  if (integer_zerop (partial))
                    return t;
                  else if (integer_zerop (t))
                    return partial;
                  else if (same_bool_result_p (t, partial))
                    return t;
                }
            }
        }
    }
  return NULL_TREE;
}
/* Try to simplify the AND of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) AND (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a == op1b), we should either be able to
	 return that or FALSE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	      else
		return boolean_false_node;
	    }
	}
      /* Likewise if the second comparison is an == comparison.  */
      else if (code2 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	      else
		return boolean_false_node;
	    }
	}

      /* Same business with inequality tests.  */
      else if (code1 == NE_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp != 0); break;
	    case NE_EXPR: val = (cmp == 0); break;
	    case LT_EXPR: val = (cmp >= 0); break;
	    case GT_EXPR: val = (cmp <= 0); break;
	    case LE_EXPR: val = (cmp > 0); break;
	    case GE_EXPR: val = (cmp < 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == NE_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp <= 0); break;
	    case GT_EXPR: val = (cmp >= 0); break;
	    case LE_EXPR: val = (cmp < 0); break;
	    case GE_EXPR: val = (cmp > 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Chose the more restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Likewise chose the more restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Check for singleton ranges.  */
      else if (cmp == 0
	       && ((code1 == LE_EXPR && code2 == GE_EXPR)
		   || (code1 == GE_EXPR && code2 == LE_EXPR)))
	return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);

      /* Check for disjoint ranges.  */
      else if (cmp <= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_false_node;
      else if (cmp >= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_false_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return and_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ANDed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
			{
			  if (!result)
			    result = boolean_false_node;
			  else if (!integer_zerop (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = and_var_with_comparison (arg, invert, code2,
						      op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }
	  break;

	default:
	  break;
	}
    }
  return NULL_TREE;
}
/* Try to simplify the AND of two comparisons, specified by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be simplified to a single expression (without requiring
   introducing more SSA variables to hold intermediate values),
   return the resulting tree.  Otherwise return NULL_TREE.
   If the result expression is non-null, it has boolean type.  */

tree
maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
			    enum tree_code code2, tree op2a, tree op2b)
{
  tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
  if (t)
    return t;
  else
    return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
}
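
/* An editorial illustration (not part of the original sources): when both
   comparisons test the same SSA name X against integer constants, the
   constant-merging logic in and_comparisons_1 picks the more restrictive
   bound or proves the conjunction empty, e.g.

     (X < 3)  AND (X < 5)   ->  X < 3
     (X <= 4) AND (X >= 4)  ->  X == 4      (singleton range)
     (X < 3)  AND (X > 5)   ->  false       (disjoint ranges)

   so a hypothetical caller

     tree t = maybe_fold_and_comparisons (LT_EXPR, x, build_int_cst (itype, 3),
					  GT_EXPR, x, build_int_cst (itype, 5));

   would receive boolean_false_node, where ITYPE stands for the (made-up)
   integer type of X.  */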
/* Helper function for or_comparisons_1:  try to simplify the OR of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of VAR before doing the OR.
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison (tree var, bool invert,
			enum tree_code code2, tree op2a, tree op2b)
{
  tree t;
  gimple stmt = SSA_NAME_DEF_STMT (var);

  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.  */
  if (invert)
    t = and_var_with_comparison_1 (stmt,
				   invert_tree_comparison (code2, false),
				   op2a, op2b);
  else
    t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
  return canonicalize_bool (t, invert);
}
/* Try to simplify the OR of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison_1 (gimple stmt,
			  enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var OR (var != 0)) => true.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_true_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = or_comparisons_1 (innercode,
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 code2, op2a, op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple s;
      tree t;
      tree partial = NULL_TREE;
      bool is_or = (innercode == BIT_IOR_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
	 inner1 OR (inner1 AND inner2) => inner1
	 !inner1 OR (inner1 OR inner2) => true
	 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
      */
      if (inner1 == true_test_var)
	return (is_or ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_or ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the OR across the inner tests.
	 Compute the first partial result, (inner1 OR (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (t OR inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_or)
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (integer_zerop (t))
		return inner2;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner2 OR (op2a code op2b)))  */
	  else if (integer_zerop (t))
	    return boolean_false_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (inner1 OR t)
	     => (t OR partial)  */
	  if (is_or)
	    {
	      if (integer_zerop (t))
		return inner1;
	      else if (integer_onep (t))
		return boolean_true_node;
	      /* If both are the same, we can apply the identity
		 (x OR x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner1 OR (op2a code2 op2b)))
	     => (t AND partial)  */
	  else
	    {
	      if (integer_zerop (t))
		return boolean_false_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed AND expression.  The
		     interesting case is when at least one is true.
		     Or, if both are the same, we can apply the identity
		     (x AND x) == x.  */
		  if (integer_onep (partial))
		    return t;
		  else if (integer_onep (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
/* Try to simplify the OR of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) OR (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a != op1b), we should either be able to
	 return that or TRUE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	    }
	}
      /* Likewise if the second comparison is a != comparison.  */
      else if (code2 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	    }
	}

      /* See if an equality test is redundant with the other comparison.  */
      else if (code1 == EQ_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == EQ_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Chose the less restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Likewise chose the less restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Check for singleton ranges.  */
      else if (cmp == 0
	       && ((code1 == LT_EXPR && code2 == GT_EXPR)
		   || (code1 == GT_EXPR && code2 == LT_EXPR)))
	return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);

      /* Check for less/greater pairs that don't restrict the range at all.  */
      else if (cmp >= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_true_node;
      else if (cmp <= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_true_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return or_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ORed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
			{
			  if (!result)
			    result = boolean_true_node;
			  else if (!integer_onep (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = or_var_with_comparison (arg, invert, code2,
						     op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }
	  break;

	default:
	  break;
	}
    }
  return NULL_TREE;
}
/* Try to simplify the OR of two comparisons, specified by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be simplified to a single expression (without requiring
   introducing more SSA variables to hold intermediate values),
   return the resulting tree.  Otherwise return NULL_TREE.
   If the result expression is non-null, it has boolean type.  */

tree
maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
  if (t)
    return t;
  else
    return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
}
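
/* An editorial illustration (not part of the original sources): the OR
   folder is the dual of the AND folder above, preferring the less
   restrictive bound, e.g.

     (X < 5)  OR (X < 3)   ->  X < 5
     (X < 4)  OR (X > 4)   ->  X != 4      (singleton gap)
     (X <= 4) OR (X >= 3)  ->  true

   The last case triggers the "less/greater pairs that don't restrict the
   range at all" check in or_comparisons_1.  */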
/* Fold STMT to a constant using VALUEIZE to valueize SSA names.

   Either NULL_TREE, a simplified but non-constant tree, or a constant
   is returned.

   ???  This should go into a gimple-fold-inline.h file to be eventually
   privatized with the single valueize function used in the various TUs
   to avoid the indirect function call overhead.  */

tree
gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree),
				tree (*gvalueize) (tree))
{
  code_helper rcode;
  tree ops[3] = {};
  /* ???  The SSA propagators do not correctly deal with following SSA use-def
     edges if there are intermediate VARYING defs.  For this reason
     do not follow SSA edges here even though SCCVN can technically
     just deal fine with that.  */
  if (gimple_simplify (stmt, &rcode, ops, NULL, gvalueize)
      && rcode.is_tree_code ()
      && (TREE_CODE_LENGTH ((tree_code) rcode) == 0
	  || ((tree_code) rcode) == ADDR_EXPR)
      && is_gimple_val (ops[0]))
    {
      tree res = ops[0];
      if (dump_file && dump_flags & TDF_DETAILS)
	{
	  fprintf (dump_file, "Match-and-simplified ");
	  print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
	  fprintf (dump_file, " to ");
	  print_generic_expr (dump_file, res, 0);
	  fprintf (dump_file, "\n");
	}
      return res;
    }

  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	enum tree_code subcode = gimple_assign_rhs_code (stmt);

	switch (get_gimple_rhs_class (subcode))
	  {
	  case GIMPLE_SINGLE_RHS:
	    {
	      tree rhs = gimple_assign_rhs1 (stmt);
	      enum tree_code_class kind = TREE_CODE_CLASS (subcode);

	      if (TREE_CODE (rhs) == SSA_NAME)
		{
		  /* If the RHS is an SSA_NAME, return its known constant value,
		     if any.  */
		  return (*valueize) (rhs);
		}
	      /* Handle propagating invariant addresses into address
		 operations.  */
	      else if (TREE_CODE (rhs) == ADDR_EXPR
		       && !is_gimple_min_invariant (rhs))
		{
		  HOST_WIDE_INT offset = 0;
		  tree base;
		  base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
							  &offset,
							  valueize);
		  if (base
		      && (CONSTANT_CLASS_P (base)
			  || decl_address_invariant_p (base)))
		    return build_invariant_address (TREE_TYPE (rhs),
						    base, offset);
		}
	      else if (TREE_CODE (rhs) == CONSTRUCTOR
		       && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		       && (CONSTRUCTOR_NELTS (rhs)
			   == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
		{
		  unsigned i;
		  tree val, *vec;

		  vec = XALLOCAVEC (tree,
				    TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
		  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
		    {
		      val = (*valueize) (val);
		      if (TREE_CODE (val) == INTEGER_CST
			  || TREE_CODE (val) == REAL_CST
			  || TREE_CODE (val) == FIXED_CST)
			vec[i] = val;
		      else
			return NULL_TREE;
		    }

		  return build_vector (TREE_TYPE (rhs), vec);
		}
	      if (subcode == OBJ_TYPE_REF)
		{
		  tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
		  /* If callee is constant, we can fold away the wrapper.  */
		  if (is_gimple_min_invariant (val))
		    return val;
		}

	      if (kind == tcc_reference)
		{
		  if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
		       || TREE_CODE (rhs) == REALPART_EXPR
		       || TREE_CODE (rhs) == IMAGPART_EXPR)
		      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      return fold_unary_loc (EXPR_LOCATION (rhs),
					     TREE_CODE (rhs),
					     TREE_TYPE (rhs), val);
		    }
		  else if (TREE_CODE (rhs) == BIT_FIELD_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      return fold_ternary_loc (EXPR_LOCATION (rhs),
					       TREE_CODE (rhs),
					       TREE_TYPE (rhs), val,
					       TREE_OPERAND (rhs, 1),
					       TREE_OPERAND (rhs, 2));
		    }
		  else if (TREE_CODE (rhs) == MEM_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      if (TREE_CODE (val) == ADDR_EXPR
			  && is_gimple_min_invariant (val))
			{
			  tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
						  unshare_expr (val),
						  TREE_OPERAND (rhs, 1));
			  if (tem)
			    rhs = tem;
			}
		    }
		  return fold_const_aggregate_ref_1 (rhs, valueize);
		}
	      else if (kind == tcc_declaration)
		return get_symbol_constant_value (rhs);
	      return rhs;
	    }

	  case GIMPLE_UNARY_RHS:
	    return NULL_TREE;

	  case GIMPLE_BINARY_RHS:
	    {
	      /* Handle binary operators that can appear in GIMPLE form.  */
	      tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
	      tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));

	      /* Translate &x + CST into an invariant form suitable for
		 further propagation.  */
	      if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
		  && TREE_CODE (op0) == ADDR_EXPR
		  && TREE_CODE (op1) == INTEGER_CST)
		{
		  tree off = fold_convert (ptr_type_node, op1);
		  return build_fold_addr_expr_loc
			   (loc,
			    fold_build2 (MEM_REF,
					 TREE_TYPE (TREE_TYPE (op0)),
					 unshare_expr (op0), off));
		}

	      return fold_binary_loc (loc, subcode,
				      gimple_expr_type (stmt), op0, op1);
	    }

	  case GIMPLE_TERNARY_RHS:
	    {
	      /* Handle ternary operators that can appear in GIMPLE form.  */
	      tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
	      tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
	      tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));

	      /* Fold embedded expressions in ternary codes.  */
	      if ((subcode == COND_EXPR
		   || subcode == VEC_COND_EXPR)
		  && COMPARISON_CLASS_P (op0))
		{
		  tree op00 = (*valueize) (TREE_OPERAND (op0, 0));
		  tree op01 = (*valueize) (TREE_OPERAND (op0, 1));
		  tree tem = fold_binary_loc (loc, TREE_CODE (op0),
					      TREE_TYPE (op0), op00, op01);
		  if (tem)
		    op0 = tem;
		}

	      return fold_ternary_loc (loc, subcode,
				       gimple_expr_type (stmt), op0, op1, op2);
	    }

	  default:
	    gcc_unreachable ();
	  }
      }

    case GIMPLE_CALL:
      {
	tree fn;
	gcall *call_stmt = as_a <gcall *> (stmt);

	if (gimple_call_internal_p (stmt))
	  {
	    enum tree_code subcode = ERROR_MARK;
	    switch (gimple_call_internal_fn (stmt))
	      {
	      case IFN_UBSAN_CHECK_ADD:
		subcode = PLUS_EXPR;
		break;
	      case IFN_UBSAN_CHECK_SUB:
		subcode = MINUS_EXPR;
		break;
	      case IFN_UBSAN_CHECK_MUL:
		subcode = MULT_EXPR;
		break;
	      default:
		return NULL_TREE;
	      }
	    tree arg0 = gimple_call_arg (stmt, 0);
	    tree arg1 = gimple_call_arg (stmt, 1);
	    tree op0 = (*valueize) (arg0);
	    tree op1 = (*valueize) (arg1);

	    if (TREE_CODE (op0) != INTEGER_CST
		|| TREE_CODE (op1) != INTEGER_CST)
	      {
		switch (subcode)
		  {
		  case MULT_EXPR:
		    /* x * 0 = 0 * x = 0 without overflow.  */
		    if (integer_zerop (op0) || integer_zerop (op1))
		      return build_zero_cst (TREE_TYPE (arg0));
		    break;
		  case MINUS_EXPR:
		    /* y - y = 0 without overflow.  */
		    if (operand_equal_p (op0, op1, 0))
		      return build_zero_cst (TREE_TYPE (arg0));
		    break;
		  default:
		    break;
		  }
	      }
	    tree res
	      = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
	    if (res
		&& TREE_CODE (res) == INTEGER_CST
		&& !TREE_OVERFLOW (res))
	      return res;
	    return NULL_TREE;
	  }

	fn = (*valueize) (gimple_call_fn (stmt));
	if (TREE_CODE (fn) == ADDR_EXPR
	    && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
	    && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
	    && gimple_builtin_call_types_compatible_p (stmt,
						       TREE_OPERAND (fn, 0)))
	  {
	    tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
	    tree retval;
	    unsigned i;
	    for (i = 0; i < gimple_call_num_args (stmt); ++i)
	      args[i] = (*valueize) (gimple_call_arg (stmt, i));
	    retval = fold_builtin_call_array (loc,
					      gimple_call_return_type (call_stmt),
					      fn, gimple_call_num_args (stmt), args);
	    if (retval)
	      {
		/* fold_call_expr wraps the result inside a NOP_EXPR.  */
		STRIP_NOPS (retval);
		retval = fold_convert (gimple_call_return_type (call_stmt),
				       retval);
	      }
	    return retval;
	  }
	return NULL_TREE;
      }

    default:
      return NULL_TREE;
    }
}
/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
   Returns NULL_TREE if folding to a constant is not possible, otherwise
   returns a constant according to is_gimple_min_invariant.  */

tree
gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
{
  tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
  if (res && is_gimple_min_invariant (res))
    return res;
  return NULL_TREE;
}
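
/* An editorial sketch of the valueization contract (an illustration, not
   from the original sources): a caller such as a CCP-style propagator
   supplies a callback that maps an SSA name to its currently known value
   and otherwise returns the operand unchanged, roughly

     static tree
     my_valueize (tree name)   // hypothetical helper
     {
       if (TREE_CODE (name) == SSA_NAME && lattice_constant_p (name))
	 return lattice_value (name);
       return name;
     }

     tree val = gimple_fold_stmt_to_constant (stmt, my_valueize);

   where lattice_constant_p/lattice_value stand in for the pass's own
   lattice lookup.  A non-NULL VAL satisfies is_gimple_min_invariant.  */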
/* The following set of functions are supposed to fold references using
   their constant initializers.  */

/* See if we can find constructor defining value of BASE.
   When we know the constructor with constant offset (such as
   base is array[40] and we do know constructor of array), then
   BIT_OFFSET is adjusted accordingly.

   As a special case, return error_mark_node when constructor
   is not explicitly available, but it is known to be zero
   such as 'static const int a;'.  */

static tree
get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
		      tree (*valueize)(tree))
{
  HOST_WIDE_INT bit_offset2, size, max_size;
  if (TREE_CODE (base) == MEM_REF)
    {
      if (!integer_zerop (TREE_OPERAND (base, 1)))
	{
	  if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
	    return NULL_TREE;
	  *bit_offset += (mem_ref_offset (base).to_short_addr ()
			  * BITS_PER_UNIT);
	}

      if (valueize
	  && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
	base = valueize (TREE_OPERAND (base, 0));
      if (!base || TREE_CODE (base) != ADDR_EXPR)
	return NULL_TREE;
      base = TREE_OPERAND (base, 0);
    }

  /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
     DECL_INITIAL.  If BASE is a nested reference into another
     ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
     the inner reference.  */
  switch (TREE_CODE (base))
    {
    case VAR_DECL:
    case CONST_DECL:
      {
	tree init = ctor_for_folding (base);

	/* Our semantic is exact opposite of ctor_for_folding;
	   NULL means unknown, while error_mark_node is 0.  */
	if (init == error_mark_node)
	  return NULL_TREE;
	if (!init)
	  return error_mark_node;
	return init;
      }

    case ARRAY_REF:
    case COMPONENT_REF:
      base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
      if (max_size == -1 || size != max_size)
	return NULL_TREE;
      *bit_offset += bit_offset2;
      return get_base_constructor (base, bit_offset, valueize);

    case STRING_CST:
    case CONSTRUCTOR:
      return base;

    default:
      return NULL_TREE;
    }
}
/* CTOR is CONSTRUCTOR of an array type.  Fold reference of type TYPE and size
   SIZE to the memory at bit OFFSET.  */

static tree
fold_array_ctor_reference (tree type, tree ctor,
			   unsigned HOST_WIDE_INT offset,
			   unsigned HOST_WIDE_INT size,
			   tree from_decl)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;
  offset_int low_bound;
  offset_int elt_size;
  offset_int index, max_index;
  offset_int access_index;
  tree domain_type = NULL_TREE, index_type = NULL_TREE;
  HOST_WIDE_INT inner_offset;

  /* Compute low bound and elt size.  */
  if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
    domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    {
      /* Static constructors for variably sized objects make no sense.  */
      gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
      index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
      low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
    }
  else
    low_bound = 0;
  /* Static constructors for variably sized objects make no sense.  */
  gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
	      == INTEGER_CST);
  elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));

  /* We can handle only constantly sized accesses that are known to not
     be larger than size of array element.  */
  if (!TYPE_SIZE_UNIT (type)
      || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
      || wi::lts_p (elt_size, wi::to_offset (TYPE_SIZE_UNIT (type))))
    return NULL_TREE;

  /* Compute the array index we look for.  */
  access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
				 elt_size);
  access_index += low_bound;
  if (index_type)
    access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
			    TYPE_SIGN (index_type));

  /* And offset within the access.  */
  inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);

  /* See if the array field is large enough to span whole access.  We do not
     care to fold accesses spanning multiple array indexes.  */
  if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
    return NULL_TREE;

  index = low_bound - 1;
  if (index_type)
    index = wi::ext (index, TYPE_PRECISION (index_type),
		     TYPE_SIGN (index_type));

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
    {
      /* Array constructor might explicitly set index, or specify a range,
	 or leave index NULL meaning that it is next index after previous
	 one.  */
      if (cfield)
	{
	  if (TREE_CODE (cfield) == INTEGER_CST)
	    max_index = index = wi::to_offset (cfield);
	  else
	    {
	      gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
	      index = wi::to_offset (TREE_OPERAND (cfield, 0));
	      max_index = wi::to_offset (TREE_OPERAND (cfield, 1));
	    }
	}
      else
	{
	  index += 1;
	  if (index_type)
	    index = wi::ext (index, TYPE_PRECISION (index_type),
			     TYPE_SIGN (index_type));
	  max_index = index;
	}

      /* Do we have match?  */
      if (wi::cmpu (access_index, index) >= 0
	  && wi::cmpu (access_index, max_index) <= 0)
	return fold_ctor_reference (type, cval, inner_offset, size,
				    from_decl);
    }
  /* When memory is not explicitly mentioned in constructor,
     it is 0 (or out of range).  */
  return build_zero_cst (type);
}
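
/* An editorial example (not part of the original sources): given

     static const int a[4] = { 1, 2, 3, 4 };

   a read of a[2] arrives here with TYPE == int, OFFSET == 64 and
   SIZE == 32 (assuming 32-bit int and 8-bit units), so ACCESS_INDEX
   becomes 2 and the matching constructor element yields the INTEGER_CST 3.
   An in-range index whose initializer is omitted from the constructor
   falls through the loop and folds to zero via build_zero_cst.  */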
/* CTOR is CONSTRUCTOR of an aggregate or vector.
   Fold reference of type TYPE and size SIZE to the memory at bit OFFSET.  */

static tree
fold_nonarray_ctor_reference (tree type, tree ctor,
			      unsigned HOST_WIDE_INT offset,
			      unsigned HOST_WIDE_INT size,
			      tree from_decl)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
			    cval)
    {
      tree byte_offset = DECL_FIELD_OFFSET (cfield);
      tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
      tree field_size = DECL_SIZE (cfield);
      offset_int bitoffset;
      offset_int bitoffset_end, access_end;

      /* Variable sized objects in static constructors make no sense,
	 but field_size can be NULL for flexible array members.  */
      gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
		  && TREE_CODE (byte_offset) == INTEGER_CST
		  && (field_size != NULL_TREE
		      ? TREE_CODE (field_size) == INTEGER_CST
		      : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));

      /* Compute bit offset of the field.  */
      bitoffset = (wi::to_offset (field_offset)
		   + wi::lshift (wi::to_offset (byte_offset),
				 LOG2_BITS_PER_UNIT));
      /* Compute bit offset where the field ends.  */
      if (field_size != NULL_TREE)
	bitoffset_end = bitoffset + wi::to_offset (field_size);
      else
	bitoffset_end = 0;

      access_end = offset_int (offset) + size;

      /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
	 [BITOFFSET, BITOFFSET_END)?  */
      if (wi::cmps (access_end, bitoffset) > 0
	  && (field_size == NULL_TREE
	      || wi::lts_p (offset, bitoffset_end)))
	{
	  offset_int inner_offset = offset_int (offset) - bitoffset;
	  /* We do have overlap.  Now see if the field is large enough to
	     cover the access.  Give up for accesses spanning multiple
	     fields.  */
	  if (wi::cmps (access_end, bitoffset_end) > 0)
	    return NULL_TREE;
	  if (wi::lts_p (offset, bitoffset))
	    return NULL_TREE;
	  return fold_ctor_reference (type, cval,
				      inner_offset.to_uhwi (), size,
				      from_decl);
	}
    }
  /* When memory is not explicitly mentioned in constructor, it is 0.  */
  return build_zero_cst (type);
}
/* CTOR is value initializing memory.  Fold a reference of type TYPE and
   size SIZE to the memory at bit OFFSET.  */

tree
fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
		     unsigned HOST_WIDE_INT size, tree from_decl)
{
  tree ret;

  /* We found the field with exact match.  */
  if (useless_type_conversion_p (type, TREE_TYPE (ctor))
      && !offset)
    return canonicalize_constructor_val (unshare_expr (ctor), from_decl);

  /* We are at the end of walk, see if we can view convert the
     result.  */
  if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
      /* VIEW_CONVERT_EXPR is defined only for matching sizes.  */
      && !compare_tree_int (TYPE_SIZE (type), size)
      && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
    {
      ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
      ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
      if (ret)
	STRIP_USELESS_TYPE_CONVERSION (ret);
      return ret;
    }
  /* For constants and byte-aligned/sized reads try to go through
     native_encode/interpret.  */
  if (CONSTANT_CLASS_P (ctor)
      && BITS_PER_UNIT == 8
      && offset % BITS_PER_UNIT == 0
      && size % BITS_PER_UNIT == 0
      && size <= MAX_BITSIZE_MODE_ANY_MODE)
    {
      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
      if (native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
			      offset / BITS_PER_UNIT) > 0)
	return native_interpret_expr (type, buf, size / BITS_PER_UNIT);
    }
  if (TREE_CODE (ctor) == CONSTRUCTOR)
    {
      if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
	  || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
	return fold_array_ctor_reference (type, ctor, offset, size,
					  from_decl);
      else
	return fold_nonarray_ctor_reference (type, ctor, offset, size,
					     from_decl);
    }

  return NULL_TREE;
}
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates, valueizing SSA
   names using VALUEIZE.  Return NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
{
  tree ctor, idx, base;
  HOST_WIDE_INT offset, size, max_size;
  tree tem;

  if (TREE_THIS_VOLATILE (t))
    return NULL_TREE;

  if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
    return get_symbol_constant_value (t);

  tem = fold_read_from_constant_string (t);
  if (tem)
    return tem;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Constant indexes are handled well by get_base_constructor.
	 Only special case variable offsets.
	 FIXME: This code can't handle nested references with variable indexes
	 (they will be handled only by iteration of ccp).  Perhaps we can bring
	 get_ref_base_and_extent here and make it use a valueize callback.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
	  && valueize
	  && (idx = (*valueize) (TREE_OPERAND (t, 1)))
	  && TREE_CODE (idx) == INTEGER_CST)
	{
	  tree low_bound, unit_size;

	  /* If the resulting bit-offset is constant, track it.  */
	  if ((low_bound = array_ref_low_bound (t),
	       TREE_CODE (low_bound) == INTEGER_CST)
	      && (unit_size = array_ref_element_size (t),
		  tree_fits_uhwi_p (unit_size)))
	    {
	      offset_int woffset
		= wi::sext (wi::to_offset (idx) - wi::to_offset (low_bound),
			    TYPE_PRECISION (TREE_TYPE (idx)));

	      if (wi::fits_shwi_p (woffset))
		{
		  offset = woffset.to_shwi ();
		  /* TODO: This code seems wrong, multiply then check
		     to see if it fits.  */
		  offset *= tree_to_uhwi (unit_size);
		  offset *= BITS_PER_UNIT;

		  base = TREE_OPERAND (t, 0);
		  ctor = get_base_constructor (base, &offset, valueize);
		  /* Empty constructor.  Always fold to 0.  */
		  if (ctor == error_mark_node)
		    return build_zero_cst (TREE_TYPE (t));
		  /* Out of bound array access.  Value is undefined,
		     but don't fold.  */
		  if (offset < 0)
		    return NULL_TREE;
		  /* We cannot determine ctor.  */
		  if (!ctor)
		    return NULL_TREE;
		  return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
					      tree_to_uhwi (unit_size)
					      * BITS_PER_UNIT,
					      base);
		}
	    }
	}
      /* Fallthru.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case TARGET_MEM_REF:
    case MEM_REF:
      base = get_ref_base_and_extent (t, &offset, &size, &max_size);
      ctor = get_base_constructor (base, &offset, valueize);

      /* Empty constructor.  Always fold to 0.  */
      if (ctor == error_mark_node)
	return build_zero_cst (TREE_TYPE (t));
      /* We do not know precise address.  */
      if (max_size == -1 || max_size != size)
	return NULL_TREE;
      /* We cannot determine ctor.  */
      if (!ctor)
	return NULL_TREE;

      /* Out of bound array access.  Value is undefined, but don't fold.  */
      if (offset < 0)
	return NULL_TREE;

      return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
				  base);

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
	if (c && TREE_CODE (c) == COMPLEX_CST)
	  return fold_build1_loc (EXPR_LOCATION (t),
				  TREE_CODE (t), TREE_TYPE (t), c);
	break;
      }

    default:
      break;
    }

  return NULL_TREE;
}

tree
fold_const_aggregate_ref (tree t)
{
  return fold_const_aggregate_ref_1 (t, NULL);
}
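
/* An editorial example (not part of the original sources): for

     static const struct { int x; int y; } p = { 3, 4 };

   fold_const_aggregate_ref applied to the COMPONENT_REF p.y finds the
   CONSTRUCTOR through get_base_constructor, dispatches to
   fold_nonarray_ctor_reference, and returns the INTEGER_CST 4.  */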
/* Lookup virtual method with index TOKEN in a virtual table V
   at OFFSET.
   Set CAN_REFER if non-NULL to false if the method is not referable or
   if the virtual table is ill-formed (such as rewritten by a non-C++
   produced symbol).  Otherwise just return NULL in that case.  */

tree
gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
				   tree v,
				   unsigned HOST_WIDE_INT offset,
				   bool *can_refer)
{
  tree vtable = v, init, fn;
  unsigned HOST_WIDE_INT size;
  unsigned HOST_WIDE_INT elt_size, access_index;
  tree domain_type;

  if (can_refer)
    *can_refer = true;

  /* First of all double check we have virtual table.  */
  if (TREE_CODE (v) != VAR_DECL
      || !DECL_VIRTUAL_P (v))
    {
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }

  init = ctor_for_folding (v);

  /* The virtual tables should always be born with constructors
     and we always should assume that they are available for
     folding.  At the moment we do not stream them in all cases,
     but it should never happen that ctor seem unreachable.  */
  gcc_assert (init);
  if (init == error_mark_node)
    {
      gcc_assert (in_lto_p);
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }
  gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
  size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
  offset *= BITS_PER_UNIT;
  offset += token * size;

  /* Lookup the value in the constructor that is assumed to be array.
     This is equivalent to
     fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
			       offset, size, NULL);
     but in a constant time.  We expect that frontend produced a simple
     array without indexed initializers.  */

  gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
  domain_type = TYPE_DOMAIN (TREE_TYPE (init));
  gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
  elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));

  access_index = offset / BITS_PER_UNIT / elt_size;
  gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);

  /* This code makes an assumption that there are no
     indexed fields produced by C++ FE, so we can directly index the array.  */
  if (access_index < CONSTRUCTOR_NELTS (init))
    {
      fn = CONSTRUCTOR_ELT (init, access_index)->value;
      gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
      STRIP_NOPS (fn);
    }
  else
    fn = NULL;

  /* For type inconsistent program we may end up looking up virtual method
     in virtual table that does not contain TOKEN entries.  We may overrun
     the virtual table and pick up a constant or RTTI info pointer.
     In any case the call is undefined.  */
  if (!fn
      || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
      || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
    fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
  else
    {
      fn = TREE_OPERAND (fn, 0);

      /* When cgraph node is missing and function is not public, we cannot
	 devirtualize.  This can happen in WHOPR when the actual method
	 ends up in other partition, because we found devirtualization
	 possibility too late.  */
      if (!can_refer_decl_in_current_unit_p (fn, vtable))
	{
	  if (can_refer)
	    {
	      *can_refer = false;
	      return fn;
	    }
	  return NULL_TREE;
	}
    }

  /* Make sure we create a cgraph node for functions we'll reference.
     They can be non-existent if the reference comes from an entry
     of an external vtable for example.  */
  cgraph_node::get_create (fn);

  return fn;
}
/* Return a declaration of a function which an OBJ_TYPE_REF references.  TOKEN
   is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
   KNOWN_BINFO carries the binfo describing the true type of
   OBJ_TYPE_REF_OBJECT(REF).
   Set CAN_REFER if non-NULL to false if the method is not referable or
   if the virtual table is ill-formed (such as rewritten by a non-C++
   produced symbol).  Otherwise just return NULL in that case.  */

tree
gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
				  bool *can_refer)
{
  unsigned HOST_WIDE_INT offset;
  tree v;

  v = BINFO_VTABLE (known_binfo);
  /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone.  */
  if (!v)
    return NULL_TREE;

  if (!vtable_pointer_value_to_vtable (v, &v, &offset))
    {
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }
  return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
}
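
/* An editorial sketch (not from the original sources): during
   devirtualization, a caller that knows the dynamic type of an object
   can resolve an OBJ_TYPE_REF with token N roughly as

     bool can_refer;
     tree fndecl = gimple_get_virt_method_for_binfo (N, binfo, &can_refer);

   where a NULL result with CAN_REFER false means the target was lost
   (e.g. the method landed in another LTO partition), and a result of the
   __builtin_unreachable decl marks a type-inconsistent call.  */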
/* Return true iff VAL is a gimple expression that is known to be
   non-negative.  Restricted to floating-point inputs.  */

bool
gimple_val_nonnegative_real_p (tree val)
{
  gimple def_stmt;

  gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));

  /* Use existing logic for non-gimple trees.  */
  if (tree_expr_nonnegative_p (val))
    return true;

  if (TREE_CODE (val) != SSA_NAME)
    return false;

  /* Currently we look only at the immediately defining statement
     to make this determination, since recursion on defining
     statements of operands can lead to quadratic behavior in the
     worst case.  This is expected to catch almost all occurrences
     in practice.  It would be possible to implement limited-depth
     recursion if important cases are lost.  Alternatively, passes
     that need this information (such as the pow/powi lowering code
     in the cse_sincos pass) could be revised to provide it through
     dataflow propagation.  */

  def_stmt = SSA_NAME_DEF_STMT (val);

  if (is_gimple_assign (def_stmt))
    {
      tree op0, op1;

      /* See fold-const.c:tree_expr_nonnegative_p for additional
	 cases that could be handled with recursion.  */

      switch (gimple_assign_rhs_code (def_stmt))
	{
	case ABS_EXPR:
	  /* Always true for floating-point operands.  */
	  return true;

	case MULT_EXPR:
	  /* True if the two operands are identical (since we are
	     restricted to floating-point inputs).  */
	  op0 = gimple_assign_rhs1 (def_stmt);
	  op1 = gimple_assign_rhs2 (def_stmt);

	  if (op0 == op1
	      || operand_equal_p (op0, op1, 0))
	    return true;

	default:
	  break;
	}
    }
  else if (is_gimple_call (def_stmt))
    {
      tree fndecl = gimple_call_fndecl (def_stmt);
      if (fndecl
	  && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	{
	  tree arg1;

	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_ACOS):
	    CASE_FLT_FN (BUILT_IN_ACOSH):
	    CASE_FLT_FN (BUILT_IN_CABS):
	    CASE_FLT_FN (BUILT_IN_COSH):
	    CASE_FLT_FN (BUILT_IN_ERFC):
	    CASE_FLT_FN (BUILT_IN_EXP):
	    CASE_FLT_FN (BUILT_IN_EXP10):
	    CASE_FLT_FN (BUILT_IN_EXP2):
	    CASE_FLT_FN (BUILT_IN_FABS):
	    CASE_FLT_FN (BUILT_IN_FDIM):
	    CASE_FLT_FN (BUILT_IN_HYPOT):
	    CASE_FLT_FN (BUILT_IN_POW10):
	      return true;

	    CASE_FLT_FN (BUILT_IN_SQRT):
	      /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
		 nonnegative inputs.  */
	      if (!HONOR_SIGNED_ZEROS (val))
		return true;

	      break;

	    CASE_FLT_FN (BUILT_IN_POWI):
	      /* True if the second argument is an even integer.  */
	      arg1 = gimple_call_arg (def_stmt, 1);

	      if (TREE_CODE (arg1) == INTEGER_CST
		  && (TREE_INT_CST_LOW (arg1) & 1) == 0)
		return true;

	      break;

	    CASE_FLT_FN (BUILT_IN_POW):
	      /* True if the second argument is an even integer-valued
		 real.  */
	      arg1 = gimple_call_arg (def_stmt, 1);

	      if (TREE_CODE (arg1) == REAL_CST)
		{
		  REAL_VALUE_TYPE c;
		  HOST_WIDE_INT n;

		  c = TREE_REAL_CST (arg1);
		  n = real_to_integer (&c);

		  if ((n & 1) == 0)
		    {
		      REAL_VALUE_TYPE cint;
		      real_from_integer (&cint, VOIDmode, n, SIGNED);
		      if (real_identical (&c, &cint))
			return true;
		    }
		}

	      break;

	    default:
	      break;
	    }
	}
    }

  return false;
}
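
/* Editorial examples of what the above recognizes (not from the original
   sources): for float SSA names, y = x * x, y = __builtin_fabsf (x) and
   y = __builtin_powf (x, 2.0f) are all reported non-negative, while
   y = __builtin_powf (x, 3.0f) is not, since the exponent 3 is odd.  */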
/* Given a pointer value OP0, return a simplified version of an
   indirection through OP0, or NULL_TREE if no simplification is
   possible.  Note that the resulting type may be different from
   the type pointed to in the sense that it is still compatible
   from the langhooks point of view.  */

tree
gimple_fold_indirect_ref (tree t)
{
  tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
  tree sub = t;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (useless_type_conversion_p (type, optype))
	return op;

      /* *(foo *)&fooarray => fooarray[0] */
      if (TREE_CODE (optype) == ARRAY_TYPE
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
	  && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  if (TREE_CODE (min_val) == INTEGER_CST)
	    return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
	}
    }

  /* *(p + CST) -> ...  */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree addr = TREE_OPERAND (sub, 0);
      tree off = TREE_OPERAND (sub, 1);
      tree addrtype;

      STRIP_NOPS (addr);
      addrtype = TREE_TYPE (addr);

      /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
	  && tree_fits_uhwi_p (off))
	{
	  unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
	  tree part_width = TYPE_SIZE (type);
	  unsigned HOST_WIDE_INT part_widthi
	    = tree_to_shwi (part_width) / BITS_PER_UNIT;
	  unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
	  tree index = bitsize_int (indexi);
	  if (offset / part_widthi
	      < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
	    return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
				part_width, index);
	}

      /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
	{
	  tree size = TYPE_SIZE_UNIT (type);
	  if (tree_int_cst_equal (size, off))
	    return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
	}

      /* *(p + CST) -> MEM_REF <p, CST>.  */
      if (TREE_CODE (addr) != ADDR_EXPR
	  || DECL_P (TREE_OPERAND (addr, 0)))
	return fold_build2 (MEM_REF, type,
			    addr,
			    wide_int_to_tree (ptype, off));
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
      && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      tree osub = sub;
      sub = gimple_fold_indirect_ref (sub);
      if (! sub)
	sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      if (TREE_CODE (min_val) == INTEGER_CST)
	return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
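
/* Editorial note (not from the original sources): the per-case comments
   above double as examples.  Concretely, if the operand is &a for an int
   array a, the "*(foo *)&fooarray => fooarray[0]" arm yields a[0]; if
   the operand is the tree (double *) &c + 8 for a _Complex double c, the
   "((foo*)&complexfoo)[1] -> __imag__ complexfoo" arm yields __imag__ c,
   since TYPE_SIZE_UNIT (double) matches the offset of 8.  */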
/* Return true if CODE is an operation that when operating on signed
   integer types involves undefined behavior on overflow and the
   operation can be expressed with unsigned arithmetic.  */

bool
arith_code_with_undefined_signed_overflow (tree_code code)
{
  switch (code)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case NEGATE_EXPR:
    case POINTER_PLUS_EXPR:
      return true;
    default:
      return false;
    }
}
/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
   operation that can be transformed to unsigned arithmetic by converting
   its operand, carrying out the operation in the corresponding unsigned
   type and converting the result back to the original type.

   Returns a sequence of statements that replace STMT and also contain
   a modified form of STMT itself.  */

gimple_seq
rewrite_to_defined_overflow (gimple stmt)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "rewriting stmt with undefined signed "
	       "overflow ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
    }

  tree lhs = gimple_assign_lhs (stmt);
  tree type = unsigned_type_for (TREE_TYPE (lhs));
  gimple_seq stmts = NULL;
  for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
    {
      gimple_seq stmts2 = NULL;
      gimple_set_op (stmt, i,
		     force_gimple_operand (fold_convert (type,
							 gimple_op (stmt, i)),
					   &stmts2, true, NULL_TREE));
      gimple_seq_add_seq (&stmts, stmts2);
    }
  gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
  gimple_seq_add_stmt (&stmts, stmt);
  gimple cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
  gimple_seq_add_stmt (&stmts, cvt);

  return stmts;
}
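
/* An editorial example (not part of the original sources): for a signed
   addition

     x_3 = a_1 + b_2;

   the rewrite above produces, with an int LHS,

     a.0_4 = (unsigned int) a_1;
     b.1_5 = (unsigned int) b_2;
     x.2_6 = a.0_4 + b.1_5;
     x_3 = (int) x.2_6;

   where the temporary names are illustrative; the unsigned addition wraps
   instead of invoking undefined behavior.  */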
/* Build the expression CODE OP0 of type TYPE with location LOC,
   simplifying it first if possible using VALUEIZE if not NULL.
   OP0 is expected to be valueized already.  Returns the built
   expression value and appends statements possibly defining it
   to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum tree_code code, tree type, tree op0,
	      tree (*valueize)(tree))
{
  tree res = gimple_simplify (code, type, op0, seq, valueize);
  if (!res)
    {
      if (gimple_in_ssa_p (cfun))
	res = make_ssa_name (type);
      else
	res = create_tmp_reg (type);
      gimple stmt;
      if (code == REALPART_EXPR
	  || code == IMAGPART_EXPR
	  || code == VIEW_CONVERT_EXPR)
	stmt = gimple_build_assign (res, code, build1 (code, type, op0));
      else
	stmt = gimple_build_assign (res, code, op0);
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
/* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
   simplifying it first if possible using VALUEIZE if not NULL.
   OP0 and OP1 are expected to be valueized already.  Returns the built
   expression value and appends statements possibly defining it
   to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum tree_code code, tree type, tree op0, tree op1,
	      tree (*valueize)(tree))
{
  tree res = gimple_simplify (code, type, op0, op1, seq, valueize);
  if (!res)
    {
      if (gimple_in_ssa_p (cfun))
	res = make_ssa_name (type);
      else
	res = create_tmp_reg (type);
      gimple stmt = gimple_build_assign (res, code, op0, op1);
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
/* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
   simplifying it first if possible using VALUEIZE if not NULL.
   OP0, OP1 and OP2 are expected to be valueized already.  Returns the built
   expression value and appends statements possibly defining it
   to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum tree_code code, tree type, tree op0, tree op1, tree op2,
	      tree (*valueize)(tree))
{
  tree res = gimple_simplify (code, type, op0, op1, op2,
			      seq, valueize);
  if (!res)
    {
      if (gimple_in_ssa_p (cfun))
	res = make_ssa_name (type);
      else
	res = create_tmp_reg (type);
      gimple stmt;
      if (code == BIT_FIELD_REF)
	stmt = gimple_build_assign (res, code,
				    build3 (code, type, op0, op1, op2));
      else
	stmt = gimple_build_assign (res, code, op0, op1, op2);
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
/* Build the call FN (ARG0) with a result of type TYPE
   (or no result if TYPE is void) with location LOC,
   simplifying it first if possible using VALUEIZE if not NULL.
   ARG0 is expected to be valueized already.  Returns the built
   expression value (or NULL_TREE if TYPE is void) and appends
   statements possibly defining it to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum built_in_function fn, tree type, tree arg0,
	      tree (*valueize)(tree))
{
  tree res = gimple_simplify (fn, type, arg0, seq, valueize);
  if (!res)
    {
      tree decl = builtin_decl_implicit (fn);
      gimple stmt = gimple_build_call (decl, 1, arg0);
      if (!VOID_TYPE_P (type))
	{
	  if (gimple_in_ssa_p (cfun))
	    res = make_ssa_name (type);
	  else
	    res = create_tmp_reg (type);
	  gimple_call_set_lhs (stmt, res);
	}
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
/* Build the call FN (ARG0, ARG1) with a result of type TYPE
   (or no result if TYPE is void) with location LOC,
   simplifying it first if possible using VALUEIZE if not NULL.
   ARG0 and ARG1 are expected to be valueized already.  Returns the built
   expression value (or NULL_TREE if TYPE is void) and appends
   statements possibly defining it to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum built_in_function fn, tree type, tree arg0, tree arg1,
	      tree (*valueize)(tree))
{
  tree res = gimple_simplify (fn, type, arg0, arg1, seq, valueize);
  if (!res)
    {
      tree decl = builtin_decl_implicit (fn);
      gimple stmt = gimple_build_call (decl, 2, arg0, arg1);
      if (!VOID_TYPE_P (type))
	{
	  if (gimple_in_ssa_p (cfun))
	    res = make_ssa_name (type);
	  else
	    res = create_tmp_reg (type);
	  gimple_call_set_lhs (stmt, res);
	}
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
/* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
   (or no result if TYPE is void) with location LOC,
   simplifying it first if possible using VALUEIZE if not NULL.
   ARG0, ARG1 and ARG2 are expected to be valueized already.  Returns the
   built expression value (or NULL_TREE if TYPE is void) and appends
   statements possibly defining it to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum built_in_function fn, tree type,
	      tree arg0, tree arg1, tree arg2,
	      tree (*valueize)(tree))
{
  tree res = gimple_simplify (fn, type, arg0, arg1, arg2, seq, valueize);
  if (!res)
    {
      tree decl = builtin_decl_implicit (fn);
      gimple stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
      if (!VOID_TYPE_P (type))
	{
	  if (gimple_in_ssa_p (cfun))
	    res = make_ssa_name (type);
	  else
	    res = create_tmp_reg (type);
	  gimple_call_set_lhs (stmt, res);
	}
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
/* Build the conversion (TYPE) OP with a result of type TYPE
   with location LOC if such conversion is necessary in GIMPLE,
   simplifying it first.
   Returns the built expression value and appends
   statements possibly defining it to SEQ.  */

tree
gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
{
  if (useless_type_conversion_p (type, TREE_TYPE (op)))
    return op;
  return gimple_build (seq, loc, NOP_EXPR, type, op);
}
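
/* An editorial sketch of how these helpers compose (not from the original
   sources, and assuming the default VALUEIZE arguments declared in
   gimple-fold.h): to compute (T) (A + B) while folding on the fly, a pass
   might write

     gimple_seq stmts = NULL;
     tree sum = gimple_build (&stmts, loc, PLUS_EXPR, TREE_TYPE (a), a, b);
     tree res = gimple_convert (&stmts, loc, type, sum);
     gsi_insert_seq_before (&gsi, stmts, GSI_SAME_STMT);

   If the addition simplifies (e.g. B is zero), no statement is emitted
   for it and SUM is simply the simplified value.  */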