/* Statement simplification on GIMPLE.
   Copyright (C) 2010-2015 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.c.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "double-int.h"
#include "fold-const.h"
#include "stringpool.h"
#include "hard-reg-set.h"
#include "statistics.h"
#include "fixed-value.h"
#include "insn-config.h"
#include "stor-layout.h"
#include "dominance.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimple-expr.h"
#include "gimple-iterator.h"
#include "gimple-ssa.h"
#include "tree-ssanames.h"
#include "tree-into-ssa.h"
#include "tree-ssa-propagate.h"
#include "plugin-api.h"
#include "ipa-utils.h"
#include "gimple-pretty-print.h"
#include "tree-ssa-address.h"
#include "langhooks.h"
#include "gimplify-me.h"
#include "gimple-match.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
/* Return true when DECL can be referenced from current unit.
   FROM_DECL (if non-null) specifies the constructor of the variable
   DECL was taken from.
   We can get declarations that are not possible to reference for various
   reasons:

     1) When analyzing C++ virtual tables.
        C++ virtual tables do have known constructors even
        when they are keyed to other compilation unit.
        Those tables can contain pointers to methods and vars
        in other units.  Those methods have both STATIC and EXTERNAL
        set.
     2) In WHOPR mode devirtualization might lead to reference
        to method that was partitioned elsewhere.
        In this case we have static VAR_DECL or FUNCTION_DECL
        that has no corresponding callgraph/varpool node
        declaring the body.
     3) COMDAT functions referred by external vtables that
        we devirtualize only during final compilation stage.
        At this time we already decided that we will not output
        the function body and thus we can't reference the symbol
        directly.  */
static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  if (DECL_ABSTRACT_P (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL))
    return true;

  /* Static objects can be referred to only if they were not optimized
     out yet.  */
  if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
    {
      /* Before we start optimizing unreachable code we can be sure all
         static objects are defined.  */
      if (symtab->function_flags_ready)
        return true;
      snode = symtab_node::get (decl);
      if (!snode || !snode->definition)
        return false;
      node = dyn_cast <cgraph_node *> (snode);
      return !node || !node->global.inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from initializer of
     external var or var that has been optimized out.  */
  if (!from_decl
      || TREE_CODE (from_decl) != VAR_DECL
      || (!DECL_EXTERNAL (from_decl)
          && (vnode = varpool_node::get (from_decl)) != NULL
          && vnode->definition)
      || (flag_ltrans
          && (vnode = varpool_node::get (from_decl)) != NULL
          && vnode->in_other_partition))
    return true;
  /* We are folding a reference from an external vtable.  The vtable may
     refer to a symbol keyed to other compilation unit.  The other
     compilation unit may be in separate DSO and the symbol may be hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
    return false;
  /* When a function is public, we can always introduce a new reference.
     The exception are COMDAT functions where introducing a direct
     reference implies the need to include the function body in the
     current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;

  /* We have COMDAT.  We are going to check if we still have definition
     or if the definition is going to be output in other partition.
     Bypass this when gimplifying; all needed functions will be produced.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting to not necessarily give up because the copy will be
     output elsewhere when the corresponding vtable is output.
     This is however not possible - the ABI specifies that COMDATs are output
     in units where they are used and when the other unit was compiled with
     LTO it is possible that the vtable was kept public while the function
     itself was privatized.  */
  if (!symtab->function_flags_ready)
    return true;

  snode = symtab_node::get (decl);
  if (!snode
      || ((!snode->definition || DECL_EXTERNAL (decl))
          && (!snode->in_other_partition
              || (!snode->forced_by_abi && !snode->force_output))))
    return false;
  node = dyn_cast <cgraph_node *> (snode);
  return !node || !node->global.inlined_to;
}
/* CVAL is value taken from DECL_INITIAL of variable.  Try to transform it into
   acceptable form for is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specifies the variable whose constructor
   contains CVAL.  */

tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  tree orig_cval = cval;
  STRIP_NOPS (cval);
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
        cval = build1_loc (EXPR_LOCATION (cval),
                           ADDR_EXPR, TREE_TYPE (ptr),
                           fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
                                        ptr,
                                        fold_convert (ptr_type_node,
                                                      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
        {
          base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
          if (base)
            TREE_OPERAND (cval, 0) = base;
        }
      else
        base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
        return NULL_TREE;

      if ((TREE_CODE (base) == VAR_DECL
           || TREE_CODE (base) == FUNCTION_DECL)
          && !can_refer_decl_in_current_unit_p (base, from_decl))
        return NULL_TREE;
      if (TREE_CODE (base) == VAR_DECL)
        TREE_ADDRESSABLE (base) = 1;
      else if (TREE_CODE (base) == FUNCTION_DECL)
        {
          /* Make sure we create a cgraph node for functions we'll reference.
             They can be non-existent if the reference comes from an entry
             of an external vtable for example.  */
          cgraph_node::get_create (base);
        }
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
        cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
        cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  if (TREE_OVERFLOW_P (cval))
    return drop_tree_overflow (cval);
  return orig_cval;
}
/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

tree
get_symbol_constant_value (tree sym)
{
  tree val = ctor_for_folding (sym);
  if (val != error_mark_node)
    {
      if (val)
        {
          val = canonicalize_constructor_val (unshare_expr (val), sym);
          if (val && is_gimple_min_invariant (val))
            return val;
          else
            return NULL_TREE;
        }
      /* Variables declared 'const' without an initializer
         have zero as the initializer if they may not be
         overridden at link or run time.  */
      if (!val
          && is_gimple_reg_type (TREE_TYPE (sym)))
        return build_zero_cst (TREE_TYPE (sym));
    }

  return NULL_TREE;
}
/* Subroutine of fold_stmt.  We perform several simplifications of the
   memory reference tree EXPR and make sure to re-gimplify them properly
   after propagation of constant addresses.  IS_LHS is true if the
   reference is supposed to be an lvalue.  */

static tree
maybe_fold_reference (tree expr, bool is_lhs)
{
  tree result;

  if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
       || TREE_CODE (expr) == REALPART_EXPR
       || TREE_CODE (expr) == IMAGPART_EXPR)
      && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_unary_loc (EXPR_LOCATION (expr),
                           TREE_CODE (expr),
                           TREE_TYPE (expr),
                           TREE_OPERAND (expr, 0));
  else if (TREE_CODE (expr) == BIT_FIELD_REF
           && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_ternary_loc (EXPR_LOCATION (expr),
                             TREE_CODE (expr),
                             TREE_TYPE (expr),
                             TREE_OPERAND (expr, 0),
                             TREE_OPERAND (expr, 1),
                             TREE_OPERAND (expr, 2));

  if (!is_lhs
      && (result = fold_const_aggregate_ref (expr))
      && is_gimple_min_invariant (result))
    return result;

  return NULL_TREE;
}
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
        tree rhs = gimple_assign_rhs1 (stmt);

        if (TREE_CLOBBER_P (rhs))
          return NULL_TREE;

        if (REFERENCE_CLASS_P (rhs))
          return maybe_fold_reference (rhs, false);

        else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
          {
            tree val = OBJ_TYPE_REF_EXPR (rhs);
            if (is_gimple_min_invariant (val))
              return val;
            else if (flag_devirtualize && virtual_method_call_p (rhs))
              {
                bool final;
                vec <cgraph_node *>targets
                  = possible_polymorphic_call_targets (rhs, stmt, &final);
                if (final && targets.length () <= 1 && dbg_cnt (devirt))
                  {
                    if (dump_enabled_p ())
                      {
                        location_t loc = gimple_location_safe (stmt);
                        dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
                                         "resolving virtual function address "
                                         "reference to function %s\n",
                                         targets.length () == 1
                                         ? targets[0]->name ()
                                         : "NULL");
                      }
                    if (targets.length () == 1)
                      {
                        val = fold_convert (TREE_TYPE (val),
                                            build_fold_addr_expr_loc
                                              (loc, targets[0]->decl));
                        STRIP_USELESS_TYPE_CONVERSION (val);
                      }
                    else
                      /* We cannot use __builtin_unreachable here because it
                         cannot have its address taken.  */
                      val = build_int_cst (TREE_TYPE (val), 0);
                    return val;
                  }
              }
          }
        else if (TREE_CODE (rhs) == ADDR_EXPR)
          {
            tree ref = TREE_OPERAND (rhs, 0);
            tree tem = maybe_fold_reference (ref, true);
            if (tem
                && TREE_CODE (tem) == MEM_REF
                && integer_zerop (TREE_OPERAND (tem, 1)))
              result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
            else if (tem)
              result = fold_convert (TREE_TYPE (rhs),
                                     build_fold_addr_expr_loc (loc, tem));
            else if (TREE_CODE (ref) == MEM_REF
                     && integer_zerop (TREE_OPERAND (ref, 1)))
              result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
          }

        else if (TREE_CODE (rhs) == CONSTRUCTOR
                 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
                 && (CONSTRUCTOR_NELTS (rhs)
                     == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
          {
            /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
            unsigned i;
            tree val;

            FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
              if (TREE_CODE (val) != INTEGER_CST
                  && TREE_CODE (val) != REAL_CST
                  && TREE_CODE (val) != FIXED_CST)
                return NULL_TREE;

            return build_vector_from_ctor (TREE_TYPE (rhs),
                                           CONSTRUCTOR_ELTS (rhs));
          }

        else if (DECL_P (rhs))
          return get_symbol_constant_value (rhs);

        /* If we couldn't fold the RHS, hand over to the generic
           fold routines.  */
        if (result == NULL_TREE)
          result = fold (rhs);

        /* Strip away useless type conversions.  Both the NON_LVALUE_EXPR
           that may have been added by fold, and "useless" type
           conversions that might now be apparent due to propagation.  */
        STRIP_USELESS_TYPE_CONVERSION (result);

        if (result != rhs && valid_gimple_rhs_p (result))
          return result;

        return NULL_TREE;
      }
      break;
    case GIMPLE_UNARY_RHS:
      break;

    case GIMPLE_BINARY_RHS:
      /* Try to canonicalize for boolean-typed X the comparisons
         X == 0, X == 1, X != 0, and X != 1.  */
      if (gimple_assign_rhs_code (stmt) == EQ_EXPR
          || gimple_assign_rhs_code (stmt) == NE_EXPR)
        {
          tree lhs = gimple_assign_lhs (stmt);
          tree op1 = gimple_assign_rhs1 (stmt);
          tree op2 = gimple_assign_rhs2 (stmt);
          tree type = TREE_TYPE (op1);

          /* Check whether the comparison operands are of the same boolean
             type as the result type is.
             Check that second operand is an integer-constant with value
             one or zero.  */
          if (TREE_CODE (op2) == INTEGER_CST
              && (integer_zerop (op2) || integer_onep (op2))
              && useless_type_conversion_p (TREE_TYPE (lhs), type))
            {
              enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
              bool is_logical_not = false;

              /* X == 0 and X != 1 is a logical-not of X,
                 X == 1 and X != 0 is X.  */
              if ((cmp_code == EQ_EXPR && integer_zerop (op2))
                  || (cmp_code == NE_EXPR && integer_onep (op2)))
                is_logical_not = true;

              if (is_logical_not == false)
                result = op1;
              /* Only for one-bit precision typed X the transformation
                 !X -> ~X is valid.  */
              else if (TYPE_PRECISION (type) == 1)
                result = build1_loc (gimple_location (stmt), BIT_NOT_EXPR,
                                     type, op1);
              /* Otherwise we use !X -> X ^ 1.  */
              else
                result = build2_loc (gimple_location (stmt), BIT_XOR_EXPR,
                                     type, op1, build_int_cst (type, 1));
            }
        }

      if (!result)
        result = fold_binary_loc (loc, subcode,
                                  TREE_TYPE (gimple_assign_lhs (stmt)),
                                  gimple_assign_rhs1 (stmt),
                                  gimple_assign_rhs2 (stmt));

      if (result)
        {
          STRIP_USELESS_TYPE_CONVERSION (result);
          if (valid_gimple_rhs_p (result))
            return result;
        }
      break;
    case GIMPLE_TERNARY_RHS:
      /* Try to fold a conditional expression.  */
      if (gimple_assign_rhs_code (stmt) == COND_EXPR)
        {
          tree op0 = gimple_assign_rhs1 (stmt);
          tree tem;
          bool set = false;
          location_t cond_loc = gimple_location (stmt);

          if (COMPARISON_CLASS_P (op0))
            {
              fold_defer_overflow_warnings ();
              tem = fold_binary_loc (cond_loc,
                                     TREE_CODE (op0), TREE_TYPE (op0),
                                     TREE_OPERAND (op0, 0),
                                     TREE_OPERAND (op0, 1));
              /* This is actually a conditional expression, not a GIMPLE
                 conditional statement, however, the valid_gimple_rhs_p
                 test still applies.  */
              set = (tem && is_gimple_condexpr (tem)
                     && valid_gimple_rhs_p (tem));
              fold_undefer_overflow_warnings (set, stmt, 0);
            }
          else if (is_gimple_min_invariant (op0))
            {
              tem = op0;
              set = true;
            }
          else
            return NULL_TREE;

          if (set)
            result = fold_build3_loc (cond_loc, COND_EXPR,
                                      TREE_TYPE (gimple_assign_lhs (stmt)), tem,
                                      gimple_assign_rhs2 (stmt),
                                      gimple_assign_rhs3 (stmt));
        }

      if (!result)
        result = fold_ternary_loc (loc, subcode,
                                   TREE_TYPE (gimple_assign_lhs (stmt)),
                                   gimple_assign_rhs1 (stmt),
                                   gimple_assign_rhs2 (stmt),
                                   gimple_assign_rhs3 (stmt));

      if (result)
        {
          STRIP_USELESS_TYPE_CONVERSION (result);
          if (valid_gimple_rhs_p (result))
            return result;
        }
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
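
/* Illustrative example of the boolean canonicalization above (an
   editor's sketch, not part of the original source): for a
   boolean-typed X,

     tmp = (X == 0);   becomes   tmp = ~X;      (one-bit precision type)
     tmp = (X != 0);   becomes   tmp = X;

   while for a wider boolean-typed X the logical-not form becomes
   tmp = X ^ 1 instead.  */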
/* Attempt to fold a conditional statement.  Return true if any changes were
   made.  We only attempt to fold the condition expression, and do not perform
   any transformation that would require alteration of the cfg.  It is
   assumed that the operands have been previously folded.  */

static bool
fold_gimple_cond (gcond *stmt)
{
  tree result = fold_binary_loc (gimple_location (stmt),
                                 gimple_cond_code (stmt),
                                 boolean_type_node,
                                 gimple_cond_lhs (stmt),
                                 gimple_cond_rhs (stmt));

  if (result)
    {
      STRIP_USELESS_TYPE_CONVERSION (result);
      if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
        {
          gimple_cond_set_condition_from_tree (stmt, result);
          return true;
        }
    }

  return false;
}
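
/* Illustrative example (an editor's sketch, not part of the original
   source): after constant propagation a condition such as

     if (_1 > _2)     where _1 = 3 and _2 = 1

   folds via fold_binary_loc to a constant, and
   gimple_cond_set_condition_from_tree rewrites the gcond's code and
   operands accordingly, so later cfg cleanup can remove the dead arm.  */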
/* Replace a statement at *SI_P with a sequence of statements in STMTS,
   adjusting the replacement stmts location and virtual operands.
   If the statement has a lhs the last stmt in the sequence is expected
   to assign to that lhs.  */

static void
gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
{
  gimple stmt = gsi_stmt (*si_p);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  gimple laststore = NULL;
  for (gimple_stmt_iterator i = gsi_last (stmts);
       !gsi_end_p (i); gsi_prev (&i))
    {
      gimple new_stmt = gsi_stmt (i);
      if ((gimple_assign_single_p (new_stmt)
           && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
          || (is_gimple_call (new_stmt)
              && (gimple_call_flags (new_stmt)
                  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
        {
          tree vdef;
          if (!laststore)
            vdef = gimple_vdef (stmt);
          else
            vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
          gimple_set_vdef (new_stmt, vdef);
          if (vdef && TREE_CODE (vdef) == SSA_NAME)
            SSA_NAME_DEF_STMT (vdef) = new_stmt;
          laststore = new_stmt;
        }
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  tree reaching_vuse = gimple_vuse (stmt);
  for (gimple_stmt_iterator i = gsi_start (stmts);
       !gsi_end_p (i); gsi_next (&i))
    {
      gimple new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with exact SSA
         name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
        gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
        reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store, release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
          && TREE_CODE (vdef) == SSA_NAME)
        {
          unlink_stmt_vdef (stmt);
          release_ssa_name (vdef);
        }
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL,
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple stmt, new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  push_gimplify_context (gimple_in_ssa_p (cfun));

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
         which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
        {
          pop_gimplify_context (NULL);
          if (gimple_in_ssa_p (cfun))
            {
              unlink_stmt_vdef (stmt);
              release_defs (stmt);
            }
          gsi_replace (si_p, gimple_build_nop (), true);
          return;
        }
    }
  else
    {
      tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
                                       GSI_CONTINUE_LINKING);
    }

  pop_gimplify_context (NULL);

  gsi_replace_with_seq_vops (si_p, stmts);
}
/* Replace the call at *GSI with the gimple value VAL.  */

static void
replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
{
  gimple stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  gimple repl;
  if (lhs)
    {
      if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
        val = fold_convert (TREE_TYPE (lhs), val);
      repl = gimple_build_assign (lhs, val);
    }
  else
    repl = gimple_build_nop ();
  tree vdef = gimple_vdef (stmt);
  if (vdef && TREE_CODE (vdef) == SSA_NAME)
    {
      unlink_stmt_vdef (stmt);
      release_ssa_name (vdef);
    }
  gsi_replace (gsi, repl, true);
}
/* Replace the call at *GSI with the new call REPL and fold that
   again.  */

static void
replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple repl)
{
  gimple stmt = gsi_stmt (*gsi);
  gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
  gimple_set_location (repl, gimple_location (stmt));
  if (gimple_vdef (stmt)
      && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
    {
      gimple_set_vdef (repl, gimple_vdef (stmt));
      gimple_set_vuse (repl, gimple_vuse (stmt));
      SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
    }
  gsi_replace (gsi, repl, true);
  fold_stmt (gsi);
}
/* Return true if VAR is a VAR_DECL or a component thereof.  */

static bool
var_decl_component_p (tree var)
{
  tree inner = var;
  while (handled_component_p (inner))
    inner = TREE_OPERAND (inner, 0);
  return SSA_VAR_P (inner);
}
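
/* Illustrative example (an editor's sketch, not part of the original
   source): for VAR == a.b.c[2] the loop above strips the COMPONENT_REFs
   and the ARRAY_REF down to the base 'a'; the predicate is then true
   when that base is a VAR_DECL, PARM_DECL, RESULT_DECL or an SSA name.  */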
/* Fold function call to builtin mem{{,p}cpy,move}.  Return
   NULL_TREE if no simplification can be made.
   If ENDP is 0, return DEST (like memcpy).
   If ENDP is 1, return DEST+LEN (like mempcpy).
   If ENDP is 2, return DEST+LEN-1 (like stpcpy).
   If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
   (memmove).  */

static bool
gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
                               tree dest, tree src, int endp)
{
  gimple stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  tree len = gimple_call_arg (stmt, 2);
  tree destvar, srcvar;
  location_t loc = gimple_location (stmt);

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      gimple repl;
      if (gimple_call_lhs (stmt))
        repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
      else
        repl = gimple_build_nop ();
      tree vdef = gimple_vdef (stmt);
      if (vdef && TREE_CODE (vdef) == SSA_NAME)
        {
          unlink_stmt_vdef (stmt);
          release_ssa_name (vdef);
        }
      gsi_replace (gsi, repl, true);
      return true;
    }

  /* If SRC and DEST are the same (and not volatile), return
     DEST{,+LEN,+LEN-1}.  */
  if (operand_equal_p (src, dest, 0))
    {
      unlink_stmt_vdef (stmt);
      if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
        release_ssa_name (gimple_vdef (stmt));
      if (!lhs)
        {
          gsi_replace (gsi, gimple_build_nop (), true);
          return true;
        }
      goto done;
    }
  else
    {
      tree srctype, desttype;
      unsigned int src_align, dest_align;
      tree off0;

      /* Build accesses at offset zero with a ref-all character type.  */
      off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
                                                         ptr_mode, true), 0);

      /* If we can perform the copy efficiently with first doing all loads
         and then all stores inline it that way.  Currently efficiently
         means that we can load all the memory into a single integer
         register which is what MOVE_MAX gives us.  */
      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (tree_fits_uhwi_p (len)
          && compare_tree_int (len, MOVE_MAX) <= 0
          /* ???  Don't transform copies from strings with known length,
             as this confuses tree-ssa-strlen.c.  This doesn't handle
             the case in gcc.dg/strlenopt-8.c which is XFAILed for that
             reason.  */
          && !c_strlen (src, 2))
        {
          unsigned ilen = tree_to_uhwi (len);
          if (exact_log2 (ilen) != -1)
            {
              tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
              if (type
                  && TYPE_MODE (type) != BLKmode
                  && (GET_MODE_SIZE (TYPE_MODE (type)) * BITS_PER_UNIT
                      == ilen * 8)
                  /* If the destination pointer is not aligned we must be able
                     to emit an unaligned store.  */
                  && (dest_align >= GET_MODE_ALIGNMENT (TYPE_MODE (type))
                      || !SLOW_UNALIGNED_ACCESS (TYPE_MODE (type),
                                                 dest_align)))
                {
                  tree srctype = type;
                  tree desttype = type;
                  if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
                    srctype = build_aligned_type (type, src_align);
                  tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
                  tree tem = fold_const_aggregate_ref (srcmem);
                  if (tem)
                    srcmem = tem;
                  else if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type))
                           && SLOW_UNALIGNED_ACCESS (TYPE_MODE (type),
                                                     src_align))
                    srcmem = NULL_TREE;
                  if (srcmem)
                    {
                      gimple new_stmt;
                      if (is_gimple_reg_type (TREE_TYPE (srcmem)))
                        {
                          new_stmt = gimple_build_assign (NULL_TREE, srcmem);
                          if (gimple_in_ssa_p (cfun))
                            srcmem = make_ssa_name (TREE_TYPE (srcmem),
                                                    new_stmt);
                          else
                            srcmem = create_tmp_reg (TREE_TYPE (srcmem));
                          gimple_assign_set_lhs (new_stmt, srcmem);
                          gimple_set_vuse (new_stmt, gimple_vuse (stmt));
                          gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
                        }
                      if (dest_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
                        desttype = build_aligned_type (type, dest_align);
                      new_stmt
                        = gimple_build_assign (fold_build2 (MEM_REF, desttype,
                                                            dest, off0),
                                               srcmem);
                      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
                      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
                      if (gimple_vdef (new_stmt)
                          && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
                        SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
                      if (!lhs)
                        {
                          gsi_replace (gsi, new_stmt, true);
                          return true;
                        }
                      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
                      goto done;
                    }
                }
            }
        }

      if (endp == 3)
        {
          /* Both DEST and SRC must be pointer types.
             ??? This is what old code did.  Is the testing for pointer types
             really mandatory?

             If either SRC is readonly or length is 1, we can use memcpy.  */
          if (!dest_align || !src_align)
            return false;
          if (readonly_data_expr (src)
              || (tree_fits_uhwi_p (len)
                  && (MIN (src_align, dest_align) / BITS_PER_UNIT
                      >= tree_to_uhwi (len))))
            {
              tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
              if (!fn)
                return false;
              gimple_call_set_fndecl (stmt, fn);
              gimple_call_set_arg (stmt, 0, dest);
              gimple_call_set_arg (stmt, 1, src);
              fold_stmt (gsi);
              return true;
            }

          /* If *src and *dest can't overlap, optimize into memcpy as well.  */
          if (TREE_CODE (src) == ADDR_EXPR
              && TREE_CODE (dest) == ADDR_EXPR)
            {
              tree src_base, dest_base, fn;
              HOST_WIDE_INT src_offset = 0, dest_offset = 0;
              HOST_WIDE_INT size = -1;
              HOST_WIDE_INT maxsize = -1;

              srcvar = TREE_OPERAND (src, 0);
              src_base = get_ref_base_and_extent (srcvar, &src_offset,
                                                  &size, &maxsize);
              destvar = TREE_OPERAND (dest, 0);
              dest_base = get_ref_base_and_extent (destvar, &dest_offset,
                                                   &size, &maxsize);
              if (tree_fits_uhwi_p (len))
                maxsize = tree_to_uhwi (len);
              else
                maxsize = -1;
              src_offset /= BITS_PER_UNIT;
              dest_offset /= BITS_PER_UNIT;
              if (SSA_VAR_P (src_base)
                  && SSA_VAR_P (dest_base))
                {
                  if (operand_equal_p (src_base, dest_base, 0)
                      && ranges_overlap_p (src_offset, maxsize,
                                           dest_offset, maxsize))
                    return false;
                }
              else if (TREE_CODE (src_base) == MEM_REF
                       && TREE_CODE (dest_base) == MEM_REF)
                {
                  if (! operand_equal_p (TREE_OPERAND (src_base, 0),
                                         TREE_OPERAND (dest_base, 0), 0))
                    return false;
                  offset_int off = mem_ref_offset (src_base) + src_offset;
                  if (!wi::fits_shwi_p (off))
                    return false;
                  src_offset = off.to_shwi ();

                  off = mem_ref_offset (dest_base) + dest_offset;
                  if (!wi::fits_shwi_p (off))
                    return false;
                  dest_offset = off.to_shwi ();
                  if (ranges_overlap_p (src_offset, maxsize,
                                        dest_offset, maxsize))
                    return false;
                }
              else
                return false;

              fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
              if (!fn)
                return false;
              gimple_call_set_fndecl (stmt, fn);
              gimple_call_set_arg (stmt, 0, dest);
              gimple_call_set_arg (stmt, 1, src);
              fold_stmt (gsi);
              return true;
            }

          /* If the destination and source do not alias optimize into
             memcpy as well.  */
          if ((is_gimple_min_invariant (dest)
               || TREE_CODE (dest) == SSA_NAME)
              && (is_gimple_min_invariant (src)
                  || TREE_CODE (src) == SSA_NAME))
            {
              ao_ref destr, srcr;
              ao_ref_init_from_ptr_and_size (&destr, dest, len);
              ao_ref_init_from_ptr_and_size (&srcr, src, len);
              if (!refs_may_alias_p_1 (&destr, &srcr, false))
                {
                  tree fn;
                  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
                  if (!fn)
                    return false;
                  gimple_call_set_fndecl (stmt, fn);
                  gimple_call_set_arg (stmt, 0, dest);
                  gimple_call_set_arg (stmt, 1, src);
                  fold_stmt (gsi);
                  return true;
                }
            }

          return false;
        }

      if (!tree_fits_shwi_p (len))
        return false;
      /* FIXME:
         This logic loses for arguments like (type *)malloc (sizeof (type)),
         since we strip the casts of up to VOID return value from malloc.
         Perhaps we ought to inherit type from non-VOID argument here?  */
      STRIP_NOPS (src);
      STRIP_NOPS (dest);
      if (!POINTER_TYPE_P (TREE_TYPE (src))
          || !POINTER_TYPE_P (TREE_TYPE (dest)))
        return false;
      /* In the following try to find a type that is most natural to be
         used for the memcpy source and destination and that allows
         the most optimization when memcpy is turned into a plain assignment
         using that type.  In theory we could always use a char[len] type
         but that only gains us that the destination and source possibly
         no longer will have their address taken.  */
      /* As we fold (void *)(p + CST) to (void *)p + CST undo this here.  */
      if (TREE_CODE (src) == POINTER_PLUS_EXPR)
        {
          tree tem = TREE_OPERAND (src, 0);
          STRIP_NOPS (tem);
          if (tem != TREE_OPERAND (src, 0))
            src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
        }
      if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
        {
          tree tem = TREE_OPERAND (dest, 0);
          STRIP_NOPS (tem);
          if (tem != TREE_OPERAND (dest, 0))
            dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
        }
      srctype = TREE_TYPE (TREE_TYPE (src));
      if (TREE_CODE (srctype) == ARRAY_TYPE
          && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
        {
          srctype = TREE_TYPE (srctype);
          STRIP_NOPS (src);
          src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
        }
      desttype = TREE_TYPE (TREE_TYPE (dest));
      if (TREE_CODE (desttype) == ARRAY_TYPE
          && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
        {
          desttype = TREE_TYPE (desttype);
          STRIP_NOPS (dest);
          dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
        }
      if (TREE_ADDRESSABLE (srctype)
          || TREE_ADDRESSABLE (desttype))
        return false;

      /* Make sure we are not copying using a floating-point mode or
         a type whose size possibly does not match its precision.  */
      if (FLOAT_MODE_P (TYPE_MODE (desttype))
          || TREE_CODE (desttype) == BOOLEAN_TYPE
          || TREE_CODE (desttype) == ENUMERAL_TYPE)
        desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
      if (FLOAT_MODE_P (TYPE_MODE (srctype))
          || TREE_CODE (srctype) == BOOLEAN_TYPE
          || TREE_CODE (srctype) == ENUMERAL_TYPE)
        srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
      if (!srctype)
        srctype = desttype;
      if (!desttype)
        desttype = srctype;
      if (!srctype)
        return false;

      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (dest_align < TYPE_ALIGN (desttype)
          || src_align < TYPE_ALIGN (srctype))
        return false;

      destvar = dest;
      STRIP_NOPS (destvar);
      if (TREE_CODE (destvar) == ADDR_EXPR
          && var_decl_component_p (TREE_OPERAND (destvar, 0))
          && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
        destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
      else
        destvar = NULL_TREE;

      srcvar = src;
      STRIP_NOPS (srcvar);
      if (TREE_CODE (srcvar) == ADDR_EXPR
          && var_decl_component_p (TREE_OPERAND (srcvar, 0))
          && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
        {
          if (!destvar
              || src_align >= TYPE_ALIGN (desttype))
            srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
                                  srcvar, off0);
          else if (!STRICT_ALIGNMENT)
            {
              srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
                                            src_align);
              srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
            }
          else
            srcvar = NULL_TREE;
        }
      else
        srcvar = NULL_TREE;

      if (srcvar == NULL_TREE && destvar == NULL_TREE)
        return false;

      if (srcvar == NULL_TREE)
        {
          STRIP_NOPS (src);
          if (src_align >= TYPE_ALIGN (desttype))
            srcvar = fold_build2 (MEM_REF, desttype, src, off0);
          else
            {
              if (STRICT_ALIGNMENT)
                return false;
              srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
                                            src_align);
              srcvar = fold_build2 (MEM_REF, srctype, src, off0);
            }
        }
      else if (destvar == NULL_TREE)
        {
          STRIP_NOPS (dest);
          if (dest_align >= TYPE_ALIGN (srctype))
            destvar = fold_build2 (MEM_REF, srctype, dest, off0);
          else
            {
              if (STRICT_ALIGNMENT)
                return false;
              desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
                                             dest_align);
              destvar = fold_build2 (MEM_REF, desttype, dest, off0);
            }
        }

      gimple new_stmt;
      if (is_gimple_reg_type (TREE_TYPE (srcvar)))
        {
          new_stmt = gimple_build_assign (NULL_TREE, srcvar);
          if (gimple_in_ssa_p (cfun))
            srcvar = make_ssa_name (TREE_TYPE (srcvar), new_stmt);
          else
            srcvar = create_tmp_reg (TREE_TYPE (srcvar));
          gimple_assign_set_lhs (new_stmt, srcvar);
          gimple_set_vuse (new_stmt, gimple_vuse (stmt));
          gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
        }
      new_stmt = gimple_build_assign (destvar, srcvar);
      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
      if (gimple_vdef (new_stmt)
          && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
        SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
      if (!lhs)
        {
          gsi_replace (gsi, new_stmt, true);
          return true;
        }
      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
    }

done:
  if (endp == 0 || endp == 3)
    len = NULL_TREE;
  else if (endp == 2)
    len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
                           ssize_int (1));
  if (endp == 2 || endp == 1)
    dest = fold_build_pointer_plus_loc (loc, dest, len);

  dest = force_gimple_operand_gsi (gsi, dest, false, NULL_TREE, true,
                                   GSI_SAME_STMT);
  gimple repl = gimple_build_assign (lhs, dest);
  gsi_replace (gsi, repl, true);
  return true;
}
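
/* Illustrative example (an editor's sketch, not part of the original
   source): on a target where MOVE_MAX >= 4, a call

     __builtin_memcpy (&d, &s, 4);

   is folded by the code above into a single load/store pair using a
   4-byte integer type:

     _1 = MEM_REF <unsigned int> [&s];
     MEM_REF <unsigned int> [&d] = _1;

   with the MEM_REF types wrapped by build_aligned_type when the
   pointers are less aligned than the mode requires.  */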
/* Fold function call to builtin memset or bzero at *GSI setting the
   memory of size LEN to VAL.  Return whether a simplification was made.  */

static bool
gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
{
  gimple stmt = gsi_stmt (*gsi);
  tree etype;
  unsigned HOST_WIDE_INT length, cval;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
      return true;
    }

  if (! tree_fits_uhwi_p (len))
    return false;

  if (TREE_CODE (c) != INTEGER_CST)
    return false;

  tree dest = gimple_call_arg (stmt, 0);
  tree var = dest;
  if (TREE_CODE (var) != ADDR_EXPR)
    return false;

  var = TREE_OPERAND (var, 0);
  if (TREE_THIS_VOLATILE (var))
    return false;

  etype = TREE_TYPE (var);
  if (TREE_CODE (etype) == ARRAY_TYPE)
    etype = TREE_TYPE (etype);

  if (!INTEGRAL_TYPE_P (etype)
      && !POINTER_TYPE_P (etype))
    return false;

  if (! var_decl_component_p (var))
    return false;

  length = tree_to_uhwi (len);
  if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
      || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
    return false;

  if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
    return false;

  if (integer_zerop (c))
    cval = 0;
  else
    {
      if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
        return false;

      cval = TREE_INT_CST_LOW (c);
      cval &= 0xff;
      cval |= cval << 8;
      cval |= cval << 16;
      cval |= (cval << 31) << 1;
    }

  var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
  gimple store = gimple_build_assign (var, build_int_cst_type (etype, cval));
  gimple_set_vuse (store, gimple_vuse (stmt));
  tree vdef = gimple_vdef (stmt);
  if (vdef && TREE_CODE (vdef) == SSA_NAME)
    {
      gimple_set_vdef (store, gimple_vdef (stmt));
      SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
    }
  gsi_insert_before (gsi, store, GSI_SAME_STMT);
  if (gimple_call_lhs (stmt))
    {
      gimple asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
      gsi_replace (gsi, asgn, true);
    }
  else
    {
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (gsi);
      gsi_remove (&gsi2, true);
    }

  return true;
}
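
/* Illustrative example of the byte replication above (an editor's
   sketch, not part of the original source): for

     __builtin_memset (&x, 0xab, 4);

   with a 4-byte integral X, cval grows from 0xab to 0xabab to
   0xabababab through the shift-or cascade, and the call becomes the
   single store

     MEM_REF <unsigned int> [&x] = 0xabababab;  */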
/* Return the string length, maximum string length or maximum value of
   ARG in LENGTH.
   If ARG is an SSA name variable, follow its use-def chains.  If LENGTH
   is not NULL and, for TYPE == 0, its value is not equal to the length
   we determine or if we are unable to determine the length or value,
   return false.  VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be returned, 1 for maximum string
   length and 2 for maximum value ARG can have.  */

static bool
get_maxval_strlen (tree arg, tree *length, bitmap *visited, int type)
{
  tree var, val;
  gimple def_stmt;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      if (TREE_CODE (arg) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
          && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
        {
          tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
          if (TREE_CODE (aop0) == INDIRECT_REF
              && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
            return get_maxval_strlen (TREE_OPERAND (aop0, 0),
                                      length, visited, type);
        }

      if (type == 2)
        {
          val = arg;
          if (TREE_CODE (val) != INTEGER_CST
              || tree_int_cst_sgn (val) < 0)
            return false;
        }
      else
        val = c_strlen (arg, 1);
      if (!val)
        return false;

      if (*length)
        {
          if (type > 0)
            {
              if (TREE_CODE (*length) != INTEGER_CST
                  || TREE_CODE (val) != INTEGER_CST)
                return false;

              if (tree_int_cst_lt (*length, val))
                *length = val;
              return true;
            }
          else if (simple_cst_equal (val, *length) != 1)
            return false;
        }

      *length = val;
      return true;
    }

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  */
  if (!*visited)
    *visited = BITMAP_ALLOC (NULL);
  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
      case GIMPLE_ASSIGN:
        /* The RHS of the statement defining VAR must either have a
           constant length or come from another SSA_NAME with a constant
           length.  */
        if (gimple_assign_single_p (def_stmt)
            || gimple_assign_unary_nop_p (def_stmt))
          {
            tree rhs = gimple_assign_rhs1 (def_stmt);
            return get_maxval_strlen (rhs, length, visited, type);
          }
        else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
          {
            tree op2 = gimple_assign_rhs2 (def_stmt);
            tree op3 = gimple_assign_rhs3 (def_stmt);
            return get_maxval_strlen (op2, length, visited, type)
                   && get_maxval_strlen (op3, length, visited, type);
          }
        return false;

      case GIMPLE_PHI:
        {
          /* All the arguments of the PHI node must have the same constant
             length.  */
          unsigned i;

          for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
            {
              tree arg = gimple_phi_arg (def_stmt, i)->def;

              /* If this PHI has itself as an argument, we cannot
                 determine the string length of this argument.  However,
                 if we can find a constant string length for the other
                 PHI args then we can still be sure that this is a
                 constant string length.  So be optimistic and just
                 continue with the next argument.  */
              if (arg == gimple_phi_result (def_stmt))
                continue;

              if (!get_maxval_strlen (arg, length, visited, type))
                return false;
            }
        }
        return true;

      default:
        return false;
    }
}

tree
get_maxval_strlen (tree arg, int type)
{
  bitmap visited = NULL;
  tree len = NULL_TREE;
  if (!get_maxval_strlen (arg, &len, &visited, type))
    len = NULL_TREE;
  if (visited)
    BITMAP_FREE (visited);

  return len;
}
/* Fold function call to builtin strcpy with arguments DEST and SRC.
   If LEN is not NULL, it represents the length of the string to be
   copied.  Return false if no simplification can be made.  */

static bool
gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
                            tree dest, tree src)
{
  location_t loc = gimple_location (gsi_stmt (*gsi));
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (operand_equal_p (src, dest, 0))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (optimize_function_for_size_p (cfun))
    return false;

  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  tree len = get_maxval_strlen (src, 0);
  if (!len)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
  len = force_gimple_operand_gsi (gsi, len, true,
                                  NULL_TREE, true, GSI_SAME_STMT);
  gimple repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
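
/* Illustrative example (an editor's sketch, not part of the original
   source): when the source length is known, strcpy (d, "abc") becomes
   memcpy (d, "abc", 4), where 4 is strlen ("abc") plus 1 for the
   terminating NUL.  */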
/* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
   If SLEN is not NULL, it represents the length of the source string.
   Return false if no simplification can be made.  */

static bool
gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
                             tree dest, tree src, tree len)
{
  location_t loc = gimple_location (gsi_stmt (*gsi));
  tree fn;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  /* We can't compare slen with len as constants below if len is not a
     constant.  */
  if (TREE_CODE (len) != INTEGER_CST)
    return false;

  /* Now, we must be passed a constant src ptr parameter.  */
  tree slen = get_maxval_strlen (src, 0);
  if (!slen || TREE_CODE (slen) != INTEGER_CST)
    return false;

  slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

  /* We do not support simplification of this case, though we do
     support it when expanding trees into RTL.  */
  /* FIXME: generate a call to __builtin_memset.  */
  if (tree_int_cst_lt (slen, len))
    return false;

  /* OK transform into builtin memcpy.  */
  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = force_gimple_operand_gsi (gsi, len, true,
                                  NULL_TREE, true, GSI_SAME_STMT);
  gimple repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
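
/* Illustrative example (an editor's sketch, not part of the original
   source): strncpy (d, "ab", 8) is NOT folded here, because
   SLEN + 1 == 3 is less than LEN == 8 and the required zero-padding
   would be lost; strncpy (d, "abcdef", 3) is folded to
   memcpy (d, "abcdef", 3).  */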
/* Simplify a call to the strcat builtin.  DST and SRC are the arguments
   to the call.

   Return false if no simplification was possible.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static bool
gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);

  const char *p = c_getstr (src);

  /* If the string length is zero, return the dst parameter.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
    return false;

  /* See if we can store by pieces into (dst + strlen(dst)).  */
  tree newdst;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
  tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);

  if (!strlen_fn || !memcpy_fn)
    return false;

  /* If the length of the source string isn't computable don't
     split strcat into strlen and memcpy.  */
  tree len = get_maxval_strlen (src, 0);
  if (! len)
    return false;

  /* Create strlen (dst).  */
  gimple_seq stmts = NULL, stmts2;
  gimple repl = gimple_build_call (strlen_fn, 1, dst);
  gimple_set_location (repl, loc);
  if (gimple_in_ssa_p (cfun))
    newdst = make_ssa_name (size_type_node);
  else
    newdst = create_tmp_reg (size_type_node);
  gimple_call_set_lhs (repl, newdst);
  gimple_seq_add_stmt_without_update (&stmts, repl);

  /* Create (dst p+ strlen (dst)).  */
  newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
  newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len,
                        build_int_cst (size_type_node, 1));
  len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
  gimple_seq_add_stmt_without_update (&stmts, repl);
  if (gimple_call_lhs (stmt))
    {
      repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      gsi_replace_with_seq_vops (gsi, stmts);
      /* gsi now points at the assignment to the lhs, get a
         stmt iterator to the memcpy call.
         ???  We can't use gsi_for_stmt as that doesn't work when the
         CFG isn't built yet.  */
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (&gsi2);
      fold_stmt (&gsi2);
    }
  else
    {
      gsi_replace_with_seq_vops (gsi, stmts);
      fold_stmt (gsi);
    }
  return true;
}
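
/* Illustrative sketch of the expansion performed above (an editor's
   addition, not part of the original source): when strlen (src) is
   known,

     strcat (dst, src);

   is emitted as the sequence

     _1 = strlen (dst);
     _2 = dst + _1;
     memcpy (_2, src, len + 1);

   so the concatenation reuses the cheaper strlen/memcpy pair.  */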
/* Fold a call to the __strcat_chk builtin FNDECL.  DEST, SRC, and SIZE
   are the arguments to the call.  */

static bool
gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree size = gimple_call_arg (stmt, 2);
  tree fn;
  const char *p;

  p = c_getstr (src);
  /* If the SRC parameter is "", return DEST.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
    return false;

  /* If __builtin_strcat_chk is used, assume strcat is available.  */
  fn = builtin_decl_explicit (BUILT_IN_STRCAT);
  if (!fn)
    return false;

  gimple repl = gimple_build_call (fn, 2, dest, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Simplify a call to the strncat builtin.  */

static bool
gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dst = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  const char *p = c_getstr (src);

  /* If the requested length is zero, or the src parameter string
     length is zero, return the dst parameter.  */
  if (integer_zerop (len) || (p && *p == '\0'))
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  /* If the requested len is greater than or equal to the string
     length, call strcat.  */
  if (TREE_CODE (len) == INTEGER_CST && p
      && compare_tree_int (len, strlen (p)) >= 0)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);

      /* If the replacement _DECL isn't initialized, don't do the
         transformation.  */
      if (!fn)
        return false;

      gcall *repl = gimple_build_call (fn, 2, dst, src);
      replace_call_with_call_and_fold (gsi, repl);
      return true;
    }

  return false;
}
/* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
   LEN, and SIZE.  */

static bool
gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);
  tree size = gimple_call_arg (stmt, 3);
  tree fn;
  const char *p;

  p = c_getstr (src);
  /* If the SRC parameter is "" or if LEN is 0, return DEST.  */
  if ((p && *p == '\0')
      || integer_zerop (len))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  if (! integer_all_onesp (size))
    {
      tree src_len = c_strlen (src, 1);
      if (src_len
          && tree_fits_uhwi_p (src_len)
          && tree_fits_uhwi_p (len)
          && ! tree_int_cst_lt (len, src_len))
        {
          /* If LEN >= strlen (SRC), optimize into __strcat_chk.  */
          fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
          if (!fn)
            return false;

          gimple repl = gimple_build_call (fn, 3, dest, src, size);
          replace_call_with_call_and_fold (gsi, repl);
          return true;
        }
      return false;
    }

  /* If __builtin_strncat_chk is used, assume strncat is available.  */
  fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
  if (!fn)
    return false;

  gimple repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold a call to the fputs builtin.  ARG0 and ARG1 are the arguments
   to the call.  IGNORE is true if the value returned
   by the builtin will be ignored.  UNLOCKED is true if this is
   actually a call to fputs_unlocked.  If LEN is non-NULL, it represents
   the known length of the string.  Return false if no simplification
   was possible.  */

static bool
gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
                           tree arg0, tree arg1,
                           bool unlocked)
{
  gimple stmt = gsi_stmt (*gsi);

  /* If we're using an unlocked function, assume the other unlocked
     functions exist explicitly.  */
  tree const fn_fputc = (unlocked
                         ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
                         : builtin_decl_implicit (BUILT_IN_FPUTC));
  tree const fn_fwrite = (unlocked
                          ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
                          : builtin_decl_implicit (BUILT_IN_FWRITE));

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt))
    return false;

  /* Get the length of the string passed to fputs.  If the length
     can't be determined, punt.  */
  tree len = get_maxval_strlen (arg0, 0);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return false;

  switch (compare_tree_int (len, 1))
    {
    case -1: /* length is 0, delete the call entirely.  */
      replace_call_with_value (gsi, integer_zero_node);
      return true;

    case 0: /* length is 1, call fputc.  */
      {
        const char *p = c_getstr (arg0);
        if (p != NULL)
          {
            if (!fn_fputc)
              return false;

            gimple repl = gimple_build_call (fn_fputc, 2,
                                             build_int_cst
                                               (integer_type_node, p[0]), arg1);
            replace_call_with_call_and_fold (gsi, repl);
            return true;
          }
      }
      /* FALLTHROUGH */
    case 1: /* length is greater than 1, call fwrite.  */
      {
        /* If optimizing for size keep fputs.  */
        if (optimize_function_for_size_p (cfun))
          return false;
        /* New argument list transforming fputs(string, stream) to
           fwrite(string, 1, len, stream).  */
        if (!fn_fwrite)
          return false;

        gimple repl = gimple_build_call (fn_fwrite, 4, arg0,
                                         size_one_node, len, arg1);
        replace_call_with_call_and_fold (gsi, repl);
        return true;
      }
    default:
      gcc_unreachable ();
    }
}
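
/* Illustrative examples of the three cases above (an editor's sketch,
   not part of the original source), valid only when the fputs result
   is unused:

     fputs ("", f);     -- removed entirely;
     fputs ("x", f);    -- becomes fputc ('x', f);
     fputs ("abc", f);  -- becomes fwrite ("abc", 1, 3, f)
                           unless optimizing for size.  */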
/* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   DEST, SRC, LEN, and SIZE are the arguments to the call.
   IGNORE is true, if return value can be ignored.  FCODE is the BUILT_IN_*
   code of the builtin.  If MAXLEN is not NULL, it is maximum length
   passed as third argument.  */

static bool
gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
                                tree dest, tree src, tree len, tree size,
                                enum built_in_function fcode)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST
     (resp. DEST+LEN for __mempcpy_chk).  */
  if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
    {
      if (fcode != BUILT_IN_MEMPCPY_CHK)
        {
          replace_call_with_value (gsi, dest);
          return true;
        }
      else
        {
          tree temp = fold_build_pointer_plus_loc (loc, dest, len);
          temp = force_gimple_operand_gsi (gsi, temp,
                                           false, NULL_TREE, true,
                                           GSI_SAME_STMT);
          replace_call_with_value (gsi, temp);
          return true;
        }
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (len, 2);
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
        {
          /* If LEN is not constant, try MAXLEN too.
             For MAXLEN only allow optimizing into non-_ocs function
             if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
          if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
            {
              if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
                {
                  /* (void) __mempcpy_chk () can be optimized into
                     (void) __memcpy_chk ().  */
                  fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
                  if (!fn)
                    return false;

                  gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
                  replace_call_with_call_and_fold (gsi, repl);
                  return true;
                }
              return false;
            }
        }
      else
        maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
        return false;
    }

  fn = NULL_TREE;
  /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
     mem{cpy,pcpy,move,set} is available.  */
  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
      break;
    case BUILT_IN_MEMPCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
      break;
    case BUILT_IN_MEMMOVE_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
      break;
    case BUILT_IN_MEMSET_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMSET);
      break;
    default:
      break;
    }

  if (!fn)
    return false;

  gimple repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
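
/* Illustrative example (an editor's sketch, not part of the original
   source): with a known object size, say

     __builtin___memcpy_chk (d, s, 16, 32);

   SIZE (32) is a constant not smaller than LEN (16), so the check can
   never fail and the call is lowered to plain memcpy (d, s, 16).  */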
/* Fold a call to the __st[rp]cpy_chk builtin.
   DEST, SRC, and SIZE are the arguments to the call.
   IGNORE is true if return value can be ignored.  FCODE is the BUILT_IN_*
   code of the builtin.  If MAXLEN is not NULL, it is maximum length of
   strings passed as second argument.  */

static bool
gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
                                tree dest,
                                tree src, tree size,
                                enum built_in_function fcode)
{
  gimple stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree len, fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (src, 1);
  if (! integer_all_onesp (size))
    {
      len = c_strlen (src, 1);
      if (! len || ! tree_fits_uhwi_p (len))
        {
          /* If LEN is not constant, try MAXLEN too.
             For MAXLEN only allow optimizing into non-_ocs function
             if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
          if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
            {
              if (fcode == BUILT_IN_STPCPY_CHK)
                {
                  if (! ignore)
                    return false;

                  /* If return value of __stpcpy_chk is ignored,
                     optimize into __strcpy_chk.  */
                  fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
                  if (!fn)
                    return false;

                  gimple repl = gimple_build_call (fn, 3, dest, src, size);
                  replace_call_with_call_and_fold (gsi, repl);
                  return true;
                }

              if (! len || TREE_SIDE_EFFECTS (len))
                return false;

              /* If c_strlen returned something, but not a constant,
                 transform __strcpy_chk into __memcpy_chk.  */
              fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
              if (!fn)
                return false;

              len = fold_convert_loc (loc, size_type_node, len);
              len = size_binop_loc (loc, PLUS_EXPR, len,
                                    build_int_cst (size_type_node, 1));
              len = force_gimple_operand_gsi (gsi, len, true, NULL_TREE,
                                              true, GSI_SAME_STMT);
              gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
              replace_call_with_call_and_fold (gsi, repl);
              return true;
            }
        }
      else
        maxlen = len;

      if (! tree_int_cst_lt (maxlen, size))
        return false;
    }

  /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
                              ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
  if (!fn)
    return false;

  gimple repl = gimple_build_call (fn, 2, dest, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold a call to the __st{r,p}ncpy_chk builtin.  DEST, SRC, LEN, and SIZE
   are the arguments to the call.  If MAXLEN is not NULL, it is maximum
   length passed as third argument.  IGNORE is true if return value can be
   ignored.  FCODE is the BUILT_IN_* code of the builtin.  */

static bool
gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
                                 tree dest, tree src,
                                 tree len, tree size,
                                 enum built_in_function fcode)
{
  gimple stmt = gsi_stmt (*gsi);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
    {
      /* If return value of __stpncpy_chk is ignored,
         optimize into __strncpy_chk.  */
      fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
      if (fn)
        {
          gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
          replace_call_with_call_and_fold (gsi, repl);
          return true;
        }
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (len, 2);
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
        {
          /* If LEN is not constant, try MAXLEN too.
             For MAXLEN only allow optimizing into non-_ocs function
             if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
          if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
            return false;
        }
      else
        maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
        return false;
    }

  /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
                              ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
  if (!fn)
    return false;

  gimple repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold function call to builtin stpcpy with arguments DEST and SRC.
   Return false if no simplification can be made.  */

static bool
gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  location_t loc = gimple_location (stmt);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree fn, len, lenp1;

  /* If the result is unused, replace stpcpy with strcpy.  */
  if (gimple_call_lhs (stmt) == NULL_TREE)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
        return false;
      gimple_call_set_fndecl (stmt, fn);
      fold_stmt (gsi);
      return true;
    }

  len = c_strlen (src, 1);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return false;

  if (optimize_function_for_size_p (cfun)
      /* If length is zero it's small enough.  */
      && !integer_zerop (len))
    return false;

  /* If the source has a known length replace stpcpy with memcpy.  */
  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  gimple_seq stmts = NULL;
  tree tem = gimple_convert (&stmts, loc, size_type_node, len);
  lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
                        tem, build_int_cst (size_type_node, 1));
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
  gimple_set_vuse (repl, gimple_vuse (stmt));
  gimple_set_vdef (repl, gimple_vdef (stmt));
  if (gimple_vdef (repl)
      && TREE_CODE (gimple_vdef (repl)) == SSA_NAME)
    SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
  gsi_insert_before (gsi, repl, GSI_SAME_STMT);
  /* Replace the result with dest + len.  */
  stmts = NULL;
  tem = gimple_convert (&stmts, loc, sizetype, len);
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
                                      POINTER_PLUS_EXPR, dest, tem);
  gsi_replace (gsi, ret, true);
  /* Finally fold the memcpy call.  */
  gimple_stmt_iterator gsi2 = *gsi;
  gsi_prev (&gsi2);
  fold_stmt (&gsi2);
  return true;
}
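
/* Illustrative example (an editor's sketch, not part of the original
   source): when strlen (src) is known, p = stpcpy (d, "abc") becomes

     memcpy (d, "abc", 4);
     p = d + 3;

   i.e. the result points at the copied NUL terminator.  */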

/* Fold a call to __{,v}snprintf_chk at *GSI.  Return false if a normal
   call should be emitted rather than expanding the function inline.
   FCODE is either BUILT_IN_SNPRINTF_CHK or BUILT_IN_VSNPRINTF_CHK.  */

static bool
gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
                                  enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call.  */
  if (gimple_call_num_args (stmt) < 5)
    return false;

  dest = gimple_call_arg (stmt, 0);
  len = gimple_call_arg (stmt, 1);
  flag = gimple_call_arg (stmt, 2);
  size = gimple_call_arg (stmt, 3);
  fmt = gimple_call_arg (stmt, 4);

  if (! tree_fits_uhwi_p (size))
    return false;

  if (! integer_all_onesp (size))
    {
      tree maxlen = get_maxval_strlen (len, 2);
      if (! tree_fits_uhwi_p (len))
        {
          /* If LEN is not constant, try MAXLEN too.
             For MAXLEN only allow optimizing into non-_ocs function
             if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
          if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
            return false;
        }
      else
        maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
        return false;
    }

  if (!init_target_chars ())
    return false;

  /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      fmt_str = c_getstr (fmt);
      if (fmt_str == NULL)
        return false;
      if (strchr (fmt_str, target_percent) != NULL
          && strcmp (fmt_str, target_percent_s))
        return false;
    }

  /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
     available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
                              ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 5 arguments by 3,
     retaining trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, len);
  gimple_call_set_arg (stmt, 2, fmt);
  for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
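
/* Illustrative example (editorial sketch, not in the original source;
   buf and s are hypothetical): with flag 0 and a known object size,

     __builtin___snprintf_chk (buf, 8, 0, 16, "%s", s);

   has size 16 >= len 8, so the flag and object-size arguments are
   dropped and the call becomes

     snprintf (buf, 8, "%s", s);  */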

/* Fold a call to __{,v}sprintf_chk at *GSI.  Return false if a normal
   call should be emitted rather than expanding the function inline.
   FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK.  */

static bool
gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
                                 enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;
  unsigned nargs = gimple_call_num_args (stmt);

  /* Verify the required arguments in the original call.  */
  if (nargs < 4)
    return false;
  dest = gimple_call_arg (stmt, 0);
  flag = gimple_call_arg (stmt, 1);
  size = gimple_call_arg (stmt, 2);
  fmt = gimple_call_arg (stmt, 3);

  if (! tree_fits_uhwi_p (size))
    return false;

  len = NULL_TREE;

  if (!init_target_chars ())
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str != NULL)
    {
      /* If the format doesn't contain % args or %%, we know the size.  */
      if (strchr (fmt_str, target_percent) == 0)
        {
          if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
            len = build_int_cstu (size_type_node, strlen (fmt_str));
        }
      /* If the format is "%s" and first ... argument is a string literal,
         we know the size too.  */
      else if (fcode == BUILT_IN_SPRINTF_CHK
               && strcmp (fmt_str, target_percent_s) == 0)
        {
          tree arg;

          if (nargs == 5)
            {
              arg = gimple_call_arg (stmt, 4);
              if (POINTER_TYPE_P (TREE_TYPE (arg)))
                {
                  len = c_strlen (arg, 1);
                  if (! len || ! tree_fits_uhwi_p (len))
                    len = NULL_TREE;
                }
            }
        }
    }

  if (! integer_all_onesp (size))
    {
      if (! len || ! tree_int_cst_lt (len, size))
        return false;
    }

  /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      if (fmt_str == NULL)
        return false;
      if (strchr (fmt_str, target_percent) != NULL
          && strcmp (fmt_str, target_percent_s))
        return false;
    }

  /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
                              ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 4 arguments by 2,
     retaining trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, fmt);
  for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
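
/* Illustrative example (editorial sketch, not in the original source;
   buf is hypothetical):

     __builtin___sprintf_chk (buf, 1, 16, "hello");

   has a format without '%' directives and a known output length of
   5 < 16, so even with a nonzero flag it is rewritten to

     sprintf (buf, "hello");  */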

/* Simplify a call to the sprintf builtin with arguments DEST, FMT, and
   ORIG taken from the call.  ORIG may be null if this is a 2-argument
   call.  We don't attempt to simplify calls with more than 3 arguments.

   Return true if a simplification was made, false otherwise.  */

static bool
gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree fmt = gimple_call_arg (stmt, 1);
  tree orig = NULL_TREE;
  const char *fmt_str = NULL;

  /* Verify the required arguments in the original call.  We deal with two
     types of sprintf() calls: 'sprintf (str, fmt)' and
     'sprintf (dest, "%s", orig)'.  */
  if (gimple_call_num_args (stmt) > 3)
    return false;

  if (gimple_call_num_args (stmt) == 3)
    orig = gimple_call_arg (stmt, 2);

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);

      if (!fn)
        return false;

      /* Don't optimize sprintf (buf, "abc", ptr++).  */
      if (orig)
        return false;

      /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
         'format' is known to contain no % formats.  */
      gimple_seq stmts = NULL;
      gimple repl = gimple_build_call (fn, 2, dest, fmt);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
        {
          repl = gimple_build_assign (gimple_call_lhs (stmt),
                                      build_int_cst (integer_type_node,
                                                     strlen (fmt_str)));
          gimple_seq_add_stmt_without_update (&stmts, repl);
          gsi_replace_with_seq_vops (gsi, stmts);
          /* gsi now points at the assignment to the lhs, get a
             stmt iterator to the memcpy call.
             ???  We can't use gsi_for_stmt as that doesn't work when the
             CFG isn't built yet.  */
          gimple_stmt_iterator gsi2 = *gsi;
          gsi_prev (&gsi2);
          fold_stmt (&gsi2);
        }
      else
        {
          gsi_replace_with_seq_vops (gsi, stmts);
          fold_stmt (gsi);
        }
      return true;
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
        return false;

      /* Don't crash on sprintf (str1, "%s").  */
      if (!orig)
        return false;

      tree orig_len = NULL_TREE;
      if (gimple_call_lhs (stmt))
        {
          orig_len = get_maxval_strlen (orig, 0);
          if (!orig_len)
            return false;
        }

      /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2).  */
      gimple_seq stmts = NULL;
      gimple repl = gimple_build_call (fn, 2, dest, orig);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
        {
          if (!useless_type_conversion_p (integer_type_node,
                                          TREE_TYPE (orig_len)))
            orig_len = fold_convert (integer_type_node, orig_len);
          repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
          gimple_seq_add_stmt_without_update (&stmts, repl);
          gsi_replace_with_seq_vops (gsi, stmts);
          /* gsi now points at the assignment to the lhs, get a
             stmt iterator to the memcpy call.
             ???  We can't use gsi_for_stmt as that doesn't work when the
             CFG isn't built yet.  */
          gimple_stmt_iterator gsi2 = *gsi;
          gsi_prev (&gsi2);
          fold_stmt (&gsi2);
        }
      else
        {
          gsi_replace_with_seq_vops (gsi, stmts);
          fold_stmt (gsi);
        }
      return true;
    }
  return false;
}
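
/* Illustrative examples (editorial sketch, not in the original source;
   d and s are hypothetical):

     sprintf (d, "hello");   =>  strcpy (d, "hello");  result, if used: 5
     sprintf (d, "%s", s);   =>  strcpy (d, s);        result: strlen bound

   The "%s" form is only rewritten when the length of S is known or the
   return value of sprintf is unused.  */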

/* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
   FMT, and ORIG taken from the call.  ORIG may be null if this is a
   3-argument call.  We don't attempt to simplify calls with more than
   4 arguments.

   Return true if a simplification was made, false otherwise.  */

static bool
gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest = gimple_call_arg (stmt, 0);
  tree destsize = gimple_call_arg (stmt, 1);
  tree fmt = gimple_call_arg (stmt, 2);
  tree orig = NULL_TREE;
  const char *fmt_str = NULL;

  if (gimple_call_num_args (stmt) > 4)
    return false;

  if (gimple_call_num_args (stmt) == 4)
    orig = gimple_call_arg (stmt, 3);

  if (!tree_fits_uhwi_p (destsize))
    return false;
  unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
        return false;

      /* Don't optimize snprintf (buf, 4, "abc", ptr++).  */
      if (orig)
        return false;

      /* We could expand this as
         memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
         or to
         memcpy (str, fmt_with_nul_at_cstm1, cst);
         but in the former case that might increase code size
         and in the latter case grow .rodata section too much.
         So punt for now.  */
      size_t len = strlen (fmt_str);
      if (len >= destlen)
        return false;

      gimple_seq stmts = NULL;
      gimple repl = gimple_build_call (fn, 2, dest, fmt);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
        {
          repl = gimple_build_assign (gimple_call_lhs (stmt),
                                      build_int_cst (integer_type_node, len));
          gimple_seq_add_stmt_without_update (&stmts, repl);
          gsi_replace_with_seq_vops (gsi, stmts);
          /* gsi now points at the assignment to the lhs, get a
             stmt iterator to the memcpy call.
             ???  We can't use gsi_for_stmt as that doesn't work when the
             CFG isn't built yet.  */
          gimple_stmt_iterator gsi2 = *gsi;
          gsi_prev (&gsi2);
          fold_stmt (&gsi2);
        }
      else
        {
          gsi_replace_with_seq_vops (gsi, stmts);
          fold_stmt (gsi);
        }
      return true;
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
        return false;

      /* Don't crash on snprintf (str1, cst, "%s").  */
      if (!orig)
        return false;

      tree orig_len = get_maxval_strlen (orig, 0);
      if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
        return false;

      /* We could expand this as
         memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
         or to
         memcpy (str1, str2_with_nul_at_cstm1, cst);
         but in the former case that might increase code size
         and in the latter case grow .rodata section too much.
         So punt for now.  */
      if (compare_tree_int (orig_len, destlen) >= 0)
        return false;

      /* Convert snprintf (str1, cst, "%s", str2) into
         strcpy (str1, str2) if strlen (str2) < cst.  */
      gimple_seq stmts = NULL;
      gimple repl = gimple_build_call (fn, 2, dest, orig);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
        {
          if (!useless_type_conversion_p (integer_type_node,
                                          TREE_TYPE (orig_len)))
            orig_len = fold_convert (integer_type_node, orig_len);
          repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
          gimple_seq_add_stmt_without_update (&stmts, repl);
          gsi_replace_with_seq_vops (gsi, stmts);
          /* gsi now points at the assignment to the lhs, get a
             stmt iterator to the memcpy call.
             ???  We can't use gsi_for_stmt as that doesn't work when the
             CFG isn't built yet.  */
          gimple_stmt_iterator gsi2 = *gsi;
          gsi_prev (&gsi2);
          fold_stmt (&gsi2);
        }
      else
        {
          gsi_replace_with_seq_vops (gsi, stmts);
          fold_stmt (gsi);
        }
      return true;
    }
  return false;
}
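
/* Illustrative example (editorial sketch, not in the original source;
   d is hypothetical): with a constant destination size,

     snprintf (d, 32, "%s", "abc");

   has a known argument length 3 < 32, so no truncation can occur and
   the call becomes

     strcpy (d, "abc");

   with any used result replaced by the constant 3.  */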

/* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk
   builtins.  FP, FMT, and ARG are the arguments to the call.  We don't
   fold calls with more than 3 arguments, and ARG may be null in the
   2-argument case.

   Return true if a simplification was made, false otherwise.  FCODE is
   the BUILT_IN_* code of the function to be simplified.  */

static bool
gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
                             tree fp, tree fmt, tree arg,
                             enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree fn_fputc, fn_fputs;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt) != NULL_TREE)
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
         unlocked functions exist explicitly.  */
      fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
      fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
    }
  else
    {
      fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
      fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
    }

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use fputs.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
          && arg)
        return false;

      /* If the format specifier was "", fprintf does nothing.  */
      if (fmt_str[0] == '\0')
        {
          replace_call_with_value (gsi, NULL_TREE);
          return true;
        }

      /* When "string" doesn't contain %, replace all cases of
         fprintf (fp, string) with fputs (string, fp).  The fputs
         builtin will take care of special cases like length == 1.  */
      if (fn_fputs)
        {
          gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
          replace_call_with_call_and_fold (gsi, repl);
          return true;
        }
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
    return false;

  /* If the format specifier was "%s", call __builtin_fputs (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_s) == 0)
    {
      if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
        return false;
      if (fn_fputs)
        {
          gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
          replace_call_with_call_and_fold (gsi, repl);
          return true;
        }
    }

  /* If the format specifier was "%c", call __builtin_fputc (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg
          || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
        return false;
      if (fn_fputc)
        {
          gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
          replace_call_with_call_and_fold (gsi, repl);
          return true;
        }
    }

  return false;
}
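
/* Illustrative examples (editorial sketch, not in the original source;
   fp, s and c are hypothetical), valid only when the return value is
   unused:

     fprintf (fp, "hello");   =>  fputs ("hello", fp);
     fprintf (fp, "%s", s);   =>  fputs (s, fp);
     fprintf (fp, "%c", c);   =>  fputc (c, fp);  */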

/* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk
   builtins.  FMT and ARG are the arguments to the call; we don't fold
   cases with more than 2 arguments, and ARG may be null if this is a
   1-argument case.

   Return true if a simplification was made, false otherwise.  FCODE is
   the BUILT_IN_* code of the function to be simplified.  */

static bool
gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
                            tree arg, enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree fn_putchar, fn_puts, newarg;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt) != NULL_TREE)
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (fcode == BUILT_IN_PRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
         unlocked functions exist explicitly.  */
      fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
      fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
    }
  else
    {
      fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
      fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
    }

  if (!init_target_chars ())
    return false;

  if (strcmp (fmt_str, target_percent_s) == 0
      || strchr (fmt_str, target_percent) == NULL)
    {
      const char *str;

      if (strcmp (fmt_str, target_percent_s) == 0)
        {
          if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
            return false;

          if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
            return false;

          str = c_getstr (arg);
          if (str == NULL)
            return false;
        }
      else
        {
          /* The format specifier doesn't contain any '%' characters.  */
          if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
              && arg)
            return false;
          str = fmt_str;
        }

      /* If the string was "", printf does nothing.  */
      if (str[0] == '\0')
        {
          replace_call_with_value (gsi, NULL_TREE);
          return true;
        }

      /* If the string has length of 1, call putchar.  */
      if (str[1] == '\0')
        {
          /* Given printf("c"), (where c is any one character,)
             convert "c"[0] to an int and pass that to the replacement
             function.  */
          newarg = build_int_cst (integer_type_node, str[0]);
          if (fn_putchar)
            {
              gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
              replace_call_with_call_and_fold (gsi, repl);
              return true;
            }
        }
      else
        {
          /* If the string was "string\n", call puts("string").  */
          size_t len = strlen (str);
          if ((unsigned char)str[len - 1] == target_newline
              && (size_t) (int) len == len
              && (int) len > 0)
            {
              char *newstr;
              tree offset_node, string_cst;

              /* Create a NUL-terminated string that's one char shorter
                 than the original, stripping off the trailing '\n'.  */
              newarg = build_string_literal (len, str);
              string_cst = string_constant (newarg, &offset_node);
              gcc_checking_assert (string_cst
                                   && (TREE_STRING_LENGTH (string_cst)
                                       == (int) len)
                                   && integer_zerop (offset_node)
                                   && (unsigned char)
                                      TREE_STRING_POINTER (string_cst)[len - 1]
                                      == target_newline);
              /* build_string_literal creates a new STRING_CST,
                 modify it in place to avoid double copying.  */
              newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
              newstr[len - 1] = '\0';
              if (fn_puts)
                {
                  gcall *repl = gimple_build_call (fn_puts, 1, newarg);
                  replace_call_with_call_and_fold (gsi, repl);
                  return true;
                }
            }
          else
            /* We'd like to arrange to call fputs(string,stdout) here,
               but we need stdout and don't have a way to get it yet.  */
            return false;
        }
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
    return false;

  /* If the format specifier was "%s\n", call __builtin_puts(arg).  */
  else if (strcmp (fmt_str, target_percent_s_newline) == 0)
    {
      if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
        return false;
      if (fn_puts)
        {
          gcall *repl = gimple_build_call (fn_puts, 1, arg);
          replace_call_with_call_and_fold (gsi, repl);
          return true;
        }
    }

  /* If the format specifier was "%c", call __builtin_putchar(arg).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg || ! useless_type_conversion_p (integer_type_node,
                                               TREE_TYPE (arg)))
        return false;
      if (fn_putchar)
        {
          gcall *repl = gimple_build_call (fn_putchar, 1, arg);
          replace_call_with_call_and_fold (gsi, repl);
          return true;
        }
    }

  return false;
}
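
/* Illustrative examples (editorial sketch, not in the original source;
   s and c are hypothetical), valid only when the return value is
   unused:

     printf ("x");         =>  putchar ('x');
     printf ("hi\n");      =>  puts ("hi");
     printf ("%s\n", s);   =>  puts (s);
     printf ("%c", c);     =>  putchar (c);  */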

/* Fold a call to __builtin_strlen with known maximal length LEN.  */

static bool
gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree len = get_maxval_strlen (gimple_call_arg (stmt, 0), 0);
  if (!len)
    return false;
  len = force_gimple_operand_gsi (gsi, len, true, NULL, true, GSI_SAME_STMT);
  replace_call_with_value (gsi, len);
  return true;
}
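
/* Illustrative example (editorial sketch, not in the original source;
   n is hypothetical): when the argument's string length is known,

     n = __builtin_strlen ("abcd");

   is replaced by the constant

     n = 4;  */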

/* Fold the non-target builtin at *GSI and return whether any simplification
   was made.  */

static bool
gimple_fold_builtin (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
  tree callee = gimple_call_fndecl (stmt);

  /* Give up for always_inline inline builtins until they are
     inlined.  */
  if (avoid_folding_inline_builtin (callee))
    return false;

  unsigned n = gimple_call_num_args (stmt);
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
  switch (fcode)
    {
    case BUILT_IN_BZERO:
      return gimple_fold_builtin_memset (gsi, integer_zero_node,
                                         gimple_call_arg (stmt, 1));
    case BUILT_IN_MEMSET:
      return gimple_fold_builtin_memset (gsi,
                                         gimple_call_arg (stmt, 1),
                                         gimple_call_arg (stmt, 2));
    case BUILT_IN_BCOPY:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 1),
                                            gimple_call_arg (stmt, 0), 3);
    case BUILT_IN_MEMCPY:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
                                            gimple_call_arg (stmt, 1), 0);
    case BUILT_IN_MEMPCPY:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
                                            gimple_call_arg (stmt, 1), 1);
    case BUILT_IN_MEMMOVE:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
                                            gimple_call_arg (stmt, 1), 3);
    case BUILT_IN_SPRINTF_CHK:
    case BUILT_IN_VSPRINTF_CHK:
      return gimple_fold_builtin_sprintf_chk (gsi, fcode);
    case BUILT_IN_STRCAT_CHK:
      return gimple_fold_builtin_strcat_chk (gsi);
    case BUILT_IN_STRNCAT_CHK:
      return gimple_fold_builtin_strncat_chk (gsi);
    case BUILT_IN_STRLEN:
      return gimple_fold_builtin_strlen (gsi);
    case BUILT_IN_STRCPY:
      return gimple_fold_builtin_strcpy (gsi,
                                         gimple_call_arg (stmt, 0),
                                         gimple_call_arg (stmt, 1));
    case BUILT_IN_STRNCPY:
      return gimple_fold_builtin_strncpy (gsi,
                                          gimple_call_arg (stmt, 0),
                                          gimple_call_arg (stmt, 1),
                                          gimple_call_arg (stmt, 2));
    case BUILT_IN_STRCAT:
      return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
                                         gimple_call_arg (stmt, 1));
    case BUILT_IN_STRNCAT:
      return gimple_fold_builtin_strncat (gsi);
    case BUILT_IN_FPUTS:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
                                        gimple_call_arg (stmt, 1), false);
    case BUILT_IN_FPUTS_UNLOCKED:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
                                        gimple_call_arg (stmt, 1), true);
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      return gimple_fold_builtin_memory_chk (gsi,
                                             gimple_call_arg (stmt, 0),
                                             gimple_call_arg (stmt, 1),
                                             gimple_call_arg (stmt, 2),
                                             gimple_call_arg (stmt, 3),
                                             fcode);
    case BUILT_IN_STPCPY:
      return gimple_fold_builtin_stpcpy (gsi);
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      return gimple_fold_builtin_stxcpy_chk (gsi,
                                             gimple_call_arg (stmt, 0),
                                             gimple_call_arg (stmt, 1),
                                             gimple_call_arg (stmt, 2),
                                             fcode);
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      return gimple_fold_builtin_stxncpy_chk (gsi,
                                              gimple_call_arg (stmt, 0),
                                              gimple_call_arg (stmt, 1),
                                              gimple_call_arg (stmt, 2),
                                              gimple_call_arg (stmt, 3),
                                              fcode);
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      return gimple_fold_builtin_snprintf_chk (gsi, fcode);
    case BUILT_IN_SNPRINTF:
      return gimple_fold_builtin_snprintf (gsi);
    case BUILT_IN_SPRINTF:
      return gimple_fold_builtin_sprintf (gsi);
    case BUILT_IN_FPRINTF:
    case BUILT_IN_FPRINTF_UNLOCKED:
    case BUILT_IN_VFPRINTF:
      if (n == 2 || n == 3)
        return gimple_fold_builtin_fprintf (gsi,
                                            gimple_call_arg (stmt, 0),
                                            gimple_call_arg (stmt, 1),
                                            n == 3
                                            ? gimple_call_arg (stmt, 2)
                                            : NULL_TREE,
                                            fcode);
      break;
    case BUILT_IN_FPRINTF_CHK:
    case BUILT_IN_VFPRINTF_CHK:
      if (n == 3 || n == 4)
        return gimple_fold_builtin_fprintf (gsi,
                                            gimple_call_arg (stmt, 0),
                                            gimple_call_arg (stmt, 2),
                                            n == 4
                                            ? gimple_call_arg (stmt, 3)
                                            : NULL_TREE,
                                            fcode);
      break;
    case BUILT_IN_PRINTF:
    case BUILT_IN_PRINTF_UNLOCKED:
    case BUILT_IN_VPRINTF:
      if (n == 1 || n == 2)
        return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
                                           n == 2
                                           ? gimple_call_arg (stmt, 1)
                                           : NULL_TREE, fcode);
      break;
    case BUILT_IN_PRINTF_CHK:
    case BUILT_IN_VPRINTF_CHK:
      if (n == 2 || n == 3)
        return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
                                           n == 3
                                           ? gimple_call_arg (stmt, 2)
                                           : NULL_TREE, fcode);
      break;
    default:;
    }

  /* Try the generic builtin folder.  */
  bool ignore = (gimple_call_lhs (stmt) == NULL);
  tree result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
        STRIP_NOPS (result);
      else
        result = fold_convert (gimple_call_return_type (stmt), result);
      if (!update_call_from_tree (gsi, result))
        gimplify_and_update_call_from_tree (gsi, result);
      return true;
    }

  return false;
}

/* Return true if ARG0 CODE ARG1 in infinite signed precision operation
   doesn't fit into TYPE.  The test for overflow should be regardless of
   -fwrapv, and even for unsigned types.  */

static bool
arith_overflowed_p (enum tree_code code, const_tree type,
                    const_tree arg0, const_tree arg1)
{
  typedef FIXED_WIDE_INT (WIDE_INT_MAX_PRECISION * 2) widest2_int;
  typedef generic_wide_int <wi::extended_tree <WIDE_INT_MAX_PRECISION * 2> >
    widest2_int_cst;
  widest2_int warg0 = widest2_int_cst (arg0);
  widest2_int warg1 = widest2_int_cst (arg1);
  widest2_int wres;
  switch (code)
    {
    case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
    case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
    case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
    default: gcc_unreachable ();
    }
  signop sign = TYPE_SIGN (type);
  if (sign == UNSIGNED && wi::neg_p (wres))
    return true;
  return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
}
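
/* Worked example (editorial sketch, not in the original source): for
   TYPE unsigned char (precision 8), 200 + 100 is computed exactly as
   300 in the double-width type; wi::min_precision (300, UNSIGNED) is
   9 > 8, so the addition overflows.  For signed char, 100 - (-100)
   yields 200, which needs 9 bits in signed representation, again an
   overflow.  */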

/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.  */

static bool
gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree callee;
  bool changed = false;
  unsigned i;

  /* Fold *& in call arguments.  */
  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
      {
        tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
        if (tmp)
          {
            gimple_call_set_arg (stmt, i, tmp);
            changed = true;
          }
      }

  /* Check for virtual calls that became direct calls.  */
  callee = gimple_call_fn (stmt);
  if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
    {
      if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
        {
          if (dump_file && virtual_method_call_p (callee)
              && !possible_polymorphic_call_target_p
                    (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
                                                     (OBJ_TYPE_REF_EXPR (callee)))))
            {
              fprintf (dump_file,
                       "Type inheritance inconsistent devirtualization of ");
              print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
              fprintf (dump_file, " to ");
              print_generic_expr (dump_file, callee, TDF_SLIM);
              fprintf (dump_file, "\n");
            }

          gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
          changed = true;
        }
      else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
        {
          bool final;
          vec <cgraph_node *>targets
            = possible_polymorphic_call_targets (callee, stmt, &final);
          if (final && targets.length () <= 1 && dbg_cnt (devirt))
            {
              tree lhs = gimple_call_lhs (stmt);
              if (dump_enabled_p ())
                {
                  location_t loc = gimple_location_safe (stmt);
                  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
                                   "folding virtual function call to %s\n",
                                   targets.length () == 1
                                   ? targets[0]->name ()
                                   : "__builtin_unreachable");
                }
              if (targets.length () == 1)
                {
                  gimple_call_set_fndecl (stmt, targets[0]->decl);
                  changed = true;
                  /* If the call becomes noreturn, remove the lhs.  */
                  if (lhs && (gimple_call_flags (stmt) & ECF_NORETURN))
                    {
                      if (TREE_CODE (lhs) == SSA_NAME)
                        {
                          tree var = create_tmp_var (TREE_TYPE (lhs));
                          tree def = get_or_create_ssa_default_def (cfun, var);
                          gimple new_stmt = gimple_build_assign (lhs, def);
                          gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
                        }
                      gimple_call_set_lhs (stmt, NULL_TREE);
                    }
                }
              else
                {
                  tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
                  gimple new_stmt = gimple_build_call (fndecl, 0);
                  gimple_set_location (new_stmt, gimple_location (stmt));
                  if (lhs && TREE_CODE (lhs) == SSA_NAME)
                    {
                      tree var = create_tmp_var (TREE_TYPE (lhs));
                      tree def = get_or_create_ssa_default_def (cfun, var);

                      /* To satisfy the condition for
                         cgraph_update_edges_for_call_stmt_node,
                         we need to preserve the GIMPLE_CALL statement
                         at the position of the GSI iterator.  */
                      update_call_from_tree (gsi, def);
                      gsi_insert_before (gsi, new_stmt, GSI_NEW_STMT);
                    }
                  else
                    {
                      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
                      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
                      gsi_replace (gsi, new_stmt, false);
                    }
                  return true;
                }
            }
        }
    }

  /* Check for indirect calls that became direct calls, and then
     no longer require a static chain.  */
  if (gimple_call_chain (stmt))
    {
      tree fn = gimple_call_fndecl (stmt);
      if (fn && !DECL_STATIC_CHAIN (fn))
        {
          gimple_call_set_chain (stmt, NULL);
          changed = true;
        }
      else
        {
          tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
          if (tmp)
            {
              gimple_call_set_chain (stmt, tmp);
              changed = true;
            }
        }
    }

  if (inplace)
    return changed;

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    {
      if (gimple_fold_builtin (gsi))
        changed = true;
    }
  else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
    {
      changed |= targetm.gimple_fold_builtin (gsi);
    }
  else if (gimple_call_internal_p (stmt))
    {
      enum tree_code subcode = ERROR_MARK;
      tree result = NULL_TREE;
      bool cplx_result = false;
      tree overflow = NULL_TREE;
      switch (gimple_call_internal_fn (stmt))
        {
        case IFN_BUILTIN_EXPECT:
          result = fold_builtin_expect (gimple_location (stmt),
                                        gimple_call_arg (stmt, 0),
                                        gimple_call_arg (stmt, 1),
                                        gimple_call_arg (stmt, 2));
          break;
        case IFN_UBSAN_OBJECT_SIZE:
          if (integer_all_onesp (gimple_call_arg (stmt, 2))
              || (TREE_CODE (gimple_call_arg (stmt, 1)) == INTEGER_CST
                  && TREE_CODE (gimple_call_arg (stmt, 2)) == INTEGER_CST
                  && tree_int_cst_le (gimple_call_arg (stmt, 1),
                                      gimple_call_arg (stmt, 2))))
            {
              gsi_replace (gsi, gimple_build_nop (), true);
              unlink_stmt_vdef (stmt);
              release_defs (stmt);
              return true;
            }
          break;
        case IFN_UBSAN_CHECK_ADD:
          subcode = PLUS_EXPR;
          break;
        case IFN_UBSAN_CHECK_SUB:
          subcode = MINUS_EXPR;
          break;
        case IFN_UBSAN_CHECK_MUL:
          subcode = MULT_EXPR;
          break;
        case IFN_ADD_OVERFLOW:
          subcode = PLUS_EXPR;
          cplx_result = true;
          break;
        case IFN_SUB_OVERFLOW:
          subcode = MINUS_EXPR;
          cplx_result = true;
          break;
        case IFN_MUL_OVERFLOW:
          subcode = MULT_EXPR;
          cplx_result = true;
          break;
        default:
          break;
        }
      if (subcode != ERROR_MARK)
        {
          tree arg0 = gimple_call_arg (stmt, 0);
          tree arg1 = gimple_call_arg (stmt, 1);
          tree type = TREE_TYPE (arg0);
          if (cplx_result)
            {
              tree lhs = gimple_call_lhs (stmt);
              if (lhs == NULL_TREE)
                type = NULL_TREE;
              else
                type = TREE_TYPE (TREE_TYPE (lhs));
            }
          if (type == NULL_TREE)
            ;
          /* x = y + 0; x = y - 0; x = y * 0; */
          else if (integer_zerop (arg1))
            result = subcode == MULT_EXPR ? integer_zero_node : arg0;
          /* x = 0 + y; x = 0 * y; */
          else if (subcode != MINUS_EXPR && integer_zerop (arg0))
            result = subcode == MULT_EXPR ? integer_zero_node : arg1;
          /* x = y - y; */
          else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
            result = integer_zero_node;
          /* x = y * 1; x = 1 * y; */
          else if (subcode == MULT_EXPR && integer_onep (arg1))
            result = arg0;
          else if (subcode == MULT_EXPR && integer_onep (arg0))
            result = arg1;
          else if (TREE_CODE (arg0) == INTEGER_CST
                   && TREE_CODE (arg1) == INTEGER_CST)
            {
              if (cplx_result)
                result = int_const_binop (subcode, fold_convert (type, arg0),
                                          fold_convert (type, arg1));
              else
                result = int_const_binop (subcode, arg0, arg1);
              if (result && arith_overflowed_p (subcode, type, arg0, arg1))
                {
                  if (cplx_result)
                    overflow = build_one_cst (type);
                  else
                    result = NULL_TREE;
                }
            }
          if (result)
            {
              if (result == integer_zero_node)
                result = build_zero_cst (type);
              else if (cplx_result && TREE_TYPE (result) != type)
                {
                  if (TREE_CODE (result) == INTEGER_CST)
                    {
                      if (arith_overflowed_p (PLUS_EXPR, type, result,
                                              integer_zero_node))
                        overflow = build_one_cst (type);
                    }
                  else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
                            && TYPE_UNSIGNED (type))
                           || (TYPE_PRECISION (type)
                               < (TYPE_PRECISION (TREE_TYPE (result))
                                  + (TYPE_UNSIGNED (TREE_TYPE (result))
                                     && !TYPE_UNSIGNED (type)))))
                    result = NULL_TREE;
                  if (result)
                    result = fold_convert (type, result);
                }
            }
        }

      if (result)
        {
          if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
            result = drop_tree_overflow (result);
          if (cplx_result)
            {
              if (overflow == NULL_TREE)
                overflow = build_zero_cst (TREE_TYPE (result));
              tree ctype = build_complex_type (TREE_TYPE (result));
              if (TREE_CODE (result) == INTEGER_CST
                  && TREE_CODE (overflow) == INTEGER_CST)
                result = build_complex (ctype, result, overflow);
              else
                result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
                                     ctype, result, overflow);
            }
          if (!update_call_from_tree (gsi, result))
            gimplify_and_update_call_from_tree (gsi, result);
          changed = true;
        }
    }

  return changed;
}
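
/* Illustrative example (editorial sketch, not in the original source;
   res is hypothetical): an internal call whose lhs has complex type,

     res = IFN_ADD_OVERFLOW (2, 3);

   folds to the constant pair

     res = COMPLEX_EXPR <5, 0>;

   where the imaginary part is the overflow flag (0: no overflow).  */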

/* Worker for fold_stmt_1 dispatch to pattern based folding with
   gimple_simplify.

   Replaces *GSI with the simplification result in RCODE and OPS
   and the associated statements in *SEQ.  Does the replacement
   according to INPLACE and returns true if the operation succeeded.  */

static bool
replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
                                  code_helper rcode, tree *ops,
                                  gimple_seq *seq, bool inplace)
{
  gimple stmt = gsi_stmt (*gsi);

  /* Play safe and do not allow abnormals to be mentioned in
     newly created statements.  See also maybe_push_res_to_seq.  */
  if ((TREE_CODE (ops[0]) == SSA_NAME
       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0]))
      || (ops[1]
          && TREE_CODE (ops[1]) == SSA_NAME
          && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1]))
      || (ops[2]
          && TREE_CODE (ops[2]) == SSA_NAME
          && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2])))
    return false;

  if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
    {
      gcc_assert (rcode.is_tree_code ());
      if (TREE_CODE_CLASS ((enum tree_code)rcode) == tcc_comparison
          /* GIMPLE_CONDs condition may not throw.  */
          && (!flag_exceptions
              || !cfun->can_throw_non_call_exceptions
              || !operation_could_trap_p (rcode,
                                          FLOAT_TYPE_P (TREE_TYPE (ops[0])),
                                          false, NULL_TREE)))
        gimple_cond_set_condition (cond_stmt, rcode, ops[0], ops[1]);
      else if (rcode == SSA_NAME)
        gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
                                   build_zero_cst (TREE_TYPE (ops[0])));
      else if (rcode == INTEGER_CST)
        {
          if (integer_zerop (ops[0]))
            gimple_cond_make_false (cond_stmt);
          else
            gimple_cond_make_true (cond_stmt);
        }
      else if (!inplace)
        {
          tree res = maybe_push_res_to_seq (rcode, boolean_type_node,
                                            ops, seq);
          if (!res)
            return false;
          gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
                                     build_zero_cst (TREE_TYPE (res)));
        }
      else
        return false;
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "gimple_simplified to ");
          if (!gimple_seq_empty_p (*seq))
            print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
          print_gimple_stmt (dump_file, gsi_stmt (*gsi),
                             0, TDF_SLIM);
        }
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (is_gimple_assign (stmt)
           && rcode.is_tree_code ())
    {
      if (!inplace
          || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (rcode))
        {
          maybe_build_generic_op (rcode,
                                  TREE_TYPE (gimple_assign_lhs (stmt)),
                                  &ops[0], ops[1], ops[2]);
          gimple_assign_set_rhs_with_ops (gsi, rcode, ops[0], ops[1], ops[2]);
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "gimple_simplified to ");
              if (!gimple_seq_empty_p (*seq))
                print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
              print_gimple_stmt (dump_file, gsi_stmt (*gsi),
                                 0, TDF_SLIM);
            }
          gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
          return true;
        }
    }
  else if (!inplace)
    {
      if (gimple_has_lhs (stmt))
        {
          tree lhs = gimple_get_lhs (stmt);
          if (!maybe_push_res_to_seq (rcode, TREE_TYPE (lhs),
                                      ops, seq, lhs))
            return false;
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "gimple_simplified to ");
              print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
            }
          gsi_replace_with_seq_vops (gsi, *seq);
          return true;
        }
      else
        gcc_unreachable ();
    }

  return false;
}
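
/* Illustrative example (editorial sketch, not in the original source):
   if gimple_simplify reduces the condition of

     if (a_1 != a_1)

   to the integer constant 0, RCODE is INTEGER_CST and the GIMPLE_COND
   is rewritten via gimple_cond_make_false into an always-false
   condition, with no intermediate statements needed.  */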

/* Canonicalize MEM_REFs invariant address operand after propagation.  */

static bool
maybe_canonicalize_mem_ref_addr (tree *t)
{
  bool res = false;

  if (TREE_CODE (*t) == ADDR_EXPR)
    t = &TREE_OPERAND (*t, 0);

  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
     of invariant addresses into a SSA name MEM_REF address.  */
  if (TREE_CODE (*t) == MEM_REF
      || TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (*t, 0);
      if (TREE_CODE (addr) == ADDR_EXPR
          && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
              || handled_component_p (TREE_OPERAND (addr, 0))))
        {
          tree base;
          HOST_WIDE_INT coffset;
          base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
                                                &coffset);
          if (!base)
            gcc_unreachable ();

          TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
          TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
                                                  TREE_OPERAND (*t, 1),
                                                  size_int (coffset));
          res = true;
        }
      gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
                           || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
    }

  /* Canonicalize back MEM_REFs to plain reference trees if the object
     accessed is a decl that has the same access semantics as the MEM_REF.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1))
      && MR_DEPENDENCE_CLIQUE (*t) == 0)
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
      if (/* Same volatile qualification.  */
          TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
          /* Same TBAA behavior with -fstrict-aliasing.  */
          && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
          && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
              == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
          /* Same alignment.  */
          && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
          /* We have to look out here to not drop a required conversion
             from the rhs to the lhs if *t appears on the lhs or vice-versa
             if it appears on the rhs.  Thus require strict type
             compatibility.  */
          && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
        {
          *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
          res = true;
        }
    }

  /* Canonicalize TARGET_MEM_REF in particular with respect to
     the indexes becoming constant.  */
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
        {
          *t = tem;
          res = true;
        }
    }

  return res;
}
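
/* Illustrative example (editorial sketch, not in the original source):
   MEM[&foo.bar, 0] has its address operand flattened into &foo plus a
   constant byte offset, and a MEM[&decl, 0] whose type, alignment and
   volatile qualification all match DECL itself is turned back into the
   plain reference "decl".  */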

/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases.  */

static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
{
  bool changed = false;
  gimple stmt = gsi_stmt (*gsi);
  unsigned i;

  /* First do required canonicalization of [TARGET_]MEM_REF addresses
     after propagation.
     ???  This shouldn't be done in generic folding but in the
     propagation helpers which also know whether an address was
     propagated.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
        {
          tree *rhs = gimple_assign_rhs1_ptr (stmt);
          if ((REFERENCE_CLASS_P (*rhs)
               || TREE_CODE (*rhs) == ADDR_EXPR)
              && maybe_canonicalize_mem_ref_addr (rhs))
            changed = true;
          tree *lhs = gimple_assign_lhs_ptr (stmt);
          if (REFERENCE_CLASS_P (*lhs)
              && maybe_canonicalize_mem_ref_addr (lhs))
            changed = true;
        }
      break;
    case GIMPLE_CALL:
      {
        for (i = 0; i < gimple_call_num_args (stmt); ++i)
          {
            tree *arg = gimple_call_arg_ptr (stmt, i);
            if (REFERENCE_CLASS_P (*arg)
                && maybe_canonicalize_mem_ref_addr (arg))
              changed = true;
          }
        tree *lhs = gimple_call_lhs_ptr (stmt);
        if (*lhs
            && REFERENCE_CLASS_P (*lhs)
            && maybe_canonicalize_mem_ref_addr (lhs))
          changed = true;
        break;
      }
    case GIMPLE_ASM:
      {
        gasm *asm_stmt = as_a <gasm *> (stmt);
        for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
          {
            tree link = gimple_asm_output_op (asm_stmt, i);
            tree op = TREE_VALUE (link);
            if (REFERENCE_CLASS_P (op)
                && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
              changed = true;
          }
        for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
          {
            tree link = gimple_asm_input_op (asm_stmt, i);
            tree op = TREE_VALUE (link);
            if ((REFERENCE_CLASS_P (op)
                 || TREE_CODE (op) == ADDR_EXPR)
                && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
              changed = true;
          }
      }
      break;
    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
        {
          tree *val = gimple_debug_bind_get_value_ptr (stmt);
          if (*val
              && (REFERENCE_CLASS_P (*val)
                  || TREE_CODE (*val) == ADDR_EXPR)
              && maybe_canonicalize_mem_ref_addr (val))
            changed = true;
        }
      break;
    default:;
    }

  /* Dispatch to pattern-based folding.  */
  if (!inplace
      || is_gimple_assign (stmt)
      || gimple_code (stmt) == GIMPLE_COND)
    {
      gimple_seq seq = NULL;
      code_helper rcode;
      tree ops[3] = {};
      if (gimple_simplify (stmt, &rcode, ops, inplace ? NULL : &seq, valueize))
        {
          if (replace_stmt_with_simplification (gsi, rcode, ops, &seq, inplace))
            changed = true;
          else
            gimple_seq_discard (seq);
        }
    }

  stmt = gsi_stmt (*gsi);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        unsigned old_num_ops = gimple_num_ops (stmt);
        enum tree_code subcode = gimple_assign_rhs_code (stmt);
        tree lhs = gimple_assign_lhs (stmt);
        tree new_rhs;
        /* First canonicalize operand order.  This avoids building new
           trees if this is the only thing fold would later do.  */
        if ((commutative_tree_code (subcode)
             || commutative_ternary_tree_code (subcode))
            && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
                                     gimple_assign_rhs2 (stmt), false))
          {
            tree tem = gimple_assign_rhs1 (stmt);
            gimple_assign_set_rhs1 (stmt, gimple_assign_rhs2 (stmt));
            gimple_assign_set_rhs2 (stmt, tem);
            changed = true;
          }

        new_rhs = fold_gimple_assign (gsi);
        if (new_rhs
            && !useless_type_conversion_p (TREE_TYPE (lhs),
                                           TREE_TYPE (new_rhs)))
          new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
        if (new_rhs
            && (!inplace
                || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
          {
            gimple_assign_set_rhs_from_tree (gsi, new_rhs);
            changed = true;
          }
        break;
      }

    case GIMPLE_COND:
      changed |= fold_gimple_cond (as_a <gcond *> (stmt));
      break;

    case GIMPLE_CALL:
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      {
        gasm *asm_stmt = as_a <gasm *> (stmt);
        size_t noutputs;
        const char **oconstraints;
        const char *constraint;
        bool allows_mem, allows_reg;

        noutputs = gimple_asm_noutputs (asm_stmt);
        oconstraints = XALLOCAVEC (const char *, noutputs);

        for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
          {
            tree link = gimple_asm_output_op (asm_stmt, i);
            tree op = TREE_VALUE (link);
            oconstraints[i]
              = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
            if (REFERENCE_CLASS_P (op)
                && (op = maybe_fold_reference (op, true)) != NULL_TREE)
              {
                TREE_VALUE (link) = op;
                changed = true;
              }
          }
        for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
          {
            tree link = gimple_asm_input_op (asm_stmt, i);
            tree op = TREE_VALUE (link);
            constraint
              = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
            parse_input_constraint (&constraint, 0, 0, noutputs, 0,
                                    oconstraints, &allows_mem, &allows_reg);
            if (REFERENCE_CLASS_P (op)
                && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
                   != NULL_TREE)
              {
                TREE_VALUE (link) = op;
                changed = true;
              }
          }
      }
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
        {
          tree val = gimple_debug_bind_get_value (stmt);
          if (val
              && REFERENCE_CLASS_P (val))
            {
              tree tem = maybe_fold_reference (val, false);
              if (tem)
                {
                  gimple_debug_bind_set_value (stmt, tem);
                  changed = true;
                }
            }
          else if (val
                   && TREE_CODE (val) == ADDR_EXPR)
            {
              tree ref = TREE_OPERAND (val, 0);
              tree tem = maybe_fold_reference (ref, false);
              if (tem)
                {
                  tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
                  gimple_debug_bind_set_value (stmt, tem);
                  changed = true;
                }
            }
        }
      break;

    default:;
    }

  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  */
  if (gimple_has_lhs (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
        {
          tree new_lhs = maybe_fold_reference (lhs, true);
          if (new_lhs)
            {
              gimple_set_lhs (stmt, new_lhs);
              changed = true;
            }
        }
    }

  return changed;
}

/* Valueization callback that ends up not following SSA edges.  */

tree
no_follow_ssa_edges (tree)
{
  return NULL_TREE;
}

/* Valueization callback that ends up following single-use SSA edges only.  */

tree
follow_single_use_edges (tree val)
{
  if (TREE_CODE (val) == SSA_NAME
      && !has_single_use (val))
    return NULL_TREE;
  return val;
}

/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   made any changes.
   The statement pointed to by GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
}

bool
fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
{
  return fold_stmt_1 (gsi, false, valueize);
}

/* Perform the minimal folding on statement *GSI.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.
   The statement *GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt_inplace (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
  gcc_assert (gsi_stmt (*gsi) == stmt);
  return changed;
}
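
/* Usage sketch (editorial addition, not in the original source): a
   pass that has just rewritten an operand typically refolds the
   statement in place:

     gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
     if (fold_stmt_inplace (&gsi))
       update_stmt (stmt);

   fold_stmt may replace the statement entirely, so callers that still
   need the original statement pointer use the inplace variant.  */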

/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
   if EXPR is null or we don't know how.
   If non-null, the result always has boolean type.  */

static tree
canonicalize_bool (tree expr, bool invert)
{
  if (!expr)
    return NULL_TREE;

  if (invert)
    {
      if (integer_nonzerop (expr))
        return boolean_false_node;
      else if (integer_zerop (expr))
        return boolean_true_node;
      else if (TREE_CODE (expr) == SSA_NAME)
        return fold_build2 (EQ_EXPR, boolean_type_node, expr,
                            build_int_cst (TREE_TYPE (expr), 0));
      else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
        return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
                            boolean_type_node,
                            TREE_OPERAND (expr, 0),
                            TREE_OPERAND (expr, 1));
      else
        return NULL_TREE;
    }
  else
    {
      if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
        return expr;
      if (integer_nonzerop (expr))
        return boolean_true_node;
      else if (integer_zerop (expr))
        return boolean_false_node;
      else if (TREE_CODE (expr) == SSA_NAME)
        return fold_build2 (NE_EXPR, boolean_type_node, expr,
                            build_int_cst (TREE_TYPE (expr), 0));
      else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
        return fold_build2 (TREE_CODE (expr),
                            boolean_type_node,
                            TREE_OPERAND (expr, 0),
                            TREE_OPERAND (expr, 1));
      else
        return NULL_TREE;
    }
}
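
/* Illustrative examples (editorial sketch, not in the original source),
   with INVERT false:

     5         =>  boolean_true_node
     name_1    =>  name_1 != 0
     a < b     =>  a < b   (rebuilt with boolean type)

   With INVERT true, a < b becomes a >= b via invert_tree_comparison.  */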

/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  */

static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
                        const_tree op1, const_tree op2)
{
  gimple s;

  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      if (operand_equal_p (expr, op1, 0))
        return ((code == NE_EXPR && integer_zerop (op2))
                || (code == EQ_EXPR && integer_nonzerop (op2)));
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
          && gimple_assign_rhs_code (s) == code
          && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
          && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
        return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
          && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
        {
          enum tree_code c = gimple_assign_rhs_code (s);
          if ((c == NE_EXPR && integer_zerop (op2))
              || (c == EQ_EXPR && integer_nonzerop (op2)))
            return same_bool_comparison_p (expr, c,
                                           gimple_assign_rhs1 (s),
                                           gimple_assign_rhs2 (s));
          if ((c == EQ_EXPR && integer_zerop (op2))
              || (c == NE_EXPR && integer_nonzerop (op2)))
            return same_bool_comparison_p (expr,
                                           invert_tree_comparison (c, false),
                                           gimple_assign_rhs1 (s),
                                           gimple_assign_rhs2 (s));
        }
    }
  return false;
}

/* Check to see if two boolean expressions OP1 and OP2 are logically
   equivalent.  */

static bool
same_bool_result_p (const_tree op1, const_tree op2)
{
  /* Simple cases first.  */
  if (operand_equal_p (op1, op2, 0))
    return true;

  /* Check the cases where at least one of the operands is a comparison.
     These are a bit smarter than operand_equal_p in that they apply some
     identities on SSA_NAMEs.  */
  if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
      && same_bool_comparison_p (op1, TREE_CODE (op2),
                                 TREE_OPERAND (op2, 0),
                                 TREE_OPERAND (op2, 1)))
    return true;
  if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
      && same_bool_comparison_p (op2, TREE_CODE (op1),
                                 TREE_OPERAND (op1, 0),
                                 TREE_OPERAND (op1, 1)))
    return true;

  /* Default case.  */
  return false;
}

/* Forward declarations for some mutually recursive functions.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
                   enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison (tree var, bool invert,
                         enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison_1 (gimple stmt,
                           enum tree_code code2, tree op2a, tree op2b);
static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
                  enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison (tree var, bool invert,
                        enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison_1 (gimple stmt,
                          enum tree_code code2, tree op2a, tree op2b);

/* Helper function for and_comparisons_1: try to simplify the AND of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of VAR before doing the AND.
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison (tree var, bool invert,
                         enum tree_code code2, tree op2a, tree op2b)
{
  tree t;
  gimple stmt = SSA_NAME_DEF_STMT (var);

  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.  */
  if (invert)
    t = or_var_with_comparison_1 (stmt,
                                  invert_tree_comparison (code2, false),
                                  op2a, op2b);
  else
    t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
  return canonicalize_bool (t, invert);
}
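
/* Illustrative DeMorgan rewrite (editorial note, not in the original
   source): !var AND (a < b) is handled as !(var OR (a >= b)); the OR
   helper runs on the non-inverted definition of VAR, and
   canonicalize_bool restores the inversion on the result.  */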

/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison_1 (gimple stmt,
                           enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
          || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
        {
          true_test_var = op2a;
          if (var == true_test_var)
            return var;
        }
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
               || (code2 == NE_EXPR && integer_nonzerop (op2b)))
        {
          false_test_var = op2a;
          if (var == false_test_var)
            return boolean_false_node;
        }
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (innercode,
                                  gimple_assign_rhs1 (stmt),
                                  gimple_assign_rhs2 (stmt),
                                  code2, op2a, op2b);
      if (t)
        return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple s;
      tree t;
      tree partial = NULL_TREE;
      bool is_and = (innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
         of inner1/inner2:
         inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
         inner1 AND (inner1 OR inner2) => inner1
         !inner1 AND (inner1 AND inner2) => false
         !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
         Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
        return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
        return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
        return (is_and
                ? boolean_false_node
                : and_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
        return (is_and
                ? boolean_false_node
                : and_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
         Compute the first partial result, (inner1 AND (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
          && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
          && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
          && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
                                              gimple_assign_rhs1 (s),
                                              gimple_assign_rhs2 (s),
                                              code2, op2a, op2b)))
        {
          /* Handle the AND case, where we are reassociating:
             (inner1 AND inner2) AND (op2a code2 op2b)
             => (t AND inner2)
             If the partial result t is a constant, we win.  Otherwise
             continue on to try reassociating with the other inner test.  */
          if (is_and)
            {
              if (integer_onep (t))
                return inner2;
              else if (integer_zerop (t))
                return boolean_false_node;
            }

          /* Handle the OR case, where we are redistributing:
             (inner1 OR inner2) AND (op2a code2 op2b)
             => (t OR (inner2 AND (op2a code2 op2b)))  */
          else if (integer_onep (t))
            return boolean_true_node;

          /* Save partial result for later.  */
          partial = t;
        }

      /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
          && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
          && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
          && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
                                              gimple_assign_rhs1 (s),
                                              gimple_assign_rhs2 (s),
                                              code2, op2a, op2b)))
        {
          /* Handle the AND case, where we are reassociating:
             (inner1 AND inner2) AND (op2a code2 op2b)
             => (inner1 AND t)  */
          if (is_and)
            {
              if (integer_onep (t))
                return inner1;
              else if (integer_zerop (t))
                return boolean_false_node;
              /* If both are the same, we can apply the identity
                 (x AND x) == x.  */
              else if (partial && same_bool_result_p (t, partial))
                return t;
            }

          /* Handle the OR case, where we are redistributing:
             (inner1 OR inner2) AND (op2a code2 op2b)
             => (t OR (inner1 AND (op2a code2 op2b)))
             => (t OR partial)  */
          else
            {
              if (integer_onep (t))
                return boolean_true_node;
              else if (partial)
                {
                  /* We already got a simplification for the other
                     operand to the redistributed OR expression.  The
                     interesting case is when at least one is false.
                     Or, if both are the same, we can apply the identity
                     (x OR x) == x.  */
                  if (integer_zerop (partial))
                    return t;
                  else if (integer_zerop (t))
                    return partial;
                  else if (same_bool_result_p (t, partial))
                    return t;
                }
            }
        }
    }
  return NULL_TREE;
}
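
/* Illustrative example (editorial sketch, not in the original source;
   SSA names are hypothetical): given

     var_1 = a_2 < b_3;
     t_4 = var_1 & (a_2 >= b_3);

   the recursion into and_comparisons_1 merges the two comparisons of
   the same operands and t_4 folds to boolean_false_node.  */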
/* Try to simplify the AND of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) AND (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a == op1b), we should either be able to
	 return that or FALSE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	      else
		return boolean_false_node;
	    }
	}
      /* Likewise if the second comparison is an == comparison.  */
      else if (code2 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	      else
		return boolean_false_node;
	    }
	}

      /* Same business with inequality tests.  */
      else if (code1 == NE_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp != 0); break;
	    case NE_EXPR: val = (cmp == 0); break;
	    case LT_EXPR: val = (cmp >= 0); break;
	    case GT_EXPR: val = (cmp <= 0); break;
	    case LE_EXPR: val = (cmp > 0); break;
	    case GE_EXPR: val = (cmp < 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == NE_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp <= 0); break;
	    case GT_EXPR: val = (cmp >= 0); break;
	    case LE_EXPR: val = (cmp < 0); break;
	    case GE_EXPR: val = (cmp > 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Choose the more restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Likewise choose the more restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Check for singleton ranges.  */
      else if (cmp == 0
	       && ((code1 == LE_EXPR && code2 == GE_EXPR)
		   || (code1 == GE_EXPR && code2 == LE_EXPR)))
	return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);

      /* Check for disjoint ranges.  */
      else if (cmp <= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_false_node;
      else if (cmp >= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_false_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return and_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ANDed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
			{
			  if (!result)
			    result = boolean_false_node;
			  else if (!integer_zerop (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = and_var_with_comparison (arg, invert, code2,
						      op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}
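/* For example (illustrative): and_comparisons_1 (EQ_EXPR, x, 3, LT_EXPR,
   x, 5) merges (x == 3) AND (x < 5); since 3 < 5 the second test is
   redundant and the result is the tree for x == 3.  Likewise
   and_comparisons_1 (LT_EXPR, x, 3, GT_EXPR, x, 5) detects a disjoint
   range and yields boolean_false_node.  */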
/* Try to simplify the AND of two comparisons, specified by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be simplified to a single expression (without requiring
   introducing more SSA variables to hold intermediate values),
   return the resulting tree.  Otherwise return NULL_TREE.
   If the result expression is non-null, it has boolean type.  */

tree
maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
			    enum tree_code code2, tree op2a, tree op2b)
{
  tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
  if (t)
    return t;
  else
    return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
}
/* Helper function for or_comparisons_1:  try to simplify the OR of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of VAR before doing the OR.
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison (tree var, bool invert,
			enum tree_code code2, tree op2a, tree op2b)
{
  tree t;
  gimple stmt = SSA_NAME_DEF_STMT (var);

  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.  */
  if (invert)
    t = and_var_with_comparison_1 (stmt,
				   invert_tree_comparison (code2, false),
				   op2a, op2b);
  else
    t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
  return canonicalize_bool (t, invert);
}
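/* For instance (illustrative): with INVERT set and code2 == EQ_EXPR against
   zero, !var OR (x == 0) is handled as !(var AND (x != 0)); the AND helper
   runs on the inverted comparison and canonicalize_bool re-applies the
   negation to whatever simplification comes back.  */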
/* Try to simplify the OR of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison_1 (gimple stmt,
			  enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var OR (var != 0)) => true .  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_true_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = or_comparisons_1 (innercode,
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 code2, op2a, op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple s;
      tree t;
      tree partial = NULL_TREE;
      bool is_or = (innercode == BIT_IOR_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
	 inner1 OR (inner1 AND inner2) => inner1
	 !inner1 OR (inner1 OR inner2) => true
	 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
      */
      if (inner1 == true_test_var)
	return (is_or ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_or ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the OR across the inner tests.
	 Compute the first partial result, (inner1 OR (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (t OR inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_or)
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (integer_zerop (t))
		return inner2;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner2 OR (op2a code op2b)))  */
	  else if (integer_zerop (t))
	    return boolean_false_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (inner1 OR t)
	     => (t OR partial)  */
	  if (is_or)
	    {
	      if (integer_zerop (t))
		return inner1;
	      else if (integer_onep (t))
		return boolean_true_node;
	      /* If both are the same, we can apply the identity
		 (x OR x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner1 OR (op2a code2 op2b)))
	     => (t AND partial)  */
	  else
	    {
	      if (integer_zerop (t))
		return boolean_false_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed AND expression.  The
		     interesting case is when at least one is true.
		     Or, if both are the same, we can apply the identity
		     (x AND x) == x.  */
		  if (integer_onep (partial))
		    return t;
		  else if (integer_onep (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
/* Try to simplify the OR of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) OR (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a != op1b), we should either be able to
	 return that or TRUE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	    }
	}
      /* Likewise if the second comparison is a != comparison.  */
      else if (code2 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	    }
	}

      /* See if an equality test is redundant with the other comparison.  */
      else if (code1 == EQ_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == EQ_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Choose the less restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Likewise choose the less restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Check for singleton ranges.  */
      else if (cmp == 0
	       && ((code1 == LT_EXPR && code2 == GT_EXPR)
		   || (code1 == GT_EXPR && code2 == LT_EXPR)))
	return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);

      /* Check for less/greater pairs that don't restrict the range at all.  */
      else if (cmp >= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_true_node;
      else if (cmp <= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_true_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return or_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ORed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
			{
			  if (!result)
			    result = boolean_true_node;
			  else if (!integer_onep (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = or_var_with_comparison (arg, invert, code2,
						     op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}
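/* For example (illustrative): or_comparisons_1 (LT_EXPR, x, 3, GE_EXPR,
   x, 3) covers the whole range and yields boolean_true_node, while
   (x < 3) OR (x > 3) folds to the tree for x != 3.  */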
/* Try to simplify the OR of two comparisons, specified by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be simplified to a single expression (without requiring
   introducing more SSA variables to hold intermediate values),
   return the resulting tree.  Otherwise return NULL_TREE.
   If the result expression is non-null, it has boolean type.  */

tree
maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
  if (t)
    return t;
  else
    return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
}
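/* As with the AND variant, or_comparisons_1 only follows the SSA
   definition of its first comparison, so both argument orders are tried;
   if only op2a has a simplifiable defining statement, the swapped call
   is the one that finds it.  */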
/* Fold STMT to a constant using VALUEIZE to valueize SSA names.

   Either NULL_TREE, a simplified but non-constant or a constant
   is returned.

   ???  This should go into a gimple-fold-inline.h file to be eventually
   privatized with the single valueize function used in the various TUs
   to avoid the indirect function call overhead.  */

tree
gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree),
				tree (*gvalueize) (tree))
{
  code_helper rcode;
  tree ops[3] = {};

  /* ???  The SSA propagators do not correctly deal with following SSA use-def
     edges if there are intermediate VARYING defs.  For this reason
     do not follow SSA edges here even though SCCVN can technically
     just deal fine with that.  */
  if (gimple_simplify (stmt, &rcode, ops, NULL, gvalueize)
      && rcode.is_tree_code ()
      && (TREE_CODE_LENGTH ((tree_code) rcode) == 0
	  || ((tree_code) rcode) == ADDR_EXPR)
      && is_gimple_val (ops[0]))
    {
      tree res = ops[0];
      if (dump_file && dump_flags & TDF_DETAILS)
	{
	  fprintf (dump_file, "Match-and-simplified ");
	  print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
	  fprintf (dump_file, " to ");
	  print_generic_expr (dump_file, res, 0);
	  fprintf (dump_file, "\n");
	}
      return res;
    }

  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	enum tree_code subcode = gimple_assign_rhs_code (stmt);

	switch (get_gimple_rhs_class (subcode))
	  {
	  case GIMPLE_SINGLE_RHS:
	    {
	      tree rhs = gimple_assign_rhs1 (stmt);
	      enum tree_code_class kind = TREE_CODE_CLASS (subcode);

	      if (TREE_CODE (rhs) == SSA_NAME)
		{
		  /* If the RHS is an SSA_NAME, return its known constant value,
		     if any.  */
		  return (*valueize) (rhs);
		}
	      /* Handle propagating invariant addresses into address
		 operations.  */
	      else if (TREE_CODE (rhs) == ADDR_EXPR
		       && !is_gimple_min_invariant (rhs))
		{
		  HOST_WIDE_INT offset = 0;
		  tree base;
		  base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
							  &offset,
							  valueize);
		  if (base
		      && (CONSTANT_CLASS_P (base)
			  || decl_address_invariant_p (base)))
		    return build_invariant_address (TREE_TYPE (rhs),
						    base, offset);
		}
	      else if (TREE_CODE (rhs) == CONSTRUCTOR
		       && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		       && (CONSTRUCTOR_NELTS (rhs)
			   == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
		{
		  unsigned i;
		  tree val, *vec;

		  vec = XALLOCAVEC (tree,
				    TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
		  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
		    {
		      val = (*valueize) (val);
		      if (TREE_CODE (val) == INTEGER_CST
			  || TREE_CODE (val) == REAL_CST
			  || TREE_CODE (val) == FIXED_CST)
			vec[i] = val;
		      else
			return NULL_TREE;
		    }

		  return build_vector (TREE_TYPE (rhs), vec);
		}
	      if (subcode == OBJ_TYPE_REF)
		{
		  tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
		  /* If callee is constant, we can fold away the wrapper.  */
		  if (is_gimple_min_invariant (val))
		    return val;
		}

	      if (kind == tcc_reference)
		{
		  if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
		       || TREE_CODE (rhs) == REALPART_EXPR
		       || TREE_CODE (rhs) == IMAGPART_EXPR)
		      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      return fold_unary_loc (EXPR_LOCATION (rhs),
					     TREE_CODE (rhs),
					     TREE_TYPE (rhs), val);
		    }
		  else if (TREE_CODE (rhs) == BIT_FIELD_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      return fold_ternary_loc (EXPR_LOCATION (rhs),
					       TREE_CODE (rhs),
					       TREE_TYPE (rhs), val,
					       TREE_OPERAND (rhs, 1),
					       TREE_OPERAND (rhs, 2));
		    }
		  else if (TREE_CODE (rhs) == MEM_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      if (TREE_CODE (val) == ADDR_EXPR
			  && is_gimple_min_invariant (val))
			{
			  tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
						  unshare_expr (val),
						  TREE_OPERAND (rhs, 1));
			  if (tem)
			    rhs = tem;
			}
		    }
		  return fold_const_aggregate_ref_1 (rhs, valueize);
		}
	      else if (kind == tcc_declaration)
		return get_symbol_constant_value (rhs);
	      return rhs;
	    }

	  case GIMPLE_UNARY_RHS:
	    return NULL_TREE;

	  case GIMPLE_BINARY_RHS:
	    {
	      /* Handle binary operators that can appear in GIMPLE form.  */
	      tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
	      tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));

	      /* Translate &x + CST into an invariant form suitable for
		 further propagation.  */
	      if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
		  && TREE_CODE (op0) == ADDR_EXPR
		  && TREE_CODE (op1) == INTEGER_CST)
		{
		  tree off = fold_convert (ptr_type_node, op1);
		  return build_fold_addr_expr_loc
			   (loc,
			    fold_build2 (MEM_REF,
					 TREE_TYPE (TREE_TYPE (op0)),
					 unshare_expr (op0), off));
		}

	      return fold_binary_loc (loc, subcode,
				      gimple_expr_type (stmt), op0, op1);
	    }

	  case GIMPLE_TERNARY_RHS:
	    {
	      /* Handle ternary operators that can appear in GIMPLE form.  */
	      tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
	      tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
	      tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));

	      /* Fold embedded expressions in ternary codes.  */
	      if ((subcode == COND_EXPR
		   || subcode == VEC_COND_EXPR)
		  && COMPARISON_CLASS_P (op0))
		{
		  tree op00 = (*valueize) (TREE_OPERAND (op0, 0));
		  tree op01 = (*valueize) (TREE_OPERAND (op0, 1));
		  tree tem = fold_binary_loc (loc, TREE_CODE (op0),
					      TREE_TYPE (op0), op00, op01);
		  if (tem)
		    op0 = tem;
		}

	      return fold_ternary_loc (loc, subcode,
				       gimple_expr_type (stmt), op0, op1, op2);
	    }

	  default:
	    gcc_unreachable ();
	  }
      }

    case GIMPLE_CALL:
      {
	tree fn;
	gcall *call_stmt = as_a <gcall *> (stmt);

	if (gimple_call_internal_p (stmt))
	  {
	    enum tree_code subcode = ERROR_MARK;
	    switch (gimple_call_internal_fn (stmt))
	      {
	      case IFN_UBSAN_CHECK_ADD:
		subcode = PLUS_EXPR;
		break;
	      case IFN_UBSAN_CHECK_SUB:
		subcode = MINUS_EXPR;
		break;
	      case IFN_UBSAN_CHECK_MUL:
		subcode = MULT_EXPR;
		break;
	      default:
		return NULL_TREE;
	      }

	    tree arg0 = gimple_call_arg (stmt, 0);
	    tree arg1 = gimple_call_arg (stmt, 1);
	    tree op0 = (*valueize) (arg0);
	    tree op1 = (*valueize) (arg1);

	    if (TREE_CODE (op0) != INTEGER_CST
		|| TREE_CODE (op1) != INTEGER_CST)
	      {
		switch (subcode)
		  {
		  case MULT_EXPR:
		    /* x * 0 = 0 * x = 0 without overflow.  */
		    if (integer_zerop (op0) || integer_zerop (op1))
		      return build_zero_cst (TREE_TYPE (arg0));
		    break;
		  case MINUS_EXPR:
		    /* y - y = 0 without overflow.  */
		    if (operand_equal_p (op0, op1, 0))
		      return build_zero_cst (TREE_TYPE (arg0));
		    break;
		  default:
		    break;
		  }
	      }
	    tree res
	      = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
	    if (res
		&& TREE_CODE (res) == INTEGER_CST
		&& !TREE_OVERFLOW (res))
	      return res;
	    return NULL_TREE;
	  }

	fn = (*valueize) (gimple_call_fn (stmt));
	if (TREE_CODE (fn) == ADDR_EXPR
	    && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
	    && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
	    && gimple_builtin_call_types_compatible_p (stmt,
						       TREE_OPERAND (fn, 0)))
	  {
	    tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
	    tree retval;
	    unsigned i;
	    for (i = 0; i < gimple_call_num_args (stmt); ++i)
	      args[i] = (*valueize) (gimple_call_arg (stmt, i));
	    retval = fold_builtin_call_array (loc,
					      gimple_call_return_type (call_stmt),
					      fn, gimple_call_num_args (stmt),
					      args);
	    if (retval)
	      {
		/* fold_call_expr wraps the result inside a NOP_EXPR.  */
		STRIP_NOPS (retval);
		retval = fold_convert (gimple_call_return_type (call_stmt),
				       retval);
	      }
	    return retval;
	  }
	return NULL_TREE;
      }

    default:
      return NULL_TREE;
    }
}
/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
   Returns NULL_TREE if folding to a constant is not possible, otherwise
   returns a constant according to is_gimple_min_invariant.  */

tree
gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
{
  tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
  if (res && is_gimple_min_invariant (res))
    return res;
  return NULL_TREE;
}
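/* A minimal usage sketch (illustrative; valueize_op is a hypothetical
   callback that returns the known constant for an SSA name, or the name
   itself when none is known):

     tree val = gimple_fold_stmt_to_constant (stmt, valueize_op);
     if (val)
       ... val satisfies is_gimple_min_invariant and may be propagated ...  */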
/* The following set of functions are supposed to fold references using
   their constant initializers.  */

/* See if we can find constructor defining value of BASE.
   When we know the constructor with constant offset (such as
   base is array[40] and we do know constructor of array), then
   BIT_OFFSET is adjusted accordingly.

   As a special case, return error_mark_node when constructor
   is not explicitly available, but it is known to be zero
   such as 'static const int a;'.  */

static tree
get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
		      tree (*valueize)(tree))
{
  HOST_WIDE_INT bit_offset2, size, max_size;
  if (TREE_CODE (base) == MEM_REF)
    {
      if (!integer_zerop (TREE_OPERAND (base, 1)))
	{
	  if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
	    return NULL_TREE;
	  *bit_offset += (mem_ref_offset (base).to_short_addr ()
			  * BITS_PER_UNIT);
	}

      if (valueize
	  && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
	base = valueize (TREE_OPERAND (base, 0));
      if (!base || TREE_CODE (base) != ADDR_EXPR)
	return NULL_TREE;
      base = TREE_OPERAND (base, 0);
    }

  /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
     DECL_INITIAL.  If BASE is a nested reference into another
     ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
     the inner reference.  */
  switch (TREE_CODE (base))
    {
    case VAR_DECL:
    case CONST_DECL:
      {
	tree init = ctor_for_folding (base);

	/* Our semantic is exact opposite of ctor_for_folding;
	   NULL means unknown, while error_mark_node is 0.  */
	if (init == error_mark_node)
	  return NULL_TREE;
	if (!init)
	  return error_mark_node;
	return init;
      }

    case ARRAY_REF:
    case COMPONENT_REF:
      base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
      if (max_size == -1 || size != max_size)
	return NULL_TREE;
      *bit_offset += bit_offset2;
      return get_base_constructor (base, bit_offset, valueize);

    case STRING_CST:
    case CONSTRUCTOR:
      return base;

    default:
      return NULL_TREE;
    }
}
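/* For instance (illustrative): for

     static const int a[2] = { 1, 2 };

   a MEM_REF of &a with constant byte offset 4 hands back the CONSTRUCTOR
   { 1, 2 } and adds 32 to BIT_OFFSET, while 'static const int b;' has no
   explicit constructor and comes back as error_mark_node, meaning a known
   zero value.  */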
/* CTOR is CONSTRUCTOR of an array type.  Fold reference of type TYPE and size
   SIZE to the memory at bit OFFSET.  */

static tree
fold_array_ctor_reference (tree type, tree ctor,
			   unsigned HOST_WIDE_INT offset,
			   unsigned HOST_WIDE_INT size,
			   tree from_decl)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;
  offset_int low_bound;
  offset_int elt_size;
  offset_int index, max_index;
  offset_int access_index;
  tree domain_type = NULL_TREE, index_type = NULL_TREE;
  HOST_WIDE_INT inner_offset;

  /* Compute low bound and elt size.  */
  if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
    domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    {
      /* Static constructors for variably sized objects make no sense.  */
      gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
      index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
      low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
    }
  else
    low_bound = 0;
  /* Static constructors for variably sized objects make no sense.  */
  gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
	      == INTEGER_CST);
  elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));

  /* We can handle only constantly sized accesses that are known to not
     be larger than size of array element.  */
  if (!TYPE_SIZE_UNIT (type)
      || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
      || wi::lts_p (elt_size, wi::to_offset (TYPE_SIZE_UNIT (type))))
    return NULL_TREE;

  /* Compute the array index we look for.  */
  access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
				 elt_size);
  access_index += low_bound;
  if (index_type)
    access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
			    TYPE_SIGN (index_type));

  /* And offset within the access.  */
  inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);

  /* See if the array field is large enough to span the whole access.  We do
     not care to fold accesses spanning multiple array indexes.  */
  if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
    return NULL_TREE;

  index = low_bound - 1;
  if (index_type)
    index = wi::ext (index, TYPE_PRECISION (index_type),
		     TYPE_SIGN (index_type));

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
    {
      /* An array constructor might explicitly set the index, or specify a
	 range, or leave the index NULL, meaning that it is the next index
	 after the previous one.  */
      if (cfield)
	{
	  if (TREE_CODE (cfield) == INTEGER_CST)
	    max_index = index = wi::to_offset (cfield);
	  else
	    {
	      gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
	      index = wi::to_offset (TREE_OPERAND (cfield, 0));
	      max_index = wi::to_offset (TREE_OPERAND (cfield, 1));
	    }
	}
      else
	{
	  index += 1;
	  if (index_type)
	    index = wi::ext (index, TYPE_PRECISION (index_type),
			     TYPE_SIGN (index_type));
	  max_index = index;
	}

      /* Do we have match?  */
      if (wi::cmpu (access_index, index) >= 0
	  && wi::cmpu (access_index, max_index) <= 0)
	return fold_ctor_reference (type, cval, inner_offset, size,
				    from_decl);
    }
  /* When memory is not explicitly mentioned in constructor,
     it is 0 (or out of range).  */
  return build_zero_cst (type);
}
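/* For example (illustrative): reading the int at byte offset 8 of

     static const int a[4] = { 10, 20, 30, 40 };

   has OFFSET == 64 bits; with elt_size == 4 bytes, access_index is
   64/8/4 == 2 and inner_offset is 0, so the walk over the CONSTRUCTOR
   returns the element 30.  */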
/* CTOR is CONSTRUCTOR of an aggregate or vector.
   Fold reference of type TYPE and size SIZE to the memory at bit OFFSET.  */

static tree
fold_nonarray_ctor_reference (tree type, tree ctor,
			      unsigned HOST_WIDE_INT offset,
			      unsigned HOST_WIDE_INT size,
			      tree from_decl)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
			    cval)
    {
      tree byte_offset = DECL_FIELD_OFFSET (cfield);
      tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
      tree field_size = DECL_SIZE (cfield);
      offset_int bitoffset;
      offset_int bitoffset_end, access_end;

      /* Variable sized objects in static constructors make no sense,
	 but field_size can be NULL for flexible array members.  */
      gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
		  && TREE_CODE (byte_offset) == INTEGER_CST
		  && (field_size != NULL_TREE
		      ? TREE_CODE (field_size) == INTEGER_CST
		      : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));

      /* Compute bit offset of the field.  */
      bitoffset = (wi::to_offset (field_offset)
		   + wi::lshift (wi::to_offset (byte_offset),
				 LOG2_BITS_PER_UNIT));
      /* Compute bit offset where the field ends.  */
      if (field_size != NULL_TREE)
	bitoffset_end = bitoffset + wi::to_offset (field_size);
      else
	bitoffset_end = 0;

      access_end = offset_int (offset) + size;

      /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
	 [BITOFFSET, BITOFFSET_END)?  */
      if (wi::cmps (access_end, bitoffset) > 0
	  && (field_size == NULL_TREE
	      || wi::lts_p (offset, bitoffset_end)))
	{
	  offset_int inner_offset = offset_int (offset) - bitoffset;
	  /* We do have overlap.  Now see if the field is large enough to
	     cover the access.  Give up for accesses spanning multiple
	     fields.  */
	  if (wi::cmps (access_end, bitoffset_end) > 0)
	    return NULL_TREE;
	  if (wi::lts_p (offset, bitoffset))
	    return NULL_TREE;
	  return fold_ctor_reference (type, cval,
				      inner_offset.to_uhwi (), size,
				      from_decl);
	}
    }
  /* When memory is not explicitly mentioned in constructor, it is 0.  */
  return build_zero_cst (type);
}
/* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
   to the memory at bit OFFSET.  */

tree
fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
		     unsigned HOST_WIDE_INT size, tree from_decl)
{
  tree ret;

  /* We found the field with exact match.  */
  if (useless_type_conversion_p (type, TREE_TYPE (ctor))
      && !offset)
    return canonicalize_constructor_val (unshare_expr (ctor), from_decl);

  /* We are at the end of walk, see if we can view convert the
     result.  */
  if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
      /* VIEW_CONVERT_EXPR is defined only for matching sizes.  */
      && !compare_tree_int (TYPE_SIZE (type), size)
      && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
    {
      ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
      ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
      if (ret)
	STRIP_USELESS_TYPE_CONVERSION (ret);
      return ret;
    }
  /* For constants and byte-aligned/sized reads try to go through
     native_encode/interpret.  */
  if (CONSTANT_CLASS_P (ctor)
      && BITS_PER_UNIT == 8
      && offset % BITS_PER_UNIT == 0
      && size % BITS_PER_UNIT == 0
      && size <= MAX_BITSIZE_MODE_ANY_MODE)
    {
      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
      if (native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
			      offset / BITS_PER_UNIT) > 0)
	return native_interpret_expr (type, buf, size / BITS_PER_UNIT);
    }
  if (TREE_CODE (ctor) == CONSTRUCTOR)
    {
      if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
	  || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
	return fold_array_ctor_reference (type, ctor, offset, size,
					  from_decl);
      else
	return fold_nonarray_ctor_reference (type, ctor, offset, size,
					     from_decl);
    }

  return NULL_TREE;
}
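/* For instance (illustrative): folding a 32-bit integer read at offset 0
   from the REAL_CST 1.0f goes through the native_encode_expr /
   native_interpret_expr path above and reinterprets the float's bytes as
   the integer 0x3f800000, which is how constant loads through type
   punning get folded.  */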
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates, valueizing SSA
   names using VALUEIZE.  Return NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
{
  tree ctor, idx, base;
  HOST_WIDE_INT offset, size, max_size;
  tree tem;

  if (TREE_THIS_VOLATILE (t))
    return NULL_TREE;

  if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
    return get_symbol_constant_value (t);

  tem = fold_read_from_constant_string (t);
  if (tem)
    return tem;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Constant indexes are handled well by get_base_constructor.
	 Only special case variable offsets.
	 FIXME: This code can't handle nested references with variable indexes
	 (they will be handled only by iteration of ccp).  Perhaps we can bring
	 get_ref_base_and_extent here and make it use a valueize callback.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
	  && valueize
	  && (idx = (*valueize) (TREE_OPERAND (t, 1)))
	  && TREE_CODE (idx) == INTEGER_CST)
	{
	  tree low_bound, unit_size;

	  /* If the resulting bit-offset is constant, track it.  */
	  if ((low_bound = array_ref_low_bound (t),
	       TREE_CODE (low_bound) == INTEGER_CST)
	      && (unit_size = array_ref_element_size (t),
		  tree_fits_uhwi_p (unit_size)))
	    {
	      offset_int woffset
		= wi::sext (wi::to_offset (idx) - wi::to_offset (low_bound),
			    TYPE_PRECISION (TREE_TYPE (idx)));

	      if (wi::fits_shwi_p (woffset))
		{
		  offset = woffset.to_shwi ();
		  /* TODO: This code seems wrong, multiply then check
		     to see if it fits.  */
		  offset *= tree_to_uhwi (unit_size);
		  offset *= BITS_PER_UNIT;

		  base = TREE_OPERAND (t, 0);
		  ctor = get_base_constructor (base, &offset, valueize);
		  /* Empty constructor.  Always fold to 0.  */
		  if (ctor == error_mark_node)
		    return build_zero_cst (TREE_TYPE (t));
		  /* Out of bound array access.  Value is undefined,
		     but don't fold.  */
		  if (offset < 0)
		    return NULL_TREE;
		  /* We can not determine ctor.  */
		  if (!ctor)
		    return NULL_TREE;
		  return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
					      tree_to_uhwi (unit_size)
					      * BITS_PER_UNIT,
					      base);
		}
	    }
	}
      /* Fallthru.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case TARGET_MEM_REF:
    case MEM_REF:
      base = get_ref_base_and_extent (t, &offset, &size, &max_size);
      ctor = get_base_constructor (base, &offset, valueize);

      /* Empty constructor.  Always fold to 0.  */
      if (ctor == error_mark_node)
	return build_zero_cst (TREE_TYPE (t));
      /* We do not know precise address.  */
      if (max_size == -1 || max_size != size)
	return NULL_TREE;
      /* We can not determine ctor.  */
      if (!ctor)
	return NULL_TREE;

      /* Out of bound array access.  Value is undefined, but don't fold.  */
      if (offset < 0)
	return NULL_TREE;

      return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
				  base);

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
	if (c && TREE_CODE (c) == COMPLEX_CST)
	  return fold_build1_loc (EXPR_LOCATION (t),
				  TREE_CODE (t), TREE_TYPE (t), c);
	break;
      }

    default:
      break;
    }

  return NULL_TREE;
}

/* Like fold_const_aggregate_ref_1, but without valueization of SSA names.  */

tree
fold_const_aggregate_ref (tree t)
{
  return fold_const_aggregate_ref_1 (t, NULL);
}
/* Lookup virtual method with index TOKEN in a virtual table V
   at OFFSET.
   Set CAN_REFER if non-NULL to false if method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by a non-C++ produced symbol). Otherwise just return NULL in that case.  */

tree
gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
				   tree v,
				   unsigned HOST_WIDE_INT offset,
				   bool *can_refer)
{
  tree vtable = v, init, fn;
  unsigned HOST_WIDE_INT size;
  unsigned HOST_WIDE_INT elt_size, access_index;
  tree domain_type;

  if (can_refer)
    *can_refer = true;

  /* First of all double check we have virtual table.  */
  if (TREE_CODE (v) != VAR_DECL
      || !DECL_VIRTUAL_P (v))
    {
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }

  init = ctor_for_folding (v);

  /* The virtual tables should always be born with constructors
     and we always should assume that they are available for
     folding.  At the moment we do not stream them in all cases,
     but it should never happen that ctor seem unreachable.  */
  gcc_assert (init);
  if (init == error_mark_node)
    {
      gcc_assert (in_lto_p);
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }

  gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
  size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
  offset *= BITS_PER_UNIT;
  offset += token * size;

  /* Lookup the value in the constructor that is assumed to be array.
     This is equivalent to
     fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
			       offset, size, NULL);
     but in a constant time.  We expect that frontend produced a simple
     array without indexed initializers.  */

  gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
  domain_type = TYPE_DOMAIN (TREE_TYPE (init));
  gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
  elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));

  access_index = offset / BITS_PER_UNIT / elt_size;
  gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);

  /* This code makes an assumption that there are no
     indexed fields produced by C++ FE, so we can directly index the array.  */
  if (access_index < CONSTRUCTOR_NELTS (init))
    {
      fn = CONSTRUCTOR_ELT (init, access_index)->value;
      gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
      STRIP_NOPS (fn);
    }
  else
    fn = NULL;

  /* For type inconsistent program we may end up looking up virtual method
     in virtual table that does not contain TOKEN entries.  We may overrun
     the virtual table and pick up a constant or RTTI info pointer.
     In any case the call is undefined.  */
  if (!fn
      || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
      || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
    fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
  else
    {
      fn = TREE_OPERAND (fn, 0);

      /* When cgraph node is missing and function is not public, we cannot
	 devirtualize.  This can happen in WHOPR when the actual method
	 ends up in other partition, because we found devirtualization
	 possibility too late.  */
      if (!can_refer_decl_in_current_unit_p (fn, vtable))
	{
	  if (can_refer)
	    {
	      *can_refer = false;
	      return fn;
	    }
	  return NULL_TREE;
	}
    }

  /* Make sure we create a cgraph node for functions we'll reference.
     They can be non-existent if the reference comes from an entry
     of an external vtable for example.  */
  cgraph_node::get_create (fn);

  return fn;
}
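/* For example (illustrative): with 8-byte vtable slots, OFFSET 16 and
   TOKEN 2 address bit 16*8 + 2*64 == 256, i.e. access_index == 4, so the
   fifth initializer of the vtable's CONSTRUCTOR is the candidate
   target.  */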
/* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
   is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
   KNOWN_BINFO carries the binfo describing the true type of
   OBJ_TYPE_REF_OBJECT(REF).
   Set CAN_REFER if non-NULL to false if method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by a non-C++ produced symbol). Otherwise just return NULL in that case.  */

tree
gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
				  bool *can_refer)
{
  unsigned HOST_WIDE_INT offset;
  tree v;

  v = BINFO_VTABLE (known_binfo);
  /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone.  */
  if (!v)
    return NULL_TREE;

  if (!vtable_pointer_value_to_vtable (v, &v, &offset))
    {
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }
  return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
}
/* Return true iff VAL is a gimple expression that is known to be
   non-negative.  Restricted to floating-point inputs.  */

bool
gimple_val_nonnegative_real_p (tree val)
{
  gimple def_stmt;

  gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));

  /* Use existing logic for non-gimple trees.  */
  if (tree_expr_nonnegative_p (val))
    return true;

  if (TREE_CODE (val) != SSA_NAME)
    return false;

  /* Currently we look only at the immediately defining statement
     to make this determination, since recursion on defining
     statements of operands can lead to quadratic behavior in the
     worst case.  This is expected to catch almost all occurrences
     in practice.  It would be possible to implement limited-depth
     recursion if important cases are lost.  Alternatively, passes
     that need this information (such as the pow/powi lowering code
     in the cse_sincos pass) could be revised to provide it through
     dataflow propagation.  */

  def_stmt = SSA_NAME_DEF_STMT (val);

  if (is_gimple_assign (def_stmt))
    {
      tree op0, op1;

      /* See fold-const.c:tree_expr_nonnegative_p for additional
	 cases that could be handled with recursion.  */

      switch (gimple_assign_rhs_code (def_stmt))
	{
	case ABS_EXPR:
	  /* Always true for floating-point operands.  */
	  return true;

	case MULT_EXPR:
	  /* True if the two operands are identical (since we are
	     restricted to floating-point inputs).  */
	  op0 = gimple_assign_rhs1 (def_stmt);
	  op1 = gimple_assign_rhs2 (def_stmt);

	  if (op0 == op1
	      || operand_equal_p (op0, op1, 0))
	    return true;
	  break;

	default:
	  break;
	}
    }
  else if (is_gimple_call (def_stmt))
    {
      tree fndecl = gimple_call_fndecl (def_stmt);
      if (fndecl
	  && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	{
	  tree arg1;

	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_ACOS):
	    CASE_FLT_FN (BUILT_IN_ACOSH):
	    CASE_FLT_FN (BUILT_IN_CABS):
	    CASE_FLT_FN (BUILT_IN_COSH):
	    CASE_FLT_FN (BUILT_IN_ERFC):
	    CASE_FLT_FN (BUILT_IN_EXP):
	    CASE_FLT_FN (BUILT_IN_EXP10):
	    CASE_FLT_FN (BUILT_IN_EXP2):
	    CASE_FLT_FN (BUILT_IN_FABS):
	    CASE_FLT_FN (BUILT_IN_FDIM):
	    CASE_FLT_FN (BUILT_IN_HYPOT):
	    CASE_FLT_FN (BUILT_IN_POW10):
	      return true;

	    CASE_FLT_FN (BUILT_IN_SQRT):
	      /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
		 nonnegative inputs.  */
	      if (!HONOR_SIGNED_ZEROS (val))
		return true;
	      break;

	    CASE_FLT_FN (BUILT_IN_POWI):
	      /* True if the second argument is an even integer.  */
	      arg1 = gimple_call_arg (def_stmt, 1);

	      if (TREE_CODE (arg1) == INTEGER_CST
		  && (TREE_INT_CST_LOW (arg1) & 1) == 0)
		return true;
	      break;

	    CASE_FLT_FN (BUILT_IN_POW):
	      /* True if the second argument is an even integer-valued
		 real.  */
	      arg1 = gimple_call_arg (def_stmt, 1);

	      if (TREE_CODE (arg1) == REAL_CST)
		{
		  REAL_VALUE_TYPE c;
		  HOST_WIDE_INT n;

		  c = TREE_REAL_CST (arg1);
		  n = real_to_integer (&c);

		  if ((n & 1) == 0)
		    {
		      REAL_VALUE_TYPE cint;
		      real_from_integer (&cint, VOIDmode, n, SIGNED);
		      if (real_identical (&c, &cint))
			return true;
		    }
		}
	      break;

	    default:
	      break;
	    }
	}
    }

  return false;
}
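/* For instance (illustrative): for

     t = __builtin_pow (x, 2.0);

   the second argument is an even integer-valued REAL_CST, so
   gimple_val_nonnegative_real_p (t) returns true, which lets the
   cse_sincos pow/powi lowering mentioned above expand it without
   guarding against a negative result.  */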
/* Given a pointer value OP0, return a simplified version of an
   indirection through OP0, or NULL_TREE if no simplification is
   possible.  Note that the resulting type may be different from
   the type pointed to in the sense that it is still compatible
   from the langhooks point of view.  */

tree
gimple_fold_indirect_ref (tree t)
{
  tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
  tree sub = t;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (useless_type_conversion_p (type, optype))
	return op;

      /* *(foo *)&fooarray => fooarray[0] */
      if (TREE_CODE (optype) == ARRAY_TYPE
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
	  && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  if (TREE_CODE (min_val) == INTEGER_CST)
	    return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
	}
    }

  /* *(p + CST) -> ...  */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree addr = TREE_OPERAND (sub, 0);
      tree off = TREE_OPERAND (sub, 1);
      tree addrtype;

      STRIP_NOPS (addr);
      addrtype = TREE_TYPE (addr);

      /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
	  && tree_fits_uhwi_p (off))
	{
	  unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
	  tree part_width = TYPE_SIZE (type);
	  unsigned HOST_WIDE_INT part_widthi
	    = tree_to_shwi (part_width) / BITS_PER_UNIT;
	  unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
	  tree index = bitsize_int (indexi);
	  if (offset / part_widthi
	      < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
	    return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
				part_width, index);
	}

      /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
	{
	  tree size = TYPE_SIZE_UNIT (type);
	  if (tree_int_cst_equal (size, off))
	    return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
	}

      /* *(p + CST) -> MEM_REF <p, CST>.  */
      if (TREE_CODE (addr) != ADDR_EXPR
	  || DECL_P (TREE_OPERAND (addr, 0)))
	return fold_build2 (MEM_REF, type,
			    addr,
			    wide_int_to_tree (ptype, off));
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
      && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      tree osub = sub;
      sub = gimple_fold_indirect_ref (sub);
      if (! sub)
	sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      if (TREE_CODE (min_val) == INTEGER_CST)
	return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
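/* For example (illustrative): *(float *)&f, with f declared float f[4],
   matches the ADDR_EXPR case above and folds to f[0]; an indirection
   through (float *)&f + 4 matches the trailing *(p + CST) case and
   becomes MEM_REF <&f, 4>.  */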
/* Return true if CODE is an operation that when operating on signed
   integer types involves undefined behavior on overflow and the
   operation can be expressed with unsigned arithmetic.  */

bool
arith_code_with_undefined_signed_overflow (tree_code code)
{
  switch (code)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case NEGATE_EXPR:
    case POINTER_PLUS_EXPR:
      return true;
    default:
      return false;
    }
}
/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
   operation that can be transformed to unsigned arithmetic by converting
   its operand, carrying out the operation in the corresponding unsigned
   type and converting the result back to the original type.

   Returns a sequence of statements that replace STMT and also contain
   a modified form of STMT itself.  */

gimple_seq
rewrite_to_defined_overflow (gimple stmt)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "rewriting stmt with undefined signed "
	       "overflow ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
    }

  tree lhs = gimple_assign_lhs (stmt);
  tree type = unsigned_type_for (TREE_TYPE (lhs));
  gimple_seq stmts = NULL;
  for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
    {
      gimple_seq stmts2 = NULL;
      gimple_set_op (stmt, i,
		     force_gimple_operand (fold_convert (type,
							 gimple_op (stmt, i)),
					   &stmts2, true, NULL_TREE));
      gimple_seq_add_seq (&stmts, stmts2);
    }
  gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
  gimple_seq_add_stmt (&stmts, stmt);
  gimple cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
  gimple_seq_add_stmt (&stmts, cvt);

  return stmts;
}
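/* For instance (illustrative): a statement

     c_5 = a_1 + b_2;

   on a signed int type is rewritten to the sequence

     _6 = (unsigned int) a_1;
     _7 = (unsigned int) b_2;
     _8 = _6 + _7;
     c_5 = (int) _8;

   where the unsigned addition wraps instead of invoking undefined
   behavior on overflow.  */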
/* Build the expression CODE OP0 of type TYPE with location LOC,
   simplifying it first if possible using VALUEIZE if not NULL.
   OP0 is expected to be valueized already.  Returns the built
   expression value and appends statements possibly defining it
   to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum tree_code code, tree type, tree op0,
	      tree (*valueize)(tree))
{
  tree res = gimple_simplify (code, type, op0, seq, valueize);
  if (!res)
    {
      if (gimple_in_ssa_p (cfun))
	res = make_ssa_name (type);
      else
	res = create_tmp_reg (type);
      gimple stmt;
      if (code == REALPART_EXPR
	  || code == IMAGPART_EXPR
	  || code == VIEW_CONVERT_EXPR)
	stmt = gimple_build_assign (res, code, build1 (code, type, op0));
      else
	stmt = gimple_build_assign (res, code, op0);
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
/* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
   simplifying it first if possible using VALUEIZE if not NULL.
   OP0 and OP1 are expected to be valueized already.  Returns the built
   expression value and appends statements possibly defining it
   to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum tree_code code, tree type, tree op0, tree op1,
	      tree (*valueize)(tree))
{
  tree res = gimple_simplify (code, type, op0, op1, seq, valueize);
  if (!res)
    {
      if (gimple_in_ssa_p (cfun))
	res = make_ssa_name (type);
      else
	res = create_tmp_reg (type);
      gimple stmt = gimple_build_assign (res, code, op0, op1);
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
/* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
   simplifying it first if possible using VALUEIZE if not NULL.
   OP0, OP1 and OP2 are expected to be valueized already.  Returns the built
   expression value and appends statements possibly defining it
   to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum tree_code code, tree type, tree op0, tree op1, tree op2,
	      tree (*valueize)(tree))
{
  tree res = gimple_simplify (code, type, op0, op1, op2,
			      seq, valueize);
  if (!res)
    {
      if (gimple_in_ssa_p (cfun))
	res = make_ssa_name (type);
      else
	res = create_tmp_reg (type);
      gimple stmt;
      if (code == BIT_FIELD_REF)
	stmt = gimple_build_assign (res, code,
				    build3 (code, type, op0, op1, op2));
      else
	stmt = gimple_build_assign (res, code, op0, op1, op2);
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
/* Build the call FN (ARG0) with a result of type TYPE
   (or no result if TYPE is void) with location LOC,
   simplifying it first if possible using VALUEIZE if not NULL.
   ARG0 is expected to be valueized already.  Returns the built
   expression value (or NULL_TREE if TYPE is void) and appends
   statements possibly defining it to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum built_in_function fn, tree type, tree arg0,
	      tree (*valueize)(tree))
{
  tree res = gimple_simplify (fn, type, arg0, seq, valueize);
  if (!res)
    {
      tree decl = builtin_decl_implicit (fn);
      gimple stmt = gimple_build_call (decl, 1, arg0);
      if (!VOID_TYPE_P (type))
	{
	  if (gimple_in_ssa_p (cfun))
	    res = make_ssa_name (type);
	  else
	    res = create_tmp_reg (type);
	  gimple_call_set_lhs (stmt, res);
	}
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
/* Build the call FN (ARG0, ARG1) with a result of type TYPE
   (or no result if TYPE is void) with location LOC,
   simplifying it first if possible using VALUEIZE if not NULL.
   ARG0 and ARG1 are expected to be valueized already.  Returns the built
   expression value (or NULL_TREE if TYPE is void) and appends
   statements possibly defining it to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum built_in_function fn, tree type, tree arg0, tree arg1,
	      tree (*valueize)(tree))
{
  tree res = gimple_simplify (fn, type, arg0, arg1, seq, valueize);
  if (!res)
    {
      tree decl = builtin_decl_implicit (fn);
      gimple stmt = gimple_build_call (decl, 2, arg0, arg1);
      if (!VOID_TYPE_P (type))
	{
	  if (gimple_in_ssa_p (cfun))
	    res = make_ssa_name (type);
	  else
	    res = create_tmp_reg (type);
	  gimple_call_set_lhs (stmt, res);
	}
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
/* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
   (or no result if TYPE is void) with location LOC,
   simplifying it first if possible using VALUEIZE if not NULL.
   ARG0, ARG1 and ARG2 are expected to be valueized already.  Returns the
   built expression value (or NULL_TREE if TYPE is void) and appends
   statements possibly defining it to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum built_in_function fn, tree type,
	      tree arg0, tree arg1, tree arg2,
	      tree (*valueize)(tree))
{
  tree res = gimple_simplify (fn, type, arg0, arg1, arg2, seq, valueize);
  if (!res)
    {
      tree decl = builtin_decl_implicit (fn);
      gimple stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
      if (!VOID_TYPE_P (type))
	{
	  if (gimple_in_ssa_p (cfun))
	    res = make_ssa_name (type);
	  else
	    res = create_tmp_reg (type);
	  gimple_call_set_lhs (stmt, res);
	}
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
/* Build the conversion (TYPE) OP with a result of type TYPE
   with location LOC if such conversion is necessary in GIMPLE,
   simplifying it first.
   Returns the built expression value and appends
   statements possibly defining it to SEQ.  */

tree
gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
{
  if (useless_type_conversion_p (type, TREE_TYPE (op)))
    return op;
  return gimple_build (seq, loc, NOP_EXPR, type, op);
}
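/* A minimal usage sketch (illustrative): to emit tem = (sizetype) x + 1
   while folding on the fly, a caller can do

     gimple_seq seq = NULL;
     tree tem = gimple_convert (&seq, loc, sizetype, x);
     tem = gimple_build (&seq, loc, PLUS_EXPR, sizetype, tem,
			 build_int_cst (sizetype, 1), NULL);
     ... insert SEQ before the statement that will use TEM ...

   where the NULL valueizer means no extra SSA lookthrough is done.  */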