1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010-2015 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
31 #include "fold-const.h"
33 #include "insn-config.h"
42 #include "stor-layout.h"
44 #include "dominance.h"
45 #include "internal-fn.h"
46 #include "gimple-fold.h"
48 #include "gimple-iterator.h"
49 #include "tree-into-ssa.h"
52 #include "tree-ssa-propagate.h"
55 #include "ipa-utils.h"
56 #include "gimple-pretty-print.h"
57 #include "tree-ssa-address.h"
58 #include "langhooks.h"
59 #include "gimplify-me.h"
64 #include "gimple-match.h"
65 #include "gomp-constants.h"
67 /* Return true when DECL can be referenced from current unit.
68 FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
69 We can get declarations that are not possible to reference for various
72 1) When analyzing C++ virtual tables.
73 C++ virtual tables do have known constructors even
74 when they are keyed to other compilation unit.
75 Those tables can contain pointers to methods and vars
76 in other units. Those methods have both STATIC and EXTERNAL
78 2) In WHOPR mode devirtualization might lead to reference
79 to method that was partitioned elsehwere.
80 In this case we have static VAR_DECL or FUNCTION_DECL
81 that has no corresponding callgraph/varpool node
83 3) COMDAT functions referred by external vtables that
84 we devirtualize only during final compilation stage.
85 At this time we already decided that we will not output
86 the function body and thus we can't reference the symbol
90 can_refer_decl_in_current_unit_p (tree decl
, tree from_decl
)
93 struct cgraph_node
*node
;
96 if (DECL_ABSTRACT_P (decl
))
99 /* We are concerned only about static/external vars and functions. */
100 if ((!TREE_STATIC (decl
) && !DECL_EXTERNAL (decl
))
101 || (TREE_CODE (decl
) != VAR_DECL
&& TREE_CODE (decl
) != FUNCTION_DECL
))
104 /* Static objects can be referred only if they was not optimized out yet. */
105 if (!TREE_PUBLIC (decl
) && !DECL_EXTERNAL (decl
))
107 /* Before we start optimizing unreachable code we can be sure all
108 static objects are defined. */
109 if (symtab
->function_flags_ready
)
111 snode
= symtab_node::get (decl
);
112 if (!snode
|| !snode
->definition
)
114 node
= dyn_cast
<cgraph_node
*> (snode
);
115 return !node
|| !node
->global
.inlined_to
;
118 /* We will later output the initializer, so we can refer to it.
119 So we are concerned only when DECL comes from initializer of
120 external var or var that has been optimized out. */
122 || TREE_CODE (from_decl
) != VAR_DECL
123 || (!DECL_EXTERNAL (from_decl
)
124 && (vnode
= varpool_node::get (from_decl
)) != NULL
125 && vnode
->definition
)
127 && (vnode
= varpool_node::get (from_decl
)) != NULL
128 && vnode
->in_other_partition
))
130 /* We are folding reference from external vtable. The vtable may reffer
131 to a symbol keyed to other compilation unit. The other compilation
132 unit may be in separate DSO and the symbol may be hidden. */
133 if (DECL_VISIBILITY_SPECIFIED (decl
)
134 && DECL_EXTERNAL (decl
)
135 && DECL_VISIBILITY (decl
) != VISIBILITY_DEFAULT
136 && (!(snode
= symtab_node::get (decl
)) || !snode
->in_other_partition
))
138 /* When function is public, we always can introduce new reference.
139 Exception are the COMDAT functions where introducing a direct
140 reference imply need to include function body in the curren tunit. */
141 if (TREE_PUBLIC (decl
) && !DECL_COMDAT (decl
))
143 /* We have COMDAT. We are going to check if we still have definition
144 or if the definition is going to be output in other partition.
145 Bypass this when gimplifying; all needed functions will be produced.
147 As observed in PR20991 for already optimized out comdat virtual functions
148 it may be tempting to not necessarily give up because the copy will be
149 output elsewhere when corresponding vtable is output.
150 This is however not possible - ABI specify that COMDATs are output in
151 units where they are used and when the other unit was compiled with LTO
152 it is possible that vtable was kept public while the function itself
154 if (!symtab
->function_flags_ready
)
157 snode
= symtab_node::get (decl
);
159 || ((!snode
->definition
|| DECL_EXTERNAL (decl
))
160 && (!snode
->in_other_partition
161 || (!snode
->forced_by_abi
&& !snode
->force_output
))))
163 node
= dyn_cast
<cgraph_node
*> (snode
);
164 return !node
|| !node
->global
.inlined_to
;
167 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
168 acceptable form for is_gimple_min_invariant.
169 FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL. */
172 canonicalize_constructor_val (tree cval
, tree from_decl
)
174 tree orig_cval
= cval
;
176 if (TREE_CODE (cval
) == POINTER_PLUS_EXPR
177 && TREE_CODE (TREE_OPERAND (cval
, 1)) == INTEGER_CST
)
179 tree ptr
= TREE_OPERAND (cval
, 0);
180 if (is_gimple_min_invariant (ptr
))
181 cval
= build1_loc (EXPR_LOCATION (cval
),
182 ADDR_EXPR
, TREE_TYPE (ptr
),
183 fold_build2 (MEM_REF
, TREE_TYPE (TREE_TYPE (ptr
)),
185 fold_convert (ptr_type_node
,
186 TREE_OPERAND (cval
, 1))));
188 if (TREE_CODE (cval
) == ADDR_EXPR
)
190 tree base
= NULL_TREE
;
191 if (TREE_CODE (TREE_OPERAND (cval
, 0)) == COMPOUND_LITERAL_EXPR
)
193 base
= COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval
, 0));
195 TREE_OPERAND (cval
, 0) = base
;
198 base
= get_base_address (TREE_OPERAND (cval
, 0));
202 if ((TREE_CODE (base
) == VAR_DECL
203 || TREE_CODE (base
) == FUNCTION_DECL
)
204 && !can_refer_decl_in_current_unit_p (base
, from_decl
))
206 if (TREE_CODE (base
) == VAR_DECL
)
207 TREE_ADDRESSABLE (base
) = 1;
208 else if (TREE_CODE (base
) == FUNCTION_DECL
)
210 /* Make sure we create a cgraph node for functions we'll reference.
211 They can be non-existent if the reference comes from an entry
212 of an external vtable for example. */
213 cgraph_node::get_create (base
);
215 /* Fixup types in global initializers. */
216 if (TREE_TYPE (TREE_TYPE (cval
)) != TREE_TYPE (TREE_OPERAND (cval
, 0)))
217 cval
= build_fold_addr_expr (TREE_OPERAND (cval
, 0));
219 if (!useless_type_conversion_p (TREE_TYPE (orig_cval
), TREE_TYPE (cval
)))
220 cval
= fold_convert (TREE_TYPE (orig_cval
), cval
);
223 if (TREE_OVERFLOW_P (cval
))
224 return drop_tree_overflow (cval
);
228 /* If SYM is a constant variable with known value, return the value.
229 NULL_TREE is returned otherwise. */
232 get_symbol_constant_value (tree sym
)
234 tree val
= ctor_for_folding (sym
);
235 if (val
!= error_mark_node
)
239 val
= canonicalize_constructor_val (unshare_expr (val
), sym
);
240 if (val
&& is_gimple_min_invariant (val
))
245 /* Variables declared 'const' without an initializer
246 have zero as the initializer if they may not be
247 overridden at link or run time. */
249 && is_gimple_reg_type (TREE_TYPE (sym
)))
250 return build_zero_cst (TREE_TYPE (sym
));
258 /* Subroutine of fold_stmt. We perform several simplifications of the
259 memory reference tree EXPR and make sure to re-gimplify them properly
260 after propagation of constant addresses. IS_LHS is true if the
261 reference is supposed to be an lvalue. */
264 maybe_fold_reference (tree expr
, bool is_lhs
)
268 if ((TREE_CODE (expr
) == VIEW_CONVERT_EXPR
269 || TREE_CODE (expr
) == REALPART_EXPR
270 || TREE_CODE (expr
) == IMAGPART_EXPR
)
271 && CONSTANT_CLASS_P (TREE_OPERAND (expr
, 0)))
272 return fold_unary_loc (EXPR_LOCATION (expr
),
275 TREE_OPERAND (expr
, 0));
276 else if (TREE_CODE (expr
) == BIT_FIELD_REF
277 && CONSTANT_CLASS_P (TREE_OPERAND (expr
, 0)))
278 return fold_ternary_loc (EXPR_LOCATION (expr
),
281 TREE_OPERAND (expr
, 0),
282 TREE_OPERAND (expr
, 1),
283 TREE_OPERAND (expr
, 2));
286 && (result
= fold_const_aggregate_ref (expr
))
287 && is_gimple_min_invariant (result
))
294 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
295 replacement rhs for the statement or NULL_TREE if no simplification
296 could be made. It is assumed that the operands have been previously
300 fold_gimple_assign (gimple_stmt_iterator
*si
)
302 gimple
*stmt
= gsi_stmt (*si
);
303 enum tree_code subcode
= gimple_assign_rhs_code (stmt
);
304 location_t loc
= gimple_location (stmt
);
306 tree result
= NULL_TREE
;
308 switch (get_gimple_rhs_class (subcode
))
310 case GIMPLE_SINGLE_RHS
:
312 tree rhs
= gimple_assign_rhs1 (stmt
);
314 if (TREE_CLOBBER_P (rhs
))
317 if (REFERENCE_CLASS_P (rhs
))
318 return maybe_fold_reference (rhs
, false);
320 else if (TREE_CODE (rhs
) == OBJ_TYPE_REF
)
322 tree val
= OBJ_TYPE_REF_EXPR (rhs
);
323 if (is_gimple_min_invariant (val
))
325 else if (flag_devirtualize
&& virtual_method_call_p (rhs
))
328 vec
<cgraph_node
*>targets
329 = possible_polymorphic_call_targets (rhs
, stmt
, &final
);
330 if (final
&& targets
.length () <= 1 && dbg_cnt (devirt
))
332 if (dump_enabled_p ())
334 location_t loc
= gimple_location_safe (stmt
);
335 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS
, loc
,
336 "resolving virtual function address "
337 "reference to function %s\n",
338 targets
.length () == 1
339 ? targets
[0]->name ()
342 if (targets
.length () == 1)
344 val
= fold_convert (TREE_TYPE (val
),
345 build_fold_addr_expr_loc
346 (loc
, targets
[0]->decl
));
347 STRIP_USELESS_TYPE_CONVERSION (val
);
350 /* We can not use __builtin_unreachable here because it
351 can not have address taken. */
352 val
= build_int_cst (TREE_TYPE (val
), 0);
358 else if (TREE_CODE (rhs
) == ADDR_EXPR
)
360 tree ref
= TREE_OPERAND (rhs
, 0);
361 tree tem
= maybe_fold_reference (ref
, true);
363 && TREE_CODE (tem
) == MEM_REF
364 && integer_zerop (TREE_OPERAND (tem
, 1)))
365 result
= fold_convert (TREE_TYPE (rhs
), TREE_OPERAND (tem
, 0));
367 result
= fold_convert (TREE_TYPE (rhs
),
368 build_fold_addr_expr_loc (loc
, tem
));
369 else if (TREE_CODE (ref
) == MEM_REF
370 && integer_zerop (TREE_OPERAND (ref
, 1)))
371 result
= fold_convert (TREE_TYPE (rhs
), TREE_OPERAND (ref
, 0));
374 else if (TREE_CODE (rhs
) == CONSTRUCTOR
375 && TREE_CODE (TREE_TYPE (rhs
)) == VECTOR_TYPE
376 && (CONSTRUCTOR_NELTS (rhs
)
377 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs
))))
379 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
383 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs
), i
, val
)
384 if (TREE_CODE (val
) != INTEGER_CST
385 && TREE_CODE (val
) != REAL_CST
386 && TREE_CODE (val
) != FIXED_CST
)
389 return build_vector_from_ctor (TREE_TYPE (rhs
),
390 CONSTRUCTOR_ELTS (rhs
));
393 else if (DECL_P (rhs
))
394 return get_symbol_constant_value (rhs
);
396 /* If we couldn't fold the RHS, hand over to the generic
398 if (result
== NULL_TREE
)
401 /* Strip away useless type conversions. Both the NON_LVALUE_EXPR
402 that may have been added by fold, and "useless" type
403 conversions that might now be apparent due to propagation. */
404 STRIP_USELESS_TYPE_CONVERSION (result
);
406 if (result
!= rhs
&& valid_gimple_rhs_p (result
))
413 case GIMPLE_UNARY_RHS
:
416 case GIMPLE_BINARY_RHS
:
419 case GIMPLE_TERNARY_RHS
:
420 result
= fold_ternary_loc (loc
, subcode
,
421 TREE_TYPE (gimple_assign_lhs (stmt
)),
422 gimple_assign_rhs1 (stmt
),
423 gimple_assign_rhs2 (stmt
),
424 gimple_assign_rhs3 (stmt
));
428 STRIP_USELESS_TYPE_CONVERSION (result
);
429 if (valid_gimple_rhs_p (result
))
434 case GIMPLE_INVALID_RHS
:
442 /* Replace a statement at *SI_P with a sequence of statements in STMTS,
443 adjusting the replacement stmts location and virtual operands.
444 If the statement has a lhs the last stmt in the sequence is expected
445 to assign to that lhs. */
448 gsi_replace_with_seq_vops (gimple_stmt_iterator
*si_p
, gimple_seq stmts
)
450 gimple
*stmt
= gsi_stmt (*si_p
);
452 if (gimple_has_location (stmt
))
453 annotate_all_with_location (stmts
, gimple_location (stmt
));
455 /* First iterate over the replacement statements backward, assigning
456 virtual operands to their defining statements. */
457 gimple
*laststore
= NULL
;
458 for (gimple_stmt_iterator i
= gsi_last (stmts
);
459 !gsi_end_p (i
); gsi_prev (&i
))
461 gimple
*new_stmt
= gsi_stmt (i
);
462 if ((gimple_assign_single_p (new_stmt
)
463 && !is_gimple_reg (gimple_assign_lhs (new_stmt
)))
464 || (is_gimple_call (new_stmt
)
465 && (gimple_call_flags (new_stmt
)
466 & (ECF_NOVOPS
| ECF_PURE
| ECF_CONST
| ECF_NORETURN
)) == 0))
470 vdef
= gimple_vdef (stmt
);
472 vdef
= make_ssa_name (gimple_vop (cfun
), new_stmt
);
473 gimple_set_vdef (new_stmt
, vdef
);
474 if (vdef
&& TREE_CODE (vdef
) == SSA_NAME
)
475 SSA_NAME_DEF_STMT (vdef
) = new_stmt
;
476 laststore
= new_stmt
;
480 /* Second iterate over the statements forward, assigning virtual
481 operands to their uses. */
482 tree reaching_vuse
= gimple_vuse (stmt
);
483 for (gimple_stmt_iterator i
= gsi_start (stmts
);
484 !gsi_end_p (i
); gsi_next (&i
))
486 gimple
*new_stmt
= gsi_stmt (i
);
487 /* If the new statement possibly has a VUSE, update it with exact SSA
488 name we know will reach this one. */
489 if (gimple_has_mem_ops (new_stmt
))
490 gimple_set_vuse (new_stmt
, reaching_vuse
);
491 gimple_set_modified (new_stmt
, true);
492 if (gimple_vdef (new_stmt
))
493 reaching_vuse
= gimple_vdef (new_stmt
);
496 /* If the new sequence does not do a store release the virtual
497 definition of the original statement. */
499 && reaching_vuse
== gimple_vuse (stmt
))
501 tree vdef
= gimple_vdef (stmt
);
503 && TREE_CODE (vdef
) == SSA_NAME
)
505 unlink_stmt_vdef (stmt
);
506 release_ssa_name (vdef
);
510 /* Finally replace the original statement with the sequence. */
511 gsi_replace_with_seq (si_p
, stmts
, false);
514 /* Convert EXPR into a GIMPLE value suitable for substitution on the
515 RHS of an assignment. Insert the necessary statements before
516 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
517 is replaced. If the call is expected to produces a result, then it
518 is replaced by an assignment of the new RHS to the result variable.
519 If the result is to be ignored, then the call is replaced by a
520 GIMPLE_NOP. A proper VDEF chain is retained by making the first
521 VUSE and the last VDEF of the whole sequence be the same as the replaced
522 statement and using new SSA names for stores in between. */
525 gimplify_and_update_call_from_tree (gimple_stmt_iterator
*si_p
, tree expr
)
528 gimple
*stmt
, *new_stmt
;
529 gimple_stmt_iterator i
;
530 gimple_seq stmts
= NULL
;
532 stmt
= gsi_stmt (*si_p
);
534 gcc_assert (is_gimple_call (stmt
));
536 push_gimplify_context (gimple_in_ssa_p (cfun
));
538 lhs
= gimple_call_lhs (stmt
);
539 if (lhs
== NULL_TREE
)
541 gimplify_and_add (expr
, &stmts
);
542 /* We can end up with folding a memcpy of an empty class assignment
543 which gets optimized away by C++ gimplification. */
544 if (gimple_seq_empty_p (stmts
))
546 pop_gimplify_context (NULL
);
547 if (gimple_in_ssa_p (cfun
))
549 unlink_stmt_vdef (stmt
);
552 gsi_replace (si_p
, gimple_build_nop (), false);
558 tree tmp
= get_initialized_tmp_var (expr
, &stmts
, NULL
);
559 new_stmt
= gimple_build_assign (lhs
, tmp
);
560 i
= gsi_last (stmts
);
561 gsi_insert_after_without_update (&i
, new_stmt
,
562 GSI_CONTINUE_LINKING
);
565 pop_gimplify_context (NULL
);
567 gsi_replace_with_seq_vops (si_p
, stmts
);
571 /* Replace the call at *GSI with the gimple value VAL. */
574 replace_call_with_value (gimple_stmt_iterator
*gsi
, tree val
)
576 gimple
*stmt
= gsi_stmt (*gsi
);
577 tree lhs
= gimple_call_lhs (stmt
);
581 if (!useless_type_conversion_p (TREE_TYPE (lhs
), TREE_TYPE (val
)))
582 val
= fold_convert (TREE_TYPE (lhs
), val
);
583 repl
= gimple_build_assign (lhs
, val
);
586 repl
= gimple_build_nop ();
587 tree vdef
= gimple_vdef (stmt
);
588 if (vdef
&& TREE_CODE (vdef
) == SSA_NAME
)
590 unlink_stmt_vdef (stmt
);
591 release_ssa_name (vdef
);
593 gsi_replace (gsi
, repl
, false);
596 /* Replace the call at *GSI with the new call REPL and fold that
600 replace_call_with_call_and_fold (gimple_stmt_iterator
*gsi
, gimple
*repl
)
602 gimple
*stmt
= gsi_stmt (*gsi
);
603 gimple_call_set_lhs (repl
, gimple_call_lhs (stmt
));
604 gimple_set_location (repl
, gimple_location (stmt
));
605 if (gimple_vdef (stmt
)
606 && TREE_CODE (gimple_vdef (stmt
)) == SSA_NAME
)
608 gimple_set_vdef (repl
, gimple_vdef (stmt
));
609 gimple_set_vuse (repl
, gimple_vuse (stmt
));
610 SSA_NAME_DEF_STMT (gimple_vdef (repl
)) = repl
;
612 gsi_replace (gsi
, repl
, false);
616 /* Return true if VAR is a VAR_DECL or a component thereof. */
619 var_decl_component_p (tree var
)
622 while (handled_component_p (inner
))
623 inner
= TREE_OPERAND (inner
, 0);
624 return SSA_VAR_P (inner
);
627 /* Fold function call to builtin mem{{,p}cpy,move}. Return
628 false if no simplification can be made.
629 If ENDP is 0, return DEST (like memcpy).
630 If ENDP is 1, return DEST+LEN (like mempcpy).
631 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
632 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
636 gimple_fold_builtin_memory_op (gimple_stmt_iterator
*gsi
,
637 tree dest
, tree src
, int endp
)
639 gimple
*stmt
= gsi_stmt (*gsi
);
640 tree lhs
= gimple_call_lhs (stmt
);
641 tree len
= gimple_call_arg (stmt
, 2);
642 tree destvar
, srcvar
;
643 location_t loc
= gimple_location (stmt
);
645 /* If the LEN parameter is zero, return DEST. */
646 if (integer_zerop (len
))
649 if (gimple_call_lhs (stmt
))
650 repl
= gimple_build_assign (gimple_call_lhs (stmt
), dest
);
652 repl
= gimple_build_nop ();
653 tree vdef
= gimple_vdef (stmt
);
654 if (vdef
&& TREE_CODE (vdef
) == SSA_NAME
)
656 unlink_stmt_vdef (stmt
);
657 release_ssa_name (vdef
);
659 gsi_replace (gsi
, repl
, false);
663 /* If SRC and DEST are the same (and not volatile), return
664 DEST{,+LEN,+LEN-1}. */
665 if (operand_equal_p (src
, dest
, 0))
667 unlink_stmt_vdef (stmt
);
668 if (gimple_vdef (stmt
) && TREE_CODE (gimple_vdef (stmt
)) == SSA_NAME
)
669 release_ssa_name (gimple_vdef (stmt
));
672 gsi_replace (gsi
, gimple_build_nop (), false);
679 tree srctype
, desttype
;
680 unsigned int src_align
, dest_align
;
683 /* Build accesses at offset zero with a ref-all character type. */
684 off0
= build_int_cst (build_pointer_type_for_mode (char_type_node
,
687 /* If we can perform the copy efficiently with first doing all loads
688 and then all stores inline it that way. Currently efficiently
689 means that we can load all the memory into a single integer
690 register which is what MOVE_MAX gives us. */
691 src_align
= get_pointer_alignment (src
);
692 dest_align
= get_pointer_alignment (dest
);
693 if (tree_fits_uhwi_p (len
)
694 && compare_tree_int (len
, MOVE_MAX
) <= 0
695 /* ??? Don't transform copies from strings with known length this
696 confuses the tree-ssa-strlen.c. This doesn't handle
697 the case in gcc.dg/strlenopt-8.c which is XFAILed for that
699 && !c_strlen (src
, 2))
701 unsigned ilen
= tree_to_uhwi (len
);
702 if (exact_log2 (ilen
) != -1)
704 tree type
= lang_hooks
.types
.type_for_size (ilen
* 8, 1);
706 && TYPE_MODE (type
) != BLKmode
707 && (GET_MODE_SIZE (TYPE_MODE (type
)) * BITS_PER_UNIT
709 /* If the destination pointer is not aligned we must be able
710 to emit an unaligned store. */
711 && (dest_align
>= GET_MODE_ALIGNMENT (TYPE_MODE (type
))
712 || !SLOW_UNALIGNED_ACCESS (TYPE_MODE (type
), dest_align
)))
715 tree desttype
= type
;
716 if (src_align
< GET_MODE_ALIGNMENT (TYPE_MODE (type
)))
717 srctype
= build_aligned_type (type
, src_align
);
718 tree srcmem
= fold_build2 (MEM_REF
, srctype
, src
, off0
);
719 tree tem
= fold_const_aggregate_ref (srcmem
);
722 else if (src_align
< GET_MODE_ALIGNMENT (TYPE_MODE (type
))
723 && SLOW_UNALIGNED_ACCESS (TYPE_MODE (type
),
729 if (is_gimple_reg_type (TREE_TYPE (srcmem
)))
731 new_stmt
= gimple_build_assign (NULL_TREE
, srcmem
);
732 if (gimple_in_ssa_p (cfun
))
733 srcmem
= make_ssa_name (TREE_TYPE (srcmem
),
736 srcmem
= create_tmp_reg (TREE_TYPE (srcmem
));
737 gimple_assign_set_lhs (new_stmt
, srcmem
);
738 gimple_set_vuse (new_stmt
, gimple_vuse (stmt
));
739 gsi_insert_before (gsi
, new_stmt
, GSI_SAME_STMT
);
741 if (dest_align
< GET_MODE_ALIGNMENT (TYPE_MODE (type
)))
742 desttype
= build_aligned_type (type
, dest_align
);
744 = gimple_build_assign (fold_build2 (MEM_REF
, desttype
,
747 gimple_set_vuse (new_stmt
, gimple_vuse (stmt
));
748 gimple_set_vdef (new_stmt
, gimple_vdef (stmt
));
749 if (gimple_vdef (new_stmt
)
750 && TREE_CODE (gimple_vdef (new_stmt
)) == SSA_NAME
)
751 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt
)) = new_stmt
;
754 gsi_replace (gsi
, new_stmt
, false);
757 gsi_insert_before (gsi
, new_stmt
, GSI_SAME_STMT
);
766 /* Both DEST and SRC must be pointer types.
767 ??? This is what old code did. Is the testing for pointer types
770 If either SRC is readonly or length is 1, we can use memcpy. */
771 if (!dest_align
|| !src_align
)
773 if (readonly_data_expr (src
)
774 || (tree_fits_uhwi_p (len
)
775 && (MIN (src_align
, dest_align
) / BITS_PER_UNIT
776 >= tree_to_uhwi (len
))))
778 tree fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
781 gimple_call_set_fndecl (stmt
, fn
);
782 gimple_call_set_arg (stmt
, 0, dest
);
783 gimple_call_set_arg (stmt
, 1, src
);
788 /* If *src and *dest can't overlap, optimize into memcpy as well. */
789 if (TREE_CODE (src
) == ADDR_EXPR
790 && TREE_CODE (dest
) == ADDR_EXPR
)
792 tree src_base
, dest_base
, fn
;
793 HOST_WIDE_INT src_offset
= 0, dest_offset
= 0;
794 HOST_WIDE_INT size
= -1;
795 HOST_WIDE_INT maxsize
= -1;
797 srcvar
= TREE_OPERAND (src
, 0);
798 src_base
= get_ref_base_and_extent (srcvar
, &src_offset
,
800 destvar
= TREE_OPERAND (dest
, 0);
801 dest_base
= get_ref_base_and_extent (destvar
, &dest_offset
,
803 if (tree_fits_uhwi_p (len
))
804 maxsize
= tree_to_uhwi (len
);
807 src_offset
/= BITS_PER_UNIT
;
808 dest_offset
/= BITS_PER_UNIT
;
809 if (SSA_VAR_P (src_base
)
810 && SSA_VAR_P (dest_base
))
812 if (operand_equal_p (src_base
, dest_base
, 0)
813 && ranges_overlap_p (src_offset
, maxsize
,
814 dest_offset
, maxsize
))
817 else if (TREE_CODE (src_base
) == MEM_REF
818 && TREE_CODE (dest_base
) == MEM_REF
)
820 if (! operand_equal_p (TREE_OPERAND (src_base
, 0),
821 TREE_OPERAND (dest_base
, 0), 0))
823 offset_int off
= mem_ref_offset (src_base
) + src_offset
;
824 if (!wi::fits_shwi_p (off
))
826 src_offset
= off
.to_shwi ();
828 off
= mem_ref_offset (dest_base
) + dest_offset
;
829 if (!wi::fits_shwi_p (off
))
831 dest_offset
= off
.to_shwi ();
832 if (ranges_overlap_p (src_offset
, maxsize
,
833 dest_offset
, maxsize
))
839 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
842 gimple_call_set_fndecl (stmt
, fn
);
843 gimple_call_set_arg (stmt
, 0, dest
);
844 gimple_call_set_arg (stmt
, 1, src
);
849 /* If the destination and source do not alias optimize into
851 if ((is_gimple_min_invariant (dest
)
852 || TREE_CODE (dest
) == SSA_NAME
)
853 && (is_gimple_min_invariant (src
)
854 || TREE_CODE (src
) == SSA_NAME
))
857 ao_ref_init_from_ptr_and_size (&destr
, dest
, len
);
858 ao_ref_init_from_ptr_and_size (&srcr
, src
, len
);
859 if (!refs_may_alias_p_1 (&destr
, &srcr
, false))
862 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
865 gimple_call_set_fndecl (stmt
, fn
);
866 gimple_call_set_arg (stmt
, 0, dest
);
867 gimple_call_set_arg (stmt
, 1, src
);
876 if (!tree_fits_shwi_p (len
))
879 This logic lose for arguments like (type *)malloc (sizeof (type)),
880 since we strip the casts of up to VOID return value from malloc.
881 Perhaps we ought to inherit type from non-VOID argument here? */
884 if (!POINTER_TYPE_P (TREE_TYPE (src
))
885 || !POINTER_TYPE_P (TREE_TYPE (dest
)))
887 /* In the following try to find a type that is most natural to be
888 used for the memcpy source and destination and that allows
889 the most optimization when memcpy is turned into a plain assignment
890 using that type. In theory we could always use a char[len] type
891 but that only gains us that the destination and source possibly
892 no longer will have their address taken. */
893 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
894 if (TREE_CODE (src
) == POINTER_PLUS_EXPR
)
896 tree tem
= TREE_OPERAND (src
, 0);
898 if (tem
!= TREE_OPERAND (src
, 0))
899 src
= build1 (NOP_EXPR
, TREE_TYPE (tem
), src
);
901 if (TREE_CODE (dest
) == POINTER_PLUS_EXPR
)
903 tree tem
= TREE_OPERAND (dest
, 0);
905 if (tem
!= TREE_OPERAND (dest
, 0))
906 dest
= build1 (NOP_EXPR
, TREE_TYPE (tem
), dest
);
908 srctype
= TREE_TYPE (TREE_TYPE (src
));
909 if (TREE_CODE (srctype
) == ARRAY_TYPE
910 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype
), len
))
912 srctype
= TREE_TYPE (srctype
);
914 src
= build1 (NOP_EXPR
, build_pointer_type (srctype
), src
);
916 desttype
= TREE_TYPE (TREE_TYPE (dest
));
917 if (TREE_CODE (desttype
) == ARRAY_TYPE
918 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype
), len
))
920 desttype
= TREE_TYPE (desttype
);
922 dest
= build1 (NOP_EXPR
, build_pointer_type (desttype
), dest
);
924 if (TREE_ADDRESSABLE (srctype
)
925 || TREE_ADDRESSABLE (desttype
))
928 /* Make sure we are not copying using a floating-point mode or
929 a type whose size possibly does not match its precision. */
930 if (FLOAT_MODE_P (TYPE_MODE (desttype
))
931 || TREE_CODE (desttype
) == BOOLEAN_TYPE
932 || TREE_CODE (desttype
) == ENUMERAL_TYPE
)
933 desttype
= bitwise_type_for_mode (TYPE_MODE (desttype
));
934 if (FLOAT_MODE_P (TYPE_MODE (srctype
))
935 || TREE_CODE (srctype
) == BOOLEAN_TYPE
936 || TREE_CODE (srctype
) == ENUMERAL_TYPE
)
937 srctype
= bitwise_type_for_mode (TYPE_MODE (srctype
));
945 src_align
= get_pointer_alignment (src
);
946 dest_align
= get_pointer_alignment (dest
);
947 if (dest_align
< TYPE_ALIGN (desttype
)
948 || src_align
< TYPE_ALIGN (srctype
))
952 STRIP_NOPS (destvar
);
953 if (TREE_CODE (destvar
) == ADDR_EXPR
954 && var_decl_component_p (TREE_OPERAND (destvar
, 0))
955 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype
), len
))
956 destvar
= fold_build2 (MEM_REF
, desttype
, destvar
, off0
);
962 if (TREE_CODE (srcvar
) == ADDR_EXPR
963 && var_decl_component_p (TREE_OPERAND (srcvar
, 0))
964 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype
), len
))
967 || src_align
>= TYPE_ALIGN (desttype
))
968 srcvar
= fold_build2 (MEM_REF
, destvar
? desttype
: srctype
,
970 else if (!STRICT_ALIGNMENT
)
972 srctype
= build_aligned_type (TYPE_MAIN_VARIANT (desttype
),
974 srcvar
= fold_build2 (MEM_REF
, srctype
, srcvar
, off0
);
982 if (srcvar
== NULL_TREE
&& destvar
== NULL_TREE
)
985 if (srcvar
== NULL_TREE
)
988 if (src_align
>= TYPE_ALIGN (desttype
))
989 srcvar
= fold_build2 (MEM_REF
, desttype
, src
, off0
);
992 if (STRICT_ALIGNMENT
)
994 srctype
= build_aligned_type (TYPE_MAIN_VARIANT (desttype
),
996 srcvar
= fold_build2 (MEM_REF
, srctype
, src
, off0
);
999 else if (destvar
== NULL_TREE
)
1002 if (dest_align
>= TYPE_ALIGN (srctype
))
1003 destvar
= fold_build2 (MEM_REF
, srctype
, dest
, off0
);
1006 if (STRICT_ALIGNMENT
)
1008 desttype
= build_aligned_type (TYPE_MAIN_VARIANT (srctype
),
1010 destvar
= fold_build2 (MEM_REF
, desttype
, dest
, off0
);
1015 if (is_gimple_reg_type (TREE_TYPE (srcvar
)))
1017 new_stmt
= gimple_build_assign (NULL_TREE
, srcvar
);
1018 if (gimple_in_ssa_p (cfun
))
1019 srcvar
= make_ssa_name (TREE_TYPE (srcvar
), new_stmt
);
1021 srcvar
= create_tmp_reg (TREE_TYPE (srcvar
));
1022 gimple_assign_set_lhs (new_stmt
, srcvar
);
1023 gimple_set_vuse (new_stmt
, gimple_vuse (stmt
));
1024 gsi_insert_before (gsi
, new_stmt
, GSI_SAME_STMT
);
1026 new_stmt
= gimple_build_assign (destvar
, srcvar
);
1027 gimple_set_vuse (new_stmt
, gimple_vuse (stmt
));
1028 gimple_set_vdef (new_stmt
, gimple_vdef (stmt
));
1029 if (gimple_vdef (new_stmt
)
1030 && TREE_CODE (gimple_vdef (new_stmt
)) == SSA_NAME
)
1031 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt
)) = new_stmt
;
1034 gsi_replace (gsi
, new_stmt
, false);
1037 gsi_insert_before (gsi
, new_stmt
, GSI_SAME_STMT
);
1041 if (endp
== 0 || endp
== 3)
1044 len
= fold_build2_loc (loc
, MINUS_EXPR
, TREE_TYPE (len
), len
,
1046 if (endp
== 2 || endp
== 1)
1047 dest
= fold_build_pointer_plus_loc (loc
, dest
, len
);
1049 dest
= force_gimple_operand_gsi (gsi
, dest
, false, NULL_TREE
, true,
1051 gimple
*repl
= gimple_build_assign (lhs
, dest
);
1052 gsi_replace (gsi
, repl
, false);
1056 /* Fold function call to builtin memset or bzero at *GSI setting the
1057 memory of size LEN to VAL. Return whether a simplification was made. */
1060 gimple_fold_builtin_memset (gimple_stmt_iterator
*gsi
, tree c
, tree len
)
1062 gimple
*stmt
= gsi_stmt (*gsi
);
1064 unsigned HOST_WIDE_INT length
, cval
;
1066 /* If the LEN parameter is zero, return DEST. */
1067 if (integer_zerop (len
))
1069 replace_call_with_value (gsi
, gimple_call_arg (stmt
, 0));
1073 if (! tree_fits_uhwi_p (len
))
1076 if (TREE_CODE (c
) != INTEGER_CST
)
1079 tree dest
= gimple_call_arg (stmt
, 0);
1081 if (TREE_CODE (var
) != ADDR_EXPR
)
1084 var
= TREE_OPERAND (var
, 0);
1085 if (TREE_THIS_VOLATILE (var
))
1088 etype
= TREE_TYPE (var
);
1089 if (TREE_CODE (etype
) == ARRAY_TYPE
)
1090 etype
= TREE_TYPE (etype
);
1092 if (!INTEGRAL_TYPE_P (etype
)
1093 && !POINTER_TYPE_P (etype
))
1096 if (! var_decl_component_p (var
))
1099 length
= tree_to_uhwi (len
);
1100 if (GET_MODE_SIZE (TYPE_MODE (etype
)) != length
1101 || get_pointer_alignment (dest
) / BITS_PER_UNIT
< length
)
1104 if (length
> HOST_BITS_PER_WIDE_INT
/ BITS_PER_UNIT
)
1107 if (integer_zerop (c
))
1111 if (CHAR_BIT
!= 8 || BITS_PER_UNIT
!= 8 || HOST_BITS_PER_WIDE_INT
> 64)
1114 cval
= TREE_INT_CST_LOW (c
);
1118 cval
|= (cval
<< 31) << 1;
1121 var
= fold_build2 (MEM_REF
, etype
, dest
, build_int_cst (ptr_type_node
, 0));
1122 gimple
*store
= gimple_build_assign (var
, build_int_cst_type (etype
, cval
));
1123 gimple_set_vuse (store
, gimple_vuse (stmt
));
1124 tree vdef
= gimple_vdef (stmt
);
1125 if (vdef
&& TREE_CODE (vdef
) == SSA_NAME
)
1127 gimple_set_vdef (store
, gimple_vdef (stmt
));
1128 SSA_NAME_DEF_STMT (gimple_vdef (stmt
)) = store
;
1130 gsi_insert_before (gsi
, store
, GSI_SAME_STMT
);
1131 if (gimple_call_lhs (stmt
))
1133 gimple
*asgn
= gimple_build_assign (gimple_call_lhs (stmt
), dest
);
1134 gsi_replace (gsi
, asgn
, false);
1138 gimple_stmt_iterator gsi2
= *gsi
;
1140 gsi_remove (&gsi2
, true);
1147 /* Return the string length, maximum string length or maximum value of
1149 If ARG is an SSA name variable, follow its use-def chains. If LENGTH
1150 is not NULL and, for TYPE == 0, its value is not equal to the length
1151 we determine or if we are unable to determine the length or value,
1152 return false. VISITED is a bitmap of visited variables.
1153 TYPE is 0 if string length should be returned, 1 for maximum string
1154 length and 2 for maximum value ARG can have. */
1157 get_maxval_strlen (tree arg
, tree
*length
, bitmap
*visited
, int type
)
1162 if (TREE_CODE (arg
) != SSA_NAME
)
1164 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
1165 if (TREE_CODE (arg
) == ADDR_EXPR
1166 && TREE_CODE (TREE_OPERAND (arg
, 0)) == ARRAY_REF
1167 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg
, 0), 1)))
1169 tree aop0
= TREE_OPERAND (TREE_OPERAND (arg
, 0), 0);
1170 if (TREE_CODE (aop0
) == INDIRECT_REF
1171 && TREE_CODE (TREE_OPERAND (aop0
, 0)) == SSA_NAME
)
1172 return get_maxval_strlen (TREE_OPERAND (aop0
, 0),
1173 length
, visited
, type
);
1179 if (TREE_CODE (val
) != INTEGER_CST
1180 || tree_int_cst_sgn (val
) < 0)
1184 val
= c_strlen (arg
, 1);
1192 if (TREE_CODE (*length
) != INTEGER_CST
1193 || TREE_CODE (val
) != INTEGER_CST
)
1196 if (tree_int_cst_lt (*length
, val
))
1200 else if (simple_cst_equal (val
, *length
) != 1)
1208 /* If ARG is registered for SSA update we cannot look at its defining
1210 if (name_registered_for_update_p (arg
))
1213 /* If we were already here, break the infinite cycle. */
1215 *visited
= BITMAP_ALLOC (NULL
);
1216 if (!bitmap_set_bit (*visited
, SSA_NAME_VERSION (arg
)))
1220 def_stmt
= SSA_NAME_DEF_STMT (var
);
1222 switch (gimple_code (def_stmt
))
1225 /* The RHS of the statement defining VAR must either have a
1226 constant length or come from another SSA_NAME with a constant
1228 if (gimple_assign_single_p (def_stmt
)
1229 || gimple_assign_unary_nop_p (def_stmt
))
1231 tree rhs
= gimple_assign_rhs1 (def_stmt
);
1232 return get_maxval_strlen (rhs
, length
, visited
, type
);
1234 else if (gimple_assign_rhs_code (def_stmt
) == COND_EXPR
)
1236 tree op2
= gimple_assign_rhs2 (def_stmt
);
1237 tree op3
= gimple_assign_rhs3 (def_stmt
);
1238 return get_maxval_strlen (op2
, length
, visited
, type
)
1239 && get_maxval_strlen (op3
, length
, visited
, type
);
1245 /* All the arguments of the PHI node must have the same constant
1249 for (i
= 0; i
< gimple_phi_num_args (def_stmt
); i
++)
1251 tree arg
= gimple_phi_arg (def_stmt
, i
)->def
;
1253 /* If this PHI has itself as an argument, we cannot
1254 determine the string length of this argument. However,
1255 if we can find a constant string length for the other
1256 PHI args then we can still be sure that this is a
1257 constant string length. So be optimistic and just
1258 continue with the next argument. */
1259 if (arg
== gimple_phi_result (def_stmt
))
1262 if (!get_maxval_strlen (arg
, length
, visited
, type
))
1274 get_maxval_strlen (tree arg
, int type
)
1276 bitmap visited
= NULL
;
1277 tree len
= NULL_TREE
;
1278 if (!get_maxval_strlen (arg
, &len
, &visited
, type
))
1281 BITMAP_FREE (visited
);
1287 /* Fold function call to builtin strcpy with arguments DEST and SRC.
1288 If LEN is not NULL, it represents the length of the string to be
1289 copied. Return NULL_TREE if no simplification can be made. */
1292 gimple_fold_builtin_strcpy (gimple_stmt_iterator
*gsi
,
1293 tree dest
, tree src
)
1295 location_t loc
= gimple_location (gsi_stmt (*gsi
));
1298 /* If SRC and DEST are the same (and not volatile), return DEST. */
1299 if (operand_equal_p (src
, dest
, 0))
1301 replace_call_with_value (gsi
, dest
);
1305 if (optimize_function_for_size_p (cfun
))
1308 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
1312 tree len
= get_maxval_strlen (src
, 0);
1316 len
= fold_convert_loc (loc
, size_type_node
, len
);
1317 len
= size_binop_loc (loc
, PLUS_EXPR
, len
, build_int_cst (size_type_node
, 1));
1318 len
= force_gimple_operand_gsi (gsi
, len
, true,
1319 NULL_TREE
, true, GSI_SAME_STMT
);
1320 gimple
*repl
= gimple_build_call (fn
, 3, dest
, src
, len
);
1321 replace_call_with_call_and_fold (gsi
, repl
);
1325 /* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
1326 If SLEN is not NULL, it represents the length of the source string.
1327 Return NULL_TREE if no simplification can be made. */
1330 gimple_fold_builtin_strncpy (gimple_stmt_iterator
*gsi
,
1331 tree dest
, tree src
, tree len
)
1333 location_t loc
= gimple_location (gsi_stmt (*gsi
));
1336 /* If the LEN parameter is zero, return DEST. */
1337 if (integer_zerop (len
))
1339 replace_call_with_value (gsi
, dest
);
1343 /* We can't compare slen with len as constants below if len is not a
1345 if (TREE_CODE (len
) != INTEGER_CST
)
1348 /* Now, we must be passed a constant src ptr parameter. */
1349 tree slen
= get_maxval_strlen (src
, 0);
1350 if (!slen
|| TREE_CODE (slen
) != INTEGER_CST
)
1353 slen
= size_binop_loc (loc
, PLUS_EXPR
, slen
, ssize_int (1));
1355 /* We do not support simplification of this case, though we do
1356 support it when expanding trees into RTL. */
1357 /* FIXME: generate a call to __builtin_memset. */
1358 if (tree_int_cst_lt (slen
, len
))
1361 /* OK transform into builtin memcpy. */
1362 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
1366 len
= fold_convert_loc (loc
, size_type_node
, len
);
1367 len
= force_gimple_operand_gsi (gsi
, len
, true,
1368 NULL_TREE
, true, GSI_SAME_STMT
);
1369 gimple
*repl
= gimple_build_call (fn
, 3, dest
, src
, len
);
1370 replace_call_with_call_and_fold (gsi
, repl
);
1374 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
1377 Return NULL_TREE if no simplification was possible, otherwise return the
1378 simplified form of the call as a tree.
1380 The simplified form may be a constant or other expression which
1381 computes the same value, but in a more efficient manner (including
1382 calls to other builtin functions).
1384 The call may contain arguments which need to be evaluated, but
1385 which are not useful to determine the result of the call. In
1386 this case we return a chain of COMPOUND_EXPRs. The LHS of each
1387 COMPOUND_EXPR will be an argument which must be evaluated.
1388 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
1389 COMPOUND_EXPR in the chain will contain the tree for the simplified
1390 form of the builtin function call. */
1393 gimple_fold_builtin_strcat (gimple_stmt_iterator
*gsi
, tree dst
, tree src
)
1395 gimple
*stmt
= gsi_stmt (*gsi
);
1396 location_t loc
= gimple_location (stmt
);
1398 const char *p
= c_getstr (src
);
1400 /* If the string length is zero, return the dst parameter. */
1401 if (p
&& *p
== '\0')
1403 replace_call_with_value (gsi
, dst
);
1407 if (!optimize_bb_for_speed_p (gimple_bb (stmt
)))
1410 /* See if we can store by pieces into (dst + strlen(dst)). */
1412 tree strlen_fn
= builtin_decl_implicit (BUILT_IN_STRLEN
);
1413 tree memcpy_fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
1415 if (!strlen_fn
|| !memcpy_fn
)
1418 /* If the length of the source string isn't computable don't
1419 split strcat into strlen and memcpy. */
1420 tree len
= get_maxval_strlen (src
, 0);
1424 /* Create strlen (dst). */
1425 gimple_seq stmts
= NULL
, stmts2
;
1426 gimple
*repl
= gimple_build_call (strlen_fn
, 1, dst
);
1427 gimple_set_location (repl
, loc
);
1428 if (gimple_in_ssa_p (cfun
))
1429 newdst
= make_ssa_name (size_type_node
);
1431 newdst
= create_tmp_reg (size_type_node
);
1432 gimple_call_set_lhs (repl
, newdst
);
1433 gimple_seq_add_stmt_without_update (&stmts
, repl
);
1435 /* Create (dst p+ strlen (dst)). */
1436 newdst
= fold_build_pointer_plus_loc (loc
, dst
, newdst
);
1437 newdst
= force_gimple_operand (newdst
, &stmts2
, true, NULL_TREE
);
1438 gimple_seq_add_seq_without_update (&stmts
, stmts2
);
1440 len
= fold_convert_loc (loc
, size_type_node
, len
);
1441 len
= size_binop_loc (loc
, PLUS_EXPR
, len
,
1442 build_int_cst (size_type_node
, 1));
1443 len
= force_gimple_operand (len
, &stmts2
, true, NULL_TREE
);
1444 gimple_seq_add_seq_without_update (&stmts
, stmts2
);
1446 repl
= gimple_build_call (memcpy_fn
, 3, newdst
, src
, len
);
1447 gimple_seq_add_stmt_without_update (&stmts
, repl
);
1448 if (gimple_call_lhs (stmt
))
1450 repl
= gimple_build_assign (gimple_call_lhs (stmt
), dst
);
1451 gimple_seq_add_stmt_without_update (&stmts
, repl
);
1452 gsi_replace_with_seq_vops (gsi
, stmts
);
1453 /* gsi now points at the assignment to the lhs, get a
1454 stmt iterator to the memcpy call.
1455 ??? We can't use gsi_for_stmt as that doesn't work when the
1456 CFG isn't built yet. */
1457 gimple_stmt_iterator gsi2
= *gsi
;
1463 gsi_replace_with_seq_vops (gsi
, stmts
);
1469 /* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
1470 are the arguments to the call. */
1473 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator
*gsi
)
1475 gimple
*stmt
= gsi_stmt (*gsi
);
1476 tree dest
= gimple_call_arg (stmt
, 0);
1477 tree src
= gimple_call_arg (stmt
, 1);
1478 tree size
= gimple_call_arg (stmt
, 2);
1484 /* If the SRC parameter is "", return DEST. */
1485 if (p
&& *p
== '\0')
1487 replace_call_with_value (gsi
, dest
);
1491 if (! tree_fits_uhwi_p (size
) || ! integer_all_onesp (size
))
1494 /* If __builtin_strcat_chk is used, assume strcat is available. */
1495 fn
= builtin_decl_explicit (BUILT_IN_STRCAT
);
1499 gimple
*repl
= gimple_build_call (fn
, 2, dest
, src
);
1500 replace_call_with_call_and_fold (gsi
, repl
);
1504 /* Simplify a call to the strncat builtin. */
1507 gimple_fold_builtin_strncat (gimple_stmt_iterator
*gsi
)
1509 gcall
*stmt
= as_a
<gcall
*> (gsi_stmt (*gsi
));
1510 tree dst
= gimple_call_arg (stmt
, 0);
1511 tree src
= gimple_call_arg (stmt
, 1);
1512 tree len
= gimple_call_arg (stmt
, 2);
1514 const char *p
= c_getstr (src
);
1516 /* If the requested length is zero, or the src parameter string
1517 length is zero, return the dst parameter. */
1518 if (integer_zerop (len
) || (p
&& *p
== '\0'))
1520 replace_call_with_value (gsi
, dst
);
1524 /* If the requested len is greater than or equal to the string
1525 length, call strcat. */
1526 if (TREE_CODE (len
) == INTEGER_CST
&& p
1527 && compare_tree_int (len
, strlen (p
)) >= 0)
1529 tree fn
= builtin_decl_implicit (BUILT_IN_STRCAT
);
1531 /* If the replacement _DECL isn't initialized, don't do the
1536 gcall
*repl
= gimple_build_call (fn
, 2, dst
, src
);
1537 replace_call_with_call_and_fold (gsi
, repl
);
1544 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
1548 gimple_fold_builtin_strncat_chk (gimple_stmt_iterator
*gsi
)
1550 gimple
*stmt
= gsi_stmt (*gsi
);
1551 tree dest
= gimple_call_arg (stmt
, 0);
1552 tree src
= gimple_call_arg (stmt
, 1);
1553 tree len
= gimple_call_arg (stmt
, 2);
1554 tree size
= gimple_call_arg (stmt
, 3);
1559 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
1560 if ((p
&& *p
== '\0')
1561 || integer_zerop (len
))
1563 replace_call_with_value (gsi
, dest
);
1567 if (! tree_fits_uhwi_p (size
))
1570 if (! integer_all_onesp (size
))
1572 tree src_len
= c_strlen (src
, 1);
1574 && tree_fits_uhwi_p (src_len
)
1575 && tree_fits_uhwi_p (len
)
1576 && ! tree_int_cst_lt (len
, src_len
))
1578 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
1579 fn
= builtin_decl_explicit (BUILT_IN_STRCAT_CHK
);
1583 gimple
*repl
= gimple_build_call (fn
, 3, dest
, src
, size
);
1584 replace_call_with_call_and_fold (gsi
, repl
);
1590 /* If __builtin_strncat_chk is used, assume strncat is available. */
1591 fn
= builtin_decl_explicit (BUILT_IN_STRNCAT
);
1595 gimple
*repl
= gimple_build_call (fn
, 3, dest
, src
, len
);
1596 replace_call_with_call_and_fold (gsi
, repl
);
1600 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
1601 to the call. IGNORE is true if the value returned
1602 by the builtin will be ignored. UNLOCKED is true is true if this
1603 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
1604 the known length of the string. Return NULL_TREE if no simplification
1608 gimple_fold_builtin_fputs (gimple_stmt_iterator
*gsi
,
1609 tree arg0
, tree arg1
,
1612 gimple
*stmt
= gsi_stmt (*gsi
);
1614 /* If we're using an unlocked function, assume the other unlocked
1615 functions exist explicitly. */
1616 tree
const fn_fputc
= (unlocked
1617 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED
)
1618 : builtin_decl_implicit (BUILT_IN_FPUTC
));
1619 tree
const fn_fwrite
= (unlocked
1620 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED
)
1621 : builtin_decl_implicit (BUILT_IN_FWRITE
));
1623 /* If the return value is used, don't do the transformation. */
1624 if (gimple_call_lhs (stmt
))
1627 /* Get the length of the string passed to fputs. If the length
1628 can't be determined, punt. */
1629 tree len
= get_maxval_strlen (arg0
, 0);
1631 || TREE_CODE (len
) != INTEGER_CST
)
1634 switch (compare_tree_int (len
, 1))
1636 case -1: /* length is 0, delete the call entirely . */
1637 replace_call_with_value (gsi
, integer_zero_node
);
1640 case 0: /* length is 1, call fputc. */
1642 const char *p
= c_getstr (arg0
);
1648 gimple
*repl
= gimple_build_call (fn_fputc
, 2,
1650 (integer_type_node
, p
[0]), arg1
);
1651 replace_call_with_call_and_fold (gsi
, repl
);
1656 case 1: /* length is greater than 1, call fwrite. */
1658 /* If optimizing for size keep fputs. */
1659 if (optimize_function_for_size_p (cfun
))
1661 /* New argument list transforming fputs(string, stream) to
1662 fwrite(string, 1, len, stream). */
1666 gimple
*repl
= gimple_build_call (fn_fwrite
, 4, arg0
,
1667 size_one_node
, len
, arg1
);
1668 replace_call_with_call_and_fold (gsi
, repl
);
1677 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
1678 DEST, SRC, LEN, and SIZE are the arguments to the call.
1679 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
1680 code of the builtin. If MAXLEN is not NULL, it is maximum length
1681 passed as third argument. */
1684 gimple_fold_builtin_memory_chk (gimple_stmt_iterator
*gsi
,
1685 tree dest
, tree src
, tree len
, tree size
,
1686 enum built_in_function fcode
)
1688 gimple
*stmt
= gsi_stmt (*gsi
);
1689 location_t loc
= gimple_location (stmt
);
1690 bool ignore
= gimple_call_lhs (stmt
) == NULL_TREE
;
1693 /* If SRC and DEST are the same (and not volatile), return DEST
1694 (resp. DEST+LEN for __mempcpy_chk). */
1695 if (fcode
!= BUILT_IN_MEMSET_CHK
&& operand_equal_p (src
, dest
, 0))
1697 if (fcode
!= BUILT_IN_MEMPCPY_CHK
)
1699 replace_call_with_value (gsi
, dest
);
1704 tree temp
= fold_build_pointer_plus_loc (loc
, dest
, len
);
1705 temp
= force_gimple_operand_gsi (gsi
, temp
,
1706 false, NULL_TREE
, true,
1708 replace_call_with_value (gsi
, temp
);
1713 if (! tree_fits_uhwi_p (size
))
1716 tree maxlen
= get_maxval_strlen (len
, 2);
1717 if (! integer_all_onesp (size
))
1719 if (! tree_fits_uhwi_p (len
))
1721 /* If LEN is not constant, try MAXLEN too.
1722 For MAXLEN only allow optimizing into non-_ocs function
1723 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
1724 if (maxlen
== NULL_TREE
|| ! tree_fits_uhwi_p (maxlen
))
1726 if (fcode
== BUILT_IN_MEMPCPY_CHK
&& ignore
)
1728 /* (void) __mempcpy_chk () can be optimized into
1729 (void) __memcpy_chk (). */
1730 fn
= builtin_decl_explicit (BUILT_IN_MEMCPY_CHK
);
1734 gimple
*repl
= gimple_build_call (fn
, 4, dest
, src
, len
, size
);
1735 replace_call_with_call_and_fold (gsi
, repl
);
1744 if (tree_int_cst_lt (size
, maxlen
))
1749 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
1750 mem{cpy,pcpy,move,set} is available. */
1753 case BUILT_IN_MEMCPY_CHK
:
1754 fn
= builtin_decl_explicit (BUILT_IN_MEMCPY
);
1756 case BUILT_IN_MEMPCPY_CHK
:
1757 fn
= builtin_decl_explicit (BUILT_IN_MEMPCPY
);
1759 case BUILT_IN_MEMMOVE_CHK
:
1760 fn
= builtin_decl_explicit (BUILT_IN_MEMMOVE
);
1762 case BUILT_IN_MEMSET_CHK
:
1763 fn
= builtin_decl_explicit (BUILT_IN_MEMSET
);
1772 gimple
*repl
= gimple_build_call (fn
, 3, dest
, src
, len
);
1773 replace_call_with_call_and_fold (gsi
, repl
);
1777 /* Fold a call to the __st[rp]cpy_chk builtin.
1778 DEST, SRC, and SIZE are the arguments to the call.
1779 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
1780 code of the builtin. If MAXLEN is not NULL, it is maximum length of
1781 strings passed as second argument. */
1784 gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator
*gsi
,
1786 tree src
, tree size
,
1787 enum built_in_function fcode
)
1789 gimple
*stmt
= gsi_stmt (*gsi
);
1790 location_t loc
= gimple_location (stmt
);
1791 bool ignore
= gimple_call_lhs (stmt
) == NULL_TREE
;
1794 /* If SRC and DEST are the same (and not volatile), return DEST. */
1795 if (fcode
== BUILT_IN_STRCPY_CHK
&& operand_equal_p (src
, dest
, 0))
1797 replace_call_with_value (gsi
, dest
);
1801 if (! tree_fits_uhwi_p (size
))
1804 tree maxlen
= get_maxval_strlen (src
, 1);
1805 if (! integer_all_onesp (size
))
1807 len
= c_strlen (src
, 1);
1808 if (! len
|| ! tree_fits_uhwi_p (len
))
1810 /* If LEN is not constant, try MAXLEN too.
1811 For MAXLEN only allow optimizing into non-_ocs function
1812 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
1813 if (maxlen
== NULL_TREE
|| ! tree_fits_uhwi_p (maxlen
))
1815 if (fcode
== BUILT_IN_STPCPY_CHK
)
1820 /* If return value of __stpcpy_chk is ignored,
1821 optimize into __strcpy_chk. */
1822 fn
= builtin_decl_explicit (BUILT_IN_STRCPY_CHK
);
1826 gimple
*repl
= gimple_build_call (fn
, 3, dest
, src
, size
);
1827 replace_call_with_call_and_fold (gsi
, repl
);
1831 if (! len
|| TREE_SIDE_EFFECTS (len
))
1834 /* If c_strlen returned something, but not a constant,
1835 transform __strcpy_chk into __memcpy_chk. */
1836 fn
= builtin_decl_explicit (BUILT_IN_MEMCPY_CHK
);
1840 len
= fold_convert_loc (loc
, size_type_node
, len
);
1841 len
= size_binop_loc (loc
, PLUS_EXPR
, len
,
1842 build_int_cst (size_type_node
, 1));
1843 len
= force_gimple_operand_gsi (gsi
, len
, true, NULL_TREE
,
1844 true, GSI_SAME_STMT
);
1845 gimple
*repl
= gimple_build_call (fn
, 4, dest
, src
, len
, size
);
1846 replace_call_with_call_and_fold (gsi
, repl
);
1853 if (! tree_int_cst_lt (maxlen
, size
))
1857 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
1858 fn
= builtin_decl_explicit (fcode
== BUILT_IN_STPCPY_CHK
1859 ? BUILT_IN_STPCPY
: BUILT_IN_STRCPY
);
1863 gimple
*repl
= gimple_build_call (fn
, 2, dest
, src
);
1864 replace_call_with_call_and_fold (gsi
, repl
);
1868 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
1869 are the arguments to the call. If MAXLEN is not NULL, it is maximum
1870 length passed as third argument. IGNORE is true if return value can be
1871 ignored. FCODE is the BUILT_IN_* code of the builtin. */
1874 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator
*gsi
,
1875 tree dest
, tree src
,
1876 tree len
, tree size
,
1877 enum built_in_function fcode
)
1879 gimple
*stmt
= gsi_stmt (*gsi
);
1880 bool ignore
= gimple_call_lhs (stmt
) == NULL_TREE
;
1883 if (fcode
== BUILT_IN_STPNCPY_CHK
&& ignore
)
1885 /* If return value of __stpncpy_chk is ignored,
1886 optimize into __strncpy_chk. */
1887 fn
= builtin_decl_explicit (BUILT_IN_STRNCPY_CHK
);
1890 gimple
*repl
= gimple_build_call (fn
, 4, dest
, src
, len
, size
);
1891 replace_call_with_call_and_fold (gsi
, repl
);
1896 if (! tree_fits_uhwi_p (size
))
1899 tree maxlen
= get_maxval_strlen (len
, 2);
1900 if (! integer_all_onesp (size
))
1902 if (! tree_fits_uhwi_p (len
))
1904 /* If LEN is not constant, try MAXLEN too.
1905 For MAXLEN only allow optimizing into non-_ocs function
1906 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
1907 if (maxlen
== NULL_TREE
|| ! tree_fits_uhwi_p (maxlen
))
1913 if (tree_int_cst_lt (size
, maxlen
))
1917 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
1918 fn
= builtin_decl_explicit (fcode
== BUILT_IN_STPNCPY_CHK
1919 ? BUILT_IN_STPNCPY
: BUILT_IN_STRNCPY
);
1923 gimple
*repl
= gimple_build_call (fn
, 3, dest
, src
, len
);
1924 replace_call_with_call_and_fold (gsi
, repl
);
1928 /* Fold function call to builtin stpcpy with arguments DEST and SRC.
1929 Return NULL_TREE if no simplification can be made. */
1932 gimple_fold_builtin_stpcpy (gimple_stmt_iterator
*gsi
)
1934 gcall
*stmt
= as_a
<gcall
*> (gsi_stmt (*gsi
));
1935 location_t loc
= gimple_location (stmt
);
1936 tree dest
= gimple_call_arg (stmt
, 0);
1937 tree src
= gimple_call_arg (stmt
, 1);
1938 tree fn
, len
, lenp1
;
1940 /* If the result is unused, replace stpcpy with strcpy. */
1941 if (gimple_call_lhs (stmt
) == NULL_TREE
)
1943 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
1946 gimple_call_set_fndecl (stmt
, fn
);
1951 len
= c_strlen (src
, 1);
1953 || TREE_CODE (len
) != INTEGER_CST
)
1956 if (optimize_function_for_size_p (cfun
)
1957 /* If length is zero it's small enough. */
1958 && !integer_zerop (len
))
1961 /* If the source has a known length replace stpcpy with memcpy. */
1962 fn
= builtin_decl_implicit (BUILT_IN_MEMCPY
);
1966 gimple_seq stmts
= NULL
;
1967 tree tem
= gimple_convert (&stmts
, loc
, size_type_node
, len
);
1968 lenp1
= gimple_build (&stmts
, loc
, PLUS_EXPR
, size_type_node
,
1969 tem
, build_int_cst (size_type_node
, 1));
1970 gsi_insert_seq_before (gsi
, stmts
, GSI_SAME_STMT
);
1971 gcall
*repl
= gimple_build_call (fn
, 3, dest
, src
, lenp1
);
1972 gimple_set_vuse (repl
, gimple_vuse (stmt
));
1973 gimple_set_vdef (repl
, gimple_vdef (stmt
));
1974 if (gimple_vdef (repl
)
1975 && TREE_CODE (gimple_vdef (repl
)) == SSA_NAME
)
1976 SSA_NAME_DEF_STMT (gimple_vdef (repl
)) = repl
;
1977 gsi_insert_before (gsi
, repl
, GSI_SAME_STMT
);
1978 /* Replace the result with dest + len. */
1980 tem
= gimple_convert (&stmts
, loc
, sizetype
, len
);
1981 gsi_insert_seq_before (gsi
, stmts
, GSI_SAME_STMT
);
1982 gassign
*ret
= gimple_build_assign (gimple_call_lhs (stmt
),
1983 POINTER_PLUS_EXPR
, dest
, tem
);
1984 gsi_replace (gsi
, ret
, false);
1985 /* Finally fold the memcpy call. */
1986 gimple_stmt_iterator gsi2
= *gsi
;
1992 /* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS. Return
1993 NULL_TREE if a normal call should be emitted rather than expanding
1994 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
1995 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
1996 passed as second argument. */
1999 gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator
*gsi
,
2000 enum built_in_function fcode
)
2002 gcall
*stmt
= as_a
<gcall
*> (gsi_stmt (*gsi
));
2003 tree dest
, size
, len
, fn
, fmt
, flag
;
2004 const char *fmt_str
;
2006 /* Verify the required arguments in the original call. */
2007 if (gimple_call_num_args (stmt
) < 5)
2010 dest
= gimple_call_arg (stmt
, 0);
2011 len
= gimple_call_arg (stmt
, 1);
2012 flag
= gimple_call_arg (stmt
, 2);
2013 size
= gimple_call_arg (stmt
, 3);
2014 fmt
= gimple_call_arg (stmt
, 4);
2016 if (! tree_fits_uhwi_p (size
))
2019 if (! integer_all_onesp (size
))
2021 tree maxlen
= get_maxval_strlen (len
, 2);
2022 if (! tree_fits_uhwi_p (len
))
2024 /* If LEN is not constant, try MAXLEN too.
2025 For MAXLEN only allow optimizing into non-_ocs function
2026 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2027 if (maxlen
== NULL_TREE
|| ! tree_fits_uhwi_p (maxlen
))
2033 if (tree_int_cst_lt (size
, maxlen
))
2037 if (!init_target_chars ())
2040 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
2041 or if format doesn't contain % chars or is "%s". */
2042 if (! integer_zerop (flag
))
2044 fmt_str
= c_getstr (fmt
);
2045 if (fmt_str
== NULL
)
2047 if (strchr (fmt_str
, target_percent
) != NULL
2048 && strcmp (fmt_str
, target_percent_s
))
2052 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
2054 fn
= builtin_decl_explicit (fcode
== BUILT_IN_VSNPRINTF_CHK
2055 ? BUILT_IN_VSNPRINTF
: BUILT_IN_SNPRINTF
);
2059 /* Replace the called function and the first 5 argument by 3 retaining
2060 trailing varargs. */
2061 gimple_call_set_fndecl (stmt
, fn
);
2062 gimple_call_set_fntype (stmt
, TREE_TYPE (fn
));
2063 gimple_call_set_arg (stmt
, 0, dest
);
2064 gimple_call_set_arg (stmt
, 1, len
);
2065 gimple_call_set_arg (stmt
, 2, fmt
);
2066 for (unsigned i
= 3; i
< gimple_call_num_args (stmt
) - 2; ++i
)
2067 gimple_call_set_arg (stmt
, i
, gimple_call_arg (stmt
, i
+ 2));
2068 gimple_set_num_ops (stmt
, gimple_num_ops (stmt
) - 2);
2073 /* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
2074 Return NULL_TREE if a normal call should be emitted rather than
2075 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
2076 or BUILT_IN_VSPRINTF_CHK. */
2079 gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator
*gsi
,
2080 enum built_in_function fcode
)
2082 gcall
*stmt
= as_a
<gcall
*> (gsi_stmt (*gsi
));
2083 tree dest
, size
, len
, fn
, fmt
, flag
;
2084 const char *fmt_str
;
2085 unsigned nargs
= gimple_call_num_args (stmt
);
2087 /* Verify the required arguments in the original call. */
2090 dest
= gimple_call_arg (stmt
, 0);
2091 flag
= gimple_call_arg (stmt
, 1);
2092 size
= gimple_call_arg (stmt
, 2);
2093 fmt
= gimple_call_arg (stmt
, 3);
2095 if (! tree_fits_uhwi_p (size
))
2100 if (!init_target_chars ())
2103 /* Check whether the format is a literal string constant. */
2104 fmt_str
= c_getstr (fmt
);
2105 if (fmt_str
!= NULL
)
2107 /* If the format doesn't contain % args or %%, we know the size. */
2108 if (strchr (fmt_str
, target_percent
) == 0)
2110 if (fcode
!= BUILT_IN_SPRINTF_CHK
|| nargs
== 4)
2111 len
= build_int_cstu (size_type_node
, strlen (fmt_str
));
2113 /* If the format is "%s" and first ... argument is a string literal,
2114 we know the size too. */
2115 else if (fcode
== BUILT_IN_SPRINTF_CHK
2116 && strcmp (fmt_str
, target_percent_s
) == 0)
2122 arg
= gimple_call_arg (stmt
, 4);
2123 if (POINTER_TYPE_P (TREE_TYPE (arg
)))
2125 len
= c_strlen (arg
, 1);
2126 if (! len
|| ! tree_fits_uhwi_p (len
))
2133 if (! integer_all_onesp (size
))
2135 if (! len
|| ! tree_int_cst_lt (len
, size
))
2139 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
2140 or if format doesn't contain % chars or is "%s". */
2141 if (! integer_zerop (flag
))
2143 if (fmt_str
== NULL
)
2145 if (strchr (fmt_str
, target_percent
) != NULL
2146 && strcmp (fmt_str
, target_percent_s
))
2150 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
2151 fn
= builtin_decl_explicit (fcode
== BUILT_IN_VSPRINTF_CHK
2152 ? BUILT_IN_VSPRINTF
: BUILT_IN_SPRINTF
);
2156 /* Replace the called function and the first 4 argument by 2 retaining
2157 trailing varargs. */
2158 gimple_call_set_fndecl (stmt
, fn
);
2159 gimple_call_set_fntype (stmt
, TREE_TYPE (fn
));
2160 gimple_call_set_arg (stmt
, 0, dest
);
2161 gimple_call_set_arg (stmt
, 1, fmt
);
2162 for (unsigned i
= 2; i
< gimple_call_num_args (stmt
) - 2; ++i
)
2163 gimple_call_set_arg (stmt
, i
, gimple_call_arg (stmt
, i
+ 2));
2164 gimple_set_num_ops (stmt
, gimple_num_ops (stmt
) - 2);
2169 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
2170 ORIG may be null if this is a 2-argument call. We don't attempt to
2171 simplify calls with more than 3 arguments.
2173 Return NULL_TREE if no simplification was possible, otherwise return the
2174 simplified form of the call as a tree. If IGNORED is true, it means that
2175 the caller does not use the returned value of the function. */
2178 gimple_fold_builtin_sprintf (gimple_stmt_iterator
*gsi
)
2180 gimple
*stmt
= gsi_stmt (*gsi
);
2181 tree dest
= gimple_call_arg (stmt
, 0);
2182 tree fmt
= gimple_call_arg (stmt
, 1);
2183 tree orig
= NULL_TREE
;
2184 const char *fmt_str
= NULL
;
2186 /* Verify the required arguments in the original call. We deal with two
2187 types of sprintf() calls: 'sprintf (str, fmt)' and
2188 'sprintf (dest, "%s", orig)'. */
2189 if (gimple_call_num_args (stmt
) > 3)
2192 if (gimple_call_num_args (stmt
) == 3)
2193 orig
= gimple_call_arg (stmt
, 2);
2195 /* Check whether the format is a literal string constant. */
2196 fmt_str
= c_getstr (fmt
);
2197 if (fmt_str
== NULL
)
2200 if (!init_target_chars ())
2203 /* If the format doesn't contain % args or %%, use strcpy. */
2204 if (strchr (fmt_str
, target_percent
) == NULL
)
2206 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
2211 /* Don't optimize sprintf (buf, "abc", ptr++). */
2215 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
2216 'format' is known to contain no % formats. */
2217 gimple_seq stmts
= NULL
;
2218 gimple
*repl
= gimple_build_call (fn
, 2, dest
, fmt
);
2219 gimple_seq_add_stmt_without_update (&stmts
, repl
);
2220 if (gimple_call_lhs (stmt
))
2222 repl
= gimple_build_assign (gimple_call_lhs (stmt
),
2223 build_int_cst (integer_type_node
,
2225 gimple_seq_add_stmt_without_update (&stmts
, repl
);
2226 gsi_replace_with_seq_vops (gsi
, stmts
);
2227 /* gsi now points at the assignment to the lhs, get a
2228 stmt iterator to the memcpy call.
2229 ??? We can't use gsi_for_stmt as that doesn't work when the
2230 CFG isn't built yet. */
2231 gimple_stmt_iterator gsi2
= *gsi
;
2237 gsi_replace_with_seq_vops (gsi
, stmts
);
2243 /* If the format is "%s", use strcpy if the result isn't used. */
2244 else if (fmt_str
&& strcmp (fmt_str
, target_percent_s
) == 0)
2247 fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
2252 /* Don't crash on sprintf (str1, "%s"). */
2256 tree orig_len
= NULL_TREE
;
2257 if (gimple_call_lhs (stmt
))
2259 orig_len
= get_maxval_strlen (orig
, 0);
2264 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
2265 gimple_seq stmts
= NULL
;
2266 gimple
*repl
= gimple_build_call (fn
, 2, dest
, orig
);
2267 gimple_seq_add_stmt_without_update (&stmts
, repl
);
2268 if (gimple_call_lhs (stmt
))
2270 if (!useless_type_conversion_p (integer_type_node
,
2271 TREE_TYPE (orig_len
)))
2272 orig_len
= fold_convert (integer_type_node
, orig_len
);
2273 repl
= gimple_build_assign (gimple_call_lhs (stmt
), orig_len
);
2274 gimple_seq_add_stmt_without_update (&stmts
, repl
);
2275 gsi_replace_with_seq_vops (gsi
, stmts
);
2276 /* gsi now points at the assignment to the lhs, get a
2277 stmt iterator to the memcpy call.
2278 ??? We can't use gsi_for_stmt as that doesn't work when the
2279 CFG isn't built yet. */
2280 gimple_stmt_iterator gsi2
= *gsi
;
2286 gsi_replace_with_seq_vops (gsi
, stmts
);
2294 /* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
2295 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
2296 attempt to simplify calls with more than 4 arguments.
2298 Return NULL_TREE if no simplification was possible, otherwise return the
2299 simplified form of the call as a tree. If IGNORED is true, it means that
2300 the caller does not use the returned value of the function. */
2303 gimple_fold_builtin_snprintf (gimple_stmt_iterator
*gsi
)
2305 gcall
*stmt
= as_a
<gcall
*> (gsi_stmt (*gsi
));
2306 tree dest
= gimple_call_arg (stmt
, 0);
2307 tree destsize
= gimple_call_arg (stmt
, 1);
2308 tree fmt
= gimple_call_arg (stmt
, 2);
2309 tree orig
= NULL_TREE
;
2310 const char *fmt_str
= NULL
;
2312 if (gimple_call_num_args (stmt
) > 4)
2315 if (gimple_call_num_args (stmt
) == 4)
2316 orig
= gimple_call_arg (stmt
, 3);
2318 if (!tree_fits_uhwi_p (destsize
))
2320 unsigned HOST_WIDE_INT destlen
= tree_to_uhwi (destsize
);
2322 /* Check whether the format is a literal string constant. */
2323 fmt_str
= c_getstr (fmt
);
2324 if (fmt_str
== NULL
)
2327 if (!init_target_chars ())
2330 /* If the format doesn't contain % args or %%, use strcpy. */
2331 if (strchr (fmt_str
, target_percent
) == NULL
)
2333 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
2337 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
2341 /* We could expand this as
2342 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
2344 memcpy (str, fmt_with_nul_at_cstm1, cst);
2345 but in the former case that might increase code size
2346 and in the latter case grow .rodata section too much.
2348 size_t len
= strlen (fmt_str
);
2352 gimple_seq stmts
= NULL
;
2353 gimple
*repl
= gimple_build_call (fn
, 2, dest
, fmt
);
2354 gimple_seq_add_stmt_without_update (&stmts
, repl
);
2355 if (gimple_call_lhs (stmt
))
2357 repl
= gimple_build_assign (gimple_call_lhs (stmt
),
2358 build_int_cst (integer_type_node
, len
));
2359 gimple_seq_add_stmt_without_update (&stmts
, repl
);
2360 gsi_replace_with_seq_vops (gsi
, stmts
);
2361 /* gsi now points at the assignment to the lhs, get a
2362 stmt iterator to the memcpy call.
2363 ??? We can't use gsi_for_stmt as that doesn't work when the
2364 CFG isn't built yet. */
2365 gimple_stmt_iterator gsi2
= *gsi
;
2371 gsi_replace_with_seq_vops (gsi
, stmts
);
2377 /* If the format is "%s", use strcpy if the result isn't used. */
2378 else if (fmt_str
&& strcmp (fmt_str
, target_percent_s
) == 0)
2380 tree fn
= builtin_decl_implicit (BUILT_IN_STRCPY
);
2384 /* Don't crash on snprintf (str1, cst, "%s"). */
2388 tree orig_len
= get_maxval_strlen (orig
, 0);
2389 if (!orig_len
|| TREE_CODE (orig_len
) != INTEGER_CST
)
2392 /* We could expand this as
2393 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
2395 memcpy (str1, str2_with_nul_at_cstm1, cst);
2396 but in the former case that might increase code size
2397 and in the latter case grow .rodata section too much.
2399 if (compare_tree_int (orig_len
, destlen
) >= 0)
2402 /* Convert snprintf (str1, cst, "%s", str2) into
2403 strcpy (str1, str2) if strlen (str2) < cst. */
2404 gimple_seq stmts
= NULL
;
2405 gimple
*repl
= gimple_build_call (fn
, 2, dest
, orig
);
2406 gimple_seq_add_stmt_without_update (&stmts
, repl
);
2407 if (gimple_call_lhs (stmt
))
2409 if (!useless_type_conversion_p (integer_type_node
,
2410 TREE_TYPE (orig_len
)))
2411 orig_len
= fold_convert (integer_type_node
, orig_len
);
2412 repl
= gimple_build_assign (gimple_call_lhs (stmt
), orig_len
);
2413 gimple_seq_add_stmt_without_update (&stmts
, repl
);
2414 gsi_replace_with_seq_vops (gsi
, stmts
);
2415 /* gsi now points at the assignment to the lhs, get a
2416 stmt iterator to the memcpy call.
2417 ??? We can't use gsi_for_stmt as that doesn't work when the
2418 CFG isn't built yet. */
2419 gimple_stmt_iterator gsi2
= *gsi
;
2425 gsi_replace_with_seq_vops (gsi
, stmts
);
2433 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
2434 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
2435 more than 3 arguments, and ARG may be null in the 2-argument case.
2437 Return NULL_TREE if no simplification was possible, otherwise return the
2438 simplified form of the call as a tree. FCODE is the BUILT_IN_*
2439 code of the function to be simplified. */
2442 gimple_fold_builtin_fprintf (gimple_stmt_iterator
*gsi
,
2443 tree fp
, tree fmt
, tree arg
,
2444 enum built_in_function fcode
)
2446 gcall
*stmt
= as_a
<gcall
*> (gsi_stmt (*gsi
));
2447 tree fn_fputc
, fn_fputs
;
2448 const char *fmt_str
= NULL
;
2450 /* If the return value is used, don't do the transformation. */
2451 if (gimple_call_lhs (stmt
) != NULL_TREE
)
2454 /* Check whether the format is a literal string constant. */
2455 fmt_str
= c_getstr (fmt
);
2456 if (fmt_str
== NULL
)
2459 if (fcode
== BUILT_IN_FPRINTF_UNLOCKED
)
2461 /* If we're using an unlocked function, assume the other
2462 unlocked functions exist explicitly. */
2463 fn_fputc
= builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED
);
2464 fn_fputs
= builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED
);
2468 fn_fputc
= builtin_decl_implicit (BUILT_IN_FPUTC
);
2469 fn_fputs
= builtin_decl_implicit (BUILT_IN_FPUTS
);
2472 if (!init_target_chars ())
2475 /* If the format doesn't contain % args or %%, use strcpy. */
2476 if (strchr (fmt_str
, target_percent
) == NULL
)
2478 if (fcode
!= BUILT_IN_VFPRINTF
&& fcode
!= BUILT_IN_VFPRINTF_CHK
2482 /* If the format specifier was "", fprintf does nothing. */
2483 if (fmt_str
[0] == '\0')
2485 replace_call_with_value (gsi
, NULL_TREE
);
2489 /* When "string" doesn't contain %, replace all cases of
2490 fprintf (fp, string) with fputs (string, fp). The fputs
2491 builtin will take care of special cases like length == 1. */
2494 gcall
*repl
= gimple_build_call (fn_fputs
, 2, fmt
, fp
);
2495 replace_call_with_call_and_fold (gsi
, repl
);
2500 /* The other optimizations can be done only on the non-va_list variants. */
2501 else if (fcode
== BUILT_IN_VFPRINTF
|| fcode
== BUILT_IN_VFPRINTF_CHK
)
2504 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
2505 else if (strcmp (fmt_str
, target_percent_s
) == 0)
2507 if (!arg
|| ! POINTER_TYPE_P (TREE_TYPE (arg
)))
2511 gcall
*repl
= gimple_build_call (fn_fputs
, 2, arg
, fp
);
2512 replace_call_with_call_and_fold (gsi
, repl
);
2517 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
2518 else if (strcmp (fmt_str
, target_percent_c
) == 0)
2521 || ! useless_type_conversion_p (integer_type_node
, TREE_TYPE (arg
)))
2525 gcall
*repl
= gimple_build_call (fn_fputc
, 2, arg
, fp
);
2526 replace_call_with_call_and_fold (gsi
, repl
);
2534 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
2535 FMT and ARG are the arguments to the call; we don't fold cases with
2536 more than 2 arguments, and ARG may be null if this is a 1-argument case.
2538 Return NULL_TREE if no simplification was possible, otherwise return the
2539 simplified form of the call as a tree. FCODE is the BUILT_IN_*
2540 code of the function to be simplified. */
2543 gimple_fold_builtin_printf (gimple_stmt_iterator
*gsi
, tree fmt
,
2544 tree arg
, enum built_in_function fcode
)
2546 gcall
*stmt
= as_a
<gcall
*> (gsi_stmt (*gsi
));
2547 tree fn_putchar
, fn_puts
, newarg
;
2548 const char *fmt_str
= NULL
;
2550 /* If the return value is used, don't do the transformation. */
2551 if (gimple_call_lhs (stmt
) != NULL_TREE
)
2554 /* Check whether the format is a literal string constant. */
2555 fmt_str
= c_getstr (fmt
);
2556 if (fmt_str
== NULL
)
2559 if (fcode
== BUILT_IN_PRINTF_UNLOCKED
)
2561 /* If we're using an unlocked function, assume the other
2562 unlocked functions exist explicitly. */
2563 fn_putchar
= builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED
);
2564 fn_puts
= builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED
);
2568 fn_putchar
= builtin_decl_implicit (BUILT_IN_PUTCHAR
);
2569 fn_puts
= builtin_decl_implicit (BUILT_IN_PUTS
);
2572 if (!init_target_chars ())
2575 if (strcmp (fmt_str
, target_percent_s
) == 0
2576 || strchr (fmt_str
, target_percent
) == NULL
)
2580 if (strcmp (fmt_str
, target_percent_s
) == 0)
2582 if (fcode
== BUILT_IN_VPRINTF
|| fcode
== BUILT_IN_VPRINTF_CHK
)
2585 if (!arg
|| ! POINTER_TYPE_P (TREE_TYPE (arg
)))
2588 str
= c_getstr (arg
);
2594 /* The format specifier doesn't contain any '%' characters. */
2595 if (fcode
!= BUILT_IN_VPRINTF
&& fcode
!= BUILT_IN_VPRINTF_CHK
2601 /* If the string was "", printf does nothing. */
2604 replace_call_with_value (gsi
, NULL_TREE
);
2608 /* If the string has length of 1, call putchar. */
2611 /* Given printf("c"), (where c is any one character,)
2612 convert "c"[0] to an int and pass that to the replacement
2614 newarg
= build_int_cst (integer_type_node
, str
[0]);
2617 gcall
*repl
= gimple_build_call (fn_putchar
, 1, newarg
);
2618 replace_call_with_call_and_fold (gsi
, repl
);
2624 /* If the string was "string\n", call puts("string"). */
2625 size_t len
= strlen (str
);
2626 if ((unsigned char)str
[len
- 1] == target_newline
2627 && (size_t) (int) len
== len
2631 tree offset_node
, string_cst
;
2633 /* Create a NUL-terminated string that's one char shorter
2634 than the original, stripping off the trailing '\n'. */
2635 newarg
= build_string_literal (len
, str
);
2636 string_cst
= string_constant (newarg
, &offset_node
);
2637 gcc_checking_assert (string_cst
2638 && (TREE_STRING_LENGTH (string_cst
)
2640 && integer_zerop (offset_node
)
2642 TREE_STRING_POINTER (string_cst
)[len
- 1]
2644 /* build_string_literal creates a new STRING_CST,
2645 modify it in place to avoid double copying. */
2646 newstr
= CONST_CAST (char *, TREE_STRING_POINTER (string_cst
));
2647 newstr
[len
- 1] = '\0';
2650 gcall
*repl
= gimple_build_call (fn_puts
, 1, newarg
);
2651 replace_call_with_call_and_fold (gsi
, repl
);
2656 /* We'd like to arrange to call fputs(string,stdout) here,
2657 but we need stdout and don't have a way to get it yet. */
2662 /* The other optimizations can be done only on the non-va_list variants. */
2663 else if (fcode
== BUILT_IN_VPRINTF
|| fcode
== BUILT_IN_VPRINTF_CHK
)
2666 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
2667 else if (strcmp (fmt_str
, target_percent_s_newline
) == 0)
2669 if (!arg
|| ! POINTER_TYPE_P (TREE_TYPE (arg
)))
2673 gcall
*repl
= gimple_build_call (fn_puts
, 1, arg
);
2674 replace_call_with_call_and_fold (gsi
, repl
);
2679 /* If the format specifier was "%c", call __builtin_putchar(arg). */
2680 else if (strcmp (fmt_str
, target_percent_c
) == 0)
2682 if (!arg
|| ! useless_type_conversion_p (integer_type_node
,
2687 gcall
*repl
= gimple_build_call (fn_putchar
, 1, arg
);
2688 replace_call_with_call_and_fold (gsi
, repl
);
2698 /* Fold a call to __builtin_strlen with known length LEN. */
2701 gimple_fold_builtin_strlen (gimple_stmt_iterator
*gsi
)
2703 gimple
*stmt
= gsi_stmt (*gsi
);
2704 tree len
= get_maxval_strlen (gimple_call_arg (stmt
, 0), 0);
2707 len
= force_gimple_operand_gsi (gsi
, len
, true, NULL
, true, GSI_SAME_STMT
);
2708 replace_call_with_value (gsi
, len
);
2712 /* Fold a call to __builtin_acc_on_device. */
2715 gimple_fold_builtin_acc_on_device (gimple_stmt_iterator
*gsi
, tree arg0
)
2717 /* Defer folding until we know which compiler we're in. */
2718 if (symtab
->state
!= EXPANSION
)
2721 unsigned val_host
= GOMP_DEVICE_HOST
;
2722 unsigned val_dev
= GOMP_DEVICE_NONE
;
2724 #ifdef ACCEL_COMPILER
2725 val_host
= GOMP_DEVICE_NOT_HOST
;
2726 val_dev
= ACCEL_COMPILER_acc_device
;
2729 location_t loc
= gimple_location (gsi_stmt (*gsi
));
2731 tree host_eq
= make_ssa_name (boolean_type_node
);
2732 gimple
*host_ass
= gimple_build_assign
2733 (host_eq
, EQ_EXPR
, arg0
, build_int_cst (TREE_TYPE (arg0
), val_host
));
2734 gimple_set_location (host_ass
, loc
);
2735 gsi_insert_before (gsi
, host_ass
, GSI_SAME_STMT
);
2737 tree dev_eq
= make_ssa_name (boolean_type_node
);
2738 gimple
*dev_ass
= gimple_build_assign
2739 (dev_eq
, EQ_EXPR
, arg0
, build_int_cst (TREE_TYPE (arg0
), val_dev
));
2740 gimple_set_location (dev_ass
, loc
);
2741 gsi_insert_before (gsi
, dev_ass
, GSI_SAME_STMT
);
2743 tree result
= make_ssa_name (boolean_type_node
);
2744 gimple
*result_ass
= gimple_build_assign
2745 (result
, BIT_IOR_EXPR
, host_eq
, dev_eq
);
2746 gimple_set_location (result_ass
, loc
);
2747 gsi_insert_before (gsi
, result_ass
, GSI_SAME_STMT
);
2749 replace_call_with_value (gsi
, result
);
2754 /* Fold the non-target builtin at *GSI and return whether any simplification
2758 gimple_fold_builtin (gimple_stmt_iterator
*gsi
)
2760 gcall
*stmt
= as_a
<gcall
*>(gsi_stmt (*gsi
));
2761 tree callee
= gimple_call_fndecl (stmt
);
2763 /* Give up for always_inline inline builtins until they are
2765 if (avoid_folding_inline_builtin (callee
))
2768 unsigned n
= gimple_call_num_args (stmt
);
2769 enum built_in_function fcode
= DECL_FUNCTION_CODE (callee
);
2772 case BUILT_IN_BZERO
:
2773 return gimple_fold_builtin_memset (gsi
, integer_zero_node
,
2774 gimple_call_arg (stmt
, 1));
2775 case BUILT_IN_MEMSET
:
2776 return gimple_fold_builtin_memset (gsi
,
2777 gimple_call_arg (stmt
, 1),
2778 gimple_call_arg (stmt
, 2));
2779 case BUILT_IN_BCOPY
:
2780 return gimple_fold_builtin_memory_op (gsi
, gimple_call_arg (stmt
, 1),
2781 gimple_call_arg (stmt
, 0), 3);
2782 case BUILT_IN_MEMCPY
:
2783 return gimple_fold_builtin_memory_op (gsi
, gimple_call_arg (stmt
, 0),
2784 gimple_call_arg (stmt
, 1), 0);
2785 case BUILT_IN_MEMPCPY
:
2786 return gimple_fold_builtin_memory_op (gsi
, gimple_call_arg (stmt
, 0),
2787 gimple_call_arg (stmt
, 1), 1);
2788 case BUILT_IN_MEMMOVE
:
2789 return gimple_fold_builtin_memory_op (gsi
, gimple_call_arg (stmt
, 0),
2790 gimple_call_arg (stmt
, 1), 3);
2791 case BUILT_IN_SPRINTF_CHK
:
2792 case BUILT_IN_VSPRINTF_CHK
:
2793 return gimple_fold_builtin_sprintf_chk (gsi
, fcode
);
2794 case BUILT_IN_STRCAT_CHK
:
2795 return gimple_fold_builtin_strcat_chk (gsi
);
2796 case BUILT_IN_STRNCAT_CHK
:
2797 return gimple_fold_builtin_strncat_chk (gsi
);
2798 case BUILT_IN_STRLEN
:
2799 return gimple_fold_builtin_strlen (gsi
);
2800 case BUILT_IN_STRCPY
:
2801 return gimple_fold_builtin_strcpy (gsi
,
2802 gimple_call_arg (stmt
, 0),
2803 gimple_call_arg (stmt
, 1));
2804 case BUILT_IN_STRNCPY
:
2805 return gimple_fold_builtin_strncpy (gsi
,
2806 gimple_call_arg (stmt
, 0),
2807 gimple_call_arg (stmt
, 1),
2808 gimple_call_arg (stmt
, 2));
2809 case BUILT_IN_STRCAT
:
2810 return gimple_fold_builtin_strcat (gsi
, gimple_call_arg (stmt
, 0),
2811 gimple_call_arg (stmt
, 1));
2812 case BUILT_IN_STRNCAT
:
2813 return gimple_fold_builtin_strncat (gsi
);
2814 case BUILT_IN_FPUTS
:
2815 return gimple_fold_builtin_fputs (gsi
, gimple_call_arg (stmt
, 0),
2816 gimple_call_arg (stmt
, 1), false);
2817 case BUILT_IN_FPUTS_UNLOCKED
:
2818 return gimple_fold_builtin_fputs (gsi
, gimple_call_arg (stmt
, 0),
2819 gimple_call_arg (stmt
, 1), true);
2820 case BUILT_IN_MEMCPY_CHK
:
2821 case BUILT_IN_MEMPCPY_CHK
:
2822 case BUILT_IN_MEMMOVE_CHK
:
2823 case BUILT_IN_MEMSET_CHK
:
2824 return gimple_fold_builtin_memory_chk (gsi
,
2825 gimple_call_arg (stmt
, 0),
2826 gimple_call_arg (stmt
, 1),
2827 gimple_call_arg (stmt
, 2),
2828 gimple_call_arg (stmt
, 3),
2830 case BUILT_IN_STPCPY
:
2831 return gimple_fold_builtin_stpcpy (gsi
);
2832 case BUILT_IN_STRCPY_CHK
:
2833 case BUILT_IN_STPCPY_CHK
:
2834 return gimple_fold_builtin_stxcpy_chk (gsi
,
2835 gimple_call_arg (stmt
, 0),
2836 gimple_call_arg (stmt
, 1),
2837 gimple_call_arg (stmt
, 2),
2839 case BUILT_IN_STRNCPY_CHK
:
2840 case BUILT_IN_STPNCPY_CHK
:
2841 return gimple_fold_builtin_stxncpy_chk (gsi
,
2842 gimple_call_arg (stmt
, 0),
2843 gimple_call_arg (stmt
, 1),
2844 gimple_call_arg (stmt
, 2),
2845 gimple_call_arg (stmt
, 3),
2847 case BUILT_IN_SNPRINTF_CHK
:
2848 case BUILT_IN_VSNPRINTF_CHK
:
2849 return gimple_fold_builtin_snprintf_chk (gsi
, fcode
);
2850 case BUILT_IN_SNPRINTF
:
2851 return gimple_fold_builtin_snprintf (gsi
);
2852 case BUILT_IN_SPRINTF
:
2853 return gimple_fold_builtin_sprintf (gsi
);
2854 case BUILT_IN_FPRINTF
:
2855 case BUILT_IN_FPRINTF_UNLOCKED
:
2856 case BUILT_IN_VFPRINTF
:
2857 if (n
== 2 || n
== 3)
2858 return gimple_fold_builtin_fprintf (gsi
,
2859 gimple_call_arg (stmt
, 0),
2860 gimple_call_arg (stmt
, 1),
2862 ? gimple_call_arg (stmt
, 2)
2866 case BUILT_IN_FPRINTF_CHK
:
2867 case BUILT_IN_VFPRINTF_CHK
:
2868 if (n
== 3 || n
== 4)
2869 return gimple_fold_builtin_fprintf (gsi
,
2870 gimple_call_arg (stmt
, 0),
2871 gimple_call_arg (stmt
, 2),
2873 ? gimple_call_arg (stmt
, 3)
2877 case BUILT_IN_PRINTF
:
2878 case BUILT_IN_PRINTF_UNLOCKED
:
2879 case BUILT_IN_VPRINTF
:
2880 if (n
== 1 || n
== 2)
2881 return gimple_fold_builtin_printf (gsi
, gimple_call_arg (stmt
, 0),
2883 ? gimple_call_arg (stmt
, 1)
2884 : NULL_TREE
, fcode
);
2886 case BUILT_IN_PRINTF_CHK
:
2887 case BUILT_IN_VPRINTF_CHK
:
2888 if (n
== 2 || n
== 3)
2889 return gimple_fold_builtin_printf (gsi
, gimple_call_arg (stmt
, 1),
2891 ? gimple_call_arg (stmt
, 2)
2892 : NULL_TREE
, fcode
);
2893 case BUILT_IN_ACC_ON_DEVICE
:
2894 return gimple_fold_builtin_acc_on_device (gsi
,
2895 gimple_call_arg (stmt
, 0));
2899 /* Try the generic builtin folder. */
2900 bool ignore
= (gimple_call_lhs (stmt
) == NULL
);
2901 tree result
= fold_call_stmt (stmt
, ignore
);
2905 STRIP_NOPS (result
);
2907 result
= fold_convert (gimple_call_return_type (stmt
), result
);
2908 if (!update_call_from_tree (gsi
, result
))
2909 gimplify_and_update_call_from_tree (gsi
, result
);
2916 /* Return true if ARG0 CODE ARG1 in infinite signed precision operation
2917 doesn't fit into TYPE. The test for overflow should be regardless of
2918 -fwrapv, and even for unsigned types. */
2921 arith_overflowed_p (enum tree_code code
, const_tree type
,
2922 const_tree arg0
, const_tree arg1
)
2924 typedef FIXED_WIDE_INT (WIDE_INT_MAX_PRECISION
* 2) widest2_int
;
2925 typedef generic_wide_int
<wi::extended_tree
<WIDE_INT_MAX_PRECISION
* 2> >
2927 widest2_int warg0
= widest2_int_cst (arg0
);
2928 widest2_int warg1
= widest2_int_cst (arg1
);
2932 case PLUS_EXPR
: wres
= wi::add (warg0
, warg1
); break;
2933 case MINUS_EXPR
: wres
= wi::sub (warg0
, warg1
); break;
2934 case MULT_EXPR
: wres
= wi::mul (warg0
, warg1
); break;
2935 default: gcc_unreachable ();
2937 signop sign
= TYPE_SIGN (type
);
2938 if (sign
== UNSIGNED
&& wi::neg_p (wres
))
2940 return wi::min_precision (wres
, sign
) > TYPE_PRECISION (type
);
2943 /* Attempt to fold a call statement referenced by the statement iterator GSI.
2944 The statement may be replaced by another statement, e.g., if the call
2945 simplifies to a constant value. Return true if any changes were made.
2946 It is assumed that the operands have been previously folded. */
2949 gimple_fold_call (gimple_stmt_iterator
*gsi
, bool inplace
)
2951 gcall
*stmt
= as_a
<gcall
*> (gsi_stmt (*gsi
));
2953 bool changed
= false;
2956 /* Fold *& in call arguments. */
2957 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
2958 if (REFERENCE_CLASS_P (gimple_call_arg (stmt
, i
)))
2960 tree tmp
= maybe_fold_reference (gimple_call_arg (stmt
, i
), false);
2963 gimple_call_set_arg (stmt
, i
, tmp
);
2968 /* Check for virtual calls that became direct calls. */
2969 callee
= gimple_call_fn (stmt
);
2970 if (callee
&& TREE_CODE (callee
) == OBJ_TYPE_REF
)
2972 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee
)) != NULL_TREE
)
2974 if (dump_file
&& virtual_method_call_p (callee
)
2975 && !possible_polymorphic_call_target_p
2976 (callee
, stmt
, cgraph_node::get (gimple_call_addr_fndecl
2977 (OBJ_TYPE_REF_EXPR (callee
)))))
2980 "Type inheritance inconsistent devirtualization of ");
2981 print_gimple_stmt (dump_file
, stmt
, 0, TDF_SLIM
);
2982 fprintf (dump_file
, " to ");
2983 print_generic_expr (dump_file
, callee
, TDF_SLIM
);
2984 fprintf (dump_file
, "\n");
2987 gimple_call_set_fn (stmt
, OBJ_TYPE_REF_EXPR (callee
));
2990 else if (flag_devirtualize
&& !inplace
&& virtual_method_call_p (callee
))
2993 vec
<cgraph_node
*>targets
2994 = possible_polymorphic_call_targets (callee
, stmt
, &final
);
2995 if (final
&& targets
.length () <= 1 && dbg_cnt (devirt
))
2997 tree lhs
= gimple_call_lhs (stmt
);
2998 if (dump_enabled_p ())
3000 location_t loc
= gimple_location_safe (stmt
);
3001 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS
, loc
,
3002 "folding virtual function call to %s\n",
3003 targets
.length () == 1
3004 ? targets
[0]->name ()
3005 : "__builtin_unreachable");
3007 if (targets
.length () == 1)
3009 gimple_call_set_fndecl (stmt
, targets
[0]->decl
);
3011 /* If the call becomes noreturn, remove the lhs. */
3012 if (lhs
&& (gimple_call_flags (stmt
) & ECF_NORETURN
))
3014 if (TREE_CODE (lhs
) == SSA_NAME
)
3016 tree var
= create_tmp_var (TREE_TYPE (lhs
));
3017 tree def
= get_or_create_ssa_default_def (cfun
, var
);
3018 gimple
*new_stmt
= gimple_build_assign (lhs
, def
);
3019 gsi_insert_before (gsi
, new_stmt
, GSI_SAME_STMT
);
3021 gimple_call_set_lhs (stmt
, NULL_TREE
);
3023 maybe_remove_unused_call_args (cfun
, stmt
);
3027 tree fndecl
= builtin_decl_implicit (BUILT_IN_UNREACHABLE
);
3028 gimple
*new_stmt
= gimple_build_call (fndecl
, 0);
3029 gimple_set_location (new_stmt
, gimple_location (stmt
));
3030 if (lhs
&& TREE_CODE (lhs
) == SSA_NAME
)
3032 tree var
= create_tmp_var (TREE_TYPE (lhs
));
3033 tree def
= get_or_create_ssa_default_def (cfun
, var
);
3035 /* To satisfy condition for
3036 cgraph_update_edges_for_call_stmt_node,
3037 we need to preserve GIMPLE_CALL statement
3038 at position of GSI iterator. */
3039 update_call_from_tree (gsi
, def
);
3040 gsi_insert_before (gsi
, new_stmt
, GSI_NEW_STMT
);
3044 gimple_set_vuse (new_stmt
, gimple_vuse (stmt
));
3045 gimple_set_vdef (new_stmt
, gimple_vdef (stmt
));
3046 gsi_replace (gsi
, new_stmt
, false);
3054 /* Check for indirect calls that became direct calls, and then
3055 no longer require a static chain. */
3056 if (gimple_call_chain (stmt
))
3058 tree fn
= gimple_call_fndecl (stmt
);
3059 if (fn
&& !DECL_STATIC_CHAIN (fn
))
3061 gimple_call_set_chain (stmt
, NULL
);
3066 tree tmp
= maybe_fold_reference (gimple_call_chain (stmt
), false);
3069 gimple_call_set_chain (stmt
, tmp
);
3078 /* Check for builtins that CCP can handle using information not
3079 available in the generic fold routines. */
3080 if (gimple_call_builtin_p (stmt
, BUILT_IN_NORMAL
))
3082 if (gimple_fold_builtin (gsi
))
3085 else if (gimple_call_builtin_p (stmt
, BUILT_IN_MD
))
3087 changed
|= targetm
.gimple_fold_builtin (gsi
);
3089 else if (gimple_call_internal_p (stmt
))
3091 enum tree_code subcode
= ERROR_MARK
;
3092 tree result
= NULL_TREE
;
3093 bool cplx_result
= false;
3094 tree overflow
= NULL_TREE
;
3095 switch (gimple_call_internal_fn (stmt
))
3097 case IFN_BUILTIN_EXPECT
:
3098 result
= fold_builtin_expect (gimple_location (stmt
),
3099 gimple_call_arg (stmt
, 0),
3100 gimple_call_arg (stmt
, 1),
3101 gimple_call_arg (stmt
, 2));
3103 case IFN_UBSAN_OBJECT_SIZE
:
3104 if (integer_all_onesp (gimple_call_arg (stmt
, 2))
3105 || (TREE_CODE (gimple_call_arg (stmt
, 1)) == INTEGER_CST
3106 && TREE_CODE (gimple_call_arg (stmt
, 2)) == INTEGER_CST
3107 && tree_int_cst_le (gimple_call_arg (stmt
, 1),
3108 gimple_call_arg (stmt
, 2))))
3110 gsi_replace (gsi
, gimple_build_nop (), false);
3111 unlink_stmt_vdef (stmt
);
3112 release_defs (stmt
);
3116 case IFN_UBSAN_CHECK_ADD
:
3117 subcode
= PLUS_EXPR
;
3119 case IFN_UBSAN_CHECK_SUB
:
3120 subcode
= MINUS_EXPR
;
3122 case IFN_UBSAN_CHECK_MUL
:
3123 subcode
= MULT_EXPR
;
3125 case IFN_ADD_OVERFLOW
:
3126 subcode
= PLUS_EXPR
;
3129 case IFN_SUB_OVERFLOW
:
3130 subcode
= MINUS_EXPR
;
3133 case IFN_MUL_OVERFLOW
:
3134 subcode
= MULT_EXPR
;
3140 if (subcode
!= ERROR_MARK
)
3142 tree arg0
= gimple_call_arg (stmt
, 0);
3143 tree arg1
= gimple_call_arg (stmt
, 1);
3144 tree type
= TREE_TYPE (arg0
);
3147 tree lhs
= gimple_call_lhs (stmt
);
3148 if (lhs
== NULL_TREE
)
3151 type
= TREE_TYPE (TREE_TYPE (lhs
));
3153 if (type
== NULL_TREE
)
3155 /* x = y + 0; x = y - 0; x = y * 0; */
3156 else if (integer_zerop (arg1
))
3157 result
= subcode
== MULT_EXPR
? integer_zero_node
: arg0
;
3158 /* x = 0 + y; x = 0 * y; */
3159 else if (subcode
!= MINUS_EXPR
&& integer_zerop (arg0
))
3160 result
= subcode
== MULT_EXPR
? integer_zero_node
: arg1
;
3162 else if (subcode
== MINUS_EXPR
&& operand_equal_p (arg0
, arg1
, 0))
3163 result
= integer_zero_node
;
3164 /* x = y * 1; x = 1 * y; */
3165 else if (subcode
== MULT_EXPR
&& integer_onep (arg1
))
3167 else if (subcode
== MULT_EXPR
&& integer_onep (arg0
))
3169 else if (TREE_CODE (arg0
) == INTEGER_CST
3170 && TREE_CODE (arg1
) == INTEGER_CST
)
3173 result
= int_const_binop (subcode
, fold_convert (type
, arg0
),
3174 fold_convert (type
, arg1
));
3176 result
= int_const_binop (subcode
, arg0
, arg1
);
3177 if (result
&& arith_overflowed_p (subcode
, type
, arg0
, arg1
))
3180 overflow
= build_one_cst (type
);
3187 if (result
== integer_zero_node
)
3188 result
= build_zero_cst (type
);
3189 else if (cplx_result
&& TREE_TYPE (result
) != type
)
3191 if (TREE_CODE (result
) == INTEGER_CST
)
3193 if (arith_overflowed_p (PLUS_EXPR
, type
, result
,
3195 overflow
= build_one_cst (type
);
3197 else if ((!TYPE_UNSIGNED (TREE_TYPE (result
))
3198 && TYPE_UNSIGNED (type
))
3199 || (TYPE_PRECISION (type
)
3200 < (TYPE_PRECISION (TREE_TYPE (result
))
3201 + (TYPE_UNSIGNED (TREE_TYPE (result
))
3202 && !TYPE_UNSIGNED (type
)))))
3205 result
= fold_convert (type
, result
);
3212 if (TREE_CODE (result
) == INTEGER_CST
&& TREE_OVERFLOW (result
))
3213 result
= drop_tree_overflow (result
);
3216 if (overflow
== NULL_TREE
)
3217 overflow
= build_zero_cst (TREE_TYPE (result
));
3218 tree ctype
= build_complex_type (TREE_TYPE (result
));
3219 if (TREE_CODE (result
) == INTEGER_CST
3220 && TREE_CODE (overflow
) == INTEGER_CST
)
3221 result
= build_complex (ctype
, result
, overflow
);
3223 result
= build2_loc (gimple_location (stmt
), COMPLEX_EXPR
,
3224 ctype
, result
, overflow
);
3226 if (!update_call_from_tree (gsi
, result
))
3227 gimplify_and_update_call_from_tree (gsi
, result
);
3236 /* Return true whether NAME has a use on STMT. */
3239 has_use_on_stmt (tree name
, gimple
*stmt
)
3241 imm_use_iterator iter
;
3242 use_operand_p use_p
;
3243 FOR_EACH_IMM_USE_FAST (use_p
, iter
, name
)
3244 if (USE_STMT (use_p
) == stmt
)
3249 /* Worker for fold_stmt_1 dispatch to pattern based folding with
3252 Replaces *GSI with the simplification result in RCODE and OPS
3253 and the associated statements in *SEQ. Does the replacement
3254 according to INPLACE and returns true if the operation succeeded. */
3257 replace_stmt_with_simplification (gimple_stmt_iterator
*gsi
,
3258 code_helper rcode
, tree
*ops
,
3259 gimple_seq
*seq
, bool inplace
)
3261 gimple
*stmt
= gsi_stmt (*gsi
);
3263 /* Play safe and do not allow abnormals to be mentioned in
3264 newly created statements. See also maybe_push_res_to_seq.
3265 As an exception allow such uses if there was a use of the
3266 same SSA name on the old stmt. */
3267 if ((TREE_CODE (ops
[0]) == SSA_NAME
3268 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops
[0])
3269 && !has_use_on_stmt (ops
[0], stmt
))
3271 && TREE_CODE (ops
[1]) == SSA_NAME
3272 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops
[1])
3273 && !has_use_on_stmt (ops
[1], stmt
))
3275 && TREE_CODE (ops
[2]) == SSA_NAME
3276 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops
[2])
3277 && !has_use_on_stmt (ops
[2], stmt
)))
3280 if (gcond
*cond_stmt
= dyn_cast
<gcond
*> (stmt
))
3282 gcc_assert (rcode
.is_tree_code ());
3283 if (TREE_CODE_CLASS ((enum tree_code
)rcode
) == tcc_comparison
3284 /* GIMPLE_CONDs condition may not throw. */
3285 && (!flag_exceptions
3286 || !cfun
->can_throw_non_call_exceptions
3287 || !operation_could_trap_p (rcode
,
3288 FLOAT_TYPE_P (TREE_TYPE (ops
[0])),
3290 gimple_cond_set_condition (cond_stmt
, rcode
, ops
[0], ops
[1]);
3291 else if (rcode
== SSA_NAME
)
3292 gimple_cond_set_condition (cond_stmt
, NE_EXPR
, ops
[0],
3293 build_zero_cst (TREE_TYPE (ops
[0])));
3294 else if (rcode
== INTEGER_CST
)
3296 if (integer_zerop (ops
[0]))
3297 gimple_cond_make_false (cond_stmt
);
3299 gimple_cond_make_true (cond_stmt
);
3303 tree res
= maybe_push_res_to_seq (rcode
, boolean_type_node
,
3307 gimple_cond_set_condition (cond_stmt
, NE_EXPR
, res
,
3308 build_zero_cst (TREE_TYPE (res
)));
3312 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3314 fprintf (dump_file
, "gimple_simplified to ");
3315 if (!gimple_seq_empty_p (*seq
))
3316 print_gimple_seq (dump_file
, *seq
, 0, TDF_SLIM
);
3317 print_gimple_stmt (dump_file
, gsi_stmt (*gsi
),
3320 gsi_insert_seq_before (gsi
, *seq
, GSI_SAME_STMT
);
3323 else if (is_gimple_assign (stmt
)
3324 && rcode
.is_tree_code ())
3327 || gimple_num_ops (stmt
) > get_gimple_rhs_num_ops (rcode
))
3329 maybe_build_generic_op (rcode
,
3330 TREE_TYPE (gimple_assign_lhs (stmt
)),
3331 &ops
[0], ops
[1], ops
[2]);
3332 gimple_assign_set_rhs_with_ops (gsi
, rcode
, ops
[0], ops
[1], ops
[2]);
3333 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3335 fprintf (dump_file
, "gimple_simplified to ");
3336 if (!gimple_seq_empty_p (*seq
))
3337 print_gimple_seq (dump_file
, *seq
, 0, TDF_SLIM
);
3338 print_gimple_stmt (dump_file
, gsi_stmt (*gsi
),
3341 gsi_insert_seq_before (gsi
, *seq
, GSI_SAME_STMT
);
3345 else if (rcode
.is_fn_code ()
3346 && gimple_call_builtin_p (stmt
, rcode
))
3349 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
3351 gcc_assert (ops
[i
] != NULL_TREE
);
3352 gimple_call_set_arg (stmt
, i
, ops
[i
]);
3355 gcc_assert (ops
[i
] == NULL_TREE
);
3356 gcc_assert (gimple_seq_empty_p (*seq
));
3361 if (gimple_has_lhs (stmt
))
3363 tree lhs
= gimple_get_lhs (stmt
);
3364 if (!maybe_push_res_to_seq (rcode
, TREE_TYPE (lhs
),
3367 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3369 fprintf (dump_file
, "gimple_simplified to ");
3370 print_gimple_seq (dump_file
, *seq
, 0, TDF_SLIM
);
3372 gsi_replace_with_seq_vops (gsi
, *seq
);
3382 /* Canonicalize MEM_REFs invariant address operand after propagation. */
3385 maybe_canonicalize_mem_ref_addr (tree
*t
)
3389 if (TREE_CODE (*t
) == ADDR_EXPR
)
3390 t
= &TREE_OPERAND (*t
, 0);
3392 while (handled_component_p (*t
))
3393 t
= &TREE_OPERAND (*t
, 0);
3395 /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
3396 of invariant addresses into a SSA name MEM_REF address. */
3397 if (TREE_CODE (*t
) == MEM_REF
3398 || TREE_CODE (*t
) == TARGET_MEM_REF
)
3400 tree addr
= TREE_OPERAND (*t
, 0);
3401 if (TREE_CODE (addr
) == ADDR_EXPR
3402 && (TREE_CODE (TREE_OPERAND (addr
, 0)) == MEM_REF
3403 || handled_component_p (TREE_OPERAND (addr
, 0))))
3406 HOST_WIDE_INT coffset
;
3407 base
= get_addr_base_and_unit_offset (TREE_OPERAND (addr
, 0),
3412 TREE_OPERAND (*t
, 0) = build_fold_addr_expr (base
);
3413 TREE_OPERAND (*t
, 1) = int_const_binop (PLUS_EXPR
,
3414 TREE_OPERAND (*t
, 1),
3415 size_int (coffset
));
3418 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t
, 0)) == DEBUG_EXPR_DECL
3419 || is_gimple_mem_ref_addr (TREE_OPERAND (*t
, 0)));
3422 /* Canonicalize back MEM_REFs to plain reference trees if the object
3423 accessed is a decl that has the same access semantics as the MEM_REF. */
3424 if (TREE_CODE (*t
) == MEM_REF
3425 && TREE_CODE (TREE_OPERAND (*t
, 0)) == ADDR_EXPR
3426 && integer_zerop (TREE_OPERAND (*t
, 1))
3427 && MR_DEPENDENCE_CLIQUE (*t
) == 0)
3429 tree decl
= TREE_OPERAND (TREE_OPERAND (*t
, 0), 0);
3430 tree alias_type
= TREE_TYPE (TREE_OPERAND (*t
, 1));
3431 if (/* Same volatile qualification. */
3432 TREE_THIS_VOLATILE (*t
) == TREE_THIS_VOLATILE (decl
)
3433 /* Same TBAA behavior with -fstrict-aliasing. */
3434 && !TYPE_REF_CAN_ALIAS_ALL (alias_type
)
3435 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl
))
3436 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type
)))
3437 /* Same alignment. */
3438 && TYPE_ALIGN (TREE_TYPE (decl
)) == TYPE_ALIGN (TREE_TYPE (*t
))
3439 /* We have to look out here to not drop a required conversion
3440 from the rhs to the lhs if *t appears on the lhs or vice-versa
3441 if it appears on the rhs. Thus require strict type
3443 && types_compatible_p (TREE_TYPE (*t
), TREE_TYPE (decl
)))
3445 *t
= TREE_OPERAND (TREE_OPERAND (*t
, 0), 0);
3450 /* Canonicalize TARGET_MEM_REF in particular with respect to
3451 the indexes becoming constant. */
3452 else if (TREE_CODE (*t
) == TARGET_MEM_REF
)
3454 tree tem
= maybe_fold_tmr (*t
);
3465 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
3466 distinguishes both cases. */
3469 fold_stmt_1 (gimple_stmt_iterator
*gsi
, bool inplace
, tree (*valueize
) (tree
))
3471 bool changed
= false;
3472 gimple
*stmt
= gsi_stmt (*gsi
);
3475 /* First do required canonicalization of [TARGET_]MEM_REF addresses
3477 ??? This shouldn't be done in generic folding but in the
3478 propagation helpers which also know whether an address was
3480 Also canonicalize operand order. */
3481 switch (gimple_code (stmt
))
3484 if (gimple_assign_rhs_class (stmt
) == GIMPLE_SINGLE_RHS
)
3486 tree
*rhs
= gimple_assign_rhs1_ptr (stmt
);
3487 if ((REFERENCE_CLASS_P (*rhs
)
3488 || TREE_CODE (*rhs
) == ADDR_EXPR
)
3489 && maybe_canonicalize_mem_ref_addr (rhs
))
3491 tree
*lhs
= gimple_assign_lhs_ptr (stmt
);
3492 if (REFERENCE_CLASS_P (*lhs
)
3493 && maybe_canonicalize_mem_ref_addr (lhs
))
3498 /* Canonicalize operand order. */
3499 enum tree_code code
= gimple_assign_rhs_code (stmt
);
3500 if (TREE_CODE_CLASS (code
) == tcc_comparison
3501 || commutative_tree_code (code
)
3502 || commutative_ternary_tree_code (code
))
3504 tree rhs1
= gimple_assign_rhs1 (stmt
);
3505 tree rhs2
= gimple_assign_rhs2 (stmt
);
3506 if (tree_swap_operands_p (rhs1
, rhs2
, false))
3508 gimple_assign_set_rhs1 (stmt
, rhs2
);
3509 gimple_assign_set_rhs2 (stmt
, rhs1
);
3510 if (TREE_CODE_CLASS (code
) == tcc_comparison
)
3511 gimple_assign_set_rhs_code (stmt
,
3512 swap_tree_comparison (code
));
3520 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
3522 tree
*arg
= gimple_call_arg_ptr (stmt
, i
);
3523 if (REFERENCE_CLASS_P (*arg
)
3524 && maybe_canonicalize_mem_ref_addr (arg
))
3527 tree
*lhs
= gimple_call_lhs_ptr (stmt
);
3529 && REFERENCE_CLASS_P (*lhs
)
3530 && maybe_canonicalize_mem_ref_addr (lhs
))
3536 gasm
*asm_stmt
= as_a
<gasm
*> (stmt
);
3537 for (i
= 0; i
< gimple_asm_noutputs (asm_stmt
); ++i
)
3539 tree link
= gimple_asm_output_op (asm_stmt
, i
);
3540 tree op
= TREE_VALUE (link
);
3541 if (REFERENCE_CLASS_P (op
)
3542 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link
)))
3545 for (i
= 0; i
< gimple_asm_ninputs (asm_stmt
); ++i
)
3547 tree link
= gimple_asm_input_op (asm_stmt
, i
);
3548 tree op
= TREE_VALUE (link
);
3549 if ((REFERENCE_CLASS_P (op
)
3550 || TREE_CODE (op
) == ADDR_EXPR
)
3551 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link
)))
3557 if (gimple_debug_bind_p (stmt
))
3559 tree
*val
= gimple_debug_bind_get_value_ptr (stmt
);
3561 && (REFERENCE_CLASS_P (*val
)
3562 || TREE_CODE (*val
) == ADDR_EXPR
)
3563 && maybe_canonicalize_mem_ref_addr (val
))
3569 /* Canonicalize operand order. */
3570 tree lhs
= gimple_cond_lhs (stmt
);
3571 tree rhs
= gimple_cond_rhs (stmt
);
3572 if (tree_swap_operands_p (lhs
, rhs
, false))
3574 gcond
*gc
= as_a
<gcond
*> (stmt
);
3575 gimple_cond_set_lhs (gc
, rhs
);
3576 gimple_cond_set_rhs (gc
, lhs
);
3577 gimple_cond_set_code (gc
,
3578 swap_tree_comparison (gimple_cond_code (gc
)));
3585 /* Dispatch to pattern-based folding. */
3587 || is_gimple_assign (stmt
)
3588 || gimple_code (stmt
) == GIMPLE_COND
)
3590 gimple_seq seq
= NULL
;
3593 if (gimple_simplify (stmt
, &rcode
, ops
, inplace
? NULL
: &seq
,
3594 valueize
, valueize
))
3596 if (replace_stmt_with_simplification (gsi
, rcode
, ops
, &seq
, inplace
))
3599 gimple_seq_discard (seq
);
3603 stmt
= gsi_stmt (*gsi
);
3605 /* Fold the main computation performed by the statement. */
3606 switch (gimple_code (stmt
))
3610 /* Try to canonicalize for boolean-typed X the comparisons
3611 X == 0, X == 1, X != 0, and X != 1. */
3612 if (gimple_assign_rhs_code (stmt
) == EQ_EXPR
3613 || gimple_assign_rhs_code (stmt
) == NE_EXPR
)
3615 tree lhs
= gimple_assign_lhs (stmt
);
3616 tree op1
= gimple_assign_rhs1 (stmt
);
3617 tree op2
= gimple_assign_rhs2 (stmt
);
3618 tree type
= TREE_TYPE (op1
);
3620 /* Check whether the comparison operands are of the same boolean
3621 type as the result type is.
3622 Check that second operand is an integer-constant with value
3624 if (TREE_CODE (op2
) == INTEGER_CST
3625 && (integer_zerop (op2
) || integer_onep (op2
))
3626 && useless_type_conversion_p (TREE_TYPE (lhs
), type
))
3628 enum tree_code cmp_code
= gimple_assign_rhs_code (stmt
);
3629 bool is_logical_not
= false;
3631 /* X == 0 and X != 1 is a logical-not.of X
3632 X == 1 and X != 0 is X */
3633 if ((cmp_code
== EQ_EXPR
&& integer_zerop (op2
))
3634 || (cmp_code
== NE_EXPR
&& integer_onep (op2
)))
3635 is_logical_not
= true;
3637 if (is_logical_not
== false)
3638 gimple_assign_set_rhs_with_ops (gsi
, TREE_CODE (op1
), op1
);
3639 /* Only for one-bit precision typed X the transformation
3640 !X -> ~X is valied. */
3641 else if (TYPE_PRECISION (type
) == 1)
3642 gimple_assign_set_rhs_with_ops (gsi
, BIT_NOT_EXPR
, op1
);
3643 /* Otherwise we use !X -> X ^ 1. */
3645 gimple_assign_set_rhs_with_ops (gsi
, BIT_XOR_EXPR
, op1
,
3646 build_int_cst (type
, 1));
3652 unsigned old_num_ops
= gimple_num_ops (stmt
);
3653 tree lhs
= gimple_assign_lhs (stmt
);
3654 tree new_rhs
= fold_gimple_assign (gsi
);
3656 && !useless_type_conversion_p (TREE_TYPE (lhs
),
3657 TREE_TYPE (new_rhs
)))
3658 new_rhs
= fold_convert (TREE_TYPE (lhs
), new_rhs
);
3661 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs
)) < old_num_ops
))
3663 gimple_assign_set_rhs_from_tree (gsi
, new_rhs
);
3670 changed
|= gimple_fold_call (gsi
, inplace
);
3674 /* Fold *& in asm operands. */
3676 gasm
*asm_stmt
= as_a
<gasm
*> (stmt
);
3678 const char **oconstraints
;
3679 const char *constraint
;
3680 bool allows_mem
, allows_reg
;
3682 noutputs
= gimple_asm_noutputs (asm_stmt
);
3683 oconstraints
= XALLOCAVEC (const char *, noutputs
);
3685 for (i
= 0; i
< gimple_asm_noutputs (asm_stmt
); ++i
)
3687 tree link
= gimple_asm_output_op (asm_stmt
, i
);
3688 tree op
= TREE_VALUE (link
);
3690 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link
)));
3691 if (REFERENCE_CLASS_P (op
)
3692 && (op
= maybe_fold_reference (op
, true)) != NULL_TREE
)
3694 TREE_VALUE (link
) = op
;
3698 for (i
= 0; i
< gimple_asm_ninputs (asm_stmt
); ++i
)
3700 tree link
= gimple_asm_input_op (asm_stmt
, i
);
3701 tree op
= TREE_VALUE (link
);
3703 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link
)));
3704 parse_input_constraint (&constraint
, 0, 0, noutputs
, 0,
3705 oconstraints
, &allows_mem
, &allows_reg
);
3706 if (REFERENCE_CLASS_P (op
)
3707 && (op
= maybe_fold_reference (op
, !allows_reg
&& allows_mem
))
3710 TREE_VALUE (link
) = op
;
3718 if (gimple_debug_bind_p (stmt
))
3720 tree val
= gimple_debug_bind_get_value (stmt
);
3722 && REFERENCE_CLASS_P (val
))
3724 tree tem
= maybe_fold_reference (val
, false);
3727 gimple_debug_bind_set_value (stmt
, tem
);
3732 && TREE_CODE (val
) == ADDR_EXPR
)
3734 tree ref
= TREE_OPERAND (val
, 0);
3735 tree tem
= maybe_fold_reference (ref
, false);
3738 tem
= build_fold_addr_expr_with_type (tem
, TREE_TYPE (val
));
3739 gimple_debug_bind_set_value (stmt
, tem
);
3749 stmt
= gsi_stmt (*gsi
);
3751 /* Fold *& on the lhs. */
3752 if (gimple_has_lhs (stmt
))
3754 tree lhs
= gimple_get_lhs (stmt
);
3755 if (lhs
&& REFERENCE_CLASS_P (lhs
))
3757 tree new_lhs
= maybe_fold_reference (lhs
, true);
3760 gimple_set_lhs (stmt
, new_lhs
);
3769 /* Valueziation callback that ends up not following SSA edges. */
3772 no_follow_ssa_edges (tree
)
3777 /* Valueization callback that ends up following single-use SSA edges only. */
3780 follow_single_use_edges (tree val
)
3782 if (TREE_CODE (val
) == SSA_NAME
3783 && !has_single_use (val
))
3788 /* Fold the statement pointed to by GSI. In some cases, this function may
3789 replace the whole statement with a new one. Returns true iff folding
3791 The statement pointed to by GSI should be in valid gimple form but may
3792 be in unfolded state as resulting from for example constant propagation
3793 which can produce *&x = 0. */
3796 fold_stmt (gimple_stmt_iterator
*gsi
)
3798 return fold_stmt_1 (gsi
, false, no_follow_ssa_edges
);
3802 fold_stmt (gimple_stmt_iterator
*gsi
, tree (*valueize
) (tree
))
3804 return fold_stmt_1 (gsi
, false, valueize
);
3807 /* Perform the minimal folding on statement *GSI. Only operations like
3808 *&x created by constant propagation are handled. The statement cannot
3809 be replaced with a new one. Return true if the statement was
3810 changed, false otherwise.
3811 The statement *GSI should be in valid gimple form but may
3812 be in unfolded state as resulting from for example constant propagation
3813 which can produce *&x = 0. */
3816 fold_stmt_inplace (gimple_stmt_iterator
*gsi
)
3818 gimple
*stmt
= gsi_stmt (*gsi
);
3819 bool changed
= fold_stmt_1 (gsi
, true, no_follow_ssa_edges
);
3820 gcc_assert (gsi_stmt (*gsi
) == stmt
);
3824 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
3825 if EXPR is null or we don't know how.
3826 If non-null, the result always has boolean type. */
3829 canonicalize_bool (tree expr
, bool invert
)
3835 if (integer_nonzerop (expr
))
3836 return boolean_false_node
;
3837 else if (integer_zerop (expr
))
3838 return boolean_true_node
;
3839 else if (TREE_CODE (expr
) == SSA_NAME
)
3840 return fold_build2 (EQ_EXPR
, boolean_type_node
, expr
,
3841 build_int_cst (TREE_TYPE (expr
), 0));
3842 else if (COMPARISON_CLASS_P (expr
))
3843 return fold_build2 (invert_tree_comparison (TREE_CODE (expr
), false),
3845 TREE_OPERAND (expr
, 0),
3846 TREE_OPERAND (expr
, 1));
3852 if (TREE_CODE (TREE_TYPE (expr
)) == BOOLEAN_TYPE
)
3854 if (integer_nonzerop (expr
))
3855 return boolean_true_node
;
3856 else if (integer_zerop (expr
))
3857 return boolean_false_node
;
3858 else if (TREE_CODE (expr
) == SSA_NAME
)
3859 return fold_build2 (NE_EXPR
, boolean_type_node
, expr
,
3860 build_int_cst (TREE_TYPE (expr
), 0));
3861 else if (COMPARISON_CLASS_P (expr
))
3862 return fold_build2 (TREE_CODE (expr
),
3864 TREE_OPERAND (expr
, 0),
3865 TREE_OPERAND (expr
, 1));
3871 /* Check to see if a boolean expression EXPR is logically equivalent to the
3872 comparison (OP1 CODE OP2). Check for various identities involving
3876 same_bool_comparison_p (const_tree expr
, enum tree_code code
,
3877 const_tree op1
, const_tree op2
)
3881 /* The obvious case. */
3882 if (TREE_CODE (expr
) == code
3883 && operand_equal_p (TREE_OPERAND (expr
, 0), op1
, 0)
3884 && operand_equal_p (TREE_OPERAND (expr
, 1), op2
, 0))
3887 /* Check for comparing (name, name != 0) and the case where expr
3888 is an SSA_NAME with a definition matching the comparison. */
3889 if (TREE_CODE (expr
) == SSA_NAME
3890 && TREE_CODE (TREE_TYPE (expr
)) == BOOLEAN_TYPE
)
3892 if (operand_equal_p (expr
, op1
, 0))
3893 return ((code
== NE_EXPR
&& integer_zerop (op2
))
3894 || (code
== EQ_EXPR
&& integer_nonzerop (op2
)));
3895 s
= SSA_NAME_DEF_STMT (expr
);
3896 if (is_gimple_assign (s
)
3897 && gimple_assign_rhs_code (s
) == code
3898 && operand_equal_p (gimple_assign_rhs1 (s
), op1
, 0)
3899 && operand_equal_p (gimple_assign_rhs2 (s
), op2
, 0))
3903 /* If op1 is of the form (name != 0) or (name == 0), and the definition
3904 of name is a comparison, recurse. */
3905 if (TREE_CODE (op1
) == SSA_NAME
3906 && TREE_CODE (TREE_TYPE (op1
)) == BOOLEAN_TYPE
)
3908 s
= SSA_NAME_DEF_STMT (op1
);
3909 if (is_gimple_assign (s
)
3910 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
)
3912 enum tree_code c
= gimple_assign_rhs_code (s
);
3913 if ((c
== NE_EXPR
&& integer_zerop (op2
))
3914 || (c
== EQ_EXPR
&& integer_nonzerop (op2
)))
3915 return same_bool_comparison_p (expr
, c
,
3916 gimple_assign_rhs1 (s
),
3917 gimple_assign_rhs2 (s
));
3918 if ((c
== EQ_EXPR
&& integer_zerop (op2
))
3919 || (c
== NE_EXPR
&& integer_nonzerop (op2
)))
3920 return same_bool_comparison_p (expr
,
3921 invert_tree_comparison (c
, false),
3922 gimple_assign_rhs1 (s
),
3923 gimple_assign_rhs2 (s
));
3929 /* Check to see if two boolean expressions OP1 and OP2 are logically
3933 same_bool_result_p (const_tree op1
, const_tree op2
)
3935 /* Simple cases first. */
3936 if (operand_equal_p (op1
, op2
, 0))
3939 /* Check the cases where at least one of the operands is a comparison.
3940 These are a bit smarter than operand_equal_p in that they apply some
3941 identifies on SSA_NAMEs. */
3942 if (COMPARISON_CLASS_P (op2
)
3943 && same_bool_comparison_p (op1
, TREE_CODE (op2
),
3944 TREE_OPERAND (op2
, 0),
3945 TREE_OPERAND (op2
, 1)))
3947 if (COMPARISON_CLASS_P (op1
)
3948 && same_bool_comparison_p (op2
, TREE_CODE (op1
),
3949 TREE_OPERAND (op1
, 0),
3950 TREE_OPERAND (op1
, 1)))
3957 /* Forward declarations for some mutually recursive functions. */
3960 and_comparisons_1 (enum tree_code code1
, tree op1a
, tree op1b
,
3961 enum tree_code code2
, tree op2a
, tree op2b
);
3963 and_var_with_comparison (tree var
, bool invert
,
3964 enum tree_code code2
, tree op2a
, tree op2b
);
3966 and_var_with_comparison_1 (gimple
*stmt
,
3967 enum tree_code code2
, tree op2a
, tree op2b
);
3969 or_comparisons_1 (enum tree_code code1
, tree op1a
, tree op1b
,
3970 enum tree_code code2
, tree op2a
, tree op2b
);
3972 or_var_with_comparison (tree var
, bool invert
,
3973 enum tree_code code2
, tree op2a
, tree op2b
);
3975 or_var_with_comparison_1 (gimple
*stmt
,
3976 enum tree_code code2
, tree op2a
, tree op2b
);
3978 /* Helper function for and_comparisons_1: try to simplify the AND of the
3979 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
3980 If INVERT is true, invert the value of the VAR before doing the AND.
3981 Return NULL_EXPR if we can't simplify this to a single expression. */
3984 and_var_with_comparison (tree var
, bool invert
,
3985 enum tree_code code2
, tree op2a
, tree op2b
)
3988 gimple
*stmt
= SSA_NAME_DEF_STMT (var
);
3990 /* We can only deal with variables whose definitions are assignments. */
3991 if (!is_gimple_assign (stmt
))
3994 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
3995 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
3996 Then we only have to consider the simpler non-inverted cases. */
3998 t
= or_var_with_comparison_1 (stmt
,
3999 invert_tree_comparison (code2
, false),
4002 t
= and_var_with_comparison_1 (stmt
, code2
, op2a
, op2b
);
4003 return canonicalize_bool (t
, invert
);
4006 /* Try to simplify the AND of the ssa variable defined by the assignment
4007 STMT with the comparison specified by (OP2A CODE2 OP2B).
4008 Return NULL_EXPR if we can't simplify this to a single expression. */
4011 and_var_with_comparison_1 (gimple
*stmt
,
4012 enum tree_code code2
, tree op2a
, tree op2b
)
4014 tree var
= gimple_assign_lhs (stmt
);
4015 tree true_test_var
= NULL_TREE
;
4016 tree false_test_var
= NULL_TREE
;
4017 enum tree_code innercode
= gimple_assign_rhs_code (stmt
);
4019 /* Check for identities like (var AND (var == 0)) => false. */
4020 if (TREE_CODE (op2a
) == SSA_NAME
4021 && TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
)
4023 if ((code2
== NE_EXPR
&& integer_zerop (op2b
))
4024 || (code2
== EQ_EXPR
&& integer_nonzerop (op2b
)))
4026 true_test_var
= op2a
;
4027 if (var
== true_test_var
)
4030 else if ((code2
== EQ_EXPR
&& integer_zerop (op2b
))
4031 || (code2
== NE_EXPR
&& integer_nonzerop (op2b
)))
4033 false_test_var
= op2a
;
4034 if (var
== false_test_var
)
4035 return boolean_false_node
;
4039 /* If the definition is a comparison, recurse on it. */
4040 if (TREE_CODE_CLASS (innercode
) == tcc_comparison
)
4042 tree t
= and_comparisons_1 (innercode
,
4043 gimple_assign_rhs1 (stmt
),
4044 gimple_assign_rhs2 (stmt
),
4052 /* If the definition is an AND or OR expression, we may be able to
4053 simplify by reassociating. */
4054 if (TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
4055 && (innercode
== BIT_AND_EXPR
|| innercode
== BIT_IOR_EXPR
))
4057 tree inner1
= gimple_assign_rhs1 (stmt
);
4058 tree inner2
= gimple_assign_rhs2 (stmt
);
4061 tree partial
= NULL_TREE
;
4062 bool is_and
= (innercode
== BIT_AND_EXPR
);
4064 /* Check for boolean identities that don't require recursive examination
4066 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
4067 inner1 AND (inner1 OR inner2) => inner1
4068 !inner1 AND (inner1 AND inner2) => false
4069 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
4070 Likewise for similar cases involving inner2. */
4071 if (inner1
== true_test_var
)
4072 return (is_and
? var
: inner1
);
4073 else if (inner2
== true_test_var
)
4074 return (is_and
? var
: inner2
);
4075 else if (inner1
== false_test_var
)
4077 ? boolean_false_node
4078 : and_var_with_comparison (inner2
, false, code2
, op2a
, op2b
));
4079 else if (inner2
== false_test_var
)
4081 ? boolean_false_node
4082 : and_var_with_comparison (inner1
, false, code2
, op2a
, op2b
));
4084 /* Next, redistribute/reassociate the AND across the inner tests.
4085 Compute the first partial result, (inner1 AND (op2a code op2b)) */
4086 if (TREE_CODE (inner1
) == SSA_NAME
4087 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner1
))
4088 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
4089 && (t
= maybe_fold_and_comparisons (gimple_assign_rhs_code (s
),
4090 gimple_assign_rhs1 (s
),
4091 gimple_assign_rhs2 (s
),
4092 code2
, op2a
, op2b
)))
4094 /* Handle the AND case, where we are reassociating:
4095 (inner1 AND inner2) AND (op2a code2 op2b)
4097 If the partial result t is a constant, we win. Otherwise
4098 continue on to try reassociating with the other inner test. */
4101 if (integer_onep (t
))
4103 else if (integer_zerop (t
))
4104 return boolean_false_node
;
4107 /* Handle the OR case, where we are redistributing:
4108 (inner1 OR inner2) AND (op2a code2 op2b)
4109 => (t OR (inner2 AND (op2a code2 op2b))) */
4110 else if (integer_onep (t
))
4111 return boolean_true_node
;
4113 /* Save partial result for later. */
4117 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
4118 if (TREE_CODE (inner2
) == SSA_NAME
4119 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner2
))
4120 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
4121 && (t
= maybe_fold_and_comparisons (gimple_assign_rhs_code (s
),
4122 gimple_assign_rhs1 (s
),
4123 gimple_assign_rhs2 (s
),
4124 code2
, op2a
, op2b
)))
4126 /* Handle the AND case, where we are reassociating:
4127 (inner1 AND inner2) AND (op2a code2 op2b)
4128 => (inner1 AND t) */
4131 if (integer_onep (t
))
4133 else if (integer_zerop (t
))
4134 return boolean_false_node
;
4135 /* If both are the same, we can apply the identity
4137 else if (partial
&& same_bool_result_p (t
, partial
))
4141 /* Handle the OR case. where we are redistributing:
4142 (inner1 OR inner2) AND (op2a code2 op2b)
4143 => (t OR (inner1 AND (op2a code2 op2b)))
4144 => (t OR partial) */
4147 if (integer_onep (t
))
4148 return boolean_true_node
;
4151 /* We already got a simplification for the other
4152 operand to the redistributed OR expression. The
4153 interesting case is when at least one is false.
4154 Or, if both are the same, we can apply the identity
4156 if (integer_zerop (partial
))
4158 else if (integer_zerop (t
))
4160 else if (same_bool_result_p (t
, partial
))
4169 /* Try to simplify the AND of two comparisons defined by
4170 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4171 If this can be done without constructing an intermediate value,
4172 return the resulting tree; otherwise NULL_TREE is returned.
4173 This function is deliberately asymmetric as it recurses on SSA_DEFs
4174 in the first comparison but not the second. */
4177 and_comparisons_1 (enum tree_code code1
, tree op1a
, tree op1b
,
4178 enum tree_code code2
, tree op2a
, tree op2b
)
4180 tree truth_type
= truth_type_for (TREE_TYPE (op1a
));
4182 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
4183 if (operand_equal_p (op1a
, op2a
, 0)
4184 && operand_equal_p (op1b
, op2b
, 0))
4186 /* Result will be either NULL_TREE, or a combined comparison. */
4187 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
4188 TRUTH_ANDIF_EXPR
, code1
, code2
,
4189 truth_type
, op1a
, op1b
);
4194 /* Likewise the swapped case of the above. */
4195 if (operand_equal_p (op1a
, op2b
, 0)
4196 && operand_equal_p (op1b
, op2a
, 0))
4198 /* Result will be either NULL_TREE, or a combined comparison. */
4199 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
4200 TRUTH_ANDIF_EXPR
, code1
,
4201 swap_tree_comparison (code2
),
4202 truth_type
, op1a
, op1b
);
4207 /* If both comparisons are of the same value against constants, we might
4208 be able to merge them. */
4209 if (operand_equal_p (op1a
, op2a
, 0)
4210 && TREE_CODE (op1b
) == INTEGER_CST
4211 && TREE_CODE (op2b
) == INTEGER_CST
)
4213 int cmp
= tree_int_cst_compare (op1b
, op2b
);
4215 /* If we have (op1a == op1b), we should either be able to
4216 return that or FALSE, depending on whether the constant op1b
4217 also satisfies the other comparison against op2b. */
4218 if (code1
== EQ_EXPR
)
4224 case EQ_EXPR
: val
= (cmp
== 0); break;
4225 case NE_EXPR
: val
= (cmp
!= 0); break;
4226 case LT_EXPR
: val
= (cmp
< 0); break;
4227 case GT_EXPR
: val
= (cmp
> 0); break;
4228 case LE_EXPR
: val
= (cmp
<= 0); break;
4229 case GE_EXPR
: val
= (cmp
>= 0); break;
4230 default: done
= false;
4235 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
4237 return boolean_false_node
;
4240 /* Likewise if the second comparison is an == comparison. */
4241 else if (code2
== EQ_EXPR
)
4247 case EQ_EXPR
: val
= (cmp
== 0); break;
4248 case NE_EXPR
: val
= (cmp
!= 0); break;
4249 case LT_EXPR
: val
= (cmp
> 0); break;
4250 case GT_EXPR
: val
= (cmp
< 0); break;
4251 case LE_EXPR
: val
= (cmp
>= 0); break;
4252 case GE_EXPR
: val
= (cmp
<= 0); break;
4253 default: done
= false;
4258 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
4260 return boolean_false_node
;
4264 /* Same business with inequality tests. */
4265 else if (code1
== NE_EXPR
)
4270 case EQ_EXPR
: val
= (cmp
!= 0); break;
4271 case NE_EXPR
: val
= (cmp
== 0); break;
4272 case LT_EXPR
: val
= (cmp
>= 0); break;
4273 case GT_EXPR
: val
= (cmp
<= 0); break;
4274 case LE_EXPR
: val
= (cmp
> 0); break;
4275 case GE_EXPR
: val
= (cmp
< 0); break;
4280 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
4282 else if (code2
== NE_EXPR
)
4287 case EQ_EXPR
: val
= (cmp
== 0); break;
4288 case NE_EXPR
: val
= (cmp
!= 0); break;
4289 case LT_EXPR
: val
= (cmp
<= 0); break;
4290 case GT_EXPR
: val
= (cmp
>= 0); break;
4291 case LE_EXPR
: val
= (cmp
< 0); break;
4292 case GE_EXPR
: val
= (cmp
> 0); break;
4297 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
4300 /* Chose the more restrictive of two < or <= comparisons. */
4301 else if ((code1
== LT_EXPR
|| code1
== LE_EXPR
)
4302 && (code2
== LT_EXPR
|| code2
== LE_EXPR
))
4304 if ((cmp
< 0) || (cmp
== 0 && code1
== LT_EXPR
))
4305 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
4307 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
4310 /* Likewise chose the more restrictive of two > or >= comparisons. */
4311 else if ((code1
== GT_EXPR
|| code1
== GE_EXPR
)
4312 && (code2
== GT_EXPR
|| code2
== GE_EXPR
))
4314 if ((cmp
> 0) || (cmp
== 0 && code1
== GT_EXPR
))
4315 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
4317 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
4320 /* Check for singleton ranges. */
4322 && ((code1
== LE_EXPR
&& code2
== GE_EXPR
)
4323 || (code1
== GE_EXPR
&& code2
== LE_EXPR
)))
4324 return fold_build2 (EQ_EXPR
, boolean_type_node
, op1a
, op2b
);
4326 /* Check for disjoint ranges. */
4328 && (code1
== LT_EXPR
|| code1
== LE_EXPR
)
4329 && (code2
== GT_EXPR
|| code2
== GE_EXPR
))
4330 return boolean_false_node
;
4332 && (code1
== GT_EXPR
|| code1
== GE_EXPR
)
4333 && (code2
== LT_EXPR
|| code2
== LE_EXPR
))
4334 return boolean_false_node
;
4337 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
4338 NAME's definition is a truth value. See if there are any simplifications
4339 that can be done against the NAME's definition. */
4340 if (TREE_CODE (op1a
) == SSA_NAME
4341 && (code1
== NE_EXPR
|| code1
== EQ_EXPR
)
4342 && (integer_zerop (op1b
) || integer_onep (op1b
)))
4344 bool invert
= ((code1
== EQ_EXPR
&& integer_zerop (op1b
))
4345 || (code1
== NE_EXPR
&& integer_onep (op1b
)));
4346 gimple
*stmt
= SSA_NAME_DEF_STMT (op1a
);
4347 switch (gimple_code (stmt
))
4350 /* Try to simplify by copy-propagating the definition. */
4351 return and_var_with_comparison (op1a
, invert
, code2
, op2a
, op2b
);
4354 /* If every argument to the PHI produces the same result when
4355 ANDed with the second comparison, we win.
4356 Do not do this unless the type is bool since we need a bool
4357 result here anyway. */
4358 if (TREE_CODE (TREE_TYPE (op1a
)) == BOOLEAN_TYPE
)
4360 tree result
= NULL_TREE
;
4362 for (i
= 0; i
< gimple_phi_num_args (stmt
); i
++)
4364 tree arg
= gimple_phi_arg_def (stmt
, i
);
4366 /* If this PHI has itself as an argument, ignore it.
4367 If all the other args produce the same result,
4369 if (arg
== gimple_phi_result (stmt
))
4371 else if (TREE_CODE (arg
) == INTEGER_CST
)
4373 if (invert
? integer_nonzerop (arg
) : integer_zerop (arg
))
4376 result
= boolean_false_node
;
4377 else if (!integer_zerop (result
))
4381 result
= fold_build2 (code2
, boolean_type_node
,
4383 else if (!same_bool_comparison_p (result
,
4387 else if (TREE_CODE (arg
) == SSA_NAME
4388 && !SSA_NAME_IS_DEFAULT_DEF (arg
))
4391 gimple
*def_stmt
= SSA_NAME_DEF_STMT (arg
);
4392 /* In simple cases we can look through PHI nodes,
4393 but we have to be careful with loops.
4395 if (! dom_info_available_p (CDI_DOMINATORS
)
4396 || gimple_bb (def_stmt
) == gimple_bb (stmt
)
4397 || dominated_by_p (CDI_DOMINATORS
,
4398 gimple_bb (def_stmt
),
4401 temp
= and_var_with_comparison (arg
, invert
, code2
,
4407 else if (!same_bool_result_p (result
, temp
))
4423 /* Try to simplify the AND of two comparisons, specified by
4424 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
4425 If this can be simplified to a single expression (without requiring
4426 introducing more SSA variables to hold intermediate values),
4427 return the resulting tree. Otherwise return NULL_TREE.
4428 If the result expression is non-null, it has boolean type. */
4431 maybe_fold_and_comparisons (enum tree_code code1
, tree op1a
, tree op1b
,
4432 enum tree_code code2
, tree op2a
, tree op2b
)
4434 tree t
= and_comparisons_1 (code1
, op1a
, op1b
, code2
, op2a
, op2b
);
4438 return and_comparisons_1 (code2
, op2a
, op2b
, code1
, op1a
, op1b
);
4441 /* Helper function for or_comparisons_1: try to simplify the OR of the
4442 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
4443 If INVERT is true, invert the value of VAR before doing the OR.
4444 Return NULL_EXPR if we can't simplify this to a single expression. */
4447 or_var_with_comparison (tree var
, bool invert
,
4448 enum tree_code code2
, tree op2a
, tree op2b
)
4451 gimple
*stmt
= SSA_NAME_DEF_STMT (var
);
4453 /* We can only deal with variables whose definitions are assignments. */
4454 if (!is_gimple_assign (stmt
))
4457 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
4458 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
4459 Then we only have to consider the simpler non-inverted cases. */
4461 t
= and_var_with_comparison_1 (stmt
,
4462 invert_tree_comparison (code2
, false),
4465 t
= or_var_with_comparison_1 (stmt
, code2
, op2a
, op2b
);
4466 return canonicalize_bool (t
, invert
);
4469 /* Try to simplify the OR of the ssa variable defined by the assignment
4470 STMT with the comparison specified by (OP2A CODE2 OP2B).
4471 Return NULL_EXPR if we can't simplify this to a single expression. */
4474 or_var_with_comparison_1 (gimple
*stmt
,
4475 enum tree_code code2
, tree op2a
, tree op2b
)
4477 tree var
= gimple_assign_lhs (stmt
);
4478 tree true_test_var
= NULL_TREE
;
4479 tree false_test_var
= NULL_TREE
;
4480 enum tree_code innercode
= gimple_assign_rhs_code (stmt
);
4482 /* Check for identities like (var OR (var != 0)) => true . */
4483 if (TREE_CODE (op2a
) == SSA_NAME
4484 && TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
)
4486 if ((code2
== NE_EXPR
&& integer_zerop (op2b
))
4487 || (code2
== EQ_EXPR
&& integer_nonzerop (op2b
)))
4489 true_test_var
= op2a
;
4490 if (var
== true_test_var
)
4493 else if ((code2
== EQ_EXPR
&& integer_zerop (op2b
))
4494 || (code2
== NE_EXPR
&& integer_nonzerop (op2b
)))
4496 false_test_var
= op2a
;
4497 if (var
== false_test_var
)
4498 return boolean_true_node
;
4502 /* If the definition is a comparison, recurse on it. */
4503 if (TREE_CODE_CLASS (innercode
) == tcc_comparison
)
4505 tree t
= or_comparisons_1 (innercode
,
4506 gimple_assign_rhs1 (stmt
),
4507 gimple_assign_rhs2 (stmt
),
4515 /* If the definition is an AND or OR expression, we may be able to
4516 simplify by reassociating. */
4517 if (TREE_CODE (TREE_TYPE (var
)) == BOOLEAN_TYPE
4518 && (innercode
== BIT_AND_EXPR
|| innercode
== BIT_IOR_EXPR
))
4520 tree inner1
= gimple_assign_rhs1 (stmt
);
4521 tree inner2
= gimple_assign_rhs2 (stmt
);
4524 tree partial
= NULL_TREE
;
4525 bool is_or
= (innercode
== BIT_IOR_EXPR
);
4527 /* Check for boolean identities that don't require recursive examination
4529 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
4530 inner1 OR (inner1 AND inner2) => inner1
4531 !inner1 OR (inner1 OR inner2) => true
4532 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
4534 if (inner1
== true_test_var
)
4535 return (is_or
? var
: inner1
);
4536 else if (inner2
== true_test_var
)
4537 return (is_or
? var
: inner2
);
4538 else if (inner1
== false_test_var
)
4541 : or_var_with_comparison (inner2
, false, code2
, op2a
, op2b
));
4542 else if (inner2
== false_test_var
)
4545 : or_var_with_comparison (inner1
, false, code2
, op2a
, op2b
));
4547 /* Next, redistribute/reassociate the OR across the inner tests.
4548 Compute the first partial result, (inner1 OR (op2a code op2b)) */
4549 if (TREE_CODE (inner1
) == SSA_NAME
4550 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner1
))
4551 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
4552 && (t
= maybe_fold_or_comparisons (gimple_assign_rhs_code (s
),
4553 gimple_assign_rhs1 (s
),
4554 gimple_assign_rhs2 (s
),
4555 code2
, op2a
, op2b
)))
4557 /* Handle the OR case, where we are reassociating:
4558 (inner1 OR inner2) OR (op2a code2 op2b)
4560 If the partial result t is a constant, we win. Otherwise
4561 continue on to try reassociating with the other inner test. */
4564 if (integer_onep (t
))
4565 return boolean_true_node
;
4566 else if (integer_zerop (t
))
4570 /* Handle the AND case, where we are redistributing:
4571 (inner1 AND inner2) OR (op2a code2 op2b)
4572 => (t AND (inner2 OR (op2a code op2b))) */
4573 else if (integer_zerop (t
))
4574 return boolean_false_node
;
4576 /* Save partial result for later. */
4580 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
4581 if (TREE_CODE (inner2
) == SSA_NAME
4582 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (inner2
))
4583 && TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
4584 && (t
= maybe_fold_or_comparisons (gimple_assign_rhs_code (s
),
4585 gimple_assign_rhs1 (s
),
4586 gimple_assign_rhs2 (s
),
4587 code2
, op2a
, op2b
)))
4589 /* Handle the OR case, where we are reassociating:
4590 (inner1 OR inner2) OR (op2a code2 op2b)
4592 => (t OR partial) */
4595 if (integer_zerop (t
))
4597 else if (integer_onep (t
))
4598 return boolean_true_node
;
4599 /* If both are the same, we can apply the identity
4601 else if (partial
&& same_bool_result_p (t
, partial
))
4605 /* Handle the AND case, where we are redistributing:
4606 (inner1 AND inner2) OR (op2a code2 op2b)
4607 => (t AND (inner1 OR (op2a code2 op2b)))
4608 => (t AND partial) */
4611 if (integer_zerop (t
))
4612 return boolean_false_node
;
4615 /* We already got a simplification for the other
4616 operand to the redistributed AND expression. The
4617 interesting case is when at least one is true.
4618 Or, if both are the same, we can apply the identity
4620 if (integer_onep (partial
))
4622 else if (integer_onep (t
))
4624 else if (same_bool_result_p (t
, partial
))
4633 /* Try to simplify the OR of two comparisons defined by
4634 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4635 If this can be done without constructing an intermediate value,
4636 return the resulting tree; otherwise NULL_TREE is returned.
4637 This function is deliberately asymmetric as it recurses on SSA_DEFs
4638 in the first comparison but not the second. */
4641 or_comparisons_1 (enum tree_code code1
, tree op1a
, tree op1b
,
4642 enum tree_code code2
, tree op2a
, tree op2b
)
4644 tree truth_type
= truth_type_for (TREE_TYPE (op1a
));
4646 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
4647 if (operand_equal_p (op1a
, op2a
, 0)
4648 && operand_equal_p (op1b
, op2b
, 0))
4650 /* Result will be either NULL_TREE, or a combined comparison. */
4651 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
4652 TRUTH_ORIF_EXPR
, code1
, code2
,
4653 truth_type
, op1a
, op1b
);
4658 /* Likewise the swapped case of the above. */
4659 if (operand_equal_p (op1a
, op2b
, 0)
4660 && operand_equal_p (op1b
, op2a
, 0))
4662 /* Result will be either NULL_TREE, or a combined comparison. */
4663 tree t
= combine_comparisons (UNKNOWN_LOCATION
,
4664 TRUTH_ORIF_EXPR
, code1
,
4665 swap_tree_comparison (code2
),
4666 truth_type
, op1a
, op1b
);
4671 /* If both comparisons are of the same value against constants, we might
4672 be able to merge them. */
4673 if (operand_equal_p (op1a
, op2a
, 0)
4674 && TREE_CODE (op1b
) == INTEGER_CST
4675 && TREE_CODE (op2b
) == INTEGER_CST
)
4677 int cmp
= tree_int_cst_compare (op1b
, op2b
);
4679 /* If we have (op1a != op1b), we should either be able to
4680 return that or TRUE, depending on whether the constant op1b
4681 also satisfies the other comparison against op2b. */
4682 if (code1
== NE_EXPR
)
4688 case EQ_EXPR
: val
= (cmp
== 0); break;
4689 case NE_EXPR
: val
= (cmp
!= 0); break;
4690 case LT_EXPR
: val
= (cmp
< 0); break;
4691 case GT_EXPR
: val
= (cmp
> 0); break;
4692 case LE_EXPR
: val
= (cmp
<= 0); break;
4693 case GE_EXPR
: val
= (cmp
>= 0); break;
4694 default: done
= false;
4699 return boolean_true_node
;
4701 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
4704 /* Likewise if the second comparison is a != comparison. */
4705 else if (code2
== NE_EXPR
)
4711 case EQ_EXPR
: val
= (cmp
== 0); break;
4712 case NE_EXPR
: val
= (cmp
!= 0); break;
4713 case LT_EXPR
: val
= (cmp
> 0); break;
4714 case GT_EXPR
: val
= (cmp
< 0); break;
4715 case LE_EXPR
: val
= (cmp
>= 0); break;
4716 case GE_EXPR
: val
= (cmp
<= 0); break;
4717 default: done
= false;
4722 return boolean_true_node
;
4724 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
4728 /* See if an equality test is redundant with the other comparison. */
4729 else if (code1
== EQ_EXPR
)
4734 case EQ_EXPR
: val
= (cmp
== 0); break;
4735 case NE_EXPR
: val
= (cmp
!= 0); break;
4736 case LT_EXPR
: val
= (cmp
< 0); break;
4737 case GT_EXPR
: val
= (cmp
> 0); break;
4738 case LE_EXPR
: val
= (cmp
<= 0); break;
4739 case GE_EXPR
: val
= (cmp
>= 0); break;
4744 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
4746 else if (code2
== EQ_EXPR
)
4751 case EQ_EXPR
: val
= (cmp
== 0); break;
4752 case NE_EXPR
: val
= (cmp
!= 0); break;
4753 case LT_EXPR
: val
= (cmp
> 0); break;
4754 case GT_EXPR
: val
= (cmp
< 0); break;
4755 case LE_EXPR
: val
= (cmp
>= 0); break;
4756 case GE_EXPR
: val
= (cmp
<= 0); break;
4761 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
4764 /* Chose the less restrictive of two < or <= comparisons. */
4765 else if ((code1
== LT_EXPR
|| code1
== LE_EXPR
)
4766 && (code2
== LT_EXPR
|| code2
== LE_EXPR
))
4768 if ((cmp
< 0) || (cmp
== 0 && code1
== LT_EXPR
))
4769 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
4771 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
4774 /* Likewise chose the less restrictive of two > or >= comparisons. */
4775 else if ((code1
== GT_EXPR
|| code1
== GE_EXPR
)
4776 && (code2
== GT_EXPR
|| code2
== GE_EXPR
))
4778 if ((cmp
> 0) || (cmp
== 0 && code1
== GT_EXPR
))
4779 return fold_build2 (code2
, boolean_type_node
, op2a
, op2b
);
4781 return fold_build2 (code1
, boolean_type_node
, op1a
, op1b
);
4784 /* Check for singleton ranges. */
4786 && ((code1
== LT_EXPR
&& code2
== GT_EXPR
)
4787 || (code1
== GT_EXPR
&& code2
== LT_EXPR
)))
4788 return fold_build2 (NE_EXPR
, boolean_type_node
, op1a
, op2b
);
4790 /* Check for less/greater pairs that don't restrict the range at all. */
4792 && (code1
== LT_EXPR
|| code1
== LE_EXPR
)
4793 && (code2
== GT_EXPR
|| code2
== GE_EXPR
))
4794 return boolean_true_node
;
4796 && (code1
== GT_EXPR
|| code1
== GE_EXPR
)
4797 && (code2
== LT_EXPR
|| code2
== LE_EXPR
))
4798 return boolean_true_node
;
4801 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
4802 NAME's definition is a truth value. See if there are any simplifications
4803 that can be done against the NAME's definition. */
4804 if (TREE_CODE (op1a
) == SSA_NAME
4805 && (code1
== NE_EXPR
|| code1
== EQ_EXPR
)
4806 && (integer_zerop (op1b
) || integer_onep (op1b
)))
4808 bool invert
= ((code1
== EQ_EXPR
&& integer_zerop (op1b
))
4809 || (code1
== NE_EXPR
&& integer_onep (op1b
)));
4810 gimple
*stmt
= SSA_NAME_DEF_STMT (op1a
);
4811 switch (gimple_code (stmt
))
4814 /* Try to simplify by copy-propagating the definition. */
4815 return or_var_with_comparison (op1a
, invert
, code2
, op2a
, op2b
);
4818 /* If every argument to the PHI produces the same result when
4819 ORed with the second comparison, we win.
4820 Do not do this unless the type is bool since we need a bool
4821 result here anyway. */
4822 if (TREE_CODE (TREE_TYPE (op1a
)) == BOOLEAN_TYPE
)
4824 tree result
= NULL_TREE
;
4826 for (i
= 0; i
< gimple_phi_num_args (stmt
); i
++)
4828 tree arg
= gimple_phi_arg_def (stmt
, i
);
4830 /* If this PHI has itself as an argument, ignore it.
4831 If all the other args produce the same result,
4833 if (arg
== gimple_phi_result (stmt
))
4835 else if (TREE_CODE (arg
) == INTEGER_CST
)
4837 if (invert
? integer_zerop (arg
) : integer_nonzerop (arg
))
4840 result
= boolean_true_node
;
4841 else if (!integer_onep (result
))
4845 result
= fold_build2 (code2
, boolean_type_node
,
4847 else if (!same_bool_comparison_p (result
,
4851 else if (TREE_CODE (arg
) == SSA_NAME
4852 && !SSA_NAME_IS_DEFAULT_DEF (arg
))
4855 gimple
*def_stmt
= SSA_NAME_DEF_STMT (arg
);
4856 /* In simple cases we can look through PHI nodes,
4857 but we have to be careful with loops.
4859 if (! dom_info_available_p (CDI_DOMINATORS
)
4860 || gimple_bb (def_stmt
) == gimple_bb (stmt
)
4861 || dominated_by_p (CDI_DOMINATORS
,
4862 gimple_bb (def_stmt
),
4865 temp
= or_var_with_comparison (arg
, invert
, code2
,
4871 else if (!same_bool_result_p (result
, temp
))
4887 /* Try to simplify the OR of two comparisons, specified by
4888 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
4889 If this can be simplified to a single expression (without requiring
4890 introducing more SSA variables to hold intermediate values),
4891 return the resulting tree. Otherwise return NULL_TREE.
4892 If the result expression is non-null, it has boolean type. */
4895 maybe_fold_or_comparisons (enum tree_code code1
, tree op1a
, tree op1b
,
4896 enum tree_code code2
, tree op2a
, tree op2b
)
4898 tree t
= or_comparisons_1 (code1
, op1a
, op1b
, code2
, op2a
, op2b
);
4902 return or_comparisons_1 (code2
, op2a
, op2b
, code1
, op1a
, op1b
);
4906 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
4908 Either NULL_TREE, a simplified but non-constant or a constant
4911 ??? This should go into a gimple-fold-inline.h file to be eventually
4912 privatized with the single valueize function used in the various TUs
4913 to avoid the indirect function call overhead. */
4916 gimple_fold_stmt_to_constant_1 (gimple
*stmt
, tree (*valueize
) (tree
),
4917 tree (*gvalueize
) (tree
))
4921 /* ??? The SSA propagators do not correctly deal with following SSA use-def
4922 edges if there are intermediate VARYING defs. For this reason
4923 do not follow SSA edges here even though SCCVN can technically
4924 just deal fine with that. */
4925 if (gimple_simplify (stmt
, &rcode
, ops
, NULL
, gvalueize
, valueize
))
4927 tree res
= NULL_TREE
;
4928 if (rcode
.is_tree_code ()
4929 && (TREE_CODE_LENGTH ((tree_code
) rcode
) == 0
4930 || ((tree_code
) rcode
) == ADDR_EXPR
)
4931 && is_gimple_val (ops
[0]))
4933 else if (mprts_hook
)
4934 res
= mprts_hook (rcode
, gimple_expr_type (stmt
), ops
);
4937 if (dump_file
&& dump_flags
& TDF_DETAILS
)
4939 fprintf (dump_file
, "Match-and-simplified ");
4940 print_gimple_expr (dump_file
, stmt
, 0, TDF_SLIM
);
4941 fprintf (dump_file
, " to ");
4942 print_generic_expr (dump_file
, res
, 0);
4943 fprintf (dump_file
, "\n");
4949 location_t loc
= gimple_location (stmt
);
4950 switch (gimple_code (stmt
))
4954 enum tree_code subcode
= gimple_assign_rhs_code (stmt
);
4956 switch (get_gimple_rhs_class (subcode
))
4958 case GIMPLE_SINGLE_RHS
:
4960 tree rhs
= gimple_assign_rhs1 (stmt
);
4961 enum tree_code_class kind
= TREE_CODE_CLASS (subcode
);
4963 if (TREE_CODE (rhs
) == SSA_NAME
)
4965 /* If the RHS is an SSA_NAME, return its known constant value,
4967 return (*valueize
) (rhs
);
4969 /* Handle propagating invariant addresses into address
4971 else if (TREE_CODE (rhs
) == ADDR_EXPR
4972 && !is_gimple_min_invariant (rhs
))
4974 HOST_WIDE_INT offset
= 0;
4976 base
= get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs
, 0),
4980 && (CONSTANT_CLASS_P (base
)
4981 || decl_address_invariant_p (base
)))
4982 return build_invariant_address (TREE_TYPE (rhs
),
4985 else if (TREE_CODE (rhs
) == CONSTRUCTOR
4986 && TREE_CODE (TREE_TYPE (rhs
)) == VECTOR_TYPE
4987 && (CONSTRUCTOR_NELTS (rhs
)
4988 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs
))))
4993 vec
= XALLOCAVEC (tree
,
4994 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs
)));
4995 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs
), i
, val
)
4997 val
= (*valueize
) (val
);
4998 if (TREE_CODE (val
) == INTEGER_CST
4999 || TREE_CODE (val
) == REAL_CST
5000 || TREE_CODE (val
) == FIXED_CST
)
5006 return build_vector (TREE_TYPE (rhs
), vec
);
5008 if (subcode
== OBJ_TYPE_REF
)
5010 tree val
= (*valueize
) (OBJ_TYPE_REF_EXPR (rhs
));
5011 /* If callee is constant, we can fold away the wrapper. */
5012 if (is_gimple_min_invariant (val
))
5016 if (kind
== tcc_reference
)
5018 if ((TREE_CODE (rhs
) == VIEW_CONVERT_EXPR
5019 || TREE_CODE (rhs
) == REALPART_EXPR
5020 || TREE_CODE (rhs
) == IMAGPART_EXPR
)
5021 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == SSA_NAME
)
5023 tree val
= (*valueize
) (TREE_OPERAND (rhs
, 0));
5024 return fold_unary_loc (EXPR_LOCATION (rhs
),
5026 TREE_TYPE (rhs
), val
);
5028 else if (TREE_CODE (rhs
) == BIT_FIELD_REF
5029 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == SSA_NAME
)
5031 tree val
= (*valueize
) (TREE_OPERAND (rhs
, 0));
5032 return fold_ternary_loc (EXPR_LOCATION (rhs
),
5034 TREE_TYPE (rhs
), val
,
5035 TREE_OPERAND (rhs
, 1),
5036 TREE_OPERAND (rhs
, 2));
5038 else if (TREE_CODE (rhs
) == MEM_REF
5039 && TREE_CODE (TREE_OPERAND (rhs
, 0)) == SSA_NAME
)
5041 tree val
= (*valueize
) (TREE_OPERAND (rhs
, 0));
5042 if (TREE_CODE (val
) == ADDR_EXPR
5043 && is_gimple_min_invariant (val
))
5045 tree tem
= fold_build2 (MEM_REF
, TREE_TYPE (rhs
),
5047 TREE_OPERAND (rhs
, 1));
5052 return fold_const_aggregate_ref_1 (rhs
, valueize
);
5054 else if (kind
== tcc_declaration
)
5055 return get_symbol_constant_value (rhs
);
5059 case GIMPLE_UNARY_RHS
:
5062 case GIMPLE_BINARY_RHS
:
5063 /* Translate &x + CST into an invariant form suitable for
5064 further propagation. */
5065 if (subcode
== POINTER_PLUS_EXPR
)
5067 tree op0
= (*valueize
) (gimple_assign_rhs1 (stmt
));
5068 tree op1
= (*valueize
) (gimple_assign_rhs2 (stmt
));
5069 if (TREE_CODE (op0
) == ADDR_EXPR
5070 && TREE_CODE (op1
) == INTEGER_CST
)
5072 tree off
= fold_convert (ptr_type_node
, op1
);
5073 return build_fold_addr_expr_loc
5075 fold_build2 (MEM_REF
,
5076 TREE_TYPE (TREE_TYPE (op0
)),
5077 unshare_expr (op0
), off
));
5080 /* Canonicalize bool != 0 and bool == 0 appearing after
5081 valueization. While gimple_simplify handles this
5082 it can get confused by the ~X == 1 -> X == 0 transform
5083 which we cant reduce to a SSA name or a constant
5084 (and we have no way to tell gimple_simplify to not
5085 consider those transforms in the first place). */
5086 else if (subcode
== EQ_EXPR
5087 || subcode
== NE_EXPR
)
5089 tree lhs
= gimple_assign_lhs (stmt
);
5090 tree op0
= gimple_assign_rhs1 (stmt
);
5091 if (useless_type_conversion_p (TREE_TYPE (lhs
),
5094 tree op1
= (*valueize
) (gimple_assign_rhs2 (stmt
));
5095 op0
= (*valueize
) (op0
);
5096 if (TREE_CODE (op0
) == INTEGER_CST
)
5097 std::swap (op0
, op1
);
5098 if (TREE_CODE (op1
) == INTEGER_CST
5099 && ((subcode
== NE_EXPR
&& integer_zerop (op1
))
5100 || (subcode
== EQ_EXPR
&& integer_onep (op1
))))
5106 case GIMPLE_TERNARY_RHS
:
5108 /* Handle ternary operators that can appear in GIMPLE form. */
5109 tree op0
= (*valueize
) (gimple_assign_rhs1 (stmt
));
5110 tree op1
= (*valueize
) (gimple_assign_rhs2 (stmt
));
5111 tree op2
= (*valueize
) (gimple_assign_rhs3 (stmt
));
5112 return fold_ternary_loc (loc
, subcode
,
5113 gimple_expr_type (stmt
), op0
, op1
, op2
);
5124 gcall
*call_stmt
= as_a
<gcall
*> (stmt
);
5126 if (gimple_call_internal_p (stmt
))
5128 enum tree_code subcode
= ERROR_MARK
;
5129 switch (gimple_call_internal_fn (stmt
))
5131 case IFN_UBSAN_CHECK_ADD
:
5132 subcode
= PLUS_EXPR
;
5134 case IFN_UBSAN_CHECK_SUB
:
5135 subcode
= MINUS_EXPR
;
5137 case IFN_UBSAN_CHECK_MUL
:
5138 subcode
= MULT_EXPR
;
5143 tree arg0
= gimple_call_arg (stmt
, 0);
5144 tree arg1
= gimple_call_arg (stmt
, 1);
5145 tree op0
= (*valueize
) (arg0
);
5146 tree op1
= (*valueize
) (arg1
);
5148 if (TREE_CODE (op0
) != INTEGER_CST
5149 || TREE_CODE (op1
) != INTEGER_CST
)
5154 /* x * 0 = 0 * x = 0 without overflow. */
5155 if (integer_zerop (op0
) || integer_zerop (op1
))
5156 return build_zero_cst (TREE_TYPE (arg0
));
5159 /* y - y = 0 without overflow. */
5160 if (operand_equal_p (op0
, op1
, 0))
5161 return build_zero_cst (TREE_TYPE (arg0
));
5168 = fold_binary_loc (loc
, subcode
, TREE_TYPE (arg0
), op0
, op1
);
5170 && TREE_CODE (res
) == INTEGER_CST
5171 && !TREE_OVERFLOW (res
))
5176 fn
= (*valueize
) (gimple_call_fn (stmt
));
5177 if (TREE_CODE (fn
) == ADDR_EXPR
5178 && TREE_CODE (TREE_OPERAND (fn
, 0)) == FUNCTION_DECL
5179 && DECL_BUILT_IN (TREE_OPERAND (fn
, 0))
5180 && gimple_builtin_call_types_compatible_p (stmt
,
5181 TREE_OPERAND (fn
, 0)))
5183 tree
*args
= XALLOCAVEC (tree
, gimple_call_num_args (stmt
));
5186 for (i
= 0; i
< gimple_call_num_args (stmt
); ++i
)
5187 args
[i
] = (*valueize
) (gimple_call_arg (stmt
, i
));
5188 retval
= fold_builtin_call_array (loc
,
5189 gimple_call_return_type (call_stmt
),
5190 fn
, gimple_call_num_args (stmt
), args
);
5193 /* fold_call_expr wraps the result inside a NOP_EXPR. */
5194 STRIP_NOPS (retval
);
5195 retval
= fold_convert (gimple_call_return_type (call_stmt
),
5208 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
5209 Returns NULL_TREE if folding to a constant is not possible, otherwise
5210 returns a constant according to is_gimple_min_invariant. */
5213 gimple_fold_stmt_to_constant (gimple
*stmt
, tree (*valueize
) (tree
))
5215 tree res
= gimple_fold_stmt_to_constant_1 (stmt
, valueize
);
5216 if (res
&& is_gimple_min_invariant (res
))
5222 /* The following set of functions are supposed to fold references using
5223 their constant initializers. */
5225 /* See if we can find constructor defining value of BASE.
5226 When we know the consructor with constant offset (such as
5227 base is array[40] and we do know constructor of array), then
5228 BIT_OFFSET is adjusted accordingly.
5230 As a special case, return error_mark_node when constructor
5231 is not explicitly available, but it is known to be zero
5232 such as 'static const int a;'. */
5234 get_base_constructor (tree base
, HOST_WIDE_INT
*bit_offset
,
5235 tree (*valueize
)(tree
))
5237 HOST_WIDE_INT bit_offset2
, size
, max_size
;
5238 if (TREE_CODE (base
) == MEM_REF
)
5240 if (!integer_zerop (TREE_OPERAND (base
, 1)))
5242 if (!tree_fits_shwi_p (TREE_OPERAND (base
, 1)))
5244 *bit_offset
+= (mem_ref_offset (base
).to_short_addr ()
5249 && TREE_CODE (TREE_OPERAND (base
, 0)) == SSA_NAME
)
5250 base
= valueize (TREE_OPERAND (base
, 0));
5251 if (!base
|| TREE_CODE (base
) != ADDR_EXPR
)
5253 base
= TREE_OPERAND (base
, 0);
5256 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
5257 DECL_INITIAL. If BASE is a nested reference into another
5258 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
5259 the inner reference. */
5260 switch (TREE_CODE (base
))
5265 tree init
= ctor_for_folding (base
);
5267 /* Our semantic is exact opposite of ctor_for_folding;
5268 NULL means unknown, while error_mark_node is 0. */
5269 if (init
== error_mark_node
)
5272 return error_mark_node
;
5278 base
= get_ref_base_and_extent (base
, &bit_offset2
, &size
, &max_size
);
5279 if (max_size
== -1 || size
!= max_size
)
5281 *bit_offset
+= bit_offset2
;
5282 return get_base_constructor (base
, bit_offset
, valueize
);
5293 /* CTOR is CONSTRUCTOR of an array type. Fold reference of type TYPE and size
5294 SIZE to the memory at bit OFFSET. */
5297 fold_array_ctor_reference (tree type
, tree ctor
,
5298 unsigned HOST_WIDE_INT offset
,
5299 unsigned HOST_WIDE_INT size
,
5302 unsigned HOST_WIDE_INT cnt
;
5304 offset_int low_bound
;
5305 offset_int elt_size
;
5306 offset_int index
, max_index
;
5307 offset_int access_index
;
5308 tree domain_type
= NULL_TREE
, index_type
= NULL_TREE
;
5309 HOST_WIDE_INT inner_offset
;
5311 /* Compute low bound and elt size. */
5312 if (TREE_CODE (TREE_TYPE (ctor
)) == ARRAY_TYPE
)
5313 domain_type
= TYPE_DOMAIN (TREE_TYPE (ctor
));
5314 if (domain_type
&& TYPE_MIN_VALUE (domain_type
))
5316 /* Static constructors for variably sized objects makes no sense. */
5317 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type
)) == INTEGER_CST
);
5318 index_type
= TREE_TYPE (TYPE_MIN_VALUE (domain_type
));
5319 low_bound
= wi::to_offset (TYPE_MIN_VALUE (domain_type
));
5323 /* Static constructors for variably sized objects makes no sense. */
5324 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor
))))
5326 elt_size
= wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor
))));
5328 /* We can handle only constantly sized accesses that are known to not
5329 be larger than size of array element. */
5330 if (!TYPE_SIZE_UNIT (type
)
5331 || TREE_CODE (TYPE_SIZE_UNIT (type
)) != INTEGER_CST
5332 || wi::lts_p (elt_size
, wi::to_offset (TYPE_SIZE_UNIT (type
)))
5336 /* Compute the array index we look for. */
5337 access_index
= wi::udiv_trunc (offset_int (offset
/ BITS_PER_UNIT
),
5339 access_index
+= low_bound
;
5341 access_index
= wi::ext (access_index
, TYPE_PRECISION (index_type
),
5342 TYPE_SIGN (index_type
));
5344 /* And offset within the access. */
5345 inner_offset
= offset
% (elt_size
.to_uhwi () * BITS_PER_UNIT
);
5347 /* See if the array field is large enough to span whole access. We do not
5348 care to fold accesses spanning multiple array indexes. */
5349 if (inner_offset
+ size
> elt_size
.to_uhwi () * BITS_PER_UNIT
)
5352 index
= low_bound
- 1;
5354 index
= wi::ext (index
, TYPE_PRECISION (index_type
),
5355 TYPE_SIGN (index_type
));
5357 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor
), cnt
, cfield
, cval
)
5359 /* Array constructor might explicitely set index, or specify range
5360 or leave index NULL meaning that it is next index after previous
5364 if (TREE_CODE (cfield
) == INTEGER_CST
)
5365 max_index
= index
= wi::to_offset (cfield
);
5368 gcc_assert (TREE_CODE (cfield
) == RANGE_EXPR
);
5369 index
= wi::to_offset (TREE_OPERAND (cfield
, 0));
5370 max_index
= wi::to_offset (TREE_OPERAND (cfield
, 1));
5377 index
= wi::ext (index
, TYPE_PRECISION (index_type
),
5378 TYPE_SIGN (index_type
));
5382 /* Do we have match? */
5383 if (wi::cmpu (access_index
, index
) >= 0
5384 && wi::cmpu (access_index
, max_index
) <= 0)
5385 return fold_ctor_reference (type
, cval
, inner_offset
, size
,
5388 /* When memory is not explicitely mentioned in constructor,
5389 it is 0 (or out of range). */
5390 return build_zero_cst (type
);
5393 /* CTOR is CONSTRUCTOR of an aggregate or vector.
5394 Fold reference of type TYPE and size SIZE to the memory at bit OFFSET. */
5397 fold_nonarray_ctor_reference (tree type
, tree ctor
,
5398 unsigned HOST_WIDE_INT offset
,
5399 unsigned HOST_WIDE_INT size
,
5402 unsigned HOST_WIDE_INT cnt
;
5405 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor
), cnt
, cfield
,
5408 tree byte_offset
= DECL_FIELD_OFFSET (cfield
);
5409 tree field_offset
= DECL_FIELD_BIT_OFFSET (cfield
);
5410 tree field_size
= DECL_SIZE (cfield
);
5411 offset_int bitoffset
;
5412 offset_int bitoffset_end
, access_end
;
5414 /* Variable sized objects in static constructors makes no sense,
5415 but field_size can be NULL for flexible array members. */
5416 gcc_assert (TREE_CODE (field_offset
) == INTEGER_CST
5417 && TREE_CODE (byte_offset
) == INTEGER_CST
5418 && (field_size
!= NULL_TREE
5419 ? TREE_CODE (field_size
) == INTEGER_CST
5420 : TREE_CODE (TREE_TYPE (cfield
)) == ARRAY_TYPE
));
5422 /* Compute bit offset of the field. */
5423 bitoffset
= (wi::to_offset (field_offset
)
5424 + wi::lshift (wi::to_offset (byte_offset
),
5425 LOG2_BITS_PER_UNIT
));
5426 /* Compute bit offset where the field ends. */
5427 if (field_size
!= NULL_TREE
)
5428 bitoffset_end
= bitoffset
+ wi::to_offset (field_size
);
5432 access_end
= offset_int (offset
) + size
;
5434 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
5435 [BITOFFSET, BITOFFSET_END)? */
5436 if (wi::cmps (access_end
, bitoffset
) > 0
5437 && (field_size
== NULL_TREE
5438 || wi::lts_p (offset
, bitoffset_end
)))
5440 offset_int inner_offset
= offset_int (offset
) - bitoffset
;
5441 /* We do have overlap. Now see if field is large enough to
5442 cover the access. Give up for accesses spanning multiple
5444 if (wi::cmps (access_end
, bitoffset_end
) > 0)
5446 if (wi::lts_p (offset
, bitoffset
))
5448 return fold_ctor_reference (type
, cval
,
5449 inner_offset
.to_uhwi (), size
,
5453 /* When memory is not explicitely mentioned in constructor, it is 0. */
5454 return build_zero_cst (type
);
5457 /* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
5458 to the memory at bit OFFSET. */
5461 fold_ctor_reference (tree type
, tree ctor
, unsigned HOST_WIDE_INT offset
,
5462 unsigned HOST_WIDE_INT size
, tree from_decl
)
5466 /* We found the field with exact match. */
5467 if (useless_type_conversion_p (type
, TREE_TYPE (ctor
))
5469 return canonicalize_constructor_val (unshare_expr (ctor
), from_decl
);
5471 /* We are at the end of walk, see if we can view convert the
5473 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor
)) && !offset
5474 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
5475 && !compare_tree_int (TYPE_SIZE (type
), size
)
5476 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor
)), size
))
5478 ret
= canonicalize_constructor_val (unshare_expr (ctor
), from_decl
);
5479 ret
= fold_unary (VIEW_CONVERT_EXPR
, type
, ret
);
5481 STRIP_USELESS_TYPE_CONVERSION (ret
);
5484 /* For constants and byte-aligned/sized reads try to go through
5485 native_encode/interpret. */
5486 if (CONSTANT_CLASS_P (ctor
)
5487 && BITS_PER_UNIT
== 8
5488 && offset
% BITS_PER_UNIT
== 0
5489 && size
% BITS_PER_UNIT
== 0
5490 && size
<= MAX_BITSIZE_MODE_ANY_MODE
)
5492 unsigned char buf
[MAX_BITSIZE_MODE_ANY_MODE
/ BITS_PER_UNIT
];
5493 if (native_encode_expr (ctor
, buf
, size
/ BITS_PER_UNIT
,
5494 offset
/ BITS_PER_UNIT
) > 0)
5495 return native_interpret_expr (type
, buf
, size
/ BITS_PER_UNIT
);
5497 if (TREE_CODE (ctor
) == CONSTRUCTOR
)
5500 if (TREE_CODE (TREE_TYPE (ctor
)) == ARRAY_TYPE
5501 || TREE_CODE (TREE_TYPE (ctor
)) == VECTOR_TYPE
)
5502 return fold_array_ctor_reference (type
, ctor
, offset
, size
,
5505 return fold_nonarray_ctor_reference (type
, ctor
, offset
, size
,
5512 /* Return the tree representing the element referenced by T if T is an
5513 ARRAY_REF or COMPONENT_REF into constant aggregates valuezing SSA
5514 names using VALUEIZE. Return NULL_TREE otherwise. */
5517 fold_const_aggregate_ref_1 (tree t
, tree (*valueize
) (tree
))
5519 tree ctor
, idx
, base
;
5520 HOST_WIDE_INT offset
, size
, max_size
;
5523 if (TREE_THIS_VOLATILE (t
))
5527 return get_symbol_constant_value (t
);
5529 tem
= fold_read_from_constant_string (t
);
5533 switch (TREE_CODE (t
))
5536 case ARRAY_RANGE_REF
:
5537 /* Constant indexes are handled well by get_base_constructor.
5538 Only special case variable offsets.
5539 FIXME: This code can't handle nested references with variable indexes
5540 (they will be handled only by iteration of ccp). Perhaps we can bring
5541 get_ref_base_and_extent here and make it use a valueize callback. */
5542 if (TREE_CODE (TREE_OPERAND (t
, 1)) == SSA_NAME
5544 && (idx
= (*valueize
) (TREE_OPERAND (t
, 1)))
5545 && TREE_CODE (idx
) == INTEGER_CST
)
5547 tree low_bound
, unit_size
;
5549 /* If the resulting bit-offset is constant, track it. */
5550 if ((low_bound
= array_ref_low_bound (t
),
5551 TREE_CODE (low_bound
) == INTEGER_CST
)
5552 && (unit_size
= array_ref_element_size (t
),
5553 tree_fits_uhwi_p (unit_size
)))
5556 = wi::sext (wi::to_offset (idx
) - wi::to_offset (low_bound
),
5557 TYPE_PRECISION (TREE_TYPE (idx
)));
5559 if (wi::fits_shwi_p (woffset
))
5561 offset
= woffset
.to_shwi ();
5562 /* TODO: This code seems wrong, multiply then check
5563 to see if it fits. */
5564 offset
*= tree_to_uhwi (unit_size
);
5565 offset
*= BITS_PER_UNIT
;
5567 base
= TREE_OPERAND (t
, 0);
5568 ctor
= get_base_constructor (base
, &offset
, valueize
);
5569 /* Empty constructor. Always fold to 0. */
5570 if (ctor
== error_mark_node
)
5571 return build_zero_cst (TREE_TYPE (t
));
5572 /* Out of bound array access. Value is undefined,
5576 /* We can not determine ctor. */
5579 return fold_ctor_reference (TREE_TYPE (t
), ctor
, offset
,
5580 tree_to_uhwi (unit_size
)
5590 case TARGET_MEM_REF
:
5592 base
= get_ref_base_and_extent (t
, &offset
, &size
, &max_size
);
5593 ctor
= get_base_constructor (base
, &offset
, valueize
);
5595 /* Empty constructor. Always fold to 0. */
5596 if (ctor
== error_mark_node
)
5597 return build_zero_cst (TREE_TYPE (t
));
5598 /* We do not know precise address. */
5599 if (max_size
== -1 || max_size
!= size
)
5601 /* We can not determine ctor. */
5605 /* Out of bound array access. Value is undefined, but don't fold. */
5609 return fold_ctor_reference (TREE_TYPE (t
), ctor
, offset
, size
,
5615 tree c
= fold_const_aggregate_ref_1 (TREE_OPERAND (t
, 0), valueize
);
5616 if (c
&& TREE_CODE (c
) == COMPLEX_CST
)
5617 return fold_build1_loc (EXPR_LOCATION (t
),
5618 TREE_CODE (t
), TREE_TYPE (t
), c
);
5630 fold_const_aggregate_ref (tree t
)
5632 return fold_const_aggregate_ref_1 (t
, NULL
);
5635 /* Lookup virtual method with index TOKEN in a virtual table V
5637 Set CAN_REFER if non-NULL to false if method
5638 is not referable or if the virtual table is ill-formed (such as rewriten
5639 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
5642 gimple_get_virt_method_for_vtable (HOST_WIDE_INT token
,
5644 unsigned HOST_WIDE_INT offset
,
5647 tree vtable
= v
, init
, fn
;
5648 unsigned HOST_WIDE_INT size
;
5649 unsigned HOST_WIDE_INT elt_size
, access_index
;
5655 /* First of all double check we have virtual table. */
5656 if (TREE_CODE (v
) != VAR_DECL
5657 || !DECL_VIRTUAL_P (v
))
5659 /* Pass down that we lost track of the target. */
5665 init
= ctor_for_folding (v
);
5667 /* The virtual tables should always be born with constructors
5668 and we always should assume that they are avaialble for
5669 folding. At the moment we do not stream them in all cases,
5670 but it should never happen that ctor seem unreachable. */
5672 if (init
== error_mark_node
)
5674 gcc_assert (in_lto_p
);
5675 /* Pass down that we lost track of the target. */
5680 gcc_checking_assert (TREE_CODE (TREE_TYPE (v
)) == ARRAY_TYPE
);
5681 size
= tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v
))));
5682 offset
*= BITS_PER_UNIT
;
5683 offset
+= token
* size
;
5685 /* Lookup the value in the constructor that is assumed to be array.
5686 This is equivalent to
5687 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
5688 offset, size, NULL);
5689 but in a constant time. We expect that frontend produced a simple
5690 array without indexed initializers. */
5692 gcc_checking_assert (TREE_CODE (TREE_TYPE (init
)) == ARRAY_TYPE
);
5693 domain_type
= TYPE_DOMAIN (TREE_TYPE (init
));
5694 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type
)));
5695 elt_size
= tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init
))));
5697 access_index
= offset
/ BITS_PER_UNIT
/ elt_size
;
5698 gcc_checking_assert (offset
% (elt_size
* BITS_PER_UNIT
) == 0);
5700 /* This code makes an assumption that there are no
5701 indexed fileds produced by C++ FE, so we can directly index the array. */
5702 if (access_index
< CONSTRUCTOR_NELTS (init
))
5704 fn
= CONSTRUCTOR_ELT (init
, access_index
)->value
;
5705 gcc_checking_assert (!CONSTRUCTOR_ELT (init
, access_index
)->index
);
5711 /* For type inconsistent program we may end up looking up virtual method
5712 in virtual table that does not contain TOKEN entries. We may overrun
5713 the virtual table and pick up a constant or RTTI info pointer.
5714 In any case the call is undefined. */
5716 || (TREE_CODE (fn
) != ADDR_EXPR
&& TREE_CODE (fn
) != FDESC_EXPR
)
5717 || TREE_CODE (TREE_OPERAND (fn
, 0)) != FUNCTION_DECL
)
5718 fn
= builtin_decl_implicit (BUILT_IN_UNREACHABLE
);
5721 fn
= TREE_OPERAND (fn
, 0);
5723 /* When cgraph node is missing and function is not public, we cannot
5724 devirtualize. This can happen in WHOPR when the actual method
5725 ends up in other partition, because we found devirtualization
5726 possibility too late. */
5727 if (!can_refer_decl_in_current_unit_p (fn
, vtable
))
5738 /* Make sure we create a cgraph node for functions we'll reference.
5739 They can be non-existent if the reference comes from an entry
5740 of an external vtable for example. */
5741 cgraph_node::get_create (fn
);
5746 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
5747 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
5748 KNOWN_BINFO carries the binfo describing the true type of
5749 OBJ_TYPE_REF_OBJECT(REF).
5750 Set CAN_REFER if non-NULL to false if method
5751 is not referable or if the virtual table is ill-formed (such as rewriten
5752 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
5755 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token
, tree known_binfo
,
5758 unsigned HOST_WIDE_INT offset
;
5761 v
= BINFO_VTABLE (known_binfo
);
5762 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
5766 if (!vtable_pointer_value_to_vtable (v
, &v
, &offset
))
5772 return gimple_get_virt_method_for_vtable (token
, v
, offset
, can_refer
);
5775 /* Return true iff VAL is a gimple expression that is known to be
5776 non-negative. Restricted to floating-point inputs. */
5779 gimple_val_nonnegative_real_p (tree val
)
5783 gcc_assert (val
&& SCALAR_FLOAT_TYPE_P (TREE_TYPE (val
)));
5785 /* Use existing logic for non-gimple trees. */
5786 if (tree_expr_nonnegative_p (val
))
5789 if (TREE_CODE (val
) != SSA_NAME
)
5792 /* Currently we look only at the immediately defining statement
5793 to make this determination, since recursion on defining
5794 statements of operands can lead to quadratic behavior in the
5795 worst case. This is expected to catch almost all occurrences
5796 in practice. It would be possible to implement limited-depth
5797 recursion if important cases are lost. Alternatively, passes
5798 that need this information (such as the pow/powi lowering code
5799 in the cse_sincos pass) could be revised to provide it through
5800 dataflow propagation. */
5802 def_stmt
= SSA_NAME_DEF_STMT (val
);
5804 if (is_gimple_assign (def_stmt
))
5808 /* See fold-const.c:tree_expr_nonnegative_p for additional
5809 cases that could be handled with recursion. */
5811 switch (gimple_assign_rhs_code (def_stmt
))
5814 /* Always true for floating-point operands. */
5818 /* True if the two operands are identical (since we are
5819 restricted to floating-point inputs). */
5820 op0
= gimple_assign_rhs1 (def_stmt
);
5821 op1
= gimple_assign_rhs2 (def_stmt
);
5824 || operand_equal_p (op0
, op1
, 0))
5831 else if (is_gimple_call (def_stmt
))
5833 tree fndecl
= gimple_call_fndecl (def_stmt
);
5835 && DECL_BUILT_IN_CLASS (fndecl
) == BUILT_IN_NORMAL
)
5839 switch (DECL_FUNCTION_CODE (fndecl
))
5841 CASE_FLT_FN (BUILT_IN_ACOS
):
5842 CASE_FLT_FN (BUILT_IN_ACOSH
):
5843 CASE_FLT_FN (BUILT_IN_CABS
):
5844 CASE_FLT_FN (BUILT_IN_COSH
):
5845 CASE_FLT_FN (BUILT_IN_ERFC
):
5846 CASE_FLT_FN (BUILT_IN_EXP
):
5847 CASE_FLT_FN (BUILT_IN_EXP10
):
5848 CASE_FLT_FN (BUILT_IN_EXP2
):
5849 CASE_FLT_FN (BUILT_IN_FABS
):
5850 CASE_FLT_FN (BUILT_IN_FDIM
):
5851 CASE_FLT_FN (BUILT_IN_HYPOT
):
5852 CASE_FLT_FN (BUILT_IN_POW10
):
5855 CASE_FLT_FN (BUILT_IN_SQRT
):
5856 /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
5857 nonnegative inputs. */
5858 if (!HONOR_SIGNED_ZEROS (val
))
5863 CASE_FLT_FN (BUILT_IN_POWI
):
5864 /* True if the second argument is an even integer. */
5865 arg1
= gimple_call_arg (def_stmt
, 1);
5867 if (TREE_CODE (arg1
) == INTEGER_CST
5868 && (TREE_INT_CST_LOW (arg1
) & 1) == 0)
5873 CASE_FLT_FN (BUILT_IN_POW
):
5874 /* True if the second argument is an even integer-valued
5876 arg1
= gimple_call_arg (def_stmt
, 1);
5878 if (TREE_CODE (arg1
) == REAL_CST
)
5883 c
= TREE_REAL_CST (arg1
);
5884 n
= real_to_integer (&c
);
5888 REAL_VALUE_TYPE cint
;
5889 real_from_integer (&cint
, VOIDmode
, n
, SIGNED
);
5890 if (real_identical (&c
, &cint
))
/* Given a pointer value OP0, return a simplified version of an
   indirection through OP0, or NULL_TREE if no simplification is
   possible.  Note that the resulting type may be different from
   the type pointed to in the sense that it is still compatible
   from the langhooks point of view. */

tree
gimple_fold_indirect_ref (tree t)
{
  /* PTYPE is the pointer type of T, TYPE what the dereference yields.  */
  tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
  tree sub = t;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (useless_type_conversion_p (type, optype))
        return op;

      /* *(foo *)&fooarray => fooarray[0] */
      if (TREE_CODE (optype) == ARRAY_TYPE
          && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
          && useless_type_conversion_p (type, TREE_TYPE (optype)))
        {
          /* Use the array's lower bound as the index when one is recorded;
             otherwise index from zero.  */
          tree type_domain = TYPE_DOMAIN (optype);
          tree min_val = size_zero_node;
          if (type_domain && TYPE_MIN_VALUE (type_domain))
            min_val = TYPE_MIN_VALUE (type_domain);
          /* Only constant lower bounds are handled here.  */
          if (TREE_CODE (min_val) == INTEGER_CST)
            return build4 (ARRAY_REF, type, op, min_val,
                           NULL_TREE, NULL_TREE);
        }
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
               && useless_type_conversion_p (type, TREE_TYPE (optype)))
        return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
               && useless_type_conversion_p (type, TREE_TYPE (optype)))
        {
          tree part_width = TYPE_SIZE (type);
          tree index = bitsize_int (0);
          return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
        }
    }

  /* *(p + CST) -> ...  */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree addr = TREE_OPERAND (sub, 0);
      tree off = TREE_OPERAND (sub, 1);
      tree addrtype;

      STRIP_NOPS (addr);
      addrtype = TREE_TYPE (addr);

      /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
      if (TREE_CODE (addr) == ADDR_EXPR
          && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
          && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
          && tree_fits_uhwi_p (off))
        {
          unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
          tree part_width = TYPE_SIZE (type);
          unsigned HOST_WIDE_INT part_widthi
            = tree_to_shwi (part_width) / BITS_PER_UNIT;
          unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
          tree index = bitsize_int (indexi);
          /* Only fold the access when it stays within the vector.  */
          if (offset / part_widthi
              < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
            return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
                                part_width, index);
        }

      /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
      if (TREE_CODE (addr) == ADDR_EXPR
          && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
          && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
        {
          /* An offset of exactly one element's size selects the
             imaginary part.  */
          tree size = TYPE_SIZE_UNIT (type);
          if (tree_int_cst_equal (size, off))
            return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
        }

      /* *(p + CST) -> MEM_REF <p, CST>.  */
      if (TREE_CODE (addr) != ADDR_EXPR
          || DECL_P (TREE_OPERAND (addr, 0)))
        return fold_build2 (MEM_REF, type,
                            build1 (ADDR_EXPR, ptype, sub),
                            wide_int_to_tree (ptype, off));
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
      && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      tree osub = sub;
      /* Recurse on the dereference of the pointer itself; fall back to
         a plain INDIRECT_REF when that yields nothing.  */
      sub = gimple_fold_indirect_ref (sub);
      if (! sub)
        sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
        min_val = TYPE_MIN_VALUE (type_domain);
      if (TREE_CODE (min_val) == INTEGER_CST)
        return build4 (ARRAY_REF, type, sub, min_val,
                       NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
6026 /* Return true if CODE is an operation that when operating on signed
6027 integer types involves undefined behavior on overflow and the
6028 operation can be expressed with unsigned arithmetic. */
6031 arith_code_with_undefined_signed_overflow (tree_code code
)
6039 case POINTER_PLUS_EXPR
:
/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
   operation that can be transformed to unsigned arithmetic by converting
   its operand, carrying out the operation in the corresponding unsigned
   type and converting the result back to the original type.

   Returns a sequence of statements that replace STMT and also contain
   a modified form of STMT itself.  */

gimple_seq
rewrite_to_defined_overflow (gimple *stmt)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "rewriting stmt with undefined signed "
	       "overflow ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
    }

  tree lhs = gimple_assign_lhs (stmt);
  /* Carry out the operation in the unsigned variant of the lhs type.  */
  tree type = unsigned_type_for (TREE_TYPE (lhs));
  gimple_seq stmts = NULL;
  /* Convert each rhs operand (ops 1..n-1; op 0 is the lhs) to the
     unsigned type, accumulating any conversion statements.  */
  for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
    {
      gimple_seq stmts2 = NULL;
      gimple_set_op (stmt, i,
		     force_gimple_operand (fold_convert (type,
							 gimple_op (stmt, i)),
					   &stmts2, true, NULL_TREE));
      gimple_seq_add_seq (&stmts, stmts2);
    }
  /* Give STMT a fresh unsigned lhs; the original lhs is set below by
     the final conversion statement.  */
  gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
  /* Pointer arithmetic becomes plain unsigned addition.  */
  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
  gimple_seq_add_stmt (&stmts, stmt);
  /* Convert the unsigned result back to the original type and original
     lhs so existing uses remain valid.  */
  gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
  gimple_seq_add_stmt (&stmts, cvt);

  return stmts;
}
6087 /* The valueization hook we use for the gimple_build API simplification.
6088 This makes us match fold_buildN behavior by only combining with
6089 statements in the sequence(s) we are currently building. */
6092 gimple_build_valueize (tree op
)
6094 if (gimple_bb (SSA_NAME_DEF_STMT (op
)) == NULL
)
6099 /* Build the expression CODE OP0 of type TYPE with location LOC,
6100 simplifying it first if possible. Returns the built
6101 expression value and appends statements possibly defining it
6105 gimple_build (gimple_seq
*seq
, location_t loc
,
6106 enum tree_code code
, tree type
, tree op0
)
6108 tree res
= gimple_simplify (code
, type
, op0
, seq
, gimple_build_valueize
);
6111 if (gimple_in_ssa_p (cfun
))
6112 res
= make_ssa_name (type
);
6114 res
= create_tmp_reg (type
);
6116 if (code
== REALPART_EXPR
6117 || code
== IMAGPART_EXPR
6118 || code
== VIEW_CONVERT_EXPR
)
6119 stmt
= gimple_build_assign (res
, code
, build1 (code
, type
, op0
));
6121 stmt
= gimple_build_assign (res
, code
, op0
);
6122 gimple_set_location (stmt
, loc
);
6123 gimple_seq_add_stmt_without_update (seq
, stmt
);
6128 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
6129 simplifying it first if possible. Returns the built
6130 expression value and appends statements possibly defining it
6134 gimple_build (gimple_seq
*seq
, location_t loc
,
6135 enum tree_code code
, tree type
, tree op0
, tree op1
)
6137 tree res
= gimple_simplify (code
, type
, op0
, op1
, seq
, gimple_build_valueize
);
6140 if (gimple_in_ssa_p (cfun
))
6141 res
= make_ssa_name (type
);
6143 res
= create_tmp_reg (type
);
6144 gimple
*stmt
= gimple_build_assign (res
, code
, op0
, op1
);
6145 gimple_set_location (stmt
, loc
);
6146 gimple_seq_add_stmt_without_update (seq
, stmt
);
6151 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
6152 simplifying it first if possible. Returns the built
6153 expression value and appends statements possibly defining it
6157 gimple_build (gimple_seq
*seq
, location_t loc
,
6158 enum tree_code code
, tree type
, tree op0
, tree op1
, tree op2
)
6160 tree res
= gimple_simplify (code
, type
, op0
, op1
, op2
,
6161 seq
, gimple_build_valueize
);
6164 if (gimple_in_ssa_p (cfun
))
6165 res
= make_ssa_name (type
);
6167 res
= create_tmp_reg (type
);
6169 if (code
== BIT_FIELD_REF
)
6170 stmt
= gimple_build_assign (res
, code
,
6171 build3 (code
, type
, op0
, op1
, op2
));
6173 stmt
= gimple_build_assign (res
, code
, op0
, op1
, op2
);
6174 gimple_set_location (stmt
, loc
);
6175 gimple_seq_add_stmt_without_update (seq
, stmt
);
6180 /* Build the call FN (ARG0) with a result of type TYPE
6181 (or no result if TYPE is void) with location LOC,
6182 simplifying it first if possible. Returns the built
6183 expression value (or NULL_TREE if TYPE is void) and appends
6184 statements possibly defining it to SEQ. */
6187 gimple_build (gimple_seq
*seq
, location_t loc
,
6188 enum built_in_function fn
, tree type
, tree arg0
)
6190 tree res
= gimple_simplify (fn
, type
, arg0
, seq
, gimple_build_valueize
);
6193 tree decl
= builtin_decl_implicit (fn
);
6194 gimple
*stmt
= gimple_build_call (decl
, 1, arg0
);
6195 if (!VOID_TYPE_P (type
))
6197 if (gimple_in_ssa_p (cfun
))
6198 res
= make_ssa_name (type
);
6200 res
= create_tmp_reg (type
);
6201 gimple_call_set_lhs (stmt
, res
);
6203 gimple_set_location (stmt
, loc
);
6204 gimple_seq_add_stmt_without_update (seq
, stmt
);
6209 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
6210 (or no result if TYPE is void) with location LOC,
6211 simplifying it first if possible. Returns the built
6212 expression value (or NULL_TREE if TYPE is void) and appends
6213 statements possibly defining it to SEQ. */
6216 gimple_build (gimple_seq
*seq
, location_t loc
,
6217 enum built_in_function fn
, tree type
, tree arg0
, tree arg1
)
6219 tree res
= gimple_simplify (fn
, type
, arg0
, arg1
, seq
, gimple_build_valueize
);
6222 tree decl
= builtin_decl_implicit (fn
);
6223 gimple
*stmt
= gimple_build_call (decl
, 2, arg0
, arg1
);
6224 if (!VOID_TYPE_P (type
))
6226 if (gimple_in_ssa_p (cfun
))
6227 res
= make_ssa_name (type
);
6229 res
= create_tmp_reg (type
);
6230 gimple_call_set_lhs (stmt
, res
);
6232 gimple_set_location (stmt
, loc
);
6233 gimple_seq_add_stmt_without_update (seq
, stmt
);
6238 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
6239 (or no result if TYPE is void) with location LOC,
6240 simplifying it first if possible. Returns the built
6241 expression value (or NULL_TREE if TYPE is void) and appends
6242 statements possibly defining it to SEQ. */
6245 gimple_build (gimple_seq
*seq
, location_t loc
,
6246 enum built_in_function fn
, tree type
,
6247 tree arg0
, tree arg1
, tree arg2
)
6249 tree res
= gimple_simplify (fn
, type
, arg0
, arg1
, arg2
,
6250 seq
, gimple_build_valueize
);
6253 tree decl
= builtin_decl_implicit (fn
);
6254 gimple
*stmt
= gimple_build_call (decl
, 3, arg0
, arg1
, arg2
);
6255 if (!VOID_TYPE_P (type
))
6257 if (gimple_in_ssa_p (cfun
))
6258 res
= make_ssa_name (type
);
6260 res
= create_tmp_reg (type
);
6261 gimple_call_set_lhs (stmt
, res
);
6263 gimple_set_location (stmt
, loc
);
6264 gimple_seq_add_stmt_without_update (seq
, stmt
);
6269 /* Build the conversion (TYPE) OP with a result of type TYPE
6270 with location LOC if such conversion is neccesary in GIMPLE,
6271 simplifying it first.
6272 Returns the built expression value and appends
6273 statements possibly defining it to SEQ. */
6276 gimple_convert (gimple_seq
*seq
, location_t loc
, tree type
, tree op
)
6278 if (useless_type_conversion_p (type
, TREE_TYPE (op
)))
6280 return gimple_build (seq
, loc
, NOP_EXPR
, type
, op
);