/* Statement simplification on GIMPLE.
   Copyright (C) 2010-2017 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.c.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "coretypes.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "gimple-fold.h"
#include "gimple-iterator.h"
#include "tree-into-ssa.h"
#include "tree-ssa-propagate.h"
#include "ipa-utils.h"
#include "tree-ssa-address.h"
#include "langhooks.h"
#include "gimplify-me.h"
#include "gimple-match.h"
#include "gomp-constants.h"
#include "optabs-query.h"
#include "omp-general.h"
#include "fold-const-call.h"

/* Return true when DECL can be referenced from the current unit.
   FROM_DECL (if non-null) specifies the constructor of the variable
   DECL was taken from.  We can get declarations that are not possible
   to reference for various reasons:

     1) When analyzing C++ virtual tables.
	C++ virtual tables do have known constructors even
	when they are keyed to another compilation unit.
	Those tables can contain pointers to methods and vars
	in other units.  Those methods have both STATIC and EXTERNAL
	set.
     2) In WHOPR mode devirtualization might lead to a reference
	to a method that was partitioned elsewhere.
	In this case we have a static VAR_DECL or FUNCTION_DECL
	that has no corresponding callgraph/varpool node
	declared.
     3) COMDAT functions referred to by external vtables that
	we devirtualize only during the final compilation stage.
	At this time we already decided that we will not output
	the function body and thus we can't reference the symbol
	directly.  */

static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  if (DECL_ABSTRACT_P (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || !VAR_OR_FUNCTION_DECL_P (decl))
    return true;

  /* Static objects can be referred to only if they were not optimized
     out yet.  */
  if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
    {
      /* Before we start optimizing unreachable code we can be sure all
	 static objects are defined.  */
      if (symtab->function_flags_ready)
	return true;
      snode = symtab_node::get (decl);
      if (!snode || !snode->definition)
	return false;
      node = dyn_cast <cgraph_node *> (snode);
      return !node || !node->global.inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from the initializer of
     an external var or a var that has been optimized out.  */
  if (!from_decl
      || !VAR_P (from_decl)
      || (!DECL_EXTERNAL (from_decl)
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->definition)
      || (flag_ltrans
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->in_other_partition))
    return true;
  /* We are folding a reference from an external vtable.  The vtable may
     refer to a symbol keyed to another compilation unit.  The other
     compilation unit may be in a separate DSO and the symbol may be
     hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
    return false;
  /* When a function is public, we always can introduce a new reference.
     The exceptions are the COMDAT functions where introducing a direct
     reference implies the need to include the function body in the
     current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We have COMDAT.  We are going to check if we still have a definition
     or if the definition is going to be output in another partition.
     Bypass this when gimplifying; all needed functions will be produced.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting to not necessarily give up because the copy will be
     output elsewhere when the corresponding vtable is output.
     This is however not possible - the ABI specifies that COMDATs are output
     in units where they are used and when the other unit was compiled with
     LTO it is possible that the vtable was kept public while the function
     itself was privatized.  */
  if (!symtab->function_flags_ready)
    return true;

  snode = symtab_node::get (decl);
  if (!snode
      || ((!snode->definition || DECL_EXTERNAL (decl))
	  && (!snode->in_other_partition
	      || (!snode->forced_by_abi && !snode->force_output))))
    return false;
  node = dyn_cast <cgraph_node *> (snode);
  return !node || !node->global.inlined_to;
}

/* Create a temporary for TYPE for a statement STMT.  If the current function
   is in SSA form, an SSA name is created.  Otherwise a temporary register
   is made.  */

static tree
create_tmp_reg_or_ssa_name (tree type, gimple *stmt = NULL)
{
  if (gimple_in_ssa_p (cfun))
    return make_ssa_name (type, stmt);
  else
    return create_tmp_reg (type);
}

/* CVAL is a value taken from DECL_INITIAL of a variable.  Try to transform
   it into an acceptable form for is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specifies the variable whose constructor
   contains CVAL.  */

tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  tree orig_cval = cval;
  STRIP_NOPS (cval);
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
	cval = build1_loc (EXPR_LOCATION (cval),
			   ADDR_EXPR, TREE_TYPE (ptr),
			   fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
					ptr,
					fold_convert (ptr_type_node,
						      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
	{
	  base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
	  if (base)
	    TREE_OPERAND (cval, 0) = base;
	}
      else
	base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
	return NULL_TREE;

      if (VAR_OR_FUNCTION_DECL_P (base)
	  && !can_refer_decl_in_current_unit_p (base, from_decl))
	return NULL_TREE;
      if (TREE_TYPE (base) == error_mark_node)
	return NULL_TREE;
      if (VAR_P (base))
	TREE_ADDRESSABLE (base) = 1;
      else if (TREE_CODE (base) == FUNCTION_DECL)
	{
	  /* Make sure we create a cgraph node for functions we'll reference.
	     They can be non-existent if the reference comes from an entry
	     of an external vtable for example.  */
	  cgraph_node::get_create (base);
	}
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  if (TREE_OVERFLOW_P (cval))
    return drop_tree_overflow (cval);
  return orig_cval;
}

/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

tree
get_symbol_constant_value (tree sym)
{
  tree val = ctor_for_folding (sym);
  if (val != error_mark_node)
    {
      if (val)
	{
	  val = canonicalize_constructor_val (unshare_expr (val), sym);
	  if (val && is_gimple_min_invariant (val))
	    return val;
	  else
	    return NULL_TREE;
	}
      /* Variables declared 'const' without an initializer
	 have zero as the initializer if they may not be
	 overridden at link or run time.  */
      if (!val
	  && is_gimple_reg_type (TREE_TYPE (sym)))
	return build_zero_cst (TREE_TYPE (sym));
    }

  return NULL_TREE;
}

/* Subroutine of fold_stmt.  We perform several simplifications of the
   memory reference tree EXPR and make sure to re-gimplify them properly
   after propagation of constant addresses.  IS_LHS is true if the
   reference is supposed to be an lvalue.  */

static tree
maybe_fold_reference (tree expr, bool is_lhs)
{
  tree result;

  if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
       || TREE_CODE (expr) == REALPART_EXPR
       || TREE_CODE (expr) == IMAGPART_EXPR)
      && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_unary_loc (EXPR_LOCATION (expr),
			   TREE_CODE (expr),
			   TREE_TYPE (expr),
			   TREE_OPERAND (expr, 0));
  else if (TREE_CODE (expr) == BIT_FIELD_REF
	   && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_ternary_loc (EXPR_LOCATION (expr),
			     TREE_CODE (expr),
			     TREE_TYPE (expr),
			     TREE_OPERAND (expr, 0),
			     TREE_OPERAND (expr, 1),
			     TREE_OPERAND (expr, 2));

  if (!is_lhs
      && (result = fold_const_aggregate_ref (expr))
      && is_gimple_min_invariant (result))
    return result;

  return NULL_TREE;
}

/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple *stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	if (TREE_CLOBBER_P (rhs))
	  return NULL_TREE;

	if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs, false);

	else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
	  {
	    tree val = OBJ_TYPE_REF_EXPR (rhs);
	    if (is_gimple_min_invariant (val))
	      return val;
	    else if (flag_devirtualize && virtual_method_call_p (rhs))
	      {
		bool final;
		vec <cgraph_node *>targets
		  = possible_polymorphic_call_targets (rhs, stmt, &final);
		if (final && targets.length () <= 1 && dbg_cnt (devirt))
		  {
		    if (dump_enabled_p ())
		      {
			location_t loc = gimple_location_safe (stmt);
			dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
					 "resolving virtual function address "
					 "reference to function %s\n",
					 targets.length () == 1
					 ? targets[0]->name ()
					 : "NULL");
		      }
		    if (targets.length () == 1)
		      {
			val = fold_convert (TREE_TYPE (val),
					    build_fold_addr_expr_loc
					      (loc, targets[0]->decl));
			STRIP_USELESS_TYPE_CONVERSION (val);
		      }
		    else
		      /* We can not use __builtin_unreachable here because it
			 can not have its address taken.  */
		      val = build_int_cst (TREE_TYPE (val), 0);
		    return val;
		  }
	      }
	  }

	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    tree ref = TREE_OPERAND (rhs, 0);
	    tree tem = maybe_fold_reference (ref, true);
	    if (tem
		&& TREE_CODE (tem) == MEM_REF
		&& integer_zerop (TREE_OPERAND (tem, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
	    else if (tem)
	      result = fold_convert (TREE_TYPE (rhs),
				     build_fold_addr_expr_loc (loc, tem));
	    else if (TREE_CODE (ref) == MEM_REF
		     && integer_zerop (TREE_OPERAND (ref, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));

	    if (result)
	      {
		/* Strip away useless type conversions.  Both the
		   NON_LVALUE_EXPR that may have been added by fold, and
		   "useless" type conversions that might now be apparent
		   due to propagation.  */
		STRIP_USELESS_TYPE_CONVERSION (result);

		if (result != rhs && valid_gimple_rhs_p (result))
		  return result;
	      }
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (! CONSTANT_CLASS_P (val))
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs))
	  return get_symbol_constant_value (rhs);
      }
      break;

    case GIMPLE_UNARY_RHS:
      break;

    case GIMPLE_BINARY_RHS:
      break;

    case GIMPLE_TERNARY_RHS:
      result = fold_ternary_loc (loc, subcode,
				 TREE_TYPE (gimple_assign_lhs (stmt)),
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 gimple_assign_rhs3 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}

/* Replace a statement at *SI_P with a sequence of statements in STMTS,
   adjusting the replacement stmts location and virtual operands.
   If the statement has a lhs the last stmt in the sequence is expected
   to assign to that lhs.  */

static void
gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
{
  gimple *stmt = gsi_stmt (*si_p);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  gimple *laststore = NULL;
  for (gimple_stmt_iterator i = gsi_last (stmts);
       !gsi_end_p (i); gsi_prev (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      if ((gimple_assign_single_p (new_stmt)
	   && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	  || (is_gimple_call (new_stmt)
	      && (gimple_call_flags (new_stmt)
		  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
	{
	  tree vdef;
	  if (!laststore)
	    vdef = gimple_vdef (stmt);
	  else
	    vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
	  gimple_set_vdef (new_stmt, vdef);
	  if (vdef && TREE_CODE (vdef) == SSA_NAME)
	    SSA_NAME_DEF_STMT (vdef) = new_stmt;
	  laststore = new_stmt;
	}
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  tree reaching_vuse = gimple_vuse (stmt);
  for (gimple_stmt_iterator i = gsi_start (stmts);
       !gsi_end_p (i); gsi_next (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with exact SSA
	 name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
	gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
	reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
	  && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}

/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL,
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple *stmt, *new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  push_gimplify_context (gimple_in_ssa_p (cfun));

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  pop_gimplify_context (NULL);
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  gsi_replace (si_p, gimple_build_nop (), false);
	  return;
	}
    }
  else
    {
      tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
				       GSI_CONTINUE_LINKING);
    }

  pop_gimplify_context (NULL);

  gsi_replace_with_seq_vops (si_p, stmts);
}

/* Replace the call at *GSI with the gimple value VAL.  */

static void
replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
{
  gimple *stmt = gsi_stmt (*gsi);
  gimple *repl;
  tree lhs = gimple_call_lhs (stmt);
  if (lhs)
    {
      if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
	val = fold_convert (TREE_TYPE (lhs), val);
      repl = gimple_build_assign (lhs, val);
    }
  else
    repl = gimple_build_nop ();
  tree vdef = gimple_vdef (stmt);
  if (vdef && TREE_CODE (vdef) == SSA_NAME)
    {
      unlink_stmt_vdef (stmt);
      release_ssa_name (vdef);
    }
  gsi_replace (gsi, repl, false);
}

/* Replace the call at *GSI with the new call REPL and fold that
   again.  */

static void
replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
{
  gimple *stmt = gsi_stmt (*gsi);
  gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
  gimple_set_location (repl, gimple_location (stmt));
  if (gimple_vdef (stmt)
      && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
    {
      gimple_set_vdef (repl, gimple_vdef (stmt));
      gimple_set_vuse (repl, gimple_vuse (stmt));
      SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
    }
  gsi_replace (gsi, repl, false);
  fold_stmt (gsi);
}

/* Return true if VAR is a VAR_DECL or a component thereof.  */

static bool
var_decl_component_p (tree var)
{
  tree inner = var;
  while (handled_component_p (inner))
    inner = TREE_OPERAND (inner, 0);
  return SSA_VAR_P (inner);
}

/* Fold function call to builtin mem{{,p}cpy,move}.  Return
   false if no simplification can be made.
   If ENDP is 0, return DEST (like memcpy).
   If ENDP is 1, return DEST+LEN (like mempcpy).
   If ENDP is 2, return DEST+LEN-1 (like stpcpy).
   If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
   (memmove).  */

static bool
gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
			       tree dest, tree src, int endp)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  tree len = gimple_call_arg (stmt, 2);
  tree destvar, srcvar;
  location_t loc = gimple_location (stmt);

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      gimple *repl;
      if (gimple_call_lhs (stmt))
	repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
      else
	repl = gimple_build_nop ();
      tree vdef = gimple_vdef (stmt);
      if (vdef && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
      gsi_replace (gsi, repl, false);
      return true;
    }

  /* If SRC and DEST are the same (and not volatile), return
     DEST{,+LEN,+LEN-1}.  */
  if (operand_equal_p (src, dest, 0))
    {
      unlink_stmt_vdef (stmt);
      if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
	release_ssa_name (gimple_vdef (stmt));
      if (!lhs)
	{
	  gsi_replace (gsi, gimple_build_nop (), false);
	  return true;
	}
      goto done;
    }
  else
    {
      tree srctype, desttype;
      unsigned int src_align, dest_align;
      tree off0;

      /* Inlining of memcpy/memmove may cause bounds to be lost (if we copy
	 pointers as a wide integer) and also may result in huge function
	 size because of inlined bounds copy.  Thus don't inline for
	 functions we want to instrument.  */
      if (flag_check_pointer_bounds
	  && chkp_instrumentable_p (cfun->decl)
	  /* Even if data may contain pointers we can inline if we copy
	     less than a pointer size.  */
	  && (!tree_fits_uhwi_p (len)
	      || compare_tree_int (len, POINTER_SIZE_UNITS) >= 0))
	return false;

      /* Build accesses at offset zero with a ref-all character type.  */
      off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
							 ptr_mode, true), 0);

      /* If we can perform the copy efficiently with first doing all loads
	 and then all stores inline it that way.  Currently efficiently
	 means that we can load all the memory into a single integer
	 register which is what MOVE_MAX gives us.  */
      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (tree_fits_uhwi_p (len)
	  && compare_tree_int (len, MOVE_MAX) <= 0
	  /* ???  Don't transform copies from strings with known length;
	     this confuses tree-ssa-strlen.c.  This doesn't handle
	     the case in gcc.dg/strlenopt-8.c which is XFAILed for that
	     reason.  */
	  && !c_strlen (src, 2))
	{
	  unsigned ilen = tree_to_uhwi (len);
	  if (pow2p_hwi (ilen))
	    {
	      tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
	      if (type
		  && TYPE_MODE (type) != BLKmode
		  && (GET_MODE_SIZE (TYPE_MODE (type)) * BITS_PER_UNIT
		      == ilen * 8)
		  /* If the destination pointer is not aligned we must be able
		     to emit an unaligned store.  */
		  && (dest_align >= GET_MODE_ALIGNMENT (TYPE_MODE (type))
		      || !SLOW_UNALIGNED_ACCESS (TYPE_MODE (type), dest_align)
		      || (optab_handler (movmisalign_optab, TYPE_MODE (type))
			  != CODE_FOR_nothing)))
		{
		  tree srctype = type;
		  tree desttype = type;
		  if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
		    srctype = build_aligned_type (type, src_align);
		  tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
		  tree tem = fold_const_aggregate_ref (srcmem);
		  if (tem)
		    srcmem = tem;
		  else if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type))
			   && SLOW_UNALIGNED_ACCESS (TYPE_MODE (type),
						     src_align)
			   && (optab_handler (movmisalign_optab,
					      TYPE_MODE (type))
			       == CODE_FOR_nothing))
		    srcmem = NULL_TREE;
		  if (srcmem)
		    {
		      gimple *new_stmt;
		      if (is_gimple_reg_type (TREE_TYPE (srcmem)))
			{
			  new_stmt = gimple_build_assign (NULL_TREE, srcmem);
			  srcmem
			    = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem),
							  new_stmt);
			  gimple_assign_set_lhs (new_stmt, srcmem);
			  gimple_set_vuse (new_stmt, gimple_vuse (stmt));
			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
			}
		      if (dest_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
			desttype = build_aligned_type (type, dest_align);
		      new_stmt
			= gimple_build_assign (fold_build2 (MEM_REF, desttype,
							    dest, off0),
					       srcmem);
		      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
		      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
		      if (gimple_vdef (new_stmt)
			  && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
			SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
		      if (!lhs)
			{
			  gsi_replace (gsi, new_stmt, false);
			  return true;
			}
		      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
		      goto done;
		    }
		}
	    }
	}

      if (endp == 3)
	{
	  /* Both DEST and SRC must be pointer types.
	     ??? This is what old code did.  Is the testing for pointer types
	     really mandatory?

	     If either SRC is readonly or length is 1, we can use memcpy.  */
	  if (!dest_align || !src_align)
	    return false;
	  if (readonly_data_expr (src)
	      || (tree_fits_uhwi_p (len)
		  && (MIN (src_align, dest_align) / BITS_PER_UNIT
		      >= tree_to_uhwi (len))))
	    {
	      tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
	      if (!fn)
		return false;
	      gimple_call_set_fndecl (stmt, fn);
	      gimple_call_set_arg (stmt, 0, dest);
	      gimple_call_set_arg (stmt, 1, src);
	      fold_stmt (gsi);
	      return true;
	    }

	  /* If *src and *dest can't overlap, optimize into memcpy as well.  */
	  if (TREE_CODE (src) == ADDR_EXPR
	      && TREE_CODE (dest) == ADDR_EXPR)
	    {
	      tree src_base, dest_base, fn;
	      HOST_WIDE_INT src_offset = 0, dest_offset = 0;
	      HOST_WIDE_INT maxsize;

	      srcvar = TREE_OPERAND (src, 0);
	      src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
	      if (src_base == NULL)
		src_base = srcvar;
	      destvar = TREE_OPERAND (dest, 0);
	      dest_base = get_addr_base_and_unit_offset (destvar,
							 &dest_offset);
	      if (dest_base == NULL)
		dest_base = destvar;
	      if (tree_fits_uhwi_p (len))
		maxsize = tree_to_uhwi (len);
	      else
		maxsize = -1;
	      if (SSA_VAR_P (src_base)
		  && SSA_VAR_P (dest_base))
		{
		  if (operand_equal_p (src_base, dest_base, 0)
		      && ranges_overlap_p (src_offset, maxsize,
					   dest_offset, maxsize))
		    return false;
		}
	      else if (TREE_CODE (src_base) == MEM_REF
		       && TREE_CODE (dest_base) == MEM_REF)
		{
		  if (! operand_equal_p (TREE_OPERAND (src_base, 0),
					 TREE_OPERAND (dest_base, 0), 0))
		    return false;
		  offset_int off = mem_ref_offset (src_base) + src_offset;
		  if (!wi::fits_shwi_p (off))
		    return false;
		  src_offset = off.to_shwi ();

		  off = mem_ref_offset (dest_base) + dest_offset;
		  if (!wi::fits_shwi_p (off))
		    return false;
		  dest_offset = off.to_shwi ();
		  if (ranges_overlap_p (src_offset, maxsize,
					dest_offset, maxsize))
		    return false;
		}
	      else
		return false;

	      fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
	      if (!fn)
		return false;
	      gimple_call_set_fndecl (stmt, fn);
	      gimple_call_set_arg (stmt, 0, dest);
	      gimple_call_set_arg (stmt, 1, src);
	      fold_stmt (gsi);
	      return true;
	    }

	  /* If the destination and source do not alias optimize into
	     memcpy as well.  */
	  if ((is_gimple_min_invariant (dest)
	       || TREE_CODE (dest) == SSA_NAME)
	      && (is_gimple_min_invariant (src)
		  || TREE_CODE (src) == SSA_NAME))
	    {
	      ao_ref destr, srcr;
	      ao_ref_init_from_ptr_and_size (&destr, dest, len);
	      ao_ref_init_from_ptr_and_size (&srcr, src, len);
	      if (!refs_may_alias_p_1 (&destr, &srcr, false))
		{
		  tree fn;
		  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
		  if (!fn)
		    return false;
		  gimple_call_set_fndecl (stmt, fn);
		  gimple_call_set_arg (stmt, 0, dest);
		  gimple_call_set_arg (stmt, 1, src);
		  fold_stmt (gsi);
		  return true;
		}
	    }

	  return false;
	}

      if (!tree_fits_shwi_p (len))
	return false;
      /* FIXME:
	 This logic loses for arguments like (type *)malloc (sizeof (type)),
	 since we strip the casts of up to VOID return value from malloc.
	 Perhaps we ought to inherit type from non-VOID argument here?  */
      STRIP_NOPS (src);
      STRIP_NOPS (dest);
      if (!POINTER_TYPE_P (TREE_TYPE (src))
	  || !POINTER_TYPE_P (TREE_TYPE (dest)))
	return false;
      /* In the following try to find a type that is most natural to be
	 used for the memcpy source and destination and that allows
	 the most optimization when memcpy is turned into a plain assignment
	 using that type.  In theory we could always use a char[len] type
	 but that only gains us that the destination and source possibly
	 no longer will have their address taken.  */
      /* As we fold (void *)(p + CST) to (void *)p + CST undo this here.  */
      if (TREE_CODE (src) == POINTER_PLUS_EXPR)
	{
	  tree tem = TREE_OPERAND (src, 0);
	  STRIP_NOPS (tem);
	  if (tem != TREE_OPERAND (src, 0))
	    src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
	}
      if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
	{
	  tree tem = TREE_OPERAND (dest, 0);
	  STRIP_NOPS (tem);
	  if (tem != TREE_OPERAND (dest, 0))
	    dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
	}
      srctype = TREE_TYPE (TREE_TYPE (src));
      if (TREE_CODE (srctype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
	{
	  srctype = TREE_TYPE (srctype);
	  STRIP_NOPS (src);
	  src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
	}
      desttype = TREE_TYPE (TREE_TYPE (dest));
      if (TREE_CODE (desttype) == ARRAY_TYPE
	  && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
	{
	  desttype = TREE_TYPE (desttype);
	  STRIP_NOPS (dest);
	  dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
	}
      if (TREE_ADDRESSABLE (srctype)
	  || TREE_ADDRESSABLE (desttype))
	return false;

      /* Make sure we are not copying using a floating-point mode or
	 a type whose size possibly does not match its precision.  */
      if (FLOAT_MODE_P (TYPE_MODE (desttype))
	  || TREE_CODE (desttype) == BOOLEAN_TYPE
	  || TREE_CODE (desttype) == ENUMERAL_TYPE)
	desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
      if (FLOAT_MODE_P (TYPE_MODE (srctype))
	  || TREE_CODE (srctype) == BOOLEAN_TYPE
	  || TREE_CODE (srctype) == ENUMERAL_TYPE)
	srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
      if (!srctype)
	srctype = desttype;
      if (!desttype)
	desttype = srctype;
      if (!srctype)
	return false;

      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (dest_align < TYPE_ALIGN (desttype)
	  || src_align < TYPE_ALIGN (srctype))
	return false;

      destvar = dest;
      STRIP_NOPS (destvar);
      if (TREE_CODE (destvar) == ADDR_EXPR
	  && var_decl_component_p (TREE_OPERAND (destvar, 0))
	  && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
	destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
      else
	destvar = NULL_TREE;

      srcvar = src;
      STRIP_NOPS (srcvar);
      if (TREE_CODE (srcvar) == ADDR_EXPR
	  && var_decl_component_p (TREE_OPERAND (srcvar, 0))
	  && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
	{
	  if (!destvar
	      || src_align >= TYPE_ALIGN (desttype))
	    srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
				  srcvar, off0);
	  else if (!STRICT_ALIGNMENT)
	    {
	      srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
					    src_align);
	      srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
	    }
	  else
	    srcvar = NULL_TREE;
	}
      else
	srcvar = NULL_TREE;

      if (srcvar == NULL_TREE && destvar == NULL_TREE)
	return false;

      if (srcvar == NULL_TREE)
	{
	  STRIP_NOPS (src);
	  if (src_align >= TYPE_ALIGN (desttype))
	    srcvar = fold_build2 (MEM_REF, desttype, src, off0);
	  else
	    {
	      if (STRICT_ALIGNMENT)
		return false;
	      srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
					    src_align);
	      srcvar = fold_build2 (MEM_REF, srctype, src, off0);
	    }
	}
      else if (destvar == NULL_TREE)
	{
	  STRIP_NOPS (dest);
	  if (dest_align >= TYPE_ALIGN (srctype))
	    destvar = fold_build2 (MEM_REF, srctype, dest, off0);
	  else
	    {
	      if (STRICT_ALIGNMENT)
		return false;
	      desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
					     dest_align);
	      destvar = fold_build2 (MEM_REF, desttype, dest, off0);
	    }
	}

      gimple *new_stmt;
      if (is_gimple_reg_type (TREE_TYPE (srcvar)))
	{
	  tree tem = fold_const_aggregate_ref (srcvar);
	  if (tem)
	    srcvar = tem;
	  if (! is_gimple_min_invariant (srcvar))
	    {
	      new_stmt = gimple_build_assign (NULL_TREE, srcvar);
	      srcvar = create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar),
						   new_stmt);
	      gimple_assign_set_lhs (new_stmt, srcvar);
	      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
	      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
	    }
	}
      new_stmt = gimple_build_assign (destvar, srcvar);
      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
      if (gimple_vdef (new_stmt)
	  && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
	SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
      if (!lhs)
	{
	  gsi_replace (gsi, new_stmt, false);
	  return true;
	}
      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
    }

done:
  gimple_seq stmts = NULL;
  if (endp == 0 || endp == 3)
    len = NULL_TREE;
  else if (endp == 2)
    len = gimple_build (&stmts, loc, MINUS_EXPR, TREE_TYPE (len), len,
			ssize_int (1));
  if (endp == 2 || endp == 1)
    {
      len = gimple_convert_to_ptrofftype (&stmts, loc, len);
      dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
			   TREE_TYPE (dest), dest, len);
    }

  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gimple *repl = gimple_build_assign (lhs, dest);
  gsi_replace (gsi, repl, false);
  return true;
}

/* Transform a call to built-in bcmp(a, b, len) at *GSI into one
   to built-in memcmp (a, b, len).  */

static bool
gimple_fold_builtin_bcmp (gimple_stmt_iterator *gsi)
{
  tree fn = builtin_decl_implicit (BUILT_IN_MEMCMP);

  if (!fn)
    return false;

  /* Transform bcmp (a, b, len) into memcmp (a, b, len).  */

  gimple *stmt = gsi_stmt (*gsi);
  tree a = gimple_call_arg (stmt, 0);
  tree b = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  gimple *repl = gimple_build_call (fn, 3, a, b, len);
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}

/* Transform a call to built-in bcopy (src, dest, len) at *GSI into one
   to built-in memmove (dest, src, len).  */

static bool
gimple_fold_builtin_bcopy (gimple_stmt_iterator *gsi)
{
  tree fn = builtin_decl_implicit (BUILT_IN_MEMMOVE);

  if (!fn)
    return false;

  /* bcopy has been removed from POSIX in Issue 7 but Issue 6 specifies
     it's equivalent to memmove (not memcpy).  Transform bcopy (src, dest,
     len) into memmove (dest, src, len).  */

  gimple *stmt = gsi_stmt (*gsi);
  tree src = gimple_call_arg (stmt, 0);
  tree dest = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  gimple_call_set_fntype (as_a <gcall *> (stmt), TREE_TYPE (fn));
  replace_call_with_call_and_fold (gsi, repl);

  return true;
}

/* Transform a call to built-in bzero (dest, len) at *GSI into one
   to built-in memset (dest, 0, len).  */

static bool
gimple_fold_builtin_bzero (gimple_stmt_iterator *gsi)
{
  tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);

  if (!fn)
    return false;

  /* Transform bzero (dest, len) into memset (dest, 0, len).  */

  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree len = gimple_call_arg (stmt, 1);

  gimple_seq seq = NULL;
  gimple *repl = gimple_build_call (fn, 3, dest, integer_zero_node, len);
  gimple_seq_add_stmt_without_update (&seq, repl);
  gsi_replace_with_seq_vops (gsi, seq);
  fold_stmt (gsi);

  return true;
}

/* Fold function call to builtin memset or bzero at *GSI setting the
   memory of size LEN to VAL.  Return whether a simplification was made.  */

static bool
gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree etype;
  unsigned HOST_WIDE_INT length, cval;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
      return true;
    }

  if (! tree_fits_uhwi_p (len))
    return false;

  if (TREE_CODE (c) != INTEGER_CST)
    return false;

  tree dest = gimple_call_arg (stmt, 0);
  tree var = dest;
  if (TREE_CODE (var) != ADDR_EXPR)
    return false;

  var = TREE_OPERAND (var, 0);
  if (TREE_THIS_VOLATILE (var))
    return false;

  etype = TREE_TYPE (var);
  if (TREE_CODE (etype) == ARRAY_TYPE)
    etype = TREE_TYPE (etype);

  if (!INTEGRAL_TYPE_P (etype)
      && !POINTER_TYPE_P (etype))
    return false;

  if (! var_decl_component_p (var))
    return false;

  length = tree_to_uhwi (len);
  if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
      || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
    return false;

  if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
    return false;

  if (integer_zerop (c))
    cval = 0;
  else
    {
      if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
	return false;

      cval = TREE_INT_CST_LOW (c);
      cval &= 0xff;
      cval |= cval << 8;
      cval |= cval << 16;
      cval |= (cval << 31) << 1;
    }

  var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
  gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
  gimple_set_vuse (store, gimple_vuse (stmt));
  tree vdef = gimple_vdef (stmt);
  if (vdef && TREE_CODE (vdef) == SSA_NAME)
    {
      gimple_set_vdef (store, gimple_vdef (stmt));
      SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
    }
  gsi_insert_before (gsi, store, GSI_SAME_STMT);
  if (gimple_call_lhs (stmt))
    {
      gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
      gsi_replace (gsi, asgn, false);
    }
  else
    {
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (gsi);
      gsi_remove (&gsi2, true);
    }

  return true;
}

/* Obtain the minimum and maximum string length or minimum and maximum
   value of ARG in LENGTH[0] and LENGTH[1], respectively.
   If ARG is an SSA name variable, follow its use-def chains.  When
   TYPE == 0, if LENGTH[1] is not equal to the length we determine or
   if we are unable to determine the length or value, return false.
   VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be obtained, 1 for maximum string
   length and 2 for maximum value ARG can have.
   When FUZZY is set and the length of a string cannot be determined,
   the function instead considers as the maximum possible length the
   size of a character array it may refer to.
   Set *FLEXP to true if the range of the string lengths has been
   obtained from the upper bound of an array at the end of a struct.
   Such an array may hold a string that's longer than its upper bound
   due to it being used as a poor-man's flexible array member.  */

static bool
get_range_strlen (tree arg, tree length[2], bitmap *visited, int type,
		  bool fuzzy, bool *flexp)
{
  tree var, val;
  gimple *def_stmt;

  /* The minimum and maximum length.  The MAXLEN pointer stays unchanged
     but MINLEN may be cleared during the execution of the function.  */
  tree *minlen = length;
  tree *const maxlen = length + 1;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      if (TREE_CODE (arg) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
	  && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
	{
	  tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_range_strlen (TREE_OPERAND (aop0, 0),
				     length, visited, type, fuzzy, flexp);
	}

      if (type == 2)
	{
	  val = arg;
	  if (TREE_CODE (val) != INTEGER_CST
	      || tree_int_cst_sgn (val) < 0)
	    return false;
	}
      else
	val = c_strlen (arg, 1);

      if (!val && fuzzy)
	{
	  if (TREE_CODE (arg) == ADDR_EXPR)
	    return get_range_strlen (TREE_OPERAND (arg, 0), length,
				     visited, type, fuzzy, flexp);

	  if (TREE_CODE (arg) == COMPONENT_REF
	      && TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1))) == ARRAY_TYPE)
	    {
	      /* Use the type of the member array to determine the upper
		 bound on the length of the array.  This may be overly
		 optimistic if the array itself isn't NUL-terminated and
		 the caller relies on the subsequent member to contain
		 the NUL.
		 Set *FLEXP to true if the array whose bound is being
		 used is at the end of a struct.  */
	      if (array_at_struct_end_p (arg))
		*flexp = true;

	      arg = TREE_OPERAND (arg, 1);
	      val = TYPE_SIZE_UNIT (TREE_TYPE (arg));
	      if (!val || integer_zerop (val))
		return false;
	      val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
				 integer_one_node);
	      /* Set the minimum size to zero since the string in
		 the array could have zero length.  */
	      *minlen = ssize_int (0);
	    }
	}

      if (!val)
	return false;

      if (minlen
	  && (!*minlen
	      || (type > 0
		  && TREE_CODE (*minlen) == INTEGER_CST
		  && TREE_CODE (val) == INTEGER_CST
		  && tree_int_cst_lt (val, *minlen))))
	*minlen = val;

      if (*maxlen)
	{
	  if (type > 0)
	    {
	      if (TREE_CODE (*maxlen) != INTEGER_CST
		  || TREE_CODE (val) != INTEGER_CST)
		return false;

	      if (tree_int_cst_lt (*maxlen, val))
		*maxlen = val;
	      return true;
	    }
	  else if (simple_cst_equal (val, *maxlen) != 1)
	    return false;
	}

      *maxlen = val;
      return true;
    }

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  */
  if (!*visited)
    *visited = BITMAP_ALLOC (NULL);
  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
      case GIMPLE_ASSIGN:
	/* The RHS of the statement defining VAR must either have a
	   constant length or come from another SSA_NAME with a constant
	   length.  */
	if (gimple_assign_single_p (def_stmt)
	    || gimple_assign_unary_nop_p (def_stmt))
	  {
	    tree rhs = gimple_assign_rhs1 (def_stmt);
	    return get_range_strlen (rhs, length, visited, type, fuzzy, flexp);
	  }
	else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
	  {
	    tree op2 = gimple_assign_rhs2 (def_stmt);
	    tree op3 = gimple_assign_rhs3 (def_stmt);
	    return get_range_strlen (op2, length, visited, type, fuzzy, flexp)
	      && get_range_strlen (op3, length, visited, type, fuzzy, flexp);
	  }
	return false;

      case GIMPLE_PHI:
	{
	  /* All the arguments of the PHI node must have the same constant
	     length.  */
	  unsigned i;

	  for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
	    {
	      tree arg = gimple_phi_arg (def_stmt, i)->def;

	      /* If this PHI has itself as an argument, we cannot
		 determine the string length of this argument.  However,
		 if we can find a constant string length for the other
		 PHI args then we can still be sure that this is a
		 constant string length.  So be optimistic and just
		 continue with the next argument.  */
	      if (arg == gimple_phi_result (def_stmt))
		continue;

	      if (!get_range_strlen (arg, length, visited, type, fuzzy, flexp))
		{
		  if (fuzzy)
		    *maxlen = build_all_ones_cst (size_type_node);
		  else
		    return false;
		}
	    }
	}
	return true;

      default:
	return false;
    }
}

/* Determine the minimum and maximum value or string length that ARG
   refers to and store each in the first two elements of MINMAXLEN.
   For expressions that point to strings of unknown lengths that are
   character arrays, use the upper bound of the array as the maximum
   length.  For example, given an expression like 'x ? array : "xyz"'
   and array declared as 'char array[8]', MINMAXLEN[0] will be set
   to 3 and MINMAXLEN[1] to 7, the longest string that could be
   stored in array.
   Return true if the range of the string lengths has been obtained
   from the upper bound of an array at the end of a struct.  Such
   an array may hold a string that's longer than its upper bound
   due to it being used as a poor-man's flexible array member.  */

bool
get_range_strlen (tree arg, tree minmaxlen[2])
{
  bitmap visited = NULL;

  minmaxlen[0] = NULL_TREE;
  minmaxlen[1] = NULL_TREE;

  bool flexarray = false;
  get_range_strlen (arg, minmaxlen, &visited, 1, true, &flexarray);

  if (visited)
    BITMAP_FREE (visited);

  return flexarray;
}

tree
get_maxval_strlen (tree arg, int type)
{
  bitmap visited = NULL;
  tree len[2] = { NULL_TREE, NULL_TREE };

  bool dummy = false;
  if (!get_range_strlen (arg, len, &visited, type, false, &dummy))
    len[1] = NULL_TREE;
  if (visited)
    BITMAP_FREE (visited);

  return len[1];
}

/* Fold function call to builtin strcpy with arguments DEST and SRC.
   If LEN is not NULL, it represents the length of the string to be
   copied.  Return false if no simplification can be made.  */

static bool
gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
			    tree dest, tree src)
{
  location_t loc = gimple_location (gsi_stmt (*gsi));
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (operand_equal_p (src, dest, 0))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (optimize_function_for_size_p (cfun))
    return false;

  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  tree len = get_maxval_strlen (src, 0);
  if (!len)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}

/* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
   If SLEN is not NULL, it represents the length of the source string.
   Return false if no simplification can be made.  */

static bool
gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
			     tree dest, tree src, tree len)
{
  location_t loc = gimple_location (gsi_stmt (*gsi));
  tree fn;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  /* We can't compare slen with len as constants below if len is not a
     constant.  */
  if (TREE_CODE (len) != INTEGER_CST)
    return false;

  /* Now, we must be passed a constant src ptr parameter.  */
  tree slen = get_maxval_strlen (src, 0);
  if (!slen || TREE_CODE (slen) != INTEGER_CST)
    return false;

  slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

  /* We do not support simplification of this case, though we do
     support it when expanding trees into RTL.  */
  /* FIXME: generate a call to __builtin_memset.  */
  if (tree_int_cst_lt (slen, len))
    return false;

  /* OK, transform into builtin memcpy.  */
  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = force_gimple_operand_gsi (gsi, len, true,
				  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}

/* Fold function call to builtin strchr or strrchr.
   If both arguments are constant, evaluate and fold the result,
   otherwise simplify str(r)chr (str, 0) into str + strlen (str).
   In general strlen is significantly faster than strchr
   due to being a simpler operation.  */
static bool
gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree str = gimple_call_arg (stmt, 0);
  tree c = gimple_call_arg (stmt, 1);
  location_t loc = gimple_location (stmt);
  const char *p;
  char ch;

  if (!gimple_call_lhs (stmt))
    return false;

  if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
    {
      const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);

      if (p1 == NULL)
	{
	  replace_call_with_value (gsi, integer_zero_node);
	  return true;
	}

      tree len = build_int_cst (size_type_node, p1 - p);
      gimple_seq stmts = NULL;
      gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
					      POINTER_PLUS_EXPR, str, len);
      gimple_seq_add_stmt_without_update (&stmts, new_stmt);
      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  if (!integer_zerop (c))
    return false;

  /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size.  */
  if (is_strrchr && optimize_function_for_size_p (cfun))
    {
      tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);

      if (strchr_fn)
	{
	  gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}

      return false;
    }

  tree len;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);

  if (!strlen_fn)
    return false;

  /* Create newstr = strlen (str).  */
  gimple_seq stmts = NULL;
  gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
  gimple_set_location (new_stmt, loc);
  len = create_tmp_reg_or_ssa_name (size_type_node);
  gimple_call_set_lhs (new_stmt, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);

  /* Create (str p+ strlen (str)).  */
  new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
				  POINTER_PLUS_EXPR, str, len);
  gimple_seq_add_stmt_without_update (&stmts, new_stmt);
  gsi_replace_with_seq_vops (gsi, stmts);
  /* gsi now points at the assignment to the lhs, get a
     stmt iterator to the strlen.
     ???  We can't use gsi_for_stmt as that doesn't work when the
     CFG isn't built yet.  */
  gimple_stmt_iterator gsi2 = *gsi;
  gsi_prev (&gsi2);
  fold_stmt (&gsi2);
  return true;
}

/* Fold function call to builtin strstr.
   If both arguments are constant, evaluate and fold the result,
   additionally fold strstr (x, "") into x and strstr (x, "c")
   into strchr (x, 'c').  */
static bool
gimple_fold_builtin_strstr (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree haystack = gimple_call_arg (stmt, 0);
  tree needle = gimple_call_arg (stmt, 1);
  const char *p, *q;

  if (!gimple_call_lhs (stmt))
    return false;

  q = c_getstr (needle);
  if (q == NULL)
    return false;

  if ((p = c_getstr (haystack)))
    {
      const char *r = strstr (p, q);

      if (r == NULL)
	{
	  replace_call_with_value (gsi, integer_zero_node);
	  return true;
	}

      tree len = build_int_cst (size_type_node, r - p);
      gimple_seq stmts = NULL;
      gimple *new_stmt
	= gimple_build_assign (gimple_call_lhs (stmt), POINTER_PLUS_EXPR,
			       haystack, len);
      gimple_seq_add_stmt_without_update (&stmts, new_stmt);
      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* For strstr (x, "") return x.  */
  if (q[0] == '\0')
    {
      replace_call_with_value (gsi, haystack);
      return true;
    }

  /* Transform strstr (x, "c") into strchr (x, 'c').  */
  if (q[1] == '\0')
    {
      tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
      if (strchr_fn)
	{
	  tree c = build_int_cst (integer_type_node, q[0]);
	  gimple *repl = gimple_build_call (strchr_fn, 2, haystack, c);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
    }

  return false;
}

/* Simplify a call to the strcat builtin.  DST and SRC are the arguments
   to the call.

   Return false if no simplification was possible, otherwise replace the
   call and return true.

   The simplified form may be a constant or other expression which
   computes the same value, but in a more efficient manner (including
   calls to other builtin functions).

   The call may contain arguments which need to be evaluated, but
   which are not useful to determine the result of the call.  In
   this case we return a chain of COMPOUND_EXPRs.  The LHS of each
   COMPOUND_EXPR will be an argument which must be evaluated.
   COMPOUND_EXPRs are chained through their RHS.  The RHS of the last
   COMPOUND_EXPR in the chain will contain the tree for the simplified
   form of the builtin function call.  */

static bool
gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);

  const char *p = c_getstr (src);

  /* If the string length is zero, return the dst parameter.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
    return false;

  /* See if we can store by pieces into (dst + strlen(dst)).  */
  tree newdst;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
  tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);

  if (!strlen_fn || !memcpy_fn)
    return false;

  /* If the length of the source string isn't computable don't
     split strcat into strlen and memcpy.  */
  tree len = get_maxval_strlen (src, 0);
  if (! len)
    return false;

  /* Create strlen (dst).  */
  gimple_seq stmts = NULL, stmts2;
  gimple *repl = gimple_build_call (strlen_fn, 1, dst);
  gimple_set_location (repl, loc);
  newdst = create_tmp_reg_or_ssa_name (size_type_node);
  gimple_call_set_lhs (repl, newdst);
  gimple_seq_add_stmt_without_update (&stmts, repl);

  /* Create (dst p+ strlen (dst)).  */
  newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
  newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len,
			build_int_cst (size_type_node, 1));
  len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
  gimple_seq_add_stmt_without_update (&stmts, repl);
  if (gimple_call_lhs (stmt))
    {
      repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      gsi_replace_with_seq_vops (gsi, stmts);
      /* gsi now points at the assignment to the lhs, get a
	 stmt iterator to the memcpy call.
	 ???  We can't use gsi_for_stmt as that doesn't work when the
	 CFG isn't built yet.  */
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (&gsi2);
      fold_stmt (&gsi2);
    }
  else
    {
      gsi_replace_with_seq_vops (gsi, stmts);
      fold_stmt (gsi);
    }
  return true;
}

/* Fold a call to the __strcat_chk builtin FNDECL.  DEST, SRC, and SIZE
   are the arguments to the call.  */

static bool
gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree size = gimple_call_arg (stmt, 2);
  tree fn;
  const char *p;

  p = c_getstr (src);
  /* If the SRC parameter is "", return DEST.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
    return false;

  /* If __builtin_strcat_chk is used, assume strcat is available.  */
  fn = builtin_decl_explicit (BUILT_IN_STRCAT);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 2, dest, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}

/* Simplify a call to the strncat builtin.  */

static bool
gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dst = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  const char *p = c_getstr (src);

  /* If the requested length is zero, or the src parameter string
     length is zero, return the dst parameter.  */
  if (integer_zerop (len) || (p && *p == '\0'))
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  /* If the requested len is greater than or equal to the string
     length, call strcat.  */
  if (TREE_CODE (len) == INTEGER_CST && p
      && compare_tree_int (len, strlen (p)) >= 0)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);

      /* If the replacement _DECL isn't initialized, don't do the
	 transformation.  */
      if (!fn)
	return false;

      gcall *repl = gimple_build_call (fn, 2, dst, src);
      replace_call_with_call_and_fold (gsi, repl);
      return true;
    }

  return false;
}

/* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
   LEN, and SIZE.  */

static bool
gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);
  tree size = gimple_call_arg (stmt, 3);
  tree fn;
  const char *p;

  p = c_getstr (src);
  /* If the SRC parameter is "" or if LEN is 0, return DEST.  */
  if ((p && *p == '\0')
      || integer_zerop (len))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  if (! integer_all_onesp (size))
    {
      tree src_len = c_strlen (src, 1);
      if (src_len
	  && tree_fits_uhwi_p (src_len)
	  && tree_fits_uhwi_p (len)
	  && ! tree_int_cst_lt (len, src_len))
	{
	  /* If LEN >= strlen (SRC), optimize into __strcat_chk.  */
	  fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
	  if (!fn)
	    return false;

	  gimple *repl = gimple_build_call (fn, 3, dest, src, size);
	  replace_call_with_call_and_fold (gsi, repl);
	  return true;
	}
      return false;
    }

  /* If __builtin_strncat_chk is used, assume strncat is available.  */
  fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}

/* Build and append gimple statements to STMTS that would load the first
   character of a memory location identified by STR.  LOC is the location
   of the statement.  */

static tree
gimple_load_first_char (location_t loc, tree str, gimple_seq *stmts)
{
  tree var;

  tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
  tree cst_uchar_ptr_node
    = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
  tree off0 = build_int_cst (cst_uchar_ptr_node, 0);

  tree temp = fold_build2_loc (loc, MEM_REF, cst_uchar_node, str, off0);
  gassign *stmt = gimple_build_assign (NULL_TREE, temp);
  var = create_tmp_reg_or_ssa_name (cst_uchar_node, stmt);

  gimple_assign_set_lhs (stmt, var);
  gimple_seq_add_stmt_without_update (stmts, stmt);

  return var;
}

/* Fold a call to the str{n}{case}cmp builtin pointed to by the GSI
   iterator.  FCODE is the BUILT_IN_* code of the builtin.  */

static bool
gimple_fold_builtin_string_compare (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree callee = gimple_call_fndecl (stmt);
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);

  tree type = integer_type_node;
  tree str1 = gimple_call_arg (stmt, 0);
  tree str2 = gimple_call_arg (stmt, 1);
  tree lhs = gimple_call_lhs (stmt);
  HOST_WIDE_INT length = -1;

  /* Handle strncmp and strncasecmp functions.  */
  if (gimple_call_num_args (stmt) == 3)
    {
      tree len = gimple_call_arg (stmt, 2);
      if (tree_fits_uhwi_p (len))
	length = tree_to_uhwi (len);
    }

  /* If the LEN parameter is zero, return zero.  */
  if (length == 0)
    {
      replace_call_with_value (gsi, integer_zero_node);
      return true;
    }

  /* If ARG1 and ARG2 are the same (and not volatile), return zero.  */
  if (operand_equal_p (str1, str2, 0))
    {
      replace_call_with_value (gsi, integer_zero_node);
      return true;
    }

  const char *p1 = c_getstr (str1);
  const char *p2 = c_getstr (str2);

  /* For known strings, return an immediate value.  */
  if (p1 && p2)
    {
      int r = 0;
      bool known_result = false;

      switch (fcode)
	{
	case BUILT_IN_STRCMP:
	  {
	    r = strcmp (p1, p2);
	    known_result = true;
	    break;
	  }
	case BUILT_IN_STRNCMP:
	  {
	    if (length == -1)
	      break;
	    r = strncmp (p1, p2, length);
	    known_result = true;
	    break;
	  }
	/* The only handleable situation is where the strings are equal
	   (result 0), which is already handled by the operand_equal_p
	   case.  */
	case BUILT_IN_STRCASECMP:
	  break;
	case BUILT_IN_STRNCASECMP:
	  {
	    if (length == -1)
	      break;
	    r = strncmp (p1, p2, length);
	    if (r == 0)
	      known_result = true;
	    break;
	  }
	default:
	  gcc_unreachable ();
	}

      if (known_result)
	{
	  replace_call_with_value (gsi, build_cmp_result (type, r));
	  return true;
	}
    }

  bool nonzero_length = length >= 1
    || fcode == BUILT_IN_STRCMP
    || fcode == BUILT_IN_STRCASECMP;

  location_t loc = gimple_location (stmt);

  /* If the second arg is "", return *(const unsigned char*)arg1.  */
  if (p2 && *p2 == '\0' && nonzero_length)
    {
      gimple_seq stmts = NULL;
      tree var = gimple_load_first_char (loc, str1, &stmts);
      if (lhs)
	{
	  stmt = gimple_build_assign (lhs, NOP_EXPR, var);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);
	}

      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* If the first arg is "", return -*(const unsigned char*)arg2.  */
  if (p1 && *p1 == '\0' && nonzero_length)
    {
      gimple_seq stmts = NULL;
      tree var = gimple_load_first_char (loc, str2, &stmts);

      if (lhs)
	{
	  tree c = create_tmp_reg_or_ssa_name (integer_type_node);
	  stmt = gimple_build_assign (c, NOP_EXPR, var);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);

	  stmt = gimple_build_assign (lhs, NEGATE_EXPR, c);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);
	}

      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  /* If the len parameter is one, return an expression corresponding to
     (*(const unsigned char*)arg1 - *(const unsigned char*)arg2).  */
  if (fcode == BUILT_IN_STRNCMP && length == 1)
    {
      gimple_seq stmts = NULL;
      tree temp1 = gimple_load_first_char (loc, str1, &stmts);
      tree temp2 = gimple_load_first_char (loc, str2, &stmts);

      if (lhs)
	{
	  tree c1 = create_tmp_reg_or_ssa_name (integer_type_node);
	  gassign *convert1 = gimple_build_assign (c1, NOP_EXPR, temp1);
	  gimple_seq_add_stmt_without_update (&stmts, convert1);

	  tree c2 = create_tmp_reg_or_ssa_name (integer_type_node);
	  gassign *convert2 = gimple_build_assign (c2, NOP_EXPR, temp2);
	  gimple_seq_add_stmt_without_update (&stmts, convert2);

	  stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2);
	  gimple_seq_add_stmt_without_update (&stmts, stmt);
	}

      gsi_replace_with_seq_vops (gsi, stmts);
      return true;
    }

  return false;
}
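
/* Example (a sketch, not from the original sources): for unknown
   strings and a length of one,
     strncmp (s1, s2, 1)
   is lowered to the byte difference
     c1 = (int) *(const unsigned char *) s1;
     c2 = (int) *(const unsigned char *) s2;
     lhs = c1 - c2;  */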
/* Fold a call to the memchr builtin pointed to by the GSI iterator.  */

static bool
gimple_fold_builtin_memchr (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  tree arg1 = gimple_call_arg (stmt, 0);
  tree arg2 = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  /* If the LEN parameter is zero, return zero.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
      return true;
    }

  char c;
  if (TREE_CODE (arg2) != INTEGER_CST
      || !tree_fits_uhwi_p (len)
      || !target_char_cst_p (arg2, &c))
    return false;

  unsigned HOST_WIDE_INT length = tree_to_uhwi (len);
  unsigned HOST_WIDE_INT string_length;
  const char *p1 = c_getstr (arg1, &string_length);

  if (p1)
    {
      const char *r = (const char *) memchr (p1, c,
                                             MIN (length, string_length));
      if (r == NULL)
        {
          if (length <= string_length)
            {
              replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
              return true;
            }
        }
      else
        {
          unsigned HOST_WIDE_INT offset = r - p1;
          gimple_seq stmts = NULL;
          if (lhs != NULL_TREE)
            {
              tree offset_cst = build_int_cst (TREE_TYPE (len), offset);
              gassign *stmt = gimple_build_assign (lhs, POINTER_PLUS_EXPR,
                                                   arg1, offset_cst);
              gimple_seq_add_stmt_without_update (&stmts, stmt);
            }
          else
            gimple_seq_add_stmt_without_update (&stmts,
                                                gimple_build_nop ());

          gsi_replace_with_seq_vops (gsi, stmts);
          return true;
        }
    }

  return false;
}
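/* Editorial illustration (an addition by the editor, not part of the
   original source): with a constant haystack and character,

     memchr ("abcd", 'c', 4)  folds to  "abcd" + 2
     memchr ("abcd", 'x', 4)  folds to  (void *) 0

   while a search whose length runs past the known string is left alone,
   since the bytes beyond it are unknown.  */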
/* Fold a call to the fputs builtin.  ARG0 and ARG1 are the arguments
   to the call.  UNLOCKED is true if this is actually a call to
   fputs_unlocked.  The transformation is done only when the return
   value of the call is unused.  Return false if no simplification
   was made.  */

static bool
gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
                           tree arg0, tree arg1,
                           bool unlocked)
{
  gimple *stmt = gsi_stmt (*gsi);

  /* If we're using an unlocked function, assume the other unlocked
     functions exist explicitly.  */
  tree const fn_fputc = (unlocked
                         ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
                         : builtin_decl_implicit (BUILT_IN_FPUTC));
  tree const fn_fwrite = (unlocked
                          ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
                          : builtin_decl_implicit (BUILT_IN_FWRITE));

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt))
    return false;

  /* Get the length of the string passed to fputs.  If the length
     can't be determined, punt.  */
  tree len = get_maxval_strlen (arg0, 0);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return false;

  switch (compare_tree_int (len, 1))
    {
    case -1: /* length is 0, delete the call entirely.  */
      replace_call_with_value (gsi, integer_zero_node);
      return true;

    case 0: /* length is 1, call fputc.  */
      {
        const char *p = c_getstr (arg0);
        if (p != NULL)
          {
            if (!fn_fputc)
              return false;

            gimple *repl = gimple_build_call (fn_fputc, 2,
                                              build_int_cst
                                              (integer_type_node, p[0]), arg1);
            replace_call_with_call_and_fold (gsi, repl);
            return true;
          }
      }
      /* FALLTHROUGH */
    case 1: /* length is greater than 1, call fwrite.  */
      {
        /* If optimizing for size keep fputs.  */
        if (optimize_function_for_size_p (cfun))
          return false;
        /* New argument list transforming fputs(string, stream) to
           fwrite(string, 1, len, stream).  */
        if (!fn_fwrite)
          return false;

        gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
                                          size_one_node, len, arg1);
        replace_call_with_call_and_fold (gsi, repl);
        return true;
      }
    default:
      gcc_unreachable ();
    }
}
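/* Editorial illustration (an addition by the editor, not part of the
   original source), assuming the result of the call is unused:

     fputs ("", f)     ->  call removed
     fputs ("x", f)    ->  fputc ('x', f)
     fputs ("xyz", f)  ->  fwrite ("xyz", 1, 3, f)

   the last fold being skipped when optimizing for size.  */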
/* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   DEST, SRC, LEN, and SIZE are the arguments to the call.
   FCODE is the BUILT_IN_* code of the builtin.  */

static bool
gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
                                tree dest, tree src, tree len, tree size,
                                enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST
     (resp. DEST+LEN for __mempcpy_chk).  */
  if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
    {
      if (fcode != BUILT_IN_MEMPCPY_CHK)
        {
          replace_call_with_value (gsi, dest);
          return true;
        }
      else
        {
          gimple_seq stmts = NULL;
          len = gimple_convert_to_ptrofftype (&stmts, loc, len);
          tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
                                    TREE_TYPE (dest), dest, len);
          gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
          replace_call_with_value (gsi, temp);
          return true;
        }
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (len, 2);
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
        {
          /* If LEN is not constant, try MAXLEN too.
             For MAXLEN only allow optimizing into non-_ocs function
             if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
          if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
            {
              if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
                {
                  /* (void) __mempcpy_chk () can be optimized into
                     (void) __memcpy_chk ().  */
                  fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
                  if (!fn)
                    return false;

                  gimple *repl = gimple_build_call (fn, 4, dest, src,
                                                    len, size);
                  replace_call_with_call_and_fold (gsi, repl);
                  return true;
                }
              return false;
            }
        }
      else
        maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
        return false;
    }

  fn = NULL_TREE;
  /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
     mem{cpy,pcpy,move,set} is available.  */
  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
      break;
    case BUILT_IN_MEMPCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
      break;
    case BUILT_IN_MEMMOVE_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
      break;
    case BUILT_IN_MEMSET_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMSET);
      break;
    default:
      break;
    }

  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
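/* Editorial illustration (an addition by the editor, not part of the
   original source): with a known object size that is provably large
   enough,

     __memcpy_chk (d, s, 32, 64)  ->  memcpy (d, s, 32)

   whereas an unknown SIZE, or SIZE smaller than the length, keeps the
   checking variant.  */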
/* Fold a call to the __st[rp]cpy_chk builtin.
   DEST, SRC, and SIZE are the arguments to the call.
   FCODE is the BUILT_IN_* code of the builtin.  */

static bool
gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
                                tree dest,
                                tree src, tree size,
                                enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree len, fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (src, 1);
  if (! integer_all_onesp (size))
    {
      len = c_strlen (src, 1);
      if (! len || ! tree_fits_uhwi_p (len))
        {
          /* If LEN is not constant, try MAXLEN too.
             For MAXLEN only allow optimizing into non-_ocs function
             if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
          if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
            {
              if (fcode == BUILT_IN_STPCPY_CHK)
                {
                  if (! ignore)
                    return false;

                  /* If return value of __stpcpy_chk is ignored,
                     optimize into __strcpy_chk.  */
                  fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
                  if (!fn)
                    return false;

                  gimple *repl = gimple_build_call (fn, 3, dest, src, size);
                  replace_call_with_call_and_fold (gsi, repl);
                  return true;
                }

              if (! len || TREE_SIDE_EFFECTS (len))
                return false;

              /* If c_strlen returned something, but not a constant,
                 transform __strcpy_chk into __memcpy_chk.  */
              fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
              if (!fn)
                return false;

              gimple_seq stmts = NULL;
              len = gimple_convert (&stmts, loc, size_type_node, len);
              len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
                                  build_int_cst (size_type_node, 1));
              gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
              gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
              replace_call_with_call_and_fold (gsi, repl);
              return true;
            }
        }
      else
        maxlen = len;

      if (! tree_int_cst_lt (maxlen, size))
        return false;
    }

  /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
                              ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 2, dest, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
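/* Editorial illustration (an addition by the editor, not part of the
   original source): for a source string of known length 3 and a
   destination object of size 8,

     __strcpy_chk (d, "abc", 8)  ->  strcpy (d, "abc")

   while a computed but non-constant length leads to
   __memcpy_chk (d, s, len + 1, size) instead.  */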
/* Fold a call to the __st{r,p}ncpy_chk builtin.  DEST, SRC, LEN, and SIZE
   are the arguments to the call.  FCODE is the BUILT_IN_* code of the
   builtin.  */

static bool
gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
                                 tree dest, tree src,
                                 tree len, tree size,
                                 enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
    {
      /* If return value of __stpncpy_chk is ignored,
         optimize into __strncpy_chk.  */
      fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
      if (fn)
        {
          gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
          replace_call_with_call_and_fold (gsi, repl);
          return true;
        }
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (len, 2);
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
        {
          /* If LEN is not constant, try MAXLEN too.
             For MAXLEN only allow optimizing into non-_ocs function
             if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
          if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
            return false;
        }
      else
        maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
        return false;
    }

  /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
                              ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold a call to the builtin stpcpy with arguments DEST and SRC.
   Return false if no simplification can be made.  */

static bool
gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  location_t loc = gimple_location (stmt);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree fn, len, lenp1;

  /* If the result is unused, replace stpcpy with strcpy.  */
  if (gimple_call_lhs (stmt) == NULL_TREE)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
        return false;
      gimple_call_set_fndecl (stmt, fn);
      fold_stmt (gsi);
      return true;
    }

  len = c_strlen (src, 1);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return false;

  if (optimize_function_for_size_p (cfun)
      /* If length is zero it's small enough.  */
      && !integer_zerop (len))
    return false;

  /* If the source has a known length replace stpcpy with memcpy.  */
  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  gimple_seq stmts = NULL;
  tree tem = gimple_convert (&stmts, loc, size_type_node, len);
  lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
                        tem, build_int_cst (size_type_node, 1));
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
  gimple_set_vuse (repl, gimple_vuse (stmt));
  gimple_set_vdef (repl, gimple_vdef (stmt));
  if (gimple_vdef (repl)
      && TREE_CODE (gimple_vdef (repl)) == SSA_NAME)
    SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
  gsi_insert_before (gsi, repl, GSI_SAME_STMT);
  /* Replace the result with dest + len.  */
  stmts = NULL;
  tem = gimple_convert (&stmts, loc, sizetype, len);
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
                                      POINTER_PLUS_EXPR, dest, tem);
  gsi_replace (gsi, ret, false);
  /* Finally fold the memcpy call.  */
  gimple_stmt_iterator gsi2 = *gsi;
  gsi_prev (&gsi2);
  fold_stmt (&gsi2);
  return true;
}
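/* Editorial illustration (an addition by the editor, not part of the
   original source): when strlen (src) folds to the constant 3,

     p = stpcpy (d, src);

   becomes the pair

     memcpy (d, src, 4);   (the copy includes the terminating NUL)
     p = d + 3;

   and a call whose result is unused degrades to plain strcpy.  */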
/* Fold a call to __{,v}snprintf_chk at *GSI.  Return false if a normal
   call should be emitted rather than expanding the function inline.
   FCODE is either BUILT_IN_SNPRINTF_CHK or BUILT_IN_VSNPRINTF_CHK.  */

static bool
gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
                                  enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call.  */
  if (gimple_call_num_args (stmt) < 5)
    return false;

  dest = gimple_call_arg (stmt, 0);
  len = gimple_call_arg (stmt, 1);
  flag = gimple_call_arg (stmt, 2);
  size = gimple_call_arg (stmt, 3);
  fmt = gimple_call_arg (stmt, 4);

  if (! tree_fits_uhwi_p (size))
    return false;

  if (! integer_all_onesp (size))
    {
      tree maxlen = get_maxval_strlen (len, 2);
      if (! tree_fits_uhwi_p (len))
        {
          /* If LEN is not constant, try MAXLEN too.
             For MAXLEN only allow optimizing into non-_ocs function
             if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
          if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
            return false;
        }
      else
        maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
        return false;
    }

  if (!init_target_chars ())
    return false;

  /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      fmt_str = c_getstr (fmt);
      if (fmt_str == NULL)
        return false;
      if (strchr (fmt_str, target_percent) != NULL
          && strcmp (fmt_str, target_percent_s))
        return false;
    }

  /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
     available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
                              ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 5 arguments by 3,
     retaining trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, len);
  gimple_call_set_arg (stmt, 2, fmt);
  for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
/* Fold a call to __{,v}sprintf_chk at *GSI.  Return false if a normal
   call should be emitted rather than expanding the function inline.
   FCODE is either BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK.  */

static bool
gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
                                 enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;
  unsigned nargs = gimple_call_num_args (stmt);

  /* Verify the required arguments in the original call.  */
  if (nargs < 4)
    return false;
  dest = gimple_call_arg (stmt, 0);
  flag = gimple_call_arg (stmt, 1);
  size = gimple_call_arg (stmt, 2);
  fmt = gimple_call_arg (stmt, 3);

  if (! tree_fits_uhwi_p (size))
    return false;

  len = NULL_TREE;

  if (!init_target_chars ())
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str != NULL)
    {
      /* If the format doesn't contain % args or %%, we know the size.  */
      if (strchr (fmt_str, target_percent) == 0)
        {
          if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
            len = build_int_cstu (size_type_node, strlen (fmt_str));
        }
      /* If the format is "%s" and first ... argument is a string literal,
         we know the size too.  */
      else if (fcode == BUILT_IN_SPRINTF_CHK
               && strcmp (fmt_str, target_percent_s) == 0)
        {
          tree arg;

          if (nargs == 5)
            {
              arg = gimple_call_arg (stmt, 4);
              if (POINTER_TYPE_P (TREE_TYPE (arg)))
                {
                  len = c_strlen (arg, 1);
                  if (! len || ! tree_fits_uhwi_p (len))
                    len = NULL_TREE;
                }
            }
        }
    }

  if (! integer_all_onesp (size))
    {
      if (! len || ! tree_int_cst_lt (len, size))
        return false;
    }

  /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      if (fmt_str == NULL)
        return false;
      if (strchr (fmt_str, target_percent) != NULL
          && strcmp (fmt_str, target_percent_s))
        return false;
    }

  /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
                              ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 4 arguments by 2,
     retaining trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, fmt);
  for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
/* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
   ORIG may be null if this is a 2-argument call.  We don't attempt to
   simplify calls with more than 3 arguments.

   Return true if simplification was possible, otherwise false.  */

static bool
gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree fmt = gimple_call_arg (stmt, 1);
  tree orig = NULL_TREE;
  const char *fmt_str = NULL;

  /* Verify the required arguments in the original call.  We deal with two
     types of sprintf() calls: 'sprintf (str, fmt)' and
     'sprintf (dest, "%s", orig)'.  */
  if (gimple_call_num_args (stmt) > 3)
    return false;

  if (gimple_call_num_args (stmt) == 3)
    orig = gimple_call_arg (stmt, 2);

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);

      if (!fn)
        return false;

      /* Don't optimize sprintf (buf, "abc", ptr++).  */
      if (orig)
        return false;

      /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
         'format' is known to contain no % formats.  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, fmt);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
        {
          repl = gimple_build_assign (gimple_call_lhs (stmt),
                                      build_int_cst (integer_type_node,
                                                     strlen (fmt_str)));
          gimple_seq_add_stmt_without_update (&stmts, repl);
          gsi_replace_with_seq_vops (gsi, stmts);
          /* gsi now points at the assignment to the lhs, get a
             stmt iterator to the strcpy call.
             ???  We can't use gsi_for_stmt as that doesn't work when the
             CFG isn't built yet.  */
          gimple_stmt_iterator gsi2 = *gsi;
          gsi_prev (&gsi2);
          fold_stmt (&gsi2);
        }
      else
        {
          gsi_replace_with_seq_vops (gsi, stmts);
          fold_stmt (gsi);
        }
      return true;
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn;
      fn = builtin_decl_implicit (BUILT_IN_STRCPY);

      if (!fn)
        return false;

      /* Don't crash on sprintf (str1, "%s").  */
      if (!orig)
        return false;

      tree orig_len = NULL_TREE;
      if (gimple_call_lhs (stmt))
        {
          orig_len = get_maxval_strlen (orig, 0);
          if (!orig_len)
            return false;
        }

      /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2).  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, orig);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
        {
          if (!useless_type_conversion_p (integer_type_node,
                                          TREE_TYPE (orig_len)))
            orig_len = fold_convert (integer_type_node, orig_len);
          repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
          gimple_seq_add_stmt_without_update (&stmts, repl);
          gsi_replace_with_seq_vops (gsi, stmts);
          /* gsi now points at the assignment to the lhs, get a
             stmt iterator to the strcpy call.
             ???  We can't use gsi_for_stmt as that doesn't work when the
             CFG isn't built yet.  */
          gimple_stmt_iterator gsi2 = *gsi;
          gsi_prev (&gsi2);
          fold_stmt (&gsi2);
        }
      else
        {
          gsi_replace_with_seq_vops (gsi, stmts);
          fold_stmt (gsi);
        }
      return true;
    }
  return false;
}
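/* Editorial illustration (an addition by the editor, not part of the
   original source):

     sprintf (d, "hello")  ->  strcpy (d, "hello")   (result, if used, is 5)
     sprintf (d, "%s", s)  ->  strcpy (d, s)         (a used result requires
                                                      a known strlen (s))  */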
/* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
   FMT, and ORIG.  ORIG may be null if this is a 3-argument call.  We don't
   attempt to simplify calls with more than 4 arguments.

   Return true if simplification was possible, otherwise false.  */

static bool
gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest = gimple_call_arg (stmt, 0);
  tree destsize = gimple_call_arg (stmt, 1);
  tree fmt = gimple_call_arg (stmt, 2);
  tree orig = NULL_TREE;
  const char *fmt_str = NULL;

  if (gimple_call_num_args (stmt) > 4)
    return false;

  if (gimple_call_num_args (stmt) == 4)
    orig = gimple_call_arg (stmt, 3);

  if (!tree_fits_uhwi_p (destsize))
    return false;
  unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
        return false;

      /* Don't optimize snprintf (buf, 4, "abc", ptr++).  */
      if (orig)
        return false;

      /* We could expand this as
         memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
         or to
         memcpy (str, fmt_with_nul_at_cstm1, cst);
         but in the former case that might increase code size
         and in the latter case grow .rodata section too much.
         So punt for now.  */
      size_t len = strlen (fmt_str);
      if (len >= destlen)
        return false;

      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, fmt);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
        {
          repl = gimple_build_assign (gimple_call_lhs (stmt),
                                      build_int_cst (integer_type_node, len));
          gimple_seq_add_stmt_without_update (&stmts, repl);
          gsi_replace_with_seq_vops (gsi, stmts);
          /* gsi now points at the assignment to the lhs, get a
             stmt iterator to the strcpy call.
             ???  We can't use gsi_for_stmt as that doesn't work when the
             CFG isn't built yet.  */
          gimple_stmt_iterator gsi2 = *gsi;
          gsi_prev (&gsi2);
          fold_stmt (&gsi2);
        }
      else
        {
          gsi_replace_with_seq_vops (gsi, stmts);
          fold_stmt (gsi);
        }
      return true;
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
        return false;

      /* Don't crash on snprintf (str1, cst, "%s").  */
      if (!orig)
        return false;

      tree orig_len = get_maxval_strlen (orig, 0);
      if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
        return false;

      /* We could expand this as
         memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
         or to
         memcpy (str1, str2_with_nul_at_cstm1, cst);
         but in the former case that might increase code size
         and in the latter case grow .rodata section too much.
         So punt for now.  */
      if (compare_tree_int (orig_len, destlen) >= 0)
        return false;

      /* Convert snprintf (str1, cst, "%s", str2) into
         strcpy (str1, str2) if strlen (str2) < cst.  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, orig);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
        {
          if (!useless_type_conversion_p (integer_type_node,
                                          TREE_TYPE (orig_len)))
            orig_len = fold_convert (integer_type_node, orig_len);
          repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
          gimple_seq_add_stmt_without_update (&stmts, repl);
          gsi_replace_with_seq_vops (gsi, stmts);
          /* gsi now points at the assignment to the lhs, get a
             stmt iterator to the strcpy call.
             ???  We can't use gsi_for_stmt as that doesn't work when the
             CFG isn't built yet.  */
          gimple_stmt_iterator gsi2 = *gsi;
          gsi_prev (&gsi2);
          fold_stmt (&gsi2);
        }
      else
        {
          gsi_replace_with_seq_vops (gsi, stmts);
          fold_stmt (gsi);
        }
      return true;
    }
  return false;
}
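/* Editorial illustration (an addition by the editor, not part of the
   original source), valid only when the constant destination size
   provably exceeds the string length:

     snprintf (d, 16, "hello")   ->  strcpy (d, "hello")
     snprintf (d, 16, "%s", s)   ->  strcpy (d, s)   if strlen (s) < 16  */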
/* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
   FP, FMT, and ARG are the arguments to the call.  We don't fold calls with
   more than 3 arguments, and ARG may be null in the 2-argument case.

   Return true if a simplification was made.  FCODE is the BUILT_IN_*
   code of the function to be simplified.  */

static bool
gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
                             tree fp, tree fmt, tree arg,
                             enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree fn_fputc, fn_fputs;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt) != NULL_TREE)
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
         unlocked functions exist explicitly.  */
      fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
      fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
    }
  else
    {
      fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
      fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
    }

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use fputs.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
          && arg)
        return false;

      /* If the format specifier was "", fprintf does nothing.  */
      if (fmt_str[0] == '\0')
        {
          replace_call_with_value (gsi, NULL_TREE);
          return true;
        }

      /* When "string" doesn't contain %, replace all cases of
         fprintf (fp, string) with fputs (string, fp).  The fputs
         builtin will take care of special cases like length == 1.  */
      if (fn_fputs)
        {
          gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
          replace_call_with_call_and_fold (gsi, repl);
          return true;
        }
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
    return false;

  /* If the format specifier was "%s", call __builtin_fputs (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_s) == 0)
    {
      if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
        return false;
      if (fn_fputs)
        {
          gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
          replace_call_with_call_and_fold (gsi, repl);
          return true;
        }
    }

  /* If the format specifier was "%c", call __builtin_fputc (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg
          || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
        return false;
      if (fn_fputc)
        {
          gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
          replace_call_with_call_and_fold (gsi, repl);
          return true;
        }
    }

  return false;
}
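/* Editorial illustration (an addition by the editor, not part of the
   original source), assuming the call's return value is unused:

     fprintf (f, "")        ->  call removed
     fprintf (f, "abc")     ->  fputs ("abc", f)
     fprintf (f, "%s", s)   ->  fputs (s, f)
     fprintf (f, "%c", c)   ->  fputc (c, f)  */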
/* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
   FMT and ARG are the arguments to the call; we don't fold cases with
   more than 2 arguments, and ARG may be null if this is a 1-argument case.

   Return true if a simplification was made.  FCODE is the BUILT_IN_*
   code of the function to be simplified.  */

static bool
gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
                            tree arg, enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree fn_putchar, fn_puts, newarg;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt) != NULL_TREE)
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (fcode == BUILT_IN_PRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
         unlocked functions exist explicitly.  */
      fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
      fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
    }
  else
    {
      fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
      fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
    }

  if (!init_target_chars ())
    return false;

  if (strcmp (fmt_str, target_percent_s) == 0
      || strchr (fmt_str, target_percent) == NULL)
    {
      const char *str;

      if (strcmp (fmt_str, target_percent_s) == 0)
        {
          if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
            return false;

          if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
            return false;

          str = c_getstr (arg);
          if (str == NULL)
            return false;
        }
      else
        {
          /* The format specifier doesn't contain any '%' characters.  */
          if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
              && arg)
            return false;
          str = fmt_str;
        }

      /* If the string was "", printf does nothing.  */
      if (str[0] == '\0')
        {
          replace_call_with_value (gsi, NULL_TREE);
          return true;
        }

      /* If the string has length of 1, call putchar.  */
      if (str[1] == '\0')
        {
          /* Given printf("c"), (where c is any one character,)
             convert "c"[0] to an int and pass that to the replacement
             function.  */
          newarg = build_int_cst (integer_type_node, str[0]);
          if (fn_putchar)
            {
              gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
              replace_call_with_call_and_fold (gsi, repl);
              return true;
            }
        }
      else
        {
          /* If the string was "string\n", call puts("string").  */
          size_t len = strlen (str);
          if ((unsigned char)str[len - 1] == target_newline
              && (size_t) (int) len == len)
            {
              char *newstr;
              tree offset_node, string_cst;

              /* Create a NUL-terminated string that's one char shorter
                 than the original, stripping off the trailing '\n'.  */
              newarg = build_string_literal (len, str);
              string_cst = string_constant (newarg, &offset_node);
              gcc_checking_assert (string_cst
                                   && (TREE_STRING_LENGTH (string_cst)
                                       == (int) len)
                                   && integer_zerop (offset_node)
                                   && (unsigned char)
                                      TREE_STRING_POINTER (string_cst)[len - 1]
                                      == target_newline);
              /* build_string_literal creates a new STRING_CST,
                 modify it in place to avoid double copying.  */
              newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
              newstr[len - 1] = '\0';
              if (fn_puts)
                {
                  gcall *repl = gimple_build_call (fn_puts, 1, newarg);
                  replace_call_with_call_and_fold (gsi, repl);
                  return true;
                }
            }
          else
            /* We'd like to arrange to call fputs(string,stdout) here,
               but we need stdout and don't have a way to get it yet.  */
            return false;
        }
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
    return false;

  /* If the format specifier was "%s\n", call __builtin_puts(arg).  */
  else if (strcmp (fmt_str, target_percent_s_newline) == 0)
    {
      if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
        return false;
      if (fn_puts)
        {
          gcall *repl = gimple_build_call (fn_puts, 1, arg);
          replace_call_with_call_and_fold (gsi, repl);
          return true;
        }
    }

  /* If the format specifier was "%c", call __builtin_putchar(arg).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg || ! useless_type_conversion_p (integer_type_node,
                                               TREE_TYPE (arg)))
        return false;
      if (fn_putchar)
        {
          gcall *repl = gimple_build_call (fn_putchar, 1, arg);
          replace_call_with_call_and_fold (gsi, repl);
          return true;
        }
    }

  return false;
}
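/* Editorial illustration (an addition by the editor, not part of the
   original source), assuming the call's return value is unused:

     printf ("")          ->  call removed
     printf ("x")         ->  putchar ('x')
     printf ("abc\n")     ->  puts ("abc")
     printf ("%s\n", s)   ->  puts (s)
     printf ("%c", c)     ->  putchar (c)  */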
/* Fold a call to __builtin_strlen when the string length is known.  */

static bool
gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree len = get_maxval_strlen (gimple_call_arg (stmt, 0), 0);
  if (!len)
    return false;
  len = force_gimple_operand_gsi (gsi, len, true, NULL, true, GSI_SAME_STMT);
  replace_call_with_value (gsi, len);
  return true;
}
/* Fold a call to __builtin_acc_on_device.  */

static bool
gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
{
  /* Defer folding until we know which compiler we're in.  */
  if (symtab->state != EXPANSION)
    return false;

  unsigned val_host = GOMP_DEVICE_HOST;
  unsigned val_dev = GOMP_DEVICE_NONE;

#ifdef ACCEL_COMPILER
  val_host = GOMP_DEVICE_NOT_HOST;
  val_dev = ACCEL_COMPILER_acc_device;
#endif

  location_t loc = gimple_location (gsi_stmt (*gsi));

  tree host_eq = make_ssa_name (boolean_type_node);
  gimple *host_ass = gimple_build_assign
    (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
  gimple_set_location (host_ass, loc);
  gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);

  tree dev_eq = make_ssa_name (boolean_type_node);
  gimple *dev_ass = gimple_build_assign
    (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
  gimple_set_location (dev_ass, loc);
  gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);

  tree result = make_ssa_name (boolean_type_node);
  gimple *result_ass = gimple_build_assign
    (result, BIT_IOR_EXPR, host_eq, dev_eq);
  gimple_set_location (result_ass, loc);
  gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);

  replace_call_with_value (gsi, result);

  return true;
}
/* Fold realloc (0, n) -> malloc (n).  */

static bool
gimple_fold_builtin_realloc (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree arg = gimple_call_arg (stmt, 0);
  tree size = gimple_call_arg (stmt, 1);

  if (operand_equal_p (arg, null_pointer_node, 0))
    {
      tree fn_malloc = builtin_decl_implicit (BUILT_IN_MALLOC);
      if (fn_malloc)
        {
          gcall *repl = gimple_build_call (fn_malloc, 1, size);
          replace_call_with_call_and_fold (gsi, repl);
          return true;
        }
    }
  return false;
}
/* Fold the non-target builtin at *GSI and return whether any simplification
   was made.  */

static bool
gimple_fold_builtin (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
  tree callee = gimple_call_fndecl (stmt);

  /* Give up for always_inline inline builtins until they are
     inlined.  */
  if (avoid_folding_inline_builtin (callee))
    return false;

  unsigned n = gimple_call_num_args (stmt);
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
  switch (fcode)
    {
    case BUILT_IN_BCMP:
      return gimple_fold_builtin_bcmp (gsi);
    case BUILT_IN_BCOPY:
      return gimple_fold_builtin_bcopy (gsi);
    case BUILT_IN_BZERO:
      return gimple_fold_builtin_bzero (gsi);

    case BUILT_IN_MEMSET:
      return gimple_fold_builtin_memset (gsi,
                                         gimple_call_arg (stmt, 1),
                                         gimple_call_arg (stmt, 2));
    case BUILT_IN_MEMCPY:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
                                            gimple_call_arg (stmt, 1), 0);
    case BUILT_IN_MEMPCPY:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
                                            gimple_call_arg (stmt, 1), 1);
    case BUILT_IN_MEMMOVE:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
                                            gimple_call_arg (stmt, 1), 3);
    case BUILT_IN_SPRINTF_CHK:
    case BUILT_IN_VSPRINTF_CHK:
      return gimple_fold_builtin_sprintf_chk (gsi, fcode);
    case BUILT_IN_STRCAT_CHK:
      return gimple_fold_builtin_strcat_chk (gsi);
    case BUILT_IN_STRNCAT_CHK:
      return gimple_fold_builtin_strncat_chk (gsi);
    case BUILT_IN_STRLEN:
      return gimple_fold_builtin_strlen (gsi);
    case BUILT_IN_STRCPY:
      return gimple_fold_builtin_strcpy (gsi,
                                         gimple_call_arg (stmt, 0),
                                         gimple_call_arg (stmt, 1));
    case BUILT_IN_STRNCPY:
      return gimple_fold_builtin_strncpy (gsi,
                                          gimple_call_arg (stmt, 0),
                                          gimple_call_arg (stmt, 1),
                                          gimple_call_arg (stmt, 2));
    case BUILT_IN_STRCAT:
      return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
                                         gimple_call_arg (stmt, 1));
    case BUILT_IN_STRNCAT:
      return gimple_fold_builtin_strncat (gsi);
    case BUILT_IN_INDEX:
    case BUILT_IN_STRCHR:
      return gimple_fold_builtin_strchr (gsi, false);
    case BUILT_IN_RINDEX:
    case BUILT_IN_STRRCHR:
      return gimple_fold_builtin_strchr (gsi, true);
    case BUILT_IN_STRSTR:
      return gimple_fold_builtin_strstr (gsi);
    case BUILT_IN_STRCMP:
    case BUILT_IN_STRCASECMP:
    case BUILT_IN_STRNCMP:
    case BUILT_IN_STRNCASECMP:
      return gimple_fold_builtin_string_compare (gsi);
    case BUILT_IN_MEMCHR:
      return gimple_fold_builtin_memchr (gsi);
    case BUILT_IN_FPUTS:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
                                        gimple_call_arg (stmt, 1), false);
    case BUILT_IN_FPUTS_UNLOCKED:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
                                        gimple_call_arg (stmt, 1), true);
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      return gimple_fold_builtin_memory_chk (gsi,
                                             gimple_call_arg (stmt, 0),
                                             gimple_call_arg (stmt, 1),
                                             gimple_call_arg (stmt, 2),
                                             gimple_call_arg (stmt, 3),
                                             fcode);
    case BUILT_IN_STPCPY:
      return gimple_fold_builtin_stpcpy (gsi);
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      return gimple_fold_builtin_stxcpy_chk (gsi,
                                             gimple_call_arg (stmt, 0),
                                             gimple_call_arg (stmt, 1),
                                             gimple_call_arg (stmt, 2),
                                             fcode);
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      return gimple_fold_builtin_stxncpy_chk (gsi,
                                              gimple_call_arg (stmt, 0),
                                              gimple_call_arg (stmt, 1),
                                              gimple_call_arg (stmt, 2),
                                              gimple_call_arg (stmt, 3),
                                              fcode);
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      return gimple_fold_builtin_snprintf_chk (gsi, fcode);

    case BUILT_IN_FPRINTF:
    case BUILT_IN_FPRINTF_UNLOCKED:
    case BUILT_IN_VFPRINTF:
      if (n == 2 || n == 3)
        return gimple_fold_builtin_fprintf (gsi,
                                            gimple_call_arg (stmt, 0),
                                            gimple_call_arg (stmt, 1),
                                            n == 3
                                            ? gimple_call_arg (stmt, 2)
                                            : NULL_TREE,
                                            fcode);
      break;
    case BUILT_IN_FPRINTF_CHK:
    case BUILT_IN_VFPRINTF_CHK:
      if (n == 3 || n == 4)
        return gimple_fold_builtin_fprintf (gsi,
                                            gimple_call_arg (stmt, 0),
                                            gimple_call_arg (stmt, 2),
                                            n == 4
                                            ? gimple_call_arg (stmt, 3)
                                            : NULL_TREE,
                                            fcode);
      break;
    case BUILT_IN_PRINTF:
    case BUILT_IN_PRINTF_UNLOCKED:
    case BUILT_IN_VPRINTF:
      if (n == 1 || n == 2)
        return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
                                           n == 2
                                           ? gimple_call_arg (stmt, 1)
                                           : NULL_TREE, fcode);
      break;
    case BUILT_IN_PRINTF_CHK:
    case BUILT_IN_VPRINTF_CHK:
      if (n == 2 || n == 3)
        return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
                                           n == 3
                                           ? gimple_call_arg (stmt, 2)
                                           : NULL_TREE, fcode);
      break;
    case BUILT_IN_ACC_ON_DEVICE:
      return gimple_fold_builtin_acc_on_device (gsi,
                                                gimple_call_arg (stmt, 0));
    case BUILT_IN_REALLOC:
      return gimple_fold_builtin_realloc (gsi);

    default:;
    }

  /* Try the generic builtin folder.  */
  bool ignore = (gimple_call_lhs (stmt) == NULL);
  tree result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
        STRIP_NOPS (result);
      else
        result = fold_convert (gimple_call_return_type (stmt), result);
      if (!update_call_from_tree (gsi, result))
        gimplify_and_update_call_from_tree (gsi, result);
      return true;
    }

  return false;
}
/* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
   function calls to constants, where possible.  */

static tree
fold_internal_goacc_dim (const gimple *call)
{
  int axis = oacc_get_ifn_dim_arg (call);
  int size = oacc_get_fn_dim_size (current_function_decl, axis);
  bool is_pos = gimple_call_internal_fn (call) == IFN_GOACC_DIM_POS;
  tree result = NULL_TREE;

  /* If the size is 1, or we only want the size and it is not dynamic,
     we know the answer.  */
  if (size == 1 || (!is_pos && size))
    {
      tree type = TREE_TYPE (gimple_call_lhs (call));
      result = build_int_cst (type, size - is_pos);
    }

  return result;
}
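/* Editorial illustration (an addition by the editor, not part of the
   original source): in an OpenACC offload function whose worker
   dimension was compiled with a fixed size of 1, both
   GOACC_DIM_SIZE (worker) and GOACC_DIM_POS (worker) fold to constants
   (1 and 0 respectively); a dynamic size, represented as 0, blocks the
   GOACC_DIM_SIZE fold.  */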
/* Return true if stmt is __atomic_compare_exchange_N call which is suitable
   for conversion into ATOMIC_COMPARE_EXCHANGE if the second argument is
   &var where var is only addressable because of such calls.  */

bool
optimize_atomic_compare_exchange_p (gimple *stmt)
{
  if (gimple_call_num_args (stmt) != 6
      || !flag_inline_atomics
      || !optimize
      || sanitize_flags_p (SANITIZE_THREAD | SANITIZE_ADDRESS)
      || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
      || !gimple_vdef (stmt)
      || !gimple_vuse (stmt))
    return false;

  tree fndecl = gimple_call_fndecl (stmt);
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
      break;
    default:
      return false;
    }

  tree expected = gimple_call_arg (stmt, 1);
  if (TREE_CODE (expected) != ADDR_EXPR
      || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
    return false;

  tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
  if (!is_gimple_reg_type (etype)
      || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
      || TREE_THIS_VOLATILE (etype)
      || VECTOR_TYPE_P (etype)
      || TREE_CODE (etype) == COMPLEX_TYPE
      /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
         might not preserve all the bits.  See PR71716.  */
      || SCALAR_FLOAT_TYPE_P (etype)
      || TYPE_PRECISION (etype) != GET_MODE_BITSIZE (TYPE_MODE (etype)))
    return false;

  tree weak = gimple_call_arg (stmt, 3);
  if (!integer_zerop (weak) && !integer_onep (weak))
    return false;

  tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
  machine_mode mode = TYPE_MODE (itype);

  if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
      == CODE_FOR_nothing
      && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
    return false;

  if (int_size_in_bytes (etype) != GET_MODE_SIZE (mode))
    return false;

  return true;
}
/* Fold
     r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
   into
     _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
     i = IMAGPART_EXPR <t>;
     r = (_Bool) i;
     e = REALPART_EXPR <t>;  */

void
fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree fndecl = gimple_call_fndecl (stmt);
  tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
  tree ctype = build_complex_type (itype);
  tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
  bool throws = false;
  edge e = NULL;
  gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
                                   expected);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);
  gimple_stmt_iterator gsiret = gsi_for_stmt (g);
  if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
    {
      g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
                               build1 (VIEW_CONVERT_EXPR, itype,
                                       gimple_assign_lhs (g)));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }
  int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
             + int_size_in_bytes (itype);
  g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
                                  gimple_call_arg (stmt, 0),
                                  gimple_assign_lhs (g),
                                  gimple_call_arg (stmt, 2),
                                  build_int_cst (integer_type_node, flag),
                                  gimple_call_arg (stmt, 4),
                                  gimple_call_arg (stmt, 5));
  tree lhs = make_ssa_name (ctype);
  gimple_call_set_lhs (g, lhs);
  gimple_set_vdef (g, gimple_vdef (stmt));
  gimple_set_vuse (g, gimple_vuse (stmt));
  SSA_NAME_DEF_STMT (gimple_vdef (g)) = g;
  tree oldlhs = gimple_call_lhs (stmt);
  if (stmt_can_throw_internal (stmt))
    {
      throws = true;
      e = find_fallthru_edge (gsi_bb (*gsi)->succs);
    }
  gimple_call_set_nothrow (as_a <gcall *> (g),
                           gimple_call_nothrow_p (as_a <gcall *> (stmt)));
  gimple_call_set_lhs (stmt, NULL_TREE);
  gsi_replace (gsi, g, true);
  if (oldlhs)
    {
      g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
                               build1 (IMAGPART_EXPR, itype, lhs));
      if (throws)
        {
          gsi_insert_on_edge_immediate (e, g);
          *gsi = gsi_for_stmt (g);
        }
      else
        gsi_insert_after (gsi, g, GSI_NEW_STMT);
      g = gimple_build_assign (oldlhs, NOP_EXPR, gimple_assign_lhs (g));
      gsi_insert_after (gsi, g, GSI_NEW_STMT);
    }
  g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
                           build1 (REALPART_EXPR, itype, lhs));
  if (throws && oldlhs == NULL_TREE)
    {
      gsi_insert_on_edge_immediate (e, g);
      *gsi = gsi_for_stmt (g);
    }
  else
    gsi_insert_after (gsi, g, GSI_NEW_STMT);
  if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
    {
      g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
                               VIEW_CONVERT_EXPR,
                               build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
                                       gimple_assign_lhs (g)));
      gsi_insert_after (gsi, g, GSI_NEW_STMT);
    }
  g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
  gsi_insert_after (gsi, g, GSI_NEW_STMT);
  *gsi = gsiret;
}
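/* Editorial illustration (an addition by the editor, not part of the
   original source): for

     ok = __atomic_compare_exchange_4 (p, &e, d, 0, 5, 5);

   the internal-call flag operand is 0 * 256 + 4 == 4, and the folded
   sequence loads E, issues the IFN_ATOMIC_COMPARE_EXCHANGE call that
   yields a _Complex value, stores its REALPART back into E, and
   narrows its IMAGPART into OK.  */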
/* Return true if ARG0 CODE ARG1 in infinite signed precision operation
   doesn't fit into TYPE.  The test for overflow should be regardless of
   -fwrapv, and even for unsigned types.  */

bool
arith_overflowed_p (enum tree_code code, const_tree type,
                    const_tree arg0, const_tree arg1)
{
  typedef FIXED_WIDE_INT (WIDE_INT_MAX_PRECISION * 2) widest2_int;
  typedef generic_wide_int <wi::extended_tree <WIDE_INT_MAX_PRECISION * 2> >
    widest2_int_cst;
  widest2_int warg0 = widest2_int_cst (arg0);
  widest2_int warg1 = widest2_int_cst (arg1);
  widest2_int wres;
  switch (code)
    {
    case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
    case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
    case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
    default: gcc_unreachable ();
    }
  signop sign = TYPE_SIGN (type);
  if (sign == UNSIGNED && wi::neg_p (wres))
    return true;
  return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
}
/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.  */

static bool
gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree callee;
  bool changed = false;
  unsigned i;

  /* Fold *& in call arguments.  */
  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
      {
        tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
        if (tmp)
          {
            gimple_call_set_arg (stmt, i, tmp);
            changed = true;
          }
      }

  /* Check for virtual calls that became direct calls.  */
  callee = gimple_call_fn (stmt);
  if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
    {
      if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
        {
          if (dump_file && virtual_method_call_p (callee)
              && !possible_polymorphic_call_target_p
                    (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
                                                     (OBJ_TYPE_REF_EXPR (callee)))))
            {
              fprintf (dump_file,
                       "Type inheritance inconsistent devirtualization of ");
              print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
              fprintf (dump_file, " to ");
              print_generic_expr (dump_file, callee, TDF_SLIM);
              fprintf (dump_file, "\n");
            }

          gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
          changed = true;
        }
      else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
        {
          bool final;
          vec <cgraph_node *> targets
            = possible_polymorphic_call_targets (callee, stmt, &final);
          if (final && targets.length () <= 1 && dbg_cnt (devirt))
            {
              tree lhs = gimple_call_lhs (stmt);
              if (dump_enabled_p ())
                {
                  location_t loc = gimple_location_safe (stmt);
                  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
                                   "folding virtual function call to %s\n",
                                   targets.length () == 1
                                   ? targets[0]->name ()
                                   : "__builtin_unreachable");
                }
              if (targets.length () == 1)
                {
                  tree fndecl = targets[0]->decl;
                  gimple_call_set_fndecl (stmt, fndecl);
                  changed = true;
                  /* If changing the call to __cxa_pure_virtual
                     or similar noreturn function, adjust gimple_call_fntype
                     too.  */
                  if (gimple_call_noreturn_p (stmt)
                      && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
                      && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
                      && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
                          == void_type_node))
                    gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
                  /* If the call becomes noreturn, remove the lhs.  */
                  if (lhs
                      && gimple_call_noreturn_p (stmt)
                      && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
                          || should_remove_lhs_p (lhs)))
                    {
                      if (TREE_CODE (lhs) == SSA_NAME)
                        {
                          tree var = create_tmp_var (TREE_TYPE (lhs));
                          tree def = get_or_create_ssa_default_def (cfun, var);
                          gimple *new_stmt = gimple_build_assign (lhs, def);
                          gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
                        }
                      gimple_call_set_lhs (stmt, NULL_TREE);
                    }
                  maybe_remove_unused_call_args (cfun, stmt);
                }
              else
                {
                  tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
                  gimple *new_stmt = gimple_build_call (fndecl, 0);
                  gimple_set_location (new_stmt, gimple_location (stmt));
                  if (lhs && TREE_CODE (lhs) == SSA_NAME)
                    {
                      tree var = create_tmp_var (TREE_TYPE (lhs));
                      tree def = get_or_create_ssa_default_def (cfun, var);

                      /* To satisfy the condition for
                         cgraph_update_edges_for_call_stmt_node,
                         we need to preserve the GIMPLE_CALL statement
                         at the position of the GSI iterator.  */
                      update_call_from_tree (gsi, def);
                      gsi_insert_before (gsi, new_stmt, GSI_NEW_STMT);
                    }
                  else
                    {
                      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
                      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
                      gsi_replace (gsi, new_stmt, false);
                    }
                  return true;
                }
            }
        }
    }

  /* Check for indirect calls that became direct calls, and then
     no longer require a static chain.  */
  if (gimple_call_chain (stmt))
    {
      tree fn = gimple_call_fndecl (stmt);
      if (fn && !DECL_STATIC_CHAIN (fn))
        {
          gimple_call_set_chain (stmt, NULL);
          changed = true;
        }
      else
        {
          tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
          if (tmp)
            {
              gimple_call_set_chain (stmt, tmp);
              changed = true;
            }
        }
    }

  if (inplace)
    return changed;

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    {
      if (gimple_fold_builtin (gsi))
        changed = true;
    }
  else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
    changed |= targetm.gimple_fold_builtin (gsi);
  else if (gimple_call_internal_p (stmt))
    {
      enum tree_code subcode = ERROR_MARK;
      tree result = NULL_TREE;
      bool cplx_result = false;
      tree overflow = NULL_TREE;
      switch (gimple_call_internal_fn (stmt))
        {
        case IFN_BUILTIN_EXPECT:
          result = fold_builtin_expect (gimple_location (stmt),
                                        gimple_call_arg (stmt, 0),
                                        gimple_call_arg (stmt, 1),
                                        gimple_call_arg (stmt, 2));
          break;
        case IFN_UBSAN_OBJECT_SIZE:
          if (integer_all_onesp (gimple_call_arg (stmt, 2))
              || (TREE_CODE (gimple_call_arg (stmt, 1)) == INTEGER_CST
                  && TREE_CODE (gimple_call_arg (stmt, 2)) == INTEGER_CST
                  && tree_int_cst_le (gimple_call_arg (stmt, 1),
                                      gimple_call_arg (stmt, 2))))
            {
              gsi_replace (gsi, gimple_build_nop (), false);
              unlink_stmt_vdef (stmt);
              release_defs (stmt);
              return true;
            }
          break;
        case IFN_GOACC_DIM_SIZE:
        case IFN_GOACC_DIM_POS:
          result = fold_internal_goacc_dim (stmt);
          break;
        case IFN_UBSAN_CHECK_ADD:
          subcode = PLUS_EXPR;
          break;
        case IFN_UBSAN_CHECK_SUB:
          subcode = MINUS_EXPR;
          break;
        case IFN_UBSAN_CHECK_MUL:
          subcode = MULT_EXPR;
          break;
        case IFN_ADD_OVERFLOW:
          subcode = PLUS_EXPR;
          cplx_result = true;
          break;
        case IFN_SUB_OVERFLOW:
          subcode = MINUS_EXPR;
          cplx_result = true;
          break;
        case IFN_MUL_OVERFLOW:
          subcode = MULT_EXPR;
          cplx_result = true;
          break;
        default:
          break;
        }
      if (subcode != ERROR_MARK)
        {
          tree arg0 = gimple_call_arg (stmt, 0);
          tree arg1 = gimple_call_arg (stmt, 1);
          tree type = TREE_TYPE (arg0);
          if (cplx_result)
            {
              tree lhs = gimple_call_lhs (stmt);
              if (lhs == NULL_TREE)
                type = NULL_TREE;
              else
                type = TREE_TYPE (TREE_TYPE (lhs));
            }
          if (type == NULL_TREE)
            ;
          /* x = y + 0; x = y - 0; x = y * 0; */
          else if (integer_zerop (arg1))
            result = subcode == MULT_EXPR ? integer_zero_node : arg0;
          /* x = 0 + y; x = 0 * y; */
          else if (subcode != MINUS_EXPR && integer_zerop (arg0))
            result = subcode == MULT_EXPR ? integer_zero_node : arg1;
          /* x = y - y; */
          else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
            result = integer_zero_node;
          /* x = y * 1; x = 1 * y; */
          else if (subcode == MULT_EXPR && integer_onep (arg1))
            result = arg0;
          else if (subcode == MULT_EXPR && integer_onep (arg0))
            result = arg1;
          else if (TREE_CODE (arg0) == INTEGER_CST
                   && TREE_CODE (arg1) == INTEGER_CST)
            {
              if (cplx_result)
                result = int_const_binop (subcode, fold_convert (type, arg0),
                                          fold_convert (type, arg1));
              else
                result = int_const_binop (subcode, arg0, arg1);
              if (result && arith_overflowed_p (subcode, type, arg0, arg1))
                {
                  if (cplx_result)
                    overflow = build_one_cst (type);
                  else
                    result = NULL_TREE;
                }
            }
          if (result)
            {
              if (result == integer_zero_node)
                result = build_zero_cst (type);
              else if (cplx_result && TREE_TYPE (result) != type)
                {
                  if (TREE_CODE (result) == INTEGER_CST)
                    {
                      if (arith_overflowed_p (PLUS_EXPR, type, result,
                                              integer_zero_node))
                        overflow = build_one_cst (type);
                    }
                  else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
                            && TYPE_UNSIGNED (type))
                           || (TYPE_PRECISION (type)
                               < (TYPE_PRECISION (TREE_TYPE (result))
                                  + (TYPE_UNSIGNED (TREE_TYPE (result))
                                     && !TYPE_UNSIGNED (type)))))
                    result = NULL_TREE;
                  if (result)
                    result = fold_convert (type, result);
                }
            }
        }

      if (result)
        {
          if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
            result = drop_tree_overflow (result);
          if (cplx_result)
            {
              if (overflow == NULL_TREE)
                overflow = build_zero_cst (TREE_TYPE (result));
              tree ctype = build_complex_type (TREE_TYPE (result));
              if (TREE_CODE (result) == INTEGER_CST
                  && TREE_CODE (overflow) == INTEGER_CST)
                result = build_complex (ctype, result, overflow);
              else
                result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
                                     ctype, result, overflow);
            }
          if (!update_call_from_tree (gsi, result))
            gimplify_and_update_call_from_tree (gsi, result);
          changed = true;
        }
    }

  return changed;
}
/* Return true if NAME has a use on STMT.  */

static bool
has_use_on_stmt (tree name, gimple *stmt)
{
  imm_use_iterator iter;
  use_operand_p use_p;
  FOR_EACH_IMM_USE_FAST (use_p, iter, name)
    if (USE_STMT (use_p) == stmt)
      return true;
  return false;
}
4056 /* Worker for fold_stmt_1 dispatch to pattern based folding with
4059 Replaces *GSI with the simplification result in RCODE and OPS
4060 and the associated statements in *SEQ. Does the replacement
4061 according to INPLACE and returns true if the operation succeeded. */
4064 replace_stmt_with_simplification (gimple_stmt_iterator
*gsi
,
4065 code_helper rcode
, tree
*ops
,
4066 gimple_seq
*seq
, bool inplace
)
4068 gimple
*stmt
= gsi_stmt (*gsi
);
4070 /* Play safe and do not allow abnormals to be mentioned in
4071 newly created statements. See also maybe_push_res_to_seq.
     As an exception allow such uses if there was a use of the
     same SSA name on the old stmt.  */
  if ((TREE_CODE (ops[0]) == SSA_NAME
       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0])
       && !has_use_on_stmt (ops[0], stmt))
      || (ops[1]
	  && TREE_CODE (ops[1]) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1])
	  && !has_use_on_stmt (ops[1], stmt))
      || (ops[2]
	  && TREE_CODE (ops[2]) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2])
	  && !has_use_on_stmt (ops[2], stmt))
      || (COMPARISON_CLASS_P (ops[0])
	  && ((TREE_CODE (TREE_OPERAND (ops[0], 0)) == SSA_NAME
	       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 0))
	       && !has_use_on_stmt (TREE_OPERAND (ops[0], 0), stmt))
	      || (TREE_CODE (TREE_OPERAND (ops[0], 1)) == SSA_NAME
		  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 1))
		  && !has_use_on_stmt (TREE_OPERAND (ops[0], 1), stmt)))))
    return false;

  /* Don't insert new statements when INPLACE is true, even if we could
     reuse STMT for the final statement.  */
  if (inplace && !gimple_seq_empty_p (*seq))
    return false;

  if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
    {
      gcc_assert (rcode.is_tree_code ());
      if (TREE_CODE_CLASS ((enum tree_code) rcode) == tcc_comparison
	  /* GIMPLE_CONDs condition may not throw.  */
	  && (!flag_exceptions
	      || !cfun->can_throw_non_call_exceptions
	      || !operation_could_trap_p (rcode,
					  FLOAT_TYPE_P (TREE_TYPE (ops[0])),
					  false, NULL_TREE)))
	gimple_cond_set_condition (cond_stmt, rcode, ops[0], ops[1]);
      else if (rcode == SSA_NAME)
	gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
				   build_zero_cst (TREE_TYPE (ops[0])));
      else if (rcode == INTEGER_CST)
	{
	  if (integer_zerop (ops[0]))
	    gimple_cond_make_false (cond_stmt);
	  else
	    gimple_cond_make_true (cond_stmt);
	}
      else if (!inplace)
	{
	  tree res = maybe_push_res_to_seq (rcode, boolean_type_node,
					    ops, seq);
	  if (!res)
	    return false;
	  gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
				     build_zero_cst (TREE_TYPE (res)));
	}
      else
	return false;
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (is_gimple_assign (stmt)
	   && rcode.is_tree_code ())
    {
      if (!inplace
	  || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (rcode))
	{
	  maybe_build_generic_op (rcode,
				  TREE_TYPE (gimple_assign_lhs (stmt)), ops);
	  gimple_assign_set_rhs_with_ops (gsi, rcode, ops[0], ops[1], ops[2]);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      if (!gimple_seq_empty_p (*seq))
		print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	      print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
	    }
	  gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
	  return true;
	}
    }
  else if (rcode.is_fn_code ()
	   && gimple_call_combined_fn (stmt) == rcode)
    {
      unsigned i;
      for (i = 0; i < gimple_call_num_args (stmt); ++i)
	{
	  gcc_assert (ops[i] != NULL_TREE);
	  gimple_call_set_arg (stmt, i, ops[i]);
	}
      if (i < 3)
	gcc_assert (ops[i] == NULL_TREE);
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (!inplace)
    {
      if (gimple_has_lhs (stmt))
	{
	  tree lhs = gimple_get_lhs (stmt);
	  if (!maybe_push_res_to_seq (rcode, TREE_TYPE (lhs),
				      ops, seq, lhs))
	    return false;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	    }
	  gsi_replace_with_seq_vops (gsi, *seq);
	  return true;
	}
      else
	gcc_unreachable ();
    }

  return false;
}
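/* For example (an illustrative sketch; the SSA names below are made up):
   if gimple_simplify reduces the condition of

       if (_5 != 0)

   to the single SSA name _7, the GIMPLE_COND path above rewrites the
   statement to "if (_7 != 0)" via gimple_cond_set_condition; if it
   reduces it to the constant 0, the condition is instead turned into
   gimple_cond_make_false.  New defining statements are only emitted
   (through SEQ) when INPLACE is false.  */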
/* Canonicalize MEM_REFs invariant address operand after propagation.  */

static bool
maybe_canonicalize_mem_ref_addr (tree *t)
{
  bool res = false;

  if (TREE_CODE (*t) == ADDR_EXPR)
    t = &TREE_OPERAND (*t, 0);

  /* The C and C++ frontends use an ARRAY_REF for indexing with their
     generic vector extension.  The actual vector referenced is
     view-converted to an array type for this purpose.  If the index
     is constant the canonical representation in the middle-end is a
     BIT_FIELD_REF so re-write the former to the latter here.  */
  if (TREE_CODE (*t) == ARRAY_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
      && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
      && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
    {
      tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
      if (VECTOR_TYPE_P (vtype))
	{
	  tree low = array_ref_low_bound (*t);
	  if (TREE_CODE (low) == INTEGER_CST)
	    {
	      if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
		{
		  widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
					    wi::to_widest (low));
		  idx = wi::mul (idx, wi::to_widest
					(TYPE_SIZE (TREE_TYPE (*t))));
		  widest_int ext
		    = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
		  if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
		    {
		      *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
				       TREE_TYPE (*t),
				       TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
				       TYPE_SIZE (TREE_TYPE (*t)),
				       wide_int_to_tree (sizetype, idx));
		      res = true;
		    }
		}
	    }
	}
    }

  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
     of invariant addresses into a SSA name MEM_REF address.  */
  if (TREE_CODE (*t) == MEM_REF
      || TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (*t, 0);
      if (TREE_CODE (addr) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
	      || handled_component_p (TREE_OPERAND (addr, 0))))
	{
	  tree base;
	  HOST_WIDE_INT coffset;
	  base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
						&coffset);
	  if (!base)
	    gcc_unreachable ();

	  TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
	  TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
						  TREE_OPERAND (*t, 1),
						  size_int (coffset));
	  res = true;
	}
      gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
			   || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
    }

  /* Canonicalize back MEM_REFs to plain reference trees if the object
     accessed is a decl that has the same access semantics as the MEM_REF.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1))
      && MR_DEPENDENCE_CLIQUE (*t) == 0)
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
      if (/* Same volatile qualification.  */
	  TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
	  /* Same TBAA behavior with -fstrict-aliasing.  */
	  && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
	  && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
	      == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
	  /* Same alignment.  */
	  && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
	  /* We have to look out here to not drop a required conversion
	     from the rhs to the lhs if *t appears on the lhs or vice-versa
	     if it appears on the rhs.  Thus require strict type
	     compatibility.  */
	  && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
	{
	  *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
	  res = true;
	}
    }

  /* Canonicalize TARGET_MEM_REF in particular with respect to
     the indexes becoming constant.  */
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
	{
	  *t = tem;
	  res = true;
	}
    }

  return res;
}
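/* Two examples of the canonicalizations above (an illustrative sketch;
   "a", "x" and "p_1" are made-up names):

   1) After an invariant address &a.b has been propagated into
      MEM[p_1, 8], the resulting MEM[&a.b, 8] is rewritten so that the
      component offset of b is folded into the constant operand,
      leaving a MEM_REF whose address operand is plain &a.

   2) MEM[&x, 0], where x is a decl with the same volatility, TBAA
      behavior, alignment and a compatible type as the access, is
      rewritten back to the plain reference "x".  */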
/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases.  */

static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
{
  bool changed = false;
  gimple *stmt = gsi_stmt (*gsi);
  bool nowarning = gimple_no_warning_p (stmt);
  unsigned i;

  fold_defer_overflow_warnings ();

  /* First do required canonicalization of [TARGET_]MEM_REF addresses
     after propagation.
     ???  This shouldn't be done in generic folding but in the
     propagation helpers which also know whether an address was
     propagated.
     Also canonicalize operand order.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
	{
	  tree *rhs = gimple_assign_rhs1_ptr (stmt);
	  if ((REFERENCE_CLASS_P (*rhs)
	       || TREE_CODE (*rhs) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (rhs))
	    changed = true;
	  tree *lhs = gimple_assign_lhs_ptr (stmt);
	  if (REFERENCE_CLASS_P (*lhs)
	      && maybe_canonicalize_mem_ref_addr (lhs))
	    changed = true;
	}
      else
	{
	  /* Canonicalize operand order.  */
	  enum tree_code code = gimple_assign_rhs_code (stmt);
	  if (TREE_CODE_CLASS (code) == tcc_comparison
	      || commutative_tree_code (code)
	      || commutative_ternary_tree_code (code))
	    {
	      tree rhs1 = gimple_assign_rhs1 (stmt);
	      tree rhs2 = gimple_assign_rhs2 (stmt);
	      if (tree_swap_operands_p (rhs1, rhs2))
		{
		  gimple_assign_set_rhs1 (stmt, rhs2);
		  gimple_assign_set_rhs2 (stmt, rhs1);
		  if (TREE_CODE_CLASS (code) == tcc_comparison)
		    gimple_assign_set_rhs_code (stmt,
						swap_tree_comparison (code));
		  changed = true;
		}
	    }
	}
      break;
    case GIMPLE_CALL:
      {
	for (i = 0; i < gimple_call_num_args (stmt); ++i)
	  {
	    tree *arg = gimple_call_arg_ptr (stmt, i);
	    if (REFERENCE_CLASS_P (*arg)
		&& maybe_canonicalize_mem_ref_addr (arg))
	      changed = true;
	  }
	tree *lhs = gimple_call_lhs_ptr (stmt);
	if (*lhs
	    && REFERENCE_CLASS_P (*lhs)
	    && maybe_canonicalize_mem_ref_addr (lhs))
	  changed = true;
	break;
      }
    case GIMPLE_ASM:
      {
	gasm *asm_stmt = as_a <gasm *> (stmt);
	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if (REFERENCE_CLASS_P (op)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if ((REFERENCE_CLASS_P (op)
		 || TREE_CODE (op) == ADDR_EXPR)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
      }
      break;
    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree *val = gimple_debug_bind_get_value_ptr (stmt);
	  if (*val
	      && (REFERENCE_CLASS_P (*val)
		  || TREE_CODE (*val) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (val))
	    changed = true;
	}
      break;
    case GIMPLE_COND:
      {
	/* Canonicalize operand order.  */
	tree lhs = gimple_cond_lhs (stmt);
	tree rhs = gimple_cond_rhs (stmt);
	if (tree_swap_operands_p (lhs, rhs))
	  {
	    gcond *gc = as_a <gcond *> (stmt);
	    gimple_cond_set_lhs (gc, rhs);
	    gimple_cond_set_rhs (gc, lhs);
	    gimple_cond_set_code (gc,
				  swap_tree_comparison (gimple_cond_code (gc)));
	    changed = true;
	  }
      }
    default:;
    }

  /* Dispatch to pattern-based folding.  */
  if (!inplace
      || is_gimple_assign (stmt)
      || gimple_code (stmt) == GIMPLE_COND)
    {
      gimple_seq seq = NULL;
      code_helper rcode;
      tree ops[3] = {};
      if (gimple_simplify (stmt, &rcode, ops, inplace ? NULL : &seq,
			   valueize, valueize))
	{
	  if (replace_stmt_with_simplification (gsi, rcode, ops, &seq, inplace))
	    changed = true;
	  else
	    gimple_seq_discard (seq);
	}
    }

  stmt = gsi_stmt (*gsi);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	/* Try to canonicalize for boolean-typed X the comparisons
	   X == 0, X == 1, X != 0, and X != 1.  */
	if (gimple_assign_rhs_code (stmt) == EQ_EXPR
	    || gimple_assign_rhs_code (stmt) == NE_EXPR)
	  {
	    tree lhs = gimple_assign_lhs (stmt);
	    tree op1 = gimple_assign_rhs1 (stmt);
	    tree op2 = gimple_assign_rhs2 (stmt);
	    tree type = TREE_TYPE (op1);

	    /* Check whether the comparison operands are of the same boolean
	       type as the result type is.
	       Check that second operand is an integer-constant with value
	       one or zero.  */
	    if (TREE_CODE (op2) == INTEGER_CST
		&& (integer_zerop (op2) || integer_onep (op2))
		&& useless_type_conversion_p (TREE_TYPE (lhs), type))
	      {
		enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
		bool is_logical_not = false;

		/* X == 0 and X != 1 is a logical-not of X
		   X == 1 and X != 0 is X.  */
		if ((cmp_code == EQ_EXPR && integer_zerop (op2))
		    || (cmp_code == NE_EXPR && integer_onep (op2)))
		  is_logical_not = true;

		if (is_logical_not == false)
		  gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
		/* Only for one-bit precision typed X the transformation
		   !X -> ~X is valid.  */
		else if (TYPE_PRECISION (type) == 1)
		  gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
		/* Otherwise we use !X -> X ^ 1.  */
		else
		  gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
						  build_int_cst (type, 1));
		changed = true;
		break;
	      }
	  }

	unsigned old_num_ops = gimple_num_ops (stmt);
	tree lhs = gimple_assign_lhs (stmt);
	tree new_rhs = fold_gimple_assign (gsi);
	if (new_rhs
	    && !useless_type_conversion_p (TREE_TYPE (lhs),
					   TREE_TYPE (new_rhs)))
	  new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	if (new_rhs
	    && (!inplace
		|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	break;
      }

    case GIMPLE_CALL:
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      {
	gasm *asm_stmt = as_a <gasm *> (stmt);
	size_t noutputs;
	const char **oconstraints;
	const char *constraint;
	bool allows_mem, allows_reg;

	noutputs = gimple_asm_noutputs (asm_stmt);
	oconstraints = XALLOCAVEC (const char *, noutputs);

	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    oconstraints[i]
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, true)) != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    constraint
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    parse_input_constraint (&constraint, 0, 0, noutputs, 0,
				    oconstraints, &allows_mem, &allows_reg);
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, !allows_reg && allows_mem))
		   != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
      }
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree val = gimple_debug_bind_get_value (stmt);
	  if (val
	      && REFERENCE_CLASS_P (val))
	    {
	      tree tem = maybe_fold_reference (val, false);
	      if (tem)
		{
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	  else if (val
		   && TREE_CODE (val) == ADDR_EXPR)
	    {
	      tree ref = TREE_OPERAND (val, 0);
	      tree tem = maybe_fold_reference (ref, false);
	      if (tem)
		{
		  tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	}
      break;

    case GIMPLE_RETURN:
      {
	greturn *ret_stmt = as_a <greturn *> (stmt);
	tree ret = gimple_return_retval (ret_stmt);

	if (ret && TREE_CODE (ret) == SSA_NAME && valueize)
	  {
	    tree val = valueize (ret);
	    if (val && val != ret
		&& may_propagate_copy (ret, val))
	      {
		gimple_return_set_retval (ret_stmt, val);
		changed = true;
	      }
	  }
      }
      break;

    default:;
    }

  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  */
  if (gimple_has_lhs (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
	{
	  tree new_lhs = maybe_fold_reference (lhs, true);
	  if (new_lhs)
	    {
	      gimple_set_lhs (stmt, new_lhs);
	      changed = true;
	    }
	}
    }

  fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);

  return changed;
}
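/* An example of the boolean canonicalization done above (a sketch with
   made-up SSA names): for boolean-typed X_1,

       _2 = X_1 == 0;   is rewritten to   _2 = ~X_1;

   when the type has one-bit precision, and to

       _2 = X_1 ^ 1;

   otherwise, while _2 = X_1 != 0 simply becomes a copy of X_1.  */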
/* Valueization callback that ends up not following SSA edges.  */

tree
no_follow_ssa_edges (tree)
{
  return NULL_TREE;
}

/* Valueization callback that ends up following single-use SSA edges only.  */

tree
follow_single_use_edges (tree val)
{
  if (TREE_CODE (val) == SSA_NAME
      && !has_single_use (val))
    return NULL_TREE;
  return val;
}
/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.
   The statement pointed to by GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
}

bool
fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
{
  return fold_stmt_1 (gsi, false, valueize);
}
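/* A minimal usage sketch (illustrative only; "example_fold_all" is a
   hypothetical helper, not part of this file):

     static void
     example_fold_all (basic_block bb)
     {
       for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
	    gsi_next (&gsi))
	 fold_stmt (&gsi);
     }

   The first overload performs no SSA-edge lookups; the second lets the
   caller supply a valueization callback, e.g. one backed by a
   propagation lattice.  */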
/* Perform the minimal folding on statement *GSI.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.
   The statement *GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt_inplace (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
  gcc_assert (gsi_stmt (*gsi) == stmt);
  return changed;
}
/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
   if EXPR is null or we don't know how.
   If non-null, the result always has boolean type.  */

static tree
canonicalize_bool (tree expr, bool invert)
{
  if (!expr)
    return NULL_TREE;
  if (invert)
    {
      if (integer_nonzerop (expr))
	return boolean_false_node;
      else if (integer_zerop (expr))
	return boolean_true_node;
      else if (TREE_CODE (expr) == SSA_NAME)
	return fold_build2 (EQ_EXPR, boolean_type_node, expr,
			    build_int_cst (TREE_TYPE (expr), 0));
      else if (COMPARISON_CLASS_P (expr))
	return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
			    boolean_type_node,
			    TREE_OPERAND (expr, 0),
			    TREE_OPERAND (expr, 1));
      else
	return NULL_TREE;
    }
  else
    {
      if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
	return expr;
      if (integer_nonzerop (expr))
	return boolean_true_node;
      else if (integer_zerop (expr))
	return boolean_false_node;
      else if (TREE_CODE (expr) == SSA_NAME)
	return fold_build2 (NE_EXPR, boolean_type_node, expr,
			    build_int_cst (TREE_TYPE (expr), 0));
      else if (COMPARISON_CLASS_P (expr))
	return fold_build2 (TREE_CODE (expr),
			    boolean_type_node,
			    TREE_OPERAND (expr, 0),
			    TREE_OPERAND (expr, 1));
      else
	return NULL_TREE;
    }
}
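/* For instance (illustrative, with made-up names):
   canonicalize_bool (a_1 < b_2, true) yields the inverted comparison
   a_1 >= b_2; canonicalize_bool (x_3, true) for an SSA name yields
   x_3 == 0; and a nonzero constant with INVERT set yields
   boolean_false_node.  */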
/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  */

static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
			const_tree op1, const_tree op2)
{
  gimple *s;

  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      if (operand_equal_p (expr, op1, 0))
	return ((code == NE_EXPR && integer_zerop (op2))
		|| (code == EQ_EXPR && integer_nonzerop (op2)));
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
	  && gimple_assign_rhs_code (s) == code
	  && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
	  && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
	return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
	{
	  enum tree_code c = gimple_assign_rhs_code (s);
	  if ((c == NE_EXPR && integer_zerop (op2))
	      || (c == EQ_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr, c,
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	  if ((c == EQ_EXPR && integer_zerop (op2))
	      || (c == NE_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr,
					   invert_tree_comparison (c, false),
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	}
    }

  return false;
}
/* Check to see if two boolean expressions OP1 and OP2 are logically
   equivalent.  */

static bool
same_bool_result_p (const_tree op1, const_tree op2)
{
  /* Simple cases first.  */
  if (operand_equal_p (op1, op2, 0))
    return true;

  /* Check the cases where at least one of the operands is a comparison.
     These are a bit smarter than operand_equal_p in that they apply some
     identities on SSA_NAMEs.  */
  if (COMPARISON_CLASS_P (op2)
      && same_bool_comparison_p (op1, TREE_CODE (op2),
				 TREE_OPERAND (op2, 0),
				 TREE_OPERAND (op2, 1)))
    return true;
  if (COMPARISON_CLASS_P (op1)
      && same_bool_comparison_p (op2, TREE_CODE (op1),
				 TREE_OPERAND (op1, 0),
				 TREE_OPERAND (op1, 1)))
    return true;

  /* Default case.  */
  return false;
}
/* Forward declarations for some mutually recursive functions.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison (tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison_1 (gimple *stmt,
			   enum tree_code code2, tree op2a, tree op2b);
static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison (tree var, bool invert,
			enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison_1 (gimple *stmt,
			  enum tree_code code2, tree op2a, tree op2b);
/* Helper function for and_comparisons_1: try to simplify the AND of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of the VAR before doing the AND.
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison (tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b)
{
  tree t;
  gimple *stmt = SSA_NAME_DEF_STMT (var);

  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.  */
  if (invert)
    t = or_var_with_comparison_1 (stmt,
				  invert_tree_comparison (code2, false),
				  op2a, op2b);
  else
    t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
  return canonicalize_bool (t, invert);
}
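/* A worked instance of the DeMorgan rewrite above (a sketch with
   made-up names): for !v_1 AND (a_2 < b_3) we simplify
   v_1 OR (a_2 >= b_3) via or_var_with_comparison_1 and then let
   canonicalize_bool invert the result, so a successful inner
   simplification to "false" comes back as boolean_true_node.  */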
/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison_1 (gimple *stmt,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_false_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (innercode,
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt),
				  code2, op2a, op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple *s;
      tree t;
      tree partial = NULL_TREE;
      bool is_and = (innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
	 inner1 AND (inner1 OR inner2) => inner1
	 !inner1 AND (inner1 AND inner2) => false
	 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
	 Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
	 Compute the first partial result, (inner1 AND (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (t AND inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner2;
	      else if (integer_zerop (t))
		return boolean_false_node;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner2 AND (op2a code2 op2b)))  */
	  else if (integer_onep (t))
	    return boolean_true_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (inner1 AND t)  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner1;
	      else if (integer_zerop (t))
		return boolean_false_node;
	      /* If both are the same, we can apply the identity
		 (x AND x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner1 AND (op2a code2 op2b)))
	     => (t OR partial)  */
	  else
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed OR expression.  The
		     interesting case is when at least one is false.
		     Or, if both are the same, we can apply the identity
		     (x OR x) == x.  */
		  if (integer_zerop (partial))
		    return t;
		  else if (integer_zerop (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
/* Try to simplify the AND of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) AND (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a == op1b), we should either be able to
	 return that or FALSE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	      else
		return boolean_false_node;
	    }
	}
      /* Likewise if the second comparison is an == comparison.  */
      else if (code2 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	      else
		return boolean_false_node;
	    }
	}

      /* Same business with inequality tests.  */
      else if (code1 == NE_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp != 0); break;
	    case NE_EXPR: val = (cmp == 0); break;
	    case LT_EXPR: val = (cmp >= 0); break;
	    case GT_EXPR: val = (cmp <= 0); break;
	    case LE_EXPR: val = (cmp > 0); break;
	    case GE_EXPR: val = (cmp < 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == NE_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp <= 0); break;
	    case GT_EXPR: val = (cmp >= 0); break;
	    case LE_EXPR: val = (cmp < 0); break;
	    case GE_EXPR: val = (cmp > 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Choose the more restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Likewise choose the more restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Check for singleton ranges.  */
      else if (cmp == 0
	       && ((code1 == LE_EXPR && code2 == GE_EXPR)
		   || (code1 == GE_EXPR && code2 == LE_EXPR)))
	return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);

      /* Check for disjoint ranges.  */
      else if (cmp <= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_false_node;
      else if (cmp >= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_false_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple *stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return and_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ANDed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
			{
			  if (!result)
			    result = boolean_false_node;
			  else if (!integer_zerop (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = and_var_with_comparison (arg, invert, code2,
						      op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}
/* Try to simplify the AND of two comparisons, specified by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be simplified to a single expression (without requiring
   introducing more SSA variables to hold intermediate values),
   return the resulting tree.  Otherwise return NULL_TREE.
   If the result expression is non-null, it has boolean type.  */

tree
maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
			    enum tree_code code2, tree op2a, tree op2b)
{
  tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
  if (t)
    return t;
  else
    return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
}
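/* Examples of what the above can fold (illustrative, with a made-up
   name x_1 and small constants):

     (x_1 < 5) AND (x_1 < 3)    -> x_1 < 3    (more restrictive bound)
     (x_1 <= 5) AND (x_1 >= 5)  -> x_1 == 5   (singleton range)
     (x_1 < 3) AND (x_1 > 5)    -> false      (disjoint ranges)  */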
/* Helper function for or_comparisons_1: try to simplify the OR of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of VAR before doing the OR.
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison (tree var, bool invert,
			enum tree_code code2, tree op2a, tree op2b)
{
  tree t;
  gimple *stmt = SSA_NAME_DEF_STMT (var);

  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.  */
  if (invert)
    t = and_var_with_comparison_1 (stmt,
				   invert_tree_comparison (code2, false),
				   op2a, op2b);
  else
    t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
  return canonicalize_bool (t, invert);
}
/* Try to simplify the OR of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_TREE if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison_1 (gimple *stmt,
			  enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var OR (var != 0)) => true.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_true_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = or_comparisons_1 (innercode,
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 code2, op2a, op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple *s;
      tree t;
      tree partial = NULL_TREE;
      bool is_or = (innercode == BIT_IOR_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
	 inner1 OR (inner1 AND inner2) => inner1
	 !inner1 OR (inner1 OR inner2) => true
	 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
	 Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_or ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_or ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the OR across the inner tests.
	 Compute the first partial result, (inner1 OR (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (t OR inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_or)
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (integer_zerop (t))
		return inner2;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner2 OR (op2a code op2b)))  */
	  else if (integer_zerop (t))
	    return boolean_false_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (inner1 OR t)
	     => (t OR partial)  */
	  if (is_or)
	    {
	      if (integer_zerop (t))
		return inner1;
	      else if (integer_onep (t))
		return boolean_true_node;
	      /* If both are the same, we can apply the identity
		 (x OR x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner1 OR (op2a code2 op2b)))
	     => (t AND partial)  */
	  else
	    {
	      if (integer_zerop (t))
		return boolean_false_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed AND expression.  The
		     interesting case is when at least one is true.
		     Or, if both are the same, we can apply the identity
		     (x AND x) == x.  */
		  if (integer_onep (partial))
		    return t;
		  else if (integer_onep (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
/* Try to simplify the OR of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) OR (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1, code2,
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    truth_type, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a != op1b), we should either be able to
	 return that or TRUE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	    }
	}
      /* Likewise if the second comparison is a != comparison.  */
      else if (code2 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	    }
	}

      /* See if an equality test is redundant with the other comparison.  */
      else if (code1 == EQ_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == EQ_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Choose the less restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Likewise choose the less restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Check for singleton ranges.  */
      else if (cmp == 0
	       && ((code1 == LT_EXPR && code2 == GT_EXPR)
		   || (code1 == GT_EXPR && code2 == LT_EXPR)))
	return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);

      /* Check for less/greater pairs that don't restrict the range at all.  */
      else if (cmp >= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_true_node;
      else if (cmp <= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_true_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple *stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return or_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ORed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
			{
			  if (!result)
			    result = boolean_true_node;
			  else if (!integer_onep (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = or_var_with_comparison (arg, invert, code2,
						     op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}
/* Try to simplify the OR of two comparisons, specified by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be simplified to a single expression (without requiring
   introducing more SSA variables to hold intermediate values),
   return the resulting tree.  Otherwise return NULL_TREE.
   If the result expression is non-null, it has boolean type.  */

tree
maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
  if (t)
    return t;
  else
    return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
}
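/* Examples of what the above can fold (illustrative, with a made-up
   name x_1 and small constants):

     (x_1 < 3) OR (x_1 < 5)    -> x_1 < 5    (less restrictive bound)
     (x_1 < 5) OR (x_1 > 5)    -> x_1 != 5   (singleton gap)
     (x_1 < 5) OR (x_1 >= 3)   -> true       (whole range covered)  */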
/* Fold STMT to a constant using VALUEIZE to valueize SSA names.

   Either NULL_TREE, a simplified but non-constant or a constant
   is returned.

   ???  This should go into a gimple-fold-inline.h file to be eventually
   privatized with the single valueize function used in the various TUs
   to avoid the indirect function call overhead.  */

tree
gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
				tree (*gvalueize) (tree))
{
  code_helper rcode;
  tree ops[3] = {};

  /* ???  The SSA propagators do not correctly deal with following SSA use-def
     edges if there are intermediate VARYING defs.  For this reason
     do not follow SSA edges here even though SCCVN can technically
     just deal fine with that.  */
  if (gimple_simplify (stmt, &rcode, ops, NULL, gvalueize, valueize))
    {
      tree res = NULL_TREE;
      if (gimple_simplified_result_is_gimple_val (rcode, ops))
	res = ops[0];
      else if (mprts_hook)
	res = mprts_hook (rcode, gimple_expr_type (stmt), ops);
      if (res)
	{
	  if (dump_file && dump_flags & TDF_DETAILS)
	    {
	      fprintf (dump_file, "Match-and-simplified ");
	      print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, res);
	      fprintf (dump_file, "\n");
	    }
	  return res;
	}
    }

  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	enum tree_code subcode = gimple_assign_rhs_code (stmt);

	switch (get_gimple_rhs_class (subcode))
	  {
	  case GIMPLE_SINGLE_RHS:
	    {
	      tree rhs = gimple_assign_rhs1 (stmt);
	      enum tree_code_class kind = TREE_CODE_CLASS (subcode);

	      if (TREE_CODE (rhs) == SSA_NAME)
		{
		  /* If the RHS is an SSA_NAME, return its known constant value,
		     if any.  */
		  return (*valueize) (rhs);
		}
	      /* Handle propagating invariant addresses into address
		 operations.  */
	      else if (TREE_CODE (rhs) == ADDR_EXPR
		       && !is_gimple_min_invariant (rhs))
		{
		  HOST_WIDE_INT offset = 0;
		  tree base;
		  base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
							  &offset,
							  valueize);
		  if (base
		      && (CONSTANT_CLASS_P (base)
			  || decl_address_invariant_p (base)))
		    return build_invariant_address (TREE_TYPE (rhs),
						    base, offset);
		}
	      else if (TREE_CODE (rhs) == CONSTRUCTOR
		       && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		       && (CONSTRUCTOR_NELTS (rhs)
			   == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
		{
		  unsigned i;
		  tree val, *vec;

		  vec = XALLOCAVEC (tree,
				    TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
		  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
		    {
		      val = (*valueize) (val);
		      if (TREE_CODE (val) == INTEGER_CST
			  || TREE_CODE (val) == REAL_CST
			  || TREE_CODE (val) == FIXED_CST)
			vec[i] = val;
		      else
			return NULL_TREE;
		    }

		  return build_vector (TREE_TYPE (rhs), vec);
		}
	      if (subcode == OBJ_TYPE_REF)
		{
		  tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
		  /* If callee is constant, we can fold away the wrapper.  */
		  if (is_gimple_min_invariant (val))
		    return val;
		}

	      if (kind == tcc_reference)
		{
		  if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
		       || TREE_CODE (rhs) == REALPART_EXPR
		       || TREE_CODE (rhs) == IMAGPART_EXPR)
		      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      return fold_unary_loc (EXPR_LOCATION (rhs),
					     TREE_CODE (rhs),
					     TREE_TYPE (rhs), val);
		    }
		  else if (TREE_CODE (rhs) == BIT_FIELD_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      return fold_ternary_loc (EXPR_LOCATION (rhs),
					       TREE_CODE (rhs),
					       TREE_TYPE (rhs), val,
					       TREE_OPERAND (rhs, 1),
					       TREE_OPERAND (rhs, 2));
		    }
		  else if (TREE_CODE (rhs) == MEM_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      if (TREE_CODE (val) == ADDR_EXPR
			  && is_gimple_min_invariant (val))
			{
			  tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
						  unshare_expr (val),
						  TREE_OPERAND (rhs, 1));
			  if (tem)
			    rhs = tem;
			}
		    }
		  return fold_const_aggregate_ref_1 (rhs, valueize);
		}
	      else if (kind == tcc_declaration)
		return get_symbol_constant_value (rhs);
	      return rhs;
	    }

	  case GIMPLE_UNARY_RHS:
	    return NULL_TREE;

	  case GIMPLE_BINARY_RHS:
	    /* Translate &x + CST into an invariant form suitable for
	       further propagation.  */
	    if (subcode == POINTER_PLUS_EXPR)
	      {
		tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
		tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
		if (TREE_CODE (op0) == ADDR_EXPR
		    && TREE_CODE (op1) == INTEGER_CST)
		  {
		    tree off = fold_convert (ptr_type_node, op1);
		    return build_fold_addr_expr_loc
			(loc,
			 fold_build2 (MEM_REF,
				      TREE_TYPE (TREE_TYPE (op0)),
				      unshare_expr (op0), off));
		  }
	      }
	    /* Canonicalize bool != 0 and bool == 0 appearing after
	       valueization.  While gimple_simplify handles this
	       it can get confused by the ~X == 1 -> X == 0 transform
	       which we can't reduce to an SSA name or a constant
	       (and we have no way to tell gimple_simplify to not
	       consider those transforms in the first place).  */
	    else if (subcode == EQ_EXPR
		     || subcode == NE_EXPR)
	      {
		tree lhs = gimple_assign_lhs (stmt);
		tree op0 = gimple_assign_rhs1 (stmt);
		if (useless_type_conversion_p (TREE_TYPE (lhs),
					       TREE_TYPE (op0)))
		  {
		    tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
		    op0 = (*valueize) (op0);
		    if (TREE_CODE (op0) == INTEGER_CST)
		      std::swap (op0, op1);
		    if (TREE_CODE (op1) == INTEGER_CST
			&& ((subcode == NE_EXPR && integer_zerop (op1))
			    || (subcode == EQ_EXPR && integer_onep (op1))))
		      return op0;
		  }
	      }
	    return NULL_TREE;

	  case GIMPLE_TERNARY_RHS:
	    {
	      /* Handle ternary operators that can appear in GIMPLE form.  */
	      tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
	      tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
	      tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
	      return fold_ternary_loc (loc, subcode,
				       gimple_expr_type (stmt), op0, op1, op2);
	    }

	  default:
	    gcc_unreachable ();
	  }
      }

    case GIMPLE_CALL:
      {
	tree fn;
	gcall *call_stmt = as_a <gcall *> (stmt);

	if (gimple_call_internal_p (stmt))
	  {
	    enum tree_code subcode = ERROR_MARK;
	    switch (gimple_call_internal_fn (stmt))
	      {
	      case IFN_UBSAN_CHECK_ADD:
		subcode = PLUS_EXPR;
		break;
	      case IFN_UBSAN_CHECK_SUB:
		subcode = MINUS_EXPR;
		break;
	      case IFN_UBSAN_CHECK_MUL:
		subcode = MULT_EXPR;
		break;
	      case IFN_BUILTIN_EXPECT:
		{
		  tree arg0 = gimple_call_arg (stmt, 0);
		  tree op0 = (*valueize) (arg0);
		  if (TREE_CODE (op0) == INTEGER_CST)
		    return op0;
		  return NULL_TREE;
		}
	      default:
		return NULL_TREE;
	      }
	    tree arg0 = gimple_call_arg (stmt, 0);
	    tree arg1 = gimple_call_arg (stmt, 1);
	    tree op0 = (*valueize) (arg0);
	    tree op1 = (*valueize) (arg1);

	    if (TREE_CODE (op0) != INTEGER_CST
		|| TREE_CODE (op1) != INTEGER_CST)
	      {
		switch (subcode)
		  {
		  case MULT_EXPR:
		    /* x * 0 = 0 * x = 0 without overflow.  */
		    if (integer_zerop (op0) || integer_zerop (op1))
		      return build_zero_cst (TREE_TYPE (arg0));
		    break;
		  case MINUS_EXPR:
		    /* y - y = 0 without overflow.  */
		    if (operand_equal_p (op0, op1, 0))
		      return build_zero_cst (TREE_TYPE (arg0));
		    break;
		  default:
		    break;
		  }
	      }
	    tree res
	      = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
	    if (res
		&& TREE_CODE (res) == INTEGER_CST
		&& !TREE_OVERFLOW (res))
	      return res;
	    return NULL_TREE;
	  }

	fn = (*valueize) (gimple_call_fn (stmt));
	if (TREE_CODE (fn) == ADDR_EXPR
	    && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
	    && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
	    && gimple_builtin_call_types_compatible_p (stmt,
						       TREE_OPERAND (fn, 0)))
	  {
	    tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
	    tree retval;
	    unsigned i;
	    for (i = 0; i < gimple_call_num_args (stmt); ++i)
	      args[i] = (*valueize) (gimple_call_arg (stmt, i));
	    retval = fold_builtin_call_array (loc,
					      gimple_call_return_type (call_stmt),
					      fn, gimple_call_num_args (stmt),
					      args);
	    if (retval)
	      {
		/* fold_call_expr wraps the result inside a NOP_EXPR.  */
		STRIP_NOPS (retval);
		retval = fold_convert (gimple_call_return_type (call_stmt),
				       retval);
	      }
	    return retval;
	  }
	return NULL_TREE;
      }

    default:
      return NULL_TREE;
    }
}
/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
   Returns NULL_TREE if folding to a constant is not possible, otherwise
   returns a constant according to is_gimple_min_invariant.  */

tree
gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
{
  tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
  if (res && is_gimple_min_invariant (res))
    return res;
  return NULL_TREE;
}
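/* A usage sketch (illustrative only; "my_valueize" is a hypothetical
   callback, not part of this file): a propagation pass that has
   recorded x_1 = 3 in its lattice can hand that knowledge to the
   folder:

     static tree
     my_valueize (tree name)
     {
       // Return the lattice constant for NAME, or NAME itself
       // when nothing is known about it.
       return name;
     }

     ... gimple_fold_stmt_to_constant (stmt, my_valueize) ...

   For a statement _2 = x_1 + 1 and a callback mapping x_1 to 3 this
   would return the INTEGER_CST 4; any non-invariant result yields
   NULL_TREE.  */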
/* The following set of functions are supposed to fold references using
   their constant initializers.  */

/* See if we can find constructor defining value of BASE.
   When we know the constructor with constant offset (such as
   base is array[40] and we do know constructor of array), then
   BIT_OFFSET is adjusted accordingly.

   As a special case, return error_mark_node when constructor
   is not explicitly available, but it is known to be zero
   such as 'static const int a;'.  */

static tree
get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
		      tree (*valueize) (tree))
{
  HOST_WIDE_INT bit_offset2, size, max_size;
  bool reverse;

  if (TREE_CODE (base) == MEM_REF)
    {
      if (!integer_zerop (TREE_OPERAND (base, 1)))
	{
	  if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
	    return NULL_TREE;
	  *bit_offset += (mem_ref_offset (base).to_short_addr ()
			  * BITS_PER_UNIT);
	}

      if (valueize
	  && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
	base = valueize (TREE_OPERAND (base, 0));
      if (!base || TREE_CODE (base) != ADDR_EXPR)
	return NULL_TREE;
      base = TREE_OPERAND (base, 0);
    }
  else if (valueize
	   && TREE_CODE (base) == SSA_NAME)
    base = valueize (base);

  /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
     DECL_INITIAL.  If BASE is a nested reference into another
     ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
     the inner reference.  */
  switch (TREE_CODE (base))
    {
    case VAR_DECL:
    case CONST_DECL:
      {
	tree init = ctor_for_folding (base);

	/* Our semantic is exact opposite of ctor_for_folding;
	   NULL means unknown, while error_mark_node is 0.  */
	if (init == error_mark_node)
	  return NULL_TREE;
	if (!init)
	  return error_mark_node;
	return init;
      }

    case VIEW_CONVERT_EXPR:
      return get_base_constructor (TREE_OPERAND (base, 0),
				   bit_offset, valueize);

    case ARRAY_REF:
    case COMPONENT_REF:
      base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
				      &reverse);
      if (max_size == -1 || size != max_size)
	return NULL_TREE;
      *bit_offset += bit_offset2;
      return get_base_constructor (base, bit_offset, valueize);

    case CONSTRUCTOR:
      return base;

    default:
      if (CONSTANT_CLASS_P (base))
	return base;

      return NULL_TREE;
    }
}
/* CTOR is CONSTRUCTOR of an array type.  Fold reference of type TYPE and size
   SIZE to the memory at bit OFFSET.  */

static tree
fold_array_ctor_reference (tree type, tree ctor,
			   unsigned HOST_WIDE_INT offset,
			   unsigned HOST_WIDE_INT size,
			   tree from_decl)
{
  offset_int low_bound;
  offset_int elt_size;
  offset_int access_index;
  tree domain_type = NULL_TREE;
  HOST_WIDE_INT inner_offset;

  /* Compute low bound and elt size.  */
  if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
    domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    {
      /* Static constructors for variably sized objects make no sense.  */
      if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
	return NULL_TREE;
      low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
    }
  else
    low_bound = 0;
  /* Static constructors for variably sized objects make no sense.  */
  if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
    return NULL_TREE;
  elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));

  /* We can handle only constantly sized accesses that are known to not
     be larger than size of array element.  */
  if (!TYPE_SIZE_UNIT (type)
      || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
      || elt_size < wi::to_offset (TYPE_SIZE_UNIT (type))
      || elt_size == 0)
    return NULL_TREE;

  /* Compute the array index we look for.  */
  access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
				 elt_size);
  access_index += low_bound;

  /* And offset within the access.  */
  inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);

  /* See if the array field is large enough to span whole access.  We do not
     care to fold accesses spanning multiple array indexes.  */
  if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
    return NULL_TREE;
  if (tree val = get_array_ctor_element_at_index (ctor, access_index))
    return fold_ctor_reference (type, val, inner_offset, size, from_decl);

  /* When memory is not explicitly mentioned in constructor,
     it is 0 (or out of range).  */
  return build_zero_cst (type);
}
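/* A worked example (illustrative): for

     static const int a[4] = {1, 2, 3, 4};

   a 32-bit read of a[2] arrives here with OFFSET 64 and SIZE 32.
   With elt_size of 4 bytes, access_index = (64/8) / 4 = 2 (plus the
   domain's low bound), inner_offset = 64 % 32 = 0, and the element
   fetched from the CONSTRUCTOR by get_array_ctor_element_at_index is
   the INTEGER_CST 3.  */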
/* CTOR is CONSTRUCTOR of an aggregate or vector.
   Fold reference of type TYPE and size SIZE to the memory at bit OFFSET.  */

static tree
fold_nonarray_ctor_reference (tree type, tree ctor,
			      unsigned HOST_WIDE_INT offset,
			      unsigned HOST_WIDE_INT size,
			      tree from_decl)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
			    cval)
    {
      tree byte_offset = DECL_FIELD_OFFSET (cfield);
      tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
      tree field_size = DECL_SIZE (cfield);
      offset_int bitoffset;
      offset_int bitoffset_end, access_end;

      /* Variable sized objects in static constructors make no sense,
	 but field_size can be NULL for flexible array members.  */
      gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
		  && TREE_CODE (byte_offset) == INTEGER_CST
		  && (field_size != NULL_TREE
		      ? TREE_CODE (field_size) == INTEGER_CST
		      : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));

      /* Compute bit offset of the field.  */
      bitoffset = (wi::to_offset (field_offset)
		   + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
      /* Compute bit offset where the field ends.  */
      if (field_size != NULL_TREE)
	bitoffset_end = bitoffset + wi::to_offset (field_size);
      else
	bitoffset_end = 0;

      access_end = offset_int (offset) + size;

      /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
	 [BITOFFSET, BITOFFSET_END)?  */
      if (wi::cmps (access_end, bitoffset) > 0
	  && (field_size == NULL_TREE
	      || wi::lts_p (offset, bitoffset_end)))
	{
	  offset_int inner_offset = offset_int (offset) - bitoffset;
	  /* We do have overlap.  Now see if field is large enough to
	     cover the access.  Give up for accesses spanning multiple
	     fields.  */
	  if (wi::cmps (access_end, bitoffset_end) > 0)
	    return NULL_TREE;
	  if (offset < bitoffset)
	    return NULL_TREE;
	  return fold_ctor_reference (type, cval,
				      inner_offset.to_uhwi (), size,
				      from_decl);
	}
    }
  /* When memory is not explicitly mentioned in constructor, it is 0.  */
  return build_zero_cst (type);
}
/* CTOR is a value initializing memory.  Fold a reference of type TYPE and
   size SIZE to the memory at bit OFFSET.  */

tree
fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
                     unsigned HOST_WIDE_INT size, tree from_decl)
{
  tree ret;

  /* We found the field with exact match.  */
  if (useless_type_conversion_p (type, TREE_TYPE (ctor))
      && !offset)
    return canonicalize_constructor_val (unshare_expr (ctor), from_decl);

  /* We are at the end of walk, see if we can view convert the
     result.  */
  if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
      /* VIEW_CONVERT_EXPR is defined only for matching sizes.  */
      && !compare_tree_int (TYPE_SIZE (type), size)
      && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
    {
      ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
      if (ret)
        {
          ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
          if (ret)
            STRIP_USELESS_TYPE_CONVERSION (ret);
        }
      return ret;
    }
  /* For constants and byte-aligned/sized reads, try to go through
     native_encode/interpret.  */
  if (CONSTANT_CLASS_P (ctor)
      && BITS_PER_UNIT == 8
      && offset % BITS_PER_UNIT == 0
      && size % BITS_PER_UNIT == 0
      && size <= MAX_BITSIZE_MODE_ANY_MODE)
    {
      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
      int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
                                    offset / BITS_PER_UNIT);
      if (len > 0)
        return native_interpret_expr (type, buf, len);
    }
  if (TREE_CODE (ctor) == CONSTRUCTOR)
    {
      if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
          || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
        return fold_array_ctor_reference (type, ctor, offset, size,
                                          from_decl);
      else
        return fold_nonarray_ctor_reference (type, ctor, offset, size,
                                             from_decl);
    }

  return NULL_TREE;
}
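
/* Usage sketch (editorial): a caller holding the CONSTRUCTOR CTOR of some
   decl BASE can extract a 4-byte field at byte position POS, assuming
   8-bit units, with

     tree val = fold_ctor_reference (type, ctor, POS * BITS_PER_UNIT,
                                     4 * BITS_PER_UNIT, base);

   where a NULL_TREE result means the reference could not be folded.  */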
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates, valueizing SSA
   names using VALUEIZE.  Return NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
{
  tree ctor, idx, base;
  HOST_WIDE_INT offset, size, max_size;
  tree tem;
  bool reverse;

  if (TREE_THIS_VOLATILE (t))
    return NULL_TREE;

  if (DECL_P (t))
    return get_symbol_constant_value (t);

  tem = fold_read_from_constant_string (t);
  if (tem)
    return tem;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Constant indexes are handled well by get_base_constructor.
         Only special case variable offsets.
         FIXME: This code can't handle nested references with variable indexes
         (they will be handled only by iteration of ccp).  Perhaps we can bring
         get_ref_base_and_extent here and make it use a valueize callback.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
          && valueize
          && (idx = (*valueize) (TREE_OPERAND (t, 1)))
          && TREE_CODE (idx) == INTEGER_CST)
        {
          tree low_bound, unit_size;

          /* If the resulting bit-offset is constant, track it.  */
          if ((low_bound = array_ref_low_bound (t),
               TREE_CODE (low_bound) == INTEGER_CST)
              && (unit_size = array_ref_element_size (t),
                  tree_fits_uhwi_p (unit_size)))
            {
              offset_int woffset
                = wi::sext (wi::to_offset (idx) - wi::to_offset (low_bound),
                            TYPE_PRECISION (TREE_TYPE (idx)));

              if (wi::fits_shwi_p (woffset))
                {
                  offset = woffset.to_shwi ();
                  /* TODO: This code seems wrong, multiply then check
                     to see if it fits.  */
                  offset *= tree_to_uhwi (unit_size);
                  offset *= BITS_PER_UNIT;

                  base = TREE_OPERAND (t, 0);
                  ctor = get_base_constructor (base, &offset, valueize);
                  /* Empty constructor.  Always fold to 0.  */
                  if (ctor == error_mark_node)
                    return build_zero_cst (TREE_TYPE (t));
                  /* Out of bound array access.  Value is undefined,
                     but don't fold.  */
                  if (offset < 0)
                    return NULL_TREE;
                  /* We cannot determine ctor.  */
                  if (!ctor)
                    return NULL_TREE;
                  return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
                                              tree_to_uhwi (unit_size)
                                              * BITS_PER_UNIT,
                                              base);
                }
            }
        }
      /* Fallthru.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case TARGET_MEM_REF:
    case MEM_REF:
      base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
      ctor = get_base_constructor (base, &offset, valueize);

      /* Empty constructor.  Always fold to 0.  */
      if (ctor == error_mark_node)
        return build_zero_cst (TREE_TYPE (t));
      /* We do not know the precise address.  */
      if (max_size == -1 || max_size != size)
        return NULL_TREE;
      /* We cannot determine ctor.  */
      if (!ctor)
        return NULL_TREE;

      /* Out of bound array access.  Value is undefined, but don't fold.  */
      if (offset < 0)
        return NULL_TREE;

      return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
                                  base);

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
        tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
        if (c && TREE_CODE (c) == COMPLEX_CST)
          return fold_build1_loc (EXPR_LOCATION (t),
                                  TREE_CODE (t), TREE_TYPE (t), c);
        break;
      }

    default:
      break;
    }

  return NULL_TREE;
}

/* Fold a constant aggregate reference T without SSA name valueization.  */

tree
fold_const_aggregate_ref (tree t)
{
  return fold_const_aggregate_ref_1 (t, NULL);
}
/* Lookup the virtual method with index TOKEN in virtual table V
   at OFFSET.
   Set CAN_REFER if non-NULL to false if the method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by a non-C++ produced symbol); in that case just return NULL.  */

tree
gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
                                   tree v,
                                   unsigned HOST_WIDE_INT offset,
                                   bool *can_refer)
{
  tree vtable = v, init, fn;
  unsigned HOST_WIDE_INT size;
  unsigned HOST_WIDE_INT elt_size, access_index;
  tree domain_type;

  if (can_refer)
    *can_refer = true;

  /* First of all double check we have a virtual table.  */
  if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
    {
      /* Pass down that we lost track of the target.  */
      if (can_refer)
        *can_refer = false;
      return NULL_TREE;
    }

  init = ctor_for_folding (v);

  /* The virtual tables should always be born with constructors
     and we always should assume that they are available for
     folding.  At the moment we do not stream them in all cases,
     but it should never happen that the ctor seems unreachable.  */
  gcc_assert (init);
  if (init == error_mark_node)
    {
      gcc_assert (in_lto_p);
      /* Pass down that we lost track of the target.  */
      if (can_refer)
        *can_refer = false;
      return NULL_TREE;
    }
  gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
  size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
  offset *= BITS_PER_UNIT;
  offset += token * size;

  /* Lookup the value in the constructor that is assumed to be an array.
     This is equivalent to
     fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
                               offset, size, NULL);
     but in constant time.  We expect that the frontend produced a simple
     array without indexed initializers.  */

  gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
  domain_type = TYPE_DOMAIN (TREE_TYPE (init));
  gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
  elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));

  access_index = offset / BITS_PER_UNIT / elt_size;
  gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);

  /* This code makes an assumption that there are no
     indexed fields produced by the C++ FE, so we can directly index
     the array.  */
  if (access_index < CONSTRUCTOR_NELTS (init))
    {
      fn = CONSTRUCTOR_ELT (init, access_index)->value;
      gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
      STRIP_NOPS (fn);
    }
  else
    fn = NULL;

  /* For a type-inconsistent program we may end up looking up a virtual method
     in a virtual table that does not contain TOKEN entries.  We may overrun
     the virtual table and pick up a constant or RTTI info pointer.
     In any case the call is undefined.  */
  if (!fn
      || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
      || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
    fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
  else
    {
      fn = TREE_OPERAND (fn, 0);

      /* When the cgraph node is missing and the function is not public, we
         cannot devirtualize.  This can happen in WHOPR when the actual method
         ends up in another partition, because we found the devirtualization
         possibility too late.  */
      if (!can_refer_decl_in_current_unit_p (fn, vtable))
        {
          if (can_refer)
            {
              *can_refer = false;
              return fn;
            }
          return NULL_TREE;
        }
    }

  /* Make sure we create a cgraph node for functions we'll reference.
     They can be non-existent if the reference comes from an entry
     of an external vtable for example.  */
  cgraph_node::get_create (fn);

  return fn;
}
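
/* Worked example (editorial; numbers assume a 64-bit target with 8-byte
   vtable slots): for TOKEN == 3 and OFFSET == 0 we get

     size == 64 (bits), elt_size == 8 (bytes)
     offset == 0 * BITS_PER_UNIT + 3 * 64 == 192
     access_index == 192 / 8 / 8 == 3

   so the fourth CONSTRUCTOR entry of the vtable is inspected.  */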
/* Return a declaration of a function which an OBJ_TYPE_REF references.  TOKEN
   is the integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
   KNOWN_BINFO carries the binfo describing the true type of
   OBJ_TYPE_REF_OBJECT(REF).
   Set CAN_REFER if non-NULL to false if the method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by a non-C++ produced symbol); in that case just return NULL.  */

tree
gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
                                  bool *can_refer)
{
  unsigned HOST_WIDE_INT offset;
  tree v;

  v = BINFO_VTABLE (known_binfo);
  /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone.  */
  if (!v)
    return NULL_TREE;

  if (!vtable_pointer_value_to_vtable (v, &v, &offset))
    {
      if (can_refer)
        *can_refer = false;
      return NULL_TREE;
    }
  return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
}
/* Given a pointer value T, return a simplified version of an
   indirection through T, or NULL_TREE if no simplification is
   possible.  Note that the resulting type may be different from
   the type pointed to in the sense that it is still compatible
   from the langhooks point of view.  */

tree
gimple_fold_indirect_ref (tree t)
{
  tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
  tree sub = t;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype)
      || TYPE_REF_CAN_ALIAS_ALL (ptype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (useless_type_conversion_p (type, optype))
        return op;

      /* *(foo *)&fooarray => fooarray[0] */
      if (TREE_CODE (optype) == ARRAY_TYPE
          && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
          && useless_type_conversion_p (type, TREE_TYPE (optype)))
        {
          tree type_domain = TYPE_DOMAIN (optype);
          tree min_val = size_zero_node;
          if (type_domain && TYPE_MIN_VALUE (type_domain))
            min_val = TYPE_MIN_VALUE (type_domain);
          if (TREE_CODE (min_val) == INTEGER_CST)
            return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
        }
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
               && useless_type_conversion_p (type, TREE_TYPE (optype)))
        return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
               && useless_type_conversion_p (type, TREE_TYPE (optype)))
        {
          tree part_width = TYPE_SIZE (type);
          tree index = bitsize_int (0);
          return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
        }
    }

  /* *(p + CST) -> ...  */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree addr = TREE_OPERAND (sub, 0);
      tree off = TREE_OPERAND (sub, 1);
      tree addrtype;

      STRIP_NOPS (addr);
      addrtype = TREE_TYPE (addr);

      /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
      if (TREE_CODE (addr) == ADDR_EXPR
          && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
          && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
          && tree_fits_uhwi_p (off))
        {
          unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
          tree part_width = TYPE_SIZE (type);
          unsigned HOST_WIDE_INT part_widthi
            = tree_to_shwi (part_width) / BITS_PER_UNIT;
          unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
          tree index = bitsize_int (indexi);
          if (offset / part_widthi
              < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
            return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
                                part_width, index);
        }

      /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
      if (TREE_CODE (addr) == ADDR_EXPR
          && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
          && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
        {
          tree size = TYPE_SIZE_UNIT (type);
          if (tree_int_cst_equal (size, off))
            return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
        }

      /* *(p + CST) -> MEM_REF <p, CST>.  */
      if (TREE_CODE (addr) != ADDR_EXPR
          || DECL_P (TREE_OPERAND (addr, 0)))
        return fold_build2 (MEM_REF, type,
                            addr,
                            wide_int_to_tree (ptype, off));
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
      && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      tree osub = sub;
      sub = gimple_fold_indirect_ref (sub);
      if (!sub)
        sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
        min_val = TYPE_MIN_VALUE (type_domain);
      if (TREE_CODE (min_val) == INTEGER_CST)
        return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
/* Return true if CODE is an operation that when operating on signed
   integer types involves undefined behavior on overflow and the
   operation can be expressed with unsigned arithmetic.  */

bool
arith_code_with_undefined_signed_overflow (tree_code code)
{
  switch (code)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case NEGATE_EXPR:
    case POINTER_PLUS_EXPR:
      return true;
    default:
      return false;
    }
}
/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
   operation that can be transformed to unsigned arithmetic by converting
   its operand, carrying out the operation in the corresponding unsigned
   type and converting the result back to the original type.

   Returns a sequence of statements that replace STMT and also contain
   a modified form of STMT itself.  */

gimple_seq
rewrite_to_defined_overflow (gimple *stmt)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "rewriting stmt with undefined signed "
               "overflow ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
    }

  tree lhs = gimple_assign_lhs (stmt);
  tree type = unsigned_type_for (TREE_TYPE (lhs));
  gimple_seq stmts = NULL;
  for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
    {
      tree op = gimple_op (stmt, i);
      op = gimple_convert (&stmts, type, op);
      gimple_set_op (stmt, i, op);
    }
  gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
  gimple_seq_add_stmt (&stmts, stmt);
  gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
  gimple_seq_add_stmt (&stmts, cvt);

  return stmts;
}
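
/* For illustration (editorial; SSA names invented): the signed addition

     a_1 = b_2 + 1;

   is replaced by the returned sequence

     _3 = (unsigned int) b_2;
     _4 = _3 + 1;
     a_1 = (int) _4;

   so the addition itself can no longer overflow in the undefined sense.  */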
/* The valueization hook we use for the gimple_build API simplification.
   This makes us match fold_buildN behavior by only combining with
   statements in the sequence(s) we are currently building.  */

static tree
gimple_build_valueize (tree op)
{
  if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
    return op;
  return NULL_TREE;
}

/* Build the expression CODE OP0 of type TYPE with location LOC,
   simplifying it first if possible.  Returns the built
   expression value and appends statements possibly defining it
   to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
              enum tree_code code, tree type, tree op0)
{
  tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
  if (!res)
    {
      res = create_tmp_reg_or_ssa_name (type);
      gimple *stmt;
      if (code == REALPART_EXPR
          || code == IMAGPART_EXPR
          || code == VIEW_CONVERT_EXPR)
        stmt = gimple_build_assign (res, code, build1 (code, type, op0));
      else
        stmt = gimple_build_assign (res, code, op0);
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
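
/* Usage sketch (editorial; gsi, loc, val and type stand for a caller's
   context): a pass wanting the negation of VAL converted to TYPE can write

     gimple_seq stmts = NULL;
     tree res = gimple_build (&stmts, loc, NEGATE_EXPR, TREE_TYPE (val), val);
     res = gimple_convert (&stmts, loc, type, res);
     gsi_insert_seq_before (&gsi, stmts, GSI_SAME_STMT);

   statements are generated only when gimple_simplify cannot reduce the
   expression to an existing value.  */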
/* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
   simplifying it first if possible.  Returns the built
   expression value and appends statements possibly defining it
   to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
              enum tree_code code, tree type, tree op0, tree op1)
{
  tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
  if (!res)
    {
      res = create_tmp_reg_or_ssa_name (type);
      gimple *stmt = gimple_build_assign (res, code, op0, op1);
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
/* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
   simplifying it first if possible.  Returns the built
   expression value and appends statements possibly defining it
   to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
              enum tree_code code, tree type, tree op0, tree op1, tree op2)
{
  tree res = gimple_simplify (code, type, op0, op1, op2,
                              seq, gimple_build_valueize);
  if (!res)
    {
      res = create_tmp_reg_or_ssa_name (type);
      gimple *stmt;
      if (code == BIT_FIELD_REF)
        stmt = gimple_build_assign (res, code,
                                    build3 (code, type, op0, op1, op2));
      else
        stmt = gimple_build_assign (res, code, op0, op1, op2);
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
/* Build the call FN (ARG0) with a result of type TYPE
   (or no result if TYPE is void) with location LOC,
   simplifying it first if possible.  Returns the built
   expression value (or NULL_TREE if TYPE is void) and appends
   statements possibly defining it to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
              enum built_in_function fn, tree type, tree arg0)
{
  tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
  if (!res)
    {
      tree decl = builtin_decl_implicit (fn);
      gimple *stmt = gimple_build_call (decl, 1, arg0);
      if (!VOID_TYPE_P (type))
        {
          res = create_tmp_reg_or_ssa_name (type);
          gimple_call_set_lhs (stmt, res);
        }
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
/* Build the call FN (ARG0, ARG1) with a result of type TYPE
   (or no result if TYPE is void) with location LOC,
   simplifying it first if possible.  Returns the built
   expression value (or NULL_TREE if TYPE is void) and appends
   statements possibly defining it to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
              enum built_in_function fn, tree type, tree arg0, tree arg1)
{
  tree res = gimple_simplify (fn, type, arg0, arg1, seq, gimple_build_valueize);
  if (!res)
    {
      tree decl = builtin_decl_implicit (fn);
      gimple *stmt = gimple_build_call (decl, 2, arg0, arg1);
      if (!VOID_TYPE_P (type))
        {
          res = create_tmp_reg_or_ssa_name (type);
          gimple_call_set_lhs (stmt, res);
        }
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
/* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
   (or no result if TYPE is void) with location LOC,
   simplifying it first if possible.  Returns the built
   expression value (or NULL_TREE if TYPE is void) and appends
   statements possibly defining it to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
              enum built_in_function fn, tree type,
              tree arg0, tree arg1, tree arg2)
{
  tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
                              seq, gimple_build_valueize);
  if (!res)
    {
      tree decl = builtin_decl_implicit (fn);
      gimple *stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
      if (!VOID_TYPE_P (type))
        {
          res = create_tmp_reg_or_ssa_name (type);
          gimple_call_set_lhs (stmt, res);
        }
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
/* Build the conversion (TYPE) OP with a result of type TYPE
   with location LOC if such conversion is necessary in GIMPLE,
   simplifying it first.
   Returns the built expression value and appends
   statements possibly defining it to SEQ.  */

tree
gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
{
  if (useless_type_conversion_p (type, TREE_TYPE (op)))
    return op;
  return gimple_build (seq, loc, NOP_EXPR, type, op);
}
/* Build the conversion (ptrofftype) OP with a result of a type
   compatible with ptrofftype with location LOC if such conversion
   is necessary in GIMPLE, simplifying it first.
   Returns the built expression value and appends
   statements possibly defining it to SEQ.  */

tree
gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
{
  if (ptrofftype_p (TREE_TYPE (op)))
    return op;
  return gimple_convert (seq, loc, sizetype, op);
}
/* Return true if the result of assignment STMT is known to be non-negative.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

static bool
gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
                                   int depth)
{
  enum tree_code code = gimple_assign_rhs_code (stmt);
  switch (get_gimple_rhs_class (code))
    {
    case GIMPLE_UNARY_RHS:
      return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
                                             gimple_expr_type (stmt),
                                             gimple_assign_rhs1 (stmt),
                                             strict_overflow_p, depth);
    case GIMPLE_BINARY_RHS:
      return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
                                              gimple_expr_type (stmt),
                                              gimple_assign_rhs1 (stmt),
                                              gimple_assign_rhs2 (stmt),
                                              strict_overflow_p, depth);
    case GIMPLE_TERNARY_RHS:
      return false;
    case GIMPLE_SINGLE_RHS:
      return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
                                              strict_overflow_p, depth);
    case GIMPLE_INVALID_RHS:
      break;
    }
  gcc_unreachable ();
}
/* Return true if the return value of call STMT is known to be non-negative.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

static bool
gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
                                 int depth)
{
  tree arg0 = gimple_call_num_args (stmt) > 0 ?
    gimple_call_arg (stmt, 0) : NULL_TREE;
  tree arg1 = gimple_call_num_args (stmt) > 1 ?
    gimple_call_arg (stmt, 1) : NULL_TREE;

  return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
                                        gimple_call_combined_fn (stmt),
                                        arg0,
                                        arg1,
                                        strict_overflow_p, depth);
}
/* Return true if the result of PHI STMT is known to be non-negative.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

static bool
gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
                                int depth)
{
  for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
    {
      tree arg = gimple_phi_arg_def (stmt, i);
      if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
        return false;
    }
  return true;
}
/* Return true if STMT is known to compute a non-negative value.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

bool
gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
                                 int depth)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
                                                depth);
    case GIMPLE_CALL:
      return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
                                              depth);
    case GIMPLE_PHI:
      return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
                                             depth);
    default:
      return false;
    }
}
/* Return true if the floating-point value computed by assignment STMT
   is known to have an integer value.  We also allow +Inf, -Inf and NaN
   to be considered integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

static bool
gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
{
  enum tree_code code = gimple_assign_rhs_code (stmt);
  switch (get_gimple_rhs_class (code))
    {
    case GIMPLE_UNARY_RHS:
      return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
                                          gimple_assign_rhs1 (stmt), depth);
    case GIMPLE_BINARY_RHS:
      return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
                                           gimple_assign_rhs1 (stmt),
                                           gimple_assign_rhs2 (stmt), depth);
    case GIMPLE_TERNARY_RHS:
      return false;
    case GIMPLE_SINGLE_RHS:
      return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
    case GIMPLE_INVALID_RHS:
      break;
    }
  gcc_unreachable ();
}
/* Return true if the floating-point value computed by call STMT is known
   to have an integer value.  We also allow +Inf, -Inf and NaN to be
   considered integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

static bool
gimple_call_integer_valued_real_p (gimple *stmt, int depth)
{
  tree arg0 = (gimple_call_num_args (stmt) > 0
               ? gimple_call_arg (stmt, 0)
               : NULL_TREE);
  tree arg1 = (gimple_call_num_args (stmt) > 1
               ? gimple_call_arg (stmt, 1)
               : NULL_TREE);
  return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
                                     arg0, arg1, depth);
}
/* Return true if the floating-point result of phi STMT is known to have
   an integer value.  We also allow +Inf, -Inf and NaN to be considered
   integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

static bool
gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
{
  for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
    {
      tree arg = gimple_phi_arg_def (stmt, i);
      if (!integer_valued_real_single_p (arg, depth + 1))
        return false;
    }
  return true;
}
/* Return true if the floating-point value computed by STMT is known
   to have an integer value.  We also allow +Inf, -Inf and NaN to be
   considered integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

bool
gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      return gimple_assign_integer_valued_real_p (stmt, depth);
    case GIMPLE_CALL:
      return gimple_call_integer_valued_real_p (stmt, depth);
    case GIMPLE_PHI:
      return gimple_phi_integer_valued_real_p (stmt, depth);
    default:
      return false;
    }
}