/* Statement simplification on GIMPLE.
   Copyright (C) 2010-2016 Free Software Foundation, Inc.
   Split out from tree-ssa-ccp.c.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "gimple-fold.h"
#include "gimple-iterator.h"
#include "tree-into-ssa.h"
#include "tree-ssa-propagate.h"
#include "ipa-utils.h"
#include "tree-ssa-address.h"
#include "langhooks.h"
#include "gimplify-me.h"
#include "gimple-match.h"
#include "gomp-constants.h"
#include "optabs-query.h"
/* Return true when DECL can be referenced from the current unit.
   FROM_DECL (if non-null) specifies the constructor of the variable that
   DECL was taken from.
   We can get declarations that are not possible to reference for various
   reasons:

     1) When analyzing C++ virtual tables.
        C++ virtual tables do have known constructors even
        when they are keyed to other compilation units.
        Those tables can contain pointers to methods and vars
        in other units.  Those methods have both STATIC and EXTERNAL
        set.
     2) In WHOPR mode devirtualization might lead to a reference
        to a method that was partitioned elsewhere.
        In this case we have a static VAR_DECL or FUNCTION_DECL
        that has no corresponding callgraph/varpool node
        declaring the body.
     3) COMDAT functions referred to by external vtables that
        we devirtualize only during the final compilation stage.
        At this time we already decided that we will not output
        the function body and thus we can't reference the symbol
        directly.  */

static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  if (DECL_ABSTRACT_P (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL))
    return true;

  /* Static objects can be referred to only if they were not optimized
     out yet.  */
  if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
    {
      /* Before we start optimizing unreachable code we can be sure all
         static objects are defined.  */
      if (symtab->function_flags_ready)
        return true;
      snode = symtab_node::get (decl);
      if (!snode || !snode->definition)
        return false;
      node = dyn_cast <cgraph_node *> (snode);
      return !node || !node->global.inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from the initializer of
     an external var or a var that has been optimized out.  */
  if (!from_decl
      || TREE_CODE (from_decl) != VAR_DECL
      || (!DECL_EXTERNAL (from_decl)
          && (vnode = varpool_node::get (from_decl)) != NULL
          && vnode->definition)
      || (flag_ltrans
          && (vnode = varpool_node::get (from_decl)) != NULL
          && vnode->in_other_partition))
    return true;
  /* We are folding a reference from an external vtable.  The vtable may
     refer to a symbol keyed to another compilation unit.  The other
     compilation unit may be in a separate DSO and the symbol may be
     hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
    return false;
  /* When a function is public, we can always introduce a new reference.
     The exception are the COMDAT functions where introducing a direct
     reference implies the need to include the function body in the
     current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We have COMDAT.  We are going to check if we still have a definition
     or if the definition is going to be output in another partition.
     Bypass this when gimplifying; all needed functions will be produced.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting to not necessarily give up because the copy will be
     output elsewhere when the corresponding vtable is output.
     This is however not possible - the ABI specifies that COMDATs are output
     in units where they are used and when the other unit was compiled with
     LTO it is possible that the vtable was kept public while the function
     itself was privatized.  */
  if (!symtab->function_flags_ready)
    return true;

  snode = symtab_node::get (decl);
  if (!snode
      || ((!snode->definition || DECL_EXTERNAL (decl))
          && (!snode->in_other_partition
              || (!snode->forced_by_abi && !snode->force_output))))
    return false;
  node = dyn_cast <cgraph_node *> (snode);
  return !node || !node->global.inlined_to;
}
/* CVAL is a value taken from DECL_INITIAL of a variable.  Try to transform
   it into an acceptable form for is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specifies the variable whose constructor
   contains CVAL.  */

tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  tree orig_cval = cval;
  STRIP_NOPS (cval);
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
        cval = build1_loc (EXPR_LOCATION (cval),
                           ADDR_EXPR, TREE_TYPE (ptr),
                           fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
                                        ptr,
                                        fold_convert (ptr_type_node,
                                                      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
        {
          base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
          if (base)
            TREE_OPERAND (cval, 0) = base;
        }
      else
        base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
        return NULL_TREE;

      if ((TREE_CODE (base) == VAR_DECL
           || TREE_CODE (base) == FUNCTION_DECL)
          && !can_refer_decl_in_current_unit_p (base, from_decl))
        return NULL_TREE;
      if (TREE_TYPE (base) == error_mark_node)
        return NULL_TREE;
      if (TREE_CODE (base) == VAR_DECL)
        TREE_ADDRESSABLE (base) = 1;
      else if (TREE_CODE (base) == FUNCTION_DECL)
        {
          /* Make sure we create a cgraph node for functions we'll reference.
             They can be non-existent if the reference comes from an entry
             of an external vtable for example.  */
          cgraph_node::get_create (base);
        }
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
        cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
        cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  if (TREE_OVERFLOW_P (cval))
    return drop_tree_overflow (cval);
  return orig_cval;
}
/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

tree
get_symbol_constant_value (tree sym)
{
  tree val = ctor_for_folding (sym);
  if (val != error_mark_node)
    {
      if (val)
        {
          val = canonicalize_constructor_val (unshare_expr (val), sym);
          if (val && is_gimple_min_invariant (val))
            return val;
          else
            return NULL_TREE;
        }
      /* Variables declared 'const' without an initializer
         have zero as the initializer if they may not be
         overridden at link or run time.  */
      if (!val
          && is_gimple_reg_type (TREE_TYPE (sym)))
        return build_zero_cst (TREE_TYPE (sym));
    }

  return NULL_TREE;
}
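/* Editor's illustrative sketch (not part of the original source): given

     static const int answer = 42;
     int f (void) { return answer; }

   ctor_for_folding yields the INTEGER_CST 42 for ANSWER, so the load of
   ANSWER in F is replaced by the constant.  A 'const' variable that has
   no initializer and may not be overridden at link or run time instead
   folds to build_zero_cst of its type.  */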
/* Subroutine of fold_stmt.  We perform several simplifications of the
   memory reference tree EXPR and make sure to re-gimplify them properly
   after propagation of constant addresses.  IS_LHS is true if the
   reference is supposed to be an lvalue.  */

static tree
maybe_fold_reference (tree expr, bool is_lhs)
{
  tree result;

  if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
       || TREE_CODE (expr) == REALPART_EXPR
       || TREE_CODE (expr) == IMAGPART_EXPR)
      && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_unary_loc (EXPR_LOCATION (expr),
                           TREE_CODE (expr),
                           TREE_TYPE (expr),
                           TREE_OPERAND (expr, 0));
  else if (TREE_CODE (expr) == BIT_FIELD_REF
           && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_ternary_loc (EXPR_LOCATION (expr),
                             TREE_CODE (expr),
                             TREE_TYPE (expr),
                             TREE_OPERAND (expr, 0),
                             TREE_OPERAND (expr, 1),
                             TREE_OPERAND (expr, 2));

  if (!is_lhs
      && (result = fold_const_aggregate_ref (expr))
      && is_gimple_min_invariant (result))
    return result;

  return NULL_TREE;
}
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple *stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
        tree rhs = gimple_assign_rhs1 (stmt);

        if (TREE_CLOBBER_P (rhs))
          return NULL_TREE;

        if (REFERENCE_CLASS_P (rhs))
          return maybe_fold_reference (rhs, false);

        else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
          {
            tree val = OBJ_TYPE_REF_EXPR (rhs);
            if (is_gimple_min_invariant (val))
              return val;
            else if (flag_devirtualize && virtual_method_call_p (rhs))
              {
                bool final;
                vec <cgraph_node *>targets
                  = possible_polymorphic_call_targets (rhs, stmt, &final);
                if (final && targets.length () <= 1 && dbg_cnt (devirt))
                  {
                    if (dump_enabled_p ())
                      {
                        location_t loc = gimple_location_safe (stmt);
                        dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
                                         "resolving virtual function address "
                                         "reference to function %s\n",
                                         targets.length () == 1
                                         ? targets[0]->name ()
                                         : "NULL");
                      }
                    if (targets.length () == 1)
                      {
                        val = fold_convert (TREE_TYPE (val),
                                            build_fold_addr_expr_loc
                                              (loc, targets[0]->decl));
                        STRIP_USELESS_TYPE_CONVERSION (val);
                      }
                    else
                      /* We cannot use __builtin_unreachable here because it
                         cannot have its address taken.  */
                      val = build_int_cst (TREE_TYPE (val), 0);
                    return val;
                  }
              }
          }

        else if (TREE_CODE (rhs) == ADDR_EXPR)
          {
            tree ref = TREE_OPERAND (rhs, 0);
            tree tem = maybe_fold_reference (ref, true);
            if (tem
                && TREE_CODE (tem) == MEM_REF
                && integer_zerop (TREE_OPERAND (tem, 1)))
              result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
            else if (tem)
              result = fold_convert (TREE_TYPE (rhs),
                                     build_fold_addr_expr_loc (loc, tem));
            else if (TREE_CODE (ref) == MEM_REF
                     && integer_zerop (TREE_OPERAND (ref, 1)))
              result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));

            if (result)
              {
                /* Strip away useless type conversions.  Both the
                   NON_LVALUE_EXPR that may have been added by fold, and
                   "useless" type conversions that might now be apparent
                   due to propagation.  */
                STRIP_USELESS_TYPE_CONVERSION (result);

                if (result != rhs && valid_gimple_rhs_p (result))
                  return result;
              }
          }

        else if (TREE_CODE (rhs) == CONSTRUCTOR
                 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
          {
            /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
            unsigned i;
            tree val;

            FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
              if (! CONSTANT_CLASS_P (val))
                return NULL_TREE;

            return build_vector_from_ctor (TREE_TYPE (rhs),
                                           CONSTRUCTOR_ELTS (rhs));
          }

        else if (DECL_P (rhs))
          return get_symbol_constant_value (rhs);
      }
      break;

    case GIMPLE_UNARY_RHS:
      break;

    case GIMPLE_BINARY_RHS:
      break;

    case GIMPLE_TERNARY_RHS:
      result = fold_ternary_loc (loc, subcode,
                                 TREE_TYPE (gimple_assign_lhs (stmt)),
                                 gimple_assign_rhs1 (stmt),
                                 gimple_assign_rhs2 (stmt),
                                 gimple_assign_rhs3 (stmt));

      if (result)
        {
          STRIP_USELESS_TYPE_CONVERSION (result);
          if (valid_gimple_rhs_p (result))
            return result;
        }
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
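/* Editor's illustrative sketch (not part of the original source): with
   -fdevirtualize, for C++ source such as

     struct S { virtual int f (); };
     int g (S *p) { return p->f (); }

   when type analysis proves S::f is the only possible target of the
   OBJ_TYPE_REF, the loaded vtable slot folds to &S::f and the indirect
   call can later become a direct one; a provably empty target list
   folds the address to 0 instead.  */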
/* Replace a statement at *SI_P with a sequence of statements in STMTS,
   adjusting the replacement stmts' location and virtual operands.
   If the statement has an lhs the last stmt in the sequence is expected
   to assign to that lhs.  */

static void
gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
{
  gimple *stmt = gsi_stmt (*si_p);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  gimple *laststore = NULL;
  for (gimple_stmt_iterator i = gsi_last (stmts);
       !gsi_end_p (i); gsi_prev (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      if ((gimple_assign_single_p (new_stmt)
           && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
          || (is_gimple_call (new_stmt)
              && (gimple_call_flags (new_stmt)
                  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
        {
          tree vdef;
          if (!laststore)
            vdef = gimple_vdef (stmt);
          else
            vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
          gimple_set_vdef (new_stmt, vdef);
          if (vdef && TREE_CODE (vdef) == SSA_NAME)
            SSA_NAME_DEF_STMT (vdef) = new_stmt;
          laststore = new_stmt;
        }
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  tree reaching_vuse = gimple_vuse (stmt);
  for (gimple_stmt_iterator i = gsi_start (stmts);
       !gsi_end_p (i); gsi_next (&i))
    {
      gimple *new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with the exact
         SSA name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
        gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
        reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store, release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
          && TREE_CODE (vdef) == SSA_NAME)
        {
          unlink_stmt_vdef (stmt);
          release_ssa_name (vdef);
        }
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL,
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple *stmt, *new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  push_gimplify_context (gimple_in_ssa_p (cfun));

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
         which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
        {
          pop_gimplify_context (NULL);
          if (gimple_in_ssa_p (cfun))
            {
              unlink_stmt_vdef (stmt);
              release_defs (stmt);
            }
          gsi_replace (si_p, gimple_build_nop (), false);
          return;
        }
    }
  else
    {
      tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
                                       GSI_CONTINUE_LINKING);
    }

  pop_gimplify_context (NULL);

  gsi_replace_with_seq_vops (si_p, stmts);
}
/* Replace the call at *GSI with the gimple value VAL.  */

static void
replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  gimple *repl;
  if (lhs)
    {
      if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
        val = fold_convert (TREE_TYPE (lhs), val);
      repl = gimple_build_assign (lhs, val);
    }
  else
    repl = gimple_build_nop ();
  tree vdef = gimple_vdef (stmt);
  if (vdef && TREE_CODE (vdef) == SSA_NAME)
    {
      unlink_stmt_vdef (stmt);
      release_ssa_name (vdef);
    }
  gsi_replace (gsi, repl, false);
}
/* Replace the call at *GSI with the new call REPL and fold that
   away.  */

static void
replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
{
  gimple *stmt = gsi_stmt (*gsi);
  gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
  gimple_set_location (repl, gimple_location (stmt));
  if (gimple_vdef (stmt)
      && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
    {
      gimple_set_vdef (repl, gimple_vdef (stmt));
      gimple_set_vuse (repl, gimple_vuse (stmt));
      SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
    }
  gsi_replace (gsi, repl, false);
  fold_stmt (gsi);
}
/* Return true if VAR is a VAR_DECL or a component thereof.  */

static bool
var_decl_component_p (tree var)
{
  tree inner = var;
  while (handled_component_p (inner))
    inner = TREE_OPERAND (inner, 0);
  return SSA_VAR_P (inner);
}
/* Fold function call to builtin mem{{,p}cpy,move}.  Return
   false if no simplification can be made.
   If ENDP is 0, return DEST (like memcpy).
   If ENDP is 1, return DEST+LEN (like mempcpy).
   If ENDP is 2, return DEST+LEN-1 (like stpcpy).
   If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
   (memmove).  */

static bool
gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
                               tree dest, tree src, int endp)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_call_lhs (stmt);
  tree len = gimple_call_arg (stmt, 2);
  tree destvar, srcvar;
  location_t loc = gimple_location (stmt);

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      gimple *repl;
      if (gimple_call_lhs (stmt))
        repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
      else
        repl = gimple_build_nop ();
      tree vdef = gimple_vdef (stmt);
      if (vdef && TREE_CODE (vdef) == SSA_NAME)
        {
          unlink_stmt_vdef (stmt);
          release_ssa_name (vdef);
        }
      gsi_replace (gsi, repl, false);
      return true;
    }

  /* If SRC and DEST are the same (and not volatile), return
     DEST{,+LEN,+LEN-1}.  */
  if (operand_equal_p (src, dest, 0))
    {
      unlink_stmt_vdef (stmt);
      if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
        release_ssa_name (gimple_vdef (stmt));
      if (!lhs)
        {
          gsi_replace (gsi, gimple_build_nop (), false);
          return true;
        }
      goto done;
    }
  else
    {
      tree srctype, desttype;
      unsigned int src_align, dest_align;
      tree off0;

      /* Inlining of memcpy/memmove may cause bounds to be lost (if we copy
         pointers as a wide integer) and also may result in huge function
         size because of inlined bounds copy.  Thus don't inline for
         functions we want to instrument.  */
      if (flag_check_pointer_bounds
          && chkp_instrumentable_p (cfun->decl)
          /* Even if data may contain pointers we can inline if we copy
             less than a pointer size.  */
          && (!tree_fits_uhwi_p (len)
              || compare_tree_int (len, POINTER_SIZE_UNITS) >= 0))
        return false;

      /* Build accesses at offset zero with a ref-all character type.  */
      off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
                                                         ptr_mode, true), 0);

      /* If we can perform the copy efficiently with first doing all loads
         and then all stores inline it that way.  Currently efficiently
         means that we can load all the memory into a single integer
         register which is what MOVE_MAX gives us.  */
      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (tree_fits_uhwi_p (len)
          && compare_tree_int (len, MOVE_MAX) <= 0
          /* ???  Don't transform copies from strings with known length;
             this confuses tree-ssa-strlen.c.  This doesn't handle
             the case in gcc.dg/strlenopt-8.c which is XFAILed for that
             reason.  */
          && !c_strlen (src, 2))
        {
          unsigned ilen = tree_to_uhwi (len);
          if (exact_log2 (ilen) != -1)
            {
              tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
              if (type
                  && TYPE_MODE (type) != BLKmode
                  && (GET_MODE_SIZE (TYPE_MODE (type)) * BITS_PER_UNIT
                      == ilen * 8)
                  /* If the destination pointer is not aligned we must be able
                     to emit an unaligned store.  */
                  && (dest_align >= GET_MODE_ALIGNMENT (TYPE_MODE (type))
                      || !SLOW_UNALIGNED_ACCESS (TYPE_MODE (type), dest_align)
                      || (optab_handler (movmisalign_optab, TYPE_MODE (type))
                          != CODE_FOR_nothing)))
                {
                  tree srctype = type;
                  tree desttype = type;
                  if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
                    srctype = build_aligned_type (type, src_align);
                  tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
                  tree tem = fold_const_aggregate_ref (srcmem);
                  if (tem)
                    srcmem = tem;
                  else if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type))
                           && SLOW_UNALIGNED_ACCESS (TYPE_MODE (type),
                                                     src_align)
                           && (optab_handler (movmisalign_optab,
                                              TYPE_MODE (type))
                               == CODE_FOR_nothing))
                    srcmem = NULL_TREE;
                  if (srcmem)
                    {
                      gimple *new_stmt;
                      if (is_gimple_reg_type (TREE_TYPE (srcmem)))
                        {
                          new_stmt = gimple_build_assign (NULL_TREE, srcmem);
                          if (gimple_in_ssa_p (cfun))
                            srcmem = make_ssa_name (TREE_TYPE (srcmem),
                                                    new_stmt);
                          else
                            srcmem = create_tmp_reg (TREE_TYPE (srcmem));
                          gimple_assign_set_lhs (new_stmt, srcmem);
                          gimple_set_vuse (new_stmt, gimple_vuse (stmt));
                          gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
                        }
                      if (dest_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
                        desttype = build_aligned_type (type, dest_align);
                      new_stmt
                        = gimple_build_assign (fold_build2 (MEM_REF, desttype,
                                                            dest, off0),
                                               srcmem);
                      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
                      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
                      if (gimple_vdef (new_stmt)
                          && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
                        SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
                      if (!lhs)
                        {
                          gsi_replace (gsi, new_stmt, false);
                          return true;
                        }
                      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
                      goto done;
                    }
                }
            }
        }

      if (endp == 3)
        {
          /* Both DEST and SRC must be pointer types.
             ??? This is what old code did.  Is the testing for pointer types
             really mandatory?

             If either SRC is readonly or length is 1, we can use memcpy.  */
          if (!dest_align || !src_align)
            return false;
          if (readonly_data_expr (src)
              || (tree_fits_uhwi_p (len)
                  && (MIN (src_align, dest_align) / BITS_PER_UNIT
                      >= tree_to_uhwi (len))))
            {
              tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
              if (!fn)
                return false;
              gimple_call_set_fndecl (stmt, fn);
              gimple_call_set_arg (stmt, 0, dest);
              gimple_call_set_arg (stmt, 1, src);
              fold_stmt (gsi);
              return true;
            }

          /* If *src and *dest can't overlap, optimize into memcpy as well.  */
          if (TREE_CODE (src) == ADDR_EXPR
              && TREE_CODE (dest) == ADDR_EXPR)
            {
              tree src_base, dest_base, fn;
              HOST_WIDE_INT src_offset = 0, dest_offset = 0;
              HOST_WIDE_INT maxsize;

              srcvar = TREE_OPERAND (src, 0);
              src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
              if (src_base == NULL)
                src_base = srcvar;
              destvar = TREE_OPERAND (dest, 0);
              dest_base = get_addr_base_and_unit_offset (destvar,
                                                         &dest_offset);
              if (dest_base == NULL)
                dest_base = destvar;
              if (tree_fits_uhwi_p (len))
                maxsize = tree_to_uhwi (len);
              else
                maxsize = -1;
              if (SSA_VAR_P (src_base)
                  && SSA_VAR_P (dest_base))
                {
                  if (operand_equal_p (src_base, dest_base, 0)
                      && ranges_overlap_p (src_offset, maxsize,
                                           dest_offset, maxsize))
                    return false;
                }
              else if (TREE_CODE (src_base) == MEM_REF
                       && TREE_CODE (dest_base) == MEM_REF)
                {
                  if (! operand_equal_p (TREE_OPERAND (src_base, 0),
                                         TREE_OPERAND (dest_base, 0), 0))
                    return false;
                  offset_int off = mem_ref_offset (src_base) + src_offset;
                  if (!wi::fits_shwi_p (off))
                    return false;
                  src_offset = off.to_shwi ();

                  off = mem_ref_offset (dest_base) + dest_offset;
                  if (!wi::fits_shwi_p (off))
                    return false;
                  dest_offset = off.to_shwi ();
                  if (ranges_overlap_p (src_offset, maxsize,
                                        dest_offset, maxsize))
                    return false;
                }
              else
                return false;

              fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
              if (!fn)
                return false;
              gimple_call_set_fndecl (stmt, fn);
              gimple_call_set_arg (stmt, 0, dest);
              gimple_call_set_arg (stmt, 1, src);
              fold_stmt (gsi);
              return true;
            }

          /* If the destination and source do not alias optimize into
             memcpy as well.  */
          if ((is_gimple_min_invariant (dest)
               || TREE_CODE (dest) == SSA_NAME)
              && (is_gimple_min_invariant (src)
                  || TREE_CODE (src) == SSA_NAME))
            {
              ao_ref destr, srcr;
              ao_ref_init_from_ptr_and_size (&destr, dest, len);
              ao_ref_init_from_ptr_and_size (&srcr, src, len);
              if (!refs_may_alias_p_1 (&destr, &srcr, false))
                {
                  tree fn;
                  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
                  if (!fn)
                    return false;
                  gimple_call_set_fndecl (stmt, fn);
                  gimple_call_set_arg (stmt, 0, dest);
                  gimple_call_set_arg (stmt, 1, src);
                  fold_stmt (gsi);
                  return true;
                }
            }

          return false;
        }

      if (!tree_fits_shwi_p (len))
        return false;
      /* FIXME:
         This logic loses for arguments like (type *)malloc (sizeof (type)),
         since we strip the casts of up to VOID return value from malloc.
         Perhaps we ought to inherit type from non-VOID argument here?  */
      STRIP_NOPS (src);
      STRIP_NOPS (dest);
      if (!POINTER_TYPE_P (TREE_TYPE (src))
          || !POINTER_TYPE_P (TREE_TYPE (dest)))
        return false;
      /* In the following try to find a type that is most natural to be
         used for the memcpy source and destination and that allows
         the most optimization when memcpy is turned into a plain assignment
         using that type.  In theory we could always use a char[len] type
         but that only gains us that the destination and source possibly
         no longer will have their address taken.  */
      /* As we fold (void *)(p + CST) to (void *)p + CST undo this here.  */
      if (TREE_CODE (src) == POINTER_PLUS_EXPR)
        {
          tree tem = TREE_OPERAND (src, 0);
          STRIP_NOPS (tem);
          if (tem != TREE_OPERAND (src, 0))
            src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
        }
      if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
        {
          tree tem = TREE_OPERAND (dest, 0);
          STRIP_NOPS (tem);
          if (tem != TREE_OPERAND (dest, 0))
            dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
        }
      srctype = TREE_TYPE (TREE_TYPE (src));
      if (TREE_CODE (srctype) == ARRAY_TYPE
          && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
        {
          srctype = TREE_TYPE (srctype);
          STRIP_NOPS (src);
          src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
        }
      desttype = TREE_TYPE (TREE_TYPE (dest));
      if (TREE_CODE (desttype) == ARRAY_TYPE
          && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
        {
          desttype = TREE_TYPE (desttype);
          STRIP_NOPS (dest);
          dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
        }
      if (TREE_ADDRESSABLE (srctype)
          || TREE_ADDRESSABLE (desttype))
        return false;

      /* Make sure we are not copying using a floating-point mode or
         a type whose size possibly does not match its precision.  */
      if (FLOAT_MODE_P (TYPE_MODE (desttype))
          || TREE_CODE (desttype) == BOOLEAN_TYPE
          || TREE_CODE (desttype) == ENUMERAL_TYPE)
        desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
      if (FLOAT_MODE_P (TYPE_MODE (srctype))
          || TREE_CODE (srctype) == BOOLEAN_TYPE
          || TREE_CODE (srctype) == ENUMERAL_TYPE)
        srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
      if (!srctype)
        srctype = desttype;
      if (!desttype)
        desttype = srctype;
      if (!srctype)
        return false;

      src_align = get_pointer_alignment (src);
      dest_align = get_pointer_alignment (dest);
      if (dest_align < TYPE_ALIGN (desttype)
          || src_align < TYPE_ALIGN (srctype))
        return false;

      destvar = dest;
      STRIP_NOPS (destvar);
      if (TREE_CODE (destvar) == ADDR_EXPR
          && var_decl_component_p (TREE_OPERAND (destvar, 0))
          && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
        destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
      else
        destvar = NULL_TREE;

      srcvar = src;
      STRIP_NOPS (srcvar);
      if (TREE_CODE (srcvar) == ADDR_EXPR
          && var_decl_component_p (TREE_OPERAND (srcvar, 0))
          && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
        {
          if (!destvar
              || src_align >= TYPE_ALIGN (desttype))
            srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
                                  srcvar, off0);
          else if (!STRICT_ALIGNMENT)
            {
              srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
                                            src_align);
              srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
            }
          else
            srcvar = NULL_TREE;
        }
      else
        srcvar = NULL_TREE;

      if (srcvar == NULL_TREE && destvar == NULL_TREE)
        return false;

      if (srcvar == NULL_TREE)
        {
          STRIP_NOPS (src);
          if (src_align >= TYPE_ALIGN (desttype))
            srcvar = fold_build2 (MEM_REF, desttype, src, off0);
          else
            {
              if (STRICT_ALIGNMENT)
                return false;
              srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
                                            src_align);
              srcvar = fold_build2 (MEM_REF, srctype, src, off0);
            }
        }
      else if (destvar == NULL_TREE)
        {
          STRIP_NOPS (dest);
          if (dest_align >= TYPE_ALIGN (srctype))
            destvar = fold_build2 (MEM_REF, srctype, dest, off0);
          else
            {
              if (STRICT_ALIGNMENT)
                return false;
              desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
                                             dest_align);
              destvar = fold_build2 (MEM_REF, desttype, dest, off0);
            }
        }

      gimple *new_stmt;
      if (is_gimple_reg_type (TREE_TYPE (srcvar)))
        {
          tree tem = fold_const_aggregate_ref (srcvar);
          if (tem)
            srcvar = tem;
          if (! is_gimple_min_invariant (srcvar))
            {
              new_stmt = gimple_build_assign (NULL_TREE, srcvar);
              if (gimple_in_ssa_p (cfun))
                srcvar = make_ssa_name (TREE_TYPE (srcvar), new_stmt);
              else
                srcvar = create_tmp_reg (TREE_TYPE (srcvar));
              gimple_assign_set_lhs (new_stmt, srcvar);
              gimple_set_vuse (new_stmt, gimple_vuse (stmt));
              gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
            }
        }
      new_stmt = gimple_build_assign (destvar, srcvar);
      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
      if (gimple_vdef (new_stmt)
          && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
        SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
      if (!lhs)
        {
          gsi_replace (gsi, new_stmt, false);
          return true;
        }
      gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
    }

done:
  gimple_seq stmts = NULL;
  if (endp == 0 || endp == 3)
    len = NULL_TREE;
  else if (endp == 2)
    len = gimple_build (&stmts, loc, MINUS_EXPR, TREE_TYPE (len), len,
                        ssize_int (1));
  if (endp == 2 || endp == 1)
    {
      len = gimple_convert_to_ptrofftype (&stmts, loc, len);
      dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
                           TREE_TYPE (dest), dest, len);
    }

  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gimple *repl = gimple_build_assign (lhs, dest);
  gsi_replace (gsi, repl, false);
  return true;
}
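/* Editor's illustrative sketch (not part of the original source): with
   4-byte-aligned int objects a and b,

     memcpy (&a, &b, 4);

   satisfies the single-register criterion above (4 <= MOVE_MAX, power
   of two) and is inlined as a load/store pair:

     _1 = MEM[(unsigned int *)&b];
     MEM[(unsigned int *)&a] = _1;  */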
/* Fold function call to builtin memset or bzero at *GSI setting the
   memory of size LEN to VAL.  Return whether a simplification was made.  */

static bool
gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree etype;
  unsigned HOST_WIDE_INT length, cval;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
      return true;
    }

  if (! tree_fits_uhwi_p (len))
    return false;

  if (TREE_CODE (c) != INTEGER_CST)
    return false;

  tree dest = gimple_call_arg (stmt, 0);
  tree var = dest;
  if (TREE_CODE (var) != ADDR_EXPR)
    return false;

  var = TREE_OPERAND (var, 0);
  if (TREE_THIS_VOLATILE (var))
    return false;

  etype = TREE_TYPE (var);
  if (TREE_CODE (etype) == ARRAY_TYPE)
    etype = TREE_TYPE (etype);

  if (!INTEGRAL_TYPE_P (etype)
      && !POINTER_TYPE_P (etype))
    return false;

  if (! var_decl_component_p (var))
    return false;

  length = tree_to_uhwi (len);
  if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
      || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
    return false;

  if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
    return false;

  if (integer_zerop (c))
    cval = 0;
  else
    {
      if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
        return false;

      cval = TREE_INT_CST_LOW (c);
      cval &= 0xff;
      cval |= cval << 8;
      cval |= cval << 16;
      cval |= (cval << 31) << 1;
    }

  var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
  gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
  gimple_set_vuse (store, gimple_vuse (stmt));
  tree vdef = gimple_vdef (stmt);
  if (vdef && TREE_CODE (vdef) == SSA_NAME)
    {
      gimple_set_vdef (store, gimple_vdef (stmt));
      SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
    }
  gsi_insert_before (gsi, store, GSI_SAME_STMT);
  if (gimple_call_lhs (stmt))
    {
      gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
      gsi_replace (gsi, asgn, false);
    }
  else
    {
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (gsi);
      gsi_remove (&gsi2, true);
    }

  return true;
}
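/* Editor's illustrative sketch (not part of the original source): for a
   4-byte, suitably aligned destination,

     int i;
     memset (&i, 0xab, sizeof (i));

   is folded to a single store of the replicated byte value:

     MEM[(int *)&i] = 0xabababab;  */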
/* Return the string length, maximum string length or maximum value of
   ARG in LENGTH.
   If ARG is an SSA name variable, follow its use-def chains.  If LENGTH
   is not NULL and, for TYPE == 0, its value is not equal to the length
   we determine or if we are unable to determine the length or value,
   return false.  VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be returned, 1 for maximum string
   length and 2 for maximum value ARG can have.  */

static bool
get_maxval_strlen (tree arg, tree *length, bitmap *visited, int type)
{
  tree var, val;
  gimple *def_stmt;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      if (TREE_CODE (arg) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
          && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
        {
          tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
          if (TREE_CODE (aop0) == INDIRECT_REF
              && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
            return get_maxval_strlen (TREE_OPERAND (aop0, 0),
                                      length, visited, type);
        }

      if (type == 2)
        {
          val = arg;
          if (TREE_CODE (val) != INTEGER_CST
              || tree_int_cst_sgn (val) < 0)
            return false;
        }
      else
        val = c_strlen (arg, 1);
      if (!val)
        return false;

      if (*length)
        {
          if (type > 0)
            {
              if (TREE_CODE (*length) != INTEGER_CST
                  || TREE_CODE (val) != INTEGER_CST)
                return false;

              if (tree_int_cst_lt (*length, val))
                *length = val;
              return true;
            }
          else if (simple_cst_equal (val, *length) != 1)
            return false;
        }

      *length = val;
      return true;
    }

  /* If ARG is registered for SSA update we cannot look at its defining
     statement.  */
  if (name_registered_for_update_p (arg))
    return false;

  /* If we were already here, break the infinite cycle.  */
  if (!*visited)
    *visited = BITMAP_ALLOC (NULL);
  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
      case GIMPLE_ASSIGN:
        /* The RHS of the statement defining VAR must either have a
           constant length or come from another SSA_NAME with a constant
           length.  */
        if (gimple_assign_single_p (def_stmt)
            || gimple_assign_unary_nop_p (def_stmt))
          {
            tree rhs = gimple_assign_rhs1 (def_stmt);
            return get_maxval_strlen (rhs, length, visited, type);
          }
        else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
          {
            tree op2 = gimple_assign_rhs2 (def_stmt);
            tree op3 = gimple_assign_rhs3 (def_stmt);
            return get_maxval_strlen (op2, length, visited, type)
                   && get_maxval_strlen (op3, length, visited, type);
          }
        return false;

      case GIMPLE_PHI:
        {
          /* All the arguments of the PHI node must have the same constant
             length.  */
          unsigned i;

          for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
            {
              tree arg = gimple_phi_arg (def_stmt, i)->def;

              /* If this PHI has itself as an argument, we cannot
                 determine the string length of this argument.  However,
                 if we can find a constant string length for the other
                 PHI args then we can still be sure that this is a
                 constant string length.  So be optimistic and just
                 continue with the next argument.  */
              if (arg == gimple_phi_result (def_stmt))
                continue;

              if (!get_maxval_strlen (arg, length, visited, type))
                return false;
            }
        }
        return true;

      default:
        return false;
    }
}

tree
get_maxval_strlen (tree arg, int type)
{
  bitmap visited = NULL;
  tree len = NULL_TREE;
  if (!get_maxval_strlen (arg, &len, &visited, type))
    len = NULL_TREE;
  if (visited)
    BITMAP_FREE (visited);

  return len;
}
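/* Editor's illustrative sketch (not part of the original source): for

     s_1 = PHI <"ab"(bb 2), "wxyz"(bb 3)>

   get_maxval_strlen (s_1, 1) visits both PHI arguments and returns 4,
   the maximum of the two string lengths, while TYPE == 0 (exact length)
   fails because the lengths differ.  */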
/* Fold function call to builtin strcpy with arguments DEST and SRC.
   Return false if no simplification can be made.  */

static bool
gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
                            tree dest, tree src)
{
  location_t loc = gimple_location (gsi_stmt (*gsi));
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (operand_equal_p (src, dest, 0))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (optimize_function_for_size_p (cfun))
    return false;

  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  tree len = get_maxval_strlen (src, 0);
  if (!len)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
  len = force_gimple_operand_gsi (gsi, len, true,
                                  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
   Return false if no simplification can be made.  */

static bool
gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
                             tree dest, tree src, tree len)
{
  location_t loc = gimple_location (gsi_stmt (*gsi));
  tree fn;

  /* If the LEN parameter is zero, return DEST.  */
  if (integer_zerop (len))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  /* We can't compare slen with len as constants below if len is not a
     constant.  */
  if (TREE_CODE (len) != INTEGER_CST)
    return false;

  /* Now, we must be passed a constant src ptr parameter.  */
  tree slen = get_maxval_strlen (src, 0);
  if (!slen || TREE_CODE (slen) != INTEGER_CST)
    return false;

  slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));

  /* We do not support simplification of this case, though we do
     support it when expanding trees into RTL.  */
  /* FIXME: generate a call to __builtin_memset.  */
  if (tree_int_cst_lt (slen, len))
    return false;

  /* OK, transform into builtin memcpy.  */
  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  len = fold_convert_loc (loc, size_type_node, len);
  len = force_gimple_operand_gsi (gsi, len, true,
                                  NULL_TREE, true, GSI_SAME_STMT);
  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Simplify a call to the strcat builtin.  DST and SRC are the arguments
   to the call.

   Return false if no simplification was possible, otherwise return true
   after replacing the call with a more efficient sequence (possibly
   including calls to other builtin functions).  */

static bool
gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);

  const char *p = c_getstr (src);

  /* If the string length is zero, return the dst parameter.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
    return false;

  /* See if we can store by pieces into (dst + strlen(dst)).  */
  tree newdst;
  tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
  tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);

  if (!strlen_fn || !memcpy_fn)
    return false;

  /* If the length of the source string isn't computable don't
     split strcat into strlen and memcpy.  */
  tree len = get_maxval_strlen (src, 0);
  if (! len)
    return false;

  /* Create strlen (dst).  */
  gimple_seq stmts = NULL, stmts2;
  gimple *repl = gimple_build_call (strlen_fn, 1, dst);
  gimple_set_location (repl, loc);
  if (gimple_in_ssa_p (cfun))
    newdst = make_ssa_name (size_type_node);
  else
    newdst = create_tmp_reg (size_type_node);
  gimple_call_set_lhs (repl, newdst);
  gimple_seq_add_stmt_without_update (&stmts, repl);

  /* Create (dst p+ strlen (dst)).  */
  newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
  newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  len = fold_convert_loc (loc, size_type_node, len);
  len = size_binop_loc (loc, PLUS_EXPR, len,
                        build_int_cst (size_type_node, 1));
  len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
  gimple_seq_add_seq_without_update (&stmts, stmts2);

  repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
  gimple_seq_add_stmt_without_update (&stmts, repl);
  if (gimple_call_lhs (stmt))
    {
      repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      gsi_replace_with_seq_vops (gsi, stmts);
      /* gsi now points at the assignment to the lhs, get a
         stmt iterator to the memcpy call.
         ???  We can't use gsi_for_stmt as that doesn't work when the
         CFG isn't built yet.  */
      gimple_stmt_iterator gsi2 = *gsi;
      gsi_prev (&gsi2);
      fold_stmt (&gsi2);
    }
  else
    {
      gsi_replace_with_seq_vops (gsi, stmts);
      fold_stmt (gsi);
    }
  return true;
}
/* Fold a call to the __strcat_chk builtin.  DEST, SRC, and SIZE
   are the arguments to the call.  Return false if no simplification
   can be made.  */

static bool
gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree size = gimple_call_arg (stmt, 2);
  tree fn;
  const char *p;

  p = c_getstr (src);
  /* If the SRC parameter is "", return DEST.  */
  if (p && *p == '\0')
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
    return false;

  /* If __builtin_strcat_chk is used, assume strcat is available.  */
  fn = builtin_decl_explicit (BUILT_IN_STRCAT);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 2, dest, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Simplify a call to the strncat builtin.  */

static bool
gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dst = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);

  const char *p = c_getstr (src);

  /* If the requested length is zero, or the src parameter string
     length is zero, return the dst parameter.  */
  if (integer_zerop (len) || (p && *p == '\0'))
    {
      replace_call_with_value (gsi, dst);
      return true;
    }

  /* If the requested len is greater than or equal to the string
     length, call strcat.  */
  if (TREE_CODE (len) == INTEGER_CST && p
      && compare_tree_int (len, strlen (p)) >= 0)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);

      /* If the replacement _DECL isn't initialized, don't do the
         transformation.  */
      if (!fn)
        return false;

      gcall *repl = gimple_build_call (fn, 2, dst, src);
      replace_call_with_call_and_fold (gsi, repl);
      return true;
    }

  return false;
}
/* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
   LEN, and SIZE.  */

static bool
gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree len = gimple_call_arg (stmt, 2);
  tree size = gimple_call_arg (stmt, 3);
  tree fn;
  const char *p;

  p = c_getstr (src);
  /* If the SRC parameter is "" or if LEN is 0, return DEST.  */
  if ((p && *p == '\0')
      || integer_zerop (len))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  if (! integer_all_onesp (size))
    {
      tree src_len = c_strlen (src, 1);
      if (src_len
          && tree_fits_uhwi_p (src_len)
          && tree_fits_uhwi_p (len)
          && ! tree_int_cst_lt (len, src_len))
        {
          /* If LEN >= strlen (SRC), optimize into __strcat_chk.  */
          fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
          if (!fn)
            return false;

          gimple *repl = gimple_build_call (fn, 3, dest, src, size);
          replace_call_with_call_and_fold (gsi, repl);
          return true;
        }
      return false;
    }

  /* If __builtin_strncat_chk is used, assume strncat is available.  */
  fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold a call to the fputs builtin.  ARG0 and ARG1 are the arguments
   to the call.  UNLOCKED is true if this is actually a call to
   fputs_unlocked.  Return false if no simplification can be made.  */

static bool
gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
                           tree arg0, tree arg1,
                           bool unlocked)
{
  gimple *stmt = gsi_stmt (*gsi);

  /* If we're using an unlocked function, assume the other unlocked
     functions exist explicitly.  */
  tree const fn_fputc = (unlocked
                         ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
                         : builtin_decl_implicit (BUILT_IN_FPUTC));
  tree const fn_fwrite = (unlocked
                          ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
                          : builtin_decl_implicit (BUILT_IN_FWRITE));

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt))
    return false;

  /* Get the length of the string passed to fputs.  If the length
     can't be determined, punt.  */
  tree len = get_maxval_strlen (arg0, 0);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return false;

  switch (compare_tree_int (len, 1))
    {
    case -1: /* length is 0, delete the call entirely.  */
      replace_call_with_value (gsi, integer_zero_node);
      return true;

    case 0: /* length is 1, call fputc.  */
      {
        const char *p = c_getstr (arg0);
        if (p != NULL)
          {
            if (!fn_fputc)
              return false;

            gimple *repl = gimple_build_call (fn_fputc, 2,
                                              build_int_cst
                                                (integer_type_node, p[0]), arg1);
            replace_call_with_call_and_fold (gsi, repl);
            return true;
          }
      }
      /* FALLTHROUGH */
    case 1: /* length is greater than 1, call fwrite.  */
      {
        /* If optimizing for size keep fputs.  */
        if (optimize_function_for_size_p (cfun))
          return false;
        /* New argument list transforming fputs(string, stream) to
           fwrite(string, 1, len, stream).  */
        if (!fn_fwrite)
          return false;

        gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
                                          size_one_node, len, arg1);
        replace_call_with_call_and_fold (gsi, repl);
        return true;
      }
    default:
      gcc_unreachable ();
    }
}
/* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
   DEST, SRC, LEN, and SIZE are the arguments to the call.
   FCODE is the BUILT_IN_* code of the builtin.  Return false if no
   simplification can be made.  */

static bool
gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
                                tree dest, tree src, tree len, tree size,
                                enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  /* If SRC and DEST are the same (and not volatile), return DEST
     (resp. DEST+LEN for __mempcpy_chk).  */
  if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
    {
      if (fcode != BUILT_IN_MEMPCPY_CHK)
        {
          replace_call_with_value (gsi, dest);
          return true;
        }
      else
        {
          gimple_seq stmts = NULL;
          len = gimple_convert_to_ptrofftype (&stmts, loc, len);
          tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
                                    TREE_TYPE (dest), dest, len);
          gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
          replace_call_with_value (gsi, temp);
          return true;
        }
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (len, 2);
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
        {
          /* If LEN is not constant, try MAXLEN too.
             For MAXLEN only allow optimizing into non-_ocs function
             if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
          if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
            {
              if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
                {
                  /* (void) __mempcpy_chk () can be optimized into
                     (void) __memcpy_chk ().  */
                  fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
                  if (!fn)
                    return false;

                  gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
                  replace_call_with_call_and_fold (gsi, repl);
                  return true;
                }
              return false;
            }
        }
      else
        maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
        return false;
    }

  fn = NULL_TREE;
  /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
     mem{cpy,pcpy,move,set} is available.  */
  switch (fcode)
    {
    case BUILT_IN_MEMCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
      break;
    case BUILT_IN_MEMPCPY_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
      break;
    case BUILT_IN_MEMMOVE_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
      break;
    case BUILT_IN_MEMSET_CHK:
      fn = builtin_decl_explicit (BUILT_IN_MEMSET);
      break;
    default:
      break;
    }

  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold a call to the __st[rp]cpy_chk builtin.
   DEST, SRC, and SIZE are the arguments to the call.
   FCODE is the BUILT_IN_* code of the builtin.  Return false if no
   simplification can be made.  */

static bool
gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
                                tree dest,
                                tree src, tree size,
                                enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  location_t loc = gimple_location (stmt);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree len, fn;

  /* If SRC and DEST are the same (and not volatile), return DEST.  */
  if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
    {
      replace_call_with_value (gsi, dest);
      return true;
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (src, 1);
  if (! integer_all_onesp (size))
    {
      len = c_strlen (src, 1);
      if (! len || ! tree_fits_uhwi_p (len))
        {
          /* If LEN is not constant, try MAXLEN too.
             For MAXLEN only allow optimizing into non-_ocs function
             if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
          if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
            {
              if (fcode == BUILT_IN_STPCPY_CHK)
                {
                  if (! ignore)
                    return false;

                  /* If return value of __stpcpy_chk is ignored,
                     optimize into __strcpy_chk.  */
                  fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
                  if (!fn)
                    return false;

                  gimple *repl = gimple_build_call (fn, 3, dest, src, size);
                  replace_call_with_call_and_fold (gsi, repl);
                  return true;
                }

              if (! len || TREE_SIDE_EFFECTS (len))
                return false;

              /* If c_strlen returned something, but not a constant,
                 transform __strcpy_chk into __memcpy_chk.  */
              fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
              if (!fn)
                return false;

              gimple_seq stmts = NULL;
              len = gimple_convert (&stmts, loc, size_type_node, len);
              len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
                                  build_int_cst (size_type_node, 1));
              gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
              gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
              replace_call_with_call_and_fold (gsi, repl);
              return true;
            }
        }
      else
        maxlen = len;

      if (! tree_int_cst_lt (maxlen, size))
        return false;
    }

  /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
                              ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 2, dest, src);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold a call to the __st{r,p}ncpy_chk builtin.  DEST, SRC, LEN, and SIZE
   are the arguments to the call.  FCODE is the BUILT_IN_* code of the
   builtin.  Return false if no simplification can be made.  */

static bool
gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
                                 tree dest, tree src,
                                 tree len, tree size,
                                 enum built_in_function fcode)
{
  gimple *stmt = gsi_stmt (*gsi);
  bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
  tree fn;

  if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
    {
       /* If return value of __stpncpy_chk is ignored,
          optimize into __strncpy_chk.  */
       fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
       if (fn)
         {
           gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
           replace_call_with_call_and_fold (gsi, repl);
           return true;
         }
    }

  if (! tree_fits_uhwi_p (size))
    return false;

  tree maxlen = get_maxval_strlen (len, 2);
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
        {
          /* If LEN is not constant, try MAXLEN too.
             For MAXLEN only allow optimizing into non-_ocs function
             if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
          if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
            return false;
        }
      else
        maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
        return false;
    }

  /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
                              ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
  if (!fn)
    return false;

  gimple *repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
/* Fold function call to builtin stpcpy with arguments DEST and SRC.
   Return false if no simplification can be made.  */

static bool
gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  location_t loc = gimple_location (stmt);
  tree dest = gimple_call_arg (stmt, 0);
  tree src = gimple_call_arg (stmt, 1);
  tree fn, len, lenp1;

  /* If the result is unused, replace stpcpy with strcpy.  */
  if (gimple_call_lhs (stmt) == NULL_TREE)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
        return false;
      gimple_call_set_fndecl (stmt, fn);
      fold_stmt (gsi);
      return true;
    }

  len = c_strlen (src, 1);
  if (!len
      || TREE_CODE (len) != INTEGER_CST)
    return false;

  if (optimize_function_for_size_p (cfun)
      /* If length is zero it's small enough.  */
      && !integer_zerop (len))
    return false;

  /* If the source has a known length replace stpcpy with memcpy.  */
  fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
  if (!fn)
    return false;

  gimple_seq stmts = NULL;
  tree tem = gimple_convert (&stmts, loc, size_type_node, len);
  lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
                        tem, build_int_cst (size_type_node, 1));
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
  gimple_set_vuse (repl, gimple_vuse (stmt));
  gimple_set_vdef (repl, gimple_vdef (stmt));
  if (gimple_vdef (repl)
      && TREE_CODE (gimple_vdef (repl)) == SSA_NAME)
    SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
  gsi_insert_before (gsi, repl, GSI_SAME_STMT);
  /* Replace the result with dest + len.  */
  stmts = NULL;
  tem = gimple_convert (&stmts, loc, sizetype, len);
  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
                                      POINTER_PLUS_EXPR, dest, tem);
  gsi_replace (gsi, ret, false);
  /* Finally fold the memcpy call.  */
  gimple_stmt_iterator gsi2 = *gsi;
  gsi_prev (&gsi2);
  fold_stmt (&gsi2);
  return true;
}
/* Fold a call to __{,v}snprintf_chk at *GSI.  Return false if a normal
   call should be emitted rather than simplifying the call inline.
   FCODE is either BUILT_IN_SNPRINTF_CHK or BUILT_IN_VSNPRINTF_CHK.  */

static bool
gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
                                  enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call.  */
  if (gimple_call_num_args (stmt) < 5)
    return false;

  dest = gimple_call_arg (stmt, 0);
  len = gimple_call_arg (stmt, 1);
  flag = gimple_call_arg (stmt, 2);
  size = gimple_call_arg (stmt, 3);
  fmt = gimple_call_arg (stmt, 4);

  if (! tree_fits_uhwi_p (size))
    return false;

  if (! integer_all_onesp (size))
    {
      tree maxlen = get_maxval_strlen (len, 2);
      if (! tree_fits_uhwi_p (len))
        {
          /* If LEN is not constant, try MAXLEN too.
             For MAXLEN only allow optimizing into non-_ocs function
             if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
          if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
            return false;
        }
      else
        maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
        return false;
    }

  if (!init_target_chars ())
    return false;

  /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      fmt_str = c_getstr (fmt);
      if (fmt_str == NULL)
        return false;
      if (strchr (fmt_str, target_percent) != NULL
          && strcmp (fmt_str, target_percent_s))
        return false;
    }

  /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
     available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
                              ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 5 arguments by 3 retaining
     trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, len);
  gimple_call_set_arg (stmt, 2, fmt);
  for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
/* Fold a call to __{,v}sprintf_chk at *GSI.  Return false if a normal
   call should be emitted rather than expanding the function inline;
   otherwise fold the call and return true.  FCODE is either
   BUILT_IN_SPRINTF_CHK or BUILT_IN_VSPRINTF_CHK.  */

static bool
gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
                                 enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;
  unsigned nargs = gimple_call_num_args (stmt);

  /* Verify the required arguments in the original call.  */
  if (nargs < 4)
    return false;
  dest = gimple_call_arg (stmt, 0);
  flag = gimple_call_arg (stmt, 1);
  size = gimple_call_arg (stmt, 2);
  fmt = gimple_call_arg (stmt, 3);

  if (! tree_fits_uhwi_p (size))
    return false;

  len = NULL_TREE;

  if (!init_target_chars ())
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str != NULL)
    {
      /* If the format doesn't contain % args or %%, we know the size.  */
      if (strchr (fmt_str, target_percent) == 0)
        {
          if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
            len = build_int_cstu (size_type_node, strlen (fmt_str));
        }
      /* If the format is "%s" and the first ... argument is a string
         literal, we know the size too.  */
      else if (fcode == BUILT_IN_SPRINTF_CHK
               && strcmp (fmt_str, target_percent_s) == 0)
        {
          tree arg;

          if (nargs == 5)
            {
              arg = gimple_call_arg (stmt, 4);
              if (POINTER_TYPE_P (TREE_TYPE (arg)))
                {
                  len = c_strlen (arg, 1);
                  if (! len || ! tree_fits_uhwi_p (len))
                    len = NULL_TREE;
                }
            }
        }
    }

  if (! integer_all_onesp (size))
    {
      if (! len || ! tree_int_cst_lt (len, size))
        return false;
    }

  /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
     or if the format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      if (fmt_str == NULL)
        return false;
      if (strchr (fmt_str, target_percent) != NULL
          && strcmp (fmt_str, target_percent_s))
        return false;
    }

  /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
                              ? BUILT_IN_VSPRINTF
                              : BUILT_IN_SPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 4 arguments by 2,
     retaining trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, fmt);
  for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
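
/* Illustrative example (an editor's sketch, not part of the original
   sources): with a constant format whose output is known to fit the
   destination object,

     __builtin___sprintf_chk (buf, 0, 16, "hello");
   =>
     sprintf (buf, "hello");

   since strlen ("hello") == 5 is smaller than the object size 16.  */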
/* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
   ORIG may be null if this is a 2-argument call.  We don't attempt to
   simplify calls with more than 3 arguments.

   Return true if simplification was possible, otherwise false.  */

static bool
gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree dest = gimple_call_arg (stmt, 0);
  tree fmt = gimple_call_arg (stmt, 1);
  tree orig = NULL_TREE;
  const char *fmt_str = NULL;

  /* Verify the required arguments in the original call.  We deal with two
     types of sprintf() calls: 'sprintf (str, fmt)' and
     'sprintf (dest, "%s", orig)'.  */
  if (gimple_call_num_args (stmt) > 3)
    return false;

  if (gimple_call_num_args (stmt) == 3)
    orig = gimple_call_arg (stmt, 2);

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);

      if (!fn)
        return false;

      /* Don't optimize sprintf (buf, "abc", ptr++).  */
      if (orig)
        return false;

      /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
         'format' is known to contain no % formats.  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, fmt);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
        {
          repl = gimple_build_assign (gimple_call_lhs (stmt),
                                      build_int_cst (integer_type_node,
                                                     strlen (fmt_str)));
          gimple_seq_add_stmt_without_update (&stmts, repl);
          gsi_replace_with_seq_vops (gsi, stmts);
          /* gsi now points at the assignment to the lhs, get a
             stmt iterator to the memcpy call.
             ???  We can't use gsi_for_stmt as that doesn't work when the
             CFG isn't built yet.  */
          gimple_stmt_iterator gsi2 = *gsi;
          gsi_prev (&gsi2);
          fold_stmt (&gsi2);
        }
      else
        {
          gsi_replace_with_seq_vops (gsi, stmts);
          fold_stmt (gsi);
        }
      return true;
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn;
      fn = builtin_decl_implicit (BUILT_IN_STRCPY);

      if (!fn)
        return false;

      /* Don't crash on sprintf (str1, "%s").  */
      if (!orig)
        return false;

      tree orig_len = NULL_TREE;
      if (gimple_call_lhs (stmt))
        {
          orig_len = get_maxval_strlen (orig, 0);
          if (!orig_len)
            return false;
        }

      /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2).  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, orig);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
        {
          if (!useless_type_conversion_p (integer_type_node,
                                          TREE_TYPE (orig_len)))
            orig_len = fold_convert (integer_type_node, orig_len);
          repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
          gimple_seq_add_stmt_without_update (&stmts, repl);
          gsi_replace_with_seq_vops (gsi, stmts);
          /* gsi now points at the assignment to the lhs, get a
             stmt iterator to the memcpy call.
             ???  We can't use gsi_for_stmt as that doesn't work when the
             CFG isn't built yet.  */
          gimple_stmt_iterator gsi2 = *gsi;
          gsi_prev (&gsi2);
          fold_stmt (&gsi2);
        }
      else
        {
          gsi_replace_with_seq_vops (gsi, stmts);
          fold_stmt (gsi);
        }
      return true;
    }
  return false;
}
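
/* Illustrative examples (an editor's sketch, not part of the original
   sources) of the two shapes handled above:

     sprintf (buf, "hello");    =>  strcpy (buf, "hello");  the lhs, if
                                    used, becomes the constant 5
     sprintf (buf, "%s", str);  =>  strcpy (buf, str);      when the result
                                    is unused or strlen (str) is known  */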
/* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
   FMT, and ORIG.  ORIG may be null if this is a 3-argument call.  We don't
   attempt to simplify calls with more than 4 arguments.

   Return true if simplification was possible, otherwise false.  */

static bool
gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree dest = gimple_call_arg (stmt, 0);
  tree destsize = gimple_call_arg (stmt, 1);
  tree fmt = gimple_call_arg (stmt, 2);
  tree orig = NULL_TREE;
  const char *fmt_str = NULL;

  if (gimple_call_num_args (stmt) > 4)
    return false;

  if (gimple_call_num_args (stmt) == 4)
    orig = gimple_call_arg (stmt, 3);

  if (!tree_fits_uhwi_p (destsize))
    return false;
  unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use strcpy.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
        return false;

      /* Don't optimize snprintf (buf, 4, "abc", ptr++).  */
      if (orig)
        return false;

      /* We could expand this as
         memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
         or to
         memcpy (str, fmt_with_nul_at_cstm1, cst);
         but in the former case that might increase code size
         and in the latter case grow .rodata section too much.
         So punt for now.  */
      size_t len = strlen (fmt_str);
      if (len >= destlen)
        return false;

      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, fmt);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
        {
          repl = gimple_build_assign (gimple_call_lhs (stmt),
                                      build_int_cst (integer_type_node, len));
          gimple_seq_add_stmt_without_update (&stmts, repl);
          gsi_replace_with_seq_vops (gsi, stmts);
          /* gsi now points at the assignment to the lhs, get a
             stmt iterator to the memcpy call.
             ???  We can't use gsi_for_stmt as that doesn't work when the
             CFG isn't built yet.  */
          gimple_stmt_iterator gsi2 = *gsi;
          gsi_prev (&gsi2);
          fold_stmt (&gsi2);
        }
      else
        {
          gsi_replace_with_seq_vops (gsi, stmts);
          fold_stmt (gsi);
        }
      return true;
    }

  /* If the format is "%s", use strcpy if the result isn't used.  */
  else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
    {
      tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
      if (!fn)
        return false;

      /* Don't crash on snprintf (str1, cst, "%s").  */
      if (!orig)
        return false;

      tree orig_len = get_maxval_strlen (orig, 0);
      if (!orig_len || TREE_CODE (orig_len) != INTEGER_CST)
        return false;

      /* We could expand this as
         memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
         or to
         memcpy (str1, str2_with_nul_at_cstm1, cst);
         but in the former case that might increase code size
         and in the latter case grow .rodata section too much.
         So punt for now.  */
      if (compare_tree_int (orig_len, destlen) >= 0)
        return false;

      /* Convert snprintf (str1, cst, "%s", str2) into
         strcpy (str1, str2) if strlen (str2) < cst.  */
      gimple_seq stmts = NULL;
      gimple *repl = gimple_build_call (fn, 2, dest, orig);
      gimple_seq_add_stmt_without_update (&stmts, repl);
      if (gimple_call_lhs (stmt))
        {
          if (!useless_type_conversion_p (integer_type_node,
                                          TREE_TYPE (orig_len)))
            orig_len = fold_convert (integer_type_node, orig_len);
          repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
          gimple_seq_add_stmt_without_update (&stmts, repl);
          gsi_replace_with_seq_vops (gsi, stmts);
          /* gsi now points at the assignment to the lhs, get a
             stmt iterator to the memcpy call.
             ???  We can't use gsi_for_stmt as that doesn't work when the
             CFG isn't built yet.  */
          gimple_stmt_iterator gsi2 = *gsi;
          gsi_prev (&gsi2);
          fold_stmt (&gsi2);
        }
      else
        {
          gsi_replace_with_seq_vops (gsi, stmts);
          fold_stmt (gsi);
        }
      return true;
    }
  return false;
}
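
/* Illustrative examples (an editor's sketch, not part of the original
   sources):

     snprintf (buf, 16, "hello");    =>  strcpy (buf, "hello");
       because strlen ("hello") == 5 < 16
     snprintf (buf, 16, "%s", str);  =>  strcpy (buf, str);
       if strlen (str) is a constant smaller than 16  */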
/* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
   FP, FMT, and ARG are the arguments to the call.  We don't fold calls with
   more than 3 arguments, and ARG may be null in the 2-argument case.

   Return true if simplification was possible, otherwise false.  FCODE is
   the BUILT_IN_* code of the function to be simplified.  */

static bool
gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
                             tree fp, tree fmt, tree arg,
                             enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree fn_fputc, fn_fputs;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt) != NULL_TREE)
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
         unlocked functions exist explicitly.  */
      fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
      fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
    }
  else
    {
      fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
      fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
    }

  if (!init_target_chars ())
    return false;

  /* If the format doesn't contain % args or %%, use fputs.  */
  if (strchr (fmt_str, target_percent) == NULL)
    {
      if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
          && arg)
        return false;

      /* If the format specifier was "", fprintf does nothing.  */
      if (fmt_str[0] == '\0')
        {
          replace_call_with_value (gsi, NULL_TREE);
          return true;
        }

      /* When "string" doesn't contain %, replace all cases of
         fprintf (fp, string) with fputs (string, fp).  The fputs
         builtin will take care of special cases like length == 1.  */
      if (fn_fputs)
        {
          gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
          replace_call_with_call_and_fold (gsi, repl);
          return true;
        }
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
    return false;

  /* If the format specifier was "%s", call __builtin_fputs (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_s) == 0)
    {
      if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
        return false;
      if (fn_fputs)
        {
          gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
          replace_call_with_call_and_fold (gsi, repl);
          return true;
        }
    }

  /* If the format specifier was "%c", call __builtin_fputc (arg, fp).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg
          || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
        return false;
      if (fn_fputc)
        {
          gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
          replace_call_with_call_and_fold (gsi, repl);
          return true;
        }
    }

  return false;
}
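
/* Illustrative examples (an editor's sketch, not part of the original
   sources), all applicable only when the call's return value is unused:

     fprintf (fp, "hello");    =>  fputs ("hello", fp);
     fprintf (fp, "%s", str);  =>  fputs (str, fp);
     fprintf (fp, "%c", c);    =>  fputc (c, fp);  */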
/* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
   FMT and ARG are the arguments to the call; we don't fold cases with
   more than 2 arguments, and ARG may be null if this is a 1-argument case.

   Return true if simplification was possible, otherwise false.  FCODE is
   the BUILT_IN_* code of the function to be simplified.  */

static bool
gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
                            tree arg, enum built_in_function fcode)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree fn_putchar, fn_puts, newarg;
  const char *fmt_str = NULL;

  /* If the return value is used, don't do the transformation.  */
  if (gimple_call_lhs (stmt) != NULL_TREE)
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str == NULL)
    return false;

  if (fcode == BUILT_IN_PRINTF_UNLOCKED)
    {
      /* If we're using an unlocked function, assume the other
         unlocked functions exist explicitly.  */
      fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
      fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
    }
  else
    {
      fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
      fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
    }

  if (!init_target_chars ())
    return false;

  if (strcmp (fmt_str, target_percent_s) == 0
      || strchr (fmt_str, target_percent) == NULL)
    {
      const char *str;

      if (strcmp (fmt_str, target_percent_s) == 0)
        {
          if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
            return false;

          if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
            return false;

          str = c_getstr (arg);
          if (str == NULL)
            return false;
        }
      else
        {
          /* The format specifier doesn't contain any '%' characters.  */
          if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
              && arg)
            return false;
          str = fmt_str;
        }

      /* If the string was "", printf does nothing.  */
      if (str[0] == '\0')
        {
          replace_call_with_value (gsi, NULL_TREE);
          return true;
        }

      /* If the string has length of 1, call putchar.  */
      if (str[1] == '\0')
        {
          /* Given printf("c"), (where c is any one character,)
             convert "c"[0] to an int and pass that to the replacement
             function.  */
          newarg = build_int_cst (integer_type_node, str[0]);
          if (fn_putchar)
            {
              gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
              replace_call_with_call_and_fold (gsi, repl);
              return true;
            }
        }
      else
        {
          /* If the string was "string\n", call puts("string").  */
          size_t len = strlen (str);
          if ((unsigned char)str[len - 1] == target_newline
              && (size_t) (int) len == len)
            {
              char *newstr;
              tree offset_node, string_cst;

              /* Create a NUL-terminated string that's one char shorter
                 than the original, stripping off the trailing '\n'.  */
              newarg = build_string_literal (len, str);
              string_cst = string_constant (newarg, &offset_node);
              gcc_checking_assert (string_cst
                                   && (TREE_STRING_LENGTH (string_cst)
                                       == (int) len)
                                   && integer_zerop (offset_node)
                                   && (unsigned char)
                                      TREE_STRING_POINTER (string_cst)[len - 1]
                                      == target_newline);
              /* build_string_literal creates a new STRING_CST,
                 modify it in place to avoid double copying.  */
              newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
              newstr[len - 1] = '\0';
              if (fn_puts)
                {
                  gcall *repl = gimple_build_call (fn_puts, 1, newarg);
                  replace_call_with_call_and_fold (gsi, repl);
                  return true;
                }
            }
          else
            /* We'd like to arrange to call fputs(string,stdout) here,
               but we need stdout and don't have a way to get it yet.  */
            return false;
        }
    }

  /* The other optimizations can be done only on the non-va_list variants.  */
  else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
    return false;

  /* If the format specifier was "%s\n", call __builtin_puts(arg).  */
  else if (strcmp (fmt_str, target_percent_s_newline) == 0)
    {
      if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
        return false;
      if (fn_puts)
        {
          gcall *repl = gimple_build_call (fn_puts, 1, arg);
          replace_call_with_call_and_fold (gsi, repl);
          return true;
        }
    }

  /* If the format specifier was "%c", call __builtin_putchar(arg).  */
  else if (strcmp (fmt_str, target_percent_c) == 0)
    {
      if (!arg || ! useless_type_conversion_p (integer_type_node,
                                               TREE_TYPE (arg)))
        return false;
      if (fn_putchar)
        {
          gcall *repl = gimple_build_call (fn_putchar, 1, arg);
          replace_call_with_call_and_fold (gsi, repl);
          return true;
        }
    }

  return false;
}
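
/* Illustrative examples (an editor's sketch, not part of the original
   sources), all applicable only when the call's return value is unused:

     printf ("x");          =>  putchar ('x');
     printf ("hello\n");    =>  puts ("hello");
     printf ("%s\n", str);  =>  puts (str);
     printf ("%c", c);      =>  putchar (c);  */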
/* Fold a call to __builtin_strlen when the length of the argument is
   known.  */

static bool
gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree len = get_maxval_strlen (gimple_call_arg (stmt, 0), 0);
  if (!len)
    return false;
  len = force_gimple_operand_gsi (gsi, len, true, NULL, true, GSI_SAME_STMT);
  replace_call_with_value (gsi, len);
  return true;
}
/* Fold a call to __builtin_acc_on_device.  */

static bool
gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
{
  /* Defer folding until we know which compiler we're in.  */
  if (symtab->state != EXPANSION)
    return false;

  unsigned val_host = GOMP_DEVICE_HOST;
  unsigned val_dev = GOMP_DEVICE_NONE;

#ifdef ACCEL_COMPILER
  val_host = GOMP_DEVICE_NOT_HOST;
  val_dev = ACCEL_COMPILER_acc_device;
#endif

  location_t loc = gimple_location (gsi_stmt (*gsi));

  tree host_eq = make_ssa_name (boolean_type_node);
  gimple *host_ass = gimple_build_assign
    (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
  gimple_set_location (host_ass, loc);
  gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);

  tree dev_eq = make_ssa_name (boolean_type_node);
  gimple *dev_ass = gimple_build_assign
    (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
  gimple_set_location (dev_ass, loc);
  gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);

  tree result = make_ssa_name (boolean_type_node);
  gimple *result_ass = gimple_build_assign
    (result, BIT_IOR_EXPR, host_eq, dev_eq);
  gimple_set_location (result_ass, loc);
  gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);

  replace_call_with_value (gsi, result);

  return true;
}
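
/* Illustrative example (an editor's sketch, not part of the original
   sources): at expansion time

     r = __builtin_acc_on_device (d);

   becomes the pair of comparisons

     _1 = d == <val_host>;
     _2 = d == <val_dev>;
     r = _1 | _2;

   with the two constants chosen per compiler (host or accelerator).  */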
/* Fold the non-target builtin at *GSI and return whether any simplification
   was made.  */

static bool
gimple_fold_builtin (gimple_stmt_iterator *gsi)
{
  gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
  tree callee = gimple_call_fndecl (stmt);

  /* Give up for always_inline inline builtins until they are
     inlined.  */
  if (avoid_folding_inline_builtin (callee))
    return false;

  unsigned n = gimple_call_num_args (stmt);
  enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
  switch (fcode)
    {
    case BUILT_IN_BZERO:
      return gimple_fold_builtin_memset (gsi, integer_zero_node,
                                         gimple_call_arg (stmt, 1));
    case BUILT_IN_MEMSET:
      return gimple_fold_builtin_memset (gsi,
                                         gimple_call_arg (stmt, 1),
                                         gimple_call_arg (stmt, 2));
    case BUILT_IN_BCOPY:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 1),
                                            gimple_call_arg (stmt, 0), 3);
    case BUILT_IN_MEMCPY:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
                                            gimple_call_arg (stmt, 1), 0);
    case BUILT_IN_MEMPCPY:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
                                            gimple_call_arg (stmt, 1), 1);
    case BUILT_IN_MEMMOVE:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
                                            gimple_call_arg (stmt, 1), 3);
    case BUILT_IN_SPRINTF_CHK:
    case BUILT_IN_VSPRINTF_CHK:
      return gimple_fold_builtin_sprintf_chk (gsi, fcode);
    case BUILT_IN_STRCAT_CHK:
      return gimple_fold_builtin_strcat_chk (gsi);
    case BUILT_IN_STRNCAT_CHK:
      return gimple_fold_builtin_strncat_chk (gsi);
    case BUILT_IN_STRLEN:
      return gimple_fold_builtin_strlen (gsi);
    case BUILT_IN_STRCPY:
      return gimple_fold_builtin_strcpy (gsi,
                                         gimple_call_arg (stmt, 0),
                                         gimple_call_arg (stmt, 1));
    case BUILT_IN_STRNCPY:
      return gimple_fold_builtin_strncpy (gsi,
                                          gimple_call_arg (stmt, 0),
                                          gimple_call_arg (stmt, 1),
                                          gimple_call_arg (stmt, 2));
    case BUILT_IN_STRCAT:
      return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
                                         gimple_call_arg (stmt, 1));
    case BUILT_IN_STRNCAT:
      return gimple_fold_builtin_strncat (gsi);
    case BUILT_IN_FPUTS:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
                                        gimple_call_arg (stmt, 1), false);
    case BUILT_IN_FPUTS_UNLOCKED:
      return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
                                        gimple_call_arg (stmt, 1), true);
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      return gimple_fold_builtin_memory_chk (gsi,
                                             gimple_call_arg (stmt, 0),
                                             gimple_call_arg (stmt, 1),
                                             gimple_call_arg (stmt, 2),
                                             gimple_call_arg (stmt, 3),
                                             fcode);
    case BUILT_IN_STPCPY:
      return gimple_fold_builtin_stpcpy (gsi);
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      return gimple_fold_builtin_stxcpy_chk (gsi,
                                             gimple_call_arg (stmt, 0),
                                             gimple_call_arg (stmt, 1),
                                             gimple_call_arg (stmt, 2),
                                             fcode);
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      return gimple_fold_builtin_stxncpy_chk (gsi,
                                              gimple_call_arg (stmt, 0),
                                              gimple_call_arg (stmt, 1),
                                              gimple_call_arg (stmt, 2),
                                              gimple_call_arg (stmt, 3),
                                              fcode);
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      return gimple_fold_builtin_snprintf_chk (gsi, fcode);
    case BUILT_IN_SNPRINTF:
      return gimple_fold_builtin_snprintf (gsi);
    case BUILT_IN_SPRINTF:
      return gimple_fold_builtin_sprintf (gsi);
    case BUILT_IN_FPRINTF:
    case BUILT_IN_FPRINTF_UNLOCKED:
    case BUILT_IN_VFPRINTF:
      if (n == 2 || n == 3)
        return gimple_fold_builtin_fprintf (gsi,
                                            gimple_call_arg (stmt, 0),
                                            gimple_call_arg (stmt, 1),
                                            n == 3
                                            ? gimple_call_arg (stmt, 2)
                                            : NULL_TREE,
                                            fcode);
      break;
    case BUILT_IN_FPRINTF_CHK:
    case BUILT_IN_VFPRINTF_CHK:
      if (n == 3 || n == 4)
        return gimple_fold_builtin_fprintf (gsi,
                                            gimple_call_arg (stmt, 0),
                                            gimple_call_arg (stmt, 2),
                                            n == 4
                                            ? gimple_call_arg (stmt, 3)
                                            : NULL_TREE,
                                            fcode);
      break;
    case BUILT_IN_PRINTF:
    case BUILT_IN_PRINTF_UNLOCKED:
    case BUILT_IN_VPRINTF:
      if (n == 1 || n == 2)
        return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
                                           n == 2
                                           ? gimple_call_arg (stmt, 1)
                                           : NULL_TREE, fcode);
      break;
    case BUILT_IN_PRINTF_CHK:
    case BUILT_IN_VPRINTF_CHK:
      if (n == 2 || n == 3)
        return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
                                           n == 3
                                           ? gimple_call_arg (stmt, 2)
                                           : NULL_TREE, fcode);
      break;
    case BUILT_IN_ACC_ON_DEVICE:
      return gimple_fold_builtin_acc_on_device (gsi,
                                                gimple_call_arg (stmt, 0));
    default:;
    }

  /* Try the generic builtin folder.  */
  bool ignore = (gimple_call_lhs (stmt) == NULL);
  tree result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
        STRIP_NOPS (result);
      else
        result = fold_convert (gimple_call_return_type (stmt), result);
      if (!update_call_from_tree (gsi, result))
        gimplify_and_update_call_from_tree (gsi, result);
      return true;
    }

  return false;
}
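
/* Illustrative note (an editor's sketch, not part of the original
   sources): the dispatch above also canonicalizes the legacy argument
   orders, e.g.

     bzero (p, n)     is folded like  memset (p, 0, n)
     bcopy (s, d, n)  is folded like  memmove (d, s, n)

   by permuting the call arguments before the shared helpers run.  */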
/* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
   function calls to constants, where possible.  */

static tree
fold_internal_goacc_dim (const gimple *call)
{
  int axis = get_oacc_ifn_dim_arg (call);
  int size = get_oacc_fn_dim_size (current_function_decl, axis);
  bool is_pos = gimple_call_internal_fn (call) == IFN_GOACC_DIM_POS;
  tree result = NULL_TREE;

  /* If the size is 1, or we only want the size and it is not dynamic,
     we know the answer.  */
  if (size == 1 || (!is_pos && size))
    {
      tree type = TREE_TYPE (gimple_call_lhs (call));
      result = build_int_cst (type, size - is_pos);
    }

  return result;
}
/* Return true if stmt is __atomic_compare_exchange_N call which is suitable
   for conversion into ATOMIC_COMPARE_EXCHANGE if the second argument is
   &var where var is only addressable because of such calls.  */

bool
optimize_atomic_compare_exchange_p (gimple *stmt)
{
  if (gimple_call_num_args (stmt) != 6
      || !flag_inline_atomics
      || !optimize
      || (flag_sanitize & (SANITIZE_THREAD | SANITIZE_ADDRESS)) != 0
      || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
      || !gimple_vdef (stmt)
      || !gimple_vuse (stmt))
    return false;

  tree fndecl = gimple_call_fndecl (stmt);
  switch (DECL_FUNCTION_CODE (fndecl))
    {
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
    case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
      break;
    default:
      return false;
    }

  tree expected = gimple_call_arg (stmt, 1);
  if (TREE_CODE (expected) != ADDR_EXPR
      || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
    return false;

  tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
  if (!is_gimple_reg_type (etype)
      || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
      || TREE_THIS_VOLATILE (etype)
      || VECTOR_TYPE_P (etype)
      || TREE_CODE (etype) == COMPLEX_TYPE
      /* Don't optimize floating point expected vars, VIEW_CONVERT_EXPRs
         might not preserve all the bits.  See PR71716.  */
      || SCALAR_FLOAT_TYPE_P (etype)
      || TYPE_PRECISION (etype) != GET_MODE_BITSIZE (TYPE_MODE (etype)))
    return false;

  tree weak = gimple_call_arg (stmt, 3);
  if (!integer_zerop (weak) && !integer_onep (weak))
    return false;

  tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
  machine_mode mode = TYPE_MODE (itype);

  if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
      == CODE_FOR_nothing
      && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
    return false;

  if (int_size_in_bytes (etype) != GET_MODE_SIZE (mode))
    return false;

  return true;
}
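
/* Illustrative example (an editor's sketch, not part of the original
   sources): a call such as

     int e = 0;
     r = __atomic_compare_exchange_4 (p, &e, 1, 0, __ATOMIC_SEQ_CST,
                                      __ATOMIC_SEQ_CST);

   qualifies when `e' is a local integer whose address escapes only
   into calls of this form.  */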
/* Fold
     r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
   into
     _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
     i = IMAGPART_EXPR <t>;
     r = (_Bool) i;
     e = REALPART_EXPR <t>;  */

void
fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  tree fndecl = gimple_call_fndecl (stmt);
  tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
  tree ctype = build_complex_type (itype);
  tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
  gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
                                   expected);
  gsi_insert_before (gsi, g, GSI_SAME_STMT);
  gimple_stmt_iterator gsiret = gsi_for_stmt (g);
  if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
    {
      g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
                               build1 (VIEW_CONVERT_EXPR, itype,
                                       gimple_assign_lhs (g)));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
    }
  int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
             + int_size_in_bytes (itype);
  g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
                                  gimple_call_arg (stmt, 0),
                                  gimple_assign_lhs (g),
                                  gimple_call_arg (stmt, 2),
                                  build_int_cst (integer_type_node, flag),
                                  gimple_call_arg (stmt, 4),
                                  gimple_call_arg (stmt, 5));
  tree lhs = make_ssa_name (ctype);
  gimple_call_set_lhs (g, lhs);
  gimple_set_vdef (g, gimple_vdef (stmt));
  gimple_set_vuse (g, gimple_vuse (stmt));
  SSA_NAME_DEF_STMT (gimple_vdef (g)) = g;
  if (gimple_call_lhs (stmt))
    {
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
      g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
                               build1 (IMAGPART_EXPR, itype, lhs));
      gsi_insert_before (gsi, g, GSI_SAME_STMT);
      g = gimple_build_assign (gimple_call_lhs (stmt), NOP_EXPR,
                               gimple_assign_lhs (g));
    }
  gsi_replace (gsi, g, true);
  g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
                           build1 (REALPART_EXPR, itype, lhs));
  gsi_insert_after (gsi, g, GSI_NEW_STMT);
  if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
    {
      g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
                               VIEW_CONVERT_EXPR,
                               build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
                                       gimple_assign_lhs (g)));
      gsi_insert_after (gsi, g, GSI_NEW_STMT);
    }
  g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
  gsi_insert_after (gsi, g, GSI_NEW_STMT);
  *gsi = gsiret;
}
/* Return true if ARG0 CODE ARG1 in infinite signed precision operation
   doesn't fit into TYPE.  The test for overflow should be regardless of
   -fwrapv, and even for unsigned types.  */

bool
arith_overflowed_p (enum tree_code code, const_tree type,
                    const_tree arg0, const_tree arg1)
{
  typedef FIXED_WIDE_INT (WIDE_INT_MAX_PRECISION * 2) widest2_int;
  typedef generic_wide_int <wi::extended_tree <WIDE_INT_MAX_PRECISION * 2> >
    widest2_int_cst;
  widest2_int warg0 = widest2_int_cst (arg0);
  widest2_int warg1 = widest2_int_cst (arg1);
  widest2_int wres;
  switch (code)
    {
    case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
    case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
    case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
    default: gcc_unreachable ();
    }
  signop sign = TYPE_SIGN (type);
  if (sign == UNSIGNED && wi::neg_p (wres))
    return true;
  return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
}
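
/* Worked example (an editor's sketch, not part of the original sources):
   for TYPE = unsigned char (precision 8),

     arith_overflowed_p (PLUS_EXPR, type, 200, 100)

   computes wres = 300, which needs 9 bits as an unsigned value, so the
   result is true; for 100 + 100 the sum 200 fits in 8 bits and the
   result is false.  */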
/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.  */

static bool
gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree callee;
  bool changed = false;
  unsigned i;

  /* Fold *& in call arguments.  */
  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
      {
        tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
        if (tmp)
          {
            gimple_call_set_arg (stmt, i, tmp);
            changed = true;
          }
      }

  /* Check for virtual calls that became direct calls.  */
  callee = gimple_call_fn (stmt);
  if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
    {
      if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
        {
          if (dump_file && virtual_method_call_p (callee)
              && !possible_polymorphic_call_target_p
                   (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
                                                    (OBJ_TYPE_REF_EXPR (callee)))))
            {
              fprintf (dump_file,
                       "Type inheritance inconsistent devirtualization of ");
              print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
              fprintf (dump_file, " to ");
              print_generic_expr (dump_file, callee, TDF_SLIM);
              fprintf (dump_file, "\n");
            }

          gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
          changed = true;
        }
      else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
        {
          bool final;
          vec <cgraph_node *> targets
            = possible_polymorphic_call_targets (callee, stmt, &final);
          if (final && targets.length () <= 1 && dbg_cnt (devirt))
            {
              tree lhs = gimple_call_lhs (stmt);
              if (dump_enabled_p ())
                {
                  location_t loc = gimple_location_safe (stmt);
                  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
                                   "folding virtual function call to %s\n",
                                   targets.length () == 1
                                   ? targets[0]->name ()
                                   : "__builtin_unreachable");
                }
              if (targets.length () == 1)
                {
                  tree fndecl = targets[0]->decl;
                  gimple_call_set_fndecl (stmt, fndecl);
                  changed = true;
                  /* If changing the call to __cxa_pure_virtual
                     or similar noreturn function, adjust gimple_call_fntype
                     too.  */
                  if (gimple_call_noreturn_p (stmt)
                      && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
                      && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
                      && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
                          == void_type_node))
                    gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
                  /* If the call becomes noreturn, remove the lhs.  */
                  if (lhs
                      && gimple_call_noreturn_p (stmt)
                      && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
                          || should_remove_lhs_p (lhs)))
                    {
                      if (TREE_CODE (lhs) == SSA_NAME)
                        {
                          tree var = create_tmp_var (TREE_TYPE (lhs));
                          tree def = get_or_create_ssa_default_def (cfun, var);
                          gimple *new_stmt = gimple_build_assign (lhs, def);
                          gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
                        }
                      gimple_call_set_lhs (stmt, NULL_TREE);
                    }
                  maybe_remove_unused_call_args (cfun, stmt);
                }
              else
                {
                  tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
                  gimple *new_stmt = gimple_build_call (fndecl, 0);
                  gimple_set_location (new_stmt, gimple_location (stmt));
                  if (lhs && TREE_CODE (lhs) == SSA_NAME)
                    {
                      tree var = create_tmp_var (TREE_TYPE (lhs));
                      tree def = get_or_create_ssa_default_def (cfun, var);

                      /* To satisfy condition for
                         cgraph_update_edges_for_call_stmt_node,
                         we need to preserve GIMPLE_CALL statement
                         at position of GSI iterator.  */
                      update_call_from_tree (gsi, def);
                      gsi_insert_before (gsi, new_stmt, GSI_NEW_STMT);
                    }
                  else
                    {
                      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
                      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
                      gsi_replace (gsi, new_stmt, false);
                    }
                  return true;
                }
            }
        }
    }

  /* Check for indirect calls that became direct calls, and then
     no longer require a static chain.  */
  if (gimple_call_chain (stmt))
    {
      tree fn = gimple_call_fndecl (stmt);
      if (fn && !DECL_STATIC_CHAIN (fn))
        {
          gimple_call_set_chain (stmt, NULL);
          changed = true;
        }
      else
        {
          tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
          if (tmp)
            {
              gimple_call_set_chain (stmt, tmp);
              changed = true;
            }
        }
    }

  if (inplace)
    return changed;

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    {
      if (gimple_fold_builtin (gsi))
        changed = true;
    }
  else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
    {
      changed |= targetm.gimple_fold_builtin (gsi);
    }
  else if (gimple_call_internal_p (stmt))
    {
      enum tree_code subcode = ERROR_MARK;
      tree result = NULL_TREE;
      bool cplx_result = false;
      tree overflow = NULL_TREE;
      switch (gimple_call_internal_fn (stmt))
        {
        case IFN_BUILTIN_EXPECT:
          result = fold_builtin_expect (gimple_location (stmt),
                                        gimple_call_arg (stmt, 0),
                                        gimple_call_arg (stmt, 1),
                                        gimple_call_arg (stmt, 2));
          break;
        case IFN_UBSAN_OBJECT_SIZE:
          if (integer_all_onesp (gimple_call_arg (stmt, 2))
              || (TREE_CODE (gimple_call_arg (stmt, 1)) == INTEGER_CST
                  && TREE_CODE (gimple_call_arg (stmt, 2)) == INTEGER_CST
                  && tree_int_cst_le (gimple_call_arg (stmt, 1),
                                      gimple_call_arg (stmt, 2))))
            {
              gsi_replace (gsi, gimple_build_nop (), false);
              unlink_stmt_vdef (stmt);
              release_defs (stmt);
              return true;
            }
          break;
        case IFN_GOACC_DIM_SIZE:
        case IFN_GOACC_DIM_POS:
          result = fold_internal_goacc_dim (stmt);
          break;
        case IFN_UBSAN_CHECK_ADD:
          subcode = PLUS_EXPR;
          break;
        case IFN_UBSAN_CHECK_SUB:
          subcode = MINUS_EXPR;
          break;
        case IFN_UBSAN_CHECK_MUL:
          subcode = MULT_EXPR;
          break;
        case IFN_ADD_OVERFLOW:
          subcode = PLUS_EXPR;
          cplx_result = true;
          break;
        case IFN_SUB_OVERFLOW:
          subcode = MINUS_EXPR;
          cplx_result = true;
          break;
        case IFN_MUL_OVERFLOW:
          subcode = MULT_EXPR;
          cplx_result = true;
          break;
        default:
          break;
        }
      if (subcode != ERROR_MARK)
        {
          tree arg0 = gimple_call_arg (stmt, 0);
          tree arg1 = gimple_call_arg (stmt, 1);
          tree type = TREE_TYPE (arg0);
          if (cplx_result)
            {
              tree lhs = gimple_call_lhs (stmt);
              if (lhs == NULL_TREE)
                type = NULL_TREE;
              else
                type = TREE_TYPE (TREE_TYPE (lhs));
            }
          if (type == NULL_TREE)
            ;
          /* x = y + 0; x = y - 0; x = y * 0; */
          else if (integer_zerop (arg1))
            result = subcode == MULT_EXPR ? integer_zero_node : arg0;
          /* x = 0 + y; x = 0 * y; */
          else if (subcode != MINUS_EXPR && integer_zerop (arg0))
            result = subcode == MULT_EXPR ? integer_zero_node : arg1;
          /* x = y - y; */
          else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
            result = integer_zero_node;
          /* x = y * 1; x = 1 * y; */
          else if (subcode == MULT_EXPR && integer_onep (arg1))
            result = arg0;
          else if (subcode == MULT_EXPR && integer_onep (arg0))
            result = arg1;
          else if (TREE_CODE (arg0) == INTEGER_CST
                   && TREE_CODE (arg1) == INTEGER_CST)
            {
              if (cplx_result)
                result = int_const_binop (subcode, fold_convert (type, arg0),
                                          fold_convert (type, arg1));
              else
                result = int_const_binop (subcode, arg0, arg1);
              if (result && arith_overflowed_p (subcode, type, arg0, arg1))
                {
                  if (cplx_result)
                    overflow = build_one_cst (type);
                  else
                    result = NULL_TREE;
                }
            }
          if (result)
            {
              if (result == integer_zero_node)
                result = build_zero_cst (type);
              else if (cplx_result && TREE_TYPE (result) != type)
                {
                  if (TREE_CODE (result) == INTEGER_CST)
                    {
                      if (arith_overflowed_p (PLUS_EXPR, type, result,
                                              integer_zero_node))
                        overflow = build_one_cst (type);
                    }
                  else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
                            && TYPE_UNSIGNED (type))
                           || (TYPE_PRECISION (type)
                               < (TYPE_PRECISION (TREE_TYPE (result))
                                  + (TYPE_UNSIGNED (TREE_TYPE (result))
                                     && !TYPE_UNSIGNED (type)))))
                    result = NULL_TREE;
                  if (result)
                    result = fold_convert (type, result);
                }
            }
        }

      if (result)
        {
          if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
            result = drop_tree_overflow (result);
          if (cplx_result)
            {
              if (overflow == NULL_TREE)
                overflow = build_zero_cst (TREE_TYPE (result));
              tree ctype = build_complex_type (TREE_TYPE (result));
              if (TREE_CODE (result) == INTEGER_CST
                  && TREE_CODE (overflow) == INTEGER_CST)
                result = build_complex (ctype, result, overflow);
              else
                result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
                                     ctype, result, overflow);
            }
          if (!update_call_from_tree (gsi, result))
            gimplify_and_update_call_from_tree (gsi, result);
          changed = true;
        }
    }

  return changed;
}
/* Return true whether NAME has a use on STMT.  */

static bool
has_use_on_stmt (tree name, gimple *stmt)
{
  imm_use_iterator iter;
  use_operand_p use_p;
  FOR_EACH_IMM_USE_FAST (use_p, iter, name)
    if (USE_STMT (use_p) == stmt)
      return true;
  return false;
}
/* Worker for fold_stmt_1 dispatch to pattern based folding with
   gimple_simplify.

   Replaces *GSI with the simplification result in RCODE and OPS
   and the associated statements in *SEQ.  Does the replacement
   according to INPLACE and returns true if the operation succeeded.  */

static bool
replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
                                  code_helper rcode, tree *ops,
                                  gimple_seq *seq, bool inplace)
{
  gimple *stmt = gsi_stmt (*gsi);

  /* Play safe and do not allow abnormals to be mentioned in
     newly created statements.  See also maybe_push_res_to_seq.
     As an exception allow such uses if there was a use of the
     same SSA name on the old stmt.  */
  if ((TREE_CODE (ops[0]) == SSA_NAME
       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0])
       && !has_use_on_stmt (ops[0], stmt))
      || (ops[1]
          && TREE_CODE (ops[1]) == SSA_NAME
          && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1])
          && !has_use_on_stmt (ops[1], stmt))
      || (ops[2]
          && TREE_CODE (ops[2]) == SSA_NAME
          && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2])
          && !has_use_on_stmt (ops[2], stmt))
      || (COMPARISON_CLASS_P (ops[0])
          && ((TREE_CODE (TREE_OPERAND (ops[0], 0)) == SSA_NAME
               && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 0))
               && !has_use_on_stmt (TREE_OPERAND (ops[0], 0), stmt))
              || (TREE_CODE (TREE_OPERAND (ops[0], 1)) == SSA_NAME
                  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], 1))
                  && !has_use_on_stmt (TREE_OPERAND (ops[0], 1), stmt)))))
    return false;

  /* Don't insert new statements when INPLACE is true, even if we could
     reuse STMT for the final statement.  */
  if (inplace && !gimple_seq_empty_p (*seq))
    return false;

  if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
    {
      gcc_assert (rcode.is_tree_code ());
      if (TREE_CODE_CLASS ((enum tree_code) rcode) == tcc_comparison
          /* GIMPLE_CONDs condition may not throw.  */
          && (!flag_exceptions
              || !cfun->can_throw_non_call_exceptions
              || !operation_could_trap_p (rcode,
                                          FLOAT_TYPE_P (TREE_TYPE (ops[0])),
                                          false, NULL_TREE)))
        gimple_cond_set_condition (cond_stmt, rcode, ops[0], ops[1]);
      else if (rcode == SSA_NAME)
        gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
                                   build_zero_cst (TREE_TYPE (ops[0])));
      else if (rcode == INTEGER_CST)
        {
          if (integer_zerop (ops[0]))
            gimple_cond_make_false (cond_stmt);
          else
            gimple_cond_make_true (cond_stmt);
        }
      else if (!inplace)
        {
          tree res = maybe_push_res_to_seq (rcode, boolean_type_node,
                                            ops, seq);
          if (!res)
            return false;
          gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
                                     build_zero_cst (TREE_TYPE (res)));
        }
      else
        return false;
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "gimple_simplified to ");
          if (!gimple_seq_empty_p (*seq))
            print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
          print_gimple_stmt (dump_file, gsi_stmt (*gsi),
                             0, TDF_SLIM);
        }
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (is_gimple_assign (stmt)
           && rcode.is_tree_code ())
    {
      if (!inplace
          || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (rcode))
        {
          maybe_build_generic_op (rcode,
                                  TREE_TYPE (gimple_assign_lhs (stmt)), ops);
          gimple_assign_set_rhs_with_ops (gsi, rcode, ops[0], ops[1], ops[2]);
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "gimple_simplified to ");
              if (!gimple_seq_empty_p (*seq))
                print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
              print_gimple_stmt (dump_file, gsi_stmt (*gsi),
                                 0, TDF_SLIM);
            }
          gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
          return true;
        }
    }
  else if (rcode.is_fn_code ()
           && gimple_call_combined_fn (stmt) == rcode)
    {
      unsigned i;
      for (i = 0; i < gimple_call_num_args (stmt); ++i)
        {
          gcc_assert (ops[i] != NULL_TREE);
          gimple_call_set_arg (stmt, i, ops[i]);
        }
      if (i < 3)
        gcc_assert (ops[i] == NULL_TREE);
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "gimple_simplified to ");
          if (!gimple_seq_empty_p (*seq))
            print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
          print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
        }
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (!inplace)
    {
      if (gimple_has_lhs (stmt))
        {
          tree lhs = gimple_get_lhs (stmt);
          if (!maybe_push_res_to_seq (rcode, TREE_TYPE (lhs),
                                      ops, seq, lhs))
            return false;
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "gimple_simplified to ");
              print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
            }
          gsi_replace_with_seq_vops (gsi, *seq);
          return true;
        }
      else
        gcc_unreachable ();
    }

  return false;
}
/* Canonicalize MEM_REFs invariant address operand after propagation.  */

static bool
maybe_canonicalize_mem_ref_addr (tree *t)
{
  bool res = false;

  if (TREE_CODE (*t) == ADDR_EXPR)
    t = &TREE_OPERAND (*t, 0);

  /* The C and C++ frontends use an ARRAY_REF for indexing with their
     generic vector extension.  The actual vector referenced is
     view-converted to an array type for this purpose.  If the index
     is constant the canonical representation in the middle-end is a
     BIT_FIELD_REF so re-write the former to the latter here.  */
  if (TREE_CODE (*t) == ARRAY_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
      && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
      && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
    {
      tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
      if (VECTOR_TYPE_P (vtype))
        {
          tree low = array_ref_low_bound (*t);
          if (TREE_CODE (low) == INTEGER_CST)
            {
              if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
                {
                  widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
                                            wi::to_widest (low));
                  idx = wi::mul (idx, wi::to_widest
                                        (TYPE_SIZE (TREE_TYPE (*t))));
                  widest_int ext
                    = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
                  if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
                    {
                      *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
                                       TREE_TYPE (*t),
                                       TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
                                       TYPE_SIZE (TREE_TYPE (*t)),
                                       wide_int_to_tree (sizetype, idx));
                      res = true;
                    }
                }
            }
        }
    }

  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
     of invariant addresses into a SSA name MEM_REF address.  */
  if (TREE_CODE (*t) == MEM_REF
      || TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (*t, 0);
      if (TREE_CODE (addr) == ADDR_EXPR
          && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
              || handled_component_p (TREE_OPERAND (addr, 0))))
        {
          tree base;
          HOST_WIDE_INT coffset;
          base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
                                                &coffset);
          if (!base)
            gcc_unreachable ();

          TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
          TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
                                                  TREE_OPERAND (*t, 1),
                                                  size_int (coffset));
          res = true;
        }
      gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
                           || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
    }

  /* Canonicalize back MEM_REFs to plain reference trees if the object
     accessed is a decl that has the same access semantics as the MEM_REF.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1))
      && MR_DEPENDENCE_CLIQUE (*t) == 0)
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
      if (/* Same volatile qualification.  */
          TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
          /* Same TBAA behavior with -fstrict-aliasing.  */
          && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
          && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
              == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
          /* Same alignment.  */
          && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
          /* We have to look out here to not drop a required conversion
             from the rhs to the lhs if *t appears on the lhs or vice-versa
             if it appears on the rhs.  Thus require strict type
             compatibility.  */
          && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
        {
          *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
          res = true;
        }
    }

  /* Canonicalize TARGET_MEM_REF in particular with respect to
     the indexes becoming constant.  */
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
        {
          *t = tem;
          res = true;
        }
    }

  return res;
}
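
/* Illustrative example (an editor's sketch, not part of the original
   sources): after an invariant address has been propagated,

     MEM[(int *)&a, 0]

   is canonicalized back to the plain reference

     a

   provided `a' is an int decl with matching volatility, TBAA main
   variant and alignment.  */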
/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases.  */

static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
{
  bool changed = false;
  gimple *stmt = gsi_stmt (*gsi);
  bool nowarning = gimple_no_warning_p (stmt);
  unsigned i;

  fold_defer_overflow_warnings ();

  /* First do required canonicalization of [TARGET_]MEM_REF addresses
     after propagation.
     ???  This shouldn't be done in generic folding but in the
     propagation helpers which also know whether an address was
     propagated.
     Also canonicalize operand order.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
        {
          tree *rhs = gimple_assign_rhs1_ptr (stmt);
          if ((REFERENCE_CLASS_P (*rhs)
               || TREE_CODE (*rhs) == ADDR_EXPR)
              && maybe_canonicalize_mem_ref_addr (rhs))
            changed = true;
          tree *lhs = gimple_assign_lhs_ptr (stmt);
          if (REFERENCE_CLASS_P (*lhs)
              && maybe_canonicalize_mem_ref_addr (lhs))
            changed = true;
        }
      else
        {
          /* Canonicalize operand order.  */
          enum tree_code code = gimple_assign_rhs_code (stmt);
          if (TREE_CODE_CLASS (code) == tcc_comparison
              || commutative_tree_code (code)
              || commutative_ternary_tree_code (code))
            {
              tree rhs1 = gimple_assign_rhs1 (stmt);
              tree rhs2 = gimple_assign_rhs2 (stmt);
              if (tree_swap_operands_p (rhs1, rhs2, false))
                {
                  gimple_assign_set_rhs1 (stmt, rhs2);
                  gimple_assign_set_rhs2 (stmt, rhs1);
                  if (TREE_CODE_CLASS (code) == tcc_comparison)
                    gimple_assign_set_rhs_code (stmt,
                                                swap_tree_comparison (code));
                  changed = true;
                }
            }
        }
      break;
    case GIMPLE_CALL:
      {
        for (i = 0; i < gimple_call_num_args (stmt); ++i)
          {
            tree *arg = gimple_call_arg_ptr (stmt, i);
            if (REFERENCE_CLASS_P (*arg)
                && maybe_canonicalize_mem_ref_addr (arg))
              changed = true;
          }
        tree *lhs = gimple_call_lhs_ptr (stmt);
        if (*lhs
            && REFERENCE_CLASS_P (*lhs)
            && maybe_canonicalize_mem_ref_addr (lhs))
          changed = true;
        break;
      }
    case GIMPLE_ASM:
      {
        gasm *asm_stmt = as_a <gasm *> (stmt);
        for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
          {
            tree link = gimple_asm_output_op (asm_stmt, i);
            tree op = TREE_VALUE (link);
            if (REFERENCE_CLASS_P (op)
                && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
              changed = true;
          }
        for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
          {
            tree link = gimple_asm_input_op (asm_stmt, i);
            tree op = TREE_VALUE (link);
            if ((REFERENCE_CLASS_P (op)
                 || TREE_CODE (op) == ADDR_EXPR)
                && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
              changed = true;
          }
      }
      break;
    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
        {
          tree *val = gimple_debug_bind_get_value_ptr (stmt);
          if (*val
              && (REFERENCE_CLASS_P (*val)
                  || TREE_CODE (*val) == ADDR_EXPR)
              && maybe_canonicalize_mem_ref_addr (val))
            changed = true;
        }
      break;
    case GIMPLE_COND:
      {
        /* Canonicalize operand order.  */
        tree lhs = gimple_cond_lhs (stmt);
        tree rhs = gimple_cond_rhs (stmt);
        if (tree_swap_operands_p (lhs, rhs, false))
          {
            gcond *gc = as_a <gcond *> (stmt);
            gimple_cond_set_lhs (gc, rhs);
            gimple_cond_set_rhs (gc, lhs);
            gimple_cond_set_code (gc,
                                  swap_tree_comparison (gimple_cond_code (gc)));
            changed = true;
          }
      }
      break;
    default:;
    }

  /* Dispatch to pattern-based folding.  */
  if (!inplace
      || is_gimple_assign (stmt)
      || gimple_code (stmt) == GIMPLE_COND)
    {
      gimple_seq seq = NULL;
      code_helper rcode;
      tree ops[3] = {};
      if (gimple_simplify (stmt, &rcode, ops, inplace ? NULL : &seq,
                           valueize, valueize))
        {
          if (replace_stmt_with_simplification (gsi, rcode, ops, &seq, inplace))
            changed = true;
          else
            gimple_seq_discard (seq);
        }
    }

  stmt = gsi_stmt (*gsi);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        /* Try to canonicalize for boolean-typed X the comparisons
           X == 0, X == 1, X != 0, and X != 1.  */
        if (gimple_assign_rhs_code (stmt) == EQ_EXPR
            || gimple_assign_rhs_code (stmt) == NE_EXPR)
          {
            tree lhs = gimple_assign_lhs (stmt);
            tree op1 = gimple_assign_rhs1 (stmt);
            tree op2 = gimple_assign_rhs2 (stmt);
            tree type = TREE_TYPE (op1);

            /* Check whether the comparison operands are of the same boolean
               type as the result type is.
               Check that second operand is an integer-constant with value
               one or zero.  */
            if (TREE_CODE (op2) == INTEGER_CST
                && (integer_zerop (op2) || integer_onep (op2))
                && useless_type_conversion_p (TREE_TYPE (lhs), type))
              {
                enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
                bool is_logical_not = false;

                /* X == 0 and X != 1 is a logical-not of X
                   X == 1 and X != 0 is X  */
                if ((cmp_code == EQ_EXPR && integer_zerop (op2))
                    || (cmp_code == NE_EXPR && integer_onep (op2)))
                  is_logical_not = true;

                if (is_logical_not == false)
                  gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
                /* Only for one-bit precision typed X the transformation
                   !X -> ~X is valid.  */
                else if (TYPE_PRECISION (type) == 1)
                  gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
                /* Otherwise we use !X -> X ^ 1.  */
                else
                  gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
                                                  build_int_cst (type, 1));
                changed = true;
                stmt = gsi_stmt (*gsi);
              }
          }

        unsigned old_num_ops = gimple_num_ops (stmt);
        tree lhs = gimple_assign_lhs (stmt);
        tree new_rhs = fold_gimple_assign (gsi);
        if (new_rhs
            && !useless_type_conversion_p (TREE_TYPE (lhs),
                                           TREE_TYPE (new_rhs)))
          new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
        if (new_rhs
            && (!inplace
                || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
          {
            gimple_assign_set_rhs_from_tree (gsi, new_rhs);
            changed = true;
          }
        break;
      }

    case GIMPLE_CALL:
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      {
        gasm *asm_stmt = as_a <gasm *> (stmt);
        size_t noutputs;
        const char **oconstraints;
        const char *constraint;
        bool allows_mem, allows_reg;

        noutputs = gimple_asm_noutputs (asm_stmt);
        oconstraints = XALLOCAVEC (const char *, noutputs);

        for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
          {
            tree link = gimple_asm_output_op (asm_stmt, i);
            tree op = TREE_VALUE (link);
            oconstraints[i]
              = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
            if (REFERENCE_CLASS_P (op)
                && (op = maybe_fold_reference (op, true)) != NULL_TREE)
              {
                TREE_VALUE (link) = op;
                changed = true;
              }
          }
        for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
          {
            tree link = gimple_asm_input_op (asm_stmt, i);
            tree op = TREE_VALUE (link);
            constraint
              = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
            parse_input_constraint (&constraint, 0, 0, noutputs, 0,
                                    oconstraints, &allows_mem, &allows_reg);
            if (REFERENCE_CLASS_P (op)
                && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
                   != NULL_TREE)
              {
                TREE_VALUE (link) = op;
                changed = true;
              }
          }
      }
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
        {
          tree val = gimple_debug_bind_get_value (stmt);
          if (val
              && REFERENCE_CLASS_P (val))
            {
              tree tem = maybe_fold_reference (val, false);
              if (tem)
                {
                  gimple_debug_bind_set_value (stmt, tem);
                  changed = true;
                }
            }
          else if (val
                   && TREE_CODE (val) == ADDR_EXPR)
            {
              tree ref = TREE_OPERAND (val, 0);
              tree tem = maybe_fold_reference (ref, false);
              if (tem)
                {
                  tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
                  gimple_debug_bind_set_value (stmt, tem);
                  changed = true;
                }
            }
        }
      break;

    default:;
    }

  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  */
  if (gimple_has_lhs (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
        {
          tree new_lhs = maybe_fold_reference (lhs, true);
          if (new_lhs)
            {
              gimple_set_lhs (stmt, new_lhs);
              changed = true;
            }
        }
    }

  fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);

  return changed;
}
/* Valueization callback that ends up not following SSA edges.  */

tree
no_follow_ssa_edges (tree)
{
  return NULL_TREE;
}

/* Valueization callback that ends up following single-use SSA edges only.  */

tree
follow_single_use_edges (tree val)
{
  if (TREE_CODE (val) == SSA_NAME
      && !has_single_use (val))
    return NULL_TREE;
  return val;
}
/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.
   The statement pointed to by GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
}

bool
fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
{
  return fold_stmt_1 (gsi, false, valueize);
}

/* Perform the minimal folding on statement *GSI.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.
   The statement *GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt_inplace (gimple_stmt_iterator *gsi)
{
  gimple *stmt = gsi_stmt (*gsi);
  bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
  gcc_assert (gsi_stmt (*gsi) == stmt);
  return changed;
}
/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
   if EXPR is null or we don't know how.
   If non-null, the result always has boolean type.  */

static tree
canonicalize_bool (tree expr, bool invert)
{
  if (!expr)
    return NULL_TREE;

  if (invert)
    {
      if (integer_nonzerop (expr))
        return boolean_false_node;
      else if (integer_zerop (expr))
        return boolean_true_node;
      else if (TREE_CODE (expr) == SSA_NAME)
        return fold_build2 (EQ_EXPR, boolean_type_node, expr,
                            build_int_cst (TREE_TYPE (expr), 0));
      else if (COMPARISON_CLASS_P (expr))
        return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
                            boolean_type_node,
                            TREE_OPERAND (expr, 0),
                            TREE_OPERAND (expr, 1));
      else
        return NULL_TREE;
    }
  else
    {
      if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
        return expr;
      if (integer_nonzerop (expr))
        return boolean_true_node;
      else if (integer_zerop (expr))
        return boolean_false_node;
      else if (TREE_CODE (expr) == SSA_NAME)
        return fold_build2 (NE_EXPR, boolean_type_node, expr,
                            build_int_cst (TREE_TYPE (expr), 0));
      else if (COMPARISON_CLASS_P (expr))
        return fold_build2 (TREE_CODE (expr),
                            boolean_type_node,
                            TREE_OPERAND (expr, 0),
                            TREE_OPERAND (expr, 1));
      else
        return NULL_TREE;
    }
}
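
/* Illustrative examples (an editor's sketch, not part of the original
   sources):

     canonicalize_bool (a < b, true)   yields  a >= b
     canonicalize_bool (n, false)      yields  n != 0  for a non-boolean
                                               SSA name n

   both of boolean_type_node.  */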
/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  */

static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
                        const_tree op1, const_tree op2)
{
  gimple *s;

  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      if (operand_equal_p (expr, op1, 0))
        return ((code == NE_EXPR && integer_zerop (op2))
                || (code == EQ_EXPR && integer_nonzerop (op2)));
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
          && gimple_assign_rhs_code (s) == code
          && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
          && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
        return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
          && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
        {
          enum tree_code c = gimple_assign_rhs_code (s);
          if ((c == NE_EXPR && integer_zerop (op2))
              || (c == EQ_EXPR && integer_nonzerop (op2)))
            return same_bool_comparison_p (expr, c,
                                           gimple_assign_rhs1 (s),
                                           gimple_assign_rhs2 (s));
          if ((c == EQ_EXPR && integer_zerop (op2))
              || (c == NE_EXPR && integer_nonzerop (op2)))
            return same_bool_comparison_p (expr,
                                           invert_tree_comparison (c, false),
                                           gimple_assign_rhs1 (s),
                                           gimple_assign_rhs2 (s));
        }
    }

  return false;
}
/* Check to see if two boolean expressions OP1 and OP2 are logically
   equivalent.  */

static bool
same_bool_result_p (const_tree op1, const_tree op2)
{
  /* Simple cases first.  */
  if (operand_equal_p (op1, op2, 0))
    return true;

  /* Check the cases where at least one of the operands is a comparison.
     These are a bit smarter than operand_equal_p in that they apply some
     identities on SSA_NAMEs.  */
  if (COMPARISON_CLASS_P (op2)
      && same_bool_comparison_p (op1, TREE_CODE (op2),
                                 TREE_OPERAND (op2, 0),
                                 TREE_OPERAND (op2, 1)))
    return true;
  if (COMPARISON_CLASS_P (op1)
      && same_bool_comparison_p (op2, TREE_CODE (op1),
                                 TREE_OPERAND (op1, 0),
                                 TREE_OPERAND (op1, 1)))
    return true;

  /* Default case.  */
  return false;
}
/* Forward declarations for some mutually recursive functions.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
                   enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison (tree var, bool invert,
                         enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison_1 (gimple *stmt,
                           enum tree_code code2, tree op2a, tree op2b);
static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
                  enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison (tree var, bool invert,
                        enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison_1 (gimple *stmt,
                          enum tree_code code2, tree op2a, tree op2b);
/* Helper function for and_comparisons_1:  try to simplify the AND of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of the VAR before doing the AND.
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison (tree var, bool invert,
                         enum tree_code code2, tree op2a, tree op2b)
{
  tree t;
  gimple *stmt = SSA_NAME_DEF_STMT (var);

  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.  */
  if (invert)
    t = or_var_with_comparison_1 (stmt,
                                  invert_tree_comparison (code2, false),
                                  op2a, op2b);
  else
    t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
  return canonicalize_bool (t, invert);
}
/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison_1 (gimple *stmt,
                           enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
          || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
        {
          true_test_var = op2a;
          if (var == true_test_var)
            return var;
        }
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
               || (code2 == NE_EXPR && integer_nonzerop (op2b)))
        {
          false_test_var = op2a;
          if (var == false_test_var)
            return boolean_false_node;
        }
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (innercode,
                                  gimple_assign_rhs1 (stmt),
                                  gimple_assign_rhs2 (stmt),
                                  code2, op2a, op2b);
      if (t)
        return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple *s;
      tree t;
      tree partial = NULL_TREE;
      bool is_and = (innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
         of inner1/inner2:
         inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
         inner1 AND (inner1 OR inner2) => inner1
         !inner1 AND (inner1 AND inner2) => false
         !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
         Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
        return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
        return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
        return (is_and
                ? boolean_false_node
                : and_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
        return (is_and
                ? boolean_false_node
                : and_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
         Compute the first partial result, (inner1 AND (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
          && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
          && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
          && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
                                              gimple_assign_rhs1 (s),
                                              gimple_assign_rhs2 (s),
                                              code2, op2a, op2b)))
        {
          /* Handle the AND case, where we are reassociating:
             (inner1 AND inner2) AND (op2a code2 op2b)
             => (t AND inner2)
             If the partial result t is a constant, we win.  Otherwise
             continue on to try reassociating with the other inner test.  */
          if (is_and)
            {
              if (integer_onep (t))
                return inner2;
              else if (integer_zerop (t))
                return boolean_false_node;
            }

          /* Handle the OR case, where we are redistributing:
             (inner1 OR inner2) AND (op2a code2 op2b)
             => (t OR (inner2 AND (op2a code2 op2b)))  */
          else if (integer_onep (t))
            return boolean_true_node;

          /* Save partial result for later.  */
          partial = t;
        }

      /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
          && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
          && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
          && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
                                              gimple_assign_rhs1 (s),
                                              gimple_assign_rhs2 (s),
                                              code2, op2a, op2b)))
        {
          /* Handle the AND case, where we are reassociating:
             (inner1 AND inner2) AND (op2a code2 op2b)
             => (inner1 AND t)  */
          if (is_and)
            {
              if (integer_onep (t))
                return inner1;
              else if (integer_zerop (t))
                return boolean_false_node;
              /* If both are the same, we can apply the identity
                 (x AND x) == x.  */
              else if (partial && same_bool_result_p (t, partial))
                return t;
            }

          /* Handle the OR case, where we are redistributing:
             (inner1 OR inner2) AND (op2a code2 op2b)
             => (t OR (inner1 AND (op2a code2 op2b)))
             => (t OR partial)  */
          else
            {
              if (integer_onep (t))
                return boolean_true_node;
              else if (partial)
                {
                  /* We already got a simplification for the other
                     operand to the redistributed OR expression.  The
                     interesting case is when at least one is false.
                     Or, if both are the same, we can apply the identity
                     (x OR x) == x.  */
                  if (integer_zerop (partial))
                    return t;
                  else if (integer_zerop (t))
                    return partial;
                  else if (same_bool_result_p (t, partial))
                    return t;
                }
            }
        }
    }
  return NULL_TREE;
}
/* Try to simplify the AND of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
                   enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) AND (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
                                    TRUTH_ANDIF_EXPR, code1, code2,
                                    truth_type, op1a, op1b);
      if (t)
        return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
                                    TRUTH_ANDIF_EXPR, code1,
                                    swap_tree_comparison (code2),
                                    truth_type, op1a, op1b);
      if (t)
        return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a == op1b), we should either be able to
         return that or FALSE, depending on whether the constant op1b
         also satisfies the other comparison against op2b.  */
      if (code1 == EQ_EXPR)
        {
          bool done = true;
          bool val;
          switch (code2)
            {
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp < 0); break;
            case GT_EXPR: val = (cmp > 0); break;
            case LE_EXPR: val = (cmp <= 0); break;
            case GE_EXPR: val = (cmp >= 0); break;
            default: done = false;
            }
          if (done)
            {
              if (val)
                return fold_build2 (code1, boolean_type_node, op1a, op1b);
              else
                return boolean_false_node;
            }
        }
      /* Likewise if the second comparison is an == comparison.  */
      else if (code2 == EQ_EXPR)
        {
          bool done = true;
          bool val;
          switch (code1)
            {
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp > 0); break;
            case GT_EXPR: val = (cmp < 0); break;
            case LE_EXPR: val = (cmp >= 0); break;
            case GE_EXPR: val = (cmp <= 0); break;
            default: done = false;
            }
          if (done)
            {
              if (val)
                return fold_build2 (code2, boolean_type_node, op2a, op2b);
              else
                return boolean_false_node;
            }
        }

      /* Same business with inequality tests.  */
      else if (code1 == NE_EXPR)
        {
          bool val;
          switch (code2)
            {
            case EQ_EXPR: val = (cmp != 0); break;
            case NE_EXPR: val = (cmp == 0); break;
            case LT_EXPR: val = (cmp >= 0); break;
            case GT_EXPR: val = (cmp <= 0); break;
            case LE_EXPR: val = (cmp > 0); break;
            case GE_EXPR: val = (cmp < 0); break;
            default:
              val = false;
            }
          if (val)
            return fold_build2 (code2, boolean_type_node, op2a, op2b);
        }
      else if (code2 == NE_EXPR)
        {
          bool val;
          switch (code1)
            {
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp <= 0); break;
            case GT_EXPR: val = (cmp >= 0); break;
            case LE_EXPR: val = (cmp < 0); break;
            case GE_EXPR: val = (cmp > 0); break;
            default:
              val = false;
            }
          if (val)
            return fold_build2 (code1, boolean_type_node, op1a, op1b);
        }

      /* Choose the more restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
               && (code2 == LT_EXPR || code2 == LE_EXPR))
        {
          if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
            return fold_build2 (code1, boolean_type_node, op1a, op1b);
          else
            return fold_build2 (code2, boolean_type_node, op2a, op2b);
        }

      /* Likewise choose the more restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
               && (code2 == GT_EXPR || code2 == GE_EXPR))
        {
          if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
            return fold_build2 (code1, boolean_type_node, op1a, op1b);
          else
            return fold_build2 (code2, boolean_type_node, op2a, op2b);
        }

      /* Check for singleton ranges.  */
      else if (cmp == 0
               && ((code1 == LE_EXPR && code2 == GE_EXPR)
                   || (code1 == GE_EXPR && code2 == LE_EXPR)))
        return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);

      /* Check for disjoint ranges.  */
      else if (cmp <= 0
               && (code1 == LT_EXPR || code1 == LE_EXPR)
               && (code2 == GT_EXPR || code2 == GE_EXPR))
        return boolean_false_node;
      else if (cmp >= 0
               && (code1 == GT_EXPR || code1 == GE_EXPR)
               && (code2 == LT_EXPR || code2 == LE_EXPR))
        return boolean_false_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
                     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple *stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
        {
        case GIMPLE_ASSIGN:
          /* Try to simplify by copy-propagating the definition.  */
          return and_var_with_comparison (op1a, invert, code2, op2a, op2b);

        case GIMPLE_PHI:
          /* If every argument to the PHI produces the same result when
             ANDed with the second comparison, we win.
             Do not do this unless the type is bool since we need a bool
             result here anyway.  */
          if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
            {
              tree result = NULL_TREE;
              unsigned i;
              for (i = 0; i < gimple_phi_num_args (stmt); i++)
                {
                  tree arg = gimple_phi_arg_def (stmt, i);

                  /* If this PHI has itself as an argument, ignore it.
                     If all the other args produce the same result,
                     we're still OK.  */
                  if (arg == gimple_phi_result (stmt))
                    continue;
                  else if (TREE_CODE (arg) == INTEGER_CST)
                    {
                      if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
                        {
                          if (!result)
                            result = boolean_false_node;
                          else if (!integer_zerop (result))
                            return NULL_TREE;
                        }
                      else if (!result)
                        result = fold_build2 (code2, boolean_type_node,
                                              op2a, op2b);
                      else if (!same_bool_comparison_p (result,
                                                        code2, op2a, op2b))
                        return NULL_TREE;
                    }
                  else if (TREE_CODE (arg) == SSA_NAME
                           && !SSA_NAME_IS_DEFAULT_DEF (arg))
                    {
                      tree temp;
                      gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
                      /* In simple cases we can look through PHI nodes,
                         but we have to be careful with loops.
                         See PR49073.  */
                      if (! dom_info_available_p (CDI_DOMINATORS)
                          || gimple_bb (def_stmt) == gimple_bb (stmt)
                          || dominated_by_p (CDI_DOMINATORS,
                                             gimple_bb (def_stmt),
                                             gimple_bb (stmt)))
                        return NULL_TREE;
                      temp = and_var_with_comparison (arg, invert, code2,
                                                      op2a, op2b);
                      if (!temp)
                        return NULL_TREE;
                      else if (!result)
                        result = temp;
                      else if (!same_bool_result_p (result, temp))
                        return NULL_TREE;
                    }
                  else
                    return NULL_TREE;
                }
              return result;
            }

        default:
          break;
        }
    }
  return NULL_TREE;
}
/* Try to simplify the AND of two comparisons, specified by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be simplified to a single expression (without requiring
   introducing more SSA variables to hold intermediate values),
   return the resulting tree.  Otherwise return NULL_TREE.
   If the result expression is non-null, it has boolean type.  */

tree
maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
                            enum tree_code code2, tree op2a, tree op2b)
{
  tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
  if (t)
    return t;
  else
    return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
}
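
/* Illustration (not in the original source): when both comparisons test
   the same value against integer constants, the AND folds to the more
   restrictive test or to a constant, e.g.
     maybe_fold_and_comparisons (LT_EXPR, x, 5, LT_EXPR, x, 3)  => x < 3
     maybe_fold_and_comparisons (LT_EXPR, x, 3, GT_EXPR, x, 5)  => false
     maybe_fold_and_comparisons (LE_EXPR, x, 4, GE_EXPR, x, 4)  => x == 4  */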
/* Helper function for or_comparisons_1:  try to simplify the OR of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of VAR before doing the OR.
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison (tree var, bool invert,
                        enum tree_code code2, tree op2a, tree op2b)
{
  tree t;
  gimple *stmt = SSA_NAME_DEF_STMT (var);

  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.  */
  if (invert)
    t = and_var_with_comparison_1 (stmt,
                                   invert_tree_comparison (code2, false),
                                   op2a, op2b);
  else
    t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
  return canonicalize_bool (t, invert);
}
/* Try to simplify the OR of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison_1 (gimple *stmt,
                          enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var OR (var != 0)) => true.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      if ((code2 == NE_EXPR && integer_zerop (op2b))
          || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
        {
          true_test_var = op2a;
          if (var == true_test_var)
            return var;
        }
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
               || (code2 == NE_EXPR && integer_nonzerop (op2b)))
        {
          false_test_var = op2a;
          if (var == false_test_var)
            return boolean_true_node;
        }
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = or_comparisons_1 (innercode,
                                 gimple_assign_rhs1 (stmt),
                                 gimple_assign_rhs2 (stmt),
                                 code2, op2a, op2b);
      if (t)
        return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple *s;
      tree t;
      tree partial = NULL_TREE;
      bool is_or = (innercode == BIT_IOR_EXPR);

      /* Check for boolean identities that don't require recursive examination
         of inner1/inner2:
         inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
         inner1 OR (inner1 AND inner2) => inner1
         !inner1 OR (inner1 OR inner2) => true
         !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
         Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
        return (is_or ? var : inner1);
      else if (inner2 == true_test_var)
        return (is_or ? var : inner2);
      else if (inner1 == false_test_var)
        return (is_or
                ? boolean_true_node
                : or_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
        return (is_or
                ? boolean_true_node
                : or_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the OR across the inner tests.
         Compute the first partial result, (inner1 OR (op2a code op2b))  */
      if (TREE_CODE (inner1) == SSA_NAME
          && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
          && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
          && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
                                             gimple_assign_rhs1 (s),
                                             gimple_assign_rhs2 (s),
                                             code2, op2a, op2b)))
        {
          /* Handle the OR case, where we are reassociating:
             (inner1 OR inner2) OR (op2a code2 op2b)
             => (t OR inner2)
             If the partial result t is a constant, we win.  Otherwise
             continue on to try reassociating with the other inner test.  */
          if (is_or)
            {
              if (integer_onep (t))
                return boolean_true_node;
              else if (integer_zerop (t))
                return inner2;
            }

          /* Handle the AND case, where we are redistributing:
             (inner1 AND inner2) OR (op2a code2 op2b)
             => (t AND (inner2 OR (op2a code op2b)))  */
          else if (integer_zerop (t))
            return boolean_false_node;

          /* Save partial result for later.  */
          partial = t;
        }

      /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
          && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
          && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
          && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
                                             gimple_assign_rhs1 (s),
                                             gimple_assign_rhs2 (s),
                                             code2, op2a, op2b)))
        {
          /* Handle the OR case, where we are reassociating:
             (inner1 OR inner2) OR (op2a code2 op2b)
             => (inner1 OR t)
             => (t OR partial)  */
          if (is_or)
            {
              if (integer_zerop (t))
                return inner1;
              else if (integer_onep (t))
                return boolean_true_node;
              /* If both are the same, we can apply the identity
                 (x OR x) == x.  */
              else if (partial && same_bool_result_p (t, partial))
                return t;
            }

          /* Handle the AND case, where we are redistributing:
             (inner1 AND inner2) OR (op2a code2 op2b)
             => (t AND (inner1 OR (op2a code2 op2b)))
             => (t AND partial)  */
          else
            {
              if (integer_zerop (t))
                return boolean_false_node;
              else if (partial)
                {
                  /* We already got a simplification for the other
                     operand to the redistributed AND expression.  The
                     interesting case is when at least one is true.
                     Or, if both are the same, we can apply the identity
                     (x AND x) == x.  */
                  if (integer_onep (partial))
                    return t;
                  else if (integer_onep (t))
                    return partial;
                  else if (same_bool_result_p (t, partial))
                    return t;
                }
            }
        }
    }
  return NULL_TREE;
}
/* Try to simplify the OR of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
                  enum tree_code code2, tree op2a, tree op2b)
{
  tree truth_type = truth_type_for (TREE_TYPE (op1a));

  /* First check for ((x CODE1 y) OR (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
                                    TRUTH_ORIF_EXPR, code1, code2,
                                    truth_type, op1a, op1b);
      if (t)
        return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
                                    TRUTH_ORIF_EXPR, code1,
                                    swap_tree_comparison (code2),
                                    truth_type, op1a, op1b);
      if (t)
        return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a != op1b), we should either be able to
         return that or TRUE, depending on whether the constant op1b
         also satisfies the other comparison against op2b.  */
      if (code1 == NE_EXPR)
        {
          bool done = true;
          bool val;
          switch (code2)
            {
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp < 0); break;
            case GT_EXPR: val = (cmp > 0); break;
            case LE_EXPR: val = (cmp <= 0); break;
            case GE_EXPR: val = (cmp >= 0); break;
            default: done = false;
            }
          if (done)
            {
              if (val)
                return boolean_true_node;
              else
                return fold_build2 (code1, boolean_type_node, op1a, op1b);
            }
        }
      /* Likewise if the second comparison is a != comparison.  */
      else if (code2 == NE_EXPR)
        {
          bool done = true;
          bool val;
          switch (code1)
            {
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp > 0); break;
            case GT_EXPR: val = (cmp < 0); break;
            case LE_EXPR: val = (cmp >= 0); break;
            case GE_EXPR: val = (cmp <= 0); break;
            default: done = false;
            }
          if (done)
            {
              if (val)
                return boolean_true_node;
              else
                return fold_build2 (code2, boolean_type_node, op2a, op2b);
            }
        }

      /* See if an equality test is redundant with the other comparison.  */
      else if (code1 == EQ_EXPR)
        {
          bool val;
          switch (code2)
            {
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp < 0); break;
            case GT_EXPR: val = (cmp > 0); break;
            case LE_EXPR: val = (cmp <= 0); break;
            case GE_EXPR: val = (cmp >= 0); break;
            default:
              val = false;
            }
          if (val)
            return fold_build2 (code2, boolean_type_node, op2a, op2b);
        }
      else if (code2 == EQ_EXPR)
        {
          bool val;
          switch (code1)
            {
            case EQ_EXPR: val = (cmp == 0); break;
            case NE_EXPR: val = (cmp != 0); break;
            case LT_EXPR: val = (cmp > 0); break;
            case GT_EXPR: val = (cmp < 0); break;
            case LE_EXPR: val = (cmp >= 0); break;
            case GE_EXPR: val = (cmp <= 0); break;
            default:
              val = false;
            }
          if (val)
            return fold_build2 (code1, boolean_type_node, op1a, op1b);
        }

      /* Choose the less restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
               && (code2 == LT_EXPR || code2 == LE_EXPR))
        {
          if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
            return fold_build2 (code2, boolean_type_node, op2a, op2b);
          else
            return fold_build2 (code1, boolean_type_node, op1a, op1b);
        }

      /* Likewise choose the less restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
               && (code2 == GT_EXPR || code2 == GE_EXPR))
        {
          if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
            return fold_build2 (code2, boolean_type_node, op2a, op2b);
          else
            return fold_build2 (code1, boolean_type_node, op1a, op1b);
        }

      /* Check for singleton ranges.  */
      else if (cmp == 0
               && ((code1 == LT_EXPR && code2 == GT_EXPR)
                   || (code1 == GT_EXPR && code2 == LT_EXPR)))
        return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);

      /* Check for less/greater pairs that don't restrict the range at all.  */
      else if (cmp >= 0
               && (code1 == LT_EXPR || code1 == LE_EXPR)
               && (code2 == GT_EXPR || code2 == GE_EXPR))
        return boolean_true_node;
      else if (cmp <= 0
               && (code1 == GT_EXPR || code1 == GE_EXPR)
               && (code2 == LT_EXPR || code2 == LE_EXPR))
        return boolean_true_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
                     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple *stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
        {
        case GIMPLE_ASSIGN:
          /* Try to simplify by copy-propagating the definition.  */
          return or_var_with_comparison (op1a, invert, code2, op2a, op2b);

        case GIMPLE_PHI:
          /* If every argument to the PHI produces the same result when
             ORed with the second comparison, we win.
             Do not do this unless the type is bool since we need a bool
             result here anyway.  */
          if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
            {
              tree result = NULL_TREE;
              unsigned i;
              for (i = 0; i < gimple_phi_num_args (stmt); i++)
                {
                  tree arg = gimple_phi_arg_def (stmt, i);

                  /* If this PHI has itself as an argument, ignore it.
                     If all the other args produce the same result,
                     we're still OK.  */
                  if (arg == gimple_phi_result (stmt))
                    continue;
                  else if (TREE_CODE (arg) == INTEGER_CST)
                    {
                      if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
                        {
                          if (!result)
                            result = boolean_true_node;
                          else if (!integer_onep (result))
                            return NULL_TREE;
                        }
                      else if (!result)
                        result = fold_build2 (code2, boolean_type_node,
                                              op2a, op2b);
                      else if (!same_bool_comparison_p (result,
                                                        code2, op2a, op2b))
                        return NULL_TREE;
                    }
                  else if (TREE_CODE (arg) == SSA_NAME
                           && !SSA_NAME_IS_DEFAULT_DEF (arg))
                    {
                      tree temp;
                      gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
                      /* In simple cases we can look through PHI nodes,
                         but we have to be careful with loops.
                         See PR49073.  */
                      if (! dom_info_available_p (CDI_DOMINATORS)
                          || gimple_bb (def_stmt) == gimple_bb (stmt)
                          || dominated_by_p (CDI_DOMINATORS,
                                             gimple_bb (def_stmt),
                                             gimple_bb (stmt)))
                        return NULL_TREE;
                      temp = or_var_with_comparison (arg, invert, code2,
                                                     op2a, op2b);
                      if (!temp)
                        return NULL_TREE;
                      else if (!result)
                        result = temp;
                      else if (!same_bool_result_p (result, temp))
                        return NULL_TREE;
                    }
                  else
                    return NULL_TREE;
                }
              return result;
            }

        default:
          break;
        }
    }
  return NULL_TREE;
}
/* Try to simplify the OR of two comparisons, specified by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be simplified to a single expression (without requiring
   introducing more SSA variables to hold intermediate values),
   return the resulting tree.  Otherwise return NULL_TREE.
   If the result expression is non-null, it has boolean type.  */

tree
maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
                           enum tree_code code2, tree op2a, tree op2b)
{
  tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
  if (t)
    return t;
  else
    return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
}
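
/* Illustration (not in the original source): the OR counterpart keeps the
   less restrictive test or proves the pair exhaustive, e.g.
     maybe_fold_or_comparisons (LT_EXPR, x, 5, LT_EXPR, x, 3)  => x < 5
     maybe_fold_or_comparisons (LE_EXPR, x, 5, GE_EXPR, x, 3)  => true
     maybe_fold_or_comparisons (LT_EXPR, x, 4, GT_EXPR, x, 4)  => x != 4  */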
/* Fold STMT to a constant using VALUEIZE to valueize SSA names.

   Either NULL_TREE, a simplified but non-constant or a constant
   is returned.

   ???  This should go into a gimple-fold-inline.h file to be eventually
   privatized with the single valueize function used in the various TUs
   to avoid the indirect function call overhead.  */

tree
gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
                                tree (*gvalueize) (tree))
{
  code_helper rcode;
  tree ops[3] = {};
  /* ???  The SSA propagators do not correctly deal with following SSA use-def
     edges if there are intermediate VARYING defs.  For this reason
     do not follow SSA edges here even though SCCVN can technically
     just deal fine with that.  */
  if (gimple_simplify (stmt, &rcode, ops, NULL, gvalueize, valueize))
    {
      tree res = NULL_TREE;
      if (gimple_simplified_result_is_gimple_val (rcode, ops))
        res = ops[0];
      else if (mprts_hook)
        res = mprts_hook (rcode, gimple_expr_type (stmt), ops);
      if (res)
        {
          if (dump_file && dump_flags & TDF_DETAILS)
            {
              fprintf (dump_file, "Match-and-simplified ");
              print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
              fprintf (dump_file, " to ");
              print_generic_expr (dump_file, res, 0);
              fprintf (dump_file, "\n");
            }
          return res;
        }
    }

  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        enum tree_code subcode = gimple_assign_rhs_code (stmt);

        switch (get_gimple_rhs_class (subcode))
          {
          case GIMPLE_SINGLE_RHS:
            {
              tree rhs = gimple_assign_rhs1 (stmt);
              enum tree_code_class kind = TREE_CODE_CLASS (subcode);

              if (TREE_CODE (rhs) == SSA_NAME)
                {
                  /* If the RHS is an SSA_NAME, return its known constant value,
                     if any.  */
                  return (*valueize) (rhs);
                }
              /* Handle propagating invariant addresses into address
                 operations.  */
              else if (TREE_CODE (rhs) == ADDR_EXPR
                       && !is_gimple_min_invariant (rhs))
                {
                  HOST_WIDE_INT offset = 0;
                  tree base;
                  base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
                                                          &offset,
                                                          valueize);
                  if (base
                      && (CONSTANT_CLASS_P (base)
                          || decl_address_invariant_p (base)))
                    return build_invariant_address (TREE_TYPE (rhs),
                                                    base, offset);
                }
              else if (TREE_CODE (rhs) == CONSTRUCTOR
                       && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
                       && (CONSTRUCTOR_NELTS (rhs)
                           == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
                {
                  unsigned i;
                  tree val, *vec;

                  vec = XALLOCAVEC (tree,
                                    TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
                  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
                    {
                      val = (*valueize) (val);
                      if (TREE_CODE (val) == INTEGER_CST
                          || TREE_CODE (val) == REAL_CST
                          || TREE_CODE (val) == FIXED_CST)
                        vec[i] = val;
                      else
                        return NULL_TREE;
                    }

                  return build_vector (TREE_TYPE (rhs), vec);
                }
              if (subcode == OBJ_TYPE_REF)
                {
                  tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
                  /* If callee is constant, we can fold away the wrapper.  */
                  if (is_gimple_min_invariant (val))
                    return val;
                }

              if (kind == tcc_reference)
                {
                  if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
                       || TREE_CODE (rhs) == REALPART_EXPR
                       || TREE_CODE (rhs) == IMAGPART_EXPR)
                      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
                    {
                      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
                      return fold_unary_loc (EXPR_LOCATION (rhs),
                                             TREE_CODE (rhs),
                                             TREE_TYPE (rhs), val);
                    }
                  else if (TREE_CODE (rhs) == BIT_FIELD_REF
                           && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
                    {
                      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
                      return fold_ternary_loc (EXPR_LOCATION (rhs),
                                               TREE_CODE (rhs),
                                               TREE_TYPE (rhs), val,
                                               TREE_OPERAND (rhs, 1),
                                               TREE_OPERAND (rhs, 2));
                    }
                  else if (TREE_CODE (rhs) == MEM_REF
                           && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
                    {
                      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
                      if (TREE_CODE (val) == ADDR_EXPR
                          && is_gimple_min_invariant (val))
                        {
                          tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
                                                  unshare_expr (val),
                                                  TREE_OPERAND (rhs, 1));
                          if (tem)
                            rhs = tem;
                        }
                    }
                  return fold_const_aggregate_ref_1 (rhs, valueize);
                }
              else if (kind == tcc_declaration)
                return get_symbol_constant_value (rhs);
              return rhs;
            }

          case GIMPLE_UNARY_RHS:
            return NULL_TREE;

          case GIMPLE_BINARY_RHS:
            /* Translate &x + CST into an invariant form suitable for
               further propagation.  */
            if (subcode == POINTER_PLUS_EXPR)
              {
                tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
                tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
                if (TREE_CODE (op0) == ADDR_EXPR
                    && TREE_CODE (op1) == INTEGER_CST)
                  {
                    tree off = fold_convert (ptr_type_node, op1);
                    return build_fold_addr_expr_loc
                        (loc,
                         fold_build2 (MEM_REF,
                                      TREE_TYPE (TREE_TYPE (op0)),
                                      unshare_expr (op0), off));
                  }
              }
            /* Canonicalize bool != 0 and bool == 0 appearing after
               valueization.  While gimple_simplify handles this
               it can get confused by the ~X == 1 -> X == 0 transform
               which we can't reduce to an SSA name or a constant
               (and we have no way to tell gimple_simplify to not
               consider those transforms in the first place).  */
            else if (subcode == EQ_EXPR
                     || subcode == NE_EXPR)
              {
                tree lhs = gimple_assign_lhs (stmt);
                tree op0 = gimple_assign_rhs1 (stmt);
                if (useless_type_conversion_p (TREE_TYPE (lhs),
                                               TREE_TYPE (op0)))
                  {
                    tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
                    op0 = (*valueize) (op0);
                    if (TREE_CODE (op0) == INTEGER_CST)
                      std::swap (op0, op1);
                    if (TREE_CODE (op1) == INTEGER_CST
                        && ((subcode == NE_EXPR && integer_zerop (op1))
                            || (subcode == EQ_EXPR && integer_onep (op1))))
                      return op0;
                  }
              }
            return NULL_TREE;

          case GIMPLE_TERNARY_RHS:
            {
              /* Handle ternary operators that can appear in GIMPLE form.  */
              tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
              tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
              tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
              return fold_ternary_loc (loc, subcode,
                                       gimple_expr_type (stmt), op0, op1, op2);
            }

          default:
            gcc_unreachable ();
          }
      }

    case GIMPLE_CALL:
      {
        tree fn;
        gcall *call_stmt = as_a <gcall *> (stmt);

        if (gimple_call_internal_p (stmt))
          {
            enum tree_code subcode = ERROR_MARK;
            switch (gimple_call_internal_fn (stmt))
              {
              case IFN_UBSAN_CHECK_ADD:
                subcode = PLUS_EXPR;
                break;
              case IFN_UBSAN_CHECK_SUB:
                subcode = MINUS_EXPR;
                break;
              case IFN_UBSAN_CHECK_MUL:
                subcode = MULT_EXPR;
                break;
              case IFN_BUILTIN_EXPECT:
                  {
                    tree arg0 = gimple_call_arg (stmt, 0);
                    tree op0 = (*valueize) (arg0);
                    if (TREE_CODE (op0) == INTEGER_CST)
                      return op0;
                    return NULL_TREE;
                  }
              default:
                return NULL_TREE;
              }
            tree arg0 = gimple_call_arg (stmt, 0);
            tree arg1 = gimple_call_arg (stmt, 1);
            tree op0 = (*valueize) (arg0);
            tree op1 = (*valueize) (arg1);

            if (TREE_CODE (op0) != INTEGER_CST
                || TREE_CODE (op1) != INTEGER_CST)
              {
                switch (subcode)
                  {
                  case MULT_EXPR:
                    /* x * 0 = 0 * x = 0 without overflow.  */
                    if (integer_zerop (op0) || integer_zerop (op1))
                      return build_zero_cst (TREE_TYPE (arg0));
                    break;
                  case MINUS_EXPR:
                    /* y - y = 0 without overflow.  */
                    if (operand_equal_p (op0, op1, 0))
                      return build_zero_cst (TREE_TYPE (arg0));
                    break;
                  default:
                    break;
                  }
              }
            tree res
              = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
            if (res
                && TREE_CODE (res) == INTEGER_CST
                && !TREE_OVERFLOW (res))
              return res;
            return NULL_TREE;
          }

        fn = (*valueize) (gimple_call_fn (stmt));
        if (TREE_CODE (fn) == ADDR_EXPR
            && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
            && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
            && gimple_builtin_call_types_compatible_p (stmt,
                                                       TREE_OPERAND (fn, 0)))
          {
            tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
            tree retval;
            unsigned i;
            for (i = 0; i < gimple_call_num_args (stmt); ++i)
              args[i] = (*valueize) (gimple_call_arg (stmt, i));
            retval = fold_builtin_call_array (loc,
                                              gimple_call_return_type (call_stmt),
                                              fn, gimple_call_num_args (stmt), args);
            if (retval)
              {
                /* fold_call_expr wraps the result inside a NOP_EXPR.  */
                STRIP_NOPS (retval);
                retval = fold_convert (gimple_call_return_type (call_stmt),
                                       retval);
              }
            return retval;
          }
        return NULL_TREE;
      }

    default:
      return NULL_TREE;
    }
}
/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
   Returns NULL_TREE if folding to a constant is not possible, otherwise
   returns a constant according to is_gimple_min_invariant.  */

tree
gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
{
  tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
  if (res && is_gimple_min_invariant (res))
    return res;
  return NULL_TREE;
}
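
/* Usage sketch (illustration only, not part of the original source; the
   wrapper name is hypothetical): callers that keep no lattice of their own
   pass the valueization hook that follows no SSA edges.  */
#if 0
static tree
example_try_fold_to_constant (gimple *stmt)
{
  return gimple_fold_stmt_to_constant (stmt, no_follow_ssa_edges);
}
#endif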
/* The following set of functions are supposed to fold references using
   their constant initializers.  */

/* See if we can find constructor defining value of BASE.
   When we know the constructor with constant offset (such as
   base is array[40] and we do know constructor of array), then
   BIT_OFFSET is adjusted accordingly.

   As a special case, return error_mark_node when constructor
   is not explicitly available, but it is known to be zero
   such as 'static const int a;'.  */

static tree
get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
                      tree (*valueize)(tree))
{
  HOST_WIDE_INT bit_offset2, size, max_size;
  bool reverse;

  if (TREE_CODE (base) == MEM_REF)
    {
      if (!integer_zerop (TREE_OPERAND (base, 1)))
        {
          if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
            return NULL_TREE;
          *bit_offset += (mem_ref_offset (base).to_short_addr ()
                          * BITS_PER_UNIT);
        }

      if (valueize
          && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
        base = valueize (TREE_OPERAND (base, 0));
      if (!base || TREE_CODE (base) != ADDR_EXPR)
        return NULL_TREE;
      base = TREE_OPERAND (base, 0);
    }
  else if (valueize
           && TREE_CODE (base) == SSA_NAME)
    base = valueize (base);

  /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
     DECL_INITIAL.  If BASE is a nested reference into another
     ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
     the inner reference.  */
  switch (TREE_CODE (base))
    {
    case VAR_DECL:
    case CONST_DECL:
      {
        tree init = ctor_for_folding (base);

        /* Our semantic is exact opposite of ctor_for_folding;
           NULL means unknown, while error_mark_node is 0.  */
        if (init == error_mark_node)
          return NULL_TREE;
        if (!init)
          return error_mark_node;
        return init;
      }

    case VIEW_CONVERT_EXPR:
      return get_base_constructor (TREE_OPERAND (base, 0),
                                   bit_offset, valueize);

    case ARRAY_REF:
    case COMPONENT_REF:
      base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
                                      &reverse);
      if (max_size == -1 || size != max_size)
        return NULL_TREE;
      *bit_offset +=  bit_offset2;
      return get_base_constructor (base, bit_offset, valueize);

    case CONSTRUCTOR:
      return base;

    default:
      if (CONSTANT_CLASS_P (base))
        return base;

      return NULL_TREE;
    }
}
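
/* Illustration (not in the original source) of the three possible answers:
     'static const int a;'      => error_mark_node (initializer known zero)
     'static const int b = 7;'  => the INTEGER_CST 7
     a base we cannot analyze   => NULL_TREE (unknown)  */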
/* CTOR is CONSTRUCTOR of an array type.  Fold reference of type TYPE and size
   SIZE to the memory at bit OFFSET.  */

static tree
fold_array_ctor_reference (tree type, tree ctor,
                           unsigned HOST_WIDE_INT offset,
                           unsigned HOST_WIDE_INT size,
                           tree from_decl)
{
  offset_int low_bound;
  offset_int elt_size;
  offset_int access_index;
  tree domain_type = NULL_TREE;
  HOST_WIDE_INT inner_offset;

  /* Compute low bound and elt size.  */
  if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
    domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    {
      /* Static constructors for variably sized objects make no sense.  */
      gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
      low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
    }
  else
    low_bound = 0;
  /* Static constructors for variably sized objects make no sense.  */
  gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
              == INTEGER_CST);
  elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));

  /* We can handle only constantly sized accesses that are known to not
     be larger than size of array element.  */
  if (!TYPE_SIZE_UNIT (type)
      || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
      || elt_size < wi::to_offset (TYPE_SIZE_UNIT (type))
      || elt_size == 0)
    return NULL_TREE;

  /* Compute the array index we look for.  */
  access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
                                 elt_size);
  access_index += low_bound;

  /* And offset within the access.  */
  inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);

  /* See if the array field is large enough to span whole access.  We do not
     care to fold accesses spanning multiple array indexes.  */
  if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
    return NULL_TREE;
  if (tree val = get_array_ctor_element_at_index (ctor, access_index))
    return fold_ctor_reference (type, val, inner_offset, size, from_decl);

  /* When memory is not explicitly mentioned in constructor,
     it is 0 (or out of range).  */
  return build_zero_cst (type);
}
/* CTOR is CONSTRUCTOR of an aggregate or vector.
   Fold reference of type TYPE and size SIZE to the memory at bit OFFSET.  */

static tree
fold_nonarray_ctor_reference (tree type, tree ctor,
                              unsigned HOST_WIDE_INT offset,
                              unsigned HOST_WIDE_INT size,
                              tree from_decl)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
                            cval)
    {
      tree byte_offset = DECL_FIELD_OFFSET (cfield);
      tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
      tree field_size = DECL_SIZE (cfield);
      offset_int bitoffset;
      offset_int bitoffset_end, access_end;

      /* Variable sized objects in static constructors make no sense,
         but field_size can be NULL for flexible array members.  */
      gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
                  && TREE_CODE (byte_offset) == INTEGER_CST
                  && (field_size != NULL_TREE
                      ? TREE_CODE (field_size) == INTEGER_CST
                      : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));

      /* Compute bit offset of the field.  */
      bitoffset = (wi::to_offset (field_offset)
                   + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
      /* Compute bit offset where the field ends.  */
      if (field_size != NULL_TREE)
        bitoffset_end = bitoffset + wi::to_offset (field_size);
      else
        bitoffset_end = 0;

      access_end = offset_int (offset) + size;

      /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
         [BITOFFSET, BITOFFSET_END)?  */
      if (wi::cmps (access_end, bitoffset) > 0
          && (field_size == NULL_TREE
              || wi::lts_p (offset, bitoffset_end)))
        {
          offset_int inner_offset = offset_int (offset) - bitoffset;
          /* We do have overlap.  Now see if field is large enough to
             cover the access.  Give up for accesses spanning multiple
             fields.  */
          if (wi::cmps (access_end, bitoffset_end) > 0)
            return NULL_TREE;
          if (offset < bitoffset)
            return NULL_TREE;
          return fold_ctor_reference (type, cval,
                                      inner_offset.to_uhwi (), size,
                                      from_decl);
        }
    }
  /* When memory is not explicitly mentioned in constructor, it is 0.  */
  return build_zero_cst (type);
}
/* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
   to the memory at bit OFFSET.  */

tree
fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
                     unsigned HOST_WIDE_INT size, tree from_decl)
{
  tree ret;

  /* We found the field with exact match.  */
  if (useless_type_conversion_p (type, TREE_TYPE (ctor))
      && !offset)
    return canonicalize_constructor_val (unshare_expr (ctor), from_decl);

  /* We are at the end of walk, see if we can view convert the
     result.  */
  if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
      /* VIEW_CONVERT_EXPR is defined only for matching sizes.  */
      && !compare_tree_int (TYPE_SIZE (type), size)
      && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
    {
      ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
      ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
      if (ret)
        STRIP_USELESS_TYPE_CONVERSION (ret);
      return ret;
    }
  /* For constants and byte-aligned/sized reads try to go through
     native_encode/interpret.  */
  if (CONSTANT_CLASS_P (ctor)
      && BITS_PER_UNIT == 8
      && offset % BITS_PER_UNIT == 0
      && size % BITS_PER_UNIT == 0
      && size <= MAX_BITSIZE_MODE_ANY_MODE)
    {
      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
      int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
                                    offset / BITS_PER_UNIT);
      if (len > 0)
        return native_interpret_expr (type, buf, len);
    }
  if (TREE_CODE (ctor) == CONSTRUCTOR)
    {
      if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
          || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
        return fold_array_ctor_reference (type, ctor, offset, size,
                                          from_decl);
      else
        return fold_nonarray_ctor_reference (type, ctor, offset, size,
                                             from_decl);
    }

  return NULL_TREE;
}
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates valueizing SSA
   names using VALUEIZE.  Return NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
{
  tree ctor, idx, base;
  HOST_WIDE_INT offset, size, max_size;
  tree tem;
  bool reverse;

  if (TREE_THIS_VOLATILE (t))
    return NULL_TREE;

  if (DECL_P (t))
    return get_symbol_constant_value (t);

  tem = fold_read_from_constant_string (t);
  if (tem)
    return tem;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Constant indexes are handled well by get_base_constructor.
         Only special case variable offsets.
         FIXME: This code can't handle nested references with variable indexes
         (they will be handled only by iteration of ccp).  Perhaps we can bring
         get_ref_base_and_extent here and make it use a valueize callback.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
          && valueize
          && (idx = (*valueize) (TREE_OPERAND (t, 1)))
          && TREE_CODE (idx) == INTEGER_CST)
        {
          tree low_bound, unit_size;

          /* If the resulting bit-offset is constant, track it.  */
          if ((low_bound = array_ref_low_bound (t),
               TREE_CODE (low_bound) == INTEGER_CST)
              && (unit_size = array_ref_element_size (t),
                  tree_fits_uhwi_p (unit_size)))
            {
              offset_int woffset
                = wi::sext (wi::to_offset (idx) - wi::to_offset (low_bound),
                            TYPE_PRECISION (TREE_TYPE (idx)));

              if (wi::fits_shwi_p (woffset))
                {
                  offset = woffset.to_shwi ();
                  /* TODO: This code seems wrong, multiply then check
                     to see if it fits.  */
                  offset *= tree_to_uhwi (unit_size);
                  offset *= BITS_PER_UNIT;

                  base = TREE_OPERAND (t, 0);
                  ctor = get_base_constructor (base, &offset, valueize);
                  /* Empty constructor.  Always fold to 0.  */
                  if (ctor == error_mark_node)
                    return build_zero_cst (TREE_TYPE (t));
                  /* Out of bound array access.  Value is undefined,
                     but don't fold.  */
                  if (offset < 0)
                    return NULL_TREE;
                  /* We cannot determine ctor.  */
                  if (!ctor)
                    return NULL_TREE;
                  return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
                                              tree_to_uhwi (unit_size)
                                              * BITS_PER_UNIT,
                                              base);
                }
            }
        }
      /* Fallthru.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case TARGET_MEM_REF:
    case MEM_REF:
      base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
      ctor = get_base_constructor (base, &offset, valueize);

      /* Empty constructor.  Always fold to 0.  */
      if (ctor == error_mark_node)
        return build_zero_cst (TREE_TYPE (t));
      /* We do not know precise address.  */
      if (max_size == -1 || max_size != size)
        return NULL_TREE;
      /* We cannot determine ctor.  */
      if (!ctor)
        return NULL_TREE;

      /* Out of bound array access.  Value is undefined, but don't fold.  */
      if (offset < 0)
        return NULL_TREE;

      return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
                                  base);

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
        tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
        if (c && TREE_CODE (c) == COMPLEX_CST)
          return fold_build1_loc (EXPR_LOCATION (t),
                                  TREE_CODE (t), TREE_TYPE (t), c);
        break;
      }

    default:
      break;
    }

  return NULL_TREE;
}

tree
fold_const_aggregate_ref (tree t)
{
  return fold_const_aggregate_ref_1 (t, NULL);
}
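
/* Illustration (not in the original source): given
     static const int tbl[4] = { 10, 20, 30, 40 };
   fold_const_aggregate_ref on the ARRAY_REF tbl[2] returns the
   INTEGER_CST 30; fold_const_aggregate_ref_1 can do the same for
   tbl[i_1] when VALUEIZE maps i_1 to 2.  */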
/* Lookup virtual method with index TOKEN in a virtual table V
   at OFFSET.
   Set CAN_REFER if non-NULL to false if method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by a non-C++ produced symbol).  Otherwise just return NULL in that case.  */

tree
gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
                                   tree v,
                                   unsigned HOST_WIDE_INT offset,
                                   bool *can_refer)
{
  tree vtable = v, init, fn;
  unsigned HOST_WIDE_INT size;
  unsigned HOST_WIDE_INT elt_size, access_index;
  tree domain_type;

  if (can_refer)
    *can_refer = true;

  /* First of all double check we have virtual table.  */
  if (TREE_CODE (v) != VAR_DECL
      || !DECL_VIRTUAL_P (v))
    {
      /* Pass down that we lost track of the target.  */
      if (can_refer)
        *can_refer = false;
      return NULL_TREE;
    }

  init = ctor_for_folding (v);

  /* The virtual tables should always be born with constructors
     and we always should assume that they are available for
     folding.  At the moment we do not stream them in all cases,
     but it should never happen that ctor seem unreachable.  */
  gcc_assert (init);
  if (init == error_mark_node)
    {
      gcc_assert (in_lto_p);
      /* Pass down that we lost track of the target.  */
      if (can_refer)
        *can_refer = false;
      return NULL_TREE;
    }
  gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
  size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
  offset *= BITS_PER_UNIT;
  offset += token * size;

  /* Lookup the value in the constructor that is assumed to be array.
     This is equivalent to
     fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
                               offset, size, NULL);
     but in a constant time.  We expect that frontend produced a simple
     array without indexed initializers.  */

  gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
  domain_type = TYPE_DOMAIN (TREE_TYPE (init));
  gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
  elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));

  access_index = offset / BITS_PER_UNIT / elt_size;
  gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);

  /* This code makes an assumption that there are no
     indexed fields produced by C++ FE, so we can directly index the array.  */
  if (access_index < CONSTRUCTOR_NELTS (init))
    {
      fn = CONSTRUCTOR_ELT (init, access_index)->value;
      gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
      STRIP_NOPS (fn);
    }
  else
    fn = NULL;

  /* For type inconsistent program we may end up looking up virtual method
     in virtual table that does not contain TOKEN entries.  We may overrun
     the virtual table and pick up a constant or RTTI info pointer.
     In any case the call is undefined.  */
  if (!fn
      || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
      || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
    fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
  else
    {
      fn = TREE_OPERAND (fn, 0);

      /* When cgraph node is missing and function is not public, we cannot
         devirtualize.  This can happen in WHOPR when the actual method
         ends up in other partition, because we found devirtualization
         possibility too late.  */
      if (!can_refer_decl_in_current_unit_p (fn, vtable))
        {
          if (can_refer)
            {
              *can_refer = false;
              return fn;
            }
          return NULL_TREE;
        }
    }

  /* Make sure we create a cgraph node for functions we'll reference.
     They can be non-existent if the reference comes from an entry
     of an external vtable for example.  */
  cgraph_node::get_create (fn);

  return fn;
}
/* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
   is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
   KNOWN_BINFO carries the binfo describing the true type of
   OBJ_TYPE_REF_OBJECT(REF).
   Set CAN_REFER if non-NULL to false if method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by a non-C++ produced symbol).  Otherwise just return NULL in that case.  */

tree
gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
                                  bool *can_refer)
{
  unsigned HOST_WIDE_INT offset;
  tree v;

  v = BINFO_VTABLE (known_binfo);
  /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone.  */
  if (!v)
    return NULL_TREE;

  if (!vtable_pointer_value_to_vtable (v, &v, &offset))
    {
      if (can_refer)
        *can_refer = false;
      return NULL_TREE;
    }
  return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
}
/* Given a pointer value OP0, return a simplified version of an
   indirection through OP0, or NULL_TREE if no simplification is
   possible.  Note that the resulting type may be different from
   the type pointed to in the sense that it is still compatible
   from the langhooks point of view. */

tree
gimple_fold_indirect_ref (tree t)
{
  tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
  tree sub = t;
  tree subtype;

  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (useless_type_conversion_p (type, optype))
        return op;

      /* *(foo *)&fooarray => fooarray[0] */
      if (TREE_CODE (optype) == ARRAY_TYPE
          && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
          && useless_type_conversion_p (type, TREE_TYPE (optype)))
        {
          tree type_domain = TYPE_DOMAIN (optype);
          tree min_val = size_zero_node;
          if (type_domain && TYPE_MIN_VALUE (type_domain))
            min_val = TYPE_MIN_VALUE (type_domain);
          if (TREE_CODE (min_val) == INTEGER_CST)
            return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
        }
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
               && useless_type_conversion_p (type, TREE_TYPE (optype)))
        return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
               && useless_type_conversion_p (type, TREE_TYPE (optype)))
        {
          tree part_width = TYPE_SIZE (type);
          tree index = bitsize_int (0);
          return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
        }
    }

  /* *(p + CST) -> ...  */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree addr = TREE_OPERAND (sub, 0);
      tree off = TREE_OPERAND (sub, 1);
      tree addrtype;

      STRIP_NOPS (addr);
      addrtype = TREE_TYPE (addr);

      /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
      if (TREE_CODE (addr) == ADDR_EXPR
          && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
          && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
          && tree_fits_uhwi_p (off))
        {
          unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
          tree part_width = TYPE_SIZE (type);
          unsigned HOST_WIDE_INT part_widthi
            = tree_to_shwi (part_width) / BITS_PER_UNIT;
          unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
          tree index = bitsize_int (indexi);
          if (offset / part_widthi
              < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
            return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
                                part_width, index);
        }

      /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
      if (TREE_CODE (addr) == ADDR_EXPR
          && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
          && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
        {
          tree size = TYPE_SIZE_UNIT (type);
          if (tree_int_cst_equal (size, off))
            return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
        }

      /* *(p + CST) -> MEM_REF <p, CST>.  */
      if (TREE_CODE (addr) != ADDR_EXPR
          || DECL_P (TREE_OPERAND (addr, 0)))
        return fold_build2 (MEM_REF, type,
                            addr,
                            wide_int_to_tree (ptype, off));
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
      && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      tree osub = sub;
      sub = gimple_fold_indirect_ref (sub);
      if (! sub)
        sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
        min_val = TYPE_MIN_VALUE (type_domain);
      if (TREE_CODE (min_val) == INTEGER_CST)
        return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
/* Return true if CODE is an operation that when operating on signed
   integer types involves undefined behavior on overflow and the
   operation can be expressed with unsigned arithmetic.  */

bool
arith_code_with_undefined_signed_overflow (tree_code code)
{
  switch (code)
    {
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case NEGATE_EXPR:
    case POINTER_PLUS_EXPR:
      return true;
    default:
      return false;
    }
}
/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
   operation that can be transformed to unsigned arithmetic by converting
   its operand, carrying out the operation in the corresponding unsigned
   type and converting the result back to the original type.

   Returns a sequence of statements that replace STMT and also contain
   a modified form of STMT itself.  */

gimple_seq
rewrite_to_defined_overflow (gimple *stmt)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "rewriting stmt with undefined signed "
               "overflow ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
    }

  tree lhs = gimple_assign_lhs (stmt);
  tree type = unsigned_type_for (TREE_TYPE (lhs));
  gimple_seq stmts = NULL;
  for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
    {
      tree op = gimple_op (stmt, i);
      op = gimple_convert (&stmts, type, op);
      gimple_set_op (stmt, i, op);
    }
  gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
  gimple_seq_add_stmt (&stmts, stmt);
  gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
  gimple_seq_add_stmt (&stmts, cvt);

  return stmts;
}
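
/* Illustration (not in the original source; the SSA names are hypothetical):
   for a statement 'a_2 = b_1 + 1' of signed int type the returned
   sequence has the shape
     _3 = (unsigned int) b_1;
     a_2' = _3 + 1;
     a_2 = (int) a_2';
   where the middle statement is STMT itself, rewritten in place.  */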
/* The valueization hook we use for the gimple_build API simplification.
   This makes us match fold_buildN behavior by only combining with
   statements in the sequence(s) we are currently building.  */

static tree
gimple_build_valueize (tree op)
{
  if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
    return op;
  return NULL_TREE;
}
/* Build the expression CODE OP0 of type TYPE with location LOC,
   simplifying it first if possible.  Returns the built
   expression value and appends statements possibly defining it
   to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum tree_code code, tree type, tree op0)
{
  tree res = gimple_simplify (code, type, op0, seq, gimple_build_valueize);
  if (!res)
    {
      if (gimple_in_ssa_p (cfun))
	res = make_ssa_name (type);
      else
	res = create_tmp_reg (type);
      gimple *stmt;
      if (code == REALPART_EXPR
	  || code == IMAGPART_EXPR
	  || code == VIEW_CONVERT_EXPR)
	stmt = gimple_build_assign (res, code, build1 (code, type, op0));
      else
	stmt = gimple_build_assign (res, code, op0);
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
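/* A typical use of the gimple_build API (the variables here are
   hypothetical):

     gimple_seq stmts = NULL;
     tree res = gimple_build (&stmts, loc, NEGATE_EXPR, type, val);
     gsi_insert_seq_before (&gsi, stmts, GSI_SAME_STMT);

   If the negation simplifies, for example when VAL is itself defined
   by a negation in STMTS, RES is the simplified value and no new
   statement is appended.  */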
/* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
   simplifying it first if possible.  Returns the built
   expression value and appends statements possibly defining it
   to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum tree_code code, tree type, tree op0, tree op1)
{
  tree res = gimple_simplify (code, type, op0, op1, seq, gimple_build_valueize);
  if (!res)
    {
      if (gimple_in_ssa_p (cfun))
	res = make_ssa_name (type);
      else
	res = create_tmp_reg (type);
      gimple *stmt = gimple_build_assign (res, code, op0, op1);
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
/* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
   simplifying it first if possible.  Returns the built
   expression value and appends statements possibly defining it
   to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum tree_code code, tree type, tree op0, tree op1, tree op2)
{
  tree res = gimple_simplify (code, type, op0, op1, op2,
			      seq, gimple_build_valueize);
  if (!res)
    {
      if (gimple_in_ssa_p (cfun))
	res = make_ssa_name (type);
      else
	res = create_tmp_reg (type);
      gimple *stmt;
      if (code == BIT_FIELD_REF)
	stmt = gimple_build_assign (res, code,
				    build3 (code, type, op0, op1, op2));
      else
	stmt = gimple_build_assign (res, code, op0, op1, op2);
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
/* Build the call FN (ARG0) with a result of type TYPE
   (or no result if TYPE is void) with location LOC,
   simplifying it first if possible.  Returns the built
   expression value (or NULL_TREE if TYPE is void) and appends
   statements possibly defining it to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum built_in_function fn, tree type, tree arg0)
{
  tree res = gimple_simplify (fn, type, arg0, seq, gimple_build_valueize);
  if (!res)
    {
      tree decl = builtin_decl_implicit (fn);
      gimple *stmt = gimple_build_call (decl, 1, arg0);
      if (!VOID_TYPE_P (type))
	{
	  if (gimple_in_ssa_p (cfun))
	    res = make_ssa_name (type);
	  else
	    res = create_tmp_reg (type);
	  gimple_call_set_lhs (stmt, res);
	}
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
/* Build the call FN (ARG0, ARG1) with a result of type TYPE
   (or no result if TYPE is void) with location LOC,
   simplifying it first if possible.  Returns the built
   expression value (or NULL_TREE if TYPE is void) and appends
   statements possibly defining it to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum built_in_function fn, tree type, tree arg0, tree arg1)
{
  tree res = gimple_simplify (fn, type, arg0, arg1, seq,
			      gimple_build_valueize);
  if (!res)
    {
      tree decl = builtin_decl_implicit (fn);
      gimple *stmt = gimple_build_call (decl, 2, arg0, arg1);
      if (!VOID_TYPE_P (type))
	{
	  if (gimple_in_ssa_p (cfun))
	    res = make_ssa_name (type);
	  else
	    res = create_tmp_reg (type);
	  gimple_call_set_lhs (stmt, res);
	}
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
/* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
   (or no result if TYPE is void) with location LOC,
   simplifying it first if possible.  Returns the built
   expression value (or NULL_TREE if TYPE is void) and appends
   statements possibly defining it to SEQ.  */

tree
gimple_build (gimple_seq *seq, location_t loc,
	      enum built_in_function fn, tree type,
	      tree arg0, tree arg1, tree arg2)
{
  tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
			      seq, gimple_build_valueize);
  if (!res)
    {
      tree decl = builtin_decl_implicit (fn);
      gimple *stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
      if (!VOID_TYPE_P (type))
	{
	  if (gimple_in_ssa_p (cfun))
	    res = make_ssa_name (type);
	  else
	    res = create_tmp_reg (type);
	  gimple_call_set_lhs (stmt, res);
	}
      gimple_set_location (stmt, loc);
      gimple_seq_add_stmt_without_update (seq, stmt);
    }
  return res;
}
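/* The built-in function overloads work the same way; for example
   (hypothetical variables)

     tree res = gimple_build (&stmts, loc, BUILT_IN_SQRT,
			      double_type_node, arg);

   either folds the call, e.g. for a constant ARG, or appends a call
   to the implicit sqrt built-in to STMTS with RES as its result.  */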
/* Build the conversion (TYPE) OP with a result of type TYPE
   with location LOC if such conversion is necessary in GIMPLE,
   simplifying it first.
   Returns the built expression value and appends
   statements possibly defining it to SEQ.  */

tree
gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
{
  if (useless_type_conversion_p (type, TREE_TYPE (op)))
    return op;
  return gimple_build (seq, loc, NOP_EXPR, type, op);
}
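/* For example (hypothetical variables), to get OFF in a signed index
   type:

     tree idx = gimple_convert (&stmts, loc, ssizetype, off);

   This returns OFF unchanged if the conversion would be useless and
   appends a NOP_EXPR assignment to STMTS otherwise.  */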
/* Build the conversion (ptrofftype) OP with a result of a type
   compatible with ptrofftype with location LOC if such conversion
   is necessary in GIMPLE, simplifying it first.
   Returns the built expression value and appends
   statements possibly defining it to SEQ.  */

tree
gimple_convert_to_ptrofftype (gimple_seq *seq, location_t loc, tree op)
{
  if (ptrofftype_p (TREE_TYPE (op)))
    return op;
  return gimple_convert (seq, loc, sizetype, op);
}
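/* This is convenient when synthesizing POINTER_PLUS_EXPRs, whose
   offset operand must be of ptrofftype; for example (hypothetical
   variables)

     off = gimple_convert_to_ptrofftype (&stmts, loc, off);
     ptr = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
			 TREE_TYPE (base), base, off);  */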
/* Return true if the result of assignment STMT is known to be non-negative.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

static bool
gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
				   int depth)
{
  enum tree_code code = gimple_assign_rhs_code (stmt);
  switch (get_gimple_rhs_class (code))
    {
    case GIMPLE_UNARY_RHS:
      return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
					     gimple_expr_type (stmt),
					     gimple_assign_rhs1 (stmt),
					     strict_overflow_p, depth);
    case GIMPLE_BINARY_RHS:
      return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
					      gimple_expr_type (stmt),
					      gimple_assign_rhs1 (stmt),
					      gimple_assign_rhs2 (stmt),
					      strict_overflow_p, depth);
    case GIMPLE_TERNARY_RHS:
      return false;
    case GIMPLE_SINGLE_RHS:
      return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
					      strict_overflow_p, depth);
    case GIMPLE_INVALID_RHS:
      break;
    }
  gcc_unreachable ();
}
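/* For example, for x_1 = y_2 * y_2 (made-up SSA names) with signed Y,
   the GIMPLE_BINARY_RHS case returns true and sets *STRICT_OVERFLOW_P,
   since y * y is guaranteed non-negative only when signed overflow is
   undefined.  */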
/* Return true if return value of call STMT is known to be non-negative.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

static bool
gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
				 int depth)
{
  tree arg0 = gimple_call_num_args (stmt) > 0 ?
    gimple_call_arg (stmt, 0) : NULL_TREE;
  tree arg1 = gimple_call_num_args (stmt) > 1 ?
    gimple_call_arg (stmt, 1) : NULL_TREE;

  return tree_call_nonnegative_warnv_p (gimple_expr_type (stmt),
					gimple_call_combined_fn (stmt),
					arg0,
					arg1,
					strict_overflow_p, depth);
}
/* Return true if the result of PHI STMT is known to be non-negative.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

static bool
gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
				int depth)
{
  for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
    {
      tree arg = gimple_phi_arg_def (stmt, i);
      if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
	return false;
    }
  return true;
}
/* Return true if STMT is known to compute a non-negative value.
   If the return value is based on the assumption that signed overflow is
   undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
   *STRICT_OVERFLOW_P.  DEPTH is the current nesting depth of the query.  */

bool
gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
				 int depth)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
						depth);
    case GIMPLE_CALL:
      return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
					      depth);
    case GIMPLE_PHI:
      return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
					     depth);
    default:
      return false;
    }
}
/* Return true if the floating-point value computed by assignment STMT
   is known to have an integer value.  We also allow +Inf, -Inf and NaN
   to be considered integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

static bool
gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
{
  enum tree_code code = gimple_assign_rhs_code (stmt);
  switch (get_gimple_rhs_class (code))
    {
    case GIMPLE_UNARY_RHS:
      return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
					  gimple_assign_rhs1 (stmt), depth);
    case GIMPLE_BINARY_RHS:
      return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
					   gimple_assign_rhs1 (stmt),
					   gimple_assign_rhs2 (stmt), depth);
    case GIMPLE_TERNARY_RHS:
      return false;
    case GIMPLE_SINGLE_RHS:
      return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
    case GIMPLE_INVALID_RHS:
      break;
    }
  gcc_unreachable ();
}
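/* For example (made-up SSA names), d_1 = (double) i_2 with integral I
   is integer-valued, and so is d_3 = d_1 + d_1, since the sum of two
   integer-valued reals rounds to an integer-valued real or infinity.  */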
/* Return true if the floating-point value computed by call STMT is known
   to have an integer value.  We also allow +Inf, -Inf and NaN to be
   considered integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

static bool
gimple_call_integer_valued_real_p (gimple *stmt, int depth)
{
  tree arg0 = (gimple_call_num_args (stmt) > 0
	       ? gimple_call_arg (stmt, 0)
	       : NULL_TREE);
  tree arg1 = (gimple_call_num_args (stmt) > 1
	       ? gimple_call_arg (stmt, 1)
	       : NULL_TREE);
  return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
				     arg0, arg1, depth);
}
/* Return true if the floating-point result of phi STMT is known to have
   an integer value.  We also allow +Inf, -Inf and NaN to be considered
   integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

static bool
gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
{
  for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
    {
      tree arg = gimple_phi_arg_def (stmt, i);
      if (!integer_valued_real_single_p (arg, depth + 1))
	return false;
    }
  return true;
}
/* Return true if the floating-point value computed by STMT is known
   to have an integer value.  We also allow +Inf, -Inf and NaN to be
   considered integer values.  Return false for signaling NaN.

   DEPTH is the current nesting depth of the query.  */

bool
gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      return gimple_assign_integer_valued_real_p (stmt, depth);
    case GIMPLE_CALL:
      return gimple_call_integer_valued_real_p (stmt, depth);
    case GIMPLE_PHI:
      return gimple_phi_integer_valued_real_p (stmt, depth);
    default:
      return false;
    }
}