/* Alias analysis for trees.
   Copyright (C) 2004-2015 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
23 #include "coretypes.h"
28 #include "fold-const.h"
33 #include "hard-reg-set.h"
35 #include "dominance.h"
36 #include "basic-block.h"
37 #include "timevar.h" /* for TV_ALIAS_STMT_WALK */
38 #include "langhooks.h"
40 #include "tree-pretty-print.h"
42 #include "tree-ssa-alias.h"
43 #include "internal-fn.h"
45 #include "gimple-expr.h"
47 #include "gimple-ssa.h"
48 #include "stringpool.h"
49 #include "tree-ssanames.h"
51 #include "insn-config.h"
61 #include "tree-inline.h"
63 #include "alloc-pool.h"
65 #include "plugin-api.h"
68 #include "ipa-reference.h"
/* Broad overview of how alias analysis on gimple works:

   Statements clobbering or using memory are linked through the
   virtual operand factored use-def chain.  The virtual operand
   is unique per function, its symbol is accessible via gimple_vop (cfun).
   Virtual operands are used for efficiently walking memory statements
   in the gimple IL and are useful for things like value-numbering as
   a generation count for memory references.

   SSA_NAME pointers may have associated points-to information
   accessible via the SSA_NAME_PTR_INFO macro.  Flow-insensitive
   points-to information is (re-)computed by the TODO_rebuild_alias
   pass manager todo.  Points-to information is also used for more
   precise tracking of call-clobbered and call-used variables and
   related disambiguations.

   This file contains functions for disambiguating memory references,
   the so called alias-oracle and tools for walking of the gimple IL.

   The main alias-oracle entry-points are

   bool stmt_may_clobber_ref_p (gimple, tree)

     This function queries if a statement may invalidate (parts of)
     the memory designated by the reference tree argument.

   bool ref_maybe_used_by_stmt_p (gimple, tree)

     This function queries if a statement may need (parts of) the
     memory designated by the reference tree argument.

   There are variants of these functions that only handle the call
   part of a statement, call_may_clobber_ref_p and ref_maybe_used_by_call_p.
   Note that these do not disambiguate against a possible call lhs.

   bool refs_may_alias_p (tree, tree)

     This function tries to disambiguate two reference trees.

   bool ptr_deref_may_alias_global_p (tree)

     This function queries if dereferencing a pointer variable may
     alias global memory.

   More low-level disambiguators are available and documented in
   this file.  Low-level disambiguators dealing with points-to
   information are in tree-ssa-structalias.c.  */
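
/* Illustrative sketch (not part of the original sources): how a pass would
   typically consult the oracle entry-points documented above.  The function
   name and the discarded results are purely for exposition and the snippet
   assumes the usual gimple iterator headers; it is kept compiled out.  */
#if 0
static void
example_alias_oracle_queries (function *fn, tree ref)
{
  basic_block bb;
  FOR_EACH_BB_FN (bb, fn)
    for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
	 gsi_next (&gsi))
      {
	gimple stmt = gsi_stmt (gsi);
	/* May STMT write (parts of) the memory designated by REF?  */
	bool clobbers = stmt_may_clobber_ref_p (stmt, ref);
	/* May STMT read (parts of) the memory designated by REF?  */
	bool uses = ref_maybe_used_by_stmt_p (stmt, ref);
	(void) clobbers;
	(void) uses;
      }
}
#endif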
/* Query statistics for the different low-level disambiguators.
   A high-level query may trigger multiple of them.  */

static struct {
  unsigned HOST_WIDE_INT refs_may_alias_p_may_alias;
  unsigned HOST_WIDE_INT refs_may_alias_p_no_alias;
  unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_may_alias;
  unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_no_alias;
  unsigned HOST_WIDE_INT call_may_clobber_ref_p_may_alias;
  unsigned HOST_WIDE_INT call_may_clobber_ref_p_no_alias;
} alias_stats;
void
dump_alias_stats (FILE *s)
{
  fprintf (s, "\nAlias oracle query stats:\n");
  fprintf (s, "  refs_may_alias_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.refs_may_alias_p_no_alias,
	   alias_stats.refs_may_alias_p_no_alias
	   + alias_stats.refs_may_alias_p_may_alias);
  fprintf (s, "  ref_maybe_used_by_call_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.ref_maybe_used_by_call_p_no_alias,
	   alias_stats.refs_may_alias_p_no_alias
	   + alias_stats.ref_maybe_used_by_call_p_may_alias);
  fprintf (s, "  call_may_clobber_ref_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.call_may_clobber_ref_p_no_alias,
	   alias_stats.call_may_clobber_ref_p_no_alias
	   + alias_stats.call_may_clobber_ref_p_may_alias);
  dump_alias_stats_in_alias_c (s);
}
/* Return true, if dereferencing PTR may alias with a global variable.  */

bool
ptr_deref_may_alias_global_p (tree ptr)
{
  struct ptr_info_def *pi;

  /* If we end up with a pointer constant here that may point
     to global memory.  */
  if (TREE_CODE (ptr) != SSA_NAME)
    return true;

  pi = SSA_NAME_PTR_INFO (ptr);

  /* If we do not have points-to information for this variable,
     we have to punt.  */
  if (!pi)
    return true;

  /* ??? This does not use TBAA to prune globals ptr may not access.  */
  return pt_solution_includes_global (&pi->pt);
}
/* Return true if dereferencing PTR may alias DECL.
   The caller is responsible for applying TBAA to see if PTR
   may access DECL at all.  */

static bool
ptr_deref_may_alias_decl_p (tree ptr, tree decl)
{
  struct ptr_info_def *pi;

  /* Conversions are irrelevant for points-to information and
     data-dependence analysis can feed us those.  */
  STRIP_NOPS (ptr);

  /* Anything we do not explicitly handle aliases.  */
  if ((TREE_CODE (ptr) != SSA_NAME
       && TREE_CODE (ptr) != ADDR_EXPR
       && TREE_CODE (ptr) != POINTER_PLUS_EXPR)
      || !POINTER_TYPE_P (TREE_TYPE (ptr))
      || (TREE_CODE (decl) != VAR_DECL
	  && TREE_CODE (decl) != PARM_DECL
	  && TREE_CODE (decl) != RESULT_DECL))
    return true;

  /* Disregard pointer offsetting.  */
  if (TREE_CODE (ptr) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr = TREE_OPERAND (ptr, 0);
	}
      while (TREE_CODE (ptr) == POINTER_PLUS_EXPR);
      return ptr_deref_may_alias_decl_p (ptr, decl);
    }

  /* ADDR_EXPR pointers either just offset another pointer or directly
     specify the pointed-to set.  */
  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	ptr = TREE_OPERAND (base, 0);
      else if (base
	       && DECL_P (base))
	return base == decl;
      else if (base
	       && CONSTANT_CLASS_P (base))
	return false;
      else
	return true;
    }

  /* Non-aliased variables can not be pointed to.  */
  if (!may_be_aliased (decl))
    return false;

  /* If we do not have useful points-to information for this pointer
     we cannot disambiguate anything else.  */
  pi = SSA_NAME_PTR_INFO (ptr);
  if (!pi)
    return true;

  return pt_solution_includes (&pi->pt, decl);
}
/* Return true if dereferenced PTR1 and PTR2 may alias.
   The caller is responsible for applying TBAA to see if accesses
   through PTR1 and PTR2 may conflict at all.  */

bool
ptr_derefs_may_alias_p (tree ptr1, tree ptr2)
{
  struct ptr_info_def *pi1, *pi2;

  /* Conversions are irrelevant for points-to information and
     data-dependence analysis can feed us those.  */
  STRIP_NOPS (ptr1);
  STRIP_NOPS (ptr2);

  /* Disregard pointer offsetting.  */
  if (TREE_CODE (ptr1) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr1 = TREE_OPERAND (ptr1, 0);
	}
      while (TREE_CODE (ptr1) == POINTER_PLUS_EXPR);
      return ptr_derefs_may_alias_p (ptr1, ptr2);
    }
  if (TREE_CODE (ptr2) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr2 = TREE_OPERAND (ptr2, 0);
	}
      while (TREE_CODE (ptr2) == POINTER_PLUS_EXPR);
      return ptr_derefs_may_alias_p (ptr1, ptr2);
    }

  /* ADDR_EXPR pointers either just offset another pointer or directly
     specify the pointed-to set.  */
  if (TREE_CODE (ptr1) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr1, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	return ptr_derefs_may_alias_p (TREE_OPERAND (base, 0), ptr2);
      else if (base
	       && DECL_P (base))
	return ptr_deref_may_alias_decl_p (ptr2, base);
      else
	return true;
    }
  if (TREE_CODE (ptr2) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr2, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	return ptr_derefs_may_alias_p (ptr1, TREE_OPERAND (base, 0));
      else if (base
	       && DECL_P (base))
	return ptr_deref_may_alias_decl_p (ptr1, base);
      else
	return true;
    }

  /* From here we require SSA name pointers.  Anything else aliases.  */
  if (TREE_CODE (ptr1) != SSA_NAME
      || TREE_CODE (ptr2) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (ptr1))
      || !POINTER_TYPE_P (TREE_TYPE (ptr2)))
    return true;

  /* We may end up with two empty points-to solutions for two same pointers.
     In this case we still want to say both pointers alias, so shortcut
     that here.  */
  if (ptr1 == ptr2)
    return true;

  /* If we do not have useful points-to information for either pointer
     we cannot disambiguate anything else.  */
  pi1 = SSA_NAME_PTR_INFO (ptr1);
  pi2 = SSA_NAME_PTR_INFO (ptr2);
  if (!pi1 || !pi2)
    return true;

  /* ??? This does not use TBAA to prune decls from the intersection
     that not both pointers may access.  */
  return pt_solutions_intersect (&pi1->pt, &pi2->pt);
}
/* Return true if dereferencing PTR may alias *REF.
   The caller is responsible for applying TBAA to see if PTR
   may access *REF at all.  */

static bool
ptr_deref_may_alias_ref_p_1 (tree ptr, ao_ref *ref)
{
  tree base = ao_ref_base (ref);

  if (TREE_CODE (base) == MEM_REF
      || TREE_CODE (base) == TARGET_MEM_REF)
    return ptr_derefs_may_alias_p (ptr, TREE_OPERAND (base, 0));
  else if (DECL_P (base))
    return ptr_deref_may_alias_decl_p (ptr, base);

  return true;
}
/* Returns whether reference REF to BASE may refer to global memory.  */

static bool
ref_may_alias_global_p_1 (tree base)
{
  if (DECL_P (base))
    return is_global_var (base);
  else if (TREE_CODE (base) == MEM_REF
	   || TREE_CODE (base) == TARGET_MEM_REF)
    return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
  return true;
}

bool
ref_may_alias_global_p (ao_ref *ref)
{
  tree base = ao_ref_base (ref);
  return ref_may_alias_global_p_1 (base);
}

bool
ref_may_alias_global_p (tree ref)
{
  tree base = get_base_address (ref);
  return ref_may_alias_global_p_1 (base);
}
/* Return true whether STMT may clobber global memory.  */

bool
stmt_may_clobber_global_p (gimple stmt)
{
  tree lhs;

  if (!gimple_vdef (stmt))
    return false;

  /* ??? We can ask the oracle whether an artificial pointer
     dereference with a pointer with points-to information covering
     all global memory (what about non-address taken memory?) may be
     clobbered by this call.  As there is at the moment no convenient
     way of doing that without generating garbage do some manual
     checking instead.
     ??? We could make a NULL ao_ref argument to the various
     predicates special, meaning any global memory.  */

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      lhs = gimple_assign_lhs (stmt);
      return (TREE_CODE (lhs) != SSA_NAME
	      && ref_may_alias_global_p (lhs));
    case GIMPLE_CALL:
      return true;
    default:
      return true;
    }
}
/* Dump alias information on FILE.  */

void
dump_alias_info (FILE *file)
{
  unsigned i;
  const char *funcname
    = lang_hooks.decl_printable_name (current_function_decl, 2);
  tree var;

  fprintf (file, "\n\nAlias information for %s\n\n", funcname);

  fprintf (file, "Aliased symbols\n\n");

  FOR_EACH_LOCAL_DECL (cfun, i, var)
    {
      if (may_be_aliased (var))
	dump_variable (file, var);
    }

  fprintf (file, "\nCall clobber information\n");

  fprintf (file, "\nESCAPED");
  dump_points_to_solution (file, &cfun->gimple_df->escaped);

  fprintf (file, "\n\nFlow-insensitive points-to information\n\n");

  for (i = 1; i < num_ssa_names; i++)
    {
      tree ptr = ssa_name (i);
      struct ptr_info_def *pi;

      if (ptr == NULL_TREE
	  || !POINTER_TYPE_P (TREE_TYPE (ptr))
	  || SSA_NAME_IN_FREE_LIST (ptr))
	continue;

      pi = SSA_NAME_PTR_INFO (ptr);
      if (pi)
	dump_points_to_info_for (file, ptr);
    }

  fprintf (file, "\n");
}


/* Dump alias information on stderr.  */

DEBUG_FUNCTION void
debug_alias_info (void)
{
  dump_alias_info (stderr);
}
/* Dump the points-to set *PT into FILE.  */

void
dump_points_to_solution (FILE *file, struct pt_solution *pt)
{
  if (pt->anything)
    fprintf (file, ", points-to anything");

  if (pt->nonlocal)
    fprintf (file, ", points-to non-local");

  if (pt->escaped)
    fprintf (file, ", points-to escaped");

  if (pt->ipa_escaped)
    fprintf (file, ", points-to unit escaped");

  if (pt->null)
    fprintf (file, ", points-to NULL");

  if (pt->vars)
    {
      fprintf (file, ", points-to vars: ");
      dump_decl_set (file, pt->vars);
      if (pt->vars_contains_nonlocal
	  && pt->vars_contains_escaped_heap)
	fprintf (file, " (nonlocal, escaped heap)");
      else if (pt->vars_contains_nonlocal
	       && pt->vars_contains_escaped)
	fprintf (file, " (nonlocal, escaped)");
      else if (pt->vars_contains_nonlocal)
	fprintf (file, " (nonlocal)");
      else if (pt->vars_contains_escaped_heap)
	fprintf (file, " (escaped heap)");
      else if (pt->vars_contains_escaped)
	fprintf (file, " (escaped)");
    }
}


/* Unified dump function for pt_solution.  */

DEBUG_FUNCTION void
debug (pt_solution &ref)
{
  dump_points_to_solution (stderr, &ref);
}

DEBUG_FUNCTION void
debug (pt_solution *ptr)
{
  if (ptr)
    debug (*ptr);
  else
    fprintf (stderr, "<nil>\n");
}
/* Dump points-to information for SSA_NAME PTR into FILE.  */

void
dump_points_to_info_for (FILE *file, tree ptr)
{
  struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);

  print_generic_expr (file, ptr, dump_flags);

  if (pi)
    dump_points_to_solution (file, &pi->pt);
  else
    fprintf (file, ", points-to anything");

  fprintf (file, "\n");
}


/* Dump points-to information for VAR into stderr.  */

DEBUG_FUNCTION void
debug_points_to_info_for (tree var)
{
  dump_points_to_info_for (stderr, var);
}
/* Initializes the alias-oracle reference representation *R from REF.  */

void
ao_ref_init (ao_ref *r, tree ref)
{
  r->ref = ref;
  r->base = NULL_TREE;
  r->offset = 0;
  r->size = -1;
  r->max_size = -1;
  r->ref_alias_set = -1;
  r->base_alias_set = -1;
  r->volatile_p = ref ? TREE_THIS_VOLATILE (ref) : false;
}
/* Returns the base object of the memory reference *REF.  */

tree
ao_ref_base (ao_ref *ref)
{
  if (ref->base)
    return ref->base;
  ref->base = get_ref_base_and_extent (ref->ref, &ref->offset, &ref->size,
				       &ref->max_size);
  return ref->base;
}
/* Returns the base object alias set of the memory reference *REF.  */

alias_set_type
ao_ref_base_alias_set (ao_ref *ref)
{
  tree base_ref;
  if (ref->base_alias_set != -1)
    return ref->base_alias_set;
  if (!ref->ref)
    return 0;
  base_ref = ref->ref;
  while (handled_component_p (base_ref))
    base_ref = TREE_OPERAND (base_ref, 0);
  ref->base_alias_set = get_alias_set (base_ref);
  return ref->base_alias_set;
}
/* Returns the reference alias set of the memory reference *REF.  */

alias_set_type
ao_ref_alias_set (ao_ref *ref)
{
  if (ref->ref_alias_set != -1)
    return ref->ref_alias_set;
  ref->ref_alias_set = get_alias_set (ref->ref);
  return ref->ref_alias_set;
}
/* Init an alias-oracle reference representation from a gimple pointer
   PTR and a gimple size SIZE in bytes.  If SIZE is NULL_TREE then the
   size is assumed to be unknown.  The access is assumed to be only
   to or after of the pointer target, not before it.  */

void
ao_ref_init_from_ptr_and_size (ao_ref *ref, tree ptr, tree size)
{
  HOST_WIDE_INT t, size_hwi, extra_offset = 0;
  ref->ref = NULL_TREE;
  if (TREE_CODE (ptr) == SSA_NAME)
    {
      gimple stmt = SSA_NAME_DEF_STMT (ptr);
      if (gimple_assign_single_p (stmt)
	  && gimple_assign_rhs_code (stmt) == ADDR_EXPR)
	ptr = gimple_assign_rhs1 (stmt);
      else if (is_gimple_assign (stmt)
	       && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
	       && TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST)
	{
	  ptr = gimple_assign_rhs1 (stmt);
	  extra_offset = BITS_PER_UNIT
			 * int_cst_value (gimple_assign_rhs2 (stmt));
	}
    }

  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      ref->base = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &t);
      if (ref->base)
	ref->offset = BITS_PER_UNIT * t;
      else
	{
	  size = NULL_TREE;
	  ref->offset = 0;
	  ref->base = get_base_address (TREE_OPERAND (ptr, 0));
	}
    }
  else
    {
      ref->base = build2 (MEM_REF, char_type_node,
			  ptr, null_pointer_node);
      ref->offset = 0;
    }
  ref->offset += extra_offset;
  if (size
      && tree_fits_shwi_p (size)
      && (size_hwi = tree_to_shwi (size)) <= HOST_WIDE_INT_MAX / BITS_PER_UNIT)
    ref->max_size = ref->size = size_hwi * BITS_PER_UNIT;
  else
    ref->max_size = ref->size = -1;
  ref->ref_alias_set = 0;
  ref->base_alias_set = 0;
  ref->volatile_p = false;
}
/* Return 1 if TYPE1 and TYPE2 are to be considered equivalent for the
   purpose of TBAA.  Return 0 if they are distinct and -1 if we cannot
   decide.  */

static int
same_type_for_tbaa (tree type1, tree type2)
{
  type1 = TYPE_MAIN_VARIANT (type1);
  type2 = TYPE_MAIN_VARIANT (type2);

  /* If we would have to do structural comparison bail out.  */
  if (TYPE_STRUCTURAL_EQUALITY_P (type1)
      || TYPE_STRUCTURAL_EQUALITY_P (type2))
    return -1;

  /* Compare the canonical types.  */
  if (TYPE_CANONICAL (type1) == TYPE_CANONICAL (type2))
    return 1;

  /* ??? Array types are not properly unified in all cases as we have
     spurious changes in the index types for example.  Removing this
     causes all sorts of problems with the Fortran frontend.  */
  if (TREE_CODE (type1) == ARRAY_TYPE
      && TREE_CODE (type2) == ARRAY_TYPE)
    return -1;

  /* ??? In Ada, an lvalue of an unconstrained type can be used to access an
     object of one of its constrained subtypes, e.g. when a function with an
     unconstrained parameter passed by reference is called on an object and
     inlined.  But, even in the case of a fixed size, type and subtypes are
     not equivalent enough as to share the same TYPE_CANONICAL, since this
     would mean that conversions between them are useless, whereas they are
     not (e.g. type and subtypes can have different modes).  So, in the end,
     they are only guaranteed to have the same alias set.  */
  if (get_alias_set (type1) == get_alias_set (type2))
    return -1;

  /* The types are known to be not equal.  */
  return 0;
}
/* Determine if the two component references REF1 and REF2 which are
   based on access types TYPE1 and TYPE2 and of which at least one is based
   on an indirect reference may alias.  REF2 is the only one that can
   be a decl in which case REF2_IS_DECL is true.
   REF1_ALIAS_SET, BASE1_ALIAS_SET, REF2_ALIAS_SET and BASE2_ALIAS_SET
   are the respective alias sets.  */

static bool
aliasing_component_refs_p (tree ref1,
			   alias_set_type ref1_alias_set,
			   alias_set_type base1_alias_set,
			   HOST_WIDE_INT offset1, HOST_WIDE_INT max_size1,
			   tree ref2,
			   alias_set_type ref2_alias_set,
			   alias_set_type base2_alias_set,
			   HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2,
			   bool ref2_is_decl)
{
  /* If one reference is a component reference through pointers try to find a
     common base and apply offset based disambiguation.  This handles
     for example
       struct A { int i; int j; } *q;
       struct B { struct A a; int k; } *p;
     disambiguating q->i and p->a.j.  */
  tree base1, base2;
  tree type1, type2;
  tree *refp;
  int same_p;

  /* Choose bases and base types to search for.  */
  base1 = ref1;
  while (handled_component_p (base1))
    base1 = TREE_OPERAND (base1, 0);
  type1 = TREE_TYPE (base1);
  base2 = ref2;
  while (handled_component_p (base2))
    base2 = TREE_OPERAND (base2, 0);
  type2 = TREE_TYPE (base2);

  /* Now search for the type1 in the access path of ref2.  This
     would be a common base for doing offset based disambiguation on.  */
  refp = &ref2;
  while (handled_component_p (*refp)
	 && same_type_for_tbaa (TREE_TYPE (*refp), type1) == 0)
    refp = &TREE_OPERAND (*refp, 0);
  same_p = same_type_for_tbaa (TREE_TYPE (*refp), type1);
  /* If we couldn't compare types we have to bail out.  */
  if (same_p == -1)
    return true;
  else if (same_p == 1)
    {
      HOST_WIDE_INT offadj, sztmp, msztmp;
      get_ref_base_and_extent (*refp, &offadj, &sztmp, &msztmp);
      offset2 -= offadj;
      get_ref_base_and_extent (base1, &offadj, &sztmp, &msztmp);
      offset1 -= offadj;
      return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
    }
  /* If we didn't find a common base, try the other way around.  */
  refp = &ref1;
  while (handled_component_p (*refp)
	 && same_type_for_tbaa (TREE_TYPE (*refp), type2) == 0)
    refp = &TREE_OPERAND (*refp, 0);
  same_p = same_type_for_tbaa (TREE_TYPE (*refp), type2);
  /* If we couldn't compare types we have to bail out.  */
  if (same_p == -1)
    return true;
  else if (same_p == 1)
    {
      HOST_WIDE_INT offadj, sztmp, msztmp;
      get_ref_base_and_extent (*refp, &offadj, &sztmp, &msztmp);
      offset1 -= offadj;
      get_ref_base_and_extent (base2, &offadj, &sztmp, &msztmp);
      offset2 -= offadj;
      return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
    }

  /* If we have two type access paths B1.path1 and B2.path2 they may
     only alias if either B1 is in B2.path2 or B2 is in B1.path1.
     But we can still have a path that goes B1.path1...B2.path2 with
     a part that we do not see.  So we can only disambiguate now
     if there is no B2 in the tail of path1 and no B1 on the
     tail of path2.  */
  if (base1_alias_set == ref2_alias_set
      || alias_set_subset_of (base1_alias_set, ref2_alias_set))
    return true;
  /* If this is ptr vs. decl then we know there is no ptr ... decl path.  */
  if (!ref2_is_decl)
    return (base2_alias_set == ref1_alias_set
	    || alias_set_subset_of (base2_alias_set, ref1_alias_set));
  return false;
}
/* Return true if we can determine that component references REF1 and REF2,
   that are within a common DECL, cannot overlap.  */

static bool
nonoverlapping_component_refs_of_decl_p (tree ref1, tree ref2)
{
  auto_vec<tree, 16> component_refs1;
  auto_vec<tree, 16> component_refs2;

  /* Create the stack of handled components for REF1.  */
  while (handled_component_p (ref1))
    {
      component_refs1.safe_push (ref1);
      ref1 = TREE_OPERAND (ref1, 0);
    }
  if (TREE_CODE (ref1) == MEM_REF)
    {
      if (!integer_zerop (TREE_OPERAND (ref1, 1)))
	goto may_overlap;
      ref1 = TREE_OPERAND (TREE_OPERAND (ref1, 0), 0);
    }

  /* Create the stack of handled components for REF2.  */
  while (handled_component_p (ref2))
    {
      component_refs2.safe_push (ref2);
      ref2 = TREE_OPERAND (ref2, 0);
    }
  if (TREE_CODE (ref2) == MEM_REF)
    {
      if (!integer_zerop (TREE_OPERAND (ref2, 1)))
	goto may_overlap;
      ref2 = TREE_OPERAND (TREE_OPERAND (ref2, 0), 0);
    }

  /* We must have the same base DECL.  */
  gcc_assert (ref1 == ref2);

  /* Pop the stacks in parallel and examine the COMPONENT_REFs of the same
     rank.  This is sufficient because we start from the same DECL and you
     cannot reference several fields at a time with COMPONENT_REFs (unlike
     with ARRAY_RANGE_REFs for arrays) so you always need the same number
     of them to access a sub-component, unless you're in a union, in which
     case the return value will precisely be false.  */
  while (true)
    {
      do
	{
	  if (component_refs1.is_empty ())
	    goto may_overlap;
	  ref1 = component_refs1.pop ();
	}
      while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref1, 0))));

      do
	{
	  if (component_refs2.is_empty ())
	    goto may_overlap;
	  ref2 = component_refs2.pop ();
	}
      while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref2, 0))));

      /* Beware of BIT_FIELD_REF.  */
      if (TREE_CODE (ref1) != COMPONENT_REF
	  || TREE_CODE (ref2) != COMPONENT_REF)
	goto may_overlap;

      tree field1 = TREE_OPERAND (ref1, 1);
      tree field2 = TREE_OPERAND (ref2, 1);

      /* ??? We cannot simply use the type of operand #0 of the refs here
	 as the Fortran compiler smuggles type punning into COMPONENT_REFs
	 for common blocks instead of using unions like everyone else.  */
      tree type1 = DECL_CONTEXT (field1);
      tree type2 = DECL_CONTEXT (field2);

      /* We cannot disambiguate fields in a union or qualified union.  */
      if (type1 != type2 || TREE_CODE (type1) != RECORD_TYPE)
	goto may_overlap;

      /* Different fields of the same record type cannot overlap.
	 ??? Bitfields can overlap at RTL level so punt on them.  */
      if (field1 != field2)
	{
	  component_refs1.release ();
	  component_refs2.release ();
	  return !(DECL_BIT_FIELD (field1) && DECL_BIT_FIELD (field2));
	}
    }

may_overlap:
  component_refs1.release ();
  component_refs2.release ();
  return false;
}
/* qsort compare function to sort FIELD_DECLs after their
   DECL_FIELD_CONTEXT TYPE_UID.  */

static inline int
ncr_compar (const void *field1_, const void *field2_)
{
  const_tree field1 = *(const_tree *) const_cast <void *>(field1_);
  const_tree field2 = *(const_tree *) const_cast <void *>(field2_);
  unsigned int uid1 = TYPE_UID (DECL_FIELD_CONTEXT (field1));
  unsigned int uid2 = TYPE_UID (DECL_FIELD_CONTEXT (field2));
  if (uid1 < uid2)
    return -1;
  else if (uid1 > uid2)
    return 1;
  return 0;
}
/* Return true if we can determine that the fields referenced cannot
   overlap for any pair of objects.  */

static bool
nonoverlapping_component_refs_p (const_tree x, const_tree y)
{
  if (!flag_strict_aliasing
      || !x || !y
      || TREE_CODE (x) != COMPONENT_REF
      || TREE_CODE (y) != COMPONENT_REF)
    return false;

  auto_vec<const_tree, 16> fieldsx;
  while (TREE_CODE (x) == COMPONENT_REF)
    {
      tree field = TREE_OPERAND (x, 1);
      tree type = DECL_FIELD_CONTEXT (field);
      if (TREE_CODE (type) == RECORD_TYPE)
	fieldsx.safe_push (field);
      x = TREE_OPERAND (x, 0);
    }
  if (fieldsx.length () == 0)
    return false;
  auto_vec<const_tree, 16> fieldsy;
  while (TREE_CODE (y) == COMPONENT_REF)
    {
      tree field = TREE_OPERAND (y, 1);
      tree type = DECL_FIELD_CONTEXT (field);
      if (TREE_CODE (type) == RECORD_TYPE)
	fieldsy.safe_push (TREE_OPERAND (y, 1));
      y = TREE_OPERAND (y, 0);
    }
  if (fieldsy.length () == 0)
    return false;

  /* Most common case first.  */
  if (fieldsx.length () == 1
      && fieldsy.length () == 1)
    return ((DECL_FIELD_CONTEXT (fieldsx[0])
	     == DECL_FIELD_CONTEXT (fieldsy[0]))
	    && fieldsx[0] != fieldsy[0]
	    && !(DECL_BIT_FIELD (fieldsx[0]) && DECL_BIT_FIELD (fieldsy[0])));

  if (fieldsx.length () == 2)
    {
      if (ncr_compar (&fieldsx[0], &fieldsx[1]) == 1)
	std::swap (fieldsx[0], fieldsx[1]);
    }
  else
    fieldsx.qsort (ncr_compar);

  if (fieldsy.length () == 2)
    {
      if (ncr_compar (&fieldsy[0], &fieldsy[1]) == 1)
	std::swap (fieldsy[0], fieldsy[1]);
    }
  else
    fieldsy.qsort (ncr_compar);

  unsigned i = 0, j = 0;
  do
    {
      const_tree fieldx = fieldsx[i];
      const_tree fieldy = fieldsy[j];
      tree typex = DECL_FIELD_CONTEXT (fieldx);
      tree typey = DECL_FIELD_CONTEXT (fieldy);
      if (typex == typey)
	{
	  /* We're left with accessing different fields of a structure,
	     no possible overlap, unless they are both bitfields.  */
	  if (fieldx != fieldy)
	    return !(DECL_BIT_FIELD (fieldx) && DECL_BIT_FIELD (fieldy));
	}
      if (TYPE_UID (typex) < TYPE_UID (typey))
	{
	  i++;
	  if (i == fieldsx.length ())
	    break;
	}
      else
	{
	  j++;
	  if (j == fieldsy.length ())
	    break;
	}
    }
  while (1);

  return false;
}
/* Return true if two memory references based on the variables BASE1
   and BASE2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
   [OFFSET2, OFFSET2 + MAX_SIZE2) may alias.  REF1 and REF2
   if non-NULL are the complete memory reference trees.  */

static bool
decl_refs_may_alias_p (tree ref1, tree base1,
		       HOST_WIDE_INT offset1, HOST_WIDE_INT max_size1,
		       tree ref2, tree base2,
		       HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2)
{
  gcc_checking_assert (DECL_P (base1) && DECL_P (base2));

  /* If both references are based on different variables, they cannot alias.  */
  if (base1 != base2)
    return false;

  /* If both references are based on the same variable, they cannot alias if
     the accesses do not overlap.  */
  if (!ranges_overlap_p (offset1, max_size1, offset2, max_size2))
    return false;

  /* For components with variable position, the above test isn't sufficient,
     so we disambiguate component references manually.  */
  if (ref1 && ref2
      && handled_component_p (ref1) && handled_component_p (ref2)
      && nonoverlapping_component_refs_of_decl_p (ref1, ref2))
    return false;

  return true;
}
/* Return true if an indirect reference based on *PTR1 constrained
   to [OFFSET1, OFFSET1 + MAX_SIZE1) may alias a variable based on BASE2
   constrained to [OFFSET2, OFFSET2 + MAX_SIZE2).  *PTR1 and BASE2 have
   the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
   in which case they are computed on-demand.  REF1 and REF2
   if non-NULL are the complete memory reference trees.  */

static bool
indirect_ref_may_alias_decl_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
			       HOST_WIDE_INT offset1,
			       HOST_WIDE_INT max_size1 ATTRIBUTE_UNUSED,
			       alias_set_type ref1_alias_set,
			       alias_set_type base1_alias_set,
			       tree ref2 ATTRIBUTE_UNUSED, tree base2,
			       HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2,
			       alias_set_type ref2_alias_set,
			       alias_set_type base2_alias_set, bool tbaa_p)
{
  tree ptr1;
  tree ptrtype1, dbase2;
  HOST_WIDE_INT offset1p = offset1, offset2p = offset2;
  HOST_WIDE_INT doffset1, doffset2;

  gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
			|| TREE_CODE (base1) == TARGET_MEM_REF)
		       && DECL_P (base2));

  ptr1 = TREE_OPERAND (base1, 0);

  /* The offset embedded in MEM_REFs can be negative.  Bias them
     so that the resulting offset adjustment is positive.  */
  offset_int moff = mem_ref_offset (base1);
  moff = wi::lshift (moff, LOG2_BITS_PER_UNIT);
  if (wi::neg_p (moff))
    offset2p += (-moff).to_short_addr ();
  else
    offset1p += moff.to_short_addr ();

  /* If only one reference is based on a variable, they cannot alias if
     the pointer access is beyond the extent of the variable access.
     (the pointer base cannot validly point to an offset less than zero
     of the variable).
     ??? IVOPTs creates bases that do not honor this restriction,
     so do not apply this optimization for TARGET_MEM_REFs.  */
  if (TREE_CODE (base1) != TARGET_MEM_REF
      && !ranges_overlap_p (MAX (0, offset1p), -1, offset2p, max_size2))
    return false;
  /* They also cannot alias if the pointer may not point to the decl.  */
  if (!ptr_deref_may_alias_decl_p (ptr1, base2))
    return false;

  /* Disambiguations that rely on strict aliasing rules follow.  */
  if (!flag_strict_aliasing || !tbaa_p)
    return true;

  ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));

  /* If the alias set for a pointer access is zero all bets are off.  */
  if (base1_alias_set == -1)
    base1_alias_set = get_deref_alias_set (ptrtype1);
  if (base1_alias_set == 0)
    return true;
  if (base2_alias_set == -1)
    base2_alias_set = get_alias_set (base2);

  /* When we are trying to disambiguate an access with a pointer dereference
     as base versus one with a decl as base we can use both the size
     of the decl and its dynamic type for extra disambiguation.
     ??? We do not know anything about the dynamic type of the decl
     other than that its alias-set contains base2_alias_set as a subset
     which does not help us here.  */
  /* As we know nothing useful about the dynamic type of the decl just
     use the usual conflict check rather than a subset test.
     ??? We could introduce -fvery-strict-aliasing when the language
     does not allow decls to have a dynamic type that differs from their
     static type.  Then we can check
     !alias_set_subset_of (base1_alias_set, base2_alias_set) instead.  */
  if (base1_alias_set != base2_alias_set
      && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
    return false;
  /* If the size of the access relevant for TBAA through the pointer
     is bigger than the size of the decl we can't possibly access the
     decl via that pointer.  */
  if (DECL_SIZE (base2) && COMPLETE_TYPE_P (TREE_TYPE (ptrtype1))
      && TREE_CODE (DECL_SIZE (base2)) == INTEGER_CST
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (ptrtype1))) == INTEGER_CST
      /* ??? This in turn may run afoul when a decl of type T which is
	 a member of union type U is accessed through a pointer to
	 type U and sizeof T is smaller than sizeof U.  */
      && TREE_CODE (TREE_TYPE (ptrtype1)) != UNION_TYPE
      && TREE_CODE (TREE_TYPE (ptrtype1)) != QUAL_UNION_TYPE
      && tree_int_cst_lt (DECL_SIZE (base2), TYPE_SIZE (TREE_TYPE (ptrtype1))))
    return false;

  if (!ref2)
    return true;

  /* If the decl is accessed via a MEM_REF, reconstruct the base
     we can use for TBAA and an appropriately adjusted offset.  */
  dbase2 = ref2;
  while (handled_component_p (dbase2))
    dbase2 = TREE_OPERAND (dbase2, 0);
  doffset1 = offset1;
  doffset2 = offset2;
  if (TREE_CODE (dbase2) == MEM_REF
      || TREE_CODE (dbase2) == TARGET_MEM_REF)
    {
      offset_int moff = mem_ref_offset (dbase2);
      moff = wi::lshift (moff, LOG2_BITS_PER_UNIT);
      if (wi::neg_p (moff))
	doffset1 -= (-moff).to_short_addr ();
      else
	doffset2 -= moff.to_short_addr ();
    }

  /* If either reference is view-converted, give up now.  */
  if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1
      || same_type_for_tbaa (TREE_TYPE (dbase2), TREE_TYPE (base2)) != 1)
    return true;

  /* If both references are through the same type, they do not alias
     if the accesses do not overlap.  This does extra disambiguation
     for mixed/pointer accesses but requires strict aliasing.
     For MEM_REFs we require that the component-ref offset we computed
     is relative to the start of the type which we ensure by
     comparing rvalue and access type and disregarding the constant
     pointer offset.  */
  if ((TREE_CODE (base1) != TARGET_MEM_REF
       || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
      && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (dbase2)) == 1)
    return ranges_overlap_p (doffset1, max_size1, doffset2, max_size2);

  if (ref1 && ref2
      && nonoverlapping_component_refs_p (ref1, ref2))
    return false;

  /* Do access-path based disambiguation.  */
  if (ref1 && ref2
      && (handled_component_p (ref1) || handled_component_p (ref2)))
    return aliasing_component_refs_p (ref1,
				      ref1_alias_set, base1_alias_set,
				      offset1, max_size1,
				      ref2,
				      ref2_alias_set, base2_alias_set,
				      offset2, max_size2, true);

  return true;
}
/* Return true if two indirect references based on *PTR1
   and *PTR2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
   [OFFSET2, OFFSET2 + MAX_SIZE2) may alias.  *PTR1 and *PTR2 have
   the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
   in which case they are computed on-demand.  REF1 and REF2
   if non-NULL are the complete memory reference trees.  */

static bool
indirect_refs_may_alias_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
			   HOST_WIDE_INT offset1, HOST_WIDE_INT max_size1,
			   alias_set_type ref1_alias_set,
			   alias_set_type base1_alias_set,
			   tree ref2 ATTRIBUTE_UNUSED, tree base2,
			   HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2,
			   alias_set_type ref2_alias_set,
			   alias_set_type base2_alias_set, bool tbaa_p)
{
  tree ptr1;
  tree ptr2;
  tree ptrtype1, ptrtype2;

  gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
			|| TREE_CODE (base1) == TARGET_MEM_REF)
		       && (TREE_CODE (base2) == MEM_REF
			   || TREE_CODE (base2) == TARGET_MEM_REF));

  ptr1 = TREE_OPERAND (base1, 0);
  ptr2 = TREE_OPERAND (base2, 0);

  /* If both bases are based on pointers they cannot alias if they may not
     point to the same memory object or if they point to the same object
     and the accesses do not overlap.  */
  if ((!cfun || gimple_in_ssa_p (cfun))
      && operand_equal_p (ptr1, ptr2, 0)
      && (((TREE_CODE (base1) != TARGET_MEM_REF
	    || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
	   && (TREE_CODE (base2) != TARGET_MEM_REF
	       || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2))))
	  || (TREE_CODE (base1) == TARGET_MEM_REF
	      && TREE_CODE (base2) == TARGET_MEM_REF
	      && (TMR_STEP (base1) == TMR_STEP (base2)
		  || (TMR_STEP (base1) && TMR_STEP (base2)
		      && operand_equal_p (TMR_STEP (base1),
					  TMR_STEP (base2), 0)))
	      && (TMR_INDEX (base1) == TMR_INDEX (base2)
		  || (TMR_INDEX (base1) && TMR_INDEX (base2)
		      && operand_equal_p (TMR_INDEX (base1),
					  TMR_INDEX (base2), 0)))
	      && (TMR_INDEX2 (base1) == TMR_INDEX2 (base2)
		  || (TMR_INDEX2 (base1) && TMR_INDEX2 (base2)
		      && operand_equal_p (TMR_INDEX2 (base1),
					  TMR_INDEX2 (base2), 0))))))
    {
      offset_int moff;
      /* The offset embedded in MEM_REFs can be negative.  Bias them
	 so that the resulting offset adjustment is positive.  */
      moff = mem_ref_offset (base1);
      moff = wi::lshift (moff, LOG2_BITS_PER_UNIT);
      if (wi::neg_p (moff))
	offset2 += (-moff).to_short_addr ();
      else
	offset1 += moff.to_shwi ();
      moff = mem_ref_offset (base2);
      moff = wi::lshift (moff, LOG2_BITS_PER_UNIT);
      if (wi::neg_p (moff))
	offset1 += (-moff).to_short_addr ();
      else
	offset2 += moff.to_short_addr ();
      return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
    }
  if (!ptr_derefs_may_alias_p (ptr1, ptr2))
    return false;

  /* Disambiguations that rely on strict aliasing rules follow.  */
  if (!flag_strict_aliasing || !tbaa_p)
    return true;

  ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));
  ptrtype2 = TREE_TYPE (TREE_OPERAND (base2, 1));

  /* If the alias set for a pointer access is zero all bets are off.  */
  if (base1_alias_set == -1)
    base1_alias_set = get_deref_alias_set (ptrtype1);
  if (base1_alias_set == 0)
    return true;
  if (base2_alias_set == -1)
    base2_alias_set = get_deref_alias_set (ptrtype2);
  if (base2_alias_set == 0)
    return true;

  /* If both references are through the same type, they do not alias
     if the accesses do not overlap.  This does extra disambiguation
     for mixed/pointer accesses but requires strict aliasing.  */
  if ((TREE_CODE (base1) != TARGET_MEM_REF
       || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
      && (TREE_CODE (base2) != TARGET_MEM_REF
	  || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2)))
      && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) == 1
      && same_type_for_tbaa (TREE_TYPE (base2), TREE_TYPE (ptrtype2)) == 1
      && same_type_for_tbaa (TREE_TYPE (ptrtype1),
			     TREE_TYPE (ptrtype2)) == 1)
    return ranges_overlap_p (offset1, max_size1, offset2, max_size2);

  /* Do type-based disambiguation.  */
  if (base1_alias_set != base2_alias_set
      && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
    return false;

  /* If either reference is view-converted, give up now.  */
  if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1
      || same_type_for_tbaa (TREE_TYPE (base2), TREE_TYPE (ptrtype2)) != 1)
    return true;

  if (ref1 && ref2
      && nonoverlapping_component_refs_p (ref1, ref2))
    return false;

  /* Do access-path based disambiguation.  */
  if (ref1 && ref2
      && (handled_component_p (ref1) || handled_component_p (ref2)))
    return aliasing_component_refs_p (ref1,
				      ref1_alias_set, base1_alias_set,
				      offset1, max_size1,
				      ref2,
				      ref2_alias_set, base2_alias_set,
				      offset2, max_size2, false);

  return true;
}
/* Return true, if the two memory references REF1 and REF2 may alias.  */

bool
refs_may_alias_p_1 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
{
  tree base1, base2;
  HOST_WIDE_INT offset1 = 0, offset2 = 0;
  HOST_WIDE_INT max_size1 = -1, max_size2 = -1;
  bool var1_p, var2_p, ind1_p, ind2_p;

  gcc_checking_assert ((!ref1->ref
			|| TREE_CODE (ref1->ref) == SSA_NAME
			|| DECL_P (ref1->ref)
			|| TREE_CODE (ref1->ref) == STRING_CST
			|| handled_component_p (ref1->ref)
			|| TREE_CODE (ref1->ref) == MEM_REF
			|| TREE_CODE (ref1->ref) == TARGET_MEM_REF)
		       && (!ref2->ref
			   || TREE_CODE (ref2->ref) == SSA_NAME
			   || DECL_P (ref2->ref)
			   || TREE_CODE (ref2->ref) == STRING_CST
			   || handled_component_p (ref2->ref)
			   || TREE_CODE (ref2->ref) == MEM_REF
			   || TREE_CODE (ref2->ref) == TARGET_MEM_REF));

  /* Decompose the references into their base objects and the access.  */
  base1 = ao_ref_base (ref1);
  offset1 = ref1->offset;
  max_size1 = ref1->max_size;
  base2 = ao_ref_base (ref2);
  offset2 = ref2->offset;
  max_size2 = ref2->max_size;

  /* We can end up with registers or constants as bases for example from
     *D.1663_44 = VIEW_CONVERT_EXPR<struct DB_LSN>(__tmp$B0F64_59);
     which is seen as a struct copy.  */
  if (TREE_CODE (base1) == SSA_NAME
      || TREE_CODE (base1) == CONST_DECL
      || TREE_CODE (base1) == CONSTRUCTOR
      || TREE_CODE (base1) == ADDR_EXPR
      || CONSTANT_CLASS_P (base1)
      || TREE_CODE (base2) == SSA_NAME
      || TREE_CODE (base2) == CONST_DECL
      || TREE_CODE (base2) == CONSTRUCTOR
      || TREE_CODE (base2) == ADDR_EXPR
      || CONSTANT_CLASS_P (base2))
    return false;

  /* We can end up referring to code via function and label decls.
     As we likely do not properly track code aliases conservatively
     bail out.  */
  if (TREE_CODE (base1) == FUNCTION_DECL
      || TREE_CODE (base1) == LABEL_DECL
      || TREE_CODE (base2) == FUNCTION_DECL
      || TREE_CODE (base2) == LABEL_DECL)
    return true;

  /* Two volatile accesses always conflict.  */
  if (ref1->volatile_p
      && ref2->volatile_p)
    return true;

  /* Defer to simple offset based disambiguation if we have
     references based on two decls.  Do this before deferring to
     TBAA to handle must-alias cases in conformance with the
     GCC extension of allowing type-punning through unions.  */
  var1_p = DECL_P (base1);
  var2_p = DECL_P (base2);
  if (var1_p && var2_p)
    return decl_refs_may_alias_p (ref1->ref, base1, offset1, max_size1,
				  ref2->ref, base2, offset2, max_size2);

  /* Handle restrict based accesses.
     ??? ao_ref_base strips inner MEM_REF [&decl], recover from that
     here.  */
  tree rbase1 = base1;
  tree rbase2 = base2;
  if (var1_p)
    {
      rbase1 = ref1->ref;
      if (rbase1)
	while (handled_component_p (rbase1))
	  rbase1 = TREE_OPERAND (rbase1, 0);
    }
  if (var2_p)
    {
      rbase2 = ref2->ref;
      if (rbase2)
	while (handled_component_p (rbase2))
	  rbase2 = TREE_OPERAND (rbase2, 0);
    }
  if (rbase1 && rbase2
      && (TREE_CODE (base1) == MEM_REF || TREE_CODE (base1) == TARGET_MEM_REF)
      && (TREE_CODE (base2) == MEM_REF || TREE_CODE (base2) == TARGET_MEM_REF)
      /* If the accesses are in the same restrict clique... */
      && MR_DEPENDENCE_CLIQUE (base1) == MR_DEPENDENCE_CLIQUE (base2)
      /* But based on different pointers they do not alias.  */
      && MR_DEPENDENCE_BASE (base1) != MR_DEPENDENCE_BASE (base2))
    return false;

  ind1_p = (TREE_CODE (base1) == MEM_REF
	    || TREE_CODE (base1) == TARGET_MEM_REF);
  ind2_p = (TREE_CODE (base2) == MEM_REF
	    || TREE_CODE (base2) == TARGET_MEM_REF);

  /* Canonicalize the pointer-vs-decl case.  */
  if (ind1_p && var2_p)
    {
      std::swap (offset1, offset2);
      std::swap (max_size1, max_size2);
      std::swap (base1, base2);
      std::swap (ref1, ref2);
      var1_p = true;
      ind1_p = false;
      var2_p = false;
      ind2_p = true;
    }

  /* First defer to TBAA if possible.  */
  if (tbaa_p
      && flag_strict_aliasing
      && !alias_sets_conflict_p (ao_ref_alias_set (ref1),
				 ao_ref_alias_set (ref2)))
    return false;

  /* Dispatch to the pointer-vs-decl or pointer-vs-pointer disambiguators.  */
  if (var1_p && ind2_p)
    return indirect_ref_may_alias_decl_p (ref2->ref, base2,
					  offset2, max_size2,
					  ao_ref_alias_set (ref2), -1,
					  ref1->ref, base1,
					  offset1, max_size1,
					  ao_ref_alias_set (ref1),
					  ao_ref_base_alias_set (ref1),
					  tbaa_p);
  else if (ind1_p && ind2_p)
    return indirect_refs_may_alias_p (ref1->ref, base1,
				      offset1, max_size1,
				      ao_ref_alias_set (ref1), -1,
				      ref2->ref, base2,
				      offset2, max_size2,
				      ao_ref_alias_set (ref2), -1,
				      tbaa_p);

  /* We really do not want to end up here, but returning true is safe.  */
#ifdef ENABLE_CHECKING
  gcc_unreachable ();
#else
  return true;
#endif
}
static bool
refs_may_alias_p (tree ref1, ao_ref *ref2)
{
  ao_ref r1;
  ao_ref_init (&r1, ref1);
  return refs_may_alias_p_1 (&r1, ref2, true);
}

bool
refs_may_alias_p (tree ref1, tree ref2)
{
  ao_ref r1, r2;
  bool res;
  ao_ref_init (&r1, ref1);
  ao_ref_init (&r2, ref2);
  res = refs_may_alias_p_1 (&r1, &r2, true);
  if (res)
    ++alias_stats.refs_may_alias_p_may_alias;
  else
    ++alias_stats.refs_may_alias_p_no_alias;
  return res;
}
/* Returns true if there is an anti-dependence for the STORE that
   executes after the LOAD.  */

bool
refs_anti_dependent_p (tree load, tree store)
{
  ao_ref r1, r2;
  ao_ref_init (&r1, load);
  ao_ref_init (&r2, store);
  return refs_may_alias_p_1 (&r1, &r2, false);
}

/* Returns true if there is an output dependence for the stores
   STORE1 and STORE2.  */

bool
refs_output_dependent_p (tree store1, tree store2)
{
  ao_ref r1, r2;
  ao_ref_init (&r1, store1);
  ao_ref_init (&r2, store2);
  return refs_may_alias_p_1 (&r1, &r2, false);
}
/* If the call CALL may use the memory reference REF return true,
   otherwise return false.  */

static bool
ref_maybe_used_by_call_p_1 (gcall *call, ao_ref *ref)
{
  tree base, callee;
  unsigned i;
  int flags = gimple_call_flags (call);

  /* Const functions without a static chain do not implicitly use memory.  */
  if (!gimple_call_chain (call)
      && (flags & (ECF_CONST|ECF_NOVOPS)))
    goto process_args;

  base = ao_ref_base (ref);
  if (!base)
    return true;

  /* A call that is not without side-effects might involve volatile
     accesses and thus conflicts with all other volatile accesses.  */
  if (ref->volatile_p)
    return true;

  /* If the reference is based on a decl that is not aliased the call
     cannot possibly use it.  */
  if (DECL_P (base)
      && !may_be_aliased (base)
      /* But local statics can be used through recursion.  */
      && !is_global_var (base))
    goto process_args;

  callee = gimple_call_fndecl (call);

  /* Handle those builtin functions explicitly that do not act as
     escape points.  See tree-ssa-structalias.c:find_func_aliases
     for the list of builtins we might need to handle here.  */
  if (callee != NULL_TREE
      && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
	/* All the following functions read memory pointed to by
	   their second argument.  strcat/strncat additionally
	   read memory pointed to by the first argument.  */
	case BUILT_IN_STRCAT:
	case BUILT_IN_STRNCAT:
	  {
	    ao_ref dref;
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   NULL_TREE);
	    if (refs_may_alias_p_1 (&dref, ref, false))
	      return true;
	  }
	  /* FALLTHRU */
	case BUILT_IN_STRCPY:
	case BUILT_IN_STRNCPY:
	case BUILT_IN_MEMCPY:
	case BUILT_IN_MEMMOVE:
	case BUILT_IN_MEMPCPY:
	case BUILT_IN_STPCPY:
	case BUILT_IN_STPNCPY:
	case BUILT_IN_TM_MEMCPY:
	case BUILT_IN_TM_MEMMOVE:
	  {
	    ao_ref dref;
	    tree size = NULL_TREE;
	    if (gimple_call_num_args (call) == 3)
	      size = gimple_call_arg (call, 2);
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 1),
					   size);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }
	case BUILT_IN_STRCAT_CHK:
	case BUILT_IN_STRNCAT_CHK:
	  {
	    ao_ref dref;
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   NULL_TREE);
	    if (refs_may_alias_p_1 (&dref, ref, false))
	      return true;
	  }
	  /* FALLTHRU */
	case BUILT_IN_STRCPY_CHK:
	case BUILT_IN_STRNCPY_CHK:
	case BUILT_IN_MEMCPY_CHK:
	case BUILT_IN_MEMMOVE_CHK:
	case BUILT_IN_MEMPCPY_CHK:
	case BUILT_IN_STPCPY_CHK:
	case BUILT_IN_STPNCPY_CHK:
	  {
	    ao_ref dref;
	    tree size = NULL_TREE;
	    if (gimple_call_num_args (call) == 4)
	      size = gimple_call_arg (call, 2);
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 1),
					   size);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }
	case BUILT_IN_BCOPY:
	  {
	    ao_ref dref;
	    tree size = gimple_call_arg (call, 2);
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   size);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }

	/* The following functions read memory pointed to by their
	   first argument.  */
	CASE_BUILT_IN_TM_LOAD (1):
	CASE_BUILT_IN_TM_LOAD (2):
	CASE_BUILT_IN_TM_LOAD (4):
	CASE_BUILT_IN_TM_LOAD (8):
	CASE_BUILT_IN_TM_LOAD (FLOAT):
	CASE_BUILT_IN_TM_LOAD (DOUBLE):
	CASE_BUILT_IN_TM_LOAD (LDOUBLE):
	CASE_BUILT_IN_TM_LOAD (M64):
	CASE_BUILT_IN_TM_LOAD (M128):
	CASE_BUILT_IN_TM_LOAD (M256):
	case BUILT_IN_TM_LOG:
	case BUILT_IN_TM_LOG_1:
	case BUILT_IN_TM_LOG_2:
	case BUILT_IN_TM_LOG_4:
	case BUILT_IN_TM_LOG_8:
	case BUILT_IN_TM_LOG_FLOAT:
	case BUILT_IN_TM_LOG_DOUBLE:
	case BUILT_IN_TM_LOG_LDOUBLE:
	case BUILT_IN_TM_LOG_M64:
	case BUILT_IN_TM_LOG_M128:
	case BUILT_IN_TM_LOG_M256:
	  return ptr_deref_may_alias_ref_p_1 (gimple_call_arg (call, 0), ref);

	/* These read memory pointed to by the first argument.  */
	case BUILT_IN_STRDUP:
	case BUILT_IN_STRNDUP:
	case BUILT_IN_REALLOC:
	  {
	    ao_ref dref;
	    tree size = NULL_TREE;
	    if (gimple_call_num_args (call) == 2)
	      size = gimple_call_arg (call, 1);
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   size);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }
	/* These read memory pointed to by the first argument.  */
	case BUILT_IN_INDEX:
	case BUILT_IN_STRCHR:
	case BUILT_IN_STRRCHR:
	  {
	    ao_ref dref;
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   NULL_TREE);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }
	/* These read memory pointed to by the first argument with size
	   in the third argument.  */
	case BUILT_IN_MEMCHR:
	  {
	    ao_ref dref;
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   gimple_call_arg (call, 2));
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }
	/* These read memory pointed to by the first and second arguments.  */
	case BUILT_IN_STRSTR:
	case BUILT_IN_STRPBRK:
	  {
	    ao_ref dref;
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   NULL_TREE);
	    if (refs_may_alias_p_1 (&dref, ref, false))
	      return true;
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 1),
					   NULL_TREE);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }

	/* The following builtins do not read from memory.  */
	case BUILT_IN_FREE:
	case BUILT_IN_MALLOC:
	case BUILT_IN_POSIX_MEMALIGN:
	case BUILT_IN_ALIGNED_ALLOC:
	case BUILT_IN_CALLOC:
	case BUILT_IN_ALLOCA:
	case BUILT_IN_ALLOCA_WITH_ALIGN:
	case BUILT_IN_STACK_SAVE:
	case BUILT_IN_STACK_RESTORE:
	case BUILT_IN_MEMSET:
	case BUILT_IN_TM_MEMSET:
	case BUILT_IN_MEMSET_CHK:
	case BUILT_IN_FREXP:
	case BUILT_IN_FREXPF:
	case BUILT_IN_FREXPL:
	case BUILT_IN_GAMMA_R:
	case BUILT_IN_GAMMAF_R:
	case BUILT_IN_GAMMAL_R:
	case BUILT_IN_LGAMMA_R:
	case BUILT_IN_LGAMMAF_R:
	case BUILT_IN_LGAMMAL_R:
	case BUILT_IN_MODF:
	case BUILT_IN_MODFF:
	case BUILT_IN_MODFL:
	case BUILT_IN_REMQUO:
	case BUILT_IN_REMQUOF:
	case BUILT_IN_REMQUOL:
	case BUILT_IN_SINCOS:
	case BUILT_IN_SINCOSF:
	case BUILT_IN_SINCOSL:
	case BUILT_IN_ASSUME_ALIGNED:
	case BUILT_IN_VA_END:
	  return false;
	/* __sync_* builtins and some OpenMP builtins act as threading
	   barriers.  */
#undef DEF_SYNC_BUILTIN
#define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
#include "sync-builtins.def"
#undef DEF_SYNC_BUILTIN
	case BUILT_IN_GOMP_ATOMIC_START:
	case BUILT_IN_GOMP_ATOMIC_END:
	case BUILT_IN_GOMP_BARRIER:
	case BUILT_IN_GOMP_BARRIER_CANCEL:
	case BUILT_IN_GOMP_TASKWAIT:
	case BUILT_IN_GOMP_TASKGROUP_END:
	case BUILT_IN_GOMP_CRITICAL_START:
	case BUILT_IN_GOMP_CRITICAL_END:
	case BUILT_IN_GOMP_CRITICAL_NAME_START:
	case BUILT_IN_GOMP_CRITICAL_NAME_END:
	case BUILT_IN_GOMP_LOOP_END:
	case BUILT_IN_GOMP_LOOP_END_CANCEL:
	case BUILT_IN_GOMP_ORDERED_START:
	case BUILT_IN_GOMP_ORDERED_END:
	case BUILT_IN_GOMP_SECTIONS_END:
	case BUILT_IN_GOMP_SECTIONS_END_CANCEL:
	case BUILT_IN_GOMP_SINGLE_COPY_START:
	case BUILT_IN_GOMP_SINGLE_COPY_END:
	  return true;

	default:
	  /* Fallthru to general call handling.  */;
      }

  /* Check if base is a global static variable that is not read
     by the function.  */
  if (callee != NULL_TREE
      && TREE_CODE (base) == VAR_DECL
      && TREE_STATIC (base))
    {
      struct cgraph_node *node = cgraph_node::get (callee);
      bitmap not_read;

      /* FIXME: Callee can be an OMP builtin that does not have a call graph
	 node yet.  We should enforce that there are nodes for all decls in the
	 IL and remove this check instead.  */
      if (node
	  && (not_read = ipa_reference_get_not_read_global (node))
	  && bitmap_bit_p (not_read, DECL_UID (base)))
	goto process_args;
    }

  /* Check if the base variable is call-used.  */
  if (DECL_P (base))
    {
      if (pt_solution_includes (gimple_call_use_set (call), base))
	return true;
    }
  else if ((TREE_CODE (base) == MEM_REF
	    || TREE_CODE (base) == TARGET_MEM_REF)
	   && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
    {
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
      if (!pi)
	return true;

      if (pt_solutions_intersect (gimple_call_use_set (call), &pi->pt))
	return true;
    }
  else
    return true;

  /* Inspect call arguments for passed-by-value aliases.  */
process_args:
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree op = gimple_call_arg (call, i);
      int flags = gimple_call_arg_flags (call, i);

      if (flags & EAF_UNUSED)
	continue;

      if (TREE_CODE (op) == WITH_SIZE_EXPR)
	op = TREE_OPERAND (op, 0);

      if (TREE_CODE (op) != SSA_NAME
	  && !is_gimple_min_invariant (op))
	{
	  ao_ref r;
	  ao_ref_init (&r, op);
	  if (refs_may_alias_p_1 (&r, ref, true))
	    return true;
	}
    }

  return false;
}
static bool
ref_maybe_used_by_call_p (gcall *call, ao_ref *ref)
{
  bool res;
  res = ref_maybe_used_by_call_p_1 (call, ref);
  if (res)
    ++alias_stats.ref_maybe_used_by_call_p_may_alias;
  else
    ++alias_stats.ref_maybe_used_by_call_p_no_alias;
  return res;
}
/* If the statement STMT may use the memory reference REF return
   true, otherwise return false.  */

bool
ref_maybe_used_by_stmt_p (gimple stmt, ao_ref *ref)
{
  if (is_gimple_assign (stmt))
    {
      tree rhs;

      /* All memory assign statements are single.  */
      if (!gimple_assign_single_p (stmt))
	return false;

      rhs = gimple_assign_rhs1 (stmt);
      if (is_gimple_reg (rhs)
	  || is_gimple_min_invariant (rhs)
	  || gimple_assign_rhs_code (stmt) == CONSTRUCTOR)
	return false;

      return refs_may_alias_p (rhs, ref);
    }
  else if (is_gimple_call (stmt))
    return ref_maybe_used_by_call_p (as_a <gcall *> (stmt), ref);
  else if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
    {
      tree retval = gimple_return_retval (return_stmt);
      if (retval
	  && TREE_CODE (retval) != SSA_NAME
	  && !is_gimple_min_invariant (retval)
	  && refs_may_alias_p (retval, ref))
	return true;
      /* If ref escapes the function then the return acts as a use.  */
      tree base = ao_ref_base (ref);
      if (!base)
	;
      else if (DECL_P (base))
	return is_global_var (base);
      else if (TREE_CODE (base) == MEM_REF
	       || TREE_CODE (base) == TARGET_MEM_REF)
	return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
      return false;
    }

  return true;
}

bool
ref_maybe_used_by_stmt_p (gimple stmt, tree ref)
{
  ao_ref r;
  ao_ref_init (&r, ref);
  return ref_maybe_used_by_stmt_p (stmt, &r);
}
1896 /* If the call in statement CALL may clobber the memory reference REF
1897 return true, otherwise return false. */
1900 call_may_clobber_ref_p_1 (gcall
*call
, ao_ref
*ref
)
1905 /* If the call is pure or const it cannot clobber anything. */
1906 if (gimple_call_flags (call
)
1907 & (ECF_PURE
|ECF_CONST
|ECF_LOOPING_CONST_OR_PURE
|ECF_NOVOPS
))
1909 if (gimple_call_internal_p (call
))
1910 switch (gimple_call_internal_fn (call
))
1912 /* Treat these internal calls like ECF_PURE for aliasing,
1913 they don't write to any memory the program should care about.
1914 They have important other side-effects, and read memory,
1915 so can't be ECF_NOVOPS. */
1916 case IFN_UBSAN_NULL
:
1917 case IFN_UBSAN_BOUNDS
:
1918 case IFN_UBSAN_VPTR
:
1919 case IFN_UBSAN_OBJECT_SIZE
:
1920 case IFN_ASAN_CHECK
:
1926 base
= ao_ref_base (ref
);
1930 if (TREE_CODE (base
) == SSA_NAME
1931 || CONSTANT_CLASS_P (base
))
1934 /* A call that is not without side-effects might involve volatile
1935 accesses and thus conflicts with all other volatile accesses. */
1936 if (ref
->volatile_p
)
1939 /* If the reference is based on a decl that is not aliased the call
1940 cannot possibly clobber it. */
1942 && !may_be_aliased (base
)
1943 /* But local non-readonly statics can be modified through recursion
1944 or the call may implement a threading barrier which we must
1945 treat as may-def. */
1946 && (TREE_READONLY (base
)
1947 || !is_global_var (base
)))
1950 callee
= gimple_call_fndecl (call
);
1952 /* Handle those builtin functions explicitly that do not act as
1953 escape points. See tree-ssa-structalias.c:find_func_aliases
1954 for the list of builtins we might need to handle here. */
1955 if (callee
!= NULL_TREE
1956 && DECL_BUILT_IN_CLASS (callee
) == BUILT_IN_NORMAL
)
1957 switch (DECL_FUNCTION_CODE (callee
))
1959 /* All the following functions clobber memory pointed to by
1960 their first argument. */
1961 case BUILT_IN_STRCPY
:
1962 case BUILT_IN_STRNCPY
:
1963 case BUILT_IN_MEMCPY
:
1964 case BUILT_IN_MEMMOVE
:
1965 case BUILT_IN_MEMPCPY
:
1966 case BUILT_IN_STPCPY
:
1967 case BUILT_IN_STPNCPY
:
1968 case BUILT_IN_STRCAT
:
1969 case BUILT_IN_STRNCAT
:
1970 case BUILT_IN_MEMSET
:
1971 case BUILT_IN_TM_MEMSET
:
1972 CASE_BUILT_IN_TM_STORE (1):
1973 CASE_BUILT_IN_TM_STORE (2):
1974 CASE_BUILT_IN_TM_STORE (4):
1975 CASE_BUILT_IN_TM_STORE (8):
1976 CASE_BUILT_IN_TM_STORE (FLOAT
):
1977 CASE_BUILT_IN_TM_STORE (DOUBLE
):
1978 CASE_BUILT_IN_TM_STORE (LDOUBLE
):
1979 CASE_BUILT_IN_TM_STORE (M64
):
1980 CASE_BUILT_IN_TM_STORE (M128
):
1981 CASE_BUILT_IN_TM_STORE (M256
):
1982 case BUILT_IN_TM_MEMCPY
:
1983 case BUILT_IN_TM_MEMMOVE
:
1986 tree size
= NULL_TREE
;
1987 /* Don't pass in size for strncat, as the maximum size
1988 is strlen (dest) + n + 1 instead of n, resp.
1989 n + 1 at dest + strlen (dest), but strlen (dest) isn't
1991 if (gimple_call_num_args (call
) == 3
1992 && DECL_FUNCTION_CODE (callee
) != BUILT_IN_STRNCAT
)
1993 size
= gimple_call_arg (call
, 2);
1994 ao_ref_init_from_ptr_and_size (&dref
,
1995 gimple_call_arg (call
, 0),
1997 return refs_may_alias_p_1 (&dref
, ref
, false);
1999 case BUILT_IN_STRCPY_CHK
:
2000 case BUILT_IN_STRNCPY_CHK
:
2001 case BUILT_IN_MEMCPY_CHK
:
2002 case BUILT_IN_MEMMOVE_CHK
:
2003 case BUILT_IN_MEMPCPY_CHK
:
2004 case BUILT_IN_STPCPY_CHK
:
2005 case BUILT_IN_STPNCPY_CHK
:
2006 case BUILT_IN_STRCAT_CHK
:
2007 case BUILT_IN_STRNCAT_CHK
:
2008 case BUILT_IN_MEMSET_CHK
:
2011 tree size
= NULL_TREE
;
2012 /* Don't pass in size for __strncat_chk, as the maximum size
2013 is strlen (dest) + n + 1 instead of n, resp.
2014 n + 1 at dest + strlen (dest), but strlen (dest) isn't
2016 if (gimple_call_num_args (call
) == 4
2017 && DECL_FUNCTION_CODE (callee
) != BUILT_IN_STRNCAT_CHK
)
2018 size
= gimple_call_arg (call
, 2);
2019 ao_ref_init_from_ptr_and_size (&dref
,
2020 gimple_call_arg (call
, 0),
2022 return refs_may_alias_p_1 (&dref
, ref
, false);
	case BUILT_IN_BCOPY:
	  {
	    ao_ref dref;
	    tree size = gimple_call_arg (call, 2);
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 1),
					   size);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }
	/* Allocating memory does not have any side-effects apart from
	   being the definition point for the pointer.  */
	case BUILT_IN_MALLOC:
	case BUILT_IN_ALIGNED_ALLOC:
	case BUILT_IN_CALLOC:
	case BUILT_IN_STRDUP:
	case BUILT_IN_STRNDUP:
	  /* Unix98 specifies that errno is set on allocation failure.  */
	  if (flag_errno_math
	      && targetm.ref_may_alias_errno (ref))
	    return true;
	  return false;
	case BUILT_IN_STACK_SAVE:
	case BUILT_IN_ALLOCA:
	case BUILT_IN_ALLOCA_WITH_ALIGN:
	case BUILT_IN_ASSUME_ALIGNED:
	  return false;
	/* But posix_memalign stores a pointer into the memory pointed to
	   by its first argument.  */
	case BUILT_IN_POSIX_MEMALIGN:
	  {
	    tree ptrptr = gimple_call_arg (call, 0);
	    ao_ref dref;
	    ao_ref_init_from_ptr_and_size (&dref, ptrptr,
					   TYPE_SIZE_UNIT (ptr_type_node));
	    return (refs_may_alias_p_1 (&dref, ref, false)
		    || (flag_errno_math
			&& targetm.ref_may_alias_errno (ref)));
	  }
	/* Freeing memory kills the pointed-to memory.  More importantly
	   the call has to serve as a barrier for moving loads and stores
	   across it.  */
	case BUILT_IN_FREE:
	case BUILT_IN_VA_END:
	  {
	    tree ptr = gimple_call_arg (call, 0);
	    return ptr_deref_may_alias_ref_p_1 (ptr, ref);
	  }
	/* Realloc serves both as allocation point and deallocation point.  */
	case BUILT_IN_REALLOC:
	  {
	    tree ptr = gimple_call_arg (call, 0);
	    /* Unix98 specifies that errno is set on allocation failure.  */
	    return ((flag_errno_math
		     && targetm.ref_may_alias_errno (ref))
		    || ptr_deref_may_alias_ref_p_1 (ptr, ref));
	  }
	case BUILT_IN_GAMMA_R:
	case BUILT_IN_GAMMAF_R:
	case BUILT_IN_GAMMAL_R:
	case BUILT_IN_LGAMMA_R:
	case BUILT_IN_LGAMMAF_R:
	case BUILT_IN_LGAMMAL_R:
	  {
	    tree out = gimple_call_arg (call, 1);
	    if (ptr_deref_may_alias_ref_p_1 (out, ref))
	      return true;
	    if (flag_errno_math)
	      break;
	    return false;
	  }
	case BUILT_IN_FREXP:
	case BUILT_IN_FREXPF:
	case BUILT_IN_FREXPL:
	case BUILT_IN_MODF:
	case BUILT_IN_MODFF:
	case BUILT_IN_MODFL:
	  {
	    tree out = gimple_call_arg (call, 1);
	    return ptr_deref_may_alias_ref_p_1 (out, ref);
	  }
	case BUILT_IN_REMQUO:
	case BUILT_IN_REMQUOF:
	case BUILT_IN_REMQUOL:
	  {
	    tree out = gimple_call_arg (call, 2);
	    if (ptr_deref_may_alias_ref_p_1 (out, ref))
	      return true;
	    if (flag_errno_math)
	      break;
	    return false;
	  }
	case BUILT_IN_SINCOS:
	case BUILT_IN_SINCOSF:
	case BUILT_IN_SINCOSL:
	  {
	    tree sin = gimple_call_arg (call, 1);
	    tree cos = gimple_call_arg (call, 2);
	    return (ptr_deref_may_alias_ref_p_1 (sin, ref)
		    || ptr_deref_may_alias_ref_p_1 (cos, ref));
	  }
	/* __sync_* builtins and some OpenMP builtins act as threading
	   barriers.  */
#undef DEF_SYNC_BUILTIN
#define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
#include "sync-builtins.def"
#undef DEF_SYNC_BUILTIN
	case BUILT_IN_GOMP_ATOMIC_START:
	case BUILT_IN_GOMP_ATOMIC_END:
	case BUILT_IN_GOMP_BARRIER:
	case BUILT_IN_GOMP_BARRIER_CANCEL:
	case BUILT_IN_GOMP_TASKWAIT:
	case BUILT_IN_GOMP_TASKGROUP_END:
	case BUILT_IN_GOMP_CRITICAL_START:
	case BUILT_IN_GOMP_CRITICAL_END:
	case BUILT_IN_GOMP_CRITICAL_NAME_START:
	case BUILT_IN_GOMP_CRITICAL_NAME_END:
	case BUILT_IN_GOMP_LOOP_END:
	case BUILT_IN_GOMP_LOOP_END_CANCEL:
	case BUILT_IN_GOMP_ORDERED_START:
	case BUILT_IN_GOMP_ORDERED_END:
	case BUILT_IN_GOMP_SECTIONS_END:
	case BUILT_IN_GOMP_SECTIONS_END_CANCEL:
	case BUILT_IN_GOMP_SINGLE_COPY_START:
	case BUILT_IN_GOMP_SINGLE_COPY_END:
	  return true;
	default:
	  /* Fallthru to general call handling.  */;
      }

  /* Check if base is a global static variable that is not written
     by the function.  */
  if (callee != NULL_TREE
      && TREE_CODE (base) == VAR_DECL
      && TREE_STATIC (base))
    {
      struct cgraph_node *node = cgraph_node::get (callee);
      bitmap not_written;

      if (node
	  && (not_written = ipa_reference_get_not_written_global (node))
	  && bitmap_bit_p (not_written, DECL_UID (base)))
	return false;
    }

  /* Check if the base variable is call-clobbered.  */
  if (DECL_P (base))
    return pt_solution_includes (gimple_call_clobber_set (call), base);
  else if ((TREE_CODE (base) == MEM_REF
	    || TREE_CODE (base) == TARGET_MEM_REF)
	   && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
    {
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
      if (!pi)
	return true;

      return pt_solutions_intersect (gimple_call_clobber_set (call), &pi->pt);
    }

  return true;
}

/* If the call in statement CALL may clobber the memory reference REF
   return true, otherwise return false.  */

bool
call_may_clobber_ref_p (gcall *call, tree ref)
{
  bool res;
  ao_ref r;
  ao_ref_init (&r, ref);
  res = call_may_clobber_ref_p_1 (call, &r);
  if (res)
    ++alias_stats.call_may_clobber_ref_p_may_alias;
  else
    ++alias_stats.call_may_clobber_ref_p_no_alias;
  return res;
}
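
/* Illustrative sketch (not part of this file): a pass holding a call
   statement CALL and a tracked reference tree DECL could ask the oracle
   above whether the call preserves DECL:

     if (!call_may_clobber_ref_p (call, decl))
       mark_value_as_preserved (decl);   // hypothetical pass-local helper

   For repeated queries against the same reference it is cheaper to build
   the ao_ref once with ao_ref_init and use call_may_clobber_ref_p_1
   directly, which is what the statement-level wrappers below do.  */
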
/* If the statement STMT may clobber the memory reference REF return true,
   otherwise return false.  */

bool
stmt_may_clobber_ref_p_1 (gimple stmt, ao_ref *ref)
{
  if (is_gimple_call (stmt))
    {
      tree lhs = gimple_call_lhs (stmt);
      if (lhs
	  && TREE_CODE (lhs) != SSA_NAME)
	{
	  ao_ref r;
	  ao_ref_init (&r, lhs);
	  if (refs_may_alias_p_1 (ref, &r, true))
	    return true;
	}

      return call_may_clobber_ref_p_1 (as_a <gcall *> (stmt), ref);
    }
  else if (gimple_assign_single_p (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);
      if (TREE_CODE (lhs) != SSA_NAME)
	{
	  ao_ref r;
	  ao_ref_init (&r, lhs);
	  return refs_may_alias_p_1 (ref, &r, true);
	}
    }
  else if (gimple_code (stmt) == GIMPLE_ASM)
    return true;

  return false;
}

bool
stmt_may_clobber_ref_p (gimple stmt, tree ref)
{
  ao_ref r;
  ao_ref_init (&r, ref);
  return stmt_may_clobber_ref_p_1 (stmt, &r);
}
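
/* Illustrative sketch (not part of this file): scanning a basic block BB
   for the first statement that might overwrite a reference REF, using the
   statement-level oracle above.  BB and REF are assumed to come from the
   caller.

     ao_ref r;
     ao_ref_init (&r, ref);
     for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
	  !gsi_end_p (gsi); gsi_next (&gsi))
       if (stmt_may_clobber_ref_p_1 (gsi_stmt (gsi), &r))
	 break;

   Callers that only have a plain tree reference can use
   stmt_may_clobber_ref_p instead and let it build the ao_ref internally.  */
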
/* If STMT kills the memory reference REF return true, otherwise
   return false.  */

bool
stmt_kills_ref_p (gimple stmt, ao_ref *ref)
{
  if (!ao_ref_base (ref))
    return false;

  if (gimple_has_lhs (stmt)
      && TREE_CODE (gimple_get_lhs (stmt)) != SSA_NAME
      /* The assignment is not necessarily carried out if it can throw
	 and we can catch it in the current function where we could inspect
	 the previous value.
	 ???  We only need to care about the RHS throwing.  For aggregate
	 assignments or similar calls and non-call exceptions the LHS
	 might throw as well.  */
      && !stmt_can_throw_internal (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      /* If LHS is literally a base of the access we are done.  */
      if (ref->ref)
	{
	  tree base = ref->ref;
	  if (handled_component_p (base))
	    {
	      tree saved_lhs0 = NULL_TREE;
	      if (handled_component_p (lhs))
		{
		  saved_lhs0 = TREE_OPERAND (lhs, 0);
		  TREE_OPERAND (lhs, 0) = integer_zero_node;
		}
	      do
		{
		  /* Just compare the outermost handled component, if
		     they are equal we have found a possible common
		     base.  */
		  tree saved_base0 = TREE_OPERAND (base, 0);
		  TREE_OPERAND (base, 0) = integer_zero_node;
		  bool res = operand_equal_p (lhs, base, 0);
		  TREE_OPERAND (base, 0) = saved_base0;
		  if (res)
		    break;
		  /* Otherwise drop handled components of the access.  */
		  base = saved_base0;
		}
	      while (handled_component_p (base));
	      if (saved_lhs0)
		TREE_OPERAND (lhs, 0) = saved_lhs0;
	    }
	  /* Finally check if lhs is equal to the base candidate
	     of the access.  */
	  if (operand_equal_p (lhs, base, 0))
	    return true;
	}
      /* Now look for non-literal equal bases with the restriction of
	 handling constant offset and size.  */
      /* For a must-alias check we need to be able to constrain
	 the access properly.  */
      if (ref->max_size == -1)
	return false;

      HOST_WIDE_INT size, offset, max_size, ref_offset = ref->offset;
      tree base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
      /* We can get MEM[symbol: sZ, index: D.8862_1] here,
	 so base == ref->base does not always hold.  */
      if (base != ref->base)
	{
	  /* If both base and ref->base are MEM_REFs, only compare the
	     first operand, and if the second operand isn't equal constant,
	     try to add the offsets into offset and ref_offset.  */
	  if (TREE_CODE (base) == MEM_REF && TREE_CODE (ref->base) == MEM_REF
	      && TREE_OPERAND (base, 0) == TREE_OPERAND (ref->base, 0))
	    {
	      if (!tree_int_cst_equal (TREE_OPERAND (base, 1),
				       TREE_OPERAND (ref->base, 1)))
		{
		  offset_int off1 = mem_ref_offset (base);
		  off1 = wi::lshift (off1, LOG2_BITS_PER_UNIT);
		  off1 += offset;
		  offset_int off2 = mem_ref_offset (ref->base);
		  off2 = wi::lshift (off2, LOG2_BITS_PER_UNIT);
		  off2 += ref_offset;
		  if (wi::fits_shwi_p (off1) && wi::fits_shwi_p (off2))
		    {
		      offset = off1.to_shwi ();
		      ref_offset = off2.to_shwi ();
		    }
		  else
		    size = -1;
		}
	    }
	  else
	    size = -1;
	}
      /* For a must-alias check we need to be able to constrain
	 the access properly.  */
      if (size != -1 && size == max_size)
	{
	  if (offset <= ref_offset
	      && offset + size >= ref_offset + ref->max_size)
	    return true;
	}
    }

  if (is_gimple_call (stmt))
    {
      tree callee = gimple_call_fndecl (stmt);
      if (callee != NULL_TREE
	  && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
	switch (DECL_FUNCTION_CODE (callee))
	  {
	  case BUILT_IN_FREE:
	    {
	      tree ptr = gimple_call_arg (stmt, 0);
	      tree base = ao_ref_base (ref);
	      if (base && TREE_CODE (base) == MEM_REF
		  && TREE_OPERAND (base, 0) == ptr)
		return true;
	      break;
	    }

	  case BUILT_IN_MEMCPY:
	  case BUILT_IN_MEMPCPY:
	  case BUILT_IN_MEMMOVE:
	  case BUILT_IN_MEMSET:
	  case BUILT_IN_MEMCPY_CHK:
	  case BUILT_IN_MEMPCPY_CHK:
	  case BUILT_IN_MEMMOVE_CHK:
	  case BUILT_IN_MEMSET_CHK:
	    {
	      /* For a must-alias check we need to be able to constrain
		 the access properly.  */
	      if (ref->max_size == -1)
		return false;
	      tree dest = gimple_call_arg (stmt, 0);
	      tree len = gimple_call_arg (stmt, 2);
	      if (!tree_fits_shwi_p (len))
		return false;
	      tree rbase = ref->base;
	      offset_int roffset = ref->offset;
	      ao_ref dref;
	      ao_ref_init_from_ptr_and_size (&dref, dest, len);
	      tree base = ao_ref_base (&dref);
	      offset_int offset = dref.offset;
	      if (!base || dref.size == -1)
		return false;
	      if (TREE_CODE (base) == MEM_REF)
		{
		  if (TREE_CODE (rbase) != MEM_REF)
		    return false;
		  // Compare pointers.
		  offset += wi::lshift (mem_ref_offset (base),
					LOG2_BITS_PER_UNIT);
		  roffset += wi::lshift (mem_ref_offset (rbase),
					 LOG2_BITS_PER_UNIT);
		  base = TREE_OPERAND (base, 0);
		  rbase = TREE_OPERAND (rbase, 0);
		}
	      if (base == rbase
		  && wi::les_p (offset, roffset)
		  && wi::les_p (roffset + ref->max_size,
				offset + wi::lshift (wi::to_offset (len),
						     LOG2_BITS_PER_UNIT)))
		return true;
	      break;
	    }

	  case BUILT_IN_VA_END:
	    {
	      tree ptr = gimple_call_arg (stmt, 0);
	      if (TREE_CODE (ptr) == ADDR_EXPR)
		{
		  tree base = ao_ref_base (ref);
		  if (TREE_OPERAND (ptr, 0) == base)
		    return true;
		}
	      break;
	    }

	  default:;
	  }
    }
  return false;
}

bool
stmt_kills_ref_p (gimple stmt, tree ref)
{
  ao_ref r;
  ao_ref_init (&r, ref);
  return stmt_kills_ref_p (stmt, &r);
}
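
/* Illustrative sketch (not part of this file): stmt_kills_ref_p is a
   must-alias query, so a dead-store style check could use it to prove
   that a later store LATER completely overwrites what an earlier store
   EARLIER wrote.  EARLIER and LATER are made-up names for this example.

     ao_ref lhs_ref;
     ao_ref_init (&lhs_ref, gimple_assign_lhs (earlier));
     if (stmt_kills_ref_p (later, &lhs_ref))
       ;   // every byte written by EARLIER is rewritten by LATER

   Unlike stmt_may_clobber_ref_p, a true answer here is a guarantee rather
   than a conservative may-result.  */
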
/* Walk the virtual use-def chain of VUSE until hitting the virtual operand
   TARGET or a statement clobbering the memory reference REF in which
   case false is returned.  The walk starts with VUSE, one argument of PHI.  */

static bool
maybe_skip_until (gimple phi, tree target, ao_ref *ref,
		  tree vuse, unsigned int *cnt, bitmap *visited,
		  bool abort_on_visited,
		  void *(*translate)(ao_ref *, tree, void *, bool),
		  void *data)
{
  basic_block bb = gimple_bb (phi);

  if (!*visited)
    *visited = BITMAP_ALLOC (NULL);

  bitmap_set_bit (*visited, SSA_NAME_VERSION (PHI_RESULT (phi)));

  /* Walk until we hit the target.  */
  while (vuse != target)
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
      /* Recurse for PHI nodes.  */
      if (gimple_code (def_stmt) == GIMPLE_PHI)
	{
	  /* An already visited PHI node ends the walk successfully.  */
	  if (bitmap_bit_p (*visited, SSA_NAME_VERSION (PHI_RESULT (def_stmt))))
	    return !abort_on_visited;
	  vuse = get_continuation_for_phi (def_stmt, ref, cnt,
					   visited, abort_on_visited,
					   translate, data);
	  if (!vuse)
	    return false;
	  continue;
	}
      else if (gimple_nop_p (def_stmt))
	return false;
      else
	{
	  /* A clobbering statement or the end of the IL ends it failing.  */
	  ++*cnt;
	  if (stmt_may_clobber_ref_p_1 (def_stmt, ref))
	    {
	      if (translate
		  && (*translate) (ref, vuse, data, true) == NULL)
		;
	      else
		return false;
	    }
	}
      /* If we reach a new basic-block see if we already skipped it
	 in a previous walk that ended successfully.  */
      if (gimple_bb (def_stmt) != bb)
	{
	  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (vuse)))
	    return !abort_on_visited;
	  bb = gimple_bb (def_stmt);
	}
      vuse = gimple_vuse (def_stmt);
    }
  return true;
}

/* For two PHI arguments ARG0 and ARG1 try to skip non-aliasing code
   until we hit the phi argument definition that dominates the other one.
   Return that, or NULL_TREE if there is no such definition.  */

static tree
get_continuation_for_phi_1 (gimple phi, tree arg0, tree arg1,
			    ao_ref *ref, unsigned int *cnt,
			    bitmap *visited, bool abort_on_visited,
			    void *(*translate)(ao_ref *, tree, void *, bool),
			    void *data)
{
  gimple def0 = SSA_NAME_DEF_STMT (arg0);
  gimple def1 = SSA_NAME_DEF_STMT (arg1);
  tree common_vuse;

  if (arg0 == arg1)
    return arg0;
  else if (gimple_nop_p (def0)
	   || (!gimple_nop_p (def1)
	       && dominated_by_p (CDI_DOMINATORS,
				  gimple_bb (def1), gimple_bb (def0))))
    {
      if (maybe_skip_until (phi, arg0, ref, arg1, cnt,
			    visited, abort_on_visited, translate, data))
	return arg0;
    }
  else if (gimple_nop_p (def1)
	   || dominated_by_p (CDI_DOMINATORS,
			      gimple_bb (def0), gimple_bb (def1)))
    {
      if (maybe_skip_until (phi, arg1, ref, arg0, cnt,
			    visited, abort_on_visited, translate, data))
	return arg1;
    }
  /* Special case of a diamond:
     MEM_1 = ...
     goto (cond) ? L1 : L2
     L1: store1 = ...    #MEM_2 = vuse(MEM_1)
	  goto L3
     L2: store2 = ...    #MEM_3 = vuse(MEM_1)
     L3: MEM_4 = PHI<MEM_2, MEM_3>
     We were called with the PHI at L3, MEM_2 and MEM_3 don't
     dominate each other, but still we can easily skip this PHI node
     if we recognize that the vuse MEM operand is the same for both,
     and that we can skip both statements (they don't clobber us).
     This is still linear.  Don't use maybe_skip_until, that might
     potentially be slow.  */
  else if ((common_vuse = gimple_vuse (def0))
	   && common_vuse == gimple_vuse (def1))
    {
      *cnt += 2;
      if ((!stmt_may_clobber_ref_p_1 (def0, ref)
	   || (translate
	       && (*translate) (ref, arg0, data, true) == NULL))
	  && (!stmt_may_clobber_ref_p_1 (def1, ref)
	      || (translate
		  && (*translate) (ref, arg1, data, true) == NULL)))
	return common_vuse;
    }

  return NULL_TREE;
}

/* Starting from a PHI node for the virtual operand of the memory reference
   REF find a continuation virtual operand that allows to continue walking
   statements dominating PHI skipping only statements that cannot possibly
   clobber REF.  Increments *CNT for each alias disambiguation done.
   Returns NULL_TREE if no suitable virtual operand can be found.  */

tree
get_continuation_for_phi (gimple phi, ao_ref *ref,
			  unsigned int *cnt, bitmap *visited,
			  bool abort_on_visited,
			  void *(*translate)(ao_ref *, tree, void *, bool),
			  void *data)
{
  unsigned nargs = gimple_phi_num_args (phi);

  /* Through a single-argument PHI we can simply look through.  */
  if (nargs == 1)
    return PHI_ARG_DEF (phi, 0);

  /* For two or more arguments try to pairwise skip non-aliasing code
     until we hit the phi argument definition that dominates the other one.  */
  else if (nargs >= 2)
    {
      tree arg0, arg1;
      unsigned i;

      /* Find a candidate for the virtual operand which definition
	 dominates those of all others.  */
      arg0 = PHI_ARG_DEF (phi, 0);
      if (!SSA_NAME_IS_DEFAULT_DEF (arg0))
	for (i = 1; i < nargs; ++i)
	  {
	    arg1 = PHI_ARG_DEF (phi, i);
	    if (SSA_NAME_IS_DEFAULT_DEF (arg1))
	      {
		arg0 = arg1;
		break;
	      }
	    if (dominated_by_p (CDI_DOMINATORS,
				gimple_bb (SSA_NAME_DEF_STMT (arg0)),
				gimple_bb (SSA_NAME_DEF_STMT (arg1))))
	      arg0 = arg1;
	  }

      /* Then pairwise reduce against the found candidate.  */
      for (i = 0; i < nargs; ++i)
	{
	  arg1 = PHI_ARG_DEF (phi, i);
	  arg0 = get_continuation_for_phi_1 (phi, arg0, arg1, ref,
					     cnt, visited, abort_on_visited,
					     translate, data);
	  if (!arg0)
	    return NULL_TREE;
	}

      return arg0;
    }

  return NULL_TREE;
}

/* Based on the memory reference REF and its virtual use VUSE call
   WALKER for each virtual use that is equivalent to VUSE, including VUSE
   itself.  That is, for each virtual use for which its defining statement
   does not clobber REF.

   WALKER is called with REF, the current virtual use and DATA.  If
   WALKER returns non-NULL the walk stops and its result is returned.
   At the end of a non-successful walk NULL is returned.

   TRANSLATE if non-NULL is called with a pointer to REF, the virtual
   use which definition is a statement that may clobber REF and DATA.
   If TRANSLATE returns (void *)-1 the walk stops and NULL is returned.
   If TRANSLATE returns non-NULL the walk stops and its result is returned.
   If TRANSLATE returns NULL the walk continues and TRANSLATE is supposed
   to adjust REF and *DATA to make that valid.

   VALUEIZE if non-NULL is called with the next VUSE that is considered
   and return value is substituted for that.  This can be used to
   implement optimistic value-numbering for example.  Note that the
   VUSE argument is assumed to be valueized already.

   TODO: Cache the vector of equivalent vuses per ref, vuse pair.  */

void *
walk_non_aliased_vuses (ao_ref *ref, tree vuse,
			void *(*walker)(ao_ref *, tree, unsigned int, void *),
			void *(*translate)(ao_ref *, tree, void *, bool),
			tree (*valueize)(tree),
			void *data)
{
  bitmap visited = NULL;
  void *res;
  unsigned int cnt = 0;
  bool translated = false;

  timevar_push (TV_ALIAS_STMT_WALK);

  do
    {
      gimple def_stmt;

      /* ???  Do we want to account this to TV_ALIAS_STMT_WALK?  */
      res = (*walker) (ref, vuse, cnt, data);
      /* Abort walk.  */
      if (res == (void *)-1)
	{
	  res = NULL;
	  break;
	}
      /* Lookup succeeded.  */
      else if (res != NULL)
	break;

      if (valueize)
	vuse = valueize (vuse);
      def_stmt = SSA_NAME_DEF_STMT (vuse);
      if (gimple_nop_p (def_stmt))
	break;
      else if (gimple_code (def_stmt) == GIMPLE_PHI)
	vuse = get_continuation_for_phi (def_stmt, ref, &cnt,
					 &visited, translated, translate, data);
      else
	{
	  cnt++;
	  if (stmt_may_clobber_ref_p_1 (def_stmt, ref))
	    {
	      if (!translate)
		break;
	      res = (*translate) (ref, vuse, data, false);
	      /* Failed lookup and translation.  */
	      if (res == (void *)-1)
		{
		  res = NULL;
		  break;
		}
	      /* Lookup succeeded.  */
	      else if (res != NULL)
		break;
	      /* Translation succeeded, continue walking.  */
	      translated = true;
	    }
	  vuse = gimple_vuse (def_stmt);
	}
    }
  while (vuse);

  if (visited)
    BITMAP_FREE (visited);

  timevar_pop (TV_ALIAS_STMT_WALK);

  return res;
}
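
/* Illustrative sketch (not part of this file): the shape of a WALKER
   callback for walk_non_aliased_vuses.  Returning NULL continues the walk,
   returning non-NULL stops it, so a callback that merely counts the
   equivalent VUSEs it is offered could look like

     static void *
     count_vuses (ao_ref *ref, tree vuse, unsigned int cnt, void *data)
     {
       ++*(unsigned *) data;
       return NULL;
     }

     unsigned n = 0;
     walk_non_aliased_vuses (&r, vuse, count_vuses, NULL, NULL, &n);

   Real callers such as value-numbering instead return their lookup result
   from the callback so that the walk stops at the first hit.  count_vuses,
   r, vuse and n are made-up names for this example.  */
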
/* Based on the memory reference REF call WALKER for each vdef which
   defining statement may clobber REF, starting with VDEF.  If REF
   is NULL_TREE, each defining statement is visited.

   WALKER is called with REF, the current vdef and DATA.  If WALKER
   returns true the walk is stopped, otherwise it continues.

   If function entry is reached, FUNCTION_ENTRY_REACHED is set to true.
   The pointer may be NULL and then we do not track this information.

   At PHI nodes walk_aliased_vdefs forks into one walk for each
   PHI argument (but only one walk continues on merge points), the
   return value is true if any of the walks was successful.

   The function returns the number of statements walked.  */

static unsigned int
walk_aliased_vdefs_1 (ao_ref *ref, tree vdef,
		      bool (*walker)(ao_ref *, tree, void *), void *data,
		      bitmap *visited, unsigned int cnt,
		      bool *function_entry_reached)
{
  do
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (vdef);

      if (*visited
	  && !bitmap_set_bit (*visited, SSA_NAME_VERSION (vdef)))
	return cnt;

      if (gimple_nop_p (def_stmt))
	{
	  if (function_entry_reached)
	    *function_entry_reached = true;
	  return cnt;
	}
      else if (gimple_code (def_stmt) == GIMPLE_PHI)
	{
	  unsigned i;
	  if (!*visited)
	    *visited = BITMAP_ALLOC (NULL);
	  for (i = 0; i < gimple_phi_num_args (def_stmt); ++i)
	    cnt += walk_aliased_vdefs_1 (ref, gimple_phi_arg_def (def_stmt, i),
					 walker, data, visited, 0,
					 function_entry_reached);
	  return cnt;
	}

      /* ???  Do we want to account this to TV_ALIAS_STMT_WALK?  */
      cnt++;
      if ((!ref
	   || stmt_may_clobber_ref_p_1 (def_stmt, ref))
	  && (*walker) (ref, vdef, data))
	return cnt;

      vdef = gimple_vuse (def_stmt);
    }
  while (1);
}

unsigned int
walk_aliased_vdefs (ao_ref *ref, tree vdef,
		    bool (*walker)(ao_ref *, tree, void *), void *data,
		    bitmap *visited,
		    bool *function_entry_reached)
{
  bitmap local_visited = NULL;
  unsigned int ret;

  timevar_push (TV_ALIAS_STMT_WALK);

  if (function_entry_reached)
    *function_entry_reached = false;

  ret = walk_aliased_vdefs_1 (ref, vdef, walker, data,
			      visited ? visited : &local_visited, 0,
			      function_entry_reached);
  if (local_visited)
    BITMAP_FREE (local_visited);

  timevar_pop (TV_ALIAS_STMT_WALK);

  return ret;
}
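
/* Illustrative sketch (not part of this file): the shape of a WALKER
   callback for walk_aliased_vdefs.  The callback returns true to stop the
   walk; this one records the first possibly-clobbering definition through
   DATA and stops.

     static bool
     record_clobber (ao_ref *ref, tree vdef, void *data)
     {
       *(gimple *) data = SSA_NAME_DEF_STMT (vdef);
       return true;
     }

     gimple first_clobber = NULL;
     walk_aliased_vdefs (&r, gimple_vuse (stmt), record_clobber,
			 &first_clobber, NULL, NULL);

   As documented above, passing a NULL REF makes the walk visit every
   defining statement on the virtual use-def chain instead of only the
   clobbering ones.  record_clobber, r, stmt and first_clobber are made-up
   names for this example.  */
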