/* Alias analysis for trees.
   Copyright (C) 2004-2015 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
23 #include "coretypes.h"
28 #include "double-int.h"
35 #include "fold-const.h"
40 #include "hard-reg-set.h"
43 #include "dominance.h"
44 #include "basic-block.h"
45 #include "timevar.h" /* for TV_ALIAS_STMT_WALK */
46 #include "langhooks.h"
48 #include "tree-pretty-print.h"
50 #include "tree-ssa-alias.h"
51 #include "internal-fn.h"
53 #include "gimple-expr.h"
56 #include "gimple-ssa.h"
57 #include "stringpool.h"
58 #include "tree-ssanames.h"
61 #include "tree-inline.h"
63 #include "alloc-pool.h"
64 #include "tree-ssa-alias.h"
67 #include "plugin-api.h"
70 #include "ipa-reference.h"
/* Broad overview of how alias analysis on gimple works:

   Statements clobbering or using memory are linked through the
   virtual operand factored use-def chain.  The virtual operand
   is unique per function, its symbol is accessible via gimple_vop (cfun).
   Virtual operands are used for efficiently walking memory statements
   in the gimple IL and are useful for things like value-numbering as
   a generation count for memory references.

   SSA_NAME pointers may have associated points-to information
   accessible via the SSA_NAME_PTR_INFO macro.  Flow-insensitive
   points-to information is (re-)computed by the TODO_rebuild_alias
   pass manager todo.  Points-to information is also used for more
   precise tracking of call-clobbered and call-used variables and
   related disambiguations.

   This file contains functions for disambiguating memory references,
   the so-called alias oracle, and tools for walking the gimple IL.

   The main alias-oracle entry points are

   bool stmt_may_clobber_ref_p (gimple, tree)

     This function queries if a statement may invalidate (parts of)
     the memory designated by the reference tree argument.

   bool ref_maybe_used_by_stmt_p (gimple, tree)

     This function queries if a statement may need (parts of) the
     memory designated by the reference tree argument.

   There are variants of these functions that only handle the call
   part of a statement, call_may_clobber_ref_p and ref_maybe_used_by_call_p.
   Note that these do not disambiguate against a possible call lhs.

   bool refs_may_alias_p (tree, tree)

     This function tries to disambiguate two reference trees.

   bool ptr_deref_may_alias_global_p (tree)

     This function queries if dereferencing a pointer variable may
     alias global memory.

   More low-level disambiguators are available and documented in
   this file.  Low-level disambiguators dealing with points-to
   information are in tree-ssa-structalias.c.  */
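
/* As a quick illustrative sketch (added for exposition, not part of the
   original sources): a pass that wants to know whether a statement STMT
   is a memory dependence for a reference tree REF would typically do

     ao_ref r;
     ao_ref_init (&r, ref);
     bool dependent = (stmt_may_clobber_ref_p_1 (stmt, &r)
                       || ref_maybe_used_by_stmt_p (stmt, &r));

   Real users usually combine such per-statement queries with walks of
   the virtual operand chain via walk_aliased_vdefs or
   walk_non_aliased_vuses.  */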
/* Query statistics for the different low-level disambiguators.
   A high-level query may trigger multiple of them.  */

static struct {
  unsigned HOST_WIDE_INT refs_may_alias_p_may_alias;
  unsigned HOST_WIDE_INT refs_may_alias_p_no_alias;
  unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_may_alias;
  unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_no_alias;
  unsigned HOST_WIDE_INT call_may_clobber_ref_p_may_alias;
  unsigned HOST_WIDE_INT call_may_clobber_ref_p_no_alias;
} alias_stats;
void
dump_alias_stats (FILE *s)
{
  fprintf (s, "\nAlias oracle query stats:\n");
  fprintf (s, "  refs_may_alias_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.refs_may_alias_p_no_alias,
	   alias_stats.refs_may_alias_p_no_alias
	   + alias_stats.refs_may_alias_p_may_alias);
  fprintf (s, "  ref_maybe_used_by_call_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.ref_maybe_used_by_call_p_no_alias,
	   alias_stats.ref_maybe_used_by_call_p_no_alias
	   + alias_stats.ref_maybe_used_by_call_p_may_alias);
  fprintf (s, "  call_may_clobber_ref_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.call_may_clobber_ref_p_no_alias,
	   alias_stats.call_may_clobber_ref_p_no_alias
	   + alias_stats.call_may_clobber_ref_p_may_alias);
}
/* Return true if dereferencing PTR may alias a global variable.  */

bool
ptr_deref_may_alias_global_p (tree ptr)
{
  struct ptr_info_def *pi;

  /* If we end up with a pointer constant here that may point
     to global memory.  */
  if (TREE_CODE (ptr) != SSA_NAME)
    return true;

  pi = SSA_NAME_PTR_INFO (ptr);

  /* If we do not have points-to information for this variable,
     we have to punt.  */
  if (!pi)
    return true;

  /* ???  This does not use TBAA to prune globals ptr may not access.  */
  return pt_solution_includes_global (&pi->pt);
}
/* Return true if dereferencing PTR may alias DECL.
   The caller is responsible for applying TBAA to see if PTR
   may access DECL at all.  */

static bool
ptr_deref_may_alias_decl_p (tree ptr, tree decl)
{
  struct ptr_info_def *pi;

  /* Conversions are irrelevant for points-to information and
     data-dependence analysis can feed us those.  */
  STRIP_NOPS (ptr);

  /* Anything we do not explicitly handle aliases.  */
  if ((TREE_CODE (ptr) != SSA_NAME
       && TREE_CODE (ptr) != ADDR_EXPR
       && TREE_CODE (ptr) != POINTER_PLUS_EXPR)
      || !POINTER_TYPE_P (TREE_TYPE (ptr))
      || (TREE_CODE (decl) != VAR_DECL
	  && TREE_CODE (decl) != PARM_DECL
	  && TREE_CODE (decl) != RESULT_DECL))
    return true;

  /* Disregard pointer offsetting.  */
  if (TREE_CODE (ptr) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr = TREE_OPERAND (ptr, 0);
	}
      while (TREE_CODE (ptr) == POINTER_PLUS_EXPR);
      return ptr_deref_may_alias_decl_p (ptr, decl);
    }

  /* ADDR_EXPR pointers either just offset another pointer or directly
     specify the pointed-to set.  */
  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	ptr = TREE_OPERAND (base, 0);
      else if (base
	       && DECL_P (base))
	return base == decl;
      else if (base
	       && CONSTANT_CLASS_P (base))
	return false;
      else
	return true;
    }

  /* Non-aliased variables cannot be pointed to.  */
  if (!may_be_aliased (decl))
    return false;

  /* If we do not have useful points-to information for this pointer
     we cannot disambiguate anything else.  */
  pi = SSA_NAME_PTR_INFO (ptr);
  if (!pi)
    return true;

  return pt_solution_includes (&pi->pt, decl);
}
/* Return true if dereferenced PTR1 and PTR2 may alias.
   The caller is responsible for applying TBAA to see if accesses
   through PTR1 and PTR2 may conflict at all.  */

bool
ptr_derefs_may_alias_p (tree ptr1, tree ptr2)
{
  struct ptr_info_def *pi1, *pi2;

  /* Conversions are irrelevant for points-to information and
     data-dependence analysis can feed us those.  */
  STRIP_NOPS (ptr1);
  STRIP_NOPS (ptr2);

  /* Disregard pointer offsetting.  */
  if (TREE_CODE (ptr1) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr1 = TREE_OPERAND (ptr1, 0);
	}
      while (TREE_CODE (ptr1) == POINTER_PLUS_EXPR);
      return ptr_derefs_may_alias_p (ptr1, ptr2);
    }
  if (TREE_CODE (ptr2) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr2 = TREE_OPERAND (ptr2, 0);
	}
      while (TREE_CODE (ptr2) == POINTER_PLUS_EXPR);
      return ptr_derefs_may_alias_p (ptr1, ptr2);
    }

  /* ADDR_EXPR pointers either just offset another pointer or directly
     specify the pointed-to set.  */
  if (TREE_CODE (ptr1) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr1, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	return ptr_derefs_may_alias_p (TREE_OPERAND (base, 0), ptr2);
      else if (base
	       && DECL_P (base))
	return ptr_deref_may_alias_decl_p (ptr2, base);
      else
	return true;
    }
  if (TREE_CODE (ptr2) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr2, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	return ptr_derefs_may_alias_p (ptr1, TREE_OPERAND (base, 0));
      else if (base
	       && DECL_P (base))
	return ptr_deref_may_alias_decl_p (ptr1, base);
      else
	return true;
    }

  /* From here we require SSA name pointers.  Anything else aliases.  */
  if (TREE_CODE (ptr1) != SSA_NAME
      || TREE_CODE (ptr2) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (ptr1))
      || !POINTER_TYPE_P (TREE_TYPE (ptr2)))
    return true;

  /* We may end up with two empty points-to solutions for two same pointers.
     In this case we still want to say both pointers alias, so shortcut
     that here.  */
  if (ptr1 == ptr2)
    return true;

  /* If we do not have useful points-to information for either pointer
     we cannot disambiguate anything else.  */
  pi1 = SSA_NAME_PTR_INFO (ptr1);
  pi2 = SSA_NAME_PTR_INFO (ptr2);
  if (!pi1 || !pi2)
    return true;

  /* ???  This does not use TBAA to prune decls from the intersection
     that not both pointers may access.  */
  return pt_solutions_intersect (&pi1->pt, &pi2->pt);
}
/* Return true if dereferencing PTR may alias *REF.
   The caller is responsible for applying TBAA to see if PTR
   may access *REF at all.  */

static bool
ptr_deref_may_alias_ref_p_1 (tree ptr, ao_ref *ref)
{
  tree base = ao_ref_base (ref);

  if (TREE_CODE (base) == MEM_REF
      || TREE_CODE (base) == TARGET_MEM_REF)
    return ptr_derefs_may_alias_p (ptr, TREE_OPERAND (base, 0));
  else if (DECL_P (base))
    return ptr_deref_may_alias_decl_p (ptr, base);

  return true;
}
/* Returns whether reference REF to BASE may refer to global memory.  */

static bool
ref_may_alias_global_p_1 (tree base)
{
  if (DECL_P (base))
    return is_global_var (base);
  else if (TREE_CODE (base) == MEM_REF
	   || TREE_CODE (base) == TARGET_MEM_REF)
    return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
  return true;
}

bool
ref_may_alias_global_p (ao_ref *ref)
{
  tree base = ao_ref_base (ref);
  return ref_may_alias_global_p_1 (base);
}

bool
ref_may_alias_global_p (tree ref)
{
  tree base = get_base_address (ref);
  return ref_may_alias_global_p_1 (base);
}
/* Return true if STMT may clobber global memory.  */

bool
stmt_may_clobber_global_p (gimple stmt)
{
  tree lhs;

  if (!gimple_vdef (stmt))
    return false;

  /* ???  We can ask the oracle whether an artificial pointer
     dereference with a pointer with points-to information covering
     all global memory (what about non-address taken memory?) may be
     clobbered by this call.  As there is at the moment no convenient
     way of doing that without generating garbage do some manual
     checking instead.
     ???  We could make a NULL ao_ref argument to the various
     predicates special, meaning any global memory.  */

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      lhs = gimple_assign_lhs (stmt);
      return (TREE_CODE (lhs) != SSA_NAME
	      && ref_may_alias_global_p (lhs));
    case GIMPLE_CALL:
      return true;
    default:
      return true;
    }
}
413 /* Dump alias information on FILE. */
416 dump_alias_info (FILE *file
)
420 = lang_hooks
.decl_printable_name (current_function_decl
, 2);
423 fprintf (file
, "\n\nAlias information for %s\n\n", funcname
);
425 fprintf (file
, "Aliased symbols\n\n");
427 FOR_EACH_LOCAL_DECL (cfun
, i
, var
)
429 if (may_be_aliased (var
))
430 dump_variable (file
, var
);
433 fprintf (file
, "\nCall clobber information\n");
435 fprintf (file
, "\nESCAPED");
436 dump_points_to_solution (file
, &cfun
->gimple_df
->escaped
);
438 fprintf (file
, "\n\nFlow-insensitive points-to information\n\n");
440 for (i
= 1; i
< num_ssa_names
; i
++)
442 tree ptr
= ssa_name (i
);
443 struct ptr_info_def
*pi
;
446 || !POINTER_TYPE_P (TREE_TYPE (ptr
))
447 || SSA_NAME_IN_FREE_LIST (ptr
))
450 pi
= SSA_NAME_PTR_INFO (ptr
);
452 dump_points_to_info_for (file
, ptr
);
455 fprintf (file
, "\n");
459 /* Dump alias information on stderr. */
462 debug_alias_info (void)
464 dump_alias_info (stderr
);
468 /* Dump the points-to set *PT into FILE. */
471 dump_points_to_solution (FILE *file
, struct pt_solution
*pt
)
474 fprintf (file
, ", points-to anything");
477 fprintf (file
, ", points-to non-local");
480 fprintf (file
, ", points-to escaped");
483 fprintf (file
, ", points-to unit escaped");
486 fprintf (file
, ", points-to NULL");
490 fprintf (file
, ", points-to vars: ");
491 dump_decl_set (file
, pt
->vars
);
492 if (pt
->vars_contains_nonlocal
493 && pt
->vars_contains_escaped_heap
)
494 fprintf (file
, " (nonlocal, escaped heap)");
495 else if (pt
->vars_contains_nonlocal
496 && pt
->vars_contains_escaped
)
497 fprintf (file
, " (nonlocal, escaped)");
498 else if (pt
->vars_contains_nonlocal
)
499 fprintf (file
, " (nonlocal)");
500 else if (pt
->vars_contains_escaped_heap
)
501 fprintf (file
, " (escaped heap)");
502 else if (pt
->vars_contains_escaped
)
503 fprintf (file
, " (escaped)");
508 /* Unified dump function for pt_solution. */
511 debug (pt_solution
&ref
)
513 dump_points_to_solution (stderr
, &ref
);
517 debug (pt_solution
*ptr
)
522 fprintf (stderr
, "<nil>\n");
526 /* Dump points-to information for SSA_NAME PTR into FILE. */
529 dump_points_to_info_for (FILE *file
, tree ptr
)
531 struct ptr_info_def
*pi
= SSA_NAME_PTR_INFO (ptr
);
533 print_generic_expr (file
, ptr
, dump_flags
);
536 dump_points_to_solution (file
, &pi
->pt
);
538 fprintf (file
, ", points-to anything");
540 fprintf (file
, "\n");
544 /* Dump points-to information for VAR into stderr. */
547 debug_points_to_info_for (tree var
)
549 dump_points_to_info_for (stderr
, var
);
553 /* Initializes the alias-oracle reference representation *R from REF. */
556 ao_ref_init (ao_ref
*r
, tree ref
)
563 r
->ref_alias_set
= -1;
564 r
->base_alias_set
= -1;
565 r
->volatile_p
= ref
? TREE_THIS_VOLATILE (ref
) : false;
568 /* Returns the base object of the memory reference *REF. */
571 ao_ref_base (ao_ref
*ref
)
575 ref
->base
= get_ref_base_and_extent (ref
->ref
, &ref
->offset
, &ref
->size
,
580 /* Returns the base object alias set of the memory reference *REF. */
583 ao_ref_base_alias_set (ao_ref
*ref
)
586 if (ref
->base_alias_set
!= -1)
587 return ref
->base_alias_set
;
591 while (handled_component_p (base_ref
))
592 base_ref
= TREE_OPERAND (base_ref
, 0);
593 ref
->base_alias_set
= get_alias_set (base_ref
);
594 return ref
->base_alias_set
;
597 /* Returns the reference alias set of the memory reference *REF. */
600 ao_ref_alias_set (ao_ref
*ref
)
602 if (ref
->ref_alias_set
!= -1)
603 return ref
->ref_alias_set
;
604 ref
->ref_alias_set
= get_alias_set (ref
->ref
);
605 return ref
->ref_alias_set
;
/* Init an alias-oracle reference representation from a gimple pointer
   PTR and a gimple size SIZE in bytes.  If SIZE is NULL_TREE then the
   size is assumed to be unknown.  The access is assumed to be only
   to or after the pointer target, not before it.  */
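
/* Illustrative note (added for exposition, not part of the original
   sources): a caller that has a pointer P and a byte-size tree S, for
   example the first and last arguments of a memset call, typically does

     ao_ref dref;
     ao_ref_init_from_ptr_and_size (&dref, p, s);

   and then hands DREF to refs_may_alias_p_1 or a similar predicate,
   exactly as the builtin-call handling further down in this file does.  */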
614 ao_ref_init_from_ptr_and_size (ao_ref
*ref
, tree ptr
, tree size
)
616 HOST_WIDE_INT t
, size_hwi
, extra_offset
= 0;
617 ref
->ref
= NULL_TREE
;
618 if (TREE_CODE (ptr
) == SSA_NAME
)
620 gimple stmt
= SSA_NAME_DEF_STMT (ptr
);
621 if (gimple_assign_single_p (stmt
)
622 && gimple_assign_rhs_code (stmt
) == ADDR_EXPR
)
623 ptr
= gimple_assign_rhs1 (stmt
);
624 else if (is_gimple_assign (stmt
)
625 && gimple_assign_rhs_code (stmt
) == POINTER_PLUS_EXPR
626 && TREE_CODE (gimple_assign_rhs2 (stmt
)) == INTEGER_CST
)
628 ptr
= gimple_assign_rhs1 (stmt
);
629 extra_offset
= BITS_PER_UNIT
630 * int_cst_value (gimple_assign_rhs2 (stmt
));
634 if (TREE_CODE (ptr
) == ADDR_EXPR
)
636 ref
->base
= get_addr_base_and_unit_offset (TREE_OPERAND (ptr
, 0), &t
);
638 ref
->offset
= BITS_PER_UNIT
* t
;
643 ref
->base
= get_base_address (TREE_OPERAND (ptr
, 0));
648 ref
->base
= build2 (MEM_REF
, char_type_node
,
649 ptr
, null_pointer_node
);
652 ref
->offset
+= extra_offset
;
654 && tree_fits_shwi_p (size
)
655 && (size_hwi
= tree_to_shwi (size
)) <= HOST_WIDE_INT_MAX
/ BITS_PER_UNIT
)
656 ref
->max_size
= ref
->size
= size_hwi
* BITS_PER_UNIT
;
658 ref
->max_size
= ref
->size
= -1;
659 ref
->ref_alias_set
= 0;
660 ref
->base_alias_set
= 0;
661 ref
->volatile_p
= false;
/* Return 1 if TYPE1 and TYPE2 are to be considered equivalent for the
   purpose of TBAA.  Return 0 if they are distinct and -1 if we cannot
   decide.  */
669 same_type_for_tbaa (tree type1
, tree type2
)
671 type1
= TYPE_MAIN_VARIANT (type1
);
672 type2
= TYPE_MAIN_VARIANT (type2
);
674 /* If we would have to do structural comparison bail out. */
675 if (TYPE_STRUCTURAL_EQUALITY_P (type1
)
676 || TYPE_STRUCTURAL_EQUALITY_P (type2
))
679 /* Compare the canonical types. */
680 if (TYPE_CANONICAL (type1
) == TYPE_CANONICAL (type2
))
683 /* ??? Array types are not properly unified in all cases as we have
684 spurious changes in the index types for example. Removing this
685 causes all sorts of problems with the Fortran frontend. */
686 if (TREE_CODE (type1
) == ARRAY_TYPE
687 && TREE_CODE (type2
) == ARRAY_TYPE
)
690 /* ??? In Ada, an lvalue of an unconstrained type can be used to access an
691 object of one of its constrained subtypes, e.g. when a function with an
692 unconstrained parameter passed by reference is called on an object and
693 inlined. But, even in the case of a fixed size, type and subtypes are
694 not equivalent enough as to share the same TYPE_CANONICAL, since this
695 would mean that conversions between them are useless, whereas they are
696 not (e.g. type and subtypes can have different modes). So, in the end,
697 they are only guaranteed to have the same alias set. */
698 if (get_alias_set (type1
) == get_alias_set (type2
))
701 /* The types are known to be not equal. */
705 /* Determine if the two component references REF1 and REF2 which are
706 based on access types TYPE1 and TYPE2 and of which at least one is based
707 on an indirect reference may alias. REF2 is the only one that can
708 be a decl in which case REF2_IS_DECL is true.
709 REF1_ALIAS_SET, BASE1_ALIAS_SET, REF2_ALIAS_SET and BASE2_ALIAS_SET
710 are the respective alias sets. */
713 aliasing_component_refs_p (tree ref1
,
714 alias_set_type ref1_alias_set
,
715 alias_set_type base1_alias_set
,
716 HOST_WIDE_INT offset1
, HOST_WIDE_INT max_size1
,
718 alias_set_type ref2_alias_set
,
719 alias_set_type base2_alias_set
,
720 HOST_WIDE_INT offset2
, HOST_WIDE_INT max_size2
,
  /* If one reference is a component reference through pointers try to find a
     common base and apply offset based disambiguation.  This handles
     for example
       struct A { int i; int j; } *q;
       struct B { struct A a; int k; } *p;
     disambiguating q->i and p->a.j.  */
734 /* Choose bases and base types to search for. */
736 while (handled_component_p (base1
))
737 base1
= TREE_OPERAND (base1
, 0);
738 type1
= TREE_TYPE (base1
);
740 while (handled_component_p (base2
))
741 base2
= TREE_OPERAND (base2
, 0);
742 type2
= TREE_TYPE (base2
);
744 /* Now search for the type1 in the access path of ref2. This
745 would be a common base for doing offset based disambiguation on. */
747 while (handled_component_p (*refp
)
748 && same_type_for_tbaa (TREE_TYPE (*refp
), type1
) == 0)
749 refp
= &TREE_OPERAND (*refp
, 0);
750 same_p
= same_type_for_tbaa (TREE_TYPE (*refp
), type1
);
751 /* If we couldn't compare types we have to bail out. */
754 else if (same_p
== 1)
756 HOST_WIDE_INT offadj
, sztmp
, msztmp
;
757 get_ref_base_and_extent (*refp
, &offadj
, &sztmp
, &msztmp
);
759 get_ref_base_and_extent (base1
, &offadj
, &sztmp
, &msztmp
);
761 return ranges_overlap_p (offset1
, max_size1
, offset2
, max_size2
);
763 /* If we didn't find a common base, try the other way around. */
765 while (handled_component_p (*refp
)
766 && same_type_for_tbaa (TREE_TYPE (*refp
), type2
) == 0)
767 refp
= &TREE_OPERAND (*refp
, 0);
768 same_p
= same_type_for_tbaa (TREE_TYPE (*refp
), type2
);
769 /* If we couldn't compare types we have to bail out. */
772 else if (same_p
== 1)
774 HOST_WIDE_INT offadj
, sztmp
, msztmp
;
775 get_ref_base_and_extent (*refp
, &offadj
, &sztmp
, &msztmp
);
777 get_ref_base_and_extent (base2
, &offadj
, &sztmp
, &msztmp
);
779 return ranges_overlap_p (offset1
, max_size1
, offset2
, max_size2
);
  /* If we have two type access paths B1.path1 and B2.path2 they may
     only alias if either B1 is in B2.path2 or B2 is in B1.path1.
     But we can still have a path that goes B1.path1...B2.path2 with
     a part that we do not see.  So we can only disambiguate now
     if there is no B2 in the tail of path1 and no B1 on the
     tail of path2.  */
788 if (base1_alias_set
== ref2_alias_set
789 || alias_set_subset_of (base1_alias_set
, ref2_alias_set
))
791 /* If this is ptr vs. decl then we know there is no ptr ... decl path. */
793 return (base2_alias_set
== ref1_alias_set
794 || alias_set_subset_of (base2_alias_set
, ref1_alias_set
));
798 /* Return true if we can determine that component references REF1 and REF2,
799 that are within a common DECL, cannot overlap. */
802 nonoverlapping_component_refs_of_decl_p (tree ref1
, tree ref2
)
804 auto_vec
<tree
, 16> component_refs1
;
805 auto_vec
<tree
, 16> component_refs2
;
807 /* Create the stack of handled components for REF1. */
808 while (handled_component_p (ref1
))
810 component_refs1
.safe_push (ref1
);
811 ref1
= TREE_OPERAND (ref1
, 0);
813 if (TREE_CODE (ref1
) == MEM_REF
)
815 if (!integer_zerop (TREE_OPERAND (ref1
, 1)))
817 ref1
= TREE_OPERAND (TREE_OPERAND (ref1
, 0), 0);
820 /* Create the stack of handled components for REF2. */
821 while (handled_component_p (ref2
))
823 component_refs2
.safe_push (ref2
);
824 ref2
= TREE_OPERAND (ref2
, 0);
826 if (TREE_CODE (ref2
) == MEM_REF
)
828 if (!integer_zerop (TREE_OPERAND (ref2
, 1)))
830 ref2
= TREE_OPERAND (TREE_OPERAND (ref2
, 0), 0);
833 /* We must have the same base DECL. */
834 gcc_assert (ref1
== ref2
);
836 /* Pop the stacks in parallel and examine the COMPONENT_REFs of the same
837 rank. This is sufficient because we start from the same DECL and you
838 cannot reference several fields at a time with COMPONENT_REFs (unlike
839 with ARRAY_RANGE_REFs for arrays) so you always need the same number
840 of them to access a sub-component, unless you're in a union, in which
841 case the return value will precisely be false. */
846 if (component_refs1
.is_empty ())
848 ref1
= component_refs1
.pop ();
850 while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref1
, 0))));
854 if (component_refs2
.is_empty ())
856 ref2
= component_refs2
.pop ();
858 while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref2
, 0))));
860 /* Beware of BIT_FIELD_REF. */
861 if (TREE_CODE (ref1
) != COMPONENT_REF
862 || TREE_CODE (ref2
) != COMPONENT_REF
)
865 tree field1
= TREE_OPERAND (ref1
, 1);
866 tree field2
= TREE_OPERAND (ref2
, 1);
868 /* ??? We cannot simply use the type of operand #0 of the refs here
869 as the Fortran compiler smuggles type punning into COMPONENT_REFs
870 for common blocks instead of using unions like everyone else. */
871 tree type1
= DECL_CONTEXT (field1
);
872 tree type2
= DECL_CONTEXT (field2
);
874 /* We cannot disambiguate fields in a union or qualified union. */
875 if (type1
!= type2
|| TREE_CODE (type1
) != RECORD_TYPE
)
878 /* Different fields of the same record type cannot overlap.
879 ??? Bitfields can overlap at RTL level so punt on them. */
880 if (field1
!= field2
)
882 component_refs1
.release ();
883 component_refs2
.release ();
884 return !(DECL_BIT_FIELD (field1
) && DECL_BIT_FIELD (field2
));
889 component_refs1
.release ();
890 component_refs2
.release ();
/* qsort compare function to sort FIELD_DECLs by their
   DECL_FIELD_CONTEXT TYPE_UID.  */
898 ncr_compar (const void *field1_
, const void *field2_
)
900 const_tree field1
= *(const_tree
*) const_cast <void *>(field1_
);
901 const_tree field2
= *(const_tree
*) const_cast <void *>(field2_
);
902 unsigned int uid1
= TYPE_UID (DECL_FIELD_CONTEXT (field1
));
903 unsigned int uid2
= TYPE_UID (DECL_FIELD_CONTEXT (field2
));
906 else if (uid1
> uid2
)
911 /* Return true if we can determine that the fields referenced cannot
912 overlap for any pair of objects. */
915 nonoverlapping_component_refs_p (const_tree x
, const_tree y
)
917 if (!flag_strict_aliasing
919 || TREE_CODE (x
) != COMPONENT_REF
920 || TREE_CODE (y
) != COMPONENT_REF
)
923 auto_vec
<const_tree
, 16> fieldsx
;
924 while (TREE_CODE (x
) == COMPONENT_REF
)
926 tree field
= TREE_OPERAND (x
, 1);
927 tree type
= DECL_FIELD_CONTEXT (field
);
928 if (TREE_CODE (type
) == RECORD_TYPE
)
929 fieldsx
.safe_push (field
);
930 x
= TREE_OPERAND (x
, 0);
932 if (fieldsx
.length () == 0)
934 auto_vec
<const_tree
, 16> fieldsy
;
935 while (TREE_CODE (y
) == COMPONENT_REF
)
937 tree field
= TREE_OPERAND (y
, 1);
938 tree type
= DECL_FIELD_CONTEXT (field
);
939 if (TREE_CODE (type
) == RECORD_TYPE
)
940 fieldsy
.safe_push (TREE_OPERAND (y
, 1));
941 y
= TREE_OPERAND (y
, 0);
943 if (fieldsy
.length () == 0)
946 /* Most common case first. */
947 if (fieldsx
.length () == 1
948 && fieldsy
.length () == 1)
949 return ((DECL_FIELD_CONTEXT (fieldsx
[0])
950 == DECL_FIELD_CONTEXT (fieldsy
[0]))
951 && fieldsx
[0] != fieldsy
[0]
952 && !(DECL_BIT_FIELD (fieldsx
[0]) && DECL_BIT_FIELD (fieldsy
[0])));
954 if (fieldsx
.length () == 2)
956 if (ncr_compar (&fieldsx
[0], &fieldsx
[1]) == 1)
958 const_tree tem
= fieldsx
[0];
959 fieldsx
[0] = fieldsx
[1];
964 fieldsx
.qsort (ncr_compar
);
966 if (fieldsy
.length () == 2)
968 if (ncr_compar (&fieldsy
[0], &fieldsy
[1]) == 1)
970 const_tree tem
= fieldsy
[0];
971 fieldsy
[0] = fieldsy
[1];
976 fieldsy
.qsort (ncr_compar
);
978 unsigned i
= 0, j
= 0;
981 const_tree fieldx
= fieldsx
[i
];
982 const_tree fieldy
= fieldsy
[j
];
983 tree typex
= DECL_FIELD_CONTEXT (fieldx
);
984 tree typey
= DECL_FIELD_CONTEXT (fieldy
);
987 /* We're left with accessing different fields of a structure,
988 no possible overlap, unless they are both bitfields. */
989 if (fieldx
!= fieldy
)
990 return !(DECL_BIT_FIELD (fieldx
) && DECL_BIT_FIELD (fieldy
));
992 if (TYPE_UID (typex
) < TYPE_UID (typey
))
995 if (i
== fieldsx
.length ())
1001 if (j
== fieldsy
.length ())
1011 /* Return true if two memory references based on the variables BASE1
1012 and BASE2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
1013 [OFFSET2, OFFSET2 + MAX_SIZE2) may alias. REF1 and REF2
1014 if non-NULL are the complete memory reference trees. */
1017 decl_refs_may_alias_p (tree ref1
, tree base1
,
1018 HOST_WIDE_INT offset1
, HOST_WIDE_INT max_size1
,
1019 tree ref2
, tree base2
,
1020 HOST_WIDE_INT offset2
, HOST_WIDE_INT max_size2
)
1022 gcc_checking_assert (DECL_P (base1
) && DECL_P (base2
));
1024 /* If both references are based on different variables, they cannot alias. */
1028 /* If both references are based on the same variable, they cannot alias if
1029 the accesses do not overlap. */
1030 if (!ranges_overlap_p (offset1
, max_size1
, offset2
, max_size2
))
1033 /* For components with variable position, the above test isn't sufficient,
1034 so we disambiguate component references manually. */
1036 && handled_component_p (ref1
) && handled_component_p (ref2
)
1037 && nonoverlapping_component_refs_of_decl_p (ref1
, ref2
))
1043 /* Return true if an indirect reference based on *PTR1 constrained
1044 to [OFFSET1, OFFSET1 + MAX_SIZE1) may alias a variable based on BASE2
1045 constrained to [OFFSET2, OFFSET2 + MAX_SIZE2). *PTR1 and BASE2 have
1046 the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
1047 in which case they are computed on-demand. REF1 and REF2
1048 if non-NULL are the complete memory reference trees. */
1051 indirect_ref_may_alias_decl_p (tree ref1 ATTRIBUTE_UNUSED
, tree base1
,
1052 HOST_WIDE_INT offset1
,
1053 HOST_WIDE_INT max_size1 ATTRIBUTE_UNUSED
,
1054 alias_set_type ref1_alias_set
,
1055 alias_set_type base1_alias_set
,
1056 tree ref2 ATTRIBUTE_UNUSED
, tree base2
,
1057 HOST_WIDE_INT offset2
, HOST_WIDE_INT max_size2
,
1058 alias_set_type ref2_alias_set
,
1059 alias_set_type base2_alias_set
, bool tbaa_p
)
1062 tree ptrtype1
, dbase2
;
1063 HOST_WIDE_INT offset1p
= offset1
, offset2p
= offset2
;
1064 HOST_WIDE_INT doffset1
, doffset2
;
1066 gcc_checking_assert ((TREE_CODE (base1
) == MEM_REF
1067 || TREE_CODE (base1
) == TARGET_MEM_REF
)
1070 ptr1
= TREE_OPERAND (base1
, 0);
1072 /* The offset embedded in MEM_REFs can be negative. Bias them
1073 so that the resulting offset adjustment is positive. */
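  /* Worked example (added for exposition): for a MEM_REF with byte offset
     -4, MOFF is -4 * BITS_PER_UNIT after the shift below; instead of
     subtracting that from OFFSET1P we add its absolute value to OFFSET2P,
     which keeps both offsets non-negative and leaves the overlap test
     further down unchanged.  */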
1074 offset_int moff
= mem_ref_offset (base1
);
1075 moff
= wi::lshift (moff
, LOG2_BITS_PER_UNIT
);
1076 if (wi::neg_p (moff
))
1077 offset2p
+= (-moff
).to_short_addr ();
1079 offset1p
+= moff
.to_short_addr ();
  /* If only one reference is based on a variable, they cannot alias if
     the pointer access is beyond the extent of the variable access.
     (the pointer base cannot validly point to an offset less than zero
     of a variable.)
     ???  IVOPTs creates bases that do not honor this restriction,
     so do not apply this optimization for TARGET_MEM_REFs.  */
1087 if (TREE_CODE (base1
) != TARGET_MEM_REF
1088 && !ranges_overlap_p (MAX (0, offset1p
), -1, offset2p
, max_size2
))
1090 /* They also cannot alias if the pointer may not point to the decl. */
1091 if (!ptr_deref_may_alias_decl_p (ptr1
, base2
))
1094 /* Disambiguations that rely on strict aliasing rules follow. */
1095 if (!flag_strict_aliasing
|| !tbaa_p
)
1098 ptrtype1
= TREE_TYPE (TREE_OPERAND (base1
, 1));
1100 /* If the alias set for a pointer access is zero all bets are off. */
1101 if (base1_alias_set
== -1)
1102 base1_alias_set
= get_deref_alias_set (ptrtype1
);
1103 if (base1_alias_set
== 0)
1105 if (base2_alias_set
== -1)
1106 base2_alias_set
= get_alias_set (base2
);
1108 /* When we are trying to disambiguate an access with a pointer dereference
1109 as base versus one with a decl as base we can use both the size
1110 of the decl and its dynamic type for extra disambiguation.
1111 ??? We do not know anything about the dynamic type of the decl
1112 other than that its alias-set contains base2_alias_set as a subset
1113 which does not help us here. */
1114 /* As we know nothing useful about the dynamic type of the decl just
1115 use the usual conflict check rather than a subset test.
1116 ??? We could introduce -fvery-strict-aliasing when the language
1117 does not allow decls to have a dynamic type that differs from their
1118 static type. Then we can check
1119 !alias_set_subset_of (base1_alias_set, base2_alias_set) instead. */
1120 if (base1_alias_set
!= base2_alias_set
1121 && !alias_sets_conflict_p (base1_alias_set
, base2_alias_set
))
1123 /* If the size of the access relevant for TBAA through the pointer
1124 is bigger than the size of the decl we can't possibly access the
1125 decl via that pointer. */
1126 if (DECL_SIZE (base2
) && COMPLETE_TYPE_P (TREE_TYPE (ptrtype1
))
1127 && TREE_CODE (DECL_SIZE (base2
)) == INTEGER_CST
1128 && TREE_CODE (TYPE_SIZE (TREE_TYPE (ptrtype1
))) == INTEGER_CST
1129 /* ??? This in turn may run afoul when a decl of type T which is
1130 a member of union type U is accessed through a pointer to
1131 type U and sizeof T is smaller than sizeof U. */
1132 && TREE_CODE (TREE_TYPE (ptrtype1
)) != UNION_TYPE
1133 && TREE_CODE (TREE_TYPE (ptrtype1
)) != QUAL_UNION_TYPE
1134 && tree_int_cst_lt (DECL_SIZE (base2
), TYPE_SIZE (TREE_TYPE (ptrtype1
))))
1140 /* If the decl is accessed via a MEM_REF, reconstruct the base
1141 we can use for TBAA and an appropriately adjusted offset. */
1143 while (handled_component_p (dbase2
))
1144 dbase2
= TREE_OPERAND (dbase2
, 0);
1147 if (TREE_CODE (dbase2
) == MEM_REF
1148 || TREE_CODE (dbase2
) == TARGET_MEM_REF
)
1150 offset_int moff
= mem_ref_offset (dbase2
);
1151 moff
= wi::lshift (moff
, LOG2_BITS_PER_UNIT
);
1152 if (wi::neg_p (moff
))
1153 doffset1
-= (-moff
).to_short_addr ();
1155 doffset2
-= moff
.to_short_addr ();
1158 /* If either reference is view-converted, give up now. */
1159 if (same_type_for_tbaa (TREE_TYPE (base1
), TREE_TYPE (ptrtype1
)) != 1
1160 || same_type_for_tbaa (TREE_TYPE (dbase2
), TREE_TYPE (base2
)) != 1)
  /* If both references are through the same type, they do not alias
     if the accesses do not overlap.  This does extra disambiguation
     for mixed/pointer accesses but requires strict aliasing.
     For MEM_REFs we require that the component-ref offset we computed
     is relative to the start of the type which we ensure by
     comparing rvalue and access type and disregarding the constant
     pointer offset.  */
1170 if ((TREE_CODE (base1
) != TARGET_MEM_REF
1171 || (!TMR_INDEX (base1
) && !TMR_INDEX2 (base1
)))
1172 && same_type_for_tbaa (TREE_TYPE (base1
), TREE_TYPE (dbase2
)) == 1)
1173 return ranges_overlap_p (doffset1
, max_size1
, doffset2
, max_size2
);
1176 && nonoverlapping_component_refs_p (ref1
, ref2
))
1179 /* Do access-path based disambiguation. */
1181 && (handled_component_p (ref1
) || handled_component_p (ref2
)))
1182 return aliasing_component_refs_p (ref1
,
1183 ref1_alias_set
, base1_alias_set
,
1186 ref2_alias_set
, base2_alias_set
,
1187 offset2
, max_size2
, true);
1192 /* Return true if two indirect references based on *PTR1
1193 and *PTR2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
1194 [OFFSET2, OFFSET2 + MAX_SIZE2) may alias. *PTR1 and *PTR2 have
1195 the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
1196 in which case they are computed on-demand. REF1 and REF2
1197 if non-NULL are the complete memory reference trees. */
1200 indirect_refs_may_alias_p (tree ref1 ATTRIBUTE_UNUSED
, tree base1
,
1201 HOST_WIDE_INT offset1
, HOST_WIDE_INT max_size1
,
1202 alias_set_type ref1_alias_set
,
1203 alias_set_type base1_alias_set
,
1204 tree ref2 ATTRIBUTE_UNUSED
, tree base2
,
1205 HOST_WIDE_INT offset2
, HOST_WIDE_INT max_size2
,
1206 alias_set_type ref2_alias_set
,
1207 alias_set_type base2_alias_set
, bool tbaa_p
)
1211 tree ptrtype1
, ptrtype2
;
1213 gcc_checking_assert ((TREE_CODE (base1
) == MEM_REF
1214 || TREE_CODE (base1
) == TARGET_MEM_REF
)
1215 && (TREE_CODE (base2
) == MEM_REF
1216 || TREE_CODE (base2
) == TARGET_MEM_REF
));
1218 ptr1
= TREE_OPERAND (base1
, 0);
1219 ptr2
= TREE_OPERAND (base2
, 0);
1221 /* If both bases are based on pointers they cannot alias if they may not
1222 point to the same memory object or if they point to the same object
1223 and the accesses do not overlap. */
1224 if ((!cfun
|| gimple_in_ssa_p (cfun
))
1225 && operand_equal_p (ptr1
, ptr2
, 0)
1226 && (((TREE_CODE (base1
) != TARGET_MEM_REF
1227 || (!TMR_INDEX (base1
) && !TMR_INDEX2 (base1
)))
1228 && (TREE_CODE (base2
) != TARGET_MEM_REF
1229 || (!TMR_INDEX (base2
) && !TMR_INDEX2 (base2
))))
1230 || (TREE_CODE (base1
) == TARGET_MEM_REF
1231 && TREE_CODE (base2
) == TARGET_MEM_REF
1232 && (TMR_STEP (base1
) == TMR_STEP (base2
)
1233 || (TMR_STEP (base1
) && TMR_STEP (base2
)
1234 && operand_equal_p (TMR_STEP (base1
),
1235 TMR_STEP (base2
), 0)))
1236 && (TMR_INDEX (base1
) == TMR_INDEX (base2
)
1237 || (TMR_INDEX (base1
) && TMR_INDEX (base2
)
1238 && operand_equal_p (TMR_INDEX (base1
),
1239 TMR_INDEX (base2
), 0)))
1240 && (TMR_INDEX2 (base1
) == TMR_INDEX2 (base2
)
1241 || (TMR_INDEX2 (base1
) && TMR_INDEX2 (base2
)
1242 && operand_equal_p (TMR_INDEX2 (base1
),
1243 TMR_INDEX2 (base2
), 0))))))
1246 /* The offset embedded in MEM_REFs can be negative. Bias them
1247 so that the resulting offset adjustment is positive. */
1248 moff
= mem_ref_offset (base1
);
1249 moff
= wi::lshift (moff
, LOG2_BITS_PER_UNIT
);
1250 if (wi::neg_p (moff
))
1251 offset2
+= (-moff
).to_short_addr ();
1253 offset1
+= moff
.to_shwi ();
1254 moff
= mem_ref_offset (base2
);
1255 moff
= wi::lshift (moff
, LOG2_BITS_PER_UNIT
);
1256 if (wi::neg_p (moff
))
1257 offset1
+= (-moff
).to_short_addr ();
1259 offset2
+= moff
.to_short_addr ();
1260 return ranges_overlap_p (offset1
, max_size1
, offset2
, max_size2
);
1262 if (!ptr_derefs_may_alias_p (ptr1
, ptr2
))
1265 /* Disambiguations that rely on strict aliasing rules follow. */
1266 if (!flag_strict_aliasing
|| !tbaa_p
)
1269 ptrtype1
= TREE_TYPE (TREE_OPERAND (base1
, 1));
1270 ptrtype2
= TREE_TYPE (TREE_OPERAND (base2
, 1));
1272 /* If the alias set for a pointer access is zero all bets are off. */
1273 if (base1_alias_set
== -1)
1274 base1_alias_set
= get_deref_alias_set (ptrtype1
);
1275 if (base1_alias_set
== 0)
1277 if (base2_alias_set
== -1)
1278 base2_alias_set
= get_deref_alias_set (ptrtype2
);
1279 if (base2_alias_set
== 0)
1282 /* If both references are through the same type, they do not alias
1283 if the accesses do not overlap. This does extra disambiguation
1284 for mixed/pointer accesses but requires strict aliasing. */
1285 if ((TREE_CODE (base1
) != TARGET_MEM_REF
1286 || (!TMR_INDEX (base1
) && !TMR_INDEX2 (base1
)))
1287 && (TREE_CODE (base2
) != TARGET_MEM_REF
1288 || (!TMR_INDEX (base2
) && !TMR_INDEX2 (base2
)))
1289 && same_type_for_tbaa (TREE_TYPE (base1
), TREE_TYPE (ptrtype1
)) == 1
1290 && same_type_for_tbaa (TREE_TYPE (base2
), TREE_TYPE (ptrtype2
)) == 1
1291 && same_type_for_tbaa (TREE_TYPE (ptrtype1
),
1292 TREE_TYPE (ptrtype2
)) == 1)
1293 return ranges_overlap_p (offset1
, max_size1
, offset2
, max_size2
);
1295 /* Do type-based disambiguation. */
1296 if (base1_alias_set
!= base2_alias_set
1297 && !alias_sets_conflict_p (base1_alias_set
, base2_alias_set
))
1300 /* If either reference is view-converted, give up now. */
1301 if (same_type_for_tbaa (TREE_TYPE (base1
), TREE_TYPE (ptrtype1
)) != 1
1302 || same_type_for_tbaa (TREE_TYPE (base2
), TREE_TYPE (ptrtype2
)) != 1)
1306 && nonoverlapping_component_refs_p (ref1
, ref2
))
1309 /* Do access-path based disambiguation. */
1311 && (handled_component_p (ref1
) || handled_component_p (ref2
)))
1312 return aliasing_component_refs_p (ref1
,
1313 ref1_alias_set
, base1_alias_set
,
1316 ref2_alias_set
, base2_alias_set
,
1317 offset2
, max_size2
, false);
1322 /* Return true, if the two memory references REF1 and REF2 may alias. */
1325 refs_may_alias_p_1 (ao_ref
*ref1
, ao_ref
*ref2
, bool tbaa_p
)
1328 HOST_WIDE_INT offset1
= 0, offset2
= 0;
1329 HOST_WIDE_INT max_size1
= -1, max_size2
= -1;
1330 bool var1_p
, var2_p
, ind1_p
, ind2_p
;
1332 gcc_checking_assert ((!ref1
->ref
1333 || TREE_CODE (ref1
->ref
) == SSA_NAME
1334 || DECL_P (ref1
->ref
)
1335 || TREE_CODE (ref1
->ref
) == STRING_CST
1336 || handled_component_p (ref1
->ref
)
1337 || TREE_CODE (ref1
->ref
) == MEM_REF
1338 || TREE_CODE (ref1
->ref
) == TARGET_MEM_REF
)
1340 || TREE_CODE (ref2
->ref
) == SSA_NAME
1341 || DECL_P (ref2
->ref
)
1342 || TREE_CODE (ref2
->ref
) == STRING_CST
1343 || handled_component_p (ref2
->ref
)
1344 || TREE_CODE (ref2
->ref
) == MEM_REF
1345 || TREE_CODE (ref2
->ref
) == TARGET_MEM_REF
));
1347 /* Decompose the references into their base objects and the access. */
1348 base1
= ao_ref_base (ref1
);
1349 offset1
= ref1
->offset
;
1350 max_size1
= ref1
->max_size
;
1351 base2
= ao_ref_base (ref2
);
1352 offset2
= ref2
->offset
;
1353 max_size2
= ref2
->max_size
;
1355 /* We can end up with registers or constants as bases for example from
1356 *D.1663_44 = VIEW_CONVERT_EXPR<struct DB_LSN>(__tmp$B0F64_59);
1357 which is seen as a struct copy. */
1358 if (TREE_CODE (base1
) == SSA_NAME
1359 || TREE_CODE (base1
) == CONST_DECL
1360 || TREE_CODE (base1
) == CONSTRUCTOR
1361 || TREE_CODE (base1
) == ADDR_EXPR
1362 || CONSTANT_CLASS_P (base1
)
1363 || TREE_CODE (base2
) == SSA_NAME
1364 || TREE_CODE (base2
) == CONST_DECL
1365 || TREE_CODE (base2
) == CONSTRUCTOR
1366 || TREE_CODE (base2
) == ADDR_EXPR
1367 || CONSTANT_CLASS_P (base2
))
  /* We can end up referring to code via function and label decls.
     As we likely do not properly track code aliases conservatively
     bail out.  */
1373 if (TREE_CODE (base1
) == FUNCTION_DECL
1374 || TREE_CODE (base1
) == LABEL_DECL
1375 || TREE_CODE (base2
) == FUNCTION_DECL
1376 || TREE_CODE (base2
) == LABEL_DECL
)
1379 /* Two volatile accesses always conflict. */
1380 if (ref1
->volatile_p
1381 && ref2
->volatile_p
)
  /* Defer to simple offset based disambiguation if we have
     references based on two decls.  Do this before deferring to
     TBAA to handle must-alias cases in conformance with the
     GCC extension of allowing type-punning through unions.  */
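  /* Illustrative example (added for exposition): given
       union { int i; float f; } u;
     a store to u.f followed by a load from u.i must be treated as
     aliasing even though int and float live in disjoint alias sets,
     which is why the decl-vs-decl offset check runs before TBAA.  */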
1388 var1_p
= DECL_P (base1
);
1389 var2_p
= DECL_P (base2
);
1390 if (var1_p
&& var2_p
)
1391 return decl_refs_may_alias_p (ref1
->ref
, base1
, offset1
, max_size1
,
1392 ref2
->ref
, base2
, offset2
, max_size2
);
  /* Handle restrict based accesses.
     ???  ao_ref_base strips inner MEM_REF [&decl], recover from that
     here.  */
1397 tree rbase1
= base1
;
1398 tree rbase2
= base2
;
1403 while (handled_component_p (rbase1
))
1404 rbase1
= TREE_OPERAND (rbase1
, 0);
1410 while (handled_component_p (rbase2
))
1411 rbase2
= TREE_OPERAND (rbase2
, 0);
1413 if (rbase1
&& rbase2
1414 && (TREE_CODE (base1
) == MEM_REF
|| TREE_CODE (base1
) == TARGET_MEM_REF
)
1415 && (TREE_CODE (base2
) == MEM_REF
|| TREE_CODE (base2
) == TARGET_MEM_REF
)
1416 /* If the accesses are in the same restrict clique... */
1417 && MR_DEPENDENCE_CLIQUE (base1
) == MR_DEPENDENCE_CLIQUE (base2
)
1418 /* But based on different pointers they do not alias. */
1419 && MR_DEPENDENCE_BASE (base1
) != MR_DEPENDENCE_BASE (base2
))
1422 ind1_p
= (TREE_CODE (base1
) == MEM_REF
1423 || TREE_CODE (base1
) == TARGET_MEM_REF
);
1424 ind2_p
= (TREE_CODE (base2
) == MEM_REF
1425 || TREE_CODE (base2
) == TARGET_MEM_REF
);
1427 /* Canonicalize the pointer-vs-decl case. */
1428 if (ind1_p
&& var2_p
)
1433 tmp1
= offset1
; offset1
= offset2
; offset2
= tmp1
;
1434 tmp1
= max_size1
; max_size1
= max_size2
; max_size2
= tmp1
;
1435 tmp2
= base1
; base1
= base2
; base2
= tmp2
;
1436 tmp3
= ref1
; ref1
= ref2
; ref2
= tmp3
;
1443 /* First defer to TBAA if possible. */
1445 && flag_strict_aliasing
1446 && !alias_sets_conflict_p (ao_ref_alias_set (ref1
),
1447 ao_ref_alias_set (ref2
)))
1450 /* Dispatch to the pointer-vs-decl or pointer-vs-pointer disambiguators. */
1451 if (var1_p
&& ind2_p
)
1452 return indirect_ref_may_alias_decl_p (ref2
->ref
, base2
,
1454 ao_ref_alias_set (ref2
), -1,
1457 ao_ref_alias_set (ref1
),
1458 ao_ref_base_alias_set (ref1
),
1460 else if (ind1_p
&& ind2_p
)
1461 return indirect_refs_may_alias_p (ref1
->ref
, base1
,
1463 ao_ref_alias_set (ref1
), -1,
1466 ao_ref_alias_set (ref2
), -1,
1469 /* We really do not want to end up here, but returning true is safe. */
1470 #ifdef ENABLE_CHECKING
1478 refs_may_alias_p (tree ref1
, ao_ref
*ref2
)
1481 ao_ref_init (&r1
, ref1
);
1482 return refs_may_alias_p_1 (&r1
, ref2
, true);
1486 refs_may_alias_p (tree ref1
, tree ref2
)
1490 ao_ref_init (&r1
, ref1
);
1491 ao_ref_init (&r2
, ref2
);
1492 res
= refs_may_alias_p_1 (&r1
, &r2
, true);
1494 ++alias_stats
.refs_may_alias_p_may_alias
;
1496 ++alias_stats
.refs_may_alias_p_no_alias
;
/* Returns true if there is an anti-dependence for the STORE that
   executes after the LOAD.  */
1504 refs_anti_dependent_p (tree load
, tree store
)
1507 ao_ref_init (&r1
, load
);
1508 ao_ref_init (&r2
, store
);
1509 return refs_may_alias_p_1 (&r1
, &r2
, false);
/* Returns true if there is an output dependence for the stores
   STORE1 and STORE2.  */
1516 refs_output_dependent_p (tree store1
, tree store2
)
1519 ao_ref_init (&r1
, store1
);
1520 ao_ref_init (&r2
, store2
);
1521 return refs_may_alias_p_1 (&r1
, &r2
, false);
1524 /* If the call CALL may use the memory reference REF return true,
1525 otherwise return false. */
1528 ref_maybe_used_by_call_p_1 (gcall
*call
, ao_ref
*ref
)
1532 int flags
= gimple_call_flags (call
);
1534 /* Const functions without a static chain do not implicitly use memory. */
1535 if (!gimple_call_chain (call
)
1536 && (flags
& (ECF_CONST
|ECF_NOVOPS
)))
1539 base
= ao_ref_base (ref
);
1543 /* A call that is not without side-effects might involve volatile
1544 accesses and thus conflicts with all other volatile accesses. */
1545 if (ref
->volatile_p
)
1548 /* If the reference is based on a decl that is not aliased the call
1549 cannot possibly use it. */
1551 && !may_be_aliased (base
)
1552 /* But local statics can be used through recursion. */
1553 && !is_global_var (base
))
1556 callee
= gimple_call_fndecl (call
);
1558 /* Handle those builtin functions explicitly that do not act as
1559 escape points. See tree-ssa-structalias.c:find_func_aliases
1560 for the list of builtins we might need to handle here. */
1561 if (callee
!= NULL_TREE
1562 && DECL_BUILT_IN_CLASS (callee
) == BUILT_IN_NORMAL
)
1563 switch (DECL_FUNCTION_CODE (callee
))
1565 /* All the following functions read memory pointed to by
1566 their second argument. strcat/strncat additionally
1567 reads memory pointed to by the first argument. */
1568 case BUILT_IN_STRCAT
:
1569 case BUILT_IN_STRNCAT
:
1572 ao_ref_init_from_ptr_and_size (&dref
,
1573 gimple_call_arg (call
, 0),
1575 if (refs_may_alias_p_1 (&dref
, ref
, false))
1579 case BUILT_IN_STRCPY
:
1580 case BUILT_IN_STRNCPY
:
1581 case BUILT_IN_MEMCPY
:
1582 case BUILT_IN_MEMMOVE
:
1583 case BUILT_IN_MEMPCPY
:
1584 case BUILT_IN_STPCPY
:
1585 case BUILT_IN_STPNCPY
:
1586 case BUILT_IN_TM_MEMCPY
:
1587 case BUILT_IN_TM_MEMMOVE
:
1590 tree size
= NULL_TREE
;
1591 if (gimple_call_num_args (call
) == 3)
1592 size
= gimple_call_arg (call
, 2);
1593 ao_ref_init_from_ptr_and_size (&dref
,
1594 gimple_call_arg (call
, 1),
1596 return refs_may_alias_p_1 (&dref
, ref
, false);
1598 case BUILT_IN_STRCAT_CHK
:
1599 case BUILT_IN_STRNCAT_CHK
:
1602 ao_ref_init_from_ptr_and_size (&dref
,
1603 gimple_call_arg (call
, 0),
1605 if (refs_may_alias_p_1 (&dref
, ref
, false))
1609 case BUILT_IN_STRCPY_CHK
:
1610 case BUILT_IN_STRNCPY_CHK
:
1611 case BUILT_IN_MEMCPY_CHK
:
1612 case BUILT_IN_MEMMOVE_CHK
:
1613 case BUILT_IN_MEMPCPY_CHK
:
1614 case BUILT_IN_STPCPY_CHK
:
1615 case BUILT_IN_STPNCPY_CHK
:
1618 tree size
= NULL_TREE
;
1619 if (gimple_call_num_args (call
) == 4)
1620 size
= gimple_call_arg (call
, 2);
1621 ao_ref_init_from_ptr_and_size (&dref
,
1622 gimple_call_arg (call
, 1),
1624 return refs_may_alias_p_1 (&dref
, ref
, false);
1626 case BUILT_IN_BCOPY
:
1629 tree size
= gimple_call_arg (call
, 2);
1630 ao_ref_init_from_ptr_and_size (&dref
,
1631 gimple_call_arg (call
, 0),
1633 return refs_may_alias_p_1 (&dref
, ref
, false);
      /* The following functions read memory pointed to by their
	 first argument.  */
1638 CASE_BUILT_IN_TM_LOAD (1):
1639 CASE_BUILT_IN_TM_LOAD (2):
1640 CASE_BUILT_IN_TM_LOAD (4):
1641 CASE_BUILT_IN_TM_LOAD (8):
1642 CASE_BUILT_IN_TM_LOAD (FLOAT
):
1643 CASE_BUILT_IN_TM_LOAD (DOUBLE
):
1644 CASE_BUILT_IN_TM_LOAD (LDOUBLE
):
1645 CASE_BUILT_IN_TM_LOAD (M64
):
1646 CASE_BUILT_IN_TM_LOAD (M128
):
1647 CASE_BUILT_IN_TM_LOAD (M256
):
1648 case BUILT_IN_TM_LOG
:
1649 case BUILT_IN_TM_LOG_1
:
1650 case BUILT_IN_TM_LOG_2
:
1651 case BUILT_IN_TM_LOG_4
:
1652 case BUILT_IN_TM_LOG_8
:
1653 case BUILT_IN_TM_LOG_FLOAT
:
1654 case BUILT_IN_TM_LOG_DOUBLE
:
1655 case BUILT_IN_TM_LOG_LDOUBLE
:
1656 case BUILT_IN_TM_LOG_M64
:
1657 case BUILT_IN_TM_LOG_M128
:
1658 case BUILT_IN_TM_LOG_M256
:
1659 return ptr_deref_may_alias_ref_p_1 (gimple_call_arg (call
, 0), ref
);
1661 /* These read memory pointed to by the first argument. */
1662 case BUILT_IN_STRDUP
:
1663 case BUILT_IN_STRNDUP
:
1664 case BUILT_IN_REALLOC
:
1667 tree size
= NULL_TREE
;
1668 if (gimple_call_num_args (call
) == 2)
1669 size
= gimple_call_arg (call
, 1);
1670 ao_ref_init_from_ptr_and_size (&dref
,
1671 gimple_call_arg (call
, 0),
1673 return refs_may_alias_p_1 (&dref
, ref
, false);
1675 /* These read memory pointed to by the first argument. */
1676 case BUILT_IN_INDEX
:
1677 case BUILT_IN_STRCHR
:
1678 case BUILT_IN_STRRCHR
:
1681 ao_ref_init_from_ptr_and_size (&dref
,
1682 gimple_call_arg (call
, 0),
1684 return refs_may_alias_p_1 (&dref
, ref
, false);
1686 /* These read memory pointed to by the first argument with size
1687 in the third argument. */
1688 case BUILT_IN_MEMCHR
:
1691 ao_ref_init_from_ptr_and_size (&dref
,
1692 gimple_call_arg (call
, 0),
1693 gimple_call_arg (call
, 2));
1694 return refs_may_alias_p_1 (&dref
, ref
, false);
1696 /* These read memory pointed to by the first and second arguments. */
1697 case BUILT_IN_STRSTR
:
1698 case BUILT_IN_STRPBRK
:
1701 ao_ref_init_from_ptr_and_size (&dref
,
1702 gimple_call_arg (call
, 0),
1704 if (refs_may_alias_p_1 (&dref
, ref
, false))
1706 ao_ref_init_from_ptr_and_size (&dref
,
1707 gimple_call_arg (call
, 1),
1709 return refs_may_alias_p_1 (&dref
, ref
, false);
1712 /* The following builtins do not read from memory. */
1714 case BUILT_IN_MALLOC
:
1715 case BUILT_IN_POSIX_MEMALIGN
:
1716 case BUILT_IN_ALIGNED_ALLOC
:
1717 case BUILT_IN_CALLOC
:
1718 case BUILT_IN_ALLOCA
:
1719 case BUILT_IN_ALLOCA_WITH_ALIGN
:
1720 case BUILT_IN_STACK_SAVE
:
1721 case BUILT_IN_STACK_RESTORE
:
1722 case BUILT_IN_MEMSET
:
1723 case BUILT_IN_TM_MEMSET
:
1724 case BUILT_IN_MEMSET_CHK
:
1725 case BUILT_IN_FREXP
:
1726 case BUILT_IN_FREXPF
:
1727 case BUILT_IN_FREXPL
:
1728 case BUILT_IN_GAMMA_R
:
1729 case BUILT_IN_GAMMAF_R
:
1730 case BUILT_IN_GAMMAL_R
:
1731 case BUILT_IN_LGAMMA_R
:
1732 case BUILT_IN_LGAMMAF_R
:
1733 case BUILT_IN_LGAMMAL_R
:
1735 case BUILT_IN_MODFF
:
1736 case BUILT_IN_MODFL
:
1737 case BUILT_IN_REMQUO
:
1738 case BUILT_IN_REMQUOF
:
1739 case BUILT_IN_REMQUOL
:
1740 case BUILT_IN_SINCOS
:
1741 case BUILT_IN_SINCOSF
:
1742 case BUILT_IN_SINCOSL
:
1743 case BUILT_IN_ASSUME_ALIGNED
:
1744 case BUILT_IN_VA_END
:
      /* __sync_* builtins and some OpenMP builtins act as threading
	 barriers.  */
1748 #undef DEF_SYNC_BUILTIN
1749 #define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
1750 #include "sync-builtins.def"
1751 #undef DEF_SYNC_BUILTIN
1752 case BUILT_IN_GOMP_ATOMIC_START
:
1753 case BUILT_IN_GOMP_ATOMIC_END
:
1754 case BUILT_IN_GOMP_BARRIER
:
1755 case BUILT_IN_GOMP_BARRIER_CANCEL
:
1756 case BUILT_IN_GOMP_TASKWAIT
:
1757 case BUILT_IN_GOMP_TASKGROUP_END
:
1758 case BUILT_IN_GOMP_CRITICAL_START
:
1759 case BUILT_IN_GOMP_CRITICAL_END
:
1760 case BUILT_IN_GOMP_CRITICAL_NAME_START
:
1761 case BUILT_IN_GOMP_CRITICAL_NAME_END
:
1762 case BUILT_IN_GOMP_LOOP_END
:
1763 case BUILT_IN_GOMP_LOOP_END_CANCEL
:
1764 case BUILT_IN_GOMP_ORDERED_START
:
1765 case BUILT_IN_GOMP_ORDERED_END
:
1766 case BUILT_IN_GOMP_SECTIONS_END
:
1767 case BUILT_IN_GOMP_SECTIONS_END_CANCEL
:
1768 case BUILT_IN_GOMP_SINGLE_COPY_START
:
1769 case BUILT_IN_GOMP_SINGLE_COPY_END
:
1773 /* Fallthru to general call handling. */;
  /* Check if base is a global static variable that is not read
     by the callee.  */
1778 if (callee
!= NULL_TREE
1779 && TREE_CODE (base
) == VAR_DECL
1780 && TREE_STATIC (base
))
1782 struct cgraph_node
*node
= cgraph_node::get (callee
);
1785 /* FIXME: Callee can be an OMP builtin that does not have a call graph
1786 node yet. We should enforce that there are nodes for all decls in the
1787 IL and remove this check instead. */
1789 && (not_read
= ipa_reference_get_not_read_global (node
))
1790 && bitmap_bit_p (not_read
, DECL_UID (base
)))
1794 /* Check if the base variable is call-used. */
1797 if (pt_solution_includes (gimple_call_use_set (call
), base
))
1800 else if ((TREE_CODE (base
) == MEM_REF
1801 || TREE_CODE (base
) == TARGET_MEM_REF
)
1802 && TREE_CODE (TREE_OPERAND (base
, 0)) == SSA_NAME
)
1804 struct ptr_info_def
*pi
= SSA_NAME_PTR_INFO (TREE_OPERAND (base
, 0));
1808 if (pt_solutions_intersect (gimple_call_use_set (call
), &pi
->pt
))
1814 /* Inspect call arguments for passed-by-value aliases. */
1816 for (i
= 0; i
< gimple_call_num_args (call
); ++i
)
1818 tree op
= gimple_call_arg (call
, i
);
1819 int flags
= gimple_call_arg_flags (call
, i
);
1821 if (flags
& EAF_UNUSED
)
1824 if (TREE_CODE (op
) == WITH_SIZE_EXPR
)
1825 op
= TREE_OPERAND (op
, 0);
1827 if (TREE_CODE (op
) != SSA_NAME
1828 && !is_gimple_min_invariant (op
))
1831 ao_ref_init (&r
, op
);
1832 if (refs_may_alias_p_1 (&r
, ref
, true))
1841 ref_maybe_used_by_call_p (gcall
*call
, ao_ref
*ref
)
1844 res
= ref_maybe_used_by_call_p_1 (call
, ref
);
1846 ++alias_stats
.ref_maybe_used_by_call_p_may_alias
;
1848 ++alias_stats
.ref_maybe_used_by_call_p_no_alias
;
1853 /* If the statement STMT may use the memory reference REF return
1854 true, otherwise return false. */
1857 ref_maybe_used_by_stmt_p (gimple stmt
, ao_ref
*ref
)
1859 if (is_gimple_assign (stmt
))
1863 /* All memory assign statements are single. */
1864 if (!gimple_assign_single_p (stmt
))
1867 rhs
= gimple_assign_rhs1 (stmt
);
1868 if (is_gimple_reg (rhs
)
1869 || is_gimple_min_invariant (rhs
)
1870 || gimple_assign_rhs_code (stmt
) == CONSTRUCTOR
)
1873 return refs_may_alias_p (rhs
, ref
);
1875 else if (is_gimple_call (stmt
))
1876 return ref_maybe_used_by_call_p (as_a
<gcall
*> (stmt
), ref
);
1877 else if (greturn
*return_stmt
= dyn_cast
<greturn
*> (stmt
))
1879 tree retval
= gimple_return_retval (return_stmt
);
1881 && TREE_CODE (retval
) != SSA_NAME
1882 && !is_gimple_min_invariant (retval
)
1883 && refs_may_alias_p (retval
, ref
))
1885 /* If ref escapes the function then the return acts as a use. */
1886 tree base
= ao_ref_base (ref
);
1889 else if (DECL_P (base
))
1890 return is_global_var (base
);
1891 else if (TREE_CODE (base
) == MEM_REF
1892 || TREE_CODE (base
) == TARGET_MEM_REF
)
1893 return ptr_deref_may_alias_global_p (TREE_OPERAND (base
, 0));
1901 ref_maybe_used_by_stmt_p (gimple stmt
, tree ref
)
1904 ao_ref_init (&r
, ref
);
1905 return ref_maybe_used_by_stmt_p (stmt
, &r
);
1908 /* If the call in statement CALL may clobber the memory reference REF
1909 return true, otherwise return false. */
1912 call_may_clobber_ref_p_1 (gcall
*call
, ao_ref
*ref
)
1917 /* If the call is pure or const it cannot clobber anything. */
1918 if (gimple_call_flags (call
)
1919 & (ECF_PURE
|ECF_CONST
|ECF_LOOPING_CONST_OR_PURE
|ECF_NOVOPS
))
1922 base
= ao_ref_base (ref
);
1926 if (TREE_CODE (base
) == SSA_NAME
1927 || CONSTANT_CLASS_P (base
))
1930 /* A call that is not without side-effects might involve volatile
1931 accesses and thus conflicts with all other volatile accesses. */
1932 if (ref
->volatile_p
)
1935 /* If the reference is based on a decl that is not aliased the call
1936 cannot possibly clobber it. */
1938 && !may_be_aliased (base
)
1939 /* But local non-readonly statics can be modified through recursion
1940 or the call may implement a threading barrier which we must
1941 treat as may-def. */
1942 && (TREE_READONLY (base
)
1943 || !is_global_var (base
)))
1946 callee
= gimple_call_fndecl (call
);
1948 /* Handle those builtin functions explicitly that do not act as
1949 escape points. See tree-ssa-structalias.c:find_func_aliases
1950 for the list of builtins we might need to handle here. */
1951 if (callee
!= NULL_TREE
1952 && DECL_BUILT_IN_CLASS (callee
) == BUILT_IN_NORMAL
)
1953 switch (DECL_FUNCTION_CODE (callee
))
1955 /* All the following functions clobber memory pointed to by
1956 their first argument. */
1957 case BUILT_IN_STRCPY
:
1958 case BUILT_IN_STRNCPY
:
1959 case BUILT_IN_MEMCPY
:
1960 case BUILT_IN_MEMMOVE
:
1961 case BUILT_IN_MEMPCPY
:
1962 case BUILT_IN_STPCPY
:
1963 case BUILT_IN_STPNCPY
:
1964 case BUILT_IN_STRCAT
:
1965 case BUILT_IN_STRNCAT
:
1966 case BUILT_IN_MEMSET
:
1967 case BUILT_IN_TM_MEMSET
:
1968 CASE_BUILT_IN_TM_STORE (1):
1969 CASE_BUILT_IN_TM_STORE (2):
1970 CASE_BUILT_IN_TM_STORE (4):
1971 CASE_BUILT_IN_TM_STORE (8):
1972 CASE_BUILT_IN_TM_STORE (FLOAT
):
1973 CASE_BUILT_IN_TM_STORE (DOUBLE
):
1974 CASE_BUILT_IN_TM_STORE (LDOUBLE
):
1975 CASE_BUILT_IN_TM_STORE (M64
):
1976 CASE_BUILT_IN_TM_STORE (M128
):
1977 CASE_BUILT_IN_TM_STORE (M256
):
1978 case BUILT_IN_TM_MEMCPY
:
1979 case BUILT_IN_TM_MEMMOVE
:
1982 tree size
= NULL_TREE
;
	  /* Don't pass in size for strncat, as the maximum size
	     is strlen (dest) + n + 1 instead of n, resp.
	     n + 1 at dest + strlen (dest), but strlen (dest) isn't
	     known.  */
1987 if (gimple_call_num_args (call
) == 3
1988 && DECL_FUNCTION_CODE (callee
) != BUILT_IN_STRNCAT
)
1989 size
= gimple_call_arg (call
, 2);
1990 ao_ref_init_from_ptr_and_size (&dref
,
1991 gimple_call_arg (call
, 0),
1993 return refs_may_alias_p_1 (&dref
, ref
, false);
1995 case BUILT_IN_STRCPY_CHK
:
1996 case BUILT_IN_STRNCPY_CHK
:
1997 case BUILT_IN_MEMCPY_CHK
:
1998 case BUILT_IN_MEMMOVE_CHK
:
1999 case BUILT_IN_MEMPCPY_CHK
:
2000 case BUILT_IN_STPCPY_CHK
:
2001 case BUILT_IN_STPNCPY_CHK
:
2002 case BUILT_IN_STRCAT_CHK
:
2003 case BUILT_IN_STRNCAT_CHK
:
2004 case BUILT_IN_MEMSET_CHK
:
2007 tree size
= NULL_TREE
;
	  /* Don't pass in size for __strncat_chk, as the maximum size
	     is strlen (dest) + n + 1 instead of n, resp.
	     n + 1 at dest + strlen (dest), but strlen (dest) isn't
	     known.  */
2012 if (gimple_call_num_args (call
) == 4
2013 && DECL_FUNCTION_CODE (callee
) != BUILT_IN_STRNCAT_CHK
)
2014 size
= gimple_call_arg (call
, 2);
2015 ao_ref_init_from_ptr_and_size (&dref
,
2016 gimple_call_arg (call
, 0),
2018 return refs_may_alias_p_1 (&dref
, ref
, false);
2020 case BUILT_IN_BCOPY
:
2023 tree size
= gimple_call_arg (call
, 2);
2024 ao_ref_init_from_ptr_and_size (&dref
,
2025 gimple_call_arg (call
, 1),
2027 return refs_may_alias_p_1 (&dref
, ref
, false);
2029 /* Allocating memory does not have any side-effects apart from
2030 being the definition point for the pointer. */
2031 case BUILT_IN_MALLOC
:
2032 case BUILT_IN_ALIGNED_ALLOC
:
2033 case BUILT_IN_CALLOC
:
2034 case BUILT_IN_STRDUP
:
2035 case BUILT_IN_STRNDUP
:
2036 /* Unix98 specifies that errno is set on allocation failure. */
2038 && targetm
.ref_may_alias_errno (ref
))
2041 case BUILT_IN_STACK_SAVE
:
2042 case BUILT_IN_ALLOCA
:
2043 case BUILT_IN_ALLOCA_WITH_ALIGN
:
2044 case BUILT_IN_ASSUME_ALIGNED
:
2046 /* But posix_memalign stores a pointer into the memory pointed to
2047 by its first argument. */
2048 case BUILT_IN_POSIX_MEMALIGN
:
2050 tree ptrptr
= gimple_call_arg (call
, 0);
2052 ao_ref_init_from_ptr_and_size (&dref
, ptrptr
,
2053 TYPE_SIZE_UNIT (ptr_type_node
));
2054 return (refs_may_alias_p_1 (&dref
, ref
, false)
2056 && targetm
.ref_may_alias_errno (ref
)));
2058 /* Freeing memory kills the pointed-to memory. More importantly
2059 the call has to serve as a barrier for moving loads and stores
2062 case BUILT_IN_VA_END
:
2064 tree ptr
= gimple_call_arg (call
, 0);
2065 return ptr_deref_may_alias_ref_p_1 (ptr
, ref
);
2067 /* Realloc serves both as allocation point and deallocation point. */
2068 case BUILT_IN_REALLOC
:
2070 tree ptr
= gimple_call_arg (call
, 0);
2071 /* Unix98 specifies that errno is set on allocation failure. */
2072 return ((flag_errno_math
2073 && targetm
.ref_may_alias_errno (ref
))
2074 || ptr_deref_may_alias_ref_p_1 (ptr
, ref
));
2076 case BUILT_IN_GAMMA_R
:
2077 case BUILT_IN_GAMMAF_R
:
2078 case BUILT_IN_GAMMAL_R
:
2079 case BUILT_IN_LGAMMA_R
:
2080 case BUILT_IN_LGAMMAF_R
:
2081 case BUILT_IN_LGAMMAL_R
:
2083 tree out
= gimple_call_arg (call
, 1);
2084 if (ptr_deref_may_alias_ref_p_1 (out
, ref
))
2086 if (flag_errno_math
)
2090 case BUILT_IN_FREXP
:
2091 case BUILT_IN_FREXPF
:
2092 case BUILT_IN_FREXPL
:
2094 case BUILT_IN_MODFF
:
2095 case BUILT_IN_MODFL
:
2097 tree out
= gimple_call_arg (call
, 1);
2098 return ptr_deref_may_alias_ref_p_1 (out
, ref
);
2100 case BUILT_IN_REMQUO
:
2101 case BUILT_IN_REMQUOF
:
2102 case BUILT_IN_REMQUOL
:
2104 tree out
= gimple_call_arg (call
, 2);
2105 if (ptr_deref_may_alias_ref_p_1 (out
, ref
))
2107 if (flag_errno_math
)
2111 case BUILT_IN_SINCOS
:
2112 case BUILT_IN_SINCOSF
:
2113 case BUILT_IN_SINCOSL
:
2115 tree sin
= gimple_call_arg (call
, 1);
2116 tree cos
= gimple_call_arg (call
, 2);
2117 return (ptr_deref_may_alias_ref_p_1 (sin
, ref
)
2118 || ptr_deref_may_alias_ref_p_1 (cos
, ref
));
2120 /* __sync_* builtins and some OpenMP builtins act as threading
2122 #undef DEF_SYNC_BUILTIN
2123 #define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
2124 #include "sync-builtins.def"
2125 #undef DEF_SYNC_BUILTIN
2126 case BUILT_IN_GOMP_ATOMIC_START
:
2127 case BUILT_IN_GOMP_ATOMIC_END
:
2128 case BUILT_IN_GOMP_BARRIER
:
2129 case BUILT_IN_GOMP_BARRIER_CANCEL
:
2130 case BUILT_IN_GOMP_TASKWAIT
:
2131 case BUILT_IN_GOMP_TASKGROUP_END
:
2132 case BUILT_IN_GOMP_CRITICAL_START
:
2133 case BUILT_IN_GOMP_CRITICAL_END
:
2134 case BUILT_IN_GOMP_CRITICAL_NAME_START
:
2135 case BUILT_IN_GOMP_CRITICAL_NAME_END
:
2136 case BUILT_IN_GOMP_LOOP_END
:
2137 case BUILT_IN_GOMP_LOOP_END_CANCEL
:
2138 case BUILT_IN_GOMP_ORDERED_START
:
2139 case BUILT_IN_GOMP_ORDERED_END
:
2140 case BUILT_IN_GOMP_SECTIONS_END
:
2141 case BUILT_IN_GOMP_SECTIONS_END_CANCEL
:
2142 case BUILT_IN_GOMP_SINGLE_COPY_START
:
2143 case BUILT_IN_GOMP_SINGLE_COPY_END
:
2146 /* Fallthru to general call handling. */;
2149 /* Check if base is a global static variable that is not written
2151 if (callee
!= NULL_TREE
2152 && TREE_CODE (base
) == VAR_DECL
2153 && TREE_STATIC (base
))
2155 struct cgraph_node
*node
= cgraph_node::get (callee
);
2159 && (not_written
= ipa_reference_get_not_written_global (node
))
2160 && bitmap_bit_p (not_written
, DECL_UID (base
)))
2164 /* Check if the base variable is call-clobbered. */
2166 return pt_solution_includes (gimple_call_clobber_set (call
), base
);
2167 else if ((TREE_CODE (base
) == MEM_REF
2168 || TREE_CODE (base
) == TARGET_MEM_REF
)
2169 && TREE_CODE (TREE_OPERAND (base
, 0)) == SSA_NAME
)
2171 struct ptr_info_def
*pi
= SSA_NAME_PTR_INFO (TREE_OPERAND (base
, 0));
2175 return pt_solutions_intersect (gimple_call_clobber_set (call
), &pi
->pt
);
/* If the call in statement CALL may clobber the memory reference REF
   return true, otherwise return false.  */

bool
call_may_clobber_ref_p (gcall *call, tree ref)
{
  bool res;
  ao_ref r;
  ao_ref_init (&r, ref);
  res = call_may_clobber_ref_p_1 (call, &r);
  if (res)
    ++alias_stats.call_may_clobber_ref_p_may_alias;
  else
    ++alias_stats.call_may_clobber_ref_p_no_alias;
  return res;
}

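/* A minimal usage sketch (illustrative only; CALL1, CALL2 and REF are
   placeholders for whatever a client pass has at hand, not names used by
   GCC): when the same reference is queried against several calls it is
   cheaper to build the ao_ref once and use the _1 worker directly instead
   of going through the convenience wrapper above each time:

     ao_ref r;
     ao_ref_init (&r, ref);
     if (call_may_clobber_ref_p_1 (call1, &r)
         || call_may_clobber_ref_p_1 (call2, &r))
       ;  /* Treat REF as possibly modified.  */
*/
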
/* If the statement STMT may clobber the memory reference REF return true,
   otherwise return false.  */

bool
stmt_may_clobber_ref_p_1 (gimple stmt, ao_ref *ref)
{
  if (is_gimple_call (stmt))
    {
      tree lhs = gimple_call_lhs (stmt);
      if (lhs
          && TREE_CODE (lhs) != SSA_NAME)
        {
          ao_ref r;
          ao_ref_init (&r, lhs);
          if (refs_may_alias_p_1 (ref, &r, true))
            return true;
        }

      return call_may_clobber_ref_p_1 (as_a <gcall *> (stmt), ref);
    }
  else if (gimple_assign_single_p (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);
      if (TREE_CODE (lhs) != SSA_NAME)
        {
          ao_ref r;
          ao_ref_init (&r, lhs);
          return refs_may_alias_p_1 (ref, &r, true);
        }
    }
  else if (gimple_code (stmt) == GIMPLE_ASM)
    return true;

  return false;
}

bool
stmt_may_clobber_ref_p (gimple stmt, tree ref)
{
  ao_ref r;
  ao_ref_init (&r, ref);
  return stmt_may_clobber_ref_p_1 (stmt, &r);
}

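/* A minimal usage sketch (illustrative only; BB and REF are placeholders
   supplied by the caller): scan a basic block for the first statement that
   may modify the memory designated by the tree reference REF.

     for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
          !gsi_end_p (gsi); gsi_next (&gsi))
       if (stmt_may_clobber_ref_p (gsi_stmt (gsi), ref))
         return gsi_stmt (gsi);
*/
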
/* If STMT kills the memory reference REF return true, otherwise
   return false.  */

bool
stmt_kills_ref_p (gimple stmt, ao_ref *ref)
{
  if (!ao_ref_base (ref))
    return false;

  if (gimple_has_lhs (stmt)
      && TREE_CODE (gimple_get_lhs (stmt)) != SSA_NAME
      /* The assignment is not necessarily carried out if it can throw
         and we can catch it in the current function where we could inspect
         the previous value.
         ??? We only need to care about the RHS throwing.  For aggregate
         assignments or similar calls and non-call exceptions the LHS
         might throw as well.  */
      && !stmt_can_throw_internal (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      /* If LHS is literally a base of the access we are done.  */
      if (ref->ref)
        {
          tree base = ref->ref;
          if (handled_component_p (base))
            {
              tree saved_lhs0 = NULL_TREE;
              if (handled_component_p (lhs))
                {
                  saved_lhs0 = TREE_OPERAND (lhs, 0);
                  TREE_OPERAND (lhs, 0) = integer_zero_node;
                }
              do
                {
                  /* Just compare the outermost handled component, if
                     they are equal we have found a possible common
                     base.  */
                  tree saved_base0 = TREE_OPERAND (base, 0);
                  TREE_OPERAND (base, 0) = integer_zero_node;
                  bool res = operand_equal_p (lhs, base, 0);
                  TREE_OPERAND (base, 0) = saved_base0;
                  if (res)
                    break;
                  /* Otherwise drop handled components of the access.  */
                  base = saved_base0;
                }
              while (handled_component_p (base));
              if (saved_lhs0)
                TREE_OPERAND (lhs, 0) = saved_lhs0;
            }
          /* Finally check if LHS is equal to the base candidate
             of the access.  */
          if (operand_equal_p (lhs, base, 0))
            return true;
        }

      /* Now look for non-literal equal bases with the restriction of
         handling constant offset and size.  */
      /* For a must-alias check we need to be able to constrain
         the access properly.  */
      if (ref->max_size == -1)
        return false;
      HOST_WIDE_INT size, offset, max_size, ref_offset = ref->offset;
      tree base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
      /* We can get MEM[symbol: sZ, index: D.8862_1] here,
         so base == ref->base does not always hold.  */
      if (base != ref->base)
        {
          /* If both base and ref->base are MEM_REFs, only compare the
             first operand, and if the second operand isn't equal constant,
             try to add the offsets into offset and ref_offset.  */
          if (TREE_CODE (base) == MEM_REF && TREE_CODE (ref->base) == MEM_REF
              && TREE_OPERAND (base, 0) == TREE_OPERAND (ref->base, 0))
            {
              if (!tree_int_cst_equal (TREE_OPERAND (base, 1),
                                       TREE_OPERAND (ref->base, 1)))
                {
                  offset_int off1 = mem_ref_offset (base);
                  off1 = wi::lshift (off1, LOG2_BITS_PER_UNIT);
                  off1 += offset;
                  offset_int off2 = mem_ref_offset (ref->base);
                  off2 = wi::lshift (off2, LOG2_BITS_PER_UNIT);
                  off2 += ref_offset;
                  if (wi::fits_shwi_p (off1) && wi::fits_shwi_p (off2))
                    {
                      offset = off1.to_shwi ();
                      ref_offset = off2.to_shwi ();
                    }
                  else
                    size = -1;
                }
            }
          else
            size = -1;
        }
      /* For a must-alias check we need to be able to constrain
         the access properly.  */
      if (size != -1 && size == max_size)
        {
          if (offset <= ref_offset
              && offset + size >= ref_offset + ref->max_size)
            return true;
        }
    }

  if (is_gimple_call (stmt))
    {
      tree callee = gimple_call_fndecl (stmt);
      if (callee != NULL_TREE
          && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
        switch (DECL_FUNCTION_CODE (callee))
          {
          case BUILT_IN_FREE:
            {
              tree ptr = gimple_call_arg (stmt, 0);
              tree base = ao_ref_base (ref);
              if (base && TREE_CODE (base) == MEM_REF
                  && TREE_OPERAND (base, 0) == ptr)
                return true;
              break;
            }

          case BUILT_IN_MEMCPY:
          case BUILT_IN_MEMPCPY:
          case BUILT_IN_MEMMOVE:
          case BUILT_IN_MEMSET:
          case BUILT_IN_MEMCPY_CHK:
          case BUILT_IN_MEMPCPY_CHK:
          case BUILT_IN_MEMMOVE_CHK:
          case BUILT_IN_MEMSET_CHK:
            {
              /* For a must-alias check we need to be able to constrain
                 the access properly.  */
              if (ref->max_size == -1)
                return false;
              tree dest = gimple_call_arg (stmt, 0);
              tree len = gimple_call_arg (stmt, 2);
              if (!tree_fits_shwi_p (len))
                return false;
              tree rbase = ref->base;
              offset_int roffset = ref->offset;
              ao_ref dref;
              ao_ref_init_from_ptr_and_size (&dref, dest, len);
              tree base = ao_ref_base (&dref);
              offset_int offset = dref.offset;
              if (!base || dref.size == -1)
                return false;
              if (TREE_CODE (base) == MEM_REF)
                {
                  if (TREE_CODE (rbase) != MEM_REF)
                    return false;
                  // Compare pointers.
                  offset += wi::lshift (mem_ref_offset (base),
                                        LOG2_BITS_PER_UNIT);
                  roffset += wi::lshift (mem_ref_offset (rbase),
                                         LOG2_BITS_PER_UNIT);
                  base = TREE_OPERAND (base, 0);
                  rbase = TREE_OPERAND (rbase, 0);
                }
              if (base == rbase
                  && wi::les_p (offset, roffset)
                  && wi::les_p (roffset + ref->max_size,
                                offset + wi::lshift (wi::to_offset (len),
                                                     LOG2_BITS_PER_UNIT)))
                return true;
              break;
            }

          case BUILT_IN_VA_END:
            {
              tree ptr = gimple_call_arg (stmt, 0);
              if (TREE_CODE (ptr) == ADDR_EXPR)
                {
                  tree base = ao_ref_base (ref);
                  if (TREE_OPERAND (ptr, 0) == base)
                    return true;
                }
              break;
            }

          default:;
          }
    }
  return false;
}

bool
stmt_kills_ref_p (gimple stmt, tree ref)
{
  ao_ref r;
  ao_ref_init (&r, ref);
  return stmt_kills_ref_p (stmt, &r);
}

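/* A minimal usage sketch (illustrative only, in the spirit of dead store
   elimination; STMT and LATER are placeholder statements): the store of
   STMT is fully overwritten by LATER if LATER kills the memory STMT's
   left-hand side refers to.  Whether anything in between reads that memory
   is a separate question the client still has to answer.

     ao_ref d;
     ao_ref_init (&d, gimple_get_lhs (stmt));
     if (stmt_kills_ref_p (later, &d))
       ;  /* STMT's store is dead with respect to LATER.  */
*/
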
/* Walk the virtual use-def chain of VUSE until hitting the virtual operand
   TARGET or a statement clobbering the memory reference REF in which
   case false is returned.  The walk starts with VUSE, one argument of PHI.  */

static bool
maybe_skip_until (gimple phi, tree target, ao_ref *ref,
                  tree vuse, unsigned int *cnt, bitmap *visited,
                  bool abort_on_visited,
                  void *(*translate)(ao_ref *, tree, void *, bool),
                  void *data)
{
  basic_block bb = gimple_bb (phi);

  if (!*visited)
    *visited = BITMAP_ALLOC (NULL);

  bitmap_set_bit (*visited, SSA_NAME_VERSION (PHI_RESULT (phi)));

  /* Walk until we hit the target.  */
  while (vuse != target)
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
      /* Recurse for PHI nodes.  */
      if (gimple_code (def_stmt) == GIMPLE_PHI)
        {
          /* An already visited PHI node ends the walk successfully.  */
          if (bitmap_bit_p (*visited, SSA_NAME_VERSION (PHI_RESULT (def_stmt))))
            return !abort_on_visited;
          vuse = get_continuation_for_phi (def_stmt, ref, cnt,
                                           visited, abort_on_visited,
                                           translate, data);
          if (!vuse)
            return false;
          continue;
        }
      else if (gimple_nop_p (def_stmt))
        return false;
      else
        {
          /* A clobbering statement or the end of the IL ends it failing.  */
          ++*cnt;
          if (stmt_may_clobber_ref_p_1 (def_stmt, ref)
              && !(translate
                   && (*translate) (ref, vuse, data, true) == NULL))
            return false;
        }
      /* If we reach a new basic-block see if we already skipped it
         in a previous walk that ended successfully.  */
      if (gimple_bb (def_stmt) != bb)
        {
          if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (vuse)))
            return !abort_on_visited;
          bb = gimple_bb (def_stmt);
        }
      vuse = gimple_vuse (def_stmt);
    }
  return true;
}

/* For two PHI arguments ARG0 and ARG1 try to skip non-aliasing code
   until we hit the phi argument definition that dominates the other one.
   Return that, or NULL_TREE if there is no such definition.  */

static tree
get_continuation_for_phi_1 (gimple phi, tree arg0, tree arg1,
                            ao_ref *ref, unsigned int *cnt,
                            bitmap *visited, bool abort_on_visited,
                            void *(*translate)(ao_ref *, tree, void *, bool),
                            void *data)
{
  gimple def0 = SSA_NAME_DEF_STMT (arg0);
  gimple def1 = SSA_NAME_DEF_STMT (arg1);
  tree common_vuse;

  if (arg0 == arg1)
    return arg0;
  else if (gimple_nop_p (def0)
           || (!gimple_nop_p (def1)
               && dominated_by_p (CDI_DOMINATORS,
                                  gimple_bb (def1), gimple_bb (def0))))
    {
      if (maybe_skip_until (phi, arg0, ref, arg1, cnt,
                            visited, abort_on_visited, translate, data))
        return arg0;
    }
  else if (gimple_nop_p (def1)
           || dominated_by_p (CDI_DOMINATORS,
                              gimple_bb (def0), gimple_bb (def1)))
    {
      if (maybe_skip_until (phi, arg1, ref, arg0, cnt,
                            visited, abort_on_visited, translate, data))
        return arg1;
    }
  /* Special case of a diamond:

       goto (cond) ? L1 : L2
       L1: store1 = ...    #MEM_2 = vuse(MEM_1)
           goto L3
       L2: store2 = ...    #MEM_3 = vuse(MEM_1)
       L3: MEM_4 = PHI<MEM_2, MEM_3>

     We were called with the PHI at L3, MEM_2 and MEM_3 don't
     dominate each other, but still we can easily skip this PHI node
     if we recognize that the vuse MEM operand is the same for both,
     and that we can skip both statements (they don't clobber us).
     This is still linear.  Don't use maybe_skip_until, that might
     potentially be slow.  */
  else if ((common_vuse = gimple_vuse (def0))
           && common_vuse == gimple_vuse (def1))
    {
      *cnt += 2;
      if ((!stmt_may_clobber_ref_p_1 (def0, ref)
           || (translate
               && (*translate) (ref, arg0, data, true) == NULL))
          && (!stmt_may_clobber_ref_p_1 (def1, ref)
              || (translate
                  && (*translate) (ref, arg1, data, true) == NULL)))
        return common_vuse;
    }

  return NULL_TREE;
}

/* Starting from a PHI node for the virtual operand of the memory reference
   REF find a continuation virtual operand that allows to continue walking
   statements dominating PHI skipping only statements that cannot possibly
   clobber REF.  Increments *CNT for each alias disambiguation done.
   Returns NULL_TREE if no suitable virtual operand can be found.  */

tree
get_continuation_for_phi (gimple phi, ao_ref *ref,
                          unsigned int *cnt, bitmap *visited,
                          bool abort_on_visited,
                          void *(*translate)(ao_ref *, tree, void *, bool),
                          void *data)
{
  unsigned nargs = gimple_phi_num_args (phi);

  /* Through a single-argument PHI we can simply look through.  */
  if (nargs == 1)
    return PHI_ARG_DEF (phi, 0);

  /* For two or more arguments try to pairwise skip non-aliasing code
     until we hit the phi argument definition that dominates the other one.  */
  else if (nargs >= 2)
    {
      tree arg0, arg1;
      unsigned i;

      /* Find a candidate for the virtual operand which definition
         dominates those of all others.  */
      arg0 = PHI_ARG_DEF (phi, 0);
      if (!SSA_NAME_IS_DEFAULT_DEF (arg0))
        for (i = 1; i < nargs; ++i)
          {
            arg1 = PHI_ARG_DEF (phi, i);
            if (SSA_NAME_IS_DEFAULT_DEF (arg1))
              {
                arg0 = arg1;
                break;
              }
            if (dominated_by_p (CDI_DOMINATORS,
                                gimple_bb (SSA_NAME_DEF_STMT (arg0)),
                                gimple_bb (SSA_NAME_DEF_STMT (arg1))))
              arg0 = arg1;
          }

      /* Then pairwise reduce against the found candidate.  */
      for (i = 0; i < nargs; ++i)
        {
          arg1 = PHI_ARG_DEF (phi, i);
          arg0 = get_continuation_for_phi_1 (phi, arg0, arg1, ref,
                                             cnt, visited, abort_on_visited,
                                             translate, data);
          if (!arg0)
            return NULL_TREE;
        }

      return arg0;
    }

  return NULL_TREE;
}

/* Based on the memory reference REF and its virtual use VUSE call
   WALKER for each virtual use that is equivalent to VUSE, including VUSE
   itself.  That is, for each virtual use for which its defining statement
   does not clobber REF.

   WALKER is called with REF, the current virtual use and DATA.  If
   WALKER returns non-NULL the walk stops and its result is returned.
   At the end of a non-successful walk NULL is returned.

   TRANSLATE if non-NULL is called with a pointer to REF, the virtual
   use which definition is a statement that may clobber REF and DATA.
   If TRANSLATE returns (void *)-1 the walk stops and NULL is returned.
   If TRANSLATE returns non-NULL the walk stops and its result is returned.
   If TRANSLATE returns NULL the walk continues and TRANSLATE is supposed
   to adjust REF and *DATA to make that valid.

   VALUEIZE if non-NULL is called with the next VUSE that is considered
   and return value is substituted for that.  This can be used to
   implement optimistic value-numbering for example.  Note that the
   VUSE argument is assumed to be valueized already.

   TODO: Cache the vector of equivalent vuses per ref, vuse pair.  */

void *
walk_non_aliased_vuses (ao_ref *ref, tree vuse,
                        void *(*walker)(ao_ref *, tree, unsigned int, void *),
                        void *(*translate)(ao_ref *, tree, void *, bool),
                        tree (*valueize)(tree),
                        void *data)
{
  bitmap visited = NULL;
  void *res;
  unsigned int cnt = 0;
  bool translated = false;

  timevar_push (TV_ALIAS_STMT_WALK);

  do
    {
      gimple def_stmt;

      /* ???  Do we want to account this to TV_ALIAS_STMT_WALK?  */
      res = (*walker) (ref, vuse, cnt, data);
      /* Abort walk.  */
      if (res == (void *)-1)
        {
          res = NULL;
          break;
        }
      /* Lookup succeeded.  */
      else if (res != NULL)
        break;

      if (valueize)
        vuse = valueize (vuse);
      def_stmt = SSA_NAME_DEF_STMT (vuse);
      if (gimple_nop_p (def_stmt))
        break;
      else if (gimple_code (def_stmt) == GIMPLE_PHI)
        vuse = get_continuation_for_phi (def_stmt, ref, &cnt,
                                         &visited, translated, translate, data);
      else
        {
          cnt++;
          if (stmt_may_clobber_ref_p_1 (def_stmt, ref))
            {
              if (!translate)
                break;
              res = (*translate) (ref, vuse, data, false);
              /* Failed lookup and translation.  */
              if (res == (void *)-1)
                {
                  res = NULL;
                  break;
                }
              /* Lookup succeeded.  */
              else if (res != NULL)
                break;
              /* Translation succeeded, continue walking.  */
              translated = true;
            }
          vuse = gimple_vuse (def_stmt);
        }
    }
  while (vuse);

  if (visited)
    BITMAP_FREE (visited);

  timevar_pop (TV_ALIAS_STMT_WALK);

  return res;
}

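/* A minimal walker sketch (illustrative only; the callback and its logic are
   invented for this example rather than taken from an existing GCC pass):
   stop the walk at the first virtual use whose defining statement stores
   exactly to the looked-up reference, relying on the disambiguations above
   to skip all provably independent stores.

     static void *
     find_defining_store (ao_ref *ref, tree vuse, unsigned int, void *)
     {
       gimple def = SSA_NAME_DEF_STMT (vuse);
       if (ref->ref
           && gimple_assign_single_p (def)
           && operand_equal_p (gimple_assign_lhs (def), ref->ref, 0))
         return def;
       return NULL;
     }

   called as

     walk_non_aliased_vuses (&r, vuse, find_defining_store,
                             NULL, NULL, NULL);

   Passing NULL for TRANSLATE makes the walk stop at the first statement
   that may clobber R but is not such an exact store.  */
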
/* Based on the memory reference REF call WALKER for each vdef which
   defining statement may clobber REF, starting with VDEF.  If REF
   is NULL_TREE, each defining statement is visited.

   WALKER is called with REF, the current vdef and DATA.  If WALKER
   returns true the walk is stopped, otherwise it continues.

   If function entry is reached, FUNCTION_ENTRY_REACHED is set to true.
   The pointer may be NULL and then we do not track this information.

   At PHI nodes walk_aliased_vdefs forks into one walk for each
   PHI argument (but only one walk continues on merge points), the
   return value is true if any of the walks was successful.

   The function returns the number of statements walked.  */

static unsigned int
walk_aliased_vdefs_1 (ao_ref *ref, tree vdef,
                      bool (*walker)(ao_ref *, tree, void *), void *data,
                      bitmap *visited, unsigned int cnt,
                      bool *function_entry_reached)
{
  do
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (vdef);

      if (*visited
          && !bitmap_set_bit (*visited, SSA_NAME_VERSION (vdef)))
        return cnt;

      if (gimple_nop_p (def_stmt))
        {
          if (function_entry_reached)
            *function_entry_reached = true;
          return cnt;
        }
      else if (gimple_code (def_stmt) == GIMPLE_PHI)
        {
          unsigned i;
          if (!*visited)
            *visited = BITMAP_ALLOC (NULL);
          for (i = 0; i < gimple_phi_num_args (def_stmt); ++i)
            cnt += walk_aliased_vdefs_1 (ref, gimple_phi_arg_def (def_stmt, i),
                                         walker, data, visited, 0,
                                         function_entry_reached);
          return cnt;
        }

      /* ???  Do we want to account this to TV_ALIAS_STMT_WALK?  */
      cnt++;
      if ((!ref
           || stmt_may_clobber_ref_p_1 (def_stmt, ref))
          && (*walker) (ref, vdef, data))
        return cnt;

      vdef = gimple_vuse (def_stmt);
    }
  while (1);
}

unsigned int
walk_aliased_vdefs (ao_ref *ref, tree vdef,
                    bool (*walker)(ao_ref *, tree, void *), void *data,
                    bitmap *visited,
                    bool *function_entry_reached)
{
  bitmap local_visited = NULL;
  unsigned int ret;

  timevar_push (TV_ALIAS_STMT_WALK);

  if (function_entry_reached)
    *function_entry_reached = false;

  ret = walk_aliased_vdefs_1 (ref, vdef, walker, data,
                              visited ? visited : &local_visited, 0,
                              function_entry_reached);
  if (local_visited)
    BITMAP_FREE (local_visited);

  timevar_pop (TV_ALIAS_STMT_WALK);

  return ret;
}

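/* A minimal callback sketch (illustrative only; the counting walker is made
   up for this example): count how many statements may clobber the reference
   R when walking its virtual def chain backwards from STMT.

     static bool
     count_clobbers (ao_ref *, tree, void *data)
     {
       ++*(unsigned *) data;
       return false;   /* Keep walking.  */
     }

   called as

     unsigned n = 0;
     walk_aliased_vdefs (&r, gimple_vuse (stmt), count_clobbers, &n,
                         NULL, NULL);
*/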