/* Alias analysis for trees.
   Copyright (C) 2004-2015 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "double-int.h"
#include "fold-const.h"
#include "hard-reg-set.h"
#include "dominance.h"
#include "basic-block.h"
#include "timevar.h"	/* for TV_ALIAS_STMT_WALK */
#include "dumpfile.h"
#include "langhooks.h"
#include "tree-pretty-print.h"
#include "alias.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "gimple.h"
#include "gimple-ssa.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "statistics.h"
#include "fixed-value.h"
#include "insn-config.h"
#include "tree-dfa.h"
#include "tree-inline.h"
#include "alloc-pool.h"
#include "target.h"
#include "plugin-api.h"
#include "cgraph.h"
#include "ipa-reference.h"
/* Broad overview of how alias analysis on gimple works:

   Statements clobbering or using memory are linked through the
   virtual operand factored use-def chain.  The virtual operand
   is unique per function, its symbol is accessible via gimple_vop (cfun).
   Virtual operands are used for efficiently walking memory statements
   in the gimple IL and are useful for things like value-numbering as
   a generation count for memory references.

   SSA_NAME pointers may have associated points-to information
   accessible via the SSA_NAME_PTR_INFO macro.  Flow-insensitive
   points-to information is (re-)computed by the TODO_rebuild_alias
   pass manager todo.  Points-to information is also used for more
   precise tracking of call-clobbered and call-used variables and
   related disambiguations.

   This file contains functions for disambiguating memory references,
   the so-called alias-oracle, and tools for walking of the gimple IL.

   The main alias-oracle entry-points are

   bool stmt_may_clobber_ref_p (gimple, tree)

     This function queries if a statement may invalidate (parts of)
     the memory designated by the reference tree argument.

   bool ref_maybe_used_by_stmt_p (gimple, tree)

     This function queries if a statement may need (parts of) the
     memory designated by the reference tree argument.

   There are variants of these functions that only handle the call
   part of a statement, call_may_clobber_ref_p and ref_maybe_used_by_call_p.
   Note that these do not disambiguate against a possible call lhs.

   bool refs_may_alias_p (tree, tree)

     This function tries to disambiguate two reference trees.

   bool ptr_deref_may_alias_global_p (tree)

     This function queries if dereferencing a pointer variable may
     alias global memory.

   More low-level disambiguators are available and documented in
   this file.  Low-level disambiguators dealing with points-to
   information are in tree-ssa-structalias.c.  */
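
/* As a small illustration (not part of the oracle itself): a client
   pass that wants to know whether a statement interferes with a given
   memory reference would typically combine the two main entry-points
   documented above.  The helper below is only a sketch; STMT and REF
   stand for whatever gimple statement and reference tree the caller
   has at hand:

     static bool
     stmt_interferes_with_ref_p (gimple stmt, tree ref)
     {
       return (stmt_may_clobber_ref_p (stmt, ref)
	       || ref_maybe_used_by_stmt_p (stmt, ref));
     }
*/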

/* Query statistics for the different low-level disambiguators.
   A high-level query may trigger multiple of them.  */

static struct {
  unsigned HOST_WIDE_INT refs_may_alias_p_may_alias;
  unsigned HOST_WIDE_INT refs_may_alias_p_no_alias;
  unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_may_alias;
  unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_no_alias;
  unsigned HOST_WIDE_INT call_may_clobber_ref_p_may_alias;
  unsigned HOST_WIDE_INT call_may_clobber_ref_p_no_alias;
} alias_stats;

void
dump_alias_stats (FILE *s)
{
  fprintf (s, "\nAlias oracle query stats:\n");
  fprintf (s, "  refs_may_alias_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.refs_may_alias_p_no_alias,
	   alias_stats.refs_may_alias_p_no_alias
	   + alias_stats.refs_may_alias_p_may_alias);
  fprintf (s, "  ref_maybe_used_by_call_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.ref_maybe_used_by_call_p_no_alias,
	   alias_stats.ref_maybe_used_by_call_p_no_alias
	   + alias_stats.ref_maybe_used_by_call_p_may_alias);
  fprintf (s, "  call_may_clobber_ref_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.call_may_clobber_ref_p_no_alias,
	   alias_stats.call_may_clobber_ref_p_no_alias
	   + alias_stats.call_may_clobber_ref_p_may_alias);
}

/* Return true if dereferencing PTR may alias a global variable.  */

bool
ptr_deref_may_alias_global_p (tree ptr)
{
  struct ptr_info_def *pi;

  /* If we end up with a pointer constant here that may point
     to global memory.  */
  if (TREE_CODE (ptr) != SSA_NAME)
    return true;

  pi = SSA_NAME_PTR_INFO (ptr);

  /* If we do not have points-to information for this variable,
     we have to punt.  */
  if (!pi)
    return true;

  /* ??? This does not use TBAA to prune globals ptr may not access.  */
  return pt_solution_includes_global (&pi->pt);
}

/* Return true if dereferencing PTR may alias DECL.
   The caller is responsible for applying TBAA to see if PTR
   may access DECL at all.  */

static bool
ptr_deref_may_alias_decl_p (tree ptr, tree decl)
{
  struct ptr_info_def *pi;

  /* Conversions are irrelevant for points-to information and
     data-dependence analysis can feed us those.  */
  STRIP_NOPS (ptr);

  /* Anything we do not explicitly handle aliases.  */
  if ((TREE_CODE (ptr) != SSA_NAME
       && TREE_CODE (ptr) != ADDR_EXPR
       && TREE_CODE (ptr) != POINTER_PLUS_EXPR)
      || !POINTER_TYPE_P (TREE_TYPE (ptr))
      || (TREE_CODE (decl) != VAR_DECL
	  && TREE_CODE (decl) != PARM_DECL
	  && TREE_CODE (decl) != RESULT_DECL))
    return true;

  /* Disregard pointer offsetting.  */
  if (TREE_CODE (ptr) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr = TREE_OPERAND (ptr, 0);
	}
      while (TREE_CODE (ptr) == POINTER_PLUS_EXPR);
      return ptr_deref_may_alias_decl_p (ptr, decl);
    }

  /* ADDR_EXPR pointers either just offset another pointer or directly
     specify the pointed-to set.  */
  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	ptr = TREE_OPERAND (base, 0);
      else if (base
	       && DECL_P (base))
	return base == decl;
      else if (base
	       && CONSTANT_CLASS_P (base))
	return false;
      else
	return true;
    }

  /* Non-aliased variables cannot be pointed to.  */
  if (!may_be_aliased (decl))
    return false;

  /* If we do not have useful points-to information for this pointer
     we cannot disambiguate anything else.  */
  pi = SSA_NAME_PTR_INFO (ptr);
  if (!pi)
    return true;

  return pt_solution_includes (&pi->pt, decl);
}

/* Return true if dereferenced PTR1 and PTR2 may alias.
   The caller is responsible for applying TBAA to see if accesses
   through PTR1 and PTR2 may conflict at all.  */

bool
ptr_derefs_may_alias_p (tree ptr1, tree ptr2)
{
  struct ptr_info_def *pi1, *pi2;

  /* Conversions are irrelevant for points-to information and
     data-dependence analysis can feed us those.  */
  STRIP_NOPS (ptr1);
  STRIP_NOPS (ptr2);

  /* Disregard pointer offsetting.  */
  if (TREE_CODE (ptr1) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr1 = TREE_OPERAND (ptr1, 0);
	}
      while (TREE_CODE (ptr1) == POINTER_PLUS_EXPR);
      return ptr_derefs_may_alias_p (ptr1, ptr2);
    }
  if (TREE_CODE (ptr2) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr2 = TREE_OPERAND (ptr2, 0);
	}
      while (TREE_CODE (ptr2) == POINTER_PLUS_EXPR);
      return ptr_derefs_may_alias_p (ptr1, ptr2);
    }

  /* ADDR_EXPR pointers either just offset another pointer or directly
     specify the pointed-to set.  */
  if (TREE_CODE (ptr1) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr1, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	return ptr_derefs_may_alias_p (TREE_OPERAND (base, 0), ptr2);
      else if (base
	       && DECL_P (base))
	return ptr_deref_may_alias_decl_p (ptr2, base);
      else
	return true;
    }
  if (TREE_CODE (ptr2) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr2, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	return ptr_derefs_may_alias_p (ptr1, TREE_OPERAND (base, 0));
      else if (base
	       && DECL_P (base))
	return ptr_deref_may_alias_decl_p (ptr1, base);
      else
	return true;
    }

  /* From here we require SSA name pointers.  Anything else aliases.  */
  if (TREE_CODE (ptr1) != SSA_NAME
      || TREE_CODE (ptr2) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (ptr1))
      || !POINTER_TYPE_P (TREE_TYPE (ptr2)))
    return true;

  /* We may end up with two empty points-to solutions for two same pointers.
     In this case we still want to say both pointers alias, so shortcut
     that here.  */
  if (ptr1 == ptr2)
    return true;

  /* If we do not have useful points-to information for either pointer
     we cannot disambiguate anything else.  */
  pi1 = SSA_NAME_PTR_INFO (ptr1);
  pi2 = SSA_NAME_PTR_INFO (ptr2);
  if (!pi1 || !pi2)
    return true;

  /* ??? This does not use TBAA to prune decls from the intersection
     that not both pointers may access.  */
  return pt_solutions_intersect (&pi1->pt, &pi2->pt);
}

/* Return true if dereferencing PTR may alias *REF.
   The caller is responsible for applying TBAA to see if PTR
   may access *REF at all.  */

static bool
ptr_deref_may_alias_ref_p_1 (tree ptr, ao_ref *ref)
{
  tree base = ao_ref_base (ref);

  if (TREE_CODE (base) == MEM_REF
      || TREE_CODE (base) == TARGET_MEM_REF)
    return ptr_derefs_may_alias_p (ptr, TREE_OPERAND (base, 0));
  else if (DECL_P (base))
    return ptr_deref_may_alias_decl_p (ptr, base);

  return true;
}

/* Returns whether reference REF to BASE may refer to global memory.  */

static bool
ref_may_alias_global_p_1 (tree base)
{
  if (DECL_P (base))
    return is_global_var (base);
  else if (TREE_CODE (base) == MEM_REF
	   || TREE_CODE (base) == TARGET_MEM_REF)
    return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
  return true;
}

bool
ref_may_alias_global_p (ao_ref *ref)
{
  tree base = ao_ref_base (ref);
  return ref_may_alias_global_p_1 (base);
}

bool
ref_may_alias_global_p (tree ref)
{
  tree base = get_base_address (ref);
  return ref_may_alias_global_p_1 (base);
}

/* Return true if STMT may clobber global memory.  */

bool
stmt_may_clobber_global_p (gimple stmt)
{
  tree lhs;

  if (!gimple_vdef (stmt))
    return false;

  /* ??? We can ask the oracle whether an artificial pointer
     dereference with a pointer with points-to information covering
     all global memory (what about non-address taken memory?) may be
     clobbered by this call.  As there is at the moment no convenient
     way of doing that without generating garbage do some manual
     checking instead.
     ??? We could make a NULL ao_ref argument to the various
     predicates special, meaning any global memory.  */

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      lhs = gimple_assign_lhs (stmt);
      return (TREE_CODE (lhs) != SSA_NAME
	      && ref_may_alias_global_p (lhs));
    case GIMPLE_CALL:
      return true;
    default:
      return true;
    }
}

/* Dump alias information on FILE.  */

void
dump_alias_info (FILE *file)
{
  unsigned i;
  const char *funcname
    = lang_hooks.decl_printable_name (current_function_decl, 2);
  tree var;

  fprintf (file, "\n\nAlias information for %s\n\n", funcname);

  fprintf (file, "Aliased symbols\n\n");

  FOR_EACH_LOCAL_DECL (cfun, i, var)
    {
      if (may_be_aliased (var))
	dump_variable (file, var);
    }

  fprintf (file, "\nCall clobber information\n");

  fprintf (file, "\nESCAPED");
  dump_points_to_solution (file, &cfun->gimple_df->escaped);

  fprintf (file, "\n\nFlow-insensitive points-to information\n\n");

  for (i = 1; i < num_ssa_names; i++)
    {
      tree ptr = ssa_name (i);
      struct ptr_info_def *pi;

      if (ptr == NULL_TREE
	  || !POINTER_TYPE_P (TREE_TYPE (ptr))
	  || SSA_NAME_IN_FREE_LIST (ptr))
	continue;

      pi = SSA_NAME_PTR_INFO (ptr);
      if (pi)
	dump_points_to_info_for (file, ptr);
    }

  fprintf (file, "\n");
}


/* Dump alias information on stderr.  */

DEBUG_FUNCTION void
debug_alias_info (void)
{
  dump_alias_info (stderr);
}

/* Dump the points-to set *PT into FILE.  */

void
dump_points_to_solution (FILE *file, struct pt_solution *pt)
{
  if (pt->anything)
    fprintf (file, ", points-to anything");

  if (pt->nonlocal)
    fprintf (file, ", points-to non-local");

  if (pt->escaped)
    fprintf (file, ", points-to escaped");

  if (pt->ipa_escaped)
    fprintf (file, ", points-to unit escaped");

  if (pt->null)
    fprintf (file, ", points-to NULL");

  if (pt->vars)
    {
      fprintf (file, ", points-to vars: ");
      dump_decl_set (file, pt->vars);
      if (pt->vars_contains_nonlocal
	  && pt->vars_contains_escaped_heap)
	fprintf (file, " (nonlocal, escaped heap)");
      else if (pt->vars_contains_nonlocal
	       && pt->vars_contains_escaped)
	fprintf (file, " (nonlocal, escaped)");
      else if (pt->vars_contains_nonlocal)
	fprintf (file, " (nonlocal)");
      else if (pt->vars_contains_escaped_heap)
	fprintf (file, " (escaped heap)");
      else if (pt->vars_contains_escaped)
	fprintf (file, " (escaped)");
    }
}


/* Unified dump function for pt_solution.  */

DEBUG_FUNCTION void
debug (pt_solution &ref)
{
  dump_points_to_solution (stderr, &ref);
}

DEBUG_FUNCTION void
debug (pt_solution *ptr)
{
  if (ptr)
    debug (*ptr);
  else
    fprintf (stderr, "<nil>\n");
}

/* Dump points-to information for SSA_NAME PTR into FILE.  */

void
dump_points_to_info_for (FILE *file, tree ptr)
{
  struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);

  print_generic_expr (file, ptr, dump_flags);

  if (pi)
    dump_points_to_solution (file, &pi->pt);
  else
    fprintf (file, ", points-to anything");

  fprintf (file, "\n");
}


/* Dump points-to information for VAR into stderr.  */

DEBUG_FUNCTION void
debug_points_to_info_for (tree var)
{
  dump_points_to_info_for (stderr, var);
}

/* Initializes the alias-oracle reference representation *R from REF.  */

void
ao_ref_init (ao_ref *r, tree ref)
{
  r->ref = ref;
  r->base = NULL_TREE;
  r->offset = 0;
  r->size = -1;
  r->max_size = -1;
  r->ref_alias_set = -1;
  r->base_alias_set = -1;
  r->volatile_p = ref ? TREE_THIS_VOLATILE (ref) : false;
}

/* Returns the base object of the memory reference *REF.  */

tree
ao_ref_base (ao_ref *ref)
{
  if (ref->base)
    return ref->base;
  ref->base = get_ref_base_and_extent (ref->ref, &ref->offset, &ref->size,
				       &ref->max_size);
  return ref->base;
}

/* Returns the base object alias set of the memory reference *REF.  */

alias_set_type
ao_ref_base_alias_set (ao_ref *ref)
{
  tree base_ref;
  if (ref->base_alias_set != -1)
    return ref->base_alias_set;
  if (!ref->ref)
    return 0;
  base_ref = ref->ref;
  while (handled_component_p (base_ref))
    base_ref = TREE_OPERAND (base_ref, 0);
  ref->base_alias_set = get_alias_set (base_ref);
  return ref->base_alias_set;
}

/* Returns the reference alias set of the memory reference *REF.  */

alias_set_type
ao_ref_alias_set (ao_ref *ref)
{
  if (ref->ref_alias_set != -1)
    return ref->ref_alias_set;
  ref->ref_alias_set = get_alias_set (ref->ref);
  return ref->ref_alias_set;
}

/* Init an alias-oracle reference representation from a gimple pointer
   PTR and a gimple size SIZE in bytes.  If SIZE is NULL_TREE then the
   size is assumed to be unknown.  The access is assumed to be only
   to or after the pointer target, not before it.  */

void
ao_ref_init_from_ptr_and_size (ao_ref *ref, tree ptr, tree size)
{
  HOST_WIDE_INT t, size_hwi, extra_offset = 0;
  ref->ref = NULL_TREE;
  if (TREE_CODE (ptr) == SSA_NAME)
    {
      gimple stmt = SSA_NAME_DEF_STMT (ptr);
      if (gimple_assign_single_p (stmt)
	  && gimple_assign_rhs_code (stmt) == ADDR_EXPR)
	ptr = gimple_assign_rhs1 (stmt);
      else if (is_gimple_assign (stmt)
	       && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
	       && TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST)
	{
	  ptr = gimple_assign_rhs1 (stmt);
	  extra_offset = BITS_PER_UNIT
			 * int_cst_value (gimple_assign_rhs2 (stmt));
	}
    }

  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      ref->base = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &t);
      if (ref->base)
	ref->offset = BITS_PER_UNIT * t;
      else
	{
	  size = NULL_TREE;
	  ref->offset = 0;
	  ref->base = get_base_address (TREE_OPERAND (ptr, 0));
	}
    }
  else
    {
      ref->base = build2 (MEM_REF, char_type_node,
			  ptr, null_pointer_node);
      ref->offset = 0;
    }
  ref->offset += extra_offset;
  if (size
      && tree_fits_shwi_p (size)
      && (size_hwi = tree_to_shwi (size)) <= HOST_WIDE_INT_MAX / BITS_PER_UNIT)
    ref->max_size = ref->size = size_hwi * BITS_PER_UNIT;
  else
    ref->max_size = ref->size = -1;
  ref->ref_alias_set = 0;
  ref->base_alias_set = 0;
  ref->volatile_p = false;
}

/* Return 1 if TYPE1 and TYPE2 are to be considered equivalent for the
   purpose of TBAA.  Return 0 if they are distinct and -1 if we cannot
   decide.  */

static int
same_type_for_tbaa (tree type1, tree type2)
{
  type1 = TYPE_MAIN_VARIANT (type1);
  type2 = TYPE_MAIN_VARIANT (type2);

  /* If we would have to do structural comparison bail out.  */
  if (TYPE_STRUCTURAL_EQUALITY_P (type1)
      || TYPE_STRUCTURAL_EQUALITY_P (type2))
    return -1;

  /* Compare the canonical types.  */
  if (TYPE_CANONICAL (type1) == TYPE_CANONICAL (type2))
    return 1;

  /* ??? Array types are not properly unified in all cases as we have
     spurious changes in the index types for example.  Removing this
     causes all sorts of problems with the Fortran frontend.  */
  if (TREE_CODE (type1) == ARRAY_TYPE
      && TREE_CODE (type2) == ARRAY_TYPE)
    return -1;

  /* ??? In Ada, an lvalue of an unconstrained type can be used to access an
     object of one of its constrained subtypes, e.g. when a function with an
     unconstrained parameter passed by reference is called on an object and
     inlined.  But, even in the case of a fixed size, type and subtypes are
     not equivalent enough as to share the same TYPE_CANONICAL, since this
     would mean that conversions between them are useless, whereas they are
     not (e.g. type and subtypes can have different modes).  So, in the end,
     they are only guaranteed to have the same alias set.  */
  if (get_alias_set (type1) == get_alias_set (type2))
    return -1;

  /* The types are known to be not equal.  */
  return 0;
}

/* Determine if the two component references REF1 and REF2 which are
   based on access types TYPE1 and TYPE2 and of which at least one is based
   on an indirect reference may alias.  REF2 is the only one that can
   be a decl in which case REF2_IS_DECL is true.
   REF1_ALIAS_SET, BASE1_ALIAS_SET, REF2_ALIAS_SET and BASE2_ALIAS_SET
   are the respective alias sets.  */

static bool
aliasing_component_refs_p (tree ref1,
			   alias_set_type ref1_alias_set,
			   alias_set_type base1_alias_set,
			   HOST_WIDE_INT offset1, HOST_WIDE_INT max_size1,
			   tree ref2,
			   alias_set_type ref2_alias_set,
			   alias_set_type base2_alias_set,
			   HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2,
			   bool ref2_is_decl)
{
  /* If one reference is a component reference through pointers try to find a
     common base and apply offset based disambiguation.  This handles
     for example
       struct A { int i; int j; } *q;
       struct B { struct A a; int k; } *p;
     disambiguating q->i and p->a.j.  */
  tree base1, base2;
  tree type1, type2;
  tree *refp;
  int same_p;

  /* Choose bases and base types to search for.  */
  base1 = ref1;
  while (handled_component_p (base1))
    base1 = TREE_OPERAND (base1, 0);
  type1 = TREE_TYPE (base1);
  base2 = ref2;
  while (handled_component_p (base2))
    base2 = TREE_OPERAND (base2, 0);
  type2 = TREE_TYPE (base2);

  /* Now search for the type1 in the access path of ref2.  This
     would be a common base for doing offset based disambiguation on.  */
  refp = &ref2;
  while (handled_component_p (*refp)
	 && same_type_for_tbaa (TREE_TYPE (*refp), type1) == 0)
    refp = &TREE_OPERAND (*refp, 0);
  same_p = same_type_for_tbaa (TREE_TYPE (*refp), type1);
  /* If we couldn't compare types we have to bail out.  */
  if (same_p == -1)
    return true;
  else if (same_p == 1)
    {
      HOST_WIDE_INT offadj, sztmp, msztmp;
      get_ref_base_and_extent (*refp, &offadj, &sztmp, &msztmp);
      offset2 -= offadj;
      get_ref_base_and_extent (base1, &offadj, &sztmp, &msztmp);
      offset1 -= offadj;
      return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
    }
  /* If we didn't find a common base, try the other way around.  */
  refp = &ref1;
  while (handled_component_p (*refp)
	 && same_type_for_tbaa (TREE_TYPE (*refp), type2) == 0)
    refp = &TREE_OPERAND (*refp, 0);
  same_p = same_type_for_tbaa (TREE_TYPE (*refp), type2);
  /* If we couldn't compare types we have to bail out.  */
  if (same_p == -1)
    return true;
  else if (same_p == 1)
    {
      HOST_WIDE_INT offadj, sztmp, msztmp;
      get_ref_base_and_extent (*refp, &offadj, &sztmp, &msztmp);
      offset1 -= offadj;
      get_ref_base_and_extent (base2, &offadj, &sztmp, &msztmp);
      offset2 -= offadj;
      return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
    }

  /* If we have two type access paths B1.path1 and B2.path2 they may
     only alias if either B1 is in B2.path2 or B2 is in B1.path1.
     But we can still have a path that goes B1.path1...B2.path2 with
     a part that we do not see.  So we can only disambiguate now
     if there is no B2 in the tail of path1 and no B1 on the
     tail of path2.  */
  if (base1_alias_set == ref2_alias_set
      || alias_set_subset_of (base1_alias_set, ref2_alias_set))
    return true;
  /* If this is ptr vs. decl then we know there is no ptr ... decl path.  */
  if (!ref2_is_decl)
    return (base2_alias_set == ref1_alias_set
	    || alias_set_subset_of (base2_alias_set, ref1_alias_set));
  return false;
}

/* Return true if we can determine that component references REF1 and REF2,
   that are within a common DECL, cannot overlap.  */

static bool
nonoverlapping_component_refs_of_decl_p (tree ref1, tree ref2)
{
  auto_vec<tree, 16> component_refs1;
  auto_vec<tree, 16> component_refs2;

  /* Create the stack of handled components for REF1.  */
  while (handled_component_p (ref1))
    {
      component_refs1.safe_push (ref1);
      ref1 = TREE_OPERAND (ref1, 0);
    }
  if (TREE_CODE (ref1) == MEM_REF)
    {
      if (!integer_zerop (TREE_OPERAND (ref1, 1)))
	goto may_overlap;
      ref1 = TREE_OPERAND (TREE_OPERAND (ref1, 0), 0);
    }

  /* Create the stack of handled components for REF2.  */
  while (handled_component_p (ref2))
    {
      component_refs2.safe_push (ref2);
      ref2 = TREE_OPERAND (ref2, 0);
    }
  if (TREE_CODE (ref2) == MEM_REF)
    {
      if (!integer_zerop (TREE_OPERAND (ref2, 1)))
	goto may_overlap;
      ref2 = TREE_OPERAND (TREE_OPERAND (ref2, 0), 0);
    }

  /* We must have the same base DECL.  */
  gcc_assert (ref1 == ref2);

  /* Pop the stacks in parallel and examine the COMPONENT_REFs of the same
     rank.  This is sufficient because we start from the same DECL and you
     cannot reference several fields at a time with COMPONENT_REFs (unlike
     with ARRAY_RANGE_REFs for arrays) so you always need the same number
     of them to access a sub-component, unless you're in a union, in which
     case the return value will precisely be false.  */
  while (true)
    {
      do
	{
	  if (component_refs1.is_empty ())
	    goto may_overlap;
	  ref1 = component_refs1.pop ();
	}
      while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref1, 0))));

      do
	{
	  if (component_refs2.is_empty ())
	    goto may_overlap;
	  ref2 = component_refs2.pop ();
	}
      while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref2, 0))));

      /* Beware of BIT_FIELD_REF.  */
      if (TREE_CODE (ref1) != COMPONENT_REF
	  || TREE_CODE (ref2) != COMPONENT_REF)
	goto may_overlap;

      tree field1 = TREE_OPERAND (ref1, 1);
      tree field2 = TREE_OPERAND (ref2, 1);

      /* ??? We cannot simply use the type of operand #0 of the refs here
	 as the Fortran compiler smuggles type punning into COMPONENT_REFs
	 for common blocks instead of using unions like everyone else.  */
      tree type1 = DECL_CONTEXT (field1);
      tree type2 = DECL_CONTEXT (field2);

      /* We cannot disambiguate fields in a union or qualified union.  */
      if (type1 != type2 || TREE_CODE (type1) != RECORD_TYPE)
	goto may_overlap;

      /* Different fields of the same record type cannot overlap.
	 ??? Bitfields can overlap at RTL level so punt on them.  */
      if (field1 != field2)
	{
	  component_refs1.release ();
	  component_refs2.release ();
	  return !(DECL_BIT_FIELD (field1) && DECL_BIT_FIELD (field2));
	}
    }

may_overlap:
  component_refs1.release ();
  component_refs2.release ();
  return false;
}

/* qsort compare function to sort FIELD_DECLs after their
   DECL_FIELD_CONTEXT TYPE_UID.  */

static inline int
ncr_compar (const void *field1_, const void *field2_)
{
  const_tree field1 = *(const_tree *) const_cast <void *>(field1_);
  const_tree field2 = *(const_tree *) const_cast <void *>(field2_);
  unsigned int uid1 = TYPE_UID (DECL_FIELD_CONTEXT (field1));
  unsigned int uid2 = TYPE_UID (DECL_FIELD_CONTEXT (field2));
  if (uid1 < uid2)
    return -1;
  else if (uid1 > uid2)
    return 1;
  return 0;
}

/* Return true if we can determine that the fields referenced cannot
   overlap for any pair of objects.  */

static bool
nonoverlapping_component_refs_p (const_tree x, const_tree y)
{
  if (!flag_strict_aliasing
      || !x || !y
      || TREE_CODE (x) != COMPONENT_REF
      || TREE_CODE (y) != COMPONENT_REF)
    return false;

  auto_vec<const_tree, 16> fieldsx;
  while (TREE_CODE (x) == COMPONENT_REF)
    {
      tree field = TREE_OPERAND (x, 1);
      tree type = DECL_FIELD_CONTEXT (field);
      if (TREE_CODE (type) == RECORD_TYPE)
	fieldsx.safe_push (field);
      x = TREE_OPERAND (x, 0);
    }
  if (fieldsx.length () == 0)
    return false;
  auto_vec<const_tree, 16> fieldsy;
  while (TREE_CODE (y) == COMPONENT_REF)
    {
      tree field = TREE_OPERAND (y, 1);
      tree type = DECL_FIELD_CONTEXT (field);
      if (TREE_CODE (type) == RECORD_TYPE)
	fieldsy.safe_push (TREE_OPERAND (y, 1));
      y = TREE_OPERAND (y, 0);
    }
  if (fieldsy.length () == 0)
    return false;

  /* Most common case first.  */
  if (fieldsx.length () == 1
      && fieldsy.length () == 1)
    return ((DECL_FIELD_CONTEXT (fieldsx[0])
	     == DECL_FIELD_CONTEXT (fieldsy[0]))
	    && fieldsx[0] != fieldsy[0]
	    && !(DECL_BIT_FIELD (fieldsx[0]) && DECL_BIT_FIELD (fieldsy[0])));

  if (fieldsx.length () == 2)
    {
      if (ncr_compar (&fieldsx[0], &fieldsx[1]) == 1)
	{
	  const_tree tem = fieldsx[0];
	  fieldsx[0] = fieldsx[1];
	  fieldsx[1] = tem;
	}
    }
  else
    fieldsx.qsort (ncr_compar);
  if (fieldsy.length () == 2)
    {
      if (ncr_compar (&fieldsy[0], &fieldsy[1]) == 1)
	{
	  const_tree tem = fieldsy[0];
	  fieldsy[0] = fieldsy[1];
	  fieldsy[1] = tem;
	}
    }
  else
    fieldsy.qsort (ncr_compar);

  unsigned i = 0, j = 0;
  do
    {
      const_tree fieldx = fieldsx[i];
      const_tree fieldy = fieldsy[j];
      tree typex = DECL_FIELD_CONTEXT (fieldx);
      tree typey = DECL_FIELD_CONTEXT (fieldy);
      if (typex == typey)
	{
	  /* We're left with accessing different fields of a structure,
	     no possible overlap, unless they are both bitfields.  */
	  if (fieldx != fieldy)
	    return !(DECL_BIT_FIELD (fieldx) && DECL_BIT_FIELD (fieldy));
	}
      if (TYPE_UID (typex) < TYPE_UID (typey))
	{
	  i++;
	  if (i == fieldsx.length ())
	    break;
	}
      else
	{
	  j++;
	  if (j == fieldsy.length ())
	    break;
	}
    }
  while (1);

  return false;
}

/* Return true if two memory references based on the variables BASE1
   and BASE2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
   [OFFSET2, OFFSET2 + MAX_SIZE2) may alias.  REF1 and REF2
   if non-NULL are the complete memory reference trees.  */

static bool
decl_refs_may_alias_p (tree ref1, tree base1,
		       HOST_WIDE_INT offset1, HOST_WIDE_INT max_size1,
		       tree ref2, tree base2,
		       HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2)
{
  gcc_checking_assert (DECL_P (base1) && DECL_P (base2));

  /* If both references are based on different variables, they cannot alias.  */
  if (base1 != base2)
    return false;

  /* If both references are based on the same variable, they cannot alias if
     the accesses do not overlap.  */
  if (!ranges_overlap_p (offset1, max_size1, offset2, max_size2))
    return false;

  /* For components with variable position, the above test isn't sufficient,
     so we disambiguate component references manually.  */
  if (ref1 && ref2
      && handled_component_p (ref1) && handled_component_p (ref2)
      && nonoverlapping_component_refs_of_decl_p (ref1, ref2))
    return false;

  return true;
}
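
/* For instance, given
     struct S { int i; int j; } s;
   the references s.i and s.j share the same base decl but occupy
   disjoint [offset, offset + max_size) ranges, so the ranges_overlap_p
   test above already disambiguates them; the component-ref walk is only
   needed when a component has variable position.  */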

/* Return true if an indirect reference based on *PTR1 constrained
   to [OFFSET1, OFFSET1 + MAX_SIZE1) may alias a variable based on BASE2
   constrained to [OFFSET2, OFFSET2 + MAX_SIZE2).  *PTR1 and BASE2 have
   the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
   in which case they are computed on-demand.  REF1 and REF2
   if non-NULL are the complete memory reference trees.  */

static bool
indirect_ref_may_alias_decl_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
			       HOST_WIDE_INT offset1,
			       HOST_WIDE_INT max_size1 ATTRIBUTE_UNUSED,
			       alias_set_type ref1_alias_set,
			       alias_set_type base1_alias_set,
			       tree ref2 ATTRIBUTE_UNUSED, tree base2,
			       HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2,
			       alias_set_type ref2_alias_set,
			       alias_set_type base2_alias_set, bool tbaa_p)
{
  tree ptr1;
  tree ptrtype1, dbase2;
  HOST_WIDE_INT offset1p = offset1, offset2p = offset2;
  HOST_WIDE_INT doffset1, doffset2;

  gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
			|| TREE_CODE (base1) == TARGET_MEM_REF)
		       && DECL_P (base2));

  ptr1 = TREE_OPERAND (base1, 0);

  /* The offset embedded in MEM_REFs can be negative.  Bias them
     so that the resulting offset adjustment is positive.  */
  offset_int moff = mem_ref_offset (base1);
  moff = wi::lshift (moff, LOG2_BITS_PER_UNIT);
  if (wi::neg_p (moff))
    offset2p += (-moff).to_short_addr ();
  else
    offset1p += moff.to_short_addr ();

  /* If only one reference is based on a variable, they cannot alias if
     the pointer access is beyond the extent of the variable access.
     (the pointer base cannot validly point to an offset less than zero
     of the variable).
     ??? IVOPTs creates bases that do not honor this restriction,
     so do not apply this optimization for TARGET_MEM_REFs.  */
  if (TREE_CODE (base1) != TARGET_MEM_REF
      && !ranges_overlap_p (MAX (0, offset1p), -1, offset2p, max_size2))
    return false;
  /* They also cannot alias if the pointer may not point to the decl.  */
  if (!ptr_deref_may_alias_decl_p (ptr1, base2))
    return false;

  /* Disambiguations that rely on strict aliasing rules follow.  */
  if (!flag_strict_aliasing || !tbaa_p)
    return true;

  ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));

  /* If the alias set for a pointer access is zero all bets are off.  */
  if (base1_alias_set == -1)
    base1_alias_set = get_deref_alias_set (ptrtype1);
  if (base1_alias_set == 0)
    return true;
  if (base2_alias_set == -1)
    base2_alias_set = get_alias_set (base2);

  /* When we are trying to disambiguate an access with a pointer dereference
     as base versus one with a decl as base we can use both the size
     of the decl and its dynamic type for extra disambiguation.
     ??? We do not know anything about the dynamic type of the decl
     other than that its alias-set contains base2_alias_set as a subset
     which does not help us here.  */
  /* As we know nothing useful about the dynamic type of the decl just
     use the usual conflict check rather than a subset test.
     ??? We could introduce -fvery-strict-aliasing when the language
     does not allow decls to have a dynamic type that differs from their
     static type.  Then we can check
     !alias_set_subset_of (base1_alias_set, base2_alias_set) instead.  */
  if (base1_alias_set != base2_alias_set
      && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
    return false;
  /* If the size of the access relevant for TBAA through the pointer
     is bigger than the size of the decl we can't possibly access the
     decl via that pointer.  */
  if (DECL_SIZE (base2) && COMPLETE_TYPE_P (TREE_TYPE (ptrtype1))
      && TREE_CODE (DECL_SIZE (base2)) == INTEGER_CST
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (ptrtype1))) == INTEGER_CST
      /* ??? This in turn may run afoul when a decl of type T which is
	 a member of union type U is accessed through a pointer to
	 type U and sizeof T is smaller than sizeof U.  */
      && TREE_CODE (TREE_TYPE (ptrtype1)) != UNION_TYPE
      && TREE_CODE (TREE_TYPE (ptrtype1)) != QUAL_UNION_TYPE
      && tree_int_cst_lt (DECL_SIZE (base2), TYPE_SIZE (TREE_TYPE (ptrtype1))))
    return false;

  if (!ref2)
    return true;

  /* If the decl is accessed via a MEM_REF, reconstruct the base
     we can use for TBAA and an appropriately adjusted offset.  */
  dbase2 = ref2;
  while (handled_component_p (dbase2))
    dbase2 = TREE_OPERAND (dbase2, 0);
  doffset1 = offset1;
  doffset2 = offset2;
  if (TREE_CODE (dbase2) == MEM_REF
      || TREE_CODE (dbase2) == TARGET_MEM_REF)
    {
      offset_int moff = mem_ref_offset (dbase2);
      moff = wi::lshift (moff, LOG2_BITS_PER_UNIT);
      if (wi::neg_p (moff))
	doffset1 -= (-moff).to_short_addr ();
      else
	doffset2 -= moff.to_short_addr ();
    }

  /* If either reference is view-converted, give up now.  */
  if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1
      || same_type_for_tbaa (TREE_TYPE (dbase2), TREE_TYPE (base2)) != 1)
    return true;

  /* If both references are through the same type, they do not alias
     if the accesses do not overlap.  This does extra disambiguation
     for mixed/pointer accesses but requires strict aliasing.
     For MEM_REFs we require that the component-ref offset we computed
     is relative to the start of the type which we ensure by
     comparing rvalue and access type and disregarding the constant
     pointer offset.  */
  if ((TREE_CODE (base1) != TARGET_MEM_REF
       || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
      && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (dbase2)) == 1)
    return ranges_overlap_p (doffset1, max_size1, doffset2, max_size2);

  if (ref1 && ref2
      && nonoverlapping_component_refs_p (ref1, ref2))
    return false;

  /* Do access-path based disambiguation.  */
  if (ref1 && ref2
      && (handled_component_p (ref1) || handled_component_p (ref2)))
    return aliasing_component_refs_p (ref1,
				      ref1_alias_set, base1_alias_set,
				      offset1, max_size1,
				      ref2,
				      ref2_alias_set, base2_alias_set,
				      offset2, max_size2, true);

  return true;
}

/* Return true if two indirect references based on *PTR1
   and *PTR2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
   [OFFSET2, OFFSET2 + MAX_SIZE2) may alias.  *PTR1 and *PTR2 have
   the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
   in which case they are computed on-demand.  REF1 and REF2
   if non-NULL are the complete memory reference trees.  */

static bool
indirect_refs_may_alias_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
			   HOST_WIDE_INT offset1, HOST_WIDE_INT max_size1,
			   alias_set_type ref1_alias_set,
			   alias_set_type base1_alias_set,
			   tree ref2 ATTRIBUTE_UNUSED, tree base2,
			   HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2,
			   alias_set_type ref2_alias_set,
			   alias_set_type base2_alias_set, bool tbaa_p)
{
  tree ptr1;
  tree ptr2;
  tree ptrtype1, ptrtype2;

  gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
			|| TREE_CODE (base1) == TARGET_MEM_REF)
		       && (TREE_CODE (base2) == MEM_REF
			   || TREE_CODE (base2) == TARGET_MEM_REF));

  ptr1 = TREE_OPERAND (base1, 0);
  ptr2 = TREE_OPERAND (base2, 0);

  /* If both bases are based on pointers they cannot alias if they may not
     point to the same memory object or if they point to the same object
     and the accesses do not overlap.  */
  if ((!cfun || gimple_in_ssa_p (cfun))
      && operand_equal_p (ptr1, ptr2, 0)
      && (((TREE_CODE (base1) != TARGET_MEM_REF
	    || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
	   && (TREE_CODE (base2) != TARGET_MEM_REF
	       || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2))))
	  || (TREE_CODE (base1) == TARGET_MEM_REF
	      && TREE_CODE (base2) == TARGET_MEM_REF
	      && (TMR_STEP (base1) == TMR_STEP (base2)
		  || (TMR_STEP (base1) && TMR_STEP (base2)
		      && operand_equal_p (TMR_STEP (base1),
					  TMR_STEP (base2), 0)))
	      && (TMR_INDEX (base1) == TMR_INDEX (base2)
		  || (TMR_INDEX (base1) && TMR_INDEX (base2)
		      && operand_equal_p (TMR_INDEX (base1),
					  TMR_INDEX (base2), 0)))
	      && (TMR_INDEX2 (base1) == TMR_INDEX2 (base2)
		  || (TMR_INDEX2 (base1) && TMR_INDEX2 (base2)
		      && operand_equal_p (TMR_INDEX2 (base1),
					  TMR_INDEX2 (base2), 0))))))
    {
      offset_int moff;
      /* The offset embedded in MEM_REFs can be negative.  Bias them
	 so that the resulting offset adjustment is positive.  */
      moff = mem_ref_offset (base1);
      moff = wi::lshift (moff, LOG2_BITS_PER_UNIT);
      if (wi::neg_p (moff))
	offset2 += (-moff).to_short_addr ();
      else
	offset1 += moff.to_shwi ();
      moff = mem_ref_offset (base2);
      moff = wi::lshift (moff, LOG2_BITS_PER_UNIT);
      if (wi::neg_p (moff))
	offset1 += (-moff).to_short_addr ();
      else
	offset2 += moff.to_short_addr ();
      return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
    }
  if (!ptr_derefs_may_alias_p (ptr1, ptr2))
    return false;

  /* Disambiguations that rely on strict aliasing rules follow.  */
  if (!flag_strict_aliasing || !tbaa_p)
    return true;

  ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));
  ptrtype2 = TREE_TYPE (TREE_OPERAND (base2, 1));

  /* If the alias set for a pointer access is zero all bets are off.  */
  if (base1_alias_set == -1)
    base1_alias_set = get_deref_alias_set (ptrtype1);
  if (base1_alias_set == 0)
    return true;
  if (base2_alias_set == -1)
    base2_alias_set = get_deref_alias_set (ptrtype2);
  if (base2_alias_set == 0)
    return true;

  /* If both references are through the same type, they do not alias
     if the accesses do not overlap.  This does extra disambiguation
     for mixed/pointer accesses but requires strict aliasing.  */
  if ((TREE_CODE (base1) != TARGET_MEM_REF
       || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
      && (TREE_CODE (base2) != TARGET_MEM_REF
	  || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2)))
      && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) == 1
      && same_type_for_tbaa (TREE_TYPE (base2), TREE_TYPE (ptrtype2)) == 1
      && same_type_for_tbaa (TREE_TYPE (ptrtype1),
			     TREE_TYPE (ptrtype2)) == 1)
    return ranges_overlap_p (offset1, max_size1, offset2, max_size2);

  /* Do type-based disambiguation.  */
  if (base1_alias_set != base2_alias_set
      && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
    return false;

  /* If either reference is view-converted, give up now.  */
  if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1
      || same_type_for_tbaa (TREE_TYPE (base2), TREE_TYPE (ptrtype2)) != 1)
    return true;

  if (ref1 && ref2
      && nonoverlapping_component_refs_p (ref1, ref2))
    return false;

  /* Do access-path based disambiguation.  */
  if (ref1 && ref2
      && (handled_component_p (ref1) || handled_component_p (ref2)))
    return aliasing_component_refs_p (ref1,
				      ref1_alias_set, base1_alias_set,
				      offset1, max_size1,
				      ref2,
				      ref2_alias_set, base2_alias_set,
				      offset2, max_size2, false);

  return true;
}

/* Return true if the two memory references REF1 and REF2 may alias.  */

bool
refs_may_alias_p_1 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
{
  tree base1, base2;
  HOST_WIDE_INT offset1 = 0, offset2 = 0;
  HOST_WIDE_INT max_size1 = -1, max_size2 = -1;
  bool var1_p, var2_p, ind1_p, ind2_p;

  gcc_checking_assert ((!ref1->ref
			|| TREE_CODE (ref1->ref) == SSA_NAME
			|| DECL_P (ref1->ref)
			|| TREE_CODE (ref1->ref) == STRING_CST
			|| handled_component_p (ref1->ref)
			|| TREE_CODE (ref1->ref) == MEM_REF
			|| TREE_CODE (ref1->ref) == TARGET_MEM_REF)
		       && (!ref2->ref
			   || TREE_CODE (ref2->ref) == SSA_NAME
			   || DECL_P (ref2->ref)
			   || TREE_CODE (ref2->ref) == STRING_CST
			   || handled_component_p (ref2->ref)
			   || TREE_CODE (ref2->ref) == MEM_REF
			   || TREE_CODE (ref2->ref) == TARGET_MEM_REF));

  /* Decompose the references into their base objects and the access.  */
  base1 = ao_ref_base (ref1);
  offset1 = ref1->offset;
  max_size1 = ref1->max_size;
  base2 = ao_ref_base (ref2);
  offset2 = ref2->offset;
  max_size2 = ref2->max_size;

  /* We can end up with registers or constants as bases for example from
     *D.1663_44 = VIEW_CONVERT_EXPR<struct DB_LSN>(__tmp$B0F64_59);
     which is seen as a struct copy.  */
  if (TREE_CODE (base1) == SSA_NAME
      || TREE_CODE (base1) == CONST_DECL
      || TREE_CODE (base1) == CONSTRUCTOR
      || TREE_CODE (base1) == ADDR_EXPR
      || CONSTANT_CLASS_P (base1)
      || TREE_CODE (base2) == SSA_NAME
      || TREE_CODE (base2) == CONST_DECL
      || TREE_CODE (base2) == CONSTRUCTOR
      || TREE_CODE (base2) == ADDR_EXPR
      || CONSTANT_CLASS_P (base2))
    return false;

  /* We can end up referring to code via function and label decls.
     As we likely do not properly track code aliases conservatively
     bail out.  */
  if (TREE_CODE (base1) == FUNCTION_DECL
      || TREE_CODE (base1) == LABEL_DECL
      || TREE_CODE (base2) == FUNCTION_DECL
      || TREE_CODE (base2) == LABEL_DECL)
    return true;

  /* Two volatile accesses always conflict.  */
  if (ref1->volatile_p
      && ref2->volatile_p)
    return true;

  /* Defer to simple offset based disambiguation if we have
     references based on two decls.  Do this before deferring to
     TBAA to handle must-alias cases in conformance with the
     GCC extension of allowing type-punning through unions.  */
  var1_p = DECL_P (base1);
  var2_p = DECL_P (base2);
  if (var1_p && var2_p)
    return decl_refs_may_alias_p (ref1->ref, base1, offset1, max_size1,
				  ref2->ref, base2, offset2, max_size2);

  /* Handle restrict based accesses.
     ??? ao_ref_base strips inner MEM_REF [&decl], recover from that
     here.  */
  tree rbase1 = base1;
  tree rbase2 = base2;
  if (var1_p)
    {
      rbase1 = ref1->ref;
      if (rbase1)
	while (handled_component_p (rbase1))
	  rbase1 = TREE_OPERAND (rbase1, 0);
    }
  if (var2_p)
    {
      rbase2 = ref2->ref;
      if (rbase2)
	while (handled_component_p (rbase2))
	  rbase2 = TREE_OPERAND (rbase2, 0);
    }
  if (rbase1 && rbase2
      && (TREE_CODE (base1) == MEM_REF || TREE_CODE (base1) == TARGET_MEM_REF)
      && (TREE_CODE (base2) == MEM_REF || TREE_CODE (base2) == TARGET_MEM_REF)
      /* If the accesses are in the same restrict clique... */
      && MR_DEPENDENCE_CLIQUE (base1) == MR_DEPENDENCE_CLIQUE (base2)
      /* But based on different pointers they do not alias.  */
      && MR_DEPENDENCE_BASE (base1) != MR_DEPENDENCE_BASE (base2))
    return false;

  ind1_p = (TREE_CODE (base1) == MEM_REF
	    || TREE_CODE (base1) == TARGET_MEM_REF);
  ind2_p = (TREE_CODE (base2) == MEM_REF
	    || TREE_CODE (base2) == TARGET_MEM_REF);

  /* Canonicalize the pointer-vs-decl case.  */
  if (ind1_p && var2_p)
    {
      HOST_WIDE_INT tmp1;
      tree tmp2;
      ao_ref *tmp3;
      tmp1 = offset1; offset1 = offset2; offset2 = tmp1;
      tmp1 = max_size1; max_size1 = max_size2; max_size2 = tmp1;
      tmp2 = base1; base1 = base2; base2 = tmp2;
      tmp3 = ref1; ref1 = ref2; ref2 = tmp3;
      var1_p = true;
      ind1_p = false;
      var2_p = false;
      ind2_p = true;
    }

  /* First defer to TBAA if possible.  */
  if (tbaa_p
      && flag_strict_aliasing
      && !alias_sets_conflict_p (ao_ref_alias_set (ref1),
				 ao_ref_alias_set (ref2)))
    return false;

  /* Dispatch to the pointer-vs-decl or pointer-vs-pointer disambiguators.  */
  if (var1_p && ind2_p)
    return indirect_ref_may_alias_decl_p (ref2->ref, base2,
					  offset2, max_size2,
					  ao_ref_alias_set (ref2), -1,
					  ref1->ref, base1,
					  offset1, max_size1,
					  ao_ref_alias_set (ref1),
					  ao_ref_base_alias_set (ref1),
					  tbaa_p);
  else if (ind1_p && ind2_p)
    return indirect_refs_may_alias_p (ref1->ref, base1,
				      offset1, max_size1,
				      ao_ref_alias_set (ref1), -1,
				      ref2->ref, base2,
				      offset2, max_size2,
				      ao_ref_alias_set (ref2), -1,
				      tbaa_p);

  /* We really do not want to end up here, but returning true is safe.  */
#ifdef ENABLE_CHECKING
  gcc_unreachable ();
#else
  return true;
#endif
}

static bool
refs_may_alias_p (tree ref1, ao_ref *ref2)
{
  ao_ref r1;
  ao_ref_init (&r1, ref1);
  return refs_may_alias_p_1 (&r1, ref2, true);
}

bool
refs_may_alias_p (tree ref1, tree ref2)
{
  ao_ref r1, r2;
  bool res;
  ao_ref_init (&r1, ref1);
  ao_ref_init (&r2, ref2);
  res = refs_may_alias_p_1 (&r1, &r2, true);
  if (res)
    ++alias_stats.refs_may_alias_p_may_alias;
  else
    ++alias_stats.refs_may_alias_p_no_alias;
  return res;
}

/* Returns true if there is an anti-dependence for the STORE that
   executes after the LOAD.  */

bool
refs_anti_dependent_p (tree load, tree store)
{
  ao_ref r1, r2;
  ao_ref_init (&r1, load);
  ao_ref_init (&r2, store);
  return refs_may_alias_p_1 (&r1, &r2, false);
}

/* Returns true if there is an output dependence for the stores
   STORE1 and STORE2.  */

bool
refs_output_dependent_p (tree store1, tree store2)
{
  ao_ref r1, r2;
  ao_ref_init (&r1, store1);
  ao_ref_init (&r2, store2);
  return refs_may_alias_p_1 (&r1, &r2, false);
}

/* If the call CALL may use the memory reference REF return true,
   otherwise return false.  */

static bool
ref_maybe_used_by_call_p_1 (gcall *call, ao_ref *ref)
{
  tree base, callee;
  unsigned i;
  int flags = gimple_call_flags (call);

  /* Const functions without a static chain do not implicitly use memory.  */
  if (!gimple_call_chain (call)
      && (flags & (ECF_CONST|ECF_NOVOPS)))
    goto process_args;

  base = ao_ref_base (ref);
  if (!base)
    return true;

  /* A call that is not without side-effects might involve volatile
     accesses and thus conflicts with all other volatile accesses.  */
  if (ref->volatile_p)
    return true;

  /* If the reference is based on a decl that is not aliased the call
     cannot possibly use it.  */
  if (DECL_P (base)
      && !may_be_aliased (base)
      /* But local statics can be used through recursion.  */
      && !is_global_var (base))
    goto process_args;

  callee = gimple_call_fndecl (call);

  /* Handle those builtin functions explicitly that do not act as
     escape points.  See tree-ssa-structalias.c:find_func_aliases
     for the list of builtins we might need to handle here.  */
  if (callee != NULL_TREE
      && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
	/* All the following functions read memory pointed to by
	   their second argument.  strcat/strncat additionally
	   reads memory pointed to by the first argument.  */
	case BUILT_IN_STRCAT:
	case BUILT_IN_STRNCAT:
	  {
	    ao_ref dref;
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   NULL_TREE);
	    if (refs_may_alias_p_1 (&dref, ref, false))
	      return true;
	  }
	  /* FALLTHRU */
	case BUILT_IN_STRCPY:
	case BUILT_IN_STRNCPY:
	case BUILT_IN_MEMCPY:
	case BUILT_IN_MEMMOVE:
	case BUILT_IN_MEMPCPY:
	case BUILT_IN_STPCPY:
	case BUILT_IN_STPNCPY:
	case BUILT_IN_TM_MEMCPY:
	case BUILT_IN_TM_MEMMOVE:
	  {
	    ao_ref dref;
	    tree size = NULL_TREE;
	    if (gimple_call_num_args (call) == 3)
	      size = gimple_call_arg (call, 2);
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 1),
					   size);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }
	case BUILT_IN_STRCAT_CHK:
	case BUILT_IN_STRNCAT_CHK:
	  {
	    ao_ref dref;
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   NULL_TREE);
	    if (refs_may_alias_p_1 (&dref, ref, false))
	      return true;
	  }
	  /* FALLTHRU */
	case BUILT_IN_STRCPY_CHK:
	case BUILT_IN_STRNCPY_CHK:
	case BUILT_IN_MEMCPY_CHK:
	case BUILT_IN_MEMMOVE_CHK:
	case BUILT_IN_MEMPCPY_CHK:
	case BUILT_IN_STPCPY_CHK:
	case BUILT_IN_STPNCPY_CHK:
	  {
	    ao_ref dref;
	    tree size = NULL_TREE;
	    if (gimple_call_num_args (call) == 4)
	      size = gimple_call_arg (call, 2);
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 1),
					   size);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }
	case BUILT_IN_BCOPY:
	  {
	    ao_ref dref;
	    tree size = gimple_call_arg (call, 2);
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   size);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }

	/* The following functions read memory pointed to by their
	   first argument.  */
	CASE_BUILT_IN_TM_LOAD (1):
	CASE_BUILT_IN_TM_LOAD (2):
	CASE_BUILT_IN_TM_LOAD (4):
	CASE_BUILT_IN_TM_LOAD (8):
	CASE_BUILT_IN_TM_LOAD (FLOAT):
	CASE_BUILT_IN_TM_LOAD (DOUBLE):
	CASE_BUILT_IN_TM_LOAD (LDOUBLE):
	CASE_BUILT_IN_TM_LOAD (M64):
	CASE_BUILT_IN_TM_LOAD (M128):
	CASE_BUILT_IN_TM_LOAD (M256):
	case BUILT_IN_TM_LOG:
	case BUILT_IN_TM_LOG_1:
	case BUILT_IN_TM_LOG_2:
	case BUILT_IN_TM_LOG_4:
	case BUILT_IN_TM_LOG_8:
	case BUILT_IN_TM_LOG_FLOAT:
	case BUILT_IN_TM_LOG_DOUBLE:
	case BUILT_IN_TM_LOG_LDOUBLE:
	case BUILT_IN_TM_LOG_M64:
	case BUILT_IN_TM_LOG_M128:
	case BUILT_IN_TM_LOG_M256:
	  return ptr_deref_may_alias_ref_p_1 (gimple_call_arg (call, 0), ref);

	/* These read memory pointed to by the first argument.  */
	case BUILT_IN_STRDUP:
	case BUILT_IN_STRNDUP:
	case BUILT_IN_REALLOC:
	  {
	    ao_ref dref;
	    tree size = NULL_TREE;
	    if (gimple_call_num_args (call) == 2)
	      size = gimple_call_arg (call, 1);
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   size);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }
	/* These read memory pointed to by the first argument.  */
	case BUILT_IN_INDEX:
	case BUILT_IN_STRCHR:
	case BUILT_IN_STRRCHR:
	  {
	    ao_ref dref;
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   NULL_TREE);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }
	/* These read memory pointed to by the first argument with size
	   in the third argument.  */
	case BUILT_IN_MEMCHR:
	  {
	    ao_ref dref;
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   gimple_call_arg (call, 2));
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }
	/* These read memory pointed to by the first and second arguments.  */
	case BUILT_IN_STRSTR:
	case BUILT_IN_STRPBRK:
	  {
	    ao_ref dref;
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   NULL_TREE);
	    if (refs_may_alias_p_1 (&dref, ref, false))
	      return true;
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 1),
					   NULL_TREE);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }

	/* The following builtins do not read from memory.  */
	case BUILT_IN_FREE:
	case BUILT_IN_MALLOC:
	case BUILT_IN_POSIX_MEMALIGN:
	case BUILT_IN_ALIGNED_ALLOC:
	case BUILT_IN_CALLOC:
	case BUILT_IN_ALLOCA:
	case BUILT_IN_ALLOCA_WITH_ALIGN:
	case BUILT_IN_STACK_SAVE:
	case BUILT_IN_STACK_RESTORE:
	case BUILT_IN_MEMSET:
	case BUILT_IN_TM_MEMSET:
	case BUILT_IN_MEMSET_CHK:
	case BUILT_IN_FREXP:
	case BUILT_IN_FREXPF:
	case BUILT_IN_FREXPL:
	case BUILT_IN_GAMMA_R:
	case BUILT_IN_GAMMAF_R:
	case BUILT_IN_GAMMAL_R:
	case BUILT_IN_LGAMMA_R:
	case BUILT_IN_LGAMMAF_R:
	case BUILT_IN_LGAMMAL_R:
	case BUILT_IN_MODF:
	case BUILT_IN_MODFF:
	case BUILT_IN_MODFL:
	case BUILT_IN_REMQUO:
	case BUILT_IN_REMQUOF:
	case BUILT_IN_REMQUOL:
	case BUILT_IN_SINCOS:
	case BUILT_IN_SINCOSF:
	case BUILT_IN_SINCOSL:
	case BUILT_IN_ASSUME_ALIGNED:
	case BUILT_IN_VA_END:
	  return false;
	/* __sync_* builtins and some OpenMP builtins act as threading
	   barriers.  */
#undef DEF_SYNC_BUILTIN
#define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
#include "sync-builtins.def"
#undef DEF_SYNC_BUILTIN
	case BUILT_IN_GOMP_ATOMIC_START:
	case BUILT_IN_GOMP_ATOMIC_END:
	case BUILT_IN_GOMP_BARRIER:
	case BUILT_IN_GOMP_BARRIER_CANCEL:
	case BUILT_IN_GOMP_TASKWAIT:
	case BUILT_IN_GOMP_TASKGROUP_END:
	case BUILT_IN_GOMP_CRITICAL_START:
	case BUILT_IN_GOMP_CRITICAL_END:
	case BUILT_IN_GOMP_CRITICAL_NAME_START:
	case BUILT_IN_GOMP_CRITICAL_NAME_END:
	case BUILT_IN_GOMP_LOOP_END:
	case BUILT_IN_GOMP_LOOP_END_CANCEL:
	case BUILT_IN_GOMP_ORDERED_START:
	case BUILT_IN_GOMP_ORDERED_END:
	case BUILT_IN_GOMP_SECTIONS_END:
	case BUILT_IN_GOMP_SECTIONS_END_CANCEL:
	case BUILT_IN_GOMP_SINGLE_COPY_START:
	case BUILT_IN_GOMP_SINGLE_COPY_END:
	  return true;

	default:
	  /* Fallthru to general call handling.  */;
      }

  /* Check if base is a global static variable that is not read
     by the function.  */
  if (callee != NULL_TREE
      && TREE_CODE (base) == VAR_DECL
      && TREE_STATIC (base))
    {
      struct cgraph_node *node = cgraph_node::get (callee);
      bitmap not_read;

      /* FIXME: Callee can be an OMP builtin that does not have a call graph
	 node yet.  We should enforce that there are nodes for all decls in the
	 IL and remove this check instead.  */
      if (node
	  && (not_read = ipa_reference_get_not_read_global (node))
	  && bitmap_bit_p (not_read, DECL_UID (base)))
	goto process_args;
    }

  /* Check if the base variable is call-used.  */
  if (DECL_P (base))
    {
      if (pt_solution_includes (gimple_call_use_set (call), base))
	return true;
    }
  else if ((TREE_CODE (base) == MEM_REF
	    || TREE_CODE (base) == TARGET_MEM_REF)
	   && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
    {
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
      if (!pi)
	return true;

      if (pt_solutions_intersect (gimple_call_use_set (call), &pi->pt))
	return true;
    }
  else
    return true;

  /* Inspect call arguments for passed-by-value aliases.  */
process_args:
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree op = gimple_call_arg (call, i);
      int flags = gimple_call_arg_flags (call, i);

      if (flags & EAF_UNUSED)
	continue;

      if (TREE_CODE (op) == WITH_SIZE_EXPR)
	op = TREE_OPERAND (op, 0);

      if (TREE_CODE (op) != SSA_NAME
	  && !is_gimple_min_invariant (op))
	{
	  ao_ref r;
	  ao_ref_init (&r, op);
	  if (refs_may_alias_p_1 (&r, ref, true))
	    return true;
	}
    }

  return false;
}

static bool
ref_maybe_used_by_call_p (gcall *call, ao_ref *ref)
{
  bool res;
  res = ref_maybe_used_by_call_p_1 (call, ref);
  if (res)
    ++alias_stats.ref_maybe_used_by_call_p_may_alias;
  else
    ++alias_stats.ref_maybe_used_by_call_p_no_alias;
  return res;
}

/* If the statement STMT may use the memory reference REF return
   true, otherwise return false.  */

bool
ref_maybe_used_by_stmt_p (gimple stmt, ao_ref *ref)
{
  if (is_gimple_assign (stmt))
    {
      tree rhs;

      /* All memory assign statements are single.  */
      if (!gimple_assign_single_p (stmt))
	return false;

      rhs = gimple_assign_rhs1 (stmt);
      if (is_gimple_reg (rhs)
	  || is_gimple_min_invariant (rhs)
	  || gimple_assign_rhs_code (stmt) == CONSTRUCTOR)
	return false;

      return refs_may_alias_p (rhs, ref);
    }
  else if (is_gimple_call (stmt))
    return ref_maybe_used_by_call_p (as_a <gcall *> (stmt), ref);
  else if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
    {
      tree retval = gimple_return_retval (return_stmt);
      if (retval
	  && TREE_CODE (retval) != SSA_NAME
	  && !is_gimple_min_invariant (retval)
	  && refs_may_alias_p (retval, ref))
	return true;
      /* If ref escapes the function then the return acts as a use.  */
      tree base = ao_ref_base (ref);
      if (!base)
	;
      else if (DECL_P (base))
	return is_global_var (base);
      else if (TREE_CODE (base) == MEM_REF
	       || TREE_CODE (base) == TARGET_MEM_REF)
	return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
      return false;
    }

  return true;
}

bool
ref_maybe_used_by_stmt_p (gimple stmt, tree ref)
{
  ao_ref r;
  ao_ref_init (&r, ref);
  return ref_maybe_used_by_stmt_p (stmt, &r);
}

/* If the call in statement CALL may clobber the memory reference REF
   return true, otherwise return false.  */

bool
call_may_clobber_ref_p_1 (gcall *call, ao_ref *ref)
{
  tree base;
  tree callee;

  /* If the call is pure or const it cannot clobber anything.  */
  if (gimple_call_flags (call)
      & (ECF_PURE|ECF_CONST|ECF_LOOPING_CONST_OR_PURE|ECF_NOVOPS))
    return false;
  if (gimple_call_internal_p (call))
    switch (gimple_call_internal_fn (call))
      {
	/* Treat these internal calls like ECF_PURE for aliasing,
	   they don't write to any memory the program should care about.
	   They have important other side-effects, and read memory,
	   so can't be ECF_NOVOPS.  */
      case IFN_UBSAN_NULL:
      case IFN_UBSAN_BOUNDS:
      case IFN_UBSAN_VPTR:
      case IFN_UBSAN_OBJECT_SIZE:
      case IFN_ASAN_CHECK:
	return false;
      default:
	break;
      }

  base = ao_ref_base (ref);
  if (!base)
    return true;

  if (TREE_CODE (base) == SSA_NAME
      || CONSTANT_CLASS_P (base))
    return false;

  /* A call that is not without side-effects might involve volatile
     accesses and thus conflicts with all other volatile accesses.  */
  if (ref->volatile_p)
    return true;

  /* If the reference is based on a decl that is not aliased the call
     cannot possibly clobber it.  */
  if (DECL_P (base)
      && !may_be_aliased (base)
      /* But local non-readonly statics can be modified through recursion
	 or the call may implement a threading barrier which we must
	 treat as may-def.  */
      && (TREE_READONLY (base)
	  || !is_global_var (base)))
    return false;

  callee = gimple_call_fndecl (call);

  /* Handle those builtin functions explicitly that do not act as
     escape points.  See tree-ssa-structalias.c:find_func_aliases
     for the list of builtins we might need to handle here.  */
  if (callee != NULL_TREE
      && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
	/* All the following functions clobber memory pointed to by
	   their first argument.  */
	case BUILT_IN_STRCPY:
	case BUILT_IN_STRNCPY:
	case BUILT_IN_MEMCPY:
	case BUILT_IN_MEMMOVE:
	case BUILT_IN_MEMPCPY:
	case BUILT_IN_STPCPY:
	case BUILT_IN_STPNCPY:
	case BUILT_IN_STRCAT:
	case BUILT_IN_STRNCAT:
	case BUILT_IN_MEMSET:
	case BUILT_IN_TM_MEMSET:
	CASE_BUILT_IN_TM_STORE (1):
	CASE_BUILT_IN_TM_STORE (2):
	CASE_BUILT_IN_TM_STORE (4):
	CASE_BUILT_IN_TM_STORE (8):
	CASE_BUILT_IN_TM_STORE (FLOAT):
	CASE_BUILT_IN_TM_STORE (DOUBLE):
	CASE_BUILT_IN_TM_STORE (LDOUBLE):
	CASE_BUILT_IN_TM_STORE (M64):
	CASE_BUILT_IN_TM_STORE (M128):
	CASE_BUILT_IN_TM_STORE (M256):
	case BUILT_IN_TM_MEMCPY:
	case BUILT_IN_TM_MEMMOVE:
	  {
	    ao_ref dref;
	    tree size = NULL_TREE;
	    /* Don't pass in size for strncat, as the maximum size
	       is strlen (dest) + n + 1 instead of n, resp.
	       n + 1 at dest + strlen (dest), but strlen (dest) isn't
	       known.  */
	    if (gimple_call_num_args (call) == 3
		&& DECL_FUNCTION_CODE (callee) != BUILT_IN_STRNCAT)
	      size = gimple_call_arg (call, 2);
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   size);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }
	case BUILT_IN_STRCPY_CHK:
	case BUILT_IN_STRNCPY_CHK:
	case BUILT_IN_MEMCPY_CHK:
	case BUILT_IN_MEMMOVE_CHK:
	case BUILT_IN_MEMPCPY_CHK:
	case BUILT_IN_STPCPY_CHK:
	case BUILT_IN_STPNCPY_CHK:
	case BUILT_IN_STRCAT_CHK:
	case BUILT_IN_STRNCAT_CHK:
	case BUILT_IN_MEMSET_CHK:
	  {
	    ao_ref dref;
	    tree size = NULL_TREE;
	    /* Don't pass in size for __strncat_chk, as the maximum size
	       is strlen (dest) + n + 1 instead of n, resp.
	       n + 1 at dest + strlen (dest), but strlen (dest) isn't
	       known.  */
	    if (gimple_call_num_args (call) == 4
		&& DECL_FUNCTION_CODE (callee) != BUILT_IN_STRNCAT_CHK)
	      size = gimple_call_arg (call, 2);
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   size);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }
	case BUILT_IN_BCOPY:
	  {
	    ao_ref dref;
	    tree size = gimple_call_arg (call, 2);
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 1),
					   size);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }
	/* Allocating memory does not have any side-effects apart from
	   being the definition point for the pointer.  */
	case BUILT_IN_MALLOC:
	case BUILT_IN_ALIGNED_ALLOC:
	case BUILT_IN_CALLOC:
	case BUILT_IN_STRDUP:
	case BUILT_IN_STRNDUP:
	  /* Unix98 specifies that errno is set on allocation failure.  */
	  if (flag_errno_math
	      && targetm.ref_may_alias_errno (ref))
	    return true;
	  return false;
	case BUILT_IN_STACK_SAVE:
	case BUILT_IN_ALLOCA:
	case BUILT_IN_ALLOCA_WITH_ALIGN:
	case BUILT_IN_ASSUME_ALIGNED:
	  return false;
	/* But posix_memalign stores a pointer into the memory pointed to
	   by its first argument.  */
	case BUILT_IN_POSIX_MEMALIGN:
	  {
	    tree ptrptr = gimple_call_arg (call, 0);
	    ao_ref dref;
	    ao_ref_init_from_ptr_and_size (&dref, ptrptr,
					   TYPE_SIZE_UNIT (ptr_type_node));
	    return (refs_may_alias_p_1 (&dref, ref, false)
		    || (flag_errno_math
			&& targetm.ref_may_alias_errno (ref)));
	  }
	/* Freeing memory kills the pointed-to memory.  More importantly
	   the call has to serve as a barrier for moving loads and stores
	   across it.  */
	case BUILT_IN_FREE:
	case BUILT_IN_VA_END:
	  {
	    tree ptr = gimple_call_arg (call, 0);
	    return ptr_deref_may_alias_ref_p_1 (ptr, ref);
	  }
	/* Realloc serves both as allocation point and deallocation point.  */
	case BUILT_IN_REALLOC:
	  {
	    tree ptr = gimple_call_arg (call, 0);
	    /* Unix98 specifies that errno is set on allocation failure.  */
	    return ((flag_errno_math
		     && targetm.ref_may_alias_errno (ref))
		    || ptr_deref_may_alias_ref_p_1 (ptr, ref));
	  }
	case BUILT_IN_GAMMA_R:
	case BUILT_IN_GAMMAF_R:
	case BUILT_IN_GAMMAL_R:
	case BUILT_IN_LGAMMA_R:
	case BUILT_IN_LGAMMAF_R:
	case BUILT_IN_LGAMMAL_R:
	  {
	    tree out = gimple_call_arg (call, 1);
	    if (ptr_deref_may_alias_ref_p_1 (out, ref))
	      return true;
	    if (flag_errno_math)
	      break;
	    return false;
	  }
	case BUILT_IN_FREXP:
	case BUILT_IN_FREXPF:
	case BUILT_IN_FREXPL:
	case BUILT_IN_MODF:
	case BUILT_IN_MODFF:
	case BUILT_IN_MODFL:
	  {
	    tree out = gimple_call_arg (call, 1);
	    return ptr_deref_may_alias_ref_p_1 (out, ref);
	  }
	case BUILT_IN_REMQUO:
	case BUILT_IN_REMQUOF:
	case BUILT_IN_REMQUOL:
	  {
	    tree out = gimple_call_arg (call, 2);
	    if (ptr_deref_may_alias_ref_p_1 (out, ref))
	      return true;
	    if (flag_errno_math)
	      break;
	    return false;
	  }
	case BUILT_IN_SINCOS:
	case BUILT_IN_SINCOSF:
	case BUILT_IN_SINCOSL:
	  {
	    tree sin = gimple_call_arg (call, 1);
	    tree cos = gimple_call_arg (call, 2);
	    return (ptr_deref_may_alias_ref_p_1 (sin, ref)
		    || ptr_deref_may_alias_ref_p_1 (cos, ref));
	  }
	/* __sync_* builtins and some OpenMP builtins act as threading
	   barriers.  */
#undef DEF_SYNC_BUILTIN
#define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
#include "sync-builtins.def"
#undef DEF_SYNC_BUILTIN
	case BUILT_IN_GOMP_ATOMIC_START:
	case BUILT_IN_GOMP_ATOMIC_END:
	case BUILT_IN_GOMP_BARRIER:
	case BUILT_IN_GOMP_BARRIER_CANCEL:
	case BUILT_IN_GOMP_TASKWAIT:
	case BUILT_IN_GOMP_TASKGROUP_END:
	case BUILT_IN_GOMP_CRITICAL_START:
	case BUILT_IN_GOMP_CRITICAL_END:
	case BUILT_IN_GOMP_CRITICAL_NAME_START:
	case BUILT_IN_GOMP_CRITICAL_NAME_END:
	case BUILT_IN_GOMP_LOOP_END:
:
2164 case BUILT_IN_GOMP_LOOP_END_CANCEL
:
2165 case BUILT_IN_GOMP_ORDERED_START
:
2166 case BUILT_IN_GOMP_ORDERED_END
:
2167 case BUILT_IN_GOMP_SECTIONS_END
:
2168 case BUILT_IN_GOMP_SECTIONS_END_CANCEL
:
2169 case BUILT_IN_GOMP_SINGLE_COPY_START
:
2170 case BUILT_IN_GOMP_SINGLE_COPY_END
:
2173 /* Fallthru to general call handling. */;
2176 /* Check if base is a global static variable that is not written
2178 if (callee
!= NULL_TREE
2179 && TREE_CODE (base
) == VAR_DECL
2180 && TREE_STATIC (base
))
2182 struct cgraph_node
*node
= cgraph_node::get (callee
);
2186 && (not_written
= ipa_reference_get_not_written_global (node
))
2187 && bitmap_bit_p (not_written
, DECL_UID (base
)))
2191 /* Check if the base variable is call-clobbered. */
2193 return pt_solution_includes (gimple_call_clobber_set (call
), base
);
2194 else if ((TREE_CODE (base
) == MEM_REF
2195 || TREE_CODE (base
) == TARGET_MEM_REF
)
2196 && TREE_CODE (TREE_OPERAND (base
, 0)) == SSA_NAME
)
2198 struct ptr_info_def
*pi
= SSA_NAME_PTR_INFO (TREE_OPERAND (base
, 0));
2202 return pt_solutions_intersect (gimple_call_clobber_set (call
), &pi
->pt
);
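
/* Illustrative sketch, not part of the original file: the disambiguation
   pattern the builtin cases above all use, shown standalone.  Given a
   destination pointer DEST and a byte count SIZE (NULL_TREE if unknown),
   build an ao_ref covering the written range and disambiguate it against
   REF.  The helper name is hypothetical.  */

static bool ATTRIBUTE_UNUSED
example_dest_write_may_clobber_p (tree dest, tree size, ao_ref *ref)
{
  ao_ref dref;
  /* Covers [DEST, DEST + SIZE) if SIZE is constant, otherwise anything
     reachable from DEST.  */
  ao_ref_init_from_ptr_and_size (&dref, dest, size);
  return refs_may_alias_p_1 (&dref, ref, false);
}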
/* If the call in statement CALL may clobber the memory reference REF
   return true, otherwise return false.  */

bool
call_may_clobber_ref_p (gcall *call, tree ref)
{
  bool res;
  ao_ref r;
  ao_ref_init (&r, ref);
  res = call_may_clobber_ref_p_1 (call, &r);
  if (res)
    ++alias_stats.call_may_clobber_ref_p_may_alias;
  else
    ++alias_stats.call_may_clobber_ref_p_no_alias;
  return res;
}
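
/* Illustrative sketch, not part of the original file: a typical client
   query asking whether CALL may clobber the memory occupied by DECL.
   The function name is hypothetical.  */

static bool ATTRIBUTE_UNUSED
example_call_clobbers_decl_p (gcall *call, tree decl)
{
  ao_ref r;
  ao_ref_init (&r, decl);
  return call_may_clobber_ref_p_1 (call, &r);
}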
/* If the statement STMT may clobber the memory reference REF return true,
   otherwise return false.  */

bool
stmt_may_clobber_ref_p_1 (gimple stmt, ao_ref *ref)
{
  if (is_gimple_call (stmt))
    {
      tree lhs = gimple_call_lhs (stmt);
      if (lhs
	  && TREE_CODE (lhs) != SSA_NAME)
	{
	  ao_ref r;
	  ao_ref_init (&r, lhs);
	  if (refs_may_alias_p_1 (ref, &r, true))
	    return true;
	}

      return call_may_clobber_ref_p_1 (as_a <gcall *> (stmt), ref);
    }
  else if (gimple_assign_single_p (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);
      if (TREE_CODE (lhs) != SSA_NAME)
	{
	  ao_ref r;
	  ao_ref_init (&r, lhs);
	  return refs_may_alias_p_1 (ref, &r, true);
	}
    }
  else if (gimple_code (stmt) == GIMPLE_ASM)
    return true;

  return false;
}

bool
stmt_may_clobber_ref_p (gimple stmt, tree ref)
{
  ao_ref r;
  ao_ref_init (&r, ref);
  return stmt_may_clobber_ref_p_1 (stmt, &r);
}
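
/* Illustrative sketch, not part of the original file: when one reference
   is tested against many statements, initialize the ao_ref once and use
   the _1 worker so the base and extent computed for REF are reused across
   all queries.  Assumes the statement iterator API from gimple-iterator.h;
   the helper name is hypothetical.  */

static bool ATTRIBUTE_UNUSED
example_seq_may_clobber_ref_p (gimple_seq seq, tree ref)
{
  gimple_stmt_iterator gsi;
  ao_ref r;

  ao_ref_init (&r, ref);
  for (gsi = gsi_start (seq); !gsi_end_p (gsi); gsi_next (&gsi))
    if (stmt_may_clobber_ref_p_1 (gsi_stmt (gsi), &r))
      return true;
  return false;
}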
/* If STMT kills the memory reference REF return true, otherwise
   return false.  */

bool
stmt_kills_ref_p (gimple stmt, ao_ref *ref)
{
  if (!ao_ref_base (ref))
    return false;

  if (gimple_has_lhs (stmt)
      && TREE_CODE (gimple_get_lhs (stmt)) != SSA_NAME
      /* The assignment is not necessarily carried out if it can throw
	 and we can catch it in the current function where we could inspect
	 the previous value.
	 ??? We only need to care about the RHS throwing.  For aggregate
	 assignments or similar calls and non-call exceptions the LHS
	 might throw as well.  */
      && !stmt_can_throw_internal (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      /* If LHS is literally a base of the access we are done.  */
      if (ref->ref)
	{
	  tree base = ref->ref;
	  if (handled_component_p (base))
	    {
	      tree saved_lhs0 = NULL_TREE;
	      if (handled_component_p (lhs))
		{
		  saved_lhs0 = TREE_OPERAND (lhs, 0);
		  TREE_OPERAND (lhs, 0) = integer_zero_node;
		}
	      do
		{
		  /* Just compare the outermost handled component, if
		     they are equal we have found a possible common
		     base.  */
		  tree saved_base0 = TREE_OPERAND (base, 0);
		  TREE_OPERAND (base, 0) = integer_zero_node;
		  bool res = operand_equal_p (lhs, base, 0);
		  TREE_OPERAND (base, 0) = saved_base0;
		  if (res)
		    break;
		  /* Otherwise drop handled components of the access.  */
		  base = saved_base0;
		}
	      while (handled_component_p (base));
	      if (saved_lhs0)
		TREE_OPERAND (lhs, 0) = saved_lhs0;
	    }
	  /* Finally check if lhs is equal to the base candidate
	     of the access.  */
	  if (operand_equal_p (lhs, base, 0))
	    return true;
	}

      /* Now look for non-literal equal bases with the restriction of
         handling constant offset and size.  */
      /* For a must-alias check we need to be able to constrain
	 the access properly.  */
      if (ref->max_size == -1)
	return false;
      HOST_WIDE_INT size, offset, max_size, ref_offset = ref->offset;
      tree base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
      /* We can get MEM[symbol: sZ, index: D.8862_1] here,
	 so base == ref->base does not always hold.  */
      if (base != ref->base)
	{
	  /* If both base and ref->base are MEM_REFs, only compare the
	     first operand, and if the second operand isn't equal constant,
	     try to add the offsets into offset and ref_offset.  */
	  if (TREE_CODE (base) == MEM_REF && TREE_CODE (ref->base) == MEM_REF
	      && TREE_OPERAND (base, 0) == TREE_OPERAND (ref->base, 0))
	    {
	      if (!tree_int_cst_equal (TREE_OPERAND (base, 1),
				       TREE_OPERAND (ref->base, 1)))
		{
		  offset_int off1 = mem_ref_offset (base);
		  off1 = wi::lshift (off1, LOG2_BITS_PER_UNIT);
		  off1 += offset;
		  offset_int off2 = mem_ref_offset (ref->base);
		  off2 = wi::lshift (off2, LOG2_BITS_PER_UNIT);
		  off2 += ref_offset;
		  if (wi::fits_shwi_p (off1) && wi::fits_shwi_p (off2))
		    {
		      offset = off1.to_shwi ();
		      ref_offset = off2.to_shwi ();
		    }
		  else
		    size = -1;
		}
	    }
	  else
	    size = -1;
	}
      /* For a must-alias check we need to be able to constrain
	 the access properly.  */
      if (size != -1 && size == max_size)
	{
	  if (offset <= ref_offset
	      && offset + size >= ref_offset + ref->max_size)
	    return true;
	}
    }

  if (is_gimple_call (stmt))
    {
      tree callee = gimple_call_fndecl (stmt);
      if (callee != NULL_TREE
	  && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
	switch (DECL_FUNCTION_CODE (callee))
	  {
	  case BUILT_IN_FREE:
	    {
	      tree ptr = gimple_call_arg (stmt, 0);
	      tree base = ao_ref_base (ref);
	      if (base && TREE_CODE (base) == MEM_REF
		  && TREE_OPERAND (base, 0) == ptr)
		return true;
	      break;
	    }

	  case BUILT_IN_MEMCPY:
	  case BUILT_IN_MEMPCPY:
	  case BUILT_IN_MEMMOVE:
	  case BUILT_IN_MEMSET:
	  case BUILT_IN_MEMCPY_CHK:
	  case BUILT_IN_MEMPCPY_CHK:
	  case BUILT_IN_MEMMOVE_CHK:
	  case BUILT_IN_MEMSET_CHK:
	    {
	      /* For a must-alias check we need to be able to constrain
		 the access properly.  */
	      if (ref->max_size == -1)
		return false;
	      tree dest = gimple_call_arg (stmt, 0);
	      tree len = gimple_call_arg (stmt, 2);
	      if (!tree_fits_shwi_p (len))
		return false;
	      tree rbase = ref->base;
	      offset_int roffset = ref->offset;
	      ao_ref dref;
	      ao_ref_init_from_ptr_and_size (&dref, dest, len);
	      tree base = ao_ref_base (&dref);
	      offset_int offset = dref.offset;
	      if (!base || dref.size == -1)
		return false;
	      if (TREE_CODE (base) == MEM_REF)
		{
		  if (TREE_CODE (rbase) != MEM_REF)
		    return false;
		  // Compare pointers.
		  offset += wi::lshift (mem_ref_offset (base),
					LOG2_BITS_PER_UNIT);
		  roffset += wi::lshift (mem_ref_offset (rbase),
					 LOG2_BITS_PER_UNIT);
		  base = TREE_OPERAND (base, 0);
		  rbase = TREE_OPERAND (rbase, 0);
		}
	      if (base == rbase
		  && wi::les_p (offset, roffset)
		  && wi::les_p (roffset + ref->max_size,
				offset + wi::lshift (wi::to_offset (len),
						     LOG2_BITS_PER_UNIT)))
		return true;
	      break;
	    }

	  case BUILT_IN_VA_END:
	    {
	      tree ptr = gimple_call_arg (stmt, 0);
	      if (TREE_CODE (ptr) == ADDR_EXPR)
		{
		  tree base = ao_ref_base (ref);
		  if (TREE_OPERAND (ptr, 0) == base)
		    return true;
		}
	      break;
	    }

	  default:;
	  }
    }
  return false;
}

bool
stmt_kills_ref_p (gimple stmt, tree ref)
{
  ao_ref r;
  ao_ref_init (&r, ref);
  return stmt_kills_ref_p (stmt, &r);
}
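
/* Illustrative sketch, not part of the original file: the shape of a
   dead-store query.  A store FIRST is redundant with respect to a later
   store SECOND if SECOND must write every byte FIRST writes (and FIRST's
   value is not used in between, which this helper deliberately does not
   check).  The function name is hypothetical.  */

static bool ATTRIBUTE_UNUSED
example_later_store_covers_p (gimple first, gimple second)
{
  tree lhs = gimple_get_lhs (first);
  if (!lhs || TREE_CODE (lhs) == SSA_NAME)
    return false;
  ao_ref r;
  ao_ref_init (&r, lhs);
  return stmt_kills_ref_p (second, &r);
}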
/* Walk the virtual use-def chain of VUSE until hitting the virtual operand
   TARGET or a statement clobbering the memory reference REF, in which
   case false is returned.  The walk starts with VUSE, one argument of PHI.  */

static bool
maybe_skip_until (gimple phi, tree target, ao_ref *ref,
		  tree vuse, unsigned int *cnt, bitmap *visited,
		  bool abort_on_visited,
		  void *(*translate)(ao_ref *, tree, void *, bool),
		  void *data)
{
  basic_block bb = gimple_bb (phi);

  if (!*visited)
    *visited = BITMAP_ALLOC (NULL);

  bitmap_set_bit (*visited, SSA_NAME_VERSION (PHI_RESULT (phi)));

  /* Walk until we hit the target.  */
  while (vuse != target)
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
      /* Recurse for PHI nodes.  */
      if (gimple_code (def_stmt) == GIMPLE_PHI)
	{
	  /* An already visited PHI node ends the walk successfully.  */
	  if (bitmap_bit_p (*visited, SSA_NAME_VERSION (PHI_RESULT (def_stmt))))
	    return !abort_on_visited;
	  vuse = get_continuation_for_phi (def_stmt, ref, cnt,
					   visited, abort_on_visited,
					   translate, data);
	  if (!vuse)
	    return false;
	  continue;
	}
      else if (gimple_nop_p (def_stmt))
	return false;
      else
	{
	  /* A clobbering statement or the end of the IL ends it failing.  */
	  ++*cnt;
	  if (stmt_may_clobber_ref_p_1 (def_stmt, ref))
	    {
	      if (translate
		  && (*translate) (ref, vuse, data, true) == NULL)
		;
	      else
		return false;
	    }
	}
      /* If we reach a new basic-block see if we already skipped it
         in a previous walk that ended successfully.  */
      if (gimple_bb (def_stmt) != bb)
	{
	  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (vuse)))
	    return !abort_on_visited;
	  bb = gimple_bb (def_stmt);
	}
      vuse = gimple_vuse (def_stmt);
    }
  return true;
}
/* For two PHI arguments ARG0 and ARG1 try to skip non-aliasing code
   until we hit the phi argument definition that dominates the other one.
   Return that, or NULL_TREE if there is no such definition.  */

static tree
get_continuation_for_phi_1 (gimple phi, tree arg0, tree arg1,
			    ao_ref *ref, unsigned int *cnt,
			    bitmap *visited, bool abort_on_visited,
			    void *(*translate)(ao_ref *, tree, void *, bool),
			    void *data)
{
  gimple def0 = SSA_NAME_DEF_STMT (arg0);
  gimple def1 = SSA_NAME_DEF_STMT (arg1);
  tree common_vuse;

  if (arg0 == arg1)
    return arg0;
  else if (gimple_nop_p (def0)
	   || (!gimple_nop_p (def1)
	       && dominated_by_p (CDI_DOMINATORS,
				  gimple_bb (def1), gimple_bb (def0))))
    {
      if (maybe_skip_until (phi, arg0, ref, arg1, cnt,
			    visited, abort_on_visited, translate, data))
	return arg0;
    }
  else if (gimple_nop_p (def1)
	   || dominated_by_p (CDI_DOMINATORS,
			      gimple_bb (def0), gimple_bb (def1)))
    {
      if (maybe_skip_until (phi, arg1, ref, arg0, cnt,
			    visited, abort_on_visited, translate, data))
	return arg1;
    }
  /* Special case of a diamond:

       goto (cond) ? L1 : L2
     L1: store1 = ...    #MEM_2 = vuse(MEM_1)
	 goto L3
     L2: store2 = ...    #MEM_3 = vuse(MEM_1)
     L3: MEM_4 = PHI<MEM_2, MEM_3>

     We were called with the PHI at L3, MEM_2 and MEM_3 don't
     dominate each other, but still we can easily skip this PHI node
     if we recognize that the vuse MEM operand is the same for both,
     and that we can skip both statements (they don't clobber us).
     This is still linear.  Don't use maybe_skip_until, that might
     potentially be slow.  */
  else if ((common_vuse = gimple_vuse (def0))
	   && common_vuse == gimple_vuse (def1))
    {
      *cnt += 2;
      if ((!stmt_may_clobber_ref_p_1 (def0, ref)
	   || (translate
	       && (*translate) (ref, arg0, data, true) == NULL))
	  && (!stmt_may_clobber_ref_p_1 (def1, ref)
	      || (translate
		  && (*translate) (ref, arg1, data, true) == NULL)))
	return common_vuse;
    }

  return NULL_TREE;
}
/* Starting from a PHI node for the virtual operand of the memory reference
   REF find a continuation virtual operand that allows us to continue walking
   statements dominating PHI, skipping only statements that cannot possibly
   clobber REF.  Increments *CNT for each alias disambiguation done.
   Returns NULL_TREE if no suitable virtual operand can be found.  */

tree
get_continuation_for_phi (gimple phi, ao_ref *ref,
			  unsigned int *cnt, bitmap *visited,
			  bool abort_on_visited,
			  void *(*translate)(ao_ref *, tree, void *, bool),
			  void *data)
{
  unsigned nargs = gimple_phi_num_args (phi);

  /* Through a single-argument PHI we can simply look through.  */
  if (nargs == 1)
    return PHI_ARG_DEF (phi, 0);

  /* For two or more arguments try to pairwise skip non-aliasing code
     until we hit the phi argument definition that dominates the other one.  */
  else if (nargs >= 2)
    {
      tree arg0, arg1;
      unsigned i;

      /* Find a candidate for the virtual operand whose definition
	 dominates those of all others.  */
      arg0 = PHI_ARG_DEF (phi, 0);
      if (!SSA_NAME_IS_DEFAULT_DEF (arg0))
	for (i = 1; i < nargs; ++i)
	  {
	    arg1 = PHI_ARG_DEF (phi, i);
	    if (SSA_NAME_IS_DEFAULT_DEF (arg1))
	      {
		arg0 = arg1;
		break;
	      }
	    if (dominated_by_p (CDI_DOMINATORS,
				gimple_bb (SSA_NAME_DEF_STMT (arg0)),
				gimple_bb (SSA_NAME_DEF_STMT (arg1))))
	      arg0 = arg1;
	  }

      /* Then pairwise reduce against the found candidate.  */
      for (i = 0; i < nargs; ++i)
	{
	  arg1 = PHI_ARG_DEF (phi, i);
	  arg0 = get_continuation_for_phi_1 (phi, arg0, arg1, ref,
					     cnt, visited, abort_on_visited,
					     translate, data);
	  if (!arg0)
	    return NULL_TREE;
	}

      return arg0;
    }

  return NULL_TREE;
}
/* Based on the memory reference REF and its virtual use VUSE call
   WALKER for each virtual use that is equivalent to VUSE, including VUSE
   itself.  That is, for each virtual use for which its defining statement
   does not clobber REF.

   WALKER is called with REF, the current virtual use and DATA.  If
   WALKER returns non-NULL the walk stops and its result is returned.
   At the end of a non-successful walk NULL is returned.

   TRANSLATE, if non-NULL, is called with a pointer to REF, the virtual
   use whose definition is a statement that may clobber REF, and DATA.
   If TRANSLATE returns (void *)-1 the walk stops and NULL is returned.
   If TRANSLATE returns non-NULL the walk stops and its result is returned.
   If TRANSLATE returns NULL the walk continues and TRANSLATE is supposed
   to adjust REF and *DATA to make that valid.

   VALUEIZE, if non-NULL, is called with the next VUSE that is considered,
   and its return value is substituted for that.  This can be used to
   implement optimistic value-numbering, for example.  Note that the
   VUSE argument is assumed to be valueized already.

   TODO: Cache the vector of equivalent vuses per ref, vuse pair.  */

void *
walk_non_aliased_vuses (ao_ref *ref, tree vuse,
			void *(*walker)(ao_ref *, tree, unsigned int, void *),
			void *(*translate)(ao_ref *, tree, void *, bool),
			tree (*valueize)(tree),
			void *data)
{
  bitmap visited = NULL;
  void *res;
  unsigned int cnt = 0;
  bool translated = false;

  timevar_push (TV_ALIAS_STMT_WALK);

  do
    {
      gimple def_stmt;

      /* ???  Do we want to account this to TV_ALIAS_STMT_WALK?  */
      res = (*walker) (ref, vuse, cnt, data);
      /* Abort walk.  */
      if (res == (void *)-1)
	{
	  res = NULL;
	  break;
	}
      /* Lookup succeeded.  */
      else if (res != NULL)
	break;

      if (valueize)
	vuse = valueize (vuse);
      def_stmt = SSA_NAME_DEF_STMT (vuse);
      if (gimple_nop_p (def_stmt))
	break;
      else if (gimple_code (def_stmt) == GIMPLE_PHI)
	vuse = get_continuation_for_phi (def_stmt, ref, &cnt,
					 &visited, translated, translate, data);
      else
	{
	  cnt++;
	  if (stmt_may_clobber_ref_p_1 (def_stmt, ref))
	    {
	      if (!translate)
		break;
	      res = (*translate) (ref, vuse, data, false);
	      /* Failed lookup and translation.  */
	      if (res == (void *)-1)
		{
		  res = NULL;
		  break;
		}
	      /* Lookup succeeded.  */
	      else if (res != NULL)
		break;
	      /* Translation succeeded, continue walking.  */
	      translated = true;
	    }
	  vuse = gimple_vuse (def_stmt);
	}
    }
  while (vuse);

  if (visited)
    BITMAP_FREE (visited);

  timevar_pop (TV_ALIAS_STMT_WALK);

  return res;
}
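
/* Illustrative sketch, not part of the original file: a minimal client of
   walk_non_aliased_vuses.  The walker only counts how many virtual uses
   are equivalent to the starting VUSE for REF; it always returns NULL so
   the walk continues until a clobber or the function entry is reached.
   Both helper names are hypothetical.  */

static void *
example_count_walker (ao_ref *ref ATTRIBUTE_UNUSED, tree vuse ATTRIBUTE_UNUSED,
		      unsigned int cnt ATTRIBUTE_UNUSED, void *data)
{
  ++*(unsigned int *) data;
  return NULL;
}

static unsigned int ATTRIBUTE_UNUSED
example_count_equivalent_vuses (ao_ref *ref, tree vuse)
{
  unsigned int n = 0;
  /* No TRANSLATE and no VALUEIZE callbacks; the walk simply stops at the
     first statement that may clobber REF.  */
  walk_non_aliased_vuses (ref, vuse, example_count_walker,
			  NULL, NULL, &n);
  return n;
}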
/* Based on the memory reference REF call WALKER for each vdef whose
   defining statement may clobber REF, starting with VDEF.  If REF
   is NULL_TREE, each defining statement is visited.

   WALKER is called with REF, the current vdef and DATA.  If WALKER
   returns true the walk is stopped, otherwise it continues.

   If function entry is reached, FUNCTION_ENTRY_REACHED is set to true.
   The pointer may be NULL and then we do not track this information.

   At PHI nodes walk_aliased_vdefs forks into one walk for each
   PHI argument (but only one walk continues at merge points); the
   return value is true if any of the walks was successful.

   The function returns the number of statements walked.  */

static unsigned int
walk_aliased_vdefs_1 (ao_ref *ref, tree vdef,
		      bool (*walker)(ao_ref *, tree, void *), void *data,
		      bitmap *visited, unsigned int cnt,
		      bool *function_entry_reached)
{
  do
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (vdef);

      if (*visited
	  && !bitmap_set_bit (*visited, SSA_NAME_VERSION (vdef)))
	return cnt;

      if (gimple_nop_p (def_stmt))
	{
	  if (function_entry_reached)
	    *function_entry_reached = true;
	  return cnt;
	}
      else if (gimple_code (def_stmt) == GIMPLE_PHI)
	{
	  unsigned i;
	  if (!*visited)
	    *visited = BITMAP_ALLOC (NULL);
	  for (i = 0; i < gimple_phi_num_args (def_stmt); ++i)
	    cnt += walk_aliased_vdefs_1 (ref, gimple_phi_arg_def (def_stmt, i),
					 walker, data, visited, 0,
					 function_entry_reached);
	  return cnt;
	}

      /* ???  Do we want to account this to TV_ALIAS_STMT_WALK?  */
      cnt++;
      if ((!ref
	   || stmt_may_clobber_ref_p_1 (def_stmt, ref))
	  && (*walker) (ref, vdef, data))
	return cnt;

      vdef = gimple_vuse (def_stmt);
    }
  while (1);
}

unsigned int
walk_aliased_vdefs (ao_ref *ref, tree vdef,
		    bool (*walker)(ao_ref *, tree, void *), void *data,
		    bitmap *visited,
		    bool *function_entry_reached)
{
  bitmap local_visited = NULL;
  unsigned int ret;

  timevar_push (TV_ALIAS_STMT_WALK);

  if (function_entry_reached)
    *function_entry_reached = false;

  ret = walk_aliased_vdefs_1 (ref, vdef, walker, data,
			      visited ? visited : &local_visited, 0,
			      function_entry_reached);
  if (local_visited)
    BITMAP_FREE (local_visited);

  timevar_pop (TV_ALIAS_STMT_WALK);

  return ret;
}
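
/* Illustrative sketch, not part of the original file: a vdef walker that
   collects every statement that may clobber REF on the virtual use-def
   chain starting at the VUSE of USE_STMT.  Returning false from the
   walker means the walk continues.  Helper names are hypothetical.  */

static bool
example_collect_clobber (ao_ref *ref ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  vec<gimple> *clobbers = (vec<gimple> *) data;
  clobbers->safe_push (SSA_NAME_DEF_STMT (vdef));
  return false;
}

static unsigned int ATTRIBUTE_UNUSED
example_collect_clobbers (ao_ref *ref, gimple use_stmt, vec<gimple> *clobbers)
{
  /* Pass a NULL visited bitmap so walk_aliased_vdefs manages its own,
     and do not track whether function entry was reached.  */
  return walk_aliased_vdefs (ref, gimple_vuse (use_stmt),
			     example_collect_clobber, clobbers, NULL, NULL);
}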