/* Alias analysis for trees.
   Copyright (C) 2004-2021 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "timevar.h"	/* for TV_ALIAS_STMT_WALK */
#include "tree-pretty-print.h"
#include "fold-const.h"
#include "langhooks.h"
#include "ipa-reference.h"
#include "ipa-modref-tree.h"
#include "ipa-modref.h"
#include "attr-fnspec.h"
#include "gimple-pretty-print.h"
#include "print-tree.h"
#include "tree-ssa-alias-compare.h"
/* Broad overview of how alias analysis on gimple works:

   Statements clobbering or using memory are linked through the
   virtual operand factored use-def chain.  The virtual operand
   is unique per function, its symbol is accessible via gimple_vop (cfun).
   Virtual operands are used for efficiently walking memory statements
   in the gimple IL and are useful for things like value-numbering as
   a generation count for memory references.

   SSA_NAME pointers may have associated points-to information
   accessible via the SSA_NAME_PTR_INFO macro.  Flow-insensitive
   points-to information is (re-)computed by the TODO_rebuild_alias
   pass manager todo.  Points-to information is also used for more
   precise tracking of call-clobbered and call-used variables and
   related disambiguations.

   This file contains functions for disambiguating memory references,
   the so-called alias oracle, and tools for walking the gimple IL.

   The main alias-oracle entry-points are

   bool stmt_may_clobber_ref_p (gimple *, tree)

     This function queries if a statement may invalidate (parts of)
     the memory designated by the reference tree argument.

   bool ref_maybe_used_by_stmt_p (gimple *, tree)

     This function queries if a statement may need (parts of) the
     memory designated by the reference tree argument.

   There are variants of these functions that only handle the call
   part of a statement, call_may_clobber_ref_p and ref_maybe_used_by_call_p.
   Note that these do not disambiguate against a possible call lhs.

   bool refs_may_alias_p (tree, tree)

     This function tries to disambiguate two reference trees.

   bool ptr_deref_may_alias_global_p (tree)

     This function queries if dereferencing a pointer variable may
     alias global memory.

   More low-level disambiguators are available and documented in
   this file.  Low-level disambiguators dealing with points-to
   information are in tree-ssa-structalias.c.  */
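
/* Illustrative sketch (not part of the original sources): how a pass
   might consult the entry-points above.  For the C input

       int a;
       void
       f (int *p)
       {
	 a = 1;     // store S1
	 *p = 2;    // store S2
       }

   stmt_may_clobber_ref_p applied to S2 and the tree for 'a' asks
   whether the store through P may invalidate 'a'; it returns true
   unless the points-to solution of P excludes 'a'.  refs_may_alias_p
   applied to the trees for 'a' and '*p' answers the reference-level
   question without looking at a particular statement.  */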
static int nonoverlapping_refs_since_match_p (tree, tree, tree, tree, bool);
static bool nonoverlapping_component_refs_p (const_tree, const_tree);
/* Query statistics for the different low-level disambiguators.
   A high-level query may trigger multiple of them.  */

static struct {
  unsigned HOST_WIDE_INT refs_may_alias_p_may_alias;
  unsigned HOST_WIDE_INT refs_may_alias_p_no_alias;
  unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_may_alias;
  unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_no_alias;
  unsigned HOST_WIDE_INT call_may_clobber_ref_p_may_alias;
  unsigned HOST_WIDE_INT call_may_clobber_ref_p_no_alias;
  unsigned HOST_WIDE_INT aliasing_component_refs_p_may_alias;
  unsigned HOST_WIDE_INT aliasing_component_refs_p_no_alias;
  unsigned HOST_WIDE_INT nonoverlapping_component_refs_p_may_alias;
  unsigned HOST_WIDE_INT nonoverlapping_component_refs_p_no_alias;
  unsigned HOST_WIDE_INT nonoverlapping_refs_since_match_p_may_alias;
  unsigned HOST_WIDE_INT nonoverlapping_refs_since_match_p_must_overlap;
  unsigned HOST_WIDE_INT nonoverlapping_refs_since_match_p_no_alias;
  unsigned HOST_WIDE_INT modref_use_may_alias;
  unsigned HOST_WIDE_INT modref_use_no_alias;
  unsigned HOST_WIDE_INT modref_clobber_may_alias;
  unsigned HOST_WIDE_INT modref_clobber_no_alias;
  unsigned HOST_WIDE_INT modref_tests;
  unsigned HOST_WIDE_INT modref_baseptr_tests;
} alias_stats;
void
dump_alias_stats (FILE *s)
{
  fprintf (s, "\nAlias oracle query stats:\n");
  fprintf (s, "  refs_may_alias_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.refs_may_alias_p_no_alias,
	   alias_stats.refs_may_alias_p_no_alias
	   + alias_stats.refs_may_alias_p_may_alias);
  fprintf (s, "  ref_maybe_used_by_call_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.ref_maybe_used_by_call_p_no_alias,
	   alias_stats.ref_maybe_used_by_call_p_no_alias
	   + alias_stats.ref_maybe_used_by_call_p_may_alias);
  fprintf (s, "  call_may_clobber_ref_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.call_may_clobber_ref_p_no_alias,
	   alias_stats.call_may_clobber_ref_p_no_alias
	   + alias_stats.call_may_clobber_ref_p_may_alias);
  fprintf (s, "  nonoverlapping_component_refs_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.nonoverlapping_component_refs_p_no_alias,
	   alias_stats.nonoverlapping_component_refs_p_no_alias
	   + alias_stats.nonoverlapping_component_refs_p_may_alias);
  fprintf (s, "  nonoverlapping_refs_since_match_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" must overlaps, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.nonoverlapping_refs_since_match_p_no_alias,
	   alias_stats.nonoverlapping_refs_since_match_p_must_overlap,
	   alias_stats.nonoverlapping_refs_since_match_p_no_alias
	   + alias_stats.nonoverlapping_refs_since_match_p_may_alias
	   + alias_stats.nonoverlapping_refs_since_match_p_must_overlap);
  fprintf (s, "  aliasing_component_refs_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.aliasing_component_refs_p_no_alias,
	   alias_stats.aliasing_component_refs_p_no_alias
	   + alias_stats.aliasing_component_refs_p_may_alias);
  dump_alias_stats_in_alias_c (s);
  fprintf (s, "\nModref stats:\n");
  fprintf (s, "  modref use: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.modref_use_no_alias,
	   alias_stats.modref_use_no_alias
	   + alias_stats.modref_use_may_alias);
  fprintf (s, "  modref clobber: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n"
	   "  " HOST_WIDE_INT_PRINT_DEC" tbaa queries (%f per modref query)\n"
	   "  " HOST_WIDE_INT_PRINT_DEC" base compares (%f per modref query)\n",
	   alias_stats.modref_clobber_no_alias,
	   alias_stats.modref_clobber_no_alias
	   + alias_stats.modref_clobber_may_alias,
	   alias_stats.modref_tests,
	   ((double) alias_stats.modref_tests)
	   / (alias_stats.modref_clobber_no_alias
	      + alias_stats.modref_clobber_may_alias),
	   alias_stats.modref_baseptr_tests,
	   ((double) alias_stats.modref_baseptr_tests)
	   / (alias_stats.modref_clobber_no_alias
	      + alias_stats.modref_clobber_may_alias));
}
/* Return true if dereferencing PTR may alias a global variable.  */

bool
ptr_deref_may_alias_global_p (tree ptr)
{
  struct ptr_info_def *pi;

  /* If we end up with a pointer constant here that may point
     to anything, return true.  */
  if (TREE_CODE (ptr) != SSA_NAME)
    return true;

  pi = SSA_NAME_PTR_INFO (ptr);

  /* If we do not have points-to information for this variable,
     we have to assume it may point to anything.  */
  if (!pi)
    return true;

  /* ??? This does not use TBAA to prune globals ptr may not access.  */
  return pt_solution_includes_global (&pi->pt);
}
/* Return true if dereferencing PTR may alias DECL.
   The caller is responsible for applying TBAA to see if PTR
   may access DECL at all.  */
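
/* Illustrative sketch (not part of the original sources): in

       int
       f (int *p)
       {
	 int local = 0;
	 *p = 1;
	 return local;
       }

   this predicate for PTR = p and DECL = local is false: LOCAL is
   never address-taken, so may_be_aliased (local) fails below and the
   dereference of P cannot touch it, whatever its points-to set.  */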
static bool
ptr_deref_may_alias_decl_p (tree ptr, tree decl)
{
  struct ptr_info_def *pi;

  /* Conversions are irrelevant for points-to information and
     data-dependence analysis can feed us those.  */
  STRIP_NOPS (ptr);

  /* Anything we do not explicitly handle aliases.  */
  if ((TREE_CODE (ptr) != SSA_NAME
       && TREE_CODE (ptr) != ADDR_EXPR
       && TREE_CODE (ptr) != POINTER_PLUS_EXPR)
      || !POINTER_TYPE_P (TREE_TYPE (ptr))
      || (!VAR_P (decl)
	  && TREE_CODE (decl) != PARM_DECL
	  && TREE_CODE (decl) != RESULT_DECL))
    return true;

  /* Disregard pointer offsetting.  */
  if (TREE_CODE (ptr) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr = TREE_OPERAND (ptr, 0);
	}
      while (TREE_CODE (ptr) == POINTER_PLUS_EXPR);
      return ptr_deref_may_alias_decl_p (ptr, decl);
    }

  /* ADDR_EXPR pointers either just offset another pointer or directly
     specify the pointed-to set.  */
  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	ptr = TREE_OPERAND (base, 0);
      else if (base
	       && DECL_P (base))
	return compare_base_decls (base, decl) != 0;
      else if (base
	       && CONSTANT_CLASS_P (base))
	return false;
      else
	return true;
    }

  /* Non-aliased variables cannot be pointed to.  */
  if (!may_be_aliased (decl))
    return false;

  /* If we do not have useful points-to information for this pointer
     we cannot disambiguate anything else.  */
  pi = SSA_NAME_PTR_INFO (ptr);
  if (!pi)
    return true;

  return pt_solution_includes (&pi->pt, decl);
}
/* Return true if dereferenced PTR1 and PTR2 may alias.
   The caller is responsible for applying TBAA to see if accesses
   through PTR1 and PTR2 may conflict at all.  */
bool
ptr_derefs_may_alias_p (tree ptr1, tree ptr2)
{
  struct ptr_info_def *pi1, *pi2;

  /* Conversions are irrelevant for points-to information and
     data-dependence analysis can feed us those.  */
  STRIP_NOPS (ptr1);
  STRIP_NOPS (ptr2);

  /* Disregard pointer offsetting.  */
  if (TREE_CODE (ptr1) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr1 = TREE_OPERAND (ptr1, 0);
	}
      while (TREE_CODE (ptr1) == POINTER_PLUS_EXPR);
      return ptr_derefs_may_alias_p (ptr1, ptr2);
    }
  if (TREE_CODE (ptr2) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr2 = TREE_OPERAND (ptr2, 0);
	}
      while (TREE_CODE (ptr2) == POINTER_PLUS_EXPR);
      return ptr_derefs_may_alias_p (ptr1, ptr2);
    }

  /* ADDR_EXPR pointers either just offset another pointer or directly
     specify the pointed-to set.  */
  if (TREE_CODE (ptr1) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr1, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	return ptr_derefs_may_alias_p (TREE_OPERAND (base, 0), ptr2);
      else if (base
	       && DECL_P (base))
	return ptr_deref_may_alias_decl_p (ptr2, base);
      else
	return true;
    }
  if (TREE_CODE (ptr2) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr2, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	return ptr_derefs_may_alias_p (ptr1, TREE_OPERAND (base, 0));
      else if (base
	       && DECL_P (base))
	return ptr_deref_may_alias_decl_p (ptr1, base);
      else
	return true;
    }

  /* From here we require SSA name pointers.  Anything else aliases.  */
  if (TREE_CODE (ptr1) != SSA_NAME
      || TREE_CODE (ptr2) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (ptr1))
      || !POINTER_TYPE_P (TREE_TYPE (ptr2)))
    return true;

  /* We may end up with two empty points-to solutions for two same pointers.
     In this case we still want to say both pointers alias, so shortcut
     that here.  */
  if (ptr1 == ptr2)
    return true;

  /* If we do not have useful points-to information for either pointer
     we cannot disambiguate anything else.  */
  pi1 = SSA_NAME_PTR_INFO (ptr1);
  pi2 = SSA_NAME_PTR_INFO (ptr2);
  if (!pi1 || !pi2)
    return true;

  /* ??? This does not use TBAA to prune decls from the intersection
     that not both pointers may access.  */
  return pt_solutions_intersect (&pi1->pt, &pi2->pt);
}
/* Return true if dereferencing PTR may alias *REF.
   The caller is responsible for applying TBAA to see if PTR
   may access *REF at all.  */

static bool
ptr_deref_may_alias_ref_p_1 (tree ptr, ao_ref *ref)
{
  tree base = ao_ref_base (ref);

  if (TREE_CODE (base) == MEM_REF
      || TREE_CODE (base) == TARGET_MEM_REF)
    return ptr_derefs_may_alias_p (ptr, TREE_OPERAND (base, 0));
  else if (DECL_P (base))
    return ptr_deref_may_alias_decl_p (ptr, base);

  return true;
}
/* Returns true if PTR1 and PTR2 compare unequal because of points-to.  */

bool
ptrs_compare_unequal (tree ptr1, tree ptr2)
{
  /* First resolve the pointers down to a SSA name pointer base or
     a VAR_DECL, PARM_DECL or RESULT_DECL.  This explicitly does
     not yet try to handle LABEL_DECLs, FUNCTION_DECLs, CONST_DECLs
     or STRING_CSTs which need points-to adjustments to track them
     in the points-to sets.  */
  tree obj1 = NULL_TREE;
  tree obj2 = NULL_TREE;
  if (TREE_CODE (ptr1) == ADDR_EXPR)
    {
      tree tem = get_base_address (TREE_OPERAND (ptr1, 0));
      if (!tem)
	return false;
      if (VAR_P (tem)
	  || TREE_CODE (tem) == PARM_DECL
	  || TREE_CODE (tem) == RESULT_DECL)
	obj1 = tem;
      else if (TREE_CODE (tem) == MEM_REF)
	ptr1 = TREE_OPERAND (tem, 0);
    }
  if (TREE_CODE (ptr2) == ADDR_EXPR)
    {
      tree tem = get_base_address (TREE_OPERAND (ptr2, 0));
      if (!tem)
	return false;
      if (VAR_P (tem)
	  || TREE_CODE (tem) == PARM_DECL
	  || TREE_CODE (tem) == RESULT_DECL)
	obj2 = tem;
      else if (TREE_CODE (tem) == MEM_REF)
	ptr2 = TREE_OPERAND (tem, 0);
    }

  /* Canonicalize ptr vs. object.  */
  if (TREE_CODE (ptr1) == SSA_NAME && obj2)
    {
      std::swap (ptr1, ptr2);
      std::swap (obj1, obj2);
    }

  if (obj1 && obj2)
    /* Other code handles this correctly, no need to duplicate it here.  */;
  else if (obj1 && TREE_CODE (ptr2) == SSA_NAME)
    {
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr2);
      /* We may not use restrict to optimize pointer comparisons.
	 See PR71062.  So we have to assume that restrict-pointed-to
	 may be in fact obj1.  */
      if (!pi
	  || pi->pt.vars_contains_restrict
	  || pi->pt.vars_contains_interposable)
	return false;
      if (VAR_P (obj1)
	  && (TREE_STATIC (obj1) || DECL_EXTERNAL (obj1)))
	{
	  varpool_node *node = varpool_node::get (obj1);
	  /* If obj1 may bind to NULL give up (see below).  */
	  if (! node
	      || ! node->nonzero_address ()
	      || ! decl_binds_to_current_def_p (obj1))
	    return false;
	}
      return !pt_solution_includes (&pi->pt, obj1);
    }

  /* ??? We'd like to handle ptr1 != NULL and ptr1 != ptr2
     but those require pt.null to be conservatively correct.  */
  return false;
}
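
/* Illustrative sketch (not part of the original sources): given

       static int a, b;
       int
       f (int *p)
       {
	 return p == &a;
       }

   when points-to analysis proves P can only point to B, the query
   ptrs_compare_unequal (p, &a) returns true and the comparison can
   be folded to 0.  The restrict/interposable checks above keep the
   fold from firing when the pointed-to set cannot be trusted for
   address comparisons.  */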
/* Returns whether reference REF to BASE may refer to global memory.  */

static bool
ref_may_alias_global_p_1 (tree base)
{
  if (DECL_P (base))
    return is_global_var (base);
  else if (TREE_CODE (base) == MEM_REF
	   || TREE_CODE (base) == TARGET_MEM_REF)
    return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
  return true;
}

bool
ref_may_alias_global_p (ao_ref *ref)
{
  tree base = ao_ref_base (ref);
  return ref_may_alias_global_p_1 (base);
}

bool
ref_may_alias_global_p (tree ref)
{
  tree base = get_base_address (ref);
  return ref_may_alias_global_p_1 (base);
}
/* Return true whether STMT may clobber global memory.  */

bool
stmt_may_clobber_global_p (gimple *stmt)
{
  tree lhs;

  if (!gimple_vdef (stmt))
    return false;

  /* ??? We can ask the oracle whether an artificial pointer
     dereference with a pointer with points-to information covering
     all global memory (what about non-address taken memory?) may be
     clobbered by this call.  As there is at the moment no convenient
     way of doing that without generating garbage do some manual
     checking instead.
     ??? We could make a NULL ao_ref argument to the various
     predicates special, meaning any global memory.  */

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      lhs = gimple_assign_lhs (stmt);
      return (TREE_CODE (lhs) != SSA_NAME
	      && ref_may_alias_global_p (lhs));
    case GIMPLE_CALL:
      return true;
    default:
      return true;
    }
}
/* Dump alias information on FILE.  */

void
dump_alias_info (FILE *file)
{
  unsigned i;
  tree ptr;
  const char *funcname
    = lang_hooks.decl_printable_name (current_function_decl, 2);
  tree var;

  fprintf (file, "\n\nAlias information for %s\n\n", funcname);

  fprintf (file, "Aliased symbols\n\n");

  FOR_EACH_LOCAL_DECL (cfun, i, var)
    {
      if (may_be_aliased (var))
	dump_variable (file, var);
    }

  fprintf (file, "\nCall clobber information\n");

  fprintf (file, "\nESCAPED");
  dump_points_to_solution (file, &cfun->gimple_df->escaped);

  fprintf (file, "\n\nFlow-insensitive points-to information\n\n");

  FOR_EACH_SSA_NAME (i, ptr, cfun)
    {
      struct ptr_info_def *pi;

      if (!POINTER_TYPE_P (TREE_TYPE (ptr))
	  || SSA_NAME_IN_FREE_LIST (ptr))
	continue;

      pi = SSA_NAME_PTR_INFO (ptr);
      if (pi)
	dump_points_to_info_for (file, ptr);
    }

  fprintf (file, "\n");
}
/* Dump alias information on stderr.  */

DEBUG_FUNCTION void
debug_alias_info (void)
{
  dump_alias_info (stderr);
}
/* Dump the points-to set *PT into FILE.  */

void
dump_points_to_solution (FILE *file, struct pt_solution *pt)
{
  if (pt->anything)
    fprintf (file, ", points-to anything");

  if (pt->nonlocal)
    fprintf (file, ", points-to non-local");

  if (pt->escaped)
    fprintf (file, ", points-to escaped");

  if (pt->ipa_escaped)
    fprintf (file, ", points-to unit escaped");

  if (pt->null)
    fprintf (file, ", points-to NULL");

  if (pt->vars)
    {
      fprintf (file, ", points-to vars: ");
      dump_decl_set (file, pt->vars);
      if (pt->vars_contains_nonlocal
	  || pt->vars_contains_escaped
	  || pt->vars_contains_escaped_heap
	  || pt->vars_contains_restrict)
	{
	  const char *comma = "";
	  fprintf (file, " (");
	  if (pt->vars_contains_nonlocal)
	    {
	      fprintf (file, "nonlocal");
	      comma = ", ";
	    }
	  if (pt->vars_contains_escaped)
	    {
	      fprintf (file, "%sescaped", comma);
	      comma = ", ";
	    }
	  if (pt->vars_contains_escaped_heap)
	    {
	      fprintf (file, "%sescaped heap", comma);
	      comma = ", ";
	    }
	  if (pt->vars_contains_restrict)
	    {
	      fprintf (file, "%srestrict", comma);
	      comma = ", ";
	    }
	  if (pt->vars_contains_interposable)
	    fprintf (file, "%sinterposable", comma);
	  fprintf (file, ")");
	}
    }
}
/* Unified dump function for pt_solution.  */

DEBUG_FUNCTION void
debug (pt_solution &ref)
{
  dump_points_to_solution (stderr, &ref);
}

DEBUG_FUNCTION void
debug (pt_solution *ptr)
{
  if (ptr)
    debug (*ptr);
  else
    fprintf (stderr, "<nil>\n");
}
/* Dump points-to information for SSA_NAME PTR into FILE.  */

void
dump_points_to_info_for (FILE *file, tree ptr)
{
  struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);

  print_generic_expr (file, ptr, dump_flags);

  if (pi)
    dump_points_to_solution (file, &pi->pt);
  else
    fprintf (file, ", points-to anything");

  fprintf (file, "\n");
}

/* Dump points-to information for VAR into stderr.  */

DEBUG_FUNCTION void
debug_points_to_info_for (tree var)
{
  dump_points_to_info_for (stderr, var);
}
/* Initializes the alias-oracle reference representation *R from REF.  */
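
/* Illustrative sketch (not part of the original sources): a typical
   caller builds an ao_ref once and asks several questions of it:

       ao_ref r;
       ao_ref_init (&r, ref);
       tree base = ao_ref_base (&r);
       alias_set_type set = ao_ref_alias_set (&r);

   The base, extent and alias sets are computed lazily and cached in R,
   so repeated oracle queries against the same reference stay cheap.  */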
void
ao_ref_init (ao_ref *r, tree ref)
{
  r->ref = ref;
  r->base = NULL_TREE;
  r->offset = 0;
  r->size = -1;
  r->max_size = -1;
  r->ref_alias_set = -1;
  r->base_alias_set = -1;
  r->volatile_p = ref ? TREE_THIS_VOLATILE (ref) : false;
}
/* Returns the base object of the memory reference *REF.  */

tree
ao_ref_base (ao_ref *ref)
{
  bool reverse;

  if (ref->base)
    return ref->base;
  ref->base = get_ref_base_and_extent (ref->ref, &ref->offset, &ref->size,
				       &ref->max_size, &reverse);
  return ref->base;
}

/* Returns the base object alias set of the memory reference *REF.  */

alias_set_type
ao_ref_base_alias_set (ao_ref *ref)
{
  tree base_ref;
  if (ref->base_alias_set != -1)
    return ref->base_alias_set;
  if (!ref->ref)
    return 0;
  base_ref = ref->ref;
  while (handled_component_p (base_ref))
    base_ref = TREE_OPERAND (base_ref, 0);
  ref->base_alias_set = get_alias_set (base_ref);
  return ref->base_alias_set;
}

/* Returns the reference alias set of the memory reference *REF.  */

alias_set_type
ao_ref_alias_set (ao_ref *ref)
{
  if (ref->ref_alias_set != -1)
    return ref->ref_alias_set;
  if (!ref->ref)
    return 0;
  ref->ref_alias_set = get_alias_set (ref->ref);
  return ref->ref_alias_set;
}
/* Returns a type satisfying
   get_deref_alias_set (type) == ao_ref_base_alias_set (REF).  */

tree
ao_ref_base_alias_ptr_type (ao_ref *ref)
{
  tree base_ref;

  if (!ref->ref)
    return NULL_TREE;
  base_ref = ref->ref;
  while (handled_component_p (base_ref))
    base_ref = TREE_OPERAND (base_ref, 0);
  tree ret = reference_alias_ptr_type (base_ref);
  return ret;
}

/* Returns a type satisfying
   get_deref_alias_set (type) == ao_ref_alias_set (REF).  */

tree
ao_ref_alias_ptr_type (ao_ref *ref)
{
  if (!ref->ref)
    return NULL_TREE;
  tree ret = reference_alias_ptr_type (ref->ref);
  return ret;
}
/* Init an alias-oracle reference representation *REF from a gimple pointer
   PTR and a range specified by OFFSET, SIZE and MAX_SIZE under the
   assumption that RANGE_KNOWN is set.

   The access is assumed to be only to or after the pointer target adjusted
   by the offset, not before it (even in the case RANGE_KNOWN is false).  */

void
ao_ref_init_from_ptr_and_range (ao_ref *ref, tree ptr,
				bool range_known,
				poly_int64 offset,
				poly_int64 size,
				poly_int64 max_size)
{
  poly_int64 t, extra_offset = 0;

  ref->ref = NULL_TREE;
  if (TREE_CODE (ptr) == SSA_NAME)
    {
      gimple *stmt = SSA_NAME_DEF_STMT (ptr);
      if (gimple_assign_single_p (stmt)
	  && gimple_assign_rhs_code (stmt) == ADDR_EXPR)
	ptr = gimple_assign_rhs1 (stmt);
      else if (is_gimple_assign (stmt)
	       && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
	       && ptrdiff_tree_p (gimple_assign_rhs2 (stmt), &extra_offset))
	{
	  ptr = gimple_assign_rhs1 (stmt);
	  extra_offset *= BITS_PER_UNIT;
	}
    }

  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      ref->base = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &t);
      if (ref->base)
	ref->offset = BITS_PER_UNIT * t;
      else
	{
	  range_known = false;
	  ref->offset = 0;
	  ref->base = get_base_address (TREE_OPERAND (ptr, 0));
	}
    }
  else
    {
      gcc_assert (POINTER_TYPE_P (TREE_TYPE (ptr)));
      ref->base = build2 (MEM_REF, char_type_node,
			  ptr, null_pointer_node);
      ref->offset = 0;
    }
  ref->offset += extra_offset + offset;
  if (range_known)
    {
      ref->max_size = max_size;
      ref->size = size;
    }
  else
    ref->max_size = ref->size = -1;
  ref->ref_alias_set = 0;
  ref->base_alias_set = 0;
  ref->volatile_p = false;
}
/* Init an alias-oracle reference representation *REF from a gimple pointer
   PTR and a gimple size SIZE in bytes.  If SIZE is NULL_TREE then the
   size is assumed to be unknown.  The access is assumed to be only
   to or after the pointer target, not before it.  */
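
/* Illustrative sketch (not part of the original sources): references
   accessed through a pointer argument of a builtin are modelled this
   way; for a call like memset (p, 0, 32) one would do

       ao_ref d;
       ao_ref_init_from_ptr_and_size (&d, ptr_arg, size_arg);

   yielding a 32-byte reference starting at *p, or a reference of
   unknown extent starting at *p when the size is not constant.  */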
void
ao_ref_init_from_ptr_and_size (ao_ref *ref, tree ptr, tree size)
{
  poly_int64 size_hwi;
  if (size
      && poly_int_tree_p (size, &size_hwi)
      && coeffs_in_range_p (size_hwi, 0, HOST_WIDE_INT_MAX / BITS_PER_UNIT))
    {
      size_hwi = size_hwi * BITS_PER_UNIT;
      ao_ref_init_from_ptr_and_range (ref, ptr, true, 0, size_hwi, size_hwi);
    }
  else
    ao_ref_init_from_ptr_and_range (ref, ptr, false, 0, -1, -1);
}
/* S1 and S2 are TYPE_SIZE or DECL_SIZE.  Compare them:
   Return -1 if S1 < S2
   Return 1 if S1 > S2
   Return 0 if equal or incomparable.  */

static int
compare_sizes (tree s1, tree s2)
{
  if (!s1 || !s2)
    return 0;

  poly_uint64 size1;
  poly_uint64 size2;

  if (!poly_int_tree_p (s1, &size1) || !poly_int_tree_p (s2, &size2))
    return 0;
  if (known_lt (size1, size2))
    return -1;
  if (known_lt (size2, size1))
    return 1;
  return 0;
}
/* Compare TYPE1 and TYPE2 by their sizes.
   Return -1 if size of TYPE1 < size of TYPE2
   Return 1 if size of TYPE1 > size of TYPE2
   Return 0 if types are of equal sizes or we cannot compare them.  */

static int
compare_type_sizes (tree type1, tree type2)
{
  /* Be conservative for arrays and vectors.  We want to support partial
     overlap on int[3] and int[3] as tested in gcc.dg/torture/alias-2.c.  */
  while (TREE_CODE (type1) == ARRAY_TYPE
	 || TREE_CODE (type1) == VECTOR_TYPE)
    type1 = TREE_TYPE (type1);
  while (TREE_CODE (type2) == ARRAY_TYPE
	 || TREE_CODE (type2) == VECTOR_TYPE)
    type2 = TREE_TYPE (type2);
  return compare_sizes (TYPE_SIZE (type1), TYPE_SIZE (type2));
}
/* Return 1 if TYPE1 and TYPE2 are to be considered equivalent for the
   purpose of TBAA.  Return 0 if they are distinct and -1 if we cannot
   decide.  */
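
/* Illustrative sketch (not part of the original sources): with

       typedef int my_int;

   'int', 'my_int' and 'const int' share a TYPE_MAIN_VARIANT, so any
   pair of them compares as 1; 'int' vs. 'float' have distinct
   canonical types and non-conflicting alias sets, giving 0; and two
   types that would need structural comparison yield -1.  */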
int
same_type_for_tbaa (tree type1, tree type2)
{
  type1 = TYPE_MAIN_VARIANT (type1);
  type2 = TYPE_MAIN_VARIANT (type2);

  /* Handle the most common case first.  */
  if (type1 == type2)
    return 1;

  /* If we would have to do structural comparison bail out.  */
  if (TYPE_STRUCTURAL_EQUALITY_P (type1)
      || TYPE_STRUCTURAL_EQUALITY_P (type2))
    return -1;

  /* Compare the canonical types.  */
  if (TYPE_CANONICAL (type1) == TYPE_CANONICAL (type2))
    return 1;

  /* ??? Array types are not properly unified in all cases as we have
     spurious changes in the index types for example.  Removing this
     causes all sorts of problems with the Fortran frontend.  */
  if (TREE_CODE (type1) == ARRAY_TYPE
      && TREE_CODE (type2) == ARRAY_TYPE)
    return -1;

  /* ??? In Ada, an lvalue of an unconstrained type can be used to access an
     object of one of its constrained subtypes, e.g. when a function with an
     unconstrained parameter passed by reference is called on an object and
     inlined.  But, even in the case of a fixed size, type and subtypes are
     not equivalent enough as to share the same TYPE_CANONICAL, since this
     would mean that conversions between them are useless, whereas they are
     not (e.g. type and subtypes can have different modes).  So, in the end,
     they are only guaranteed to have the same alias set.  */
  alias_set_type set1 = get_alias_set (type1);
  alias_set_type set2 = get_alias_set (type2);
  if (set1 == set2)
    return -1;

  /* Pointers to void are considered compatible with all other pointers,
     so for two pointers see what the alias set resolution thinks.  */
  if (POINTER_TYPE_P (type1)
      && POINTER_TYPE_P (type2)
      && alias_sets_conflict_p (set1, set2))
    return -1;

  /* The types are known to be not equal.  */
  return 0;
}
/* Return true if TYPE is a composite type (i.e. we may apply one of handled
   components on it).  */

static bool
type_has_components_p (tree type)
{
  return AGGREGATE_TYPE_P (type) || VECTOR_TYPE_P (type)
	 || TREE_CODE (type) == COMPLEX_TYPE;
}
/* MATCH1 and MATCH2, which are part of the access paths of REF1 and REF2
   respectively, are either pointing to the same address or are completely
   disjoint.  If PARTIAL_OVERLAP is true, assume that outermost arrays may
   actually overlap.

   Try to disambiguate using the access path starting from the match
   and return false if there is no conflict.

   Helper for aliasing_component_refs_p.  */
static bool
aliasing_matching_component_refs_p (tree match1, tree ref1,
				    poly_int64 offset1, poly_int64 max_size1,
				    tree match2, tree ref2,
				    poly_int64 offset2, poly_int64 max_size2,
				    bool partial_overlap)
{
  poly_int64 offadj, sztmp, msztmp;
  bool reverse;

  if (!partial_overlap)
    {
      get_ref_base_and_extent (match2, &offadj, &sztmp, &msztmp, &reverse);
      offset2 -= offadj;
      get_ref_base_and_extent (match1, &offadj, &sztmp, &msztmp, &reverse);
      offset1 -= offadj;
      if (!ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
	{
	  ++alias_stats.aliasing_component_refs_p_no_alias;
	  return false;
	}
    }

  int cmp = nonoverlapping_refs_since_match_p (match1, ref1, match2, ref2,
					       partial_overlap);
  if (cmp == 1
      || (cmp == -1 && nonoverlapping_component_refs_p (ref1, ref2)))
    {
      ++alias_stats.aliasing_component_refs_p_no_alias;
      return false;
    }
  ++alias_stats.aliasing_component_refs_p_may_alias;
  return true;
}
/* Return true if REF is a reference to a zero sized trailing array, i.e.
   for
     struct foo {int bar; int array[0];} *fooptr;
   the reference fooptr->array.  */

static bool
component_ref_to_zero_sized_trailing_array_p (tree ref)
{
  return (TREE_CODE (ref) == COMPONENT_REF
	  && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 1))) == ARRAY_TYPE
	  && (!TYPE_SIZE (TREE_TYPE (TREE_OPERAND (ref, 1)))
	      || integer_zerop (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (ref, 1)))))
	  && array_at_struct_end_p (ref));
}
/* Worker for aliasing_component_refs_p.  Most parameters match parameters of
   aliasing_component_refs_p.

   Walk access path REF2 and try to find a type matching TYPE1
   (which is a start of possibly aliasing access path REF1).
   If a match is found, try to disambiguate.

   Return 0 for successful disambiguation.
   Return 1 if a match was found but disambiguation failed.
   Return -1 if there is no match.
   In this case MAYBE_MATCH is set to 0 if there is no type matching TYPE1
   in access path REF2 and -1 if we are not sure.  */
static int
aliasing_component_refs_walk (tree ref1, tree type1, tree base1,
			      poly_int64 offset1, poly_int64 max_size1,
			      tree end_struct_ref1,
			      tree ref2, tree base2,
			      poly_int64 offset2, poly_int64 max_size2,
			      bool *maybe_match)
{
  tree ref = ref2;
  int same_p = 0;

  while (true)
    {
      /* We walk from inner type to the outer types.  If type we see is
	 already too large to be part of type1, terminate the search.  */
      int cmp = compare_type_sizes (type1, TREE_TYPE (ref));

      if (cmp < 0
	  && (!end_struct_ref1
	      || compare_type_sizes (TREE_TYPE (end_struct_ref1),
				     TREE_TYPE (ref)) < 0))
	break;
      /* If types may be of same size, see if we can decide about their
	 equality.  */
      if (cmp == 0)
	{
	  same_p = same_type_for_tbaa (TREE_TYPE (ref), type1);
	  if (same_p == 1)
	    break;
	  /* In case we can't decide whether types are same try to
	     continue looking for the exact match.
	     Remember however that we possibly saw a match
	     to bypass the access path continuations tests we do later.  */
	  if (same_p == -1)
	    *maybe_match = true;
	}
      if (!handled_component_p (ref))
	break;
      ref = TREE_OPERAND (ref, 0);
    }
  if (same_p == 1)
    {
      bool partial_overlap = false;

      /* We assume that arrays can overlap by multiple of their elements
	 size as tested in gcc.dg/torture/alias-2.c.
	 This partial overlap happens only when both arrays are bases of
	 the access and not contained within another component ref.
	 To be safe we also assume partial overlap for VLAs.  */
      if (TREE_CODE (TREE_TYPE (base1)) == ARRAY_TYPE
	  && (!TYPE_SIZE (TREE_TYPE (base1))
	      || TREE_CODE (TYPE_SIZE (TREE_TYPE (base1))) != INTEGER_CST
	      || ref == base2))
	{
	  /* Setting maybe_match to true triggers
	     nonoverlapping_component_refs_p test later that still may do
	     useful disambiguation.  */
	  *maybe_match = true;
	  partial_overlap = true;
	}
      return aliasing_matching_component_refs_p (base1, ref1,
						 offset1, max_size1,
						 ref, ref2,
						 offset2, max_size2,
						 partial_overlap);
    }
  return -1;
}
/* Consider access path1 base1....ref1 and access path2 base2...ref2.
   Return true if they can be composed into a single access path
   base1...ref1...base2...ref2.

   REF_TYPE1 is the type of REF1.  END_STRUCT_PAST_END1 is true if there is
   a trailing array access after REF1 in the non-TBAA part of the access.
   REF1_ALIAS_SET is the alias set of REF1.

   BASE_TYPE2 is the type of base2.  END_STRUCT_REF2 is non-NULL if there is
   a trailing array access in the TBAA part of access path2.
   BASE2_ALIAS_SET is the alias set of base2.  */
static bool
access_path_may_continue_p (tree ref_type1, bool end_struct_past_end1,
			    alias_set_type ref1_alias_set,
			    tree base_type2, tree end_struct_ref2,
			    alias_set_type base2_alias_set)
{
  /* Access path cannot continue past types with no components.  */
  if (!type_has_components_p (ref_type1))
    return false;

  /* If the first access path ends by a type too small to hold the base of
     the second access path, typically the paths cannot continue.

     Punt if end_struct_past_end1 is true.  We want to support arbitrary
     type punning past the first COMPONENT_REF to union because redundant
     store elimination depends on this, see PR92152.  For this reason we
     cannot check the size of the reference because types may partially
     overlap.  */
  if (!end_struct_past_end1)
    {
      if (compare_type_sizes (ref_type1, base_type2) < 0)
	return false;
      /* If path2 contains a trailing array access we can strengthen the
	 check to verify that also the size of the element of the trailing
	 array fits.  In fact we could check for offset + type_size, but we
	 do not track offsets and this is quite a corner case.  */
      if (end_struct_ref2
	  && compare_type_sizes (ref_type1, TREE_TYPE (end_struct_ref2)) < 0)
	return false;
    }
  return (base2_alias_set == ref1_alias_set
	  || alias_set_subset_of (base2_alias_set, ref1_alias_set));
}
/* Determine if the two component references REF1 and REF2 which are
   based on access types TYPE1 and TYPE2 and of which at least one is based
   on an indirect reference may alias.
   REF1_ALIAS_SET, BASE1_ALIAS_SET, REF2_ALIAS_SET and BASE2_ALIAS_SET
   are the respective alias sets.  */
static bool
aliasing_component_refs_p (tree ref1,
			   alias_set_type ref1_alias_set,
			   alias_set_type base1_alias_set,
			   poly_int64 offset1, poly_int64 max_size1,
			   tree ref2,
			   alias_set_type ref2_alias_set,
			   alias_set_type base2_alias_set,
			   poly_int64 offset2, poly_int64 max_size2)
{
  /* If one reference is a component reference through pointers try to find a
     common base and apply offset based disambiguation.  This handles
     for example:
       struct A { int i; int j; } *q;
       struct B { struct A a; int k; } *p;
     disambiguating q->i and p->a.j.  */
  tree base1, base2;
  tree type1, type2;
  bool maybe_match = false;
  tree end_struct_ref1 = NULL, end_struct_ref2 = NULL;
  bool end_struct_past_end1 = false;
  bool end_struct_past_end2 = false;

  /* Choose bases and base types to search for.
     The access path is as follows:
       base....end_of_tbaa_ref...actual_ref
     At one place in the access path may be a reference to a zero sized or
     trailing array.

     We generally discard the segment after end_of_tbaa_ref however
     we need to be careful in case it contains a zero sized or trailing
     array.  These may happen after a reference to union and in this case
     we need to not disambiguate type punning scenarios.

     We set:
	base1 to point to base

	ref1 to point to end_of_tbaa_ref

	end_struct_ref1 to point to the trailing reference (if it exists
	 in range base....end_of_tbaa_ref)

	end_struct_past_end1 is true if this trailing reference occurs in
	 end_of_tbaa_ref...actual_ref.  */
  base1 = ref1;
  while (handled_component_p (base1))
    {
      /* Generally access paths are monotonic in the size of object.  The
	 exception are trailing arrays of structures.  I.e.
	   struct a {int array[0];};
	 or
	   struct a {int array1[0]; int array[];};
	 Such struct has size 0 but accesses to a.array may have non-zero size.
	 In this case the size of TREE_TYPE (base1) is smaller than
	 size of TREE_TYPE (TREE_OPERAND (base1, 0)).

	 Because we compare sizes of arrays just by sizes of their elements,
	 we only need to care about zero sized array fields here.  */
      if (component_ref_to_zero_sized_trailing_array_p (base1))
	{
	  gcc_checking_assert (!end_struct_ref1);
	  end_struct_ref1 = base1;
	}
      if (ends_tbaa_access_path_p (base1))
	{
	  ref1 = TREE_OPERAND (base1, 0);
	  if (end_struct_ref1)
	    {
	      end_struct_past_end1 = true;
	      end_struct_ref1 = NULL;
	    }
	}
      base1 = TREE_OPERAND (base1, 0);
    }
  type1 = TREE_TYPE (base1);
  base2 = ref2;
  while (handled_component_p (base2))
    {
      if (component_ref_to_zero_sized_trailing_array_p (base2))
	{
	  gcc_checking_assert (!end_struct_ref2);
	  end_struct_ref2 = base2;
	}
      if (ends_tbaa_access_path_p (base2))
	{
	  ref2 = TREE_OPERAND (base2, 0);
	  if (end_struct_ref2)
	    {
	      end_struct_past_end2 = true;
	      end_struct_ref2 = NULL;
	    }
	}
      base2 = TREE_OPERAND (base2, 0);
    }
  type2 = TREE_TYPE (base2);

  /* Now search for the type1 in the access path of ref2.  This
     would be a common base for doing offset based disambiguation on.
     This however only makes sense if type2 is big enough to hold type1.  */
  int cmp_outer = compare_type_sizes (type2, type1);

  /* If type2 is big enough to contain type1 walk its access path.
     We also need to care of arrays at the end of structs that may extend
     beyond the end of structure.  If this occurs in the TBAA part of the
     access path, we need to consider the increased type as well.  */
  if (cmp_outer >= 0
      || (end_struct_ref2
	  && compare_type_sizes (TREE_TYPE (end_struct_ref2), type1) >= 0))
    {
      int res = aliasing_component_refs_walk (ref1, type1, base1,
					      offset1, max_size1,
					      end_struct_ref1,
					      ref2, base2, offset2, max_size2,
					      &maybe_match);
      if (res != -1)
	return res;
    }

  /* If we didn't find a common base, try the other way around.  */
  if (cmp_outer <= 0
      || (end_struct_ref1
	  && compare_type_sizes (TREE_TYPE (end_struct_ref1), type1) <= 0))
    {
      int res = aliasing_component_refs_walk (ref2, type2, base2,
					      offset2, max_size2,
					      end_struct_ref2,
					      ref1, base1, offset1, max_size1,
					      &maybe_match);
      if (res != -1)
	return res;
    }

  /* In the following code we make an assumption that the types in access
     paths do not overlap and thus accesses alias only if one path can be
     a continuation of another.  If we were not able to decide about
     equivalence, we need to give up.  */
  if (maybe_match)
    {
      if (!nonoverlapping_component_refs_p (ref1, ref2))
	{
	  ++alias_stats.aliasing_component_refs_p_may_alias;
	  return true;
	}
      ++alias_stats.aliasing_component_refs_p_no_alias;
      return false;
    }

  if (access_path_may_continue_p (TREE_TYPE (ref1), end_struct_past_end1,
				  ref1_alias_set,
				  type2, end_struct_ref2,
				  base2_alias_set)
      || access_path_may_continue_p (TREE_TYPE (ref2), end_struct_past_end2,
				     ref2_alias_set,
				     type1, end_struct_ref1,
				     base1_alias_set))
    {
      ++alias_stats.aliasing_component_refs_p_may_alias;
      return true;
    }
  ++alias_stats.aliasing_component_refs_p_no_alias;
  return false;
}
/* FIELD1 and FIELD2 are two fields of component refs.  We assume
   that bases of both component refs are either equivalent or nonoverlapping.
   We do not assume that the containers of FIELD1 and FIELD2 are of the
   same type or size.

   Return 0 if, in the case the base addresses of the component refs are
   the same, FIELD1 and FIELD2 have the same address too.  Note that FIELD1
   and FIELD2 may not be of the same type or size.

   Return 1 if FIELD1 and FIELD2 are non-overlapping.

   Return -1 otherwise.

   The main difference between 0 and -1 is to let
   nonoverlapping_component_refs_since_match_p discover the semantically
   equivalent part of the access path.

   Note that this function is used even with -fno-strict-aliasing
   and makes use of no TBAA assumptions.  */
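
/* Illustrative sketch (not part of the original sources): for

       struct S { int a; int b; };

   two component refs picking fields 'a' and 'b' of the same
   RECORD_TYPE give 1 (disjoint); picking the same field twice gives 0
   (same address, given equal bases); and a pair of bitfields or
   fields of unrelated containers may give -1 (cannot tell cheaply).  */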
static int
nonoverlapping_component_refs_p_1 (const_tree field1, const_tree field2)
{
  /* If both fields are of the same type, we could save hard work of
     comparing offsets.  */
  tree type1 = DECL_CONTEXT (field1);
  tree type2 = DECL_CONTEXT (field2);

  if (TREE_CODE (type1) == RECORD_TYPE
      && DECL_BIT_FIELD_REPRESENTATIVE (field1))
    field1 = DECL_BIT_FIELD_REPRESENTATIVE (field1);
  if (TREE_CODE (type2) == RECORD_TYPE
      && DECL_BIT_FIELD_REPRESENTATIVE (field2))
    field2 = DECL_BIT_FIELD_REPRESENTATIVE (field2);

  /* ??? Bitfields can overlap at RTL level so punt on them.
     FIXME: RTL expansion should be fixed by adjusting the access path
     when producing MEM_ATTRs for MEMs which are wider than
     the bitfields similarly as done in set_mem_attrs_minus_bitpos.  */
  if (DECL_BIT_FIELD (field1) && DECL_BIT_FIELD (field2))
    return -1;

  /* Assume that different FIELD_DECLs never overlap within a RECORD_TYPE.  */
  if (type1 == type2 && TREE_CODE (type1) == RECORD_TYPE)
    return field1 != field2;

  /* In common case the offsets and bit offsets will be the same.
     However if frontends do not agree on the alignment, they may be
     different even if they actually represent same address.
     Try the common case first and if that fails calculate the
     actual bit offset.  */
  if (tree_int_cst_equal (DECL_FIELD_OFFSET (field1),
			  DECL_FIELD_OFFSET (field2))
      && tree_int_cst_equal (DECL_FIELD_BIT_OFFSET (field1),
			     DECL_FIELD_BIT_OFFSET (field2)))
    return 0;

  /* Note that it may be possible to use component_ref_field_offset
     which would provide offsets as trees.  However constructing and folding
     trees is expensive and does not seem to be worth the compile time
     cost.  */

  poly_uint64 offset1, offset2;
  poly_uint64 bit_offset1, bit_offset2;

  if (poly_int_tree_p (DECL_FIELD_OFFSET (field1), &offset1)
      && poly_int_tree_p (DECL_FIELD_OFFSET (field2), &offset2)
      && poly_int_tree_p (DECL_FIELD_BIT_OFFSET (field1), &bit_offset1)
      && poly_int_tree_p (DECL_FIELD_BIT_OFFSET (field2), &bit_offset2))
    {
      offset1 = (offset1 << LOG2_BITS_PER_UNIT) + bit_offset1;
      offset2 = (offset2 << LOG2_BITS_PER_UNIT) + bit_offset2;

      if (known_eq (offset1, offset2))
	return 0;

      poly_uint64 size1, size2;

      if (poly_int_tree_p (DECL_SIZE (field1), &size1)
	  && poly_int_tree_p (DECL_SIZE (field2), &size2)
	  && !ranges_maybe_overlap_p (offset1, size1, offset2, size2))
	return 1;
    }
  /* Resort to slower overlap checking by looking for matching types in
     the middle of access path.  */
  return -1;
}
/* Return the low bound of an array ref.  Do not produce new trees
   and thus do not care about the particular type of integer constant
   and placeholder exprs.  */

static tree
cheap_array_ref_low_bound (tree ref)
{
  tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));

  /* Avoid expensive array_ref_low_bound.
     The low bound is either stored in operand2, or it is TYPE_MIN_VALUE
     of the domain type, or it is zero.  */
  if (TREE_OPERAND (ref, 2))
    return TREE_OPERAND (ref, 2);
  else if (domain_type && TYPE_MIN_VALUE (domain_type))
    return TYPE_MIN_VALUE (domain_type);
  else
    return integer_zero_node;
}
/* REF1 and REF2 are ARRAY_REFs with either the same base address or which
   are completely disjoint.

   Return 1 if the refs are non-overlapping.
   Return 0 if they are possibly overlapping but if so the overlap again
   starts on the same address.
   Return -1 otherwise.  */
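
/* Illustrative sketch (not part of the original sources): for
   'int a[10]', the refs a[1] and a[2] have matching element sizes and
   distinct constant indices, so the result is 1; a[i] vs. a[j] with
   unknown indices yields 0 (either disjoint or starting at the same
   address); refs whose element sizes cannot be matched cheaply yield
   -1.  */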
static int
nonoverlapping_array_refs_p (tree ref1, tree ref2)
{
  tree index1 = TREE_OPERAND (ref1, 1);
  tree index2 = TREE_OPERAND (ref2, 1);
  tree low_bound1 = cheap_array_ref_low_bound (ref1);
  tree low_bound2 = cheap_array_ref_low_bound (ref2);

  /* Handle zero offsets first: we do not need to match type size in this
     case.  */
  if (operand_equal_p (index1, low_bound1, 0)
      && operand_equal_p (index2, low_bound2, 0))
    return 0;

  /* If type sizes are different, give up.

     Avoid expensive array_ref_element_size.
     If operand 3 is present it denotes size in the alignment units.
     Otherwise size is TYPE_SIZE of the element type.
     Handle only common cases where types are of the same "kind".  */
  if ((TREE_OPERAND (ref1, 3) == NULL) != (TREE_OPERAND (ref2, 3) == NULL))
    return -1;

  tree elmt_type1 = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref1, 0)));
  tree elmt_type2 = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref2, 0)));

  if (TREE_OPERAND (ref1, 3))
    {
      if (TYPE_ALIGN (elmt_type1) != TYPE_ALIGN (elmt_type2)
	  || !operand_equal_p (TREE_OPERAND (ref1, 3),
			       TREE_OPERAND (ref2, 3), 0))
	return -1;
    }
  else
    {
      if (!operand_equal_p (TYPE_SIZE_UNIT (elmt_type1),
			    TYPE_SIZE_UNIT (elmt_type2), 0))
	return -1;
    }

  /* Since we know that type sizes are the same, there is no need to return
     -1 after this point.  Partial overlap cannot be introduced.  */

  /* We may need to fold trees in this case.
     TODO: Handle integer constant case at least.  */
  if (!operand_equal_p (low_bound1, low_bound2, 0))
    return 0;

  if (TREE_CODE (index1) == INTEGER_CST && TREE_CODE (index2) == INTEGER_CST)
    {
      if (tree_int_cst_equal (index1, index2))
	return 0;
      return 1;
    }
  /* TODO: We can use VRP to further disambiguate here.  */
  return 0;
}
/* Try to disambiguate REF1 and REF2 under the assumption that MATCH1 and
   MATCH2 either point to the same address or are disjoint.
   MATCH1 and MATCH2 are assumed to be refs in the access paths of REF1 and
   REF2 respectively, or NULL in the case we established equivalence of
   bases.
   If PARTIAL_OVERLAP is true assume that the toplevel arrays may actually
   overlap by an exact multiple of their element size.

   This test works by matching the initial segment of the access path
   and does not rely on TBAA; thus it is safe for !flag_strict_aliasing
   if the match was determined without use of the TBAA oracle.

   Return 1 if we can determine that component references REF1 and REF2,
   that are within a common DECL, cannot overlap.

   Return 0 if paths are the same and thus there is nothing to disambiguate
   more (i.e. there is must alias assuming there is must alias between
   MATCH1 and MATCH2).

   Return -1 if we cannot determine 0 or 1 - this happens when non-matching
   types were met in the path.
   In this case it may make sense to continue by other disambiguation
   oracles.  */
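
/* Illustrative sketch (not part of the original sources): with

       struct T { int x[4]; int y; };
       struct S { struct T t[2]; } s;

   matching s.t[0].x[1] against s.t[1].x[1] from the common base pops
   the ARRAY_REFs in parallel; the outer indices 0 and 1 are distinct
   constants, so the walk returns 1 (disjoint).  For s.t[i].x[0]
   vs. s.t[i].y the matched COMPONENT_REFs x/y disambiguate instead.  */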
static int
nonoverlapping_refs_since_match_p (tree match1, tree ref1,
				   tree match2, tree ref2,
				   bool partial_overlap)
{
  int ntbaa1 = 0, ntbaa2 = 0;
  /* Early return if there are no references to match, we do not need
     to walk the access paths.

     Do not consider this as may-alias for stats - it is more useful
     to have information how many disambiguations happened provided that
     the query was meaningful.  */

  if (match1 == ref1 || !handled_component_p (ref1)
      || match2 == ref2 || !handled_component_p (ref2))
    return -1;

  auto_vec<tree, 16> component_refs1;
  auto_vec<tree, 16> component_refs2;

  /* Create the stack of handled components for REF1.  */
  while (handled_component_p (ref1) && ref1 != match1)
    {
      /* We use TBAA only to re-synchronize after mismatched refs.  So we
	 do not need to truncate access path after TBAA part ends.  */
      if (ends_tbaa_access_path_p (ref1))
	ntbaa1 = 0;
      else
	ntbaa1++;
      component_refs1.safe_push (ref1);
      ref1 = TREE_OPERAND (ref1, 0);
    }

  /* Create the stack of handled components for REF2.  */
  while (handled_component_p (ref2) && ref2 != match2)
    {
      if (ends_tbaa_access_path_p (ref2))
	ntbaa2 = 0;
      else
	ntbaa2++;
      component_refs2.safe_push (ref2);
      ref2 = TREE_OPERAND (ref2, 0);
    }

  if (!flag_strict_aliasing)
    {
      ntbaa1 = 0;
      ntbaa2 = 0;
    }

  bool mem_ref1 = TREE_CODE (ref1) == MEM_REF && ref1 != match1;
  bool mem_ref2 = TREE_CODE (ref2) == MEM_REF && ref2 != match2;

  /* If only one of the access paths starts with MEM_REF check that the
     offset is 0 so the addresses stay the same after stripping it.
     TODO: In this case we may walk the other access path until we get same
     offset.

     If both start with MEM_REF, the offsets have to be the same.  */
  if ((mem_ref1 && !mem_ref2 && !integer_zerop (TREE_OPERAND (ref1, 1)))
      || (mem_ref2 && !mem_ref1 && !integer_zerop (TREE_OPERAND (ref2, 1)))
      || (mem_ref1 && mem_ref2
	  && !tree_int_cst_equal (TREE_OPERAND (ref1, 1),
				  TREE_OPERAND (ref2, 1))))
    {
      ++alias_stats.nonoverlapping_refs_since_match_p_may_alias;
      return -1;
    }

  /* TARGET_MEM_REF are never wrapped in handled components, so we do not need
     to handle them here at all.  */
  gcc_checking_assert (TREE_CODE (ref1) != TARGET_MEM_REF
		       && TREE_CODE (ref2) != TARGET_MEM_REF);

  /* Pop the stacks in parallel and examine the COMPONENT_REFs of the same
     rank.  This is sufficient because we start from the same DECL and you
     cannot reference several fields at a time with COMPONENT_REFs (unlike
     with ARRAY_RANGE_REFs for arrays) so you always need the same number
     of them to access a sub-component, unless you're in a union, in which
     case the return value will precisely be false.  */
  while (!component_refs1.is_empty () && !component_refs2.is_empty ())
    {
      /* Track if we seen unmatched ref with non-zero offset.  In this case
	 we must look for partial overlaps.  */
      bool seen_unmatched_ref_p = false;

      /* First match ARRAY_REFs and try to disambiguate.  */
      if (!component_refs1.is_empty ()
	  && !component_refs2.is_empty ())
	{
	  unsigned int narray_refs1 = 0, narray_refs2 = 0;

	  /* We generally assume that both access paths start by the same
	     sequence of refs.  However if the numbers of array refs are not
	     in sync, try to recover and pop elts until the numbers match.
	     This helps the case where one access path starts by an array
	     and the other by an element.  */
	  for (narray_refs1 = 0; narray_refs1 < component_refs1.length ();
	       narray_refs1++)
	    if (TREE_CODE (component_refs1 [component_refs1.length()
					    - 1 - narray_refs1]) != ARRAY_REF)
	      break;

	  for (narray_refs2 = 0; narray_refs2 < component_refs2.length ();
	       narray_refs2++)
	    if (TREE_CODE (component_refs2 [component_refs2.length()
					    - 1 - narray_refs2]) != ARRAY_REF)
	      break;
	  for (; narray_refs1 > narray_refs2; narray_refs1--)
	    {
	      ref1 = component_refs1.pop ();
	      ntbaa1--;

	      /* If index is non-zero we need to check whether the reference
		 does not break the main invariant that bases are either
		 disjoint or equal.  Consider the example:

		 unsigned char out[][1];
		 out[1]="a";
		 out[i];

		 Here bases out and out are same, but after removing the
		 [i] index, this invariant no longer holds, because
		 out[i] points to the middle of array out.

		 TODO: If size of type of the skipped reference is an integer
		 multiple of the size of type of the other reference this
		 invariant can be verified, but even then it is not completely
		 safe with !flag_strict_aliasing if the other reference
		 contains unbounded array accesses.  */

	      if (!operand_equal_p (TREE_OPERAND (ref1, 1),
				    cheap_array_ref_low_bound (ref1), 0))
		return -1;
	    }
	  for (; narray_refs2 > narray_refs1; narray_refs2--)
	    {
	      ref2 = component_refs2.pop ();
	      ntbaa2--;
	      if (!operand_equal_p (TREE_OPERAND (ref2, 1),
				    cheap_array_ref_low_bound (ref2), 0))
		return -1;
	    }
	  /* Try to disambiguate matched arrays.  */
	  for (unsigned int i = 0; i < narray_refs1; i++)
	    {
	      int cmp = nonoverlapping_array_refs_p (component_refs1.pop (),
						     component_refs2.pop ());
	      if (cmp == 1 && !partial_overlap)
		{
		  ++alias_stats
		    .nonoverlapping_refs_since_match_p_no_alias;
		  return 1;
		}
	      if (cmp == -1)
		{
		  seen_unmatched_ref_p = true;
		  /* We cannot maintain the invariant that bases are either
		     same or completely disjoint.  However we can still
		     recover from type based alias analysis if we reach
		     references to same sizes.  We do not attempt to match
		     array sizes, so just finish array walking and look
		     for component refs.  */
		  if (ntbaa1 < 0 || ntbaa2 < 0)
		    {
		      ++alias_stats
			.nonoverlapping_refs_since_match_p_may_alias;
		      return -1;
		    }
		  for (i++; i < narray_refs1; i++)
		    {
		      component_refs1.pop ();
		      component_refs2.pop ();
		      ntbaa1--;
		      ntbaa2--;
		    }
		  break;
		}
	      partial_overlap = false;
	    }
	}

      /* Next look for component_refs.  */
      do
	{
	  if (component_refs1.is_empty ())
	    {
	      ++alias_stats
		.nonoverlapping_refs_since_match_p_must_overlap;
	      return 0;
	    }
	  ref1 = component_refs1.pop ();
	  ntbaa1--;
	  if (TREE_CODE (ref1) != COMPONENT_REF)
	    {
	      seen_unmatched_ref_p = true;
	      if (ntbaa1 < 0 || ntbaa2 < 0)
		{
		  ++alias_stats
		    .nonoverlapping_refs_since_match_p_may_alias;
		  return -1;
		}
	    }
	}
      while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref1, 0))));

      do
	{
	  if (component_refs2.is_empty ())
	    {
	      ++alias_stats
		.nonoverlapping_refs_since_match_p_must_overlap;
	      return 0;
	    }
	  ref2 = component_refs2.pop ();
	  ntbaa2--;
	  if (TREE_CODE (ref2) != COMPONENT_REF)
	    {
	      if (ntbaa1 < 0 || ntbaa2 < 0)
		{
		  ++alias_stats
		    .nonoverlapping_refs_since_match_p_may_alias;
		  return -1;
		}
	      seen_unmatched_ref_p = true;
	    }
	}
      while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref2, 0))));

      /* BIT_FIELD_REF and VIEW_CONVERT_EXPR are taken off the vectors
	 earlier.  */
      gcc_checking_assert (TREE_CODE (ref1) == COMPONENT_REF
			   && TREE_CODE (ref2) == COMPONENT_REF);

      tree field1 = TREE_OPERAND (ref1, 1);
      tree field2 = TREE_OPERAND (ref2, 1);

      /* ??? We cannot simply use the type of operand #0 of the refs here
	 as the Fortran compiler smuggles type punning into COMPONENT_REFs
	 for common blocks instead of using unions like everyone else.  */
      tree type1 = DECL_CONTEXT (field1);
      tree type2 = DECL_CONTEXT (field2);

      partial_overlap = false;

      /* If we skipped array refs on types of different sizes, we can
	 no longer be sure that there are not partial overlaps.  */
      if (seen_unmatched_ref_p && ntbaa1 >= 0 && ntbaa2 >= 0
	  && !operand_equal_p (TYPE_SIZE (type1), TYPE_SIZE (type2), 0))
	{
	  ++alias_stats
	    .nonoverlapping_refs_since_match_p_may_alias;
	  return -1;
	}

      int cmp = nonoverlapping_component_refs_p_1 (field1, field2);
      if (cmp == -1)
	{
	  ++alias_stats
	    .nonoverlapping_refs_since_match_p_may_alias;
	  return -1;
	}
      else if (cmp == 1)
	{
	  ++alias_stats
	    .nonoverlapping_refs_since_match_p_no_alias;
	  return 1;
	}
    }

  ++alias_stats.nonoverlapping_refs_since_match_p_must_overlap;
  return 0;
}
/* Return TYPE_UID which can be used to match record types we consider
   the same for TBAA purposes.  */

static unsigned int
ncr_type_uid (const_tree field)
{
  /* ??? We cannot simply use the type of operand #0 of the refs here
     as the Fortran compiler smuggles type punning into COMPONENT_REFs
     for common blocks instead of using unions like everyone else.  */
  tree type = DECL_FIELD_CONTEXT (field);
  /* With LTO types considered same_type_for_tbaa_p
     from different translation units may not have the same
     main variant.  They however have the same TYPE_CANONICAL.  */
  if (TYPE_CANONICAL (type))
    return TYPE_UID (TYPE_CANONICAL (type));
  return TYPE_UID (type);
}
/* qsort compare function to sort FIELD_DECLs after their
   DECL_FIELD_CONTEXT TYPE_UID.  */

static int
ncr_compar (const void *field1_, const void *field2_)
{
  const_tree field1 = *(const_tree *) const_cast <void *>(field1_);
  const_tree field2 = *(const_tree *) const_cast <void *>(field2_);
  unsigned int uid1 = ncr_type_uid (field1);
  unsigned int uid2 = ncr_type_uid (field2);

  if (uid1 < uid2)
    return -1;
  else if (uid1 > uid2)
    return 1;
  return 0;
}
/* Return true if we can determine that the fields referenced cannot
   overlap for any pair of objects.  This relies on TBAA.  */
static bool
nonoverlapping_component_refs_p (const_tree x, const_tree y)
{
  /* Early return if we have nothing to do.

     Do not consider this as may-alias for stats - it is more useful
     to have information how many disambiguations happened provided that
     the query was meaningful.  */
  if (!flag_strict_aliasing
      || !x || !y
      || !handled_component_p (x)
      || !handled_component_p (y))
    return false;

  auto_vec<const_tree, 16> fieldsx;
  while (handled_component_p (x))
    {
      if (TREE_CODE (x) == COMPONENT_REF)
	{
	  tree field = TREE_OPERAND (x, 1);
	  tree type = DECL_FIELD_CONTEXT (field);
	  if (TREE_CODE (type) == RECORD_TYPE)
	    fieldsx.safe_push (field);
	}
      else if (ends_tbaa_access_path_p (x))
	fieldsx.truncate (0);
      x = TREE_OPERAND (x, 0);
    }
  if (fieldsx.length () == 0)
    return false;
  auto_vec<const_tree, 16> fieldsy;
  while (handled_component_p (y))
    {
      if (TREE_CODE (y) == COMPONENT_REF)
	{
	  tree field = TREE_OPERAND (y, 1);
	  tree type = DECL_FIELD_CONTEXT (field);
	  if (TREE_CODE (type) == RECORD_TYPE)
	    fieldsy.safe_push (TREE_OPERAND (y, 1));
	}
      else if (ends_tbaa_access_path_p (y))
	fieldsy.truncate (0);
      y = TREE_OPERAND (y, 0);
    }
  if (fieldsy.length () == 0)
    {
      ++alias_stats.nonoverlapping_component_refs_p_may_alias;
      return false;
    }

  /* Most common case first.  */
  if (fieldsx.length () == 1
      && fieldsy.length () == 1)
    {
      if (same_type_for_tbaa (DECL_FIELD_CONTEXT (fieldsx[0]),
			      DECL_FIELD_CONTEXT (fieldsy[0])) == 1
	  && nonoverlapping_component_refs_p_1 (fieldsx[0], fieldsy[0]) == 1)
	{
	  ++alias_stats.nonoverlapping_component_refs_p_no_alias;
	  return true;
	}
      else
	{
	  ++alias_stats.nonoverlapping_component_refs_p_may_alias;
	  return false;
	}
    }

  if (fieldsx.length () == 2)
    {
      if (ncr_compar (&fieldsx[0], &fieldsx[1]) == 1)
	std::swap (fieldsx[0], fieldsx[1]);
    }
  else
    fieldsx.qsort (ncr_compar);

  if (fieldsy.length () == 2)
    {
      if (ncr_compar (&fieldsy[0], &fieldsy[1]) == 1)
	std::swap (fieldsy[0], fieldsy[1]);
    }
  else
    fieldsy.qsort (ncr_compar);

  unsigned i = 0, j = 0;
  do
    {
      const_tree fieldx = fieldsx[i];
      const_tree fieldy = fieldsy[j];

      /* We're left with accessing different fields of a structure,
	 no possible overlap.  */
      if (same_type_for_tbaa (DECL_FIELD_CONTEXT (fieldx),
			      DECL_FIELD_CONTEXT (fieldy)) == 1
	  && nonoverlapping_component_refs_p_1 (fieldx, fieldy) == 1)
	{
	  ++alias_stats.nonoverlapping_component_refs_p_no_alias;
	  return true;
	}

      if (ncr_type_uid (fieldx) < ncr_type_uid (fieldy))
	{
	  i++;
	  if (i == fieldsx.length ())
	    break;
	}
      else
	{
	  j++;
	  if (j == fieldsy.length ())
	    break;
	}
    }
  while (1);

  ++alias_stats.nonoverlapping_component_refs_p_may_alias;
  return false;
}
/* Return true if two memory references based on the variables BASE1
   and BASE2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
   [OFFSET2, OFFSET2 + MAX_SIZE2) may alias.  REF1 and REF2
   if non-NULL are the complete memory reference trees.  */
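
/* Illustrative sketch (not part of the original sources): with

       int a, b;
       struct { int x; int y; } s;

   accesses to 'a' and 'b' have different base decls and cannot alias;
   s.x and s.y share the base 's' but occupy the disjoint bit ranges
   [0, 32) and [32, 64), so they cannot alias either; s.x against the
   whole of 's' overlaps and may alias.  */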
static bool
decl_refs_may_alias_p (tree ref1, tree base1,
		       poly_int64 offset1, poly_int64 max_size1,
		       poly_int64 size1,
		       tree ref2, tree base2,
		       poly_int64 offset2, poly_int64 max_size2,
		       poly_int64 size2)
{
  gcc_checking_assert (DECL_P (base1) && DECL_P (base2));

  /* If both references are based on different variables, they cannot
     alias.  */
  if (compare_base_decls (base1, base2) == 0)
    return false;

  /* If both references are based on the same variable, they cannot alias if
     the accesses do not overlap.  */
  if (!ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
    return false;

  /* If there is must alias, there is no use disambiguating further.  */
  if (known_eq (size1, max_size1) && known_eq (size2, max_size2))
    return true;

  /* For components with variable position, the above test isn't sufficient,
     so we disambiguate component references manually.  */
  if (ref1 && ref2
      && handled_component_p (ref1) && handled_component_p (ref2)
      && nonoverlapping_refs_since_match_p (NULL, ref1, NULL, ref2,
					    false) == 1)
    return false;

  return true;
}
/* Return true if the access with BASE is view converted.
   The base must not be stripped from the inner MEM_REF (&decl)
   which is done by ao_ref_base; thus one extra walk
   of handled components is needed.  */

static bool
view_converted_memref_p (tree base)
{
  if (TREE_CODE (base) != MEM_REF && TREE_CODE (base) != TARGET_MEM_REF)
    return false;
  return same_type_for_tbaa (TREE_TYPE (base),
			     TREE_TYPE (TREE_TYPE (TREE_OPERAND (base, 1))))
	 != 1;
}
/* Return true if an indirect reference based on *PTR1 constrained
   to [OFFSET1, OFFSET1 + MAX_SIZE1) may alias a variable based on BASE2
   constrained to [OFFSET2, OFFSET2 + MAX_SIZE2).  *PTR1 and BASE2 have
   the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
   in which case they are computed on-demand.  REF1 and REF2
   if non-NULL are the complete memory reference trees.  */
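
/* Illustrative sketch (not part of the original sources): this is the
   disambiguator behind queries such as

       double d;
       void
       f (int *p)
       {
	 *p = 1;    // base1 is the MEM_REF *p
	 d = 2.0;   // base2 is the decl 'd'
       }

   With strict aliasing enabled the alias sets of 'int' and 'double'
   do not conflict, so the store through P is disambiguated against D
   even when the points-to set of P is unknown.  */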
static bool
indirect_ref_may_alias_decl_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
			       poly_int64 offset1, poly_int64 max_size1,
			       poly_int64 size1,
			       alias_set_type ref1_alias_set,
			       alias_set_type base1_alias_set,
			       tree ref2 ATTRIBUTE_UNUSED, tree base2,
			       poly_int64 offset2, poly_int64 max_size2,
			       poly_int64 size2,
			       alias_set_type ref2_alias_set,
			       alias_set_type base2_alias_set, bool tbaa_p)
{
  tree ptr1;
  tree ptrtype1, dbase2;

  gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
			|| TREE_CODE (base1) == TARGET_MEM_REF)
		       && DECL_P (base2));

  ptr1 = TREE_OPERAND (base1, 0);
  poly_offset_int moff = mem_ref_offset (base1) << LOG2_BITS_PER_UNIT;

  /* If only one reference is based on a variable, they cannot alias if
     the pointer access is beyond the extent of the variable access.
     (the pointer base cannot validly point to an offset less than zero
     of the variable).
     ??? IVOPTs creates bases that do not honor this restriction,
     so do not apply this optimization for TARGET_MEM_REFs.  */
  if (TREE_CODE (base1) != TARGET_MEM_REF
      && !ranges_maybe_overlap_p (offset1 + moff, -1, offset2, max_size2))
    return false;

  /* They also cannot alias if the pointer may not point to the decl.  */
  if (!ptr_deref_may_alias_decl_p (ptr1, base2))
    return false;

  /* Disambiguations that rely on strict aliasing rules follow.  */
  if (!flag_strict_aliasing || !tbaa_p)
    return true;

  /* If the alias set for a pointer access is zero all bets are off.  */
  if (base1_alias_set == 0 || base2_alias_set == 0)
    return true;

  /* When we are trying to disambiguate an access with a pointer dereference
     as base versus one with a decl as base we can use both the size
     of the decl and its dynamic type for extra disambiguation.
     ??? We do not know anything about the dynamic type of the decl
     other than that its alias-set contains base2_alias_set as a subset
     which does not help us here.  */
  /* As we know nothing useful about the dynamic type of the decl just
     use the usual conflict check rather than a subset test.
     ??? We could introduce -fvery-strict-aliasing when the language
     does not allow decls to have a dynamic type that differs from their
     static type.  Then we can check
     !alias_set_subset_of (base1_alias_set, base2_alias_set) instead.  */
  if (base1_alias_set != base2_alias_set
      && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
    return false;

  ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));

  /* If the size of the access relevant for TBAA through the pointer
     is bigger than the size of the decl we can't possibly access the
     decl via that pointer.  */
  if (/* ??? This in turn may run afoul when a decl of type T which is
	 a member of union type U is accessed through a pointer to
	 type U and sizeof T is smaller than sizeof U.  */
      TREE_CODE (TREE_TYPE (ptrtype1)) != UNION_TYPE
      && TREE_CODE (TREE_TYPE (ptrtype1)) != QUAL_UNION_TYPE
      && compare_sizes (DECL_SIZE (base2),
			TYPE_SIZE (TREE_TYPE (ptrtype1))) < 0)
    return false;

  if (!ref2)
    return true;

  /* If the decl is accessed via a MEM_REF, reconstruct the base
     we can use for TBAA and an appropriately adjusted offset.  */
  dbase2 = ref2;
  while (handled_component_p (dbase2))
    dbase2 = TREE_OPERAND (dbase2, 0);
  poly_int64 doffset1 = offset1;
  poly_offset_int doffset2 = offset2;
  if (TREE_CODE (dbase2) == MEM_REF
      || TREE_CODE (dbase2) == TARGET_MEM_REF)
    {
      doffset2 -= mem_ref_offset (dbase2) << LOG2_BITS_PER_UNIT;
      tree ptrtype2 = TREE_TYPE (TREE_OPERAND (dbase2, 1));
      /* If second reference is view-converted, give up now.  */
      if (same_type_for_tbaa (TREE_TYPE (dbase2), TREE_TYPE (ptrtype2)) != 1)
	return true;
    }

  /* If first reference is view-converted, give up now.  */
  if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1)
    return true;

  /* If both references are through the same type, they do not alias
     unless the accesses are partially overlapping.  */
2104 if the accesses do not overlap. This does extra disambiguation
2105 for mixed/pointer accesses but requires strict aliasing.
2106 For MEM_REFs we require that the component-ref offset we computed
2107 is relative to the start of the type which we ensure by
2108 comparing rvalue and access type and disregarding the constant
2111 But avoid treating variable length arrays as "objects", instead assume they
2112 can overlap by an exact multiple of their element size.
2113 See gcc.dg/torture/alias-2.c. */
2114 if (((TREE_CODE (base1
) != TARGET_MEM_REF
2115 || (!TMR_INDEX (base1
) && !TMR_INDEX2 (base1
)))
2116 && (TREE_CODE (dbase2
) != TARGET_MEM_REF
2117 || (!TMR_INDEX (dbase2
) && !TMR_INDEX2 (dbase2
))))
2118 && same_type_for_tbaa (TREE_TYPE (base1
), TREE_TYPE (dbase2
)) == 1)
2120 bool partial_overlap
= (TREE_CODE (TREE_TYPE (base1
)) == ARRAY_TYPE
2121 && (TYPE_SIZE (TREE_TYPE (base1
))
2122 && TREE_CODE (TYPE_SIZE (TREE_TYPE (base1
)))
2124 if (!partial_overlap
2125 && !ranges_maybe_overlap_p (doffset1
, max_size1
, doffset2
, max_size2
))
2128 /* If there is must alias, there is no use disambiguating further. */
2129 || (!partial_overlap
2130 && known_eq (size1
, max_size1
) && known_eq (size2
, max_size2
)))
2132 int res
= nonoverlapping_refs_since_match_p (base1
, ref1
, base2
, ref2
,
2135 return !nonoverlapping_component_refs_p (ref1
, ref2
);
2139 /* Do access-path based disambiguation. */
2141 && (handled_component_p (ref1
) || handled_component_p (ref2
)))
2142 return aliasing_component_refs_p (ref1
,
2143 ref1_alias_set
, base1_alias_set
,
2146 ref2_alias_set
, base2_alias_set
,
2147 offset2
, max_size2
);
/* Return true if two indirect references based on *PTR1
   and *PTR2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
   [OFFSET2, OFFSET2 + MAX_SIZE2) may alias.  *PTR1 and *PTR2 have
   the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
   in which case they are computed on-demand.  REF1 and REF2
   if non-NULL are the complete memory reference trees.  */

static bool
indirect_refs_may_alias_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
			   poly_int64 offset1, poly_int64 max_size1,
			   poly_int64 size1,
			   alias_set_type ref1_alias_set,
			   alias_set_type base1_alias_set,
			   tree ref2 ATTRIBUTE_UNUSED, tree base2,
			   poly_int64 offset2, poly_int64 max_size2,
			   poly_int64 size2,
			   alias_set_type ref2_alias_set,
			   alias_set_type base2_alias_set, bool tbaa_p)
{
  tree ptr1;
  tree ptr2;
  tree ptrtype1, ptrtype2;

  gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
			|| TREE_CODE (base1) == TARGET_MEM_REF)
		       && (TREE_CODE (base2) == MEM_REF
			   || TREE_CODE (base2) == TARGET_MEM_REF));

  ptr1 = TREE_OPERAND (base1, 0);
  ptr2 = TREE_OPERAND (base2, 0);

  /* If both bases are based on pointers they cannot alias if they may not
     point to the same memory object or if they point to the same object
     and the accesses do not overlap.  */
  if ((!cfun || gimple_in_ssa_p (cfun))
      && operand_equal_p (ptr1, ptr2, 0)
      && (((TREE_CODE (base1) != TARGET_MEM_REF
	    || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
	   && (TREE_CODE (base2) != TARGET_MEM_REF
	       || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2))))
	  || (TREE_CODE (base1) == TARGET_MEM_REF
	      && TREE_CODE (base2) == TARGET_MEM_REF
	      && (TMR_STEP (base1) == TMR_STEP (base2)
		  || (TMR_STEP (base1) && TMR_STEP (base2)
		      && operand_equal_p (TMR_STEP (base1),
					  TMR_STEP (base2), 0)))
	      && (TMR_INDEX (base1) == TMR_INDEX (base2)
		  || (TMR_INDEX (base1) && TMR_INDEX (base2)
		      && operand_equal_p (TMR_INDEX (base1),
					  TMR_INDEX (base2), 0)))
	      && (TMR_INDEX2 (base1) == TMR_INDEX2 (base2)
		  || (TMR_INDEX2 (base1) && TMR_INDEX2 (base2)
		      && operand_equal_p (TMR_INDEX2 (base1),
					  TMR_INDEX2 (base2), 0))))))
    {
      poly_offset_int moff1 = mem_ref_offset (base1) << LOG2_BITS_PER_UNIT;
      poly_offset_int moff2 = mem_ref_offset (base2) << LOG2_BITS_PER_UNIT;
      if (!ranges_maybe_overlap_p (offset1 + moff1, max_size1,
				   offset2 + moff2, max_size2))
	return false;
      /* If there is must alias, there is no use disambiguating further.  */
      if (known_eq (size1, max_size1) && known_eq (size2, max_size2))
	return true;
      if (ref1 && ref2)
	{
	  int res = nonoverlapping_refs_since_match_p (NULL, ref1, NULL, ref2,
						       false);
	  if (res != -1)
	    return !res;
	}
    }
  if (!ptr_derefs_may_alias_p (ptr1, ptr2))
    return false;

  /* Disambiguations that rely on strict aliasing rules follow.  */
  if (!flag_strict_aliasing || !tbaa_p)
    return true;

  ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));
  ptrtype2 = TREE_TYPE (TREE_OPERAND (base2, 1));

  /* If the alias set for a pointer access is zero all bets are off.  */
  if (base1_alias_set == 0
      || base2_alias_set == 0)
    return true;

  /* Do type-based disambiguation.  */
  if (base1_alias_set != base2_alias_set
      && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
    return false;

  /* If either reference is view-converted, give up now.  */
  if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1
      || same_type_for_tbaa (TREE_TYPE (base2), TREE_TYPE (ptrtype2)) != 1)
    return true;

  /* If both references are through the same type, they do not alias
     if the accesses do not overlap.  This does extra disambiguation
     for mixed/pointer accesses but requires strict aliasing.  */
  if ((TREE_CODE (base1) != TARGET_MEM_REF
       || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
      && (TREE_CODE (base2) != TARGET_MEM_REF
	  || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2)))
      && same_type_for_tbaa (TREE_TYPE (ptrtype1),
			     TREE_TYPE (ptrtype2)) == 1)
    {
      /* But avoid treating arrays as "objects", instead assume they
	 can overlap by an exact multiple of their element size.
	 See gcc.dg/torture/alias-2.c.  */
      bool partial_overlap = TREE_CODE (TREE_TYPE (ptrtype1)) == ARRAY_TYPE;

      if (!partial_overlap
	  && !ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
	return false;
      if (!ref1 || !ref2
	  || (!partial_overlap
	      && known_eq (size1, max_size1) && known_eq (size2, max_size2)))
	return true;
      int res = nonoverlapping_refs_since_match_p (base1, ref1, base2, ref2,
						   partial_overlap);
      if (res == -1)
	return !nonoverlapping_component_refs_p (ref1, ref2);
      return !res;
    }

  /* Do access-path based disambiguation.  */
  if (ref1 && ref2
      && (handled_component_p (ref1) || handled_component_p (ref2)))
    return aliasing_component_refs_p (ref1,
				      ref1_alias_set, base1_alias_set,
				      offset1, max_size1,
				      ref2,
				      ref2_alias_set, base2_alias_set,
				      offset2, max_size2);

  return true;
}
/* Return true, if the two memory references REF1 and REF2 may alias.  */

static bool
refs_may_alias_p_2 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
{
  tree base1, base2;
  poly_int64 offset1 = 0, offset2 = 0;
  poly_int64 max_size1 = -1, max_size2 = -1;
  bool var1_p, var2_p, ind1_p, ind2_p;

  gcc_checking_assert ((!ref1->ref
			|| TREE_CODE (ref1->ref) == SSA_NAME
			|| DECL_P (ref1->ref)
			|| TREE_CODE (ref1->ref) == STRING_CST
			|| handled_component_p (ref1->ref)
			|| TREE_CODE (ref1->ref) == MEM_REF
			|| TREE_CODE (ref1->ref) == TARGET_MEM_REF)
		       && (!ref2->ref
			   || TREE_CODE (ref2->ref) == SSA_NAME
			   || DECL_P (ref2->ref)
			   || TREE_CODE (ref2->ref) == STRING_CST
			   || handled_component_p (ref2->ref)
			   || TREE_CODE (ref2->ref) == MEM_REF
			   || TREE_CODE (ref2->ref) == TARGET_MEM_REF));

  /* Decompose the references into their base objects and the access.  */
  base1 = ao_ref_base (ref1);
  offset1 = ref1->offset;
  max_size1 = ref1->max_size;
  base2 = ao_ref_base (ref2);
  offset2 = ref2->offset;
  max_size2 = ref2->max_size;

  /* We can end up with registers or constants as bases for example from
     *D.1663_44 = VIEW_CONVERT_EXPR<struct DB_LSN>(__tmp$B0F64_59);
     which is seen as a struct copy.  */
  if (TREE_CODE (base1) == SSA_NAME
      || TREE_CODE (base1) == CONST_DECL
      || TREE_CODE (base1) == CONSTRUCTOR
      || TREE_CODE (base1) == ADDR_EXPR
      || CONSTANT_CLASS_P (base1)
      || TREE_CODE (base2) == SSA_NAME
      || TREE_CODE (base2) == CONST_DECL
      || TREE_CODE (base2) == CONSTRUCTOR
      || TREE_CODE (base2) == ADDR_EXPR
      || CONSTANT_CLASS_P (base2))
    return false;

  /* We can end up referring to code via function and label decls.
     As we likely do not properly track code aliases conservatively
     bail out.  */
  if (TREE_CODE (base1) == FUNCTION_DECL
      || TREE_CODE (base1) == LABEL_DECL
      || TREE_CODE (base2) == FUNCTION_DECL
      || TREE_CODE (base2) == LABEL_DECL)
    return true;

  /* Two volatile accesses always conflict.  */
  if (ref1->volatile_p
      && ref2->volatile_p)
    return true;

  /* Defer to simple offset based disambiguation if we have
     references based on two decls.  Do this before defering to
     TBAA to handle must-alias cases in conformance with the
     GCC extension of allowing type-punning through unions.  */
  var1_p = DECL_P (base1);
  var2_p = DECL_P (base2);
  if (var1_p && var2_p)
    return decl_refs_may_alias_p (ref1->ref, base1, offset1, max_size1,
				  ref1->size,
				  ref2->ref, base2, offset2, max_size2,
				  ref2->size);

  /* Handle restrict based accesses.
     ??? ao_ref_base strips inner MEM_REF [&decl], recover from that
     here.  */
  tree rbase1 = base1;
  tree rbase2 = base2;
  if (var1_p)
    {
      rbase1 = ref1->ref;
      if (rbase1)
	while (handled_component_p (rbase1))
	  rbase1 = TREE_OPERAND (rbase1, 0);
    }
  if (var2_p)
    {
      rbase2 = ref2->ref;
      if (rbase2)
	while (handled_component_p (rbase2))
	  rbase2 = TREE_OPERAND (rbase2, 0);
    }
  if (rbase1 && rbase2
      && (TREE_CODE (base1) == MEM_REF || TREE_CODE (base1) == TARGET_MEM_REF)
      && (TREE_CODE (base2) == MEM_REF || TREE_CODE (base2) == TARGET_MEM_REF)
      /* If the accesses are in the same restrict clique... */
      && MR_DEPENDENCE_CLIQUE (base1) == MR_DEPENDENCE_CLIQUE (base2)
      /* But based on different pointers they do not alias.  */
      && MR_DEPENDENCE_BASE (base1) != MR_DEPENDENCE_BASE (base2))
    return false;

  ind1_p = (TREE_CODE (base1) == MEM_REF
	    || TREE_CODE (base1) == TARGET_MEM_REF);
  ind2_p = (TREE_CODE (base2) == MEM_REF
	    || TREE_CODE (base2) == TARGET_MEM_REF);

  /* Canonicalize the pointer-vs-decl case.  */
  if (ind1_p && var2_p)
    {
      std::swap (offset1, offset2);
      std::swap (max_size1, max_size2);
      std::swap (base1, base2);
      std::swap (ref1, ref2);
      var1_p = true;
      ind1_p = false;
      var2_p = false;
      ind2_p = true;
    }

  /* First defer to TBAA if possible.  */
  if (tbaa_p
      && flag_strict_aliasing
      && !alias_sets_conflict_p (ao_ref_alias_set (ref1),
				 ao_ref_alias_set (ref2)))
    return false;

  /* If the reference is based on a pointer that points to memory
     that may not be written to then the other reference cannot possibly
     clobber it.  */
  if ((TREE_CODE (TREE_OPERAND (base2, 0)) == SSA_NAME
       && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base2, 0)))
      || (ind1_p
	  && TREE_CODE (TREE_OPERAND (base1, 0)) == SSA_NAME
	  && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base1, 0))))
    return false;

  /* Dispatch to the pointer-vs-decl or pointer-vs-pointer disambiguators.  */
  if (var1_p && ind2_p)
    return indirect_ref_may_alias_decl_p (ref2->ref, base2,
					  offset2, max_size2, ref2->size,
					  ao_ref_alias_set (ref2),
					  ao_ref_base_alias_set (ref2),
					  ref1->ref, base1,
					  offset1, max_size1, ref1->size,
					  ao_ref_alias_set (ref1),
					  ao_ref_base_alias_set (ref1),
					  tbaa_p);
  else if (ind1_p && ind2_p)
    return indirect_refs_may_alias_p (ref1->ref, base1,
				      offset1, max_size1, ref1->size,
				      ao_ref_alias_set (ref1),
				      ao_ref_base_alias_set (ref1),
				      ref2->ref, base2,
				      offset2, max_size2, ref2->size,
				      ao_ref_alias_set (ref2),
				      ao_ref_base_alias_set (ref2),
				      tbaa_p);

  gcc_unreachable ();
}
/* Return true, if the two memory references REF1 and REF2 may alias
   and update statistics.  */

bool
refs_may_alias_p_1 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
{
  bool res = refs_may_alias_p_2 (ref1, ref2, tbaa_p);
  if (res)
    ++alias_stats.refs_may_alias_p_may_alias;
  else
    ++alias_stats.refs_may_alias_p_no_alias;
  return res;
}

bool
refs_may_alias_p (tree ref1, ao_ref *ref2, bool tbaa_p)
{
  ao_ref r1;
  ao_ref_init (&r1, ref1);
  return refs_may_alias_p_1 (&r1, ref2, tbaa_p);
}

bool
refs_may_alias_p (tree ref1, tree ref2, bool tbaa_p)
{
  ao_ref r1, r2;
  ao_ref_init (&r1, ref1);
  ao_ref_init (&r2, ref2);
  return refs_may_alias_p_1 (&r1, &r2, tbaa_p);
}
/* Returns true if there is an anti-dependence for the STORE that
   executes after the LOAD.  */

bool
refs_anti_dependent_p (tree load, tree store)
{
  ao_ref r1, r2;
  ao_ref_init (&r1, load);
  ao_ref_init (&r2, store);
  return refs_may_alias_p_1 (&r1, &r2, false);
}

/* Returns true if there is an output dependence for the stores
   STORE1 and STORE2.  */

bool
refs_output_dependent_p (tree store1, tree store2)
{
  ao_ref r1, r2;
  ao_ref_init (&r1, store1);
  ao_ref_init (&r2, store2);
  return refs_may_alias_p_1 (&r1, &r2, false);
}
/* Returns true if and only if REF may alias any access stored in TT.
   If TBAA_P is true, use TBAA oracle.  */

static bool
modref_may_conflict (const gimple *stmt,
		     modref_tree <alias_set_type> *tt, ao_ref *ref, bool tbaa_p)
{
  alias_set_type base_set, ref_set;
  modref_base_node <alias_set_type> *base_node;
  modref_ref_node <alias_set_type> *ref_node;
  size_t i, j, k;

  if (tt->every_base)
    return true;

  if (!dbg_cnt (ipa_mod_ref))
    return true;

  base_set = ao_ref_base_alias_set (ref);

  ref_set = ao_ref_alias_set (ref);

  int num_tests = 0, max_tests = param_modref_max_tests;
  FOR_EACH_VEC_SAFE_ELT (tt->bases, i, base_node)
    {
      if (tbaa_p && flag_strict_aliasing)
	{
	  if (num_tests >= max_tests)
	    return true;
	  alias_stats.modref_tests++;
	  if (!alias_sets_conflict_p (base_set, base_node->base))
	    continue;
	  num_tests++;
	}

      if (base_node->every_ref)
	return true;

      FOR_EACH_VEC_SAFE_ELT (base_node->refs, j, ref_node)
	{
	  /* Do not repeat same test as before.  */
	  if ((ref_set != base_set || base_node->base != ref_node->ref)
	      && tbaa_p && flag_strict_aliasing)
	    {
	      if (num_tests >= max_tests)
		return true;
	      alias_stats.modref_tests++;
	      if (!alias_sets_conflict_p (ref_set, ref_node->ref))
		continue;
	      num_tests++;
	    }

	  /* TBAA checks did not disambiguate, try to use base pointer, for
	     that we however need to have ref->ref or ref->base.  */
	  if (ref_node->every_access || (!ref->ref && !ref->base))
	    return true;

	  modref_access_node *access_node;
	  FOR_EACH_VEC_SAFE_ELT (ref_node->accesses, k, access_node)
	    {
	      if (num_tests >= max_tests)
		return true;

	      if (access_node->parm_index == -1
		  || (unsigned)access_node->parm_index
		     >= gimple_call_num_args (stmt))
		return true;

	      alias_stats.modref_baseptr_tests++;

	      tree arg = gimple_call_arg (stmt, access_node->parm_index);

	      if (integer_zerop (arg) && flag_delete_null_pointer_checks)
		continue;

	      if (!POINTER_TYPE_P (TREE_TYPE (arg)))
		return true;

	      /* ao_ref_init_from_ptr_and_range assumes that memory access
		 starts by the pointed to location.  If we did not track the
		 offset it is possible that it starts before the actual
		 pointed to location.  */
	      if (!access_node->parm_offset_known)
		{
		  if (ptr_deref_may_alias_ref_p_1 (arg, ref))
		    return true;
		}
	      else
		{
		  poly_offset_int off = (poly_offset_int)access_node->offset
			+ ((poly_offset_int)access_node->parm_offset
			   << LOG2_BITS_PER_UNIT);
		  poly_int64 off2;
		  if (off.to_shwi (&off2))
		    {
		      ao_ref ref2;
		      ao_ref_init_from_ptr_and_range
			     (&ref2, arg, true, off2,
			      access_node->size,
			      access_node->max_size);
		      ref2.ref_alias_set = ref_set;
		      ref2.base_alias_set = base_set;
		      if (refs_may_alias_p_1 (&ref2, ref, tbaa_p))
			return true;
		    }
		  else if (ptr_deref_may_alias_ref_p_1 (arg, ref))
		    return true;
		}
	      num_tests++;
	    }
	}
    }
  return false;
}
/* Check if REF conflicts with call using "fn spec" attribute.
   If CLOBBER is true we are checking for writes, otherwise check loads.

   Return 0 if there are no conflicts (except for possible function call
   argument reads), 1 if there are conflicts and -1 if we can not decide by
   fn spec.  */

static int
check_fnspec (gcall *call, ao_ref *ref, bool clobber)
{
  attr_fnspec fnspec = gimple_call_fnspec (call);
  if (fnspec.known_p ())
    {
      if (clobber
	  ? !fnspec.global_memory_written_p ()
	  : !fnspec.global_memory_read_p ())
	{
	  for (unsigned int i = 0; i < gimple_call_num_args (call); i++)
	    if (POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, i)))
		&& (!fnspec.arg_specified_p (i)
		    || (clobber ? fnspec.arg_maybe_written_p (i)
			: fnspec.arg_maybe_read_p (i))))
	      {
		ao_ref dref;
		tree size = NULL_TREE;
		unsigned int size_arg;

		if (!fnspec.arg_specified_p (i))
		  ;
		else if (fnspec.arg_max_access_size_given_by_arg_p
			   (i, &size_arg))
		  size = gimple_call_arg (call, size_arg);
		else if (fnspec.arg_access_size_given_by_type_p (i))
		  {
		    tree callee = gimple_call_fndecl (call);
		    tree t = TYPE_ARG_TYPES (TREE_TYPE (callee));

		    for (unsigned int p = 0; p < i; p++)
		      t = TREE_CHAIN (t);
		    size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_VALUE (t)));
		  }
		ao_ref_init_from_ptr_and_size (&dref,
					       gimple_call_arg (call, i),
					       size);
		if (refs_may_alias_p_1 (&dref, ref, false))
		  return 1;
	      }
	  if (clobber
	      && fnspec.errno_maybe_written_p ()
	      && flag_errno_math
	      && targetm.ref_may_alias_errno (ref))
	    return 1;
	  return 0;
	}
    }

  /* FIXME: we should handle barriers more consistently, but for now leave the
     check here.  */
  if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
    switch (DECL_FUNCTION_CODE (gimple_call_fndecl (call)))
      {
      /* __sync_* builtins and some OpenMP builtins act as threading
	 barriers.  */
#undef DEF_SYNC_BUILTIN
#define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
#include "sync-builtins.def"
#undef DEF_SYNC_BUILTIN
      case BUILT_IN_GOMP_ATOMIC_START:
      case BUILT_IN_GOMP_ATOMIC_END:
      case BUILT_IN_GOMP_BARRIER:
      case BUILT_IN_GOMP_BARRIER_CANCEL:
      case BUILT_IN_GOMP_TASKWAIT:
      case BUILT_IN_GOMP_TASKGROUP_END:
      case BUILT_IN_GOMP_CRITICAL_START:
      case BUILT_IN_GOMP_CRITICAL_END:
      case BUILT_IN_GOMP_CRITICAL_NAME_START:
      case BUILT_IN_GOMP_CRITICAL_NAME_END:
      case BUILT_IN_GOMP_LOOP_END:
      case BUILT_IN_GOMP_LOOP_END_CANCEL:
      case BUILT_IN_GOMP_ORDERED_START:
      case BUILT_IN_GOMP_ORDERED_END:
      case BUILT_IN_GOMP_SECTIONS_END:
      case BUILT_IN_GOMP_SECTIONS_END_CANCEL:
      case BUILT_IN_GOMP_SINGLE_COPY_START:
      case BUILT_IN_GOMP_SINGLE_COPY_END:
	return 1;

      default:
	return -1;
      }

  return -1;
}
/* If the call CALL may use the memory reference REF return true,
   otherwise return false.  */

static bool
ref_maybe_used_by_call_p_1 (gcall *call, ao_ref *ref, bool tbaa_p)
{
  tree base, callee;
  unsigned i;
  int flags = gimple_call_flags (call);

  /* Const functions without a static chain do not implicitly use memory.  */
  if (!gimple_call_chain (call)
      && (flags & (ECF_CONST|ECF_NOVOPS)))
    goto process_args;

  /* A call that is not without side-effects might involve volatile
     accesses and thus conflicts with all other volatile accesses.  */
  if (ref->volatile_p)
    return true;

  callee = gimple_call_fndecl (call);

  if (!gimple_call_chain (call) && callee != NULL_TREE)
    {
      struct cgraph_node *node = cgraph_node::get (callee);
      /* We can not safely optimize based on summary of callee if it does
	 not always bind to current def: it is possible that memory load
	 was optimized out earlier and the interposed variant may not be
	 optimized this way.  */
      if (node && node->binds_to_current_def_p ())
	{
	  modref_summary *summary = get_modref_function_summary (node);
	  if (summary)
	    {
	      if (!modref_may_conflict (call, summary->loads, ref, tbaa_p))
		{
		  alias_stats.modref_use_no_alias++;
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fprintf (dump_file,
			       "ipa-modref: call stmt ");
		      print_gimple_stmt (dump_file, call, 0);
		      fprintf (dump_file,
			       "ipa-modref: call to %s does not use ",
			       node->dump_name ());
		      if (!ref->ref && ref->base)
			{
			  fprintf (dump_file, "base: ");
			  print_generic_expr (dump_file, ref->base);
			}
		      else
			{
			  fprintf (dump_file, "ref: ");
			  print_generic_expr (dump_file, ref->ref);
			}
		      fprintf (dump_file, " alias sets: %i->%i\n",
			       ao_ref_base_alias_set (ref),
			       ao_ref_alias_set (ref));
		    }
		  goto process_args;
		}
	      alias_stats.modref_use_may_alias++;
	    }
	}
    }

  base = ao_ref_base (ref);
  if (!base)
    return true;

  /* If the reference is based on a decl that is not aliased the call
     cannot possibly use it.  */
  if (DECL_P (base)
      && !may_be_aliased (base)
      /* But local statics can be used through recursion.  */
      && !is_global_var (base))
    goto process_args;

  if (int res = check_fnspec (call, ref, false))
    {
      if (res == 1)
	return true;
    }
  else
    goto process_args;

  /* Check if base is a global static variable that is not read
     by the function.  */
  if (callee != NULL_TREE && VAR_P (base) && TREE_STATIC (base))
    {
      struct cgraph_node *node = cgraph_node::get (callee);
      bitmap read;
      int id;

      /* FIXME: Callee can be an OMP builtin that does not have a call graph
	 node yet.  We should enforce that there are nodes for all decls in the
	 IL and remove this check instead.  */
      if (node
	  && (id = ipa_reference_var_uid (base)) != -1
	  && (read = ipa_reference_get_read_global (node))
	  && !bitmap_bit_p (read, id))
	goto process_args;
    }

  /* Check if the base variable is call-used.  */
  if (DECL_P (base))
    {
      if (pt_solution_includes (gimple_call_use_set (call), base))
	return true;
    }
  else if ((TREE_CODE (base) == MEM_REF
	    || TREE_CODE (base) == TARGET_MEM_REF)
	   && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
    {
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
      if (!pi)
	return true;

      if (pt_solutions_intersect (gimple_call_use_set (call), &pi->pt))
	return true;
    }
  else
    return true;

  /* Inspect call arguments for passed-by-value aliases.  */
process_args:
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree op = gimple_call_arg (call, i);
      int flags = gimple_call_arg_flags (call, i);

      if (flags & EAF_UNUSED)
	continue;

      if (TREE_CODE (op) == WITH_SIZE_EXPR)
	op = TREE_OPERAND (op, 0);

      if (TREE_CODE (op) != SSA_NAME
	  && !is_gimple_min_invariant (op))
	{
	  ao_ref r;
	  ao_ref_init (&r, op);
	  if (refs_may_alias_p_1 (&r, ref, tbaa_p))
	    return true;
	}
    }

  return false;
}
static bool
ref_maybe_used_by_call_p (gcall *call, ao_ref *ref, bool tbaa_p)
{
  bool res;
  res = ref_maybe_used_by_call_p_1 (call, ref, tbaa_p);
  if (res)
    ++alias_stats.ref_maybe_used_by_call_p_may_alias;
  else
    ++alias_stats.ref_maybe_used_by_call_p_no_alias;
  return res;
}
/* If the statement STMT may use the memory reference REF return
   true, otherwise return false.  */

bool
ref_maybe_used_by_stmt_p (gimple *stmt, ao_ref *ref, bool tbaa_p)
{
  if (is_gimple_assign (stmt))
    {
      tree rhs;

      /* All memory assign statements are single.  */
      if (!gimple_assign_single_p (stmt))
	return false;

      rhs = gimple_assign_rhs1 (stmt);
      if (is_gimple_reg (rhs)
	  || is_gimple_min_invariant (rhs)
	  || gimple_assign_rhs_code (stmt) == CONSTRUCTOR)
	return false;

      return refs_may_alias_p (rhs, ref, tbaa_p);
    }
  else if (is_gimple_call (stmt))
    return ref_maybe_used_by_call_p (as_a <gcall *> (stmt), ref, tbaa_p);
  else if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
    {
      tree retval = gimple_return_retval (return_stmt);
      if (retval
	  && TREE_CODE (retval) != SSA_NAME
	  && !is_gimple_min_invariant (retval)
	  && refs_may_alias_p (retval, ref, tbaa_p))
	return true;
      /* If ref escapes the function then the return acts as a use.  */
      tree base = ao_ref_base (ref);
      if (!base)
	;
      else if (DECL_P (base))
	return is_global_var (base);
      else if (TREE_CODE (base) == MEM_REF
	       || TREE_CODE (base) == TARGET_MEM_REF)
	return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
      return false;
    }

  return true;
}

bool
ref_maybe_used_by_stmt_p (gimple *stmt, tree ref, bool tbaa_p)
{
  ao_ref r;
  ao_ref_init (&r, ref);
  return ref_maybe_used_by_stmt_p (stmt, &r, tbaa_p);
}
/* If the call in statement CALL may clobber the memory reference REF
   return true, otherwise return false.  */

bool
call_may_clobber_ref_p_1 (gcall *call, ao_ref *ref, bool tbaa_p)
{
  tree base;
  tree callee;

  /* If the call is pure or const it cannot clobber anything.  */
  if (gimple_call_flags (call)
      & (ECF_PURE|ECF_CONST|ECF_LOOPING_CONST_OR_PURE|ECF_NOVOPS))
    return false;
  if (gimple_call_internal_p (call))
    switch (gimple_call_internal_fn (call))
      {
	/* Treat these internal calls like ECF_PURE for aliasing,
	   they don't write to any memory the program should care about.
	   They have important other side-effects, and read memory,
	   so can't be ECF_NOVOPS.  */
      case IFN_UBSAN_NULL:
      case IFN_UBSAN_BOUNDS:
      case IFN_UBSAN_VPTR:
      case IFN_UBSAN_OBJECT_SIZE:
      case IFN_UBSAN_PTR:
      case IFN_ASAN_CHECK:
	return false;
      default:
	break;
      }

  callee = gimple_call_fndecl (call);

  if (callee != NULL_TREE && !ref->volatile_p)
    {
      struct cgraph_node *node = cgraph_node::get (callee);
      if (node)
	{
	  modref_summary *summary = get_modref_function_summary (node);
	  if (summary)
	    {
	      if (!modref_may_conflict (call, summary->stores, ref, tbaa_p)
		  && (!summary->writes_errno
		      || !targetm.ref_may_alias_errno (ref)))
		{
		  alias_stats.modref_clobber_no_alias++;
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fprintf (dump_file,
			       "ipa-modref: call stmt ");
		      print_gimple_stmt (dump_file, call, 0);
		      fprintf (dump_file,
			       "ipa-modref: call to %s does not clobber ",
			       node->dump_name ());
		      if (!ref->ref && ref->base)
			{
			  fprintf (dump_file, "base: ");
			  print_generic_expr (dump_file, ref->base);
			}
		      else
			{
			  fprintf (dump_file, "ref: ");
			  print_generic_expr (dump_file, ref->ref);
			}
		      fprintf (dump_file, " alias sets: %i->%i\n",
			       ao_ref_base_alias_set (ref),
			       ao_ref_alias_set (ref));
		    }
		  return false;
		}
	      alias_stats.modref_clobber_may_alias++;
	    }
	}
    }

  base = ao_ref_base (ref);
  if (!base)
    return true;

  if (TREE_CODE (base) == SSA_NAME
      || CONSTANT_CLASS_P (base))
    return false;

  /* A call that is not without side-effects might involve volatile
     accesses and thus conflicts with all other volatile accesses.  */
  if (ref->volatile_p)
    return true;

  /* If the reference is based on a decl that is not aliased the call
     cannot possibly clobber it.  */
  if (DECL_P (base)
      && !may_be_aliased (base)
      /* But local non-readonly statics can be modified through recursion
	 or the call may implement a threading barrier which we must
	 treat as may-def.  */
      && (TREE_READONLY (base)
	  || !is_global_var (base)))
    return false;

  /* If the reference is based on a pointer that points to memory
     that may not be written to then the call cannot possibly clobber it.  */
  if ((TREE_CODE (base) == MEM_REF
       || TREE_CODE (base) == TARGET_MEM_REF)
      && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
      && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base, 0)))
    return false;

  if (int res = check_fnspec (call, ref, true))
    {
      if (res == 1)
	return true;
    }
  else
    return false;

  /* Check if base is a global static variable that is not written
     by the function.  */
  if (callee != NULL_TREE && VAR_P (base) && TREE_STATIC (base))
    {
      struct cgraph_node *node = cgraph_node::get (callee);
      bitmap written;
      int id;

      if (node
	  && (id = ipa_reference_var_uid (base)) != -1
	  && (written = ipa_reference_get_written_global (node))
	  && !bitmap_bit_p (written, id))
	return false;
    }

  /* Check if the base variable is call-clobbered.  */
  if (DECL_P (base))
    return pt_solution_includes (gimple_call_clobber_set (call), base);
  else if ((TREE_CODE (base) == MEM_REF
	    || TREE_CODE (base) == TARGET_MEM_REF)
	   && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
    {
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
      if (!pi)
	return true;

      return pt_solutions_intersect (gimple_call_clobber_set (call), &pi->pt);
    }

  return true;
}
/* If the call in statement CALL may clobber the memory reference REF
   return true, otherwise return false.  */

bool
call_may_clobber_ref_p (gcall *call, tree ref, bool tbaa_p)
{
  bool res;
  ao_ref r;
  ao_ref_init (&r, ref);
  res = call_may_clobber_ref_p_1 (call, &r, tbaa_p);
  if (res)
    ++alias_stats.call_may_clobber_ref_p_may_alias;
  else
    ++alias_stats.call_may_clobber_ref_p_no_alias;
  return res;
}
/* If the statement STMT may clobber the memory reference REF return true,
   otherwise return false.  */

bool
stmt_may_clobber_ref_p_1 (gimple *stmt, ao_ref *ref, bool tbaa_p)
{
  if (is_gimple_call (stmt))
    {
      tree lhs = gimple_call_lhs (stmt);
      if (lhs
	  && TREE_CODE (lhs) != SSA_NAME)
	{
	  ao_ref r;
	  ao_ref_init (&r, lhs);
	  if (refs_may_alias_p_1 (ref, &r, tbaa_p))
	    return true;
	}

      return call_may_clobber_ref_p_1 (as_a <gcall *> (stmt), ref, tbaa_p);
    }
  else if (gimple_assign_single_p (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);
      if (TREE_CODE (lhs) != SSA_NAME)
	{
	  ao_ref r;
	  ao_ref_init (&r, lhs);
	  return refs_may_alias_p_1 (ref, &r, tbaa_p);
	}
    }
  else if (gimple_code (stmt) == GIMPLE_ASM)
    return true;

  return false;
}

bool
stmt_may_clobber_ref_p (gimple *stmt, tree ref, bool tbaa_p)
{
  ao_ref r;
  ao_ref_init (&r, ref);
  return stmt_may_clobber_ref_p_1 (stmt, &r, tbaa_p);
}
/* Return true if store1 and store2 described by corresponding tuples
   <BASE, OFFSET, SIZE, MAX_SIZE> have the same size and store to the same
   address.  */

static bool
same_addr_size_stores_p (tree base1, poly_int64 offset1, poly_int64 size1,
			 poly_int64 max_size1,
			 tree base2, poly_int64 offset2, poly_int64 size2,
			 poly_int64 max_size2)
{
  /* Offsets need to be 0.  */
  if (maybe_ne (offset1, 0)
      || maybe_ne (offset2, 0))
    return false;

  bool base1_obj_p = SSA_VAR_P (base1);
  bool base2_obj_p = SSA_VAR_P (base2);

  /* We need one object.  */
  if (base1_obj_p == base2_obj_p)
    return false;
  tree obj = base1_obj_p ? base1 : base2;

  /* And we need one MEM_REF.  */
  bool base1_memref_p = TREE_CODE (base1) == MEM_REF;
  bool base2_memref_p = TREE_CODE (base2) == MEM_REF;
  if (base1_memref_p == base2_memref_p)
    return false;
  tree memref = base1_memref_p ? base1 : base2;

  /* Sizes need to be valid.  */
  if (!known_size_p (max_size1)
      || !known_size_p (max_size2)
      || !known_size_p (size1)
      || !known_size_p (size2))
    return false;

  /* Max_size needs to match size.  */
  if (maybe_ne (max_size1, size1)
      || maybe_ne (max_size2, size2))
    return false;

  /* Sizes need to match.  */
  if (maybe_ne (size1, size2))
    return false;

  /* Check that memref is a store to pointer with singleton points-to info.  */
  if (!integer_zerop (TREE_OPERAND (memref, 1)))
    return false;
  tree ptr = TREE_OPERAND (memref, 0);
  if (TREE_CODE (ptr) != SSA_NAME)
    return false;
  struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
  unsigned int pt_uid;
  if (!pi
      || !pt_solution_singleton_or_null_p (&pi->pt, &pt_uid))
    return false;

  /* Be conservative with non-call exceptions when the address might
     be NULL.  */
  if (cfun->can_throw_non_call_exceptions && pi->pt.null)
    return false;

  /* Check that ptr points relative to obj.  */
  unsigned int obj_uid = DECL_PT_UID (obj);
  if (obj_uid != pt_uid)
    return false;

  /* Check that the object size is the same as the store size.  That ensures
     that ptr points to the start of obj.  */
  return (DECL_SIZE (obj)
	  && poly_int_tree_p (DECL_SIZE (obj))
	  && known_eq (wi::to_poly_offset (DECL_SIZE (obj)), size1));
}
/* If STMT kills the memory reference REF return true, otherwise
   return false.  */

bool
stmt_kills_ref_p (gimple *stmt, ao_ref *ref)
{
  if (!ao_ref_base (ref))
    return false;

  if (gimple_has_lhs (stmt)
      && TREE_CODE (gimple_get_lhs (stmt)) != SSA_NAME
      /* The assignment is not necessarily carried out if it can throw
	 and we can catch it in the current function where we could inspect
	 the previous value.
	 ??? We only need to care about the RHS throwing.  For aggregate
	 assignments or similar calls and non-call exceptions the LHS
	 might throw as well.  */
      && !stmt_can_throw_internal (cfun, stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      /* If LHS is literally a base of the access we are done.  */
      if (ref->ref)
	{
	  tree base = ref->ref;
	  tree innermost_dropped_array_ref = NULL_TREE;
	  if (handled_component_p (base))
	    {
	      tree saved_lhs0 = NULL_TREE;
	      if (handled_component_p (lhs))
		{
		  saved_lhs0 = TREE_OPERAND (lhs, 0);
		  TREE_OPERAND (lhs, 0) = integer_zero_node;
		}
	      do
		{
		  /* Just compare the outermost handled component, if
		     they are equal we have found a possible common
		     base.  */
		  tree saved_base0 = TREE_OPERAND (base, 0);
		  TREE_OPERAND (base, 0) = integer_zero_node;
		  bool res = operand_equal_p (lhs, base, 0);
		  TREE_OPERAND (base, 0) = saved_base0;
		  if (res)
		    break;
		  /* Remember if we drop an array-ref that we need to
		     double-check not being at struct end.  */
		  if (TREE_CODE (base) == ARRAY_REF
		      || TREE_CODE (base) == ARRAY_RANGE_REF)
		    innermost_dropped_array_ref = base;
		  /* Otherwise drop handled components of the access.  */
		  base = saved_base0;
		}
	      while (handled_component_p (base));
	      if (saved_lhs0)
		TREE_OPERAND (lhs, 0) = saved_lhs0;
	    }
	  /* Finally check if the lhs has the same address and size as the
	     base candidate of the access.  Watch out if we have dropped
	     an array-ref that was at struct end, this means ref->ref may
	     be outside of the TYPE_SIZE of its base.  */
	  if ((! innermost_dropped_array_ref
	       || ! array_at_struct_end_p (innermost_dropped_array_ref))
	      && (lhs == base
		  || (((TYPE_SIZE (TREE_TYPE (lhs))
			== TYPE_SIZE (TREE_TYPE (base)))
		       || (TYPE_SIZE (TREE_TYPE (lhs))
			   && TYPE_SIZE (TREE_TYPE (base))
			   && operand_equal_p (TYPE_SIZE (TREE_TYPE (lhs)),
					       TYPE_SIZE (TREE_TYPE (base)),
					       0)))
		      && operand_equal_p (lhs, base,
					  OEP_ADDRESS_OF
					  | OEP_MATCH_SIDE_EFFECTS))))
	    return true;
	}

      /* Now look for non-literal equal bases with the restriction of
	 handling constant offset and size.  */
      /* For a must-alias check we need to be able to constrain
	 the access properly.  */
      if (!ref->max_size_known_p ())
	return false;
      poly_int64 size, offset, max_size, ref_offset = ref->offset;
      bool reverse;
      tree base = get_ref_base_and_extent (lhs, &offset, &size, &max_size,
					   &reverse);
      /* We can get MEM[symbol: sZ, index: D.8862_1] here,
	 so base == ref->base does not always hold.  */
      if (base != ref->base)
	{
	  /* Try using points-to info.  */
	  if (same_addr_size_stores_p (base, offset, size, max_size, ref->base,
				       ref->offset, ref->size, ref->max_size))
	    return true;

	  /* If both base and ref->base are MEM_REFs, only compare the
	     first operand, and if the second operand isn't equal constant,
	     try to add the offsets into offset and ref_offset.  */
	  if (TREE_CODE (base) == MEM_REF && TREE_CODE (ref->base) == MEM_REF
	      && TREE_OPERAND (base, 0) == TREE_OPERAND (ref->base, 0))
	    {
	      if (!tree_int_cst_equal (TREE_OPERAND (base, 1),
				       TREE_OPERAND (ref->base, 1)))
		{
		  poly_offset_int off1 = mem_ref_offset (base);
		  off1 <<= LOG2_BITS_PER_UNIT;
		  off1 += offset;
		  poly_offset_int off2 = mem_ref_offset (ref->base);
		  off2 <<= LOG2_BITS_PER_UNIT;
		  off2 += ref_offset;
		  if (!off1.to_shwi (&offset) || !off2.to_shwi (&ref_offset))
		    size = -1;
		}
	    }
	  else
	    size = -1;
	}
      /* For a must-alias check we need to be able to constrain
	 the access properly.  */
      if (known_eq (size, max_size)
	  && known_subrange_p (ref_offset, ref->max_size, offset, size))
	return true;
    }

  if (is_gimple_call (stmt))
    {
      tree callee = gimple_call_fndecl (stmt);
      if (callee != NULL_TREE
	  && gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
	switch (DECL_FUNCTION_CODE (callee))
	  {
	  case BUILT_IN_FREE:
	    {
	      tree ptr = gimple_call_arg (stmt, 0);
	      tree base = ao_ref_base (ref);
	      if (base && TREE_CODE (base) == MEM_REF
		  && TREE_OPERAND (base, 0) == ptr)
		return true;
	      break;
	    }

	  case BUILT_IN_MEMCPY:
	  case BUILT_IN_MEMPCPY:
	  case BUILT_IN_MEMMOVE:
	  case BUILT_IN_MEMSET:
	  case BUILT_IN_MEMCPY_CHK:
	  case BUILT_IN_MEMPCPY_CHK:
	  case BUILT_IN_MEMMOVE_CHK:
	  case BUILT_IN_MEMSET_CHK:
	  case BUILT_IN_STRNCPY:
	  case BUILT_IN_STPNCPY:
	  case BUILT_IN_CALLOC:
	    {
	      /* For a must-alias check we need to be able to constrain
		 the access properly.  */
	      if (!ref->max_size_known_p ())
		return false;
	      tree dest;
	      tree len;

	      /* In execution order a calloc call will never kill
		 anything.  However, DSE will (ab)use this interface
		 to ask if a calloc call writes the same memory locations
		 as a later assignment, memset, etc.  So handle calloc
		 in the expected way.  */
	      if (DECL_FUNCTION_CODE (callee) == BUILT_IN_CALLOC)
		{
		  tree arg0 = gimple_call_arg (stmt, 0);
		  tree arg1 = gimple_call_arg (stmt, 1);
		  if (TREE_CODE (arg0) != INTEGER_CST
		      || TREE_CODE (arg1) != INTEGER_CST)
		    return false;

		  dest = gimple_call_lhs (stmt);
		  if (!dest)
		    return false;
		  len = fold_build2 (MULT_EXPR, TREE_TYPE (arg0), arg0, arg1);
		}
	      else
		{
		  dest = gimple_call_arg (stmt, 0);
		  len = gimple_call_arg (stmt, 2);
		}
	      if (!poly_int_tree_p (len))
		return false;
	      tree rbase = ref->base;
	      poly_offset_int roffset = ref->offset;
	      ao_ref dref;
	      ao_ref_init_from_ptr_and_size (&dref, dest, len);
	      tree base = ao_ref_base (&dref);
	      poly_offset_int offset = dref.offset;
	      if (!base || !known_size_p (dref.size))
		return false;
	      if (TREE_CODE (base) == MEM_REF)
		{
		  if (TREE_CODE (rbase) != MEM_REF)
		    return false;
		  // Compare pointers.
		  offset += mem_ref_offset (base) << LOG2_BITS_PER_UNIT;
		  roffset += mem_ref_offset (rbase) << LOG2_BITS_PER_UNIT;
		  base = TREE_OPERAND (base, 0);
		  rbase = TREE_OPERAND (rbase, 0);
		}
	      if (base == rbase
		  && known_subrange_p (roffset, ref->max_size, offset,
				       wi::to_poly_offset (len)
				       << LOG2_BITS_PER_UNIT))
		return true;
	      break;
	    }

	  case BUILT_IN_VA_END:
	    {
	      tree ptr = gimple_call_arg (stmt, 0);
	      if (TREE_CODE (ptr) == ADDR_EXPR)
		{
		  tree base = ao_ref_base (ref);
		  if (TREE_OPERAND (ptr, 0) == base)
		    return true;
		}
	      break;
	    }

	  default:;
	  }
    }
  return false;
}

bool
stmt_kills_ref_p (gimple *stmt, tree ref)
{
  ao_ref r;
  ao_ref_init (&r, ref);
  return stmt_kills_ref_p (stmt, &r);
}
/* Walk the virtual use-def chain of VUSE until hitting the virtual operand
   TARGET or a statement clobbering the memory reference REF in which
   case false is returned.  The walk starts with VUSE, one argument of PHI.  */

static bool
maybe_skip_until (gimple *phi, tree &target, basic_block target_bb,
		  ao_ref *ref, tree vuse, bool tbaa_p, unsigned int &limit,
		  bitmap *visited, bool abort_on_visited,
		  void *(*translate)(ao_ref *, tree, void *, translate_flags *),
		  translate_flags disambiguate_only,
		  void *data)
{
  basic_block bb = gimple_bb (phi);

  if (!*visited)
    *visited = BITMAP_ALLOC (NULL);

  bitmap_set_bit (*visited, SSA_NAME_VERSION (PHI_RESULT (phi)));

  /* Walk until we hit the target.  */
  while (vuse != target)
    {
      gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
      /* If we are searching for the target VUSE by walking up to
	 TARGET_BB dominating the original PHI we are finished once
	 we reach a default def or a definition in a block dominating
	 that block.  Update TARGET and return.  */
      if (target_bb
	  && (gimple_nop_p (def_stmt)
	      || dominated_by_p (CDI_DOMINATORS,
				 target_bb, gimple_bb (def_stmt))))
	{
	  target = vuse;
	  return true;
	}

      /* Recurse for PHI nodes.  */
      if (gimple_code (def_stmt) == GIMPLE_PHI)
	{
	  /* An already visited PHI node ends the walk successfully.  */
	  if (bitmap_bit_p (*visited, SSA_NAME_VERSION (PHI_RESULT (def_stmt))))
	    return !abort_on_visited;
	  vuse = get_continuation_for_phi (def_stmt, ref, tbaa_p, limit,
					   visited, abort_on_visited,
					   translate, data, disambiguate_only);
	  if (!vuse)
	    return false;
	  continue;
	}
      else if (gimple_nop_p (def_stmt))
	return false;
      else
	{
	  /* A clobbering statement or the end of the IL ends it failing.  */
	  if ((int)limit <= 0)
	    return false;
	  --limit;
	  if (stmt_may_clobber_ref_p_1 (def_stmt, ref, tbaa_p))
	    {
	      translate_flags tf = disambiguate_only;
	      if (translate
		  && (*translate) (ref, vuse, data, &tf) == NULL)
		;
	      else
		return false;
	    }
	}
      /* If we reach a new basic-block see if we already skipped it
	 in a previous walk that ended successfully.  */
      if (gimple_bb (def_stmt) != bb)
	{
	  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (vuse)))
	    return !abort_on_visited;
	  bb = gimple_bb (def_stmt);
	}
      vuse = gimple_vuse (def_stmt);
    }
  return true;
}
/* Starting from a PHI node for the virtual operand of the memory reference
   REF find a continuation virtual operand that allows to continue walking
   statements dominating PHI skipping only statements that cannot possibly
   clobber REF.  Decrements LIMIT for each alias disambiguation done
   and aborts the walk, returning NULL_TREE if it reaches zero.
   Returns NULL_TREE if no suitable virtual operand can be found.  */

tree
get_continuation_for_phi (gimple *phi, ao_ref *ref, bool tbaa_p,
			  unsigned int &limit, bitmap *visited,
			  bool abort_on_visited,
			  void *(*translate)(ao_ref *, tree, void *,
					     translate_flags *),
			  void *data,
			  translate_flags disambiguate_only)
{
  unsigned nargs = gimple_phi_num_args (phi);

  /* Through a single-argument PHI we can simply look through.  */
  if (nargs == 1)
    return PHI_ARG_DEF (phi, 0);

  /* For two or more arguments try to pairwise skip non-aliasing code
     until we hit the phi argument definition that dominates the other one.  */
  basic_block phi_bb = gimple_bb (phi);
  tree arg0, arg1;
  unsigned i;

  /* Find a candidate for the virtual operand which definition
     dominates those of all others.  */
  /* First look if any of the args themselves satisfy this.  */
  for (i = 0; i < nargs; ++i)
    {
      arg0 = PHI_ARG_DEF (phi, i);
      if (SSA_NAME_IS_DEFAULT_DEF (arg0))
	break;
      basic_block def_bb = gimple_bb (SSA_NAME_DEF_STMT (arg0));
      if (def_bb != phi_bb
	  && dominated_by_p (CDI_DOMINATORS, phi_bb, def_bb))
	break;
      arg0 = NULL_TREE;
    }
  /* If not, look if we can reach such candidate by walking defs
     until we hit the immediate dominator.  maybe_skip_until will
     do that for us.  */
  basic_block dom = get_immediate_dominator (CDI_DOMINATORS, phi_bb);

  /* Then check against the (to be) found candidate.  */
  for (i = 0; i < nargs; ++i)
    {
      arg1 = PHI_ARG_DEF (phi, i);
      if (arg1 == arg0)
	;
      else if (! maybe_skip_until (phi, arg0, dom, ref, arg1, tbaa_p,
				   limit, visited,
				   abort_on_visited,
				   translate,
				   /* Do not valueize when walking over
				      backedges.  */
				   dominated_by_p
				     (CDI_DOMINATORS,
				      gimple_bb (SSA_NAME_DEF_STMT (arg1)),
				      phi_bb)
				   ? TR_DISAMBIGUATE
				   : disambiguate_only, data))
	return NULL_TREE;
    }

  return arg0;
}
/* Based on the memory reference REF and its virtual use VUSE call
   WALKER for each virtual use that is equivalent to VUSE, including VUSE
   itself.  That is, for each virtual use for which its defining statement
   does not clobber REF.

   WALKER is called with REF, the current virtual use and DATA.  If
   WALKER returns non-NULL the walk stops and its result is returned.
   At the end of a non-successful walk NULL is returned.

   TRANSLATE if non-NULL is called with a pointer to REF, the virtual
   use which definition is a statement that may clobber REF and DATA.
   If TRANSLATE returns (void *)-1 the walk stops and NULL is returned.
   If TRANSLATE returns non-NULL the walk stops and its result is returned.
   If TRANSLATE returns NULL the walk continues and TRANSLATE is supposed
   to adjust REF and *DATA to make that valid.

   VALUEIZE if non-NULL is called with the next VUSE that is considered
   and return value is substituted for that.  This can be used to
   implement optimistic value-numbering for example.  Note that the
   VUSE argument is assumed to be valueized already.

   LIMIT specifies the number of alias queries we are allowed to do,
   the walk stops when it reaches zero and NULL is returned.  LIMIT
   is decremented by the number of alias queries (plus adjustments
   done by the callbacks) upon return.

   TODO: Cache the vector of equivalent vuses per ref, vuse pair.  */

void *
walk_non_aliased_vuses (ao_ref *ref, tree vuse, bool tbaa_p,
			void *(*walker)(ao_ref *, tree, void *),
			void *(*translate)(ao_ref *, tree, void *,
					   translate_flags *),
			tree (*valueize)(tree),
			unsigned &limit, void *data)
{
  bitmap visited = NULL;
  void *res;
  bool translated = false;

  timevar_push (TV_ALIAS_STMT_WALK);

  do
    {
      gimple *def_stmt;

      /* ??? Do we want to account this to TV_ALIAS_STMT_WALK?  */
      res = (*walker) (ref, vuse, data);
      /* Abort walk.  */
      if (res == (void *)-1)
	{
	  res = NULL;
	  break;
	}
      /* Lookup succeeded.  */
      else if (res != NULL)
	break;

      if (valueize)
	{
	  vuse = valueize (vuse);
	  if (!vuse)
	    {
	      res = NULL;
	      break;
	    }
	}
      def_stmt = SSA_NAME_DEF_STMT (vuse);
      if (gimple_nop_p (def_stmt))
	break;
      else if (gimple_code (def_stmt) == GIMPLE_PHI)
	vuse = get_continuation_for_phi (def_stmt, ref, tbaa_p, limit,
					 &visited, translated, translate, data);
      else
	{
	  if ((int)limit <= 0)
	    {
	      res = NULL;
	      break;
	    }
	  --limit;
	  if (stmt_may_clobber_ref_p_1 (def_stmt, ref, tbaa_p))
	    {
	      if (!translate)
		break;
	      translate_flags disambiguate_only = TR_TRANSLATE;
	      res = (*translate) (ref, vuse, data, &disambiguate_only);
	      /* Failed lookup and translation.  */
	      if (res == (void *)-1)
		{
		  res = NULL;
		  break;
		}
	      /* Lookup succeeded.  */
	      else if (res != NULL)
		break;
	      /* Translation succeeded, continue walking.  */
	      translated = translated || disambiguate_only == TR_TRANSLATE;
	    }
	  vuse = gimple_vuse (def_stmt);
	}
    }
  while (vuse);

  if (visited)
    BITMAP_FREE (visited);

  timevar_pop (TV_ALIAS_STMT_WALK);

  return res;
}
/* Based on the memory reference REF call WALKER for each vdef which
   defining statement may clobber REF, starting with VDEF.  If REF
   is NULL_TREE, each defining statement is visited.

   WALKER is called with REF, the current vdef and DATA.  If WALKER
   returns true the walk is stopped, otherwise it continues.

   If function entry is reached, FUNCTION_ENTRY_REACHED is set to true.
   The pointer may be NULL and then we do not track this information.

   At PHI nodes walk_aliased_vdefs forks into one walk for each
   PHI argument (but only one walk continues on merge points), the
   return value is true if any of the walks was successful.

   The function returns the number of statements walked or -1 if
   LIMIT stmts were walked and the walk was aborted at this point.
   If LIMIT is zero the walk is not aborted.  */

static int
walk_aliased_vdefs_1 (ao_ref *ref, tree vdef,
		      bool (*walker)(ao_ref *, tree, void *), void *data,
		      bitmap *visited, unsigned int cnt,
		      bool *function_entry_reached, unsigned limit)
{
  do
    {
      gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);

      if (*visited
	  && !bitmap_set_bit (*visited, SSA_NAME_VERSION (vdef)))
	return cnt;

      if (gimple_nop_p (def_stmt))
	{
	  if (function_entry_reached)
	    *function_entry_reached = true;
	  return cnt;
	}
      else if (gimple_code (def_stmt) == GIMPLE_PHI)
	{
	  unsigned i;
	  if (!*visited)
	    *visited = BITMAP_ALLOC (NULL);
	  for (i = 0; i < gimple_phi_num_args (def_stmt); ++i)
	    {
	      int res = walk_aliased_vdefs_1 (ref,
					      gimple_phi_arg_def (def_stmt, i),
					      walker, data, visited, cnt,
					      function_entry_reached, limit);
	      if (res == -1)
		return -1;
	      cnt = res;
	    }
	  return cnt;
	}

      /* ??? Do we want to account this to TV_ALIAS_STMT_WALK?  */
      cnt++;
      if (cnt == limit)
	return -1;
      if ((!ref
	   || stmt_may_clobber_ref_p_1 (def_stmt, ref))
	  && (*walker) (ref, vdef, data))
	return cnt;

      vdef = gimple_vuse (def_stmt);
    }
  while (1);
}

int
walk_aliased_vdefs (ao_ref *ref, tree vdef,
		    bool (*walker)(ao_ref *, tree, void *), void *data,
		    bitmap *visited,
		    bool *function_entry_reached, unsigned int limit)
{
  bitmap local_visited = NULL;
  int ret;

  timevar_push (TV_ALIAS_STMT_WALK);

  if (function_entry_reached)
    *function_entry_reached = false;

  ret = walk_aliased_vdefs_1 (ref, vdef, walker, data,
			      visited ? visited : &local_visited, 0,
			      function_entry_reached, limit);
  if (local_visited)
    BITMAP_FREE (local_visited);

  timevar_pop (TV_ALIAS_STMT_WALK);

  return ret;
}
/* Verify validity of the fnspec string.
   See attr-fnspec.h for details.  */

void
attr_fnspec::verify ()
{
  bool err = false;
  if (!len)
    return;

  /* Check return value specifier.  */
  if (len < return_desc_size)
    err = true;
  else if ((len - return_desc_size) % arg_desc_size)
    err = true;
  else if ((str[0] < '1' || str[0] > '4')
	   && str[0] != '.' && str[0] != 'm')
    err = true;

  switch (str[1])
    {
    case ' ':
    case 'p':
    case 'P':
    case 'c':
    case 'C':
      break;
    default:
      err = true;
    }
  if (err)
    internal_error ("invalid fn spec attribute \"%s\"", str);

  /* Now check all parameters.  */
  for (unsigned int i = 0; arg_specified_p (i); i++)
    {
      unsigned int idx = arg_idx (i);
      switch (str[idx])
	{
	case 'x':
	case 'X':
	case 'r':
	case 'R':
	case 'o':
	case 'O':
	case 'w':
	case 'W':
	case '.':
	  if ((str[idx + 1] >= '1' && str[idx + 1] <= '9')
	      || str[idx + 1] == 't')
	    {
	      if (str[idx] != 'r' && str[idx] != 'R'
		  && str[idx] != 'w' && str[idx] != 'W'
		  && str[idx] != 'o' && str[idx] != 'O')
		err = true;
	      if (str[idx + 1] != 't'
		  /* Size specified is scalar, so it should be described
		     by ". " if specified at all.  */
		  && (arg_specified_p (str[idx + 1] - '1')
		      && str[arg_idx (str[idx + 1] - '1')] != '.'))
		err = true;
	    }
	  else if (str[idx + 1] != ' ')
	    err = true;
	  break;
	default:
	  if (str[idx] < '1' || str[idx] > '9')
	    err = true;
	}
      if (err)
	internal_error ("invalid fn spec attribute \"%s\" arg %i", str, i);
    }
}
/* Return true if TYPE1 and TYPE2 will always give the same answer
   when compared with other types using same_type_for_tbaa_p.  */

static bool
types_equal_for_same_type_for_tbaa_p (tree type1, tree type2,
				      bool lto_streaming_safe)
{
  /* We use same_type_for_tbaa_p to match types in the access path.
     This check is overly conservative.  */
  type1 = TYPE_MAIN_VARIANT (type1);
  type2 = TYPE_MAIN_VARIANT (type2);

  if (TYPE_STRUCTURAL_EQUALITY_P (type1)
      != TYPE_STRUCTURAL_EQUALITY_P (type2))
    return false;
  if (TYPE_STRUCTURAL_EQUALITY_P (type1))
    return type1 == type2;

  if (lto_streaming_safe)
    return type1 == type2;
  else
    return TYPE_CANONICAL (type1) == TYPE_CANONICAL (type2);
}
3914 /* Compare REF1 and REF2 and return flags specifying their differences.
3915 If LTO_STREAMING_SAFE is true do not use alias sets and canonical
3916 types that are going to be recomputed.
3917 If TBAA is true also compare TBAA metadata. */
3920 ao_compare::compare_ao_refs (ao_ref
*ref1
, ao_ref
*ref2
,
3921 bool lto_streaming_safe
,
3924 if (TREE_THIS_VOLATILE (ref1
->ref
) != TREE_THIS_VOLATILE (ref2
->ref
))
3926 tree base1
= ao_ref_base (ref1
);
3927 tree base2
= ao_ref_base (ref2
);
3929 if (!known_eq (ref1
->offset
, ref2
->offset
)
3930 || !known_eq (ref1
->size
, ref2
->size
)
3931 || !known_eq (ref1
->max_size
, ref2
->max_size
))
3934 /* For variable accesses we need to compare actual paths
3935 to check that both refs are accessing same address and the access size. */
3936 if (!known_eq (ref1
->size
, ref1
->max_size
))
3938 if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (ref1
->ref
)),
3939 TYPE_SIZE (TREE_TYPE (ref2
->ref
)), 0))
3941 tree r1
= ref1
->ref
;
3942 tree r2
= ref2
->ref
;
3944 /* Handle toplevel COMPONENT_REFs of bitfields.
3945 Those are special since they are not allowed in
3947 if (TREE_CODE (r1
) == COMPONENT_REF
3948 && DECL_BIT_FIELD (TREE_OPERAND (r1
, 1)))
3950 if (TREE_CODE (r2
) != COMPONENT_REF
3951 || !DECL_BIT_FIELD (TREE_OPERAND (r2
, 1)))
3953 tree field1
= TREE_OPERAND (r1
, 1);
3954 tree field2
= TREE_OPERAND (r2
, 1);
3955 if (!operand_equal_p (DECL_FIELD_OFFSET (field1
),
3956 DECL_FIELD_OFFSET (field2
), 0)
3957 || !operand_equal_p (DECL_FIELD_BIT_OFFSET (field1
),
3958 DECL_FIELD_BIT_OFFSET (field2
), 0)
3959 || !operand_equal_p (DECL_SIZE (field1
), DECL_SIZE (field2
), 0)
3960 || !types_compatible_p (TREE_TYPE (r1
),
3963 r1
= TREE_OPERAND (r1
, 0);
3964 r2
= TREE_OPERAND (r2
, 0);
3966 else if (TREE_CODE (r2
) == COMPONENT_REF
3967 && DECL_BIT_FIELD (TREE_OPERAND (r2
, 1)))
3970 /* Similarly for bit field refs. */
3971 if (TREE_CODE (r1
) == BIT_FIELD_REF
)
3973 if (TREE_CODE (r2
) != BIT_FIELD_REF
3974 || !operand_equal_p (TREE_OPERAND (r1
, 1),
3975 TREE_OPERAND (r2
, 1), 0)
3976 || !operand_equal_p (TREE_OPERAND (r1
, 2),
3977 TREE_OPERAND (r2
, 2), 0)
3978 || !types_compatible_p (TREE_TYPE (r1
),
3981 r1
= TREE_OPERAND (r1
, 0);
3982 r2
= TREE_OPERAND (r2
, 0);
3984 else if (TREE_CODE (r2
) == BIT_FIELD_REF
)
3987 /* Now we can compare the address of actual memory access. */
3988 if (!operand_equal_p (r1
, r2
, OEP_ADDRESS_OF
| OEP_MATCH_SIDE_EFFECTS
))
3991 /* For constant accesses we get more matches by comparing offset only. */
3992 else if (!operand_equal_p (base1
, base2
,
3993 OEP_ADDRESS_OF
| OEP_MATCH_SIDE_EFFECTS
))
3996 /* We can't simply use get_object_alignment_1 on the full
3997 reference as for accesses with variable indexes this reports
3998 too conservative alignment. */
3999 unsigned int align1
, align2
;
4000 unsigned HOST_WIDE_INT bitpos1
, bitpos2
;
4001 bool known1
= get_object_alignment_1 (base1
, &align1
, &bitpos1
);
4002 bool known2
= get_object_alignment_1 (base2
, &align2
, &bitpos2
);
4003 /* ??? For MEMREF get_object_alignment_1 determines aligned from
4004 TYPE_ALIGN but still returns false. This seem to contradict
4005 its description. So compare even if alignment is unknown. */
4006 if (known1
!= known2
4007 || (bitpos1
!= bitpos2
|| align1
!= align2
))
4010 /* Now we know that accesses are semantically same. */
4013 /* ao_ref_base strips inner MEM_REF [&decl], recover from that here. */
4014 tree rbase1
= ref1
->ref
;
4016 while (handled_component_p (rbase1
))
4017 rbase1
= TREE_OPERAND (rbase1
, 0);
4018 tree rbase2
= ref2
->ref
;
4019 while (handled_component_p (rbase2
))
4020 rbase2
= TREE_OPERAND (rbase2
, 0);
4022 /* MEM_REFs and TARGET_MEM_REFs record dependence cliques which are used to
4023 implement restrict pointers. MR_DEPENDENCE_CLIQUE 0 means no information.
4024 Otherwise we need to match bases and cliques. */
4025 if ((((TREE_CODE (rbase1
) == MEM_REF
|| TREE_CODE (rbase1
) == TARGET_MEM_REF
)
4026 && MR_DEPENDENCE_CLIQUE (rbase1
))
4027 || ((TREE_CODE (rbase2
) == MEM_REF
|| TREE_CODE (rbase2
) == TARGET_MEM_REF
)
4028 && MR_DEPENDENCE_CLIQUE (rbase2
)))
4029 && (TREE_CODE (rbase1
) != TREE_CODE (rbase2
)
4030 || MR_DEPENDENCE_CLIQUE (rbase1
) != MR_DEPENDENCE_CLIQUE (rbase2
)
4031 || (MR_DEPENDENCE_BASE (rbase1
) != MR_DEPENDENCE_BASE (rbase2
))))
4032 flags
|= DEPENDENCE_CLIQUE
;
4037 /* Alias sets are not stable across LTO sreaming; be conservative here
4038 and compare types the alias sets are ultimately based on. */
4039 if (lto_streaming_safe
)
4041 tree t1
= ao_ref_alias_ptr_type (ref1
);
4042 tree t2
= ao_ref_alias_ptr_type (ref2
);
4043 if (!alias_ptr_types_compatible_p (t1
, t2
))
4044 flags
|= REF_ALIAS_SET
;
4046 t1
= ao_ref_base_alias_ptr_type (ref1
);
4047 t2
= ao_ref_base_alias_ptr_type (ref2
);
4048 if (!alias_ptr_types_compatible_p (t1
, t2
))
4049 flags
|= BASE_ALIAS_SET
;
4053 if (ao_ref_alias_set (ref1
) != ao_ref_alias_set (ref2
))
4054 flags
|= REF_ALIAS_SET
;
4055 if (ao_ref_base_alias_set (ref1
) != ao_ref_base_alias_set (ref2
))
4056 flags
|= BASE_ALIAS_SET
;
4059 /* Access path is used only on non-view-converted references. */
4060 bool view_converted
= view_converted_memref_p (rbase1
);
4061 if (view_converted_memref_p (rbase2
) != view_converted
)
4062 return flags
| ACCESS_PATH
;
4063 else if (view_converted
)
  /* Find the start of the access paths and look for trailing arrays.  */
  tree c1 = ref1->ref, c2 = ref2->ref;
  tree end_struct_ref1 = NULL, end_struct_ref2 = NULL;
  int nskipped1 = 0, nskipped2 = 0;
  int i = 0;

  for (tree p1 = ref1->ref; handled_component_p (p1); p1 = TREE_OPERAND (p1, 0))
    {
      if (component_ref_to_zero_sized_trailing_array_p (p1))
	end_struct_ref1 = p1;
      if (ends_tbaa_access_path_p (p1))
	c1 = p1, nskipped1 = i;
      i++;
    }
  i = 0;
  for (tree p2 = ref2->ref; handled_component_p (p2); p2 = TREE_OPERAND (p2, 0))
    {
      if (component_ref_to_zero_sized_trailing_array_p (p2))
	end_struct_ref2 = p2;
      if (ends_tbaa_access_path_p (p2))
	c2 = p2, nskipped2 = i;
      i++;
    }
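  /* ends_tbaa_access_path_p holds for components that terminate the
     TBAA access path, such as a VIEW_CONVERT_EXPR, a BIT_FIELD_REF or
     a union member access.  C1/C2 end up at the innermost such
     component (the effective start of the TBAA path), while
     NSKIPPED1/NSKIPPED2 record its depth within the full path.  */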
  /* For variable accesses we cannot rely on the offset match below.
     We know that the paths are structurally the same, so only check
     that the starts of the TBAA paths did not diverge.  */
  if (!known_eq (ref1->size, ref1->max_size)
      && nskipped1 != nskipped2)
    return flags | ACCESS_PATH;
  /* Information about trailing refs is used by
     aliasing_component_refs_p, which is applied only if the paths
     have handled components.  */
  if (!handled_component_p (c1) && !handled_component_p (c2))
    ;
  else if ((end_struct_ref1 != NULL) != (end_struct_ref2 != NULL))
    return flags | ACCESS_PATH;
  if (end_struct_ref1
      && TYPE_MAIN_VARIANT (TREE_TYPE (end_struct_ref1))
	 != TYPE_MAIN_VARIANT (TREE_TYPE (end_struct_ref2)))
    return flags | ACCESS_PATH;
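  /* A zero-sized trailing array is the GNU variant of a flexible
     array member, e.g.

       struct s { int n; int elts[0]; };

     Accesses to ELTS may extend past the declared end of STRUCT S,
     which is why the oracles need to know whether either path
     contains such a reference.  */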
  /* Now compare all handled components of the access path.
     We have three oracles that care about access paths:
     - aliasing_component_refs_p
     - nonoverlapping_refs_since_match_p
     - nonoverlapping_component_refs_p
     We need to match the things these oracles compare.

     It is only necessary to check types for compatibility
     and offsets.  The rest of what the oracles compare are actual
     addresses, and those are already known to be the same:
     - for constant accesses we check offsets
     - for variable accesses we already matched
       the paths lexically with operand_equal_p.  */
  while (true)
    {
      bool comp1 = handled_component_p (c1);
      bool comp2 = handled_component_p (c2);

      if (comp1 != comp2)
	return flags | ACCESS_PATH;
      if (!comp1)
	break;
      if (TREE_CODE (c1) != TREE_CODE (c2))
	return flags | ACCESS_PATH;

      /* aliasing_component_refs_p attempts to find a type match within
	 the paths.  For that reason both types need to be equal
	 with respect to same_type_for_tbaa_p.  */
      if (!types_equal_for_same_type_for_tbaa_p (TREE_TYPE (c1),
						 TREE_TYPE (c2),
						 lto_streaming_safe))
	return flags | ACCESS_PATH;
      if (component_ref_to_zero_sized_trailing_array_p (c1)
	  != component_ref_to_zero_sized_trailing_array_p (c2))
	return flags | ACCESS_PATH;
      /* aliasing_matching_component_refs_p compares
	 offsets within the path.  Other properties are ignored.
	 Do not bother to verify offsets in variable accesses.  Here we
	 already compared them by operand_equal_p, so they are
	 structurally the same.  */
      if (known_eq (ref1->size, ref1->max_size))
	{
	  poly_int64 offadj1, sztmc1, msztmc1;
	  bool reverse1;
	  get_ref_base_and_extent (c1, &offadj1, &sztmc1, &msztmc1,
				   &reverse1);
	  poly_int64 offadj2, sztmc2, msztmc2;
	  bool reverse2;
	  get_ref_base_and_extent (c2, &offadj2, &sztmc2, &msztmc2,
				   &reverse2);
	  if (!known_eq (offadj1, offadj2))
	    return flags | ACCESS_PATH;
	}
      c1 = TREE_OPERAND (c1, 0);
      c2 = TREE_OPERAND (c2, 0);
    }
  /* Finally test the access type.  */
  if (!types_equal_for_same_type_for_tbaa_p (TREE_TYPE (c1),
					     TREE_TYPE (c2),
					     lto_streaming_safe))
    return flags | ACCESS_PATH;
  return flags;
}
/* Hash REF to HSTATE.  If LTO_STREAMING_SAFE is true, do not use alias
   sets and canonical types.  */

void
ao_compare::hash_ao_ref (ao_ref *ref, bool lto_streaming_safe, bool tbaa,
			 inchash::hash &hstate)
{
  tree base = ao_ref_base (ref);
  tree tbase = base;

  if (!known_eq (ref->size, ref->max_size))
    {
      tree r = ref->ref;
      if (TREE_CODE (r) == COMPONENT_REF
	  && DECL_BIT_FIELD (TREE_OPERAND (r, 1)))
	{
	  tree field = TREE_OPERAND (r, 1);
	  hash_operand (DECL_FIELD_OFFSET (field), hstate, 0);
	  hash_operand (DECL_FIELD_BIT_OFFSET (field), hstate, 0);
	  hash_operand (DECL_SIZE (field), hstate, 0);
	  r = TREE_OPERAND (r, 0);
	}
      if (TREE_CODE (r) == BIT_FIELD_REF)
	{
	  hash_operand (TREE_OPERAND (r, 1), hstate, 0);
	  hash_operand (TREE_OPERAND (r, 2), hstate, 0);
	  r = TREE_OPERAND (r, 0);
	}
      hash_operand (TYPE_SIZE (TREE_TYPE (ref->ref)), hstate, 0);
      hash_operand (r, hstate, OEP_ADDRESS_OF | OEP_MATCH_SIDE_EFFECTS);
    }
  else
    hash_operand (tbase, hstate, OEP_ADDRESS_OF | OEP_MATCH_SIDE_EFFECTS);
  hstate.add_poly_int (ref->offset);
  hstate.add_poly_int (ref->size);
  hstate.add_poly_int (ref->max_size);

  if (!lto_streaming_safe && tbaa)
    {
      hstate.add_int (ao_ref_alias_set (ref));
      hstate.add_int (ao_ref_base_alias_set (ref));
    }
}
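/* The hash is kept compatible with the comparison above: whenever
   compare_ao_refs reports no difference for two references,
   hash_ao_ref has to produce the same value for both, which is why it
   hashes exactly the operands the comparison inspects (the address,
   offset, size and, when TBAA is used outside of LTO streaming, the
   alias sets).  A minimal sketch of the intended pairing, assuming an
   ICF-style client that buckets references by inchash::hash::end ()
   (REF1, REF2 and LTO_STREAMING_SAFE being the caller's data):

     inchash::hash h1, h2;
     ao_compare cmp;
     cmp.hash_ao_ref (ref1, lto_streaming_safe, true, h1);
     cmp.hash_ao_ref (ref2, lto_streaming_safe, true, h2);
     if (h1.end () == h2.end ()
	 && cmp.compare_ao_refs (ref1, ref2, lto_streaming_safe, true) == 0)
       ;  (* REF1 and REF2 are interchangeable for the oracles.  *)  */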