1 /* Alias analysis for trees.
2 Copyright (C) 2004-2023 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
29 #include "timevar.h" /* for TV_ALIAS_STMT_WALK */
32 #include "tree-pretty-print.h"
34 #include "fold-const.h"
35 #include "langhooks.h"
39 #include "ipa-reference.h"
41 #include "ipa-modref-tree.h"
42 #include "ipa-modref.h"
43 #include "attr-fnspec.h"
46 #include "gimple-pretty-print.h"
47 #include "print-tree.h"
48 #include "tree-ssa-alias-compare.h"
50 #include "internal-fn.h"
52 /* Broad overview of how alias analysis on gimple works:
54 Statements clobbering or using memory are linked through the
55 virtual operand factored use-def chain. The virtual operand
56 is unique per function, its symbol is accessible via gimple_vop (cfun).
57 Virtual operands are used for efficiently walking memory statements
58 in the gimple IL and are useful for things like value-numbering as
59 a generation count for memory references.
61 SSA_NAME pointers may have associated points-to information
62 accessible via the SSA_NAME_PTR_INFO macro. Flow-insensitive
63 points-to information is (re-)computed by the TODO_rebuild_alias
64 pass manager todo. Points-to information is also used for more
65 precise tracking of call-clobbered and call-used variables and
66 related disambiguations.
68 This file contains functions for disambiguating memory references,
69 the so called alias-oracle and tools for walking of the gimple IL.
71 The main alias-oracle entry-points are
73 bool stmt_may_clobber_ref_p (gimple *, tree)
75 This function queries if a statement may invalidate (parts of)
76 the memory designated by the reference tree argument.
78 bool ref_maybe_used_by_stmt_p (gimple *, tree)
80 This function queries if a statement may need (parts of) the
81 memory designated by the reference tree argument.
83 There are variants of these functions that only handle the call
84 part of a statement, call_may_clobber_ref_p and ref_maybe_used_by_call_p.
85 Note that these do not disambiguate against a possible call lhs.
87 bool refs_may_alias_p (tree, tree)
89 This function tries to disambiguate two reference trees.
91 bool ptr_deref_may_alias_global_p (tree, bool)
93 This function queries if dereferencing a pointer variable may
94 alias global memory. If bool argument is true, global memory
95 is considered to also include function local memory that escaped.
97 More low-level disambiguators are available and documented in
98 this file. Low-level disambiguators dealing with points-to
99 information are in tree-ssa-structalias.cc. */
101 static int nonoverlapping_refs_since_match_p (tree
, tree
, tree
, tree
, bool);
102 static bool nonoverlapping_component_refs_p (const_tree
, const_tree
);
104 /* Query statistics for the different low-level disambiguators.
105 A high-level query may trigger multiple of them. */
108 unsigned HOST_WIDE_INT refs_may_alias_p_may_alias
;
109 unsigned HOST_WIDE_INT refs_may_alias_p_no_alias
;
110 unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_may_alias
;
111 unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_no_alias
;
112 unsigned HOST_WIDE_INT call_may_clobber_ref_p_may_alias
;
113 unsigned HOST_WIDE_INT call_may_clobber_ref_p_no_alias
;
114 unsigned HOST_WIDE_INT aliasing_component_refs_p_may_alias
;
115 unsigned HOST_WIDE_INT aliasing_component_refs_p_no_alias
;
116 unsigned HOST_WIDE_INT nonoverlapping_component_refs_p_may_alias
;
117 unsigned HOST_WIDE_INT nonoverlapping_component_refs_p_no_alias
;
118 unsigned HOST_WIDE_INT nonoverlapping_refs_since_match_p_may_alias
;
119 unsigned HOST_WIDE_INT nonoverlapping_refs_since_match_p_must_overlap
;
120 unsigned HOST_WIDE_INT nonoverlapping_refs_since_match_p_no_alias
;
121 unsigned HOST_WIDE_INT stmt_kills_ref_p_no
;
122 unsigned HOST_WIDE_INT stmt_kills_ref_p_yes
;
123 unsigned HOST_WIDE_INT modref_use_may_alias
;
124 unsigned HOST_WIDE_INT modref_use_no_alias
;
125 unsigned HOST_WIDE_INT modref_clobber_may_alias
;
126 unsigned HOST_WIDE_INT modref_clobber_no_alias
;
127 unsigned HOST_WIDE_INT modref_kill_no
;
128 unsigned HOST_WIDE_INT modref_kill_yes
;
129 unsigned HOST_WIDE_INT modref_tests
;
130 unsigned HOST_WIDE_INT modref_baseptr_tests
;
134 dump_alias_stats (FILE *s
)
136 fprintf (s
, "\nAlias oracle query stats:\n");
137 fprintf (s
, " refs_may_alias_p: "
138 HOST_WIDE_INT_PRINT_DEC
" disambiguations, "
139 HOST_WIDE_INT_PRINT_DEC
" queries\n",
140 alias_stats
.refs_may_alias_p_no_alias
,
141 alias_stats
.refs_may_alias_p_no_alias
142 + alias_stats
.refs_may_alias_p_may_alias
);
143 fprintf (s
, " ref_maybe_used_by_call_p: "
144 HOST_WIDE_INT_PRINT_DEC
" disambiguations, "
145 HOST_WIDE_INT_PRINT_DEC
" queries\n",
146 alias_stats
.ref_maybe_used_by_call_p_no_alias
,
147 alias_stats
.refs_may_alias_p_no_alias
148 + alias_stats
.ref_maybe_used_by_call_p_may_alias
);
149 fprintf (s
, " call_may_clobber_ref_p: "
150 HOST_WIDE_INT_PRINT_DEC
" disambiguations, "
151 HOST_WIDE_INT_PRINT_DEC
" queries\n",
152 alias_stats
.call_may_clobber_ref_p_no_alias
,
153 alias_stats
.call_may_clobber_ref_p_no_alias
154 + alias_stats
.call_may_clobber_ref_p_may_alias
);
155 fprintf (s
, " stmt_kills_ref_p: "
156 HOST_WIDE_INT_PRINT_DEC
" kills, "
157 HOST_WIDE_INT_PRINT_DEC
" queries\n",
158 alias_stats
.stmt_kills_ref_p_yes
+ alias_stats
.modref_kill_yes
,
159 alias_stats
.stmt_kills_ref_p_yes
+ alias_stats
.modref_kill_yes
160 + alias_stats
.stmt_kills_ref_p_no
+ alias_stats
.modref_kill_no
);
161 fprintf (s
, " nonoverlapping_component_refs_p: "
162 HOST_WIDE_INT_PRINT_DEC
" disambiguations, "
163 HOST_WIDE_INT_PRINT_DEC
" queries\n",
164 alias_stats
.nonoverlapping_component_refs_p_no_alias
,
165 alias_stats
.nonoverlapping_component_refs_p_no_alias
166 + alias_stats
.nonoverlapping_component_refs_p_may_alias
);
167 fprintf (s
, " nonoverlapping_refs_since_match_p: "
168 HOST_WIDE_INT_PRINT_DEC
" disambiguations, "
169 HOST_WIDE_INT_PRINT_DEC
" must overlaps, "
170 HOST_WIDE_INT_PRINT_DEC
" queries\n",
171 alias_stats
.nonoverlapping_refs_since_match_p_no_alias
,
172 alias_stats
.nonoverlapping_refs_since_match_p_must_overlap
,
173 alias_stats
.nonoverlapping_refs_since_match_p_no_alias
174 + alias_stats
.nonoverlapping_refs_since_match_p_may_alias
175 + alias_stats
.nonoverlapping_refs_since_match_p_must_overlap
);
176 fprintf (s
, " aliasing_component_refs_p: "
177 HOST_WIDE_INT_PRINT_DEC
" disambiguations, "
178 HOST_WIDE_INT_PRINT_DEC
" queries\n",
179 alias_stats
.aliasing_component_refs_p_no_alias
,
180 alias_stats
.aliasing_component_refs_p_no_alias
181 + alias_stats
.aliasing_component_refs_p_may_alias
);
182 dump_alias_stats_in_alias_c (s
);
183 fprintf (s
, "\nModref stats:\n");
184 fprintf (s
, " modref kill: "
185 HOST_WIDE_INT_PRINT_DEC
" kills, "
186 HOST_WIDE_INT_PRINT_DEC
" queries\n",
187 alias_stats
.modref_kill_yes
,
188 alias_stats
.modref_kill_yes
189 + alias_stats
.modref_kill_no
);
190 fprintf (s
, " modref use: "
191 HOST_WIDE_INT_PRINT_DEC
" disambiguations, "
192 HOST_WIDE_INT_PRINT_DEC
" queries\n",
193 alias_stats
.modref_use_no_alias
,
194 alias_stats
.modref_use_no_alias
195 + alias_stats
.modref_use_may_alias
);
196 fprintf (s
, " modref clobber: "
197 HOST_WIDE_INT_PRINT_DEC
" disambiguations, "
198 HOST_WIDE_INT_PRINT_DEC
" queries\n"
199 " " HOST_WIDE_INT_PRINT_DEC
" tbaa queries (%f per modref query)\n"
200 " " HOST_WIDE_INT_PRINT_DEC
" base compares (%f per modref query)\n",
201 alias_stats
.modref_clobber_no_alias
,
202 alias_stats
.modref_clobber_no_alias
203 + alias_stats
.modref_clobber_may_alias
,
204 alias_stats
.modref_tests
,
205 ((double)alias_stats
.modref_tests
)
206 / (alias_stats
.modref_clobber_no_alias
207 + alias_stats
.modref_clobber_may_alias
),
208 alias_stats
.modref_baseptr_tests
,
209 ((double)alias_stats
.modref_baseptr_tests
)
210 / (alias_stats
.modref_clobber_no_alias
211 + alias_stats
.modref_clobber_may_alias
));
215 /* Return true, if dereferencing PTR may alias with a global variable.
216 When ESCAPED_LOCAL_P is true escaped local memory is also considered
220 ptr_deref_may_alias_global_p (tree ptr
, bool escaped_local_p
)
222 struct ptr_info_def
*pi
;
224 /* If we end up with a pointer constant here that may point
226 if (TREE_CODE (ptr
) != SSA_NAME
)
229 pi
= SSA_NAME_PTR_INFO (ptr
);
231 /* If we do not have points-to information for this variable,
236 /* ??? This does not use TBAA to prune globals ptr may not access. */
237 return pt_solution_includes_global (&pi
->pt
, escaped_local_p
);
240 /* Return true if dereferencing PTR may alias DECL.
241 The caller is responsible for applying TBAA to see if PTR
242 may access DECL at all. */
245 ptr_deref_may_alias_decl_p (tree ptr
, tree decl
)
247 struct ptr_info_def
*pi
;
249 /* Conversions are irrelevant for points-to information and
250 data-dependence analysis can feed us those. */
253 /* Anything we do not explicilty handle aliases. */
254 if ((TREE_CODE (ptr
) != SSA_NAME
255 && TREE_CODE (ptr
) != ADDR_EXPR
256 && TREE_CODE (ptr
) != POINTER_PLUS_EXPR
)
257 || !POINTER_TYPE_P (TREE_TYPE (ptr
))
259 && TREE_CODE (decl
) != PARM_DECL
260 && TREE_CODE (decl
) != RESULT_DECL
))
263 /* Disregard pointer offsetting. */
264 if (TREE_CODE (ptr
) == POINTER_PLUS_EXPR
)
268 ptr
= TREE_OPERAND (ptr
, 0);
270 while (TREE_CODE (ptr
) == POINTER_PLUS_EXPR
);
271 return ptr_deref_may_alias_decl_p (ptr
, decl
);
274 /* ADDR_EXPR pointers either just offset another pointer or directly
275 specify the pointed-to set. */
276 if (TREE_CODE (ptr
) == ADDR_EXPR
)
278 tree base
= get_base_address (TREE_OPERAND (ptr
, 0));
280 && (TREE_CODE (base
) == MEM_REF
281 || TREE_CODE (base
) == TARGET_MEM_REF
))
282 ptr
= TREE_OPERAND (base
, 0);
285 return compare_base_decls (base
, decl
) != 0;
287 && CONSTANT_CLASS_P (base
))
293 /* Non-aliased variables cannot be pointed to. */
294 if (!may_be_aliased (decl
))
297 /* If we do not have useful points-to information for this pointer
298 we cannot disambiguate anything else. */
299 pi
= SSA_NAME_PTR_INFO (ptr
);
303 return pt_solution_includes (&pi
->pt
, decl
);
306 /* Return true if dereferenced PTR1 and PTR2 may alias.
307 The caller is responsible for applying TBAA to see if accesses
308 through PTR1 and PTR2 may conflict at all. */
311 ptr_derefs_may_alias_p (tree ptr1
, tree ptr2
)
313 struct ptr_info_def
*pi1
, *pi2
;
315 /* Conversions are irrelevant for points-to information and
316 data-dependence analysis can feed us those. */
320 /* Disregard pointer offsetting. */
321 if (TREE_CODE (ptr1
) == POINTER_PLUS_EXPR
)
325 ptr1
= TREE_OPERAND (ptr1
, 0);
327 while (TREE_CODE (ptr1
) == POINTER_PLUS_EXPR
);
328 return ptr_derefs_may_alias_p (ptr1
, ptr2
);
330 if (TREE_CODE (ptr2
) == POINTER_PLUS_EXPR
)
334 ptr2
= TREE_OPERAND (ptr2
, 0);
336 while (TREE_CODE (ptr2
) == POINTER_PLUS_EXPR
);
337 return ptr_derefs_may_alias_p (ptr1
, ptr2
);
340 /* ADDR_EXPR pointers either just offset another pointer or directly
341 specify the pointed-to set. */
342 if (TREE_CODE (ptr1
) == ADDR_EXPR
)
344 tree base
= get_base_address (TREE_OPERAND (ptr1
, 0));
346 && (TREE_CODE (base
) == MEM_REF
347 || TREE_CODE (base
) == TARGET_MEM_REF
))
348 return ptr_derefs_may_alias_p (TREE_OPERAND (base
, 0), ptr2
);
351 return ptr_deref_may_alias_decl_p (ptr2
, base
);
352 /* Try ptr2 when ptr1 points to a constant. */
354 && !CONSTANT_CLASS_P (base
))
357 if (TREE_CODE (ptr2
) == ADDR_EXPR
)
359 tree base
= get_base_address (TREE_OPERAND (ptr2
, 0));
361 && (TREE_CODE (base
) == MEM_REF
362 || TREE_CODE (base
) == TARGET_MEM_REF
))
363 return ptr_derefs_may_alias_p (ptr1
, TREE_OPERAND (base
, 0));
366 return ptr_deref_may_alias_decl_p (ptr1
, base
);
371 /* From here we require SSA name pointers. Anything else aliases. */
372 if (TREE_CODE (ptr1
) != SSA_NAME
373 || TREE_CODE (ptr2
) != SSA_NAME
374 || !POINTER_TYPE_P (TREE_TYPE (ptr1
))
375 || !POINTER_TYPE_P (TREE_TYPE (ptr2
)))
378 /* We may end up with two empty points-to solutions for two same pointers.
379 In this case we still want to say both pointers alias, so shortcut
384 /* If we do not have useful points-to information for either pointer
385 we cannot disambiguate anything else. */
386 pi1
= SSA_NAME_PTR_INFO (ptr1
);
387 pi2
= SSA_NAME_PTR_INFO (ptr2
);
391 /* ??? This does not use TBAA to prune decls from the intersection
392 that not both pointers may access. */
393 return pt_solutions_intersect (&pi1
->pt
, &pi2
->pt
);
396 /* Return true if dereferencing PTR may alias *REF.
397 The caller is responsible for applying TBAA to see if PTR
398 may access *REF at all. */
401 ptr_deref_may_alias_ref_p_1 (tree ptr
, ao_ref
*ref
)
403 tree base
= ao_ref_base (ref
);
405 if (TREE_CODE (base
) == MEM_REF
406 || TREE_CODE (base
) == TARGET_MEM_REF
)
407 return ptr_derefs_may_alias_p (ptr
, TREE_OPERAND (base
, 0));
408 else if (DECL_P (base
))
409 return ptr_deref_may_alias_decl_p (ptr
, base
);
414 /* Returns true if PTR1 and PTR2 compare unequal because of points-to. */
417 ptrs_compare_unequal (tree ptr1
, tree ptr2
)
419 /* First resolve the pointers down to a SSA name pointer base or
420 a VAR_DECL, PARM_DECL or RESULT_DECL. This explicitely does
421 not yet try to handle LABEL_DECLs, FUNCTION_DECLs, CONST_DECLs
422 or STRING_CSTs which needs points-to adjustments to track them
423 in the points-to sets. */
424 tree obj1
= NULL_TREE
;
425 tree obj2
= NULL_TREE
;
426 if (TREE_CODE (ptr1
) == ADDR_EXPR
)
428 tree tem
= get_base_address (TREE_OPERAND (ptr1
, 0));
432 || TREE_CODE (tem
) == PARM_DECL
433 || TREE_CODE (tem
) == RESULT_DECL
)
435 else if (TREE_CODE (tem
) == MEM_REF
)
436 ptr1
= TREE_OPERAND (tem
, 0);
438 if (TREE_CODE (ptr2
) == ADDR_EXPR
)
440 tree tem
= get_base_address (TREE_OPERAND (ptr2
, 0));
444 || TREE_CODE (tem
) == PARM_DECL
445 || TREE_CODE (tem
) == RESULT_DECL
)
447 else if (TREE_CODE (tem
) == MEM_REF
)
448 ptr2
= TREE_OPERAND (tem
, 0);
451 /* Canonicalize ptr vs. object. */
452 if (TREE_CODE (ptr1
) == SSA_NAME
&& obj2
)
454 std::swap (ptr1
, ptr2
);
455 std::swap (obj1
, obj2
);
459 /* Other code handles this correctly, no need to duplicate it here. */;
460 else if (obj1
&& TREE_CODE (ptr2
) == SSA_NAME
)
462 struct ptr_info_def
*pi
= SSA_NAME_PTR_INFO (ptr2
);
463 /* We may not use restrict to optimize pointer comparisons.
464 See PR71062. So we have to assume that restrict-pointed-to
465 may be in fact obj1. */
467 || pi
->pt
.vars_contains_restrict
468 || pi
->pt
.vars_contains_interposable
)
471 && (TREE_STATIC (obj1
) || DECL_EXTERNAL (obj1
)))
473 varpool_node
*node
= varpool_node::get (obj1
);
474 /* If obj1 may bind to NULL give up (see below). */
476 || ! node
->nonzero_address ()
477 || ! decl_binds_to_current_def_p (obj1
))
480 return !pt_solution_includes (&pi
->pt
, obj1
);
483 /* ??? We'd like to handle ptr1 != NULL and ptr1 != ptr2
484 but those require pt.null to be conservatively correct. */
489 /* Returns whether reference REF to BASE may refer to global memory.
490 When ESCAPED_LOCAL_P is true escaped local memory is also considered
494 ref_may_alias_global_p_1 (tree base
, bool escaped_local_p
)
497 return (is_global_var (base
)
499 && pt_solution_includes (&cfun
->gimple_df
->escaped_return
,
501 else if (TREE_CODE (base
) == MEM_REF
502 || TREE_CODE (base
) == TARGET_MEM_REF
)
503 return ptr_deref_may_alias_global_p (TREE_OPERAND (base
, 0),
509 ref_may_alias_global_p (ao_ref
*ref
, bool escaped_local_p
)
511 tree base
= ao_ref_base (ref
);
512 return ref_may_alias_global_p_1 (base
, escaped_local_p
);
516 ref_may_alias_global_p (tree ref
, bool escaped_local_p
)
518 tree base
= get_base_address (ref
);
519 return ref_may_alias_global_p_1 (base
, escaped_local_p
);
522 /* Return true whether STMT may clobber global memory.
523 When ESCAPED_LOCAL_P is true escaped local memory is also considered
527 stmt_may_clobber_global_p (gimple
*stmt
, bool escaped_local_p
)
531 if (!gimple_vdef (stmt
))
534 /* ??? We can ask the oracle whether an artificial pointer
535 dereference with a pointer with points-to information covering
536 all global memory (what about non-address taken memory?) maybe
537 clobbered by this call. As there is at the moment no convenient
538 way of doing that without generating garbage do some manual
540 ??? We could make a NULL ao_ref argument to the various
541 predicates special, meaning any global memory. */
543 switch (gimple_code (stmt
))
546 lhs
= gimple_assign_lhs (stmt
);
547 return (TREE_CODE (lhs
) != SSA_NAME
548 && ref_may_alias_global_p (lhs
, escaped_local_p
));
557 /* Dump alias information on FILE. */
560 dump_alias_info (FILE *file
)
565 = lang_hooks
.decl_printable_name (current_function_decl
, 2);
568 fprintf (file
, "\n\nAlias information for %s\n\n", funcname
);
570 fprintf (file
, "Aliased symbols\n\n");
572 FOR_EACH_LOCAL_DECL (cfun
, i
, var
)
574 if (may_be_aliased (var
))
575 dump_variable (file
, var
);
578 fprintf (file
, "\nCall clobber information\n");
580 fprintf (file
, "\nESCAPED");
581 dump_points_to_solution (file
, &cfun
->gimple_df
->escaped
);
583 fprintf (file
, "\nESCAPED_RETURN");
584 dump_points_to_solution (file
, &cfun
->gimple_df
->escaped_return
);
586 fprintf (file
, "\n\nFlow-insensitive points-to information\n\n");
588 FOR_EACH_SSA_NAME (i
, ptr
, cfun
)
590 struct ptr_info_def
*pi
;
592 if (!POINTER_TYPE_P (TREE_TYPE (ptr
))
593 || SSA_NAME_IN_FREE_LIST (ptr
))
596 pi
= SSA_NAME_PTR_INFO (ptr
);
598 dump_points_to_info_for (file
, ptr
);
601 fprintf (file
, "\n");
605 /* Dump alias information on stderr. */
608 debug_alias_info (void)
610 dump_alias_info (stderr
);
614 /* Dump the points-to set *PT into FILE. */
617 dump_points_to_solution (FILE *file
, struct pt_solution
*pt
)
620 fprintf (file
, ", points-to anything");
623 fprintf (file
, ", points-to non-local");
626 fprintf (file
, ", points-to escaped");
629 fprintf (file
, ", points-to unit escaped");
632 fprintf (file
, ", points-to NULL");
636 fprintf (file
, ", points-to vars: ");
637 dump_decl_set (file
, pt
->vars
);
638 if (pt
->vars_contains_nonlocal
639 || pt
->vars_contains_escaped
640 || pt
->vars_contains_escaped_heap
641 || pt
->vars_contains_restrict
)
643 const char *comma
= "";
644 fprintf (file
, " (");
645 if (pt
->vars_contains_nonlocal
)
647 fprintf (file
, "nonlocal");
650 if (pt
->vars_contains_escaped
)
652 fprintf (file
, "%sescaped", comma
);
655 if (pt
->vars_contains_escaped_heap
)
657 fprintf (file
, "%sescaped heap", comma
);
660 if (pt
->vars_contains_restrict
)
662 fprintf (file
, "%srestrict", comma
);
665 if (pt
->vars_contains_interposable
)
666 fprintf (file
, "%sinterposable", comma
);
673 /* Unified dump function for pt_solution. */
676 debug (pt_solution
&ref
)
678 dump_points_to_solution (stderr
, &ref
);
682 debug (pt_solution
*ptr
)
687 fprintf (stderr
, "<nil>\n");
691 /* Dump points-to information for SSA_NAME PTR into FILE. */
694 dump_points_to_info_for (FILE *file
, tree ptr
)
696 struct ptr_info_def
*pi
= SSA_NAME_PTR_INFO (ptr
);
698 print_generic_expr (file
, ptr
, dump_flags
);
701 dump_points_to_solution (file
, &pi
->pt
);
703 fprintf (file
, ", points-to anything");
705 fprintf (file
, "\n");
709 /* Dump points-to information for VAR into stderr. */
712 debug_points_to_info_for (tree var
)
714 dump_points_to_info_for (stderr
, var
);
718 /* Initializes the alias-oracle reference representation *R from REF. */
721 ao_ref_init (ao_ref
*r
, tree ref
)
728 r
->ref_alias_set
= -1;
729 r
->base_alias_set
= -1;
730 r
->volatile_p
= ref
? TREE_THIS_VOLATILE (ref
) : false;
733 /* Returns the base object of the memory reference *REF. */
736 ao_ref_base (ao_ref
*ref
)
742 ref
->base
= get_ref_base_and_extent (ref
->ref
, &ref
->offset
, &ref
->size
,
743 &ref
->max_size
, &reverse
);
747 /* Returns the base object alias set of the memory reference *REF. */
750 ao_ref_base_alias_set (ao_ref
*ref
)
753 if (ref
->base_alias_set
!= -1)
754 return ref
->base_alias_set
;
758 if (TREE_CODE (base_ref
) == WITH_SIZE_EXPR
)
759 base_ref
= TREE_OPERAND (base_ref
, 0);
760 while (handled_component_p (base_ref
))
761 base_ref
= TREE_OPERAND (base_ref
, 0);
762 ref
->base_alias_set
= get_alias_set (base_ref
);
763 return ref
->base_alias_set
;
766 /* Returns the reference alias set of the memory reference *REF. */
769 ao_ref_alias_set (ao_ref
*ref
)
771 if (ref
->ref_alias_set
!= -1)
772 return ref
->ref_alias_set
;
775 ref
->ref_alias_set
= get_alias_set (ref
->ref
);
776 return ref
->ref_alias_set
;
779 /* Returns a type satisfying
780 get_deref_alias_set (type) == ao_ref_base_alias_set (REF). */
783 ao_ref_base_alias_ptr_type (ao_ref
*ref
)
790 if (TREE_CODE (base_ref
) == WITH_SIZE_EXPR
)
791 base_ref
= TREE_OPERAND (base_ref
, 0);
792 while (handled_component_p (base_ref
))
793 base_ref
= TREE_OPERAND (base_ref
, 0);
794 tree ret
= reference_alias_ptr_type (base_ref
);
798 /* Returns a type satisfying
799 get_deref_alias_set (type) == ao_ref_alias_set (REF). */
802 ao_ref_alias_ptr_type (ao_ref
*ref
)
806 tree ret
= reference_alias_ptr_type (ref
->ref
);
810 /* Return the alignment of the access *REF and store it in the *ALIGN
811 and *BITPOS pairs. Returns false if no alignment could be determined.
812 See get_object_alignment_2 for details. */
815 ao_ref_alignment (ao_ref
*ref
, unsigned int *align
,
816 unsigned HOST_WIDE_INT
*bitpos
)
819 return get_object_alignment_1 (ref
->ref
, align
, bitpos
);
821 /* When we just have ref->base we cannot use get_object_alignment since
822 that will eventually use the type of the appearant access while for
823 example ao_ref_init_from_ptr_and_range is not careful to adjust that. */
824 *align
= BITS_PER_UNIT
;
825 HOST_WIDE_INT offset
;
826 if (!ref
->offset
.is_constant (&offset
)
827 || !get_object_alignment_2 (ref
->base
, align
, bitpos
, true))
829 *bitpos
+= (unsigned HOST_WIDE_INT
)offset
* BITS_PER_UNIT
;
830 *bitpos
= *bitpos
& (*align
- 1);
834 /* Init an alias-oracle reference representation from a gimple pointer
835 PTR a range specified by OFFSET, SIZE and MAX_SIZE under the assumption
836 that RANGE_KNOWN is set.
838 The access is assumed to be only to or after of the pointer target adjusted
839 by the offset, not before it (even in the case RANGE_KNOWN is false). */
842 ao_ref_init_from_ptr_and_range (ao_ref
*ref
, tree ptr
,
848 poly_int64 t
, extra_offset
= 0;
850 ref
->ref
= NULL_TREE
;
851 if (TREE_CODE (ptr
) == SSA_NAME
)
853 gimple
*stmt
= SSA_NAME_DEF_STMT (ptr
);
854 if (gimple_assign_single_p (stmt
)
855 && gimple_assign_rhs_code (stmt
) == ADDR_EXPR
)
856 ptr
= gimple_assign_rhs1 (stmt
);
857 else if (is_gimple_assign (stmt
)
858 && gimple_assign_rhs_code (stmt
) == POINTER_PLUS_EXPR
859 && ptrdiff_tree_p (gimple_assign_rhs2 (stmt
), &extra_offset
))
861 ptr
= gimple_assign_rhs1 (stmt
);
862 extra_offset
*= BITS_PER_UNIT
;
866 if (TREE_CODE (ptr
) == ADDR_EXPR
)
868 ref
->base
= get_addr_base_and_unit_offset (TREE_OPERAND (ptr
, 0), &t
);
870 ref
->offset
= BITS_PER_UNIT
* t
;
875 ref
->base
= get_base_address (TREE_OPERAND (ptr
, 0));
880 gcc_assert (POINTER_TYPE_P (TREE_TYPE (ptr
)));
881 ref
->base
= build2 (MEM_REF
, char_type_node
,
882 ptr
, null_pointer_node
);
885 ref
->offset
+= extra_offset
+ offset
;
888 ref
->max_size
= max_size
;
892 ref
->max_size
= ref
->size
= -1;
893 ref
->ref_alias_set
= 0;
894 ref
->base_alias_set
= 0;
895 ref
->volatile_p
= false;
898 /* Init an alias-oracle reference representation from a gimple pointer
899 PTR and a gimple size SIZE in bytes. If SIZE is NULL_TREE then the
900 size is assumed to be unknown. The access is assumed to be only
901 to or after of the pointer target, not before it. */
904 ao_ref_init_from_ptr_and_size (ao_ref
*ref
, tree ptr
, tree size
)
908 && poly_int_tree_p (size
, &size_hwi
)
909 && coeffs_in_range_p (size_hwi
, 0, HOST_WIDE_INT_MAX
/ BITS_PER_UNIT
))
911 size_hwi
= size_hwi
* BITS_PER_UNIT
;
912 ao_ref_init_from_ptr_and_range (ref
, ptr
, true, 0, size_hwi
, size_hwi
);
915 ao_ref_init_from_ptr_and_range (ref
, ptr
, false, 0, -1, -1);
918 /* S1 and S2 are TYPE_SIZE or DECL_SIZE. Compare them:
921 Return 0 if equal or incomparable. */
924 compare_sizes (tree s1
, tree s2
)
932 if (!poly_int_tree_p (s1
, &size1
) || !poly_int_tree_p (s2
, &size2
))
934 if (known_lt (size1
, size2
))
936 if (known_lt (size2
, size1
))
941 /* Compare TYPE1 and TYPE2 by its size.
942 Return -1 if size of TYPE1 < size of TYPE2
943 Return 1 if size of TYPE1 > size of TYPE2
944 Return 0 if types are of equal sizes or we can not compare them. */
947 compare_type_sizes (tree type1
, tree type2
)
949 /* Be conservative for arrays and vectors. We want to support partial
950 overlap on int[3] and int[3] as tested in gcc.dg/torture/alias-2.c. */
951 while (TREE_CODE (type1
) == ARRAY_TYPE
952 || VECTOR_TYPE_P (type1
))
953 type1
= TREE_TYPE (type1
);
954 while (TREE_CODE (type2
) == ARRAY_TYPE
955 || VECTOR_TYPE_P (type2
))
956 type2
= TREE_TYPE (type2
);
957 return compare_sizes (TYPE_SIZE (type1
), TYPE_SIZE (type2
));
960 /* Return 1 if TYPE1 and TYPE2 are to be considered equivalent for the
961 purpose of TBAA. Return 0 if they are distinct and -1 if we cannot
965 same_type_for_tbaa (tree type1
, tree type2
)
967 type1
= TYPE_MAIN_VARIANT (type1
);
968 type2
= TYPE_MAIN_VARIANT (type2
);
970 /* Handle the most common case first. */
974 /* If we would have to do structural comparison bail out. */
975 if (TYPE_STRUCTURAL_EQUALITY_P (type1
)
976 || TYPE_STRUCTURAL_EQUALITY_P (type2
))
979 /* Compare the canonical types. */
980 if (TYPE_CANONICAL (type1
) == TYPE_CANONICAL (type2
))
983 /* ??? Array types are not properly unified in all cases as we have
984 spurious changes in the index types for example. Removing this
985 causes all sorts of problems with the Fortran frontend. */
986 if (TREE_CODE (type1
) == ARRAY_TYPE
987 && TREE_CODE (type2
) == ARRAY_TYPE
)
990 /* ??? In Ada, an lvalue of an unconstrained type can be used to access an
991 object of one of its constrained subtypes, e.g. when a function with an
992 unconstrained parameter passed by reference is called on an object and
993 inlined. But, even in the case of a fixed size, type and subtypes are
994 not equivalent enough as to share the same TYPE_CANONICAL, since this
995 would mean that conversions between them are useless, whereas they are
996 not (e.g. type and subtypes can have different modes). So, in the end,
997 they are only guaranteed to have the same alias set. */
998 alias_set_type set1
= get_alias_set (type1
);
999 alias_set_type set2
= get_alias_set (type2
);
1003 /* Pointers to void are considered compatible with all other pointers,
1004 so for two pointers see what the alias set resolution thinks. */
1005 if (POINTER_TYPE_P (type1
)
1006 && POINTER_TYPE_P (type2
)
1007 && alias_sets_conflict_p (set1
, set2
))
1010 /* The types are known to be not equal. */
1014 /* Return true if TYPE is a composite type (i.e. we may apply one of handled
1015 components on it). */
1018 type_has_components_p (tree type
)
1020 return AGGREGATE_TYPE_P (type
) || VECTOR_TYPE_P (type
)
1021 || TREE_CODE (type
) == COMPLEX_TYPE
;
1024 /* MATCH1 and MATCH2 which are part of access path of REF1 and REF2
1025 respectively are either pointing to same address or are completely
1026 disjoint. If PARTIAL_OVERLAP is true, assume that outermost arrays may
1027 just partly overlap.
1029 Try to disambiguate using the access path starting from the match
1030 and return false if there is no conflict.
1032 Helper for aliasing_component_refs_p. */
1035 aliasing_matching_component_refs_p (tree match1
, tree ref1
,
1036 poly_int64 offset1
, poly_int64 max_size1
,
1037 tree match2
, tree ref2
,
1038 poly_int64 offset2
, poly_int64 max_size2
,
1039 bool partial_overlap
)
1041 poly_int64 offadj
, sztmp
, msztmp
;
1044 if (!partial_overlap
)
1046 get_ref_base_and_extent (match2
, &offadj
, &sztmp
, &msztmp
, &reverse
);
1048 get_ref_base_and_extent (match1
, &offadj
, &sztmp
, &msztmp
, &reverse
);
1050 if (!ranges_maybe_overlap_p (offset1
, max_size1
, offset2
, max_size2
))
1052 ++alias_stats
.aliasing_component_refs_p_no_alias
;
1057 int cmp
= nonoverlapping_refs_since_match_p (match1
, ref1
, match2
, ref2
,
1060 || (cmp
== -1 && nonoverlapping_component_refs_p (ref1
, ref2
)))
1062 ++alias_stats
.aliasing_component_refs_p_no_alias
;
1065 ++alias_stats
.aliasing_component_refs_p_may_alias
;
1069 /* Return true if REF is reference to zero sized trailing array. I.e.
1070 struct foo {int bar; int array[0];} *fooptr;
1074 component_ref_to_zero_sized_trailing_array_p (tree ref
)
1076 return (TREE_CODE (ref
) == COMPONENT_REF
1077 && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref
, 1))) == ARRAY_TYPE
1078 && (!TYPE_SIZE (TREE_TYPE (TREE_OPERAND (ref
, 1)))
1079 || integer_zerop (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (ref
, 1)))))
1080 && array_ref_flexible_size_p (ref
));
1083 /* Worker for aliasing_component_refs_p. Most parameters match parameters of
1084 aliasing_component_refs_p.
1086 Walk access path REF2 and try to find type matching TYPE1
1087 (which is a start of possibly aliasing access path REF1).
1088 If match is found, try to disambiguate.
1090 Return 0 for sucessful disambiguation.
1091 Return 1 if match was found but disambiguation failed
1092 Return -1 if there is no match.
1093 In this case MAYBE_MATCH is set to 0 if there is no type matching TYPE1
1094 in access patch REF2 and -1 if we are not sure. */
1097 aliasing_component_refs_walk (tree ref1
, tree type1
, tree base1
,
1098 poly_int64 offset1
, poly_int64 max_size1
,
1099 tree end_struct_ref1
,
1100 tree ref2
, tree base2
,
1101 poly_int64 offset2
, poly_int64 max_size2
,
1109 /* We walk from inner type to the outer types. If type we see is
1110 already too large to be part of type1, terminate the search. */
1111 int cmp
= compare_type_sizes (type1
, TREE_TYPE (ref
));
1114 && (!end_struct_ref1
1115 || compare_type_sizes (TREE_TYPE (end_struct_ref1
),
1116 TREE_TYPE (ref
)) < 0))
1118 /* If types may be of same size, see if we can decide about their
1122 same_p
= same_type_for_tbaa (TREE_TYPE (ref
), type1
);
1125 /* In case we can't decide whether types are same try to
1126 continue looking for the exact match.
1127 Remember however that we possibly saw a match
1128 to bypass the access path continuations tests we do later. */
1130 *maybe_match
= true;
1132 if (!handled_component_p (ref
))
1134 ref
= TREE_OPERAND (ref
, 0);
1138 bool partial_overlap
= false;
1140 /* We assume that arrays can overlap by multiple of their elements
1141 size as tested in gcc.dg/torture/alias-2.c.
1142 This partial overlap happen only when both arrays are bases of
1143 the access and not contained within another component ref.
1144 To be safe we also assume partial overlap for VLAs. */
1145 if (TREE_CODE (TREE_TYPE (base1
)) == ARRAY_TYPE
1146 && (!TYPE_SIZE (TREE_TYPE (base1
))
1147 || TREE_CODE (TYPE_SIZE (TREE_TYPE (base1
))) != INTEGER_CST
1150 /* Setting maybe_match to true triggers
1151 nonoverlapping_component_refs_p test later that still may do
1152 useful disambiguation. */
1153 *maybe_match
= true;
1154 partial_overlap
= true;
1156 return aliasing_matching_component_refs_p (base1
, ref1
,
1165 /* Consider access path1 base1....ref1 and access path2 base2...ref2.
1166 Return true if they can be composed to single access path
1167 base1...ref1...base2...ref2.
1169 REF_TYPE1 if type of REF1. END_STRUCT_PAST_END1 is true if there is
1170 a trailing array access after REF1 in the non-TBAA part of the access.
1171 REF1_ALIAS_SET is the alias set of REF1.
1173 BASE_TYPE2 is type of base2. END_STRUCT_REF2 is non-NULL if there is
1174 a trailing array access in the TBAA part of access path2.
1175 BASE2_ALIAS_SET is the alias set of base2. */
1178 access_path_may_continue_p (tree ref_type1
, bool end_struct_past_end1
,
1179 alias_set_type ref1_alias_set
,
1180 tree base_type2
, tree end_struct_ref2
,
1181 alias_set_type base2_alias_set
)
1183 /* Access path can not continue past types with no components. */
1184 if (!type_has_components_p (ref_type1
))
1187 /* If first access path ends by too small type to hold base of
1188 the second access path, typically paths can not continue.
1190 Punt if end_struct_past_end1 is true. We want to support arbitrary
1191 type puning past first COMPONENT_REF to union because redundant store
1192 elimination depends on this, see PR92152. For this reason we can not
1193 check size of the reference because types may partially overlap. */
1194 if (!end_struct_past_end1
)
1196 if (compare_type_sizes (ref_type1
, base_type2
) < 0)
1198 /* If the path2 contains trailing array access we can strenghten the check
1199 to verify that also the size of element of the trailing array fits.
1200 In fact we could check for offset + type_size, but we do not track
1201 offsets and this is quite side case. */
1203 && compare_type_sizes (ref_type1
, TREE_TYPE (end_struct_ref2
)) < 0)
1206 return (base2_alias_set
== ref1_alias_set
1207 || alias_set_subset_of (base2_alias_set
, ref1_alias_set
));
1210 /* Determine if the two component references REF1 and REF2 which are
1211 based on access types TYPE1 and TYPE2 and of which at least one is based
1212 on an indirect reference may alias.
1213 REF1_ALIAS_SET, BASE1_ALIAS_SET, REF2_ALIAS_SET and BASE2_ALIAS_SET
1214 are the respective alias sets. */
1217 aliasing_component_refs_p (tree ref1
,
1218 alias_set_type ref1_alias_set
,
1219 alias_set_type base1_alias_set
,
1220 poly_int64 offset1
, poly_int64 max_size1
,
1222 alias_set_type ref2_alias_set
,
1223 alias_set_type base2_alias_set
,
1224 poly_int64 offset2
, poly_int64 max_size2
)
1226 /* If one reference is a component references through pointers try to find a
1227 common base and apply offset based disambiguation. This handles
1229 struct A { int i; int j; } *q;
1230 struct B { struct A a; int k; } *p;
1231 disambiguating q->i and p->a.j. */
1234 bool maybe_match
= false;
1235 tree end_struct_ref1
= NULL
, end_struct_ref2
= NULL
;
1236 bool end_struct_past_end1
= false;
1237 bool end_struct_past_end2
= false;
1239 /* Choose bases and base types to search for.
1240 The access path is as follows:
1241 base....end_of_tbaa_ref...actual_ref
1242 At one place in the access path may be a reference to zero sized or
1245 We generally discard the segment after end_of_tbaa_ref however
1246 we need to be careful in case it contains zero sized or trailing array.
1247 These may happen after reference to union and in this case we need to
1248 not disambiguate type puning scenarios.
1251 base1 to point to base
1253 ref1 to point to end_of_tbaa_ref
1255 end_struct_ref1 to point the trailing reference (if it exists
1256 in range base....end_of_tbaa_ref
1258 end_struct_past_end1 is true if this trailing reference occurs in
1259 end_of_tbaa_ref...actual_ref. */
1261 while (handled_component_p (base1
))
1263 /* Generally access paths are monotous in the size of object. The
1264 exception are trailing arrays of structures. I.e.
1265 struct a {int array[0];};
1267 struct a {int array1[0]; int array[];};
1268 Such struct has size 0 but accesses to a.array may have non-zero size.
1269 In this case the size of TREE_TYPE (base1) is smaller than
1270 size of TREE_TYPE (TREE_OPERAND (base1, 0)).
1272 Because we compare sizes of arrays just by sizes of their elements,
1273 we only need to care about zero sized array fields here. */
1274 if (component_ref_to_zero_sized_trailing_array_p (base1
))
1276 gcc_checking_assert (!end_struct_ref1
);
1277 end_struct_ref1
= base1
;
1279 if (ends_tbaa_access_path_p (base1
))
1281 ref1
= TREE_OPERAND (base1
, 0);
1282 if (end_struct_ref1
)
1284 end_struct_past_end1
= true;
1285 end_struct_ref1
= NULL
;
1288 base1
= TREE_OPERAND (base1
, 0);
1290 type1
= TREE_TYPE (base1
);
1292 while (handled_component_p (base2
))
1294 if (component_ref_to_zero_sized_trailing_array_p (base2
))
1296 gcc_checking_assert (!end_struct_ref2
);
1297 end_struct_ref2
= base2
;
1299 if (ends_tbaa_access_path_p (base2
))
1301 ref2
= TREE_OPERAND (base2
, 0);
1302 if (end_struct_ref2
)
1304 end_struct_past_end2
= true;
1305 end_struct_ref2
= NULL
;
1308 base2
= TREE_OPERAND (base2
, 0);
1310 type2
= TREE_TYPE (base2
);
1312 /* Now search for the type1 in the access path of ref2. This
1313 would be a common base for doing offset based disambiguation on.
1314 This however only makes sense if type2 is big enough to hold type1. */
1315 int cmp_outer
= compare_type_sizes (type2
, type1
);
1317 /* If type2 is big enough to contain type1 walk its access path.
1318 We also need to care of arrays at the end of structs that may extend
1319 beyond the end of structure. If this occurs in the TBAA part of the
1320 access path, we need to consider the increased type as well. */
1323 && compare_type_sizes (TREE_TYPE (end_struct_ref2
), type1
) >= 0))
1325 int res
= aliasing_component_refs_walk (ref1
, type1
, base1
,
1328 ref2
, base2
, offset2
, max_size2
,
1334 /* If we didn't find a common base, try the other way around. */
1337 && compare_type_sizes (TREE_TYPE (end_struct_ref1
), type2
) <= 0))
1339 int res
= aliasing_component_refs_walk (ref2
, type2
, base2
,
1342 ref1
, base1
, offset1
, max_size1
,
1348 /* In the following code we make an assumption that the types in access
1349 paths do not overlap and thus accesses alias only if one path can be
1350 continuation of another. If we was not able to decide about equivalence,
1351 we need to give up. */
1354 if (!nonoverlapping_component_refs_p (ref1
, ref2
))
1356 ++alias_stats
.aliasing_component_refs_p_may_alias
;
1359 ++alias_stats
.aliasing_component_refs_p_no_alias
;
1363 if (access_path_may_continue_p (TREE_TYPE (ref1
), end_struct_past_end1
,
1365 type2
, end_struct_ref2
,
1367 || access_path_may_continue_p (TREE_TYPE (ref2
), end_struct_past_end2
,
1369 type1
, end_struct_ref1
,
1372 ++alias_stats
.aliasing_component_refs_p_may_alias
;
1375 ++alias_stats
.aliasing_component_refs_p_no_alias
;
1379 /* FIELD1 and FIELD2 are two fields of component refs. We assume
1380 that bases of both component refs are either equivalent or nonoverlapping.
1381 We do not assume that the containers of FIELD1 and FIELD2 are of the
1384 Return 0 in case the base address of component_refs are same then
1385 FIELD1 and FIELD2 have same address. Note that FIELD1 and FIELD2
1386 may not be of same type or size.
1388 Return 1 if FIELD1 and FIELD2 are non-overlapping.
1390 Return -1 otherwise.
1392 Main difference between 0 and -1 is to let
1393 nonoverlapping_component_refs_since_match_p discover the semantically
1394 equivalent part of the access path.
1396 Note that this function is used even with -fno-strict-aliasing
1397 and makes use of no TBAA assumptions. */
1400 nonoverlapping_component_refs_p_1 (const_tree field1
, const_tree field2
)
1402 /* If both fields are of the same type, we could save hard work of
1403 comparing offsets. */
1404 tree type1
= DECL_CONTEXT (field1
);
1405 tree type2
= DECL_CONTEXT (field2
);
1407 if (TREE_CODE (type1
) == RECORD_TYPE
1408 && DECL_BIT_FIELD_REPRESENTATIVE (field1
))
1409 field1
= DECL_BIT_FIELD_REPRESENTATIVE (field1
);
1410 if (TREE_CODE (type2
) == RECORD_TYPE
1411 && DECL_BIT_FIELD_REPRESENTATIVE (field2
))
1412 field2
= DECL_BIT_FIELD_REPRESENTATIVE (field2
);
1414 /* ??? Bitfields can overlap at RTL level so punt on them.
1415 FIXME: RTL expansion should be fixed by adjusting the access path
1416 when producing MEM_ATTRs for MEMs which are wider than
1417 the bitfields similarly as done in set_mem_attrs_minus_bitpos. */
1418 if (DECL_BIT_FIELD (field1
) && DECL_BIT_FIELD (field2
))
1421 /* Assume that different FIELD_DECLs never overlap within a RECORD_TYPE. */
1422 if (type1
== type2
&& TREE_CODE (type1
) == RECORD_TYPE
)
1423 return field1
!= field2
;
1425 /* In common case the offsets and bit offsets will be the same.
1426 However if frontends do not agree on the alignment, they may be
1427 different even if they actually represent same address.
1428 Try the common case first and if that fails calcualte the
1429 actual bit offset. */
1430 if (tree_int_cst_equal (DECL_FIELD_OFFSET (field1
),
1431 DECL_FIELD_OFFSET (field2
))
1432 && tree_int_cst_equal (DECL_FIELD_BIT_OFFSET (field1
),
1433 DECL_FIELD_BIT_OFFSET (field2
)))
1436 /* Note that it may be possible to use component_ref_field_offset
1437 which would provide offsets as trees. However constructing and folding
1438 trees is expensive and does not seem to be worth the compile time
1441 poly_uint64 offset1
, offset2
;
1442 poly_uint64 bit_offset1
, bit_offset2
;
1444 if (poly_int_tree_p (DECL_FIELD_OFFSET (field1
), &offset1
)
1445 && poly_int_tree_p (DECL_FIELD_OFFSET (field2
), &offset2
)
1446 && poly_int_tree_p (DECL_FIELD_BIT_OFFSET (field1
), &bit_offset1
)
1447 && poly_int_tree_p (DECL_FIELD_BIT_OFFSET (field2
), &bit_offset2
))
1449 offset1
= (offset1
<< LOG2_BITS_PER_UNIT
) + bit_offset1
;
1450 offset2
= (offset2
<< LOG2_BITS_PER_UNIT
) + bit_offset2
;
1452 if (known_eq (offset1
, offset2
))
1455 poly_uint64 size1
, size2
;
1457 if (poly_int_tree_p (DECL_SIZE (field1
), &size1
)
1458 && poly_int_tree_p (DECL_SIZE (field2
), &size2
)
1459 && !ranges_maybe_overlap_p (offset1
, size1
, offset2
, size2
))
1462 /* Resort to slower overlap checking by looking for matching types in
1463 the middle of access path. */
1467 /* Return low bound of array. Do not produce new trees
1468 and thus do not care about particular type of integer constant
1469 and placeholder exprs. */
1472 cheap_array_ref_low_bound (tree ref
)
1474 tree domain_type
= TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref
, 0)));
1476 /* Avoid expensive array_ref_low_bound.
1477 low bound is either stored in operand2, or it is TYPE_MIN_VALUE of domain
1478 type or it is zero. */
1479 if (TREE_OPERAND (ref
, 2))
1480 return TREE_OPERAND (ref
, 2);
1481 else if (domain_type
&& TYPE_MIN_VALUE (domain_type
))
1482 return TYPE_MIN_VALUE (domain_type
);
1484 return integer_zero_node
;
1487 /* REF1 and REF2 are ARRAY_REFs with either same base address or which are
1488 completely disjoint.
1490 Return 1 if the refs are non-overlapping.
1491 Return 0 if they are possibly overlapping but if so the overlap again
1492 starts on the same address.
1493 Return -1 otherwise. */
1496 nonoverlapping_array_refs_p (tree ref1
, tree ref2
)
1498 tree index1
= TREE_OPERAND (ref1
, 1);
1499 tree index2
= TREE_OPERAND (ref2
, 1);
1500 tree low_bound1
= cheap_array_ref_low_bound (ref1
);
1501 tree low_bound2
= cheap_array_ref_low_bound (ref2
);
1503 /* Handle zero offsets first: we do not need to match type size in this
1505 if (operand_equal_p (index1
, low_bound1
, 0)
1506 && operand_equal_p (index2
, low_bound2
, 0))
1509 /* If type sizes are different, give up.
1511 Avoid expensive array_ref_element_size.
1512 If operand 3 is present it denotes size in the alignmnet units.
1513 Otherwise size is TYPE_SIZE of the element type.
1514 Handle only common cases where types are of the same "kind". */
1515 if ((TREE_OPERAND (ref1
, 3) == NULL
) != (TREE_OPERAND (ref2
, 3) == NULL
))
1518 tree elmt_type1
= TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref1
, 0)));
1519 tree elmt_type2
= TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref2
, 0)));
1521 if (TREE_OPERAND (ref1
, 3))
1523 if (TYPE_ALIGN (elmt_type1
) != TYPE_ALIGN (elmt_type2
)
1524 || !operand_equal_p (TREE_OPERAND (ref1
, 3),
1525 TREE_OPERAND (ref2
, 3), 0))
1530 if (!operand_equal_p (TYPE_SIZE_UNIT (elmt_type1
),
1531 TYPE_SIZE_UNIT (elmt_type2
), 0))
1535 /* Since we know that type sizes are the same, there is no need to return
1536 -1 after this point. Partial overlap can not be introduced. */
1538 /* We may need to fold trees in this case.
1539 TODO: Handle integer constant case at least. */
1540 if (!operand_equal_p (low_bound1
, low_bound2
, 0))
1543 if (TREE_CODE (index1
) == INTEGER_CST
&& TREE_CODE (index2
) == INTEGER_CST
)
1545 if (tree_int_cst_equal (index1
, index2
))
1549 /* TODO: We can use VRP to further disambiguate here. */
1553 /* Try to disambiguate REF1 and REF2 under the assumption that MATCH1 and
1554 MATCH2 either point to the same address or are disjoint.
1555 MATCH1 and MATCH2 are assumed to be ref in the access path of REF1 and REF2
1556 respectively or NULL in the case we established equivalence of bases.
1557 If PARTIAL_OVERLAP is true assume that the toplevel arrays may actually
1558 overlap by exact multiply of their element size.
1560 This test works by matching the initial segment of the access path
1561 and does not rely on TBAA thus is safe for !flag_strict_aliasing if
1562 match was determined without use of TBAA oracle.
1564 Return 1 if we can determine that component references REF1 and REF2,
1565 that are within a common DECL, cannot overlap.
1567 Return 0 if paths are same and thus there is nothing to disambiguate more
1568 (i.e. there is must alias assuming there is must alias between MATCH1 and
1571 Return -1 if we can not determine 0 or 1 - this happens when we met
1572 non-matching types was met in the path.
1573 In this case it may make sense to continue by other disambiguation
1577 nonoverlapping_refs_since_match_p (tree match1
, tree ref1
,
1578 tree match2
, tree ref2
,
1579 bool partial_overlap
)
1581 int ntbaa1
= 0, ntbaa2
= 0;
1582 /* Early return if there are no references to match, we do not need
1583 to walk the access paths.
1585 Do not consider this as may-alias for stats - it is more useful
1586 to have information how many disambiguations happened provided that
1587 the query was meaningful. */
1589 if (match1
== ref1
|| !handled_component_p (ref1
)
1590 || match2
== ref2
|| !handled_component_p (ref2
))
1593 auto_vec
<tree
, 16> component_refs1
;
1594 auto_vec
<tree
, 16> component_refs2
;
1596 /* Create the stack of handled components for REF1. */
1597 while (handled_component_p (ref1
) && ref1
!= match1
)
1599 /* We use TBAA only to re-synchronize after mismatched refs. So we
1600 do not need to truncate access path after TBAA part ends. */
1601 if (ends_tbaa_access_path_p (ref1
))
1605 component_refs1
.safe_push (ref1
);
1606 ref1
= TREE_OPERAND (ref1
, 0);
1609 /* Create the stack of handled components for REF2. */
1610 while (handled_component_p (ref2
) && ref2
!= match2
)
1612 if (ends_tbaa_access_path_p (ref2
))
1616 component_refs2
.safe_push (ref2
);
1617 ref2
= TREE_OPERAND (ref2
, 0);
1620 if (!flag_strict_aliasing
)
1626 bool mem_ref1
= TREE_CODE (ref1
) == MEM_REF
&& ref1
!= match1
;
1627 bool mem_ref2
= TREE_CODE (ref2
) == MEM_REF
&& ref2
!= match2
;
1629 /* If only one of access path starts with MEM_REF check that offset is 0
1630 so the addresses stays the same after stripping it.
1631 TODO: In this case we may walk the other access path until we get same
1634 If both starts with MEM_REF, offset has to be same. */
1635 if ((mem_ref1
&& !mem_ref2
&& !integer_zerop (TREE_OPERAND (ref1
, 1)))
1636 || (mem_ref2
&& !mem_ref1
&& !integer_zerop (TREE_OPERAND (ref2
, 1)))
1637 || (mem_ref1
&& mem_ref2
1638 && !tree_int_cst_equal (TREE_OPERAND (ref1
, 1),
1639 TREE_OPERAND (ref2
, 1))))
1641 ++alias_stats
.nonoverlapping_refs_since_match_p_may_alias
;
1645 /* TARGET_MEM_REF are never wrapped in handled components, so we do not need
1646 to handle them here at all. */
1647 gcc_checking_assert (TREE_CODE (ref1
) != TARGET_MEM_REF
1648 && TREE_CODE (ref2
) != TARGET_MEM_REF
);
1650 /* Pop the stacks in parallel and examine the COMPONENT_REFs of the same
1651 rank. This is sufficient because we start from the same DECL and you
1652 cannot reference several fields at a time with COMPONENT_REFs (unlike
1653 with ARRAY_RANGE_REFs for arrays) so you always need the same number
1654 of them to access a sub-component, unless you're in a union, in which
1655 case the return value will precisely be false. */
1658 /* Track if we seen unmatched ref with non-zero offset. In this case
1659 we must look for partial overlaps. */
1660 bool seen_unmatched_ref_p
= false;
1662 /* First match ARRAY_REFs an try to disambiguate. */
1663 if (!component_refs1
.is_empty ()
1664 && !component_refs2
.is_empty ())
1666 unsigned int narray_refs1
=0, narray_refs2
=0;
1668 /* We generally assume that both access paths starts by same sequence
1669 of refs. However if number of array refs is not in sync, try
1670 to recover and pop elts until number match. This helps the case
1671 where one access path starts by array and other by element. */
1672 for (narray_refs1
= 0; narray_refs1
< component_refs1
.length ();
1674 if (TREE_CODE (component_refs1
[component_refs1
.length()
1675 - 1 - narray_refs1
]) != ARRAY_REF
)
1678 for (narray_refs2
= 0; narray_refs2
< component_refs2
.length ();
1680 if (TREE_CODE (component_refs2
[component_refs2
.length()
1681 - 1 - narray_refs2
]) != ARRAY_REF
)
1683 for (; narray_refs1
> narray_refs2
; narray_refs1
--)
1685 ref1
= component_refs1
.pop ();
1688 /* If index is non-zero we need to check whether the reference
1689 does not break the main invariant that bases are either
1690 disjoint or equal. Consider the example:
1692 unsigned char out[][1];
1696 Here bases out and out are same, but after removing the
1697 [i] index, this invariant no longer holds, because
1698 out[i] points to the middle of array out.
1700 TODO: If size of type of the skipped reference is an integer
1701 multiply of the size of type of the other reference this
1702 invariant can be verified, but even then it is not completely
1703 safe with !flag_strict_aliasing if the other reference contains
1704 unbounded array accesses.
1707 if (!operand_equal_p (TREE_OPERAND (ref1
, 1),
1708 cheap_array_ref_low_bound (ref1
), 0))
1711 for (; narray_refs2
> narray_refs1
; narray_refs2
--)
1713 ref2
= component_refs2
.pop ();
1715 if (!operand_equal_p (TREE_OPERAND (ref2
, 1),
1716 cheap_array_ref_low_bound (ref2
), 0))
1719 /* Try to disambiguate matched arrays. */
1720 for (unsigned int i
= 0; i
< narray_refs1
; i
++)
1722 int cmp
= nonoverlapping_array_refs_p (component_refs1
.pop (),
1723 component_refs2
.pop ());
1726 if (cmp
== 1 && !partial_overlap
)
1729 .nonoverlapping_refs_since_match_p_no_alias
;
1734 seen_unmatched_ref_p
= true;
1735 /* We can not maintain the invariant that bases are either
1736 same or completely disjoint. However we can still recover
1737 from type based alias analysis if we reach references to
1738 same sizes. We do not attempt to match array sizes, so
1739 just finish array walking and look for component refs. */
1740 if (ntbaa1
< 0 || ntbaa2
< 0)
1742 ++alias_stats
.nonoverlapping_refs_since_match_p_may_alias
;
1745 for (i
++; i
< narray_refs1
; i
++)
1747 component_refs1
.pop ();
1748 component_refs2
.pop ();
1754 partial_overlap
= false;
1758 /* Next look for component_refs. */
1761 if (component_refs1
.is_empty ())
1764 .nonoverlapping_refs_since_match_p_must_overlap
;
1767 ref1
= component_refs1
.pop ();
1769 if (TREE_CODE (ref1
) != COMPONENT_REF
)
1771 seen_unmatched_ref_p
= true;
1772 if (ntbaa1
< 0 || ntbaa2
< 0)
1774 ++alias_stats
.nonoverlapping_refs_since_match_p_may_alias
;
1779 while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref1
, 0))));
1783 if (component_refs2
.is_empty ())
1786 .nonoverlapping_refs_since_match_p_must_overlap
;
1789 ref2
= component_refs2
.pop ();
1791 if (TREE_CODE (ref2
) != COMPONENT_REF
)
1793 if (ntbaa1
< 0 || ntbaa2
< 0)
1795 ++alias_stats
.nonoverlapping_refs_since_match_p_may_alias
;
1798 seen_unmatched_ref_p
= true;
1801 while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref2
, 0))));
1803 /* BIT_FIELD_REF and VIEW_CONVERT_EXPR are taken off the vectors
1805 gcc_checking_assert (TREE_CODE (ref1
) == COMPONENT_REF
1806 && TREE_CODE (ref2
) == COMPONENT_REF
);
1808 tree field1
= TREE_OPERAND (ref1
, 1);
1809 tree field2
= TREE_OPERAND (ref2
, 1);
1811 /* ??? We cannot simply use the type of operand #0 of the refs here
1812 as the Fortran compiler smuggles type punning into COMPONENT_REFs
1813 for common blocks instead of using unions like everyone else. */
1814 tree type1
= DECL_CONTEXT (field1
);
1815 tree type2
= DECL_CONTEXT (field2
);
1817 partial_overlap
= false;
1819 /* If we skipped array refs on type of different sizes, we can
1820 no longer be sure that there are not partial overlaps. */
1821 if (seen_unmatched_ref_p
&& ntbaa1
>= 0 && ntbaa2
>= 0
1822 && !operand_equal_p (TYPE_SIZE (type1
), TYPE_SIZE (type2
), 0))
1825 .nonoverlapping_refs_since_match_p_may_alias
;
1829 int cmp
= nonoverlapping_component_refs_p_1 (field1
, field2
);
1833 .nonoverlapping_refs_since_match_p_may_alias
;
1839 .nonoverlapping_refs_since_match_p_no_alias
;
1845 /* Return TYPE_UID which can be used to match record types we consider
1846 same for TBAA purposes. */
1849 ncr_type_uid (const_tree field
)
1851 /* ??? We cannot simply use the type of operand #0 of the refs here
1852 as the Fortran compiler smuggles type punning into COMPONENT_REFs
1853 for common blocks instead of using unions like everyone else. */
1854 tree type
= DECL_FIELD_CONTEXT (field
);
1855 /* With LTO types considered same_type_for_tbaa_p
1856 from different translation unit may not have same
1857 main variant. They however have same TYPE_CANONICAL. */
1858 if (TYPE_CANONICAL (type
))
1859 return TYPE_UID (TYPE_CANONICAL (type
));
1860 return TYPE_UID (type
);
1863 /* qsort compare function to sort FIELD_DECLs after their
1864 DECL_FIELD_CONTEXT TYPE_UID. */
1867 ncr_compar (const void *field1_
, const void *field2_
)
1869 const_tree field1
= *(const_tree
*) const_cast <void *>(field1_
);
1870 const_tree field2
= *(const_tree
*) const_cast <void *>(field2_
);
1871 unsigned int uid1
= ncr_type_uid (field1
);
1872 unsigned int uid2
= ncr_type_uid (field2
);
1876 else if (uid1
> uid2
)
1881 /* Return true if we can determine that the fields referenced cannot
1882 overlap for any pair of objects. This relies on TBAA. */
1885 nonoverlapping_component_refs_p (const_tree x
, const_tree y
)
1887 /* Early return if we have nothing to do.
1889 Do not consider this as may-alias for stats - it is more useful
1890 to have information how many disambiguations happened provided that
1891 the query was meaningful. */
1892 if (!flag_strict_aliasing
1894 || !handled_component_p (x
)
1895 || !handled_component_p (y
))
1898 auto_vec
<const_tree
, 16> fieldsx
;
1899 while (handled_component_p (x
))
1901 if (TREE_CODE (x
) == COMPONENT_REF
)
1903 tree field
= TREE_OPERAND (x
, 1);
1904 tree type
= DECL_FIELD_CONTEXT (field
);
1905 if (TREE_CODE (type
) == RECORD_TYPE
)
1906 fieldsx
.safe_push (field
);
1908 else if (ends_tbaa_access_path_p (x
))
1909 fieldsx
.truncate (0);
1910 x
= TREE_OPERAND (x
, 0);
1912 if (fieldsx
.length () == 0)
1914 auto_vec
<const_tree
, 16> fieldsy
;
1915 while (handled_component_p (y
))
1917 if (TREE_CODE (y
) == COMPONENT_REF
)
1919 tree field
= TREE_OPERAND (y
, 1);
1920 tree type
= DECL_FIELD_CONTEXT (field
);
1921 if (TREE_CODE (type
) == RECORD_TYPE
)
1922 fieldsy
.safe_push (TREE_OPERAND (y
, 1));
1924 else if (ends_tbaa_access_path_p (y
))
1925 fieldsy
.truncate (0);
1926 y
= TREE_OPERAND (y
, 0);
1928 if (fieldsy
.length () == 0)
1930 ++alias_stats
.nonoverlapping_component_refs_p_may_alias
;
1934 /* Most common case first. */
1935 if (fieldsx
.length () == 1
1936 && fieldsy
.length () == 1)
1938 if (same_type_for_tbaa (DECL_FIELD_CONTEXT (fieldsx
[0]),
1939 DECL_FIELD_CONTEXT (fieldsy
[0])) == 1
1940 && nonoverlapping_component_refs_p_1 (fieldsx
[0], fieldsy
[0]) == 1)
1942 ++alias_stats
.nonoverlapping_component_refs_p_no_alias
;
1947 ++alias_stats
.nonoverlapping_component_refs_p_may_alias
;
1952 if (fieldsx
.length () == 2)
1954 if (ncr_compar (&fieldsx
[0], &fieldsx
[1]) == 1)
1955 std::swap (fieldsx
[0], fieldsx
[1]);
1958 fieldsx
.qsort (ncr_compar
);
1960 if (fieldsy
.length () == 2)
1962 if (ncr_compar (&fieldsy
[0], &fieldsy
[1]) == 1)
1963 std::swap (fieldsy
[0], fieldsy
[1]);
1966 fieldsy
.qsort (ncr_compar
);
1968 unsigned i
= 0, j
= 0;
1971 const_tree fieldx
= fieldsx
[i
];
1972 const_tree fieldy
= fieldsy
[j
];
1974 /* We're left with accessing different fields of a structure,
1975 no possible overlap. */
1976 if (same_type_for_tbaa (DECL_FIELD_CONTEXT (fieldx
),
1977 DECL_FIELD_CONTEXT (fieldy
)) == 1
1978 && nonoverlapping_component_refs_p_1 (fieldx
, fieldy
) == 1)
1980 ++alias_stats
.nonoverlapping_component_refs_p_no_alias
;
1984 if (ncr_type_uid (fieldx
) < ncr_type_uid (fieldy
))
1987 if (i
== fieldsx
.length ())
1993 if (j
== fieldsy
.length ())
1999 ++alias_stats
.nonoverlapping_component_refs_p_may_alias
;
2004 /* Return true if two memory references based on the variables BASE1
2005 and BASE2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
2006 [OFFSET2, OFFSET2 + MAX_SIZE2) may alias. REF1 and REF2
2007 if non-NULL are the complete memory reference trees. */
2010 decl_refs_may_alias_p (tree ref1
, tree base1
,
2011 poly_int64 offset1
, poly_int64 max_size1
,
2013 tree ref2
, tree base2
,
2014 poly_int64 offset2
, poly_int64 max_size2
,
2017 gcc_checking_assert (DECL_P (base1
) && DECL_P (base2
));
2019 /* If both references are based on different variables, they cannot alias. */
2020 if (compare_base_decls (base1
, base2
) == 0)
2023 /* If both references are based on the same variable, they cannot alias if
2024 the accesses do not overlap. */
2025 if (!ranges_maybe_overlap_p (offset1
, max_size1
, offset2
, max_size2
))
2028 /* If there is must alias, there is no use disambiguating further. */
2029 if (known_eq (size1
, max_size1
) && known_eq (size2
, max_size2
))
2032 /* For components with variable position, the above test isn't sufficient,
2033 so we disambiguate component references manually. */
2035 && handled_component_p (ref1
) && handled_component_p (ref2
)
2036 && nonoverlapping_refs_since_match_p (NULL
, ref1
, NULL
, ref2
, false) == 1)
2042 /* Return true if access with BASE is view converted.
2043 Base must not be stripped from inner MEM_REF (&decl)
2044 which is done by ao_ref_base and thus one extra walk
2045 of handled components is needed. */
2048 view_converted_memref_p (tree base
)
2050 if (TREE_CODE (base
) != MEM_REF
&& TREE_CODE (base
) != TARGET_MEM_REF
)
2052 return same_type_for_tbaa (TREE_TYPE (base
),
2053 TREE_TYPE (TREE_OPERAND (base
, 1))) != 1;
2056 /* Return true if an indirect reference based on *PTR1 constrained
2057 to [OFFSET1, OFFSET1 + MAX_SIZE1) may alias a variable based on BASE2
2058 constrained to [OFFSET2, OFFSET2 + MAX_SIZE2). *PTR1 and BASE2 have
2059 the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
2060 in which case they are computed on-demand. REF1 and REF2
2061 if non-NULL are the complete memory reference trees. */
2064 indirect_ref_may_alias_decl_p (tree ref1 ATTRIBUTE_UNUSED
, tree base1
,
2065 poly_int64 offset1
, poly_int64 max_size1
,
2067 alias_set_type ref1_alias_set
,
2068 alias_set_type base1_alias_set
,
2069 tree ref2 ATTRIBUTE_UNUSED
, tree base2
,
2070 poly_int64 offset2
, poly_int64 max_size2
,
2072 alias_set_type ref2_alias_set
,
2073 alias_set_type base2_alias_set
, bool tbaa_p
)
2076 tree ptrtype1
, dbase2
;
2078 gcc_checking_assert ((TREE_CODE (base1
) == MEM_REF
2079 || TREE_CODE (base1
) == TARGET_MEM_REF
)
2082 ptr1
= TREE_OPERAND (base1
, 0);
2083 poly_offset_int moff
= mem_ref_offset (base1
) << LOG2_BITS_PER_UNIT
;
2085 /* If only one reference is based on a variable, they cannot alias if
2086 the pointer access is beyond the extent of the variable access.
2087 (the pointer base cannot validly point to an offset less than zero
2089 ??? IVOPTs creates bases that do not honor this restriction,
2090 so do not apply this optimization for TARGET_MEM_REFs. */
2091 if (TREE_CODE (base1
) != TARGET_MEM_REF
2092 && !ranges_maybe_overlap_p (offset1
+ moff
, -1, offset2
, max_size2
))
2095 /* If the pointer based access is bigger than the variable they cannot
2096 alias. This is similar to the check below where we use TBAA to
2097 increase the size of the pointer based access based on the dynamic
2098 type of a containing object we can infer from it. */
2100 if (known_size_p (size1
)
2101 && poly_int_tree_p (DECL_SIZE (base2
), &dsize2
)
2102 && known_lt (dsize2
, size1
))
2105 /* They also cannot alias if the pointer may not point to the decl. */
2106 if (!ptr_deref_may_alias_decl_p (ptr1
, base2
))
2109 /* Disambiguations that rely on strict aliasing rules follow. */
2110 if (!flag_strict_aliasing
|| !tbaa_p
)
2113 /* If the alias set for a pointer access is zero all bets are off. */
2114 if (base1_alias_set
== 0 || base2_alias_set
== 0)
2117 /* When we are trying to disambiguate an access with a pointer dereference
2118 as base versus one with a decl as base we can use both the size
2119 of the decl and its dynamic type for extra disambiguation.
2120 ??? We do not know anything about the dynamic type of the decl
2121 other than that its alias-set contains base2_alias_set as a subset
2122 which does not help us here. */
2123 /* As we know nothing useful about the dynamic type of the decl just
2124 use the usual conflict check rather than a subset test.
2125 ??? We could introduce -fvery-strict-aliasing when the language
2126 does not allow decls to have a dynamic type that differs from their
2127 static type. Then we can check
2128 !alias_set_subset_of (base1_alias_set, base2_alias_set) instead. */
2129 if (base1_alias_set
!= base2_alias_set
2130 && !alias_sets_conflict_p (base1_alias_set
, base2_alias_set
))
2133 ptrtype1
= TREE_TYPE (TREE_OPERAND (base1
, 1));
2135 /* If the size of the access relevant for TBAA through the pointer
2136 is bigger than the size of the decl we can't possibly access the
2137 decl via that pointer. */
2138 if (/* ??? This in turn may run afoul when a decl of type T which is
2139 a member of union type U is accessed through a pointer to
2140 type U and sizeof T is smaller than sizeof U. */
2141 TREE_CODE (TREE_TYPE (ptrtype1
)) != UNION_TYPE
2142 && TREE_CODE (TREE_TYPE (ptrtype1
)) != QUAL_UNION_TYPE
2143 && compare_sizes (DECL_SIZE (base2
),
2144 TYPE_SIZE (TREE_TYPE (ptrtype1
))) < 0)
2150 /* If the decl is accessed via a MEM_REF, reconstruct the base
2151 we can use for TBAA and an appropriately adjusted offset. */
2153 while (handled_component_p (dbase2
))
2154 dbase2
= TREE_OPERAND (dbase2
, 0);
2155 poly_int64 doffset1
= offset1
;
2156 poly_offset_int doffset2
= offset2
;
2157 if (TREE_CODE (dbase2
) == MEM_REF
2158 || TREE_CODE (dbase2
) == TARGET_MEM_REF
)
2160 doffset2
-= mem_ref_offset (dbase2
) << LOG2_BITS_PER_UNIT
;
2161 tree ptrtype2
= TREE_TYPE (TREE_OPERAND (dbase2
, 1));
2162 /* If second reference is view-converted, give up now. */
2163 if (same_type_for_tbaa (TREE_TYPE (dbase2
), TREE_TYPE (ptrtype2
)) != 1)
2167 /* If first reference is view-converted, give up now. */
2168 if (same_type_for_tbaa (TREE_TYPE (base1
), TREE_TYPE (ptrtype1
)) != 1)
2171 /* If both references are through the same type, they do not alias
2172 if the accesses do not overlap. This does extra disambiguation
2173 for mixed/pointer accesses but requires strict aliasing.
2174 For MEM_REFs we require that the component-ref offset we computed
2175 is relative to the start of the type which we ensure by
2176 comparing rvalue and access type and disregarding the constant
2179 But avoid treating variable length arrays as "objects", instead assume they
2180 can overlap by an exact multiple of their element size.
2181 See gcc.dg/torture/alias-2.c. */
2182 if (((TREE_CODE (base1
) != TARGET_MEM_REF
2183 || (!TMR_INDEX (base1
) && !TMR_INDEX2 (base1
)))
2184 && (TREE_CODE (dbase2
) != TARGET_MEM_REF
2185 || (!TMR_INDEX (dbase2
) && !TMR_INDEX2 (dbase2
))))
2186 && same_type_for_tbaa (TREE_TYPE (base1
), TREE_TYPE (dbase2
)) == 1)
2188 bool partial_overlap
= (TREE_CODE (TREE_TYPE (base1
)) == ARRAY_TYPE
2189 && (TYPE_SIZE (TREE_TYPE (base1
))
2190 && TREE_CODE (TYPE_SIZE (TREE_TYPE (base1
)))
2192 if (!partial_overlap
2193 && !ranges_maybe_overlap_p (doffset1
, max_size1
, doffset2
, max_size2
))
2196 /* If there is must alias, there is no use disambiguating further. */
2197 || (!partial_overlap
2198 && known_eq (size1
, max_size1
) && known_eq (size2
, max_size2
)))
2200 int res
= nonoverlapping_refs_since_match_p (base1
, ref1
, base2
, ref2
,
2203 return !nonoverlapping_component_refs_p (ref1
, ref2
);
2207 /* Do access-path based disambiguation. */
2209 && (handled_component_p (ref1
) || handled_component_p (ref2
)))
2210 return aliasing_component_refs_p (ref1
,
2211 ref1_alias_set
, base1_alias_set
,
2214 ref2_alias_set
, base2_alias_set
,
2215 offset2
, max_size2
);
2220 /* Return true if two indirect references based on *PTR1
2221 and *PTR2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
2222 [OFFSET2, OFFSET2 + MAX_SIZE2) may alias. *PTR1 and *PTR2 have
2223 the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
2224 in which case they are computed on-demand. REF1 and REF2
2225 if non-NULL are the complete memory reference trees. */
2228 indirect_refs_may_alias_p (tree ref1 ATTRIBUTE_UNUSED
, tree base1
,
2229 poly_int64 offset1
, poly_int64 max_size1
,
2231 alias_set_type ref1_alias_set
,
2232 alias_set_type base1_alias_set
,
2233 tree ref2 ATTRIBUTE_UNUSED
, tree base2
,
2234 poly_int64 offset2
, poly_int64 max_size2
,
2236 alias_set_type ref2_alias_set
,
2237 alias_set_type base2_alias_set
, bool tbaa_p
)
2241 tree ptrtype1
, ptrtype2
;
2243 gcc_checking_assert ((TREE_CODE (base1
) == MEM_REF
2244 || TREE_CODE (base1
) == TARGET_MEM_REF
)
2245 && (TREE_CODE (base2
) == MEM_REF
2246 || TREE_CODE (base2
) == TARGET_MEM_REF
));
2248 ptr1
= TREE_OPERAND (base1
, 0);
2249 ptr2
= TREE_OPERAND (base2
, 0);
2251 /* If both bases are based on pointers they cannot alias if they may not
2252 point to the same memory object or if they point to the same object
2253 and the accesses do not overlap. */
2254 if ((!cfun
|| gimple_in_ssa_p (cfun
))
2255 && operand_equal_p (ptr1
, ptr2
, 0)
2256 && (((TREE_CODE (base1
) != TARGET_MEM_REF
2257 || (!TMR_INDEX (base1
) && !TMR_INDEX2 (base1
)))
2258 && (TREE_CODE (base2
) != TARGET_MEM_REF
2259 || (!TMR_INDEX (base2
) && !TMR_INDEX2 (base2
))))
2260 || (TREE_CODE (base1
) == TARGET_MEM_REF
2261 && TREE_CODE (base2
) == TARGET_MEM_REF
2262 && (TMR_STEP (base1
) == TMR_STEP (base2
)
2263 || (TMR_STEP (base1
) && TMR_STEP (base2
)
2264 && operand_equal_p (TMR_STEP (base1
),
2265 TMR_STEP (base2
), 0)))
2266 && (TMR_INDEX (base1
) == TMR_INDEX (base2
)
2267 || (TMR_INDEX (base1
) && TMR_INDEX (base2
)
2268 && operand_equal_p (TMR_INDEX (base1
),
2269 TMR_INDEX (base2
), 0)))
2270 && (TMR_INDEX2 (base1
) == TMR_INDEX2 (base2
)
2271 || (TMR_INDEX2 (base1
) && TMR_INDEX2 (base2
)
2272 && operand_equal_p (TMR_INDEX2 (base1
),
2273 TMR_INDEX2 (base2
), 0))))))
2275 poly_offset_int moff1
= mem_ref_offset (base1
) << LOG2_BITS_PER_UNIT
;
2276 poly_offset_int moff2
= mem_ref_offset (base2
) << LOG2_BITS_PER_UNIT
;
2277 if (!ranges_maybe_overlap_p (offset1
+ moff1
, max_size1
,
2278 offset2
+ moff2
, max_size2
))
2280 /* If there is must alias, there is no use disambiguating further. */
2281 if (known_eq (size1
, max_size1
) && known_eq (size2
, max_size2
))
2285 int res
= nonoverlapping_refs_since_match_p (NULL
, ref1
, NULL
, ref2
,
2291 if (!ptr_derefs_may_alias_p (ptr1
, ptr2
))
2294 /* Disambiguations that rely on strict aliasing rules follow. */
2295 if (!flag_strict_aliasing
|| !tbaa_p
)
2298 ptrtype1
= TREE_TYPE (TREE_OPERAND (base1
, 1));
2299 ptrtype2
= TREE_TYPE (TREE_OPERAND (base2
, 1));
2301 /* If the alias set for a pointer access is zero all bets are off. */
2302 if (base1_alias_set
== 0
2303 || base2_alias_set
== 0)
2306 /* Do type-based disambiguation. */
2307 if (base1_alias_set
!= base2_alias_set
2308 && !alias_sets_conflict_p (base1_alias_set
, base2_alias_set
))
2311 /* If either reference is view-converted, give up now. */
2312 if (same_type_for_tbaa (TREE_TYPE (base1
), TREE_TYPE (ptrtype1
)) != 1
2313 || same_type_for_tbaa (TREE_TYPE (base2
), TREE_TYPE (ptrtype2
)) != 1)
2316 /* If both references are through the same type, they do not alias
2317 if the accesses do not overlap. This does extra disambiguation
2318 for mixed/pointer accesses but requires strict aliasing. */
2319 if ((TREE_CODE (base1
) != TARGET_MEM_REF
2320 || (!TMR_INDEX (base1
) && !TMR_INDEX2 (base1
)))
2321 && (TREE_CODE (base2
) != TARGET_MEM_REF
2322 || (!TMR_INDEX (base2
) && !TMR_INDEX2 (base2
)))
2323 && same_type_for_tbaa (TREE_TYPE (ptrtype1
),
2324 TREE_TYPE (ptrtype2
)) == 1)
2326 /* But avoid treating arrays as "objects", instead assume they
2327 can overlap by an exact multiple of their element size.
2328 See gcc.dg/torture/alias-2.c. */
2329 bool partial_overlap
= TREE_CODE (TREE_TYPE (ptrtype1
)) == ARRAY_TYPE
;
2331 if (!partial_overlap
2332 && !ranges_maybe_overlap_p (offset1
, max_size1
, offset2
, max_size2
))
2335 || (!partial_overlap
2336 && known_eq (size1
, max_size1
) && known_eq (size2
, max_size2
)))
2338 int res
= nonoverlapping_refs_since_match_p (base1
, ref1
, base2
, ref2
,
2341 return !nonoverlapping_component_refs_p (ref1
, ref2
);
2345 /* Do access-path based disambiguation. */
2347 && (handled_component_p (ref1
) || handled_component_p (ref2
)))
2348 return aliasing_component_refs_p (ref1
,
2349 ref1_alias_set
, base1_alias_set
,
2352 ref2_alias_set
, base2_alias_set
,
2353 offset2
, max_size2
);
2358 /* Return true, if the two memory references REF1 and REF2 may alias. */
2361 refs_may_alias_p_2 (ao_ref
*ref1
, ao_ref
*ref2
, bool tbaa_p
)
2364 poly_int64 offset1
= 0, offset2
= 0;
2365 poly_int64 max_size1
= -1, max_size2
= -1;
2366 bool var1_p
, var2_p
, ind1_p
, ind2_p
;
2368 gcc_checking_assert ((!ref1
->ref
2369 || TREE_CODE (ref1
->ref
) == SSA_NAME
2370 || DECL_P (ref1
->ref
)
2371 || TREE_CODE (ref1
->ref
) == STRING_CST
2372 || handled_component_p (ref1
->ref
)
2373 || TREE_CODE (ref1
->ref
) == MEM_REF
2374 || TREE_CODE (ref1
->ref
) == TARGET_MEM_REF
2375 || TREE_CODE (ref1
->ref
) == WITH_SIZE_EXPR
)
2377 || TREE_CODE (ref2
->ref
) == SSA_NAME
2378 || DECL_P (ref2
->ref
)
2379 || TREE_CODE (ref2
->ref
) == STRING_CST
2380 || handled_component_p (ref2
->ref
)
2381 || TREE_CODE (ref2
->ref
) == MEM_REF
2382 || TREE_CODE (ref2
->ref
) == TARGET_MEM_REF
2383 || TREE_CODE (ref2
->ref
) == WITH_SIZE_EXPR
));
2385 /* Decompose the references into their base objects and the access. */
2386 base1
= ao_ref_base (ref1
);
2387 offset1
= ref1
->offset
;
2388 max_size1
= ref1
->max_size
;
2389 base2
= ao_ref_base (ref2
);
2390 offset2
= ref2
->offset
;
2391 max_size2
= ref2
->max_size
;
2393 /* We can end up with registers or constants as bases for example from
2394 *D.1663_44 = VIEW_CONVERT_EXPR<struct DB_LSN>(__tmp$B0F64_59);
2395 which is seen as a struct copy. */
2396 if (TREE_CODE (base1
) == SSA_NAME
2397 || TREE_CODE (base1
) == CONST_DECL
2398 || TREE_CODE (base1
) == CONSTRUCTOR
2399 || TREE_CODE (base1
) == ADDR_EXPR
2400 || CONSTANT_CLASS_P (base1
)
2401 || TREE_CODE (base2
) == SSA_NAME
2402 || TREE_CODE (base2
) == CONST_DECL
2403 || TREE_CODE (base2
) == CONSTRUCTOR
2404 || TREE_CODE (base2
) == ADDR_EXPR
2405 || CONSTANT_CLASS_P (base2
))
2408 /* Two volatile accesses always conflict. */
2409 if (ref1
->volatile_p
2410 && ref2
->volatile_p
)
2413 /* refN->ref may convey size information, do not confuse our workers
2414 with that but strip it - ao_ref_base took it into account already. */
2415 tree ref1ref
= ref1
->ref
;
2416 if (ref1ref
&& TREE_CODE (ref1ref
) == WITH_SIZE_EXPR
)
2417 ref1ref
= TREE_OPERAND (ref1ref
, 0);
2418 tree ref2ref
= ref2
->ref
;
2419 if (ref2ref
&& TREE_CODE (ref2ref
) == WITH_SIZE_EXPR
)
2420 ref2ref
= TREE_OPERAND (ref2ref
, 0);
2422 /* Defer to simple offset based disambiguation if we have
2423 references based on two decls. Do this before deferring to
2424 TBAA to handle must-alias cases in conformance with the
2425 GCC extension of allowing type-punning through unions. */
2426 var1_p
= DECL_P (base1
);
2427 var2_p
= DECL_P (base2
);
2428 if (var1_p
&& var2_p
)
2429 return decl_refs_may_alias_p (ref1ref
, base1
, offset1
, max_size1
,
2431 ref2ref
, base2
, offset2
, max_size2
,
2434 /* We can end up referring to code via function and label decls.
2435 As we likely do not properly track code aliases conservatively
2437 if (TREE_CODE (base1
) == FUNCTION_DECL
2438 || TREE_CODE (base1
) == LABEL_DECL
2439 || TREE_CODE (base2
) == FUNCTION_DECL
2440 || TREE_CODE (base2
) == LABEL_DECL
)
2443 /* Handle restrict based accesses.
2444 ??? ao_ref_base strips inner MEM_REF [&decl], recover from that
2446 tree rbase1
= base1
;
2447 tree rbase2
= base2
;
2452 while (handled_component_p (rbase1
))
2453 rbase1
= TREE_OPERAND (rbase1
, 0);
2459 while (handled_component_p (rbase2
))
2460 rbase2
= TREE_OPERAND (rbase2
, 0);
2462 if (rbase1
&& rbase2
2463 && (TREE_CODE (rbase1
) == MEM_REF
|| TREE_CODE (rbase1
) == TARGET_MEM_REF
)
2464 && (TREE_CODE (rbase2
) == MEM_REF
|| TREE_CODE (rbase2
) == TARGET_MEM_REF
)
2465 /* If the accesses are in the same restrict clique... */
2466 && MR_DEPENDENCE_CLIQUE (rbase1
) == MR_DEPENDENCE_CLIQUE (rbase2
)
2467 /* But based on different pointers they do not alias. */
2468 && MR_DEPENDENCE_BASE (rbase1
) != MR_DEPENDENCE_BASE (rbase2
))
2471 ind1_p
= (TREE_CODE (base1
) == MEM_REF
2472 || TREE_CODE (base1
) == TARGET_MEM_REF
);
2473 ind2_p
= (TREE_CODE (base2
) == MEM_REF
2474 || TREE_CODE (base2
) == TARGET_MEM_REF
);
2476 /* Canonicalize the pointer-vs-decl case. */
2477 if (ind1_p
&& var2_p
)
2479 std::swap (offset1
, offset2
);
2480 std::swap (max_size1
, max_size2
);
2481 std::swap (base1
, base2
);
2482 std::swap (ref1
, ref2
);
2483 std::swap (ref1ref
, ref2ref
);
2490 /* First defer to TBAA if possible. */
2492 && flag_strict_aliasing
2493 && !alias_sets_conflict_p (ao_ref_alias_set (ref1
),
2494 ao_ref_alias_set (ref2
)))
2497 /* If the reference is based on a pointer that points to memory
2498 that may not be written to then the other reference cannot possibly
2500 if ((TREE_CODE (TREE_OPERAND (base2
, 0)) == SSA_NAME
2501 && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base2
, 0)))
2503 && TREE_CODE (TREE_OPERAND (base1
, 0)) == SSA_NAME
2504 && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base1
, 0))))
2507 /* Dispatch to the pointer-vs-decl or pointer-vs-pointer disambiguators. */
2508 if (var1_p
&& ind2_p
)
2509 return indirect_ref_may_alias_decl_p (ref2ref
, base2
,
2510 offset2
, max_size2
, ref2
->size
,
2511 ao_ref_alias_set (ref2
),
2512 ao_ref_base_alias_set (ref2
),
2514 offset1
, max_size1
, ref1
->size
,
2515 ao_ref_alias_set (ref1
),
2516 ao_ref_base_alias_set (ref1
),
2518 else if (ind1_p
&& ind2_p
)
2519 return indirect_refs_may_alias_p (ref1ref
, base1
,
2520 offset1
, max_size1
, ref1
->size
,
2521 ao_ref_alias_set (ref1
),
2522 ao_ref_base_alias_set (ref1
),
2524 offset2
, max_size2
, ref2
->size
,
2525 ao_ref_alias_set (ref2
),
2526 ao_ref_base_alias_set (ref2
),
2532 /* Return true, if the two memory references REF1 and REF2 may alias
2533 and update statistics. */
2536 refs_may_alias_p_1 (ao_ref
*ref1
, ao_ref
*ref2
, bool tbaa_p
)
2538 bool res
= refs_may_alias_p_2 (ref1
, ref2
, tbaa_p
);
2540 ++alias_stats
.refs_may_alias_p_may_alias
;
2542 ++alias_stats
.refs_may_alias_p_no_alias
;
2547 refs_may_alias_p (tree ref1
, ao_ref
*ref2
, bool tbaa_p
)
2550 ao_ref_init (&r1
, ref1
);
2551 return refs_may_alias_p_1 (&r1
, ref2
, tbaa_p
);
2555 refs_may_alias_p (tree ref1
, tree ref2
, bool tbaa_p
)
2558 ao_ref_init (&r1
, ref1
);
2559 ao_ref_init (&r2
, ref2
);
2560 return refs_may_alias_p_1 (&r1
, &r2
, tbaa_p
);
2563 /* Returns true if there is an anti-dependence for the STORE that
2564 executes after the LOAD. */
2567 refs_anti_dependent_p (tree load
, tree store
)
2570 ao_ref_init (&r1
, load
);
2571 ao_ref_init (&r2
, store
);
2572 return refs_may_alias_p_1 (&r1
, &r2
, false);
2575 /* Returns true if there is an output dependence for the stores
2576 STORE1 and STORE2. */
2579 refs_output_dependent_p (tree store1
, tree store2
)
2582 ao_ref_init (&r1
, store1
);
2583 ao_ref_init (&r2
, store2
);
2584 return refs_may_alias_p_1 (&r1
, &r2
, false);
2587 /* Returns true if and only if REF may alias any access stored in TT.
2588 If TBAA_P is true, use TBAA oracle. */
2591 modref_may_conflict (const gcall
*stmt
,
2592 modref_tree
<alias_set_type
> *tt
, ao_ref
*ref
, bool tbaa_p
)
2594 alias_set_type base_set
, ref_set
;
2595 bool global_memory_ok
= false;
2600 if (!dbg_cnt (ipa_mod_ref
))
2603 base_set
= ao_ref_base_alias_set (ref
);
2605 ref_set
= ao_ref_alias_set (ref
);
2607 int num_tests
= 0, max_tests
= param_modref_max_tests
;
2608 for (auto base_node
: tt
->bases
)
2610 if (tbaa_p
&& flag_strict_aliasing
)
2612 if (num_tests
>= max_tests
)
2614 alias_stats
.modref_tests
++;
2615 if (!alias_sets_conflict_p (base_set
, base_node
->base
))
2620 if (base_node
->every_ref
)
2623 for (auto ref_node
: base_node
->refs
)
2625 /* Do not repeat same test as before. */
2626 if ((ref_set
!= base_set
|| base_node
->base
!= ref_node
->ref
)
2627 && tbaa_p
&& flag_strict_aliasing
)
2629 if (num_tests
>= max_tests
)
2631 alias_stats
.modref_tests
++;
2632 if (!alias_sets_conflict_p (ref_set
, ref_node
->ref
))
2637 if (ref_node
->every_access
)
2640 /* TBAA checks did not disambiguate, try individual accesses. */
2641 for (auto access_node
: ref_node
->accesses
)
2643 if (num_tests
>= max_tests
)
2646 if (access_node
.parm_index
== MODREF_GLOBAL_MEMORY_PARM
)
2648 if (global_memory_ok
)
2650 if (ref_may_alias_global_p (ref
, true))
2652 global_memory_ok
= true;
2657 tree arg
= access_node
.get_call_arg (stmt
);
2661 alias_stats
.modref_baseptr_tests
++;
2663 if (integer_zerop (arg
) && flag_delete_null_pointer_checks
)
2666 /* PTA oracle will be unhappy if arg is not a pointer. */
2667 if (!POINTER_TYPE_P (TREE_TYPE (arg
)))
2670 /* If we don't have base pointer, give up. */
2671 if (!ref
->ref
&& !ref
->base
)
2675 if (access_node
.get_ao_ref (stmt
, &ref2
))
2677 ref2
.ref_alias_set
= ref_node
->ref
;
2678 ref2
.base_alias_set
= base_node
->base
;
2679 if (refs_may_alias_p_1 (&ref2
, ref
, tbaa_p
))
2682 else if (ptr_deref_may_alias_ref_p_1 (arg
, ref
))
2692 /* Check if REF conflicts with call using "fn spec" attribute.
2693 If CLOBBER is true we are checking for writes, otherwise check loads.
2695 Return 0 if there are no conflicts (except for possible function call
2696 argument reads), 1 if there are conflicts and -1 if we cannot decide by
2700 check_fnspec (gcall
*call
, ao_ref
*ref
, bool clobber
)
2702 attr_fnspec fnspec
= gimple_call_fnspec (call
);
2703 if (fnspec
.known_p ())
2706 ? !fnspec
.global_memory_written_p ()
2707 : !fnspec
.global_memory_read_p ())
2709 for (unsigned int i
= 0; i
< gimple_call_num_args (call
); i
++)
2710 if (POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call
, i
)))
2711 && (!fnspec
.arg_specified_p (i
)
2712 || (clobber
? fnspec
.arg_maybe_written_p (i
)
2713 : fnspec
.arg_maybe_read_p (i
))))
2716 tree size
= NULL_TREE
;
2717 unsigned int size_arg
;
2719 if (!fnspec
.arg_specified_p (i
))
2721 else if (fnspec
.arg_max_access_size_given_by_arg_p
2723 size
= gimple_call_arg (call
, size_arg
);
2724 else if (fnspec
.arg_access_size_given_by_type_p (i
))
2726 tree callee
= gimple_call_fndecl (call
);
2727 tree t
= TYPE_ARG_TYPES (TREE_TYPE (callee
));
2729 for (unsigned int p
= 0; p
< i
; p
++)
2731 size
= TYPE_SIZE_UNIT (TREE_TYPE (TREE_VALUE (t
)));
2733 poly_int64 size_hwi
;
2735 && poly_int_tree_p (size
, &size_hwi
)
2736 && coeffs_in_range_p (size_hwi
, 0,
2737 HOST_WIDE_INT_MAX
/ BITS_PER_UNIT
))
2739 size_hwi
= size_hwi
* BITS_PER_UNIT
;
2740 ao_ref_init_from_ptr_and_range (&dref
,
2741 gimple_call_arg (call
, i
),
2742 true, 0, -1, size_hwi
);
2745 ao_ref_init_from_ptr_and_range (&dref
,
2746 gimple_call_arg (call
, i
),
2748 if (refs_may_alias_p_1 (&dref
, ref
, false))
2752 && fnspec
.errno_maybe_written_p ()
2754 && targetm
.ref_may_alias_errno (ref
))
2760 /* FIXME: we should handle barriers more consistently, but for now leave the
2762 if (gimple_call_builtin_p (call
, BUILT_IN_NORMAL
))
2763 switch (DECL_FUNCTION_CODE (gimple_call_fndecl (call
)))
2765 /* __sync_* builtins and some OpenMP builtins act as threading
2767 #undef DEF_SYNC_BUILTIN
2768 #define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
2769 #include "sync-builtins.def"
2770 #undef DEF_SYNC_BUILTIN
2771 case BUILT_IN_GOMP_ATOMIC_START
:
2772 case BUILT_IN_GOMP_ATOMIC_END
:
2773 case BUILT_IN_GOMP_BARRIER
:
2774 case BUILT_IN_GOMP_BARRIER_CANCEL
:
2775 case BUILT_IN_GOMP_TASKWAIT
:
2776 case BUILT_IN_GOMP_TASKGROUP_END
:
2777 case BUILT_IN_GOMP_CRITICAL_START
:
2778 case BUILT_IN_GOMP_CRITICAL_END
:
2779 case BUILT_IN_GOMP_CRITICAL_NAME_START
:
2780 case BUILT_IN_GOMP_CRITICAL_NAME_END
:
2781 case BUILT_IN_GOMP_LOOP_END
:
2782 case BUILT_IN_GOMP_LOOP_END_CANCEL
:
2783 case BUILT_IN_GOMP_ORDERED_START
:
2784 case BUILT_IN_GOMP_ORDERED_END
:
2785 case BUILT_IN_GOMP_SECTIONS_END
:
2786 case BUILT_IN_GOMP_SECTIONS_END_CANCEL
:
2787 case BUILT_IN_GOMP_SINGLE_COPY_START
:
2788 case BUILT_IN_GOMP_SINGLE_COPY_END
:
2797 /* If the call CALL may use the memory reference REF return true,
2798 otherwise return false. */
2801 ref_maybe_used_by_call_p_1 (gcall
*call
, ao_ref
*ref
, bool tbaa_p
)
2805 int flags
= gimple_call_flags (call
);
2807 if (flags
& (ECF_CONST
|ECF_NOVOPS
))
2810 /* A call that is not without side-effects might involve volatile
2811 accesses and thus conflicts with all other volatile accesses. */
2812 if (ref
->volatile_p
)
2815 if (gimple_call_internal_p (call
))
2816 switch (gimple_call_internal_fn (call
))
2818 case IFN_MASK_STORE
:
2819 case IFN_SCATTER_STORE
:
2820 case IFN_MASK_SCATTER_STORE
:
2822 case IFN_MASK_LEN_STORE
:
2824 case IFN_MASK_STORE_LANES
:
2825 case IFN_MASK_LEN_STORE_LANES
:
2829 case IFN_MASK_LEN_LOAD
:
2830 case IFN_MASK_LOAD_LANES
:
2831 case IFN_MASK_LEN_LOAD_LANES
:
2834 tree lhs
= gimple_call_lhs (call
);
2837 ao_ref_init_from_ptr_and_size (&rhs_ref
,
2838 gimple_call_arg (call
, 0),
2839 TYPE_SIZE_UNIT (TREE_TYPE (lhs
)));
2840 /* We cannot make this a known-size access since otherwise
2841 we disambiguate against refs to decls that are smaller. */
2843 rhs_ref
.ref_alias_set
= rhs_ref
.base_alias_set
2844 = tbaa_p
? get_deref_alias_set (TREE_TYPE
2845 (gimple_call_arg (call
, 1))) : 0;
2846 return refs_may_alias_p_1 (ref
, &rhs_ref
, tbaa_p
);
2853 callee
= gimple_call_fndecl (call
);
2854 if (callee
!= NULL_TREE
)
2856 struct cgraph_node
*node
= cgraph_node::get (callee
);
2857 /* We cannot safely optimize based on summary of callee if it does
2858 not always bind to current def: it is possible that memory load
2859 was optimized out earlier and the interposed variant may not be
2860 optimized this way. */
2861 if (node
&& node
->binds_to_current_def_p ())
2863 modref_summary
*summary
= get_modref_function_summary (node
);
2864 if (summary
&& !summary
->calls_interposable
)
2866 if (!modref_may_conflict (call
, summary
->loads
, ref
, tbaa_p
))
2868 alias_stats
.modref_use_no_alias
++;
2869 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2872 "ipa-modref: call stmt ");
2873 print_gimple_stmt (dump_file
, call
, 0);
2875 "ipa-modref: call to %s does not use ",
2876 node
->dump_name ());
2877 if (!ref
->ref
&& ref
->base
)
2879 fprintf (dump_file
, "base: ");
2880 print_generic_expr (dump_file
, ref
->base
);
2884 fprintf (dump_file
, "ref: ");
2885 print_generic_expr (dump_file
, ref
->ref
);
2887 fprintf (dump_file
, " alias sets: %i->%i\n",
2888 ao_ref_base_alias_set (ref
),
2889 ao_ref_alias_set (ref
));
2893 alias_stats
.modref_use_may_alias
++;
2898 base
= ao_ref_base (ref
);
2902 /* If the reference is based on a decl that is not aliased the call
2903 cannot possibly use it. */
2905 && !may_be_aliased (base
)
2906 /* But local statics can be used through recursion. */
2907 && !is_global_var (base
))
2910 if (int res
= check_fnspec (call
, ref
, false))
2918 /* Check if base is a global static variable that is not read
2920 if (callee
!= NULL_TREE
&& VAR_P (base
) && TREE_STATIC (base
))
2922 struct cgraph_node
*node
= cgraph_node::get (callee
);
2926 /* FIXME: Callee can be an OMP builtin that does not have a call graph
2927 node yet. We should enforce that there are nodes for all decls in the
2928 IL and remove this check instead. */
2930 && (id
= ipa_reference_var_uid (base
)) != -1
2931 && (read
= ipa_reference_get_read_global (node
))
2932 && !bitmap_bit_p (read
, id
))
2936 /* Check if the base variable is call-used. */
2939 if (pt_solution_includes (gimple_call_use_set (call
), base
))
2942 else if ((TREE_CODE (base
) == MEM_REF
2943 || TREE_CODE (base
) == TARGET_MEM_REF
)
2944 && TREE_CODE (TREE_OPERAND (base
, 0)) == SSA_NAME
)
2946 struct ptr_info_def
*pi
= SSA_NAME_PTR_INFO (TREE_OPERAND (base
, 0));
2950 if (pt_solutions_intersect (gimple_call_use_set (call
), &pi
->pt
))
2956 /* Inspect call arguments for passed-by-value aliases. */
2958 for (i
= 0; i
< gimple_call_num_args (call
); ++i
)
2960 tree op
= gimple_call_arg (call
, i
);
2961 int flags
= gimple_call_arg_flags (call
, i
);
2963 if (flags
& (EAF_UNUSED
| EAF_NO_DIRECT_READ
))
2966 if (TREE_CODE (op
) == WITH_SIZE_EXPR
)
2967 op
= TREE_OPERAND (op
, 0);
2969 if (TREE_CODE (op
) != SSA_NAME
2970 && !is_gimple_min_invariant (op
))
2973 ao_ref_init (&r
, op
);
2974 if (refs_may_alias_p_1 (&r
, ref
, tbaa_p
))
2983 ref_maybe_used_by_call_p (gcall
*call
, ao_ref
*ref
, bool tbaa_p
)
2986 res
= ref_maybe_used_by_call_p_1 (call
, ref
, tbaa_p
);
2988 ++alias_stats
.ref_maybe_used_by_call_p_may_alias
;
2990 ++alias_stats
.ref_maybe_used_by_call_p_no_alias
;
2995 /* If the statement STMT may use the memory reference REF return
2996 true, otherwise return false. */
2999 ref_maybe_used_by_stmt_p (gimple
*stmt
, ao_ref
*ref
, bool tbaa_p
)
3001 if (is_gimple_assign (stmt
))
3005 /* All memory assign statements are single. */
3006 if (!gimple_assign_single_p (stmt
))
3009 rhs
= gimple_assign_rhs1 (stmt
);
3010 if (is_gimple_reg (rhs
)
3011 || is_gimple_min_invariant (rhs
)
3012 || gimple_assign_rhs_code (stmt
) == CONSTRUCTOR
)
3015 return refs_may_alias_p (rhs
, ref
, tbaa_p
);
3017 else if (is_gimple_call (stmt
))
3018 return ref_maybe_used_by_call_p (as_a
<gcall
*> (stmt
), ref
, tbaa_p
);
3019 else if (greturn
*return_stmt
= dyn_cast
<greturn
*> (stmt
))
3021 tree retval
= gimple_return_retval (return_stmt
);
3023 && TREE_CODE (retval
) != SSA_NAME
3024 && !is_gimple_min_invariant (retval
)
3025 && refs_may_alias_p (retval
, ref
, tbaa_p
))
3027 /* If ref escapes the function then the return acts as a use. */
3028 tree base
= ao_ref_base (ref
);
3031 else if (DECL_P (base
))
3032 return is_global_var (base
);
3033 else if (TREE_CODE (base
) == MEM_REF
3034 || TREE_CODE (base
) == TARGET_MEM_REF
)
3035 return ptr_deref_may_alias_global_p (TREE_OPERAND (base
, 0), false);
3043 ref_maybe_used_by_stmt_p (gimple
*stmt
, tree ref
, bool tbaa_p
)
3046 ao_ref_init (&r
, ref
);
3047 return ref_maybe_used_by_stmt_p (stmt
, &r
, tbaa_p
);
3050 /* If the call in statement CALL may clobber the memory reference REF
3051 return true, otherwise return false. */
3054 call_may_clobber_ref_p_1 (gcall
*call
, ao_ref
*ref
, bool tbaa_p
)
3059 /* If the call is pure or const it cannot clobber anything. */
3060 if (gimple_call_flags (call
)
3061 & (ECF_PURE
|ECF_CONST
|ECF_LOOPING_CONST_OR_PURE
|ECF_NOVOPS
))
3063 if (gimple_call_internal_p (call
))
3064 switch (auto fn
= gimple_call_internal_fn (call
))
3066 /* Treat these internal calls like ECF_PURE for aliasing,
3067 they don't write to any memory the program should care about.
3068 They have important other side-effects, and read memory,
3069 so can't be ECF_NOVOPS. */
3070 case IFN_UBSAN_NULL
:
3071 case IFN_UBSAN_BOUNDS
:
3072 case IFN_UBSAN_VPTR
:
3073 case IFN_UBSAN_OBJECT_SIZE
:
3075 case IFN_ASAN_CHECK
:
3077 case IFN_MASK_STORE
:
3079 case IFN_MASK_LEN_STORE
:
3080 case IFN_MASK_STORE_LANES
:
3081 case IFN_MASK_LEN_STORE_LANES
:
3083 tree rhs
= gimple_call_arg (call
,
3084 internal_fn_stored_value_index (fn
));
3086 ao_ref_init_from_ptr_and_size (&lhs_ref
, gimple_call_arg (call
, 0),
3087 TYPE_SIZE_UNIT (TREE_TYPE (rhs
)));
3088 /* We cannot make this a known-size access since otherwise
3089 we disambiguate against refs to decls that are smaller. */
3091 lhs_ref
.ref_alias_set
= lhs_ref
.base_alias_set
3092 = tbaa_p
? get_deref_alias_set
3093 (TREE_TYPE (gimple_call_arg (call
, 1))) : 0;
3094 return refs_may_alias_p_1 (ref
, &lhs_ref
, tbaa_p
);
3100 callee
= gimple_call_fndecl (call
);
3102 if (callee
!= NULL_TREE
&& !ref
->volatile_p
)
3104 struct cgraph_node
*node
= cgraph_node::get (callee
);
3107 modref_summary
*summary
= get_modref_function_summary (node
);
3110 if (!modref_may_conflict (call
, summary
->stores
, ref
, tbaa_p
)
3111 && (!summary
->writes_errno
3112 || !targetm
.ref_may_alias_errno (ref
)))
3114 alias_stats
.modref_clobber_no_alias
++;
3115 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3118 "ipa-modref: call stmt ");
3119 print_gimple_stmt (dump_file
, call
, 0);
3121 "ipa-modref: call to %s does not clobber ",
3122 node
->dump_name ());
3123 if (!ref
->ref
&& ref
->base
)
3125 fprintf (dump_file
, "base: ");
3126 print_generic_expr (dump_file
, ref
->base
);
3130 fprintf (dump_file
, "ref: ");
3131 print_generic_expr (dump_file
, ref
->ref
);
3133 fprintf (dump_file
, " alias sets: %i->%i\n",
3134 ao_ref_base_alias_set (ref
),
3135 ao_ref_alias_set (ref
));
3139 alias_stats
.modref_clobber_may_alias
++;
3144 base
= ao_ref_base (ref
);
3148 if (TREE_CODE (base
) == SSA_NAME
3149 || CONSTANT_CLASS_P (base
))
3152 /* A call that is not without side-effects might involve volatile
3153 accesses and thus conflicts with all other volatile accesses. */
3154 if (ref
->volatile_p
)
3157 /* If the reference is based on a decl that is not aliased the call
3158 cannot possibly clobber it. */
3160 && !may_be_aliased (base
)
3161 /* But local non-readonly statics can be modified through recursion
3162 or the call may implement a threading barrier which we must
3163 treat as may-def. */
3164 && (TREE_READONLY (base
)
3165 || !is_global_var (base
)))
3168 /* If the reference is based on a pointer that points to memory
3169 that may not be written to then the call cannot possibly clobber it. */
3170 if ((TREE_CODE (base
) == MEM_REF
3171 || TREE_CODE (base
) == TARGET_MEM_REF
)
3172 && TREE_CODE (TREE_OPERAND (base
, 0)) == SSA_NAME
3173 && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base
, 0)))
3176 if (int res
= check_fnspec (call
, ref
, true))
3184 /* Check if base is a global static variable that is not written
3186 if (callee
!= NULL_TREE
&& VAR_P (base
) && TREE_STATIC (base
))
3188 struct cgraph_node
*node
= cgraph_node::get (callee
);
3193 && (id
= ipa_reference_var_uid (base
)) != -1
3194 && (written
= ipa_reference_get_written_global (node
))
3195 && !bitmap_bit_p (written
, id
))
3199 /* Check if the base variable is call-clobbered. */
3201 return pt_solution_includes (gimple_call_clobber_set (call
), base
);
3202 else if ((TREE_CODE (base
) == MEM_REF
3203 || TREE_CODE (base
) == TARGET_MEM_REF
)
3204 && TREE_CODE (TREE_OPERAND (base
, 0)) == SSA_NAME
)
3206 struct ptr_info_def
*pi
= SSA_NAME_PTR_INFO (TREE_OPERAND (base
, 0));
3210 return pt_solutions_intersect (gimple_call_clobber_set (call
), &pi
->pt
);
3216 /* If the call in statement CALL may clobber the memory reference REF
3217 return true, otherwise return false. */
3220 call_may_clobber_ref_p (gcall
*call
, tree ref
, bool tbaa_p
)
3224 ao_ref_init (&r
, ref
);
3225 res
= call_may_clobber_ref_p_1 (call
, &r
, tbaa_p
);
3227 ++alias_stats
.call_may_clobber_ref_p_may_alias
;
3229 ++alias_stats
.call_may_clobber_ref_p_no_alias
;
3234 /* If the statement STMT may clobber the memory reference REF return true,
3235 otherwise return false. */
3238 stmt_may_clobber_ref_p_1 (gimple
*stmt
, ao_ref
*ref
, bool tbaa_p
)
3240 if (is_gimple_call (stmt
))
3242 tree lhs
= gimple_call_lhs (stmt
);
3244 && TREE_CODE (lhs
) != SSA_NAME
)
3247 ao_ref_init (&r
, lhs
);
3248 if (refs_may_alias_p_1 (ref
, &r
, tbaa_p
))
3252 return call_may_clobber_ref_p_1 (as_a
<gcall
*> (stmt
), ref
, tbaa_p
);
3254 else if (gimple_assign_single_p (stmt
))
3256 tree lhs
= gimple_assign_lhs (stmt
);
3257 if (TREE_CODE (lhs
) != SSA_NAME
)
3260 ao_ref_init (&r
, lhs
);
3261 return refs_may_alias_p_1 (ref
, &r
, tbaa_p
);
3264 else if (gimple_code (stmt
) == GIMPLE_ASM
)
3271 stmt_may_clobber_ref_p (gimple
*stmt
, tree ref
, bool tbaa_p
)
3274 ao_ref_init (&r
, ref
);
3275 return stmt_may_clobber_ref_p_1 (stmt
, &r
, tbaa_p
);
3278 /* Return true if store1 and store2 described by corresponding tuples
3279 <BASE, OFFSET, SIZE, MAX_SIZE> have the same size and store to the same
3283 same_addr_size_stores_p (tree base1
, poly_int64 offset1
, poly_int64 size1
,
3284 poly_int64 max_size1
,
3285 tree base2
, poly_int64 offset2
, poly_int64 size2
,
3286 poly_int64 max_size2
)
3288 /* Offsets need to be 0. */
3289 if (maybe_ne (offset1
, 0)
3290 || maybe_ne (offset2
, 0))
3293 bool base1_obj_p
= SSA_VAR_P (base1
);
3294 bool base2_obj_p
= SSA_VAR_P (base2
);
3296 /* We need one object. */
3297 if (base1_obj_p
== base2_obj_p
)
3299 tree obj
= base1_obj_p
? base1
: base2
;
3301 /* And we need one MEM_REF. */
3302 bool base1_memref_p
= TREE_CODE (base1
) == MEM_REF
;
3303 bool base2_memref_p
= TREE_CODE (base2
) == MEM_REF
;
3304 if (base1_memref_p
== base2_memref_p
)
3306 tree memref
= base1_memref_p
? base1
: base2
;
3308 /* Sizes need to be valid. */
3309 if (!known_size_p (max_size1
)
3310 || !known_size_p (max_size2
)
3311 || !known_size_p (size1
)
3312 || !known_size_p (size2
))
3315 /* Max_size needs to match size. */
3316 if (maybe_ne (max_size1
, size1
)
3317 || maybe_ne (max_size2
, size2
))
3320 /* Sizes need to match. */
3321 if (maybe_ne (size1
, size2
))
3325 /* Check that memref is a store to pointer with singleton points-to info. */
3326 if (!integer_zerop (TREE_OPERAND (memref
, 1)))
3328 tree ptr
= TREE_OPERAND (memref
, 0);
3329 if (TREE_CODE (ptr
) != SSA_NAME
)
3331 struct ptr_info_def
*pi
= SSA_NAME_PTR_INFO (ptr
);
3332 unsigned int pt_uid
;
3334 || !pt_solution_singleton_or_null_p (&pi
->pt
, &pt_uid
))
3337 /* Be conservative with non-call exceptions when the address might
3339 if (cfun
->can_throw_non_call_exceptions
&& pi
->pt
.null
)
3342 /* Check that ptr points relative to obj. */
3343 unsigned int obj_uid
= DECL_PT_UID (obj
);
3344 if (obj_uid
!= pt_uid
)
3347 /* Check that the object size is the same as the store size. That ensures us
3348 that ptr points to the start of obj. */
3349 return (DECL_SIZE (obj
)
3350 && poly_int_tree_p (DECL_SIZE (obj
))
3351 && known_eq (wi::to_poly_offset (DECL_SIZE (obj
)), size1
));
3354 /* Return true if REF is killed by an store described by
3355 BASE, OFFSET, SIZE and MAX_SIZE. */
3358 store_kills_ref_p (tree base
, poly_int64 offset
, poly_int64 size
,
3359 poly_int64 max_size
, ao_ref
*ref
)
3361 poly_int64 ref_offset
= ref
->offset
;
3362 /* We can get MEM[symbol: sZ, index: D.8862_1] here,
3363 so base == ref->base does not always hold. */
3364 if (base
!= ref
->base
)
3366 /* Try using points-to info. */
3367 if (same_addr_size_stores_p (base
, offset
, size
, max_size
, ref
->base
,
3368 ref
->offset
, ref
->size
, ref
->max_size
))
3371 /* If both base and ref->base are MEM_REFs, only compare the
3372 first operand, and if the second operand isn't equal constant,
3373 try to add the offsets into offset and ref_offset. */
3374 if (TREE_CODE (base
) == MEM_REF
&& TREE_CODE (ref
->base
) == MEM_REF
3375 && TREE_OPERAND (base
, 0) == TREE_OPERAND (ref
->base
, 0))
3377 if (!tree_int_cst_equal (TREE_OPERAND (base
, 1),
3378 TREE_OPERAND (ref
->base
, 1)))
3380 poly_offset_int off1
= mem_ref_offset (base
);
3381 off1
<<= LOG2_BITS_PER_UNIT
;
3383 poly_offset_int off2
= mem_ref_offset (ref
->base
);
3384 off2
<<= LOG2_BITS_PER_UNIT
;
3386 if (!off1
.to_shwi (&offset
) || !off2
.to_shwi (&ref_offset
))
3393 /* For a must-alias check we need to be able to constrain
3394 the access properly. */
3395 return (known_eq (size
, max_size
)
3396 && known_subrange_p (ref_offset
, ref
->max_size
, offset
, size
));
3399 /* If STMT kills the memory reference REF return true, otherwise
3403 stmt_kills_ref_p (gimple
*stmt
, ao_ref
*ref
)
3405 if (!ao_ref_base (ref
))
3408 if (gimple_has_lhs (stmt
)
3409 && TREE_CODE (gimple_get_lhs (stmt
)) != SSA_NAME
3410 /* The assignment is not necessarily carried out if it can throw
3411 and we can catch it in the current function where we could inspect
3412 the previous value. Similarly if the function can throw externally
3413 and the ref does not die on the function return.
3414 ??? We only need to care about the RHS throwing. For aggregate
3415 assignments or similar calls and non-call exceptions the LHS
3416 might throw as well.
3417 ??? We also should care about possible longjmp, but since we
3418 do not understand that longjmp is not using global memory we will
3419 not consider a kill here since the function call will be considered
3420 as possibly using REF. */
3421 && !stmt_can_throw_internal (cfun
, stmt
)
3422 && (!stmt_can_throw_external (cfun
, stmt
)
3423 || !ref_may_alias_global_p (ref
, false)))
3425 tree lhs
= gimple_get_lhs (stmt
);
3426 /* If LHS is literally a base of the access we are done. */
3429 tree base
= ref
->ref
;
3430 tree innermost_dropped_array_ref
= NULL_TREE
;
3431 if (handled_component_p (base
))
3433 tree saved_lhs0
= NULL_TREE
;
3434 if (handled_component_p (lhs
))
3436 saved_lhs0
= TREE_OPERAND (lhs
, 0);
3437 TREE_OPERAND (lhs
, 0) = integer_zero_node
;
3441 /* Just compare the outermost handled component, if
3442 they are equal we have found a possible common
3444 tree saved_base0
= TREE_OPERAND (base
, 0);
3445 TREE_OPERAND (base
, 0) = integer_zero_node
;
3446 bool res
= operand_equal_p (lhs
, base
, 0);
3447 TREE_OPERAND (base
, 0) = saved_base0
;
3450 /* Remember if we drop an array-ref that we need to
3451 double-check not being at struct end. */
3452 if (TREE_CODE (base
) == ARRAY_REF
3453 || TREE_CODE (base
) == ARRAY_RANGE_REF
)
3454 innermost_dropped_array_ref
= base
;
3455 /* Otherwise drop handled components of the access. */
3458 while (handled_component_p (base
));
3460 TREE_OPERAND (lhs
, 0) = saved_lhs0
;
3462 /* Finally check if the lhs has the same address and size as the
3463 base candidate of the access. Watch out if we have dropped
3464 an array-ref that might have flexible size, this means ref->ref
3465 may be outside of the TYPE_SIZE of its base. */
3466 if ((! innermost_dropped_array_ref
3467 || ! array_ref_flexible_size_p (innermost_dropped_array_ref
))
3469 || (((TYPE_SIZE (TREE_TYPE (lhs
))
3470 == TYPE_SIZE (TREE_TYPE (base
)))
3471 || (TYPE_SIZE (TREE_TYPE (lhs
))
3472 && TYPE_SIZE (TREE_TYPE (base
))
3473 && operand_equal_p (TYPE_SIZE (TREE_TYPE (lhs
)),
3474 TYPE_SIZE (TREE_TYPE (base
)),
3476 && operand_equal_p (lhs
, base
,
3478 | OEP_MATCH_SIDE_EFFECTS
))))
3480 ++alias_stats
.stmt_kills_ref_p_yes
;
3485 /* Now look for non-literal equal bases with the restriction of
3486 handling constant offset and size. */
3487 /* For a must-alias check we need to be able to constrain
3488 the access properly. */
3489 if (!ref
->max_size_known_p ())
3491 ++alias_stats
.stmt_kills_ref_p_no
;
3494 poly_int64 size
, offset
, max_size
;
3496 tree base
= get_ref_base_and_extent (lhs
, &offset
, &size
, &max_size
,
3498 if (store_kills_ref_p (base
, offset
, size
, max_size
, ref
))
3500 ++alias_stats
.stmt_kills_ref_p_yes
;
3505 if (is_gimple_call (stmt
))
3507 tree callee
= gimple_call_fndecl (stmt
);
3508 struct cgraph_node
*node
;
3509 modref_summary
*summary
;
3511 /* Try to disambiguate using modref summary. Modref records a vector
3512 of stores with known offsets relative to function parameters that must
3513 happen every execution of function. Find if we have a matching
3514 store and verify that function can not use the value. */
3515 if (callee
!= NULL_TREE
3516 && (node
= cgraph_node::get (callee
)) != NULL
3517 && node
->binds_to_current_def_p ()
3518 && (summary
= get_modref_function_summary (node
)) != NULL
3519 && summary
->kills
.length ()
3520 /* Check that we can not trap while evaulating function
3521 parameters. This check is overly conservative. */
3522 && (!cfun
->can_throw_non_call_exceptions
3523 || (!stmt_can_throw_internal (cfun
, stmt
)
3524 && (!stmt_can_throw_external (cfun
, stmt
)
3525 || !ref_may_alias_global_p (ref
, false)))))
3527 for (auto kill
: summary
->kills
)
3531 /* We only can do useful compares if we know the access range
3533 if (!kill
.get_ao_ref (as_a
<gcall
*> (stmt
), &dref
))
3535 if (store_kills_ref_p (ao_ref_base (&dref
), dref
.offset
,
3536 dref
.size
, dref
.max_size
, ref
))
3538 /* For store to be killed it needs to not be used
3540 if (ref_maybe_used_by_call_p_1 (as_a
<gcall
*> (stmt
), ref
,
3542 || !dbg_cnt (ipa_mod_ref
))
3544 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3547 "ipa-modref: call stmt ");
3548 print_gimple_stmt (dump_file
, stmt
, 0);
3550 "ipa-modref: call to %s kills ",
3551 node
->dump_name ());
3552 print_generic_expr (dump_file
, ref
->base
);
3553 fprintf (dump_file
, "\n");
3555 ++alias_stats
.modref_kill_yes
;
3559 ++alias_stats
.modref_kill_no
;
3561 if (callee
!= NULL_TREE
3562 && gimple_call_builtin_p (stmt
, BUILT_IN_NORMAL
))
3563 switch (DECL_FUNCTION_CODE (callee
))
3567 tree ptr
= gimple_call_arg (stmt
, 0);
3568 tree base
= ao_ref_base (ref
);
3569 if (base
&& TREE_CODE (base
) == MEM_REF
3570 && TREE_OPERAND (base
, 0) == ptr
)
3572 ++alias_stats
.stmt_kills_ref_p_yes
;
3578 case BUILT_IN_MEMCPY
:
3579 case BUILT_IN_MEMPCPY
:
3580 case BUILT_IN_MEMMOVE
:
3581 case BUILT_IN_MEMSET
:
3582 case BUILT_IN_MEMCPY_CHK
:
3583 case BUILT_IN_MEMPCPY_CHK
:
3584 case BUILT_IN_MEMMOVE_CHK
:
3585 case BUILT_IN_MEMSET_CHK
:
3586 case BUILT_IN_STRNCPY
:
3587 case BUILT_IN_STPNCPY
:
3588 case BUILT_IN_CALLOC
:
3590 /* For a must-alias check we need to be able to constrain
3591 the access properly. */
3592 if (!ref
->max_size_known_p ())
3594 ++alias_stats
.stmt_kills_ref_p_no
;
3600 /* In execution order a calloc call will never kill
3601 anything. However, DSE will (ab)use this interface
3602 to ask if a calloc call writes the same memory locations
3603 as a later assignment, memset, etc. So handle calloc
3604 in the expected way. */
3605 if (DECL_FUNCTION_CODE (callee
) == BUILT_IN_CALLOC
)
3607 tree arg0
= gimple_call_arg (stmt
, 0);
3608 tree arg1
= gimple_call_arg (stmt
, 1);
3609 if (TREE_CODE (arg0
) != INTEGER_CST
3610 || TREE_CODE (arg1
) != INTEGER_CST
)
3612 ++alias_stats
.stmt_kills_ref_p_no
;
3616 dest
= gimple_call_lhs (stmt
);
3619 ++alias_stats
.stmt_kills_ref_p_no
;
3622 len
= fold_build2 (MULT_EXPR
, TREE_TYPE (arg0
), arg0
, arg1
);
3626 dest
= gimple_call_arg (stmt
, 0);
3627 len
= gimple_call_arg (stmt
, 2);
3629 if (!poly_int_tree_p (len
))
3632 ao_ref_init_from_ptr_and_size (&dref
, dest
, len
);
3633 if (store_kills_ref_p (ao_ref_base (&dref
), dref
.offset
,
3634 dref
.size
, dref
.max_size
, ref
))
3636 ++alias_stats
.stmt_kills_ref_p_yes
;
3642 case BUILT_IN_VA_END
:
3644 tree ptr
= gimple_call_arg (stmt
, 0);
3645 if (TREE_CODE (ptr
) == ADDR_EXPR
)
3647 tree base
= ao_ref_base (ref
);
3648 if (TREE_OPERAND (ptr
, 0) == base
)
3650 ++alias_stats
.stmt_kills_ref_p_yes
;
3660 ++alias_stats
.stmt_kills_ref_p_no
;
3665 stmt_kills_ref_p (gimple
*stmt
, tree ref
)
3668 ao_ref_init (&r
, ref
);
3669 return stmt_kills_ref_p (stmt
, &r
);
3673 /* Walk the virtual use-def chain of VUSE until hitting the virtual operand
3674 TARGET or a statement clobbering the memory reference REF in which
3675 case false is returned. The walk starts with VUSE, one argument of PHI. */
3678 maybe_skip_until (gimple
*phi
, tree
&target
, basic_block target_bb
,
3679 ao_ref
*ref
, tree vuse
, bool tbaa_p
, unsigned int &limit
,
3680 bitmap
*visited
, bool abort_on_visited
,
3681 void *(*translate
)(ao_ref
*, tree
, void *, translate_flags
*),
3682 translate_flags disambiguate_only
,
3685 basic_block bb
= gimple_bb (phi
);
3689 *visited
= BITMAP_ALLOC (NULL
);
3690 bitmap_tree_view (*visited
);
3693 bitmap_set_bit (*visited
, SSA_NAME_VERSION (PHI_RESULT (phi
)));
3695 /* Walk until we hit the target. */
3696 while (vuse
!= target
)
3698 gimple
*def_stmt
= SSA_NAME_DEF_STMT (vuse
);
3699 /* If we are searching for the target VUSE by walking up to
3700 TARGET_BB dominating the original PHI we are finished once
3701 we reach a default def or a definition in a block dominating
3702 that block. Update TARGET and return. */
3704 && (gimple_nop_p (def_stmt
)
3705 || dominated_by_p (CDI_DOMINATORS
,
3706 target_bb
, gimple_bb (def_stmt
))))
3712 /* Recurse for PHI nodes. */
3713 if (gimple_code (def_stmt
) == GIMPLE_PHI
)
3715 /* An already visited PHI node ends the walk successfully. */
3716 if (bitmap_bit_p (*visited
, SSA_NAME_VERSION (PHI_RESULT (def_stmt
))))
3717 return !abort_on_visited
;
3718 vuse
= get_continuation_for_phi (def_stmt
, ref
, tbaa_p
, limit
,
3719 visited
, abort_on_visited
,
3720 translate
, data
, disambiguate_only
);
3725 else if (gimple_nop_p (def_stmt
))
3729 /* A clobbering statement or the end of the IL ends it failing. */
3730 if ((int)limit
<= 0)
3733 if (stmt_may_clobber_ref_p_1 (def_stmt
, ref
, tbaa_p
))
3735 translate_flags tf
= disambiguate_only
;
3737 && (*translate
) (ref
, vuse
, data
, &tf
) == NULL
)
3743 /* If we reach a new basic-block see if we already skipped it
3744 in a previous walk that ended successfully. */
3745 if (gimple_bb (def_stmt
) != bb
)
3747 if (!bitmap_set_bit (*visited
, SSA_NAME_VERSION (vuse
)))
3748 return !abort_on_visited
;
3749 bb
= gimple_bb (def_stmt
);
3751 vuse
= gimple_vuse (def_stmt
);
3757 /* Starting from a PHI node for the virtual operand of the memory reference
3758 REF find a continuation virtual operand that allows to continue walking
3759 statements dominating PHI skipping only statements that cannot possibly
3760 clobber REF. Decrements LIMIT for each alias disambiguation done
3761 and aborts the walk, returning NULL_TREE if it reaches zero.
3762 Returns NULL_TREE if no suitable virtual operand can be found. */
3765 get_continuation_for_phi (gimple
*phi
, ao_ref
*ref
, bool tbaa_p
,
3766 unsigned int &limit
, bitmap
*visited
,
3767 bool abort_on_visited
,
3768 void *(*translate
)(ao_ref
*, tree
, void *,
3771 translate_flags disambiguate_only
)
3773 unsigned nargs
= gimple_phi_num_args (phi
);
3775 /* Through a single-argument PHI we can simply look through. */
3777 return PHI_ARG_DEF (phi
, 0);
3779 /* For two or more arguments try to pairwise skip non-aliasing code
3780 until we hit the phi argument definition that dominates the other one. */
3781 basic_block phi_bb
= gimple_bb (phi
);
3785 /* Find a candidate for the virtual operand which definition
3786 dominates those of all others. */
3787 /* First look if any of the args themselves satisfy this. */
3788 for (i
= 0; i
< nargs
; ++i
)
3790 arg0
= PHI_ARG_DEF (phi
, i
);
3791 if (SSA_NAME_IS_DEFAULT_DEF (arg0
))
3793 basic_block def_bb
= gimple_bb (SSA_NAME_DEF_STMT (arg0
));
3794 if (def_bb
!= phi_bb
3795 && dominated_by_p (CDI_DOMINATORS
, phi_bb
, def_bb
))
3799 /* If not, look if we can reach such candidate by walking defs
3800 until we hit the immediate dominator. maybe_skip_until will
3802 basic_block dom
= get_immediate_dominator (CDI_DOMINATORS
, phi_bb
);
3804 /* Then check against the (to be) found candidate. */
3805 for (i
= 0; i
< nargs
; ++i
)
3807 arg1
= PHI_ARG_DEF (phi
, i
);
3810 else if (! maybe_skip_until (phi
, arg0
, dom
, ref
, arg1
, tbaa_p
,
3814 /* Do not valueize when walking over
3818 gimple_bb (SSA_NAME_DEF_STMT (arg1
)),
3821 : disambiguate_only
, data
))
3828 /* Based on the memory reference REF and its virtual use VUSE call
3829 WALKER for each virtual use that is equivalent to VUSE, including VUSE
3830 itself. That is, for each virtual use for which its defining statement
3831 does not clobber REF.
3833 WALKER is called with REF, the current virtual use and DATA. If
3834 WALKER returns non-NULL the walk stops and its result is returned.
3835 At the end of a non-successful walk NULL is returned.
3837 TRANSLATE if non-NULL is called with a pointer to REF, the virtual
3838 use which definition is a statement that may clobber REF and DATA.
3839 If TRANSLATE returns (void *)-1 the walk stops and NULL is returned.
3840 If TRANSLATE returns non-NULL the walk stops and its result is returned.
3841 If TRANSLATE returns NULL the walk continues and TRANSLATE is supposed
3842 to adjust REF and *DATA to make that valid.
3844 VALUEIZE if non-NULL is called with the next VUSE that is considered
3845 and return value is substituted for that. This can be used to
3846 implement optimistic value-numbering for example. Note that the
3847 VUSE argument is assumed to be valueized already.
3849 LIMIT specifies the number of alias queries we are allowed to do,
3850 the walk stops when it reaches zero and NULL is returned. LIMIT
3851 is decremented by the number of alias queries (plus adjustments
3852 done by the callbacks) upon return.
3854 TODO: Cache the vector of equivalent vuses per ref, vuse pair. */
3857 walk_non_aliased_vuses (ao_ref
*ref
, tree vuse
, bool tbaa_p
,
3858 void *(*walker
)(ao_ref
*, tree
, void *),
3859 void *(*translate
)(ao_ref
*, tree
, void *,
3861 tree (*valueize
)(tree
),
3862 unsigned &limit
, void *data
)
3864 bitmap visited
= NULL
;
3866 bool translated
= false;
3868 timevar_push (TV_ALIAS_STMT_WALK
);
3874 /* ??? Do we want to account this to TV_ALIAS_STMT_WALK? */
3875 res
= (*walker
) (ref
, vuse
, data
);
3877 if (res
== (void *)-1)
3882 /* Lookup succeeded. */
3883 else if (res
!= NULL
)
3888 vuse
= valueize (vuse
);
3895 def_stmt
= SSA_NAME_DEF_STMT (vuse
);
3896 if (gimple_nop_p (def_stmt
))
3898 else if (gimple_code (def_stmt
) == GIMPLE_PHI
)
3899 vuse
= get_continuation_for_phi (def_stmt
, ref
, tbaa_p
, limit
,
3900 &visited
, translated
, translate
, data
);
3903 if ((int)limit
<= 0)
3909 if (stmt_may_clobber_ref_p_1 (def_stmt
, ref
, tbaa_p
))
3913 translate_flags disambiguate_only
= TR_TRANSLATE
;
3914 res
= (*translate
) (ref
, vuse
, data
, &disambiguate_only
);
3915 /* Failed lookup and translation. */
3916 if (res
== (void *)-1)
3921 /* Lookup succeeded. */
3922 else if (res
!= NULL
)
3924 /* Translation succeeded, continue walking. */
3925 translated
= translated
|| disambiguate_only
== TR_TRANSLATE
;
3927 vuse
= gimple_vuse (def_stmt
);
3933 BITMAP_FREE (visited
);
3935 timevar_pop (TV_ALIAS_STMT_WALK
);
3941 /* Based on the memory reference REF call WALKER for each vdef whose
3942 defining statement may clobber REF, starting with VDEF. If REF
3943 is NULL_TREE, each defining statement is visited.
3945 WALKER is called with REF, the current vdef and DATA. If WALKER
3946 returns true the walk is stopped, otherwise it continues.
3948 If function entry is reached, FUNCTION_ENTRY_REACHED is set to true.
3949 The pointer may be NULL and then we do not track this information.
3951 At PHI nodes walk_aliased_vdefs forks into one walk for each
3952 PHI argument (but only one walk continues at merge points), the
3953 return value is true if any of the walks was successful.
3955 The function returns the number of statements walked or -1 if
3956 LIMIT stmts were walked and the walk was aborted at this point.
3957 If LIMIT is zero the walk is not aborted. */
3960 walk_aliased_vdefs_1 (ao_ref
*ref
, tree vdef
,
3961 bool (*walker
)(ao_ref
*, tree
, void *), void *data
,
3962 bitmap
*visited
, unsigned int cnt
,
3963 bool *function_entry_reached
, unsigned limit
)
3967 gimple
*def_stmt
= SSA_NAME_DEF_STMT (vdef
);
3970 && !bitmap_set_bit (*visited
, SSA_NAME_VERSION (vdef
)))
3973 if (gimple_nop_p (def_stmt
))
3975 if (function_entry_reached
)
3976 *function_entry_reached
= true;
3979 else if (gimple_code (def_stmt
) == GIMPLE_PHI
)
3984 *visited
= BITMAP_ALLOC (NULL
);
3985 bitmap_tree_view (*visited
);
3987 for (i
= 0; i
< gimple_phi_num_args (def_stmt
); ++i
)
3989 int res
= walk_aliased_vdefs_1 (ref
,
3990 gimple_phi_arg_def (def_stmt
, i
),
3991 walker
, data
, visited
, cnt
,
3992 function_entry_reached
, limit
);
4000 /* ??? Do we want to account this to TV_ALIAS_STMT_WALK? */
4005 || stmt_may_clobber_ref_p_1 (def_stmt
, ref
))
4006 && (*walker
) (ref
, vdef
, data
))
4009 vdef
= gimple_vuse (def_stmt
);
4015 walk_aliased_vdefs (ao_ref
*ref
, tree vdef
,
4016 bool (*walker
)(ao_ref
*, tree
, void *), void *data
,
4018 bool *function_entry_reached
, unsigned int limit
)
4020 bitmap local_visited
= NULL
;
4023 timevar_push (TV_ALIAS_STMT_WALK
);
4025 if (function_entry_reached
)
4026 *function_entry_reached
= false;
4028 ret
= walk_aliased_vdefs_1 (ref
, vdef
, walker
, data
,
4029 visited
? visited
: &local_visited
, 0,
4030 function_entry_reached
, limit
);
4032 BITMAP_FREE (local_visited
);
4034 timevar_pop (TV_ALIAS_STMT_WALK
);
4039 /* Verify validity of the fnspec string.
4040 See attr-fnspec.h for details. */
4043 attr_fnspec::verify ()
4049 /* Check return value specifier. */
4050 if (len
< return_desc_size
)
4052 else if ((len
- return_desc_size
) % arg_desc_size
)
4054 else if ((str
[0] < '1' || str
[0] > '4')
4055 && str
[0] != '.' && str
[0] != 'm')
4070 internal_error ("invalid fn spec attribute \"%s\"", str
);
4072 /* Now check all parameters. */
4073 for (unsigned int i
= 0; arg_specified_p (i
); i
++)
4075 unsigned int idx
= arg_idx (i
);
4087 if ((str
[idx
+ 1] >= '1' && str
[idx
+ 1] <= '9')
4088 || str
[idx
+ 1] == 't')
4090 if (str
[idx
] != 'r' && str
[idx
] != 'R'
4091 && str
[idx
] != 'w' && str
[idx
] != 'W'
4092 && str
[idx
] != 'o' && str
[idx
] != 'O')
4094 if (str
[idx
+ 1] != 't'
4095 /* Size specified is scalar, so it should be described
4096 by ". " if specified at all. */
4097 && (arg_specified_p (str
[idx
+ 1] - '1')
4098 && str
[arg_idx (str
[idx
+ 1] - '1')] != '.'))
4101 else if (str
[idx
+ 1] != ' ')
4105 if (str
[idx
] < '1' || str
[idx
] > '9')
4109 internal_error ("invalid fn spec attribute \"%s\" arg %i", str
, i
);
4113 /* Return ture if TYPE1 and TYPE2 will always give the same answer
4114 when compared wit hother types using same_type_for_tbaa_p. */
4117 types_equal_for_same_type_for_tbaa_p (tree type1
, tree type2
,
4118 bool lto_streaming_safe
)
4120 /* We use same_type_for_tbaa_p to match types in the access path.
4121 This check is overly conservative. */
4122 type1
= TYPE_MAIN_VARIANT (type1
);
4123 type2
= TYPE_MAIN_VARIANT (type2
);
4125 if (TYPE_STRUCTURAL_EQUALITY_P (type1
)
4126 != TYPE_STRUCTURAL_EQUALITY_P (type2
))
4128 if (TYPE_STRUCTURAL_EQUALITY_P (type1
))
4131 if (lto_streaming_safe
)
4132 return type1
== type2
;
4134 return TYPE_CANONICAL (type1
) == TYPE_CANONICAL (type2
);
4137 /* Compare REF1 and REF2 and return flags specifying their differences.
4138 If LTO_STREAMING_SAFE is true do not use alias sets and canonical
4139 types that are going to be recomputed.
4140 If TBAA is true also compare TBAA metadata. */
4143 ao_compare::compare_ao_refs (ao_ref
*ref1
, ao_ref
*ref2
,
4144 bool lto_streaming_safe
,
4147 if (TREE_THIS_VOLATILE (ref1
->ref
) != TREE_THIS_VOLATILE (ref2
->ref
))
4149 tree base1
= ao_ref_base (ref1
);
4150 tree base2
= ao_ref_base (ref2
);
4152 if (!known_eq (ref1
->offset
, ref2
->offset
)
4153 || !known_eq (ref1
->size
, ref2
->size
)
4154 || !known_eq (ref1
->max_size
, ref2
->max_size
))
4157 /* For variable accesses we need to compare actual paths
4158 to check that both refs are accessing same address and the access size. */
4159 if (!known_eq (ref1
->size
, ref1
->max_size
))
4161 if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (ref1
->ref
)),
4162 TYPE_SIZE (TREE_TYPE (ref2
->ref
)), 0))
4164 tree r1
= ref1
->ref
;
4165 tree r2
= ref2
->ref
;
4167 /* Handle toplevel COMPONENT_REFs of bitfields.
4168 Those are special since they are not allowed in
4170 if (TREE_CODE (r1
) == COMPONENT_REF
4171 && DECL_BIT_FIELD (TREE_OPERAND (r1
, 1)))
4173 if (TREE_CODE (r2
) != COMPONENT_REF
4174 || !DECL_BIT_FIELD (TREE_OPERAND (r2
, 1)))
4176 tree field1
= TREE_OPERAND (r1
, 1);
4177 tree field2
= TREE_OPERAND (r2
, 1);
4178 if (!operand_equal_p (DECL_FIELD_OFFSET (field1
),
4179 DECL_FIELD_OFFSET (field2
), 0)
4180 || !operand_equal_p (DECL_FIELD_BIT_OFFSET (field1
),
4181 DECL_FIELD_BIT_OFFSET (field2
), 0)
4182 || !operand_equal_p (DECL_SIZE (field1
), DECL_SIZE (field2
), 0)
4183 || !types_compatible_p (TREE_TYPE (r1
),
4186 r1
= TREE_OPERAND (r1
, 0);
4187 r2
= TREE_OPERAND (r2
, 0);
4189 else if (TREE_CODE (r2
) == COMPONENT_REF
4190 && DECL_BIT_FIELD (TREE_OPERAND (r2
, 1)))
4193 /* Similarly for bit field refs. */
4194 if (TREE_CODE (r1
) == BIT_FIELD_REF
)
4196 if (TREE_CODE (r2
) != BIT_FIELD_REF
4197 || !operand_equal_p (TREE_OPERAND (r1
, 1),
4198 TREE_OPERAND (r2
, 1), 0)
4199 || !operand_equal_p (TREE_OPERAND (r1
, 2),
4200 TREE_OPERAND (r2
, 2), 0)
4201 || !types_compatible_p (TREE_TYPE (r1
),
4204 r1
= TREE_OPERAND (r1
, 0);
4205 r2
= TREE_OPERAND (r2
, 0);
4207 else if (TREE_CODE (r2
) == BIT_FIELD_REF
)
4210 /* Now we can compare the address of actual memory access. */
4211 if (!operand_equal_p (r1
, r2
, OEP_ADDRESS_OF
| OEP_MATCH_SIDE_EFFECTS
))
4214 /* For constant accesses we get more matches by comparing offset only. */
4215 else if (!operand_equal_p (base1
, base2
,
4216 OEP_ADDRESS_OF
| OEP_MATCH_SIDE_EFFECTS
))
4219 /* We can't simply use get_object_alignment_1 on the full
4220 reference as for accesses with variable indexes this reports
4221 too conservative alignment. */
4222 unsigned int align1
, align2
;
4223 unsigned HOST_WIDE_INT bitpos1
, bitpos2
;
4224 bool known1
= get_object_alignment_1 (base1
, &align1
, &bitpos1
);
4225 bool known2
= get_object_alignment_1 (base2
, &align2
, &bitpos2
);
4226 /* ??? For MEMREF get_object_alignment_1 determines aligned from
4227 TYPE_ALIGN but still returns false. This seem to contradict
4228 its description. So compare even if alignment is unknown. */
4229 if (known1
!= known2
4230 || (bitpos1
!= bitpos2
|| align1
!= align2
))
4233 /* Now we know that accesses are semantically same. */
4236 /* ao_ref_base strips inner MEM_REF [&decl], recover from that here. */
4237 tree rbase1
= ref1
->ref
;
4239 while (handled_component_p (rbase1
))
4240 rbase1
= TREE_OPERAND (rbase1
, 0);
4241 tree rbase2
= ref2
->ref
;
4242 while (handled_component_p (rbase2
))
4243 rbase2
= TREE_OPERAND (rbase2
, 0);
4245 /* MEM_REFs and TARGET_MEM_REFs record dependence cliques which are used to
4246 implement restrict pointers. MR_DEPENDENCE_CLIQUE 0 means no information.
4247 Otherwise we need to match bases and cliques. */
4248 if ((((TREE_CODE (rbase1
) == MEM_REF
|| TREE_CODE (rbase1
) == TARGET_MEM_REF
)
4249 && MR_DEPENDENCE_CLIQUE (rbase1
))
4250 || ((TREE_CODE (rbase2
) == MEM_REF
|| TREE_CODE (rbase2
) == TARGET_MEM_REF
)
4251 && MR_DEPENDENCE_CLIQUE (rbase2
)))
4252 && (TREE_CODE (rbase1
) != TREE_CODE (rbase2
)
4253 || MR_DEPENDENCE_CLIQUE (rbase1
) != MR_DEPENDENCE_CLIQUE (rbase2
)
4254 || (MR_DEPENDENCE_BASE (rbase1
) != MR_DEPENDENCE_BASE (rbase2
))))
4255 flags
|= DEPENDENCE_CLIQUE
;
4260 /* Alias sets are not stable across LTO sreaming; be conservative here
4261 and compare types the alias sets are ultimately based on. */
4262 if (lto_streaming_safe
)
4264 tree t1
= ao_ref_alias_ptr_type (ref1
);
4265 tree t2
= ao_ref_alias_ptr_type (ref2
);
4266 if (!alias_ptr_types_compatible_p (t1
, t2
))
4267 flags
|= REF_ALIAS_SET
;
4269 t1
= ao_ref_base_alias_ptr_type (ref1
);
4270 t2
= ao_ref_base_alias_ptr_type (ref2
);
4271 if (!alias_ptr_types_compatible_p (t1
, t2
))
4272 flags
|= BASE_ALIAS_SET
;
4276 if (ao_ref_alias_set (ref1
) != ao_ref_alias_set (ref2
))
4277 flags
|= REF_ALIAS_SET
;
4278 if (ao_ref_base_alias_set (ref1
) != ao_ref_base_alias_set (ref2
))
4279 flags
|= BASE_ALIAS_SET
;
4282 /* Access path is used only on non-view-converted references. */
4283 bool view_converted
= view_converted_memref_p (rbase1
);
4284 if (view_converted_memref_p (rbase2
) != view_converted
)
4285 return flags
| ACCESS_PATH
;
4286 else if (view_converted
)
4290 /* Find start of access paths and look for trailing arrays. */
4291 tree c1
= ref1
->ref
, c2
= ref2
->ref
;
4292 tree end_struct_ref1
= NULL
, end_struct_ref2
= NULL
;
4293 int nskipped1
= 0, nskipped2
= 0;
4296 for (tree p1
= ref1
->ref
; handled_component_p (p1
); p1
= TREE_OPERAND (p1
, 0))
4298 if (component_ref_to_zero_sized_trailing_array_p (p1
))
4299 end_struct_ref1
= p1
;
4300 if (ends_tbaa_access_path_p (p1
))
4301 c1
= p1
, nskipped1
= i
;
4304 for (tree p2
= ref2
->ref
; handled_component_p (p2
); p2
= TREE_OPERAND (p2
, 0))
4306 if (component_ref_to_zero_sized_trailing_array_p (p2
))
4307 end_struct_ref2
= p2
;
4308 if (ends_tbaa_access_path_p (p2
))
4309 c2
= p2
, nskipped1
= i
;
4313 /* For variable accesses we can not rely on offset match bellow.
4314 We know that paths are struturally same, so only check that
4315 starts of TBAA paths did not diverge. */
4316 if (!known_eq (ref1
->size
, ref1
->max_size
)
4317 && nskipped1
!= nskipped2
)
4318 return flags
| ACCESS_PATH
;
4320 /* Information about trailing refs is used by
4321 aliasing_component_refs_p that is applied only if paths
4322 has handled components.. */
4323 if (!handled_component_p (c1
) && !handled_component_p (c2
))
4325 else if ((end_struct_ref1
!= NULL
) != (end_struct_ref2
!= NULL
))
4326 return flags
| ACCESS_PATH
;
4328 && TYPE_MAIN_VARIANT (TREE_TYPE (end_struct_ref1
))
4329 != TYPE_MAIN_VARIANT (TREE_TYPE (end_struct_ref2
)))
4330 return flags
| ACCESS_PATH
;
4332 /* Now compare all handled components of the access path.
4333 We have three oracles that cares about access paths:
4334 - aliasing_component_refs_p
4335 - nonoverlapping_refs_since_match_p
4336 - nonoverlapping_component_refs_p
4337 We need to match things these oracles compare.
4339 It is only necessary to check types for compatibility
4340 and offsets. Rest of what oracles compares are actual
4341 addresses. Those are already known to be same:
4342 - for constant accesses we check offsets
4343 - for variable accesses we already matched
4344 the path lexically with operand_equal_p. */
4347 bool comp1
= handled_component_p (c1
);
4348 bool comp2
= handled_component_p (c2
);
4351 return flags
| ACCESS_PATH
;
4355 if (TREE_CODE (c1
) != TREE_CODE (c2
))
4356 return flags
| ACCESS_PATH
;
4358 /* aliasing_component_refs_p attempts to find type match within
4359 the paths. For that reason both types needs to be equal
4360 with respect to same_type_for_tbaa_p. */
4361 if (!types_equal_for_same_type_for_tbaa_p (TREE_TYPE (c1
),
4363 lto_streaming_safe
))
4364 return flags
| ACCESS_PATH
;
4365 if (component_ref_to_zero_sized_trailing_array_p (c1
)
4366 != component_ref_to_zero_sized_trailing_array_p (c2
))
4367 return flags
| ACCESS_PATH
;
4369 /* aliasing_matching_component_refs_p compares
4370 offsets within the path. Other properties are ignored.
4371 Do not bother to verify offsets in variable accesses. Here we
4372 already compared them by operand_equal_p so they are
4373 structurally same. */
4374 if (!known_eq (ref1
->size
, ref1
->max_size
))
4376 poly_int64 offadj1
, sztmc1
, msztmc1
;
4378 get_ref_base_and_extent (c1
, &offadj1
, &sztmc1
, &msztmc1
, &reverse1
);
4379 poly_int64 offadj2
, sztmc2
, msztmc2
;
4381 get_ref_base_and_extent (c2
, &offadj2
, &sztmc2
, &msztmc2
, &reverse2
);
4382 if (!known_eq (offadj1
, offadj2
))
4383 return flags
| ACCESS_PATH
;
4385 c1
= TREE_OPERAND (c1
, 0);
4386 c2
= TREE_OPERAND (c2
, 0);
4388 /* Finally test the access type. */
4389 if (!types_equal_for_same_type_for_tbaa_p (TREE_TYPE (c1
),
4391 lto_streaming_safe
))
4392 return flags
| ACCESS_PATH
;
4396 /* Hash REF to HSTATE. If LTO_STREAMING_SAFE do not use alias sets
4397 and canonical types. */
4399 ao_compare::hash_ao_ref (ao_ref
*ref
, bool lto_streaming_safe
, bool tbaa
,
4400 inchash::hash
&hstate
)
4402 tree base
= ao_ref_base (ref
);
4405 if (!known_eq (ref
->size
, ref
->max_size
))
4408 if (TREE_CODE (r
) == COMPONENT_REF
4409 && DECL_BIT_FIELD (TREE_OPERAND (r
, 1)))
4411 tree field
= TREE_OPERAND (r
, 1);
4412 hash_operand (DECL_FIELD_OFFSET (field
), hstate
, 0);
4413 hash_operand (DECL_FIELD_BIT_OFFSET (field
), hstate
, 0);
4414 hash_operand (DECL_SIZE (field
), hstate
, 0);
4415 r
= TREE_OPERAND (r
, 0);
4417 if (TREE_CODE (r
) == BIT_FIELD_REF
)
4419 hash_operand (TREE_OPERAND (r
, 1), hstate
, 0);
4420 hash_operand (TREE_OPERAND (r
, 2), hstate
, 0);
4421 r
= TREE_OPERAND (r
, 0);
4423 hash_operand (TYPE_SIZE (TREE_TYPE (ref
->ref
)), hstate
, 0);
4424 hash_operand (r
, hstate
, OEP_ADDRESS_OF
| OEP_MATCH_SIDE_EFFECTS
);
4428 hash_operand (tbase
, hstate
, OEP_ADDRESS_OF
| OEP_MATCH_SIDE_EFFECTS
);
4429 hstate
.add_poly_int (ref
->offset
);
4430 hstate
.add_poly_int (ref
->size
);
4431 hstate
.add_poly_int (ref
->max_size
);
4433 if (!lto_streaming_safe
&& tbaa
)
4435 hstate
.add_int (ao_ref_alias_set (ref
));
4436 hstate
.add_int (ao_ref_base_alias_set (ref
));