gcc/tree-ssa-alias.c
/* Alias analysis for trees.
   Copyright (C) 2004-2020 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "timevar.h"	/* for TV_ALIAS_STMT_WALK */
#include "ssa.h"
#include "cgraph.h"
#include "tree-pretty-print.h"
#include "alias.h"
#include "fold-const.h"
#include "langhooks.h"
#include "dumpfile.h"
#include "tree-eh.h"
#include "tree-dfa.h"
#include "ipa-reference.h"
#include "varasm.h"
#include "ipa-modref-tree.h"
#include "ipa-modref.h"
#include "attr-fnspec.h"
#include "errors.h"
#include "dbgcnt.h"
#include "gimple-pretty-print.h"
#include "print-tree.h"
#include "tree-ssa-alias-compare.h"
#include "builtins.h"
/* Broad overview of how alias analysis on gimple works:

   Statements clobbering or using memory are linked through the
   virtual operand factored use-def chain.  The virtual operand
   is unique per function, its symbol is accessible via gimple_vop (cfun).
   Virtual operands are used for efficiently walking memory statements
   in the gimple IL and are useful for things like value-numbering as
   a generation count for memory references.

   SSA_NAME pointers may have associated points-to information
   accessible via the SSA_NAME_PTR_INFO macro.  Flow-insensitive
   points-to information is (re-)computed by the TODO_rebuild_alias
   pass manager todo.  Points-to information is also used for more
   precise tracking of call-clobbered and call-used variables and
   related disambiguations.

   This file contains functions for disambiguating memory references,
   the so-called alias-oracle, and tools for walking the gimple IL.

   The main alias-oracle entry-points are

   bool stmt_may_clobber_ref_p (gimple *, tree)

     This function queries if a statement may invalidate (parts of)
     the memory designated by the reference tree argument.

   bool ref_maybe_used_by_stmt_p (gimple *, tree)

     This function queries if a statement may need (parts of) the
     memory designated by the reference tree argument.

   There are variants of these functions that only handle the call
   part of a statement, call_may_clobber_ref_p and ref_maybe_used_by_call_p.
   Note that these do not disambiguate against a possible call lhs.

   bool refs_may_alias_p (tree, tree)

     This function tries to disambiguate two reference trees.

   bool ptr_deref_may_alias_global_p (tree)

     This function queries if dereferencing a pointer variable may
     alias global memory.

   More low-level disambiguators are available and documented in
   this file.  Low-level disambiguators dealing with points-to
   information are in tree-ssa-structalias.c.  */
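
/* Illustrative sketch (editor's addition, not part of the original file):
   how a hypothetical pass might query the two main oracle entry points
   for a statement STMT and a reference tree REF.  Wrapped in #if 0 so it
   does not affect the build.  */
#if 0
static bool
example_oracle_query (gimple *stmt, tree ref)
{
  /* Does STMT possibly write (parts of) the memory designated by REF?  */
  if (stmt_may_clobber_ref_p (stmt, ref))
    return true;
  /* Does STMT possibly read (parts of) that memory?  */
  return ref_maybe_used_by_stmt_p (stmt, ref);
}
#endif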
static int nonoverlapping_refs_since_match_p (tree, tree, tree, tree, bool);
static bool nonoverlapping_component_refs_p (const_tree, const_tree);
/* Query statistics for the different low-level disambiguators.
   A high-level query may trigger multiple of them.  */

static struct {
  unsigned HOST_WIDE_INT refs_may_alias_p_may_alias;
  unsigned HOST_WIDE_INT refs_may_alias_p_no_alias;
  unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_may_alias;
  unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_no_alias;
  unsigned HOST_WIDE_INT call_may_clobber_ref_p_may_alias;
  unsigned HOST_WIDE_INT call_may_clobber_ref_p_no_alias;
  unsigned HOST_WIDE_INT aliasing_component_refs_p_may_alias;
  unsigned HOST_WIDE_INT aliasing_component_refs_p_no_alias;
  unsigned HOST_WIDE_INT nonoverlapping_component_refs_p_may_alias;
  unsigned HOST_WIDE_INT nonoverlapping_component_refs_p_no_alias;
  unsigned HOST_WIDE_INT nonoverlapping_refs_since_match_p_may_alias;
  unsigned HOST_WIDE_INT nonoverlapping_refs_since_match_p_must_overlap;
  unsigned HOST_WIDE_INT nonoverlapping_refs_since_match_p_no_alias;
  unsigned HOST_WIDE_INT modref_use_may_alias;
  unsigned HOST_WIDE_INT modref_use_no_alias;
  unsigned HOST_WIDE_INT modref_clobber_may_alias;
  unsigned HOST_WIDE_INT modref_clobber_no_alias;
  unsigned HOST_WIDE_INT modref_tests;
  unsigned HOST_WIDE_INT modref_baseptr_tests;
} alias_stats;
void
dump_alias_stats (FILE *s)
{
  fprintf (s, "\nAlias oracle query stats:\n");
  fprintf (s, "  refs_may_alias_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.refs_may_alias_p_no_alias,
	   alias_stats.refs_may_alias_p_no_alias
	   + alias_stats.refs_may_alias_p_may_alias);
  fprintf (s, "  ref_maybe_used_by_call_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.ref_maybe_used_by_call_p_no_alias,
	   alias_stats.ref_maybe_used_by_call_p_no_alias
	   + alias_stats.ref_maybe_used_by_call_p_may_alias);
  fprintf (s, "  call_may_clobber_ref_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.call_may_clobber_ref_p_no_alias,
	   alias_stats.call_may_clobber_ref_p_no_alias
	   + alias_stats.call_may_clobber_ref_p_may_alias);
  fprintf (s, "  nonoverlapping_component_refs_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.nonoverlapping_component_refs_p_no_alias,
	   alias_stats.nonoverlapping_component_refs_p_no_alias
	   + alias_stats.nonoverlapping_component_refs_p_may_alias);
  fprintf (s, "  nonoverlapping_refs_since_match_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" must overlaps, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.nonoverlapping_refs_since_match_p_no_alias,
	   alias_stats.nonoverlapping_refs_since_match_p_must_overlap,
	   alias_stats.nonoverlapping_refs_since_match_p_no_alias
	   + alias_stats.nonoverlapping_refs_since_match_p_may_alias
	   + alias_stats.nonoverlapping_refs_since_match_p_must_overlap);
  fprintf (s, "  aliasing_component_refs_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.aliasing_component_refs_p_no_alias,
	   alias_stats.aliasing_component_refs_p_no_alias
	   + alias_stats.aliasing_component_refs_p_may_alias);
  dump_alias_stats_in_alias_c (s);
  fprintf (s, "\nModref stats:\n");
  fprintf (s, "  modref use: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.modref_use_no_alias,
	   alias_stats.modref_use_no_alias
	   + alias_stats.modref_use_may_alias);
  fprintf (s, "  modref clobber: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n"
	   "  " HOST_WIDE_INT_PRINT_DEC" tbaa queries (%f per modref query)\n"
	   "  " HOST_WIDE_INT_PRINT_DEC" base compares (%f per modref query)\n",
	   alias_stats.modref_clobber_no_alias,
	   alias_stats.modref_clobber_no_alias
	   + alias_stats.modref_clobber_may_alias,
	   alias_stats.modref_tests,
	   ((double)alias_stats.modref_tests)
	   / (alias_stats.modref_clobber_no_alias
	      + alias_stats.modref_clobber_may_alias),
	   alias_stats.modref_baseptr_tests,
	   ((double)alias_stats.modref_baseptr_tests)
	   / (alias_stats.modref_clobber_no_alias
	      + alias_stats.modref_clobber_may_alias));
}
/* Return true if dereferencing PTR may alias a global variable.  */

bool
ptr_deref_may_alias_global_p (tree ptr)
{
  struct ptr_info_def *pi;

  /* If we end up with a pointer constant here, it may point
     to global memory.  */
  if (TREE_CODE (ptr) != SSA_NAME)
    return true;

  pi = SSA_NAME_PTR_INFO (ptr);

  /* If we do not have points-to information for this variable,
     we have to punt.  */
  if (!pi)
    return true;

  /* ??? This does not use TBAA to prune globals ptr may not access.  */
  return pt_solution_includes_global (&pi->pt);
}
/* Return true if dereferencing PTR may alias DECL.
   The caller is responsible for applying TBAA to see if PTR
   may access DECL at all.  */

static bool
ptr_deref_may_alias_decl_p (tree ptr, tree decl)
{
  struct ptr_info_def *pi;

  /* Conversions are irrelevant for points-to information and
     data-dependence analysis can feed us those.  */
  STRIP_NOPS (ptr);

  /* Anything we do not explicitly handle aliases.  */
  if ((TREE_CODE (ptr) != SSA_NAME
       && TREE_CODE (ptr) != ADDR_EXPR
       && TREE_CODE (ptr) != POINTER_PLUS_EXPR)
      || !POINTER_TYPE_P (TREE_TYPE (ptr))
      || (!VAR_P (decl)
	  && TREE_CODE (decl) != PARM_DECL
	  && TREE_CODE (decl) != RESULT_DECL))
    return true;

  /* Disregard pointer offsetting.  */
  if (TREE_CODE (ptr) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr = TREE_OPERAND (ptr, 0);
	}
      while (TREE_CODE (ptr) == POINTER_PLUS_EXPR);
      return ptr_deref_may_alias_decl_p (ptr, decl);
    }

  /* ADDR_EXPR pointers either just offset another pointer or directly
     specify the pointed-to set.  */
  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	ptr = TREE_OPERAND (base, 0);
      else if (base
	       && DECL_P (base))
	return compare_base_decls (base, decl) != 0;
      else if (base
	       && CONSTANT_CLASS_P (base))
	return false;
      else
	return true;
    }

  /* Non-aliased variables cannot be pointed to.  */
  if (!may_be_aliased (decl))
    return false;

  /* If we do not have useful points-to information for this pointer
     we cannot disambiguate anything else.  */
  pi = SSA_NAME_PTR_INFO (ptr);
  if (!pi)
    return true;

  return pt_solution_includes (&pi->pt, decl);
}
/* Return true if dereferenced PTR1 and PTR2 may alias.
   The caller is responsible for applying TBAA to see if accesses
   through PTR1 and PTR2 may conflict at all.  */

bool
ptr_derefs_may_alias_p (tree ptr1, tree ptr2)
{
  struct ptr_info_def *pi1, *pi2;

  /* Conversions are irrelevant for points-to information and
     data-dependence analysis can feed us those.  */
  STRIP_NOPS (ptr1);
  STRIP_NOPS (ptr2);

  /* Disregard pointer offsetting.  */
  if (TREE_CODE (ptr1) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr1 = TREE_OPERAND (ptr1, 0);
	}
      while (TREE_CODE (ptr1) == POINTER_PLUS_EXPR);
      return ptr_derefs_may_alias_p (ptr1, ptr2);
    }
  if (TREE_CODE (ptr2) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr2 = TREE_OPERAND (ptr2, 0);
	}
      while (TREE_CODE (ptr2) == POINTER_PLUS_EXPR);
      return ptr_derefs_may_alias_p (ptr1, ptr2);
    }

  /* ADDR_EXPR pointers either just offset another pointer or directly
     specify the pointed-to set.  */
  if (TREE_CODE (ptr1) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr1, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	return ptr_derefs_may_alias_p (TREE_OPERAND (base, 0), ptr2);
      else if (base
	       && DECL_P (base))
	return ptr_deref_may_alias_decl_p (ptr2, base);
      else
	return true;
    }
  if (TREE_CODE (ptr2) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr2, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	return ptr_derefs_may_alias_p (ptr1, TREE_OPERAND (base, 0));
      else if (base
	       && DECL_P (base))
	return ptr_deref_may_alias_decl_p (ptr1, base);
      else
	return true;
    }

  /* From here we require SSA name pointers.  Anything else aliases.  */
  if (TREE_CODE (ptr1) != SSA_NAME
      || TREE_CODE (ptr2) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (ptr1))
      || !POINTER_TYPE_P (TREE_TYPE (ptr2)))
    return true;

  /* We may end up with two empty points-to solutions for the same two
     pointers.  In this case we still want to say both pointers alias, so
     shortcut that here.  */
  if (ptr1 == ptr2)
    return true;

  /* If we do not have useful points-to information for either pointer
     we cannot disambiguate anything else.  */
  pi1 = SSA_NAME_PTR_INFO (ptr1);
  pi2 = SSA_NAME_PTR_INFO (ptr2);
  if (!pi1 || !pi2)
    return true;

  /* ??? This does not use TBAA to prune decls from the intersection
     that not both pointers may access.  */
  return pt_solutions_intersect (&pi1->pt, &pi2->pt);
}
/* Return true if dereferencing PTR may alias *REF.
   The caller is responsible for applying TBAA to see if PTR
   may access *REF at all.  */

static bool
ptr_deref_may_alias_ref_p_1 (tree ptr, ao_ref *ref)
{
  tree base = ao_ref_base (ref);

  if (TREE_CODE (base) == MEM_REF
      || TREE_CODE (base) == TARGET_MEM_REF)
    return ptr_derefs_may_alias_p (ptr, TREE_OPERAND (base, 0));
  else if (DECL_P (base))
    return ptr_deref_may_alias_decl_p (ptr, base);

  return true;
}
/* Returns true if PTR1 and PTR2 compare unequal because of points-to.  */

bool
ptrs_compare_unequal (tree ptr1, tree ptr2)
{
  /* First resolve the pointers down to a SSA name pointer base or
     a VAR_DECL, PARM_DECL or RESULT_DECL.  This explicitly does
     not yet try to handle LABEL_DECLs, FUNCTION_DECLs, CONST_DECLs
     or STRING_CSTs which need points-to adjustments to track them
     in the points-to sets.  */
  tree obj1 = NULL_TREE;
  tree obj2 = NULL_TREE;
  if (TREE_CODE (ptr1) == ADDR_EXPR)
    {
      tree tem = get_base_address (TREE_OPERAND (ptr1, 0));
      if (! tem)
	return false;
      if (VAR_P (tem)
	  || TREE_CODE (tem) == PARM_DECL
	  || TREE_CODE (tem) == RESULT_DECL)
	obj1 = tem;
      else if (TREE_CODE (tem) == MEM_REF)
	ptr1 = TREE_OPERAND (tem, 0);
    }
  if (TREE_CODE (ptr2) == ADDR_EXPR)
    {
      tree tem = get_base_address (TREE_OPERAND (ptr2, 0));
      if (! tem)
	return false;
      if (VAR_P (tem)
	  || TREE_CODE (tem) == PARM_DECL
	  || TREE_CODE (tem) == RESULT_DECL)
	obj2 = tem;
      else if (TREE_CODE (tem) == MEM_REF)
	ptr2 = TREE_OPERAND (tem, 0);
    }

  /* Canonicalize ptr vs. object.  */
  if (TREE_CODE (ptr1) == SSA_NAME && obj2)
    {
      std::swap (ptr1, ptr2);
      std::swap (obj1, obj2);
    }

  if (obj1 && obj2)
    /* Other code handles this correctly, no need to duplicate it here.  */;
  else if (obj1 && TREE_CODE (ptr2) == SSA_NAME)
    {
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr2);
      /* We may not use restrict to optimize pointer comparisons.
	 See PR71062.  So we have to assume that restrict-pointed-to
	 may be in fact obj1.  */
      if (!pi
	  || pi->pt.vars_contains_restrict
	  || pi->pt.vars_contains_interposable)
	return false;
      if (VAR_P (obj1)
	  && (TREE_STATIC (obj1) || DECL_EXTERNAL (obj1)))
	{
	  varpool_node *node = varpool_node::get (obj1);
	  /* If obj1 may bind to NULL give up (see below).  */
	  if (! node
	      || ! node->nonzero_address ()
	      || ! decl_binds_to_current_def_p (obj1))
	    return false;
	}
      return !pt_solution_includes (&pi->pt, obj1);
    }

  /* ??? We'd like to handle ptr1 != NULL and ptr1 != ptr2
     but those require pt.null to be conservatively correct.  */
  return false;
}
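
/* Example (editor's addition): given `static int x;' and an SSA pointer
   p whose points-to set provably excludes x, the function above allows
   folding `&x == p' to false, but only once x is known to bind locally
   and to have a nonzero address.  */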
/* Returns whether reference REF to BASE may refer to global memory.  */

static bool
ref_may_alias_global_p_1 (tree base)
{
  if (DECL_P (base))
    return is_global_var (base);
  else if (TREE_CODE (base) == MEM_REF
	   || TREE_CODE (base) == TARGET_MEM_REF)
    return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
  return true;
}

bool
ref_may_alias_global_p (ao_ref *ref)
{
  tree base = ao_ref_base (ref);
  return ref_may_alias_global_p_1 (base);
}

bool
ref_may_alias_global_p (tree ref)
{
  tree base = get_base_address (ref);
  return ref_may_alias_global_p_1 (base);
}
/* Return true whether STMT may clobber global memory.  */

bool
stmt_may_clobber_global_p (gimple *stmt)
{
  tree lhs;

  if (!gimple_vdef (stmt))
    return false;

  /* ??? We can ask the oracle whether an artificial pointer
     dereference with a pointer with points-to information covering
     all global memory (what about non-address taken memory?) may be
     clobbered by this call.  As there is at the moment no convenient
     way of doing that without generating garbage do some manual
     checking instead.
     ??? We could make a NULL ao_ref argument to the various
     predicates special, meaning any global memory.  */

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      lhs = gimple_assign_lhs (stmt);
      return (TREE_CODE (lhs) != SSA_NAME
	      && ref_may_alias_global_p (lhs));
    case GIMPLE_CALL:
      return true;
    default:
      return true;
    }
}
/* Dump alias information on FILE.  */

void
dump_alias_info (FILE *file)
{
  unsigned i;
  tree ptr;
  const char *funcname
    = lang_hooks.decl_printable_name (current_function_decl, 2);
  tree var;

  fprintf (file, "\n\nAlias information for %s\n\n", funcname);

  fprintf (file, "Aliased symbols\n\n");

  FOR_EACH_LOCAL_DECL (cfun, i, var)
    {
      if (may_be_aliased (var))
	dump_variable (file, var);
    }

  fprintf (file, "\nCall clobber information\n");

  fprintf (file, "\nESCAPED");
  dump_points_to_solution (file, &cfun->gimple_df->escaped);

  fprintf (file, "\n\nFlow-insensitive points-to information\n\n");

  FOR_EACH_SSA_NAME (i, ptr, cfun)
    {
      struct ptr_info_def *pi;

      if (!POINTER_TYPE_P (TREE_TYPE (ptr))
	  || SSA_NAME_IN_FREE_LIST (ptr))
	continue;

      pi = SSA_NAME_PTR_INFO (ptr);
      if (pi)
	dump_points_to_info_for (file, ptr);
    }

  fprintf (file, "\n");
}

/* Dump alias information on stderr.  */

DEBUG_FUNCTION void
debug_alias_info (void)
{
  dump_alias_info (stderr);
}
/* Dump the points-to set *PT into FILE.  */

void
dump_points_to_solution (FILE *file, struct pt_solution *pt)
{
  if (pt->anything)
    fprintf (file, ", points-to anything");

  if (pt->nonlocal)
    fprintf (file, ", points-to non-local");

  if (pt->escaped)
    fprintf (file, ", points-to escaped");

  if (pt->ipa_escaped)
    fprintf (file, ", points-to unit escaped");

  if (pt->null)
    fprintf (file, ", points-to NULL");

  if (pt->vars)
    {
      fprintf (file, ", points-to vars: ");
      dump_decl_set (file, pt->vars);
      if (pt->vars_contains_nonlocal
	  || pt->vars_contains_escaped
	  || pt->vars_contains_escaped_heap
	  || pt->vars_contains_restrict)
	{
	  const char *comma = "";
	  fprintf (file, " (");
	  if (pt->vars_contains_nonlocal)
	    {
	      fprintf (file, "nonlocal");
	      comma = ", ";
	    }
	  if (pt->vars_contains_escaped)
	    {
	      fprintf (file, "%sescaped", comma);
	      comma = ", ";
	    }
	  if (pt->vars_contains_escaped_heap)
	    {
	      fprintf (file, "%sescaped heap", comma);
	      comma = ", ";
	    }
	  if (pt->vars_contains_restrict)
	    {
	      fprintf (file, "%srestrict", comma);
	      comma = ", ";
	    }
	  if (pt->vars_contains_interposable)
	    fprintf (file, "%sinterposable", comma);
	  fprintf (file, ")");
	}
    }
}

/* Unified dump function for pt_solution.  */

DEBUG_FUNCTION void
debug (pt_solution &ref)
{
  dump_points_to_solution (stderr, &ref);
}

DEBUG_FUNCTION void
debug (pt_solution *ptr)
{
  if (ptr)
    debug (*ptr);
  else
    fprintf (stderr, "<nil>\n");
}
/* Dump points-to information for SSA_NAME PTR into FILE.  */

void
dump_points_to_info_for (FILE *file, tree ptr)
{
  struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);

  print_generic_expr (file, ptr, dump_flags);

  if (pi)
    dump_points_to_solution (file, &pi->pt);
  else
    fprintf (file, ", points-to anything");

  fprintf (file, "\n");
}

/* Dump points-to information for VAR into stderr.  */

DEBUG_FUNCTION void
debug_points_to_info_for (tree var)
{
  dump_points_to_info_for (stderr, var);
}
/* Initializes the alias-oracle reference representation *R from REF.  */

void
ao_ref_init (ao_ref *r, tree ref)
{
  r->ref = ref;
  r->base = NULL_TREE;
  r->offset = 0;
  r->size = -1;
  r->max_size = -1;
  r->ref_alias_set = -1;
  r->base_alias_set = -1;
  r->volatile_p = ref ? TREE_THIS_VOLATILE (ref) : false;
}
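
/* Illustrative sketch (editor's addition): typical ao_ref usage.  The
   structure caches base, extent and alias sets lazily; ao_ref_base and
   ao_ref_alias_set (defined below) fill the fields on first use.
   Wrapped in #if 0 so it does not affect the build.  */
#if 0
static void
example_ao_ref_usage (tree ref)
{
  ao_ref r;
  ao_ref_init (&r, ref);
  /* Computes and caches base, offset, size and max_size.  */
  tree base = ao_ref_base (&r);
  /* Computes and caches the reference alias set.  */
  alias_set_type set = ao_ref_alias_set (&r);
  (void) base;
  (void) set;
}
#endif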
/* Returns the base object of the memory reference *REF.  */

tree
ao_ref_base (ao_ref *ref)
{
  bool reverse;

  if (ref->base)
    return ref->base;
  ref->base = get_ref_base_and_extent (ref->ref, &ref->offset, &ref->size,
				       &ref->max_size, &reverse);
  return ref->base;
}
/* Returns the base object alias set of the memory reference *REF.  */

alias_set_type
ao_ref_base_alias_set (ao_ref *ref)
{
  tree base_ref;
  if (ref->base_alias_set != -1)
    return ref->base_alias_set;
  if (!ref->ref)
    return 0;
  base_ref = ref->ref;
  while (handled_component_p (base_ref))
    base_ref = TREE_OPERAND (base_ref, 0);
  ref->base_alias_set = get_alias_set (base_ref);
  return ref->base_alias_set;
}

/* Returns the reference alias set of the memory reference *REF.  */

alias_set_type
ao_ref_alias_set (ao_ref *ref)
{
  if (ref->ref_alias_set != -1)
    return ref->ref_alias_set;
  if (!ref->ref)
    return 0;
  ref->ref_alias_set = get_alias_set (ref->ref);
  return ref->ref_alias_set;
}
/* Returns a type satisfying
   get_deref_alias_set (type) == ao_ref_base_alias_set (REF).  */

tree
ao_ref_base_alias_ptr_type (ao_ref *ref)
{
  tree base_ref;

  if (!ref->ref)
    return NULL_TREE;
  base_ref = ref->ref;
  while (handled_component_p (base_ref))
    base_ref = TREE_OPERAND (base_ref, 0);
  tree ret = reference_alias_ptr_type (base_ref);
  return ret;
}

/* Returns a type satisfying
   get_deref_alias_set (type) == ao_ref_alias_set (REF).  */

tree
ao_ref_alias_ptr_type (ao_ref *ref)
{
  if (!ref->ref)
    return NULL_TREE;
  tree ret = reference_alias_ptr_type (ref->ref);
  return ret;
}
/* Init an alias-oracle reference representation from a gimple pointer
   PTR and a range specified by OFFSET, SIZE and MAX_SIZE under the
   assumption that RANGE_KNOWN is set.

   The access is assumed to be only to or after the pointer target adjusted
   by the offset, not before it (even in the case RANGE_KNOWN is false).  */

static void
ao_ref_init_from_ptr_and_range (ao_ref *ref, tree ptr,
				bool range_known,
				poly_int64 offset,
				poly_int64 size,
				poly_int64 max_size)
{
  poly_int64 t, extra_offset = 0;

  ref->ref = NULL_TREE;
  if (TREE_CODE (ptr) == SSA_NAME)
    {
      gimple *stmt = SSA_NAME_DEF_STMT (ptr);
      if (gimple_assign_single_p (stmt)
	  && gimple_assign_rhs_code (stmt) == ADDR_EXPR)
	ptr = gimple_assign_rhs1 (stmt);
      else if (is_gimple_assign (stmt)
	       && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
	       && ptrdiff_tree_p (gimple_assign_rhs2 (stmt), &extra_offset))
	{
	  ptr = gimple_assign_rhs1 (stmt);
	  extra_offset *= BITS_PER_UNIT;
	}
    }

  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      ref->base = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &t);
      if (ref->base)
	ref->offset = BITS_PER_UNIT * t;
      else
	{
	  range_known = false;
	  ref->offset = 0;
	  ref->base = get_base_address (TREE_OPERAND (ptr, 0));
	}
    }
  else
    {
      gcc_assert (POINTER_TYPE_P (TREE_TYPE (ptr)));
      ref->base = build2 (MEM_REF, char_type_node,
			  ptr, null_pointer_node);
      ref->offset = 0;
    }
  ref->offset += extra_offset + offset;
  if (range_known)
    {
      ref->max_size = max_size;
      ref->size = size;
    }
  else
    ref->max_size = ref->size = -1;
  ref->ref_alias_set = 0;
  ref->base_alias_set = 0;
  ref->volatile_p = false;
}
/* Init an alias-oracle reference representation from a gimple pointer
   PTR and a gimple size SIZE in bytes.  If SIZE is NULL_TREE then the
   size is assumed to be unknown.  The access is assumed to be only
   to or after the pointer target, not before it.  */

void
ao_ref_init_from_ptr_and_size (ao_ref *ref, tree ptr, tree size)
{
  poly_int64 size_hwi;
  if (size
      && poly_int_tree_p (size, &size_hwi)
      && coeffs_in_range_p (size_hwi, 0, HOST_WIDE_INT_MAX / BITS_PER_UNIT))
    {
      size_hwi = size_hwi * BITS_PER_UNIT;
      ao_ref_init_from_ptr_and_range (ref, ptr, true, 0, size_hwi, size_hwi);
    }
  else
    ao_ref_init_from_ptr_and_range (ref, ptr, false, 0, -1, -1);
}
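
/* Illustrative sketch (editor's addition): modeling the destination of a
   call like memset (dst, val, len) as an ao_ref.  The gcall argument and
   its layout are assumptions for the example only; kept under #if 0.  */
#if 0
static void
example_memset_dest (gcall *call)
{
  ao_ref dref;
  /* Argument 0 is the destination pointer, argument 2 the byte size;
     passing NULL_TREE as the size would leave the extent unknown.  */
  ao_ref_init_from_ptr_and_size (&dref, gimple_call_arg (call, 0),
				 gimple_call_arg (call, 2));
  (void) dref;
}
#endif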
/* S1 and S2 are TYPE_SIZE or DECL_SIZE.  Compare them:
   Return -1 if S1 < S2
   Return 1 if S1 > S2
   Return 0 if equal or incomparable.  */

static int
compare_sizes (tree s1, tree s2)
{
  if (!s1 || !s2)
    return 0;

  poly_uint64 size1;
  poly_uint64 size2;

  if (!poly_int_tree_p (s1, &size1) || !poly_int_tree_p (s2, &size2))
    return 0;
  if (known_lt (size1, size2))
    return -1;
  if (known_lt (size2, size1))
    return 1;
  return 0;
}
/* Compare TYPE1 and TYPE2 by their size.
   Return -1 if size of TYPE1 < size of TYPE2
   Return 1 if size of TYPE1 > size of TYPE2
   Return 0 if types are of equal sizes or we cannot compare them.  */

static int
compare_type_sizes (tree type1, tree type2)
{
  /* Be conservative for arrays and vectors.  We want to support partial
     overlap on int[3] and int[3] as tested in gcc.dg/torture/alias-2.c.  */
  while (TREE_CODE (type1) == ARRAY_TYPE
	 || TREE_CODE (type1) == VECTOR_TYPE)
    type1 = TREE_TYPE (type1);
  while (TREE_CODE (type2) == ARRAY_TYPE
	 || TREE_CODE (type2) == VECTOR_TYPE)
    type2 = TREE_TYPE (type2);
  return compare_sizes (TYPE_SIZE (type1), TYPE_SIZE (type2));
}
/* Return 1 if TYPE1 and TYPE2 are to be considered equivalent for the
   purpose of TBAA.  Return 0 if they are distinct and -1 if we cannot
   decide.  */

static inline int
same_type_for_tbaa (tree type1, tree type2)
{
  type1 = TYPE_MAIN_VARIANT (type1);
  type2 = TYPE_MAIN_VARIANT (type2);

  /* Handle the most common case first.  */
  if (type1 == type2)
    return 1;

  /* If we would have to do structural comparison bail out.  */
  if (TYPE_STRUCTURAL_EQUALITY_P (type1)
      || TYPE_STRUCTURAL_EQUALITY_P (type2))
    return -1;

  /* Compare the canonical types.  */
  if (TYPE_CANONICAL (type1) == TYPE_CANONICAL (type2))
    return 1;

  /* ??? Array types are not properly unified in all cases as we have
     spurious changes in the index types for example.  Removing this
     causes all sorts of problems with the Fortran frontend.  */
  if (TREE_CODE (type1) == ARRAY_TYPE
      && TREE_CODE (type2) == ARRAY_TYPE)
    return -1;

  /* ??? In Ada, an lvalue of an unconstrained type can be used to access an
     object of one of its constrained subtypes, e.g. when a function with an
     unconstrained parameter passed by reference is called on an object and
     inlined.  But, even in the case of a fixed size, type and subtypes are
     not equivalent enough as to share the same TYPE_CANONICAL, since this
     would mean that conversions between them are useless, whereas they are
     not (e.g. type and subtypes can have different modes).  So, in the end,
     they are only guaranteed to have the same alias set.  */
  alias_set_type set1 = get_alias_set (type1);
  alias_set_type set2 = get_alias_set (type2);
  if (set1 == set2)
    return -1;

  /* Pointers to void are considered compatible with all other pointers,
     so for two pointers see what the alias set resolution thinks.  */
  if (POINTER_TYPE_P (type1)
      && POINTER_TYPE_P (type2)
      && alias_sets_conflict_p (set1, set2))
    return -1;

  /* The types are known to be not equal.  */
  return 0;
}
/* Return true if TYPE is a composite type (i.e. we may apply one of handled
   components on it).  */

static bool
type_has_components_p (tree type)
{
  return AGGREGATE_TYPE_P (type) || VECTOR_TYPE_P (type)
	 || TREE_CODE (type) == COMPLEX_TYPE;
}
/* MATCH1 and MATCH2, which are part of the access paths of REF1 and REF2
   respectively, either point to the same address or are completely
   disjoint.  If PARTIAL_OVERLAP is true, assume that outermost arrays may
   just partly overlap.

   Try to disambiguate using the access path starting from the match
   and return false if there is no conflict.

   Helper for aliasing_component_refs_p.  */

static bool
aliasing_matching_component_refs_p (tree match1, tree ref1,
				    poly_int64 offset1, poly_int64 max_size1,
				    tree match2, tree ref2,
				    poly_int64 offset2, poly_int64 max_size2,
				    bool partial_overlap)
{
  poly_int64 offadj, sztmp, msztmp;
  bool reverse;

  if (!partial_overlap)
    {
      get_ref_base_and_extent (match2, &offadj, &sztmp, &msztmp, &reverse);
      offset2 -= offadj;
      get_ref_base_and_extent (match1, &offadj, &sztmp, &msztmp, &reverse);
      offset1 -= offadj;
      if (!ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
	{
	  ++alias_stats.aliasing_component_refs_p_no_alias;
	  return false;
	}
    }

  int cmp = nonoverlapping_refs_since_match_p (match1, ref1, match2, ref2,
					       partial_overlap);
  if (cmp == 1
      || (cmp == -1 && nonoverlapping_component_refs_p (ref1, ref2)))
    {
      ++alias_stats.aliasing_component_refs_p_no_alias;
      return false;
    }
  ++alias_stats.aliasing_component_refs_p_may_alias;
  return true;
}
/* Return true if REF is a reference to a zero sized trailing array.  I.e.
   struct foo {int bar; int array[0];} *fooptr;
   fooptr->array.  */

static bool
component_ref_to_zero_sized_trailing_array_p (tree ref)
{
  return (TREE_CODE (ref) == COMPONENT_REF
	  && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 1))) == ARRAY_TYPE
	  && (!TYPE_SIZE (TREE_TYPE (TREE_OPERAND (ref, 1)))
	      || integer_zerop (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (ref, 1)))))
	  && array_at_struct_end_p (ref));
}
/* Worker for aliasing_component_refs_p.  Most parameters match parameters of
   aliasing_component_refs_p.

   Walk access path REF2 and try to find type matching TYPE1
   (which is a start of possibly aliasing access path REF1).
   If match is found, try to disambiguate.

   Return 0 for successful disambiguation.
   Return 1 if match was found but disambiguation failed
   Return -1 if there is no match.
   In this case MAYBE_MATCH is set to 0 if there is no type matching TYPE1
   in access path REF2 and -1 if we are not sure.  */

static int
aliasing_component_refs_walk (tree ref1, tree type1, tree base1,
			      poly_int64 offset1, poly_int64 max_size1,
			      tree end_struct_ref1,
			      tree ref2, tree base2,
			      poly_int64 offset2, poly_int64 max_size2,
			      bool *maybe_match)
{
  tree ref = ref2;
  int same_p = 0;

  while (true)
    {
      /* We walk from inner type to the outer types.  If type we see is
	 already too large to be part of type1, terminate the search.  */
      int cmp = compare_type_sizes (type1, TREE_TYPE (ref));

      if (cmp < 0
	  && (!end_struct_ref1
	      || compare_type_sizes (TREE_TYPE (end_struct_ref1),
				     TREE_TYPE (ref)) < 0))
	break;
      /* If types may be of same size, see if we can decide about their
	 equality.  */
      if (cmp == 0)
	{
	  same_p = same_type_for_tbaa (TREE_TYPE (ref), type1);
	  if (same_p == 1)
	    break;
	  /* In case we can't decide whether types are same try to
	     continue looking for the exact match.
	     Remember however that we possibly saw a match
	     to bypass the access path continuations tests we do later.  */
	  if (same_p == -1)
	    *maybe_match = true;
	}
      if (!handled_component_p (ref))
	break;
      ref = TREE_OPERAND (ref, 0);
    }
  if (same_p == 1)
    {
      bool partial_overlap = false;

      /* We assume that arrays can overlap by multiple of their elements
	 size as tested in gcc.dg/torture/alias-2.c.
	 This partial overlap happens only when both arrays are bases of
	 the access and not contained within another component ref.
	 To be safe we also assume partial overlap for VLAs.  */
      if (TREE_CODE (TREE_TYPE (base1)) == ARRAY_TYPE
	  && (!TYPE_SIZE (TREE_TYPE (base1))
	      || TREE_CODE (TYPE_SIZE (TREE_TYPE (base1))) != INTEGER_CST
	      || ref == base2))
	{
	  /* Setting maybe_match to true triggers
	     nonoverlapping_component_refs_p test later that still may do
	     useful disambiguation.  */
	  *maybe_match = true;
	  partial_overlap = true;
	}
      return aliasing_matching_component_refs_p (base1, ref1,
						 offset1, max_size1,
						 ref, ref2,
						 offset2, max_size2,
						 partial_overlap);
    }
  return -1;
}
/* Consider access path1 base1....ref1 and access path2 base2...ref2.
   Return true if they can be composed to a single access path
   base1...ref1...base2...ref2.

   REF_TYPE1 is the type of REF1.  END_STRUCT_PAST_END1 is true if there is
   a trailing array access after REF1 in the non-TBAA part of the access.
   REF1_ALIAS_SET is the alias set of REF1.

   BASE_TYPE2 is the type of base2.  END_STRUCT_REF2 is non-NULL if there is
   a trailing array access in the TBAA part of access path2.
   BASE2_ALIAS_SET is the alias set of base2.  */

bool
access_path_may_continue_p (tree ref_type1, bool end_struct_past_end1,
			    alias_set_type ref1_alias_set,
			    tree base_type2, tree end_struct_ref2,
			    alias_set_type base2_alias_set)
{
  /* Access path cannot continue past types with no components.  */
  if (!type_has_components_p (ref_type1))
    return false;

  /* If the first access path ends by a type too small to hold the base of
     the second access path, typically the paths cannot continue.

     Punt if end_struct_past_end1 is true.  We want to support arbitrary
     type punning past the first COMPONENT_REF to a union because redundant
     store elimination depends on this, see PR92152.  For this reason we
     cannot check the size of the reference because types may partially
     overlap.  */
  if (!end_struct_past_end1)
    {
      if (compare_type_sizes (ref_type1, base_type2) < 0)
	return false;
      /* If path2 contains a trailing array access we can strengthen the
	 check to verify that the size of the element of the trailing array
	 fits as well.  In fact we could check for offset + type_size, but we
	 do not track offsets and this is quite a corner case.  */
      if (end_struct_ref2
	  && compare_type_sizes (ref_type1, TREE_TYPE (end_struct_ref2)) < 0)
	return false;
    }
  return (base2_alias_set == ref1_alias_set
	  || alias_set_subset_of (base2_alias_set, ref1_alias_set));
}
/* Determine if the two component references REF1 and REF2 which are
   based on access types TYPE1 and TYPE2 and of which at least one is based
   on an indirect reference may alias.
   REF1_ALIAS_SET, BASE1_ALIAS_SET, REF2_ALIAS_SET and BASE2_ALIAS_SET
   are the respective alias sets.  */

static bool
aliasing_component_refs_p (tree ref1,
			   alias_set_type ref1_alias_set,
			   alias_set_type base1_alias_set,
			   poly_int64 offset1, poly_int64 max_size1,
			   tree ref2,
			   alias_set_type ref2_alias_set,
			   alias_set_type base2_alias_set,
			   poly_int64 offset2, poly_int64 max_size2)
{
  /* If one reference is a component reference through pointers try to find a
     common base and apply offset based disambiguation.  This handles
     for example
       struct A { int i; int j; } *q;
       struct B { struct A a; int k; } *p;
     disambiguating q->i and p->a.j.  */
  tree base1, base2;
  tree type1, type2;
  bool maybe_match = false;
  tree end_struct_ref1 = NULL, end_struct_ref2 = NULL;
  bool end_struct_past_end1 = false;
  bool end_struct_past_end2 = false;

  /* Choose bases and base types to search for.
     The access path is as follows:
       base....end_of_tbaa_ref...actual_ref
     At one place in the access path may be a reference to a zero sized or
     trailing array.

     We generally discard the segment after end_of_tbaa_ref, however
     we need to be careful in case it contains a zero sized or trailing
     array.  These may happen after a reference to a union and in this case
     we need to not disambiguate type punning scenarios.

     We set:
	base1 to point to base

	ref1 to point to end_of_tbaa_ref

	end_struct_ref1 to point to the trailing reference (if it exists
	in range base....end_of_tbaa_ref)

	end_struct_past_end1 is true if this trailing reference occurs in
	end_of_tbaa_ref...actual_ref.  */
  base1 = ref1;
  while (handled_component_p (base1))
    {
      /* Generally access paths are monotonous in the size of object.  The
	 exception are trailing arrays of structures.  I.e.
	   struct a {int array[0];};
	 or
	   struct a {int array1[0]; int array[];};
	 Such struct has size 0 but accesses to a.array may have non-zero size.
	 In this case the size of TREE_TYPE (base1) is smaller than
	 size of TREE_TYPE (TREE_OPERAND (base1, 0)).

	 Because we compare sizes of arrays just by sizes of their elements,
	 we only need to care about zero sized array fields here.  */
      if (component_ref_to_zero_sized_trailing_array_p (base1))
	{
	  gcc_checking_assert (!end_struct_ref1);
	  end_struct_ref1 = base1;
	}
      if (ends_tbaa_access_path_p (base1))
	{
	  ref1 = TREE_OPERAND (base1, 0);
	  if (end_struct_ref1)
	    {
	      end_struct_past_end1 = true;
	      end_struct_ref1 = NULL;
	    }
	}
      base1 = TREE_OPERAND (base1, 0);
    }
  type1 = TREE_TYPE (base1);
  base2 = ref2;
  while (handled_component_p (base2))
    {
      if (component_ref_to_zero_sized_trailing_array_p (base2))
	{
	  gcc_checking_assert (!end_struct_ref2);
	  end_struct_ref2 = base2;
	}
      if (ends_tbaa_access_path_p (base2))
	{
	  ref2 = TREE_OPERAND (base2, 0);
	  if (end_struct_ref2)
	    {
	      end_struct_past_end2 = true;
	      end_struct_ref2 = NULL;
	    }
	}
      base2 = TREE_OPERAND (base2, 0);
    }
  type2 = TREE_TYPE (base2);

  /* Now search for the type1 in the access path of ref2.  This
     would be a common base for doing offset based disambiguation on.
     This however only makes sense if type2 is big enough to hold type1.  */
  int cmp_outer = compare_type_sizes (type2, type1);

  /* If type2 is big enough to contain type1 walk its access path.
     We also need to care of arrays at the end of structs that may extend
     beyond the end of structure.  If this occurs in the TBAA part of the
     access path, we need to consider the increased type as well.  */
  if (cmp_outer >= 0
      || (end_struct_ref2
	  && compare_type_sizes (TREE_TYPE (end_struct_ref2), type1) >= 0))
    {
      int res = aliasing_component_refs_walk (ref1, type1, base1,
					      offset1, max_size1,
					      end_struct_ref1,
					      ref2, base2, offset2, max_size2,
					      &maybe_match);
      if (res != -1)
	return res;
    }

  /* If we didn't find a common base, try the other way around.  */
  if (cmp_outer <= 0
      || (end_struct_ref1
	  && compare_type_sizes (TREE_TYPE (end_struct_ref1), type1) <= 0))
    {
      int res = aliasing_component_refs_walk (ref2, type2, base2,
					      offset2, max_size2,
					      end_struct_ref2,
					      ref1, base1, offset1, max_size1,
					      &maybe_match);
      if (res != -1)
	return res;
    }

  /* In the following code we make an assumption that the types in access
     paths do not overlap and thus accesses alias only if one path can be a
     continuation of another.  If we were not able to decide about equivalence,
     we need to give up.  */
  if (maybe_match)
    {
      if (!nonoverlapping_component_refs_p (ref1, ref2))
	{
	  ++alias_stats.aliasing_component_refs_p_may_alias;
	  return true;
	}
      ++alias_stats.aliasing_component_refs_p_no_alias;
      return false;
    }

  if (access_path_may_continue_p (TREE_TYPE (ref1), end_struct_past_end1,
				  ref1_alias_set,
				  type2, end_struct_ref2,
				  base2_alias_set)
      || access_path_may_continue_p (TREE_TYPE (ref2), end_struct_past_end2,
				     ref2_alias_set,
				     type1, end_struct_ref1,
				     base1_alias_set))
    {
      ++alias_stats.aliasing_component_refs_p_may_alias;
      return true;
    }
  ++alias_stats.aliasing_component_refs_p_no_alias;
  return false;
}
/* FIELD1 and FIELD2 are two fields of component refs.  We assume
   that bases of both component refs are either equivalent or nonoverlapping.
   We do not assume that the containers of FIELD1 and FIELD2 are of the
   same type or size.

   Return 0 if, in case the base addresses of the component refs are the
   same, FIELD1 and FIELD2 have the same address too.  Note that FIELD1 and
   FIELD2 may not be of same type or size.

   Return 1 if FIELD1 and FIELD2 are non-overlapping.

   Return -1 otherwise.

   Main difference between 0 and -1 is to let
   nonoverlapping_component_refs_since_match_p discover the semantically
   equivalent part of the access path.

   Note that this function is used even with -fno-strict-aliasing
   and makes use of no TBAA assumptions.  */

static int
nonoverlapping_component_refs_p_1 (const_tree field1, const_tree field2)
{
  /* If both fields are of the same type, we could save hard work of
     comparing offsets.  */
  tree type1 = DECL_CONTEXT (field1);
  tree type2 = DECL_CONTEXT (field2);

  if (TREE_CODE (type1) == RECORD_TYPE
      && DECL_BIT_FIELD_REPRESENTATIVE (field1))
    field1 = DECL_BIT_FIELD_REPRESENTATIVE (field1);
  if (TREE_CODE (type2) == RECORD_TYPE
      && DECL_BIT_FIELD_REPRESENTATIVE (field2))
    field2 = DECL_BIT_FIELD_REPRESENTATIVE (field2);

  /* ??? Bitfields can overlap at RTL level so punt on them.
     FIXME: RTL expansion should be fixed by adjusting the access path
     when producing MEM_ATTRs for MEMs which are wider than
     the bitfields similarly as done in set_mem_attrs_minus_bitpos.  */
  if (DECL_BIT_FIELD (field1) && DECL_BIT_FIELD (field2))
    return -1;

  /* Assume that different FIELD_DECLs never overlap within a RECORD_TYPE.  */
  if (type1 == type2 && TREE_CODE (type1) == RECORD_TYPE)
    return field1 != field2;

  /* In the common case the offsets and bit offsets will be the same.
     However if frontends do not agree on the alignment, they may be
     different even if they actually represent the same address.
     Try the common case first and if that fails calculate the
     actual bit offset.  */
  if (tree_int_cst_equal (DECL_FIELD_OFFSET (field1),
			  DECL_FIELD_OFFSET (field2))
      && tree_int_cst_equal (DECL_FIELD_BIT_OFFSET (field1),
			     DECL_FIELD_BIT_OFFSET (field2)))
    return 0;

  /* Note that it may be possible to use component_ref_field_offset
     which would provide offsets as trees.  However constructing and folding
     trees is expensive and does not seem to be worth the compile time
     cost.  */

  poly_uint64 offset1, offset2;
  poly_uint64 bit_offset1, bit_offset2;

  if (poly_int_tree_p (DECL_FIELD_OFFSET (field1), &offset1)
      && poly_int_tree_p (DECL_FIELD_OFFSET (field2), &offset2)
      && poly_int_tree_p (DECL_FIELD_BIT_OFFSET (field1), &bit_offset1)
      && poly_int_tree_p (DECL_FIELD_BIT_OFFSET (field2), &bit_offset2))
    {
      offset1 = (offset1 << LOG2_BITS_PER_UNIT) + bit_offset1;
      offset2 = (offset2 << LOG2_BITS_PER_UNIT) + bit_offset2;

      if (known_eq (offset1, offset2))
	return 0;

      poly_uint64 size1, size2;

      if (poly_int_tree_p (DECL_SIZE (field1), &size1)
	  && poly_int_tree_p (DECL_SIZE (field2), &size2)
	  && !ranges_maybe_overlap_p (offset1, size1, offset2, size2))
	return 1;
    }
  /* Resort to slower overlap checking by looking for matching types in
     the middle of access path.  */
  return -1;
}
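
/* Worked example (editor's addition): on a target with 8-bit units, a
   field with DECL_FIELD_OFFSET of 4 bytes and DECL_FIELD_BIT_OFFSET of 3
   sits at combined bit position (4 << LOG2_BITS_PER_UNIT) + 3
   = 4 * 8 + 3 = 35; two fields compare as "same address" above iff these
   combined bit positions are known equal.  */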
/* Return the low bound of an array ref.  Do not produce new trees
   and thus do not care about the particular type of integer constant
   and placeholder exprs.  */

static tree
cheap_array_ref_low_bound (tree ref)
{
  tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));

  /* Avoid expensive array_ref_low_bound.
     The low bound is either stored in operand 2, or it is TYPE_MIN_VALUE
     of the domain type, or it is zero.  */
  if (TREE_OPERAND (ref, 2))
    return TREE_OPERAND (ref, 2);
  else if (domain_type && TYPE_MIN_VALUE (domain_type))
    return TYPE_MIN_VALUE (domain_type);
  else
    return integer_zero_node;
}
/* REF1 and REF2 are ARRAY_REFs with either the same base address or which
   are completely disjoint.

   Return 1 if the refs are non-overlapping.
   Return 0 if they are possibly overlapping but if so the overlap again
   starts on the same address.
   Return -1 otherwise.  */

static int
nonoverlapping_array_refs_p (tree ref1, tree ref2)
{
  tree index1 = TREE_OPERAND (ref1, 1);
  tree index2 = TREE_OPERAND (ref2, 1);
  tree low_bound1 = cheap_array_ref_low_bound (ref1);
  tree low_bound2 = cheap_array_ref_low_bound (ref2);

  /* Handle zero offsets first: we do not need to match type size in this
     case.  */
  if (operand_equal_p (index1, low_bound1, 0)
      && operand_equal_p (index2, low_bound2, 0))
    return 0;

  /* If type sizes are different, give up.

     Avoid expensive array_ref_element_size.
     If operand 3 is present it denotes size in the alignment units.
     Otherwise size is TYPE_SIZE of the element type.
     Handle only common cases where types are of the same "kind".  */
  if ((TREE_OPERAND (ref1, 3) == NULL) != (TREE_OPERAND (ref2, 3) == NULL))
    return -1;

  tree elmt_type1 = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref1, 0)));
  tree elmt_type2 = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref2, 0)));

  if (TREE_OPERAND (ref1, 3))
    {
      if (TYPE_ALIGN (elmt_type1) != TYPE_ALIGN (elmt_type2)
	  || !operand_equal_p (TREE_OPERAND (ref1, 3),
			       TREE_OPERAND (ref2, 3), 0))
	return -1;
    }
  else
    {
      if (!operand_equal_p (TYPE_SIZE_UNIT (elmt_type1),
			    TYPE_SIZE_UNIT (elmt_type2), 0))
	return -1;
    }

  /* Since we know that type sizes are the same, there is no need to return
     -1 after this point.  Partial overlap cannot be introduced.  */

  /* We may need to fold trees in this case.
     TODO: Handle integer constant case at least.  */
  if (!operand_equal_p (low_bound1, low_bound2, 0))
    return 0;

  if (TREE_CODE (index1) == INTEGER_CST && TREE_CODE (index2) == INTEGER_CST)
    {
      if (tree_int_cst_equal (index1, index2))
	return 0;
      return 1;
    }
  /* TODO: We can use VRP to further disambiguate here.  */
  return 0;
}
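
/* Example (editor's addition): for `int a[10];' the refs a[2] and a[3]
   yield 1 (provably disjoint), a[i] and a[i] yield 0 (same address if
   the bases are), and comparing refs with different element sizes
   yields -1 (partial overlap possible).  */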
/* Try to disambiguate REF1 and REF2 under the assumption that MATCH1 and
   MATCH2 either point to the same address or are disjoint.
   MATCH1 and MATCH2 are assumed to be refs in the access paths of REF1 and
   REF2 respectively or NULL in the case we established equivalence of bases.
   If PARTIAL_OVERLAP is true assume that the toplevel arrays may actually
   overlap by an exact multiple of their element size.

   This test works by matching the initial segment of the access path
   and does not rely on TBAA thus is safe for !flag_strict_aliasing if
   match was determined without use of TBAA oracle.

   Return 1 if we can determine that component references REF1 and REF2,
   that are within a common DECL, cannot overlap.

   Return 0 if the paths are the same and thus there is nothing more to
   disambiguate (i.e. there is must-alias assuming there is must-alias
   between MATCH1 and MATCH2)

   Return -1 if we cannot determine 0 or 1 - this happens when
   non-matching types were met in the path.
   In this case it may make sense to continue by other disambiguation
   oracles.  */

static int
nonoverlapping_refs_since_match_p (tree match1, tree ref1,
				   tree match2, tree ref2,
				   bool partial_overlap)
{
  int ntbaa1 = 0, ntbaa2 = 0;
  /* Early return if there are no references to match, we do not need
     to walk the access paths.

     Do not consider this as may-alias for stats - it is more useful
     to have information on how many disambiguations happened provided that
     the query was meaningful.  */

  if (match1 == ref1 || !handled_component_p (ref1)
      || match2 == ref2 || !handled_component_p (ref2))
    return -1;

  auto_vec<tree, 16> component_refs1;
  auto_vec<tree, 16> component_refs2;

  /* Create the stack of handled components for REF1.  */
  while (handled_component_p (ref1) && ref1 != match1)
    {
      /* We use TBAA only to re-synchronize after mismatched refs.  So we
	 do not need to truncate access path after TBAA part ends.  */
      if (ends_tbaa_access_path_p (ref1))
	ntbaa1 = 0;
      else
	ntbaa1++;
      component_refs1.safe_push (ref1);
      ref1 = TREE_OPERAND (ref1, 0);
    }

  /* Create the stack of handled components for REF2.  */
  while (handled_component_p (ref2) && ref2 != match2)
    {
      if (ends_tbaa_access_path_p (ref2))
	ntbaa2 = 0;
      else
	ntbaa2++;
      component_refs2.safe_push (ref2);
      ref2 = TREE_OPERAND (ref2, 0);
    }

  if (!flag_strict_aliasing)
    {
      ntbaa1 = 0;
      ntbaa2 = 0;
    }

  bool mem_ref1 = TREE_CODE (ref1) == MEM_REF && ref1 != match1;
  bool mem_ref2 = TREE_CODE (ref2) == MEM_REF && ref2 != match2;

  /* If only one of the access paths starts with MEM_REF check that the
     offset is 0 so the addresses stay the same after stripping it.
     TODO: In this case we may walk the other access path until we get the
     same offset.

     If both start with MEM_REF, the offset has to be the same.  */
  if ((mem_ref1 && !mem_ref2 && !integer_zerop (TREE_OPERAND (ref1, 1)))
      || (mem_ref2 && !mem_ref1 && !integer_zerop (TREE_OPERAND (ref2, 1)))
      || (mem_ref1 && mem_ref2
	  && !tree_int_cst_equal (TREE_OPERAND (ref1, 1),
				  TREE_OPERAND (ref2, 1))))
    {
      ++alias_stats.nonoverlapping_refs_since_match_p_may_alias;
      return -1;
    }

  /* TARGET_MEM_REF are never wrapped in handled components, so we do not need
     to handle them here at all.  */
  gcc_checking_assert (TREE_CODE (ref1) != TARGET_MEM_REF
		       && TREE_CODE (ref2) != TARGET_MEM_REF);

  /* Pop the stacks in parallel and examine the COMPONENT_REFs of the same
     rank.  This is sufficient because we start from the same DECL and you
     cannot reference several fields at a time with COMPONENT_REFs (unlike
     with ARRAY_RANGE_REFs for arrays) so you always need the same number
     of them to access a sub-component, unless you're in a union, in which
     case the return value will precisely be false.  */
  while (true)
    {
      /* Track if we have seen an unmatched ref with non-zero offset.  In
	 this case we must look for partial overlaps.  */
      bool seen_unmatched_ref_p = false;

      /* First match ARRAY_REFs and try to disambiguate.  */
      if (!component_refs1.is_empty ()
	  && !component_refs2.is_empty ())
	{
	  unsigned int narray_refs1 = 0, narray_refs2 = 0;

	  /* We generally assume that both access paths start by the same
	     sequence of refs.  However if the number of array refs is not
	     in sync, try to recover and pop elts until the numbers match.
	     This helps the case where one access path starts by an array
	     and the other by an element.  */
	  for (narray_refs1 = 0; narray_refs1 < component_refs1.length ();
	       narray_refs1++)
	    if (TREE_CODE (component_refs1 [component_refs1.length()
					    - 1 - narray_refs1]) != ARRAY_REF)
	      break;

	  for (narray_refs2 = 0; narray_refs2 < component_refs2.length ();
	       narray_refs2++)
	    if (TREE_CODE (component_refs2 [component_refs2.length()
					    - 1 - narray_refs2]) != ARRAY_REF)
	      break;
	  for (; narray_refs1 > narray_refs2; narray_refs1--)
	    {
	      ref1 = component_refs1.pop ();
	      ntbaa1--;

	      /* If index is non-zero we need to check whether the reference
		 does not break the main invariant that bases are either
		 disjoint or equal.  Consider the example:

		 unsigned char out[][1];
		 out[1]="a";
		 out[i][0];

		 Here bases out and out are same, but after removing the
		 [i] index, this invariant no longer holds, because
		 out[i] points to the middle of array out.

		 TODO: If size of type of the skipped reference is an integer
		 multiple of the size of type of the other reference this
		 invariant can be verified, but even then it is not completely
		 safe with !flag_strict_aliasing if the other reference contains
		 unbounded array accesses.
		 See   */

	      if (!operand_equal_p (TREE_OPERAND (ref1, 1),
				    cheap_array_ref_low_bound (ref1), 0))
		return 0;
	    }
	  for (; narray_refs2 > narray_refs1; narray_refs2--)
	    {
	      ref2 = component_refs2.pop ();
	      ntbaa2--;
	      if (!operand_equal_p (TREE_OPERAND (ref2, 1),
				    cheap_array_ref_low_bound (ref2), 0))
		return 0;
	    }
	  /* Try to disambiguate matched arrays.  */
	  for (unsigned int i = 0; i < narray_refs1; i++)
	    {
	      int cmp = nonoverlapping_array_refs_p (component_refs1.pop (),
						     component_refs2.pop ());
	      ntbaa1--;
	      ntbaa2--;
	      if (cmp == 1 && !partial_overlap)
		{
		  ++alias_stats
		    .nonoverlapping_refs_since_match_p_no_alias;
		  return 1;
		}
	      if (cmp == -1)
		{
		  seen_unmatched_ref_p = true;
		  /* We cannot maintain the invariant that bases are either
		     same or completely disjoint.  However we can still recover
		     from type based alias analysis if we reach references to
		     same sizes.  We do not attempt to match array sizes, so
		     just finish array walking and look for component refs.  */
		  if (ntbaa1 < 0 || ntbaa2 < 0)
		    {
		      ++alias_stats.nonoverlapping_refs_since_match_p_may_alias;
		      return -1;
		    }
		  for (i++; i < narray_refs1; i++)
		    {
		      component_refs1.pop ();
		      component_refs2.pop ();
		      ntbaa1--;
		      ntbaa2--;
		    }
		  break;
		}
	      partial_overlap = false;
	    }
	}

      /* Next look for component_refs.  */
      do
	{
	  if (component_refs1.is_empty ())
	    {
	      ++alias_stats
		.nonoverlapping_refs_since_match_p_must_overlap;
	      return 0;
	    }
	  ref1 = component_refs1.pop ();
	  ntbaa1--;
	  if (TREE_CODE (ref1) != COMPONENT_REF)
	    {
	      seen_unmatched_ref_p = true;
	      if (ntbaa1 < 0 || ntbaa2 < 0)
		{
		  ++alias_stats.nonoverlapping_refs_since_match_p_may_alias;
		  return -1;
		}
	    }
	}
      while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref1, 0))));

      do
	{
	  if (component_refs2.is_empty ())
	    {
	      ++alias_stats
		.nonoverlapping_refs_since_match_p_must_overlap;
	      return 0;
	    }
	  ref2 = component_refs2.pop ();
	  ntbaa2--;
	  if (TREE_CODE (ref2) != COMPONENT_REF)
	    {
	      if (ntbaa1 < 0 || ntbaa2 < 0)
		{
		  ++alias_stats.nonoverlapping_refs_since_match_p_may_alias;
		  return -1;
		}
	      seen_unmatched_ref_p = true;
	    }
	}
      while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref2, 0))));

      /* BIT_FIELD_REF and VIEW_CONVERT_EXPR are taken off the vectors
	 earlier.  */
      gcc_checking_assert (TREE_CODE (ref1) == COMPONENT_REF
			   && TREE_CODE (ref2) == COMPONENT_REF);

      tree field1 = TREE_OPERAND (ref1, 1);
      tree field2 = TREE_OPERAND (ref2, 1);

      /* ??? We cannot simply use the type of operand #0 of the refs here
	 as the Fortran compiler smuggles type punning into COMPONENT_REFs
	 for common blocks instead of using unions like everyone else.  */
      tree type1 = DECL_CONTEXT (field1);
      tree type2 = DECL_CONTEXT (field2);

      partial_overlap = false;

      /* If we skipped array refs on types of different sizes, we can
	 no longer be sure that there are not partial overlaps.  */
      if (seen_unmatched_ref_p && ntbaa1 >= 0 && ntbaa2 >= 0
	  && !operand_equal_p (TYPE_SIZE (type1), TYPE_SIZE (type2), 0))
	{
	  ++alias_stats
	    .nonoverlapping_refs_since_match_p_may_alias;
	  return -1;
	}

      int cmp = nonoverlapping_component_refs_p_1 (field1, field2);
      if (cmp == -1)
	{
	  ++alias_stats
	    .nonoverlapping_refs_since_match_p_may_alias;
	  return -1;
	}
      else if (cmp == 1)
	{
	  ++alias_stats
	    .nonoverlapping_refs_since_match_p_no_alias;
	  return 1;
	}
    }

  ++alias_stats.nonoverlapping_refs_since_match_p_must_overlap;
  return 0;
}
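
/* Example (editor's addition): given
   `struct S { struct T { int c; int d; } b; } a;' with MATCH1 = MATCH2
   = `a', the walk above pairs the `.b' refs, then disambiguates `.c'
   against `.d' and returns 1; for two identical paths such as a.b.c
   vs a.b.c it returns 0 (must overlap).  */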
/* Return TYPE_UID which can be used to match record types we consider
   the same for TBAA purposes.  */

static inline int
ncr_type_uid (const_tree field)
{
  /* ??? We cannot simply use the type of operand #0 of the refs here
     as the Fortran compiler smuggles type punning into COMPONENT_REFs
     for common blocks instead of using unions like everyone else.  */
  tree type = DECL_FIELD_CONTEXT (field);
  /* With LTO, types considered same_type_for_tbaa_p
     from different translation units may not have the same
     main variant.  They however have the same TYPE_CANONICAL.  */
  if (TYPE_CANONICAL (type))
    return TYPE_UID (TYPE_CANONICAL (type));
  return TYPE_UID (type);
}

/* qsort compare function to sort FIELD_DECLs by their
   DECL_FIELD_CONTEXT TYPE_UID.  */

static inline int
ncr_compar (const void *field1_, const void *field2_)
{
  const_tree field1 = *(const_tree *) const_cast <void *>(field1_);
  const_tree field2 = *(const_tree *) const_cast <void *>(field2_);
  unsigned int uid1 = ncr_type_uid (field1);
  unsigned int uid2 = ncr_type_uid (field2);

  if (uid1 < uid2)
    return -1;
  else if (uid1 > uid2)
    return 1;
  return 0;
}
1824 /* Return true if we can determine that the fields referenced cannot
1825 overlap for any pair of objects. This relies on TBAA. */
1827 static bool
1828 nonoverlapping_component_refs_p (const_tree x, const_tree y)
1830 /* Early return if we have nothing to do.
1832 Do not count this as may-alias for the statistics - it is more useful
1833 to know how many disambiguations happened provided that
1834 the query was meaningful. */
1835 if (!flag_strict_aliasing
1836 || !x || !y
1837 || !handled_component_p (x)
1838 || !handled_component_p (y))
1839 return false;
1841 auto_vec<const_tree, 16> fieldsx;
1842 while (handled_component_p (x))
1844 if (TREE_CODE (x) == COMPONENT_REF)
1846 tree field = TREE_OPERAND (x, 1);
1847 tree type = DECL_FIELD_CONTEXT (field);
1848 if (TREE_CODE (type) == RECORD_TYPE)
1849 fieldsx.safe_push (field);
1851 else if (ends_tbaa_access_path_p (x))
1852 fieldsx.truncate (0);
1853 x = TREE_OPERAND (x, 0);
1855 if (fieldsx.length () == 0)
1856 return false;
1857 auto_vec<const_tree, 16> fieldsy;
1858 while (handled_component_p (y))
1860 if (TREE_CODE (y) == COMPONENT_REF)
1862 tree field = TREE_OPERAND (y, 1);
1863 tree type = DECL_FIELD_CONTEXT (field);
1864 if (TREE_CODE (type) == RECORD_TYPE)
1865 fieldsy.safe_push (field);
1867 else if (ends_tbaa_access_path_p (y))
1868 fieldsy.truncate (0);
1869 y = TREE_OPERAND (y, 0);
1871 if (fieldsy.length () == 0)
1873 ++alias_stats.nonoverlapping_component_refs_p_may_alias;
1874 return false;
1877 /* Most common case first. */
1878 if (fieldsx.length () == 1
1879 && fieldsy.length () == 1)
1881 if (same_type_for_tbaa (DECL_FIELD_CONTEXT (fieldsx[0]),
1882 DECL_FIELD_CONTEXT (fieldsy[0])) == 1
1883 && nonoverlapping_component_refs_p_1 (fieldsx[0], fieldsy[0]) == 1)
1885 ++alias_stats.nonoverlapping_component_refs_p_no_alias;
1886 return true;
1888 else
1890 ++alias_stats.nonoverlapping_component_refs_p_may_alias;
1891 return false;
1895 if (fieldsx.length () == 2)
1897 if (ncr_compar (&fieldsx[0], &fieldsx[1]) == 1)
1898 std::swap (fieldsx[0], fieldsx[1]);
1900 else
1901 fieldsx.qsort (ncr_compar);
1903 if (fieldsy.length () == 2)
1905 if (ncr_compar (&fieldsy[0], &fieldsy[1]) == 1)
1906 std::swap (fieldsy[0], fieldsy[1]);
1908 else
1909 fieldsy.qsort (ncr_compar);
1911 unsigned i = 0, j = 0;
1914 const_tree fieldx = fieldsx[i];
1915 const_tree fieldy = fieldsy[j];
1917 /* We're left with accessing different fields of a structure,
1918 no possible overlap. */
1919 if (same_type_for_tbaa (DECL_FIELD_CONTEXT (fieldx),
1920 DECL_FIELD_CONTEXT (fieldy)) == 1
1921 && nonoverlapping_component_refs_p_1 (fieldx, fieldy) == 1)
1923 ++alias_stats.nonoverlapping_component_refs_p_no_alias;
1924 return true;
1927 if (ncr_type_uid (fieldx) < ncr_type_uid (fieldy))
1929 i++;
1930 if (i == fieldsx.length ())
1931 break;
1933 else
1935 j++;
1936 if (j == fieldsy.length ())
1937 break;
1940 while (1);
1942 ++alias_stats.nonoverlapping_component_refs_p_may_alias;
1943 return false;
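/* An editorial example: for accesses that both contain a COMPONENT_REF
   of the same record type but through different fields, e.g.

     struct S { int u; int v; };
     ... p->u ... vs. ... q->v ...

   fieldsx holds S::u and fieldsy holds S::v; the merge over the sorted
   vectors pairs them up, same_type_for_tbaa agrees on struct S, and
   nonoverlapping_component_refs_p_1 proves the fields disjoint, so the
   accesses cannot overlap whatever p and q point to.  */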
1947 /* Return true if two memory references based on the variables BASE1
1948 and BASE2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
1949 [OFFSET2, OFFSET2 + MAX_SIZE2) may alias. REF1 and REF2
1950 if non-NULL are the complete memory reference trees. */
1952 static bool
1953 decl_refs_may_alias_p (tree ref1, tree base1,
1954 poly_int64 offset1, poly_int64 max_size1,
1955 poly_int64 size1,
1956 tree ref2, tree base2,
1957 poly_int64 offset2, poly_int64 max_size2,
1958 poly_int64 size2)
1960 gcc_checking_assert (DECL_P (base1) && DECL_P (base2));
1962 /* If the references are based on different variables, they cannot alias. */
1963 if (compare_base_decls (base1, base2) == 0)
1964 return false;
1966 /* If both references are based on the same variable, they cannot alias if
1967 the accesses do not overlap. */
1968 if (!ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
1969 return false;
1971 /* If there is a must-alias, there is no point in disambiguating further. */
1972 if (known_eq (size1, max_size1) && known_eq (size2, max_size2))
1973 return true;
1975 /* For components with variable position, the above test isn't sufficient,
1976 so we disambiguate component references manually. */
1977 if (ref1 && ref2
1978 && handled_component_p (ref1) && handled_component_p (ref2)
1979 && nonoverlapping_refs_since_match_p (NULL, ref1, NULL, ref2, false) == 1)
1980 return false;
1982 return true;
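/* A usage sketch (hypothetical caller; refs_may_alias_p_2 below is the
   real one): the decl-vs-decl case is reached after decomposing both
   references into base, offset and size:

     ao_ref r1, r2;
     ao_ref_init (&r1, ref1);
     ao_ref_init (&r2, ref2);
     tree b1 = ao_ref_base (&r1);
     tree b2 = ao_ref_base (&r2);
     bool alias = true;
     if (DECL_P (b1) && DECL_P (b2))
       alias = decl_refs_may_alias_p (r1.ref, b1, r1.offset, r1.max_size,
                                      r1.size, r2.ref, b2, r2.offset,
                                      r2.max_size, r2.size);

   Offsets and sizes are in bits, as set up by ao_ref_init.  */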
1985 /* Return true if the access with base BASE is view-converted.
1986 BASE must not have been stripped of its inner MEM_REF (&decl),
1987 as ao_ref_base does, and thus one extra walk
1988 of handled components is needed. */
1990 static bool
1991 view_converted_memref_p (tree base)
1993 if (TREE_CODE (base) != MEM_REF && TREE_CODE (base) != TARGET_MEM_REF)
1994 return false;
1995 return same_type_for_tbaa (TREE_TYPE (base),
1996 TREE_TYPE (TREE_TYPE (TREE_OPERAND (base, 1)))) != 1;
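/* For example (editorial): a scalar copy through a type pun such as

     float f;
     int i = VIEW_CONVERT_EXPR<int>(f);

   may be represented as a load from MEM[(float *)&f] whose value type
   is int; the access type then does not match the pointed-to type of
   the MEM_REF's pointer operand and the predicate returns true, which
   tells the oracle not to reason about the access structurally.  */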
1999 /* Return true if an indirect reference based on *PTR1 constrained
2000 to [OFFSET1, OFFSET1 + MAX_SIZE1) may alias a variable based on BASE2
2001 constrained to [OFFSET2, OFFSET2 + MAX_SIZE2). *PTR1 and BASE2 have
2002 the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
2003 in which case they are computed on-demand. REF1 and REF2
2004 if non-NULL are the complete memory reference trees. */
2006 static bool
2007 indirect_ref_may_alias_decl_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
2008 poly_int64 offset1, poly_int64 max_size1,
2009 poly_int64 size1,
2010 alias_set_type ref1_alias_set,
2011 alias_set_type base1_alias_set,
2012 tree ref2 ATTRIBUTE_UNUSED, tree base2,
2013 poly_int64 offset2, poly_int64 max_size2,
2014 poly_int64 size2,
2015 alias_set_type ref2_alias_set,
2016 alias_set_type base2_alias_set, bool tbaa_p)
2018 tree ptr1;
2019 tree ptrtype1, dbase2;
2021 gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
2022 || TREE_CODE (base1) == TARGET_MEM_REF)
2023 && DECL_P (base2));
2025 ptr1 = TREE_OPERAND (base1, 0);
2026 poly_offset_int moff = mem_ref_offset (base1) << LOG2_BITS_PER_UNIT;
2028 /* If only one reference is based on a variable, they cannot alias if
2029 the pointer access is beyond the extent of the variable access.
2030 (the pointer base cannot validly point to a negative offset
2031 within the variable).
2032 ??? IVOPTs creates bases that do not honor this restriction,
2033 so do not apply this optimization for TARGET_MEM_REFs. */
2034 if (TREE_CODE (base1) != TARGET_MEM_REF
2035 && !ranges_maybe_overlap_p (offset1 + moff, -1, offset2, max_size2))
2036 return false;
2037 /* They also cannot alias if the pointer may not point to the decl. */
2038 if (!ptr_deref_may_alias_decl_p (ptr1, base2))
2039 return false;
2041 /* Disambiguations that rely on strict aliasing rules follow. */
2042 if (!flag_strict_aliasing || !tbaa_p)
2043 return true;
2045 /* If the alias set for a pointer access is zero all bets are off. */
2046 if (base1_alias_set == 0 || base2_alias_set == 0)
2047 return true;
2049 /* When we are trying to disambiguate an access with a pointer dereference
2050 as base versus one with a decl as base we can use both the size
2051 of the decl and its dynamic type for extra disambiguation.
2052 ??? We do not know anything about the dynamic type of the decl
2053 other than that its alias-set contains base2_alias_set as a subset
2054 which does not help us here. */
2055 /* As we know nothing useful about the dynamic type of the decl, just
2056 use the usual conflict check rather than a subset test.
2057 ??? We could introduce -fvery-strict-aliasing when the language
2058 does not allow decls to have a dynamic type that differs from their
2059 static type. Then we can check
2060 !alias_set_subset_of (base1_alias_set, base2_alias_set) instead. */
2061 if (base1_alias_set != base2_alias_set
2062 && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
2063 return false;
2065 ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));
2067 /* If the size of the access relevant for TBAA through the pointer
2068 is bigger than the size of the decl we can't possibly access the
2069 decl via that pointer. */
2070 if (/* ??? This in turn may run afoul when a decl of type T which is
2071 a member of union type U is accessed through a pointer to
2072 type U and sizeof T is smaller than sizeof U. */
2073 TREE_CODE (TREE_TYPE (ptrtype1)) != UNION_TYPE
2074 && TREE_CODE (TREE_TYPE (ptrtype1)) != QUAL_UNION_TYPE
2075 && compare_sizes (DECL_SIZE (base2),
2076 TYPE_SIZE (TREE_TYPE (ptrtype1))) < 0)
2077 return false;
2079 if (!ref2)
2080 return true;
2082 /* If the decl is accessed via a MEM_REF, reconstruct the base
2083 we can use for TBAA and an appropriately adjusted offset. */
2084 dbase2 = ref2;
2085 while (handled_component_p (dbase2))
2086 dbase2 = TREE_OPERAND (dbase2, 0);
2087 poly_int64 doffset1 = offset1;
2088 poly_offset_int doffset2 = offset2;
2089 if (TREE_CODE (dbase2) == MEM_REF
2090 || TREE_CODE (dbase2) == TARGET_MEM_REF)
2092 doffset2 -= mem_ref_offset (dbase2) << LOG2_BITS_PER_UNIT;
2093 tree ptrtype2 = TREE_TYPE (TREE_OPERAND (dbase2, 1));
2094 /* If second reference is view-converted, give up now. */
2095 if (same_type_for_tbaa (TREE_TYPE (dbase2), TREE_TYPE (ptrtype2)) != 1)
2096 return true;
2099 /* If first reference is view-converted, give up now. */
2100 if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1)
2101 return true;
2103 /* If both references are through the same type, they do not alias
2104 if the accesses do not overlap. This does extra disambiguation
2105 for mixed/pointer accesses but requires strict aliasing.
2106 For MEM_REFs we require that the component-ref offset we computed
2107 is relative to the start of the type which we ensure by
2108 comparing rvalue and access type and disregarding the constant
2109 pointer offset.
2111 But avoid treating variable length arrays as "objects"; instead assume
2112 they can overlap by an exact multiple of their element size.
2113 See gcc.dg/torture/alias-2.c. */
2114 if (((TREE_CODE (base1) != TARGET_MEM_REF
2115 || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
2116 && (TREE_CODE (dbase2) != TARGET_MEM_REF
2117 || (!TMR_INDEX (dbase2) && !TMR_INDEX2 (dbase2))))
2118 && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (dbase2)) == 1)
2120 bool partial_overlap = (TREE_CODE (TREE_TYPE (base1)) == ARRAY_TYPE
2121 && (TYPE_SIZE (TREE_TYPE (base1))
2122 && TREE_CODE (TYPE_SIZE (TREE_TYPE (base1)))
2123 != INTEGER_CST));
2124 if (!partial_overlap
2125 && !ranges_maybe_overlap_p (doffset1, max_size1, doffset2, max_size2))
2126 return false;
2127 if (!ref1 || !ref2
2128 /* If there is a must-alias, there is no point in disambiguating further. */
2129 || (!partial_overlap
2130 && known_eq (size1, max_size1) && known_eq (size2, max_size2)))
2131 return true;
2132 int res = nonoverlapping_refs_since_match_p (base1, ref1, base2, ref2,
2133 partial_overlap);
2134 if (res == -1)
2135 return !nonoverlapping_component_refs_p (ref1, ref2);
2136 return !res;
2139 /* Do access-path based disambiguation. */
2140 if (ref1 && ref2
2141 && (handled_component_p (ref1) || handled_component_p (ref2)))
2142 return aliasing_component_refs_p (ref1,
2143 ref1_alias_set, base1_alias_set,
2144 offset1, max_size1,
2145 ref2,
2146 ref2_alias_set, base2_alias_set,
2147 offset2, max_size2);
2149 return true;
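/* A source-level illustration (editorial, assumes -fstrict-aliasing):

     int g;
     int f (float *p)
     {
       g = 1;
       *p = 0.0f;
       return g;
     }

   The store through p has a float-based alias set that does not
   conflict with the alias set of the int decl g, so the disambiguator
   above returns false and the final load of g can be forwarded from
   the constant 1.  */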
2152 /* Return true if two indirect references based on *PTR1
2153 and *PTR2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
2154 [OFFSET2, OFFSET2 + MAX_SIZE2) may alias. *PTR1 and *PTR2 have
2155 the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
2156 in which case they are computed on-demand. REF1 and REF2
2157 if non-NULL are the complete memory reference trees. */
2159 static bool
2160 indirect_refs_may_alias_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
2161 poly_int64 offset1, poly_int64 max_size1,
2162 poly_int64 size1,
2163 alias_set_type ref1_alias_set,
2164 alias_set_type base1_alias_set,
2165 tree ref2 ATTRIBUTE_UNUSED, tree base2,
2166 poly_int64 offset2, poly_int64 max_size2,
2167 poly_int64 size2,
2168 alias_set_type ref2_alias_set,
2169 alias_set_type base2_alias_set, bool tbaa_p)
2171 tree ptr1;
2172 tree ptr2;
2173 tree ptrtype1, ptrtype2;
2175 gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
2176 || TREE_CODE (base1) == TARGET_MEM_REF)
2177 && (TREE_CODE (base2) == MEM_REF
2178 || TREE_CODE (base2) == TARGET_MEM_REF));
2180 ptr1 = TREE_OPERAND (base1, 0);
2181 ptr2 = TREE_OPERAND (base2, 0);
2183 /* If both bases are based on pointers they cannot alias if they may not
2184 point to the same memory object or if they point to the same object
2185 and the accesses do not overlap. */
2186 if ((!cfun || gimple_in_ssa_p (cfun))
2187 && operand_equal_p (ptr1, ptr2, 0)
2188 && (((TREE_CODE (base1) != TARGET_MEM_REF
2189 || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
2190 && (TREE_CODE (base2) != TARGET_MEM_REF
2191 || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2))))
2192 || (TREE_CODE (base1) == TARGET_MEM_REF
2193 && TREE_CODE (base2) == TARGET_MEM_REF
2194 && (TMR_STEP (base1) == TMR_STEP (base2)
2195 || (TMR_STEP (base1) && TMR_STEP (base2)
2196 && operand_equal_p (TMR_STEP (base1),
2197 TMR_STEP (base2), 0)))
2198 && (TMR_INDEX (base1) == TMR_INDEX (base2)
2199 || (TMR_INDEX (base1) && TMR_INDEX (base2)
2200 && operand_equal_p (TMR_INDEX (base1),
2201 TMR_INDEX (base2), 0)))
2202 && (TMR_INDEX2 (base1) == TMR_INDEX2 (base2)
2203 || (TMR_INDEX2 (base1) && TMR_INDEX2 (base2)
2204 && operand_equal_p (TMR_INDEX2 (base1),
2205 TMR_INDEX2 (base2), 0))))))
2207 poly_offset_int moff1 = mem_ref_offset (base1) << LOG2_BITS_PER_UNIT;
2208 poly_offset_int moff2 = mem_ref_offset (base2) << LOG2_BITS_PER_UNIT;
2209 if (!ranges_maybe_overlap_p (offset1 + moff1, max_size1,
2210 offset2 + moff2, max_size2))
2211 return false;
2212 /* If there is a must-alias, there is no point in disambiguating further. */
2213 if (known_eq (size1, max_size1) && known_eq (size2, max_size2))
2214 return true;
2215 if (ref1 && ref2)
2217 int res = nonoverlapping_refs_since_match_p (NULL, ref1, NULL, ref2,
2218 false);
2219 if (res != -1)
2220 return !res;
2223 if (!ptr_derefs_may_alias_p (ptr1, ptr2))
2224 return false;
2226 /* Disambiguations that rely on strict aliasing rules follow. */
2227 if (!flag_strict_aliasing || !tbaa_p)
2228 return true;
2230 ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));
2231 ptrtype2 = TREE_TYPE (TREE_OPERAND (base2, 1));
2233 /* If the alias set for a pointer access is zero all bets are off. */
2234 if (base1_alias_set == 0
2235 || base2_alias_set == 0)
2236 return true;
2238 /* Do type-based disambiguation. */
2239 if (base1_alias_set != base2_alias_set
2240 && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
2241 return false;
2243 /* If either reference is view-converted, give up now. */
2244 if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1
2245 || same_type_for_tbaa (TREE_TYPE (base2), TREE_TYPE (ptrtype2)) != 1)
2246 return true;
2248 /* If both references are through the same type, they do not alias
2249 if the accesses do not overlap. This does extra disambiguation
2250 for mixed/pointer accesses but requires strict aliasing. */
2251 if ((TREE_CODE (base1) != TARGET_MEM_REF
2252 || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
2253 && (TREE_CODE (base2) != TARGET_MEM_REF
2254 || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2)))
2255 && same_type_for_tbaa (TREE_TYPE (ptrtype1),
2256 TREE_TYPE (ptrtype2)) == 1)
2258 /* But avoid treating arrays as "objects"; instead assume they
2259 can overlap by an exact multiple of their element size.
2260 See gcc.dg/torture/alias-2.c. */
2261 bool partial_overlap = TREE_CODE (TREE_TYPE (ptrtype1)) == ARRAY_TYPE;
2263 if (!partial_overlap
2264 && !ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
2265 return false;
2266 if (!ref1 || !ref2
2267 || (!partial_overlap
2268 && known_eq (size1, max_size1) && known_eq (size2, max_size2)))
2269 return true;
2270 int res = nonoverlapping_refs_since_match_p (base1, ref1, base2, ref2,
2271 partial_overlap);
2272 if (res == -1)
2273 return !nonoverlapping_component_refs_p (ref1, ref2);
2274 return !res;
2277 /* Do access-path based disambiguation. */
2278 if (ref1 && ref2
2279 && (handled_component_p (ref1) || handled_component_p (ref2)))
2280 return aliasing_component_refs_p (ref1,
2281 ref1_alias_set, base1_alias_set,
2282 offset1, max_size1,
2283 ref2,
2284 ref2_alias_set, base2_alias_set,
2285 offset2, max_size2);
2287 return true;
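/* An editorial illustration of the equal-base-pointer path above: for
   two MEM_REFs off the same SSA pointer, as in the GIMPLE

     MEM[(struct S *)p_1].x = ...;
     ... = MEM[(struct S *)p_1 + 8B].y;

   operand_equal_p matches p_1 against itself, the MEM_REF byte offsets
   are folded into the bit ranges, and disjointness of those ranges
   disambiguates the two accesses without consulting points-to
   information or TBAA.  */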
2290 /* Return true, if the two memory references REF1 and REF2 may alias. */
2292 static bool
2293 refs_may_alias_p_2 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
2295 tree base1, base2;
2296 poly_int64 offset1 = 0, offset2 = 0;
2297 poly_int64 max_size1 = -1, max_size2 = -1;
2298 bool var1_p, var2_p, ind1_p, ind2_p;
2300 gcc_checking_assert ((!ref1->ref
2301 || TREE_CODE (ref1->ref) == SSA_NAME
2302 || DECL_P (ref1->ref)
2303 || TREE_CODE (ref1->ref) == STRING_CST
2304 || handled_component_p (ref1->ref)
2305 || TREE_CODE (ref1->ref) == MEM_REF
2306 || TREE_CODE (ref1->ref) == TARGET_MEM_REF)
2307 && (!ref2->ref
2308 || TREE_CODE (ref2->ref) == SSA_NAME
2309 || DECL_P (ref2->ref)
2310 || TREE_CODE (ref2->ref) == STRING_CST
2311 || handled_component_p (ref2->ref)
2312 || TREE_CODE (ref2->ref) == MEM_REF
2313 || TREE_CODE (ref2->ref) == TARGET_MEM_REF));
2315 /* Decompose the references into their base objects and the access. */
2316 base1 = ao_ref_base (ref1);
2317 offset1 = ref1->offset;
2318 max_size1 = ref1->max_size;
2319 base2 = ao_ref_base (ref2);
2320 offset2 = ref2->offset;
2321 max_size2 = ref2->max_size;
2323 /* We can end up with registers or constants as bases for example from
2324 *D.1663_44 = VIEW_CONVERT_EXPR<struct DB_LSN>(__tmp$B0F64_59);
2325 which is seen as a struct copy. */
2326 if (TREE_CODE (base1) == SSA_NAME
2327 || TREE_CODE (base1) == CONST_DECL
2328 || TREE_CODE (base1) == CONSTRUCTOR
2329 || TREE_CODE (base1) == ADDR_EXPR
2330 || CONSTANT_CLASS_P (base1)
2331 || TREE_CODE (base2) == SSA_NAME
2332 || TREE_CODE (base2) == CONST_DECL
2333 || TREE_CODE (base2) == CONSTRUCTOR
2334 || TREE_CODE (base2) == ADDR_EXPR
2335 || CONSTANT_CLASS_P (base2))
2336 return false;
2338 /* We can end up referring to code via function and label decls.
2339 As we likely do not properly track code aliases, conservatively
2340 bail out. */
2341 if (TREE_CODE (base1) == FUNCTION_DECL
2342 || TREE_CODE (base1) == LABEL_DECL
2343 || TREE_CODE (base2) == FUNCTION_DECL
2344 || TREE_CODE (base2) == LABEL_DECL)
2345 return true;
2347 /* Two volatile accesses always conflict. */
2348 if (ref1->volatile_p
2349 && ref2->volatile_p)
2350 return true;
2352 /* Defer to simple offset-based disambiguation if we have
2353 references based on two decls. Do this before deferring to
2354 TBAA to handle must-alias cases in conformance with the
2355 GCC extension of allowing type-punning through unions. */
2356 var1_p = DECL_P (base1);
2357 var2_p = DECL_P (base2);
2358 if (var1_p && var2_p)
2359 return decl_refs_may_alias_p (ref1->ref, base1, offset1, max_size1,
2360 ref1->size,
2361 ref2->ref, base2, offset2, max_size2,
2362 ref2->size);
2364 /* Handle restrict based accesses.
2365 ??? ao_ref_base strips inner MEM_REF [&decl], recover from that
2366 here. */
2367 tree rbase1 = base1;
2368 tree rbase2 = base2;
2369 if (var1_p)
2371 rbase1 = ref1->ref;
2372 if (rbase1)
2373 while (handled_component_p (rbase1))
2374 rbase1 = TREE_OPERAND (rbase1, 0);
2376 if (var2_p)
2378 rbase2 = ref2->ref;
2379 if (rbase2)
2380 while (handled_component_p (rbase2))
2381 rbase2 = TREE_OPERAND (rbase2, 0);
2383 if (rbase1 && rbase2
2384 && (TREE_CODE (base1) == MEM_REF || TREE_CODE (base1) == TARGET_MEM_REF)
2385 && (TREE_CODE (base2) == MEM_REF || TREE_CODE (base2) == TARGET_MEM_REF)
2386 /* If the accesses are in the same restrict clique... */
2387 && MR_DEPENDENCE_CLIQUE (base1) == MR_DEPENDENCE_CLIQUE (base2)
2388 /* But based on different pointers they do not alias. */
2389 && MR_DEPENDENCE_BASE (base1) != MR_DEPENDENCE_BASE (base2))
2390 return false;
2392 ind1_p = (TREE_CODE (base1) == MEM_REF
2393 || TREE_CODE (base1) == TARGET_MEM_REF);
2394 ind2_p = (TREE_CODE (base2) == MEM_REF
2395 || TREE_CODE (base2) == TARGET_MEM_REF);
2397 /* Canonicalize the pointer-vs-decl case. */
2398 if (ind1_p && var2_p)
2400 std::swap (offset1, offset2);
2401 std::swap (max_size1, max_size2);
2402 std::swap (base1, base2);
2403 std::swap (ref1, ref2);
2404 var1_p = true;
2405 ind1_p = false;
2406 var2_p = false;
2407 ind2_p = true;
2410 /* First defer to TBAA if possible. */
2411 if (tbaa_p
2412 && flag_strict_aliasing
2413 && !alias_sets_conflict_p (ao_ref_alias_set (ref1),
2414 ao_ref_alias_set (ref2)))
2415 return false;
2417 /* If the reference is based on a pointer that points to memory
2418 that may not be written to, then the other reference cannot possibly
2419 clobber it. */
2420 if ((TREE_CODE (TREE_OPERAND (base2, 0)) == SSA_NAME
2421 && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base2, 0)))
2422 || (ind1_p
2423 && TREE_CODE (TREE_OPERAND (base1, 0)) == SSA_NAME
2424 && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base1, 0))))
2425 return false;
2427 /* Dispatch to the pointer-vs-decl or pointer-vs-pointer disambiguators. */
2428 if (var1_p && ind2_p)
2429 return indirect_ref_may_alias_decl_p (ref2->ref, base2,
2430 offset2, max_size2, ref2->size,
2431 ao_ref_alias_set (ref2),
2432 ao_ref_base_alias_set (ref2),
2433 ref1->ref, base1,
2434 offset1, max_size1, ref1->size,
2435 ao_ref_alias_set (ref1),
2436 ao_ref_base_alias_set (ref1),
2437 tbaa_p);
2438 else if (ind1_p && ind2_p)
2439 return indirect_refs_may_alias_p (ref1->ref, base1,
2440 offset1, max_size1, ref1->size,
2441 ao_ref_alias_set (ref1),
2442 ao_ref_base_alias_set (ref1),
2443 ref2->ref, base2,
2444 offset2, max_size2, ref2->size,
2445 ao_ref_alias_set (ref2),
2446 ao_ref_base_alias_set (ref2),
2447 tbaa_p);
2449 gcc_unreachable ();
2452 /* Return true, if the two memory references REF1 and REF2 may alias
2453 and update statistics. */
2455 bool
2456 refs_may_alias_p_1 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
2458 bool res = refs_may_alias_p_2 (ref1, ref2, tbaa_p);
2459 if (res)
2460 ++alias_stats.refs_may_alias_p_may_alias;
2461 else
2462 ++alias_stats.refs_may_alias_p_no_alias;
2463 return res;
2466 static bool
2467 refs_may_alias_p (tree ref1, ao_ref *ref2, bool tbaa_p)
2469 ao_ref r1;
2470 ao_ref_init (&r1, ref1);
2471 return refs_may_alias_p_1 (&r1, ref2, tbaa_p);
2474 bool
2475 refs_may_alias_p (tree ref1, tree ref2, bool tbaa_p)
2477 ao_ref r1, r2;
2478 ao_ref_init (&r1, ref1);
2479 ao_ref_init (&r2, ref2);
2480 return refs_may_alias_p_1 (&r1, &r2, tbaa_p);
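/* A usage sketch (hypothetical caller): a pass asking whether two
   GIMPLE reference trees may interfere before reordering them:

     if (!refs_may_alias_p (ref1, ref2, true))
       reorder ();

   where reorder is a stand-in for the transform. Pass false for
   TBAA_P when the transform must stay valid across type punning, as
   the dependence helpers below do.  */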
2483 /* Returns true if there is an anti-dependence for the STORE that
2484 executes after the LOAD. */
2486 bool
2487 refs_anti_dependent_p (tree load, tree store)
2489 ao_ref r1, r2;
2490 ao_ref_init (&r1, load);
2491 ao_ref_init (&r2, store);
2492 return refs_may_alias_p_1 (&r1, &r2, false);
2495 /* Returns true if there is an output dependence between the stores
2496 STORE1 and STORE2. */
2498 bool
2499 refs_output_dependent_p (tree store1, tree store2)
2501 ao_ref r1, r2;
2502 ao_ref_init (&r1, store1);
2503 ao_ref_init (&r2, store2);
2504 return refs_may_alias_p_1 (&r1, &r2, false);
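/* An editorial sketch of how the two dependence helpers are meant to
   be used when reordering memory statements; load, store, store1 and
   store2 are hypothetical reference trees taken from the statements
   involved:

     bool load_store_ok = !refs_anti_dependent_p (load, store);
     bool store_store_ok = !refs_output_dependent_p (store1, store2);

   Both wrappers deliberately pass TBAA_P = false so that the answer
   remains correct even for type-punned accesses.  */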
2507 /* Returns true if and only if REF may alias any access stored in TT.
2508 If TBAA_P is true, use the TBAA oracle. */
2510 static bool
2511 modref_may_conflict (const gimple *stmt,
2512 modref_tree <alias_set_type> *tt, ao_ref *ref, bool tbaa_p)
2514 alias_set_type base_set, ref_set;
2515 modref_base_node <alias_set_type> *base_node;
2516 modref_ref_node <alias_set_type> *ref_node;
2517 size_t i, j, k;
2519 if (tt->every_base)
2520 return true;
2522 if (!dbg_cnt (ipa_mod_ref))
2523 return true;
2525 base_set = ao_ref_base_alias_set (ref);
2527 ref_set = ao_ref_alias_set (ref);
2529 int num_tests = 0, max_tests = param_modref_max_tests;
2530 FOR_EACH_VEC_SAFE_ELT (tt->bases, i, base_node)
2532 if (tbaa_p && flag_strict_aliasing)
2534 if (num_tests >= max_tests)
2535 return true;
2536 alias_stats.modref_tests++;
2537 if (!alias_sets_conflict_p (base_set, base_node->base))
2538 continue;
2539 num_tests++;
2542 if (base_node->every_ref)
2543 return true;
2545 FOR_EACH_VEC_SAFE_ELT (base_node->refs, j, ref_node)
2547 /* Do not repeat same test as before. */
2548 if ((ref_set != base_set || base_node->base != ref_node->ref)
2549 && tbaa_p && flag_strict_aliasing)
2551 if (num_tests >= max_tests)
2552 return true;
2553 alias_stats.modref_tests++;
2554 if (!alias_sets_conflict_p (ref_set, ref_node->ref))
2555 continue;
2556 num_tests++;
2559 /* The TBAA checks did not disambiguate; try to use the base pointer,
2560 for which, however, we need ref->ref or ref->base. */
2561 if (ref_node->every_access || (!ref->ref && !ref->base))
2562 return true;
2564 modref_access_node *access_node;
2565 FOR_EACH_VEC_SAFE_ELT (ref_node->accesses, k, access_node)
2567 if (num_tests >= max_tests)
2568 return true;
2570 if (access_node->parm_index == -1
2571 || (unsigned)access_node->parm_index
2572 >= gimple_call_num_args (stmt))
2573 return true;
2575 alias_stats.modref_baseptr_tests++;
2577 tree arg = gimple_call_arg (stmt, access_node->parm_index);
2579 if (integer_zerop (arg) && flag_delete_null_pointer_checks)
2580 continue;
2582 if (!POINTER_TYPE_P (TREE_TYPE (arg)))
2583 return true;
2585 /* ao_ref_init_from_ptr_and_range assumes that the memory access
2586 starts at the pointed-to location. If we did not track the
2587 offset it is possible that it starts before the actual
2588 pointer. */
2589 if (!access_node->parm_offset_known)
2591 if (ptr_deref_may_alias_ref_p_1 (arg, ref))
2592 return true;
2594 else
2596 ao_ref ref2;
2597 poly_offset_int off = (poly_offset_int)access_node->offset
2598 + ((poly_offset_int)access_node->parm_offset
2599 << LOG2_BITS_PER_UNIT);
2600 poly_int64 off2;
2601 if (off.to_shwi (&off2))
2603 ao_ref_init_from_ptr_and_range
2604 (&ref2, arg, true, off2,
2605 access_node->size,
2606 access_node->max_size);
2607 ref2.ref_alias_set = ref_set;
2608 ref2.base_alias_set = base_set;
2609 if (refs_may_alias_p_1 (&ref2, ref, tbaa_p))
2610 return true;
2612 else if (ptr_deref_may_alias_ref_p_1 (arg, ref))
2613 return true;
2615 num_tests++;
2619 return false;
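/* An editorial sketch of the summary shape consulted above: a modref
   tree is a trie of alias sets with per-parameter access ranges,

     tt->bases[i]->base                  base alias set
     tt->bases[i]->refs[j]->ref          ref alias set
     tt->bases[i]->refs[j]->accesses[k]  parm_index, parm_offset,
                                         offset, size, max_size

   so for a callee resembling

     void set (struct S *s) { s->x = 1; }

   the stores summary would hold one base/ref pair for the access to
   s->x and one access node with parm_index 0, allowing a call set (p)
   to be disambiguated against references unreachable from p.  */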
2622 /* Check if REF conflicts with call using "fn spec" attribute.
2623 If CLOBBER is true we are checking for writes, otherwise check loads.
2625 Return 0 if there are no conflicts (except for possible function call
2626 argument reads), 1 if there are conflicts and -1 if we cannot decide
2627 from the fn spec. */
2629 static int
2630 check_fnspec (gcall *call, ao_ref *ref, bool clobber)
2632 attr_fnspec fnspec = gimple_call_fnspec (call);
2633 if (fnspec.known_p ())
2635 if (clobber
2636 ? !fnspec.global_memory_written_p ()
2637 : !fnspec.global_memory_read_p ())
2639 for (unsigned int i = 0; i < gimple_call_num_args (call); i++)
2640 if (POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, i)))
2641 && (!fnspec.arg_specified_p (i)
2642 || (clobber ? fnspec.arg_maybe_written_p (i)
2643 : fnspec.arg_maybe_read_p (i))))
2645 ao_ref dref;
2646 tree size = NULL_TREE;
2647 unsigned int size_arg;
2649 if (!fnspec.arg_specified_p (i))
2651 else if (fnspec.arg_max_access_size_given_by_arg_p
2652 (i, &size_arg))
2653 size = gimple_call_arg (call, size_arg);
2654 else if (fnspec.arg_access_size_given_by_type_p (i))
2656 tree callee = gimple_call_fndecl (call);
2657 tree t = TYPE_ARG_TYPES (TREE_TYPE (callee));
2659 for (unsigned int p = 0; p < i; p++)
2660 t = TREE_CHAIN (t);
2661 size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_VALUE (t)));
2663 ao_ref_init_from_ptr_and_size (&dref,
2664 gimple_call_arg (call, i),
2665 size);
2666 if (refs_may_alias_p_1 (&dref, ref, false))
2667 return 1;
2669 if (clobber
2670 && fnspec.errno_maybe_written_p ()
2671 && flag_errno_math
2672 && targetm.ref_may_alias_errno (ref))
2673 return 1;
2674 return 0;
2678 /* FIXME: we should handle barriers more consistently, but for now leave the
2679 check here. */
2680 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2681 switch (DECL_FUNCTION_CODE (gimple_call_fndecl (call)))
2683 /* __sync_* builtins and some OpenMP builtins act as threading
2684 barriers. */
2685 #undef DEF_SYNC_BUILTIN
2686 #define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
2687 #include "sync-builtins.def"
2688 #undef DEF_SYNC_BUILTIN
2689 case BUILT_IN_GOMP_ATOMIC_START:
2690 case BUILT_IN_GOMP_ATOMIC_END:
2691 case BUILT_IN_GOMP_BARRIER:
2692 case BUILT_IN_GOMP_BARRIER_CANCEL:
2693 case BUILT_IN_GOMP_TASKWAIT:
2694 case BUILT_IN_GOMP_TASKGROUP_END:
2695 case BUILT_IN_GOMP_CRITICAL_START:
2696 case BUILT_IN_GOMP_CRITICAL_END:
2697 case BUILT_IN_GOMP_CRITICAL_NAME_START:
2698 case BUILT_IN_GOMP_CRITICAL_NAME_END:
2699 case BUILT_IN_GOMP_LOOP_END:
2700 case BUILT_IN_GOMP_LOOP_END_CANCEL:
2701 case BUILT_IN_GOMP_ORDERED_START:
2702 case BUILT_IN_GOMP_ORDERED_END:
2703 case BUILT_IN_GOMP_SECTIONS_END:
2704 case BUILT_IN_GOMP_SECTIONS_END_CANCEL:
2705 case BUILT_IN_GOMP_SINGLE_COPY_START:
2706 case BUILT_IN_GOMP_SINGLE_COPY_END:
2707 return 1;
2709 default:
2710 return -1;
2712 return -1;
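/* A usage sketch of the tri-state result (the callers below follow
   this pattern; the three callees here are hypothetical stand-ins):

     int res = check_fnspec (call, ref, false);
     if (res == 0)
       handle_no_conflict ();
     else if (res == 1)
       handle_conflict ();
     else
       fall_back_to_points_to_analysis ();
*/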
2715 /* If the call CALL may use the memory reference REF, return true;
2716 otherwise return false. */
2718 static bool
2719 ref_maybe_used_by_call_p_1 (gcall *call, ao_ref *ref, bool tbaa_p)
2721 tree base, callee;
2722 unsigned i;
2723 int flags = gimple_call_flags (call);
2725 /* Const functions without a static chain do not implicitly use memory. */
2726 if (!gimple_call_chain (call)
2727 && (flags & (ECF_CONST|ECF_NOVOPS)))
2728 goto process_args;
2730 /* A call that is not without side-effects might involve volatile
2731 accesses and thus conflicts with all other volatile accesses. */
2732 if (ref->volatile_p)
2733 return true;
2735 callee = gimple_call_fndecl (call);
2737 if (!gimple_call_chain (call) && callee != NULL_TREE)
2739 struct cgraph_node *node = cgraph_node::get (callee);
2740 /* We cannot safely optimize based on the summary of the callee if it
2741 does not always bind to the current def: it is possible that a memory
2742 load was optimized out earlier and the interposed variant may not be
2743 optimized this way. */
2744 if (node && node->binds_to_current_def_p ())
2746 modref_summary *summary = get_modref_function_summary (node);
2747 if (summary)
2749 if (!modref_may_conflict (call, summary->loads, ref, tbaa_p))
2751 alias_stats.modref_use_no_alias++;
2752 if (dump_file && (dump_flags & TDF_DETAILS))
2754 fprintf (dump_file,
2755 "ipa-modref: call stmt ");
2756 print_gimple_stmt (dump_file, call, 0);
2757 fprintf (dump_file,
2758 "ipa-modref: call to %s does not use ",
2759 node->dump_name ());
2760 if (!ref->ref && ref->base)
2762 fprintf (dump_file, "base: ");
2763 print_generic_expr (dump_file, ref->base);
2765 else if (ref->ref)
2767 fprintf (dump_file, "ref: ");
2768 print_generic_expr (dump_file, ref->ref);
2770 fprintf (dump_file, " alias sets: %i->%i\n",
2771 ao_ref_base_alias_set (ref),
2772 ao_ref_alias_set (ref));
2774 goto process_args;
2776 alias_stats.modref_use_may_alias++;
2781 base = ao_ref_base (ref);
2782 if (!base)
2783 return true;
2785 /* If the reference is based on a decl that is not aliased the call
2786 cannot possibly use it. */
2787 if (DECL_P (base)
2788 && !may_be_aliased (base)
2789 /* But local statics can be used through recursion. */
2790 && !is_global_var (base))
2791 goto process_args;
2793 if (int res = check_fnspec (call, ref, false))
2795 if (res == 1)
2796 return true;
2798 else
2799 goto process_args;
2801 /* Check if base is a global static variable that is not read
2802 by the function. */
2803 if (callee != NULL_TREE && VAR_P (base) && TREE_STATIC (base))
2805 struct cgraph_node *node = cgraph_node::get (callee);
2806 bitmap read;
2807 int id;
2809 /* FIXME: Callee can be an OMP builtin that does not have a call graph
2810 node yet. We should enforce that there are nodes for all decls in the
2811 IL and remove this check instead. */
2812 if (node
2813 && (id = ipa_reference_var_uid (base)) != -1
2814 && (read = ipa_reference_get_read_global (node))
2815 && !bitmap_bit_p (read, id))
2816 goto process_args;
2819 /* Check if the base variable is call-used. */
2820 if (DECL_P (base))
2822 if (pt_solution_includes (gimple_call_use_set (call), base))
2823 return true;
2825 else if ((TREE_CODE (base) == MEM_REF
2826 || TREE_CODE (base) == TARGET_MEM_REF)
2827 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
2829 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
2830 if (!pi)
2831 return true;
2833 if (pt_solutions_intersect (gimple_call_use_set (call), &pi->pt))
2834 return true;
2836 else
2837 return true;
2839 /* Inspect call arguments for passed-by-value aliases. */
2840 process_args:
2841 for (i = 0; i < gimple_call_num_args (call); ++i)
2843 tree op = gimple_call_arg (call, i);
2844 int flags = gimple_call_arg_flags (call, i);
2846 if (flags & EAF_UNUSED)
2847 continue;
2849 if (TREE_CODE (op) == WITH_SIZE_EXPR)
2850 op = TREE_OPERAND (op, 0);
2852 if (TREE_CODE (op) != SSA_NAME
2853 && !is_gimple_min_invariant (op))
2855 ao_ref r;
2856 ao_ref_init (&r, op);
2857 if (refs_may_alias_p_1 (&r, ref, tbaa_p))
2858 return true;
2862 return false;
2865 static bool
2866 ref_maybe_used_by_call_p (gcall *call, ao_ref *ref, bool tbaa_p)
2868 bool res;
2869 res = ref_maybe_used_by_call_p_1 (call, ref, tbaa_p);
2870 if (res)
2871 ++alias_stats.ref_maybe_used_by_call_p_may_alias;
2872 else
2873 ++alias_stats.ref_maybe_used_by_call_p_no_alias;
2874 return res;
2878 /* If the statement STMT may use the memory reference REF, return
2879 true; otherwise return false. */
2881 bool
2882 ref_maybe_used_by_stmt_p (gimple *stmt, ao_ref *ref, bool tbaa_p)
2884 if (is_gimple_assign (stmt))
2886 tree rhs;
2888 /* All assignments that touch memory are single assignments. */
2889 if (!gimple_assign_single_p (stmt))
2890 return false;
2892 rhs = gimple_assign_rhs1 (stmt);
2893 if (is_gimple_reg (rhs)
2894 || is_gimple_min_invariant (rhs)
2895 || gimple_assign_rhs_code (stmt) == CONSTRUCTOR)
2896 return false;
2898 return refs_may_alias_p (rhs, ref, tbaa_p);
2900 else if (is_gimple_call (stmt))
2901 return ref_maybe_used_by_call_p (as_a <gcall *> (stmt), ref, tbaa_p);
2902 else if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
2904 tree retval = gimple_return_retval (return_stmt);
2905 if (retval
2906 && TREE_CODE (retval) != SSA_NAME
2907 && !is_gimple_min_invariant (retval)
2908 && refs_may_alias_p (retval, ref, tbaa_p))
2909 return true;
2910 /* If ref escapes the function then the return acts as a use. */
2911 tree base = ao_ref_base (ref);
2912 if (!base)
2914 else if (DECL_P (base))
2915 return is_global_var (base);
2916 else if (TREE_CODE (base) == MEM_REF
2917 || TREE_CODE (base) == TARGET_MEM_REF)
2918 return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
2919 return false;
2922 return true;
2925 bool
2926 ref_maybe_used_by_stmt_p (gimple *stmt, tree ref, bool tbaa_p)
2928 ao_ref r;
2929 ao_ref_init (&r, ref);
2930 return ref_maybe_used_by_stmt_p (stmt, &r, tbaa_p);
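/* A usage sketch (hypothetical caller): before sinking a store past
   STMT one must know that STMT does not read the stored location:

     ao_ref storeref;
     ao_ref_init (&storeref, gimple_assign_lhs (store_stmt));
     bool reads = ref_maybe_used_by_stmt_p (stmt, &storeref, false);

   with store_stmt and stmt hypothetical statements; passing false for
   TBAA_P gives the conservative answer the dependence helpers above
   also use.  */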
2933 /* If the call in statement CALL may clobber the memory reference REF,
2934 return true; otherwise return false. */
2936 bool
2937 call_may_clobber_ref_p_1 (gcall *call, ao_ref *ref, bool tbaa_p)
2939 tree base;
2940 tree callee;
2942 /* If the call is pure or const it cannot clobber anything. */
2943 if (gimple_call_flags (call)
2944 & (ECF_PURE|ECF_CONST|ECF_LOOPING_CONST_OR_PURE|ECF_NOVOPS))
2945 return false;
2946 if (gimple_call_internal_p (call))
2947 switch (gimple_call_internal_fn (call))
2949 /* Treat these internal calls like ECF_PURE for aliasing: they don't
2950 write to any memory the program should care about.
2951 They have important other side-effects, and read memory,
2952 so can't be ECF_NOVOPS. */
2953 case IFN_UBSAN_NULL:
2954 case IFN_UBSAN_BOUNDS:
2955 case IFN_UBSAN_VPTR:
2956 case IFN_UBSAN_OBJECT_SIZE:
2957 case IFN_UBSAN_PTR:
2958 case IFN_ASAN_CHECK:
2959 return false;
2960 default:
2961 break;
2964 callee = gimple_call_fndecl (call);
2966 if (callee != NULL_TREE && !ref->volatile_p)
2968 struct cgraph_node *node = cgraph_node::get (callee);
2969 if (node)
2971 modref_summary *summary = get_modref_function_summary (node);
2972 if (summary)
2974 if (!modref_may_conflict (call, summary->stores, ref, tbaa_p)
2975 && (!summary->writes_errno
2976 || !targetm.ref_may_alias_errno (ref)))
2978 alias_stats.modref_clobber_no_alias++;
2979 if (dump_file && (dump_flags & TDF_DETAILS))
2981 fprintf (dump_file,
2982 "ipa-modref: call stmt ");
2983 print_gimple_stmt (dump_file, call, 0);
2984 fprintf (dump_file,
2985 "ipa-modref: call to %s does not clobber ",
2986 node->dump_name ());
2987 if (!ref->ref && ref->base)
2989 fprintf (dump_file, "base: ");
2990 print_generic_expr (dump_file, ref->base);
2992 else if (ref->ref)
2994 fprintf (dump_file, "ref: ");
2995 print_generic_expr (dump_file, ref->ref);
2997 fprintf (dump_file, " alias sets: %i->%i\n",
2998 ao_ref_base_alias_set (ref),
2999 ao_ref_alias_set (ref));
3001 return false;
3003 alias_stats.modref_clobber_may_alias++;
3008 base = ao_ref_base (ref);
3009 if (!base)
3010 return true;
3012 if (TREE_CODE (base) == SSA_NAME
3013 || CONSTANT_CLASS_P (base))
3014 return false;
3016 /* A call that is not without side-effects might involve volatile
3017 accesses and thus conflicts with all other volatile accesses. */
3018 if (ref->volatile_p)
3019 return true;
3021 /* If the reference is based on a decl that is not aliased the call
3022 cannot possibly clobber it. */
3023 if (DECL_P (base)
3024 && !may_be_aliased (base)
3025 /* But local non-readonly statics can be modified through recursion
3026 or the call may implement a threading barrier, which we must
3027 treat as a may-def. */
3028 && (TREE_READONLY (base)
3029 || !is_global_var (base)))
3030 return false;
3032 /* If the reference is based on a pointer that points to memory
3033 that may not be written to, then the call cannot possibly clobber it. */
3034 if ((TREE_CODE (base) == MEM_REF
3035 || TREE_CODE (base) == TARGET_MEM_REF)
3036 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
3037 && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base, 0)))
3038 return false;
3040 if (int res = check_fnspec (call, ref, true))
3042 if (res == 1)
3043 return true;
3045 else
3046 return false;
3048 /* Check if base is a global static variable that is not written
3049 by the function. */
3050 if (callee != NULL_TREE && VAR_P (base) && TREE_STATIC (base))
3052 struct cgraph_node *node = cgraph_node::get (callee);
3053 bitmap written;
3054 int id;
3056 if (node
3057 && (id = ipa_reference_var_uid (base)) != -1
3058 && (written = ipa_reference_get_written_global (node))
3059 && !bitmap_bit_p (written, id))
3060 return false;
3063 /* Check if the base variable is call-clobbered. */
3064 if (DECL_P (base))
3065 return pt_solution_includes (gimple_call_clobber_set (call), base);
3066 else if ((TREE_CODE (base) == MEM_REF
3067 || TREE_CODE (base) == TARGET_MEM_REF)
3068 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
3070 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
3071 if (!pi)
3072 return true;
3074 return pt_solutions_intersect (gimple_call_clobber_set (call), &pi->pt);
3077 return true;
3080 /* If the call in statement CALL may clobber the memory reference REF,
3081 return true; otherwise return false. */
3083 bool
3084 call_may_clobber_ref_p (gcall *call, tree ref, bool tbaa_p)
3086 bool res;
3087 ao_ref r;
3088 ao_ref_init (&r, ref);
3089 res = call_may_clobber_ref_p_1 (call, &r, tbaa_p);
3090 if (res)
3091 ++alias_stats.call_may_clobber_ref_p_may_alias;
3092 else
3093 ++alias_stats.call_may_clobber_ref_p_no_alias;
3094 return res;
3098 /* If the statement STMT may clobber the memory reference REF, return true;
3099 otherwise return false. */
3101 bool
3102 stmt_may_clobber_ref_p_1 (gimple *stmt, ao_ref *ref, bool tbaa_p)
3104 if (is_gimple_call (stmt))
3106 tree lhs = gimple_call_lhs (stmt);
3107 if (lhs
3108 && TREE_CODE (lhs) != SSA_NAME)
3110 ao_ref r;
3111 ao_ref_init (&r, lhs);
3112 if (refs_may_alias_p_1 (ref, &r, tbaa_p))
3113 return true;
3116 return call_may_clobber_ref_p_1 (as_a <gcall *> (stmt), ref, tbaa_p);
3118 else if (gimple_assign_single_p (stmt))
3120 tree lhs = gimple_assign_lhs (stmt);
3121 if (TREE_CODE (lhs) != SSA_NAME)
3123 ao_ref r;
3124 ao_ref_init (&r, lhs);
3125 return refs_may_alias_p_1 (ref, &r, tbaa_p);
3128 else if (gimple_code (stmt) == GIMPLE_ASM)
3129 return true;
3131 return false;
3134 bool
3135 stmt_may_clobber_ref_p (gimple *stmt, tree ref, bool tbaa_p)
3137 ao_ref r;
3138 ao_ref_init (&r, ref);
3139 return stmt_may_clobber_ref_p_1 (stmt, &r, tbaa_p);
3142 /* Return true if store1 and store2 described by corresponding tuples
3143 <BASE, OFFSET, SIZE, MAX_SIZE> have the same size and store to the same
3144 address. */
3146 static bool
3147 same_addr_size_stores_p (tree base1, poly_int64 offset1, poly_int64 size1,
3148 poly_int64 max_size1,
3149 tree base2, poly_int64 offset2, poly_int64 size2,
3150 poly_int64 max_size2)
3152 /* Offsets need to be 0. */
3153 if (maybe_ne (offset1, 0)
3154 || maybe_ne (offset2, 0))
3155 return false;
3157 bool base1_obj_p = SSA_VAR_P (base1);
3158 bool base2_obj_p = SSA_VAR_P (base2);
3160 /* We need one object. */
3161 if (base1_obj_p == base2_obj_p)
3162 return false;
3163 tree obj = base1_obj_p ? base1 : base2;
3165 /* And we need one MEM_REF. */
3166 bool base1_memref_p = TREE_CODE (base1) == MEM_REF;
3167 bool base2_memref_p = TREE_CODE (base2) == MEM_REF;
3168 if (base1_memref_p == base2_memref_p)
3169 return false;
3170 tree memref = base1_memref_p ? base1 : base2;
3172 /* Sizes need to be valid. */
3173 if (!known_size_p (max_size1)
3174 || !known_size_p (max_size2)
3175 || !known_size_p (size1)
3176 || !known_size_p (size2))
3177 return false;
3179 /* Max_size needs to match size. */
3180 if (maybe_ne (max_size1, size1)
3181 || maybe_ne (max_size2, size2))
3182 return false;
3184 /* Sizes need to match. */
3185 if (maybe_ne (size1, size2))
3186 return false;
3189 /* Check that memref is a store through a pointer with singleton points-to info. */
3190 if (!integer_zerop (TREE_OPERAND (memref, 1)))
3191 return false;
3192 tree ptr = TREE_OPERAND (memref, 0);
3193 if (TREE_CODE (ptr) != SSA_NAME)
3194 return false;
3195 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
3196 unsigned int pt_uid;
3197 if (pi == NULL
3198 || !pt_solution_singleton_or_null_p (&pi->pt, &pt_uid))
3199 return false;
3201 /* Be conservative with non-call exceptions when the address might
3202 be NULL. */
3203 if (cfun->can_throw_non_call_exceptions && pi->pt.null)
3204 return false;
3206 /* Check that PTR points into OBJ. */
3207 unsigned int obj_uid = DECL_PT_UID (obj);
3208 if (obj_uid != pt_uid)
3209 return false;
3211 /* Check that the object size is the same as the store size. That ensures
3212 that PTR points to the start of OBJ. */
3213 return (DECL_SIZE (obj)
3214 && poly_int_tree_p (DECL_SIZE (obj))
3215 && known_eq (wi::to_poly_offset (DECL_SIZE (obj)), size1));
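/* An editorial illustration: the points-to based equality above fires
   for GIMPLE like

     struct S s;
     MEM[(struct S *)p_1] = x_2;
     s = y_3;

   when the points-to solution of p_1 is the singleton {s} (and not
   NULL if non-call exceptions are live), both offsets are zero and
   all four sizes equal the size of s: the two stores then write
   exactly the same bytes even though their bases differ
   syntactically.  */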
3218 /* If STMT kills the memory reference REF, return true; otherwise
3219 return false. */
3221 bool
3222 stmt_kills_ref_p (gimple *stmt, ao_ref *ref)
3224 if (!ao_ref_base (ref))
3225 return false;
3227 if (gimple_has_lhs (stmt)
3228 && TREE_CODE (gimple_get_lhs (stmt)) != SSA_NAME
3229 /* The assignment is not necessarily carried out if it can throw
3230 and we can catch it in the current function where we could inspect
3231 the previous value.
3232 ??? We only need to care about the RHS throwing. For aggregate
3233 assignments or similar calls and non-call exceptions the LHS
3234 might throw as well. */
3235 && !stmt_can_throw_internal (cfun, stmt))
3237 tree lhs = gimple_get_lhs (stmt);
3238 /* If LHS is literally a base of the access we are done. */
3239 if (ref->ref)
3241 tree base = ref->ref;
3242 tree innermost_dropped_array_ref = NULL_TREE;
3243 if (handled_component_p (base))
3245 tree saved_lhs0 = NULL_TREE;
3246 if (handled_component_p (lhs))
3248 saved_lhs0 = TREE_OPERAND (lhs, 0);
3249 TREE_OPERAND (lhs, 0) = integer_zero_node;
3253 /* Just compare the outermost handled component; if
3254 they are equal we have found a possible common
3255 base. */
3256 tree saved_base0 = TREE_OPERAND (base, 0);
3257 TREE_OPERAND (base, 0) = integer_zero_node;
3258 bool res = operand_equal_p (lhs, base, 0);
3259 TREE_OPERAND (base, 0) = saved_base0;
3260 if (res)
3261 break;
3262 /* Remember if we drop an array-ref that we need to
3263 double-check not being at struct end. */
3264 if (TREE_CODE (base) == ARRAY_REF
3265 || TREE_CODE (base) == ARRAY_RANGE_REF)
3266 innermost_dropped_array_ref = base;
3267 /* Otherwise drop handled components of the access. */
3268 base = saved_base0;
3270 while (handled_component_p (base));
3271 if (saved_lhs0)
3272 TREE_OPERAND (lhs, 0) = saved_lhs0;
3274 /* Finally check if the lhs has the same address and size as the
3275 base candidate of the access. Watch out if we have dropped
3276 an array-ref that was at struct end; this means ref->ref may
3277 be outside of the TYPE_SIZE of its base. */
3278 if ((! innermost_dropped_array_ref
3279 || ! array_at_struct_end_p (innermost_dropped_array_ref))
3280 && (lhs == base
3281 || (((TYPE_SIZE (TREE_TYPE (lhs))
3282 == TYPE_SIZE (TREE_TYPE (base)))
3283 || (TYPE_SIZE (TREE_TYPE (lhs))
3284 && TYPE_SIZE (TREE_TYPE (base))
3285 && operand_equal_p (TYPE_SIZE (TREE_TYPE (lhs)),
3286 TYPE_SIZE (TREE_TYPE (base)),
3287 0)))
3288 && operand_equal_p (lhs, base,
3289 OEP_ADDRESS_OF
3290 | OEP_MATCH_SIDE_EFFECTS))))
3291 return true;
3294 /* Now look for non-literal equal bases with the restriction of
3295 handling constant offset and size. */
3296 /* For a must-alias check we need to be able to constrain
3297 the access properly. */
3298 if (!ref->max_size_known_p ())
3299 return false;
3300 poly_int64 size, offset, max_size, ref_offset = ref->offset;
3301 bool reverse;
3302 tree base = get_ref_base_and_extent (lhs, &offset, &size, &max_size,
3303 &reverse);
3304 /* We can get MEM[symbol: sZ, index: D.8862_1] here,
3305 so base == ref->base does not always hold. */
3306 if (base != ref->base)
3308 /* Try using points-to info. */
3309 if (same_addr_size_stores_p (base, offset, size, max_size, ref->base,
3310 ref->offset, ref->size, ref->max_size))
3311 return true;
3313 /* If both base and ref->base are MEM_REFs, only compare the
3314 first operand, and if the second operand isn't equal constant,
3315 try to add the offsets into offset and ref_offset. */
3316 if (TREE_CODE (base) == MEM_REF && TREE_CODE (ref->base) == MEM_REF
3317 && TREE_OPERAND (base, 0) == TREE_OPERAND (ref->base, 0))
3319 if (!tree_int_cst_equal (TREE_OPERAND (base, 1),
3320 TREE_OPERAND (ref->base, 1)))
3322 poly_offset_int off1 = mem_ref_offset (base);
3323 off1 <<= LOG2_BITS_PER_UNIT;
3324 off1 += offset;
3325 poly_offset_int off2 = mem_ref_offset (ref->base);
3326 off2 <<= LOG2_BITS_PER_UNIT;
3327 off2 += ref_offset;
3328 if (!off1.to_shwi (&offset) || !off2.to_shwi (&ref_offset))
3329 size = -1;
3332 else
3333 size = -1;
3335 /* For a must-alias check we need to be able to constrain
3336 the access properly. */
3337 if (known_eq (size, max_size)
3338 && known_subrange_p (ref_offset, ref->max_size, offset, size))
3339 return true;
3342 if (is_gimple_call (stmt))
3344 tree callee = gimple_call_fndecl (stmt);
3345 if (callee != NULL_TREE
3346 && gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
3347 switch (DECL_FUNCTION_CODE (callee))
3349 case BUILT_IN_FREE:
3351 tree ptr = gimple_call_arg (stmt, 0);
3352 tree base = ao_ref_base (ref);
3353 if (base && TREE_CODE (base) == MEM_REF
3354 && TREE_OPERAND (base, 0) == ptr)
3355 return true;
3356 break;
3359 case BUILT_IN_MEMCPY:
3360 case BUILT_IN_MEMPCPY:
3361 case BUILT_IN_MEMMOVE:
3362 case BUILT_IN_MEMSET:
3363 case BUILT_IN_MEMCPY_CHK:
3364 case BUILT_IN_MEMPCPY_CHK:
3365 case BUILT_IN_MEMMOVE_CHK:
3366 case BUILT_IN_MEMSET_CHK:
3367 case BUILT_IN_STRNCPY:
3368 case BUILT_IN_STPNCPY:
3369 case BUILT_IN_CALLOC:
3371 /* For a must-alias check we need to be able to constrain
3372 the access properly. */
3373 if (!ref->max_size_known_p ())
3374 return false;
3375 tree dest;
3376 tree len;
3378 /* In execution order a calloc call will never kill
3379 anything. However, DSE will (ab)use this interface
3380 to ask if a calloc call writes the same memory locations
3381 as a later assignment, memset, etc. So handle calloc
3382 in the expected way. */
3383 if (DECL_FUNCTION_CODE (callee) == BUILT_IN_CALLOC)
3385 tree arg0 = gimple_call_arg (stmt, 0);
3386 tree arg1 = gimple_call_arg (stmt, 1);
3387 if (TREE_CODE (arg0) != INTEGER_CST
3388 || TREE_CODE (arg1) != INTEGER_CST)
3389 return false;
3391 dest = gimple_call_lhs (stmt);
3392 if (!dest)
3393 return false;
3394 len = fold_build2 (MULT_EXPR, TREE_TYPE (arg0), arg0, arg1);
3396 else
3398 dest = gimple_call_arg (stmt, 0);
3399 len = gimple_call_arg (stmt, 2);
3401 if (!poly_int_tree_p (len))
3402 return false;
3403 tree rbase = ref->base;
3404 poly_offset_int roffset = ref->offset;
3405 ao_ref dref;
3406 ao_ref_init_from_ptr_and_size (&dref, dest, len);
3407 tree base = ao_ref_base (&dref);
3408 poly_offset_int offset = dref.offset;
3409 if (!base || !known_size_p (dref.size))
3410 return false;
3411 if (TREE_CODE (base) == MEM_REF)
3413 if (TREE_CODE (rbase) != MEM_REF)
3414 return false;
3415 /* Compare pointers. */
3416 offset += mem_ref_offset (base) << LOG2_BITS_PER_UNIT;
3417 roffset += mem_ref_offset (rbase) << LOG2_BITS_PER_UNIT;
3418 base = TREE_OPERAND (base, 0);
3419 rbase = TREE_OPERAND (rbase, 0);
3421 if (base == rbase
3422 && known_subrange_p (roffset, ref->max_size, offset,
3423 wi::to_poly_offset (len)
3424 << LOG2_BITS_PER_UNIT))
3425 return true;
3426 break;
3429 case BUILT_IN_VA_END:
3431 tree ptr = gimple_call_arg (stmt, 0);
3432 if (TREE_CODE (ptr) == ADDR_EXPR)
3434 tree base = ao_ref_base (ref);
3435 if (TREE_OPERAND (ptr, 0) == base)
3436 return true;
3438 break;
3441 default:;
3444 return false;
3447 bool
3448 stmt_kills_ref_p (gimple *stmt, tree ref)
3450 ao_ref r;
3451 ao_ref_init (&r, ref);
3452 return stmt_kills_ref_p (stmt, &r);
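/* A usage sketch (hypothetical DSE-style caller): an earlier store is
   removable if a later statement overwrites all of it and nothing in
   between uses it:

     ao_ref ref;
     ao_ref_init (&ref, gimple_assign_lhs (early_store));
     bool fully_overwritten = stmt_kills_ref_p (later_stmt, &ref);

   early_store and later_stmt are hypothetical; proving the absence of
   intervening uses is still the caller's job.  */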
3456 /* Walk the virtual use-def chain of VUSE until hitting the virtual operand
3457 TARGET or a statement clobbering the memory reference REF, in which
3458 case false is returned. The walk starts with VUSE, one argument of PHI. */
3460 static bool
3461 maybe_skip_until (gimple *phi, tree &target, basic_block target_bb,
3462 ao_ref *ref, tree vuse, bool tbaa_p, unsigned int &limit,
3463 bitmap *visited, bool abort_on_visited,
3464 void *(*translate)(ao_ref *, tree, void *, translate_flags *),
3465 translate_flags disambiguate_only,
3466 void *data)
3468 basic_block bb = gimple_bb (phi);
3470 if (!*visited)
3471 *visited = BITMAP_ALLOC (NULL);
3473 bitmap_set_bit (*visited, SSA_NAME_VERSION (PHI_RESULT (phi)));
3475 /* Walk until we hit the target. */
3476 while (vuse != target)
3478 gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
3479 /* If we are searching for the target VUSE by walking up to
3480 TARGET_BB dominating the original PHI we are finished once
3481 we reach a default def or a definition in a block dominating
3482 that block. Update TARGET and return. */
3483 if (!target
3484 && (gimple_nop_p (def_stmt)
3485 || dominated_by_p (CDI_DOMINATORS,
3486 target_bb, gimple_bb (def_stmt))))
3488 target = vuse;
3489 return true;
3492 /* Recurse for PHI nodes. */
3493 if (gimple_code (def_stmt) == GIMPLE_PHI)
3495 /* An already visited PHI node ends the walk successfully. */
3496 if (bitmap_bit_p (*visited, SSA_NAME_VERSION (PHI_RESULT (def_stmt))))
3497 return !abort_on_visited;
3498 vuse = get_continuation_for_phi (def_stmt, ref, tbaa_p, limit,
3499 visited, abort_on_visited,
3500 translate, data, disambiguate_only);
3501 if (!vuse)
3502 return false;
3503 continue;
3505 else if (gimple_nop_p (def_stmt))
3506 return false;
3507 else
3509 /* A clobbering statement or the end of the IL ends the walk, failing. */
3510 if ((int)limit <= 0)
3511 return false;
3512 --limit;
3513 if (stmt_may_clobber_ref_p_1 (def_stmt, ref, tbaa_p))
3515 translate_flags tf = disambiguate_only;
3516 if (translate
3517 && (*translate) (ref, vuse, data, &tf) == NULL)
3519 else
3520 return false;
3523 /* If we reach a new basic-block see if we already skipped it
3524 in a previous walk that ended successfully. */
3525 if (gimple_bb (def_stmt) != bb)
3527 if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (vuse)))
3528 return !abort_on_visited;
3529 bb = gimple_bb (def_stmt);
3531 vuse = gimple_vuse (def_stmt);
3533 return true;
3537 /* Starting from a PHI node for the virtual operand of the memory reference
3538 REF, find a continuation virtual operand that allows us to continue
3539 walking statements dominating PHI, skipping only statements that cannot
3540 possibly clobber REF. Decrements LIMIT for each alias disambiguation done
3541 and aborts the walk, returning NULL_TREE if it reaches zero.
3542 Returns NULL_TREE if no suitable virtual operand can be found. */
3544 tree
3545 get_continuation_for_phi (gimple *phi, ao_ref *ref, bool tbaa_p,
3546 unsigned int &limit, bitmap *visited,
3547 bool abort_on_visited,
3548 void *(*translate)(ao_ref *, tree, void *,
3549 translate_flags *),
3550 void *data,
3551 translate_flags disambiguate_only)
3553 unsigned nargs = gimple_phi_num_args (phi);
3555 /* Through a single-argument PHI we can simply look through. */
3556 if (nargs == 1)
3557 return PHI_ARG_DEF (phi, 0);
3559 /* For two or more arguments try to pairwise skip non-aliasing code
3560 until we hit the phi argument definition that dominates the other one. */
3561 basic_block phi_bb = gimple_bb (phi);
3562 tree arg0, arg1;
3563 unsigned i;
3565 /* Find a candidate for the virtual operand which definition
3566 dominates those of all others. */
3567 /* First look if any of the args themselves satisfy this. */
3568 for (i = 0; i < nargs; ++i)
3570 arg0 = PHI_ARG_DEF (phi, i);
3571 if (SSA_NAME_IS_DEFAULT_DEF (arg0))
3572 break;
3573 basic_block def_bb = gimple_bb (SSA_NAME_DEF_STMT (arg0));
3574 if (def_bb != phi_bb
3575 && dominated_by_p (CDI_DOMINATORS, phi_bb, def_bb))
3576 break;
3577 arg0 = NULL_TREE;
3579 /* If not, look if we can reach such candidate by walking defs
3580 until we hit the immediate dominator. maybe_skip_until will
3581 do that for us. */
3582 basic_block dom = get_immediate_dominator (CDI_DOMINATORS, phi_bb);
3584 /* Then check against the (to be) found candidate. */
3585 for (i = 0; i < nargs; ++i)
3587 arg1 = PHI_ARG_DEF (phi, i);
3588 if (arg1 == arg0)
3590 else if (! maybe_skip_until (phi, arg0, dom, ref, arg1, tbaa_p,
3591 limit, visited,
3592 abort_on_visited,
3593 translate,
3594 /* Do not valueize when walking over
3595 backedges. */
3596 dominated_by_p
3597 (CDI_DOMINATORS,
3598 gimple_bb (SSA_NAME_DEF_STMT (arg1)),
3599 phi_bb)
3600 ? TR_DISAMBIGUATE
3601 : disambiguate_only, data))
3602 return NULL_TREE;
3605 return arg0;
3608 /* Based on the memory reference REF and its virtual use VUSE call
3609 WALKER for each virtual use that is equivalent to VUSE, including VUSE
3610 itself. That is, for each virtual use whose defining statement
3611 does not clobber REF.
3613 WALKER is called with REF, the current virtual use and DATA. If
3614 WALKER returns non-NULL the walk stops and its result is returned.
3615 At the end of a non-successful walk NULL is returned.
3617 TRANSLATE if non-NULL is called with a pointer to REF, the virtual
3618 use whose definition is a statement that may clobber REF, and DATA.
3619 If TRANSLATE returns (void *)-1 the walk stops and NULL is returned.
3620 If TRANSLATE returns non-NULL the walk stops and its result is returned.
3621 If TRANSLATE returns NULL the walk continues and TRANSLATE is supposed
3622 to adjust REF and *DATA to make that valid.
3624 VALUEIZE if non-NULL is called with the next VUSE that is considered
3625 and its return value is substituted for it. This can be used to
3626 implement optimistic value-numbering for example. Note that the
3627 VUSE argument is assumed to be valueized already.
3629 LIMIT specifies the number of alias queries we are allowed to do,
3630 the walk stops when it reaches zero and NULL is returned. LIMIT
3631 is decremented by the number of alias queries (plus adjustments
3632 done by the callbacks) upon return.
3634 TODO: Cache the vector of equivalent vuses per ref, vuse pair. */
3636 void *
3637 walk_non_aliased_vuses (ao_ref *ref, tree vuse, bool tbaa_p,
3638 void *(*walker)(ao_ref *, tree, void *),
3639 void *(*translate)(ao_ref *, tree, void *,
3640 translate_flags *),
3641 tree (*valueize)(tree),
3642 unsigned &limit, void *data)
3644 bitmap visited = NULL;
3645 void *res;
3646 bool translated = false;
3648 timevar_push (TV_ALIAS_STMT_WALK);
3650 do
3652 gimple *def_stmt;
3654 /* ??? Do we want to account this to TV_ALIAS_STMT_WALK? */
3655 res = (*walker) (ref, vuse, data);
3656 /* Abort walk. */
3657 if (res == (void *)-1)
3659 res = NULL;
3660 break;
3662 /* Lookup succeeded. */
3663 else if (res != NULL)
3664 break;
3666 if (valueize)
3668 vuse = valueize (vuse);
3669 if (!vuse)
3671 res = NULL;
3672 break;
3675 def_stmt = SSA_NAME_DEF_STMT (vuse);
3676 if (gimple_nop_p (def_stmt))
3677 break;
3678 else if (gimple_code (def_stmt) == GIMPLE_PHI)
3679 vuse = get_continuation_for_phi (def_stmt, ref, tbaa_p, limit,
3680 &visited, translated, translate, data);
3681 else
3683 if ((int)limit <= 0)
3685 res = NULL;
3686 break;
3688 --limit;
3689 if (stmt_may_clobber_ref_p_1 (def_stmt, ref, tbaa_p))
3691 if (!translate)
3692 break;
3693 translate_flags disambiguate_only = TR_TRANSLATE;
3694 res = (*translate) (ref, vuse, data, &disambiguate_only);
3695 /* Failed lookup and translation. */
3696 if (res == (void *)-1)
3698 res = NULL;
3699 break;
3701 /* Lookup succeeded. */
3702 else if (res != NULL)
3703 break;
3704 /* Translation succeeded, continue walking. */
3705 translated = translated || disambiguate_only == TR_TRANSLATE;
3707 vuse = gimple_vuse (def_stmt);
3710 while (vuse);
3712 if (visited)
3713 BITMAP_FREE (visited);
3715 timevar_pop (TV_ALIAS_STMT_WALK);
3717 return res;
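/* Usage sketch (hypothetical client code, not part of this file),
   assuming an ao_ref REF that describes the location of interest and
   VUSE the virtual use at the point of the query.  The walker records
   every equivalent vuse it is handed and returns NULL, so the walk
   only ends at function entry, at a clobbering definition, or when the
   alias-query budget LIMIT is exhausted; TRANSLATE and VALUEIZE are
   not used:

     static void *
     record_vuse (ao_ref *ref ATTRIBUTE_UNUSED, tree vuse, void *data)
     {
       *(tree *) data = vuse;
       return NULL;
     }

     tree oldest = vuse;
     unsigned limit = 100;
     walk_non_aliased_vuses (&ref, vuse, true, record_vuse,
                             NULL, NULL, limit, &oldest);

   Afterwards OLDEST holds the oldest virtual use visited that still
   sees the same memory state as VUSE for REF.  */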
3721 /* Based on the memory reference REF call WALKER for each vdef whose
3722 defining statement may clobber REF, starting with VDEF. If REF
3723 is NULL_TREE, each defining statement is visited.
3725 WALKER is called with REF, the current vdef and DATA. If WALKER
3726 returns true the walk is stopped, otherwise it continues.
3728 If function entry is reached, FUNCTION_ENTRY_REACHED is set to true.
3729 The pointer may be NULL, in which case this information is not tracked.
3731 At PHI nodes walk_aliased_vdefs forks into one walk for each
3732 PHI argument (but only one walk continues on merge points); the
3733 return value is true if any of the walks was successful.
3735 The function returns the number of statements walked or -1 if
3736 LIMIT stmts were walked and the walk was aborted at this point.
3737 If LIMIT is zero the walk is not aborted. */
3739 static int
3740 walk_aliased_vdefs_1 (ao_ref *ref, tree vdef,
3741 bool (*walker)(ao_ref *, tree, void *), void *data,
3742 bitmap *visited, unsigned int cnt,
3743 bool *function_entry_reached, unsigned limit)
3745 do
3747 gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);
3749 if (*visited
3750 && !bitmap_set_bit (*visited, SSA_NAME_VERSION (vdef)))
3751 return cnt;
3753 if (gimple_nop_p (def_stmt))
3755 if (function_entry_reached)
3756 *function_entry_reached = true;
3757 return cnt;
3759 else if (gimple_code (def_stmt) == GIMPLE_PHI)
3761 unsigned i;
3762 if (!*visited)
3763 *visited = BITMAP_ALLOC (NULL);
3764 for (i = 0; i < gimple_phi_num_args (def_stmt); ++i)
3766 int res = walk_aliased_vdefs_1 (ref,
3767 gimple_phi_arg_def (def_stmt, i),
3768 walker, data, visited, cnt,
3769 function_entry_reached, limit);
3770 if (res == -1)
3771 return -1;
3772 cnt = res;
3774 return cnt;
3777 /* ??? Do we want to account this to TV_ALIAS_STMT_WALK? */
3778 cnt++;
3779 if (cnt == limit)
3780 return -1;
3781 if ((!ref
3782 || stmt_may_clobber_ref_p_1 (def_stmt, ref))
3783 && (*walker) (ref, vdef, data))
3784 return cnt;
3786 vdef = gimple_vuse (def_stmt);
3788 while (1);
3791 int
3792 walk_aliased_vdefs (ao_ref *ref, tree vdef,
3793 bool (*walker)(ao_ref *, tree, void *), void *data,
3794 bitmap *visited,
3795 bool *function_entry_reached, unsigned int limit)
3797 bitmap local_visited = NULL;
3798 int ret;
3800 timevar_push (TV_ALIAS_STMT_WALK);
3802 if (function_entry_reached)
3803 *function_entry_reached = false;
3805 ret = walk_aliased_vdefs_1 (ref, vdef, walker, data,
3806 visited ? visited : &local_visited, 0,
3807 function_entry_reached, limit);
3808 if (local_visited)
3809 BITMAP_FREE (local_visited);
3811 timevar_pop (TV_ALIAS_STMT_WALK);
3813 return ret;
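/* Usage sketch (hypothetical caller, not part of this file): test
   whether any statement on the vdef chain above STMT may clobber the
   location described by an ao_ref REF.  The walker sets a flag and
   returns true to stop at the first aliasing definition; passing NULL
   for VISITED makes the walk allocate a local bitmap, and a LIMIT of
   128 bounds the number of statements walked:

     static bool
     note_clobber (ao_ref *ref ATTRIBUTE_UNUSED,
                   tree vdef ATTRIBUTE_UNUSED, void *data)
     {
       *(bool *) data = true;
       return true;
     }

     bool found = false;
     int n = walk_aliased_vdefs (&ref, gimple_vuse (stmt), note_clobber,
                                 &found, NULL, NULL, 128);

   A return value of -1 means LIMIT was hit and the answer is unknown;
   otherwise FOUND says whether an aliasing vdef was seen.  */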
3816 /* Verify validity of the fnspec string.
3817 See attr-fnspec.h for details. */
3819 void
3820 attr_fnspec::verify ()
3822 bool err = false;
3823 if (!len)
3824 return;
3826 /* Check return value specifier. */
3827 if (len < return_desc_size)
3828 err = true;
3829 else if ((len - return_desc_size) % arg_desc_size)
3830 err = true;
3831 else if ((str[0] < '1' || str[0] > '4')
3832 && str[0] != '.' && str[0] != 'm')
3833 err = true;
3835 switch (str[1])
3837 case ' ':
3838 case 'p':
3839 case 'P':
3840 case 'c':
3841 case 'C':
3842 break;
3843 default:
3844 err = true;
3846 if (err)
3847 internal_error ("invalid fn spec attribute \"%s\"", str);
3849 /* Now check all parameters. */
3850 for (unsigned int i = 0; arg_specified_p (i); i++)
3852 unsigned int idx = arg_idx (i);
3853 switch (str[idx])
3855 case 'x':
3856 case 'X':
3857 case 'r':
3858 case 'R':
3859 case 'o':
3860 case 'O':
3861 case 'w':
3862 case 'W':
3863 case '.':
3864 if ((str[idx + 1] >= '1' && str[idx + 1] <= '9')
3865 || str[idx + 1] == 't')
3867 if (str[idx] != 'r' && str[idx] != 'R'
3868 && str[idx] != 'w' && str[idx] != 'W'
3869 && str[idx] != 'o' && str[idx] != 'O')
3870 err = true;
3871 if (str[idx + 1] != 't'
3872 /* The size-specifying argument is a scalar, so it should be
3873 described by ". " if it is described at all. */
3874 && (arg_specified_p (str[idx + 1] - '1')
3875 && str[arg_idx (str[idx + 1] - '1')] != '.'))
3876 err = true;
3878 else if (str[idx + 1] != ' ')
3879 err = true;
3880 break;
3881 default:
3882 if (str[idx] < '1' || str[idx] > '9')
3883 err = true;
3885 if (err)
3886 internal_error ("invalid fn spec attribute \"%s\" arg %i", str, i);
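/* For example, the string "1cO3R3" (the shape builtin_fnspec produces
   for memcpy-like builtins) passes this verification: "1c" is the
   return-value descriptor saying the function returns argument 1 and
   is const except for the described side effects; "O3" says argument 1
   is only written to, with the access size given by argument 3; "R3"
   says argument 2 is only read, again with the size given by
   argument 3.  This decoding is illustrative; attr-fnspec.h remains
   the authoritative description of the grammar.  */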
3890 /* Return true if TYPE1 and TYPE2 will always give the same answer
3891 when compared with other types using same_type_for_tbaa_p. */
3893 static bool
3894 types_equal_for_same_type_for_tbaa_p (tree type1, tree type2,
3895 bool lto_streaming_safe)
3897 /* We use same_type_for_tbaa_p to match types in the access path.
3898 This check is overly conservative. */
3899 type1 = TYPE_MAIN_VARIANT (type1);
3900 type2 = TYPE_MAIN_VARIANT (type2);
3902 if (TYPE_STRUCTURAL_EQUALITY_P (type1)
3903 != TYPE_STRUCTURAL_EQUALITY_P (type2))
3904 return false;
3905 if (TYPE_STRUCTURAL_EQUALITY_P (type1))
3906 return true;
3908 if (lto_streaming_safe)
3909 return type1 == type2;
3910 else
3911 return TYPE_CANONICAL (type1) == TYPE_CANONICAL (type2);
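/* For example (illustrative): "int" and "const int" share a
   TYPE_MAIN_VARIANT and therefore always compare equal here.  Two
   structurally identical structs from different translation units may
   only be unified via TYPE_CANONICAL; since canonical types are
   recomputed at LTO stream-in, that test is used only when
   LTO_STREAMING_SAFE is false.  */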
3914 /* Compare REF1 and REF2 and return flags specifying their differences.
3915 If LTO_STREAMING_SAFE is true do not use alias sets and canonical
3916 types that are going to be recomputed.
3917 If TBAA is true also compare TBAA metadata. */
3919 int
3920 ao_compare::compare_ao_refs (ao_ref *ref1, ao_ref *ref2,
3921 bool lto_streaming_safe,
3922 bool tbaa)
3924 if (TREE_THIS_VOLATILE (ref1->ref) != TREE_THIS_VOLATILE (ref2->ref))
3925 return SEMANTICS;
3926 tree base1 = ao_ref_base (ref1);
3927 tree base2 = ao_ref_base (ref2);
3929 if (!known_eq (ref1->offset, ref2->offset)
3930 || !known_eq (ref1->size, ref2->size)
3931 || !known_eq (ref1->max_size, ref2->max_size))
3932 return SEMANTICS;
3934 /* For variable accesses we need to compare actual paths
3935 to check that both refs access the same address and have the same access size. */
3936 if (!known_eq (ref1->size, ref1->max_size))
3938 if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (ref1->ref)),
3939 TYPE_SIZE (TREE_TYPE (ref2->ref)), 0))
3940 return SEMANTICS;
3941 tree r1 = ref1->ref;
3942 tree r2 = ref2->ref;
3944 /* Handle toplevel COMPONENT_REFs of bitfields.
3945 Those are special since they are not allowed in
3946 ADDR_EXPR. */
3947 if (TREE_CODE (r1) == COMPONENT_REF
3948 && DECL_BIT_FIELD (TREE_OPERAND (r1, 1)))
3950 if (TREE_CODE (r2) != COMPONENT_REF
3951 || !DECL_BIT_FIELD (TREE_OPERAND (r2, 1)))
3952 return SEMANTICS;
3953 tree field1 = TREE_OPERAND (r1, 1);
3954 tree field2 = TREE_OPERAND (r2, 1);
3955 if (!operand_equal_p (DECL_FIELD_OFFSET (field1),
3956 DECL_FIELD_OFFSET (field2), 0)
3957 || !operand_equal_p (DECL_FIELD_BIT_OFFSET (field1),
3958 DECL_FIELD_BIT_OFFSET (field2), 0)
3959 || !operand_equal_p (DECL_SIZE (field1), DECL_SIZE (field2), 0)
3960 || !types_compatible_p (TREE_TYPE (r1),
3961 TREE_TYPE (r2)))
3962 return SEMANTICS;
3963 r1 = TREE_OPERAND (r1, 0);
3964 r2 = TREE_OPERAND (r2, 0);
3966 else if (TREE_CODE (r2) == COMPONENT_REF
3967 && DECL_BIT_FIELD (TREE_OPERAND (r2, 1)))
3968 return SEMANTICS;
3970 /* Similarly for bit field refs. */
3971 if (TREE_CODE (r1) == BIT_FIELD_REF)
3973 if (TREE_CODE (r2) != BIT_FIELD_REF
3974 || !operand_equal_p (TREE_OPERAND (r1, 1),
3975 TREE_OPERAND (r2, 1), 0)
3976 || !operand_equal_p (TREE_OPERAND (r1, 2),
3977 TREE_OPERAND (r2, 2), 0)
3978 || !types_compatible_p (TREE_TYPE (r1),
3979 TREE_TYPE (r2)))
3980 return SEMANTICS;
3981 r1 = TREE_OPERAND (r1, 0);
3982 r2 = TREE_OPERAND (r2, 0);
3984 else if (TREE_CODE (r2) == BIT_FIELD_REF)
3985 return SEMANTICS;
3987 /* Now we can compare the address of the actual memory access. */
3988 if (!operand_equal_p (r1, r2, OEP_ADDRESS_OF))
3989 return SEMANTICS;
3991 /* For constant accesses we get more matches by comparing offset only. */
3992 else if (!operand_equal_p (base1, base2, OEP_ADDRESS_OF))
3993 return SEMANTICS;
3995 /* We can't simply use get_object_alignment_1 on the full
3996 reference as for accesses with variable indexes this reports
3997 too conservative alignment. */
3998 unsigned int align1, align2;
3999 unsigned HOST_WIDE_INT bitpos1, bitpos2;
4000 bool known1 = get_object_alignment_1 (base1, &align1, &bitpos1);
4001 bool known2 = get_object_alignment_1 (base2, &align2, &bitpos2);
4002 /* ??? For a MEM_REF get_object_alignment_1 determines the alignment from
4003 TYPE_ALIGN but still returns false. This seems to contradict
4004 its description. So compare even if the alignment is unknown. */
4005 if (known1 != known2
4006 || (bitpos1 != bitpos2 || align1 != align2))
4007 return SEMANTICS;
4009 /* Now we know that the accesses are semantically the same. */
4010 int flags = 0;
4012 /* ao_ref_base strips inner MEM_REF [&decl], recover from that here. */
4013 tree rbase1 = ref1->ref;
4014 if (rbase1)
4015 while (handled_component_p (rbase1))
4016 rbase1 = TREE_OPERAND (rbase1, 0);
4017 tree rbase2 = ref2->ref;
4018 while (handled_component_p (rbase2))
4019 rbase2 = TREE_OPERAND (rbase2, 0);
4021 /* MEM_REFs and TARGET_MEM_REFs record dependence cliques which are used to
4022 implement restrict pointers. MR_DEPENDENCE_CLIQUE 0 means no information.
4023 Otherwise we need to match bases and cliques. */
4024 if ((((TREE_CODE (rbase1) == MEM_REF || TREE_CODE (rbase1) == TARGET_MEM_REF)
4025 && MR_DEPENDENCE_CLIQUE (rbase1))
4026 || ((TREE_CODE (rbase2) == MEM_REF || TREE_CODE (rbase2) == TARGET_MEM_REF)
4027 && MR_DEPENDENCE_CLIQUE (rbase2)))
4028 && (TREE_CODE (rbase1) != TREE_CODE (rbase2)
4029 || MR_DEPENDENCE_CLIQUE (rbase1) != MR_DEPENDENCE_CLIQUE (rbase2)
4030 || (MR_DEPENDENCE_BASE (rbase1) != MR_DEPENDENCE_BASE (rbase2))))
4031 flags |= DEPENDENCE_CLIQUE;
4033 if (!tbaa)
4034 return flags;
4036 /* Alias sets are not stable across LTO streaming; be conservative here
4037 and compare the types the alias sets are ultimately based on. */
4038 if (lto_streaming_safe)
4040 tree t1 = ao_ref_alias_ptr_type (ref1);
4041 tree t2 = ao_ref_alias_ptr_type (ref2);
4042 if (!alias_ptr_types_compatible_p (t1, t2))
4043 flags |= REF_ALIAS_SET;
4045 t1 = ao_ref_base_alias_ptr_type (ref1);
4046 t2 = ao_ref_base_alias_ptr_type (ref2);
4047 if (!alias_ptr_types_compatible_p (t1, t2))
4048 flags |= BASE_ALIAS_SET;
4050 else
4052 if (ao_ref_alias_set (ref1) != ao_ref_alias_set (ref2))
4053 flags |= REF_ALIAS_SET;
4054 if (ao_ref_base_alias_set (ref1) != ao_ref_base_alias_set (ref2))
4055 flags |= BASE_ALIAS_SET;
4058 /* Access path is used only on non-view-converted references. */
4059 bool view_converted = view_converted_memref_p (rbase1);
4060 if (view_converted_memref_p (rbase2) != view_converted)
4061 return flags | ACCESS_PATH;
4062 else if (view_converted)
4063 return flags;
4066 /* Find start of access paths and look for trailing arrays. */
4067 tree c1 = ref1->ref, c2 = ref2->ref;
4068 tree end_struct_ref1 = NULL, end_struct_ref2 = NULL;
4069 int nskipped1 = 0, nskipped2 = 0;
4070 int i = 0;
4072 for (tree p1 = ref1->ref; handled_component_p (p1); p1 = TREE_OPERAND (p1, 0))
4074 if (component_ref_to_zero_sized_trailing_array_p (p1))
4075 end_struct_ref1 = p1;
4076 if (ends_tbaa_access_path_p (p1))
4077 c1 = p1, nskipped1 = i;
4078 i++;
i = 0; /* Restart the component index for the second path. */
4080 for (tree p2 = ref2->ref; handled_component_p (p2); p2 = TREE_OPERAND (p2, 0))
4082 if (component_ref_to_zero_sized_trailing_array_p (p2))
4083 end_struct_ref2 = p2;
4084 if (ends_tbaa_access_path_p (p2))
4085 c2 = p2, nskipped2 = i;
4086 i++;
4089 /* For variable accesses we cannot rely on the offset match below.
4090 We know that the paths are structurally the same, so only check that
4091 the starts of the TBAA paths did not diverge. */
4092 if (!known_eq (ref1->size, ref1->max_size)
4093 && nskipped1 != nskipped2)
4094 return flags | ACCESS_PATH;
4096 /* Information about trailing refs is used by
4097 aliasing_component_refs_p, which is applied only if the paths
4098 have handled components. */
4099 if (!handled_component_p (c1) && !handled_component_p (c2))
4100 ;
4101 else if ((end_struct_ref1 != NULL) != (end_struct_ref2 != NULL))
4102 return flags | ACCESS_PATH;
4103 if (end_struct_ref1
4104 && TYPE_MAIN_VARIANT (TREE_TYPE (end_struct_ref1))
4105 != TYPE_MAIN_VARIANT (TREE_TYPE (end_struct_ref2)))
4106 return flags | ACCESS_PATH;
4108 /* Now compare all handled components of the access path.
4109 We have three oracles that care about access paths:
4110 - aliasing_component_refs_p
4111 - nonoverlapping_refs_since_match_p
4112 - nonoverlapping_component_refs_p
4113 We need to match things these oracles compare.
4115 It is only necessary to check types for compatibility
4116 and offsets. The rest of what the oracles compare are actual
4117 addresses, which are already known to be the same:
4118 - for constant accesses we check offsets
4119 - for variable accesses we already matched
4120 the path lexically with operand_equal_p. */
4121 while (true)
4123 bool comp1 = handled_component_p (c1);
4124 bool comp2 = handled_component_p (c2);
4126 if (comp1 != comp2)
4127 return flags | ACCESS_PATH;
4128 if (!comp1)
4129 break;
4131 if (TREE_CODE (c1) != TREE_CODE (c2))
4132 return flags | ACCESS_PATH;
4134 /* aliasing_component_refs_p attempts to find a type match within
4135 the paths. For that reason both types need to be equal
4136 with respect to same_type_for_tbaa_p. */
4137 if (!types_equal_for_same_type_for_tbaa_p (TREE_TYPE (c1),
4138 TREE_TYPE (c2),
4139 lto_streaming_safe))
4140 return flags | ACCESS_PATH;
4141 if (component_ref_to_zero_sized_trailing_array_p (c1)
4142 != component_ref_to_zero_sized_trailing_array_p (c2))
4143 return flags | ACCESS_PATH;
4145 /* aliasing_matching_component_refs_p compares
4146 offsets within the path. Other properties are ignored.
4147 Do not bother to verify offsets in variable accesses. Here we
4148 already compared them by operand_equal_p, so they are
4149 structurally the same. */
4150 if (!known_eq (ref1->size, ref1->max_size))
4152 poly_int64 offadj1, sztmc1, msztmc1;
4153 bool reverse1;
4154 get_ref_base_and_extent (c1, &offadj1, &sztmc1, &msztmc1, &reverse1);
4155 poly_int64 offadj2, sztmc2, msztmc2;
4156 bool reverse2;
4157 get_ref_base_and_extent (c2, &offadj2, &sztmc2, &msztmc2, &reverse2);
4158 if (!known_eq (offadj1, offadj2))
4159 return flags | ACCESS_PATH;
4161 c1 = TREE_OPERAND (c1, 0);
4162 c2 = TREE_OPERAND (c2, 0);
4164 /* Finally test the access type. */
4165 if (!types_equal_for_same_type_for_tbaa_p (TREE_TYPE (c1),
4166 TREE_TYPE (c2),
4167 lto_streaming_safe))
4168 return flags | ACCESS_PATH;
4169 return flags;
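/* Usage sketch (hypothetical caller, written in the spirit of the
   IPA ICF consumer of this interface), assuming STORE1 and STORE2 are
   corresponding gimple assignments from two function bodies being
   compared:

     ao_ref r1, r2;
     ao_ref_init (&r1, gimple_assign_lhs (store1));
     ao_ref_init (&r2, gimple_assign_lhs (store2));
     ao_compare c;
     int flags = c.compare_ao_refs (&r1, &r2,
                                    lto_streaming_expected_p (), true);

   A return value of 0 means the two references are interchangeable,
   including their alias-oracle metadata.  SEMANTICS means they may
   differ in what they access; REF_ALIAS_SET, BASE_ALIAS_SET,
   ACCESS_PATH and DEPENDENCE_CLIQUE mean the accesses are semantically
   the same but the corresponding TBAA metadata differs and would have
   to be unified before merging the bodies.  */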
4172 /* Hash REF to HSTATE. If LTO_STREAMING_SAFE is true, do not use alias
4173 sets and canonical types. */
4174 void
4175 ao_compare::hash_ao_ref (ao_ref *ref, bool lto_streaming_safe, bool tbaa,
4176 inchash::hash &hstate)
4178 tree base = ao_ref_base (ref);
4179 tree tbase = base;
4181 if (!known_eq (ref->size, ref->max_size))
4183 tree r = ref->ref;
4184 if (TREE_CODE (r) == COMPONENT_REF
4185 && DECL_BIT_FIELD (TREE_OPERAND (r, 1)))
4187 tree field = TREE_OPERAND (r, 1);
4188 hash_operand (DECL_FIELD_OFFSET (field), hstate, 0);
4189 hash_operand (DECL_FIELD_BIT_OFFSET (field), hstate, 0);
4190 hash_operand (DECL_SIZE (field), hstate, 0);
4191 r = TREE_OPERAND (r, 0);
4193 if (TREE_CODE (r) == BIT_FIELD_REF)
4195 hash_operand (TREE_OPERAND (r, 1), hstate, 0);
4196 hash_operand (TREE_OPERAND (r, 2), hstate, 0);
4197 r = TREE_OPERAND (r, 0);
4199 hash_operand (TYPE_SIZE (TREE_TYPE (ref->ref)), hstate, 0);
4200 hash_operand (r, hstate, OEP_ADDRESS_OF);
4202 else
4204 hash_operand (tbase, hstate, OEP_ADDRESS_OF);
4205 hstate.add_poly_int (ref->offset);
4206 hstate.add_poly_int (ref->size);
4207 hstate.add_poly_int (ref->max_size);
4209 if (!lto_streaming_safe && tbaa)
4211 hstate.add_int (ao_ref_alias_set (ref));
4212 hstate.add_int (ao_ref_base_alias_set (ref));