gcc/tree-ssa-alias.c
/* Alias analysis for trees.
   Copyright (C) 2004-2021 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "timevar.h"	/* for TV_ALIAS_STMT_WALK */
#include "ssa.h"
#include "cgraph.h"
#include "tree-pretty-print.h"
#include "alias.h"
#include "fold-const.h"
#include "langhooks.h"
#include "dumpfile.h"
#include "tree-eh.h"
#include "tree-dfa.h"
#include "ipa-reference.h"
#include "varasm.h"
#include "ipa-modref-tree.h"
#include "ipa-modref.h"
#include "attr-fnspec.h"
#include "errors.h"
#include "dbgcnt.h"
#include "gimple-pretty-print.h"
#include "print-tree.h"
#include "tree-ssa-alias-compare.h"
#include "builtins.h"
/* Broad overview of how alias analysis on gimple works:

   Statements clobbering or using memory are linked through the
   virtual operand factored use-def chain.  The virtual operand
   is unique per function, its symbol is accessible via gimple_vop (cfun).
   Virtual operands are used for efficiently walking memory statements
   in the gimple IL and are useful for things like value-numbering as
   a generation count for memory references.

   SSA_NAME pointers may have associated points-to information
   accessible via the SSA_NAME_PTR_INFO macro.  Flow-insensitive
   points-to information is (re-)computed by the TODO_rebuild_alias
   pass manager todo.  Points-to information is also used for more
   precise tracking of call-clobbered and call-used variables and
   related disambiguations.

   This file contains functions for disambiguating memory references,
   the so-called alias oracle, and tools for walking the gimple IL.

   The main alias-oracle entry-points are

   bool stmt_may_clobber_ref_p (gimple *, tree)

     This function queries if a statement may invalidate (parts of)
     the memory designated by the reference tree argument.

   bool ref_maybe_used_by_stmt_p (gimple *, tree)

     This function queries if a statement may need (parts of) the
     memory designated by the reference tree argument.

   There are variants of these functions that only handle the call
   part of a statement, call_may_clobber_ref_p and ref_maybe_used_by_call_p.
   Note that these do not disambiguate against a possible call lhs.

   bool refs_may_alias_p (tree, tree)

     This function tries to disambiguate two reference trees.

   bool ptr_deref_may_alias_global_p (tree)

     This function queries if dereferencing a pointer variable may
     alias global memory.

   More low-level disambiguators are available and documented in
   this file.  Low-level disambiguators dealing with points-to
   information are in tree-ssa-structalias.c.  */
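
/* Editorial sketch, not part of the original file: a minimal example of how
   a client pass might combine the two main entry points documented above to
   ask whether STMT touches the memory designated by REF at all.  The helper
   name is hypothetical; the predicates themselves are declared in
   tree-ssa-alias.h, which is visible via the includes above.  */

static bool ATTRIBUTE_UNUSED
example_stmt_touches_ref_p (gimple *stmt, tree ref)
{
  /* Reads and writes are queried separately by the oracle.  */
  return ref_maybe_used_by_stmt_p (stmt, ref)
	 || stmt_may_clobber_ref_p (stmt, ref);
}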
static int nonoverlapping_refs_since_match_p (tree, tree, tree, tree, bool);
static bool nonoverlapping_component_refs_p (const_tree, const_tree);

/* Query statistics for the different low-level disambiguators.
   A high-level query may trigger multiple of them.  */

static struct {
  unsigned HOST_WIDE_INT refs_may_alias_p_may_alias;
  unsigned HOST_WIDE_INT refs_may_alias_p_no_alias;
  unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_may_alias;
  unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_no_alias;
  unsigned HOST_WIDE_INT call_may_clobber_ref_p_may_alias;
  unsigned HOST_WIDE_INT call_may_clobber_ref_p_no_alias;
  unsigned HOST_WIDE_INT aliasing_component_refs_p_may_alias;
  unsigned HOST_WIDE_INT aliasing_component_refs_p_no_alias;
  unsigned HOST_WIDE_INT nonoverlapping_component_refs_p_may_alias;
  unsigned HOST_WIDE_INT nonoverlapping_component_refs_p_no_alias;
  unsigned HOST_WIDE_INT nonoverlapping_refs_since_match_p_may_alias;
  unsigned HOST_WIDE_INT nonoverlapping_refs_since_match_p_must_overlap;
  unsigned HOST_WIDE_INT nonoverlapping_refs_since_match_p_no_alias;
  unsigned HOST_WIDE_INT modref_use_may_alias;
  unsigned HOST_WIDE_INT modref_use_no_alias;
  unsigned HOST_WIDE_INT modref_clobber_may_alias;
  unsigned HOST_WIDE_INT modref_clobber_no_alias;
  unsigned HOST_WIDE_INT modref_tests;
  unsigned HOST_WIDE_INT modref_baseptr_tests;
} alias_stats;
void
dump_alias_stats (FILE *s)
{
  fprintf (s, "\nAlias oracle query stats:\n");
  fprintf (s, "  refs_may_alias_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.refs_may_alias_p_no_alias,
	   alias_stats.refs_may_alias_p_no_alias
	   + alias_stats.refs_may_alias_p_may_alias);
  fprintf (s, "  ref_maybe_used_by_call_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.ref_maybe_used_by_call_p_no_alias,
	   alias_stats.ref_maybe_used_by_call_p_no_alias
	   + alias_stats.ref_maybe_used_by_call_p_may_alias);
  fprintf (s, "  call_may_clobber_ref_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.call_may_clobber_ref_p_no_alias,
	   alias_stats.call_may_clobber_ref_p_no_alias
	   + alias_stats.call_may_clobber_ref_p_may_alias);
  fprintf (s, "  nonoverlapping_component_refs_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.nonoverlapping_component_refs_p_no_alias,
	   alias_stats.nonoverlapping_component_refs_p_no_alias
	   + alias_stats.nonoverlapping_component_refs_p_may_alias);
  fprintf (s, "  nonoverlapping_refs_since_match_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" must overlaps, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.nonoverlapping_refs_since_match_p_no_alias,
	   alias_stats.nonoverlapping_refs_since_match_p_must_overlap,
	   alias_stats.nonoverlapping_refs_since_match_p_no_alias
	   + alias_stats.nonoverlapping_refs_since_match_p_may_alias
	   + alias_stats.nonoverlapping_refs_since_match_p_must_overlap);
  fprintf (s, "  aliasing_component_refs_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.aliasing_component_refs_p_no_alias,
	   alias_stats.aliasing_component_refs_p_no_alias
	   + alias_stats.aliasing_component_refs_p_may_alias);
  dump_alias_stats_in_alias_c (s);
  fprintf (s, "\nModref stats:\n");
  fprintf (s, "  modref use: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.modref_use_no_alias,
	   alias_stats.modref_use_no_alias
	   + alias_stats.modref_use_may_alias);
  fprintf (s, "  modref clobber: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n"
	   "  " HOST_WIDE_INT_PRINT_DEC" tbaa queries (%f per modref query)\n"
	   "  " HOST_WIDE_INT_PRINT_DEC" base compares (%f per modref query)\n",
	   alias_stats.modref_clobber_no_alias,
	   alias_stats.modref_clobber_no_alias
	   + alias_stats.modref_clobber_may_alias,
	   alias_stats.modref_tests,
	   ((double)alias_stats.modref_tests)
	   / (alias_stats.modref_clobber_no_alias
	      + alias_stats.modref_clobber_may_alias),
	   alias_stats.modref_baseptr_tests,
	   ((double)alias_stats.modref_baseptr_tests)
	   / (alias_stats.modref_clobber_no_alias
	      + alias_stats.modref_clobber_may_alias));
}
/* Return true, if dereferencing PTR may alias with a global variable.  */

bool
ptr_deref_may_alias_global_p (tree ptr)
{
  struct ptr_info_def *pi;

  /* If we end up with a pointer constant here, it may point
     to global memory.  */
  if (TREE_CODE (ptr) != SSA_NAME)
    return true;

  pi = SSA_NAME_PTR_INFO (ptr);

  /* If we do not have points-to information for this variable,
     we have to punt.  */
  if (!pi)
    return true;

  /* ???  This does not use TBAA to prune globals ptr may not access.  */
  return pt_solution_includes_global (&pi->pt);
}
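
/* Editorial sketch, not part of the original file: a typical use of the
   predicate above is to decide whether a value loaded from global memory
   may be kept cached across a store through PTR.  The helper name is
   hypothetical.  */

static bool ATTRIBUTE_UNUSED
example_store_stays_local_p (tree ptr)
{
  /* True when *PTR provably cannot touch global memory, i.e. when the
     points-to solution of PTR excludes globals.  */
  return !ptr_deref_may_alias_global_p (ptr);
}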
/* Return true if dereferencing PTR may alias DECL.
   The caller is responsible for applying TBAA to see if PTR
   may access DECL at all.  */

static bool
ptr_deref_may_alias_decl_p (tree ptr, tree decl)
{
  struct ptr_info_def *pi;

  /* Conversions are irrelevant for points-to information and
     data-dependence analysis can feed us those.  */
  STRIP_NOPS (ptr);

  /* Anything we do not explicitly handle aliases.  */
  if ((TREE_CODE (ptr) != SSA_NAME
       && TREE_CODE (ptr) != ADDR_EXPR
       && TREE_CODE (ptr) != POINTER_PLUS_EXPR)
      || !POINTER_TYPE_P (TREE_TYPE (ptr))
      || (!VAR_P (decl)
	  && TREE_CODE (decl) != PARM_DECL
	  && TREE_CODE (decl) != RESULT_DECL))
    return true;

  /* Disregard pointer offsetting.  */
  if (TREE_CODE (ptr) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr = TREE_OPERAND (ptr, 0);
	}
      while (TREE_CODE (ptr) == POINTER_PLUS_EXPR);
      return ptr_deref_may_alias_decl_p (ptr, decl);
    }

  /* ADDR_EXPR pointers either just offset another pointer or directly
     specify the pointed-to set.  */
  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	ptr = TREE_OPERAND (base, 0);
      else if (base
	       && DECL_P (base))
	return compare_base_decls (base, decl) != 0;
      else if (base
	       && CONSTANT_CLASS_P (base))
	return false;
      else
	return true;
    }

  /* Non-aliased variables cannot be pointed to.  */
  if (!may_be_aliased (decl))
    return false;

  /* If we do not have useful points-to information for this pointer
     we cannot disambiguate anything else.  */
  pi = SSA_NAME_PTR_INFO (ptr);
  if (!pi)
    return true;

  return pt_solution_includes (&pi->pt, decl);
}
/* Return true if dereferenced PTR1 and PTR2 may alias.
   The caller is responsible for applying TBAA to see if accesses
   through PTR1 and PTR2 may conflict at all.  */

bool
ptr_derefs_may_alias_p (tree ptr1, tree ptr2)
{
  struct ptr_info_def *pi1, *pi2;

  /* Conversions are irrelevant for points-to information and
     data-dependence analysis can feed us those.  */
  STRIP_NOPS (ptr1);
  STRIP_NOPS (ptr2);

  /* Disregard pointer offsetting.  */
  if (TREE_CODE (ptr1) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr1 = TREE_OPERAND (ptr1, 0);
	}
      while (TREE_CODE (ptr1) == POINTER_PLUS_EXPR);
      return ptr_derefs_may_alias_p (ptr1, ptr2);
    }
  if (TREE_CODE (ptr2) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr2 = TREE_OPERAND (ptr2, 0);
	}
      while (TREE_CODE (ptr2) == POINTER_PLUS_EXPR);
      return ptr_derefs_may_alias_p (ptr1, ptr2);
    }

  /* ADDR_EXPR pointers either just offset another pointer or directly
     specify the pointed-to set.  */
  if (TREE_CODE (ptr1) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr1, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	return ptr_derefs_may_alias_p (TREE_OPERAND (base, 0), ptr2);
      else if (base
	       && DECL_P (base))
	return ptr_deref_may_alias_decl_p (ptr2, base);
      else
	return true;
    }
  if (TREE_CODE (ptr2) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr2, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	return ptr_derefs_may_alias_p (ptr1, TREE_OPERAND (base, 0));
      else if (base
	       && DECL_P (base))
	return ptr_deref_may_alias_decl_p (ptr1, base);
      else
	return true;
    }

  /* From here we require SSA name pointers.  Anything else aliases.  */
  if (TREE_CODE (ptr1) != SSA_NAME
      || TREE_CODE (ptr2) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (ptr1))
      || !POINTER_TYPE_P (TREE_TYPE (ptr2)))
    return true;

  /* We may end up with two empty points-to solutions for two identical
     pointers.  In this case we still want to say both pointers alias, so
     shortcut that here.  */
  if (ptr1 == ptr2)
    return true;

  /* If we do not have useful points-to information for either pointer
     we cannot disambiguate anything else.  */
  pi1 = SSA_NAME_PTR_INFO (ptr1);
  pi2 = SSA_NAME_PTR_INFO (ptr2);
  if (!pi1 || !pi2)
    return true;

  /* ???  This does not use TBAA to prune decls from the intersection
     that not both pointers may access.  */
  return pt_solutions_intersect (&pi1->pt, &pi2->pt);
}
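
/* Editorial sketch, not part of the original file: points-to and TBAA are
   independent filters, and as the comment above notes the caller is
   responsible for the TBAA part.  A combined query could look like this;
   the helper name is hypothetical, while get_deref_alias_set and
   alias_sets_conflict_p come from alias.h.  */

static bool ATTRIBUTE_UNUSED
example_derefs_conflict_p (tree ptr1, tree ptr2)
{
  /* Both oracles must say "may alias" for a real conflict (assuming
     strict-aliasing rules apply to the accesses in question).  */
  return ptr_derefs_may_alias_p (ptr1, ptr2)
	 && alias_sets_conflict_p (get_deref_alias_set (TREE_TYPE (ptr1)),
				   get_deref_alias_set (TREE_TYPE (ptr2)));
}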
/* Return true if dereferencing PTR may alias *REF.
   The caller is responsible for applying TBAA to see if PTR
   may access *REF at all.  */

static bool
ptr_deref_may_alias_ref_p_1 (tree ptr, ao_ref *ref)
{
  tree base = ao_ref_base (ref);

  if (TREE_CODE (base) == MEM_REF
      || TREE_CODE (base) == TARGET_MEM_REF)
    return ptr_derefs_may_alias_p (ptr, TREE_OPERAND (base, 0));
  else if (DECL_P (base))
    return ptr_deref_may_alias_decl_p (ptr, base);

  return true;
}
/* Returns true if PTR1 and PTR2 compare unequal because of points-to.  */

bool
ptrs_compare_unequal (tree ptr1, tree ptr2)
{
  /* First resolve the pointers down to an SSA name pointer base or
     a VAR_DECL, PARM_DECL or RESULT_DECL.  This explicitly does
     not yet try to handle LABEL_DECLs, FUNCTION_DECLs, CONST_DECLs
     or STRING_CSTs which need points-to adjustments to track them
     in the points-to sets.  */
  tree obj1 = NULL_TREE;
  tree obj2 = NULL_TREE;
  if (TREE_CODE (ptr1) == ADDR_EXPR)
    {
      tree tem = get_base_address (TREE_OPERAND (ptr1, 0));
      if (! tem)
	return false;
      if (VAR_P (tem)
	  || TREE_CODE (tem) == PARM_DECL
	  || TREE_CODE (tem) == RESULT_DECL)
	obj1 = tem;
      else if (TREE_CODE (tem) == MEM_REF)
	ptr1 = TREE_OPERAND (tem, 0);
    }
  if (TREE_CODE (ptr2) == ADDR_EXPR)
    {
      tree tem = get_base_address (TREE_OPERAND (ptr2, 0));
      if (! tem)
	return false;
      if (VAR_P (tem)
	  || TREE_CODE (tem) == PARM_DECL
	  || TREE_CODE (tem) == RESULT_DECL)
	obj2 = tem;
      else if (TREE_CODE (tem) == MEM_REF)
	ptr2 = TREE_OPERAND (tem, 0);
    }

  /* Canonicalize ptr vs. object.  */
  if (TREE_CODE (ptr1) == SSA_NAME && obj2)
    {
      std::swap (ptr1, ptr2);
      std::swap (obj1, obj2);
    }

  if (obj1 && obj2)
    /* Other code handles this correctly, no need to duplicate it here.  */;
  else if (obj1 && TREE_CODE (ptr2) == SSA_NAME)
    {
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr2);
      /* We may not use restrict to optimize pointer comparisons.
	 See PR71062.  So we have to assume that restrict-pointed-to
	 may be in fact obj1.  */
      if (!pi
	  || pi->pt.vars_contains_restrict
	  || pi->pt.vars_contains_interposable)
	return false;
      if (VAR_P (obj1)
	  && (TREE_STATIC (obj1) || DECL_EXTERNAL (obj1)))
	{
	  varpool_node *node = varpool_node::get (obj1);
	  /* If obj1 may bind to NULL give up (see below).  */
	  if (! node
	      || ! node->nonzero_address ()
	      || ! decl_binds_to_current_def_p (obj1))
	    return false;
	}
      return !pt_solution_includes (&pi->pt, obj1);
    }

  /* ???  We'd like to handle ptr1 != NULL and ptr1 != ptr2
     but those require pt.null to be conservatively correct.  */

  return false;
}
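
/* Editorial sketch, not part of the original file: the predicate above is
   what allows folding a pointer equality test to a constant.  The helper
   name is hypothetical.  */

static tree ATTRIBUTE_UNUSED
example_fold_pointer_eq (tree ptr1, tree ptr2)
{
  /* PTR1 == PTR2 is known false when points-to proves inequality;
     otherwise return NULL_TREE to signal "do not fold".  */
  if (ptrs_compare_unequal (ptr1, ptr2))
    return boolean_false_node;
  return NULL_TREE;
}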
/* Returns whether a reference with base BASE may refer to global memory.  */

static bool
ref_may_alias_global_p_1 (tree base)
{
  if (DECL_P (base))
    return is_global_var (base);
  else if (TREE_CODE (base) == MEM_REF
	   || TREE_CODE (base) == TARGET_MEM_REF)
    return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
  return true;
}

bool
ref_may_alias_global_p (ao_ref *ref)
{
  tree base = ao_ref_base (ref);
  return ref_may_alias_global_p_1 (base);
}

bool
ref_may_alias_global_p (tree ref)
{
  tree base = get_base_address (ref);
  return ref_may_alias_global_p_1 (base);
}
/* Return true if STMT may clobber global memory.  */

bool
stmt_may_clobber_global_p (gimple *stmt)
{
  tree lhs;

  if (!gimple_vdef (stmt))
    return false;

  /* ???  We can ask the oracle whether an artificial pointer
     dereference with a pointer with points-to information covering
     all global memory (what about non-address taken memory?) may be
     clobbered by this call.  As there is at the moment no convenient
     way of doing that without generating garbage do some manual
     checking instead.
     ???  We could make a NULL ao_ref argument to the various
     predicates special, meaning any global memory.  */

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      lhs = gimple_assign_lhs (stmt);
      return (TREE_CODE (lhs) != SSA_NAME
	      && ref_may_alias_global_p (lhs));
    case GIMPLE_CALL:
      return true;
    default:
      return true;
    }
}
/* Dump alias information on FILE.  */

void
dump_alias_info (FILE *file)
{
  unsigned i;
  tree ptr;
  const char *funcname
    = lang_hooks.decl_printable_name (current_function_decl, 2);
  tree var;

  fprintf (file, "\n\nAlias information for %s\n\n", funcname);

  fprintf (file, "Aliased symbols\n\n");

  FOR_EACH_LOCAL_DECL (cfun, i, var)
    {
      if (may_be_aliased (var))
	dump_variable (file, var);
    }

  fprintf (file, "\nCall clobber information\n");

  fprintf (file, "\nESCAPED");
  dump_points_to_solution (file, &cfun->gimple_df->escaped);

  fprintf (file, "\n\nFlow-insensitive points-to information\n\n");

  FOR_EACH_SSA_NAME (i, ptr, cfun)
    {
      struct ptr_info_def *pi;

      if (!POINTER_TYPE_P (TREE_TYPE (ptr))
	  || SSA_NAME_IN_FREE_LIST (ptr))
	continue;

      pi = SSA_NAME_PTR_INFO (ptr);
      if (pi)
	dump_points_to_info_for (file, ptr);
    }

  fprintf (file, "\n");
}
/* Dump alias information on stderr.  */

DEBUG_FUNCTION void
debug_alias_info (void)
{
  dump_alias_info (stderr);
}
/* Dump the points-to set *PT into FILE.  */

void
dump_points_to_solution (FILE *file, struct pt_solution *pt)
{
  if (pt->anything)
    fprintf (file, ", points-to anything");

  if (pt->nonlocal)
    fprintf (file, ", points-to non-local");

  if (pt->escaped)
    fprintf (file, ", points-to escaped");

  if (pt->ipa_escaped)
    fprintf (file, ", points-to unit escaped");

  if (pt->null)
    fprintf (file, ", points-to NULL");

  if (pt->vars)
    {
      fprintf (file, ", points-to vars: ");
      dump_decl_set (file, pt->vars);
      if (pt->vars_contains_nonlocal
	  || pt->vars_contains_escaped
	  || pt->vars_contains_escaped_heap
	  || pt->vars_contains_restrict)
	{
	  const char *comma = "";
	  fprintf (file, " (");
	  if (pt->vars_contains_nonlocal)
	    {
	      fprintf (file, "nonlocal");
	      comma = ", ";
	    }
	  if (pt->vars_contains_escaped)
	    {
	      fprintf (file, "%sescaped", comma);
	      comma = ", ";
	    }
	  if (pt->vars_contains_escaped_heap)
	    {
	      fprintf (file, "%sescaped heap", comma);
	      comma = ", ";
	    }
	  if (pt->vars_contains_restrict)
	    {
	      fprintf (file, "%srestrict", comma);
	      comma = ", ";
	    }
	  if (pt->vars_contains_interposable)
	    fprintf (file, "%sinterposable", comma);
	  fprintf (file, ")");
	}
    }
}
/* Unified dump function for pt_solution.  */

DEBUG_FUNCTION void
debug (pt_solution &ref)
{
  dump_points_to_solution (stderr, &ref);
}

DEBUG_FUNCTION void
debug (pt_solution *ptr)
{
  if (ptr)
    debug (*ptr);
  else
    fprintf (stderr, "<nil>\n");
}
/* Dump points-to information for SSA_NAME PTR into FILE.  */

void
dump_points_to_info_for (FILE *file, tree ptr)
{
  struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);

  print_generic_expr (file, ptr, dump_flags);

  if (pi)
    dump_points_to_solution (file, &pi->pt);
  else
    fprintf (file, ", points-to anything");

  fprintf (file, "\n");
}

/* Dump points-to information for VAR into stderr.  */

DEBUG_FUNCTION void
debug_points_to_info_for (tree var)
{
  dump_points_to_info_for (stderr, var);
}
/* Initializes the alias-oracle reference representation *R from REF.  */

void
ao_ref_init (ao_ref *r, tree ref)
{
  r->ref = ref;
  r->base = NULL_TREE;
  r->offset = 0;
  r->size = -1;
  r->max_size = -1;
  r->ref_alias_set = -1;
  r->base_alias_set = -1;
  r->volatile_p = ref ? TREE_THIS_VOLATILE (ref) : false;
}
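
/* Editorial sketch, not part of the original file: clients wrap a reference
   tree in an ao_ref once and reuse it across oracle queries; base, offset
   and sizes are computed lazily by ao_ref_base below.  The helper name is
   hypothetical and, for illustration, only handles the case of pointer-equal
   bases.  */

static bool ATTRIBUTE_UNUSED
example_same_base_refs_overlap_p (tree ref1, tree ref2)
{
  ao_ref r1, r2;
  ao_ref_init (&r1, ref1);
  ao_ref_init (&r2, ref2);
  /* ao_ref_base fills in offset, size and max_size as a side effect.  */
  if (ao_ref_base (&r1) != ao_ref_base (&r2))
    return true;	/* Unknown relation; stay conservative.  */
  return ranges_maybe_overlap_p (r1.offset, r1.max_size,
				 r2.offset, r2.max_size);
}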
/* Returns the base object of the memory reference *REF.  */

tree
ao_ref_base (ao_ref *ref)
{
  bool reverse;

  if (ref->base)
    return ref->base;
  ref->base = get_ref_base_and_extent (ref->ref, &ref->offset, &ref->size,
				       &ref->max_size, &reverse);
  return ref->base;
}
/* Returns the base object alias set of the memory reference *REF.  */

alias_set_type
ao_ref_base_alias_set (ao_ref *ref)
{
  tree base_ref;
  if (ref->base_alias_set != -1)
    return ref->base_alias_set;
  if (!ref->ref)
    return 0;
  base_ref = ref->ref;
  if (TREE_CODE (base_ref) == WITH_SIZE_EXPR)
    base_ref = TREE_OPERAND (base_ref, 0);
  while (handled_component_p (base_ref))
    base_ref = TREE_OPERAND (base_ref, 0);
  ref->base_alias_set = get_alias_set (base_ref);
  return ref->base_alias_set;
}
/* Returns the reference alias set of the memory reference *REF.  */

alias_set_type
ao_ref_alias_set (ao_ref *ref)
{
  if (ref->ref_alias_set != -1)
    return ref->ref_alias_set;
  if (!ref->ref)
    return 0;
  ref->ref_alias_set = get_alias_set (ref->ref);
  return ref->ref_alias_set;
}
/* Returns a type satisfying
   get_deref_alias_set (type) == ao_ref_base_alias_set (REF).  */

tree
ao_ref_base_alias_ptr_type (ao_ref *ref)
{
  tree base_ref;

  if (!ref->ref)
    return NULL_TREE;
  base_ref = ref->ref;
  if (TREE_CODE (base_ref) == WITH_SIZE_EXPR)
    base_ref = TREE_OPERAND (base_ref, 0);
  while (handled_component_p (base_ref))
    base_ref = TREE_OPERAND (base_ref, 0);
  tree ret = reference_alias_ptr_type (base_ref);
  return ret;
}
/* Returns a type satisfying
   get_deref_alias_set (type) == ao_ref_alias_set (REF).  */

tree
ao_ref_alias_ptr_type (ao_ref *ref)
{
  if (!ref->ref)
    return NULL_TREE;
  tree ret = reference_alias_ptr_type (ref->ref);
  return ret;
}
/* Init an alias-oracle reference representation from a gimple pointer
   PTR and a range specified by OFFSET, SIZE and MAX_SIZE under the
   assumption that RANGE_KNOWN is set.

   The access is assumed to be only to or after the pointer target adjusted
   by the offset, not before it (even in the case RANGE_KNOWN is false).  */

static void
ao_ref_init_from_ptr_and_range (ao_ref *ref, tree ptr,
				bool range_known,
				poly_int64 offset,
				poly_int64 size,
				poly_int64 max_size)
{
  poly_int64 t, extra_offset = 0;

  ref->ref = NULL_TREE;
  if (TREE_CODE (ptr) == SSA_NAME)
    {
      gimple *stmt = SSA_NAME_DEF_STMT (ptr);
      if (gimple_assign_single_p (stmt)
	  && gimple_assign_rhs_code (stmt) == ADDR_EXPR)
	ptr = gimple_assign_rhs1 (stmt);
      else if (is_gimple_assign (stmt)
	       && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
	       && ptrdiff_tree_p (gimple_assign_rhs2 (stmt), &extra_offset))
	{
	  ptr = gimple_assign_rhs1 (stmt);
	  extra_offset *= BITS_PER_UNIT;
	}
    }

  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      ref->base = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &t);
      if (ref->base)
	ref->offset = BITS_PER_UNIT * t;
      else
	{
	  range_known = false;
	  ref->offset = 0;
	  ref->base = get_base_address (TREE_OPERAND (ptr, 0));
	}
    }
  else
    {
      gcc_assert (POINTER_TYPE_P (TREE_TYPE (ptr)));
      ref->base = build2 (MEM_REF, char_type_node,
			  ptr, null_pointer_node);
      ref->offset = 0;
    }
  ref->offset += extra_offset + offset;
  if (range_known)
    {
      ref->max_size = max_size;
      ref->size = size;
    }
  else
    ref->max_size = ref->size = -1;
  ref->ref_alias_set = 0;
  ref->base_alias_set = 0;
  ref->volatile_p = false;
}
/* Init an alias-oracle reference representation from a gimple pointer
   PTR and a gimple size SIZE in bytes.  If SIZE is NULL_TREE then the
   size is assumed to be unknown.  The access is assumed to be only
   to or after the pointer target, not before it.  */

void
ao_ref_init_from_ptr_and_size (ao_ref *ref, tree ptr, tree size)
{
  poly_int64 size_hwi;
  if (size
      && poly_int_tree_p (size, &size_hwi)
      && coeffs_in_range_p (size_hwi, 0, HOST_WIDE_INT_MAX / BITS_PER_UNIT))
    {
      size_hwi = size_hwi * BITS_PER_UNIT;
      ao_ref_init_from_ptr_and_range (ref, ptr, true, 0, size_hwi, size_hwi);
    }
  else
    ao_ref_init_from_ptr_and_range (ref, ptr, false, 0, -1, -1);
}
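
/* Editorial sketch, not part of the original file: this entry point is the
   natural way to describe the destination of a string builtin, e.g. the
   memory written by memset (DEST, 0, LEN).  The helper name is hypothetical
   and CALL is assumed to be a call to memset.  */

static void ATTRIBUTE_UNUSED
example_init_memset_dest (ao_ref *dref, gcall *call)
{
  tree dest = gimple_call_arg (call, 0);
  tree len = gimple_call_arg (call, 2);
  /* LEN need not be constant; a non-constant size simply records the
     range as unknown.  */
  ao_ref_init_from_ptr_and_size (dref, dest, len);
}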
/* S1 and S2 are TYPE_SIZE or DECL_SIZE.  Compare them:
   Return -1 if S1 < S2
   Return 1 if S1 > S2
   Return 0 if equal or incomparable.  */

static int
compare_sizes (tree s1, tree s2)
{
  if (!s1 || !s2)
    return 0;

  poly_uint64 size1;
  poly_uint64 size2;

  if (!poly_int_tree_p (s1, &size1) || !poly_int_tree_p (s2, &size2))
    return 0;
  if (known_lt (size1, size2))
    return -1;
  if (known_lt (size2, size1))
    return 1;
  return 0;
}
/* Compare TYPE1 and TYPE2 by their size.
   Return -1 if size of TYPE1 < size of TYPE2
   Return 1 if size of TYPE1 > size of TYPE2
   Return 0 if types are of equal sizes or we cannot compare them.  */

static int
compare_type_sizes (tree type1, tree type2)
{
  /* Be conservative for arrays and vectors.  We want to support partial
     overlap on int[3] and int[3] as tested in gcc.dg/torture/alias-2.c.  */
  while (TREE_CODE (type1) == ARRAY_TYPE
	 || TREE_CODE (type1) == VECTOR_TYPE)
    type1 = TREE_TYPE (type1);
  while (TREE_CODE (type2) == ARRAY_TYPE
	 || TREE_CODE (type2) == VECTOR_TYPE)
    type2 = TREE_TYPE (type2);
  return compare_sizes (TYPE_SIZE (type1), TYPE_SIZE (type2));
}
/* Return 1 if TYPE1 and TYPE2 are to be considered equivalent for the
   purpose of TBAA.  Return 0 if they are distinct and -1 if we cannot
   decide.  */

static inline int
same_type_for_tbaa (tree type1, tree type2)
{
  type1 = TYPE_MAIN_VARIANT (type1);
  type2 = TYPE_MAIN_VARIANT (type2);

  /* Handle the most common case first.  */
  if (type1 == type2)
    return 1;

  /* If we would have to do structural comparison bail out.  */
  if (TYPE_STRUCTURAL_EQUALITY_P (type1)
      || TYPE_STRUCTURAL_EQUALITY_P (type2))
    return -1;

  /* Compare the canonical types.  */
  if (TYPE_CANONICAL (type1) == TYPE_CANONICAL (type2))
    return 1;

  /* ???  Array types are not properly unified in all cases as we have
     spurious changes in the index types for example.  Removing this
     causes all sorts of problems with the Fortran frontend.  */
  if (TREE_CODE (type1) == ARRAY_TYPE
      && TREE_CODE (type2) == ARRAY_TYPE)
    return -1;

  /* ???  In Ada, an lvalue of an unconstrained type can be used to access an
     object of one of its constrained subtypes, e.g. when a function with an
     unconstrained parameter passed by reference is called on an object and
     inlined.  But, even in the case of a fixed size, type and subtypes are
     not equivalent enough as to share the same TYPE_CANONICAL, since this
     would mean that conversions between them are useless, whereas they are
     not (e.g. type and subtypes can have different modes).  So, in the end,
     they are only guaranteed to have the same alias set.  */
  alias_set_type set1 = get_alias_set (type1);
  alias_set_type set2 = get_alias_set (type2);
  if (set1 == set2)
    return -1;

  /* Pointers to void are considered compatible with all other pointers,
     so for two pointers see what the alias set resolution thinks.  */
  if (POINTER_TYPE_P (type1)
      && POINTER_TYPE_P (type2)
      && alias_sets_conflict_p (set1, set2))
    return -1;

  /* The types are known to be not equal.  */
  return 0;
}
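
/* Editorial sketch, not part of the original file: callers must treat the
   tri-state result above with care, since only 0 proves distinctness and
   only 1 proves equivalence.  The helper name is hypothetical.  */

static bool ATTRIBUTE_UNUSED
example_types_certainly_distinct_p (tree t1, tree t2)
{
  /* -1 ("cannot decide") must not be taken as a disambiguation.  */
  return same_type_for_tbaa (t1, t2) == 0;
}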
/* Return true if TYPE is a composite type (i.e. we may apply one of handled
   components on it).  */

static bool
type_has_components_p (tree type)
{
  return AGGREGATE_TYPE_P (type) || VECTOR_TYPE_P (type)
	 || TREE_CODE (type) == COMPLEX_TYPE;
}
/* MATCH1 and MATCH2, which are part of the access paths of REF1 and REF2
   respectively, either point to the same address or are completely
   disjoint.  If PARTIAL_OVERLAP is true, assume that the outermost arrays
   may just partly overlap.

   Try to disambiguate using the access path starting from the match
   and return false if there is no conflict.

   Helper for aliasing_component_refs_p.  */

static bool
aliasing_matching_component_refs_p (tree match1, tree ref1,
				    poly_int64 offset1, poly_int64 max_size1,
				    tree match2, tree ref2,
				    poly_int64 offset2, poly_int64 max_size2,
				    bool partial_overlap)
{
  poly_int64 offadj, sztmp, msztmp;
  bool reverse;

  if (!partial_overlap)
    {
      get_ref_base_and_extent (match2, &offadj, &sztmp, &msztmp, &reverse);
      offset2 -= offadj;
      get_ref_base_and_extent (match1, &offadj, &sztmp, &msztmp, &reverse);
      offset1 -= offadj;
      if (!ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
	{
	  ++alias_stats.aliasing_component_refs_p_no_alias;
	  return false;
	}
    }

  int cmp = nonoverlapping_refs_since_match_p (match1, ref1, match2, ref2,
					       partial_overlap);
  if (cmp == 1
      || (cmp == -1 && nonoverlapping_component_refs_p (ref1, ref2)))
    {
      ++alias_stats.aliasing_component_refs_p_no_alias;
      return false;
    }
  ++alias_stats.aliasing_component_refs_p_may_alias;
  return true;
}
/* Return true if REF is a reference to a zero sized trailing array.  I.e.
     struct foo {int bar; int array[0];} *fooptr;
     fooptr->array.  */

static bool
component_ref_to_zero_sized_trailing_array_p (tree ref)
{
  return (TREE_CODE (ref) == COMPONENT_REF
	  && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 1))) == ARRAY_TYPE
	  && (!TYPE_SIZE (TREE_TYPE (TREE_OPERAND (ref, 1)))
	      || integer_zerop (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (ref, 1)))))
	  && array_at_struct_end_p (ref));
}
/* Worker for aliasing_component_refs_p.  Most parameters match parameters of
   aliasing_component_refs_p.

   Walk the access path REF2 and try to find a type matching TYPE1
   (which is the start of the possibly aliasing access path REF1).
   If a match is found, try to disambiguate.

   Return 0 for successful disambiguation.
   Return 1 if a match was found but disambiguation failed.
   Return -1 if there is no match.
   In this case MAYBE_MATCH is set to false if there is no type matching
   TYPE1 in the access path REF2 and to true if we are not sure.  */

static int
aliasing_component_refs_walk (tree ref1, tree type1, tree base1,
			      poly_int64 offset1, poly_int64 max_size1,
			      tree end_struct_ref1,
			      tree ref2, tree base2,
			      poly_int64 offset2, poly_int64 max_size2,
			      bool *maybe_match)
{
  tree ref = ref2;
  int same_p = 0;

  while (true)
    {
      /* We walk from the inner type to the outer types.  If the type we see
	 is already too large to be a part of TYPE1, terminate the search.  */
      int cmp = compare_type_sizes (type1, TREE_TYPE (ref));

      if (cmp < 0
	  && (!end_struct_ref1
	      || compare_type_sizes (TREE_TYPE (end_struct_ref1),
				     TREE_TYPE (ref)) < 0))
	break;
      /* If types may be of same size, see if we can decide about their
	 equality.  */
      if (cmp == 0)
	{
	  same_p = same_type_for_tbaa (TREE_TYPE (ref), type1);
	  if (same_p == 1)
	    break;
	  /* In case we can't decide whether types are the same, try to
	     continue looking for the exact match.
	     Remember however that we possibly saw a match
	     to bypass the access path continuation tests we do later.  */
	  if (same_p == -1)
	    *maybe_match = true;
	}
      if (!handled_component_p (ref))
	break;
      ref = TREE_OPERAND (ref, 0);
    }
  if (same_p == 1)
    {
      bool partial_overlap = false;

      /* We assume that arrays can overlap by a multiple of their element
	 size as tested in gcc.dg/torture/alias-2.c.
	 This partial overlap happens only when both arrays are bases of
	 the access and not contained within another component ref.
	 To be safe we also assume partial overlap for VLAs.  */
      if (TREE_CODE (TREE_TYPE (base1)) == ARRAY_TYPE
	  && (!TYPE_SIZE (TREE_TYPE (base1))
	      || TREE_CODE (TYPE_SIZE (TREE_TYPE (base1))) != INTEGER_CST
	      || ref == base2))
	{
	  /* Setting maybe_match to true triggers the
	     nonoverlapping_component_refs_p test later that still may do
	     useful disambiguation.  */
	  *maybe_match = true;
	  partial_overlap = true;
	}
      return aliasing_matching_component_refs_p (base1, ref1,
						 offset1, max_size1,
						 ref, ref2,
						 offset2, max_size2,
						 partial_overlap);
    }
  return -1;
}
/* Consider access path1 base1....ref1 and access path2 base2...ref2.
   Return true if they can be composed into a single access path
   base1...ref1...base2...ref2.

   REF_TYPE1 is the type of REF1.  END_STRUCT_PAST_END1 is true if there is
   a trailing array access after REF1 in the non-TBAA part of the access.
   REF1_ALIAS_SET is the alias set of REF1.

   BASE_TYPE2 is the type of BASE2.  END_STRUCT_REF2 is non-NULL if there is
   a trailing array access in the TBAA part of access path2.
   BASE2_ALIAS_SET is the alias set of BASE2.  */

bool
access_path_may_continue_p (tree ref_type1, bool end_struct_past_end1,
			    alias_set_type ref1_alias_set,
			    tree base_type2, tree end_struct_ref2,
			    alias_set_type base2_alias_set)
{
  /* Access paths cannot continue past types with no components.  */
  if (!type_has_components_p (ref_type1))
    return false;

  /* If the first access path ends with a type too small to hold the base
     of the second access path, the paths typically cannot continue.

     Punt if end_struct_past_end1 is true.  We want to support arbitrary
     type punning past the first COMPONENT_REF to a union because redundant
     store elimination depends on this, see PR92152.  For this reason we
     cannot check the size of the reference because types may partially
     overlap.  */
  if (!end_struct_past_end1)
    {
      if (compare_type_sizes (ref_type1, base_type2) < 0)
	return false;
      /* If path2 contains a trailing array access we can strengthen the
	 check to verify that the size of the element of the trailing array
	 fits as well.  In fact we could check for offset + type_size, but
	 we do not track offsets and this is quite a corner case.  */
      if (end_struct_ref2
	  && compare_type_sizes (ref_type1, TREE_TYPE (end_struct_ref2)) < 0)
	return false;
    }
  return (base2_alias_set == ref1_alias_set
	  || alias_set_subset_of (base2_alias_set, ref1_alias_set));
}
/* Determine if the two component references REF1 and REF2 which are
   based on access types TYPE1 and TYPE2 and of which at least one is based
   on an indirect reference may alias.
   REF1_ALIAS_SET, BASE1_ALIAS_SET, REF2_ALIAS_SET and BASE2_ALIAS_SET
   are the respective alias sets.  */

static bool
aliasing_component_refs_p (tree ref1,
			   alias_set_type ref1_alias_set,
			   alias_set_type base1_alias_set,
			   poly_int64 offset1, poly_int64 max_size1,
			   tree ref2,
			   alias_set_type ref2_alias_set,
			   alias_set_type base2_alias_set,
			   poly_int64 offset2, poly_int64 max_size2)
{
  /* If one reference is a component reference through pointers try to find a
     common base and apply offset based disambiguation.  This handles
     for example
       struct A { int i; int j; } *q;
       struct B { struct A a; int k; } *p;
     disambiguating q->i and p->a.j.  */
  tree base1, base2;
  tree type1, type2;
  bool maybe_match = false;
  tree end_struct_ref1 = NULL, end_struct_ref2 = NULL;
  bool end_struct_past_end1 = false;
  bool end_struct_past_end2 = false;

  /* Choose bases and base types to search for.
     The access path is as follows:
       base....end_of_tbaa_ref...actual_ref
     At one place in the access path there may be a reference to a zero
     sized or trailing array.

     We generally discard the segment after end_of_tbaa_ref, however
     we need to be careful in case it contains a zero sized or trailing
     array.  These may happen after a reference to a union and in this
     case we need to not disambiguate type punning scenarios.

     We set:
	base1 to point to base

	ref1 to point to end_of_tbaa_ref

	end_struct_ref1 to point to the trailing reference (if it exists
	in the range base....end_of_tbaa_ref)

	end_struct_past_end1 is true if this trailing reference occurs in
	end_of_tbaa_ref...actual_ref.  */
  base1 = ref1;
  while (handled_component_p (base1))
    {
      /* Generally access paths are monotone in the size of the object.
	 The exception are trailing arrays of structures.  I.e.
	   struct a {int array[0];};
	 or
	   struct a {int array1[0]; int array[];};
	 Such a struct has size 0 but accesses to a.array may have non-zero
	 size.  In this case the size of TREE_TYPE (base1) is smaller than
	 the size of TREE_TYPE (TREE_OPERAND (base1, 0)).

	 Because we compare sizes of arrays just by sizes of their elements,
	 we only need to care about zero sized array fields here.  */
      if (component_ref_to_zero_sized_trailing_array_p (base1))
	{
	  gcc_checking_assert (!end_struct_ref1);
	  end_struct_ref1 = base1;
	}
      if (ends_tbaa_access_path_p (base1))
	{
	  ref1 = TREE_OPERAND (base1, 0);
	  if (end_struct_ref1)
	    {
	      end_struct_past_end1 = true;
	      end_struct_ref1 = NULL;
	    }
	}
      base1 = TREE_OPERAND (base1, 0);
    }
  type1 = TREE_TYPE (base1);
  base2 = ref2;
  while (handled_component_p (base2))
    {
      if (component_ref_to_zero_sized_trailing_array_p (base2))
	{
	  gcc_checking_assert (!end_struct_ref2);
	  end_struct_ref2 = base2;
	}
      if (ends_tbaa_access_path_p (base2))
	{
	  ref2 = TREE_OPERAND (base2, 0);
	  if (end_struct_ref2)
	    {
	      end_struct_past_end2 = true;
	      end_struct_ref2 = NULL;
	    }
	}
      base2 = TREE_OPERAND (base2, 0);
    }
  type2 = TREE_TYPE (base2);

  /* Now search for the type1 in the access path of ref2.  This
     would be a common base for doing offset based disambiguation on.
     This however only makes sense if type2 is big enough to hold type1.  */
  int cmp_outer = compare_type_sizes (type2, type1);

  /* If type2 is big enough to contain type1 walk its access path.
     We also need to care of arrays at the end of structs that may extend
     beyond the end of the structure.  If this occurs in the TBAA part of
     the access path, we need to consider the increased type as well.  */
  if (cmp_outer >= 0
      || (end_struct_ref2
	  && compare_type_sizes (TREE_TYPE (end_struct_ref2), type1) >= 0))
    {
      int res = aliasing_component_refs_walk (ref1, type1, base1,
					      offset1, max_size1,
					      end_struct_ref1,
					      ref2, base2, offset2, max_size2,
					      &maybe_match);
      if (res != -1)
	return res;
    }

  /* If we didn't find a common base, try the other way around.  */
  if (cmp_outer <= 0
      || (end_struct_ref1
	  && compare_type_sizes (TREE_TYPE (end_struct_ref1), type1) <= 0))
    {
      int res = aliasing_component_refs_walk (ref2, type2, base2,
					      offset2, max_size2,
					      end_struct_ref2,
					      ref1, base1, offset1, max_size1,
					      &maybe_match);
      if (res != -1)
	return res;
    }

  /* In the following code we make an assumption that the types in access
     paths do not overlap and thus accesses alias only if one path can be
     a continuation of another.  If we were not able to decide about
     equivalence, we need to give up.  */
  if (maybe_match)
    {
      if (!nonoverlapping_component_refs_p (ref1, ref2))
	{
	  ++alias_stats.aliasing_component_refs_p_may_alias;
	  return true;
	}
      ++alias_stats.aliasing_component_refs_p_no_alias;
      return false;
    }

  if (access_path_may_continue_p (TREE_TYPE (ref1), end_struct_past_end1,
				  ref1_alias_set,
				  type2, end_struct_ref2,
				  base2_alias_set)
      || access_path_may_continue_p (TREE_TYPE (ref2), end_struct_past_end2,
				     ref2_alias_set,
				     type1, end_struct_ref1,
				     base1_alias_set))
    {
      ++alias_stats.aliasing_component_refs_p_may_alias;
      return true;
    }
  ++alias_stats.aliasing_component_refs_p_no_alias;
  return false;
}
/* FIELD1 and FIELD2 are two fields of component refs.  We assume
   that the bases of both component refs are either equivalent or
   nonoverlapping.  We do not assume that the containers of FIELD1 and
   FIELD2 are of the same type or size.

   Return 0 if, whenever the base addresses of the component refs are the
   same, FIELD1 and FIELD2 have the same address as well.  Note that FIELD1
   and FIELD2 need not be of the same type or size.

   Return 1 if FIELD1 and FIELD2 are non-overlapping.

   Return -1 otherwise.

   The main difference between 0 and -1 is to let
   nonoverlapping_refs_since_match_p discover the semantically
   equivalent part of the access path.

   Note that this function is used even with -fno-strict-aliasing
   and makes use of no TBAA assumptions.  */

static int
nonoverlapping_component_refs_p_1 (const_tree field1, const_tree field2)
{
  /* If both fields are of the same type, we could save hard work of
     comparing offsets.  */
  tree type1 = DECL_CONTEXT (field1);
  tree type2 = DECL_CONTEXT (field2);

  if (TREE_CODE (type1) == RECORD_TYPE
      && DECL_BIT_FIELD_REPRESENTATIVE (field1))
    field1 = DECL_BIT_FIELD_REPRESENTATIVE (field1);
  if (TREE_CODE (type2) == RECORD_TYPE
      && DECL_BIT_FIELD_REPRESENTATIVE (field2))
    field2 = DECL_BIT_FIELD_REPRESENTATIVE (field2);

  /* ???  Bitfields can overlap at RTL level so punt on them.
     FIXME: RTL expansion should be fixed by adjusting the access path
     when producing MEM_ATTRs for MEMs which are wider than
     the bitfields similarly as done in set_mem_attrs_minus_bitpos.  */
  if (DECL_BIT_FIELD (field1) && DECL_BIT_FIELD (field2))
    return -1;

  /* Assume that different FIELD_DECLs never overlap within a RECORD_TYPE.  */
  if (type1 == type2 && TREE_CODE (type1) == RECORD_TYPE)
    return field1 != field2;

  /* In the common case the offsets and bit offsets will be the same.
     However if frontends do not agree on the alignment, they may be
     different even if they actually represent the same address.
     Try the common case first and if that fails calculate the
     actual bit offset.  */
  if (tree_int_cst_equal (DECL_FIELD_OFFSET (field1),
			  DECL_FIELD_OFFSET (field2))
      && tree_int_cst_equal (DECL_FIELD_BIT_OFFSET (field1),
			     DECL_FIELD_BIT_OFFSET (field2)))
    return 0;

  /* Note that it may be possible to use component_ref_field_offset
     which would provide offsets as trees.  However constructing and folding
     trees is expensive and does not seem to be worth the compile time
     cost.  */

  poly_uint64 offset1, offset2;
  poly_uint64 bit_offset1, bit_offset2;

  if (poly_int_tree_p (DECL_FIELD_OFFSET (field1), &offset1)
      && poly_int_tree_p (DECL_FIELD_OFFSET (field2), &offset2)
      && poly_int_tree_p (DECL_FIELD_BIT_OFFSET (field1), &bit_offset1)
      && poly_int_tree_p (DECL_FIELD_BIT_OFFSET (field2), &bit_offset2))
    {
      offset1 = (offset1 << LOG2_BITS_PER_UNIT) + bit_offset1;
      offset2 = (offset2 << LOG2_BITS_PER_UNIT) + bit_offset2;

      if (known_eq (offset1, offset2))
	return 0;

      poly_uint64 size1, size2;

      if (poly_int_tree_p (DECL_SIZE (field1), &size1)
	  && poly_int_tree_p (DECL_SIZE (field2), &size2)
	  && !ranges_maybe_overlap_p (offset1, size1, offset2, size2))
	return 1;
    }
  /* Resort to slower overlap checking by looking for matching types in
     the middle of the access path.  */
  return -1;
}
/* Return the low bound of an array ref.  Do not produce new trees
   and thus do not care about the particular type of integer constants
   and placeholder exprs.  */

static tree
cheap_array_ref_low_bound (tree ref)
{
  tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));

  /* Avoid expensive array_ref_low_bound.
     The low bound is either stored in operand 2, or it is TYPE_MIN_VALUE
     of the domain type or it is zero.  */
  if (TREE_OPERAND (ref, 2))
    return TREE_OPERAND (ref, 2);
  else if (domain_type && TYPE_MIN_VALUE (domain_type))
    return TYPE_MIN_VALUE (domain_type);
  else
    return integer_zero_node;
}
/* REF1 and REF2 are ARRAY_REFs with either the same base address or which
   are completely disjoint.

   Return 1 if the refs are non-overlapping.
   Return 0 if they are possibly overlapping but if so the overlap again
   starts at the same address.
   Return -1 otherwise.  */

static int
nonoverlapping_array_refs_p (tree ref1, tree ref2)
{
  tree index1 = TREE_OPERAND (ref1, 1);
  tree index2 = TREE_OPERAND (ref2, 1);
  tree low_bound1 = cheap_array_ref_low_bound (ref1);
  tree low_bound2 = cheap_array_ref_low_bound (ref2);

  /* Handle zero offsets first: we do not need to match type size in this
     case.  */
  if (operand_equal_p (index1, low_bound1, 0)
      && operand_equal_p (index2, low_bound2, 0))
    return 0;

  /* If type sizes are different, give up.

     Avoid expensive array_ref_element_size.
     If operand 3 is present it denotes the size in alignment units.
     Otherwise the size is TYPE_SIZE of the element type.
     Handle only common cases where types are of the same "kind".  */
  if ((TREE_OPERAND (ref1, 3) == NULL) != (TREE_OPERAND (ref2, 3) == NULL))
    return -1;

  tree elmt_type1 = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref1, 0)));
  tree elmt_type2 = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref2, 0)));

  if (TREE_OPERAND (ref1, 3))
    {
      if (TYPE_ALIGN (elmt_type1) != TYPE_ALIGN (elmt_type2)
	  || !operand_equal_p (TREE_OPERAND (ref1, 3),
			       TREE_OPERAND (ref2, 3), 0))
	return -1;
    }
  else
    {
      if (!operand_equal_p (TYPE_SIZE_UNIT (elmt_type1),
			    TYPE_SIZE_UNIT (elmt_type2), 0))
	return -1;
    }

  /* Since we know that type sizes are the same, there is no need to return
     -1 after this point.  Partial overlap cannot be introduced.  */

  /* We may need to fold trees in this case.
     TODO: Handle the integer constant case at least.  */
  if (!operand_equal_p (low_bound1, low_bound2, 0))
    return 0;

  if (TREE_CODE (index1) == INTEGER_CST && TREE_CODE (index2) == INTEGER_CST)
    {
      if (tree_int_cst_equal (index1, index2))
	return 0;
      return 1;
    }
  /* TODO: We can use VRP to further disambiguate here.  */
  return 0;
}
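
/* Editorial sketch, not part of the original file: as with the other
   tri-state helpers here, only a result of 1 is a proof of disjointness;
   0 means any overlap starts at the same address and -1 means nothing is
   known.  The helper name is hypothetical.  */

static bool ATTRIBUTE_UNUSED
example_array_refs_certainly_disjoint_p (tree ref1, tree ref2)
{
  return nonoverlapping_array_refs_p (ref1, ref2) == 1;
}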
/* Try to disambiguate REF1 and REF2 under the assumption that MATCH1 and
   MATCH2 either point to the same address or are disjoint.
   MATCH1 and MATCH2 are assumed to be refs in the access paths of REF1 and
   REF2 respectively, or NULL in the case we established equivalence of
   bases.  If PARTIAL_OVERLAP is true, assume that the toplevel arrays may
   actually overlap by an exact multiple of their element size.

   This test works by matching the initial segments of the access paths
   and does not rely on TBAA; it is thus safe for !flag_strict_aliasing if
   the match was determined without use of the TBAA oracle.

   Return 1 if we can determine that the component references REF1 and REF2,
   that are within a common DECL, cannot overlap.

   Return 0 if the paths are the same and thus there is nothing more to
   disambiguate (i.e. there is a must-alias, assuming MATCH1 and MATCH2
   must-alias).

   Return -1 if we cannot determine 0 or 1; this happens when non-matching
   types were met in the path.
   In this case it may make sense to continue with other disambiguation
   oracles.  */

static int
nonoverlapping_refs_since_match_p (tree match1, tree ref1,
				   tree match2, tree ref2,
				   bool partial_overlap)
{
  int ntbaa1 = 0, ntbaa2 = 0;
  /* Early return if there are no references to match, we do not need
     to walk the access paths.

     Do not consider this as may-alias for stats - it is more useful
     to know how many disambiguations happened provided that the query
     was meaningful.  */

  if (match1 == ref1 || !handled_component_p (ref1)
      || match2 == ref2 || !handled_component_p (ref2))
    return -1;

  auto_vec<tree, 16> component_refs1;
  auto_vec<tree, 16> component_refs2;

  /* Create the stack of handled components for REF1.  */
  while (handled_component_p (ref1) && ref1 != match1)
    {
      /* We use TBAA only to re-synchronize after mismatched refs.  So we
	 do not need to truncate the access path after the TBAA part ends.  */
      if (ends_tbaa_access_path_p (ref1))
	ntbaa1 = 0;
      else
	ntbaa1++;
      component_refs1.safe_push (ref1);
      ref1 = TREE_OPERAND (ref1, 0);
    }

  /* Create the stack of handled components for REF2.  */
  while (handled_component_p (ref2) && ref2 != match2)
    {
      if (ends_tbaa_access_path_p (ref2))
	ntbaa2 = 0;
      else
	ntbaa2++;
      component_refs2.safe_push (ref2);
      ref2 = TREE_OPERAND (ref2, 0);
    }

  if (!flag_strict_aliasing)
    {
      ntbaa1 = 0;
      ntbaa2 = 0;
    }

  bool mem_ref1 = TREE_CODE (ref1) == MEM_REF && ref1 != match1;
  bool mem_ref2 = TREE_CODE (ref2) == MEM_REF && ref2 != match2;

  /* If only one of the access paths starts with MEM_REF check that the
     offset is 0 so the addresses stay the same after stripping it.
     TODO: In this case we may walk the other access path until we get the
     same offset.

     If both start with MEM_REF, the offsets have to be the same.  */
  if ((mem_ref1 && !mem_ref2 && !integer_zerop (TREE_OPERAND (ref1, 1)))
      || (mem_ref2 && !mem_ref1 && !integer_zerop (TREE_OPERAND (ref2, 1)))
      || (mem_ref1 && mem_ref2
	  && !tree_int_cst_equal (TREE_OPERAND (ref1, 1),
				  TREE_OPERAND (ref2, 1))))
    {
      ++alias_stats.nonoverlapping_refs_since_match_p_may_alias;
      return -1;
    }

  /* TARGET_MEM_REFs are never wrapped in handled components, so we do not
     need to handle them here at all.  */
  gcc_checking_assert (TREE_CODE (ref1) != TARGET_MEM_REF
		       && TREE_CODE (ref2) != TARGET_MEM_REF);

  /* Pop the stacks in parallel and examine the COMPONENT_REFs of the same
     rank.  This is sufficient because we start from the same DECL and you
     cannot reference several fields at a time with COMPONENT_REFs (unlike
     with ARRAY_RANGE_REFs for arrays) so you always need the same number
     of them to access a sub-component, unless you're in a union, in which
     case the return value will precisely be false.  */
  while (true)
    {
      /* Track whether we have seen an unmatched ref with a non-zero offset.
	 In this case we must look for partial overlaps.  */
      bool seen_unmatched_ref_p = false;

      /* First match ARRAY_REFs and try to disambiguate.  */
      if (!component_refs1.is_empty ()
	  && !component_refs2.is_empty ())
	{
	  unsigned int narray_refs1=0, narray_refs2=0;

	  /* We generally assume that both access paths start with the same
	     sequence of refs.  However if the numbers of array refs are not
	     in sync, try to recover and pop elts until the numbers match.
	     This helps the case where one access path starts with an array
	     and the other with an element.  */
	  for (narray_refs1 = 0; narray_refs1 < component_refs1.length ();
	       narray_refs1++)
	    if (TREE_CODE (component_refs1 [component_refs1.length()
					    - 1 - narray_refs1]) != ARRAY_REF)
	      break;

	  for (narray_refs2 = 0; narray_refs2 < component_refs2.length ();
	       narray_refs2++)
	    if (TREE_CODE (component_refs2 [component_refs2.length()
					    - 1 - narray_refs2]) != ARRAY_REF)
	      break;
	  for (; narray_refs1 > narray_refs2; narray_refs1--)
	    {
	      ref1 = component_refs1.pop ();
	      ntbaa1--;

	      /* If the index is non-zero we need to check whether the
		 reference does not break the main invariant that bases
		 are either disjoint or equal.  Consider the example:

		 unsigned char out[][1];
		 out[1]="a";
		 out[i][0];

		 Here bases out and out are same, but after removing the
		 [i] index, this invariant no longer holds, because
		 out[i] points to the middle of array out.

		 TODO: If the size of the type of the skipped reference is
		 an integer multiple of the size of the type of the other
		 reference this invariant can be verified, but even then it
		 is not completely safe with !flag_strict_aliasing if the
		 other reference contains unbounded array accesses.
		 See   */

	      if (!operand_equal_p (TREE_OPERAND (ref1, 1),
				    cheap_array_ref_low_bound (ref1), 0))
		return 0;
	    }
	  for (; narray_refs2 > narray_refs1; narray_refs2--)
	    {
	      ref2 = component_refs2.pop ();
	      ntbaa2--;
	      if (!operand_equal_p (TREE_OPERAND (ref2, 1),
				    cheap_array_ref_low_bound (ref2), 0))
		return 0;
	    }
	  /* Try to disambiguate matched arrays.  */
	  for (unsigned int i = 0; i < narray_refs1; i++)
	    {
	      int cmp = nonoverlapping_array_refs_p (component_refs1.pop (),
						     component_refs2.pop ());
	      ntbaa1--;
	      ntbaa2--;
	      if (cmp == 1 && !partial_overlap)
		{
		  ++alias_stats
		    .nonoverlapping_refs_since_match_p_no_alias;
		  return 1;
		}
	      if (cmp == -1)
		{
		  seen_unmatched_ref_p = true;
		  /* We cannot maintain the invariant that bases are either
		     the same or completely disjoint.  However we can still
		     recover from type based alias analysis if we reach
		     references of the same sizes.  We do not attempt to
		     match array sizes, so just finish array walking and
		     look for component refs.  */
		  if (ntbaa1 < 0 || ntbaa2 < 0)
		    {
		      ++alias_stats.nonoverlapping_refs_since_match_p_may_alias;
		      return -1;
		    }
		  for (i++; i < narray_refs1; i++)
		    {
		      component_refs1.pop ();
		      component_refs2.pop ();
		      ntbaa1--;
		      ntbaa2--;
		    }
		  break;
		}
	      partial_overlap = false;
	    }
	}

      /* Next look for component_refs.  */
      do
	{
	  if (component_refs1.is_empty ())
	    {
	      ++alias_stats
		.nonoverlapping_refs_since_match_p_must_overlap;
	      return 0;
	    }
	  ref1 = component_refs1.pop ();
	  ntbaa1--;
	  if (TREE_CODE (ref1) != COMPONENT_REF)
	    {
	      seen_unmatched_ref_p = true;
	      if (ntbaa1 < 0 || ntbaa2 < 0)
		{
		  ++alias_stats.nonoverlapping_refs_since_match_p_may_alias;
		  return -1;
		}
	    }
	}
      while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref1, 0))));

      do
	{
	  if (component_refs2.is_empty ())
	    {
	      ++alias_stats
		.nonoverlapping_refs_since_match_p_must_overlap;
	      return 0;
	    }
	  ref2 = component_refs2.pop ();
	  ntbaa2--;
	  if (TREE_CODE (ref2) != COMPONENT_REF)
	    {
	      if (ntbaa1 < 0 || ntbaa2 < 0)
		{
		  ++alias_stats.nonoverlapping_refs_since_match_p_may_alias;
		  return -1;
		}
	      seen_unmatched_ref_p = true;
	    }
	}
      while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref2, 0))));

      /* BIT_FIELD_REF and VIEW_CONVERT_EXPR are taken off the vectors
	 earlier.  */
      gcc_checking_assert (TREE_CODE (ref1) == COMPONENT_REF
			   && TREE_CODE (ref2) == COMPONENT_REF);

      tree field1 = TREE_OPERAND (ref1, 1);
      tree field2 = TREE_OPERAND (ref2, 1);

      /* ???  We cannot simply use the type of operand #0 of the refs here
	 as the Fortran compiler smuggles type punning into COMPONENT_REFs
	 for common blocks instead of using unions like everyone else.  */
      tree type1 = DECL_CONTEXT (field1);
      tree type2 = DECL_CONTEXT (field2);

      partial_overlap = false;

      /* If we skipped array refs on types of different sizes, we can
	 no longer be sure that there are not partial overlaps.  */
      if (seen_unmatched_ref_p && ntbaa1 >= 0 && ntbaa2 >= 0
	  && !operand_equal_p (TYPE_SIZE (type1), TYPE_SIZE (type2), 0))
	{
	  ++alias_stats
	    .nonoverlapping_refs_since_match_p_may_alias;
	  return -1;
	}

      int cmp = nonoverlapping_component_refs_p_1 (field1, field2);
      if (cmp == -1)
	{
	  ++alias_stats
	    .nonoverlapping_refs_since_match_p_may_alias;
	  return -1;
	}
      else if (cmp == 1)
	{
	  ++alias_stats
	    .nonoverlapping_refs_since_match_p_no_alias;
	  return 1;
	}
    }

  ++alias_stats.nonoverlapping_refs_since_match_p_must_overlap;
  return 0;
}
/* Return a TYPE_UID which can be used to match record types we consider
   the same for TBAA purposes.  */

static inline int
ncr_type_uid (const_tree field)
{
  /* ???  We cannot simply use the type of operand #0 of the refs here
     as the Fortran compiler smuggles type punning into COMPONENT_REFs
     for common blocks instead of using unions like everyone else.  */
  tree type = DECL_FIELD_CONTEXT (field);
  /* With LTO, types considered the same by same_type_for_tbaa_p that come
     from different translation units may not have the same main variant.
     They do, however, have the same TYPE_CANONICAL.  */
  if (TYPE_CANONICAL (type))
    return TYPE_UID (TYPE_CANONICAL (type));
  return TYPE_UID (type);
}
/* qsort compare function to sort FIELD_DECLs by their
   DECL_FIELD_CONTEXT TYPE_UID.  */

static inline int
ncr_compar (const void *field1_, const void *field2_)
{
  const_tree field1 = *(const_tree *) const_cast <void *>(field1_);
  const_tree field2 = *(const_tree *) const_cast <void *>(field2_);
  unsigned int uid1 = ncr_type_uid (field1);
  unsigned int uid2 = ncr_type_uid (field2);

  if (uid1 < uid2)
    return -1;
  else if (uid1 > uid2)
    return 1;
  return 0;
}
1828 /* Return true if we can determine that the fields referenced cannot
1829 overlap for any pair of objects. This relies on TBAA. */
1831 static bool
1832 nonoverlapping_component_refs_p (const_tree x, const_tree y)
1834 /* Early return if we have nothing to do.
1836 Do not count this as may-alias in the stats - it is more useful
1837 to know how many disambiguations happened provided that
1838 the query was meaningful. */
1839 if (!flag_strict_aliasing
1840 || !x || !y
1841 || !handled_component_p (x)
1842 || !handled_component_p (y))
1843 return false;
1845 auto_vec<const_tree, 16> fieldsx;
1846 while (handled_component_p (x))
1848 if (TREE_CODE (x) == COMPONENT_REF)
1850 tree field = TREE_OPERAND (x, 1);
1851 tree type = DECL_FIELD_CONTEXT (field);
1852 if (TREE_CODE (type) == RECORD_TYPE)
1853 fieldsx.safe_push (field);
1855 else if (ends_tbaa_access_path_p (x))
1856 fieldsx.truncate (0);
1857 x = TREE_OPERAND (x, 0);
1859 if (fieldsx.length () == 0)
1860 return false;
1861 auto_vec<const_tree, 16> fieldsy;
1862 while (handled_component_p (y))
1864 if (TREE_CODE (y) == COMPONENT_REF)
1866 tree field = TREE_OPERAND (y, 1);
1867 tree type = DECL_FIELD_CONTEXT (field);
1868 if (TREE_CODE (type) == RECORD_TYPE)
1869 fieldsy.safe_push (TREE_OPERAND (y, 1));
1871 else if (ends_tbaa_access_path_p (y))
1872 fieldsy.truncate (0);
1873 y = TREE_OPERAND (y, 0);
1875 if (fieldsy.length () == 0)
1877 ++alias_stats.nonoverlapping_component_refs_p_may_alias;
1878 return false;
1881 /* Most common case first. */
1882 if (fieldsx.length () == 1
1883 && fieldsy.length () == 1)
1885 if (same_type_for_tbaa (DECL_FIELD_CONTEXT (fieldsx[0]),
1886 DECL_FIELD_CONTEXT (fieldsy[0])) == 1
1887 && nonoverlapping_component_refs_p_1 (fieldsx[0], fieldsy[0]) == 1)
1889 ++alias_stats.nonoverlapping_component_refs_p_no_alias;
1890 return true;
1892 else
1894 ++alias_stats.nonoverlapping_component_refs_p_may_alias;
1895 return false;
1899 if (fieldsx.length () == 2)
1901 if (ncr_compar (&fieldsx[0], &fieldsx[1]) == 1)
1902 std::swap (fieldsx[0], fieldsx[1]);
1904 else
1905 fieldsx.qsort (ncr_compar);
1907 if (fieldsy.length () == 2)
1909 if (ncr_compar (&fieldsy[0], &fieldsy[1]) == 1)
1910 std::swap (fieldsy[0], fieldsy[1]);
1912 else
1913 fieldsy.qsort (ncr_compar);
1915 unsigned i = 0, j = 0;
1918 const_tree fieldx = fieldsx[i];
1919 const_tree fieldy = fieldsy[j];
1921 /* We're left with accessing different fields of a structure,
1922 no possible overlap. */
1923 if (same_type_for_tbaa (DECL_FIELD_CONTEXT (fieldx),
1924 DECL_FIELD_CONTEXT (fieldy)) == 1
1925 && nonoverlapping_component_refs_p_1 (fieldx, fieldy) == 1)
1927 ++alias_stats.nonoverlapping_component_refs_p_no_alias;
1928 return true;
1931 if (ncr_type_uid (fieldx) < ncr_type_uid (fieldy))
1933 i++;
1934 if (i == fieldsx.length ())
1935 break;
1937 else
1939 j++;
1940 if (j == fieldsy.length ())
1941 break;
1944 while (1);
1946 ++alias_stats.nonoverlapping_component_refs_p_may_alias;
1947 return false;
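/* Editorial sketch, not from the original source: a case the
   TBAA-based test above can still disambiguate after the pairwise
   path matching gave up.  Whatever P and Q point to, both access
   paths end in COMPONENT_REFs of the same record type, and the two
   fields do not overlap within it.  */

struct ncrp_example_s { int f1; int f2; };

static void ATTRIBUTE_UNUSED
ncrp_example (struct ncrp_example_s *p, struct ncrp_example_s *q)
{
  p->f1 = 1;
  q->f2 = 2;
}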
1951 /* Return true if two memory references based on the variables BASE1
1952 and BASE2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
1953 [OFFSET2, OFFSET2 + MAX_SIZE2) may alias. REF1 and REF2
1954 if non-NULL are the complete memory reference trees. */
1956 static bool
1957 decl_refs_may_alias_p (tree ref1, tree base1,
1958 poly_int64 offset1, poly_int64 max_size1,
1959 poly_int64 size1,
1960 tree ref2, tree base2,
1961 poly_int64 offset2, poly_int64 max_size2,
1962 poly_int64 size2)
1964 gcc_checking_assert (DECL_P (base1) && DECL_P (base2));
1966 /* If the references are based on different variables, they cannot alias. */
1967 if (compare_base_decls (base1, base2) == 0)
1968 return false;
1970 /* If both references are based on the same variable, they cannot alias if
1971 the accesses do not overlap. */
1972 if (!ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
1973 return false;
1975 /* If there is a must-alias, there is no point disambiguating further. */
1976 if (known_eq (size1, max_size1) && known_eq (size2, max_size2))
1977 return true;
1979 /* For components with variable position, the above test isn't sufficient,
1980 so we disambiguate component references manually. */
1981 if (ref1 && ref2
1982 && handled_component_p (ref1) && handled_component_p (ref2)
1983 && nonoverlapping_refs_since_match_p (NULL, ref1, NULL, ref2, false) == 1)
1984 return false;
1986 return true;
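/* Editorial sketch, not from the original source: the common case the
   range check above decides.  Both refs share the decl below as base
   and (on a 32-bit-int target) occupy the disjoint bit ranges [0, 32)
   and [32, 64), so no alias is reported without consulting TBAA.  */

static struct { int a; int b; } drm_example_s;

static void ATTRIBUTE_UNUSED
drm_example (void)
{
  drm_example_s.a = 1;
  drm_example_s.b = 2;
}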
1989 /* Return true if the access with base BASE is view-converted.
1990 BASE must not have been stripped down to the inner MEM_REF (&decl)
1991 as done by ao_ref_base, so one extra walk
1992 of handled components is needed. */
1994 static bool
1995 view_converted_memref_p (tree base)
1997 if (TREE_CODE (base) != MEM_REF && TREE_CODE (base) != TARGET_MEM_REF)
1998 return false;
1999 return same_type_for_tbaa (TREE_TYPE (base),
2000 TREE_TYPE (TREE_TYPE (TREE_OPERAND (base, 1)))) != 1;
2003 /* Return true if an indirect reference based on *PTR1 constrained
2004 to [OFFSET1, OFFSET1 + MAX_SIZE1) may alias a variable based on BASE2
2005 constrained to [OFFSET2, OFFSET2 + MAX_SIZE2). *PTR1 and BASE2 have
2006 the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
2007 in which case they are computed on-demand. REF1 and REF2
2008 if non-NULL are the complete memory reference trees. */
2010 static bool
2011 indirect_ref_may_alias_decl_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
2012 poly_int64 offset1, poly_int64 max_size1,
2013 poly_int64 size1,
2014 alias_set_type ref1_alias_set,
2015 alias_set_type base1_alias_set,
2016 tree ref2 ATTRIBUTE_UNUSED, tree base2,
2017 poly_int64 offset2, poly_int64 max_size2,
2018 poly_int64 size2,
2019 alias_set_type ref2_alias_set,
2020 alias_set_type base2_alias_set, bool tbaa_p)
2022 tree ptr1;
2023 tree ptrtype1, dbase2;
2025 gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
2026 || TREE_CODE (base1) == TARGET_MEM_REF)
2027 && DECL_P (base2));
2029 ptr1 = TREE_OPERAND (base1, 0);
2030 poly_offset_int moff = mem_ref_offset (base1) << LOG2_BITS_PER_UNIT;
2032 /* If only one reference is based on a variable, they cannot alias if
2033 the pointer access is beyond the extent of the variable access.
2034 (the pointer base cannot validly point to an offset less than zero
2035 of the variable).
2036 ??? IVOPTs creates bases that do not honor this restriction,
2037 so do not apply this optimization for TARGET_MEM_REFs. */
2038 if (TREE_CODE (base1) != TARGET_MEM_REF
2039 && !ranges_maybe_overlap_p (offset1 + moff, -1, offset2, max_size2))
2040 return false;
2042 /* If the pointer-based access is bigger than the variable, they cannot
2043 alias. This is similar to the check below where we use TBAA to
2044 increase the size of the pointer-based access based on the dynamic
2045 type of a containing object we can infer from it. */
2046 poly_int64 dsize2;
2047 if (known_size_p (size1)
2048 && poly_int_tree_p (DECL_SIZE (base2), &dsize2)
2049 && known_lt (dsize2, size1))
2050 return false;
2052 /* They also cannot alias if the pointer may not point to the decl. */
2053 if (!ptr_deref_may_alias_decl_p (ptr1, base2))
2054 return false;
2056 /* Disambiguations that rely on strict aliasing rules follow. */
2057 if (!flag_strict_aliasing || !tbaa_p)
2058 return true;
2060 /* If the alias set for a pointer access is zero all bets are off. */
2061 if (base1_alias_set == 0 || base2_alias_set == 0)
2062 return true;
2064 /* When we are trying to disambiguate an access with a pointer dereference
2065 as base versus one with a decl as base we can use both the size
2066 of the decl and its dynamic type for extra disambiguation.
2067 ??? We do not know anything about the dynamic type of the decl
2068 other than that its alias-set contains base2_alias_set as a subset
2069 which does not help us here. */
2070 /* As we know nothing useful about the dynamic type of the decl just
2071 use the usual conflict check rather than a subset test.
2072 ??? We could introduce -fvery-strict-aliasing when the language
2073 does not allow decls to have a dynamic type that differs from their
2074 static type. Then we can check
2075 !alias_set_subset_of (base1_alias_set, base2_alias_set) instead. */
2076 if (base1_alias_set != base2_alias_set
2077 && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
2078 return false;
2080 ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));
2082 /* If the size of the access relevant for TBAA through the pointer
2083 is bigger than the size of the decl we can't possibly access the
2084 decl via that pointer. */
2085 if (/* ??? This in turn may run afoul when a decl of type T which is
2086 a member of union type U is accessed through a pointer to
2087 type U and sizeof T is smaller than sizeof U. */
2088 TREE_CODE (TREE_TYPE (ptrtype1)) != UNION_TYPE
2089 && TREE_CODE (TREE_TYPE (ptrtype1)) != QUAL_UNION_TYPE
2090 && compare_sizes (DECL_SIZE (base2),
2091 TYPE_SIZE (TREE_TYPE (ptrtype1))) < 0)
2092 return false;
2094 if (!ref2)
2095 return true;
2097 /* If the decl is accessed via a MEM_REF, reconstruct the base
2098 we can use for TBAA and an appropriately adjusted offset. */
2099 dbase2 = ref2;
2100 while (handled_component_p (dbase2))
2101 dbase2 = TREE_OPERAND (dbase2, 0);
2102 poly_int64 doffset1 = offset1;
2103 poly_offset_int doffset2 = offset2;
2104 if (TREE_CODE (dbase2) == MEM_REF
2105 || TREE_CODE (dbase2) == TARGET_MEM_REF)
2107 doffset2 -= mem_ref_offset (dbase2) << LOG2_BITS_PER_UNIT;
2108 tree ptrtype2 = TREE_TYPE (TREE_OPERAND (dbase2, 1));
2109 /* If second reference is view-converted, give up now. */
2110 if (same_type_for_tbaa (TREE_TYPE (dbase2), TREE_TYPE (ptrtype2)) != 1)
2111 return true;
2114 /* If first reference is view-converted, give up now. */
2115 if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1)
2116 return true;
2118 /* If both references are through the same type, they do not alias
2119 if the accesses do not overlap. This does extra disambiguation
2120 for mixed/pointer accesses but requires strict aliasing.
2121 For MEM_REFs we require that the component-ref offset we computed
2122 is relative to the start of the type which we ensure by
2123 comparing rvalue and access type and disregarding the constant
2124 pointer offset.
2126 But avoid treating variable-length arrays as "objects"; instead assume they
2127 can overlap by an exact multiple of their element size.
2128 See gcc.dg/torture/alias-2.c. */
2129 if (((TREE_CODE (base1) != TARGET_MEM_REF
2130 || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
2131 && (TREE_CODE (dbase2) != TARGET_MEM_REF
2132 || (!TMR_INDEX (dbase2) && !TMR_INDEX2 (dbase2))))
2133 && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (dbase2)) == 1)
2135 bool partial_overlap = (TREE_CODE (TREE_TYPE (base1)) == ARRAY_TYPE
2136 && (TYPE_SIZE (TREE_TYPE (base1))
2137 && TREE_CODE (TYPE_SIZE (TREE_TYPE (base1)))
2138 != INTEGER_CST));
2139 if (!partial_overlap
2140 && !ranges_maybe_overlap_p (doffset1, max_size1, doffset2, max_size2))
2141 return false;
2142 if (!ref1 || !ref2
2144 /* If there is a must-alias, there is no point disambiguating further. */
2144 || (!partial_overlap
2145 && known_eq (size1, max_size1) && known_eq (size2, max_size2)))
2146 return true;
2147 int res = nonoverlapping_refs_since_match_p (base1, ref1, base2, ref2,
2148 partial_overlap);
2149 if (res == -1)
2150 return !nonoverlapping_component_refs_p (ref1, ref2);
2151 return !res;
2154 /* Do access-path based disambiguation. */
2155 if (ref1 && ref2
2156 && (handled_component_p (ref1) || handled_component_p (ref2)))
2157 return aliasing_component_refs_p (ref1,
2158 ref1_alias_set, base1_alias_set,
2159 offset1, max_size1,
2160 ref2,
2161 ref2_alias_set, base2_alias_set,
2162 offset2, max_size2);
2164 return true;
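/* Editorial sketch, not from the original source: a pointer-vs-decl
   query the alias-set check above disambiguates under
   -fstrict-aliasing.  A store through a float pointer cannot touch
   the int-typed decl, so the load can be moved across the store.  */

static int irm_example_counter;

static int ATTRIBUTE_UNUSED
irm_example (float *p)
{
  int tmp = irm_example_counter;  /* Decl-based ref of type int.  */
  *p = 0.0f;                      /* Pointer-based ref of type float.  */
  return tmp;
}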
2167 /* Return true if two indirect references based on *PTR1
2168 and *PTR2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
2169 [OFFSET2, OFFSET2 + MAX_SIZE2) may alias. *PTR1 and *PTR2 have
2170 the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
2171 in which case they are computed on-demand. REF1 and REF2
2172 if non-NULL are the complete memory reference trees. */
2174 static bool
2175 indirect_refs_may_alias_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
2176 poly_int64 offset1, poly_int64 max_size1,
2177 poly_int64 size1,
2178 alias_set_type ref1_alias_set,
2179 alias_set_type base1_alias_set,
2180 tree ref2 ATTRIBUTE_UNUSED, tree base2,
2181 poly_int64 offset2, poly_int64 max_size2,
2182 poly_int64 size2,
2183 alias_set_type ref2_alias_set,
2184 alias_set_type base2_alias_set, bool tbaa_p)
2186 tree ptr1;
2187 tree ptr2;
2188 tree ptrtype1, ptrtype2;
2190 gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
2191 || TREE_CODE (base1) == TARGET_MEM_REF)
2192 && (TREE_CODE (base2) == MEM_REF
2193 || TREE_CODE (base2) == TARGET_MEM_REF));
2195 ptr1 = TREE_OPERAND (base1, 0);
2196 ptr2 = TREE_OPERAND (base2, 0);
2198 /* If both bases are based on pointers they cannot alias if they may not
2199 point to the same memory object or if they point to the same object
2200 and the accesses do not overlap. */
2201 if ((!cfun || gimple_in_ssa_p (cfun))
2202 && operand_equal_p (ptr1, ptr2, 0)
2203 && (((TREE_CODE (base1) != TARGET_MEM_REF
2204 || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
2205 && (TREE_CODE (base2) != TARGET_MEM_REF
2206 || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2))))
2207 || (TREE_CODE (base1) == TARGET_MEM_REF
2208 && TREE_CODE (base2) == TARGET_MEM_REF
2209 && (TMR_STEP (base1) == TMR_STEP (base2)
2210 || (TMR_STEP (base1) && TMR_STEP (base2)
2211 && operand_equal_p (TMR_STEP (base1),
2212 TMR_STEP (base2), 0)))
2213 && (TMR_INDEX (base1) == TMR_INDEX (base2)
2214 || (TMR_INDEX (base1) && TMR_INDEX (base2)
2215 && operand_equal_p (TMR_INDEX (base1),
2216 TMR_INDEX (base2), 0)))
2217 && (TMR_INDEX2 (base1) == TMR_INDEX2 (base2)
2218 || (TMR_INDEX2 (base1) && TMR_INDEX2 (base2)
2219 && operand_equal_p (TMR_INDEX2 (base1),
2220 TMR_INDEX2 (base2), 0))))))
2222 poly_offset_int moff1 = mem_ref_offset (base1) << LOG2_BITS_PER_UNIT;
2223 poly_offset_int moff2 = mem_ref_offset (base2) << LOG2_BITS_PER_UNIT;
2224 if (!ranges_maybe_overlap_p (offset1 + moff1, max_size1,
2225 offset2 + moff2, max_size2))
2226 return false;
2227 /* If there is a must-alias, there is no point disambiguating further. */
2228 if (known_eq (size1, max_size1) && known_eq (size2, max_size2))
2229 return true;
2230 if (ref1 && ref2)
2232 int res = nonoverlapping_refs_since_match_p (NULL, ref1, NULL, ref2,
2233 false);
2234 if (res != -1)
2235 return !res;
2238 if (!ptr_derefs_may_alias_p (ptr1, ptr2))
2239 return false;
2241 /* Disambiguations that rely on strict aliasing rules follow. */
2242 if (!flag_strict_aliasing || !tbaa_p)
2243 return true;
2245 ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));
2246 ptrtype2 = TREE_TYPE (TREE_OPERAND (base2, 1));
2248 /* If the alias set for a pointer access is zero all bets are off. */
2249 if (base1_alias_set == 0
2250 || base2_alias_set == 0)
2251 return true;
2253 /* Do type-based disambiguation. */
2254 if (base1_alias_set != base2_alias_set
2255 && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
2256 return false;
2258 /* If either reference is view-converted, give up now. */
2259 if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1
2260 || same_type_for_tbaa (TREE_TYPE (base2), TREE_TYPE (ptrtype2)) != 1)
2261 return true;
2263 /* If both references are through the same type, they do not alias
2264 if the accesses do not overlap. This does extra disambiguation
2265 for mixed/pointer accesses but requires strict aliasing. */
2266 if ((TREE_CODE (base1) != TARGET_MEM_REF
2267 || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
2268 && (TREE_CODE (base2) != TARGET_MEM_REF
2269 || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2)))
2270 && same_type_for_tbaa (TREE_TYPE (ptrtype1),
2271 TREE_TYPE (ptrtype2)) == 1)
2273 /* But avoid treating arrays as "objects"; instead assume they
2274 can overlap by an exact multiple of their element size.
2275 See gcc.dg/torture/alias-2.c. */
2276 bool partial_overlap = TREE_CODE (TREE_TYPE (ptrtype1)) == ARRAY_TYPE;
2278 if (!partial_overlap
2279 && !ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
2280 return false;
2281 if (!ref1 || !ref2
2282 || (!partial_overlap
2283 && known_eq (size1, max_size1) && known_eq (size2, max_size2)))
2284 return true;
2285 int res = nonoverlapping_refs_since_match_p (base1, ref1, base2, ref2,
2286 partial_overlap);
2287 if (res == -1)
2288 return !nonoverlapping_component_refs_p (ref1, ref2);
2289 return !res;
2292 /* Do access-path based disambiguation. */
2293 if (ref1 && ref2
2294 && (handled_component_p (ref1) || handled_component_p (ref2)))
2295 return aliasing_component_refs_p (ref1,
2296 ref1_alias_set, base1_alias_set,
2297 offset1, max_size1,
2298 ref2,
2299 ref2_alias_set, base2_alias_set,
2300 offset2, max_size2);
2302 return true;
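/* Editorial sketch, not from the original source: the equal-pointer
   fast path above in action.  Both refs are MEM_REFs based on the
   same SSA pointer; with 4-byte ints the constant offsets 0 and 4
   give disjoint ranges, so no alias is reported without any
   points-to or TBAA query.  */

static void ATTRIBUTE_UNUSED
iim_example (int *p)
{
  p[0] = 1;   /* MEM_REF [p + 0B]  */
  p[1] = 2;   /* MEM_REF [p + 4B]  */
}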
2305 /* Return true, if the two memory references REF1 and REF2 may alias. */
2307 static bool
2308 refs_may_alias_p_2 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
2310 tree base1, base2;
2311 poly_int64 offset1 = 0, offset2 = 0;
2312 poly_int64 max_size1 = -1, max_size2 = -1;
2313 bool var1_p, var2_p, ind1_p, ind2_p;
2315 gcc_checking_assert ((!ref1->ref
2316 || TREE_CODE (ref1->ref) == SSA_NAME
2317 || DECL_P (ref1->ref)
2318 || TREE_CODE (ref1->ref) == STRING_CST
2319 || handled_component_p (ref1->ref)
2320 || TREE_CODE (ref1->ref) == MEM_REF
2321 || TREE_CODE (ref1->ref) == TARGET_MEM_REF
2322 || TREE_CODE (ref1->ref) == WITH_SIZE_EXPR)
2323 && (!ref2->ref
2324 || TREE_CODE (ref2->ref) == SSA_NAME
2325 || DECL_P (ref2->ref)
2326 || TREE_CODE (ref2->ref) == STRING_CST
2327 || handled_component_p (ref2->ref)
2328 || TREE_CODE (ref2->ref) == MEM_REF
2329 || TREE_CODE (ref2->ref) == TARGET_MEM_REF
2330 || TREE_CODE (ref2->ref) == WITH_SIZE_EXPR));
2332 /* Decompose the references into their base objects and the access. */
2333 base1 = ao_ref_base (ref1);
2334 offset1 = ref1->offset;
2335 max_size1 = ref1->max_size;
2336 base2 = ao_ref_base (ref2);
2337 offset2 = ref2->offset;
2338 max_size2 = ref2->max_size;
2340 /* We can end up with registers or constants as bases for example from
2341 *D.1663_44 = VIEW_CONVERT_EXPR<struct DB_LSN>(__tmp$B0F64_59);
2342 which is seen as a struct copy. */
2343 if (TREE_CODE (base1) == SSA_NAME
2344 || TREE_CODE (base1) == CONST_DECL
2345 || TREE_CODE (base1) == CONSTRUCTOR
2346 || TREE_CODE (base1) == ADDR_EXPR
2347 || CONSTANT_CLASS_P (base1)
2348 || TREE_CODE (base2) == SSA_NAME
2349 || TREE_CODE (base2) == CONST_DECL
2350 || TREE_CODE (base2) == CONSTRUCTOR
2351 || TREE_CODE (base2) == ADDR_EXPR
2352 || CONSTANT_CLASS_P (base2))
2353 return false;
2355 /* We can end up referring to code via function and label decls.
2356 As we likely do not properly track code aliases, conservatively
2357 bail out. */
2358 if (TREE_CODE (base1) == FUNCTION_DECL
2359 || TREE_CODE (base1) == LABEL_DECL
2360 || TREE_CODE (base2) == FUNCTION_DECL
2361 || TREE_CODE (base2) == LABEL_DECL)
2362 return true;
2364 /* Two volatile accesses always conflict. */
2365 if (ref1->volatile_p
2366 && ref2->volatile_p)
2367 return true;
2369 /* refN->ref may convey size information; do not confuse our workers
2370 with that but strip it - ao_ref_base already took it into account. */
2371 tree ref1ref = ref1->ref;
2372 if (ref1ref && TREE_CODE (ref1ref) == WITH_SIZE_EXPR)
2373 ref1ref = TREE_OPERAND (ref1ref, 0);
2374 tree ref2ref = ref2->ref;
2375 if (ref2ref && TREE_CODE (ref2ref) == WITH_SIZE_EXPR)
2376 ref2ref = TREE_OPERAND (ref2ref, 0);
2378 /* Defer to simple offset-based disambiguation if we have
2379 references based on two decls. Do this before deferring to
2380 TBAA to handle must-alias cases in conformance with the
2381 GCC extension of allowing type-punning through unions. */
2382 var1_p = DECL_P (base1);
2383 var2_p = DECL_P (base2);
2384 if (var1_p && var2_p)
2385 return decl_refs_may_alias_p (ref1ref, base1, offset1, max_size1,
2386 ref1->size,
2387 ref2ref, base2, offset2, max_size2,
2388 ref2->size);
2390 /* Handle restrict based accesses.
2391 ??? ao_ref_base strips inner MEM_REF [&decl], recover from that
2392 here. */
2393 tree rbase1 = base1;
2394 tree rbase2 = base2;
2395 if (var1_p)
2397 rbase1 = ref1ref;
2398 if (rbase1)
2399 while (handled_component_p (rbase1))
2400 rbase1 = TREE_OPERAND (rbase1, 0);
2402 if (var2_p)
2404 rbase2 = ref2ref;
2405 if (rbase2)
2406 while (handled_component_p (rbase2))
2407 rbase2 = TREE_OPERAND (rbase2, 0);
2409 if (rbase1 && rbase2
2410 && (TREE_CODE (base1) == MEM_REF || TREE_CODE (base1) == TARGET_MEM_REF)
2411 && (TREE_CODE (base2) == MEM_REF || TREE_CODE (base2) == TARGET_MEM_REF)
2412 /* If the accesses are in the same restrict clique... */
2413 && MR_DEPENDENCE_CLIQUE (base1) == MR_DEPENDENCE_CLIQUE (base2)
2414 /* But based on different pointers they do not alias. */
2415 && MR_DEPENDENCE_BASE (base1) != MR_DEPENDENCE_BASE (base2))
2416 return false;
2418 ind1_p = (TREE_CODE (base1) == MEM_REF
2419 || TREE_CODE (base1) == TARGET_MEM_REF);
2420 ind2_p = (TREE_CODE (base2) == MEM_REF
2421 || TREE_CODE (base2) == TARGET_MEM_REF);
2423 /* Canonicalize the pointer-vs-decl case. */
2424 if (ind1_p && var2_p)
2426 std::swap (offset1, offset2);
2427 std::swap (max_size1, max_size2);
2428 std::swap (base1, base2);
2429 std::swap (ref1, ref2);
2430 std::swap (ref1ref, ref2ref);
2431 var1_p = true;
2432 ind1_p = false;
2433 var2_p = false;
2434 ind2_p = true;
2437 /* First defer to TBAA if possible. */
2438 if (tbaa_p
2439 && flag_strict_aliasing
2440 && !alias_sets_conflict_p (ao_ref_alias_set (ref1),
2441 ao_ref_alias_set (ref2)))
2442 return false;
2444 /* If the reference is based on a pointer that points to memory
2445 that may not be written to then the other reference cannot possibly
2446 clobber it. */
2447 if ((TREE_CODE (TREE_OPERAND (base2, 0)) == SSA_NAME
2448 && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base2, 0)))
2449 || (ind1_p
2450 && TREE_CODE (TREE_OPERAND (base1, 0)) == SSA_NAME
2451 && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base1, 0))))
2452 return false;
2454 /* Dispatch to the pointer-vs-decl or pointer-vs-pointer disambiguators. */
2455 if (var1_p && ind2_p)
2456 return indirect_ref_may_alias_decl_p (ref2ref, base2,
2457 offset2, max_size2, ref2->size,
2458 ao_ref_alias_set (ref2),
2459 ao_ref_base_alias_set (ref2),
2460 ref1ref, base1,
2461 offset1, max_size1, ref1->size,
2462 ao_ref_alias_set (ref1),
2463 ao_ref_base_alias_set (ref1),
2464 tbaa_p);
2465 else if (ind1_p && ind2_p)
2466 return indirect_refs_may_alias_p (ref1ref, base1,
2467 offset1, max_size1, ref1->size,
2468 ao_ref_alias_set (ref1),
2469 ao_ref_base_alias_set (ref1),
2470 ref2ref, base2,
2471 offset2, max_size2, ref2->size,
2472 ao_ref_alias_set (ref2),
2473 ao_ref_base_alias_set (ref2),
2474 tbaa_p);
2476 gcc_unreachable ();
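/* Editorial sketch, not from the original source: why the decl-decl
   case above is handled before the TBAA check.  Both refs below share
   the decl U and overlap, so they must conflict even though the alias
   sets of int and float would not - this keeps the GCC extension of
   type-punning through unions working.  */

static int ATTRIBUTE_UNUSED
union_pun_example (void)
{
  union { int i; float f; } u;
  u.f = 1.0f;   /* Must be seen as a may-def ...  */
  return u.i;   /* ... of this read.  */
}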
2479 /* Return true, if the two memory references REF1 and REF2 may alias
2480 and update statistics. */
2482 bool
2483 refs_may_alias_p_1 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
2485 bool res = refs_may_alias_p_2 (ref1, ref2, tbaa_p);
2486 if (res)
2487 ++alias_stats.refs_may_alias_p_may_alias;
2488 else
2489 ++alias_stats.refs_may_alias_p_no_alias;
2490 return res;
2493 static bool
2494 refs_may_alias_p (tree ref1, ao_ref *ref2, bool tbaa_p)
2496 ao_ref r1;
2497 ao_ref_init (&r1, ref1);
2498 return refs_may_alias_p_1 (&r1, ref2, tbaa_p);
2501 bool
2502 refs_may_alias_p (tree ref1, tree ref2, bool tbaa_p)
2504 ao_ref r1, r2;
2505 ao_ref_init (&r1, ref1);
2506 ao_ref_init (&r2, ref2);
2507 return refs_may_alias_p_1 (&r1, &r2, tbaa_p);
2510 /* Returns true if there is an anti-dependence for the STORE that
2511 executes after the LOAD. */
2513 bool
2514 refs_anti_dependent_p (tree load, tree store)
2516 ao_ref r1, r2;
2517 ao_ref_init (&r1, load);
2518 ao_ref_init (&r2, store);
2519 return refs_may_alias_p_1 (&r1, &r2, false);
2522 /* Returns true if there is an output dependence for the stores
2523 STORE1 and STORE2. */
2525 bool
2526 refs_output_dependent_p (tree store1, tree store2)
2528 ao_ref r1, r2;
2529 ao_ref_init (&r1, store1);
2530 ao_ref_init (&r2, store2);
2531 return refs_may_alias_p_1 (&r1, &r2, false);
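/* Editorial sketch, not from the original source: why the dependence
   wrappers above pass TBAA_P = false.  Ordering-sensitive transforms
   must preserve even type-punned conflicts, so the store below is
   kept after the load although the types do not conflict.  */

static int ATTRIBUTE_UNUSED
anti_dep_example (int *pi, float *pf)
{
  int v = *pi;   /* refs_anti_dependent_p (*pi, *pf) is true ...  */
  *pf = 0.0f;    /* ... so the store may not be hoisted above the load.  */
  return v;
}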
2534 /* Returns true if and only if REF may alias any access stored in TT.
2535 If TBAA_P is true, use the TBAA oracle. */
2537 static bool
2538 modref_may_conflict (const gimple *stmt,
2539 modref_tree <alias_set_type> *tt, ao_ref *ref, bool tbaa_p)
2541 alias_set_type base_set, ref_set;
2542 modref_base_node <alias_set_type> *base_node;
2543 modref_ref_node <alias_set_type> *ref_node;
2544 size_t i, j, k;
2546 if (tt->every_base)
2547 return true;
2549 if (!dbg_cnt (ipa_mod_ref))
2550 return true;
2552 base_set = ao_ref_base_alias_set (ref);
2554 ref_set = ao_ref_alias_set (ref);
2556 int num_tests = 0, max_tests = param_modref_max_tests;
2557 FOR_EACH_VEC_SAFE_ELT (tt->bases, i, base_node)
2559 if (tbaa_p && flag_strict_aliasing)
2561 if (num_tests >= max_tests)
2562 return true;
2563 alias_stats.modref_tests++;
2564 if (!alias_sets_conflict_p (base_set, base_node->base))
2565 continue;
2566 num_tests++;
2569 if (base_node->every_ref)
2570 return true;
2572 FOR_EACH_VEC_SAFE_ELT (base_node->refs, j, ref_node)
2574 /* Do not repeat the same test as before. */
2575 if ((ref_set != base_set || base_node->base != ref_node->ref)
2576 && tbaa_p && flag_strict_aliasing)
2578 if (num_tests >= max_tests)
2579 return true;
2580 alias_stats.modref_tests++;
2581 if (!alias_sets_conflict_p (ref_set, ref_node->ref))
2582 continue;
2583 num_tests++;
2586 /* TBAA checks did not disambiguate; try to use the base pointer.
2587 For that, however, we need ref->ref or ref->base. */
2588 if (ref_node->every_access || (!ref->ref && !ref->base))
2589 return true;
2591 modref_access_node *access_node;
2592 FOR_EACH_VEC_SAFE_ELT (ref_node->accesses, k, access_node)
2594 if (num_tests >= max_tests)
2595 return true;
2597 if (access_node->parm_index == -1
2598 || (unsigned)access_node->parm_index
2599 >= gimple_call_num_args (stmt))
2600 return true;
2602 alias_stats.modref_baseptr_tests++;
2604 tree arg = gimple_call_arg (stmt, access_node->parm_index);
2606 if (integer_zerop (arg) && flag_delete_null_pointer_checks)
2607 continue;
2609 if (!POINTER_TYPE_P (TREE_TYPE (arg)))
2610 return true;
2612 /* ao_ref_init_from_ptr_and_range assumes that the memory access
2613 starts at the pointed-to location. If we did not track the
2614 offset, it is possible that it starts before the actual
2615 pointer. */
2616 if (!access_node->parm_offset_known)
2618 if (ptr_deref_may_alias_ref_p_1 (arg, ref))
2619 return true;
2621 else
2623 ao_ref ref2;
2624 poly_offset_int off = (poly_offset_int)access_node->offset
2625 + ((poly_offset_int)access_node->parm_offset
2626 << LOG2_BITS_PER_UNIT);
2627 poly_int64 off2;
2628 if (off.to_shwi (&off2))
2630 ao_ref_init_from_ptr_and_range
2631 (&ref2, arg, true, off2,
2632 access_node->size,
2633 access_node->max_size);
2634 ref2.ref_alias_set = ref_set;
2635 ref2.base_alias_set = base_set;
2636 if (refs_may_alias_p_1 (&ref2, ref, tbaa_p))
2637 return true;
2639 else if (ptr_deref_may_alias_ref_p_1 (arg, ref))
2640 return true;
2642 num_tests++;
2646 return false;
2649 /* Check if REF conflicts with call using "fn spec" attribute.
2650 If CLOBBER is true we are checking for writes, otherwise check loads.
2652 Return 0 if there are no conflicts (except for possible function call
2653 argument reads), 1 if there are conflicts, and -1 if we cannot decide from
2654 the fn spec. */
2656 static int
2657 check_fnspec (gcall *call, ao_ref *ref, bool clobber)
2659 attr_fnspec fnspec = gimple_call_fnspec (call);
2660 if (fnspec.known_p ())
2662 if (clobber
2663 ? !fnspec.global_memory_written_p ()
2664 : !fnspec.global_memory_read_p ())
2666 for (unsigned int i = 0; i < gimple_call_num_args (call); i++)
2667 if (POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, i)))
2668 && (!fnspec.arg_specified_p (i)
2669 || (clobber ? fnspec.arg_maybe_written_p (i)
2670 : fnspec.arg_maybe_read_p (i))))
2672 ao_ref dref;
2673 tree size = NULL_TREE;
2674 unsigned int size_arg;
2676 if (!fnspec.arg_specified_p (i))
2678 else if (fnspec.arg_max_access_size_given_by_arg_p
2679 (i, &size_arg))
2680 size = gimple_call_arg (call, size_arg);
2681 else if (fnspec.arg_access_size_given_by_type_p (i))
2683 tree callee = gimple_call_fndecl (call);
2684 tree t = TYPE_ARG_TYPES (TREE_TYPE (callee));
2686 for (unsigned int p = 0; p < i; p++)
2687 t = TREE_CHAIN (t);
2688 size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_VALUE (t)));
2690 ao_ref_init_from_ptr_and_size (&dref,
2691 gimple_call_arg (call, i),
2692 size);
2693 if (refs_may_alias_p_1 (&dref, ref, false))
2694 return 1;
2696 if (clobber
2697 && fnspec.errno_maybe_written_p ()
2698 && flag_errno_math
2699 && targetm.ref_may_alias_errno (ref))
2700 return 1;
2701 return 0;
2705 /* FIXME: we should handle barriers more consistently, but for now leave the
2706 check here. */
2707 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2708 switch (DECL_FUNCTION_CODE (gimple_call_fndecl (call)))
2710 /* __sync_* builtins and some OpenMP builtins act as threading
2711 barriers. */
2712 #undef DEF_SYNC_BUILTIN
2713 #define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
2714 #include "sync-builtins.def"
2715 #undef DEF_SYNC_BUILTIN
2716 case BUILT_IN_GOMP_ATOMIC_START:
2717 case BUILT_IN_GOMP_ATOMIC_END:
2718 case BUILT_IN_GOMP_BARRIER:
2719 case BUILT_IN_GOMP_BARRIER_CANCEL:
2720 case BUILT_IN_GOMP_TASKWAIT:
2721 case BUILT_IN_GOMP_TASKGROUP_END:
2722 case BUILT_IN_GOMP_CRITICAL_START:
2723 case BUILT_IN_GOMP_CRITICAL_END:
2724 case BUILT_IN_GOMP_CRITICAL_NAME_START:
2725 case BUILT_IN_GOMP_CRITICAL_NAME_END:
2726 case BUILT_IN_GOMP_LOOP_END:
2727 case BUILT_IN_GOMP_LOOP_END_CANCEL:
2728 case BUILT_IN_GOMP_ORDERED_START:
2729 case BUILT_IN_GOMP_ORDERED_END:
2730 case BUILT_IN_GOMP_SECTIONS_END:
2731 case BUILT_IN_GOMP_SECTIONS_END_CANCEL:
2732 case BUILT_IN_GOMP_SINGLE_COPY_START:
2733 case BUILT_IN_GOMP_SINGLE_COPY_END:
2734 return 1;
2736 default:
2737 return -1;
2739 return -1;
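/* Editorial sketch, not from the original source; the fnspec contents
   are assumed.  For a call like the one below, the logic above reduces
   a use query to the pointer argument: strlen's fnspec records that no
   global memory is read, so memory not reachable from BUF is proven
   unread (and, for clobber queries, nothing is written at all).  */

static size_t ATTRIBUTE_UNUSED
fnspec_example (const char *buf)
{
  return __builtin_strlen (buf);
}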
2742 /* If the call CALL may use the memory reference REF return true,
2743 otherwise return false. */
2745 static bool
2746 ref_maybe_used_by_call_p_1 (gcall *call, ao_ref *ref, bool tbaa_p)
2748 tree base, callee;
2749 unsigned i;
2750 int flags = gimple_call_flags (call);
2752 /* Const functions without a static chain do not implicitly use memory. */
2753 if (!gimple_call_chain (call)
2754 && (flags & (ECF_CONST|ECF_NOVOPS)))
2755 goto process_args;
2757 /* A call that is not without side-effects might involve volatile
2758 accesses and thus conflicts with all other volatile accesses. */
2759 if (ref->volatile_p)
2760 return true;
2762 callee = gimple_call_fndecl (call);
2764 if (!gimple_call_chain (call) && callee != NULL_TREE)
2766 struct cgraph_node *node = cgraph_node::get (callee);
2767 /* We cannot safely optimize based on the summary of the callee if it
2768 does not always bind to the current def: it is possible that a memory
2769 load was optimized out earlier and the interposed variant may not be
2770 optimized this way. */
2771 if (node && node->binds_to_current_def_p ())
2773 modref_summary *summary = get_modref_function_summary (node);
2774 if (summary)
2776 if (!modref_may_conflict (call, summary->loads, ref, tbaa_p))
2778 alias_stats.modref_use_no_alias++;
2779 if (dump_file && (dump_flags & TDF_DETAILS))
2781 fprintf (dump_file,
2782 "ipa-modref: call stmt ");
2783 print_gimple_stmt (dump_file, call, 0);
2784 fprintf (dump_file,
2785 "ipa-modref: call to %s does not use ",
2786 node->dump_name ());
2787 if (!ref->ref && ref->base)
2789 fprintf (dump_file, "base: ");
2790 print_generic_expr (dump_file, ref->base);
2792 else if (ref->ref)
2794 fprintf (dump_file, "ref: ");
2795 print_generic_expr (dump_file, ref->ref);
2797 fprintf (dump_file, " alias sets: %i->%i\n",
2798 ao_ref_base_alias_set (ref),
2799 ao_ref_alias_set (ref));
2801 goto process_args;
2803 alias_stats.modref_use_may_alias++;
2808 base = ao_ref_base (ref);
2809 if (!base)
2810 return true;
2812 /* If the reference is based on a decl that is not aliased the call
2813 cannot possibly use it. */
2814 if (DECL_P (base)
2815 && !may_be_aliased (base)
2816 /* But local statics can be used through recursion. */
2817 && !is_global_var (base))
2818 goto process_args;
2820 if (int res = check_fnspec (call, ref, false))
2822 if (res == 1)
2823 return true;
2825 else
2826 goto process_args;
2828 /* Check if base is a global static variable that is not read
2829 by the function. */
2830 if (callee != NULL_TREE && VAR_P (base) && TREE_STATIC (base))
2832 struct cgraph_node *node = cgraph_node::get (callee);
2833 bitmap read;
2834 int id;
2836 /* FIXME: Callee can be an OMP builtin that does not have a call graph
2837 node yet. We should enforce that there are nodes for all decls in the
2838 IL and remove this check instead. */
2839 if (node
2840 && (id = ipa_reference_var_uid (base)) != -1
2841 && (read = ipa_reference_get_read_global (node))
2842 && !bitmap_bit_p (read, id))
2843 goto process_args;
2846 /* Check if the base variable is call-used. */
2847 if (DECL_P (base))
2849 if (pt_solution_includes (gimple_call_use_set (call), base))
2850 return true;
2852 else if ((TREE_CODE (base) == MEM_REF
2853 || TREE_CODE (base) == TARGET_MEM_REF)
2854 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
2856 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
2857 if (!pi)
2858 return true;
2860 if (pt_solutions_intersect (gimple_call_use_set (call), &pi->pt))
2861 return true;
2863 else
2864 return true;
2866 /* Inspect call arguments for passed-by-value aliases. */
2867 process_args:
2868 for (i = 0; i < gimple_call_num_args (call); ++i)
2870 tree op = gimple_call_arg (call, i);
2871 int flags = gimple_call_arg_flags (call, i);
2873 if (flags & (EAF_UNUSED | EAF_NOREAD))
2874 continue;
2876 if (TREE_CODE (op) == WITH_SIZE_EXPR)
2877 op = TREE_OPERAND (op, 0);
2879 if (TREE_CODE (op) != SSA_NAME
2880 && !is_gimple_min_invariant (op))
2882 ao_ref r;
2883 ao_ref_init (&r, op);
2884 if (refs_may_alias_p_1 (&r, ref, tbaa_p))
2885 return true;
2889 return false;
2892 static bool
2893 ref_maybe_used_by_call_p (gcall *call, ao_ref *ref, bool tbaa_p)
2895 bool res;
2896 res = ref_maybe_used_by_call_p_1 (call, ref, tbaa_p);
2897 if (res)
2898 ++alias_stats.ref_maybe_used_by_call_p_may_alias;
2899 else
2900 ++alias_stats.ref_maybe_used_by_call_p_no_alias;
2901 return res;
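/* Editorial sketch, not from the original source: what the
   process_args loop above catches.  The call reads *S through the
   by-value aggregate argument even though S is not global, so a
   use query for *S against the call must return true.  */

struct rmc_example_s { int x[4]; };
extern void rmc_example_consume (struct rmc_example_s);

static void ATTRIBUTE_UNUSED
rmc_example (struct rmc_example_s *s)
{
  rmc_example_consume (*s);
}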
2905 /* If the statement STMT may use the memory reference REF return
2906 true, otherwise return false. */
2908 bool
2909 ref_maybe_used_by_stmt_p (gimple *stmt, ao_ref *ref, bool tbaa_p)
2911 if (is_gimple_assign (stmt))
2913 tree rhs;
2915 /* All assignments that access memory are "single" (single-rhs). */
2916 if (!gimple_assign_single_p (stmt))
2917 return false;
2919 rhs = gimple_assign_rhs1 (stmt);
2920 if (is_gimple_reg (rhs)
2921 || is_gimple_min_invariant (rhs)
2922 || gimple_assign_rhs_code (stmt) == CONSTRUCTOR)
2923 return false;
2925 return refs_may_alias_p (rhs, ref, tbaa_p);
2927 else if (is_gimple_call (stmt))
2928 return ref_maybe_used_by_call_p (as_a <gcall *> (stmt), ref, tbaa_p);
2929 else if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
2931 tree retval = gimple_return_retval (return_stmt);
2932 if (retval
2933 && TREE_CODE (retval) != SSA_NAME
2934 && !is_gimple_min_invariant (retval)
2935 && refs_may_alias_p (retval, ref, tbaa_p))
2936 return true;
2937 /* If ref escapes the function then the return acts as a use. */
2938 tree base = ao_ref_base (ref);
2939 if (!base)
2941 else if (DECL_P (base))
2942 return is_global_var (base);
2943 else if (TREE_CODE (base) == MEM_REF
2944 || TREE_CODE (base) == TARGET_MEM_REF)
2945 return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
2946 return false;
2949 return true;
2952 bool
2953 ref_maybe_used_by_stmt_p (gimple *stmt, tree ref, bool tbaa_p)
2955 ao_ref r;
2956 ao_ref_init (&r, ref);
2957 return ref_maybe_used_by_stmt_p (stmt, &r, tbaa_p);
2960 /* If the call in statement CALL may clobber the memory reference REF
2961 return true, otherwise return false. */
2963 bool
2964 call_may_clobber_ref_p_1 (gcall *call, ao_ref *ref, bool tbaa_p)
2966 tree base;
2967 tree callee;
2969 /* If the call is pure or const it cannot clobber anything. */
2970 if (gimple_call_flags (call)
2971 & (ECF_PURE|ECF_CONST|ECF_LOOPING_CONST_OR_PURE|ECF_NOVOPS))
2972 return false;
2973 if (gimple_call_internal_p (call))
2974 switch (gimple_call_internal_fn (call))
2976 /* Treat these internal calls like ECF_PURE for aliasing,
2977 they don't write to any memory the program should care about.
2978 They have important other side-effects, and read memory,
2979 so can't be ECF_NOVOPS. */
2980 case IFN_UBSAN_NULL:
2981 case IFN_UBSAN_BOUNDS:
2982 case IFN_UBSAN_VPTR:
2983 case IFN_UBSAN_OBJECT_SIZE:
2984 case IFN_UBSAN_PTR:
2985 case IFN_ASAN_CHECK:
2986 return false;
2987 default:
2988 break;
2991 callee = gimple_call_fndecl (call);
2993 if (callee != NULL_TREE && !ref->volatile_p)
2995 struct cgraph_node *node = cgraph_node::get (callee);
2996 if (node)
2998 modref_summary *summary = get_modref_function_summary (node);
2999 if (summary)
3001 if (!modref_may_conflict (call, summary->stores, ref, tbaa_p)
3002 && (!summary->writes_errno
3003 || !targetm.ref_may_alias_errno (ref)))
3005 alias_stats.modref_clobber_no_alias++;
3006 if (dump_file && (dump_flags & TDF_DETAILS))
3008 fprintf (dump_file,
3009 "ipa-modref: call stmt ");
3010 print_gimple_stmt (dump_file, call, 0);
3011 fprintf (dump_file,
3012 "ipa-modref: call to %s does not clobber ",
3013 node->dump_name ());
3014 if (!ref->ref && ref->base)
3016 fprintf (dump_file, "base: ");
3017 print_generic_expr (dump_file, ref->base);
3019 else if (ref->ref)
3021 fprintf (dump_file, "ref: ");
3022 print_generic_expr (dump_file, ref->ref);
3024 fprintf (dump_file, " alias sets: %i->%i\n",
3025 ao_ref_base_alias_set (ref),
3026 ao_ref_alias_set (ref));
3028 return false;
3030 alias_stats.modref_clobber_may_alias++;
3035 base = ao_ref_base (ref);
3036 if (!base)
3037 return true;
3039 if (TREE_CODE (base) == SSA_NAME
3040 || CONSTANT_CLASS_P (base))
3041 return false;
3043 /* A call that is not without side-effects might involve volatile
3044 accesses and thus conflicts with all other volatile accesses. */
3045 if (ref->volatile_p)
3046 return true;
3048 /* If the reference is based on a decl that is not aliased the call
3049 cannot possibly clobber it. */
3050 if (DECL_P (base)
3051 && !may_be_aliased (base)
3052 /* But local non-readonly statics can be modified through recursion,
3053 or the call may implement a threading barrier which we must
3054 treat as a may-def. */
3055 && (TREE_READONLY (base)
3056 || !is_global_var (base)))
3057 return false;
3059 /* If the reference is based on a pointer that points to memory
3060 that may not be written to then the call cannot possibly clobber it. */
3061 if ((TREE_CODE (base) == MEM_REF
3062 || TREE_CODE (base) == TARGET_MEM_REF)
3063 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
3064 && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base, 0)))
3065 return false;
3067 if (int res = check_fnspec (call, ref, true))
3069 if (res == 1)
3070 return true;
3072 else
3073 return false;
3075 /* Check if base is a global static variable that is not written
3076 by the function. */
3077 if (callee != NULL_TREE && VAR_P (base) && TREE_STATIC (base))
3079 struct cgraph_node *node = cgraph_node::get (callee);
3080 bitmap written;
3081 int id;
3083 if (node
3084 && (id = ipa_reference_var_uid (base)) != -1
3085 && (written = ipa_reference_get_written_global (node))
3086 && !bitmap_bit_p (written, id))
3087 return false;
3090 /* Check if the base variable is call-clobbered. */
3091 if (DECL_P (base))
3092 return pt_solution_includes (gimple_call_clobber_set (call), base);
3093 else if ((TREE_CODE (base) == MEM_REF
3094 || TREE_CODE (base) == TARGET_MEM_REF)
3095 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
3097 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
3098 if (!pi)
3099 return true;
3101 return pt_solutions_intersect (gimple_call_clobber_set (call), &pi->pt);
3104 return true;
3107 /* If the call in statement CALL may clobber the memory reference REF
3108 return true, otherwise return false. */
3110 bool
3111 call_may_clobber_ref_p (gcall *call, tree ref, bool tbaa_p)
3113 bool res;
3114 ao_ref r;
3115 ao_ref_init (&r, ref);
3116 res = call_may_clobber_ref_p_1 (call, &r, tbaa_p);
3117 if (res)
3118 ++alias_stats.call_may_clobber_ref_p_may_alias;
3119 else
3120 ++alias_stats.call_may_clobber_ref_p_no_alias;
3121 return res;
3125 /* If the statement STMT may clobber the memory reference REF return true,
3126 otherwise return false. */
3128 bool
3129 stmt_may_clobber_ref_p_1 (gimple *stmt, ao_ref *ref, bool tbaa_p)
3131 if (is_gimple_call (stmt))
3133 tree lhs = gimple_call_lhs (stmt);
3134 if (lhs
3135 && TREE_CODE (lhs) != SSA_NAME)
3137 ao_ref r;
3138 ao_ref_init (&r, lhs);
3139 if (refs_may_alias_p_1 (ref, &r, tbaa_p))
3140 return true;
3143 return call_may_clobber_ref_p_1 (as_a <gcall *> (stmt), ref, tbaa_p);
3145 else if (gimple_assign_single_p (stmt))
3147 tree lhs = gimple_assign_lhs (stmt);
3148 if (TREE_CODE (lhs) != SSA_NAME)
3150 ao_ref r;
3151 ao_ref_init (&r, lhs);
3152 return refs_may_alias_p_1 (ref, &r, tbaa_p);
3155 else if (gimple_code (stmt) == GIMPLE_ASM)
3156 return true;
3158 return false;
3161 bool
3162 stmt_may_clobber_ref_p (gimple *stmt, tree ref, bool tbaa_p)
3164 ao_ref r;
3165 ao_ref_init (&r, ref);
3166 return stmt_may_clobber_ref_p_1 (stmt, &r, tbaa_p);
3169 /* Return true if store1 and store2 described by corresponding tuples
3170 <BASE, OFFSET, SIZE, MAX_SIZE> have the same size and store to the same
3171 address. */
3173 static bool
3174 same_addr_size_stores_p (tree base1, poly_int64 offset1, poly_int64 size1,
3175 poly_int64 max_size1,
3176 tree base2, poly_int64 offset2, poly_int64 size2,
3177 poly_int64 max_size2)
3179 /* Offsets need to be 0. */
3180 if (maybe_ne (offset1, 0)
3181 || maybe_ne (offset2, 0))
3182 return false;
3184 bool base1_obj_p = SSA_VAR_P (base1);
3185 bool base2_obj_p = SSA_VAR_P (base2);
3187 /* Exactly one of the bases needs to be an object. */
3188 if (base1_obj_p == base2_obj_p)
3189 return false;
3190 tree obj = base1_obj_p ? base1 : base2;
3192 /* And exactly one of the bases needs to be a MEM_REF. */
3193 bool base1_memref_p = TREE_CODE (base1) == MEM_REF;
3194 bool base2_memref_p = TREE_CODE (base2) == MEM_REF;
3195 if (base1_memref_p == base2_memref_p)
3196 return false;
3197 tree memref = base1_memref_p ? base1 : base2;
3199 /* Sizes need to be valid. */
3200 if (!known_size_p (max_size1)
3201 || !known_size_p (max_size2)
3202 || !known_size_p (size1)
3203 || !known_size_p (size2))
3204 return false;
3206 /* Max_size needs to match size. */
3207 if (maybe_ne (max_size1, size1)
3208 || maybe_ne (max_size2, size2))
3209 return false;
3211 /* Sizes need to match. */
3212 if (maybe_ne (size1, size2))
3213 return false;
3216 /* Check that MEMREF is a store through a pointer with singleton points-to info. */
3217 if (!integer_zerop (TREE_OPERAND (memref, 1)))
3218 return false;
3219 tree ptr = TREE_OPERAND (memref, 0);
3220 if (TREE_CODE (ptr) != SSA_NAME)
3221 return false;
3222 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
3223 unsigned int pt_uid;
3224 if (pi == NULL
3225 || !pt_solution_singleton_or_null_p (&pi->pt, &pt_uid))
3226 return false;
3228 /* Be conservative with non-call exceptions when the address might
3229 be NULL. */
3230 if (cfun->can_throw_non_call_exceptions && pi->pt.null)
3231 return false;
3233 /* Check that ptr points relative to obj. */
3234 unsigned int obj_uid = DECL_PT_UID (obj);
3235 if (obj_uid != pt_uid)
3236 return false;
3238 /* Check that the object size is the same as the store size. That ensures
3239 that PTR points to the start of OBJ. */
3240 return (DECL_SIZE (obj)
3241 && poly_int_tree_p (DECL_SIZE (obj))
3242 && known_eq (wi::to_poly_offset (DECL_SIZE (obj)), size1));
3245 /* If STMT kills the memory reference REF return true, otherwise
3246 return false. */
3248 bool
3249 stmt_kills_ref_p (gimple *stmt, ao_ref *ref)
3251 if (!ao_ref_base (ref))
3252 return false;
3254 if (gimple_has_lhs (stmt)
3255 && TREE_CODE (gimple_get_lhs (stmt)) != SSA_NAME
3256 /* The assignment is not necessarily carried out if it can throw
3257 and we can catch it in the current function where we could inspect
3258 the previous value.
3259 ??? We only need to care about the RHS throwing. For aggregate
3260 assignments or similar calls and non-call exceptions the LHS
3261 might throw as well. */
3262 && !stmt_can_throw_internal (cfun, stmt))
3264 tree lhs = gimple_get_lhs (stmt);
3265 /* If LHS is literally a base of the access we are done. */
3266 if (ref->ref)
3268 tree base = ref->ref;
3269 tree innermost_dropped_array_ref = NULL_TREE;
3270 if (handled_component_p (base))
3272 tree saved_lhs0 = NULL_TREE;
3273 if (handled_component_p (lhs))
3275 saved_lhs0 = TREE_OPERAND (lhs, 0);
3276 TREE_OPERAND (lhs, 0) = integer_zero_node;
3280 /* Just compare the outermost handled component; if
3281 they are equal we have found a possible common
3282 base. */
3283 tree saved_base0 = TREE_OPERAND (base, 0);
3284 TREE_OPERAND (base, 0) = integer_zero_node;
3285 bool res = operand_equal_p (lhs, base, 0);
3286 TREE_OPERAND (base, 0) = saved_base0;
3287 if (res)
3288 break;
3289 /* Remember if we drop an array-ref; we need to
3290 double-check that it is not at the end of a struct. */
3291 if (TREE_CODE (base) == ARRAY_REF
3292 || TREE_CODE (base) == ARRAY_RANGE_REF)
3293 innermost_dropped_array_ref = base;
3294 /* Otherwise drop handled components of the access. */
3295 base = saved_base0;
3297 while (handled_component_p (base));
3298 if (saved_lhs0)
3299 TREE_OPERAND (lhs, 0) = saved_lhs0;
3301 /* Finally check if the lhs has the same address and size as the
3302 base candidate of the access. Watch out if we have dropped
3303 an array-ref that was at struct end; this means ref->ref may
3304 be outside of the TYPE_SIZE of its base. */
3305 if ((! innermost_dropped_array_ref
3306 || ! array_at_struct_end_p (innermost_dropped_array_ref))
3307 && (lhs == base
3308 || (((TYPE_SIZE (TREE_TYPE (lhs))
3309 == TYPE_SIZE (TREE_TYPE (base)))
3310 || (TYPE_SIZE (TREE_TYPE (lhs))
3311 && TYPE_SIZE (TREE_TYPE (base))
3312 && operand_equal_p (TYPE_SIZE (TREE_TYPE (lhs)),
3313 TYPE_SIZE (TREE_TYPE (base)),
3314 0)))
3315 && operand_equal_p (lhs, base,
3316 OEP_ADDRESS_OF
3317 | OEP_MATCH_SIDE_EFFECTS))))
3318 return true;
3321 /* Now look for non-literal equal bases with the restriction of
3322 handling constant offset and size. */
3323 /* For a must-alias check we need to be able to constrain
3324 the access properly. */
3325 if (!ref->max_size_known_p ())
3326 return false;
3327 poly_int64 size, offset, max_size, ref_offset = ref->offset;
3328 bool reverse;
3329 tree base = get_ref_base_and_extent (lhs, &offset, &size, &max_size,
3330 &reverse);
3331 /* We can get MEM[symbol: sZ, index: D.8862_1] here,
3332 so base == ref->base does not always hold. */
3333 if (base != ref->base)
3335 /* Try using points-to info. */
3336 if (same_addr_size_stores_p (base, offset, size, max_size, ref->base,
3337 ref->offset, ref->size, ref->max_size))
3338 return true;
3340 /* If both base and ref->base are MEM_REFs, only compare the
3341 first operand, and if the second operands aren't equal constants,
3342 try to add the offsets into offset and ref_offset. */
3343 if (TREE_CODE (base) == MEM_REF && TREE_CODE (ref->base) == MEM_REF
3344 && TREE_OPERAND (base, 0) == TREE_OPERAND (ref->base, 0))
3346 if (!tree_int_cst_equal (TREE_OPERAND (base, 1),
3347 TREE_OPERAND (ref->base, 1)))
3349 poly_offset_int off1 = mem_ref_offset (base);
3350 off1 <<= LOG2_BITS_PER_UNIT;
3351 off1 += offset;
3352 poly_offset_int off2 = mem_ref_offset (ref->base);
3353 off2 <<= LOG2_BITS_PER_UNIT;
3354 off2 += ref_offset;
3355 if (!off1.to_shwi (&offset) || !off2.to_shwi (&ref_offset))
3356 size = -1;
3359 else
3360 size = -1;
3362 /* For a must-alias check we need to be able to constrain
3363 the access properly. */
3364 if (known_eq (size, max_size)
3365 && known_subrange_p (ref_offset, ref->max_size, offset, size))
3366 return true;
3369 if (is_gimple_call (stmt))
3371 tree callee = gimple_call_fndecl (stmt);
3372 if (callee != NULL_TREE
3373 && gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
3374 switch (DECL_FUNCTION_CODE (callee))
3376 case BUILT_IN_FREE:
3378 tree ptr = gimple_call_arg (stmt, 0);
3379 tree base = ao_ref_base (ref);
3380 if (base && TREE_CODE (base) == MEM_REF
3381 && TREE_OPERAND (base, 0) == ptr)
3382 return true;
3383 break;
3386 case BUILT_IN_MEMCPY:
3387 case BUILT_IN_MEMPCPY:
3388 case BUILT_IN_MEMMOVE:
3389 case BUILT_IN_MEMSET:
3390 case BUILT_IN_MEMCPY_CHK:
3391 case BUILT_IN_MEMPCPY_CHK:
3392 case BUILT_IN_MEMMOVE_CHK:
3393 case BUILT_IN_MEMSET_CHK:
3394 case BUILT_IN_STRNCPY:
3395 case BUILT_IN_STPNCPY:
3396 case BUILT_IN_CALLOC:
3398 /* For a must-alias check we need to be able to constrain
3399 the access properly. */
3400 if (!ref->max_size_known_p ())
3401 return false;
3402 tree dest;
3403 tree len;
3405 /* In execution order a calloc call will never kill
3406 anything. However, DSE will (ab)use this interface
3407 to ask if a calloc call writes the same memory locations
3408 as a later assignment, memset, etc. So handle calloc
3409 in the expected way. */
3410 if (DECL_FUNCTION_CODE (callee) == BUILT_IN_CALLOC)
3412 tree arg0 = gimple_call_arg (stmt, 0);
3413 tree arg1 = gimple_call_arg (stmt, 1);
3414 if (TREE_CODE (arg0) != INTEGER_CST
3415 || TREE_CODE (arg1) != INTEGER_CST)
3416 return false;
3418 dest = gimple_call_lhs (stmt);
3419 if (!dest)
3420 return false;
3421 len = fold_build2 (MULT_EXPR, TREE_TYPE (arg0), arg0, arg1);
3423 else
3425 dest = gimple_call_arg (stmt, 0);
3426 len = gimple_call_arg (stmt, 2);
3428 if (!poly_int_tree_p (len))
3429 return false;
3430 tree rbase = ref->base;
3431 poly_offset_int roffset = ref->offset;
3432 ao_ref dref;
3433 ao_ref_init_from_ptr_and_size (&dref, dest, len);
3434 tree base = ao_ref_base (&dref);
3435 poly_offset_int offset = dref.offset;
3436 if (!base || !known_size_p (dref.size))
3437 return false;
3438 if (TREE_CODE (base) == MEM_REF)
3440 if (TREE_CODE (rbase) != MEM_REF)
3441 return false;
3442 // Compare pointers.
3443 offset += mem_ref_offset (base) << LOG2_BITS_PER_UNIT;
3444 roffset += mem_ref_offset (rbase) << LOG2_BITS_PER_UNIT;
3445 base = TREE_OPERAND (base, 0);
3446 rbase = TREE_OPERAND (rbase, 0);
3448 if (base == rbase
3449 && known_subrange_p (roffset, ref->max_size, offset,
3450 wi::to_poly_offset (len)
3451 << LOG2_BITS_PER_UNIT))
3452 return true;
3453 break;
3456 case BUILT_IN_VA_END:
3458 tree ptr = gimple_call_arg (stmt, 0);
3459 if (TREE_CODE (ptr) == ADDR_EXPR)
3461 tree base = ao_ref_base (ref);
3462 if (TREE_OPERAND (ptr, 0) == base)
3463 return true;
3465 break;
3468 default:;
3471 return false;
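/* Editorial sketch, not from the original source: the must-kill
   property DSE derives from the BUILT_IN_MEMSET case above.  The
   memset covers bytes [0, 16) of *BUF, a superrange of the earlier
   one-byte store, so that store is dead.  */

static void ATTRIBUTE_UNUSED
kills_example (char *buf)
{
  buf[4] = 1;                      /* Killed by ...  */
  __builtin_memset (buf, 0, 16);   /* ... this covering store.  */
}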
3474 bool
3475 stmt_kills_ref_p (gimple *stmt, tree ref)
3477 ao_ref r;
3478 ao_ref_init (&r, ref);
3479 return stmt_kills_ref_p (stmt, &r);
3483 /* Walk the virtual use-def chain of VUSE until hitting the virtual operand
3484 TARGET or a statement clobbering the memory reference REF, in which
3485 case false is returned. The walk starts with VUSE, one argument of PHI. */
3487 static bool
3488 maybe_skip_until (gimple *phi, tree &target, basic_block target_bb,
3489 ao_ref *ref, tree vuse, bool tbaa_p, unsigned int &limit,
3490 bitmap *visited, bool abort_on_visited,
3491 void *(*translate)(ao_ref *, tree, void *, translate_flags *),
3492 translate_flags disambiguate_only,
3493 void *data)
3495 basic_block bb = gimple_bb (phi);
3497 if (!*visited)
3498 *visited = BITMAP_ALLOC (NULL);
3500 bitmap_set_bit (*visited, SSA_NAME_VERSION (PHI_RESULT (phi)));
3502 /* Walk until we hit the target. */
3503 while (vuse != target)
3505 gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
3506 /* If we are searching for the target VUSE by walking up to
3507 TARGET_BB dominating the original PHI we are finished once
3508 we reach a default def or a definition in a block dominating
3509 that block. Update TARGET and return. */
3510 if (!target
3511 && (gimple_nop_p (def_stmt)
3512 || dominated_by_p (CDI_DOMINATORS,
3513 target_bb, gimple_bb (def_stmt))))
3515 target = vuse;
3516 return true;
3519 /* Recurse for PHI nodes. */
3520 if (gimple_code (def_stmt) == GIMPLE_PHI)
3522 /* An already visited PHI node ends the walk successfully. */
3523 if (bitmap_bit_p (*visited, SSA_NAME_VERSION (PHI_RESULT (def_stmt))))
3524 return !abort_on_visited;
3525 vuse = get_continuation_for_phi (def_stmt, ref, tbaa_p, limit,
3526 visited, abort_on_visited,
3527 translate, data, disambiguate_only);
3528 if (!vuse)
3529 return false;
3530 continue;
3532 else if (gimple_nop_p (def_stmt))
3533 return false;
3534 else
3536 /* A clobbering statement or the end of the IL ends the walk with failure. */
3537 if ((int)limit <= 0)
3538 return false;
3539 --limit;
3540 if (stmt_may_clobber_ref_p_1 (def_stmt, ref, tbaa_p))
3542 translate_flags tf = disambiguate_only;
3543 if (translate
3544 && (*translate) (ref, vuse, data, &tf) == NULL)
3546 else
3547 return false;
3550 /* If we reach a new basic-block see if we already skipped it
3551 in a previous walk that ended successfully. */
3552 if (gimple_bb (def_stmt) != bb)
3554 if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (vuse)))
3555 return !abort_on_visited;
3556 bb = gimple_bb (def_stmt);
3558 vuse = gimple_vuse (def_stmt);
3560 return true;
3564 /* Starting from a PHI node for the virtual operand of the memory reference
3565 REF, find a continuation virtual operand that allows us to continue walking
3566 statements dominating PHI, skipping only statements that cannot possibly
3567 clobber REF. Decrements LIMIT for each alias disambiguation done
3568 and aborts the walk, returning NULL_TREE if it reaches zero.
3569 Returns NULL_TREE if no suitable virtual operand can be found. */
3571 tree
3572 get_continuation_for_phi (gimple *phi, ao_ref *ref, bool tbaa_p,
3573 unsigned int &limit, bitmap *visited,
3574 bool abort_on_visited,
3575 void *(*translate)(ao_ref *, tree, void *,
3576 translate_flags *),
3577 void *data,
3578 translate_flags disambiguate_only)
3580 unsigned nargs = gimple_phi_num_args (phi);
3582 /* We can simply look through a single-argument PHI. */
3583 if (nargs == 1)
3584 return PHI_ARG_DEF (phi, 0);
3586 /* For two or more arguments try to pairwise skip non-aliasing code
3587 until we hit the phi argument definition that dominates the other one. */
3588 basic_block phi_bb = gimple_bb (phi);
3589 tree arg0, arg1;
3590 unsigned i;
3592 /* Find a candidate for the virtual operand whose definition
3593 dominates those of all others. */
3594 /* First look if any of the args themselves satisfy this. */
3595 for (i = 0; i < nargs; ++i)
3597 arg0 = PHI_ARG_DEF (phi, i);
3598 if (SSA_NAME_IS_DEFAULT_DEF (arg0))
3599 break;
3600 basic_block def_bb = gimple_bb (SSA_NAME_DEF_STMT (arg0));
3601 if (def_bb != phi_bb
3602 && dominated_by_p (CDI_DOMINATORS, phi_bb, def_bb))
3603 break;
3604 arg0 = NULL_TREE;
3606 /* If not, see if we can reach such a candidate by walking defs
3607 until we hit the immediate dominator. maybe_skip_until will
3608 do that for us. */
3609 basic_block dom = get_immediate_dominator (CDI_DOMINATORS, phi_bb);
3611 /* Then check against the (to be) found candidate. */
3612 for (i = 0; i < nargs; ++i)
3614 arg1 = PHI_ARG_DEF (phi, i);
3615 if (arg1 == arg0)
3617 else if (! maybe_skip_until (phi, arg0, dom, ref, arg1, tbaa_p,
3618 limit, visited,
3619 abort_on_visited,
3620 translate,
3621 /* Do not valueize when walking over
3622 backedges. */
3623 dominated_by_p
3624 (CDI_DOMINATORS,
3625 gimple_bb (SSA_NAME_DEF_STMT (arg1)),
3626 phi_bb)
3627 ? TR_DISAMBIGUATE
3628 : disambiguate_only, data))
3629 return NULL_TREE;
3632 return arg0;
3635 /* Based on the memory reference REF and its virtual use VUSE, call
3636 WALKER for each virtual use that is equivalent to VUSE, including VUSE
3637 itself. That is, for each virtual use whose defining statement
3638 does not clobber REF.
3640 WALKER is called with REF, the current virtual use and DATA. If
3641 WALKER returns non-NULL the walk stops and its result is returned.
3642 At the end of a non-successful walk NULL is returned.
3644 TRANSLATE if non-NULL is called with a pointer to REF, the virtual
3645 use whose definition is a statement that may clobber REF, and DATA.
3646 If TRANSLATE returns (void *)-1 the walk stops and NULL is returned.
3647 If TRANSLATE returns non-NULL the walk stops and its result is returned.
3648 If TRANSLATE returns NULL the walk continues and TRANSLATE is supposed
3649 to adjust REF and *DATA to make that valid.
3651 VALUEIZE if non-NULL is called with the next VUSE that is considered
3652 and its return value is substituted for it. This can be used to
3653 implement optimistic value-numbering, for example. Note that the
3654 VUSE argument is assumed to be valueized already.
3656 LIMIT specifies the number of alias queries we are allowed to do;
3657 the walk stops when it reaches zero and NULL is returned. LIMIT
3658 is decremented by the number of alias queries (plus adjustments
3659 done by the callbacks) upon return.
3661 TODO: Cache the vector of equivalent vuses per ref, vuse pair. */
3663 void *
3664 walk_non_aliased_vuses (ao_ref *ref, tree vuse, bool tbaa_p,
3665 void *(*walker)(ao_ref *, tree, void *),
3666 void *(*translate)(ao_ref *, tree, void *,
3667 translate_flags *),
3668 tree (*valueize)(tree),
3669 unsigned &limit, void *data)
3671 bitmap visited = NULL;
3672 void *res;
3673 bool translated = false;
3675 timevar_push (TV_ALIAS_STMT_WALK);
3679 gimple *def_stmt;
3681 /* ??? Do we want to account this to TV_ALIAS_STMT_WALK? */
3682 res = (*walker) (ref, vuse, data);
3683 /* Abort walk. */
3684 if (res == (void *)-1)
3686 res = NULL;
3687 break;
3689 /* Lookup succeeded. */
3690 else if (res != NULL)
3691 break;
3693 if (valueize)
3695 vuse = valueize (vuse);
3696 if (!vuse)
3698 res = NULL;
3699 break;
3702 def_stmt = SSA_NAME_DEF_STMT (vuse);
3703 if (gimple_nop_p (def_stmt))
3704 break;
3705 else if (gimple_code (def_stmt) == GIMPLE_PHI)
3706 vuse = get_continuation_for_phi (def_stmt, ref, tbaa_p, limit,
3707 &visited, translated, translate, data);
3708 else
3710 if ((int)limit <= 0)
3712 res = NULL;
3713 break;
3715 --limit;
3716 if (stmt_may_clobber_ref_p_1 (def_stmt, ref, tbaa_p))
3718 if (!translate)
3719 break;
3720 translate_flags disambiguate_only = TR_TRANSLATE;
3721 res = (*translate) (ref, vuse, data, &disambiguate_only);
3722 /* Failed lookup and translation. */
3723 if (res == (void *)-1)
3725 res = NULL;
3726 break;
3728 /* Lookup succeeded. */
3729 else if (res != NULL)
3730 break;
3731 /* Translation succeeded, continue walking. */
3732 translated = translated || disambiguate_only == TR_TRANSLATE;
3734 vuse = gimple_vuse (def_stmt);
3737 while (vuse);
3739 if (visited)
3740 BITMAP_FREE (visited);
3742 timevar_pop (TV_ALIAS_STMT_WALK);
3744 return res;
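/* Editorial sketch (not part of the original file): a minimal client of
   walk_non_aliased_vuses that counts the virtual uses equivalent to the
   one at STMT.  The callback name and the limit of 32 are hypothetical.

     static void *
     count_equiv_vuses (ao_ref *, tree, void *data)
     {
       ++*(unsigned *) data;
       return NULL;	// NULL continues the walk; (void *)-1 would abort it
     }

     ao_ref ref;
     ao_ref_init (&ref, rhs);	// rhs: the memory reference tree of STMT
     unsigned n = 0;
     unsigned limit = 32;	// alias-query budget, arbitrary here
     walk_non_aliased_vuses (&ref, gimple_vuse (stmt), true,
			     count_equiv_vuses, NULL, NULL, limit, &n);

   Since the callback never returns non-NULL, the walk itself always ends
   unsuccessfully (returning NULL), but N holds the number of equivalent
   virtual uses seen before a clobbering definition, the function entry
   or the exhausted LIMIT stopped it.  */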
3748 /* Based on the memory reference REF call WALKER for each vdef whose
3749 defining statement may clobber REF, starting with VDEF. If REF
3750 is NULL_TREE, each defining statement is visited.
3752 WALKER is called with REF, the current vdef and DATA. If WALKER
3753 returns true the walk is stopped, otherwise it continues.
3755 If function entry is reached, FUNCTION_ENTRY_REACHED is set to true.
3756 The pointer may be NULL, in which case this information is not tracked.
3758 At PHI nodes walk_aliased_vdefs forks into one walk for each
3759 PHI argument (but only one walk continues at merge points); the
3760 walk is considered successful if any of the forked walks was.
3762 The function returns the number of statements walked or -1 if
3763 LIMIT stmts were walked and the walk was aborted at this point.
3764 If LIMIT is zero the walk is not aborted. */
3766 static int
3767 walk_aliased_vdefs_1 (ao_ref *ref, tree vdef,
3768 bool (*walker)(ao_ref *, tree, void *), void *data,
3769 bitmap *visited, unsigned int cnt,
3770 bool *function_entry_reached, unsigned limit)
3774 gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);
3776 if (*visited
3777 && !bitmap_set_bit (*visited, SSA_NAME_VERSION (vdef)))
3778 return cnt;
3780 if (gimple_nop_p (def_stmt))
3782 if (function_entry_reached)
3783 *function_entry_reached = true;
3784 return cnt;
3786 else if (gimple_code (def_stmt) == GIMPLE_PHI)
3788 unsigned i;
3789 if (!*visited)
3790 *visited = BITMAP_ALLOC (NULL);
3791 for (i = 0; i < gimple_phi_num_args (def_stmt); ++i)
3793 int res = walk_aliased_vdefs_1 (ref,
3794 gimple_phi_arg_def (def_stmt, i),
3795 walker, data, visited, cnt,
3796 function_entry_reached, limit);
3797 if (res == -1)
3798 return -1;
3799 cnt = res;
3801 return cnt;
3804 /* ??? Do we want to account this to TV_ALIAS_STMT_WALK? */
3805 cnt++;
3806 if (cnt == limit)
3807 return -1;
3808 if ((!ref
3809 || stmt_may_clobber_ref_p_1 (def_stmt, ref))
3810 && (*walker) (ref, vdef, data))
3811 return cnt;
3813 vdef = gimple_vuse (def_stmt);
3815 while (1);
3818 int
3819 walk_aliased_vdefs (ao_ref *ref, tree vdef,
3820 bool (*walker)(ao_ref *, tree, void *), void *data,
3821 bitmap *visited,
3822 bool *function_entry_reached, unsigned int limit)
3824 bitmap local_visited = NULL;
3825 int ret;
3827 timevar_push (TV_ALIAS_STMT_WALK);
3829 if (function_entry_reached)
3830 *function_entry_reached = false;
3832 ret = walk_aliased_vdefs_1 (ref, vdef, walker, data,
3833 visited ? visited : &local_visited, 0,
3834 function_entry_reached, limit);
3835 if (local_visited)
3836 BITMAP_FREE (local_visited);
3838 timevar_pop (TV_ALIAS_STMT_WALK);
3840 return ret;
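/* Editorial sketch (not part of the original file): a minimal client asking
   whether any statement between STMT and the function entry may clobber
   REF.  The callback name and the limit of 100 are hypothetical.

     static bool
     note_clobber (ao_ref *, tree, void *data)
     {
       *(bool *) data = true;
       return true;	// true stops this branch of the walk
     }

     bool clobbered = false;
     bool entry_reached;
     if (walk_aliased_vdefs (&ref, gimple_vuse (stmt), note_clobber,
			     &clobbered, NULL, &entry_reached, 100) == -1)
       clobbered = true;	// limit exhausted, assume the worst

   Note the asymmetry with walk_non_aliased_vuses above: here WALKER is
   invoked only for the potentially clobbering definitions, never for the
   ones the walk skips.  */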
3843 /* Verify validity of the fnspec string.
3844 See attr-fnspec.h for details. */
3846 void
3847 attr_fnspec::verify ()
3849 bool err = false;
3850 if (!len)
3851 return;
3853 /* Check return value specifier. */
3854 if (len < return_desc_size)
3855 err = true;
3856 else if ((len - return_desc_size) % arg_desc_size)
3857 err = true;
3858 else if ((str[0] < '1' || str[0] > '4')
3859 && str[0] != '.' && str[0] != 'm')
3860 err = true;
3862 switch (str[1])
3864 case ' ':
3865 case 'p':
3866 case 'P':
3867 case 'c':
3868 case 'C':
3869 break;
3870 default:
3871 err = true;
3873 if (err)
3874 internal_error ("invalid fn spec attribute \"%s\"", str);
3876 /* Now check all parameters. */
3877 for (unsigned int i = 0; arg_specified_p (i); i++)
3879 unsigned int idx = arg_idx (i);
3880 switch (str[idx])
3882 case 'x':
3883 case 'X':
3884 case 'r':
3885 case 'R':
3886 case 'o':
3887 case 'O':
3888 case 'w':
3889 case 'W':
3890 case '.':
3891 if ((str[idx + 1] >= '1' && str[idx + 1] <= '9')
3892 || str[idx + 1] == 't')
3894 if (str[idx] != 'r' && str[idx] != 'R'
3895 && str[idx] != 'w' && str[idx] != 'W'
3896 && str[idx] != 'o' && str[idx] != 'O')
3897 err = true;
3898 if (str[idx + 1] != 't'
3899 /* The size argument is a scalar, so it should be described
3900 by ". " if specified at all. */
3901 && (arg_specified_p (str[idx + 1] - '1')
3902 && str[arg_idx (str[idx + 1] - '1')] != '.'))
3903 err = true;
3905 else if (str[idx + 1] != ' ')
3906 err = true;
3907 break;
3908 default:
3909 if (str[idx] < '1' || str[idx] > '9')
3910 err = true;
3912 if (err)
3913 internal_error ("invalid fn spec attribute \"%s\" arg %i", str, i);
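/* Editorial example (not part of the original file): by the checks above,
   "m R2. " is a well-formed fnspec string: 'm' marks a malloc-like return
   value, ' ' adds no function-wide flag, "R2" says the first parameter
   points to read-only memory whose size is given by the second parameter,
   and ". " leaves that scalar second parameter undescribed, as required.

     attr_fnspec ("m R2. ").verify ();	// passes silently

   A string such as "m Z " would instead trip the internal_error above,
   since 'Z' is neither a known parameter specifier nor a digit.  */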
3917 /* Return true if TYPE1 and TYPE2 will always give the same answer
3918 when compared with other types using same_type_for_tbaa_p. */
3920 static bool
3921 types_equal_for_same_type_for_tbaa_p (tree type1, tree type2,
3922 bool lto_streaming_safe)
3924 /* We use same_type_for_tbaa_p to match types in the access path.
3925 This check is overly conservative. */
3926 type1 = TYPE_MAIN_VARIANT (type1);
3927 type2 = TYPE_MAIN_VARIANT (type2);
3929 if (TYPE_STRUCTURAL_EQUALITY_P (type1)
3930 != TYPE_STRUCTURAL_EQUALITY_P (type2))
3931 return false;
3932 if (TYPE_STRUCTURAL_EQUALITY_P (type1))
3933 return true;
3935 if (lto_streaming_safe)
3936 return type1 == type2;
3937 else
3938 return TYPE_CANONICAL (type1) == TYPE_CANONICAL (type2);
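/* Editorial example (not part of the original file): qualifiers are
   irrelevant here because both sides are reduced to their main variant.

     tree t = build_qualified_type (integer_type_node, TYPE_QUAL_CONST);
     // TYPE_MAIN_VARIANT (t) == integer_type_node, so this holds even
     // in the stricter LTO_STREAMING_SAFE mode:
     gcc_checking_assert (types_equal_for_same_type_for_tbaa_p
			    (t, integer_type_node, true));  */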
3941 /* Compare REF1 and REF2 and return flags specifying their differences.
3942 If LTO_STREAMING_SAFE is true do not use alias sets and canonical
3943 types that are going to be recomputed.
3944 If TBAA is true also compare TBAA metadata. */
3946 int
3947 ao_compare::compare_ao_refs (ao_ref *ref1, ao_ref *ref2,
3948 bool lto_streaming_safe,
3949 bool tbaa)
3951 if (TREE_THIS_VOLATILE (ref1->ref) != TREE_THIS_VOLATILE (ref2->ref))
3952 return SEMANTICS;
3953 tree base1 = ao_ref_base (ref1);
3954 tree base2 = ao_ref_base (ref2);
3956 if (!known_eq (ref1->offset, ref2->offset)
3957 || !known_eq (ref1->size, ref2->size)
3958 || !known_eq (ref1->max_size, ref2->max_size))
3959 return SEMANTICS;
3961 /* For variable accesses we need to compare actual paths
3962 to check that both refs access the same address with the same size. */
3963 if (!known_eq (ref1->size, ref1->max_size))
3965 if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (ref1->ref)),
3966 TYPE_SIZE (TREE_TYPE (ref2->ref)), 0))
3967 return SEMANTICS;
3968 tree r1 = ref1->ref;
3969 tree r2 = ref2->ref;
3971 /* Handle toplevel COMPONENT_REFs of bitfields.
3972 Those are special since they are not allowed in
3973 ADDR_EXPR. */
3974 if (TREE_CODE (r1) == COMPONENT_REF
3975 && DECL_BIT_FIELD (TREE_OPERAND (r1, 1)))
3977 if (TREE_CODE (r2) != COMPONENT_REF
3978 || !DECL_BIT_FIELD (TREE_OPERAND (r2, 1)))
3979 return SEMANTICS;
3980 tree field1 = TREE_OPERAND (r1, 1);
3981 tree field2 = TREE_OPERAND (r2, 1);
3982 if (!operand_equal_p (DECL_FIELD_OFFSET (field1),
3983 DECL_FIELD_OFFSET (field2), 0)
3984 || !operand_equal_p (DECL_FIELD_BIT_OFFSET (field1),
3985 DECL_FIELD_BIT_OFFSET (field2), 0)
3986 || !operand_equal_p (DECL_SIZE (field1), DECL_SIZE (field2), 0)
3987 || !types_compatible_p (TREE_TYPE (r1),
3988 TREE_TYPE (r2)))
3989 return SEMANTICS;
3990 r1 = TREE_OPERAND (r1, 0);
3991 r2 = TREE_OPERAND (r2, 0);
3993 else if (TREE_CODE (r2) == COMPONENT_REF
3994 && DECL_BIT_FIELD (TREE_OPERAND (r2, 1)))
3995 return SEMANTICS;
3997 /* Similarly for bit field refs. */
3998 if (TREE_CODE (r1) == BIT_FIELD_REF)
4000 if (TREE_CODE (r2) != BIT_FIELD_REF
4001 || !operand_equal_p (TREE_OPERAND (r1, 1),
4002 TREE_OPERAND (r2, 1), 0)
4003 || !operand_equal_p (TREE_OPERAND (r1, 2),
4004 TREE_OPERAND (r2, 2), 0)
4005 || !types_compatible_p (TREE_TYPE (r1),
4006 TREE_TYPE (r2)))
4007 return SEMANTICS;
4008 r1 = TREE_OPERAND (r1, 0);
4009 r2 = TREE_OPERAND (r2, 0);
4011 else if (TREE_CODE (r2) == BIT_FIELD_REF)
4012 return SEMANTICS;
4014 /* Now we can compare the address of actual memory access. */
4015 if (!operand_equal_p (r1, r2, OEP_ADDRESS_OF | OEP_MATCH_SIDE_EFFECTS))
4016 return SEMANTICS;
4018 /* For constant accesses we get more matches by comparing offset only. */
4019 else if (!operand_equal_p (base1, base2,
4020 OEP_ADDRESS_OF | OEP_MATCH_SIDE_EFFECTS))
4021 return SEMANTICS;
4023 /* We can't simply use get_object_alignment_1 on the full
4024 reference as for accesses with variable indexes this reports
4025 an overly conservative alignment. */
4026 unsigned int align1, align2;
4027 unsigned HOST_WIDE_INT bitpos1, bitpos2;
4028 bool known1 = get_object_alignment_1 (base1, &align1, &bitpos1);
4029 bool known2 = get_object_alignment_1 (base2, &align2, &bitpos2);
4030 /* ??? For MEM_REF get_object_alignment_1 determines the alignment from
4031 TYPE_ALIGN but still returns false. This seems to contradict
4032 its description, so compare even if the alignment is unknown. */
4033 if (known1 != known2
4034 || (bitpos1 != bitpos2 || align1 != align2))
4035 return SEMANTICS;
4037 /* Now we know that accesses are semantically same. */
4038 int flags = 0;
4040 /* ao_ref_base strips inner MEM_REF [&decl], recover from that here. */
4041 tree rbase1 = ref1->ref;
4042 if (rbase1)
4043 while (handled_component_p (rbase1))
4044 rbase1 = TREE_OPERAND (rbase1, 0);
4045 tree rbase2 = ref2->ref;
4046 while (handled_component_p (rbase2))
4047 rbase2 = TREE_OPERAND (rbase2, 0);
4049 /* MEM_REFs and TARGET_MEM_REFs record dependence cliques which are used to
4050 implement restrict pointers. MR_DEPENDENCE_CLIQUE 0 means no information.
4051 Otherwise we need to match bases and cliques. */
4052 if ((((TREE_CODE (rbase1) == MEM_REF || TREE_CODE (rbase1) == TARGET_MEM_REF)
4053 && MR_DEPENDENCE_CLIQUE (rbase1))
4054 || ((TREE_CODE (rbase2) == MEM_REF || TREE_CODE (rbase2) == TARGET_MEM_REF)
4055 && MR_DEPENDENCE_CLIQUE (rbase2)))
4056 && (TREE_CODE (rbase1) != TREE_CODE (rbase2)
4057 || MR_DEPENDENCE_CLIQUE (rbase1) != MR_DEPENDENCE_CLIQUE (rbase2)
4058 || (MR_DEPENDENCE_BASE (rbase1) != MR_DEPENDENCE_BASE (rbase2))))
4059 flags |= DEPENDENCE_CLIQUE;
4061 if (!tbaa)
4062 return flags;
4064 /* Alias sets are not stable across LTO streaming; be conservative here
4065 and compare types the alias sets are ultimately based on. */
4066 if (lto_streaming_safe)
4068 tree t1 = ao_ref_alias_ptr_type (ref1);
4069 tree t2 = ao_ref_alias_ptr_type (ref2);
4070 if (!alias_ptr_types_compatible_p (t1, t2))
4071 flags |= REF_ALIAS_SET;
4073 t1 = ao_ref_base_alias_ptr_type (ref1);
4074 t2 = ao_ref_base_alias_ptr_type (ref2);
4075 if (!alias_ptr_types_compatible_p (t1, t2))
4076 flags |= BASE_ALIAS_SET;
4078 else
4080 if (ao_ref_alias_set (ref1) != ao_ref_alias_set (ref2))
4081 flags |= REF_ALIAS_SET;
4082 if (ao_ref_base_alias_set (ref1) != ao_ref_base_alias_set (ref2))
4083 flags |= BASE_ALIAS_SET;
4086 /* Access path is used only on non-view-converted references. */
4087 bool view_converted = view_converted_memref_p (rbase1);
4088 if (view_converted_memref_p (rbase2) != view_converted)
4089 return flags | ACCESS_PATH;
4090 else if (view_converted)
4091 return flags;
4094 /* Find start of access paths and look for trailing arrays. */
4095 tree c1 = ref1->ref, c2 = ref2->ref;
4096 tree end_struct_ref1 = NULL, end_struct_ref2 = NULL;
4097 int nskipped1 = 0, nskipped2 = 0;
4098 int i = 0;
4100 for (tree p1 = ref1->ref; handled_component_p (p1); p1 = TREE_OPERAND (p1, 0))
4102 if (component_ref_to_zero_sized_trailing_array_p (p1))
4103 end_struct_ref1 = p1;
4104 if (ends_tbaa_access_path_p (p1))
4105 c1 = p1, nskipped1 = i;
4106 i++;
i = 0; /* Count components of the second path from zero again. */
4108 for (tree p2 = ref2->ref; handled_component_p (p2); p2 = TREE_OPERAND (p2, 0))
4110 if (component_ref_to_zero_sized_trailing_array_p (p2))
4111 end_struct_ref2 = p2;
4112 if (ends_tbaa_access_path_p (p2))
4113 c2 = p2, nskipped2 = i;
4114 i++;
4117 /* For variable accesses we cannot rely on the offset match below.
4118 We know that the paths are structurally the same, so only check that
4119 the starts of the TBAA paths did not diverge. */
4120 if (!known_eq (ref1->size, ref1->max_size)
4121 && nskipped1 != nskipped2)
4122 return flags | ACCESS_PATH;
4124 /* Information about trailing refs is used by
4125 aliasing_component_refs_p, which is applied only if the paths
4126 have handled components. */
4127 if (!handled_component_p (c1) && !handled_component_p (c2))
4129 else if ((end_struct_ref1 != NULL) != (end_struct_ref2 != NULL))
4130 return flags | ACCESS_PATH;
4131 if (end_struct_ref1
4132 && TYPE_MAIN_VARIANT (TREE_TYPE (end_struct_ref1))
4133 != TYPE_MAIN_VARIANT (TREE_TYPE (end_struct_ref2)))
4134 return flags | ACCESS_PATH;
4136 /* Now compare all handled components of the access path.
4137 We have three oracles that care about access paths:
4138 - aliasing_component_refs_p
4139 - nonoverlapping_refs_since_match_p
4140 - nonoverlapping_component_refs_p
4141 We need to match things these oracles compare.
4143 It is only necessary to check types for compatibility
4144 and offsets. The rest of what the oracles compare are actual
4145 addresses. Those are already known to be the same:
4146 - for constant accesses we check offsets
4147 - for variable accesses we already matched
4148 the path lexically with operand_equal_p. */
4149 while (true)
4151 bool comp1 = handled_component_p (c1);
4152 bool comp2 = handled_component_p (c2);
4154 if (comp1 != comp2)
4155 return flags | ACCESS_PATH;
4156 if (!comp1)
4157 break;
4159 if (TREE_CODE (c1) != TREE_CODE (c2))
4160 return flags | ACCESS_PATH;
4162 /* aliasing_component_refs_p attempts to find a type match within
4163 the paths. For that reason both types need to be equal
4164 with respect to same_type_for_tbaa_p. */
4165 if (!types_equal_for_same_type_for_tbaa_p (TREE_TYPE (c1),
4166 TREE_TYPE (c2),
4167 lto_streaming_safe))
4168 return flags | ACCESS_PATH;
4169 if (component_ref_to_zero_sized_trailing_array_p (c1)
4170 != component_ref_to_zero_sized_trailing_array_p (c2))
4171 return flags | ACCESS_PATH;
4173 /* aliasing_matching_component_refs_p compares
4174 offsets within the path. Other properties are ignored.
4175 Do not bother to verify offsets in variable accesses. Here we
4176 already compared them by operand_equal_p so they are
4177 structurally the same. */
4178 if (!known_eq (ref1->size, ref1->max_size))
4180 poly_int64 offadj1, sztmc1, msztmc1;
4181 bool reverse1;
4182 get_ref_base_and_extent (c1, &offadj1, &sztmc1, &msztmc1, &reverse1);
4183 poly_int64 offadj2, sztmc2, msztmc2;
4184 bool reverse2;
4185 get_ref_base_and_extent (c2, &offadj2, &sztmc2, &msztmc2, &reverse2);
4186 if (!known_eq (offadj1, offadj2))
4187 return flags | ACCESS_PATH;
4189 c1 = TREE_OPERAND (c1, 0);
4190 c2 = TREE_OPERAND (c2, 0);
4192 /* Finally test the access type. */
4193 if (!types_equal_for_same_type_for_tbaa_p (TREE_TYPE (c1),
4194 TREE_TYPE (c2),
4195 lto_streaming_safe))
4196 return flags | ACCESS_PATH;
4197 return flags;
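/* Editorial sketch (not part of the original file): an ICF-style client of
   compare_ao_refs deciding whether two loads are interchangeable.  LOAD1,
   LOAD2 and the setup are hypothetical; the flag names come from
   tree-ssa-alias-compare.h.

     ao_ref r1, r2;
     ao_ref_init (&r1, gimple_assign_rhs1 (load1));
     ao_ref_init (&r2, gimple_assign_rhs1 (load2));
     ao_compare cmp;
     int diff = cmp.compare_ao_refs (&r1, &r2,
				     true,	// lto_streaming_safe
				     true);	// tbaa
     if (diff == 0)
       ;	// accesses are semantically equal, TBAA metadata included
     else if (!(diff & ao_compare::SEMANTICS))
       ;	// semantically the same access, but the TBAA metadata,
		// access paths or dependence info differ; unification
		// needs that metadata made conservative first

   Consistently with the comparison above, hash_ao_ref below must hash
   equal references to equal values, so it mixes in only fields that
   compare_ao_refs inspects.  */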
4200 /* Hash REF to HSTATE. If LTO_STREAMING_SAFE is true, do not use alias sets
4201 and canonical types. */
4202 void
4203 ao_compare::hash_ao_ref (ao_ref *ref, bool lto_streaming_safe, bool tbaa,
4204 inchash::hash &hstate)
4206 tree base = ao_ref_base (ref);
4207 tree tbase = base;
4209 if (!known_eq (ref->size, ref->max_size))
4211 tree r = ref->ref;
4212 if (TREE_CODE (r) == COMPONENT_REF
4213 && DECL_BIT_FIELD (TREE_OPERAND (r, 1)))
4215 tree field = TREE_OPERAND (r, 1);
4216 hash_operand (DECL_FIELD_OFFSET (field), hstate, 0);
4217 hash_operand (DECL_FIELD_BIT_OFFSET (field), hstate, 0);
4218 hash_operand (DECL_SIZE (field), hstate, 0);
4219 r = TREE_OPERAND (r, 0);
4221 if (TREE_CODE (r) == BIT_FIELD_REF)
4223 hash_operand (TREE_OPERAND (r, 1), hstate, 0);
4224 hash_operand (TREE_OPERAND (r, 2), hstate, 0);
4225 r = TREE_OPERAND (r, 0);
4227 hash_operand (TYPE_SIZE (TREE_TYPE (ref->ref)), hstate, 0);
4228 hash_operand (r, hstate, OEP_ADDRESS_OF | OEP_MATCH_SIDE_EFFECTS);
4230 else
4232 hash_operand (tbase, hstate, OEP_ADDRESS_OF | OEP_MATCH_SIDE_EFFECTS);
4233 hstate.add_poly_int (ref->offset);
4234 hstate.add_poly_int (ref->size);
4235 hstate.add_poly_int (ref->max_size);
4237 if (!lto_streaming_safe && tbaa)
4239 hstate.add_int (ao_ref_alias_set (ref));
4240 hstate.add_int (ao_ref_base_alias_set (ref));