/* Alias analysis for trees.
   Copyright (C) 2004-2021 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "timevar.h" /* for TV_ALIAS_STMT_WALK */
30 #include "ssa.h"
31 #include "cgraph.h"
32 #include "tree-pretty-print.h"
33 #include "alias.h"
34 #include "fold-const.h"
35 #include "langhooks.h"
36 #include "dumpfile.h"
37 #include "tree-eh.h"
38 #include "tree-dfa.h"
39 #include "ipa-reference.h"
40 #include "varasm.h"
41 #include "ipa-modref-tree.h"
42 #include "ipa-modref.h"
43 #include "attr-fnspec.h"
44 #include "errors.h"
45 #include "dbgcnt.h"
46 #include "gimple-pretty-print.h"
47 #include "print-tree.h"
48 #include "tree-ssa-alias-compare.h"
49 #include "builtins.h"

/* Broad overview of how alias analysis on gimple works:

   Statements clobbering or using memory are linked through the
   virtual operand factored use-def chain.  The virtual operand
   is unique per function, its symbol is accessible via gimple_vop (cfun).
   Virtual operands are used for efficiently walking memory statements
   in the gimple IL and are useful for things like value-numbering as
   a generation count for memory references.

   SSA_NAME pointers may have associated points-to information
   accessible via the SSA_NAME_PTR_INFO macro.  Flow-insensitive
   points-to information is (re-)computed by the TODO_rebuild_alias
   pass manager todo.  Points-to information is also used for more
   precise tracking of call-clobbered and call-used variables and
   related disambiguations.

   This file contains functions for disambiguating memory references,
   the so called alias-oracle, and tools for walking the gimple IL.

   The main alias-oracle entry-points are

   bool stmt_may_clobber_ref_p (gimple *, tree)

     This function queries if a statement may invalidate (parts of)
     the memory designated by the reference tree argument.

   bool ref_maybe_used_by_stmt_p (gimple *, tree)

     This function queries if a statement may need (parts of) the
     memory designated by the reference tree argument.

   There are variants of these functions that only handle the call
   part of a statement, call_may_clobber_ref_p and ref_maybe_used_by_call_p.
   Note that these do not disambiguate against a possible call lhs.

   bool refs_may_alias_p (tree, tree)

     This function tries to disambiguate two reference trees.

   bool ptr_deref_may_alias_global_p (tree)

     This function queries if dereferencing a pointer variable may
     alias global memory.

   More low-level disambiguators are available and documented in
   this file.  Low-level disambiguators dealing with points-to
   information are in tree-ssa-structalias.c.  */
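
/* As a rough illustration of how a client pass consumes these
   entry-points (a minimal sketch; the helper name is hypothetical and
   only the declarations from tree-ssa-alias.h are assumed):

     static bool
     example_stmt_touches_ref_p (gimple *stmt, tree ref)
     {
       return (ref_maybe_used_by_stmt_p (stmt, ref)
	       || stmt_may_clobber_ref_p (stmt, ref));
     }

   A "true" answer only means the oracle could not prove independence;
   callers must treat it as "may alias".  */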

static int nonoverlapping_refs_since_match_p (tree, tree, tree, tree, bool);
static bool nonoverlapping_component_refs_p (const_tree, const_tree);

/* Query statistics for the different low-level disambiguators.
   A high-level query may trigger multiple of them.  */

static struct {
  unsigned HOST_WIDE_INT refs_may_alias_p_may_alias;
  unsigned HOST_WIDE_INT refs_may_alias_p_no_alias;
  unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_may_alias;
  unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_no_alias;
  unsigned HOST_WIDE_INT call_may_clobber_ref_p_may_alias;
  unsigned HOST_WIDE_INT call_may_clobber_ref_p_no_alias;
  unsigned HOST_WIDE_INT aliasing_component_refs_p_may_alias;
  unsigned HOST_WIDE_INT aliasing_component_refs_p_no_alias;
  unsigned HOST_WIDE_INT nonoverlapping_component_refs_p_may_alias;
  unsigned HOST_WIDE_INT nonoverlapping_component_refs_p_no_alias;
  unsigned HOST_WIDE_INT nonoverlapping_refs_since_match_p_may_alias;
  unsigned HOST_WIDE_INT nonoverlapping_refs_since_match_p_must_overlap;
  unsigned HOST_WIDE_INT nonoverlapping_refs_since_match_p_no_alias;
  unsigned HOST_WIDE_INT stmt_kills_ref_p_no;
  unsigned HOST_WIDE_INT stmt_kills_ref_p_yes;
  unsigned HOST_WIDE_INT modref_use_may_alias;
  unsigned HOST_WIDE_INT modref_use_no_alias;
  unsigned HOST_WIDE_INT modref_clobber_may_alias;
  unsigned HOST_WIDE_INT modref_clobber_no_alias;
  unsigned HOST_WIDE_INT modref_kill_no;
  unsigned HOST_WIDE_INT modref_kill_yes;
  unsigned HOST_WIDE_INT modref_tests;
  unsigned HOST_WIDE_INT modref_baseptr_tests;
} alias_stats;

void
dump_alias_stats (FILE *s)
{
  fprintf (s, "\nAlias oracle query stats:\n");
  fprintf (s, "  refs_may_alias_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.refs_may_alias_p_no_alias,
	   alias_stats.refs_may_alias_p_no_alias
	   + alias_stats.refs_may_alias_p_may_alias);
  fprintf (s, "  ref_maybe_used_by_call_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.ref_maybe_used_by_call_p_no_alias,
	   alias_stats.ref_maybe_used_by_call_p_no_alias
	   + alias_stats.ref_maybe_used_by_call_p_may_alias);
  fprintf (s, "  call_may_clobber_ref_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.call_may_clobber_ref_p_no_alias,
	   alias_stats.call_may_clobber_ref_p_no_alias
	   + alias_stats.call_may_clobber_ref_p_may_alias);
  fprintf (s, "  stmt_kills_ref_p: "
	   HOST_WIDE_INT_PRINT_DEC" kills, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.stmt_kills_ref_p_yes + alias_stats.modref_kill_yes,
	   alias_stats.stmt_kills_ref_p_yes + alias_stats.modref_kill_yes
	   + alias_stats.stmt_kills_ref_p_no + alias_stats.modref_kill_no);
  fprintf (s, "  nonoverlapping_component_refs_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.nonoverlapping_component_refs_p_no_alias,
	   alias_stats.nonoverlapping_component_refs_p_no_alias
	   + alias_stats.nonoverlapping_component_refs_p_may_alias);
  fprintf (s, "  nonoverlapping_refs_since_match_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" must overlaps, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.nonoverlapping_refs_since_match_p_no_alias,
	   alias_stats.nonoverlapping_refs_since_match_p_must_overlap,
	   alias_stats.nonoverlapping_refs_since_match_p_no_alias
	   + alias_stats.nonoverlapping_refs_since_match_p_may_alias
	   + alias_stats.nonoverlapping_refs_since_match_p_must_overlap);
  fprintf (s, "  aliasing_component_refs_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.aliasing_component_refs_p_no_alias,
	   alias_stats.aliasing_component_refs_p_no_alias
	   + alias_stats.aliasing_component_refs_p_may_alias);
  dump_alias_stats_in_alias_c (s);
  fprintf (s, "\nModref stats:\n");
  fprintf (s, "  modref kill: "
	   HOST_WIDE_INT_PRINT_DEC" kills, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.modref_kill_yes,
	   alias_stats.modref_kill_yes
	   + alias_stats.modref_kill_no);
  fprintf (s, "  modref use: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.modref_use_no_alias,
	   alias_stats.modref_use_no_alias
	   + alias_stats.modref_use_may_alias);
  fprintf (s, "  modref clobber: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n"
	   "  " HOST_WIDE_INT_PRINT_DEC" tbaa queries (%f per modref query)\n"
	   "  " HOST_WIDE_INT_PRINT_DEC" base compares (%f per modref query)\n",
	   alias_stats.modref_clobber_no_alias,
	   alias_stats.modref_clobber_no_alias
	   + alias_stats.modref_clobber_may_alias,
	   alias_stats.modref_tests,
	   ((double)alias_stats.modref_tests)
	   / (alias_stats.modref_clobber_no_alias
	      + alias_stats.modref_clobber_may_alias),
	   alias_stats.modref_baseptr_tests,
	   ((double)alias_stats.modref_baseptr_tests)
	   / (alias_stats.modref_clobber_no_alias
	      + alias_stats.modref_clobber_may_alias));
}

/* Return true if dereferencing PTR may alias with a global variable.  */

bool
ptr_deref_may_alias_global_p (tree ptr)
{
  struct ptr_info_def *pi;

  /* If we end up with a pointer constant here, it may point
     to global memory.  */
  if (TREE_CODE (ptr) != SSA_NAME)
    return true;

  pi = SSA_NAME_PTR_INFO (ptr);

  /* If we do not have points-to information for this variable,
     we have to punt.  */
  if (!pi)
    return true;

  /* ??? This does not use TBAA to prune globals ptr may not access.  */
  return pt_solution_includes_global (&pi->pt);
}

/* Return true if dereferencing PTR may alias DECL.
   The caller is responsible for applying TBAA to see if PTR
   may access DECL at all.  */

static bool
ptr_deref_may_alias_decl_p (tree ptr, tree decl)
{
  struct ptr_info_def *pi;

  /* Conversions are irrelevant for points-to information and
     data-dependence analysis can feed us those.  */
  STRIP_NOPS (ptr);

  /* Anything we do not explicitly handle aliases.  */
  if ((TREE_CODE (ptr) != SSA_NAME
       && TREE_CODE (ptr) != ADDR_EXPR
       && TREE_CODE (ptr) != POINTER_PLUS_EXPR)
      || !POINTER_TYPE_P (TREE_TYPE (ptr))
      || (!VAR_P (decl)
	  && TREE_CODE (decl) != PARM_DECL
	  && TREE_CODE (decl) != RESULT_DECL))
    return true;

  /* Disregard pointer offsetting.  */
  if (TREE_CODE (ptr) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr = TREE_OPERAND (ptr, 0);
	}
      while (TREE_CODE (ptr) == POINTER_PLUS_EXPR);
      return ptr_deref_may_alias_decl_p (ptr, decl);
    }

  /* ADDR_EXPR pointers either just offset another pointer or directly
     specify the pointed-to set.  */
  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	ptr = TREE_OPERAND (base, 0);
      else if (base
	       && DECL_P (base))
	return compare_base_decls (base, decl) != 0;
      else if (base
	       && CONSTANT_CLASS_P (base))
	return false;
      else
	return true;
    }

  /* Non-aliased variables cannot be pointed to.  */
  if (!may_be_aliased (decl))
    return false;

  /* If we do not have useful points-to information for this pointer
     we cannot disambiguate anything else.  */
  pi = SSA_NAME_PTR_INFO (ptr);
  if (!pi)
    return true;

  return pt_solution_includes (&pi->pt, decl);
}

/* Return true if dereferenced PTR1 and PTR2 may alias.
   The caller is responsible for applying TBAA to see if accesses
   through PTR1 and PTR2 may conflict at all.  */

bool
ptr_derefs_may_alias_p (tree ptr1, tree ptr2)
{
  struct ptr_info_def *pi1, *pi2;

  /* Conversions are irrelevant for points-to information and
     data-dependence analysis can feed us those.  */
  STRIP_NOPS (ptr1);
  STRIP_NOPS (ptr2);

  /* Disregard pointer offsetting.  */
  if (TREE_CODE (ptr1) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr1 = TREE_OPERAND (ptr1, 0);
	}
      while (TREE_CODE (ptr1) == POINTER_PLUS_EXPR);
      return ptr_derefs_may_alias_p (ptr1, ptr2);
    }
  if (TREE_CODE (ptr2) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr2 = TREE_OPERAND (ptr2, 0);
	}
      while (TREE_CODE (ptr2) == POINTER_PLUS_EXPR);
      return ptr_derefs_may_alias_p (ptr1, ptr2);
    }

  /* ADDR_EXPR pointers either just offset another pointer or directly
     specify the pointed-to set.  */
  if (TREE_CODE (ptr1) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr1, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	return ptr_derefs_may_alias_p (TREE_OPERAND (base, 0), ptr2);
      else if (base
	       && DECL_P (base))
	return ptr_deref_may_alias_decl_p (ptr2, base);
      else
	return true;
    }
  if (TREE_CODE (ptr2) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr2, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	return ptr_derefs_may_alias_p (ptr1, TREE_OPERAND (base, 0));
      else if (base
	       && DECL_P (base))
	return ptr_deref_may_alias_decl_p (ptr1, base);
      else
	return true;
    }

  /* From here we require SSA name pointers.  Anything else aliases.  */
  if (TREE_CODE (ptr1) != SSA_NAME
      || TREE_CODE (ptr2) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (ptr1))
      || !POINTER_TYPE_P (TREE_TYPE (ptr2)))
    return true;

  /* We may end up with two empty points-to solutions for two identical
     pointers.  In this case we still want to say both pointers alias, so
     shortcut that here.  */
  if (ptr1 == ptr2)
    return true;

  /* If we do not have useful points-to information for either pointer
     we cannot disambiguate anything else.  */
  pi1 = SSA_NAME_PTR_INFO (ptr1);
  pi2 = SSA_NAME_PTR_INFO (ptr2);
  if (!pi1 || !pi2)
    return true;

  /* ??? This does not use TBAA to prune decls from the intersection
     that not both pointers may access.  */
  return pt_solutions_intersect (&pi1->pt, &pi2->pt);
}

/* Return true if dereferencing PTR may alias *REF.
   The caller is responsible for applying TBAA to see if PTR
   may access *REF at all.  */

static bool
ptr_deref_may_alias_ref_p_1 (tree ptr, ao_ref *ref)
{
  tree base = ao_ref_base (ref);

  if (TREE_CODE (base) == MEM_REF
      || TREE_CODE (base) == TARGET_MEM_REF)
    return ptr_derefs_may_alias_p (ptr, TREE_OPERAND (base, 0));
  else if (DECL_P (base))
    return ptr_deref_may_alias_decl_p (ptr, base);

  return true;
}

/* Returns true if PTR1 and PTR2 compare unequal because of points-to.  */

bool
ptrs_compare_unequal (tree ptr1, tree ptr2)
{
  /* First resolve the pointers down to a SSA name pointer base or
     a VAR_DECL, PARM_DECL or RESULT_DECL.  This explicitly does
     not yet try to handle LABEL_DECLs, FUNCTION_DECLs, CONST_DECLs
     or STRING_CSTs which need points-to adjustments to track them
     in the points-to sets.  */
  tree obj1 = NULL_TREE;
  tree obj2 = NULL_TREE;
  if (TREE_CODE (ptr1) == ADDR_EXPR)
    {
      tree tem = get_base_address (TREE_OPERAND (ptr1, 0));
      if (! tem)
	return false;
      if (VAR_P (tem)
	  || TREE_CODE (tem) == PARM_DECL
	  || TREE_CODE (tem) == RESULT_DECL)
	obj1 = tem;
      else if (TREE_CODE (tem) == MEM_REF)
	ptr1 = TREE_OPERAND (tem, 0);
    }
  if (TREE_CODE (ptr2) == ADDR_EXPR)
    {
      tree tem = get_base_address (TREE_OPERAND (ptr2, 0));
      if (! tem)
	return false;
      if (VAR_P (tem)
	  || TREE_CODE (tem) == PARM_DECL
	  || TREE_CODE (tem) == RESULT_DECL)
	obj2 = tem;
      else if (TREE_CODE (tem) == MEM_REF)
	ptr2 = TREE_OPERAND (tem, 0);
    }

  /* Canonicalize ptr vs. object.  */
  if (TREE_CODE (ptr1) == SSA_NAME && obj2)
    {
      std::swap (ptr1, ptr2);
      std::swap (obj1, obj2);
    }

  if (obj1 && obj2)
    /* Other code handles this correctly, no need to duplicate it here.  */;
  else if (obj1 && TREE_CODE (ptr2) == SSA_NAME)
    {
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr2);
      /* We may not use restrict to optimize pointer comparisons.
	 See PR71062.  So we have to assume that restrict-pointed-to
	 may be in fact obj1.  */
      if (!pi
	  || pi->pt.vars_contains_restrict
	  || pi->pt.vars_contains_interposable)
	return false;
      if (VAR_P (obj1)
	  && (TREE_STATIC (obj1) || DECL_EXTERNAL (obj1)))
	{
	  varpool_node *node = varpool_node::get (obj1);
	  /* If obj1 may bind to NULL give up (see below).  */
	  if (! node
	      || ! node->nonzero_address ()
	      || ! decl_binds_to_current_def_p (obj1))
	    return false;
	}
      return !pt_solution_includes (&pi->pt, obj1);
    }

  /* ??? We'd like to handle ptr1 != NULL and ptr1 != ptr2
     but those require pt.null to be conservatively correct.  */

  return false;
}
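
/* Illustrative sketch of the intended use: given a local variable LOCAL
   and an SSA name pointer P whose points-to set does not contain LOCAL,
   ptrs_compare_unequal (ADDR_EXPR of LOCAL, P) may return true, which
   lets a comparison like "P == &LOCAL" fold to false.  It must give up
   (return false) when points-to information is missing or when the set
   contains restrict-tagged or interposable variables.  */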

/* Returns whether reference REF to BASE may refer to global memory.  */

static bool
ref_may_alias_global_p_1 (tree base)
{
  if (DECL_P (base))
    return is_global_var (base);
  else if (TREE_CODE (base) == MEM_REF
	   || TREE_CODE (base) == TARGET_MEM_REF)
    return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
  return true;
}

bool
ref_may_alias_global_p (ao_ref *ref)
{
  tree base = ao_ref_base (ref);
  return ref_may_alias_global_p_1 (base);
}

bool
ref_may_alias_global_p (tree ref)
{
  tree base = get_base_address (ref);
  return ref_may_alias_global_p_1 (base);
}

/* Return true if STMT may clobber global memory.  */

bool
stmt_may_clobber_global_p (gimple *stmt)
{
  tree lhs;

  if (!gimple_vdef (stmt))
    return false;

  /* ??? We can ask the oracle whether an artificial pointer
     dereference with a pointer with points-to information covering
     all global memory (what about non-address taken memory?) may be
     clobbered by this call.  As there is at the moment no convenient
     way of doing that without generating garbage do some manual
     checking instead.
     ??? We could make a NULL ao_ref argument to the various
     predicates special, meaning any global memory.  */

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      lhs = gimple_assign_lhs (stmt);
      return (TREE_CODE (lhs) != SSA_NAME
	      && ref_may_alias_global_p (lhs));
    case GIMPLE_CALL:
      return true;
    default:
      return true;
    }
}

/* Dump alias information on FILE.  */

void
dump_alias_info (FILE *file)
{
  unsigned i;
  tree ptr;
  const char *funcname
    = lang_hooks.decl_printable_name (current_function_decl, 2);
  tree var;

  fprintf (file, "\n\nAlias information for %s\n\n", funcname);

  fprintf (file, "Aliased symbols\n\n");

  FOR_EACH_LOCAL_DECL (cfun, i, var)
    {
      if (may_be_aliased (var))
	dump_variable (file, var);
    }

  fprintf (file, "\nCall clobber information\n");

  fprintf (file, "\nESCAPED");
  dump_points_to_solution (file, &cfun->gimple_df->escaped);

  fprintf (file, "\n\nFlow-insensitive points-to information\n\n");

  FOR_EACH_SSA_NAME (i, ptr, cfun)
    {
      struct ptr_info_def *pi;

      if (!POINTER_TYPE_P (TREE_TYPE (ptr))
	  || SSA_NAME_IN_FREE_LIST (ptr))
	continue;

      pi = SSA_NAME_PTR_INFO (ptr);
      if (pi)
	dump_points_to_info_for (file, ptr);
    }

  fprintf (file, "\n");
}

/* Dump alias information on stderr.  */

DEBUG_FUNCTION void
debug_alias_info (void)
{
  dump_alias_info (stderr);
}

/* Dump the points-to set *PT into FILE.  */

void
dump_points_to_solution (FILE *file, struct pt_solution *pt)
{
  if (pt->anything)
    fprintf (file, ", points-to anything");

  if (pt->nonlocal)
    fprintf (file, ", points-to non-local");

  if (pt->escaped)
    fprintf (file, ", points-to escaped");

  if (pt->ipa_escaped)
    fprintf (file, ", points-to unit escaped");

  if (pt->null)
    fprintf (file, ", points-to NULL");

  if (pt->vars)
    {
      fprintf (file, ", points-to vars: ");
      dump_decl_set (file, pt->vars);
      if (pt->vars_contains_nonlocal
	  || pt->vars_contains_escaped
	  || pt->vars_contains_escaped_heap
	  || pt->vars_contains_restrict)
	{
	  const char *comma = "";
	  fprintf (file, " (");
	  if (pt->vars_contains_nonlocal)
	    {
	      fprintf (file, "nonlocal");
	      comma = ", ";
	    }
	  if (pt->vars_contains_escaped)
	    {
	      fprintf (file, "%sescaped", comma);
	      comma = ", ";
	    }
	  if (pt->vars_contains_escaped_heap)
	    {
	      fprintf (file, "%sescaped heap", comma);
	      comma = ", ";
	    }
	  if (pt->vars_contains_restrict)
	    {
	      fprintf (file, "%srestrict", comma);
	      comma = ", ";
	    }
	  if (pt->vars_contains_interposable)
	    fprintf (file, "%sinterposable", comma);
	  fprintf (file, ")");
	}
    }
}

/* Unified dump function for pt_solution.  */

DEBUG_FUNCTION void
debug (pt_solution &ref)
{
  dump_points_to_solution (stderr, &ref);
}

DEBUG_FUNCTION void
debug (pt_solution *ptr)
{
  if (ptr)
    debug (*ptr);
  else
    fprintf (stderr, "<nil>\n");
}

/* Dump points-to information for SSA_NAME PTR into FILE.  */

void
dump_points_to_info_for (FILE *file, tree ptr)
{
  struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);

  print_generic_expr (file, ptr, dump_flags);

  if (pi)
    dump_points_to_solution (file, &pi->pt);
  else
    fprintf (file, ", points-to anything");

  fprintf (file, "\n");
}

/* Dump points-to information for VAR into stderr.  */

DEBUG_FUNCTION void
debug_points_to_info_for (tree var)
{
  dump_points_to_info_for (stderr, var);
}

/* Initializes the alias-oracle reference representation *R from REF.  */

void
ao_ref_init (ao_ref *r, tree ref)
{
  r->ref = ref;
  r->base = NULL_TREE;
  r->offset = 0;
  r->size = -1;
  r->max_size = -1;
  r->ref_alias_set = -1;
  r->base_alias_set = -1;
  r->volatile_p = ref ? TREE_THIS_VOLATILE (ref) : false;
}
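
/* A minimal usage sketch (illustrative; it assumes the ao_ref overload
   of refs_may_alias_p declared in tree-ssa-alias.h, and the helper name
   is hypothetical).  An ao_ref caches base, extent and alias-set data
   lazily, so a client initializes it once and lets repeated oracle
   queries fill and reuse the cached fields:

     static bool
     example_refs_conflict_p (tree t1, tree t2)
     {
       ao_ref r1, r2;
       ao_ref_init (&r1, t1);
       ao_ref_init (&r2, t2);
       // The first query computes base/offset/size via ao_ref_base;
       // later queries on the same ao_ref reuse them.
       return refs_may_alias_p (&r1, &r2, true);
     }
*/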

/* Returns the base object of the memory reference *REF.  */

tree
ao_ref_base (ao_ref *ref)
{
  bool reverse;

  if (ref->base)
    return ref->base;
  ref->base = get_ref_base_and_extent (ref->ref, &ref->offset, &ref->size,
				       &ref->max_size, &reverse);
  return ref->base;
}

/* Returns the base object alias set of the memory reference *REF.  */

alias_set_type
ao_ref_base_alias_set (ao_ref *ref)
{
  tree base_ref;

  if (ref->base_alias_set != -1)
    return ref->base_alias_set;
  if (!ref->ref)
    return 0;
  base_ref = ref->ref;
  if (TREE_CODE (base_ref) == WITH_SIZE_EXPR)
    base_ref = TREE_OPERAND (base_ref, 0);
  while (handled_component_p (base_ref))
    base_ref = TREE_OPERAND (base_ref, 0);
  ref->base_alias_set = get_alias_set (base_ref);
  return ref->base_alias_set;
}

/* Returns the reference alias set of the memory reference *REF.  */

alias_set_type
ao_ref_alias_set (ao_ref *ref)
{
  if (ref->ref_alias_set != -1)
    return ref->ref_alias_set;
  if (!ref->ref)
    return 0;
  ref->ref_alias_set = get_alias_set (ref->ref);
  return ref->ref_alias_set;
}

/* Returns a type satisfying
   get_deref_alias_set (type) == ao_ref_base_alias_set (REF).  */

tree
ao_ref_base_alias_ptr_type (ao_ref *ref)
{
  tree base_ref;

  if (!ref->ref)
    return NULL_TREE;
  base_ref = ref->ref;
  if (TREE_CODE (base_ref) == WITH_SIZE_EXPR)
    base_ref = TREE_OPERAND (base_ref, 0);
  while (handled_component_p (base_ref))
    base_ref = TREE_OPERAND (base_ref, 0);
  tree ret = reference_alias_ptr_type (base_ref);
  return ret;
}

/* Returns a type satisfying
   get_deref_alias_set (type) == ao_ref_alias_set (REF).  */

tree
ao_ref_alias_ptr_type (ao_ref *ref)
{
  if (!ref->ref)
    return NULL_TREE;
  tree ret = reference_alias_ptr_type (ref->ref);
  return ret;
}

/* Init an alias-oracle reference representation from a gimple pointer
   PTR and a range specified by OFFSET, SIZE and MAX_SIZE under the
   assumption that RANGE_KNOWN is set.

   The access is assumed to be only at or after the pointer target
   adjusted by the offset, not before it (even in the case RANGE_KNOWN
   is false).  */

void
ao_ref_init_from_ptr_and_range (ao_ref *ref, tree ptr,
				bool range_known,
				poly_int64 offset,
				poly_int64 size,
				poly_int64 max_size)
{
  poly_int64 t, extra_offset = 0;

  ref->ref = NULL_TREE;
  if (TREE_CODE (ptr) == SSA_NAME)
    {
      gimple *stmt = SSA_NAME_DEF_STMT (ptr);
      if (gimple_assign_single_p (stmt)
	  && gimple_assign_rhs_code (stmt) == ADDR_EXPR)
	ptr = gimple_assign_rhs1 (stmt);
      else if (is_gimple_assign (stmt)
	       && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
	       && ptrdiff_tree_p (gimple_assign_rhs2 (stmt), &extra_offset))
	{
	  ptr = gimple_assign_rhs1 (stmt);
	  extra_offset *= BITS_PER_UNIT;
	}
    }

  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      ref->base = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &t);
      if (ref->base)
	ref->offset = BITS_PER_UNIT * t;
      else
	{
	  range_known = false;
	  ref->offset = 0;
	  ref->base = get_base_address (TREE_OPERAND (ptr, 0));
	}
    }
  else
    {
      gcc_assert (POINTER_TYPE_P (TREE_TYPE (ptr)));
      ref->base = build2 (MEM_REF, char_type_node,
			  ptr, null_pointer_node);
      ref->offset = 0;
    }
  ref->offset += extra_offset + offset;
  if (range_known)
    {
      ref->max_size = max_size;
      ref->size = size;
    }
  else
    ref->max_size = ref->size = -1;
  ref->ref_alias_set = 0;
  ref->base_alias_set = 0;
  ref->volatile_p = false;
}

/* Init an alias-oracle reference representation from a gimple pointer
   PTR and a gimple size SIZE in bytes.  If SIZE is NULL_TREE then the
   size is assumed to be unknown.  The access is assumed to be only
   at or after the pointer target, not before it.  */

void
ao_ref_init_from_ptr_and_size (ao_ref *ref, tree ptr, tree size)
{
  poly_int64 size_hwi;
  if (size
      && poly_int_tree_p (size, &size_hwi)
      && coeffs_in_range_p (size_hwi, 0, HOST_WIDE_INT_MAX / BITS_PER_UNIT))
    {
      size_hwi = size_hwi * BITS_PER_UNIT;
      ao_ref_init_from_ptr_and_range (ref, ptr, true, 0, size_hwi, size_hwi);
    }
  else
    ao_ref_init_from_ptr_and_range (ref, ptr, false, 0, -1, -1);
}
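
/* Illustrative sketch: this initializer is convenient for modelling
   calls whose memory effects are a pointer plus a byte count.  For a
   call like memset (dest, c, n), a client (the helper name below is
   hypothetical) could set up the destination reference as:

     static void
     example_init_memset_dest (ao_ref *ref, gcall *call)
     {
       tree dest = gimple_call_arg (call, 0);
       tree len = gimple_call_arg (call, 2);
       // If LEN is not a compile-time (poly-)constant the size
       // fields simply remain unknown (-1).
       ao_ref_init_from_ptr_and_size (ref, dest, len);
     }
*/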

/* S1 and S2 are TYPE_SIZE or DECL_SIZE.  Compare them:
   Return -1 if S1 < S2
   Return 1 if S1 > S2
   Return 0 if equal or incomparable.  */

static int
compare_sizes (tree s1, tree s2)
{
  if (!s1 || !s2)
    return 0;

  poly_uint64 size1;
  poly_uint64 size2;

  if (!poly_int_tree_p (s1, &size1) || !poly_int_tree_p (s2, &size2))
    return 0;
  if (known_lt (size1, size2))
    return -1;
  if (known_lt (size2, size1))
    return 1;
  return 0;
}

/* Compare TYPE1 and TYPE2 by their size.
   Return -1 if size of TYPE1 < size of TYPE2
   Return 1 if size of TYPE1 > size of TYPE2
   Return 0 if types are of equal sizes or we cannot compare them.  */

static int
compare_type_sizes (tree type1, tree type2)
{
  /* Be conservative for arrays and vectors.  We want to support partial
     overlap on int[3] and int[3] as tested in gcc.dg/torture/alias-2.c.  */
  while (TREE_CODE (type1) == ARRAY_TYPE
	 || TREE_CODE (type1) == VECTOR_TYPE)
    type1 = TREE_TYPE (type1);
  while (TREE_CODE (type2) == ARRAY_TYPE
	 || TREE_CODE (type2) == VECTOR_TYPE)
    type2 = TREE_TYPE (type2);
  return compare_sizes (TYPE_SIZE (type1), TYPE_SIZE (type2));
}

/* Return 1 if TYPE1 and TYPE2 are to be considered equivalent for the
   purpose of TBAA.  Return 0 if they are distinct and -1 if we cannot
   decide.  */

static inline int
same_type_for_tbaa (tree type1, tree type2)
{
  type1 = TYPE_MAIN_VARIANT (type1);
  type2 = TYPE_MAIN_VARIANT (type2);

  /* Handle the most common case first.  */
  if (type1 == type2)
    return 1;

  /* If we would have to do structural comparison bail out.  */
  if (TYPE_STRUCTURAL_EQUALITY_P (type1)
      || TYPE_STRUCTURAL_EQUALITY_P (type2))
    return -1;

  /* Compare the canonical types.  */
  if (TYPE_CANONICAL (type1) == TYPE_CANONICAL (type2))
    return 1;

  /* ??? Array types are not properly unified in all cases as we have
     spurious changes in the index types for example.  Removing this
     causes all sorts of problems with the Fortran frontend.  */
  if (TREE_CODE (type1) == ARRAY_TYPE
      && TREE_CODE (type2) == ARRAY_TYPE)
    return -1;

  /* ??? In Ada, an lvalue of an unconstrained type can be used to access an
     object of one of its constrained subtypes, e.g. when a function with an
     unconstrained parameter passed by reference is called on an object and
     inlined.  But, even in the case of a fixed size, type and subtypes are
     not equivalent enough as to share the same TYPE_CANONICAL, since this
     would mean that conversions between them are useless, whereas they are
     not (e.g. type and subtypes can have different modes).  So, in the end,
     they are only guaranteed to have the same alias set.  */
  alias_set_type set1 = get_alias_set (type1);
  alias_set_type set2 = get_alias_set (type2);
  if (set1 == set2)
    return -1;

  /* Pointers to void are considered compatible with all other pointers,
     so for two pointers see what the alias set resolution thinks.  */
  if (POINTER_TYPE_P (type1)
      && POINTER_TYPE_P (type2)
      && alias_sets_conflict_p (set1, set2))
    return -1;

  /* The types are known to be not equal.  */
  return 0;
}
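
/* To make the tri-state concrete (an informal sketch using the global
   type nodes GCC defines elsewhere):

     same_type_for_tbaa (int_type_node, int_type_node) == 1
     same_type_for_tbaa (int_type_node, float_type_node) == 0

   while two distinct array-of-int types whose TYPE_CANONICAL differ
   only by spurious index-type changes yield -1, i.e. "cannot decide";
   callers then fall back to other disambiguators rather than assume
   either equality or disjointness.  */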

/* Return true if TYPE is a composite type (i.e. we may apply one of handled
   components on it).  */

static bool
type_has_components_p (tree type)
{
  return AGGREGATE_TYPE_P (type) || VECTOR_TYPE_P (type)
	 || TREE_CODE (type) == COMPLEX_TYPE;
}

/* MATCH1 and MATCH2, which are part of the access paths of REF1 and REF2
   respectively, either point to the same address or are completely
   disjoint.  If PARTIAL_OVERLAP is true, assume that outermost arrays may
   just partly overlap.

   Try to disambiguate using the access path starting from the match
   and return false if there is no conflict.

   Helper for aliasing_component_refs_p.  */

static bool
aliasing_matching_component_refs_p (tree match1, tree ref1,
				    poly_int64 offset1, poly_int64 max_size1,
				    tree match2, tree ref2,
				    poly_int64 offset2, poly_int64 max_size2,
				    bool partial_overlap)
{
  poly_int64 offadj, sztmp, msztmp;
  bool reverse;

  if (!partial_overlap)
    {
      get_ref_base_and_extent (match2, &offadj, &sztmp, &msztmp, &reverse);
      offset2 -= offadj;
      get_ref_base_and_extent (match1, &offadj, &sztmp, &msztmp, &reverse);
      offset1 -= offadj;
      if (!ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
	{
	  ++alias_stats.aliasing_component_refs_p_no_alias;
	  return false;
	}
    }

  int cmp = nonoverlapping_refs_since_match_p (match1, ref1, match2, ref2,
					       partial_overlap);
  if (cmp == 1
      || (cmp == -1 && nonoverlapping_component_refs_p (ref1, ref2)))
    {
      ++alias_stats.aliasing_component_refs_p_no_alias;
      return false;
    }
  ++alias_stats.aliasing_component_refs_p_may_alias;
  return true;
}

/* Return true if REF is a reference to a zero-sized trailing array.  I.e.
     struct foo {int bar; int array[0];} *fooptr;
     fooptr->array.  */

static bool
component_ref_to_zero_sized_trailing_array_p (tree ref)
{
  return (TREE_CODE (ref) == COMPONENT_REF
	  && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 1))) == ARRAY_TYPE
	  && (!TYPE_SIZE (TREE_TYPE (TREE_OPERAND (ref, 1)))
	      || integer_zerop (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (ref, 1)))))
	  && array_at_struct_end_p (ref));
}

/* Worker for aliasing_component_refs_p.  Most parameters match parameters of
   aliasing_component_refs_p.

   Walk access path REF2 and try to find type matching TYPE1
   (which is a start of possibly aliasing access path REF1).
   If match is found, try to disambiguate.

   Return 0 for successful disambiguation.
   Return 1 if match was found but disambiguation failed.
   Return -1 if there is no match.
   In this case MAYBE_MATCH is set to false if there is certainly no type
   matching TYPE1 in the access path of REF2 and to true if we are not
   sure.  */

static int
aliasing_component_refs_walk (tree ref1, tree type1, tree base1,
			      poly_int64 offset1, poly_int64 max_size1,
			      tree end_struct_ref1,
			      tree ref2, tree base2,
			      poly_int64 offset2, poly_int64 max_size2,
			      bool *maybe_match)
{
  tree ref = ref2;
  int same_p = 0;

  while (true)
    {
      /* We walk from inner type to the outer types.  If type we see is
	 already too large to be part of type1, terminate the search.  */
      int cmp = compare_type_sizes (type1, TREE_TYPE (ref));

      if (cmp < 0
	  && (!end_struct_ref1
	      || compare_type_sizes (TREE_TYPE (end_struct_ref1),
				     TREE_TYPE (ref)) < 0))
	break;
      /* If types may be of same size, see if we can decide about their
	 equality.  */
      if (cmp == 0)
	{
	  same_p = same_type_for_tbaa (TREE_TYPE (ref), type1);
	  if (same_p == 1)
	    break;
	  /* In case we can't decide whether types are same try to
	     continue looking for the exact match.
	     Remember however that we possibly saw a match
	     to bypass the access path continuations tests we do later.  */
	  if (same_p == -1)
	    *maybe_match = true;
	}
      if (!handled_component_p (ref))
	break;
      ref = TREE_OPERAND (ref, 0);
    }
  if (same_p == 1)
    {
      bool partial_overlap = false;

      /* We assume that arrays can overlap by multiple of their elements
	 size as tested in gcc.dg/torture/alias-2.c.
	 This partial overlap happens only when both arrays are bases of
	 the access and not contained within another component ref.
	 To be safe we also assume partial overlap for VLAs.  */
      if (TREE_CODE (TREE_TYPE (base1)) == ARRAY_TYPE
	  && (!TYPE_SIZE (TREE_TYPE (base1))
	      || TREE_CODE (TYPE_SIZE (TREE_TYPE (base1))) != INTEGER_CST
	      || ref == base2))
	{
	  /* Setting maybe_match to true triggers
	     nonoverlapping_component_refs_p test later that still may do
	     useful disambiguation.  */
	  *maybe_match = true;
	  partial_overlap = true;
	}
      return aliasing_matching_component_refs_p (base1, ref1,
						 offset1, max_size1,
						 ref, ref2,
						 offset2, max_size2,
						 partial_overlap);
    }
  return -1;
}

/* Consider access path1 base1....ref1 and access path2 base2...ref2.
   Return true if they can be composed to a single access path
   base1...ref1...base2...ref2.

   REF_TYPE1 is the type of REF1.  END_STRUCT_PAST_END1 is true if there is
   a trailing array access after REF1 in the non-TBAA part of the access.
   REF1_ALIAS_SET is the alias set of REF1.

   BASE_TYPE2 is the type of base2.  END_STRUCT_REF2 is non-NULL if there is
   a trailing array access in the TBAA part of access path2.
   BASE2_ALIAS_SET is the alias set of base2.  */

bool
access_path_may_continue_p (tree ref_type1, bool end_struct_past_end1,
			    alias_set_type ref1_alias_set,
			    tree base_type2, tree end_struct_ref2,
			    alias_set_type base2_alias_set)
{
  /* Access paths cannot continue past types with no components.  */
  if (!type_has_components_p (ref_type1))
    return false;

  /* If the first access path ends by a type too small to hold the base of
     the second access path, typically the paths cannot continue.

     Punt if end_struct_past_end1 is true.  We want to support arbitrary
     type punning past the first COMPONENT_REF to union because redundant
     store elimination depends on this, see PR92152.  For this reason we
     cannot check the size of the reference because types may partially
     overlap.  */
  if (!end_struct_past_end1)
    {
      if (compare_type_sizes (ref_type1, base_type2) < 0)
	return false;
      /* If path2 contains a trailing array access we can strengthen the
	 check to verify that the size of the element of the trailing array
	 fits as well.  In fact we could check for offset + type_size, but we
	 do not track offsets and this is quite a corner case.  */
      if (end_struct_ref2
	  && compare_type_sizes (ref_type1, TREE_TYPE (end_struct_ref2)) < 0)
	return false;
    }
  return (base2_alias_set == ref1_alias_set
	  || alias_set_subset_of (base2_alias_set, ref1_alias_set));
}

/* Determine if the two component references REF1 and REF2 which are
   based on access types TYPE1 and TYPE2 and of which at least one is based
   on an indirect reference may alias.
   REF1_ALIAS_SET, BASE1_ALIAS_SET, REF2_ALIAS_SET and BASE2_ALIAS_SET
   are the respective alias sets.  */

static bool
aliasing_component_refs_p (tree ref1,
			   alias_set_type ref1_alias_set,
			   alias_set_type base1_alias_set,
			   poly_int64 offset1, poly_int64 max_size1,
			   tree ref2,
			   alias_set_type ref2_alias_set,
			   alias_set_type base2_alias_set,
			   poly_int64 offset2, poly_int64 max_size2)
{
  /* If one reference is a component reference through pointers try to find a
     common base and apply offset based disambiguation.  This handles
     for example
       struct A { int i; int j; } *q;
       struct B { struct A a; int k; } *p;
     disambiguating q->i and p->a.j.  */
  tree base1, base2;
  tree type1, type2;
  bool maybe_match = false;
  tree end_struct_ref1 = NULL, end_struct_ref2 = NULL;
  bool end_struct_past_end1 = false;
  bool end_struct_past_end2 = false;

  /* Choose bases and base types to search for.
     The access path is as follows:
       base....end_of_tbaa_ref...actual_ref
     At one place in the access path there may be a reference to a zero
     sized or trailing array.

     We generally discard the segment after end_of_tbaa_ref, however
     we need to be careful in case it contains a zero sized or trailing
     array.  These may happen after a reference to union and in this case
     we need to not disambiguate type punning scenarios.

     We set:
	base1 to point to base

	ref1 to point to end_of_tbaa_ref

	end_struct_ref1 to point to the trailing reference (if it exists
	in range base....end_of_tbaa_ref)

	end_struct_past_end1 is true if this trailing reference occurs in
	end_of_tbaa_ref...actual_ref.  */
  base1 = ref1;
  while (handled_component_p (base1))
    {
      /* Generally access paths are monotonic in the size of object.  The
	 exception are trailing arrays of structures.  I.e.
	   struct a {int array[0];};
	 or
	   struct a {int array1[0]; int array[];};

	 Such struct has size 0 but accesses to a.array may have non-zero size.
	 In this case the size of TREE_TYPE (base1) is smaller than
	 size of TREE_TYPE (TREE_OPERAND (base1, 0)).

	 Because we compare sizes of arrays just by sizes of their elements,
	 we only need to care about zero sized array fields here.  */
      if (component_ref_to_zero_sized_trailing_array_p (base1))
	{
	  gcc_checking_assert (!end_struct_ref1);
	  end_struct_ref1 = base1;
	}
      if (ends_tbaa_access_path_p (base1))
	{
	  ref1 = TREE_OPERAND (base1, 0);
	  if (end_struct_ref1)
	    {
	      end_struct_past_end1 = true;
	      end_struct_ref1 = NULL;
	    }
	}
      base1 = TREE_OPERAND (base1, 0);
    }
  type1 = TREE_TYPE (base1);
  base2 = ref2;
  while (handled_component_p (base2))
    {
      if (component_ref_to_zero_sized_trailing_array_p (base2))
	{
	  gcc_checking_assert (!end_struct_ref2);
	  end_struct_ref2 = base2;
	}
      if (ends_tbaa_access_path_p (base2))
	{
	  ref2 = TREE_OPERAND (base2, 0);
	  if (end_struct_ref2)
	    {
	      end_struct_past_end2 = true;
	      end_struct_ref2 = NULL;
	    }
	}
      base2 = TREE_OPERAND (base2, 0);
    }
  type2 = TREE_TYPE (base2);

  /* Now search for the type1 in the access path of ref2.  This
     would be a common base for doing offset based disambiguation on.
     This however only makes sense if type2 is big enough to hold type1.  */
  int cmp_outer = compare_type_sizes (type2, type1);

  /* If type2 is big enough to contain type1 walk its access path.
     We also need to care of arrays at the end of structs that may extend
     beyond the end of structure.  If this occurs in the TBAA part of the
     access path, we need to consider the increased type as well.  */
  if (cmp_outer >= 0
      || (end_struct_ref2
	  && compare_type_sizes (TREE_TYPE (end_struct_ref2), type1) >= 0))
    {
      int res = aliasing_component_refs_walk (ref1, type1, base1,
					      offset1, max_size1,
					      end_struct_ref1,
					      ref2, base2, offset2, max_size2,
					      &maybe_match);
      if (res != -1)
	return res;
    }

  /* If we didn't find a common base, try the other way around.  */
  if (cmp_outer <= 0
      || (end_struct_ref1
	  && compare_type_sizes (TREE_TYPE (end_struct_ref1), type1) <= 0))
    {
      int res = aliasing_component_refs_walk (ref2, type2, base2,
					      offset2, max_size2,
					      end_struct_ref2,
					      ref1, base1, offset1, max_size1,
					      &maybe_match);
      if (res != -1)
	return res;
    }

  /* In the following code we make an assumption that the types in access
     paths do not overlap and thus accesses alias only if one path can be
     a continuation of another.  If we were not able to decide about
     equivalence, we need to give up.  */
  if (maybe_match)
    {
      if (!nonoverlapping_component_refs_p (ref1, ref2))
	{
	  ++alias_stats.aliasing_component_refs_p_may_alias;
	  return true;
	}
      ++alias_stats.aliasing_component_refs_p_no_alias;
      return false;
    }

  if (access_path_may_continue_p (TREE_TYPE (ref1), end_struct_past_end1,
				  ref1_alias_set,
				  type2, end_struct_ref2,
				  base2_alias_set)
      || access_path_may_continue_p (TREE_TYPE (ref2), end_struct_past_end2,
				     ref2_alias_set,
				     type1, end_struct_ref1,
				     base1_alias_set))
    {
      ++alias_stats.aliasing_component_refs_p_may_alias;
      return true;
    }
  ++alias_stats.aliasing_component_refs_p_no_alias;
  return false;
}

/* FIELD1 and FIELD2 are two fields of component refs.  We assume
   that bases of both component refs are either equivalent or nonoverlapping.
   We do not assume that the containers of FIELD1 and FIELD2 are of the
   same type or size.

   Return 0 if, assuming the base addresses of the component refs are the
   same, FIELD1 and FIELD2 have the same address.  Note that FIELD1 and
   FIELD2 may not be of the same type or size.

   Return 1 if FIELD1 and FIELD2 are non-overlapping.

   Return -1 otherwise.

   The main difference between 0 and -1 is to let
   nonoverlapping_refs_since_match_p discover the semantically
   equivalent part of the access path.

   Note that this function is used even with -fno-strict-aliasing
   and makes use of no TBAA assumptions.  */

static int
nonoverlapping_component_refs_p_1 (const_tree field1, const_tree field2)
{
  /* If both fields are of the same type, we could save hard work of
     comparing offsets.  */
  tree type1 = DECL_CONTEXT (field1);
  tree type2 = DECL_CONTEXT (field2);

  if (TREE_CODE (type1) == RECORD_TYPE
      && DECL_BIT_FIELD_REPRESENTATIVE (field1))
    field1 = DECL_BIT_FIELD_REPRESENTATIVE (field1);
  if (TREE_CODE (type2) == RECORD_TYPE
      && DECL_BIT_FIELD_REPRESENTATIVE (field2))
    field2 = DECL_BIT_FIELD_REPRESENTATIVE (field2);

  /* ??? Bitfields can overlap at RTL level so punt on them.
     FIXME: RTL expansion should be fixed by adjusting the access path
     when producing MEM_ATTRs for MEMs which are wider than
     the bitfields similarly as done in set_mem_attrs_minus_bitpos.  */
  if (DECL_BIT_FIELD (field1) && DECL_BIT_FIELD (field2))
    return -1;

  /* Assume that different FIELD_DECLs never overlap within a RECORD_TYPE.  */
  if (type1 == type2 && TREE_CODE (type1) == RECORD_TYPE)
    return field1 != field2;

  /* In the common case the offsets and bit offsets will be the same.
     However if frontends do not agree on the alignment, they may be
     different even if they actually represent the same address.
     Try the common case first and if that fails calculate the
     actual bit offset.  */
  if (tree_int_cst_equal (DECL_FIELD_OFFSET (field1),
			  DECL_FIELD_OFFSET (field2))
      && tree_int_cst_equal (DECL_FIELD_BIT_OFFSET (field1),
			     DECL_FIELD_BIT_OFFSET (field2)))
    return 0;

  /* Note that it may be possible to use component_ref_field_offset
     which would provide offsets as trees.  However constructing and folding
     trees is expensive and does not seem to be worth the compile time
     cost.  */

  poly_uint64 offset1, offset2;
  poly_uint64 bit_offset1, bit_offset2;

  if (poly_int_tree_p (DECL_FIELD_OFFSET (field1), &offset1)
      && poly_int_tree_p (DECL_FIELD_OFFSET (field2), &offset2)
      && poly_int_tree_p (DECL_FIELD_BIT_OFFSET (field1), &bit_offset1)
      && poly_int_tree_p (DECL_FIELD_BIT_OFFSET (field2), &bit_offset2))
    {
      offset1 = (offset1 << LOG2_BITS_PER_UNIT) + bit_offset1;
      offset2 = (offset2 << LOG2_BITS_PER_UNIT) + bit_offset2;

      if (known_eq (offset1, offset2))
	return 0;

      poly_uint64 size1, size2;

      if (poly_int_tree_p (DECL_SIZE (field1), &size1)
	  && poly_int_tree_p (DECL_SIZE (field2), &size2)
	  && !ranges_maybe_overlap_p (offset1, size1, offset2, size2))
	return 1;
    }
  /* Resort to slower overlap checking by looking for matching types in
     the middle of access path.  */
  return -1;
}
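
/* A concrete illustration (informal): for

     struct S { int a; int b; } x, y;

   comparing the FIELD_DECLs "a" and "b" (same RECORD_TYPE) returns 1,
   so x.a and y.b cannot overlap whenever their bases are equal or
   disjoint, while comparing "a" with "a" returns 0: if the base
   addresses were equal, the field addresses would coincide exactly.  */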

/* Return low bound of array.  Do not produce new trees
   and thus do not care about particular type of integer constant
   and placeholder exprs.  */

static tree
cheap_array_ref_low_bound (tree ref)
{
  tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));

  /* Avoid expensive array_ref_low_bound.
     The low bound is either stored in operand 2, or it is TYPE_MIN_VALUE
     of the domain type, or it is zero.  */
  if (TREE_OPERAND (ref, 2))
    return TREE_OPERAND (ref, 2);
  else if (domain_type && TYPE_MIN_VALUE (domain_type))
    return TYPE_MIN_VALUE (domain_type);
  else
    return integer_zero_node;
}

/* REF1 and REF2 are ARRAY_REFs with either same base address or which are
   completely disjoint.

   Return 1 if the refs are non-overlapping.
   Return 0 if they are possibly overlapping but if so the overlap again
   starts on the same address.
   Return -1 otherwise.  */

static int
nonoverlapping_array_refs_p (tree ref1, tree ref2)
{
  tree index1 = TREE_OPERAND (ref1, 1);
  tree index2 = TREE_OPERAND (ref2, 1);
  tree low_bound1 = cheap_array_ref_low_bound (ref1);
  tree low_bound2 = cheap_array_ref_low_bound (ref2);

  /* Handle zero offsets first: we do not need to match type size in this
     case.  */
  if (operand_equal_p (index1, low_bound1, 0)
      && operand_equal_p (index2, low_bound2, 0))
    return 0;

  /* If type sizes are different, give up.

     Avoid expensive array_ref_element_size.
     If operand 3 is present it denotes size in the alignment units.
     Otherwise size is TYPE_SIZE of the element type.
     Handle only common cases where types are of the same "kind".  */
  if ((TREE_OPERAND (ref1, 3) == NULL) != (TREE_OPERAND (ref2, 3) == NULL))
    return -1;

  tree elmt_type1 = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref1, 0)));
  tree elmt_type2 = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref2, 0)));

  if (TREE_OPERAND (ref1, 3))
    {
      if (TYPE_ALIGN (elmt_type1) != TYPE_ALIGN (elmt_type2)
	  || !operand_equal_p (TREE_OPERAND (ref1, 3),
			       TREE_OPERAND (ref2, 3), 0))
	return -1;
    }
  else
    {
      if (!operand_equal_p (TYPE_SIZE_UNIT (elmt_type1),
			    TYPE_SIZE_UNIT (elmt_type2), 0))
	return -1;
    }

  /* Since we know that type sizes are the same, there is no need to return
     -1 after this point.  Partial overlap can not be introduced.  */

  /* We may need to fold trees in this case.
     TODO: Handle integer constant case at least.  */
  if (!operand_equal_p (low_bound1, low_bound2, 0))
    return 0;

  if (TREE_CODE (index1) == INTEGER_CST && TREE_CODE (index2) == INTEGER_CST)
    {
      if (tree_int_cst_equal (index1, index2))
	return 0;
      return 1;
    }
  /* TODO: We can use VRP to further disambiguate here.  */
  return 0;
}
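
/* Worked example (informal): for "int a[10];" the refs a[2] and a[3]
   have matching element sizes and different constant indexes, so the
   result is 1 (disjoint).  For a[i] vs. a[i], or any refs whose indexes
   we cannot separate, the result is 0: they may overlap, but only
   starting at the same address, which preserves the invariant the
   callers rely on.  */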

/* Try to disambiguate REF1 and REF2 under the assumption that MATCH1 and
   MATCH2 either point to the same address or are disjoint.
   MATCH1 and MATCH2 are assumed to be refs in the access paths of REF1 and
   REF2 respectively, or NULL in the case we established equivalence of
   bases.  If PARTIAL_OVERLAP is true, assume that the toplevel arrays may
   actually overlap by an exact multiple of their element size.

   This test works by matching the initial segment of the access path
   and does not rely on TBAA, thus is safe for !flag_strict_aliasing if
   the match was determined without use of the TBAA oracle.

   Return 1 if we can determine that component references REF1 and REF2,
   that are within a common DECL, cannot overlap.

   Return 0 if paths are same and thus there is nothing to disambiguate more
   (i.e. there is must alias, assuming there is must alias between MATCH1
   and MATCH2).

   Return -1 if we cannot determine 0 or 1; this happens when non-matching
   types were met in the path.
   In this case it may make sense to continue by other disambiguation
   oracles.  */

static int
nonoverlapping_refs_since_match_p (tree match1, tree ref1,
				   tree match2, tree ref2,
				   bool partial_overlap)
{
  int ntbaa1 = 0, ntbaa2 = 0;
  /* Early return if there are no references to match, we do not need
     to walk the access paths.

     Do not consider this as may-alias for stats - it is more useful
     to have information how many disambiguations happened provided that
     the query was meaningful.  */

  if (match1 == ref1 || !handled_component_p (ref1)
      || match2 == ref2 || !handled_component_p (ref2))
    return -1;

  auto_vec<tree, 16> component_refs1;
  auto_vec<tree, 16> component_refs2;

  /* Create the stack of handled components for REF1.  */
  while (handled_component_p (ref1) && ref1 != match1)
    {
      /* We use TBAA only to re-synchronize after mismatched refs.  So we
	 do not need to truncate access path after TBAA part ends.  */
      if (ends_tbaa_access_path_p (ref1))
	ntbaa1 = 0;
      else
	ntbaa1++;
      component_refs1.safe_push (ref1);
      ref1 = TREE_OPERAND (ref1, 0);
    }

  /* Create the stack of handled components for REF2.  */
  while (handled_component_p (ref2) && ref2 != match2)
    {
      if (ends_tbaa_access_path_p (ref2))
	ntbaa2 = 0;
      else
	ntbaa2++;
      component_refs2.safe_push (ref2);
      ref2 = TREE_OPERAND (ref2, 0);
    }

  if (!flag_strict_aliasing)
    {
      ntbaa1 = 0;
      ntbaa2 = 0;
    }

  bool mem_ref1 = TREE_CODE (ref1) == MEM_REF && ref1 != match1;
  bool mem_ref2 = TREE_CODE (ref2) == MEM_REF && ref2 != match2;

  /* If only one of the access paths starts with MEM_REF check that offset
     is 0 so the addresses stay the same after stripping it.
     TODO: In this case we may walk the other access path until we get same
     offset.

     If both start with MEM_REF, offset has to be same.  */
  if ((mem_ref1 && !mem_ref2 && !integer_zerop (TREE_OPERAND (ref1, 1)))
      || (mem_ref2 && !mem_ref1 && !integer_zerop (TREE_OPERAND (ref2, 1)))
      || (mem_ref1 && mem_ref2
	  && !tree_int_cst_equal (TREE_OPERAND (ref1, 1),
				  TREE_OPERAND (ref2, 1))))
    {
      ++alias_stats.nonoverlapping_refs_since_match_p_may_alias;
      return -1;
    }

  /* TARGET_MEM_REF are never wrapped in handled components, so we do not need
     to handle them here at all.  */
  gcc_checking_assert (TREE_CODE (ref1) != TARGET_MEM_REF
		       && TREE_CODE (ref2) != TARGET_MEM_REF);

  /* Pop the stacks in parallel and examine the COMPONENT_REFs of the same
     rank.  This is sufficient because we start from the same DECL and you
     cannot reference several fields at a time with COMPONENT_REFs (unlike
     with ARRAY_RANGE_REFs for arrays) so you always need the same number
     of them to access a sub-component, unless you're in a union, in which
     case the return value will precisely be false.  */
  while (true)
    {
      /* Track if we have seen an unmatched ref with non-zero offset.  In
	 this case we must look for partial overlaps.  */
      bool seen_unmatched_ref_p = false;

      /* First match ARRAY_REFs and try to disambiguate.  */
      if (!component_refs1.is_empty ()
	  && !component_refs2.is_empty ())
	{
	  unsigned int narray_refs1=0, narray_refs2=0;

	  /* We generally assume that both access paths start by the same
	     sequence of refs.  However if the numbers of array refs are not
	     in sync, try to recover and pop elts until the numbers match.
	     This helps the case where one access path starts by an array
	     and the other by an element.  */
	  for (narray_refs1 = 0; narray_refs1 < component_refs1.length ();
	       narray_refs1++)
	    if (TREE_CODE (component_refs1 [component_refs1.length()
					    - 1 - narray_refs1]) != ARRAY_REF)
	      break;

	  for (narray_refs2 = 0; narray_refs2 < component_refs2.length ();
	       narray_refs2++)
	    if (TREE_CODE (component_refs2 [component_refs2.length()
					    - 1 - narray_refs2]) != ARRAY_REF)
	      break;
	  for (; narray_refs1 > narray_refs2; narray_refs1--)
	    {
	      ref1 = component_refs1.pop ();
	      ntbaa1--;

	      /* If index is non-zero we need to check whether the reference
		 does not break the main invariant that bases are either
		 disjoint or equal.  Consider the example:

		 unsigned char out[][1];
		 out[1]="a";
		 out[i][0];

		 Here bases out and out are same, but after removing the
		 [i] index, this invariant no longer holds, because
		 out[i] points to the middle of array out.

		 TODO: If size of type of the skipped reference is an integer
		 multiple of the size of type of the other reference this
		 invariant can be verified, but even then it is not completely
		 safe with !flag_strict_aliasing if the other reference contains
		 unbounded array accesses.
		 See   */

	      if (!operand_equal_p (TREE_OPERAND (ref1, 1),
				    cheap_array_ref_low_bound (ref1), 0))
		return 0;
	    }
	  for (; narray_refs2 > narray_refs1; narray_refs2--)
	    {
	      ref2 = component_refs2.pop ();
	      ntbaa2--;
	      if (!operand_equal_p (TREE_OPERAND (ref2, 1),
				    cheap_array_ref_low_bound (ref2), 0))
		return 0;
	    }
	  /* Try to disambiguate matched arrays.  */
	  for (unsigned int i = 0; i < narray_refs1; i++)
	    {
	      int cmp = nonoverlapping_array_refs_p (component_refs1.pop (),
						     component_refs2.pop ());
	      ntbaa1--;
	      ntbaa2--;
	      if (cmp == 1 && !partial_overlap)
		{
		  ++alias_stats
		    .nonoverlapping_refs_since_match_p_no_alias;
		  return 1;
		}
	      if (cmp == -1)
		{
		  seen_unmatched_ref_p = true;
		  /* We cannot maintain the invariant that bases are either
		     same or completely disjoint.  However we can still recover
		     from type based alias analysis if we reach references to
		     same sizes.  We do not attempt to match array sizes, so
		     just finish array walking and look for component refs.  */
		  if (ntbaa1 < 0 || ntbaa2 < 0)
		    {
		      ++alias_stats.nonoverlapping_refs_since_match_p_may_alias;
		      return -1;
		    }
		  for (i++; i < narray_refs1; i++)
		    {
		      component_refs1.pop ();
		      component_refs2.pop ();
		      ntbaa1--;
		      ntbaa2--;
		    }
		  break;
		}
	      partial_overlap = false;
	    }
	}

      /* Next look for component_refs.  */
      do
	{
	  if (component_refs1.is_empty ())
	    {
	      ++alias_stats
		.nonoverlapping_refs_since_match_p_must_overlap;
	      return 0;
	    }
	  ref1 = component_refs1.pop ();
	  ntbaa1--;
	  if (TREE_CODE (ref1) != COMPONENT_REF)
	    {
	      seen_unmatched_ref_p = true;
	      if (ntbaa1 < 0 || ntbaa2 < 0)
		{
		  ++alias_stats.nonoverlapping_refs_since_match_p_may_alias;
		  return -1;
		}
	    }
	}
      while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref1, 0))));

      do
	{
	  if (component_refs2.is_empty ())
	    {
	      ++alias_stats
		.nonoverlapping_refs_since_match_p_must_overlap;
	      return 0;
	    }
	  ref2 = component_refs2.pop ();
	  ntbaa2--;
	  if (TREE_CODE (ref2) != COMPONENT_REF)
	    {
	      if (ntbaa1 < 0 || ntbaa2 < 0)
		{
		  ++alias_stats.nonoverlapping_refs_since_match_p_may_alias;
		  return -1;
		}
	      seen_unmatched_ref_p = true;
	    }
	}
      while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref2, 0))));

      /* BIT_FIELD_REF and VIEW_CONVERT_EXPR are taken off the vectors
	 earlier.  */
      gcc_checking_assert (TREE_CODE (ref1) == COMPONENT_REF
			   && TREE_CODE (ref2) == COMPONENT_REF);

      tree field1 = TREE_OPERAND (ref1, 1);
      tree field2 = TREE_OPERAND (ref2, 1);

      /* ??? We cannot simply use the type of operand #0 of the refs here
	 as the Fortran compiler smuggles type punning into COMPONENT_REFs
	 for common blocks instead of using unions like everyone else.  */
      tree type1 = DECL_CONTEXT (field1);
      tree type2 = DECL_CONTEXT (field2);

      partial_overlap = false;

      /* If we skipped array refs on types of different sizes, we can
	 no longer be sure that there are not partial overlaps.  */
      if (seen_unmatched_ref_p && ntbaa1 >= 0 && ntbaa2 >= 0
	  && !operand_equal_p (TYPE_SIZE (type1), TYPE_SIZE (type2), 0))
	{
	  ++alias_stats
	    .nonoverlapping_refs_since_match_p_may_alias;
	  return -1;
	}

      int cmp = nonoverlapping_component_refs_p_1 (field1, field2);
      if (cmp == -1)
	{
	  ++alias_stats
	    .nonoverlapping_refs_since_match_p_may_alias;
	  return -1;
	}
      else if (cmp == 1)
	{
	  ++alias_stats
	    .nonoverlapping_refs_since_match_p_no_alias;
	  return 1;
	}
    }
}
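
/* Worked example (informal): with

     struct S { struct { int x; int y; } a, b; } *p;

   and MATCH1 == MATCH2 == *p, the access paths of p->a.x and p->b.y are
   popped in parallel; the first COMPONENT_REF rank already compares
   fields "a" and "b" of the same record, so the result is 1 (disjoint).
   For p->a.x against itself the paths match completely and the result
   is 0 (must overlap).  */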

/* Return TYPE_UID which can be used to match record types we consider
   same for TBAA purposes.  */

static inline int
ncr_type_uid (const_tree field)
{
  /* ??? We cannot simply use the type of operand #0 of the refs here
     as the Fortran compiler smuggles type punning into COMPONENT_REFs
     for common blocks instead of using unions like everyone else.  */
  tree type = DECL_FIELD_CONTEXT (field);
  /* With LTO, types considered the same by same_type_for_tbaa_p that come
     from different translation units may not have the same main variant.
     They do, however, have the same TYPE_CANONICAL.  */
  if (TYPE_CANONICAL (type))
    return TYPE_UID (TYPE_CANONICAL (type));
  return TYPE_UID (type);
}
/* qsort compare function to sort FIELD_DECLs by their
   DECL_FIELD_CONTEXT TYPE_UID.  */

static inline int
ncr_compar (const void *field1_, const void *field2_)
{
  const_tree field1 = *(const_tree *) const_cast <void *>(field1_);
  const_tree field2 = *(const_tree *) const_cast <void *>(field2_);
  unsigned int uid1 = ncr_type_uid (field1);
  unsigned int uid2 = ncr_type_uid (field2);

  if (uid1 < uid2)
    return -1;
  else if (uid1 > uid2)
    return 1;
  return 0;
}
/* Return true if we can determine that the fields referenced cannot
   overlap for any pair of objects.  This relies on TBAA.  */

static bool
nonoverlapping_component_refs_p (const_tree x, const_tree y)
{
  /* Early return if we have nothing to do.

     Do not consider this as may-alias for stats - it is more useful
     to know how many disambiguations happened provided that the query
     was meaningful.  */
  if (!flag_strict_aliasing
      || !x || !y
      || !handled_component_p (x)
      || !handled_component_p (y))
    return false;

  auto_vec<const_tree, 16> fieldsx;
  while (handled_component_p (x))
    {
      if (TREE_CODE (x) == COMPONENT_REF)
	{
	  tree field = TREE_OPERAND (x, 1);
	  tree type = DECL_FIELD_CONTEXT (field);
	  if (TREE_CODE (type) == RECORD_TYPE)
	    fieldsx.safe_push (field);
	}
      else if (ends_tbaa_access_path_p (x))
	fieldsx.truncate (0);
      x = TREE_OPERAND (x, 0);
    }
  if (fieldsx.length () == 0)
    return false;
  auto_vec<const_tree, 16> fieldsy;
  while (handled_component_p (y))
    {
      if (TREE_CODE (y) == COMPONENT_REF)
	{
	  tree field = TREE_OPERAND (y, 1);
	  tree type = DECL_FIELD_CONTEXT (field);
	  if (TREE_CODE (type) == RECORD_TYPE)
	    fieldsy.safe_push (TREE_OPERAND (y, 1));
	}
      else if (ends_tbaa_access_path_p (y))
	fieldsy.truncate (0);
      y = TREE_OPERAND (y, 0);
    }
  if (fieldsy.length () == 0)
    {
      ++alias_stats.nonoverlapping_component_refs_p_may_alias;
      return false;
    }

  /* Most common case first.  */
  if (fieldsx.length () == 1
      && fieldsy.length () == 1)
    {
      if (same_type_for_tbaa (DECL_FIELD_CONTEXT (fieldsx[0]),
			      DECL_FIELD_CONTEXT (fieldsy[0])) == 1
	  && nonoverlapping_component_refs_p_1 (fieldsx[0], fieldsy[0]) == 1)
	{
	  ++alias_stats.nonoverlapping_component_refs_p_no_alias;
	  return true;
	}
      else
	{
	  ++alias_stats.nonoverlapping_component_refs_p_may_alias;
	  return false;
	}
    }

  if (fieldsx.length () == 2)
    {
      if (ncr_compar (&fieldsx[0], &fieldsx[1]) == 1)
	std::swap (fieldsx[0], fieldsx[1]);
    }
  else
    fieldsx.qsort (ncr_compar);

  if (fieldsy.length () == 2)
    {
      if (ncr_compar (&fieldsy[0], &fieldsy[1]) == 1)
	std::swap (fieldsy[0], fieldsy[1]);
    }
  else
    fieldsy.qsort (ncr_compar);

  unsigned i = 0, j = 0;
  do
    {
      const_tree fieldx = fieldsx[i];
      const_tree fieldy = fieldsy[j];

      /* We're left with accessing different fields of a structure,
	 no possible overlap.  */
      if (same_type_for_tbaa (DECL_FIELD_CONTEXT (fieldx),
			      DECL_FIELD_CONTEXT (fieldy)) == 1
	  && nonoverlapping_component_refs_p_1 (fieldx, fieldy) == 1)
	{
	  ++alias_stats.nonoverlapping_component_refs_p_no_alias;
	  return true;
	}

      if (ncr_type_uid (fieldx) < ncr_type_uid (fieldy))
	{
	  i++;
	  if (i == fieldsx.length ())
	    break;
	}
      else
	{
	  j++;
	  if (j == fieldsy.length ())
	    break;
	}
    }
  while (1);

  ++alias_stats.nonoverlapping_component_refs_p_may_alias;
  return false;
}
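/* Illustrative sketch (example added here, not from the sources): given

     struct S { int i; int j; };
     struct S *p, *q;

   the accesses p->i and q->j are collected as FIELD_DECLs of the same
   RECORD_TYPE, and nonoverlapping_component_refs_p_1 proves the fields
   occupy disjoint byte ranges within every object of type S; hence the
   references cannot overlap regardless of whether p == q.  */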
/* Return true if two memory references based on the variables BASE1
   and BASE2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
   [OFFSET2, OFFSET2 + MAX_SIZE2) may alias.  REF1 and REF2,
   if non-NULL, are the complete memory reference trees.  */

static bool
decl_refs_may_alias_p (tree ref1, tree base1,
		       poly_int64 offset1, poly_int64 max_size1,
		       poly_int64 size1,
		       tree ref2, tree base2,
		       poly_int64 offset2, poly_int64 max_size2,
		       poly_int64 size2)
{
  gcc_checking_assert (DECL_P (base1) && DECL_P (base2));

  /* If both references are based on different variables, they cannot
     alias.  */
  if (compare_base_decls (base1, base2) == 0)
    return false;

  /* If both references are based on the same variable, they cannot alias if
     the accesses do not overlap.  */
  if (!ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
    return false;

  /* If there is a must-alias, there is no use disambiguating further.  */
  if (known_eq (size1, max_size1) && known_eq (size2, max_size2))
    return true;

  /* For components with variable position, the above test isn't sufficient,
     so we disambiguate component references manually.  */
  if (ref1 && ref2
      && handled_component_p (ref1) && handled_component_p (ref2)
      && nonoverlapping_refs_since_match_p (NULL, ref1, NULL, ref2, false) == 1)
    return false;

  return true;
}
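/* Worked example (illustrative only): for

     int a, b;
     a = 1;	// ref1, base1 == a
     b = 2;	// ref2, base2 == b

   compare_base_decls (a, b) is 0, so false is returned without any offset
   computation.  For two accesses to distinct byte ranges of the same decl
   the ranges_maybe_overlap_p check decides instead, and only variable-
   position components need the access-path walk.  */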
/* Return true if the access with BASE is view converted.
   BASE must not have the inner MEM_REF (&decl) stripped (which
   ao_ref_base does), thus one extra walk of handled components
   is needed.  */

static bool
view_converted_memref_p (tree base)
{
  if (TREE_CODE (base) != MEM_REF && TREE_CODE (base) != TARGET_MEM_REF)
    return false;
  return same_type_for_tbaa (TREE_TYPE (base),
			     TREE_TYPE (TREE_TYPE (TREE_OPERAND (base, 1)))) != 1;
}
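/* For instance (hypothetical GIMPLE): in

     ... = MEM <int> [(long *)&x];

   the access type is int while the pointer operand's pointed-to type is
   long; same_type_for_tbaa on the two is not 1, so the access counts as
   view converted and TBAA conclusions about the access path must be
   given up.  */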
/* Return true if an indirect reference based on *PTR1 constrained
   to [OFFSET1, OFFSET1 + MAX_SIZE1) may alias a variable based on BASE2
   constrained to [OFFSET2, OFFSET2 + MAX_SIZE2).  *PTR1 and BASE2 have
   the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1,
   in which case they are computed on-demand.  REF1 and REF2,
   if non-NULL, are the complete memory reference trees.  */

static bool
indirect_ref_may_alias_decl_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
			       poly_int64 offset1, poly_int64 max_size1,
			       poly_int64 size1,
			       alias_set_type ref1_alias_set,
			       alias_set_type base1_alias_set,
			       tree ref2 ATTRIBUTE_UNUSED, tree base2,
			       poly_int64 offset2, poly_int64 max_size2,
			       poly_int64 size2,
			       alias_set_type ref2_alias_set,
			       alias_set_type base2_alias_set, bool tbaa_p)
{
  tree ptr1;
  tree ptrtype1, dbase2;

  gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
			|| TREE_CODE (base1) == TARGET_MEM_REF)
		       && DECL_P (base2));

  ptr1 = TREE_OPERAND (base1, 0);
  poly_offset_int moff = mem_ref_offset (base1) << LOG2_BITS_PER_UNIT;

  /* If only one reference is based on a variable, they cannot alias if
     the pointer access is beyond the extent of the variable access.
     (the pointer base cannot validly point to an offset less than zero
     of the variable).
     ??? IVOPTs creates bases that do not honor this restriction,
     so do not apply this optimization for TARGET_MEM_REFs.  */
  if (TREE_CODE (base1) != TARGET_MEM_REF
      && !ranges_maybe_overlap_p (offset1 + moff, -1, offset2, max_size2))
    return false;

  /* If the pointer based access is bigger than the variable they cannot
     alias.  This is similar to the check below where we use TBAA to
     increase the size of the pointer based access based on the dynamic
     type of a containing object we can infer from it.  */
  poly_int64 dsize2;
  if (known_size_p (size1)
      && poly_int_tree_p (DECL_SIZE (base2), &dsize2)
      && known_lt (dsize2, size1))
    return false;

  /* They also cannot alias if the pointer may not point to the decl.  */
  if (!ptr_deref_may_alias_decl_p (ptr1, base2))
    return false;

  /* Disambiguations that rely on strict aliasing rules follow.  */
  if (!flag_strict_aliasing || !tbaa_p)
    return true;

  /* If the alias set for a pointer access is zero all bets are off.  */
  if (base1_alias_set == 0 || base2_alias_set == 0)
    return true;

  /* When we are trying to disambiguate an access with a pointer dereference
     as base versus one with a decl as base we can use both the size
     of the decl and its dynamic type for extra disambiguation.
     ??? We do not know anything about the dynamic type of the decl
     other than that its alias-set contains base2_alias_set as a subset,
     which does not help us here.  */
  /* As we know nothing useful about the dynamic type of the decl just
     use the usual conflict check rather than a subset test.
     ??? We could introduce -fvery-strict-aliasing when the language
     does not allow decls to have a dynamic type that differs from their
     static type.  Then we can check
     !alias_set_subset_of (base1_alias_set, base2_alias_set) instead.  */
  if (base1_alias_set != base2_alias_set
      && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
    return false;

  ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));

  /* If the size of the access relevant for TBAA through the pointer
     is bigger than the size of the decl, we can't possibly access the
     decl via that pointer.  */
  if (/* ??? This in turn may run afoul when a decl of type T which is
	 a member of union type U is accessed through a pointer to
	 type U and sizeof T is smaller than sizeof U.  */
      TREE_CODE (TREE_TYPE (ptrtype1)) != UNION_TYPE
      && TREE_CODE (TREE_TYPE (ptrtype1)) != QUAL_UNION_TYPE
      && compare_sizes (DECL_SIZE (base2),
			TYPE_SIZE (TREE_TYPE (ptrtype1))) < 0)
    return false;

  if (!ref2)
    return true;

  /* If the decl is accessed via a MEM_REF, reconstruct the base
     we can use for TBAA and an appropriately adjusted offset.  */
  dbase2 = ref2;
  while (handled_component_p (dbase2))
    dbase2 = TREE_OPERAND (dbase2, 0);
  poly_int64 doffset1 = offset1;
  poly_offset_int doffset2 = offset2;
  if (TREE_CODE (dbase2) == MEM_REF
      || TREE_CODE (dbase2) == TARGET_MEM_REF)
    {
      doffset2 -= mem_ref_offset (dbase2) << LOG2_BITS_PER_UNIT;
      tree ptrtype2 = TREE_TYPE (TREE_OPERAND (dbase2, 1));
      /* If the second reference is view-converted, give up now.  */
      if (same_type_for_tbaa (TREE_TYPE (dbase2), TREE_TYPE (ptrtype2)) != 1)
	return true;
    }

  /* If the first reference is view-converted, give up now.  */
  if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1)
    return true;

  /* If both references are through the same type, they do not alias
     if the accesses do not overlap.  This does extra disambiguation
     for mixed/pointer accesses but requires strict aliasing.
     For MEM_REFs we require that the component-ref offset we computed
     is relative to the start of the type, which we ensure by
     comparing rvalue and access type and disregarding the constant
     pointer offset.

     But avoid treating variable length arrays as "objects"; instead
     assume they can overlap by an exact multiple of their element size.
     See gcc.dg/torture/alias-2.c.  */
  if (((TREE_CODE (base1) != TARGET_MEM_REF
	|| (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
       && (TREE_CODE (dbase2) != TARGET_MEM_REF
	   || (!TMR_INDEX (dbase2) && !TMR_INDEX2 (dbase2))))
      && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (dbase2)) == 1)
    {
      bool partial_overlap = (TREE_CODE (TREE_TYPE (base1)) == ARRAY_TYPE
			      && (TYPE_SIZE (TREE_TYPE (base1))
				  && TREE_CODE (TYPE_SIZE (TREE_TYPE (base1)))
				     != INTEGER_CST));
      if (!partial_overlap
	  && !ranges_maybe_overlap_p (doffset1, max_size1, doffset2, max_size2))
	return false;
      if (!ref1 || !ref2
	  /* If there is a must-alias, there is no use disambiguating
	     further.  */
	  || (!partial_overlap
	      && known_eq (size1, max_size1) && known_eq (size2, max_size2)))
	return true;
      int res = nonoverlapping_refs_since_match_p (base1, ref1, base2, ref2,
						   partial_overlap);
      if (res == -1)
	return !nonoverlapping_component_refs_p (ref1, ref2);
      return !res;
    }

  /* Do access-path based disambiguation.  */
  if (ref1 && ref2
      && (handled_component_p (ref1) || handled_component_p (ref2)))
    return aliasing_component_refs_p (ref1,
				      ref1_alias_set, base1_alias_set,
				      offset1, max_size1,
				      ref2,
				      ref2_alias_set, base2_alias_set,
				      offset2, max_size2);

  return true;
}
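/* Illustrative example (not from the sources): with

     double d;
     int *p = ...;
     ... = *p;	// indirect ref through p vs. the decl d

   the alias sets of int and double do not conflict, so under
   -fstrict-aliasing the pair is disambiguated before any offset-based
   reasoning.  With a char * pointer (alias set 0) all bets are off and
   the function falls back to points-to information and the access-path
   checks above.  */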
/* Return true if two indirect references based on *PTR1
   and *PTR2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
   [OFFSET2, OFFSET2 + MAX_SIZE2) may alias.  *PTR1 and *PTR2 have
   the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1,
   in which case they are computed on-demand.  REF1 and REF2,
   if non-NULL, are the complete memory reference trees.  */

static bool
indirect_refs_may_alias_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
			   poly_int64 offset1, poly_int64 max_size1,
			   poly_int64 size1,
			   alias_set_type ref1_alias_set,
			   alias_set_type base1_alias_set,
			   tree ref2 ATTRIBUTE_UNUSED, tree base2,
			   poly_int64 offset2, poly_int64 max_size2,
			   poly_int64 size2,
			   alias_set_type ref2_alias_set,
			   alias_set_type base2_alias_set, bool tbaa_p)
{
  tree ptr1;
  tree ptr2;
  tree ptrtype1, ptrtype2;

  gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
			|| TREE_CODE (base1) == TARGET_MEM_REF)
		       && (TREE_CODE (base2) == MEM_REF
			   || TREE_CODE (base2) == TARGET_MEM_REF));

  ptr1 = TREE_OPERAND (base1, 0);
  ptr2 = TREE_OPERAND (base2, 0);

  /* If both bases are based on pointers they cannot alias if they may not
     point to the same memory object or if they point to the same object
     and the accesses do not overlap.  */
  if ((!cfun || gimple_in_ssa_p (cfun))
      && operand_equal_p (ptr1, ptr2, 0)
      && (((TREE_CODE (base1) != TARGET_MEM_REF
	    || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
	   && (TREE_CODE (base2) != TARGET_MEM_REF
	       || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2))))
	  || (TREE_CODE (base1) == TARGET_MEM_REF
	      && TREE_CODE (base2) == TARGET_MEM_REF
	      && (TMR_STEP (base1) == TMR_STEP (base2)
		  || (TMR_STEP (base1) && TMR_STEP (base2)
		      && operand_equal_p (TMR_STEP (base1),
					  TMR_STEP (base2), 0)))
	      && (TMR_INDEX (base1) == TMR_INDEX (base2)
		  || (TMR_INDEX (base1) && TMR_INDEX (base2)
		      && operand_equal_p (TMR_INDEX (base1),
					  TMR_INDEX (base2), 0)))
	      && (TMR_INDEX2 (base1) == TMR_INDEX2 (base2)
		  || (TMR_INDEX2 (base1) && TMR_INDEX2 (base2)
		      && operand_equal_p (TMR_INDEX2 (base1),
					  TMR_INDEX2 (base2), 0))))))
    {
      poly_offset_int moff1 = mem_ref_offset (base1) << LOG2_BITS_PER_UNIT;
      poly_offset_int moff2 = mem_ref_offset (base2) << LOG2_BITS_PER_UNIT;
      if (!ranges_maybe_overlap_p (offset1 + moff1, max_size1,
				   offset2 + moff2, max_size2))
	return false;
      /* If there is a must-alias, there is no use disambiguating
	 further.  */
      if (known_eq (size1, max_size1) && known_eq (size2, max_size2))
	return true;
      if (ref1 && ref2)
	{
	  int res = nonoverlapping_refs_since_match_p (NULL, ref1, NULL, ref2,
						       false);
	  if (res != -1)
	    return !res;
	}
    }
  if (!ptr_derefs_may_alias_p (ptr1, ptr2))
    return false;

  /* Disambiguations that rely on strict aliasing rules follow.  */
  if (!flag_strict_aliasing || !tbaa_p)
    return true;

  ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));
  ptrtype2 = TREE_TYPE (TREE_OPERAND (base2, 1));

  /* If the alias set for a pointer access is zero all bets are off.  */
  if (base1_alias_set == 0
      || base2_alias_set == 0)
    return true;

  /* Do type-based disambiguation.  */
  if (base1_alias_set != base2_alias_set
      && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
    return false;

  /* If either reference is view-converted, give up now.  */
  if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1
      || same_type_for_tbaa (TREE_TYPE (base2), TREE_TYPE (ptrtype2)) != 1)
    return true;

  /* If both references are through the same type, they do not alias
     if the accesses do not overlap.  This does extra disambiguation
     for mixed/pointer accesses but requires strict aliasing.  */
  if ((TREE_CODE (base1) != TARGET_MEM_REF
       || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
      && (TREE_CODE (base2) != TARGET_MEM_REF
	  || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2)))
      && same_type_for_tbaa (TREE_TYPE (ptrtype1),
			     TREE_TYPE (ptrtype2)) == 1)
    {
      /* But avoid treating arrays as "objects"; instead assume they
	 can overlap by an exact multiple of their element size.
	 See gcc.dg/torture/alias-2.c.  */
      bool partial_overlap = TREE_CODE (TREE_TYPE (ptrtype1)) == ARRAY_TYPE;

      if (!partial_overlap
	  && !ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
	return false;
      if (!ref1 || !ref2
	  || (!partial_overlap
	      && known_eq (size1, max_size1) && known_eq (size2, max_size2)))
	return true;
      int res = nonoverlapping_refs_since_match_p (base1, ref1, base2, ref2,
						   partial_overlap);
      if (res == -1)
	return !nonoverlapping_component_refs_p (ref1, ref2);
      return !res;
    }

  /* Do access-path based disambiguation.  */
  if (ref1 && ref2
      && (handled_component_p (ref1) || handled_component_p (ref2)))
    return aliasing_component_refs_p (ref1,
				      ref1_alias_set, base1_alias_set,
				      offset1, max_size1,
				      ref2,
				      ref2_alias_set, base2_alias_set,
				      offset2, max_size2);

  return true;
}
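/* For example (illustrative): two dereferences of the same SSA pointer

     p->a = ...;	// MEM_REF (p), constant offset of field a
     ... = p->b;	// MEM_REF (p), constant offset of field b

   share an equal base pointer, so the early same-pointer path above
   compares only the constant offsets and sizes; disjoint field ranges
   prove no alias without consulting TBAA or points-to data.  */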
/* Return true if the two memory references REF1 and REF2 may alias.  */

static bool
refs_may_alias_p_2 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
{
  tree base1, base2;
  poly_int64 offset1 = 0, offset2 = 0;
  poly_int64 max_size1 = -1, max_size2 = -1;
  bool var1_p, var2_p, ind1_p, ind2_p;

  gcc_checking_assert ((!ref1->ref
			|| TREE_CODE (ref1->ref) == SSA_NAME
			|| DECL_P (ref1->ref)
			|| TREE_CODE (ref1->ref) == STRING_CST
			|| handled_component_p (ref1->ref)
			|| TREE_CODE (ref1->ref) == MEM_REF
			|| TREE_CODE (ref1->ref) == TARGET_MEM_REF
			|| TREE_CODE (ref1->ref) == WITH_SIZE_EXPR)
		       && (!ref2->ref
			   || TREE_CODE (ref2->ref) == SSA_NAME
			   || DECL_P (ref2->ref)
			   || TREE_CODE (ref2->ref) == STRING_CST
			   || handled_component_p (ref2->ref)
			   || TREE_CODE (ref2->ref) == MEM_REF
			   || TREE_CODE (ref2->ref) == TARGET_MEM_REF
			   || TREE_CODE (ref2->ref) == WITH_SIZE_EXPR));

  /* Decompose the references into their base objects and the access.  */
  base1 = ao_ref_base (ref1);
  offset1 = ref1->offset;
  max_size1 = ref1->max_size;
  base2 = ao_ref_base (ref2);
  offset2 = ref2->offset;
  max_size2 = ref2->max_size;

  /* We can end up with registers or constants as bases for example from
     *D.1663_44 = VIEW_CONVERT_EXPR<struct DB_LSN>(__tmp$B0F64_59);
     which is seen as a struct copy.  */
  if (TREE_CODE (base1) == SSA_NAME
      || TREE_CODE (base1) == CONST_DECL
      || TREE_CODE (base1) == CONSTRUCTOR
      || TREE_CODE (base1) == ADDR_EXPR
      || CONSTANT_CLASS_P (base1)
      || TREE_CODE (base2) == SSA_NAME
      || TREE_CODE (base2) == CONST_DECL
      || TREE_CODE (base2) == CONSTRUCTOR
      || TREE_CODE (base2) == ADDR_EXPR
      || CONSTANT_CLASS_P (base2))
    return false;

  /* We can end up referring to code via function and label decls.
     As we likely do not properly track code aliases, conservatively
     bail out.  */
  if (TREE_CODE (base1) == FUNCTION_DECL
      || TREE_CODE (base1) == LABEL_DECL
      || TREE_CODE (base2) == FUNCTION_DECL
      || TREE_CODE (base2) == LABEL_DECL)
    return true;

  /* Two volatile accesses always conflict.  */
  if (ref1->volatile_p
      && ref2->volatile_p)
    return true;

  /* refN->ref may convey size information; do not confuse our workers
     with that but strip it - ao_ref_base took it into account already.  */
  tree ref1ref = ref1->ref;
  if (ref1ref && TREE_CODE (ref1ref) == WITH_SIZE_EXPR)
    ref1ref = TREE_OPERAND (ref1ref, 0);
  tree ref2ref = ref2->ref;
  if (ref2ref && TREE_CODE (ref2ref) == WITH_SIZE_EXPR)
    ref2ref = TREE_OPERAND (ref2ref, 0);

  /* Defer to simple offset based disambiguation if we have
     references based on two decls.  Do this before deferring to
     TBAA to handle must-alias cases in conformance with the
     GCC extension of allowing type-punning through unions.  */
  var1_p = DECL_P (base1);
  var2_p = DECL_P (base2);
  if (var1_p && var2_p)
    return decl_refs_may_alias_p (ref1ref, base1, offset1, max_size1,
				  ref1->size,
				  ref2ref, base2, offset2, max_size2,
				  ref2->size);

  /* Handle restrict based accesses.
     ??? ao_ref_base strips inner MEM_REF [&decl], recover from that
     here.  */
  tree rbase1 = base1;
  tree rbase2 = base2;
  if (var1_p)
    {
      rbase1 = ref1ref;
      if (rbase1)
	while (handled_component_p (rbase1))
	  rbase1 = TREE_OPERAND (rbase1, 0);
    }
  if (var2_p)
    {
      rbase2 = ref2ref;
      if (rbase2)
	while (handled_component_p (rbase2))
	  rbase2 = TREE_OPERAND (rbase2, 0);
    }
  if (rbase1 && rbase2
      && (TREE_CODE (base1) == MEM_REF || TREE_CODE (base1) == TARGET_MEM_REF)
      && (TREE_CODE (base2) == MEM_REF || TREE_CODE (base2) == TARGET_MEM_REF)
      /* If the accesses are in the same restrict clique...  */
      && MR_DEPENDENCE_CLIQUE (base1) == MR_DEPENDENCE_CLIQUE (base2)
      /* But based on different pointers they do not alias.  */
      && MR_DEPENDENCE_BASE (base1) != MR_DEPENDENCE_BASE (base2))
    return false;

  ind1_p = (TREE_CODE (base1) == MEM_REF
	    || TREE_CODE (base1) == TARGET_MEM_REF);
  ind2_p = (TREE_CODE (base2) == MEM_REF
	    || TREE_CODE (base2) == TARGET_MEM_REF);

  /* Canonicalize the pointer-vs-decl case.  */
  if (ind1_p && var2_p)
    {
      std::swap (offset1, offset2);
      std::swap (max_size1, max_size2);
      std::swap (base1, base2);
      std::swap (ref1, ref2);
      std::swap (ref1ref, ref2ref);
      var1_p = true;
      ind1_p = false;
      var2_p = false;
      ind2_p = true;
    }

  /* First defer to TBAA if possible.  */
  if (tbaa_p
      && flag_strict_aliasing
      && !alias_sets_conflict_p (ao_ref_alias_set (ref1),
				 ao_ref_alias_set (ref2)))
    return false;

  /* If the reference is based on a pointer that points to memory
     that may not be written to then the other reference cannot possibly
     clobber it.  */
  if ((TREE_CODE (TREE_OPERAND (base2, 0)) == SSA_NAME
       && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base2, 0)))
      || (ind1_p
	  && TREE_CODE (TREE_OPERAND (base1, 0)) == SSA_NAME
	  && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base1, 0))))
    return false;

  /* Dispatch to the pointer-vs-decl or pointer-vs-pointer disambiguators.  */
  if (var1_p && ind2_p)
    return indirect_ref_may_alias_decl_p (ref2ref, base2,
					  offset2, max_size2, ref2->size,
					  ao_ref_alias_set (ref2),
					  ao_ref_base_alias_set (ref2),
					  ref1ref, base1,
					  offset1, max_size1, ref1->size,
					  ao_ref_alias_set (ref1),
					  ao_ref_base_alias_set (ref1),
					  tbaa_p);
  else if (ind1_p && ind2_p)
    return indirect_refs_may_alias_p (ref1ref, base1,
				      offset1, max_size1, ref1->size,
				      ao_ref_alias_set (ref1),
				      ao_ref_base_alias_set (ref1),
				      ref2ref, base2,
				      offset2, max_size2, ref2->size,
				      ao_ref_alias_set (ref2),
				      ao_ref_base_alias_set (ref2),
				      tbaa_p);

  gcc_unreachable ();
}
/* Return true if the two memory references REF1 and REF2 may alias
   and update statistics.  */

bool
refs_may_alias_p_1 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
{
  bool res = refs_may_alias_p_2 (ref1, ref2, tbaa_p);
  if (res)
    ++alias_stats.refs_may_alias_p_may_alias;
  else
    ++alias_stats.refs_may_alias_p_no_alias;
  return res;
}

static bool
refs_may_alias_p (tree ref1, ao_ref *ref2, bool tbaa_p)
{
  ao_ref r1;
  ao_ref_init (&r1, ref1);
  return refs_may_alias_p_1 (&r1, ref2, tbaa_p);
}

bool
refs_may_alias_p (tree ref1, tree ref2, bool tbaa_p)
{
  ao_ref r1, r2;
  ao_ref_init (&r1, ref1);
  ao_ref_init (&r2, ref2);
  return refs_may_alias_p_1 (&r1, &r2, tbaa_p);
}
/* Returns true if there is an anti-dependence for the STORE that
   executes after the LOAD.  */

bool
refs_anti_dependent_p (tree load, tree store)
{
  ao_ref r1, r2;
  ao_ref_init (&r1, load);
  ao_ref_init (&r2, store);
  return refs_may_alias_p_1 (&r1, &r2, false);
}

/* Returns true if there is an output dependence for the stores
   STORE1 and STORE2.  */

bool
refs_output_dependent_p (tree store1, tree store2)
{
  ao_ref r1, r2;
  ao_ref_init (&r1, store1);
  ao_ref_init (&r2, store2);
  return refs_may_alias_p_1 (&r1, &r2, false);
}
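/* Usage sketch (hypothetical statements): given the pair

     x = *p;	// load
     *q = y;	// store executing later

   refs_anti_dependent_p (*p, *q) asks whether the store may modify the
   memory read by the load.  TBAA is intentionally disabled (tbaa_p is
   false) because a store may legally change the dynamic type of a
   location (e.g. through a union or placement new), so type-based
   reasoning must not be used to break anti- or output dependences.  */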
/* Returns true if and only if REF may alias any access stored in TT.
   If TBAA_P is true, use the TBAA oracle.  */

static bool
modref_may_conflict (const gcall *stmt,
		     modref_tree <alias_set_type> *tt, ao_ref *ref, bool tbaa_p)
{
  alias_set_type base_set, ref_set;

  if (tt->every_base)
    return true;

  if (!dbg_cnt (ipa_mod_ref))
    return true;

  base_set = ao_ref_base_alias_set (ref);

  ref_set = ao_ref_alias_set (ref);

  int num_tests = 0, max_tests = param_modref_max_tests;
  for (auto base_node : tt->bases)
    {
      if (tbaa_p && flag_strict_aliasing)
	{
	  if (num_tests >= max_tests)
	    return true;
	  alias_stats.modref_tests++;
	  if (!alias_sets_conflict_p (base_set, base_node->base))
	    continue;
	  num_tests++;
	}

      if (base_node->every_ref)
	return true;

      for (auto ref_node : base_node->refs)
	{
	  /* Do not repeat the same test as before.  */
	  if ((ref_set != base_set || base_node->base != ref_node->ref)
	      && tbaa_p && flag_strict_aliasing)
	    {
	      if (num_tests >= max_tests)
		return true;
	      alias_stats.modref_tests++;
	      if (!alias_sets_conflict_p (ref_set, ref_node->ref))
		continue;
	      num_tests++;
	    }

	  if (ref_node->every_access)
	    return true;

	  /* TBAA checks did not disambiguate, try individual accesses.  */
	  for (auto access_node : ref_node->accesses)
	    {
	      if (num_tests >= max_tests)
		return true;

	      tree arg = access_node.get_call_arg (stmt);
	      if (!arg)
		return true;

	      alias_stats.modref_baseptr_tests++;

	      if (integer_zerop (arg) && flag_delete_null_pointer_checks)
		continue;

	      /* The PTA oracle will be unhappy if ARG is not a pointer.  */
	      if (!POINTER_TYPE_P (TREE_TYPE (arg)))
		return true;

	      /* If we don't have a base pointer, give up.  */
	      if (!ref->ref && !ref->base)
		continue;

	      ao_ref ref2;
	      if (access_node.get_ao_ref (stmt, &ref2))
		{
		  ref2.ref_alias_set = ref_node->ref;
		  ref2.base_alias_set = base_node->base;
		  if (refs_may_alias_p_1 (&ref2, ref, tbaa_p))
		    return true;
		}
	      else if (ptr_deref_may_alias_ref_p_1 (arg, ref))
		return true;

	      num_tests++;
	    }
	}
    }
  return false;
}
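/* Walk sketch: TT is a three-level tree of alias sets recorded by the
   ipa-modref pass, conceptually e.g.

     base set A -> ref set B -> access (call argument 0, offset 0, size 4)

   for a callee that only touches *(int *)arg0.  The loops above first
   prune whole subtrees via alias_sets_conflict_p and then, for each
   surviving access, rebuild an ao_ref from the actual call argument so
   the full oracle (refs_may_alias_p_1) can run, with the total work
   capped by param_modref_max_tests.  */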
/* Check if REF conflicts with a call using the "fn spec" attribute.
   If CLOBBER is true we are checking for writes, otherwise check loads.

   Return 0 if there are no conflicts (except for possible function call
   argument reads), 1 if there are conflicts and -1 if we cannot decide by
   fn spec.  */

static int
check_fnspec (gcall *call, ao_ref *ref, bool clobber)
{
  attr_fnspec fnspec = gimple_call_fnspec (call);
  if (fnspec.known_p ())
    {
      if (clobber
	  ? !fnspec.global_memory_written_p ()
	  : !fnspec.global_memory_read_p ())
	{
	  for (unsigned int i = 0; i < gimple_call_num_args (call); i++)
	    if (POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, i)))
		&& (!fnspec.arg_specified_p (i)
		    || (clobber ? fnspec.arg_maybe_written_p (i)
			: fnspec.arg_maybe_read_p (i))))
	      {
		ao_ref dref;
		tree size = NULL_TREE;
		unsigned int size_arg;

		if (!fnspec.arg_specified_p (i))
		  ;
		else if (fnspec.arg_max_access_size_given_by_arg_p
			   (i, &size_arg))
		  size = gimple_call_arg (call, size_arg);
		else if (fnspec.arg_access_size_given_by_type_p (i))
		  {
		    tree callee = gimple_call_fndecl (call);
		    tree t = TYPE_ARG_TYPES (TREE_TYPE (callee));

		    for (unsigned int p = 0; p < i; p++)
		      t = TREE_CHAIN (t);
		    size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_VALUE (t)));
		  }
		ao_ref_init_from_ptr_and_size (&dref,
					       gimple_call_arg (call, i),
					       size);
		if (refs_may_alias_p_1 (&dref, ref, false))
		  return 1;
	      }
	  if (clobber
	      && fnspec.errno_maybe_written_p ()
	      && flag_errno_math
	      && targetm.ref_may_alias_errno (ref))
	    return 1;
	  return 0;
	}
    }

  /* FIXME: we should handle barriers more consistently, but for now leave the
     check here.  */
  if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
    switch (DECL_FUNCTION_CODE (gimple_call_fndecl (call)))
      {
	/* __sync_* builtins and some OpenMP builtins act as threading
	   barriers.  */
#undef DEF_SYNC_BUILTIN
#define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
#include "sync-builtins.def"
#undef DEF_SYNC_BUILTIN
      case BUILT_IN_GOMP_ATOMIC_START:
      case BUILT_IN_GOMP_ATOMIC_END:
      case BUILT_IN_GOMP_BARRIER:
      case BUILT_IN_GOMP_BARRIER_CANCEL:
      case BUILT_IN_GOMP_TASKWAIT:
      case BUILT_IN_GOMP_TASKGROUP_END:
      case BUILT_IN_GOMP_CRITICAL_START:
      case BUILT_IN_GOMP_CRITICAL_END:
      case BUILT_IN_GOMP_CRITICAL_NAME_START:
      case BUILT_IN_GOMP_CRITICAL_NAME_END:
      case BUILT_IN_GOMP_LOOP_END:
      case BUILT_IN_GOMP_LOOP_END_CANCEL:
      case BUILT_IN_GOMP_ORDERED_START:
      case BUILT_IN_GOMP_ORDERED_END:
      case BUILT_IN_GOMP_SECTIONS_END:
      case BUILT_IN_GOMP_SECTIONS_END_CANCEL:
      case BUILT_IN_GOMP_SINGLE_COPY_START:
      case BUILT_IN_GOMP_SINGLE_COPY_END:
	return 1;

      default:
	return -1;
      }
  return -1;
}
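/* Illustrative example (hypothetical fnspec data): for a call like

     foo (&x, n);

   whose fnspec records that no global memory is written and that only
   argument 0 may be written, a query whether the call clobbers some REF
   reduces to a single refs_may_alias_p_1 check between REF and the
   memory reachable from &x (with the access size taken from N or from
   the parameter type when the fnspec says so).  */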
/* If the call CALL may use the memory reference REF return true,
   otherwise return false.  */

static bool
ref_maybe_used_by_call_p_1 (gcall *call, ao_ref *ref, bool tbaa_p)
{
  tree base, callee;
  unsigned i;
  int flags = gimple_call_flags (call);

  if (flags & (ECF_CONST|ECF_NOVOPS))
    goto process_args;

  /* A call that is not without side-effects might involve volatile
     accesses and thus conflicts with all other volatile accesses.  */
  if (ref->volatile_p)
    return true;

  callee = gimple_call_fndecl (call);

  if (callee != NULL_TREE)
    {
      struct cgraph_node *node = cgraph_node::get (callee);
      /* We cannot safely optimize based on the summary of the callee if it
	 does not always bind to the current def: it is possible that a
	 memory load was optimized out earlier and the interposed variant
	 may not be optimized the same way.  */
      if (node && node->binds_to_current_def_p ())
	{
	  modref_summary *summary = get_modref_function_summary (node);
	  if (summary && !summary->calls_interposable)
	    {
	      if (!modref_may_conflict (call, summary->loads, ref, tbaa_p))
		{
		  alias_stats.modref_use_no_alias++;
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fprintf (dump_file,
			       "ipa-modref: call stmt ");
		      print_gimple_stmt (dump_file, call, 0);
		      fprintf (dump_file,
			       "ipa-modref: call to %s does not use ",
			       node->dump_name ());
		      if (!ref->ref && ref->base)
			{
			  fprintf (dump_file, "base: ");
			  print_generic_expr (dump_file, ref->base);
			}
		      else if (ref->ref)
			{
			  fprintf (dump_file, "ref: ");
			  print_generic_expr (dump_file, ref->ref);
			}
		      fprintf (dump_file, " alias sets: %i->%i\n",
			       ao_ref_base_alias_set (ref),
			       ao_ref_alias_set (ref));
		    }
		  goto process_args;
		}
	      alias_stats.modref_use_may_alias++;
	    }
	}
    }

  base = ao_ref_base (ref);
  if (!base)
    return true;

  /* If the reference is based on a decl that is not aliased the call
     cannot possibly use it.  */
  if (DECL_P (base)
      && !may_be_aliased (base)
      /* But local statics can be used through recursion.  */
      && !is_global_var (base))
    goto process_args;

  if (int res = check_fnspec (call, ref, false))
    {
      if (res == 1)
	return true;
    }
  else
    goto process_args;

  /* Check if base is a global static variable that is not read
     by the function.  */
  if (callee != NULL_TREE && VAR_P (base) && TREE_STATIC (base))
    {
      struct cgraph_node *node = cgraph_node::get (callee);
      bitmap read;
      int id;

      /* FIXME: Callee can be an OMP builtin that does not have a call graph
	 node yet.  We should enforce that there are nodes for all decls in
	 the IL and remove this check instead.  */
      if (node
	  && (id = ipa_reference_var_uid (base)) != -1
	  && (read = ipa_reference_get_read_global (node))
	  && !bitmap_bit_p (read, id))
	goto process_args;
    }

  /* Check if the base variable is call-used.  */
  if (DECL_P (base))
    {
      if (pt_solution_includes (gimple_call_use_set (call), base))
	return true;
    }
  else if ((TREE_CODE (base) == MEM_REF
	    || TREE_CODE (base) == TARGET_MEM_REF)
	   && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
    {
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
      if (!pi)
	return true;

      if (pt_solutions_intersect (gimple_call_use_set (call), &pi->pt))
	return true;
    }
  else
    return true;

  /* Inspect call arguments for passed-by-value aliases.  */
process_args:
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree op = gimple_call_arg (call, i);
      int flags = gimple_call_arg_flags (call, i);

      if (flags & (EAF_UNUSED | EAF_NO_DIRECT_READ))
	continue;

      if (TREE_CODE (op) == WITH_SIZE_EXPR)
	op = TREE_OPERAND (op, 0);

      if (TREE_CODE (op) != SSA_NAME
	  && !is_gimple_min_invariant (op))
	{
	  ao_ref r;
	  ao_ref_init (&r, op);
	  if (refs_may_alias_p_1 (&r, ref, tbaa_p))
	    return true;
	}
    }

  return false;
}
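/* Worked example (illustrative): for

     int x;	// automatic, address never taken
     foo ();

   the base X is a decl with !may_be_aliased and is not a global, so the
   walk jumps straight to process_args; since no aggregate argument
   passed by value references X, the call cannot use it and false is
   returned.  */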
static bool
ref_maybe_used_by_call_p (gcall *call, ao_ref *ref, bool tbaa_p)
{
  bool res;
  res = ref_maybe_used_by_call_p_1 (call, ref, tbaa_p);
  if (res)
    ++alias_stats.ref_maybe_used_by_call_p_may_alias;
  else
    ++alias_stats.ref_maybe_used_by_call_p_no_alias;
  return res;
}
/* If the statement STMT may use the memory reference REF return
   true, otherwise return false.  */

bool
ref_maybe_used_by_stmt_p (gimple *stmt, ao_ref *ref, bool tbaa_p)
{
  if (is_gimple_assign (stmt))
    {
      tree rhs;

      /* All memory assign statements are single.  */
      if (!gimple_assign_single_p (stmt))
	return false;

      rhs = gimple_assign_rhs1 (stmt);
      if (is_gimple_reg (rhs)
	  || is_gimple_min_invariant (rhs)
	  || gimple_assign_rhs_code (stmt) == CONSTRUCTOR)
	return false;

      return refs_may_alias_p (rhs, ref, tbaa_p);
    }
  else if (is_gimple_call (stmt))
    return ref_maybe_used_by_call_p (as_a <gcall *> (stmt), ref, tbaa_p);
  else if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
    {
      tree retval = gimple_return_retval (return_stmt);
      if (retval
	  && TREE_CODE (retval) != SSA_NAME
	  && !is_gimple_min_invariant (retval)
	  && refs_may_alias_p (retval, ref, tbaa_p))
	return true;
      /* If ref escapes the function then the return acts as a use.  */
      tree base = ao_ref_base (ref);
      if (!base)
	;
      else if (DECL_P (base))
	return is_global_var (base);
      else if (TREE_CODE (base) == MEM_REF
	       || TREE_CODE (base) == TARGET_MEM_REF)
	return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
      return false;
    }

  return true;
}

bool
ref_maybe_used_by_stmt_p (gimple *stmt, tree ref, bool tbaa_p)
{
  ao_ref r;
  ao_ref_init (&r, ref);
  return ref_maybe_used_by_stmt_p (stmt, &r, tbaa_p);
}
/* If the call in statement CALL may clobber the memory reference REF
   return true, otherwise return false.  */

bool
call_may_clobber_ref_p_1 (gcall *call, ao_ref *ref, bool tbaa_p)
{
  tree base;
  tree callee;

  /* If the call is pure or const it cannot clobber anything.  */
  if (gimple_call_flags (call)
      & (ECF_PURE|ECF_CONST|ECF_LOOPING_CONST_OR_PURE|ECF_NOVOPS))
    return false;
  if (gimple_call_internal_p (call))
    switch (gimple_call_internal_fn (call))
      {
	/* Treat these internal calls like ECF_PURE for aliasing,
	   they don't write to any memory the program should care about.
	   They have important other side-effects, and read memory,
	   so can't be ECF_NOVOPS.  */
      case IFN_UBSAN_NULL:
      case IFN_UBSAN_BOUNDS:
      case IFN_UBSAN_VPTR:
      case IFN_UBSAN_OBJECT_SIZE:
      case IFN_UBSAN_PTR:
      case IFN_ASAN_CHECK:
	return false;
      default:
	break;
      }

  callee = gimple_call_fndecl (call);

  if (callee != NULL_TREE && !ref->volatile_p)
    {
      struct cgraph_node *node = cgraph_node::get (callee);
      if (node)
	{
	  modref_summary *summary = get_modref_function_summary (node);
	  if (summary)
	    {
	      if (!modref_may_conflict (call, summary->stores, ref, tbaa_p)
		  && (!summary->writes_errno
		      || !targetm.ref_may_alias_errno (ref)))
		{
		  alias_stats.modref_clobber_no_alias++;
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fprintf (dump_file,
			       "ipa-modref: call stmt ");
		      print_gimple_stmt (dump_file, call, 0);
		      fprintf (dump_file,
			       "ipa-modref: call to %s does not clobber ",
			       node->dump_name ());
		      if (!ref->ref && ref->base)
			{
			  fprintf (dump_file, "base: ");
			  print_generic_expr (dump_file, ref->base);
			}
		      else if (ref->ref)
			{
			  fprintf (dump_file, "ref: ");
			  print_generic_expr (dump_file, ref->ref);
			}
		      fprintf (dump_file, " alias sets: %i->%i\n",
			       ao_ref_base_alias_set (ref),
			       ao_ref_alias_set (ref));
		    }
		  return false;
		}
	      alias_stats.modref_clobber_may_alias++;
	    }
	}
    }

  base = ao_ref_base (ref);
  if (!base)
    return true;

  if (TREE_CODE (base) == SSA_NAME
      || CONSTANT_CLASS_P (base))
    return false;

  /* A call that is not without side-effects might involve volatile
     accesses and thus conflicts with all other volatile accesses.  */
  if (ref->volatile_p)
    return true;

  /* If the reference is based on a decl that is not aliased the call
     cannot possibly clobber it.  */
  if (DECL_P (base)
      && !may_be_aliased (base)
      /* But local non-readonly statics can be modified through recursion
	 or the call may implement a threading barrier which we must
	 treat as may-def.  */
      && (TREE_READONLY (base)
	  || !is_global_var (base)))
    return false;

  /* If the reference is based on a pointer that points to memory
     that may not be written to then the call cannot possibly clobber it.  */
  if ((TREE_CODE (base) == MEM_REF
       || TREE_CODE (base) == TARGET_MEM_REF)
      && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
      && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base, 0)))
    return false;

  if (int res = check_fnspec (call, ref, true))
    {
      if (res == 1)
	return true;
    }
  else
    return false;

  /* Check if base is a global static variable that is not written
     by the function.  */
  if (callee != NULL_TREE && VAR_P (base) && TREE_STATIC (base))
    {
      struct cgraph_node *node = cgraph_node::get (callee);
      bitmap written;
      int id;

      if (node
	  && (id = ipa_reference_var_uid (base)) != -1
	  && (written = ipa_reference_get_written_global (node))
	  && !bitmap_bit_p (written, id))
	return false;
    }

  /* Check if the base variable is call-clobbered.  */
  if (DECL_P (base))
    return pt_solution_includes (gimple_call_clobber_set (call), base);
  else if ((TREE_CODE (base) == MEM_REF
	    || TREE_CODE (base) == TARGET_MEM_REF)
	   && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
    {
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
      if (!pi)
	return true;

      return pt_solutions_intersect (gimple_call_clobber_set (call), &pi->pt);
    }

  return true;
}
/* If the call in statement CALL may clobber the memory reference REF
   return true, otherwise return false.  */

bool
call_may_clobber_ref_p (gcall *call, tree ref, bool tbaa_p)
{
  bool res;
  ao_ref r;
  ao_ref_init (&r, ref);
  res = call_may_clobber_ref_p_1 (call, &r, tbaa_p);
  if (res)
    ++alias_stats.call_may_clobber_ref_p_may_alias;
  else
    ++alias_stats.call_may_clobber_ref_p_no_alias;
  return res;
}
/* If the statement STMT may clobber the memory reference REF return true,
   otherwise return false.  */

bool
stmt_may_clobber_ref_p_1 (gimple *stmt, ao_ref *ref, bool tbaa_p)
{
  if (is_gimple_call (stmt))
    {
      tree lhs = gimple_call_lhs (stmt);
      if (lhs
	  && TREE_CODE (lhs) != SSA_NAME)
	{
	  ao_ref r;
	  ao_ref_init (&r, lhs);
	  if (refs_may_alias_p_1 (ref, &r, tbaa_p))
	    return true;
	}

      return call_may_clobber_ref_p_1 (as_a <gcall *> (stmt), ref, tbaa_p);
    }
  else if (gimple_assign_single_p (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);
      if (TREE_CODE (lhs) != SSA_NAME)
	{
	  ao_ref r;
	  ao_ref_init (&r, lhs);
	  return refs_may_alias_p_1 (ref, &r, tbaa_p);
	}
    }
  else if (gimple_code (stmt) == GIMPLE_ASM)
    return true;

  return false;
}

bool
stmt_may_clobber_ref_p (gimple *stmt, tree ref, bool tbaa_p)
{
  ao_ref r;
  ao_ref_init (&r, ref);
  return stmt_may_clobber_ref_p_1 (stmt, &r, tbaa_p);
}
/* Return true if store1 and store2 described by the corresponding tuples
   <BASE, OFFSET, SIZE, MAX_SIZE> have the same size and store to the same
   address.  */

static bool
same_addr_size_stores_p (tree base1, poly_int64 offset1, poly_int64 size1,
			 poly_int64 max_size1,
			 tree base2, poly_int64 offset2, poly_int64 size2,
			 poly_int64 max_size2)
{
  /* Offsets need to be 0.  */
  if (maybe_ne (offset1, 0)
      || maybe_ne (offset2, 0))
    return false;

  bool base1_obj_p = SSA_VAR_P (base1);
  bool base2_obj_p = SSA_VAR_P (base2);

  /* We need one object.  */
  if (base1_obj_p == base2_obj_p)
    return false;
  tree obj = base1_obj_p ? base1 : base2;

  /* And we need one MEM_REF.  */
  bool base1_memref_p = TREE_CODE (base1) == MEM_REF;
  bool base2_memref_p = TREE_CODE (base2) == MEM_REF;
  if (base1_memref_p == base2_memref_p)
    return false;
  tree memref = base1_memref_p ? base1 : base2;

  /* Sizes need to be valid.  */
  if (!known_size_p (max_size1)
      || !known_size_p (max_size2)
      || !known_size_p (size1)
      || !known_size_p (size2))
    return false;

  /* Max_size needs to match size.  */
  if (maybe_ne (max_size1, size1)
      || maybe_ne (max_size2, size2))
    return false;

  /* Sizes need to match.  */
  if (maybe_ne (size1, size2))
    return false;


  /* Check that MEMREF is a store through a pointer with singleton
     points-to info.  */
  if (!integer_zerop (TREE_OPERAND (memref, 1)))
    return false;
  tree ptr = TREE_OPERAND (memref, 0);
  if (TREE_CODE (ptr) != SSA_NAME)
    return false;
  struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
  unsigned int pt_uid;
  if (pi == NULL
      || !pt_solution_singleton_or_null_p (&pi->pt, &pt_uid))
    return false;

  /* Be conservative with non-call exceptions when the address might
     be NULL.  */
  if (cfun->can_throw_non_call_exceptions && pi->pt.null)
    return false;

  /* Check that PTR points relative to OBJ.  */
  unsigned int obj_uid = DECL_PT_UID (obj);
  if (obj_uid != pt_uid)
    return false;

  /* Check that the object size is the same as the store size.  That
     ensures that PTR points to the start of OBJ.  */
  return (DECL_SIZE (obj)
	  && poly_int_tree_p (DECL_SIZE (obj))
	  && known_eq (wi::to_poly_offset (DECL_SIZE (obj)), size1));
}
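/* Example of the pattern this recognizes (hypothetical GIMPLE):

     x = foo ();	// store to decl x, offset 0, size of x
     *p_1 = y;		// store via pointer, offset 0, same size

   where points-to analysis proved P_1 can only point to X (a singleton
   points-to solution whose uid matches DECL_PT_UID (x)) and the store
   covers DECL_SIZE (x) exactly; then both stores write the very same
   bytes even though their bases look different syntactically.  */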
/* Return true if REF is killed by a store described by
   BASE, OFFSET, SIZE and MAX_SIZE.  */

static bool
store_kills_ref_p (tree base, poly_int64 offset, poly_int64 size,
		   poly_int64 max_size, ao_ref *ref)
{
  poly_int64 ref_offset = ref->offset;
  /* We can get MEM[symbol: sZ, index: D.8862_1] here,
     so base == ref->base does not always hold.  */
  if (base != ref->base)
    {
      /* Try using points-to info.  */
      if (same_addr_size_stores_p (base, offset, size, max_size, ref->base,
				   ref->offset, ref->size, ref->max_size))
	return true;

      /* If both base and ref->base are MEM_REFs, only compare the
	 first operand, and if the second operand isn't equal constant,
	 try to add the offsets into offset and ref_offset.  */
      if (TREE_CODE (base) == MEM_REF && TREE_CODE (ref->base) == MEM_REF
	  && TREE_OPERAND (base, 0) == TREE_OPERAND (ref->base, 0))
	{
	  if (!tree_int_cst_equal (TREE_OPERAND (base, 1),
				   TREE_OPERAND (ref->base, 1)))
	    {
	      poly_offset_int off1 = mem_ref_offset (base);
	      off1 <<= LOG2_BITS_PER_UNIT;
	      off1 += offset;
	      poly_offset_int off2 = mem_ref_offset (ref->base);
	      off2 <<= LOG2_BITS_PER_UNIT;
	      off2 += ref_offset;
	      if (!off1.to_shwi (&offset) || !off2.to_shwi (&ref_offset))
		size = -1;
	    }
	}
      else
	size = -1;
    }
  /* For a must-alias check we need to be able to constrain
     the access properly.  */
  return (known_eq (size, max_size)
	  && known_subrange_p (ref_offset, ref->max_size, offset, size));
}
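/* For instance (illustrative numbers): a store described by offset 0 and
   size == max_size == 128 bits kills a REF with ref->offset 32 and
   ref->max_size 32, because [32, 64) is a known subrange of [0, 128).
   With size != max_size the store location is not exact and no kill may
   be claimed.  */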
/* If STMT kills the memory reference REF return true, otherwise
   return false.  */

bool
stmt_kills_ref_p (gimple *stmt, ao_ref *ref)
{
  if (!ao_ref_base (ref))
    return false;

  if (gimple_has_lhs (stmt)
      && TREE_CODE (gimple_get_lhs (stmt)) != SSA_NAME
      /* The assignment is not necessarily carried out if it can throw
	 and we can catch it in the current function where we could inspect
	 the previous value.
	 ??? We only need to care about the RHS throwing.  For aggregate
	 assignments or similar calls and non-call exceptions the LHS
	 might throw as well.  */
      && !stmt_can_throw_internal (cfun, stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      /* If LHS is literally a base of the access we are done.  */
      if (ref->ref)
	{
	  tree base = ref->ref;
	  tree innermost_dropped_array_ref = NULL_TREE;
	  if (handled_component_p (base))
	    {
	      tree saved_lhs0 = NULL_TREE;
	      if (handled_component_p (lhs))
		{
		  saved_lhs0 = TREE_OPERAND (lhs, 0);
		  TREE_OPERAND (lhs, 0) = integer_zero_node;
		}
	      do
		{
		  /* Just compare the outermost handled component, if
		     they are equal we have found a possible common
		     base.  */
		  tree saved_base0 = TREE_OPERAND (base, 0);
		  TREE_OPERAND (base, 0) = integer_zero_node;
		  bool res = operand_equal_p (lhs, base, 0);
		  TREE_OPERAND (base, 0) = saved_base0;
		  if (res)
		    break;
		  /* Remember if we drop an array-ref that we need to
		     double-check not being at struct end.  */
		  if (TREE_CODE (base) == ARRAY_REF
		      || TREE_CODE (base) == ARRAY_RANGE_REF)
		    innermost_dropped_array_ref = base;
		  /* Otherwise drop handled components of the access.  */
		  base = saved_base0;
		}
	      while (handled_component_p (base));
	      if (saved_lhs0)
		TREE_OPERAND (lhs, 0) = saved_lhs0;
	    }
	  /* Finally check if the lhs has the same address and size as the
	     base candidate of the access.  Watch out if we have dropped
	     an array-ref that was at struct end, this means ref->ref may
	     be outside of the TYPE_SIZE of its base.  */
	  if ((! innermost_dropped_array_ref
	       || ! array_at_struct_end_p (innermost_dropped_array_ref))
	      && (lhs == base
		  || (((TYPE_SIZE (TREE_TYPE (lhs))
			== TYPE_SIZE (TREE_TYPE (base)))
		       || (TYPE_SIZE (TREE_TYPE (lhs))
			   && TYPE_SIZE (TREE_TYPE (base))
			   && operand_equal_p (TYPE_SIZE (TREE_TYPE (lhs)),
					       TYPE_SIZE (TREE_TYPE (base)),
					       0)))
		      && operand_equal_p (lhs, base,
					  OEP_ADDRESS_OF
					  | OEP_MATCH_SIDE_EFFECTS))))
	    {
	      ++alias_stats.stmt_kills_ref_p_yes;
	      return true;
	    }
	}

      /* Now look for non-literal equal bases with the restriction of
	 handling constant offset and size.  */
      /* For a must-alias check we need to be able to constrain
	 the access properly.  */
      if (!ref->max_size_known_p ())
	{
	  ++alias_stats.stmt_kills_ref_p_no;
	  return false;
	}
      poly_int64 size, offset, max_size;
      bool reverse;
      tree base = get_ref_base_and_extent (lhs, &offset, &size, &max_size,
					   &reverse);
      if (store_kills_ref_p (base, offset, size, max_size, ref))
	{
	  ++alias_stats.stmt_kills_ref_p_yes;
	  return true;
	}
    }

  if (is_gimple_call (stmt))
    {
      tree callee = gimple_call_fndecl (stmt);
      struct cgraph_node *node;
      modref_summary *summary;

      /* Try to disambiguate using the modref summary.  Modref records a
	 vector of stores with known offsets relative to function parameters
	 that must happen every execution of the function.  Find if we have
	 a matching store and verify that the function cannot use the
	 value.  */
      if (callee != NULL_TREE
	  && (node = cgraph_node::get (callee)) != NULL
	  && node->binds_to_current_def_p ()
	  && (summary = get_modref_function_summary (node)) != NULL
	  && summary->kills.length ()
	  && (!cfun->can_throw_non_call_exceptions
	      || !stmt_can_throw_internal (cfun, stmt)))
	{
	  for (auto kill : summary->kills)
	    {
	      ao_ref dref;

	      /* We only can do useful compares if we know the access range
		 precisely.  */
	      if (!kill.get_ao_ref (as_a <gcall *> (stmt), &dref))
		continue;
	      if (store_kills_ref_p (ao_ref_base (&dref), dref.offset,
				     dref.size, dref.max_size, ref))
		{
		  /* For the store to be killed it must not be used
		     earlier.  */
		  if (ref_maybe_used_by_call_p_1 (as_a <gcall *> (stmt), ref,
						  true)
		      || !dbg_cnt (ipa_mod_ref))
		    break;
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fprintf (dump_file,
			       "ipa-modref: call stmt ");
		      print_gimple_stmt (dump_file, stmt, 0);
		      fprintf (dump_file,
			       "ipa-modref: call to %s kills ",
			       node->dump_name ());
		      print_generic_expr (dump_file, ref->base);
		    }
		  ++alias_stats.modref_kill_yes;
		  return true;
		}
	    }
	  ++alias_stats.modref_kill_no;
	}
      if (callee != NULL_TREE
	  && gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
	switch (DECL_FUNCTION_CODE (callee))
	  {
	  case BUILT_IN_FREE:
	    {
	      tree ptr = gimple_call_arg (stmt, 0);
	      tree base = ao_ref_base (ref);
	      if (base && TREE_CODE (base) == MEM_REF
		  && TREE_OPERAND (base, 0) == ptr)
		{
		  ++alias_stats.stmt_kills_ref_p_yes;
		  return true;
		}
	      break;
	    }

	  case BUILT_IN_MEMCPY:
	  case BUILT_IN_MEMPCPY:
	  case BUILT_IN_MEMMOVE:
	  case BUILT_IN_MEMSET:
	  case BUILT_IN_MEMCPY_CHK:
	  case BUILT_IN_MEMPCPY_CHK:
	  case BUILT_IN_MEMMOVE_CHK:
	  case BUILT_IN_MEMSET_CHK:
	  case BUILT_IN_STRNCPY:
	  case BUILT_IN_STPNCPY:
	  case BUILT_IN_CALLOC:
	    {
	      /* For a must-alias check we need to be able to constrain
		 the access properly.  */
	      if (!ref->max_size_known_p ())
		{
		  ++alias_stats.stmt_kills_ref_p_no;
		  return false;
		}
	      tree dest;
	      tree len;

	      /* In execution order a calloc call will never kill
		 anything.  However, DSE will (ab)use this interface
		 to ask if a calloc call writes the same memory locations
		 as a later assignment, memset, etc.  So handle calloc
		 in the expected way.  */
	      if (DECL_FUNCTION_CODE (callee) == BUILT_IN_CALLOC)
		{
		  tree arg0 = gimple_call_arg (stmt, 0);
		  tree arg1 = gimple_call_arg (stmt, 1);
		  if (TREE_CODE (arg0) != INTEGER_CST
		      || TREE_CODE (arg1) != INTEGER_CST)
		    {
		      ++alias_stats.stmt_kills_ref_p_no;
		      return false;
		    }

		  dest = gimple_call_lhs (stmt);
		  if (!dest)
		    {
		      ++alias_stats.stmt_kills_ref_p_no;
		      return false;
		    }
		  len = fold_build2 (MULT_EXPR, TREE_TYPE (arg0), arg0, arg1);
		}
	      else
		{
		  dest = gimple_call_arg (stmt, 0);
		  len = gimple_call_arg (stmt, 2);
		}
	      if (!poly_int_tree_p (len))
		return false;
	      ao_ref dref;
	      ao_ref_init_from_ptr_and_size (&dref, dest, len);
	      if (store_kills_ref_p (ao_ref_base (&dref), dref.offset,
				     dref.size, dref.max_size, ref))
		{
		  ++alias_stats.stmt_kills_ref_p_yes;
		  return true;
		}
	      break;
	    }

	  case BUILT_IN_VA_END:
	    {
	      tree ptr = gimple_call_arg (stmt, 0);
	      if (TREE_CODE (ptr) == ADDR_EXPR)
		{
		  tree base = ao_ref_base (ref);
		  if (TREE_OPERAND (ptr, 0) == base)
		    {
		      ++alias_stats.stmt_kills_ref_p_yes;
		      return true;
		    }
		}
	      break;
	    }

	  default:;
	  }
    }

  ++alias_stats.stmt_kills_ref_p_no;
  return false;
}

bool
stmt_kills_ref_p (gimple *stmt, tree ref)
{
  ao_ref r;
  ao_ref_init (&r, ref);
  return stmt_kills_ref_p (stmt, &r);
}
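/* Usage sketch (hypothetical statements): for

     memset (&a, 0, sizeof (a));
     ... = a.f;		// REF

   the memset case above builds an ao_ref covering the byte range
   [&a, &a + sizeof (a)) and store_kills_ref_p confirms that the access
   to a.f lies entirely within it, so any store to a.f before the memset
   is dead with respect to this reference.  */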
/* Walk the virtual use-def chain of VUSE until hitting the virtual operand
   TARGET or a statement clobbering the memory reference REF, in which
   case false is returned.  The walk starts with VUSE, one argument of PHI.  */

static bool
maybe_skip_until (gimple *phi, tree &target, basic_block target_bb,
		  ao_ref *ref, tree vuse, bool tbaa_p, unsigned int &limit,
		  bitmap *visited, bool abort_on_visited,
		  void *(*translate)(ao_ref *, tree, void *, translate_flags *),
		  translate_flags disambiguate_only,
		  void *data)
{
  basic_block bb = gimple_bb (phi);

  if (!*visited)
    *visited = BITMAP_ALLOC (NULL);

  bitmap_set_bit (*visited, SSA_NAME_VERSION (PHI_RESULT (phi)));

  /* Walk until we hit the target.  */
  while (vuse != target)
    {
      gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
      /* If we are searching for the target VUSE by walking up to
	 TARGET_BB dominating the original PHI we are finished once
	 we reach a default def or a definition in a block dominating
	 that block.  Update TARGET and return.  */
      if (!target
	  && (gimple_nop_p (def_stmt)
	      || dominated_by_p (CDI_DOMINATORS,
				 target_bb, gimple_bb (def_stmt))))
	{
	  target = vuse;
	  return true;
	}

      /* Recurse for PHI nodes.  */
      if (gimple_code (def_stmt) == GIMPLE_PHI)
	{
	  /* An already visited PHI node ends the walk successfully.  */
	  if (bitmap_bit_p (*visited, SSA_NAME_VERSION (PHI_RESULT (def_stmt))))
	    return !abort_on_visited;
	  vuse = get_continuation_for_phi (def_stmt, ref, tbaa_p, limit,
					   visited, abort_on_visited,
					   translate, data, disambiguate_only);
	  if (!vuse)
	    return false;
	  continue;
	}
      else if (gimple_nop_p (def_stmt))
	return false;
      else
	{
	  /* A clobbering statement or the end of the IL ends the walk
	     with failure.  */
	  if ((int)limit <= 0)
	    return false;
	  --limit;
	  if (stmt_may_clobber_ref_p_1 (def_stmt, ref, tbaa_p))
	    {
	      translate_flags tf = disambiguate_only;
	      if (translate
		  && (*translate) (ref, vuse, data, &tf) == NULL)
		;
	      else
		return false;
	    }
	}
      /* If we reach a new basic-block see if we already skipped it
	 in a previous walk that ended successfully.  */
      if (gimple_bb (def_stmt) != bb)
	{
	  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (vuse)))
	    return !abort_on_visited;
	  bb = gimple_bb (def_stmt);
	}
      vuse = gimple_vuse (def_stmt);
    }
  return true;
}
3622 /* Starting from a PHI node for the virtual operand of the memory reference
3623 REF find a continuation virtual operand that allows to continue walking
3624 statements dominating PHI skipping only statements that cannot possibly
3625 clobber REF. Decrements LIMIT for each alias disambiguation done
3626 and aborts the walk, returning NULL_TREE if it reaches zero.
3627 Returns NULL_TREE if no suitable virtual operand can be found. */
3629 tree
3630 get_continuation_for_phi (gimple *phi, ao_ref *ref, bool tbaa_p,
3631 unsigned int &limit, bitmap *visited,
3632 bool abort_on_visited,
3633 void *(*translate)(ao_ref *, tree, void *,
3634 translate_flags *),
3635 void *data,
3636 translate_flags disambiguate_only)
3638 unsigned nargs = gimple_phi_num_args (phi);
3640 /* Through a single-argument PHI we can simply look through. */
3641 if (nargs == 1)
3642 return PHI_ARG_DEF (phi, 0);
3644 /* For two or more arguments try to pairwise skip non-aliasing code
3645 until we hit the phi argument definition that dominates the other one. */
3646 basic_block phi_bb = gimple_bb (phi);
3647 tree arg0, arg1;
3648 unsigned i;
3650 /* Find a candidate for the virtual operand which definition
3651 dominates those of all others. */
3652 /* First look if any of the args themselves satisfy this. */
3653 for (i = 0; i < nargs; ++i)
3655 arg0 = PHI_ARG_DEF (phi, i);
3656 if (SSA_NAME_IS_DEFAULT_DEF (arg0))
3657 break;
3658 basic_block def_bb = gimple_bb (SSA_NAME_DEF_STMT (arg0));
3659 if (def_bb != phi_bb
3660 && dominated_by_p (CDI_DOMINATORS, phi_bb, def_bb))
3661 break;
3662 arg0 = NULL_TREE;
3664 /* If not, see whether we can reach such a candidate by walking defs
3665 until we hit the immediate dominator. maybe_skip_until will
3666 do that for us. */
3667 basic_block dom = get_immediate_dominator (CDI_DOMINATORS, phi_bb);
3669 /* Then check the other arguments against the candidate (possibly
still to be found by maybe_skip_until). */
3670 for (i = 0; i < nargs; ++i)
3672 arg1 = PHI_ARG_DEF (phi, i);
3673 if (arg1 == arg0)
3674 ;
3675 else if (! maybe_skip_until (phi, arg0, dom, ref, arg1, tbaa_p,
3676 limit, visited,
3677 abort_on_visited,
3678 translate,
3679 /* Do not valueize when walking over
3680 backedges. */
3681 dominated_by_p
3682 (CDI_DOMINATORS,
3683 gimple_bb (SSA_NAME_DEF_STMT (arg1)),
3684 phi_bb)
3685 ? TR_DISAMBIGUATE
3686 : disambiguate_only, data))
3687 return NULL_TREE;
3690 return arg0;
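/* For example, in a CFG diamond

        bb2: # .MEM_1 = VDEF <...>
       /    \
    bb3      bb4: # .MEM_2 = VDEF <.MEM_1>  (a store)
       \    /
        bb5: .MEM_3 = PHI <.MEM_1(bb3), .MEM_2(bb4)>

   get_continuation_for_phi selects .MEM_1 as the candidate since its
   definition dominates bb5, and maybe_skip_until then walks from .MEM_2
   up to the immediate dominator, checking that the store in bb4 cannot
   clobber REF; if so the walk may continue at .MEM_1.  Illustrative
   sketch only; the block and SSA numbers are made up.  */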
3693 /* Based on the memory reference REF and its virtual use VUSE call
3694 WALKER for each virtual use that is equivalent to VUSE, including VUSE
3695 itself. That is, for each virtual use whose defining statement
3696 does not clobber REF.
3698 WALKER is called with REF, the current virtual use and DATA. If
3699 WALKER returns non-NULL the walk stops and its result is returned.
3700 At the end of a non-successful walk NULL is returned.
3702 TRANSLATE, if non-NULL, is called with a pointer to REF, the virtual
3703 use whose definition is a statement that may clobber REF, and DATA.
3704 If TRANSLATE returns (void *)-1 the walk stops and NULL is returned.
3705 If TRANSLATE returns non-NULL the walk stops and its result is returned.
3706 If TRANSLATE returns NULL the walk continues and TRANSLATE is supposed
3707 to adjust REF and *DATA to make that valid.
3709 VALUEIZE, if non-NULL, is called with the next VUSE that is considered
3710 and its return value is substituted for it. This can be used to
3711 implement optimistic value-numbering for example. Note that the
3712 VUSE argument is assumed to be valueized already.
3714 LIMIT specifies the number of alias queries we are allowed to do,
3715 the walk stops when it reaches zero and NULL is returned. LIMIT
3716 is decremented by the number of alias queries (plus adjustments
3717 done by the callbacks) upon return.
3719 TODO: Cache the vector of equivalent vuses per ref, vuse pair. */
3721 void *
3722 walk_non_aliased_vuses (ao_ref *ref, tree vuse, bool tbaa_p,
3723 void *(*walker)(ao_ref *, tree, void *),
3724 void *(*translate)(ao_ref *, tree, void *,
3725 translate_flags *),
3726 tree (*valueize)(tree),
3727 unsigned &limit, void *data)
3729 bitmap visited = NULL;
3730 void *res;
3731 bool translated = false;
3733 timevar_push (TV_ALIAS_STMT_WALK);
3737 gimple *def_stmt;
3739 /* ??? Do we want to account this to TV_ALIAS_STMT_WALK? */
3740 res = (*walker) (ref, vuse, data);
3741 /* Abort walk. */
3742 if (res == (void *)-1)
3744 res = NULL;
3745 break;
3747 /* Lookup succeeded. */
3748 else if (res != NULL)
3749 break;
3751 if (valueize)
3753 vuse = valueize (vuse);
3754 if (!vuse)
3756 res = NULL;
3757 break;
3760 def_stmt = SSA_NAME_DEF_STMT (vuse);
3761 if (gimple_nop_p (def_stmt))
3762 break;
3763 else if (gimple_code (def_stmt) == GIMPLE_PHI)
3764 vuse = get_continuation_for_phi (def_stmt, ref, tbaa_p, limit,
3765 &visited, translated, translate, data);
3766 else
3768 if ((int)limit <= 0)
3770 res = NULL;
3771 break;
3773 --limit;
3774 if (stmt_may_clobber_ref_p_1 (def_stmt, ref, tbaa_p))
3776 if (!translate)
3777 break;
3778 translate_flags disambiguate_only = TR_TRANSLATE;
3779 res = (*translate) (ref, vuse, data, &disambiguate_only);
3780 /* Failed lookup and translation. */
3781 if (res == (void *)-1)
3783 res = NULL;
3784 break;
3786 /* Lookup succeeded. */
3787 else if (res != NULL)
3788 break;
3789 /* Translation succeeded, continue walking. */
3790 translated = translated || disambiguate_only == TR_TRANSLATE;
3792 vuse = gimple_vuse (def_stmt);
3795 while (vuse);
3797 if (visited)
3798 BITMAP_FREE (visited);
3800 timevar_pop (TV_ALIAS_STMT_WALK);
3802 return res;
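/* A minimal sketch (not part of GCC) of how the API above is typically
   used; the callback and function names here are made up.  */
#if 0
/* WALKER callback: stop the walk at the first vuse whose defining
   statement is a plain assignment to the very same reference.
   Returning NULL continues the walk, non-NULL stops it and is
   returned, (void *)-1 aborts it.  */
static void *
find_def_walker (ao_ref *ref, tree vuse, void *data ATTRIBUTE_UNUSED)
{
  gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
  if (ref->ref
      && gimple_assign_single_p (def_stmt)
      && operand_equal_p (gimple_assign_lhs (def_stmt), ref->ref, 0))
    return def_stmt;
  return NULL;
}

/* Walk from VUSE skipping statements that cannot clobber REF and
   return the first statement storing exactly REF, or NULL.  */
static gimple *
find_dominating_def (ao_ref *ref, tree vuse)
{
  unsigned limit = 32;	/* Arbitrary alias-query budget.  */
  return (gimple *) walk_non_aliased_vuses (ref, vuse, true, find_def_walker,
					    NULL, NULL, limit, NULL);
}
#endif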
3806 /* Based on the memory reference REF call WALKER for each vdef whose
3807 defining statement may clobber REF, starting with VDEF. If REF
3808 is NULL_TREE, each defining statement is visited.
3810 WALKER is called with REF, the current vdef and DATA. If WALKER
3811 returns true the walk is stopped, otherwise it continues.
3813 If function entry is reached, FUNCTION_ENTRY_REACHED is set to true.
3814 The pointer may be NULL, in which case this information is not tracked. */
3816 At PHI nodes walk_aliased_vdefs forks into one walk for each
3817 PHI argument (but only one walk continues at merge points); the
3818 walk is considered successful if any of the forked walks was.
3820 The function returns the number of statements walked or -1 if
3821 LIMIT stmts were walked and the walk was aborted at this point.
3822 If LIMIT is zero the walk is not aborted. */
3824 static int
3825 walk_aliased_vdefs_1 (ao_ref *ref, tree vdef,
3826 bool (*walker)(ao_ref *, tree, void *), void *data,
3827 bitmap *visited, unsigned int cnt,
3828 bool *function_entry_reached, unsigned limit)
3832 gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);
3834 if (*visited
3835 && !bitmap_set_bit (*visited, SSA_NAME_VERSION (vdef)))
3836 return cnt;
3838 if (gimple_nop_p (def_stmt))
3840 if (function_entry_reached)
3841 *function_entry_reached = true;
3842 return cnt;
3844 else if (gimple_code (def_stmt) == GIMPLE_PHI)
3846 unsigned i;
3847 if (!*visited)
3848 *visited = BITMAP_ALLOC (NULL);
3849 for (i = 0; i < gimple_phi_num_args (def_stmt); ++i)
3851 int res = walk_aliased_vdefs_1 (ref,
3852 gimple_phi_arg_def (def_stmt, i),
3853 walker, data, visited, cnt,
3854 function_entry_reached, limit);
3855 if (res == -1)
3856 return -1;
3857 cnt = res;
3859 return cnt;
3862 /* ??? Do we want to account this to TV_ALIAS_STMT_WALK? */
3863 cnt++;
3864 if (cnt == limit)
3865 return -1;
3866 if ((!ref
3867 || stmt_may_clobber_ref_p_1 (def_stmt, ref))
3868 && (*walker) (ref, vdef, data))
3869 return cnt;
3871 vdef = gimple_vuse (def_stmt);
3873 while (1);
3876 int
3877 walk_aliased_vdefs (ao_ref *ref, tree vdef,
3878 bool (*walker)(ao_ref *, tree, void *), void *data,
3879 bitmap *visited,
3880 bool *function_entry_reached, unsigned int limit)
3882 bitmap local_visited = NULL;
3883 int ret;
3885 timevar_push (TV_ALIAS_STMT_WALK);
3887 if (function_entry_reached)
3888 *function_entry_reached = false;
3890 ret = walk_aliased_vdefs_1 (ref, vdef, walker, data,
3891 visited ? visited : &local_visited, 0,
3892 function_entry_reached, limit);
3893 if (local_visited)
3894 BITMAP_FREE (local_visited);
3896 timevar_pop (TV_ALIAS_STMT_WALK);
3898 return ret;
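/* A minimal sketch (not part of GCC) of a walk_aliased_vdefs user; the
   names are made up.  It records the first statement that may clobber
   REF, following the calling convention documented above.  */
#if 0
static bool
first_clobber_walker (ao_ref *ref ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  /* Returning true stops the walk.  */
  *(gimple **) data = SSA_NAME_DEF_STMT (vdef);
  return true;
}

static gimple *
first_aliasing_clobber (ao_ref *ref, tree vdef)
{
  gimple *clobber = NULL;
  bool entry_reached;
  /* A LIMIT of zero means the walk is never aborted.  */
  walk_aliased_vdefs (ref, vdef, first_clobber_walker, &clobber,
		      /*visited=*/NULL, &entry_reached, /*limit=*/0);
  return clobber;
}
#endif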
3901 /* Verify validity of the fnspec string.
3902 See attr-fnspec.h for details. */
3904 void
3905 attr_fnspec::verify ()
3907 bool err = false;
3908 if (!len)
3909 return;
3911 /* Check return value specifier. */
3912 if (len < return_desc_size)
3913 err = true;
3914 else if ((len - return_desc_size) % arg_desc_size)
3915 err = true;
3916 else if ((str[0] < '1' || str[0] > '4')
3917 && str[0] != '.' && str[0] != 'm')
3918 err = true;
3920 switch (str[1])
3922 case ' ':
3923 case 'p':
3924 case 'P':
3925 case 'c':
3926 case 'C':
3927 break;
3928 default:
3929 err = true;
3931 if (err)
3932 internal_error ("invalid fn spec attribute \"%s\"", str);
3934 /* Now check all parameters. */
3935 for (unsigned int i = 0; arg_specified_p (i); i++)
3937 unsigned int idx = arg_idx (i);
3938 switch (str[idx])
3940 case 'x':
3941 case 'X':
3942 case 'r':
3943 case 'R':
3944 case 'o':
3945 case 'O':
3946 case 'w':
3947 case 'W':
3948 case '.':
3949 if ((str[idx + 1] >= '1' && str[idx + 1] <= '9')
3950 || str[idx + 1] == 't')
3952 if (str[idx] != 'r' && str[idx] != 'R'
3953 && str[idx] != 'w' && str[idx] != 'W'
3954 && str[idx] != 'o' && str[idx] != 'O')
3955 err = true;
3956 if (str[idx + 1] != 't'
3957 /* The size argument is a scalar, so it should be described
3958 by ". " if specified at all. */
3959 && (arg_specified_p (str[idx + 1] - '1')
3960 && str[arg_idx (str[idx + 1] - '1')] != '.'))
3961 err = true;
3963 else if (str[idx + 1] != ' ')
3964 err = true;
3965 break;
3966 default:
3967 if (str[idx] < '1' || str[idx] > '9')
3968 err = true;
3970 if (err)
3971 internal_error ("invalid fn spec attribute \"%s\" arg %i", str, i);
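/* For example (an illustrative string, not one used by GCC), a function
   "void fill (char *dst, size_t n)" that only writes N bytes through DST
   could be described by the fnspec ". O2. ":
     '.'  - nothing is known about the return value,
     ' '  - no pure/const property is claimed,
     "O2" - argument 1 is only written to, with the size given by
	    argument 2, and
     ". " - nothing else is known about argument 2 (required by the
	    checks above, since it serves as a size argument).  */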
3975 /* Return true if TYPE1 and TYPE2 will always give the same answer
3976 when compared with other types using same_type_for_tbaa_p. */
3978 static bool
3979 types_equal_for_same_type_for_tbaa_p (tree type1, tree type2,
3980 bool lto_streaming_safe)
3982 /* We use same_type_for_tbaa_p to match types in the access path.
3983 This check is overly conservative. */
3984 type1 = TYPE_MAIN_VARIANT (type1);
3985 type2 = TYPE_MAIN_VARIANT (type2);
3987 if (TYPE_STRUCTURAL_EQUALITY_P (type1)
3988 != TYPE_STRUCTURAL_EQUALITY_P (type2))
3989 return false;
3990 if (TYPE_STRUCTURAL_EQUALITY_P (type1))
3991 return true;
3993 if (lto_streaming_safe)
3994 return type1 == type2;
3995 else
3996 return TYPE_CANONICAL (type1) == TYPE_CANONICAL (type2);
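/* E.g. two variant types sharing a main variant always compare the same
   here, while two structurally identical but distinct types are only
   considered equal via TYPE_CANONICAL; canonical types are going to be
   recomputed during LTO streaming and hence are not consulted in that
   mode.  Illustrative note only.  */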
3999 /* Compare REF1 and REF2 and return flags specifying their differences.
4000 If LTO_STREAMING_SAFE is true do not use alias sets and canonical
4001 types that are going to be recomputed.
4002 If TBAA is true also compare TBAA metadata. */
4004 int
4005 ao_compare::compare_ao_refs (ao_ref *ref1, ao_ref *ref2,
4006 bool lto_streaming_safe,
4007 bool tbaa)
4009 if (TREE_THIS_VOLATILE (ref1->ref) != TREE_THIS_VOLATILE (ref2->ref))
4010 return SEMANTICS;
4011 tree base1 = ao_ref_base (ref1);
4012 tree base2 = ao_ref_base (ref2);
4014 if (!known_eq (ref1->offset, ref2->offset)
4015 || !known_eq (ref1->size, ref2->size)
4016 || !known_eq (ref1->max_size, ref2->max_size))
4017 return SEMANTICS;
4019 /* For variable accesses we need to compare actual paths
4020 to check that both refs access the same address with the same size. */
4021 if (!known_eq (ref1->size, ref1->max_size))
4023 if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (ref1->ref)),
4024 TYPE_SIZE (TREE_TYPE (ref2->ref)), 0))
4025 return SEMANTICS;
4026 tree r1 = ref1->ref;
4027 tree r2 = ref2->ref;
4029 /* Handle toplevel COMPONENT_REFs of bitfields.
4030 Those are special since they are not allowed in
4031 ADDR_EXPR. */
4032 if (TREE_CODE (r1) == COMPONENT_REF
4033 && DECL_BIT_FIELD (TREE_OPERAND (r1, 1)))
4035 if (TREE_CODE (r2) != COMPONENT_REF
4036 || !DECL_BIT_FIELD (TREE_OPERAND (r2, 1)))
4037 return SEMANTICS;
4038 tree field1 = TREE_OPERAND (r1, 1);
4039 tree field2 = TREE_OPERAND (r2, 1);
4040 if (!operand_equal_p (DECL_FIELD_OFFSET (field1),
4041 DECL_FIELD_OFFSET (field2), 0)
4042 || !operand_equal_p (DECL_FIELD_BIT_OFFSET (field1),
4043 DECL_FIELD_BIT_OFFSET (field2), 0)
4044 || !operand_equal_p (DECL_SIZE (field1), DECL_SIZE (field2), 0)
4045 || !types_compatible_p (TREE_TYPE (r1),
4046 TREE_TYPE (r2)))
4047 return SEMANTICS;
4048 r1 = TREE_OPERAND (r1, 0);
4049 r2 = TREE_OPERAND (r2, 0);
4051 else if (TREE_CODE (r2) == COMPONENT_REF
4052 && DECL_BIT_FIELD (TREE_OPERAND (r2, 1)))
4053 return SEMANTICS;
4055 /* Similarly for bit field refs. */
4056 if (TREE_CODE (r1) == BIT_FIELD_REF)
4058 if (TREE_CODE (r2) != BIT_FIELD_REF
4059 || !operand_equal_p (TREE_OPERAND (r1, 1),
4060 TREE_OPERAND (r2, 1), 0)
4061 || !operand_equal_p (TREE_OPERAND (r1, 2),
4062 TREE_OPERAND (r2, 2), 0)
4063 || !types_compatible_p (TREE_TYPE (r1),
4064 TREE_TYPE (r2)))
4065 return SEMANTICS;
4066 r1 = TREE_OPERAND (r1, 0);
4067 r2 = TREE_OPERAND (r2, 0);
4069 else if (TREE_CODE (r2) == BIT_FIELD_REF)
4070 return SEMANTICS;
4072 /* Now we can compare the address of the actual memory access. */
4073 if (!operand_equal_p (r1, r2, OEP_ADDRESS_OF | OEP_MATCH_SIDE_EFFECTS))
4074 return SEMANTICS;
4076 /* For constant accesses we get more matches by comparing offset only. */
4077 else if (!operand_equal_p (base1, base2,
4078 OEP_ADDRESS_OF | OEP_MATCH_SIDE_EFFECTS))
4079 return SEMANTICS;
4081 /* We can't simply use get_object_alignment_1 on the full
4082 reference as for accesses with variable indexes it reports
4083 an overly conservative alignment. */
4084 unsigned int align1, align2;
4085 unsigned HOST_WIDE_INT bitpos1, bitpos2;
4086 bool known1 = get_object_alignment_1 (base1, &align1, &bitpos1);
4087 bool known2 = get_object_alignment_1 (base2, &align2, &bitpos2);
4088 /* ??? For MEMREF get_object_alignment_1 determines the alignment from
4089 TYPE_ALIGN but still returns false. This seems to contradict
4090 its description, so compare even if the alignment is unknown. */
4091 if (known1 != known2
4092 || (bitpos1 != bitpos2 || align1 != align2))
4093 return SEMANTICS;
4095 /* Now we know that the accesses are semantically the same. */
4096 int flags = 0;
4098 /* ao_ref_base strips inner MEM_REF [&decl], recover from that here. */
4099 tree rbase1 = ref1->ref;
4100 if (rbase1)
4101 while (handled_component_p (rbase1))
4102 rbase1 = TREE_OPERAND (rbase1, 0);
4103 tree rbase2 = ref2->ref;
4104 while (handled_component_p (rbase2))
4105 rbase2 = TREE_OPERAND (rbase2, 0);
4107 /* MEM_REFs and TARGET_MEM_REFs record dependence cliques which are used to
4108 implement restrict pointers. MR_DEPENDENCE_CLIQUE 0 means no information.
4109 Otherwise we need to match bases and cliques. */
4110 if ((((TREE_CODE (rbase1) == MEM_REF || TREE_CODE (rbase1) == TARGET_MEM_REF)
4111 && MR_DEPENDENCE_CLIQUE (rbase1))
4112 || ((TREE_CODE (rbase2) == MEM_REF || TREE_CODE (rbase2) == TARGET_MEM_REF)
4113 && MR_DEPENDENCE_CLIQUE (rbase2)))
4114 && (TREE_CODE (rbase1) != TREE_CODE (rbase2)
4115 || MR_DEPENDENCE_CLIQUE (rbase1) != MR_DEPENDENCE_CLIQUE (rbase2)
4116 || (MR_DEPENDENCE_BASE (rbase1) != MR_DEPENDENCE_BASE (rbase2))))
4117 flags |= DEPENDENCE_CLIQUE;
4119 if (!tbaa)
4120 return flags;
4122 /* Alias sets are not stable across LTO streaming; be conservative here
4123 and compare the types the alias sets are ultimately based on. */
4124 if (lto_streaming_safe)
4126 tree t1 = ao_ref_alias_ptr_type (ref1);
4127 tree t2 = ao_ref_alias_ptr_type (ref2);
4128 if (!alias_ptr_types_compatible_p (t1, t2))
4129 flags |= REF_ALIAS_SET;
4131 t1 = ao_ref_base_alias_ptr_type (ref1);
4132 t2 = ao_ref_base_alias_ptr_type (ref2);
4133 if (!alias_ptr_types_compatible_p (t1, t2))
4134 flags |= BASE_ALIAS_SET;
4136 else
4138 if (ao_ref_alias_set (ref1) != ao_ref_alias_set (ref2))
4139 flags |= REF_ALIAS_SET;
4140 if (ao_ref_base_alias_set (ref1) != ao_ref_base_alias_set (ref2))
4141 flags |= BASE_ALIAS_SET;
4144 /* The access path is used only on non-view-converted references. */
4145 bool view_converted = view_converted_memref_p (rbase1);
4146 if (view_converted_memref_p (rbase2) != view_converted)
4147 return flags | ACCESS_PATH;
4148 else if (view_converted)
4149 return flags;
4152 /* Find start of access paths and look for trailing arrays. */
4153 tree c1 = ref1->ref, c2 = ref2->ref;
4154 tree end_struct_ref1 = NULL, end_struct_ref2 = NULL;
4155 int nskipped1 = 0, nskipped2 = 0;
4156 int i = 0;
4158 for (tree p1 = ref1->ref; handled_component_p (p1); p1 = TREE_OPERAND (p1, 0))
4160 if (component_ref_to_zero_sized_trailing_array_p (p1))
4161 end_struct_ref1 = p1;
4162 if (ends_tbaa_access_path_p (p1))
4163 c1 = p1, nskipped1 = i;
4164 i++;
4166 for (tree p2 = ref2->ref; handled_component_p (p2); p2 = TREE_OPERAND (p2, 0))
4168 if (component_ref_to_zero_sized_trailing_array_p (p2))
4169 end_struct_ref2 = p2;
4170 if (ends_tbaa_access_path_p (p2))
4171 c2 = p2, nskipped2 = i;
4172 i++;
4175 /* For variable accesses we cannot rely on the offset match below.
4176 We know that the paths are structurally the same, so only check
4177 that the starts of the TBAA paths did not diverge. */
4178 if (!known_eq (ref1->size, ref1->max_size)
4179 && nskipped1 != nskipped2)
4180 return flags | ACCESS_PATH;
4182 /* Information about trailing refs is used by
4183 aliasing_component_refs_p, which is applied only if the paths
4184 have handled components. */
4185 if (!handled_component_p (c1) && !handled_component_p (c2))
4186 ;
4187 else if ((end_struct_ref1 != NULL) != (end_struct_ref2 != NULL))
4188 return flags | ACCESS_PATH;
4189 if (end_struct_ref1
4190 && TYPE_MAIN_VARIANT (TREE_TYPE (end_struct_ref1))
4191 != TYPE_MAIN_VARIANT (TREE_TYPE (end_struct_ref2)))
4192 return flags | ACCESS_PATH;
4194 /* Now compare all handled components of the access path.
4195 We have three oracles that care about access paths:
4196 - aliasing_component_refs_p
4197 - nonoverlapping_refs_since_match_p
4198 - nonoverlapping_component_refs_p
4199 We need to match the things these oracles compare.
4201 It is only necessary to check types for compatibility
4202 and offsets. The rest of what the oracles compare are actual
4203 addresses. Those are already known to be the same:
4204 - for constant accesses we check offsets
4205 - for variable accesses we already matched
4206 the path lexically with operand_equal_p. */
4207 while (true)
4209 bool comp1 = handled_component_p (c1);
4210 bool comp2 = handled_component_p (c2);
4212 if (comp1 != comp2)
4213 return flags | ACCESS_PATH;
4214 if (!comp1)
4215 break;
4217 if (TREE_CODE (c1) != TREE_CODE (c2))
4218 return flags | ACCESS_PATH;
4220 /* aliasing_component_refs_p attempts to find a type match within
4221 the paths. For that reason both types need to be equal
4222 with respect to same_type_for_tbaa_p. */
4223 if (!types_equal_for_same_type_for_tbaa_p (TREE_TYPE (c1),
4224 TREE_TYPE (c2),
4225 lto_streaming_safe))
4226 return flags | ACCESS_PATH;
4227 if (component_ref_to_zero_sized_trailing_array_p (c1)
4228 != component_ref_to_zero_sized_trailing_array_p (c2))
4229 return flags | ACCESS_PATH;
4231 /* aliasing_matching_component_refs_p compares
4232 offsets within the path. Other properties are ignored.
4233 Do not bother to verify offsets in variable accesses. Here we
4234 already compared them by operand_equal_p, so they are
4235 structurally the same. */
4236 if (!known_eq (ref1->size, ref1->max_size))
4238 poly_int64 offadj1, sztmc1, msztmc1;
4239 bool reverse1;
4240 get_ref_base_and_extent (c1, &offadj1, &sztmc1, &msztmc1, &reverse1);
4241 poly_int64 offadj2, sztmc2, msztmc2;
4242 bool reverse2;
4243 get_ref_base_and_extent (c2, &offadj2, &sztmc2, &msztmc2, &reverse2);
4244 if (!known_eq (offadj1, offadj2))
4245 return flags | ACCESS_PATH;
4247 c1 = TREE_OPERAND (c1, 0);
4248 c2 = TREE_OPERAND (c2, 0);
4250 /* Finally test the access type. */
4251 if (!types_equal_for_same_type_for_tbaa_p (TREE_TYPE (c1),
4252 TREE_TYPE (c2),
4253 lto_streaming_safe))
4254 return flags | ACCESS_PATH;
4255 return flags;
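/* A minimal sketch (not part of GCC) of consuming the result; the
   function name is made up.  Two refs can be treated as interchangeable
   for ICF-style merging only when no difference flag is returned.  */
#if 0
static bool
refs_interchangeable_p (ao_ref *ref1, ao_ref *ref2, bool lto_streaming_safe)
{
  ao_compare cmp;
  /* Any nonzero result names the diverging property: SEMANTICS,
     REF_ALIAS_SET, BASE_ALIAS_SET, ACCESS_PATH or DEPENDENCE_CLIQUE.  */
  return cmp.compare_ao_refs (ref1, ref2, lto_streaming_safe,
			      /*tbaa=*/true) == 0;
}
#endif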
4258 /* Hash REF to HSTATE. If LTO_STREAMING_SAFE do not use alias sets
4259 and canonical types. */
4260 void
4261 ao_compare::hash_ao_ref (ao_ref *ref, bool lto_streaming_safe, bool tbaa,
4262 inchash::hash &hstate)
4264 tree base = ao_ref_base (ref);
4265 tree tbase = base;
4267 if (!known_eq (ref->size, ref->max_size))
4269 tree r = ref->ref;
4270 if (TREE_CODE (r) == COMPONENT_REF
4271 && DECL_BIT_FIELD (TREE_OPERAND (r, 1)))
4273 tree field = TREE_OPERAND (r, 1);
4274 hash_operand (DECL_FIELD_OFFSET (field), hstate, 0);
4275 hash_operand (DECL_FIELD_BIT_OFFSET (field), hstate, 0);
4276 hash_operand (DECL_SIZE (field), hstate, 0);
4277 r = TREE_OPERAND (r, 0);
4279 if (TREE_CODE (r) == BIT_FIELD_REF)
4281 hash_operand (TREE_OPERAND (r, 1), hstate, 0);
4282 hash_operand (TREE_OPERAND (r, 2), hstate, 0);
4283 r = TREE_OPERAND (r, 0);
4285 hash_operand (TYPE_SIZE (TREE_TYPE (ref->ref)), hstate, 0);
4286 hash_operand (r, hstate, OEP_ADDRESS_OF | OEP_MATCH_SIDE_EFFECTS);
4288 else
4290 hash_operand (tbase, hstate, OEP_ADDRESS_OF | OEP_MATCH_SIDE_EFFECTS);
4291 hstate.add_poly_int (ref->offset);
4292 hstate.add_poly_int (ref->size);
4293 hstate.add_poly_int (ref->max_size);
4295 if (!lto_streaming_safe && tbaa)
4297 hstate.add_int (ao_ref_alias_set (ref));
4298 hstate.add_int (ao_ref_base_alias_set (ref));