gcc/tree-ssa-alias.cc
1 /* Alias analysis for trees.
2 Copyright (C) 2004-2022 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "timevar.h" /* for TV_ALIAS_STMT_WALK */
30 #include "ssa.h"
31 #include "cgraph.h"
32 #include "tree-pretty-print.h"
33 #include "alias.h"
34 #include "fold-const.h"
35 #include "langhooks.h"
36 #include "dumpfile.h"
37 #include "tree-eh.h"
38 #include "tree-dfa.h"
39 #include "ipa-reference.h"
40 #include "varasm.h"
41 #include "ipa-modref-tree.h"
42 #include "ipa-modref.h"
43 #include "attr-fnspec.h"
44 #include "errors.h"
45 #include "dbgcnt.h"
46 #include "gimple-pretty-print.h"
47 #include "print-tree.h"
48 #include "tree-ssa-alias-compare.h"
49 #include "builtins.h"
51 /* Broad overview of how alias analysis on gimple works:
53 Statements clobbering or using memory are linked through the
54 virtual operand factored use-def chain. The virtual operand
55 is unique per function, its symbol is accessible via gimple_vop (cfun).
56 Virtual operands are used for efficiently walking memory statements
57 in the gimple IL and are useful for things like value-numbering as
58 a generation count for memory references.
60 SSA_NAME pointers may have associated points-to information
61 accessible via the SSA_NAME_PTR_INFO macro. Flow-insensitive
62 points-to information is (re-)computed by the TODO_rebuild_alias
63 pass manager todo. Points-to information is also used for more
64 precise tracking of call-clobbered and call-used variables and
65 related disambiguations.
67 This file contains functions for disambiguating memory references,
68 the so-called alias oracle and tools for walking the gimple IL.
70 The main alias-oracle entry-points are
72 bool stmt_may_clobber_ref_p (gimple *, tree)
74 This function queries if a statement may invalidate (parts of)
75 the memory designated by the reference tree argument.
77 bool ref_maybe_used_by_stmt_p (gimple *, tree)
79 This function queries if a statement may need (parts of) the
80 memory designated by the reference tree argument.
82 There are variants of these functions that only handle the call
83 part of a statement, call_may_clobber_ref_p and ref_maybe_used_by_call_p.
84 Note that these do not disambiguate against a possible call lhs.
86 bool refs_may_alias_p (tree, tree)
88 This function tries to disambiguate two reference trees.
90 bool ptr_deref_may_alias_global_p (tree)
92 This function queries if dereferencing a pointer variable may
93 alias global memory.
95 More low-level disambiguators are available and documented in
96 this file. Low-level disambiguators dealing with points-to
97 information are in tree-ssa-structalias.cc. */
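
/* Illustrative sketch (not part of this file; the wrapper below is
   hypothetical): a pass holding a statement STMT and a reference REF
   would typically consult the oracle entry points listed above as
   follows.  */
#if 0
static void
example_oracle_queries (gimple *stmt, tree ref)
{
  if (stmt_may_clobber_ref_p (stmt, ref))
    /* STMT may invalidate (parts of) the memory designated by REF;
       a cached value of REF cannot be kept live across STMT.  */;
  if (ref_maybe_used_by_stmt_p (stmt, ref))
    /* STMT may read (parts of) REF; a store to REF cannot be sunk
       across STMT.  */;
}
#endif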
99 static int nonoverlapping_refs_since_match_p (tree, tree, tree, tree, bool);
100 static bool nonoverlapping_component_refs_p (const_tree, const_tree);
102 /* Query statistics for the different low-level disambiguators.
103 A high-level query may trigger multiple of them. */
105 static struct {
106 unsigned HOST_WIDE_INT refs_may_alias_p_may_alias;
107 unsigned HOST_WIDE_INT refs_may_alias_p_no_alias;
108 unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_may_alias;
109 unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_no_alias;
110 unsigned HOST_WIDE_INT call_may_clobber_ref_p_may_alias;
111 unsigned HOST_WIDE_INT call_may_clobber_ref_p_no_alias;
112 unsigned HOST_WIDE_INT aliasing_component_refs_p_may_alias;
113 unsigned HOST_WIDE_INT aliasing_component_refs_p_no_alias;
114 unsigned HOST_WIDE_INT nonoverlapping_component_refs_p_may_alias;
115 unsigned HOST_WIDE_INT nonoverlapping_component_refs_p_no_alias;
116 unsigned HOST_WIDE_INT nonoverlapping_refs_since_match_p_may_alias;
117 unsigned HOST_WIDE_INT nonoverlapping_refs_since_match_p_must_overlap;
118 unsigned HOST_WIDE_INT nonoverlapping_refs_since_match_p_no_alias;
119 unsigned HOST_WIDE_INT stmt_kills_ref_p_no;
120 unsigned HOST_WIDE_INT stmt_kills_ref_p_yes;
121 unsigned HOST_WIDE_INT modref_use_may_alias;
122 unsigned HOST_WIDE_INT modref_use_no_alias;
123 unsigned HOST_WIDE_INT modref_clobber_may_alias;
124 unsigned HOST_WIDE_INT modref_clobber_no_alias;
125 unsigned HOST_WIDE_INT modref_kill_no;
126 unsigned HOST_WIDE_INT modref_kill_yes;
127 unsigned HOST_WIDE_INT modref_tests;
128 unsigned HOST_WIDE_INT modref_baseptr_tests;
129 } alias_stats;
131 void
132 dump_alias_stats (FILE *s)
134 fprintf (s, "\nAlias oracle query stats:\n");
135 fprintf (s, " refs_may_alias_p: "
136 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
137 HOST_WIDE_INT_PRINT_DEC" queries\n",
138 alias_stats.refs_may_alias_p_no_alias,
139 alias_stats.refs_may_alias_p_no_alias
140 + alias_stats.refs_may_alias_p_may_alias);
141 fprintf (s, " ref_maybe_used_by_call_p: "
142 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
143 HOST_WIDE_INT_PRINT_DEC" queries\n",
144 alias_stats.ref_maybe_used_by_call_p_no_alias,
145 alias_stats.ref_maybe_used_by_call_p_no_alias
146 + alias_stats.ref_maybe_used_by_call_p_may_alias);
147 fprintf (s, " call_may_clobber_ref_p: "
148 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
149 HOST_WIDE_INT_PRINT_DEC" queries\n",
150 alias_stats.call_may_clobber_ref_p_no_alias,
151 alias_stats.call_may_clobber_ref_p_no_alias
152 + alias_stats.call_may_clobber_ref_p_may_alias);
153 fprintf (s, " stmt_kills_ref_p: "
154 HOST_WIDE_INT_PRINT_DEC" kills, "
155 HOST_WIDE_INT_PRINT_DEC" queries\n",
156 alias_stats.stmt_kills_ref_p_yes + alias_stats.modref_kill_yes,
157 alias_stats.stmt_kills_ref_p_yes + alias_stats.modref_kill_yes
158 + alias_stats.stmt_kills_ref_p_no + alias_stats.modref_kill_no);
159 fprintf (s, " nonoverlapping_component_refs_p: "
160 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
161 HOST_WIDE_INT_PRINT_DEC" queries\n",
162 alias_stats.nonoverlapping_component_refs_p_no_alias,
163 alias_stats.nonoverlapping_component_refs_p_no_alias
164 + alias_stats.nonoverlapping_component_refs_p_may_alias);
165 fprintf (s, " nonoverlapping_refs_since_match_p: "
166 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
167 HOST_WIDE_INT_PRINT_DEC" must overlaps, "
168 HOST_WIDE_INT_PRINT_DEC" queries\n",
169 alias_stats.nonoverlapping_refs_since_match_p_no_alias,
170 alias_stats.nonoverlapping_refs_since_match_p_must_overlap,
171 alias_stats.nonoverlapping_refs_since_match_p_no_alias
172 + alias_stats.nonoverlapping_refs_since_match_p_may_alias
173 + alias_stats.nonoverlapping_refs_since_match_p_must_overlap);
174 fprintf (s, " aliasing_component_refs_p: "
175 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
176 HOST_WIDE_INT_PRINT_DEC" queries\n",
177 alias_stats.aliasing_component_refs_p_no_alias,
178 alias_stats.aliasing_component_refs_p_no_alias
179 + alias_stats.aliasing_component_refs_p_may_alias);
180 dump_alias_stats_in_alias_c (s);
181 fprintf (s, "\nModref stats:\n");
182 fprintf (s, " modref kill: "
183 HOST_WIDE_INT_PRINT_DEC" kills, "
184 HOST_WIDE_INT_PRINT_DEC" queries\n",
185 alias_stats.modref_kill_yes,
186 alias_stats.modref_kill_yes
187 + alias_stats.modref_kill_no);
188 fprintf (s, " modref use: "
189 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
190 HOST_WIDE_INT_PRINT_DEC" queries\n",
191 alias_stats.modref_use_no_alias,
192 alias_stats.modref_use_no_alias
193 + alias_stats.modref_use_may_alias);
194 fprintf (s, " modref clobber: "
195 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
196 HOST_WIDE_INT_PRINT_DEC" queries\n"
197 " " HOST_WIDE_INT_PRINT_DEC" tbaa queries (%f per modref query)\n"
198 " " HOST_WIDE_INT_PRINT_DEC" base compares (%f per modref query)\n",
199 alias_stats.modref_clobber_no_alias,
200 alias_stats.modref_clobber_no_alias
201 + alias_stats.modref_clobber_may_alias,
202 alias_stats.modref_tests,
203 ((double)alias_stats.modref_tests)
204 / (alias_stats.modref_clobber_no_alias
205 + alias_stats.modref_clobber_may_alias),
206 alias_stats.modref_baseptr_tests,
207 ((double)alias_stats.modref_baseptr_tests)
208 / (alias_stats.modref_clobber_no_alias
209 + alias_stats.modref_clobber_may_alias));
213 /* Return true if dereferencing PTR may alias a global variable. */
215 bool
216 ptr_deref_may_alias_global_p (tree ptr)
218 struct ptr_info_def *pi;
220 /* If we end up with a pointer constant here, it may point
221 to global memory. */
222 if (TREE_CODE (ptr) != SSA_NAME)
223 return true;
225 pi = SSA_NAME_PTR_INFO (ptr);
227 /* If we do not have points-to information for this variable,
228 we have to punt. */
229 if (!pi)
230 return true;
232 /* ??? This does not use TBAA to prune globals ptr may not access. */
233 return pt_solution_includes_global (&pi->pt);
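
/* Illustration (hypothetical user code, not part of GCC): without
   points-to information the answer must be conservative.  */
#if 0
int global_var;
extern int *get_ptr (void);

void
example (void)
{
  int *p = get_ptr ();	/* No points-to info: *p may alias global_var.  */
  *p = 1;
}
#endif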
236 /* Return true if dereferencing PTR may alias DECL.
237 The caller is responsible for applying TBAA to see if PTR
238 may access DECL at all. */
240 static bool
241 ptr_deref_may_alias_decl_p (tree ptr, tree decl)
243 struct ptr_info_def *pi;
245 /* Conversions are irrelevant for points-to information and
246 data-dependence analysis can feed us those. */
247 STRIP_NOPS (ptr);
249 /* Anything we do not explicitly handle aliases. */
250 if ((TREE_CODE (ptr) != SSA_NAME
251 && TREE_CODE (ptr) != ADDR_EXPR
252 && TREE_CODE (ptr) != POINTER_PLUS_EXPR)
253 || !POINTER_TYPE_P (TREE_TYPE (ptr))
254 || (!VAR_P (decl)
255 && TREE_CODE (decl) != PARM_DECL
256 && TREE_CODE (decl) != RESULT_DECL))
257 return true;
259 /* Disregard pointer offsetting. */
260 if (TREE_CODE (ptr) == POINTER_PLUS_EXPR)
264 ptr = TREE_OPERAND (ptr, 0);
266 while (TREE_CODE (ptr) == POINTER_PLUS_EXPR);
267 return ptr_deref_may_alias_decl_p (ptr, decl);
270 /* ADDR_EXPR pointers either just offset another pointer or directly
271 specify the pointed-to set. */
272 if (TREE_CODE (ptr) == ADDR_EXPR)
274 tree base = get_base_address (TREE_OPERAND (ptr, 0));
275 if (base
276 && (TREE_CODE (base) == MEM_REF
277 || TREE_CODE (base) == TARGET_MEM_REF))
278 ptr = TREE_OPERAND (base, 0);
279 else if (base
280 && DECL_P (base))
281 return compare_base_decls (base, decl) != 0;
282 else if (base
283 && CONSTANT_CLASS_P (base))
284 return false;
285 else
286 return true;
289 /* Non-aliased variables cannot be pointed to. */
290 if (!may_be_aliased (decl))
291 return false;
293 /* If we do not have useful points-to information for this pointer
294 we cannot disambiguate anything else. */
295 pi = SSA_NAME_PTR_INFO (ptr);
296 if (!pi)
297 return true;
299 return pt_solution_includes (&pi->pt, decl);
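
/* Illustration (hypothetical user code): a local whose address is never
   taken cannot be aliased, so may_be_aliased returns false for it and
   the dereference below cannot touch it.  */
#if 0
int
example (int *p)
{
  int local = 1;	/* Address never taken: !may_be_aliased.  */
  *p = 2;		/* Cannot modify LOCAL.  */
  return local;		/* Known to be 1.  */
}
#endif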
302 /* Return true if dereferenced PTR1 and PTR2 may alias.
303 The caller is responsible for applying TBAA to see if accesses
304 through PTR1 and PTR2 may conflict at all. */
306 bool
307 ptr_derefs_may_alias_p (tree ptr1, tree ptr2)
309 struct ptr_info_def *pi1, *pi2;
311 /* Conversions are irrelevant for points-to information and
312 data-dependence analysis can feed us those. */
313 STRIP_NOPS (ptr1);
314 STRIP_NOPS (ptr2);
316 /* Disregard pointer offsetting. */
317 if (TREE_CODE (ptr1) == POINTER_PLUS_EXPR)
321 ptr1 = TREE_OPERAND (ptr1, 0);
323 while (TREE_CODE (ptr1) == POINTER_PLUS_EXPR);
324 return ptr_derefs_may_alias_p (ptr1, ptr2);
326 if (TREE_CODE (ptr2) == POINTER_PLUS_EXPR)
330 ptr2 = TREE_OPERAND (ptr2, 0);
332 while (TREE_CODE (ptr2) == POINTER_PLUS_EXPR);
333 return ptr_derefs_may_alias_p (ptr1, ptr2);
336 /* ADDR_EXPR pointers either just offset another pointer or directly
337 specify the pointed-to set. */
338 if (TREE_CODE (ptr1) == ADDR_EXPR)
340 tree base = get_base_address (TREE_OPERAND (ptr1, 0));
341 if (base
342 && (TREE_CODE (base) == MEM_REF
343 || TREE_CODE (base) == TARGET_MEM_REF))
344 return ptr_derefs_may_alias_p (TREE_OPERAND (base, 0), ptr2);
345 else if (base
346 && DECL_P (base))
347 return ptr_deref_may_alias_decl_p (ptr2, base);
348 else
349 return true;
351 if (TREE_CODE (ptr2) == ADDR_EXPR)
353 tree base = get_base_address (TREE_OPERAND (ptr2, 0));
354 if (base
355 && (TREE_CODE (base) == MEM_REF
356 || TREE_CODE (base) == TARGET_MEM_REF))
357 return ptr_derefs_may_alias_p (ptr1, TREE_OPERAND (base, 0));
358 else if (base
359 && DECL_P (base))
360 return ptr_deref_may_alias_decl_p (ptr1, base);
361 else
362 return true;
365 /* From here we require SSA name pointers. Anything else aliases. */
366 if (TREE_CODE (ptr1) != SSA_NAME
367 || TREE_CODE (ptr2) != SSA_NAME
368 || !POINTER_TYPE_P (TREE_TYPE (ptr1))
369 || !POINTER_TYPE_P (TREE_TYPE (ptr2)))
370 return true;
372 /* We may end up with two empty points-to solutions for two same pointers.
373 In this case we still want to say both pointers alias, so shortcut
374 that here. */
375 if (ptr1 == ptr2)
376 return true;
378 /* If we do not have useful points-to information for either pointer
379 we cannot disambiguate anything else. */
380 pi1 = SSA_NAME_PTR_INFO (ptr1);
381 pi2 = SSA_NAME_PTR_INFO (ptr2);
382 if (!pi1 || !pi2)
383 return true;
385 /* ??? This does not use TBAA to prune decls from the intersection
386 that not both pointers may access. */
387 return pt_solutions_intersect (&pi1->pt, &pi2->pt);
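
/* Illustration (hypothetical user code): two SSA pointers with disjoint
   points-to solutions do not alias.  */
#if 0
int a, b;

int
example (void)
{
  int *p = &a;	/* points-to = { a } */
  int *q = &b;	/* points-to = { b } */
  *p = 1;
  *q = 2;	/* Disjoint solutions: this store cannot change *p.  */
  return *p;	/* Known to be 1.  */
}
#endif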
390 /* Return true if dereferencing PTR may alias *REF.
391 The caller is responsible for applying TBAA to see if PTR
392 may access *REF at all. */
394 static bool
395 ptr_deref_may_alias_ref_p_1 (tree ptr, ao_ref *ref)
397 tree base = ao_ref_base (ref);
399 if (TREE_CODE (base) == MEM_REF
400 || TREE_CODE (base) == TARGET_MEM_REF)
401 return ptr_derefs_may_alias_p (ptr, TREE_OPERAND (base, 0));
402 else if (DECL_P (base))
403 return ptr_deref_may_alias_decl_p (ptr, base);
405 return true;
408 /* Returns true if PTR1 and PTR2 compare unequal because of points-to. */
410 bool
411 ptrs_compare_unequal (tree ptr1, tree ptr2)
413 /* First resolve the pointers down to an SSA name pointer base or
414 a VAR_DECL, PARM_DECL or RESULT_DECL. This explicitly does
415 not yet try to handle LABEL_DECLs, FUNCTION_DECLs, CONST_DECLs
416 or STRING_CSTs which need points-to adjustments to track them
417 in the points-to sets. */
418 tree obj1 = NULL_TREE;
419 tree obj2 = NULL_TREE;
420 if (TREE_CODE (ptr1) == ADDR_EXPR)
422 tree tem = get_base_address (TREE_OPERAND (ptr1, 0));
423 if (! tem)
424 return false;
425 if (VAR_P (tem)
426 || TREE_CODE (tem) == PARM_DECL
427 || TREE_CODE (tem) == RESULT_DECL)
428 obj1 = tem;
429 else if (TREE_CODE (tem) == MEM_REF)
430 ptr1 = TREE_OPERAND (tem, 0);
432 if (TREE_CODE (ptr2) == ADDR_EXPR)
434 tree tem = get_base_address (TREE_OPERAND (ptr2, 0));
435 if (! tem)
436 return false;
437 if (VAR_P (tem)
438 || TREE_CODE (tem) == PARM_DECL
439 || TREE_CODE (tem) == RESULT_DECL)
440 obj2 = tem;
441 else if (TREE_CODE (tem) == MEM_REF)
442 ptr2 = TREE_OPERAND (tem, 0);
445 /* Canonicalize ptr vs. object. */
446 if (TREE_CODE (ptr1) == SSA_NAME && obj2)
448 std::swap (ptr1, ptr2);
449 std::swap (obj1, obj2);
452 if (obj1 && obj2)
453 /* Other code handles this correctly, no need to duplicate it here. */;
454 else if (obj1 && TREE_CODE (ptr2) == SSA_NAME)
456 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr2);
457 /* We may not use restrict to optimize pointer comparisons.
458 See PR71062. So we have to assume that restrict-pointed-to
459 may in fact be obj1. */
460 if (!pi
461 || pi->pt.vars_contains_restrict
462 || pi->pt.vars_contains_interposable)
463 return false;
464 if (VAR_P (obj1)
465 && (TREE_STATIC (obj1) || DECL_EXTERNAL (obj1)))
467 varpool_node *node = varpool_node::get (obj1);
468 /* If obj1 may bind to NULL give up (see below). */
469 if (! node
470 || ! node->nonzero_address ()
471 || ! decl_binds_to_current_def_p (obj1))
472 return false;
474 return !pt_solution_includes (&pi->pt, obj1);
477 /* ??? We'd like to handle ptr1 != NULL and ptr1 != ptr2
478 but those require pt.null to be conservatively correct. */
480 return false;
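
/* Illustration of the PR71062 caveat above (hypothetical user code):
   restrict qualification must not be used to fold pointer equality,
   since a restrict pointer may legitimately compare equal to another
   pointer as long as the object is not accessed through both.  */
#if 0
int g;

int
example (int *__restrict p)
{
  return p == &g;	/* Must not be folded to 0.  */
}
#endif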
483 /* Return true if a reference with base BASE may refer to global memory. */
485 static bool
486 ref_may_alias_global_p_1 (tree base)
488 if (DECL_P (base))
489 return is_global_var (base);
490 else if (TREE_CODE (base) == MEM_REF
491 || TREE_CODE (base) == TARGET_MEM_REF)
492 return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
493 return true;
496 bool
497 ref_may_alias_global_p (ao_ref *ref)
499 tree base = ao_ref_base (ref);
500 return ref_may_alias_global_p_1 (base);
503 bool
504 ref_may_alias_global_p (tree ref)
506 tree base = get_base_address (ref);
507 return ref_may_alias_global_p_1 (base);
510 /* Return true if STMT may clobber global memory. */
512 bool
513 stmt_may_clobber_global_p (gimple *stmt)
515 tree lhs;
517 if (!gimple_vdef (stmt))
518 return false;
520 /* ??? We can ask the oracle whether an artificial pointer
521 dereference with a pointer with points-to information covering
522 all global memory (what about non-address-taken memory?) may be
523 clobbered by this call. As there is at the moment no convenient
524 way of doing that without generating garbage do some manual
525 checking instead.
526 ??? We could make a NULL ao_ref argument to the various
527 predicates special, meaning any global memory. */
529 switch (gimple_code (stmt))
531 case GIMPLE_ASSIGN:
532 lhs = gimple_assign_lhs (stmt);
533 return (TREE_CODE (lhs) != SSA_NAME
534 && ref_may_alias_global_p (lhs));
535 case GIMPLE_CALL:
536 return true;
537 default:
538 return true;
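
/* Illustration (hypothetical user code): any call with a VDEF is
   conservatively assumed to clobber global memory.  */
#if 0
int g;
extern void opaque (void);

int
example (void)
{
  g = 1;
  opaque ();	/* stmt_may_clobber_global_p: true.  */
  return g;	/* Must be reloaded; the call may have changed it.  */
}
#endif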
543 /* Dump alias information on FILE. */
545 void
546 dump_alias_info (FILE *file)
548 unsigned i;
549 tree ptr;
550 const char *funcname
551 = lang_hooks.decl_printable_name (current_function_decl, 2);
552 tree var;
554 fprintf (file, "\n\nAlias information for %s\n\n", funcname);
556 fprintf (file, "Aliased symbols\n\n");
558 FOR_EACH_LOCAL_DECL (cfun, i, var)
560 if (may_be_aliased (var))
561 dump_variable (file, var);
564 fprintf (file, "\nCall clobber information\n");
566 fprintf (file, "\nESCAPED");
567 dump_points_to_solution (file, &cfun->gimple_df->escaped);
569 fprintf (file, "\n\nFlow-insensitive points-to information\n\n");
571 FOR_EACH_SSA_NAME (i, ptr, cfun)
573 struct ptr_info_def *pi;
575 if (!POINTER_TYPE_P (TREE_TYPE (ptr))
576 || SSA_NAME_IN_FREE_LIST (ptr))
577 continue;
579 pi = SSA_NAME_PTR_INFO (ptr);
580 if (pi)
581 dump_points_to_info_for (file, ptr);
584 fprintf (file, "\n");
588 /* Dump alias information on stderr. */
590 DEBUG_FUNCTION void
591 debug_alias_info (void)
593 dump_alias_info (stderr);
597 /* Dump the points-to set *PT into FILE. */
599 void
600 dump_points_to_solution (FILE *file, struct pt_solution *pt)
602 if (pt->anything)
603 fprintf (file, ", points-to anything");
605 if (pt->nonlocal)
606 fprintf (file, ", points-to non-local");
608 if (pt->escaped)
609 fprintf (file, ", points-to escaped");
611 if (pt->ipa_escaped)
612 fprintf (file, ", points-to unit escaped");
614 if (pt->null)
615 fprintf (file, ", points-to NULL");
617 if (pt->vars)
619 fprintf (file, ", points-to vars: ");
620 dump_decl_set (file, pt->vars);
621 if (pt->vars_contains_nonlocal
622 || pt->vars_contains_escaped
623 || pt->vars_contains_escaped_heap
624 || pt->vars_contains_restrict)
626 const char *comma = "";
627 fprintf (file, " (");
628 if (pt->vars_contains_nonlocal)
630 fprintf (file, "nonlocal");
631 comma = ", ";
633 if (pt->vars_contains_escaped)
635 fprintf (file, "%sescaped", comma);
636 comma = ", ";
638 if (pt->vars_contains_escaped_heap)
640 fprintf (file, "%sescaped heap", comma);
641 comma = ", ";
643 if (pt->vars_contains_restrict)
645 fprintf (file, "%srestrict", comma);
646 comma = ", ";
648 if (pt->vars_contains_interposable)
649 fprintf (file, "%sinterposable", comma);
650 fprintf (file, ")");
656 /* Unified dump function for pt_solution. */
658 DEBUG_FUNCTION void
659 debug (pt_solution &ref)
661 dump_points_to_solution (stderr, &ref);
664 DEBUG_FUNCTION void
665 debug (pt_solution *ptr)
667 if (ptr)
668 debug (*ptr);
669 else
670 fprintf (stderr, "<nil>\n");
674 /* Dump points-to information for SSA_NAME PTR into FILE. */
676 void
677 dump_points_to_info_for (FILE *file, tree ptr)
679 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
681 print_generic_expr (file, ptr, dump_flags);
683 if (pi)
684 dump_points_to_solution (file, &pi->pt);
685 else
686 fprintf (file, ", points-to anything");
688 fprintf (file, "\n");
692 /* Dump points-to information for VAR into stderr. */
694 DEBUG_FUNCTION void
695 debug_points_to_info_for (tree var)
697 dump_points_to_info_for (stderr, var);
701 /* Initializes the alias-oracle reference representation *R from REF. */
703 void
704 ao_ref_init (ao_ref *r, tree ref)
706 r->ref = ref;
707 r->base = NULL_TREE;
708 r->offset = 0;
709 r->size = -1;
710 r->max_size = -1;
711 r->ref_alias_set = -1;
712 r->base_alias_set = -1;
713 r->volatile_p = ref ? TREE_THIS_VOLATILE (ref) : false;
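
/* Sketch (hypothetical helper, not part of this file): an ao_ref is
   filled lazily; ao_ref_base below computes and caches base, offset and
   sizes on first use, and the alias sets are computed on demand too.  */
#if 0
static void
example_ao_ref_use (tree ref)
{
  ao_ref r;
  ao_ref_init (&r, ref);
  tree base = ao_ref_base (&r);		/* Fills r.base/r.offset/r.size.  */
  alias_set_type set = ao_ref_alias_set (&r);	/* Cached in r.ref_alias_set.  */
}
#endif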
716 /* Returns the base object of the memory reference *REF. */
718 tree
719 ao_ref_base (ao_ref *ref)
721 bool reverse;
723 if (ref->base)
724 return ref->base;
725 ref->base = get_ref_base_and_extent (ref->ref, &ref->offset, &ref->size,
726 &ref->max_size, &reverse);
727 return ref->base;
730 /* Returns the base object alias set of the memory reference *REF. */
732 alias_set_type
733 ao_ref_base_alias_set (ao_ref *ref)
735 tree base_ref;
736 if (ref->base_alias_set != -1)
737 return ref->base_alias_set;
738 if (!ref->ref)
739 return 0;
740 base_ref = ref->ref;
741 if (TREE_CODE (base_ref) == WITH_SIZE_EXPR)
742 base_ref = TREE_OPERAND (base_ref, 0);
743 while (handled_component_p (base_ref))
744 base_ref = TREE_OPERAND (base_ref, 0);
745 ref->base_alias_set = get_alias_set (base_ref);
746 return ref->base_alias_set;
749 /* Returns the reference alias set of the memory reference *REF. */
751 alias_set_type
752 ao_ref_alias_set (ao_ref *ref)
754 if (ref->ref_alias_set != -1)
755 return ref->ref_alias_set;
756 if (!ref->ref)
757 return 0;
758 ref->ref_alias_set = get_alias_set (ref->ref);
759 return ref->ref_alias_set;
762 /* Returns a type satisfying
763 get_deref_alias_set (type) == ao_ref_base_alias_set (REF). */
765 tree
766 ao_ref_base_alias_ptr_type (ao_ref *ref)
768 tree base_ref;
770 if (!ref->ref)
771 return NULL_TREE;
772 base_ref = ref->ref;
773 if (TREE_CODE (base_ref) == WITH_SIZE_EXPR)
774 base_ref = TREE_OPERAND (base_ref, 0);
775 while (handled_component_p (base_ref))
776 base_ref = TREE_OPERAND (base_ref, 0);
777 tree ret = reference_alias_ptr_type (base_ref);
778 return ret;
781 /* Returns a type satisfying
782 get_deref_alias_set (type) == ao_ref_alias_set (REF). */
784 tree
785 ao_ref_alias_ptr_type (ao_ref *ref)
787 if (!ref->ref)
788 return NULL_TREE;
789 tree ret = reference_alias_ptr_type (ref->ref);
790 return ret;
793 /* Return the alignment of the access *REF and store it in the *ALIGN
794 and *BITPOS pairs. Returns false if no alignment could be determined.
795 See get_object_alignment_2 for details. */
797 bool
798 ao_ref_alignment (ao_ref *ref, unsigned int *align,
799 unsigned HOST_WIDE_INT *bitpos)
801 if (ref->ref)
802 return get_object_alignment_1 (ref->ref, align, bitpos);
804 /* When we just have ref->base we cannot use get_object_alignment since
805 that will eventually use the type of the apparent access while for
806 example ao_ref_init_from_ptr_and_range is not careful to adjust that. */
807 *align = BITS_PER_UNIT;
808 HOST_WIDE_INT offset;
809 if (!ref->offset.is_constant (&offset)
810 || !get_object_alignment_2 (ref->base, align, bitpos, true))
811 return false;
812 *bitpos += (unsigned HOST_WIDE_INT)offset * BITS_PER_UNIT;
813 *bitpos = *bitpos & (*align - 1);
814 return true;
817 /* Init an alias-oracle reference representation from a gimple pointer
818 PTR and a range specified by OFFSET, SIZE and MAX_SIZE under the
819 assumption that RANGE_KNOWN is set.
821 The access is assumed to be only to or after the pointer target adjusted
822 by the offset, not before it (even in the case RANGE_KNOWN is false). */
824 void
825 ao_ref_init_from_ptr_and_range (ao_ref *ref, tree ptr,
826 bool range_known,
827 poly_int64 offset,
828 poly_int64 size,
829 poly_int64 max_size)
831 poly_int64 t, extra_offset = 0;
833 ref->ref = NULL_TREE;
834 if (TREE_CODE (ptr) == SSA_NAME)
836 gimple *stmt = SSA_NAME_DEF_STMT (ptr);
837 if (gimple_assign_single_p (stmt)
838 && gimple_assign_rhs_code (stmt) == ADDR_EXPR)
839 ptr = gimple_assign_rhs1 (stmt);
840 else if (is_gimple_assign (stmt)
841 && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
842 && ptrdiff_tree_p (gimple_assign_rhs2 (stmt), &extra_offset))
844 ptr = gimple_assign_rhs1 (stmt);
845 extra_offset *= BITS_PER_UNIT;
849 if (TREE_CODE (ptr) == ADDR_EXPR)
851 ref->base = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &t);
852 if (ref->base)
853 ref->offset = BITS_PER_UNIT * t;
854 else
856 range_known = false;
857 ref->offset = 0;
858 ref->base = get_base_address (TREE_OPERAND (ptr, 0));
861 else
863 gcc_assert (POINTER_TYPE_P (TREE_TYPE (ptr)));
864 ref->base = build2 (MEM_REF, char_type_node,
865 ptr, null_pointer_node);
866 ref->offset = 0;
868 ref->offset += extra_offset + offset;
869 if (range_known)
871 ref->max_size = max_size;
872 ref->size = size;
874 else
875 ref->max_size = ref->size = -1;
876 ref->ref_alias_set = 0;
877 ref->base_alias_set = 0;
878 ref->volatile_p = false;
881 /* Init an alias-oracle reference representation from a gimple pointer
882 PTR and a gimple size SIZE in bytes. If SIZE is NULL_TREE then the
883 size is assumed to be unknown. The access is assumed to be only
884 to or after the pointer target, not before it. */
886 void
887 ao_ref_init_from_ptr_and_size (ao_ref *ref, tree ptr, tree size)
889 poly_int64 size_hwi;
890 if (size
891 && poly_int_tree_p (size, &size_hwi)
892 && coeffs_in_range_p (size_hwi, 0, HOST_WIDE_INT_MAX / BITS_PER_UNIT))
894 size_hwi = size_hwi * BITS_PER_UNIT;
895 ao_ref_init_from_ptr_and_range (ref, ptr, true, 0, size_hwi, size_hwi);
897 else
898 ao_ref_init_from_ptr_and_range (ref, ptr, false, 0, -1, -1);
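
/* Sketch (hypothetical use, not part of this file): builtins like memset
   only provide a pointer and a byte size, which is how this entry point
   is typically used.  */
#if 0
static void
example_builtin_ref (gcall *call)
{
  ao_ref dref;
  /* Describe the destination of memset (dest, val, len).  */
  ao_ref_init_from_ptr_and_size (&dref,
				 gimple_call_arg (call, 0),
				 gimple_call_arg (call, 2));
}
#endif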
901 /* S1 and S2 are TYPE_SIZE or DECL_SIZE. Compare them:
902 Return -1 if S1 < S2
903 Return 1 if S1 > S2
904 Return 0 if equal or incomparable. */
906 static int
907 compare_sizes (tree s1, tree s2)
909 if (!s1 || !s2)
910 return 0;
912 poly_uint64 size1;
913 poly_uint64 size2;
915 if (!poly_int_tree_p (s1, &size1) || !poly_int_tree_p (s2, &size2))
916 return 0;
917 if (known_lt (size1, size2))
918 return -1;
919 if (known_lt (size2, size1))
920 return 1;
921 return 0;
924 /* Compare TYPE1 and TYPE2 by its size.
925 Return -1 if size of TYPE1 < size of TYPE2
926 Return 1 if size of TYPE1 > size of TYPE2
927 Return 0 if types are of equal sizes or we can not compare them. */
929 static int
930 compare_type_sizes (tree type1, tree type2)
932 /* Be conservative for arrays and vectors. We want to support partial
933 overlap on int[3] and int[3] as tested in gcc.dg/torture/alias-2.c. */
934 while (TREE_CODE (type1) == ARRAY_TYPE
935 || TREE_CODE (type1) == VECTOR_TYPE)
936 type1 = TREE_TYPE (type1);
937 while (TREE_CODE (type2) == ARRAY_TYPE
938 || TREE_CODE (type2) == VECTOR_TYPE)
939 type2 = TREE_TYPE (type2);
940 return compare_sizes (TYPE_SIZE (type1), TYPE_SIZE (type2));
943 /* Return 1 if TYPE1 and TYPE2 are to be considered equivalent for the
944 purpose of TBAA. Return 0 if they are distinct and -1 if we cannot
945 decide. */
947 static inline int
948 same_type_for_tbaa (tree type1, tree type2)
950 type1 = TYPE_MAIN_VARIANT (type1);
951 type2 = TYPE_MAIN_VARIANT (type2);
953 /* Handle the most common case first. */
954 if (type1 == type2)
955 return 1;
957 /* If we would have to do structural comparison bail out. */
958 if (TYPE_STRUCTURAL_EQUALITY_P (type1)
959 || TYPE_STRUCTURAL_EQUALITY_P (type2))
960 return -1;
962 /* Compare the canonical types. */
963 if (TYPE_CANONICAL (type1) == TYPE_CANONICAL (type2))
964 return 1;
966 /* ??? Array types are not properly unified in all cases as we have
967 spurious changes in the index types for example. Removing this
968 causes all sorts of problems with the Fortran frontend. */
969 if (TREE_CODE (type1) == ARRAY_TYPE
970 && TREE_CODE (type2) == ARRAY_TYPE)
971 return -1;
973 /* ??? In Ada, an lvalue of an unconstrained type can be used to access an
974 object of one of its constrained subtypes, e.g. when a function with an
975 unconstrained parameter passed by reference is called on an object and
976 inlined. But, even in the case of a fixed size, type and subtypes are
977 not equivalent enough as to share the same TYPE_CANONICAL, since this
978 would mean that conversions between them are useless, whereas they are
979 not (e.g. type and subtypes can have different modes). So, in the end,
980 they are only guaranteed to have the same alias set. */
981 alias_set_type set1 = get_alias_set (type1);
982 alias_set_type set2 = get_alias_set (type2);
983 if (set1 == set2)
984 return -1;
986 /* Pointers to void are considered compatible with all other pointers,
987 so for two pointers see what the alias set resolution thinks. */
988 if (POINTER_TYPE_P (type1)
989 && POINTER_TYPE_P (type2)
990 && alias_sets_conflict_p (set1, set2))
991 return -1;
993 /* The types are known to be not equal. */
994 return 0;
997 /* Return true if TYPE is a composite type (i.e. we may apply one of the
998 handled components to it). */
1000 static bool
1001 type_has_components_p (tree type)
1003 return AGGREGATE_TYPE_P (type) || VECTOR_TYPE_P (type)
1004 || TREE_CODE (type) == COMPLEX_TYPE;
1007 /* MATCH1 and MATCH2, which are part of the access paths of REF1 and REF2
1008 respectively, either point to the same address or are completely
1009 disjoint. If PARTIAL_OVERLAP is true, assume that the outermost arrays
1010 may just partly overlap.
1012 Try to disambiguate using the access path starting from the match
1013 and return false if there is no conflict.
1015 Helper for aliasing_component_refs_p. */
1017 static bool
1018 aliasing_matching_component_refs_p (tree match1, tree ref1,
1019 poly_int64 offset1, poly_int64 max_size1,
1020 tree match2, tree ref2,
1021 poly_int64 offset2, poly_int64 max_size2,
1022 bool partial_overlap)
1024 poly_int64 offadj, sztmp, msztmp;
1025 bool reverse;
1027 if (!partial_overlap)
1029 get_ref_base_and_extent (match2, &offadj, &sztmp, &msztmp, &reverse);
1030 offset2 -= offadj;
1031 get_ref_base_and_extent (match1, &offadj, &sztmp, &msztmp, &reverse);
1032 offset1 -= offadj;
1033 if (!ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
1035 ++alias_stats.aliasing_component_refs_p_no_alias;
1036 return false;
1040 int cmp = nonoverlapping_refs_since_match_p (match1, ref1, match2, ref2,
1041 partial_overlap);
1042 if (cmp == 1
1043 || (cmp == -1 && nonoverlapping_component_refs_p (ref1, ref2)))
1045 ++alias_stats.aliasing_component_refs_p_no_alias;
1046 return false;
1048 ++alias_stats.aliasing_component_refs_p_may_alias;
1049 return true;
1052 /* Return true if REF is a reference to a zero-sized trailing array, i.e.
1053 struct foo {int bar; int array[0];} *fooptr;
1054 fooptr->array. */
1056 static bool
1057 component_ref_to_zero_sized_trailing_array_p (tree ref)
1059 return (TREE_CODE (ref) == COMPONENT_REF
1060 && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 1))) == ARRAY_TYPE
1061 && (!TYPE_SIZE (TREE_TYPE (TREE_OPERAND (ref, 1)))
1062 || integer_zerop (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (ref, 1)))))
1063 && array_at_struct_end_p (ref));
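
/* The comment above in compilable form (hypothetical user code):  */
#if 0
struct foo { int bar; int array[0]; } *fooptr;

int
example (void)
{
  /* component_ref_to_zero_sized_trailing_array_p is true for
     fooptr->array: the field type has zero size but the access
     may still have non-zero extent.  */
  return fooptr->array[2];
}
#endif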
1066 /* Worker for aliasing_component_refs_p. Most parameters match parameters of
1067 aliasing_component_refs_p.
1069 Walk access path REF2 and try to find type matching TYPE1
1070 (which is a start of possibly aliasing access path REF1).
1071 If match is found, try to disambiguate.
1073 Return 0 for successful disambiguation.
1074 Return 1 if a match was found but disambiguation failed.
1075 Return -1 if there is no match.
1076 In this case MAYBE_MATCH is set to 0 if there is no type matching TYPE1
1077 in access path REF2 and -1 if we are not sure. */
1079 static int
1080 aliasing_component_refs_walk (tree ref1, tree type1, tree base1,
1081 poly_int64 offset1, poly_int64 max_size1,
1082 tree end_struct_ref1,
1083 tree ref2, tree base2,
1084 poly_int64 offset2, poly_int64 max_size2,
1085 bool *maybe_match)
1087 tree ref = ref2;
1088 int same_p = 0;
1090 while (true)
1092 /* We walk from the inner type to the outer types. If the type we see is
1093 already too large to be part of type1, terminate the search. */
1094 int cmp = compare_type_sizes (type1, TREE_TYPE (ref));
1096 if (cmp < 0
1097 && (!end_struct_ref1
1098 || compare_type_sizes (TREE_TYPE (end_struct_ref1),
1099 TREE_TYPE (ref)) < 0))
1100 break;
1101 /* If the types may be of the same size, see if we can decide about
1102 their equality. */
1103 if (cmp == 0)
1105 same_p = same_type_for_tbaa (TREE_TYPE (ref), type1);
1106 if (same_p == 1)
1107 break;
1108 /* In case we can't decide whether the types are the same, try to
1109 continue looking for an exact match.
1110 Remember however that we possibly saw a match,
1111 to bypass the access path continuation tests we do later. */
1112 if (same_p == -1)
1113 *maybe_match = true;
1115 if (!handled_component_p (ref))
1116 break;
1117 ref = TREE_OPERAND (ref, 0);
1119 if (same_p == 1)
1121 bool partial_overlap = false;
1123 /* We assume that arrays can overlap by a multiple of their element
1124 size as tested in gcc.dg/torture/alias-2.c.
1125 This partial overlap happens only when both arrays are bases of
1126 the access and not contained within another component ref.
1127 To be safe we also assume partial overlap for VLAs. */
1128 if (TREE_CODE (TREE_TYPE (base1)) == ARRAY_TYPE
1129 && (!TYPE_SIZE (TREE_TYPE (base1))
1130 || TREE_CODE (TYPE_SIZE (TREE_TYPE (base1))) != INTEGER_CST
1131 || ref == base2))
1133 /* Setting maybe_match to true triggers the
1134 nonoverlapping_component_refs_p test later that still may do
1135 useful disambiguation. */
1136 *maybe_match = true;
1137 partial_overlap = true;
1139 return aliasing_matching_component_refs_p (base1, ref1,
1140 offset1, max_size1,
1141 ref, ref2,
1142 offset2, max_size2,
1143 partial_overlap);
1145 return -1;
1148 /* Consider access path1 base1....ref1 and access path2 base2...ref2.
1149 Return true if they can be composed to single access path
1150 base1...ref1...base2...ref2.
1152 REF_TYPE1 is the type of REF1. END_STRUCT_PAST_END1 is true if there is
1153 a trailing array access after REF1 in the non-TBAA part of the access.
1154 REF1_ALIAS_SET is the alias set of REF1.
1156 BASE_TYPE2 is type of base2. END_STRUCT_REF2 is non-NULL if there is
1157 a trailing array access in the TBAA part of access path2.
1158 BASE2_ALIAS_SET is the alias set of base2. */
1160 bool
1161 access_path_may_continue_p (tree ref_type1, bool end_struct_past_end1,
1162 alias_set_type ref1_alias_set,
1163 tree base_type2, tree end_struct_ref2,
1164 alias_set_type base2_alias_set)
1166 /* Access path can not continue past types with no components. */
1167 if (!type_has_components_p (ref_type1))
1168 return false;
1170 /* If the first access path ends with a type too small to hold the base
1171 of the second access path, the paths typically can not continue.
1173 Punt if end_struct_past_end1 is true. We want to support arbitrary
1174 type punning past the first COMPONENT_REF to a union because redundant
1175 store elimination depends on this, see PR92152. For this reason we can
1176 not check the size of the reference because types may partially overlap. */
1177 if (!end_struct_past_end1)
1179 if (compare_type_sizes (ref_type1, base_type2) < 0)
1180 return false;
1181 /* If path2 contains a trailing array access we can strengthen the check
1182 to verify that the size of the element of the trailing array fits too.
1183 In fact we could check for offset + type_size, but we do not track
1184 offsets and this is quite a corner case. */
1185 if (end_struct_ref2
1186 && compare_type_sizes (ref_type1, TREE_TYPE (end_struct_ref2)) < 0)
1187 return false;
1189 return (base2_alias_set == ref1_alias_set
1190 || alias_set_subset_of (base2_alias_set, ref1_alias_set));
1193 /* Determine if the two component references REF1 and REF2 which are
1194 based on access types TYPE1 and TYPE2 and of which at least one is based
1195 on an indirect reference may alias.
1196 REF1_ALIAS_SET, BASE1_ALIAS_SET, REF2_ALIAS_SET and BASE2_ALIAS_SET
1197 are the respective alias sets. */
1199 static bool
1200 aliasing_component_refs_p (tree ref1,
1201 alias_set_type ref1_alias_set,
1202 alias_set_type base1_alias_set,
1203 poly_int64 offset1, poly_int64 max_size1,
1204 tree ref2,
1205 alias_set_type ref2_alias_set,
1206 alias_set_type base2_alias_set,
1207 poly_int64 offset2, poly_int64 max_size2)
1209 /* If one reference is a component references through pointers try to find a
1210 common base and apply offset based disambiguation. This handles
1211 for example
1212 struct A { int i; int j; } *q;
1213 struct B { struct A a; int k; } *p;
1214 disambiguating q->i and p->a.j. */
1215 tree base1, base2;
1216 tree type1, type2;
1217 bool maybe_match = false;
1218 tree end_struct_ref1 = NULL, end_struct_ref2 = NULL;
1219 bool end_struct_past_end1 = false;
1220 bool end_struct_past_end2 = false;
1222 /* Choose bases and base types to search for.
1223 The access path is as follows:
1224 base....end_of_tbaa_ref...actual_ref
1225 At one place in the access path there may be a reference to a
1226 zero-sized or trailing array.
1228 We generally discard the segment after end_of_tbaa_ref; however,
1229 we need to be careful in case it contains a zero-sized or trailing array.
1230 These may happen after a reference to a union and in this case we must
1231 not disambiguate type-punning scenarios.
1233 We set:
1234 base1 to point to base
1236 ref1 to point to end_of_tbaa_ref
1238 end_struct_ref1 to point to the trailing reference (if it exists
1239 in the range base....end_of_tbaa_ref)
1241 end_struct_past_end1 is true if this trailing reference occurs in
1242 end_of_tbaa_ref...actual_ref. */
1243 base1 = ref1;
1244 while (handled_component_p (base1))
1246 /* Generally access paths are monotonic in the size of the object. The
1247 exception are trailing arrays of structures, i.e.
1248 struct a {int array[0];};
1249 or
1250 struct a {int array1[0]; int array[];};
1251 Such struct has size 0 but accesses to a.array may have non-zero size.
1252 In this case the size of TREE_TYPE (base1) is smaller than
1253 size of TREE_TYPE (TREE_OPERAND (base1, 0)).
1255 Because we compare sizes of arrays just by sizes of their elements,
1256 we only need to care about zero sized array fields here. */
1257 if (component_ref_to_zero_sized_trailing_array_p (base1))
1259 gcc_checking_assert (!end_struct_ref1);
1260 end_struct_ref1 = base1;
1262 if (ends_tbaa_access_path_p (base1))
1264 ref1 = TREE_OPERAND (base1, 0);
1265 if (end_struct_ref1)
1267 end_struct_past_end1 = true;
1268 end_struct_ref1 = NULL;
1271 base1 = TREE_OPERAND (base1, 0);
1273 type1 = TREE_TYPE (base1);
1274 base2 = ref2;
1275 while (handled_component_p (base2))
1277 if (component_ref_to_zero_sized_trailing_array_p (base2))
1279 gcc_checking_assert (!end_struct_ref2);
1280 end_struct_ref2 = base2;
1282 if (ends_tbaa_access_path_p (base2))
1284 ref2 = TREE_OPERAND (base2, 0);
1285 if (end_struct_ref2)
1287 end_struct_past_end2 = true;
1288 end_struct_ref2 = NULL;
1291 base2 = TREE_OPERAND (base2, 0);
1293 type2 = TREE_TYPE (base2);
1295 /* Now search for type1 in the access path of ref2. This
1296 would be a common base for doing offset based disambiguation on.
1297 This however only makes sense if type2 is big enough to hold type1. */
1298 int cmp_outer = compare_type_sizes (type2, type1);
1300 /* If type2 is big enough to contain type1 walk its access path.
1301 We also need to take care of arrays at the end of structs that may
1302 extend beyond the end of the structure. If this occurs in the TBAA part of the
1303 access path, we need to consider the increased type as well. */
1304 if (cmp_outer >= 0
1305 || (end_struct_ref2
1306 && compare_type_sizes (TREE_TYPE (end_struct_ref2), type1) >= 0))
1308 int res = aliasing_component_refs_walk (ref1, type1, base1,
1309 offset1, max_size1,
1310 end_struct_ref1,
1311 ref2, base2, offset2, max_size2,
1312 &maybe_match);
1313 if (res != -1)
1314 return res;
1317 /* If we didn't find a common base, try the other way around. */
1318 if (cmp_outer <= 0
1319 || (end_struct_ref1
1320 && compare_type_sizes (TREE_TYPE (end_struct_ref1), type1) <= 0))
1322 int res = aliasing_component_refs_walk (ref2, type2, base2,
1323 offset2, max_size2,
1324 end_struct_ref2,
1325 ref1, base1, offset1, max_size1,
1326 &maybe_match);
1327 if (res != -1)
1328 return res;
1331 /* In the following code we make an assumption that the types in access
1332 paths do not overlap and thus accesses alias only if one path can be
1333 a continuation of another. If we were not able to decide about
1334 equivalence, we need to give up. */
1335 if (maybe_match)
1337 if (!nonoverlapping_component_refs_p (ref1, ref2))
1339 ++alias_stats.aliasing_component_refs_p_may_alias;
1340 return true;
1342 ++alias_stats.aliasing_component_refs_p_no_alias;
1343 return false;
1346 if (access_path_may_continue_p (TREE_TYPE (ref1), end_struct_past_end1,
1347 ref1_alias_set,
1348 type2, end_struct_ref2,
1349 base2_alias_set)
1350 || access_path_may_continue_p (TREE_TYPE (ref2), end_struct_past_end2,
1351 ref2_alias_set,
1352 type1, end_struct_ref1,
1353 base1_alias_set))
1355 ++alias_stats.aliasing_component_refs_p_may_alias;
1356 return true;
1358 ++alias_stats.aliasing_component_refs_p_no_alias;
1359 return false;
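
/* The example from the head comment in compilable form (hypothetical
   user code): once type A is matched in both access paths, offset-based
   disambiguation applies.  */
#if 0
struct A { int i; int j; };
struct B { struct A a; int k; };

int
example (struct A *q, struct B *p)
{
  p->a.j = 1;
  q->i = 2;		/* Field i and field j never overlap within an A.  */
  return p->a.j;	/* Known to be 1 even if q points to &p->a.  */
}
#endif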
1362 /* FIELD1 and FIELD2 are two fields of component refs. We assume
1363 that bases of both component refs are either equivalent or nonoverlapping.
1364 We do not assume that the containers of FIELD1 and FIELD2 are of the
1365 same type or size.
1367 Return 0 if, in the case the base addresses of the component_refs are
1368 the same, FIELD1 and FIELD2 have the same address too. Note that FIELD1
1369 and FIELD2 may not be of the same type or size.
1371 Return 1 if FIELD1 and FIELD2 are non-overlapping.
1373 Return -1 otherwise.
1375 The main difference between 0 and -1 is to let
1376 nonoverlapping_refs_since_match_p discover the semantically
1377 equivalent part of the access path.
1379 Note that this function is used even with -fno-strict-aliasing
1380 and makes use of no TBAA assumptions. */
1382 static int
1383 nonoverlapping_component_refs_p_1 (const_tree field1, const_tree field2)
1385 /* If both fields are of the same type, we could save hard work of
1386 comparing offsets. */
1387 tree type1 = DECL_CONTEXT (field1);
1388 tree type2 = DECL_CONTEXT (field2);
1390 if (TREE_CODE (type1) == RECORD_TYPE
1391 && DECL_BIT_FIELD_REPRESENTATIVE (field1))
1392 field1 = DECL_BIT_FIELD_REPRESENTATIVE (field1);
1393 if (TREE_CODE (type2) == RECORD_TYPE
1394 && DECL_BIT_FIELD_REPRESENTATIVE (field2))
1395 field2 = DECL_BIT_FIELD_REPRESENTATIVE (field2);
1397 /* ??? Bitfields can overlap at RTL level so punt on them.
1398 FIXME: RTL expansion should be fixed by adjusting the access path
1399 when producing MEM_ATTRs for MEMs which are wider than
1400 the bitfields similarly as done in set_mem_attrs_minus_bitpos. */
1401 if (DECL_BIT_FIELD (field1) && DECL_BIT_FIELD (field2))
1402 return -1;
1404 /* Assume that different FIELD_DECLs never overlap within a RECORD_TYPE. */
1405 if (type1 == type2 && TREE_CODE (type1) == RECORD_TYPE)
1406 return field1 != field2;
1408 /* In the common case the offsets and bit offsets will be the same.
1409 However if frontends do not agree on the alignment, they may be
1410 different even if they actually represent the same address.
1411 Try the common case first and if that fails calculate the
1412 actual bit offset. */
1413 if (tree_int_cst_equal (DECL_FIELD_OFFSET (field1),
1414 DECL_FIELD_OFFSET (field2))
1415 && tree_int_cst_equal (DECL_FIELD_BIT_OFFSET (field1),
1416 DECL_FIELD_BIT_OFFSET (field2)))
1417 return 0;
1419 /* Note that it may be possible to use component_ref_field_offset
1420 which would provide offsets as trees. However constructing and folding
1421 trees is expensive and does not seem to be worth the compile time
1422 cost. */
1424 poly_uint64 offset1, offset2;
1425 poly_uint64 bit_offset1, bit_offset2;
1427 if (poly_int_tree_p (DECL_FIELD_OFFSET (field1), &offset1)
1428 && poly_int_tree_p (DECL_FIELD_OFFSET (field2), &offset2)
1429 && poly_int_tree_p (DECL_FIELD_BIT_OFFSET (field1), &bit_offset1)
1430 && poly_int_tree_p (DECL_FIELD_BIT_OFFSET (field2), &bit_offset2))
1432 offset1 = (offset1 << LOG2_BITS_PER_UNIT) + bit_offset1;
1433 offset2 = (offset2 << LOG2_BITS_PER_UNIT) + bit_offset2;
1435 if (known_eq (offset1, offset2))
1436 return 0;
1438 poly_uint64 size1, size2;
1440 if (poly_int_tree_p (DECL_SIZE (field1), &size1)
1441 && poly_int_tree_p (DECL_SIZE (field2), &size2)
1442 && !ranges_maybe_overlap_p (offset1, size1, offset2, size2))
1443 return 1;
1445 /* Resort to slower overlap checking by looking for matching types in
1446 the middle of access path. */
1447 return -1;
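
/* Illustration (hypothetical user code) of the RECORD_TYPE shortcut
   above: distinct FIELD_DECLs of the same record never overlap.  */
#if 0
struct S { int f1; int f2; };

int
example (struct S *s1, struct S *s2)
{
  s1->f1 = 1;
  s2->f2 = 2;		/* nonoverlapping_component_refs_p_1 returns 1.  */
  return s1->f1;	/* Known to be 1 even if s1 == s2.  */
}
#endif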
1450 /* Return the low bound of an array ref. Do not produce new trees
1451 and thus do not care about the particular type of integer constants
1452 and placeholder exprs. */
1454 static tree
1455 cheap_array_ref_low_bound (tree ref)
1457 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
1459 /* Avoid expensive array_ref_low_bound.
1460 low bound is either stored in operand2, or it is TYPE_MIN_VALUE of domain
1461 type or it is zero. */
1462 if (TREE_OPERAND (ref, 2))
1463 return TREE_OPERAND (ref, 2);
1464 else if (domain_type && TYPE_MIN_VALUE (domain_type))
1465 return TYPE_MIN_VALUE (domain_type);
1466 else
1467 return integer_zero_node;
1470 /* REF1 and REF2 are ARRAY_REFs with either same base address or which are
1471 completely disjoint.
1473 Return 1 if the refs are non-overlapping.
1474 Return 0 if they are possibly overlapping but if so the overlap again
1475 starts on the same address.
1476 Return -1 otherwise. */
1478 static int
1479 nonoverlapping_array_refs_p (tree ref1, tree ref2)
1481 tree index1 = TREE_OPERAND (ref1, 1);
1482 tree index2 = TREE_OPERAND (ref2, 1);
1483 tree low_bound1 = cheap_array_ref_low_bound (ref1);
1484 tree low_bound2 = cheap_array_ref_low_bound (ref2);
1486 /* Handle zero offsets first: we do not need to match type size in this
1487 case. */
1488 if (operand_equal_p (index1, low_bound1, 0)
1489 && operand_equal_p (index2, low_bound2, 0))
1490 return 0;
1492 /* If type sizes are different, give up.
1494 Avoid expensive array_ref_element_size.
1495 If operand 3 is present it denotes the size in the alignment units.
1496 Otherwise size is TYPE_SIZE of the element type.
1497 Handle only common cases where types are of the same "kind". */
1498 if ((TREE_OPERAND (ref1, 3) == NULL) != (TREE_OPERAND (ref2, 3) == NULL))
1499 return -1;
1501 tree elmt_type1 = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref1, 0)));
1502 tree elmt_type2 = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref2, 0)));
1504 if (TREE_OPERAND (ref1, 3))
1506 if (TYPE_ALIGN (elmt_type1) != TYPE_ALIGN (elmt_type2)
1507 || !operand_equal_p (TREE_OPERAND (ref1, 3),
1508 TREE_OPERAND (ref2, 3), 0))
1509 return -1;
1511 else
1513 if (!operand_equal_p (TYPE_SIZE_UNIT (elmt_type1),
1514 TYPE_SIZE_UNIT (elmt_type2), 0))
1515 return -1;
1518 /* Since we know that type sizes are the same, there is no need to return
1519 -1 after this point. Partial overlap can not be introduced. */
1521 /* We may need to fold trees in this case.
1522 TODO: Handle integer constant case at least. */
1523 if (!operand_equal_p (low_bound1, low_bound2, 0))
1524 return 0;
1526 if (TREE_CODE (index1) == INTEGER_CST && TREE_CODE (index2) == INTEGER_CST)
1528 if (tree_int_cst_equal (index1, index2))
1529 return 0;
1530 return 1;
1532 /* TODO: We can use VRP to further disambiguate here. */
1533 return 0;
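
/* Illustration (hypothetical user code): matched ARRAY_REFs with equal
   element sizes and unequal constant indexes are disjoint.  */
#if 0
int a[10][10];

int
example (int j, int k)
{
  a[1][j] = 1;
  a[2][k] = 2;		/* Indexes 1 and 2 differ: the refs cannot overlap.  */
  return a[1][j];	/* Known to be 1.  */
}
#endif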
1536 /* Try to disambiguate REF1 and REF2 under the assumption that MATCH1 and
1537 MATCH2 either point to the same address or are disjoint.
1538 MATCH1 and MATCH2 are assumed to be ref in the access path of REF1 and REF2
1539 respectively or NULL in the case we established equivalence of bases.
1540 If PARTIAL_OVERLAP is true assume that the toplevel arrays may actually
1541 overlap by an exact multiple of their element size.
1543 This test works by matching the initial segment of the access path
1544 and does not rely on TBAA thus is safe for !flag_strict_aliasing if
1545 match was determined without use of TBAA oracle.
1547 Return 1 if we can determine that component references REF1 and REF2,
1548 that are within a common DECL, cannot overlap.
1550 Return 0 if paths are same and thus there is nothing to disambiguate more
1551 (i.e. there is must alias assuming there is must alias between MATCH1 and
1552 MATCH2)
1554 Return -1 if we can not determine 0 or 1 - this happens when
1555 non-matching types were met in the path.
1556 In this case it may make sense to continue by other disambiguation
1557 oracles. */
1559 static int
1560 nonoverlapping_refs_since_match_p (tree match1, tree ref1,
1561 tree match2, tree ref2,
1562 bool partial_overlap)
1564 int ntbaa1 = 0, ntbaa2 = 0;
1565 /* Early return if there are no references to match; we do not need
1566 to walk the access paths.
1568 Do not consider this as may-alias for stats - it is more useful
1569 to have information on how many disambiguations happened provided that
1570 the query was meaningful. */
1572 if (match1 == ref1 || !handled_component_p (ref1)
1573 || match2 == ref2 || !handled_component_p (ref2))
1574 return -1;
1576 auto_vec<tree, 16> component_refs1;
1577 auto_vec<tree, 16> component_refs2;
1579 /* Create the stack of handled components for REF1. */
1580 while (handled_component_p (ref1) && ref1 != match1)
1582 /* We use TBAA only to re-synchronize after mismatched refs. So we
1583 do not need to truncate access path after TBAA part ends. */
1584 if (ends_tbaa_access_path_p (ref1))
1585 ntbaa1 = 0;
1586 else
1587 ntbaa1++;
1588 component_refs1.safe_push (ref1);
1589 ref1 = TREE_OPERAND (ref1, 0);
1592 /* Create the stack of handled components for REF2. */
1593 while (handled_component_p (ref2) && ref2 != match2)
1595 if (ends_tbaa_access_path_p (ref2))
1596 ntbaa2 = 0;
1597 else
1598 ntbaa2++;
1599 component_refs2.safe_push (ref2);
1600 ref2 = TREE_OPERAND (ref2, 0);
1603 if (!flag_strict_aliasing)
1605 ntbaa1 = 0;
1606 ntbaa2 = 0;
1609 bool mem_ref1 = TREE_CODE (ref1) == MEM_REF && ref1 != match1;
1610 bool mem_ref2 = TREE_CODE (ref2) == MEM_REF && ref2 != match2;
1612 /* If only one of the access paths starts with a MEM_REF, check that the
1613 offset is 0 so the address stays the same after stripping it.
1614 TODO: In this case we may walk the other access path until we get the
1615 same offset.
1617 If both start with a MEM_REF, the offset has to be the same. */
1618 if ((mem_ref1 && !mem_ref2 && !integer_zerop (TREE_OPERAND (ref1, 1)))
1619 || (mem_ref2 && !mem_ref1 && !integer_zerop (TREE_OPERAND (ref2, 1)))
1620 || (mem_ref1 && mem_ref2
1621 && !tree_int_cst_equal (TREE_OPERAND (ref1, 1),
1622 TREE_OPERAND (ref2, 1))))
1624 ++alias_stats.nonoverlapping_refs_since_match_p_may_alias;
1625 return -1;
1628 /* TARGET_MEM_REF are never wrapped in handled components, so we do not need
1629 to handle them here at all. */
1630 gcc_checking_assert (TREE_CODE (ref1) != TARGET_MEM_REF
1631 && TREE_CODE (ref2) != TARGET_MEM_REF);
1633 /* Pop the stacks in parallel and examine the COMPONENT_REFs of the same
1634 rank. This is sufficient because we start from the same DECL and you
1635 cannot reference several fields at a time with COMPONENT_REFs (unlike
1636 with ARRAY_RANGE_REFs for arrays) so you always need the same number
1637 of them to access a sub-component, unless you're in a union, in which
1638 case the return value will precisely be false. */
1639 while (true)
1641 /* Track if we have seen an unmatched ref with a non-zero offset. In
1642 this case we must look for partial overlaps. */
1643 bool seen_unmatched_ref_p = false;
1645 /* First match ARRAY_REFs and try to disambiguate. */
1646 if (!component_refs1.is_empty ()
1647 && !component_refs2.is_empty ())
1649 unsigned int narray_refs1=0, narray_refs2=0;
1651 /* We generally assume that both access paths start with the same
1652 sequence of refs. However if the number of array refs is not in sync,
1653 try to recover and pop elts until the numbers match. This helps the case
1654 where one access path starts with an array and the other with an element. */
1655 for (narray_refs1 = 0; narray_refs1 < component_refs1.length ();
1656 narray_refs1++)
1657 if (TREE_CODE (component_refs1 [component_refs1.length()
1658 - 1 - narray_refs1]) != ARRAY_REF)
1659 break;
1661 for (narray_refs2 = 0; narray_refs2 < component_refs2.length ();
1662 narray_refs2++)
1663 if (TREE_CODE (component_refs2 [component_refs2.length()
1664 - 1 - narray_refs2]) != ARRAY_REF)
1665 break;
1666 for (; narray_refs1 > narray_refs2; narray_refs1--)
1668 ref1 = component_refs1.pop ();
1669 ntbaa1--;
1671 /* If index is non-zero we need to check whether the reference
1672 does not break the main invariant that bases are either
1673 disjoint or equal. Consider the example:
1675 unsigned char out[][1];
1676 out[1]="a";
1677 out[i][0];
1679 Here both references have the same base out, but after removing the
1680 [i] index, this invariant no longer holds, because
1681 out[i] points to the middle of the array out.
1683 TODO: If the size of the type of the skipped reference is an integer
1684 multiple of the size of the type of the other reference, this
1685 invariant can be verified, but even then it is not completely
1686 safe with !flag_strict_aliasing if the other reference contains
1687 unbounded array accesses.
1688 See */
1690 if (!operand_equal_p (TREE_OPERAND (ref1, 1),
1691 cheap_array_ref_low_bound (ref1), 0))
1692 return 0;
1694 for (; narray_refs2 > narray_refs1; narray_refs2--)
1696 ref2 = component_refs2.pop ();
1697 ntbaa2--;
1698 if (!operand_equal_p (TREE_OPERAND (ref2, 1),
1699 cheap_array_ref_low_bound (ref2), 0))
1700 return 0;
1702 /* Try to disambiguate matched arrays. */
1703 for (unsigned int i = 0; i < narray_refs1; i++)
1705 int cmp = nonoverlapping_array_refs_p (component_refs1.pop (),
1706 component_refs2.pop ());
1707 ntbaa1--;
1708 ntbaa2--;
1709 if (cmp == 1 && !partial_overlap)
1711 ++alias_stats
1712 .nonoverlapping_refs_since_match_p_no_alias;
1713 return 1;
1715 if (cmp == -1)
1717 seen_unmatched_ref_p = true;
1718 /* We can not maintain the invariant that bases are either the
1719 same or completely disjoint. However we can still recover
1720 via type-based alias analysis if we reach references of the
1721 same size. We do not attempt to match array sizes, so
1722 just finish array walking and look for component refs. */
1723 if (ntbaa1 < 0 || ntbaa2 < 0)
1725 ++alias_stats.nonoverlapping_refs_since_match_p_may_alias;
1726 return -1;
1728 for (i++; i < narray_refs1; i++)
1730 component_refs1.pop ();
1731 component_refs2.pop ();
1732 ntbaa1--;
1733 ntbaa2--;
1735 break;
1737 partial_overlap = false;
1741 /* Next look for component_refs. */
1744 if (component_refs1.is_empty ())
1746 ++alias_stats
1747 .nonoverlapping_refs_since_match_p_must_overlap;
1748 return 0;
1750 ref1 = component_refs1.pop ();
1751 ntbaa1--;
1752 if (TREE_CODE (ref1) != COMPONENT_REF)
1754 seen_unmatched_ref_p = true;
1755 if (ntbaa1 < 0 || ntbaa2 < 0)
1757 ++alias_stats.nonoverlapping_refs_since_match_p_may_alias;
1758 return -1;
1762 while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref1, 0))));
1766 if (component_refs2.is_empty ())
1768 ++alias_stats
1769 .nonoverlapping_refs_since_match_p_must_overlap;
1770 return 0;
1772 ref2 = component_refs2.pop ();
1773 ntbaa2--;
1774 if (TREE_CODE (ref2) != COMPONENT_REF)
1776 if (ntbaa1 < 0 || ntbaa2 < 0)
1778 ++alias_stats.nonoverlapping_refs_since_match_p_may_alias;
1779 return -1;
1781 seen_unmatched_ref_p = true;
1784 while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref2, 0))));
1786 /* BIT_FIELD_REF and VIEW_CONVERT_EXPR are taken off the vectors
1787 earlier. */
1788 gcc_checking_assert (TREE_CODE (ref1) == COMPONENT_REF
1789 && TREE_CODE (ref2) == COMPONENT_REF);
1791 tree field1 = TREE_OPERAND (ref1, 1);
1792 tree field2 = TREE_OPERAND (ref2, 1);
1794 /* ??? We cannot simply use the type of operand #0 of the refs here
1795 as the Fortran compiler smuggles type punning into COMPONENT_REFs
1796 for common blocks instead of using unions like everyone else. */
1797 tree type1 = DECL_CONTEXT (field1);
1798 tree type2 = DECL_CONTEXT (field2);
1800 partial_overlap = false;
1802 /* If we skipped array refs on type of different sizes, we can
1803 no longer be sure that there are not partial overlaps. */
1804 if (seen_unmatched_ref_p && ntbaa1 >= 0 && ntbaa2 >= 0
1805 && !operand_equal_p (TYPE_SIZE (type1), TYPE_SIZE (type2), 0))
1807 ++alias_stats
1808 .nonoverlapping_refs_since_match_p_may_alias;
1809 return -1;
1812 int cmp = nonoverlapping_component_refs_p_1 (field1, field2);
1813 if (cmp == -1)
1815 ++alias_stats
1816 .nonoverlapping_refs_since_match_p_may_alias;
1817 return -1;
1819 else if (cmp == 1)
1821 ++alias_stats
1822 .nonoverlapping_refs_since_match_p_no_alias;
1823 return 1;
1828 /* Return a TYPE_UID that can be used to match record types we consider
1829 the same for TBAA purposes. */
1831 static inline int
1832 ncr_type_uid (const_tree field)
1834 /* ??? We cannot simply use the type of operand #0 of the refs here
1835 as the Fortran compiler smuggles type punning into COMPONENT_REFs
1836 for common blocks instead of using unions like everyone else. */
1837 tree type = DECL_FIELD_CONTEXT (field);
1838 /* With LTO, types considered same_type_for_tbaa_p
1839 that come from different translation units may not have the
1840 same main variant. They do, however, have the same TYPE_CANONICAL. */
1841 if (TYPE_CANONICAL (type))
1842 return TYPE_UID (TYPE_CANONICAL (type));
1843 return TYPE_UID (type);
1846 /* qsort compare function to sort FIELD_DECLs by the
1847 TYPE_UID of their DECL_FIELD_CONTEXT. */
1849 static inline int
1850 ncr_compar (const void *field1_, const void *field2_)
1852 const_tree field1 = *(const_tree *) const_cast <void *>(field1_);
1853 const_tree field2 = *(const_tree *) const_cast <void *>(field2_);
1854 unsigned int uid1 = ncr_type_uid (field1);
1855 unsigned int uid2 = ncr_type_uid (field2);
1857 if (uid1 < uid2)
1858 return -1;
1859 else if (uid1 > uid2)
1860 return 1;
1861 return 0;
1864 /* Return true if we can determine that the fields referenced cannot
1865 overlap for any pair of objects. This relies on TBAA. */
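/* An illustrative sketch (the type and the accesses are hypothetical,
   not part of GCC):

	struct S { int a; int b; };

	p->a = 1;	fieldsx = { a }, DECL_FIELD_CONTEXT == S
	t = q->b;	fieldsy = { b }, DECL_FIELD_CONTEXT == S

   Under strict aliasing two objects of type S are either the same or
   disjoint, so accesses to the distinct fields a and b of the same
   RECORD_TYPE can never overlap and true may be returned.  */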
1867 static bool
1868 nonoverlapping_component_refs_p (const_tree x, const_tree y)
1870 /* Early return if we have nothing to do.
1872 Do not count this as may-alias in the stats - it is more useful
1873 to know how many disambiguations happened provided that
1874 the query was meaningful. */
1875 if (!flag_strict_aliasing
1876 || !x || !y
1877 || !handled_component_p (x)
1878 || !handled_component_p (y))
1879 return false;
1881 auto_vec<const_tree, 16> fieldsx;
1882 while (handled_component_p (x))
1884 if (TREE_CODE (x) == COMPONENT_REF)
1886 tree field = TREE_OPERAND (x, 1);
1887 tree type = DECL_FIELD_CONTEXT (field);
1888 if (TREE_CODE (type) == RECORD_TYPE)
1889 fieldsx.safe_push (field);
1891 else if (ends_tbaa_access_path_p (x))
1892 fieldsx.truncate (0);
1893 x = TREE_OPERAND (x, 0);
1895 if (fieldsx.length () == 0)
1896 return false;
1897 auto_vec<const_tree, 16> fieldsy;
1898 while (handled_component_p (y))
1900 if (TREE_CODE (y) == COMPONENT_REF)
1902 tree field = TREE_OPERAND (y, 1);
1903 tree type = DECL_FIELD_CONTEXT (field);
1904 if (TREE_CODE (type) == RECORD_TYPE)
1905 fieldsy.safe_push (TREE_OPERAND (y, 1));
1907 else if (ends_tbaa_access_path_p (y))
1908 fieldsy.truncate (0);
1909 y = TREE_OPERAND (y, 0);
1911 if (fieldsy.length () == 0)
1913 ++alias_stats.nonoverlapping_component_refs_p_may_alias;
1914 return false;
1917 /* Most common case first. */
1918 if (fieldsx.length () == 1
1919 && fieldsy.length () == 1)
1921 if (same_type_for_tbaa (DECL_FIELD_CONTEXT (fieldsx[0]),
1922 DECL_FIELD_CONTEXT (fieldsy[0])) == 1
1923 && nonoverlapping_component_refs_p_1 (fieldsx[0], fieldsy[0]) == 1)
1925 ++alias_stats.nonoverlapping_component_refs_p_no_alias;
1926 return true;
1928 else
1930 ++alias_stats.nonoverlapping_component_refs_p_may_alias;
1931 return false;
1935 if (fieldsx.length () == 2)
1937 if (ncr_compar (&fieldsx[0], &fieldsx[1]) == 1)
1938 std::swap (fieldsx[0], fieldsx[1]);
1940 else
1941 fieldsx.qsort (ncr_compar);
1943 if (fieldsy.length () == 2)
1945 if (ncr_compar (&fieldsy[0], &fieldsy[1]) == 1)
1946 std::swap (fieldsy[0], fieldsy[1]);
1948 else
1949 fieldsy.qsort (ncr_compar);
1951 unsigned i = 0, j = 0;
1954 const_tree fieldx = fieldsx[i];
1955 const_tree fieldy = fieldsy[j];
1957 /* We're left with accessing different fields of a structure,
1958 no possible overlap. */
1959 if (same_type_for_tbaa (DECL_FIELD_CONTEXT (fieldx),
1960 DECL_FIELD_CONTEXT (fieldy)) == 1
1961 && nonoverlapping_component_refs_p_1 (fieldx, fieldy) == 1)
1963 ++alias_stats.nonoverlapping_component_refs_p_no_alias;
1964 return true;
1967 if (ncr_type_uid (fieldx) < ncr_type_uid (fieldy))
1969 i++;
1970 if (i == fieldsx.length ())
1971 break;
1973 else
1975 j++;
1976 if (j == fieldsy.length ())
1977 break;
1980 while (1);
1982 ++alias_stats.nonoverlapping_component_refs_p_may_alias;
1983 return false;
1987 /* Return true if two memory references based on the variables BASE1
1988 and BASE2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
1989 [OFFSET2, OFFSET2 + MAX_SIZE2) may alias. REF1 and REF2
1990 if non-NULL are the complete memory reference trees. */
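/* For instance (an illustrative sketch, hypothetical variables):

	int x, y;
	struct { int a; int b; } s;

   Accesses to x and y have different decl bases and thus cannot alias.
   Accesses s.a and s.b share the base s but have the disjoint bit
   ranges [0, 32) and [32, 64), so they cannot alias either.  */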
1992 static bool
1993 decl_refs_may_alias_p (tree ref1, tree base1,
1994 poly_int64 offset1, poly_int64 max_size1,
1995 poly_int64 size1,
1996 tree ref2, tree base2,
1997 poly_int64 offset2, poly_int64 max_size2,
1998 poly_int64 size2)
2000 gcc_checking_assert (DECL_P (base1) && DECL_P (base2));
2002 /* If both references are based on different variables, they cannot alias. */
2003 if (compare_base_decls (base1, base2) == 0)
2004 return false;
2006 /* If both references are based on the same variable, they cannot alias if
2007 the accesses do not overlap. */
2008 if (!ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
2009 return false;
2011 /* If there is a must-alias, there is no use disambiguating further. */
2012 if (known_eq (size1, max_size1) && known_eq (size2, max_size2))
2013 return true;
2015 /* For components with variable position, the above test isn't sufficient,
2016 so we disambiguate component references manually. */
2017 if (ref1 && ref2
2018 && handled_component_p (ref1) && handled_component_p (ref2)
2019 && nonoverlapping_refs_since_match_p (NULL, ref1, NULL, ref2, false) == 1)
2020 return false;
2022 return true;
2025 /* Return true if an access with BASE is view-converted.
2026 BASE must not have the inner MEM_REF (&decl) stripped,
2027 which ao_ref_base does, and thus one extra walk
2028 of handled components is needed. */
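/* E.g. (an illustrative sketch): a reference

	MEM[(int *)p_1]		with TREE_TYPE (base) == float

   accesses memory through type float although the access type recorded
   in its second operand is int, so it is view-converted; the same
   MEM_REF with TREE_TYPE (base) == int is not.  */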
2030 static bool
2031 view_converted_memref_p (tree base)
2033 if (TREE_CODE (base) != MEM_REF && TREE_CODE (base) != TARGET_MEM_REF)
2034 return false;
2035 return same_type_for_tbaa (TREE_TYPE (base),
2036 TREE_TYPE (TREE_TYPE (TREE_OPERAND (base, 1)))) != 1;
2039 /* Return true if an indirect reference based on *PTR1 constrained
2040 to [OFFSET1, OFFSET1 + MAX_SIZE1) may alias a variable based on BASE2
2041 constrained to [OFFSET2, OFFSET2 + MAX_SIZE2). *PTR1 and BASE2 have
2042 the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
2043 in which case they are computed on-demand. REF1 and REF2
2044 if non-NULL are the complete memory reference trees. */
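/* An illustrative example mirroring one of the checks below
   (hypothetical variables):

	char c;			DECL_SIZE == 8 bits
	long long *p_1;
	*p_1 = 0;		access size == 64 bits

   The access through p_1 is bigger than the whole decl c, so the two
   references cannot overlap.  */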
2046 static bool
2047 indirect_ref_may_alias_decl_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
2048 poly_int64 offset1, poly_int64 max_size1,
2049 poly_int64 size1,
2050 alias_set_type ref1_alias_set,
2051 alias_set_type base1_alias_set,
2052 tree ref2 ATTRIBUTE_UNUSED, tree base2,
2053 poly_int64 offset2, poly_int64 max_size2,
2054 poly_int64 size2,
2055 alias_set_type ref2_alias_set,
2056 alias_set_type base2_alias_set, bool tbaa_p)
2058 tree ptr1;
2059 tree ptrtype1, dbase2;
2061 gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
2062 || TREE_CODE (base1) == TARGET_MEM_REF)
2063 && DECL_P (base2));
2065 ptr1 = TREE_OPERAND (base1, 0);
2066 poly_offset_int moff = mem_ref_offset (base1) << LOG2_BITS_PER_UNIT;
2068 /* If only one reference is based on a variable, they cannot alias if
2069 the pointer access is beyond the extent of the variable access.
2070 (the pointer base cannot validly point to an offset less than zero
2071 of the variable).
2072 ??? IVOPTs creates bases that do not honor this restriction,
2073 so do not apply this optimization for TARGET_MEM_REFs. */
2074 if (TREE_CODE (base1) != TARGET_MEM_REF
2075 && !ranges_maybe_overlap_p (offset1 + moff, -1, offset2, max_size2))
2076 return false;
2078 /* If the pointer based access is bigger than the variable they cannot
2079 alias. This is similar to the check below where we use TBAA to
2080 increase the size of the pointer based access based on the dynamic
2081 type of a containing object we can infer from it. */
2082 poly_int64 dsize2;
2083 if (known_size_p (size1)
2084 && poly_int_tree_p (DECL_SIZE (base2), &dsize2)
2085 && known_lt (dsize2, size1))
2086 return false;
2088 /* They also cannot alias if the pointer may not point to the decl. */
2089 if (!ptr_deref_may_alias_decl_p (ptr1, base2))
2090 return false;
2092 /* Disambiguations that rely on strict aliasing rules follow. */
2093 if (!flag_strict_aliasing || !tbaa_p)
2094 return true;
2096 /* If the alias set for a pointer access is zero all bets are off. */
2097 if (base1_alias_set == 0 || base2_alias_set == 0)
2098 return true;
2100 /* When we are trying to disambiguate an access with a pointer dereference
2101 as base versus one with a decl as base we can use both the size
2102 of the decl and its dynamic type for extra disambiguation.
2103 ??? We do not know anything about the dynamic type of the decl
2104 other than that its alias-set contains base2_alias_set as a subset
2105 which does not help us here. */
2106 /* As we know nothing useful about the dynamic type of the decl just
2107 use the usual conflict check rather than a subset test.
2108 ??? We could introduce -fvery-strict-aliasing when the language
2109 does not allow decls to have a dynamic type that differs from their
2110 static type. Then we can check
2111 !alias_set_subset_of (base1_alias_set, base2_alias_set) instead. */
2112 if (base1_alias_set != base2_alias_set
2113 && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
2114 return false;
2116 ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));
2118 /* If the size of the access relevant for TBAA through the pointer
2119 is bigger than the size of the decl we can't possibly access the
2120 decl via that pointer. */
2121 if (/* ??? This in turn may run afoul when a decl of type T which is
2122 a member of union type U is accessed through a pointer to
2123 type U and sizeof T is smaller than sizeof U. */
2124 TREE_CODE (TREE_TYPE (ptrtype1)) != UNION_TYPE
2125 && TREE_CODE (TREE_TYPE (ptrtype1)) != QUAL_UNION_TYPE
2126 && compare_sizes (DECL_SIZE (base2),
2127 TYPE_SIZE (TREE_TYPE (ptrtype1))) < 0)
2128 return false;
2130 if (!ref2)
2131 return true;
2133 /* If the decl is accessed via a MEM_REF, reconstruct the base
2134 we can use for TBAA and an appropriately adjusted offset. */
2135 dbase2 = ref2;
2136 while (handled_component_p (dbase2))
2137 dbase2 = TREE_OPERAND (dbase2, 0);
2138 poly_int64 doffset1 = offset1;
2139 poly_offset_int doffset2 = offset2;
2140 if (TREE_CODE (dbase2) == MEM_REF
2141 || TREE_CODE (dbase2) == TARGET_MEM_REF)
2143 doffset2 -= mem_ref_offset (dbase2) << LOG2_BITS_PER_UNIT;
2144 tree ptrtype2 = TREE_TYPE (TREE_OPERAND (dbase2, 1));
2145 /* If second reference is view-converted, give up now. */
2146 if (same_type_for_tbaa (TREE_TYPE (dbase2), TREE_TYPE (ptrtype2)) != 1)
2147 return true;
2150 /* If first reference is view-converted, give up now. */
2151 if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1)
2152 return true;
2154 /* If both references are through the same type, they do not alias
2155 if the accesses do not overlap. This does extra disambiguation
2156 for mixed/pointer accesses but requires strict aliasing.
2157 For MEM_REFs we require that the component-ref offset we computed
2158 is relative to the start of the type which we ensure by
2159 comparing rvalue and access type and disregarding the constant
2160 pointer offset.
2162 But avoid treating variable length arrays as "objects", instead assume they
2163 can overlap by an exact multiple of their element size.
2164 See gcc.dg/torture/alias-2.c. */
2165 if (((TREE_CODE (base1) != TARGET_MEM_REF
2166 || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
2167 && (TREE_CODE (dbase2) != TARGET_MEM_REF
2168 || (!TMR_INDEX (dbase2) && !TMR_INDEX2 (dbase2))))
2169 && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (dbase2)) == 1)
2171 bool partial_overlap = (TREE_CODE (TREE_TYPE (base1)) == ARRAY_TYPE
2172 && (TYPE_SIZE (TREE_TYPE (base1))
2173 && TREE_CODE (TYPE_SIZE (TREE_TYPE (base1)))
2174 != INTEGER_CST));
2175 if (!partial_overlap
2176 && !ranges_maybe_overlap_p (doffset1, max_size1, doffset2, max_size2))
2177 return false;
2178 if (!ref1 || !ref2
2179 /* If there is a must-alias, there is no use disambiguating further. */
2180 || (!partial_overlap
2181 && known_eq (size1, max_size1) && known_eq (size2, max_size2)))
2182 return true;
2183 int res = nonoverlapping_refs_since_match_p (base1, ref1, base2, ref2,
2184 partial_overlap);
2185 if (res == -1)
2186 return !nonoverlapping_component_refs_p (ref1, ref2);
2187 return !res;
2190 /* Do access-path based disambiguation. */
2191 if (ref1 && ref2
2192 && (handled_component_p (ref1) || handled_component_p (ref2)))
2193 return aliasing_component_refs_p (ref1,
2194 ref1_alias_set, base1_alias_set,
2195 offset1, max_size1,
2196 ref2,
2197 ref2_alias_set, base2_alias_set,
2198 offset2, max_size2);
2200 return true;
2203 /* Return true if two indirect references based on *PTR1
2204 and *PTR2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
2205 [OFFSET2, OFFSET2 + MAX_SIZE2) may alias. *PTR1 and *PTR2 have
2206 the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
2207 in which case they are computed on-demand. REF1 and REF2
2208 if non-NULL are the complete memory reference trees. */
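/* For example (an illustrative GIMPLE sketch):

	struct S { int a; int b; } *p_1;
	MEM[(struct S *)p_1].a = 1;	bits [0, 32)
	t_2 = MEM[(struct S *)p_1].b;	bits [32, 64)

   Both bases dereference the equal pointer p_1, so the accesses can
   alias only if their offset ranges overlap; here they do not and
   false may be returned.  */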
2210 static bool
2211 indirect_refs_may_alias_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
2212 poly_int64 offset1, poly_int64 max_size1,
2213 poly_int64 size1,
2214 alias_set_type ref1_alias_set,
2215 alias_set_type base1_alias_set,
2216 tree ref2 ATTRIBUTE_UNUSED, tree base2,
2217 poly_int64 offset2, poly_int64 max_size2,
2218 poly_int64 size2,
2219 alias_set_type ref2_alias_set,
2220 alias_set_type base2_alias_set, bool tbaa_p)
2222 tree ptr1;
2223 tree ptr2;
2224 tree ptrtype1, ptrtype2;
2226 gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
2227 || TREE_CODE (base1) == TARGET_MEM_REF)
2228 && (TREE_CODE (base2) == MEM_REF
2229 || TREE_CODE (base2) == TARGET_MEM_REF));
2231 ptr1 = TREE_OPERAND (base1, 0);
2232 ptr2 = TREE_OPERAND (base2, 0);
2234 /* If both bases are based on pointers they cannot alias if they may not
2235 point to the same memory object or if they point to the same object
2236 and the accesses do not overlap. */
2237 if ((!cfun || gimple_in_ssa_p (cfun))
2238 && operand_equal_p (ptr1, ptr2, 0)
2239 && (((TREE_CODE (base1) != TARGET_MEM_REF
2240 || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
2241 && (TREE_CODE (base2) != TARGET_MEM_REF
2242 || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2))))
2243 || (TREE_CODE (base1) == TARGET_MEM_REF
2244 && TREE_CODE (base2) == TARGET_MEM_REF
2245 && (TMR_STEP (base1) == TMR_STEP (base2)
2246 || (TMR_STEP (base1) && TMR_STEP (base2)
2247 && operand_equal_p (TMR_STEP (base1),
2248 TMR_STEP (base2), 0)))
2249 && (TMR_INDEX (base1) == TMR_INDEX (base2)
2250 || (TMR_INDEX (base1) && TMR_INDEX (base2)
2251 && operand_equal_p (TMR_INDEX (base1),
2252 TMR_INDEX (base2), 0)))
2253 && (TMR_INDEX2 (base1) == TMR_INDEX2 (base2)
2254 || (TMR_INDEX2 (base1) && TMR_INDEX2 (base2)
2255 && operand_equal_p (TMR_INDEX2 (base1),
2256 TMR_INDEX2 (base2), 0))))))
2258 poly_offset_int moff1 = mem_ref_offset (base1) << LOG2_BITS_PER_UNIT;
2259 poly_offset_int moff2 = mem_ref_offset (base2) << LOG2_BITS_PER_UNIT;
2260 if (!ranges_maybe_overlap_p (offset1 + moff1, max_size1,
2261 offset2 + moff2, max_size2))
2262 return false;
2263 /* If there is a must-alias, there is no use disambiguating further. */
2264 if (known_eq (size1, max_size1) && known_eq (size2, max_size2))
2265 return true;
2266 if (ref1 && ref2)
2268 int res = nonoverlapping_refs_since_match_p (NULL, ref1, NULL, ref2,
2269 false);
2270 if (res != -1)
2271 return !res;
2274 if (!ptr_derefs_may_alias_p (ptr1, ptr2))
2275 return false;
2277 /* Disambiguations that rely on strict aliasing rules follow. */
2278 if (!flag_strict_aliasing || !tbaa_p)
2279 return true;
2281 ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));
2282 ptrtype2 = TREE_TYPE (TREE_OPERAND (base2, 1));
2284 /* If the alias set for a pointer access is zero all bets are off. */
2285 if (base1_alias_set == 0
2286 || base2_alias_set == 0)
2287 return true;
2289 /* Do type-based disambiguation. */
2290 if (base1_alias_set != base2_alias_set
2291 && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
2292 return false;
2294 /* If either reference is view-converted, give up now. */
2295 if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1
2296 || same_type_for_tbaa (TREE_TYPE (base2), TREE_TYPE (ptrtype2)) != 1)
2297 return true;
2299 /* If both references are through the same type, they do not alias
2300 if the accesses do not overlap. This does extra disambiguation
2301 for mixed/pointer accesses but requires strict aliasing. */
2302 if ((TREE_CODE (base1) != TARGET_MEM_REF
2303 || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
2304 && (TREE_CODE (base2) != TARGET_MEM_REF
2305 || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2)))
2306 && same_type_for_tbaa (TREE_TYPE (ptrtype1),
2307 TREE_TYPE (ptrtype2)) == 1)
2309 /* But avoid treating arrays as "objects", instead assume they
2310 can overlap by an exact multiple of their element size.
2311 See gcc.dg/torture/alias-2.c. */
2312 bool partial_overlap = TREE_CODE (TREE_TYPE (ptrtype1)) == ARRAY_TYPE;
2314 if (!partial_overlap
2315 && !ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
2316 return false;
2317 if (!ref1 || !ref2
2318 || (!partial_overlap
2319 && known_eq (size1, max_size1) && known_eq (size2, max_size2)))
2320 return true;
2321 int res = nonoverlapping_refs_since_match_p (base1, ref1, base2, ref2,
2322 partial_overlap);
2323 if (res == -1)
2324 return !nonoverlapping_component_refs_p (ref1, ref2);
2325 return !res;
2328 /* Do access-path based disambiguation. */
2329 if (ref1 && ref2
2330 && (handled_component_p (ref1) || handled_component_p (ref2)))
2331 return aliasing_component_refs_p (ref1,
2332 ref1_alias_set, base1_alias_set,
2333 offset1, max_size1,
2334 ref2,
2335 ref2_alias_set, base2_alias_set,
2336 offset2, max_size2);
2338 return true;
2341 /* Return true, if the two memory references REF1 and REF2 may alias. */
2343 static bool
2344 refs_may_alias_p_2 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
2346 tree base1, base2;
2347 poly_int64 offset1 = 0, offset2 = 0;
2348 poly_int64 max_size1 = -1, max_size2 = -1;
2349 bool var1_p, var2_p, ind1_p, ind2_p;
2351 gcc_checking_assert ((!ref1->ref
2352 || TREE_CODE (ref1->ref) == SSA_NAME
2353 || DECL_P (ref1->ref)
2354 || TREE_CODE (ref1->ref) == STRING_CST
2355 || handled_component_p (ref1->ref)
2356 || TREE_CODE (ref1->ref) == MEM_REF
2357 || TREE_CODE (ref1->ref) == TARGET_MEM_REF
2358 || TREE_CODE (ref1->ref) == WITH_SIZE_EXPR)
2359 && (!ref2->ref
2360 || TREE_CODE (ref2->ref) == SSA_NAME
2361 || DECL_P (ref2->ref)
2362 || TREE_CODE (ref2->ref) == STRING_CST
2363 || handled_component_p (ref2->ref)
2364 || TREE_CODE (ref2->ref) == MEM_REF
2365 || TREE_CODE (ref2->ref) == TARGET_MEM_REF
2366 || TREE_CODE (ref2->ref) == WITH_SIZE_EXPR));
2368 /* Decompose the references into their base objects and the access. */
2369 base1 = ao_ref_base (ref1);
2370 offset1 = ref1->offset;
2371 max_size1 = ref1->max_size;
2372 base2 = ao_ref_base (ref2);
2373 offset2 = ref2->offset;
2374 max_size2 = ref2->max_size;
2376 /* We can end up with registers or constants as bases for example from
2377 *D.1663_44 = VIEW_CONVERT_EXPR<struct DB_LSN>(__tmp$B0F64_59);
2378 which is seen as a struct copy. */
2379 if (TREE_CODE (base1) == SSA_NAME
2380 || TREE_CODE (base1) == CONST_DECL
2381 || TREE_CODE (base1) == CONSTRUCTOR
2382 || TREE_CODE (base1) == ADDR_EXPR
2383 || CONSTANT_CLASS_P (base1)
2384 || TREE_CODE (base2) == SSA_NAME
2385 || TREE_CODE (base2) == CONST_DECL
2386 || TREE_CODE (base2) == CONSTRUCTOR
2387 || TREE_CODE (base2) == ADDR_EXPR
2388 || CONSTANT_CLASS_P (base2))
2389 return false;
2391 /* We can end up referring to code via function and label decls.
2392 As we likely do not properly track code aliases, conservatively
2393 bail out. */
2394 if (TREE_CODE (base1) == FUNCTION_DECL
2395 || TREE_CODE (base1) == LABEL_DECL
2396 || TREE_CODE (base2) == FUNCTION_DECL
2397 || TREE_CODE (base2) == LABEL_DECL)
2398 return true;
2400 /* Two volatile accesses always conflict. */
2401 if (ref1->volatile_p
2402 && ref2->volatile_p)
2403 return true;
2405 /* refN->ref may convey size information; do not confuse our workers
2406 with that but strip it - ao_ref_base took it into account already. */
2407 tree ref1ref = ref1->ref;
2408 if (ref1ref && TREE_CODE (ref1ref) == WITH_SIZE_EXPR)
2409 ref1ref = TREE_OPERAND (ref1ref, 0);
2410 tree ref2ref = ref2->ref;
2411 if (ref2ref && TREE_CODE (ref2ref) == WITH_SIZE_EXPR)
2412 ref2ref = TREE_OPERAND (ref2ref, 0);
2414 /* Defer to simple offset-based disambiguation if we have
2415 references based on two decls. Do this before deferring to
2416 TBAA to handle must-alias cases in conformance with the
2417 GCC extension of allowing type-punning through unions. */
2418 var1_p = DECL_P (base1);
2419 var2_p = DECL_P (base2);
2420 if (var1_p && var2_p)
2421 return decl_refs_may_alias_p (ref1ref, base1, offset1, max_size1,
2422 ref1->size,
2423 ref2ref, base2, offset2, max_size2,
2424 ref2->size);
2426 /* Handle restrict based accesses.
2427 ??? ao_ref_base strips inner MEM_REF [&decl], recover from that
2428 here. */
2429 tree rbase1 = base1;
2430 tree rbase2 = base2;
2431 if (var1_p)
2433 rbase1 = ref1ref;
2434 if (rbase1)
2435 while (handled_component_p (rbase1))
2436 rbase1 = TREE_OPERAND (rbase1, 0);
2438 if (var2_p)
2440 rbase2 = ref2ref;
2441 if (rbase2)
2442 while (handled_component_p (rbase2))
2443 rbase2 = TREE_OPERAND (rbase2, 0);
2445 if (rbase1 && rbase2
2446 && (TREE_CODE (rbase1) == MEM_REF || TREE_CODE (rbase1) == TARGET_MEM_REF)
2447 && (TREE_CODE (rbase2) == MEM_REF || TREE_CODE (rbase2) == TARGET_MEM_REF)
2448 /* If the accesses are in the same restrict clique... */
2449 && MR_DEPENDENCE_CLIQUE (rbase1) == MR_DEPENDENCE_CLIQUE (rbase2)
2450 /* But based on different pointers they do not alias. */
2451 && MR_DEPENDENCE_BASE (rbase1) != MR_DEPENDENCE_BASE (rbase2))
2452 return false;
2454 ind1_p = (TREE_CODE (base1) == MEM_REF
2455 || TREE_CODE (base1) == TARGET_MEM_REF);
2456 ind2_p = (TREE_CODE (base2) == MEM_REF
2457 || TREE_CODE (base2) == TARGET_MEM_REF);
2459 /* Canonicalize the pointer-vs-decl case. */
2460 if (ind1_p && var2_p)
2462 std::swap (offset1, offset2);
2463 std::swap (max_size1, max_size2);
2464 std::swap (base1, base2);
2465 std::swap (ref1, ref2);
2466 std::swap (ref1ref, ref2ref);
2467 var1_p = true;
2468 ind1_p = false;
2469 var2_p = false;
2470 ind2_p = true;
2473 /* First defer to TBAA if possible. */
2474 if (tbaa_p
2475 && flag_strict_aliasing
2476 && !alias_sets_conflict_p (ao_ref_alias_set (ref1),
2477 ao_ref_alias_set (ref2)))
2478 return false;
2480 /* If the reference is based on a pointer that points to memory
2481 that may not be written to then the other reference cannot possibly
2482 clobber it. */
2483 if ((TREE_CODE (TREE_OPERAND (base2, 0)) == SSA_NAME
2484 && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base2, 0)))
2485 || (ind1_p
2486 && TREE_CODE (TREE_OPERAND (base1, 0)) == SSA_NAME
2487 && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base1, 0))))
2488 return false;
2490 /* Dispatch to the pointer-vs-decl or pointer-vs-pointer disambiguators. */
2491 if (var1_p && ind2_p)
2492 return indirect_ref_may_alias_decl_p (ref2ref, base2,
2493 offset2, max_size2, ref2->size,
2494 ao_ref_alias_set (ref2),
2495 ao_ref_base_alias_set (ref2),
2496 ref1ref, base1,
2497 offset1, max_size1, ref1->size,
2498 ao_ref_alias_set (ref1),
2499 ao_ref_base_alias_set (ref1),
2500 tbaa_p);
2501 else if (ind1_p && ind2_p)
2502 return indirect_refs_may_alias_p (ref1ref, base1,
2503 offset1, max_size1, ref1->size,
2504 ao_ref_alias_set (ref1),
2505 ao_ref_base_alias_set (ref1),
2506 ref2ref, base2,
2507 offset2, max_size2, ref2->size,
2508 ao_ref_alias_set (ref2),
2509 ao_ref_base_alias_set (ref2),
2510 tbaa_p);
2512 gcc_unreachable ();
2515 /* Return true, if the two memory references REF1 and REF2 may alias
2516 and update statistics. */
2518 bool
2519 refs_may_alias_p_1 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
2521 bool res = refs_may_alias_p_2 (ref1, ref2, tbaa_p);
2522 if (res)
2523 ++alias_stats.refs_may_alias_p_may_alias;
2524 else
2525 ++alias_stats.refs_may_alias_p_no_alias;
2526 return res;
2529 static bool
2530 refs_may_alias_p (tree ref1, ao_ref *ref2, bool tbaa_p)
2532 ao_ref r1;
2533 ao_ref_init (&r1, ref1);
2534 return refs_may_alias_p_1 (&r1, ref2, tbaa_p);
2537 bool
2538 refs_may_alias_p (tree ref1, tree ref2, bool tbaa_p)
2540 ao_ref r1, r2;
2541 ao_ref_init (&r1, ref1);
2542 ao_ref_init (&r2, ref2);
2543 return refs_may_alias_p_1 (&r1, &r2, tbaa_p);
2546 /* Returns true if there is an anti-dependence for the STORE that
2547 executes after the LOAD. */
2549 bool
2550 refs_anti_dependent_p (tree load, tree store)
2552 ao_ref r1, r2;
2553 ao_ref_init (&r1, load);
2554 ao_ref_init (&r2, store);
2555 return refs_may_alias_p_1 (&r1, &r2, false);
2558 /* Returns true if there is an output dependence for the stores
2559 STORE1 and STORE2. */
2561 bool
2562 refs_output_dependent_p (tree store1, tree store2)
2564 ao_ref r1, r2;
2565 ao_ref_init (&r1, store1);
2566 ao_ref_init (&r2, store2);
2567 return refs_may_alias_p_1 (&r1, &r2, false);
2570 /* Return true if REF may access global memory. */
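/* E.g. (illustrative): a ref based on

	int local;	an auto variable of the current function

   that is not in the escaped points-to solution cannot access global
   memory, while a ref based on a pointer dereference may do so unless
   the points-to information of the pointer proves otherwise.  */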
2572 bool
2573 ref_may_access_global_memory_p (ao_ref *ref)
2575 if (!ref->ref)
2576 return true;
2577 tree base = ao_ref_base (ref);
2578 if (TREE_CODE (base) == MEM_REF
2579 || TREE_CODE (base) == TARGET_MEM_REF)
2581 if (ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0)))
2582 return true;
2584 else
2586 if (!auto_var_in_fn_p (base, current_function_decl)
2587 || pt_solution_includes (&cfun->gimple_df->escaped,
2588 base))
2589 return true;
2591 return false;
2594 /* Returns true if and only if REF may alias any access stored in TT.
2595 If TBAA_P is true, use the TBAA oracle. */
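/* TT is nested as (schematically; see ipa-modref-tree.h for the exact
   representation):

	base alias set
	  -> ref alias set
	       -> access (parameter index, offset/size when known)

   E.g. for a callee storing to p->x with p its first parameter, the
   summary may record the base and ref alias sets of that store plus
   parm_index 0; the loops below rebuild an ao_ref from the actual call
   argument and test it against REF.  */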
2597 static bool
2598 modref_may_conflict (const gcall *stmt,
2599 modref_tree <alias_set_type> *tt, ao_ref *ref, bool tbaa_p)
2601 alias_set_type base_set, ref_set;
2602 bool global_memory_ok = false;
2604 if (tt->every_base)
2605 return true;
2607 if (!dbg_cnt (ipa_mod_ref))
2608 return true;
2610 base_set = ao_ref_base_alias_set (ref);
2612 ref_set = ao_ref_alias_set (ref);
2614 int num_tests = 0, max_tests = param_modref_max_tests;
2615 for (auto base_node : tt->bases)
2617 if (tbaa_p && flag_strict_aliasing)
2619 if (num_tests >= max_tests)
2620 return true;
2621 alias_stats.modref_tests++;
2622 if (!alias_sets_conflict_p (base_set, base_node->base))
2623 continue;
2624 num_tests++;
2627 if (base_node->every_ref)
2628 return true;
2630 for (auto ref_node : base_node->refs)
2632 /* Do not repeat the same test as before. */
2633 if ((ref_set != base_set || base_node->base != ref_node->ref)
2634 && tbaa_p && flag_strict_aliasing)
2636 if (num_tests >= max_tests)
2637 return true;
2638 alias_stats.modref_tests++;
2639 if (!alias_sets_conflict_p (ref_set, ref_node->ref))
2640 continue;
2641 num_tests++;
2644 if (ref_node->every_access)
2645 return true;
2647 /* TBAA checks did not disambiguate, try individual accesses. */
2648 for (auto access_node : ref_node->accesses)
2650 if (num_tests >= max_tests)
2651 return true;
2653 if (access_node.parm_index == MODREF_GLOBAL_MEMORY_PARM)
2655 if (global_memory_ok)
2656 continue;
2657 if (ref_may_access_global_memory_p (ref))
2658 return true;
2659 global_memory_ok = true;
2660 num_tests++;
2661 continue;
2664 tree arg = access_node.get_call_arg (stmt);
2665 if (!arg)
2666 return true;
2668 alias_stats.modref_baseptr_tests++;
2670 if (integer_zerop (arg) && flag_delete_null_pointer_checks)
2671 continue;
2673 /* The PTA oracle will be unhappy if arg is not a pointer. */
2674 if (!POINTER_TYPE_P (TREE_TYPE (arg)))
2675 return true;
2677 /* If we don't have a base pointer, give up. */
2678 if (!ref->ref && !ref->base)
2679 continue;
2681 ao_ref ref2;
2682 if (access_node.get_ao_ref (stmt, &ref2))
2684 ref2.ref_alias_set = ref_node->ref;
2685 ref2.base_alias_set = base_node->base;
2686 if (refs_may_alias_p_1 (&ref2, ref, tbaa_p))
2687 return true;
2689 else if (ptr_deref_may_alias_ref_p_1 (arg, ref))
2690 return true;
2692 num_tests++;
2696 return false;
2699 /* Check if REF conflicts with call using "fn spec" attribute.
2700 If CLOBBER is true we are checking for writes, otherwise check loads.
2702 Return 0 if there are no conflicts (except for possible function call
2703 argument reads), 1 if there are conflicts and -1 if we cannot decide by
2704 the fn spec. */
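/* E.g. (an illustrative walk-through): for a call

	memset (p, 0, n)

   whose fnspec records that global memory is not written and that
   argument 0 is written with a size given by argument 2, the loop
   below builds an ao_ref from p and n and checks it against REF;
   if the two do not alias, 0 is returned.  */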
2706 static int
2707 check_fnspec (gcall *call, ao_ref *ref, bool clobber)
2709 attr_fnspec fnspec = gimple_call_fnspec (call);
2710 if (fnspec.known_p ())
2712 if (clobber
2713 ? !fnspec.global_memory_written_p ()
2714 : !fnspec.global_memory_read_p ())
2716 for (unsigned int i = 0; i < gimple_call_num_args (call); i++)
2717 if (POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, i)))
2718 && (!fnspec.arg_specified_p (i)
2719 || (clobber ? fnspec.arg_maybe_written_p (i)
2720 : fnspec.arg_maybe_read_p (i))))
2722 ao_ref dref;
2723 tree size = NULL_TREE;
2724 unsigned int size_arg;
2726 if (!fnspec.arg_specified_p (i))
2728 else if (fnspec.arg_max_access_size_given_by_arg_p
2729 (i, &size_arg))
2730 size = gimple_call_arg (call, size_arg);
2731 else if (fnspec.arg_access_size_given_by_type_p (i))
2733 tree callee = gimple_call_fndecl (call);
2734 tree t = TYPE_ARG_TYPES (TREE_TYPE (callee));
2736 for (unsigned int p = 0; p < i; p++)
2737 t = TREE_CHAIN (t);
2738 size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_VALUE (t)));
2740 ao_ref_init_from_ptr_and_size (&dref,
2741 gimple_call_arg (call, i),
2742 size);
2743 if (refs_may_alias_p_1 (&dref, ref, false))
2744 return 1;
2746 if (clobber
2747 && fnspec.errno_maybe_written_p ()
2748 && flag_errno_math
2749 && targetm.ref_may_alias_errno (ref))
2750 return 1;
2751 return 0;
2755 /* FIXME: we should handle barriers more consistently, but for now leave the
2756 check here. */
2757 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2758 switch (DECL_FUNCTION_CODE (gimple_call_fndecl (call)))
2760 /* __sync_* builtins and some OpenMP builtins act as threading
2761 barriers. */
2762 #undef DEF_SYNC_BUILTIN
2763 #define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
2764 #include "sync-builtins.def"
2765 #undef DEF_SYNC_BUILTIN
2766 case BUILT_IN_GOMP_ATOMIC_START:
2767 case BUILT_IN_GOMP_ATOMIC_END:
2768 case BUILT_IN_GOMP_BARRIER:
2769 case BUILT_IN_GOMP_BARRIER_CANCEL:
2770 case BUILT_IN_GOMP_TASKWAIT:
2771 case BUILT_IN_GOMP_TASKGROUP_END:
2772 case BUILT_IN_GOMP_CRITICAL_START:
2773 case BUILT_IN_GOMP_CRITICAL_END:
2774 case BUILT_IN_GOMP_CRITICAL_NAME_START:
2775 case BUILT_IN_GOMP_CRITICAL_NAME_END:
2776 case BUILT_IN_GOMP_LOOP_END:
2777 case BUILT_IN_GOMP_LOOP_END_CANCEL:
2778 case BUILT_IN_GOMP_ORDERED_START:
2779 case BUILT_IN_GOMP_ORDERED_END:
2780 case BUILT_IN_GOMP_SECTIONS_END:
2781 case BUILT_IN_GOMP_SECTIONS_END_CANCEL:
2782 case BUILT_IN_GOMP_SINGLE_COPY_START:
2783 case BUILT_IN_GOMP_SINGLE_COPY_END:
2784 return 1;
2786 default:
2787 return -1;
2789 return -1;
2792 /* If the call CALL may use the memory reference REF return true,
2793 otherwise return false. */
2795 static bool
2796 ref_maybe_used_by_call_p_1 (gcall *call, ao_ref *ref, bool tbaa_p)
2798 tree base, callee;
2799 unsigned i;
2800 int flags = gimple_call_flags (call);
2802 if (flags & (ECF_CONST|ECF_NOVOPS))
2803 goto process_args;
2805 /* A call that is not without side-effects might involve volatile
2806 accesses and thus conflicts with all other volatile accesses. */
2807 if (ref->volatile_p)
2808 return true;
2810 callee = gimple_call_fndecl (call);
2812 if (callee != NULL_TREE)
2814 struct cgraph_node *node = cgraph_node::get (callee);
2815 /* We cannot safely optimize based on the summary of the callee if it
2816 does not always bind to the current def: it is possible that a memory
2817 load was optimized out earlier and the interposed variant may not be
2818 optimized this way. */
2819 if (node && node->binds_to_current_def_p ())
2821 modref_summary *summary = get_modref_function_summary (node);
2822 if (summary && !summary->calls_interposable)
2824 if (!modref_may_conflict (call, summary->loads, ref, tbaa_p))
2826 alias_stats.modref_use_no_alias++;
2827 if (dump_file && (dump_flags & TDF_DETAILS))
2829 fprintf (dump_file,
2830 "ipa-modref: call stmt ");
2831 print_gimple_stmt (dump_file, call, 0);
2832 fprintf (dump_file,
2833 "ipa-modref: call to %s does not use ",
2834 node->dump_name ());
2835 if (!ref->ref && ref->base)
2837 fprintf (dump_file, "base: ");
2838 print_generic_expr (dump_file, ref->base);
2840 else if (ref->ref)
2842 fprintf (dump_file, "ref: ");
2843 print_generic_expr (dump_file, ref->ref);
2845 fprintf (dump_file, " alias sets: %i->%i\n",
2846 ao_ref_base_alias_set (ref),
2847 ao_ref_alias_set (ref));
2849 goto process_args;
2851 alias_stats.modref_use_may_alias++;
2856 base = ao_ref_base (ref);
2857 if (!base)
2858 return true;
2860 /* If the reference is based on a decl that is not aliased the call
2861 cannot possibly use it. */
2862 if (DECL_P (base)
2863 && !may_be_aliased (base)
2864 /* But local statics can be used through recursion. */
2865 && !is_global_var (base))
2866 goto process_args;
2868 if (int res = check_fnspec (call, ref, false))
2870 if (res == 1)
2871 return true;
2873 else
2874 goto process_args;
2876 /* Check if base is a global static variable that is not read
2877 by the function. */
2878 if (callee != NULL_TREE && VAR_P (base) && TREE_STATIC (base))
2880 struct cgraph_node *node = cgraph_node::get (callee);
2881 bitmap read;
2882 int id;
2884 /* FIXME: Callee can be an OMP builtin that does not have a call graph
2885 node yet. We should enforce that there are nodes for all decls in the
2886 IL and remove this check instead. */
2887 if (node
2888 && (id = ipa_reference_var_uid (base)) != -1
2889 && (read = ipa_reference_get_read_global (node))
2890 && !bitmap_bit_p (read, id))
2891 goto process_args;
2894 /* Check if the base variable is call-used. */
2895 if (DECL_P (base))
2897 if (pt_solution_includes (gimple_call_use_set (call), base))
2898 return true;
2900 else if ((TREE_CODE (base) == MEM_REF
2901 || TREE_CODE (base) == TARGET_MEM_REF)
2902 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
2904 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
2905 if (!pi)
2906 return true;
2908 if (pt_solutions_intersect (gimple_call_use_set (call), &pi->pt))
2909 return true;
2911 else
2912 return true;
2914 /* Inspect call arguments for passed-by-value aliases. */
2915 process_args:
2916 for (i = 0; i < gimple_call_num_args (call); ++i)
2918 tree op = gimple_call_arg (call, i);
2919 int flags = gimple_call_arg_flags (call, i);
2921 if (flags & (EAF_UNUSED | EAF_NO_DIRECT_READ))
2922 continue;
2924 if (TREE_CODE (op) == WITH_SIZE_EXPR)
2925 op = TREE_OPERAND (op, 0);
2927 if (TREE_CODE (op) != SSA_NAME
2928 && !is_gimple_min_invariant (op))
2930 ao_ref r;
2931 ao_ref_init (&r, op);
2932 if (refs_may_alias_p_1 (&r, ref, tbaa_p))
2933 return true;
2937 return false;
2940 static bool
2941 ref_maybe_used_by_call_p (gcall *call, ao_ref *ref, bool tbaa_p)
2943 bool res;
2944 res = ref_maybe_used_by_call_p_1 (call, ref, tbaa_p);
2945 if (res)
2946 ++alias_stats.ref_maybe_used_by_call_p_may_alias;
2947 else
2948 ++alias_stats.ref_maybe_used_by_call_p_no_alias;
2949 return res;
2953 /* If the statement STMT may use the memory reference REF return
2954 true, otherwise return false. */
2956 bool
2957 ref_maybe_used_by_stmt_p (gimple *stmt, ao_ref *ref, bool tbaa_p)
2959 if (is_gimple_assign (stmt))
2961 tree rhs;
2963 /* All assignments that access memory are single (gimple_assign_single_p). */
2964 if (!gimple_assign_single_p (stmt))
2965 return false;
2967 rhs = gimple_assign_rhs1 (stmt);
2968 if (is_gimple_reg (rhs)
2969 || is_gimple_min_invariant (rhs)
2970 || gimple_assign_rhs_code (stmt) == CONSTRUCTOR)
2971 return false;
2973 return refs_may_alias_p (rhs, ref, tbaa_p);
2975 else if (is_gimple_call (stmt))
2976 return ref_maybe_used_by_call_p (as_a <gcall *> (stmt), ref, tbaa_p);
2977 else if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
2979 tree retval = gimple_return_retval (return_stmt);
2980 if (retval
2981 && TREE_CODE (retval) != SSA_NAME
2982 && !is_gimple_min_invariant (retval)
2983 && refs_may_alias_p (retval, ref, tbaa_p))
2984 return true;
2985 /* If ref escapes the function then the return acts as a use. */
2986 tree base = ao_ref_base (ref);
2987 if (!base)
2989 else if (DECL_P (base))
2990 return is_global_var (base);
2991 else if (TREE_CODE (base) == MEM_REF
2992 || TREE_CODE (base) == TARGET_MEM_REF)
2993 return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
2994 return false;
2997 return true;
3000 bool
3001 ref_maybe_used_by_stmt_p (gimple *stmt, tree ref, bool tbaa_p)
3003 ao_ref r;
3004 ao_ref_init (&r, ref);
3005 return ref_maybe_used_by_stmt_p (stmt, &r, tbaa_p);
3008 /* If the call in statement CALL may clobber the memory reference REF
3009 return true, otherwise return false. */
3011 bool
3012 call_may_clobber_ref_p_1 (gcall *call, ao_ref *ref, bool tbaa_p)
3014 tree base;
3015 tree callee;
3017 /* If the call is pure or const it cannot clobber anything. */
3018 if (gimple_call_flags (call)
3019 & (ECF_PURE|ECF_CONST|ECF_LOOPING_CONST_OR_PURE|ECF_NOVOPS))
3020 return false;
3021 if (gimple_call_internal_p (call))
3022 switch (gimple_call_internal_fn (call))
3024 /* Treat these internal calls like ECF_PURE for aliasing,
3025 they don't write to any memory the program should care about.
3026 They have important other side-effects, and read memory,
3027 so can't be ECF_NOVOPS. */
3028 case IFN_UBSAN_NULL:
3029 case IFN_UBSAN_BOUNDS:
3030 case IFN_UBSAN_VPTR:
3031 case IFN_UBSAN_OBJECT_SIZE:
3032 case IFN_UBSAN_PTR:
3033 case IFN_ASAN_CHECK:
3034 return false;
3035 default:
3036 break;
3039 callee = gimple_call_fndecl (call);
3041 if (callee != NULL_TREE && !ref->volatile_p)
3043 struct cgraph_node *node = cgraph_node::get (callee);
3044 if (node)
3046 modref_summary *summary = get_modref_function_summary (node);
3047 if (summary)
3049 if (!modref_may_conflict (call, summary->stores, ref, tbaa_p)
3050 && (!summary->writes_errno
3051 || !targetm.ref_may_alias_errno (ref)))
3053 alias_stats.modref_clobber_no_alias++;
3054 if (dump_file && (dump_flags & TDF_DETAILS))
3056 fprintf (dump_file,
3057 "ipa-modref: call stmt ");
3058 print_gimple_stmt (dump_file, call, 0);
3059 fprintf (dump_file,
3060 "ipa-modref: call to %s does not clobber ",
3061 node->dump_name ());
3062 if (!ref->ref && ref->base)
3064 fprintf (dump_file, "base: ");
3065 print_generic_expr (dump_file, ref->base);
3067 else if (ref->ref)
3069 fprintf (dump_file, "ref: ");
3070 print_generic_expr (dump_file, ref->ref);
3072 fprintf (dump_file, " alias sets: %i->%i\n",
3073 ao_ref_base_alias_set (ref),
3074 ao_ref_alias_set (ref));
3076 return false;
3078 alias_stats.modref_clobber_may_alias++;
3083 base = ao_ref_base (ref);
3084 if (!base)
3085 return true;
3087 if (TREE_CODE (base) == SSA_NAME
3088 || CONSTANT_CLASS_P (base))
3089 return false;
3091 /* A call that is not without side-effects might involve volatile
3092 accesses and thus conflicts with all other volatile accesses. */
3093 if (ref->volatile_p)
3094 return true;
3096 /* If the reference is based on a decl that is not aliased the call
3097 cannot possibly clobber it. */
3098 if (DECL_P (base)
3099 && !may_be_aliased (base)
3100 /* But local non-readonly statics can be modified through recursion
3101 or the call may implement a threading barrier which we must
3102 treat as may-def. */
3103 && (TREE_READONLY (base)
3104 || !is_global_var (base)))
3105 return false;
3107 /* If the reference is based on a pointer that points to memory
3108 that may not be written to then the call cannot possibly clobber it. */
3109 if ((TREE_CODE (base) == MEM_REF
3110 || TREE_CODE (base) == TARGET_MEM_REF)
3111 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
3112 && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base, 0)))
3113 return false;
3115 if (int res = check_fnspec (call, ref, true))
3117 if (res == 1)
3118 return true;
3120 else
3121 return false;
3123 /* Check if base is a global static variable that is not written
3124 by the function. */
3125 if (callee != NULL_TREE && VAR_P (base) && TREE_STATIC (base))
3127 struct cgraph_node *node = cgraph_node::get (callee);
3128 bitmap written;
3129 int id;
3131 if (node
3132 && (id = ipa_reference_var_uid (base)) != -1
3133 && (written = ipa_reference_get_written_global (node))
3134 && !bitmap_bit_p (written, id))
3135 return false;
3138 /* Check if the base variable is call-clobbered. */
3139 if (DECL_P (base))
3140 return pt_solution_includes (gimple_call_clobber_set (call), base);
3141 else if ((TREE_CODE (base) == MEM_REF
3142 || TREE_CODE (base) == TARGET_MEM_REF)
3143 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
3145 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
3146 if (!pi)
3147 return true;
3149 return pt_solutions_intersect (gimple_call_clobber_set (call), &pi->pt);
3152 return true;
3155 /* If the call in statement CALL may clobber the memory reference REF
3156 return true, otherwise return false. */
3158 bool
3159 call_may_clobber_ref_p (gcall *call, tree ref, bool tbaa_p)
3161 bool res;
3162 ao_ref r;
3163 ao_ref_init (&r, ref);
3164 res = call_may_clobber_ref_p_1 (call, &r, tbaa_p);
3165 if (res)
3166 ++alias_stats.call_may_clobber_ref_p_may_alias;
3167 else
3168 ++alias_stats.call_may_clobber_ref_p_no_alias;
3169 return res;
3173 /* If the statement STMT may clobber the memory reference REF return true,
3174 otherwise return false. */
3176 bool
3177 stmt_may_clobber_ref_p_1 (gimple *stmt, ao_ref *ref, bool tbaa_p)
3179 if (is_gimple_call (stmt))
3181 tree lhs = gimple_call_lhs (stmt);
3182 if (lhs
3183 && TREE_CODE (lhs) != SSA_NAME)
3185 ao_ref r;
3186 ao_ref_init (&r, lhs);
3187 if (refs_may_alias_p_1 (ref, &r, tbaa_p))
3188 return true;
3191 return call_may_clobber_ref_p_1 (as_a <gcall *> (stmt), ref, tbaa_p);
3193 else if (gimple_assign_single_p (stmt))
3195 tree lhs = gimple_assign_lhs (stmt);
3196 if (TREE_CODE (lhs) != SSA_NAME)
3198 ao_ref r;
3199 ao_ref_init (&r, lhs);
3200 return refs_may_alias_p_1 (ref, &r, tbaa_p);
3203 else if (gimple_code (stmt) == GIMPLE_ASM)
3204 return true;
3206 return false;
3209 bool
3210 stmt_may_clobber_ref_p (gimple *stmt, tree ref, bool tbaa_p)
3212 ao_ref r;
3213 ao_ref_init (&r, ref);
3214 return stmt_may_clobber_ref_p_1 (stmt, &r, tbaa_p);
3217 /* Return true if store1 and store2 described by corresponding tuples
3218 <BASE, OFFSET, SIZE, MAX_SIZE> have the same size and store to the same
3219 address. */
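/* An illustrative sketch (hypothetical SSA names):

	struct S s;	DECL_SIZE == 64 bits
	p_1 = &s;	points-to set of p_1 == { s }, singleton
	*p_1 = x_2;	MEM_REF base, offset 0, size 64
	s = y_3;	decl base, offset 0, size 64

   The pointer provably points to the start of s and both stores cover
   exactly DECL_SIZE, so they store to the same address and size.  */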
3221 static bool
3222 same_addr_size_stores_p (tree base1, poly_int64 offset1, poly_int64 size1,
3223 poly_int64 max_size1,
3224 tree base2, poly_int64 offset2, poly_int64 size2,
3225 poly_int64 max_size2)
3227 /* Offsets need to be 0. */
3228 if (maybe_ne (offset1, 0)
3229 || maybe_ne (offset2, 0))
3230 return false;
3232 bool base1_obj_p = SSA_VAR_P (base1);
3233 bool base2_obj_p = SSA_VAR_P (base2);
3235 /* We need one object. */
3236 if (base1_obj_p == base2_obj_p)
3237 return false;
3238 tree obj = base1_obj_p ? base1 : base2;
3240 /* And we need one MEM_REF. */
3241 bool base1_memref_p = TREE_CODE (base1) == MEM_REF;
3242 bool base2_memref_p = TREE_CODE (base2) == MEM_REF;
3243 if (base1_memref_p == base2_memref_p)
3244 return false;
3245 tree memref = base1_memref_p ? base1 : base2;
3247 /* Sizes need to be valid. */
3248 if (!known_size_p (max_size1)
3249 || !known_size_p (max_size2)
3250 || !known_size_p (size1)
3251 || !known_size_p (size2))
3252 return false;
3254 /* Max_size needs to match size. */
3255 if (maybe_ne (max_size1, size1)
3256 || maybe_ne (max_size2, size2))
3257 return false;
3259 /* Sizes need to match. */
3260 if (maybe_ne (size1, size2))
3261 return false;
3264 /* Check that memref is a store through a pointer with singleton points-to info. */
3265 if (!integer_zerop (TREE_OPERAND (memref, 1)))
3266 return false;
3267 tree ptr = TREE_OPERAND (memref, 0);
3268 if (TREE_CODE (ptr) != SSA_NAME)
3269 return false;
3270 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
3271 unsigned int pt_uid;
3272 if (pi == NULL
3273 || !pt_solution_singleton_or_null_p (&pi->pt, &pt_uid))
3274 return false;
3276 /* Be conservative with non-call exceptions when the address might
3277 be NULL. */
3278 if (cfun->can_throw_non_call_exceptions && pi->pt.null)
3279 return false;
3281 /* Check that ptr points relative to obj. */
3282 unsigned int obj_uid = DECL_PT_UID (obj);
3283 if (obj_uid != pt_uid)
3284 return false;
3286 /* Check that the object size is the same as the store size. That ensures
3287 that ptr points to the start of obj. */
3288 return (DECL_SIZE (obj)
3289 && poly_int_tree_p (DECL_SIZE (obj))
3290 && known_eq (wi::to_poly_offset (DECL_SIZE (obj)), size1));
3293 /* Return true if REF is killed by a store described by
3294 BASE, OFFSET, SIZE and MAX_SIZE. */
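/* E.g. (an illustrative sketch):

	struct S { int a; int b; } s;
	s = x_1;	store: offset 0, size == max_size == 64
	t_2 = s.a;	REF:   offset 0, max_size == 32

   The store's extent is exactly known and fully contains REF's range,
   so the store kills REF.  */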
3296 static bool
3297 store_kills_ref_p (tree base, poly_int64 offset, poly_int64 size,
3298 poly_int64 max_size, ao_ref *ref)
3300 poly_int64 ref_offset = ref->offset;
3301 /* We can get MEM[symbol: sZ, index: D.8862_1] here,
3302 so base == ref->base does not always hold. */
3303 if (base != ref->base)
3305 /* Try using points-to info. */
3306 if (same_addr_size_stores_p (base, offset, size, max_size, ref->base,
3307 ref->offset, ref->size, ref->max_size))
3308 return true;
3310 /* If both base and ref->base are MEM_REFs, only compare the
3311 first operand, and if the second operand isn't equal constant,
3312 try to add the offsets into offset and ref_offset. */
3313 if (TREE_CODE (base) == MEM_REF && TREE_CODE (ref->base) == MEM_REF
3314 && TREE_OPERAND (base, 0) == TREE_OPERAND (ref->base, 0))
3316 if (!tree_int_cst_equal (TREE_OPERAND (base, 1),
3317 TREE_OPERAND (ref->base, 1)))
3319 poly_offset_int off1 = mem_ref_offset (base);
3320 off1 <<= LOG2_BITS_PER_UNIT;
3321 off1 += offset;
3322 poly_offset_int off2 = mem_ref_offset (ref->base);
3323 off2 <<= LOG2_BITS_PER_UNIT;
3324 off2 += ref_offset;
3325 if (!off1.to_shwi (&offset) || !off2.to_shwi (&ref_offset))
3326 size = -1;
3329 else
3330 size = -1;
3332 /* For a must-alias check we need to be able to constrain
3333 the access properly. */
3334 return (known_eq (size, max_size)
3335 && known_subrange_p (ref_offset, ref->max_size, offset, size));
3338 /* If STMT kills the memory reference REF return true, otherwise
3339 return false. */
3341 bool
3342 stmt_kills_ref_p (gimple *stmt, ao_ref *ref)
3344 if (!ao_ref_base (ref))
3345 return false;
3347 if (gimple_has_lhs (stmt)
3348 && TREE_CODE (gimple_get_lhs (stmt)) != SSA_NAME
3349 /* The assignment is not necessarily carried out if it can throw
3350 and we can catch it in the current function where we could inspect
3351 the previous value.
3352 ??? We only need to care about the RHS throwing. For aggregate
3353 assignments or similar calls and non-call exceptions the LHS
3354 might throw as well. */
3355 && !stmt_can_throw_internal (cfun, stmt))
3357 tree lhs = gimple_get_lhs (stmt);
3358 /* If LHS is literally a base of the access we are done. */
3359 if (ref->ref)
3361 tree base = ref->ref;
3362 tree innermost_dropped_array_ref = NULL_TREE;
3363 if (handled_component_p (base))
3365 tree saved_lhs0 = NULL_TREE;
3366 if (handled_component_p (lhs))
3368 saved_lhs0 = TREE_OPERAND (lhs, 0);
3369 TREE_OPERAND (lhs, 0) = integer_zero_node;
3373 /* Just compare the outermost handled component; if
3374 they are equal we have found a possible common
3375 base. */
3376 tree saved_base0 = TREE_OPERAND (base, 0);
3377 TREE_OPERAND (base, 0) = integer_zero_node;
3378 bool res = operand_equal_p (lhs, base, 0);
3379 TREE_OPERAND (base, 0) = saved_base0;
3380 if (res)
3381 break;
3382 /* Remember if we drop an array-ref that we need to
3383 double-check not being at struct end. */
3384 if (TREE_CODE (base) == ARRAY_REF
3385 || TREE_CODE (base) == ARRAY_RANGE_REF)
3386 innermost_dropped_array_ref = base;
3387 /* Otherwise drop handled components of the access. */
3388 base = saved_base0;
3390 while (handled_component_p (base));
3391 if (saved_lhs0)
3392 TREE_OPERAND (lhs, 0) = saved_lhs0;
3394 /* Finally check if the lhs has the same address and size as the
3395 base candidate of the access. Watch out if we have dropped
3396 an array-ref that was at struct end, this means ref->ref may
3397 be outside of the TYPE_SIZE of its base. */
3398 if ((! innermost_dropped_array_ref
3399 || ! array_at_struct_end_p (innermost_dropped_array_ref))
3400 && (lhs == base
3401 || (((TYPE_SIZE (TREE_TYPE (lhs))
3402 == TYPE_SIZE (TREE_TYPE (base)))
3403 || (TYPE_SIZE (TREE_TYPE (lhs))
3404 && TYPE_SIZE (TREE_TYPE (base))
3405 && operand_equal_p (TYPE_SIZE (TREE_TYPE (lhs)),
3406 TYPE_SIZE (TREE_TYPE (base)),
3407 0)))
3408 && operand_equal_p (lhs, base,
3409 OEP_ADDRESS_OF
3410 | OEP_MATCH_SIDE_EFFECTS))))
3412 ++alias_stats.stmt_kills_ref_p_yes;
3413 return true;
3417 /* Now look for non-literal equal bases with the restriction of
3418 handling constant offset and size. */
3419 /* For a must-alias check we need to be able to constrain
3420 the access properly. */
3421 if (!ref->max_size_known_p ())
3423 ++alias_stats.stmt_kills_ref_p_no;
3424 return false;
3426 poly_int64 size, offset, max_size;
3427 bool reverse;
3428 tree base = get_ref_base_and_extent (lhs, &offset, &size, &max_size,
3429 &reverse);
3430 if (store_kills_ref_p (base, offset, size, max_size, ref))
3432 ++alias_stats.stmt_kills_ref_p_yes;
3433 return true;
3437 if (is_gimple_call (stmt))
3439 tree callee = gimple_call_fndecl (stmt);
3440 struct cgraph_node *node;
3441 modref_summary *summary;
3443 /* Try to disambiguate using the modref summary. Modref records a vector
3444 of stores with known offsets relative to function parameters that must
3445 happen on every execution of the function. Find out if we have a matching
3446 store and verify that the function cannot use the value. */
3447 if (callee != NULL_TREE
3448 && (node = cgraph_node::get (callee)) != NULL
3449 && node->binds_to_current_def_p ()
3450 && (summary = get_modref_function_summary (node)) != NULL
3451 && summary->kills.length ()
3452 && (!cfun->can_throw_non_call_exceptions
3453 || !stmt_can_throw_internal (cfun, stmt)))
3455 for (auto kill : summary->kills)
3457 ao_ref dref;
3459 /* We can only do useful compares if we know the access range
3460 precisely. */
3461 if (!kill.get_ao_ref (as_a <gcall *> (stmt), &dref))
3462 continue;
3463 if (store_kills_ref_p (ao_ref_base (&dref), dref.offset,
3464 dref.size, dref.max_size, ref))
3466 /* For the store to kill REF, REF must not be used
3467 earlier. */
3468 if (ref_maybe_used_by_call_p_1 (as_a <gcall *> (stmt), ref,
3469 true)
3470 || !dbg_cnt (ipa_mod_ref))
3471 break;
3472 if (dump_file && (dump_flags & TDF_DETAILS))
3474 fprintf (dump_file,
3475 "ipa-modref: call stmt ");
3476 print_gimple_stmt (dump_file, stmt, 0);
3477 fprintf (dump_file,
3478 "ipa-modref: call to %s kills ",
3479 node->dump_name ());
3480 print_generic_expr (dump_file, ref->base);
3482 ++alias_stats.modref_kill_yes;
3483 return true;
3486 ++alias_stats.modref_kill_no;
3488 if (callee != NULL_TREE
3489 && gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
3490 switch (DECL_FUNCTION_CODE (callee))
3492 case BUILT_IN_FREE:
3494 tree ptr = gimple_call_arg (stmt, 0);
3495 tree base = ao_ref_base (ref);
3496 if (base && TREE_CODE (base) == MEM_REF
3497 && TREE_OPERAND (base, 0) == ptr)
3499 ++alias_stats.stmt_kills_ref_p_yes;
3500 return true;
3502 break;
3505 case BUILT_IN_MEMCPY:
3506 case BUILT_IN_MEMPCPY:
3507 case BUILT_IN_MEMMOVE:
3508 case BUILT_IN_MEMSET:
3509 case BUILT_IN_MEMCPY_CHK:
3510 case BUILT_IN_MEMPCPY_CHK:
3511 case BUILT_IN_MEMMOVE_CHK:
3512 case BUILT_IN_MEMSET_CHK:
3513 case BUILT_IN_STRNCPY:
3514 case BUILT_IN_STPNCPY:
3515 case BUILT_IN_CALLOC:
3517 /* For a must-alias check we need to be able to constrain
3518 the access properly. */
3519 if (!ref->max_size_known_p ())
3521 ++alias_stats.stmt_kills_ref_p_no;
3522 return false;
3524 tree dest;
3525 tree len;
3527 /* In execution order a calloc call will never kill
3528 anything. However, DSE will (ab)use this interface
3529 to ask if a calloc call writes the same memory locations
3530 as a later assignment, memset, etc. So handle calloc
3531 in the expected way. */
3532 if (DECL_FUNCTION_CODE (callee) == BUILT_IN_CALLOC)
3534 tree arg0 = gimple_call_arg (stmt, 0);
3535 tree arg1 = gimple_call_arg (stmt, 1);
3536 if (TREE_CODE (arg0) != INTEGER_CST
3537 || TREE_CODE (arg1) != INTEGER_CST)
3539 ++alias_stats.stmt_kills_ref_p_no;
3540 return false;
3543 dest = gimple_call_lhs (stmt);
3544 if (!dest)
3546 ++alias_stats.stmt_kills_ref_p_no;
3547 return false;
3549 len = fold_build2 (MULT_EXPR, TREE_TYPE (arg0), arg0, arg1);
3551 else
3553 dest = gimple_call_arg (stmt, 0);
3554 len = gimple_call_arg (stmt, 2);
3556 if (!poly_int_tree_p (len))
3557 return false;
3558 ao_ref dref;
3559 ao_ref_init_from_ptr_and_size (&dref, dest, len);
3560 if (store_kills_ref_p (ao_ref_base (&dref), dref.offset,
3561 dref.size, dref.max_size, ref))
3563 ++alias_stats.stmt_kills_ref_p_yes;
3564 return true;
3566 break;
3569 case BUILT_IN_VA_END:
3571 tree ptr = gimple_call_arg (stmt, 0);
3572 if (TREE_CODE (ptr) == ADDR_EXPR)
3574 tree base = ao_ref_base (ref);
3575 if (TREE_OPERAND (ptr, 0) == base)
3577 ++alias_stats.stmt_kills_ref_p_yes;
3578 return true;
3581 break;
3584 default:;
3587 ++alias_stats.stmt_kills_ref_p_no;
3588 return false;
3591 bool
3592 stmt_kills_ref_p (gimple *stmt, tree ref)
3594 ao_ref r;
3595 ao_ref_init (&r, ref);
3596 return stmt_kills_ref_p (stmt, &r);
3600 /* Walk the virtual use-def chain of VUSE until hitting the virtual operand
3601 TARGET or a statement clobbering the memory reference REF, in which
3602 case false is returned. The walk starts with VUSE, one argument of PHI. */
3604 static bool
3605 maybe_skip_until (gimple *phi, tree &target, basic_block target_bb,
3606 ao_ref *ref, tree vuse, bool tbaa_p, unsigned int &limit,
3607 bitmap *visited, bool abort_on_visited,
3608 void *(*translate)(ao_ref *, tree, void *, translate_flags *),
3609 translate_flags disambiguate_only,
3610 void *data)
3612 basic_block bb = gimple_bb (phi);
3614 if (!*visited)
3615 *visited = BITMAP_ALLOC (NULL);
3617 bitmap_set_bit (*visited, SSA_NAME_VERSION (PHI_RESULT (phi)));
3619 /* Walk until we hit the target. */
3620 while (vuse != target)
3622 gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
3623 /* If we are searching for the target VUSE by walking up to
3624 TARGET_BB dominating the original PHI we are finished once
3625 we reach a default def or a definition in a block dominating
3626 that block. Update TARGET and return. */
3627 if (!target
3628 && (gimple_nop_p (def_stmt)
3629 || dominated_by_p (CDI_DOMINATORS,
3630 target_bb, gimple_bb (def_stmt))))
3632 target = vuse;
3633 return true;
3636 /* Recurse for PHI nodes. */
3637 if (gimple_code (def_stmt) == GIMPLE_PHI)
3639 /* An already visited PHI node ends the walk successfully. */
3640 if (bitmap_bit_p (*visited, SSA_NAME_VERSION (PHI_RESULT (def_stmt))))
3641 return !abort_on_visited;
3642 vuse = get_continuation_for_phi (def_stmt, ref, tbaa_p, limit,
3643 visited, abort_on_visited,
3644 translate, data, disambiguate_only);
3645 if (!vuse)
3646 return false;
3647 continue;
3649 else if (gimple_nop_p (def_stmt))
3650 return false;
3651 else
3653 /* A clobbering statement or the end of the IL ends the walk with failure. */
3654 if ((int)limit <= 0)
3655 return false;
3656 --limit;
3657 if (stmt_may_clobber_ref_p_1 (def_stmt, ref, tbaa_p))
3659 translate_flags tf = disambiguate_only;
3660 if (translate
3661 && (*translate) (ref, vuse, data, &tf) == NULL)
3663 else
3664 return false;
3667 /* If we reach a new basic-block see if we already skipped it
3668 in a previous walk that ended successfully. */
3669 if (gimple_bb (def_stmt) != bb)
3671 if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (vuse)))
3672 return !abort_on_visited;
3673 bb = gimple_bb (def_stmt);
3675 vuse = gimple_vuse (def_stmt);
3677 return true;
3681 /* Starting from a PHI node for the virtual operand of the memory reference
3682 REF, find a continuation virtual operand that allows us to continue
3683 walking statements dominating PHI, skipping only statements that cannot
3684 possibly clobber REF. Decrements LIMIT for each alias disambiguation
3685 done and aborts the walk, returning NULL_TREE if it reaches zero.
3686 Returns NULL_TREE if no suitable virtual operand can be found. */
3688 tree
3689 get_continuation_for_phi (gimple *phi, ao_ref *ref, bool tbaa_p,
3690 unsigned int &limit, bitmap *visited,
3691 bool abort_on_visited,
3692 void *(*translate)(ao_ref *, tree, void *,
3693 translate_flags *),
3694 void *data,
3695 translate_flags disambiguate_only)
3697 unsigned nargs = gimple_phi_num_args (phi);
3699 /* We can simply look through a single-argument PHI. */
3700 if (nargs == 1)
3701 return PHI_ARG_DEF (phi, 0);
3703 /* For two or more arguments try to pairwise skip non-aliasing code
3704 until we hit the phi argument definition that dominates the others. */
3705 basic_block phi_bb = gimple_bb (phi);
3706 tree arg0, arg1;
3707 unsigned i;
3709 /* Find a candidate for the virtual operand whose definition
3710 dominates those of all others. */
3711 /* First look if any of the args themselves satisfy this. */
3712 for (i = 0; i < nargs; ++i)
3714 arg0 = PHI_ARG_DEF (phi, i);
3715 if (SSA_NAME_IS_DEFAULT_DEF (arg0))
3716 break;
3717 basic_block def_bb = gimple_bb (SSA_NAME_DEF_STMT (arg0));
3718 if (def_bb != phi_bb
3719 && dominated_by_p (CDI_DOMINATORS, phi_bb, def_bb))
3720 break;
3721 arg0 = NULL_TREE;
3723 /* If not, look if we can reach such a candidate by walking defs
3724 until we hit the immediate dominator. maybe_skip_until will
3725 do that for us. */
3726 basic_block dom = get_immediate_dominator (CDI_DOMINATORS, phi_bb);
3728 /* Then check against the (to be) found candidate. */
3729 for (i = 0; i < nargs; ++i)
3731 arg1 = PHI_ARG_DEF (phi, i);
3732 if (arg1 == arg0)
3733 ;
3734 else if (! maybe_skip_until (phi, arg0, dom, ref, arg1, tbaa_p,
3735 limit, visited,
3736 abort_on_visited,
3737 translate,
3738 /* Do not valueize when walking over
3739 backedges. */
3740 dominated_by_p
3741 (CDI_DOMINATORS,
3742 gimple_bb (SSA_NAME_DEF_STMT (arg1)),
3743 phi_bb)
3744 ? TR_DISAMBIGUATE
3745 : disambiguate_only, data))
3746 return NULL_TREE;
3749 return arg0;
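/* Illustrative example (hypothetical CFG, not from the sources): for a
   diamond

       bb0: vuse_0 = ...
        /           \
   bb1: store,       bb2: no stores
        vdef vuse_1
        \           /
       bb3: vuse_3 = PHI <vuse_1 (bb1), vuse_0 (bb2)>

   the definition of vuse_0 dominates bb3, so it becomes the candidate
   arg0 above; maybe_skip_until then walks from vuse_1 back towards it.
   The PHI can be skipped (vuse_0 is returned) only if the store in bb1
   does not clobber REF, each such query decrementing LIMIT.  */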
3752 /* Based on the memory reference REF and its virtual use VUSE call
3753 WALKER for each virtual use that is equivalent to VUSE, including VUSE
3754 itself. That is, for each virtual use whose defining statement
3755 does not clobber REF.
3757 WALKER is called with REF, the current virtual use and DATA. If
3758 WALKER returns non-NULL the walk stops and its result is returned.
3759 At the end of a non-successful walk NULL is returned.
3761 TRANSLATE if non-NULL is called with a pointer to REF, the virtual
3762 use whose defining statement may clobber REF, and DATA.
3763 If TRANSLATE returns (void *)-1 the walk stops and NULL is returned.
3764 If TRANSLATE returns non-NULL the walk stops and its result is returned.
3765 If TRANSLATE returns NULL the walk continues and TRANSLATE is supposed
3766 to adjust REF and *DATA to make that valid.
3768 VALUEIZE if non-NULL is called with the next VUSE that is considered
3769 and its return value is substituted for it. This can be used to
3770 implement optimistic value-numbering for example. Note that the
3771 VUSE argument is assumed to be valueized already.
3773 LIMIT specifies the number of alias queries we are allowed to do,
3774 the walk stops when it reaches zero and NULL is returned. LIMIT
3775 is decremented by the number of alias queries (plus adjustments
3776 done by the callbacks) upon return.
3778 TODO: Cache the vector of equivalent vuses per ref, vuse pair. */
3780 void *
3781 walk_non_aliased_vuses (ao_ref *ref, tree vuse, bool tbaa_p,
3782 void *(*walker)(ao_ref *, tree, void *),
3783 void *(*translate)(ao_ref *, tree, void *,
3784 translate_flags *),
3785 tree (*valueize)(tree),
3786 unsigned &limit, void *data)
3788 bitmap visited = NULL;
3789 void *res;
3790 bool translated = false;
3792 timevar_push (TV_ALIAS_STMT_WALK);
3794 do
3796 gimple *def_stmt;
3798 /* ??? Do we want to account this to TV_ALIAS_STMT_WALK? */
3799 res = (*walker) (ref, vuse, data);
3800 /* Abort walk. */
3801 if (res == (void *)-1)
3803 res = NULL;
3804 break;
3806 /* Lookup succeeded. */
3807 else if (res != NULL)
3808 break;
3810 if (valueize)
3812 vuse = valueize (vuse);
3813 if (!vuse)
3815 res = NULL;
3816 break;
3819 def_stmt = SSA_NAME_DEF_STMT (vuse);
3820 if (gimple_nop_p (def_stmt))
3821 break;
3822 else if (gimple_code (def_stmt) == GIMPLE_PHI)
3823 vuse = get_continuation_for_phi (def_stmt, ref, tbaa_p, limit,
3824 &visited, translated, translate, data);
3825 else
3827 if ((int)limit <= 0)
3829 res = NULL;
3830 break;
3832 --limit;
3833 if (stmt_may_clobber_ref_p_1 (def_stmt, ref, tbaa_p))
3835 if (!translate)
3836 break;
3837 translate_flags disambiguate_only = TR_TRANSLATE;
3838 res = (*translate) (ref, vuse, data, &disambiguate_only);
3839 /* Failed lookup and translation. */
3840 if (res == (void *)-1)
3842 res = NULL;
3843 break;
3845 /* Lookup succeeded. */
3846 else if (res != NULL)
3847 break;
3848 /* Translation succeeded, continue walking. */
3849 translated = translated || disambiguate_only == TR_TRANSLATE;
3851 vuse = gimple_vuse (def_stmt);
3854 while (vuse);
3856 if (visited)
3857 BITMAP_FREE (visited);
3859 timevar_pop (TV_ALIAS_STMT_WALK);
3861 return res;
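/* Usage sketch (hypothetical; the names are invented): walk from
   FROM_VUSE and ask whether TARGET_VUSE is reachable without crossing a
   statement that may clobber REF, i.e. whether REF has the same value
   at both points.  */

static void *
stop_at_target_vuse (ao_ref *, tree vuse, void *data)
{
  /* Returning non-NULL stops the walk successfully.  */
  return vuse == (tree) data ? (void *) vuse : NULL;
}

static bool
ref_unchanged_between_p (ao_ref *ref, tree from_vuse, tree target_vuse)
{
  unsigned limit = 100;	/* Arbitrary alias-query budget.  */
  return walk_non_aliased_vuses (ref, from_vuse, /*tbaa_p=*/true,
				 stop_at_target_vuse, /*translate=*/NULL,
				 /*valueize=*/NULL, limit,
				 (void *) target_vuse) != NULL;
}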
3865 /* Based on the memory reference REF call WALKER for each vdef whose
3866 defining statement may clobber REF, starting with VDEF. If REF
3867 is NULL_TREE, each defining statement is visited.
3869 WALKER is called with REF, the current vdef and DATA. If WALKER
3870 returns true the walk is stopped, otherwise it continues.
3872 If function entry is reached, FUNCTION_ENTRY_REACHED is set to true.
3873 The pointer may be NULL, in which case this information is not tracked.
3875 At PHI nodes walk_aliased_vdefs forks into one walk for each
3876 PHI argument (but only one walk continues at merge points); the
3877 return value is true if any of the walks was successful.
3879 The function returns the number of statements walked or -1 if
3880 LIMIT stmts were walked and the walk was aborted at this point.
3881 If LIMIT is zero the walk is not aborted. */
3883 static int
3884 walk_aliased_vdefs_1 (ao_ref *ref, tree vdef,
3885 bool (*walker)(ao_ref *, tree, void *), void *data,
3886 bitmap *visited, unsigned int cnt,
3887 bool *function_entry_reached, unsigned limit)
3889 do
3891 gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);
3893 if (*visited
3894 && !bitmap_set_bit (*visited, SSA_NAME_VERSION (vdef)))
3895 return cnt;
3897 if (gimple_nop_p (def_stmt))
3899 if (function_entry_reached)
3900 *function_entry_reached = true;
3901 return cnt;
3903 else if (gimple_code (def_stmt) == GIMPLE_PHI)
3905 unsigned i;
3906 if (!*visited)
3907 *visited = BITMAP_ALLOC (NULL);
3908 for (i = 0; i < gimple_phi_num_args (def_stmt); ++i)
3910 int res = walk_aliased_vdefs_1 (ref,
3911 gimple_phi_arg_def (def_stmt, i),
3912 walker, data, visited, cnt,
3913 function_entry_reached, limit);
3914 if (res == -1)
3915 return -1;
3916 cnt = res;
3918 return cnt;
3921 /* ??? Do we want to account this to TV_ALIAS_STMT_WALK? */
3922 cnt++;
3923 if (cnt == limit)
3924 return -1;
3925 if ((!ref
3926 || stmt_may_clobber_ref_p_1 (def_stmt, ref))
3927 && (*walker) (ref, vdef, data))
3928 return cnt;
3930 vdef = gimple_vuse (def_stmt);
3932 while (1);
3935 int
3936 walk_aliased_vdefs (ao_ref *ref, tree vdef,
3937 bool (*walker)(ao_ref *, tree, void *), void *data,
3938 bitmap *visited,
3939 bool *function_entry_reached, unsigned int limit)
3941 bitmap local_visited = NULL;
3942 int ret;
3944 timevar_push (TV_ALIAS_STMT_WALK);
3946 if (function_entry_reached)
3947 *function_entry_reached = false;
3949 ret = walk_aliased_vdefs_1 (ref, vdef, walker, data,
3950 visited ? visited : &local_visited, 0,
3951 function_entry_reached, limit);
3952 if (local_visited)
3953 BITMAP_FREE (local_visited);
3955 timevar_pop (TV_ALIAS_STMT_WALK);
3957 return ret;
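/* Usage sketch (hypothetical; the names are invented): report whether
   any statement on the vdef chain starting at USE_STMT's vuse may
   clobber REF, giving up after 32 visited statements.  */

static bool
note_clobber (ao_ref *, tree, void *data)
{
  *(bool *) data = true;
  return true;	/* Stop at the first may-clobber.  */
}

static bool
ref_maybe_clobbered_above_p (ao_ref *ref, gimple *use_stmt)
{
  bool clobbered = false;
  if (walk_aliased_vdefs (ref, gimple_vuse (use_stmt), note_clobber,
			  &clobbered, /*visited=*/NULL,
			  /*function_entry_reached=*/NULL, /*limit=*/32) == -1)
    return true;	/* Walk aborted; assume the worst.  */
  return clobbered;
}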
3960 /* Verify validity of the fnspec string.
3961 See attr-fnspec.h for details. */
3963 void
3964 attr_fnspec::verify ()
3966 bool err = false;
3967 if (!len)
3968 return;
3970 /* Check return value specifier. */
3971 if (len < return_desc_size)
3972 err = true;
3973 else if ((len - return_desc_size) % arg_desc_size)
3974 err = true;
3975 else if ((str[0] < '1' || str[0] > '4')
3976 && str[0] != '.' && str[0] != 'm')
3977 err = true;
3979 switch (str[1])
3981 case ' ':
3982 case 'p':
3983 case 'P':
3984 case 'c':
3985 case 'C':
3986 break;
3987 default:
3988 err = true;
3990 if (err)
3991 internal_error ("invalid fn spec attribute \"%s\"", str);
3993 /* Now check all parameters. */
3994 for (unsigned int i = 0; arg_specified_p (i); i++)
3996 unsigned int idx = arg_idx (i);
3997 switch (str[idx])
3999 case 'x':
4000 case 'X':
4001 case 'r':
4002 case 'R':
4003 case 'o':
4004 case 'O':
4005 case 'w':
4006 case 'W':
4007 case '.':
4008 if ((str[idx + 1] >= '1' && str[idx + 1] <= '9')
4009 || str[idx + 1] == 't')
4011 if (str[idx] != 'r' && str[idx] != 'R'
4012 && str[idx] != 'w' && str[idx] != 'W'
4013 && str[idx] != 'o' && str[idx] != 'O')
4014 err = true;
4015 if (str[idx + 1] != 't'
4016 /* Size specified is scalar, so it should be described
4017 by ". " if specified at all. */
4018 && (arg_specified_p (str[idx + 1] - '1')
4019 && str[arg_idx (str[idx + 1] - '1')] != '.'))
4020 err = true;
4022 else if (str[idx + 1] != ' ')
4023 err = true;
4024 break;
4025 default:
4026 if (str[idx] < '1' || str[idx] > '9')
4027 err = true;
4029 if (err)
4030 internal_error ("invalid fn spec attribute \"%s\" arg %i", str, i);
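/* Two illustrative fnspec strings accepted by the verifier above; they
   are constructed from the checks themselves rather than taken from any
   builtin (attr-fnspec.h documents the exact letter semantics):

     ". R "    return descriptor ". " (nothing known about the return
	       value) followed by one argument descriptor: 'R' marks a
	       read-only pointer argument, ' ' gives no size information.

     ".cR2. "  a const function ('c') whose first argument is read-only
	       with its size given by argument 2; argument 2 itself is a
	       scalar and must therefore be described by ". ", exactly as
	       the check above enforces.  */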
4034 /* Return true if TYPE1 and TYPE2 will always give the same answer
4035 when compared with other types using same_type_for_tbaa_p. */
4037 static bool
4038 types_equal_for_same_type_for_tbaa_p (tree type1, tree type2,
4039 bool lto_streaming_safe)
4041 /* We use same_type_for_tbaa_p to match types in the access path.
4042 This check is overly conservative. */
4043 type1 = TYPE_MAIN_VARIANT (type1);
4044 type2 = TYPE_MAIN_VARIANT (type2);
4046 if (TYPE_STRUCTURAL_EQUALITY_P (type1)
4047 != TYPE_STRUCTURAL_EQUALITY_P (type2))
4048 return false;
4049 if (TYPE_STRUCTURAL_EQUALITY_P (type1))
4050 return true;
4052 if (lto_streaming_safe)
4053 return type1 == type2;
4054 else
4055 return TYPE_CANONICAL (type1) == TYPE_CANONICAL (type2);
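/* For example, "int" and "const int" share TYPE_MAIN_VARIANT and hence
   always compare equal here.  Two structurally identical types from
   different translation units are merged only via TYPE_CANONICAL, which
   is recomputed during LTO streaming; that is why the streaming-safe
   path falls back to pointer equality of the main variants.  */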
4058 /* Compare REF1 and REF2 and return flags specifying their differences.
4059 If LTO_STREAMING_SAFE is true do not use alias sets and canonical
4060 types that are going to be recomputed.
4061 If TBAA is true also compare TBAA metadata. */
4063 int
4064 ao_compare::compare_ao_refs (ao_ref *ref1, ao_ref *ref2,
4065 bool lto_streaming_safe,
4066 bool tbaa)
4068 if (TREE_THIS_VOLATILE (ref1->ref) != TREE_THIS_VOLATILE (ref2->ref))
4069 return SEMANTICS;
4070 tree base1 = ao_ref_base (ref1);
4071 tree base2 = ao_ref_base (ref2);
4073 if (!known_eq (ref1->offset, ref2->offset)
4074 || !known_eq (ref1->size, ref2->size)
4075 || !known_eq (ref1->max_size, ref2->max_size))
4076 return SEMANTICS;
4078 /* For variable accesses we need to compare actual paths
4079 to check that both refs access the same address and the same access size. */
4080 if (!known_eq (ref1->size, ref1->max_size))
4082 if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (ref1->ref)),
4083 TYPE_SIZE (TREE_TYPE (ref2->ref)), 0))
4084 return SEMANTICS;
4085 tree r1 = ref1->ref;
4086 tree r2 = ref2->ref;
4088 /* Handle toplevel COMPONENT_REFs of bitfields.
4089 Those are special since they are not allowed in
4090 ADDR_EXPR. */
4091 if (TREE_CODE (r1) == COMPONENT_REF
4092 && DECL_BIT_FIELD (TREE_OPERAND (r1, 1)))
4094 if (TREE_CODE (r2) != COMPONENT_REF
4095 || !DECL_BIT_FIELD (TREE_OPERAND (r2, 1)))
4096 return SEMANTICS;
4097 tree field1 = TREE_OPERAND (r1, 1);
4098 tree field2 = TREE_OPERAND (r2, 1);
4099 if (!operand_equal_p (DECL_FIELD_OFFSET (field1),
4100 DECL_FIELD_OFFSET (field2), 0)
4101 || !operand_equal_p (DECL_FIELD_BIT_OFFSET (field1),
4102 DECL_FIELD_BIT_OFFSET (field2), 0)
4103 || !operand_equal_p (DECL_SIZE (field1), DECL_SIZE (field2), 0)
4104 || !types_compatible_p (TREE_TYPE (r1),
4105 TREE_TYPE (r2)))
4106 return SEMANTICS;
4107 r1 = TREE_OPERAND (r1, 0);
4108 r2 = TREE_OPERAND (r2, 0);
4110 else if (TREE_CODE (r2) == COMPONENT_REF
4111 && DECL_BIT_FIELD (TREE_OPERAND (r2, 1)))
4112 return SEMANTICS;
4114 /* Similarly for bit field refs. */
4115 if (TREE_CODE (r1) == BIT_FIELD_REF)
4117 if (TREE_CODE (r2) != BIT_FIELD_REF
4118 || !operand_equal_p (TREE_OPERAND (r1, 1),
4119 TREE_OPERAND (r2, 1), 0)
4120 || !operand_equal_p (TREE_OPERAND (r1, 2),
4121 TREE_OPERAND (r2, 2), 0)
4122 || !types_compatible_p (TREE_TYPE (r1),
4123 TREE_TYPE (r2)))
4124 return SEMANTICS;
4125 r1 = TREE_OPERAND (r1, 0);
4126 r2 = TREE_OPERAND (r2, 0);
4128 else if (TREE_CODE (r2) == BIT_FIELD_REF)
4129 return SEMANTICS;
4131 /* Now we can compare the address of actual memory access. */
4132 if (!operand_equal_p (r1, r2, OEP_ADDRESS_OF | OEP_MATCH_SIDE_EFFECTS))
4133 return SEMANTICS;
4135 /* For constant accesses we get more matches by comparing offset only. */
4136 else if (!operand_equal_p (base1, base2,
4137 OEP_ADDRESS_OF | OEP_MATCH_SIDE_EFFECTS))
4138 return SEMANTICS;
4140 /* We can't simply use get_object_alignment_1 on the full
4141 reference as for accesses with variable indexes this reports
4142 an overly conservative alignment. */
4143 unsigned int align1, align2;
4144 unsigned HOST_WIDE_INT bitpos1, bitpos2;
4145 bool known1 = get_object_alignment_1 (base1, &align1, &bitpos1);
4146 bool known2 = get_object_alignment_1 (base2, &align2, &bitpos2);
4147 /* ??? For MEM_REF get_object_alignment_1 determines alignment from
4148 TYPE_ALIGN but still returns false. This seems to contradict
4149 its description. So compare even if alignment is unknown. */
4150 if (known1 != known2
4151 || (bitpos1 != bitpos2 || align1 != align2))
4152 return SEMANTICS;
4154 /* Now we know that accesses are semantically same. */
4155 int flags = 0;
4157 /* ao_ref_base strips inner MEM_REF [&decl], recover from that here. */
4158 tree rbase1 = ref1->ref;
4159 if (rbase1)
4160 while (handled_component_p (rbase1))
4161 rbase1 = TREE_OPERAND (rbase1, 0);
4162 tree rbase2 = ref2->ref;
4163 while (handled_component_p (rbase2))
4164 rbase2 = TREE_OPERAND (rbase2, 0);
4166 /* MEM_REFs and TARGET_MEM_REFs record dependence cliques which are used to
4167 implement restrict pointers. MR_DEPENDENCE_CLIQUE 0 means no information.
4168 Otherwise we need to match bases and cliques. */
4169 if ((((TREE_CODE (rbase1) == MEM_REF || TREE_CODE (rbase1) == TARGET_MEM_REF)
4170 && MR_DEPENDENCE_CLIQUE (rbase1))
4171 || ((TREE_CODE (rbase2) == MEM_REF || TREE_CODE (rbase2) == TARGET_MEM_REF)
4172 && MR_DEPENDENCE_CLIQUE (rbase2)))
4173 && (TREE_CODE (rbase1) != TREE_CODE (rbase2)
4174 || MR_DEPENDENCE_CLIQUE (rbase1) != MR_DEPENDENCE_CLIQUE (rbase2)
4175 || (MR_DEPENDENCE_BASE (rbase1) != MR_DEPENDENCE_BASE (rbase2))))
4176 flags |= DEPENDENCE_CLIQUE;
4178 if (!tbaa)
4179 return flags;
4181 /* Alias sets are not stable across LTO streaming; be conservative here
4182 and compare types the alias sets are ultimately based on. */
4183 if (lto_streaming_safe)
4185 tree t1 = ao_ref_alias_ptr_type (ref1);
4186 tree t2 = ao_ref_alias_ptr_type (ref2);
4187 if (!alias_ptr_types_compatible_p (t1, t2))
4188 flags |= REF_ALIAS_SET;
4190 t1 = ao_ref_base_alias_ptr_type (ref1);
4191 t2 = ao_ref_base_alias_ptr_type (ref2);
4192 if (!alias_ptr_types_compatible_p (t1, t2))
4193 flags |= BASE_ALIAS_SET;
4195 else
4197 if (ao_ref_alias_set (ref1) != ao_ref_alias_set (ref2))
4198 flags |= REF_ALIAS_SET;
4199 if (ao_ref_base_alias_set (ref1) != ao_ref_base_alias_set (ref2))
4200 flags |= BASE_ALIAS_SET;
4203 /* Access path is used only on non-view-converted references. */
4204 bool view_converted = view_converted_memref_p (rbase1);
4205 if (view_converted_memref_p (rbase2) != view_converted)
4206 return flags | ACCESS_PATH;
4207 else if (view_converted)
4208 return flags;
4211 /* Find start of access paths and look for trailing arrays. */
4212 tree c1 = ref1->ref, c2 = ref2->ref;
4213 tree end_struct_ref1 = NULL, end_struct_ref2 = NULL;
4214 int nskipped1 = 0, nskipped2 = 0;
4215 int i = 0;
4217 for (tree p1 = ref1->ref; handled_component_p (p1); p1 = TREE_OPERAND (p1, 0))
4219 if (component_ref_to_zero_sized_trailing_array_p (p1))
4220 end_struct_ref1 = p1;
4221 if (ends_tbaa_access_path_p (p1))
4222 c1 = p1, nskipped1 = i;
4223 i++;
4224 i = 0;
4225 for (tree p2 = ref2->ref; handled_component_p (p2); p2 = TREE_OPERAND (p2, 0))
4227 if (component_ref_to_zero_sized_trailing_array_p (p2))
4228 end_struct_ref2 = p2;
4229 if (ends_tbaa_access_path_p (p2))
4230 c2 = p2, nskipped2 = i;
4231 i++;
4234 /* For variable accesses we cannot rely on the offset match below.
4235 We know that the paths are structurally the same, so only check
4236 that the starts of the TBAA paths did not diverge. */
4237 if (!known_eq (ref1->size, ref1->max_size)
4238 && nskipped1 != nskipped2)
4239 return flags | ACCESS_PATH;
4241 /* Information about trailing refs is used by
4242 aliasing_component_refs_p, which is applied only if the paths
4243 have handled components. */
4244 if (!handled_component_p (c1) && !handled_component_p (c2))
4245 ;
4246 else if ((end_struct_ref1 != NULL) != (end_struct_ref2 != NULL))
4247 return flags | ACCESS_PATH;
4248 if (end_struct_ref1
4249 && TYPE_MAIN_VARIANT (TREE_TYPE (end_struct_ref1))
4250 != TYPE_MAIN_VARIANT (TREE_TYPE (end_struct_ref2)))
4251 return flags | ACCESS_PATH;
4253 /* Now compare all handled components of the access path.
4254 We have three oracles that care about access paths:
4255 - aliasing_component_refs_p
4256 - nonoverlapping_refs_since_match_p
4257 - nonoverlapping_component_refs_p
4258 We need to match things these oracles compare.
4260 It is only necessary to check the types for compatibility
4261 and the offsets. The rest of what the oracles compare are
4262 actual addresses, which are already known to be the same:
4263 - for constant accesses we check offsets
4264 - for variable accesses we already matched
4265 the path lexically with operand_equal_p. */
4266 while (true)
4268 bool comp1 = handled_component_p (c1);
4269 bool comp2 = handled_component_p (c2);
4271 if (comp1 != comp2)
4272 return flags | ACCESS_PATH;
4273 if (!comp1)
4274 break;
4276 if (TREE_CODE (c1) != TREE_CODE (c2))
4277 return flags | ACCESS_PATH;
4279 /* aliasing_component_refs_p attempts to find a type match within
4280 the paths. For that reason both types need to be equal
4281 with respect to same_type_for_tbaa_p. */
4282 if (!types_equal_for_same_type_for_tbaa_p (TREE_TYPE (c1),
4283 TREE_TYPE (c2),
4284 lto_streaming_safe))
4285 return flags | ACCESS_PATH;
4286 if (component_ref_to_zero_sized_trailing_array_p (c1)
4287 != component_ref_to_zero_sized_trailing_array_p (c2))
4288 return flags | ACCESS_PATH;
4290 /* aliasing_matching_component_refs_p compares
4291 offsets within the path. Other properties are ignored.
4292 Do not bother to verify offsets in variable accesses. Here we
4293 already compared them by operand_equal_p, so they are
4294 structurally the same. */
4295 if (!known_eq (ref1->size, ref1->max_size))
4297 poly_int64 offadj1, sztmc1, msztmc1;
4298 bool reverse1;
4299 get_ref_base_and_extent (c1, &offadj1, &sztmc1, &msztmc1, &reverse1);
4300 poly_int64 offadj2, sztmc2, msztmc2;
4301 bool reverse2;
4302 get_ref_base_and_extent (c2, &offadj2, &sztmc2, &msztmc2, &reverse2);
4303 if (!known_eq (offadj1, offadj2))
4304 return flags | ACCESS_PATH;
4306 c1 = TREE_OPERAND (c1, 0);
4307 c2 = TREE_OPERAND (c2, 0);
4309 /* Finally test the access type. */
4310 if (!types_equal_for_same_type_for_tbaa_p (TREE_TYPE (c1),
4311 TREE_TYPE (c2),
4312 lto_streaming_safe))
4313 return flags | ACCESS_PATH;
4314 return flags;
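/* Illustrative invariant (hypothetical checking code): the hashing
   routine below must be compatible with the comparison above, i.e.
   refs that compare as equal (flags == 0) must hash identically:

     inchash::hash h1, h2;
     cmp.hash_ao_ref (&ref1, lto_safe, tbaa, h1);
     cmp.hash_ao_ref (&ref2, lto_safe, tbaa, h2);
     if (cmp.compare_ao_refs (&ref1, &ref2, lto_safe, tbaa) == 0)
       gcc_checking_assert (h1.end () == h2.end ());  */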
4317 /* Hash REF to HSTATE. If LTO_STREAMING_SAFE do not use alias sets
4318 and canonical types. */
4319 void
4320 ao_compare::hash_ao_ref (ao_ref *ref, bool lto_streaming_safe, bool tbaa,
4321 inchash::hash &hstate)
4323 tree base = ao_ref_base (ref);
4324 tree tbase = base;
4326 if (!known_eq (ref->size, ref->max_size))
4328 tree r = ref->ref;
4329 if (TREE_CODE (r) == COMPONENT_REF
4330 && DECL_BIT_FIELD (TREE_OPERAND (r, 1)))
4332 tree field = TREE_OPERAND (r, 1);
4333 hash_operand (DECL_FIELD_OFFSET (field), hstate, 0);
4334 hash_operand (DECL_FIELD_BIT_OFFSET (field), hstate, 0);
4335 hash_operand (DECL_SIZE (field), hstate, 0);
4336 r = TREE_OPERAND (r, 0);
4338 if (TREE_CODE (r) == BIT_FIELD_REF)
4340 hash_operand (TREE_OPERAND (r, 1), hstate, 0);
4341 hash_operand (TREE_OPERAND (r, 2), hstate, 0);
4342 r = TREE_OPERAND (r, 0);
4344 hash_operand (TYPE_SIZE (TREE_TYPE (ref->ref)), hstate, 0);
4345 hash_operand (r, hstate, OEP_ADDRESS_OF | OEP_MATCH_SIDE_EFFECTS);
4347 else
4349 hash_operand (tbase, hstate, OEP_ADDRESS_OF | OEP_MATCH_SIDE_EFFECTS);
4350 hstate.add_poly_int (ref->offset);
4351 hstate.add_poly_int (ref->size);
4352 hstate.add_poly_int (ref->max_size);
4354 if (!lto_streaming_safe && tbaa)
4356 hstate.add_int (ao_ref_alias_set (ref));
4357 hstate.add_int (ao_ref_base_alias_set (ref));