/* Alias analysis for trees.
   Copyright (C) 2004-2021 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "timevar.h"	/* for TV_ALIAS_STMT_WALK */
#include "ssa.h"
#include "cgraph.h"
#include "tree-pretty-print.h"
#include "alias.h"
#include "fold-const.h"
#include "langhooks.h"
#include "dumpfile.h"
#include "tree-eh.h"
#include "tree-dfa.h"
#include "ipa-reference.h"
#include "varasm.h"
#include "ipa-modref-tree.h"
#include "ipa-modref.h"
#include "attr-fnspec.h"
#include "errors.h"
#include "dbgcnt.h"
#include "gimple-pretty-print.h"
#include "print-tree.h"
#include "tree-ssa-alias-compare.h"
#include "builtins.h"

/* Broad overview of how alias analysis on gimple works:

   Statements clobbering or using memory are linked through the
   virtual operand factored use-def chain.  The virtual operand
   is unique per function, its symbol is accessible via gimple_vop (cfun).
   Virtual operands are used for efficiently walking memory statements
   in the gimple IL and are useful for things like value-numbering as
   a generation count for memory references.

   SSA_NAME pointers may have associated points-to information
   accessible via the SSA_NAME_PTR_INFO macro.  Flow-insensitive
   points-to information is (re-)computed by the TODO_rebuild_alias
   pass manager todo.  Points-to information is also used for more
   precise tracking of call-clobbered and call-used variables and
   related disambiguations.

   This file contains functions for disambiguating memory references,
   the so-called alias oracle, and tools for walking the gimple IL.

   The main alias-oracle entry-points are

   bool stmt_may_clobber_ref_p (gimple *, tree)

     This function queries if a statement may invalidate (parts of)
     the memory designated by the reference tree argument.

   bool ref_maybe_used_by_stmt_p (gimple *, tree)

     This function queries if a statement may need (parts of) the
     memory designated by the reference tree argument.

   There are variants of these functions that only handle the call
   part of a statement, call_may_clobber_ref_p and ref_maybe_used_by_call_p.
   Note that these do not disambiguate against a possible call lhs.

   bool refs_may_alias_p (tree, tree)

     This function tries to disambiguate two reference trees.

   bool ptr_deref_may_alias_global_p (tree)

     This function queries if dereferencing a pointer variable may
     alias global memory.

   More low-level disambiguators are available and documented in
   this file.  Low-level disambiguators dealing with points-to
   information are in tree-ssa-structalias.c.  */
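
/* For illustration only: a minimal sketch of a typical client.  STMT and
   REF are assumed to be supplied by the caller; both queries answer
   conservatively, so a "false" result is a proof of independence.

     if (!stmt_may_clobber_ref_p (stmt, ref)
	 && !ref_maybe_used_by_stmt_p (stmt, ref))
       stmt_is_independent_of_ref ();

   Here stmt_is_independent_of_ref is a placeholder for whatever the
   caller wants to do with the disambiguation result.  */
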
static int nonoverlapping_refs_since_match_p (tree, tree, tree, tree, bool);
static bool nonoverlapping_component_refs_p (const_tree, const_tree);

/* Query statistics for the different low-level disambiguators.
   A high-level query may trigger multiple of them.  */

static struct {
  unsigned HOST_WIDE_INT refs_may_alias_p_may_alias;
  unsigned HOST_WIDE_INT refs_may_alias_p_no_alias;
  unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_may_alias;
  unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_no_alias;
  unsigned HOST_WIDE_INT call_may_clobber_ref_p_may_alias;
  unsigned HOST_WIDE_INT call_may_clobber_ref_p_no_alias;
  unsigned HOST_WIDE_INT aliasing_component_refs_p_may_alias;
  unsigned HOST_WIDE_INT aliasing_component_refs_p_no_alias;
  unsigned HOST_WIDE_INT nonoverlapping_component_refs_p_may_alias;
  unsigned HOST_WIDE_INT nonoverlapping_component_refs_p_no_alias;
  unsigned HOST_WIDE_INT nonoverlapping_refs_since_match_p_may_alias;
  unsigned HOST_WIDE_INT nonoverlapping_refs_since_match_p_must_overlap;
  unsigned HOST_WIDE_INT nonoverlapping_refs_since_match_p_no_alias;
  unsigned HOST_WIDE_INT stmt_kills_ref_p_no;
  unsigned HOST_WIDE_INT stmt_kills_ref_p_yes;
  unsigned HOST_WIDE_INT modref_use_may_alias;
  unsigned HOST_WIDE_INT modref_use_no_alias;
  unsigned HOST_WIDE_INT modref_clobber_may_alias;
  unsigned HOST_WIDE_INT modref_clobber_no_alias;
  unsigned HOST_WIDE_INT modref_kill_no;
  unsigned HOST_WIDE_INT modref_kill_yes;
  unsigned HOST_WIDE_INT modref_tests;
  unsigned HOST_WIDE_INT modref_baseptr_tests;
} alias_stats;

void
dump_alias_stats (FILE *s)
{
  fprintf (s, "\nAlias oracle query stats:\n");
  fprintf (s, "  refs_may_alias_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.refs_may_alias_p_no_alias,
	   alias_stats.refs_may_alias_p_no_alias
	   + alias_stats.refs_may_alias_p_may_alias);
  fprintf (s, "  ref_maybe_used_by_call_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.ref_maybe_used_by_call_p_no_alias,
	   alias_stats.ref_maybe_used_by_call_p_no_alias
	   + alias_stats.ref_maybe_used_by_call_p_may_alias);
  fprintf (s, "  call_may_clobber_ref_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.call_may_clobber_ref_p_no_alias,
	   alias_stats.call_may_clobber_ref_p_no_alias
	   + alias_stats.call_may_clobber_ref_p_may_alias);
  fprintf (s, "  stmt_kills_ref_p: "
	   HOST_WIDE_INT_PRINT_DEC" kills, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.stmt_kills_ref_p_yes + alias_stats.modref_kill_yes,
	   alias_stats.stmt_kills_ref_p_yes + alias_stats.modref_kill_yes
	   + alias_stats.stmt_kills_ref_p_no + alias_stats.modref_kill_no);
  fprintf (s, "  nonoverlapping_component_refs_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.nonoverlapping_component_refs_p_no_alias,
	   alias_stats.nonoverlapping_component_refs_p_no_alias
	   + alias_stats.nonoverlapping_component_refs_p_may_alias);
  fprintf (s, "  nonoverlapping_refs_since_match_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" must overlaps, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.nonoverlapping_refs_since_match_p_no_alias,
	   alias_stats.nonoverlapping_refs_since_match_p_must_overlap,
	   alias_stats.nonoverlapping_refs_since_match_p_no_alias
	   + alias_stats.nonoverlapping_refs_since_match_p_may_alias
	   + alias_stats.nonoverlapping_refs_since_match_p_must_overlap);
  fprintf (s, "  aliasing_component_refs_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.aliasing_component_refs_p_no_alias,
	   alias_stats.aliasing_component_refs_p_no_alias
	   + alias_stats.aliasing_component_refs_p_may_alias);
  dump_alias_stats_in_alias_c (s);
  fprintf (s, "\nModref stats:\n");
  fprintf (s, "  modref kill: "
	   HOST_WIDE_INT_PRINT_DEC" kills, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.modref_kill_yes,
	   alias_stats.modref_kill_yes
	   + alias_stats.modref_kill_no);
  fprintf (s, "  modref use: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.modref_use_no_alias,
	   alias_stats.modref_use_no_alias
	   + alias_stats.modref_use_may_alias);
  fprintf (s, "  modref clobber: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n"
	   "  " HOST_WIDE_INT_PRINT_DEC" tbaa queries (%f per modref query)\n"
	   "  " HOST_WIDE_INT_PRINT_DEC" base compares (%f per modref query)\n",
	   alias_stats.modref_clobber_no_alias,
	   alias_stats.modref_clobber_no_alias
	   + alias_stats.modref_clobber_may_alias,
	   alias_stats.modref_tests,
	   ((double)alias_stats.modref_tests)
	   / (alias_stats.modref_clobber_no_alias
	      + alias_stats.modref_clobber_may_alias),
	   alias_stats.modref_baseptr_tests,
	   ((double)alias_stats.modref_baseptr_tests)
	   / (alias_stats.modref_clobber_no_alias
	      + alias_stats.modref_clobber_may_alias));
}

/* Return true if dereferencing PTR may alias a global variable.  */

bool
ptr_deref_may_alias_global_p (tree ptr)
{
  struct ptr_info_def *pi;

  /* If we end up with a pointer constant here, it may point
     to global memory.  */
  if (TREE_CODE (ptr) != SSA_NAME)
    return true;

  pi = SSA_NAME_PTR_INFO (ptr);

  /* If we do not have points-to information for this variable,
     we have to punt.  */
  if (!pi)
    return true;

  /* ??? This does not use TBAA to prune globals ptr may not access.  */
  return pt_solution_includes_global (&pi->pt);
}

/* Return true if dereferencing PTR may alias DECL.
   The caller is responsible for applying TBAA to see if PTR
   may access DECL at all.  */

static bool
ptr_deref_may_alias_decl_p (tree ptr, tree decl)
{
  struct ptr_info_def *pi;

  /* Conversions are irrelevant for points-to information and
     data-dependence analysis can feed us those.  */
  STRIP_NOPS (ptr);

  /* Anything we do not explicitly handle aliases.  */
  if ((TREE_CODE (ptr) != SSA_NAME
       && TREE_CODE (ptr) != ADDR_EXPR
       && TREE_CODE (ptr) != POINTER_PLUS_EXPR)
      || !POINTER_TYPE_P (TREE_TYPE (ptr))
      || (!VAR_P (decl)
	  && TREE_CODE (decl) != PARM_DECL
	  && TREE_CODE (decl) != RESULT_DECL))
    return true;

  /* Disregard pointer offsetting.  */
  if (TREE_CODE (ptr) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr = TREE_OPERAND (ptr, 0);
	}
      while (TREE_CODE (ptr) == POINTER_PLUS_EXPR);
      return ptr_deref_may_alias_decl_p (ptr, decl);
    }

  /* ADDR_EXPR pointers either just offset another pointer or directly
     specify the pointed-to set.  */
  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	ptr = TREE_OPERAND (base, 0);
      else if (base
	       && DECL_P (base))
	return compare_base_decls (base, decl) != 0;
      else if (base
	       && CONSTANT_CLASS_P (base))
	return false;
      else
	return true;
    }

  /* Non-aliased variables cannot be pointed to.  */
  if (!may_be_aliased (decl))
    return false;

  /* If we do not have useful points-to information for this pointer
     we cannot disambiguate anything else.  */
  pi = SSA_NAME_PTR_INFO (ptr);
  if (!pi)
    return true;

  return pt_solution_includes (&pi->pt, decl);
}

/* Return true if dereferenced PTR1 and PTR2 may alias.
   The caller is responsible for applying TBAA to see if accesses
   through PTR1 and PTR2 may conflict at all.  */

bool
ptr_derefs_may_alias_p (tree ptr1, tree ptr2)
{
  struct ptr_info_def *pi1, *pi2;

  /* Conversions are irrelevant for points-to information and
     data-dependence analysis can feed us those.  */
  STRIP_NOPS (ptr1);
  STRIP_NOPS (ptr2);

  /* Disregard pointer offsetting.  */
  if (TREE_CODE (ptr1) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr1 = TREE_OPERAND (ptr1, 0);
	}
      while (TREE_CODE (ptr1) == POINTER_PLUS_EXPR);
      return ptr_derefs_may_alias_p (ptr1, ptr2);
    }
  if (TREE_CODE (ptr2) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr2 = TREE_OPERAND (ptr2, 0);
	}
      while (TREE_CODE (ptr2) == POINTER_PLUS_EXPR);
      return ptr_derefs_may_alias_p (ptr1, ptr2);
    }

  /* ADDR_EXPR pointers either just offset another pointer or directly
     specify the pointed-to set.  */
  if (TREE_CODE (ptr1) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr1, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	return ptr_derefs_may_alias_p (TREE_OPERAND (base, 0), ptr2);
      else if (base
	       && DECL_P (base))
	return ptr_deref_may_alias_decl_p (ptr2, base);
      else
	return true;
    }
  if (TREE_CODE (ptr2) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr2, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	return ptr_derefs_may_alias_p (ptr1, TREE_OPERAND (base, 0));
      else if (base
	       && DECL_P (base))
	return ptr_deref_may_alias_decl_p (ptr1, base);
      else
	return true;
    }

  /* From here we require SSA name pointers.  Anything else aliases.  */
  if (TREE_CODE (ptr1) != SSA_NAME
      || TREE_CODE (ptr2) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (ptr1))
      || !POINTER_TYPE_P (TREE_TYPE (ptr2)))
    return true;

  /* We may end up with two empty points-to solutions for two identical
     pointers.  In this case we still want to say both pointers alias, so
     shortcut that here.  */
  if (ptr1 == ptr2)
    return true;

  /* If we do not have useful points-to information for either pointer
     we cannot disambiguate anything else.  */
  pi1 = SSA_NAME_PTR_INFO (ptr1);
  pi2 = SSA_NAME_PTR_INFO (ptr2);
  if (!pi1 || !pi2)
    return true;

  /* ??? This does not use TBAA to prune decls from the intersection
     that not both pointers may access.  */
  return pt_solutions_intersect (&pi1->pt, &pi2->pt);
}

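/* A worked example (illustrative): for

     p_1 = &a;
     q_2 = &b;

   a query ptr_derefs_may_alias_p (&a, &b) resolves both ADDR_EXPRs to
   their DECL bases and disambiguates via ptr_deref_may_alias_decl_p,
   while ptr_derefs_may_alias_p (p_1, q_2) falls through to intersecting
   the points-to solutions of the two SSA names.  */
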
/* Return true if dereferencing PTR may alias *REF.
   The caller is responsible for applying TBAA to see if PTR
   may access *REF at all.  */

static bool
ptr_deref_may_alias_ref_p_1 (tree ptr, ao_ref *ref)
{
  tree base = ao_ref_base (ref);

  if (TREE_CODE (base) == MEM_REF
      || TREE_CODE (base) == TARGET_MEM_REF)
    return ptr_derefs_may_alias_p (ptr, TREE_OPERAND (base, 0));
  else if (DECL_P (base))
    return ptr_deref_may_alias_decl_p (ptr, base);

  return true;
}

/* Returns true if PTR1 and PTR2 compare unequal because of points-to.  */

bool
ptrs_compare_unequal (tree ptr1, tree ptr2)
{
  /* First resolve the pointers down to a SSA name pointer base or
     a VAR_DECL, PARM_DECL or RESULT_DECL.  This explicitly does
     not yet try to handle LABEL_DECLs, FUNCTION_DECLs, CONST_DECLs
     or STRING_CSTs which need points-to adjustments to track them
     in the points-to sets.  */
  tree obj1 = NULL_TREE;
  tree obj2 = NULL_TREE;
  if (TREE_CODE (ptr1) == ADDR_EXPR)
    {
      tree tem = get_base_address (TREE_OPERAND (ptr1, 0));
      if (! tem)
	return false;
      if (VAR_P (tem)
	  || TREE_CODE (tem) == PARM_DECL
	  || TREE_CODE (tem) == RESULT_DECL)
	obj1 = tem;
      else if (TREE_CODE (tem) == MEM_REF)
	ptr1 = TREE_OPERAND (tem, 0);
    }
  if (TREE_CODE (ptr2) == ADDR_EXPR)
    {
      tree tem = get_base_address (TREE_OPERAND (ptr2, 0));
      if (! tem)
	return false;
      if (VAR_P (tem)
	  || TREE_CODE (tem) == PARM_DECL
	  || TREE_CODE (tem) == RESULT_DECL)
	obj2 = tem;
      else if (TREE_CODE (tem) == MEM_REF)
	ptr2 = TREE_OPERAND (tem, 0);
    }

  /* Canonicalize ptr vs. object.  */
  if (TREE_CODE (ptr1) == SSA_NAME && obj2)
    {
      std::swap (ptr1, ptr2);
      std::swap (obj1, obj2);
    }

  if (obj1 && obj2)
    /* Other code handles this correctly, no need to duplicate it here.  */;
  else if (obj1 && TREE_CODE (ptr2) == SSA_NAME)
    {
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr2);
      /* We may not use restrict to optimize pointer comparisons.
	 See PR71062.  So we have to assume that restrict-pointed-to
	 may be in fact obj1.  */
      if (!pi
	  || pi->pt.vars_contains_restrict
	  || pi->pt.vars_contains_interposable)
	return false;
      if (VAR_P (obj1)
	  && (TREE_STATIC (obj1) || DECL_EXTERNAL (obj1)))
	{
	  varpool_node *node = varpool_node::get (obj1);
	  /* If obj1 may bind to NULL give up (see below).  */
	  if (! node
	      || ! node->nonzero_address ()
	      || ! decl_binds_to_current_def_p (obj1))
	    return false;
	}
      return !pt_solution_includes (&pi->pt, obj1);
    }

  /* ??? We'd like to handle ptr1 != NULL and ptr1 != ptr2
     but those require pt.null to be conservatively correct.  */

  return false;
}

/* Returns whether a reference with base BASE may refer to global memory.  */

static bool
ref_may_alias_global_p_1 (tree base)
{
  if (DECL_P (base))
    return is_global_var (base);
  else if (TREE_CODE (base) == MEM_REF
	   || TREE_CODE (base) == TARGET_MEM_REF)
    return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
  return true;
}

bool
ref_may_alias_global_p (ao_ref *ref)
{
  tree base = ao_ref_base (ref);
  return ref_may_alias_global_p_1 (base);
}

bool
ref_may_alias_global_p (tree ref)
{
  tree base = get_base_address (ref);
  return ref_may_alias_global_p_1 (base);
}

/* Return true if STMT may clobber global memory.  */

bool
stmt_may_clobber_global_p (gimple *stmt)
{
  tree lhs;

  if (!gimple_vdef (stmt))
    return false;

  /* ??? We can ask the oracle whether an artificial pointer
     dereference with a pointer with points-to information covering
     all global memory (what about non-address taken memory?) may be
     clobbered by this call.  As there is at the moment no convenient
     way of doing that without generating garbage do some manual
     checking instead.
     ??? We could make a NULL ao_ref argument to the various
     predicates special, meaning any global memory.  */

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      lhs = gimple_assign_lhs (stmt);
      return (TREE_CODE (lhs) != SSA_NAME
	      && ref_may_alias_global_p (lhs));
    case GIMPLE_CALL:
      return true;
    default:
      return true;
    }
}

/* Dump alias information on FILE.  */

void
dump_alias_info (FILE *file)
{
  unsigned i;
  tree ptr;
  const char *funcname
    = lang_hooks.decl_printable_name (current_function_decl, 2);
  tree var;

  fprintf (file, "\n\nAlias information for %s\n\n", funcname);

  fprintf (file, "Aliased symbols\n\n");

  FOR_EACH_LOCAL_DECL (cfun, i, var)
    {
      if (may_be_aliased (var))
	dump_variable (file, var);
    }

  fprintf (file, "\nCall clobber information\n");

  fprintf (file, "\nESCAPED");
  dump_points_to_solution (file, &cfun->gimple_df->escaped);

  fprintf (file, "\n\nFlow-insensitive points-to information\n\n");

  FOR_EACH_SSA_NAME (i, ptr, cfun)
    {
      struct ptr_info_def *pi;

      if (!POINTER_TYPE_P (TREE_TYPE (ptr))
	  || SSA_NAME_IN_FREE_LIST (ptr))
	continue;

      pi = SSA_NAME_PTR_INFO (ptr);
      if (pi)
	dump_points_to_info_for (file, ptr);
    }

  fprintf (file, "\n");
}

/* Dump alias information on stderr.  */

DEBUG_FUNCTION void
debug_alias_info (void)
{
  dump_alias_info (stderr);
}

/* Dump the points-to set *PT into FILE.  */

void
dump_points_to_solution (FILE *file, struct pt_solution *pt)
{
  if (pt->anything)
    fprintf (file, ", points-to anything");

  if (pt->nonlocal)
    fprintf (file, ", points-to non-local");

  if (pt->escaped)
    fprintf (file, ", points-to escaped");

  if (pt->ipa_escaped)
    fprintf (file, ", points-to unit escaped");

  if (pt->null)
    fprintf (file, ", points-to NULL");

  if (pt->vars)
    {
      fprintf (file, ", points-to vars: ");
      dump_decl_set (file, pt->vars);
      if (pt->vars_contains_nonlocal
	  || pt->vars_contains_escaped
	  || pt->vars_contains_escaped_heap
	  || pt->vars_contains_restrict)
	{
	  const char *comma = "";
	  fprintf (file, " (");
	  if (pt->vars_contains_nonlocal)
	    {
	      fprintf (file, "nonlocal");
	      comma = ", ";
	    }
	  if (pt->vars_contains_escaped)
	    {
	      fprintf (file, "%sescaped", comma);
	      comma = ", ";
	    }
	  if (pt->vars_contains_escaped_heap)
	    {
	      fprintf (file, "%sescaped heap", comma);
	      comma = ", ";
	    }
	  if (pt->vars_contains_restrict)
	    {
	      fprintf (file, "%srestrict", comma);
	      comma = ", ";
	    }
	  if (pt->vars_contains_interposable)
	    fprintf (file, "%sinterposable", comma);
	  fprintf (file, ")");
	}
    }
}

/* Unified dump function for pt_solution.  */

DEBUG_FUNCTION void
debug (pt_solution &ref)
{
  dump_points_to_solution (stderr, &ref);
}

DEBUG_FUNCTION void
debug (pt_solution *ptr)
{
  if (ptr)
    debug (*ptr);
  else
    fprintf (stderr, "<nil>\n");
}

/* Dump points-to information for SSA_NAME PTR into FILE.  */

void
dump_points_to_info_for (FILE *file, tree ptr)
{
  struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);

  print_generic_expr (file, ptr, dump_flags);

  if (pi)
    dump_points_to_solution (file, &pi->pt);
  else
    fprintf (file, ", points-to anything");

  fprintf (file, "\n");
}

/* Dump points-to information for VAR into stderr.  */

DEBUG_FUNCTION void
debug_points_to_info_for (tree var)
{
  dump_points_to_info_for (stderr, var);
}

/* Initializes the alias-oracle reference representation *R from REF.  */

void
ao_ref_init (ao_ref *r, tree ref)
{
  r->ref = ref;
  r->base = NULL_TREE;
  r->offset = 0;
  r->size = -1;
  r->max_size = -1;
  r->ref_alias_set = -1;
  r->base_alias_set = -1;
  r->volatile_p = ref ? TREE_THIS_VOLATILE (ref) : false;
}

/* Returns the base object of the memory reference *REF.  */

tree
ao_ref_base (ao_ref *ref)
{
  bool reverse;

  if (ref->base)
    return ref->base;
  ref->base = get_ref_base_and_extent (ref->ref, &ref->offset, &ref->size,
				       &ref->max_size, &reverse);
  return ref->base;
}

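/* Usage sketch (illustrative only): the base, offset and sizes of an
   ao_ref are computed lazily, so wrapping a reference tree is cheap and
   the get_ref_base_and_extent walk is only paid for on first use:

     ao_ref r;
     ao_ref_init (&r, ref);
     tree base = ao_ref_base (&r);

   After the call above r.offset, r.size and r.max_size are filled in
   as well, and the alias-set accessors below cache their results in
   the same way.  */
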
/* Returns the base object alias set of the memory reference *REF.  */

alias_set_type
ao_ref_base_alias_set (ao_ref *ref)
{
  tree base_ref;

  if (ref->base_alias_set != -1)
    return ref->base_alias_set;
  if (!ref->ref)
    return 0;
  base_ref = ref->ref;
  if (TREE_CODE (base_ref) == WITH_SIZE_EXPR)
    base_ref = TREE_OPERAND (base_ref, 0);
  while (handled_component_p (base_ref))
    base_ref = TREE_OPERAND (base_ref, 0);
  ref->base_alias_set = get_alias_set (base_ref);
  return ref->base_alias_set;
}

/* Returns the reference alias set of the memory reference *REF.  */

alias_set_type
ao_ref_alias_set (ao_ref *ref)
{
  if (ref->ref_alias_set != -1)
    return ref->ref_alias_set;
  if (!ref->ref)
    return 0;
  ref->ref_alias_set = get_alias_set (ref->ref);
  return ref->ref_alias_set;
}

/* Returns a type satisfying
   get_deref_alias_set (type) == ao_ref_base_alias_set (REF).  */

tree
ao_ref_base_alias_ptr_type (ao_ref *ref)
{
  tree base_ref;

  if (!ref->ref)
    return NULL_TREE;
  base_ref = ref->ref;
  if (TREE_CODE (base_ref) == WITH_SIZE_EXPR)
    base_ref = TREE_OPERAND (base_ref, 0);
  while (handled_component_p (base_ref))
    base_ref = TREE_OPERAND (base_ref, 0);
  tree ret = reference_alias_ptr_type (base_ref);
  return ret;
}

/* Returns a type satisfying
   get_deref_alias_set (type) == ao_ref_alias_set (REF).  */

tree
ao_ref_alias_ptr_type (ao_ref *ref)
{
  if (!ref->ref)
    return NULL_TREE;
  tree ret = reference_alias_ptr_type (ref->ref);
  return ret;
}

/* Init an alias-oracle reference representation from a gimple pointer
   PTR and a range specified by OFFSET, SIZE and MAX_SIZE under the
   assumption that RANGE_KNOWN is set.

   The access is assumed to be only to or after the pointer target adjusted
   by the offset, not before it (even in the case RANGE_KNOWN is false).  */

void
ao_ref_init_from_ptr_and_range (ao_ref *ref, tree ptr,
				bool range_known,
				poly_int64 offset,
				poly_int64 size,
				poly_int64 max_size)
{
  poly_int64 t, extra_offset = 0;

  ref->ref = NULL_TREE;
  if (TREE_CODE (ptr) == SSA_NAME)
    {
      gimple *stmt = SSA_NAME_DEF_STMT (ptr);
      if (gimple_assign_single_p (stmt)
	  && gimple_assign_rhs_code (stmt) == ADDR_EXPR)
	ptr = gimple_assign_rhs1 (stmt);
      else if (is_gimple_assign (stmt)
	       && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
	       && ptrdiff_tree_p (gimple_assign_rhs2 (stmt), &extra_offset))
	{
	  ptr = gimple_assign_rhs1 (stmt);
	  extra_offset *= BITS_PER_UNIT;
	}
    }

  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      ref->base = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &t);
      if (ref->base)
	ref->offset = BITS_PER_UNIT * t;
      else
	{
	  range_known = false;
	  ref->offset = 0;
	  ref->base = get_base_address (TREE_OPERAND (ptr, 0));
	}
    }
  else
    {
      gcc_assert (POINTER_TYPE_P (TREE_TYPE (ptr)));
      ref->base = build2 (MEM_REF, char_type_node,
			  ptr, null_pointer_node);
      ref->offset = 0;
    }
  ref->offset += extra_offset + offset;
  if (range_known)
    {
      ref->max_size = max_size;
      ref->size = size;
    }
  else
    ref->max_size = ref->size = -1;
  ref->ref_alias_set = 0;
  ref->base_alias_set = 0;
  ref->volatile_p = false;
}

/* Init an alias-oracle reference representation from a gimple pointer
   PTR and a gimple size SIZE in bytes.  If SIZE is NULL_TREE then the
   size is assumed to be unknown.  The access is assumed to be only
   to or after the pointer target, not before it.  */

void
ao_ref_init_from_ptr_and_size (ao_ref *ref, tree ptr, tree size)
{
  poly_int64 size_hwi;
  if (size
      && poly_int_tree_p (size, &size_hwi)
      && coeffs_in_range_p (size_hwi, 0, HOST_WIDE_INT_MAX / BITS_PER_UNIT))
    {
      size_hwi = size_hwi * BITS_PER_UNIT;
      ao_ref_init_from_ptr_and_range (ref, ptr, true, 0, size_hwi, size_hwi);
    }
  else
    ao_ref_init_from_ptr_and_range (ref, ptr, false, 0, -1, -1);
}

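/* For example (a sketch, assuming CALL is a call like memset (p, 0, n)):

     ao_ref dref;
     ao_ref_init_from_ptr_and_size (&dref, gimple_call_arg (call, 0),
				    gimple_call_arg (call, 2));

   This yields a reference covering n bytes starting at *p when n is a
   known poly-int constant, and an unknown-extent reference otherwise.  */
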
/* S1 and S2 are TYPE_SIZE or DECL_SIZE.  Compare them:
   Return -1 if S1 < S2
   Return 1 if S1 > S2
   Return 0 if equal or incomparable.  */

static int
compare_sizes (tree s1, tree s2)
{
  if (!s1 || !s2)
    return 0;

  poly_uint64 size1;
  poly_uint64 size2;

  if (!poly_int_tree_p (s1, &size1) || !poly_int_tree_p (s2, &size2))
    return 0;
  if (known_lt (size1, size2))
    return -1;
  if (known_lt (size2, size1))
    return 1;
  return 0;
}

/* Compare TYPE1 and TYPE2 by their size.
   Return -1 if size of TYPE1 < size of TYPE2
   Return 1 if size of TYPE1 > size of TYPE2
   Return 0 if types are of equal sizes or we cannot compare them.  */

static int
compare_type_sizes (tree type1, tree type2)
{
  /* Be conservative for arrays and vectors.  We want to support partial
     overlap on int[3] and int[3] as tested in gcc.dg/torture/alias-2.c.  */
  while (TREE_CODE (type1) == ARRAY_TYPE
	 || TREE_CODE (type1) == VECTOR_TYPE)
    type1 = TREE_TYPE (type1);
  while (TREE_CODE (type2) == ARRAY_TYPE
	 || TREE_CODE (type2) == VECTOR_TYPE)
    type2 = TREE_TYPE (type2);
  return compare_sizes (TYPE_SIZE (type1), TYPE_SIZE (type2));
}

/* Return 1 if TYPE1 and TYPE2 are to be considered equivalent for the
   purpose of TBAA.  Return 0 if they are distinct and -1 if we cannot
   decide.  */

static inline int
same_type_for_tbaa (tree type1, tree type2)
{
  type1 = TYPE_MAIN_VARIANT (type1);
  type2 = TYPE_MAIN_VARIANT (type2);

  /* Handle the most common case first.  */
  if (type1 == type2)
    return 1;

  /* If we would have to do structural comparison bail out.  */
  if (TYPE_STRUCTURAL_EQUALITY_P (type1)
      || TYPE_STRUCTURAL_EQUALITY_P (type2))
    return -1;

  /* Compare the canonical types.  */
  if (TYPE_CANONICAL (type1) == TYPE_CANONICAL (type2))
    return 1;

  /* ??? Array types are not properly unified in all cases as we have
     spurious changes in the index types for example.  Removing this
     causes all sorts of problems with the Fortran frontend.  */
  if (TREE_CODE (type1) == ARRAY_TYPE
      && TREE_CODE (type2) == ARRAY_TYPE)
    return -1;

  /* ??? In Ada, an lvalue of an unconstrained type can be used to access an
     object of one of its constrained subtypes, e.g. when a function with an
     unconstrained parameter passed by reference is called on an object and
     inlined.  But, even in the case of a fixed size, type and subtypes are
     not equivalent enough as to share the same TYPE_CANONICAL, since this
     would mean that conversions between them are useless, whereas they are
     not (e.g. type and subtypes can have different modes).  So, in the end,
     they are only guaranteed to have the same alias set.  */
  alias_set_type set1 = get_alias_set (type1);
  alias_set_type set2 = get_alias_set (type2);
  if (set1 == set2)
    return -1;

  /* Pointers to void are considered compatible with all other pointers,
     so for two pointers see what the alias set resolution thinks.  */
  if (POINTER_TYPE_P (type1)
      && POINTER_TYPE_P (type2)
      && alias_sets_conflict_p (set1, set2))
    return -1;

  /* The types are known to be not equal.  */
  return 0;
}

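/* For instance (illustrative): int and a typedef of int share their
   TYPE_MAIN_VARIANT and compare as 1; two array types always compare
   as -1 (see the ??? comment above); and two unrelated record types
   with different canonical types and non-conflicting alias sets
   compare as 0.  */
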
/* Return true if TYPE is a composite type (i.e. we may apply one of the
   handled components to it).  */

static bool
type_has_components_p (tree type)
{
  return AGGREGATE_TYPE_P (type) || VECTOR_TYPE_P (type)
	 || TREE_CODE (type) == COMPLEX_TYPE;
}

/* MATCH1 and MATCH2, which are part of the access paths of REF1 and REF2
   respectively, either point to the same address or are completely
   disjoint.  If PARTIAL_OVERLAP is true, assume that outermost arrays may
   just partly overlap.

   Try to disambiguate using the access path starting from the match
   and return false if there is no conflict.

   Helper for aliasing_component_refs_p.  */

static bool
aliasing_matching_component_refs_p (tree match1, tree ref1,
				    poly_int64 offset1, poly_int64 max_size1,
				    tree match2, tree ref2,
				    poly_int64 offset2, poly_int64 max_size2,
				    bool partial_overlap)
{
  poly_int64 offadj, sztmp, msztmp;
  bool reverse;

  if (!partial_overlap)
    {
      get_ref_base_and_extent (match2, &offadj, &sztmp, &msztmp, &reverse);
      offset2 -= offadj;
      get_ref_base_and_extent (match1, &offadj, &sztmp, &msztmp, &reverse);
      offset1 -= offadj;
      if (!ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
	{
	  ++alias_stats.aliasing_component_refs_p_no_alias;
	  return false;
	}
    }

  int cmp = nonoverlapping_refs_since_match_p (match1, ref1, match2, ref2,
					       partial_overlap);
  if (cmp == 1
      || (cmp == -1 && nonoverlapping_component_refs_p (ref1, ref2)))
    {
      ++alias_stats.aliasing_component_refs_p_no_alias;
      return false;
    }
  ++alias_stats.aliasing_component_refs_p_may_alias;
  return true;
}

/* Return true if REF is a reference to a zero-sized trailing array, i.e.
     struct foo {int bar; int array[0];} *fooptr;
     fooptr->array.  */

static bool
component_ref_to_zero_sized_trailing_array_p (tree ref)
{
  return (TREE_CODE (ref) == COMPONENT_REF
	  && TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 1))) == ARRAY_TYPE
	  && (!TYPE_SIZE (TREE_TYPE (TREE_OPERAND (ref, 1)))
	      || integer_zerop (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (ref, 1)))))
	  && array_at_struct_end_p (ref));
}

/* Worker for aliasing_component_refs_p.  Most parameters match parameters of
   aliasing_component_refs_p.

   Walk access path REF2 and try to find type matching TYPE1
   (which is a start of possibly aliasing access path REF1).
   If match is found, try to disambiguate.

   Return 0 for successful disambiguation.
   Return 1 if match was found but disambiguation failed.
   Return -1 if there is no match.
   In this case *MAYBE_MATCH is left false if there is no type matching
   TYPE1 in the access path of REF2 and set to true if we are not sure.  */

static int
aliasing_component_refs_walk (tree ref1, tree type1, tree base1,
			      poly_int64 offset1, poly_int64 max_size1,
			      tree end_struct_ref1,
			      tree ref2, tree base2,
			      poly_int64 offset2, poly_int64 max_size2,
			      bool *maybe_match)
{
  tree ref = ref2;
  int same_p = 0;

  while (true)
    {
      /* We walk from inner type to the outer types.  If type we see is
	 already too large to be part of type1, terminate the search.  */
      int cmp = compare_type_sizes (type1, TREE_TYPE (ref));

      if (cmp < 0
	  && (!end_struct_ref1
	      || compare_type_sizes (TREE_TYPE (end_struct_ref1),
				     TREE_TYPE (ref)) < 0))
	break;
      /* If types may be of same size, see if we can decide about their
	 equality.  */
      if (cmp == 0)
	{
	  same_p = same_type_for_tbaa (TREE_TYPE (ref), type1);
	  if (same_p == 1)
	    break;
	  /* In case we can't decide whether types are same try to
	     continue looking for the exact match.
	     Remember however that we possibly saw a match
	     to bypass the access path continuations tests we do later.  */
	  if (same_p == -1)
	    *maybe_match = true;
	}
      if (!handled_component_p (ref))
	break;
      ref = TREE_OPERAND (ref, 0);
    }
  if (same_p == 1)
    {
      bool partial_overlap = false;

      /* We assume that arrays can overlap by a multiple of their element
	 size as tested in gcc.dg/torture/alias-2.c.
	 This partial overlap happens only when both arrays are bases of
	 the access and not contained within another component ref.
	 To be safe we also assume partial overlap for VLAs.  */
      if (TREE_CODE (TREE_TYPE (base1)) == ARRAY_TYPE
	  && (!TYPE_SIZE (TREE_TYPE (base1))
	      || TREE_CODE (TYPE_SIZE (TREE_TYPE (base1))) != INTEGER_CST
	      || ref == base2))
	{
	  /* Setting maybe_match to true triggers the
	     nonoverlapping_component_refs_p test later that still may do
	     useful disambiguation.  */
	  *maybe_match = true;
	  partial_overlap = true;
	}
      return aliasing_matching_component_refs_p (base1, ref1,
						 offset1, max_size1,
						 ref, ref2,
						 offset2, max_size2,
						 partial_overlap);
    }
  return -1;
}

/* Consider access path1 base1....ref1 and access path2 base2...ref2.
   Return true if they can be composed to a single access path
   base1...ref1...base2...ref2.

   REF_TYPE1 is the type of REF1.  END_STRUCT_PAST_END1 is true if there is
   a trailing array access after REF1 in the non-TBAA part of the access.
   REF1_ALIAS_SET is the alias set of REF1.

   BASE_TYPE2 is the type of base2.  END_STRUCT_REF2 is non-NULL if there is
   a trailing array access in the TBAA part of access path2.
   BASE2_ALIAS_SET is the alias set of base2.  */

bool
access_path_may_continue_p (tree ref_type1, bool end_struct_past_end1,
			    alias_set_type ref1_alias_set,
			    tree base_type2, tree end_struct_ref2,
			    alias_set_type base2_alias_set)
{
  /* Access paths cannot continue past types with no components.  */
  if (!type_has_components_p (ref_type1))
    return false;

  /* If the first access path ends by a type too small to hold the base of
     the second access path, typically the paths cannot continue.

     Punt if end_struct_past_end1 is true.  We want to support arbitrary
     type punning past the first COMPONENT_REF to union because redundant
     store elimination depends on this, see PR92152.  For this reason we
     cannot check the size of the reference because types may partially
     overlap.  */
  if (!end_struct_past_end1)
    {
      if (compare_type_sizes (ref_type1, base_type2) < 0)
	return false;
      /* If path2 contains a trailing array access we can strengthen the
	 check to verify that also the size of the element of the trailing
	 array fits.  In fact we could check for offset + type_size, but we
	 do not track offsets and this is quite a corner case.  */
      if (end_struct_ref2
	  && compare_type_sizes (ref_type1, TREE_TYPE (end_struct_ref2)) < 0)
	return false;
    }
  return (base2_alias_set == ref1_alias_set
	  || alias_set_subset_of (base2_alias_set, ref1_alias_set));
}

/* Determine if the two component references REF1 and REF2 which are
   based on access types TYPE1 and TYPE2 and of which at least one is based
   on an indirect reference may alias.
   REF1_ALIAS_SET, BASE1_ALIAS_SET, REF2_ALIAS_SET and BASE2_ALIAS_SET
   are the respective alias sets.  */

static bool
aliasing_component_refs_p (tree ref1,
			   alias_set_type ref1_alias_set,
			   alias_set_type base1_alias_set,
			   poly_int64 offset1, poly_int64 max_size1,
			   tree ref2,
			   alias_set_type ref2_alias_set,
			   alias_set_type base2_alias_set,
			   poly_int64 offset2, poly_int64 max_size2)
{
  /* If one reference is a component reference through pointers try to find a
     common base and apply offset based disambiguation.  This handles
     for example
       struct A { int i; int j; } *q;
       struct B { struct A a; int k; } *p;
     disambiguating q->i and p->a.j.  */
  tree base1, base2;
  tree type1, type2;
  bool maybe_match = false;
  tree end_struct_ref1 = NULL, end_struct_ref2 = NULL;
  bool end_struct_past_end1 = false;
  bool end_struct_past_end2 = false;

  /* Choose bases and base types to search for.
     The access path is as follows:
       base....end_of_tbaa_ref...actual_ref
     At one place in the access path there may be a reference to a
     zero-sized or trailing array.

     We generally discard the segment after end_of_tbaa_ref however
     we need to be careful in case it contains a zero-sized or trailing
     array.  These may happen after a reference to a union and in this
     case we need to not disambiguate type punning scenarios.

     We set:
	base1 to point to base

	ref1 to point to end_of_tbaa_ref

	end_struct_ref1 to point to the trailing reference (if it exists
	in range base....end_of_tbaa_ref)

	end_struct_past_end1 is true if this trailing reference occurs in
	end_of_tbaa_ref...actual_ref.  */
  base1 = ref1;
  while (handled_component_p (base1))
    {
      /* Generally access paths are monotonous in the size of object.  The
	 exception are trailing arrays of structures, i.e.
	   struct a {int array[0];};
	 or
	   struct a {int array1[0]; int array[];};
	 Such a struct has size 0 but accesses to a.array may have non-zero
	 size.  In this case the size of TREE_TYPE (base1) is smaller than
	 the size of TREE_TYPE (TREE_OPERAND (base1, 0)).

	 Because we compare sizes of arrays just by sizes of their elements,
	 we only need to care about zero sized array fields here.  */
      if (component_ref_to_zero_sized_trailing_array_p (base1))
	{
	  gcc_checking_assert (!end_struct_ref1);
	  end_struct_ref1 = base1;
	}
      if (ends_tbaa_access_path_p (base1))
	{
	  ref1 = TREE_OPERAND (base1, 0);
	  if (end_struct_ref1)
	    {
	      end_struct_past_end1 = true;
	      end_struct_ref1 = NULL;
	    }
	}
      base1 = TREE_OPERAND (base1, 0);
    }
  type1 = TREE_TYPE (base1);
  base2 = ref2;
  while (handled_component_p (base2))
    {
      if (component_ref_to_zero_sized_trailing_array_p (base2))
	{
	  gcc_checking_assert (!end_struct_ref2);
	  end_struct_ref2 = base2;
	}
      if (ends_tbaa_access_path_p (base2))
	{
	  ref2 = TREE_OPERAND (base2, 0);
	  if (end_struct_ref2)
	    {
	      end_struct_past_end2 = true;
	      end_struct_ref2 = NULL;
	    }
	}
      base2 = TREE_OPERAND (base2, 0);
    }
  type2 = TREE_TYPE (base2);

  /* Now search for the type1 in the access path of ref2.  This
     would be a common base for doing offset based disambiguation on.
     This however only makes sense if type2 is big enough to hold type1.  */
  int cmp_outer = compare_type_sizes (type2, type1);

  /* If type2 is big enough to contain type1 walk its access path.
     We also need to take care of arrays at the end of structs that may
     extend beyond the end of the structure.  If this occurs in the TBAA
     part of the access path, we need to consider the increased type as
     well.  */
  if (cmp_outer >= 0
      || (end_struct_ref2
	  && compare_type_sizes (TREE_TYPE (end_struct_ref2), type1) >= 0))
    {
      int res = aliasing_component_refs_walk (ref1, type1, base1,
					      offset1, max_size1,
					      end_struct_ref1,
					      ref2, base2, offset2, max_size2,
					      &maybe_match);
      if (res != -1)
	return res;
    }

  /* If we didn't find a common base, try the other way around.  */
  if (cmp_outer <= 0
      || (end_struct_ref1
	  && compare_type_sizes (TREE_TYPE (end_struct_ref1), type1) <= 0))
    {
      int res = aliasing_component_refs_walk (ref2, type2, base2,
					      offset2, max_size2,
					      end_struct_ref2,
					      ref1, base1, offset1, max_size1,
					      &maybe_match);
      if (res != -1)
	return res;
    }

  /* In the following code we make an assumption that the types in access
     paths do not overlap and thus accesses alias only if one path can be a
     continuation of another.  If we were not able to decide about
     equivalence, we need to give up.  */
  if (maybe_match)
    {
      if (!nonoverlapping_component_refs_p (ref1, ref2))
	{
	  ++alias_stats.aliasing_component_refs_p_may_alias;
	  return true;
	}
      ++alias_stats.aliasing_component_refs_p_no_alias;
      return false;
    }

  if (access_path_may_continue_p (TREE_TYPE (ref1), end_struct_past_end1,
				  ref1_alias_set,
				  type2, end_struct_ref2,
				  base2_alias_set)
      || access_path_may_continue_p (TREE_TYPE (ref2), end_struct_past_end2,
				     ref2_alias_set,
				     type1, end_struct_ref1,
				     base1_alias_set))
    {
      ++alias_stats.aliasing_component_refs_p_may_alias;
      return true;
    }
  ++alias_stats.aliasing_component_refs_p_no_alias;
  return false;
}

/* FIELD1 and FIELD2 are two fields of component refs.  We assume
   that bases of both component refs are either equivalent or nonoverlapping.
   We do not assume that the containers of FIELD1 and FIELD2 are of the
   same type or size.

   Return 0 if, in case the base addresses of the component refs are the
   same, FIELD1 and FIELD2 also have the same address.  Note that FIELD1
   and FIELD2 may not be of the same type or size.

   Return 1 if FIELD1 and FIELD2 are non-overlapping.

   Return -1 otherwise.

   The main difference between 0 and -1 is to let
   nonoverlapping_refs_since_match_p discover the semantically
   equivalent part of the access path.

   Note that this function is used even with -fno-strict-aliasing
   and makes use of no TBAA assumptions.  */

static int
nonoverlapping_component_refs_p_1 (const_tree field1, const_tree field2)
{
  /* If both fields are of the same type, we could save hard work of
     comparing offsets.  */
  tree type1 = DECL_CONTEXT (field1);
  tree type2 = DECL_CONTEXT (field2);

  if (TREE_CODE (type1) == RECORD_TYPE
      && DECL_BIT_FIELD_REPRESENTATIVE (field1))
    field1 = DECL_BIT_FIELD_REPRESENTATIVE (field1);
  if (TREE_CODE (type2) == RECORD_TYPE
      && DECL_BIT_FIELD_REPRESENTATIVE (field2))
    field2 = DECL_BIT_FIELD_REPRESENTATIVE (field2);

  /* ??? Bitfields can overlap at RTL level so punt on them.
     FIXME: RTL expansion should be fixed by adjusting the access path
     when producing MEM_ATTRs for MEMs which are wider than
     the bitfields similarly as done in set_mem_attrs_minus_bitpos.  */
  if (DECL_BIT_FIELD (field1) && DECL_BIT_FIELD (field2))
    return -1;

  /* Assume that different FIELD_DECLs never overlap within a RECORD_TYPE.  */
  if (type1 == type2 && TREE_CODE (type1) == RECORD_TYPE)
    return field1 != field2;

  /* In the common case the offsets and bit offsets will be the same.
     However if frontends do not agree on the alignment, they may be
     different even if they actually represent the same address.
     Try the common case first and if that fails calculate the
     actual bit offset.  */
  if (tree_int_cst_equal (DECL_FIELD_OFFSET (field1),
			  DECL_FIELD_OFFSET (field2))
      && tree_int_cst_equal (DECL_FIELD_BIT_OFFSET (field1),
			     DECL_FIELD_BIT_OFFSET (field2)))
    return 0;

  /* Note that it may be possible to use component_ref_field_offset
     which would provide offsets as trees.  However constructing and folding
     trees is expensive and does not seem to be worth the compile time
     cost.  */

  poly_uint64 offset1, offset2;
  poly_uint64 bit_offset1, bit_offset2;

  if (poly_int_tree_p (DECL_FIELD_OFFSET (field1), &offset1)
      && poly_int_tree_p (DECL_FIELD_OFFSET (field2), &offset2)
      && poly_int_tree_p (DECL_FIELD_BIT_OFFSET (field1), &bit_offset1)
      && poly_int_tree_p (DECL_FIELD_BIT_OFFSET (field2), &bit_offset2))
    {
      offset1 = (offset1 << LOG2_BITS_PER_UNIT) + bit_offset1;
      offset2 = (offset2 << LOG2_BITS_PER_UNIT) + bit_offset2;

      if (known_eq (offset1, offset2))
	return 0;

      poly_uint64 size1, size2;

      if (poly_int_tree_p (DECL_SIZE (field1), &size1)
	  && poly_int_tree_p (DECL_SIZE (field2), &size2)
	  && !ranges_maybe_overlap_p (offset1, size1, offset2, size2))
	return 1;
    }
  /* Resort to slower overlap checking by looking for matching types in
     the middle of access path.  */
  return -1;
}

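/* For example (illustrative): given

     struct S { int a; int b; };

   the field pair (a, b) shares its DECL_CONTEXT, so the function
   returns 1: distinct FIELD_DECLs of the same RECORD_TYPE cannot
   overlap.  The pair (a, a) returns 0: if the bases are equal, both
   accesses start at the same address.  */
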
/* Return the low bound of an array.  Do not produce new trees
   and thus do not care about the particular type of integer constants
   and placeholder exprs.  */

static tree
cheap_array_ref_low_bound (tree ref)
{
  tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));

  /* Avoid expensive array_ref_low_bound.
     The low bound is either stored in operand 2, or it is TYPE_MIN_VALUE
     of the domain type or it is zero.  */
  if (TREE_OPERAND (ref, 2))
    return TREE_OPERAND (ref, 2);
  else if (domain_type && TYPE_MIN_VALUE (domain_type))
    return TYPE_MIN_VALUE (domain_type);
  else
    return integer_zero_node;
}

/* REF1 and REF2 are ARRAY_REFs with either the same base address or which
   are completely disjoint.

   Return 1 if the refs are non-overlapping.
   Return 0 if they are possibly overlapping but if so the overlap again
   starts on the same address.
   Return -1 otherwise.  */

static int
nonoverlapping_array_refs_p (tree ref1, tree ref2)
{
  tree index1 = TREE_OPERAND (ref1, 1);
  tree index2 = TREE_OPERAND (ref2, 1);
  tree low_bound1 = cheap_array_ref_low_bound (ref1);
  tree low_bound2 = cheap_array_ref_low_bound (ref2);

  /* Handle zero offsets first: we do not need to match type size in this
     case.  */
  if (operand_equal_p (index1, low_bound1, 0)
      && operand_equal_p (index2, low_bound2, 0))
    return 0;

  /* If type sizes are different, give up.

     Avoid expensive array_ref_element_size.
     If operand 3 is present it denotes the size in alignment units.
     Otherwise the size is TYPE_SIZE of the element type.
     Handle only common cases where types are of the same "kind".  */
  if ((TREE_OPERAND (ref1, 3) == NULL) != (TREE_OPERAND (ref2, 3) == NULL))
    return -1;

  tree elmt_type1 = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref1, 0)));
  tree elmt_type2 = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref2, 0)));

  if (TREE_OPERAND (ref1, 3))
    {
      if (TYPE_ALIGN (elmt_type1) != TYPE_ALIGN (elmt_type2)
	  || !operand_equal_p (TREE_OPERAND (ref1, 3),
			       TREE_OPERAND (ref2, 3), 0))
	return -1;
    }
  else
    {
      if (!operand_equal_p (TYPE_SIZE_UNIT (elmt_type1),
			    TYPE_SIZE_UNIT (elmt_type2), 0))
	return -1;
    }

  /* Since we know that type sizes are the same, there is no need to return
     -1 after this point.  Partial overlap cannot be introduced.  */

  /* We may need to fold trees in this case.
     TODO: Handle the integer constant case at least.  */
  if (!operand_equal_p (low_bound1, low_bound2, 0))
    return 0;

  if (TREE_CODE (index1) == INTEGER_CST && TREE_CODE (index2) == INTEGER_CST)
    {
      if (tree_int_cst_equal (index1, index2))
	return 0;
      return 1;
    }
  /* TODO: We can use VRP to further disambiguate here.  */
  return 0;
}

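/* For example (illustrative): with equal element sizes, a[2] vs a[3]
   returns 1 (disjoint) and a[i] vs a[i] returns 0 (same address
   whenever the bases match); if the element sizes differ, the function
   returns -1 because one element may partially overlap two elements of
   the other array.  */
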
/* Try to disambiguate REF1 and REF2 under the assumption that MATCH1 and
   MATCH2 either point to the same address or are disjoint.
   MATCH1 and MATCH2 are assumed to be refs in the access paths of REF1 and
   REF2 respectively, or NULL in case we established equivalence of bases.
   If PARTIAL_OVERLAP is true assume that the toplevel arrays may actually
   overlap by an exact multiple of their element size.

   This test works by matching the initial segment of the access path
   and does not rely on TBAA thus is safe for !flag_strict_aliasing if
   the match was determined without use of the TBAA oracle.

   Return 1 if we can determine that component references REF1 and REF2,
   that are within a common DECL, cannot overlap.

   Return 0 if the paths are the same and thus there is nothing more to
   disambiguate (i.e. there is a must-alias, assuming there is a must-alias
   between MATCH1 and MATCH2).

   Return -1 if we cannot determine 0 or 1 - this happens when non-matching
   types were met in the path.
   In this case it may make sense to continue by other disambiguation
   oracles.  */

static int
nonoverlapping_refs_since_match_p (tree match1, tree ref1,
				   tree match2, tree ref2,
				   bool partial_overlap)
{
  int ntbaa1 = 0, ntbaa2 = 0;
  /* Early return if there are no references to match, we do not need
     to walk the access paths.

     Do not consider this as may-alias for stats - it is more useful
     to have information about how many disambiguations happened provided
     that the query was meaningful.  */

  if (match1 == ref1 || !handled_component_p (ref1)
      || match2 == ref2 || !handled_component_p (ref2))
    return -1;

  auto_vec<tree, 16> component_refs1;
  auto_vec<tree, 16> component_refs2;

  /* Create the stack of handled components for REF1.  */
  while (handled_component_p (ref1) && ref1 != match1)
    {
      /* We use TBAA only to re-synchronize after mismatched refs.  So we
	 do not need to truncate access path after TBAA part ends.  */
      if (ends_tbaa_access_path_p (ref1))
	ntbaa1 = 0;
      else
	ntbaa1++;
      component_refs1.safe_push (ref1);
      ref1 = TREE_OPERAND (ref1, 0);
    }

  /* Create the stack of handled components for REF2.  */
  while (handled_component_p (ref2) && ref2 != match2)
    {
      if (ends_tbaa_access_path_p (ref2))
	ntbaa2 = 0;
      else
	ntbaa2++;
      component_refs2.safe_push (ref2);
      ref2 = TREE_OPERAND (ref2, 0);
    }

  if (!flag_strict_aliasing)
    {
      ntbaa1 = 0;
      ntbaa2 = 0;
    }

  bool mem_ref1 = TREE_CODE (ref1) == MEM_REF && ref1 != match1;
  bool mem_ref2 = TREE_CODE (ref2) == MEM_REF && ref2 != match2;

  /* If only one of the access paths starts with a MEM_REF, check that the
     offset is 0 so the address stays the same after stripping it.
     TODO: In this case we may walk the other access path until we get the
     same offset.

     If both start with a MEM_REF, the offsets have to be the same.  */
  if ((mem_ref1 && !mem_ref2 && !integer_zerop (TREE_OPERAND (ref1, 1)))
      || (mem_ref2 && !mem_ref1 && !integer_zerop (TREE_OPERAND (ref2, 1)))
      || (mem_ref1 && mem_ref2
	  && !tree_int_cst_equal (TREE_OPERAND (ref1, 1),
				  TREE_OPERAND (ref2, 1))))
    {
      ++alias_stats.nonoverlapping_refs_since_match_p_may_alias;
      return -1;
    }

  /* TARGET_MEM_REFs are never wrapped in handled components, so we do not
     need to handle them here at all.  */
  gcc_checking_assert (TREE_CODE (ref1) != TARGET_MEM_REF
		       && TREE_CODE (ref2) != TARGET_MEM_REF);

  /* Pop the stacks in parallel and examine the COMPONENT_REFs of the same
     rank.  This is sufficient because we start from the same DECL and you
     cannot reference several fields at a time with COMPONENT_REFs (unlike
     with ARRAY_RANGE_REFs for arrays) so you always need the same number
     of them to access a sub-component, unless you're in a union, in which
     case the return value will precisely be false.  */
  while (true)
    {
      /* Track if we have seen an unmatched ref with a non-zero offset.  In
	 this case we must look for partial overlaps.  */
      bool seen_unmatched_ref_p = false;

      /* First match ARRAY_REFs and try to disambiguate.  */
      if (!component_refs1.is_empty ()
	  && !component_refs2.is_empty ())
	{
	  unsigned int narray_refs1=0, narray_refs2=0;

	  /* We generally assume that both access paths start with the same
	     sequence of refs.  However if the number of array refs is not
	     in sync, try to recover and pop elts until the numbers match.
	     This helps the case where one access path starts with the array
	     and the other with its element.  */
	  for (narray_refs1 = 0; narray_refs1 < component_refs1.length ();
	       narray_refs1++)
	    if (TREE_CODE (component_refs1 [component_refs1.length()
					    - 1 - narray_refs1]) != ARRAY_REF)
	      break;

	  for (narray_refs2 = 0; narray_refs2 < component_refs2.length ();
	       narray_refs2++)
	    if (TREE_CODE (component_refs2 [component_refs2.length()
					    - 1 - narray_refs2]) != ARRAY_REF)
	      break;
	  for (; narray_refs1 > narray_refs2; narray_refs1--)
	    {
	      ref1 = component_refs1.pop ();
	      ntbaa1--;

	      /* If the index is non-zero we need to check whether the
		 reference does not break the main invariant that bases
		 are either disjoint or equal.  Consider the example:

		 unsigned char out[][1];
		 out[1]="a";
		 out[i][0];

		 Here the bases out and out are the same, but after removing
		 the [i] index, this invariant no longer holds, because
		 out[i] points to the middle of the array out.

		 TODO: If the size of the type of the skipped reference is
		 an integer multiple of the size of the type of the other
		 reference this invariant can be verified, but even then it
		 is not completely safe with !flag_strict_aliasing if the
		 other reference contains unbounded array accesses.
		 See   */

	      if (!operand_equal_p (TREE_OPERAND (ref1, 1),
				    cheap_array_ref_low_bound (ref1), 0))
		return 0;
	    }
	  for (; narray_refs2 > narray_refs1; narray_refs2--)
	    {
	      ref2 = component_refs2.pop ();
	      ntbaa2--;
	      if (!operand_equal_p (TREE_OPERAND (ref2, 1),
				    cheap_array_ref_low_bound (ref2), 0))
		return 0;
	    }
	  /* Try to disambiguate matched arrays.  */
	  for (unsigned int i = 0; i < narray_refs1; i++)
	    {
	      int cmp = nonoverlapping_array_refs_p (component_refs1.pop (),
						     component_refs2.pop ());
	      ntbaa1--;
	      ntbaa2--;
	      if (cmp == 1 && !partial_overlap)
		{
		  ++alias_stats
		    .nonoverlapping_refs_since_match_p_no_alias;
		  return 1;
		}
	      if (cmp == -1)
		{
		  seen_unmatched_ref_p = true;
		  /* We cannot maintain the invariant that bases are either
		     the same or completely disjoint.  However we can still
		     recover from type based alias analysis if we reach
		     references of the same size.  We do not attempt to
		     match array sizes, so just finish the array walk and
		     look for component refs.  */
		  if (ntbaa1 < 0 || ntbaa2 < 0)
		    {
		      ++alias_stats.nonoverlapping_refs_since_match_p_may_alias;
		      return -1;
		    }
		  for (i++; i < narray_refs1; i++)
		    {
		      component_refs1.pop ();
		      component_refs2.pop ();
		      ntbaa1--;
		      ntbaa2--;
		    }
		  break;
		}
	      partial_overlap = false;
	    }
	}

      /* Next look for component_refs.  */
      do
	{
	  if (component_refs1.is_empty ())
	    {
	      ++alias_stats
		.nonoverlapping_refs_since_match_p_must_overlap;
	      return 0;
	    }
	  ref1 = component_refs1.pop ();
	  ntbaa1--;
	  if (TREE_CODE (ref1) != COMPONENT_REF)
	    {
	      seen_unmatched_ref_p = true;
	      if (ntbaa1 < 0 || ntbaa2 < 0)
		{
		  ++alias_stats.nonoverlapping_refs_since_match_p_may_alias;
		  return -1;
		}
	    }
	}
      while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref1, 0))));

      do
	{
	  if (component_refs2.is_empty ())
	    {
	      ++alias_stats
		.nonoverlapping_refs_since_match_p_must_overlap;
	      return 0;
	    }
	  ref2 = component_refs2.pop ();
	  ntbaa2--;
	  if (TREE_CODE (ref2) != COMPONENT_REF)
	    {
	      if (ntbaa1 < 0 || ntbaa2 < 0)
		{
		  ++alias_stats.nonoverlapping_refs_since_match_p_may_alias;
		  return -1;
		}
	      seen_unmatched_ref_p = true;
	    }
	}
      while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref2, 0))));

      /* BIT_FIELD_REF and VIEW_CONVERT_EXPR are taken off the vectors
	 earlier.  */
      gcc_checking_assert (TREE_CODE (ref1) == COMPONENT_REF
			   && TREE_CODE (ref2) == COMPONENT_REF);

      tree field1 = TREE_OPERAND (ref1, 1);
      tree field2 = TREE_OPERAND (ref2, 1);

      /* ??? We cannot simply use the type of operand #0 of the refs here
	 as the Fortran compiler smuggles type punning into COMPONENT_REFs
	 for common blocks instead of using unions like everyone else.  */
      tree type1 = DECL_CONTEXT (field1);
      tree type2 = DECL_CONTEXT (field2);

      partial_overlap = false;

      /* If we skipped array refs on types of different sizes, we can
	 no longer be sure that there are not partial overlaps.  */
      if (seen_unmatched_ref_p && ntbaa1 >= 0 && ntbaa2 >= 0
	  && !operand_equal_p (TYPE_SIZE (type1), TYPE_SIZE (type2), 0))
	{
	  ++alias_stats
	    .nonoverlapping_refs_since_match_p_may_alias;
	  return -1;
	}

      int cmp = nonoverlapping_component_refs_p_1 (field1, field2);
      if (cmp == -1)
	{
	  ++alias_stats
	    .nonoverlapping_refs_since_match_p_may_alias;
	  return -1;
	}
      else if (cmp == 1)
	{
	  ++alias_stats
	    .nonoverlapping_refs_since_match_p_no_alias;
	  return 1;
	}
    }
}

1805 /* Return TYPE_UID which can be used to match record types we consider
1806 same for TBAA purposes. */
1808 static inline int
1809 ncr_type_uid (const_tree field)
1811 /* ??? We cannot simply use the type of operand #0 of the refs here
1812 as the Fortran compiler smuggles type punning into COMPONENT_REFs
1813 for common blocks instead of using unions like everyone else. */
1814 tree type = DECL_FIELD_CONTEXT (field);
1815 /* With LTO, types considered same_type_for_tbaa_p
1816 from different translation units may not have the same
1817 main variant. They do, however, have the same TYPE_CANONICAL. */
1818 if (TYPE_CANONICAL (type))
1819 return TYPE_UID (TYPE_CANONICAL (type));
1820 return TYPE_UID (type);
1823 /* qsort compare function to sort FIELD_DECLs by their
1824 DECL_FIELD_CONTEXT TYPE_UID. */
1826 static inline int
1827 ncr_compar (const void *field1_, const void *field2_)
1829 const_tree field1 = *(const_tree *) const_cast <void *>(field1_);
1830 const_tree field2 = *(const_tree *) const_cast <void *>(field2_);
1831 unsigned int uid1 = ncr_type_uid (field1);
1832 unsigned int uid2 = ncr_type_uid (field2);
1834 if (uid1 < uid2)
1835 return -1;
1836 else if (uid1 > uid2)
1837 return 1;
1838 return 0;
1841 /* Return true if we can determine that the fields referenced cannot
1842 overlap for any pair of objects. This relies on TBAA. */
1844 static bool
1845 nonoverlapping_component_refs_p (const_tree x, const_tree y)
1847 /* Early return if we have nothing to do.
1849 Do not count this as may-alias in the stats - it is more useful
1850 to know how many disambiguations happened provided that
1851 the query was meaningful. */
1852 if (!flag_strict_aliasing
1853 || !x || !y
1854 || !handled_component_p (x)
1855 || !handled_component_p (y))
1856 return false;
1858 auto_vec<const_tree, 16> fieldsx;
1859 while (handled_component_p (x))
1861 if (TREE_CODE (x) == COMPONENT_REF)
1863 tree field = TREE_OPERAND (x, 1);
1864 tree type = DECL_FIELD_CONTEXT (field);
1865 if (TREE_CODE (type) == RECORD_TYPE)
1866 fieldsx.safe_push (field);
1868 else if (ends_tbaa_access_path_p (x))
1869 fieldsx.truncate (0);
1870 x = TREE_OPERAND (x, 0);
1872 if (fieldsx.length () == 0)
1873 return false;
1874 auto_vec<const_tree, 16> fieldsy;
1875 while (handled_component_p (y))
1877 if (TREE_CODE (y) == COMPONENT_REF)
1879 tree field = TREE_OPERAND (y, 1);
1880 tree type = DECL_FIELD_CONTEXT (field);
1881 if (TREE_CODE (type) == RECORD_TYPE)
1882 fieldsy.safe_push (TREE_OPERAND (y, 1));
1884 else if (ends_tbaa_access_path_p (y))
1885 fieldsy.truncate (0);
1886 y = TREE_OPERAND (y, 0);
1888 if (fieldsy.length () == 0)
1890 ++alias_stats.nonoverlapping_component_refs_p_may_alias;
1891 return false;
1894 /* Most common case first. */
1895 if (fieldsx.length () == 1
1896 && fieldsy.length () == 1)
1898 if (same_type_for_tbaa (DECL_FIELD_CONTEXT (fieldsx[0]),
1899 DECL_FIELD_CONTEXT (fieldsy[0])) == 1
1900 && nonoverlapping_component_refs_p_1 (fieldsx[0], fieldsy[0]) == 1)
1902 ++alias_stats.nonoverlapping_component_refs_p_no_alias;
1903 return true;
1905 else
1907 ++alias_stats.nonoverlapping_component_refs_p_may_alias;
1908 return false;
1912 if (fieldsx.length () == 2)
1914 if (ncr_compar (&fieldsx[0], &fieldsx[1]) == 1)
1915 std::swap (fieldsx[0], fieldsx[1]);
1917 else
1918 fieldsx.qsort (ncr_compar);
1920 if (fieldsy.length () == 2)
1922 if (ncr_compar (&fieldsy[0], &fieldsy[1]) == 1)
1923 std::swap (fieldsy[0], fieldsy[1]);
1925 else
1926 fieldsy.qsort (ncr_compar);
1928 unsigned i = 0, j = 0;
1931 const_tree fieldx = fieldsx[i];
1932 const_tree fieldy = fieldsy[j];
1934 /* We're left with accessing different fields of a structure,
1935 no possible overlap. */
1936 if (same_type_for_tbaa (DECL_FIELD_CONTEXT (fieldx),
1937 DECL_FIELD_CONTEXT (fieldy)) == 1
1938 && nonoverlapping_component_refs_p_1 (fieldx, fieldy) == 1)
1940 ++alias_stats.nonoverlapping_component_refs_p_no_alias;
1941 return true;
1944 if (ncr_type_uid (fieldx) < ncr_type_uid (fieldy))
1946 i++;
1947 if (i == fieldsx.length ())
1948 break;
1950 else
1952 j++;
1953 if (j == fieldsy.length ())
1954 break;
1957 while (1);
1959 ++alias_stats.nonoverlapping_component_refs_p_may_alias;
1960 return false;
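/* Illustrative sketch (hypothetical code, not from the testsuite):

     struct S { int i; int j; };
     int f (struct S *p, struct S *q) { p->i = 1; return q->j; }

   Even if p and q point to the same object, p->i and q->j access
   distinct, non-overlapping FIELD_DECLs of the same record type, so
   nonoverlapping_component_refs_p can return true and the two
   accesses are disambiguated.  */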
1964 /* Return true if two memory references based on the variables BASE1
1965 and BASE2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
1966 [OFFSET2, OFFSET2 + MAX_SIZE2) may alias. REF1 and REF2
1967 if non-NULL are the complete memory reference trees. */
1969 static bool
1970 decl_refs_may_alias_p (tree ref1, tree base1,
1971 poly_int64 offset1, poly_int64 max_size1,
1972 poly_int64 size1,
1973 tree ref2, tree base2,
1974 poly_int64 offset2, poly_int64 max_size2,
1975 poly_int64 size2)
1977 gcc_checking_assert (DECL_P (base1) && DECL_P (base2));
1979 /* If both references are based on different variables, they cannot alias. */
1980 if (compare_base_decls (base1, base2) == 0)
1981 return false;
1983 /* If both references are based on the same variable, they cannot alias if
1984 the accesses do not overlap. */
1985 if (!ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
1986 return false;
1988 /* If there is a must-alias, there is no use in disambiguating further. */
1989 if (known_eq (size1, max_size1) && known_eq (size2, max_size2))
1990 return true;
1992 /* For components with variable position, the above test isn't sufficient,
1993 so we disambiguate component references manually. */
1994 if (ref1 && ref2
1995 && handled_component_p (ref1) && handled_component_p (ref2)
1996 && nonoverlapping_refs_since_match_p (NULL, ref1, NULL, ref2, false) == 1)
1997 return false;
1999 return true;
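/* For illustration (hypothetical decls): with

     int a[4], b[4];

   a[0] vs. b[1] have different bases, so compare_base_decls returns 0
   and the refs do not alias; a[0] vs. a[1] share a base but occupy
   disjoint [offset, offset + max_size) ranges, so the range check
   disambiguates them.  */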
2002 /* Return true if access with BASE is view converted.
2003 Base must not be stripped from inner MEM_REF (&decl)
2004 which is done by ao_ref_base and thus one extra walk
2005 of handled components is needed. */
2007 static bool
2008 view_converted_memref_p (tree base)
2010 if (TREE_CODE (base) != MEM_REF && TREE_CODE (base) != TARGET_MEM_REF)
2011 return false;
2012 return same_type_for_tbaa (TREE_TYPE (base),
2013 TREE_TYPE (TREE_TYPE (TREE_OPERAND (base, 1)))) != 1;
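/* For example (illustrative GIMPLE), an int-typed access

     MEM[(char *)p_1]

   is view converted: the access type (int) and the type pointed to
   by operand 1 (char) do not agree for TBAA, so the predicate
   returns true.  */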
2016 /* Return true if an indirect reference based on *PTR1 constrained
2017 to [OFFSET1, OFFSET1 + MAX_SIZE1) may alias a variable based on BASE2
2018 constrained to [OFFSET2, OFFSET2 + MAX_SIZE2). *PTR1 and BASE2 have
2019 the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
2020 in which case they are computed on-demand. REF1 and REF2
2021 if non-NULL are the complete memory reference trees. */
2023 static bool
2024 indirect_ref_may_alias_decl_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
2025 poly_int64 offset1, poly_int64 max_size1,
2026 poly_int64 size1,
2027 alias_set_type ref1_alias_set,
2028 alias_set_type base1_alias_set,
2029 tree ref2 ATTRIBUTE_UNUSED, tree base2,
2030 poly_int64 offset2, poly_int64 max_size2,
2031 poly_int64 size2,
2032 alias_set_type ref2_alias_set,
2033 alias_set_type base2_alias_set, bool tbaa_p)
2035 tree ptr1;
2036 tree ptrtype1, dbase2;
2038 gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
2039 || TREE_CODE (base1) == TARGET_MEM_REF)
2040 && DECL_P (base2));
2042 ptr1 = TREE_OPERAND (base1, 0);
2043 poly_offset_int moff = mem_ref_offset (base1) << LOG2_BITS_PER_UNIT;
2045 /* If only one reference is based on a variable, they cannot alias if
2046 the pointer access is beyond the extent of the variable access.
2047 (the pointer base cannot validly point to an offset less than zero
2048 of the variable).
2049 ??? IVOPTs creates bases that do not honor this restriction,
2050 so do not apply this optimization for TARGET_MEM_REFs. */
2051 if (TREE_CODE (base1) != TARGET_MEM_REF
2052 && !ranges_maybe_overlap_p (offset1 + moff, -1, offset2, max_size2))
2053 return false;
2055 /* If the pointer-based access is bigger than the variable, they cannot
2056 alias. This is similar to the check below where we use TBAA to
2057 increase the size of the pointer-based access based on the dynamic
2058 type of a containing object we can infer from it. */
2059 poly_int64 dsize2;
2060 if (known_size_p (size1)
2061 && poly_int_tree_p (DECL_SIZE (base2), &dsize2)
2062 && known_lt (dsize2, size1))
2063 return false;
2065 /* They also cannot alias if the pointer may not point to the decl. */
2066 if (!ptr_deref_may_alias_decl_p (ptr1, base2))
2067 return false;
2069 /* Disambiguations that rely on strict aliasing rules follow. */
2070 if (!flag_strict_aliasing || !tbaa_p)
2071 return true;
2073 /* If the alias set for a pointer access is zero all bets are off. */
2074 if (base1_alias_set == 0 || base2_alias_set == 0)
2075 return true;
2077 /* When we are trying to disambiguate an access with a pointer dereference
2078 as base versus one with a decl as base we can use both the size
2079 of the decl and its dynamic type for extra disambiguation.
2080 ??? We do not know anything about the dynamic type of the decl
2081 other than that its alias-set contains base2_alias_set as a subset
2082 which does not help us here. */
2083 /* As we know nothing useful about the dynamic type of the decl just
2084 use the usual conflict check rather than a subset test.
2085 ??? We could introduce -fvery-strict-aliasing when the language
2086 does not allow decls to have a dynamic type that differs from their
2087 static type. Then we can check
2088 !alias_set_subset_of (base1_alias_set, base2_alias_set) instead. */
2089 if (base1_alias_set != base2_alias_set
2090 && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
2091 return false;
2093 ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));
2095 /* If the size of the access relevant for TBAA through the pointer
2096 is bigger than the size of the decl we can't possibly access the
2097 decl via that pointer. */
2098 if (/* ??? This in turn may run afoul when a decl of type T which is
2099 a member of union type U is accessed through a pointer to
2100 type U and sizeof T is smaller than sizeof U. */
2101 TREE_CODE (TREE_TYPE (ptrtype1)) != UNION_TYPE
2102 && TREE_CODE (TREE_TYPE (ptrtype1)) != QUAL_UNION_TYPE
2103 && compare_sizes (DECL_SIZE (base2),
2104 TYPE_SIZE (TREE_TYPE (ptrtype1))) < 0)
2105 return false;
2107 if (!ref2)
2108 return true;
2110 /* If the decl is accessed via a MEM_REF, reconstruct the base
2111 we can use for TBAA and an appropriately adjusted offset. */
2112 dbase2 = ref2;
2113 while (handled_component_p (dbase2))
2114 dbase2 = TREE_OPERAND (dbase2, 0);
2115 poly_int64 doffset1 = offset1;
2116 poly_offset_int doffset2 = offset2;
2117 if (TREE_CODE (dbase2) == MEM_REF
2118 || TREE_CODE (dbase2) == TARGET_MEM_REF)
2120 doffset2 -= mem_ref_offset (dbase2) << LOG2_BITS_PER_UNIT;
2121 tree ptrtype2 = TREE_TYPE (TREE_OPERAND (dbase2, 1));
2122 /* If second reference is view-converted, give up now. */
2123 if (same_type_for_tbaa (TREE_TYPE (dbase2), TREE_TYPE (ptrtype2)) != 1)
2124 return true;
2127 /* If first reference is view-converted, give up now. */
2128 if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1)
2129 return true;
2131 /* If both references are through the same type, they do not alias
2132 if the accesses do not overlap. This does extra disambiguation
2133 for mixed/pointer accesses but requires strict aliasing.
2134 For MEM_REFs we require that the component-ref offset we computed
2135 is relative to the start of the type which we ensure by
2136 comparing rvalue and access type and disregarding the constant
2137 pointer offset.
2139 But avoid treating variable length arrays as "objects"; instead assume they
2140 can overlap by an exact multiple of their element size.
2141 See gcc.dg/torture/alias-2.c. */
2142 if (((TREE_CODE (base1) != TARGET_MEM_REF
2143 || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
2144 && (TREE_CODE (dbase2) != TARGET_MEM_REF
2145 || (!TMR_INDEX (dbase2) && !TMR_INDEX2 (dbase2))))
2146 && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (dbase2)) == 1)
2148 bool partial_overlap = (TREE_CODE (TREE_TYPE (base1)) == ARRAY_TYPE
2149 && (TYPE_SIZE (TREE_TYPE (base1))
2150 && TREE_CODE (TYPE_SIZE (TREE_TYPE (base1)))
2151 != INTEGER_CST));
2152 if (!partial_overlap
2153 && !ranges_maybe_overlap_p (doffset1, max_size1, doffset2, max_size2))
2154 return false;
2155 if (!ref1 || !ref2
2156 /* If there is a must-alias, there is no use in disambiguating further. */
2157 || (!partial_overlap
2158 && known_eq (size1, max_size1) && known_eq (size2, max_size2)))
2159 return true;
2160 int res = nonoverlapping_refs_since_match_p (base1, ref1, base2, ref2,
2161 partial_overlap);
2162 if (res == -1)
2163 return !nonoverlapping_component_refs_p (ref1, ref2);
2164 return !res;
2167 /* Do access-path based disambiguation. */
2168 if (ref1 && ref2
2169 && (handled_component_p (ref1) || handled_component_p (ref2)))
2170 return aliasing_component_refs_p (ref1,
2171 ref1_alias_set, base1_alias_set,
2172 offset1, max_size1,
2173 ref2,
2174 ref2_alias_set, base2_alias_set,
2175 offset2, max_size2);
2177 return true;
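/* Illustrative query (hypothetical code):

     double d;
     void f (int *p) { *p = 0; ... d ... }

   Here base1 is the MEM_REF for *p and base2 is the decl d; under
   strict aliasing the alias sets of int and double do not conflict,
   so the store through p is disambiguated against d.  Independently,
   if the access through p were known to be larger than DECL_SIZE of
   d, the size check alone would disambiguate.  */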
2180 /* Return true if two indirect references based on *PTR1
2181 and *PTR2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
2182 [OFFSET2, OFFSET2 + MAX_SIZE2) may alias. *PTR1 and *PTR2 have
2183 the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
2184 in which case they are computed on-demand. REF1 and REF2
2185 if non-NULL are the complete memory reference trees. */
2187 static bool
2188 indirect_refs_may_alias_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
2189 poly_int64 offset1, poly_int64 max_size1,
2190 poly_int64 size1,
2191 alias_set_type ref1_alias_set,
2192 alias_set_type base1_alias_set,
2193 tree ref2 ATTRIBUTE_UNUSED, tree base2,
2194 poly_int64 offset2, poly_int64 max_size2,
2195 poly_int64 size2,
2196 alias_set_type ref2_alias_set,
2197 alias_set_type base2_alias_set, bool tbaa_p)
2199 tree ptr1;
2200 tree ptr2;
2201 tree ptrtype1, ptrtype2;
2203 gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
2204 || TREE_CODE (base1) == TARGET_MEM_REF)
2205 && (TREE_CODE (base2) == MEM_REF
2206 || TREE_CODE (base2) == TARGET_MEM_REF));
2208 ptr1 = TREE_OPERAND (base1, 0);
2209 ptr2 = TREE_OPERAND (base2, 0);
2211 /* If both bases are based on pointers they cannot alias if they may not
2212 point to the same memory object or if they point to the same object
2213 and the accesses do not overlap. */
2214 if ((!cfun || gimple_in_ssa_p (cfun))
2215 && operand_equal_p (ptr1, ptr2, 0)
2216 && (((TREE_CODE (base1) != TARGET_MEM_REF
2217 || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
2218 && (TREE_CODE (base2) != TARGET_MEM_REF
2219 || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2))))
2220 || (TREE_CODE (base1) == TARGET_MEM_REF
2221 && TREE_CODE (base2) == TARGET_MEM_REF
2222 && (TMR_STEP (base1) == TMR_STEP (base2)
2223 || (TMR_STEP (base1) && TMR_STEP (base2)
2224 && operand_equal_p (TMR_STEP (base1),
2225 TMR_STEP (base2), 0)))
2226 && (TMR_INDEX (base1) == TMR_INDEX (base2)
2227 || (TMR_INDEX (base1) && TMR_INDEX (base2)
2228 && operand_equal_p (TMR_INDEX (base1),
2229 TMR_INDEX (base2), 0)))
2230 && (TMR_INDEX2 (base1) == TMR_INDEX2 (base2)
2231 || (TMR_INDEX2 (base1) && TMR_INDEX2 (base2)
2232 && operand_equal_p (TMR_INDEX2 (base1),
2233 TMR_INDEX2 (base2), 0))))))
2235 poly_offset_int moff1 = mem_ref_offset (base1) << LOG2_BITS_PER_UNIT;
2236 poly_offset_int moff2 = mem_ref_offset (base2) << LOG2_BITS_PER_UNIT;
2237 if (!ranges_maybe_overlap_p (offset1 + moff1, max_size1,
2238 offset2 + moff2, max_size2))
2239 return false;
2240 /* If there is a must-alias, there is no use in disambiguating further. */
2241 if (known_eq (size1, max_size1) && known_eq (size2, max_size2))
2242 return true;
2243 if (ref1 && ref2)
2245 int res = nonoverlapping_refs_since_match_p (NULL, ref1, NULL, ref2,
2246 false);
2247 if (res != -1)
2248 return !res;
2251 if (!ptr_derefs_may_alias_p (ptr1, ptr2))
2252 return false;
2254 /* Disambiguations that rely on strict aliasing rules follow. */
2255 if (!flag_strict_aliasing || !tbaa_p)
2256 return true;
2258 ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));
2259 ptrtype2 = TREE_TYPE (TREE_OPERAND (base2, 1));
2261 /* If the alias set for a pointer access is zero all bets are off. */
2262 if (base1_alias_set == 0
2263 || base2_alias_set == 0)
2264 return true;
2266 /* Do type-based disambiguation. */
2267 if (base1_alias_set != base2_alias_set
2268 && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
2269 return false;
2271 /* If either reference is view-converted, give up now. */
2272 if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1
2273 || same_type_for_tbaa (TREE_TYPE (base2), TREE_TYPE (ptrtype2)) != 1)
2274 return true;
2276 /* If both references are through the same type, they do not alias
2277 if the accesses do not overlap. This does extra disambiguation
2278 for mixed/pointer accesses but requires strict aliasing. */
2279 if ((TREE_CODE (base1) != TARGET_MEM_REF
2280 || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
2281 && (TREE_CODE (base2) != TARGET_MEM_REF
2282 || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2)))
2283 && same_type_for_tbaa (TREE_TYPE (ptrtype1),
2284 TREE_TYPE (ptrtype2)) == 1)
2286 /* But avoid treating arrays as "objects", instead assume they
2287 can overlap by an exact multiple of their element size.
2288 See gcc.dg/torture/alias-2.c. */
2289 bool partial_overlap = TREE_CODE (TREE_TYPE (ptrtype1)) == ARRAY_TYPE;
2291 if (!partial_overlap
2292 && !ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
2293 return false;
2294 if (!ref1 || !ref2
2295 || (!partial_overlap
2296 && known_eq (size1, max_size1) && known_eq (size2, max_size2)))
2297 return true;
2298 int res = nonoverlapping_refs_since_match_p (base1, ref1, base2, ref2,
2299 partial_overlap);
2300 if (res == -1)
2301 return !nonoverlapping_component_refs_p (ref1, ref2);
2302 return !res;
2305 /* Do access-path based disambiguation. */
2306 if (ref1 && ref2
2307 && (handled_component_p (ref1) || handled_component_p (ref2)))
2308 return aliasing_component_refs_p (ref1,
2309 ref1_alias_set, base1_alias_set,
2310 offset1, max_size1,
2311 ref2,
2312 ref2_alias_set, base2_alias_set,
2313 offset2, max_size2);
2315 return true;
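/* Illustrative query (hypothetical code):

     void f (int *p, float *q) { *p = 1; *q = 2.0f; }

   Both bases are MEM_REFs; ptr_derefs_may_alias_p cannot tell the
   pointers apart, but under -fstrict-aliasing the alias sets of int
   and float do not conflict, so the type-based check above
   disambiguates the two stores.  */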
2318 /* Return true, if the two memory references REF1 and REF2 may alias. */
2320 static bool
2321 refs_may_alias_p_2 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
2323 tree base1, base2;
2324 poly_int64 offset1 = 0, offset2 = 0;
2325 poly_int64 max_size1 = -1, max_size2 = -1;
2326 bool var1_p, var2_p, ind1_p, ind2_p;
2328 gcc_checking_assert ((!ref1->ref
2329 || TREE_CODE (ref1->ref) == SSA_NAME
2330 || DECL_P (ref1->ref)
2331 || TREE_CODE (ref1->ref) == STRING_CST
2332 || handled_component_p (ref1->ref)
2333 || TREE_CODE (ref1->ref) == MEM_REF
2334 || TREE_CODE (ref1->ref) == TARGET_MEM_REF
2335 || TREE_CODE (ref1->ref) == WITH_SIZE_EXPR)
2336 && (!ref2->ref
2337 || TREE_CODE (ref2->ref) == SSA_NAME
2338 || DECL_P (ref2->ref)
2339 || TREE_CODE (ref2->ref) == STRING_CST
2340 || handled_component_p (ref2->ref)
2341 || TREE_CODE (ref2->ref) == MEM_REF
2342 || TREE_CODE (ref2->ref) == TARGET_MEM_REF
2343 || TREE_CODE (ref2->ref) == WITH_SIZE_EXPR));
2345 /* Decompose the references into their base objects and the access. */
2346 base1 = ao_ref_base (ref1);
2347 offset1 = ref1->offset;
2348 max_size1 = ref1->max_size;
2349 base2 = ao_ref_base (ref2);
2350 offset2 = ref2->offset;
2351 max_size2 = ref2->max_size;
2353 /* We can end up with registers or constants as bases for example from
2354 *D.1663_44 = VIEW_CONVERT_EXPR<struct DB_LSN>(__tmp$B0F64_59);
2355 which is seen as a struct copy. */
2356 if (TREE_CODE (base1) == SSA_NAME
2357 || TREE_CODE (base1) == CONST_DECL
2358 || TREE_CODE (base1) == CONSTRUCTOR
2359 || TREE_CODE (base1) == ADDR_EXPR
2360 || CONSTANT_CLASS_P (base1)
2361 || TREE_CODE (base2) == SSA_NAME
2362 || TREE_CODE (base2) == CONST_DECL
2363 || TREE_CODE (base2) == CONSTRUCTOR
2364 || TREE_CODE (base2) == ADDR_EXPR
2365 || CONSTANT_CLASS_P (base2))
2366 return false;
2368 /* We can end up referring to code via function and label decls.
2369 As we likely do not properly track code aliases conservatively
2370 bail out. */
2371 if (TREE_CODE (base1) == FUNCTION_DECL
2372 || TREE_CODE (base1) == LABEL_DECL
2373 || TREE_CODE (base2) == FUNCTION_DECL
2374 || TREE_CODE (base2) == LABEL_DECL)
2375 return true;
2377 /* Two volatile accesses always conflict. */
2378 if (ref1->volatile_p
2379 && ref2->volatile_p)
2380 return true;
2382 /* refN->ref may convey size information; strip it rather than
2383 confusing our workers with it - ao_ref_base took it into account already. */
2384 tree ref1ref = ref1->ref;
2385 if (ref1ref && TREE_CODE (ref1ref) == WITH_SIZE_EXPR)
2386 ref1ref = TREE_OPERAND (ref1ref, 0);
2387 tree ref2ref = ref2->ref;
2388 if (ref2ref && TREE_CODE (ref2ref) == WITH_SIZE_EXPR)
2389 ref2ref = TREE_OPERAND (ref2ref, 0);
2391 /* Defer to simple offset-based disambiguation if we have
2392 references based on two decls. Do this before deferring to
2393 TBAA to handle must-alias cases in conformance with the
2394 GCC extension of allowing type-punning through unions. */
2395 var1_p = DECL_P (base1);
2396 var2_p = DECL_P (base2);
2397 if (var1_p && var2_p)
2398 return decl_refs_may_alias_p (ref1ref, base1, offset1, max_size1,
2399 ref1->size,
2400 ref2ref, base2, offset2, max_size2,
2401 ref2->size);
2403 /* Handle restrict-based accesses.
2404 ??? ao_ref_base strips inner MEM_REF [&decl], recover from that
2405 here. */
2406 tree rbase1 = base1;
2407 tree rbase2 = base2;
2408 if (var1_p)
2410 rbase1 = ref1ref;
2411 if (rbase1)
2412 while (handled_component_p (rbase1))
2413 rbase1 = TREE_OPERAND (rbase1, 0);
2415 if (var2_p)
2417 rbase2 = ref2ref;
2418 if (rbase2)
2419 while (handled_component_p (rbase2))
2420 rbase2 = TREE_OPERAND (rbase2, 0);
2422 if (rbase1 && rbase2
2423 && (TREE_CODE (base1) == MEM_REF || TREE_CODE (base1) == TARGET_MEM_REF)
2424 && (TREE_CODE (base2) == MEM_REF || TREE_CODE (base2) == TARGET_MEM_REF)
2425 /* If the accesses are in the same restrict clique... */
2426 && MR_DEPENDENCE_CLIQUE (base1) == MR_DEPENDENCE_CLIQUE (base2)
2427 /* But based on different pointers they do not alias. */
2428 && MR_DEPENDENCE_BASE (base1) != MR_DEPENDENCE_BASE (base2))
2429 return false;
2431 ind1_p = (TREE_CODE (base1) == MEM_REF
2432 || TREE_CODE (base1) == TARGET_MEM_REF);
2433 ind2_p = (TREE_CODE (base2) == MEM_REF
2434 || TREE_CODE (base2) == TARGET_MEM_REF);
2436 /* Canonicalize the pointer-vs-decl case. */
2437 if (ind1_p && var2_p)
2439 std::swap (offset1, offset2);
2440 std::swap (max_size1, max_size2);
2441 std::swap (base1, base2);
2442 std::swap (ref1, ref2);
2443 std::swap (ref1ref, ref2ref);
2444 var1_p = true;
2445 ind1_p = false;
2446 var2_p = false;
2447 ind2_p = true;
2450 /* First defer to TBAA if possible. */
2451 if (tbaa_p
2452 && flag_strict_aliasing
2453 && !alias_sets_conflict_p (ao_ref_alias_set (ref1),
2454 ao_ref_alias_set (ref2)))
2455 return false;
2457 /* If the reference is based on a pointer that points to memory
2458 that may not be written to then the other reference cannot possibly
2459 clobber it. */
2460 if ((TREE_CODE (TREE_OPERAND (base2, 0)) == SSA_NAME
2461 && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base2, 0)))
2462 || (ind1_p
2463 && TREE_CODE (TREE_OPERAND (base1, 0)) == SSA_NAME
2464 && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base1, 0))))
2465 return false;
2467 /* Dispatch to the pointer-vs-decl or pointer-vs-pointer disambiguators. */
2468 if (var1_p && ind2_p)
2469 return indirect_ref_may_alias_decl_p (ref2ref, base2,
2470 offset2, max_size2, ref2->size,
2471 ao_ref_alias_set (ref2),
2472 ao_ref_base_alias_set (ref2),
2473 ref1ref, base1,
2474 offset1, max_size1, ref1->size,
2475 ao_ref_alias_set (ref1),
2476 ao_ref_base_alias_set (ref1),
2477 tbaa_p);
2478 else if (ind1_p && ind2_p)
2479 return indirect_refs_may_alias_p (ref1ref, base1,
2480 offset1, max_size1, ref1->size,
2481 ao_ref_alias_set (ref1),
2482 ao_ref_base_alias_set (ref1),
2483 ref2ref, base2,
2484 offset2, max_size2, ref2->size,
2485 ao_ref_alias_set (ref2),
2486 ao_ref_base_alias_set (ref2),
2487 tbaa_p);
2489 gcc_unreachable ();
2492 /* Return true, if the two memory references REF1 and REF2 may alias
2493 and update statistics. */
2495 bool
2496 refs_may_alias_p_1 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
2498 bool res = refs_may_alias_p_2 (ref1, ref2, tbaa_p);
2499 if (res)
2500 ++alias_stats.refs_may_alias_p_may_alias;
2501 else
2502 ++alias_stats.refs_may_alias_p_no_alias;
2503 return res;
2506 static bool
2507 refs_may_alias_p (tree ref1, ao_ref *ref2, bool tbaa_p)
2509 ao_ref r1;
2510 ao_ref_init (&r1, ref1);
2511 return refs_may_alias_p_1 (&r1, ref2, tbaa_p);
2514 bool
2515 refs_may_alias_p (tree ref1, tree ref2, bool tbaa_p)
2517 ao_ref r1, r2;
2518 ao_ref_init (&r1, ref1);
2519 ao_ref_init (&r2, ref2);
2520 return refs_may_alias_p_1 (&r1, &r2, tbaa_p);
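/* Usage sketch (hypothetical caller): passes holding plain reference
   trees can query the oracle directly, e.g.

     if (!refs_may_alias_p (lhs1, lhs2, true))
       ... the two references are independent ...

   Passing false for TBAA_P drops type-based disambiguation, which is
   what the dependence helpers below deliberately do.  */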
2523 /* Returns true if there is an anti-dependence for the STORE that
2524 executes after the LOAD. */
2526 bool
2527 refs_anti_dependent_p (tree load, tree store)
2529 ao_ref r1, r2;
2530 ao_ref_init (&r1, load);
2531 ao_ref_init (&r2, store);
2532 return refs_may_alias_p_1 (&r1, &r2, false);
2535 /* Returns true if there is an output dependence for the stores
2536 STORE1 and STORE2. */
2538 bool
2539 refs_output_dependent_p (tree store1, tree store2)
2541 ao_ref r1, r2;
2542 ao_ref_init (&r1, store1);
2543 ao_ref_init (&r2, store2);
2544 return refs_may_alias_p_1 (&r1, &r2, false);
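/* E.g. for the statement pair

     ... = *p_1;      (load)
     *q_2 = ...;      (store)

   refs_anti_dependent_p (*p_1, *q_2) is true whenever the two
   references may alias; note that both dependence tests pass TBAA_P
   as false and thus stay conservative with respect to type-based
   rules.  */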
2547 /* Return true if REF may access global memory. */
2549 bool
2550 ref_may_access_global_memory_p (ao_ref *ref)
2552 if (!ref->ref)
2553 return true;
2554 tree base = ao_ref_base (ref);
2555 if (TREE_CODE (base) == MEM_REF
2556 || TREE_CODE (base) == TARGET_MEM_REF)
2558 if (ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0)))
2559 return true;
2561 else
2563 if (!auto_var_in_fn_p (base, current_function_decl)
2564 || pt_solution_includes (&cfun->gimple_df->escaped,
2565 base))
2566 return true;
2568 return false;
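/* For instance (hypothetical), a reference based on a local

     int buf[16];

   whose address never escapes is neither global nor in the escaped
   points-to solution, so the function returns false; a MEM_REF whose
   base pointer may point to global memory yields true.  */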
2571 /* Returns true if and only if REF may alias any access stored in TT.
2572 If TBAA_P is true, use the TBAA oracle. */
2574 static bool
2575 modref_may_conflict (const gcall *stmt,
2576 modref_tree <alias_set_type> *tt, ao_ref *ref, bool tbaa_p)
2578 alias_set_type base_set, ref_set;
2579 bool global_memory_ok = false;
2581 if (tt->every_base)
2582 return true;
2584 if (!dbg_cnt (ipa_mod_ref))
2585 return true;
2587 base_set = ao_ref_base_alias_set (ref);
2589 ref_set = ao_ref_alias_set (ref);
2591 int num_tests = 0, max_tests = param_modref_max_tests;
2592 for (auto base_node : tt->bases)
2594 if (tbaa_p && flag_strict_aliasing)
2596 if (num_tests >= max_tests)
2597 return true;
2598 alias_stats.modref_tests++;
2599 if (!alias_sets_conflict_p (base_set, base_node->base))
2600 continue;
2601 num_tests++;
2604 if (base_node->every_ref)
2605 return true;
2607 for (auto ref_node : base_node->refs)
2609 /* Do not repeat the same test as before. */
2610 if ((ref_set != base_set || base_node->base != ref_node->ref)
2611 && tbaa_p && flag_strict_aliasing)
2613 if (num_tests >= max_tests)
2614 return true;
2615 alias_stats.modref_tests++;
2616 if (!alias_sets_conflict_p (ref_set, ref_node->ref))
2617 continue;
2618 num_tests++;
2621 if (ref_node->every_access)
2622 return true;
2624 /* TBAA checks did not disambiguate, try individual accesses. */
2625 for (auto access_node : ref_node->accesses)
2627 if (num_tests >= max_tests)
2628 return true;
2630 if (access_node.parm_index == MODREF_GLOBAL_MEMORY_PARM)
2632 if (global_memory_ok)
2633 continue;
2634 if (ref_may_access_global_memory_p (ref))
2635 return true;
2636 global_memory_ok = true;
2637 num_tests++;
2638 continue;
2641 tree arg = access_node.get_call_arg (stmt);
2642 if (!arg)
2643 return true;
2645 alias_stats.modref_baseptr_tests++;
2647 if (integer_zerop (arg) && flag_delete_null_pointer_checks)
2648 continue;
2650 /* The PTA oracle will be unhappy if arg is not a pointer. */
2651 if (!POINTER_TYPE_P (TREE_TYPE (arg)))
2652 return true;
2654 /* If we don't have a base pointer, give up. */
2655 if (!ref->ref && !ref->base)
2656 continue;
2658 ao_ref ref2;
2659 if (access_node.get_ao_ref (stmt, &ref2))
2661 ref2.ref_alias_set = ref_node->ref;
2662 ref2.base_alias_set = base_node->base;
2663 if (refs_may_alias_p_1 (&ref2, ref, tbaa_p))
2664 return true;
2666 else if (ptr_deref_may_alias_ref_p_1 (arg, ref))
2667 return true;
2669 num_tests++;
2673 return false;
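/* Sketch of the walk above: for a summary shaped like

     base alias set B
       ref alias set R
         access relative to parameter 1

   a query first tests B and R against REF's base and ref alias sets;
   only if TBAA cannot disambiguate does it build an ao_ref from the
   actual argument of STMT and retry the full oracle, with the total
   work capped by param_modref_max_tests.  */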
2676 /* Check if REF conflicts with a call using the "fn spec" attribute.
2677 If CLOBBER is true we are checking for writes, otherwise check loads.
2679 Return 0 if there are no conflicts (except for possible function call
2680 argument reads), 1 if there are conflicts and -1 if we cannot decide by
2681 fn spec. */
2683 static int
2684 check_fnspec (gcall *call, ao_ref *ref, bool clobber)
2686 attr_fnspec fnspec = gimple_call_fnspec (call);
2687 if (fnspec.known_p ())
2689 if (clobber
2690 ? !fnspec.global_memory_written_p ()
2691 : !fnspec.global_memory_read_p ())
2693 for (unsigned int i = 0; i < gimple_call_num_args (call); i++)
2694 if (POINTER_TYPE_P (TREE_TYPE (gimple_call_arg (call, i)))
2695 && (!fnspec.arg_specified_p (i)
2696 || (clobber ? fnspec.arg_maybe_written_p (i)
2697 : fnspec.arg_maybe_read_p (i))))
2699 ao_ref dref;
2700 tree size = NULL_TREE;
2701 unsigned int size_arg;
2703 if (!fnspec.arg_specified_p (i))
2705 else if (fnspec.arg_max_access_size_given_by_arg_p
2706 (i, &size_arg))
2707 size = gimple_call_arg (call, size_arg);
2708 else if (fnspec.arg_access_size_given_by_type_p (i))
2710 tree callee = gimple_call_fndecl (call);
2711 tree t = TYPE_ARG_TYPES (TREE_TYPE (callee));
2713 for (unsigned int p = 0; p < i; p++)
2714 t = TREE_CHAIN (t);
2715 size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_VALUE (t)));
2717 ao_ref_init_from_ptr_and_size (&dref,
2718 gimple_call_arg (call, i),
2719 size);
2720 if (refs_may_alias_p_1 (&dref, ref, false))
2721 return 1;
2723 if (clobber
2724 && fnspec.errno_maybe_written_p ()
2725 && flag_errno_math
2726 && targetm.ref_may_alias_errno (ref))
2727 return 1;
2728 return 0;
2732 /* FIXME: we should handle barriers more consistently, but for now leave the
2733 check here. */
2734 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2735 switch (DECL_FUNCTION_CODE (gimple_call_fndecl (call)))
2737 /* __sync_* builtins and some OpenMP builtins act as threading
2738 barriers. */
2739 #undef DEF_SYNC_BUILTIN
2740 #define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
2741 #include "sync-builtins.def"
2742 #undef DEF_SYNC_BUILTIN
2743 case BUILT_IN_GOMP_ATOMIC_START:
2744 case BUILT_IN_GOMP_ATOMIC_END:
2745 case BUILT_IN_GOMP_BARRIER:
2746 case BUILT_IN_GOMP_BARRIER_CANCEL:
2747 case BUILT_IN_GOMP_TASKWAIT:
2748 case BUILT_IN_GOMP_TASKGROUP_END:
2749 case BUILT_IN_GOMP_CRITICAL_START:
2750 case BUILT_IN_GOMP_CRITICAL_END:
2751 case BUILT_IN_GOMP_CRITICAL_NAME_START:
2752 case BUILT_IN_GOMP_CRITICAL_NAME_END:
2753 case BUILT_IN_GOMP_LOOP_END:
2754 case BUILT_IN_GOMP_LOOP_END_CANCEL:
2755 case BUILT_IN_GOMP_ORDERED_START:
2756 case BUILT_IN_GOMP_ORDERED_END:
2757 case BUILT_IN_GOMP_SECTIONS_END:
2758 case BUILT_IN_GOMP_SECTIONS_END_CANCEL:
2759 case BUILT_IN_GOMP_SINGLE_COPY_START:
2760 case BUILT_IN_GOMP_SINGLE_COPY_END:
2761 return 1;
2763 default:
2764 return -1;
2766 return -1;
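/* For example, for a call whose fnspec says global memory is not
   written, a clobber query only needs to inspect the pointer
   arguments: with a (hypothetical) spec where argument 0 may be
   written and its access size is given by argument 2, the ao_ref
   built from that argument pair is the only candidate compared
   against REF.  */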
2769 /* If the call CALL may use the memory reference REF return true,
2770 otherwise return false. */
2772 static bool
2773 ref_maybe_used_by_call_p_1 (gcall *call, ao_ref *ref, bool tbaa_p)
2775 tree base, callee;
2776 unsigned i;
2777 int flags = gimple_call_flags (call);
2779 if (flags & (ECF_CONST|ECF_NOVOPS))
2780 goto process_args;
2782 /* A call that is not without side-effects might involve volatile
2783 accesses and thus conflicts with all other volatile accesses. */
2784 if (ref->volatile_p)
2785 return true;
2787 callee = gimple_call_fndecl (call);
2789 if (callee != NULL_TREE)
2791 struct cgraph_node *node = cgraph_node::get (callee);
2792 /* We cannot safely optimize based on the summary of the callee if it
2793 does not always bind to the current def: it is possible that a memory
2794 load was optimized out earlier and the interposed variant may not be
2795 optimized this way. */
2796 if (node && node->binds_to_current_def_p ())
2798 modref_summary *summary = get_modref_function_summary (node);
2799 if (summary && !summary->calls_interposable)
2801 if (!modref_may_conflict (call, summary->loads, ref, tbaa_p))
2803 alias_stats.modref_use_no_alias++;
2804 if (dump_file && (dump_flags & TDF_DETAILS))
2806 fprintf (dump_file,
2807 "ipa-modref: call stmt ");
2808 print_gimple_stmt (dump_file, call, 0);
2809 fprintf (dump_file,
2810 "ipa-modref: call to %s does not use ",
2811 node->dump_name ());
2812 if (!ref->ref && ref->base)
2814 fprintf (dump_file, "base: ");
2815 print_generic_expr (dump_file, ref->base);
2817 else if (ref->ref)
2819 fprintf (dump_file, "ref: ");
2820 print_generic_expr (dump_file, ref->ref);
2822 fprintf (dump_file, " alias sets: %i->%i\n",
2823 ao_ref_base_alias_set (ref),
2824 ao_ref_alias_set (ref));
2826 goto process_args;
2828 alias_stats.modref_use_may_alias++;
2833 base = ao_ref_base (ref);
2834 if (!base)
2835 return true;
2837 /* If the reference is based on a decl that is not aliased the call
2838 cannot possibly use it. */
2839 if (DECL_P (base)
2840 && !may_be_aliased (base)
2841 /* But local statics can be used through recursion. */
2842 && !is_global_var (base))
2843 goto process_args;
2845 if (int res = check_fnspec (call, ref, false))
2847 if (res == 1)
2848 return true;
2850 else
2851 goto process_args;
2853 /* Check if base is a global static variable that is not read
2854 by the function. */
2855 if (callee != NULL_TREE && VAR_P (base) && TREE_STATIC (base))
2857 struct cgraph_node *node = cgraph_node::get (callee);
2858 bitmap read;
2859 int id;
2861 /* FIXME: Callee can be an OMP builtin that does not have a call graph
2862 node yet. We should enforce that there are nodes for all decls in the
2863 IL and remove this check instead. */
2864 if (node
2865 && (id = ipa_reference_var_uid (base)) != -1
2866 && (read = ipa_reference_get_read_global (node))
2867 && !bitmap_bit_p (read, id))
2868 goto process_args;
2871 /* Check if the base variable is call-used. */
2872 if (DECL_P (base))
2874 if (pt_solution_includes (gimple_call_use_set (call), base))
2875 return true;
2877 else if ((TREE_CODE (base) == MEM_REF
2878 || TREE_CODE (base) == TARGET_MEM_REF)
2879 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
2881 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
2882 if (!pi)
2883 return true;
2885 if (pt_solutions_intersect (gimple_call_use_set (call), &pi->pt))
2886 return true;
2888 else
2889 return true;
2891 /* Inspect call arguments for passed-by-value aliases. */
2892 process_args:
2893 for (i = 0; i < gimple_call_num_args (call); ++i)
2895 tree op = gimple_call_arg (call, i);
2896 int flags = gimple_call_arg_flags (call, i);
2898 if (flags & (EAF_UNUSED | EAF_NO_DIRECT_READ))
2899 continue;
2901 if (TREE_CODE (op) == WITH_SIZE_EXPR)
2902 op = TREE_OPERAND (op, 0);
2904 if (TREE_CODE (op) != SSA_NAME
2905 && !is_gimple_min_invariant (op))
2907 ao_ref r;
2908 ao_ref_init (&r, op);
2909 if (refs_may_alias_p_1 (&r, ref, tbaa_p))
2910 return true;
2914 return false;
2917 static bool
2918 ref_maybe_used_by_call_p (gcall *call, ao_ref *ref, bool tbaa_p)
2920 bool res;
2921 res = ref_maybe_used_by_call_p_1 (call, ref, tbaa_p);
2922 if (res)
2923 ++alias_stats.ref_maybe_used_by_call_p_may_alias;
2924 else
2925 ++alias_stats.ref_maybe_used_by_call_p_no_alias;
2926 return res;
2930 /* If the statement STMT may use the memory reference REF return
2931 true, otherwise return false. */
2933 bool
2934 ref_maybe_used_by_stmt_p (gimple *stmt, ao_ref *ref, bool tbaa_p)
2936 if (is_gimple_assign (stmt))
2938 tree rhs;
2940 /* All memory assign statements are single. */
2941 if (!gimple_assign_single_p (stmt))
2942 return false;
2944 rhs = gimple_assign_rhs1 (stmt);
2945 if (is_gimple_reg (rhs)
2946 || is_gimple_min_invariant (rhs)
2947 || gimple_assign_rhs_code (stmt) == CONSTRUCTOR)
2948 return false;
2950 return refs_may_alias_p (rhs, ref, tbaa_p);
2952 else if (is_gimple_call (stmt))
2953 return ref_maybe_used_by_call_p (as_a <gcall *> (stmt), ref, tbaa_p);
2954 else if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
2956 tree retval = gimple_return_retval (return_stmt);
2957 if (retval
2958 && TREE_CODE (retval) != SSA_NAME
2959 && !is_gimple_min_invariant (retval)
2960 && refs_may_alias_p (retval, ref, tbaa_p))
2961 return true;
2962 /* If ref escapes the function then the return acts as a use. */
2963 tree base = ao_ref_base (ref);
2964 if (!base)
2966 else if (DECL_P (base))
2967 return is_global_var (base);
2968 else if (TREE_CODE (base) == MEM_REF
2969 || TREE_CODE (base) == TARGET_MEM_REF)
2970 return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
2971 return false;
2974 return true;
2977 bool
2978 ref_maybe_used_by_stmt_p (gimple *stmt, tree ref, bool tbaa_p)
2980 ao_ref r;
2981 ao_ref_init (&r, ref);
2982 return ref_maybe_used_by_stmt_p (stmt, &r, tbaa_p);
2985 /* If the call in statement CALL may clobber the memory reference REF
2986 return true, otherwise return false. */
2988 bool
2989 call_may_clobber_ref_p_1 (gcall *call, ao_ref *ref, bool tbaa_p)
2991 tree base;
2992 tree callee;
2994 /* If the call is pure or const it cannot clobber anything. */
2995 if (gimple_call_flags (call)
2996 & (ECF_PURE|ECF_CONST|ECF_LOOPING_CONST_OR_PURE|ECF_NOVOPS))
2997 return false;
2998 if (gimple_call_internal_p (call))
2999 switch (gimple_call_internal_fn (call))
3001 /* Treat these internal calls like ECF_PURE for aliasing,
3002 they don't write to any memory the program should care about.
3003 They have important other side-effects, and read memory,
3004 so can't be ECF_NOVOPS. */
3005 case IFN_UBSAN_NULL:
3006 case IFN_UBSAN_BOUNDS:
3007 case IFN_UBSAN_VPTR:
3008 case IFN_UBSAN_OBJECT_SIZE:
3009 case IFN_UBSAN_PTR:
3010 case IFN_ASAN_CHECK:
3011 return false;
3012 default:
3013 break;
3016 callee = gimple_call_fndecl (call);
3018 if (callee != NULL_TREE && !ref->volatile_p)
3020 struct cgraph_node *node = cgraph_node::get (callee);
3021 if (node)
3023 modref_summary *summary = get_modref_function_summary (node);
3024 if (summary)
3026 if (!modref_may_conflict (call, summary->stores, ref, tbaa_p)
3027 && (!summary->writes_errno
3028 || !targetm.ref_may_alias_errno (ref)))
3030 alias_stats.modref_clobber_no_alias++;
3031 if (dump_file && (dump_flags & TDF_DETAILS))
3033 fprintf (dump_file,
3034 "ipa-modref: call stmt ");
3035 print_gimple_stmt (dump_file, call, 0);
3036 fprintf (dump_file,
3037 "ipa-modref: call to %s does not clobber ",
3038 node->dump_name ());
3039 if (!ref->ref && ref->base)
3041 fprintf (dump_file, "base: ");
3042 print_generic_expr (dump_file, ref->base);
3044 else if (ref->ref)
3046 fprintf (dump_file, "ref: ");
3047 print_generic_expr (dump_file, ref->ref);
3049 fprintf (dump_file, " alias sets: %i->%i\n",
3050 ao_ref_base_alias_set (ref),
3051 ao_ref_alias_set (ref));
3053 return false;
3055 alias_stats.modref_clobber_may_alias++;
3060 base = ao_ref_base (ref);
3061 if (!base)
3062 return true;
3064 if (TREE_CODE (base) == SSA_NAME
3065 || CONSTANT_CLASS_P (base))
3066 return false;
3068 /* A call that is not without side-effects might involve volatile
3069 accesses and thus conflicts with all other volatile accesses. */
3070 if (ref->volatile_p)
3071 return true;
3073 /* If the reference is based on a decl that is not aliased the call
3074 cannot possibly clobber it. */
3075 if (DECL_P (base)
3076 && !may_be_aliased (base)
3077 /* But local non-readonly statics can be modified through recursion
3078 or the call may implement a threading barrier which we must
3079 treat as may-def. */
3080 && (TREE_READONLY (base)
3081 || !is_global_var (base)))
3082 return false;
3084 /* If the reference is based on a pointer that points to memory
3085 that may not be written to then the call cannot possibly clobber it. */
3086 if ((TREE_CODE (base) == MEM_REF
3087 || TREE_CODE (base) == TARGET_MEM_REF)
3088 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
3089 && SSA_NAME_POINTS_TO_READONLY_MEMORY (TREE_OPERAND (base, 0)))
3090 return false;
3092 if (int res = check_fnspec (call, ref, true))
3094 if (res == 1)
3095 return true;
3097 else
3098 return false;
3100 /* Check if base is a global static variable that is not written
3101 by the function. */
3102 if (callee != NULL_TREE && VAR_P (base) && TREE_STATIC (base))
3104 struct cgraph_node *node = cgraph_node::get (callee);
3105 bitmap written;
3106 int id;
3108 if (node
3109 && (id = ipa_reference_var_uid (base)) != -1
3110 && (written = ipa_reference_get_written_global (node))
3111 && !bitmap_bit_p (written, id))
3112 return false;
3115 /* Check if the base variable is call-clobbered. */
3116 if (DECL_P (base))
3117 return pt_solution_includes (gimple_call_clobber_set (call), base);
3118 else if ((TREE_CODE (base) == MEM_REF
3119 || TREE_CODE (base) == TARGET_MEM_REF)
3120 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
3122 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
3123 if (!pi)
3124 return true;
3126 return pt_solutions_intersect (gimple_call_clobber_set (call), &pi->pt);
3129 return true;
3132 /* If the call in statement CALL may clobber the memory reference REF
3133 return true, otherwise return false. */
3135 bool
3136 call_may_clobber_ref_p (gcall *call, tree ref, bool tbaa_p)
3138 bool res;
3139 ao_ref r;
3140 ao_ref_init (&r, ref);
3141 res = call_may_clobber_ref_p_1 (call, &r, tbaa_p);
3142 if (res)
3143 ++alias_stats.call_may_clobber_ref_p_may_alias;
3144 else
3145 ++alias_stats.call_may_clobber_ref_p_no_alias;
3146 return res;
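/* Usage sketch (hypothetical caller):

     ao_ref r;
     ao_ref_init (&r, mem);
     if (!call_may_clobber_ref_p_1 (call, &r, true))
       ... the call preserves the contents of MEM ...

   The tree overload above builds the ao_ref itself and additionally
   updates the statistics counters.  */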
3150 /* If the statement STMT may clobber the memory reference REF return true,
3151 otherwise return false. */
3153 bool
3154 stmt_may_clobber_ref_p_1 (gimple *stmt, ao_ref *ref, bool tbaa_p)
3156 if (is_gimple_call (stmt))
3158 tree lhs = gimple_call_lhs (stmt);
3159 if (lhs
3160 && TREE_CODE (lhs) != SSA_NAME)
3162 ao_ref r;
3163 ao_ref_init (&r, lhs);
3164 if (refs_may_alias_p_1 (ref, &r, tbaa_p))
3165 return true;
3168 return call_may_clobber_ref_p_1 (as_a <gcall *> (stmt), ref, tbaa_p);
3170 else if (gimple_assign_single_p (stmt))
3172 tree lhs = gimple_assign_lhs (stmt);
3173 if (TREE_CODE (lhs) != SSA_NAME)
3175 ao_ref r;
3176 ao_ref_init (&r, lhs);
3177 return refs_may_alias_p_1 (ref, &r, tbaa_p);
3180 else if (gimple_code (stmt) == GIMPLE_ASM)
3181 return true;
3183 return false;
3186 bool
3187 stmt_may_clobber_ref_p (gimple *stmt, tree ref, bool tbaa_p)
3189 ao_ref r;
3190 ao_ref_init (&r, ref);
3191 return stmt_may_clobber_ref_p_1 (stmt, &r, tbaa_p);
3194 /* Return true if store1 and store2 described by corresponding tuples
3195 <BASE, OFFSET, SIZE, MAX_SIZE> have the same size and store to the same
3196 address. */
3198 static bool
3199 same_addr_size_stores_p (tree base1, poly_int64 offset1, poly_int64 size1,
3200 poly_int64 max_size1,
3201 tree base2, poly_int64 offset2, poly_int64 size2,
3202 poly_int64 max_size2)
3204 /* Offsets need to be 0. */
3205 if (maybe_ne (offset1, 0)
3206 || maybe_ne (offset2, 0))
3207 return false;
3209 bool base1_obj_p = SSA_VAR_P (base1);
3210 bool base2_obj_p = SSA_VAR_P (base2);
3212 /* We need one object. */
3213 if (base1_obj_p == base2_obj_p)
3214 return false;
3215 tree obj = base1_obj_p ? base1 : base2;
3217 /* And we need one MEM_REF. */
3218 bool base1_memref_p = TREE_CODE (base1) == MEM_REF;
3219 bool base2_memref_p = TREE_CODE (base2) == MEM_REF;
3220 if (base1_memref_p == base2_memref_p)
3221 return false;
3222 tree memref = base1_memref_p ? base1 : base2;
3224 /* Sizes need to be valid. */
3225 if (!known_size_p (max_size1)
3226 || !known_size_p (max_size2)
3227 || !known_size_p (size1)
3228 || !known_size_p (size2))
3229 return false;
3231 /* Max_size needs to match size. */
3232 if (maybe_ne (max_size1, size1)
3233 || maybe_ne (max_size2, size2))
3234 return false;
3236 /* Sizes need to match. */
3237 if (maybe_ne (size1, size2))
3238 return false;
3241 /* Check that memref is a store to pointer with singleton points-to info. */
3242 if (!integer_zerop (TREE_OPERAND (memref, 1)))
3243 return false;
3244 tree ptr = TREE_OPERAND (memref, 0);
3245 if (TREE_CODE (ptr) != SSA_NAME)
3246 return false;
3247 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
3248 unsigned int pt_uid;
3249 if (pi == NULL
3250 || !pt_solution_singleton_or_null_p (&pi->pt, &pt_uid))
3251 return false;
3253 /* Be conservative with non-call exceptions when the address might
3254 be NULL. */
3255 if (cfun->can_throw_non_call_exceptions && pi->pt.null)
3256 return false;
3258 /* Check that ptr points relative to obj. */
3259 unsigned int obj_uid = DECL_PT_UID (obj);
3260 if (obj_uid != pt_uid)
3261 return false;
3263 /* Check that the object size is the same as the store size. That ensures
3264 that ptr points to the start of obj. */
3265 return (DECL_SIZE (obj)
3266 && poly_int_tree_p (DECL_SIZE (obj))
3267 && known_eq (wi::to_poly_offset (DECL_SIZE (obj)), size1));
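/* Illustrative case (hypothetical IL):

     struct S { int i; } s;
     p_1 = &s;
     *p_1 = x;    vs.    s = y;

   Both stores are at offset 0 with equal sizes matching
   DECL_SIZE (s), and p_1's points-to solution is the singleton
   { s }, so the two stores provably write the same bytes.  */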
3270 /* Return true if REF is killed by a store described by
3271 BASE, OFFSET, SIZE and MAX_SIZE. */
3273 static bool
3274 store_kills_ref_p (tree base, poly_int64 offset, poly_int64 size,
3275 poly_int64 max_size, ao_ref *ref)
3277 poly_int64 ref_offset = ref->offset;
3278 /* We can get MEM[symbol: sZ, index: D.8862_1] here,
3279 so base == ref->base does not always hold. */
3280 if (base != ref->base)
3282 /* Try using points-to info. */
3283 if (same_addr_size_stores_p (base, offset, size, max_size, ref->base,
3284 ref->offset, ref->size, ref->max_size))
3285 return true;
3287 /* If both base and ref->base are MEM_REFs, only compare the
3288 first operand, and if the second operand isn't equal constant,
3289 try to add the offsets into offset and ref_offset. */
3290 if (TREE_CODE (base) == MEM_REF && TREE_CODE (ref->base) == MEM_REF
3291 && TREE_OPERAND (base, 0) == TREE_OPERAND (ref->base, 0))
3293 if (!tree_int_cst_equal (TREE_OPERAND (base, 1),
3294 TREE_OPERAND (ref->base, 1)))
3296 poly_offset_int off1 = mem_ref_offset (base);
3297 off1 <<= LOG2_BITS_PER_UNIT;
3298 off1 += offset;
3299 poly_offset_int off2 = mem_ref_offset (ref->base);
3300 off2 <<= LOG2_BITS_PER_UNIT;
3301 off2 += ref_offset;
3302 if (!off1.to_shwi (&offset) || !off2.to_shwi (&ref_offset))
3303 size = -1;
3306 else
3307 size = -1;
3309 /* For a must-alias check we need to be able to constrain
3310 the access properly. */
3311 return (known_eq (size, max_size)
3312 && known_subrange_p (ref_offset, ref->max_size, offset, size));
3315 /* If STMT kills the memory reference REF return true, otherwise
3316 return false. */
3318 bool
3319 stmt_kills_ref_p (gimple *stmt, ao_ref *ref)
3321 if (!ao_ref_base (ref))
3322 return false;
3324 if (gimple_has_lhs (stmt)
3325 && TREE_CODE (gimple_get_lhs (stmt)) != SSA_NAME
3326 /* The assignment is not necessarily carried out if it can throw
3327 and we can catch it in the current function where we could inspect
3328 the previous value.
3329 ??? We only need to care about the RHS throwing. For aggregate
3330 assignments or similar calls and non-call exceptions the LHS
3331 might throw as well. */
3332 && !stmt_can_throw_internal (cfun, stmt))
3334 tree lhs = gimple_get_lhs (stmt);
3335 /* If LHS is literally a base of the access we are done. */
3336 if (ref->ref)
3338 tree base = ref->ref;
3339 tree innermost_dropped_array_ref = NULL_TREE;
3340 if (handled_component_p (base))
3342 tree saved_lhs0 = NULL_TREE;
3343 if (handled_component_p (lhs))
3345 saved_lhs0 = TREE_OPERAND (lhs, 0);
3346 TREE_OPERAND (lhs, 0) = integer_zero_node;
3350 /* Just compare the outermost handled component, if
3351 they are equal we have found a possible common
3352 base. */
3353 tree saved_base0 = TREE_OPERAND (base, 0);
3354 TREE_OPERAND (base, 0) = integer_zero_node;
3355 bool res = operand_equal_p (lhs, base, 0);
3356 TREE_OPERAND (base, 0) = saved_base0;
3357 if (res)
3358 break;
3359 /* Remember if we drop an array-ref that we need to
3360 double-check not being at struct end. */
3361 if (TREE_CODE (base) == ARRAY_REF
3362 || TREE_CODE (base) == ARRAY_RANGE_REF)
3363 innermost_dropped_array_ref = base;
3364 /* Otherwise drop handled components of the access. */
3365 base = saved_base0;
3367 while (handled_component_p (base));
3368 if (saved_lhs0)
3369 TREE_OPERAND (lhs, 0) = saved_lhs0;
3371 /* Finally check if the lhs has the same address and size as the
3372 base candidate of the access. Watch out if we have dropped
3373 an array-ref that was at struct end, this means ref->ref may
3374 be outside of the TYPE_SIZE of its base. */
3375 if ((! innermost_dropped_array_ref
3376 || ! array_at_struct_end_p (innermost_dropped_array_ref))
3377 && (lhs == base
3378 || (((TYPE_SIZE (TREE_TYPE (lhs))
3379 == TYPE_SIZE (TREE_TYPE (base)))
3380 || (TYPE_SIZE (TREE_TYPE (lhs))
3381 && TYPE_SIZE (TREE_TYPE (base))
3382 && operand_equal_p (TYPE_SIZE (TREE_TYPE (lhs)),
3383 TYPE_SIZE (TREE_TYPE (base)),
3384 0)))
3385 && operand_equal_p (lhs, base,
3386 OEP_ADDRESS_OF
3387 | OEP_MATCH_SIDE_EFFECTS))))
3389 ++alias_stats.stmt_kills_ref_p_yes;
3390 return true;
3394 /* Now look for non-literal equal bases with the restriction of
3395 handling constant offset and size. */
3396 /* For a must-alias check we need to be able to constrain
3397 the access properly. */
3398 if (!ref->max_size_known_p ())
3400 ++alias_stats.stmt_kills_ref_p_no;
3401 return false;
3403 poly_int64 size, offset, max_size;
3404 bool reverse;
3405 tree base = get_ref_base_and_extent (lhs, &offset, &size, &max_size,
3406 &reverse);
3407 if (store_kills_ref_p (base, offset, size, max_size, ref))
3409 ++alias_stats.stmt_kills_ref_p_yes;
3410 return true;
3414 if (is_gimple_call (stmt))
3416 tree callee = gimple_call_fndecl (stmt);
3417 struct cgraph_node *node;
3418 modref_summary *summary;
3420 /* Try to disambiguate using the modref summary. Modref records a vector
3421 of stores with known offsets relative to function parameters that must
3422 happen on every execution of the function. See if we have a matching
3423 store and verify that the function cannot use the value. */
3424 if (callee != NULL_TREE
3425 && (node = cgraph_node::get (callee)) != NULL
3426 && node->binds_to_current_def_p ()
3427 && (summary = get_modref_function_summary (node)) != NULL
3428 && summary->kills.length ()
3429 && (!cfun->can_throw_non_call_exceptions
3430 || !stmt_can_throw_internal (cfun, stmt)))
3432 for (auto kill : summary->kills)
3434 ao_ref dref;
3436 /* We can only do useful compares if we know the access range
3437 precisely. */
3438 if (!kill.get_ao_ref (as_a <gcall *> (stmt), &dref))
3439 continue;
3440 if (store_kills_ref_p (ao_ref_base (&dref), dref.offset,
3441 dref.size, dref.max_size, ref))
3443 /* For the store to be killed it must not be used
3444 earlier. */
3445 if (ref_maybe_used_by_call_p_1 (as_a <gcall *> (stmt), ref,
3446 true)
3447 || !dbg_cnt (ipa_mod_ref))
3448 break;
3449 if (dump_file && (dump_flags & TDF_DETAILS))
3451 fprintf (dump_file,
3452 "ipa-modref: call stmt ");
3453 print_gimple_stmt (dump_file, stmt, 0);
3454 fprintf (dump_file,
3455 "ipa-modref: call to %s kills ",
3456 node->dump_name ());
3457 print_generic_expr (dump_file, ref->base);
3459 ++alias_stats.modref_kill_yes;
3460 return true;
3463 ++alias_stats.modref_kill_no;
3465 if (callee != NULL_TREE
3466 && gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
3467 switch (DECL_FUNCTION_CODE (callee))
3469 case BUILT_IN_FREE:
3471 tree ptr = gimple_call_arg (stmt, 0);
3472 tree base = ao_ref_base (ref);
3473 if (base && TREE_CODE (base) == MEM_REF
3474 && TREE_OPERAND (base, 0) == ptr)
3476 ++alias_stats.stmt_kills_ref_p_yes;
3477 return true;
3479 break;
3482 case BUILT_IN_MEMCPY:
3483 case BUILT_IN_MEMPCPY:
3484 case BUILT_IN_MEMMOVE:
3485 case BUILT_IN_MEMSET:
3486 case BUILT_IN_MEMCPY_CHK:
3487 case BUILT_IN_MEMPCPY_CHK:
3488 case BUILT_IN_MEMMOVE_CHK:
3489 case BUILT_IN_MEMSET_CHK:
3490 case BUILT_IN_STRNCPY:
3491 case BUILT_IN_STPNCPY:
3492 case BUILT_IN_CALLOC:
3494 /* For a must-alias check we need to be able to constrain
3495 the access properly. */
3496 if (!ref->max_size_known_p ())
3498 ++alias_stats.stmt_kills_ref_p_no;
3499 return false;
3501 tree dest;
3502 tree len;
3504 /* In execution order a calloc call will never kill
3505 anything. However, DSE will (ab)use this interface
3506 to ask if a calloc call writes the same memory locations
3507 as a later assignment, memset, etc. So handle calloc
3508 in the expected way. */
3509 if (DECL_FUNCTION_CODE (callee) == BUILT_IN_CALLOC)
3511 tree arg0 = gimple_call_arg (stmt, 0);
3512 tree arg1 = gimple_call_arg (stmt, 1);
3513 if (TREE_CODE (arg0) != INTEGER_CST
3514 || TREE_CODE (arg1) != INTEGER_CST)
3516 ++alias_stats.stmt_kills_ref_p_no;
3517 return false;
3520 dest = gimple_call_lhs (stmt);
3521 if (!dest)
3523 ++alias_stats.stmt_kills_ref_p_no;
3524 return false;
3526 len = fold_build2 (MULT_EXPR, TREE_TYPE (arg0), arg0, arg1);
3528 else
3530 dest = gimple_call_arg (stmt, 0);
3531 len = gimple_call_arg (stmt, 2);
3533 if (!poly_int_tree_p (len))
3534 return false;
3535 ao_ref dref;
3536 ao_ref_init_from_ptr_and_size (&dref, dest, len);
3537 if (store_kills_ref_p (ao_ref_base (&dref), dref.offset,
3538 dref.size, dref.max_size, ref))
3540 ++alias_stats.stmt_kills_ref_p_yes;
3541 return true;
3543 break;
3546 case BUILT_IN_VA_END:
3548 tree ptr = gimple_call_arg (stmt, 0);
3549 if (TREE_CODE (ptr) == ADDR_EXPR)
3551 tree base = ao_ref_base (ref);
3552 if (TREE_OPERAND (ptr, 0) == base)
3554 ++alias_stats.stmt_kills_ref_p_yes;
3555 return true;
3558 break;
3561 default:;
3564 ++alias_stats.stmt_kills_ref_p_no;
3565 return false;
3568 bool
3569 stmt_kills_ref_p (gimple *stmt, tree ref)
3571 ao_ref r;
3572 ao_ref_init (&r, ref);
3573 return stmt_kills_ref_p (stmt, &r);
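/* For example (hypothetical code):

     struct A { int i; int j; } a;
     a.j = 1;                    (1)
     memset (&a, 0, sizeof a);   (2)

   stmt_kills_ref_p for statement (2) and the reference a.j returns
   true: the ao_ref built from (&a, sizeof a) covers the whole of a
   and a.j is a known subrange of it, so DSE may delete store (1).  */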
3577 /* Walk the virtual use-def chain of VUSE until hitting the virtual operand
3578 TARGET or a statement clobbering the memory reference REF, in which
3579 case false is returned. The walk starts with VUSE, one argument of PHI. */
3581 static bool
3582 maybe_skip_until (gimple *phi, tree &target, basic_block target_bb,
3583 ao_ref *ref, tree vuse, bool tbaa_p, unsigned int &limit,
3584 bitmap *visited, bool abort_on_visited,
3585 void *(*translate)(ao_ref *, tree, void *, translate_flags *),
3586 translate_flags disambiguate_only,
3587 void *data)
3589 basic_block bb = gimple_bb (phi);
3591 if (!*visited)
3592 *visited = BITMAP_ALLOC (NULL);
3594 bitmap_set_bit (*visited, SSA_NAME_VERSION (PHI_RESULT (phi)));
3596 /* Walk until we hit the target. */
3597 while (vuse != target)
3599 gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
3600 /* If we are searching for the target VUSE by walking up to
3601 TARGET_BB dominating the original PHI we are finished once
3602 we reach a default def or a definition in a block dominating
3603 that block. Update TARGET and return. */
3604 if (!target
3605 && (gimple_nop_p (def_stmt)
3606 || dominated_by_p (CDI_DOMINATORS,
3607 target_bb, gimple_bb (def_stmt))))
3609 target = vuse;
3610 return true;
3613 /* Recurse for PHI nodes. */
3614 if (gimple_code (def_stmt) == GIMPLE_PHI)
3616 /* An already visited PHI node ends the walk successfully. */
3617 if (bitmap_bit_p (*visited, SSA_NAME_VERSION (PHI_RESULT (def_stmt))))
3618 return !abort_on_visited;
3619 vuse = get_continuation_for_phi (def_stmt, ref, tbaa_p, limit,
3620 visited, abort_on_visited,
3621 translate, data, disambiguate_only);
3622 if (!vuse)
3623 return false;
3624 continue;
3626 else if (gimple_nop_p (def_stmt))
3627 return false;
3628 else
3630 /* A clobbering statement or the end of the IL ends the walk, failing. */
3631 if ((int)limit <= 0)
3632 return false;
3633 --limit;
3634 if (stmt_may_clobber_ref_p_1 (def_stmt, ref, tbaa_p))
3636 translate_flags tf = disambiguate_only;
3637 if (translate
3638 && (*translate) (ref, vuse, data, &tf) == NULL)
3640 else
3641 return false;
3644 /* If we reach a new basic-block see if we already skipped it
3645 in a previous walk that ended successfully. */
3646 if (gimple_bb (def_stmt) != bb)
3648 if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (vuse)))
3649 return !abort_on_visited;
3650 bb = gimple_bb (def_stmt);
3652 vuse = gimple_vuse (def_stmt);
3654 return true;
3658 /* Starting from a PHI node for the virtual operand of the memory reference
3659 REF, find a continuation virtual operand that allows walking
3660 statements dominating PHI while skipping only statements that cannot
3661 possibly clobber REF. Decrements LIMIT for each alias disambiguation done
3662 and aborts the walk, returning NULL_TREE if it reaches zero.
3663 Returns NULL_TREE if no suitable virtual operand can be found. */
3665 tree
3666 get_continuation_for_phi (gimple *phi, ao_ref *ref, bool tbaa_p,
3667 unsigned int &limit, bitmap *visited,
3668 bool abort_on_visited,
3669 void *(*translate)(ao_ref *, tree, void *,
3670 translate_flags *),
3671 void *data,
3672 translate_flags disambiguate_only)
3674 unsigned nargs = gimple_phi_num_args (phi);
3676 /* We can simply look through a single-argument PHI. */
3677 if (nargs == 1)
3678 return PHI_ARG_DEF (phi, 0);
3680 /* For two or more arguments try to pairwise skip non-aliasing code
3681 until we hit the phi argument definition that dominates the other one. */
3682 basic_block phi_bb = gimple_bb (phi);
3683 tree arg0, arg1;
3684 unsigned i;
3686 /* Find a candidate for the virtual operand whose definition
3687 dominates those of all others. */
3688 /* First look if any of the args themselves satisfy this. */
3689 for (i = 0; i < nargs; ++i)
3691 arg0 = PHI_ARG_DEF (phi, i);
3692 if (SSA_NAME_IS_DEFAULT_DEF (arg0))
3693 break;
3694 basic_block def_bb = gimple_bb (SSA_NAME_DEF_STMT (arg0));
3695 if (def_bb != phi_bb
3696 && dominated_by_p (CDI_DOMINATORS, phi_bb, def_bb))
3697 break;
3698 arg0 = NULL_TREE;
3700 /* If not, see if we can reach such a candidate by walking defs
3701 until we hit the immediate dominator. maybe_skip_until will
3702 do that for us. */
3703 basic_block dom = get_immediate_dominator (CDI_DOMINATORS, phi_bb);
3705 /* Then check against the (to be) found candidate. */
3706 for (i = 0; i < nargs; ++i)
3708 arg1 = PHI_ARG_DEF (phi, i);
3709 if (arg1 == arg0)
3710 ;
3711 else if (! maybe_skip_until (phi, arg0, dom, ref, arg1, tbaa_p,
3712 limit, visited,
3713 abort_on_visited,
3714 translate,
3715 /* Do not valueize when walking over
3716 backedges. */
3717 dominated_by_p
3718 (CDI_DOMINATORS,
3719 gimple_bb (SSA_NAME_DEF_STMT (arg1)),
3720 phi_bb)
3721 ? TR_DISAMBIGUATE
3722 : disambiguate_only, data))
3723 return NULL_TREE;
3726 return arg0;
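/* Editor's note: a hypothetical caller sketch for the function above.
   The query budget of 32 is arbitrary; VISITED is allocated lazily by
   the walker and owned by the caller.  */

static tree
continuation_past_phi (gimple *phi, tree ref_tree)
{
  ao_ref r;
  ao_ref_init (&r, ref_tree);
  unsigned int limit = 32;	/* Arbitrary disambiguation budget.  */
  bitmap visited = NULL;
  tree cont = get_continuation_for_phi (phi, &r, /*tbaa_p=*/true, limit,
					&visited, /*abort_on_visited=*/false,
					/*translate=*/NULL, /*data=*/NULL,
					TR_TRANSLATE);
  if (visited)
    BITMAP_FREE (visited);
  return cont;
}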
3729 /* Based on the memory reference REF and its virtual use VUSE call
3730 WALKER for each virtual use that is equivalent to VUSE, including VUSE
3731 itself. That is, for each virtual use whose defining statement
3732 does not clobber REF.
3734 WALKER is called with REF, the current virtual use and DATA. If
3735 WALKER returns non-NULL the walk stops and its result is returned.
3736 At the end of a non-successful walk NULL is returned.
3738 TRANSLATE if non-NULL is called with a pointer to REF, the virtual
3739 use whose definition is a statement that may clobber REF, and DATA.
3740 If TRANSLATE returns (void *)-1 the walk stops and NULL is returned.
3741 If TRANSLATE returns non-NULL the walk stops and its result is returned.
3742 If TRANSLATE returns NULL the walk continues and TRANSLATE is supposed
3743 to adjust REF and *DATA to make that valid.
3745 VALUEIZE if non-NULL is called with the next VUSE that is considered
3746 and its return value is substituted for it. This can be used to
3747 implement optimistic value-numbering for example. Note that the
3748 VUSE argument is assumed to be valueized already.
3750 LIMIT specifies the number of alias queries we are allowed to do;
3751 the walk stops when it reaches zero and NULL is returned. LIMIT
3752 is decremented by the number of alias queries (plus adjustments
3753 done by the callbacks) upon return.
3755 TODO: Cache the vector of equivalent vuses per (ref, vuse) pair. */
3757 void *
3758 walk_non_aliased_vuses (ao_ref *ref, tree vuse, bool tbaa_p,
3759 void *(*walker)(ao_ref *, tree, void *),
3760 void *(*translate)(ao_ref *, tree, void *,
3761 translate_flags *),
3762 tree (*valueize)(tree),
3763 unsigned &limit, void *data)
3765 bitmap visited = NULL;
3766 void *res;
3767 bool translated = false;
3769 timevar_push (TV_ALIAS_STMT_WALK);
3771 do
3773 gimple *def_stmt;
3775 /* ??? Do we want to account this to TV_ALIAS_STMT_WALK? */
3776 res = (*walker) (ref, vuse, data);
3777 /* Abort walk. */
3778 if (res == (void *)-1)
3780 res = NULL;
3781 break;
3783 /* Lookup succeeded. */
3784 else if (res != NULL)
3785 break;
3787 if (valueize)
3789 vuse = valueize (vuse);
3790 if (!vuse)
3792 res = NULL;
3793 break;
3796 def_stmt = SSA_NAME_DEF_STMT (vuse);
3797 if (gimple_nop_p (def_stmt))
3798 break;
3799 else if (gimple_code (def_stmt) == GIMPLE_PHI)
3800 vuse = get_continuation_for_phi (def_stmt, ref, tbaa_p, limit,
3801 &visited, translated, translate, data);
3802 else
3804 if ((int)limit <= 0)
3806 res = NULL;
3807 break;
3809 --limit;
3810 if (stmt_may_clobber_ref_p_1 (def_stmt, ref, tbaa_p))
3812 if (!translate)
3813 break;
3814 translate_flags disambiguate_only = TR_TRANSLATE;
3815 res = (*translate) (ref, vuse, data, &disambiguate_only);
3816 /* Failed lookup and translation. */
3817 if (res == (void *)-1)
3819 res = NULL;
3820 break;
3822 /* Lookup succeeded. */
3823 else if (res != NULL)
3824 break;
3825 /* Translation succeeded, continue walking. */
3826 translated = translated || disambiguate_only == TR_TRANSLATE;
3828 vuse = gimple_vuse (def_stmt);
3831 while (vuse);
3833 if (visited)
3834 BITMAP_FREE (visited);
3836 timevar_pop (TV_ALIAS_STMT_WALK);
3838 return res;
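/* Editor's note: an illustrative sketch under assumed names.  The
   callback collects every virtual use equivalent to the starting one;
   returning NULL from it continues the walk, so walk_non_aliased_vuses
   itself returns NULL here and the interesting output is the vector
   passed through DATA.  */

static void *
record_vuse_cb (ao_ref *, tree vuse, void *data)
{
  ((vec<tree> *) data)->safe_push (vuse);
  return NULL;	/* Keep walking.  */
}

static void
collect_equivalent_vuses (tree ref_tree, tree vuse, vec<tree> *out)
{
  ao_ref r;
  ao_ref_init (&r, ref_tree);
  unsigned limit = 32;	/* Arbitrary alias-query budget.  */
  walk_non_aliased_vuses (&r, vuse, /*tbaa_p=*/true, record_vuse_cb,
			  /*translate=*/NULL, /*valueize=*/NULL, limit, out);
}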
3842 /* Based on the memory reference REF call WALKER for each vdef whose
3843 defining statement may clobber REF, starting with VDEF. If REF
3844 is NULL_TREE, each defining statement is visited.
3846 WALKER is called with REF, the current vdef and DATA. If WALKER
3847 returns true the walk is stopped, otherwise it continues.
3849 If function entry is reached, FUNCTION_ENTRY_REACHED is set to true.
3850 The pointer may be NULL, in which case this information is not tracked.
3852 At PHI nodes walk_aliased_vdefs forks into one walk for each
3853 PHI argument (but only one walk continues at merge points); the
3854 return value is true if any of the walks was successful.
3856 The function returns the number of statements walked or -1 if
3857 LIMIT stmts were walked and the walk was aborted at this point.
3858 If LIMIT is zero the walk is not aborted. */
3860 static int
3861 walk_aliased_vdefs_1 (ao_ref *ref, tree vdef,
3862 bool (*walker)(ao_ref *, tree, void *), void *data,
3863 bitmap *visited, unsigned int cnt,
3864 bool *function_entry_reached, unsigned limit)
3866 do
3868 gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);
3870 if (*visited
3871 && !bitmap_set_bit (*visited, SSA_NAME_VERSION (vdef)))
3872 return cnt;
3874 if (gimple_nop_p (def_stmt))
3876 if (function_entry_reached)
3877 *function_entry_reached = true;
3878 return cnt;
3880 else if (gimple_code (def_stmt) == GIMPLE_PHI)
3882 unsigned i;
3883 if (!*visited)
3884 *visited = BITMAP_ALLOC (NULL);
3885 for (i = 0; i < gimple_phi_num_args (def_stmt); ++i)
3887 int res = walk_aliased_vdefs_1 (ref,
3888 gimple_phi_arg_def (def_stmt, i),
3889 walker, data, visited, cnt,
3890 function_entry_reached, limit);
3891 if (res == -1)
3892 return -1;
3893 cnt = res;
3895 return cnt;
3898 /* ??? Do we want to account this to TV_ALIAS_STMT_WALK? */
3899 cnt++;
3900 if (cnt == limit)
3901 return -1;
3902 if ((!ref
3903 || stmt_may_clobber_ref_p_1 (def_stmt, ref))
3904 && (*walker) (ref, vdef, data))
3905 return cnt;
3907 vdef = gimple_vuse (def_stmt);
3909 while (1);
3912 int
3913 walk_aliased_vdefs (ao_ref *ref, tree vdef,
3914 bool (*walker)(ao_ref *, tree, void *), void *data,
3915 bitmap *visited,
3916 bool *function_entry_reached, unsigned int limit)
3918 bitmap local_visited = NULL;
3919 int ret;
3921 timevar_push (TV_ALIAS_STMT_WALK);
3923 if (function_entry_reached)
3924 *function_entry_reached = false;
3926 ret = walk_aliased_vdefs_1 (ref, vdef, walker, data,
3927 visited ? visited : &local_visited, 0,
3928 function_entry_reached, limit);
3929 if (local_visited)
3930 BITMAP_FREE (local_visited);
3932 timevar_pop (TV_ALIAS_STMT_WALK);
3934 return ret;
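/* Editor's note: a hypothetical client of the walker above.  It counts
   the may-clobbering definitions of REF_TREE reachable from VDEF and
   gives up after 100 walked statements, in which case -1 is returned.  */

static bool
count_clobber_cb (ao_ref *, tree, void *data)
{
  ++*(unsigned *) data;
  return false;	/* Continue the walk.  */
}

static int
count_may_clobbers (tree ref_tree, tree vdef, unsigned *n_clobbers)
{
  ao_ref r;
  ao_ref_init (&r, ref_tree);
  bool entry_reached;
  *n_clobbers = 0;
  return walk_aliased_vdefs (&r, vdef, count_clobber_cb, n_clobbers,
			     /*visited=*/NULL, &entry_reached, 100);
}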
3937 /* Verify validity of the fnspec string.
3938 See attr-fnspec.h for details. */
3940 void
3941 attr_fnspec::verify ()
3943 bool err = false;
3944 if (!len)
3945 return;
3947 /* Check return value specifier. */
3948 if (len < return_desc_size)
3949 err = true;
3950 else if ((len - return_desc_size) % arg_desc_size)
3951 err = true;
3952 else if ((str[0] < '1' || str[0] > '4')
3953 && str[0] != '.' && str[0] != 'm')
3954 err = true;
3956 switch (str[1])
3958 case ' ':
3959 case 'p':
3960 case 'P':
3961 case 'c':
3962 case 'C':
3963 break;
3964 default:
3965 err = true;
3967 if (err)
3968 internal_error ("invalid fn spec attribute \"%s\"", str);
3970 /* Now check all parameters. */
3971 for (unsigned int i = 0; arg_specified_p (i); i++)
3973 unsigned int idx = arg_idx (i);
3974 switch (str[idx])
3976 case 'x':
3977 case 'X':
3978 case 'r':
3979 case 'R':
3980 case 'o':
3981 case 'O':
3982 case 'w':
3983 case 'W':
3984 case '.':
3985 if ((str[idx + 1] >= '1' && str[idx + 1] <= '9')
3986 || str[idx + 1] == 't')
3988 if (str[idx] != 'r' && str[idx] != 'R'
3989 && str[idx] != 'w' && str[idx] != 'W'
3990 && str[idx] != 'o' && str[idx] != 'O')
3991 err = true;
3992 if (str[idx + 1] != 't'
3993 /* Size specified is scalar, so it should be described
3994 by ". " if specified at all. */
3995 && (arg_specified_p (str[idx + 1] - '1')
3996 && str[arg_idx (str[idx + 1] - '1')] != '.'))
3997 err = true;
3999 else if (str[idx + 1] != ' ')
4000 err = true;
4001 break;
4002 default:
4003 if (str[idx] < '1' || str[idx] > '9')
4004 err = true;
4006 if (err)
4007 internal_error ("invalid fn spec attribute \"%s\" arg %i", str, i);
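/* Editor's note: a sketch validating one example string against the
   checks above; the helper and the assertion are illustrative only,
   and the attr_fnspec constructor from a C string is assumed from
   attr-fnspec.h.  In ". R2. " the return description ". " says nothing
   is known about the return value and the function is neither pure nor
   const; "R2" says argument 1 only reads the memory it points to,
   whose size is given by argument 2; ". " leaves argument 2, a scalar,
   undescribed, as the size check above requires.  */

static void
fnspec_example (void)
{
  attr_fnspec spec (". R2. ");
  spec.verify ();	/* Accepts the string.  */
  gcc_checking_assert (spec.arg_specified_p (0));
}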
4011 /* Return true if TYPE1 and TYPE2 will always give the same answer
4012 when compared with other types using same_type_for_tbaa_p. */
4014 static bool
4015 types_equal_for_same_type_for_tbaa_p (tree type1, tree type2,
4016 bool lto_streaming_safe)
4018 /* We use same_type_for_tbaa_p to match types in the access path.
4019 This check is overly conservative. */
4020 type1 = TYPE_MAIN_VARIANT (type1);
4021 type2 = TYPE_MAIN_VARIANT (type2);
4023 if (TYPE_STRUCTURAL_EQUALITY_P (type1)
4024 != TYPE_STRUCTURAL_EQUALITY_P (type2))
4025 return false;
4026 if (TYPE_STRUCTURAL_EQUALITY_P (type1))
4027 return true;
4029 if (lto_streaming_safe)
4030 return type1 == type2;
4031 else
4032 return TYPE_CANONICAL (type1) == TYPE_CANONICAL (type2);
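/* Editor's illustration (hypothetical helper): qualified variants share
   a TYPE_MAIN_VARIANT, so the comparison above treats them as equal.  */

static bool
const_variant_equal_p (tree type)
{
  tree cv = build_qualified_type (type, TYPE_QUAL_CONST);
  /* Both sides reduce to the same main variant, hence true.  */
  return types_equal_for_same_type_for_tbaa_p (type, cv, false);
}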
4035 /* Compare REF1 and REF2 and return flags specifying their differences.
4036 If LTO_STREAMING_SAFE is true do not use alias sets and canonical
4037 types that are going to be recomputed.
4038 If TBAA is true also compare TBAA metadata. */
4040 int
4041 ao_compare::compare_ao_refs (ao_ref *ref1, ao_ref *ref2,
4042 bool lto_streaming_safe,
4043 bool tbaa)
4045 if (TREE_THIS_VOLATILE (ref1->ref) != TREE_THIS_VOLATILE (ref2->ref))
4046 return SEMANTICS;
4047 tree base1 = ao_ref_base (ref1);
4048 tree base2 = ao_ref_base (ref2);
4050 if (!known_eq (ref1->offset, ref2->offset)
4051 || !known_eq (ref1->size, ref2->size)
4052 || !known_eq (ref1->max_size, ref2->max_size))
4053 return SEMANTICS;
4055 /* For variable accesses we need to compare actual paths
4056 to check that both refs access the same address and have the same size. */
4057 if (!known_eq (ref1->size, ref1->max_size))
4059 if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (ref1->ref)),
4060 TYPE_SIZE (TREE_TYPE (ref2->ref)), 0))
4061 return SEMANTICS;
4062 tree r1 = ref1->ref;
4063 tree r2 = ref2->ref;
4065 /* Handle toplevel COMPONENT_REFs of bitfields.
4066 Those are special since they are not allowed in
4067 ADDR_EXPR. */
4068 if (TREE_CODE (r1) == COMPONENT_REF
4069 && DECL_BIT_FIELD (TREE_OPERAND (r1, 1)))
4071 if (TREE_CODE (r2) != COMPONENT_REF
4072 || !DECL_BIT_FIELD (TREE_OPERAND (r2, 1)))
4073 return SEMANTICS;
4074 tree field1 = TREE_OPERAND (r1, 1);
4075 tree field2 = TREE_OPERAND (r2, 1);
4076 if (!operand_equal_p (DECL_FIELD_OFFSET (field1),
4077 DECL_FIELD_OFFSET (field2), 0)
4078 || !operand_equal_p (DECL_FIELD_BIT_OFFSET (field1),
4079 DECL_FIELD_BIT_OFFSET (field2), 0)
4080 || !operand_equal_p (DECL_SIZE (field1), DECL_SIZE (field2), 0)
4081 || !types_compatible_p (TREE_TYPE (r1),
4082 TREE_TYPE (r2)))
4083 return SEMANTICS;
4084 r1 = TREE_OPERAND (r1, 0);
4085 r2 = TREE_OPERAND (r2, 0);
4087 else if (TREE_CODE (r2) == COMPONENT_REF
4088 && DECL_BIT_FIELD (TREE_OPERAND (r2, 1)))
4089 return SEMANTICS;
4091 /* Similarly for bit field refs. */
4092 if (TREE_CODE (r1) == BIT_FIELD_REF)
4094 if (TREE_CODE (r2) != BIT_FIELD_REF
4095 || !operand_equal_p (TREE_OPERAND (r1, 1),
4096 TREE_OPERAND (r2, 1), 0)
4097 || !operand_equal_p (TREE_OPERAND (r1, 2),
4098 TREE_OPERAND (r2, 2), 0)
4099 || !types_compatible_p (TREE_TYPE (r1),
4100 TREE_TYPE (r2)))
4101 return SEMANTICS;
4102 r1 = TREE_OPERAND (r1, 0);
4103 r2 = TREE_OPERAND (r2, 0);
4105 else if (TREE_CODE (r2) == BIT_FIELD_REF)
4106 return SEMANTICS;
4108 /* Now we can compare the address of the actual memory access. */
4109 if (!operand_equal_p (r1, r2, OEP_ADDRESS_OF | OEP_MATCH_SIDE_EFFECTS))
4110 return SEMANTICS;
4112 /* For constant accesses we get more matches by comparing offset only. */
4113 else if (!operand_equal_p (base1, base2,
4114 OEP_ADDRESS_OF | OEP_MATCH_SIDE_EFFECTS))
4115 return SEMANTICS;
4117 /* We can't simply use get_object_alignment_1 on the full
4118 reference as for accesses with variable indexes this reports
4119 an overly conservative alignment. */
4120 unsigned int align1, align2;
4121 unsigned HOST_WIDE_INT bitpos1, bitpos2;
4122 bool known1 = get_object_alignment_1 (base1, &align1, &bitpos1);
4123 bool known2 = get_object_alignment_1 (base2, &align2, &bitpos2);
4124 /* ??? For MEMREF get_object_alignment_1 determines the alignment from
4125 TYPE_ALIGN but still returns false. This seems to contradict
4126 its description, so compare even if the alignment is unknown. */
4127 if (known1 != known2
4128 || (bitpos1 != bitpos2 || align1 != align2))
4129 return SEMANTICS;
4131 /* Now we know that the accesses are semantically the same. */
4132 int flags = 0;
4134 /* ao_ref_base strips inner MEM_REF [&decl], recover from that here. */
4135 tree rbase1 = ref1->ref;
4136 if (rbase1)
4137 while (handled_component_p (rbase1))
4138 rbase1 = TREE_OPERAND (rbase1, 0);
4139 tree rbase2 = ref2->ref;
4140 while (handled_component_p (rbase2))
4141 rbase2 = TREE_OPERAND (rbase2, 0);
4143 /* MEM_REFs and TARGET_MEM_REFs record dependence cliques which are used to
4144 implement restrict pointers. MR_DEPENDENCE_CLIQUE 0 means no information.
4145 Otherwise we need to match bases and cliques. */
4146 if ((((TREE_CODE (rbase1) == MEM_REF || TREE_CODE (rbase1) == TARGET_MEM_REF)
4147 && MR_DEPENDENCE_CLIQUE (rbase1))
4148 || ((TREE_CODE (rbase2) == MEM_REF || TREE_CODE (rbase2) == TARGET_MEM_REF)
4149 && MR_DEPENDENCE_CLIQUE (rbase2)))
4150 && (TREE_CODE (rbase1) != TREE_CODE (rbase2)
4151 || MR_DEPENDENCE_CLIQUE (rbase1) != MR_DEPENDENCE_CLIQUE (rbase2)
4152 || (MR_DEPENDENCE_BASE (rbase1) != MR_DEPENDENCE_BASE (rbase2))))
4153 flags |= DEPENDENCE_CLIQUE;
4155 if (!tbaa)
4156 return flags;
4158 /* Alias sets are not stable across LTO streaming; be conservative here
4159 and compare types the alias sets are ultimately based on. */
4160 if (lto_streaming_safe)
4162 tree t1 = ao_ref_alias_ptr_type (ref1);
4163 tree t2 = ao_ref_alias_ptr_type (ref2);
4164 if (!alias_ptr_types_compatible_p (t1, t2))
4165 flags |= REF_ALIAS_SET;
4167 t1 = ao_ref_base_alias_ptr_type (ref1);
4168 t2 = ao_ref_base_alias_ptr_type (ref2);
4169 if (!alias_ptr_types_compatible_p (t1, t2))
4170 flags |= BASE_ALIAS_SET;
4172 else
4174 if (ao_ref_alias_set (ref1) != ao_ref_alias_set (ref2))
4175 flags |= REF_ALIAS_SET;
4176 if (ao_ref_base_alias_set (ref1) != ao_ref_base_alias_set (ref2))
4177 flags |= BASE_ALIAS_SET;
4180 /* The access path is used only on non-view-converted references. */
4181 bool view_converted = view_converted_memref_p (rbase1);
4182 if (view_converted_memref_p (rbase2) != view_converted)
4183 return flags | ACCESS_PATH;
4184 else if (view_converted)
4185 return flags;
4188 /* Find start of access paths and look for trailing arrays. */
4189 tree c1 = ref1->ref, c2 = ref2->ref;
4190 tree end_struct_ref1 = NULL, end_struct_ref2 = NULL;
4191 int nskipped1 = 0, nskipped2 = 0;
4192 int i = 0, j = 0;
4194 for (tree p1 = ref1->ref; handled_component_p (p1); p1 = TREE_OPERAND (p1, 0))
4196 if (component_ref_to_zero_sized_trailing_array_p (p1))
4197 end_struct_ref1 = p1;
4198 if (ends_tbaa_access_path_p (p1))
4199 c1 = p1, nskipped1 = i;
4200 i++;
4202 for (tree p2 = ref2->ref; handled_component_p (p2); p2 = TREE_OPERAND (p2, 0))
4204 if (component_ref_to_zero_sized_trailing_array_p (p2))
4205 end_struct_ref2 = p2;
4206 if (ends_tbaa_access_path_p (p2))
4207 c2 = p2, nskipped2 = j;
4208 j++;
4211 /* For variable accesses we cannot rely on the offset match below.
4212 We know that the paths are structurally the same, so only check
4213 that the starts of the TBAA paths did not diverge. */
4214 if (!known_eq (ref1->size, ref1->max_size)
4215 && nskipped1 != nskipped2)
4216 return flags | ACCESS_PATH;
4218 /* Information about trailing refs is used by
4219 aliasing_component_refs_p, which is applied only if the paths
4220 have handled components. */
4221 if (!handled_component_p (c1) && !handled_component_p (c2))
4222 ;
4223 else if ((end_struct_ref1 != NULL) != (end_struct_ref2 != NULL))
4224 return flags | ACCESS_PATH;
4225 if (end_struct_ref1
4226 && TYPE_MAIN_VARIANT (TREE_TYPE (end_struct_ref1))
4227 != TYPE_MAIN_VARIANT (TREE_TYPE (end_struct_ref2)))
4228 return flags | ACCESS_PATH;
4230 /* Now compare all handled components of the access path.
4231 We have three oracles that care about access paths:
4232 - aliasing_component_refs_p
4233 - nonoverlapping_refs_since_match_p
4234 - nonoverlapping_component_refs_p
4235 We need to match the things these oracles compare.
4237 It is only necessary to check types for compatibility
4238 and offsets. The rest of what the oracles compare are actual
4239 addresses, which are already known to be the same:
4240 - for constant accesses we check offsets
4241 - for variable accesses we already matched
4242 the path lexically with operand_equal_p. */
4243 while (true)
4245 bool comp1 = handled_component_p (c1);
4246 bool comp2 = handled_component_p (c2);
4248 if (comp1 != comp2)
4249 return flags | ACCESS_PATH;
4250 if (!comp1)
4251 break;
4253 if (TREE_CODE (c1) != TREE_CODE (c2))
4254 return flags | ACCESS_PATH;
4256 /* aliasing_component_refs_p attempts to find a type match within
4257 the paths. For that reason both types need to be equal
4258 with respect to same_type_for_tbaa_p. */
4259 if (!types_equal_for_same_type_for_tbaa_p (TREE_TYPE (c1),
4260 TREE_TYPE (c2),
4261 lto_streaming_safe))
4262 return flags | ACCESS_PATH;
4263 if (component_ref_to_zero_sized_trailing_array_p (c1)
4264 != component_ref_to_zero_sized_trailing_array_p (c2))
4265 return flags | ACCESS_PATH;
4267 /* aliasing_matching_component_refs_p compares
4268 offsets within the path. Other properties are ignored.
4269 Do not bother to verify offsets in variable accesses. Here we
4270 already compared them with operand_equal_p, so they are
4271 structurally the same. */
4272 if (!known_eq (ref1->size, ref1->max_size))
4274 poly_int64 offadj1, sztmc1, msztmc1;
4275 bool reverse1;
4276 get_ref_base_and_extent (c1, &offadj1, &sztmc1, &msztmc1, &reverse1);
4277 poly_int64 offadj2, sztmc2, msztmc2;
4278 bool reverse2;
4279 get_ref_base_and_extent (c2, &offadj2, &sztmc2, &msztmc2, &reverse2);
4280 if (!known_eq (offadj1, offadj2))
4281 return flags | ACCESS_PATH;
4283 c1 = TREE_OPERAND (c1, 0);
4284 c2 = TREE_OPERAND (c2, 0);
4286 /* Finally test the access type. */
4287 if (!types_equal_for_same_type_for_tbaa_p (TREE_TYPE (c1),
4288 TREE_TYPE (c2),
4289 lto_streaming_safe))
4290 return flags | ACCESS_PATH;
4291 return flags;
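/* Editor's note: a hypothetical wrapper showing the intended use.  A
   zero result from compare_ao_refs means the two references are
   interchangeable, including their TBAA metadata; any set flag names
   the property that diverged.  */

static bool
refs_interchangeable_p (tree ref1, tree ref2, bool lto_streaming_safe)
{
  ao_ref r1, r2;
  ao_ref_init (&r1, ref1);
  ao_ref_init (&r2, ref2);
  ao_compare cmp;
  return cmp.compare_ao_refs (&r1, &r2, lto_streaming_safe,
			      /*tbaa=*/true) == 0;
}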
4294 /* Hash REF to HSTATE. If LTO_STREAMING_SAFE do not use alias sets
4295 and canonical types. */
4296 void
4297 ao_compare::hash_ao_ref (ao_ref *ref, bool lto_streaming_safe, bool tbaa,
4298 inchash::hash &hstate)
4300 tree base = ao_ref_base (ref);
4301 tree tbase = base;
4303 if (!known_eq (ref->size, ref->max_size))
4305 tree r = ref->ref;
4306 if (TREE_CODE (r) == COMPONENT_REF
4307 && DECL_BIT_FIELD (TREE_OPERAND (r, 1)))
4309 tree field = TREE_OPERAND (r, 1);
4310 hash_operand (DECL_FIELD_OFFSET (field), hstate, 0);
4311 hash_operand (DECL_FIELD_BIT_OFFSET (field), hstate, 0);
4312 hash_operand (DECL_SIZE (field), hstate, 0);
4313 r = TREE_OPERAND (r, 0);
4315 if (TREE_CODE (r) == BIT_FIELD_REF)
4317 hash_operand (TREE_OPERAND (r, 1), hstate, 0);
4318 hash_operand (TREE_OPERAND (r, 2), hstate, 0);
4319 r = TREE_OPERAND (r, 0);
4321 hash_operand (TYPE_SIZE (TREE_TYPE (ref->ref)), hstate, 0);
4322 hash_operand (r, hstate, OEP_ADDRESS_OF | OEP_MATCH_SIDE_EFFECTS);
4324 else
4326 hash_operand (tbase, hstate, OEP_ADDRESS_OF | OEP_MATCH_SIDE_EFFECTS);
4327 hstate.add_poly_int (ref->offset);
4328 hstate.add_poly_int (ref->size);
4329 hstate.add_poly_int (ref->max_size);
4331 if (!lto_streaming_safe && tbaa)
4333 hstate.add_int (ao_ref_alias_set (ref));
4334 hstate.add_int (ao_ref_base_alias_set (ref));