gcc/tree-ssa-alias.c
1 /* Alias analysis for trees.
2 Copyright (C) 2004-2015 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "timevar.h" /* for TV_ALIAS_STMT_WALK */
30 #include "ssa.h"
31 #include "cgraph.h"
32 #include "tree-pretty-print.h"
33 #include "alias.h"
34 #include "fold-const.h"
36 #include "langhooks.h"
37 #include "dumpfile.h"
38 #include "tree-eh.h"
39 #include "tree-dfa.h"
40 #include "ipa-reference.h"
42 /* Broad overview of how alias analysis on gimple works:
44 Statements clobbering or using memory are linked through the
45 virtual operand factored use-def chain. The virtual operand
46 is unique per function, its symbol is accessible via gimple_vop (cfun).
47 Virtual operands are used for efficiently walking memory statements
48 in the gimple IL and are useful for things like value-numbering as
49 a generation count for memory references.
51 SSA_NAME pointers may have associated points-to information
52 accessible via the SSA_NAME_PTR_INFO macro. Flow-insensitive
53 points-to information is (re-)computed by the TODO_rebuild_alias
54 pass manager todo. Points-to information is also used for more
55 precise tracking of call-clobbered and call-used variables and
56 related disambiguations.
58 This file contains functions for disambiguating memory references,
59 the so-called alias oracle, and tools for walking the gimple IL.
61 The main alias-oracle entry-points are
63 bool stmt_may_clobber_ref_p (gimple *, tree)
65 This function queries if a statement may invalidate (parts of)
66 the memory designated by the reference tree argument.
68 bool ref_maybe_used_by_stmt_p (gimple *, tree)
70 This function queries if a statement may need (parts of) the
71 memory designated by the reference tree argument.
73 There are variants of these functions that only handle the call
74 part of a statement, call_may_clobber_ref_p and ref_maybe_used_by_call_p.
75 Note that these do not disambiguate against a possible call lhs.
77 bool refs_may_alias_p (tree, tree)
79 This function tries to disambiguate two reference trees.
81 bool ptr_deref_may_alias_global_p (tree)
83 This function queries if dereferencing a pointer variable may
84 alias global memory.
86 More low-level disambiguators are available and documented in
87 this file. Low-level disambiguators dealing with points-to
88 information are in tree-ssa-structalias.c. */
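/* As a usage sketch (not from this file; STMT and REF are assumed to
   be supplied by the caller): a pass that must keep a statement STMT
   ordered with respect to a memory reference tree REF would query

     if (ref_maybe_used_by_stmt_p (stmt, ref)
	 || stmt_may_clobber_ref_p (stmt, ref))
       ...

   where both predicates err on the side of returning true.  */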
91 /* Query statistics for the different low-level disambiguators.
92 A high-level query may trigger multiple of them. */
94 static struct {
95 unsigned HOST_WIDE_INT refs_may_alias_p_may_alias;
96 unsigned HOST_WIDE_INT refs_may_alias_p_no_alias;
97 unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_may_alias;
98 unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_no_alias;
99 unsigned HOST_WIDE_INT call_may_clobber_ref_p_may_alias;
100 unsigned HOST_WIDE_INT call_may_clobber_ref_p_no_alias;
101 } alias_stats;
103 void
104 dump_alias_stats (FILE *s)
106 fprintf (s, "\nAlias oracle query stats:\n");
107 fprintf (s, " refs_may_alias_p: "
108 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
109 HOST_WIDE_INT_PRINT_DEC" queries\n",
110 alias_stats.refs_may_alias_p_no_alias,
111 alias_stats.refs_may_alias_p_no_alias
112 + alias_stats.refs_may_alias_p_may_alias);
113 fprintf (s, " ref_maybe_used_by_call_p: "
114 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
115 HOST_WIDE_INT_PRINT_DEC" queries\n",
116 alias_stats.ref_maybe_used_by_call_p_no_alias,
117 alias_stats.ref_maybe_used_by_call_p_no_alias
118 + alias_stats.ref_maybe_used_by_call_p_may_alias);
119 fprintf (s, " call_may_clobber_ref_p: "
120 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
121 HOST_WIDE_INT_PRINT_DEC" queries\n",
122 alias_stats.call_may_clobber_ref_p_no_alias,
123 alias_stats.call_may_clobber_ref_p_no_alias
124 + alias_stats.call_may_clobber_ref_p_may_alias);
125 dump_alias_stats_in_alias_c (s);
129 /* Return true if dereferencing PTR may alias a global variable. */
131 bool
132 ptr_deref_may_alias_global_p (tree ptr)
134 struct ptr_info_def *pi;
136 /* If we end up with a pointer constant here, it may point
137 to global memory. */
138 if (TREE_CODE (ptr) != SSA_NAME)
139 return true;
141 pi = SSA_NAME_PTR_INFO (ptr);
143 /* If we do not have points-to information for this variable,
144 we have to punt. */
145 if (!pi)
146 return true;
148 /* ??? This does not use TBAA to prune globals ptr may not access. */
149 return pt_solution_includes_global (&pi->pt);
152 /* Return true if dereferencing PTR may alias DECL.
153 The caller is responsible for applying TBAA to see if PTR
154 may access DECL at all. */
156 static bool
157 ptr_deref_may_alias_decl_p (tree ptr, tree decl)
159 struct ptr_info_def *pi;
161 /* Conversions are irrelevant for points-to information and
162 data-dependence analysis can feed us those. */
163 STRIP_NOPS (ptr);
165 /* Anything we do not explicitly handle aliases. */
166 if ((TREE_CODE (ptr) != SSA_NAME
167 && TREE_CODE (ptr) != ADDR_EXPR
168 && TREE_CODE (ptr) != POINTER_PLUS_EXPR)
169 || !POINTER_TYPE_P (TREE_TYPE (ptr))
170 || (TREE_CODE (decl) != VAR_DECL
171 && TREE_CODE (decl) != PARM_DECL
172 && TREE_CODE (decl) != RESULT_DECL))
173 return true;
175 /* Disregard pointer offsetting. */
176 if (TREE_CODE (ptr) == POINTER_PLUS_EXPR)
177 {
178 do
179 {
180 ptr = TREE_OPERAND (ptr, 0);
181 }
182 while (TREE_CODE (ptr) == POINTER_PLUS_EXPR);
183 return ptr_deref_may_alias_decl_p (ptr, decl);
184 }
186 /* ADDR_EXPR pointers either just offset another pointer or directly
187 specify the pointed-to set. */
188 if (TREE_CODE (ptr) == ADDR_EXPR)
190 tree base = get_base_address (TREE_OPERAND (ptr, 0));
191 if (base
192 && (TREE_CODE (base) == MEM_REF
193 || TREE_CODE (base) == TARGET_MEM_REF))
194 ptr = TREE_OPERAND (base, 0);
195 else if (base
196 && DECL_P (base))
197 return compare_base_decls (base, decl) != 0;
198 else if (base
199 && CONSTANT_CLASS_P (base))
200 return false;
201 else
202 return true;
205 /* Non-aliased variables cannot be pointed to. */
206 if (!may_be_aliased (decl))
207 return false;
209 /* If we do not have useful points-to information for this pointer
210 we cannot disambiguate anything else. */
211 pi = SSA_NAME_PTR_INFO (ptr);
212 if (!pi)
213 return true;
215 return pt_solution_includes (&pi->pt, decl);
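/* For instance (a sketch, not from this file): given

     int a, b;
     int *p = &a;

   points-to analysis records that p points only to `a', so a query
   against `b' can return false while a query against `a' remains
   conservatively true.  */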
218 /* Return true if dereferenced PTR1 and PTR2 may alias.
219 The caller is responsible for applying TBAA to see if accesses
220 through PTR1 and PTR2 may conflict at all. */
222 bool
223 ptr_derefs_may_alias_p (tree ptr1, tree ptr2)
225 struct ptr_info_def *pi1, *pi2;
227 /* Conversions are irrelevant for points-to information and
228 data-dependence analysis can feed us those. */
229 STRIP_NOPS (ptr1);
230 STRIP_NOPS (ptr2);
232 /* Disregard pointer offsetting. */
233 if (TREE_CODE (ptr1) == POINTER_PLUS_EXPR)
234 {
235 do
236 {
237 ptr1 = TREE_OPERAND (ptr1, 0);
238 }
239 while (TREE_CODE (ptr1) == POINTER_PLUS_EXPR);
240 return ptr_derefs_may_alias_p (ptr1, ptr2);
241 }
242 if (TREE_CODE (ptr2) == POINTER_PLUS_EXPR)
243 {
244 do
245 {
246 ptr2 = TREE_OPERAND (ptr2, 0);
247 }
248 while (TREE_CODE (ptr2) == POINTER_PLUS_EXPR);
249 return ptr_derefs_may_alias_p (ptr1, ptr2);
250 }
252 /* ADDR_EXPR pointers either just offset another pointer or directly
253 specify the pointed-to set. */
254 if (TREE_CODE (ptr1) == ADDR_EXPR)
256 tree base = get_base_address (TREE_OPERAND (ptr1, 0));
257 if (base
258 && (TREE_CODE (base) == MEM_REF
259 || TREE_CODE (base) == TARGET_MEM_REF))
260 return ptr_derefs_may_alias_p (TREE_OPERAND (base, 0), ptr2);
261 else if (base
262 && DECL_P (base))
263 return ptr_deref_may_alias_decl_p (ptr2, base);
264 else
265 return true;
267 if (TREE_CODE (ptr2) == ADDR_EXPR)
269 tree base = get_base_address (TREE_OPERAND (ptr2, 0));
270 if (base
271 && (TREE_CODE (base) == MEM_REF
272 || TREE_CODE (base) == TARGET_MEM_REF))
273 return ptr_derefs_may_alias_p (ptr1, TREE_OPERAND (base, 0));
274 else if (base
275 && DECL_P (base))
276 return ptr_deref_may_alias_decl_p (ptr1, base);
277 else
278 return true;
281 /* From here we require SSA name pointers. Anything else aliases. */
282 if (TREE_CODE (ptr1) != SSA_NAME
283 || TREE_CODE (ptr2) != SSA_NAME
284 || !POINTER_TYPE_P (TREE_TYPE (ptr1))
285 || !POINTER_TYPE_P (TREE_TYPE (ptr2)))
286 return true;
288 /* We may end up with two empty points-to solutions for the very same
289 pointer. In this case we still want to say both pointers alias, so
290 shortcut that here. */
291 if (ptr1 == ptr2)
292 return true;
294 /* If we do not have useful points-to information for either pointer
295 we cannot disambiguate anything else. */
296 pi1 = SSA_NAME_PTR_INFO (ptr1);
297 pi2 = SSA_NAME_PTR_INFO (ptr2);
298 if (!pi1 || !pi2)
299 return true;
301 /* ??? This does not use TBAA to prune decls from the intersection
302 that not both pointers may access. */
303 return pt_solutions_intersect (&pi1->pt, &pi2->pt);
306 /* Return true if dereferencing PTR may alias *REF.
307 The caller is responsible for applying TBAA to see if PTR
308 may access *REF at all. */
310 static bool
311 ptr_deref_may_alias_ref_p_1 (tree ptr, ao_ref *ref)
313 tree base = ao_ref_base (ref);
315 if (TREE_CODE (base) == MEM_REF
316 || TREE_CODE (base) == TARGET_MEM_REF)
317 return ptr_derefs_may_alias_p (ptr, TREE_OPERAND (base, 0));
318 else if (DECL_P (base))
319 return ptr_deref_may_alias_decl_p (ptr, base);
321 return true;
324 /* Returns true if a reference with base BASE may refer to global memory. */
326 static bool
327 ref_may_alias_global_p_1 (tree base)
329 if (DECL_P (base))
330 return is_global_var (base);
331 else if (TREE_CODE (base) == MEM_REF
332 || TREE_CODE (base) == TARGET_MEM_REF)
333 return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
334 return true;
337 bool
338 ref_may_alias_global_p (ao_ref *ref)
340 tree base = ao_ref_base (ref);
341 return ref_may_alias_global_p_1 (base);
344 bool
345 ref_may_alias_global_p (tree ref)
347 tree base = get_base_address (ref);
348 return ref_may_alias_global_p_1 (base);
351 /* Return true if STMT may clobber global memory. */
353 bool
354 stmt_may_clobber_global_p (gimple *stmt)
356 tree lhs;
358 if (!gimple_vdef (stmt))
359 return false;
361 /* ??? We could ask the oracle whether an artificial pointer
362 dereference, using a pointer whose points-to information covers
363 all global memory (what about non-address-taken memory?), may be
364 clobbered by this call. As there is at the moment no convenient
365 way of doing that without generating garbage, do some manual
366 checking instead.
367 ??? We could make a NULL ao_ref argument to the various
368 predicates special, meaning any global memory. */
370 switch (gimple_code (stmt))
372 case GIMPLE_ASSIGN:
373 lhs = gimple_assign_lhs (stmt);
374 return (TREE_CODE (lhs) != SSA_NAME
375 && ref_may_alias_global_p (lhs));
376 case GIMPLE_CALL:
377 return true;
378 default:
379 return true;
384 /* Dump alias information on FILE. */
386 void
387 dump_alias_info (FILE *file)
389 unsigned i;
390 const char *funcname
391 = lang_hooks.decl_printable_name (current_function_decl, 2);
392 tree var;
394 fprintf (file, "\n\nAlias information for %s\n\n", funcname);
396 fprintf (file, "Aliased symbols\n\n");
398 FOR_EACH_LOCAL_DECL (cfun, i, var)
400 if (may_be_aliased (var))
401 dump_variable (file, var);
404 fprintf (file, "\nCall clobber information\n");
406 fprintf (file, "\nESCAPED");
407 dump_points_to_solution (file, &cfun->gimple_df->escaped);
409 fprintf (file, "\n\nFlow-insensitive points-to information\n\n");
411 for (i = 1; i < num_ssa_names; i++)
413 tree ptr = ssa_name (i);
414 struct ptr_info_def *pi;
416 if (ptr == NULL_TREE
417 || !POINTER_TYPE_P (TREE_TYPE (ptr))
418 || SSA_NAME_IN_FREE_LIST (ptr))
419 continue;
421 pi = SSA_NAME_PTR_INFO (ptr);
422 if (pi)
423 dump_points_to_info_for (file, ptr);
426 fprintf (file, "\n");
430 /* Dump alias information on stderr. */
432 DEBUG_FUNCTION void
433 debug_alias_info (void)
435 dump_alias_info (stderr);
439 /* Dump the points-to set *PT into FILE. */
441 void
442 dump_points_to_solution (FILE *file, struct pt_solution *pt)
444 if (pt->anything)
445 fprintf (file, ", points-to anything");
447 if (pt->nonlocal)
448 fprintf (file, ", points-to non-local");
450 if (pt->escaped)
451 fprintf (file, ", points-to escaped");
453 if (pt->ipa_escaped)
454 fprintf (file, ", points-to unit escaped");
456 if (pt->null)
457 fprintf (file, ", points-to NULL");
459 if (pt->vars)
461 fprintf (file, ", points-to vars: ");
462 dump_decl_set (file, pt->vars);
463 if (pt->vars_contains_nonlocal
464 && pt->vars_contains_escaped_heap)
465 fprintf (file, " (nonlocal, escaped heap)");
466 else if (pt->vars_contains_nonlocal
467 && pt->vars_contains_escaped)
468 fprintf (file, " (nonlocal, escaped)");
469 else if (pt->vars_contains_nonlocal)
470 fprintf (file, " (nonlocal)");
471 else if (pt->vars_contains_escaped_heap)
472 fprintf (file, " (escaped heap)");
473 else if (pt->vars_contains_escaped)
474 fprintf (file, " (escaped)");
479 /* Unified dump function for pt_solution. */
481 DEBUG_FUNCTION void
482 debug (pt_solution &ref)
484 dump_points_to_solution (stderr, &ref);
487 DEBUG_FUNCTION void
488 debug (pt_solution *ptr)
490 if (ptr)
491 debug (*ptr);
492 else
493 fprintf (stderr, "<nil>\n");
497 /* Dump points-to information for SSA_NAME PTR into FILE. */
499 void
500 dump_points_to_info_for (FILE *file, tree ptr)
502 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
504 print_generic_expr (file, ptr, dump_flags);
506 if (pi)
507 dump_points_to_solution (file, &pi->pt);
508 else
509 fprintf (file, ", points-to anything");
511 fprintf (file, "\n");
515 /* Dump points-to information for VAR into stderr. */
517 DEBUG_FUNCTION void
518 debug_points_to_info_for (tree var)
520 dump_points_to_info_for (stderr, var);
524 /* Initializes the alias-oracle reference representation *R from REF. */
526 void
527 ao_ref_init (ao_ref *r, tree ref)
529 r->ref = ref;
530 r->base = NULL_TREE;
531 r->offset = 0;
532 r->size = -1;
533 r->max_size = -1;
534 r->ref_alias_set = -1;
535 r->base_alias_set = -1;
536 r->volatile_p = ref ? TREE_THIS_VOLATILE (ref) : false;
539 /* Returns the base object of the memory reference *REF. */
541 tree
542 ao_ref_base (ao_ref *ref)
544 bool reverse;
546 if (ref->base)
547 return ref->base;
548 ref->base = get_ref_base_and_extent (ref->ref, &ref->offset, &ref->size,
549 &ref->max_size, &reverse);
550 return ref->base;
553 /* Returns the base object alias set of the memory reference *REF. */
555 alias_set_type
556 ao_ref_base_alias_set (ao_ref *ref)
558 tree base_ref;
559 if (ref->base_alias_set != -1)
560 return ref->base_alias_set;
561 if (!ref->ref)
562 return 0;
563 base_ref = ref->ref;
564 while (handled_component_p (base_ref))
565 base_ref = TREE_OPERAND (base_ref, 0);
566 ref->base_alias_set = get_alias_set (base_ref);
567 return ref->base_alias_set;
570 /* Returns the reference alias set of the memory reference *REF. */
572 alias_set_type
573 ao_ref_alias_set (ao_ref *ref)
575 if (ref->ref_alias_set != -1)
576 return ref->ref_alias_set;
577 ref->ref_alias_set = get_alias_set (ref->ref);
578 return ref->ref_alias_set;
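/* Typical lifecycle of the ao_ref accessors above (a sketch; REF is
   any memory reference tree supplied by the caller):

     ao_ref r;
     ao_ref_init (&r, ref);
     tree base = ao_ref_base (&r);
     alias_set_type set = ao_ref_alias_set (&r);

   Base, extent and alias sets are computed lazily on first use and
   cached in the ao_ref, so repeated disambiguations are cheap.  */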
581 /* Init an alias-oracle reference representation from a gimple pointer
582 PTR and a gimple size SIZE in bytes. If SIZE is NULL_TREE then the
583 size is assumed to be unknown. The access is assumed to be only
584 at or after the pointer target, not before it. */
586 void
587 ao_ref_init_from_ptr_and_size (ao_ref *ref, tree ptr, tree size)
589 HOST_WIDE_INT t, size_hwi, extra_offset = 0;
590 ref->ref = NULL_TREE;
591 if (TREE_CODE (ptr) == SSA_NAME)
593 gimple *stmt = SSA_NAME_DEF_STMT (ptr);
594 if (gimple_assign_single_p (stmt)
595 && gimple_assign_rhs_code (stmt) == ADDR_EXPR)
596 ptr = gimple_assign_rhs1 (stmt);
597 else if (is_gimple_assign (stmt)
598 && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
599 && TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST)
601 ptr = gimple_assign_rhs1 (stmt);
602 extra_offset = BITS_PER_UNIT
603 * int_cst_value (gimple_assign_rhs2 (stmt));
607 if (TREE_CODE (ptr) == ADDR_EXPR)
609 ref->base = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &t);
610 if (ref->base)
611 ref->offset = BITS_PER_UNIT * t;
612 else
614 size = NULL_TREE;
615 ref->offset = 0;
616 ref->base = get_base_address (TREE_OPERAND (ptr, 0));
619 else
621 ref->base = build2 (MEM_REF, char_type_node,
622 ptr, null_pointer_node);
623 ref->offset = 0;
625 ref->offset += extra_offset;
626 if (size
627 && tree_fits_shwi_p (size)
628 && (size_hwi = tree_to_shwi (size)) <= HOST_WIDE_INT_MAX / BITS_PER_UNIT)
629 ref->max_size = ref->size = size_hwi * BITS_PER_UNIT;
630 else
631 ref->max_size = ref->size = -1;
632 ref->ref_alias_set = 0;
633 ref->base_alias_set = 0;
634 ref->volatile_p = false;
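/* For example (a simplified sketch of the builtin handling later in
   this file), the source of a memcpy-like call can be described from
   its pointer argument 1 and size argument 2 as

     ao_ref dref;
     ao_ref_init_from_ptr_and_size (&dref,
				    gimple_call_arg (call, 1),
				    gimple_call_arg (call, 2));

   and then disambiguated against another ao_ref with
   refs_may_alias_p_1.  */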
637 /* Return 1 if TYPE1 and TYPE2 are to be considered equivalent for the
638 purpose of TBAA. Return 0 if they are distinct and -1 if we cannot
639 decide. */
641 static inline int
642 same_type_for_tbaa (tree type1, tree type2)
644 type1 = TYPE_MAIN_VARIANT (type1);
645 type2 = TYPE_MAIN_VARIANT (type2);
647 /* If we would have to do structural comparison, bail out. */
648 if (TYPE_STRUCTURAL_EQUALITY_P (type1)
649 || TYPE_STRUCTURAL_EQUALITY_P (type2))
650 return -1;
652 /* Compare the canonical types. */
653 if (TYPE_CANONICAL (type1) == TYPE_CANONICAL (type2))
654 return 1;
656 /* ??? Array types are not properly unified in all cases as we have
657 spurious changes in the index types for example. Removing this
658 causes all sorts of problems with the Fortran frontend. */
659 if (TREE_CODE (type1) == ARRAY_TYPE
660 && TREE_CODE (type2) == ARRAY_TYPE)
661 return -1;
663 /* ??? In Ada, an lvalue of an unconstrained type can be used to access an
664 object of one of its constrained subtypes, e.g. when a function with an
665 unconstrained parameter passed by reference is called on an object and
666 inlined. But, even in the case of a fixed size, type and subtypes are
667 not equivalent enough as to share the same TYPE_CANONICAL, since this
668 would mean that conversions between them are useless, whereas they are
669 not (e.g. type and subtypes can have different modes). So, in the end,
670 they are only guaranteed to have the same alias set. */
671 if (get_alias_set (type1) == get_alias_set (type2))
672 return -1;
674 /* The types are known to be not equal. */
675 return 0;
678 /* Determine if the two component references REF1 and REF2 which are
679 based on access types TYPE1 and TYPE2 and of which at least one is based
680 on an indirect reference may alias. REF2 is the only one that can
681 be a decl in which case REF2_IS_DECL is true.
682 REF1_ALIAS_SET, BASE1_ALIAS_SET, REF2_ALIAS_SET and BASE2_ALIAS_SET
683 are the respective alias sets. */
685 static bool
686 aliasing_component_refs_p (tree ref1,
687 alias_set_type ref1_alias_set,
688 alias_set_type base1_alias_set,
689 HOST_WIDE_INT offset1, HOST_WIDE_INT max_size1,
690 tree ref2,
691 alias_set_type ref2_alias_set,
692 alias_set_type base2_alias_set,
693 HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2,
694 bool ref2_is_decl)
696 /* If one reference is a component reference through pointers, try to find a
697 common base and apply offset based disambiguation. This handles
698 for example
699 struct A { int i; int j; } *q;
700 struct B { struct A a; int k; } *p;
701 disambiguating q->i and p->a.j. */
702 tree base1, base2;
703 tree type1, type2;
704 tree *refp;
705 int same_p;
707 /* Choose bases and base types to search for. */
708 base1 = ref1;
709 while (handled_component_p (base1))
710 base1 = TREE_OPERAND (base1, 0);
711 type1 = TREE_TYPE (base1);
712 base2 = ref2;
713 while (handled_component_p (base2))
714 base2 = TREE_OPERAND (base2, 0);
715 type2 = TREE_TYPE (base2);
717 /* Now search for the type1 in the access path of ref2. This
718 would be a common base for doing offset based disambiguation on. */
719 refp = &ref2;
720 while (handled_component_p (*refp)
721 && same_type_for_tbaa (TREE_TYPE (*refp), type1) == 0)
722 refp = &TREE_OPERAND (*refp, 0);
723 same_p = same_type_for_tbaa (TREE_TYPE (*refp), type1);
724 /* If we couldn't compare types we have to bail out. */
725 if (same_p == -1)
726 return true;
727 else if (same_p == 1)
729 HOST_WIDE_INT offadj, sztmp, msztmp;
730 bool reverse;
731 get_ref_base_and_extent (*refp, &offadj, &sztmp, &msztmp, &reverse);
732 offset2 -= offadj;
733 get_ref_base_and_extent (base1, &offadj, &sztmp, &msztmp, &reverse);
734 offset1 -= offadj;
735 return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
737 /* If we didn't find a common base, try the other way around. */
738 refp = &ref1;
739 while (handled_component_p (*refp)
740 && same_type_for_tbaa (TREE_TYPE (*refp), type2) == 0)
741 refp = &TREE_OPERAND (*refp, 0);
742 same_p = same_type_for_tbaa (TREE_TYPE (*refp), type2);
743 /* If we couldn't compare types we have to bail out. */
744 if (same_p == -1)
745 return true;
746 else if (same_p == 1)
748 HOST_WIDE_INT offadj, sztmp, msztmp;
749 bool reverse;
750 get_ref_base_and_extent (*refp, &offadj, &sztmp, &msztmp, &reverse);
751 offset1 -= offadj;
752 get_ref_base_and_extent (base2, &offadj, &sztmp, &msztmp, &reverse);
753 offset2 -= offadj;
754 return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
757 /* If we have two type access paths B1.path1 and B2.path2 they may
758 only alias if either B1 is in B2.path2 or B2 is in B1.path1.
759 But we can still have a path that goes B1.path1...B2.path2 with
760 a part that we do not see. So we can only disambiguate now
761 if there is no B2 in the tail of path1 and no B1 on the
762 tail of path2. */
763 if (base1_alias_set == ref2_alias_set
764 || alias_set_subset_of (base1_alias_set, ref2_alias_set))
765 return true;
766 /* If this is ptr vs. decl then we know there is no ptr ... decl path. */
767 if (!ref2_is_decl)
768 return (base2_alias_set == ref1_alias_set
769 || alias_set_subset_of (base2_alias_set, ref1_alias_set));
770 return false;
773 /* Return true if we can determine that component references REF1 and REF2,
774 that are within a common DECL, cannot overlap. */
776 static bool
777 nonoverlapping_component_refs_of_decl_p (tree ref1, tree ref2)
779 auto_vec<tree, 16> component_refs1;
780 auto_vec<tree, 16> component_refs2;
782 /* Create the stack of handled components for REF1. */
783 while (handled_component_p (ref1))
785 component_refs1.safe_push (ref1);
786 ref1 = TREE_OPERAND (ref1, 0);
788 if (TREE_CODE (ref1) == MEM_REF)
790 if (!integer_zerop (TREE_OPERAND (ref1, 1)))
791 goto may_overlap;
792 ref1 = TREE_OPERAND (TREE_OPERAND (ref1, 0), 0);
795 /* Create the stack of handled components for REF2. */
796 while (handled_component_p (ref2))
798 component_refs2.safe_push (ref2);
799 ref2 = TREE_OPERAND (ref2, 0);
801 if (TREE_CODE (ref2) == MEM_REF)
803 if (!integer_zerop (TREE_OPERAND (ref2, 1)))
804 goto may_overlap;
805 ref2 = TREE_OPERAND (TREE_OPERAND (ref2, 0), 0);
808 /* Bases must be either same or uncomparable. */
809 gcc_checking_assert (ref1 == ref2
810 || (DECL_P (ref1) && DECL_P (ref2)
811 && compare_base_decls (ref1, ref2) != 0));
813 /* Pop the stacks in parallel and examine the COMPONENT_REFs of the same
814 rank. This is sufficient because we start from the same DECL and you
815 cannot reference several fields at a time with COMPONENT_REFs (unlike
816 with ARRAY_RANGE_REFs for arrays) so you always need the same number
817 of them to access a sub-component, unless you're in a union, in which
818 case the return value will precisely be false. */
819 while (true)
820 {
821 do
822 {
823 if (component_refs1.is_empty ())
824 goto may_overlap;
825 ref1 = component_refs1.pop ();
826 }
827 while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref1, 0))));
829 do
830 {
831 if (component_refs2.is_empty ())
832 goto may_overlap;
833 ref2 = component_refs2.pop ();
834 }
835 while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref2, 0))));
837 /* Beware of BIT_FIELD_REF. */
838 if (TREE_CODE (ref1) != COMPONENT_REF
839 || TREE_CODE (ref2) != COMPONENT_REF)
840 goto may_overlap;
842 tree field1 = TREE_OPERAND (ref1, 1);
843 tree field2 = TREE_OPERAND (ref2, 1);
845 /* ??? We cannot simply use the type of operand #0 of the refs here
846 as the Fortran compiler smuggles type punning into COMPONENT_REFs
847 for common blocks instead of using unions like everyone else. */
848 tree type1 = DECL_CONTEXT (field1);
849 tree type2 = DECL_CONTEXT (field2);
851 /* We cannot disambiguate fields in a union or qualified union. */
852 if (type1 != type2 || TREE_CODE (type1) != RECORD_TYPE)
853 goto may_overlap;
855 /* Different fields of the same record type cannot overlap.
856 ??? Bitfields can overlap at RTL level so punt on them. */
857 if (field1 != field2)
859 component_refs1.release ();
860 component_refs2.release ();
861 return !(DECL_BIT_FIELD (field1) && DECL_BIT_FIELD (field2));
865 may_overlap:
866 component_refs1.release ();
867 component_refs2.release ();
868 return false;
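/* As an illustration (a sketch): for

     struct S { int a; int b; } s;

   the references s.a and s.b reach the common DECL `s' through
   COMPONENT_REFs with distinct FIELD_DECLs of the same RECORD_TYPE,
   so the function above returns true (no overlap); for fields of a
   union it falls through to may_overlap and returns false.  */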
871 /* qsort compare function to sort FIELD_DECLs by their
872 DECL_FIELD_CONTEXT TYPE_UID. */
874 static inline int
875 ncr_compar (const void *field1_, const void *field2_)
877 const_tree field1 = *(const_tree *) const_cast <void *>(field1_);
878 const_tree field2 = *(const_tree *) const_cast <void *>(field2_);
879 unsigned int uid1 = TYPE_UID (DECL_FIELD_CONTEXT (field1));
880 unsigned int uid2 = TYPE_UID (DECL_FIELD_CONTEXT (field2));
881 if (uid1 < uid2)
882 return -1;
883 else if (uid1 > uid2)
884 return 1;
885 return 0;
888 /* Return true if we can determine that the fields referenced cannot
889 overlap for any pair of objects. */
891 static bool
892 nonoverlapping_component_refs_p (const_tree x, const_tree y)
894 if (!flag_strict_aliasing
895 || !x || !y
896 || TREE_CODE (x) != COMPONENT_REF
897 || TREE_CODE (y) != COMPONENT_REF)
898 return false;
900 auto_vec<const_tree, 16> fieldsx;
901 while (TREE_CODE (x) == COMPONENT_REF)
903 tree field = TREE_OPERAND (x, 1);
904 tree type = DECL_FIELD_CONTEXT (field);
905 if (TREE_CODE (type) == RECORD_TYPE)
906 fieldsx.safe_push (field);
907 x = TREE_OPERAND (x, 0);
909 if (fieldsx.length () == 0)
910 return false;
911 auto_vec<const_tree, 16> fieldsy;
912 while (TREE_CODE (y) == COMPONENT_REF)
914 tree field = TREE_OPERAND (y, 1);
915 tree type = DECL_FIELD_CONTEXT (field);
916 if (TREE_CODE (type) == RECORD_TYPE)
917 fieldsy.safe_push (field);
918 y = TREE_OPERAND (y, 0);
920 if (fieldsy.length () == 0)
921 return false;
923 /* Most common case first. */
924 if (fieldsx.length () == 1
925 && fieldsy.length () == 1)
926 return ((DECL_FIELD_CONTEXT (fieldsx[0])
927 == DECL_FIELD_CONTEXT (fieldsy[0]))
928 && fieldsx[0] != fieldsy[0]
929 && !(DECL_BIT_FIELD (fieldsx[0]) && DECL_BIT_FIELD (fieldsy[0])));
931 if (fieldsx.length () == 2)
933 if (ncr_compar (&fieldsx[0], &fieldsx[1]) == 1)
934 std::swap (fieldsx[0], fieldsx[1]);
936 else
937 fieldsx.qsort (ncr_compar);
939 if (fieldsy.length () == 2)
941 if (ncr_compar (&fieldsy[0], &fieldsy[1]) == 1)
942 std::swap (fieldsy[0], fieldsy[1]);
944 else
945 fieldsy.qsort (ncr_compar);
947 unsigned i = 0, j = 0;
948 do
949 {
950 const_tree fieldx = fieldsx[i];
951 const_tree fieldy = fieldsy[j];
952 tree typex = DECL_FIELD_CONTEXT (fieldx);
953 tree typey = DECL_FIELD_CONTEXT (fieldy);
954 if (typex == typey)
955 {
956 /* We're left with accessing different fields of a structure,
957 no possible overlap, unless they are both bitfields. */
958 if (fieldx != fieldy)
959 return !(DECL_BIT_FIELD (fieldx) && DECL_BIT_FIELD (fieldy));
960 }
961 if (TYPE_UID (typex) < TYPE_UID (typey))
962 {
963 i++;
964 if (i == fieldsx.length ())
965 break;
966 }
967 else
968 {
969 j++;
970 if (j == fieldsy.length ())
971 break;
972 }
973 }
974 while (1);
976 return false;
980 /* Return true if two memory references based on the variables BASE1
981 and BASE2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
982 [OFFSET2, OFFSET2 + MAX_SIZE2) may alias. REF1 and REF2
983 if non-NULL are the complete memory reference trees. */
985 static bool
986 decl_refs_may_alias_p (tree ref1, tree base1,
987 HOST_WIDE_INT offset1, HOST_WIDE_INT max_size1,
988 tree ref2, tree base2,
989 HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2)
991 gcc_checking_assert (DECL_P (base1) && DECL_P (base2));
993 /* If both references are based on different variables, they cannot alias. */
994 if (compare_base_decls (base1, base2) == 0)
995 return false;
997 /* If both references are based on the same variable, they cannot alias if
998 the accesses do not overlap. */
999 if (!ranges_overlap_p (offset1, max_size1, offset2, max_size2))
1000 return false;
1002 /* For components with variable position, the above test isn't sufficient,
1003 so we disambiguate component references manually. */
1004 if (ref1 && ref2
1005 && handled_component_p (ref1) && handled_component_p (ref2)
1006 && nonoverlapping_component_refs_of_decl_p (ref1, ref2))
1007 return false;
1009 return true;
1012 /* Return true if an indirect reference based on *PTR1 constrained
1013 to [OFFSET1, OFFSET1 + MAX_SIZE1) may alias a variable based on BASE2
1014 constrained to [OFFSET2, OFFSET2 + MAX_SIZE2). *PTR1 and BASE2 have
1015 the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
1016 in which case they are computed on-demand. REF1 and REF2
1017 if non-NULL are the complete memory reference trees. */
1019 static bool
1020 indirect_ref_may_alias_decl_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
1021 HOST_WIDE_INT offset1,
1022 HOST_WIDE_INT max_size1 ATTRIBUTE_UNUSED,
1023 alias_set_type ref1_alias_set,
1024 alias_set_type base1_alias_set,
1025 tree ref2 ATTRIBUTE_UNUSED, tree base2,
1026 HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2,
1027 alias_set_type ref2_alias_set,
1028 alias_set_type base2_alias_set, bool tbaa_p)
1030 tree ptr1;
1031 tree ptrtype1, dbase2;
1032 HOST_WIDE_INT offset1p = offset1, offset2p = offset2;
1033 HOST_WIDE_INT doffset1, doffset2;
1035 gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
1036 || TREE_CODE (base1) == TARGET_MEM_REF)
1037 && DECL_P (base2));
1039 ptr1 = TREE_OPERAND (base1, 0);
1041 /* The offset embedded in MEM_REFs can be negative. Bias them
1042 so that the resulting offset adjustment is positive. */
1043 offset_int moff = mem_ref_offset (base1);
1044 moff = wi::lshift (moff, LOG2_BITS_PER_UNIT);
1045 if (wi::neg_p (moff))
1046 offset2p += (-moff).to_short_addr ();
1047 else
1048 offset1p += moff.to_short_addr ();
1050 /* If only one reference is based on a variable, they cannot alias if
1051 the pointer access is beyond the extent of the variable access.
1052 (the pointer base cannot validly point to an offset less than zero
1053 of the variable).
1054 ??? IVOPTs creates bases that do not honor this restriction,
1055 so do not apply this optimization for TARGET_MEM_REFs. */
1056 if (TREE_CODE (base1) != TARGET_MEM_REF
1057 && !ranges_overlap_p (MAX (0, offset1p), -1, offset2p, max_size2))
1058 return false;
1059 /* They also cannot alias if the pointer may not point to the decl. */
1060 if (!ptr_deref_may_alias_decl_p (ptr1, base2))
1061 return false;
1063 /* Disambiguations that rely on strict aliasing rules follow. */
1064 if (!flag_strict_aliasing || !tbaa_p)
1065 return true;
1067 ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));
1069 /* If the alias set for a pointer access is zero all bets are off. */
1070 if (base1_alias_set == -1)
1071 base1_alias_set = get_deref_alias_set (ptrtype1);
1072 if (base1_alias_set == 0)
1073 return true;
1074 if (base2_alias_set == -1)
1075 base2_alias_set = get_alias_set (base2);
1077 /* When we are trying to disambiguate an access with a pointer dereference
1078 as base versus one with a decl as base we can use both the size
1079 of the decl and its dynamic type for extra disambiguation.
1080 ??? We do not know anything about the dynamic type of the decl
1081 other than that its alias-set contains base2_alias_set as a subset
1082 which does not help us here. */
1083 /* As we know nothing useful about the dynamic type of the decl just
1084 use the usual conflict check rather than a subset test.
1085 ??? We could introduce -fvery-strict-aliasing when the language
1086 does not allow decls to have a dynamic type that differs from their
1087 static type. Then we can check
1088 !alias_set_subset_of (base1_alias_set, base2_alias_set) instead. */
1089 if (base1_alias_set != base2_alias_set
1090 && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
1091 return false;
1092 /* If the size of the access relevant for TBAA through the pointer
1093 is bigger than the size of the decl we can't possibly access the
1094 decl via that pointer. */
1095 if (DECL_SIZE (base2) && COMPLETE_TYPE_P (TREE_TYPE (ptrtype1))
1096 && TREE_CODE (DECL_SIZE (base2)) == INTEGER_CST
1097 && TREE_CODE (TYPE_SIZE (TREE_TYPE (ptrtype1))) == INTEGER_CST
1098 /* ??? This in turn may run afoul when a decl of type T which is
1099 a member of union type U is accessed through a pointer to
1100 type U and sizeof T is smaller than sizeof U. */
1101 && TREE_CODE (TREE_TYPE (ptrtype1)) != UNION_TYPE
1102 && TREE_CODE (TREE_TYPE (ptrtype1)) != QUAL_UNION_TYPE
1103 && tree_int_cst_lt (DECL_SIZE (base2), TYPE_SIZE (TREE_TYPE (ptrtype1))))
1104 return false;
1106 if (!ref2)
1107 return true;
1109 /* If the decl is accessed via a MEM_REF, reconstruct the base
1110 we can use for TBAA and an appropriately adjusted offset. */
1111 dbase2 = ref2;
1112 while (handled_component_p (dbase2))
1113 dbase2 = TREE_OPERAND (dbase2, 0);
1114 doffset1 = offset1;
1115 doffset2 = offset2;
1116 if (TREE_CODE (dbase2) == MEM_REF
1117 || TREE_CODE (dbase2) == TARGET_MEM_REF)
1119 offset_int moff = mem_ref_offset (dbase2);
1120 moff = wi::lshift (moff, LOG2_BITS_PER_UNIT);
1121 if (wi::neg_p (moff))
1122 doffset1 -= (-moff).to_short_addr ();
1123 else
1124 doffset2 -= moff.to_short_addr ();
1127 /* If either reference is view-converted, give up now. */
1128 if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1
1129 || same_type_for_tbaa (TREE_TYPE (dbase2), TREE_TYPE (base2)) != 1)
1130 return true;
1132 /* If both references are through the same type, they do not alias
1133 if the accesses do not overlap. This does extra disambiguation
1134 for mixed/pointer accesses but requires strict aliasing.
1135 For MEM_REFs we require that the component-ref offset we computed
1136 is relative to the start of the type which we ensure by
1137 comparing rvalue and access type and disregarding the constant
1138 pointer offset. */
1139 if ((TREE_CODE (base1) != TARGET_MEM_REF
1140 || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
1141 && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (dbase2)) == 1)
1142 return ranges_overlap_p (doffset1, max_size1, doffset2, max_size2);
1144 if (ref1 && ref2
1145 && nonoverlapping_component_refs_p (ref1, ref2))
1146 return false;
1148 /* Do access-path based disambiguation. */
1149 if (ref1 && ref2
1150 && (handled_component_p (ref1) || handled_component_p (ref2)))
1151 return aliasing_component_refs_p (ref1,
1152 ref1_alias_set, base1_alias_set,
1153 offset1, max_size1,
1154 ref2,
1155 ref2_alias_set, base2_alias_set,
1156 offset2, max_size2, true);
1158 return true;
1161 /* Return true if two indirect references based on *PTR1
1162 and *PTR2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
1163 [OFFSET2, OFFSET2 + MAX_SIZE2) may alias. *PTR1 and *PTR2 have
1164 the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
1165 in which case they are computed on-demand. REF1 and REF2
1166 if non-NULL are the complete memory reference trees. */
1168 static bool
1169 indirect_refs_may_alias_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
1170 HOST_WIDE_INT offset1, HOST_WIDE_INT max_size1,
1171 alias_set_type ref1_alias_set,
1172 alias_set_type base1_alias_set,
1173 tree ref2 ATTRIBUTE_UNUSED, tree base2,
1174 HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2,
1175 alias_set_type ref2_alias_set,
1176 alias_set_type base2_alias_set, bool tbaa_p)
1178 tree ptr1;
1179 tree ptr2;
1180 tree ptrtype1, ptrtype2;
1182 gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
1183 || TREE_CODE (base1) == TARGET_MEM_REF)
1184 && (TREE_CODE (base2) == MEM_REF
1185 || TREE_CODE (base2) == TARGET_MEM_REF));
1187 ptr1 = TREE_OPERAND (base1, 0);
1188 ptr2 = TREE_OPERAND (base2, 0);
1190 /* If both bases are based on pointers they cannot alias if they may not
1191 point to the same memory object or if they point to the same object
1192 and the accesses do not overlap. */
1193 if ((!cfun || gimple_in_ssa_p (cfun))
1194 && operand_equal_p (ptr1, ptr2, 0)
1195 && (((TREE_CODE (base1) != TARGET_MEM_REF
1196 || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
1197 && (TREE_CODE (base2) != TARGET_MEM_REF
1198 || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2))))
1199 || (TREE_CODE (base1) == TARGET_MEM_REF
1200 && TREE_CODE (base2) == TARGET_MEM_REF
1201 && (TMR_STEP (base1) == TMR_STEP (base2)
1202 || (TMR_STEP (base1) && TMR_STEP (base2)
1203 && operand_equal_p (TMR_STEP (base1),
1204 TMR_STEP (base2), 0)))
1205 && (TMR_INDEX (base1) == TMR_INDEX (base2)
1206 || (TMR_INDEX (base1) && TMR_INDEX (base2)
1207 && operand_equal_p (TMR_INDEX (base1),
1208 TMR_INDEX (base2), 0)))
1209 && (TMR_INDEX2 (base1) == TMR_INDEX2 (base2)
1210 || (TMR_INDEX2 (base1) && TMR_INDEX2 (base2)
1211 && operand_equal_p (TMR_INDEX2 (base1),
1212 TMR_INDEX2 (base2), 0))))))
1214 offset_int moff;
1215 /* The offset embedded in MEM_REFs can be negative. Bias them
1216 so that the resulting offset adjustment is positive. */
1217 moff = mem_ref_offset (base1);
1218 moff = wi::lshift (moff, LOG2_BITS_PER_UNIT);
1219 if (wi::neg_p (moff))
1220 offset2 += (-moff).to_short_addr ();
1221 else
1222 offset1 += moff.to_short_addr ();
1223 moff = mem_ref_offset (base2);
1224 moff = wi::lshift (moff, LOG2_BITS_PER_UNIT);
1225 if (wi::neg_p (moff))
1226 offset1 += (-moff).to_short_addr ();
1227 else
1228 offset2 += moff.to_short_addr ();
1229 return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
1231 if (!ptr_derefs_may_alias_p (ptr1, ptr2))
1232 return false;
1234 /* Disambiguations that rely on strict aliasing rules follow. */
1235 if (!flag_strict_aliasing || !tbaa_p)
1236 return true;
1238 ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));
1239 ptrtype2 = TREE_TYPE (TREE_OPERAND (base2, 1));
1241 /* If the alias set for a pointer access is zero all bets are off. */
1242 if (base1_alias_set == -1)
1243 base1_alias_set = get_deref_alias_set (ptrtype1);
1244 if (base1_alias_set == 0)
1245 return true;
1246 if (base2_alias_set == -1)
1247 base2_alias_set = get_deref_alias_set (ptrtype2);
1248 if (base2_alias_set == 0)
1249 return true;
1251 /* If both references are through the same type, they do not alias
1252 if the accesses do not overlap. This does extra disambiguation
1253 for mixed/pointer accesses but requires strict aliasing. */
1254 if ((TREE_CODE (base1) != TARGET_MEM_REF
1255 || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
1256 && (TREE_CODE (base2) != TARGET_MEM_REF
1257 || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2)))
1258 && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) == 1
1259 && same_type_for_tbaa (TREE_TYPE (base2), TREE_TYPE (ptrtype2)) == 1
1260 && same_type_for_tbaa (TREE_TYPE (ptrtype1),
1261 TREE_TYPE (ptrtype2)) == 1)
1262 return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
1264 /* Do type-based disambiguation. */
1265 if (base1_alias_set != base2_alias_set
1266 && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
1267 return false;
1269 /* If either reference is view-converted, give up now. */
1270 if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1
1271 || same_type_for_tbaa (TREE_TYPE (base2), TREE_TYPE (ptrtype2)) != 1)
1272 return true;
1274 if (ref1 && ref2
1275 && nonoverlapping_component_refs_p (ref1, ref2))
1276 return false;
1278 /* Do access-path based disambiguation. */
1279 if (ref1 && ref2
1280 && (handled_component_p (ref1) || handled_component_p (ref2)))
1281 return aliasing_component_refs_p (ref1,
1282 ref1_alias_set, base1_alias_set,
1283 offset1, max_size1,
1284 ref2,
1285 ref2_alias_set, base2_alias_set,
1286 offset2, max_size2, false);
1288 return true;
1291 /* Return true if the two memory references REF1 and REF2 may alias. */
1293 bool
1294 refs_may_alias_p_1 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
1296 tree base1, base2;
1297 HOST_WIDE_INT offset1 = 0, offset2 = 0;
1298 HOST_WIDE_INT max_size1 = -1, max_size2 = -1;
1299 bool var1_p, var2_p, ind1_p, ind2_p;
1301 gcc_checking_assert ((!ref1->ref
1302 || TREE_CODE (ref1->ref) == SSA_NAME
1303 || DECL_P (ref1->ref)
1304 || TREE_CODE (ref1->ref) == STRING_CST
1305 || handled_component_p (ref1->ref)
1306 || TREE_CODE (ref1->ref) == MEM_REF
1307 || TREE_CODE (ref1->ref) == TARGET_MEM_REF)
1308 && (!ref2->ref
1309 || TREE_CODE (ref2->ref) == SSA_NAME
1310 || DECL_P (ref2->ref)
1311 || TREE_CODE (ref2->ref) == STRING_CST
1312 || handled_component_p (ref2->ref)
1313 || TREE_CODE (ref2->ref) == MEM_REF
1314 || TREE_CODE (ref2->ref) == TARGET_MEM_REF));
1316 /* Decompose the references into their base objects and the access. */
1317 base1 = ao_ref_base (ref1);
1318 offset1 = ref1->offset;
1319 max_size1 = ref1->max_size;
1320 base2 = ao_ref_base (ref2);
1321 offset2 = ref2->offset;
1322 max_size2 = ref2->max_size;
1324 /* We can end up with registers or constants as bases for example from
1325 *D.1663_44 = VIEW_CONVERT_EXPR<struct DB_LSN>(__tmp$B0F64_59);
1326 which is seen as a struct copy. */
1327 if (TREE_CODE (base1) == SSA_NAME
1328 || TREE_CODE (base1) == CONST_DECL
1329 || TREE_CODE (base1) == CONSTRUCTOR
1330 || TREE_CODE (base1) == ADDR_EXPR
1331 || CONSTANT_CLASS_P (base1)
1332 || TREE_CODE (base2) == SSA_NAME
1333 || TREE_CODE (base2) == CONST_DECL
1334 || TREE_CODE (base2) == CONSTRUCTOR
1335 || TREE_CODE (base2) == ADDR_EXPR
1336 || CONSTANT_CLASS_P (base2))
1337 return false;
1339 /* We can end up referring to code via function and label decls.
1340 As we likely do not properly track code aliases, conservatively
1341 bail out. */
1342 if (TREE_CODE (base1) == FUNCTION_DECL
1343 || TREE_CODE (base1) == LABEL_DECL
1344 || TREE_CODE (base2) == FUNCTION_DECL
1345 || TREE_CODE (base2) == LABEL_DECL)
1346 return true;
1348 /* Two volatile accesses always conflict. */
1349 if (ref1->volatile_p
1350 && ref2->volatile_p)
1351 return true;
1353 /* Defer to simple offset based disambiguation if we have
1354 references based on two decls. Do this before deferring to
1355 TBAA to handle must-alias cases in conformance with the
1356 GCC extension of allowing type-punning through unions. */
1357 var1_p = DECL_P (base1);
1358 var2_p = DECL_P (base2);
1359 if (var1_p && var2_p)
1360 return decl_refs_may_alias_p (ref1->ref, base1, offset1, max_size1,
1361 ref2->ref, base2, offset2, max_size2);
1363 /* Handle restrict based accesses.
1364 ??? ao_ref_base strips inner MEM_REF [&decl], recover from that
1365 here. */
1366 tree rbase1 = base1;
1367 tree rbase2 = base2;
1368 if (var1_p)
1370 rbase1 = ref1->ref;
1371 if (rbase1)
1372 while (handled_component_p (rbase1))
1373 rbase1 = TREE_OPERAND (rbase1, 0);
1375 if (var2_p)
1377 rbase2 = ref2->ref;
1378 if (rbase2)
1379 while (handled_component_p (rbase2))
1380 rbase2 = TREE_OPERAND (rbase2, 0);
1382 if (rbase1 && rbase2
1383 && (TREE_CODE (base1) == MEM_REF || TREE_CODE (base1) == TARGET_MEM_REF)
1384 && (TREE_CODE (base2) == MEM_REF || TREE_CODE (base2) == TARGET_MEM_REF)
1385 /* If the accesses are in the same restrict clique... */
1386 && MR_DEPENDENCE_CLIQUE (base1) == MR_DEPENDENCE_CLIQUE (base2)
1387 /* But based on different pointers they do not alias. */
1388 && MR_DEPENDENCE_BASE (base1) != MR_DEPENDENCE_BASE (base2))
1389 return false;
1391 ind1_p = (TREE_CODE (base1) == MEM_REF
1392 || TREE_CODE (base1) == TARGET_MEM_REF);
1393 ind2_p = (TREE_CODE (base2) == MEM_REF
1394 || TREE_CODE (base2) == TARGET_MEM_REF);
1396 /* Canonicalize the pointer-vs-decl case. */
1397 if (ind1_p && var2_p)
1399 std::swap (offset1, offset2);
1400 std::swap (max_size1, max_size2);
1401 std::swap (base1, base2);
1402 std::swap (ref1, ref2);
1403 var1_p = true;
1404 ind1_p = false;
1405 var2_p = false;
1406 ind2_p = true;
1409 /* First defer to TBAA if possible. */
1410 if (tbaa_p
1411 && flag_strict_aliasing
1412 && !alias_sets_conflict_p (ao_ref_alias_set (ref1),
1413 ao_ref_alias_set (ref2)))
1414 return false;
1416 /* Dispatch to the pointer-vs-decl or pointer-vs-pointer disambiguators. */
1417 if (var1_p && ind2_p)
1418 return indirect_ref_may_alias_decl_p (ref2->ref, base2,
1419 offset2, max_size2,
1420 ao_ref_alias_set (ref2), -1,
1421 ref1->ref, base1,
1422 offset1, max_size1,
1423 ao_ref_alias_set (ref1),
1424 ao_ref_base_alias_set (ref1),
1425 tbaa_p);
1426 else if (ind1_p && ind2_p)
1427 return indirect_refs_may_alias_p (ref1->ref, base1,
1428 offset1, max_size1,
1429 ao_ref_alias_set (ref1), -1,
1430 ref2->ref, base2,
1431 offset2, max_size2,
1432 ao_ref_alias_set (ref2), -1,
1433 tbaa_p);
1435 gcc_unreachable ();
1438 static bool
1439 refs_may_alias_p (tree ref1, ao_ref *ref2)
1441 ao_ref r1;
1442 ao_ref_init (&r1, ref1);
1443 return refs_may_alias_p_1 (&r1, ref2, true);
1446 bool
1447 refs_may_alias_p (tree ref1, tree ref2)
1449 ao_ref r1, r2;
1450 bool res;
1451 ao_ref_init (&r1, ref1);
1452 ao_ref_init (&r2, ref2);
1453 res = refs_may_alias_p_1 (&r1, &r2, true);
1454 if (res)
1455 ++alias_stats.refs_may_alias_p_may_alias;
1456 else
1457 ++alias_stats.refs_may_alias_p_no_alias;
1458 return res;
1461 /* Returns true if there is an anti-dependence for the STORE that
1462 executes after the LOAD. */
1464 bool
1465 refs_anti_dependent_p (tree load, tree store)
1467 ao_ref r1, r2;
1468 ao_ref_init (&r1, load);
1469 ao_ref_init (&r2, store);
1470 return refs_may_alias_p_1 (&r1, &r2, false);
1473 /* Returns true if there is an output dependence for the stores
1474 STORE1 and STORE2. */
1476 bool
1477 refs_output_dependent_p (tree store1, tree store2)
1479 ao_ref r1, r2;
1480 ao_ref_init (&r1, store1);
1481 ao_ref_init (&r2, store2);
1482 return refs_may_alias_p_1 (&r1, &r2, false);
1485 /* If the call CALL may use the memory reference REF return true,
1486 otherwise return false. */
1488 static bool
1489 ref_maybe_used_by_call_p_1 (gcall *call, ao_ref *ref)
1491 tree base, callee;
1492 unsigned i;
1493 int flags = gimple_call_flags (call);
1495 /* Const functions without a static chain do not implicitly use memory. */
1496 if (!gimple_call_chain (call)
1497 && (flags & (ECF_CONST|ECF_NOVOPS)))
1498 goto process_args;
1500 base = ao_ref_base (ref);
1501 if (!base)
1502 return true;
1504 /* A call that is not without side-effects might involve volatile
1505 accesses and thus conflicts with all other volatile accesses. */
1506 if (ref->volatile_p)
1507 return true;
1509 /* If the reference is based on a decl that is not aliased the call
1510 cannot possibly use it. */
1511 if (DECL_P (base)
1512 && !may_be_aliased (base)
1513 /* But local statics can be used through recursion. */
1514 && !is_global_var (base))
1515 goto process_args;
1517 callee = gimple_call_fndecl (call);
1519 /* Handle those builtin functions explicitly that do not act as
1520 escape points. See tree-ssa-structalias.c:find_func_aliases
1521 for the list of builtins we might need to handle here. */
1522 if (callee != NULL_TREE
1523 && gimple_call_builtin_p (call, BUILT_IN_NORMAL))
1524 switch (DECL_FUNCTION_CODE (callee))
1526 /* All the following functions read memory pointed to by
1527 their second argument. strcat/strncat additionally
1528 read memory pointed to by the first argument. */
1529 case BUILT_IN_STRCAT:
1530 case BUILT_IN_STRNCAT:
1532 ao_ref dref;
1533 ao_ref_init_from_ptr_and_size (&dref,
1534 gimple_call_arg (call, 0),
1535 NULL_TREE);
1536 if (refs_may_alias_p_1 (&dref, ref, false))
1537 return true;
1539 /* FALLTHRU */
1540 case BUILT_IN_STRCPY:
1541 case BUILT_IN_STRNCPY:
1542 case BUILT_IN_MEMCPY:
1543 case BUILT_IN_MEMMOVE:
1544 case BUILT_IN_MEMPCPY:
1545 case BUILT_IN_STPCPY:
1546 case BUILT_IN_STPNCPY:
1547 case BUILT_IN_TM_MEMCPY:
1548 case BUILT_IN_TM_MEMMOVE:
1550 ao_ref dref;
1551 tree size = NULL_TREE;
1552 if (gimple_call_num_args (call) == 3)
1553 size = gimple_call_arg (call, 2);
1554 ao_ref_init_from_ptr_and_size (&dref,
1555 gimple_call_arg (call, 1),
1556 size);
1557 return refs_may_alias_p_1 (&dref, ref, false);
1559 case BUILT_IN_STRCAT_CHK:
1560 case BUILT_IN_STRNCAT_CHK:
1562 ao_ref dref;
1563 ao_ref_init_from_ptr_and_size (&dref,
1564 gimple_call_arg (call, 0),
1565 NULL_TREE);
1566 if (refs_may_alias_p_1 (&dref, ref, false))
1567 return true;
1569 /* FALLTHRU */
1570 case BUILT_IN_STRCPY_CHK:
1571 case BUILT_IN_STRNCPY_CHK:
1572 case BUILT_IN_MEMCPY_CHK:
1573 case BUILT_IN_MEMMOVE_CHK:
1574 case BUILT_IN_MEMPCPY_CHK:
1575 case BUILT_IN_STPCPY_CHK:
1576 case BUILT_IN_STPNCPY_CHK:
1578 ao_ref dref;
1579 tree size = NULL_TREE;
1580 if (gimple_call_num_args (call) == 4)
1581 size = gimple_call_arg (call, 2);
1582 ao_ref_init_from_ptr_and_size (&dref,
1583 gimple_call_arg (call, 1),
1584 size);
1585 return refs_may_alias_p_1 (&dref, ref, false);
1587 case BUILT_IN_BCOPY:
1589 ao_ref dref;
1590 tree size = gimple_call_arg (call, 2);
1591 ao_ref_init_from_ptr_and_size (&dref,
1592 gimple_call_arg (call, 0),
1593 size);
1594 return refs_may_alias_p_1 (&dref, ref, false);
1597 /* The following functions read memory pointed to by their
1598 first argument. */
1599 CASE_BUILT_IN_TM_LOAD (1):
1600 CASE_BUILT_IN_TM_LOAD (2):
1601 CASE_BUILT_IN_TM_LOAD (4):
1602 CASE_BUILT_IN_TM_LOAD (8):
1603 CASE_BUILT_IN_TM_LOAD (FLOAT):
1604 CASE_BUILT_IN_TM_LOAD (DOUBLE):
1605 CASE_BUILT_IN_TM_LOAD (LDOUBLE):
1606 CASE_BUILT_IN_TM_LOAD (M64):
1607 CASE_BUILT_IN_TM_LOAD (M128):
1608 CASE_BUILT_IN_TM_LOAD (M256):
1609 case BUILT_IN_TM_LOG:
1610 case BUILT_IN_TM_LOG_1:
1611 case BUILT_IN_TM_LOG_2:
1612 case BUILT_IN_TM_LOG_4:
1613 case BUILT_IN_TM_LOG_8:
1614 case BUILT_IN_TM_LOG_FLOAT:
1615 case BUILT_IN_TM_LOG_DOUBLE:
1616 case BUILT_IN_TM_LOG_LDOUBLE:
1617 case BUILT_IN_TM_LOG_M64:
1618 case BUILT_IN_TM_LOG_M128:
1619 case BUILT_IN_TM_LOG_M256:
1620 return ptr_deref_may_alias_ref_p_1 (gimple_call_arg (call, 0), ref);
1622 /* These read memory pointed to by the first argument. */
1623 case BUILT_IN_STRDUP:
1624 case BUILT_IN_STRNDUP:
1625 case BUILT_IN_REALLOC:
1627 ao_ref dref;
1628 tree size = NULL_TREE;
1629 if (gimple_call_num_args (call) == 2)
1630 size = gimple_call_arg (call, 1);
1631 ao_ref_init_from_ptr_and_size (&dref,
1632 gimple_call_arg (call, 0),
1633 size);
1634 return refs_may_alias_p_1 (&dref, ref, false);
1636 /* These read memory pointed to by the first argument. */
1637 case BUILT_IN_INDEX:
1638 case BUILT_IN_STRCHR:
1639 case BUILT_IN_STRRCHR:
1641 ao_ref dref;
1642 ao_ref_init_from_ptr_and_size (&dref,
1643 gimple_call_arg (call, 0),
1644 NULL_TREE);
1645 return refs_may_alias_p_1 (&dref, ref, false);
1647 /* These read memory pointed to by the first argument with size
1648 in the third argument. */
1649 case BUILT_IN_MEMCHR:
1651 ao_ref dref;
1652 ao_ref_init_from_ptr_and_size (&dref,
1653 gimple_call_arg (call, 0),
1654 gimple_call_arg (call, 2));
1655 return refs_may_alias_p_1 (&dref, ref, false);
1657 /* These read memory pointed to by the first and second arguments. */
1658 case BUILT_IN_STRSTR:
1659 case BUILT_IN_STRPBRK:
1661 ao_ref dref;
1662 ao_ref_init_from_ptr_and_size (&dref,
1663 gimple_call_arg (call, 0),
1664 NULL_TREE);
1665 if (refs_may_alias_p_1 (&dref, ref, false))
1666 return true;
1667 ao_ref_init_from_ptr_and_size (&dref,
1668 gimple_call_arg (call, 1),
1669 NULL_TREE);
1670 return refs_may_alias_p_1 (&dref, ref, false);
1673 /* The following builtins do not read from memory. */
1674 case BUILT_IN_FREE:
1675 case BUILT_IN_MALLOC:
1676 case BUILT_IN_POSIX_MEMALIGN:
1677 case BUILT_IN_ALIGNED_ALLOC:
1678 case BUILT_IN_CALLOC:
1679 case BUILT_IN_ALLOCA:
1680 case BUILT_IN_ALLOCA_WITH_ALIGN:
1681 case BUILT_IN_STACK_SAVE:
1682 case BUILT_IN_STACK_RESTORE:
1683 case BUILT_IN_MEMSET:
1684 case BUILT_IN_TM_MEMSET:
1685 case BUILT_IN_MEMSET_CHK:
1686 case BUILT_IN_FREXP:
1687 case BUILT_IN_FREXPF:
1688 case BUILT_IN_FREXPL:
1689 case BUILT_IN_GAMMA_R:
1690 case BUILT_IN_GAMMAF_R:
1691 case BUILT_IN_GAMMAL_R:
1692 case BUILT_IN_LGAMMA_R:
1693 case BUILT_IN_LGAMMAF_R:
1694 case BUILT_IN_LGAMMAL_R:
1695 case BUILT_IN_MODF:
1696 case BUILT_IN_MODFF:
1697 case BUILT_IN_MODFL:
1698 case BUILT_IN_REMQUO:
1699 case BUILT_IN_REMQUOF:
1700 case BUILT_IN_REMQUOL:
1701 case BUILT_IN_SINCOS:
1702 case BUILT_IN_SINCOSF:
1703 case BUILT_IN_SINCOSL:
1704 case BUILT_IN_ASSUME_ALIGNED:
1705 case BUILT_IN_VA_END:
1706 return false;
1707 /* __sync_* builtins and some OpenMP builtins act as threading
1708 barriers. */
1709 #undef DEF_SYNC_BUILTIN
1710 #define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
1711 #include "sync-builtins.def"
1712 #undef DEF_SYNC_BUILTIN
1713 case BUILT_IN_GOMP_ATOMIC_START:
1714 case BUILT_IN_GOMP_ATOMIC_END:
1715 case BUILT_IN_GOMP_BARRIER:
1716 case BUILT_IN_GOMP_BARRIER_CANCEL:
1717 case BUILT_IN_GOMP_TASKWAIT:
1718 case BUILT_IN_GOMP_TASKGROUP_END:
1719 case BUILT_IN_GOMP_CRITICAL_START:
1720 case BUILT_IN_GOMP_CRITICAL_END:
1721 case BUILT_IN_GOMP_CRITICAL_NAME_START:
1722 case BUILT_IN_GOMP_CRITICAL_NAME_END:
1723 case BUILT_IN_GOMP_LOOP_END:
1724 case BUILT_IN_GOMP_LOOP_END_CANCEL:
1725 case BUILT_IN_GOMP_ORDERED_START:
1726 case BUILT_IN_GOMP_ORDERED_END:
1727 case BUILT_IN_GOMP_SECTIONS_END:
1728 case BUILT_IN_GOMP_SECTIONS_END_CANCEL:
1729 case BUILT_IN_GOMP_SINGLE_COPY_START:
1730 case BUILT_IN_GOMP_SINGLE_COPY_END:
1731 return true;
1733 default:
1734 /* Fallthru to general call handling. */;
1737 /* Check if base is a global static variable that is not read
1738 by the function. */
1739 if (callee != NULL_TREE
1740 && TREE_CODE (base) == VAR_DECL
1741 && TREE_STATIC (base))
1743 struct cgraph_node *node = cgraph_node::get (callee);
1744 bitmap not_read;
1746 /* FIXME: Callee can be an OMP builtin that does not have a call graph
1747 node yet. We should enforce that there are nodes for all decls in the
1748 IL and remove this check instead. */
1749 if (node
1750 && (not_read = ipa_reference_get_not_read_global (node))
1751 && bitmap_bit_p (not_read, ipa_reference_var_uid (base)))
1752 goto process_args;
1755 /* Check if the base variable is call-used. */
1756 if (DECL_P (base))
1758 if (pt_solution_includes (gimple_call_use_set (call), base))
1759 return true;
1761 else if ((TREE_CODE (base) == MEM_REF
1762 || TREE_CODE (base) == TARGET_MEM_REF)
1763 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
1765 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
1766 if (!pi)
1767 return true;
1769 if (pt_solutions_intersect (gimple_call_use_set (call), &pi->pt))
1770 return true;
1772 else
1773 return true;
1775 /* Inspect call arguments for passed-by-value aliases. */
1776 process_args:
1777 for (i = 0; i < gimple_call_num_args (call); ++i)
1779 tree op = gimple_call_arg (call, i);
1780 int flags = gimple_call_arg_flags (call, i);
1782 if (flags & EAF_UNUSED)
1783 continue;
1785 if (TREE_CODE (op) == WITH_SIZE_EXPR)
1786 op = TREE_OPERAND (op, 0);
1788 if (TREE_CODE (op) != SSA_NAME
1789 && !is_gimple_min_invariant (op))
1791 ao_ref r;
1792 ao_ref_init (&r, op);
1793 if (refs_may_alias_p_1 (&r, ref, true))
1794 return true;
1798 return false;
1801 static bool
1802 ref_maybe_used_by_call_p (gcall *call, ao_ref *ref)
1804 bool res;
1805 res = ref_maybe_used_by_call_p_1 (call, ref);
1806 if (res)
1807 ++alias_stats.ref_maybe_used_by_call_p_may_alias;
1808 else
1809 ++alias_stats.ref_maybe_used_by_call_p_no_alias;
1810 return res;
1814 /* If the statement STMT may use the memory reference REF return
1815 true, otherwise return false. */
1817 bool
1818 ref_maybe_used_by_stmt_p (gimple *stmt, ao_ref *ref)
1820 if (is_gimple_assign (stmt))
1822 tree rhs;
1824 /* Any assignment that touches memory is a single (non-compound) one. */
1825 if (!gimple_assign_single_p (stmt))
1826 return false;
1828 rhs = gimple_assign_rhs1 (stmt);
1829 if (is_gimple_reg (rhs)
1830 || is_gimple_min_invariant (rhs)
1831 || gimple_assign_rhs_code (stmt) == CONSTRUCTOR)
1832 return false;
1834 return refs_may_alias_p (rhs, ref);
1836 else if (is_gimple_call (stmt))
1837 return ref_maybe_used_by_call_p (as_a <gcall *> (stmt), ref);
1838 else if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
1840 tree retval = gimple_return_retval (return_stmt);
1841 if (retval
1842 && TREE_CODE (retval) != SSA_NAME
1843 && !is_gimple_min_invariant (retval)
1844 && refs_may_alias_p (retval, ref))
1845 return true;
1846 /* If ref escapes the function then the return acts as a use. */
1847 tree base = ao_ref_base (ref);
1848 if (!base)
1850 else if (DECL_P (base))
1851 return is_global_var (base);
1852 else if (TREE_CODE (base) == MEM_REF
1853 || TREE_CODE (base) == TARGET_MEM_REF)
1854 return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
1855 return false;
1858 return true;
1861 bool
1862 ref_maybe_used_by_stmt_p (gimple *stmt, tree ref)
1864 ao_ref r;
1865 ao_ref_init (&r, ref);
1866 return ref_maybe_used_by_stmt_p (stmt, &r);
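/* A minimal usage sketch (hypothetical code, not part of this file):

     gimple *stmt = ...;   // some statement
     tree ref = ...;       // some memory reference tree
     if (!ref_maybe_used_by_stmt_p (stmt, ref))
       ;  // STMT provably reads no part of REF.

   Passes use such queries to prove that a load or store can be moved
   or removed across STMT. */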
1869 /* If the call in statement CALL may clobber the memory reference REF
1870 return true, otherwise return false. */
1872 bool
1873 call_may_clobber_ref_p_1 (gcall *call, ao_ref *ref)
1875 tree base;
1876 tree callee;
1878 /* If the call is pure or const it cannot clobber anything. */
1879 if (gimple_call_flags (call)
1880 & (ECF_PURE|ECF_CONST|ECF_LOOPING_CONST_OR_PURE|ECF_NOVOPS))
1881 return false;
1882 if (gimple_call_internal_p (call))
1883 switch (gimple_call_internal_fn (call))
1885 /* Treat these internal calls like ECF_PURE for aliasing:
1886 they don't write to any memory the program should care about.
1887 They have important other side-effects, and they read memory,
1888 so they can't be ECF_NOVOPS. */
1889 case IFN_UBSAN_NULL:
1890 case IFN_UBSAN_BOUNDS:
1891 case IFN_UBSAN_VPTR:
1892 case IFN_UBSAN_OBJECT_SIZE:
1893 case IFN_ASAN_CHECK:
1894 return false;
1895 default:
1896 break;
1899 base = ao_ref_base (ref);
1900 if (!base)
1901 return true;
1903 if (TREE_CODE (base) == SSA_NAME
1904 || CONSTANT_CLASS_P (base))
1905 return false;
1907 /* A call that is not free of side-effects might involve volatile
1908 accesses and thus conflicts with all other volatile accesses. */
1909 if (ref->volatile_p)
1910 return true;
1912 /* If the reference is based on a decl that is not aliased the call
1913 cannot possibly clobber it. */
1914 if (DECL_P (base)
1915 && !may_be_aliased (base)
1916 /* But local non-readonly statics can be modified through recursion
1917 or the call may implement a threading barrier which we must
1918 treat as may-def. */
1919 && (TREE_READONLY (base)
1920 || !is_global_var (base)))
1921 return false;
1923 callee = gimple_call_fndecl (call);
1925 /* Handle those builtin functions explicitly that do not act as
1926 escape points. See tree-ssa-structalias.c:find_func_aliases
1927 for the list of builtins we might need to handle here. */
1928 if (callee != NULL_TREE
1929 && gimple_call_builtin_p (call, BUILT_IN_NORMAL))
1930 switch (DECL_FUNCTION_CODE (callee))
1932 /* All the following functions clobber memory pointed to by
1933 their first argument. */
1934 case BUILT_IN_STRCPY:
1935 case BUILT_IN_STRNCPY:
1936 case BUILT_IN_MEMCPY:
1937 case BUILT_IN_MEMMOVE:
1938 case BUILT_IN_MEMPCPY:
1939 case BUILT_IN_STPCPY:
1940 case BUILT_IN_STPNCPY:
1941 case BUILT_IN_STRCAT:
1942 case BUILT_IN_STRNCAT:
1943 case BUILT_IN_MEMSET:
1944 case BUILT_IN_TM_MEMSET:
1945 CASE_BUILT_IN_TM_STORE (1):
1946 CASE_BUILT_IN_TM_STORE (2):
1947 CASE_BUILT_IN_TM_STORE (4):
1948 CASE_BUILT_IN_TM_STORE (8):
1949 CASE_BUILT_IN_TM_STORE (FLOAT):
1950 CASE_BUILT_IN_TM_STORE (DOUBLE):
1951 CASE_BUILT_IN_TM_STORE (LDOUBLE):
1952 CASE_BUILT_IN_TM_STORE (M64):
1953 CASE_BUILT_IN_TM_STORE (M128):
1954 CASE_BUILT_IN_TM_STORE (M256):
1955 case BUILT_IN_TM_MEMCPY:
1956 case BUILT_IN_TM_MEMMOVE:
1958 ao_ref dref;
1959 tree size = NULL_TREE;
1960 /* Don't pass in a size for strncat: the maximum store size is
1961 strlen (dest) + n + 1 bytes rather than n, i.e. n + 1 bytes
1962 at dest + strlen (dest), and strlen (dest) isn't
1963 known here. */
1964 if (gimple_call_num_args (call) == 3
1965 && DECL_FUNCTION_CODE (callee) != BUILT_IN_STRNCAT)
1966 size = gimple_call_arg (call, 2);
1967 ao_ref_init_from_ptr_and_size (&dref,
1968 gimple_call_arg (call, 0),
1969 size);
1970 return refs_may_alias_p_1 (&dref, ref, false);
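/* For instance (illustrative only):

     strncat (dest, src, 2)

   may store up to 3 bytes starting at dest + strlen (dest), so clamping
   the clobbered region to 2 bytes at dest would wrongly disambiguate a
   reference to dest[strlen (dest) + 2]. */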
1972 case BUILT_IN_STRCPY_CHK:
1973 case BUILT_IN_STRNCPY_CHK:
1974 case BUILT_IN_MEMCPY_CHK:
1975 case BUILT_IN_MEMMOVE_CHK:
1976 case BUILT_IN_MEMPCPY_CHK:
1977 case BUILT_IN_STPCPY_CHK:
1978 case BUILT_IN_STPNCPY_CHK:
1979 case BUILT_IN_STRCAT_CHK:
1980 case BUILT_IN_STRNCAT_CHK:
1981 case BUILT_IN_MEMSET_CHK:
1983 ao_ref dref;
1984 tree size = NULL_TREE;
1985 /* Don't pass in a size for __strncat_chk: the maximum store size
1986 is strlen (dest) + n + 1 bytes rather than n, i.e. n + 1 bytes
1987 at dest + strlen (dest), and strlen (dest) isn't
1988 known here. */
1989 if (gimple_call_num_args (call) == 4
1990 && DECL_FUNCTION_CODE (callee) != BUILT_IN_STRNCAT_CHK)
1991 size = gimple_call_arg (call, 2);
1992 ao_ref_init_from_ptr_and_size (&dref,
1993 gimple_call_arg (call, 0),
1994 size);
1995 return refs_may_alias_p_1 (&dref, ref, false);
1997 case BUILT_IN_BCOPY:
1999 ao_ref dref;
2000 tree size = gimple_call_arg (call, 2);
2001 ao_ref_init_from_ptr_and_size (&dref,
2002 gimple_call_arg (call, 1),
2003 size);
2004 return refs_may_alias_p_1 (&dref, ref, false);
2006 /* Allocating memory does not have any side-effects apart from
2007 being the definition point for the pointer. */
2008 case BUILT_IN_MALLOC:
2009 case BUILT_IN_ALIGNED_ALLOC:
2010 case BUILT_IN_CALLOC:
2011 case BUILT_IN_STRDUP:
2012 case BUILT_IN_STRNDUP:
2013 /* Unix98 specifies that errno is set on allocation failure. */
2014 if (flag_errno_math
2015 && targetm.ref_may_alias_errno (ref))
2016 return true;
2017 return false;
2018 case BUILT_IN_STACK_SAVE:
2019 case BUILT_IN_ALLOCA:
2020 case BUILT_IN_ALLOCA_WITH_ALIGN:
2021 case BUILT_IN_ASSUME_ALIGNED:
2022 return false;
2023 /* But posix_memalign stores a pointer into the memory pointed to
2024 by its first argument. */
2025 case BUILT_IN_POSIX_MEMALIGN:
2027 tree ptrptr = gimple_call_arg (call, 0);
2028 ao_ref dref;
2029 ao_ref_init_from_ptr_and_size (&dref, ptrptr,
2030 TYPE_SIZE_UNIT (ptr_type_node));
2031 return (refs_may_alias_p_1 (&dref, ref, false)
2032 || (flag_errno_math
2033 && targetm.ref_may_alias_errno (ref)));
2035 /* Freeing memory kills the pointed-to memory. More importantly
2036 the call has to serve as a barrier for moving loads and stores
2037 across it. */
2038 case BUILT_IN_FREE:
2039 case BUILT_IN_VA_END:
2041 tree ptr = gimple_call_arg (call, 0);
2042 return ptr_deref_may_alias_ref_p_1 (ptr, ref);
2044 /* Realloc serves both as allocation point and deallocation point. */
2045 case BUILT_IN_REALLOC:
2047 tree ptr = gimple_call_arg (call, 0);
2048 /* Unix98 specifies that errno is set on allocation failure. */
2049 return ((flag_errno_math
2050 && targetm.ref_may_alias_errno (ref))
2051 || ptr_deref_may_alias_ref_p_1 (ptr, ref));
2053 case BUILT_IN_GAMMA_R:
2054 case BUILT_IN_GAMMAF_R:
2055 case BUILT_IN_GAMMAL_R:
2056 case BUILT_IN_LGAMMA_R:
2057 case BUILT_IN_LGAMMAF_R:
2058 case BUILT_IN_LGAMMAL_R:
2060 tree out = gimple_call_arg (call, 1);
2061 if (ptr_deref_may_alias_ref_p_1 (out, ref))
2062 return true;
2063 if (flag_errno_math)
2064 break;
2065 return false;
2067 case BUILT_IN_FREXP:
2068 case BUILT_IN_FREXPF:
2069 case BUILT_IN_FREXPL:
2070 case BUILT_IN_MODF:
2071 case BUILT_IN_MODFF:
2072 case BUILT_IN_MODFL:
2074 tree out = gimple_call_arg (call, 1);
2075 return ptr_deref_may_alias_ref_p_1 (out, ref);
2077 case BUILT_IN_REMQUO:
2078 case BUILT_IN_REMQUOF:
2079 case BUILT_IN_REMQUOL:
2081 tree out = gimple_call_arg (call, 2);
2082 if (ptr_deref_may_alias_ref_p_1 (out, ref))
2083 return true;
2084 if (flag_errno_math)
2085 break;
2086 return false;
2088 case BUILT_IN_SINCOS:
2089 case BUILT_IN_SINCOSF:
2090 case BUILT_IN_SINCOSL:
2092 tree sin = gimple_call_arg (call, 1);
2093 tree cos = gimple_call_arg (call, 2);
2094 return (ptr_deref_may_alias_ref_p_1 (sin, ref)
2095 || ptr_deref_may_alias_ref_p_1 (cos, ref));
2097 /* __sync_* builtins and some OpenMP builtins act as threading
2098 barriers. */
2099 #undef DEF_SYNC_BUILTIN
2100 #define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
2101 #include "sync-builtins.def"
2102 #undef DEF_SYNC_BUILTIN
2103 case BUILT_IN_GOMP_ATOMIC_START:
2104 case BUILT_IN_GOMP_ATOMIC_END:
2105 case BUILT_IN_GOMP_BARRIER:
2106 case BUILT_IN_GOMP_BARRIER_CANCEL:
2107 case BUILT_IN_GOMP_TASKWAIT:
2108 case BUILT_IN_GOMP_TASKGROUP_END:
2109 case BUILT_IN_GOMP_CRITICAL_START:
2110 case BUILT_IN_GOMP_CRITICAL_END:
2111 case BUILT_IN_GOMP_CRITICAL_NAME_START:
2112 case BUILT_IN_GOMP_CRITICAL_NAME_END:
2113 case BUILT_IN_GOMP_LOOP_END:
2114 case BUILT_IN_GOMP_LOOP_END_CANCEL:
2115 case BUILT_IN_GOMP_ORDERED_START:
2116 case BUILT_IN_GOMP_ORDERED_END:
2117 case BUILT_IN_GOMP_SECTIONS_END:
2118 case BUILT_IN_GOMP_SECTIONS_END_CANCEL:
2119 case BUILT_IN_GOMP_SINGLE_COPY_START:
2120 case BUILT_IN_GOMP_SINGLE_COPY_END:
2121 return true;
2122 default:
2123 /* Fallthru to general call handling. */;
2126 /* Check if base is a global static variable that is not written
2127 by the function. */
2128 if (callee != NULL_TREE
2129 && TREE_CODE (base) == VAR_DECL
2130 && TREE_STATIC (base))
2132 struct cgraph_node *node = cgraph_node::get (callee);
2133 bitmap not_written;
2135 if (node
2136 && (not_written = ipa_reference_get_not_written_global (node))
2137 && bitmap_bit_p (not_written, ipa_reference_var_uid (base)))
2138 return false;
2141 /* Check if the base variable is call-clobbered. */
2142 if (DECL_P (base))
2143 return pt_solution_includes (gimple_call_clobber_set (call), base);
2144 else if ((TREE_CODE (base) == MEM_REF
2145 || TREE_CODE (base) == TARGET_MEM_REF)
2146 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
2148 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
2149 if (!pi)
2150 return true;
2152 return pt_solutions_intersect (gimple_call_clobber_set (call), &pi->pt);
2155 return true;
2158 /* If the call in statement CALL may clobber the memory reference REF
2159 return true, otherwise return false. */
2161 bool
2162 call_may_clobber_ref_p (gcall *call, tree ref)
2164 bool res;
2165 ao_ref r;
2166 ao_ref_init (&r, ref);
2167 res = call_may_clobber_ref_p_1 (call, &r);
2168 if (res)
2169 ++alias_stats.call_may_clobber_ref_p_may_alias;
2170 else
2171 ++alias_stats.call_may_clobber_ref_p_no_alias;
2172 return res;
2176 /* If the statement STMT may clobber the memory reference REF return true,
2177 otherwise return false. */
2179 bool
2180 stmt_may_clobber_ref_p_1 (gimple *stmt, ao_ref *ref)
2182 if (is_gimple_call (stmt))
2184 tree lhs = gimple_call_lhs (stmt);
2185 if (lhs
2186 && TREE_CODE (lhs) != SSA_NAME)
2188 ao_ref r;
2189 ao_ref_init (&r, lhs);
2190 if (refs_may_alias_p_1 (ref, &r, true))
2191 return true;
2194 return call_may_clobber_ref_p_1 (as_a <gcall *> (stmt), ref);
2196 else if (gimple_assign_single_p (stmt))
2198 tree lhs = gimple_assign_lhs (stmt);
2199 if (TREE_CODE (lhs) != SSA_NAME)
2201 ao_ref r;
2202 ao_ref_init (&r, lhs);
2203 return refs_may_alias_p_1 (ref, &r, true);
2206 else if (gimple_code (stmt) == GIMPLE_ASM)
2207 return true;
2209 return false;
2212 bool
2213 stmt_may_clobber_ref_p (gimple *stmt, tree ref)
2215 ao_ref r;
2216 ao_ref_init (&r, ref);
2217 return stmt_may_clobber_ref_p_1 (stmt, &r);
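/* For example (illustrative only), given

     *p_1 = 1;    // S1
     x_2 = *q_3;  // S2

   stmt_may_clobber_ref_p (S1, <*q_3>) is true unless points-to
   information or type-based rules prove that p_1 and q_3 cannot alias;
   only when it returns false may the load in S2 be hoisted across S1. */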
2220 /* If STMT kills the memory reference REF return true, otherwise
2221 return false. */
2223 bool
2224 stmt_kills_ref_p (gimple *stmt, ao_ref *ref)
2226 if (!ao_ref_base (ref))
2227 return false;
2229 if (gimple_has_lhs (stmt)
2230 && TREE_CODE (gimple_get_lhs (stmt)) != SSA_NAME
2231 /* The assignment is not necessarily carried out if it can throw
2232 and we can catch it in the current function where we could inspect
2233 the previous value.
2234 ??? We only need to care about the RHS throwing. For aggregate
2235 assignments or similar calls and non-call exceptions the LHS
2236 might throw as well. */
2237 && !stmt_can_throw_internal (stmt))
2239 tree lhs = gimple_get_lhs (stmt);
2240 /* If LHS is literally a base of the access we are done. */
2241 if (ref->ref)
2243 tree base = ref->ref;
2244 if (handled_component_p (base))
2246 tree saved_lhs0 = NULL_TREE;
2247 if (handled_component_p (lhs))
2249 saved_lhs0 = TREE_OPERAND (lhs, 0);
2250 TREE_OPERAND (lhs, 0) = integer_zero_node;
2254 /* Just compare the outermost handled component of each; if
2255 they are equal we have found a possible common
2256 base. */
2257 tree saved_base0 = TREE_OPERAND (base, 0);
2258 TREE_OPERAND (base, 0) = integer_zero_node;
2259 bool res = operand_equal_p (lhs, base, 0);
2260 TREE_OPERAND (base, 0) = saved_base0;
2261 if (res)
2262 break;
2263 /* Otherwise drop handled components of the access. */
2264 base = saved_base0;
2266 while (handled_component_p (base));
2267 if (saved_lhs0)
2268 TREE_OPERAND (lhs, 0) = saved_lhs0;
2270 /* Finally check if the lhs has the same address and size as the
2271 base candidate of the access. */
2272 if (lhs == base
2273 || (((TYPE_SIZE (TREE_TYPE (lhs))
2274 == TYPE_SIZE (TREE_TYPE (base)))
2275 || (TYPE_SIZE (TREE_TYPE (lhs))
2276 && TYPE_SIZE (TREE_TYPE (base))
2277 && operand_equal_p (TYPE_SIZE (TREE_TYPE (lhs)),
2278 TYPE_SIZE (TREE_TYPE (base)), 0)))
2279 && operand_equal_p (lhs, base, OEP_ADDRESS_OF)))
2280 return true;
2283 /* Now look for non-literal equal bases, with the restriction that
2284 we can only handle constant offset and size. */
2285 /* For a must-alias check we need to be able to constrain
2286 the access properly. */
2287 if (ref->max_size == -1)
2288 return false;
2289 HOST_WIDE_INT size, offset, max_size, ref_offset = ref->offset;
2290 bool reverse;
2291 tree base
2292 = get_ref_base_and_extent (lhs, &offset, &size, &max_size, &reverse);
2293 /* We can get MEM[symbol: sZ, index: D.8862_1] here,
2294 so base == ref->base does not always hold. */
2295 if (base != ref->base)
2297 /* If both base and ref->base are MEM_REFs, only compare the
2298 first operand, and if the second operands aren't equal
2299 constants, try to add the offsets into offset and ref_offset. */
2300 if (TREE_CODE (base) == MEM_REF && TREE_CODE (ref->base) == MEM_REF
2301 && TREE_OPERAND (base, 0) == TREE_OPERAND (ref->base, 0))
2303 if (!tree_int_cst_equal (TREE_OPERAND (base, 1),
2304 TREE_OPERAND (ref->base, 1)))
2306 offset_int off1 = mem_ref_offset (base);
2307 off1 = wi::lshift (off1, LOG2_BITS_PER_UNIT);
2308 off1 += offset;
2309 offset_int off2 = mem_ref_offset (ref->base);
2310 off2 = wi::lshift (off2, LOG2_BITS_PER_UNIT);
2311 off2 += ref_offset;
2312 if (wi::fits_shwi_p (off1) && wi::fits_shwi_p (off2))
2314 offset = off1.to_shwi ();
2315 ref_offset = off2.to_shwi ();
2317 else
2318 size = -1;
2321 else
2322 size = -1;
2324 /* For a must-alias check we need to be able to constrain
2325 the access properly. */
2326 if (size != -1 && size == max_size)
2328 if (offset <= ref_offset
2329 && offset + size >= ref_offset + ref->max_size)
2330 return true;
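/* Worked example (illustrative only; offsets and sizes are in bits):
   a store with offset 0 and size == max_size == 64 kills a reference
   with ref_offset 32 and ref->max_size 16, since 0 <= 32 and
   0 + 64 >= 32 + 16. */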
2334 if (is_gimple_call (stmt))
2336 tree callee = gimple_call_fndecl (stmt);
2337 if (callee != NULL_TREE
2338 && gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
2339 switch (DECL_FUNCTION_CODE (callee))
2341 case BUILT_IN_FREE:
2343 tree ptr = gimple_call_arg (stmt, 0);
2344 tree base = ao_ref_base (ref);
2345 if (base && TREE_CODE (base) == MEM_REF
2346 && TREE_OPERAND (base, 0) == ptr)
2347 return true;
2348 break;
2351 case BUILT_IN_MEMCPY:
2352 case BUILT_IN_MEMPCPY:
2353 case BUILT_IN_MEMMOVE:
2354 case BUILT_IN_MEMSET:
2355 case BUILT_IN_MEMCPY_CHK:
2356 case BUILT_IN_MEMPCPY_CHK:
2357 case BUILT_IN_MEMMOVE_CHK:
2358 case BUILT_IN_MEMSET_CHK:
2360 /* For a must-alias check we need to be able to constrain
2361 the access properly. */
2362 if (ref->max_size == -1)
2363 return false;
2364 tree dest = gimple_call_arg (stmt, 0);
2365 tree len = gimple_call_arg (stmt, 2);
2366 if (!tree_fits_shwi_p (len))
2367 return false;
2368 tree rbase = ref->base;
2369 offset_int roffset = ref->offset;
2370 ao_ref dref;
2371 ao_ref_init_from_ptr_and_size (&dref, dest, len);
2372 tree base = ao_ref_base (&dref);
2373 offset_int offset = dref.offset;
2374 if (!base || dref.size == -1)
2375 return false;
2376 if (TREE_CODE (base) == MEM_REF)
2378 if (TREE_CODE (rbase) != MEM_REF)
2379 return false;
2380 // Compare pointers.
2381 offset += wi::lshift (mem_ref_offset (base),
2382 LOG2_BITS_PER_UNIT);
2383 roffset += wi::lshift (mem_ref_offset (rbase),
2384 LOG2_BITS_PER_UNIT);
2385 base = TREE_OPERAND (base, 0);
2386 rbase = TREE_OPERAND (rbase, 0);
2388 if (base == rbase
2389 && wi::les_p (offset, roffset)
2390 && wi::les_p (roffset + ref->max_size,
2391 offset + wi::lshift (wi::to_offset (len),
2392 LOG2_BITS_PER_UNIT)))
2393 return true;
2394 break;
2397 case BUILT_IN_VA_END:
2399 tree ptr = gimple_call_arg (stmt, 0);
2400 if (TREE_CODE (ptr) == ADDR_EXPR)
2402 tree base = ao_ref_base (ref);
2403 if (TREE_OPERAND (ptr, 0) == base)
2404 return true;
2406 break;
2409 default:;
2412 return false;
2415 bool
2416 stmt_kills_ref_p (gimple *stmt, tree ref)
2418 ao_ref r;
2419 ao_ref_init (&r, ref);
2420 return stmt_kills_ref_p (stmt, &r);
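/* For instance (illustrative only): in

     a.x = tem_1;   // S1
     a.x = 5;       // S2

   stmt_kills_ref_p (S2, <a.x>) is true because S2 overwrites every bit
   of a.x, so dead store elimination may remove S1 when no statement in
   between uses a.x. */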
2424 /* Walk the virtual use-def chain of VUSE until hitting the virtual operand
2425 TARGET or a statement clobbering the memory reference REF, in which
2426 case false is returned. The walk starts with VUSE, one argument of PHI. */
2428 static bool
2429 maybe_skip_until (gimple *phi, tree target, ao_ref *ref,
2430 tree vuse, unsigned int *cnt, bitmap *visited,
2431 bool abort_on_visited,
2432 void *(*translate)(ao_ref *, tree, void *, bool *),
2433 void *data)
2435 basic_block bb = gimple_bb (phi);
2437 if (!*visited)
2438 *visited = BITMAP_ALLOC (NULL);
2440 bitmap_set_bit (*visited, SSA_NAME_VERSION (PHI_RESULT (phi)));
2442 /* Walk until we hit the target. */
2443 while (vuse != target)
2445 gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
2446 /* Recurse for PHI nodes. */
2447 if (gimple_code (def_stmt) == GIMPLE_PHI)
2449 /* An already visited PHI node ends the walk successfully. */
2450 if (bitmap_bit_p (*visited, SSA_NAME_VERSION (PHI_RESULT (def_stmt))))
2451 return !abort_on_visited;
2452 vuse = get_continuation_for_phi (def_stmt, ref, cnt,
2453 visited, abort_on_visited,
2454 translate, data);
2455 if (!vuse)
2456 return false;
2457 continue;
2459 else if (gimple_nop_p (def_stmt))
2460 return false;
2461 else
2463 /* A clobbering statement or the end of the IL ends the walk, failing. */
2464 ++*cnt;
2465 if (stmt_may_clobber_ref_p_1 (def_stmt, ref))
2467 bool disambiguate_only = true;
2468 if (translate
2469 && (*translate) (ref, vuse, data, &disambiguate_only) == NULL)
2471 else
2472 return false;
2475 /* If we reach a new basic-block, see if we already skipped it
2476 in a previous walk that ended successfully. */
2477 if (gimple_bb (def_stmt) != bb)
2479 if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (vuse)))
2480 return !abort_on_visited;
2481 bb = gimple_bb (def_stmt);
2483 vuse = gimple_vuse (def_stmt);
2485 return true;
2488 /* For two PHI arguments ARG0 and ARG1 try to skip non-aliasing code
2489 until we hit the phi argument definition that dominates the other one.
2490 Return that, or NULL_TREE if there is no such definition. */
2492 static tree
2493 get_continuation_for_phi_1 (gimple *phi, tree arg0, tree arg1,
2494 ao_ref *ref, unsigned int *cnt,
2495 bitmap *visited, bool abort_on_visited,
2496 void *(*translate)(ao_ref *, tree, void *, bool *),
2497 void *data)
2499 gimple *def0 = SSA_NAME_DEF_STMT (arg0);
2500 gimple *def1 = SSA_NAME_DEF_STMT (arg1);
2501 tree common_vuse;
2503 if (arg0 == arg1)
2504 return arg0;
2505 else if (gimple_nop_p (def0)
2506 || (!gimple_nop_p (def1)
2507 && dominated_by_p (CDI_DOMINATORS,
2508 gimple_bb (def1), gimple_bb (def0))))
2510 if (maybe_skip_until (phi, arg0, ref, arg1, cnt,
2511 visited, abort_on_visited, translate, data))
2512 return arg0;
2514 else if (gimple_nop_p (def1)
2515 || dominated_by_p (CDI_DOMINATORS,
2516 gimple_bb (def0), gimple_bb (def1)))
2518 if (maybe_skip_until (phi, arg1, ref, arg0, cnt,
2519 visited, abort_on_visited, translate, data))
2520 return arg1;
2522 /* Special case of a diamond:
2523 MEM_1 = ...
2524 goto (cond) ? L1 : L2
2525 L1: store1 = ... #MEM_2 = vuse(MEM_1)
2526 goto L3
2527 L2: store2 = ... #MEM_3 = vuse(MEM_1)
2528 L3: MEM_4 = PHI<MEM_2, MEM_3>
2529 We were called with the PHI at L3; MEM_2 and MEM_3 don't
2530 dominate each other, but we can still easily skip this PHI node
2531 if we recognize that the vuse MEM operand is the same for both,
2532 and that we can skip both statements (they don't clobber us).
2533 This is still linear. Don't use maybe_skip_until here, as that
2534 might be slow. */
2535 else if ((common_vuse = gimple_vuse (def0))
2536 && common_vuse == gimple_vuse (def1))
2538 bool disambiguate_only = true;
2539 *cnt += 2;
2540 if ((!stmt_may_clobber_ref_p_1 (def0, ref)
2541 || (translate
2542 && (*translate) (ref, arg0, data, &disambiguate_only) == NULL))
2543 && (!stmt_may_clobber_ref_p_1 (def1, ref)
2544 || (translate
2545 && (*translate) (ref, arg1, data, &disambiguate_only) == NULL)))
2546 return common_vuse;
2549 return NULL_TREE;
2553 /* Starting from a PHI node for the virtual operand of the memory reference
2554 REF find a continuation virtual operand that allows the walk to continue
2555 through statements dominating PHI, skipping only statements that cannot possibly
2556 clobber REF. Increments *CNT for each alias disambiguation done.
2557 Returns NULL_TREE if no suitable virtual operand can be found. */
2559 tree
2560 get_continuation_for_phi (gimple *phi, ao_ref *ref,
2561 unsigned int *cnt, bitmap *visited,
2562 bool abort_on_visited,
2563 void *(*translate)(ao_ref *, tree, void *, bool *),
2564 void *data)
2566 unsigned nargs = gimple_phi_num_args (phi);
2568 /* A single-argument PHI we can simply look through. */
2569 if (nargs == 1)
2570 return PHI_ARG_DEF (phi, 0);
2572 /* For two or more arguments try to pairwise skip non-aliasing code
2573 until we hit the phi argument definition that dominates the other one. */
2574 else if (nargs >= 2)
2576 tree arg0, arg1;
2577 unsigned i;
2579 /* Find a candidate for the virtual operand whose definition
2580 dominates those of all others. */
2581 arg0 = PHI_ARG_DEF (phi, 0);
2582 if (!SSA_NAME_IS_DEFAULT_DEF (arg0))
2583 for (i = 1; i < nargs; ++i)
2585 arg1 = PHI_ARG_DEF (phi, i);
2586 if (SSA_NAME_IS_DEFAULT_DEF (arg1))
2588 arg0 = arg1;
2589 break;
2591 if (dominated_by_p (CDI_DOMINATORS,
2592 gimple_bb (SSA_NAME_DEF_STMT (arg0)),
2593 gimple_bb (SSA_NAME_DEF_STMT (arg1))))
2594 arg0 = arg1;
2597 /* Then pairwise reduce against the found candidate. */
2598 for (i = 0; i < nargs; ++i)
2600 arg1 = PHI_ARG_DEF (phi, i);
2601 arg0 = get_continuation_for_phi_1 (phi, arg0, arg1, ref,
2602 cnt, visited, abort_on_visited,
2603 translate, data);
2604 if (!arg0)
2605 return NULL_TREE;
2608 return arg0;
2611 return NULL_TREE;
2614 /* Based on the memory reference REF and its virtual use VUSE call
2615 WALKER for each virtual use that is equivalent to VUSE, including VUSE
2616 itself. That is, for each virtual use whose defining statement
2617 does not clobber REF.
2619 WALKER is called with REF, the current virtual use and DATA. If
2620 WALKER returns non-NULL the walk stops and its result is returned.
2621 At the end of a non-successful walk NULL is returned.
2623 TRANSLATE, if non-NULL, is called with a pointer to REF, with the
2624 virtual use whose defining statement may clobber REF, and with DATA.
2625 If TRANSLATE returns (void *)-1 the walk stops and NULL is returned.
2626 If TRANSLATE returns non-NULL the walk stops and its result is returned.
2627 If TRANSLATE returns NULL the walk continues and TRANSLATE is supposed
2628 to adjust REF and *DATA to make that valid.
2630 VALUEIZE, if non-NULL, is called with the next VUSE that is considered,
2631 and its return value is substituted for it. This can be used to
2632 implement optimistic value-numbering for example. Note that the
2633 VUSE argument is assumed to be valueized already.
2635 TODO: Cache the vector of equivalent vuses per ref, vuse pair. */
2637 void *
2638 walk_non_aliased_vuses (ao_ref *ref, tree vuse,
2639 void *(*walker)(ao_ref *, tree, unsigned int, void *),
2640 void *(*translate)(ao_ref *, tree, void *, bool *),
2641 tree (*valueize)(tree),
2642 void *data)
2644 bitmap visited = NULL;
2645 void *res;
2646 unsigned int cnt = 0;
2647 bool translated = false;
2649 timevar_push (TV_ALIAS_STMT_WALK);
2653 gimple *def_stmt;
2655 /* ??? Do we want to account this to TV_ALIAS_STMT_WALK? */
2656 res = (*walker) (ref, vuse, cnt, data);
2657 /* Abort walk. */
2658 if (res == (void *)-1)
2660 res = NULL;
2661 break;
2663 /* Lookup succeeded. */
2664 else if (res != NULL)
2665 break;
2667 if (valueize)
2668 vuse = valueize (vuse);
2669 def_stmt = SSA_NAME_DEF_STMT (vuse);
2670 if (gimple_nop_p (def_stmt))
2671 break;
2672 else if (gimple_code (def_stmt) == GIMPLE_PHI)
2673 vuse = get_continuation_for_phi (def_stmt, ref, &cnt,
2674 &visited, translated, translate, data);
2675 else
2677 cnt++;
2678 if (stmt_may_clobber_ref_p_1 (def_stmt, ref))
2680 if (!translate)
2681 break;
2682 bool disambiguate_only = false;
2683 res = (*translate) (ref, vuse, data, &disambiguate_only);
2684 /* Failed lookup and translation. */
2685 if (res == (void *)-1)
2687 res = NULL;
2688 break;
2690 /* Lookup succeeded. */
2691 else if (res != NULL)
2692 break;
2693 /* Translation succeeded, continue walking. */
2694 translated = translated || !disambiguate_only;
2696 vuse = gimple_vuse (def_stmt);
2699 while (vuse);
2701 if (visited)
2702 BITMAP_FREE (visited);
2704 timevar_pop (TV_ALIAS_STMT_WALK);
2706 return res;
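/* A hypothetical walker showing the callback contract above (the names
   and the payload struct are made up, not part of this file):

     struct my_data { tree wanted; };

     static void *
     my_walker (ao_ref *ref, tree vuse, unsigned int cnt, void *data_)
     {
       struct my_data *data = (struct my_data *) data_;
       if (cnt > 100)
         return (void *)-1;  // abort, walk_non_aliased_vuses returns NULL
       if (vuse == data->wanted)
         return data;        // success, the walk stops and returns this
       return NULL;          // keep walking to the next vuse
     }

   Called as walk_non_aliased_vuses (&r, vuse, my_walker, NULL, NULL, &d)
   with TRANSLATE and VALUEIZE both NULL, the walk simply stops at the
   first statement that may clobber R. */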
2710 /* Based on the memory reference REF call WALKER for each vdef whose
2711 defining statement may clobber REF, starting with VDEF. If REF
2712 is NULL_TREE, each defining statement is visited.
2714 WALKER is called with REF, the current vdef and DATA. If WALKER
2715 returns true the walk is stopped, otherwise it continues.
2717 If function entry is reached, FUNCTION_ENTRY_REACHED is set to true.
2718 The pointer may be NULL and then we do not track this information.
2720 At PHI nodes walk_aliased_vdefs forks into one walk for each
2721 PHI argument (but only one walk continues on merge points).
2724 The function returns the number of statements walked. */
2726 static unsigned int
2727 walk_aliased_vdefs_1 (ao_ref *ref, tree vdef,
2728 bool (*walker)(ao_ref *, tree, void *), void *data,
2729 bitmap *visited, unsigned int cnt,
2730 bool *function_entry_reached)
2734 gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);
2736 if (*visited
2737 && !bitmap_set_bit (*visited, SSA_NAME_VERSION (vdef)))
2738 return cnt;
2740 if (gimple_nop_p (def_stmt))
2742 if (function_entry_reached)
2743 *function_entry_reached = true;
2744 return cnt;
2746 else if (gimple_code (def_stmt) == GIMPLE_PHI)
2748 unsigned i;
2749 if (!*visited)
2750 *visited = BITMAP_ALLOC (NULL);
2751 for (i = 0; i < gimple_phi_num_args (def_stmt); ++i)
2752 cnt += walk_aliased_vdefs_1 (ref, gimple_phi_arg_def (def_stmt, i),
2753 walker, data, visited, 0,
2754 function_entry_reached);
2755 return cnt;
2758 /* ??? Do we want to account this to TV_ALIAS_STMT_WALK? */
2759 cnt++;
2760 if ((!ref
2761 || stmt_may_clobber_ref_p_1 (def_stmt, ref))
2762 && (*walker) (ref, vdef, data))
2763 return cnt;
2765 vdef = gimple_vuse (def_stmt);
2767 while (1);
2770 unsigned int
2771 walk_aliased_vdefs (ao_ref *ref, tree vdef,
2772 bool (*walker)(ao_ref *, tree, void *), void *data,
2773 bitmap *visited,
2774 bool *function_entry_reached)
2776 bitmap local_visited = NULL;
2777 unsigned int ret;
2779 timevar_push (TV_ALIAS_STMT_WALK);
2781 if (function_entry_reached)
2782 *function_entry_reached = false;
2784 ret = walk_aliased_vdefs_1 (ref, vdef, walker, data,
2785 visited ? visited : &local_visited, 0,
2786 function_entry_reached);
2787 if (local_visited)
2788 BITMAP_FREE (local_visited);
2790 timevar_pop (TV_ALIAS_STMT_WALK);
2792 return ret;
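/* A hypothetical walker for the vdef side (illustrative only):

     static bool
     count_clobbers (ao_ref *ref, tree vdef, void *data_)
     {
       unsigned *n = (unsigned *) data_;
       ++*n;          // one more may-clobbering definition seen
       return false;  // false continues the walk, true would stop it
     }

   Then walk_aliased_vdefs (&r, gimple_vuse (stmt), count_clobbers, &n,
   NULL, NULL) visits every may-clobbering definition of R upward from
   STMT and returns the number of statements walked. */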