PR tree-optimization/66718
gcc/tree-ssa-alias.c
/* Alias analysis for trees.
   Copyright (C) 2004-2015 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "alias.h"
#include "symtab.h"
#include "tree.h"
#include "fold-const.h"
#include "tm_p.h"
#include "target.h"
#include "predict.h"
#include "hard-reg-set.h"
#include "function.h"
#include "dominance.h"
#include "basic-block.h"
#include "timevar.h"	/* for TV_ALIAS_STMT_WALK */
#include "langhooks.h"
#include "flags.h"
#include "tree-pretty-print.h"
#include "dumpfile.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "gimple.h"
#include "gimple-ssa.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "rtl.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "tree-dfa.h"
#include "tree-inline.h"
#include "params.h"
#include "alloc-pool.h"
#include "bitmap.h"
#include "cgraph.h"
#include "ipa-reference.h"
/* Broad overview of how alias analysis on gimple works:

   Statements clobbering or using memory are linked through the
   virtual operand factored use-def chain.  The virtual operand
   is unique per function, its symbol is accessible via gimple_vop (cfun).
   Virtual operands are used for efficiently walking memory statements
   in the gimple IL and are useful for things like value-numbering as
   a generation count for memory references.

   SSA_NAME pointers may have associated points-to information
   accessible via the SSA_NAME_PTR_INFO macro.  Flow-insensitive
   points-to information is (re-)computed by the TODO_rebuild_alias
   pass manager todo.  Points-to information is also used for more
   precise tracking of call-clobbered and call-used variables and
   related disambiguations.

   This file contains functions for disambiguating memory references,
   the so-called alias oracle, and tools for walking the gimple IL.

   The main alias-oracle entry-points are

   bool stmt_may_clobber_ref_p (gimple, tree)

     This function queries if a statement may invalidate (parts of)
     the memory designated by the reference tree argument.

   bool ref_maybe_used_by_stmt_p (gimple, tree)

     This function queries if a statement may need (parts of) the
     memory designated by the reference tree argument.

   There are variants of these functions that only handle the call
   part of a statement, call_may_clobber_ref_p and ref_maybe_used_by_call_p.
   Note that these do not disambiguate against a possible call lhs.

   bool refs_may_alias_p (tree, tree)

     This function tries to disambiguate two reference trees.

   bool ptr_deref_may_alias_global_p (tree)

     This function queries if dereferencing a pointer variable may
     alias global memory.

   More low-level disambiguators are available and documented in
   this file.  Low-level disambiguators dealing with points-to
   information are in tree-ssa-structalias.c.  */
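
/* Illustrative sketch (not part of the original file): a typical client
   asks the oracle whether a statement interferes with a memory
   reference, e.g. when checking if a load can be moved across a store:

     gimple store_stmt = ...;
     tree load_ref = ...;
     if (!ref_maybe_used_by_stmt_p (store_stmt, load_ref)
	 && !stmt_may_clobber_ref_p (store_stmt, load_ref))
       ... STORE_STMT neither reads nor writes the memory of LOAD_REF ...

   STORE_STMT and LOAD_REF are hypothetical placeholders.  */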
/* Query statistics for the different low-level disambiguators.
   A high-level query may trigger multiple of them.  */

static struct {
  unsigned HOST_WIDE_INT refs_may_alias_p_may_alias;
  unsigned HOST_WIDE_INT refs_may_alias_p_no_alias;
  unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_may_alias;
  unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_no_alias;
  unsigned HOST_WIDE_INT call_may_clobber_ref_p_may_alias;
  unsigned HOST_WIDE_INT call_may_clobber_ref_p_no_alias;
} alias_stats;

void
dump_alias_stats (FILE *s)
{
  fprintf (s, "\nAlias oracle query stats:\n");
  fprintf (s, "  refs_may_alias_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.refs_may_alias_p_no_alias,
	   alias_stats.refs_may_alias_p_no_alias
	   + alias_stats.refs_may_alias_p_may_alias);
  fprintf (s, "  ref_maybe_used_by_call_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.ref_maybe_used_by_call_p_no_alias,
	   alias_stats.ref_maybe_used_by_call_p_no_alias
	   + alias_stats.ref_maybe_used_by_call_p_may_alias);
  fprintf (s, "  call_may_clobber_ref_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.call_may_clobber_ref_p_no_alias,
	   alias_stats.call_may_clobber_ref_p_no_alias
	   + alias_stats.call_may_clobber_ref_p_may_alias);
  dump_alias_stats_in_alias_c (s);
}
/* Return true if dereferencing PTR may alias with a global variable.  */

bool
ptr_deref_may_alias_global_p (tree ptr)
{
  struct ptr_info_def *pi;

  /* If we end up with a pointer constant here it may point
     to global memory.  */
  if (TREE_CODE (ptr) != SSA_NAME)
    return true;

  pi = SSA_NAME_PTR_INFO (ptr);

  /* If we do not have points-to information for this variable,
     we have to punt.  */
  if (!pi)
    return true;

  /* ??? This does not use TBAA to prune globals ptr may not access.  */
  return pt_solution_includes_global (&pi->pt);
}
/* Return true if dereferencing PTR may alias DECL.
   The caller is responsible for applying TBAA to see if PTR
   may access DECL at all.  */

static bool
ptr_deref_may_alias_decl_p (tree ptr, tree decl)
{
  struct ptr_info_def *pi;

  /* Conversions are irrelevant for points-to information and
     data-dependence analysis can feed us those.  */
  STRIP_NOPS (ptr);

  /* Anything we do not explicitly handle aliases.  */
  if ((TREE_CODE (ptr) != SSA_NAME
       && TREE_CODE (ptr) != ADDR_EXPR
       && TREE_CODE (ptr) != POINTER_PLUS_EXPR)
      || !POINTER_TYPE_P (TREE_TYPE (ptr))
      || (TREE_CODE (decl) != VAR_DECL
	  && TREE_CODE (decl) != PARM_DECL
	  && TREE_CODE (decl) != RESULT_DECL))
    return true;

  /* Disregard pointer offsetting.  */
  if (TREE_CODE (ptr) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr = TREE_OPERAND (ptr, 0);
	}
      while (TREE_CODE (ptr) == POINTER_PLUS_EXPR);
      return ptr_deref_may_alias_decl_p (ptr, decl);
    }

  /* ADDR_EXPR pointers either just offset another pointer or directly
     specify the pointed-to set.  */
  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	ptr = TREE_OPERAND (base, 0);
      else if (base
	       && DECL_P (base))
	return base == decl;
      else if (base
	       && CONSTANT_CLASS_P (base))
	return false;
      else
	return true;
    }

  /* Non-aliased variables cannot be pointed to.  */
  if (!may_be_aliased (decl))
    return false;

  /* If we do not have useful points-to information for this pointer
     we cannot disambiguate anything else.  */
  pi = SSA_NAME_PTR_INFO (ptr);
  if (!pi)
    return true;

  return pt_solution_includes (&pi->pt, decl);
}
/* Return true if dereferenced PTR1 and PTR2 may alias.
   The caller is responsible for applying TBAA to see if accesses
   through PTR1 and PTR2 may conflict at all.  */

bool
ptr_derefs_may_alias_p (tree ptr1, tree ptr2)
{
  struct ptr_info_def *pi1, *pi2;

  /* Conversions are irrelevant for points-to information and
     data-dependence analysis can feed us those.  */
  STRIP_NOPS (ptr1);
  STRIP_NOPS (ptr2);

  /* Disregard pointer offsetting.  */
  if (TREE_CODE (ptr1) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr1 = TREE_OPERAND (ptr1, 0);
	}
      while (TREE_CODE (ptr1) == POINTER_PLUS_EXPR);
      return ptr_derefs_may_alias_p (ptr1, ptr2);
    }
  if (TREE_CODE (ptr2) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr2 = TREE_OPERAND (ptr2, 0);
	}
      while (TREE_CODE (ptr2) == POINTER_PLUS_EXPR);
      return ptr_derefs_may_alias_p (ptr1, ptr2);
    }

  /* ADDR_EXPR pointers either just offset another pointer or directly
     specify the pointed-to set.  */
  if (TREE_CODE (ptr1) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr1, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	return ptr_derefs_may_alias_p (TREE_OPERAND (base, 0), ptr2);
      else if (base
	       && DECL_P (base))
	return ptr_deref_may_alias_decl_p (ptr2, base);
      else
	return true;
    }
  if (TREE_CODE (ptr2) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr2, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	return ptr_derefs_may_alias_p (ptr1, TREE_OPERAND (base, 0));
      else if (base
	       && DECL_P (base))
	return ptr_deref_may_alias_decl_p (ptr1, base);
      else
	return true;
    }

  /* From here we require SSA name pointers.  Anything else aliases.  */
  if (TREE_CODE (ptr1) != SSA_NAME
      || TREE_CODE (ptr2) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (ptr1))
      || !POINTER_TYPE_P (TREE_TYPE (ptr2)))
    return true;

  /* We may end up with two empty points-to solutions when comparing
     the same pointer with itself.  In this case we still want to say
     both pointers alias, so shortcut that here.  */
  if (ptr1 == ptr2)
    return true;

  /* If we do not have useful points-to information for either pointer
     we cannot disambiguate anything else.  */
  pi1 = SSA_NAME_PTR_INFO (ptr1);
  pi2 = SSA_NAME_PTR_INFO (ptr2);
  if (!pi1 || !pi2)
    return true;

  /* ??? This does not use TBAA to prune decls from the intersection
     that not both pointers may access.  */
  return pt_solutions_intersect (&pi1->pt, &pi2->pt);
}
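
/* Illustrative sketch (not part of the original file): data-dependence
   analysis may hand this function expression trees rather than plain
   SSA names, e.g.

     ptr_derefs_may_alias_p (p_1 + 4, &a)

   The POINTER_PLUS_EXPR is stripped down to p_1, the ADDR_EXPR reduces
   to ptr_deref_may_alias_decl_p (p_1, a), and the answer boils down to
   a points-to set lookup for p_1.  */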
/* Return true if dereferencing PTR may alias *REF.
   The caller is responsible for applying TBAA to see if PTR
   may access *REF at all.  */

static bool
ptr_deref_may_alias_ref_p_1 (tree ptr, ao_ref *ref)
{
  tree base = ao_ref_base (ref);

  if (TREE_CODE (base) == MEM_REF
      || TREE_CODE (base) == TARGET_MEM_REF)
    return ptr_derefs_may_alias_p (ptr, TREE_OPERAND (base, 0));
  else if (DECL_P (base))
    return ptr_deref_may_alias_decl_p (ptr, base);

  return true;
}
/* Return true if a reference with base BASE may refer to global memory.  */

static bool
ref_may_alias_global_p_1 (tree base)
{
  if (DECL_P (base))
    return is_global_var (base);
  else if (TREE_CODE (base) == MEM_REF
	   || TREE_CODE (base) == TARGET_MEM_REF)
    return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
  return true;
}

bool
ref_may_alias_global_p (ao_ref *ref)
{
  tree base = ao_ref_base (ref);
  return ref_may_alias_global_p_1 (base);
}

bool
ref_may_alias_global_p (tree ref)
{
  tree base = get_base_address (ref);
  return ref_may_alias_global_p_1 (base);
}
/* Return true if STMT may clobber global memory.  */

bool
stmt_may_clobber_global_p (gimple stmt)
{
  tree lhs;

  if (!gimple_vdef (stmt))
    return false;

  /* ??? We can ask the oracle whether an artificial pointer
     dereference with a pointer with points-to information covering
     all global memory (what about non-address taken memory?) may be
     clobbered by this call.  As there is at the moment no convenient
     way of doing that without generating garbage do some manual
     checking instead.
     ??? We could make a NULL ao_ref argument to the various
     predicates special, meaning any global memory.  */

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      lhs = gimple_assign_lhs (stmt);
      return (TREE_CODE (lhs) != SSA_NAME
	      && ref_may_alias_global_p (lhs));
    case GIMPLE_CALL:
      return true;
    default:
      return true;
    }
}
/* Dump alias information on FILE.  */

void
dump_alias_info (FILE *file)
{
  unsigned i;
  const char *funcname
    = lang_hooks.decl_printable_name (current_function_decl, 2);
  tree var;

  fprintf (file, "\n\nAlias information for %s\n\n", funcname);

  fprintf (file, "Aliased symbols\n\n");

  FOR_EACH_LOCAL_DECL (cfun, i, var)
    {
      if (may_be_aliased (var))
	dump_variable (file, var);
    }

  fprintf (file, "\nCall clobber information\n");

  fprintf (file, "\nESCAPED");
  dump_points_to_solution (file, &cfun->gimple_df->escaped);

  fprintf (file, "\n\nFlow-insensitive points-to information\n\n");

  for (i = 1; i < num_ssa_names; i++)
    {
      tree ptr = ssa_name (i);
      struct ptr_info_def *pi;

      if (ptr == NULL_TREE
	  || !POINTER_TYPE_P (TREE_TYPE (ptr))
	  || SSA_NAME_IN_FREE_LIST (ptr))
	continue;

      pi = SSA_NAME_PTR_INFO (ptr);
      if (pi)
	dump_points_to_info_for (file, ptr);
    }

  fprintf (file, "\n");
}


/* Dump alias information on stderr.  */

DEBUG_FUNCTION void
debug_alias_info (void)
{
  dump_alias_info (stderr);
}
/* Dump the points-to set *PT into FILE.  */

void
dump_points_to_solution (FILE *file, struct pt_solution *pt)
{
  if (pt->anything)
    fprintf (file, ", points-to anything");

  if (pt->nonlocal)
    fprintf (file, ", points-to non-local");

  if (pt->escaped)
    fprintf (file, ", points-to escaped");

  if (pt->ipa_escaped)
    fprintf (file, ", points-to unit escaped");

  if (pt->null)
    fprintf (file, ", points-to NULL");

  if (pt->vars)
    {
      fprintf (file, ", points-to vars: ");
      dump_decl_set (file, pt->vars);
      if (pt->vars_contains_nonlocal
	  && pt->vars_contains_escaped_heap)
	fprintf (file, " (nonlocal, escaped heap)");
      else if (pt->vars_contains_nonlocal
	       && pt->vars_contains_escaped)
	fprintf (file, " (nonlocal, escaped)");
      else if (pt->vars_contains_nonlocal)
	fprintf (file, " (nonlocal)");
      else if (pt->vars_contains_escaped_heap)
	fprintf (file, " (escaped heap)");
      else if (pt->vars_contains_escaped)
	fprintf (file, " (escaped)");
    }
}


/* Unified dump function for pt_solution.  */

DEBUG_FUNCTION void
debug (pt_solution &ref)
{
  dump_points_to_solution (stderr, &ref);
}

DEBUG_FUNCTION void
debug (pt_solution *ptr)
{
  if (ptr)
    debug (*ptr);
  else
    fprintf (stderr, "<nil>\n");
}
/* Dump points-to information for SSA_NAME PTR into FILE.  */

void
dump_points_to_info_for (FILE *file, tree ptr)
{
  struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);

  print_generic_expr (file, ptr, dump_flags);

  if (pi)
    dump_points_to_solution (file, &pi->pt);
  else
    fprintf (file, ", points-to anything");

  fprintf (file, "\n");
}


/* Dump points-to information for VAR into stderr.  */

DEBUG_FUNCTION void
debug_points_to_info_for (tree var)
{
  dump_points_to_info_for (stderr, var);
}
/* Initializes the alias-oracle reference representation *R from REF.  */

void
ao_ref_init (ao_ref *r, tree ref)
{
  r->ref = ref;
  r->base = NULL_TREE;
  r->offset = 0;
  r->size = -1;
  r->max_size = -1;
  r->ref_alias_set = -1;
  r->base_alias_set = -1;
  r->volatile_p = ref ? TREE_THIS_VOLATILE (ref) : false;
}
/* Returns the base object of the memory reference *REF.  */

tree
ao_ref_base (ao_ref *ref)
{
  if (ref->base)
    return ref->base;
  ref->base = get_ref_base_and_extent (ref->ref, &ref->offset, &ref->size,
				       &ref->max_size);
  return ref->base;
}

/* Returns the base object alias set of the memory reference *REF.  */

alias_set_type
ao_ref_base_alias_set (ao_ref *ref)
{
  tree base_ref;
  if (ref->base_alias_set != -1)
    return ref->base_alias_set;
  if (!ref->ref)
    return 0;
  base_ref = ref->ref;
  while (handled_component_p (base_ref))
    base_ref = TREE_OPERAND (base_ref, 0);
  ref->base_alias_set = get_alias_set (base_ref);
  return ref->base_alias_set;
}

/* Returns the reference alias set of the memory reference *REF.  */

alias_set_type
ao_ref_alias_set (ao_ref *ref)
{
  if (ref->ref_alias_set != -1)
    return ref->ref_alias_set;
  ref->ref_alias_set = get_alias_set (ref->ref);
  return ref->ref_alias_set;
}
/* Init an alias-oracle reference representation from a gimple pointer
   PTR and a gimple size SIZE in bytes.  If SIZE is NULL_TREE then the
   size is assumed to be unknown.  The access is assumed to be only
   to or after the pointer target, not before it.  */

void
ao_ref_init_from_ptr_and_size (ao_ref *ref, tree ptr, tree size)
{
  HOST_WIDE_INT t, size_hwi, extra_offset = 0;
  ref->ref = NULL_TREE;
  if (TREE_CODE (ptr) == SSA_NAME)
    {
      gimple stmt = SSA_NAME_DEF_STMT (ptr);
      if (gimple_assign_single_p (stmt)
	  && gimple_assign_rhs_code (stmt) == ADDR_EXPR)
	ptr = gimple_assign_rhs1 (stmt);
      else if (is_gimple_assign (stmt)
	       && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
	       && TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST)
	{
	  ptr = gimple_assign_rhs1 (stmt);
	  extra_offset = BITS_PER_UNIT
			 * int_cst_value (gimple_assign_rhs2 (stmt));
	}
    }

  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      ref->base = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &t);
      if (ref->base)
	ref->offset = BITS_PER_UNIT * t;
      else
	{
	  size = NULL_TREE;
	  ref->offset = 0;
	  ref->base = get_base_address (TREE_OPERAND (ptr, 0));
	}
    }
  else
    {
      ref->base = build2 (MEM_REF, char_type_node,
			  ptr, null_pointer_node);
      ref->offset = 0;
    }
  ref->offset += extra_offset;
  if (size
      && tree_fits_shwi_p (size)
      && (size_hwi = tree_to_shwi (size)) <= HOST_WIDE_INT_MAX / BITS_PER_UNIT)
    ref->max_size = ref->size = size_hwi * BITS_PER_UNIT;
  else
    ref->max_size = ref->size = -1;
  ref->ref_alias_set = 0;
  ref->base_alias_set = 0;
  ref->volatile_p = false;
}
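
/* Illustrative sketch (not part of the original file): the call
   handling further below uses this to model the memory read through a
   pointer argument, e.g. the source of a memcpy with known size:

     ao_ref dref;
     ao_ref_init_from_ptr_and_size (&dref,
				    gimple_call_arg (call, 1),
				    gimple_call_arg (call, 2));
     ... refs_may_alias_p_1 (&dref, ref, false) ...

   CALL and REF are hypothetical placeholders.  */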
/* Return 1 if TYPE1 and TYPE2 are to be considered equivalent for the
   purpose of TBAA.  Return 0 if they are distinct and -1 if we cannot
   decide.  */

static inline int
same_type_for_tbaa (tree type1, tree type2)
{
  type1 = TYPE_MAIN_VARIANT (type1);
  type2 = TYPE_MAIN_VARIANT (type2);

  /* If we would have to do structural comparison bail out.  */
  if (TYPE_STRUCTURAL_EQUALITY_P (type1)
      || TYPE_STRUCTURAL_EQUALITY_P (type2))
    return -1;

  /* Compare the canonical types.  */
  if (TYPE_CANONICAL (type1) == TYPE_CANONICAL (type2))
    return 1;

  /* ??? Array types are not properly unified in all cases as we have
     spurious changes in the index types for example.  Removing this
     causes all sorts of problems with the Fortran frontend.  */
  if (TREE_CODE (type1) == ARRAY_TYPE
      && TREE_CODE (type2) == ARRAY_TYPE)
    return -1;

  /* ??? In Ada, an lvalue of an unconstrained type can be used to access an
     object of one of its constrained subtypes, e.g. when a function with an
     unconstrained parameter passed by reference is called on an object and
     inlined.  But, even in the case of a fixed size, type and subtypes are
     not equivalent enough as to share the same TYPE_CANONICAL, since this
     would mean that conversions between them are useless, whereas they are
     not (e.g. type and subtypes can have different modes).  So, in the end,
     they are only guaranteed to have the same alias set.  */
  if (get_alias_set (type1) == get_alias_set (type2))
    return -1;

  /* The types are known to be not equal.  */
  return 0;
}
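
/* Illustrative note (not part of the original file): callers must treat
   the three results differently, e.g.

     same_type_for_tbaa (int, int)        == 1   (same canonical type)
     same_type_for_tbaa (int, float)      == 0   (provably distinct)
     same_type_for_tbaa (int[3], int[4])  == -1  (cannot decide; punt)

   so -1 has to be handled as "maybe" and never folded into 0 or 1.  */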
/* Determine if the two component references REF1 and REF2 which are
   based on access types TYPE1 and TYPE2 and of which at least one is based
   on an indirect reference may alias.  REF2 is the only one that can
   be a decl in which case REF2_IS_DECL is true.
   REF1_ALIAS_SET, BASE1_ALIAS_SET, REF2_ALIAS_SET and BASE2_ALIAS_SET
   are the respective alias sets.  */

static bool
aliasing_component_refs_p (tree ref1,
			   alias_set_type ref1_alias_set,
			   alias_set_type base1_alias_set,
			   HOST_WIDE_INT offset1, HOST_WIDE_INT max_size1,
			   tree ref2,
			   alias_set_type ref2_alias_set,
			   alias_set_type base2_alias_set,
			   HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2,
			   bool ref2_is_decl)
{
  /* If one reference is a component reference through pointers try to find a
     common base and apply offset based disambiguation.  This handles
     for example
       struct A { int i; int j; } *q;
       struct B { struct A a; int k; } *p;
     disambiguating q->i and p->a.j.  */
  tree base1, base2;
  tree type1, type2;
  tree *refp;
  int same_p;

  /* Choose bases and base types to search for.  */
  base1 = ref1;
  while (handled_component_p (base1))
    base1 = TREE_OPERAND (base1, 0);
  type1 = TREE_TYPE (base1);
  base2 = ref2;
  while (handled_component_p (base2))
    base2 = TREE_OPERAND (base2, 0);
  type2 = TREE_TYPE (base2);

  /* Now search for the type1 in the access path of ref2.  This
     would be a common base for doing offset based disambiguation on.  */
  refp = &ref2;
  while (handled_component_p (*refp)
	 && same_type_for_tbaa (TREE_TYPE (*refp), type1) == 0)
    refp = &TREE_OPERAND (*refp, 0);
  same_p = same_type_for_tbaa (TREE_TYPE (*refp), type1);
  /* If we couldn't compare types we have to bail out.  */
  if (same_p == -1)
    return true;
  else if (same_p == 1)
    {
      HOST_WIDE_INT offadj, sztmp, msztmp;
      get_ref_base_and_extent (*refp, &offadj, &sztmp, &msztmp);
      offset2 -= offadj;
      get_ref_base_and_extent (base1, &offadj, &sztmp, &msztmp);
      offset1 -= offadj;
      return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
    }
  /* If we didn't find a common base, try the other way around.  */
  refp = &ref1;
  while (handled_component_p (*refp)
	 && same_type_for_tbaa (TREE_TYPE (*refp), type2) == 0)
    refp = &TREE_OPERAND (*refp, 0);
  same_p = same_type_for_tbaa (TREE_TYPE (*refp), type2);
  /* If we couldn't compare types we have to bail out.  */
  if (same_p == -1)
    return true;
  else if (same_p == 1)
    {
      HOST_WIDE_INT offadj, sztmp, msztmp;
      get_ref_base_and_extent (*refp, &offadj, &sztmp, &msztmp);
      offset1 -= offadj;
      get_ref_base_and_extent (base2, &offadj, &sztmp, &msztmp);
      offset2 -= offadj;
      return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
    }

  /* If we have two type access paths B1.path1 and B2.path2 they may
     only alias if either B1 is in B2.path2 or B2 is in B1.path1.
     But we can still have a path that goes B1.path1...B2.path2 with
     a part that we do not see.  So we can only disambiguate now
     if there is no B2 in the tail of path1 and no B1 on the
     tail of path2.  */
  if (base1_alias_set == ref2_alias_set
      || alias_set_subset_of (base1_alias_set, ref2_alias_set))
    return true;
  /* If this is ptr vs. decl then we know there is no ptr ... decl path.  */
  if (!ref2_is_decl)
    return (base2_alias_set == ref1_alias_set
	    || alias_set_subset_of (base2_alias_set, ref1_alias_set));
  return false;
}
/* Return true if we can determine that component references REF1 and REF2,
   that are within a common DECL, cannot overlap.  */

static bool
nonoverlapping_component_refs_of_decl_p (tree ref1, tree ref2)
{
  auto_vec<tree, 16> component_refs1;
  auto_vec<tree, 16> component_refs2;

  /* Create the stack of handled components for REF1.  */
  while (handled_component_p (ref1))
    {
      component_refs1.safe_push (ref1);
      ref1 = TREE_OPERAND (ref1, 0);
    }
  if (TREE_CODE (ref1) == MEM_REF)
    {
      if (!integer_zerop (TREE_OPERAND (ref1, 1)))
	goto may_overlap;
      ref1 = TREE_OPERAND (TREE_OPERAND (ref1, 0), 0);
    }

  /* Create the stack of handled components for REF2.  */
  while (handled_component_p (ref2))
    {
      component_refs2.safe_push (ref2);
      ref2 = TREE_OPERAND (ref2, 0);
    }
  if (TREE_CODE (ref2) == MEM_REF)
    {
      if (!integer_zerop (TREE_OPERAND (ref2, 1)))
	goto may_overlap;
      ref2 = TREE_OPERAND (TREE_OPERAND (ref2, 0), 0);
    }

  /* We must have the same base DECL.  */
  gcc_assert (ref1 == ref2);

  /* Pop the stacks in parallel and examine the COMPONENT_REFs of the same
     rank.  This is sufficient because we start from the same DECL and you
     cannot reference several fields at a time with COMPONENT_REFs (unlike
     with ARRAY_RANGE_REFs for arrays) so you always need the same number
     of them to access a sub-component, unless you're in a union, in which
     case the return value will precisely be false.  */
  while (true)
    {
      do
	{
	  if (component_refs1.is_empty ())
	    goto may_overlap;
	  ref1 = component_refs1.pop ();
	}
      while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref1, 0))));

      do
	{
	  if (component_refs2.is_empty ())
	    goto may_overlap;
	  ref2 = component_refs2.pop ();
	}
      while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref2, 0))));

      /* Beware of BIT_FIELD_REF.  */
      if (TREE_CODE (ref1) != COMPONENT_REF
	  || TREE_CODE (ref2) != COMPONENT_REF)
	goto may_overlap;

      tree field1 = TREE_OPERAND (ref1, 1);
      tree field2 = TREE_OPERAND (ref2, 1);

      /* ??? We cannot simply use the type of operand #0 of the refs here
	 as the Fortran compiler smuggles type punning into COMPONENT_REFs
	 for common blocks instead of using unions like everyone else.  */
      tree type1 = DECL_CONTEXT (field1);
      tree type2 = DECL_CONTEXT (field2);

      /* We cannot disambiguate fields in a union or qualified union.  */
      if (type1 != type2 || TREE_CODE (type1) != RECORD_TYPE)
	goto may_overlap;

      /* Different fields of the same record type cannot overlap.
	 ??? Bitfields can overlap at RTL level so punt on them.  */
      if (field1 != field2)
	{
	  component_refs1.release ();
	  component_refs2.release ();
	  return !(DECL_BIT_FIELD (field1) && DECL_BIT_FIELD (field2));
	}
    }

may_overlap:
  component_refs1.release ();
  component_refs2.release ();
  return false;
}
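
/* Illustrative example (not part of the original file): for

     struct S { int a; int b; } s;

   the accesses s.a and s.b pop COMPONENT_REFs of the same rank with
   field1 == a and field2 == b in the same RECORD_TYPE, so the function
   returns true; s.a vs. s.a, or any two fields of a union, yields
   false.  */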
/* qsort compare function to sort FIELD_DECLs after their
   DECL_FIELD_CONTEXT TYPE_UID.  */

static inline int
ncr_compar (const void *field1_, const void *field2_)
{
  const_tree field1 = *(const_tree *) const_cast <void *>(field1_);
  const_tree field2 = *(const_tree *) const_cast <void *>(field2_);
  unsigned int uid1 = TYPE_UID (DECL_FIELD_CONTEXT (field1));
  unsigned int uid2 = TYPE_UID (DECL_FIELD_CONTEXT (field2));
  if (uid1 < uid2)
    return -1;
  else if (uid1 > uid2)
    return 1;
  return 0;
}
/* Return true if we can determine that the fields referenced cannot
   overlap for any pair of objects.  */

static bool
nonoverlapping_component_refs_p (const_tree x, const_tree y)
{
  if (!flag_strict_aliasing
      || !x || !y
      || TREE_CODE (x) != COMPONENT_REF
      || TREE_CODE (y) != COMPONENT_REF)
    return false;

  auto_vec<const_tree, 16> fieldsx;
  while (TREE_CODE (x) == COMPONENT_REF)
    {
      tree field = TREE_OPERAND (x, 1);
      tree type = DECL_FIELD_CONTEXT (field);
      if (TREE_CODE (type) == RECORD_TYPE)
	fieldsx.safe_push (field);
      x = TREE_OPERAND (x, 0);
    }
  if (fieldsx.length () == 0)
    return false;
  auto_vec<const_tree, 16> fieldsy;
  while (TREE_CODE (y) == COMPONENT_REF)
    {
      tree field = TREE_OPERAND (y, 1);
      tree type = DECL_FIELD_CONTEXT (field);
      if (TREE_CODE (type) == RECORD_TYPE)
	fieldsy.safe_push (field);
      y = TREE_OPERAND (y, 0);
    }
  if (fieldsy.length () == 0)
    return false;

  /* Most common case first.  */
  if (fieldsx.length () == 1
      && fieldsy.length () == 1)
    return ((DECL_FIELD_CONTEXT (fieldsx[0])
	     == DECL_FIELD_CONTEXT (fieldsy[0]))
	    && fieldsx[0] != fieldsy[0]
	    && !(DECL_BIT_FIELD (fieldsx[0]) && DECL_BIT_FIELD (fieldsy[0])));

  if (fieldsx.length () == 2)
    {
      if (ncr_compar (&fieldsx[0], &fieldsx[1]) == 1)
	std::swap (fieldsx[0], fieldsx[1]);
    }
  else
    fieldsx.qsort (ncr_compar);

  if (fieldsy.length () == 2)
    {
      if (ncr_compar (&fieldsy[0], &fieldsy[1]) == 1)
	std::swap (fieldsy[0], fieldsy[1]);
    }
  else
    fieldsy.qsort (ncr_compar);

  unsigned i = 0, j = 0;
  do
    {
      const_tree fieldx = fieldsx[i];
      const_tree fieldy = fieldsy[j];
      tree typex = DECL_FIELD_CONTEXT (fieldx);
      tree typey = DECL_FIELD_CONTEXT (fieldy);
      if (typex == typey)
	{
	  /* We're left with accessing different fields of a structure,
	     no possible overlap, unless they are both bitfields.  */
	  if (fieldx != fieldy)
	    return !(DECL_BIT_FIELD (fieldx) && DECL_BIT_FIELD (fieldy));
	}
      if (TYPE_UID (typex) < TYPE_UID (typey))
	{
	  i++;
	  if (i == fieldsx.length ())
	    break;
	}
      else
	{
	  j++;
	  if (j == fieldsy.length ())
	    break;
	}
    }
  while (1);

  return false;
}
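
/* Illustrative example (not part of the original file): for

     struct S { int a; int b; } *p, *q;

   the accesses p->a and q->b reference different fields of the same
   RECORD_TYPE, so they cannot overlap for any pair of objects and the
   function returns true, even though p and q themselves are
   unrelated.  */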
/* Return true if two memory references based on the variables BASE1
   and BASE2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
   [OFFSET2, OFFSET2 + MAX_SIZE2) may alias.  REF1 and REF2
   if non-NULL are the complete memory reference trees.  */

static bool
decl_refs_may_alias_p (tree ref1, tree base1,
		       HOST_WIDE_INT offset1, HOST_WIDE_INT max_size1,
		       tree ref2, tree base2,
		       HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2)
{
  gcc_checking_assert (DECL_P (base1) && DECL_P (base2));

  /* If both references are based on different variables, they cannot alias.  */
  if (base1 != base2)
    return false;

  /* If both references are based on the same variable, they cannot alias if
     the accesses do not overlap.  */
  if (!ranges_overlap_p (offset1, max_size1, offset2, max_size2))
    return false;

  /* For components with variable position, the above test isn't sufficient,
     so we disambiguate component references manually.  */
  if (ref1 && ref2
      && handled_component_p (ref1) && handled_component_p (ref2)
      && nonoverlapping_component_refs_of_decl_p (ref1, ref2))
    return false;

  return true;
}
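
/* Worked example (illustrative, not part of the original file): two
   accesses to the same decl at bit ranges [0, 32) and [32, 64) give
   ranges_overlap_p (0, 32, 32, 32) == false, so they are disambiguated
   directly; with a variable-position component the extent is unknown
   (max_size == -1) and nonoverlapping_component_refs_of_decl_p has to
   decide instead.  */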
/* Return true if an indirect reference based on *PTR1 constrained
   to [OFFSET1, OFFSET1 + MAX_SIZE1) may alias a variable based on BASE2
   constrained to [OFFSET2, OFFSET2 + MAX_SIZE2).  *PTR1 and BASE2 have
   the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
   in which case they are computed on-demand.  REF1 and REF2
   if non-NULL are the complete memory reference trees.  */

static bool
indirect_ref_may_alias_decl_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
			       HOST_WIDE_INT offset1,
			       HOST_WIDE_INT max_size1 ATTRIBUTE_UNUSED,
			       alias_set_type ref1_alias_set,
			       alias_set_type base1_alias_set,
			       tree ref2 ATTRIBUTE_UNUSED, tree base2,
			       HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2,
			       alias_set_type ref2_alias_set,
			       alias_set_type base2_alias_set, bool tbaa_p)
{
  tree ptr1;
  tree ptrtype1, dbase2;
  HOST_WIDE_INT offset1p = offset1, offset2p = offset2;
  HOST_WIDE_INT doffset1, doffset2;

  gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
			|| TREE_CODE (base1) == TARGET_MEM_REF)
		       && DECL_P (base2));

  ptr1 = TREE_OPERAND (base1, 0);

  /* The offset embedded in MEM_REFs can be negative.  Bias them
     so that the resulting offset adjustment is positive.  */
  offset_int moff = mem_ref_offset (base1);
  moff = wi::lshift (moff, LOG2_BITS_PER_UNIT);
  if (wi::neg_p (moff))
    offset2p += (-moff).to_short_addr ();
  else
    offset1p += moff.to_short_addr ();
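  /* Worked example (illustrative, not part of the original file): for
     MEM_REF[p1, -4] the embedded offset is -4 bytes, i.e. -32 bits once
     shifted by LOG2_BITS_PER_UNIT, so instead of subtracting 32 from
     OFFSET1P we add 32 to OFFSET2P, keeping both offsets non-negative
     relative to a common origin.  */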
  /* If only one reference is based on a variable, they cannot alias if
     the pointer access is beyond the extent of the variable access.
     (the pointer base cannot validly point to an offset less than zero
     of the variable).
     ??? IVOPTs creates bases that do not honor this restriction,
     so do not apply this optimization for TARGET_MEM_REFs.  */
  if (TREE_CODE (base1) != TARGET_MEM_REF
      && !ranges_overlap_p (MAX (0, offset1p), -1, offset2p, max_size2))
    return false;
  /* They also cannot alias if the pointer may not point to the decl.  */
  if (!ptr_deref_may_alias_decl_p (ptr1, base2))
    return false;

  /* Disambiguations that rely on strict aliasing rules follow.  */
  if (!flag_strict_aliasing || !tbaa_p)
    return true;

  ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));

  /* If the alias set for a pointer access is zero all bets are off.  */
  if (base1_alias_set == -1)
    base1_alias_set = get_deref_alias_set (ptrtype1);
  if (base1_alias_set == 0)
    return true;
  if (base2_alias_set == -1)
    base2_alias_set = get_alias_set (base2);

  /* When we are trying to disambiguate an access with a pointer dereference
     as base versus one with a decl as base we can use both the size
     of the decl and its dynamic type for extra disambiguation.
     ??? We do not know anything about the dynamic type of the decl
     other than that its alias-set contains base2_alias_set as a subset
     which does not help us here.  */
  /* As we know nothing useful about the dynamic type of the decl just
     use the usual conflict check rather than a subset test.
     ??? We could introduce -fvery-strict-aliasing when the language
     does not allow decls to have a dynamic type that differs from their
     static type.  Then we can check
     !alias_set_subset_of (base1_alias_set, base2_alias_set) instead.  */
  if (base1_alias_set != base2_alias_set
      && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
    return false;
  /* If the size of the access relevant for TBAA through the pointer
     is bigger than the size of the decl we can't possibly access the
     decl via that pointer.  */
  if (DECL_SIZE (base2) && COMPLETE_TYPE_P (TREE_TYPE (ptrtype1))
      && TREE_CODE (DECL_SIZE (base2)) == INTEGER_CST
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (ptrtype1))) == INTEGER_CST
      /* ??? This in turn may run afoul when a decl of type T which is
	 a member of union type U is accessed through a pointer to
	 type U and sizeof T is smaller than sizeof U.  */
      && TREE_CODE (TREE_TYPE (ptrtype1)) != UNION_TYPE
      && TREE_CODE (TREE_TYPE (ptrtype1)) != QUAL_UNION_TYPE
      && tree_int_cst_lt (DECL_SIZE (base2), TYPE_SIZE (TREE_TYPE (ptrtype1))))
    return false;

  if (!ref2)
    return true;

  /* If the decl is accessed via a MEM_REF, reconstruct the base
     we can use for TBAA and an appropriately adjusted offset.  */
  dbase2 = ref2;
  while (handled_component_p (dbase2))
    dbase2 = TREE_OPERAND (dbase2, 0);
  doffset1 = offset1;
  doffset2 = offset2;
  if (TREE_CODE (dbase2) == MEM_REF
      || TREE_CODE (dbase2) == TARGET_MEM_REF)
    {
      offset_int moff = mem_ref_offset (dbase2);
      moff = wi::lshift (moff, LOG2_BITS_PER_UNIT);
      if (wi::neg_p (moff))
	doffset1 -= (-moff).to_short_addr ();
      else
	doffset2 -= moff.to_short_addr ();
    }

  /* If either reference is view-converted, give up now.  */
  if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1
      || same_type_for_tbaa (TREE_TYPE (dbase2), TREE_TYPE (base2)) != 1)
    return true;

  /* If both references are through the same type, they do not alias
     if the accesses do not overlap.  This does extra disambiguation
     for mixed/pointer accesses but requires strict aliasing.
     For MEM_REFs we require that the component-ref offset we computed
     is relative to the start of the type which we ensure by
     comparing rvalue and access type and disregarding the constant
     pointer offset.  */
  if ((TREE_CODE (base1) != TARGET_MEM_REF
       || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
      && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (dbase2)) == 1)
    return ranges_overlap_p (doffset1, max_size1, doffset2, max_size2);

  if (ref1 && ref2
      && nonoverlapping_component_refs_p (ref1, ref2))
    return false;

  /* Do access-path based disambiguation.  */
  if (ref1 && ref2
      && (handled_component_p (ref1) || handled_component_p (ref2)))
    return aliasing_component_refs_p (ref1,
				      ref1_alias_set, base1_alias_set,
				      offset1, max_size1,
				      ref2,
				      ref2_alias_set, base2_alias_set,
				      offset2, max_size2, true);

  return true;
}
/* Return true if two indirect references based on *PTR1
   and *PTR2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
   [OFFSET2, OFFSET2 + MAX_SIZE2) may alias.  *PTR1 and *PTR2 have
   the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
   in which case they are computed on-demand.  REF1 and REF2
   if non-NULL are the complete memory reference trees.  */

static bool
indirect_refs_may_alias_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
			   HOST_WIDE_INT offset1, HOST_WIDE_INT max_size1,
			   alias_set_type ref1_alias_set,
			   alias_set_type base1_alias_set,
			   tree ref2 ATTRIBUTE_UNUSED, tree base2,
			   HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2,
			   alias_set_type ref2_alias_set,
			   alias_set_type base2_alias_set, bool tbaa_p)
{
  tree ptr1;
  tree ptr2;
  tree ptrtype1, ptrtype2;

  gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
			|| TREE_CODE (base1) == TARGET_MEM_REF)
		       && (TREE_CODE (base2) == MEM_REF
			   || TREE_CODE (base2) == TARGET_MEM_REF));

  ptr1 = TREE_OPERAND (base1, 0);
  ptr2 = TREE_OPERAND (base2, 0);

  /* If both bases are based on pointers they cannot alias if they may not
     point to the same memory object or if they point to the same object
     and the accesses do not overlap.  */
  if ((!cfun || gimple_in_ssa_p (cfun))
      && operand_equal_p (ptr1, ptr2, 0)
      && (((TREE_CODE (base1) != TARGET_MEM_REF
	    || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
	   && (TREE_CODE (base2) != TARGET_MEM_REF
	       || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2))))
	  || (TREE_CODE (base1) == TARGET_MEM_REF
	      && TREE_CODE (base2) == TARGET_MEM_REF
	      && (TMR_STEP (base1) == TMR_STEP (base2)
		  || (TMR_STEP (base1) && TMR_STEP (base2)
		      && operand_equal_p (TMR_STEP (base1),
					  TMR_STEP (base2), 0)))
	      && (TMR_INDEX (base1) == TMR_INDEX (base2)
		  || (TMR_INDEX (base1) && TMR_INDEX (base2)
		      && operand_equal_p (TMR_INDEX (base1),
					  TMR_INDEX (base2), 0)))
	      && (TMR_INDEX2 (base1) == TMR_INDEX2 (base2)
		  || (TMR_INDEX2 (base1) && TMR_INDEX2 (base2)
		      && operand_equal_p (TMR_INDEX2 (base1),
					  TMR_INDEX2 (base2), 0))))))
    {
      offset_int moff;
      /* The offset embedded in MEM_REFs can be negative.  Bias them
	 so that the resulting offset adjustment is positive.  */
      moff = mem_ref_offset (base1);
      moff = wi::lshift (moff, LOG2_BITS_PER_UNIT);
      if (wi::neg_p (moff))
	offset2 += (-moff).to_short_addr ();
      else
	offset1 += moff.to_short_addr ();
      moff = mem_ref_offset (base2);
      moff = wi::lshift (moff, LOG2_BITS_PER_UNIT);
      if (wi::neg_p (moff))
	offset1 += (-moff).to_short_addr ();
      else
	offset2 += moff.to_short_addr ();
      return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
    }
  if (!ptr_derefs_may_alias_p (ptr1, ptr2))
    return false;

  /* Disambiguations that rely on strict aliasing rules follow.  */
  if (!flag_strict_aliasing || !tbaa_p)
    return true;

  ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));
  ptrtype2 = TREE_TYPE (TREE_OPERAND (base2, 1));

  /* If the alias set for a pointer access is zero all bets are off.  */
  if (base1_alias_set == -1)
    base1_alias_set = get_deref_alias_set (ptrtype1);
  if (base1_alias_set == 0)
    return true;
  if (base2_alias_set == -1)
    base2_alias_set = get_deref_alias_set (ptrtype2);
  if (base2_alias_set == 0)
    return true;

  /* If both references are through the same type, they do not alias
     if the accesses do not overlap.  This does extra disambiguation
     for mixed/pointer accesses but requires strict aliasing.  */
  if ((TREE_CODE (base1) != TARGET_MEM_REF
       || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
      && (TREE_CODE (base2) != TARGET_MEM_REF
	  || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2)))
      && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) == 1
      && same_type_for_tbaa (TREE_TYPE (base2), TREE_TYPE (ptrtype2)) == 1
      && same_type_for_tbaa (TREE_TYPE (ptrtype1),
			     TREE_TYPE (ptrtype2)) == 1)
    return ranges_overlap_p (offset1, max_size1, offset2, max_size2);

  /* Do type-based disambiguation.  */
  if (base1_alias_set != base2_alias_set
      && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
    return false;

  /* If either reference is view-converted, give up now.  */
  if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1
      || same_type_for_tbaa (TREE_TYPE (base2), TREE_TYPE (ptrtype2)) != 1)
    return true;

  if (ref1 && ref2
      && nonoverlapping_component_refs_p (ref1, ref2))
    return false;

  /* Do access-path based disambiguation.  */
  if (ref1 && ref2
      && (handled_component_p (ref1) || handled_component_p (ref2)))
    return aliasing_component_refs_p (ref1,
				      ref1_alias_set, base1_alias_set,
				      offset1, max_size1,
				      ref2,
				      ref2_alias_set, base2_alias_set,
				      offset2, max_size2, false);

  return true;
}
/* Return true if the two memory references REF1 and REF2 may alias.  */

bool
refs_may_alias_p_1 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
{
  tree base1, base2;
  HOST_WIDE_INT offset1 = 0, offset2 = 0;
  HOST_WIDE_INT max_size1 = -1, max_size2 = -1;
  bool var1_p, var2_p, ind1_p, ind2_p;

  gcc_checking_assert ((!ref1->ref
			|| TREE_CODE (ref1->ref) == SSA_NAME
			|| DECL_P (ref1->ref)
			|| TREE_CODE (ref1->ref) == STRING_CST
			|| handled_component_p (ref1->ref)
			|| TREE_CODE (ref1->ref) == MEM_REF
			|| TREE_CODE (ref1->ref) == TARGET_MEM_REF)
		       && (!ref2->ref
			   || TREE_CODE (ref2->ref) == SSA_NAME
			   || DECL_P (ref2->ref)
			   || TREE_CODE (ref2->ref) == STRING_CST
			   || handled_component_p (ref2->ref)
			   || TREE_CODE (ref2->ref) == MEM_REF
			   || TREE_CODE (ref2->ref) == TARGET_MEM_REF));

  /* Decompose the references into their base objects and the access.  */
  base1 = ao_ref_base (ref1);
  offset1 = ref1->offset;
  max_size1 = ref1->max_size;
  base2 = ao_ref_base (ref2);
  offset2 = ref2->offset;
  max_size2 = ref2->max_size;

  /* We can end up with registers or constants as bases for example from
     *D.1663_44 = VIEW_CONVERT_EXPR<struct DB_LSN>(__tmp$B0F64_59);
     which is seen as a struct copy.  */
  if (TREE_CODE (base1) == SSA_NAME
      || TREE_CODE (base1) == CONST_DECL
      || TREE_CODE (base1) == CONSTRUCTOR
      || TREE_CODE (base1) == ADDR_EXPR
      || CONSTANT_CLASS_P (base1)
      || TREE_CODE (base2) == SSA_NAME
      || TREE_CODE (base2) == CONST_DECL
      || TREE_CODE (base2) == CONSTRUCTOR
      || TREE_CODE (base2) == ADDR_EXPR
      || CONSTANT_CLASS_P (base2))
    return false;

  /* We can end up referring to code via function and label decls.
     As we likely do not properly track code aliases conservatively
     bail out.  */
  if (TREE_CODE (base1) == FUNCTION_DECL
      || TREE_CODE (base1) == LABEL_DECL
      || TREE_CODE (base2) == FUNCTION_DECL
      || TREE_CODE (base2) == LABEL_DECL)
    return true;

  /* Two volatile accesses always conflict.  */
  if (ref1->volatile_p
      && ref2->volatile_p)
    return true;

  /* Defer to simple offset based disambiguation if we have
     references based on two decls.  Do this before deferring to
     TBAA to handle must-alias cases in conformance with the
     GCC extension of allowing type-punning through unions.  */
  var1_p = DECL_P (base1);
  var2_p = DECL_P (base2);
  if (var1_p && var2_p)
    return decl_refs_may_alias_p (ref1->ref, base1, offset1, max_size1,
				  ref2->ref, base2, offset2, max_size2);

  /* Handle restrict based accesses.
     ??? ao_ref_base strips inner MEM_REF [&decl], recover from that
     here.  */
  tree rbase1 = base1;
  tree rbase2 = base2;
  if (var1_p)
    {
      rbase1 = ref1->ref;
      if (rbase1)
	while (handled_component_p (rbase1))
	  rbase1 = TREE_OPERAND (rbase1, 0);
    }
  if (var2_p)
    {
      rbase2 = ref2->ref;
      if (rbase2)
	while (handled_component_p (rbase2))
	  rbase2 = TREE_OPERAND (rbase2, 0);
    }
  if (rbase1 && rbase2
      && (TREE_CODE (base1) == MEM_REF || TREE_CODE (base1) == TARGET_MEM_REF)
      && (TREE_CODE (base2) == MEM_REF || TREE_CODE (base2) == TARGET_MEM_REF)
      /* If the accesses are in the same restrict clique...  */
      && MR_DEPENDENCE_CLIQUE (base1) == MR_DEPENDENCE_CLIQUE (base2)
      /* But based on different pointers they do not alias.  */
      && MR_DEPENDENCE_BASE (base1) != MR_DEPENDENCE_BASE (base2))
    return false;

  ind1_p = (TREE_CODE (base1) == MEM_REF
	    || TREE_CODE (base1) == TARGET_MEM_REF);
  ind2_p = (TREE_CODE (base2) == MEM_REF
	    || TREE_CODE (base2) == TARGET_MEM_REF);

  /* Canonicalize the pointer-vs-decl case.  */
  if (ind1_p && var2_p)
    {
      std::swap (offset1, offset2);
      std::swap (max_size1, max_size2);
      std::swap (base1, base2);
      std::swap (ref1, ref2);
      var1_p = true;
      ind1_p = false;
      var2_p = false;
      ind2_p = true;
    }

  /* First defer to TBAA if possible.  */
  if (tbaa_p
      && flag_strict_aliasing
      && !alias_sets_conflict_p (ao_ref_alias_set (ref1),
				 ao_ref_alias_set (ref2)))
    return false;

  /* Dispatch to the pointer-vs-decl or pointer-vs-pointer disambiguators.  */
  if (var1_p && ind2_p)
    return indirect_ref_may_alias_decl_p (ref2->ref, base2,
					  offset2, max_size2,
					  ao_ref_alias_set (ref2), -1,
					  ref1->ref, base1,
					  offset1, max_size1,
					  ao_ref_alias_set (ref1),
					  ao_ref_base_alias_set (ref1),
					  tbaa_p);
  else if (ind1_p && ind2_p)
    return indirect_refs_may_alias_p (ref1->ref, base1,
				      offset1, max_size1,
				      ao_ref_alias_set (ref1), -1,
				      ref2->ref, base2,
				      offset2, max_size2,
				      ao_ref_alias_set (ref2), -1,
				      tbaa_p);

  /* We really do not want to end up here, but returning true is safe.  */
#ifdef ENABLE_CHECKING
  gcc_unreachable ();
#else
  return true;
#endif
}
static bool
refs_may_alias_p (tree ref1, ao_ref *ref2)
{
  ao_ref r1;
  ao_ref_init (&r1, ref1);
  return refs_may_alias_p_1 (&r1, ref2, true);
}

bool
refs_may_alias_p (tree ref1, tree ref2)
{
  ao_ref r1, r2;
  bool res;
  ao_ref_init (&r1, ref1);
  ao_ref_init (&r2, ref2);
  res = refs_may_alias_p_1 (&r1, &r2, true);
  if (res)
    ++alias_stats.refs_may_alias_p_may_alias;
  else
    ++alias_stats.refs_may_alias_p_no_alias;
  return res;
}
/* Returns true if there is an anti-dependence for the STORE that
   executes after the LOAD.  */

bool
refs_anti_dependent_p (tree load, tree store)
{
  ao_ref r1, r2;
  ao_ref_init (&r1, load);
  ao_ref_init (&r2, store);
  return refs_may_alias_p_1 (&r1, &r2, false);
}

/* Returns true if there is an output dependence for the stores
   STORE1 and STORE2.  */

bool
refs_output_dependent_p (tree store1, tree store2)
{
  ao_ref r1, r2;
  ao_ref_init (&r1, store1);
  ao_ref_init (&r2, store2);
  return refs_may_alias_p_1 (&r1, &r2, false);
}
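
/* Note (illustrative, not part of the original file): both dependence
   helpers pass false for TBAA_P.  A store can change the dynamic type
   of the memory it writes, so type-based disambiguation is not valid
   when ordering a store against an earlier load or store.  */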
/* If the call CALL may use the memory reference REF return true,
   otherwise return false.  */

static bool
ref_maybe_used_by_call_p_1 (gcall *call, ao_ref *ref)
{
  tree base, callee;
  unsigned i;
  int flags = gimple_call_flags (call);

  /* Const functions without a static chain do not implicitly use memory.  */
  if (!gimple_call_chain (call)
      && (flags & (ECF_CONST|ECF_NOVOPS)))
    goto process_args;

  base = ao_ref_base (ref);
  if (!base)
    return true;

  /* A call that is not without side-effects might involve volatile
     accesses and thus conflicts with all other volatile accesses.  */
  if (ref->volatile_p)
    return true;

  /* If the reference is based on a decl that is not aliased the call
     cannot possibly use it.  */
  if (DECL_P (base)
      && !may_be_aliased (base)
      /* But local statics can be used through recursion.  */
      && !is_global_var (base))
    goto process_args;
  callee = gimple_call_fndecl (call);

  /* Handle those builtin functions explicitly that do not act as
     escape points.  See tree-ssa-structalias.c:find_func_aliases
     for the list of builtins we might need to handle here.  */
  if (callee != NULL_TREE
      && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
	/* All the following functions read memory pointed to by
	   their second argument.  strcat/strncat additionally
	   read memory pointed to by the first argument.  */
	case BUILT_IN_STRCAT:
	case BUILT_IN_STRNCAT:
	  {
	    ao_ref dref;
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   NULL_TREE);
	    if (refs_may_alias_p_1 (&dref, ref, false))
	      return true;
	  }
	  /* FALLTHRU */
	case BUILT_IN_STRCPY:
	case BUILT_IN_STRNCPY:
	case BUILT_IN_MEMCPY:
	case BUILT_IN_MEMMOVE:
	case BUILT_IN_MEMPCPY:
	case BUILT_IN_STPCPY:
	case BUILT_IN_STPNCPY:
	case BUILT_IN_TM_MEMCPY:
	case BUILT_IN_TM_MEMMOVE:
	  {
	    ao_ref dref;
	    tree size = NULL_TREE;
	    if (gimple_call_num_args (call) == 3)
	      size = gimple_call_arg (call, 2);
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 1),
					   size);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }
	case BUILT_IN_STRCAT_CHK:
	case BUILT_IN_STRNCAT_CHK:
	  {
	    ao_ref dref;
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   NULL_TREE);
	    if (refs_may_alias_p_1 (&dref, ref, false))
	      return true;
	  }
	  /* FALLTHRU */
	case BUILT_IN_STRCPY_CHK:
	case BUILT_IN_STRNCPY_CHK:
	case BUILT_IN_MEMCPY_CHK:
	case BUILT_IN_MEMMOVE_CHK:
	case BUILT_IN_MEMPCPY_CHK:
	case BUILT_IN_STPCPY_CHK:
	case BUILT_IN_STPNCPY_CHK:
	  {
	    ao_ref dref;
	    tree size = NULL_TREE;
	    if (gimple_call_num_args (call) == 4)
	      size = gimple_call_arg (call, 2);
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 1),
					   size);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }
	case BUILT_IN_BCOPY:
	  {
	    ao_ref dref;
	    tree size = gimple_call_arg (call, 2);
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   size);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }

	/* The following functions read memory pointed to by their
	   first argument.  */
	CASE_BUILT_IN_TM_LOAD (1):
	CASE_BUILT_IN_TM_LOAD (2):
	CASE_BUILT_IN_TM_LOAD (4):
	CASE_BUILT_IN_TM_LOAD (8):
	CASE_BUILT_IN_TM_LOAD (FLOAT):
	CASE_BUILT_IN_TM_LOAD (DOUBLE):
	CASE_BUILT_IN_TM_LOAD (LDOUBLE):
	CASE_BUILT_IN_TM_LOAD (M64):
	CASE_BUILT_IN_TM_LOAD (M128):
	CASE_BUILT_IN_TM_LOAD (M256):
	case BUILT_IN_TM_LOG:
	case BUILT_IN_TM_LOG_1:
	case BUILT_IN_TM_LOG_2:
	case BUILT_IN_TM_LOG_4:
	case BUILT_IN_TM_LOG_8:
	case BUILT_IN_TM_LOG_FLOAT:
	case BUILT_IN_TM_LOG_DOUBLE:
	case BUILT_IN_TM_LOG_LDOUBLE:
	case BUILT_IN_TM_LOG_M64:
	case BUILT_IN_TM_LOG_M128:
	case BUILT_IN_TM_LOG_M256:
	  return ptr_deref_may_alias_ref_p_1 (gimple_call_arg (call, 0), ref);

	/* These read memory pointed to by the first argument.  */
	case BUILT_IN_STRDUP:
	case BUILT_IN_STRNDUP:
	case BUILT_IN_REALLOC:
	  {
	    ao_ref dref;
	    tree size = NULL_TREE;
	    if (gimple_call_num_args (call) == 2)
	      size = gimple_call_arg (call, 1);
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   size);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }
	/* These read memory pointed to by the first argument.  */
	case BUILT_IN_INDEX:
	case BUILT_IN_STRCHR:
	case BUILT_IN_STRRCHR:
	  {
	    ao_ref dref;
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   NULL_TREE);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }
	/* These read memory pointed to by the first argument with size
	   in the third argument.  */
	case BUILT_IN_MEMCHR:
	  {
	    ao_ref dref;
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   gimple_call_arg (call, 2));
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }
	/* These read memory pointed to by the first and second arguments.  */
	case BUILT_IN_STRSTR:
	case BUILT_IN_STRPBRK:
	  {
	    ao_ref dref;
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   NULL_TREE);
	    if (refs_may_alias_p_1 (&dref, ref, false))
	      return true;
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 1),
					   NULL_TREE);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }

	/* The following builtins do not read from memory.  */
	case BUILT_IN_FREE:
	case BUILT_IN_MALLOC:
	case BUILT_IN_POSIX_MEMALIGN:
	case BUILT_IN_ALIGNED_ALLOC:
	case BUILT_IN_CALLOC:
	case BUILT_IN_ALLOCA:
	case BUILT_IN_ALLOCA_WITH_ALIGN:
	case BUILT_IN_STACK_SAVE:
	case BUILT_IN_STACK_RESTORE:
	case BUILT_IN_MEMSET:
	case BUILT_IN_TM_MEMSET:
	case BUILT_IN_MEMSET_CHK:
	case BUILT_IN_FREXP:
	case BUILT_IN_FREXPF:
	case BUILT_IN_FREXPL:
	case BUILT_IN_GAMMA_R:
	case BUILT_IN_GAMMAF_R:
	case BUILT_IN_GAMMAL_R:
	case BUILT_IN_LGAMMA_R:
	case BUILT_IN_LGAMMAF_R:
	case BUILT_IN_LGAMMAL_R:
	case BUILT_IN_MODF:
	case BUILT_IN_MODFF:
	case BUILT_IN_MODFL:
	case BUILT_IN_REMQUO:
	case BUILT_IN_REMQUOF:
	case BUILT_IN_REMQUOL:
	case BUILT_IN_SINCOS:
	case BUILT_IN_SINCOSF:
	case BUILT_IN_SINCOSL:
	case BUILT_IN_ASSUME_ALIGNED:
	case BUILT_IN_VA_END:
	  return false;
	/* __sync_* builtins and some OpenMP builtins act as threading
	   barriers.  */
#undef DEF_SYNC_BUILTIN
#define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
#include "sync-builtins.def"
#undef DEF_SYNC_BUILTIN
	case BUILT_IN_GOMP_ATOMIC_START:
	case BUILT_IN_GOMP_ATOMIC_END:
	case BUILT_IN_GOMP_BARRIER:
	case BUILT_IN_GOMP_BARRIER_CANCEL:
	case BUILT_IN_GOMP_TASKWAIT:
	case BUILT_IN_GOMP_TASKGROUP_END:
	case BUILT_IN_GOMP_CRITICAL_START:
	case BUILT_IN_GOMP_CRITICAL_END:
	case BUILT_IN_GOMP_CRITICAL_NAME_START:
	case BUILT_IN_GOMP_CRITICAL_NAME_END:
	case BUILT_IN_GOMP_LOOP_END:
	case BUILT_IN_GOMP_LOOP_END_CANCEL:
	case BUILT_IN_GOMP_ORDERED_START:
	case BUILT_IN_GOMP_ORDERED_END:
	case BUILT_IN_GOMP_SECTIONS_END:
	case BUILT_IN_GOMP_SECTIONS_END_CANCEL:
	case BUILT_IN_GOMP_SINGLE_COPY_START:
	case BUILT_IN_GOMP_SINGLE_COPY_END:
	  return true;

	default:
	  /* Fallthru to general call handling.  */;
      }
  /* Check if base is a global static variable that is not read
     by the function.  */
  if (callee != NULL_TREE
      && TREE_CODE (base) == VAR_DECL
      && TREE_STATIC (base))
    {
      struct cgraph_node *node = cgraph_node::get (callee);
      bitmap not_read;

      /* FIXME: Callee can be an OMP builtin that does not have a call graph
	 node yet.  We should enforce that there are nodes for all decls in the
	 IL and remove this check instead.  */
      if (node
	  && (not_read = ipa_reference_get_not_read_global (node))
	  && bitmap_bit_p (not_read, DECL_UID (base)))
	goto process_args;
    }

  /* Check if the base variable is call-used.  */
  if (DECL_P (base))
    {
      if (pt_solution_includes (gimple_call_use_set (call), base))
	return true;
    }
  else if ((TREE_CODE (base) == MEM_REF
	    || TREE_CODE (base) == TARGET_MEM_REF)
	   && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
    {
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
      if (!pi)
	return true;

      if (pt_solutions_intersect (gimple_call_use_set (call), &pi->pt))
	return true;
    }
  else
    return true;

  /* Inspect call arguments for passed-by-value aliases.  */
process_args:
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree op = gimple_call_arg (call, i);
      int flags = gimple_call_arg_flags (call, i);

      if (flags & EAF_UNUSED)
	continue;

      if (TREE_CODE (op) == WITH_SIZE_EXPR)
	op = TREE_OPERAND (op, 0);

      if (TREE_CODE (op) != SSA_NAME
	  && !is_gimple_min_invariant (op))
	{
	  ao_ref r;
	  ao_ref_init (&r, op);
	  if (refs_may_alias_p_1 (&r, ref, true))
	    return true;
	}
    }

  return false;
}
static bool
ref_maybe_used_by_call_p (gcall *call, ao_ref *ref)
{
  bool res;
  res = ref_maybe_used_by_call_p_1 (call, ref);
  if (res)
    ++alias_stats.ref_maybe_used_by_call_p_may_alias;
  else
    ++alias_stats.ref_maybe_used_by_call_p_no_alias;
  return res;
}
/* If the statement STMT may use the memory reference REF return
   true, otherwise return false.  */

bool
ref_maybe_used_by_stmt_p (gimple stmt, ao_ref *ref)
{
  if (is_gimple_assign (stmt))
    {
      tree rhs;

      /* Memory references appear only in single assignments.  */
      if (!gimple_assign_single_p (stmt))
        return false;

      rhs = gimple_assign_rhs1 (stmt);
      if (is_gimple_reg (rhs)
          || is_gimple_min_invariant (rhs)
          || gimple_assign_rhs_code (stmt) == CONSTRUCTOR)
        return false;

      return refs_may_alias_p (rhs, ref);
    }
  else if (is_gimple_call (stmt))
    return ref_maybe_used_by_call_p (as_a <gcall *> (stmt), ref);
  else if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
    {
      tree retval = gimple_return_retval (return_stmt);
      if (retval
          && TREE_CODE (retval) != SSA_NAME
          && !is_gimple_min_invariant (retval)
          && refs_may_alias_p (retval, ref))
        return true;
      /* If REF escapes the function, the return acts as a use.  */
      tree base = ao_ref_base (ref);
      if (!base)
        ;
      else if (DECL_P (base))
        return is_global_var (base);
      else if (TREE_CODE (base) == MEM_REF
               || TREE_CODE (base) == TARGET_MEM_REF)
        return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
      return false;
    }

  return true;
}
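/* Overload of the above taking a plain reference tree REF.  */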
bool
ref_maybe_used_by_stmt_p (gimple stmt, tree ref)
{
  ao_ref r;
  ao_ref_init (&r, ref);
  return ref_maybe_used_by_stmt_p (stmt, &r);
}
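/* A typical client (hypothetical sketch, not code from this file):
   before forwarding a store to a later load, a pass can check that no
   intervening statement S uses the stored-to memory LHS_REF:

     if (ref_maybe_used_by_stmt_p (s, lhs_ref))
       give_up ();   // `give_up' and `lhs_ref' are placeholders.
*/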
/* If the call in statement CALL may clobber the memory reference REF
   return true, otherwise return false.  */

bool
call_may_clobber_ref_p_1 (gcall *call, ao_ref *ref)
{
  tree base;
  tree callee;

  /* If the call is pure or const it cannot clobber anything.  */
  if (gimple_call_flags (call)
      & (ECF_PURE|ECF_CONST|ECF_LOOPING_CONST_OR_PURE|ECF_NOVOPS))
    return false;
  if (gimple_call_internal_p (call))
    switch (gimple_call_internal_fn (call))
      {
      /* Treat these internal calls like ECF_PURE for aliasing;
         they don't write to any memory the program should care about.
         They have important other side-effects, and read memory,
         so they can't be ECF_NOVOPS.  */
      case IFN_UBSAN_NULL:
      case IFN_UBSAN_BOUNDS:
      case IFN_UBSAN_VPTR:
      case IFN_UBSAN_OBJECT_SIZE:
      case IFN_ASAN_CHECK:
        return false;
      default:
        break;
      }

  base = ao_ref_base (ref);
  if (!base)
    return true;

  if (TREE_CODE (base) == SSA_NAME
      || CONSTANT_CLASS_P (base))
    return false;

  /* A call that is not without side-effects might involve volatile
     accesses and thus conflicts with all other volatile accesses.  */
  if (ref->volatile_p)
    return true;

  /* If the reference is based on a decl that is not aliased the call
     cannot possibly clobber it.  */
  if (DECL_P (base)
      && !may_be_aliased (base)
      /* But local non-readonly statics can be modified through recursion
         or the call may implement a threading barrier which we must
         treat as may-def.  */
      && (TREE_READONLY (base)
          || !is_global_var (base)))
    return false;

  callee = gimple_call_fndecl (call);

  /* Explicitly handle those builtin functions that do not act as
     escape points.  See tree-ssa-structalias.c:find_func_aliases
     for the list of builtins we might need to handle here.  */
  if (callee != NULL_TREE
      && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
      /* All the following functions clobber memory pointed to by
         their first argument.  */
      case BUILT_IN_STRCPY:
      case BUILT_IN_STRNCPY:
      case BUILT_IN_MEMCPY:
      case BUILT_IN_MEMMOVE:
      case BUILT_IN_MEMPCPY:
      case BUILT_IN_STPCPY:
      case BUILT_IN_STPNCPY:
      case BUILT_IN_STRCAT:
      case BUILT_IN_STRNCAT:
      case BUILT_IN_MEMSET:
      case BUILT_IN_TM_MEMSET:
      CASE_BUILT_IN_TM_STORE (1):
      CASE_BUILT_IN_TM_STORE (2):
      CASE_BUILT_IN_TM_STORE (4):
      CASE_BUILT_IN_TM_STORE (8):
      CASE_BUILT_IN_TM_STORE (FLOAT):
      CASE_BUILT_IN_TM_STORE (DOUBLE):
      CASE_BUILT_IN_TM_STORE (LDOUBLE):
      CASE_BUILT_IN_TM_STORE (M64):
      CASE_BUILT_IN_TM_STORE (M128):
      CASE_BUILT_IN_TM_STORE (M256):
      case BUILT_IN_TM_MEMCPY:
      case BUILT_IN_TM_MEMMOVE:
        {
          ao_ref dref;
          tree size = NULL_TREE;
          /* Don't pass a size for strncat: the clobbered area is n + 1
             bytes starting at dest + strlen (dest), i.e. strlen (dest)
             + n + 1 bytes from dest, and strlen (dest) isn't known.  */
          if (gimple_call_num_args (call) == 3
              && DECL_FUNCTION_CODE (callee) != BUILT_IN_STRNCAT)
            size = gimple_call_arg (call, 2);
          ao_ref_init_from_ptr_and_size (&dref,
                                         gimple_call_arg (call, 0),
                                         size);
          return refs_may_alias_p_1 (&dref, ref, false);
        }
      case BUILT_IN_STRCPY_CHK:
      case BUILT_IN_STRNCPY_CHK:
      case BUILT_IN_MEMCPY_CHK:
      case BUILT_IN_MEMMOVE_CHK:
      case BUILT_IN_MEMPCPY_CHK:
      case BUILT_IN_STPCPY_CHK:
      case BUILT_IN_STPNCPY_CHK:
      case BUILT_IN_STRCAT_CHK:
      case BUILT_IN_STRNCAT_CHK:
      case BUILT_IN_MEMSET_CHK:
        {
          ao_ref dref;
          tree size = NULL_TREE;
          /* Don't pass a size for __strncat_chk: as for strncat above,
             the clobbered area is n + 1 bytes starting at
             dest + strlen (dest), and strlen (dest) isn't known.  */
          if (gimple_call_num_args (call) == 4
              && DECL_FUNCTION_CODE (callee) != BUILT_IN_STRNCAT_CHK)
            size = gimple_call_arg (call, 2);
          ao_ref_init_from_ptr_and_size (&dref,
                                         gimple_call_arg (call, 0),
                                         size);
          return refs_may_alias_p_1 (&dref, ref, false);
        }
      case BUILT_IN_BCOPY:
        {
          ao_ref dref;
          tree size = gimple_call_arg (call, 2);
          ao_ref_init_from_ptr_and_size (&dref,
                                         gimple_call_arg (call, 1),
                                         size);
          return refs_may_alias_p_1 (&dref, ref, false);
        }
      /* Allocating memory does not have any side-effects apart from
         being the definition point for the pointer.  */
      case BUILT_IN_MALLOC:
      case BUILT_IN_ALIGNED_ALLOC:
      case BUILT_IN_CALLOC:
      case BUILT_IN_STRDUP:
      case BUILT_IN_STRNDUP:
        /* Unix98 specifies that errno is set on allocation failure.  */
        if (flag_errno_math
            && targetm.ref_may_alias_errno (ref))
          return true;
        return false;
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_ALLOCA:
      case BUILT_IN_ALLOCA_WITH_ALIGN:
      case BUILT_IN_ASSUME_ALIGNED:
        return false;
      /* But posix_memalign stores a pointer into the memory pointed to
         by its first argument.  */
      case BUILT_IN_POSIX_MEMALIGN:
        {
          tree ptrptr = gimple_call_arg (call, 0);
          ao_ref dref;
          ao_ref_init_from_ptr_and_size (&dref, ptrptr,
                                         TYPE_SIZE_UNIT (ptr_type_node));
          return (refs_may_alias_p_1 (&dref, ref, false)
                  || (flag_errno_math
                      && targetm.ref_may_alias_errno (ref)));
        }
      /* Freeing memory kills the pointed-to memory.  More importantly
         the call has to serve as a barrier for moving loads and stores
         across it.  */
      case BUILT_IN_FREE:
      case BUILT_IN_VA_END:
        {
          tree ptr = gimple_call_arg (call, 0);
          return ptr_deref_may_alias_ref_p_1 (ptr, ref);
        }
      /* Realloc serves both as allocation point and deallocation point.  */
      case BUILT_IN_REALLOC:
        {
          tree ptr = gimple_call_arg (call, 0);
          /* Unix98 specifies that errno is set on allocation failure.  */
          return ((flag_errno_math
                   && targetm.ref_may_alias_errno (ref))
                  || ptr_deref_may_alias_ref_p_1 (ptr, ref));
        }
      case BUILT_IN_GAMMA_R:
      case BUILT_IN_GAMMAF_R:
      case BUILT_IN_GAMMAL_R:
      case BUILT_IN_LGAMMA_R:
      case BUILT_IN_LGAMMAF_R:
      case BUILT_IN_LGAMMAL_R:
        {
          tree out = gimple_call_arg (call, 1);
          if (ptr_deref_may_alias_ref_p_1 (out, ref))
            return true;
          if (flag_errno_math)
            break;
          return false;
        }
      case BUILT_IN_FREXP:
      case BUILT_IN_FREXPF:
      case BUILT_IN_FREXPL:
      case BUILT_IN_MODF:
      case BUILT_IN_MODFF:
      case BUILT_IN_MODFL:
        {
          tree out = gimple_call_arg (call, 1);
          return ptr_deref_may_alias_ref_p_1 (out, ref);
        }
      case BUILT_IN_REMQUO:
      case BUILT_IN_REMQUOF:
      case BUILT_IN_REMQUOL:
        {
          tree out = gimple_call_arg (call, 2);
          if (ptr_deref_may_alias_ref_p_1 (out, ref))
            return true;
          if (flag_errno_math)
            break;
          return false;
        }
      case BUILT_IN_SINCOS:
      case BUILT_IN_SINCOSF:
      case BUILT_IN_SINCOSL:
        {
          tree sin = gimple_call_arg (call, 1);
          tree cos = gimple_call_arg (call, 2);
          return (ptr_deref_may_alias_ref_p_1 (sin, ref)
                  || ptr_deref_may_alias_ref_p_1 (cos, ref));
        }
      /* __sync_* builtins and some OpenMP builtins act as threading
         barriers.  */
#undef DEF_SYNC_BUILTIN
#define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
#include "sync-builtins.def"
#undef DEF_SYNC_BUILTIN
      case BUILT_IN_GOMP_ATOMIC_START:
      case BUILT_IN_GOMP_ATOMIC_END:
      case BUILT_IN_GOMP_BARRIER:
      case BUILT_IN_GOMP_BARRIER_CANCEL:
      case BUILT_IN_GOMP_TASKWAIT:
      case BUILT_IN_GOMP_TASKGROUP_END:
      case BUILT_IN_GOMP_CRITICAL_START:
      case BUILT_IN_GOMP_CRITICAL_END:
      case BUILT_IN_GOMP_CRITICAL_NAME_START:
      case BUILT_IN_GOMP_CRITICAL_NAME_END:
      case BUILT_IN_GOMP_LOOP_END:
      case BUILT_IN_GOMP_LOOP_END_CANCEL:
      case BUILT_IN_GOMP_ORDERED_START:
      case BUILT_IN_GOMP_ORDERED_END:
      case BUILT_IN_GOMP_SECTIONS_END:
      case BUILT_IN_GOMP_SECTIONS_END_CANCEL:
      case BUILT_IN_GOMP_SINGLE_COPY_START:
      case BUILT_IN_GOMP_SINGLE_COPY_END:
        return true;
      default:
        /* Fallthru to general call handling.  */;
      }
  /* Check if base is a global static variable that is not written
     by the function.  */
  if (callee != NULL_TREE
      && TREE_CODE (base) == VAR_DECL
      && TREE_STATIC (base))
    {
      struct cgraph_node *node = cgraph_node::get (callee);
      bitmap not_written;

      if (node
          && (not_written = ipa_reference_get_not_written_global (node))
          && bitmap_bit_p (not_written, DECL_UID (base)))
        return false;
    }

  /* Check if the base variable is call-clobbered.  */
  if (DECL_P (base))
    return pt_solution_includes (gimple_call_clobber_set (call), base);
  else if ((TREE_CODE (base) == MEM_REF
            || TREE_CODE (base) == TARGET_MEM_REF)
           && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
    {
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
      if (!pi)
        return true;

      return pt_solutions_intersect (gimple_call_clobber_set (call), &pi->pt);
    }

  return true;
}
/* If the call in statement CALL may clobber the memory reference REF
   return true, otherwise return false.  */

bool
call_may_clobber_ref_p (gcall *call, tree ref)
{
  bool res;
  ao_ref r;
  ao_ref_init (&r, ref);
  res = call_may_clobber_ref_p_1 (call, &r);
  if (res)
    ++alias_stats.call_may_clobber_ref_p_may_alias;
  else
    ++alias_stats.call_may_clobber_ref_p_no_alias;
  return res;
}
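/* Hypothetical usage sketch (not code from this file): a pass keeping a
   table of known memory values would conservatively drop the entry for
   a reference tree VAL_REF whenever it crosses a call:

     if (call_may_clobber_ref_p (as_a <gcall *> (stmt), val_ref))
       invalidate_entry (...);   // `invalidate_entry' is a placeholder.
*/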
/* If the statement STMT may clobber the memory reference REF return true,
   otherwise return false.  */

bool
stmt_may_clobber_ref_p_1 (gimple stmt, ao_ref *ref)
{
  if (is_gimple_call (stmt))
    {
      tree lhs = gimple_call_lhs (stmt);
      if (lhs
          && TREE_CODE (lhs) != SSA_NAME)
        {
          ao_ref r;
          ao_ref_init (&r, lhs);
          if (refs_may_alias_p_1 (ref, &r, true))
            return true;
        }

      return call_may_clobber_ref_p_1 (as_a <gcall *> (stmt), ref);
    }
  else if (gimple_assign_single_p (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);
      if (TREE_CODE (lhs) != SSA_NAME)
        {
          ao_ref r;
          ao_ref_init (&r, lhs);
          return refs_may_alias_p_1 (ref, &r, true);
        }
    }
  else if (gimple_code (stmt) == GIMPLE_ASM)
    return true;

  return false;
}
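/* Wrapper around stmt_may_clobber_ref_p_1 taking a plain reference
   tree REF.  */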
bool
stmt_may_clobber_ref_p (gimple stmt, tree ref)
{
  ao_ref r;
  ao_ref_init (&r, ref);
  return stmt_may_clobber_ref_p_1 (stmt, &r);
}
/* If STMT kills the memory reference REF return true, otherwise
   return false.  */

bool
stmt_kills_ref_p (gimple stmt, ao_ref *ref)
{
  if (!ao_ref_base (ref))
    return false;

  if (gimple_has_lhs (stmt)
      && TREE_CODE (gimple_get_lhs (stmt)) != SSA_NAME
      /* The assignment is not necessarily carried out if it can throw
         and we can catch it in the current function where we could inspect
         the previous value.
         ???  We only need to care about the RHS throwing.  For aggregate
         assignments or similar calls and non-call exceptions the LHS
         might throw as well.  */
      && !stmt_can_throw_internal (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      /* If LHS is literally a base of the access we are done.  */
      if (ref->ref)
        {
          tree base = ref->ref;
          if (handled_component_p (base))
            {
              tree saved_lhs0 = NULL_TREE;
              if (handled_component_p (lhs))
                {
                  saved_lhs0 = TREE_OPERAND (lhs, 0);
                  TREE_OPERAND (lhs, 0) = integer_zero_node;
                }
              do
                {
                  /* Just compare the outermost handled component; if
                     they are equal we have found a possible common
                     base.  */
                  tree saved_base0 = TREE_OPERAND (base, 0);
                  TREE_OPERAND (base, 0) = integer_zero_node;
                  bool res = operand_equal_p (lhs, base, 0);
                  TREE_OPERAND (base, 0) = saved_base0;
                  if (res)
                    break;
                  /* Otherwise drop handled components of the access.  */
                  base = saved_base0;
                }
              while (handled_component_p (base));
              if (saved_lhs0)
                TREE_OPERAND (lhs, 0) = saved_lhs0;
            }
          /* Finally check if LHS is equal to the found base candidate
             of the access.  */
          if (operand_equal_p (lhs, base, 0))
            return true;
        }
      /* Now look for non-literal equal bases, restricting ourselves
         to constant offset and size.  */
      /* For a must-alias check we need to be able to constrain
         the access properly.  */
      if (ref->max_size == -1)
        return false;
      HOST_WIDE_INT size, offset, max_size, ref_offset = ref->offset;
      tree base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
      /* We can get MEM[symbol: sZ, index: D.8862_1] here,
         so base == ref->base does not always hold.  */
      if (base != ref->base)
        {
          /* If both base and ref->base are MEM_REFs, only compare the
             first operand, and if the second operand isn't equal constant,
             try to add the offsets into offset and ref_offset.  */
          if (TREE_CODE (base) == MEM_REF && TREE_CODE (ref->base) == MEM_REF
              && TREE_OPERAND (base, 0) == TREE_OPERAND (ref->base, 0))
            {
              if (!tree_int_cst_equal (TREE_OPERAND (base, 1),
                                       TREE_OPERAND (ref->base, 1)))
                {
                  offset_int off1 = mem_ref_offset (base);
                  off1 = wi::lshift (off1, LOG2_BITS_PER_UNIT);
                  off1 += offset;
                  offset_int off2 = mem_ref_offset (ref->base);
                  off2 = wi::lshift (off2, LOG2_BITS_PER_UNIT);
                  off2 += ref_offset;
                  if (wi::fits_shwi_p (off1) && wi::fits_shwi_p (off2))
                    {
                      offset = off1.to_shwi ();
                      ref_offset = off2.to_shwi ();
                    }
                  else
                    size = -1;
                }
            }
          else
            size = -1;
        }
      /* For a must-alias check we need to be able to constrain
         the access properly.  */
      if (size != -1 && size == max_size)
        {
          if (offset <= ref_offset
              && offset + size >= ref_offset + ref->max_size)
            return true;
        }
    }
  if (is_gimple_call (stmt))
    {
      tree callee = gimple_call_fndecl (stmt);
      if (callee != NULL_TREE
          && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
        switch (DECL_FUNCTION_CODE (callee))
          {
          case BUILT_IN_FREE:
            {
              tree ptr = gimple_call_arg (stmt, 0);
              tree base = ao_ref_base (ref);
              if (base && TREE_CODE (base) == MEM_REF
                  && TREE_OPERAND (base, 0) == ptr)
                return true;
              break;
            }

          case BUILT_IN_MEMCPY:
          case BUILT_IN_MEMPCPY:
          case BUILT_IN_MEMMOVE:
          case BUILT_IN_MEMSET:
          case BUILT_IN_MEMCPY_CHK:
          case BUILT_IN_MEMPCPY_CHK:
          case BUILT_IN_MEMMOVE_CHK:
          case BUILT_IN_MEMSET_CHK:
            {
              /* For a must-alias check we need to be able to constrain
                 the access properly.  */
              if (ref->max_size == -1)
                return false;
              tree dest = gimple_call_arg (stmt, 0);
              tree len = gimple_call_arg (stmt, 2);
              if (!tree_fits_shwi_p (len))
                return false;
              tree rbase = ref->base;
              offset_int roffset = ref->offset;
              ao_ref dref;
              ao_ref_init_from_ptr_and_size (&dref, dest, len);
              tree base = ao_ref_base (&dref);
              offset_int offset = dref.offset;
              if (!base || dref.size == -1)
                return false;
              if (TREE_CODE (base) == MEM_REF)
                {
                  if (TREE_CODE (rbase) != MEM_REF)
                    return false;
                  // Compare pointers.
                  offset += wi::lshift (mem_ref_offset (base),
                                        LOG2_BITS_PER_UNIT);
                  roffset += wi::lshift (mem_ref_offset (rbase),
                                         LOG2_BITS_PER_UNIT);
                  base = TREE_OPERAND (base, 0);
                  rbase = TREE_OPERAND (rbase, 0);
                }
              if (base == rbase
                  && wi::les_p (offset, roffset)
                  && wi::les_p (roffset + ref->max_size,
                                offset + wi::lshift (wi::to_offset (len),
                                                     LOG2_BITS_PER_UNIT)))
                return true;
              break;
            }

          case BUILT_IN_VA_END:
            {
              tree ptr = gimple_call_arg (stmt, 0);
              if (TREE_CODE (ptr) == ADDR_EXPR)
                {
                  tree base = ao_ref_base (ref);
                  if (TREE_OPERAND (ptr, 0) == base)
                    return true;
                }
              break;
            }

          default:;
          }
    }
  return false;
}
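/* Overload of the above taking a plain reference tree REF.  */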
bool
stmt_kills_ref_p (gimple stmt, tree ref)
{
  ao_ref r;
  ao_ref_init (&r, ref);
  return stmt_kills_ref_p (stmt, &r);
}
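/* Note that stmt_kills_ref_p is the must-alias counterpart of
   stmt_may_clobber_ref_p: it answers "does STMT certainly overwrite all
   of REF" rather than "might STMT touch REF".  Speaking schematically, a
   dead-store-elimination style client would consider an earlier store to
   REF dead once a later statement kills REF and no statement in between
   has ref_maybe_used_by_stmt_p true for REF.  */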
/* Walk the virtual use-def chain of VUSE until hitting the virtual operand
   TARGET or a statement clobbering the memory reference REF, in which
   case false is returned.  The walk starts with VUSE, one argument of PHI.  */

static bool
maybe_skip_until (gimple phi, tree target, ao_ref *ref,
                  tree vuse, unsigned int *cnt, bitmap *visited,
                  bool abort_on_visited,
                  void *(*translate)(ao_ref *, tree, void *, bool),
                  void *data)
{
  basic_block bb = gimple_bb (phi);

  if (!*visited)
    *visited = BITMAP_ALLOC (NULL);

  bitmap_set_bit (*visited, SSA_NAME_VERSION (PHI_RESULT (phi)));

  /* Walk until we hit the target.  */
  while (vuse != target)
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
      /* Recurse for PHI nodes.  */
      if (gimple_code (def_stmt) == GIMPLE_PHI)
        {
          /* An already visited PHI node ends the walk successfully.  */
          if (bitmap_bit_p (*visited, SSA_NAME_VERSION (PHI_RESULT (def_stmt))))
            return !abort_on_visited;
          vuse = get_continuation_for_phi (def_stmt, ref, cnt,
                                           visited, abort_on_visited,
                                           translate, data);
          if (!vuse)
            return false;
          continue;
        }
      else if (gimple_nop_p (def_stmt))
        return false;
      else
        {
          /* A clobbering statement or the end of the IL ends the walk
             with failure.  */
          ++*cnt;
          if (stmt_may_clobber_ref_p_1 (def_stmt, ref))
            {
              if (translate
                  && (*translate) (ref, vuse, data, true) == NULL)
                ;
              else
                return false;
            }
        }
      /* If we reach a new basic block, see if we already skipped it
         in a previous walk that ended successfully.  */
      if (gimple_bb (def_stmt) != bb)
        {
          if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (vuse)))
            return !abort_on_visited;
          bb = gimple_bb (def_stmt);
        }
      vuse = gimple_vuse (def_stmt);
    }
  return true;
}
/* For two PHI arguments ARG0 and ARG1 try to skip non-aliasing code
   until we hit the phi argument definition that dominates the other one.
   Return that, or NULL_TREE if there is no such definition.  */

static tree
get_continuation_for_phi_1 (gimple phi, tree arg0, tree arg1,
                            ao_ref *ref, unsigned int *cnt,
                            bitmap *visited, bool abort_on_visited,
                            void *(*translate)(ao_ref *, tree, void *, bool),
                            void *data)
{
  gimple def0 = SSA_NAME_DEF_STMT (arg0);
  gimple def1 = SSA_NAME_DEF_STMT (arg1);
  tree common_vuse;

  if (arg0 == arg1)
    return arg0;
  else if (gimple_nop_p (def0)
           || (!gimple_nop_p (def1)
               && dominated_by_p (CDI_DOMINATORS,
                                  gimple_bb (def1), gimple_bb (def0))))
    {
      if (maybe_skip_until (phi, arg0, ref, arg1, cnt,
                            visited, abort_on_visited, translate, data))
        return arg0;
    }
  else if (gimple_nop_p (def1)
           || dominated_by_p (CDI_DOMINATORS,
                              gimple_bb (def0), gimple_bb (def1)))
    {
      if (maybe_skip_until (phi, arg1, ref, arg0, cnt,
                            visited, abort_on_visited, translate, data))
        return arg1;
    }
  /* Special case of a diamond:
       MEM_1 = ...
       goto (cond) ? L1 : L2
       L1: store1 = ...  #MEM_2 = vuse(MEM_1)
           goto L3
       L2: store2 = ...  #MEM_3 = vuse(MEM_1)
       L3: MEM_4 = PHI<MEM_2, MEM_3>
     We were called with the PHI at L3, MEM_2 and MEM_3 don't
     dominate each other, but still we can easily skip this PHI node
     if we recognize that the vuse MEM operand is the same for both,
     and that we can skip both statements (they don't clobber us).
     This is still linear.  Don't use maybe_skip_until, which might
     be slow.  */
  else if ((common_vuse = gimple_vuse (def0))
           && common_vuse == gimple_vuse (def1))
    {
      *cnt += 2;
      if ((!stmt_may_clobber_ref_p_1 (def0, ref)
           || (translate
               && (*translate) (ref, arg0, data, true) == NULL))
          && (!stmt_may_clobber_ref_p_1 (def1, ref)
              || (translate
                  && (*translate) (ref, arg1, data, true) == NULL)))
        return common_vuse;
    }

  return NULL_TREE;
}
/* Starting from a PHI node for the virtual operand of the memory reference
   REF find a continuation virtual operand that allows us to continue walking
   statements dominating PHI, skipping only statements that cannot possibly
   clobber REF.  Increments *CNT for each alias disambiguation done.
   Returns NULL_TREE if no suitable virtual operand can be found.  */

tree
get_continuation_for_phi (gimple phi, ao_ref *ref,
                          unsigned int *cnt, bitmap *visited,
                          bool abort_on_visited,
                          void *(*translate)(ao_ref *, tree, void *, bool),
                          void *data)
{
  unsigned nargs = gimple_phi_num_args (phi);

  /* We can simply look through a single-argument PHI.  */
  if (nargs == 1)
    return PHI_ARG_DEF (phi, 0);

  /* For two or more arguments try to pairwise skip non-aliasing code
     until we hit the phi argument definition that dominates the other one.  */
  else if (nargs >= 2)
    {
      tree arg0, arg1;
      unsigned i;

      /* Find a candidate for the virtual operand whose definition
         dominates those of all others.  */
      arg0 = PHI_ARG_DEF (phi, 0);
      if (!SSA_NAME_IS_DEFAULT_DEF (arg0))
        for (i = 1; i < nargs; ++i)
          {
            arg1 = PHI_ARG_DEF (phi, i);
            if (SSA_NAME_IS_DEFAULT_DEF (arg1))
              {
                arg0 = arg1;
                break;
              }
            if (dominated_by_p (CDI_DOMINATORS,
                                gimple_bb (SSA_NAME_DEF_STMT (arg0)),
                                gimple_bb (SSA_NAME_DEF_STMT (arg1))))
              arg0 = arg1;
          }

      /* Then pairwise reduce against the found candidate.  */
      for (i = 0; i < nargs; ++i)
        {
          arg1 = PHI_ARG_DEF (phi, i);
          arg0 = get_continuation_for_phi_1 (phi, arg0, arg1, ref,
                                             cnt, visited, abort_on_visited,
                                             translate, data);
          if (!arg0)
            return NULL_TREE;
        }

      return arg0;
    }

  return NULL_TREE;
}
/* Based on the memory reference REF and its virtual use VUSE call
   WALKER for each virtual use that is equivalent to VUSE, including VUSE
   itself.  That is, for each virtual use whose defining statement does
   not clobber REF.

   WALKER is called with REF, the current virtual use and DATA.  If
   WALKER returns non-NULL the walk stops and its result is returned.
   At the end of a non-successful walk NULL is returned.

   TRANSLATE, if non-NULL, is called with a pointer to REF, the virtual
   use whose defining statement may clobber REF, and DATA.
   If TRANSLATE returns (void *)-1 the walk stops and NULL is returned.
   If TRANSLATE returns non-NULL the walk stops and its result is returned.
   If TRANSLATE returns NULL the walk continues and TRANSLATE is supposed
   to adjust REF and *DATA to make that valid.

   VALUEIZE, if non-NULL, is called with the next VUSE that is considered
   and its return value is substituted for that VUSE.  This can be used to
   implement optimistic value-numbering, for example.  Note that the
   VUSE argument is assumed to be valueized already.

   TODO: Cache the vector of equivalent vuses per ref, vuse pair.  */
void *
walk_non_aliased_vuses (ao_ref *ref, tree vuse,
                        void *(*walker)(ao_ref *, tree, unsigned int, void *),
                        void *(*translate)(ao_ref *, tree, void *, bool),
                        tree (*valueize)(tree),
                        void *data)
{
  bitmap visited = NULL;
  void *res;
  unsigned int cnt = 0;
  bool translated = false;

  timevar_push (TV_ALIAS_STMT_WALK);

  do
    {
      gimple def_stmt;

      /* ???  Do we want to account this to TV_ALIAS_STMT_WALK?  */
      res = (*walker) (ref, vuse, cnt, data);
      /* Abort walk.  */
      if (res == (void *)-1)
        {
          res = NULL;
          break;
        }
      /* Lookup succeeded.  */
      else if (res != NULL)
        break;

      if (valueize)
        vuse = valueize (vuse);
      def_stmt = SSA_NAME_DEF_STMT (vuse);
      if (gimple_nop_p (def_stmt))
        break;
      else if (gimple_code (def_stmt) == GIMPLE_PHI)
        vuse = get_continuation_for_phi (def_stmt, ref, &cnt,
                                         &visited, translated, translate, data);
      else
        {
          cnt++;
          if (stmt_may_clobber_ref_p_1 (def_stmt, ref))
            {
              if (!translate)
                break;
              res = (*translate) (ref, vuse, data, false);
              /* Failed lookup and translation.  */
              if (res == (void *)-1)
                {
                  res = NULL;
                  break;
                }
              /* Lookup succeeded.  */
              else if (res != NULL)
                break;
              /* Translation succeeded, continue walking.  */
              translated = true;
            }
          vuse = gimple_vuse (def_stmt);
        }
    }
  while (vuse);

  if (visited)
    BITMAP_FREE (visited);

  timevar_pop (TV_ALIAS_STMT_WALK);

  return res;
}
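/* A minimal sketch of a WALKER callback in the style of the
   value-numbering users of walk_non_aliased_vuses.  `my_table' and
   `my_table_lookup' are hypothetical names, not interfaces of this file;
   the cut-off via PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS mirrors what
   the SCC value-numberer does.

     static void *
     lookup_walker (ao_ref *ref, tree vuse, unsigned int cnt, void *data)
     {
       struct my_table *tab = (struct my_table *) data;
       if (cnt > (unsigned) PARAM_VALUE
                   (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS))
         return (void *) -1;   // Too many disambiguations: abort the walk.
       return my_table_lookup (tab, ref, vuse);
     }
*/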
/* Based on the memory reference REF call WALKER for each vdef whose
   defining statement may clobber REF, starting with VDEF.  If REF
   is NULL_TREE, each defining statement is visited.

   WALKER is called with REF, the current vdef and DATA.  If WALKER
   returns true the walk is stopped, otherwise it continues.

   If function entry is reached, FUNCTION_ENTRY_REACHED is set to true.
   The pointer may be NULL, in which case this information is not tracked.

   At PHI nodes walk_aliased_vdefs forks into one walk for each
   PHI argument (but only one walk continues on merge points).

   The function returns the number of statements walked.  */
static unsigned int
walk_aliased_vdefs_1 (ao_ref *ref, tree vdef,
                      bool (*walker)(ao_ref *, tree, void *), void *data,
                      bitmap *visited, unsigned int cnt,
                      bool *function_entry_reached)
{
  do
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (vdef);

      if (*visited
          && !bitmap_set_bit (*visited, SSA_NAME_VERSION (vdef)))
        return cnt;

      if (gimple_nop_p (def_stmt))
        {
          if (function_entry_reached)
            *function_entry_reached = true;
          return cnt;
        }
      else if (gimple_code (def_stmt) == GIMPLE_PHI)
        {
          unsigned i;
          if (!*visited)
            *visited = BITMAP_ALLOC (NULL);
          for (i = 0; i < gimple_phi_num_args (def_stmt); ++i)
            cnt += walk_aliased_vdefs_1 (ref, gimple_phi_arg_def (def_stmt, i),
                                         walker, data, visited, 0,
                                         function_entry_reached);
          return cnt;
        }

      /* ???  Do we want to account this to TV_ALIAS_STMT_WALK?  */
      cnt++;
      if ((!ref
           || stmt_may_clobber_ref_p_1 (def_stmt, ref))
          && (*walker) (ref, vdef, data))
        return cnt;

      vdef = gimple_vuse (def_stmt);
    }
  while (1);
}
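/* Entry point for the vdef walk described before walk_aliased_vdefs_1
   above.  VISITED may be NULL, in which case a local bitmap is
   allocated and freed here.  */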
unsigned int
walk_aliased_vdefs (ao_ref *ref, tree vdef,
                    bool (*walker)(ao_ref *, tree, void *), void *data,
                    bitmap *visited,
                    bool *function_entry_reached)
{
  bitmap local_visited = NULL;
  unsigned int ret;

  timevar_push (TV_ALIAS_STMT_WALK);

  if (function_entry_reached)
    *function_entry_reached = false;

  ret = walk_aliased_vdefs_1 (ref, vdef, walker, data,
                              visited ? visited : &local_visited, 0,
                              function_entry_reached);
  if (local_visited)
    BITMAP_FREE (local_visited);

  timevar_pop (TV_ALIAS_STMT_WALK);

  return ret;
}
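/* Hypothetical usage sketch (not code from this file): detect whether
   anything on the virtual use-def chain starting at VUSE may clobber
   the reference R.

     static bool
     found_clobber_cb (ao_ref *ref ATTRIBUTE_UNUSED,
                       tree vdef ATTRIBUTE_UNUSED, void *data)
     {
       *(bool *) data = true;
       return true;   // Stop at the first clobbering vdef.
     }

     ...
     bool found = false;
     walk_aliased_vdefs (&r, vuse, found_clobber_cb, &found, NULL, NULL);
*/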