/* Alias analysis for trees.
   Copyright (C) 2004-2013 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "tm_p.h"
#include "target.h"
#include "basic-block.h"
#include "timevar.h"	/* for TV_ALIAS_STMT_WALK */
#include "ggc.h"
#include "langhooks.h"
#include "flags.h"
#include "function.h"
#include "tree-pretty-print.h"
#include "dumpfile.h"
#include "gimple.h"
#include "tree-flow.h"
#include "tree-inline.h"
#include "params.h"
#include "vec.h"
#include "bitmap.h"
#include "pointer-set.h"
#include "alloc-pool.h"
#include "tree-ssa-alias.h"

/* Broad overview of how alias analysis on gimple works:

   Statements clobbering or using memory are linked through the
   virtual operand factored use-def chain.  The virtual operand
   is unique per function, its symbol is accessible via gimple_vop (cfun).
   Virtual operands are used for efficiently walking memory statements
   in the gimple IL and are useful for things like value-numbering as
   a generation count for memory references.

   SSA_NAME pointers may have associated points-to information
   accessible via the SSA_NAME_PTR_INFO macro.  Flow-insensitive
   points-to information is (re-)computed by the TODO_rebuild_alias
   pass manager todo.  Points-to information is also used for more
   precise tracking of call-clobbered and call-used variables and
   related disambiguations.

   This file contains functions for disambiguating memory references,
   the so-called alias oracle, and tools for walking the gimple IL.

   The main alias-oracle entry-points are

   bool stmt_may_clobber_ref_p (gimple, tree)

     This function queries if a statement may invalidate (parts of)
     the memory designated by the reference tree argument.

   bool ref_maybe_used_by_stmt_p (gimple, tree)

     This function queries if a statement may need (parts of) the
     memory designated by the reference tree argument.

   There are variants of these functions that only handle the call
   part of a statement, call_may_clobber_ref_p and ref_maybe_used_by_call_p.
   Note that these do not disambiguate against a possible call lhs.

   bool refs_may_alias_p (tree, tree)

     This function tries to disambiguate two reference trees.

   bool ptr_deref_may_alias_global_p (tree)

     This function queries if dereferencing a pointer variable may
     alias global memory.

   More low-level disambiguators are available and documented in
   this file.  Low-level disambiguators dealing with points-to
   information are in tree-ssa-structalias.c.  */

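/* Illustrative sketch (not itself part of the oracle): a pass that wants
   to move a load of *REF across a statement STMT would typically ask

     if (!stmt_may_clobber_ref_p (stmt, ref)
         && !ref_maybe_used_by_stmt_p (stmt, ref))
       ... STMT neither writes nor reads *REF, the motion is safe ...

   All of these predicates answer conservatively, i.e. they err on the
   side of returning true.  */
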
/* Query statistics for the different low-level disambiguators.
   A high-level query may trigger multiple of them.  */

static struct {
  unsigned HOST_WIDE_INT refs_may_alias_p_may_alias;
  unsigned HOST_WIDE_INT refs_may_alias_p_no_alias;
  unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_may_alias;
  unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_no_alias;
  unsigned HOST_WIDE_INT call_may_clobber_ref_p_may_alias;
  unsigned HOST_WIDE_INT call_may_clobber_ref_p_no_alias;
} alias_stats;

void
dump_alias_stats (FILE *s)
{
  fprintf (s, "\nAlias oracle query stats:\n");
  fprintf (s, "  refs_may_alias_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.refs_may_alias_p_no_alias,
	   alias_stats.refs_may_alias_p_no_alias
	   + alias_stats.refs_may_alias_p_may_alias);
  fprintf (s, "  ref_maybe_used_by_call_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.ref_maybe_used_by_call_p_no_alias,
	   alias_stats.ref_maybe_used_by_call_p_no_alias
	   + alias_stats.ref_maybe_used_by_call_p_may_alias);
  fprintf (s, "  call_may_clobber_ref_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.call_may_clobber_ref_p_no_alias,
	   alias_stats.call_may_clobber_ref_p_no_alias
	   + alias_stats.call_may_clobber_ref_p_may_alias);
}

/* Return true if dereferencing PTR may alias a global variable.  */

bool
ptr_deref_may_alias_global_p (tree ptr)
{
  struct ptr_info_def *pi;

  /* If we end up with a pointer constant here, it may point
     to global memory.  */
  if (TREE_CODE (ptr) != SSA_NAME)
    return true;

  pi = SSA_NAME_PTR_INFO (ptr);

  /* If we do not have points-to information for this variable,
     we have to punt.  */
  if (!pi)
    return true;

  /* ??? This does not use TBAA to prune globals ptr may not access.  */
  return pt_solution_includes_global (&pi->pt);
}

/* Return true if dereferencing PTR may alias DECL.
   The caller is responsible for applying TBAA to see if PTR
   may access DECL at all.  */

static bool
ptr_deref_may_alias_decl_p (tree ptr, tree decl)
{
  struct ptr_info_def *pi;

  /* Conversions are irrelevant for points-to information and
     data-dependence analysis can feed us those.  */
  STRIP_NOPS (ptr);

  /* Anything we do not explicitly handle aliases.  */
  if ((TREE_CODE (ptr) != SSA_NAME
       && TREE_CODE (ptr) != ADDR_EXPR
       && TREE_CODE (ptr) != POINTER_PLUS_EXPR)
      || !POINTER_TYPE_P (TREE_TYPE (ptr))
      || (TREE_CODE (decl) != VAR_DECL
          && TREE_CODE (decl) != PARM_DECL
          && TREE_CODE (decl) != RESULT_DECL))
    return true;

  /* Disregard pointer offsetting.  */
  if (TREE_CODE (ptr) == POINTER_PLUS_EXPR)
    {
      do
        {
          ptr = TREE_OPERAND (ptr, 0);
        }
      while (TREE_CODE (ptr) == POINTER_PLUS_EXPR);
      return ptr_deref_may_alias_decl_p (ptr, decl);
    }

  /* ADDR_EXPR pointers either just offset another pointer or directly
     specify the pointed-to set.  */
  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr, 0));
      if (base
          && (TREE_CODE (base) == MEM_REF
              || TREE_CODE (base) == TARGET_MEM_REF))
        ptr = TREE_OPERAND (base, 0);
      else if (base
               && DECL_P (base))
        return base == decl;
      else if (base
               && CONSTANT_CLASS_P (base))
        return false;
      else
        return true;
    }

  /* Non-aliased variables cannot be pointed to.  */
  if (!may_be_aliased (decl))
    return false;

  /* If we do not have useful points-to information for this pointer
     we cannot disambiguate anything else.  */
  pi = SSA_NAME_PTR_INFO (ptr);
  if (!pi)
    return true;

  return pt_solution_includes (&pi->pt, decl);
}

/* Return true if dereferenced PTR1 and PTR2 may alias.
   The caller is responsible for applying TBAA to see if accesses
   through PTR1 and PTR2 may conflict at all.  */

bool
ptr_derefs_may_alias_p (tree ptr1, tree ptr2)
{
  struct ptr_info_def *pi1, *pi2;

  /* Conversions are irrelevant for points-to information and
     data-dependence analysis can feed us those.  */
  STRIP_NOPS (ptr1);
  STRIP_NOPS (ptr2);

  /* Disregard pointer offsetting.  */
  if (TREE_CODE (ptr1) == POINTER_PLUS_EXPR)
    {
      do
        {
          ptr1 = TREE_OPERAND (ptr1, 0);
        }
      while (TREE_CODE (ptr1) == POINTER_PLUS_EXPR);
      return ptr_derefs_may_alias_p (ptr1, ptr2);
    }
  if (TREE_CODE (ptr2) == POINTER_PLUS_EXPR)
    {
      do
        {
          ptr2 = TREE_OPERAND (ptr2, 0);
        }
      while (TREE_CODE (ptr2) == POINTER_PLUS_EXPR);
      return ptr_derefs_may_alias_p (ptr1, ptr2);
    }

  /* ADDR_EXPR pointers either just offset another pointer or directly
     specify the pointed-to set.  */
  if (TREE_CODE (ptr1) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr1, 0));
      if (base
          && (TREE_CODE (base) == MEM_REF
              || TREE_CODE (base) == TARGET_MEM_REF))
        return ptr_derefs_may_alias_p (TREE_OPERAND (base, 0), ptr2);
      else if (base
               && DECL_P (base))
        return ptr_deref_may_alias_decl_p (ptr2, base);
      else
        return true;
    }
  if (TREE_CODE (ptr2) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr2, 0));
      if (base
          && (TREE_CODE (base) == MEM_REF
              || TREE_CODE (base) == TARGET_MEM_REF))
        return ptr_derefs_may_alias_p (ptr1, TREE_OPERAND (base, 0));
      else if (base
               && DECL_P (base))
        return ptr_deref_may_alias_decl_p (ptr1, base);
      else
        return true;
    }

  /* From here we require SSA name pointers.  Anything else aliases.  */
  if (TREE_CODE (ptr1) != SSA_NAME
      || TREE_CODE (ptr2) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (ptr1))
      || !POINTER_TYPE_P (TREE_TYPE (ptr2)))
    return true;

  /* We may end up with two empty points-to solutions when comparing
     a pointer against itself.  In this case we still want to say both
     pointers alias, so shortcut that here.  */
  if (ptr1 == ptr2)
    return true;

  /* If we do not have useful points-to information for either pointer
     we cannot disambiguate anything else.  */
  pi1 = SSA_NAME_PTR_INFO (ptr1);
  pi2 = SSA_NAME_PTR_INFO (ptr2);
  if (!pi1 || !pi2)
    return true;

  /* ??? This does not use TBAA to prune decls from the intersection
     that not both pointers may access.  */
  return pt_solutions_intersect (&pi1->pt, &pi2->pt);
}

/* Return true if dereferencing PTR may alias *REF.
   The caller is responsible for applying TBAA to see if PTR
   may access *REF at all.  */

static bool
ptr_deref_may_alias_ref_p_1 (tree ptr, ao_ref *ref)
{
  tree base = ao_ref_base (ref);

  if (TREE_CODE (base) == MEM_REF
      || TREE_CODE (base) == TARGET_MEM_REF)
    return ptr_derefs_may_alias_p (ptr, TREE_OPERAND (base, 0));
  else if (DECL_P (base))
    return ptr_deref_may_alias_decl_p (ptr, base);

  return true;
}

/* Return whether REF may refer to global memory.  */

bool
ref_may_alias_global_p (tree ref)
{
  tree base = get_base_address (ref);
  if (DECL_P (base))
    return is_global_var (base);
  else if (TREE_CODE (base) == MEM_REF
           || TREE_CODE (base) == TARGET_MEM_REF)
    return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
  return true;
}

/* Return whether STMT may clobber global memory.  */

bool
stmt_may_clobber_global_p (gimple stmt)
{
  tree lhs;

  if (!gimple_vdef (stmt))
    return false;

  /* ??? We can ask the oracle whether an artificial pointer
     dereference with a pointer with points-to information covering
     all global memory (what about non-address taken memory?) may be
     clobbered by this call.  As there is at the moment no convenient
     way of doing that without generating garbage do some manual
     checking instead.
     ??? We could make a NULL ao_ref argument to the various
     predicates special, meaning any global memory.  */

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      lhs = gimple_assign_lhs (stmt);
      return (TREE_CODE (lhs) != SSA_NAME
              && ref_may_alias_global_p (lhs));
    case GIMPLE_CALL:
      return true;
    default:
      return true;
    }
}

/* Dump alias information on FILE.  */

void
dump_alias_info (FILE *file)
{
  unsigned i;
  const char *funcname
    = lang_hooks.decl_printable_name (current_function_decl, 2);
  tree var;

  fprintf (file, "\n\nAlias information for %s\n\n", funcname);

  fprintf (file, "Aliased symbols\n\n");

  FOR_EACH_LOCAL_DECL (cfun, i, var)
    {
      if (may_be_aliased (var))
        dump_variable (file, var);
    }

  fprintf (file, "\nCall clobber information\n");

  fprintf (file, "\nESCAPED");
  dump_points_to_solution (file, &cfun->gimple_df->escaped);

  fprintf (file, "\n\nFlow-insensitive points-to information\n\n");

  for (i = 1; i < num_ssa_names; i++)
    {
      tree ptr = ssa_name (i);
      struct ptr_info_def *pi;

      if (ptr == NULL_TREE
          || SSA_NAME_IN_FREE_LIST (ptr))
        continue;

      pi = SSA_NAME_PTR_INFO (ptr);
      if (pi)
        dump_points_to_info_for (file, ptr);
    }

  fprintf (file, "\n");
}


/* Dump alias information on stderr.  */

DEBUG_FUNCTION void
debug_alias_info (void)
{
  dump_alias_info (stderr);
}

/* Dump the points-to set *PT into FILE.  */

void
dump_points_to_solution (FILE *file, struct pt_solution *pt)
{
  if (pt->anything)
    fprintf (file, ", points-to anything");

  if (pt->nonlocal)
    fprintf (file, ", points-to non-local");

  if (pt->escaped)
    fprintf (file, ", points-to escaped");

  if (pt->ipa_escaped)
    fprintf (file, ", points-to unit escaped");

  if (pt->null)
    fprintf (file, ", points-to NULL");

  if (pt->vars)
    {
      fprintf (file, ", points-to vars: ");
      dump_decl_set (file, pt->vars);
      if (pt->vars_contains_global)
        fprintf (file, " (includes global vars)");
    }
}


/* Unified dump function for pt_solution.  */

DEBUG_FUNCTION void
debug (pt_solution &ref)
{
  dump_points_to_solution (stderr, &ref);
}

DEBUG_FUNCTION void
debug (pt_solution *ptr)
{
  if (ptr)
    debug (*ptr);
  else
    fprintf (stderr, "<nil>\n");
}

/* Dump points-to information for SSA_NAME PTR into FILE.  */

void
dump_points_to_info_for (FILE *file, tree ptr)
{
  struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);

  print_generic_expr (file, ptr, dump_flags);

  if (pi)
    dump_points_to_solution (file, &pi->pt);
  else
    fprintf (file, ", points-to anything");

  fprintf (file, "\n");
}


/* Dump points-to information for VAR into stderr.  */

DEBUG_FUNCTION void
debug_points_to_info_for (tree var)
{
  dump_points_to_info_for (stderr, var);
}

/* Initializes the alias-oracle reference representation *R from REF.  */

void
ao_ref_init (ao_ref *r, tree ref)
{
  r->ref = ref;
  r->base = NULL_TREE;
  r->offset = 0;
  r->size = -1;
  r->max_size = -1;
  r->ref_alias_set = -1;
  r->base_alias_set = -1;
  r->volatile_p = ref ? TREE_THIS_VOLATILE (ref) : false;
}

/* Returns the base object of the memory reference *REF.  */

tree
ao_ref_base (ao_ref *ref)
{
  if (ref->base)
    return ref->base;
  ref->base = get_ref_base_and_extent (ref->ref, &ref->offset, &ref->size,
                                       &ref->max_size);
  return ref->base;
}

/* Returns the base object alias set of the memory reference *REF.  */

static alias_set_type
ao_ref_base_alias_set (ao_ref *ref)
{
  tree base_ref;
  if (ref->base_alias_set != -1)
    return ref->base_alias_set;
  if (!ref->ref)
    return 0;
  base_ref = ref->ref;
  while (handled_component_p (base_ref))
    base_ref = TREE_OPERAND (base_ref, 0);
  ref->base_alias_set = get_alias_set (base_ref);
  return ref->base_alias_set;
}

/* Returns the reference alias set of the memory reference *REF.  */

alias_set_type
ao_ref_alias_set (ao_ref *ref)
{
  if (ref->ref_alias_set != -1)
    return ref->ref_alias_set;
  ref->ref_alias_set = get_alias_set (ref->ref);
  return ref->ref_alias_set;
}

/* Init an alias-oracle reference representation from a gimple pointer
   PTR and a gimple size SIZE in bytes.  If SIZE is NULL_TREE the
   size is assumed to be unknown.  The access is assumed to be only
   at or after the pointer target, not before it.  */

void
ao_ref_init_from_ptr_and_size (ao_ref *ref, tree ptr, tree size)
{
  HOST_WIDE_INT t1, t2;
  ref->ref = NULL_TREE;
  if (TREE_CODE (ptr) == ADDR_EXPR)
    ref->base = get_ref_base_and_extent (TREE_OPERAND (ptr, 0),
                                         &ref->offset, &t1, &t2);
  else
    {
      ref->base = build2 (MEM_REF, char_type_node,
                          ptr, null_pointer_node);
      ref->offset = 0;
    }
  if (size
      && host_integerp (size, 0)
      && TREE_INT_CST_LOW (size) * 8 / 8 == TREE_INT_CST_LOW (size))
    ref->max_size = ref->size = TREE_INT_CST_LOW (size) * 8;
  else
    ref->max_size = ref->size = -1;
  ref->ref_alias_set = 0;
  ref->base_alias_set = 0;
  ref->volatile_p = false;
}

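/* Illustrative sketch: a cheap way to query the oracle for an access of
   SIZE bytes through a pointer PTR, mirroring how the builtin call
   handling below uses this, is

     ao_ref dref;
     ao_ref_init_from_ptr_and_size (&dref, ptr, size);
     if (!refs_may_alias_p_1 (&dref, ref, false))
       ... the accesses cannot conflict ...

   where passing false for TBAA_P avoids relying on strict-aliasing
   rules.  */
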
/* Return 1 if TYPE1 and TYPE2 are to be considered equivalent for the
   purpose of TBAA.  Return 0 if they are distinct and -1 if we cannot
   decide.  */

static inline int
same_type_for_tbaa (tree type1, tree type2)
{
  type1 = TYPE_MAIN_VARIANT (type1);
  type2 = TYPE_MAIN_VARIANT (type2);

  /* If we would have to do structural comparison bail out.  */
  if (TYPE_STRUCTURAL_EQUALITY_P (type1)
      || TYPE_STRUCTURAL_EQUALITY_P (type2))
    return -1;

  /* Compare the canonical types.  */
  if (TYPE_CANONICAL (type1) == TYPE_CANONICAL (type2))
    return 1;

  /* ??? Array types are not properly unified in all cases as we have
     spurious changes in the index types for example.  Removing this
     causes all sorts of problems with the Fortran frontend.  */
  if (TREE_CODE (type1) == ARRAY_TYPE
      && TREE_CODE (type2) == ARRAY_TYPE)
    return -1;

  /* ??? In Ada, an lvalue of an unconstrained type can be used to access an
     object of one of its constrained subtypes, e.g. when a function with an
     unconstrained parameter passed by reference is called on an object and
     inlined.  But, even in the case of a fixed size, type and subtypes are
     not equivalent enough as to share the same TYPE_CANONICAL, since this
     would mean that conversions between them are useless, whereas they are
     not (e.g. type and subtypes can have different modes).  So, in the end,
     they are only guaranteed to have the same alias set.  */
  if (get_alias_set (type1) == get_alias_set (type2))
    return -1;

  /* The types are known to be not equal.  */
  return 0;
}

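/* Illustrative use of the tri-state result, following the access-path
   walks below:

     int same_p = same_type_for_tbaa (type1, type2);
     if (same_p == -1)
       return true;   ... unknown, be conservative ...
     else if (same_p == 1)
       ... the types match, offset-based disambiguation applies ...
     else
       ... the types provably differ ...  */
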
/* Determine if the two component references REF1 and REF2 which are
   based on access types TYPE1 and TYPE2 and of which at least one is based
   on an indirect reference may alias.  REF2 is the only one that can
   be a decl in which case REF2_IS_DECL is true.
   REF1_ALIAS_SET, BASE1_ALIAS_SET, REF2_ALIAS_SET and BASE2_ALIAS_SET
   are the respective alias sets.  */

static bool
aliasing_component_refs_p (tree ref1,
                           alias_set_type ref1_alias_set,
                           alias_set_type base1_alias_set,
                           HOST_WIDE_INT offset1, HOST_WIDE_INT max_size1,
                           tree ref2,
                           alias_set_type ref2_alias_set,
                           alias_set_type base2_alias_set,
                           HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2,
                           bool ref2_is_decl)
{
  /* If one reference is a component reference through pointers try to find a
     common base and apply offset based disambiguation.  This handles
     for example
       struct A { int i; int j; } *q;
       struct B { struct A a; int k; } *p;
     disambiguating q->i and p->a.j.  */
  tree base1, base2;
  tree type1, type2;
  tree *refp;
  int same_p;

  /* Choose bases and base types to search for.  */
  base1 = ref1;
  while (handled_component_p (base1))
    base1 = TREE_OPERAND (base1, 0);
  type1 = TREE_TYPE (base1);
  base2 = ref2;
  while (handled_component_p (base2))
    base2 = TREE_OPERAND (base2, 0);
  type2 = TREE_TYPE (base2);

  /* Now search for the type1 in the access path of ref2.  This
     would be a common base for doing offset based disambiguation on.  */
  refp = &ref2;
  while (handled_component_p (*refp)
         && same_type_for_tbaa (TREE_TYPE (*refp), type1) == 0)
    refp = &TREE_OPERAND (*refp, 0);
  same_p = same_type_for_tbaa (TREE_TYPE (*refp), type1);
  /* If we couldn't compare types we have to bail out.  */
  if (same_p == -1)
    return true;
  else if (same_p == 1)
    {
      HOST_WIDE_INT offadj, sztmp, msztmp;
      get_ref_base_and_extent (*refp, &offadj, &sztmp, &msztmp);
      offset2 -= offadj;
      get_ref_base_and_extent (base1, &offadj, &sztmp, &msztmp);
      offset1 -= offadj;
      return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
    }
  /* If we didn't find a common base, try the other way around.  */
  refp = &ref1;
  while (handled_component_p (*refp)
         && same_type_for_tbaa (TREE_TYPE (*refp), type2) == 0)
    refp = &TREE_OPERAND (*refp, 0);
  same_p = same_type_for_tbaa (TREE_TYPE (*refp), type2);
  /* If we couldn't compare types we have to bail out.  */
  if (same_p == -1)
    return true;
  else if (same_p == 1)
    {
      HOST_WIDE_INT offadj, sztmp, msztmp;
      get_ref_base_and_extent (*refp, &offadj, &sztmp, &msztmp);
      offset1 -= offadj;
      get_ref_base_and_extent (base2, &offadj, &sztmp, &msztmp);
      offset2 -= offadj;
      return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
    }

  /* If we have two type access paths B1.path1 and B2.path2 they may
     only alias if either B1 is in B2.path2 or B2 is in B1.path1.
     But we can still have a path that goes B1.path1...B2.path2 with
     a part that we do not see.  So we can only disambiguate now
     if there is no B2 in the tail of path1 and no B1 on the
     tail of path2.  */
  if (base1_alias_set == ref2_alias_set
      || alias_set_subset_of (base1_alias_set, ref2_alias_set))
    return true;
  /* If this is ptr vs. decl then we know there is no ptr ... decl path.  */
  if (!ref2_is_decl)
    return (base2_alias_set == ref1_alias_set
            || alias_set_subset_of (base2_alias_set, ref1_alias_set));
  return false;
}

/* Return true if we can determine that component references REF1 and REF2,
   that are within a common DECL, cannot overlap.  */

static bool
nonoverlapping_component_refs_of_decl_p (tree ref1, tree ref2)
{
  vec<tree, va_stack> component_refs1;
  vec<tree, va_stack> component_refs2;

  vec_stack_alloc (tree, component_refs1, 16);
  vec_stack_alloc (tree, component_refs2, 16);

  /* Create the stack of handled components for REF1.  */
  while (handled_component_p (ref1))
    {
      component_refs1.safe_push (ref1);
      ref1 = TREE_OPERAND (ref1, 0);
    }
  if (TREE_CODE (ref1) == MEM_REF)
    {
      if (!integer_zerop (TREE_OPERAND (ref1, 1)))
        goto may_overlap;
      ref1 = TREE_OPERAND (TREE_OPERAND (ref1, 0), 0);
    }

  /* Create the stack of handled components for REF2.  */
  while (handled_component_p (ref2))
    {
      component_refs2.safe_push (ref2);
      ref2 = TREE_OPERAND (ref2, 0);
    }
  if (TREE_CODE (ref2) == MEM_REF)
    {
      if (!integer_zerop (TREE_OPERAND (ref2, 1)))
        goto may_overlap;
      ref2 = TREE_OPERAND (TREE_OPERAND (ref2, 0), 0);
    }

  /* We must have the same base DECL.  */
  gcc_assert (ref1 == ref2);

  /* Pop the stacks in parallel and examine the COMPONENT_REFs of the same
     rank.  This is sufficient because we start from the same DECL and you
     cannot reference several fields at a time with COMPONENT_REFs (unlike
     with ARRAY_RANGE_REFs for arrays) so you always need the same number
     of them to access a sub-component, unless you're in a union, in which
     case the return value will precisely be false.  */
  while (true)
    {
      do
        {
          if (component_refs1.is_empty ())
            goto may_overlap;
          ref1 = component_refs1.pop ();
        }
      while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref1, 0))));

      do
        {
          if (component_refs2.is_empty ())
            goto may_overlap;
          ref2 = component_refs2.pop ();
        }
      while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref2, 0))));

      /* Beware of BIT_FIELD_REF.  */
      if (TREE_CODE (ref1) != COMPONENT_REF
          || TREE_CODE (ref2) != COMPONENT_REF)
        goto may_overlap;

      tree field1 = TREE_OPERAND (ref1, 1);
      tree field2 = TREE_OPERAND (ref2, 1);

      /* ??? We cannot simply use the type of operand #0 of the refs here
         as the Fortran compiler smuggles type punning into COMPONENT_REFs
         for common blocks instead of using unions like everyone else.  */
      tree type1 = TYPE_MAIN_VARIANT (DECL_CONTEXT (field1));
      tree type2 = TYPE_MAIN_VARIANT (DECL_CONTEXT (field2));

      /* We cannot disambiguate fields in a union or qualified union.  */
      if (type1 != type2 || TREE_CODE (type1) != RECORD_TYPE)
        goto may_overlap;

      /* Different fields of the same record type cannot overlap.  */
      if (field1 != field2)
        {
          component_refs1.release ();
          component_refs2.release ();
          return true;
        }
    }

may_overlap:
  component_refs1.release ();
  component_refs2.release ();
  return false;
}

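/* For instance (illustrative, not from the sources), given

     struct S { struct { int i; int j; } a; int k; } s;

   s.a.i vs. s.k already differ at the first rank popped, as fields A
   and K of the same RECORD_TYPE S, while s.a.i vs. s.a.j agree there
   and differ at the second rank, so both pairs are disambiguated.
   A union anywhere on the paths makes the walk give up.  */
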
/* Return true if two memory references based on the variables BASE1
   and BASE2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
   [OFFSET2, OFFSET2 + MAX_SIZE2) may alias.  REF1 and REF2
   if non-NULL are the complete memory reference trees.  */

static bool
decl_refs_may_alias_p (tree ref1, tree base1,
                       HOST_WIDE_INT offset1, HOST_WIDE_INT max_size1,
                       tree ref2, tree base2,
                       HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2)
{
  gcc_checking_assert (DECL_P (base1) && DECL_P (base2));

  /* If both references are based on different variables, they cannot alias.  */
  if (base1 != base2)
    return false;

  /* If both references are based on the same variable, they cannot alias if
     the accesses do not overlap.  */
  if (!ranges_overlap_p (offset1, max_size1, offset2, max_size2))
    return false;

  /* For components with variable position, the above test isn't sufficient,
     so we disambiguate component references manually.  */
  if (ref1 && ref2
      && handled_component_p (ref1) && handled_component_p (ref2)
      && nonoverlapping_component_refs_of_decl_p (ref1, ref2))
    return false;

  return true;
}

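/* Illustrative arithmetic: offsets and sizes here are in bits, so with
   OFFSET1 = 0, MAX_SIZE1 = 32 and OFFSET2 = 32, MAX_SIZE2 = 32 the
   ranges [0, 32) and [32, 64) do not overlap and two accesses to
   adjacent 32-bit fields of the same variable are disambiguated; a
   MAX_SIZE of -1 means the extent is unknown and always overlaps.  */
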
/* Return true if an indirect reference based on *PTR1 constrained
   to [OFFSET1, OFFSET1 + MAX_SIZE1) may alias a variable based on BASE2
   constrained to [OFFSET2, OFFSET2 + MAX_SIZE2).  *PTR1 and BASE2 have
   the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
   in which case they are computed on-demand.  REF1 and REF2
   if non-NULL are the complete memory reference trees.  */

static bool
indirect_ref_may_alias_decl_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
                               HOST_WIDE_INT offset1,
                               HOST_WIDE_INT max_size1 ATTRIBUTE_UNUSED,
                               alias_set_type ref1_alias_set,
                               alias_set_type base1_alias_set,
                               tree ref2 ATTRIBUTE_UNUSED, tree base2,
                               HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2,
                               alias_set_type ref2_alias_set,
                               alias_set_type base2_alias_set, bool tbaa_p)
{
  tree ptr1;
  tree ptrtype1, dbase2;
  HOST_WIDE_INT offset1p = offset1, offset2p = offset2;
  HOST_WIDE_INT doffset1, doffset2;
  double_int moff;

  gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
                        || TREE_CODE (base1) == TARGET_MEM_REF)
                       && DECL_P (base2));

  ptr1 = TREE_OPERAND (base1, 0);

  /* The offset embedded in MEM_REFs can be negative.  Bias them
     so that the resulting offset adjustment is positive.  */
  moff = mem_ref_offset (base1);
  moff = moff.alshift (BITS_PER_UNIT == 8
                       ? 3 : exact_log2 (BITS_PER_UNIT),
                       HOST_BITS_PER_DOUBLE_INT);
  if (moff.is_negative ())
    offset2p += (-moff).low;
  else
    offset1p += moff.low;

  /* If only one reference is based on a variable, they cannot alias if
     the pointer access is beyond the extent of the variable access.
     (the pointer base cannot validly point to an offset less than zero
     of the variable).
     ??? IVOPTs creates bases that do not honor this restriction,
     so do not apply this optimization for TARGET_MEM_REFs.  */
  if (TREE_CODE (base1) != TARGET_MEM_REF
      && !ranges_overlap_p (MAX (0, offset1p), -1, offset2p, max_size2))
    return false;
  /* They also cannot alias if the pointer may not point to the decl.  */
  if (!ptr_deref_may_alias_decl_p (ptr1, base2))
    return false;

  /* Disambiguations that rely on strict aliasing rules follow.  */
  if (!flag_strict_aliasing || !tbaa_p)
    return true;

  ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));

  /* If the alias set for a pointer access is zero all bets are off.  */
  if (base1_alias_set == -1)
    base1_alias_set = get_deref_alias_set (ptrtype1);
  if (base1_alias_set == 0)
    return true;
  if (base2_alias_set == -1)
    base2_alias_set = get_alias_set (base2);

  /* When we are trying to disambiguate an access with a pointer dereference
     as base versus one with a decl as base we can use both the size
     of the decl and its dynamic type for extra disambiguation.
     ??? We do not know anything about the dynamic type of the decl
     other than that its alias-set contains base2_alias_set as a subset
     which does not help us here.  */
  /* As we know nothing useful about the dynamic type of the decl just
     use the usual conflict check rather than a subset test.
     ??? We could introduce -fvery-strict-aliasing when the language
     does not allow decls to have a dynamic type that differs from their
     static type.  Then we can check
     !alias_set_subset_of (base1_alias_set, base2_alias_set) instead.  */
  if (base1_alias_set != base2_alias_set
      && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
    return false;
  /* If the size of the access relevant for TBAA through the pointer
     is bigger than the size of the decl we can't possibly access the
     decl via that pointer.  */
  if (DECL_SIZE (base2) && COMPLETE_TYPE_P (TREE_TYPE (ptrtype1))
      && TREE_CODE (DECL_SIZE (base2)) == INTEGER_CST
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (ptrtype1))) == INTEGER_CST
      /* ??? This in turn may run afoul when a decl of type T which is
         a member of union type U is accessed through a pointer to
         type U and sizeof T is smaller than sizeof U.  */
      && TREE_CODE (TREE_TYPE (ptrtype1)) != UNION_TYPE
      && TREE_CODE (TREE_TYPE (ptrtype1)) != QUAL_UNION_TYPE
      && tree_int_cst_lt (DECL_SIZE (base2), TYPE_SIZE (TREE_TYPE (ptrtype1))))
    return false;

  if (!ref2)
    return true;

  /* If the decl is accessed via a MEM_REF, reconstruct the base
     we can use for TBAA and an appropriately adjusted offset.  */
  dbase2 = ref2;
  while (handled_component_p (dbase2))
    dbase2 = TREE_OPERAND (dbase2, 0);
  doffset1 = offset1;
  doffset2 = offset2;
  if (TREE_CODE (dbase2) == MEM_REF
      || TREE_CODE (dbase2) == TARGET_MEM_REF)
    {
      double_int moff = mem_ref_offset (dbase2);
      moff = moff.alshift (BITS_PER_UNIT == 8
                           ? 3 : exact_log2 (BITS_PER_UNIT),
                           HOST_BITS_PER_DOUBLE_INT);
      if (moff.is_negative ())
        doffset1 -= (-moff).low;
      else
        doffset2 -= moff.low;
    }

  /* If either reference is view-converted, give up now.  */
  if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1
      || same_type_for_tbaa (TREE_TYPE (dbase2), TREE_TYPE (base2)) != 1)
    return true;

  /* If both references are through the same type, they do not alias
     if the accesses do not overlap.  This does extra disambiguation
     for mixed/pointer accesses but requires strict aliasing.
     For MEM_REFs we require that the component-ref offset we computed
     is relative to the start of the type which we ensure by
     comparing rvalue and access type and disregarding the constant
     pointer offset.  */
  if ((TREE_CODE (base1) != TARGET_MEM_REF
       || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
      && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (dbase2)) == 1)
    return ranges_overlap_p (doffset1, max_size1, doffset2, max_size2);

  /* Do access-path based disambiguation.  */
  if (ref1 && ref2
      && (handled_component_p (ref1) || handled_component_p (ref2)))
    return aliasing_component_refs_p (ref1,
                                      ref1_alias_set, base1_alias_set,
                                      offset1, max_size1,
                                      ref2,
                                      ref2_alias_set, base2_alias_set,
                                      offset2, max_size2, true);

  return true;
}

/* Return true if two indirect references based on *PTR1
   and *PTR2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
   [OFFSET2, OFFSET2 + MAX_SIZE2) may alias.  *PTR1 and *PTR2 have
   the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
   in which case they are computed on-demand.  REF1 and REF2
   if non-NULL are the complete memory reference trees.  */

static bool
indirect_refs_may_alias_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
                           HOST_WIDE_INT offset1, HOST_WIDE_INT max_size1,
                           alias_set_type ref1_alias_set,
                           alias_set_type base1_alias_set,
                           tree ref2 ATTRIBUTE_UNUSED, tree base2,
                           HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2,
                           alias_set_type ref2_alias_set,
                           alias_set_type base2_alias_set, bool tbaa_p)
{
  tree ptr1;
  tree ptr2;
  tree ptrtype1, ptrtype2;

  gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
                        || TREE_CODE (base1) == TARGET_MEM_REF)
                       && (TREE_CODE (base2) == MEM_REF
                           || TREE_CODE (base2) == TARGET_MEM_REF));

  ptr1 = TREE_OPERAND (base1, 0);
  ptr2 = TREE_OPERAND (base2, 0);

  /* If both bases are based on pointers they cannot alias if they may not
     point to the same memory object or if they point to the same object
     and the accesses do not overlap.  */
  if ((!cfun || gimple_in_ssa_p (cfun))
      && operand_equal_p (ptr1, ptr2, 0)
      && (((TREE_CODE (base1) != TARGET_MEM_REF
            || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
           && (TREE_CODE (base2) != TARGET_MEM_REF
               || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2))))
          || (TREE_CODE (base1) == TARGET_MEM_REF
              && TREE_CODE (base2) == TARGET_MEM_REF
              && (TMR_STEP (base1) == TMR_STEP (base2)
                  || (TMR_STEP (base1) && TMR_STEP (base2)
                      && operand_equal_p (TMR_STEP (base1),
                                          TMR_STEP (base2), 0)))
              && (TMR_INDEX (base1) == TMR_INDEX (base2)
                  || (TMR_INDEX (base1) && TMR_INDEX (base2)
                      && operand_equal_p (TMR_INDEX (base1),
                                          TMR_INDEX (base2), 0)))
              && (TMR_INDEX2 (base1) == TMR_INDEX2 (base2)
                  || (TMR_INDEX2 (base1) && TMR_INDEX2 (base2)
                      && operand_equal_p (TMR_INDEX2 (base1),
                                          TMR_INDEX2 (base2), 0))))))
    {
      double_int moff;
      /* The offset embedded in MEM_REFs can be negative.  Bias them
         so that the resulting offset adjustment is positive.  */
      moff = mem_ref_offset (base1);
      moff = moff.alshift (BITS_PER_UNIT == 8
                           ? 3 : exact_log2 (BITS_PER_UNIT),
                           HOST_BITS_PER_DOUBLE_INT);
      if (moff.is_negative ())
        offset2 += (-moff).low;
      else
        offset1 += moff.low;
      moff = mem_ref_offset (base2);
      moff = moff.alshift (BITS_PER_UNIT == 8
                           ? 3 : exact_log2 (BITS_PER_UNIT),
                           HOST_BITS_PER_DOUBLE_INT);
      if (moff.is_negative ())
        offset1 += (-moff).low;
      else
        offset2 += moff.low;
      return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
    }
  if (!ptr_derefs_may_alias_p (ptr1, ptr2))
    return false;

  /* Disambiguations that rely on strict aliasing rules follow.  */
  if (!flag_strict_aliasing || !tbaa_p)
    return true;

  ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));
  ptrtype2 = TREE_TYPE (TREE_OPERAND (base2, 1));

  /* If the alias set for a pointer access is zero all bets are off.  */
  if (base1_alias_set == -1)
    base1_alias_set = get_deref_alias_set (ptrtype1);
  if (base1_alias_set == 0)
    return true;
  if (base2_alias_set == -1)
    base2_alias_set = get_deref_alias_set (ptrtype2);
  if (base2_alias_set == 0)
    return true;

  /* If both references are through the same type, they do not alias
     if the accesses do not overlap.  This does extra disambiguation
     for mixed/pointer accesses but requires strict aliasing.  */
  if ((TREE_CODE (base1) != TARGET_MEM_REF
       || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
      && (TREE_CODE (base2) != TARGET_MEM_REF
          || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2)))
      && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) == 1
      && same_type_for_tbaa (TREE_TYPE (base2), TREE_TYPE (ptrtype2)) == 1
      && same_type_for_tbaa (TREE_TYPE (ptrtype1),
                             TREE_TYPE (ptrtype2)) == 1)
    return ranges_overlap_p (offset1, max_size1, offset2, max_size2);

  /* Do type-based disambiguation.  */
  if (base1_alias_set != base2_alias_set
      && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
    return false;

  /* Do access-path based disambiguation.  */
  if (ref1 && ref2
      && (handled_component_p (ref1) || handled_component_p (ref2))
      && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) == 1
      && same_type_for_tbaa (TREE_TYPE (base2), TREE_TYPE (ptrtype2)) == 1)
    return aliasing_component_refs_p (ref1,
                                      ref1_alias_set, base1_alias_set,
                                      offset1, max_size1,
                                      ref2,
                                      ref2_alias_set, base2_alias_set,
                                      offset2, max_size2, false);

  return true;
}

/* Return true if the two memory references REF1 and REF2 may alias.  */

bool
refs_may_alias_p_1 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
{
  tree base1, base2;
  HOST_WIDE_INT offset1 = 0, offset2 = 0;
  HOST_WIDE_INT max_size1 = -1, max_size2 = -1;
  bool var1_p, var2_p, ind1_p, ind2_p;

  gcc_checking_assert ((!ref1->ref
                        || TREE_CODE (ref1->ref) == SSA_NAME
                        || DECL_P (ref1->ref)
                        || TREE_CODE (ref1->ref) == STRING_CST
                        || handled_component_p (ref1->ref)
                        || TREE_CODE (ref1->ref) == MEM_REF
                        || TREE_CODE (ref1->ref) == TARGET_MEM_REF)
                       && (!ref2->ref
                           || TREE_CODE (ref2->ref) == SSA_NAME
                           || DECL_P (ref2->ref)
                           || TREE_CODE (ref2->ref) == STRING_CST
                           || handled_component_p (ref2->ref)
                           || TREE_CODE (ref2->ref) == MEM_REF
                           || TREE_CODE (ref2->ref) == TARGET_MEM_REF));

  /* Decompose the references into their base objects and the access.  */
  base1 = ao_ref_base (ref1);
  offset1 = ref1->offset;
  max_size1 = ref1->max_size;
  base2 = ao_ref_base (ref2);
  offset2 = ref2->offset;
  max_size2 = ref2->max_size;

  /* We can end up with registers or constants as bases for example from
     *D.1663_44 = VIEW_CONVERT_EXPR<struct DB_LSN>(__tmp$B0F64_59);
     which is seen as a struct copy.  */
  if (TREE_CODE (base1) == SSA_NAME
      || TREE_CODE (base1) == CONST_DECL
      || TREE_CODE (base1) == CONSTRUCTOR
      || TREE_CODE (base1) == ADDR_EXPR
      || CONSTANT_CLASS_P (base1)
      || TREE_CODE (base2) == SSA_NAME
      || TREE_CODE (base2) == CONST_DECL
      || TREE_CODE (base2) == CONSTRUCTOR
      || TREE_CODE (base2) == ADDR_EXPR
      || CONSTANT_CLASS_P (base2))
    return false;

  /* We can end up referring to code via function and label decls.
     As we likely do not properly track code aliases conservatively
     bail out.  */
  if (TREE_CODE (base1) == FUNCTION_DECL
      || TREE_CODE (base1) == LABEL_DECL
      || TREE_CODE (base2) == FUNCTION_DECL
      || TREE_CODE (base2) == LABEL_DECL)
    return true;

  /* Two volatile accesses always conflict.  */
  if (ref1->volatile_p
      && ref2->volatile_p)
    return true;

  /* Defer to simple offset based disambiguation if we have
     references based on two decls.  Do this before deferring to
     TBAA to handle must-alias cases in conformance with the
     GCC extension of allowing type-punning through unions.  */
  var1_p = DECL_P (base1);
  var2_p = DECL_P (base2);
  if (var1_p && var2_p)
    return decl_refs_may_alias_p (ref1->ref, base1, offset1, max_size1,
                                  ref2->ref, base2, offset2, max_size2);

  ind1_p = (TREE_CODE (base1) == MEM_REF
            || TREE_CODE (base1) == TARGET_MEM_REF);
  ind2_p = (TREE_CODE (base2) == MEM_REF
            || TREE_CODE (base2) == TARGET_MEM_REF);

  /* Canonicalize the pointer-vs-decl case.  */
  if (ind1_p && var2_p)
    {
      HOST_WIDE_INT tmp1;
      tree tmp2;
      ao_ref *tmp3;
      tmp1 = offset1; offset1 = offset2; offset2 = tmp1;
      tmp1 = max_size1; max_size1 = max_size2; max_size2 = tmp1;
      tmp2 = base1; base1 = base2; base2 = tmp2;
      tmp3 = ref1; ref1 = ref2; ref2 = tmp3;
      var1_p = true;
      ind1_p = false;
      var2_p = false;
      ind2_p = true;
    }

  /* First defer to TBAA if possible.  */
  if (tbaa_p
      && flag_strict_aliasing
      && !alias_sets_conflict_p (ao_ref_alias_set (ref1),
                                 ao_ref_alias_set (ref2)))
    return false;

  /* Dispatch to the pointer-vs-decl or pointer-vs-pointer disambiguators.  */
  if (var1_p && ind2_p)
    return indirect_ref_may_alias_decl_p (ref2->ref, base2,
                                          offset2, max_size2,
                                          ao_ref_alias_set (ref2), -1,
                                          ref1->ref, base1,
                                          offset1, max_size1,
                                          ao_ref_alias_set (ref1),
                                          ao_ref_base_alias_set (ref1),
                                          tbaa_p);
  else if (ind1_p && ind2_p)
    return indirect_refs_may_alias_p (ref1->ref, base1,
                                      offset1, max_size1,
                                      ao_ref_alias_set (ref1), -1,
                                      ref2->ref, base2,
                                      offset2, max_size2,
                                      ao_ref_alias_set (ref2), -1,
                                      tbaa_p);

  /* We really do not want to end up here, but returning true is safe.  */
#ifdef ENABLE_CHECKING
  gcc_unreachable ();
#else
  return true;
#endif
}

bool
refs_may_alias_p (tree ref1, tree ref2)
{
  ao_ref r1, r2;
  bool res;
  ao_ref_init (&r1, ref1);
  ao_ref_init (&r2, ref2);
  res = refs_may_alias_p_1 (&r1, &r2, true);
  if (res)
    ++alias_stats.refs_may_alias_p_may_alias;
  else
    ++alias_stats.refs_may_alias_p_no_alias;
  return res;
}

/* Returns true if there is an anti-dependence for the STORE that
   executes after the LOAD.  */

bool
refs_anti_dependent_p (tree load, tree store)
{
  ao_ref r1, r2;
  ao_ref_init (&r1, load);
  ao_ref_init (&r2, store);
  return refs_may_alias_p_1 (&r1, &r2, false);
}

/* Returns true if there is an output dependence for the stores
   STORE1 and STORE2.  */

bool
refs_output_dependent_p (tree store1, tree store2)
{
  ao_ref r1, r2;
  ao_ref_init (&r1, store1);
  ao_ref_init (&r2, store2);
  return refs_may_alias_p_1 (&r1, &r2, false);
}

/* If the call CALL may use the memory reference REF return true,
   otherwise return false.  */

static bool
ref_maybe_used_by_call_p_1 (gimple call, ao_ref *ref)
{
  tree base, callee;
  unsigned i;
  int flags = gimple_call_flags (call);

  /* Const functions without a static chain do not implicitly use memory.  */
  if (!gimple_call_chain (call)
      && (flags & (ECF_CONST|ECF_NOVOPS)))
    goto process_args;

  base = ao_ref_base (ref);
  if (!base)
    return true;

  /* A call that is not without side-effects might involve volatile
     accesses and thus conflicts with all other volatile accesses.  */
  if (ref->volatile_p)
    return true;

  /* If the reference is based on a decl that is not aliased the call
     cannot possibly use it.  */
  if (DECL_P (base)
      && !may_be_aliased (base)
      /* But local statics can be used through recursion.  */
      && !is_global_var (base))
    goto process_args;

  callee = gimple_call_fndecl (call);

  /* Handle those builtin functions explicitly that do not act as
     escape points.  See tree-ssa-structalias.c:find_func_aliases
     for the list of builtins we might need to handle here.  */
  if (callee != NULL_TREE
      && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
        /* All the following functions read memory pointed to by
           their second argument.  strcat/strncat additionally
           reads memory pointed to by the first argument.  */
      case BUILT_IN_STRCAT:
      case BUILT_IN_STRNCAT:
        {
          ao_ref dref;
          ao_ref_init_from_ptr_and_size (&dref,
                                         gimple_call_arg (call, 0),
                                         NULL_TREE);
          if (refs_may_alias_p_1 (&dref, ref, false))
            return true;
        }
        /* FALLTHRU */
      case BUILT_IN_STRCPY:
      case BUILT_IN_STRNCPY:
      case BUILT_IN_MEMCPY:
      case BUILT_IN_MEMMOVE:
      case BUILT_IN_MEMPCPY:
      case BUILT_IN_STPCPY:
      case BUILT_IN_STPNCPY:
      case BUILT_IN_TM_MEMCPY:
      case BUILT_IN_TM_MEMMOVE:
        {
          ao_ref dref;
          tree size = NULL_TREE;
          if (gimple_call_num_args (call) == 3)
            size = gimple_call_arg (call, 2);
          ao_ref_init_from_ptr_and_size (&dref,
                                         gimple_call_arg (call, 1),
                                         size);
          return refs_may_alias_p_1 (&dref, ref, false);
        }
      case BUILT_IN_STRCAT_CHK:
      case BUILT_IN_STRNCAT_CHK:
        {
          ao_ref dref;
          ao_ref_init_from_ptr_and_size (&dref,
                                         gimple_call_arg (call, 0),
                                         NULL_TREE);
          if (refs_may_alias_p_1 (&dref, ref, false))
            return true;
        }
        /* FALLTHRU */
      case BUILT_IN_STRCPY_CHK:
      case BUILT_IN_STRNCPY_CHK:
      case BUILT_IN_MEMCPY_CHK:
      case BUILT_IN_MEMMOVE_CHK:
      case BUILT_IN_MEMPCPY_CHK:
      case BUILT_IN_STPCPY_CHK:
      case BUILT_IN_STPNCPY_CHK:
        {
          ao_ref dref;
          tree size = NULL_TREE;
          if (gimple_call_num_args (call) == 4)
            size = gimple_call_arg (call, 2);
          ao_ref_init_from_ptr_and_size (&dref,
                                         gimple_call_arg (call, 1),
                                         size);
          return refs_may_alias_p_1 (&dref, ref, false);
        }
      case BUILT_IN_BCOPY:
        {
          ao_ref dref;
          tree size = gimple_call_arg (call, 2);
          ao_ref_init_from_ptr_and_size (&dref,
                                         gimple_call_arg (call, 0),
                                         size);
          return refs_may_alias_p_1 (&dref, ref, false);
        }

        /* The following functions read memory pointed to by their
           first argument.  */
      CASE_BUILT_IN_TM_LOAD (1):
      CASE_BUILT_IN_TM_LOAD (2):
      CASE_BUILT_IN_TM_LOAD (4):
      CASE_BUILT_IN_TM_LOAD (8):
      CASE_BUILT_IN_TM_LOAD (FLOAT):
      CASE_BUILT_IN_TM_LOAD (DOUBLE):
      CASE_BUILT_IN_TM_LOAD (LDOUBLE):
      CASE_BUILT_IN_TM_LOAD (M64):
      CASE_BUILT_IN_TM_LOAD (M128):
      CASE_BUILT_IN_TM_LOAD (M256):
      case BUILT_IN_TM_LOG:
      case BUILT_IN_TM_LOG_1:
      case BUILT_IN_TM_LOG_2:
      case BUILT_IN_TM_LOG_4:
      case BUILT_IN_TM_LOG_8:
      case BUILT_IN_TM_LOG_FLOAT:
      case BUILT_IN_TM_LOG_DOUBLE:
      case BUILT_IN_TM_LOG_LDOUBLE:
      case BUILT_IN_TM_LOG_M64:
      case BUILT_IN_TM_LOG_M128:
      case BUILT_IN_TM_LOG_M256:
        return ptr_deref_may_alias_ref_p_1 (gimple_call_arg (call, 0), ref);

        /* These read memory pointed to by the first argument.  */
      case BUILT_IN_STRDUP:
      case BUILT_IN_STRNDUP:
        {
          ao_ref dref;
          tree size = NULL_TREE;
          if (gimple_call_num_args (call) == 2)
            size = gimple_call_arg (call, 1);
          ao_ref_init_from_ptr_and_size (&dref,
                                         gimple_call_arg (call, 0),
                                         size);
          return refs_may_alias_p_1 (&dref, ref, false);
        }
        /* These read memory pointed to by the first argument.  */
      case BUILT_IN_INDEX:
      case BUILT_IN_STRCHR:
      case BUILT_IN_STRRCHR:
        {
          ao_ref dref;
          ao_ref_init_from_ptr_and_size (&dref,
                                         gimple_call_arg (call, 0),
                                         NULL_TREE);
          return refs_may_alias_p_1 (&dref, ref, false);
        }
        /* These read memory pointed to by the first argument with size
           in the third argument.  */
      case BUILT_IN_MEMCHR:
        {
          ao_ref dref;
          ao_ref_init_from_ptr_and_size (&dref,
                                         gimple_call_arg (call, 0),
                                         gimple_call_arg (call, 2));
          return refs_may_alias_p_1 (&dref, ref, false);
        }
        /* These read memory pointed to by the first and second arguments.  */
      case BUILT_IN_STRSTR:
      case BUILT_IN_STRPBRK:
        {
          ao_ref dref;
          ao_ref_init_from_ptr_and_size (&dref,
                                         gimple_call_arg (call, 0),
                                         NULL_TREE);
          if (refs_may_alias_p_1 (&dref, ref, false))
            return true;
          ao_ref_init_from_ptr_and_size (&dref,
                                         gimple_call_arg (call, 1),
                                         NULL_TREE);
          return refs_may_alias_p_1 (&dref, ref, false);
        }

        /* The following builtins do not read from memory.  */
      case BUILT_IN_FREE:
      case BUILT_IN_MALLOC:
      case BUILT_IN_CALLOC:
      case BUILT_IN_ALLOCA:
      case BUILT_IN_ALLOCA_WITH_ALIGN:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
      case BUILT_IN_MEMSET:
      case BUILT_IN_TM_MEMSET:
      case BUILT_IN_MEMSET_CHK:
      case BUILT_IN_FREXP:
      case BUILT_IN_FREXPF:
      case BUILT_IN_FREXPL:
      case BUILT_IN_GAMMA_R:
      case BUILT_IN_GAMMAF_R:
      case BUILT_IN_GAMMAL_R:
      case BUILT_IN_LGAMMA_R:
      case BUILT_IN_LGAMMAF_R:
      case BUILT_IN_LGAMMAL_R:
      case BUILT_IN_MODF:
      case BUILT_IN_MODFF:
      case BUILT_IN_MODFL:
      case BUILT_IN_REMQUO:
      case BUILT_IN_REMQUOF:
      case BUILT_IN_REMQUOL:
      case BUILT_IN_SINCOS:
      case BUILT_IN_SINCOSF:
      case BUILT_IN_SINCOSL:
      case BUILT_IN_ASSUME_ALIGNED:
      case BUILT_IN_VA_END:
        return false;
        /* __sync_* builtins and some OpenMP builtins act as threading
           barriers.  */
#undef DEF_SYNC_BUILTIN
#define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
#include "sync-builtins.def"
#undef DEF_SYNC_BUILTIN
      case BUILT_IN_GOMP_ATOMIC_START:
      case BUILT_IN_GOMP_ATOMIC_END:
      case BUILT_IN_GOMP_BARRIER:
      case BUILT_IN_GOMP_TASKWAIT:
      case BUILT_IN_GOMP_CRITICAL_START:
      case BUILT_IN_GOMP_CRITICAL_END:
      case BUILT_IN_GOMP_CRITICAL_NAME_START:
      case BUILT_IN_GOMP_CRITICAL_NAME_END:
      case BUILT_IN_GOMP_LOOP_END:
      case BUILT_IN_GOMP_ORDERED_START:
      case BUILT_IN_GOMP_ORDERED_END:
      case BUILT_IN_GOMP_PARALLEL_END:
      case BUILT_IN_GOMP_SECTIONS_END:
      case BUILT_IN_GOMP_SINGLE_COPY_START:
      case BUILT_IN_GOMP_SINGLE_COPY_END:
        return true;

      default:
        /* Fallthru to general call handling.  */;
      }

  /* Check if base is a global static variable that is not read
     by the function.  */
  if (callee != NULL_TREE
      && TREE_CODE (base) == VAR_DECL
      && TREE_STATIC (base))
    {
      struct cgraph_node *node = cgraph_get_node (callee);
      bitmap not_read;

      /* FIXME: Callee can be an OMP builtin that does not have a call graph
         node yet.  We should enforce that there are nodes for all decls in the
         IL and remove this check instead.  */
      if (node
          && (not_read = ipa_reference_get_not_read_global (node))
          && bitmap_bit_p (not_read, DECL_UID (base)))
        goto process_args;
    }

  /* Check if the base variable is call-used.  */
  if (DECL_P (base))
    {
      if (pt_solution_includes (gimple_call_use_set (call), base))
        return true;
    }
  else if ((TREE_CODE (base) == MEM_REF
            || TREE_CODE (base) == TARGET_MEM_REF)
           && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
    {
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
      if (!pi)
        return true;

      if (pt_solutions_intersect (gimple_call_use_set (call), &pi->pt))
        return true;
    }
  else
    return true;

  /* Inspect call arguments for passed-by-value aliases.  */
process_args:
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree op = gimple_call_arg (call, i);
      int flags = gimple_call_arg_flags (call, i);

      if (flags & EAF_UNUSED)
        continue;

      if (TREE_CODE (op) == WITH_SIZE_EXPR)
        op = TREE_OPERAND (op, 0);

      if (TREE_CODE (op) != SSA_NAME
          && !is_gimple_min_invariant (op))
        {
          ao_ref r;
          ao_ref_init (&r, op);
          if (refs_may_alias_p_1 (&r, ref, true))
            return true;
        }
    }

  return false;
}

static bool
ref_maybe_used_by_call_p (gimple call, tree ref)
{
  ao_ref r;
  bool res;
  ao_ref_init (&r, ref);
  res = ref_maybe_used_by_call_p_1 (call, &r);
  if (res)
    ++alias_stats.ref_maybe_used_by_call_p_may_alias;
  else
    ++alias_stats.ref_maybe_used_by_call_p_no_alias;
  return res;
}

/* If the statement STMT may use the memory reference REF return
   true, otherwise return false.  */

bool
ref_maybe_used_by_stmt_p (gimple stmt, tree ref)
{
  if (is_gimple_assign (stmt))
    {
      tree rhs;

      /* All memory assign statements are single.  */
      if (!gimple_assign_single_p (stmt))
        return false;

      rhs = gimple_assign_rhs1 (stmt);
      if (is_gimple_reg (rhs)
          || is_gimple_min_invariant (rhs)
          || gimple_assign_rhs_code (stmt) == CONSTRUCTOR)
        return false;

      return refs_may_alias_p (rhs, ref);
    }
  else if (is_gimple_call (stmt))
    return ref_maybe_used_by_call_p (stmt, ref);
  else if (gimple_code (stmt) == GIMPLE_RETURN)
    {
      tree retval = gimple_return_retval (stmt);
      tree base;
      if (retval
          && TREE_CODE (retval) != SSA_NAME
          && !is_gimple_min_invariant (retval)
          && refs_may_alias_p (retval, ref))
        return true;
      /* If ref escapes the function then the return acts as a use.  */
      base = get_base_address (ref);
      if (!base)
        ;
      else if (DECL_P (base))
        return is_global_var (base);
      else if (TREE_CODE (base) == MEM_REF
               || TREE_CODE (base) == TARGET_MEM_REF)
        return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
      return false;
    }

  return true;
}

1668 /* If the call in statement CALL may clobber the memory reference REF
1669 return true, otherwise return false. */
1671 static bool
1672 call_may_clobber_ref_p_1 (gimple call, ao_ref *ref)
1674 tree base;
1675 tree callee;
1677 /* If the call is pure or const it cannot clobber anything. */
1678 if (gimple_call_flags (call)
1679 & (ECF_PURE|ECF_CONST|ECF_LOOPING_CONST_OR_PURE|ECF_NOVOPS))
1680 return false;
1682 base = ao_ref_base (ref);
1683 if (!base)
1684 return true;
1686 if (TREE_CODE (base) == SSA_NAME
1687 || CONSTANT_CLASS_P (base))
1688 return false;
1690 /* A call that is not without side-effects might involve volatile
1691 accesses and thus conflicts with all other volatile accesses. */
1692 if (ref->volatile_p)
1693 return true;
1695 /* If the reference is based on a decl that is not aliased the call
1696 cannot possibly clobber it. */
1697 if (DECL_P (base)
1698 && !may_be_aliased (base)
1699 /* But local non-readonly statics can be modified through recursion
1700 or the call may implement a threading barrier which we must
1701 treat as may-def. */
1702 && (TREE_READONLY (base)
1703 || !is_global_var (base)))
1704 return false;
1706 callee = gimple_call_fndecl (call);
1708 /* Handle those builtin functions explicitly that do not act as
1709 escape points. See tree-ssa-structalias.c:find_func_aliases
1710 for the list of builtins we might need to handle here. */
1711 if (callee != NULL_TREE
1712 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
1713 switch (DECL_FUNCTION_CODE (callee))
1715 /* All the following functions clobber memory pointed to by
1716 their first argument. */
1717 case BUILT_IN_STRCPY:
1718 case BUILT_IN_STRNCPY:
1719 case BUILT_IN_MEMCPY:
1720 case BUILT_IN_MEMMOVE:
1721 case BUILT_IN_MEMPCPY:
1722 case BUILT_IN_STPCPY:
1723 case BUILT_IN_STPNCPY:
1724 case BUILT_IN_STRCAT:
1725 case BUILT_IN_STRNCAT:
1726 case BUILT_IN_MEMSET:
1727 case BUILT_IN_TM_MEMSET:
1728 CASE_BUILT_IN_TM_STORE (1):
1729 CASE_BUILT_IN_TM_STORE (2):
1730 CASE_BUILT_IN_TM_STORE (4):
1731 CASE_BUILT_IN_TM_STORE (8):
1732 CASE_BUILT_IN_TM_STORE (FLOAT):
1733 CASE_BUILT_IN_TM_STORE (DOUBLE):
1734 CASE_BUILT_IN_TM_STORE (LDOUBLE):
1735 CASE_BUILT_IN_TM_STORE (M64):
1736 CASE_BUILT_IN_TM_STORE (M128):
1737 CASE_BUILT_IN_TM_STORE (M256):
1738 case BUILT_IN_TM_MEMCPY:
1739 case BUILT_IN_TM_MEMMOVE:
1741 ao_ref dref;
1742 tree size = NULL_TREE;
1743 /* Don't pass in size for strncat, as the maximum size
1744 is strlen (dest) + n + 1 instead of n, resp.
1745 n + 1 at dest + strlen (dest), but strlen (dest) isn't
1746 known. */
1747 if (gimple_call_num_args (call) == 3
1748 && DECL_FUNCTION_CODE (callee) != BUILT_IN_STRNCAT)
1749 size = gimple_call_arg (call, 2);
1750 ao_ref_init_from_ptr_and_size (&dref,
1751 gimple_call_arg (call, 0),
1752 size);
1753 return refs_may_alias_p_1 (&dref, ref, false);
1755 case BUILT_IN_STRCPY_CHK:
1756 case BUILT_IN_STRNCPY_CHK:
1757 case BUILT_IN_MEMCPY_CHK:
1758 case BUILT_IN_MEMMOVE_CHK:
1759 case BUILT_IN_MEMPCPY_CHK:
1760 case BUILT_IN_STPCPY_CHK:
1761 case BUILT_IN_STPNCPY_CHK:
1762 case BUILT_IN_STRCAT_CHK:
1763 case BUILT_IN_STRNCAT_CHK:
1764 case BUILT_IN_MEMSET_CHK:
1766 ao_ref dref;
1767 tree size = NULL_TREE;
1768 /* Don't pass in size for __strncat_chk, as the maximum size
1769 is strlen (dest) + n + 1 instead of n, resp.
1770 n + 1 at dest + strlen (dest), but strlen (dest) isn't
1771 known. */
1772 if (gimple_call_num_args (call) == 4
1773 && DECL_FUNCTION_CODE (callee) != BUILT_IN_STRNCAT_CHK)
1774 size = gimple_call_arg (call, 2);
1775 ao_ref_init_from_ptr_and_size (&dref,
1776 gimple_call_arg (call, 0),
1777 size);
1778 return refs_may_alias_p_1 (&dref, ref, false);
1780 case BUILT_IN_BCOPY:
1782 ao_ref dref;
1783 tree size = gimple_call_arg (call, 2);
1784 ao_ref_init_from_ptr_and_size (&dref,
1785 gimple_call_arg (call, 1),
1786 size);
1787 return refs_may_alias_p_1 (&dref, ref, false);
1789 /* Allocating memory does not have any side-effects apart from
1790 being the definition point for the pointer. */
1791 case BUILT_IN_MALLOC:
1792 case BUILT_IN_CALLOC:
1793 case BUILT_IN_STRDUP:
1794 case BUILT_IN_STRNDUP:
1795 /* Unix98 specifies that errno is set on allocation failure. */
1796 if (flag_errno_math
1797 && targetm.ref_may_alias_errno (ref))
1798 return true;
1799 return false;
1800 case BUILT_IN_STACK_SAVE:
1801 case BUILT_IN_ALLOCA:
1802 case BUILT_IN_ALLOCA_WITH_ALIGN:
1803 case BUILT_IN_ASSUME_ALIGNED:
1804 return false;
      /* Freeing memory kills the pointed-to memory.  More importantly,
	 the call has to serve as a barrier for moving loads and stores
	 across it.  */
      case BUILT_IN_FREE:
      case BUILT_IN_VA_END:
	{
	  tree ptr = gimple_call_arg (call, 0);
	  return ptr_deref_may_alias_ref_p_1 (ptr, ref);
	}
      case BUILT_IN_GAMMA_R:
      case BUILT_IN_GAMMAF_R:
      case BUILT_IN_GAMMAL_R:
      case BUILT_IN_LGAMMA_R:
      case BUILT_IN_LGAMMAF_R:
      case BUILT_IN_LGAMMAL_R:
	{
	  tree out = gimple_call_arg (call, 1);
	  if (ptr_deref_may_alias_ref_p_1 (out, ref))
	    return true;
	  if (flag_errno_math)
	    break;
	  return false;
	}
      case BUILT_IN_FREXP:
      case BUILT_IN_FREXPF:
      case BUILT_IN_FREXPL:
      case BUILT_IN_MODF:
      case BUILT_IN_MODFF:
      case BUILT_IN_MODFL:
	{
	  tree out = gimple_call_arg (call, 1);
	  return ptr_deref_may_alias_ref_p_1 (out, ref);
	}
      case BUILT_IN_REMQUO:
      case BUILT_IN_REMQUOF:
      case BUILT_IN_REMQUOL:
	{
	  tree out = gimple_call_arg (call, 2);
	  if (ptr_deref_may_alias_ref_p_1 (out, ref))
	    return true;
	  if (flag_errno_math)
	    break;
	  return false;
	}
      case BUILT_IN_SINCOS:
      case BUILT_IN_SINCOSF:
      case BUILT_IN_SINCOSL:
	{
	  tree sin = gimple_call_arg (call, 1);
	  tree cos = gimple_call_arg (call, 2);
	  return (ptr_deref_may_alias_ref_p_1 (sin, ref)
		  || ptr_deref_may_alias_ref_p_1 (cos, ref));
	}
      /* __sync_* builtins and some OpenMP builtins act as threading
	 barriers.  */
#undef DEF_SYNC_BUILTIN
#define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
#include "sync-builtins.def"
#undef DEF_SYNC_BUILTIN
      case BUILT_IN_GOMP_ATOMIC_START:
      case BUILT_IN_GOMP_ATOMIC_END:
      case BUILT_IN_GOMP_BARRIER:
      case BUILT_IN_GOMP_TASKWAIT:
      case BUILT_IN_GOMP_CRITICAL_START:
      case BUILT_IN_GOMP_CRITICAL_END:
      case BUILT_IN_GOMP_CRITICAL_NAME_START:
      case BUILT_IN_GOMP_CRITICAL_NAME_END:
      case BUILT_IN_GOMP_LOOP_END:
      case BUILT_IN_GOMP_ORDERED_START:
      case BUILT_IN_GOMP_ORDERED_END:
      case BUILT_IN_GOMP_PARALLEL_END:
      case BUILT_IN_GOMP_SECTIONS_END:
      case BUILT_IN_GOMP_SINGLE_COPY_START:
      case BUILT_IN_GOMP_SINGLE_COPY_END:
	return true;
      default:
	/* Fallthru to general call handling.  */;
      }

  /* Check if base is a global static variable that is not written
     by the function.  */
  if (callee != NULL_TREE
      && TREE_CODE (base) == VAR_DECL
      && TREE_STATIC (base))
    {
      struct cgraph_node *node = cgraph_get_node (callee);
      bitmap not_written;

      if (node
	  && (not_written = ipa_reference_get_not_written_global (node))
	  && bitmap_bit_p (not_written, DECL_UID (base)))
	return false;
    }

  /* Check if the base variable is call-clobbered.  */
  if (DECL_P (base))
    return pt_solution_includes (gimple_call_clobber_set (call), base);
  else if ((TREE_CODE (base) == MEM_REF
	    || TREE_CODE (base) == TARGET_MEM_REF)
	   && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
    {
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
      if (!pi)
	return true;

      return pt_solutions_intersect (gimple_call_clobber_set (call), &pi->pt);
    }

  return true;
}
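
/* Editorial sketch (not part of GCC; the helper name is invented): the
   builtin cases above all reduce a call-clobber query to a plain
   reference disambiguation by describing the written memory as a
   pointer plus size.  This shows the same pattern for an arbitrary
   destination DEST and byte size SIZE, where SIZE may be NULL_TREE
   for an unknown extent.  */

static bool ATTRIBUTE_UNUSED
example_ptr_range_clobbers_p (tree dest, tree size, ao_ref *ref)
{
  ao_ref dref;
  /* Build a reference covering [DEST, DEST + SIZE) ...  */
  ao_ref_init_from_ptr_and_size (&dref, dest, size);
  /* ... and check whether it may overlap *REF.  */
  return refs_may_alias_p_1 (&dref, ref, false);
}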

/* If the call in statement CALL may clobber the memory reference REF
   return true, otherwise return false.  */

bool
call_may_clobber_ref_p (gimple call, tree ref)
{
  bool res;
  ao_ref r;
  ao_ref_init (&r, ref);
  res = call_may_clobber_ref_p_1 (call, &r);
  if (res)
    ++alias_stats.call_may_clobber_ref_p_may_alias;
  else
    ++alias_stats.call_may_clobber_ref_p_no_alias;
  return res;
}

/* If the statement STMT may clobber the memory reference REF return true,
   otherwise return false.  */

bool
stmt_may_clobber_ref_p_1 (gimple stmt, ao_ref *ref)
{
  if (is_gimple_call (stmt))
    {
      tree lhs = gimple_call_lhs (stmt);
      if (lhs
	  && TREE_CODE (lhs) != SSA_NAME)
	{
	  ao_ref r;
	  ao_ref_init (&r, lhs);
	  if (refs_may_alias_p_1 (ref, &r, true))
	    return true;
	}

      return call_may_clobber_ref_p_1 (stmt, ref);
    }
  else if (gimple_assign_single_p (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);
      if (TREE_CODE (lhs) != SSA_NAME)
	{
	  ao_ref r;
	  ao_ref_init (&r, lhs);
	  return refs_may_alias_p_1 (ref, &r, true);
	}
    }
  else if (gimple_code (stmt) == GIMPLE_ASM)
    return true;

  return false;
}

bool
stmt_may_clobber_ref_p (gimple stmt, tree ref)
{
  ao_ref r;
  ao_ref_init (&r, ref);
  return stmt_may_clobber_ref_p_1 (stmt, &r);
}
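
/* Editorial usage sketch (not part of GCC; the helper name is invented):
   find the first statement in basic block BB that may clobber the
   memory designated by REF_TREE.  Initializing the ao_ref once up
   front lets the oracle compute the base and extent of the reference
   once instead of redoing that work for every statement.  */

static gimple ATTRIBUTE_UNUSED
example_first_clobber_in_bb (basic_block bb, tree ref_tree)
{
  gimple_stmt_iterator gsi;
  ao_ref r;
  ao_ref_init (&r, ref_tree);
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    if (stmt_may_clobber_ref_p_1 (gsi_stmt (gsi), &r))
      return gsi_stmt (gsi);
  return NULL;
}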

/* If STMT kills the memory reference REF return true, otherwise
   return false.  */

static bool
stmt_kills_ref_p_1 (gimple stmt, ao_ref *ref)
{
  /* For a must-alias check we need to be able to constrain
     the access properly.  */
  ao_ref_base (ref);
  if (ref->max_size == -1)
    return false;

  if (gimple_has_lhs (stmt)
      && TREE_CODE (gimple_get_lhs (stmt)) != SSA_NAME
      /* The assignment is not necessarily carried out if it can throw
	 and we can catch it in the current function where we could inspect
	 the previous value.
	 ??? We only need to care about the RHS throwing.  For aggregate
	 assignments or similar calls and non-call exceptions the LHS
	 might throw as well.  */
      && !stmt_can_throw_internal (stmt))
    {
      tree base, lhs = gimple_get_lhs (stmt);
      HOST_WIDE_INT size, offset, max_size, ref_offset = ref->offset;
      base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
      /* We can get MEM[symbol: sZ, index: D.8862_1] here,
	 so base == ref->base does not always hold.  */
      if (base != ref->base)
	{
	  /* If both base and ref->base are MEM_REFs, only compare the
	     first operand, and if the second operand isn't an equal
	     constant, try to add the offsets into offset and ref_offset.  */
	  if (TREE_CODE (base) == MEM_REF && TREE_CODE (ref->base) == MEM_REF
	      && TREE_OPERAND (base, 0) == TREE_OPERAND (ref->base, 0))
	    {
	      if (!tree_int_cst_equal (TREE_OPERAND (base, 1),
				       TREE_OPERAND (ref->base, 1)))
		{
		  /* Convert the byte offsets of the two MEM_REFs to bits
		     and fold them into the respective bit offsets.  */
		  double_int off1 = mem_ref_offset (base);
		  off1 = off1.alshift (BITS_PER_UNIT == 8
				       ? 3 : exact_log2 (BITS_PER_UNIT),
				       HOST_BITS_PER_DOUBLE_INT);
		  off1 = off1 + double_int::from_shwi (offset);
		  double_int off2 = mem_ref_offset (ref->base);
		  off2 = off2.alshift (BITS_PER_UNIT == 8
				       ? 3 : exact_log2 (BITS_PER_UNIT),
				       HOST_BITS_PER_DOUBLE_INT);
		  off2 = off2 + double_int::from_shwi (ref_offset);
		  if (off1.fits_shwi () && off2.fits_shwi ())
		    {
		      offset = off1.to_shwi ();
		      ref_offset = off2.to_shwi ();
		    }
		  else
		    size = -1;
		}
	    }
	  else
	    size = -1;
	}
      /* For a must-alias check we need to be able to constrain
	 the access properly.  */
      if (size != -1 && size == max_size)
	{
	  /* The store covers [offset, offset + size) in bits; it kills
	     REF if that span covers all of
	     [ref_offset, ref_offset + ref->max_size).  */
	  if (offset <= ref_offset
	      && offset + size >= ref_offset + ref->max_size)
	    return true;
	}
    }

  if (is_gimple_call (stmt))
    {
      tree callee = gimple_call_fndecl (stmt);
      if (callee != NULL_TREE
	  && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
	switch (DECL_FUNCTION_CODE (callee))
	  {
	  case BUILT_IN_MEMCPY:
	  case BUILT_IN_MEMPCPY:
	  case BUILT_IN_MEMMOVE:
	  case BUILT_IN_MEMSET:
	  case BUILT_IN_MEMCPY_CHK:
	  case BUILT_IN_MEMPCPY_CHK:
	  case BUILT_IN_MEMMOVE_CHK:
	  case BUILT_IN_MEMSET_CHK:
	    {
	      tree dest = gimple_call_arg (stmt, 0);
	      tree len = gimple_call_arg (stmt, 2);
	      tree base = NULL_TREE;
	      HOST_WIDE_INT offset = 0;
	      if (!host_integerp (len, 0))
		return false;
	      if (TREE_CODE (dest) == ADDR_EXPR)
		base = get_addr_base_and_unit_offset (TREE_OPERAND (dest, 0),
						      &offset);
	      else if (TREE_CODE (dest) == SSA_NAME)
		base = dest;
	      if (base
		  && base == ao_ref_base (ref))
		{
		  HOST_WIDE_INT size = TREE_INT_CST_LOW (len);
		  /* Compare in bytes: the call stores
		     [offset, offset + size) while REF occupies
		     [ref->offset, ref->offset + ref->max_size) in bits,
		     rounded outward to whole bytes here.  */
		  if (offset <= ref->offset / BITS_PER_UNIT
		      && (offset + size
			  >= ((ref->offset + ref->max_size + BITS_PER_UNIT - 1)
			      / BITS_PER_UNIT)))
		    return true;
		}
	      break;
	    }

	  case BUILT_IN_VA_END:
	    {
	      tree ptr = gimple_call_arg (stmt, 0);
	      if (TREE_CODE (ptr) == ADDR_EXPR)
		{
		  tree base = ao_ref_base (ref);
		  if (TREE_OPERAND (ptr, 0) == base)
		    return true;
		}
	      break;
	    }

	  default:;
	  }
    }

  return false;
}

bool
stmt_kills_ref_p (gimple stmt, tree ref)
{
  ao_ref r;
  ao_ref_init (&r, ref);
  return stmt_kills_ref_p_1 (stmt, &r);
}
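
/* Editorial sketch (not part of GCC; names are invented): the classical
   dead-store question.  The store performed by EARLIER is redundant if
   a later statement LATER overwrites all of its lhs, provided LATER
   always executes after EARLIER and nothing in between uses the stored
   value; those two control- and use-checks are omitted here.  */

static bool ATTRIBUTE_UNUSED
example_store_killed_by_p (gimple earlier, gimple later)
{
  tree lhs = gimple_get_lhs (earlier);
  /* Only memory stores are interesting; SSA names are not memory.  */
  if (!lhs || TREE_CODE (lhs) == SSA_NAME)
    return false;
  return stmt_kills_ref_p (later, lhs);
}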

/* Walk the virtual use-def chain of VUSE until hitting the virtual operand
   TARGET, or a statement clobbering the memory reference REF, in which
   case false is returned.  The walk starts with VUSE, one argument of PHI.  */

static bool
maybe_skip_until (gimple phi, tree target, ao_ref *ref,
		  tree vuse, unsigned int *cnt, bitmap *visited,
		  bool abort_on_visited)
{
  basic_block bb = gimple_bb (phi);

  if (!*visited)
    *visited = BITMAP_ALLOC (NULL);

  bitmap_set_bit (*visited, SSA_NAME_VERSION (PHI_RESULT (phi)));

  /* Walk until we hit the target.  */
  while (vuse != target)
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
      /* Recurse for PHI nodes.  */
      if (gimple_code (def_stmt) == GIMPLE_PHI)
	{
	  /* An already visited PHI node ends the walk successfully.  */
	  if (bitmap_bit_p (*visited, SSA_NAME_VERSION (PHI_RESULT (def_stmt))))
	    return !abort_on_visited;
	  vuse = get_continuation_for_phi (def_stmt, ref, cnt,
					   visited, abort_on_visited);
	  if (!vuse)
	    return false;
	  continue;
	}
      else if (gimple_nop_p (def_stmt))
	/* Reaching the end of the IL ends the walk unsuccessfully.  */
	return false;
      else
	{
	  /* So does a clobbering statement.  */
	  ++*cnt;
	  if (stmt_may_clobber_ref_p_1 (def_stmt, ref))
	    return false;
	}
      /* If we reach a new basic-block see if we already skipped it
	 in a previous walk that ended successfully.  */
      if (gimple_bb (def_stmt) != bb)
	{
	  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (vuse)))
	    return !abort_on_visited;
	  bb = gimple_bb (def_stmt);
	}
      vuse = gimple_vuse (def_stmt);
    }
  return true;
}

/* For two PHI arguments ARG0 and ARG1 try to skip non-aliasing code
   until we hit the phi argument definition that dominates the other one.
   Return that, or NULL_TREE if there is no such definition.  */

static tree
get_continuation_for_phi_1 (gimple phi, tree arg0, tree arg1,
			    ao_ref *ref, unsigned int *cnt,
			    bitmap *visited, bool abort_on_visited)
{
  gimple def0 = SSA_NAME_DEF_STMT (arg0);
  gimple def1 = SSA_NAME_DEF_STMT (arg1);
  tree common_vuse;

  if (arg0 == arg1)
    return arg0;
  else if (gimple_nop_p (def0)
	   || (!gimple_nop_p (def1)
	       && dominated_by_p (CDI_DOMINATORS,
				  gimple_bb (def1), gimple_bb (def0))))
    {
      if (maybe_skip_until (phi, arg0, ref, arg1, cnt,
			    visited, abort_on_visited))
	return arg0;
    }
  else if (gimple_nop_p (def1)
	   || dominated_by_p (CDI_DOMINATORS,
			      gimple_bb (def0), gimple_bb (def1)))
    {
      if (maybe_skip_until (phi, arg1, ref, arg0, cnt,
			    visited, abort_on_visited))
	return arg1;
    }
  /* Special case of a diamond:

       MEM_1 = ...
       goto (cond) ? L1 : L2
       L1: store1 = ...  #MEM_2 = vuse(MEM_1)
       goto L3
       L2: store2 = ...  #MEM_3 = vuse(MEM_1)
       L3: MEM_4 = PHI<MEM_2, MEM_3>

     We were called with the PHI at L3; MEM_2 and MEM_3 don't
     dominate each other, but we can still easily skip this PHI node
     if we recognize that the vuse MEM operand is the same for both,
     and that we can skip both statements (they don't clobber us).
     This is still linear.  Don't use maybe_skip_until, that might
     potentially be slow.  */
  else if ((common_vuse = gimple_vuse (def0))
	   && common_vuse == gimple_vuse (def1))
    {
      *cnt += 2;
      if (!stmt_may_clobber_ref_p_1 (def0, ref)
	  && !stmt_may_clobber_ref_p_1 (def1, ref))
	return common_vuse;
    }

  return NULL_TREE;
}

/* Starting from a PHI node for the virtual operand of the memory reference
   REF find a continuation virtual operand that allows us to continue walking
   statements dominating PHI, skipping only statements that cannot possibly
   clobber REF.  Increments *CNT for each alias disambiguation done.
   Returns NULL_TREE if no suitable virtual operand can be found.  */

tree
get_continuation_for_phi (gimple phi, ao_ref *ref,
			  unsigned int *cnt, bitmap *visited,
			  bool abort_on_visited)
{
  unsigned nargs = gimple_phi_num_args (phi);

  /* A single-argument PHI we can simply look through.  */
  if (nargs == 1)
    return PHI_ARG_DEF (phi, 0);

  /* For two or more arguments try to pairwise skip non-aliasing code
     until we hit the phi argument definition that dominates the other one.  */
  else if (nargs >= 2)
    {
      tree arg0, arg1;
      unsigned i;

      /* Find a candidate for the virtual operand whose definition
	 dominates those of all others.  */
      arg0 = PHI_ARG_DEF (phi, 0);
      if (!SSA_NAME_IS_DEFAULT_DEF (arg0))
	for (i = 1; i < nargs; ++i)
	  {
	    arg1 = PHI_ARG_DEF (phi, i);
	    if (SSA_NAME_IS_DEFAULT_DEF (arg1))
	      {
		arg0 = arg1;
		break;
	      }
	    if (dominated_by_p (CDI_DOMINATORS,
				gimple_bb (SSA_NAME_DEF_STMT (arg0)),
				gimple_bb (SSA_NAME_DEF_STMT (arg1))))
	      arg0 = arg1;
	  }

      /* Then pairwise reduce against the found candidate.  */
      for (i = 0; i < nargs; ++i)
	{
	  arg1 = PHI_ARG_DEF (phi, i);
	  arg0 = get_continuation_for_phi_1 (phi, arg0, arg1, ref,
					     cnt, visited, abort_on_visited);
	  if (!arg0)
	    return NULL_TREE;
	}

      return arg0;
    }

  return NULL_TREE;
}
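
/* Editorial sketch (not part of GCC; the helper name is invented): skip
   over the virtual PHI defining VUSE when none of the statements it
   joins may clobber the memory designated by REF_TREE, returning the
   vuse to continue walking from, or NULL_TREE.  */

static tree ATTRIBUTE_UNUSED
example_skip_virtual_phi (tree vuse, tree ref_tree)
{
  gimple def = SSA_NAME_DEF_STMT (vuse);
  tree res = NULL_TREE;
  if (gimple_code (def) == GIMPLE_PHI)
    {
      ao_ref r;
      unsigned int cnt = 0;
      bitmap visited = NULL;
      ao_ref_init (&r, ref_tree);
      res = get_continuation_for_phi (def, &r, &cnt, &visited, false);
      if (visited)
	BITMAP_FREE (visited);
    }
  return res;
}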

/* Based on the memory reference REF and its virtual use VUSE call
   WALKER for each virtual use that is equivalent to VUSE, including VUSE
   itself.  That is, for each virtual use for which its defining statement
   does not clobber REF.

   WALKER is called with REF, the current virtual use and DATA.  If
   WALKER returns non-NULL the walk stops and its result is returned.
   At the end of a non-successful walk NULL is returned.

   TRANSLATE, if non-NULL, is called with a pointer to REF, the virtual
   use whose defining statement may clobber REF, and DATA.
   If TRANSLATE returns (void *)-1 the walk stops and NULL is returned.
   If TRANSLATE returns non-NULL the walk stops and its result is returned.
   If TRANSLATE returns NULL the walk continues and TRANSLATE is supposed
   to adjust REF and *DATA to make that valid.

   TODO: Cache the vector of equivalent vuses per ref, vuse pair.  */

void *
walk_non_aliased_vuses (ao_ref *ref, tree vuse,
			void *(*walker)(ao_ref *, tree, unsigned int, void *),
			void *(*translate)(ao_ref *, tree, void *), void *data)
{
  bitmap visited = NULL;
  void *res;
  unsigned int cnt = 0;
  bool translated = false;

  timevar_push (TV_ALIAS_STMT_WALK);

  do
    {
      gimple def_stmt;

      /* ???  Do we want to account this to TV_ALIAS_STMT_WALK?  */
      res = (*walker) (ref, vuse, cnt, data);
      /* Abort walk.  */
      if (res == (void *)-1)
	{
	  res = NULL;
	  break;
	}
      /* Lookup succeeded.  */
      else if (res != NULL)
	break;

      def_stmt = SSA_NAME_DEF_STMT (vuse);
      if (gimple_nop_p (def_stmt))
	break;
      else if (gimple_code (def_stmt) == GIMPLE_PHI)
	vuse = get_continuation_for_phi (def_stmt, ref, &cnt,
					 &visited, translated);
      else
	{
	  cnt++;
	  if (stmt_may_clobber_ref_p_1 (def_stmt, ref))
	    {
	      if (!translate)
		break;
	      res = (*translate) (ref, vuse, data);
	      /* Failed lookup and translation.  */
	      if (res == (void *)-1)
		{
		  res = NULL;
		  break;
		}
	      /* Lookup succeeded.  */
	      else if (res != NULL)
		break;
	      /* Translation succeeded, continue walking.  */
	      translated = true;
	    }
	  vuse = gimple_vuse (def_stmt);
	}
    }
  while (vuse);

  if (visited)
    BITMAP_FREE (visited);

  timevar_pop (TV_ALIAS_STMT_WALK);

  return res;
}
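
/* Editorial sketch (not part of GCC; names are invented): a walker
   callback for walk_non_aliased_vuses that simply remembers the last
   equivalent vuse seen, giving up after 100 disambiguations.  After

     tree last = vuse;
     walk_non_aliased_vuses (&r, vuse, example_record_vuse_cb,
			     NULL, &last);

   LAST is the oldest vuse not separated from VUSE by any statement
   clobbering R, much like value-numbering uses the walk to
   canonicalize vuses.  */

static void * ATTRIBUTE_UNUSED
example_record_vuse_cb (ao_ref *ref ATTRIBUTE_UNUSED, tree vuse,
			unsigned int cnt, void *data)
{
  if (cnt > 100)
    return (void *) -1;		/* Too much work; abort the walk.  */
  *(tree *) data = vuse;	/* Remember the vuse we reached.  */
  return NULL;			/* NULL continues the walk.  */
}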

/* Based on the memory reference REF call WALKER for each vdef whose
   defining statement may clobber REF, starting with VDEF.  If REF
   is NULL_TREE, each defining statement is visited.

   WALKER is called with REF, the current vdef and DATA.  If WALKER
   returns true the walk is stopped, otherwise it continues.

   At PHI nodes walk_aliased_vdefs forks into one walk for each
   PHI argument (but only one walk continues on merge points).

   The function returns the number of statements walked.  */

static unsigned int
walk_aliased_vdefs_1 (ao_ref *ref, tree vdef,
		      bool (*walker)(ao_ref *, tree, void *), void *data,
		      bitmap *visited, unsigned int cnt)
{
  do
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (vdef);

      if (*visited
	  && !bitmap_set_bit (*visited, SSA_NAME_VERSION (vdef)))
	return cnt;

      if (gimple_nop_p (def_stmt))
	return cnt;
      else if (gimple_code (def_stmt) == GIMPLE_PHI)
	{
	  unsigned i;
	  if (!*visited)
	    *visited = BITMAP_ALLOC (NULL);
	  for (i = 0; i < gimple_phi_num_args (def_stmt); ++i)
	    cnt += walk_aliased_vdefs_1 (ref, gimple_phi_arg_def (def_stmt, i),
					 walker, data, visited, 0);
	  return cnt;
	}

      /* ???  Do we want to account this to TV_ALIAS_STMT_WALK?  */
      cnt++;
      if ((!ref
	   || stmt_may_clobber_ref_p_1 (def_stmt, ref))
	  && (*walker) (ref, vdef, data))
	return cnt;

      vdef = gimple_vuse (def_stmt);
    }
  while (1);
}

unsigned int
walk_aliased_vdefs (ao_ref *ref, tree vdef,
		    bool (*walker)(ao_ref *, tree, void *), void *data,
		    bitmap *visited)
{
  bitmap local_visited = NULL;
  unsigned int ret;

  timevar_push (TV_ALIAS_STMT_WALK);

  ret = walk_aliased_vdefs_1 (ref, vdef, walker, data,
			      visited ? visited : &local_visited, 0);
  if (local_visited)
    BITMAP_FREE (local_visited);

  timevar_pop (TV_ALIAS_STMT_WALK);

  return ret;
}
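
/* Editorial sketch (not part of GCC; names are invented): count how
   many statements reachable through the virtual use-def chain above
   STMT may clobber the memory designated by REF_TREE, using
   walk_aliased_vdefs with a callback that never stops the walk.  */

static bool
example_count_clobber_cb (ao_ref *ref ATTRIBUTE_UNUSED,
			  tree vdef ATTRIBUTE_UNUSED, void *data)
{
  ++*(unsigned int *) data;
  return false;		/* Returning false continues the walk.  */
}

static unsigned int ATTRIBUTE_UNUSED
example_count_clobbers (gimple stmt, tree ref_tree)
{
  ao_ref r;
  unsigned int n = 0;
  ao_ref_init (&r, ref_tree);
  if (gimple_vuse (stmt))
    walk_aliased_vdefs (&r, gimple_vuse (stmt), example_count_clobber_cb,
			&n, NULL);
  return n;
}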