1 /* Alias analysis for trees.
2 Copyright (C) 2004-2013 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "tm_p.h"
27 #include "target.h"
28 #include "basic-block.h"
29 #include "timevar.h" /* for TV_ALIAS_STMT_WALK */
30 #include "ggc.h"
31 #include "langhooks.h"
32 #include "flags.h"
33 #include "function.h"
34 #include "tree-pretty-print.h"
35 #include "dumpfile.h"
36 #include "gimple.h"
37 #include "tree-flow.h"
38 #include "tree-inline.h"
39 #include "params.h"
40 #include "vec.h"
41 #include "bitmap.h"
42 #include "pointer-set.h"
43 #include "alloc-pool.h"
44 #include "tree-ssa-alias.h"
46 /* Broad overview of how alias analysis on gimple works:
48 Statements clobbering or using memory are linked through the
49 virtual operand factored use-def chain. The virtual operand
50 is unique per function; its symbol is accessible via gimple_vop (cfun).
51 Virtual operands are used for efficiently walking memory statements
52 in the gimple IL and are useful for things like value-numbering as
53 a generation count for memory references.
55 SSA_NAME pointers may have associated points-to information
56 accessible via the SSA_NAME_PTR_INFO macro. Flow-insensitive
57 points-to information is (re-)computed by the TODO_rebuild_alias
58 pass manager todo. Points-to information is also used for more
59 precise tracking of call-clobbered and call-used variables and
60 related disambiguations.
62 This file contains functions for disambiguating memory references,
63 the so-called alias oracle, and tools for walking the gimple IL.
65 The main alias-oracle entry-points are
67 bool stmt_may_clobber_ref_p (gimple, tree)
69 This function queries if a statement may invalidate (parts of)
70 the memory designated by the reference tree argument.
72 bool ref_maybe_used_by_stmt_p (gimple, tree)
74 This function queries if a statement may need (parts of) the
75 memory designated by the reference tree argument.
77 There are variants of these functions that only handle the call
78 part of a statement, call_may_clobber_ref_p and ref_maybe_used_by_call_p.
79 Note that these do not disambiguate against a possible call lhs.
81 bool refs_may_alias_p (tree, tree)
83 This function tries to disambiguate two reference trees.
85 bool ptr_deref_may_alias_global_p (tree)
87 This function queries if dereferencing a pointer variable may
88 alias global memory.
90 More low-level disambiguators are available and documented in
91 this file. Low-level disambiguators dealing with points-to
92 information are in tree-ssa-structalias.c. */
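/* A minimal usage sketch (illustration only, not code from this file):
   a client that repeatedly tests one reference REF against statements
   would wrap it once and reuse the wrapper,

     ao_ref r;
     ao_ref_init (&r, ref);
     bool clobbered = stmt_may_clobber_ref_p_1 (stmt, &r);
     bool used = ref_maybe_used_by_stmt_p (stmt, ref);

   so that the lazily computed base, extent and alias sets of the
   ao_ref are shared across queries.  */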
95 /* Query statistics for the different low-level disambiguators.
96 A high-level query may trigger multiple of them. */
98 static struct {
99 unsigned HOST_WIDE_INT refs_may_alias_p_may_alias;
100 unsigned HOST_WIDE_INT refs_may_alias_p_no_alias;
101 unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_may_alias;
102 unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_no_alias;
103 unsigned HOST_WIDE_INT call_may_clobber_ref_p_may_alias;
104 unsigned HOST_WIDE_INT call_may_clobber_ref_p_no_alias;
105 } alias_stats;
107 void
108 dump_alias_stats (FILE *s)
110 fprintf (s, "\nAlias oracle query stats:\n");
111 fprintf (s, " refs_may_alias_p: "
112 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
113 HOST_WIDE_INT_PRINT_DEC" queries\n",
114 alias_stats.refs_may_alias_p_no_alias,
115 alias_stats.refs_may_alias_p_no_alias
116 + alias_stats.refs_may_alias_p_may_alias);
117 fprintf (s, " ref_maybe_used_by_call_p: "
118 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
119 HOST_WIDE_INT_PRINT_DEC" queries\n",
120 alias_stats.ref_maybe_used_by_call_p_no_alias,
121 alias_stats.ref_maybe_used_by_call_p_no_alias
122 + alias_stats.ref_maybe_used_by_call_p_may_alias);
123 fprintf (s, " call_may_clobber_ref_p: "
124 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
125 HOST_WIDE_INT_PRINT_DEC" queries\n",
126 alias_stats.call_may_clobber_ref_p_no_alias,
127 alias_stats.call_may_clobber_ref_p_no_alias
128 + alias_stats.call_may_clobber_ref_p_may_alias);
132 /* Return true if dereferencing PTR may alias a global variable. */
134 bool
135 ptr_deref_may_alias_global_p (tree ptr)
137 struct ptr_info_def *pi;
139 /* If we end up with a pointer constant here, it may point
140 to global memory. */
141 if (TREE_CODE (ptr) != SSA_NAME)
142 return true;
144 pi = SSA_NAME_PTR_INFO (ptr);
146 /* If we do not have points-to information for this variable,
147 we have to punt. */
148 if (!pi)
149 return true;
151 /* ??? This does not use TBAA to prune globals ptr may not access. */
152 return pt_solution_includes_global (&pi->pt);
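/* For example, a pointer that points-to analysis has found to point
   only to a non-escaping local,

     int l;
     int *p = &l;

   allows this predicate to return false, while a pointer to a global,
   any non-SSA pointer or a pointer without SSA_NAME_PTR_INFO makes it
   return true.  */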
155 /* Return true if dereferencing PTR may alias DECL.
156 The caller is responsible for applying TBAA to see if PTR
157 may access DECL at all. */
159 static bool
160 ptr_deref_may_alias_decl_p (tree ptr, tree decl)
162 struct ptr_info_def *pi;
164 /* Conversions are irrelevant for points-to information and
165 data-dependence analysis can feed us those. */
166 STRIP_NOPS (ptr);
168 /* Anything we do not explicitly handle aliases. */
169 if ((TREE_CODE (ptr) != SSA_NAME
170 && TREE_CODE (ptr) != ADDR_EXPR
171 && TREE_CODE (ptr) != POINTER_PLUS_EXPR)
172 || !POINTER_TYPE_P (TREE_TYPE (ptr))
173 || (TREE_CODE (decl) != VAR_DECL
174 && TREE_CODE (decl) != PARM_DECL
175 && TREE_CODE (decl) != RESULT_DECL))
176 return true;
178 /* Disregard pointer offsetting. */
179 if (TREE_CODE (ptr) == POINTER_PLUS_EXPR)
180 {
181 do
182 {
183 ptr = TREE_OPERAND (ptr, 0);
184 }
185 while (TREE_CODE (ptr) == POINTER_PLUS_EXPR);
186 return ptr_deref_may_alias_decl_p (ptr, decl);
187 }
189 /* ADDR_EXPR pointers either just offset another pointer or directly
190 specify the pointed-to set. */
191 if (TREE_CODE (ptr) == ADDR_EXPR)
193 tree base = get_base_address (TREE_OPERAND (ptr, 0));
194 if (base
195 && (TREE_CODE (base) == MEM_REF
196 || TREE_CODE (base) == TARGET_MEM_REF))
197 ptr = TREE_OPERAND (base, 0);
198 else if (base
199 && DECL_P (base))
200 return base == decl;
201 else if (base
202 && CONSTANT_CLASS_P (base))
203 return false;
204 else
205 return true;
209 /* Non-aliased variables cannot be pointed to. */
209 if (!may_be_aliased (decl))
210 return false;
212 /* If we do not have useful points-to information for this pointer
213 we cannot disambiguate anything else. */
214 pi = SSA_NAME_PTR_INFO (ptr);
215 if (!pi)
216 return true;
218 return pt_solution_includes (&pi->pt, decl);
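/* As a concrete example, with

     int a, b;
     int *p = &a;

   the ADDR_EXPR handling above answers the queries against a and b
   directly (true resp. false), while for an SSA pointer p_1 whose
   points-to set was computed as { a } the final pt_solution_includes
   test disambiguates against b.  */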
221 /* Return true if dereferenced PTR1 and PTR2 may alias.
222 The caller is responsible for applying TBAA to see if accesses
223 through PTR1 and PTR2 may conflict at all. */
225 bool
226 ptr_derefs_may_alias_p (tree ptr1, tree ptr2)
228 struct ptr_info_def *pi1, *pi2;
230 /* Conversions are irrelevant for points-to information and
231 data-dependence analysis can feed us those. */
232 STRIP_NOPS (ptr1);
233 STRIP_NOPS (ptr2);
235 /* Disregard pointer offsetting. */
236 if (TREE_CODE (ptr1) == POINTER_PLUS_EXPR)
237 {
238 do
239 {
240 ptr1 = TREE_OPERAND (ptr1, 0);
241 }
242 while (TREE_CODE (ptr1) == POINTER_PLUS_EXPR);
243 return ptr_derefs_may_alias_p (ptr1, ptr2);
244 }
245 if (TREE_CODE (ptr2) == POINTER_PLUS_EXPR)
246 {
247 do
248 {
249 ptr2 = TREE_OPERAND (ptr2, 0);
250 }
251 while (TREE_CODE (ptr2) == POINTER_PLUS_EXPR);
252 return ptr_derefs_may_alias_p (ptr1, ptr2);
253 }
255 /* ADDR_EXPR pointers either just offset another pointer or directly
256 specify the pointed-to set. */
257 if (TREE_CODE (ptr1) == ADDR_EXPR)
259 tree base = get_base_address (TREE_OPERAND (ptr1, 0));
260 if (base
261 && (TREE_CODE (base) == MEM_REF
262 || TREE_CODE (base) == TARGET_MEM_REF))
263 return ptr_derefs_may_alias_p (TREE_OPERAND (base, 0), ptr2);
264 else if (base
265 && DECL_P (base))
266 return ptr_deref_may_alias_decl_p (ptr2, base);
267 else
268 return true;
270 if (TREE_CODE (ptr2) == ADDR_EXPR)
272 tree base = get_base_address (TREE_OPERAND (ptr2, 0));
273 if (base
274 && (TREE_CODE (base) == MEM_REF
275 || TREE_CODE (base) == TARGET_MEM_REF))
276 return ptr_derefs_may_alias_p (ptr1, TREE_OPERAND (base, 0));
277 else if (base
278 && DECL_P (base))
279 return ptr_deref_may_alias_decl_p (ptr1, base);
280 else
281 return true;
284 /* From here we require SSA name pointers. Anything else aliases. */
285 if (TREE_CODE (ptr1) != SSA_NAME
286 || TREE_CODE (ptr2) != SSA_NAME
287 || !POINTER_TYPE_P (TREE_TYPE (ptr1))
288 || !POINTER_TYPE_P (TREE_TYPE (ptr2)))
289 return true;
291 /* We may end up with two empty points-to solutions for the same pointer.
292 In this case we still want to say both pointers alias, so shortcut
293 that here. */
294 if (ptr1 == ptr2)
295 return true;
297 /* If we do not have useful points-to information for either pointer
298 we cannot disambiguate anything else. */
299 pi1 = SSA_NAME_PTR_INFO (ptr1);
300 pi2 = SSA_NAME_PTR_INFO (ptr2);
301 if (!pi1 || !pi2)
302 return true;
304 /* ??? This does not use TBAA to prune decls from the intersection
305 that not both pointers may access. */
306 return pt_solutions_intersect (&pi1->pt, &pi2->pt);
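/* For example, dereferences of &a and &b for distinct decls a and b
   are disambiguated by the ADDR_EXPR cases above; p_1 + 4 (a
   POINTER_PLUS_EXPR) strips back to p_1 and thus aliases p_1 itself;
   and two unrelated SSA pointers fall through to the intersection
   test on their points-to solutions.  */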
309 /* Return true if dereferencing PTR may alias *REF.
310 The caller is responsible for applying TBAA to see if PTR
311 may access *REF at all. */
313 static bool
314 ptr_deref_may_alias_ref_p_1 (tree ptr, ao_ref *ref)
316 tree base = ao_ref_base (ref);
318 if (TREE_CODE (base) == MEM_REF
319 || TREE_CODE (base) == TARGET_MEM_REF)
320 return ptr_derefs_may_alias_p (ptr, TREE_OPERAND (base, 0));
321 else if (DECL_P (base))
322 return ptr_deref_may_alias_decl_p (ptr, base);
324 return true;
327 /* Return true if REF may refer to global memory. */
329 bool
330 ref_may_alias_global_p (tree ref)
332 tree base = get_base_address (ref);
333 if (DECL_P (base))
334 return is_global_var (base);
335 else if (TREE_CODE (base) == MEM_REF
336 || TREE_CODE (base) == TARGET_MEM_REF)
337 return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
338 return true;
341 /* Return true if STMT may clobber global memory. */
343 bool
344 stmt_may_clobber_global_p (gimple stmt)
346 tree lhs;
348 if (!gimple_vdef (stmt))
349 return false;
351 /* ??? We can ask the oracle whether an artificial pointer
352 dereference with a pointer with points-to information covering
353 all global memory (what about non-address taken memory?) may be
354 clobbered by this call. As there is at the moment no convenient
355 way of doing that without generating garbage, do some manual
356 checking instead.
357 ??? We could make a NULL ao_ref argument to the various
358 predicates special, meaning any global memory. */
360 switch (gimple_code (stmt))
362 case GIMPLE_ASSIGN:
363 lhs = gimple_assign_lhs (stmt);
364 return (TREE_CODE (lhs) != SSA_NAME
365 && ref_may_alias_global_p (lhs));
366 case GIMPLE_CALL:
367 return true;
368 default:
369 return true;
374 /* Dump alias information on FILE. */
376 void
377 dump_alias_info (FILE *file)
379 unsigned i;
380 const char *funcname
381 = lang_hooks.decl_printable_name (current_function_decl, 2);
382 tree var;
384 fprintf (file, "\n\nAlias information for %s\n\n", funcname);
386 fprintf (file, "Aliased symbols\n\n");
388 FOR_EACH_LOCAL_DECL (cfun, i, var)
390 if (may_be_aliased (var))
391 dump_variable (file, var);
394 fprintf (file, "\nCall clobber information\n");
396 fprintf (file, "\nESCAPED");
397 dump_points_to_solution (file, &cfun->gimple_df->escaped);
399 fprintf (file, "\n\nFlow-insensitive points-to information\n\n");
401 for (i = 1; i < num_ssa_names; i++)
403 tree ptr = ssa_name (i);
404 struct ptr_info_def *pi;
406 if (ptr == NULL_TREE
407 || SSA_NAME_IN_FREE_LIST (ptr))
408 continue;
410 pi = SSA_NAME_PTR_INFO (ptr);
411 if (pi)
412 dump_points_to_info_for (file, ptr);
415 fprintf (file, "\n");
419 /* Dump alias information on stderr. */
421 DEBUG_FUNCTION void
422 debug_alias_info (void)
424 dump_alias_info (stderr);
428 /* Dump the points-to set *PT into FILE. */
430 void
431 dump_points_to_solution (FILE *file, struct pt_solution *pt)
433 if (pt->anything)
434 fprintf (file, ", points-to anything");
436 if (pt->nonlocal)
437 fprintf (file, ", points-to non-local");
439 if (pt->escaped)
440 fprintf (file, ", points-to escaped");
442 if (pt->ipa_escaped)
443 fprintf (file, ", points-to unit escaped");
445 if (pt->null)
446 fprintf (file, ", points-to NULL");
448 if (pt->vars)
450 fprintf (file, ", points-to vars: ");
451 dump_decl_set (file, pt->vars);
452 if (pt->vars_contains_global)
453 fprintf (file, " (includes global vars)");
458 /* Unified dump function for pt_solution. */
460 DEBUG_FUNCTION void
461 debug (pt_solution &ref)
463 dump_points_to_solution (stderr, &ref);
466 DEBUG_FUNCTION void
467 debug (pt_solution *ptr)
469 if (ptr)
470 debug (*ptr);
471 else
472 fprintf (stderr, "<nil>\n");
476 /* Dump points-to information for SSA_NAME PTR into FILE. */
478 void
479 dump_points_to_info_for (FILE *file, tree ptr)
481 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
483 print_generic_expr (file, ptr, dump_flags);
485 if (pi)
486 dump_points_to_solution (file, &pi->pt);
487 else
488 fprintf (file, ", points-to anything");
490 fprintf (file, "\n");
494 /* Dump points-to information for VAR into stderr. */
496 DEBUG_FUNCTION void
497 debug_points_to_info_for (tree var)
499 dump_points_to_info_for (stderr, var);
503 /* Initializes the alias-oracle reference representation *R from REF. */
505 void
506 ao_ref_init (ao_ref *r, tree ref)
508 r->ref = ref;
509 r->base = NULL_TREE;
510 r->offset = 0;
511 r->size = -1;
512 r->max_size = -1;
513 r->ref_alias_set = -1;
514 r->base_alias_set = -1;
515 r->volatile_p = ref ? TREE_THIS_VOLATILE (ref) : false;
518 /* Returns the base object of the memory reference *REF. */
520 tree
521 ao_ref_base (ao_ref *ref)
523 if (ref->base)
524 return ref->base;
525 ref->base = get_ref_base_and_extent (ref->ref, &ref->offset, &ref->size,
526 &ref->max_size);
527 return ref->base;
530 /* Returns the base object alias set of the memory reference *REF. */
532 static alias_set_type
533 ao_ref_base_alias_set (ao_ref *ref)
535 tree base_ref;
536 if (ref->base_alias_set != -1)
537 return ref->base_alias_set;
538 if (!ref->ref)
539 return 0;
540 base_ref = ref->ref;
541 while (handled_component_p (base_ref))
542 base_ref = TREE_OPERAND (base_ref, 0);
543 ref->base_alias_set = get_alias_set (base_ref);
544 return ref->base_alias_set;
547 /* Returns the reference alias set of the memory reference *REF. */
549 alias_set_type
550 ao_ref_alias_set (ao_ref *ref)
552 if (ref->ref_alias_set != -1)
553 return ref->ref_alias_set;
554 ref->ref_alias_set = get_alias_set (ref->ref);
555 return ref->ref_alias_set;
558 /* Init an alias-oracle reference representation from a gimple pointer
559 PTR and a gimple size SIZE in bytes. If SIZE is NULL_TREE the
560 size is assumed to be unknown. The access is assumed to be only
561 to or after the pointer target, not before it. */
563 void
564 ao_ref_init_from_ptr_and_size (ao_ref *ref, tree ptr, tree size)
566 HOST_WIDE_INT t1, t2;
567 ref->ref = NULL_TREE;
568 if (TREE_CODE (ptr) == ADDR_EXPR)
569 ref->base = get_ref_base_and_extent (TREE_OPERAND (ptr, 0),
570 &ref->offset, &t1, &t2);
571 else
573 ref->base = build2 (MEM_REF, char_type_node,
574 ptr, null_pointer_node);
575 ref->offset = 0;
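/* Only a size that, converted from bytes to bits, still fits a
   HOST_WIDE_INT is usable; the * 8 / 8 round-trip below is the
   overflow check for that conversion.  */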
577 if (size
578 && host_integerp (size, 0)
579 && TREE_INT_CST_LOW (size) * 8 / 8 == TREE_INT_CST_LOW (size))
580 ref->max_size = ref->size = TREE_INT_CST_LOW (size) * 8;
581 else
582 ref->max_size = ref->size = -1;
583 ref->ref_alias_set = 0;
584 ref->base_alias_set = 0;
585 ref->volatile_p = false;
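/* For example, the destination of memcpy (p, q, n) is modeled by
   ao_ref_init_from_ptr_and_size (&dref, p, n): an access of n bytes
   starting at *p if n is a known constant, and an access of unknown
   extent otherwise, always with conservative (zero) alias sets.  */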
588 /* Return 1 if TYPE1 and TYPE2 are to be considered equivalent for the
589 purpose of TBAA. Return 0 if they are distinct and -1 if we cannot
590 decide. */
592 static inline int
593 same_type_for_tbaa (tree type1, tree type2)
595 type1 = TYPE_MAIN_VARIANT (type1);
596 type2 = TYPE_MAIN_VARIANT (type2);
598 /* If we would have to do structural comparison, bail out. */
599 if (TYPE_STRUCTURAL_EQUALITY_P (type1)
600 || TYPE_STRUCTURAL_EQUALITY_P (type2))
601 return -1;
603 /* Compare the canonical types. */
604 if (TYPE_CANONICAL (type1) == TYPE_CANONICAL (type2))
605 return 1;
607 /* ??? Array types are not properly unified in all cases as we have
608 spurious changes in the index types for example. Removing this
609 causes all sorts of problems with the Fortran frontend. */
610 if (TREE_CODE (type1) == ARRAY_TYPE
611 && TREE_CODE (type2) == ARRAY_TYPE)
612 return -1;
614 /* ??? In Ada, an lvalue of an unconstrained type can be used to access an
615 object of one of its constrained subtypes, e.g. when a function with an
616 unconstrained parameter passed by reference is called on an object and
617 inlined. But, even in the case of a fixed size, type and subtypes are
618 not equivalent enough to share the same TYPE_CANONICAL, since this
619 would mean that conversions between them are useless, whereas they are
620 not (e.g. type and subtypes can have different modes). So, in the end,
621 they are only guaranteed to have the same alias set. */
622 if (get_alias_set (type1) == get_alias_set (type2))
623 return -1;
625 /* The types are known to be not equal. */
626 return 0;
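/* For example, "int" and "const int" share a main variant and
   compare as 1; "int" and "float" have distinct canonical types and
   distinct alias sets and compare as 0; array types whose canonical
   types differ, or types that merely share an alias set, compare as
   -1, i.e. "don't know".  */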
629 /* Determine if the two component references REF1 and REF2 which are
630 based on access types TYPE1 and TYPE2 and of which at least one is based
631 on an indirect reference may alias. REF2 is the only one that can
632 be a decl, in which case REF2_IS_DECL is true.
633 REF1_ALIAS_SET, BASE1_ALIAS_SET, REF2_ALIAS_SET and BASE2_ALIAS_SET
634 are the respective alias sets. */
636 static bool
637 aliasing_component_refs_p (tree ref1,
638 alias_set_type ref1_alias_set,
639 alias_set_type base1_alias_set,
640 HOST_WIDE_INT offset1, HOST_WIDE_INT max_size1,
641 tree ref2,
642 alias_set_type ref2_alias_set,
643 alias_set_type base2_alias_set,
644 HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2,
645 bool ref2_is_decl)
647 /* If one reference is a component reference through pointers, try to find a
648 common base and apply offset-based disambiguation. This handles
649 for example
650 struct A { int i; int j; } *q;
651 struct B { struct A a; int k; } *p;
652 disambiguating q->i and p->a.j. */
653 tree base1, base2;
654 tree type1, type2;
655 tree *refp;
656 int same_p;
658 /* Choose bases and base types to search for. */
659 base1 = ref1;
660 while (handled_component_p (base1))
661 base1 = TREE_OPERAND (base1, 0);
662 type1 = TREE_TYPE (base1);
663 base2 = ref2;
664 while (handled_component_p (base2))
665 base2 = TREE_OPERAND (base2, 0);
666 type2 = TREE_TYPE (base2);
668 /* Now search for type1 in the access path of ref2. This
669 would be a common base for doing offset-based disambiguation on. */
670 refp = &ref2;
671 while (handled_component_p (*refp)
672 && same_type_for_tbaa (TREE_TYPE (*refp), type1) == 0)
673 refp = &TREE_OPERAND (*refp, 0);
674 same_p = same_type_for_tbaa (TREE_TYPE (*refp), type1);
675 /* If we couldn't compare types we have to bail out. */
676 if (same_p == -1)
677 return true;
678 else if (same_p == 1)
680 HOST_WIDE_INT offadj, sztmp, msztmp;
681 get_ref_base_and_extent (*refp, &offadj, &sztmp, &msztmp);
682 offset2 -= offadj;
683 get_ref_base_and_extent (base1, &offadj, &sztmp, &msztmp);
684 offset1 -= offadj;
685 return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
687 /* If we didn't find a common base, try the other way around. */
688 refp = &ref1;
689 while (handled_component_p (*refp)
690 && same_type_for_tbaa (TREE_TYPE (*refp), type2) == 0)
691 refp = &TREE_OPERAND (*refp, 0);
692 same_p = same_type_for_tbaa (TREE_TYPE (*refp), type2);
693 /* If we couldn't compare types we have to bail out. */
694 if (same_p == -1)
695 return true;
696 else if (same_p == 1)
698 HOST_WIDE_INT offadj, sztmp, msztmp;
699 get_ref_base_and_extent (*refp, &offadj, &sztmp, &msztmp);
700 offset1 -= offadj;
701 get_ref_base_and_extent (base2, &offadj, &sztmp, &msztmp);
702 offset2 -= offadj;
703 return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
706 /* If we have two type access paths B1.path1 and B2.path2 they may
707 only alias if either B1 is in B2.path2 or B2 is in B1.path1.
708 But we can still have a path that goes B1.path1...B2.path2 with
709 a part that we do not see. So we can only disambiguate now
710 if there is no B2 in the tail of path1 and no B1 on the
711 tail of path2. */
712 if (base1_alias_set == ref2_alias_set
713 || alias_set_subset_of (base1_alias_set, ref2_alias_set))
714 return true;
715 /* If this is ptr vs. decl then we know there is no ptr ... decl path. */
716 if (!ref2_is_decl)
717 return (base2_alias_set == ref1_alias_set
718 || alias_set_subset_of (base2_alias_set, ref1_alias_set));
719 return false;
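/* Continuing the example above, assuming 32-bit int: for q->i
   against p->a.j the search finds type "struct A" inside the access
   path of p->a.j, both offsets are rebased to that common base, and
   [0, 32) for i does not overlap [32, 64) for j, so the accesses do
   not alias.  */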
722 /* Return true if we can determine that component references REF1 and REF2,
723 that are within a common DECL, cannot overlap. */
725 static bool
726 nonoverlapping_component_refs_of_decl_p (tree ref1, tree ref2)
728 vec<tree, va_stack> component_refs1;
729 vec<tree, va_stack> component_refs2;
731 vec_stack_alloc (tree, component_refs1, 16);
732 vec_stack_alloc (tree, component_refs2, 16);
734 /* Create the stack of handled components for REF1. */
735 while (handled_component_p (ref1))
737 component_refs1.safe_push (ref1);
738 ref1 = TREE_OPERAND (ref1, 0);
740 if (TREE_CODE (ref1) == MEM_REF)
742 if (!integer_zerop (TREE_OPERAND (ref1, 1)))
743 goto may_overlap;
744 ref1 = TREE_OPERAND (TREE_OPERAND (ref1, 0), 0);
747 /* Create the stack of handled components for REF2. */
748 while (handled_component_p (ref2))
750 component_refs2.safe_push (ref2);
751 ref2 = TREE_OPERAND (ref2, 0);
753 if (TREE_CODE (ref2) == MEM_REF)
755 if (!integer_zerop (TREE_OPERAND (ref2, 1)))
756 goto may_overlap;
757 ref2 = TREE_OPERAND (TREE_OPERAND (ref2, 0), 0);
760 /* We must have the same base DECL. */
761 gcc_assert (ref1 == ref2);
763 /* Pop the stacks in parallel and examine the COMPONENT_REFs of the same
764 rank. This is sufficient because we start from the same DECL and you
765 cannot reference several fields at a time with COMPONENT_REFs (unlike
766 with ARRAY_RANGE_REFs for arrays) so you always need the same number
767 of them to access a sub-component, unless you're in a union, in which
768 case the return value will precisely be false. */
769 while (true)
770 {
771 do
772 {
773 if (component_refs1.is_empty ())
774 goto may_overlap;
775 ref1 = component_refs1.pop ();
776 }
777 while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref1, 0))));
779 do
780 {
781 if (component_refs2.is_empty ())
782 goto may_overlap;
783 ref2 = component_refs2.pop ();
784 }
785 while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref2, 0))));
787 /* Beware of BIT_FIELD_REF. */
788 if (TREE_CODE (ref1) != COMPONENT_REF
789 || TREE_CODE (ref2) != COMPONENT_REF)
790 goto may_overlap;
792 tree field1 = TREE_OPERAND (ref1, 1);
793 tree field2 = TREE_OPERAND (ref2, 1);
795 /* ??? We cannot simply use the type of operand #0 of the refs here
796 as the Fortran compiler smuggles type punning into COMPONENT_REFs
797 for common blocks instead of using unions like everyone else. */
798 tree type1 = TYPE_MAIN_VARIANT (DECL_CONTEXT (field1));
799 tree type2 = TYPE_MAIN_VARIANT (DECL_CONTEXT (field2));
801 /* We cannot disambiguate fields in a union or qualified union. */
802 if (type1 != type2 || TREE_CODE (type1) != RECORD_TYPE)
803 goto may_overlap;
805 /* Different fields of the same record type cannot overlap. */
806 if (field1 != field2)
808 component_refs1.release ();
809 component_refs2.release ();
810 return true;
814 may_overlap:
815 component_refs1.release ();
816 component_refs2.release ();
817 return false;
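/* For example, within

     struct S { struct { int x; int y; } a, b; } s;

   the accesses s.a.x and s.b.y pop COMPONENT_REFs of equal rank and
   find fields a and b of the same RECORD_TYPE, hence no overlap;
   had S been a union, the RECORD_TYPE check would already have sent
   us to may_overlap.  */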
820 /* Return true if two memory references based on the variables BASE1
821 and BASE2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
822 [OFFSET2, OFFSET2 + MAX_SIZE2) may alias. REF1 and REF2
823 if non-NULL are the complete memory reference trees. */
825 static bool
826 decl_refs_may_alias_p (tree ref1, tree base1,
827 HOST_WIDE_INT offset1, HOST_WIDE_INT max_size1,
828 tree ref2, tree base2,
829 HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2)
831 gcc_checking_assert (DECL_P (base1) && DECL_P (base2));
833 /* If both references are based on different variables, they cannot alias. */
834 if (base1 != base2)
835 return false;
837 /* If both references are based on the same variable, they cannot alias if
838 the accesses do not overlap. */
839 if (!ranges_overlap_p (offset1, max_size1, offset2, max_size2))
840 return false;
842 /* For components with variable position, the above test isn't sufficient,
843 so we disambiguate component references manually. */
844 if (ref1 && ref2
845 && handled_component_p (ref1) && handled_component_p (ref2)
846 && nonoverlapping_component_refs_of_decl_p (ref1, ref2))
847 return false;
849 return true;
852 /* Return true if an indirect reference based on *PTR1 constrained
853 to [OFFSET1, OFFSET1 + MAX_SIZE1) may alias a variable based on BASE2
854 constrained to [OFFSET2, OFFSET2 + MAX_SIZE2). *PTR1 and BASE2 have
855 the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
856 in which case they are computed on-demand. REF1 and REF2
857 if non-NULL are the complete memory reference trees. */
859 static bool
860 indirect_ref_may_alias_decl_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
861 HOST_WIDE_INT offset1,
862 HOST_WIDE_INT max_size1 ATTRIBUTE_UNUSED,
863 alias_set_type ref1_alias_set,
864 alias_set_type base1_alias_set,
865 tree ref2 ATTRIBUTE_UNUSED, tree base2,
866 HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2,
867 alias_set_type ref2_alias_set,
868 alias_set_type base2_alias_set, bool tbaa_p)
870 tree ptr1;
871 tree ptrtype1, dbase2;
872 HOST_WIDE_INT offset1p = offset1, offset2p = offset2;
873 HOST_WIDE_INT doffset1, doffset2;
874 double_int moff;
876 gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
877 || TREE_CODE (base1) == TARGET_MEM_REF)
878 && DECL_P (base2));
880 ptr1 = TREE_OPERAND (base1, 0);
882 /* The offset embedded in MEM_REFs can be negative. Bias them
883 so that the resulting offset adjustment is positive. */
884 moff = mem_ref_offset (base1);
885 moff = moff.lshift (BITS_PER_UNIT == 8 ? 3 : exact_log2 (BITS_PER_UNIT));
886 if (moff.is_negative ())
887 offset2p += (-moff).low;
888 else
889 offset1p += moff.low;
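/* For example, for base1 = MEM[p_1 + -4] with offset1p = 0 against
   a decl access with offset2p = 0, moff is -32 bits (assuming
   BITS_PER_UNIT == 8), so the decl side is biased to offset2p = 32
   and both offsets stay non-negative for the extent test below.  */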
891 /* If only one reference is based on a variable, they cannot alias if
892 the pointer access is beyond the extent of the variable access.
893 (the pointer base cannot validly point to an offset less than zero
894 of the variable).
895 ??? IVOPTs creates bases that do not honor this restriction,
896 so do not apply this optimization for TARGET_MEM_REFs. */
897 if (TREE_CODE (base1) != TARGET_MEM_REF
898 && !ranges_overlap_p (MAX (0, offset1p), -1, offset2p, max_size2))
899 return false;
900 /* They also cannot alias if the pointer may not point to the decl. */
901 if (!ptr_deref_may_alias_decl_p (ptr1, base2))
902 return false;
904 /* Disambiguations that rely on strict aliasing rules follow. */
905 if (!flag_strict_aliasing || !tbaa_p)
906 return true;
908 ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));
910 /* If the alias set for a pointer access is zero all bets are off. */
911 if (base1_alias_set == -1)
912 base1_alias_set = get_deref_alias_set (ptrtype1);
913 if (base1_alias_set == 0)
914 return true;
915 if (base2_alias_set == -1)
916 base2_alias_set = get_alias_set (base2);
918 /* When we are trying to disambiguate an access with a pointer dereference
919 as base versus one with a decl as base we can use both the size
920 of the decl and its dynamic type for extra disambiguation.
921 ??? We do not know anything about the dynamic type of the decl
922 other than that its alias-set contains base2_alias_set as a subset
923 which does not help us here. */
924 /* As we know nothing useful about the dynamic type of the decl just
925 use the usual conflict check rather than a subset test.
926 ??? We could introduce -fvery-strict-aliasing when the language
927 does not allow decls to have a dynamic type that differs from their
928 static type. Then we can check
929 !alias_set_subset_of (base1_alias_set, base2_alias_set) instead. */
930 if (base1_alias_set != base2_alias_set
931 && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
932 return false;
933 /* If the size of the access relevant for TBAA through the pointer
934 is bigger than the size of the decl we can't possibly access the
935 decl via that pointer. */
936 if (DECL_SIZE (base2) && COMPLETE_TYPE_P (TREE_TYPE (ptrtype1))
937 && TREE_CODE (DECL_SIZE (base2)) == INTEGER_CST
938 && TREE_CODE (TYPE_SIZE (TREE_TYPE (ptrtype1))) == INTEGER_CST
939 /* ??? This in turn may run afoul when a decl of type T which is
940 a member of union type U is accessed through a pointer to
941 type U and sizeof T is smaller than sizeof U. */
942 && TREE_CODE (TREE_TYPE (ptrtype1)) != UNION_TYPE
943 && TREE_CODE (TREE_TYPE (ptrtype1)) != QUAL_UNION_TYPE
944 && tree_int_cst_lt (DECL_SIZE (base2), TYPE_SIZE (TREE_TYPE (ptrtype1))))
945 return false;
947 if (!ref2)
948 return true;
950 /* If the decl is accessed via a MEM_REF, reconstruct the base
951 we can use for TBAA and an appropriately adjusted offset. */
952 dbase2 = ref2;
953 while (handled_component_p (dbase2))
954 dbase2 = TREE_OPERAND (dbase2, 0);
955 doffset1 = offset1;
956 doffset2 = offset2;
957 if (TREE_CODE (dbase2) == MEM_REF
958 || TREE_CODE (dbase2) == TARGET_MEM_REF)
960 double_int moff = mem_ref_offset (dbase2);
961 moff = moff.lshift (BITS_PER_UNIT == 8 ? 3 : exact_log2 (BITS_PER_UNIT));
962 if (moff.is_negative ())
963 doffset1 -= (-moff).low;
964 else
965 doffset2 -= moff.low;
968 /* If either reference is view-converted, give up now. */
969 if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1
970 || same_type_for_tbaa (TREE_TYPE (dbase2), TREE_TYPE (base2)) != 1)
971 return true;
973 /* If both references are through the same type, they do not alias
974 if the accesses do not overlap. This does extra disambiguation
975 for mixed/pointer accesses but requires strict aliasing.
976 For MEM_REFs we require that the component-ref offset we computed
977 is relative to the start of the type which we ensure by
978 comparing rvalue and access type and disregarding the constant
979 pointer offset. */
980 if ((TREE_CODE (base1) != TARGET_MEM_REF
981 || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
982 && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (dbase2)) == 1)
983 return ranges_overlap_p (doffset1, max_size1, doffset2, max_size2);
985 /* Do access-path based disambiguation. */
986 if (ref1 && ref2
987 && (handled_component_p (ref1) || handled_component_p (ref2)))
988 return aliasing_component_refs_p (ref1,
989 ref1_alias_set, base1_alias_set,
990 offset1, max_size1,
991 ref2,
992 ref2_alias_set, base2_alias_set,
993 offset2, max_size2, true);
995 return true;
998 /* Return true if two indirect references based on *PTR1
999 and *PTR2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
1000 [OFFSET2, OFFSET2 + MAX_SIZE2) may alias. *PTR1 and *PTR2 have
1001 the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
1002 in which case they are computed on-demand. REF1 and REF2
1003 if non-NULL are the complete memory reference trees. */
1005 static bool
1006 indirect_refs_may_alias_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
1007 HOST_WIDE_INT offset1, HOST_WIDE_INT max_size1,
1008 alias_set_type ref1_alias_set,
1009 alias_set_type base1_alias_set,
1010 tree ref2 ATTRIBUTE_UNUSED, tree base2,
1011 HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2,
1012 alias_set_type ref2_alias_set,
1013 alias_set_type base2_alias_set, bool tbaa_p)
1015 tree ptr1;
1016 tree ptr2;
1017 tree ptrtype1, ptrtype2;
1019 gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
1020 || TREE_CODE (base1) == TARGET_MEM_REF)
1021 && (TREE_CODE (base2) == MEM_REF
1022 || TREE_CODE (base2) == TARGET_MEM_REF));
1024 ptr1 = TREE_OPERAND (base1, 0);
1025 ptr2 = TREE_OPERAND (base2, 0);
1027 /* If both bases are based on pointers they cannot alias if they may not
1028 point to the same memory object or if they point to the same object
1029 and the accesses do not overlap. */
1030 if ((!cfun || gimple_in_ssa_p (cfun))
1031 && operand_equal_p (ptr1, ptr2, 0)
1032 && (((TREE_CODE (base1) != TARGET_MEM_REF
1033 || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
1034 && (TREE_CODE (base2) != TARGET_MEM_REF
1035 || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2))))
1036 || (TREE_CODE (base1) == TARGET_MEM_REF
1037 && TREE_CODE (base2) == TARGET_MEM_REF
1038 && (TMR_STEP (base1) == TMR_STEP (base2)
1039 || (TMR_STEP (base1) && TMR_STEP (base2)
1040 && operand_equal_p (TMR_STEP (base1),
1041 TMR_STEP (base2), 0)))
1042 && (TMR_INDEX (base1) == TMR_INDEX (base2)
1043 || (TMR_INDEX (base1) && TMR_INDEX (base2)
1044 && operand_equal_p (TMR_INDEX (base1),
1045 TMR_INDEX (base2), 0)))
1046 && (TMR_INDEX2 (base1) == TMR_INDEX2 (base2)
1047 || (TMR_INDEX2 (base1) && TMR_INDEX2 (base2)
1048 && operand_equal_p (TMR_INDEX2 (base1),
1049 TMR_INDEX2 (base2), 0))))))
1051 double_int moff;
1052 /* The offset embedded in MEM_REFs can be negative. Bias them
1053 so that the resulting offset adjustment is positive. */
1054 moff = mem_ref_offset (base1);
1055 moff = moff.lshift (BITS_PER_UNIT == 8 ? 3 : exact_log2 (BITS_PER_UNIT));
1056 if (moff.is_negative ())
1057 offset2 += (-moff).low;
1058 else
1059 offset1 += moff.low;
1060 moff = mem_ref_offset (base2);
1061 moff = moff.lshift (BITS_PER_UNIT == 8 ? 3 : exact_log2 (BITS_PER_UNIT));
1062 if (moff.is_negative ())
1063 offset1 += (-moff).low;
1064 else
1065 offset2 += moff.low;
1066 return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
1068 if (!ptr_derefs_may_alias_p (ptr1, ptr2))
1069 return false;
1071 /* Disambiguations that rely on strict aliasing rules follow. */
1072 if (!flag_strict_aliasing || !tbaa_p)
1073 return true;
1075 ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));
1076 ptrtype2 = TREE_TYPE (TREE_OPERAND (base2, 1));
1078 /* If the alias set for a pointer access is zero all bets are off. */
1079 if (base1_alias_set == -1)
1080 base1_alias_set = get_deref_alias_set (ptrtype1);
1081 if (base1_alias_set == 0)
1082 return true;
1083 if (base2_alias_set == -1)
1084 base2_alias_set = get_deref_alias_set (ptrtype2);
1085 if (base2_alias_set == 0)
1086 return true;
1088 /* If both references are through the same type, they do not alias
1089 if the accesses do not overlap. This does extra disambiguation
1090 for mixed/pointer accesses but requires strict aliasing. */
1091 if ((TREE_CODE (base1) != TARGET_MEM_REF
1092 || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
1093 && (TREE_CODE (base2) != TARGET_MEM_REF
1094 || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2)))
1095 && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) == 1
1096 && same_type_for_tbaa (TREE_TYPE (base2), TREE_TYPE (ptrtype2)) == 1
1097 && same_type_for_tbaa (TREE_TYPE (ptrtype1),
1098 TREE_TYPE (ptrtype2)) == 1)
1099 return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
1101 /* Do type-based disambiguation. */
1102 if (base1_alias_set != base2_alias_set
1103 && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
1104 return false;
1106 /* Do access-path based disambiguation. */
1107 if (ref1 && ref2
1108 && (handled_component_p (ref1) || handled_component_p (ref2))
1109 && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) == 1
1110 && same_type_for_tbaa (TREE_TYPE (base2), TREE_TYPE (ptrtype2)) == 1)
1111 return aliasing_component_refs_p (ref1,
1112 ref1_alias_set, base1_alias_set,
1113 offset1, max_size1,
1114 ref2,
1115 ref2_alias_set, base2_alias_set,
1116 offset2, max_size2, false);
1118 return true;
1121 /* Return true if the two memory references REF1 and REF2 may alias. */
1123 bool
1124 refs_may_alias_p_1 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
1126 tree base1, base2;
1127 HOST_WIDE_INT offset1 = 0, offset2 = 0;
1128 HOST_WIDE_INT max_size1 = -1, max_size2 = -1;
1129 bool var1_p, var2_p, ind1_p, ind2_p;
1131 gcc_checking_assert ((!ref1->ref
1132 || TREE_CODE (ref1->ref) == SSA_NAME
1133 || DECL_P (ref1->ref)
1134 || TREE_CODE (ref1->ref) == STRING_CST
1135 || handled_component_p (ref1->ref)
1136 || TREE_CODE (ref1->ref) == MEM_REF
1137 || TREE_CODE (ref1->ref) == TARGET_MEM_REF)
1138 && (!ref2->ref
1139 || TREE_CODE (ref2->ref) == SSA_NAME
1140 || DECL_P (ref2->ref)
1141 || TREE_CODE (ref2->ref) == STRING_CST
1142 || handled_component_p (ref2->ref)
1143 || TREE_CODE (ref2->ref) == MEM_REF
1144 || TREE_CODE (ref2->ref) == TARGET_MEM_REF));
1146 /* Decompose the references into their base objects and the access. */
1147 base1 = ao_ref_base (ref1);
1148 offset1 = ref1->offset;
1149 max_size1 = ref1->max_size;
1150 base2 = ao_ref_base (ref2);
1151 offset2 = ref2->offset;
1152 max_size2 = ref2->max_size;
1154 /* We can end up with registers or constants as bases for example from
1155 *D.1663_44 = VIEW_CONVERT_EXPR<struct DB_LSN>(__tmp$B0F64_59);
1156 which is seen as a struct copy. */
1157 if (TREE_CODE (base1) == SSA_NAME
1158 || TREE_CODE (base1) == CONST_DECL
1159 || TREE_CODE (base1) == CONSTRUCTOR
1160 || TREE_CODE (base1) == ADDR_EXPR
1161 || CONSTANT_CLASS_P (base1)
1162 || TREE_CODE (base2) == SSA_NAME
1163 || TREE_CODE (base2) == CONST_DECL
1164 || TREE_CODE (base2) == CONSTRUCTOR
1165 || TREE_CODE (base2) == ADDR_EXPR
1166 || CONSTANT_CLASS_P (base2))
1167 return false;
1169 /* We can end up referring to code via function and label decls.
1170 As we likely do not properly track code aliases, conservatively
1171 bail out. */
1172 if (TREE_CODE (base1) == FUNCTION_DECL
1173 || TREE_CODE (base1) == LABEL_DECL
1174 || TREE_CODE (base2) == FUNCTION_DECL
1175 || TREE_CODE (base2) == LABEL_DECL)
1176 return true;
1178 /* Two volatile accesses always conflict. */
1179 if (ref1->volatile_p
1180 && ref2->volatile_p)
1181 return true;
1183 /* Defer to simple offset-based disambiguation if we have
1184 references based on two decls. Do this before deferring to
1185 TBAA to handle must-alias cases in conformance with the
1186 GCC extension of allowing type-punning through unions. */
1187 var1_p = DECL_P (base1);
1188 var2_p = DECL_P (base2);
1189 if (var1_p && var2_p)
1190 return decl_refs_may_alias_p (ref1->ref, base1, offset1, max_size1,
1191 ref2->ref, base2, offset2, max_size2);
1193 ind1_p = (TREE_CODE (base1) == MEM_REF
1194 || TREE_CODE (base1) == TARGET_MEM_REF);
1195 ind2_p = (TREE_CODE (base2) == MEM_REF
1196 || TREE_CODE (base2) == TARGET_MEM_REF);
1198 /* Canonicalize the pointer-vs-decl case. */
1199 if (ind1_p && var2_p)
1201 HOST_WIDE_INT tmp1;
1202 tree tmp2;
1203 ao_ref *tmp3;
1204 tmp1 = offset1; offset1 = offset2; offset2 = tmp1;
1205 tmp1 = max_size1; max_size1 = max_size2; max_size2 = tmp1;
1206 tmp2 = base1; base1 = base2; base2 = tmp2;
1207 tmp3 = ref1; ref1 = ref2; ref2 = tmp3;
1208 var1_p = true;
1209 ind1_p = false;
1210 var2_p = false;
1211 ind2_p = true;
1214 /* First defer to TBAA if possible. */
1215 if (tbaa_p
1216 && flag_strict_aliasing
1217 && !alias_sets_conflict_p (ao_ref_alias_set (ref1),
1218 ao_ref_alias_set (ref2)))
1219 return false;
1221 /* Dispatch to the pointer-vs-decl or pointer-vs-pointer disambiguators. */
1222 if (var1_p && ind2_p)
1223 return indirect_ref_may_alias_decl_p (ref2->ref, base2,
1224 offset2, max_size2,
1225 ao_ref_alias_set (ref2), -1,
1226 ref1->ref, base1,
1227 offset1, max_size1,
1228 ao_ref_alias_set (ref1),
1229 ao_ref_base_alias_set (ref1),
1230 tbaa_p);
1231 else if (ind1_p && ind2_p)
1232 return indirect_refs_may_alias_p (ref1->ref, base1,
1233 offset1, max_size1,
1234 ao_ref_alias_set (ref1), -1,
1235 ref2->ref, base2,
1236 offset2, max_size2,
1237 ao_ref_alias_set (ref2), -1,
1238 tbaa_p);
1240 /* We really do not want to end up here, but returning true is safe. */
1241 #ifdef ENABLE_CHECKING
1242 gcc_unreachable ();
1243 #else
1244 return true;
1245 #endif
1248 bool
1249 refs_may_alias_p (tree ref1, tree ref2)
1251 ao_ref r1, r2;
1252 bool res;
1253 ao_ref_init (&r1, ref1);
1254 ao_ref_init (&r2, ref2);
1255 res = refs_may_alias_p_1 (&r1, &r2, true);
1256 if (res)
1257 ++alias_stats.refs_may_alias_p_may_alias;
1258 else
1259 ++alias_stats.refs_may_alias_p_no_alias;
1260 return res;
1263 /* Returns true if there is an anti-dependence for the STORE that
1264 executes after the LOAD. */
1266 bool
1267 refs_anti_dependent_p (tree load, tree store)
1269 ao_ref r1, r2;
1270 ao_ref_init (&r1, load);
1271 ao_ref_init (&r2, store);
1272 return refs_may_alias_p_1 (&r1, &r2, false);
1275 /* Returns true if there is an output dependence for the stores
1276 STORE1 and STORE2. */
1278 bool
1279 refs_output_dependent_p (tree store1, tree store2)
1281 ao_ref r1, r2;
1282 ao_ref_init (&r1, store1);
1283 ao_ref_init (&r2, store2);
1284 return refs_may_alias_p_1 (&r1, &r2, false);
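/* For example, for

     tmp = a[i];
     a[j] = tmp;

   refs_anti_dependent_p (a[i], a[j]) asks whether the later store may
   touch the loaded memory. Both dependence predicates pass
   tbaa_p == false since a store may validly change the dynamic type
   of memory after the last load of the old type, so type-based rules
   must not be used to disambiguate here.  */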
1287 /* If the call CALL may use the memory reference REF return true,
1288 otherwise return false. */
1290 static bool
1291 ref_maybe_used_by_call_p_1 (gimple call, ao_ref *ref)
1293 tree base, callee;
1294 unsigned i;
1295 int flags = gimple_call_flags (call);
1297 /* Const functions without a static chain do not implicitly use memory. */
1298 if (!gimple_call_chain (call)
1299 && (flags & (ECF_CONST|ECF_NOVOPS)))
1300 goto process_args;
1302 base = ao_ref_base (ref);
1303 if (!base)
1304 return true;
1306 /* A call that is not without side-effects might involve volatile
1307 accesses and thus conflicts with all other volatile accesses. */
1308 if (ref->volatile_p)
1309 return true;
1311 /* If the reference is based on a decl that is not aliased the call
1312 cannot possibly use it. */
1313 if (DECL_P (base)
1314 && !may_be_aliased (base)
1315 /* But local statics can be used through recursion. */
1316 && !is_global_var (base))
1317 goto process_args;
1319 callee = gimple_call_fndecl (call);
1321 /* Handle those builtin functions explicitly that do not act as
1322 escape points. See tree-ssa-structalias.c:find_func_aliases
1323 for the list of builtins we might need to handle here. */
1324 if (callee != NULL_TREE
1325 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
1326 switch (DECL_FUNCTION_CODE (callee))
1328 /* All the following functions read memory pointed to by
1329 their second argument. strcat/strncat additionally
1330 read memory pointed to by the first argument. */
1331 case BUILT_IN_STRCAT:
1332 case BUILT_IN_STRNCAT:
1334 ao_ref dref;
1335 ao_ref_init_from_ptr_and_size (&dref,
1336 gimple_call_arg (call, 0),
1337 NULL_TREE);
1338 if (refs_may_alias_p_1 (&dref, ref, false))
1339 return true;
1341 /* FALLTHRU */
1342 case BUILT_IN_STRCPY:
1343 case BUILT_IN_STRNCPY:
1344 case BUILT_IN_MEMCPY:
1345 case BUILT_IN_MEMMOVE:
1346 case BUILT_IN_MEMPCPY:
1347 case BUILT_IN_STPCPY:
1348 case BUILT_IN_STPNCPY:
1349 case BUILT_IN_TM_MEMCPY:
1350 case BUILT_IN_TM_MEMMOVE:
1352 ao_ref dref;
1353 tree size = NULL_TREE;
1354 if (gimple_call_num_args (call) == 3)
1355 size = gimple_call_arg (call, 2);
1356 ao_ref_init_from_ptr_and_size (&dref,
1357 gimple_call_arg (call, 1),
1358 size);
1359 return refs_may_alias_p_1 (&dref, ref, false);
1361 case BUILT_IN_STRCAT_CHK:
1362 case BUILT_IN_STRNCAT_CHK:
1364 ao_ref dref;
1365 ao_ref_init_from_ptr_and_size (&dref,
1366 gimple_call_arg (call, 0),
1367 NULL_TREE);
1368 if (refs_may_alias_p_1 (&dref, ref, false))
1369 return true;
1371 /* FALLTHRU */
1372 case BUILT_IN_STRCPY_CHK:
1373 case BUILT_IN_STRNCPY_CHK:
1374 case BUILT_IN_MEMCPY_CHK:
1375 case BUILT_IN_MEMMOVE_CHK:
1376 case BUILT_IN_MEMPCPY_CHK:
1377 case BUILT_IN_STPCPY_CHK:
1378 case BUILT_IN_STPNCPY_CHK:
1380 ao_ref dref;
1381 tree size = NULL_TREE;
1382 if (gimple_call_num_args (call) == 4)
1383 size = gimple_call_arg (call, 2);
1384 ao_ref_init_from_ptr_and_size (&dref,
1385 gimple_call_arg (call, 1),
1386 size);
1387 return refs_may_alias_p_1 (&dref, ref, false);
1389 case BUILT_IN_BCOPY:
1391 ao_ref dref;
1392 tree size = gimple_call_arg (call, 2);
1393 ao_ref_init_from_ptr_and_size (&dref,
1394 gimple_call_arg (call, 0),
1395 size);
1396 return refs_may_alias_p_1 (&dref, ref, false);
1399 /* The following functions read memory pointed to by their
1400 first argument. */
1401 CASE_BUILT_IN_TM_LOAD (1):
1402 CASE_BUILT_IN_TM_LOAD (2):
1403 CASE_BUILT_IN_TM_LOAD (4):
1404 CASE_BUILT_IN_TM_LOAD (8):
1405 CASE_BUILT_IN_TM_LOAD (FLOAT):
1406 CASE_BUILT_IN_TM_LOAD (DOUBLE):
1407 CASE_BUILT_IN_TM_LOAD (LDOUBLE):
1408 CASE_BUILT_IN_TM_LOAD (M64):
1409 CASE_BUILT_IN_TM_LOAD (M128):
1410 CASE_BUILT_IN_TM_LOAD (M256):
1411 case BUILT_IN_TM_LOG:
1412 case BUILT_IN_TM_LOG_1:
1413 case BUILT_IN_TM_LOG_2:
1414 case BUILT_IN_TM_LOG_4:
1415 case BUILT_IN_TM_LOG_8:
1416 case BUILT_IN_TM_LOG_FLOAT:
1417 case BUILT_IN_TM_LOG_DOUBLE:
1418 case BUILT_IN_TM_LOG_LDOUBLE:
1419 case BUILT_IN_TM_LOG_M64:
1420 case BUILT_IN_TM_LOG_M128:
1421 case BUILT_IN_TM_LOG_M256:
1422 return ptr_deref_may_alias_ref_p_1 (gimple_call_arg (call, 0), ref);
1424 /* These read memory pointed to by the first argument. */
1425 case BUILT_IN_STRDUP:
1426 case BUILT_IN_STRNDUP:
1428 ao_ref dref;
1429 tree size = NULL_TREE;
1430 if (gimple_call_num_args (call) == 2)
1431 size = gimple_call_arg (call, 1);
1432 ao_ref_init_from_ptr_and_size (&dref,
1433 gimple_call_arg (call, 0),
1434 size);
1435 return refs_may_alias_p_1 (&dref, ref, false);
1437 /* These read memory pointed to by the first argument. */
1438 case BUILT_IN_INDEX:
1439 case BUILT_IN_STRCHR:
1440 case BUILT_IN_STRRCHR:
1442 ao_ref dref;
1443 ao_ref_init_from_ptr_and_size (&dref,
1444 gimple_call_arg (call, 0),
1445 NULL_TREE);
1446 return refs_may_alias_p_1 (&dref, ref, false);
1448 /* These read memory pointed to by the first argument with size
1449 in the third argument. */
1450 case BUILT_IN_MEMCHR:
1452 ao_ref dref;
1453 ao_ref_init_from_ptr_and_size (&dref,
1454 gimple_call_arg (call, 0),
1455 gimple_call_arg (call, 2));
1456 return refs_may_alias_p_1 (&dref, ref, false);
1458 /* These read memory pointed to by the first and second arguments. */
1459 case BUILT_IN_STRSTR:
1460 case BUILT_IN_STRPBRK:
1462 ao_ref dref;
1463 ao_ref_init_from_ptr_and_size (&dref,
1464 gimple_call_arg (call, 0),
1465 NULL_TREE);
1466 if (refs_may_alias_p_1 (&dref, ref, false))
1467 return true;
1468 ao_ref_init_from_ptr_and_size (&dref,
1469 gimple_call_arg (call, 1),
1470 NULL_TREE);
1471 return refs_may_alias_p_1 (&dref, ref, false);
1474 /* The following builtins do not read from memory. */
1475 case BUILT_IN_FREE:
1476 case BUILT_IN_MALLOC:
1477 case BUILT_IN_CALLOC:
1478 case BUILT_IN_ALLOCA:
1479 case BUILT_IN_ALLOCA_WITH_ALIGN:
1480 case BUILT_IN_STACK_SAVE:
1481 case BUILT_IN_STACK_RESTORE:
1482 case BUILT_IN_MEMSET:
1483 case BUILT_IN_TM_MEMSET:
1484 case BUILT_IN_MEMSET_CHK:
1485 case BUILT_IN_FREXP:
1486 case BUILT_IN_FREXPF:
1487 case BUILT_IN_FREXPL:
1488 case BUILT_IN_GAMMA_R:
1489 case BUILT_IN_GAMMAF_R:
1490 case BUILT_IN_GAMMAL_R:
1491 case BUILT_IN_LGAMMA_R:
1492 case BUILT_IN_LGAMMAF_R:
1493 case BUILT_IN_LGAMMAL_R:
1494 case BUILT_IN_MODF:
1495 case BUILT_IN_MODFF:
1496 case BUILT_IN_MODFL:
1497 case BUILT_IN_REMQUO:
1498 case BUILT_IN_REMQUOF:
1499 case BUILT_IN_REMQUOL:
1500 case BUILT_IN_SINCOS:
1501 case BUILT_IN_SINCOSF:
1502 case BUILT_IN_SINCOSL:
1503 case BUILT_IN_ASSUME_ALIGNED:
1504 case BUILT_IN_VA_END:
1505 return false;
1506 /* __sync_* builtins and some OpenMP builtins act as threading
1507 barriers. */
1508 #undef DEF_SYNC_BUILTIN
1509 #define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
1510 #include "sync-builtins.def"
1511 #undef DEF_SYNC_BUILTIN
1512 case BUILT_IN_GOMP_ATOMIC_START:
1513 case BUILT_IN_GOMP_ATOMIC_END:
1514 case BUILT_IN_GOMP_BARRIER:
1515 case BUILT_IN_GOMP_TASKWAIT:
1516 case BUILT_IN_GOMP_CRITICAL_START:
1517 case BUILT_IN_GOMP_CRITICAL_END:
1518 case BUILT_IN_GOMP_CRITICAL_NAME_START:
1519 case BUILT_IN_GOMP_CRITICAL_NAME_END:
1520 case BUILT_IN_GOMP_LOOP_END:
1521 case BUILT_IN_GOMP_ORDERED_START:
1522 case BUILT_IN_GOMP_ORDERED_END:
1523 case BUILT_IN_GOMP_PARALLEL_END:
1524 case BUILT_IN_GOMP_SECTIONS_END:
1525 case BUILT_IN_GOMP_SINGLE_COPY_START:
1526 case BUILT_IN_GOMP_SINGLE_COPY_END:
1527 return true;
1529 default:
1530 /* Fallthru to general call handling. */;
1533 /* Check if base is a global static variable that is not read
1534 by the function. */
1535 if (callee != NULL_TREE
1536 && TREE_CODE (base) == VAR_DECL
1537 && TREE_STATIC (base))
1539 struct cgraph_node *node = cgraph_get_node (callee);
1540 bitmap not_read;
1542 /* FIXME: Callee can be an OMP builtin that does not have a call graph
1543 node yet. We should enforce that there are nodes for all decls in the
1544 IL and remove this check instead. */
1545 if (node
1546 && (not_read = ipa_reference_get_not_read_global (node))
1547 && bitmap_bit_p (not_read, DECL_UID (base)))
1548 goto process_args;
1551 /* Check if the base variable is call-used. */
1552 if (DECL_P (base))
1554 if (pt_solution_includes (gimple_call_use_set (call), base))
1555 return true;
1557 else if ((TREE_CODE (base) == MEM_REF
1558 || TREE_CODE (base) == TARGET_MEM_REF)
1559 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
1561 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
1562 if (!pi)
1563 return true;
1565 if (pt_solutions_intersect (gimple_call_use_set (call), &pi->pt))
1566 return true;
1568 else
1569 return true;
1571 /* Inspect call arguments for passed-by-value aliases. */
1572 process_args:
1573 for (i = 0; i < gimple_call_num_args (call); ++i)
1575 tree op = gimple_call_arg (call, i);
1576 int flags = gimple_call_arg_flags (call, i);
1578 if (flags & EAF_UNUSED)
1579 continue;
1581 if (TREE_CODE (op) == WITH_SIZE_EXPR)
1582 op = TREE_OPERAND (op, 0);
1584 if (TREE_CODE (op) != SSA_NAME
1585 && !is_gimple_min_invariant (op))
1587 ao_ref r;
1588 ao_ref_init (&r, op);
1589 if (refs_may_alias_p_1 (&r, ref, true))
1590 return true;
1594 return false;
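/* For example, a call foo (s) passing the aggregate s by value reads
   s: the argument is neither an SSA name nor a gimple minimal
   invariant, so it is wrapped in an ao_ref and checked against REF
   above.  */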
1597 static bool
1598 ref_maybe_used_by_call_p (gimple call, tree ref)
1600 ao_ref r;
1601 bool res;
1602 ao_ref_init (&r, ref);
1603 res = ref_maybe_used_by_call_p_1 (call, &r);
1604 if (res)
1605 ++alias_stats.ref_maybe_used_by_call_p_may_alias;
1606 else
1607 ++alias_stats.ref_maybe_used_by_call_p_no_alias;
1608 return res;
1612 /* If the statement STMT may use the memory reference REF return
1613 true, otherwise return false. */
1615 bool
1616 ref_maybe_used_by_stmt_p (gimple stmt, tree ref)
1618 if (is_gimple_assign (stmt))
1620 tree rhs;
1622 /* All assignments that touch memory are single assignments. */
1623 if (!gimple_assign_single_p (stmt))
1624 return false;
1626 rhs = gimple_assign_rhs1 (stmt);
1627 if (is_gimple_reg (rhs)
1628 || is_gimple_min_invariant (rhs)
1629 || gimple_assign_rhs_code (stmt) == CONSTRUCTOR)
1630 return false;
1632 return refs_may_alias_p (rhs, ref);
1634 else if (is_gimple_call (stmt))
1635 return ref_maybe_used_by_call_p (stmt, ref);
1636 else if (gimple_code (stmt) == GIMPLE_RETURN)
1638 tree retval = gimple_return_retval (stmt);
1639 tree base;
1640 if (retval
1641 && TREE_CODE (retval) != SSA_NAME
1642 && !is_gimple_min_invariant (retval)
1643 && refs_may_alias_p (retval, ref))
1644 return true;
1645 /* If ref escapes the function then the return acts as a use. */
1646 base = get_base_address (ref);
1647 if (!base)
1648 ;
1649 else if (DECL_P (base))
1650 return is_global_var (base);
1651 else if (TREE_CODE (base) == MEM_REF
1652 || TREE_CODE (base) == TARGET_MEM_REF)
1653 return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
1654 return false;
1657 return true;
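/* For example, "return g;" for a global g is a use of any ref that
   may alias g, and even a plain "return;" acts as a use of refs
   whose base may be global or escaped, since memory reachable from
   the caller stays live across the return.  */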
1660 /* If the call in statement CALL may clobber the memory reference REF
1661 return true, otherwise return false. */
1663 static bool
1664 call_may_clobber_ref_p_1 (gimple call, ao_ref *ref)
1666 tree base;
1667 tree callee;
1669 /* If the call is pure or const it cannot clobber anything. */
1670 if (gimple_call_flags (call)
1671 & (ECF_PURE|ECF_CONST|ECF_LOOPING_CONST_OR_PURE|ECF_NOVOPS))
1672 return false;
1674 base = ao_ref_base (ref);
1675 if (!base)
1676 return true;
1678 if (TREE_CODE (base) == SSA_NAME
1679 || CONSTANT_CLASS_P (base))
1680 return false;
1682 /* A call that is not without side-effects might involve volatile
1683 accesses and thus conflicts with all other volatile accesses. */
1684 if (ref->volatile_p)
1685 return true;
1687 /* If the reference is based on a decl that is not aliased the call
1688 cannot possibly clobber it. */
1689 if (DECL_P (base)
1690 && !may_be_aliased (base)
1691 /* But local non-readonly statics can be modified through recursion
1692 or the call may implement a threading barrier which we must
1693 treat as may-def. */
1694 && (TREE_READONLY (base)
1695 || !is_global_var (base)))
1696 return false;
1698 callee = gimple_call_fndecl (call);
1700 /* Handle those builtin functions explicitly that do not act as
1701 escape points. See tree-ssa-structalias.c:find_func_aliases
1702 for the list of builtins we might need to handle here. */
1703 if (callee != NULL_TREE
1704 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
1705 switch (DECL_FUNCTION_CODE (callee))
1707 /* All the following functions clobber memory pointed to by
1708 their first argument. */
1709 case BUILT_IN_STRCPY:
1710 case BUILT_IN_STRNCPY:
1711 case BUILT_IN_MEMCPY:
1712 case BUILT_IN_MEMMOVE:
1713 case BUILT_IN_MEMPCPY:
1714 case BUILT_IN_STPCPY:
1715 case BUILT_IN_STPNCPY:
1716 case BUILT_IN_STRCAT:
1717 case BUILT_IN_STRNCAT:
1718 case BUILT_IN_MEMSET:
1719 case BUILT_IN_TM_MEMSET:
1720 CASE_BUILT_IN_TM_STORE (1):
1721 CASE_BUILT_IN_TM_STORE (2):
1722 CASE_BUILT_IN_TM_STORE (4):
1723 CASE_BUILT_IN_TM_STORE (8):
1724 CASE_BUILT_IN_TM_STORE (FLOAT):
1725 CASE_BUILT_IN_TM_STORE (DOUBLE):
1726 CASE_BUILT_IN_TM_STORE (LDOUBLE):
1727 CASE_BUILT_IN_TM_STORE (M64):
1728 CASE_BUILT_IN_TM_STORE (M128):
1729 CASE_BUILT_IN_TM_STORE (M256):
1730 case BUILT_IN_TM_MEMCPY:
1731 case BUILT_IN_TM_MEMMOVE:
1733 ao_ref dref;
1734 tree size = NULL_TREE;
1735 /* Don't pass in size for strncat: the store is of at most
1736 n + 1 bytes at dest + strlen (dest), i.e. the maximum size
1737 is strlen (dest) + n + 1 rather than n, but strlen (dest)
1738 isn't known. */
1739 if (gimple_call_num_args (call) == 3
1740 && DECL_FUNCTION_CODE (callee) != BUILT_IN_STRNCAT)
1741 size = gimple_call_arg (call, 2);
1742 ao_ref_init_from_ptr_and_size (&dref,
1743 gimple_call_arg (call, 0),
1744 size);
1745 return refs_may_alias_p_1 (&dref, ref, false);
1747 case BUILT_IN_STRCPY_CHK:
1748 case BUILT_IN_STRNCPY_CHK:
1749 case BUILT_IN_MEMCPY_CHK:
1750 case BUILT_IN_MEMMOVE_CHK:
1751 case BUILT_IN_MEMPCPY_CHK:
1752 case BUILT_IN_STPCPY_CHK:
1753 case BUILT_IN_STPNCPY_CHK:
1754 case BUILT_IN_STRCAT_CHK:
1755 case BUILT_IN_STRNCAT_CHK:
1756 case BUILT_IN_MEMSET_CHK:
1758 ao_ref dref;
1759 tree size = NULL_TREE;
1760 /* Don't pass in size for __strncat_chk: the store is of at most
1761 n + 1 bytes at dest + strlen (dest), i.e. the maximum size
1762 is strlen (dest) + n + 1 rather than n, but strlen (dest)
1763 isn't known. */
1764 if (gimple_call_num_args (call) == 4
1765 && DECL_FUNCTION_CODE (callee) != BUILT_IN_STRNCAT_CHK)
1766 size = gimple_call_arg (call, 2);
1767 ao_ref_init_from_ptr_and_size (&dref,
1768 gimple_call_arg (call, 0),
1769 size);
1770 return refs_may_alias_p_1 (&dref, ref, false);
1772 case BUILT_IN_BCOPY:
1774 ao_ref dref;
1775 tree size = gimple_call_arg (call, 2);
1776 ao_ref_init_from_ptr_and_size (&dref,
1777 gimple_call_arg (call, 1),
1778 size);
1779 return refs_may_alias_p_1 (&dref, ref, false);
1781 /* Allocating memory does not have any side-effects apart from
1782 being the definition point for the pointer. */
1783 case BUILT_IN_MALLOC:
1784 case BUILT_IN_CALLOC:
1785 case BUILT_IN_STRDUP:
1786 case BUILT_IN_STRNDUP:
1787 /* Unix98 specifies that errno is set on allocation failure. */
1788 if (flag_errno_math
1789 && targetm.ref_may_alias_errno (ref))
1790 return true;
1791 return false;
1792 case BUILT_IN_STACK_SAVE:
1793 case BUILT_IN_ALLOCA:
1794 case BUILT_IN_ALLOCA_WITH_ALIGN:
1795 case BUILT_IN_ASSUME_ALIGNED:
1796 return false;
1797 /* Freeing memory kills the pointed-to memory. More importantly,
1798 the call has to serve as a barrier for moving loads and stores
1799 across it. */
1800 case BUILT_IN_FREE:
1801 case BUILT_IN_VA_END:
1803 tree ptr = gimple_call_arg (call, 0);
1804 return ptr_deref_may_alias_ref_p_1 (ptr, ref);
1806 case BUILT_IN_GAMMA_R:
1807 case BUILT_IN_GAMMAF_R:
1808 case BUILT_IN_GAMMAL_R:
1809 case BUILT_IN_LGAMMA_R:
1810 case BUILT_IN_LGAMMAF_R:
1811 case BUILT_IN_LGAMMAL_R:
1812 {
1813 tree out = gimple_call_arg (call, 1);
1814 if (ptr_deref_may_alias_ref_p_1 (out, ref))
1815 return true;
1816 if (flag_errno_math)
1817 break;
1818 return false;
1819 }
1820 case BUILT_IN_FREXP:
1821 case BUILT_IN_FREXPF:
1822 case BUILT_IN_FREXPL:
1823 case BUILT_IN_MODF:
1824 case BUILT_IN_MODFF:
1825 case BUILT_IN_MODFL:
1826 {
1827 tree out = gimple_call_arg (call, 1);
1828 return ptr_deref_may_alias_ref_p_1 (out, ref);
1829 }
1830 case BUILT_IN_REMQUO:
1831 case BUILT_IN_REMQUOF:
1832 case BUILT_IN_REMQUOL:
1833 {
1834 tree out = gimple_call_arg (call, 2);
1835 if (ptr_deref_may_alias_ref_p_1 (out, ref))
1836 return true;
1837 if (flag_errno_math)
1838 break;
1839 return false;
1840 }
1841 case BUILT_IN_SINCOS:
1842 case BUILT_IN_SINCOSF:
1843 case BUILT_IN_SINCOSL:
1844 {
1845 tree sin = gimple_call_arg (call, 1);
1846 tree cos = gimple_call_arg (call, 2);
1847 return (ptr_deref_may_alias_ref_p_1 (sin, ref)
1848 || ptr_deref_may_alias_ref_p_1 (cos, ref));
1849 }
1850 /* __sync_* builtins and some OpenMP builtins act as threading
1851 barriers. */
1852 #undef DEF_SYNC_BUILTIN
1853 #define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
1854 #include "sync-builtins.def"
1855 #undef DEF_SYNC_BUILTIN
1856 case BUILT_IN_GOMP_ATOMIC_START:
1857 case BUILT_IN_GOMP_ATOMIC_END:
1858 case BUILT_IN_GOMP_BARRIER:
1859 case BUILT_IN_GOMP_TASKWAIT:
1860 case BUILT_IN_GOMP_CRITICAL_START:
1861 case BUILT_IN_GOMP_CRITICAL_END:
1862 case BUILT_IN_GOMP_CRITICAL_NAME_START:
1863 case BUILT_IN_GOMP_CRITICAL_NAME_END:
1864 case BUILT_IN_GOMP_LOOP_END:
1865 case BUILT_IN_GOMP_ORDERED_START:
1866 case BUILT_IN_GOMP_ORDERED_END:
1867 case BUILT_IN_GOMP_PARALLEL_END:
1868 case BUILT_IN_GOMP_SECTIONS_END:
1869 case BUILT_IN_GOMP_SINGLE_COPY_START:
1870 case BUILT_IN_GOMP_SINGLE_COPY_END:
1871 return true;
1872 default:
1873 /* Fallthru to general call handling. */;
1874 }
1876 /* Check if base is a global static variable that is not written
1877 by the function. */
1878 if (callee != NULL_TREE
1879 && TREE_CODE (base) == VAR_DECL
1880 && TREE_STATIC (base))
1881 {
1882 struct cgraph_node *node = cgraph_get_node (callee);
1883 bitmap not_written;
1885 if (node
1886 && (not_written = ipa_reference_get_not_written_global (node))
1887 && bitmap_bit_p (not_written, DECL_UID (base)))
1888 return false;
1889 }
1891 /* Check if the base variable is call-clobbered. */
1892 if (DECL_P (base))
1893 return pt_solution_includes (gimple_call_clobber_set (call), base);
1894 else if ((TREE_CODE (base) == MEM_REF
1895 || TREE_CODE (base) == TARGET_MEM_REF)
1896 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
1897 {
1898 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
1899 if (!pi)
1900 return true;
1902 return pt_solutions_intersect (gimple_call_clobber_set (call), &pi->pt);
1903 }
1905 return true;
1906 }
1908 /* If the call in statement CALL may clobber the memory reference REF
1909 return true, otherwise return false. */
1911 bool
1912 call_may_clobber_ref_p (gimple call, tree ref)
1913 {
1914 bool res;
1915 ao_ref r;
1916 ao_ref_init (&r, ref);
1917 res = call_may_clobber_ref_p_1 (call, &r);
1918 if (res)
1919 ++alias_stats.call_may_clobber_ref_p_may_alias;
1920 else
1921 ++alias_stats.call_may_clobber_ref_p_no_alias;
1922 return res;
1923 }
1926 /* If the statement STMT may clobber the memory reference REF return true,
1927 otherwise return false. */
1929 bool
1930 stmt_may_clobber_ref_p_1 (gimple stmt, ao_ref *ref)
1931 {
1932 if (is_gimple_call (stmt))
1933 {
1934 tree lhs = gimple_call_lhs (stmt);
1935 if (lhs
1936 && TREE_CODE (lhs) != SSA_NAME)
1937 {
1938 ao_ref r;
1939 ao_ref_init (&r, lhs);
1940 if (refs_may_alias_p_1 (ref, &r, true))
1941 return true;
1942 }
1944 return call_may_clobber_ref_p_1 (stmt, ref);
1945 }
1946 else if (gimple_assign_single_p (stmt))
1947 {
1948 tree lhs = gimple_assign_lhs (stmt);
1949 if (TREE_CODE (lhs) != SSA_NAME)
1950 {
1951 ao_ref r;
1952 ao_ref_init (&r, lhs);
1953 return refs_may_alias_p_1 (ref, &r, true);
1954 }
1955 }
1956 else if (gimple_code (stmt) == GIMPLE_ASM)
1957 return true;
1959 return false;
1960 }
1962 bool
1963 stmt_may_clobber_ref_p (gimple stmt, tree ref)
1964 {
1965 ao_ref r;
1966 ao_ref_init (&r, ref);
1967 return stmt_may_clobber_ref_p_1 (stmt, &r);
1968 }
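/* Usage sketch (editorial addition, not part of the upstream file):
   a typical client scans statements and asks the oracle whether a
   reference REF stays valid; 'bb' and 'ref' are assumed to come from
   the surrounding pass.

     gimple_stmt_iterator gsi;
     for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
       if (stmt_may_clobber_ref_p (gsi_stmt (gsi), ref))
         break;   // REF may be invalidated here; stop reusing it

   Clients issuing many queries against the same reference should
   build an ao_ref once with ao_ref_init and use the _1 variant to
   avoid re-analyzing the reference tree on every query.  */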
1970 /* If STMT kills the memory reference REF return true, otherwise
1971 return false. */
1973 static bool
1974 stmt_kills_ref_p_1 (gimple stmt, ao_ref *ref)
1975 {
1976 /* For a must-alias check we need to be able to constrain
1977 the access properly. */
1978 ao_ref_base (ref);
1979 if (ref->max_size == -1)
1980 return false;
1982 if (gimple_has_lhs (stmt)
1983 && TREE_CODE (gimple_get_lhs (stmt)) != SSA_NAME
1984 /* The assignment is not necessarily carried out if it can throw
1985 and we can catch it in the current function where we could inspect
1986 the previous value.
1987 ??? We only need to care about the RHS throwing. For aggregate
1988 assignments or similar calls and non-call exceptions the LHS
1989 might throw as well. */
1990 && !stmt_can_throw_internal (stmt))
1991 {
1992 tree base, lhs = gimple_get_lhs (stmt);
1993 HOST_WIDE_INT size, offset, max_size, ref_offset = ref->offset;
1994 base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
1995 /* We can get MEM[symbol: sZ, index: D.8862_1] here,
1996 so base == ref->base does not always hold. */
1997 if (base != ref->base)
1998 {
1999 /* If both base and ref->base are MEM_REFs, only compare the
2000 first operands; if their constant second operands differ,
2001 try to fold those offsets into offset and ref_offset. */
2002 if (TREE_CODE (base) == MEM_REF && TREE_CODE (ref->base) == MEM_REF
2003 && TREE_OPERAND (base, 0) == TREE_OPERAND (ref->base, 0))
2004 {
2005 if (!tree_int_cst_equal (TREE_OPERAND (base, 1),
2006 TREE_OPERAND (ref->base, 1)))
2007 {
2008 double_int off1 = mem_ref_offset (base);
2009 off1 = off1.lshift (BITS_PER_UNIT == 8
2010 ? 3 : exact_log2 (BITS_PER_UNIT));
2011 off1 = off1 + double_int::from_shwi (offset);
2012 double_int off2 = mem_ref_offset (ref->base);
2013 off2 = off2.lshift (BITS_PER_UNIT == 8
2014 ? 3 : exact_log2 (BITS_PER_UNIT));
2015 off2 = off2 + double_int::from_shwi (ref_offset);
2016 if (off1.fits_shwi () && off2.fits_shwi ())
2017 {
2018 offset = off1.to_shwi ();
2019 ref_offset = off2.to_shwi ();
2020 }
2021 else
2022 size = -1;
2023 }
2024 }
2025 else
2026 size = -1;
2027 }
2028 /* For a must-alias check we need to be able to constrain
2029 the access properly. */
2030 if (size != -1 && size == max_size)
2031 {
2032 if (offset <= ref_offset
2033 && offset + size >= ref_offset + ref->max_size)
2034 return true;
2035 }
2036 }
2038 if (is_gimple_call (stmt))
2039 {
2040 tree callee = gimple_call_fndecl (stmt);
2041 if (callee != NULL_TREE
2042 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
2043 switch (DECL_FUNCTION_CODE (callee))
2044 {
2045 case BUILT_IN_MEMCPY:
2046 case BUILT_IN_MEMPCPY:
2047 case BUILT_IN_MEMMOVE:
2048 case BUILT_IN_MEMSET:
2049 case BUILT_IN_MEMCPY_CHK:
2050 case BUILT_IN_MEMPCPY_CHK:
2051 case BUILT_IN_MEMMOVE_CHK:
2052 case BUILT_IN_MEMSET_CHK:
2053 {
2054 tree dest = gimple_call_arg (stmt, 0);
2055 tree len = gimple_call_arg (stmt, 2);
2056 tree base = NULL_TREE;
2057 HOST_WIDE_INT offset = 0;
2058 if (!host_integerp (len, 0))
2059 return false;
2060 if (TREE_CODE (dest) == ADDR_EXPR)
2061 base = get_addr_base_and_unit_offset (TREE_OPERAND (dest, 0),
2062 &offset);
2063 else if (TREE_CODE (dest) == SSA_NAME)
2064 base = dest;
2065 if (base
2066 && base == ao_ref_base (ref))
2067 {
2068 HOST_WIDE_INT size = TREE_INT_CST_LOW (len);
2069 if (offset <= ref->offset / BITS_PER_UNIT
2070 && (offset + size
2071 >= ((ref->offset + ref->max_size + BITS_PER_UNIT - 1)
2072 / BITS_PER_UNIT)))
2073 return true;
2074 }
2075 break;
2076 }
2078 case BUILT_IN_VA_END:
2079 {
2080 tree ptr = gimple_call_arg (stmt, 0);
2081 if (TREE_CODE (ptr) == ADDR_EXPR)
2082 {
2083 tree base = ao_ref_base (ref);
2084 if (TREE_OPERAND (ptr, 0) == base)
2085 return true;
2086 }
2087 break;
2088 }
2090 default:;
2091 }
2092 }
2093 return false;
2094 }
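/* Worked example (editorial addition, not part of the upstream file)
   for the built-in range check above, assuming BITS_PER_UNIT == 8:
   memset (p, 0, 16) with p equal to REF's base at unit offset 0
   writes bytes [0, 16).  A reference at bit offset 64 with max_size
   32 occupies bytes [8, 12); since 0 <= 8 and 0 + 16 >= 12, the
   store kills REF.  With length 8 instead, 0 + 8 >= 12 fails and
   REF is not killed.  */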
2096 bool
2097 stmt_kills_ref_p (gimple stmt, tree ref)
2098 {
2099 ao_ref r;
2100 ao_ref_init (&r, ref);
2101 return stmt_kills_ref_p_1 (stmt, &r);
2102 }
2105 /* Walk the virtual use-def chain of VUSE until hitting the virtual operand
2106 TARGET; if a statement clobbering the memory reference REF is hit
2107 instead, return false. The walk starts with VUSE, one argument of PHI. */
2109 static bool
2110 maybe_skip_until (gimple phi, tree target, ao_ref *ref,
2111 tree vuse, unsigned int *cnt, bitmap *visited,
2112 bool abort_on_visited)
2113 {
2114 basic_block bb = gimple_bb (phi);
2116 if (!*visited)
2117 *visited = BITMAP_ALLOC (NULL);
2119 bitmap_set_bit (*visited, SSA_NAME_VERSION (PHI_RESULT (phi)));
2121 /* Walk until we hit the target. */
2122 while (vuse != target)
2123 {
2124 gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
2125 /* Recurse for PHI nodes. */
2126 if (gimple_code (def_stmt) == GIMPLE_PHI)
2127 {
2128 /* An already visited PHI node ends the walk successfully. */
2129 if (bitmap_bit_p (*visited, SSA_NAME_VERSION (PHI_RESULT (def_stmt))))
2130 return !abort_on_visited;
2131 vuse = get_continuation_for_phi (def_stmt, ref, cnt,
2132 visited, abort_on_visited);
2133 if (!vuse)
2134 return false;
2135 continue;
2136 }
2137 else if (gimple_nop_p (def_stmt))
2138 return false;
2139 else
2140 {
2141 /* A clobbering statement or the end of the IL ends it failing. */
2142 ++*cnt;
2143 if (stmt_may_clobber_ref_p_1 (def_stmt, ref))
2144 return false;
2145 }
2146 /* If we reach a new basic-block see if we already skipped it
2147 in a previous walk that ended successfully. */
2148 if (gimple_bb (def_stmt) != bb)
2149 {
2150 if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (vuse)))
2151 return !abort_on_visited;
2152 bb = gimple_bb (def_stmt);
2153 }
2154 vuse = gimple_vuse (def_stmt);
2155 }
2156 return true;
2157 }
2159 /* For two PHI arguments ARG0 and ARG1 try to skip non-aliasing code
2160 until we hit the phi argument definition that dominates the other one.
2161 Return that, or NULL_TREE if there is no such definition. */
2163 static tree
2164 get_continuation_for_phi_1 (gimple phi, tree arg0, tree arg1,
2165 ao_ref *ref, unsigned int *cnt,
2166 bitmap *visited, bool abort_on_visited)
2167 {
2168 gimple def0 = SSA_NAME_DEF_STMT (arg0);
2169 gimple def1 = SSA_NAME_DEF_STMT (arg1);
2170 tree common_vuse;
2172 if (arg0 == arg1)
2173 return arg0;
2174 else if (gimple_nop_p (def0)
2175 || (!gimple_nop_p (def1)
2176 && dominated_by_p (CDI_DOMINATORS,
2177 gimple_bb (def1), gimple_bb (def0))))
2178 {
2179 if (maybe_skip_until (phi, arg0, ref, arg1, cnt,
2180 visited, abort_on_visited))
2181 return arg0;
2182 }
2183 else if (gimple_nop_p (def1)
2184 || dominated_by_p (CDI_DOMINATORS,
2185 gimple_bb (def0), gimple_bb (def1)))
2186 {
2187 if (maybe_skip_until (phi, arg1, ref, arg0, cnt,
2188 visited, abort_on_visited))
2189 return arg1;
2190 }
2191 /* Special case of a diamond:
2192 MEM_1 = ...
2193 goto (cond) ? L1 : L2
2194 L1: store1 = ... #MEM_2 = vuse(MEM_1)
2195 goto L3
2196 L2: store2 = ... #MEM_3 = vuse(MEM_1)
2197 L3: MEM_4 = PHI<MEM_2, MEM_3>
2198 We were called with the PHI at L3; MEM_2 and MEM_3 don't
2199 dominate each other, but we can still easily skip this PHI node
2200 if we recognize that the vuse MEM operand is the same for both,
2201 and that we can skip both statements (they don't clobber us).
2202 This is still linear. Don't use maybe_skip_until, that might
2203 potentially be slow. */
2204 else if ((common_vuse = gimple_vuse (def0))
2205 && common_vuse == gimple_vuse (def1))
2206 {
2207 *cnt += 2;
2208 if (!stmt_may_clobber_ref_p_1 (def0, ref)
2209 && !stmt_may_clobber_ref_p_1 (def1, ref))
2210 return common_vuse;
2211 }
2213 return NULL_TREE;
2214 }
2217 /* Starting from a PHI node for the virtual operand of the memory reference
2218 REF find a continuation virtual operand that allows us to continue walking
2219 statements dominating PHI skipping only statements that cannot possibly
2220 clobber REF. Increments *CNT for each alias disambiguation done.
2221 Returns NULL_TREE if no suitable virtual operand can be found. */
2223 tree
2224 get_continuation_for_phi (gimple phi, ao_ref *ref,
2225 unsigned int *cnt, bitmap *visited,
2226 bool abort_on_visited)
2227 {
2228 unsigned nargs = gimple_phi_num_args (phi);
2230 /* We can simply look through a single-argument PHI. */
2231 if (nargs == 1)
2232 return PHI_ARG_DEF (phi, 0);
2234 /* For two or more arguments try to pairwise skip non-aliasing code
2235 until we hit the phi argument definition that dominates the other one. */
2236 else if (nargs >= 2)
2237 {
2238 tree arg0, arg1;
2239 unsigned i;
2241 /* Find a candidate for the virtual operand whose definition
2242 dominates those of all others. */
2243 arg0 = PHI_ARG_DEF (phi, 0);
2244 if (!SSA_NAME_IS_DEFAULT_DEF (arg0))
2245 for (i = 1; i < nargs; ++i)
2246 {
2247 arg1 = PHI_ARG_DEF (phi, i);
2248 if (SSA_NAME_IS_DEFAULT_DEF (arg1))
2249 {
2250 arg0 = arg1;
2251 break;
2252 }
2253 if (dominated_by_p (CDI_DOMINATORS,
2254 gimple_bb (SSA_NAME_DEF_STMT (arg0)),
2255 gimple_bb (SSA_NAME_DEF_STMT (arg1))))
2256 arg0 = arg1;
2257 }
2259 /* Then pairwise reduce against the found candidate. */
2260 for (i = 0; i < nargs; ++i)
2261 {
2262 arg1 = PHI_ARG_DEF (phi, i);
2263 arg0 = get_continuation_for_phi_1 (phi, arg0, arg1, ref,
2264 cnt, visited, abort_on_visited);
2265 if (!arg0)
2266 return NULL_TREE;
2267 }
2269 return arg0;
2270 }
2272 return NULL_TREE;
2273 }
2275 /* Based on the memory reference REF and its virtual use VUSE call
2276 WALKER for each virtual use that is equivalent to VUSE, including VUSE
2277 itself. That is, for each virtual use whose defining statement
2278 does not clobber REF.
2280 WALKER is called with REF, the current virtual use and DATA. If
2281 WALKER returns non-NULL the walk stops and its result is returned.
2282 At the end of a non-successful walk NULL is returned.
2284 TRANSLATE, if non-NULL, is called with a pointer to REF, the virtual
2285 use whose defining statement may clobber REF, and DATA.
2286 If TRANSLATE returns (void *)-1 the walk stops and NULL is returned.
2287 If TRANSLATE returns non-NULL the walk stops and its result is returned.
2288 If TRANSLATE returns NULL the walk continues and TRANSLATE is supposed
2289 to adjust REF and *DATA to make that valid.
2291 TODO: Cache the vector of equivalent vuses per ref, vuse pair. */
2293 void *
2294 walk_non_aliased_vuses (ao_ref *ref, tree vuse,
2295 void *(*walker)(ao_ref *, tree, unsigned int, void *),
2296 void *(*translate)(ao_ref *, tree, void *), void *data)
2297 {
2298 bitmap visited = NULL;
2299 void *res;
2300 unsigned int cnt = 0;
2301 bool translated = false;
2303 timevar_push (TV_ALIAS_STMT_WALK);
2305 do
2306 {
2307 gimple def_stmt;
2309 /* ??? Do we want to account this to TV_ALIAS_STMT_WALK? */
2310 res = (*walker) (ref, vuse, cnt, data);
2311 /* Abort walk. */
2312 if (res == (void *)-1)
2313 {
2314 res = NULL;
2315 break;
2316 }
2317 /* Lookup succeeded. */
2318 else if (res != NULL)
2319 break;
2321 def_stmt = SSA_NAME_DEF_STMT (vuse);
2322 if (gimple_nop_p (def_stmt))
2323 break;
2324 else if (gimple_code (def_stmt) == GIMPLE_PHI)
2325 vuse = get_continuation_for_phi (def_stmt, ref, &cnt,
2326 &visited, translated);
2327 else
2328 {
2329 cnt++;
2330 if (stmt_may_clobber_ref_p_1 (def_stmt, ref))
2331 {
2332 if (!translate)
2333 break;
2334 res = (*translate) (ref, vuse, data);
2335 /* Failed lookup and translation. */
2336 if (res == (void *)-1)
2337 {
2338 res = NULL;
2339 break;
2340 }
2341 /* Lookup succeeded. */
2342 else if (res != NULL)
2343 break;
2344 /* Translation succeeded, continue walking. */
2345 translated = true;
2346 }
2347 vuse = gimple_vuse (def_stmt);
2348 }
2349 }
2350 while (vuse);
2352 if (visited)
2353 BITMAP_FREE (visited);
2355 timevar_pop (TV_ALIAS_STMT_WALK);
2357 return res;
2358 }
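/* Usage sketch (editorial addition, not part of the upstream file):
   a minimal walker in the style of the value-numbering users of
   walk_non_aliased_vuses; lookup_ref_ is a hypothetical helper.

     static void *
     lookup_walker_ (ao_ref *ref, tree vuse, unsigned int cnt, void *data)
     {
       // Return non-NULL to stop with a result, (void *)-1 to abort,
       // NULL to continue to the next dominating vuse.
       if (cnt > 1000)            // arbitrary cutoff for illustration
         return (void *)-1;
       return lookup_ref_ (ref, vuse, data);
     }

     res = walk_non_aliased_vuses (&r, vuse, lookup_walker_, NULL, data);

   Supplying a TRANSLATE callback additionally lets the walk continue
   through clobbering statements whose effect TRANSLATE can re-express
   in terms of REF.  */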
2361 /* Based on the memory reference REF call WALKER for each vdef whose
2362 defining statement may clobber REF, starting with VDEF. If REF
2363 is NULL_TREE, each defining statement is visited.
2365 WALKER is called with REF, the current vdef and DATA. If WALKER
2366 returns true the walk is stopped, otherwise it continues.
2368 At PHI nodes walk_aliased_vdefs forks into one walk for each
2369 PHI argument (but only one walk continues on merge points); each
2370 forked walk stops as soon as WALKER returns true for it.
2372 The function returns the number of statements walked. */
2374 static unsigned int
2375 walk_aliased_vdefs_1 (ao_ref *ref, tree vdef,
2376 bool (*walker)(ao_ref *, tree, void *), void *data,
2377 bitmap *visited, unsigned int cnt)
2378 {
2379 do
2380 {
2381 gimple def_stmt = SSA_NAME_DEF_STMT (vdef);
2383 if (*visited
2384 && !bitmap_set_bit (*visited, SSA_NAME_VERSION (vdef)))
2385 return cnt;
2387 if (gimple_nop_p (def_stmt))
2388 return cnt;
2389 else if (gimple_code (def_stmt) == GIMPLE_PHI)
2390 {
2391 unsigned i;
2392 if (!*visited)
2393 *visited = BITMAP_ALLOC (NULL);
2394 for (i = 0; i < gimple_phi_num_args (def_stmt); ++i)
2395 cnt += walk_aliased_vdefs_1 (ref, gimple_phi_arg_def (def_stmt, i),
2396 walker, data, visited, 0);
2397 return cnt;
2398 }
2400 /* ??? Do we want to account this to TV_ALIAS_STMT_WALK? */
2401 cnt++;
2402 if ((!ref
2403 || stmt_may_clobber_ref_p_1 (def_stmt, ref))
2404 && (*walker) (ref, vdef, data))
2405 return cnt;
2407 vdef = gimple_vuse (def_stmt);
2408 }
2409 while (1);
2410 }
2412 unsigned int
2413 walk_aliased_vdefs (ao_ref *ref, tree vdef,
2414 bool (*walker)(ao_ref *, tree, void *), void *data,
2415 bitmap *visited)
2416 {
2417 bitmap local_visited = NULL;
2418 unsigned int ret;
2420 timevar_push (TV_ALIAS_STMT_WALK);
2422 ret = walk_aliased_vdefs_1 (ref, vdef, walker, data,
2423 visited ? visited : &local_visited, 0);
2424 if (local_visited)
2425 BITMAP_FREE (local_visited);
2427 timevar_pop (TV_ALIAS_STMT_WALK);
2429 return ret;
2430 }
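/* Usage sketch (editorial addition, not part of the upstream file):
   collecting every statement that may clobber a reference R;
   mark_clobberer_ and the uid bitmap 'clobberers' are hypothetical.

     static bool
     mark_clobberer_ (ao_ref *ref, tree vdef, void *data)
     {
       bitmap_set_bit ((bitmap) data, gimple_uid (SSA_NAME_DEF_STMT (vdef)));
       return false;        // keep walking the whole vdef chain
     }

     walk_aliased_vdefs (&r, gimple_vuse (stmt), mark_clobberer_,
                         clobberers, NULL);

   Passing NULL for VISITED makes the function allocate and free its
   own visited bitmap; callers that walk repeatedly can pass their own
   bitmap pointer to share the visited set across walks.  */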