gcc/tree-ssa-alias.c
1 /* Alias analysis for trees.
2 Copyright (C) 2004-2013 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "tm_p.h"
27 #include "target.h"
28 #include "basic-block.h"
29 #include "timevar.h" /* for TV_ALIAS_STMT_WALK */
30 #include "ggc.h"
31 #include "langhooks.h"
32 #include "flags.h"
33 #include "function.h"
34 #include "tree-pretty-print.h"
35 #include "dumpfile.h"
36 #include "gimple.h"
37 #include "gimple-ssa.h"
38 #include "tree-ssanames.h"
39 #include "tree-dfa.h"
40 #include "tree-inline.h"
41 #include "params.h"
42 #include "vec.h"
43 #include "pointer-set.h"
44 #include "alloc-pool.h"
45 #include "tree-ssa-alias.h"
46 #include "ipa-reference.h"
48 /* Broad overview of how alias analysis on gimple works:
50 Statements clobbering or using memory are linked through the
51 virtual operand factored use-def chain. The virtual operand
52    is unique per function; its symbol is accessible via gimple_vop (cfun).
53 Virtual operands are used for efficiently walking memory statements
54 in the gimple IL and are useful for things like value-numbering as
55 a generation count for memory references.
57 SSA_NAME pointers may have associated points-to information
58 accessible via the SSA_NAME_PTR_INFO macro. Flow-insensitive
59 points-to information is (re-)computed by the TODO_rebuild_alias
60 pass manager todo. Points-to information is also used for more
61 precise tracking of call-clobbered and call-used variables and
62 related disambiguations.
64 This file contains functions for disambiguating memory references,
65 the so called alias-oracle and tools for walking of the gimple IL.
67 The main alias-oracle entry-points are
69 bool stmt_may_clobber_ref_p (gimple, tree)
71 This function queries if a statement may invalidate (parts of)
72 the memory designated by the reference tree argument.
74 bool ref_maybe_used_by_stmt_p (gimple, tree)
76 This function queries if a statement may need (parts of) the
77 memory designated by the reference tree argument.
79 There are variants of these functions that only handle the call
80 part of a statement, call_may_clobber_ref_p and ref_maybe_used_by_call_p.
81 Note that these do not disambiguate against a possible call lhs.
83 bool refs_may_alias_p (tree, tree)
85 This function tries to disambiguate two reference trees.
87 bool ptr_deref_may_alias_global_p (tree)
89 This function queries if dereferencing a pointer variable may
90 alias global memory.
92 More low-level disambiguators are available and documented in
93 this file. Low-level disambiguators dealing with points-to
94 information are in tree-ssa-structalias.c. */
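/* Illustrative sketch (added for exposition, not part of the original
   file): a pass that wants to know whether STMT is wholly independent
   of the memory denoted by REF would combine the two main entry
   points documented above.  */

static bool
example_stmt_independent_of_ref_p (gimple stmt, tree ref)
{
  /* STMT neither writes nor reads (parts of) the memory *REF
     designates if both oracle queries disambiguate.  */
  return (!stmt_may_clobber_ref_p (stmt, ref)
	  && !ref_maybe_used_by_stmt_p (stmt, ref));
}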
97 /* Query statistics for the different low-level disambiguators.
98 A high-level query may trigger multiple of them. */
100 static struct {
101 unsigned HOST_WIDE_INT refs_may_alias_p_may_alias;
102 unsigned HOST_WIDE_INT refs_may_alias_p_no_alias;
103 unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_may_alias;
104 unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_no_alias;
105 unsigned HOST_WIDE_INT call_may_clobber_ref_p_may_alias;
106 unsigned HOST_WIDE_INT call_may_clobber_ref_p_no_alias;
107 } alias_stats;
109 void
110 dump_alias_stats (FILE *s)
112 fprintf (s, "\nAlias oracle query stats:\n");
113 fprintf (s, " refs_may_alias_p: "
114 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
115 HOST_WIDE_INT_PRINT_DEC" queries\n",
116 alias_stats.refs_may_alias_p_no_alias,
117 alias_stats.refs_may_alias_p_no_alias
118 + alias_stats.refs_may_alias_p_may_alias);
119 fprintf (s, " ref_maybe_used_by_call_p: "
120 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
121 HOST_WIDE_INT_PRINT_DEC" queries\n",
122 alias_stats.ref_maybe_used_by_call_p_no_alias,
123 	   alias_stats.ref_maybe_used_by_call_p_no_alias
124 + alias_stats.ref_maybe_used_by_call_p_may_alias);
125 fprintf (s, " call_may_clobber_ref_p: "
126 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
127 HOST_WIDE_INT_PRINT_DEC" queries\n",
128 alias_stats.call_may_clobber_ref_p_no_alias,
129 alias_stats.call_may_clobber_ref_p_no_alias
130 + alias_stats.call_may_clobber_ref_p_may_alias);
134 /* Return true if dereferencing PTR may alias a global variable.  */
136 bool
137 ptr_deref_may_alias_global_p (tree ptr)
139 struct ptr_info_def *pi;
141   /* If we end up with a pointer constant here, it may point
142      to global memory.  */
143 if (TREE_CODE (ptr) != SSA_NAME)
144 return true;
146 pi = SSA_NAME_PTR_INFO (ptr);
148 /* If we do not have points-to information for this variable,
149 we have to punt. */
150 if (!pi)
151 return true;
153 /* ??? This does not use TBAA to prune globals ptr may not access. */
154 return pt_solution_includes_global (&pi->pt);
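/* Illustrative usage (added): a transform that wants to move a store
   through PTR across code touching only global state could guard on

     if (!ptr_deref_may_alias_global_p (ptr))
       ...the store cannot touch any global variable...

   Note the conservative answers above: non-SSA pointers and pointers
   without points-to information always report a possible alias.  */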
157 /* Return true if dereferencing PTR may alias DECL.
158 The caller is responsible for applying TBAA to see if PTR
159 may access DECL at all. */
161 static bool
162 ptr_deref_may_alias_decl_p (tree ptr, tree decl)
164 struct ptr_info_def *pi;
166 /* Conversions are irrelevant for points-to information and
167 data-dependence analysis can feed us those. */
168 STRIP_NOPS (ptr);
170   /* Anything we do not explicitly handle aliases.  */
171 if ((TREE_CODE (ptr) != SSA_NAME
172 && TREE_CODE (ptr) != ADDR_EXPR
173 && TREE_CODE (ptr) != POINTER_PLUS_EXPR)
174 || !POINTER_TYPE_P (TREE_TYPE (ptr))
175 || (TREE_CODE (decl) != VAR_DECL
176 && TREE_CODE (decl) != PARM_DECL
177 && TREE_CODE (decl) != RESULT_DECL))
178 return true;
180 /* Disregard pointer offsetting. */
181 if (TREE_CODE (ptr) == POINTER_PLUS_EXPR)
185 ptr = TREE_OPERAND (ptr, 0);
187 while (TREE_CODE (ptr) == POINTER_PLUS_EXPR);
188 return ptr_deref_may_alias_decl_p (ptr, decl);
191 /* ADDR_EXPR pointers either just offset another pointer or directly
192 specify the pointed-to set. */
193 if (TREE_CODE (ptr) == ADDR_EXPR)
195 tree base = get_base_address (TREE_OPERAND (ptr, 0));
196 if (base
197 && (TREE_CODE (base) == MEM_REF
198 || TREE_CODE (base) == TARGET_MEM_REF))
199 ptr = TREE_OPERAND (base, 0);
200 else if (base
201 && DECL_P (base))
202 return base == decl;
203 else if (base
204 && CONSTANT_CLASS_P (base))
205 return false;
206 else
207 return true;
210   /* Non-aliased variables cannot be pointed to.  */
211 if (!may_be_aliased (decl))
212 return false;
214 /* If we do not have useful points-to information for this pointer
215 we cannot disambiguate anything else. */
216 pi = SSA_NAME_PTR_INFO (ptr);
217 if (!pi)
218 return true;
220 return pt_solution_includes (&pi->pt, decl);
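/* Illustrative example (added): given

     int a, b;
     int *p = &a;

   a query against the SSA name of P and the decl B returns false
   outright when B is not address-taken (!may_be_aliased), and
   otherwise consults the points-to solution of P, which here
   contains only A.  */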
223 /* Return true if dereferenced PTR1 and PTR2 may alias.
224 The caller is responsible for applying TBAA to see if accesses
225 through PTR1 and PTR2 may conflict at all. */
227 bool
228 ptr_derefs_may_alias_p (tree ptr1, tree ptr2)
230 struct ptr_info_def *pi1, *pi2;
232 /* Conversions are irrelevant for points-to information and
233 data-dependence analysis can feed us those. */
234 STRIP_NOPS (ptr1);
235 STRIP_NOPS (ptr2);
237 /* Disregard pointer offsetting. */
238 if (TREE_CODE (ptr1) == POINTER_PLUS_EXPR)
242 ptr1 = TREE_OPERAND (ptr1, 0);
244 while (TREE_CODE (ptr1) == POINTER_PLUS_EXPR);
245 return ptr_derefs_may_alias_p (ptr1, ptr2);
247 if (TREE_CODE (ptr2) == POINTER_PLUS_EXPR)
251 ptr2 = TREE_OPERAND (ptr2, 0);
253 while (TREE_CODE (ptr2) == POINTER_PLUS_EXPR);
254 return ptr_derefs_may_alias_p (ptr1, ptr2);
257 /* ADDR_EXPR pointers either just offset another pointer or directly
258 specify the pointed-to set. */
259 if (TREE_CODE (ptr1) == ADDR_EXPR)
261 tree base = get_base_address (TREE_OPERAND (ptr1, 0));
262 if (base
263 && (TREE_CODE (base) == MEM_REF
264 || TREE_CODE (base) == TARGET_MEM_REF))
265 return ptr_derefs_may_alias_p (TREE_OPERAND (base, 0), ptr2);
266 else if (base
267 && DECL_P (base))
268 return ptr_deref_may_alias_decl_p (ptr2, base);
269 else
270 return true;
272 if (TREE_CODE (ptr2) == ADDR_EXPR)
274 tree base = get_base_address (TREE_OPERAND (ptr2, 0));
275 if (base
276 && (TREE_CODE (base) == MEM_REF
277 || TREE_CODE (base) == TARGET_MEM_REF))
278 return ptr_derefs_may_alias_p (ptr1, TREE_OPERAND (base, 0));
279 else if (base
280 && DECL_P (base))
281 return ptr_deref_may_alias_decl_p (ptr1, base);
282 else
283 return true;
286 /* From here we require SSA name pointers. Anything else aliases. */
287 if (TREE_CODE (ptr1) != SSA_NAME
288 || TREE_CODE (ptr2) != SSA_NAME
289 || !POINTER_TYPE_P (TREE_TYPE (ptr1))
290 || !POINTER_TYPE_P (TREE_TYPE (ptr2)))
291 return true;
293   /* We may end up with two empty points-to solutions for the same pointer.
294 In this case we still want to say both pointers alias, so shortcut
295 that here. */
296 if (ptr1 == ptr2)
297 return true;
299 /* If we do not have useful points-to information for either pointer
300 we cannot disambiguate anything else. */
301 pi1 = SSA_NAME_PTR_INFO (ptr1);
302 pi2 = SSA_NAME_PTR_INFO (ptr2);
303 if (!pi1 || !pi2)
304 return true;
306 /* ??? This does not use TBAA to prune decls from the intersection
307 that not both pointers may access. */
308 return pt_solutions_intersect (&pi1->pt, &pi2->pt);
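/* Illustrative sketch (added): data-dependence analysis can use the
   function above directly on the pointer operands of two MEM_REF
   trees; TBAA is deliberately not consulted here and must be applied
   separately by the caller.  */

static bool
example_mem_refs_independent_p (tree mem_ref1, tree mem_ref2)
{
  /* Both arguments are assumed to be MEM_REFs; operand 0 is the
     pointer they dereference.  */
  return !ptr_derefs_may_alias_p (TREE_OPERAND (mem_ref1, 0),
				  TREE_OPERAND (mem_ref2, 0));
}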
311 /* Return true if dereferencing PTR may alias *REF.
312 The caller is responsible for applying TBAA to see if PTR
313 may access *REF at all. */
315 static bool
316 ptr_deref_may_alias_ref_p_1 (tree ptr, ao_ref *ref)
318 tree base = ao_ref_base (ref);
320 if (TREE_CODE (base) == MEM_REF
321 || TREE_CODE (base) == TARGET_MEM_REF)
322 return ptr_derefs_may_alias_p (ptr, TREE_OPERAND (base, 0));
323 else if (DECL_P (base))
324 return ptr_deref_may_alias_decl_p (ptr, base);
326 return true;
329 /* Return true if REF may refer to global memory.  */
331 bool
332 ref_may_alias_global_p (tree ref)
334 tree base = get_base_address (ref);
335 if (DECL_P (base))
336 return is_global_var (base);
337 else if (TREE_CODE (base) == MEM_REF
338 || TREE_CODE (base) == TARGET_MEM_REF)
339 return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
340 return true;
343 /* Return true if STMT may clobber global memory.  */
345 bool
346 stmt_may_clobber_global_p (gimple stmt)
348 tree lhs;
350 if (!gimple_vdef (stmt))
351 return false;
353   /* ??? We can ask the oracle whether an artificial pointer
354      dereference with a pointer whose points-to information covers
355      all global memory (what about non-address-taken memory?) may be
356      clobbered by this call.  As there is at the moment no convenient
357      way of doing that without generating garbage, do some manual
358      checking instead.
359 ??? We could make a NULL ao_ref argument to the various
360 predicates special, meaning any global memory. */
362 switch (gimple_code (stmt))
364 case GIMPLE_ASSIGN:
365 lhs = gimple_assign_lhs (stmt);
366 return (TREE_CODE (lhs) != SSA_NAME
367 && ref_may_alias_global_p (lhs));
368 case GIMPLE_CALL:
369 return true;
370 default:
371 return true;
376 /* Dump alias information on FILE. */
378 void
379 dump_alias_info (FILE *file)
381 unsigned i;
382 const char *funcname
383 = lang_hooks.decl_printable_name (current_function_decl, 2);
384 tree var;
386 fprintf (file, "\n\nAlias information for %s\n\n", funcname);
388 fprintf (file, "Aliased symbols\n\n");
390 FOR_EACH_LOCAL_DECL (cfun, i, var)
392 if (may_be_aliased (var))
393 dump_variable (file, var);
396 fprintf (file, "\nCall clobber information\n");
398 fprintf (file, "\nESCAPED");
399 dump_points_to_solution (file, &cfun->gimple_df->escaped);
401 fprintf (file, "\n\nFlow-insensitive points-to information\n\n");
403 for (i = 1; i < num_ssa_names; i++)
405 tree ptr = ssa_name (i);
406 struct ptr_info_def *pi;
408 if (ptr == NULL_TREE
409 || !POINTER_TYPE_P (TREE_TYPE (ptr))
410 || SSA_NAME_IN_FREE_LIST (ptr))
411 continue;
413 pi = SSA_NAME_PTR_INFO (ptr);
414 if (pi)
415 dump_points_to_info_for (file, ptr);
418 fprintf (file, "\n");
422 /* Dump alias information on stderr. */
424 DEBUG_FUNCTION void
425 debug_alias_info (void)
427 dump_alias_info (stderr);
431 /* Dump the points-to set *PT into FILE. */
433 void
434 dump_points_to_solution (FILE *file, struct pt_solution *pt)
436 if (pt->anything)
437 fprintf (file, ", points-to anything");
439 if (pt->nonlocal)
440 fprintf (file, ", points-to non-local");
442 if (pt->escaped)
443 fprintf (file, ", points-to escaped");
445 if (pt->ipa_escaped)
446 fprintf (file, ", points-to unit escaped");
448 if (pt->null)
449 fprintf (file, ", points-to NULL");
451 if (pt->vars)
453 fprintf (file, ", points-to vars: ");
454 dump_decl_set (file, pt->vars);
455 if (pt->vars_contains_global)
456 fprintf (file, " (includes global vars)");
461 /* Unified dump function for pt_solution. */
463 DEBUG_FUNCTION void
464 debug (pt_solution &ref)
466 dump_points_to_solution (stderr, &ref);
469 DEBUG_FUNCTION void
470 debug (pt_solution *ptr)
472 if (ptr)
473 debug (*ptr);
474 else
475 fprintf (stderr, "<nil>\n");
479 /* Dump points-to information for SSA_NAME PTR into FILE. */
481 void
482 dump_points_to_info_for (FILE *file, tree ptr)
484 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
486 print_generic_expr (file, ptr, dump_flags);
488 if (pi)
489 dump_points_to_solution (file, &pi->pt);
490 else
491 fprintf (file, ", points-to anything");
493 fprintf (file, "\n");
497 /* Dump points-to information for VAR into stderr. */
499 DEBUG_FUNCTION void
500 debug_points_to_info_for (tree var)
502 dump_points_to_info_for (stderr, var);
506 /* Initializes the alias-oracle reference representation *R from REF. */
508 void
509 ao_ref_init (ao_ref *r, tree ref)
511 r->ref = ref;
512 r->base = NULL_TREE;
513 r->offset = 0;
514 r->size = -1;
515 r->max_size = -1;
516 r->ref_alias_set = -1;
517 r->base_alias_set = -1;
518 r->volatile_p = ref ? TREE_THIS_VOLATILE (ref) : false;
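/* Illustrative sketch (added): the exported refs_may_alias_p wrapper
   further below is essentially this; the base, extent and alias sets
   of an ao_ref are filled in lazily by ao_ref_base and friends.  */

static bool
example_trees_may_alias_p (tree t1, tree t2)
{
  ao_ref r1, r2;
  ao_ref_init (&r1, t1);
  ao_ref_init (&r2, t2);
  return refs_may_alias_p_1 (&r1, &r2, /* tbaa_p = */ true);
}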
521 /* Returns the base object of the memory reference *REF. */
523 tree
524 ao_ref_base (ao_ref *ref)
526 if (ref->base)
527 return ref->base;
528 ref->base = get_ref_base_and_extent (ref->ref, &ref->offset, &ref->size,
529 &ref->max_size);
530 return ref->base;
533 /* Returns the base object alias set of the memory reference *REF. */
535 static alias_set_type
536 ao_ref_base_alias_set (ao_ref *ref)
538 tree base_ref;
539 if (ref->base_alias_set != -1)
540 return ref->base_alias_set;
541 if (!ref->ref)
542 return 0;
543 base_ref = ref->ref;
544 while (handled_component_p (base_ref))
545 base_ref = TREE_OPERAND (base_ref, 0);
546 ref->base_alias_set = get_alias_set (base_ref);
547 return ref->base_alias_set;
550 /* Returns the reference alias set of the memory reference *REF. */
552 alias_set_type
553 ao_ref_alias_set (ao_ref *ref)
555 if (ref->ref_alias_set != -1)
556 return ref->ref_alias_set;
557 ref->ref_alias_set = get_alias_set (ref->ref);
558 return ref->ref_alias_set;
561 /* Init an alias-oracle reference representation from a gimple pointer
562    PTR and a gimple size SIZE in bytes.  If SIZE is NULL_TREE the
563    size is assumed to be unknown.  The access is assumed to be only
564    to or after the pointer target, not before it.  */
566 void
567 ao_ref_init_from_ptr_and_size (ao_ref *ref, tree ptr, tree size)
569 HOST_WIDE_INT t1, t2, extra_offset = 0;
570 ref->ref = NULL_TREE;
571 if (TREE_CODE (ptr) == SSA_NAME)
573 gimple stmt = SSA_NAME_DEF_STMT (ptr);
574 if (gimple_assign_single_p (stmt)
575 && gimple_assign_rhs_code (stmt) == ADDR_EXPR)
576 ptr = gimple_assign_rhs1 (stmt);
577 else if (is_gimple_assign (stmt)
578 && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
579 && host_integerp (gimple_assign_rhs2 (stmt), 0)
580 && (t1 = int_cst_value (gimple_assign_rhs2 (stmt))) >= 0)
582 ptr = gimple_assign_rhs1 (stmt);
583 extra_offset = BITS_PER_UNIT * t1;
587 if (TREE_CODE (ptr) == ADDR_EXPR)
588 ref->base = get_ref_base_and_extent (TREE_OPERAND (ptr, 0),
589 &ref->offset, &t1, &t2);
590 else
592 ref->base = build2 (MEM_REF, char_type_node,
593 ptr, null_pointer_node);
594 ref->offset = 0;
596 ref->offset += extra_offset;
597 if (size
598 && host_integerp (size, 0)
599 && TREE_INT_CST_LOW (size) * BITS_PER_UNIT / BITS_PER_UNIT
600 == TREE_INT_CST_LOW (size))
601 ref->max_size = ref->size = TREE_INT_CST_LOW (size) * BITS_PER_UNIT;
602 else
603 ref->max_size = ref->size = -1;
604 ref->ref_alias_set = 0;
605 ref->base_alias_set = 0;
606 ref->volatile_p = false;
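/* Illustrative sketch (added): this mirrors how the builtin handling
   later in this file models the destination of memcpy (dest, src, n)
   given the GIMPLE_CALL in CALL.  */

static bool
example_memcpy_dest_may_alias_p (gimple call, ao_ref *ref)
{
  ao_ref dref;
  /* Argument 0 is the destination pointer, argument 2 the size.  */
  ao_ref_init_from_ptr_and_size (&dref,
				 gimple_call_arg (call, 0),
				 gimple_call_arg (call, 2));
  return refs_may_alias_p_1 (&dref, ref, false);
}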
609 /* Return 1 if TYPE1 and TYPE2 are to be considered equivalent for the
610 purpose of TBAA. Return 0 if they are distinct and -1 if we cannot
611 decide. */
613 static inline int
614 same_type_for_tbaa (tree type1, tree type2)
616 type1 = TYPE_MAIN_VARIANT (type1);
617 type2 = TYPE_MAIN_VARIANT (type2);
619 /* If we would have to do structural comparison bail out. */
620 if (TYPE_STRUCTURAL_EQUALITY_P (type1)
621 || TYPE_STRUCTURAL_EQUALITY_P (type2))
622 return -1;
624 /* Compare the canonical types. */
625 if (TYPE_CANONICAL (type1) == TYPE_CANONICAL (type2))
626 return 1;
628 /* ??? Array types are not properly unified in all cases as we have
629 spurious changes in the index types for example. Removing this
630 causes all sorts of problems with the Fortran frontend. */
631 if (TREE_CODE (type1) == ARRAY_TYPE
632 && TREE_CODE (type2) == ARRAY_TYPE)
633 return -1;
635 /* ??? In Ada, an lvalue of an unconstrained type can be used to access an
636 object of one of its constrained subtypes, e.g. when a function with an
637 unconstrained parameter passed by reference is called on an object and
638 inlined. But, even in the case of a fixed size, type and subtypes are
639 not equivalent enough as to share the same TYPE_CANONICAL, since this
640 would mean that conversions between them are useless, whereas they are
641 not (e.g. type and subtypes can have different modes). So, in the end,
642 they are only guaranteed to have the same alias set. */
643 if (get_alias_set (type1) == get_alias_set (type2))
644 return -1;
646 /* The types are known to be not equal. */
647 return 0;
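/* Illustrative examples (added): qualifiers are irrelevant because
   main variants are compared, so "const int" vs. "int" yields 1;
   "int" vs. "float" yields 0; and two array types with the same
   element type may yield -1 per the ??? comment above.  */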
650 /* Determine if the two component references REF1 and REF2 which are
651 based on access types TYPE1 and TYPE2 and of which at least one is based
652 on an indirect reference may alias. REF2 is the only one that can
653 be a decl in which case REF2_IS_DECL is true.
654 REF1_ALIAS_SET, BASE1_ALIAS_SET, REF2_ALIAS_SET and BASE2_ALIAS_SET
655 are the respective alias sets. */
657 static bool
658 aliasing_component_refs_p (tree ref1,
659 alias_set_type ref1_alias_set,
660 alias_set_type base1_alias_set,
661 HOST_WIDE_INT offset1, HOST_WIDE_INT max_size1,
662 tree ref2,
663 alias_set_type ref2_alias_set,
664 alias_set_type base2_alias_set,
665 HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2,
666 bool ref2_is_decl)
668   /* If one reference is a component reference through pointers, try to find a
669 common base and apply offset based disambiguation. This handles
670 for example
671 struct A { int i; int j; } *q;
672 struct B { struct A a; int k; } *p;
673 disambiguating q->i and p->a.j. */
674 tree base1, base2;
675 tree type1, type2;
676 tree *refp;
677 int same_p;
679 /* Choose bases and base types to search for. */
680 base1 = ref1;
681 while (handled_component_p (base1))
682 base1 = TREE_OPERAND (base1, 0);
683 type1 = TREE_TYPE (base1);
684 base2 = ref2;
685 while (handled_component_p (base2))
686 base2 = TREE_OPERAND (base2, 0);
687 type2 = TREE_TYPE (base2);
689   /* Now search for type1 in the access path of ref2.  This
690 would be a common base for doing offset based disambiguation on. */
691 refp = &ref2;
692 while (handled_component_p (*refp)
693 && same_type_for_tbaa (TREE_TYPE (*refp), type1) == 0)
694 refp = &TREE_OPERAND (*refp, 0);
695 same_p = same_type_for_tbaa (TREE_TYPE (*refp), type1);
696 /* If we couldn't compare types we have to bail out. */
697 if (same_p == -1)
698 return true;
699 else if (same_p == 1)
701 HOST_WIDE_INT offadj, sztmp, msztmp;
702 get_ref_base_and_extent (*refp, &offadj, &sztmp, &msztmp);
703 offset2 -= offadj;
704 get_ref_base_and_extent (base1, &offadj, &sztmp, &msztmp);
705 offset1 -= offadj;
706 return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
708 /* If we didn't find a common base, try the other way around. */
709 refp = &ref1;
710 while (handled_component_p (*refp)
711 && same_type_for_tbaa (TREE_TYPE (*refp), type2) == 0)
712 refp = &TREE_OPERAND (*refp, 0);
713 same_p = same_type_for_tbaa (TREE_TYPE (*refp), type2);
714 /* If we couldn't compare types we have to bail out. */
715 if (same_p == -1)
716 return true;
717 else if (same_p == 1)
719 HOST_WIDE_INT offadj, sztmp, msztmp;
720 get_ref_base_and_extent (*refp, &offadj, &sztmp, &msztmp);
721 offset1 -= offadj;
722 get_ref_base_and_extent (base2, &offadj, &sztmp, &msztmp);
723 offset2 -= offadj;
724 return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
727 /* If we have two type access paths B1.path1 and B2.path2 they may
728 only alias if either B1 is in B2.path2 or B2 is in B1.path1.
729 But we can still have a path that goes B1.path1...B2.path2 with
730 a part that we do not see. So we can only disambiguate now
731 if there is no B2 in the tail of path1 and no B1 on the
732 tail of path2. */
733 if (base1_alias_set == ref2_alias_set
734 || alias_set_subset_of (base1_alias_set, ref2_alias_set))
735 return true;
736 /* If this is ptr vs. decl then we know there is no ptr ... decl path. */
737 if (!ref2_is_decl)
738 return (base2_alias_set == ref1_alias_set
739 || alias_set_subset_of (base2_alias_set, ref1_alias_set));
740 return false;
743 /* Return true if we can determine that component references REF1 and REF2,
744 that are within a common DECL, cannot overlap. */
746 static bool
747 nonoverlapping_component_refs_of_decl_p (tree ref1, tree ref2)
749 stack_vec<tree, 16> component_refs1;
750 stack_vec<tree, 16> component_refs2;
752 /* Create the stack of handled components for REF1. */
753 while (handled_component_p (ref1))
755 component_refs1.safe_push (ref1);
756 ref1 = TREE_OPERAND (ref1, 0);
758 if (TREE_CODE (ref1) == MEM_REF)
760 if (!integer_zerop (TREE_OPERAND (ref1, 1)))
761 goto may_overlap;
762 ref1 = TREE_OPERAND (TREE_OPERAND (ref1, 0), 0);
765 /* Create the stack of handled components for REF2. */
766 while (handled_component_p (ref2))
768 component_refs2.safe_push (ref2);
769 ref2 = TREE_OPERAND (ref2, 0);
771 if (TREE_CODE (ref2) == MEM_REF)
773 if (!integer_zerop (TREE_OPERAND (ref2, 1)))
774 goto may_overlap;
775 ref2 = TREE_OPERAND (TREE_OPERAND (ref2, 0), 0);
778 /* We must have the same base DECL. */
779 gcc_assert (ref1 == ref2);
781 /* Pop the stacks in parallel and examine the COMPONENT_REFs of the same
782 rank. This is sufficient because we start from the same DECL and you
783 cannot reference several fields at a time with COMPONENT_REFs (unlike
784 with ARRAY_RANGE_REFs for arrays) so you always need the same number
785 of them to access a sub-component, unless you're in a union, in which
786 case the return value will precisely be false. */
787 while (true)
791 if (component_refs1.is_empty ())
792 goto may_overlap;
793 ref1 = component_refs1.pop ();
795 while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref1, 0))));
799 if (component_refs2.is_empty ())
800 goto may_overlap;
801 ref2 = component_refs2.pop ();
803 while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref2, 0))));
805 /* Beware of BIT_FIELD_REF. */
806 if (TREE_CODE (ref1) != COMPONENT_REF
807 || TREE_CODE (ref2) != COMPONENT_REF)
808 goto may_overlap;
810 tree field1 = TREE_OPERAND (ref1, 1);
811 tree field2 = TREE_OPERAND (ref2, 1);
813 /* ??? We cannot simply use the type of operand #0 of the refs here
814 as the Fortran compiler smuggles type punning into COMPONENT_REFs
815 for common blocks instead of using unions like everyone else. */
816 tree type1 = TYPE_MAIN_VARIANT (DECL_CONTEXT (field1));
817 tree type2 = TYPE_MAIN_VARIANT (DECL_CONTEXT (field2));
819 /* We cannot disambiguate fields in a union or qualified union. */
820 if (type1 != type2 || TREE_CODE (type1) != RECORD_TYPE)
821 goto may_overlap;
823 /* Different fields of the same record type cannot overlap.
824 ??? Bitfields can overlap at RTL level so punt on them. */
825 if (field1 != field2)
827 component_refs1.release ();
828 component_refs2.release ();
829 return !(DECL_BIT_FIELD (field1) && DECL_BIT_FIELD (field2));
833 may_overlap:
834 component_refs1.release ();
835 component_refs2.release ();
836 return false;
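/* Illustrative example (added): for

     struct S { int a; int b; } s;
     union U { int a; int b; } u;

   s.a vs. s.b is disambiguated (the distinct-field check returns
   true), while u.a vs. u.b hits the union check above and
   conservatively returns false.  */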
839 /* Return true if two memory references based on the variables BASE1
840 and BASE2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
841 [OFFSET2, OFFSET2 + MAX_SIZE2) may alias. REF1 and REF2
842 if non-NULL are the complete memory reference trees. */
844 static bool
845 decl_refs_may_alias_p (tree ref1, tree base1,
846 HOST_WIDE_INT offset1, HOST_WIDE_INT max_size1,
847 tree ref2, tree base2,
848 HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2)
850 gcc_checking_assert (DECL_P (base1) && DECL_P (base2));
852 /* If both references are based on different variables, they cannot alias. */
853 if (base1 != base2)
854 return false;
856 /* If both references are based on the same variable, they cannot alias if
857 the accesses do not overlap. */
858 if (!ranges_overlap_p (offset1, max_size1, offset2, max_size2))
859 return false;
861 /* For components with variable position, the above test isn't sufficient,
862 so we disambiguate component references manually. */
863 if (ref1 && ref2
864 && handled_component_p (ref1) && handled_component_p (ref2)
865 && nonoverlapping_component_refs_of_decl_p (ref1, ref2))
866 return false;
868 return true;
871 /* Return true if an indirect reference based on *PTR1 constrained
872 to [OFFSET1, OFFSET1 + MAX_SIZE1) may alias a variable based on BASE2
873 constrained to [OFFSET2, OFFSET2 + MAX_SIZE2). *PTR1 and BASE2 have
874 the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
875 in which case they are computed on-demand. REF1 and REF2
876 if non-NULL are the complete memory reference trees. */
878 static bool
879 indirect_ref_may_alias_decl_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
880 HOST_WIDE_INT offset1,
881 HOST_WIDE_INT max_size1 ATTRIBUTE_UNUSED,
882 alias_set_type ref1_alias_set,
883 alias_set_type base1_alias_set,
884 tree ref2 ATTRIBUTE_UNUSED, tree base2,
885 HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2,
886 alias_set_type ref2_alias_set,
887 alias_set_type base2_alias_set, bool tbaa_p)
889 tree ptr1;
890 tree ptrtype1, dbase2;
891 HOST_WIDE_INT offset1p = offset1, offset2p = offset2;
892 HOST_WIDE_INT doffset1, doffset2;
893 double_int moff;
895 gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
896 || TREE_CODE (base1) == TARGET_MEM_REF)
897 && DECL_P (base2));
899 ptr1 = TREE_OPERAND (base1, 0);
901 /* The offset embedded in MEM_REFs can be negative. Bias them
902 so that the resulting offset adjustment is positive. */
903 moff = mem_ref_offset (base1);
904 moff = moff.lshift (BITS_PER_UNIT == 8 ? 3 : exact_log2 (BITS_PER_UNIT));
905 if (moff.is_negative ())
906 offset2p += (-moff).low;
907 else
908 offset1p += moff.low;
910 /* If only one reference is based on a variable, they cannot alias if
911 the pointer access is beyond the extent of the variable access.
912 (the pointer base cannot validly point to an offset less than zero
913 of the variable).
914 ??? IVOPTs creates bases that do not honor this restriction,
915 so do not apply this optimization for TARGET_MEM_REFs. */
916 if (TREE_CODE (base1) != TARGET_MEM_REF
917 && !ranges_overlap_p (MAX (0, offset1p), -1, offset2p, max_size2))
918 return false;
919 /* They also cannot alias if the pointer may not point to the decl. */
920 if (!ptr_deref_may_alias_decl_p (ptr1, base2))
921 return false;
923 /* Disambiguations that rely on strict aliasing rules follow. */
924 if (!flag_strict_aliasing || !tbaa_p)
925 return true;
927 ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));
929 /* If the alias set for a pointer access is zero all bets are off. */
930 if (base1_alias_set == -1)
931 base1_alias_set = get_deref_alias_set (ptrtype1);
932 if (base1_alias_set == 0)
933 return true;
934 if (base2_alias_set == -1)
935 base2_alias_set = get_alias_set (base2);
937 /* When we are trying to disambiguate an access with a pointer dereference
938 as base versus one with a decl as base we can use both the size
939 of the decl and its dynamic type for extra disambiguation.
940 ??? We do not know anything about the dynamic type of the decl
941 other than that its alias-set contains base2_alias_set as a subset
942 which does not help us here. */
943 /* As we know nothing useful about the dynamic type of the decl just
944 use the usual conflict check rather than a subset test.
945 ??? We could introduce -fvery-strict-aliasing when the language
946 does not allow decls to have a dynamic type that differs from their
947 static type. Then we can check
948 !alias_set_subset_of (base1_alias_set, base2_alias_set) instead. */
949 if (base1_alias_set != base2_alias_set
950 && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
951 return false;
952 /* If the size of the access relevant for TBAA through the pointer
953 is bigger than the size of the decl we can't possibly access the
954 decl via that pointer. */
955 if (DECL_SIZE (base2) && COMPLETE_TYPE_P (TREE_TYPE (ptrtype1))
956 && TREE_CODE (DECL_SIZE (base2)) == INTEGER_CST
957 && TREE_CODE (TYPE_SIZE (TREE_TYPE (ptrtype1))) == INTEGER_CST
958 /* ??? This in turn may run afoul when a decl of type T which is
959 a member of union type U is accessed through a pointer to
960 type U and sizeof T is smaller than sizeof U. */
961 && TREE_CODE (TREE_TYPE (ptrtype1)) != UNION_TYPE
962 && TREE_CODE (TREE_TYPE (ptrtype1)) != QUAL_UNION_TYPE
963 && tree_int_cst_lt (DECL_SIZE (base2), TYPE_SIZE (TREE_TYPE (ptrtype1))))
964 return false;
966 if (!ref2)
967 return true;
969 /* If the decl is accessed via a MEM_REF, reconstruct the base
970 we can use for TBAA and an appropriately adjusted offset. */
971 dbase2 = ref2;
972 while (handled_component_p (dbase2))
973 dbase2 = TREE_OPERAND (dbase2, 0);
974 doffset1 = offset1;
975 doffset2 = offset2;
976 if (TREE_CODE (dbase2) == MEM_REF
977 || TREE_CODE (dbase2) == TARGET_MEM_REF)
979 double_int moff = mem_ref_offset (dbase2);
980 moff = moff.lshift (BITS_PER_UNIT == 8 ? 3 : exact_log2 (BITS_PER_UNIT));
981 if (moff.is_negative ())
982 doffset1 -= (-moff).low;
983 else
984 doffset2 -= moff.low;
987 /* If either reference is view-converted, give up now. */
988 if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1
989 || same_type_for_tbaa (TREE_TYPE (dbase2), TREE_TYPE (base2)) != 1)
990 return true;
992 /* If both references are through the same type, they do not alias
993 if the accesses do not overlap. This does extra disambiguation
994 for mixed/pointer accesses but requires strict aliasing.
995 For MEM_REFs we require that the component-ref offset we computed
996 is relative to the start of the type which we ensure by
997 comparing rvalue and access type and disregarding the constant
998 pointer offset. */
999 if ((TREE_CODE (base1) != TARGET_MEM_REF
1000 || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
1001 && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (dbase2)) == 1)
1002 return ranges_overlap_p (doffset1, max_size1, doffset2, max_size2);
1004 /* Do access-path based disambiguation. */
1005 if (ref1 && ref2
1006 && (handled_component_p (ref1) || handled_component_p (ref2)))
1007 return aliasing_component_refs_p (ref1,
1008 ref1_alias_set, base1_alias_set,
1009 offset1, max_size1,
1010 ref2,
1011 ref2_alias_set, base2_alias_set,
1012 offset2, max_size2, true);
1014 return true;
1017 /* Return true if two indirect references based on *PTR1
1018 and *PTR2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
1019 [OFFSET2, OFFSET2 + MAX_SIZE2) may alias. *PTR1 and *PTR2 have
1020 the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
1021 in which case they are computed on-demand. REF1 and REF2
1022 if non-NULL are the complete memory reference trees. */
1024 static bool
1025 indirect_refs_may_alias_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
1026 HOST_WIDE_INT offset1, HOST_WIDE_INT max_size1,
1027 alias_set_type ref1_alias_set,
1028 alias_set_type base1_alias_set,
1029 tree ref2 ATTRIBUTE_UNUSED, tree base2,
1030 HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2,
1031 alias_set_type ref2_alias_set,
1032 alias_set_type base2_alias_set, bool tbaa_p)
1034 tree ptr1;
1035 tree ptr2;
1036 tree ptrtype1, ptrtype2;
1038 gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
1039 || TREE_CODE (base1) == TARGET_MEM_REF)
1040 && (TREE_CODE (base2) == MEM_REF
1041 || TREE_CODE (base2) == TARGET_MEM_REF));
1043 ptr1 = TREE_OPERAND (base1, 0);
1044 ptr2 = TREE_OPERAND (base2, 0);
1046 /* If both bases are based on pointers they cannot alias if they may not
1047 point to the same memory object or if they point to the same object
1048 and the accesses do not overlap. */
1049 if ((!cfun || gimple_in_ssa_p (cfun))
1050 && operand_equal_p (ptr1, ptr2, 0)
1051 && (((TREE_CODE (base1) != TARGET_MEM_REF
1052 || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
1053 && (TREE_CODE (base2) != TARGET_MEM_REF
1054 || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2))))
1055 || (TREE_CODE (base1) == TARGET_MEM_REF
1056 && TREE_CODE (base2) == TARGET_MEM_REF
1057 && (TMR_STEP (base1) == TMR_STEP (base2)
1058 || (TMR_STEP (base1) && TMR_STEP (base2)
1059 && operand_equal_p (TMR_STEP (base1),
1060 TMR_STEP (base2), 0)))
1061 && (TMR_INDEX (base1) == TMR_INDEX (base2)
1062 || (TMR_INDEX (base1) && TMR_INDEX (base2)
1063 && operand_equal_p (TMR_INDEX (base1),
1064 TMR_INDEX (base2), 0)))
1065 && (TMR_INDEX2 (base1) == TMR_INDEX2 (base2)
1066 || (TMR_INDEX2 (base1) && TMR_INDEX2 (base2)
1067 && operand_equal_p (TMR_INDEX2 (base1),
1068 TMR_INDEX2 (base2), 0))))))
1070 double_int moff;
1071 /* The offset embedded in MEM_REFs can be negative. Bias them
1072 so that the resulting offset adjustment is positive. */
1073 moff = mem_ref_offset (base1);
1074 moff = moff.lshift (BITS_PER_UNIT == 8 ? 3 : exact_log2 (BITS_PER_UNIT));
1075 if (moff.is_negative ())
1076 offset2 += (-moff).low;
1077 else
1078 offset1 += moff.low;
1079 moff = mem_ref_offset (base2);
1080 moff = moff.lshift (BITS_PER_UNIT == 8 ? 3 : exact_log2 (BITS_PER_UNIT));
1081 if (moff.is_negative ())
1082 offset1 += (-moff).low;
1083 else
1084 offset2 += moff.low;
1085 return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
1087 if (!ptr_derefs_may_alias_p (ptr1, ptr2))
1088 return false;
1090 /* Disambiguations that rely on strict aliasing rules follow. */
1091 if (!flag_strict_aliasing || !tbaa_p)
1092 return true;
1094 ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));
1095 ptrtype2 = TREE_TYPE (TREE_OPERAND (base2, 1));
1097 /* If the alias set for a pointer access is zero all bets are off. */
1098 if (base1_alias_set == -1)
1099 base1_alias_set = get_deref_alias_set (ptrtype1);
1100 if (base1_alias_set == 0)
1101 return true;
1102 if (base2_alias_set == -1)
1103 base2_alias_set = get_deref_alias_set (ptrtype2);
1104 if (base2_alias_set == 0)
1105 return true;
1107 /* If both references are through the same type, they do not alias
1108 if the accesses do not overlap. This does extra disambiguation
1109 for mixed/pointer accesses but requires strict aliasing. */
1110 if ((TREE_CODE (base1) != TARGET_MEM_REF
1111 || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
1112 && (TREE_CODE (base2) != TARGET_MEM_REF
1113 || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2)))
1114 && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) == 1
1115 && same_type_for_tbaa (TREE_TYPE (base2), TREE_TYPE (ptrtype2)) == 1
1116 && same_type_for_tbaa (TREE_TYPE (ptrtype1),
1117 TREE_TYPE (ptrtype2)) == 1)
1118 return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
1120 /* Do type-based disambiguation. */
1121 if (base1_alias_set != base2_alias_set
1122 && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
1123 return false;
1125 /* Do access-path based disambiguation. */
1126 if (ref1 && ref2
1127 && (handled_component_p (ref1) || handled_component_p (ref2))
1128 && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) == 1
1129 && same_type_for_tbaa (TREE_TYPE (base2), TREE_TYPE (ptrtype2)) == 1)
1130 return aliasing_component_refs_p (ref1,
1131 ref1_alias_set, base1_alias_set,
1132 offset1, max_size1,
1133 ref2,
1134 ref2_alias_set, base2_alias_set,
1135 offset2, max_size2, false);
1137 return true;
1140 /* Return true if the two memory references REF1 and REF2 may alias.  */
1142 bool
1143 refs_may_alias_p_1 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
1145 tree base1, base2;
1146 HOST_WIDE_INT offset1 = 0, offset2 = 0;
1147 HOST_WIDE_INT max_size1 = -1, max_size2 = -1;
1148 bool var1_p, var2_p, ind1_p, ind2_p;
1150 gcc_checking_assert ((!ref1->ref
1151 || TREE_CODE (ref1->ref) == SSA_NAME
1152 || DECL_P (ref1->ref)
1153 || TREE_CODE (ref1->ref) == STRING_CST
1154 || handled_component_p (ref1->ref)
1155 || TREE_CODE (ref1->ref) == MEM_REF
1156 || TREE_CODE (ref1->ref) == TARGET_MEM_REF)
1157 && (!ref2->ref
1158 || TREE_CODE (ref2->ref) == SSA_NAME
1159 || DECL_P (ref2->ref)
1160 || TREE_CODE (ref2->ref) == STRING_CST
1161 || handled_component_p (ref2->ref)
1162 || TREE_CODE (ref2->ref) == MEM_REF
1163 || TREE_CODE (ref2->ref) == TARGET_MEM_REF));
1165 /* Decompose the references into their base objects and the access. */
1166 base1 = ao_ref_base (ref1);
1167 offset1 = ref1->offset;
1168 max_size1 = ref1->max_size;
1169 base2 = ao_ref_base (ref2);
1170 offset2 = ref2->offset;
1171 max_size2 = ref2->max_size;
1173 /* We can end up with registers or constants as bases for example from
1174 *D.1663_44 = VIEW_CONVERT_EXPR<struct DB_LSN>(__tmp$B0F64_59);
1175 which is seen as a struct copy. */
1176 if (TREE_CODE (base1) == SSA_NAME
1177 || TREE_CODE (base1) == CONST_DECL
1178 || TREE_CODE (base1) == CONSTRUCTOR
1179 || TREE_CODE (base1) == ADDR_EXPR
1180 || CONSTANT_CLASS_P (base1)
1181 || TREE_CODE (base2) == SSA_NAME
1182 || TREE_CODE (base2) == CONST_DECL
1183 || TREE_CODE (base2) == CONSTRUCTOR
1184 || TREE_CODE (base2) == ADDR_EXPR
1185 || CONSTANT_CLASS_P (base2))
1186 return false;
1188 /* We can end up referring to code via function and label decls.
1189 As we likely do not properly track code aliases conservatively
1190 bail out. */
1191 if (TREE_CODE (base1) == FUNCTION_DECL
1192 || TREE_CODE (base1) == LABEL_DECL
1193 || TREE_CODE (base2) == FUNCTION_DECL
1194 || TREE_CODE (base2) == LABEL_DECL)
1195 return true;
1197 /* Two volatile accesses always conflict. */
1198 if (ref1->volatile_p
1199 && ref2->volatile_p)
1200 return true;
1202 /* Defer to simple offset based disambiguation if we have
1203      references based on two decls.  Do this before deferring to
1204 TBAA to handle must-alias cases in conformance with the
1205 GCC extension of allowing type-punning through unions. */
1206 var1_p = DECL_P (base1);
1207 var2_p = DECL_P (base2);
1208 if (var1_p && var2_p)
1209 return decl_refs_may_alias_p (ref1->ref, base1, offset1, max_size1,
1210 ref2->ref, base2, offset2, max_size2);
1212 ind1_p = (TREE_CODE (base1) == MEM_REF
1213 || TREE_CODE (base1) == TARGET_MEM_REF);
1214 ind2_p = (TREE_CODE (base2) == MEM_REF
1215 || TREE_CODE (base2) == TARGET_MEM_REF);
1217 /* Canonicalize the pointer-vs-decl case. */
1218 if (ind1_p && var2_p)
1220 HOST_WIDE_INT tmp1;
1221 tree tmp2;
1222 ao_ref *tmp3;
1223 tmp1 = offset1; offset1 = offset2; offset2 = tmp1;
1224 tmp1 = max_size1; max_size1 = max_size2; max_size2 = tmp1;
1225 tmp2 = base1; base1 = base2; base2 = tmp2;
1226 tmp3 = ref1; ref1 = ref2; ref2 = tmp3;
1227 var1_p = true;
1228 ind1_p = false;
1229 var2_p = false;
1230 ind2_p = true;
1233 /* First defer to TBAA if possible. */
1234 if (tbaa_p
1235 && flag_strict_aliasing
1236 && !alias_sets_conflict_p (ao_ref_alias_set (ref1),
1237 ao_ref_alias_set (ref2)))
1238 return false;
1240 /* Dispatch to the pointer-vs-decl or pointer-vs-pointer disambiguators. */
1241 if (var1_p && ind2_p)
1242 return indirect_ref_may_alias_decl_p (ref2->ref, base2,
1243 offset2, max_size2,
1244 ao_ref_alias_set (ref2), -1,
1245 ref1->ref, base1,
1246 offset1, max_size1,
1247 ao_ref_alias_set (ref1),
1248 ao_ref_base_alias_set (ref1),
1249 tbaa_p);
1250 else if (ind1_p && ind2_p)
1251 return indirect_refs_may_alias_p (ref1->ref, base1,
1252 offset1, max_size1,
1253 ao_ref_alias_set (ref1), -1,
1254 ref2->ref, base2,
1255 offset2, max_size2,
1256 ao_ref_alias_set (ref2), -1,
1257 tbaa_p);
1259 /* We really do not want to end up here, but returning true is safe. */
1260 #ifdef ENABLE_CHECKING
1261 gcc_unreachable ();
1262 #else
1263 return true;
1264 #endif
1267 bool
1268 refs_may_alias_p (tree ref1, tree ref2)
1270 ao_ref r1, r2;
1271 bool res;
1272 ao_ref_init (&r1, ref1);
1273 ao_ref_init (&r2, ref2);
1274 res = refs_may_alias_p_1 (&r1, &r2, true);
1275 if (res)
1276 ++alias_stats.refs_may_alias_p_may_alias;
1277 else
1278 ++alias_stats.refs_may_alias_p_no_alias;
1279 return res;
1282 /* Returns true if there is an anti-dependence for the STORE that
1283 executes after the LOAD. */
1285 bool
1286 refs_anti_dependent_p (tree load, tree store)
1288 ao_ref r1, r2;
1289 ao_ref_init (&r1, load);
1290 ao_ref_init (&r2, store);
1291 return refs_may_alias_p_1 (&r1, &r2, false);
1294 /* Returns true if there is an output dependence for the stores
1295 STORE1 and STORE2. */
1297 bool
1298 refs_output_dependent_p (tree store1, tree store2)
1300 ao_ref r1, r2;
1301 ao_ref_init (&r1, store1);
1302 ao_ref_init (&r2, store2);
1303 return refs_may_alias_p_1 (&r1, &r2, false);
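/* Illustrative note (added): both dependence predicates above pass
   TBAA_P == false to refs_may_alias_p_1, so a client checking

     if (!refs_anti_dependent_p (load_tree, store_tree))
       ...the later store may be scheduled before the load...

   does not rely on type-based aliasing rules for ordering queries.  */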
1306 /* If the call CALL may use the memory reference REF return true,
1307 otherwise return false. */
1309 static bool
1310 ref_maybe_used_by_call_p_1 (gimple call, ao_ref *ref)
1312 tree base, callee;
1313 unsigned i;
1314 int flags = gimple_call_flags (call);
1316 /* Const functions without a static chain do not implicitly use memory. */
1317 if (!gimple_call_chain (call)
1318 && (flags & (ECF_CONST|ECF_NOVOPS)))
1319 goto process_args;
1321 base = ao_ref_base (ref);
1322 if (!base)
1323 return true;
1325 /* A call that is not without side-effects might involve volatile
1326 accesses and thus conflicts with all other volatile accesses. */
1327 if (ref->volatile_p)
1328 return true;
1330 /* If the reference is based on a decl that is not aliased the call
1331 cannot possibly use it. */
1332 if (DECL_P (base)
1333 && !may_be_aliased (base)
1334 /* But local statics can be used through recursion. */
1335 && !is_global_var (base))
1336 goto process_args;
1338 callee = gimple_call_fndecl (call);
1340 /* Handle those builtin functions explicitly that do not act as
1341 escape points. See tree-ssa-structalias.c:find_func_aliases
1342 for the list of builtins we might need to handle here. */
1343 if (callee != NULL_TREE
1344 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
1345 switch (DECL_FUNCTION_CODE (callee))
1347 /* All the following functions read memory pointed to by
1348 their second argument. strcat/strncat additionally
1349 reads memory pointed to by the first argument. */
1350 case BUILT_IN_STRCAT:
1351 case BUILT_IN_STRNCAT:
1353 ao_ref dref;
1354 ao_ref_init_from_ptr_and_size (&dref,
1355 gimple_call_arg (call, 0),
1356 NULL_TREE);
1357 if (refs_may_alias_p_1 (&dref, ref, false))
1358 return true;
1360 /* FALLTHRU */
1361 case BUILT_IN_STRCPY:
1362 case BUILT_IN_STRNCPY:
1363 case BUILT_IN_MEMCPY:
1364 case BUILT_IN_MEMMOVE:
1365 case BUILT_IN_MEMPCPY:
1366 case BUILT_IN_STPCPY:
1367 case BUILT_IN_STPNCPY:
1368 case BUILT_IN_TM_MEMCPY:
1369 case BUILT_IN_TM_MEMMOVE:
1371 ao_ref dref;
1372 tree size = NULL_TREE;
1373 if (gimple_call_num_args (call) == 3)
1374 size = gimple_call_arg (call, 2);
1375 ao_ref_init_from_ptr_and_size (&dref,
1376 gimple_call_arg (call, 1),
1377 size);
1378 return refs_may_alias_p_1 (&dref, ref, false);
1380 case BUILT_IN_STRCAT_CHK:
1381 case BUILT_IN_STRNCAT_CHK:
1383 ao_ref dref;
1384 ao_ref_init_from_ptr_and_size (&dref,
1385 gimple_call_arg (call, 0),
1386 NULL_TREE);
1387 if (refs_may_alias_p_1 (&dref, ref, false))
1388 return true;
1390 /* FALLTHRU */
1391 case BUILT_IN_STRCPY_CHK:
1392 case BUILT_IN_STRNCPY_CHK:
1393 case BUILT_IN_MEMCPY_CHK:
1394 case BUILT_IN_MEMMOVE_CHK:
1395 case BUILT_IN_MEMPCPY_CHK:
1396 case BUILT_IN_STPCPY_CHK:
1397 case BUILT_IN_STPNCPY_CHK:
1399 ao_ref dref;
1400 tree size = NULL_TREE;
1401 if (gimple_call_num_args (call) == 4)
1402 size = gimple_call_arg (call, 2);
1403 ao_ref_init_from_ptr_and_size (&dref,
1404 gimple_call_arg (call, 1),
1405 size);
1406 return refs_may_alias_p_1 (&dref, ref, false);
1408 case BUILT_IN_BCOPY:
1410 ao_ref dref;
1411 tree size = gimple_call_arg (call, 2);
1412 ao_ref_init_from_ptr_and_size (&dref,
1413 gimple_call_arg (call, 0),
1414 size);
1415 return refs_may_alias_p_1 (&dref, ref, false);
1418 /* The following functions read memory pointed to by their
1419 first argument. */
1420 CASE_BUILT_IN_TM_LOAD (1):
1421 CASE_BUILT_IN_TM_LOAD (2):
1422 CASE_BUILT_IN_TM_LOAD (4):
1423 CASE_BUILT_IN_TM_LOAD (8):
1424 CASE_BUILT_IN_TM_LOAD (FLOAT):
1425 CASE_BUILT_IN_TM_LOAD (DOUBLE):
1426 CASE_BUILT_IN_TM_LOAD (LDOUBLE):
1427 CASE_BUILT_IN_TM_LOAD (M64):
1428 CASE_BUILT_IN_TM_LOAD (M128):
1429 CASE_BUILT_IN_TM_LOAD (M256):
1430 case BUILT_IN_TM_LOG:
1431 case BUILT_IN_TM_LOG_1:
1432 case BUILT_IN_TM_LOG_2:
1433 case BUILT_IN_TM_LOG_4:
1434 case BUILT_IN_TM_LOG_8:
1435 case BUILT_IN_TM_LOG_FLOAT:
1436 case BUILT_IN_TM_LOG_DOUBLE:
1437 case BUILT_IN_TM_LOG_LDOUBLE:
1438 case BUILT_IN_TM_LOG_M64:
1439 case BUILT_IN_TM_LOG_M128:
1440 case BUILT_IN_TM_LOG_M256:
1441 return ptr_deref_may_alias_ref_p_1 (gimple_call_arg (call, 0), ref);
1443 /* These read memory pointed to by the first argument. */
1444 case BUILT_IN_STRDUP:
1445 case BUILT_IN_STRNDUP:
1447 ao_ref dref;
1448 tree size = NULL_TREE;
1449 if (gimple_call_num_args (call) == 2)
1450 size = gimple_call_arg (call, 1);
1451 ao_ref_init_from_ptr_and_size (&dref,
1452 gimple_call_arg (call, 0),
1453 size);
1454 return refs_may_alias_p_1 (&dref, ref, false);
1456 /* These read memory pointed to by the first argument. */
1457 case BUILT_IN_INDEX:
1458 case BUILT_IN_STRCHR:
1459 case BUILT_IN_STRRCHR:
1461 ao_ref dref;
1462 ao_ref_init_from_ptr_and_size (&dref,
1463 gimple_call_arg (call, 0),
1464 NULL_TREE);
1465 return refs_may_alias_p_1 (&dref, ref, false);
1467 /* These read memory pointed to by the first argument with size
1468 in the third argument. */
1469 case BUILT_IN_MEMCHR:
1471 ao_ref dref;
1472 ao_ref_init_from_ptr_and_size (&dref,
1473 gimple_call_arg (call, 0),
1474 gimple_call_arg (call, 2));
1475 return refs_may_alias_p_1 (&dref, ref, false);
1477 /* These read memory pointed to by the first and second arguments. */
1478 case BUILT_IN_STRSTR:
1479 case BUILT_IN_STRPBRK:
1481 ao_ref dref;
1482 ao_ref_init_from_ptr_and_size (&dref,
1483 gimple_call_arg (call, 0),
1484 NULL_TREE);
1485 if (refs_may_alias_p_1 (&dref, ref, false))
1486 return true;
1487 ao_ref_init_from_ptr_and_size (&dref,
1488 gimple_call_arg (call, 1),
1489 NULL_TREE);
1490 return refs_may_alias_p_1 (&dref, ref, false);
1493 /* The following builtins do not read from memory. */
1494 case BUILT_IN_FREE:
1495 case BUILT_IN_MALLOC:
1496 case BUILT_IN_CALLOC:
1497 case BUILT_IN_ALLOCA:
1498 case BUILT_IN_ALLOCA_WITH_ALIGN:
1499 case BUILT_IN_STACK_SAVE:
1500 case BUILT_IN_STACK_RESTORE:
1501 case BUILT_IN_MEMSET:
1502 case BUILT_IN_TM_MEMSET:
1503 case BUILT_IN_MEMSET_CHK:
1504 case BUILT_IN_FREXP:
1505 case BUILT_IN_FREXPF:
1506 case BUILT_IN_FREXPL:
1507 case BUILT_IN_GAMMA_R:
1508 case BUILT_IN_GAMMAF_R:
1509 case BUILT_IN_GAMMAL_R:
1510 case BUILT_IN_LGAMMA_R:
1511 case BUILT_IN_LGAMMAF_R:
1512 case BUILT_IN_LGAMMAL_R:
1513 case BUILT_IN_MODF:
1514 case BUILT_IN_MODFF:
1515 case BUILT_IN_MODFL:
1516 case BUILT_IN_REMQUO:
1517 case BUILT_IN_REMQUOF:
1518 case BUILT_IN_REMQUOL:
1519 case BUILT_IN_SINCOS:
1520 case BUILT_IN_SINCOSF:
1521 case BUILT_IN_SINCOSL:
1522 case BUILT_IN_ASSUME_ALIGNED:
1523 case BUILT_IN_VA_END:
1524 return false;
1525 /* __sync_* builtins and some OpenMP builtins act as threading
1526 barriers. */
1527 #undef DEF_SYNC_BUILTIN
1528 #define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
1529 #include "sync-builtins.def"
1530 #undef DEF_SYNC_BUILTIN
1531 case BUILT_IN_GOMP_ATOMIC_START:
1532 case BUILT_IN_GOMP_ATOMIC_END:
1533 case BUILT_IN_GOMP_BARRIER:
1534 case BUILT_IN_GOMP_BARRIER_CANCEL:
1535 case BUILT_IN_GOMP_TASKWAIT:
1536 case BUILT_IN_GOMP_TASKGROUP_END:
1537 case BUILT_IN_GOMP_CRITICAL_START:
1538 case BUILT_IN_GOMP_CRITICAL_END:
1539 case BUILT_IN_GOMP_CRITICAL_NAME_START:
1540 case BUILT_IN_GOMP_CRITICAL_NAME_END:
1541 case BUILT_IN_GOMP_LOOP_END:
1542 case BUILT_IN_GOMP_LOOP_END_CANCEL:
1543 case BUILT_IN_GOMP_ORDERED_START:
1544 case BUILT_IN_GOMP_ORDERED_END:
1545 case BUILT_IN_GOMP_SECTIONS_END:
1546 case BUILT_IN_GOMP_SECTIONS_END_CANCEL:
1547 case BUILT_IN_GOMP_SINGLE_COPY_START:
1548 case BUILT_IN_GOMP_SINGLE_COPY_END:
1549 return true;
1551 default:
1552 /* Fallthru to general call handling. */;
1555 /* Check if base is a global static variable that is not read
1556 by the function. */
1557 if (callee != NULL_TREE
1558 && TREE_CODE (base) == VAR_DECL
1559 && TREE_STATIC (base))
1561 struct cgraph_node *node = cgraph_get_node (callee);
1562 bitmap not_read;
1564 /* FIXME: Callee can be an OMP builtin that does not have a call graph
1565 node yet. We should enforce that there are nodes for all decls in the
1566 IL and remove this check instead. */
1567 if (node
1568 && (not_read = ipa_reference_get_not_read_global (node))
1569 && bitmap_bit_p (not_read, DECL_UID (base)))
1570 goto process_args;
1573 /* Check if the base variable is call-used. */
1574 if (DECL_P (base))
1576 if (pt_solution_includes (gimple_call_use_set (call), base))
1577 return true;
1579 else if ((TREE_CODE (base) == MEM_REF
1580 || TREE_CODE (base) == TARGET_MEM_REF)
1581 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
1583 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
1584 if (!pi)
1585 return true;
1587 if (pt_solutions_intersect (gimple_call_use_set (call), &pi->pt))
1588 return true;
1590 else
1591 return true;
1593 /* Inspect call arguments for passed-by-value aliases. */
1594 process_args:
1595 for (i = 0; i < gimple_call_num_args (call); ++i)
1597 tree op = gimple_call_arg (call, i);
1598 int flags = gimple_call_arg_flags (call, i);
1600 if (flags & EAF_UNUSED)
1601 continue;
1603 if (TREE_CODE (op) == WITH_SIZE_EXPR)
1604 op = TREE_OPERAND (op, 0);
1606 if (TREE_CODE (op) != SSA_NAME
1607 && !is_gimple_min_invariant (op))
1609 ao_ref r;
1610 ao_ref_init (&r, op);
1611 if (refs_may_alias_p_1 (&r, ref, true))
1612 return true;
1616 return false;
1619 static bool
1620 ref_maybe_used_by_call_p (gimple call, tree ref)
1622 ao_ref r;
1623 bool res;
1624 ao_ref_init (&r, ref);
1625 res = ref_maybe_used_by_call_p_1 (call, &r);
1626 if (res)
1627 ++alias_stats.ref_maybe_used_by_call_p_may_alias;
1628 else
1629 ++alias_stats.ref_maybe_used_by_call_p_no_alias;
1630 return res;
1634 /* If the statement STMT may use the memory reference REF return
1635 true, otherwise return false. */
1637 bool
1638 ref_maybe_used_by_stmt_p (gimple stmt, tree ref)
1640 if (is_gimple_assign (stmt))
1642 tree rhs;
1644 /* All memory assign statements are single. */
1645 if (!gimple_assign_single_p (stmt))
1646 return false;
1648 rhs = gimple_assign_rhs1 (stmt);
1649 if (is_gimple_reg (rhs)
1650 || is_gimple_min_invariant (rhs)
1651 || gimple_assign_rhs_code (stmt) == CONSTRUCTOR)
1652 return false;
1654 return refs_may_alias_p (rhs, ref);
1656 else if (is_gimple_call (stmt))
1657 return ref_maybe_used_by_call_p (stmt, ref);
1658 else if (gimple_code (stmt) == GIMPLE_RETURN)
1660 tree retval = gimple_return_retval (stmt);
1661 tree base;
1662 if (retval
1663 && TREE_CODE (retval) != SSA_NAME
1664 && !is_gimple_min_invariant (retval)
1665 && refs_may_alias_p (retval, ref))
1666 return true;
1667 /* If ref escapes the function then the return acts as a use. */
1668 base = get_base_address (ref);
1669 if (!base)
1671 else if (DECL_P (base))
1672 return is_global_var (base);
1673 else if (TREE_CODE (base) == MEM_REF
1674 || TREE_CODE (base) == TARGET_MEM_REF)
1675 return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
1676 return false;
1679 return true;
1682 /* If the call in statement CALL may clobber the memory reference REF
1683 return true, otherwise return false. */
1685 static bool
1686 call_may_clobber_ref_p_1 (gimple call, ao_ref *ref)
1688 tree base;
1689 tree callee;
1691 /* If the call is pure or const it cannot clobber anything. */
1692 if (gimple_call_flags (call)
1693 & (ECF_PURE|ECF_CONST|ECF_LOOPING_CONST_OR_PURE|ECF_NOVOPS))
1694 return false;
1696 base = ao_ref_base (ref);
1697 if (!base)
1698 return true;
1700 if (TREE_CODE (base) == SSA_NAME
1701 || CONSTANT_CLASS_P (base))
1702 return false;
1704 /* A call that is not without side-effects might involve volatile
1705 accesses and thus conflicts with all other volatile accesses. */
1706 if (ref->volatile_p)
1707 return true;
1709 /* If the reference is based on a decl that is not aliased the call
1710 cannot possibly clobber it. */
1711 if (DECL_P (base)
1712 && !may_be_aliased (base)
1713 /* But local non-readonly statics can be modified through recursion
1714 or the call may implement a threading barrier which we must
1715 treat as may-def. */
1716 && (TREE_READONLY (base)
1717 || !is_global_var (base)))
1718 return false;
1720 callee = gimple_call_fndecl (call);
1722 /* Handle those builtin functions explicitly that do not act as
1723 escape points. See tree-ssa-structalias.c:find_func_aliases
1724 for the list of builtins we might need to handle here. */
1725 if (callee != NULL_TREE
1726 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
1727 switch (DECL_FUNCTION_CODE (callee))
1729 /* All the following functions clobber memory pointed to by
1730 their first argument. */
1731 case BUILT_IN_STRCPY:
1732 case BUILT_IN_STRNCPY:
1733 case BUILT_IN_MEMCPY:
1734 case BUILT_IN_MEMMOVE:
1735 case BUILT_IN_MEMPCPY:
1736 case BUILT_IN_STPCPY:
1737 case BUILT_IN_STPNCPY:
1738 case BUILT_IN_STRCAT:
1739 case BUILT_IN_STRNCAT:
1740 case BUILT_IN_MEMSET:
1741 case BUILT_IN_TM_MEMSET:
1742 CASE_BUILT_IN_TM_STORE (1):
1743 CASE_BUILT_IN_TM_STORE (2):
1744 CASE_BUILT_IN_TM_STORE (4):
1745 CASE_BUILT_IN_TM_STORE (8):
1746 CASE_BUILT_IN_TM_STORE (FLOAT):
1747 CASE_BUILT_IN_TM_STORE (DOUBLE):
1748 CASE_BUILT_IN_TM_STORE (LDOUBLE):
1749 CASE_BUILT_IN_TM_STORE (M64):
1750 CASE_BUILT_IN_TM_STORE (M128):
1751 CASE_BUILT_IN_TM_STORE (M256):
1752 case BUILT_IN_TM_MEMCPY:
1753 case BUILT_IN_TM_MEMMOVE:
1755 ao_ref dref;
1756 tree size = NULL_TREE;
1757 /* Don't pass in size for strncat, as the maximum size
1758 is strlen (dest) + n + 1 instead of n, resp.
1759 n + 1 at dest + strlen (dest), but strlen (dest) isn't
1760 known. */
1761 if (gimple_call_num_args (call) == 3
1762 && DECL_FUNCTION_CODE (callee) != BUILT_IN_STRNCAT)
1763 size = gimple_call_arg (call, 2);
1764 ao_ref_init_from_ptr_and_size (&dref,
1765 gimple_call_arg (call, 0),
1766 size);
1767 return refs_may_alias_p_1 (&dref, ref, false);
1769 case BUILT_IN_STRCPY_CHK:
1770 case BUILT_IN_STRNCPY_CHK:
1771 case BUILT_IN_MEMCPY_CHK:
1772 case BUILT_IN_MEMMOVE_CHK:
1773 case BUILT_IN_MEMPCPY_CHK:
1774 case BUILT_IN_STPCPY_CHK:
1775 case BUILT_IN_STPNCPY_CHK:
1776 case BUILT_IN_STRCAT_CHK:
1777 case BUILT_IN_STRNCAT_CHK:
1778 case BUILT_IN_MEMSET_CHK:
1780 ao_ref dref;
1781 tree size = NULL_TREE;
1782 /* Don't pass in size for __strncat_chk, as the maximum size
1783 is strlen (dest) + n + 1 instead of n, resp.
1784 n + 1 at dest + strlen (dest), but strlen (dest) isn't
1785 known. */
1786 if (gimple_call_num_args (call) == 4
1787 && DECL_FUNCTION_CODE (callee) != BUILT_IN_STRNCAT_CHK)
1788 size = gimple_call_arg (call, 2);
1789 ao_ref_init_from_ptr_and_size (&dref,
1790 gimple_call_arg (call, 0),
1791 size);
1792 return refs_may_alias_p_1 (&dref, ref, false);
      case BUILT_IN_BCOPY:
        {
          ao_ref dref;
          tree size = gimple_call_arg (call, 2);
          ao_ref_init_from_ptr_and_size (&dref,
                                         gimple_call_arg (call, 1),
                                         size);
          return refs_may_alias_p_1 (&dref, ref, false);
        }
      /* Allocating memory does not have any side-effects apart from
         being the definition point for the pointer.  */
      case BUILT_IN_MALLOC:
      case BUILT_IN_CALLOC:
      case BUILT_IN_STRDUP:
      case BUILT_IN_STRNDUP:
        /* Unix98 specifies that errno is set on allocation failure.  */
        if (flag_errno_math
            && targetm.ref_may_alias_errno (ref))
          return true;
        return false;
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_ALLOCA:
      case BUILT_IN_ALLOCA_WITH_ALIGN:
      case BUILT_IN_ASSUME_ALIGNED:
        return false;
      /* Freeing memory kills the pointed-to memory.  More importantly
         the call has to serve as a barrier for moving loads and stores
         across it.  */
      case BUILT_IN_FREE:
      case BUILT_IN_VA_END:
        {
          tree ptr = gimple_call_arg (call, 0);
          return ptr_deref_may_alias_ref_p_1 (ptr, ref);
        }
      case BUILT_IN_GAMMA_R:
      case BUILT_IN_GAMMAF_R:
      case BUILT_IN_GAMMAL_R:
      case BUILT_IN_LGAMMA_R:
      case BUILT_IN_LGAMMAF_R:
      case BUILT_IN_LGAMMAL_R:
        {
          tree out = gimple_call_arg (call, 1);
          if (ptr_deref_may_alias_ref_p_1 (out, ref))
            return true;
          if (flag_errno_math)
            break;
          return false;
        }
      case BUILT_IN_FREXP:
      case BUILT_IN_FREXPF:
      case BUILT_IN_FREXPL:
      case BUILT_IN_MODF:
      case BUILT_IN_MODFF:
      case BUILT_IN_MODFL:
        {
          tree out = gimple_call_arg (call, 1);
          return ptr_deref_may_alias_ref_p_1 (out, ref);
        }
      case BUILT_IN_REMQUO:
      case BUILT_IN_REMQUOF:
      case BUILT_IN_REMQUOL:
        {
          tree out = gimple_call_arg (call, 2);
          if (ptr_deref_may_alias_ref_p_1 (out, ref))
            return true;
          if (flag_errno_math)
            break;
          return false;
        }
      case BUILT_IN_SINCOS:
      case BUILT_IN_SINCOSF:
      case BUILT_IN_SINCOSL:
        {
          tree sin = gimple_call_arg (call, 1);
          tree cos = gimple_call_arg (call, 2);
          return (ptr_deref_may_alias_ref_p_1 (sin, ref)
                  || ptr_deref_may_alias_ref_p_1 (cos, ref));
        }
      /* __sync_* builtins and some OpenMP builtins act as threading
         barriers.  */
#undef DEF_SYNC_BUILTIN
#define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
#include "sync-builtins.def"
#undef DEF_SYNC_BUILTIN
      case BUILT_IN_GOMP_ATOMIC_START:
      case BUILT_IN_GOMP_ATOMIC_END:
      case BUILT_IN_GOMP_BARRIER:
      case BUILT_IN_GOMP_BARRIER_CANCEL:
      case BUILT_IN_GOMP_TASKWAIT:
      case BUILT_IN_GOMP_TASKGROUP_END:
      case BUILT_IN_GOMP_CRITICAL_START:
      case BUILT_IN_GOMP_CRITICAL_END:
      case BUILT_IN_GOMP_CRITICAL_NAME_START:
      case BUILT_IN_GOMP_CRITICAL_NAME_END:
      case BUILT_IN_GOMP_LOOP_END:
      case BUILT_IN_GOMP_LOOP_END_CANCEL:
      case BUILT_IN_GOMP_ORDERED_START:
      case BUILT_IN_GOMP_ORDERED_END:
      case BUILT_IN_GOMP_SECTIONS_END:
      case BUILT_IN_GOMP_SECTIONS_END_CANCEL:
      case BUILT_IN_GOMP_SINGLE_COPY_START:
      case BUILT_IN_GOMP_SINGLE_COPY_END:
        return true;
      default:
        /* Fallthru to general call handling.  */;
      }
  /* Check if base is a global static variable that is not written
     by the function.  */
  if (callee != NULL_TREE
      && TREE_CODE (base) == VAR_DECL
      && TREE_STATIC (base))
    {
      struct cgraph_node *node = cgraph_get_node (callee);
      bitmap not_written;

      if (node
          && (not_written = ipa_reference_get_not_written_global (node))
          && bitmap_bit_p (not_written, DECL_UID (base)))
        return false;
    }
  /* Check if the base variable is call-clobbered.  */
  if (DECL_P (base))
    return pt_solution_includes (gimple_call_clobber_set (call), base);
  else if ((TREE_CODE (base) == MEM_REF
            || TREE_CODE (base) == TARGET_MEM_REF)
           && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
    {
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
      if (!pi)
        return true;

      return pt_solutions_intersect (gimple_call_clobber_set (call), &pi->pt);
    }

  return true;
}
/* If the call in statement CALL may clobber the memory reference REF
   return true, otherwise return false.  */

bool
call_may_clobber_ref_p (gimple call, tree ref)
{
  bool res;
  ao_ref r;
  ao_ref_init (&r, ref);
  res = call_may_clobber_ref_p_1 (call, &r);
  if (res)
    ++alias_stats.call_may_clobber_ref_p_may_alias;
  else
    ++alias_stats.call_may_clobber_ref_p_no_alias;
  return res;
}
/* If the statement STMT may clobber the memory reference REF return true,
   otherwise return false.  */

bool
stmt_may_clobber_ref_p_1 (gimple stmt, ao_ref *ref)
{
  if (is_gimple_call (stmt))
    {
      tree lhs = gimple_call_lhs (stmt);
      if (lhs
          && TREE_CODE (lhs) != SSA_NAME)
        {
          ao_ref r;
          ao_ref_init (&r, lhs);
          if (refs_may_alias_p_1 (ref, &r, true))
            return true;
        }

      return call_may_clobber_ref_p_1 (stmt, ref);
    }
  else if (gimple_assign_single_p (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);
      if (TREE_CODE (lhs) != SSA_NAME)
        {
          ao_ref r;
          ao_ref_init (&r, lhs);
          return refs_may_alias_p_1 (ref, &r, true);
        }
    }
  else if (gimple_code (stmt) == GIMPLE_ASM)
    return true;

  return false;
}
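
/* Convenience wrapper around stmt_may_clobber_ref_p_1 that takes the
   reference as a tree and builds the ao_ref itself.  */
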
bool
stmt_may_clobber_ref_p (gimple stmt, tree ref)
{
  ao_ref r;
  ao_ref_init (&r, ref);
  return stmt_may_clobber_ref_p_1 (stmt, &r);
}
/* If STMT kills the memory reference REF return true, otherwise
   return false.  */

static bool
stmt_kills_ref_p_1 (gimple stmt, ao_ref *ref)
{
  /* For a must-alias check we need to be able to constrain
     the access properly.  */
  ao_ref_base (ref);
  if (ref->max_size == -1)
    return false;

  if (gimple_has_lhs (stmt)
      && TREE_CODE (gimple_get_lhs (stmt)) != SSA_NAME
      /* The assignment is not necessarily carried out if it can throw
         and we can catch it in the current function where we could inspect
         the previous value.
         ??? We only need to care about the RHS throwing.  For aggregate
         assignments or similar calls and non-call exceptions the LHS
         might throw as well.  */
      && !stmt_can_throw_internal (stmt))
    {
      tree base, lhs = gimple_get_lhs (stmt);
      HOST_WIDE_INT size, offset, max_size, ref_offset = ref->offset;
      base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
      /* We can get MEM[symbol: sZ, index: D.8862_1] here,
         so base == ref->base does not always hold.  */
      if (base != ref->base)
        {
          /* If both base and ref->base are MEM_REFs, only compare the
             first operand, and if the second operands are not equal
             constants, try to add the offsets into offset and
             ref_offset.  */
          if (TREE_CODE (base) == MEM_REF && TREE_CODE (ref->base) == MEM_REF
              && TREE_OPERAND (base, 0) == TREE_OPERAND (ref->base, 0))
            {
              if (!tree_int_cst_equal (TREE_OPERAND (base, 1),
                                       TREE_OPERAND (ref->base, 1)))
                {
                  double_int off1 = mem_ref_offset (base);
                  off1 = off1.lshift (BITS_PER_UNIT == 8
                                      ? 3 : exact_log2 (BITS_PER_UNIT));
                  off1 = off1 + double_int::from_shwi (offset);
                  double_int off2 = mem_ref_offset (ref->base);
                  off2 = off2.lshift (BITS_PER_UNIT == 8
                                      ? 3 : exact_log2 (BITS_PER_UNIT));
                  off2 = off2 + double_int::from_shwi (ref_offset);
                  if (off1.fits_shwi () && off2.fits_shwi ())
                    {
                      offset = off1.to_shwi ();
                      ref_offset = off2.to_shwi ();
                    }
                  else
                    size = -1;
                }
            }
          else
            size = -1;
        }
      /* For a must-alias check we need to be able to constrain
         the access properly.  */
      if (size != -1 && size == max_size)
        {
          if (offset <= ref_offset
              && offset + size >= ref_offset + ref->max_size)
            return true;
        }
    }
  if (is_gimple_call (stmt))
    {
      tree callee = gimple_call_fndecl (stmt);
      if (callee != NULL_TREE
          && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
        switch (DECL_FUNCTION_CODE (callee))
          {
          case BUILT_IN_FREE:
            {
              tree ptr = gimple_call_arg (stmt, 0);
              tree base = ao_ref_base (ref);
              if (base && TREE_CODE (base) == MEM_REF
                  && TREE_OPERAND (base, 0) == ptr)
                return true;
              break;
            }

          case BUILT_IN_MEMCPY:
          case BUILT_IN_MEMPCPY:
          case BUILT_IN_MEMMOVE:
          case BUILT_IN_MEMSET:
          case BUILT_IN_MEMCPY_CHK:
          case BUILT_IN_MEMPCPY_CHK:
          case BUILT_IN_MEMMOVE_CHK:
          case BUILT_IN_MEMSET_CHK:
            {
              tree dest = gimple_call_arg (stmt, 0);
              tree len = gimple_call_arg (stmt, 2);
              tree base = NULL_TREE;
              HOST_WIDE_INT offset = 0;
              if (!host_integerp (len, 0))
                return false;
              if (TREE_CODE (dest) == ADDR_EXPR)
                base = get_addr_base_and_unit_offset (TREE_OPERAND (dest, 0),
                                                      &offset);
              else if (TREE_CODE (dest) == SSA_NAME)
                base = dest;
              if (base
                  && base == ao_ref_base (ref))
                {
                  HOST_WIDE_INT size = TREE_INT_CST_LOW (len);
                  if (offset <= ref->offset / BITS_PER_UNIT
                      && (offset + size
                          >= ((ref->offset + ref->max_size + BITS_PER_UNIT - 1)
                              / BITS_PER_UNIT)))
                    return true;
                }
              break;
            }

          case BUILT_IN_VA_END:
            {
              tree ptr = gimple_call_arg (stmt, 0);
              if (TREE_CODE (ptr) == ADDR_EXPR)
                {
                  tree base = ao_ref_base (ref);
                  if (TREE_OPERAND (ptr, 0) == base)
                    return true;
                }
              break;
            }

          default:;
          }
    }

  return false;
}
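
/* Convenience wrapper around stmt_kills_ref_p_1 that takes the
   reference as a tree and builds the ao_ref itself.  */
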
bool
stmt_kills_ref_p (gimple stmt, tree ref)
{
  ao_ref r;
  ao_ref_init (&r, ref);
  return stmt_kills_ref_p_1 (stmt, &r);
}
/* Walk the virtual use-def chain of VUSE until hitting the virtual operand
   TARGET, or a statement clobbering the memory reference REF, in which
   case false is returned.  The walk starts with VUSE, one argument of PHI.  */

static bool
maybe_skip_until (gimple phi, tree target, ao_ref *ref,
                  tree vuse, unsigned int *cnt, bitmap *visited,
                  bool abort_on_visited)
{
  basic_block bb = gimple_bb (phi);

  if (!*visited)
    *visited = BITMAP_ALLOC (NULL);

  bitmap_set_bit (*visited, SSA_NAME_VERSION (PHI_RESULT (phi)));

  /* Walk until we hit the target.  */
  while (vuse != target)
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
      /* Recurse for PHI nodes.  */
      if (gimple_code (def_stmt) == GIMPLE_PHI)
        {
          /* An already visited PHI node ends the walk successfully.  */
          if (bitmap_bit_p (*visited, SSA_NAME_VERSION (PHI_RESULT (def_stmt))))
            return !abort_on_visited;
          vuse = get_continuation_for_phi (def_stmt, ref, cnt,
                                           visited, abort_on_visited);
          if (!vuse)
            return false;
          continue;
        }
      else if (gimple_nop_p (def_stmt))
        return false;
      else
        {
          /* A clobbering statement or the end of the IL ends the walk
             with failure.  */
          ++*cnt;
          if (stmt_may_clobber_ref_p_1 (def_stmt, ref))
            return false;
        }
      /* If we reach a new basic-block see if we already skipped it
         in a previous walk that ended successfully.  */
      if (gimple_bb (def_stmt) != bb)
        {
          if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (vuse)))
            return !abort_on_visited;
          bb = gimple_bb (def_stmt);
        }
      vuse = gimple_vuse (def_stmt);
    }
  return true;
}
/* For two PHI arguments ARG0 and ARG1 try to skip non-aliasing code
   until we hit the phi argument definition that dominates the other one.
   Return that, or NULL_TREE if there is no such definition.  */

static tree
get_continuation_for_phi_1 (gimple phi, tree arg0, tree arg1,
                            ao_ref *ref, unsigned int *cnt,
                            bitmap *visited, bool abort_on_visited)
{
  gimple def0 = SSA_NAME_DEF_STMT (arg0);
  gimple def1 = SSA_NAME_DEF_STMT (arg1);
  tree common_vuse;

  if (arg0 == arg1)
    return arg0;
  else if (gimple_nop_p (def0)
           || (!gimple_nop_p (def1)
               && dominated_by_p (CDI_DOMINATORS,
                                  gimple_bb (def1), gimple_bb (def0))))
    {
      if (maybe_skip_until (phi, arg0, ref, arg1, cnt,
                            visited, abort_on_visited))
        return arg0;
    }
  else if (gimple_nop_p (def1)
           || dominated_by_p (CDI_DOMINATORS,
                              gimple_bb (def0), gimple_bb (def1)))
    {
      if (maybe_skip_until (phi, arg1, ref, arg0, cnt,
                            visited, abort_on_visited))
        return arg1;
    }
  /* Special case of a diamond:
       MEM_1 = ...
       goto (cond) ? L1 : L2
       L1: store1 = ...    #MEM_2 = vuse(MEM_1)
           goto L3
       L2: store2 = ...    #MEM_3 = vuse(MEM_1)
       L3: MEM_4 = PHI<MEM_2, MEM_3>
     We were called with the PHI at L3, MEM_2 and MEM_3 don't
     dominate each other, but still we can easily skip this PHI node
     if we recognize that the vuse MEM operand is the same for both,
     and that we can skip both statements (they don't clobber us).
     This is still linear.  Don't use maybe_skip_until, that might
     potentially be slow.  */
  else if ((common_vuse = gimple_vuse (def0))
           && common_vuse == gimple_vuse (def1))
    {
      *cnt += 2;
      if (!stmt_may_clobber_ref_p_1 (def0, ref)
          && !stmt_may_clobber_ref_p_1 (def1, ref))
        return common_vuse;
    }

  return NULL_TREE;
}
/* Starting from a PHI node for the virtual operand of the memory reference
   REF find a continuation virtual operand that allows us to continue walking
   statements dominating PHI, skipping only statements that cannot possibly
   clobber REF.  Increments *CNT for each alias disambiguation done.
   Returns NULL_TREE if no suitable virtual operand can be found.  */

tree
get_continuation_for_phi (gimple phi, ao_ref *ref,
                          unsigned int *cnt, bitmap *visited,
                          bool abort_on_visited)
{
  unsigned nargs = gimple_phi_num_args (phi);

  /* A single-argument PHI we can simply look through.  */
  if (nargs == 1)
    return PHI_ARG_DEF (phi, 0);

  /* For two or more arguments try to pairwise skip non-aliasing code
     until we hit the phi argument definition that dominates the other one.  */
  else if (nargs >= 2)
    {
      tree arg0, arg1;
      unsigned i;

      /* Find a candidate for the virtual operand whose definition
         dominates those of all others.  */
      arg0 = PHI_ARG_DEF (phi, 0);
      if (!SSA_NAME_IS_DEFAULT_DEF (arg0))
        for (i = 1; i < nargs; ++i)
          {
            arg1 = PHI_ARG_DEF (phi, i);
            if (SSA_NAME_IS_DEFAULT_DEF (arg1))
              {
                arg0 = arg1;
                break;
              }
            if (dominated_by_p (CDI_DOMINATORS,
                                gimple_bb (SSA_NAME_DEF_STMT (arg0)),
                                gimple_bb (SSA_NAME_DEF_STMT (arg1))))
              arg0 = arg1;
          }

      /* Then pairwise reduce against the found candidate.  */
      for (i = 0; i < nargs; ++i)
        {
          arg1 = PHI_ARG_DEF (phi, i);
          arg0 = get_continuation_for_phi_1 (phi, arg0, arg1, ref,
                                             cnt, visited, abort_on_visited);
          if (!arg0)
            return NULL_TREE;
        }

      return arg0;
    }

  return NULL_TREE;
}
/* Based on the memory reference REF and its virtual use VUSE call
   WALKER for each virtual use that is equivalent to VUSE, including VUSE
   itself.  That is, for each virtual use whose defining statement does
   not clobber REF.

   WALKER is called with REF, the current virtual use and DATA.  If
   WALKER returns non-NULL the walk stops and its result is returned.
   At the end of a non-successful walk NULL is returned.

   TRANSLATE, if non-NULL, is called with a pointer to REF, the virtual
   use whose defining statement may clobber REF, and DATA.
   If TRANSLATE returns (void *)-1 the walk stops and NULL is returned.
   If TRANSLATE returns non-NULL the walk stops and its result is returned.
   If TRANSLATE returns NULL the walk continues, and TRANSLATE is supposed
   to adjust REF and *DATA to make that valid.

   TODO: Cache the vector of equivalent vuses per ref, vuse pair.  */
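
/* As a rough illustration (not code from this file; the callback name
   is made up), a client that just wants the first virtual use reachable
   from a statement can use a walker that stops immediately:

     static void *
     first_vuse_walker (ao_ref *ref ATTRIBUTE_UNUSED, tree vuse,
                        unsigned int cnt ATTRIBUTE_UNUSED,
                        void *data ATTRIBUTE_UNUSED)
     {
       return vuse;
     }

     ao_ref r;
     ao_ref_init (&r, ref_tree);
     void *res = walk_non_aliased_vuses (&r, gimple_vuse (stmt),
                                         first_vuse_walker, NULL, NULL);

   Passing a NULL TRANSLATE means the walk simply stops at the first
   possibly-clobbering statement.  */
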
void *
walk_non_aliased_vuses (ao_ref *ref, tree vuse,
                        void *(*walker)(ao_ref *, tree, unsigned int, void *),
                        void *(*translate)(ao_ref *, tree, void *), void *data)
{
  bitmap visited = NULL;
  void *res;
  unsigned int cnt = 0;
  bool translated = false;

  timevar_push (TV_ALIAS_STMT_WALK);

  do
    {
      gimple def_stmt;

      /* ??? Do we want to account this to TV_ALIAS_STMT_WALK?  */
      res = (*walker) (ref, vuse, cnt, data);
      /* Abort walk.  */
      if (res == (void *)-1)
        {
          res = NULL;
          break;
        }
      /* Lookup succeeded.  */
      else if (res != NULL)
        break;

      def_stmt = SSA_NAME_DEF_STMT (vuse);
      if (gimple_nop_p (def_stmt))
        break;
      else if (gimple_code (def_stmt) == GIMPLE_PHI)
        vuse = get_continuation_for_phi (def_stmt, ref, &cnt,
                                         &visited, translated);
      else
        {
          cnt++;
          if (stmt_may_clobber_ref_p_1 (def_stmt, ref))
            {
              if (!translate)
                break;
              res = (*translate) (ref, vuse, data);
              /* Failed lookup and translation.  */
              if (res == (void *)-1)
                {
                  res = NULL;
                  break;
                }
              /* Lookup succeeded.  */
              else if (res != NULL)
                break;
              /* Translation succeeded, continue walking.  */
              translated = true;
            }
          vuse = gimple_vuse (def_stmt);
        }
    }
  while (vuse);

  if (visited)
    BITMAP_FREE (visited);

  timevar_pop (TV_ALIAS_STMT_WALK);

  return res;
}
/* Based on the memory reference REF call WALKER for each vdef whose
   defining statement may clobber REF, starting with VDEF.  If REF
   is NULL_TREE, each defining statement is visited.

   WALKER is called with REF, the current vdef and DATA.  If WALKER
   returns true the walk is stopped, otherwise it continues.

   At PHI nodes walk_aliased_vdefs forks into one walk for each
   PHI argument (but only one walk continues on merge points), the
   return value is true if any of the walks was successful.

   The function returns the number of statements walked.  */
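
/* A minimal caller sketch (illustrative only; the callback name is
   made up): counting all possibly-clobbering definitions of a
   reference R.

     static bool
     count_clobbers (ao_ref *ref ATTRIBUTE_UNUSED,
                     tree vdef ATTRIBUTE_UNUSED, void *data)
     {
       ++*(unsigned *)data;
       return false;  // returning false continues the walk
     }

     unsigned n = 0;
     walk_aliased_vdefs (&r, gimple_vuse (stmt), count_clobbers, &n, NULL);

   Passing NULL for VISITED lets walk_aliased_vdefs allocate and free
   its own bitmap, as done below.  */
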
static unsigned int
walk_aliased_vdefs_1 (ao_ref *ref, tree vdef,
                      bool (*walker)(ao_ref *, tree, void *), void *data,
                      bitmap *visited, unsigned int cnt)
{
  do
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (vdef);

      if (*visited
          && !bitmap_set_bit (*visited, SSA_NAME_VERSION (vdef)))
        return cnt;

      if (gimple_nop_p (def_stmt))
        return cnt;
      else if (gimple_code (def_stmt) == GIMPLE_PHI)
        {
          unsigned i;
          if (!*visited)
            *visited = BITMAP_ALLOC (NULL);
          for (i = 0; i < gimple_phi_num_args (def_stmt); ++i)
            cnt += walk_aliased_vdefs_1 (ref, gimple_phi_arg_def (def_stmt, i),
                                         walker, data, visited, 0);
          return cnt;
        }

      /* ??? Do we want to account this to TV_ALIAS_STMT_WALK?  */
      cnt++;
      if ((!ref
           || stmt_may_clobber_ref_p_1 (def_stmt, ref))
          && (*walker) (ref, vdef, data))
        return cnt;

      vdef = gimple_vuse (def_stmt);
    }
  while (1);
}
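
/* Entry point wrapping walk_aliased_vdefs_1: accounts the walk to
   TV_ALIAS_STMT_WALK and falls back to a local visited bitmap when
   the caller does not supply one.  */
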
unsigned int
walk_aliased_vdefs (ao_ref *ref, tree vdef,
                    bool (*walker)(ao_ref *, tree, void *), void *data,
                    bitmap *visited)
{
  bitmap local_visited = NULL;
  unsigned int ret;

  timevar_push (TV_ALIAS_STMT_WALK);

  ret = walk_aliased_vdefs_1 (ref, vdef, walker, data,
                              visited ? visited : &local_visited, 0);
  if (local_visited)
    BITMAP_FREE (local_visited);

  timevar_pop (TV_ALIAS_STMT_WALK);

  return ret;
}