* gcc.dg/c11-complex-1.c: Use dg-add-options ieee.
[official-gcc.git] / gcc / tree-ssa-alias.c
blobc1028fb760623a0b8cc380b714052d2a64f47b7a
1 /* Alias analysis for trees.
2 Copyright (C) 2004-2013 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "tm_p.h"
27 #include "target.h"
28 #include "basic-block.h"
29 #include "timevar.h" /* for TV_ALIAS_STMT_WALK */
30 #include "ggc.h"
31 #include "langhooks.h"
32 #include "flags.h"
33 #include "function.h"
34 #include "tree-pretty-print.h"
35 #include "dumpfile.h"
36 #include "gimple.h"
37 #include "gimple-ssa.h"
38 #include "tree-ssanames.h"
39 #include "tree-dfa.h"
40 #include "tree-inline.h"
41 #include "params.h"
42 #include "vec.h"
43 #include "pointer-set.h"
44 #include "alloc-pool.h"
45 #include "tree-ssa-alias.h"
46 #include "ipa-reference.h"
48 /* Broad overview of how alias analysis on gimple works:
50 Statements clobbering or using memory are linked through the
51 virtual operand factored use-def chain. The virtual operand
52 is unique per function, its symbol is accessible via gimple_vop (cfun).
53 Virtual operands are used for efficiently walking memory statements
54 in the gimple IL and are useful for things like value-numbering as
55 a generation count for memory references.
57 SSA_NAME pointers may have associated points-to information
58 accessible via the SSA_NAME_PTR_INFO macro. Flow-insensitive
59 points-to information is (re-)computed by the TODO_rebuild_alias
60 pass manager todo. Points-to information is also used for more
61 precise tracking of call-clobbered and call-used variables and
62 related disambiguations.
64 This file contains functions for disambiguating memory references,
65 the so called alias-oracle and tools for walking of the gimple IL.
67 The main alias-oracle entry-points are
69 bool stmt_may_clobber_ref_p (gimple, tree)
71 This function queries if a statement may invalidate (parts of)
72 the memory designated by the reference tree argument.
74 bool ref_maybe_used_by_stmt_p (gimple, tree)
76 This function queries if a statement may need (parts of) the
77 memory designated by the reference tree argument.
79 There are variants of these functions that only handle the call
80 part of a statement, call_may_clobber_ref_p and ref_maybe_used_by_call_p.
81 Note that these do not disambiguate against a possible call lhs.
83 bool refs_may_alias_p (tree, tree)
85 This function tries to disambiguate two reference trees.
87 bool ptr_deref_may_alias_global_p (tree)
89 This function queries if dereferencing a pointer variable may
90 alias global memory.
92 More low-level disambiguators are available and documented in
93 this file. Low-level disambiguators dealing with points-to
94 information are in tree-ssa-structalias.c. */
/* Query statistics for the different low-level disambiguators.
   A high-level query may trigger multiple of them.
   Each disambiguator has a pair of counters: the _no_alias member
   counts successful disambiguations, the _may_alias member counts
   the queries that could not be disambiguated.  The sum of the pair
   is the total number of queries for that disambiguator.  */

static struct {
  unsigned HOST_WIDE_INT refs_may_alias_p_may_alias;
  unsigned HOST_WIDE_INT refs_may_alias_p_no_alias;
  unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_may_alias;
  unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_no_alias;
  unsigned HOST_WIDE_INT call_may_clobber_ref_p_may_alias;
  unsigned HOST_WIDE_INT call_may_clobber_ref_p_no_alias;
} alias_stats;
109 void
110 dump_alias_stats (FILE *s)
112 fprintf (s, "\nAlias oracle query stats:\n");
113 fprintf (s, " refs_may_alias_p: "
114 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
115 HOST_WIDE_INT_PRINT_DEC" queries\n",
116 alias_stats.refs_may_alias_p_no_alias,
117 alias_stats.refs_may_alias_p_no_alias
118 + alias_stats.refs_may_alias_p_may_alias);
119 fprintf (s, " ref_maybe_used_by_call_p: "
120 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
121 HOST_WIDE_INT_PRINT_DEC" queries\n",
122 alias_stats.ref_maybe_used_by_call_p_no_alias,
123 alias_stats.refs_may_alias_p_no_alias
124 + alias_stats.ref_maybe_used_by_call_p_may_alias);
125 fprintf (s, " call_may_clobber_ref_p: "
126 HOST_WIDE_INT_PRINT_DEC" disambiguations, "
127 HOST_WIDE_INT_PRINT_DEC" queries\n",
128 alias_stats.call_may_clobber_ref_p_no_alias,
129 alias_stats.call_may_clobber_ref_p_no_alias
130 + alias_stats.call_may_clobber_ref_p_may_alias);
134 /* Return true, if dereferencing PTR may alias with a global variable. */
136 bool
137 ptr_deref_may_alias_global_p (tree ptr)
139 struct ptr_info_def *pi;
141 /* If we end up with a pointer constant here that may point
142 to global memory. */
143 if (TREE_CODE (ptr) != SSA_NAME)
144 return true;
146 pi = SSA_NAME_PTR_INFO (ptr);
148 /* If we do not have points-to information for this variable,
149 we have to punt. */
150 if (!pi)
151 return true;
153 /* ??? This does not use TBAA to prune globals ptr may not access. */
154 return pt_solution_includes_global (&pi->pt);
/* Return true if dereferencing PTR may alias DECL.
   The caller is responsible for applying TBAA to see if PTR
   may access DECL at all.  */

static bool
ptr_deref_may_alias_decl_p (tree ptr, tree decl)
{
  struct ptr_info_def *pi;

  /* Conversions are irrelevant for points-to information and
     data-dependence analysis can feed us those.  */
  STRIP_NOPS (ptr);

  /* Anything we do not explicitly handle aliases.  */
  if ((TREE_CODE (ptr) != SSA_NAME
       && TREE_CODE (ptr) != ADDR_EXPR
       && TREE_CODE (ptr) != POINTER_PLUS_EXPR)
      || !POINTER_TYPE_P (TREE_TYPE (ptr))
      || (TREE_CODE (decl) != VAR_DECL
	  && TREE_CODE (decl) != PARM_DECL
	  && TREE_CODE (decl) != RESULT_DECL))
    return true;

  /* Disregard pointer offsetting: strip all POINTER_PLUS_EXPRs and
     recurse on the underlying pointer.  */
  if (TREE_CODE (ptr) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr = TREE_OPERAND (ptr, 0);
	}
      while (TREE_CODE (ptr) == POINTER_PLUS_EXPR);
      return ptr_deref_may_alias_decl_p (ptr, decl);
    }

  /* ADDR_EXPR pointers either just offset another pointer or directly
     specify the pointed-to set.  */
  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	/* &*p — continue the analysis with the inner pointer.  */
	ptr = TREE_OPERAND (base, 0);
      else if (base
	       && DECL_P (base))
	/* &var aliases DECL exactly when var is DECL.  */
	return base == decl;
      else if (base
	       && CONSTANT_CLASS_P (base))
	/* The address of a constant cannot point to a decl.  */
	return false;
      else
	return true;
    }

  /* Non-aliased variables can not be pointed to.  */
  if (!may_be_aliased (decl))
    return false;

  /* If we do not have useful points-to information for this pointer
     we cannot disambiguate anything else.  */
  pi = SSA_NAME_PTR_INFO (ptr);
  if (!pi)
    return true;

  return pt_solution_includes (&pi->pt, decl);
}
/* Return true if dereferenced PTR1 and PTR2 may alias.
   The caller is responsible for applying TBAA to see if accesses
   through PTR1 and PTR2 may conflict at all.  */

bool
ptr_derefs_may_alias_p (tree ptr1, tree ptr2)
{
  struct ptr_info_def *pi1, *pi2;

  /* Conversions are irrelevant for points-to information and
     data-dependence analysis can feed us those.  */
  STRIP_NOPS (ptr1);
  STRIP_NOPS (ptr2);

  /* Disregard pointer offsetting on either operand, recursing with
     the stripped pointer.  */
  if (TREE_CODE (ptr1) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr1 = TREE_OPERAND (ptr1, 0);
	}
      while (TREE_CODE (ptr1) == POINTER_PLUS_EXPR);
      return ptr_derefs_may_alias_p (ptr1, ptr2);
    }
  if (TREE_CODE (ptr2) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr2 = TREE_OPERAND (ptr2, 0);
	}
      while (TREE_CODE (ptr2) == POINTER_PLUS_EXPR);
      return ptr_derefs_may_alias_p (ptr1, ptr2);
    }

  /* ADDR_EXPR pointers either just offset another pointer or directly
     specify the pointed-to set.  */
  if (TREE_CODE (ptr1) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr1, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	return ptr_derefs_may_alias_p (TREE_OPERAND (base, 0), ptr2);
      else if (base
	       && DECL_P (base))
	/* &decl vs. *ptr2 — reduce to the pointer-vs-decl query.  */
	return ptr_deref_may_alias_decl_p (ptr2, base);
      else
	return true;
    }
  if (TREE_CODE (ptr2) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr2, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	return ptr_derefs_may_alias_p (ptr1, TREE_OPERAND (base, 0));
      else if (base
	       && DECL_P (base))
	return ptr_deref_may_alias_decl_p (ptr1, base);
      else
	return true;
    }

  /* From here we require SSA name pointers.  Anything else aliases.  */
  if (TREE_CODE (ptr1) != SSA_NAME
      || TREE_CODE (ptr2) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (ptr1))
      || !POINTER_TYPE_P (TREE_TYPE (ptr2)))
    return true;

  /* We may end up with two empty points-to solutions for two same pointers.
     In this case we still want to say both pointers alias, so shortcut
     that here.  */
  if (ptr1 == ptr2)
    return true;

  /* If we do not have useful points-to information for either pointer
     we cannot disambiguate anything else.  */
  pi1 = SSA_NAME_PTR_INFO (ptr1);
  pi2 = SSA_NAME_PTR_INFO (ptr2);
  if (!pi1 || !pi2)
    return true;

  /* ??? This does not use TBAA to prune decls from the intersection
     that not both pointers may access.  */
  return pt_solutions_intersect (&pi1->pt, &pi2->pt);
}
311 /* Return true if dereferencing PTR may alias *REF.
312 The caller is responsible for applying TBAA to see if PTR
313 may access *REF at all. */
315 static bool
316 ptr_deref_may_alias_ref_p_1 (tree ptr, ao_ref *ref)
318 tree base = ao_ref_base (ref);
320 if (TREE_CODE (base) == MEM_REF
321 || TREE_CODE (base) == TARGET_MEM_REF)
322 return ptr_derefs_may_alias_p (ptr, TREE_OPERAND (base, 0));
323 else if (DECL_P (base))
324 return ptr_deref_may_alias_decl_p (ptr, base);
326 return true;
329 /* Return true whether REF may refer to global memory. */
331 bool
332 ref_may_alias_global_p (tree ref)
334 tree base = get_base_address (ref);
335 if (DECL_P (base))
336 return is_global_var (base);
337 else if (TREE_CODE (base) == MEM_REF
338 || TREE_CODE (base) == TARGET_MEM_REF)
339 return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
340 return true;
/* Return true whether STMT may clobber global memory.  */

bool
stmt_may_clobber_global_p (gimple stmt)
{
  tree lhs;

  /* A statement without a virtual definition does not write memory
     at all.  */
  if (!gimple_vdef (stmt))
    return false;

  /* ??? We can ask the oracle whether an artificial pointer
     dereference with a pointer with points-to information covering
     all global memory (what about non-address taken memory?) maybe
     clobbered by this call.  As there is at the moment no convenient
     way of doing that without generating garbage do some manual
     checking instead.
     ??? We could make a NULL ao_ref argument to the various
     predicates special, meaning any global memory.  */

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      lhs = gimple_assign_lhs (stmt);
      /* An SSA-name lhs is a register store, not a memory clobber.  */
      return (TREE_CODE (lhs) != SSA_NAME
	      && ref_may_alias_global_p (lhs));
    case GIMPLE_CALL:
      return true;
    default:
      return true;
    }
}
/* Dump alias information on FILE.  Prints the aliased local symbols,
   the ESCAPED solution and the flow-insensitive points-to solution of
   every SSA pointer of the current function.  */

void
dump_alias_info (FILE *file)
{
  unsigned i;
  const char *funcname
    = lang_hooks.decl_printable_name (current_function_decl, 2);
  tree var;

  fprintf (file, "\n\nAlias information for %s\n\n", funcname);

  fprintf (file, "Aliased symbols\n\n");

  FOR_EACH_LOCAL_DECL (cfun, i, var)
    {
      if (may_be_aliased (var))
	dump_variable (file, var);
    }

  fprintf (file, "\nCall clobber information\n");

  fprintf (file, "\nESCAPED");
  dump_points_to_solution (file, &cfun->gimple_df->escaped);

  fprintf (file, "\n\nFlow-insensitive points-to information\n\n");

  /* SSA name 0 is unused, hence start at 1.  */
  for (i = 1; i < num_ssa_names; i++)
    {
      tree ptr = ssa_name (i);
      struct ptr_info_def *pi;

      /* Skip released names and non-pointers.  */
      if (ptr == NULL_TREE
	  || !POINTER_TYPE_P (TREE_TYPE (ptr))
	  || SSA_NAME_IN_FREE_LIST (ptr))
	continue;

      pi = SSA_NAME_PTR_INFO (ptr);
      if (pi)
	dump_points_to_info_for (file, ptr);
    }

  fprintf (file, "\n");
}
/* Dump alias information on stderr.  Convenience wrapper for use from
   a debugger.  */

DEBUG_FUNCTION void
debug_alias_info (void)
{
  dump_alias_info (stderr);
}
/* Dump the points-to set *PT into FILE.  Each set flag and the
   variable bitmap (with its nonlocal/escaped qualifiers) is printed
   as a comma-separated annotation.  */

void
dump_points_to_solution (FILE *file, struct pt_solution *pt)
{
  if (pt->anything)
    fprintf (file, ", points-to anything");

  if (pt->nonlocal)
    fprintf (file, ", points-to non-local");

  if (pt->escaped)
    fprintf (file, ", points-to escaped");

  if (pt->ipa_escaped)
    fprintf (file, ", points-to unit escaped");

  if (pt->null)
    fprintf (file, ", points-to NULL");

  if (pt->vars)
    {
      fprintf (file, ", points-to vars: ");
      dump_decl_set (file, pt->vars);
      /* Print the most specific combination of the vars qualifiers.  */
      if (pt->vars_contains_nonlocal
	  && pt->vars_contains_escaped_heap)
	fprintf (file, " (nonlocal, escaped heap)");
      else if (pt->vars_contains_nonlocal
	       && pt->vars_contains_escaped)
	fprintf (file, " (nonlocal, escaped)");
      else if (pt->vars_contains_nonlocal)
	fprintf (file, " (nonlocal)");
      else if (pt->vars_contains_escaped_heap)
	fprintf (file, " (escaped heap)");
      else if (pt->vars_contains_escaped)
	fprintf (file, " (escaped)");
    }
}
/* Unified dump function for pt_solution.  Overload taking a
   reference; dumps to stderr.  */

DEBUG_FUNCTION void
debug (pt_solution &ref)
{
  dump_points_to_solution (stderr, &ref);
}
/* Overload of the unified dump function taking a pointer; prints
   "<nil>" for a NULL argument so it is safe on any debugger value.  */

DEBUG_FUNCTION void
debug (pt_solution *ptr)
{
  if (ptr)
    debug (*ptr);
  else
    fprintf (stderr, "<nil>\n");
}
/* Dump points-to information for SSA_NAME PTR into FILE.  Without
   computed points-to information the name conservatively points to
   anything.  */

void
dump_points_to_info_for (FILE *file, tree ptr)
{
  struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);

  print_generic_expr (file, ptr, dump_flags);

  if (pi)
    dump_points_to_solution (file, &pi->pt);
  else
    fprintf (file, ", points-to anything");

  fprintf (file, "\n");
}
/* Dump points-to information for VAR into stderr.  Convenience wrapper
   for use from a debugger.  */

DEBUG_FUNCTION void
debug_points_to_info_for (tree var)
{
  dump_points_to_info_for (stderr, var);
}
516 /* Initializes the alias-oracle reference representation *R from REF. */
518 void
519 ao_ref_init (ao_ref *r, tree ref)
521 r->ref = ref;
522 r->base = NULL_TREE;
523 r->offset = 0;
524 r->size = -1;
525 r->max_size = -1;
526 r->ref_alias_set = -1;
527 r->base_alias_set = -1;
528 r->volatile_p = ref ? TREE_THIS_VOLATILE (ref) : false;
/* Returns the base object of the memory reference *REF.  Computes and
   caches base, offset, size and max_size on the first call.  */

tree
ao_ref_base (ao_ref *ref)
{
  if (ref->base)
    return ref->base;
  ref->base = get_ref_base_and_extent (ref->ref, &ref->offset, &ref->size,
				       &ref->max_size);
  return ref->base;
}
/* Returns the base object alias set of the memory reference *REF.
   The result is cached in ref->base_alias_set; alias set 0 is used
   when no reference tree is available.  */

static alias_set_type
ao_ref_base_alias_set (ao_ref *ref)
{
  tree base_ref;
  if (ref->base_alias_set != -1)
    return ref->base_alias_set;
  if (!ref->ref)
    return 0;
  /* Strip handled components to reach the innermost base reference.  */
  base_ref = ref->ref;
  while (handled_component_p (base_ref))
    base_ref = TREE_OPERAND (base_ref, 0);
  ref->base_alias_set = get_alias_set (base_ref);
  return ref->base_alias_set;
}
560 /* Returns the reference alias set of the memory reference *REF. */
562 alias_set_type
563 ao_ref_alias_set (ao_ref *ref)
565 if (ref->ref_alias_set != -1)
566 return ref->ref_alias_set;
567 ref->ref_alias_set = get_alias_set (ref->ref);
568 return ref->ref_alias_set;
/* Init an alias-oracle reference representation from a gimple pointer
   PTR and a gimple size SIZE in bytes.  If SIZE is NULL_TREE then the
   size is assumed to be unknown.  The access is assumed to be only
   to or after of the pointer target, not before it.  */

void
ao_ref_init_from_ptr_and_size (ao_ref *ref, tree ptr, tree size)
{
  HOST_WIDE_INT t, extra_offset = 0;
  ref->ref = NULL_TREE;
  if (TREE_CODE (ptr) == SSA_NAME)
    {
      /* Look through a defining ADDR_EXPR or a pointer-plus with a
	 constant offset to get at the underlying address.  */
      gimple stmt = SSA_NAME_DEF_STMT (ptr);
      if (gimple_assign_single_p (stmt)
	  && gimple_assign_rhs_code (stmt) == ADDR_EXPR)
	ptr = gimple_assign_rhs1 (stmt);
      else if (is_gimple_assign (stmt)
	       && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
	       && TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST)
	{
	  ptr = gimple_assign_rhs1 (stmt);
	  /* Record the byte offset in bits for later adjustment.  */
	  extra_offset = BITS_PER_UNIT
			 * int_cst_value (gimple_assign_rhs2 (stmt));
	}
    }

  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      ref->base = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &t);
      if (ref->base)
	ref->offset = BITS_PER_UNIT * t;
      else
	{
	  /* Variable offset within the base — the size relative to
	     the base is unknown.  */
	  size = NULL_TREE;
	  ref->offset = 0;
	  ref->base = get_base_address (TREE_OPERAND (ptr, 0));
	}
    }
  else
    {
      /* Build an artificial MEM_REF base for the plain pointer.  */
      ref->base = build2 (MEM_REF, char_type_node,
			  ptr, null_pointer_node);
      ref->offset = 0;
    }
  ref->offset += extra_offset;
  /* Only use SIZE if converting it to bits does not overflow.  */
  if (size
      && tree_fits_shwi_p (size)
      && TREE_INT_CST_LOW (size) * BITS_PER_UNIT / BITS_PER_UNIT
	 == TREE_INT_CST_LOW (size))
    ref->max_size = ref->size = TREE_INT_CST_LOW (size) * BITS_PER_UNIT;
  else
    ref->max_size = ref->size = -1;
  ref->ref_alias_set = 0;
  ref->base_alias_set = 0;
  ref->volatile_p = false;
}
/* Return 1 if TYPE1 and TYPE2 are to be considered equivalent for the
   purpose of TBAA.  Return 0 if they are distinct and -1 if we cannot
   decide.  */

static inline int
same_type_for_tbaa (tree type1, tree type2)
{
  type1 = TYPE_MAIN_VARIANT (type1);
  type2 = TYPE_MAIN_VARIANT (type2);

  /* If we would have to do structural comparison bail out.  */
  if (TYPE_STRUCTURAL_EQUALITY_P (type1)
      || TYPE_STRUCTURAL_EQUALITY_P (type2))
    return -1;

  /* Compare the canonical types.  */
  if (TYPE_CANONICAL (type1) == TYPE_CANONICAL (type2))
    return 1;

  /* ??? Array types are not properly unified in all cases as we have
     spurious changes in the index types for example.  Removing this
     causes all sorts of problems with the Fortran frontend.  */
  if (TREE_CODE (type1) == ARRAY_TYPE
      && TREE_CODE (type2) == ARRAY_TYPE)
    return -1;

  /* ??? In Ada, an lvalue of an unconstrained type can be used to access an
     object of one of its constrained subtypes, e.g. when a function with an
     unconstrained parameter passed by reference is called on an object and
     inlined.  But, even in the case of a fixed size, type and subtypes are
     not equivalent enough as to share the same TYPE_CANONICAL, since this
     would mean that conversions between them are useless, whereas they are
     not (e.g. type and subtypes can have different modes).  So, in the end,
     they are only guaranteed to have the same alias set.  */
  if (get_alias_set (type1) == get_alias_set (type2))
    return -1;

  /* The types are known to be not equal.  */
  return 0;
}
/* Determine if the two component references REF1 and REF2 which are
   based on access types TYPE1 and TYPE2 and of which at least one is based
   on an indirect reference may alias.  REF2 is the only one that can
   be a decl in which case REF2_IS_DECL is true.
   REF1_ALIAS_SET, BASE1_ALIAS_SET, REF2_ALIAS_SET and BASE2_ALIAS_SET
   are the respective alias sets.  */

static bool
aliasing_component_refs_p (tree ref1,
			   alias_set_type ref1_alias_set,
			   alias_set_type base1_alias_set,
			   HOST_WIDE_INT offset1, HOST_WIDE_INT max_size1,
			   tree ref2,
			   alias_set_type ref2_alias_set,
			   alias_set_type base2_alias_set,
			   HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2,
			   bool ref2_is_decl)
{
  /* If one reference is a component references through pointers try to find a
     common base and apply offset based disambiguation.  This handles
     for example
       struct A { int i; int j; } *q;
       struct B { struct A a; int k; } *p;
     disambiguating q->i and p->a.j.  */
  tree base1, base2;
  tree type1, type2;
  tree *refp;
  int same_p;

  /* Choose bases and base types to search for.  */
  base1 = ref1;
  while (handled_component_p (base1))
    base1 = TREE_OPERAND (base1, 0);
  type1 = TREE_TYPE (base1);
  base2 = ref2;
  while (handled_component_p (base2))
    base2 = TREE_OPERAND (base2, 0);
  type2 = TREE_TYPE (base2);

  /* Now search for the type1 in the access path of ref2.  This
     would be a common base for doing offset based disambiguation on.  */
  refp = &ref2;
  while (handled_component_p (*refp)
	 && same_type_for_tbaa (TREE_TYPE (*refp), type1) == 0)
    refp = &TREE_OPERAND (*refp, 0);
  same_p = same_type_for_tbaa (TREE_TYPE (*refp), type1);
  /* If we couldn't compare types we have to bail out.  */
  if (same_p == -1)
    return true;
  else if (same_p == 1)
    {
      /* Found a common base type — rebase both offsets onto it and
	 disambiguate by range overlap.  */
      HOST_WIDE_INT offadj, sztmp, msztmp;
      get_ref_base_and_extent (*refp, &offadj, &sztmp, &msztmp);
      offset2 -= offadj;
      get_ref_base_and_extent (base1, &offadj, &sztmp, &msztmp);
      offset1 -= offadj;
      return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
    }
  /* If we didn't find a common base, try the other way around.  */
  refp = &ref1;
  while (handled_component_p (*refp)
	 && same_type_for_tbaa (TREE_TYPE (*refp), type2) == 0)
    refp = &TREE_OPERAND (*refp, 0);
  same_p = same_type_for_tbaa (TREE_TYPE (*refp), type2);
  /* If we couldn't compare types we have to bail out.  */
  if (same_p == -1)
    return true;
  else if (same_p == 1)
    {
      HOST_WIDE_INT offadj, sztmp, msztmp;
      get_ref_base_and_extent (*refp, &offadj, &sztmp, &msztmp);
      offset1 -= offadj;
      get_ref_base_and_extent (base2, &offadj, &sztmp, &msztmp);
      offset2 -= offadj;
      return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
    }

  /* If we have two type access paths B1.path1 and B2.path2 they may
     only alias if either B1 is in B2.path2 or B2 is in B1.path1.
     But we can still have a path that goes B1.path1...B2.path2 with
     a part that we do not see.  So we can only disambiguate now
     if there is no B2 in the tail of path1 and no B1 on the
     tail of path2.  */
  if (base1_alias_set == ref2_alias_set
      || alias_set_subset_of (base1_alias_set, ref2_alias_set))
    return true;
  /* If this is ptr vs. decl then we know there is no ptr ... decl path.  */
  if (!ref2_is_decl)
    return (base2_alias_set == ref1_alias_set
	    || alias_set_subset_of (base2_alias_set, ref1_alias_set));
  return false;
}
/* Return true if we can determine that component references REF1 and REF2,
   that are within a common DECL, cannot overlap.  */

static bool
nonoverlapping_component_refs_of_decl_p (tree ref1, tree ref2)
{
  stack_vec<tree, 16> component_refs1;
  stack_vec<tree, 16> component_refs2;

  /* Create the stack of handled components for REF1.  */
  while (handled_component_p (ref1))
    {
      component_refs1.safe_push (ref1);
      ref1 = TREE_OPERAND (ref1, 0);
    }
  if (TREE_CODE (ref1) == MEM_REF)
    {
      /* Only a zero-offset MEM_REF of a decl address is transparent.  */
      if (!integer_zerop (TREE_OPERAND (ref1, 1)))
	goto may_overlap;
      ref1 = TREE_OPERAND (TREE_OPERAND (ref1, 0), 0);
    }

  /* Create the stack of handled components for REF2.  */
  while (handled_component_p (ref2))
    {
      component_refs2.safe_push (ref2);
      ref2 = TREE_OPERAND (ref2, 0);
    }
  if (TREE_CODE (ref2) == MEM_REF)
    {
      if (!integer_zerop (TREE_OPERAND (ref2, 1)))
	goto may_overlap;
      ref2 = TREE_OPERAND (TREE_OPERAND (ref2, 0), 0);
    }

  /* We must have the same base DECL.  */
  gcc_assert (ref1 == ref2);

  /* Pop the stacks in parallel and examine the COMPONENT_REFs of the same
     rank.  This is sufficient because we start from the same DECL and you
     cannot reference several fields at a time with COMPONENT_REFs (unlike
     with ARRAY_RANGE_REFs for arrays) so you always need the same number
     of them to access a sub-component, unless you're in a union, in which
     case the return value will precisely be false.  */
  while (true)
    {
      /* Skip components that are not record or union accesses.  */
      do
	{
	  if (component_refs1.is_empty ())
	    goto may_overlap;
	  ref1 = component_refs1.pop ();
	}
      while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref1, 0))));
      do
	{
	  if (component_refs2.is_empty ())
	    goto may_overlap;
	  ref2 = component_refs2.pop ();
	}
      while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref2, 0))));

      /* Beware of BIT_FIELD_REF.  */
      if (TREE_CODE (ref1) != COMPONENT_REF
	  || TREE_CODE (ref2) != COMPONENT_REF)
	goto may_overlap;

      tree field1 = TREE_OPERAND (ref1, 1);
      tree field2 = TREE_OPERAND (ref2, 1);

      /* ??? We cannot simply use the type of operand #0 of the refs here
	 as the Fortran compiler smuggles type punning into COMPONENT_REFs
	 for common blocks instead of using unions like everyone else.  */
      tree type1 = TYPE_MAIN_VARIANT (DECL_CONTEXT (field1));
      tree type2 = TYPE_MAIN_VARIANT (DECL_CONTEXT (field2));

      /* We cannot disambiguate fields in a union or qualified union.  */
      if (type1 != type2 || TREE_CODE (type1) != RECORD_TYPE)
	goto may_overlap;

      /* Different fields of the same record type cannot overlap.
	 ??? Bitfields can overlap at RTL level so punt on them.  */
      if (field1 != field2)
	{
	  component_refs1.release ();
	  component_refs2.release ();
	  return !(DECL_BIT_FIELD (field1) && DECL_BIT_FIELD (field2));
	}
    }

may_overlap:
  component_refs1.release ();
  component_refs2.release ();
  return false;
}
/* Return true if two memory references based on the variables BASE1
   and BASE2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
   [OFFSET2, OFFSET2 + MAX_SIZE2) may alias.  REF1 and REF2
   if non-NULL are the complete memory reference trees.  */

static bool
decl_refs_may_alias_p (tree ref1, tree base1,
		       HOST_WIDE_INT offset1, HOST_WIDE_INT max_size1,
		       tree ref2, tree base2,
		       HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2)
{
  gcc_checking_assert (DECL_P (base1) && DECL_P (base2));

  /* If both references are based on different variables, they cannot alias.  */
  if (base1 != base2)
    return false;

  /* If both references are based on the same variable, they cannot alias if
     the accesses do not overlap.  */
  if (!ranges_overlap_p (offset1, max_size1, offset2, max_size2))
    return false;

  /* For components with variable position, the above test isn't sufficient,
     so we disambiguate component references manually.  */
  if (ref1 && ref2
      && handled_component_p (ref1) && handled_component_p (ref2)
      && nonoverlapping_component_refs_of_decl_p (ref1, ref2))
    return false;

  return true;
}
/* Return true if an indirect reference based on *PTR1 constrained
   to [OFFSET1, OFFSET1 + MAX_SIZE1) may alias a variable based on BASE2
   constrained to [OFFSET2, OFFSET2 + MAX_SIZE2).  *PTR1 and BASE2 have
   the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
   in which case they are computed on-demand.  REF1 and REF2
   if non-NULL are the complete memory reference trees.  */

static bool
indirect_ref_may_alias_decl_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
			       HOST_WIDE_INT offset1,
			       HOST_WIDE_INT max_size1 ATTRIBUTE_UNUSED,
			       alias_set_type ref1_alias_set,
			       alias_set_type base1_alias_set,
			       tree ref2 ATTRIBUTE_UNUSED, tree base2,
			       HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2,
			       alias_set_type ref2_alias_set,
			       alias_set_type base2_alias_set, bool tbaa_p)
{
  tree ptr1;
  tree ptrtype1, dbase2;
  HOST_WIDE_INT offset1p = offset1, offset2p = offset2;
  HOST_WIDE_INT doffset1, doffset2;
  double_int moff;

  gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
			|| TREE_CODE (base1) == TARGET_MEM_REF)
		       && DECL_P (base2));

  ptr1 = TREE_OPERAND (base1, 0);

  /* The offset embedded in MEM_REFs can be negative.  Bias them
     so that the resulting offset adjustment is positive.  */
  moff = mem_ref_offset (base1);
  /* Convert the byte offset to bits.  */
  moff = moff.lshift (BITS_PER_UNIT == 8 ? 3 : exact_log2 (BITS_PER_UNIT));
  if (moff.is_negative ())
    offset2p += (-moff).low;
  else
    offset1p += moff.low;

  /* If only one reference is based on a variable, they cannot alias if
     the pointer access is beyond the extent of the variable access.
     (the pointer base cannot validly point to an offset less than zero
     of the variable).
     ??? IVOPTs creates bases that do not honor this restriction,
     so do not apply this optimization for TARGET_MEM_REFs.  */
  if (TREE_CODE (base1) != TARGET_MEM_REF
      && !ranges_overlap_p (MAX (0, offset1p), -1, offset2p, max_size2))
    return false;
  /* They also cannot alias if the pointer may not point to the decl.  */
  if (!ptr_deref_may_alias_decl_p (ptr1, base2))
    return false;

  /* Disambiguations that rely on strict aliasing rules follow.  */
  if (!flag_strict_aliasing || !tbaa_p)
    return true;

  ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));

  /* If the alias set for a pointer access is zero all bets are off.  */
  if (base1_alias_set == -1)
    base1_alias_set = get_deref_alias_set (ptrtype1);
  if (base1_alias_set == 0)
    return true;
  if (base2_alias_set == -1)
    base2_alias_set = get_alias_set (base2);

  /* When we are trying to disambiguate an access with a pointer dereference
     as base versus one with a decl as base we can use both the size
     of the decl and its dynamic type for extra disambiguation.
     ??? We do not know anything about the dynamic type of the decl
     other than that its alias-set contains base2_alias_set as a subset
     which does not help us here.  */
  /* As we know nothing useful about the dynamic type of the decl just
     use the usual conflict check rather than a subset test.
     ??? We could introduce -fvery-strict-aliasing when the language
     does not allow decls to have a dynamic type that differs from their
     static type.  Then we can check
     !alias_set_subset_of (base1_alias_set, base2_alias_set) instead.  */
  if (base1_alias_set != base2_alias_set
      && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
    return false;
  /* If the size of the access relevant for TBAA through the pointer
     is bigger than the size of the decl we can't possibly access the
     decl via that pointer.  */
  if (DECL_SIZE (base2) && COMPLETE_TYPE_P (TREE_TYPE (ptrtype1))
      && TREE_CODE (DECL_SIZE (base2)) == INTEGER_CST
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (ptrtype1))) == INTEGER_CST
      /* ??? This in turn may run afoul when a decl of type T which is
	 a member of union type U is accessed through a pointer to
	 type U and sizeof T is smaller than sizeof U.  */
      && TREE_CODE (TREE_TYPE (ptrtype1)) != UNION_TYPE
      && TREE_CODE (TREE_TYPE (ptrtype1)) != QUAL_UNION_TYPE
      && tree_int_cst_lt (DECL_SIZE (base2), TYPE_SIZE (TREE_TYPE (ptrtype1))))
    return false;

  if (!ref2)
    return true;

  /* If the decl is accessed via a MEM_REF, reconstruct the base
     we can use for TBAA and an appropriately adjusted offset.  */
  dbase2 = ref2;
  while (handled_component_p (dbase2))
    dbase2 = TREE_OPERAND (dbase2, 0);
  doffset1 = offset1;
  doffset2 = offset2;
  if (TREE_CODE (dbase2) == MEM_REF
      || TREE_CODE (dbase2) == TARGET_MEM_REF)
    {
      double_int moff = mem_ref_offset (dbase2);
      moff = moff.lshift (BITS_PER_UNIT == 8 ? 3 : exact_log2 (BITS_PER_UNIT));
      if (moff.is_negative ())
	doffset1 -= (-moff).low;
      else
	doffset2 -= moff.low;
    }

  /* If either reference is view-converted, give up now.  */
  if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1
      || same_type_for_tbaa (TREE_TYPE (dbase2), TREE_TYPE (base2)) != 1)
    return true;

  /* If both references are through the same type, they do not alias
     if the accesses do not overlap.  This does extra disambiguation
     for mixed/pointer accesses but requires strict aliasing.
     For MEM_REFs we require that the component-ref offset we computed
     is relative to the start of the type which we ensure by
     comparing rvalue and access type and disregarding the constant
     pointer offset.  */
  if ((TREE_CODE (base1) != TARGET_MEM_REF
       || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
      && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (dbase2)) == 1)
    return ranges_overlap_p (doffset1, max_size1, doffset2, max_size2);

  /* Do access-path based disambiguation.  */
  if (ref1 && ref2
      && (handled_component_p (ref1) || handled_component_p (ref2)))
    return aliasing_component_refs_p (ref1,
				      ref1_alias_set, base1_alias_set,
				      offset1, max_size1,
				      ref2,
				      ref2_alias_set, base2_alias_set,
				      offset2, max_size2, true);

  return true;
}
1036 /* Return true if two indirect references based on *PTR1
1037 and *PTR2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
1038 [OFFSET2, OFFSET2 + MAX_SIZE2) may alias. *PTR1 and *PTR2 have
1039 the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
1040 in which case they are computed on-demand. REF1 and REF2
1041 if non-NULL are the complete memory reference trees. */
1043 static bool
1044 indirect_refs_may_alias_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
1045 HOST_WIDE_INT offset1, HOST_WIDE_INT max_size1,
1046 alias_set_type ref1_alias_set,
1047 alias_set_type base1_alias_set,
1048 tree ref2 ATTRIBUTE_UNUSED, tree base2,
1049 HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2,
1050 alias_set_type ref2_alias_set,
1051 alias_set_type base2_alias_set, bool tbaa_p)
1053 tree ptr1;
1054 tree ptr2;
1055 tree ptrtype1, ptrtype2;
1057 gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
1058 || TREE_CODE (base1) == TARGET_MEM_REF)
1059 && (TREE_CODE (base2) == MEM_REF
1060 || TREE_CODE (base2) == TARGET_MEM_REF));
1062 ptr1 = TREE_OPERAND (base1, 0);
1063 ptr2 = TREE_OPERAND (base2, 0);
1065 /* If both bases are based on pointers they cannot alias if they may not
1066 point to the same memory object or if they point to the same object
1067 and the accesses do not overlap. */
1068 if ((!cfun || gimple_in_ssa_p (cfun))
1069 && operand_equal_p (ptr1, ptr2, 0)
1070 && (((TREE_CODE (base1) != TARGET_MEM_REF
1071 || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
1072 && (TREE_CODE (base2) != TARGET_MEM_REF
1073 || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2))))
1074 || (TREE_CODE (base1) == TARGET_MEM_REF
1075 && TREE_CODE (base2) == TARGET_MEM_REF
1076 && (TMR_STEP (base1) == TMR_STEP (base2)
1077 || (TMR_STEP (base1) && TMR_STEP (base2)
1078 && operand_equal_p (TMR_STEP (base1),
1079 TMR_STEP (base2), 0)))
1080 && (TMR_INDEX (base1) == TMR_INDEX (base2)
1081 || (TMR_INDEX (base1) && TMR_INDEX (base2)
1082 && operand_equal_p (TMR_INDEX (base1),
1083 TMR_INDEX (base2), 0)))
1084 && (TMR_INDEX2 (base1) == TMR_INDEX2 (base2)
1085 || (TMR_INDEX2 (base1) && TMR_INDEX2 (base2)
1086 && operand_equal_p (TMR_INDEX2 (base1),
1087 TMR_INDEX2 (base2), 0))))))
1089 double_int moff;
1090 /* The offset embedded in MEM_REFs can be negative. Bias them
1091 so that the resulting offset adjustment is positive. */
1092 moff = mem_ref_offset (base1);
1093 moff = moff.lshift (BITS_PER_UNIT == 8 ? 3 : exact_log2 (BITS_PER_UNIT));
1094 if (moff.is_negative ())
1095 offset2 += (-moff).low;
1096 else
1097 offset1 += moff.low;
1098 moff = mem_ref_offset (base2);
1099 moff = moff.lshift (BITS_PER_UNIT == 8 ? 3 : exact_log2 (BITS_PER_UNIT));
1100 if (moff.is_negative ())
1101 offset1 += (-moff).low;
1102 else
1103 offset2 += moff.low;
1104 return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
1106 if (!ptr_derefs_may_alias_p (ptr1, ptr2))
1107 return false;
1109 /* Disambiguations that rely on strict aliasing rules follow. */
1110 if (!flag_strict_aliasing || !tbaa_p)
1111 return true;
1113 ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));
1114 ptrtype2 = TREE_TYPE (TREE_OPERAND (base2, 1));
1116 /* If the alias set for a pointer access is zero all bets are off. */
1117 if (base1_alias_set == -1)
1118 base1_alias_set = get_deref_alias_set (ptrtype1);
1119 if (base1_alias_set == 0)
1120 return true;
1121 if (base2_alias_set == -1)
1122 base2_alias_set = get_deref_alias_set (ptrtype2);
1123 if (base2_alias_set == 0)
1124 return true;
1126 /* If both references are through the same type, they do not alias
1127 if the accesses do not overlap. This does extra disambiguation
1128 for mixed/pointer accesses but requires strict aliasing. */
1129 if ((TREE_CODE (base1) != TARGET_MEM_REF
1130 || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
1131 && (TREE_CODE (base2) != TARGET_MEM_REF
1132 || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2)))
1133 && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) == 1
1134 && same_type_for_tbaa (TREE_TYPE (base2), TREE_TYPE (ptrtype2)) == 1
1135 && same_type_for_tbaa (TREE_TYPE (ptrtype1),
1136 TREE_TYPE (ptrtype2)) == 1)
1137 return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
1139 /* Do type-based disambiguation. */
1140 if (base1_alias_set != base2_alias_set
1141 && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
1142 return false;
1144 /* Do access-path based disambiguation. */
1145 if (ref1 && ref2
1146 && (handled_component_p (ref1) || handled_component_p (ref2))
1147 && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) == 1
1148 && same_type_for_tbaa (TREE_TYPE (base2), TREE_TYPE (ptrtype2)) == 1)
1149 return aliasing_component_refs_p (ref1,
1150 ref1_alias_set, base1_alias_set,
1151 offset1, max_size1,
1152 ref2,
1153 ref2_alias_set, base2_alias_set,
1154 offset2, max_size2, false);
1156 return true;
1159 /* Return true, if the two memory references REF1 and REF2 may alias. */
1161 bool
1162 refs_may_alias_p_1 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
1164 tree base1, base2;
1165 HOST_WIDE_INT offset1 = 0, offset2 = 0;
1166 HOST_WIDE_INT max_size1 = -1, max_size2 = -1;
1167 bool var1_p, var2_p, ind1_p, ind2_p;
1169 gcc_checking_assert ((!ref1->ref
1170 || TREE_CODE (ref1->ref) == SSA_NAME
1171 || DECL_P (ref1->ref)
1172 || TREE_CODE (ref1->ref) == STRING_CST
1173 || handled_component_p (ref1->ref)
1174 || TREE_CODE (ref1->ref) == MEM_REF
1175 || TREE_CODE (ref1->ref) == TARGET_MEM_REF)
1176 && (!ref2->ref
1177 || TREE_CODE (ref2->ref) == SSA_NAME
1178 || DECL_P (ref2->ref)
1179 || TREE_CODE (ref2->ref) == STRING_CST
1180 || handled_component_p (ref2->ref)
1181 || TREE_CODE (ref2->ref) == MEM_REF
1182 || TREE_CODE (ref2->ref) == TARGET_MEM_REF));
1184 /* Decompose the references into their base objects and the access. */
1185 base1 = ao_ref_base (ref1);
1186 offset1 = ref1->offset;
1187 max_size1 = ref1->max_size;
1188 base2 = ao_ref_base (ref2);
1189 offset2 = ref2->offset;
1190 max_size2 = ref2->max_size;
1192 /* We can end up with registers or constants as bases for example from
1193 *D.1663_44 = VIEW_CONVERT_EXPR<struct DB_LSN>(__tmp$B0F64_59);
1194 which is seen as a struct copy. */
1195 if (TREE_CODE (base1) == SSA_NAME
1196 || TREE_CODE (base1) == CONST_DECL
1197 || TREE_CODE (base1) == CONSTRUCTOR
1198 || TREE_CODE (base1) == ADDR_EXPR
1199 || CONSTANT_CLASS_P (base1)
1200 || TREE_CODE (base2) == SSA_NAME
1201 || TREE_CODE (base2) == CONST_DECL
1202 || TREE_CODE (base2) == CONSTRUCTOR
1203 || TREE_CODE (base2) == ADDR_EXPR
1204 || CONSTANT_CLASS_P (base2))
1205 return false;
1207 /* We can end up referring to code via function and label decls.
1208 As we likely do not properly track code aliases conservatively
1209 bail out. */
1210 if (TREE_CODE (base1) == FUNCTION_DECL
1211 || TREE_CODE (base1) == LABEL_DECL
1212 || TREE_CODE (base2) == FUNCTION_DECL
1213 || TREE_CODE (base2) == LABEL_DECL)
1214 return true;
1216 /* Two volatile accesses always conflict. */
1217 if (ref1->volatile_p
1218 && ref2->volatile_p)
1219 return true;
1221 /* Defer to simple offset based disambiguation if we have
1222 references based on two decls. Do this before defering to
1223 TBAA to handle must-alias cases in conformance with the
1224 GCC extension of allowing type-punning through unions. */
1225 var1_p = DECL_P (base1);
1226 var2_p = DECL_P (base2);
1227 if (var1_p && var2_p)
1228 return decl_refs_may_alias_p (ref1->ref, base1, offset1, max_size1,
1229 ref2->ref, base2, offset2, max_size2);
1231 ind1_p = (TREE_CODE (base1) == MEM_REF
1232 || TREE_CODE (base1) == TARGET_MEM_REF);
1233 ind2_p = (TREE_CODE (base2) == MEM_REF
1234 || TREE_CODE (base2) == TARGET_MEM_REF);
1236 /* Canonicalize the pointer-vs-decl case. */
1237 if (ind1_p && var2_p)
1239 HOST_WIDE_INT tmp1;
1240 tree tmp2;
1241 ao_ref *tmp3;
1242 tmp1 = offset1; offset1 = offset2; offset2 = tmp1;
1243 tmp1 = max_size1; max_size1 = max_size2; max_size2 = tmp1;
1244 tmp2 = base1; base1 = base2; base2 = tmp2;
1245 tmp3 = ref1; ref1 = ref2; ref2 = tmp3;
1246 var1_p = true;
1247 ind1_p = false;
1248 var2_p = false;
1249 ind2_p = true;
1252 /* First defer to TBAA if possible. */
1253 if (tbaa_p
1254 && flag_strict_aliasing
1255 && !alias_sets_conflict_p (ao_ref_alias_set (ref1),
1256 ao_ref_alias_set (ref2)))
1257 return false;
1259 /* Dispatch to the pointer-vs-decl or pointer-vs-pointer disambiguators. */
1260 if (var1_p && ind2_p)
1261 return indirect_ref_may_alias_decl_p (ref2->ref, base2,
1262 offset2, max_size2,
1263 ao_ref_alias_set (ref2), -1,
1264 ref1->ref, base1,
1265 offset1, max_size1,
1266 ao_ref_alias_set (ref1),
1267 ao_ref_base_alias_set (ref1),
1268 tbaa_p);
1269 else if (ind1_p && ind2_p)
1270 return indirect_refs_may_alias_p (ref1->ref, base1,
1271 offset1, max_size1,
1272 ao_ref_alias_set (ref1), -1,
1273 ref2->ref, base2,
1274 offset2, max_size2,
1275 ao_ref_alias_set (ref2), -1,
1276 tbaa_p);
1278 /* We really do not want to end up here, but returning true is safe. */
1279 #ifdef ENABLE_CHECKING
1280 gcc_unreachable ();
1281 #else
1282 return true;
1283 #endif
1286 bool
1287 refs_may_alias_p (tree ref1, tree ref2)
1289 ao_ref r1, r2;
1290 bool res;
1291 ao_ref_init (&r1, ref1);
1292 ao_ref_init (&r2, ref2);
1293 res = refs_may_alias_p_1 (&r1, &r2, true);
1294 if (res)
1295 ++alias_stats.refs_may_alias_p_may_alias;
1296 else
1297 ++alias_stats.refs_may_alias_p_no_alias;
1298 return res;
1301 /* Returns true if there is a anti-dependence for the STORE that
1302 executes after the LOAD. */
1304 bool
1305 refs_anti_dependent_p (tree load, tree store)
1307 ao_ref r1, r2;
1308 ao_ref_init (&r1, load);
1309 ao_ref_init (&r2, store);
1310 return refs_may_alias_p_1 (&r1, &r2, false);
1313 /* Returns true if there is a output dependence for the stores
1314 STORE1 and STORE2. */
1316 bool
1317 refs_output_dependent_p (tree store1, tree store2)
1319 ao_ref r1, r2;
1320 ao_ref_init (&r1, store1);
1321 ao_ref_init (&r2, store2);
1322 return refs_may_alias_p_1 (&r1, &r2, false);
1325 /* If the call CALL may use the memory reference REF return true,
1326 otherwise return false. */
1328 static bool
1329 ref_maybe_used_by_call_p_1 (gimple call, ao_ref *ref)
1331 tree base, callee;
1332 unsigned i;
1333 int flags = gimple_call_flags (call);
1335 /* Const functions without a static chain do not implicitly use memory. */
1336 if (!gimple_call_chain (call)
1337 && (flags & (ECF_CONST|ECF_NOVOPS)))
1338 goto process_args;
1340 base = ao_ref_base (ref);
1341 if (!base)
1342 return true;
1344 /* A call that is not without side-effects might involve volatile
1345 accesses and thus conflicts with all other volatile accesses. */
1346 if (ref->volatile_p)
1347 return true;
1349 /* If the reference is based on a decl that is not aliased the call
1350 cannot possibly use it. */
1351 if (DECL_P (base)
1352 && !may_be_aliased (base)
1353 /* But local statics can be used through recursion. */
1354 && !is_global_var (base))
1355 goto process_args;
1357 callee = gimple_call_fndecl (call);
1359 /* Handle those builtin functions explicitly that do not act as
1360 escape points. See tree-ssa-structalias.c:find_func_aliases
1361 for the list of builtins we might need to handle here. */
1362 if (callee != NULL_TREE
1363 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
1364 switch (DECL_FUNCTION_CODE (callee))
1366 /* All the following functions read memory pointed to by
1367 their second argument. strcat/strncat additionally
1368 reads memory pointed to by the first argument. */
1369 case BUILT_IN_STRCAT:
1370 case BUILT_IN_STRNCAT:
1372 ao_ref dref;
1373 ao_ref_init_from_ptr_and_size (&dref,
1374 gimple_call_arg (call, 0),
1375 NULL_TREE);
1376 if (refs_may_alias_p_1 (&dref, ref, false))
1377 return true;
1379 /* FALLTHRU */
1380 case BUILT_IN_STRCPY:
1381 case BUILT_IN_STRNCPY:
1382 case BUILT_IN_MEMCPY:
1383 case BUILT_IN_MEMMOVE:
1384 case BUILT_IN_MEMPCPY:
1385 case BUILT_IN_STPCPY:
1386 case BUILT_IN_STPNCPY:
1387 case BUILT_IN_TM_MEMCPY:
1388 case BUILT_IN_TM_MEMMOVE:
1390 ao_ref dref;
1391 tree size = NULL_TREE;
1392 if (gimple_call_num_args (call) == 3)
1393 size = gimple_call_arg (call, 2);
1394 ao_ref_init_from_ptr_and_size (&dref,
1395 gimple_call_arg (call, 1),
1396 size);
1397 return refs_may_alias_p_1 (&dref, ref, false);
1399 case BUILT_IN_STRCAT_CHK:
1400 case BUILT_IN_STRNCAT_CHK:
1402 ao_ref dref;
1403 ao_ref_init_from_ptr_and_size (&dref,
1404 gimple_call_arg (call, 0),
1405 NULL_TREE);
1406 if (refs_may_alias_p_1 (&dref, ref, false))
1407 return true;
1409 /* FALLTHRU */
1410 case BUILT_IN_STRCPY_CHK:
1411 case BUILT_IN_STRNCPY_CHK:
1412 case BUILT_IN_MEMCPY_CHK:
1413 case BUILT_IN_MEMMOVE_CHK:
1414 case BUILT_IN_MEMPCPY_CHK:
1415 case BUILT_IN_STPCPY_CHK:
1416 case BUILT_IN_STPNCPY_CHK:
1418 ao_ref dref;
1419 tree size = NULL_TREE;
1420 if (gimple_call_num_args (call) == 4)
1421 size = gimple_call_arg (call, 2);
1422 ao_ref_init_from_ptr_and_size (&dref,
1423 gimple_call_arg (call, 1),
1424 size);
1425 return refs_may_alias_p_1 (&dref, ref, false);
1427 case BUILT_IN_BCOPY:
1429 ao_ref dref;
1430 tree size = gimple_call_arg (call, 2);
1431 ao_ref_init_from_ptr_and_size (&dref,
1432 gimple_call_arg (call, 0),
1433 size);
1434 return refs_may_alias_p_1 (&dref, ref, false);
1437 /* The following functions read memory pointed to by their
1438 first argument. */
1439 CASE_BUILT_IN_TM_LOAD (1):
1440 CASE_BUILT_IN_TM_LOAD (2):
1441 CASE_BUILT_IN_TM_LOAD (4):
1442 CASE_BUILT_IN_TM_LOAD (8):
1443 CASE_BUILT_IN_TM_LOAD (FLOAT):
1444 CASE_BUILT_IN_TM_LOAD (DOUBLE):
1445 CASE_BUILT_IN_TM_LOAD (LDOUBLE):
1446 CASE_BUILT_IN_TM_LOAD (M64):
1447 CASE_BUILT_IN_TM_LOAD (M128):
1448 CASE_BUILT_IN_TM_LOAD (M256):
1449 case BUILT_IN_TM_LOG:
1450 case BUILT_IN_TM_LOG_1:
1451 case BUILT_IN_TM_LOG_2:
1452 case BUILT_IN_TM_LOG_4:
1453 case BUILT_IN_TM_LOG_8:
1454 case BUILT_IN_TM_LOG_FLOAT:
1455 case BUILT_IN_TM_LOG_DOUBLE:
1456 case BUILT_IN_TM_LOG_LDOUBLE:
1457 case BUILT_IN_TM_LOG_M64:
1458 case BUILT_IN_TM_LOG_M128:
1459 case BUILT_IN_TM_LOG_M256:
1460 return ptr_deref_may_alias_ref_p_1 (gimple_call_arg (call, 0), ref);
1462 /* These read memory pointed to by the first argument. */
1463 case BUILT_IN_STRDUP:
1464 case BUILT_IN_STRNDUP:
1466 ao_ref dref;
1467 tree size = NULL_TREE;
1468 if (gimple_call_num_args (call) == 2)
1469 size = gimple_call_arg (call, 1);
1470 ao_ref_init_from_ptr_and_size (&dref,
1471 gimple_call_arg (call, 0),
1472 size);
1473 return refs_may_alias_p_1 (&dref, ref, false);
1475 /* These read memory pointed to by the first argument. */
1476 case BUILT_IN_INDEX:
1477 case BUILT_IN_STRCHR:
1478 case BUILT_IN_STRRCHR:
1480 ao_ref dref;
1481 ao_ref_init_from_ptr_and_size (&dref,
1482 gimple_call_arg (call, 0),
1483 NULL_TREE);
1484 return refs_may_alias_p_1 (&dref, ref, false);
1486 /* These read memory pointed to by the first argument with size
1487 in the third argument. */
1488 case BUILT_IN_MEMCHR:
1490 ao_ref dref;
1491 ao_ref_init_from_ptr_and_size (&dref,
1492 gimple_call_arg (call, 0),
1493 gimple_call_arg (call, 2));
1494 return refs_may_alias_p_1 (&dref, ref, false);
1496 /* These read memory pointed to by the first and second arguments. */
1497 case BUILT_IN_STRSTR:
1498 case BUILT_IN_STRPBRK:
1500 ao_ref dref;
1501 ao_ref_init_from_ptr_and_size (&dref,
1502 gimple_call_arg (call, 0),
1503 NULL_TREE);
1504 if (refs_may_alias_p_1 (&dref, ref, false))
1505 return true;
1506 ao_ref_init_from_ptr_and_size (&dref,
1507 gimple_call_arg (call, 1),
1508 NULL_TREE);
1509 return refs_may_alias_p_1 (&dref, ref, false);
1512 /* The following builtins do not read from memory. */
1513 case BUILT_IN_FREE:
1514 case BUILT_IN_MALLOC:
1515 case BUILT_IN_CALLOC:
1516 case BUILT_IN_ALLOCA:
1517 case BUILT_IN_ALLOCA_WITH_ALIGN:
1518 case BUILT_IN_STACK_SAVE:
1519 case BUILT_IN_STACK_RESTORE:
1520 case BUILT_IN_MEMSET:
1521 case BUILT_IN_TM_MEMSET:
1522 case BUILT_IN_MEMSET_CHK:
1523 case BUILT_IN_FREXP:
1524 case BUILT_IN_FREXPF:
1525 case BUILT_IN_FREXPL:
1526 case BUILT_IN_GAMMA_R:
1527 case BUILT_IN_GAMMAF_R:
1528 case BUILT_IN_GAMMAL_R:
1529 case BUILT_IN_LGAMMA_R:
1530 case BUILT_IN_LGAMMAF_R:
1531 case BUILT_IN_LGAMMAL_R:
1532 case BUILT_IN_MODF:
1533 case BUILT_IN_MODFF:
1534 case BUILT_IN_MODFL:
1535 case BUILT_IN_REMQUO:
1536 case BUILT_IN_REMQUOF:
1537 case BUILT_IN_REMQUOL:
1538 case BUILT_IN_SINCOS:
1539 case BUILT_IN_SINCOSF:
1540 case BUILT_IN_SINCOSL:
1541 case BUILT_IN_ASSUME_ALIGNED:
1542 case BUILT_IN_VA_END:
1543 return false;
1544 /* __sync_* builtins and some OpenMP builtins act as threading
1545 barriers. */
1546 #undef DEF_SYNC_BUILTIN
1547 #define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
1548 #include "sync-builtins.def"
1549 #undef DEF_SYNC_BUILTIN
1550 case BUILT_IN_GOMP_ATOMIC_START:
1551 case BUILT_IN_GOMP_ATOMIC_END:
1552 case BUILT_IN_GOMP_BARRIER:
1553 case BUILT_IN_GOMP_BARRIER_CANCEL:
1554 case BUILT_IN_GOMP_TASKWAIT:
1555 case BUILT_IN_GOMP_TASKGROUP_END:
1556 case BUILT_IN_GOMP_CRITICAL_START:
1557 case BUILT_IN_GOMP_CRITICAL_END:
1558 case BUILT_IN_GOMP_CRITICAL_NAME_START:
1559 case BUILT_IN_GOMP_CRITICAL_NAME_END:
1560 case BUILT_IN_GOMP_LOOP_END:
1561 case BUILT_IN_GOMP_LOOP_END_CANCEL:
1562 case BUILT_IN_GOMP_ORDERED_START:
1563 case BUILT_IN_GOMP_ORDERED_END:
1564 case BUILT_IN_GOMP_SECTIONS_END:
1565 case BUILT_IN_GOMP_SECTIONS_END_CANCEL:
1566 case BUILT_IN_GOMP_SINGLE_COPY_START:
1567 case BUILT_IN_GOMP_SINGLE_COPY_END:
1568 return true;
1570 default:
1571 /* Fallthru to general call handling. */;
1574 /* Check if base is a global static variable that is not read
1575 by the function. */
1576 if (callee != NULL_TREE
1577 && TREE_CODE (base) == VAR_DECL
1578 && TREE_STATIC (base))
1580 struct cgraph_node *node = cgraph_get_node (callee);
1581 bitmap not_read;
1583 /* FIXME: Callee can be an OMP builtin that does not have a call graph
1584 node yet. We should enforce that there are nodes for all decls in the
1585 IL and remove this check instead. */
1586 if (node
1587 && (not_read = ipa_reference_get_not_read_global (node))
1588 && bitmap_bit_p (not_read, DECL_UID (base)))
1589 goto process_args;
1592 /* Check if the base variable is call-used. */
1593 if (DECL_P (base))
1595 if (pt_solution_includes (gimple_call_use_set (call), base))
1596 return true;
1598 else if ((TREE_CODE (base) == MEM_REF
1599 || TREE_CODE (base) == TARGET_MEM_REF)
1600 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
1602 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
1603 if (!pi)
1604 return true;
1606 if (pt_solutions_intersect (gimple_call_use_set (call), &pi->pt))
1607 return true;
1609 else
1610 return true;
1612 /* Inspect call arguments for passed-by-value aliases. */
1613 process_args:
1614 for (i = 0; i < gimple_call_num_args (call); ++i)
1616 tree op = gimple_call_arg (call, i);
1617 int flags = gimple_call_arg_flags (call, i);
1619 if (flags & EAF_UNUSED)
1620 continue;
1622 if (TREE_CODE (op) == WITH_SIZE_EXPR)
1623 op = TREE_OPERAND (op, 0);
1625 if (TREE_CODE (op) != SSA_NAME
1626 && !is_gimple_min_invariant (op))
1628 ao_ref r;
1629 ao_ref_init (&r, op);
1630 if (refs_may_alias_p_1 (&r, ref, true))
1631 return true;
1635 return false;
1638 static bool
1639 ref_maybe_used_by_call_p (gimple call, tree ref)
1641 ao_ref r;
1642 bool res;
1643 ao_ref_init (&r, ref);
1644 res = ref_maybe_used_by_call_p_1 (call, &r);
1645 if (res)
1646 ++alias_stats.ref_maybe_used_by_call_p_may_alias;
1647 else
1648 ++alias_stats.ref_maybe_used_by_call_p_no_alias;
1649 return res;
1653 /* If the statement STMT may use the memory reference REF return
1654 true, otherwise return false. */
1656 bool
1657 ref_maybe_used_by_stmt_p (gimple stmt, tree ref)
1659 if (is_gimple_assign (stmt))
1661 tree rhs;
1663 /* All memory assign statements are single. */
1664 if (!gimple_assign_single_p (stmt))
1665 return false;
1667 rhs = gimple_assign_rhs1 (stmt);
1668 if (is_gimple_reg (rhs)
1669 || is_gimple_min_invariant (rhs)
1670 || gimple_assign_rhs_code (stmt) == CONSTRUCTOR)
1671 return false;
1673 return refs_may_alias_p (rhs, ref);
1675 else if (is_gimple_call (stmt))
1676 return ref_maybe_used_by_call_p (stmt, ref);
1677 else if (gimple_code (stmt) == GIMPLE_RETURN)
1679 tree retval = gimple_return_retval (stmt);
1680 tree base;
1681 if (retval
1682 && TREE_CODE (retval) != SSA_NAME
1683 && !is_gimple_min_invariant (retval)
1684 && refs_may_alias_p (retval, ref))
1685 return true;
1686 /* If ref escapes the function then the return acts as a use. */
1687 base = get_base_address (ref);
1688 if (!base)
1690 else if (DECL_P (base))
1691 return is_global_var (base);
1692 else if (TREE_CODE (base) == MEM_REF
1693 || TREE_CODE (base) == TARGET_MEM_REF)
1694 return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
1695 return false;
1698 return true;
1701 /* If the call in statement CALL may clobber the memory reference REF
1702 return true, otherwise return false. */
1704 static bool
1705 call_may_clobber_ref_p_1 (gimple call, ao_ref *ref)
1707 tree base;
1708 tree callee;
1710 /* If the call is pure or const it cannot clobber anything. */
1711 if (gimple_call_flags (call)
1712 & (ECF_PURE|ECF_CONST|ECF_LOOPING_CONST_OR_PURE|ECF_NOVOPS))
1713 return false;
1715 base = ao_ref_base (ref);
1716 if (!base)
1717 return true;
1719 if (TREE_CODE (base) == SSA_NAME
1720 || CONSTANT_CLASS_P (base))
1721 return false;
1723 /* A call that is not without side-effects might involve volatile
1724 accesses and thus conflicts with all other volatile accesses. */
1725 if (ref->volatile_p)
1726 return true;
1728 /* If the reference is based on a decl that is not aliased the call
1729 cannot possibly clobber it. */
1730 if (DECL_P (base)
1731 && !may_be_aliased (base)
1732 /* But local non-readonly statics can be modified through recursion
1733 or the call may implement a threading barrier which we must
1734 treat as may-def. */
1735 && (TREE_READONLY (base)
1736 || !is_global_var (base)))
1737 return false;
1739 callee = gimple_call_fndecl (call);
1741 /* Handle those builtin functions explicitly that do not act as
1742 escape points. See tree-ssa-structalias.c:find_func_aliases
1743 for the list of builtins we might need to handle here. */
1744 if (callee != NULL_TREE
1745 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
1746 switch (DECL_FUNCTION_CODE (callee))
1748 /* All the following functions clobber memory pointed to by
1749 their first argument. */
1750 case BUILT_IN_STRCPY:
1751 case BUILT_IN_STRNCPY:
1752 case BUILT_IN_MEMCPY:
1753 case BUILT_IN_MEMMOVE:
1754 case BUILT_IN_MEMPCPY:
1755 case BUILT_IN_STPCPY:
1756 case BUILT_IN_STPNCPY:
1757 case BUILT_IN_STRCAT:
1758 case BUILT_IN_STRNCAT:
1759 case BUILT_IN_MEMSET:
1760 case BUILT_IN_TM_MEMSET:
1761 CASE_BUILT_IN_TM_STORE (1):
1762 CASE_BUILT_IN_TM_STORE (2):
1763 CASE_BUILT_IN_TM_STORE (4):
1764 CASE_BUILT_IN_TM_STORE (8):
1765 CASE_BUILT_IN_TM_STORE (FLOAT):
1766 CASE_BUILT_IN_TM_STORE (DOUBLE):
1767 CASE_BUILT_IN_TM_STORE (LDOUBLE):
1768 CASE_BUILT_IN_TM_STORE (M64):
1769 CASE_BUILT_IN_TM_STORE (M128):
1770 CASE_BUILT_IN_TM_STORE (M256):
1771 case BUILT_IN_TM_MEMCPY:
1772 case BUILT_IN_TM_MEMMOVE:
1774 ao_ref dref;
1775 tree size = NULL_TREE;
1776 /* Don't pass in size for strncat, as the maximum size
1777 is strlen (dest) + n + 1 instead of n, resp.
1778 n + 1 at dest + strlen (dest), but strlen (dest) isn't
1779 known. */
1780 if (gimple_call_num_args (call) == 3
1781 && DECL_FUNCTION_CODE (callee) != BUILT_IN_STRNCAT)
1782 size = gimple_call_arg (call, 2);
1783 ao_ref_init_from_ptr_and_size (&dref,
1784 gimple_call_arg (call, 0),
1785 size);
1786 return refs_may_alias_p_1 (&dref, ref, false);
1788 case BUILT_IN_STRCPY_CHK:
1789 case BUILT_IN_STRNCPY_CHK:
1790 case BUILT_IN_MEMCPY_CHK:
1791 case BUILT_IN_MEMMOVE_CHK:
1792 case BUILT_IN_MEMPCPY_CHK:
1793 case BUILT_IN_STPCPY_CHK:
1794 case BUILT_IN_STPNCPY_CHK:
1795 case BUILT_IN_STRCAT_CHK:
1796 case BUILT_IN_STRNCAT_CHK:
1797 case BUILT_IN_MEMSET_CHK:
1799 ao_ref dref;
1800 tree size = NULL_TREE;
1801 /* Don't pass in size for __strncat_chk, as the maximum size
1802 is strlen (dest) + n + 1 instead of n, resp.
1803 n + 1 at dest + strlen (dest), but strlen (dest) isn't
1804 known. */
1805 if (gimple_call_num_args (call) == 4
1806 && DECL_FUNCTION_CODE (callee) != BUILT_IN_STRNCAT_CHK)
1807 size = gimple_call_arg (call, 2);
1808 ao_ref_init_from_ptr_and_size (&dref,
1809 gimple_call_arg (call, 0),
1810 size);
1811 return refs_may_alias_p_1 (&dref, ref, false);
1813 case BUILT_IN_BCOPY:
1815 ao_ref dref;
1816 tree size = gimple_call_arg (call, 2);
1817 ao_ref_init_from_ptr_and_size (&dref,
1818 gimple_call_arg (call, 1),
1819 size);
1820 return refs_may_alias_p_1 (&dref, ref, false);
1822 /* Allocating memory does not have any side-effects apart from
1823 being the definition point for the pointer. */
1824 case BUILT_IN_MALLOC:
1825 case BUILT_IN_CALLOC:
1826 case BUILT_IN_STRDUP:
1827 case BUILT_IN_STRNDUP:
1828 /* Unix98 specifies that errno is set on allocation failure. */
1829 if (flag_errno_math
1830 && targetm.ref_may_alias_errno (ref))
1831 return true;
1832 return false;
1833 case BUILT_IN_STACK_SAVE:
1834 case BUILT_IN_ALLOCA:
1835 case BUILT_IN_ALLOCA_WITH_ALIGN:
1836 case BUILT_IN_ASSUME_ALIGNED:
1837 return false;
1838 /* Freeing memory kills the pointed-to memory. More importantly
1839 the call has to serve as a barrier for moving loads and stores
1840 across it. */
1841 case BUILT_IN_FREE:
1842 case BUILT_IN_VA_END:
1844 tree ptr = gimple_call_arg (call, 0);
1845 return ptr_deref_may_alias_ref_p_1 (ptr, ref);
1847 case BUILT_IN_GAMMA_R:
1848 case BUILT_IN_GAMMAF_R:
1849 case BUILT_IN_GAMMAL_R:
1850 case BUILT_IN_LGAMMA_R:
1851 case BUILT_IN_LGAMMAF_R:
1852 case BUILT_IN_LGAMMAL_R:
1854 tree out = gimple_call_arg (call, 1);
1855 if (ptr_deref_may_alias_ref_p_1 (out, ref))
1856 return true;
1857 if (flag_errno_math)
1858 break;
1859 return false;
1861 case BUILT_IN_FREXP:
1862 case BUILT_IN_FREXPF:
1863 case BUILT_IN_FREXPL:
1864 case BUILT_IN_MODF:
1865 case BUILT_IN_MODFF:
1866 case BUILT_IN_MODFL:
1868 tree out = gimple_call_arg (call, 1);
1869 return ptr_deref_may_alias_ref_p_1 (out, ref);
1871 case BUILT_IN_REMQUO:
1872 case BUILT_IN_REMQUOF:
1873 case BUILT_IN_REMQUOL:
1875 tree out = gimple_call_arg (call, 2);
1876 if (ptr_deref_may_alias_ref_p_1 (out, ref))
1877 return true;
1878 if (flag_errno_math)
1879 break;
1880 return false;
1882 case BUILT_IN_SINCOS:
1883 case BUILT_IN_SINCOSF:
1884 case BUILT_IN_SINCOSL:
1886 tree sin = gimple_call_arg (call, 1);
1887 tree cos = gimple_call_arg (call, 2);
1888 return (ptr_deref_may_alias_ref_p_1 (sin, ref)
1889 || ptr_deref_may_alias_ref_p_1 (cos, ref));
1891 /* __sync_* builtins and some OpenMP builtins act as threading
1892 barriers. */
1893 #undef DEF_SYNC_BUILTIN
1894 #define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
1895 #include "sync-builtins.def"
1896 #undef DEF_SYNC_BUILTIN
1897 case BUILT_IN_GOMP_ATOMIC_START:
1898 case BUILT_IN_GOMP_ATOMIC_END:
1899 case BUILT_IN_GOMP_BARRIER:
1900 case BUILT_IN_GOMP_BARRIER_CANCEL:
1901 case BUILT_IN_GOMP_TASKWAIT:
1902 case BUILT_IN_GOMP_TASKGROUP_END:
1903 case BUILT_IN_GOMP_CRITICAL_START:
1904 case BUILT_IN_GOMP_CRITICAL_END:
1905 case BUILT_IN_GOMP_CRITICAL_NAME_START:
1906 case BUILT_IN_GOMP_CRITICAL_NAME_END:
1907 case BUILT_IN_GOMP_LOOP_END:
1908 case BUILT_IN_GOMP_LOOP_END_CANCEL:
1909 case BUILT_IN_GOMP_ORDERED_START:
1910 case BUILT_IN_GOMP_ORDERED_END:
1911 case BUILT_IN_GOMP_SECTIONS_END:
1912 case BUILT_IN_GOMP_SECTIONS_END_CANCEL:
1913 case BUILT_IN_GOMP_SINGLE_COPY_START:
1914 case BUILT_IN_GOMP_SINGLE_COPY_END:
1915 return true;
1916 default:
1917 /* Fallthru to general call handling. */;
1920 /* Check if base is a global static variable that is not written
1921 by the function. */
1922 if (callee != NULL_TREE
1923 && TREE_CODE (base) == VAR_DECL
1924 && TREE_STATIC (base))
1926 struct cgraph_node *node = cgraph_get_node (callee);
1927 bitmap not_written;
1929 if (node
1930 && (not_written = ipa_reference_get_not_written_global (node))
1931 && bitmap_bit_p (not_written, DECL_UID (base)))
1932 return false;
1935 /* Check if the base variable is call-clobbered. */
1936 if (DECL_P (base))
1937 return pt_solution_includes (gimple_call_clobber_set (call), base);
1938 else if ((TREE_CODE (base) == MEM_REF
1939 || TREE_CODE (base) == TARGET_MEM_REF)
1940 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
1942 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
1943 if (!pi)
1944 return true;
1946 return pt_solutions_intersect (gimple_call_clobber_set (call), &pi->pt);
1949 return true;
1952 /* If the call in statement CALL may clobber the memory reference REF
1953 return true, otherwise return false. */
1955 bool
1956 call_may_clobber_ref_p (gimple call, tree ref)
1958 bool res;
1959 ao_ref r;
1960 ao_ref_init (&r, ref);
1961 res = call_may_clobber_ref_p_1 (call, &r);
1962 if (res)
1963 ++alias_stats.call_may_clobber_ref_p_may_alias;
1964 else
1965 ++alias_stats.call_may_clobber_ref_p_no_alias;
1966 return res;
1970 /* If the statement STMT may clobber the memory reference REF return true,
1971 otherwise return false. */
1973 bool
1974 stmt_may_clobber_ref_p_1 (gimple stmt, ao_ref *ref)
1976 if (is_gimple_call (stmt))
1978 tree lhs = gimple_call_lhs (stmt);
1979 if (lhs
1980 && TREE_CODE (lhs) != SSA_NAME)
1982 ao_ref r;
1983 ao_ref_init (&r, lhs);
1984 if (refs_may_alias_p_1 (ref, &r, true))
1985 return true;
1988 return call_may_clobber_ref_p_1 (stmt, ref);
1990 else if (gimple_assign_single_p (stmt))
1992 tree lhs = gimple_assign_lhs (stmt);
1993 if (TREE_CODE (lhs) != SSA_NAME)
1995 ao_ref r;
1996 ao_ref_init (&r, lhs);
1997 return refs_may_alias_p_1 (ref, &r, true);
2000 else if (gimple_code (stmt) == GIMPLE_ASM)
2001 return true;
2003 return false;
2006 bool
2007 stmt_may_clobber_ref_p (gimple stmt, tree ref)
2009 ao_ref r;
2010 ao_ref_init (&r, ref);
2011 return stmt_may_clobber_ref_p_1 (stmt, &r);
2014 /* If STMT kills the memory reference REF return true, otherwise
2015 return false. */
2017 static bool
2018 stmt_kills_ref_p_1 (gimple stmt, ao_ref *ref)
2020 /* For a must-alias check we need to be able to constrain
2021 the access properly.
2022 FIXME: except for BUILTIN_FREE. */
2023 if (!ao_ref_base (ref)
2024 || ref->max_size == -1)
2025 return false;
/* Case 1: a store through the statement's lhs kills REF if the store
   is guaranteed to execute and its extent covers all of REF.  */
2027 if (gimple_has_lhs (stmt)
2028 && TREE_CODE (gimple_get_lhs (stmt)) != SSA_NAME
2029 /* The assignment is not necessarily carried out if it can throw
2030 and we can catch it in the current function where we could inspect
2031 the previous value.
2032 ??? We only need to care about the RHS throwing. For aggregate
2033 assignments or similar calls and non-call exceptions the LHS
2034 might throw as well. */
2035 && !stmt_can_throw_internal (stmt))
2037 tree base, lhs = gimple_get_lhs (stmt);
/* All offsets and sizes below are in bits.  */
2038 HOST_WIDE_INT size, offset, max_size, ref_offset = ref->offset;
2039 base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
2040 /* We can get MEM[symbol: sZ, index: D.8862_1] here,
2041 so base == ref->base does not always hold. */
2042 if (base != ref->base)
2044 /* If both base and ref->base are MEM_REFs, only compare the
2045 first operand, and if the second operand isn't equal constant,
2046 try to add the offsets into offset and ref_offset. */
2047 if (TREE_CODE (base) == MEM_REF && TREE_CODE (ref->base) == MEM_REF
2048 && TREE_OPERAND (base, 0) == TREE_OPERAND (ref->base, 0))
2050 if (!tree_int_cst_equal (TREE_OPERAND (base, 1),
2051 TREE_OPERAND (ref->base, 1)))
/* Convert the MEM_REF byte offsets to bits (shift by log2 of
   BITS_PER_UNIT; the == 8 test just fast-paths the common case)
   and fold them into the respective access offsets so both are
   relative to the shared pointer operand.  */
2053 double_int off1 = mem_ref_offset (base);
2054 off1 = off1.lshift (BITS_PER_UNIT == 8
2055 ? 3 : exact_log2 (BITS_PER_UNIT));
2056 off1 = off1 + double_int::from_shwi (offset);
2057 double_int off2 = mem_ref_offset (ref->base);
2058 off2 = off2.lshift (BITS_PER_UNIT == 8
2059 ? 3 : exact_log2 (BITS_PER_UNIT));
2060 off2 = off2 + double_int::from_shwi (ref_offset);
/* Only use the combined offsets if neither overflowed a signed
   HOST_WIDE_INT; otherwise give up on this store (size = -1).  */
2061 if (off1.fits_shwi () && off2.fits_shwi ())
2063 offset = off1.to_shwi ();
2064 ref_offset = off2.to_shwi ();
2066 else
2067 size = -1;
2070 else
2071 size = -1;
2073 /* For a must-alias check we need to be able to constrain
2074 the access properly. */
2075 if (size != -1 && size == max_size)
/* The store kills REF if [offset, offset+size) contains
   [ref_offset, ref_offset + ref->max_size).  */
2077 if (offset <= ref_offset
2078 && offset + size >= ref_offset + ref->max_size)
2079 return true;
/* Case 2: certain builtin calls kill the memory they operate on.  */
2083 if (is_gimple_call (stmt))
2085 tree callee = gimple_call_fndecl (stmt);
2086 if (callee != NULL_TREE
2087 && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
2088 switch (DECL_FUNCTION_CODE (callee))
2090 case BUILT_IN_FREE:
/* free (ptr) kills anything based on the exact pointer SSA name
   PTR (the FIXME above: no extent constraint is needed here).  */
2092 tree ptr = gimple_call_arg (stmt, 0);
2093 tree base = ao_ref_base (ref);
2094 if (base && TREE_CODE (base) == MEM_REF
2095 && TREE_OPERAND (base, 0) == ptr)
2096 return true;
2097 break;
2100 case BUILT_IN_MEMCPY:
2101 case BUILT_IN_MEMPCPY:
2102 case BUILT_IN_MEMMOVE:
2103 case BUILT_IN_MEMSET:
2104 case BUILT_IN_MEMCPY_CHK:
2105 case BUILT_IN_MEMPCPY_CHK:
2106 case BUILT_IN_MEMMOVE_CHK:
2107 case BUILT_IN_MEMSET_CHK:
2109 tree dest = gimple_call_arg (stmt, 0);
2110 tree len = gimple_call_arg (stmt, 2);
/* A variable-length write cannot be proven to cover REF.  */
2111 if (!tree_fits_shwi_p (len))
2112 return false;
2113 tree rbase = ref->base;
2114 double_int roffset = double_int::from_shwi (ref->offset);
2115 ao_ref dref;
2116 ao_ref_init_from_ptr_and_size (&dref, dest, len);
2117 tree base = ao_ref_base (&dref);
2118 double_int offset = double_int::from_shwi (dref.offset);
2119 double_int bpu = double_int::from_uhwi (BITS_PER_UNIT);
2120 if (!base || dref.size == -1)
2121 return false;
/* If the destination is a MEM_REF, strip it from both sides so we
   compare the underlying pointers, folding the MEM_REF byte
   offsets (scaled to bits via BPU) into the bit offsets.  */
2122 if (TREE_CODE (base) == MEM_REF)
2124 if (TREE_CODE (rbase) != MEM_REF)
2125 return false;
2126 // Compare pointers.
2127 offset += bpu * mem_ref_offset (base);
2128 roffset += bpu * mem_ref_offset (rbase);
2129 base = TREE_OPERAND (base, 0);
2130 rbase = TREE_OPERAND (rbase, 0);
2132 if (base == rbase)
/* ref->max_size != -1 was checked at function entry, so the
   from_uhwi conversion sees a nonnegative value.  The write kills
   REF if [offset, offset+size) covers [roffset, roffset+rsize).  */
2134 double_int size = bpu * tree_to_double_int (len);
2135 double_int rsize = double_int::from_uhwi (ref->max_size);
2136 if (offset.sle (roffset)
2137 && (roffset + rsize).sle (offset + size))
2138 return true;
2140 break;
2143 case BUILT_IN_VA_END:
/* va_end (ap) kills the va_list object AP itself when the argument
   is a direct address of it.  */
2145 tree ptr = gimple_call_arg (stmt, 0);
2146 if (TREE_CODE (ptr) == ADDR_EXPR)
2148 tree base = ao_ref_base (ref);
2149 if (TREE_OPERAND (ptr, 0) == base)
2150 return true;
2152 break;
/* All other builtins: fall through to the conservative answer.  */
2155 default:;
2158 return false;
2161 bool
2162 stmt_kills_ref_p (gimple stmt, tree ref)
2164 ao_ref r;
2165 ao_ref_init (&r, ref);
2166 return stmt_kills_ref_p_1 (stmt, &r);
2170 /* Walk the virtual use-def chain of VUSE until hitting the virtual operand
2171 TARGET or a statement clobbering the memory reference REF in which
2172 case false is returned. The walk starts with VUSE, one argument of PHI. */
2174 static bool
2175 maybe_skip_until (gimple phi, tree target, ao_ref *ref,
2176 tree vuse, unsigned int *cnt, bitmap *visited,
2177 bool abort_on_visited)
2179 basic_block bb = gimple_bb (phi);
/* Lazily allocate the visited bitmap; it is shared with the whole
   PHI walk started in get_continuation_for_phi.  */
2181 if (!*visited)
2182 *visited = BITMAP_ALLOC (NULL);
/* Mark PHI itself visited so recursive walks terminate.  */
2184 bitmap_set_bit (*visited, SSA_NAME_VERSION (PHI_RESULT (phi)))
2186 /* Walk until we hit the target. */
2187 while (vuse != target)
2189 gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
2190 /* Recurse for PHI nodes. */
2191 if (gimple_code (def_stmt) == GIMPLE_PHI)
2193 /* An already visited PHI node ends the walk successfully. */
2194 if (bitmap_bit_p (*visited, SSA_NAME_VERSION (PHI_RESULT (def_stmt))))
2195 return !abort_on_visited;
2196 vuse = get_continuation_for_phi (def_stmt, ref, cnt,
2197 visited, abort_on_visited);
2198 if (!vuse)
2199 return false;
2200 continue;
2202 else if (gimple_nop_p (def_stmt))
/* Reached the function entry without meeting TARGET: fail.  */
2203 return false;
2204 else
2206 /* A clobbering statement or the end of the IL ends it failing. */
/* Account each alias disambiguation in *CNT so callers can bound
   the amount of work performed.  */
2207 ++*cnt;
2208 if (stmt_may_clobber_ref_p_1 (def_stmt, ref))
2209 return false;
2211 /* If we reach a new basic-block see if we already skipped it
2212 in a previous walk that ended successfully. */
2213 if (gimple_bb (def_stmt) != bb)
2215 if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (vuse)))
2216 return !abort_on_visited;
2217 bb = gimple_bb (def_stmt);
2219 vuse = gimple_vuse (def_stmt);
/* TARGET was reached without encountering a clobber.  */
2221 return true;
2224 /* For two PHI arguments ARG0 and ARG1 try to skip non-aliasing code
2225 until we hit the phi argument definition that dominates the other one.
2226 Return that, or NULL_TREE if there is no such definition. */
2228 static tree
2229 get_continuation_for_phi_1 (gimple phi, tree arg0, tree arg1,
2230 ao_ref *ref, unsigned int *cnt,
2231 bitmap *visited, bool abort_on_visited)
2233 gimple def0 = SSA_NAME_DEF_STMT (arg0);
2234 gimple def1 = SSA_NAME_DEF_STMT (arg1);
2235 tree common_vuse;
/* Trivially equal arguments need no walking.  */
2237 if (arg0 == arg1)
2238 return arg0;
/* If ARG0's definition dominates ARG1's (a default def dominates
   everything), try to walk from ARG1 back to ARG0 skipping only
   non-clobbering statements.  */
2239 else if (gimple_nop_p (def0)
2240 || (!gimple_nop_p (def1)
2241 && dominated_by_p (CDI_DOMINATORS,
2242 gimple_bb (def1), gimple_bb (def0))))
2244 if (maybe_skip_until (phi, arg0, ref, arg1, cnt,
2245 visited, abort_on_visited))
2246 return arg0;
/* Symmetric case: ARG1's definition dominates ARG0's.  */
2248 else if (gimple_nop_p (def1)
2249 || dominated_by_p (CDI_DOMINATORS,
2250 gimple_bb (def0), gimple_bb (def1)))
2252 if (maybe_skip_until (phi, arg1, ref, arg0, cnt,
2253 visited, abort_on_visited))
2254 return arg1;
2256 /* Special case of a diamond:
2257 MEM_1 = ...
2258 goto (cond) ? L1 : L2
2259 L1: store1 = ... #MEM_2 = vuse(MEM_1)
2260 goto L3
2261 L2: store2 = ... #MEM_3 = vuse(MEM_1)
2262 L3: MEM_4 = PHI<MEM_2, MEM_3>
2263 We were called with the PHI at L3, MEM_2 and MEM_3 don't
2264 dominate each other, but still we can easily skip this PHI node
2265 if we recognize that the vuse MEM operand is the same for both,
2266 and that we can skip both statements (they don't clobber us).
2267 This is still linear. Don't use maybe_skip_until, that might
2268 potentially be slow. */
2269 else if ((common_vuse = gimple_vuse (def0))
2270 && common_vuse == gimple_vuse (def1))
/* Two disambiguations are done below, hence *cnt += 2.  */
2272 *cnt += 2;
2273 if (!stmt_may_clobber_ref_p_1 (def0, ref)
2274 && !stmt_may_clobber_ref_p_1 (def1, ref))
2275 return common_vuse;
/* No dominating definition could be reached without a clobber.  */
2278 return NULL_TREE;
2282 /* Starting from a PHI node for the virtual operand of the memory reference
2283 REF find a continuation virtual operand that allows to continue walking
2284 statements dominating PHI skipping only statements that cannot possibly
2285 clobber REF. Increments *CNT for each alias disambiguation done.
2286 Returns NULL_TREE if no suitable virtual operand can be found. */
2288 tree
2289 get_continuation_for_phi (gimple phi, ao_ref *ref,
2290 unsigned int *cnt, bitmap *visited,
2291 bool abort_on_visited)
2293 unsigned nargs = gimple_phi_num_args (phi);
2295 /* Through a single-argument PHI we can simply look through. */
2296 if (nargs == 1)
2297 return PHI_ARG_DEF (phi, 0);
2299 /* For two or more arguments try to pairwise skip non-aliasing code
2300 until we hit the phi argument definition that dominates the other one. */
2301 else if (nargs >= 2)
2303 tree arg0, arg1;
2304 unsigned i;
2306 /* Find a candidate for the virtual operand which definition
2307 dominates those of all others. */
/* A default def is defined at function entry and thus dominates
   everything - it wins outright and ends the scan early.  */
2308 arg0 = PHI_ARG_DEF (phi, 0);
2309 if (!SSA_NAME_IS_DEFAULT_DEF (arg0))
2310 for (i = 1; i < nargs; ++i)
2312 arg1 = PHI_ARG_DEF (phi, i);
2313 if (SSA_NAME_IS_DEFAULT_DEF (arg1))
2315 arg0 = arg1;
2316 break;
/* Otherwise keep whichever definition is deeper up the dominator
   tree (ARG1 replaces ARG0 when ARG1's block dominates it).  */
2318 if (dominated_by_p (CDI_DOMINATORS,
2319 gimple_bb (SSA_NAME_DEF_STMT (arg0)),
2320 gimple_bb (SSA_NAME_DEF_STMT (arg1))))
2321 arg0 = arg1;
2324 /* Then pairwise reduce against the found candidate. */
/* Each reduction either confirms the candidate (or a diamond's
   common vuse) or fails the whole lookup with NULL_TREE.  */
2325 for (i = 0; i < nargs; ++i)
2327 arg1 = PHI_ARG_DEF (phi, i);
2328 arg0 = get_continuation_for_phi_1 (phi, arg0, arg1, ref,
2329 cnt, visited, abort_on_visited);
2330 if (!arg0)
2331 return NULL_TREE;
2334 return arg0;
/* nargs == 0 does not occur for a reachable PHI; be conservative.  */
2337 return NULL_TREE;
2340 /* Based on the memory reference REF and its virtual use VUSE call
2341 WALKER for each virtual use that is equivalent to VUSE, including VUSE
2342 itself. That is, for each virtual use for which its defining statement
2343 does not clobber REF.
2345 WALKER is called with REF, the current virtual use and DATA. If
2346 WALKER returns non-NULL the walk stops and its result is returned.
2347 At the end of a non-successful walk NULL is returned.
2349 TRANSLATE if non-NULL is called with a pointer to REF, the virtual
2350 use which definition is a statement that may clobber REF and DATA.
2351 If TRANSLATE returns (void *)-1 the walk stops and NULL is returned.
2352 If TRANSLATE returns non-NULL the walk stops and its result is returned.
2353 If TRANSLATE returns NULL the walk continues and TRANSLATE is supposed
2354 to adjust REF and *DATA to make that valid.
2356 TODO: Cache the vector of equivalent vuses per ref, vuse pair. */
2358 void *
2359 walk_non_aliased_vuses (ao_ref *ref, tree vuse,
2360 void *(*walker)(ao_ref *, tree, unsigned int, void *),
2361 void *(*translate)(ao_ref *, tree, void *), void *data)
2363 bitmap visited = NULL;
2364 void *res;
/* CNT counts alias disambiguations; it is passed to WALKER so the
   caller can abort an overly long walk.  */
2365 unsigned int cnt = 0;
/* Set once TRANSLATE adjusted REF; makes PHI walks abort on
   re-visited nodes since the walked-for reference changed.  */
2366 bool translated = false;
2368 timevar_push (TV_ALIAS_STMT_WALK);
2372 gimple def_stmt;
2374 /* ??? Do we want to account this to TV_ALIAS_STMT_WALK? */
2375 res = (*walker) (ref, vuse, cnt, data);
2376 /* Abort walk. */
2377 if (res == (void *)-1)
2379 res = NULL;
2380 break;
2382 /* Lookup succeeded. */
2383 else if (res != NULL)
2384 break;
/* Advance along the virtual use-def chain.  */
2386 def_stmt = SSA_NAME_DEF_STMT (vuse);
2387 if (gimple_nop_p (def_stmt))
2388 break;
2389 else if (gimple_code (def_stmt) == GIMPLE_PHI)
2390 vuse = get_continuation_for_phi (def_stmt, ref, &cnt,
2391 &visited, translated)
2392 else
2394 cnt++;
2395 if (stmt_may_clobber_ref_p_1 (def_stmt, ref))
/* Without a translator a clobber ends the walk unsuccessfully.  */
2397 if (!translate)
2398 break;
2399 res = (*translate) (ref, vuse, data);
2400 /* Failed lookup and translation. */
2401 if (res == (void *)-1)
2403 res = NULL;
2404 break;
2406 /* Lookup succeeded. */
2407 else if (res != NULL)
2408 break;
2409 /* Translation succeeded, continue walking. */
2410 translated = true;
2412 vuse = gimple_vuse (def_stmt);
/* get_continuation_for_phi may return NULL_TREE and end the loop.  */
2415 while (vuse);
2417 if (visited)
2418 BITMAP_FREE (visited);
2420 timevar_pop (TV_ALIAS_STMT_WALK);
2422 return res;
2426 /* Based on the memory reference REF call WALKER for each vdef which
2427 defining statement may clobber REF, starting with VDEF. If REF
2428 is NULL_TREE, each defining statement is visited.
2430 WALKER is called with REF, the current vdef and DATA. If WALKER
2431 returns true the walk is stopped, otherwise it continues.
2433 At PHI nodes walk_aliased_vdefs forks into one walk for reach
2434 PHI argument (but only one walk continues on merge points), the
2435 return value is true if any of the walks was successful.
2437 The function returns the number of statements walked. */
2439 static unsigned int
2440 walk_aliased_vdefs_1 (ao_ref *ref, tree vdef,
2441 bool (*walker)(ao_ref *, tree, void *), void *data,
2442 bitmap *visited, unsigned int cnt)
2446 gimple def_stmt = SSA_NAME_DEF_STMT (vdef);
/* The visited bitmap is only allocated once a PHI forks the walk
   (below); until then no deduplication is needed on the single
   linear chain.  */
2448 if (*visited
2449 && !bitmap_set_bit (*visited, SSA_NAME_VERSION (vdef)))
2450 return cnt;
2452 if (gimple_nop_p (def_stmt))
2453 return cnt;
2454 else if (gimple_code (def_stmt) == GIMPLE_PHI)
2456 unsigned i;
2457 if (!*visited)
2458 *visited = BITMAP_ALLOC (NULL);
/* Fork: recurse into every PHI argument, accumulating the count of
   statements walked across all branches.  */
2459 for (i = 0; i < gimple_phi_num_args (def_stmt); ++i)
2460 cnt += walk_aliased_vdefs_1 (ref, gimple_phi_arg_def (def_stmt, i),
2461 walker, data, visited, 0);
2462 return cnt;
2465 /* ??? Do we want to account this to TV_ALIAS_STMT_WALK? */
2466 cnt++;
/* With REF == NULL every definition is reported; otherwise only
   those that may clobber REF.  A true return from WALKER stops
   this branch of the walk.  */
2467 if ((!ref
2468 || stmt_may_clobber_ref_p_1 (def_stmt, ref))
2469 && (*walker) (ref, vdef, data))
2470 return cnt;
/* Iterate up the virtual use-def chain.  */
2472 vdef = gimple_vuse (def_stmt);
2474 while (1);
2477 unsigned int
2478 walk_aliased_vdefs (ao_ref *ref, tree vdef,
2479 bool (*walker)(ao_ref *, tree, void *), void *data,
2480 bitmap *visited)
2482 bitmap local_visited = NULL;
2483 unsigned int ret;
2485 timevar_push (TV_ALIAS_STMT_WALK);
2487 ret = walk_aliased_vdefs_1 (ref, vdef, walker, data,
2488 visited ? visited : &local_visited, 0);
2489 if (local_visited)
2490 BITMAP_FREE (local_visited);
2492 timevar_pop (TV_ALIAS_STMT_WALK);
2494 return ret;