/* Alias analysis for trees.
   Copyright (C) 2004-2014 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "basic-block.h"
#include "timevar.h"	/* for TV_ALIAS_STMT_WALK */
#include "langhooks.h"
#include "tree-pretty-print.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "gimple-ssa.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "tree-inline.h"
#include "alloc-pool.h"
#include "tree-ssa-alias.h"
#include "ipa-reference.h"
/* Broad overview of how alias analysis on gimple works:

   Statements clobbering or using memory are linked through the
   virtual operand factored use-def chain.  The virtual operand
   is unique per function, its symbol is accessible via gimple_vop (cfun).
   Virtual operands are used for efficiently walking memory statements
   in the gimple IL and are useful for things like value-numbering as
   a generation count for memory references.

   SSA_NAME pointers may have associated points-to information
   accessible via the SSA_NAME_PTR_INFO macro.  Flow-insensitive
   points-to information is (re-)computed by the TODO_rebuild_alias
   pass manager todo.  Points-to information is also used for more
   precise tracking of call-clobbered and call-used variables and
   related disambiguations.

   This file contains functions for disambiguating memory references,
   the so-called alias oracle, and tools for walking the gimple IL.

   The main alias-oracle entry-points are

   bool stmt_may_clobber_ref_p (gimple, tree)

     This function queries if a statement may invalidate (parts of)
     the memory designated by the reference tree argument.

   bool ref_maybe_used_by_stmt_p (gimple, tree)

     This function queries if a statement may need (parts of) the
     memory designated by the reference tree argument.

   There are variants of these functions that only handle the call
   part of a statement, call_may_clobber_ref_p and ref_maybe_used_by_call_p.
   Note that these do not disambiguate against a possible call lhs.

   bool refs_may_alias_p (tree, tree)

     This function tries to disambiguate two reference trees.

   bool ptr_deref_may_alias_global_p (tree)

     This function queries if dereferencing a pointer variable may
     alias global memory.

   More low-level disambiguators are available and documented in
   this file.  Low-level disambiguators dealing with points-to
   information are in tree-ssa-structalias.c.  */
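
/* Illustrative usage sketch (not part of the original sources): a pass
   that wants to know whether statement STMT interferes with a memory
   reference REF taken from some other statement would typically combine
   the entry points above like this:

     ao_ref r;
     ao_ref_init (&r, ref);
     bool interferes = (stmt_may_clobber_ref_p_1 (stmt, &r)
			|| ref_maybe_used_by_stmt_p (stmt, ref));

   The ao_ref form avoids re-decomposing REF into base, offset and size
   for every query.  */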
/* Query statistics for the different low-level disambiguators.
   A high-level query may trigger multiple of them.  */

static struct {
  unsigned HOST_WIDE_INT refs_may_alias_p_may_alias;
  unsigned HOST_WIDE_INT refs_may_alias_p_no_alias;
  unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_may_alias;
  unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_no_alias;
  unsigned HOST_WIDE_INT call_may_clobber_ref_p_may_alias;
  unsigned HOST_WIDE_INT call_may_clobber_ref_p_no_alias;
} alias_stats;
void
dump_alias_stats (FILE *s)
{
  fprintf (s, "\nAlias oracle query stats:\n");
  fprintf (s, "  refs_may_alias_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.refs_may_alias_p_no_alias,
	   alias_stats.refs_may_alias_p_no_alias
	   + alias_stats.refs_may_alias_p_may_alias);
  fprintf (s, "  ref_maybe_used_by_call_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.ref_maybe_used_by_call_p_no_alias,
	   alias_stats.ref_maybe_used_by_call_p_no_alias
	   + alias_stats.ref_maybe_used_by_call_p_may_alias);
  fprintf (s, "  call_may_clobber_ref_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.call_may_clobber_ref_p_no_alias,
	   alias_stats.call_may_clobber_ref_p_no_alias
	   + alias_stats.call_may_clobber_ref_p_may_alias);
}
/* Return true, if dereferencing PTR may alias with a global variable.  */

bool
ptr_deref_may_alias_global_p (tree ptr)
{
  struct ptr_info_def *pi;

  /* If we end up with a pointer constant here that may point
     to global memory.  */
  if (TREE_CODE (ptr) != SSA_NAME)
    return true;

  pi = SSA_NAME_PTR_INFO (ptr);

  /* If we do not have points-to information for this variable,
     we have to punt.  */
  if (!pi)
    return true;

  /* ??? This does not use TBAA to prune globals ptr may not access.  */
  return pt_solution_includes_global (&pi->pt);
}
/* Return true if dereferencing PTR may alias DECL.
   The caller is responsible for applying TBAA to see if PTR
   may access DECL at all.  */

static bool
ptr_deref_may_alias_decl_p (tree ptr, tree decl)
{
  struct ptr_info_def *pi;

  /* Conversions are irrelevant for points-to information and
     data-dependence analysis can feed us those.  */
  STRIP_NOPS (ptr);

  /* Anything we do not explicitly handle aliases.  */
  if ((TREE_CODE (ptr) != SSA_NAME
       && TREE_CODE (ptr) != ADDR_EXPR
       && TREE_CODE (ptr) != POINTER_PLUS_EXPR)
      || !POINTER_TYPE_P (TREE_TYPE (ptr))
      || (TREE_CODE (decl) != VAR_DECL
	  && TREE_CODE (decl) != PARM_DECL
	  && TREE_CODE (decl) != RESULT_DECL))
    return true;

  /* Disregard pointer offsetting.  */
  if (TREE_CODE (ptr) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr = TREE_OPERAND (ptr, 0);
	}
      while (TREE_CODE (ptr) == POINTER_PLUS_EXPR);
      return ptr_deref_may_alias_decl_p (ptr, decl);
    }

  /* ADDR_EXPR pointers either just offset another pointer or directly
     specify the pointed-to set.  */
  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	ptr = TREE_OPERAND (base, 0);
      else if (base
	       && DECL_P (base))
	return base == decl;
      else if (base
	       && CONSTANT_CLASS_P (base))
	return false;
      else
	return true;
    }

  /* Non-aliased variables cannot be pointed to.  */
  if (!may_be_aliased (decl))
    return false;

  /* If we do not have useful points-to information for this pointer
     we cannot disambiguate anything else.  */
  pi = SSA_NAME_PTR_INFO (ptr);
  if (!pi)
    return true;

  return pt_solution_includes (&pi->pt, decl);
}
/* Return true if dereferenced PTR1 and PTR2 may alias.
   The caller is responsible for applying TBAA to see if accesses
   through PTR1 and PTR2 may conflict at all.  */

bool
ptr_derefs_may_alias_p (tree ptr1, tree ptr2)
{
  struct ptr_info_def *pi1, *pi2;

  /* Conversions are irrelevant for points-to information and
     data-dependence analysis can feed us those.  */
  STRIP_NOPS (ptr1);
  STRIP_NOPS (ptr2);

  /* Disregard pointer offsetting.  */
  if (TREE_CODE (ptr1) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr1 = TREE_OPERAND (ptr1, 0);
	}
      while (TREE_CODE (ptr1) == POINTER_PLUS_EXPR);
      return ptr_derefs_may_alias_p (ptr1, ptr2);
    }
  if (TREE_CODE (ptr2) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr2 = TREE_OPERAND (ptr2, 0);
	}
      while (TREE_CODE (ptr2) == POINTER_PLUS_EXPR);
      return ptr_derefs_may_alias_p (ptr1, ptr2);
    }

  /* ADDR_EXPR pointers either just offset another pointer or directly
     specify the pointed-to set.  */
  if (TREE_CODE (ptr1) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr1, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	return ptr_derefs_may_alias_p (TREE_OPERAND (base, 0), ptr2);
      else if (base
	       && DECL_P (base))
	return ptr_deref_may_alias_decl_p (ptr2, base);
      else
	return true;
    }
  if (TREE_CODE (ptr2) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr2, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	return ptr_derefs_may_alias_p (ptr1, TREE_OPERAND (base, 0));
      else if (base
	       && DECL_P (base))
	return ptr_deref_may_alias_decl_p (ptr1, base);
      else
	return true;
    }

  /* From here we require SSA name pointers.  Anything else aliases.  */
  if (TREE_CODE (ptr1) != SSA_NAME
      || TREE_CODE (ptr2) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (ptr1))
      || !POINTER_TYPE_P (TREE_TYPE (ptr2)))
    return true;

  /* We may end up with two empty points-to solutions for two same pointers.
     In this case we still want to say both pointers alias, so shortcut
     that here.  */
  if (ptr1 == ptr2)
    return true;

  /* If we do not have useful points-to information for either pointer
     we cannot disambiguate anything else.  */
  pi1 = SSA_NAME_PTR_INFO (ptr1);
  pi2 = SSA_NAME_PTR_INFO (ptr2);
  if (!pi1 || !pi2)
    return true;

  /* ??? This does not use TBAA to prune decls from the intersection
     that not both pointers may access.  */
  return pt_solutions_intersect (&pi1->pt, &pi2->pt);
}
/* Return true if dereferencing PTR may alias *REF.
   The caller is responsible for applying TBAA to see if PTR
   may access *REF at all.  */

static bool
ptr_deref_may_alias_ref_p_1 (tree ptr, ao_ref *ref)
{
  tree base = ao_ref_base (ref);

  if (TREE_CODE (base) == MEM_REF
      || TREE_CODE (base) == TARGET_MEM_REF)
    return ptr_derefs_may_alias_p (ptr, TREE_OPERAND (base, 0));
  else if (DECL_P (base))
    return ptr_deref_may_alias_decl_p (ptr, base);

  return true;
}
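
/* Illustrative sketch (not part of the original file): to ask whether a
   dereference of pointer P may touch the memory designated by a
   reference tree REF, one builds an ao_ref and uses the helper above:

     ao_ref r;
     ao_ref_init (&r, ref);
     bool may_touch = ptr_deref_may_alias_ref_p_1 (p, &r);

   Note that this helper deliberately ignores TBAA; callers apply
   type-based rules separately.  */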
/* Return true whether REF may refer to global memory.  */

bool
ref_may_alias_global_p (tree ref)
{
  tree base = get_base_address (ref);
  if (DECL_P (base))
    return is_global_var (base);
  else if (TREE_CODE (base) == MEM_REF
	   || TREE_CODE (base) == TARGET_MEM_REF)
    return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
  return true;
}
/* Return true whether STMT may clobber global memory.  */

bool
stmt_may_clobber_global_p (gimple stmt)
{
  tree lhs;

  if (!gimple_vdef (stmt))
    return false;

  /* ??? We can ask the oracle whether an artificial pointer
     dereference with a pointer with points-to information covering
     all global memory (what about non-address taken memory?) may be
     clobbered by this call.  As there is at the moment no convenient
     way of doing that without generating garbage do some manual
     checking instead.
     ??? We could make a NULL ao_ref argument to the various
     predicates special, meaning any global memory.  */

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      lhs = gimple_assign_lhs (stmt);
      return (TREE_CODE (lhs) != SSA_NAME
	      && ref_may_alias_global_p (lhs));
    case GIMPLE_CALL:
      return true;
    default:
      return true;
    }
}
/* Dump alias information on FILE.  */

void
dump_alias_info (FILE *file)
{
  unsigned i;
  const char *funcname
    = lang_hooks.decl_printable_name (current_function_decl, 2);
  tree var;

  fprintf (file, "\n\nAlias information for %s\n\n", funcname);

  fprintf (file, "Aliased symbols\n\n");

  FOR_EACH_LOCAL_DECL (cfun, i, var)
    {
      if (may_be_aliased (var))
	dump_variable (file, var);
    }

  fprintf (file, "\nCall clobber information\n");

  fprintf (file, "\nESCAPED");
  dump_points_to_solution (file, &cfun->gimple_df->escaped);

  fprintf (file, "\n\nFlow-insensitive points-to information\n\n");

  for (i = 1; i < num_ssa_names; i++)
    {
      tree ptr = ssa_name (i);
      struct ptr_info_def *pi;

      if (ptr == NULL_TREE
	  || !POINTER_TYPE_P (TREE_TYPE (ptr))
	  || SSA_NAME_IN_FREE_LIST (ptr))
	continue;

      pi = SSA_NAME_PTR_INFO (ptr);
      if (pi)
	dump_points_to_info_for (file, ptr);
    }

  fprintf (file, "\n");
}
/* Dump alias information on stderr.  */

DEBUG_FUNCTION void
debug_alias_info (void)
{
  dump_alias_info (stderr);
}
/* Dump the points-to set *PT into FILE.  */

void
dump_points_to_solution (FILE *file, struct pt_solution *pt)
{
  if (pt->anything)
    fprintf (file, ", points-to anything");

  if (pt->nonlocal)
    fprintf (file, ", points-to non-local");

  if (pt->escaped)
    fprintf (file, ", points-to escaped");

  if (pt->ipa_escaped)
    fprintf (file, ", points-to unit escaped");

  if (pt->null)
    fprintf (file, ", points-to NULL");

  if (pt->vars)
    {
      fprintf (file, ", points-to vars: ");
      dump_decl_set (file, pt->vars);
      if (pt->vars_contains_nonlocal
	  && pt->vars_contains_escaped_heap)
	fprintf (file, " (nonlocal, escaped heap)");
      else if (pt->vars_contains_nonlocal
	       && pt->vars_contains_escaped)
	fprintf (file, " (nonlocal, escaped)");
      else if (pt->vars_contains_nonlocal)
	fprintf (file, " (nonlocal)");
      else if (pt->vars_contains_escaped_heap)
	fprintf (file, " (escaped heap)");
      else if (pt->vars_contains_escaped)
	fprintf (file, " (escaped)");
    }
}
/* Unified dump function for pt_solution.  */

DEBUG_FUNCTION void
debug (pt_solution &ref)
{
  dump_points_to_solution (stderr, &ref);
  fprintf (stderr, "\n");
}

DEBUG_FUNCTION void
debug (pt_solution *ptr)
{
  if (ptr)
    debug (*ptr);
  else
    fprintf (stderr, "<nil>\n");
}
/* Dump points-to information for SSA_NAME PTR into FILE.  */

void
dump_points_to_info_for (FILE *file, tree ptr)
{
  struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);

  print_generic_expr (file, ptr, dump_flags);

  if (pi)
    dump_points_to_solution (file, &pi->pt);
  else
    fprintf (file, ", points-to anything");

  fprintf (file, "\n");
}
/* Dump points-to information for VAR into stderr.  */

DEBUG_FUNCTION void
debug_points_to_info_for (tree var)
{
  dump_points_to_info_for (stderr, var);
}
/* Initializes the alias-oracle reference representation *R from REF.  */

void
ao_ref_init (ao_ref *r, tree ref)
{
  r->ref = ref;
  r->base = NULL_TREE;
  r->offset = 0;
  r->size = -1;
  r->max_size = -1;
  r->ref_alias_set = -1;
  r->base_alias_set = -1;
  r->volatile_p = ref ? TREE_THIS_VOLATILE (ref) : false;
}
/* Returns the base object of the memory reference *REF.  */

tree
ao_ref_base (ao_ref *ref)
{
  if (ref->base)
    return ref->base;
  ref->base = get_ref_base_and_extent (ref->ref, &ref->offset, &ref->size,
				       &ref->max_size);
  return ref->base;
}
/* Returns the base object alias set of the memory reference *REF.  */

static alias_set_type
ao_ref_base_alias_set (ao_ref *ref)
{
  tree base_ref;
  if (ref->base_alias_set != -1)
    return ref->base_alias_set;
  if (!ref->ref)
    return 0;
  base_ref = ref->ref;
  while (handled_component_p (base_ref))
    base_ref = TREE_OPERAND (base_ref, 0);
  ref->base_alias_set = get_alias_set (base_ref);
  return ref->base_alias_set;
}
/* Returns the reference alias set of the memory reference *REF.  */

alias_set_type
ao_ref_alias_set (ao_ref *ref)
{
  if (ref->ref_alias_set != -1)
    return ref->ref_alias_set;
  ref->ref_alias_set = get_alias_set (ref->ref);
  return ref->ref_alias_set;
}
/* Init an alias-oracle reference representation from a gimple pointer
   PTR and a gimple size SIZE in bytes.  If SIZE is NULL_TREE then the
   size is assumed to be unknown.  The access is assumed to be only
   to or after of the pointer target, not before it.  */

void
ao_ref_init_from_ptr_and_size (ao_ref *ref, tree ptr, tree size)
{
  HOST_WIDE_INT t, size_hwi, extra_offset = 0;
  ref->ref = NULL_TREE;
  if (TREE_CODE (ptr) == SSA_NAME)
    {
      gimple stmt = SSA_NAME_DEF_STMT (ptr);
      if (gimple_assign_single_p (stmt)
	  && gimple_assign_rhs_code (stmt) == ADDR_EXPR)
	ptr = gimple_assign_rhs1 (stmt);
      else if (is_gimple_assign (stmt)
	       && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
	       && TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST)
	{
	  ptr = gimple_assign_rhs1 (stmt);
	  extra_offset = BITS_PER_UNIT
			 * int_cst_value (gimple_assign_rhs2 (stmt));
	}
    }

  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      ref->base = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &t);
      if (ref->base)
	ref->offset = BITS_PER_UNIT * t;
      else
	{
	  size = NULL_TREE;
	  ref->offset = 0;
	  ref->base = get_base_address (TREE_OPERAND (ptr, 0));
	}
    }
  else
    {
      ref->base = build2 (MEM_REF, char_type_node,
			  ptr, null_pointer_node);
      ref->offset = 0;
    }
  ref->offset += extra_offset;
  if (size
      && tree_fits_shwi_p (size)
      && (size_hwi = tree_to_shwi (size)) <= HOST_WIDE_INT_MAX / BITS_PER_UNIT)
    ref->max_size = ref->size = size_hwi * BITS_PER_UNIT;
  else
    ref->max_size = ref->size = -1;
  ref->ref_alias_set = 0;
  ref->base_alias_set = 0;
  ref->volatile_p = false;
}
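
/* Hedged usage sketch (not part of the original file): when modelling
   the destination of a call like memcpy (dest, src, n), the builtin
   handling further down builds an ao_ref from the pointer argument and
   the size argument roughly like this:

     ao_ref dref;
     ao_ref_init_from_ptr_and_size (&dref,
				    gimple_call_arg (call, 0),
				    gimple_call_arg (call, 2));
     if (refs_may_alias_p_1 (&dref, ref, false))
       return true;

   Passing NULL_TREE instead of the size argument leaves the access size
   unknown.  */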
/* Return 1 if TYPE1 and TYPE2 are to be considered equivalent for the
   purpose of TBAA.  Return 0 if they are distinct and -1 if we cannot
   decide.  */

static int
same_type_for_tbaa (tree type1, tree type2)
{
  type1 = TYPE_MAIN_VARIANT (type1);
  type2 = TYPE_MAIN_VARIANT (type2);

  /* If we would have to do structural comparison bail out.  */
  if (TYPE_STRUCTURAL_EQUALITY_P (type1)
      || TYPE_STRUCTURAL_EQUALITY_P (type2))
    return -1;

  /* Compare the canonical types.  */
  if (TYPE_CANONICAL (type1) == TYPE_CANONICAL (type2))
    return 1;

  /* ??? Array types are not properly unified in all cases as we have
     spurious changes in the index types for example.  Removing this
     causes all sorts of problems with the Fortran frontend.  */
  if (TREE_CODE (type1) == ARRAY_TYPE
      && TREE_CODE (type2) == ARRAY_TYPE)
    return -1;

  /* ??? In Ada, an lvalue of an unconstrained type can be used to access an
     object of one of its constrained subtypes, e.g. when a function with an
     unconstrained parameter passed by reference is called on an object and
     inlined.  But, even in the case of a fixed size, type and subtypes are
     not equivalent enough as to share the same TYPE_CANONICAL, since this
     would mean that conversions between them are useless, whereas they are
     not (e.g. type and subtypes can have different modes).  So, in the end,
     they are only guaranteed to have the same alias set.  */
  if (get_alias_set (type1) == get_alias_set (type2))
    return -1;

  /* The types are known to be not equal.  */
  return 0;
}
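
/* Illustrative sketch (not part of the original sources) of how the
   tri-state result is consumed; compare the access-path walks in
   aliasing_component_refs_p below:

     int same_p = same_type_for_tbaa (TREE_TYPE (*refp), type1);
     if (same_p == -1)
       return true;
     else if (same_p == 1)
       return ranges_overlap_p (offset1, max_size1, offset2, max_size2);

   with same_p == 0 meaning "keep stripping components and try again".  */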
/* Determine if the two component references REF1 and REF2 which are
   based on access types TYPE1 and TYPE2 and of which at least one is based
   on an indirect reference may alias.  REF2 is the only one that can
   be a decl in which case REF2_IS_DECL is true.
   REF1_ALIAS_SET, BASE1_ALIAS_SET, REF2_ALIAS_SET and BASE2_ALIAS_SET
   are the respective alias sets.  */

static bool
aliasing_component_refs_p (tree ref1,
			   alias_set_type ref1_alias_set,
			   alias_set_type base1_alias_set,
			   HOST_WIDE_INT offset1, HOST_WIDE_INT max_size1,
			   tree ref2,
			   alias_set_type ref2_alias_set,
			   alias_set_type base2_alias_set,
			   HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2,
			   bool ref2_is_decl)
{
  /* If one reference is a component references through pointers try to find a
     common base and apply offset based disambiguation.  This handles
     for example
       struct A { int i; int j; } *q;
       struct B { struct A a; int k; } *p;
     disambiguating q->i and p->a.j.  */
  tree base1, base2;
  tree type1, type2;
  tree *refp;
  int same_p;

  /* Choose bases and base types to search for.  */
  base1 = ref1;
  while (handled_component_p (base1))
    base1 = TREE_OPERAND (base1, 0);
  type1 = TREE_TYPE (base1);
  base2 = ref2;
  while (handled_component_p (base2))
    base2 = TREE_OPERAND (base2, 0);
  type2 = TREE_TYPE (base2);

  /* Now search for the type1 in the access path of ref2.  This
     would be a common base for doing offset based disambiguation on.  */
  refp = &ref2;
  while (handled_component_p (*refp)
	 && same_type_for_tbaa (TREE_TYPE (*refp), type1) == 0)
    refp = &TREE_OPERAND (*refp, 0);
  same_p = same_type_for_tbaa (TREE_TYPE (*refp), type1);
  /* If we couldn't compare types we have to bail out.  */
  if (same_p == -1)
    return true;
  else if (same_p == 1)
    {
      HOST_WIDE_INT offadj, sztmp, msztmp;
      get_ref_base_and_extent (*refp, &offadj, &sztmp, &msztmp);
      offset2 -= offadj;
      get_ref_base_and_extent (base1, &offadj, &sztmp, &msztmp);
      offset1 -= offadj;
      return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
    }
  /* If we didn't find a common base, try the other way around.  */
  refp = &ref1;
  while (handled_component_p (*refp)
	 && same_type_for_tbaa (TREE_TYPE (*refp), type2) == 0)
    refp = &TREE_OPERAND (*refp, 0);
  same_p = same_type_for_tbaa (TREE_TYPE (*refp), type2);
  /* If we couldn't compare types we have to bail out.  */
  if (same_p == -1)
    return true;
  else if (same_p == 1)
    {
      HOST_WIDE_INT offadj, sztmp, msztmp;
      get_ref_base_and_extent (*refp, &offadj, &sztmp, &msztmp);
      offset1 -= offadj;
      get_ref_base_and_extent (base2, &offadj, &sztmp, &msztmp);
      offset2 -= offadj;
      return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
    }

  /* If we have two type access paths B1.path1 and B2.path2 they may
     only alias if either B1 is in B2.path2 or B2 is in B1.path1.
     But we can still have a path that goes B1.path1...B2.path2 with
     a part that we do not see.  So we can only disambiguate now
     if there is no B2 in the tail of path1 and no B1 on the
     tail of path2.  */
  if (base1_alias_set == ref2_alias_set
      || alias_set_subset_of (base1_alias_set, ref2_alias_set))
    return true;
  /* If this is ptr vs. decl then we know there is no ptr ... decl path.  */
  if (!ref2_is_decl)
    return (base2_alias_set == ref1_alias_set
	    || alias_set_subset_of (base2_alias_set, ref1_alias_set));
  return false;
}
/* Return true if we can determine that component references REF1 and REF2,
   that are within a common DECL, cannot overlap.  */

static bool
nonoverlapping_component_refs_of_decl_p (tree ref1, tree ref2)
{
  auto_vec<tree, 16> component_refs1;
  auto_vec<tree, 16> component_refs2;

  /* Create the stack of handled components for REF1.  */
  while (handled_component_p (ref1))
    {
      component_refs1.safe_push (ref1);
      ref1 = TREE_OPERAND (ref1, 0);
    }
  if (TREE_CODE (ref1) == MEM_REF)
    {
      if (!integer_zerop (TREE_OPERAND (ref1, 1)))
	goto may_overlap;
      ref1 = TREE_OPERAND (TREE_OPERAND (ref1, 0), 0);
    }

  /* Create the stack of handled components for REF2.  */
  while (handled_component_p (ref2))
    {
      component_refs2.safe_push (ref2);
      ref2 = TREE_OPERAND (ref2, 0);
    }
  if (TREE_CODE (ref2) == MEM_REF)
    {
      if (!integer_zerop (TREE_OPERAND (ref2, 1)))
	goto may_overlap;
      ref2 = TREE_OPERAND (TREE_OPERAND (ref2, 0), 0);
    }

  /* We must have the same base DECL.  */
  gcc_assert (ref1 == ref2);

  /* Pop the stacks in parallel and examine the COMPONENT_REFs of the same
     rank.  This is sufficient because we start from the same DECL and you
     cannot reference several fields at a time with COMPONENT_REFs (unlike
     with ARRAY_RANGE_REFs for arrays) so you always need the same number
     of them to access a sub-component, unless you're in a union, in which
     case the return value will precisely be false.  */
  while (true)
    {
      do
	{
	  if (component_refs1.is_empty ())
	    goto may_overlap;
	  ref1 = component_refs1.pop ();
	}
      while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref1, 0))));

      do
	{
	  if (component_refs2.is_empty ())
	    goto may_overlap;
	  ref2 = component_refs2.pop ();
	}
      while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref2, 0))));

      /* Beware of BIT_FIELD_REF.  */
      if (TREE_CODE (ref1) != COMPONENT_REF
	  || TREE_CODE (ref2) != COMPONENT_REF)
	goto may_overlap;

      tree field1 = TREE_OPERAND (ref1, 1);
      tree field2 = TREE_OPERAND (ref2, 1);

      /* ??? We cannot simply use the type of operand #0 of the refs here
	 as the Fortran compiler smuggles type punning into COMPONENT_REFs
	 for common blocks instead of using unions like everyone else.  */
      tree type1 = TYPE_MAIN_VARIANT (DECL_CONTEXT (field1));
      tree type2 = TYPE_MAIN_VARIANT (DECL_CONTEXT (field2));

      /* We cannot disambiguate fields in a union or qualified union.  */
      if (type1 != type2 || TREE_CODE (type1) != RECORD_TYPE)
	goto may_overlap;

      /* Different fields of the same record type cannot overlap.
	 ??? Bitfields can overlap at RTL level so punt on them.  */
      if (field1 != field2)
	{
	  component_refs1.release ();
	  component_refs2.release ();
	  return !(DECL_BIT_FIELD (field1) && DECL_BIT_FIELD (field2));
	}
    }

may_overlap:
  component_refs1.release ();
  component_refs2.release ();
  return false;
}
/* qsort compare function to sort FIELD_DECLs after their
   DECL_FIELD_CONTEXT TYPE_UID.  */

static inline int
ncr_compar (const void *field1_, const void *field2_)
{
  const_tree field1 = *(const_tree *) const_cast <void *>(field1_);
  const_tree field2 = *(const_tree *) const_cast <void *>(field2_);
  unsigned int uid1
    = TYPE_UID (TYPE_MAIN_VARIANT (DECL_FIELD_CONTEXT (field1)));
  unsigned int uid2
    = TYPE_UID (TYPE_MAIN_VARIANT (DECL_FIELD_CONTEXT (field2)));
  if (uid1 < uid2)
    return -1;
  else if (uid1 > uid2)
    return 1;
  return 0;
}
/* Return true if we can determine that the fields referenced cannot
   overlap for any pair of objects.  */

static bool
nonoverlapping_component_refs_p (const_tree x, const_tree y)
{
  if (!flag_strict_aliasing
      || !x || !y
      || TREE_CODE (x) != COMPONENT_REF
      || TREE_CODE (y) != COMPONENT_REF)
    return false;

  auto_vec<const_tree, 16> fieldsx;
  while (TREE_CODE (x) == COMPONENT_REF)
    {
      tree field = TREE_OPERAND (x, 1);
      tree type = TYPE_MAIN_VARIANT (DECL_FIELD_CONTEXT (field));
      if (TREE_CODE (type) == RECORD_TYPE)
	fieldsx.safe_push (field);
      x = TREE_OPERAND (x, 0);
    }
  if (fieldsx.length () == 0)
    return false;
  auto_vec<const_tree, 16> fieldsy;
  while (TREE_CODE (y) == COMPONENT_REF)
    {
      tree field = TREE_OPERAND (y, 1);
      tree type = TYPE_MAIN_VARIANT (DECL_FIELD_CONTEXT (field));
      if (TREE_CODE (type) == RECORD_TYPE)
	fieldsy.safe_push (TREE_OPERAND (y, 1));
      y = TREE_OPERAND (y, 0);
    }
  if (fieldsy.length () == 0)
    return false;

  /* Most common case first.  */
  if (fieldsx.length () == 1
      && fieldsy.length () == 1)
    return ((TYPE_MAIN_VARIANT (DECL_FIELD_CONTEXT (fieldsx[0]))
	     == TYPE_MAIN_VARIANT (DECL_FIELD_CONTEXT (fieldsy[0])))
	    && fieldsx[0] != fieldsy[0]
	    && !(DECL_BIT_FIELD (fieldsx[0]) && DECL_BIT_FIELD (fieldsy[0])));

  if (fieldsx.length () == 2)
    {
      if (ncr_compar (&fieldsx[0], &fieldsx[1]) == 1)
	{
	  const_tree tem = fieldsx[0];
	  fieldsx[0] = fieldsx[1];
	  fieldsx[1] = tem;
	}
    }
  else
    fieldsx.qsort (ncr_compar);
  if (fieldsy.length () == 2)
    {
      if (ncr_compar (&fieldsy[0], &fieldsy[1]) == 1)
	{
	  const_tree tem = fieldsy[0];
	  fieldsy[0] = fieldsy[1];
	  fieldsy[1] = tem;
	}
    }
  else
    fieldsy.qsort (ncr_compar);

  unsigned i = 0, j = 0;
  do
    {
      const_tree fieldx = fieldsx[i];
      const_tree fieldy = fieldsy[j];
      tree typex = TYPE_MAIN_VARIANT (DECL_FIELD_CONTEXT (fieldx));
      tree typey = TYPE_MAIN_VARIANT (DECL_FIELD_CONTEXT (fieldy));
      if (typex == typey)
	{
	  /* We're left with accessing different fields of a structure,
	     no possible overlap, unless they are both bitfields.  */
	  if (fieldx != fieldy)
	    return !(DECL_BIT_FIELD (fieldx) && DECL_BIT_FIELD (fieldy));
	}
      if (TYPE_UID (typex) < TYPE_UID (typey))
	{
	  i++;
	  if (i == fieldsx.length ())
	    break;
	}
      else
	{
	  j++;
	  if (j == fieldsy.length ())
	    break;
	}
    }
  while (1);

  return false;
}
/* Return true if two memory references based on the variables BASE1
   and BASE2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
   [OFFSET2, OFFSET2 + MAX_SIZE2) may alias.  REF1 and REF2
   if non-NULL are the complete memory reference trees.  */

static bool
decl_refs_may_alias_p (tree ref1, tree base1,
		       HOST_WIDE_INT offset1, HOST_WIDE_INT max_size1,
		       tree ref2, tree base2,
		       HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2)
{
  gcc_checking_assert (DECL_P (base1) && DECL_P (base2));

  /* If both references are based on different variables, they cannot alias.  */
  if (base1 != base2)
    return false;

  /* If both references are based on the same variable, they cannot alias if
     the accesses do not overlap.  */
  if (!ranges_overlap_p (offset1, max_size1, offset2, max_size2))
    return false;

  /* For components with variable position, the above test isn't sufficient,
     so we disambiguate component references manually.  */
  if (ref1 && ref2
      && handled_component_p (ref1) && handled_component_p (ref2)
      && nonoverlapping_component_refs_of_decl_p (ref1, ref2))
    return false;

  return true;
}
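
/* Worked example (illustrative, not from the original sources): for a
   declaration "struct { int i; int j; } s;" with 32-bit int, the access
   s.i has offset1 = 0, max_size1 = 32 (bits) and s.j has offset2 = 32,
   max_size2 = 32, so ranges_overlap_p (0, 32, 32, 32) is false and the
   two references are disambiguated without ever inspecting the
   component references themselves.  */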
/* Return true if an indirect reference based on *PTR1 constrained
   to [OFFSET1, OFFSET1 + MAX_SIZE1) may alias a variable based on BASE2
   constrained to [OFFSET2, OFFSET2 + MAX_SIZE2).  *PTR1 and BASE2 have
   the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
   in which case they are computed on-demand.  REF1 and REF2
   if non-NULL are the complete memory reference trees.  */

static bool
indirect_ref_may_alias_decl_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
			       HOST_WIDE_INT offset1,
			       HOST_WIDE_INT max_size1 ATTRIBUTE_UNUSED,
			       alias_set_type ref1_alias_set,
			       alias_set_type base1_alias_set,
			       tree ref2 ATTRIBUTE_UNUSED, tree base2,
			       HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2,
			       alias_set_type ref2_alias_set,
			       alias_set_type base2_alias_set, bool tbaa_p)
{
  tree ptr1;
  tree ptrtype1, dbase2;
  HOST_WIDE_INT offset1p = offset1, offset2p = offset2;
  HOST_WIDE_INT doffset1, doffset2;
  double_int moff;

  gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
			|| TREE_CODE (base1) == TARGET_MEM_REF)
		       && DECL_P (base2));

  ptr1 = TREE_OPERAND (base1, 0);

  /* The offset embedded in MEM_REFs can be negative.  Bias them
     so that the resulting offset adjustment is positive.  */
  moff = mem_ref_offset (base1);
  moff = moff.lshift (BITS_PER_UNIT == 8 ? 3 : exact_log2 (BITS_PER_UNIT));
  if (moff.is_negative ())
    offset2p += (-moff).low;
  else
    offset1p += moff.low;

  /* If only one reference is based on a variable, they cannot alias if
     the pointer access is beyond the extent of the variable access.
     (the pointer base cannot validly point to an offset less than zero
     of the variable).
     ??? IVOPTs creates bases that do not honor this restriction,
     so do not apply this optimization for TARGET_MEM_REFs.  */
  if (TREE_CODE (base1) != TARGET_MEM_REF
      && !ranges_overlap_p (MAX (0, offset1p), -1, offset2p, max_size2))
    return false;
  /* They also cannot alias if the pointer may not point to the decl.  */
  if (!ptr_deref_may_alias_decl_p (ptr1, base2))
    return false;

  /* Disambiguations that rely on strict aliasing rules follow.  */
  if (!flag_strict_aliasing || !tbaa_p)
    return true;

  ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));

  /* If the alias set for a pointer access is zero all bets are off.  */
  if (base1_alias_set == -1)
    base1_alias_set = get_deref_alias_set (ptrtype1);
  if (base1_alias_set == 0)
    return true;
  if (base2_alias_set == -1)
    base2_alias_set = get_alias_set (base2);

  /* When we are trying to disambiguate an access with a pointer dereference
     as base versus one with a decl as base we can use both the size
     of the decl and its dynamic type for extra disambiguation.
     ??? We do not know anything about the dynamic type of the decl
     other than that its alias-set contains base2_alias_set as a subset
     which does not help us here.  */
  /* As we know nothing useful about the dynamic type of the decl just
     use the usual conflict check rather than a subset test.
     ??? We could introduce -fvery-strict-aliasing when the language
     does not allow decls to have a dynamic type that differs from their
     static type.  Then we can check
     !alias_set_subset_of (base1_alias_set, base2_alias_set) instead.  */
  if (base1_alias_set != base2_alias_set
      && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
    return false;
  /* If the size of the access relevant for TBAA through the pointer
     is bigger than the size of the decl we can't possibly access the
     decl via that pointer.  */
  if (DECL_SIZE (base2) && COMPLETE_TYPE_P (TREE_TYPE (ptrtype1))
      && TREE_CODE (DECL_SIZE (base2)) == INTEGER_CST
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (ptrtype1))) == INTEGER_CST
      /* ??? This in turn may run afoul when a decl of type T which is
	 a member of union type U is accessed through a pointer to
	 type U and sizeof T is smaller than sizeof U.  */
      && TREE_CODE (TREE_TYPE (ptrtype1)) != UNION_TYPE
      && TREE_CODE (TREE_TYPE (ptrtype1)) != QUAL_UNION_TYPE
      && tree_int_cst_lt (DECL_SIZE (base2), TYPE_SIZE (TREE_TYPE (ptrtype1))))
    return false;

  if (!ref2)
    return true;

  /* If the decl is accessed via a MEM_REF, reconstruct the base
     we can use for TBAA and an appropriately adjusted offset.  */
  dbase2 = ref2;
  while (handled_component_p (dbase2))
    dbase2 = TREE_OPERAND (dbase2, 0);
  doffset1 = offset1;
  doffset2 = offset2;
  if (TREE_CODE (dbase2) == MEM_REF
      || TREE_CODE (dbase2) == TARGET_MEM_REF)
    {
      double_int moff = mem_ref_offset (dbase2);
      moff = moff.lshift (BITS_PER_UNIT == 8 ? 3 : exact_log2 (BITS_PER_UNIT));
      if (moff.is_negative ())
	doffset1 -= (-moff).low;
      else
	doffset2 -= moff.low;
    }

  /* If either reference is view-converted, give up now.  */
  if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1
      || same_type_for_tbaa (TREE_TYPE (dbase2), TREE_TYPE (base2)) != 1)
    return true;

  /* If both references are through the same type, they do not alias
     if the accesses do not overlap.  This does extra disambiguation
     for mixed/pointer accesses but requires strict aliasing.
     For MEM_REFs we require that the component-ref offset we computed
     is relative to the start of the type which we ensure by
     comparing rvalue and access type and disregarding the constant
     pointer offset.  */
  if ((TREE_CODE (base1) != TARGET_MEM_REF
       || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
      && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (dbase2)) == 1)
    return ranges_overlap_p (doffset1, max_size1, doffset2, max_size2);

  if (ref1 && ref2
      && nonoverlapping_component_refs_p (ref1, ref2))
    return false;

  /* Do access-path based disambiguation.  */
  if (ref1 && ref2
      && (handled_component_p (ref1) || handled_component_p (ref2)))
    return aliasing_component_refs_p (ref1,
				      ref1_alias_set, base1_alias_set,
				      offset1, max_size1,
				      ref2,
				      ref2_alias_set, base2_alias_set,
				      offset2, max_size2, true);

  return true;
}
/* Return true if two indirect references based on *PTR1
   and *PTR2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
   [OFFSET2, OFFSET2 + MAX_SIZE2) may alias.  *PTR1 and *PTR2 have
   the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
   in which case they are computed on-demand.  REF1 and REF2
   if non-NULL are the complete memory reference trees.  */

static bool
indirect_refs_may_alias_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
			   HOST_WIDE_INT offset1, HOST_WIDE_INT max_size1,
			   alias_set_type ref1_alias_set,
			   alias_set_type base1_alias_set,
			   tree ref2 ATTRIBUTE_UNUSED, tree base2,
			   HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2,
			   alias_set_type ref2_alias_set,
			   alias_set_type base2_alias_set, bool tbaa_p)
{
  tree ptr1;
  tree ptr2;
  tree ptrtype1, ptrtype2;

  gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
			|| TREE_CODE (base1) == TARGET_MEM_REF)
		       && (TREE_CODE (base2) == MEM_REF
			   || TREE_CODE (base2) == TARGET_MEM_REF));

  ptr1 = TREE_OPERAND (base1, 0);
  ptr2 = TREE_OPERAND (base2, 0);

  /* If both bases are based on pointers they cannot alias if they may not
     point to the same memory object or if they point to the same object
     and the accesses do not overlap.  */
  if ((!cfun || gimple_in_ssa_p (cfun))
      && operand_equal_p (ptr1, ptr2, 0)
      && (((TREE_CODE (base1) != TARGET_MEM_REF
	    || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
	   && (TREE_CODE (base2) != TARGET_MEM_REF
	       || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2))))
	  || (TREE_CODE (base1) == TARGET_MEM_REF
	      && TREE_CODE (base2) == TARGET_MEM_REF
	      && (TMR_STEP (base1) == TMR_STEP (base2)
		  || (TMR_STEP (base1) && TMR_STEP (base2)
		      && operand_equal_p (TMR_STEP (base1),
					  TMR_STEP (base2), 0)))
	      && (TMR_INDEX (base1) == TMR_INDEX (base2)
		  || (TMR_INDEX (base1) && TMR_INDEX (base2)
		      && operand_equal_p (TMR_INDEX (base1),
					  TMR_INDEX (base2), 0)))
	      && (TMR_INDEX2 (base1) == TMR_INDEX2 (base2)
		  || (TMR_INDEX2 (base1) && TMR_INDEX2 (base2)
		      && operand_equal_p (TMR_INDEX2 (base1),
					  TMR_INDEX2 (base2), 0))))))
    {
      double_int moff;
      /* The offset embedded in MEM_REFs can be negative.  Bias them
	 so that the resulting offset adjustment is positive.  */
      moff = mem_ref_offset (base1);
      moff = moff.lshift (BITS_PER_UNIT == 8 ? 3 : exact_log2 (BITS_PER_UNIT));
      if (moff.is_negative ())
	offset2 += (-moff).low;
      else
	offset1 += moff.low;
      moff = mem_ref_offset (base2);
      moff = moff.lshift (BITS_PER_UNIT == 8 ? 3 : exact_log2 (BITS_PER_UNIT));
      if (moff.is_negative ())
	offset1 += (-moff).low;
      else
	offset2 += moff.low;
      return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
    }
  if (!ptr_derefs_may_alias_p (ptr1, ptr2))
    return false;

  /* Disambiguations that rely on strict aliasing rules follow.  */
  if (!flag_strict_aliasing || !tbaa_p)
    return true;

  ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));
  ptrtype2 = TREE_TYPE (TREE_OPERAND (base2, 1));

  /* If the alias set for a pointer access is zero all bets are off.  */
  if (base1_alias_set == -1)
    base1_alias_set = get_deref_alias_set (ptrtype1);
  if (base1_alias_set == 0)
    return true;
  if (base2_alias_set == -1)
    base2_alias_set = get_deref_alias_set (ptrtype2);
  if (base2_alias_set == 0)
    return true;

  /* If both references are through the same type, they do not alias
     if the accesses do not overlap.  This does extra disambiguation
     for mixed/pointer accesses but requires strict aliasing.  */
  if ((TREE_CODE (base1) != TARGET_MEM_REF
       || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
      && (TREE_CODE (base2) != TARGET_MEM_REF
	  || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2)))
      && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) == 1
      && same_type_for_tbaa (TREE_TYPE (base2), TREE_TYPE (ptrtype2)) == 1
      && same_type_for_tbaa (TREE_TYPE (ptrtype1),
			     TREE_TYPE (ptrtype2)) == 1)
    return ranges_overlap_p (offset1, max_size1, offset2, max_size2);

  /* Do type-based disambiguation.  */
  if (base1_alias_set != base2_alias_set
      && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
    return false;

  /* If either reference is view-converted, give up now.  */
  if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1
      || same_type_for_tbaa (TREE_TYPE (base2), TREE_TYPE (ptrtype2)) != 1)
    return true;

  if (ref1 && ref2
      && nonoverlapping_component_refs_p (ref1, ref2))
    return false;

  /* Do access-path based disambiguation.  */
  if (ref1 && ref2
      && (handled_component_p (ref1) || handled_component_p (ref2)))
    return aliasing_component_refs_p (ref1,
				      ref1_alias_set, base1_alias_set,
				      offset1, max_size1,
				      ref2,
				      ref2_alias_set, base2_alias_set,
				      offset2, max_size2, false);

  return true;
}
/* Return true, if the two memory references REF1 and REF2 may alias.  */

bool
refs_may_alias_p_1 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
{
  tree base1, base2;
  HOST_WIDE_INT offset1 = 0, offset2 = 0;
  HOST_WIDE_INT max_size1 = -1, max_size2 = -1;
  bool var1_p, var2_p, ind1_p, ind2_p;

  gcc_checking_assert ((!ref1->ref
			|| TREE_CODE (ref1->ref) == SSA_NAME
			|| DECL_P (ref1->ref)
			|| TREE_CODE (ref1->ref) == STRING_CST
			|| handled_component_p (ref1->ref)
			|| TREE_CODE (ref1->ref) == MEM_REF
			|| TREE_CODE (ref1->ref) == TARGET_MEM_REF)
		       && (!ref2->ref
			   || TREE_CODE (ref2->ref) == SSA_NAME
			   || DECL_P (ref2->ref)
			   || TREE_CODE (ref2->ref) == STRING_CST
			   || handled_component_p (ref2->ref)
			   || TREE_CODE (ref2->ref) == MEM_REF
			   || TREE_CODE (ref2->ref) == TARGET_MEM_REF));

  /* Decompose the references into their base objects and the access.  */
  base1 = ao_ref_base (ref1);
  offset1 = ref1->offset;
  max_size1 = ref1->max_size;
  base2 = ao_ref_base (ref2);
  offset2 = ref2->offset;
  max_size2 = ref2->max_size;

  /* We can end up with registers or constants as bases for example from
     *D.1663_44 = VIEW_CONVERT_EXPR<struct DB_LSN>(__tmp$B0F64_59);
     which is seen as a struct copy.  */
  if (TREE_CODE (base1) == SSA_NAME
      || TREE_CODE (base1) == CONST_DECL
      || TREE_CODE (base1) == CONSTRUCTOR
      || TREE_CODE (base1) == ADDR_EXPR
      || CONSTANT_CLASS_P (base1)
      || TREE_CODE (base2) == SSA_NAME
      || TREE_CODE (base2) == CONST_DECL
      || TREE_CODE (base2) == CONSTRUCTOR
      || TREE_CODE (base2) == ADDR_EXPR
      || CONSTANT_CLASS_P (base2))
    return false;

  /* We can end up referring to code via function and label decls.
     As we likely do not properly track code aliases conservatively
     bail out.  */
  if (TREE_CODE (base1) == FUNCTION_DECL
      || TREE_CODE (base1) == LABEL_DECL
      || TREE_CODE (base2) == FUNCTION_DECL
      || TREE_CODE (base2) == LABEL_DECL)
    return true;

  /* Two volatile accesses always conflict.  */
  if (ref1->volatile_p
      && ref2->volatile_p)
    return true;

  /* Defer to simple offset based disambiguation if we have
     references based on two decls.  Do this before deferring to
     TBAA to handle must-alias cases in conformance with the
     GCC extension of allowing type-punning through unions.  */
  var1_p = DECL_P (base1);
  var2_p = DECL_P (base2);
  if (var1_p && var2_p)
    return decl_refs_may_alias_p (ref1->ref, base1, offset1, max_size1,
				  ref2->ref, base2, offset2, max_size2);

  ind1_p = (TREE_CODE (base1) == MEM_REF
	    || TREE_CODE (base1) == TARGET_MEM_REF);
  ind2_p = (TREE_CODE (base2) == MEM_REF
	    || TREE_CODE (base2) == TARGET_MEM_REF);

  /* Canonicalize the pointer-vs-decl case.  */
  if (ind1_p && var2_p)
    {
      HOST_WIDE_INT tmp1;
      tree tmp2;
      ao_ref *tmp3;
      tmp1 = offset1; offset1 = offset2; offset2 = tmp1;
      tmp1 = max_size1; max_size1 = max_size2; max_size2 = tmp1;
      tmp2 = base1; base1 = base2; base2 = tmp2;
      tmp3 = ref1; ref1 = ref2; ref2 = tmp3;
      var1_p = true;
      ind1_p = false;
      var2_p = false;
      ind2_p = true;
    }

  /* First defer to TBAA if possible.  */
  if (tbaa_p
      && flag_strict_aliasing
      && !alias_sets_conflict_p (ao_ref_alias_set (ref1),
				 ao_ref_alias_set (ref2)))
    return false;

  /* Dispatch to the pointer-vs-decl or pointer-vs-pointer disambiguators.  */
  if (var1_p && ind2_p)
    return indirect_ref_may_alias_decl_p (ref2->ref, base2,
					  offset2, max_size2,
					  ao_ref_alias_set (ref2), -1,
					  ref1->ref, base1,
					  offset1, max_size1,
					  ao_ref_alias_set (ref1),
					  ao_ref_base_alias_set (ref1),
					  tbaa_p);
  else if (ind1_p && ind2_p)
    return indirect_refs_may_alias_p (ref1->ref, base1,
				      offset1, max_size1,
				      ao_ref_alias_set (ref1), -1,
				      ref2->ref, base2,
				      offset2, max_size2,
				      ao_ref_alias_set (ref2), -1,
				      tbaa_p);

  /* We really do not want to end up here, but returning true is safe.  */
#ifdef ENABLE_CHECKING
  gcc_unreachable ();
#else
  return true;
#endif
}
bool
refs_may_alias_p (tree ref1, tree ref2)
{
  ao_ref r1, r2;
  bool res;
  ao_ref_init (&r1, ref1);
  ao_ref_init (&r2, ref2);
  res = refs_may_alias_p_1 (&r1, &r2, true);
  if (res)
    ++alias_stats.refs_may_alias_p_may_alias;
  else
    ++alias_stats.refs_may_alias_p_no_alias;
  return res;
}
/* Returns true if there is an anti-dependence for the STORE that
   executes after the LOAD.  */

bool
refs_anti_dependent_p (tree load, tree store)
{
  ao_ref r1, r2;
  ao_ref_init (&r1, load);
  ao_ref_init (&r2, store);
  return refs_may_alias_p_1 (&r1, &r2, false);
}
/* Returns true if there is an output dependence for the stores
   STORE1 and STORE2.  */

bool
refs_output_dependent_p (tree store1, tree store2)
{
  ao_ref r1, r2;
  ao_ref_init (&r1, store1);
  ao_ref_init (&r2, store2);
  return refs_may_alias_p_1 (&r1, &r2, false);
}
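
/* Illustrative sketch (not part of the original file): a scheduling-style
   check whether a store may be reordered with an earlier load or an
   earlier store would use these wrappers directly on the reference trees:

     bool keep_order = (refs_anti_dependent_p (load_ref, store_ref)
			|| refs_output_dependent_p (old_store_ref, store_ref));

   Both wrappers pass tbaa_p = false, i.e. they deliberately do not use
   type-based rules for these ordering queries.  */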
/* If the call CALL may use the memory reference REF return true,
   otherwise return false.  */

static bool
ref_maybe_used_by_call_p_1 (gimple call, ao_ref *ref)
{
  tree base, callee;
  unsigned i;
  int flags = gimple_call_flags (call);

  /* Const functions without a static chain do not implicitly use memory.  */
  if (!gimple_call_chain (call)
      && (flags & (ECF_CONST|ECF_NOVOPS)))
    goto process_args;

  base = ao_ref_base (ref);
  if (!base)
    return true;

  /* A call that is not without side-effects might involve volatile
     accesses and thus conflicts with all other volatile accesses.  */
  if (ref->volatile_p)
    return true;

  /* If the reference is based on a decl that is not aliased the call
     cannot possibly use it.  */
  if (DECL_P (base)
      && !may_be_aliased (base)
      /* But local statics can be used through recursion.  */
      && !is_global_var (base))
    goto process_args;

  callee = gimple_call_fndecl (call);

  /* Handle those builtin functions explicitly that do not act as
     escape points.  See tree-ssa-structalias.c:find_func_aliases
     for the list of builtins we might need to handle here.  */
  if (callee != NULL_TREE
      && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (callee))
      {
	/* All the following functions read memory pointed to by
	   their second argument.  strcat/strncat additionally
	   reads memory pointed to by the first argument.  */
	case BUILT_IN_STRCAT:
	case BUILT_IN_STRNCAT:
	  {
	    ao_ref dref;
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   NULL_TREE);
	    if (refs_may_alias_p_1 (&dref, ref, false))
	      return true;
	  }
	  /* FALLTHRU */
	case BUILT_IN_STRCPY:
	case BUILT_IN_STRNCPY:
	case BUILT_IN_MEMCPY:
	case BUILT_IN_MEMMOVE:
	case BUILT_IN_MEMPCPY:
	case BUILT_IN_STPCPY:
	case BUILT_IN_STPNCPY:
	case BUILT_IN_TM_MEMCPY:
	case BUILT_IN_TM_MEMMOVE:
	  {
	    ao_ref dref;
	    tree size = NULL_TREE;
	    if (gimple_call_num_args (call) == 3)
	      size = gimple_call_arg (call, 2);
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 1),
					   size);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }
	case BUILT_IN_STRCAT_CHK:
	case BUILT_IN_STRNCAT_CHK:
	  {
	    ao_ref dref;
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   NULL_TREE);
	    if (refs_may_alias_p_1 (&dref, ref, false))
	      return true;
	  }
	  /* FALLTHRU */
	case BUILT_IN_STRCPY_CHK:
	case BUILT_IN_STRNCPY_CHK:
	case BUILT_IN_MEMCPY_CHK:
	case BUILT_IN_MEMMOVE_CHK:
	case BUILT_IN_MEMPCPY_CHK:
	case BUILT_IN_STPCPY_CHK:
	case BUILT_IN_STPNCPY_CHK:
	  {
	    ao_ref dref;
	    tree size = NULL_TREE;
	    if (gimple_call_num_args (call) == 4)
	      size = gimple_call_arg (call, 2);
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 1),
					   size);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }
	case BUILT_IN_BCOPY:
	  {
	    ao_ref dref;
	    tree size = gimple_call_arg (call, 2);
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   size);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }

	/* The following functions read memory pointed to by their
	   first argument.  */
	CASE_BUILT_IN_TM_LOAD (1):
	CASE_BUILT_IN_TM_LOAD (2):
	CASE_BUILT_IN_TM_LOAD (4):
	CASE_BUILT_IN_TM_LOAD (8):
	CASE_BUILT_IN_TM_LOAD (FLOAT):
	CASE_BUILT_IN_TM_LOAD (DOUBLE):
	CASE_BUILT_IN_TM_LOAD (LDOUBLE):
	CASE_BUILT_IN_TM_LOAD (M64):
	CASE_BUILT_IN_TM_LOAD (M128):
	CASE_BUILT_IN_TM_LOAD (M256):
	case BUILT_IN_TM_LOG:
	case BUILT_IN_TM_LOG_1:
	case BUILT_IN_TM_LOG_2:
	case BUILT_IN_TM_LOG_4:
	case BUILT_IN_TM_LOG_8:
	case BUILT_IN_TM_LOG_FLOAT:
	case BUILT_IN_TM_LOG_DOUBLE:
	case BUILT_IN_TM_LOG_LDOUBLE:
	case BUILT_IN_TM_LOG_M64:
	case BUILT_IN_TM_LOG_M128:
	case BUILT_IN_TM_LOG_M256:
	  return ptr_deref_may_alias_ref_p_1 (gimple_call_arg (call, 0), ref);

	/* These read memory pointed to by the first argument.  */
	case BUILT_IN_STRDUP:
	case BUILT_IN_STRNDUP:
	  {
	    ao_ref dref;
	    tree size = NULL_TREE;
	    if (gimple_call_num_args (call) == 2)
	      size = gimple_call_arg (call, 1);
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   size);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }
	/* These read memory pointed to by the first argument.  */
	case BUILT_IN_INDEX:
	case BUILT_IN_STRCHR:
	case BUILT_IN_STRRCHR:
	  {
	    ao_ref dref;
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   NULL_TREE);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }
	/* These read memory pointed to by the first argument with size
	   in the third argument.  */
	case BUILT_IN_MEMCHR:
	  {
	    ao_ref dref;
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   gimple_call_arg (call, 2));
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }
	/* These read memory pointed to by the first and second arguments.  */
	case BUILT_IN_STRSTR:
	case BUILT_IN_STRPBRK:
	  {
	    ao_ref dref;
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   NULL_TREE);
	    if (refs_may_alias_p_1 (&dref, ref, false))
	      return true;
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 1),
					   NULL_TREE);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }

	/* The following builtins do not read from memory.  */
	case BUILT_IN_FREE:
	case BUILT_IN_MALLOC:
	case BUILT_IN_POSIX_MEMALIGN:
	case BUILT_IN_CALLOC:
	case BUILT_IN_ALLOCA:
	case BUILT_IN_ALLOCA_WITH_ALIGN:
	case BUILT_IN_STACK_SAVE:
	case BUILT_IN_STACK_RESTORE:
	case BUILT_IN_MEMSET:
	case BUILT_IN_TM_MEMSET:
	case BUILT_IN_MEMSET_CHK:
	case BUILT_IN_FREXP:
	case BUILT_IN_FREXPF:
	case BUILT_IN_FREXPL:
	case BUILT_IN_GAMMA_R:
	case BUILT_IN_GAMMAF_R:
	case BUILT_IN_GAMMAL_R:
	case BUILT_IN_LGAMMA_R:
	case BUILT_IN_LGAMMAF_R:
	case BUILT_IN_LGAMMAL_R:
	case BUILT_IN_MODF:
	case BUILT_IN_MODFF:
	case BUILT_IN_MODFL:
	case BUILT_IN_REMQUO:
	case BUILT_IN_REMQUOF:
	case BUILT_IN_REMQUOL:
	case BUILT_IN_SINCOS:
	case BUILT_IN_SINCOSF:
	case BUILT_IN_SINCOSL:
	case BUILT_IN_ASSUME_ALIGNED:
	case BUILT_IN_VA_END:
	  return false;
	/* __sync_* builtins and some OpenMP builtins act as threading
	   barriers.  */
#undef DEF_SYNC_BUILTIN
#define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
#include "sync-builtins.def"
#undef DEF_SYNC_BUILTIN
	case BUILT_IN_GOMP_ATOMIC_START:
	case BUILT_IN_GOMP_ATOMIC_END:
	case BUILT_IN_GOMP_BARRIER:
	case BUILT_IN_GOMP_BARRIER_CANCEL:
	case BUILT_IN_GOMP_TASKWAIT:
	case BUILT_IN_GOMP_TASKGROUP_END:
	case BUILT_IN_GOMP_CRITICAL_START:
	case BUILT_IN_GOMP_CRITICAL_END:
	case BUILT_IN_GOMP_CRITICAL_NAME_START:
	case BUILT_IN_GOMP_CRITICAL_NAME_END:
	case BUILT_IN_GOMP_LOOP_END:
	case BUILT_IN_GOMP_LOOP_END_CANCEL:
	case BUILT_IN_GOMP_ORDERED_START:
	case BUILT_IN_GOMP_ORDERED_END:
	case BUILT_IN_GOMP_SECTIONS_END:
	case BUILT_IN_GOMP_SECTIONS_END_CANCEL:
	case BUILT_IN_GOMP_SINGLE_COPY_START:
	case BUILT_IN_GOMP_SINGLE_COPY_END:
	  return true;

	default:
	  /* Fallthru to general call handling.  */;
      }

  /* Check if base is a global static variable that is not read
     by the function.  */
  if (callee != NULL_TREE
      && TREE_CODE (base) == VAR_DECL
      && TREE_STATIC (base))
    {
      struct cgraph_node *node = cgraph_get_node (callee);
      bitmap not_read;

      /* FIXME: Callee can be an OMP builtin that does not have a call graph
	 node yet.  We should enforce that there are nodes for all decls in the
	 IL and remove this check instead.  */
      if (node
	  && (not_read = ipa_reference_get_not_read_global (node))
	  && bitmap_bit_p (not_read, DECL_UID (base)))
	goto process_args;
    }

  /* Check if the base variable is call-used.  */
  if (DECL_P (base))
    {
      if (pt_solution_includes (gimple_call_use_set (call), base))
	return true;
    }
  else if ((TREE_CODE (base) == MEM_REF
	    || TREE_CODE (base) == TARGET_MEM_REF)
	   && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
    {
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
      if (!pi)
	return true;

      if (pt_solutions_intersect (gimple_call_use_set (call), &pi->pt))
	return true;
    }
  else
    return true;

  /* Inspect call arguments for passed-by-value aliases.  */
process_args:
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree op = gimple_call_arg (call, i);
      int flags = gimple_call_arg_flags (call, i);

      if (flags & EAF_UNUSED)
	continue;

      if (TREE_CODE (op) == WITH_SIZE_EXPR)
	op = TREE_OPERAND (op, 0);

      if (TREE_CODE (op) != SSA_NAME
	  && !is_gimple_min_invariant (op))
	{
	  ao_ref r;
	  ao_ref_init (&r, op);
	  if (refs_may_alias_p_1 (&r, ref, true))
	    return true;
	}
    }

  return false;
}
static bool
ref_maybe_used_by_call_p (gimple call, tree ref)
{
  ao_ref r;
  bool res;
  ao_ref_init (&r, ref);
  res = ref_maybe_used_by_call_p_1 (call, &r);
  if (res)
    ++alias_stats.ref_maybe_used_by_call_p_may_alias;
  else
    ++alias_stats.ref_maybe_used_by_call_p_no_alias;
  return res;
}
/* If the statement STMT may use the memory reference REF return
   true, otherwise return false.  */

bool
ref_maybe_used_by_stmt_p (gimple stmt, tree ref)
{
  if (is_gimple_assign (stmt))
    {
      tree rhs;

      /* All memory assign statements are single.  */
      if (!gimple_assign_single_p (stmt))
	return false;

      rhs = gimple_assign_rhs1 (stmt);
      if (is_gimple_reg (rhs)
	  || is_gimple_min_invariant (rhs)
	  || gimple_assign_rhs_code (stmt) == CONSTRUCTOR)
	return false;

      return refs_may_alias_p (rhs, ref);
    }
  else if (is_gimple_call (stmt))
    return ref_maybe_used_by_call_p (stmt, ref);
  else if (gimple_code (stmt) == GIMPLE_RETURN)
    {
      tree retval = gimple_return_retval (stmt);
      tree base;
      if (retval
	  && TREE_CODE (retval) != SSA_NAME
	  && !is_gimple_min_invariant (retval)
	  && refs_may_alias_p (retval, ref))
	return true;
      /* If ref escapes the function then the return acts as a use.  */
      base = get_base_address (ref);
      if (!base)
	;
      else if (DECL_P (base))
	return is_global_var (base);
      else if (TREE_CODE (base) == MEM_REF
	       || TREE_CODE (base) == TARGET_MEM_REF)
	return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
      return false;
    }

  return true;
}
1835 /* If the call in statement CALL may clobber the memory reference REF
1836 return true, otherwise return false. */
1839 call_may_clobber_ref_p_1 (gimple call
, ao_ref
*ref
)
1844 /* If the call is pure or const it cannot clobber anything. */
1845 if (gimple_call_flags (call
)
1846 & (ECF_PURE
|ECF_CONST
|ECF_LOOPING_CONST_OR_PURE
|ECF_NOVOPS
))
1849 base
= ao_ref_base (ref
);
1853 if (TREE_CODE (base
) == SSA_NAME
1854 || CONSTANT_CLASS_P (base
))
1857 /* A call that is not without side-effects might involve volatile
1858 accesses and thus conflicts with all other volatile accesses. */
1859 if (ref
->volatile_p
)
1862 /* If the reference is based on a decl that is not aliased the call
1863 cannot possibly clobber it. */
1865 && !may_be_aliased (base
)
1866 /* But local non-readonly statics can be modified through recursion
1867 or the call may implement a threading barrier which we must
1868 treat as may-def. */
1869 && (TREE_READONLY (base
)
1870 || !is_global_var (base
)))
1873 callee
= gimple_call_fndecl (call
);
1875 /* Handle those builtin functions explicitly that do not act as
1876 escape points. See tree-ssa-structalias.c:find_func_aliases
1877 for the list of builtins we might need to handle here. */
1878 if (callee
!= NULL_TREE
1879 && DECL_BUILT_IN_CLASS (callee
) == BUILT_IN_NORMAL
)
1880 switch (DECL_FUNCTION_CODE (callee
))
1882 /* All the following functions clobber memory pointed to by
1883 their first argument. */
1884 case BUILT_IN_STRCPY
:
1885 case BUILT_IN_STRNCPY
:
1886 case BUILT_IN_MEMCPY
:
1887 case BUILT_IN_MEMMOVE
:
1888 case BUILT_IN_MEMPCPY
:
1889 case BUILT_IN_STPCPY
:
1890 case BUILT_IN_STPNCPY
:
1891 case BUILT_IN_STRCAT
:
1892 case BUILT_IN_STRNCAT
:
1893 case BUILT_IN_MEMSET
:
1894 case BUILT_IN_TM_MEMSET
:
1895 CASE_BUILT_IN_TM_STORE (1):
1896 CASE_BUILT_IN_TM_STORE (2):
1897 CASE_BUILT_IN_TM_STORE (4):
1898 CASE_BUILT_IN_TM_STORE (8):
1899 CASE_BUILT_IN_TM_STORE (FLOAT
):
1900 CASE_BUILT_IN_TM_STORE (DOUBLE
):
1901 CASE_BUILT_IN_TM_STORE (LDOUBLE
):
1902 CASE_BUILT_IN_TM_STORE (M64
):
1903 CASE_BUILT_IN_TM_STORE (M128
):
1904 CASE_BUILT_IN_TM_STORE (M256
):
1905 case BUILT_IN_TM_MEMCPY
:
1906 case BUILT_IN_TM_MEMMOVE
:
1909 tree size
= NULL_TREE
;
1910 /* Don't pass in size for strncat, as the maximum size
1911 is strlen (dest) + n + 1 instead of n, resp.
1912 n + 1 at dest + strlen (dest), but strlen (dest) isn't
1914 if (gimple_call_num_args (call
) == 3
1915 && DECL_FUNCTION_CODE (callee
) != BUILT_IN_STRNCAT
)
1916 size
= gimple_call_arg (call
, 2);
1917 ao_ref_init_from_ptr_and_size (&dref
,
1918 gimple_call_arg (call
, 0),
1920 return refs_may_alias_p_1 (&dref
, ref
, false);
1922 case BUILT_IN_STRCPY_CHK
:
1923 case BUILT_IN_STRNCPY_CHK
:
1924 case BUILT_IN_MEMCPY_CHK
:
1925 case BUILT_IN_MEMMOVE_CHK
:
1926 case BUILT_IN_MEMPCPY_CHK
:
1927 case BUILT_IN_STPCPY_CHK
:
1928 case BUILT_IN_STPNCPY_CHK
:
1929 case BUILT_IN_STRCAT_CHK
:
1930 case BUILT_IN_STRNCAT_CHK
:
1931 case BUILT_IN_MEMSET_CHK
:
1934 tree size
= NULL_TREE
;
1935 /* Don't pass in size for __strncat_chk, as the maximum size
1936 is strlen (dest) + n + 1 instead of n, resp.
1937 n + 1 at dest + strlen (dest), but strlen (dest) isn't
1939 if (gimple_call_num_args (call
) == 4
1940 && DECL_FUNCTION_CODE (callee
) != BUILT_IN_STRNCAT_CHK
)
1941 size
= gimple_call_arg (call
, 2);
1942 ao_ref_init_from_ptr_and_size (&dref
,
1943 gimple_call_arg (call
, 0),
1945 return refs_may_alias_p_1 (&dref
, ref
, false);
1947 case BUILT_IN_BCOPY
:
1950 tree size
= gimple_call_arg (call
, 2);
1951 ao_ref_init_from_ptr_and_size (&dref
,
1952 gimple_call_arg (call
, 1),
1954 return refs_may_alias_p_1 (&dref
, ref
, false);
1956 /* Allocating memory does not have any side-effects apart from
1957 being the definition point for the pointer. */
1958 case BUILT_IN_MALLOC
:
1959 case BUILT_IN_CALLOC
:
1960 case BUILT_IN_STRDUP
:
1961 case BUILT_IN_STRNDUP
:
1962 /* Unix98 specifies that errno is set on allocation failure. */
1964 && targetm
.ref_may_alias_errno (ref
))
1967 case BUILT_IN_STACK_SAVE
:
1968 case BUILT_IN_ALLOCA
:
1969 case BUILT_IN_ALLOCA_WITH_ALIGN
:
1970 case BUILT_IN_ASSUME_ALIGNED
:
1972 /* But posix_memalign stores a pointer into the memory pointed to
1973 by its first argument. */
1974 case BUILT_IN_POSIX_MEMALIGN
:
1976 tree ptrptr
= gimple_call_arg (call
, 0);
1978 ao_ref_init_from_ptr_and_size (&dref
, ptrptr
,
1979 TYPE_SIZE_UNIT (ptr_type_node
));
1980 return (refs_may_alias_p_1 (&dref
, ref
, false)
1982 && targetm
.ref_may_alias_errno (ref
)));
1984 /* Freeing memory kills the pointed-to memory. More importantly
1985 the call has to serve as a barrier for moving loads and stores
1988 case BUILT_IN_VA_END
:
1990 tree ptr
= gimple_call_arg (call
, 0);
1991 return ptr_deref_may_alias_ref_p_1 (ptr
, ref
);
1993 case BUILT_IN_GAMMA_R
:
1994 case BUILT_IN_GAMMAF_R
:
1995 case BUILT_IN_GAMMAL_R
:
1996 case BUILT_IN_LGAMMA_R
:
1997 case BUILT_IN_LGAMMAF_R
:
1998 case BUILT_IN_LGAMMAL_R
:
2000 tree out
= gimple_call_arg (call
, 1);
2001 if (ptr_deref_may_alias_ref_p_1 (out
, ref
))
2003 if (flag_errno_math
)
      case BUILT_IN_FREXP:
      case BUILT_IN_FREXPF:
      case BUILT_IN_FREXPL:
      case BUILT_IN_MODF:
      case BUILT_IN_MODFF:
      case BUILT_IN_MODFL:
	{
	  tree out = gimple_call_arg (call, 1);
	  return ptr_deref_may_alias_ref_p_1 (out, ref);
	}
      case BUILT_IN_REMQUO:
      case BUILT_IN_REMQUOF:
      case BUILT_IN_REMQUOL:
	{
	  tree out = gimple_call_arg (call, 2);
	  if (ptr_deref_may_alias_ref_p_1 (out, ref))
	    return true;
	  if (flag_errno_math)
	    return true;
	  return false;
	}
      case BUILT_IN_SINCOS:
      case BUILT_IN_SINCOSF:
      case BUILT_IN_SINCOSL:
	{
	  tree sin = gimple_call_arg (call, 1);
	  tree cos = gimple_call_arg (call, 2);
	  return (ptr_deref_may_alias_ref_p_1 (sin, ref)
		  || ptr_deref_may_alias_ref_p_1 (cos, ref));
	}
      /* __sync_* builtins and some OpenMP builtins act as threading
	 barriers.  */
#undef DEF_SYNC_BUILTIN
#define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
#include "sync-builtins.def"
#undef DEF_SYNC_BUILTIN
      case BUILT_IN_GOMP_ATOMIC_START:
      case BUILT_IN_GOMP_ATOMIC_END:
      case BUILT_IN_GOMP_BARRIER:
      case BUILT_IN_GOMP_BARRIER_CANCEL:
      case BUILT_IN_GOMP_TASKWAIT:
      case BUILT_IN_GOMP_TASKGROUP_END:
      case BUILT_IN_GOMP_CRITICAL_START:
      case BUILT_IN_GOMP_CRITICAL_END:
      case BUILT_IN_GOMP_CRITICAL_NAME_START:
      case BUILT_IN_GOMP_CRITICAL_NAME_END:
      case BUILT_IN_GOMP_LOOP_END:
      case BUILT_IN_GOMP_LOOP_END_CANCEL:
      case BUILT_IN_GOMP_ORDERED_START:
      case BUILT_IN_GOMP_ORDERED_END:
      case BUILT_IN_GOMP_SECTIONS_END:
      case BUILT_IN_GOMP_SECTIONS_END_CANCEL:
      case BUILT_IN_GOMP_SINGLE_COPY_START:
      case BUILT_IN_GOMP_SINGLE_COPY_END:
	return true;
      default:
	/* Fallthru to general call handling.  */;
      }
  /* Check if base is a global static variable that is not written
     by the function.  */
  if (callee != NULL_TREE
      && TREE_CODE (base) == VAR_DECL
      && TREE_STATIC (base))
    {
      struct cgraph_node *node = cgraph_get_node (callee);
      bitmap not_written;

      if (node
	  && (not_written = ipa_reference_get_not_written_global (node))
	  && bitmap_bit_p (not_written, DECL_UID (base)))
	return false;
    }

  /* Check if the base variable is call-clobbered.  */
  if (DECL_P (base))
    return pt_solution_includes (gimple_call_clobber_set (call), base);
  else if ((TREE_CODE (base) == MEM_REF
	    || TREE_CODE (base) == TARGET_MEM_REF)
	   && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
    {
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
      if (!pi)
	return true;

      return pt_solutions_intersect (gimple_call_clobber_set (call), &pi->pt);
    }

  return true;
}
/* If the call in statement CALL may clobber the memory reference REF
   return true, otherwise return false.  */

bool
call_may_clobber_ref_p (gimple call, tree ref)
{
  bool res;
  ao_ref r;
  ao_ref_init (&r, ref);
  res = call_may_clobber_ref_p_1 (call, &r);
  if (res)
    ++alias_stats.call_may_clobber_ref_p_may_alias;
  else
    ++alias_stats.call_may_clobber_ref_p_no_alias;
  return res;
}
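
/* Usage sketch (illustrative only; STMT and DECL below are hypothetical
   variables of a caller, not part of this file):

     if (is_gimple_call (stmt)
	 && !call_may_clobber_ref_p (stmt, decl))
       ;  // the call cannot modify DECL, so loads of DECL may be
	  // moved across it

   Note that this only covers the call's side effects; a possible call
   LHS clobbering DECL is checked by stmt_may_clobber_ref_p below.  */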
/* If the statement STMT may clobber the memory reference REF return true,
   otherwise return false.  */

bool
stmt_may_clobber_ref_p_1 (gimple stmt, ao_ref *ref)
{
  if (is_gimple_call (stmt))
    {
      tree lhs = gimple_call_lhs (stmt);
      if (lhs
	  && TREE_CODE (lhs) != SSA_NAME)
	{
	  ao_ref r;
	  ao_ref_init (&r, lhs);
	  if (refs_may_alias_p_1 (ref, &r, true))
	    return true;
	}

      return call_may_clobber_ref_p_1 (stmt, ref);
    }
  else if (gimple_assign_single_p (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);
      if (TREE_CODE (lhs) != SSA_NAME)
	{
	  ao_ref r;
	  ao_ref_init (&r, lhs);
	  return refs_may_alias_p_1 (ref, &r, true);
	}
    }
  else if (gimple_code (stmt) == GIMPLE_ASM)
    return true;

  return false;
}

bool
stmt_may_clobber_ref_p (gimple stmt, tree ref)
{
  ao_ref r;
  ao_ref_init (&r, ref);
  return stmt_may_clobber_ref_p_1 (stmt, &r);
}
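
/* Usage sketch (illustrative only): when one reference is tested against
   many statements it is cheaper to build the ao_ref once and use the
   _1 variant directly (REF_TREE and BB below are hypothetical):

     ao_ref r;
     ao_ref_init (&r, ref_tree);
     for (gimple_stmt_iterator gsi = gsi_start_bb (bb); !gsi_end_p (gsi);
	  gsi_next (&gsi))
       if (stmt_may_clobber_ref_p_1 (gsi_stmt (gsi), &r))
	 break;
*/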
/* If STMT kills the memory reference REF return true, otherwise
   return false.  */

static bool
stmt_kills_ref_p_1 (gimple stmt, ao_ref *ref)
{
  /* For a must-alias check we need to be able to constrain
     the access properly.
     FIXME: except for BUILTIN_FREE.  */
  if (!ao_ref_base (ref)
      || ref->max_size == -1)
    return false;

  if (gimple_has_lhs (stmt)
      && TREE_CODE (gimple_get_lhs (stmt)) != SSA_NAME
      /* The assignment is not necessarily carried out if it can throw
	 and we can catch it in the current function where we could inspect
	 the previous value.
	 ??? We only need to care about the RHS throwing.  For aggregate
	 assignments or similar calls and non-call exceptions the LHS
	 might throw as well.  */
      && !stmt_can_throw_internal (stmt))
    {
      tree base, lhs = gimple_get_lhs (stmt);
      HOST_WIDE_INT size, offset, max_size, ref_offset = ref->offset;
      base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
      /* We can get MEM[symbol: sZ, index: D.8862_1] here,
	 so base == ref->base does not always hold.  */
      if (base != ref->base)
	{
	  /* If both base and ref->base are MEM_REFs, only compare the
	     first operand, and if the second operand isn't equal constant,
	     try to add the offsets into offset and ref_offset.  */
	  if (TREE_CODE (base) == MEM_REF && TREE_CODE (ref->base) == MEM_REF
	      && TREE_OPERAND (base, 0) == TREE_OPERAND (ref->base, 0))
	    {
	      if (!tree_int_cst_equal (TREE_OPERAND (base, 1),
				       TREE_OPERAND (ref->base, 1)))
		{
		  double_int off1 = mem_ref_offset (base);
		  off1 = off1.lshift (BITS_PER_UNIT == 8
				      ? 3 : exact_log2 (BITS_PER_UNIT));
		  off1 = off1 + double_int::from_shwi (offset);
		  double_int off2 = mem_ref_offset (ref->base);
		  off2 = off2.lshift (BITS_PER_UNIT == 8
				      ? 3 : exact_log2 (BITS_PER_UNIT));
		  off2 = off2 + double_int::from_shwi (ref_offset);
		  if (off1.fits_shwi () && off2.fits_shwi ())
		    {
		      offset = off1.to_shwi ();
		      ref_offset = off2.to_shwi ();
		    }
		  else
		    size = -1;
		}
	    }
	  else
	    size = -1;
	}
      /* For a must-alias check we need to be able to constrain
	 the access properly.  */
      if (size != -1 && size == max_size)
	{
	  if (offset <= ref_offset
	      && offset + size >= ref_offset + ref->max_size)
	    return true;
	}
    }
  if (is_gimple_call (stmt))
    {
      tree callee = gimple_call_fndecl (stmt);
      if (callee != NULL_TREE
	  && DECL_BUILT_IN_CLASS (callee) == BUILT_IN_NORMAL)
	switch (DECL_FUNCTION_CODE (callee))
	  {
	  case BUILT_IN_FREE:
	    {
	      tree ptr = gimple_call_arg (stmt, 0);
	      tree base = ao_ref_base (ref);
	      if (base && TREE_CODE (base) == MEM_REF
		  && TREE_OPERAND (base, 0) == ptr)
		return true;
	      break;
	    }

	  case BUILT_IN_MEMCPY:
	  case BUILT_IN_MEMPCPY:
	  case BUILT_IN_MEMMOVE:
	  case BUILT_IN_MEMSET:
	  case BUILT_IN_MEMCPY_CHK:
	  case BUILT_IN_MEMPCPY_CHK:
	  case BUILT_IN_MEMMOVE_CHK:
	  case BUILT_IN_MEMSET_CHK:
	    {
	      tree dest = gimple_call_arg (stmt, 0);
	      tree len = gimple_call_arg (stmt, 2);
	      if (!tree_fits_shwi_p (len))
		return false;
	      tree rbase = ref->base;
	      double_int roffset = double_int::from_shwi (ref->offset);
	      ao_ref dref;
	      ao_ref_init_from_ptr_and_size (&dref, dest, len);
	      tree base = ao_ref_base (&dref);
	      double_int offset = double_int::from_shwi (dref.offset);
	      double_int bpu = double_int::from_uhwi (BITS_PER_UNIT);
	      if (!base || dref.size == -1)
		return false;
	      if (TREE_CODE (base) == MEM_REF)
		{
		  if (TREE_CODE (rbase) != MEM_REF)
		    return false;
		  // Compare pointers.
		  offset += bpu * mem_ref_offset (base);
		  roffset += bpu * mem_ref_offset (rbase);
		  base = TREE_OPERAND (base, 0);
		  rbase = TREE_OPERAND (rbase, 0);
		}
	      if (base == rbase)
		{
		  double_int size = bpu * tree_to_double_int (len);
		  double_int rsize = double_int::from_uhwi (ref->max_size);
		  if (offset.sle (roffset)
		      && (roffset + rsize).sle (offset + size))
		    return true;
		}
	      break;
	    }

	  case BUILT_IN_VA_END:
	    {
	      tree ptr = gimple_call_arg (stmt, 0);
	      if (TREE_CODE (ptr) == ADDR_EXPR)
		{
		  tree base = ao_ref_base (ref);
		  if (TREE_OPERAND (ptr, 0) == base)
		    return true;
		}
	      break;
	    }

	  default:;
	  }
    }
  return false;
}

bool
stmt_kills_ref_p (gimple stmt, tree ref)
{
  ao_ref r;
  ao_ref_init (&r, ref);
  return stmt_kills_ref_p_1 (stmt, &r);
}
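
/* Usage sketch (illustrative only; EARLIER_STORE and LATER_STMT are
   hypothetical): a dead-store-elimination style check asks whether a
   later statement rewrites every byte the earlier store defines:

     if (stmt_kills_ref_p (later_stmt, gimple_assign_lhs (earlier_store)))
       ;  // the earlier store is dead if nothing reads it in between
*/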
/* Walk the virtual use-def chain of VUSE until hitting the virtual operand
   TARGET or a statement clobbering the memory reference REF in which
   case false is returned.  The walk starts with VUSE, one argument of PHI.  */

static bool
maybe_skip_until (gimple phi, tree target, ao_ref *ref,
		  tree vuse, unsigned int *cnt, bitmap *visited,
		  bool abort_on_visited)
{
  basic_block bb = gimple_bb (phi);

  if (!*visited)
    *visited = BITMAP_ALLOC (NULL);

  bitmap_set_bit (*visited, SSA_NAME_VERSION (PHI_RESULT (phi)));

  /* Walk until we hit the target.  */
  while (vuse != target)
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
      /* Recurse for PHI nodes.  */
      if (gimple_code (def_stmt) == GIMPLE_PHI)
	{
	  /* An already visited PHI node ends the walk successfully.  */
	  if (bitmap_bit_p (*visited, SSA_NAME_VERSION (PHI_RESULT (def_stmt))))
	    return !abort_on_visited;
	  vuse = get_continuation_for_phi (def_stmt, ref, cnt,
					   visited, abort_on_visited);
	  if (!vuse)
	    return false;
	  continue;
	}
      else if (gimple_nop_p (def_stmt))
	return false;
      else
	{
	  /* A clobbering statement or the end of the IL ends it failing.  */
	  ++*cnt;
	  if (stmt_may_clobber_ref_p_1 (def_stmt, ref))
	    return false;
	}
      /* If we reach a new basic-block see if we already skipped it
	 in a previous walk that ended successfully.  */
      if (gimple_bb (def_stmt) != bb)
	{
	  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (vuse)))
	    return !abort_on_visited;
	  bb = gimple_bb (def_stmt);
	}
      vuse = gimple_vuse (def_stmt);
    }

  return true;
}
/* For two PHI arguments ARG0 and ARG1 try to skip non-aliasing code
   until we hit the phi argument definition that dominates the other one.
   Return that, or NULL_TREE if there is no such definition.  */

static tree
get_continuation_for_phi_1 (gimple phi, tree arg0, tree arg1,
			    ao_ref *ref, unsigned int *cnt,
			    bitmap *visited, bool abort_on_visited)
{
  gimple def0 = SSA_NAME_DEF_STMT (arg0);
  gimple def1 = SSA_NAME_DEF_STMT (arg1);
  tree common_vuse;

  if (arg0 == arg1)
    return arg0;
  else if (gimple_nop_p (def0)
	   || (!gimple_nop_p (def1)
	       && dominated_by_p (CDI_DOMINATORS,
				  gimple_bb (def1), gimple_bb (def0))))
    {
      if (maybe_skip_until (phi, arg0, ref, arg1, cnt,
			    visited, abort_on_visited))
	return arg0;
    }
  else if (gimple_nop_p (def1)
	   || dominated_by_p (CDI_DOMINATORS,
			      gimple_bb (def0), gimple_bb (def1)))
    {
      if (maybe_skip_until (phi, arg1, ref, arg0, cnt,
			    visited, abort_on_visited))
	return arg1;
    }
  /* Special case of a diamond:
       MEM_1 = ...
       goto (cond) ? L1 : L2
       L1: store1 = ...    #MEM_2 = vuse(MEM_1)
	   goto L3
       L2: store2 = ...    #MEM_3 = vuse(MEM_1)
       L3: MEM_4 = PHI<MEM_2, MEM_3>
     We were called with the PHI at L3, MEM_2 and MEM_3 don't
     dominate each other, but still we can easily skip this PHI node
     if we recognize that the vuse MEM operand is the same for both,
     and that we can skip both statements (they don't clobber us).
     This is still linear.  Don't use maybe_skip_until, that might
     potentially be slow.  */
  else if ((common_vuse = gimple_vuse (def0))
	   && common_vuse == gimple_vuse (def1))
    {
      *cnt += 2;
      if (!stmt_may_clobber_ref_p_1 (def0, ref)
	  && !stmt_may_clobber_ref_p_1 (def1, ref))
	return common_vuse;
    }

  return NULL_TREE;
}
/* Starting from a PHI node for the virtual operand of the memory reference
   REF find a continuation virtual operand that allows to continue walking
   statements dominating PHI skipping only statements that cannot possibly
   clobber REF.  Increments *CNT for each alias disambiguation done.
   Returns NULL_TREE if no suitable virtual operand can be found.  */

tree
get_continuation_for_phi (gimple phi, ao_ref *ref,
			  unsigned int *cnt, bitmap *visited,
			  bool abort_on_visited)
{
  unsigned nargs = gimple_phi_num_args (phi);

  /* Through a single-argument PHI we can simply look through.  */
  if (nargs == 1)
    return PHI_ARG_DEF (phi, 0);

  /* For two or more arguments try to pairwise skip non-aliasing code
     until we hit the phi argument definition that dominates the other one.  */
  else if (nargs >= 2)
    {
      tree arg0, arg1;
      unsigned i;

      /* Find a candidate for the virtual operand which definition
	 dominates those of all others.  */
      arg0 = PHI_ARG_DEF (phi, 0);
      if (!SSA_NAME_IS_DEFAULT_DEF (arg0))
	for (i = 1; i < nargs; ++i)
	  {
	    arg1 = PHI_ARG_DEF (phi, i);
	    if (SSA_NAME_IS_DEFAULT_DEF (arg1))
	      {
		arg0 = arg1;
		break;
	      }
	    if (dominated_by_p (CDI_DOMINATORS,
				gimple_bb (SSA_NAME_DEF_STMT (arg0)),
				gimple_bb (SSA_NAME_DEF_STMT (arg1))))
	      arg0 = arg1;
	  }

      /* Then pairwise reduce against the found candidate.  */
      for (i = 0; i < nargs; ++i)
	{
	  arg1 = PHI_ARG_DEF (phi, i);
	  arg0 = get_continuation_for_phi_1 (phi, arg0, arg1, ref,
					     cnt, visited, abort_on_visited);
	  if (!arg0)
	    return NULL_TREE;
	}

      return arg0;
    }

  return NULL_TREE;
}
/* Based on the memory reference REF and its virtual use VUSE call
   WALKER for each virtual use that is equivalent to VUSE, including VUSE
   itself.  That is, for each virtual use for which its defining statement
   does not clobber REF.

   WALKER is called with REF, the current virtual use and DATA.  If
   WALKER returns non-NULL the walk stops and its result is returned.
   At the end of a non-successful walk NULL is returned.

   TRANSLATE if non-NULL is called with a pointer to REF, the virtual
   use which definition is a statement that may clobber REF and DATA.
   If TRANSLATE returns (void *)-1 the walk stops and NULL is returned.
   If TRANSLATE returns non-NULL the walk stops and its result is returned.
   If TRANSLATE returns NULL the walk continues and TRANSLATE is supposed
   to adjust REF and *DATA to make that valid.

   TODO: Cache the vector of equivalent vuses per ref, vuse pair.  */

void *
walk_non_aliased_vuses (ao_ref *ref, tree vuse,
			void *(*walker)(ao_ref *, tree, unsigned int, void *),
			void *(*translate)(ao_ref *, tree, void *), void *data)
{
  bitmap visited = NULL;
  void *res;
  unsigned int cnt = 0;
  bool translated = false;

  timevar_push (TV_ALIAS_STMT_WALK);

  do
    {
      gimple def_stmt;

      /* ??? Do we want to account this to TV_ALIAS_STMT_WALK?  */
      res = (*walker) (ref, vuse, cnt, data);
      /* Abort walk.  */
      if (res == (void *)-1)
	{
	  res = NULL;
	  break;
	}
      /* Lookup succeeded.  */
      else if (res != NULL)
	break;

      def_stmt = SSA_NAME_DEF_STMT (vuse);
      if (gimple_nop_p (def_stmt))
	break;
      else if (gimple_code (def_stmt) == GIMPLE_PHI)
	vuse = get_continuation_for_phi (def_stmt, ref, &cnt,
					 &visited, translated);
      else
	{
	  cnt++;
	  if (stmt_may_clobber_ref_p_1 (def_stmt, ref))
	    {
	      if (!translate)
		break;
	      res = (*translate) (ref, vuse, data);
	      /* Failed lookup and translation.  */
	      if (res == (void *)-1)
		{
		  res = NULL;
		  break;
		}
	      /* Lookup succeeded.  */
	      else if (res != NULL)
		break;
	      /* Translation succeeded, continue walking.  */
	      translated = true;
	    }
	  vuse = gimple_vuse (def_stmt);
	}
    }
  while (vuse);

  if (visited)
    BITMAP_FREE (visited);

  timevar_pop (TV_ALIAS_STMT_WALK);

  return res;
}
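
/* Usage sketch (illustrative only; the callback and variables below are
   hypothetical).  A walker returns non-NULL to stop the walk with a
   result, or NULL to continue to the next equivalent virtual use:

     static void *
     find_matching_vuse (ao_ref *ref, tree vuse, unsigned int cnt, void *data)
     {
       // e.g. look VUSE up in a table keyed on the (ref, vuse) pair and
       // return the cached value, or NULL to keep walking.
       return NULL;
     }

     ...
     ao_ref r;
     ao_ref_init (&r, ref_tree);
     void *res = walk_non_aliased_vuses (&r, gimple_vuse (use_stmt),
					 find_matching_vuse, NULL, NULL);

   With a NULL TRANSLATE the walk stops at the first statement that may
   clobber REF and the result of the last WALKER call is returned.  */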
/* Based on the memory reference REF call WALKER for each vdef which
   defining statement may clobber REF, starting with VDEF.  If REF
   is NULL_TREE, each defining statement is visited.

   WALKER is called with REF, the current vdef and DATA.  If WALKER
   returns true the walk is stopped, otherwise it continues.

   At PHI nodes walk_aliased_vdefs forks into one walk for each
   PHI argument (but only one walk continues on merge points), the
   return value is true if any of the walks was successful.

   The function returns the number of statements walked.  */

static unsigned int
walk_aliased_vdefs_1 (ao_ref *ref, tree vdef,
		      bool (*walker)(ao_ref *, tree, void *), void *data,
		      bitmap *visited, unsigned int cnt)
{
  do
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (vdef);

      if (*visited
	  && !bitmap_set_bit (*visited, SSA_NAME_VERSION (vdef)))
	return cnt;

      if (gimple_nop_p (def_stmt))
	return cnt;
      else if (gimple_code (def_stmt) == GIMPLE_PHI)
	{
	  unsigned i;
	  if (!*visited)
	    *visited = BITMAP_ALLOC (NULL);
	  for (i = 0; i < gimple_phi_num_args (def_stmt); ++i)
	    cnt += walk_aliased_vdefs_1 (ref, gimple_phi_arg_def (def_stmt, i),
					 walker, data, visited, 0);
	  return cnt;
	}

      /* ??? Do we want to account this to TV_ALIAS_STMT_WALK?  */
      cnt++;
      if ((!ref
	   || stmt_may_clobber_ref_p_1 (def_stmt, ref))
	  && (*walker) (ref, vdef, data))
	return cnt;

      vdef = gimple_vuse (def_stmt);
    }
  while (1);
}

unsigned int
walk_aliased_vdefs (ao_ref *ref, tree vdef,
		    bool (*walker)(ao_ref *, tree, void *), void *data,
		    bitmap *visited)
{
  bitmap local_visited = NULL;
  unsigned int ret;

  timevar_push (TV_ALIAS_STMT_WALK);

  ret = walk_aliased_vdefs_1 (ref, vdef, walker, data,
			      visited ? visited : &local_visited, 0);
  if (local_visited)
    BITMAP_FREE (local_visited);

  timevar_pop (TV_ALIAS_STMT_WALK);

  return ret;
}
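
/* Usage sketch (illustrative only; the callback and variables below are
   hypothetical).  The walker returns true to stop the walk early:

     static bool
     note_clobbering_vdef (ao_ref *ref, tree vdef, void *data)
     {
       // SSA_NAME_DEF_STMT (vdef) is a statement that may clobber REF;
       // record it in DATA.  Return false to keep walking.
       return false;
     }

     ...
     ao_ref r;
     ao_ref_init (&r, ref_tree);
     walk_aliased_vdefs (&r, gimple_vuse (use_stmt),
			 note_clobbering_vdef, &my_data, NULL);

   Passing NULL for VISITED lets the function manage its own bitmap for
   avoiding repeated work at PHI nodes.  */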