/* Alias analysis for trees.
   Copyright (C) 2004-2017 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "timevar.h"	/* for TV_ALIAS_STMT_WALK */
#include "tree-pretty-print.h"
#include "fold-const.h"
#include "langhooks.h"
#include "ipa-reference.h"
/* Broad overview of how alias analysis on gimple works:

   Statements clobbering or using memory are linked through the
   virtual operand factored use-def chain.  The virtual operand
   is unique per function, its symbol is accessible via gimple_vop (cfun).
   Virtual operands are used for efficiently walking memory statements
   in the gimple IL and are useful for things like value-numbering as
   a generation count for memory references.

   SSA_NAME pointers may have associated points-to information
   accessible via the SSA_NAME_PTR_INFO macro.  Flow-insensitive
   points-to information is (re-)computed by the TODO_rebuild_alias
   pass manager todo.  Points-to information is also used for more
   precise tracking of call-clobbered and call-used variables and
   related disambiguations.

   This file contains functions for disambiguating memory references,
   the so called alias-oracle and tools for walking of the gimple IL.

   The main alias-oracle entry-points are

   bool stmt_may_clobber_ref_p (gimple *, tree)

     This function queries if a statement may invalidate (parts of)
     the memory designated by the reference tree argument.

   bool ref_maybe_used_by_stmt_p (gimple *, tree)

     This function queries if a statement may need (parts of) the
     memory designated by the reference tree argument.

   There are variants of these functions that only handle the call
   part of a statement, call_may_clobber_ref_p and ref_maybe_used_by_call_p.
   Note that these do not disambiguate against a possible call lhs.

   bool refs_may_alias_p (tree, tree)

     This function tries to disambiguate two reference trees.

   bool ptr_deref_may_alias_global_p (tree)

     This function queries if dereferencing a pointer variable may
     alias global memory.

   More low-level disambiguators are available and documented in
   this file.  Low-level disambiguators dealing with points-to
   information are in tree-ssa-structalias.c.  */
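
/* Purely illustrative sketch, not part of the original sources: a pass
   that wants to know whether it may move a store to the memory
   designated by REF across the statement STMT would typically combine
   the entry-points above like

     bool dependent = stmt_may_clobber_ref_p (stmt, ref)
                      || ref_maybe_used_by_stmt_p (stmt, ref);

   where a false result means the two can be reordered; stmt and ref are
   placeholders for whatever the pass is currently looking at.  */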
/* Query statistics for the different low-level disambiguators.
   A high-level query may trigger multiple of them.  */

static struct {
  unsigned HOST_WIDE_INT refs_may_alias_p_may_alias;
  unsigned HOST_WIDE_INT refs_may_alias_p_no_alias;
  unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_may_alias;
  unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_no_alias;
  unsigned HOST_WIDE_INT call_may_clobber_ref_p_may_alias;
  unsigned HOST_WIDE_INT call_may_clobber_ref_p_no_alias;
} alias_stats;
void
dump_alias_stats (FILE *s)
{
  fprintf (s, "\nAlias oracle query stats:\n");
  fprintf (s, "  refs_may_alias_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.refs_may_alias_p_no_alias,
	   alias_stats.refs_may_alias_p_no_alias
	   + alias_stats.refs_may_alias_p_may_alias);
  fprintf (s, "  ref_maybe_used_by_call_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.ref_maybe_used_by_call_p_no_alias,
	   alias_stats.refs_may_alias_p_no_alias
	   + alias_stats.ref_maybe_used_by_call_p_may_alias);
  fprintf (s, "  call_may_clobber_ref_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.call_may_clobber_ref_p_no_alias,
	   alias_stats.call_may_clobber_ref_p_no_alias
	   + alias_stats.call_may_clobber_ref_p_may_alias);
  dump_alias_stats_in_alias_c (s);
}
/* Return true if dereferencing PTR may alias with a global variable.  */

bool
ptr_deref_may_alias_global_p (tree ptr)
{
  struct ptr_info_def *pi;

  /* If we end up with a pointer constant here it may point
     to global memory.  */
  if (TREE_CODE (ptr) != SSA_NAME)
    return true;

  pi = SSA_NAME_PTR_INFO (ptr);

  /* If we do not have points-to information for this variable,
     we have to punt.  */
  if (!pi)
    return true;

  /* ??? This does not use TBAA to prune globals ptr may not access.  */
  return pt_solution_includes_global (&pi->pt);
}
/* Return true if dereferencing PTR may alias DECL.
   The caller is responsible for applying TBAA to see if PTR
   may access DECL at all.  */

static bool
ptr_deref_may_alias_decl_p (tree ptr, tree decl)
{
  struct ptr_info_def *pi;

  /* Conversions are irrelevant for points-to information and
     data-dependence analysis can feed us those.  */
  STRIP_NOPS (ptr);

  /* Anything we do not explicitly handle aliases.  */
  if ((TREE_CODE (ptr) != SSA_NAME
       && TREE_CODE (ptr) != ADDR_EXPR
       && TREE_CODE (ptr) != POINTER_PLUS_EXPR)
      || !POINTER_TYPE_P (TREE_TYPE (ptr))
      || (!VAR_P (decl)
	  && TREE_CODE (decl) != PARM_DECL
	  && TREE_CODE (decl) != RESULT_DECL))
    return true;

  /* Disregard pointer offsetting.  */
  if (TREE_CODE (ptr) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr = TREE_OPERAND (ptr, 0);
	}
      while (TREE_CODE (ptr) == POINTER_PLUS_EXPR);
      return ptr_deref_may_alias_decl_p (ptr, decl);
    }

  /* ADDR_EXPR pointers either just offset another pointer or directly
     specify the pointed-to set.  */
  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	ptr = TREE_OPERAND (base, 0);
      else if (base
	       && DECL_P (base))
	return compare_base_decls (base, decl) != 0;
      else if (base
	       && CONSTANT_CLASS_P (base))
	return false;
      else
	return true;
    }

  /* Non-aliased variables cannot be pointed to.  */
  if (!may_be_aliased (decl))
    return false;

  /* If we do not have useful points-to information for this pointer
     we cannot disambiguate anything else.  */
  pi = SSA_NAME_PTR_INFO (ptr);
  if (!pi)
    return true;

  return pt_solution_includes (&pi->pt, decl);
}
/* Return true if dereferenced PTR1 and PTR2 may alias.
   The caller is responsible for applying TBAA to see if accesses
   through PTR1 and PTR2 may conflict at all.  */

bool
ptr_derefs_may_alias_p (tree ptr1, tree ptr2)
{
  struct ptr_info_def *pi1, *pi2;

  /* Conversions are irrelevant for points-to information and
     data-dependence analysis can feed us those.  */
  STRIP_NOPS (ptr1);
  STRIP_NOPS (ptr2);

  /* Disregard pointer offsetting.  */
  if (TREE_CODE (ptr1) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr1 = TREE_OPERAND (ptr1, 0);
	}
      while (TREE_CODE (ptr1) == POINTER_PLUS_EXPR);
      return ptr_derefs_may_alias_p (ptr1, ptr2);
    }
  if (TREE_CODE (ptr2) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr2 = TREE_OPERAND (ptr2, 0);
	}
      while (TREE_CODE (ptr2) == POINTER_PLUS_EXPR);
      return ptr_derefs_may_alias_p (ptr1, ptr2);
    }

  /* ADDR_EXPR pointers either just offset another pointer or directly
     specify the pointed-to set.  */
  if (TREE_CODE (ptr1) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr1, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	return ptr_derefs_may_alias_p (TREE_OPERAND (base, 0), ptr2);
      else if (base
	       && DECL_P (base))
	return ptr_deref_may_alias_decl_p (ptr2, base);
      else
	return true;
    }
  if (TREE_CODE (ptr2) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr2, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	return ptr_derefs_may_alias_p (ptr1, TREE_OPERAND (base, 0));
      else if (base
	       && DECL_P (base))
	return ptr_deref_may_alias_decl_p (ptr1, base);
      else
	return true;
    }

  /* From here we require SSA name pointers.  Anything else aliases.  */
  if (TREE_CODE (ptr1) != SSA_NAME
      || TREE_CODE (ptr2) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (ptr1))
      || !POINTER_TYPE_P (TREE_TYPE (ptr2)))
    return true;

  /* We may end up with two empty points-to solutions for two uses of
     the same pointer.  In this case we still want to say both pointers
     alias, so shortcut that here.  */
  if (ptr1 == ptr2)
    return true;

  /* If we do not have useful points-to information for either pointer
     we cannot disambiguate anything else.  */
  pi1 = SSA_NAME_PTR_INFO (ptr1);
  pi2 = SSA_NAME_PTR_INFO (ptr2);
  if (!pi1 || !pi2)
    return true;

  /* ??? This does not use TBAA to prune decls from the intersection
     that not both pointers may access.  */
  return pt_solutions_intersect (&pi1->pt, &pi2->pt);
}
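
/* Purely illustrative, not part of the original sources: for GIMPLE like

     p_1 = &a;
     q_2 = &b;

   the points-to solutions of p_1 and q_2 are the disjoint sets {a} and
   {b}, so ptr_derefs_may_alias_p (p_1, q_2) returns false, while two
   pointers whose solutions both contain a are answered conservatively
   with true.  */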
/* Return true if dereferencing PTR may alias *REF.
   The caller is responsible for applying TBAA to see if PTR
   may access *REF at all.  */

static bool
ptr_deref_may_alias_ref_p_1 (tree ptr, ao_ref *ref)
{
  tree base = ao_ref_base (ref);

  if (TREE_CODE (base) == MEM_REF
      || TREE_CODE (base) == TARGET_MEM_REF)
    return ptr_derefs_may_alias_p (ptr, TREE_OPERAND (base, 0));
  else if (DECL_P (base))
    return ptr_deref_may_alias_decl_p (ptr, base);

  return true;
}
324 /* Returns true if PTR1 and PTR2 compare unequal because of points-to. */
327 ptrs_compare_unequal (tree ptr1
, tree ptr2
)
  /* First resolve the pointers down to a SSA name pointer base or
     a VAR_DECL, PARM_DECL or RESULT_DECL.  This explicitly does
     not yet try to handle LABEL_DECLs, FUNCTION_DECLs, CONST_DECLs
     or STRING_CSTs which need points-to adjustments to track them
     in the points-to sets.  */
334 tree obj1
= NULL_TREE
;
335 tree obj2
= NULL_TREE
;
336 if (TREE_CODE (ptr1
) == ADDR_EXPR
)
338 tree tem
= get_base_address (TREE_OPERAND (ptr1
, 0));
342 || TREE_CODE (tem
) == PARM_DECL
343 || TREE_CODE (tem
) == RESULT_DECL
)
345 else if (TREE_CODE (tem
) == MEM_REF
)
346 ptr1
= TREE_OPERAND (tem
, 0);
348 if (TREE_CODE (ptr2
) == ADDR_EXPR
)
350 tree tem
= get_base_address (TREE_OPERAND (ptr2
, 0));
354 || TREE_CODE (tem
) == PARM_DECL
355 || TREE_CODE (tem
) == RESULT_DECL
)
357 else if (TREE_CODE (tem
) == MEM_REF
)
358 ptr2
= TREE_OPERAND (tem
, 0);
361 /* Canonicalize ptr vs. object. */
362 if (TREE_CODE (ptr1
) == SSA_NAME
&& obj2
)
364 std::swap (ptr1
, ptr2
);
365 std::swap (obj1
, obj2
);
369 /* Other code handles this correctly, no need to duplicate it here. */;
370 else if (obj1
&& TREE_CODE (ptr2
) == SSA_NAME
)
372 struct ptr_info_def
*pi
= SSA_NAME_PTR_INFO (ptr2
);
      /* We may not use restrict to optimize pointer comparisons.
	 See PR71062.  So we have to assume that restrict-pointed-to
	 may in fact be obj1.  */
377 || pi
->pt
.vars_contains_restrict
378 || pi
->pt
.vars_contains_interposable
)
381 && (TREE_STATIC (obj1
) || DECL_EXTERNAL (obj1
)))
383 varpool_node
*node
= varpool_node::get (obj1
);
384 /* If obj1 may bind to NULL give up (see below). */
386 || ! node
->nonzero_address ()
387 || ! decl_binds_to_current_def_p (obj1
))
390 return !pt_solution_includes (&pi
->pt
, obj1
);
393 /* ??? We'd like to handle ptr1 != NULL and ptr1 != ptr2
394 but those require pt.null to be conservatively correct. */
399 /* Returns whether reference REF to BASE may refer to global memory. */
402 ref_may_alias_global_p_1 (tree base
)
405 return is_global_var (base
);
406 else if (TREE_CODE (base
) == MEM_REF
407 || TREE_CODE (base
) == TARGET_MEM_REF
)
408 return ptr_deref_may_alias_global_p (TREE_OPERAND (base
, 0));
413 ref_may_alias_global_p (ao_ref
*ref
)
415 tree base
= ao_ref_base (ref
);
416 return ref_may_alias_global_p_1 (base
);
420 ref_may_alias_global_p (tree ref
)
422 tree base
= get_base_address (ref
);
423 return ref_may_alias_global_p_1 (base
);
/* Return true if STMT may clobber global memory.  */
429 stmt_may_clobber_global_p (gimple
*stmt
)
433 if (!gimple_vdef (stmt
))
  /* ??? We can ask the oracle whether an artificial pointer
     dereference with a pointer with points-to information covering
     all global memory (what about non-address taken memory?) may be
     clobbered by this call.  As there is at the moment no convenient
     way of doing that without generating garbage do some manual
     checking instead.
     ??? We could make a NULL ao_ref argument to the various
     predicates special, meaning any global memory.  */
445 switch (gimple_code (stmt
))
448 lhs
= gimple_assign_lhs (stmt
);
449 return (TREE_CODE (lhs
) != SSA_NAME
450 && ref_may_alias_global_p (lhs
));
459 /* Dump alias information on FILE. */
462 dump_alias_info (FILE *file
)
467 = lang_hooks
.decl_printable_name (current_function_decl
, 2);
470 fprintf (file
, "\n\nAlias information for %s\n\n", funcname
);
472 fprintf (file
, "Aliased symbols\n\n");
474 FOR_EACH_LOCAL_DECL (cfun
, i
, var
)
476 if (may_be_aliased (var
))
477 dump_variable (file
, var
);
480 fprintf (file
, "\nCall clobber information\n");
482 fprintf (file
, "\nESCAPED");
483 dump_points_to_solution (file
, &cfun
->gimple_df
->escaped
);
485 fprintf (file
, "\n\nFlow-insensitive points-to information\n\n");
487 FOR_EACH_SSA_NAME (i
, ptr
, cfun
)
489 struct ptr_info_def
*pi
;
491 if (!POINTER_TYPE_P (TREE_TYPE (ptr
))
492 || SSA_NAME_IN_FREE_LIST (ptr
))
495 pi
= SSA_NAME_PTR_INFO (ptr
);
497 dump_points_to_info_for (file
, ptr
);
500 fprintf (file
, "\n");
504 /* Dump alias information on stderr. */
507 debug_alias_info (void)
509 dump_alias_info (stderr
);
513 /* Dump the points-to set *PT into FILE. */
516 dump_points_to_solution (FILE *file
, struct pt_solution
*pt
)
519 fprintf (file
, ", points-to anything");
522 fprintf (file
, ", points-to non-local");
525 fprintf (file
, ", points-to escaped");
528 fprintf (file
, ", points-to unit escaped");
531 fprintf (file
, ", points-to NULL");
535 fprintf (file
, ", points-to vars: ");
536 dump_decl_set (file
, pt
->vars
);
537 if (pt
->vars_contains_nonlocal
538 || pt
->vars_contains_escaped
539 || pt
->vars_contains_escaped_heap
540 || pt
->vars_contains_restrict
)
542 const char *comma
= "";
543 fprintf (file
, " (");
544 if (pt
->vars_contains_nonlocal
)
546 fprintf (file
, "nonlocal");
549 if (pt
->vars_contains_escaped
)
551 fprintf (file
, "%sescaped", comma
);
554 if (pt
->vars_contains_escaped_heap
)
556 fprintf (file
, "%sescaped heap", comma
);
559 if (pt
->vars_contains_restrict
)
561 fprintf (file
, "%srestrict", comma
);
564 if (pt
->vars_contains_interposable
)
565 fprintf (file
, "%sinterposable", comma
);
572 /* Unified dump function for pt_solution. */
575 debug (pt_solution
&ref
)
577 dump_points_to_solution (stderr
, &ref
);
581 debug (pt_solution
*ptr
)
586 fprintf (stderr
, "<nil>\n");
590 /* Dump points-to information for SSA_NAME PTR into FILE. */
593 dump_points_to_info_for (FILE *file
, tree ptr
)
595 struct ptr_info_def
*pi
= SSA_NAME_PTR_INFO (ptr
);
597 print_generic_expr (file
, ptr
, dump_flags
);
600 dump_points_to_solution (file
, &pi
->pt
);
602 fprintf (file
, ", points-to anything");
604 fprintf (file
, "\n");
608 /* Dump points-to information for VAR into stderr. */
611 debug_points_to_info_for (tree var
)
613 dump_points_to_info_for (stderr
, var
);
617 /* Initializes the alias-oracle reference representation *R from REF. */
void
ao_ref_init (ao_ref *r, tree ref)
{
  r->ref = ref;
  r->base = NULL_TREE;
  r->offset = 0;
  r->size = -1;
  r->max_size = -1;
  r->ref_alias_set = -1;
  r->base_alias_set = -1;
  r->volatile_p = ref ? TREE_THIS_VOLATILE (ref) : false;
}
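
/* Explanatory note added here for clarity (not part of the original
   sources): the -1 values above act as "not computed yet" markers; the
   alias sets are filled in lazily by ao_ref_alias_set and
   ao_ref_base_alias_set below, so a caller can cheaply do

     ao_ref r;
     ao_ref_init (&r, ref);

   and only pay for alias-set computation if a TBAA query is actually
   reached.  Here ref stands for whatever tree reference the caller has.  */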
/* Returns the base object of the memory reference *REF.  */

tree
ao_ref_base (ao_ref *ref)
{
  bool reverse;

  if (ref->base)
    return ref->base;
  ref->base = get_ref_base_and_extent (ref->ref, &ref->offset, &ref->size,
				       &ref->max_size, &reverse);
  return ref->base;
}

/* Returns the base object alias set of the memory reference *REF.  */

alias_set_type
ao_ref_base_alias_set (ao_ref *ref)
{
  tree base_ref;
  if (ref->base_alias_set != -1)
    return ref->base_alias_set;
  if (!ref->ref)
    return 0;
  base_ref = ref->ref;
  while (handled_component_p (base_ref))
    base_ref = TREE_OPERAND (base_ref, 0);
  ref->base_alias_set = get_alias_set (base_ref);
  return ref->base_alias_set;
}

/* Returns the reference alias set of the memory reference *REF.  */

alias_set_type
ao_ref_alias_set (ao_ref *ref)
{
  if (ref->ref_alias_set != -1)
    return ref->ref_alias_set;
  ref->ref_alias_set = get_alias_set (ref->ref);
  return ref->ref_alias_set;
}
/* Init an alias-oracle reference representation from a gimple pointer
   PTR and a gimple size SIZE in bytes.  If SIZE is NULL_TREE then the
   size is assumed to be unknown.  The access is assumed to be only
   to or after the pointer target, not before it.  */

void
ao_ref_init_from_ptr_and_size (ao_ref *ref, tree ptr, tree size)
{
  HOST_WIDE_INT t, size_hwi, extra_offset = 0;
  ref->ref = NULL_TREE;
  if (TREE_CODE (ptr) == SSA_NAME)
    {
      gimple *stmt = SSA_NAME_DEF_STMT (ptr);
      if (gimple_assign_single_p (stmt)
	  && gimple_assign_rhs_code (stmt) == ADDR_EXPR)
	ptr = gimple_assign_rhs1 (stmt);
      else if (is_gimple_assign (stmt)
	       && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
	       && TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST)
	{
	  ptr = gimple_assign_rhs1 (stmt);
	  extra_offset = BITS_PER_UNIT
			 * int_cst_value (gimple_assign_rhs2 (stmt));
	}
    }

  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      ref->base = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &t);
      if (ref->base)
	ref->offset = BITS_PER_UNIT * t;
      else
	{
	  size = NULL_TREE;
	  ref->offset = 0;
	  ref->base = get_base_address (TREE_OPERAND (ptr, 0));
	}
    }
  else
    {
      ref->base = build2 (MEM_REF, char_type_node,
			  ptr, null_pointer_node);
      ref->offset = 0;
    }
  ref->offset += extra_offset;
  if (size
      && tree_fits_shwi_p (size)
      && (size_hwi = tree_to_shwi (size)) <= HOST_WIDE_INT_MAX / BITS_PER_UNIT)
    ref->max_size = ref->size = size_hwi * BITS_PER_UNIT;
  else
    ref->max_size = ref->size = -1;
  ref->ref_alias_set = 0;
  ref->base_alias_set = 0;
  ref->volatile_p = false;
}
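
/* Illustrative sketch, not part of the original sources: a typical use
   of ao_ref_init_from_ptr_and_size is describing the memory read or
   written by a string builtin.  For a call to memcpy (dest, src, n) the
   source area would be modelled as

     ao_ref dref;
     ao_ref_init_from_ptr_and_size (&dref,
				    gimple_call_arg (call, 1),
				    gimple_call_arg (call, 2));

   and then tested with refs_may_alias_p_1 (&dref, ref, false), which is
   how the builtin handling further down in this file uses it; call and
   ref are placeholders here.  */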
/* Return 1 if TYPE1 and TYPE2 are to be considered equivalent for the
   purpose of TBAA.  Return 0 if they are distinct and -1 if we cannot
   decide.  */

static int
same_type_for_tbaa (tree type1, tree type2)
{
  type1 = TYPE_MAIN_VARIANT (type1);
  type2 = TYPE_MAIN_VARIANT (type2);

  /* If we would have to do structural comparison bail out.  */
  if (TYPE_STRUCTURAL_EQUALITY_P (type1)
      || TYPE_STRUCTURAL_EQUALITY_P (type2))
    return -1;

  /* Compare the canonical types.  */
  if (TYPE_CANONICAL (type1) == TYPE_CANONICAL (type2))
    return 1;

  /* ??? Array types are not properly unified in all cases as we have
     spurious changes in the index types for example.  Removing this
     causes all sorts of problems with the Fortran frontend.  */
  if (TREE_CODE (type1) == ARRAY_TYPE
      && TREE_CODE (type2) == ARRAY_TYPE)
    return -1;

  /* ??? In Ada, an lvalue of an unconstrained type can be used to access an
     object of one of its constrained subtypes, e.g. when a function with an
     unconstrained parameter passed by reference is called on an object and
     inlined.  But, even in the case of a fixed size, type and subtypes are
     not equivalent enough as to share the same TYPE_CANONICAL, since this
     would mean that conversions between them are useless, whereas they are
     not (e.g. type and subtypes can have different modes).  So, in the end,
     they are only guaranteed to have the same alias set.  */
  if (get_alias_set (type1) == get_alias_set (type2))
    return -1;

  /* The types are known to be not equal.  */
  return 0;
}
771 /* Determine if the two component references REF1 and REF2 which are
772 based on access types TYPE1 and TYPE2 and of which at least one is based
773 on an indirect reference may alias. REF2 is the only one that can
774 be a decl in which case REF2_IS_DECL is true.
775 REF1_ALIAS_SET, BASE1_ALIAS_SET, REF2_ALIAS_SET and BASE2_ALIAS_SET
776 are the respective alias sets. */
779 aliasing_component_refs_p (tree ref1
,
780 alias_set_type ref1_alias_set
,
781 alias_set_type base1_alias_set
,
782 HOST_WIDE_INT offset1
, HOST_WIDE_INT max_size1
,
784 alias_set_type ref2_alias_set
,
785 alias_set_type base2_alias_set
,
786 HOST_WIDE_INT offset2
, HOST_WIDE_INT max_size2
,
  /* If one reference is a component reference through pointers try to find a
     common base and apply offset based disambiguation.  This handles
     for example
       struct A { int i; int j; } *q;
       struct B { struct A a; int k; } *p;
     disambiguating q->i and p->a.j.  */
800 /* Choose bases and base types to search for. */
802 while (handled_component_p (base1
))
803 base1
= TREE_OPERAND (base1
, 0);
804 type1
= TREE_TYPE (base1
);
806 while (handled_component_p (base2
))
807 base2
= TREE_OPERAND (base2
, 0);
808 type2
= TREE_TYPE (base2
);
810 /* Now search for the type1 in the access path of ref2. This
811 would be a common base for doing offset based disambiguation on. */
813 while (handled_component_p (*refp
)
814 && same_type_for_tbaa (TREE_TYPE (*refp
), type1
) == 0)
815 refp
= &TREE_OPERAND (*refp
, 0);
816 same_p
= same_type_for_tbaa (TREE_TYPE (*refp
), type1
);
817 /* If we couldn't compare types we have to bail out. */
820 else if (same_p
== 1)
822 HOST_WIDE_INT offadj
, sztmp
, msztmp
;
824 get_ref_base_and_extent (*refp
, &offadj
, &sztmp
, &msztmp
, &reverse
);
826 get_ref_base_and_extent (base1
, &offadj
, &sztmp
, &msztmp
, &reverse
);
828 return ranges_overlap_p (offset1
, max_size1
, offset2
, max_size2
);
830 /* If we didn't find a common base, try the other way around. */
832 while (handled_component_p (*refp
)
833 && same_type_for_tbaa (TREE_TYPE (*refp
), type2
) == 0)
834 refp
= &TREE_OPERAND (*refp
, 0);
835 same_p
= same_type_for_tbaa (TREE_TYPE (*refp
), type2
);
836 /* If we couldn't compare types we have to bail out. */
839 else if (same_p
== 1)
841 HOST_WIDE_INT offadj
, sztmp
, msztmp
;
843 get_ref_base_and_extent (*refp
, &offadj
, &sztmp
, &msztmp
, &reverse
);
845 get_ref_base_and_extent (base2
, &offadj
, &sztmp
, &msztmp
, &reverse
);
847 return ranges_overlap_p (offset1
, max_size1
, offset2
, max_size2
);
  /* If we have two type access paths B1.path1 and B2.path2 they may
     only alias if either B1 is in B2.path2 or B2 is in B1.path1.
     But we can still have a path that goes B1.path1...B2.path2 with
     a part that we do not see.  So we can only disambiguate now
     if there is no B2 in the tail of path1 and no B1 on the
     tail of path2.  */
856 if (base1_alias_set
== ref2_alias_set
857 || alias_set_subset_of (base1_alias_set
, ref2_alias_set
))
859 /* If this is ptr vs. decl then we know there is no ptr ... decl path. */
861 return (base2_alias_set
== ref1_alias_set
862 || alias_set_subset_of (base2_alias_set
, ref1_alias_set
));
866 /* Return true if we can determine that component references REF1 and REF2,
867 that are within a common DECL, cannot overlap. */
870 nonoverlapping_component_refs_of_decl_p (tree ref1
, tree ref2
)
872 auto_vec
<tree
, 16> component_refs1
;
873 auto_vec
<tree
, 16> component_refs2
;
875 /* Create the stack of handled components for REF1. */
876 while (handled_component_p (ref1
))
878 component_refs1
.safe_push (ref1
);
879 ref1
= TREE_OPERAND (ref1
, 0);
881 if (TREE_CODE (ref1
) == MEM_REF
)
883 if (!integer_zerop (TREE_OPERAND (ref1
, 1)))
885 ref1
= TREE_OPERAND (TREE_OPERAND (ref1
, 0), 0);
888 /* Create the stack of handled components for REF2. */
889 while (handled_component_p (ref2
))
891 component_refs2
.safe_push (ref2
);
892 ref2
= TREE_OPERAND (ref2
, 0);
894 if (TREE_CODE (ref2
) == MEM_REF
)
896 if (!integer_zerop (TREE_OPERAND (ref2
, 1)))
898 ref2
= TREE_OPERAND (TREE_OPERAND (ref2
, 0), 0);
  /* Bases must be either the same or uncomparable.  */
902 gcc_checking_assert (ref1
== ref2
903 || (DECL_P (ref1
) && DECL_P (ref2
)
904 && compare_base_decls (ref1
, ref2
) != 0));
906 /* Pop the stacks in parallel and examine the COMPONENT_REFs of the same
907 rank. This is sufficient because we start from the same DECL and you
908 cannot reference several fields at a time with COMPONENT_REFs (unlike
909 with ARRAY_RANGE_REFs for arrays) so you always need the same number
910 of them to access a sub-component, unless you're in a union, in which
911 case the return value will precisely be false. */
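
  /* Illustrative example added here (not in the original sources): for

       struct S { struct { int a; int b; } in; int c; } s;

     the accesses s.in.a and s.c reach different FIELD_DECLs of the same
     RECORD_TYPE already at the first rank, and s.in.a vs. s.in.b differ
     at the second rank, so both pairs make this function return true;
     for a union the RECORD_TYPE check below fails and we return false
     instead.  */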
916 if (component_refs1
.is_empty ())
918 ref1
= component_refs1
.pop ();
920 while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref1
, 0))));
924 if (component_refs2
.is_empty ())
926 ref2
= component_refs2
.pop ();
928 while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref2
, 0))));
930 /* Beware of BIT_FIELD_REF. */
931 if (TREE_CODE (ref1
) != COMPONENT_REF
932 || TREE_CODE (ref2
) != COMPONENT_REF
)
935 tree field1
= TREE_OPERAND (ref1
, 1);
936 tree field2
= TREE_OPERAND (ref2
, 1);
938 /* ??? We cannot simply use the type of operand #0 of the refs here
939 as the Fortran compiler smuggles type punning into COMPONENT_REFs
940 for common blocks instead of using unions like everyone else. */
941 tree type1
= DECL_CONTEXT (field1
);
942 tree type2
= DECL_CONTEXT (field2
);
944 /* We cannot disambiguate fields in a union or qualified union. */
945 if (type1
!= type2
|| TREE_CODE (type1
) != RECORD_TYPE
)
948 if (field1
!= field2
)
950 /* A field and its representative need to be considered the
952 if (DECL_BIT_FIELD_REPRESENTATIVE (field1
) == field2
953 || DECL_BIT_FIELD_REPRESENTATIVE (field2
) == field1
)
955 /* Different fields of the same record type cannot overlap.
956 ??? Bitfields can overlap at RTL level so punt on them. */
957 if (DECL_BIT_FIELD (field1
) && DECL_BIT_FIELD (field2
))
/* qsort compare function to sort FIELD_DECLs by their
   DECL_FIELD_CONTEXT TYPE_UID.  */
970 ncr_compar (const void *field1_
, const void *field2_
)
972 const_tree field1
= *(const_tree
*) const_cast <void *>(field1_
);
973 const_tree field2
= *(const_tree
*) const_cast <void *>(field2_
);
974 unsigned int uid1
= TYPE_UID (DECL_FIELD_CONTEXT (field1
));
975 unsigned int uid2
= TYPE_UID (DECL_FIELD_CONTEXT (field2
));
978 else if (uid1
> uid2
)
983 /* Return true if we can determine that the fields referenced cannot
984 overlap for any pair of objects. */
987 nonoverlapping_component_refs_p (const_tree x
, const_tree y
)
989 if (!flag_strict_aliasing
991 || TREE_CODE (x
) != COMPONENT_REF
992 || TREE_CODE (y
) != COMPONENT_REF
)
995 auto_vec
<const_tree
, 16> fieldsx
;
996 while (TREE_CODE (x
) == COMPONENT_REF
)
998 tree field
= TREE_OPERAND (x
, 1);
999 tree type
= DECL_FIELD_CONTEXT (field
);
1000 if (TREE_CODE (type
) == RECORD_TYPE
)
1001 fieldsx
.safe_push (field
);
1002 x
= TREE_OPERAND (x
, 0);
1004 if (fieldsx
.length () == 0)
1006 auto_vec
<const_tree
, 16> fieldsy
;
1007 while (TREE_CODE (y
) == COMPONENT_REF
)
1009 tree field
= TREE_OPERAND (y
, 1);
1010 tree type
= DECL_FIELD_CONTEXT (field
);
1011 if (TREE_CODE (type
) == RECORD_TYPE
)
1012 fieldsy
.safe_push (TREE_OPERAND (y
, 1));
1013 y
= TREE_OPERAND (y
, 0);
1015 if (fieldsy
.length () == 0)
1018 /* Most common case first. */
1019 if (fieldsx
.length () == 1
1020 && fieldsy
.length () == 1)
1021 return ((DECL_FIELD_CONTEXT (fieldsx
[0])
1022 == DECL_FIELD_CONTEXT (fieldsy
[0]))
1023 && fieldsx
[0] != fieldsy
[0]
1024 && !(DECL_BIT_FIELD (fieldsx
[0]) && DECL_BIT_FIELD (fieldsy
[0])));
1026 if (fieldsx
.length () == 2)
1028 if (ncr_compar (&fieldsx
[0], &fieldsx
[1]) == 1)
1029 std::swap (fieldsx
[0], fieldsx
[1]);
1032 fieldsx
.qsort (ncr_compar
);
1034 if (fieldsy
.length () == 2)
1036 if (ncr_compar (&fieldsy
[0], &fieldsy
[1]) == 1)
1037 std::swap (fieldsy
[0], fieldsy
[1]);
1040 fieldsy
.qsort (ncr_compar
);
1042 unsigned i
= 0, j
= 0;
1045 const_tree fieldx
= fieldsx
[i
];
1046 const_tree fieldy
= fieldsy
[j
];
1047 tree typex
= DECL_FIELD_CONTEXT (fieldx
);
1048 tree typey
= DECL_FIELD_CONTEXT (fieldy
);
1051 /* We're left with accessing different fields of a structure,
1052 no possible overlap. */
1053 if (fieldx
!= fieldy
)
1055 /* A field and its representative need to be considered the
1057 if (DECL_BIT_FIELD_REPRESENTATIVE (fieldx
) == fieldy
1058 || DECL_BIT_FIELD_REPRESENTATIVE (fieldy
) == fieldx
)
1060 /* Different fields of the same record type cannot overlap.
1061 ??? Bitfields can overlap at RTL level so punt on them. */
1062 if (DECL_BIT_FIELD (fieldx
) && DECL_BIT_FIELD (fieldy
))
1067 if (TYPE_UID (typex
) < TYPE_UID (typey
))
1070 if (i
== fieldsx
.length ())
1076 if (j
== fieldsy
.length ())
1086 /* Return true if two memory references based on the variables BASE1
1087 and BASE2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
1088 [OFFSET2, OFFSET2 + MAX_SIZE2) may alias. REF1 and REF2
1089 if non-NULL are the complete memory reference trees. */
1092 decl_refs_may_alias_p (tree ref1
, tree base1
,
1093 HOST_WIDE_INT offset1
, HOST_WIDE_INT max_size1
,
1094 tree ref2
, tree base2
,
1095 HOST_WIDE_INT offset2
, HOST_WIDE_INT max_size2
)
1097 gcc_checking_assert (DECL_P (base1
) && DECL_P (base2
));
1099 /* If both references are based on different variables, they cannot alias. */
1100 if (compare_base_decls (base1
, base2
) == 0)
1103 /* If both references are based on the same variable, they cannot alias if
1104 the accesses do not overlap. */
1105 if (!ranges_overlap_p (offset1
, max_size1
, offset2
, max_size2
))
1108 /* For components with variable position, the above test isn't sufficient,
1109 so we disambiguate component references manually. */
1111 && handled_component_p (ref1
) && handled_component_p (ref2
)
1112 && nonoverlapping_component_refs_of_decl_p (ref1
, ref2
))
1118 /* Return true if an indirect reference based on *PTR1 constrained
1119 to [OFFSET1, OFFSET1 + MAX_SIZE1) may alias a variable based on BASE2
1120 constrained to [OFFSET2, OFFSET2 + MAX_SIZE2). *PTR1 and BASE2 have
1121 the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
1122 in which case they are computed on-demand. REF1 and REF2
1123 if non-NULL are the complete memory reference trees. */
1126 indirect_ref_may_alias_decl_p (tree ref1 ATTRIBUTE_UNUSED
, tree base1
,
1127 HOST_WIDE_INT offset1
,
1128 HOST_WIDE_INT max_size1 ATTRIBUTE_UNUSED
,
1129 alias_set_type ref1_alias_set
,
1130 alias_set_type base1_alias_set
,
1131 tree ref2 ATTRIBUTE_UNUSED
, tree base2
,
1132 HOST_WIDE_INT offset2
, HOST_WIDE_INT max_size2
,
1133 alias_set_type ref2_alias_set
,
1134 alias_set_type base2_alias_set
, bool tbaa_p
)
1137 tree ptrtype1
, dbase2
;
1138 HOST_WIDE_INT offset1p
= offset1
, offset2p
= offset2
;
1139 HOST_WIDE_INT doffset1
, doffset2
;
1141 gcc_checking_assert ((TREE_CODE (base1
) == MEM_REF
1142 || TREE_CODE (base1
) == TARGET_MEM_REF
)
1145 ptr1
= TREE_OPERAND (base1
, 0);
1147 /* The offset embedded in MEM_REFs can be negative. Bias them
1148 so that the resulting offset adjustment is positive. */
1149 offset_int moff
= mem_ref_offset (base1
);
1150 moff
<<= LOG2_BITS_PER_UNIT
;
1151 if (wi::neg_p (moff
))
1152 offset2p
+= (-moff
).to_short_addr ();
1154 offset1p
+= moff
.to_short_addr ();
  /* If only one reference is based on a variable, they cannot alias if
     the pointer access is beyond the extent of the variable access.
     (the pointer base cannot validly point to an offset less than zero
     of a variable)
     ??? IVOPTs creates bases that do not honor this restriction,
     so do not apply this optimization for TARGET_MEM_REFs.  */
1162 if (TREE_CODE (base1
) != TARGET_MEM_REF
1163 && !ranges_overlap_p (MAX (0, offset1p
), -1, offset2p
, max_size2
))
1165 /* They also cannot alias if the pointer may not point to the decl. */
1166 if (!ptr_deref_may_alias_decl_p (ptr1
, base2
))
1169 /* Disambiguations that rely on strict aliasing rules follow. */
1170 if (!flag_strict_aliasing
|| !tbaa_p
)
1173 ptrtype1
= TREE_TYPE (TREE_OPERAND (base1
, 1));
1175 /* If the alias set for a pointer access is zero all bets are off. */
1176 if (base1_alias_set
== 0)
1179 /* When we are trying to disambiguate an access with a pointer dereference
1180 as base versus one with a decl as base we can use both the size
1181 of the decl and its dynamic type for extra disambiguation.
1182 ??? We do not know anything about the dynamic type of the decl
1183 other than that its alias-set contains base2_alias_set as a subset
1184 which does not help us here. */
1185 /* As we know nothing useful about the dynamic type of the decl just
1186 use the usual conflict check rather than a subset test.
1187 ??? We could introduce -fvery-strict-aliasing when the language
1188 does not allow decls to have a dynamic type that differs from their
1189 static type. Then we can check
1190 !alias_set_subset_of (base1_alias_set, base2_alias_set) instead. */
1191 if (base1_alias_set
!= base2_alias_set
1192 && !alias_sets_conflict_p (base1_alias_set
, base2_alias_set
))
1194 /* If the size of the access relevant for TBAA through the pointer
1195 is bigger than the size of the decl we can't possibly access the
1196 decl via that pointer. */
1197 if (DECL_SIZE (base2
) && COMPLETE_TYPE_P (TREE_TYPE (ptrtype1
))
1198 && TREE_CODE (DECL_SIZE (base2
)) == INTEGER_CST
1199 && TREE_CODE (TYPE_SIZE (TREE_TYPE (ptrtype1
))) == INTEGER_CST
1200 /* ??? This in turn may run afoul when a decl of type T which is
1201 a member of union type U is accessed through a pointer to
1202 type U and sizeof T is smaller than sizeof U. */
1203 && TREE_CODE (TREE_TYPE (ptrtype1
)) != UNION_TYPE
1204 && TREE_CODE (TREE_TYPE (ptrtype1
)) != QUAL_UNION_TYPE
1205 && tree_int_cst_lt (DECL_SIZE (base2
), TYPE_SIZE (TREE_TYPE (ptrtype1
))))
1211 /* If the decl is accessed via a MEM_REF, reconstruct the base
1212 we can use for TBAA and an appropriately adjusted offset. */
1214 while (handled_component_p (dbase2
))
1215 dbase2
= TREE_OPERAND (dbase2
, 0);
1218 if (TREE_CODE (dbase2
) == MEM_REF
1219 || TREE_CODE (dbase2
) == TARGET_MEM_REF
)
1221 offset_int moff
= mem_ref_offset (dbase2
);
1222 moff
<<= LOG2_BITS_PER_UNIT
;
1223 if (wi::neg_p (moff
))
1224 doffset1
-= (-moff
).to_short_addr ();
1226 doffset2
-= moff
.to_short_addr ();
1229 /* If either reference is view-converted, give up now. */
1230 if (same_type_for_tbaa (TREE_TYPE (base1
), TREE_TYPE (ptrtype1
)) != 1
1231 || same_type_for_tbaa (TREE_TYPE (dbase2
), TREE_TYPE (base2
)) != 1)
  /* If both references are through the same type, they do not alias
     if the accesses do not overlap.  This does extra disambiguation
     for mixed/pointer accesses but requires strict aliasing.
     For MEM_REFs we require that the component-ref offset we computed
     is relative to the start of the type which we ensure by
     comparing rvalue and access type and disregarding the constant
     pointer offset.  */
1241 if ((TREE_CODE (base1
) != TARGET_MEM_REF
1242 || (!TMR_INDEX (base1
) && !TMR_INDEX2 (base1
)))
1243 && same_type_for_tbaa (TREE_TYPE (base1
), TREE_TYPE (dbase2
)) == 1)
1244 return ranges_overlap_p (doffset1
, max_size1
, doffset2
, max_size2
);
1247 && nonoverlapping_component_refs_p (ref1
, ref2
))
1250 /* Do access-path based disambiguation. */
1252 && (handled_component_p (ref1
) || handled_component_p (ref2
)))
1253 return aliasing_component_refs_p (ref1
,
1254 ref1_alias_set
, base1_alias_set
,
1257 ref2_alias_set
, base2_alias_set
,
1258 offset2
, max_size2
, true);
1263 /* Return true if two indirect references based on *PTR1
1264 and *PTR2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
1265 [OFFSET2, OFFSET2 + MAX_SIZE2) may alias. *PTR1 and *PTR2 have
1266 the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
1267 in which case they are computed on-demand. REF1 and REF2
1268 if non-NULL are the complete memory reference trees. */
1271 indirect_refs_may_alias_p (tree ref1 ATTRIBUTE_UNUSED
, tree base1
,
1272 HOST_WIDE_INT offset1
, HOST_WIDE_INT max_size1
,
1273 alias_set_type ref1_alias_set
,
1274 alias_set_type base1_alias_set
,
1275 tree ref2 ATTRIBUTE_UNUSED
, tree base2
,
1276 HOST_WIDE_INT offset2
, HOST_WIDE_INT max_size2
,
1277 alias_set_type ref2_alias_set
,
1278 alias_set_type base2_alias_set
, bool tbaa_p
)
1282 tree ptrtype1
, ptrtype2
;
1284 gcc_checking_assert ((TREE_CODE (base1
) == MEM_REF
1285 || TREE_CODE (base1
) == TARGET_MEM_REF
)
1286 && (TREE_CODE (base2
) == MEM_REF
1287 || TREE_CODE (base2
) == TARGET_MEM_REF
));
1289 ptr1
= TREE_OPERAND (base1
, 0);
1290 ptr2
= TREE_OPERAND (base2
, 0);
1292 /* If both bases are based on pointers they cannot alias if they may not
1293 point to the same memory object or if they point to the same object
1294 and the accesses do not overlap. */
1295 if ((!cfun
|| gimple_in_ssa_p (cfun
))
1296 && operand_equal_p (ptr1
, ptr2
, 0)
1297 && (((TREE_CODE (base1
) != TARGET_MEM_REF
1298 || (!TMR_INDEX (base1
) && !TMR_INDEX2 (base1
)))
1299 && (TREE_CODE (base2
) != TARGET_MEM_REF
1300 || (!TMR_INDEX (base2
) && !TMR_INDEX2 (base2
))))
1301 || (TREE_CODE (base1
) == TARGET_MEM_REF
1302 && TREE_CODE (base2
) == TARGET_MEM_REF
1303 && (TMR_STEP (base1
) == TMR_STEP (base2
)
1304 || (TMR_STEP (base1
) && TMR_STEP (base2
)
1305 && operand_equal_p (TMR_STEP (base1
),
1306 TMR_STEP (base2
), 0)))
1307 && (TMR_INDEX (base1
) == TMR_INDEX (base2
)
1308 || (TMR_INDEX (base1
) && TMR_INDEX (base2
)
1309 && operand_equal_p (TMR_INDEX (base1
),
1310 TMR_INDEX (base2
), 0)))
1311 && (TMR_INDEX2 (base1
) == TMR_INDEX2 (base2
)
1312 || (TMR_INDEX2 (base1
) && TMR_INDEX2 (base2
)
1313 && operand_equal_p (TMR_INDEX2 (base1
),
1314 TMR_INDEX2 (base2
), 0))))))
1317 /* The offset embedded in MEM_REFs can be negative. Bias them
1318 so that the resulting offset adjustment is positive. */
1319 moff
= mem_ref_offset (base1
);
1320 moff
<<= LOG2_BITS_PER_UNIT
;
1321 if (wi::neg_p (moff
))
1322 offset2
+= (-moff
).to_short_addr ();
1324 offset1
+= moff
.to_shwi ();
1325 moff
= mem_ref_offset (base2
);
1326 moff
<<= LOG2_BITS_PER_UNIT
;
1327 if (wi::neg_p (moff
))
1328 offset1
+= (-moff
).to_short_addr ();
1330 offset2
+= moff
.to_short_addr ();
1331 return ranges_overlap_p (offset1
, max_size1
, offset2
, max_size2
);
1333 if (!ptr_derefs_may_alias_p (ptr1
, ptr2
))
1336 /* Disambiguations that rely on strict aliasing rules follow. */
1337 if (!flag_strict_aliasing
|| !tbaa_p
)
1340 ptrtype1
= TREE_TYPE (TREE_OPERAND (base1
, 1));
1341 ptrtype2
= TREE_TYPE (TREE_OPERAND (base2
, 1));
1343 /* If the alias set for a pointer access is zero all bets are off. */
1344 if (base1_alias_set
== 0
1345 || base2_alias_set
== 0)
1348 /* If both references are through the same type, they do not alias
1349 if the accesses do not overlap. This does extra disambiguation
1350 for mixed/pointer accesses but requires strict aliasing. */
1351 if ((TREE_CODE (base1
) != TARGET_MEM_REF
1352 || (!TMR_INDEX (base1
) && !TMR_INDEX2 (base1
)))
1353 && (TREE_CODE (base2
) != TARGET_MEM_REF
1354 || (!TMR_INDEX (base2
) && !TMR_INDEX2 (base2
)))
1355 && same_type_for_tbaa (TREE_TYPE (base1
), TREE_TYPE (ptrtype1
)) == 1
1356 && same_type_for_tbaa (TREE_TYPE (base2
), TREE_TYPE (ptrtype2
)) == 1
1357 && same_type_for_tbaa (TREE_TYPE (ptrtype1
),
1358 TREE_TYPE (ptrtype2
)) == 1
1359 /* But avoid treating arrays as "objects", instead assume they
1360 can overlap by an exact multiple of their element size. */
1361 && TREE_CODE (TREE_TYPE (ptrtype1
)) != ARRAY_TYPE
)
1362 return ranges_overlap_p (offset1
, max_size1
, offset2
, max_size2
);
1364 /* Do type-based disambiguation. */
1365 if (base1_alias_set
!= base2_alias_set
1366 && !alias_sets_conflict_p (base1_alias_set
, base2_alias_set
))
1369 /* If either reference is view-converted, give up now. */
1370 if (same_type_for_tbaa (TREE_TYPE (base1
), TREE_TYPE (ptrtype1
)) != 1
1371 || same_type_for_tbaa (TREE_TYPE (base2
), TREE_TYPE (ptrtype2
)) != 1)
1375 && nonoverlapping_component_refs_p (ref1
, ref2
))
1378 /* Do access-path based disambiguation. */
1380 && (handled_component_p (ref1
) || handled_component_p (ref2
)))
1381 return aliasing_component_refs_p (ref1
,
1382 ref1_alias_set
, base1_alias_set
,
1385 ref2_alias_set
, base2_alias_set
,
1386 offset2
, max_size2
, false);
/* Return true if the two memory references REF1 and REF2 may alias.  */
1394 refs_may_alias_p_1 (ao_ref
*ref1
, ao_ref
*ref2
, bool tbaa_p
)
1397 HOST_WIDE_INT offset1
= 0, offset2
= 0;
1398 HOST_WIDE_INT max_size1
= -1, max_size2
= -1;
1399 bool var1_p
, var2_p
, ind1_p
, ind2_p
;
1401 gcc_checking_assert ((!ref1
->ref
1402 || TREE_CODE (ref1
->ref
) == SSA_NAME
1403 || DECL_P (ref1
->ref
)
1404 || TREE_CODE (ref1
->ref
) == STRING_CST
1405 || handled_component_p (ref1
->ref
)
1406 || TREE_CODE (ref1
->ref
) == MEM_REF
1407 || TREE_CODE (ref1
->ref
) == TARGET_MEM_REF
)
1409 || TREE_CODE (ref2
->ref
) == SSA_NAME
1410 || DECL_P (ref2
->ref
)
1411 || TREE_CODE (ref2
->ref
) == STRING_CST
1412 || handled_component_p (ref2
->ref
)
1413 || TREE_CODE (ref2
->ref
) == MEM_REF
1414 || TREE_CODE (ref2
->ref
) == TARGET_MEM_REF
));
1416 /* Decompose the references into their base objects and the access. */
1417 base1
= ao_ref_base (ref1
);
1418 offset1
= ref1
->offset
;
1419 max_size1
= ref1
->max_size
;
1420 base2
= ao_ref_base (ref2
);
1421 offset2
= ref2
->offset
;
1422 max_size2
= ref2
->max_size
;
1424 /* We can end up with registers or constants as bases for example from
1425 *D.1663_44 = VIEW_CONVERT_EXPR<struct DB_LSN>(__tmp$B0F64_59);
1426 which is seen as a struct copy. */
1427 if (TREE_CODE (base1
) == SSA_NAME
1428 || TREE_CODE (base1
) == CONST_DECL
1429 || TREE_CODE (base1
) == CONSTRUCTOR
1430 || TREE_CODE (base1
) == ADDR_EXPR
1431 || CONSTANT_CLASS_P (base1
)
1432 || TREE_CODE (base2
) == SSA_NAME
1433 || TREE_CODE (base2
) == CONST_DECL
1434 || TREE_CODE (base2
) == CONSTRUCTOR
1435 || TREE_CODE (base2
) == ADDR_EXPR
1436 || CONSTANT_CLASS_P (base2
))
  /* We can end up referring to code via function and label decls.
     As we likely do not properly track code aliases conservatively
     bail out.  */
1442 if (TREE_CODE (base1
) == FUNCTION_DECL
1443 || TREE_CODE (base1
) == LABEL_DECL
1444 || TREE_CODE (base2
) == FUNCTION_DECL
1445 || TREE_CODE (base2
) == LABEL_DECL
)
1448 /* Two volatile accesses always conflict. */
1449 if (ref1
->volatile_p
1450 && ref2
->volatile_p
)
  /* Defer to simple offset based disambiguation if we have
     references based on two decls.  Do this before deferring to
     TBAA to handle must-alias cases in conformance with the
     GCC extension of allowing type-punning through unions.  */
1457 var1_p
= DECL_P (base1
);
1458 var2_p
= DECL_P (base2
);
1459 if (var1_p
&& var2_p
)
1460 return decl_refs_may_alias_p (ref1
->ref
, base1
, offset1
, max_size1
,
1461 ref2
->ref
, base2
, offset2
, max_size2
);
  /* Handle restrict based accesses.
     ??? ao_ref_base strips inner MEM_REF [&decl], recover from that
     here.  */
1466 tree rbase1
= base1
;
1467 tree rbase2
= base2
;
1472 while (handled_component_p (rbase1
))
1473 rbase1
= TREE_OPERAND (rbase1
, 0);
1479 while (handled_component_p (rbase2
))
1480 rbase2
= TREE_OPERAND (rbase2
, 0);
1482 if (rbase1
&& rbase2
1483 && (TREE_CODE (base1
) == MEM_REF
|| TREE_CODE (base1
) == TARGET_MEM_REF
)
1484 && (TREE_CODE (base2
) == MEM_REF
|| TREE_CODE (base2
) == TARGET_MEM_REF
)
1485 /* If the accesses are in the same restrict clique... */
1486 && MR_DEPENDENCE_CLIQUE (base1
) == MR_DEPENDENCE_CLIQUE (base2
)
1487 /* But based on different pointers they do not alias. */
1488 && MR_DEPENDENCE_BASE (base1
) != MR_DEPENDENCE_BASE (base2
))
1491 ind1_p
= (TREE_CODE (base1
) == MEM_REF
1492 || TREE_CODE (base1
) == TARGET_MEM_REF
);
1493 ind2_p
= (TREE_CODE (base2
) == MEM_REF
1494 || TREE_CODE (base2
) == TARGET_MEM_REF
);
1496 /* Canonicalize the pointer-vs-decl case. */
1497 if (ind1_p
&& var2_p
)
1499 std::swap (offset1
, offset2
);
1500 std::swap (max_size1
, max_size2
);
1501 std::swap (base1
, base2
);
1502 std::swap (ref1
, ref2
);
1509 /* First defer to TBAA if possible. */
1511 && flag_strict_aliasing
1512 && !alias_sets_conflict_p (ao_ref_alias_set (ref1
),
1513 ao_ref_alias_set (ref2
)))
1516 /* Dispatch to the pointer-vs-decl or pointer-vs-pointer disambiguators. */
1517 if (var1_p
&& ind2_p
)
1518 return indirect_ref_may_alias_decl_p (ref2
->ref
, base2
,
1520 ao_ref_alias_set (ref2
),
1521 ao_ref_base_alias_set (ref2
),
1524 ao_ref_alias_set (ref1
),
1525 ao_ref_base_alias_set (ref1
),
1527 else if (ind1_p
&& ind2_p
)
1528 return indirect_refs_may_alias_p (ref1
->ref
, base1
,
1530 ao_ref_alias_set (ref1
),
1531 ao_ref_base_alias_set (ref1
),
1534 ao_ref_alias_set (ref2
),
1535 ao_ref_base_alias_set (ref2
),
1542 refs_may_alias_p (tree ref1
, ao_ref
*ref2
)
1545 ao_ref_init (&r1
, ref1
);
1546 return refs_may_alias_p_1 (&r1
, ref2
, true);
1550 refs_may_alias_p (tree ref1
, tree ref2
)
1554 ao_ref_init (&r1
, ref1
);
1555 ao_ref_init (&r2
, ref2
);
1556 res
= refs_may_alias_p_1 (&r1
, &r2
, true);
1558 ++alias_stats
.refs_may_alias_p_may_alias
;
1560 ++alias_stats
.refs_may_alias_p_no_alias
;
/* Returns true if there is an anti-dependence for the STORE that
   executes after the LOAD.  */
1568 refs_anti_dependent_p (tree load
, tree store
)
1571 ao_ref_init (&r1
, load
);
1572 ao_ref_init (&r2
, store
);
1573 return refs_may_alias_p_1 (&r1
, &r2
, false);
/* Returns true if there is an output dependence for the stores
   STORE1 and STORE2.  */
1580 refs_output_dependent_p (tree store1
, tree store2
)
1583 ao_ref_init (&r1
, store1
);
1584 ao_ref_init (&r2
, store2
);
1585 return refs_may_alias_p_1 (&r1
, &r2
, false);
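
/* Note added for clarity (not part of the original sources): both
   predicates above pass tbaa_p == false because type-based rules may
   only be used for true dependences.  A store is allowed to change the
   dynamic type of storage that has no declared type, e.g.

     *(float *) p = 1.0f;	// effective type becomes float
     f = *(float *) p;		// load as float
     *(int *) p = 42;		// store legally re-types the storage

   so the anti-dependence between the float load and the int store must
   be honored even though float and int do not alias under TBAA; p and f
   are placeholders here.  */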
1588 /* If the call CALL may use the memory reference REF return true,
1589 otherwise return false. */
1592 ref_maybe_used_by_call_p_1 (gcall
*call
, ao_ref
*ref
)
1596 int flags
= gimple_call_flags (call
);
1598 /* Const functions without a static chain do not implicitly use memory. */
1599 if (!gimple_call_chain (call
)
1600 && (flags
& (ECF_CONST
|ECF_NOVOPS
)))
1603 base
= ao_ref_base (ref
);
1607 /* A call that is not without side-effects might involve volatile
1608 accesses and thus conflicts with all other volatile accesses. */
1609 if (ref
->volatile_p
)
1612 /* If the reference is based on a decl that is not aliased the call
1613 cannot possibly use it. */
1615 && !may_be_aliased (base
)
1616 /* But local statics can be used through recursion. */
1617 && !is_global_var (base
))
1620 callee
= gimple_call_fndecl (call
);
1622 /* Handle those builtin functions explicitly that do not act as
1623 escape points. See tree-ssa-structalias.c:find_func_aliases
1624 for the list of builtins we might need to handle here. */
1625 if (callee
!= NULL_TREE
1626 && gimple_call_builtin_p (call
, BUILT_IN_NORMAL
))
1627 switch (DECL_FUNCTION_CODE (callee
))
      /* All the following functions read memory pointed to by
	 their second argument.  strcat/strncat additionally
	 read memory pointed to by the first argument.  */
1632 case BUILT_IN_STRCAT
:
1633 case BUILT_IN_STRNCAT
:
1636 ao_ref_init_from_ptr_and_size (&dref
,
1637 gimple_call_arg (call
, 0),
1639 if (refs_may_alias_p_1 (&dref
, ref
, false))
1643 case BUILT_IN_STRCPY
:
1644 case BUILT_IN_STRNCPY
:
1645 case BUILT_IN_MEMCPY
:
1646 case BUILT_IN_MEMMOVE
:
1647 case BUILT_IN_MEMPCPY
:
1648 case BUILT_IN_STPCPY
:
1649 case BUILT_IN_STPNCPY
:
1650 case BUILT_IN_TM_MEMCPY
:
1651 case BUILT_IN_TM_MEMMOVE
:
1654 tree size
= NULL_TREE
;
1655 if (gimple_call_num_args (call
) == 3)
1656 size
= gimple_call_arg (call
, 2);
1657 ao_ref_init_from_ptr_and_size (&dref
,
1658 gimple_call_arg (call
, 1),
1660 return refs_may_alias_p_1 (&dref
, ref
, false);
1662 case BUILT_IN_STRCAT_CHK
:
1663 case BUILT_IN_STRNCAT_CHK
:
1666 ao_ref_init_from_ptr_and_size (&dref
,
1667 gimple_call_arg (call
, 0),
1669 if (refs_may_alias_p_1 (&dref
, ref
, false))
1673 case BUILT_IN_STRCPY_CHK
:
1674 case BUILT_IN_STRNCPY_CHK
:
1675 case BUILT_IN_MEMCPY_CHK
:
1676 case BUILT_IN_MEMMOVE_CHK
:
1677 case BUILT_IN_MEMPCPY_CHK
:
1678 case BUILT_IN_STPCPY_CHK
:
1679 case BUILT_IN_STPNCPY_CHK
:
1682 tree size
= NULL_TREE
;
1683 if (gimple_call_num_args (call
) == 4)
1684 size
= gimple_call_arg (call
, 2);
1685 ao_ref_init_from_ptr_and_size (&dref
,
1686 gimple_call_arg (call
, 1),
1688 return refs_may_alias_p_1 (&dref
, ref
, false);
1690 case BUILT_IN_BCOPY
:
1693 tree size
= gimple_call_arg (call
, 2);
1694 ao_ref_init_from_ptr_and_size (&dref
,
1695 gimple_call_arg (call
, 0),
1697 return refs_may_alias_p_1 (&dref
, ref
, false);
      /* The following functions read memory pointed to by their
	 first argument.  */
1702 CASE_BUILT_IN_TM_LOAD (1):
1703 CASE_BUILT_IN_TM_LOAD (2):
1704 CASE_BUILT_IN_TM_LOAD (4):
1705 CASE_BUILT_IN_TM_LOAD (8):
1706 CASE_BUILT_IN_TM_LOAD (FLOAT
):
1707 CASE_BUILT_IN_TM_LOAD (DOUBLE
):
1708 CASE_BUILT_IN_TM_LOAD (LDOUBLE
):
1709 CASE_BUILT_IN_TM_LOAD (M64
):
1710 CASE_BUILT_IN_TM_LOAD (M128
):
1711 CASE_BUILT_IN_TM_LOAD (M256
):
1712 case BUILT_IN_TM_LOG
:
1713 case BUILT_IN_TM_LOG_1
:
1714 case BUILT_IN_TM_LOG_2
:
1715 case BUILT_IN_TM_LOG_4
:
1716 case BUILT_IN_TM_LOG_8
:
1717 case BUILT_IN_TM_LOG_FLOAT
:
1718 case BUILT_IN_TM_LOG_DOUBLE
:
1719 case BUILT_IN_TM_LOG_LDOUBLE
:
1720 case BUILT_IN_TM_LOG_M64
:
1721 case BUILT_IN_TM_LOG_M128
:
1722 case BUILT_IN_TM_LOG_M256
:
1723 return ptr_deref_may_alias_ref_p_1 (gimple_call_arg (call
, 0), ref
);
1725 /* These read memory pointed to by the first argument. */
1726 case BUILT_IN_STRDUP
:
1727 case BUILT_IN_STRNDUP
:
1728 case BUILT_IN_REALLOC
:
1731 tree size
= NULL_TREE
;
1732 if (gimple_call_num_args (call
) == 2)
1733 size
= gimple_call_arg (call
, 1);
1734 ao_ref_init_from_ptr_and_size (&dref
,
1735 gimple_call_arg (call
, 0),
1737 return refs_may_alias_p_1 (&dref
, ref
, false);
1739 /* These read memory pointed to by the first argument. */
1740 case BUILT_IN_INDEX
:
1741 case BUILT_IN_STRCHR
:
1742 case BUILT_IN_STRRCHR
:
1745 ao_ref_init_from_ptr_and_size (&dref
,
1746 gimple_call_arg (call
, 0),
1748 return refs_may_alias_p_1 (&dref
, ref
, false);
1750 /* These read memory pointed to by the first argument with size
1751 in the third argument. */
1752 case BUILT_IN_MEMCHR
:
1755 ao_ref_init_from_ptr_and_size (&dref
,
1756 gimple_call_arg (call
, 0),
1757 gimple_call_arg (call
, 2));
1758 return refs_may_alias_p_1 (&dref
, ref
, false);
1760 /* These read memory pointed to by the first and second arguments. */
1761 case BUILT_IN_STRSTR
:
1762 case BUILT_IN_STRPBRK
:
1765 ao_ref_init_from_ptr_and_size (&dref
,
1766 gimple_call_arg (call
, 0),
1768 if (refs_may_alias_p_1 (&dref
, ref
, false))
1770 ao_ref_init_from_ptr_and_size (&dref
,
1771 gimple_call_arg (call
, 1),
1773 return refs_may_alias_p_1 (&dref
, ref
, false);
1776 /* The following builtins do not read from memory. */
1778 case BUILT_IN_MALLOC
:
1779 case BUILT_IN_POSIX_MEMALIGN
:
1780 case BUILT_IN_ALIGNED_ALLOC
:
1781 case BUILT_IN_CALLOC
:
1782 CASE_BUILT_IN_ALLOCA
:
1783 case BUILT_IN_STACK_SAVE
:
1784 case BUILT_IN_STACK_RESTORE
:
1785 case BUILT_IN_MEMSET
:
1786 case BUILT_IN_TM_MEMSET
:
1787 case BUILT_IN_MEMSET_CHK
:
1788 case BUILT_IN_FREXP
:
1789 case BUILT_IN_FREXPF
:
1790 case BUILT_IN_FREXPL
:
1791 case BUILT_IN_GAMMA_R
:
1792 case BUILT_IN_GAMMAF_R
:
1793 case BUILT_IN_GAMMAL_R
:
1794 case BUILT_IN_LGAMMA_R
:
1795 case BUILT_IN_LGAMMAF_R
:
1796 case BUILT_IN_LGAMMAL_R
:
1798 case BUILT_IN_MODFF
:
1799 case BUILT_IN_MODFL
:
1800 case BUILT_IN_REMQUO
:
1801 case BUILT_IN_REMQUOF
:
1802 case BUILT_IN_REMQUOL
:
1803 case BUILT_IN_SINCOS
:
1804 case BUILT_IN_SINCOSF
:
1805 case BUILT_IN_SINCOSL
:
1806 case BUILT_IN_ASSUME_ALIGNED
:
1807 case BUILT_IN_VA_END
:
      /* __sync_* builtins and some OpenMP builtins act as threading
	 barriers.  */
1811 #undef DEF_SYNC_BUILTIN
1812 #define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
1813 #include "sync-builtins.def"
1814 #undef DEF_SYNC_BUILTIN
1815 case BUILT_IN_GOMP_ATOMIC_START
:
1816 case BUILT_IN_GOMP_ATOMIC_END
:
1817 case BUILT_IN_GOMP_BARRIER
:
1818 case BUILT_IN_GOMP_BARRIER_CANCEL
:
1819 case BUILT_IN_GOMP_TASKWAIT
:
1820 case BUILT_IN_GOMP_TASKGROUP_END
:
1821 case BUILT_IN_GOMP_CRITICAL_START
:
1822 case BUILT_IN_GOMP_CRITICAL_END
:
1823 case BUILT_IN_GOMP_CRITICAL_NAME_START
:
1824 case BUILT_IN_GOMP_CRITICAL_NAME_END
:
1825 case BUILT_IN_GOMP_LOOP_END
:
1826 case BUILT_IN_GOMP_LOOP_END_CANCEL
:
1827 case BUILT_IN_GOMP_ORDERED_START
:
1828 case BUILT_IN_GOMP_ORDERED_END
:
1829 case BUILT_IN_GOMP_SECTIONS_END
:
1830 case BUILT_IN_GOMP_SECTIONS_END_CANCEL
:
1831 case BUILT_IN_GOMP_SINGLE_COPY_START
:
1832 case BUILT_IN_GOMP_SINGLE_COPY_END
:
1836 /* Fallthru to general call handling. */;
  /* Check if base is a global static variable that is not read
     by the callee.  */
1841 if (callee
!= NULL_TREE
&& VAR_P (base
) && TREE_STATIC (base
))
1843 struct cgraph_node
*node
= cgraph_node::get (callee
);
1846 /* FIXME: Callee can be an OMP builtin that does not have a call graph
1847 node yet. We should enforce that there are nodes for all decls in the
1848 IL and remove this check instead. */
1850 && (not_read
= ipa_reference_get_not_read_global (node
))
1851 && bitmap_bit_p (not_read
, ipa_reference_var_uid (base
)))
1855 /* Check if the base variable is call-used. */
1858 if (pt_solution_includes (gimple_call_use_set (call
), base
))
1861 else if ((TREE_CODE (base
) == MEM_REF
1862 || TREE_CODE (base
) == TARGET_MEM_REF
)
1863 && TREE_CODE (TREE_OPERAND (base
, 0)) == SSA_NAME
)
1865 struct ptr_info_def
*pi
= SSA_NAME_PTR_INFO (TREE_OPERAND (base
, 0));
1869 if (pt_solutions_intersect (gimple_call_use_set (call
), &pi
->pt
))
1875 /* Inspect call arguments for passed-by-value aliases. */
1877 for (i
= 0; i
< gimple_call_num_args (call
); ++i
)
1879 tree op
= gimple_call_arg (call
, i
);
1880 int flags
= gimple_call_arg_flags (call
, i
);
1882 if (flags
& EAF_UNUSED
)
1885 if (TREE_CODE (op
) == WITH_SIZE_EXPR
)
1886 op
= TREE_OPERAND (op
, 0);
1888 if (TREE_CODE (op
) != SSA_NAME
1889 && !is_gimple_min_invariant (op
))
1892 ao_ref_init (&r
, op
);
1893 if (refs_may_alias_p_1 (&r
, ref
, true))
1902 ref_maybe_used_by_call_p (gcall
*call
, ao_ref
*ref
)
1905 res
= ref_maybe_used_by_call_p_1 (call
, ref
);
1907 ++alias_stats
.ref_maybe_used_by_call_p_may_alias
;
1909 ++alias_stats
.ref_maybe_used_by_call_p_no_alias
;
1914 /* If the statement STMT may use the memory reference REF return
1915 true, otherwise return false. */
1918 ref_maybe_used_by_stmt_p (gimple
*stmt
, ao_ref
*ref
)
1920 if (is_gimple_assign (stmt
))
1924 /* All memory assign statements are single. */
1925 if (!gimple_assign_single_p (stmt
))
1928 rhs
= gimple_assign_rhs1 (stmt
);
1929 if (is_gimple_reg (rhs
)
1930 || is_gimple_min_invariant (rhs
)
1931 || gimple_assign_rhs_code (stmt
) == CONSTRUCTOR
)
1934 return refs_may_alias_p (rhs
, ref
);
1936 else if (is_gimple_call (stmt
))
1937 return ref_maybe_used_by_call_p (as_a
<gcall
*> (stmt
), ref
);
1938 else if (greturn
*return_stmt
= dyn_cast
<greturn
*> (stmt
))
1940 tree retval
= gimple_return_retval (return_stmt
);
1942 && TREE_CODE (retval
) != SSA_NAME
1943 && !is_gimple_min_invariant (retval
)
1944 && refs_may_alias_p (retval
, ref
))
1946 /* If ref escapes the function then the return acts as a use. */
1947 tree base
= ao_ref_base (ref
);
1950 else if (DECL_P (base
))
1951 return is_global_var (base
);
1952 else if (TREE_CODE (base
) == MEM_REF
1953 || TREE_CODE (base
) == TARGET_MEM_REF
)
1954 return ptr_deref_may_alias_global_p (TREE_OPERAND (base
, 0));
1962 ref_maybe_used_by_stmt_p (gimple
*stmt
, tree ref
)
1965 ao_ref_init (&r
, ref
);
1966 return ref_maybe_used_by_stmt_p (stmt
, &r
);
1969 /* If the call in statement CALL may clobber the memory reference REF
1970 return true, otherwise return false. */
1973 call_may_clobber_ref_p_1 (gcall
*call
, ao_ref
*ref
)
1978 /* If the call is pure or const it cannot clobber anything. */
1979 if (gimple_call_flags (call
)
1980 & (ECF_PURE
|ECF_CONST
|ECF_LOOPING_CONST_OR_PURE
|ECF_NOVOPS
))
1982 if (gimple_call_internal_p (call
))
1983 switch (gimple_call_internal_fn (call
))
1985 /* Treat these internal calls like ECF_PURE for aliasing,
1986 they don't write to any memory the program should care about.
1987 They have important other side-effects, and read memory,
1988 so can't be ECF_NOVOPS. */
1989 case IFN_UBSAN_NULL
:
1990 case IFN_UBSAN_BOUNDS
:
1991 case IFN_UBSAN_VPTR
:
1992 case IFN_UBSAN_OBJECT_SIZE
:
1994 case IFN_ASAN_CHECK
:
2000 base
= ao_ref_base (ref
);
2004 if (TREE_CODE (base
) == SSA_NAME
2005 || CONSTANT_CLASS_P (base
))
  /* A call that is not without side-effects might involve volatile
     accesses and thus conflicts with all other volatile accesses.  */
  if (ref->volatile_p)
    return true;

  /* If the reference is based on a decl that is not aliased the call
     cannot possibly clobber it.  */
  if (DECL_P (base)
      && !may_be_aliased (base)
      /* But local non-readonly statics can be modified through recursion
	 or the call may implement a threading barrier which we must
	 treat as may-def.  */
      && (TREE_READONLY (base)
	  || !is_global_var (base)))
    return false;

  callee = gimple_call_fndecl (call);

  /* Handle those builtin functions explicitly that do not act as
     escape points.  See tree-ssa-structalias.c:find_func_aliases
     for the list of builtins we might need to handle here.  */
  if (callee != NULL_TREE
      && gimple_call_builtin_p (call, BUILT_IN_NORMAL))
    switch (DECL_FUNCTION_CODE (callee))
      {
	/* All the following functions clobber memory pointed to by
	   their first argument.  */
	case BUILT_IN_STRCPY:
	case BUILT_IN_STRNCPY:
	case BUILT_IN_MEMCPY:
	case BUILT_IN_MEMMOVE:
	case BUILT_IN_MEMPCPY:
	case BUILT_IN_STPCPY:
	case BUILT_IN_STPNCPY:
	case BUILT_IN_STRCAT:
	case BUILT_IN_STRNCAT:
	case BUILT_IN_MEMSET:
	case BUILT_IN_TM_MEMSET:
	CASE_BUILT_IN_TM_STORE (1):
	CASE_BUILT_IN_TM_STORE (2):
	CASE_BUILT_IN_TM_STORE (4):
	CASE_BUILT_IN_TM_STORE (8):
	CASE_BUILT_IN_TM_STORE (FLOAT):
	CASE_BUILT_IN_TM_STORE (DOUBLE):
	CASE_BUILT_IN_TM_STORE (LDOUBLE):
	CASE_BUILT_IN_TM_STORE (M64):
	CASE_BUILT_IN_TM_STORE (M128):
	CASE_BUILT_IN_TM_STORE (M256):
	case BUILT_IN_TM_MEMCPY:
	case BUILT_IN_TM_MEMMOVE:
	  {
	    ao_ref dref;
	    tree size = NULL_TREE;
	    /* Don't pass in size for strncat, as the maximum size
	       is strlen (dest) + n + 1 instead of n, resp.
	       n + 1 at dest + strlen (dest), but strlen (dest) isn't
	       known.  */
	    if (gimple_call_num_args (call) == 3
		&& DECL_FUNCTION_CODE (callee) != BUILT_IN_STRNCAT)
	      size = gimple_call_arg (call, 2);
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   size);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }
	case BUILT_IN_STRCPY_CHK:
	case BUILT_IN_STRNCPY_CHK:
	case BUILT_IN_MEMCPY_CHK:
	case BUILT_IN_MEMMOVE_CHK:
	case BUILT_IN_MEMPCPY_CHK:
	case BUILT_IN_STPCPY_CHK:
	case BUILT_IN_STPNCPY_CHK:
	case BUILT_IN_STRCAT_CHK:
	case BUILT_IN_STRNCAT_CHK:
	case BUILT_IN_MEMSET_CHK:
	  {
	    ao_ref dref;
	    tree size = NULL_TREE;
	    /* Don't pass in size for __strncat_chk, as the maximum size
	       is strlen (dest) + n + 1 instead of n, resp.
	       n + 1 at dest + strlen (dest), but strlen (dest) isn't
	       known.  */
	    if (gimple_call_num_args (call) == 4
		&& DECL_FUNCTION_CODE (callee) != BUILT_IN_STRNCAT_CHK)
	      size = gimple_call_arg (call, 2);
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 0),
					   size);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }
	case BUILT_IN_BCOPY:
	  {
	    ao_ref dref;
	    tree size = gimple_call_arg (call, 2);
	    ao_ref_init_from_ptr_and_size (&dref,
					   gimple_call_arg (call, 1),
					   size);
	    return refs_may_alias_p_1 (&dref, ref, false);
	  }
	/* Allocating memory does not have any side-effects apart from
	   being the definition point for the pointer.  */
	case BUILT_IN_MALLOC:
	case BUILT_IN_ALIGNED_ALLOC:
	case BUILT_IN_CALLOC:
	case BUILT_IN_STRDUP:
	case BUILT_IN_STRNDUP:
	  /* Unix98 specifies that errno is set on allocation failure.  */
	  if (flag_errno_math
	      && targetm.ref_may_alias_errno (ref))
	    return true;
	  return false;
	case BUILT_IN_STACK_SAVE:
	CASE_BUILT_IN_ALLOCA:
	case BUILT_IN_ASSUME_ALIGNED:
	  return false;
	/* But posix_memalign stores a pointer into the memory pointed to
	   by its first argument.  */
	case BUILT_IN_POSIX_MEMALIGN:
	  {
	    tree ptrptr = gimple_call_arg (call, 0);
	    ao_ref dref;
	    ao_ref_init_from_ptr_and_size (&dref, ptrptr,
					   TYPE_SIZE_UNIT (ptr_type_node));
	    return (refs_may_alias_p_1 (&dref, ref, false)
		    || (flag_errno_math
			&& targetm.ref_may_alias_errno (ref)));
	  }
	/* Freeing memory kills the pointed-to memory.  More importantly
	   the call has to serve as a barrier for moving loads and stores
	   across it.  */
	case BUILT_IN_FREE:
	case BUILT_IN_VA_END:
	  {
	    tree ptr = gimple_call_arg (call, 0);
	    return ptr_deref_may_alias_ref_p_1 (ptr, ref);
	  }
	/* Realloc serves both as allocation point and deallocation point.  */
	case BUILT_IN_REALLOC:
	  {
	    tree ptr = gimple_call_arg (call, 0);
	    /* Unix98 specifies that errno is set on allocation failure.  */
	    return ((flag_errno_math
		     && targetm.ref_may_alias_errno (ref))
		    || ptr_deref_may_alias_ref_p_1 (ptr, ref));
	  }
	case BUILT_IN_GAMMA_R:
	case BUILT_IN_GAMMAF_R:
	case BUILT_IN_GAMMAL_R:
	case BUILT_IN_LGAMMA_R:
	case BUILT_IN_LGAMMAF_R:
	case BUILT_IN_LGAMMAL_R:
	  {
	    tree out = gimple_call_arg (call, 1);
	    if (ptr_deref_may_alias_ref_p_1 (out, ref))
	      return true;
	    if (flag_errno_math)
	      break;
	    return false;
	  }
	case BUILT_IN_FREXP:
	case BUILT_IN_FREXPF:
	case BUILT_IN_FREXPL:
	case BUILT_IN_MODF:
	case BUILT_IN_MODFF:
	case BUILT_IN_MODFL:
	  {
	    tree out = gimple_call_arg (call, 1);
	    return ptr_deref_may_alias_ref_p_1 (out, ref);
	  }
	case BUILT_IN_REMQUO:
	case BUILT_IN_REMQUOF:
	case BUILT_IN_REMQUOL:
	  {
	    tree out = gimple_call_arg (call, 2);
	    if (ptr_deref_may_alias_ref_p_1 (out, ref))
	      return true;
	    if (flag_errno_math)
	      break;
	    return false;
	  }
	case BUILT_IN_SINCOS:
	case BUILT_IN_SINCOSF:
	case BUILT_IN_SINCOSL:
	  {
	    tree sin = gimple_call_arg (call, 1);
	    tree cos = gimple_call_arg (call, 2);
	    return (ptr_deref_may_alias_ref_p_1 (sin, ref)
		    || ptr_deref_may_alias_ref_p_1 (cos, ref));
	  }
	/* __sync_* builtins and some OpenMP builtins act as threading
	   barriers.  */
#undef DEF_SYNC_BUILTIN
#define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
#include "sync-builtins.def"
#undef DEF_SYNC_BUILTIN
	case BUILT_IN_GOMP_ATOMIC_START:
	case BUILT_IN_GOMP_ATOMIC_END:
	case BUILT_IN_GOMP_BARRIER:
	case BUILT_IN_GOMP_BARRIER_CANCEL:
	case BUILT_IN_GOMP_TASKWAIT:
	case BUILT_IN_GOMP_TASKGROUP_END:
	case BUILT_IN_GOMP_CRITICAL_START:
	case BUILT_IN_GOMP_CRITICAL_END:
	case BUILT_IN_GOMP_CRITICAL_NAME_START:
	case BUILT_IN_GOMP_CRITICAL_NAME_END:
	case BUILT_IN_GOMP_LOOP_END:
	case BUILT_IN_GOMP_LOOP_END_CANCEL:
	case BUILT_IN_GOMP_ORDERED_START:
	case BUILT_IN_GOMP_ORDERED_END:
	case BUILT_IN_GOMP_SECTIONS_END:
	case BUILT_IN_GOMP_SECTIONS_END_CANCEL:
	case BUILT_IN_GOMP_SINGLE_COPY_START:
	case BUILT_IN_GOMP_SINGLE_COPY_END:
	  return true;
	default:
	  /* Fallthru to general call handling.  */;
      }

  /* Check if base is a global static variable that is not written
     by the function.  */
  if (callee != NULL_TREE && VAR_P (base) && TREE_STATIC (base))
    {
      struct cgraph_node *node = cgraph_node::get (callee);
      bitmap not_written;

      if (node
	  && (not_written = ipa_reference_get_not_written_global (node))
	  && bitmap_bit_p (not_written, ipa_reference_var_uid (base)))
	return false;
    }

  /* Check if the base variable is call-clobbered.  */
  if (DECL_P (base))
    return pt_solution_includes (gimple_call_clobber_set (call), base);
  else if ((TREE_CODE (base) == MEM_REF
	    || TREE_CODE (base) == TARGET_MEM_REF)
	   && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
    {
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
      if (!pi)
	return true;

      return pt_solutions_intersect (gimple_call_clobber_set (call), &pi->pt);
    }

  return true;
}

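/* Illustrative example (not part of the original sources) of the builtin
   special-casing above; the SSA names are hypothetical:

       memcpy (dst_1, src_2, 16);   // clobbers only the 16 bytes at dst_1

   For such a call the oracle builds an ao_ref from the first argument and
   the known size, so a query for an unrelated object that dst_1 cannot
   point to answers "no clobber" instead of conservatively "may clobber".  */
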
/* If the call in statement CALL may clobber the memory reference REF
   return true, otherwise return false.  */

bool
call_may_clobber_ref_p (gcall *call, tree ref)
{
  bool res;
  ao_ref r;
  ao_ref_init (&r, ref);
  res = call_may_clobber_ref_p_1 (call, &r);
  if (res)
    ++alias_stats.call_may_clobber_ref_p_may_alias;
  else
    ++alias_stats.call_may_clobber_ref_p_no_alias;
  return res;
}

/* If the statement STMT may clobber the memory reference REF return true,
   otherwise return false.  */

bool
stmt_may_clobber_ref_p_1 (gimple *stmt, ao_ref *ref)
{
  if (is_gimple_call (stmt))
    {
      tree lhs = gimple_call_lhs (stmt);
      if (lhs
	  && TREE_CODE (lhs) != SSA_NAME)
	{
	  ao_ref r;
	  ao_ref_init (&r, lhs);
	  if (refs_may_alias_p_1 (ref, &r, true))
	    return true;
	}

      return call_may_clobber_ref_p_1 (as_a <gcall *> (stmt), ref);
    }
  else if (gimple_assign_single_p (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);
      if (TREE_CODE (lhs) != SSA_NAME)
	{
	  ao_ref r;
	  ao_ref_init (&r, lhs);
	  return refs_may_alias_p_1 (ref, &r, true);
	}
    }
  else if (gimple_code (stmt) == GIMPLE_ASM)
    return true;

  return false;
}

bool
stmt_may_clobber_ref_p (gimple *stmt, tree ref)
{
  ao_ref r;
  ao_ref_init (&r, ref);
  return stmt_may_clobber_ref_p_1 (stmt, &r);
}

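/* Usage sketch (illustrative, not from the original sources): before moving
   a load of 'mem' across a range of statements, a client can verify that
   nothing in between may write to it; 'gsi' and 'mem' are hypothetical
   caller locals:

       ao_ref r;
       ao_ref_init (&r, mem);
       for (; !gsi_end_p (gsi); gsi_next (&gsi))
	 if (stmt_may_clobber_ref_p_1 (gsi_stmt (gsi), &r))
	   return false;   // some statement may overwrite 'mem'
 */
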
/* Return true if store1 and store2 described by corresponding tuples
   <BASE, OFFSET, SIZE, MAX_SIZE> have the same size and store to the same
   address.  */

static bool
same_addr_size_stores_p (tree base1, HOST_WIDE_INT offset1, HOST_WIDE_INT size1,
			 HOST_WIDE_INT max_size1,
			 tree base2, HOST_WIDE_INT offset2, HOST_WIDE_INT size2,
			 HOST_WIDE_INT max_size2)
{
  /* Offsets need to be 0.  */
  if (offset1 != 0
      || offset2 != 0)
    return false;

  bool base1_obj_p = SSA_VAR_P (base1);
  bool base2_obj_p = SSA_VAR_P (base2);

  /* We need one object.  */
  if (base1_obj_p == base2_obj_p)
    return false;
  tree obj = base1_obj_p ? base1 : base2;

  /* And we need one MEM_REF.  */
  bool base1_memref_p = TREE_CODE (base1) == MEM_REF;
  bool base2_memref_p = TREE_CODE (base2) == MEM_REF;
  if (base1_memref_p == base2_memref_p)
    return false;
  tree memref = base1_memref_p ? base1 : base2;

  /* Sizes need to be valid.  */
  if (max_size1 == -1 || max_size2 == -1
      || size1 == -1 || size2 == -1)
    return false;

  /* Max_size needs to match size.  */
  if (max_size1 != size1
      || max_size2 != size2)
    return false;

  /* Sizes need to match.  */
  if (size1 != size2)
    return false;

  /* Check that memref is a store to pointer with singleton points-to info.  */
  if (!integer_zerop (TREE_OPERAND (memref, 1)))
    return false;
  tree ptr = TREE_OPERAND (memref, 0);
  if (TREE_CODE (ptr) != SSA_NAME)
    return false;
  struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
  unsigned int pt_uid;
  if (!pi
      || !pt_solution_singleton_or_null_p (&pi->pt, &pt_uid))
    return false;

  /* Be conservative with non-call exceptions when the address might
     be NULL.  */
  if (flag_non_call_exceptions && pi->pt.null)
    return false;

  /* Check that ptr points relative to obj.  */
  unsigned int obj_uid = DECL_PT_UID (obj);
  if (obj_uid != pt_uid)
    return false;

  /* Check that the object size is the same as the store size.  That ensures
     that ptr points to the start of obj.  */
  if (!tree_fits_shwi_p (DECL_SIZE (obj)))
    return false;
  HOST_WIDE_INT obj_size = tree_to_shwi (DECL_SIZE (obj));
  return obj_size == size1;
}

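/* Illustrative example (not part of the original sources) of the situation
   the predicate above is meant to recognize; the names are hypothetical:

       ptr_1 = &obj;     // points-to set of ptr_1 is the singleton { obj }
       MEM[ptr_1] = x_2; // store 1: base MEM_REF[ptr_1], offset 0
       obj = y_3;        // store 2: base 'obj', offset 0

   When both stores have offset 0, equal known sizes, and that size equals
   DECL_SIZE of 'obj', the store through ptr_1 must write exactly 'obj',
   so the two stores hit the same address despite having different bases.  */
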
/* If STMT kills the memory reference REF return true, otherwise
   return false.  */

bool
stmt_kills_ref_p (gimple *stmt, ao_ref *ref)
{
  if (!ao_ref_base (ref))
    return false;

  if (gimple_has_lhs (stmt)
      && TREE_CODE (gimple_get_lhs (stmt)) != SSA_NAME
      /* The assignment is not necessarily carried out if it can throw
	 and we can catch it in the current function where we could inspect
	 the previous value.
	 ???  We only need to care about the RHS throwing.  For aggregate
	 assignments or similar calls and non-call exceptions the LHS
	 might throw as well.  */
      && !stmt_can_throw_internal (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      /* If LHS is literally a base of the access we are done.  */
      if (ref->ref)
	{
	  tree base = ref->ref;
	  tree innermost_dropped_array_ref = NULL_TREE;
	  if (handled_component_p (base))
	    {
	      tree saved_lhs0 = NULL_TREE;
	      if (handled_component_p (lhs))
		{
		  saved_lhs0 = TREE_OPERAND (lhs, 0);
		  TREE_OPERAND (lhs, 0) = integer_zero_node;
		}
	      do
		{
		  /* Just compare the outermost handled component, if
		     they are equal we have found a possible common
		     base.  */
		  tree saved_base0 = TREE_OPERAND (base, 0);
		  TREE_OPERAND (base, 0) = integer_zero_node;
		  bool res = operand_equal_p (lhs, base, 0);
		  TREE_OPERAND (base, 0) = saved_base0;
		  if (res)
		    break;
		  /* Remember if we drop an array-ref that we need to
		     double-check not being at struct end.  */
		  if (TREE_CODE (base) == ARRAY_REF
		      || TREE_CODE (base) == ARRAY_RANGE_REF)
		    innermost_dropped_array_ref = base;
		  /* Otherwise drop handled components of the access.  */
		  base = saved_base0;
		}
	      while (handled_component_p (base));
	      if (saved_lhs0)
		TREE_OPERAND (lhs, 0) = saved_lhs0;
	    }
	  /* Finally check if the lhs has the same address and size as the
	     base candidate of the access.  Watch out if we have dropped
	     an array-ref that was at struct end, this means ref->ref may
	     be outside of the TYPE_SIZE of its base.  */
	  if ((! innermost_dropped_array_ref
	       || ! array_at_struct_end_p (innermost_dropped_array_ref))
	      && (lhs == base
		  || (((TYPE_SIZE (TREE_TYPE (lhs))
			== TYPE_SIZE (TREE_TYPE (base)))
		       || (TYPE_SIZE (TREE_TYPE (lhs))
			   && TYPE_SIZE (TREE_TYPE (base))
			   && operand_equal_p (TYPE_SIZE (TREE_TYPE (lhs)),
					       TYPE_SIZE (TREE_TYPE (base)),
					       0)))
		      && operand_equal_p (lhs, base,
					  OEP_ADDRESS_OF
					  | OEP_MATCH_SIDE_EFFECTS))))
	    return true;
	}
      /* Now look for non-literal equal bases with the restriction of
	 handling constant offset and size.  */
      /* For a must-alias check we need to be able to constrain
	 the access properly.  */
      if (ref->max_size == -1)
	return false;
      HOST_WIDE_INT size, offset, max_size, ref_offset = ref->offset;
      bool reverse;
      tree base
	= get_ref_base_and_extent (lhs, &offset, &size, &max_size, &reverse);
      /* We can get MEM[symbol: sZ, index: D.8862_1] here,
	 so base == ref->base does not always hold.  */
      if (base != ref->base)
	{
	  /* Try using points-to info.  */
	  if (same_addr_size_stores_p (base, offset, size, max_size, ref->base,
				       ref->offset, ref->size, ref->max_size))
	    return true;

	  /* If both base and ref->base are MEM_REFs, only compare the
	     first operand, and if the second operand isn't equal constant,
	     try to add the offsets into offset and ref_offset.  */
	  if (TREE_CODE (base) == MEM_REF && TREE_CODE (ref->base) == MEM_REF
	      && TREE_OPERAND (base, 0) == TREE_OPERAND (ref->base, 0))
	    {
	      if (!tree_int_cst_equal (TREE_OPERAND (base, 1),
				       TREE_OPERAND (ref->base, 1)))
		{
		  offset_int off1 = mem_ref_offset (base);
		  off1 <<= LOG2_BITS_PER_UNIT;
		  off1 += offset;
		  offset_int off2 = mem_ref_offset (ref->base);
		  off2 <<= LOG2_BITS_PER_UNIT;
		  off2 += ref_offset;
		  if (wi::fits_shwi_p (off1) && wi::fits_shwi_p (off2))
		    {
		      offset = off1.to_shwi ();
		      ref_offset = off2.to_shwi ();
		    }
		  else
		    size = -1;
		}
	    }
	  else
	    size = -1;
	}
      /* For a must-alias check we need to be able to constrain
	 the access properly.  */
      if (size != -1 && size == max_size)
	{
	  if (offset <= ref_offset
	      && offset + size >= ref_offset + ref->max_size)
	    return true;
	}
    }
  if (is_gimple_call (stmt))
    {
      tree callee = gimple_call_fndecl (stmt);
      if (callee != NULL_TREE
	  && gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
	switch (DECL_FUNCTION_CODE (callee))
	  {
	  case BUILT_IN_FREE:
	    {
	      tree ptr = gimple_call_arg (stmt, 0);
	      tree base = ao_ref_base (ref);
	      if (base && TREE_CODE (base) == MEM_REF
		  && TREE_OPERAND (base, 0) == ptr)
		return true;
	      break;
	    }

	  case BUILT_IN_MEMCPY:
	  case BUILT_IN_MEMPCPY:
	  case BUILT_IN_MEMMOVE:
	  case BUILT_IN_MEMSET:
	  case BUILT_IN_MEMCPY_CHK:
	  case BUILT_IN_MEMPCPY_CHK:
	  case BUILT_IN_MEMMOVE_CHK:
	  case BUILT_IN_MEMSET_CHK:
	  case BUILT_IN_STRNCPY:
	  case BUILT_IN_STPNCPY:
	    {
	      /* For a must-alias check we need to be able to constrain
		 the access properly.  */
	      if (ref->max_size == -1)
		return false;
	      tree dest = gimple_call_arg (stmt, 0);
	      tree len = gimple_call_arg (stmt, 2);
	      if (!tree_fits_shwi_p (len))
		return false;
	      tree rbase = ref->base;
	      offset_int roffset = ref->offset;
	      ao_ref dref;
	      ao_ref_init_from_ptr_and_size (&dref, dest, len);
	      tree base = ao_ref_base (&dref);
	      offset_int offset = dref.offset;
	      if (!base || dref.size == -1)
		return false;
	      if (TREE_CODE (base) == MEM_REF)
		{
		  if (TREE_CODE (rbase) != MEM_REF)
		    return false;
		  // Compare pointers.
		  offset += mem_ref_offset (base) << LOG2_BITS_PER_UNIT;
		  roffset += mem_ref_offset (rbase) << LOG2_BITS_PER_UNIT;
		  base = TREE_OPERAND (base, 0);
		  rbase = TREE_OPERAND (rbase, 0);
		}
	      if (base == rbase
		  && offset <= roffset
		  && (roffset + ref->max_size
		      <= offset + (wi::to_offset (len) << LOG2_BITS_PER_UNIT)))
		return true;
	      break;
	    }

	  case BUILT_IN_VA_END:
	    {
	      tree ptr = gimple_call_arg (stmt, 0);
	      if (TREE_CODE (ptr) == ADDR_EXPR)
		{
		  tree base = ao_ref_base (ref);
		  if (TREE_OPERAND (ptr, 0) == base)
		    return true;
		}
	      break;
	    }

	  default:;
	  }
    }
  return false;
}

bool
stmt_kills_ref_p (gimple *stmt, tree ref)
{
  ao_ref r;
  ao_ref_init (&r, ref);
  return stmt_kills_ref_p (stmt, &r);
}

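/* Usage sketch (illustrative, not from the original sources): a simple
   dead-store style check; 'earlier_store' and 'later_store' are
   hypothetical statement pointers of the caller:

       ao_ref r;
       ao_ref_init (&r, gimple_assign_lhs (earlier_store));
       if (stmt_kills_ref_p (later_store, &r))
	 ...   // every byte written by earlier_store is written again,
	       // so without intervening uses the earlier store is dead
 */
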
/* Walk the virtual use-def chain of VUSE until hitting the virtual operand
   TARGET or a statement clobbering the memory reference REF in which
   case false is returned.  The walk starts with VUSE, one argument of PHI.  */

static bool
maybe_skip_until (gimple *phi, tree target, ao_ref *ref,
		  tree vuse, unsigned int *cnt, bitmap *visited,
		  bool abort_on_visited,
		  void *(*translate)(ao_ref *, tree, void *, bool *),
		  void *data)
{
  basic_block bb = gimple_bb (phi);

  if (!*visited)
    *visited = BITMAP_ALLOC (NULL);

  bitmap_set_bit (*visited, SSA_NAME_VERSION (PHI_RESULT (phi)));

  /* Walk until we hit the target.  */
  while (vuse != target)
    {
      gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
      /* Recurse for PHI nodes.  */
      if (gimple_code (def_stmt) == GIMPLE_PHI)
	{
	  /* An already visited PHI node ends the walk successfully.  */
	  if (bitmap_bit_p (*visited, SSA_NAME_VERSION (PHI_RESULT (def_stmt))))
	    return !abort_on_visited;
	  vuse = get_continuation_for_phi (def_stmt, ref, cnt,
					   visited, abort_on_visited,
					   translate, data);
	  if (!vuse)
	    return false;
	  continue;
	}
      else if (gimple_nop_p (def_stmt))
	return false;
      else
	{
	  /* A clobbering statement or the end of the IL ends it failing.  */
	  ++*cnt;
	  if (stmt_may_clobber_ref_p_1 (def_stmt, ref))
	    {
	      bool disambiguate_only = true;
	      if (translate
		  && (*translate) (ref, vuse, data, &disambiguate_only) == NULL)
		;
	      else
		return false;
	    }
	}
      /* If we reach a new basic-block see if we already skipped it
	 in a previous walk that ended successfully.  */
      if (gimple_bb (def_stmt) != bb)
	{
	  if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (vuse)))
	    return !abort_on_visited;
	  bb = gimple_bb (def_stmt);
	}
      vuse = gimple_vuse (def_stmt);
    }
  return true;
}

/* Starting from a PHI node for the virtual operand of the memory reference
   REF find a continuation virtual operand that allows to continue walking
   statements dominating PHI skipping only statements that cannot possibly
   clobber REF.  Increments *CNT for each alias disambiguation done.
   Returns NULL_TREE if no suitable virtual operand can be found.  */

tree
get_continuation_for_phi (gimple *phi, ao_ref *ref,
			  unsigned int *cnt, bitmap *visited,
			  bool abort_on_visited,
			  void *(*translate)(ao_ref *, tree, void *, bool *),
			  void *data)
{
  unsigned nargs = gimple_phi_num_args (phi);

  /* Through a single-argument PHI we can simply look through.  */
  if (nargs == 1)
    return PHI_ARG_DEF (phi, 0);

  /* For two or more arguments try to pairwise skip non-aliasing code
     until we hit the phi argument definition that dominates the other one.  */
  basic_block phi_bb = gimple_bb (phi);
  tree arg0, arg1;
  unsigned i;

  /* Find a candidate for the virtual operand which definition
     dominates those of all others.  */
  /* First look if any of the args themselves satisfy this.  */
  for (i = 0; i < nargs; ++i)
    {
      arg0 = PHI_ARG_DEF (phi, i);
      if (SSA_NAME_IS_DEFAULT_DEF (arg0))
	break;
      basic_block def_bb = gimple_bb (SSA_NAME_DEF_STMT (arg0));
      if (def_bb != phi_bb
	  && dominated_by_p (CDI_DOMINATORS, phi_bb, def_bb))
	break;
      arg0 = NULL_TREE;
    }
  /* If not, look if we can reach such candidate by walking defs
     of a PHI arg without crossing other PHIs.  */
  if (! arg0)
    for (i = 0; i < nargs; ++i)
      {
	arg0 = PHI_ARG_DEF (phi, i);
	gimple *def = SSA_NAME_DEF_STMT (arg0);
	/* Backedges can't work.  */
	if (dominated_by_p (CDI_DOMINATORS,
			    gimple_bb (def), phi_bb))
	  continue;
	if (gimple_code (def) == GIMPLE_PHI)
	  continue;
	while (! dominated_by_p (CDI_DOMINATORS,
				 phi_bb, gimple_bb (def)))
	  {
	    arg0 = gimple_vuse (def);
	    if (SSA_NAME_IS_DEFAULT_DEF (arg0))
	      break;
	    def = SSA_NAME_DEF_STMT (arg0);
	    if (gimple_code (def) == GIMPLE_PHI)
	      {
		/* Do not try to look through arbitrarily complicated
		   CFGs.  For those looking for the first VUSE starting
		   from the end of the immediate dominator of phi_bb
		   is likely faster.  */
		arg0 = NULL_TREE;
		goto next;
	      }
	  }
	break;
next:;
      }
  if (! arg0)
    return NULL_TREE;

  /* Then check against the found candidate.  */
  for (i = 0; i < nargs; ++i)
    {
      arg1 = PHI_ARG_DEF (phi, i);
      if (arg1 == arg0)
	;
      else if (! maybe_skip_until (phi, arg0, ref, arg1, cnt, visited,
				   abort_on_visited, translate, data))
	return NULL_TREE;
    }

  return arg0;
}

/* Based on the memory reference REF and its virtual use VUSE call
   WALKER for each virtual use that is equivalent to VUSE, including VUSE
   itself.  That is, for each virtual use for which its defining statement
   does not clobber REF.

   WALKER is called with REF, the current virtual use and DATA.  If
   WALKER returns non-NULL the walk stops and its result is returned.
   At the end of a non-successful walk NULL is returned.

   TRANSLATE if non-NULL is called with a pointer to REF, the virtual
   use which definition is a statement that may clobber REF and DATA.
   If TRANSLATE returns (void *)-1 the walk stops and NULL is returned.
   If TRANSLATE returns non-NULL the walk stops and its result is returned.
   If TRANSLATE returns NULL the walk continues and TRANSLATE is supposed
   to adjust REF and *DATA to make that valid.

   VALUEIZE if non-NULL is called with the next VUSE that is considered
   and return value is substituted for that.  This can be used to
   implement optimistic value-numbering for example.  Note that the
   VUSE argument is assumed to be valueized already.

   TODO: Cache the vector of equivalent vuses per ref, vuse pair.  */

void *
walk_non_aliased_vuses (ao_ref *ref, tree vuse,
			void *(*walker)(ao_ref *, tree, unsigned int, void *),
			void *(*translate)(ao_ref *, tree, void *, bool *),
			tree (*valueize)(tree),
			void *data)
{
  bitmap visited = NULL;
  void *res;
  unsigned int cnt = 0;
  bool translated = false;

  timevar_push (TV_ALIAS_STMT_WALK);

  do
    {
      gimple *def_stmt;

      /* ???  Do we want to account this to TV_ALIAS_STMT_WALK?  */
      res = (*walker) (ref, vuse, cnt, data);
      /* Abort walk.  */
      if (res == (void *)-1)
	{
	  res = NULL;
	  break;
	}
      /* Lookup succeeded.  */
      else if (res != NULL)
	break;

      if (valueize)
	vuse = valueize (vuse);
      def_stmt = SSA_NAME_DEF_STMT (vuse);
      if (gimple_nop_p (def_stmt))
	break;
      else if (gimple_code (def_stmt) == GIMPLE_PHI)
	vuse = get_continuation_for_phi (def_stmt, ref, &cnt,
					 &visited, translated, translate, data);
      else
	{
	  cnt++;
	  if (stmt_may_clobber_ref_p_1 (def_stmt, ref))
	    {
	      if (!translate)
		break;
	      bool disambiguate_only = false;
	      res = (*translate) (ref, vuse, data, &disambiguate_only);
	      /* Failed lookup and translation.  */
	      if (res == (void *)-1)
		{
		  res = NULL;
		  break;
		}
	      /* Lookup succeeded.  */
	      else if (res != NULL)
		break;
	      /* Translation succeeded, continue walking.  */
	      translated = translated || !disambiguate_only;
	    }
	  vuse = gimple_vuse (def_stmt);
	}
    }
  while (vuse);

  if (visited)
    BITMAP_FREE (visited);

  timevar_pop (TV_ALIAS_STMT_WALK);

  return res;
}

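/* Usage sketch (illustrative, not from the original sources): a minimal
   client that returns the first reaching definition statement that must
   provide the whole of REF; 'first_full_def' is a hypothetical callback,
   and 'r' / 'use_stmt' are hypothetical caller locals:

       static void *
       first_full_def (ao_ref *ref, tree vuse, unsigned int, void *)
       {
	 gimple *def = SSA_NAME_DEF_STMT (vuse);
	 if (gimple_assign_single_p (def) && stmt_kills_ref_p (def, ref))
	   return def;   // stop the walk and report this statement
	 return NULL;    // keep walking over non-clobbering definitions
       }

       gimple *found
	 = (gimple *) walk_non_aliased_vuses (&r, gimple_vuse (use_stmt),
					      first_full_def, NULL, NULL, NULL);

   Passing NULL for TRANSLATE and VALUEIZE keeps the walk purely
   disambiguation based.  */
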
/* Based on the memory reference REF call WALKER for each vdef which
   defining statement may clobber REF, starting with VDEF.  If REF
   is NULL_TREE, each defining statement is visited.

   WALKER is called with REF, the current vdef and DATA.  If WALKER
   returns true the walk is stopped, otherwise it continues.

   If function entry is reached, FUNCTION_ENTRY_REACHED is set to true.
   The pointer may be NULL and then we do not track this information.

   At PHI nodes walk_aliased_vdefs forks into one walk for each
   PHI argument (but only one walk continues on merge points), the
   return value is true if any of the walks was successful.

   The function returns the number of statements walked or -1 if
   LIMIT stmts were walked and the walk was aborted at this point.
   If LIMIT is zero the walk is not aborted.  */

static int
walk_aliased_vdefs_1 (ao_ref *ref, tree vdef,
		      bool (*walker)(ao_ref *, tree, void *), void *data,
		      bitmap *visited, unsigned int cnt,
		      bool *function_entry_reached, unsigned limit)
{
  do
    {
      gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);

      if (*visited
	  && !bitmap_set_bit (*visited, SSA_NAME_VERSION (vdef)))
	return cnt;

      if (gimple_nop_p (def_stmt))
	{
	  if (function_entry_reached)
	    *function_entry_reached = true;
	  return cnt;
	}
      else if (gimple_code (def_stmt) == GIMPLE_PHI)
	{
	  unsigned i;
	  if (!*visited)
	    *visited = BITMAP_ALLOC (NULL);
	  for (i = 0; i < gimple_phi_num_args (def_stmt); ++i)
	    {
	      int res = walk_aliased_vdefs_1 (ref,
					      gimple_phi_arg_def (def_stmt, i),
					      walker, data, visited, cnt,
					      function_entry_reached, limit);
	      if (res == -1)
		return -1;
	      cnt = res;
	    }
	  return cnt;
	}

      /* ???  Do we want to account this to TV_ALIAS_STMT_WALK?  */
      cnt++;
      if (cnt == limit)
	return -1;
      if ((!ref
	   || stmt_may_clobber_ref_p_1 (def_stmt, ref))
	  && (*walker) (ref, vdef, data))
	return cnt;

      vdef = gimple_vuse (def_stmt);
    }
  while (1);
}

int
walk_aliased_vdefs (ao_ref *ref, tree vdef,
		    bool (*walker)(ao_ref *, tree, void *), void *data,
		    bitmap *visited,
		    bool *function_entry_reached, unsigned int limit)
{
  bitmap local_visited = NULL;
  int ret;

  timevar_push (TV_ALIAS_STMT_WALK);

  if (function_entry_reached)
    *function_entry_reached = false;

  ret = walk_aliased_vdefs_1 (ref, vdef, walker, data,
			      visited ? visited : &local_visited, 0,
			      function_entry_reached, limit);
  if (local_visited)
    BITMAP_FREE (local_visited);

  timevar_pop (TV_ALIAS_STMT_WALK);

  return ret;
}

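/* Usage sketch (illustrative, not from the original sources): collecting
   every statement that may clobber a reference, e.g. to check a region
   for interfering writes; 'collect_vdef' and the vector are hypothetical:

       static bool
       collect_vdef (ao_ref *, tree vdef, void *data)
       {
	 ((vec<gimple *> *) data)->safe_push (SSA_NAME_DEF_STMT (vdef));
	 return false;   // false means: continue walking
       }

       auto_vec<gimple *> defs;
       walk_aliased_vdefs (&r, gimple_vuse (stmt), collect_vdef, &defs,
			   NULL, NULL, 0);

   A LIMIT of zero means the walk is never aborted early; 'r' and 'stmt'
   are hypothetical caller locals.  */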