/* gcc/tree-ssa-alias.c  */
/* Alias analysis for trees.
   Copyright (C) 2004-2018 Free Software Foundation, Inc.
   Contributed by Diego Novillo <dnovillo@redhat.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "timevar.h"	/* for TV_ALIAS_STMT_WALK */
#include "ssa.h"
#include "cgraph.h"
#include "tree-pretty-print.h"
#include "alias.h"
#include "fold-const.h"
#include "langhooks.h"
#include "dumpfile.h"
#include "tree-eh.h"
#include "tree-dfa.h"
#include "ipa-reference.h"
#include "varasm.h"
/* Broad overview of how alias analysis on gimple works:

   Statements clobbering or using memory are linked through the
   virtual operand factored use-def chain.  The virtual operand
   is unique per function; its symbol is accessible via gimple_vop (cfun).
   Virtual operands are used for efficiently walking memory statements
   in the gimple IL and are useful for things like value-numbering as
   a generation count for memory references.

   SSA_NAME pointers may have associated points-to information
   accessible via the SSA_NAME_PTR_INFO macro.  Flow-insensitive
   points-to information is (re-)computed by the TODO_rebuild_alias
   pass manager todo.  Points-to information is also used for more
   precise tracking of call-clobbered and call-used variables and
   related disambiguations.

   This file contains functions for disambiguating memory references,
   the so-called alias oracle, and tools for walking the gimple IL.

   The main alias-oracle entry-points are

   bool stmt_may_clobber_ref_p (gimple *, tree)

     This function queries if a statement may invalidate (parts of)
     the memory designated by the reference tree argument.

   bool ref_maybe_used_by_stmt_p (gimple *, tree)

     This function queries if a statement may need (parts of) the
     memory designated by the reference tree argument.

   There are variants of these functions that only handle the call
   part of a statement, call_may_clobber_ref_p and ref_maybe_used_by_call_p.
   Note that these do not disambiguate against a possible call lhs.

   bool refs_may_alias_p (tree, tree)

     This function tries to disambiguate two reference trees.

   bool ptr_deref_may_alias_global_p (tree)

     This function queries if dereferencing a pointer variable may
     alias global memory.

   More low-level disambiguators are available and documented in
   this file.  Low-level disambiguators dealing with points-to
   information are in tree-ssa-structalias.c.  */
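
/* Example usage sketch of the two main entry points (illustrative
   only, not part of the original sources).  Assuming a gimple
   statement STMT and a memory reference tree REF are in scope:

     if (!stmt_may_clobber_ref_p (stmt, ref)
	 && !ref_maybe_used_by_stmt_p (stmt, ref))
       ... STMT neither writes nor reads the memory REF designates,
	   so a load of REF may be moved across STMT ...

   All answers are conservative: true means "may", never "must".  */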

/* Query statistics for the different low-level disambiguators.
   A high-level query may trigger several of them.  */

static struct {
  unsigned HOST_WIDE_INT refs_may_alias_p_may_alias;
  unsigned HOST_WIDE_INT refs_may_alias_p_no_alias;
  unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_may_alias;
  unsigned HOST_WIDE_INT ref_maybe_used_by_call_p_no_alias;
  unsigned HOST_WIDE_INT call_may_clobber_ref_p_may_alias;
  unsigned HOST_WIDE_INT call_may_clobber_ref_p_no_alias;
} alias_stats;

void
dump_alias_stats (FILE *s)
{
  fprintf (s, "\nAlias oracle query stats:\n");
  fprintf (s, "  refs_may_alias_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.refs_may_alias_p_no_alias,
	   alias_stats.refs_may_alias_p_no_alias
	   + alias_stats.refs_may_alias_p_may_alias);
  fprintf (s, "  ref_maybe_used_by_call_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.ref_maybe_used_by_call_p_no_alias,
	   alias_stats.ref_maybe_used_by_call_p_no_alias
	   + alias_stats.ref_maybe_used_by_call_p_may_alias);
  fprintf (s, "  call_may_clobber_ref_p: "
	   HOST_WIDE_INT_PRINT_DEC" disambiguations, "
	   HOST_WIDE_INT_PRINT_DEC" queries\n",
	   alias_stats.call_may_clobber_ref_p_no_alias,
	   alias_stats.call_may_clobber_ref_p_no_alias
	   + alias_stats.call_may_clobber_ref_p_may_alias);
  dump_alias_stats_in_alias_c (s);
}

/* Return true if dereferencing PTR may alias a global variable.  */

bool
ptr_deref_may_alias_global_p (tree ptr)
{
  struct ptr_info_def *pi;

  /* If we end up with a pointer constant here, it may point
     to global memory.  */
  if (TREE_CODE (ptr) != SSA_NAME)
    return true;

  pi = SSA_NAME_PTR_INFO (ptr);

  /* If we do not have points-to information for this variable,
     we have to punt.  */
  if (!pi)
    return true;

  /* ??? This does not use TBAA to prune globals ptr may not access.  */
  return pt_solution_includes_global (&pi->pt);
}
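
/* Example (illustrative sketch, not part of the original sources):
   for

     int g;
     void f (int *p) { *p = 1; }

   ptr_deref_may_alias_global_p (p) answers true unless points-to
   analysis computed a solution for P that contains no global or
   escaped variable; without SSA_NAME_PTR_INFO it must answer true.  */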

/* Return true if dereferencing PTR may alias DECL.
   The caller is responsible for applying TBAA to see if PTR
   may access DECL at all.  */

static bool
ptr_deref_may_alias_decl_p (tree ptr, tree decl)
{
  struct ptr_info_def *pi;

  /* Conversions are irrelevant for points-to information and
     data-dependence analysis can feed us those.  */
  STRIP_NOPS (ptr);

  /* Anything we do not explicitly handle aliases.  */
  if ((TREE_CODE (ptr) != SSA_NAME
       && TREE_CODE (ptr) != ADDR_EXPR
       && TREE_CODE (ptr) != POINTER_PLUS_EXPR)
      || !POINTER_TYPE_P (TREE_TYPE (ptr))
      || (!VAR_P (decl)
	  && TREE_CODE (decl) != PARM_DECL
	  && TREE_CODE (decl) != RESULT_DECL))
    return true;

  /* Disregard pointer offsetting.  */
  if (TREE_CODE (ptr) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr = TREE_OPERAND (ptr, 0);
	}
      while (TREE_CODE (ptr) == POINTER_PLUS_EXPR);
      return ptr_deref_may_alias_decl_p (ptr, decl);
    }

  /* ADDR_EXPR pointers either just offset another pointer or directly
     specify the pointed-to set.  */
  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	ptr = TREE_OPERAND (base, 0);
      else if (base
	       && DECL_P (base))
	return compare_base_decls (base, decl) != 0;
      else if (base
	       && CONSTANT_CLASS_P (base))
	return false;
      else
	return true;
    }

  /* Non-aliased variables cannot be pointed to.  */
  if (!may_be_aliased (decl))
    return false;

  /* If we do not have useful points-to information for this pointer
     we cannot disambiguate anything else.  */
  pi = SSA_NAME_PTR_INFO (ptr);
  if (!pi)
    return true;

  return pt_solution_includes (&pi->pt, decl);
}

/* Return true if dereferenced PTR1 and PTR2 may alias.
   The caller is responsible for applying TBAA to see if accesses
   through PTR1 and PTR2 may conflict at all.  */

bool
ptr_derefs_may_alias_p (tree ptr1, tree ptr2)
{
  struct ptr_info_def *pi1, *pi2;

  /* Conversions are irrelevant for points-to information and
     data-dependence analysis can feed us those.  */
  STRIP_NOPS (ptr1);
  STRIP_NOPS (ptr2);

  /* Disregard pointer offsetting.  */
  if (TREE_CODE (ptr1) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr1 = TREE_OPERAND (ptr1, 0);
	}
      while (TREE_CODE (ptr1) == POINTER_PLUS_EXPR);
      return ptr_derefs_may_alias_p (ptr1, ptr2);
    }
  if (TREE_CODE (ptr2) == POINTER_PLUS_EXPR)
    {
      do
	{
	  ptr2 = TREE_OPERAND (ptr2, 0);
	}
      while (TREE_CODE (ptr2) == POINTER_PLUS_EXPR);
      return ptr_derefs_may_alias_p (ptr1, ptr2);
    }

  /* ADDR_EXPR pointers either just offset another pointer or directly
     specify the pointed-to set.  */
  if (TREE_CODE (ptr1) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr1, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	return ptr_derefs_may_alias_p (TREE_OPERAND (base, 0), ptr2);
      else if (base
	       && DECL_P (base))
	return ptr_deref_may_alias_decl_p (ptr2, base);
      else
	return true;
    }
  if (TREE_CODE (ptr2) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (ptr2, 0));
      if (base
	  && (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF))
	return ptr_derefs_may_alias_p (ptr1, TREE_OPERAND (base, 0));
      else if (base
	       && DECL_P (base))
	return ptr_deref_may_alias_decl_p (ptr1, base);
      else
	return true;
    }

  /* From here we require SSA name pointers.  Anything else aliases.  */
  if (TREE_CODE (ptr1) != SSA_NAME
      || TREE_CODE (ptr2) != SSA_NAME
      || !POINTER_TYPE_P (TREE_TYPE (ptr1))
      || !POINTER_TYPE_P (TREE_TYPE (ptr2)))
    return true;

  /* We may end up with two empty points-to solutions for the same
     pointer.  In this case we still want to say both pointers alias,
     so shortcut that here.  */
  if (ptr1 == ptr2)
    return true;

  /* If we do not have useful points-to information for either pointer
     we cannot disambiguate anything else.  */
  pi1 = SSA_NAME_PTR_INFO (ptr1);
  pi2 = SSA_NAME_PTR_INFO (ptr2);
  if (!pi1 || !pi2)
    return true;

  /* ??? This does not use TBAA to prune decls from the intersection
     that not both pointers may access.  */
  return pt_solutions_intersect (&pi1->pt, &pi2->pt);
}

/* Return true if dereferencing PTR may alias *REF.
   The caller is responsible for applying TBAA to see if PTR
   may access *REF at all.  */

static bool
ptr_deref_may_alias_ref_p_1 (tree ptr, ao_ref *ref)
{
  tree base = ao_ref_base (ref);

  if (TREE_CODE (base) == MEM_REF
      || TREE_CODE (base) == TARGET_MEM_REF)
    return ptr_derefs_may_alias_p (ptr, TREE_OPERAND (base, 0));
  else if (DECL_P (base))
    return ptr_deref_may_alias_decl_p (ptr, base);

  return true;
}

/* Returns true if PTR1 and PTR2 compare unequal because of points-to.  */

bool
ptrs_compare_unequal (tree ptr1, tree ptr2)
{
  /* First resolve the pointers down to a SSA name pointer base or
     a VAR_DECL, PARM_DECL or RESULT_DECL.  This explicitly does
     not yet try to handle LABEL_DECLs, FUNCTION_DECLs, CONST_DECLs
     or STRING_CSTs, which would need points-to adjustments to track
     them in the points-to sets.  */
  tree obj1 = NULL_TREE;
  tree obj2 = NULL_TREE;
  if (TREE_CODE (ptr1) == ADDR_EXPR)
    {
      tree tem = get_base_address (TREE_OPERAND (ptr1, 0));
      if (! tem)
	return false;
      if (VAR_P (tem)
	  || TREE_CODE (tem) == PARM_DECL
	  || TREE_CODE (tem) == RESULT_DECL)
	obj1 = tem;
      else if (TREE_CODE (tem) == MEM_REF)
	ptr1 = TREE_OPERAND (tem, 0);
    }
  if (TREE_CODE (ptr2) == ADDR_EXPR)
    {
      tree tem = get_base_address (TREE_OPERAND (ptr2, 0));
      if (! tem)
	return false;
      if (VAR_P (tem)
	  || TREE_CODE (tem) == PARM_DECL
	  || TREE_CODE (tem) == RESULT_DECL)
	obj2 = tem;
      else if (TREE_CODE (tem) == MEM_REF)
	ptr2 = TREE_OPERAND (tem, 0);
    }

  /* Canonicalize ptr vs. object.  */
  if (TREE_CODE (ptr1) == SSA_NAME && obj2)
    {
      std::swap (ptr1, ptr2);
      std::swap (obj1, obj2);
    }

  if (obj1 && obj2)
    /* Other code handles this correctly, no need to duplicate it here.  */;
  else if (obj1 && TREE_CODE (ptr2) == SSA_NAME)
    {
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr2);
      /* We may not use restrict to optimize pointer comparisons.
	 See PR71062.  So we have to assume that restrict-pointed-to
	 may be in fact obj1.  */
      if (!pi
	  || pi->pt.vars_contains_restrict
	  || pi->pt.vars_contains_interposable)
	return false;
      if (VAR_P (obj1)
	  && (TREE_STATIC (obj1) || DECL_EXTERNAL (obj1)))
	{
	  varpool_node *node = varpool_node::get (obj1);
	  /* If obj1 may bind to NULL give up (see below).  */
	  if (! node
	      || ! node->nonzero_address ()
	      || ! decl_binds_to_current_def_p (obj1))
	    return false;
	}
      return !pt_solution_includes (&pi->pt, obj1);
    }

  /* ??? We'd like to handle ptr1 != NULL and ptr1 != ptr2
     but those require pt.null to be conservatively correct.  */

  return false;
}
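
/* Example (illustrative sketch, not part of the original sources;
   get_ptr is a hypothetical function):

     int a;
     int *p = get_ptr ();
     if (p != &a)
       ...

   The comparison can be folded to true when the query above proves
   A is not in P's points-to set and the restrict/interposition
   caveats do not apply.  */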

/* Returns whether a reference based on BASE may refer to global memory.  */

static bool
ref_may_alias_global_p_1 (tree base)
{
  if (DECL_P (base))
    return is_global_var (base);
  else if (TREE_CODE (base) == MEM_REF
	   || TREE_CODE (base) == TARGET_MEM_REF)
    return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
  return true;
}

bool
ref_may_alias_global_p (ao_ref *ref)
{
  tree base = ao_ref_base (ref);
  return ref_may_alias_global_p_1 (base);
}

bool
ref_may_alias_global_p (tree ref)
{
  tree base = get_base_address (ref);
  return ref_may_alias_global_p_1 (base);
}

/* Return true if STMT may clobber global memory.  */

bool
stmt_may_clobber_global_p (gimple *stmt)
{
  tree lhs;

  if (!gimple_vdef (stmt))
    return false;

  /* ??? We can ask the oracle whether an artificial pointer
     dereference with a pointer with points-to information covering
     all global memory (what about non-address taken memory?) may be
     clobbered by this call.  As there is at the moment no convenient
     way of doing that without generating garbage do some manual
     checking instead.
     ??? We could make a NULL ao_ref argument to the various
     predicates special, meaning any global memory.  */

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      lhs = gimple_assign_lhs (stmt);
      return (TREE_CODE (lhs) != SSA_NAME
	      && ref_may_alias_global_p (lhs));
    case GIMPLE_CALL:
      return true;
    default:
      return true;
    }
}

/* Dump alias information on FILE.  */

void
dump_alias_info (FILE *file)
{
  unsigned i;
  tree ptr;
  const char *funcname
    = lang_hooks.decl_printable_name (current_function_decl, 2);
  tree var;

  fprintf (file, "\n\nAlias information for %s\n\n", funcname);

  fprintf (file, "Aliased symbols\n\n");

  FOR_EACH_LOCAL_DECL (cfun, i, var)
    {
      if (may_be_aliased (var))
	dump_variable (file, var);
    }

  fprintf (file, "\nCall clobber information\n");

  fprintf (file, "\nESCAPED");
  dump_points_to_solution (file, &cfun->gimple_df->escaped);

  fprintf (file, "\n\nFlow-insensitive points-to information\n\n");

  FOR_EACH_SSA_NAME (i, ptr, cfun)
    {
      struct ptr_info_def *pi;

      if (!POINTER_TYPE_P (TREE_TYPE (ptr))
	  || SSA_NAME_IN_FREE_LIST (ptr))
	continue;

      pi = SSA_NAME_PTR_INFO (ptr);
      if (pi)
	dump_points_to_info_for (file, ptr);
    }

  fprintf (file, "\n");
}

/* Dump alias information on stderr.  */

DEBUG_FUNCTION void
debug_alias_info (void)
{
  dump_alias_info (stderr);
}

/* Dump the points-to set *PT into FILE.  */

void
dump_points_to_solution (FILE *file, struct pt_solution *pt)
{
  if (pt->anything)
    fprintf (file, ", points-to anything");

  if (pt->nonlocal)
    fprintf (file, ", points-to non-local");

  if (pt->escaped)
    fprintf (file, ", points-to escaped");

  if (pt->ipa_escaped)
    fprintf (file, ", points-to unit escaped");

  if (pt->null)
    fprintf (file, ", points-to NULL");

  if (pt->vars)
    {
      fprintf (file, ", points-to vars: ");
      dump_decl_set (file, pt->vars);
      if (pt->vars_contains_nonlocal
	  || pt->vars_contains_escaped
	  || pt->vars_contains_escaped_heap
	  || pt->vars_contains_restrict
	  || pt->vars_contains_interposable)
	{
	  const char *comma = "";
	  fprintf (file, " (");
	  if (pt->vars_contains_nonlocal)
	    {
	      fprintf (file, "nonlocal");
	      comma = ", ";
	    }
	  if (pt->vars_contains_escaped)
	    {
	      fprintf (file, "%sescaped", comma);
	      comma = ", ";
	    }
	  if (pt->vars_contains_escaped_heap)
	    {
	      fprintf (file, "%sescaped heap", comma);
	      comma = ", ";
	    }
	  if (pt->vars_contains_restrict)
	    {
	      fprintf (file, "%srestrict", comma);
	      comma = ", ";
	    }
	  if (pt->vars_contains_interposable)
	    fprintf (file, "%sinterposable", comma);
	  fprintf (file, ")");
	}
    }
}

/* Unified dump function for pt_solution.  */

DEBUG_FUNCTION void
debug (pt_solution &ref)
{
  dump_points_to_solution (stderr, &ref);
}

DEBUG_FUNCTION void
debug (pt_solution *ptr)
{
  if (ptr)
    debug (*ptr);
  else
    fprintf (stderr, "<nil>\n");
}

/* Dump points-to information for SSA_NAME PTR into FILE.  */

void
dump_points_to_info_for (FILE *file, tree ptr)
{
  struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);

  print_generic_expr (file, ptr, dump_flags);

  if (pi)
    dump_points_to_solution (file, &pi->pt);
  else
    fprintf (file, ", points-to anything");

  fprintf (file, "\n");
}

/* Dump points-to information for VAR into stderr.  */

DEBUG_FUNCTION void
debug_points_to_info_for (tree var)
{
  dump_points_to_info_for (stderr, var);
}

/* Initializes the alias-oracle reference representation *R from REF.  */

void
ao_ref_init (ao_ref *r, tree ref)
{
  r->ref = ref;
  r->base = NULL_TREE;
  r->offset = 0;
  r->size = -1;
  r->max_size = -1;
  r->ref_alias_set = -1;
  r->base_alias_set = -1;
  r->volatile_p = ref ? TREE_THIS_VOLATILE (ref) : false;
}
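
/* Example usage sketch (illustrative only, not part of the original
   sources).  Most fields start out "unknown" and are filled in
   lazily by the accessors that follow:

     ao_ref r;
     ao_ref_init (&r, ref_tree);
     tree base = ao_ref_base (&r);	       also fills offset/size
     alias_set_type set = ao_ref_alias_set (&r);

   with REF_TREE standing for some memory reference tree.  */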

/* Returns the base object of the memory reference *REF.  */

tree
ao_ref_base (ao_ref *ref)
{
  bool reverse;

  if (ref->base)
    return ref->base;
  ref->base = get_ref_base_and_extent (ref->ref, &ref->offset, &ref->size,
				       &ref->max_size, &reverse);
  return ref->base;
}

/* Returns the base object alias set of the memory reference *REF.  */

alias_set_type
ao_ref_base_alias_set (ao_ref *ref)
{
  tree base_ref;
  if (ref->base_alias_set != -1)
    return ref->base_alias_set;
  if (!ref->ref)
    return 0;
  base_ref = ref->ref;
  while (handled_component_p (base_ref))
    base_ref = TREE_OPERAND (base_ref, 0);
  ref->base_alias_set = get_alias_set (base_ref);
  return ref->base_alias_set;
}

/* Returns the reference alias set of the memory reference *REF.  */

alias_set_type
ao_ref_alias_set (ao_ref *ref)
{
  if (ref->ref_alias_set != -1)
    return ref->ref_alias_set;
  ref->ref_alias_set = get_alias_set (ref->ref);
  return ref->ref_alias_set;
}

/* Init an alias-oracle reference representation from a gimple pointer
   PTR and a gimple size SIZE in bytes.  If SIZE is NULL_TREE then the
   size is assumed to be unknown.  The access is assumed to be only
   to or after the pointer target, not before it.  */

void
ao_ref_init_from_ptr_and_size (ao_ref *ref, tree ptr, tree size)
{
  poly_int64 t, size_hwi, extra_offset = 0;
  ref->ref = NULL_TREE;
  if (TREE_CODE (ptr) == SSA_NAME)
    {
      gimple *stmt = SSA_NAME_DEF_STMT (ptr);
      if (gimple_assign_single_p (stmt)
	  && gimple_assign_rhs_code (stmt) == ADDR_EXPR)
	ptr = gimple_assign_rhs1 (stmt);
      else if (is_gimple_assign (stmt)
	       && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
	       && ptrdiff_tree_p (gimple_assign_rhs2 (stmt), &extra_offset))
	{
	  ptr = gimple_assign_rhs1 (stmt);
	  extra_offset *= BITS_PER_UNIT;
	}
    }

  if (TREE_CODE (ptr) == ADDR_EXPR)
    {
      ref->base = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &t);
      if (ref->base)
	ref->offset = BITS_PER_UNIT * t;
      else
	{
	  size = NULL_TREE;
	  ref->offset = 0;
	  ref->base = get_base_address (TREE_OPERAND (ptr, 0));
	}
    }
  else
    {
      ref->base = build2 (MEM_REF, char_type_node,
			  ptr, null_pointer_node);
      ref->offset = 0;
    }
  ref->offset += extra_offset;
  if (size
      && poly_int_tree_p (size, &size_hwi)
      && coeffs_in_range_p (size_hwi, 0, HOST_WIDE_INT_MAX / BITS_PER_UNIT))
    ref->max_size = ref->size = size_hwi * BITS_PER_UNIT;
  else
    ref->max_size = ref->size = -1;
  ref->ref_alias_set = 0;
  ref->base_alias_set = 0;
  ref->volatile_p = false;
}
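
/* Example (illustrative): the builtin handling further down models
   the source of a memcpy in essentially this way:

     ao_ref dref;
     ao_ref_init_from_ptr_and_size (&dref,
				    gimple_call_arg (call, 1),
				    gimple_call_arg (call, 2));

   giving an ao_ref that covers the N bytes read through the second
   argument.  */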

/* Return 1 if TYPE1 and TYPE2 are to be considered equivalent for the
   purpose of TBAA.  Return 0 if they are distinct and -1 if we cannot
   decide.  */

static inline int
same_type_for_tbaa (tree type1, tree type2)
{
  type1 = TYPE_MAIN_VARIANT (type1);
  type2 = TYPE_MAIN_VARIANT (type2);

  /* If we would have to do structural comparison bail out.  */
  if (TYPE_STRUCTURAL_EQUALITY_P (type1)
      || TYPE_STRUCTURAL_EQUALITY_P (type2))
    return -1;

  /* Compare the canonical types.  */
  if (TYPE_CANONICAL (type1) == TYPE_CANONICAL (type2))
    return 1;

  /* ??? Array types are not properly unified in all cases as we have
     spurious changes in the index types for example.  Removing this
     causes all sorts of problems with the Fortran frontend.  */
  if (TREE_CODE (type1) == ARRAY_TYPE
      && TREE_CODE (type2) == ARRAY_TYPE)
    return -1;

  /* ??? In Ada, an lvalue of an unconstrained type can be used to access an
     object of one of its constrained subtypes, e.g. when a function with an
     unconstrained parameter passed by reference is called on an object and
     inlined.  But, even in the case of a fixed size, type and subtypes are
     not equivalent enough as to share the same TYPE_CANONICAL, since this
     would mean that conversions between them are useless, whereas they are
     not (e.g. type and subtypes can have different modes).  So, in the end,
     they are only guaranteed to have the same alias set.  */
  if (get_alias_set (type1) == get_alias_set (type2))
    return -1;

  /* The types are known to be not equal.  */
  return 0;
}
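
/* Example (illustrative): callers below rely on the tri-state;
   "int" vs. "int" yields 1, "int" vs. "float" yields 0 (distinct
   alias sets), and two array types that are not canonically equal
   yield -1, i.e. "cannot decide", which must be treated
   conservatively.  */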

/* Determine if the two component references REF1 and REF2 which are
   based on access types TYPE1 and TYPE2 and of which at least one is based
   on an indirect reference may alias.  REF2 is the only one that can
   be a decl in which case REF2_IS_DECL is true.
   REF1_ALIAS_SET, BASE1_ALIAS_SET, REF2_ALIAS_SET and BASE2_ALIAS_SET
   are the respective alias sets.  */

static bool
aliasing_component_refs_p (tree ref1,
			   alias_set_type ref1_alias_set,
			   alias_set_type base1_alias_set,
			   poly_int64 offset1, poly_int64 max_size1,
			   tree ref2,
			   alias_set_type ref2_alias_set,
			   alias_set_type base2_alias_set,
			   poly_int64 offset2, poly_int64 max_size2,
			   bool ref2_is_decl)
{
  /* If one reference is a component reference through pointers try to find a
     common base and apply offset based disambiguation.  This handles
     for example
       struct A { int i; int j; } *q;
       struct B { struct A a; int k; } *p;
     disambiguating q->i and p->a.j.  */
  tree base1, base2;
  tree type1, type2;
  tree *refp;
  int same_p;

  /* Choose bases and base types to search for.  */
  base1 = ref1;
  while (handled_component_p (base1))
    base1 = TREE_OPERAND (base1, 0);
  type1 = TREE_TYPE (base1);
  base2 = ref2;
  while (handled_component_p (base2))
    base2 = TREE_OPERAND (base2, 0);
  type2 = TREE_TYPE (base2);

  /* Now search for the type1 in the access path of ref2.  This
     would be a common base for doing offset based disambiguation on.  */
  refp = &ref2;
  while (handled_component_p (*refp)
	 && same_type_for_tbaa (TREE_TYPE (*refp), type1) == 0)
    refp = &TREE_OPERAND (*refp, 0);
  same_p = same_type_for_tbaa (TREE_TYPE (*refp), type1);
  /* If we couldn't compare types we have to bail out.  */
  if (same_p == -1)
    return true;
  else if (same_p == 1)
    {
      poly_int64 offadj, sztmp, msztmp;
      bool reverse;
      get_ref_base_and_extent (*refp, &offadj, &sztmp, &msztmp, &reverse);
      offset2 -= offadj;
      get_ref_base_and_extent (base1, &offadj, &sztmp, &msztmp, &reverse);
      offset1 -= offadj;
      return ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2);
    }
  /* If we didn't find a common base, try the other way around.  */
  refp = &ref1;
  while (handled_component_p (*refp)
	 && same_type_for_tbaa (TREE_TYPE (*refp), type2) == 0)
    refp = &TREE_OPERAND (*refp, 0);
  same_p = same_type_for_tbaa (TREE_TYPE (*refp), type2);
  /* If we couldn't compare types we have to bail out.  */
  if (same_p == -1)
    return true;
  else if (same_p == 1)
    {
      poly_int64 offadj, sztmp, msztmp;
      bool reverse;
      get_ref_base_and_extent (*refp, &offadj, &sztmp, &msztmp, &reverse);
      offset1 -= offadj;
      get_ref_base_and_extent (base2, &offadj, &sztmp, &msztmp, &reverse);
      offset2 -= offadj;
      return ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2);
    }

  /* If we have two type access paths B1.path1 and B2.path2 they may
     only alias if either B1 is in B2.path2 or B2 is in B1.path1.
     But we can still have a path that goes B1.path1...B2.path2 with
     a part that we do not see.  So we can only disambiguate now
     if there is no B2 in the tail of path1 and no B1 on the
     tail of path2.  */
  if (base1_alias_set == ref2_alias_set
      || alias_set_subset_of (base1_alias_set, ref2_alias_set))
    return true;
  /* If this is ptr vs. decl then we know there is no ptr ... decl path.  */
  if (!ref2_is_decl)
    return (base2_alias_set == ref1_alias_set
	    || alias_set_subset_of (base2_alias_set, ref1_alias_set));
  return false;
}

/* Return true if we can determine that component references REF1 and REF2,
   that are within a common DECL, cannot overlap.  */

static bool
nonoverlapping_component_refs_of_decl_p (tree ref1, tree ref2)
{
  auto_vec<tree, 16> component_refs1;
  auto_vec<tree, 16> component_refs2;

  /* Create the stack of handled components for REF1.  */
  while (handled_component_p (ref1))
    {
      component_refs1.safe_push (ref1);
      ref1 = TREE_OPERAND (ref1, 0);
    }
  if (TREE_CODE (ref1) == MEM_REF)
    {
      if (!integer_zerop (TREE_OPERAND (ref1, 1)))
	return false;
      ref1 = TREE_OPERAND (TREE_OPERAND (ref1, 0), 0);
    }

  /* Create the stack of handled components for REF2.  */
  while (handled_component_p (ref2))
    {
      component_refs2.safe_push (ref2);
      ref2 = TREE_OPERAND (ref2, 0);
    }
  if (TREE_CODE (ref2) == MEM_REF)
    {
      if (!integer_zerop (TREE_OPERAND (ref2, 1)))
	return false;
      ref2 = TREE_OPERAND (TREE_OPERAND (ref2, 0), 0);
    }

  /* Bases must be either the same or uncomparable.  */
  gcc_checking_assert (ref1 == ref2
		       || (DECL_P (ref1) && DECL_P (ref2)
			   && compare_base_decls (ref1, ref2) != 0));

  /* Pop the stacks in parallel and examine the COMPONENT_REFs of the same
     rank.  This is sufficient because we start from the same DECL and you
     cannot reference several fields at a time with COMPONENT_REFs (unlike
     with ARRAY_RANGE_REFs for arrays) so you always need the same number
     of them to access a sub-component, unless you're in a union, in which
     case the return value will precisely be false.  */
  while (true)
    {
      do
	{
	  if (component_refs1.is_empty ())
	    return false;
	  ref1 = component_refs1.pop ();
	}
      while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref1, 0))));

      do
	{
	  if (component_refs2.is_empty ())
	    return false;
	  ref2 = component_refs2.pop ();
	}
      while (!RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (ref2, 0))));

      /* Beware of BIT_FIELD_REF.  */
      if (TREE_CODE (ref1) != COMPONENT_REF
	  || TREE_CODE (ref2) != COMPONENT_REF)
	return false;

      tree field1 = TREE_OPERAND (ref1, 1);
      tree field2 = TREE_OPERAND (ref2, 1);

      /* ??? We cannot simply use the type of operand #0 of the refs here
	 as the Fortran compiler smuggles type punning into COMPONENT_REFs
	 for common blocks instead of using unions like everyone else.  */
      tree type1 = DECL_CONTEXT (field1);
      tree type2 = DECL_CONTEXT (field2);

      /* We cannot disambiguate fields in a union or qualified union.  */
      if (type1 != type2 || TREE_CODE (type1) != RECORD_TYPE)
	return false;

      if (field1 != field2)
	{
	  /* A field and its representative need to be considered the
	     same.  */
	  if (DECL_BIT_FIELD_REPRESENTATIVE (field1) == field2
	      || DECL_BIT_FIELD_REPRESENTATIVE (field2) == field1)
	    return false;
	  /* Different fields of the same record type cannot overlap.
	     ??? Bitfields can overlap at RTL level so punt on them.  */
	  if (DECL_BIT_FIELD (field1) && DECL_BIT_FIELD (field2))
	    return false;
	  return true;
	}
    }

  return false;
}
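
/* Example (illustrative): for

     struct S { int a; int b; } s;

   the accesses s.a and s.b pop COMPONENT_REFs with different
   FIELD_DECLs of the same RECORD_TYPE and are disambiguated above;
   were S a union, the TREE_CODE (type1) != RECORD_TYPE check would
   make the walk answer false (may overlap) instead.  */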

/* qsort compare function to sort FIELD_DECLs after their
   DECL_FIELD_CONTEXT TYPE_UID.  */

static inline int
ncr_compar (const void *field1_, const void *field2_)
{
  const_tree field1 = *(const_tree *) const_cast <void *>(field1_);
  const_tree field2 = *(const_tree *) const_cast <void *>(field2_);
  unsigned int uid1 = TYPE_UID (DECL_FIELD_CONTEXT (field1));
  unsigned int uid2 = TYPE_UID (DECL_FIELD_CONTEXT (field2));
  if (uid1 < uid2)
    return -1;
  else if (uid1 > uid2)
    return 1;
  return 0;
}

/* Return true if we can determine that the fields referenced cannot
   overlap for any pair of objects.  */

static bool
nonoverlapping_component_refs_p (const_tree x, const_tree y)
{
  if (!flag_strict_aliasing
      || !x || !y
      || TREE_CODE (x) != COMPONENT_REF
      || TREE_CODE (y) != COMPONENT_REF)
    return false;

  auto_vec<const_tree, 16> fieldsx;
  while (TREE_CODE (x) == COMPONENT_REF)
    {
      tree field = TREE_OPERAND (x, 1);
      tree type = DECL_FIELD_CONTEXT (field);
      if (TREE_CODE (type) == RECORD_TYPE)
	fieldsx.safe_push (field);
      x = TREE_OPERAND (x, 0);
    }
  if (fieldsx.length () == 0)
    return false;
  auto_vec<const_tree, 16> fieldsy;
  while (TREE_CODE (y) == COMPONENT_REF)
    {
      tree field = TREE_OPERAND (y, 1);
      tree type = DECL_FIELD_CONTEXT (field);
      if (TREE_CODE (type) == RECORD_TYPE)
	fieldsy.safe_push (field);
      y = TREE_OPERAND (y, 0);
    }
  if (fieldsy.length () == 0)
    return false;

  /* Most common case first.  */
  if (fieldsx.length () == 1
      && fieldsy.length () == 1)
    return ((DECL_FIELD_CONTEXT (fieldsx[0])
	     == DECL_FIELD_CONTEXT (fieldsy[0]))
	    && fieldsx[0] != fieldsy[0]
	    && !(DECL_BIT_FIELD (fieldsx[0]) && DECL_BIT_FIELD (fieldsy[0])));

  if (fieldsx.length () == 2)
    {
      if (ncr_compar (&fieldsx[0], &fieldsx[1]) == 1)
	std::swap (fieldsx[0], fieldsx[1]);
    }
  else
    fieldsx.qsort (ncr_compar);

  if (fieldsy.length () == 2)
    {
      if (ncr_compar (&fieldsy[0], &fieldsy[1]) == 1)
	std::swap (fieldsy[0], fieldsy[1]);
    }
  else
    fieldsy.qsort (ncr_compar);

  unsigned i = 0, j = 0;
  do
    {
      const_tree fieldx = fieldsx[i];
      const_tree fieldy = fieldsy[j];
      tree typex = DECL_FIELD_CONTEXT (fieldx);
      tree typey = DECL_FIELD_CONTEXT (fieldy);
      if (typex == typey)
	{
	  /* We're left with accessing different fields of a structure,
	     no possible overlap.  */
	  if (fieldx != fieldy)
	    {
	      /* A field and its representative need to be considered the
		 same.  */
	      if (DECL_BIT_FIELD_REPRESENTATIVE (fieldx) == fieldy
		  || DECL_BIT_FIELD_REPRESENTATIVE (fieldy) == fieldx)
		return false;
	      /* Different fields of the same record type cannot overlap.
		 ??? Bitfields can overlap at RTL level so punt on them.  */
	      if (DECL_BIT_FIELD (fieldx) && DECL_BIT_FIELD (fieldy))
		return false;
	      return true;
	    }
	}
      if (TYPE_UID (typex) < TYPE_UID (typey))
	{
	  i++;
	  if (i == fieldsx.length ())
	    break;
	}
      else
	{
	  j++;
	  if (j == fieldsy.length ())
	    break;
	}
    }
  while (1);

  return false;
}

/* Return true if two memory references based on the variables BASE1
   and BASE2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
   [OFFSET2, OFFSET2 + MAX_SIZE2) may alias.  REF1 and REF2
   if non-NULL are the complete memory reference trees.  */

static bool
decl_refs_may_alias_p (tree ref1, tree base1,
		       poly_int64 offset1, poly_int64 max_size1,
		       tree ref2, tree base2,
		       poly_int64 offset2, poly_int64 max_size2)
{
  gcc_checking_assert (DECL_P (base1) && DECL_P (base2));

  /* If both references are based on different variables, they cannot alias.  */
  if (compare_base_decls (base1, base2) == 0)
    return false;

  /* If both references are based on the same variable, they cannot alias if
     the accesses do not overlap.  */
  if (!ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2))
    return false;

  /* For components with variable position, the above test isn't sufficient,
     so we disambiguate component references manually.  */
  if (ref1 && ref2
      && handled_component_p (ref1) && handled_component_p (ref2)
      && nonoverlapping_component_refs_of_decl_p (ref1, ref2))
    return false;

  return true;
}

/* Return true if an indirect reference based on *PTR1 constrained
   to [OFFSET1, OFFSET1 + MAX_SIZE1) may alias a variable based on BASE2
   constrained to [OFFSET2, OFFSET2 + MAX_SIZE2).  *PTR1 and BASE2 have
   the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
   in which case they are computed on-demand.  REF1 and REF2
   if non-NULL are the complete memory reference trees.  */

static bool
indirect_ref_may_alias_decl_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
			       poly_int64 offset1, poly_int64 max_size1,
			       alias_set_type ref1_alias_set,
			       alias_set_type base1_alias_set,
			       tree ref2 ATTRIBUTE_UNUSED, tree base2,
			       poly_int64 offset2, poly_int64 max_size2,
			       alias_set_type ref2_alias_set,
			       alias_set_type base2_alias_set, bool tbaa_p)
{
  tree ptr1;
  tree ptrtype1, dbase2;

  gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
			|| TREE_CODE (base1) == TARGET_MEM_REF)
		       && DECL_P (base2));

  ptr1 = TREE_OPERAND (base1, 0);
  poly_offset_int moff = mem_ref_offset (base1) << LOG2_BITS_PER_UNIT;

  /* If only one reference is based on a variable, they cannot alias if
     the pointer access is beyond the extent of the variable access.
     (the pointer base cannot validly point to an offset less than zero
     of the variable).
     ??? IVOPTs creates bases that do not honor this restriction,
     so do not apply this optimization for TARGET_MEM_REFs.  */
  if (TREE_CODE (base1) != TARGET_MEM_REF
      && !ranges_maybe_overlap_p (offset1 + moff, -1, offset2, max_size2))
    return false;
  /* They also cannot alias if the pointer may not point to the decl.  */
  if (!ptr_deref_may_alias_decl_p (ptr1, base2))
    return false;

  /* Disambiguations that rely on strict aliasing rules follow.  */
  if (!flag_strict_aliasing || !tbaa_p)
    return true;

  ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));

  /* If the alias set for a pointer access is zero all bets are off.  */
  if (base1_alias_set == 0)
    return true;

  /* When we are trying to disambiguate an access with a pointer dereference
     as base versus one with a decl as base we can use both the size
     of the decl and its dynamic type for extra disambiguation.
     ??? We do not know anything about the dynamic type of the decl
     other than that its alias-set contains base2_alias_set as a subset
     which does not help us here.  */
  /* As we know nothing useful about the dynamic type of the decl just
     use the usual conflict check rather than a subset test.
     ??? We could introduce -fvery-strict-aliasing when the language
     does not allow decls to have a dynamic type that differs from their
     static type.  Then we can check
     !alias_set_subset_of (base1_alias_set, base2_alias_set) instead.  */
  if (base1_alias_set != base2_alias_set
      && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
    return false;
  /* If the size of the access relevant for TBAA through the pointer
     is bigger than the size of the decl we can't possibly access the
     decl via that pointer.  */
  if (DECL_SIZE (base2) && COMPLETE_TYPE_P (TREE_TYPE (ptrtype1))
      && poly_int_tree_p (DECL_SIZE (base2))
      && poly_int_tree_p (TYPE_SIZE (TREE_TYPE (ptrtype1)))
      /* ??? This in turn may run afoul when a decl of type T which is
	 a member of union type U is accessed through a pointer to
	 type U and sizeof T is smaller than sizeof U.  */
      && TREE_CODE (TREE_TYPE (ptrtype1)) != UNION_TYPE
      && TREE_CODE (TREE_TYPE (ptrtype1)) != QUAL_UNION_TYPE
      && known_lt (wi::to_poly_widest (DECL_SIZE (base2)),
		   wi::to_poly_widest (TYPE_SIZE (TREE_TYPE (ptrtype1)))))
    return false;

  if (!ref2)
    return true;

  /* If the decl is accessed via a MEM_REF, reconstruct the base
     we can use for TBAA and an appropriately adjusted offset.  */
  dbase2 = ref2;
  while (handled_component_p (dbase2))
    dbase2 = TREE_OPERAND (dbase2, 0);
  poly_int64 doffset1 = offset1;
  poly_offset_int doffset2 = offset2;
  if (TREE_CODE (dbase2) == MEM_REF
      || TREE_CODE (dbase2) == TARGET_MEM_REF)
    doffset2 -= mem_ref_offset (dbase2) << LOG2_BITS_PER_UNIT;

  /* If either reference is view-converted, give up now.  */
  if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1
      || same_type_for_tbaa (TREE_TYPE (dbase2), TREE_TYPE (base2)) != 1)
    return true;

  /* If both references are through the same type, they do not alias
     if the accesses do not overlap.  This does extra disambiguation
     for mixed/pointer accesses but requires strict aliasing.
     For MEM_REFs we require that the component-ref offset we computed
     is relative to the start of the type which we ensure by
     comparing rvalue and access type and disregarding the constant
     pointer offset.  */
  if ((TREE_CODE (base1) != TARGET_MEM_REF
       || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
      && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (dbase2)) == 1)
    return ranges_maybe_overlap_p (doffset1, max_size1, doffset2, max_size2);

  if (ref1 && ref2
      && nonoverlapping_component_refs_p (ref1, ref2))
    return false;

  /* Do access-path based disambiguation.  */
  if (ref1 && ref2
      && (handled_component_p (ref1) || handled_component_p (ref2)))
    return aliasing_component_refs_p (ref1,
				      ref1_alias_set, base1_alias_set,
				      offset1, max_size1,
				      ref2,
				      ref2_alias_set, base2_alias_set,
				      offset2, max_size2, true);

  return true;
}

/* Return true if two indirect references based on *PTR1
   and *PTR2 constrained to [OFFSET1, OFFSET1 + MAX_SIZE1) and
   [OFFSET2, OFFSET2 + MAX_SIZE2) may alias.  *PTR1 and *PTR2 have
   the alias sets BASE1_ALIAS_SET and BASE2_ALIAS_SET which can be -1
   in which case they are computed on-demand.  REF1 and REF2
   if non-NULL are the complete memory reference trees.  */

static bool
indirect_refs_may_alias_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
			   poly_int64 offset1, poly_int64 max_size1,
			   alias_set_type ref1_alias_set,
			   alias_set_type base1_alias_set,
			   tree ref2 ATTRIBUTE_UNUSED, tree base2,
			   poly_int64 offset2, poly_int64 max_size2,
			   alias_set_type ref2_alias_set,
			   alias_set_type base2_alias_set, bool tbaa_p)
{
  tree ptr1;
  tree ptr2;
  tree ptrtype1, ptrtype2;

  gcc_checking_assert ((TREE_CODE (base1) == MEM_REF
			|| TREE_CODE (base1) == TARGET_MEM_REF)
		       && (TREE_CODE (base2) == MEM_REF
			   || TREE_CODE (base2) == TARGET_MEM_REF));

  ptr1 = TREE_OPERAND (base1, 0);
  ptr2 = TREE_OPERAND (base2, 0);

  /* If both bases are based on pointers they cannot alias if they may not
     point to the same memory object or if they point to the same object
     and the accesses do not overlap.  */
  if ((!cfun || gimple_in_ssa_p (cfun))
      && operand_equal_p (ptr1, ptr2, 0)
      && (((TREE_CODE (base1) != TARGET_MEM_REF
	    || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
	   && (TREE_CODE (base2) != TARGET_MEM_REF
	       || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2))))
	  || (TREE_CODE (base1) == TARGET_MEM_REF
	      && TREE_CODE (base2) == TARGET_MEM_REF
	      && (TMR_STEP (base1) == TMR_STEP (base2)
		  || (TMR_STEP (base1) && TMR_STEP (base2)
		      && operand_equal_p (TMR_STEP (base1),
					  TMR_STEP (base2), 0)))
	      && (TMR_INDEX (base1) == TMR_INDEX (base2)
		  || (TMR_INDEX (base1) && TMR_INDEX (base2)
		      && operand_equal_p (TMR_INDEX (base1),
					  TMR_INDEX (base2), 0)))
	      && (TMR_INDEX2 (base1) == TMR_INDEX2 (base2)
		  || (TMR_INDEX2 (base1) && TMR_INDEX2 (base2)
		      && operand_equal_p (TMR_INDEX2 (base1),
					  TMR_INDEX2 (base2), 0))))))
    {
      poly_offset_int moff1 = mem_ref_offset (base1) << LOG2_BITS_PER_UNIT;
      poly_offset_int moff2 = mem_ref_offset (base2) << LOG2_BITS_PER_UNIT;
      return ranges_maybe_overlap_p (offset1 + moff1, max_size1,
				     offset2 + moff2, max_size2);
    }
  if (!ptr_derefs_may_alias_p (ptr1, ptr2))
    return false;

  /* Disambiguations that rely on strict aliasing rules follow.  */
  if (!flag_strict_aliasing || !tbaa_p)
    return true;

  ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));
  ptrtype2 = TREE_TYPE (TREE_OPERAND (base2, 1));

  /* If the alias set for a pointer access is zero all bets are off.  */
  if (base1_alias_set == 0
      || base2_alias_set == 0)
    return true;

  /* If both references are through the same type, they do not alias
     if the accesses do not overlap.  This does extra disambiguation
     for mixed/pointer accesses but requires strict aliasing.  */
  if ((TREE_CODE (base1) != TARGET_MEM_REF
       || (!TMR_INDEX (base1) && !TMR_INDEX2 (base1)))
      && (TREE_CODE (base2) != TARGET_MEM_REF
	  || (!TMR_INDEX (base2) && !TMR_INDEX2 (base2)))
      && same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) == 1
      && same_type_for_tbaa (TREE_TYPE (base2), TREE_TYPE (ptrtype2)) == 1
      && same_type_for_tbaa (TREE_TYPE (ptrtype1),
			     TREE_TYPE (ptrtype2)) == 1
      /* But avoid treating arrays as "objects", instead assume they
	 can overlap by an exact multiple of their element size.  */
      && TREE_CODE (TREE_TYPE (ptrtype1)) != ARRAY_TYPE)
    return ranges_maybe_overlap_p (offset1, max_size1, offset2, max_size2);

  /* Do type-based disambiguation.  */
  if (base1_alias_set != base2_alias_set
      && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
    return false;

  /* If either reference is view-converted, give up now.  */
  if (same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) != 1
      || same_type_for_tbaa (TREE_TYPE (base2), TREE_TYPE (ptrtype2)) != 1)
    return true;

  if (ref1 && ref2
      && nonoverlapping_component_refs_p (ref1, ref2))
    return false;

  /* Do access-path based disambiguation.  */
  if (ref1 && ref2
      && (handled_component_p (ref1) || handled_component_p (ref2)))
    return aliasing_component_refs_p (ref1,
				      ref1_alias_set, base1_alias_set,
				      offset1, max_size1,
				      ref2,
				      ref2_alias_set, base2_alias_set,
				      offset2, max_size2, false);

  return true;
}

/* Return true if the two memory references REF1 and REF2 may alias.  */

bool
refs_may_alias_p_1 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
{
  tree base1, base2;
  poly_int64 offset1 = 0, offset2 = 0;
  poly_int64 max_size1 = -1, max_size2 = -1;
  bool var1_p, var2_p, ind1_p, ind2_p;

  gcc_checking_assert ((!ref1->ref
			|| TREE_CODE (ref1->ref) == SSA_NAME
			|| DECL_P (ref1->ref)
			|| TREE_CODE (ref1->ref) == STRING_CST
			|| handled_component_p (ref1->ref)
			|| TREE_CODE (ref1->ref) == MEM_REF
			|| TREE_CODE (ref1->ref) == TARGET_MEM_REF)
		       && (!ref2->ref
			   || TREE_CODE (ref2->ref) == SSA_NAME
			   || DECL_P (ref2->ref)
			   || TREE_CODE (ref2->ref) == STRING_CST
			   || handled_component_p (ref2->ref)
			   || TREE_CODE (ref2->ref) == MEM_REF
			   || TREE_CODE (ref2->ref) == TARGET_MEM_REF));

  /* Decompose the references into their base objects and the access.  */
  base1 = ao_ref_base (ref1);
  offset1 = ref1->offset;
  max_size1 = ref1->max_size;
  base2 = ao_ref_base (ref2);
  offset2 = ref2->offset;
  max_size2 = ref2->max_size;

  /* We can end up with registers or constants as bases for example from
     *D.1663_44 = VIEW_CONVERT_EXPR<struct DB_LSN>(__tmp$B0F64_59);
     which is seen as a struct copy.  */
  if (TREE_CODE (base1) == SSA_NAME
      || TREE_CODE (base1) == CONST_DECL
      || TREE_CODE (base1) == CONSTRUCTOR
      || TREE_CODE (base1) == ADDR_EXPR
      || CONSTANT_CLASS_P (base1)
      || TREE_CODE (base2) == SSA_NAME
      || TREE_CODE (base2) == CONST_DECL
      || TREE_CODE (base2) == CONSTRUCTOR
      || TREE_CODE (base2) == ADDR_EXPR
      || CONSTANT_CLASS_P (base2))
    return false;

  /* We can end up referring to code via function and label decls.
     As we likely do not properly track code aliases conservatively
     bail out.  */
  if (TREE_CODE (base1) == FUNCTION_DECL
      || TREE_CODE (base1) == LABEL_DECL
      || TREE_CODE (base2) == FUNCTION_DECL
      || TREE_CODE (base2) == LABEL_DECL)
    return true;

  /* Two volatile accesses always conflict.  */
  if (ref1->volatile_p
      && ref2->volatile_p)
    return true;

  /* Defer to simple offset based disambiguation if we have
     references based on two decls.  Do this before deferring to
     TBAA to handle must-alias cases in conformance with the
     GCC extension of allowing type-punning through unions.  */
  var1_p = DECL_P (base1);
  var2_p = DECL_P (base2);
  if (var1_p && var2_p)
    return decl_refs_may_alias_p (ref1->ref, base1, offset1, max_size1,
				  ref2->ref, base2, offset2, max_size2);

  /* Handle restrict based accesses.
     ??? ao_ref_base strips inner MEM_REF [&decl], recover from that
     here.  */
  tree rbase1 = base1;
  tree rbase2 = base2;
  if (var1_p)
    {
      rbase1 = ref1->ref;
      if (rbase1)
	while (handled_component_p (rbase1))
	  rbase1 = TREE_OPERAND (rbase1, 0);
    }
  if (var2_p)
    {
      rbase2 = ref2->ref;
      if (rbase2)
	while (handled_component_p (rbase2))
	  rbase2 = TREE_OPERAND (rbase2, 0);
    }
  if (rbase1 && rbase2
      && (TREE_CODE (rbase1) == MEM_REF || TREE_CODE (rbase1) == TARGET_MEM_REF)
      && (TREE_CODE (rbase2) == MEM_REF || TREE_CODE (rbase2) == TARGET_MEM_REF)
      /* If the accesses are in the same restrict clique... */
      && MR_DEPENDENCE_CLIQUE (rbase1) == MR_DEPENDENCE_CLIQUE (rbase2)
      /* But based on different pointers they do not alias.  */
      && MR_DEPENDENCE_BASE (rbase1) != MR_DEPENDENCE_BASE (rbase2))
    return false;

  ind1_p = (TREE_CODE (base1) == MEM_REF
	    || TREE_CODE (base1) == TARGET_MEM_REF);
  ind2_p = (TREE_CODE (base2) == MEM_REF
	    || TREE_CODE (base2) == TARGET_MEM_REF);

  /* Canonicalize the pointer-vs-decl case.  */
  if (ind1_p && var2_p)
    {
      std::swap (offset1, offset2);
      std::swap (max_size1, max_size2);
      std::swap (base1, base2);
      std::swap (ref1, ref2);
      var1_p = true;
      ind1_p = false;
      var2_p = false;
      ind2_p = true;
    }

  /* First defer to TBAA if possible.  */
  if (tbaa_p
      && flag_strict_aliasing
      && !alias_sets_conflict_p (ao_ref_alias_set (ref1),
				 ao_ref_alias_set (ref2)))
    return false;

  /* Dispatch to the pointer-vs-decl or pointer-vs-pointer disambiguators.  */
  if (var1_p && ind2_p)
    return indirect_ref_may_alias_decl_p (ref2->ref, base2,
					  offset2, max_size2,
					  ao_ref_alias_set (ref2),
					  ao_ref_base_alias_set (ref2),
					  ref1->ref, base1,
					  offset1, max_size1,
					  ao_ref_alias_set (ref1),
					  ao_ref_base_alias_set (ref1),
					  tbaa_p);
  else if (ind1_p && ind2_p)
    return indirect_refs_may_alias_p (ref1->ref, base1,
				      offset1, max_size1,
				      ao_ref_alias_set (ref1),
				      ao_ref_base_alias_set (ref1),
				      ref2->ref, base2,
				      offset2, max_size2,
				      ao_ref_alias_set (ref2),
				      ao_ref_base_alias_set (ref2),
				      tbaa_p);

  gcc_unreachable ();
}

static bool
refs_may_alias_p (tree ref1, ao_ref *ref2)
{
  ao_ref r1;
  ao_ref_init (&r1, ref1);
  return refs_may_alias_p_1 (&r1, ref2, true);
}

bool
refs_may_alias_p (tree ref1, tree ref2)
{
  ao_ref r1, r2;
  bool res;
  ao_ref_init (&r1, ref1);
  ao_ref_init (&r2, ref2);
  res = refs_may_alias_p_1 (&r1, &r2, true);
  if (res)
    ++alias_stats.refs_may_alias_p_may_alias;
  else
    ++alias_stats.refs_may_alias_p_no_alias;
  return res;
}
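
/* Example usage sketch (illustrative only), for two reference trees
   REF1 and REF2 in scope:

     if (!refs_may_alias_p (ref1, ref2))
       ... accesses through REF1 and REF2 are independent and may be
	   reordered ...

   The result also feeds the statistics reported by dump_alias_stats
   above.  */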

/* Returns true if there is an anti-dependence for the STORE that
   executes after the LOAD.  */

bool
refs_anti_dependent_p (tree load, tree store)
{
  ao_ref r1, r2;
  ao_ref_init (&r1, load);
  ao_ref_init (&r2, store);
  return refs_may_alias_p_1 (&r1, &r2, false);
}

/* Returns true if there is an output dependence for the stores
   STORE1 and STORE2.  */

bool
refs_output_dependent_p (tree store1, tree store2)
{
  ao_ref r1, r2;
  ao_ref_init (&r1, store1);
  ao_ref_init (&r2, store2);
  return refs_may_alias_p_1 (&r1, &r2, false);
}
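
/* Example (illustrative): for the statement pair

     x = *p;	the LOAD
     *q = y;	the STORE executing after it

   refs_anti_dependent_p (*p, *q) asks whether the store may clobber
   the loaded memory; refs_output_dependent_p likewise asks whether
   two stores may hit the same memory.  Both pass tbaa_p = false: a
   store may validly change the dynamic (effective) type of a memory
   location, so type-based rules are only safe for true (read-after-
   write) dependences.  */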
1558 /* If the call CALL may use the memory reference REF return true,
1559 otherwise return false. */
1561 static bool
1562 ref_maybe_used_by_call_p_1 (gcall *call, ao_ref *ref)
1564 tree base, callee;
1565 unsigned i;
1566 int flags = gimple_call_flags (call);
1568 /* Const functions without a static chain do not implicitly use memory. */
1569 if (!gimple_call_chain (call)
1570 && (flags & (ECF_CONST|ECF_NOVOPS)))
1571 goto process_args;
1573 base = ao_ref_base (ref);
1574 if (!base)
1575 return true;
1577 /* A call that is not without side-effects might involve volatile
1578 accesses and thus conflicts with all other volatile accesses. */
1579 if (ref->volatile_p)
1580 return true;
1582 /* If the reference is based on a decl that is not aliased the call
1583 cannot possibly use it. */
1584 if (DECL_P (base)
1585 && !may_be_aliased (base)
1586 /* But local statics can be used through recursion. */
1587 && !is_global_var (base))
1588 goto process_args;
1590 callee = gimple_call_fndecl (call);
1592 /* Handle those builtin functions explicitly that do not act as
1593 escape points. See tree-ssa-structalias.c:find_func_aliases
1594 for the list of builtins we might need to handle here. */
1595 if (callee != NULL_TREE
1596 && gimple_call_builtin_p (call, BUILT_IN_NORMAL))
1597 switch (DECL_FUNCTION_CODE (callee))
1599 /* All the following functions read memory pointed to by
1600 their second argument. strcat/strncat additionally
1601 reads memory pointed to by the first argument. */
1602 case BUILT_IN_STRCAT:
1603 case BUILT_IN_STRNCAT:
1605 ao_ref dref;
1606 ao_ref_init_from_ptr_and_size (&dref,
1607 gimple_call_arg (call, 0),
1608 NULL_TREE);
1609 if (refs_may_alias_p_1 (&dref, ref, false))
1610 return true;
1612 /* FALLTHRU */
1613 case BUILT_IN_STRCPY:
1614 case BUILT_IN_STRNCPY:
1615 case BUILT_IN_MEMCPY:
1616 case BUILT_IN_MEMMOVE:
1617 case BUILT_IN_MEMPCPY:
1618 case BUILT_IN_STPCPY:
1619 case BUILT_IN_STPNCPY:
1620 case BUILT_IN_TM_MEMCPY:
1621 case BUILT_IN_TM_MEMMOVE:
1623 ao_ref dref;
1624 tree size = NULL_TREE;
1625 if (gimple_call_num_args (call) == 3)
1626 size = gimple_call_arg (call, 2);
1627 ao_ref_init_from_ptr_and_size (&dref,
1628 gimple_call_arg (call, 1),
1629 size);
1630 return refs_may_alias_p_1 (&dref, ref, false);
1632 case BUILT_IN_STRCAT_CHK:
1633 case BUILT_IN_STRNCAT_CHK:
1635 ao_ref dref;
1636 ao_ref_init_from_ptr_and_size (&dref,
1637 gimple_call_arg (call, 0),
1638 NULL_TREE);
1639 if (refs_may_alias_p_1 (&dref, ref, false))
1640 return true;
1642 /* FALLTHRU */
1643 case BUILT_IN_STRCPY_CHK:
1644 case BUILT_IN_STRNCPY_CHK:
1645 case BUILT_IN_MEMCPY_CHK:
1646 case BUILT_IN_MEMMOVE_CHK:
1647 case BUILT_IN_MEMPCPY_CHK:
1648 case BUILT_IN_STPCPY_CHK:
1649 case BUILT_IN_STPNCPY_CHK:
1651 ao_ref dref;
1652 tree size = NULL_TREE;
1653 if (gimple_call_num_args (call) == 4)
1654 size = gimple_call_arg (call, 2);
1655 ao_ref_init_from_ptr_and_size (&dref,
1656 gimple_call_arg (call, 1),
1657 size);
1658 return refs_may_alias_p_1 (&dref, ref, false);
1660 case BUILT_IN_BCOPY:
1662 ao_ref dref;
1663 tree size = gimple_call_arg (call, 2);
1664 ao_ref_init_from_ptr_and_size (&dref,
1665 gimple_call_arg (call, 0),
1666 size);
1667 return refs_may_alias_p_1 (&dref, ref, false);
1670 /* The following functions read memory pointed to by their
1671 first argument. */
1672 CASE_BUILT_IN_TM_LOAD (1):
1673 CASE_BUILT_IN_TM_LOAD (2):
1674 CASE_BUILT_IN_TM_LOAD (4):
1675 CASE_BUILT_IN_TM_LOAD (8):
1676 CASE_BUILT_IN_TM_LOAD (FLOAT):
1677 CASE_BUILT_IN_TM_LOAD (DOUBLE):
1678 CASE_BUILT_IN_TM_LOAD (LDOUBLE):
1679 CASE_BUILT_IN_TM_LOAD (M64):
1680 CASE_BUILT_IN_TM_LOAD (M128):
1681 CASE_BUILT_IN_TM_LOAD (M256):
1682 case BUILT_IN_TM_LOG:
1683 case BUILT_IN_TM_LOG_1:
1684 case BUILT_IN_TM_LOG_2:
1685 case BUILT_IN_TM_LOG_4:
1686 case BUILT_IN_TM_LOG_8:
1687 case BUILT_IN_TM_LOG_FLOAT:
1688 case BUILT_IN_TM_LOG_DOUBLE:
1689 case BUILT_IN_TM_LOG_LDOUBLE:
1690 case BUILT_IN_TM_LOG_M64:
1691 case BUILT_IN_TM_LOG_M128:
1692 case BUILT_IN_TM_LOG_M256:
1693 return ptr_deref_may_alias_ref_p_1 (gimple_call_arg (call, 0), ref);
1695 /* These read memory pointed to by the first argument. */
1696 case BUILT_IN_STRDUP:
1697 case BUILT_IN_STRNDUP:
1698 case BUILT_IN_REALLOC:
1700 ao_ref dref;
1701 tree size = NULL_TREE;
1702 if (gimple_call_num_args (call) == 2)
1703 size = gimple_call_arg (call, 1);
1704 ao_ref_init_from_ptr_and_size (&dref,
1705 gimple_call_arg (call, 0),
1706 size);
1707 return refs_may_alias_p_1 (&dref, ref, false);
1709 /* These read memory pointed to by the first argument. */
1710 case BUILT_IN_INDEX:
1711 case BUILT_IN_STRCHR:
1712 case BUILT_IN_STRRCHR:
1714 ao_ref dref;
1715 ao_ref_init_from_ptr_and_size (&dref,
1716 gimple_call_arg (call, 0),
1717 NULL_TREE);
1718 return refs_may_alias_p_1 (&dref, ref, false);
1720 /* These read memory pointed to by the first argument with size
1721 in the third argument. */
1722 case BUILT_IN_MEMCHR:
1724 ao_ref dref;
1725 ao_ref_init_from_ptr_and_size (&dref,
1726 gimple_call_arg (call, 0),
1727 gimple_call_arg (call, 2));
1728 return refs_may_alias_p_1 (&dref, ref, false);
1730 /* These read memory pointed to by the first and second arguments. */
1731 case BUILT_IN_STRSTR:
1732 case BUILT_IN_STRPBRK:
1734 ao_ref dref;
1735 ao_ref_init_from_ptr_and_size (&dref,
1736 gimple_call_arg (call, 0),
1737 NULL_TREE);
1738 if (refs_may_alias_p_1 (&dref, ref, false))
1739 return true;
1740 ao_ref_init_from_ptr_and_size (&dref,
1741 gimple_call_arg (call, 1),
1742 NULL_TREE);
1743 return refs_may_alias_p_1 (&dref, ref, false);
1746 /* The following builtins do not read from memory. */
1747 case BUILT_IN_FREE:
1748 case BUILT_IN_MALLOC:
1749 case BUILT_IN_POSIX_MEMALIGN:
1750 case BUILT_IN_ALIGNED_ALLOC:
1751 case BUILT_IN_CALLOC:
1752 CASE_BUILT_IN_ALLOCA:
1753 case BUILT_IN_STACK_SAVE:
1754 case BUILT_IN_STACK_RESTORE:
1755 case BUILT_IN_MEMSET:
1756 case BUILT_IN_TM_MEMSET:
1757 case BUILT_IN_MEMSET_CHK:
1758 case BUILT_IN_FREXP:
1759 case BUILT_IN_FREXPF:
1760 case BUILT_IN_FREXPL:
1761 case BUILT_IN_GAMMA_R:
1762 case BUILT_IN_GAMMAF_R:
1763 case BUILT_IN_GAMMAL_R:
1764 case BUILT_IN_LGAMMA_R:
1765 case BUILT_IN_LGAMMAF_R:
1766 case BUILT_IN_LGAMMAL_R:
1767 case BUILT_IN_MODF:
1768 case BUILT_IN_MODFF:
1769 case BUILT_IN_MODFL:
1770 case BUILT_IN_REMQUO:
1771 case BUILT_IN_REMQUOF:
1772 case BUILT_IN_REMQUOL:
1773 case BUILT_IN_SINCOS:
1774 case BUILT_IN_SINCOSF:
1775 case BUILT_IN_SINCOSL:
1776 case BUILT_IN_ASSUME_ALIGNED:
1777 case BUILT_IN_VA_END:
1778 return false;
1779 /* __sync_* builtins and some OpenMP builtins act as threading
1780 barriers. */
1781 #undef DEF_SYNC_BUILTIN
1782 #define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
1783 #include "sync-builtins.def"
1784 #undef DEF_SYNC_BUILTIN
1785 case BUILT_IN_GOMP_ATOMIC_START:
1786 case BUILT_IN_GOMP_ATOMIC_END:
1787 case BUILT_IN_GOMP_BARRIER:
1788 case BUILT_IN_GOMP_BARRIER_CANCEL:
1789 case BUILT_IN_GOMP_TASKWAIT:
1790 case BUILT_IN_GOMP_TASKGROUP_END:
1791 case BUILT_IN_GOMP_CRITICAL_START:
1792 case BUILT_IN_GOMP_CRITICAL_END:
1793 case BUILT_IN_GOMP_CRITICAL_NAME_START:
1794 case BUILT_IN_GOMP_CRITICAL_NAME_END:
1795 case BUILT_IN_GOMP_LOOP_END:
1796 case BUILT_IN_GOMP_LOOP_END_CANCEL:
1797 case BUILT_IN_GOMP_ORDERED_START:
1798 case BUILT_IN_GOMP_ORDERED_END:
1799 case BUILT_IN_GOMP_SECTIONS_END:
1800 case BUILT_IN_GOMP_SECTIONS_END_CANCEL:
1801 case BUILT_IN_GOMP_SINGLE_COPY_START:
1802 case BUILT_IN_GOMP_SINGLE_COPY_END:
1803 return true;
1805 default:
1806 /* Fallthru to general call handling. */;
1809 /* Check if base is a global static variable that is not read
1810 by the function. */
1811 if (callee != NULL_TREE && VAR_P (base) && TREE_STATIC (base))
1813 struct cgraph_node *node = cgraph_node::get (callee);
1814 bitmap not_read;
1816 /* FIXME: Callee can be an OMP builtin that does not have a call graph
1817 node yet. We should enforce that there are nodes for all decls in the
1818 IL and remove this check instead. */
1819 if (node
1820 && (not_read = ipa_reference_get_not_read_global (node))
1821 && bitmap_bit_p (not_read, ipa_reference_var_uid (base)))
1822 goto process_args;
1825 /* Check if the base variable is call-used. */
1826 if (DECL_P (base))
1827 {
1828 if (pt_solution_includes (gimple_call_use_set (call), base))
1829 return true;
1830 }
1831 else if ((TREE_CODE (base) == MEM_REF
1832 || TREE_CODE (base) == TARGET_MEM_REF)
1833 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
1834 {
1835 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
1836 if (!pi)
1837 return true;
1839 if (pt_solutions_intersect (gimple_call_use_set (call), &pi->pt))
1840 return true;
1841 }
1842 else
1843 return true;
1845 /* Inspect call arguments for passed-by-value aliases. */
1846 process_args:
1847 for (i = 0; i < gimple_call_num_args (call); ++i)
1848 {
1849 tree op = gimple_call_arg (call, i);
1850 int flags = gimple_call_arg_flags (call, i);
1852 if (flags & EAF_UNUSED)
1853 continue;
1855 if (TREE_CODE (op) == WITH_SIZE_EXPR)
1856 op = TREE_OPERAND (op, 0);
1858 if (TREE_CODE (op) != SSA_NAME
1859 && !is_gimple_min_invariant (op))
1860 {
1861 ao_ref r;
1862 ao_ref_init (&r, op);
1863 if (refs_may_alias_p_1 (&r, ref, true))
1864 return true;
1865 }
1866 }
1868 return false;
1869 }
1871 static bool
1872 ref_maybe_used_by_call_p (gcall *call, ao_ref *ref)
1873 {
1874 bool res;
1875 res = ref_maybe_used_by_call_p_1 (call, ref);
1876 if (res)
1877 ++alias_stats.ref_maybe_used_by_call_p_may_alias;
1878 else
1879 ++alias_stats.ref_maybe_used_by_call_p_no_alias;
1880 return res;
1881 }
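/* Editorial sketch, not part of the original file: the typical query
   pattern against the call-use oracle above.  The helper name is
   hypothetical; ao_ref_init and ref_maybe_used_by_call_p are the
   functions defined in this file.  */

static bool
example_call_reads_ref_p (gcall *call, tree ref)
{
  ao_ref r;
  /* Wrap the reference tree in an ao_ref so the oracle can compute and
     cache base/offset/size information.  */
  ao_ref_init (&r, ref);
  return ref_maybe_used_by_call_p (call, &r);
}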
1884 /* If the statement STMT may use the memory reference REF return
1885 true, otherwise return false. */
1887 bool
1888 ref_maybe_used_by_stmt_p (gimple *stmt, ao_ref *ref)
1889 {
1890 if (is_gimple_assign (stmt))
1891 {
1892 tree rhs;
1894 /* All memory assign statements are single. */
1895 if (!gimple_assign_single_p (stmt))
1896 return false;
1898 rhs = gimple_assign_rhs1 (stmt);
1899 if (is_gimple_reg (rhs)
1900 || is_gimple_min_invariant (rhs)
1901 || gimple_assign_rhs_code (stmt) == CONSTRUCTOR)
1902 return false;
1904 return refs_may_alias_p (rhs, ref);
1905 }
1906 else if (is_gimple_call (stmt))
1907 return ref_maybe_used_by_call_p (as_a <gcall *> (stmt), ref);
1908 else if (greturn *return_stmt = dyn_cast <greturn *> (stmt))
1909 {
1910 tree retval = gimple_return_retval (return_stmt);
1911 if (retval
1912 && TREE_CODE (retval) != SSA_NAME
1913 && !is_gimple_min_invariant (retval)
1914 && refs_may_alias_p (retval, ref))
1915 return true;
1916 /* If ref escapes the function then the return acts as a use. */
1917 tree base = ao_ref_base (ref);
1918 if (!base)
1919 ;
1920 else if (DECL_P (base))
1921 return is_global_var (base);
1922 else if (TREE_CODE (base) == MEM_REF
1923 || TREE_CODE (base) == TARGET_MEM_REF)
1924 return ptr_deref_may_alias_global_p (TREE_OPERAND (base, 0));
1925 return false;
1926 }
1928 return true;
1929 }
1931 bool
1932 ref_maybe_used_by_stmt_p (gimple *stmt, tree ref)
1933 {
1934 ao_ref r;
1935 ao_ref_init (&r, ref);
1936 return ref_maybe_used_by_stmt_p (stmt, &r);
1937 }
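/* Editorial sketch (hypothetical helper): a full may-dependence test
   between a statement and a reference combines the use oracle above
   with the clobber oracle defined further below; both declarations
   come from tree-ssa-alias.h, which is already in scope here.  */

static bool
example_stmt_may_touch_ref_p (gimple *stmt, tree ref)
{
  return (ref_maybe_used_by_stmt_p (stmt, ref)
          || stmt_may_clobber_ref_p (stmt, ref));
}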
1939 /* If the call in statement CALL may clobber the memory reference REF
1940 return true, otherwise return false. */
1942 bool
1943 call_may_clobber_ref_p_1 (gcall *call, ao_ref *ref)
1944 {
1945 tree base;
1946 tree callee;
1948 /* If the call is pure or const it cannot clobber anything. */
1949 if (gimple_call_flags (call)
1950 & (ECF_PURE|ECF_CONST|ECF_LOOPING_CONST_OR_PURE|ECF_NOVOPS))
1951 return false;
1952 if (gimple_call_internal_p (call))
1953 switch (gimple_call_internal_fn (call))
1954 {
1955 /* Treat these internal calls like ECF_PURE for aliasing,
1956 they don't write to any memory the program should care about.
1957 They have important other side-effects, and read memory,
1958 so can't be ECF_NOVOPS. */
1959 case IFN_UBSAN_NULL:
1960 case IFN_UBSAN_BOUNDS:
1961 case IFN_UBSAN_VPTR:
1962 case IFN_UBSAN_OBJECT_SIZE:
1963 case IFN_UBSAN_PTR:
1964 case IFN_ASAN_CHECK:
1965 return false;
1966 default:
1967 break;
1968 }
1970 base = ao_ref_base (ref);
1971 if (!base)
1972 return true;
1974 if (TREE_CODE (base) == SSA_NAME
1975 || CONSTANT_CLASS_P (base))
1976 return false;
1978 /* A call that is not without side-effects might involve volatile
1979 accesses and thus conflicts with all other volatile accesses. */
1980 if (ref->volatile_p)
1981 return true;
1983 /* If the reference is based on a decl that is not aliased the call
1984 cannot possibly clobber it. */
1985 if (DECL_P (base)
1986 && !may_be_aliased (base)
1987 /* But local non-readonly statics can be modified through recursion
1988 or the call may implement a threading barrier which we must
1989 treat as may-def. */
1990 && (TREE_READONLY (base)
1991 || !is_global_var (base)))
1992 return false;
1994 callee = gimple_call_fndecl (call);
1996 /* Handle those builtin functions explicitly that do not act as
1997 escape points. See tree-ssa-structalias.c:find_func_aliases
1998 for the list of builtins we might need to handle here. */
1999 if (callee != NULL_TREE
2000 && gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2001 switch (DECL_FUNCTION_CODE (callee))
2002 {
2003 /* All the following functions clobber memory pointed to by
2004 their first argument. */
2005 case BUILT_IN_STRCPY:
2006 case BUILT_IN_STRNCPY:
2007 case BUILT_IN_MEMCPY:
2008 case BUILT_IN_MEMMOVE:
2009 case BUILT_IN_MEMPCPY:
2010 case BUILT_IN_STPCPY:
2011 case BUILT_IN_STPNCPY:
2012 case BUILT_IN_STRCAT:
2013 case BUILT_IN_STRNCAT:
2014 case BUILT_IN_MEMSET:
2015 case BUILT_IN_TM_MEMSET:
2016 CASE_BUILT_IN_TM_STORE (1):
2017 CASE_BUILT_IN_TM_STORE (2):
2018 CASE_BUILT_IN_TM_STORE (4):
2019 CASE_BUILT_IN_TM_STORE (8):
2020 CASE_BUILT_IN_TM_STORE (FLOAT):
2021 CASE_BUILT_IN_TM_STORE (DOUBLE):
2022 CASE_BUILT_IN_TM_STORE (LDOUBLE):
2023 CASE_BUILT_IN_TM_STORE (M64):
2024 CASE_BUILT_IN_TM_STORE (M128):
2025 CASE_BUILT_IN_TM_STORE (M256):
2026 case BUILT_IN_TM_MEMCPY:
2027 case BUILT_IN_TM_MEMMOVE:
2028 {
2029 ao_ref dref;
2030 tree size = NULL_TREE;
2031 /* Don't pass in size for strncat, as the maximum size
2032 is strlen (dest) + n + 1 instead of n, resp.
2033 n + 1 at dest + strlen (dest), but strlen (dest) isn't
2034 known. */
2035 if (gimple_call_num_args (call) == 3
2036 && DECL_FUNCTION_CODE (callee) != BUILT_IN_STRNCAT)
2037 size = gimple_call_arg (call, 2);
2038 ao_ref_init_from_ptr_and_size (&dref,
2039 gimple_call_arg (call, 0),
2040 size);
2041 return refs_may_alias_p_1 (&dref, ref, false);
2042 }
2043 case BUILT_IN_STRCPY_CHK:
2044 case BUILT_IN_STRNCPY_CHK:
2045 case BUILT_IN_MEMCPY_CHK:
2046 case BUILT_IN_MEMMOVE_CHK:
2047 case BUILT_IN_MEMPCPY_CHK:
2048 case BUILT_IN_STPCPY_CHK:
2049 case BUILT_IN_STPNCPY_CHK:
2050 case BUILT_IN_STRCAT_CHK:
2051 case BUILT_IN_STRNCAT_CHK:
2052 case BUILT_IN_MEMSET_CHK:
2053 {
2054 ao_ref dref;
2055 tree size = NULL_TREE;
2056 /* Don't pass in size for __strncat_chk, as the maximum size
2057 is strlen (dest) + n + 1 instead of n, resp.
2058 n + 1 at dest + strlen (dest), but strlen (dest) isn't
2059 known. */
2060 if (gimple_call_num_args (call) == 4
2061 && DECL_FUNCTION_CODE (callee) != BUILT_IN_STRNCAT_CHK)
2062 size = gimple_call_arg (call, 2);
2063 ao_ref_init_from_ptr_and_size (&dref,
2064 gimple_call_arg (call, 0),
2065 size);
2066 return refs_may_alias_p_1 (&dref, ref, false);
2067 }
2068 case BUILT_IN_BCOPY:
2069 {
2070 ao_ref dref;
2071 tree size = gimple_call_arg (call, 2);
2072 ao_ref_init_from_ptr_and_size (&dref,
2073 gimple_call_arg (call, 1),
2074 size);
2075 return refs_may_alias_p_1 (&dref, ref, false);
2076 }
2077 /* Allocating memory does not have any side-effects apart from
2078 being the definition point for the pointer. */
2079 case BUILT_IN_MALLOC:
2080 case BUILT_IN_ALIGNED_ALLOC:
2081 case BUILT_IN_CALLOC:
2082 case BUILT_IN_STRDUP:
2083 case BUILT_IN_STRNDUP:
2084 /* Unix98 specifies that errno is set on allocation failure. */
2085 if (flag_errno_math
2086 && targetm.ref_may_alias_errno (ref))
2087 return true;
2088 return false;
2089 case BUILT_IN_STACK_SAVE:
2090 CASE_BUILT_IN_ALLOCA:
2091 case BUILT_IN_ASSUME_ALIGNED:
2092 return false;
2093 /* But posix_memalign stores a pointer into the memory pointed to
2094 by its first argument. */
2095 case BUILT_IN_POSIX_MEMALIGN:
2096 {
2097 tree ptrptr = gimple_call_arg (call, 0);
2098 ao_ref dref;
2099 ao_ref_init_from_ptr_and_size (&dref, ptrptr,
2100 TYPE_SIZE_UNIT (ptr_type_node));
2101 return (refs_may_alias_p_1 (&dref, ref, false)
2102 || (flag_errno_math
2103 && targetm.ref_may_alias_errno (ref)));
2104 }
2105 /* Freeing memory kills the pointed-to memory. More importantly
2106 the call has to serve as a barrier for moving loads and stores
2107 across it. */
2108 case BUILT_IN_FREE:
2109 case BUILT_IN_VA_END:
2110 {
2111 tree ptr = gimple_call_arg (call, 0);
2112 return ptr_deref_may_alias_ref_p_1 (ptr, ref);
2113 }
2114 /* Realloc serves both as allocation point and deallocation point. */
2115 case BUILT_IN_REALLOC:
2116 {
2117 tree ptr = gimple_call_arg (call, 0);
2118 /* Unix98 specifies that errno is set on allocation failure. */
2119 return ((flag_errno_math
2120 && targetm.ref_may_alias_errno (ref))
2121 || ptr_deref_may_alias_ref_p_1 (ptr, ref));
2122 }
2123 case BUILT_IN_GAMMA_R:
2124 case BUILT_IN_GAMMAF_R:
2125 case BUILT_IN_GAMMAL_R:
2126 case BUILT_IN_LGAMMA_R:
2127 case BUILT_IN_LGAMMAF_R:
2128 case BUILT_IN_LGAMMAL_R:
2129 {
2130 tree out = gimple_call_arg (call, 1);
2131 if (ptr_deref_may_alias_ref_p_1 (out, ref))
2132 return true;
2133 if (flag_errno_math)
2134 break;
2135 return false;
2136 }
2137 case BUILT_IN_FREXP:
2138 case BUILT_IN_FREXPF:
2139 case BUILT_IN_FREXPL:
2140 case BUILT_IN_MODF:
2141 case BUILT_IN_MODFF:
2142 case BUILT_IN_MODFL:
2143 {
2144 tree out = gimple_call_arg (call, 1);
2145 return ptr_deref_may_alias_ref_p_1 (out, ref);
2146 }
2147 case BUILT_IN_REMQUO:
2148 case BUILT_IN_REMQUOF:
2149 case BUILT_IN_REMQUOL:
2150 {
2151 tree out = gimple_call_arg (call, 2);
2152 if (ptr_deref_may_alias_ref_p_1 (out, ref))
2153 return true;
2154 if (flag_errno_math)
2155 break;
2156 return false;
2157 }
2158 case BUILT_IN_SINCOS:
2159 case BUILT_IN_SINCOSF:
2160 case BUILT_IN_SINCOSL:
2161 {
2162 tree sin = gimple_call_arg (call, 1);
2163 tree cos = gimple_call_arg (call, 2);
2164 return (ptr_deref_may_alias_ref_p_1 (sin, ref)
2165 || ptr_deref_may_alias_ref_p_1 (cos, ref));
2166 }
2167 /* __sync_* builtins and some OpenMP builtins act as threading
2168 barriers. */
2169 #undef DEF_SYNC_BUILTIN
2170 #define DEF_SYNC_BUILTIN(ENUM, NAME, TYPE, ATTRS) case ENUM:
2171 #include "sync-builtins.def"
2172 #undef DEF_SYNC_BUILTIN
2173 case BUILT_IN_GOMP_ATOMIC_START:
2174 case BUILT_IN_GOMP_ATOMIC_END:
2175 case BUILT_IN_GOMP_BARRIER:
2176 case BUILT_IN_GOMP_BARRIER_CANCEL:
2177 case BUILT_IN_GOMP_TASKWAIT:
2178 case BUILT_IN_GOMP_TASKGROUP_END:
2179 case BUILT_IN_GOMP_CRITICAL_START:
2180 case BUILT_IN_GOMP_CRITICAL_END:
2181 case BUILT_IN_GOMP_CRITICAL_NAME_START:
2182 case BUILT_IN_GOMP_CRITICAL_NAME_END:
2183 case BUILT_IN_GOMP_LOOP_END:
2184 case BUILT_IN_GOMP_LOOP_END_CANCEL:
2185 case BUILT_IN_GOMP_ORDERED_START:
2186 case BUILT_IN_GOMP_ORDERED_END:
2187 case BUILT_IN_GOMP_SECTIONS_END:
2188 case BUILT_IN_GOMP_SECTIONS_END_CANCEL:
2189 case BUILT_IN_GOMP_SINGLE_COPY_START:
2190 case BUILT_IN_GOMP_SINGLE_COPY_END:
2191 return true;
2192 default:
2193 /* Fallthru to general call handling. */;
2194 }
2196 /* Check if base is a global static variable that is not written
2197 by the function. */
2198 if (callee != NULL_TREE && VAR_P (base) && TREE_STATIC (base))
2199 {
2200 struct cgraph_node *node = cgraph_node::get (callee);
2201 bitmap not_written;
2203 if (node
2204 && (not_written = ipa_reference_get_not_written_global (node))
2205 && bitmap_bit_p (not_written, ipa_reference_var_uid (base)))
2206 return false;
2207 }
2209 /* Check if the base variable is call-clobbered. */
2210 if (DECL_P (base))
2211 return pt_solution_includes (gimple_call_clobber_set (call), base);
2212 else if ((TREE_CODE (base) == MEM_REF
2213 || TREE_CODE (base) == TARGET_MEM_REF)
2214 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
2215 {
2216 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
2217 if (!pi)
2218 return true;
2220 return pt_solutions_intersect (gimple_call_clobber_set (call), &pi->pt);
2221 }
2223 return true;
2224 }
2226 /* If the call in statement CALL may clobber the memory reference REF
2227 return true, otherwise return false. */
2229 bool
2230 call_may_clobber_ref_p (gcall *call, tree ref)
2231 {
2232 bool res;
2233 ao_ref r;
2234 ao_ref_init (&r, ref);
2235 res = call_may_clobber_ref_p_1 (call, &r);
2236 if (res)
2237 ++alias_stats.call_may_clobber_ref_p_may_alias;
2238 else
2239 ++alias_stats.call_may_clobber_ref_p_no_alias;
2240 return res;
2241 }
2244 /* If the statement STMT may clobber the memory reference REF return true,
2245 otherwise return false. */
2247 bool
2248 stmt_may_clobber_ref_p_1 (gimple *stmt, ao_ref *ref)
2249 {
2250 if (is_gimple_call (stmt))
2251 {
2252 tree lhs = gimple_call_lhs (stmt);
2253 if (lhs
2254 && TREE_CODE (lhs) != SSA_NAME)
2255 {
2256 ao_ref r;
2257 ao_ref_init (&r, lhs);
2258 if (refs_may_alias_p_1 (ref, &r, true))
2259 return true;
2260 }
2262 return call_may_clobber_ref_p_1 (as_a <gcall *> (stmt), ref);
2263 }
2264 else if (gimple_assign_single_p (stmt))
2265 {
2266 tree lhs = gimple_assign_lhs (stmt);
2267 if (TREE_CODE (lhs) != SSA_NAME)
2268 {
2269 ao_ref r;
2270 ao_ref_init (&r, lhs);
2271 return refs_may_alias_p_1 (ref, &r, true);
2272 }
2273 }
2274 else if (gimple_code (stmt) == GIMPLE_ASM)
2275 return true;
2277 return false;
2278 }
2280 bool
2281 stmt_may_clobber_ref_p (gimple *stmt, tree ref)
2282 {
2283 ao_ref r;
2284 ao_ref_init (&r, ref);
2285 return stmt_may_clobber_ref_p_1 (stmt, &r);
2286 }
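/* Editorial sketch, not part of the original file: scanning forward
   from iterator GSI for the first statement that may overwrite REF.
   The helper name is hypothetical and the gsi_* iterator routines are
   assumed to be available via gimple-iterator.h.  */

static gimple *
example_first_clobber_after (gimple_stmt_iterator gsi, tree ref)
{
  for (gsi_next (&gsi); !gsi_end_p (gsi); gsi_next (&gsi))
    if (stmt_may_clobber_ref_p (gsi_stmt (gsi), ref))
      return gsi_stmt (gsi);
  return NULL;
}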
2288 /* Return true if store1 and store2 described by corresponding tuples
2289 <BASE, OFFSET, SIZE, MAX_SIZE> have the same size and store to the same
2290 address. */
2292 static bool
2293 same_addr_size_stores_p (tree base1, poly_int64 offset1, poly_int64 size1,
2294 poly_int64 max_size1,
2295 tree base2, poly_int64 offset2, poly_int64 size2,
2296 poly_int64 max_size2)
2297 {
2298 /* Offsets need to be 0. */
2299 if (maybe_ne (offset1, 0)
2300 || maybe_ne (offset2, 0))
2301 return false;
2303 bool base1_obj_p = SSA_VAR_P (base1);
2304 bool base2_obj_p = SSA_VAR_P (base2);
2306 /* We need one object. */
2307 if (base1_obj_p == base2_obj_p)
2308 return false;
2309 tree obj = base1_obj_p ? base1 : base2;
2311 /* And we need one MEM_REF. */
2312 bool base1_memref_p = TREE_CODE (base1) == MEM_REF;
2313 bool base2_memref_p = TREE_CODE (base2) == MEM_REF;
2314 if (base1_memref_p == base2_memref_p)
2315 return false;
2316 tree memref = base1_memref_p ? base1 : base2;
2318 /* Sizes need to be valid. */
2319 if (!known_size_p (max_size1)
2320 || !known_size_p (max_size2)
2321 || !known_size_p (size1)
2322 || !known_size_p (size2))
2323 return false;
2325 /* Max_size needs to match size. */
2326 if (maybe_ne (max_size1, size1)
2327 || maybe_ne (max_size2, size2))
2328 return false;
2330 /* Sizes need to match. */
2331 if (maybe_ne (size1, size2))
2332 return false;
2335 /* Check that memref is a store to pointer with singleton points-to info. */
2336 if (!integer_zerop (TREE_OPERAND (memref, 1)))
2337 return false;
2338 tree ptr = TREE_OPERAND (memref, 0);
2339 if (TREE_CODE (ptr) != SSA_NAME)
2340 return false;
2341 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
2342 unsigned int pt_uid;
2343 if (pi == NULL
2344 || !pt_solution_singleton_or_null_p (&pi->pt, &pt_uid))
2345 return false;
2347 /* Be conservative with non-call exceptions when the address might
2348 be NULL. */
2349 if (flag_non_call_exceptions && pi->pt.null)
2350 return false;
2352 /* Check that ptr points relative to obj. */
2353 unsigned int obj_uid = DECL_PT_UID (obj);
2354 if (obj_uid != pt_uid)
2355 return false;
2357 /* Check that the object size is the same as the store size. That ensures us
2358 that ptr points to the start of obj. */
2359 return (DECL_SIZE (obj)
2360 && poly_int_tree_p (DECL_SIZE (obj))
2361 && known_eq (wi::to_poly_offset (DECL_SIZE (obj)), size1));
2362 }
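/* Editorial note: the <BASE, OFFSET, SIZE, MAX_SIZE> tuples consumed
   above are what get_ref_base_and_extent produces for a reference
   tree, exactly as in the caller stmt_kills_ref_p below:

     poly_int64 offset, size, max_size;
     bool reverse;
     tree base = get_ref_base_and_extent (lhs, &offset, &size,
                                          &max_size, &reverse);  */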
2364 /* If STMT kills the memory reference REF return true, otherwise
2365 return false. */
2367 bool
2368 stmt_kills_ref_p (gimple *stmt, ao_ref *ref)
2369 {
2370 if (!ao_ref_base (ref))
2371 return false;
2373 if (gimple_has_lhs (stmt)
2374 && TREE_CODE (gimple_get_lhs (stmt)) != SSA_NAME
2375 /* The assignment is not necessarily carried out if it can throw
2376 and we can catch it in the current function where we could inspect
2377 the previous value.
2378 ??? We only need to care about the RHS throwing. For aggregate
2379 assignments or similar calls and non-call exceptions the LHS
2380 might throw as well. */
2381 && !stmt_can_throw_internal (stmt))
2382 {
2383 tree lhs = gimple_get_lhs (stmt);
2384 /* If LHS is literally a base of the access we are done. */
2385 if (ref->ref)
2386 {
2387 tree base = ref->ref;
2388 tree innermost_dropped_array_ref = NULL_TREE;
2389 if (handled_component_p (base))
2390 {
2391 tree saved_lhs0 = NULL_TREE;
2392 if (handled_component_p (lhs))
2393 {
2394 saved_lhs0 = TREE_OPERAND (lhs, 0);
2395 TREE_OPERAND (lhs, 0) = integer_zero_node;
2396 }
2397 do
2398 {
2399 /* Just compare the outermost handled component, if
2400 they are equal we have found a possible common
2401 base. */
2402 tree saved_base0 = TREE_OPERAND (base, 0);
2403 TREE_OPERAND (base, 0) = integer_zero_node;
2404 bool res = operand_equal_p (lhs, base, 0);
2405 TREE_OPERAND (base, 0) = saved_base0;
2406 if (res)
2407 break;
2408 /* Remember if we drop an array-ref that we need to
2409 double-check not being at struct end. */
2410 if (TREE_CODE (base) == ARRAY_REF
2411 || TREE_CODE (base) == ARRAY_RANGE_REF)
2412 innermost_dropped_array_ref = base;
2413 /* Otherwise drop handled components of the access. */
2414 base = saved_base0;
2415 }
2416 while (handled_component_p (base));
2417 if (saved_lhs0)
2418 TREE_OPERAND (lhs, 0) = saved_lhs0;
2419 }
2420 /* Finally check if the lhs has the same address and size as the
2421 base candidate of the access. Watch out if we have dropped
2422 an array-ref that was at struct end, this means ref->ref may
2423 be outside of the TYPE_SIZE of its base. */
2424 if ((! innermost_dropped_array_ref
2425 || ! array_at_struct_end_p (innermost_dropped_array_ref))
2426 && (lhs == base
2427 || (((TYPE_SIZE (TREE_TYPE (lhs))
2428 == TYPE_SIZE (TREE_TYPE (base)))
2429 || (TYPE_SIZE (TREE_TYPE (lhs))
2430 && TYPE_SIZE (TREE_TYPE (base))
2431 && operand_equal_p (TYPE_SIZE (TREE_TYPE (lhs)),
2432 TYPE_SIZE (TREE_TYPE (base)),
2433 0)))
2434 && operand_equal_p (lhs, base,
2435 OEP_ADDRESS_OF
2436 | OEP_MATCH_SIDE_EFFECTS))))
2437 return true;
2438 }
2440 /* Now look for non-literal equal bases with the restriction of
2441 handling constant offset and size. */
2442 /* For a must-alias check we need to be able to constrain
2443 the access properly. */
2444 if (!ref->max_size_known_p ())
2445 return false;
2446 poly_int64 size, offset, max_size, ref_offset = ref->offset;
2447 bool reverse;
2448 tree base = get_ref_base_and_extent (lhs, &offset, &size, &max_size,
2449 &reverse);
2450 /* We can get MEM[symbol: sZ, index: D.8862_1] here,
2451 so base == ref->base does not always hold. */
2452 if (base != ref->base)
2453 {
2454 /* Try using points-to info. */
2455 if (same_addr_size_stores_p (base, offset, size, max_size, ref->base,
2456 ref->offset, ref->size, ref->max_size))
2457 return true;
2459 /* If both base and ref->base are MEM_REFs, only compare the
2460 first operand, and if the second operand isn't equal constant,
2461 try to add the offsets into offset and ref_offset. */
2462 if (TREE_CODE (base) == MEM_REF && TREE_CODE (ref->base) == MEM_REF
2463 && TREE_OPERAND (base, 0) == TREE_OPERAND (ref->base, 0))
2464 {
2465 if (!tree_int_cst_equal (TREE_OPERAND (base, 1),
2466 TREE_OPERAND (ref->base, 1)))
2467 {
2468 poly_offset_int off1 = mem_ref_offset (base);
2469 off1 <<= LOG2_BITS_PER_UNIT;
2470 off1 += offset;
2471 poly_offset_int off2 = mem_ref_offset (ref->base);
2472 off2 <<= LOG2_BITS_PER_UNIT;
2473 off2 += ref_offset;
2474 if (!off1.to_shwi (&offset) || !off2.to_shwi (&ref_offset))
2475 size = -1;
2476 }
2477 }
2478 else
2479 size = -1;
2480 }
2481 /* For a must-alias check we need to be able to constrain
2482 the access properly. */
2483 if (known_eq (size, max_size)
2484 && known_subrange_p (ref_offset, ref->max_size, offset, size))
2485 return true;
2486 }
2488 if (is_gimple_call (stmt))
2489 {
2490 tree callee = gimple_call_fndecl (stmt);
2491 if (callee != NULL_TREE
2492 && gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
2493 switch (DECL_FUNCTION_CODE (callee))
2494 {
2495 case BUILT_IN_FREE:
2496 {
2497 tree ptr = gimple_call_arg (stmt, 0);
2498 tree base = ao_ref_base (ref);
2499 if (base && TREE_CODE (base) == MEM_REF
2500 && TREE_OPERAND (base, 0) == ptr)
2501 return true;
2502 break;
2503 }
2505 case BUILT_IN_MEMCPY:
2506 case BUILT_IN_MEMPCPY:
2507 case BUILT_IN_MEMMOVE:
2508 case BUILT_IN_MEMSET:
2509 case BUILT_IN_MEMCPY_CHK:
2510 case BUILT_IN_MEMPCPY_CHK:
2511 case BUILT_IN_MEMMOVE_CHK:
2512 case BUILT_IN_MEMSET_CHK:
2513 case BUILT_IN_STRNCPY:
2514 case BUILT_IN_STPNCPY:
2515 {
2516 /* For a must-alias check we need to be able to constrain
2517 the access properly. */
2518 if (!ref->max_size_known_p ())
2519 return false;
2520 tree dest = gimple_call_arg (stmt, 0);
2521 tree len = gimple_call_arg (stmt, 2);
2522 if (!poly_int_tree_p (len))
2523 return false;
2524 tree rbase = ref->base;
2525 poly_offset_int roffset = ref->offset;
2526 ao_ref dref;
2527 ao_ref_init_from_ptr_and_size (&dref, dest, len);
2528 tree base = ao_ref_base (&dref);
2529 poly_offset_int offset = dref.offset;
2530 if (!base || !known_size_p (dref.size))
2531 return false;
2532 if (TREE_CODE (base) == MEM_REF)
2533 {
2534 if (TREE_CODE (rbase) != MEM_REF)
2535 return false;
2536 // Compare pointers.
2537 offset += mem_ref_offset (base) << LOG2_BITS_PER_UNIT;
2538 roffset += mem_ref_offset (rbase) << LOG2_BITS_PER_UNIT;
2539 base = TREE_OPERAND (base, 0);
2540 rbase = TREE_OPERAND (rbase, 0);
2541 }
2542 if (base == rbase
2543 && known_subrange_p (roffset, ref->max_size, offset,
2544 wi::to_poly_offset (len)
2545 << LOG2_BITS_PER_UNIT))
2546 return true;
2547 break;
2548 }
2550 case BUILT_IN_VA_END:
2551 {
2552 tree ptr = gimple_call_arg (stmt, 0);
2553 if (TREE_CODE (ptr) == ADDR_EXPR)
2554 {
2555 tree base = ao_ref_base (ref);
2556 if (TREE_OPERAND (ptr, 0) == base)
2557 return true;
2558 }
2559 break;
2560 }
2562 default:;
2563 }
2564 }
2565 return false;
2566 }
2568 bool
2569 stmt_kills_ref_p (gimple *stmt, tree ref)
2570 {
2571 ao_ref r;
2572 ao_ref_init (&r, ref);
2573 return stmt_kills_ref_p (stmt, &r);
2574 }
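/* Editorial sketch (hypothetical helper): a dead-store-elimination
   style use of stmt_kills_ref_p.  An earlier store STMT is a candidate
   for removal if a later store KILLER overwrites its whole left-hand
   side; a real DSE must additionally prove there is no intervening
   use of the stored memory.  */

static bool
example_store_overwritten_p (gimple *stmt, gimple *killer)
{
  tree lhs = gimple_get_lhs (stmt);
  return lhs != NULL_TREE && stmt_kills_ref_p (killer, lhs);
}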
2577 /* Walk the virtual use-def chain of VUSE until hitting the virtual operand
2578 TARGET or a statement clobbering the memory reference REF in which
2579 case false is returned. The walk starts with VUSE, one argument of PHI. */
2581 static bool
2582 maybe_skip_until (gimple *phi, tree target, ao_ref *ref,
2583 tree vuse, unsigned int *cnt, bitmap *visited,
2584 bool abort_on_visited,
2585 void *(*translate)(ao_ref *, tree, void *, bool *),
2586 void *data)
2587 {
2588 basic_block bb = gimple_bb (phi);
2590 if (!*visited)
2591 *visited = BITMAP_ALLOC (NULL);
2593 bitmap_set_bit (*visited, SSA_NAME_VERSION (PHI_RESULT (phi)));
2595 /* Walk until we hit the target. */
2596 while (vuse != target)
2597 {
2598 gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
2599 /* Recurse for PHI nodes. */
2600 if (gimple_code (def_stmt) == GIMPLE_PHI)
2601 {
2602 /* An already visited PHI node ends the walk successfully. */
2603 if (bitmap_bit_p (*visited, SSA_NAME_VERSION (PHI_RESULT (def_stmt))))
2604 return !abort_on_visited;
2605 vuse = get_continuation_for_phi (def_stmt, ref, cnt,
2606 visited, abort_on_visited,
2607 translate, data);
2608 if (!vuse)
2609 return false;
2610 continue;
2611 }
2612 else if (gimple_nop_p (def_stmt))
2613 return false;
2614 else
2615 {
2616 /* A clobbering statement or the end of the IL ends it failing. */
2617 ++*cnt;
2618 if (stmt_may_clobber_ref_p_1 (def_stmt, ref))
2619 {
2620 bool disambiguate_only = true;
2621 if (translate
2622 && (*translate) (ref, vuse, data, &disambiguate_only) == NULL)
2623 ;
2624 else
2625 return false;
2626 }
2627 }
2628 /* If we reach a new basic-block see if we already skipped it
2629 in a previous walk that ended successfully. */
2630 if (gimple_bb (def_stmt) != bb)
2631 {
2632 if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (vuse)))
2633 return !abort_on_visited;
2634 bb = gimple_bb (def_stmt);
2635 }
2636 vuse = gimple_vuse (def_stmt);
2637 }
2638 return true;
2639 }
2642 /* Starting from a PHI node for the virtual operand of the memory reference
2643 REF find a continuation virtual operand that allows to continue walking
2644 statements dominating PHI skipping only statements that cannot possibly
2645 clobber REF. Increments *CNT for each alias disambiguation done.
2646 Returns NULL_TREE if no suitable virtual operand can be found. */
2648 tree
2649 get_continuation_for_phi (gimple *phi, ao_ref *ref,
2650 unsigned int *cnt, bitmap *visited,
2651 bool abort_on_visited,
2652 void *(*translate)(ao_ref *, tree, void *, bool *),
2653 void *data)
2654 {
2655 unsigned nargs = gimple_phi_num_args (phi);
2657 /* Through a single-argument PHI we can simply look through. */
2658 if (nargs == 1)
2659 return PHI_ARG_DEF (phi, 0);
2661 /* For two or more arguments try to pairwise skip non-aliasing code
2662 until we hit the phi argument definition that dominates the other one. */
2663 basic_block phi_bb = gimple_bb (phi);
2664 tree arg0, arg1;
2665 unsigned i;
2667 /* Find a candidate for the virtual operand which definition
2668 dominates those of all others. */
2669 /* First look if any of the args themselves satisfy this. */
2670 for (i = 0; i < nargs; ++i)
2671 {
2672 arg0 = PHI_ARG_DEF (phi, i);
2673 if (SSA_NAME_IS_DEFAULT_DEF (arg0))
2674 break;
2675 basic_block def_bb = gimple_bb (SSA_NAME_DEF_STMT (arg0));
2676 if (def_bb != phi_bb
2677 && dominated_by_p (CDI_DOMINATORS, phi_bb, def_bb))
2678 break;
2679 arg0 = NULL_TREE;
2680 }
2681 /* If not, look if we can reach such candidate by walking defs
2682 of a PHI arg without crossing other PHIs. */
2683 if (! arg0)
2684 for (i = 0; i < nargs; ++i)
2685 {
2686 arg0 = PHI_ARG_DEF (phi, i);
2687 gimple *def = SSA_NAME_DEF_STMT (arg0);
2688 /* Backedges can't work. */
2689 if (dominated_by_p (CDI_DOMINATORS,
2690 gimple_bb (def), phi_bb))
2691 continue;
2692 /* See below. */
2693 if (gimple_code (def) == GIMPLE_PHI)
2694 continue;
2695 while (! dominated_by_p (CDI_DOMINATORS,
2696 phi_bb, gimple_bb (def)))
2697 {
2698 arg0 = gimple_vuse (def);
2699 if (SSA_NAME_IS_DEFAULT_DEF (arg0))
2700 break;
2701 def = SSA_NAME_DEF_STMT (arg0);
2702 if (gimple_code (def) == GIMPLE_PHI)
2703 {
2704 /* Do not try to look through arbitrarily complicated
2705 CFGs. For those looking for the first VUSE starting
2706 from the end of the immediate dominator of phi_bb
2707 is likely faster. */
2708 arg0 = NULL_TREE;
2709 goto next;
2710 }
2711 }
2712 break;
2713 next:;
2714 }
2715 if (! arg0)
2716 return NULL_TREE;
2718 /* Then check against the found candidate. */
2719 for (i = 0; i < nargs; ++i)
2720 {
2721 arg1 = PHI_ARG_DEF (phi, i);
2722 if (arg1 == arg0)
2723 ;
2724 else if (! maybe_skip_until (phi, arg0, ref, arg1, cnt, visited,
2725 abort_on_visited, translate, data))
2726 return NULL_TREE;
2727 }
2729 return arg0;
2730 }
2732 /* Based on the memory reference REF and its virtual use VUSE call
2733 WALKER for each virtual use that is equivalent to VUSE, including VUSE
2734 itself. That is, for each virtual use for which its defining statement
2735 does not clobber REF.
2737 WALKER is called with REF, the current virtual use and DATA. If
2738 WALKER returns non-NULL the walk stops and its result is returned.
2739 At the end of a non-successful walk NULL is returned.
2741 TRANSLATE if non-NULL is called with a pointer to REF, the virtual
2742 use which definition is a statement that may clobber REF and DATA.
2743 If TRANSLATE returns (void *)-1 the walk stops and NULL is returned.
2744 If TRANSLATE returns non-NULL the walk stops and its result is returned.
2745 If TRANSLATE returns NULL the walk continues and TRANSLATE is supposed
2746 to adjust REF and *DATA to make that valid.
2748 VALUEIZE if non-NULL is called with the next VUSE that is considered
2749 and its return value is substituted for that. This can be used to
2750 implement optimistic value-numbering for example. Note that the
2751 VUSE argument is assumed to be valueized already.
2753 TODO: Cache the vector of equivalent vuses per ref, vuse pair. */
2755 void *
2756 walk_non_aliased_vuses (ao_ref *ref, tree vuse,
2757 void *(*walker)(ao_ref *, tree, unsigned int, void *),
2758 void *(*translate)(ao_ref *, tree, void *, bool *),
2759 tree (*valueize)(tree),
2760 void *data)
2761 {
2762 bitmap visited = NULL;
2763 void *res;
2764 unsigned int cnt = 0;
2765 bool translated = false;
2767 timevar_push (TV_ALIAS_STMT_WALK);
2769 do
2770 {
2771 gimple *def_stmt;
2773 /* ??? Do we want to account this to TV_ALIAS_STMT_WALK? */
2774 res = (*walker) (ref, vuse, cnt, data);
2775 /* Abort walk. */
2776 if (res == (void *)-1)
2777 {
2778 res = NULL;
2779 break;
2780 }
2781 /* Lookup succeeded. */
2782 else if (res != NULL)
2783 break;
2785 if (valueize)
2786 vuse = valueize (vuse);
2787 def_stmt = SSA_NAME_DEF_STMT (vuse);
2788 if (gimple_nop_p (def_stmt))
2789 break;
2790 else if (gimple_code (def_stmt) == GIMPLE_PHI)
2791 vuse = get_continuation_for_phi (def_stmt, ref, &cnt,
2792 &visited, translated, translate, data);
2793 else
2794 {
2795 cnt++;
2796 if (stmt_may_clobber_ref_p_1 (def_stmt, ref))
2797 {
2798 if (!translate)
2799 break;
2800 bool disambiguate_only = false;
2801 res = (*translate) (ref, vuse, data, &disambiguate_only);
2802 /* Failed lookup and translation. */
2803 if (res == (void *)-1)
2804 {
2805 res = NULL;
2806 break;
2807 }
2808 /* Lookup succeeded. */
2809 else if (res != NULL)
2810 break;
2811 /* Translation succeeded, continue walking. */
2812 translated = translated || !disambiguate_only;
2813 }
2814 vuse = gimple_vuse (def_stmt);
2815 }
2816 }
2817 while (vuse);
2819 if (visited)
2820 BITMAP_FREE (visited);
2822 timevar_pop (TV_ALIAS_STMT_WALK);
2824 return res;
2825 }
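/* Editorial sketch of a WALKER callback for walk_non_aliased_vuses
   (all names are hypothetical): stop successfully when a particular
   VUSE passed through DATA is reached, and abort after an arbitrary
   budget of disambiguations.  */

static void *
example_stop_at_vuse (ao_ref *ref ATTRIBUTE_UNUSED, tree vuse,
                      unsigned int cnt, void *data)
{
  if (vuse == (tree) data)
    return vuse;              /* Lookup succeeded; stop the walk.  */
  if (cnt > 100)
    return (void *)-1;        /* Abort; the walk returns NULL.  */
  return NULL;                /* Keep walking.  */
}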
2828 /* Based on the memory reference REF call WALKER for each vdef which
2829 defining statement may clobber REF, starting with VDEF. If REF
2830 is NULL_TREE, each defining statement is visited.
2832 WALKER is called with REF, the current vdef and DATA. If WALKER
2833 returns true the walk is stopped, otherwise it continues.
2835 If function entry is reached, FUNCTION_ENTRY_REACHED is set to true.
2836 The pointer may be NULL and then we do not track this information.
2838 At PHI nodes walk_aliased_vdefs forks into one walk for each
2839 PHI argument (but only one walk continues on merge points), the
2840 return value is true if any of the walks was successful.
2842 The function returns the number of statements walked or -1 if
2843 LIMIT stmts were walked and the walk was aborted at this point.
2844 If LIMIT is zero the walk is not aborted. */
2846 static int
2847 walk_aliased_vdefs_1 (ao_ref *ref, tree vdef,
2848 bool (*walker)(ao_ref *, tree, void *), void *data,
2849 bitmap *visited, unsigned int cnt,
2850 bool *function_entry_reached, unsigned limit)
2851 {
2852 do
2853 {
2854 gimple *def_stmt = SSA_NAME_DEF_STMT (vdef);
2856 if (*visited
2857 && !bitmap_set_bit (*visited, SSA_NAME_VERSION (vdef)))
2858 return cnt;
2860 if (gimple_nop_p (def_stmt))
2861 {
2862 if (function_entry_reached)
2863 *function_entry_reached = true;
2864 return cnt;
2865 }
2866 else if (gimple_code (def_stmt) == GIMPLE_PHI)
2867 {
2868 unsigned i;
2869 if (!*visited)
2870 *visited = BITMAP_ALLOC (NULL);
2871 for (i = 0; i < gimple_phi_num_args (def_stmt); ++i)
2872 {
2873 int res = walk_aliased_vdefs_1 (ref,
2874 gimple_phi_arg_def (def_stmt, i),
2875 walker, data, visited, cnt,
2876 function_entry_reached, limit);
2877 if (res == -1)
2878 return -1;
2879 cnt = res;
2880 }
2881 return cnt;
2882 }
2884 /* ??? Do we want to account this to TV_ALIAS_STMT_WALK? */
2885 cnt++;
2886 if (cnt == limit)
2887 return -1;
2888 if ((!ref
2889 || stmt_may_clobber_ref_p_1 (def_stmt, ref))
2890 && (*walker) (ref, vdef, data))
2891 return cnt;
2893 vdef = gimple_vuse (def_stmt);
2894 }
2895 while (1);
2896 }
2898 int
2899 walk_aliased_vdefs (ao_ref *ref, tree vdef,
2900 bool (*walker)(ao_ref *, tree, void *), void *data,
2901 bitmap *visited,
2902 bool *function_entry_reached, unsigned int limit)
2903 {
2904 bitmap local_visited = NULL;
2905 int ret;
2907 timevar_push (TV_ALIAS_STMT_WALK);
2909 if (function_entry_reached)
2910 *function_entry_reached = false;
2912 ret = walk_aliased_vdefs_1 (ref, vdef, walker, data,
2913 visited ? visited : &local_visited, 0,
2914 function_entry_reached, limit);
2915 if (local_visited)
2916 BITMAP_FREE (local_visited);
2918 timevar_pop (TV_ALIAS_STMT_WALK);
2920 return ret;
2921 }
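/* Editorial sketch of a WALKER callback for walk_aliased_vdefs (all
   names are hypothetical): record the first may-clobbering definition
   of a reference.  DATA is assumed to point to a gimple * slot; a
   caller would invoke

     gimple *first = NULL;
     walk_aliased_vdefs (&r, gimple_vuse (stmt),
                         example_record_clobber, &first, NULL, NULL, 0);

   where a zero LIMIT means the walk is never aborted.  */

static bool
example_record_clobber (ao_ref *ref ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  *(gimple **) data = SSA_NAME_DEF_STMT (vdef);
  return true;                /* Returning true stops the walk.  */
}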