gcc/alias.c
1 /* Alias analysis for GNU C
2 Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006,
3 2007, 2008, 2009 Free Software Foundation, Inc.
4 Contributed by John Carr (jfc@mit.edu).
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "tm_p.h"
29 #include "function.h"
30 #include "alias.h"
31 #include "emit-rtl.h"
32 #include "regs.h"
33 #include "hard-reg-set.h"
34 #include "basic-block.h"
35 #include "flags.h"
36 #include "output.h"
37 #include "toplev.h"
38 #include "cselib.h"
39 #include "splay-tree.h"
40 #include "ggc.h"
41 #include "langhooks.h"
42 #include "timevar.h"
43 #include "target.h"
44 #include "cgraph.h"
45 #include "varray.h"
46 #include "tree-pass.h"
47 #include "ipa-type-escape.h"
48 #include "df.h"
50 /* The aliasing API provided here solves related but different problems:
52 Say there exists (in C)
54 struct X {
55 struct Y y1;
56 struct Z z2;
57 } x1, *px1, *px2;
59 struct Y y2, *py;
60 struct Z z2, *pz;
63 py = &x1.y1;
64 px2 = &x1;
66 Consider the four questions:
68 Can a store to x1 interfere with px2->y1?
69 Can a store to x1 interfere with px2->z2 (i.e. (*px2).z2)?
71 Can a store to x1 change the value pointed to by py?
72 Can a store to x1 change the value pointed to by pz?
74 The answer to these questions can be yes, yes, yes, and maybe.
76 The first two questions can be answered with a simple examination
77 of the type system. If structure X contains a field of type Y then
78 a store through a pointer to an X can overwrite any field that is
79 contained (recursively) in an X (unless we know that px1 != px2).
81 The last two questions can be solved in the same way as the
82 first two, but this is too conservative. The observation
83 is that in some cases we can know which (if any) fields
84 are addressed and whether those addresses are used in bad ways. This
85 analysis may be language specific. In C, arbitrary operations may
86 be applied to pointers. However, there is some indication that
87 this may be too conservative for some C++ types.
89 The pass ipa-type-escape does this analysis for the types whose
90 instances do not escape across the compilation boundary.
92 Historically in GCC, these two problems were combined and a single
93 data structure was used to represent the solution to these
94 problems. We now have two similar but different data structures.
95 The data structure used to solve the last two questions is similar to
96 the first, but does not contain the fields whose addresses are
97 never taken. For types that do escape the compilation unit, the
98 data structures will have identical information.
101 /* The alias sets assigned to MEMs assist the back-end in determining
102 which MEMs can alias which other MEMs. In general, two MEMs in
103 different alias sets cannot alias each other, with one important
104 exception. Consider something like:
106 struct S { int i; double d; };
108 a store to an `S' can alias something of either type `int' or type
109 `double'. (However, a store to an `int' cannot alias a `double'
110 and vice versa.) We indicate this via a tree structure that looks
111 like:
112                struct S
113                 /    \
114                /      \
115              |/_      _\|
116              int     double
118 (The arrows are directed and point downwards.)
119 In this situation we say the alias set for `struct S' is the
120 `superset' and that those for `int' and `double' are `subsets'.
122 To see whether two alias sets can point to the same memory, we must
123 see if either alias set is a subset of the other. We need not trace
124 past immediate descendants, however, since we propagate all
125 grandchildren up one level.
127 Alias set zero is implicitly a superset of all other alias sets.
128 However, there is no actual entry for alias set zero. It is an
129 error to attempt to explicitly construct a subset of zero. */
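/* An illustrative sketch, added for exposition (not part of the
   original source):

     struct S { int i; double d; } s;
     int *pi = &s.i;
     double *pd = &s.d;

   A store to the whole of `s' is in the `struct S' superset, so it
   may alias both a load of `*pi' and a load of `*pd'.  A store to
   `*pi' is in the `int' subset, so it may alias a load of `s' but
   not a load of `*pd', since `int' and `double' are disjoint
   subsets.  */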
131 struct alias_set_entry GTY(())
133 /* The alias set number, as stored in MEM_ALIAS_SET. */
134 alias_set_type alias_set;
136 /* Nonzero if this set would have a child of zero: this effectively
137 makes this alias set the same as alias set zero. */
138 int has_zero_child;
140 /* The children of the alias set. These are not just the immediate
141 children, but, in fact, all descendants. So, if we have:
143 struct T { struct S s; float f; }
145 continuing our example above, the children here will be all of
146 `int', `double', `float', and `struct S'. */
147 splay_tree GTY((param1_is (int), param2_is (int))) children;
149 typedef struct alias_set_entry *alias_set_entry;
151 static int rtx_equal_for_memref_p (const_rtx, const_rtx);
152 static int memrefs_conflict_p (int, rtx, int, rtx, HOST_WIDE_INT);
153 static void record_set (rtx, const_rtx, void *);
154 static int base_alias_check (rtx, rtx, enum machine_mode,
155 enum machine_mode);
156 static rtx find_base_value (rtx);
157 static int mems_in_disjoint_alias_sets_p (const_rtx, const_rtx);
158 static int insert_subset_children (splay_tree_node, void*);
159 static tree find_base_decl (tree);
160 static alias_set_entry get_alias_set_entry (alias_set_type);
161 static const_rtx fixed_scalar_and_varying_struct_p (const_rtx, const_rtx, rtx, rtx,
162 bool (*) (const_rtx, bool));
163 static int aliases_everything_p (const_rtx);
164 static bool nonoverlapping_component_refs_p (const_tree, const_tree);
165 static tree decl_for_component_ref (tree);
166 static rtx adjust_offset_for_component_ref (tree, rtx);
167 static int write_dependence_p (const_rtx, const_rtx, int);
169 static void memory_modified_1 (rtx, const_rtx, void *);
171 /* Set up all info needed to perform alias analysis on memory references. */
173 /* Returns the size in bytes of the mode of X. */
174 #define SIZE_FOR_MODE(X) (GET_MODE_SIZE (GET_MODE (X)))
176 /* Returns nonzero if MEM1 and MEM2 do not alias because they are in
177 different alias sets. We ignore alias sets in functions making use
178 of variable arguments because the va_arg macros on some systems are
179 not legal ANSI C. */
180 #define DIFFERENT_ALIAS_SETS_P(MEM1, MEM2) \
181 mems_in_disjoint_alias_sets_p (MEM1, MEM2)
183 /* Cap the number of passes we make over the insns propagating alias
184 information through set chains. 10 is a completely arbitrary choice. */
185 #define MAX_ALIAS_LOOP_PASSES 10
187 /* reg_base_value[N] gives an address to which register N is related.
188 If all sets after the first add or subtract to the current value
189 or otherwise modify it so it does not point to a different top level
190 object, reg_base_value[N] is equal to the address part of the source
191 of the first set.
193 A base address can be an ADDRESS, SYMBOL_REF, or LABEL_REF. ADDRESS
194 expressions represent certain special values: function arguments and
195 the stack, frame, and argument pointers.
197 The contents of an ADDRESS are not normally used; the mode of the
198 ADDRESS determines whether the ADDRESS is a function argument or some
199 other special value. Pointer equality, not rtx_equal_p, determines whether
200 two ADDRESS expressions refer to the same base address.
202 The only use of the contents of an ADDRESS is for determining if the
203 current function performs nonlocal memory references for the
204 purposes of marking the function as a constant function. */
206 static GTY(()) VEC(rtx,gc) *reg_base_value;
207 static rtx *new_reg_base_value;
209 /* We preserve a copy of the old array around to reduce the amount of
210 garbage produced. About 8% of the garbage produced was attributed to
211 this array. */
212 static GTY((deletable)) VEC(rtx,gc) *old_reg_base_value;
214 /* Static hunks of RTL used by the aliasing code; these are initialized
215 once per function to avoid unnecessary RTL allocations. */
216 static GTY (()) rtx static_reg_base_value[FIRST_PSEUDO_REGISTER];
218 #define REG_BASE_VALUE(X) \
219 (REGNO (X) < VEC_length (rtx, reg_base_value) \
220 ? VEC_index (rtx, reg_base_value, REGNO (X)) : 0)
222 /* Vector indexed by N giving the initial (unchanging) value known for
223 pseudo-register N. This array is initialized in init_alias_analysis,
224 and does not change until end_alias_analysis is called. */
225 static GTY((length("reg_known_value_size"))) rtx *reg_known_value;
227 /* Indicates number of valid entries in reg_known_value. */
228 static GTY(()) unsigned int reg_known_value_size;
230 /* Vector recording for each reg_known_value whether it is due to a
231 REG_EQUIV note. Future passes (viz., reload) may replace the
232 pseudo with the equivalent expression and so we account for the
233 dependences that would be introduced if that happens.
235 The REG_EQUIV notes created in assign_parms may mention the arg
236 pointer, and there are explicit insns in the RTL that modify the
237 arg pointer. Thus we must ensure that such insns don't get
238 scheduled across each other because that would invalidate the
239 REG_EQUIV notes. One could argue that the REG_EQUIV notes are
240 wrong, but solving the problem in the scheduler will likely give
241 better code, so we do it here. */
242 static bool *reg_known_equiv_p;
244 /* True when scanning insns from the start of the rtl to the
245 NOTE_INSN_FUNCTION_BEG note. */
246 static bool copying_arguments;
248 DEF_VEC_P(alias_set_entry);
249 DEF_VEC_ALLOC_P(alias_set_entry,gc);
251 /* The splay-tree used to store the various alias set entries. */
252 static GTY (()) VEC(alias_set_entry,gc) *alias_sets;
254 /* Returns a pointer to the alias set entry for ALIAS_SET, if there is
255 such an entry, or NULL otherwise. */
257 static inline alias_set_entry
258 get_alias_set_entry (alias_set_type alias_set)
260 return VEC_index (alias_set_entry, alias_sets, alias_set);
263 /* Returns nonzero if the alias sets for MEM1 and MEM2 are such that
264 the two MEMs cannot alias each other. */
266 static inline int
267 mems_in_disjoint_alias_sets_p (const_rtx mem1, const_rtx mem2)
269 /* Perform a basic sanity check. Namely, that there are no alias sets
270 if we're not using strict aliasing. This helps to catch bugs
271 whereby someone uses PUT_CODE, but doesn't clear MEM_ALIAS_SET, or
272 where a MEM is allocated in some way other than by the use of
273 gen_rtx_MEM, and the MEM_ALIAS_SET is not cleared. If we begin to
274 use alias sets to indicate that spilled registers cannot alias each
275 other, we might need to remove this check. */
276 gcc_assert (flag_strict_aliasing
277 || (!MEM_ALIAS_SET (mem1) && !MEM_ALIAS_SET (mem2)));
279 return ! alias_sets_conflict_p (MEM_ALIAS_SET (mem1), MEM_ALIAS_SET (mem2));
282 /* Insert the NODE into the splay tree given by DATA. Used by
283 record_alias_subset via splay_tree_foreach. */
285 static int
286 insert_subset_children (splay_tree_node node, void *data)
288 splay_tree_insert ((splay_tree) data, node->key, node->value);
290 return 0;
293 /* Return true if the first alias set is a subset of the second. */
295 bool
296 alias_set_subset_of (alias_set_type set1, alias_set_type set2)
298 alias_set_entry ase;
300 /* Everything is a subset of the "aliases everything" set. */
301 if (set2 == 0)
302 return true;
304 /* Otherwise, check if set1 is a subset of set2. */
305 ase = get_alias_set_entry (set2);
306 if (ase != 0
307 && ((ase->has_zero_child && set1 == 0)
308 || splay_tree_lookup (ase->children,
309 (splay_tree_key) set1)))
310 return true;
311 return false;
314 /* Return 1 if the two specified alias sets may conflict. */
316 int
317 alias_sets_conflict_p (alias_set_type set1, alias_set_type set2)
319 alias_set_entry ase;
321 /* The easy case. */
322 if (alias_sets_must_conflict_p (set1, set2))
323 return 1;
325 /* See if the first alias set is a subset of the second. */
326 ase = get_alias_set_entry (set1);
327 if (ase != 0
328 && (ase->has_zero_child
329 || splay_tree_lookup (ase->children,
330 (splay_tree_key) set2)))
331 return 1;
333 /* Now do the same, but with the alias sets reversed. */
334 ase = get_alias_set_entry (set2);
335 if (ase != 0
336 && (ase->has_zero_child
337 || splay_tree_lookup (ase->children,
338 (splay_tree_key) set1)))
339 return 1;
341 /* The two alias sets are distinct and neither one is the
342 child of the other. Therefore, they cannot conflict. */
343 return 0;
346 static int
347 walk_mems_2 (rtx *x, rtx mem)
349 if (MEM_P (*x))
351 if (alias_sets_conflict_p (MEM_ALIAS_SET(*x), MEM_ALIAS_SET(mem)))
352 return 1;
354 return -1;
356 return 0;
359 static int
360 walk_mems_1 (rtx *x, rtx *pat)
362 if (MEM_P (*x))
364 /* Visit all MEMs in *PAT and check independence. */
365 if (for_each_rtx (pat, (rtx_function) walk_mems_2, *x))
366 /* Indicate that dependence was determined and stop traversal. */
367 return 1;
369 return -1;
371 return 0;
374 /* Return 1 if the two specified instructions have memory expressions with conflicting alias sets. */
375 bool
376 insn_alias_sets_conflict_p (rtx insn1, rtx insn2)
378 /* For each pair of MEMs in INSN1 and INSN2 check their independence. */
379 return for_each_rtx (&PATTERN (insn1), (rtx_function) walk_mems_1,
380 &PATTERN (insn2));
383 /* Return 1 if the two specified alias sets will always conflict. */
385 int
386 alias_sets_must_conflict_p (alias_set_type set1, alias_set_type set2)
388 if (set1 == 0 || set2 == 0 || set1 == set2)
389 return 1;
391 return 0;
394 /* Return 1 if any MEM object of type T1 will always conflict (using the
395 dependency routines in this file) with any MEM object of type T2.
396 This is used when allocating temporary storage. If T1 and/or T2 are
397 NULL_TREE, it means we know nothing about the storage. */
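/* Some illustrative results (a sketch, not part of the original
   source), assuming -fstrict-aliasing; int_type and double_type
   stand for the corresponding type nodes:

     objects_must_conflict_p (int_type, int_type)     == 1  (same type)
     objects_must_conflict_p (int_type, double_type)  == 0  (disjoint sets)
     objects_must_conflict_p (int_type, NULL_TREE)    == 1  (set 0 conflicts
                                                             with everything)
     objects_must_conflict_p (NULL_TREE, NULL_TREE)   == 0  (nothing known)  */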
399 int
400 objects_must_conflict_p (tree t1, tree t2)
402 alias_set_type set1, set2;
404 /* If neither has a type specified, we don't know if they'll conflict
405 because we may be using them to store objects of various types, for
406 example the argument and local variables areas of inlined functions. */
407 if (t1 == 0 && t2 == 0)
408 return 0;
410 /* If they are the same type, they must conflict. */
411 if (t1 == t2
412 /* Likewise if both are volatile. */
413 || (t1 != 0 && TYPE_VOLATILE (t1) && t2 != 0 && TYPE_VOLATILE (t2)))
414 return 1;
416 set1 = t1 ? get_alias_set (t1) : 0;
417 set2 = t2 ? get_alias_set (t2) : 0;
419 /* We can't use alias_sets_conflict_p because we must make sure
420 that every subtype of t1 will conflict with every subtype of
421 t2 for which a pair of subobjects of these respective subtypes
422 overlaps on the stack. */
423 return alias_sets_must_conflict_p (set1, set2);
426 /* T is an expression with pointer type. Find the DECL on which this
427 expression is based. (For example, in `a[i]' this would be `a'.)
428 If there is no such DECL, or a unique decl cannot be determined,
429 NULL_TREE is returned. */
431 static tree
432 find_base_decl (tree t)
434 tree d0, d1;
436 if (t == 0 || t == error_mark_node || ! POINTER_TYPE_P (TREE_TYPE (t)))
437 return 0;
439 /* If this is a declaration, return it. If T is based on a restrict
440 qualified decl, return that decl. */
441 if (DECL_P (t))
443 if (TREE_CODE (t) == VAR_DECL && DECL_BASED_ON_RESTRICT_P (t))
444 t = DECL_GET_RESTRICT_BASE (t);
445 return t;
448 /* Handle general expressions. It would be nice to deal with
449 COMPONENT_REFs here. If we could tell that `a' and `b' were the
450 same, then `a->f' and `b->f' are also the same. */
451 switch (TREE_CODE_CLASS (TREE_CODE (t)))
453 case tcc_unary:
454 return find_base_decl (TREE_OPERAND (t, 0));
456 case tcc_binary:
457 /* If a decl is found in only one operand, or both operands agree, return it; otherwise return 0. */
458 d0 = find_base_decl (TREE_OPERAND (t, 0));
459 d1 = find_base_decl (TREE_OPERAND (t, 1));
460 if (d0 == d1)
461 return d0;
462 else if (d0 == 0)
463 return d1;
464 else if (d1 == 0)
465 return d0;
466 else
467 return 0;
469 default:
470 return 0;
474 /* Return true if all nested component references handled by
475 get_inner_reference in T are such that we should use the alias set
476 provided by the object at the heart of T.
478 This is true for non-addressable components (which don't have their
479 own alias set), as well as components of objects in alias set zero.
480 This latter point is a special case wherein we wish to override the
481 alias set used by the component, but we don't have per-FIELD_DECL
482 assignable alias sets. */
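/* A sketch of the intent (not part of the original source): given

     struct A { int i : 3; double d; } a;

   the bit-field `a.i' cannot have its address taken, so a reference
   to it uses the alias set of `struct A' itself, while `a.d' is an
   ordinary addressable component and gets the alias set of
   `double'.  */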
484 bool
485 component_uses_parent_alias_set (const_tree t)
487 while (1)
489 /* If we're at the end, it vacuously uses its own alias set. */
490 if (!handled_component_p (t))
491 return false;
493 switch (TREE_CODE (t))
495 case COMPONENT_REF:
496 if (DECL_NONADDRESSABLE_P (TREE_OPERAND (t, 1)))
497 return true;
498 break;
500 case ARRAY_REF:
501 case ARRAY_RANGE_REF:
502 if (TYPE_NONALIASED_COMPONENT (TREE_TYPE (TREE_OPERAND (t, 0))))
503 return true;
504 break;
506 case REALPART_EXPR:
507 case IMAGPART_EXPR:
508 break;
510 default:
511 /* Bitfields and casts are never addressable. */
512 return true;
515 t = TREE_OPERAND (t, 0);
516 if (get_alias_set (TREE_TYPE (t)) == 0)
517 return true;
521 /* Return the alias set for the memory pointed to by T, which may be
522 either a type or an expression. Return -1 if there is nothing
523 special about dereferencing T. */
525 static alias_set_type
526 get_deref_alias_set_1 (tree t)
528 /* If we're not doing any alias analysis, just assume everything
529 aliases everything else. */
530 if (!flag_strict_aliasing)
531 return 0;
533 if (! TYPE_P (t))
535 tree decl = find_base_decl (t);
537 if (decl && DECL_POINTER_ALIAS_SET_KNOWN_P (decl))
539 /* If we haven't computed the actual alias set, do it now. */
540 if (DECL_POINTER_ALIAS_SET (decl) == -2)
542 tree pointed_to_type = TREE_TYPE (TREE_TYPE (decl));
544 /* No two restricted pointers can point at the same thing.
545 However, a restricted pointer can point at the same thing
546 as an unrestricted pointer, if that unrestricted pointer
547 is based on the restricted pointer. So, we make the
548 alias set for the restricted pointer a subset of the
549 alias set for the type pointed to by the type of the
550 decl. */
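      /* Concretely (an illustrative sketch, not part of the original
         source): given

           void f (int *restrict p, int *restrict q, int *r);

         each of `*p' and `*q' receives its own alias set, recorded
         below as a subset of the alias set of `int'; hence `*p' and
         `*q' do not alias each other, while the unrestricted `*r'
         may alias both.  */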
551 alias_set_type pointed_to_alias_set
552 = get_alias_set (pointed_to_type);
554 if (pointed_to_alias_set == 0)
555 /* It's not legal to make a subset of alias set zero. */
556 DECL_POINTER_ALIAS_SET (decl) = 0;
557 else if (AGGREGATE_TYPE_P (pointed_to_type))
558 /* For an aggregate, we must treat the restricted
559 pointer the same as an ordinary pointer. If we
560 were to make the type pointed to by the
561 restricted pointer a subset of the pointed-to
562 type, then we would believe that other subsets
563 of the pointed-to type (such as fields of that
564 type) do not conflict with the type pointed to
565 by the restricted pointer. */
566 DECL_POINTER_ALIAS_SET (decl)
567 = pointed_to_alias_set;
568 else
570 DECL_POINTER_ALIAS_SET (decl) = new_alias_set ();
571 record_alias_subset (pointed_to_alias_set,
572 DECL_POINTER_ALIAS_SET (decl));
576 /* We use the alias set indicated in the declaration. */
577 return DECL_POINTER_ALIAS_SET (decl);
580 /* Now all we care about is the type. */
581 t = TREE_TYPE (t);
584 /* If we have an INDIRECT_REF via a void pointer, we don't
585 know anything about what that might alias. Likewise if the
586 pointer is marked that way. */
587 if (TREE_CODE (TREE_TYPE (t)) == VOID_TYPE
588 || TYPE_REF_CAN_ALIAS_ALL (t))
589 return 0;
591 return -1;
594 /* Return the alias set for the memory pointed to by T, which may be
595 either a type or an expression. */
597 alias_set_type
598 get_deref_alias_set (tree t)
600 alias_set_type set = get_deref_alias_set_1 (t);
602 /* Fall back to the alias-set of the pointed-to type. */
603 if (set == -1)
605 if (! TYPE_P (t))
606 t = TREE_TYPE (t);
607 set = get_alias_set (TREE_TYPE (t));
610 return set;
613 /* Return the alias set for T, which may be either a type or an
614 expression. Call language-specific routine for help, if needed. */
616 alias_set_type
617 get_alias_set (tree t)
619 alias_set_type set;
621 /* If we're not doing any alias analysis, just assume everything
622 aliases everything else. Also return 0 if this or its type is
623 an error. */
624 if (! flag_strict_aliasing || t == error_mark_node
625 || (! TYPE_P (t)
626 && (TREE_TYPE (t) == 0 || TREE_TYPE (t) == error_mark_node)))
627 return 0;
629 /* We can be passed either an expression or a type. This and the
630 language-specific routine may make mutually-recursive calls to each other
631 to figure out what to do. At each juncture, we see if this is a tree
632 that the language may need to handle specially. First handle things that
633 aren't types. */
634 if (! TYPE_P (t))
636 tree inner = t;
638 /* Remove any nops, then give the language a chance to do
639 something with this tree before we look at it. */
640 STRIP_NOPS (t);
641 set = lang_hooks.get_alias_set (t);
642 if (set != -1)
643 return set;
645 /* First see if the actual object referenced is an INDIRECT_REF from a
646 restrict-qualified pointer or a "void *". */
647 while (handled_component_p (inner))
649 inner = TREE_OPERAND (inner, 0);
650 STRIP_NOPS (inner);
653 if (INDIRECT_REF_P (inner))
655 set = get_deref_alias_set_1 (TREE_OPERAND (inner, 0));
656 if (set != -1)
657 return set;
660 /* Otherwise, pick up the outermost object that we could have a pointer
661 to, processing conversions as above. */
662 while (component_uses_parent_alias_set (t))
664 t = TREE_OPERAND (t, 0);
665 STRIP_NOPS (t);
668 /* If we've already determined the alias set for a decl, just return
669 it. This is necessary for C++ anonymous unions, whose component
670 variables don't look like union members (boo!). */
671 if (TREE_CODE (t) == VAR_DECL
672 && DECL_RTL_SET_P (t) && MEM_P (DECL_RTL (t)))
673 return MEM_ALIAS_SET (DECL_RTL (t));
675 /* Now all we care about is the type. */
676 t = TREE_TYPE (t);
679 /* Variant qualifiers don't affect the alias set, so get the main
680 variant. Always use the canonical type as well.
681 If this is a type with a known alias set, return it. */
682 t = TYPE_MAIN_VARIANT (t);
683 if (TYPE_CANONICAL (t))
684 t = TYPE_CANONICAL (t);
685 if (TYPE_ALIAS_SET_KNOWN_P (t))
686 return TYPE_ALIAS_SET (t);
688 /* We don't want to set TYPE_ALIAS_SET for incomplete types. */
689 if (!COMPLETE_TYPE_P (t))
691 /* For arrays with unknown size the conservative answer is the
692 alias set of the element type. */
693 if (TREE_CODE (t) == ARRAY_TYPE)
694 return get_alias_set (TREE_TYPE (t));
696 /* But return zero as a conservative answer for incomplete types. */
697 return 0;
700 /* See if the language has special handling for this type. */
701 set = lang_hooks.get_alias_set (t);
702 if (set != -1)
703 return set;
705 /* There are no objects of FUNCTION_TYPE, so there's no point in
706 using up an alias set for them. (There are, of course, pointers
707 and references to functions, but that's different.) */
708 else if (TREE_CODE (t) == FUNCTION_TYPE
709 || TREE_CODE (t) == METHOD_TYPE)
710 set = 0;
712 /* Unless the language specifies otherwise, let vector types alias
713 their components. This avoids some nasty type punning issues in
714 normal usage. Indeed, it lets vectors be treated more like an
715 array slice. */
716 else if (TREE_CODE (t) == VECTOR_TYPE)
717 set = get_alias_set (TREE_TYPE (t));
719 /* Unless the language specifies otherwise, treat array types the
720 same as their components. This avoids the asymmetry we get
721 through recording the components. Consider accessing a
722 character(kind=1) through a reference to a character(kind=1)[1:1].
723 Or consider if we want to assign integer(kind=4)[0:D.1387] and
724 integer(kind=4)[4] the same alias set or not.
725 Just be pragmatic here and make sure the array and its element
726 type get the same alias set assigned. */
727 else if (TREE_CODE (t) == ARRAY_TYPE
728 && !TYPE_NONALIASED_COMPONENT (t))
729 set = get_alias_set (TREE_TYPE (t));
731 else
732 /* Otherwise make a new alias set for this type. */
733 set = new_alias_set ();
735 TYPE_ALIAS_SET (t) = set;
737 /* If this is an aggregate type, we must record any component aliasing
738 information. */
739 if (AGGREGATE_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
740 record_component_aliases (t);
742 return set;
745 /* Return a brand-new alias set. */
747 alias_set_type
748 new_alias_set (void)
750 if (flag_strict_aliasing)
752 if (alias_sets == 0)
753 VEC_safe_push (alias_set_entry, gc, alias_sets, 0);
754 VEC_safe_push (alias_set_entry, gc, alias_sets, 0);
755 return VEC_length (alias_set_entry, alias_sets) - 1;
757 else
758 return 0;
761 /* Indicate that things in SUBSET can alias things in SUPERSET, but that
762 not everything that aliases SUPERSET also aliases SUBSET. For example,
763 in C, a store to an `int' can alias a load of a structure containing an
764 `int', and vice versa. But it can't alias a load of a 'double' member
765 of the same structure. Here, the structure would be the SUPERSET and
766 `int' the SUBSET. This relationship is also described in the comment at
767 the beginning of this file.
769 This function should be called only once per SUPERSET/SUBSET pair.
771 It is illegal for SUPERSET to be zero; everything is implicitly a
772 subset of alias set zero. */
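/* A typical use (an illustrative sketch, not part of the original
   source) is the one record_component_aliases performs for each
   addressable field; s_type here is a hypothetical `struct S'
   containing an int and a double:

     alias_set_type s_set = get_alias_set (s_type);
     record_alias_subset (s_set, get_alias_set (integer_type_node));
     record_alias_subset (s_set, get_alias_set (double_type_node));

   after which `int' and `double' accesses may conflict with
   `struct S' accesses, but not with each other.  */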
774 void
775 record_alias_subset (alias_set_type superset, alias_set_type subset)
777 alias_set_entry superset_entry;
778 alias_set_entry subset_entry;
780 /* It is possible in complex type situations for both sets to be the same,
781 in which case we can ignore this operation. */
782 if (superset == subset)
783 return;
785 gcc_assert (superset);
787 superset_entry = get_alias_set_entry (superset);
788 if (superset_entry == 0)
790 /* Create an entry for the SUPERSET, so that we have a place to
791 attach the SUBSET. */
792 superset_entry = GGC_NEW (struct alias_set_entry);
793 superset_entry->alias_set = superset;
794 superset_entry->children
795 = splay_tree_new_ggc (splay_tree_compare_ints);
796 superset_entry->has_zero_child = 0;
797 VEC_replace (alias_set_entry, alias_sets, superset, superset_entry);
800 if (subset == 0)
801 superset_entry->has_zero_child = 1;
802 else
804 subset_entry = get_alias_set_entry (subset);
805 /* If there is an entry for the subset, enter all of its children
806 (if they are not already present) as children of the SUPERSET. */
807 if (subset_entry)
809 if (subset_entry->has_zero_child)
810 superset_entry->has_zero_child = 1;
812 splay_tree_foreach (subset_entry->children, insert_subset_children,
813 superset_entry->children);
816 /* Enter the SUBSET itself as a child of the SUPERSET. */
817 splay_tree_insert (superset_entry->children,
818 (splay_tree_key) subset, 0);
822 /* Record that component types of TYPE, if any, are part of that type for
823 aliasing purposes. For record types, we only record component types
824 for fields that are not marked non-addressable. For array types, we
825 only record the component type if it is not marked non-aliased. */
827 void
828 record_component_aliases (tree type)
830 alias_set_type superset = get_alias_set (type);
831 tree field;
833 if (superset == 0)
834 return;
836 switch (TREE_CODE (type))
838 case RECORD_TYPE:
839 case UNION_TYPE:
840 case QUAL_UNION_TYPE:
841 /* Recursively record aliases for the base classes, if there are any. */
842 if (TYPE_BINFO (type))
844 int i;
845 tree binfo, base_binfo;
847 for (binfo = TYPE_BINFO (type), i = 0;
848 BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
849 record_alias_subset (superset,
850 get_alias_set (BINFO_TYPE (base_binfo)));
852 for (field = TYPE_FIELDS (type); field != 0; field = TREE_CHAIN (field))
853 if (TREE_CODE (field) == FIELD_DECL && !DECL_NONADDRESSABLE_P (field))
854 record_alias_subset (superset, get_alias_set (TREE_TYPE (field)));
855 break;
857 case COMPLEX_TYPE:
858 record_alias_subset (superset, get_alias_set (TREE_TYPE (type)));
859 break;
861 /* VECTOR_TYPE and ARRAY_TYPE share the alias set with their
862 element type. */
864 default:
865 break;
869 /* Allocate an alias set for use in storing and reading from the varargs
870 spill area. */
872 static GTY(()) alias_set_type varargs_set = -1;
874 alias_set_type
875 get_varargs_alias_set (void)
877 #if 1
878 /* We now lower VA_ARG_EXPR, and there's currently no way to attach the
879 varargs alias set to an INDIRECT_REF (FIXME!), so we can't
880 consistently use the varargs alias set for loads from the varargs
881 area. So don't use it anywhere. */
882 return 0;
883 #else
884 if (varargs_set == -1)
885 varargs_set = new_alias_set ();
887 return varargs_set;
888 #endif
891 /* Likewise, but used for the fixed portions of the frame, e.g., register
892 save areas. */
894 static GTY(()) alias_set_type frame_set = -1;
896 alias_set_type
897 get_frame_alias_set (void)
899 if (frame_set == -1)
900 frame_set = new_alias_set ();
902 return frame_set;
905 /* Inside SRC, the source of a SET, find a base address. */
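/* For example (an illustrative sketch, not part of the original
   source):

     (symbol_ref "x")                     => (symbol_ref "x")
     (plus (reg 100) (const_int 8))       => the base value of reg 100
     (lo_sum (reg 100) (symbol_ref "x"))  => (symbol_ref "x")

   i.e. offsets and address arithmetic are stripped until a
   SYMBOL_REF, LABEL_REF, or ADDRESS remains, or nothing is known.  */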
907 static rtx
908 find_base_value (rtx src)
910 unsigned int regno;
912 #if defined (FIND_BASE_TERM)
913 /* Try machine-dependent ways to find the base term. */
914 src = FIND_BASE_TERM (src);
915 #endif
917 switch (GET_CODE (src))
919 case SYMBOL_REF:
920 case LABEL_REF:
921 return src;
923 case REG:
924 regno = REGNO (src);
925 /* At the start of a function, argument registers have known base
926 values which may be lost later. Returning an ADDRESS
927 expression here allows optimization based on argument values
928 even when the argument registers are used for other purposes. */
929 if (regno < FIRST_PSEUDO_REGISTER && copying_arguments)
930 return new_reg_base_value[regno];
932 /* If a pseudo has a known base value, return it. Do not do this
933 for non-fixed hard regs since it can result in a circular
934 dependency chain for registers which have values at function entry.
936 The test above is not sufficient because the scheduler may move
937 a copy out of an arg reg past the NOTE_INSN_FUNCTION_BEGIN. */
938 if ((regno >= FIRST_PSEUDO_REGISTER || fixed_regs[regno])
939 && regno < VEC_length (rtx, reg_base_value))
941 /* If we're inside init_alias_analysis, use new_reg_base_value
942 to reduce the number of relaxation iterations. */
943 if (new_reg_base_value && new_reg_base_value[regno]
944 && DF_REG_DEF_COUNT (regno) == 1)
945 return new_reg_base_value[regno];
947 if (VEC_index (rtx, reg_base_value, regno))
948 return VEC_index (rtx, reg_base_value, regno);
951 return 0;
953 case MEM:
954 /* Check for an argument passed in memory. Only record in the
955 copying-arguments block; it is too hard to track changes
956 otherwise. */
957 if (copying_arguments
958 && (XEXP (src, 0) == arg_pointer_rtx
959 || (GET_CODE (XEXP (src, 0)) == PLUS
960 && XEXP (XEXP (src, 0), 0) == arg_pointer_rtx)))
961 return gen_rtx_ADDRESS (VOIDmode, src);
962 return 0;
964 case CONST:
965 src = XEXP (src, 0);
966 if (GET_CODE (src) != PLUS && GET_CODE (src) != MINUS)
967 break;
969 /* ... fall through ... */
971 case PLUS:
972 case MINUS:
974 rtx temp, src_0 = XEXP (src, 0), src_1 = XEXP (src, 1);
976 /* If either operand is a REG that is a known pointer, then it
977 is the base. */
978 if (REG_P (src_0) && REG_POINTER (src_0))
979 return find_base_value (src_0);
980 if (REG_P (src_1) && REG_POINTER (src_1))
981 return find_base_value (src_1);
983 /* If either operand is a REG, then see if we already have
984 a known value for it. */
985 if (REG_P (src_0))
987 temp = find_base_value (src_0);
988 if (temp != 0)
989 src_0 = temp;
992 if (REG_P (src_1))
994 temp = find_base_value (src_1);
995 if (temp != 0)
996 src_1 = temp;
999 /* If either base is a named object or a special address
1000 (like an argument or stack reference), then use it for the
1001 base term. */
1002 if (src_0 != 0
1003 && (GET_CODE (src_0) == SYMBOL_REF
1004 || GET_CODE (src_0) == LABEL_REF
1005 || (GET_CODE (src_0) == ADDRESS
1006 && GET_MODE (src_0) != VOIDmode)))
1007 return src_0;
1009 if (src_1 != 0
1010 && (GET_CODE (src_1) == SYMBOL_REF
1011 || GET_CODE (src_1) == LABEL_REF
1012 || (GET_CODE (src_1) == ADDRESS
1013 && GET_MODE (src_1) != VOIDmode)))
1014 return src_1;
1016 /* Guess which operand is the base address:
1017 If either operand is a symbol, then it is the base. If
1018 either operand is a CONST_INT, then the other is the base. */
1019 if (GET_CODE (src_1) == CONST_INT || CONSTANT_P (src_0))
1020 return find_base_value (src_0);
1021 else if (GET_CODE (src_0) == CONST_INT || CONSTANT_P (src_1))
1022 return find_base_value (src_1);
1024 return 0;
1027 case LO_SUM:
1028 /* The standard form is (lo_sum reg sym) so look only at the
1029 second operand. */
1030 return find_base_value (XEXP (src, 1));
1032 case AND:
1033 /* If the second operand is a nonzero constant, set the base
1034 address to the first operand. */
1035 if (GET_CODE (XEXP (src, 1)) == CONST_INT && INTVAL (XEXP (src, 1)) != 0)
1036 return find_base_value (XEXP (src, 0));
1037 return 0;
1039 case TRUNCATE:
1040 if (GET_MODE_SIZE (GET_MODE (src)) < GET_MODE_SIZE (Pmode))
1041 break;
1042 /* Fall through. */
1043 case HIGH:
1044 case PRE_INC:
1045 case PRE_DEC:
1046 case POST_INC:
1047 case POST_DEC:
1048 case PRE_MODIFY:
1049 case POST_MODIFY:
1050 return find_base_value (XEXP (src, 0));
1052 case ZERO_EXTEND:
1053 case SIGN_EXTEND: /* used for NT/Alpha pointers */
1055 rtx temp = find_base_value (XEXP (src, 0));
1057 if (temp != 0 && CONSTANT_P (temp))
1058 temp = convert_memory_address (Pmode, temp);
1060 return temp;
1063 default:
1064 break;
1067 return 0;
1070 /* Called from init_alias_analysis indirectly through note_stores. */
1072 /* While scanning insns to find base values, reg_seen[N] is nonzero if
1073 register N has been set in this function. */
1074 static char *reg_seen;
1076 /* Addresses which are known not to alias anything else are identified
1077 by a unique integer. */
1078 static int unique_id;
1080 static void
1081 record_set (rtx dest, const_rtx set, void *data ATTRIBUTE_UNUSED)
1083 unsigned regno;
1084 rtx src;
1085 int n;
1087 if (!REG_P (dest))
1088 return;
1090 regno = REGNO (dest);
1092 gcc_assert (regno < VEC_length (rtx, reg_base_value));
1094 /* If this spans multiple hard registers, then we must indicate that every
1095 register has an unusable value. */
1096 if (regno < FIRST_PSEUDO_REGISTER)
1097 n = hard_regno_nregs[regno][GET_MODE (dest)];
1098 else
1099 n = 1;
1100 if (n != 1)
1102 while (--n >= 0)
1104 reg_seen[regno + n] = 1;
1105 new_reg_base_value[regno + n] = 0;
1107 return;
1110 if (set)
1112 /* A CLOBBER wipes out any old value but does not prevent a previously
1113 unset register from acquiring a base address (i.e. reg_seen is not
1114 set). */
1115 if (GET_CODE (set) == CLOBBER)
1117 new_reg_base_value[regno] = 0;
1118 return;
1120 src = SET_SRC (set);
1122 else
1124 if (reg_seen[regno])
1126 new_reg_base_value[regno] = 0;
1127 return;
1129 reg_seen[regno] = 1;
1130 new_reg_base_value[regno] = gen_rtx_ADDRESS (Pmode,
1131 GEN_INT (unique_id++));
1132 return;
1135 /* If this is not the first set of REGNO, see whether the new value
1136 is related to the old one. There are two cases of interest:
1138 (1) The register might be assigned an entirely new value
1139 that has the same base term as the original set.
1141 (2) The set might be a simple self-modification that
1142 cannot change REGNO's base value.
1144 If neither case holds, reject the original base value as invalid.
1145 Note that the following situation is not detected:
1147 extern int x, y; int *p = &x; p += (&y-&x);
1149 ANSI C does not allow computing the difference of addresses
1150 of distinct top level objects. */
1151 if (new_reg_base_value[regno] != 0
1152 && find_base_value (src) != new_reg_base_value[regno])
1153 switch (GET_CODE (src))
1155 case LO_SUM:
1156 case MINUS:
1157 if (XEXP (src, 0) != dest && XEXP (src, 1) != dest)
1158 new_reg_base_value[regno] = 0;
1159 break;
1160 case PLUS:
1161 /* If the value we add in the PLUS is also a valid base value,
1162 this might be the actual base value, and the original value
1163 an index. */
1165 rtx other = NULL_RTX;
1167 if (XEXP (src, 0) == dest)
1168 other = XEXP (src, 1);
1169 else if (XEXP (src, 1) == dest)
1170 other = XEXP (src, 0);
1172 if (! other || find_base_value (other))
1173 new_reg_base_value[regno] = 0;
1174 break;
1176 case AND:
1177 if (XEXP (src, 0) != dest || GET_CODE (XEXP (src, 1)) != CONST_INT)
1178 new_reg_base_value[regno] = 0;
1179 break;
1180 default:
1181 new_reg_base_value[regno] = 0;
1182 break;
1184 /* If this is the first set of a register, record the value. */
1185 else if ((regno >= FIRST_PSEUDO_REGISTER || ! fixed_regs[regno])
1186 && ! reg_seen[regno] && new_reg_base_value[regno] == 0)
1187 new_reg_base_value[regno] = find_base_value (src);
1189 reg_seen[regno] = 1;
1192 /* If a value is known for REGNO, return it. */
1194 rtx
1195 get_reg_known_value (unsigned int regno)
1197 if (regno >= FIRST_PSEUDO_REGISTER)
1199 regno -= FIRST_PSEUDO_REGISTER;
1200 if (regno < reg_known_value_size)
1201 return reg_known_value[regno];
1203 return NULL;
1206 /* Set it. */
1208 static void
1209 set_reg_known_value (unsigned int regno, rtx val)
1211 if (regno >= FIRST_PSEUDO_REGISTER)
1213 regno -= FIRST_PSEUDO_REGISTER;
1214 if (regno < reg_known_value_size)
1215 reg_known_value[regno] = val;
1219 /* Similarly for reg_known_equiv_p. */
1221 bool
1222 get_reg_known_equiv_p (unsigned int regno)
1224 if (regno >= FIRST_PSEUDO_REGISTER)
1226 regno -= FIRST_PSEUDO_REGISTER;
1227 if (regno < reg_known_value_size)
1228 return reg_known_equiv_p[regno];
1230 return false;
1233 static void
1234 set_reg_known_equiv_p (unsigned int regno, bool val)
1236 if (regno >= FIRST_PSEUDO_REGISTER)
1238 regno -= FIRST_PSEUDO_REGISTER;
1239 if (regno < reg_known_value_size)
1240 reg_known_equiv_p[regno] = val;
1245 /* Returns a canonical version of X, from the point of view of alias
1246 analysis. (For example, if X is a MEM whose address is a register,
1247 and the register has a known value (say a SYMBOL_REF), then a MEM
1248 whose address is the SYMBOL_REF is returned.) */
1250 rtx
1251 canon_rtx (rtx x)
1253 /* Recursively look for equivalences. */
1254 if (REG_P (x) && REGNO (x) >= FIRST_PSEUDO_REGISTER)
1256 rtx t = get_reg_known_value (REGNO (x));
1257 if (t == x)
1258 return x;
1259 if (t)
1260 return canon_rtx (t);
1263 if (GET_CODE (x) == PLUS)
1265 rtx x0 = canon_rtx (XEXP (x, 0));
1266 rtx x1 = canon_rtx (XEXP (x, 1));
1268 if (x0 != XEXP (x, 0) || x1 != XEXP (x, 1))
1270 if (GET_CODE (x0) == CONST_INT)
1271 return plus_constant (x1, INTVAL (x0));
1272 else if (GET_CODE (x1) == CONST_INT)
1273 return plus_constant (x0, INTVAL (x1));
1274 return gen_rtx_PLUS (GET_MODE (x), x0, x1);
1278 /* This gives us much better alias analysis when called from
1279 the loop optimizer. Note we want to leave the original
1280 MEM alone, but need to return the canonicalized MEM with
1281 all the flags with their original values. */
1282 else if (MEM_P (x))
1283 x = replace_equiv_address_nv (x, canon_rtx (XEXP (x, 0)));
1285 return x;
1288 /* Return 1 if X and Y are identical-looking rtx's.
1289 Expect that X and Y have already been canonicalized.
1291 We use the data in reg_known_value above to see if two registers with
1292 different numbers are, in fact, equivalent. */
1294 static int
1295 rtx_equal_for_memref_p (const_rtx x, const_rtx y)
1297 int i;
1298 int j;
1299 enum rtx_code code;
1300 const char *fmt;
1302 if (x == 0 && y == 0)
1303 return 1;
1304 if (x == 0 || y == 0)
1305 return 0;
1307 if (x == y)
1308 return 1;
1310 code = GET_CODE (x);
1311 /* Rtx's of different codes cannot be equal. */
1312 if (code != GET_CODE (y))
1313 return 0;
1315 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.
1316 (REG:SI x) and (REG:HI x) are NOT equivalent. */
1318 if (GET_MODE (x) != GET_MODE (y))
1319 return 0;
1321 /* Some RTL can be compared without a recursive examination. */
1322 switch (code)
1324 case REG:
1325 return REGNO (x) == REGNO (y);
1327 case LABEL_REF:
1328 return XEXP (x, 0) == XEXP (y, 0);
1330 case SYMBOL_REF:
1331 return XSTR (x, 0) == XSTR (y, 0);
1333 case VALUE:
1334 case CONST_INT:
1335 case CONST_DOUBLE:
1336 case CONST_FIXED:
1337 /* There's no need to compare the contents of CONST_DOUBLEs or
1338 CONST_INTs because pointer equality is a good enough
1339 comparison for these nodes. */
1340 return 0;
1342 default:
1343 break;
1346 /* canon_rtx knows how to handle plus. No need to canonicalize. */
1347 if (code == PLUS)
1348 return ((rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 0))
1349 && rtx_equal_for_memref_p (XEXP (x, 1), XEXP (y, 1)))
1350 || (rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 1))
1351 && rtx_equal_for_memref_p (XEXP (x, 1), XEXP (y, 0))));
1352 /* For commutative operations, the RTXs match if the operands match in
1353 either order. Also handle the simple binary and unary cases without a loop. */
1354 if (COMMUTATIVE_P (x))
1356 rtx xop0 = canon_rtx (XEXP (x, 0));
1357 rtx yop0 = canon_rtx (XEXP (y, 0));
1358 rtx yop1 = canon_rtx (XEXP (y, 1));
1360 return ((rtx_equal_for_memref_p (xop0, yop0)
1361 && rtx_equal_for_memref_p (canon_rtx (XEXP (x, 1)), yop1))
1362 || (rtx_equal_for_memref_p (xop0, yop1)
1363 && rtx_equal_for_memref_p (canon_rtx (XEXP (x, 1)), yop0)));
1365 else if (NON_COMMUTATIVE_P (x))
1367 return (rtx_equal_for_memref_p (canon_rtx (XEXP (x, 0)),
1368 canon_rtx (XEXP (y, 0)))
1369 && rtx_equal_for_memref_p (canon_rtx (XEXP (x, 1)),
1370 canon_rtx (XEXP (y, 1))));
1372 else if (UNARY_P (x))
1373 return rtx_equal_for_memref_p (canon_rtx (XEXP (x, 0)),
1374 canon_rtx (XEXP (y, 0)));
1376 /* Compare the elements. If any pair of corresponding elements
1377 fail to match, return 0 for the whole thing.
1379 Limit cases to types which actually appear in addresses. */
1381 fmt = GET_RTX_FORMAT (code);
1382 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1384 switch (fmt[i])
1386 case 'i':
1387 if (XINT (x, i) != XINT (y, i))
1388 return 0;
1389 break;
1391 case 'E':
1392 /* Two vectors must have the same length. */
1393 if (XVECLEN (x, i) != XVECLEN (y, i))
1394 return 0;
1396 /* And the corresponding elements must match. */
1397 for (j = 0; j < XVECLEN (x, i); j++)
1398 if (rtx_equal_for_memref_p (canon_rtx (XVECEXP (x, i, j)),
1399 canon_rtx (XVECEXP (y, i, j))) == 0)
1400 return 0;
1401 break;
1403 case 'e':
1404 if (rtx_equal_for_memref_p (canon_rtx (XEXP (x, i)),
1405 canon_rtx (XEXP (y, i))) == 0)
1406 return 0;
1407 break;
1409 /* This can happen for asm operands. */
1410 case 's':
1411 if (strcmp (XSTR (x, i), XSTR (y, i)))
1412 return 0;
1413 break;
1415 /* This can happen for an asm which clobbers memory. */
1416 case '0':
1417 break;
1419 /* It is believed that rtx's at this level will never
1420 contain anything but integers and other rtx's,
1421 except for within LABEL_REFs and SYMBOL_REFs. */
1422 default:
1423 gcc_unreachable ();
1426 return 1;
1429 rtx
1430 find_base_term (rtx x)
1432 cselib_val *val;
1433 struct elt_loc_list *l;
1435 #if defined (FIND_BASE_TERM)
1436 /* Try machine-dependent ways to find the base term. */
1437 x = FIND_BASE_TERM (x);
1438 #endif
1440 switch (GET_CODE (x))
1442 case REG:
1443 return REG_BASE_VALUE (x);
1445 case TRUNCATE:
1446 if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (Pmode))
1447 return 0;
1448 /* Fall through. */
1449 case HIGH:
1450 case PRE_INC:
1451 case PRE_DEC:
1452 case POST_INC:
1453 case POST_DEC:
1454 case PRE_MODIFY:
1455 case POST_MODIFY:
1456 return find_base_term (XEXP (x, 0));
1458 case ZERO_EXTEND:
1459 case SIGN_EXTEND: /* Used for Alpha/NT pointers */
1461 rtx temp = find_base_term (XEXP (x, 0));
1463 if (temp != 0 && CONSTANT_P (temp))
1464 temp = convert_memory_address (Pmode, temp);
1466 return temp;
1469 case VALUE:
1470 val = CSELIB_VAL_PTR (x);
1471 if (!val)
1472 return 0;
1473 for (l = val->locs; l; l = l->next)
1474 if ((x = find_base_term (l->loc)) != 0)
1475 return x;
1476 return 0;
1478 case CONST:
1479 x = XEXP (x, 0);
1480 if (GET_CODE (x) != PLUS && GET_CODE (x) != MINUS)
1481 return 0;
1482 /* Fall through. */
1483 case LO_SUM:
1484 /* The standard form is (lo_sum reg sym) so look only at the
1485 second operand. */
1486 return find_base_term (XEXP (x, 1));
1487 case PLUS:
1488 case MINUS:
1490 rtx tmp1 = XEXP (x, 0);
1491 rtx tmp2 = XEXP (x, 1);
1493 /* This is a little bit tricky since we have to determine which of
1494 the two operands represents the real base address. Otherwise this
1495 routine may return the index register instead of the base register.
1497 That may cause us to believe no aliasing was possible, when in
1498 fact aliasing is possible.
1500 We use a few simple tests to guess the base register. Additional
1501 tests can certainly be added. For example, if one of the operands
1502 is a shift or multiply, then it must be the index register and the
1503 other operand is the base register. */
1505 if (tmp1 == pic_offset_table_rtx && CONSTANT_P (tmp2))
1506 return find_base_term (tmp2);
1508 /* If either operand is known to be a pointer, then use it
1509 to determine the base term. */
1510 if (REG_P (tmp1) && REG_POINTER (tmp1))
1511 return find_base_term (tmp1);
1513 if (REG_P (tmp2) && REG_POINTER (tmp2))
1514 return find_base_term (tmp2);
1516 /* Neither operand was known to be a pointer. Go ahead and find the
1517 base term for both operands. */
1518 tmp1 = find_base_term (tmp1);
1519 tmp2 = find_base_term (tmp2);
1521 /* If either base term is a named object or a special address
1522 (like an argument or stack reference), then use it for the
1523 base term. */
1524 if (tmp1 != 0
1525 && (GET_CODE (tmp1) == SYMBOL_REF
1526 || GET_CODE (tmp1) == LABEL_REF
1527 || (GET_CODE (tmp1) == ADDRESS
1528 && GET_MODE (tmp1) != VOIDmode)))
1529 return tmp1;
1531 if (tmp2 != 0
1532 && (GET_CODE (tmp2) == SYMBOL_REF
1533 || GET_CODE (tmp2) == LABEL_REF
1534 || (GET_CODE (tmp2) == ADDRESS
1535 && GET_MODE (tmp2) != VOIDmode)))
1536 return tmp2;
1538 /* We could not determine which of the two operands was the
1539 base register and which was the index. So we can determine
1540 nothing from the base alias check. */
1541 return 0;
1544 case AND:
1545 if (GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) != 0)
1546 return find_base_term (XEXP (x, 0));
1547 return 0;
1549 case SYMBOL_REF:
1550 case LABEL_REF:
1551 return x;
1553 default:
1554 return 0;
1558 /* Return 0 if the addresses X and Y are known to point to different
1559 objects, 1 if they might be pointers to the same object. */
1561 static int
1562 base_alias_check (rtx x, rtx y, enum machine_mode x_mode,
1563 enum machine_mode y_mode)
1565 rtx x_base = find_base_term (x);
1566 rtx y_base = find_base_term (y);
1568 /* If the address itself has no known base, see if a known equivalent
1569 value has one. If either address still has no known base, nothing
1570 is known about aliasing. */
1571 if (x_base == 0)
1573 rtx x_c;
1575 if (! flag_expensive_optimizations || (x_c = canon_rtx (x)) == x)
1576 return 1;
1578 x_base = find_base_term (x_c);
1579 if (x_base == 0)
1580 return 1;
1583 if (y_base == 0)
1585 rtx y_c;
1586 if (! flag_expensive_optimizations || (y_c = canon_rtx (y)) == y)
1587 return 1;
1589 y_base = find_base_term (y_c);
1590 if (y_base == 0)
1591 return 1;
1594 /* If the base addresses are equal nothing is known about aliasing. */
1595 if (rtx_equal_p (x_base, y_base))
1596 return 1;
1598 /* The base addresses are different expressions. If they are not accessed
1599 via AND, there is no conflict. We can bring knowledge of object
1600 alignment into play here. For example, on alpha, "char a, b;" can
1601 alias one another, though "char a; long b;" cannot. AND addresses may
1602 implicitly alias surrounding objects; i.e. unaligned access in DImode
1603 via AND address can alias all surrounding object types except those
1604 with alignment 8 or higher. */
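      /* For instance (an illustrative sketch, not part of the original
         source), the Alpha expands an unaligned DImode load roughly as

           (mem:DI (and:DI (reg:DI addr) (const_int -8)))

         which reads the surrounding aligned 8 bytes; such an access
         may overlap any neighboring object whose alignment is less
         than 8.  */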
1605 if (GET_CODE (x) == AND && GET_CODE (y) == AND)
1606 return 1;
1607 if (GET_CODE (x) == AND
1608 && (GET_CODE (XEXP (x, 1)) != CONST_INT
1609 || (int) GET_MODE_UNIT_SIZE (y_mode) < -INTVAL (XEXP (x, 1))))
1610 return 1;
1611 if (GET_CODE (y) == AND
1612 && (GET_CODE (XEXP (y, 1)) != CONST_INT
1613 || (int) GET_MODE_UNIT_SIZE (x_mode) < -INTVAL (XEXP (y, 1))))
1614 return 1;
1616 /* Differing symbols not accessed via AND never alias. */
1617 if (GET_CODE (x_base) != ADDRESS && GET_CODE (y_base) != ADDRESS)
1618 return 0;
1620 /* If one address is a stack reference there can be no alias:
1621 stack references using different base registers do not alias,
1622 a stack reference can not alias a parameter, and a stack reference
1623 can not alias a global. */
1624 if ((GET_CODE (x_base) == ADDRESS && GET_MODE (x_base) == Pmode)
1625 || (GET_CODE (y_base) == ADDRESS && GET_MODE (y_base) == Pmode))
1626 return 0;
1628 if (! flag_argument_noalias)
1629 return 1;
1631 if (flag_argument_noalias > 1)
1632 return 0;
1634 /* Weak noalias assertion (arguments are distinct, but may match globals). */
1635 return ! (GET_MODE (x_base) == VOIDmode && GET_MODE (y_base) == VOIDmode);
1638 /* Convert the address X into something we can use. This is done by returning
1639 it unchanged unless it is a value; in the latter case we call cselib to get
1640 a more useful rtx. */
1642 rtx
1643 get_addr (rtx x)
1645 cselib_val *v;
1646 struct elt_loc_list *l;
1648 if (GET_CODE (x) != VALUE)
1649 return x;
1650 v = CSELIB_VAL_PTR (x);
1651 if (v)
1653 for (l = v->locs; l; l = l->next)
1654 if (CONSTANT_P (l->loc))
1655 return l->loc;
1656 for (l = v->locs; l; l = l->next)
1657 if (!REG_P (l->loc) && !MEM_P (l->loc))
1658 return l->loc;
1659 if (v->locs)
1660 return v->locs->loc;
1662 return x;
1665 /* Return the address of the (N_REFS + 1)th memory reference to ADDR
1666 where SIZE is the size in bytes of the memory reference. If ADDR
1667 is not modified by the memory reference then ADDR is returned. */
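/* For example (an illustrative sketch, not part of the original
   source), with SIZE == 4:

     (post_inc (reg X)), N_REFS == 0  =>  (reg X)
     (post_inc (reg X)), N_REFS == 1  =>  (plus (reg X) (const_int 4))
     (pre_dec  (reg X)), N_REFS == 0  =>  (plus (reg X) (const_int -4))

   i.e. the address used by the (N_REFS + 1)th reference once the
   side effects of the earlier references have taken place.  */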
1669 static rtx
1670 addr_side_effect_eval (rtx addr, int size, int n_refs)
1672 int offset = 0;
1674 switch (GET_CODE (addr))
1676 case PRE_INC:
1677 offset = (n_refs + 1) * size;
1678 break;
1679 case PRE_DEC:
1680 offset = -(n_refs + 1) * size;
1681 break;
1682 case POST_INC:
1683 offset = n_refs * size;
1684 break;
1685 case POST_DEC:
1686 offset = -n_refs * size;
1687 break;
1689 default:
1690 return addr;
1693 if (offset)
1694 addr = gen_rtx_PLUS (GET_MODE (addr), XEXP (addr, 0),
1695 GEN_INT (offset));
1696 else
1697 addr = XEXP (addr, 0);
1698 addr = canon_rtx (addr);
1700 return addr;
1703 /* Return nonzero if X and Y (memory addresses) could reference the
1704 same location in memory. C is an offset accumulator. When
1705 C is nonzero, we are testing aliases between X and Y + C.
1706 XSIZE is the size in bytes of the X reference,
1707 similarly YSIZE is the size in bytes for Y.
1708 Expect that canon_rtx has been already called for X and Y.
1710 If XSIZE or YSIZE is zero, we do not know the amount of memory being
1711 referenced (the reference was BLKmode), so make the most pessimistic
1712 assumptions.
1714 If XSIZE or YSIZE is negative, we may access memory outside the object
1715 being referenced as a side effect. This can happen when using AND to
1716 align memory references, as is done on the Alpha.
1718 It would be nice to notice that varying addresses cannot conflict with
1719 fp if no local variables had their addresses taken, but that's too hard now. */
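/* A worked example (an illustrative sketch, not part of the original
   source): for two 4-byte references at (reg X) and at
   (plus (reg X) (const_int 8)), the recursion reduces to

     memrefs_conflict_p (4, (reg X), 4, (reg X), 8)

   and since the addresses match but C == 8 lies outside both
   [0, XSIZE) and (-YSIZE, 0), the result is 0: no overlap.  With an
   offset of 2 instead, the ranges [0,4) and [2,6) intersect and the
   result is 1.  */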
1721 static int
1722 memrefs_conflict_p (int xsize, rtx x, int ysize, rtx y, HOST_WIDE_INT c)
1724 if (GET_CODE (x) == VALUE)
1725 x = get_addr (x);
1726 if (GET_CODE (y) == VALUE)
1727 y = get_addr (y);
1728 if (GET_CODE (x) == HIGH)
1729 x = XEXP (x, 0);
1730 else if (GET_CODE (x) == LO_SUM)
1731 x = XEXP (x, 1);
1732 else
1733 x = addr_side_effect_eval (x, xsize, 0);
1734 if (GET_CODE (y) == HIGH)
1735 y = XEXP (y, 0);
1736 else if (GET_CODE (y) == LO_SUM)
1737 y = XEXP (y, 1);
1738 else
1739 y = addr_side_effect_eval (y, ysize, 0);
1741 if (rtx_equal_for_memref_p (x, y))
1743 if (xsize <= 0 || ysize <= 0)
1744 return 1;
1745 if (c >= 0 && xsize > c)
1746 return 1;
1747 if (c < 0 && ysize+c > 0)
1748 return 1;
1749 return 0;
1752 /* This code used to check for conflicts involving stack references and
1753 globals but the base address alias code now handles these cases. */
1755 if (GET_CODE (x) == PLUS)
1757 /* The fact that X is canonicalized means that this
1758 PLUS rtx is canonicalized. */
1759 rtx x0 = XEXP (x, 0);
1760 rtx x1 = XEXP (x, 1);
1762 if (GET_CODE (y) == PLUS)
1764 /* The fact that Y is canonicalized means that this
1765 PLUS rtx is canonicalized. */
1766 rtx y0 = XEXP (y, 0);
1767 rtx y1 = XEXP (y, 1);
1769 if (rtx_equal_for_memref_p (x1, y1))
1770 return memrefs_conflict_p (xsize, x0, ysize, y0, c);
1771 if (rtx_equal_for_memref_p (x0, y0))
1772 return memrefs_conflict_p (xsize, x1, ysize, y1, c);
1773 if (GET_CODE (x1) == CONST_INT)
1775 if (GET_CODE (y1) == CONST_INT)
1776 return memrefs_conflict_p (xsize, x0, ysize, y0,
1777 c - INTVAL (x1) + INTVAL (y1));
1778 else
1779 return memrefs_conflict_p (xsize, x0, ysize, y,
1780 c - INTVAL (x1));
1782 else if (GET_CODE (y1) == CONST_INT)
1783 return memrefs_conflict_p (xsize, x, ysize, y0, c + INTVAL (y1));
1785 return 1;
1787 else if (GET_CODE (x1) == CONST_INT)
1788 return memrefs_conflict_p (xsize, x0, ysize, y, c - INTVAL (x1));
1790 else if (GET_CODE (y) == PLUS)
1792 /* The fact that Y is canonicalized means that this
1793 PLUS rtx is canonicalized. */
1794 rtx y0 = XEXP (y, 0);
1795 rtx y1 = XEXP (y, 1);
1797 if (GET_CODE (y1) == CONST_INT)
1798 return memrefs_conflict_p (xsize, x, ysize, y0, c + INTVAL (y1));
1799 else
1800 return 1;
1803 if (GET_CODE (x) == GET_CODE (y))
1804 switch (GET_CODE (x))
1806 case MULT:
1808 /* Handle cases where we expect the second operands to be the
1809 same, and check only whether the first operand would conflict
1810 or not. */
1811 rtx x0, y0;
1812 rtx x1 = canon_rtx (XEXP (x, 1));
1813 rtx y1 = canon_rtx (XEXP (y, 1));
1814 if (! rtx_equal_for_memref_p (x1, y1))
1815 return 1;
1816 x0 = canon_rtx (XEXP (x, 0));
1817 y0 = canon_rtx (XEXP (y, 0));
1818 if (rtx_equal_for_memref_p (x0, y0))
1819 return (xsize == 0 || ysize == 0
1820 || (c >= 0 && xsize > c) || (c < 0 && ysize+c > 0));
1822 /* Can't properly adjust our sizes. */
1823 if (GET_CODE (x1) != CONST_INT)
1824 return 1;
1825 xsize /= INTVAL (x1);
1826 ysize /= INTVAL (x1);
1827 c /= INTVAL (x1);
1828 return memrefs_conflict_p (xsize, x0, ysize, y0, c);
1831 default:
1832 break;
1835 /* Treat an access through an AND (e.g. a subword access on an Alpha)
1836 as an access with indeterminate size. Assume that references
1837 besides AND are aligned, so if the size of the other reference is
1838 at least as large as the alignment, assume no other overlap. */
1839 if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT)
1841 if (GET_CODE (y) == AND || ysize < -INTVAL (XEXP (x, 1)))
1842 xsize = -1;
1843 return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)), ysize, y, c);
1845 if (GET_CODE (y) == AND && GET_CODE (XEXP (y, 1)) == CONST_INT)
1847 /* ??? If we are indexing far enough into the array/structure, we
1848 may yet be able to determine that we cannot overlap. But we
1849 also need to know that we are far enough from the end not to overlap
1850 a following reference, so we do nothing with that for now. */
1851 if (GET_CODE (x) == AND || xsize < -INTVAL (XEXP (y, 1)))
1852 ysize = -1;
1853 return memrefs_conflict_p (xsize, x, ysize, canon_rtx (XEXP (y, 0)), c);
1856 if (CONSTANT_P (x))
1858 if (GET_CODE (x) == CONST_INT && GET_CODE (y) == CONST_INT)
1860 c += (INTVAL (y) - INTVAL (x));
1861 return (xsize <= 0 || ysize <= 0
1862 || (c >= 0 && xsize > c) || (c < 0 && ysize+c > 0));
1865 if (GET_CODE (x) == CONST)
1867 if (GET_CODE (y) == CONST)
1868 return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)),
1869 ysize, canon_rtx (XEXP (y, 0)), c);
1870 else
1871 return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)),
1872 ysize, y, c);
1874 if (GET_CODE (y) == CONST)
1875 return memrefs_conflict_p (xsize, x, ysize,
1876 canon_rtx (XEXP (y, 0)), c);
1878 if (CONSTANT_P (y))
1879 return (xsize <= 0 || ysize <= 0
1880 || (rtx_equal_for_memref_p (x, y)
1881 && ((c >= 0 && xsize > c) || (c < 0 && ysize+c > 0))));
1883 return 1;
1885 return 1;
1888 /* Functions to compute memory dependencies.
1890 Since we process the insns in execution order, we can build tables
1891 to keep track of what registers are fixed (and not aliased), what registers
1892 are varying in known ways, and what registers are varying in unknown
1893 ways.
1895 If both memory references are volatile, then there must always be a
1896    dependence between the two references, since their order cannot be
1897 changed. A volatile and non-volatile reference can be interchanged
1898 though.
1900 A MEM_IN_STRUCT reference at a non-AND varying address can never
1901 conflict with a non-MEM_IN_STRUCT reference at a fixed address. We
1902 also must allow AND addresses, because they may generate accesses
1903 outside the object being referenced. This is used to generate
1904    aligned addresses from unaligned addresses; the Alpha
1905    storeqi_unaligned pattern is one example.  */
1907 /* Read dependence: X is read after read in MEM takes place. There can
1908 only be a dependence here if both reads are volatile. */
1910 int
1911 read_dependence (const_rtx mem, const_rtx x)
1913 return MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem);
1916 /* Returns MEM1 if and only if MEM1 is a scalar at a fixed address and
1917 MEM2 is a reference to a structure at a varying address, or returns
1918 MEM2 if vice versa. Otherwise, returns NULL_RTX. If a non-NULL
1919    value is returned, MEM1 and MEM2 can never alias.  VARIES_P is used
1920 to decide whether or not an address may vary; it should return
1921 nonzero whenever variation is possible.
1922 MEM1_ADDR and MEM2_ADDR are the addresses of MEM1 and MEM2. */
1924 static const_rtx
1925 fixed_scalar_and_varying_struct_p (const_rtx mem1, const_rtx mem2, rtx mem1_addr,
1926 rtx mem2_addr,
1927 bool (*varies_p) (const_rtx, bool))
1929 if (! flag_strict_aliasing)
1930 return NULL_RTX;
1932 if (MEM_ALIAS_SET (mem2)
1933 && MEM_SCALAR_P (mem1) && MEM_IN_STRUCT_P (mem2)
1934 && !varies_p (mem1_addr, 1) && varies_p (mem2_addr, 1))
1935 /* MEM1 is a scalar at a fixed address; MEM2 is a struct at a
1936 varying address. */
1937 return mem1;
1939 if (MEM_ALIAS_SET (mem1)
1940 && MEM_IN_STRUCT_P (mem1) && MEM_SCALAR_P (mem2)
1941 && varies_p (mem1_addr, 1) && !varies_p (mem2_addr, 1))
1942 /* MEM2 is a scalar at a fixed address; MEM1 is a struct at a
1943 varying address. */
1944 return mem2;
1946 return NULL_RTX;
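/* For instance (illustration only, assuming -fstrict-aliasing): given

     int g;
     struct s { int f; } *p;

   a MEM for "g" is a scalar at a fixed address, while a MEM for "p->f"
   is a structure reference at an address that varies with "p".  By the
   rule above the two can never alias, so their accesses may be
   reordered.  */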
1949 /* Returns nonzero if something about the mode or address format of MEM
1950    indicates that it might well alias *anything*.  */
1952 static int
1953 aliases_everything_p (const_rtx mem)
1955 if (GET_CODE (XEXP (mem, 0)) == AND)
1956     /* If the address is an AND, it's very hard to know what it is
1957        actually pointing at.  */
1958 return 1;
1960 return 0;
1963 /* Return true if we can determine that the fields referenced cannot
1964 overlap for any pair of objects. */
1966 static bool
1967 nonoverlapping_component_refs_p (const_tree x, const_tree y)
1969 const_tree fieldx, fieldy, typex, typey, orig_y;
1973 /* The comparison has to be done at a common type, since we don't
1974 know how the inheritance hierarchy works. */
1975 orig_y = y;
1978 fieldx = TREE_OPERAND (x, 1);
1979 typex = TYPE_MAIN_VARIANT (DECL_FIELD_CONTEXT (fieldx));
1981 y = orig_y;
1984 fieldy = TREE_OPERAND (y, 1);
1985 typey = TYPE_MAIN_VARIANT (DECL_FIELD_CONTEXT (fieldy));
1987 if (typex == typey)
1988 goto found;
1990 y = TREE_OPERAND (y, 0);
1992 while (y && TREE_CODE (y) == COMPONENT_REF);
1994 x = TREE_OPERAND (x, 0);
1996 while (x && TREE_CODE (x) == COMPONENT_REF);
1997 /* Never found a common type. */
1998 return false;
2000 found:
2001 /* If we're left with accessing different fields of a structure,
2002 then no overlap. */
2003 if (TREE_CODE (typex) == RECORD_TYPE
2004 && fieldx != fieldy)
2005 return true;
2007   /* The comparison on the current field failed.  If we're accessing
2008      a deeply nested structure, look at the next outer level.  */
2009 x = TREE_OPERAND (x, 0);
2010 y = TREE_OPERAND (y, 0);
2012 while (x && y
2013 && TREE_CODE (x) == COMPONENT_REF
2014 && TREE_CODE (y) == COMPONENT_REF);
2016 return false;
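/* Example of the walk above (illustration only): with

     struct pair { int first; int second; } *p, *q;

   p->first and q->second reach the common RECORD_TYPE "struct pair"
   with different FIELD_DECLs, so they cannot overlap even if p == q.
   Comparing p->first with q->first instead fails the field test at
   this level, and the loop retries one level further out.  */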
2019 /* Look at the bottom of the COMPONENT_REF list for a DECL, and return it. */
2021 static tree
2022 decl_for_component_ref (tree x)
2026 x = TREE_OPERAND (x, 0);
2028 while (x && TREE_CODE (x) == COMPONENT_REF);
2030 return x && DECL_P (x) ? x : NULL_TREE;
2033 /* Walk up the COMPONENT_REF list and adjust OFFSET to compensate for the
2034 offset of the field reference. */
2036 static rtx
2037 adjust_offset_for_component_ref (tree x, rtx offset)
2039 HOST_WIDE_INT ioffset;
2041 if (! offset)
2042 return NULL_RTX;
2044 ioffset = INTVAL (offset);
2047 tree offset = component_ref_field_offset (x);
2048 tree field = TREE_OPERAND (x, 1);
2050 if (! host_integerp (offset, 1))
2051 return NULL_RTX;
2052 ioffset += (tree_low_cst (offset, 1)
2053 + (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
2054 / BITS_PER_UNIT));
2056 x = TREE_OPERAND (x, 0);
2058 while (x && TREE_CODE (x) == COMPONENT_REF);
2060 return GEN_INT (ioffset);
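/* Worked example (illustrative layout; exact offsets depend on the
   ABI): with

     struct inner { char pad1[4]; int v; };
     struct outer { char pad2[8]; struct inner i; } o;

   a reference to o.i.v walks two COMPONENT_REFs, accumulating the byte
   offsets 4 (for "v") and 8 (for "i"), so an OFFSET of 0 relative to
   the field becomes GEN_INT (12) relative to the decl "o".  */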
2063 /* Return nonzero if we can determine the exprs corresponding to memrefs
2064 X and Y and they do not overlap. */
2066 static int
2067 nonoverlapping_memrefs_p (const_rtx x, const_rtx y)
2069 tree exprx = MEM_EXPR (x), expry = MEM_EXPR (y);
2070 rtx rtlx, rtly;
2071 rtx basex, basey;
2072 rtx moffsetx, moffsety;
2073 HOST_WIDE_INT offsetx = 0, offsety = 0, sizex, sizey, tem;
2075 /* Unless both have exprs, we can't tell anything. */
2076 if (exprx == 0 || expry == 0)
2077 return 0;
2079 /* If both are field references, we may be able to determine something. */
2080 if (TREE_CODE (exprx) == COMPONENT_REF
2081 && TREE_CODE (expry) == COMPONENT_REF
2082 && nonoverlapping_component_refs_p (exprx, expry))
2083 return 1;
2086 /* If the field reference test failed, look at the DECLs involved. */
2087 moffsetx = MEM_OFFSET (x);
2088 if (TREE_CODE (exprx) == COMPONENT_REF)
2090 if (TREE_CODE (expry) == VAR_DECL
2091 && POINTER_TYPE_P (TREE_TYPE (expry)))
2093 tree field = TREE_OPERAND (exprx, 1);
2094 tree fieldcontext = DECL_FIELD_CONTEXT (field);
2095 if (ipa_type_escape_field_does_not_clobber_p (fieldcontext,
2096 TREE_TYPE (field)))
2097 return 1;
2100 tree t = decl_for_component_ref (exprx);
2101 if (! t)
2102 return 0;
2103 moffsetx = adjust_offset_for_component_ref (exprx, moffsetx);
2104 exprx = t;
2107 else if (INDIRECT_REF_P (exprx))
2109 exprx = TREE_OPERAND (exprx, 0);
2110 if (flag_argument_noalias < 2
2111 || TREE_CODE (exprx) != PARM_DECL)
2112 return 0;
2115 moffsety = MEM_OFFSET (y);
2116 if (TREE_CODE (expry) == COMPONENT_REF)
2118 if (TREE_CODE (exprx) == VAR_DECL
2119 && POINTER_TYPE_P (TREE_TYPE (exprx)))
2121 tree field = TREE_OPERAND (expry, 1);
2122 tree fieldcontext = DECL_FIELD_CONTEXT (field);
2123 if (ipa_type_escape_field_does_not_clobber_p (fieldcontext,
2124 TREE_TYPE (field)))
2125 return 1;
2128 tree t = decl_for_component_ref (expry);
2129 if (! t)
2130 return 0;
2131 moffsety = adjust_offset_for_component_ref (expry, moffsety);
2132 expry = t;
2135 else if (INDIRECT_REF_P (expry))
2137 expry = TREE_OPERAND (expry, 0);
2138 if (flag_argument_noalias < 2
2139 || TREE_CODE (expry) != PARM_DECL)
2140 return 0;
2143 if (! DECL_P (exprx) || ! DECL_P (expry))
2144 return 0;
2146 rtlx = DECL_RTL (exprx);
2147 rtly = DECL_RTL (expry);
2149 /* If either RTL is not a MEM, it must be a REG or CONCAT, meaning they
2150 can't overlap unless they are the same because we never reuse that part
2151 of the stack frame used for locals for spilled pseudos. */
2152 if ((!MEM_P (rtlx) || !MEM_P (rtly))
2153 && ! rtx_equal_p (rtlx, rtly))
2154 return 1;
2156 /* Get the base and offsets of both decls. If either is a register, we
2157    know both are and are the same, so use that as the base.  The only
2158    way we can rule out overlap is by deducing that they are nonoverlapping
2159    pieces of that decl, which is very rare.  */
2160 basex = MEM_P (rtlx) ? XEXP (rtlx, 0) : rtlx;
2161 if (GET_CODE (basex) == PLUS && GET_CODE (XEXP (basex, 1)) == CONST_INT)
2162 offsetx = INTVAL (XEXP (basex, 1)), basex = XEXP (basex, 0);
2164 basey = MEM_P (rtly) ? XEXP (rtly, 0) : rtly;
2165 if (GET_CODE (basey) == PLUS && GET_CODE (XEXP (basey, 1)) == CONST_INT)
2166 offsety = INTVAL (XEXP (basey, 1)), basey = XEXP (basey, 0);
2168 /* If the bases are different, we know they do not overlap if both
2169 are constants or if one is a constant and the other a pointer into the
2170 stack frame. Otherwise a different base means we can't tell if they
2171 overlap or not. */
2172 if (! rtx_equal_p (basex, basey))
2173 return ((CONSTANT_P (basex) && CONSTANT_P (basey))
2174 || (CONSTANT_P (basex) && REG_P (basey)
2175 && REGNO_PTR_FRAME_P (REGNO (basey)))
2176 || (CONSTANT_P (basey) && REG_P (basex)
2177 && REGNO_PTR_FRAME_P (REGNO (basex))));
2179 sizex = (!MEM_P (rtlx) ? (int) GET_MODE_SIZE (GET_MODE (rtlx))
2180 : MEM_SIZE (rtlx) ? INTVAL (MEM_SIZE (rtlx))
2181 : -1);
2182   sizey = (!MEM_P (rtly) ? (int) GET_MODE_SIZE (GET_MODE (rtly))
2183            : MEM_SIZE (rtly) ? INTVAL (MEM_SIZE (rtly))
2184            : -1);
2186 /* If we have an offset for either memref, it can update the values computed
2187 above. */
2188 if (moffsetx)
2189 offsetx += INTVAL (moffsetx), sizex -= INTVAL (moffsetx);
2190 if (moffsety)
2191 offsety += INTVAL (moffsety), sizey -= INTVAL (moffsety);
2193 /* If a memref has both a size and an offset, we can use the smaller size.
2194 We can't do this if the offset isn't known because we must view this
2195 memref as being anywhere inside the DECL's MEM. */
2196 if (MEM_SIZE (x) && moffsetx)
2197 sizex = INTVAL (MEM_SIZE (x));
2198 if (MEM_SIZE (y) && moffsety)
2199 sizey = INTVAL (MEM_SIZE (y));
2201 /* Put the values of the memref with the lower offset in X's values. */
2202 if (offsetx > offsety)
2204 tem = offsetx, offsetx = offsety, offsety = tem;
2205 tem = sizex, sizex = sizey, sizey = tem;
2208 /* If we don't know the size of the lower-offset value, we can't tell
2209 if they conflict. Otherwise, we do the test. */
2210 return sizex >= 0 && offsety >= offsetx + sizex;
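/* For example (illustration only): with

     char buf[16];

   X covering bytes [0, 8) of "buf" and Y covering bytes [8, 16) give
   offsetx = 0, sizex = 8 and offsety = 8; since 8 >= 0 + 8, the two
   references are known not to conflict.  Overlapping slices, or an
   unknown extent (sizex < 0), fail the test and we conservatively
   report a possible conflict.  */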
2213 /* True dependence: X is read after store in MEM takes place. */
2215 int
2216 true_dependence (const_rtx mem, enum machine_mode mem_mode, const_rtx x,
2217 bool (*varies) (const_rtx, bool))
2219 rtx x_addr, mem_addr;
2220 rtx base;
2222 if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
2223 return 1;
2225 /* (mem:BLK (scratch)) is a special mechanism to conflict with everything.
2226 This is used in epilogue deallocation functions, and in cselib. */
2227 if (GET_MODE (x) == BLKmode && GET_CODE (XEXP (x, 0)) == SCRATCH)
2228 return 1;
2229 if (GET_MODE (mem) == BLKmode && GET_CODE (XEXP (mem, 0)) == SCRATCH)
2230 return 1;
2231 if (MEM_ALIAS_SET (x) == ALIAS_SET_MEMORY_BARRIER
2232 || MEM_ALIAS_SET (mem) == ALIAS_SET_MEMORY_BARRIER)
2233 return 1;
2235 if (DIFFERENT_ALIAS_SETS_P (x, mem))
2236 return 0;
2238 /* Read-only memory is by definition never modified, and therefore can't
2239    conflict with anything.  We don't expect to find the read-only flag
2240    set on MEM, but stupid user tricks can produce such MEMs, so don't die.  */
2241 if (MEM_READONLY_P (x))
2242 return 0;
2244 if (nonoverlapping_memrefs_p (mem, x))
2245 return 0;
2247 if (mem_mode == VOIDmode)
2248 mem_mode = GET_MODE (mem);
2250 x_addr = get_addr (XEXP (x, 0));
2251 mem_addr = get_addr (XEXP (mem, 0));
2253 base = find_base_term (x_addr);
2254 if (base && (GET_CODE (base) == LABEL_REF
2255 || (GET_CODE (base) == SYMBOL_REF
2256 && CONSTANT_POOL_ADDRESS_P (base))))
2257 return 0;
2259 if (! base_alias_check (x_addr, mem_addr, GET_MODE (x), mem_mode))
2260 return 0;
2262 x_addr = canon_rtx (x_addr);
2263 mem_addr = canon_rtx (mem_addr);
2265 if (! memrefs_conflict_p (GET_MODE_SIZE (mem_mode), mem_addr,
2266 SIZE_FOR_MODE (x), x_addr, 0))
2267 return 0;
2269 if (aliases_everything_p (x))
2270 return 1;
2272 /* We cannot use aliases_everything_p to test MEM, since we must look
2273 at MEM_MODE, rather than GET_MODE (MEM). */
2274 if (mem_mode == QImode || GET_CODE (mem_addr) == AND)
2275 return 1;
2277 /* In true_dependence we also allow BLKmode to alias anything. Why
2278 don't we do this in anti_dependence and output_dependence? */
2279 if (mem_mode == BLKmode || GET_MODE (x) == BLKmode)
2280 return 1;
2282 return ! fixed_scalar_and_varying_struct_p (mem, x, mem_addr, x_addr,
2283 varies);
2286 /* Canonical true dependence: X is read after store in MEM takes place.
2287 Variant of true_dependence which assumes MEM has already been
2288 canonicalized (hence we no longer do that here).
2289 The mem_addr argument has been added, since true_dependence computed
2290 this value prior to canonicalizing. */
2292 int
2293 canon_true_dependence (const_rtx mem, enum machine_mode mem_mode, rtx mem_addr,
2294 const_rtx x, bool (*varies) (const_rtx, bool))
2296 rtx x_addr;
2298 if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
2299 return 1;
2301 /* (mem:BLK (scratch)) is a special mechanism to conflict with everything.
2302 This is used in epilogue deallocation functions. */
2303 if (GET_MODE (x) == BLKmode && GET_CODE (XEXP (x, 0)) == SCRATCH)
2304 return 1;
2305 if (GET_MODE (mem) == BLKmode && GET_CODE (XEXP (mem, 0)) == SCRATCH)
2306 return 1;
2307 if (MEM_ALIAS_SET (x) == ALIAS_SET_MEMORY_BARRIER
2308 || MEM_ALIAS_SET (mem) == ALIAS_SET_MEMORY_BARRIER)
2309 return 1;
2311 if (DIFFERENT_ALIAS_SETS_P (x, mem))
2312 return 0;
2314 /* Read-only memory is by definition never modified, and therefore can't
2315    conflict with anything.  We don't expect to find the read-only flag
2316    set on MEM, but stupid user tricks can produce such MEMs, so don't die.  */
2317 if (MEM_READONLY_P (x))
2318 return 0;
2320 if (nonoverlapping_memrefs_p (x, mem))
2321 return 0;
2323 x_addr = get_addr (XEXP (x, 0));
2325 if (! base_alias_check (x_addr, mem_addr, GET_MODE (x), mem_mode))
2326 return 0;
2328 x_addr = canon_rtx (x_addr);
2329 if (! memrefs_conflict_p (GET_MODE_SIZE (mem_mode), mem_addr,
2330 SIZE_FOR_MODE (x), x_addr, 0))
2331 return 0;
2333 if (aliases_everything_p (x))
2334 return 1;
2336 /* We cannot use aliases_everything_p to test MEM, since we must look
2337 at MEM_MODE, rather than GET_MODE (MEM). */
2338 if (mem_mode == QImode || GET_CODE (mem_addr) == AND)
2339 return 1;
2341 /* In true_dependence we also allow BLKmode to alias anything. Why
2342 don't we do this in anti_dependence and output_dependence? */
2343 if (mem_mode == BLKmode || GET_MODE (x) == BLKmode)
2344 return 1;
2346 return ! fixed_scalar_and_varying_struct_p (mem, x, mem_addr, x_addr,
2347 varies);
2350 /* Returns nonzero if a write to X might alias a previous read from
2351 (or, if WRITEP is nonzero, a write to) MEM. */
2353 static int
2354 write_dependence_p (const_rtx mem, const_rtx x, int writep)
2356 rtx x_addr, mem_addr;
2357 const_rtx fixed_scalar;
2358 rtx base;
2360 if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
2361 return 1;
2363 /* (mem:BLK (scratch)) is a special mechanism to conflict with everything.
2364 This is used in epilogue deallocation functions. */
2365 if (GET_MODE (x) == BLKmode && GET_CODE (XEXP (x, 0)) == SCRATCH)
2366 return 1;
2367 if (GET_MODE (mem) == BLKmode && GET_CODE (XEXP (mem, 0)) == SCRATCH)
2368 return 1;
2369 if (MEM_ALIAS_SET (x) == ALIAS_SET_MEMORY_BARRIER
2370 || MEM_ALIAS_SET (mem) == ALIAS_SET_MEMORY_BARRIER)
2371 return 1;
2373 if (DIFFERENT_ALIAS_SETS_P (x, mem))
2374 return 0;
2376 /* A read from read-only memory can't conflict with read-write memory. */
2377 if (!writep && MEM_READONLY_P (mem))
2378 return 0;
2380 if (nonoverlapping_memrefs_p (x, mem))
2381 return 0;
2383 x_addr = get_addr (XEXP (x, 0));
2384 mem_addr = get_addr (XEXP (mem, 0));
2386 if (! writep)
2388 base = find_base_term (mem_addr);
2389 if (base && (GET_CODE (base) == LABEL_REF
2390 || (GET_CODE (base) == SYMBOL_REF
2391 && CONSTANT_POOL_ADDRESS_P (base))))
2392 return 0;
2395 if (! base_alias_check (x_addr, mem_addr, GET_MODE (x),
2396 GET_MODE (mem)))
2397 return 0;
2399 x_addr = canon_rtx (x_addr);
2400 mem_addr = canon_rtx (mem_addr);
2402 if (!memrefs_conflict_p (SIZE_FOR_MODE (mem), mem_addr,
2403 SIZE_FOR_MODE (x), x_addr, 0))
2404 return 0;
2406 fixed_scalar
2407 = fixed_scalar_and_varying_struct_p (mem, x, mem_addr, x_addr,
2408 rtx_addr_varies_p);
2410 return (!(fixed_scalar == mem && !aliases_everything_p (x))
2411 && !(fixed_scalar == x && !aliases_everything_p (mem)));
2414 /* Anti dependence: X is written after read in MEM takes place. */
2416 int
2417 anti_dependence (const_rtx mem, const_rtx x)
2419 return write_dependence_p (mem, x, /*writep=*/0);
2422 /* Output dependence: X is written after store in MEM takes place. */
2424 int
2425 output_dependence (const_rtx mem, const_rtx x)
2427 return write_dependence_p (mem, x, /*writep=*/1);
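/* The three flavors on a tiny example (illustration only):

     insn 1:  r1     <- MEM[a]
     insn 2:  MEM[a] <- r2
     insn 3:  MEM[a] <- r3
     insn 4:  r4     <- MEM[a]

   insn 2 after insn 1 is an anti dependence (write after read), insn 3
   after insn 2 is an output dependence (write after write), and insn 4
   after insn 3 is a true dependence (read after write).  Each one
   forbids interchanging the pair of insns involved.  */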
2431 void
2432 init_alias_target (void)
2434 int i;
2436 memset (static_reg_base_value, 0, sizeof static_reg_base_value);
2438 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2439 /* Check whether this register can hold an incoming pointer
2440 argument. FUNCTION_ARG_REGNO_P tests outgoing register
2441 numbers, so translate if necessary due to register windows. */
2442 if (FUNCTION_ARG_REGNO_P (OUTGOING_REGNO (i))
2443 && HARD_REGNO_MODE_OK (i, Pmode))
2444 static_reg_base_value[i]
2445 = gen_rtx_ADDRESS (VOIDmode, gen_rtx_REG (Pmode, i));
2447 static_reg_base_value[STACK_POINTER_REGNUM]
2448 = gen_rtx_ADDRESS (Pmode, stack_pointer_rtx);
2449 static_reg_base_value[ARG_POINTER_REGNUM]
2450 = gen_rtx_ADDRESS (Pmode, arg_pointer_rtx);
2451 static_reg_base_value[FRAME_POINTER_REGNUM]
2452 = gen_rtx_ADDRESS (Pmode, frame_pointer_rtx);
2453 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2454 static_reg_base_value[HARD_FRAME_POINTER_REGNUM]
2455 = gen_rtx_ADDRESS (Pmode, hard_frame_pointer_rtx);
2456 #endif
2459 /* Set MEMORY_MODIFIED when X modifies DATA (which is assumed
2460    to be a memory reference).  */
2461 static bool memory_modified;
2462 static void
2463 memory_modified_1 (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
2465 if (MEM_P (x))
2467 if (anti_dependence (x, (const_rtx)data) || output_dependence (x, (const_rtx)data))
2468 memory_modified = true;
2473 /* Return true when INSN possibly modifies the memory contents of MEM
2474    (i.e. the contents at MEM's address may change).  */
2475 bool
2476 memory_modified_in_insn_p (const_rtx mem, const_rtx insn)
2478 if (!INSN_P (insn))
2479 return false;
2480 memory_modified = false;
2481 note_stores (PATTERN (insn), memory_modified_1, CONST_CAST_RTX(mem));
2482 return memory_modified;
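/* Usage sketch (hypothetical caller, not part of this file): scan
   forward from FROM and return the first insn that may modify MEM, or
   NULL_RTX if no such insn is found.  */

static rtx
first_memory_clobber_sketch (const_rtx mem, rtx from)
{
  rtx insn;

  for (insn = from; insn; insn = NEXT_INSN (insn))
    if (memory_modified_in_insn_p (mem, insn))
      return insn;
  return NULL_RTX;
}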
2485 /* Initialize the aliasing machinery. Initialize the REG_KNOWN_VALUE
2486 array. */
2488 void
2489 init_alias_analysis (void)
2491 unsigned int maxreg = max_reg_num ();
2492 int changed, pass;
2493 int i;
2494 unsigned int ui;
2495 rtx insn;
2497 timevar_push (TV_ALIAS_ANALYSIS);
2499 reg_known_value_size = maxreg - FIRST_PSEUDO_REGISTER;
2500 reg_known_value = GGC_CNEWVEC (rtx, reg_known_value_size);
2501 reg_known_equiv_p = XCNEWVEC (bool, reg_known_value_size);
2503 /* If we have memory allocated from the previous run, use it. */
2504 if (old_reg_base_value)
2505 reg_base_value = old_reg_base_value;
2507 if (reg_base_value)
2508 VEC_truncate (rtx, reg_base_value, 0);
2510 VEC_safe_grow_cleared (rtx, gc, reg_base_value, maxreg);
2512 new_reg_base_value = XNEWVEC (rtx, maxreg);
2513 reg_seen = XNEWVEC (char, maxreg);
2515 /* The basic idea is that each pass through this loop will use the
2516 "constant" information from the previous pass to propagate alias
2517 information through another level of assignments.
2519 This could get expensive if the assignment chains are long. Maybe
2520 we should throttle the number of iterations, possibly based on
2521 the optimization level or flag_expensive_optimizations.
2523 We could propagate more information in the first pass by making use
2524 of DF_REG_DEF_COUNT to determine immediately that the alias information
2525 for a pseudo is "constant".
2527 A program with an uninitialized variable can cause an infinite loop
2528 here. Instead of doing a full dataflow analysis to detect such problems
2529 we just cap the number of iterations for the loop.
2531 The state of the arrays for the set chain in question does not matter
2532 since the program has undefined behavior. */
2534 pass = 0;
2537 /* Assume nothing will change this iteration of the loop. */
2538 changed = 0;
2540       /* We want to assign the same IDs each iteration of this loop, so
2541          start counting from zero each time through.  */
2542 unique_id = 0;
2544 /* We're at the start of the function each iteration through the
2545 loop, so we're copying arguments. */
2546 copying_arguments = true;
2548 /* Wipe the potential alias information clean for this pass. */
2549 memset (new_reg_base_value, 0, maxreg * sizeof (rtx));
2551 /* Wipe the reg_seen array clean. */
2552 memset (reg_seen, 0, maxreg);
2554 /* Mark all hard registers which may contain an address.
2555 The stack, frame and argument pointers may contain an address.
2556 An argument register which can hold a Pmode value may contain
2557 an address even if it is not in BASE_REGS.
2559 The address expression is VOIDmode for an argument and
2560 Pmode for other registers. */
2562 memcpy (new_reg_base_value, static_reg_base_value,
2563 FIRST_PSEUDO_REGISTER * sizeof (rtx));
2565 /* Walk the insns adding values to the new_reg_base_value array. */
2566 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2568 if (INSN_P (insn))
2570 rtx note, set;
2572 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
2573 /* The prologue/epilogue insns are not threaded onto the
2574 insn chain until after reload has completed. Thus,
2575 there is no sense wasting time checking if INSN is in
2576 the prologue/epilogue until after reload has completed. */
2577 if (reload_completed
2578 && prologue_epilogue_contains (insn))
2579 continue;
2580 #endif
2582             /* If this insn has a noalias note, process it.  Otherwise,
2583 scan for sets. A simple set will have no side effects
2584 which could change the base value of any other register. */
2586 if (GET_CODE (PATTERN (insn)) == SET
2587 && REG_NOTES (insn) != 0
2588 && find_reg_note (insn, REG_NOALIAS, NULL_RTX))
2589 record_set (SET_DEST (PATTERN (insn)), NULL_RTX, NULL);
2590 else
2591 note_stores (PATTERN (insn), record_set, NULL);
2593 set = single_set (insn);
2595 if (set != 0
2596 && REG_P (SET_DEST (set))
2597 && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
2599 unsigned int regno = REGNO (SET_DEST (set));
2600 rtx src = SET_SRC (set);
2601 rtx t;
2603 note = find_reg_equal_equiv_note (insn);
2604 if (note && REG_NOTE_KIND (note) == REG_EQUAL
2605 && DF_REG_DEF_COUNT (regno) != 1)
2606 note = NULL_RTX;
2608 if (note != NULL_RTX
2609 && GET_CODE (XEXP (note, 0)) != EXPR_LIST
2610 && ! rtx_varies_p (XEXP (note, 0), 1)
2611 && ! reg_overlap_mentioned_p (SET_DEST (set),
2612 XEXP (note, 0)))
2614 set_reg_known_value (regno, XEXP (note, 0));
2615 set_reg_known_equiv_p (regno,
2616 REG_NOTE_KIND (note) == REG_EQUIV);
2618 else if (DF_REG_DEF_COUNT (regno) == 1
2619 && GET_CODE (src) == PLUS
2620 && REG_P (XEXP (src, 0))
2621 && (t = get_reg_known_value (REGNO (XEXP (src, 0))))
2622 && GET_CODE (XEXP (src, 1)) == CONST_INT)
2624 t = plus_constant (t, INTVAL (XEXP (src, 1)));
2625 set_reg_known_value (regno, t);
2626 set_reg_known_equiv_p (regno, 0);
2628 else if (DF_REG_DEF_COUNT (regno) == 1
2629 && ! rtx_varies_p (src, 1))
2631 set_reg_known_value (regno, src);
2632 set_reg_known_equiv_p (regno, 0);
2636 else if (NOTE_P (insn)
2637 && NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG)
2638 copying_arguments = false;
2641 /* Now propagate values from new_reg_base_value to reg_base_value. */
2642 gcc_assert (maxreg == (unsigned int) max_reg_num ());
2644 for (ui = 0; ui < maxreg; ui++)
2646 if (new_reg_base_value[ui]
2647 && new_reg_base_value[ui] != VEC_index (rtx, reg_base_value, ui)
2648 && ! rtx_equal_p (new_reg_base_value[ui],
2649 VEC_index (rtx, reg_base_value, ui)))
2651 VEC_replace (rtx, reg_base_value, ui, new_reg_base_value[ui]);
2652 changed = 1;
2656 while (changed && ++pass < MAX_ALIAS_LOOP_PASSES);
2658 /* Fill in the remaining entries. */
2659 for (i = 0; i < (int)reg_known_value_size; i++)
2660 if (reg_known_value[i] == 0)
2661 reg_known_value[i] = regno_reg_rtx[i + FIRST_PSEUDO_REGISTER];
2663 /* Clean up. */
2664 free (new_reg_base_value);
2665 new_reg_base_value = 0;
2666 free (reg_seen);
2667 reg_seen = 0;
2668 timevar_pop (TV_ALIAS_ANALYSIS);
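/* Example of the propagation above (illustration only): given a pseudo
   with a single definition

     (set (reg 100) (symbol_ref "a"))
     (set (reg 101) (plus (reg 100) (const_int 8)))

   the first pass records "a" as the known value of reg 100, and the
   PLUS case then records "a" plus 8 as the known value of reg 101.
   The next pass changes nothing, so the loop stops well before
   MAX_ALIAS_LOOP_PASSES.  */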
2671 void
2672 end_alias_analysis (void)
2674 old_reg_base_value = reg_base_value;
2675 ggc_free (reg_known_value);
2676 reg_known_value = 0;
2677 reg_known_value_size = 0;
2678 free (reg_known_equiv_p);
2679 reg_known_equiv_p = 0;
2682 #include "gt-alias.h"