/* Alias analysis for GNU C
   Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006,
   2007, 2008, 2009 Free Software Foundation, Inc.
   Contributed by John Carr (jfc@mit.edu).

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "function.h"
#include "alias.h"
#include "emit-rtl.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "flags.h"
#include "output.h"
#include "toplev.h"
#include "cselib.h"
#include "splay-tree.h"
#include "ggc.h"
#include "langhooks.h"
#include "timevar.h"
#include "target.h"
#include "cgraph.h"
#include "varray.h"
#include "tree-pass.h"
#include "ipa-type-escape.h"
#include "df.h"
/* The aliasing API provided here solves related but different problems:

   Say there exists (in C)

   struct X {
     struct Y y1;
     struct Z z2;
   } x1, *px1, *px2;

   struct Y y2, *py;
   struct Z z2, *pz;

   py = &px1->y1;
   px2 = &x1;

   Consider the four questions:

   Can a store to x1 interfere with px2->y1?
   Can a store to x1 interfere with px2->z2 (i.e. (*px2).z2)?
   Can a store to x1 change the value pointed to by py?
   Can a store to x1 change the value pointed to by pz?

   The answer to these questions can be yes, yes, yes, and maybe.

   The first two questions can be answered with a simple examination
   of the type system.  If structure X contains a field of type Y then
   a store thru a pointer to an X can overwrite any field that is
   contained (recursively) in an X (unless we know that px1 != px2).

   The last two questions can be solved in the same way as the first
   two questions, but this is too conservative.  The observation is
   that in some cases the analysis can determine which (if any) fields
   are addressed and whether those addresses are used in bad ways.
   This analysis may be language specific.  In C, arbitrary operations
   may be applied to pointers.  However, there is some indication that
   this may be too conservative for some C++ types.

   The pass ipa-type-escape does this analysis for the types whose
   instances do not escape across the compilation boundary.

   Historically in GCC, these two problems were combined and a single
   data structure was used to represent the solution to these
   problems.  We now have two similar but different data structures.
   The data structure to solve the last two questions is similar to
   the first, but does not have in it the fields whose addresses are
   never taken.  For types that do escape the compilation unit, the
   data structures will have identical information.  */
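
/* To make the "maybe" above concrete (an illustrative sketch, not part
   of the analysis itself): given the declarations above, as long as no
   address of a Z field inside an X is ever taken,

     pz = &z2;       // pz never points into an X
     x1.z2 = ...;    // cannot change *pz
     *pz = ...;      // cannot change x1.z2

   but if the program somewhere also executes `pz = &x1.z2;', the answer
   to the fourth question becomes yes.  ipa-type-escape performs this
   kind of reasoning for types whose instances do not escape the
   compilation unit.  */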
/* The alias sets assigned to MEMs assist the back-end in determining
   which MEMs can alias which other MEMs.  In general, two MEMs in
   different alias sets cannot alias each other, with one important
   exception.  Consider something like:

     struct S { int i; double d; };

   a store to an `S' can alias something of either type `int' or type
   `double'.  (However, a store to an `int' cannot alias a `double'
   and vice versa.)  We indicate this via a tree structure that looks
   like:

	   struct S
	    /   \
	   /     \
	 |/_     _\|
	 int    double

   (The arrows are directed and point downwards.)
   In this situation we say the alias set for `struct S' is the
   `superset' and that those for `int' and `double' are `subsets'.

   To see whether two alias sets can point to the same memory, we must
   see if either alias set is a subset of the other.  We need not trace
   past immediate descendants, however, since we propagate all
   grandchildren up one level.

   Alias set zero is implicitly a superset of all other alias sets.
   However, there is no actual entry for alias set zero.  It is an
   error to attempt to explicitly construct a subset of zero.  */
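
/* A minimal sketch of how the sets above behave (illustrative only;
   the set numbers are invented, and S_type stands for the RECORD_TYPE
   tree the front end builds for `struct S'):

     alias_set_type s_set   = get_alias_set (S_type);            // say 3
     alias_set_type int_set = get_alias_set (integer_type_node); // say 1

     alias_sets_conflict_p (s_set, int_set)  => 1  (int is a subset of S)

   and because grandchildren are propagated upward, a type containing a
   `struct S' member records `int' and `double' directly as children.  */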
struct GTY(()) alias_set_entry_d {
  /* The alias set number, as stored in MEM_ALIAS_SET.  */
  alias_set_type alias_set;

  /* Nonzero if this set would have a child of zero: this effectively
     makes this alias set the same as alias set zero.  */
  int has_zero_child;

  /* The children of the alias set.  These are not just the immediate
     children, but, in fact, all descendants.  So, if we have:

       struct T { struct S s; float f; }

     continuing our example above, the children here will be all of
     `int', `double', `float', and `struct S'.  */
  splay_tree GTY((param1_is (int), param2_is (int))) children;
};
typedef struct alias_set_entry_d *alias_set_entry;
static int rtx_equal_for_memref_p (const_rtx, const_rtx);
static int memrefs_conflict_p (int, rtx, int, rtx, HOST_WIDE_INT);
static void record_set (rtx, const_rtx, void *);
static int base_alias_check (rtx, rtx, enum machine_mode,
			     enum machine_mode);
static rtx find_base_value (rtx);
static int mems_in_disjoint_alias_sets_p (const_rtx, const_rtx);
static int insert_subset_children (splay_tree_node, void*);
static tree find_base_decl (tree);
static alias_set_entry get_alias_set_entry (alias_set_type);
static const_rtx fixed_scalar_and_varying_struct_p (const_rtx, const_rtx,
						    rtx, rtx,
						    bool (*) (const_rtx, bool));
static int aliases_everything_p (const_rtx);
static bool nonoverlapping_component_refs_p (const_tree, const_tree);
static tree decl_for_component_ref (tree);
static rtx adjust_offset_for_component_ref (tree, rtx);
static int write_dependence_p (const_rtx, const_rtx, int);

static void memory_modified_1 (rtx, const_rtx, void *);
/* Set up all info needed to perform alias analysis on memory references.  */

/* Returns the size in bytes of the mode of X.  */
#define SIZE_FOR_MODE(X) (GET_MODE_SIZE (GET_MODE (X)))

/* Returns nonzero if MEM1 and MEM2 do not alias because they are in
   different alias sets.  We ignore alias sets in functions making use
   of variable arguments because the va_arg macros on some systems are
   not legal ANSI C.  */
#define DIFFERENT_ALIAS_SETS_P(MEM1, MEM2)		\
  mems_in_disjoint_alias_sets_p (MEM1, MEM2)

/* Cap the number of passes we make over the insns propagating alias
   information through set chains.  10 is a completely arbitrary choice.  */
#define MAX_ALIAS_LOOP_PASSES 10
/* reg_base_value[N] gives an address to which register N is related.
   If all sets after the first add or subtract to the current value
   or otherwise modify it so it does not point to a different top level
   object, reg_base_value[N] is equal to the address part of the source
   of the first set.

   A base address can be an ADDRESS, SYMBOL_REF, or LABEL_REF.  ADDRESS
   expressions represent certain special values: function arguments and
   the stack, frame, and argument pointers.

   The contents of an ADDRESS is not normally used; the mode of the
   ADDRESS determines whether the ADDRESS is a function argument or some
   other special value.  Pointer equality, not rtx_equal_p, determines whether
   two ADDRESS expressions refer to the same base address.

   The only use of the contents of an ADDRESS is for determining if the
   current function performs nonlocal memory references for the purposes
   of marking the function as a constant function.  */

static GTY(()) VEC(rtx,gc) *reg_base_value;
static rtx *new_reg_base_value;

/* We keep a copy of the old array around to reduce the amount of garbage
   produced; about 8% of all garbage produced was attributed to this
   array.  */
static GTY((deletable)) VEC(rtx,gc) *old_reg_base_value;

/* Static hunks of RTL used by the aliasing code; these are initialized
   once per function to avoid unnecessary RTL allocations.  */
static GTY (()) rtx static_reg_base_value[FIRST_PSEUDO_REGISTER];
#define REG_BASE_VALUE(X)				\
  (REGNO (X) < VEC_length (rtx, reg_base_value)		\
   ? VEC_index (rtx, reg_base_value, REGNO (X)) : 0)

/* Vector indexed by N giving the initial (unchanging) value known for
   pseudo-register N.  This array is initialized in init_alias_analysis,
   and does not change until end_alias_analysis is called.  */
static GTY((length("reg_known_value_size"))) rtx *reg_known_value;

/* Indicates number of valid entries in reg_known_value.  */
static GTY(()) unsigned int reg_known_value_size;

/* Vector recording for each reg_known_value whether it is due to a
   REG_EQUIV note.  Future passes (viz., reload) may replace the
   pseudo with the equivalent expression and so we account for the
   dependences that would be introduced if that happens.

   The REG_EQUIV notes created in assign_parms may mention the arg
   pointer, and there are explicit insns in the RTL that modify the
   arg pointer.  Thus we must ensure that such insns don't get
   scheduled across each other because that would invalidate the
   REG_EQUIV notes.  One could argue that the REG_EQUIV notes are
   wrong, but solving the problem in the scheduler will likely give
   better code, so we do it here.  */
static bool *reg_known_equiv_p;

/* True when scanning insns from the start of the rtl to the
   NOTE_INSN_FUNCTION_BEG note.  */
static bool copying_arguments;

DEF_VEC_P(alias_set_entry);
DEF_VEC_ALLOC_P(alias_set_entry,gc);

/* The splay-tree used to store the various alias set entries.  */
static GTY (()) VEC(alias_set_entry,gc) *alias_sets;
/* Returns a pointer to the alias set entry for ALIAS_SET, if there is
   such an entry, or NULL otherwise.  */

static inline alias_set_entry
get_alias_set_entry (alias_set_type alias_set)
{
  return VEC_index (alias_set_entry, alias_sets, alias_set);
}

/* Returns nonzero if the alias sets for MEM1 and MEM2 are such that
   the two MEMs cannot alias each other.  */

static inline int
mems_in_disjoint_alias_sets_p (const_rtx mem1, const_rtx mem2)
{
  /* Perform a basic sanity check.  Namely, that there are no alias sets
     if we're not using strict aliasing.  This helps to catch bugs
     whereby someone uses PUT_CODE, but doesn't clear MEM_ALIAS_SET, or
     where a MEM is allocated in some way other than by the use of
     gen_rtx_MEM, and the MEM_ALIAS_SET is not cleared.  If we begin to
     use alias sets to indicate that spilled registers cannot alias each
     other, we might need to remove this check.  */
  gcc_assert (flag_strict_aliasing
	      || (!MEM_ALIAS_SET (mem1) && !MEM_ALIAS_SET (mem2)));

  return ! alias_sets_conflict_p (MEM_ALIAS_SET (mem1), MEM_ALIAS_SET (mem2));
}

/* Insert the NODE into the splay tree given by DATA.  Used by
   record_alias_subset via splay_tree_foreach.  */

static int
insert_subset_children (splay_tree_node node, void *data)
{
  splay_tree_insert ((splay_tree) data, node->key, node->value);

  return 0;
}
/* Return true if the first alias set is a subset of the second.  */

bool
alias_set_subset_of (alias_set_type set1, alias_set_type set2)
{
  alias_set_entry ase;

  /* Everything is a subset of the "aliases everything" set.  */
  if (set2 == 0)
    return true;

  /* Otherwise, check if set1 is a subset of set2.  */
  ase = get_alias_set_entry (set2);
  if (ase != 0
      && ((ase->has_zero_child && set1 == 0)
	  || splay_tree_lookup (ase->children,
				(splay_tree_key) set1)))
    return true;
  return false;
}

/* Return 1 if the two specified alias sets may conflict.  */

int
alias_sets_conflict_p (alias_set_type set1, alias_set_type set2)
{
  alias_set_entry ase;

  /* The easy case.  */
  if (alias_sets_must_conflict_p (set1, set2))
    return 1;

  /* See if the first alias set is a subset of the second.  */
  ase = get_alias_set_entry (set1);
  if (ase != 0
      && (ase->has_zero_child
	  || splay_tree_lookup (ase->children,
				(splay_tree_key) set2)))
    return 1;

  /* Now do the same, but with the alias sets reversed.  */
  ase = get_alias_set_entry (set2);
  if (ase != 0
      && (ase->has_zero_child
	  || splay_tree_lookup (ase->children,
				(splay_tree_key) set1)))
    return 1;

  /* The two alias sets are distinct and neither one is the
     child of the other.  Therefore, they cannot conflict.  */
  return 0;
}
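
/* Contrast this with alias_sets_must_conflict_p below.  For the
   `struct S' example at the top of the file (set numbers invented for
   the sketch):

     alias_sets_conflict_p (s_set, int_set)      => 1  (subset relation)
     alias_sets_must_conflict_p (s_set, int_set) => 0  (sets differ and
							neither is zero)

   so "may conflict" is the weaker of the two relations.  */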
static int
walk_mems_2 (rtx *x, rtx mem)
{
  if (MEM_P (*x))
    {
      if (alias_sets_conflict_p (MEM_ALIAS_SET (*x), MEM_ALIAS_SET (mem)))
	return 1;

      return -1;
    }
  return 0;
}

static int
walk_mems_1 (rtx *x, rtx *pat)
{
  if (MEM_P (*x))
    {
      /* Visit all MEMs in *PAT and check independence.  */
      if (for_each_rtx (pat, (rtx_function) walk_mems_2, *x))
	/* Indicate that dependence was determined and stop traversal.  */
	return 1;

      return -1;
    }
  return 0;
}

/* Return 1 if the two specified instructions have MEM expressions with
   conflicting alias sets.  */
bool
insn_alias_sets_conflict_p (rtx insn1, rtx insn2)
{
  /* For each pair of MEMs in INSN1 and INSN2 check their independence.  */
  return for_each_rtx (&PATTERN (insn1), (rtx_function) walk_mems_1,
		       &PATTERN (insn2));
}
/* Return 1 if the two specified alias sets will always conflict.  */

int
alias_sets_must_conflict_p (alias_set_type set1, alias_set_type set2)
{
  if (set1 == 0 || set2 == 0 || set1 == set2)
    return 1;

  return 0;
}

/* Return 1 if any MEM object of type T1 will always conflict (using the
   dependency routines in this file) with any MEM object of type T2.
   This is used when allocating temporary storage.  If T1 and/or T2 are
   NULL_TREE, it means we know nothing about the storage.  */

int
objects_must_conflict_p (tree t1, tree t2)
{
  alias_set_type set1, set2;

  /* If neither has a type specified, we don't know if they'll conflict
     because we may be using them to store objects of various types, for
     example the argument and local variables areas of inlined functions.  */
  if (t1 == 0 && t2 == 0)
    return 0;

  /* If they are the same type, they must conflict.  */
  if (t1 == t2
      /* Likewise if both are volatile.  */
      || (t1 != 0 && TYPE_VOLATILE (t1) && t2 != 0 && TYPE_VOLATILE (t2)))
    return 1;

  set1 = t1 ? get_alias_set (t1) : 0;
  set2 = t2 ? get_alias_set (t2) : 0;

  /* We can't use alias_sets_conflict_p because we must make sure
     that every subtype of t1 will conflict with every subtype of
     t2 for which a pair of subobjects of these respective subtypes
     overlaps on the stack.  */
  return alias_sets_must_conflict_p (set1, set2);
}
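
/* For example (a sketch, assuming -fstrict-aliasing; the type nodes
   come from the front end):

     objects_must_conflict_p (integer_type_node, integer_type_node) => 1
     objects_must_conflict_p (NULL_TREE, double_type_node)          => 1
	(unknown storage gets alias set 0, which conflicts with all)
     objects_must_conflict_p (integer_type_node, double_type_node)  => 0
	(two distinct nonzero alias sets never "must" conflict)  */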
/* T is an expression with pointer type.  Find the DECL on which this
   expression is based.  (For example, in `a[i]' this would be `a'.)
   If there is no such DECL, or a unique decl cannot be determined,
   NULL_TREE is returned.  */

static tree
find_base_decl (tree t)
{
  tree d0, d1;

  if (t == 0 || t == error_mark_node || ! POINTER_TYPE_P (TREE_TYPE (t)))
    return 0;

  if (TREE_CODE (t) == SSA_NAME)
    t = SSA_NAME_VAR (t);

  /* If this is a declaration, return it.  If T is based on a restrict
     qualified decl, return that decl.  */
  if (DECL_P (t))
    {
      if (TREE_CODE (t) == VAR_DECL && DECL_BASED_ON_RESTRICT_P (t))
	t = DECL_GET_RESTRICT_BASE (t);
      return t;
    }

  /* Handle general expressions.  It would be nice to deal with
     COMPONENT_REFs here.  If we could tell that `a' and `b' were the
     same, then `a->f' and `b->f' are also the same.  */
  switch (TREE_CODE_CLASS (TREE_CODE (t)))
    {
    case tcc_unary:
      return find_base_decl (TREE_OPERAND (t, 0));

    case tcc_binary:
      /* Return 0 if found in neither or both are the same.  */
      d0 = find_base_decl (TREE_OPERAND (t, 0));
      d1 = find_base_decl (TREE_OPERAND (t, 1));
      if (d0 == d1)
	return d0;
      else if (d0 == 0)
	return d1;
      else if (d1 == 0)
	return d0;
      else
	return 0;

    default:
      return 0;
    }
}
/* Return true if all nested component references handled by
   get_inner_reference in T are such that we should use the alias set
   provided by the object at the heart of T.

   This is true for non-addressable components (which don't have their
   own alias set), as well as components of objects in alias set zero.
   This latter point is a special case wherein we wish to override the
   alias set used by the component, but we don't have per-FIELD_DECL
   assignable alias sets.  */

bool
component_uses_parent_alias_set (const_tree t)
{
  while (1)
    {
      /* If we're at the end, it vacuously uses its own alias set.  */
      if (!handled_component_p (t))
	return false;

      switch (TREE_CODE (t))
	{
	case COMPONENT_REF:
	  if (DECL_NONADDRESSABLE_P (TREE_OPERAND (t, 1)))
	    return true;
	  break;

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  if (TYPE_NONALIASED_COMPONENT (TREE_TYPE (TREE_OPERAND (t, 0))))
	    return true;
	  break;

	case REALPART_EXPR:
	case IMAGPART_EXPR:
	  break;

	default:
	  /* Bitfields and casts are never addressable.  */
	  return true;
	}

      t = TREE_OPERAND (t, 0);
      if (get_alias_set (TREE_TYPE (t)) == 0)
	return true;
    }
}
/* Return the alias set for the memory pointed to by T, which may be
   either a type or an expression.  Return -1 if there is nothing
   special about dereferencing T.  */

static alias_set_type
get_deref_alias_set_1 (tree t)
{
  /* If we're not doing any alias analysis, just assume everything
     aliases everything else.  */
  if (!flag_strict_aliasing)
    return 0;

  if (! TYPE_P (t))
    {
      tree decl = find_base_decl (t);

      if (decl && DECL_POINTER_ALIAS_SET_KNOWN_P (decl))
	{
	  /* If we haven't computed the actual alias set, do it now.  */
	  if (DECL_POINTER_ALIAS_SET (decl) == -2)
	    {
	      tree pointed_to_type = TREE_TYPE (TREE_TYPE (decl));

	      /* No two restricted pointers can point at the same thing.
		 However, a restricted pointer can point at the same thing
		 as an unrestricted pointer, if that unrestricted pointer
		 is based on the restricted pointer.  So, we make the
		 alias set for the restricted pointer a subset of the
		 alias set for the type pointed to by the type of the
		 decl.  */
	      alias_set_type pointed_to_alias_set
		= get_alias_set (pointed_to_type);

	      if (pointed_to_alias_set == 0)
		/* It's not legal to make a subset of alias set zero.  */
		DECL_POINTER_ALIAS_SET (decl) = 0;
	      else if (AGGREGATE_TYPE_P (pointed_to_type))
		/* For an aggregate, we must treat the restricted
		   pointer the same as an ordinary pointer.  If we
		   were to make the type pointed to by the
		   restricted pointer a subset of the pointed-to
		   type, then we would believe that other subsets
		   of the pointed-to type (such as fields of that
		   type) do not conflict with the type pointed to
		   by the restricted pointer.  */
		DECL_POINTER_ALIAS_SET (decl)
		  = pointed_to_alias_set;
	      else
		{
		  DECL_POINTER_ALIAS_SET (decl) = new_alias_set ();
		  record_alias_subset (pointed_to_alias_set,
				       DECL_POINTER_ALIAS_SET (decl));
		}
	    }

	  /* We use the alias set indicated in the declaration.  */
	  return DECL_POINTER_ALIAS_SET (decl);
	}

      /* Now all we care about is the type.  */
      t = TREE_TYPE (t);
    }

  /* If we have an INDIRECT_REF via a void pointer, we don't
     know anything about what that might alias.  Likewise if the
     pointer is marked that way.  */
  if (TREE_CODE (TREE_TYPE (t)) == VOID_TYPE
      || TYPE_REF_CAN_ALIAS_ALL (t))
    return 0;

  return -1;
}
/* Return the alias set for the memory pointed to by T, which may be
   either a type or an expression.  */

alias_set_type
get_deref_alias_set (tree t)
{
  alias_set_type set = get_deref_alias_set_1 (t);

  /* Fall back to the alias-set of the pointed-to type.  */
  if (set == -1)
    {
      if (! TYPE_P (t))
	t = TREE_TYPE (t);
      set = get_alias_set (TREE_TYPE (t));
    }

  return set;
}
/* Return the alias set for T, which may be either a type or an
   expression.  Call language-specific routine for help, if needed.  */

alias_set_type
get_alias_set (tree t)
{
  alias_set_type set;

  /* If we're not doing any alias analysis, just assume everything
     aliases everything else.  Also return 0 if this or its type is
     an error.  */
  if (! flag_strict_aliasing || t == error_mark_node
      || (! TYPE_P (t)
	  && (TREE_TYPE (t) == 0 || TREE_TYPE (t) == error_mark_node)))
    return 0;

  /* We can be passed either an expression or a type.  This and the
     language-specific routine may make mutually-recursive calls to each other
     to figure out what to do.  At each juncture, we see if this is a tree
     that the language may need to handle specially.  First handle things that
     aren't types.  */
  if (! TYPE_P (t))
    {
      tree inner = t;

      /* Remove any nops, then give the language a chance to do
	 something with this tree before we look at it.  */
      STRIP_NOPS (t);
      set = lang_hooks.get_alias_set (t);
      if (set != -1)
	return set;

      /* First see if the actual object referenced is an INDIRECT_REF from a
	 restrict-qualified pointer or a "void *".  */
      while (handled_component_p (inner))
	{
	  inner = TREE_OPERAND (inner, 0);
	  STRIP_NOPS (inner);
	}

      if (INDIRECT_REF_P (inner))
	{
	  set = get_deref_alias_set_1 (TREE_OPERAND (inner, 0));
	  if (set != -1)
	    return set;
	}

      /* Otherwise, pick up the outermost object that we could have a pointer
	 to, processing conversions as above.  */
      while (component_uses_parent_alias_set (t))
	{
	  t = TREE_OPERAND (t, 0);
	  STRIP_NOPS (t);
	}

      /* If we've already determined the alias set for a decl, just return
	 it.  This is necessary for C++ anonymous unions, whose component
	 variables don't look like union members (boo!).  */
      if (TREE_CODE (t) == VAR_DECL
	  && DECL_RTL_SET_P (t) && MEM_P (DECL_RTL (t)))
	return MEM_ALIAS_SET (DECL_RTL (t));

      /* Now all we care about is the type.  */
      t = TREE_TYPE (t);
    }

  /* Variant qualifiers don't affect the alias set, so get the main
     variant.  Always use the canonical type as well.
     If this is a type with a known alias set, return it.  */
  t = TYPE_MAIN_VARIANT (t);
  if (TYPE_CANONICAL (t))
    t = TYPE_CANONICAL (t);
  if (TYPE_ALIAS_SET_KNOWN_P (t))
    return TYPE_ALIAS_SET (t);

  /* We don't want to set TYPE_ALIAS_SET for incomplete types.  */
  if (!COMPLETE_TYPE_P (t))
    {
      /* For arrays with unknown size the conservative answer is the
	 alias set of the element type.  */
      if (TREE_CODE (t) == ARRAY_TYPE)
	return get_alias_set (TREE_TYPE (t));

      /* But return zero as a conservative answer for incomplete types.  */
      return 0;
    }

  /* See if the language has special handling for this type.  */
  set = lang_hooks.get_alias_set (t);
  if (set != -1)
    return set;

  /* There are no objects of FUNCTION_TYPE, so there's no point in
     using up an alias set for them.  (There are, of course, pointers
     and references to functions, but that's different.)  */
  else if (TREE_CODE (t) == FUNCTION_TYPE
	   || TREE_CODE (t) == METHOD_TYPE)
    set = 0;

  /* Unless the language specifies otherwise, let vector types alias
     their components.  This avoids some nasty type punning issues in
     normal usage.  And indeed lets vectors be treated more like an
     array slice.  */
  else if (TREE_CODE (t) == VECTOR_TYPE)
    set = get_alias_set (TREE_TYPE (t));

  /* Unless the language specifies otherwise, treat array types the
     same as their components.  This avoids the asymmetry we get
     through recording the components.  Consider accessing a
     character(kind=1) through a reference to a character(kind=1)[1:1].
     Or consider if we want to assign integer(kind=4)[0:D.1387] and
     integer(kind=4)[4] the same alias set or not.
     Just be pragmatic here and make sure the array and its element
     type get the same alias set assigned.  */
  else if (TREE_CODE (t) == ARRAY_TYPE
	   && !TYPE_NONALIASED_COMPONENT (t))
    set = get_alias_set (TREE_TYPE (t));

  else
    /* Otherwise make a new alias set for this type.  */
    set = new_alias_set ();

  TYPE_ALIAS_SET (t) = set;

  /* If this is an aggregate type, we must record any component aliasing
     information.  */
  if (AGGREGATE_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
    record_component_aliases (t);

  return set;
}
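
/* Putting the pieces together (a sketch; set numbers are invented): for
   `struct T { struct S s; float f; }' from the comment near the top of
   the file, the first call

     alias_set_type t_set = get_alias_set (T_type);

   allocates a new set via new_alias_set and then, through
   record_component_aliases, records `struct S', `int', `double' and
   `float' as children of t_set, so a store through a `T *' is treated
   as possibly aliasing loads of any of those types.  T_type stands for
   the front end's RECORD_TYPE for `struct T'.  */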
/* Return a brand-new alias set.  */

alias_set_type
new_alias_set (void)
{
  if (flag_strict_aliasing)
    {
      if (alias_sets == 0)
	VEC_safe_push (alias_set_entry, gc, alias_sets, 0);
      VEC_safe_push (alias_set_entry, gc, alias_sets, 0);
      return VEC_length (alias_set_entry, alias_sets) - 1;
    }
  else
    return 0;
}
/* Indicate that things in SUBSET can alias things in SUPERSET, but that
   not everything that aliases SUPERSET also aliases SUBSET.  For example,
   in C, a store to an `int' can alias a load of a structure containing an
   `int', and vice versa.  But it can't alias a load of a 'double' member
   of the same structure.  Here, the structure would be the SUPERSET and
   `int' the SUBSET.  This relationship is also described in the comment at
   the beginning of this file.

   This function should be called only once per SUPERSET/SUBSET pair.

   It is illegal for SUPERSET to be zero; everything is implicitly a
   subset of alias set zero.  */

void
record_alias_subset (alias_set_type superset, alias_set_type subset)
{
  alias_set_entry superset_entry;
  alias_set_entry subset_entry;

  /* It is possible in complex type situations for both sets to be the same,
     in which case we can ignore this operation.  */
  if (superset == subset)
    return;

  gcc_assert (superset);

  superset_entry = get_alias_set_entry (superset);
  if (superset_entry == 0)
    {
      /* Create an entry for the SUPERSET, so that we have a place to
	 attach the SUBSET.  */
      superset_entry = GGC_NEW (struct alias_set_entry_d);
      superset_entry->alias_set = superset;
      superset_entry->children
	= splay_tree_new_ggc (splay_tree_compare_ints);
      superset_entry->has_zero_child = 0;
      VEC_replace (alias_set_entry, alias_sets, superset, superset_entry);
    }

  if (subset == 0)
    superset_entry->has_zero_child = 1;
  else
    {
      subset_entry = get_alias_set_entry (subset);
      /* If there is an entry for the subset, enter all of its children
	 (if they are not already present) as children of the SUPERSET.  */
      if (subset_entry)
	{
	  if (subset_entry->has_zero_child)
	    superset_entry->has_zero_child = 1;

	  splay_tree_foreach (subset_entry->children, insert_subset_children,
			      superset_entry->children);
	}

      /* Enter the SUBSET itself as a child of the SUPERSET.  */
      splay_tree_insert (superset_entry->children,
			 (splay_tree_key) subset, 0);
    }
}
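
/* Typical use (a sketch of what record_component_aliases does for
   `struct S { int i; double d; };'; the *_set names are stand-ins):

     alias_set_type s_set = get_alias_set (S_type);  // the superset
     record_alias_subset (s_set, int_set);
     record_alias_subset (s_set, double_set);

   Afterwards alias_sets_conflict_p (s_set, int_set) is 1, while
   int_set and double_set still do not conflict with each other.  */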
/* Record that component types of TYPE, if any, are part of that type for
   aliasing purposes.  For record types, we only record component types
   for fields that are not marked non-addressable.  For array types, we
   only record the component type if it is not marked non-aliased.  */

void
record_component_aliases (tree type)
{
  alias_set_type superset = get_alias_set (type);
  tree field;

  if (superset == 0)
    return;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      /* Recursively record aliases for the base classes, if there are any.  */
      if (TYPE_BINFO (type))
	{
	  int i;
	  tree binfo, base_binfo;

	  for (binfo = TYPE_BINFO (type), i = 0;
	       BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
	    record_alias_subset (superset,
				 get_alias_set (BINFO_TYPE (base_binfo)));
	}
      for (field = TYPE_FIELDS (type); field != 0; field = TREE_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL && !DECL_NONADDRESSABLE_P (field))
	  record_alias_subset (superset, get_alias_set (TREE_TYPE (field)));
      break;

    case COMPLEX_TYPE:
      record_alias_subset (superset, get_alias_set (TREE_TYPE (type)));
      break;

      /* VECTOR_TYPE and ARRAY_TYPE share the alias set with their
	 element type.  */

    default:
      break;
    }
}
/* Allocate an alias set for use in storing and reading from the varargs
   spill area.  */

static GTY(()) alias_set_type varargs_set = -1;

alias_set_type
get_varargs_alias_set (void)
{
#if 1
  /* We now lower VA_ARG_EXPR, and there's currently no way to attach the
     varargs alias set to an INDIRECT_REF (FIXME!), so we can't
     consistently use the varargs alias set for loads from the varargs
     area.  So don't use it anywhere.  */
  return 0;
#else
  if (varargs_set == -1)
    varargs_set = new_alias_set ();

  return varargs_set;
#endif
}

/* Likewise, but used for the fixed portions of the frame, e.g., register
   save areas.  */

static GTY(()) alias_set_type frame_set = -1;

alias_set_type
get_frame_alias_set (void)
{
  if (frame_set == -1)
    frame_set = new_alias_set ();

  return frame_set;
}
/* Inside SRC, the source of a SET, find a base address.  */

static rtx
find_base_value (rtx src)
{
  unsigned int regno;

#if defined (FIND_BASE_TERM)
  /* Try machine-dependent ways to find the base term.  */
  src = FIND_BASE_TERM (src);
#endif

  switch (GET_CODE (src))
    {
    case SYMBOL_REF:
    case LABEL_REF:
      return src;

    case REG:
      regno = REGNO (src);
      /* At the start of a function, argument registers have known base
	 values which may be lost later.  Returning an ADDRESS
	 expression here allows optimization based on argument values
	 even when the argument registers are used for other purposes.  */
      if (regno < FIRST_PSEUDO_REGISTER && copying_arguments)
	return new_reg_base_value[regno];

      /* If a pseudo has a known base value, return it.  Do not do this
	 for non-fixed hard regs since it can result in a circular
	 dependency chain for registers which have values at function entry.

	 The test above is not sufficient because the scheduler may move
	 a copy out of an arg reg past the NOTE_INSN_FUNCTION_BEGIN.  */
      if ((regno >= FIRST_PSEUDO_REGISTER || fixed_regs[regno])
	  && regno < VEC_length (rtx, reg_base_value))
	{
	  /* If we're inside init_alias_analysis, use new_reg_base_value
	     to reduce the number of relaxation iterations.  */
	  if (new_reg_base_value && new_reg_base_value[regno]
	      && DF_REG_DEF_COUNT (regno) == 1)
	    return new_reg_base_value[regno];

	  if (VEC_index (rtx, reg_base_value, regno))
	    return VEC_index (rtx, reg_base_value, regno);
	}

      return 0;

    case MEM:
      /* Check for an argument passed in memory.  Only record in the
	 copying-arguments block; it is too hard to track changes
	 otherwise.  */
      if (copying_arguments
	  && (XEXP (src, 0) == arg_pointer_rtx
	      || (GET_CODE (XEXP (src, 0)) == PLUS
		  && XEXP (XEXP (src, 0), 0) == arg_pointer_rtx)))
	return gen_rtx_ADDRESS (VOIDmode, src);
      return 0;

    case CONST:
      src = XEXP (src, 0);
      if (GET_CODE (src) != PLUS && GET_CODE (src) != MINUS)
	break;

      /* ... fall through ...  */

    case PLUS:
    case MINUS:
      {
	rtx temp, src_0 = XEXP (src, 0), src_1 = XEXP (src, 1);

	/* If either operand is a REG that is a known pointer, then it
	   is the base.  */
	if (REG_P (src_0) && REG_POINTER (src_0))
	  return find_base_value (src_0);
	if (REG_P (src_1) && REG_POINTER (src_1))
	  return find_base_value (src_1);

	/* If either operand is a REG, then see if we already have
	   a known value for it.  */
	if (REG_P (src_0))
	  {
	    temp = find_base_value (src_0);
	    if (temp != 0)
	      src_0 = temp;
	  }

	if (REG_P (src_1))
	  {
	    temp = find_base_value (src_1);
	    if (temp != 0)
	      src_1 = temp;
	  }

	/* If either base is a named object or a special address
	   (like an argument or stack reference), then use it for the
	   base term.  */
	if (src_0 != 0
	    && (GET_CODE (src_0) == SYMBOL_REF
		|| GET_CODE (src_0) == LABEL_REF
		|| (GET_CODE (src_0) == ADDRESS
		    && GET_MODE (src_0) != VOIDmode)))
	  return src_0;

	if (src_1 != 0
	    && (GET_CODE (src_1) == SYMBOL_REF
		|| GET_CODE (src_1) == LABEL_REF
		|| (GET_CODE (src_1) == ADDRESS
		    && GET_MODE (src_1) != VOIDmode)))
	  return src_1;

	/* Guess which operand is the base address:
	   If either operand is a symbol, then it is the base.  If
	   either operand is a CONST_INT, then the other is the base.  */
	if (GET_CODE (src_1) == CONST_INT || CONSTANT_P (src_0))
	  return find_base_value (src_0);
	else if (GET_CODE (src_0) == CONST_INT || CONSTANT_P (src_1))
	  return find_base_value (src_1);

	return 0;
      }

    case LO_SUM:
      /* The standard form is (lo_sum reg sym) so look only at the
	 second operand.  */
      return find_base_value (XEXP (src, 1));

    case AND:
      /* If the second operand is constant set the base
	 address to the first operand.  */
      if (GET_CODE (XEXP (src, 1)) == CONST_INT && INTVAL (XEXP (src, 1)) != 0)
	return find_base_value (XEXP (src, 0));
      return 0;

    case TRUNCATE:
      if (GET_MODE_SIZE (GET_MODE (src)) < GET_MODE_SIZE (Pmode))
	break;
      /* Fall through.  */
    case HIGH:
    case PRE_INC:
    case PRE_DEC:
    case POST_INC:
    case POST_DEC:
    case PRE_MODIFY:
    case POST_MODIFY:
      return find_base_value (XEXP (src, 0));

    case ZERO_EXTEND:
    case SIGN_EXTEND:	/* used for NT/Alpha pointers */
      {
	rtx temp = find_base_value (XEXP (src, 0));

	if (temp != 0 && CONSTANT_P (temp))
	  temp = convert_memory_address (Pmode, temp);

	return temp;
      }

    default:
      break;
    }

  return 0;
}
/* Called from init_alias_analysis indirectly through note_stores.  */

/* While scanning insns to find base values, reg_seen[N] is nonzero if
   register N has been set in this function.  */
static char *reg_seen;

/* Addresses which are known not to alias anything else are identified
   by a unique integer.  */
static int unique_id;

static void
record_set (rtx dest, const_rtx set, void *data ATTRIBUTE_UNUSED)
{
  unsigned regno;
  rtx src;
  int n;

  if (!REG_P (dest))
    return;

  regno = REGNO (dest);

  gcc_assert (regno < VEC_length (rtx, reg_base_value));

  /* If this spans multiple hard registers, then we must indicate that every
     register has an unusable value.  */
  if (regno < FIRST_PSEUDO_REGISTER)
    n = hard_regno_nregs[regno][GET_MODE (dest)];
  else
    n = 1;
  if (n != 1)
    {
      while (--n >= 0)
	{
	  reg_seen[regno + n] = 1;
	  new_reg_base_value[regno + n] = 0;
	}
      return;
    }

  if (set)
    {
      /* A CLOBBER wipes out any old value but does not prevent a previously
	 unset register from acquiring a base address (i.e. reg_seen is not
	 set).  */
      if (GET_CODE (set) == CLOBBER)
	{
	  new_reg_base_value[regno] = 0;
	  return;
	}
      src = SET_SRC (set);
    }
  else
    {
      if (reg_seen[regno])
	{
	  new_reg_base_value[regno] = 0;
	  return;
	}
      reg_seen[regno] = 1;
      new_reg_base_value[regno] = gen_rtx_ADDRESS (Pmode,
						   GEN_INT (unique_id++));
      return;
    }

  /* If this is not the first set of REGNO, see whether the new value
     is related to the old one.  There are two cases of interest:

	(1) The register might be assigned an entirely new value
	    that has the same base term as the original set.

	(2) The set might be a simple self-modification that
	    cannot change REGNO's base value.

     If neither case holds, reject the original base value as invalid.
     Note that the following situation is not detected:

	 extern int x, y;  int *p = &x;  p += (&y-&x);

     ANSI C does not allow computing the difference of addresses
     of distinct top level objects.  */
  if (new_reg_base_value[regno] != 0
      && find_base_value (src) != new_reg_base_value[regno])
    switch (GET_CODE (src))
      {
      case LO_SUM:
      case MINUS:
	if (XEXP (src, 0) != dest && XEXP (src, 1) != dest)
	  new_reg_base_value[regno] = 0;
	break;
      case PLUS:
	/* If the value we add in the PLUS is also a valid base value,
	   this might be the actual base value, and the original value
	   an index.  */
	{
	  rtx other = NULL_RTX;

	  if (XEXP (src, 0) == dest)
	    other = XEXP (src, 1);
	  else if (XEXP (src, 1) == dest)
	    other = XEXP (src, 0);

	  if (! other || find_base_value (other))
	    new_reg_base_value[regno] = 0;
	  break;
	}
      case AND:
	if (XEXP (src, 0) != dest || GET_CODE (XEXP (src, 1)) != CONST_INT)
	  new_reg_base_value[regno] = 0;
	break;
      default:
	new_reg_base_value[regno] = 0;
	break;
      }
  /* If this is the first set of a register, record the value.  */
  else if ((regno >= FIRST_PSEUDO_REGISTER || ! fixed_regs[regno])
	   && ! reg_seen[regno] && new_reg_base_value[regno] == 0)
    new_reg_base_value[regno] = find_base_value (src);

  reg_seen[regno] = 1;
}
/* If a value is known for REGNO, return it.  */

rtx
get_reg_known_value (unsigned int regno)
{
  if (regno >= FIRST_PSEUDO_REGISTER)
    {
      regno -= FIRST_PSEUDO_REGISTER;
      if (regno < reg_known_value_size)
	return reg_known_value[regno];
    }
  return NULL;
}

/* Set it.  */

static void
set_reg_known_value (unsigned int regno, rtx val)
{
  if (regno >= FIRST_PSEUDO_REGISTER)
    {
      regno -= FIRST_PSEUDO_REGISTER;
      if (regno < reg_known_value_size)
	reg_known_value[regno] = val;
    }
}

/* Similarly for reg_known_equiv_p.  */

bool
get_reg_known_equiv_p (unsigned int regno)
{
  if (regno >= FIRST_PSEUDO_REGISTER)
    {
      regno -= FIRST_PSEUDO_REGISTER;
      if (regno < reg_known_value_size)
	return reg_known_equiv_p[regno];
    }
  return false;
}

static void
set_reg_known_equiv_p (unsigned int regno, bool val)
{
  if (regno >= FIRST_PSEUDO_REGISTER)
    {
      regno -= FIRST_PSEUDO_REGISTER;
      if (regno < reg_known_value_size)
	reg_known_equiv_p[regno] = val;
    }
}
/* Returns a canonical version of X, from the point of view of alias
   analysis.  (For example, if X is a MEM whose address is a register,
   and the register has a known value (say a SYMBOL_REF), then a MEM
   whose address is the SYMBOL_REF is returned.)  */

rtx
canon_rtx (rtx x)
{
  /* Recursively look for equivalences.  */
  if (REG_P (x) && REGNO (x) >= FIRST_PSEUDO_REGISTER)
    {
      rtx t = get_reg_known_value (REGNO (x));
      if (t == x)
	return x;
      if (t)
	return canon_rtx (t);
    }

  if (GET_CODE (x) == PLUS)
    {
      rtx x0 = canon_rtx (XEXP (x, 0));
      rtx x1 = canon_rtx (XEXP (x, 1));

      if (x0 != XEXP (x, 0) || x1 != XEXP (x, 1))
	{
	  if (GET_CODE (x0) == CONST_INT)
	    return plus_constant (x1, INTVAL (x0));
	  else if (GET_CODE (x1) == CONST_INT)
	    return plus_constant (x0, INTVAL (x1));
	  return gen_rtx_PLUS (GET_MODE (x), x0, x1);
	}
    }

  /* This gives us much better alias analysis when called from
     the loop optimizer.  Note we want to leave the original
     MEM alone, but need to return the canonicalized MEM with
     all the flags with their original values.  */
  else if (MEM_P (x))
    x = replace_equiv_address_nv (x, canon_rtx (XEXP (x, 0)));

  return x;
}
/* Return 1 if X and Y are identical-looking rtx's.
   Expect that X and Y have already been canonicalized.

   We use the data in reg_known_value above to see if two registers with
   different numbers are, in fact, equivalent.  */

static int
rtx_equal_for_memref_p (const_rtx x, const_rtx y)
{
  int i;
  int j;
  enum rtx_code code;
  const char *fmt;

  if (x == 0 && y == 0)
    return 1;
  if (x == 0 || y == 0)
    return 0;

  if (x == y)
    return 1;

  code = GET_CODE (x);
  /* Rtx's of different codes cannot be equal.  */
  if (code != GET_CODE (y))
    return 0;

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.
     (REG:SI x) and (REG:HI x) are NOT equivalent.  */

  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  /* Some RTL can be compared without a recursive examination.  */
  switch (code)
    {
    case REG:
      return REGNO (x) == REGNO (y);

    case LABEL_REF:
      return XEXP (x, 0) == XEXP (y, 0);

    case SYMBOL_REF:
      return XSTR (x, 0) == XSTR (y, 0);

    case VALUE:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_FIXED:
      /* There's no need to compare the contents of CONST_DOUBLEs or
	 CONST_INTs because pointer equality is a good enough
	 comparison for these nodes.  */
      return 0;

    default:
      break;
    }

  /* canon_rtx knows how to handle plus.  No need to canonicalize.  */
  if (code == PLUS)
    return ((rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 0))
	     && rtx_equal_for_memref_p (XEXP (x, 1), XEXP (y, 1)))
	    || (rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 1))
		&& rtx_equal_for_memref_p (XEXP (x, 1), XEXP (y, 0))));
  /* For commutative operations, the RTXs match if the operands match in
     either order.  Also handle the simple binary and unary cases without
     a loop.  */
  if (COMMUTATIVE_P (x))
    {
      rtx xop0 = canon_rtx (XEXP (x, 0));
      rtx yop0 = canon_rtx (XEXP (y, 0));
      rtx yop1 = canon_rtx (XEXP (y, 1));

      return ((rtx_equal_for_memref_p (xop0, yop0)
	       && rtx_equal_for_memref_p (canon_rtx (XEXP (x, 1)), yop1))
	      || (rtx_equal_for_memref_p (xop0, yop1)
		  && rtx_equal_for_memref_p (canon_rtx (XEXP (x, 1)), yop0)));
    }
  else if (NON_COMMUTATIVE_P (x))
    {
      return (rtx_equal_for_memref_p (canon_rtx (XEXP (x, 0)),
				      canon_rtx (XEXP (y, 0)))
	      && rtx_equal_for_memref_p (canon_rtx (XEXP (x, 1)),
					 canon_rtx (XEXP (y, 1))));
    }
  else if (UNARY_P (x))
    return rtx_equal_for_memref_p (canon_rtx (XEXP (x, 0)),
				   canon_rtx (XEXP (y, 0)));

  /* Compare the elements.  If any pair of corresponding elements
     fail to match, return 0 for the whole thing.

     Limit cases to types which actually appear in addresses.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      switch (fmt[i])
	{
	case 'i':
	  if (XINT (x, i) != XINT (y, i))
	    return 0;
	  break;

	case 'E':
	  /* Two vectors must have the same length.  */
	  if (XVECLEN (x, i) != XVECLEN (y, i))
	    return 0;

	  /* And the corresponding elements must match.  */
	  for (j = 0; j < XVECLEN (x, i); j++)
	    if (rtx_equal_for_memref_p (canon_rtx (XVECEXP (x, i, j)),
					canon_rtx (XVECEXP (y, i, j))) == 0)
	      return 0;
	  break;

	case 'e':
	  if (rtx_equal_for_memref_p (canon_rtx (XEXP (x, i)),
				      canon_rtx (XEXP (y, i))) == 0)
	    return 0;
	  break;

	  /* This can happen for asm operands.  */
	case 's':
	  if (strcmp (XSTR (x, i), XSTR (y, i)))
	    return 0;
	  break;

	/* This can happen for an asm which clobbers memory.  */
	case '0':
	  break;

	  /* It is believed that rtx's at this level will never
	     contain anything but integers and other rtx's,
	     except for within LABEL_REFs and SYMBOL_REFs.  */
	default:
	  gcc_unreachable ();
	}
    }
  return 1;
}
rtx
find_base_term (rtx x)
{
  cselib_val *val;
  struct elt_loc_list *l;

#if defined (FIND_BASE_TERM)
  /* Try machine-dependent ways to find the base term.  */
  x = FIND_BASE_TERM (x);
#endif

  switch (GET_CODE (x))
    {
    case REG:
      return REG_BASE_VALUE (x);

    case TRUNCATE:
      if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (Pmode))
	return 0;
      /* Fall through.  */
    case HIGH:
    case PRE_INC:
    case PRE_DEC:
    case POST_INC:
    case POST_DEC:
    case PRE_MODIFY:
    case POST_MODIFY:
      return find_base_term (XEXP (x, 0));

    case ZERO_EXTEND:
    case SIGN_EXTEND:	/* Used for Alpha/NT pointers */
      {
	rtx temp = find_base_term (XEXP (x, 0));

	if (temp != 0 && CONSTANT_P (temp))
	  temp = convert_memory_address (Pmode, temp);

	return temp;
      }

    case VALUE:
      val = CSELIB_VAL_PTR (x);
      if (!val)
	return 0;
      for (l = val->locs; l; l = l->next)
	if ((x = find_base_term (l->loc)) != 0)
	  return x;
      return 0;

    case LO_SUM:
      /* The standard form is (lo_sum reg sym) so look only at the
	 second operand.  */
      return find_base_term (XEXP (x, 1));

    case CONST:
      x = XEXP (x, 0);
      if (GET_CODE (x) != PLUS && GET_CODE (x) != MINUS)
	return 0;
      /* Fall through.  */
    case PLUS:
    case MINUS:
      {
	rtx tmp1 = XEXP (x, 0);
	rtx tmp2 = XEXP (x, 1);

	/* This is a little bit tricky since we have to determine which of
	   the two operands represents the real base address.  Otherwise this
	   routine may return the index register instead of the base register.

	   That may cause us to believe no aliasing was possible, when in
	   fact aliasing is possible.

	   We use a few simple tests to guess the base register.  Additional
	   tests can certainly be added.  For example, if one of the operands
	   is a shift or multiply, then it must be the index register and the
	   other operand is the base register.  */

	if (tmp1 == pic_offset_table_rtx && CONSTANT_P (tmp2))
	  return find_base_term (tmp2);

	/* If either operand is known to be a pointer, then use it
	   to determine the base term.  */
	if (REG_P (tmp1) && REG_POINTER (tmp1))
	  {
	    rtx base = find_base_term (tmp1);
	    if (base)
	      return base;
	  }

	if (REG_P (tmp2) && REG_POINTER (tmp2))
	  {
	    rtx base = find_base_term (tmp2);
	    if (base)
	      return base;
	  }

	/* Neither operand was known to be a pointer.  Go ahead and find the
	   base term for both operands.  */
	tmp1 = find_base_term (tmp1);
	tmp2 = find_base_term (tmp2);

	/* If either base term is a named object or a special address
	   (like an argument or stack reference), then use it for the
	   base term.  */
	if (tmp1 != 0
	    && (GET_CODE (tmp1) == SYMBOL_REF
		|| GET_CODE (tmp1) == LABEL_REF
		|| (GET_CODE (tmp1) == ADDRESS
		    && GET_MODE (tmp1) != VOIDmode)))
	  return tmp1;

	if (tmp2 != 0
	    && (GET_CODE (tmp2) == SYMBOL_REF
		|| GET_CODE (tmp2) == LABEL_REF
		|| (GET_CODE (tmp2) == ADDRESS
		    && GET_MODE (tmp2) != VOIDmode)))
	  return tmp2;

	/* We could not determine which of the two operands was the
	   base register and which was the index.  So we can determine
	   nothing from the base alias check.  */
	return 0;
      }

    case AND:
      if (GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) != 0)
	return find_base_term (XEXP (x, 0));
      return 0;

    case SYMBOL_REF:
    case LABEL_REF:
      return x;

    default:
      return 0;
    }
}
/* Return 0 if the addresses X and Y are known to point to different
   objects, 1 if they might be pointers to the same object.  */

static int
base_alias_check (rtx x, rtx y, enum machine_mode x_mode,
		  enum machine_mode y_mode)
{
  rtx x_base = find_base_term (x);
  rtx y_base = find_base_term (y);

  /* If the address itself has no known base see if a known equivalent
     value has one.  If either address still has no known base, nothing
     is known about aliasing.  */
  if (x_base == 0)
    {
      rtx x_c;

      if (! flag_expensive_optimizations || (x_c = canon_rtx (x)) == x)
	return 1;

      x_base = find_base_term (x_c);
      if (x_base == 0)
	return 1;
    }

  if (y_base == 0)
    {
      rtx y_c;
      if (! flag_expensive_optimizations || (y_c = canon_rtx (y)) == y)
	return 1;

      y_base = find_base_term (y_c);
      if (y_base == 0)
	return 1;
    }

  /* If the base addresses are equal nothing is known about aliasing.  */
  if (rtx_equal_p (x_base, y_base))
    return 1;

  /* The base addresses are different expressions.  If they are not accessed
     via AND, there is no conflict.  We can bring knowledge of object
     alignment into play here.  For example, on alpha, "char a, b;" can
     alias one another, though "char a; long b;" cannot.  AND addresses may
     implicitly alias surrounding objects; i.e. unaligned access in DImode
     via AND address can alias all surrounding object types except those
     with alignment 8 or higher.  */
  if (GET_CODE (x) == AND && GET_CODE (y) == AND)
    return 1;
  if (GET_CODE (x) == AND
      && (GET_CODE (XEXP (x, 1)) != CONST_INT
	  || (int) GET_MODE_UNIT_SIZE (y_mode) < -INTVAL (XEXP (x, 1))))
    return 1;
  if (GET_CODE (y) == AND
      && (GET_CODE (XEXP (y, 1)) != CONST_INT
	  || (int) GET_MODE_UNIT_SIZE (x_mode) < -INTVAL (XEXP (y, 1))))
    return 1;

  /* Differing symbols not accessed via AND never alias.  */
  if (GET_CODE (x_base) != ADDRESS && GET_CODE (y_base) != ADDRESS)
    return 0;

  /* If one address is a stack reference there can be no alias:
     stack references using different base registers do not alias,
     a stack reference can not alias a parameter, and a stack reference
     can not alias a global.  */
  if ((GET_CODE (x_base) == ADDRESS && GET_MODE (x_base) == Pmode)
      || (GET_CODE (y_base) == ADDRESS && GET_MODE (y_base) == Pmode))
    return 0;

  if (! flag_argument_noalias)
    return 1;

  if (flag_argument_noalias > 1)
    return 0;

  /* Weak noalias assertion (arguments are distinct, but may match globals).  */
  return ! (GET_MODE (x_base) == VOIDmode && GET_MODE (y_base) == VOIDmode);
}
/* Convert the address X into something we can use.  This is done by returning
   it unchanged unless it is a value; in the latter case we call cselib to get
   a more useful rtx.  */

rtx
get_addr (rtx x)
{
  cselib_val *v;
  struct elt_loc_list *l;

  if (GET_CODE (x) != VALUE)
    return x;
  v = CSELIB_VAL_PTR (x);
  if (v)
    {
      for (l = v->locs; l; l = l->next)
	if (CONSTANT_P (l->loc))
	  return l->loc;
      for (l = v->locs; l; l = l->next)
	if (!REG_P (l->loc) && !MEM_P (l->loc))
	  return l->loc;
      if (v->locs)
	return v->locs->loc;
    }
  return x;
}
/* Return the address of the (N_REFS + 1)th memory reference to ADDR
   where SIZE is the size in bytes of the memory reference.  If ADDR
   is not modified by the memory reference then ADDR is returned.  */

static rtx
addr_side_effect_eval (rtx addr, int size, int n_refs)
{
  int offset = 0;

  switch (GET_CODE (addr))
    {
    case PRE_INC:
      offset = (n_refs + 1) * size;
      break;
    case PRE_DEC:
      offset = -(n_refs + 1) * size;
      break;
    case POST_INC:
      offset = n_refs * size;
      break;
    case POST_DEC:
      offset = -n_refs * size;
      break;

    default:
      return addr;
    }

  if (offset)
    addr = gen_rtx_PLUS (GET_MODE (addr), XEXP (addr, 0),
			 GEN_INT (offset));
  else
    addr = XEXP (addr, 0);
  addr = canon_rtx (addr);

  return addr;
}
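
/* A worked instance of the offset arithmetic above (illustrative; REG
   stands for any register rtx): for a 4-byte reference,

     addr_side_effect_eval ((post_inc REG), 4, 0) => canon of REG
	(offset = 0 * 4; the first reference sees the unmodified address)
     addr_side_effect_eval ((post_inc REG), 4, 1) => canon of (plus REG 4)
	(the second reference sees the address after one increment)

   while a PRE_INC with N_REFS == 0 already yields (plus REG 4).  */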
/* Return nonzero if X and Y (memory addresses) could reference the
   same location in memory.  C is an offset accumulator.  When
   C is nonzero, we are testing aliases between X and Y + C.
   XSIZE is the size in bytes of the X reference,
   similarly YSIZE is the size in bytes for Y.
   Expect that canon_rtx has already been called for X and Y.

   If XSIZE or YSIZE is zero, we do not know the amount of memory being
   referenced (the reference was BLKmode), so make the most pessimistic
   assumptions.

   If XSIZE or YSIZE is negative, we may access memory outside the object
   being referenced as a side effect.  This can happen when using AND to
   align memory references, as is done on the Alpha.

   Nice to notice that varying addresses cannot conflict with fp if no
   local variables had their addresses taken, but that's too hard now.  */

static int
memrefs_conflict_p (int xsize, rtx x, int ysize, rtx y, HOST_WIDE_INT c)
{
  if (GET_CODE (x) == VALUE)
    x = get_addr (x);
  if (GET_CODE (y) == VALUE)
    y = get_addr (y);
  if (GET_CODE (x) == HIGH)
    x = XEXP (x, 0);
  else if (GET_CODE (x) == LO_SUM)
    x = XEXP (x, 1);
  else
    x = addr_side_effect_eval (x, xsize, 0);
  if (GET_CODE (y) == HIGH)
    y = XEXP (y, 0);
  else if (GET_CODE (y) == LO_SUM)
    y = XEXP (y, 1);
  else
    y = addr_side_effect_eval (y, ysize, 0);

  if (rtx_equal_for_memref_p (x, y))
    {
      if (xsize <= 0 || ysize <= 0)
	return 1;
      if (c >= 0 && xsize > c)
	return 1;
      if (c < 0 && ysize+c > 0)
	return 1;
      return 0;
    }

  /* This code used to check for conflicts involving stack references and
     globals but the base address alias code now handles these cases.  */

  if (GET_CODE (x) == PLUS)
    {
      /* The fact that X is canonicalized means that this
	 PLUS rtx is canonicalized.  */
      rtx x0 = XEXP (x, 0);
      rtx x1 = XEXP (x, 1);

      if (GET_CODE (y) == PLUS)
	{
	  /* The fact that Y is canonicalized means that this
	     PLUS rtx is canonicalized.  */
	  rtx y0 = XEXP (y, 0);
	  rtx y1 = XEXP (y, 1);

	  if (rtx_equal_for_memref_p (x1, y1))
	    return memrefs_conflict_p (xsize, x0, ysize, y0, c);
	  if (rtx_equal_for_memref_p (x0, y0))
	    return memrefs_conflict_p (xsize, x1, ysize, y1, c);
	  if (GET_CODE (x1) == CONST_INT)
	    {
	      if (GET_CODE (y1) == CONST_INT)
		return memrefs_conflict_p (xsize, x0, ysize, y0,
					   c - INTVAL (x1) + INTVAL (y1));
	      else
		return memrefs_conflict_p (xsize, x0, ysize, y,
					   c - INTVAL (x1));
	    }
	  else if (GET_CODE (y1) == CONST_INT)
	    return memrefs_conflict_p (xsize, x, ysize, y0, c + INTVAL (y1));

	  return 1;
	}
      else if (GET_CODE (x1) == CONST_INT)
	return memrefs_conflict_p (xsize, x0, ysize, y, c - INTVAL (x1));
    }
  else if (GET_CODE (y) == PLUS)
    {
      /* The fact that Y is canonicalized means that this
	 PLUS rtx is canonicalized.  */
      rtx y0 = XEXP (y, 0);
      rtx y1 = XEXP (y, 1);

      if (GET_CODE (y1) == CONST_INT)
	return memrefs_conflict_p (xsize, x, ysize, y0, c + INTVAL (y1));
      else
	return 1;
    }

  if (GET_CODE (x) == GET_CODE (y))
    switch (GET_CODE (x))
      {
      case MULT:
	{
	  /* Handle cases where we expect the second operands to be the
	     same, and check only whether the first operand would conflict
	     or not.  */
	  rtx x0, y0;
	  rtx x1 = canon_rtx (XEXP (x, 1));
	  rtx y1 = canon_rtx (XEXP (y, 1));
	  if (! rtx_equal_for_memref_p (x1, y1))
	    return 1;
	  x0 = canon_rtx (XEXP (x, 0));
	  y0 = canon_rtx (XEXP (y, 0));
	  if (rtx_equal_for_memref_p (x0, y0))
	    return (xsize == 0 || ysize == 0
		    || (c >= 0 && xsize > c) || (c < 0 && ysize+c > 0));

	  /* Can't properly adjust our sizes.  */
	  if (GET_CODE (x1) != CONST_INT)
	    return 1;
	  xsize /= INTVAL (x1);
	  ysize /= INTVAL (x1);
	  c /= INTVAL (x1);
	  return memrefs_conflict_p (xsize, x0, ysize, y0, c);
	}

      default:
	break;
      }

  /* Treat an access through an AND (e.g. a subword access on an Alpha)
     as an access with indeterminate size.  Assume that references
     besides AND are aligned, so if the size of the other reference is
     at least as large as the alignment, assume no other overlap.  */
  if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT)
    {
      if (GET_CODE (y) == AND || ysize < -INTVAL (XEXP (x, 1)))
	xsize = -1;
      return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)), ysize, y, c);
    }
  if (GET_CODE (y) == AND && GET_CODE (XEXP (y, 1)) == CONST_INT)
    {
      /* ??? If we are indexing far enough into the array/structure, we
	 may yet be able to determine that we can not overlap.  But we
	 also need to know that we are far enough from the end not to
	 overlap a following reference, so we do nothing with that for
	 now.  */
      if (GET_CODE (x) == AND || xsize < -INTVAL (XEXP (y, 1)))
	ysize = -1;
      return memrefs_conflict_p (xsize, x, ysize, canon_rtx (XEXP (y, 0)), c);
    }

  if (CONSTANT_P (x))
    {
      if (GET_CODE (x) == CONST_INT && GET_CODE (y) == CONST_INT)
	{
	  c += (INTVAL (y) - INTVAL (x));
	  return (xsize <= 0 || ysize <= 0
		  || (c >= 0 && xsize > c) || (c < 0 && ysize+c > 0));
	}

      if (GET_CODE (x) == CONST)
	{
	  if (GET_CODE (y) == CONST)
	    return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)),
				       ysize, canon_rtx (XEXP (y, 0)), c);
	  else
	    return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)),
				       ysize, y, c);
	}
      if (GET_CODE (y) == CONST)
	return memrefs_conflict_p (xsize, x, ysize,
				   canon_rtx (XEXP (y, 0)), c);

      if (CONSTANT_P (y))
	return (xsize <= 0 || ysize <= 0
		|| (rtx_equal_for_memref_p (x, y)
		    && ((c >= 0 && xsize > c) || (c < 0 && ysize+c > 0))));

      return 1;
    }
  return 1;
}
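
/* A worked instance of the offset test above: with X == Y after
   canonicalization, XSIZE == YSIZE == 4 and C == 2, the two accesses
   cover bytes [0,4) and [2,6), so xsize > c holds and the function
   returns 1 (possible conflict).  With C == 4 the ranges [0,4) and
   [4,8) are disjoint and it returns 0.  */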

/* Functions to compute memory dependencies.

   Since we process the insns in execution order, we can build tables
   to keep track of what registers are fixed (and not aliased), what
   registers are varying in known ways, and what registers are varying
   in unknown ways.

   If both memory references are volatile, then there must always be a
   dependence between the two references, since their order cannot be
   changed.  A volatile and non-volatile reference can be interchanged
   though.

   A MEM_IN_STRUCT reference at a non-AND varying address can never
   conflict with a non-MEM_IN_STRUCT reference at a fixed address.  We
   must also allow AND addresses, because they may generate accesses
   outside the object being referenced.  This is used to generate
   aligned addresses from unaligned addresses, for instance, the alpha
   storeqi_unaligned pattern.  */
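
/* For instance, given a store (set (mem) (reg)) followed by a load
   (set (reg) (mem')) from a conflicting address, true_dependence holds
   (a read after a write).  anti_dependence covers the reverse order (a
   write after a read), output_dependence covers two conflicting
   stores, and read_dependence relates two loads, which matters only
   when both are volatile.  */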

/* Read dependence: X is read after read in MEM takes place.  There can
   only be a dependence here if both reads are volatile.  */

static int
read_dependence (const_rtx mem, const_rtx x)
{
  return MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem);
}

/* Returns MEM1 if and only if MEM1 is a scalar at a fixed address and
   MEM2 is a reference to a structure at a varying address, or returns
   MEM2 if vice versa.  Otherwise, returns NULL_RTX.  If a non-NULL
   value is returned, MEM1 and MEM2 can never alias.  VARIES_P is used
   to decide whether or not an address may vary; it should return
   nonzero whenever variation is possible.
   MEM1_ADDR and MEM2_ADDR are the addresses of MEM1 and MEM2.  */

static const_rtx
fixed_scalar_and_varying_struct_p (const_rtx mem1, const_rtx mem2,
                                   rtx mem1_addr, rtx mem2_addr,
                                   bool (*varies_p) (const_rtx, bool))
{
  if (! flag_strict_aliasing)
    return NULL_RTX;

  if (MEM_ALIAS_SET (mem2)
      && MEM_SCALAR_P (mem1) && MEM_IN_STRUCT_P (mem2)
      && !varies_p (mem1_addr, 1) && varies_p (mem2_addr, 1))
    /* MEM1 is a scalar at a fixed address; MEM2 is a struct at a
       varying address.  */
    return mem1;

  if (MEM_ALIAS_SET (mem1)
      && MEM_IN_STRUCT_P (mem1) && MEM_SCALAR_P (mem2)
      && varies_p (mem1_addr, 1) && !varies_p (mem2_addr, 1))
    /* MEM2 is a scalar at a fixed address; MEM1 is a struct at a
       varying address.  */
    return mem2;

  return NULL_RTX;
}

/* Returns nonzero if something about the mode or address format of MEM
   indicates that it might well alias *anything*.  */

static int
aliases_everything_p (const_rtx mem)
{
  if (GET_CODE (XEXP (mem, 0)) == AND)
    /* If the address is an AND, it's very hard to know what it is
       actually pointing at.  */
    return 1;

  return 0;
}
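
/* For instance, an Alpha unaligned access may use an address like
   (and:DI (reg:DI) (const_int -8)); the mask clears the low three
   bits, so the reference may touch any byte of the surrounding aligned
   8-byte word and we cannot tell what it actually points at.  */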

/* Return true if we can determine that the fields referenced cannot
   overlap for any pair of objects.  */

static bool
nonoverlapping_component_refs_p (const_tree x, const_tree y)
{
  const_tree fieldx, fieldy, typex, typey, orig_y;

  do
    {
      /* The comparison has to be done at a common type, since we don't
         know how the inheritance hierarchy works.  */
      orig_y = y;
      do
        {
          fieldx = TREE_OPERAND (x, 1);
          typex = TYPE_MAIN_VARIANT (DECL_FIELD_CONTEXT (fieldx));

          y = orig_y;
          do
            {
              fieldy = TREE_OPERAND (y, 1);
              typey = TYPE_MAIN_VARIANT (DECL_FIELD_CONTEXT (fieldy));

              if (typex == typey)
                goto found;

              y = TREE_OPERAND (y, 0);
            }
          while (y && TREE_CODE (y) == COMPONENT_REF);

          x = TREE_OPERAND (x, 0);
        }
      while (x && TREE_CODE (x) == COMPONENT_REF);

      /* Never found a common type.  */
      return false;

    found:
      /* If we're left with accessing different fields of a structure,
         then no overlap.  */
      if (TREE_CODE (typex) == RECORD_TYPE
          && fieldx != fieldy)
        return true;

      /* The comparison on the current field failed.  If we're accessing
         a very nested structure, look at the next outer level.  */
      x = TREE_OPERAND (x, 0);
      y = TREE_OPERAND (y, 0);
    }
  while (x && y
         && TREE_CODE (x) == COMPONENT_REF
         && TREE_CODE (y) == COMPONENT_REF);

  return false;
}
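
/* E.g. given

     struct A { int i; int j; } *p, *q;

   the accesses p->i and q->j can never overlap: both COMPONENT_REFs
   reach the common RECORD_TYPE A with distinct FIELD_DECLs for i and
   j, so the test above returns true.  */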

/* Look at the bottom of the COMPONENT_REF list for a DECL, and return it.  */

static tree
decl_for_component_ref (tree x)
{
  do
    {
      x = TREE_OPERAND (x, 0);
    }
  while (x && TREE_CODE (x) == COMPONENT_REF);

  return x && DECL_P (x) ? x : NULL_TREE;
}

/* Walk up the COMPONENT_REF list and adjust OFFSET to compensate for the
   offset of the field reference.  */

static rtx
adjust_offset_for_component_ref (tree x, rtx offset)
{
  HOST_WIDE_INT ioffset;

  if (! offset)
    return NULL_RTX;

  ioffset = INTVAL (offset);
  do
    {
      tree offset = component_ref_field_offset (x);
      tree field = TREE_OPERAND (x, 1);

      if (! host_integerp (offset, 1))
        return NULL_RTX;
      ioffset += (tree_low_cst (offset, 1)
                  + (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
                     / BITS_PER_UNIT));

      x = TREE_OPERAND (x, 0);
    }
  while (x && TREE_CODE (x) == COMPONENT_REF);

  return GEN_INT (ioffset);
}
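
/* E.g. if X is the reference a.b.c and OFFSET is relative to the start
   of a.b.c, the loop above first adds the byte offset of field c
   within b, then that of field b within a, producing an offset
   relative to the start of the underlying DECL a.  */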

/* Return nonzero if we can determine the exprs corresponding to memrefs
   X and Y and they do not overlap.  */

static int
nonoverlapping_memrefs_p (const_rtx x, const_rtx y)
{
  tree exprx = MEM_EXPR (x), expry = MEM_EXPR (y);
  rtx rtlx, rtly;
  rtx basex, basey;
  rtx moffsetx, moffsety;
  HOST_WIDE_INT offsetx = 0, offsety = 0, sizex, sizey, tem;

  /* Unless both have exprs, we can't tell anything.  */
  if (exprx == 0 || expry == 0)
    return 0;

  /* If both are field references, we may be able to determine something.  */
  if (TREE_CODE (exprx) == COMPONENT_REF
      && TREE_CODE (expry) == COMPONENT_REF
      && nonoverlapping_component_refs_p (exprx, expry))
    return 1;

  /* If the field reference test failed, look at the DECLs involved.  */
  moffsetx = MEM_OFFSET (x);
  if (TREE_CODE (exprx) == COMPONENT_REF)
    {
      if (TREE_CODE (expry) == VAR_DECL
          && POINTER_TYPE_P (TREE_TYPE (expry)))
        {
          tree field = TREE_OPERAND (exprx, 1);
          tree fieldcontext = DECL_FIELD_CONTEXT (field);
          if (ipa_type_escape_field_does_not_clobber_p (fieldcontext,
                                                        TREE_TYPE (field)))
            return 1;
        }
      {
        tree t = decl_for_component_ref (exprx);
        if (! t)
          return 0;
        moffsetx = adjust_offset_for_component_ref (exprx, moffsetx);
        exprx = t;
      }
    }
  else if (INDIRECT_REF_P (exprx))
    {
      exprx = TREE_OPERAND (exprx, 0);
      if (flag_argument_noalias < 2
          || TREE_CODE (exprx) != PARM_DECL)
        return 0;
    }

  moffsety = MEM_OFFSET (y);
  if (TREE_CODE (expry) == COMPONENT_REF)
    {
      if (TREE_CODE (exprx) == VAR_DECL
          && POINTER_TYPE_P (TREE_TYPE (exprx)))
        {
          tree field = TREE_OPERAND (expry, 1);
          tree fieldcontext = DECL_FIELD_CONTEXT (field);
          if (ipa_type_escape_field_does_not_clobber_p (fieldcontext,
                                                        TREE_TYPE (field)))
            return 1;
        }
      {
        tree t = decl_for_component_ref (expry);
        if (! t)
          return 0;
        moffsety = adjust_offset_for_component_ref (expry, moffsety);
        expry = t;
      }
    }
  else if (INDIRECT_REF_P (expry))
    {
      expry = TREE_OPERAND (expry, 0);
      if (flag_argument_noalias < 2
          || TREE_CODE (expry) != PARM_DECL)
        return 0;
    }

  if (! DECL_P (exprx) || ! DECL_P (expry))
    return 0;

  rtlx = DECL_RTL (exprx);
  rtly = DECL_RTL (expry);

  /* If either RTL is not a MEM, it must be a REG or CONCAT, meaning they
     can't overlap unless they are the same because we never reuse that part
     of the stack frame used for locals for spilled pseudos.  */
  if ((!MEM_P (rtlx) || !MEM_P (rtly))
      && ! rtx_equal_p (rtlx, rtly))
    return 1;

  /* Get the base and offsets of both decls.  If either is a register, we
     know both are and are the same, so use that as the base.  The only
     way we can avoid overlap is if we can deduce that they are
     nonoverlapping pieces of that decl, which is very rare.  */
  basex = MEM_P (rtlx) ? XEXP (rtlx, 0) : rtlx;
  if (GET_CODE (basex) == PLUS && GET_CODE (XEXP (basex, 1)) == CONST_INT)
    offsetx = INTVAL (XEXP (basex, 1)), basex = XEXP (basex, 0);

  basey = MEM_P (rtly) ? XEXP (rtly, 0) : rtly;
  if (GET_CODE (basey) == PLUS && GET_CODE (XEXP (basey, 1)) == CONST_INT)
    offsety = INTVAL (XEXP (basey, 1)), basey = XEXP (basey, 0);

  /* If the bases are different, we know they do not overlap if both
     are constants or if one is a constant and the other a pointer into the
     stack frame.  Otherwise a different base means we can't tell if they
     overlap or not.  */
  if (! rtx_equal_p (basex, basey))
    return ((CONSTANT_P (basex) && CONSTANT_P (basey))
            || (CONSTANT_P (basex) && REG_P (basey)
                && REGNO_PTR_FRAME_P (REGNO (basey)))
            || (CONSTANT_P (basey) && REG_P (basex)
                && REGNO_PTR_FRAME_P (REGNO (basex))));

  sizex = (!MEM_P (rtlx) ? (int) GET_MODE_SIZE (GET_MODE (rtlx))
           : MEM_SIZE (rtlx) ? INTVAL (MEM_SIZE (rtlx))
           : -1);
  sizey = (!MEM_P (rtly) ? (int) GET_MODE_SIZE (GET_MODE (rtly))
           : MEM_SIZE (rtly) ? INTVAL (MEM_SIZE (rtly))
           : -1);

  /* If we have an offset for either memref, it can update the values computed
     above.  */
  if (moffsetx)
    offsetx += INTVAL (moffsetx), sizex -= INTVAL (moffsetx);
  if (moffsety)
    offsety += INTVAL (moffsety), sizey -= INTVAL (moffsety);

  /* If a memref has both a size and an offset, we can use the smaller size.
     We can't do this if the offset isn't known because we must view this
     memref as being anywhere inside the DECL's MEM.  */
  if (MEM_SIZE (x) && moffsetx)
    sizex = INTVAL (MEM_SIZE (x));
  if (MEM_SIZE (y) && moffsety)
    sizey = INTVAL (MEM_SIZE (y));

  /* Put the values of the memref with the lower offset in X's values.  */
  if (offsetx > offsety)
    {
      tem = offsetx, offsetx = offsety, offsety = tem;
      tem = sizex, sizex = sizey, sizey = tem;
    }

  /* If we don't know the size of the lower-offset value, we can't tell
     if they conflict.  Otherwise, we do the test.  */
  return sizex >= 0 && offsety >= offsetx + sizex;
}
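
/* For example, two references into the same stack decl at offsets 0
   and 8, where the lower one is known to be 4 bytes, cover the byte
   ranges [0,4) and [8,...): the lower reference ends before the higher
   one starts, so the function reports no overlap.  */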

/* True dependence: X is read after store in MEM takes place.  */

int
true_dependence (const_rtx mem, enum machine_mode mem_mode, const_rtx x,
                 bool (*varies) (const_rtx, bool))
{
  rtx x_addr, mem_addr;
  rtx base;

  if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
    return 1;

  /* (mem:BLK (scratch)) is a special mechanism to conflict with everything.
     This is used in epilogue deallocation functions, and in cselib.  */
  if (GET_MODE (x) == BLKmode && GET_CODE (XEXP (x, 0)) == SCRATCH)
    return 1;
  if (GET_MODE (mem) == BLKmode && GET_CODE (XEXP (mem, 0)) == SCRATCH)
    return 1;
  if (MEM_ALIAS_SET (x) == ALIAS_SET_MEMORY_BARRIER
      || MEM_ALIAS_SET (mem) == ALIAS_SET_MEMORY_BARRIER)
    return 1;

  if (DIFFERENT_ALIAS_SETS_P (x, mem))
    return 0;

  /* Read-only memory is by definition never modified, and therefore can't
     conflict with anything.  We don't expect to find the read-only flag
     set on MEM, but stupid user tricks can produce such MEMs, so don't
     die.  */
  if (MEM_READONLY_P (x))
    return 0;

  if (nonoverlapping_memrefs_p (mem, x))
    return 0;

  if (mem_mode == VOIDmode)
    mem_mode = GET_MODE (mem);

  x_addr = get_addr (XEXP (x, 0));
  mem_addr = get_addr (XEXP (mem, 0));

  base = find_base_term (x_addr);
  if (base && (GET_CODE (base) == LABEL_REF
               || (GET_CODE (base) == SYMBOL_REF
                   && CONSTANT_POOL_ADDRESS_P (base))))
    return 0;

  if (! base_alias_check (x_addr, mem_addr, GET_MODE (x), mem_mode))
    return 0;

  x_addr = canon_rtx (x_addr);
  mem_addr = canon_rtx (mem_addr);

  if (! memrefs_conflict_p (GET_MODE_SIZE (mem_mode), mem_addr,
                            SIZE_FOR_MODE (x), x_addr, 0))
    return 0;

  if (aliases_everything_p (x))
    return 1;

  /* We cannot use aliases_everything_p to test MEM, since we must look
     at MEM_MODE, rather than GET_MODE (MEM).  */
  if (mem_mode == QImode || GET_CODE (mem_addr) == AND)
    return 1;

  /* In true_dependence we also allow BLKmode to alias anything.  Why
     don't we do this in anti_dependence and output_dependence?  */
  if (mem_mode == BLKmode || GET_MODE (x) == BLKmode)
    return 1;

  return ! fixed_scalar_and_varying_struct_p (mem, x, mem_addr, x_addr,
                                              varies);
}

/* Canonical true dependence: X is read after store in MEM takes place.
   Variant of true_dependence which assumes MEM has already been
   canonicalized (hence we no longer do that here).
   The mem_addr argument has been added, since true_dependence computed
   this value prior to canonicalizing.
   If x_addr is non-NULL, it is used in preference to XEXP (x, 0).  */

int
canon_true_dependence (const_rtx mem, enum machine_mode mem_mode, rtx mem_addr,
                       const_rtx x, rtx x_addr,
                       bool (*varies) (const_rtx, bool))
{
  if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
    return 1;

  /* (mem:BLK (scratch)) is a special mechanism to conflict with everything.
     This is used in epilogue deallocation functions.  */
  if (GET_MODE (x) == BLKmode && GET_CODE (XEXP (x, 0)) == SCRATCH)
    return 1;
  if (GET_MODE (mem) == BLKmode && GET_CODE (XEXP (mem, 0)) == SCRATCH)
    return 1;
  if (MEM_ALIAS_SET (x) == ALIAS_SET_MEMORY_BARRIER
      || MEM_ALIAS_SET (mem) == ALIAS_SET_MEMORY_BARRIER)
    return 1;

  if (DIFFERENT_ALIAS_SETS_P (x, mem))
    return 0;

  /* Read-only memory is by definition never modified, and therefore can't
     conflict with anything.  We don't expect to find the read-only flag
     set on MEM, but stupid user tricks can produce such MEMs, so don't
     die.  */
  if (MEM_READONLY_P (x))
    return 0;

  if (nonoverlapping_memrefs_p (x, mem))
    return 0;

  if (! x_addr)
    x_addr = get_addr (XEXP (x, 0));

  if (! base_alias_check (x_addr, mem_addr, GET_MODE (x), mem_mode))
    return 0;

  x_addr = canon_rtx (x_addr);
  if (! memrefs_conflict_p (GET_MODE_SIZE (mem_mode), mem_addr,
                            SIZE_FOR_MODE (x), x_addr, 0))
    return 0;

  if (aliases_everything_p (x))
    return 1;

  /* We cannot use aliases_everything_p to test MEM, since we must look
     at MEM_MODE, rather than GET_MODE (MEM).  */
  if (mem_mode == QImode || GET_CODE (mem_addr) == AND)
    return 1;

  /* In true_dependence we also allow BLKmode to alias anything.  Why
     don't we do this in anti_dependence and output_dependence?  */
  if (mem_mode == BLKmode || GET_MODE (x) == BLKmode)
    return 1;

  return ! fixed_scalar_and_varying_struct_p (mem, x, mem_addr, x_addr,
                                              varies);
}

/* Returns nonzero if a write to X might alias a previous read from
   (or, if WRITEP is nonzero, a write to) MEM.  */

static int
write_dependence_p (const_rtx mem, const_rtx x, int writep)
{
  rtx x_addr, mem_addr;
  const_rtx fixed_scalar;
  rtx base;

  if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
    return 1;

  /* (mem:BLK (scratch)) is a special mechanism to conflict with everything.
     This is used in epilogue deallocation functions.  */
  if (GET_MODE (x) == BLKmode && GET_CODE (XEXP (x, 0)) == SCRATCH)
    return 1;
  if (GET_MODE (mem) == BLKmode && GET_CODE (XEXP (mem, 0)) == SCRATCH)
    return 1;
  if (MEM_ALIAS_SET (x) == ALIAS_SET_MEMORY_BARRIER
      || MEM_ALIAS_SET (mem) == ALIAS_SET_MEMORY_BARRIER)
    return 1;

  /* A read from read-only memory can't conflict with read-write memory.  */
  if (!writep && MEM_READONLY_P (mem))
    return 0;

  if (nonoverlapping_memrefs_p (x, mem))
    return 0;

  x_addr = get_addr (XEXP (x, 0));
  mem_addr = get_addr (XEXP (mem, 0));

  if (! writep)
    {
      base = find_base_term (mem_addr);
      if (base && (GET_CODE (base) == LABEL_REF
                   || (GET_CODE (base) == SYMBOL_REF
                       && CONSTANT_POOL_ADDRESS_P (base))))
        return 0;
    }

  if (! base_alias_check (x_addr, mem_addr, GET_MODE (x),
                          GET_MODE (mem)))
    return 0;

  x_addr = canon_rtx (x_addr);
  mem_addr = canon_rtx (mem_addr);

  if (!memrefs_conflict_p (SIZE_FOR_MODE (mem), mem_addr,
                           SIZE_FOR_MODE (x), x_addr, 0))
    return 0;

  fixed_scalar
    = fixed_scalar_and_varying_struct_p (mem, x, mem_addr, x_addr,
                                         rtx_addr_varies_p);

  return (!(fixed_scalar == mem && !aliases_everything_p (x))
          && !(fixed_scalar == x && !aliases_everything_p (mem)));
}
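
/* Note the final test above: once the address-based checks have failed
   to prove independence, the only escape is the fixed-scalar versus
   varying-struct distinction, and it is trusted only when the other
   reference does not use an AND address that could alias anything.  */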

/* Anti dependence: X is written after read in MEM takes place.  */

int
anti_dependence (const_rtx mem, const_rtx x)
{
  return write_dependence_p (mem, x, /*writep=*/0);
}

/* Output dependence: X is written after store in MEM takes place.  */

int
output_dependence (const_rtx mem, const_rtx x)
{
  return write_dependence_p (mem, x, /*writep=*/1);
}

/* Initialize the target-dependent part of the aliasing machinery:
   record base values for the hard registers that may hold an address
   on entry to the function.  */

void
init_alias_target (void)
{
  int i;

  memset (static_reg_base_value, 0, sizeof static_reg_base_value);

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    /* Check whether this register can hold an incoming pointer
       argument.  FUNCTION_ARG_REGNO_P tests outgoing register
       numbers, so translate if necessary due to register windows.  */
    if (FUNCTION_ARG_REGNO_P (OUTGOING_REGNO (i))
        && HARD_REGNO_MODE_OK (i, Pmode))
      static_reg_base_value[i]
        = gen_rtx_ADDRESS (VOIDmode, gen_rtx_REG (Pmode, i));

  static_reg_base_value[STACK_POINTER_REGNUM]
    = gen_rtx_ADDRESS (Pmode, stack_pointer_rtx);
  static_reg_base_value[ARG_POINTER_REGNUM]
    = gen_rtx_ADDRESS (Pmode, arg_pointer_rtx);
  static_reg_base_value[FRAME_POINTER_REGNUM]
    = gen_rtx_ADDRESS (Pmode, frame_pointer_rtx);
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
  static_reg_base_value[HARD_FRAME_POINTER_REGNUM]
    = gen_rtx_ADDRESS (Pmode, hard_frame_pointer_rtx);
#endif
}

/* Set MEMORY_MODIFIED when X modifies DATA (which is assumed to be a
   memory reference).  */

static bool memory_modified;
static void
memory_modified_1 (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
{
  if (MEM_P (x))
    {
      if (anti_dependence (x, (const_rtx) data)
          || output_dependence (x, (const_rtx) data))
        memory_modified = true;
    }
}

/* Return true when INSN possibly modifies the memory contents of MEM
   (i.e. the contents at MEM's address may change).  */
bool
memory_modified_in_insn_p (const_rtx mem, const_rtx insn)
{
  if (!INSN_P (insn))
    return false;
  memory_modified = false;
  note_stores (PATTERN (insn), memory_modified_1, CONST_CAST_RTX (mem));
  return memory_modified;
}
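
/* For example, for an insn whose pattern is
   (set (mem:SI (reg/f:SI fp)) (reg:SI 60)), this returns true for any
   MEM that anti_dependence or output_dependence considers conflicting
   with (mem:SI (reg/f:SI fp)), since note_stores visits every store in
   the pattern.  */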

/* Initialize the aliasing machinery.  Initialize the REG_KNOWN_VALUE
   array.  */

void
init_alias_analysis (void)
{
  unsigned int maxreg = max_reg_num ();
  int changed, pass;
  int i;
  unsigned int ui;
  rtx insn;

  timevar_push (TV_ALIAS_ANALYSIS);

  reg_known_value_size = maxreg - FIRST_PSEUDO_REGISTER;
  reg_known_value = GGC_CNEWVEC (rtx, reg_known_value_size);
  reg_known_equiv_p = XCNEWVEC (bool, reg_known_value_size);

  /* If we have memory allocated from the previous run, use it.  */
  if (old_reg_base_value)
    reg_base_value = old_reg_base_value;

  if (reg_base_value)
    VEC_truncate (rtx, reg_base_value, 0);

  VEC_safe_grow_cleared (rtx, gc, reg_base_value, maxreg);

  new_reg_base_value = XNEWVEC (rtx, maxreg);
  reg_seen = XNEWVEC (char, maxreg);

  /* The basic idea is that each pass through this loop will use the
     "constant" information from the previous pass to propagate alias
     information through another level of assignments.

     This could get expensive if the assignment chains are long.  Maybe
     we should throttle the number of iterations, possibly based on
     the optimization level or flag_expensive_optimizations.

     We could propagate more information in the first pass by making use
     of DF_REG_DEF_COUNT to determine immediately that the alias information
     for a pseudo is "constant".

     A program with an uninitialized variable can cause an infinite loop
     here.  Instead of doing a full dataflow analysis to detect such problems
     we just cap the number of iterations for the loop.

     The state of the arrays for the set chain in question does not matter
     since the program has undefined behavior.  */

  pass = 0;
  do
    {
      /* Assume nothing will change this iteration of the loop.  */
      changed = 0;

      /* We want to assign the same IDs each iteration of this loop, so
         start counting from zero each iteration of the loop.  */
      unique_id = 0;

      /* We're at the start of the function each iteration through the
         loop, so we're copying arguments.  */
      copying_arguments = true;

      /* Wipe the potential alias information clean for this pass.  */
      memset (new_reg_base_value, 0, maxreg * sizeof (rtx));

      /* Wipe the reg_seen array clean.  */
      memset (reg_seen, 0, maxreg);

      /* Mark all hard registers which may contain an address.
         The stack, frame and argument pointers may contain an address.
         An argument register which can hold a Pmode value may contain
         an address even if it is not in BASE_REGS.

         The address expression is VOIDmode for an argument and
         Pmode for other registers.  */

      memcpy (new_reg_base_value, static_reg_base_value,
              FIRST_PSEUDO_REGISTER * sizeof (rtx));

      /* Walk the insns adding values to the new_reg_base_value array.  */
      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
        {
          if (INSN_P (insn))
            {
              rtx note, set;

#if defined (HAVE_prologue) || defined (HAVE_epilogue)
              /* The prologue/epilogue insns are not threaded onto the
                 insn chain until after reload has completed.  Thus,
                 there is no sense wasting time checking if INSN is in
                 the prologue/epilogue until after reload has completed.  */
              if (reload_completed
                  && prologue_epilogue_contains (insn))
                continue;
#endif

              /* If this insn has a noalias note, process it.  Otherwise,
                 scan for sets.  A simple set will have no side effects
                 which could change the base value of any other register.  */

              if (GET_CODE (PATTERN (insn)) == SET
                  && REG_NOTES (insn) != 0
                  && find_reg_note (insn, REG_NOALIAS, NULL_RTX))
                record_set (SET_DEST (PATTERN (insn)), NULL_RTX, NULL);
              else
                note_stores (PATTERN (insn), record_set, NULL);

              set = single_set (insn);

              if (set != 0
                  && REG_P (SET_DEST (set))
                  && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
                {
                  unsigned int regno = REGNO (SET_DEST (set));
                  rtx src = SET_SRC (set);
                  rtx t;

                  note = find_reg_equal_equiv_note (insn);
                  if (note && REG_NOTE_KIND (note) == REG_EQUAL
                      && DF_REG_DEF_COUNT (regno) != 1)
                    note = NULL_RTX;

                  if (note != NULL_RTX
                      && GET_CODE (XEXP (note, 0)) != EXPR_LIST
                      && ! rtx_varies_p (XEXP (note, 0), 1)
                      && ! reg_overlap_mentioned_p (SET_DEST (set),
                                                    XEXP (note, 0)))
                    {
                      set_reg_known_value (regno, XEXP (note, 0));
                      set_reg_known_equiv_p (regno,
                                             REG_NOTE_KIND (note) == REG_EQUIV);
                    }
                  else if (DF_REG_DEF_COUNT (regno) == 1
                           && GET_CODE (src) == PLUS
                           && REG_P (XEXP (src, 0))
                           && (t = get_reg_known_value (REGNO (XEXP (src, 0))))
                           && GET_CODE (XEXP (src, 1)) == CONST_INT)
                    {
                      t = plus_constant (t, INTVAL (XEXP (src, 1)));
                      set_reg_known_value (regno, t);
                      set_reg_known_equiv_p (regno, 0);
                    }
                  else if (DF_REG_DEF_COUNT (regno) == 1
                           && ! rtx_varies_p (src, 1))
                    {
                      set_reg_known_value (regno, src);
                      set_reg_known_equiv_p (regno, 0);
                    }
                }
            }
          else if (NOTE_P (insn)
                   && NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG)
            copying_arguments = false;
        }

      /* Now propagate values from new_reg_base_value to reg_base_value.  */
      gcc_assert (maxreg == (unsigned int) max_reg_num ());

      for (ui = 0; ui < maxreg; ui++)
        {
          if (new_reg_base_value[ui]
              && new_reg_base_value[ui] != VEC_index (rtx, reg_base_value, ui)
              && ! rtx_equal_p (new_reg_base_value[ui],
                                VEC_index (rtx, reg_base_value, ui)))
            {
              VEC_replace (rtx, reg_base_value, ui, new_reg_base_value[ui]);
              changed = 1;
            }
        }
    }
  while (changed && ++pass < MAX_ALIAS_LOOP_PASSES);

  /* Fill in the remaining entries.  */
  for (i = 0; i < (int) reg_known_value_size; i++)
    if (reg_known_value[i] == 0)
      reg_known_value[i] = regno_reg_rtx[i + FIRST_PSEUDO_REGISTER];

  /* Clean up.  */
  free (new_reg_base_value);
  new_reg_base_value = 0;
  free (reg_seen);
  reg_seen = 0;
  timevar_pop (TV_ALIAS_ANALYSIS);
}
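
/* Release the memory used by the aliasing machinery.  reg_base_value
   is stashed in old_reg_base_value so that a subsequent
   init_alias_analysis can reuse the allocation.  */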
void
end_alias_analysis (void)
{
  old_reg_base_value = reg_base_value;
  ggc_free (reg_known_value);
  reg_known_value = 0;
  reg_known_value_size = 0;
  free (reg_known_equiv_p);
  reg_known_equiv_p = 0;
}

#include "gt-alias.h"