/* Alias analysis for GNU C
   Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004
   Free Software Foundation, Inc.
   Contributed by John Carr (jfc@mit.edu).

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "rtl.h"
28 #include "tree.h"
29 #include "tm_p.h"
30 #include "function.h"
31 #include "alias.h"
32 #include "emit-rtl.h"
33 #include "regs.h"
34 #include "hard-reg-set.h"
35 #include "basic-block.h"
36 #include "flags.h"
37 #include "output.h"
38 #include "toplev.h"
39 #include "cselib.h"
40 #include "splay-tree.h"
41 #include "ggc.h"
42 #include "langhooks.h"
43 #include "timevar.h"
44 #include "target.h"
45 #include "cgraph.h"
46 #include "varray.h"
/* The alias sets assigned to MEMs assist the back-end in determining
   which MEMs can alias which other MEMs.  In general, two MEMs in
   different alias sets cannot alias each other, with one important
   exception.  Consider something like:

     struct S { int i; double d; };

   a store to an `S' can alias something of either type `int' or type
   `double'.  (However, a store to an `int' cannot alias a `double'
   and vice versa.)  We indicate this via a tree structure that looks
   like:
           struct S
            /   \
           /     \
         |/_     _\|
         int    double

   (The arrows are directed and point downwards.)
   In this situation we say the alias set for `struct S' is the
   `superset' and that those for `int' and `double' are `subsets'.

   To see whether two alias sets can point to the same memory, we must
   see if either alias set is a subset of the other.  We need not trace
   past immediate descendants, however, since we propagate all
   grandchildren up one level.

   Alias set zero is implicitly a superset of all other alias sets.
   However, there is no actual entry for alias set zero.  It is an
   error to attempt to explicitly construct a subset of zero.  */

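/* As an illustrative sketch (not part of the original sources): given

     struct S { int i; double d; };
     struct T { struct S s; float f; };

   each type gets its own alias set, with the set for `struct T' a
   superset of those for `struct S', `float', `int', and `double'
   (the grandchildren `int' and `double' are propagated up one level).
   A store through a `struct S *' may therefore alias an `int' or a
   `double', while stores through `int *' and `double *' can never
   alias each other.  */
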
struct alias_set_entry GTY(())
{
  /* The alias set number, as stored in MEM_ALIAS_SET.  */
  HOST_WIDE_INT alias_set;

  /* The children of the alias set.  These are not just the immediate
     children, but, in fact, all descendants.  So, if we have:

       struct T { struct S s; float f; }

     continuing our example above, the children here will be all of
     `int', `double', `float', and `struct S'.  */
  splay_tree GTY((param1_is (int), param2_is (int))) children;

  /* Nonzero if this set would have a child of zero: this effectively
     makes this alias set the same as alias set zero.  */
  int has_zero_child;
};
typedef struct alias_set_entry *alias_set_entry;

static int rtx_equal_for_memref_p (rtx, rtx);
static rtx find_symbolic_term (rtx);
static int memrefs_conflict_p (int, rtx, int, rtx, HOST_WIDE_INT);
static void record_set (rtx, rtx, void *);
static int base_alias_check (rtx, rtx, enum machine_mode,
			     enum machine_mode);
static rtx find_base_value (rtx);
static int mems_in_disjoint_alias_sets_p (rtx, rtx);
static int insert_subset_children (splay_tree_node, void *);
static tree find_base_decl (tree);
static alias_set_entry get_alias_set_entry (HOST_WIDE_INT);
static rtx fixed_scalar_and_varying_struct_p (rtx, rtx, rtx, rtx,
					      int (*) (rtx, int));
static int aliases_everything_p (rtx);
static bool nonoverlapping_component_refs_p (tree, tree);
static tree decl_for_component_ref (tree);
static rtx adjust_offset_for_component_ref (tree, rtx);
static int nonoverlapping_memrefs_p (rtx, rtx);
static int write_dependence_p (rtx, rtx, int);

static int nonlocal_mentioned_p_1 (rtx *, void *);
static int nonlocal_mentioned_p (rtx);
static int nonlocal_referenced_p_1 (rtx *, void *);
static int nonlocal_referenced_p (rtx);
static int nonlocal_set_p_1 (rtx *, void *);
static int nonlocal_set_p (rtx);
static void memory_modified_1 (rtx, rtx, void *);
static void record_alias_subset (HOST_WIDE_INT, HOST_WIDE_INT);

/* Set up all info needed to perform alias analysis on memory references.  */

/* Returns the size in bytes of the mode of X.  */
#define SIZE_FOR_MODE(X) (GET_MODE_SIZE (GET_MODE (X)))

/* Returns nonzero if MEM1 and MEM2 do not alias because they are in
   different alias sets.  We ignore alias sets in functions making use
   of variable arguments because the va_arg macros on some systems are
   not legal ANSI C.  */
#define DIFFERENT_ALIAS_SETS_P(MEM1, MEM2)			\
  mems_in_disjoint_alias_sets_p (MEM1, MEM2)

/* Cap the number of passes we make over the insns propagating alias
   information through set chains.  10 is a completely arbitrary choice.  */
#define MAX_ALIAS_LOOP_PASSES 10

/* reg_base_value[N] gives an address to which register N is related.
   If all sets after the first add or subtract to the current value
   or otherwise modify it so it does not point to a different top level
   object, reg_base_value[N] is equal to the address part of the source
   of the first set.

   A base address can be an ADDRESS, SYMBOL_REF, or LABEL_REF.  ADDRESS
   expressions represent certain special values: function arguments and
   the stack, frame, and argument pointers.

   The contents of an ADDRESS are not normally used; the mode of the
   ADDRESS determines whether the ADDRESS is a function argument or some
   other special value.  Pointer equality, not rtx_equal_p, determines whether
   two ADDRESS expressions refer to the same base address.

   The only use of the contents of an ADDRESS is for determining if the
   current function performs nonlocal memory references for the
   purposes of marking the function as a constant function.  */

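/* For example (an illustrative sketch, not from the original sources):
   after the two sets

     (set (reg 100) (symbol_ref "x"))
     (set (reg 100) (plus (reg 100) (const_int 4)))

   reg_base_value[100] is the (symbol_ref "x") from the first set; the
   second set merely offsets the value and does not make the register
   point at a different top level object.  */
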
static GTY(()) varray_type reg_base_value;
static rtx *new_reg_base_value;

/* We preserve a copy of the old array around to reduce the amount of
   garbage produced; about 8% of the garbage produced was attributed to
   this array.  */
static GTY((deletable)) varray_type old_reg_base_value;

/* Static hunks of RTL used by the aliasing code; these are initialized
   once per function to avoid unnecessary RTL allocations.  */
static GTY (()) rtx static_reg_base_value[FIRST_PSEUDO_REGISTER];

#define REG_BASE_VALUE(X)					\
  (reg_base_value && REGNO (X) < VARRAY_SIZE (reg_base_value)	\
   ? VARRAY_RTX (reg_base_value, REGNO (X)) : 0)

/* Vector of known invariant relationships between registers.  Set in
   loop unrolling.  Indexed by register number; if nonzero the value
   is an expression describing this register in terms of another.

   The length of this array is alias_invariant_size.

   Because this array contains only pseudo registers it has no effect
   after reload.  */
static GTY((length("alias_invariant_size"))) rtx *alias_invariant;
static GTY(()) unsigned int alias_invariant_size;

/* Vector indexed by N giving the initial (unchanging) value known for
   pseudo-register N.  This array is initialized in init_alias_analysis,
   and does not change until end_alias_analysis is called.  */
static GTY((length("reg_known_value_size"))) rtx *reg_known_value;

/* Indicates number of valid entries in reg_known_value.  */
static GTY(()) unsigned int reg_known_value_size;

/* Vector recording for each reg_known_value whether it is due to a
   REG_EQUIV note.  Future passes (viz., reload) may replace the
   pseudo with the equivalent expression and so we account for the
   dependences that would be introduced if that happens.

   The REG_EQUIV notes created in assign_parms may mention the arg
   pointer, and there are explicit insns in the RTL that modify the
   arg pointer.  Thus we must ensure that such insns don't get
   scheduled across each other because that would invalidate the
   REG_EQUIV notes.  One could argue that the REG_EQUIV notes are
   wrong, but solving the problem in the scheduler will likely give
   better code, so we do it here.  */
static bool *reg_known_equiv_p;

/* True when scanning insns from the start of the rtl to the
   NOTE_INSN_FUNCTION_BEG note.  */
static bool copying_arguments;

/* The splay-tree used to store the various alias set entries.  */
static GTY ((param_is (struct alias_set_entry))) varray_type alias_sets;

/* Returns a pointer to the alias set entry for ALIAS_SET, if there is
   such an entry, or NULL otherwise.  */

static inline alias_set_entry
get_alias_set_entry (HOST_WIDE_INT alias_set)
{
  return (alias_set_entry) VARRAY_GENERIC_PTR (alias_sets, alias_set);
}

/* Returns nonzero if the alias sets for MEM1 and MEM2 are such that
   the two MEMs cannot alias each other.  */

static inline int
mems_in_disjoint_alias_sets_p (rtx mem1, rtx mem2)
{
  /* Perform a basic sanity check.  Namely, that there are no alias sets
     if we're not using strict aliasing.  This helps to catch bugs
     whereby someone uses PUT_CODE, but doesn't clear MEM_ALIAS_SET, or
     where a MEM is allocated in some way other than by the use of
     gen_rtx_MEM, and the MEM_ALIAS_SET is not cleared.  If we begin to
     use alias sets to indicate that spilled registers cannot alias each
     other, we might need to remove this check.  */
  gcc_assert (flag_strict_aliasing
	      || (!MEM_ALIAS_SET (mem1) && !MEM_ALIAS_SET (mem2)));

  return ! alias_sets_conflict_p (MEM_ALIAS_SET (mem1), MEM_ALIAS_SET (mem2));
}

/* Insert the NODE into the splay tree given by DATA.  Used by
   record_alias_subset via splay_tree_foreach.  */

static int
insert_subset_children (splay_tree_node node, void *data)
{
  splay_tree_insert ((splay_tree) data, node->key, node->value);

  return 0;
}

/* Return 1 if the two specified alias sets may conflict.  */

int
alias_sets_conflict_p (HOST_WIDE_INT set1, HOST_WIDE_INT set2)
{
  alias_set_entry ase;

  /* If we have no alias set information for one of the operands, we have
     to assume it can alias anything.  */
  if (set1 == 0 || set2 == 0
      /* If the two alias sets are the same, they may alias.  */
      || set1 == set2)
    return 1;

  /* See if the first alias set is a subset of the second.  */
  ase = get_alias_set_entry (set1);
  if (ase != 0
      && (ase->has_zero_child
	  || splay_tree_lookup (ase->children,
				(splay_tree_key) set2)))
    return 1;

  /* Now do the same, but with the alias sets reversed.  */
  ase = get_alias_set_entry (set2);
  if (ase != 0
      && (ase->has_zero_child
	  || splay_tree_lookup (ase->children,
				(splay_tree_key) set1)))
    return 1;

  /* The two alias sets are distinct and neither one is the
     child of the other.  Therefore, they cannot alias.  */
  return 0;
}

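/* A minimal usage sketch (illustrative only; s_type is a hypothetical
   RECORD_TYPE for the `struct S' example at the top of the file):

     HOST_WIDE_INT s = get_alias_set (s_type);
     HOST_WIDE_INT i = get_alias_set (integer_type_node);
     HOST_WIDE_INT d = get_alias_set (double_type_node);

   Here alias_sets_conflict_p (s, i) and alias_sets_conflict_p (s, d)
   return 1, because the sets for `int' and `double' are children of
   the set for `struct S', while alias_sets_conflict_p (i, d)
   returns 0.  */
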
/* Return 1 if the two specified alias sets might conflict, or if any subtype
   of these alias sets might conflict.  */

int
alias_sets_might_conflict_p (HOST_WIDE_INT set1, HOST_WIDE_INT set2)
{
  if (set1 == 0 || set2 == 0 || set1 == set2)
    return 1;

  return 0;
}

/* Return 1 if any MEM object of type T1 will always conflict (using the
   dependency routines in this file) with any MEM object of type T2.
   This is used when allocating temporary storage.  If T1 and/or T2 are
   NULL_TREE, it means we know nothing about the storage.  */

int
objects_must_conflict_p (tree t1, tree t2)
{
  HOST_WIDE_INT set1, set2;

  /* If neither has a type specified, we don't know if they'll conflict
     because we may be using them to store objects of various types, for
     example the argument and local variables areas of inlined functions.  */
  if (t1 == 0 && t2 == 0)
    return 0;

  /* If they are the same type, they must conflict.  */
  if (t1 == t2
      /* Likewise if both are volatile.  */
      || (t1 != 0 && TYPE_VOLATILE (t1) && t2 != 0 && TYPE_VOLATILE (t2)))
    return 1;

  set1 = t1 ? get_alias_set (t1) : 0;
  set2 = t2 ? get_alias_set (t2) : 0;

  /* Otherwise they conflict if they have no alias set or the same one.
     We can't simply use alias_sets_conflict_p here, because we must make
     sure that every subtype of t1 will conflict with every subtype of
     t2 for which a pair of subobjects of these respective subtypes
     overlaps on the stack.  */
  return set1 == 0 || set2 == 0 || set1 == set2;
}

/* T is an expression with pointer type.  Find the DECL on which this
   expression is based.  (For example, in `a[i]' this would be `a'.)
   If there is no such DECL, or a unique decl cannot be determined,
   NULL_TREE is returned.  */

static tree
find_base_decl (tree t)
{
  tree d0, d1;

  if (t == 0 || t == error_mark_node || ! POINTER_TYPE_P (TREE_TYPE (t)))
    return 0;

  /* If this is a declaration, return it.  */
  if (DECL_P (t))
    return t;

  /* Handle general expressions.  It would be nice to deal with
     COMPONENT_REFs here.  If we could tell that `a' and `b' were the
     same, then `a->f' and `b->f' are also the same.  */
  switch (TREE_CODE_CLASS (TREE_CODE (t)))
    {
    case tcc_unary:
      return find_base_decl (TREE_OPERAND (t, 0));

    case tcc_binary:
      /* If both operands yield the same decl (or none), return it; if
	 only one yields a decl, return that one; otherwise return 0.  */
      d0 = find_base_decl (TREE_OPERAND (t, 0));
      d1 = find_base_decl (TREE_OPERAND (t, 1));
      if (d0 == d1)
	return d0;
      else if (d0 == 0)
	return d1;
      else if (d1 == 0)
	return d0;
      else
	return 0;

    default:
      return 0;
    }
}

/* Return 1 if all the nested component references handled by
   get_inner_reference in T are such that we can address the object in T.  */

int
can_address_p (tree t)
{
  /* If we're at the end, it is vacuously addressable.  */
  if (! handled_component_p (t))
    return 1;

  /* Bitfields are never addressable.  */
  else if (TREE_CODE (t) == BIT_FIELD_REF)
    return 0;

  /* Fields are addressable unless they are marked as nonaddressable or
     the containing type has alias set 0.  */
  else if (TREE_CODE (t) == COMPONENT_REF
	   && ! DECL_NONADDRESSABLE_P (TREE_OPERAND (t, 1))
	   && get_alias_set (TREE_TYPE (TREE_OPERAND (t, 0))) != 0
	   && can_address_p (TREE_OPERAND (t, 0)))
    return 1;

  /* Likewise for arrays.  */
  else if ((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	   && ! TYPE_NONALIASED_COMPONENT (TREE_TYPE (TREE_OPERAND (t, 0)))
	   && get_alias_set (TREE_TYPE (TREE_OPERAND (t, 0))) != 0
	   && can_address_p (TREE_OPERAND (t, 0)))
    return 1;

  return 0;
}

/* Return the alias set for T, which may be either a type or an
   expression.  Call language-specific routine for help, if needed.  */

HOST_WIDE_INT
get_alias_set (tree t)
{
  HOST_WIDE_INT set;

  /* If we're not doing any alias analysis, just assume everything
     aliases everything else.  Also return 0 if this or its type is
     an error.  */
  if (! flag_strict_aliasing || t == error_mark_node
      || (! TYPE_P (t)
	  && (TREE_TYPE (t) == 0 || TREE_TYPE (t) == error_mark_node)))
    return 0;

  /* We can be passed either an expression or a type.  This and the
     language-specific routine may make mutually-recursive calls to each other
     to figure out what to do.  At each juncture, we see if this is a tree
     that the language may need to handle specially.  First handle things that
     aren't types.  */
  if (! TYPE_P (t))
    {
      tree inner = t;

      /* Remove any nops, then give the language a chance to do
	 something with this tree before we look at it.  */
      STRIP_NOPS (t);
      set = lang_hooks.get_alias_set (t);
      if (set != -1)
	return set;

      /* First see if the actual object referenced is an INDIRECT_REF from a
	 restrict-qualified pointer or a "void *".  */
      while (handled_component_p (inner))
	{
	  inner = TREE_OPERAND (inner, 0);
	  STRIP_NOPS (inner);
	}

      /* Check for accesses through restrict-qualified pointers.  */
      if (INDIRECT_REF_P (inner))
	{
	  tree decl = find_base_decl (TREE_OPERAND (inner, 0));

	  if (decl && DECL_POINTER_ALIAS_SET_KNOWN_P (decl))
	    {
	      /* If we haven't computed the actual alias set, do it now.  */
	      if (DECL_POINTER_ALIAS_SET (decl) == -2)
		{
		  tree pointed_to_type = TREE_TYPE (TREE_TYPE (decl));

		  /* No two restricted pointers can point at the same thing.
		     However, a restricted pointer can point at the same thing
		     as an unrestricted pointer, if that unrestricted pointer
		     is based on the restricted pointer.  So, we make the
		     alias set for the restricted pointer a subset of the
		     alias set for the type pointed to by the type of the
		     decl.  */
		  HOST_WIDE_INT pointed_to_alias_set
		    = get_alias_set (pointed_to_type);

		  if (pointed_to_alias_set == 0)
		    /* It's not legal to make a subset of alias set zero.  */
		    DECL_POINTER_ALIAS_SET (decl) = 0;
		  else if (AGGREGATE_TYPE_P (pointed_to_type))
		    /* For an aggregate, we must treat the restricted
		       pointer the same as an ordinary pointer.  If we
		       were to make the type pointed to by the
		       restricted pointer a subset of the pointed-to
		       type, then we would believe that other subsets
		       of the pointed-to type (such as fields of that
		       type) do not conflict with the type pointed to
		       by the restricted pointer.  */
		    DECL_POINTER_ALIAS_SET (decl)
		      = pointed_to_alias_set;
		  else
		    {
		      DECL_POINTER_ALIAS_SET (decl) = new_alias_set ();
		      record_alias_subset (pointed_to_alias_set,
					   DECL_POINTER_ALIAS_SET (decl));
		    }
		}

	      /* We use the alias set indicated in the declaration.  */
	      return DECL_POINTER_ALIAS_SET (decl);
	    }

	  /* If we have an INDIRECT_REF via a void pointer, we don't
	     know anything about what that might alias.  Likewise if the
	     pointer is marked that way.  */
	  else if (TREE_CODE (TREE_TYPE (inner)) == VOID_TYPE
		   || (TYPE_REF_CAN_ALIAS_ALL
		       (TREE_TYPE (TREE_OPERAND (inner, 0)))))
	    return 0;
	}

      /* Otherwise, pick up the outermost object that we could have a pointer
	 to, processing conversions as above.  */
      while (handled_component_p (t) && ! can_address_p (t))
	{
	  t = TREE_OPERAND (t, 0);
	  STRIP_NOPS (t);
	}

      /* If we've already determined the alias set for a decl, just return
	 it.  This is necessary for C++ anonymous unions, whose component
	 variables don't look like union members (boo!).  */
      if (TREE_CODE (t) == VAR_DECL
	  && DECL_RTL_SET_P (t) && MEM_P (DECL_RTL (t)))
	return MEM_ALIAS_SET (DECL_RTL (t));

      /* Now all we care about is the type.  */
      t = TREE_TYPE (t);
    }

  /* Variant qualifiers don't affect the alias set, so get the main
     variant.  If this is a type with a known alias set, return it.  */
  t = TYPE_MAIN_VARIANT (t);
  if (TYPE_ALIAS_SET_KNOWN_P (t))
    return TYPE_ALIAS_SET (t);

  /* See if the language has special handling for this type.  */
  set = lang_hooks.get_alias_set (t);
  if (set != -1)
    return set;

  /* There are no objects of FUNCTION_TYPE, so there's no point in
     using up an alias set for them.  (There are, of course, pointers
     and references to functions, but that's different.)  */
  else if (TREE_CODE (t) == FUNCTION_TYPE)
    set = 0;

  /* Unless the language specifies otherwise, let vector types alias
     their components.  This avoids some nasty type punning issues in
     normal usage.  And indeed lets vectors be treated more like an
     array slice.  */
  else if (TREE_CODE (t) == VECTOR_TYPE)
    set = get_alias_set (TREE_TYPE (t));

  else
    /* Otherwise make a new alias set for this type.  */
    set = new_alias_set ();

  TYPE_ALIAS_SET (t) = set;

  /* If this is an aggregate type, we must record any component aliasing
     information.  */
  if (AGGREGATE_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
    record_component_aliases (t);

  return set;
}

/* Return a brand-new alias set.  */

static GTY(()) HOST_WIDE_INT last_alias_set;

HOST_WIDE_INT
new_alias_set (void)
{
  if (flag_strict_aliasing)
    {
      if (!alias_sets)
	VARRAY_GENERIC_PTR_INIT (alias_sets, 10, "alias sets");
      else
	VARRAY_GROW (alias_sets, last_alias_set + 2);
      return ++last_alias_set;
    }
  else
    return 0;
}

/* Indicate that things in SUBSET can alias things in SUPERSET, but that
   not everything that aliases SUPERSET also aliases SUBSET.  For example,
   in C, a store to an `int' can alias a load of a structure containing an
   `int', and vice versa.  But it can't alias a load of a `double' member
   of the same structure.  Here, the structure would be the SUPERSET and
   `int' the SUBSET.  This relationship is also described in the comment at
   the beginning of this file.

   This function should be called only once per SUPERSET/SUBSET pair.

   It is illegal for SUPERSET to be zero; everything is implicitly a
   subset of alias set zero.  */

static void
record_alias_subset (HOST_WIDE_INT superset, HOST_WIDE_INT subset)
{
  alias_set_entry superset_entry;
  alias_set_entry subset_entry;

  /* It is possible in complex type situations for both sets to be the same,
     in which case we can ignore this operation.  */
  if (superset == subset)
    return;

  gcc_assert (superset);

  superset_entry = get_alias_set_entry (superset);
  if (superset_entry == 0)
    {
      /* Create an entry for the SUPERSET, so that we have a place to
	 attach the SUBSET.  */
      superset_entry = ggc_alloc (sizeof (struct alias_set_entry));
      superset_entry->alias_set = superset;
      superset_entry->children
	= splay_tree_new_ggc (splay_tree_compare_ints);
      superset_entry->has_zero_child = 0;
      VARRAY_GENERIC_PTR (alias_sets, superset) = superset_entry;
    }

  if (subset == 0)
    superset_entry->has_zero_child = 1;
  else
    {
      subset_entry = get_alias_set_entry (subset);
      /* If there is an entry for the subset, enter all of its children
	 (if they are not already present) as children of the SUPERSET.  */
      if (subset_entry)
	{
	  if (subset_entry->has_zero_child)
	    superset_entry->has_zero_child = 1;

	  splay_tree_foreach (subset_entry->children, insert_subset_children,
			      superset_entry->children);
	}

      /* Enter the SUBSET itself as a child of the SUPERSET.  */
      splay_tree_insert (superset_entry->children,
			 (splay_tree_key) subset, 0);
    }
}

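/* For illustration (not from the original sources): for the `struct S'
   example at the top of the file, record_component_aliases below ends
   up making calls equivalent to

     record_alias_subset (get_alias_set (s_type),
			  get_alias_set (integer_type_node));
     record_alias_subset (get_alias_set (s_type),
			  get_alias_set (double_type_node));

   for each addressable field, where s_type is a hypothetical
   RECORD_TYPE for `struct S'.  */
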
/* Record that component types of TYPE, if any, are part of that type for
   aliasing purposes.  For record types, we only record component types
   for fields that are marked addressable.  For array types, we always
   record the component types, so the front end should not call this
   function if the individual components aren't addressable.  */

void
record_component_aliases (tree type)
{
  HOST_WIDE_INT superset = get_alias_set (type);
  tree field;

  if (superset == 0)
    return;

  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      if (! TYPE_NONALIASED_COMPONENT (type))
	record_alias_subset (superset, get_alias_set (TREE_TYPE (type)));
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      /* Recursively record aliases for the base classes, if there are any.  */
      if (TYPE_BINFO (type))
	{
	  int i;
	  tree binfo, base_binfo;

	  for (binfo = TYPE_BINFO (type), i = 0;
	       BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
	    record_alias_subset (superset,
				 get_alias_set (BINFO_TYPE (base_binfo)));
	}
      for (field = TYPE_FIELDS (type); field != 0; field = TREE_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL && ! DECL_NONADDRESSABLE_P (field))
	  record_alias_subset (superset, get_alias_set (TREE_TYPE (field)));
      break;

    case COMPLEX_TYPE:
      record_alias_subset (superset, get_alias_set (TREE_TYPE (type)));
      break;

    default:
      break;
    }
}

/* Allocate an alias set for use in storing and reading from the varargs
   spill area.  */

static GTY(()) HOST_WIDE_INT varargs_set = -1;

HOST_WIDE_INT
get_varargs_alias_set (void)
{
#if 1
  /* We now lower VA_ARG_EXPR, and there's currently no way to attach the
     varargs alias set to an INDIRECT_REF (FIXME!), so we can't
     consistently use the varargs alias set for loads from the varargs
     area.  So don't use it anywhere.  */
  return 0;
#else
  if (varargs_set == -1)
    varargs_set = new_alias_set ();

  return varargs_set;
#endif
}

/* Likewise, but used for the fixed portions of the frame, e.g., register
   save areas.  */

static GTY(()) HOST_WIDE_INT frame_set = -1;

HOST_WIDE_INT
get_frame_alias_set (void)
{
  if (frame_set == -1)
    frame_set = new_alias_set ();

  return frame_set;
}

/* Inside SRC, the source of a SET, find a base address.  */

static rtx
find_base_value (rtx src)
{
  unsigned int regno;

  switch (GET_CODE (src))
    {
    case SYMBOL_REF:
    case LABEL_REF:
      return src;

    case REG:
      regno = REGNO (src);
      /* At the start of a function, argument registers have known base
	 values which may be lost later.  Returning an ADDRESS
	 expression here allows optimization based on argument values
	 even when the argument registers are used for other purposes.  */
      if (regno < FIRST_PSEUDO_REGISTER && copying_arguments)
	return new_reg_base_value[regno];

      /* If a pseudo has a known base value, return it.  Do not do this
	 for non-fixed hard regs since it can result in a circular
	 dependency chain for registers which have values at function entry.

	 The test above is not sufficient because the scheduler may move
	 a copy out of an arg reg past the NOTE_INSN_FUNCTION_BEGIN.  */
      if ((regno >= FIRST_PSEUDO_REGISTER || fixed_regs[regno])
	  && regno < VARRAY_SIZE (reg_base_value))
	{
	  /* If we're inside init_alias_analysis, use new_reg_base_value
	     to reduce the number of relaxation iterations.  */
	  if (new_reg_base_value && new_reg_base_value[regno]
	      && REG_N_SETS (regno) == 1)
	    return new_reg_base_value[regno];

	  if (VARRAY_RTX (reg_base_value, regno))
	    return VARRAY_RTX (reg_base_value, regno);
	}

      return 0;

    case MEM:
      /* Check for an argument passed in memory.  Only record in the
	 copying-arguments block; it is too hard to track changes
	 otherwise.  */
      if (copying_arguments
	  && (XEXP (src, 0) == arg_pointer_rtx
	      || (GET_CODE (XEXP (src, 0)) == PLUS
		  && XEXP (XEXP (src, 0), 0) == arg_pointer_rtx)))
	return gen_rtx_ADDRESS (VOIDmode, src);
      return 0;

    case CONST:
      src = XEXP (src, 0);
      if (GET_CODE (src) != PLUS && GET_CODE (src) != MINUS)
	break;

      /* ... fall through ...  */

    case PLUS:
    case MINUS:
      {
	rtx temp, src_0 = XEXP (src, 0), src_1 = XEXP (src, 1);

	/* If either operand is a REG that is a known pointer, then it
	   is the base.  */
	if (REG_P (src_0) && REG_POINTER (src_0))
	  return find_base_value (src_0);
	if (REG_P (src_1) && REG_POINTER (src_1))
	  return find_base_value (src_1);

	/* If either operand is a REG, then see if we already have
	   a known value for it.  */
	if (REG_P (src_0))
	  {
	    temp = find_base_value (src_0);
	    if (temp != 0)
	      src_0 = temp;
	  }

	if (REG_P (src_1))
	  {
	    temp = find_base_value (src_1);
	    if (temp != 0)
	      src_1 = temp;
	  }

	/* If either base is a named object or a special address
	   (like an argument or stack reference), then use it for the
	   base term.  */
	if (src_0 != 0
	    && (GET_CODE (src_0) == SYMBOL_REF
		|| GET_CODE (src_0) == LABEL_REF
		|| (GET_CODE (src_0) == ADDRESS
		    && GET_MODE (src_0) != VOIDmode)))
	  return src_0;

	if (src_1 != 0
	    && (GET_CODE (src_1) == SYMBOL_REF
		|| GET_CODE (src_1) == LABEL_REF
		|| (GET_CODE (src_1) == ADDRESS
		    && GET_MODE (src_1) != VOIDmode)))
	  return src_1;

	/* Guess which operand is the base address:
	   If either operand is a symbol, then it is the base.  If
	   either operand is a CONST_INT, then the other is the base.  */
	if (GET_CODE (src_1) == CONST_INT || CONSTANT_P (src_0))
	  return find_base_value (src_0);
	else if (GET_CODE (src_0) == CONST_INT || CONSTANT_P (src_1))
	  return find_base_value (src_1);

	return 0;
      }

    case LO_SUM:
      /* The standard form is (lo_sum reg sym) so look only at the
	 second operand.  */
      return find_base_value (XEXP (src, 1));

    case AND:
      /* If the second operand is constant, set the base
	 address to the first operand.  */
      if (GET_CODE (XEXP (src, 1)) == CONST_INT && INTVAL (XEXP (src, 1)) != 0)
	return find_base_value (XEXP (src, 0));
      return 0;

    case TRUNCATE:
      if (GET_MODE_SIZE (GET_MODE (src)) < GET_MODE_SIZE (Pmode))
	break;
      /* Fall through.  */
    case HIGH:
    case PRE_INC:
    case PRE_DEC:
    case POST_INC:
    case POST_DEC:
    case PRE_MODIFY:
    case POST_MODIFY:
      return find_base_value (XEXP (src, 0));

    case ZERO_EXTEND:
    case SIGN_EXTEND:	/* Used for NT/Alpha pointers.  */
      {
	rtx temp = find_base_value (XEXP (src, 0));

	if (temp != 0 && CONSTANT_P (temp))
	  temp = convert_memory_address (Pmode, temp);

	return temp;
      }

    default:
      break;
    }

  return 0;
}

/* Called from init_alias_analysis indirectly through note_stores.  */

/* While scanning insns to find base values, reg_seen[N] is nonzero if
   register N has been set in this function.  */
static char *reg_seen;

/* Addresses which are known not to alias anything else are identified
   by a unique integer.  */
static int unique_id;

static void
record_set (rtx dest, rtx set, void *data ATTRIBUTE_UNUSED)
{
  unsigned regno;
  rtx src;
  int n;

  if (!REG_P (dest))
    return;

  regno = REGNO (dest);

  gcc_assert (regno < VARRAY_SIZE (reg_base_value));

  /* If this spans multiple hard registers, then we must indicate that every
     register has an unusable value.  */
  if (regno < FIRST_PSEUDO_REGISTER)
    n = hard_regno_nregs[regno][GET_MODE (dest)];
  else
    n = 1;
  if (n != 1)
    {
      while (--n >= 0)
	{
	  reg_seen[regno + n] = 1;
	  new_reg_base_value[regno + n] = 0;
	}
      return;
    }

  if (set)
    {
      /* A CLOBBER wipes out any old value but does not prevent a previously
	 unset register from acquiring a base address (i.e. reg_seen is not
	 set).  */
      if (GET_CODE (set) == CLOBBER)
	{
	  new_reg_base_value[regno] = 0;
	  return;
	}
      src = SET_SRC (set);
    }
  else
    {
      if (reg_seen[regno])
	{
	  new_reg_base_value[regno] = 0;
	  return;
	}
      reg_seen[regno] = 1;
      new_reg_base_value[regno] = gen_rtx_ADDRESS (Pmode,
						   GEN_INT (unique_id++));
      return;
    }

  /* If this is not the first set of REGNO, see whether the new value
     is related to the old one.  There are two cases of interest:

	(1) The register might be assigned an entirely new value
	    that has the same base term as the original set.

	(2) The set might be a simple self-modification that
	    cannot change REGNO's base value.

     If neither case holds, reject the original base value as invalid.
     Note that the following situation is not detected:

	 extern int x, y;  int *p = &x;  p += (&y-&x);

     ANSI C does not allow computing the difference of addresses
     of distinct top level objects.  */
  if (new_reg_base_value[regno] != 0
      && find_base_value (src) != new_reg_base_value[regno])
    switch (GET_CODE (src))
      {
      case LO_SUM:
      case MINUS:
	if (XEXP (src, 0) != dest && XEXP (src, 1) != dest)
	  new_reg_base_value[regno] = 0;
	break;
      case PLUS:
	/* If the value we add in the PLUS is also a valid base value,
	   this might be the actual base value, and the original value
	   an index.  */
	{
	  rtx other = NULL_RTX;

	  if (XEXP (src, 0) == dest)
	    other = XEXP (src, 1);
	  else if (XEXP (src, 1) == dest)
	    other = XEXP (src, 0);

	  if (! other || find_base_value (other))
	    new_reg_base_value[regno] = 0;
	  break;
	}
      case AND:
	if (XEXP (src, 0) != dest || GET_CODE (XEXP (src, 1)) != CONST_INT)
	  new_reg_base_value[regno] = 0;
	break;
      default:
	new_reg_base_value[regno] = 0;
	break;
      }
  /* If this is the first set of a register, record the value.  */
  else if ((regno >= FIRST_PSEUDO_REGISTER || ! fixed_regs[regno])
	   && ! reg_seen[regno] && new_reg_base_value[regno] == 0)
    new_reg_base_value[regno] = find_base_value (src);

  reg_seen[regno] = 1;
}

/* Called from loop optimization when a new pseudo-register is
   created.  It indicates that REGNO is being set to VAL.  If INVARIANT
   is true then this value also describes an invariant relationship
   which can be used to deduce that two registers with unknown values
   are different.  */

void
record_base_value (unsigned int regno, rtx val, int invariant)
{
  if (invariant && alias_invariant && regno < alias_invariant_size)
    alias_invariant[regno] = val;

  if (regno >= VARRAY_SIZE (reg_base_value))
    VARRAY_GROW (reg_base_value, max_reg_num ());

  if (REG_P (val))
    {
      VARRAY_RTX (reg_base_value, regno)
	= REG_BASE_VALUE (val);
      return;
    }
  VARRAY_RTX (reg_base_value, regno)
    = find_base_value (val);
}

/* Clear alias info for a register.  This is used if an RTL transformation
   changes the value of a register.  It is used in flow by AUTO_INC_DEC
   optimizations.  We don't need to clear reg_base_value, since flow only
   changes the offset.  */

void
clear_reg_alias_info (rtx reg)
{
  unsigned int regno = REGNO (reg);

  if (regno >= FIRST_PSEUDO_REGISTER)
    {
      regno -= FIRST_PSEUDO_REGISTER;
      if (regno < reg_known_value_size)
	{
	  reg_known_value[regno] = reg;
	  reg_known_equiv_p[regno] = false;
	}
    }
}

/* If a value is known for REGNO, return it.  */

rtx
get_reg_known_value (unsigned int regno)
{
  if (regno >= FIRST_PSEUDO_REGISTER)
    {
      regno -= FIRST_PSEUDO_REGISTER;
      if (regno < reg_known_value_size)
	return reg_known_value[regno];
    }
  return NULL;
}

/* Set it.  */

static void
set_reg_known_value (unsigned int regno, rtx val)
{
  if (regno >= FIRST_PSEUDO_REGISTER)
    {
      regno -= FIRST_PSEUDO_REGISTER;
      if (regno < reg_known_value_size)
	reg_known_value[regno] = val;
    }
}

/* Similarly for reg_known_equiv_p.  */

bool
get_reg_known_equiv_p (unsigned int regno)
{
  if (regno >= FIRST_PSEUDO_REGISTER)
    {
      regno -= FIRST_PSEUDO_REGISTER;
      if (regno < reg_known_value_size)
	return reg_known_equiv_p[regno];
    }
  return false;
}

static void
set_reg_known_equiv_p (unsigned int regno, bool val)
{
  if (regno >= FIRST_PSEUDO_REGISTER)
    {
      regno -= FIRST_PSEUDO_REGISTER;
      if (regno < reg_known_value_size)
	reg_known_equiv_p[regno] = val;
    }
}

/* Returns a canonical version of X, from the point of view of alias
   analysis.  (For example, if X is a MEM whose address is a register,
   and the register has a known value (say a SYMBOL_REF), then a MEM
   whose address is the SYMBOL_REF is returned.)  */

rtx
canon_rtx (rtx x)
{
  /* Recursively look for equivalences.  */
  if (REG_P (x) && REGNO (x) >= FIRST_PSEUDO_REGISTER)
    {
      rtx t = get_reg_known_value (REGNO (x));
      if (t == x)
	return x;
      if (t)
	return canon_rtx (t);
    }

  if (GET_CODE (x) == PLUS)
    {
      rtx x0 = canon_rtx (XEXP (x, 0));
      rtx x1 = canon_rtx (XEXP (x, 1));

      if (x0 != XEXP (x, 0) || x1 != XEXP (x, 1))
	{
	  if (GET_CODE (x0) == CONST_INT)
	    return plus_constant (x1, INTVAL (x0));
	  else if (GET_CODE (x1) == CONST_INT)
	    return plus_constant (x0, INTVAL (x1));
	  return gen_rtx_PLUS (GET_MODE (x), x0, x1);
	}
    }

  /* This gives us much better alias analysis when called from
     the loop optimizer.  Note we want to leave the original
     MEM alone, but need to return the canonicalized MEM with
     all the flags with their original values.  */
  else if (MEM_P (x))
    x = replace_equiv_address_nv (x, canon_rtx (XEXP (x, 0)));

  return x;
}

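/* For example (an illustrative sketch, not from the original sources):
   if pseudo 100 has the known value (symbol_ref "x"), then canonicalizing

     (plus:SI (reg:SI 100) (const_int 4))

   substitutes the known value and folds with plus_constant, yielding
   (const:SI (plus:SI (symbol_ref "x") (const_int 4))); two addresses
   canonicalized this way can then be compared structurally by
   rtx_equal_for_memref_p below.  */
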
/* Return 1 if X and Y are identical-looking rtx's.
   Expect that X and Y have already been canonicalized.

   We use the data in reg_known_value above to see if two registers with
   different numbers are, in fact, equivalent.  */

static int
rtx_equal_for_memref_p (rtx x, rtx y)
{
  int i;
  int j;
  enum rtx_code code;
  const char *fmt;

  if (x == 0 && y == 0)
    return 1;
  if (x == 0 || y == 0)
    return 0;

  if (x == y)
    return 1;

  code = GET_CODE (x);
  /* Rtx's of different codes cannot be equal.  */
  if (code != GET_CODE (y))
    return 0;

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.
     (REG:SI x) and (REG:HI x) are NOT equivalent.  */

  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  /* Some RTL can be compared without a recursive examination.  */
  switch (code)
    {
    case REG:
      return REGNO (x) == REGNO (y);

    case LABEL_REF:
      return XEXP (x, 0) == XEXP (y, 0);

    case SYMBOL_REF:
      return XSTR (x, 0) == XSTR (y, 0);

    case VALUE:
    case CONST_INT:
    case CONST_DOUBLE:
      /* There's no need to compare the contents of CONST_DOUBLEs or
	 CONST_INTs because pointer equality is a good enough
	 comparison for these nodes.  */
      return 0;

    default:
      break;
    }

  /* canon_rtx knows how to handle plus.  No need to canonicalize.  */
  if (code == PLUS)
    return ((rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 0))
	     && rtx_equal_for_memref_p (XEXP (x, 1), XEXP (y, 1)))
	    || (rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 1))
		&& rtx_equal_for_memref_p (XEXP (x, 1), XEXP (y, 0))));
  /* For commutative operations, the RTXs match if the operands match in
     any order.  Also handle the simple binary and unary cases without a
     loop.  */
  if (COMMUTATIVE_P (x))
    {
      rtx xop0 = canon_rtx (XEXP (x, 0));
      rtx yop0 = canon_rtx (XEXP (y, 0));
      rtx yop1 = canon_rtx (XEXP (y, 1));

      return ((rtx_equal_for_memref_p (xop0, yop0)
	       && rtx_equal_for_memref_p (canon_rtx (XEXP (x, 1)), yop1))
	      || (rtx_equal_for_memref_p (xop0, yop1)
		  && rtx_equal_for_memref_p (canon_rtx (XEXP (x, 1)), yop0)));
    }
  else if (NON_COMMUTATIVE_P (x))
    {
      return (rtx_equal_for_memref_p (canon_rtx (XEXP (x, 0)),
				      canon_rtx (XEXP (y, 0)))
	      && rtx_equal_for_memref_p (canon_rtx (XEXP (x, 1)),
					 canon_rtx (XEXP (y, 1))));
    }
  else if (UNARY_P (x))
    return rtx_equal_for_memref_p (canon_rtx (XEXP (x, 0)),
				   canon_rtx (XEXP (y, 0)));

  /* Compare the elements.  If any pair of corresponding elements
     fail to match, return 0 for the whole thing.

     Limit cases to types which actually appear in addresses.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      switch (fmt[i])
	{
	case 'i':
	  if (XINT (x, i) != XINT (y, i))
	    return 0;
	  break;

	case 'E':
	  /* Two vectors must have the same length.  */
	  if (XVECLEN (x, i) != XVECLEN (y, i))
	    return 0;

	  /* And the corresponding elements must match.  */
	  for (j = 0; j < XVECLEN (x, i); j++)
	    if (rtx_equal_for_memref_p (canon_rtx (XVECEXP (x, i, j)),
					canon_rtx (XVECEXP (y, i, j))) == 0)
	      return 0;
	  break;

	case 'e':
	  if (rtx_equal_for_memref_p (canon_rtx (XEXP (x, i)),
				      canon_rtx (XEXP (y, i))) == 0)
	    return 0;
	  break;

	/* This can happen for asm operands.  */
	case 's':
	  if (strcmp (XSTR (x, i), XSTR (y, i)))
	    return 0;
	  break;

	/* This can happen for an asm which clobbers memory.  */
	case '0':
	  break;

	  /* It is believed that rtx's at this level will never
	     contain anything but integers and other rtx's,
	     except for within LABEL_REFs and SYMBOL_REFs.  */
	default:
	  gcc_unreachable ();
	}
    }
  return 1;
}

/* Given an rtx X, find a SYMBOL_REF or LABEL_REF within
   X and return it, or return 0 if none found.  */

static rtx
find_symbolic_term (rtx x)
{
  int i;
  enum rtx_code code;
  const char *fmt;

  code = GET_CODE (x);
  if (code == SYMBOL_REF || code == LABEL_REF)
    return x;
  if (OBJECT_P (x))
    return 0;

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      rtx t;

      if (fmt[i] == 'e')
	{
	  t = find_symbolic_term (XEXP (x, i));
	  if (t != 0)
	    return t;
	}
      else if (fmt[i] == 'E')
	break;
    }

  return 0;
}

rtx
find_base_term (rtx x)
{
  cselib_val *val;
  struct elt_loc_list *l;

#if defined (FIND_BASE_TERM)
  /* Try machine-dependent ways to find the base term.  */
  x = FIND_BASE_TERM (x);
#endif

  switch (GET_CODE (x))
    {
    case REG:
      return REG_BASE_VALUE (x);

    case TRUNCATE:
      if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (Pmode))
	return 0;
      /* Fall through.  */
    case HIGH:
    case PRE_INC:
    case PRE_DEC:
    case POST_INC:
    case POST_DEC:
    case PRE_MODIFY:
    case POST_MODIFY:
      return find_base_term (XEXP (x, 0));

    case ZERO_EXTEND:
    case SIGN_EXTEND:	/* Used for Alpha/NT pointers.  */
      {
	rtx temp = find_base_term (XEXP (x, 0));

	if (temp != 0 && CONSTANT_P (temp))
	  temp = convert_memory_address (Pmode, temp);

	return temp;
      }

    case VALUE:
      val = CSELIB_VAL_PTR (x);
      if (!val)
	return 0;
      for (l = val->locs; l; l = l->next)
	if ((x = find_base_term (l->loc)) != 0)
	  return x;
      return 0;

    case CONST:
      x = XEXP (x, 0);
      if (GET_CODE (x) != PLUS && GET_CODE (x) != MINUS)
	return 0;
      /* Fall through.  */
    case LO_SUM:
    case PLUS:
    case MINUS:
      {
	rtx tmp1 = XEXP (x, 0);
	rtx tmp2 = XEXP (x, 1);

	/* This is a little bit tricky since we have to determine which of
	   the two operands represents the real base address.  Otherwise this
	   routine may return the index register instead of the base register.

	   That may cause us to believe no aliasing was possible, when in
	   fact aliasing is possible.

	   We use a few simple tests to guess the base register.  Additional
	   tests can certainly be added.  For example, if one of the operands
	   is a shift or multiply, then it must be the index register and the
	   other operand is the base register.  */

	if (tmp1 == pic_offset_table_rtx && CONSTANT_P (tmp2))
	  return find_base_term (tmp2);

	/* If either operand is known to be a pointer, then use it
	   to determine the base term.  */
	if (REG_P (tmp1) && REG_POINTER (tmp1))
	  return find_base_term (tmp1);

	if (REG_P (tmp2) && REG_POINTER (tmp2))
	  return find_base_term (tmp2);

	/* Neither operand was known to be a pointer.  Go ahead and find the
	   base term for both operands.  */
	tmp1 = find_base_term (tmp1);
	tmp2 = find_base_term (tmp2);

	/* If either base term is a named object or a special address
	   (like an argument or stack reference), then use it for the
	   base term.  */
	if (tmp1 != 0
	    && (GET_CODE (tmp1) == SYMBOL_REF
		|| GET_CODE (tmp1) == LABEL_REF
		|| (GET_CODE (tmp1) == ADDRESS
		    && GET_MODE (tmp1) != VOIDmode)))
	  return tmp1;

	if (tmp2 != 0
	    && (GET_CODE (tmp2) == SYMBOL_REF
		|| GET_CODE (tmp2) == LABEL_REF
		|| (GET_CODE (tmp2) == ADDRESS
		    && GET_MODE (tmp2) != VOIDmode)))
	  return tmp2;

	/* We could not determine which of the two operands was the
	   base register and which was the index.  So we can determine
	   nothing from the base alias check.  */
	return 0;
      }

    case AND:
      if (GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) != 0)
	return find_base_term (XEXP (x, 0));
      return 0;

    case SYMBOL_REF:
    case LABEL_REF:
      return x;

    default:
      return 0;
    }
}

/* Return 0 if the addresses X and Y are known to point to different
   objects, 1 if they might be pointers to the same object.  */

static int
base_alias_check (rtx x, rtx y, enum machine_mode x_mode,
		  enum machine_mode y_mode)
{
  rtx x_base = find_base_term (x);
  rtx y_base = find_base_term (y);

  /* If the address itself has no known base see if a known equivalent
     value has one.  If either address still has no known base, nothing
     is known about aliasing.  */
  if (x_base == 0)
    {
      rtx x_c;

      if (! flag_expensive_optimizations || (x_c = canon_rtx (x)) == x)
	return 1;

      x_base = find_base_term (x_c);
      if (x_base == 0)
	return 1;
    }

  if (y_base == 0)
    {
      rtx y_c;
      if (! flag_expensive_optimizations || (y_c = canon_rtx (y)) == y)
	return 1;

      y_base = find_base_term (y_c);
      if (y_base == 0)
	return 1;
    }

  /* If the base addresses are equal nothing is known about aliasing.  */
  if (rtx_equal_p (x_base, y_base))
    return 1;

  /* The base addresses of the read and write are different expressions.
     If they are both symbols and they are not accessed via AND, there is
     no conflict.  We can bring knowledge of object alignment into play
     here.  For example, on alpha, "char a, b;" can alias one another,
     though "char a; long b;" cannot.  */
  if (GET_CODE (x_base) != ADDRESS && GET_CODE (y_base) != ADDRESS)
    {
      if (GET_CODE (x) == AND && GET_CODE (y) == AND)
	return 1;
      if (GET_CODE (x) == AND
	  && (GET_CODE (XEXP (x, 1)) != CONST_INT
	      || (int) GET_MODE_UNIT_SIZE (y_mode) < -INTVAL (XEXP (x, 1))))
	return 1;
      if (GET_CODE (y) == AND
	  && (GET_CODE (XEXP (y, 1)) != CONST_INT
	      || (int) GET_MODE_UNIT_SIZE (x_mode) < -INTVAL (XEXP (y, 1))))
	return 1;
      /* Differing symbols never alias.  */
      return 0;
    }

  /* If one address is a stack reference there can be no alias:
     stack references using different base registers do not alias,
     a stack reference can not alias a parameter, and a stack reference
     can not alias a global.  */
  if ((GET_CODE (x_base) == ADDRESS && GET_MODE (x_base) == Pmode)
      || (GET_CODE (y_base) == ADDRESS && GET_MODE (y_base) == Pmode))
    return 0;

  if (! flag_argument_noalias)
    return 1;

  if (flag_argument_noalias > 1)
    return 0;

  /* Weak noalias assertion (arguments are distinct, but may match globals).  */
  return ! (GET_MODE (x_base) == VOIDmode && GET_MODE (y_base) == VOIDmode);
}

/* Convert the address X into something we can use.  This is done by returning
   it unchanged unless it is a value; in the latter case we call cselib to get
   a more useful rtx.  */

rtx
get_addr (rtx x)
{
  cselib_val *v;
  struct elt_loc_list *l;

  if (GET_CODE (x) != VALUE)
    return x;
  v = CSELIB_VAL_PTR (x);
  if (v)
    {
      for (l = v->locs; l; l = l->next)
	if (CONSTANT_P (l->loc))
	  return l->loc;
      for (l = v->locs; l; l = l->next)
	if (!REG_P (l->loc) && !MEM_P (l->loc))
	  return l->loc;
      if (v->locs)
	return v->locs->loc;
    }
  return x;
}

/* Return the address of the (N_REFS + 1)th memory reference to ADDR
   where SIZE is the size in bytes of the memory reference.  If ADDR
   is not modified by the memory reference then ADDR is returned.  */

static rtx
addr_side_effect_eval (rtx addr, int size, int n_refs)
{
  int offset = 0;

  switch (GET_CODE (addr))
    {
    case PRE_INC:
      offset = (n_refs + 1) * size;
      break;
    case PRE_DEC:
      offset = -(n_refs + 1) * size;
      break;
    case POST_INC:
      offset = n_refs * size;
      break;
    case POST_DEC:
      offset = -n_refs * size;
      break;

    default:
      return addr;
    }

  if (offset)
    addr = gen_rtx_PLUS (GET_MODE (addr), XEXP (addr, 0),
			 GEN_INT (offset));
  else
    addr = XEXP (addr, 0);
  addr = canon_rtx (addr);

  return addr;
}

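/* Worked example (illustrative only): for a 4-byte reference through
   (post_inc:SI (reg:SI 100)), the first access (N_REFS == 0) gets
   offset 0, so the address reduces to (reg:SI 100); the second access
   (N_REFS == 1) evaluates to (plus (reg:SI 100) (const_int 4)).  With
   pre_inc the increment happens before the access, so even N_REFS == 0
   yields (plus (reg:SI 100) (const_int 4)).  */
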
/* Return nonzero if X and Y (memory addresses) could reference the
   same location in memory.  C is an offset accumulator.  When
   C is nonzero, we are testing aliases between X and Y + C.
   XSIZE is the size in bytes of the X reference,
   similarly YSIZE is the size in bytes for Y.
   Expect that canon_rtx has already been called for X and Y.

   If XSIZE or YSIZE is zero, we do not know the amount of memory being
   referenced (the reference was BLKmode), so make the most pessimistic
   assumptions.

   If XSIZE or YSIZE is negative, we may access memory outside the object
   being referenced as a side effect.  This can happen when using AND to
   align memory references, as is done on the Alpha.

   Nice to notice that varying addresses cannot conflict with fp if no
   local variables had their addresses taken, but that's too hard now.  */

static int
memrefs_conflict_p (int xsize, rtx x, int ysize, rtx y, HOST_WIDE_INT c)
{
  if (GET_CODE (x) == VALUE)
    x = get_addr (x);
  if (GET_CODE (y) == VALUE)
    y = get_addr (y);
  if (GET_CODE (x) == HIGH)
    x = XEXP (x, 0);
  else if (GET_CODE (x) == LO_SUM)
    x = XEXP (x, 1);
  else
    x = addr_side_effect_eval (x, xsize, 0);
  if (GET_CODE (y) == HIGH)
    y = XEXP (y, 0);
  else if (GET_CODE (y) == LO_SUM)
    y = XEXP (y, 1);
  else
    y = addr_side_effect_eval (y, ysize, 0);

  if (rtx_equal_for_memref_p (x, y))
    {
      if (xsize <= 0 || ysize <= 0)
	return 1;
      if (c >= 0 && xsize > c)
	return 1;
      if (c < 0 && ysize + c > 0)
	return 1;
      return 0;
    }

  /* This code used to check for conflicts involving stack references and
     globals but the base address alias code now handles these cases.  */

  if (GET_CODE (x) == PLUS)
    {
      /* The fact that X is canonicalized means that this
	 PLUS rtx is canonicalized.  */
      rtx x0 = XEXP (x, 0);
      rtx x1 = XEXP (x, 1);

      if (GET_CODE (y) == PLUS)
	{
	  /* The fact that Y is canonicalized means that this
	     PLUS rtx is canonicalized.  */
	  rtx y0 = XEXP (y, 0);
	  rtx y1 = XEXP (y, 1);

	  if (rtx_equal_for_memref_p (x1, y1))
	    return memrefs_conflict_p (xsize, x0, ysize, y0, c);
	  if (rtx_equal_for_memref_p (x0, y0))
	    return memrefs_conflict_p (xsize, x1, ysize, y1, c);
	  if (GET_CODE (x1) == CONST_INT)
	    {
	      if (GET_CODE (y1) == CONST_INT)
		return memrefs_conflict_p (xsize, x0, ysize, y0,
					   c - INTVAL (x1) + INTVAL (y1));
	      else
		return memrefs_conflict_p (xsize, x0, ysize, y,
					   c - INTVAL (x1));
	    }
	  else if (GET_CODE (y1) == CONST_INT)
	    return memrefs_conflict_p (xsize, x, ysize, y0, c + INTVAL (y1));

	  return 1;
	}
      else if (GET_CODE (x1) == CONST_INT)
	return memrefs_conflict_p (xsize, x0, ysize, y, c - INTVAL (x1));
    }
  else if (GET_CODE (y) == PLUS)
    {
      /* The fact that Y is canonicalized means that this
	 PLUS rtx is canonicalized.  */
      rtx y0 = XEXP (y, 0);
      rtx y1 = XEXP (y, 1);

      if (GET_CODE (y1) == CONST_INT)
	return memrefs_conflict_p (xsize, x, ysize, y0, c + INTVAL (y1));
      else
	return 1;
    }

  if (GET_CODE (x) == GET_CODE (y))
    switch (GET_CODE (x))
      {
      case MULT:
	{
	  /* Handle cases where we expect the second operands to be the
	     same, and check only whether the first operand would conflict
	     or not.  */
	  rtx x0, y0;
	  rtx x1 = canon_rtx (XEXP (x, 1));
	  rtx y1 = canon_rtx (XEXP (y, 1));
	  if (! rtx_equal_for_memref_p (x1, y1))
	    return 1;
	  x0 = canon_rtx (XEXP (x, 0));
	  y0 = canon_rtx (XEXP (y, 0));
	  if (rtx_equal_for_memref_p (x0, y0))
	    return (xsize == 0 || ysize == 0
		    || (c >= 0 && xsize > c) || (c < 0 && ysize + c > 0));

	  /* Can't properly adjust our sizes.  */
	  if (GET_CODE (x1) != CONST_INT)
	    return 1;
	  xsize /= INTVAL (x1);
	  ysize /= INTVAL (x1);
	  c /= INTVAL (x1);
	  return memrefs_conflict_p (xsize, x0, ysize, y0, c);
	}

      case REG:
	/* Are these registers known not to be equal?  */
	if (alias_invariant)
	  {
	    unsigned int r_x = REGNO (x), r_y = REGNO (y);
	    rtx i_x, i_y;	/* invariant relationships of X and Y */

	    i_x = r_x >= alias_invariant_size ? 0 : alias_invariant[r_x];
	    i_y = r_y >= alias_invariant_size ? 0 : alias_invariant[r_y];

	    if (i_x == 0 && i_y == 0)
	      break;

	    if (! memrefs_conflict_p (xsize, i_x ? i_x : x,
				      ysize, i_y ? i_y : y, c))
	      return 0;
	  }
	break;

      default:
	break;
      }

  /* Treat an access through an AND (e.g. a subword access on an Alpha)
     as an access with indeterminate size.  Assume that references
     besides AND are aligned, so if the size of the other reference is
     at least as large as the alignment, assume no other overlap.  */
  if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT)
    {
      if (GET_CODE (y) == AND || ysize < -INTVAL (XEXP (x, 1)))
	xsize = -1;
      return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)), ysize, y, c);
    }
  if (GET_CODE (y) == AND && GET_CODE (XEXP (y, 1)) == CONST_INT)
    {
      /* ??? If we are indexing far enough into the array/structure, we
	 may yet be able to determine that we can not overlap.  But we
	 also need to know that we are far enough from the end not to
	 overlap a following reference, so we do nothing with that for
	 now.  */
      if (GET_CODE (x) == AND || xsize < -INTVAL (XEXP (y, 1)))
	ysize = -1;
      return memrefs_conflict_p (xsize, x, ysize, canon_rtx (XEXP (y, 0)), c);
    }

  if (CONSTANT_P (x))
    {
      if (GET_CODE (x) == CONST_INT && GET_CODE (y) == CONST_INT)
	{
	  c += (INTVAL (y) - INTVAL (x));
	  return (xsize <= 0 || ysize <= 0
		  || (c >= 0 && xsize > c) || (c < 0 && ysize + c > 0));
	}

      if (GET_CODE (x) == CONST)
	{
	  if (GET_CODE (y) == CONST)
	    return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)),
				       ysize, canon_rtx (XEXP (y, 0)), c);
	  else
	    return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)),
				       ysize, y, c);
	}
      if (GET_CODE (y) == CONST)
	return memrefs_conflict_p (xsize, x, ysize,
				   canon_rtx (XEXP (y, 0)), c);

      if (CONSTANT_P (y))
	return (xsize <= 0 || ysize <= 0
		|| (rtx_equal_for_memref_p (x, y)
		    && ((c >= 0 && xsize > c) || (c < 0 && ysize + c > 0))));

      return 1;
    }
  return 1;
}

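/* Worked example (illustrative only): testing a 4-byte reference at
   (plus (reg 100) (const_int 8)) against a 4-byte reference at
   (plus (reg 100) (const_int 4)): the common term (reg 100) matches,
   so the routine recurses on the constant terms and accumulates
   C = 4 - 8 = -4; since C < 0 and YSIZE + C == 0 is not positive,
   the references cannot overlap and 0 is returned.  */
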
/* Functions to compute memory dependencies.

   Since we process the insns in execution order, we can build tables
   to keep track of what registers are fixed (and not aliased), what registers
   are varying in known ways, and what registers are varying in unknown
   ways.

   If both memory references are volatile, then there must always be a
   dependence between the two references, since their order can not be
   changed.  A volatile and non-volatile reference can be interchanged
   though.

   A MEM_IN_STRUCT reference at a non-AND varying address can never
   conflict with a non-MEM_IN_STRUCT reference at a fixed address.  We
   also must allow AND addresses, because they may generate accesses
   outside the object being referenced.  This is used to generate
   aligned addresses from unaligned addresses, for instance, the alpha
   storeqi_unaligned pattern.  */

/* Read dependence: X is read after read in MEM takes place.  There can
   only be a dependence here if both reads are volatile.  */

int
read_dependence (rtx mem, rtx x)
{
  return MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem);
}

/* Returns MEM1 if and only if MEM1 is a scalar at a fixed address and
   MEM2 is a reference to a structure at a varying address, or returns
   MEM2 if vice versa.  Otherwise, returns NULL_RTX.  If a non-NULL
   value is returned MEM1 and MEM2 can never alias.  VARIES_P is used
   to decide whether or not an address may vary; it should return
   nonzero whenever variation is possible.
   MEM1_ADDR and MEM2_ADDR are the addresses of MEM1 and MEM2.  */

static rtx
fixed_scalar_and_varying_struct_p (rtx mem1, rtx mem2, rtx mem1_addr,
				   rtx mem2_addr,
				   int (*varies_p) (rtx, int))
{
  if (! flag_strict_aliasing)
    return NULL_RTX;

  if (MEM_SCALAR_P (mem1) && MEM_IN_STRUCT_P (mem2)
      && !varies_p (mem1_addr, 1) && varies_p (mem2_addr, 1))
    /* MEM1 is a scalar at a fixed address; MEM2 is a struct at a
       varying address.  */
    return mem1;

  if (MEM_IN_STRUCT_P (mem1) && MEM_SCALAR_P (mem2)
      && varies_p (mem1_addr, 1) && !varies_p (mem2_addr, 1))
    /* MEM2 is a scalar at a fixed address; MEM1 is a struct at a
       varying address.  */
    return mem2;

  return NULL_RTX;
}

/* Returns nonzero if something about the mode or address format of MEM
   indicates that it might well alias *anything*.  */

static int
aliases_everything_p (rtx mem)
{
  if (GET_CODE (XEXP (mem, 0)) == AND)
    /* If the address is an AND, it's very hard to know what it is
       actually pointing at.  */
    return 1;

  return 0;
}

/* Return true if we can determine that the fields referenced cannot
   overlap for any pair of objects.  */

static bool
nonoverlapping_component_refs_p (tree x, tree y)
{
  tree fieldx, fieldy, typex, typey, orig_y;

  do
    {
      /* The comparison has to be done at a common type, since we don't
         know how the inheritance hierarchy works.  */
      orig_y = y;
      do
        {
          fieldx = TREE_OPERAND (x, 1);
          typex = DECL_FIELD_CONTEXT (fieldx);

          y = orig_y;
          do
            {
              fieldy = TREE_OPERAND (y, 1);
              typey = DECL_FIELD_CONTEXT (fieldy);

              if (typex == typey)
                goto found;

              y = TREE_OPERAND (y, 0);
            }
          while (y && TREE_CODE (y) == COMPONENT_REF);

          x = TREE_OPERAND (x, 0);
        }
      while (x && TREE_CODE (x) == COMPONENT_REF);

      /* Never found a common type.  */
      return false;

    found:
      /* If we're left with accessing different fields of a structure,
         then no overlap.  */
      if (TREE_CODE (typex) == RECORD_TYPE && fieldx != fieldy)
        return true;

      /* The comparison on the current field failed.  If we're accessing
         a very nested structure, look at the next outer level.  */
      x = TREE_OPERAND (x, 0);
      y = TREE_OPERAND (y, 0);
    }
  while (x && y
         && TREE_CODE (x) == COMPONENT_REF
         && TREE_CODE (y) == COMPONENT_REF);

  return false;
}
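
/* Illustrative example (not from the original sources): given

     struct S { int a; int b; };
     struct T { struct S s; int c; };

   references t1.s.a and t2.s.b reach the common type `struct S' with
   different FIELD_DECLs, so they cannot overlap for any pair of
   objects and true is returned; for t1.s.a vs t2.s.a every level
   matches and we conservatively return false.  */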
/* Look at the bottom of the COMPONENT_REF list for a DECL, and return it.  */

static tree
decl_for_component_ref (tree x)
{
  do
    {
      x = TREE_OPERAND (x, 0);
    }
  while (x && TREE_CODE (x) == COMPONENT_REF);

  return x && DECL_P (x) ? x : NULL_TREE;
}
/* Walk up the COMPONENT_REF list and adjust OFFSET to compensate for the
   offset of the field reference.  */

static rtx
adjust_offset_for_component_ref (tree x, rtx offset)
{
  HOST_WIDE_INT ioffset;

  if (! offset)
    return NULL_RTX;

  ioffset = INTVAL (offset);
  do
    {
      tree offset = component_ref_field_offset (x);
      tree field = TREE_OPERAND (x, 1);

      if (! host_integerp (offset, 1))
        return NULL_RTX;
      ioffset += (tree_low_cst (offset, 1)
                  + (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
                     / BITS_PER_UNIT));

      x = TREE_OPERAND (x, 0);
    }
  while (x && TREE_CODE (x) == COMPONENT_REF);

  return GEN_INT (ioffset);
}
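
/* Worked example (illustrative, not from the original sources): for a
   reference a.s.f where field `f' sits at byte 4 of `struct s' and
   field `s' at byte 8 of `a', the loop adds 4 and then 8 to IOFFSET,
   turning an offset relative to the innermost reference into one
   relative to the containing decl `a'.  */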
/* Return nonzero if we can determine the exprs corresponding to memrefs
   X and Y and they do not overlap.  */

static int
nonoverlapping_memrefs_p (rtx x, rtx y)
{
  tree exprx = MEM_EXPR (x), expry = MEM_EXPR (y);
  rtx rtlx, rtly;
  rtx basex, basey;
  rtx moffsetx, moffsety;
  HOST_WIDE_INT offsetx = 0, offsety = 0, sizex, sizey, tem;

  /* Unless both have exprs, we can't tell anything.  */
  if (exprx == 0 || expry == 0)
    return 0;

  /* If both are field references, we may be able to determine something.  */
  if (TREE_CODE (exprx) == COMPONENT_REF
      && TREE_CODE (expry) == COMPONENT_REF
      && nonoverlapping_component_refs_p (exprx, expry))
    return 1;

  /* If the field reference test failed, look at the DECLs involved.  */
  moffsetx = MEM_OFFSET (x);
  if (TREE_CODE (exprx) == COMPONENT_REF)
    {
      tree t = decl_for_component_ref (exprx);
      if (! t)
        return 0;
      moffsetx = adjust_offset_for_component_ref (exprx, moffsetx);
      exprx = t;
    }
  else if (INDIRECT_REF_P (exprx))
    {
      exprx = TREE_OPERAND (exprx, 0);
      if (flag_argument_noalias < 2
          || TREE_CODE (exprx) != PARM_DECL)
        return 0;
    }

  moffsety = MEM_OFFSET (y);
  if (TREE_CODE (expry) == COMPONENT_REF)
    {
      tree t = decl_for_component_ref (expry);
      if (! t)
        return 0;
      moffsety = adjust_offset_for_component_ref (expry, moffsety);
      expry = t;
    }
  else if (INDIRECT_REF_P (expry))
    {
      expry = TREE_OPERAND (expry, 0);
      if (flag_argument_noalias < 2
          || TREE_CODE (expry) != PARM_DECL)
        return 0;
    }

  if (! DECL_P (exprx) || ! DECL_P (expry))
    return 0;

  rtlx = DECL_RTL (exprx);
  rtly = DECL_RTL (expry);

  /* If either RTL is not a MEM, it must be a REG or CONCAT, meaning they
     can't overlap unless they are the same because we never reuse that part
     of the stack frame used for locals for spilled pseudos.  */
  if ((!MEM_P (rtlx) || !MEM_P (rtly))
      && ! rtx_equal_p (rtlx, rtly))
    return 1;

  /* Get the base and offsets of both decls.  If either is a register, we
     know both are and are the same, so use that as the base.  The only way
     we can avoid overlap is if we can deduce that they are nonoverlapping
     pieces of that decl, which is very rare.  */
  basex = MEM_P (rtlx) ? XEXP (rtlx, 0) : rtlx;
  if (GET_CODE (basex) == PLUS && GET_CODE (XEXP (basex, 1)) == CONST_INT)
    offsetx = INTVAL (XEXP (basex, 1)), basex = XEXP (basex, 0);

  basey = MEM_P (rtly) ? XEXP (rtly, 0) : rtly;
  if (GET_CODE (basey) == PLUS && GET_CODE (XEXP (basey, 1)) == CONST_INT)
    offsety = INTVAL (XEXP (basey, 1)), basey = XEXP (basey, 0);

  /* If the bases are different, we know they do not overlap if both
     are constants or if one is a constant and the other a pointer into the
     stack frame.  Otherwise a different base means we can't tell if they
     overlap or not.  */
  if (! rtx_equal_p (basex, basey))
    return ((CONSTANT_P (basex) && CONSTANT_P (basey))
            || (CONSTANT_P (basex) && REG_P (basey)
                && REGNO_PTR_FRAME_P (REGNO (basey)))
            || (CONSTANT_P (basey) && REG_P (basex)
                && REGNO_PTR_FRAME_P (REGNO (basex))));

  sizex = (!MEM_P (rtlx) ? (int) GET_MODE_SIZE (GET_MODE (rtlx))
           : MEM_SIZE (rtlx) ? INTVAL (MEM_SIZE (rtlx))
           : -1);
  sizey = (!MEM_P (rtly) ? (int) GET_MODE_SIZE (GET_MODE (rtly))
           : MEM_SIZE (rtly) ? INTVAL (MEM_SIZE (rtly))
           : -1);

  /* If we have an offset for either memref, it can update the values computed
     above.  */
  if (moffsetx)
    offsetx += INTVAL (moffsetx), sizex -= INTVAL (moffsetx);
  if (moffsety)
    offsety += INTVAL (moffsety), sizey -= INTVAL (moffsety);

  /* If a memref has both a size and an offset, we can use the smaller size.
     We can't do this if the offset isn't known because we must view this
     memref as being anywhere inside the DECL's MEM.  */
  if (MEM_SIZE (x) && moffsetx)
    sizex = INTVAL (MEM_SIZE (x));
  if (MEM_SIZE (y) && moffsety)
    sizey = INTVAL (MEM_SIZE (y));

  /* Put the values of the memref with the lower offset in X's values.  */
  if (offsetx > offsety)
    {
      tem = offsetx, offsetx = offsety, offsety = tem;
      tem = sizex, sizex = sizey, sizey = tem;
    }

  /* If we don't know the size of the lower-offset value, we can't tell
     if they conflict.  Otherwise, we do the test.  */
  return sizex >= 0 && offsety >= offsetx + sizex;
}
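
/* Worked example (illustrative, not from the original sources): for two
   references into the same decl with offsetx == 0, sizex == 4 and
   offsety == 8, we have offsety >= offsetx + sizex and the memrefs
   provably do not overlap; with offsety == 2 the test fails and 0
   (possible overlap) is returned.  */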
/* True dependence: X is read after store in MEM takes place.  */

int
true_dependence (rtx mem, enum machine_mode mem_mode, rtx x,
                 int (*varies) (rtx, int))
{
  rtx x_addr, mem_addr;
  rtx base;

  if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
    return 1;

  /* (mem:BLK (scratch)) is a special mechanism to conflict with everything.
     This is used in epilogue deallocation functions.  */
  if (GET_MODE (x) == BLKmode && GET_CODE (XEXP (x, 0)) == SCRATCH)
    return 1;
  if (GET_MODE (mem) == BLKmode && GET_CODE (XEXP (mem, 0)) == SCRATCH)
    return 1;

  if (DIFFERENT_ALIAS_SETS_P (x, mem))
    return 0;

  /* Read-only memory is by definition never modified, and therefore can't
     conflict with anything.  We don't expect to find read-only set on MEM,
     but stupid user tricks can produce them, so don't abort.  */
  if (MEM_READONLY_P (x))
    return 0;

  if (nonoverlapping_memrefs_p (mem, x))
    return 0;

  if (mem_mode == VOIDmode)
    mem_mode = GET_MODE (mem);

  x_addr = get_addr (XEXP (x, 0));
  mem_addr = get_addr (XEXP (mem, 0));

  base = find_base_term (x_addr);
  if (base && (GET_CODE (base) == LABEL_REF
               || (GET_CODE (base) == SYMBOL_REF
                   && CONSTANT_POOL_ADDRESS_P (base))))
    return 0;

  if (! base_alias_check (x_addr, mem_addr, GET_MODE (x), mem_mode))
    return 0;

  x_addr = canon_rtx (x_addr);
  mem_addr = canon_rtx (mem_addr);

  if (! memrefs_conflict_p (GET_MODE_SIZE (mem_mode), mem_addr,
                            SIZE_FOR_MODE (x), x_addr, 0))
    return 0;

  if (aliases_everything_p (x))
    return 1;

  /* We cannot use aliases_everything_p to test MEM, since we must look
     at MEM_MODE, rather than GET_MODE (MEM).  */
  if (mem_mode == QImode || GET_CODE (mem_addr) == AND)
    return 1;

  /* In true_dependence we also allow BLKmode to alias anything.  Why
     don't we do this in anti_dependence and output_dependence?  */
  if (mem_mode == BLKmode || GET_MODE (x) == BLKmode)
    return 1;

  return ! fixed_scalar_and_varying_struct_p (mem, x, mem_addr, x_addr,
                                              varies);
}
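
/* Usage sketch (illustrative, not from the original sources): for a
   store insn writing MEM followed by a load insn reading X, a caller
   such as an instruction scheduler can test

     true_dependence (mem, VOIDmode, x, rtx_addr_varies_p)

   and move the load above the store only when this returns 0.  */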
/* Canonical true dependence: X is read after store in MEM takes place.
   Variant of true_dependence which assumes MEM has already been
   canonicalized (hence we no longer do that here).
   The mem_addr argument has been added, since true_dependence computed
   this value prior to canonicalizing.  */

int
canon_true_dependence (rtx mem, enum machine_mode mem_mode, rtx mem_addr,
                       rtx x, int (*varies) (rtx, int))
{
  rtx x_addr;

  if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
    return 1;

  /* (mem:BLK (scratch)) is a special mechanism to conflict with everything.
     This is used in epilogue deallocation functions.  */
  if (GET_MODE (x) == BLKmode && GET_CODE (XEXP (x, 0)) == SCRATCH)
    return 1;
  if (GET_MODE (mem) == BLKmode && GET_CODE (XEXP (mem, 0)) == SCRATCH)
    return 1;

  if (DIFFERENT_ALIAS_SETS_P (x, mem))
    return 0;

  /* Read-only memory is by definition never modified, and therefore can't
     conflict with anything.  We don't expect to find read-only set on MEM,
     but stupid user tricks can produce them, so don't abort.  */
  if (MEM_READONLY_P (x))
    return 0;

  if (nonoverlapping_memrefs_p (x, mem))
    return 0;

  x_addr = get_addr (XEXP (x, 0));

  if (! base_alias_check (x_addr, mem_addr, GET_MODE (x), mem_mode))
    return 0;

  x_addr = canon_rtx (x_addr);
  if (! memrefs_conflict_p (GET_MODE_SIZE (mem_mode), mem_addr,
                            SIZE_FOR_MODE (x), x_addr, 0))
    return 0;

  if (aliases_everything_p (x))
    return 1;

  /* We cannot use aliases_everything_p to test MEM, since we must look
     at MEM_MODE, rather than GET_MODE (MEM).  */
  if (mem_mode == QImode || GET_CODE (mem_addr) == AND)
    return 1;

  /* In true_dependence we also allow BLKmode to alias anything.  Why
     don't we do this in anti_dependence and output_dependence?  */
  if (mem_mode == BLKmode || GET_MODE (x) == BLKmode)
    return 1;

  return ! fixed_scalar_and_varying_struct_p (mem, x, mem_addr, x_addr,
                                              varies);
}
/* Returns nonzero if a write to X might alias a previous read from
   (or, if WRITEP is nonzero, a write to) MEM.  */

static int
write_dependence_p (rtx mem, rtx x, int writep)
{
  rtx x_addr, mem_addr;
  rtx fixed_scalar;
  rtx base;

  if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
    return 1;

  /* (mem:BLK (scratch)) is a special mechanism to conflict with everything.
     This is used in epilogue deallocation functions.  */
  if (GET_MODE (x) == BLKmode && GET_CODE (XEXP (x, 0)) == SCRATCH)
    return 1;
  if (GET_MODE (mem) == BLKmode && GET_CODE (XEXP (mem, 0)) == SCRATCH)
    return 1;

  if (DIFFERENT_ALIAS_SETS_P (x, mem))
    return 0;

  /* A read from read-only memory can't conflict with read-write memory.  */
  if (!writep && MEM_READONLY_P (mem))
    return 0;

  if (nonoverlapping_memrefs_p (x, mem))
    return 0;

  x_addr = get_addr (XEXP (x, 0));
  mem_addr = get_addr (XEXP (mem, 0));

  if (! writep)
    {
      base = find_base_term (mem_addr);
      if (base && (GET_CODE (base) == LABEL_REF
                   || (GET_CODE (base) == SYMBOL_REF
                       && CONSTANT_POOL_ADDRESS_P (base))))
        return 0;
    }

  if (! base_alias_check (x_addr, mem_addr, GET_MODE (x),
                          GET_MODE (mem)))
    return 0;

  x_addr = canon_rtx (x_addr);
  mem_addr = canon_rtx (mem_addr);

  if (!memrefs_conflict_p (SIZE_FOR_MODE (mem), mem_addr,
                           SIZE_FOR_MODE (x), x_addr, 0))
    return 0;

  fixed_scalar
    = fixed_scalar_and_varying_struct_p (mem, x, mem_addr, x_addr,
                                         rtx_addr_varies_p);

  return (!(fixed_scalar == mem && !aliases_everything_p (x))
          && !(fixed_scalar == x && !aliases_everything_p (mem)));
}
/* Anti dependence: X is written after read in MEM takes place.  */

int
anti_dependence (rtx mem, rtx x)
{
  return write_dependence_p (mem, x, /*writep=*/0);
}

/* Output dependence: X is written after store in MEM takes place.  */

int
output_dependence (rtx mem, rtx x)
{
  return write_dependence_p (mem, x, /*writep=*/1);
}
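
/* Summary example (illustrative, not from the original sources): in

     r1 = *p;    (1)
     *p = r2;    (2)
     *p = r3;    (3)
     r4 = *p;    (4)

   (1)-(2) is an anti dependence, (2)-(3) an output dependence, and
   (3)-(4) a true dependence; none of these pairs may be reordered
   while the references can alias.  */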
/* A subroutine of nonlocal_mentioned_p, returns 1 if *LOC mentions
   something which is not local to the function and is not constant.  */

static int
nonlocal_mentioned_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
{
  rtx x = *loc;
  rtx base;
  int regno;

  if (! x)
    return 0;

  switch (GET_CODE (x))
    {
    case SUBREG:
      if (REG_P (SUBREG_REG (x)))
        {
          /* Global registers are not local.  */
          if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
              && global_regs[subreg_regno (x)])
            return 1;
          return 0;
        }
      break;

    case REG:
      regno = REGNO (x);
      /* Global registers are not local.  */
      if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
        return 1;
      return 0;

    case SCRATCH:
    case PC:
    case CC0:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CONST:
    case LABEL_REF:
      return 0;

    case SYMBOL_REF:
      /* Constants in the function's constant pool are constant.  */
      if (CONSTANT_POOL_ADDRESS_P (x))
        return 0;
      return 1;

    case CALL:
      /* Non-constant calls and recursion are not local.  */
      return 1;

    case MEM:
      /* Be overly conservative and consider any volatile memory
         reference as not local.  */
      if (MEM_VOLATILE_P (x))
        return 1;
      base = find_base_term (XEXP (x, 0));
      if (base)
        {
          /* A Pmode ADDRESS could be a reference via the structure value
             address or static chain.  Such memory references are nonlocal.

             Thus, we have to examine the contents of the ADDRESS to find
             out if this is a local reference or not.  */
          if (GET_CODE (base) == ADDRESS
              && GET_MODE (base) == Pmode
              && (XEXP (base, 0) == stack_pointer_rtx
                  || XEXP (base, 0) == arg_pointer_rtx
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
                  || XEXP (base, 0) == hard_frame_pointer_rtx
#endif
                  || XEXP (base, 0) == frame_pointer_rtx))
            return 0;
          /* Constants in the function's constant pool are constant.  */
          if (GET_CODE (base) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (base))
            return 0;
        }
      return 1;

    case UNSPEC_VOLATILE:
    case ASM_INPUT:
      return 1;

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 1;

      /* Fall through.  */

    default:
      break;
    }

  return 0;
}
/* Returns nonzero if X might mention something which is not
   local to the function and is not constant.  */

static int
nonlocal_mentioned_p (rtx x)
{
  if (INSN_P (x))
    {
      if (CALL_P (x))
        {
          if (! CONST_OR_PURE_CALL_P (x))
            return 1;
          x = CALL_INSN_FUNCTION_USAGE (x);
          if (x == 0)
            return 0;
        }
      else
        x = PATTERN (x);
    }

  return for_each_rtx (&x, nonlocal_mentioned_p_1, NULL);
}
/* A subroutine of nonlocal_referenced_p, returns 1 if *LOC references
   something which is not local to the function and is not constant.  */

static int
nonlocal_referenced_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
{
  rtx x = *loc;

  if (! x)
    return 0;

  switch (GET_CODE (x))
    {
    case MEM:
    case REG:
    case SYMBOL_REF:
    case SUBREG:
      return nonlocal_mentioned_p (x);

    case CALL:
      /* Non-constant calls and recursion are not local.  */
      return 1;

    case SET:
      if (nonlocal_mentioned_p (SET_SRC (x)))
        return 1;

      if (MEM_P (SET_DEST (x)))
        return nonlocal_mentioned_p (XEXP (SET_DEST (x), 0));

      /* If the destination is anything other than CC0, PC, a MEM, a
         REG, or a SUBREG of a REG that occupies all of the REG, then
         X references nonlocal memory if it is mentioned in the
         destination.  */
      if (GET_CODE (SET_DEST (x)) != CC0
          && GET_CODE (SET_DEST (x)) != PC
          && !REG_P (SET_DEST (x))
          && ! (GET_CODE (SET_DEST (x)) == SUBREG
                && REG_P (SUBREG_REG (SET_DEST (x)))
                && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
                      + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
                    == ((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
                         + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
        return nonlocal_mentioned_p (SET_DEST (x));
      return 0;

    case CLOBBER:
      if (MEM_P (XEXP (x, 0)))
        return nonlocal_mentioned_p (XEXP (XEXP (x, 0), 0));
      return 0;

    case USE:
      return nonlocal_mentioned_p (XEXP (x, 0));

    case ASM_INPUT:
    case UNSPEC_VOLATILE:
      return 1;

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 1;

      /* Fall through.  */

    default:
      break;
    }

  return 0;
}
/* Returns nonzero if X might reference something which is not
   local to the function and is not constant.  */

static int
nonlocal_referenced_p (rtx x)
{
  if (INSN_P (x))
    {
      if (CALL_P (x))
        {
          if (! CONST_OR_PURE_CALL_P (x))
            return 1;
          x = CALL_INSN_FUNCTION_USAGE (x);
          if (x == 0)
            return 0;
        }
      else
        x = PATTERN (x);
    }

  return for_each_rtx (&x, nonlocal_referenced_p_1, NULL);
}
/* A subroutine of nonlocal_set_p, returns 1 if *LOC sets
   something which is not local to the function and is not constant.  */

static int
nonlocal_set_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
{
  rtx x = *loc;

  if (! x)
    return 0;

  switch (GET_CODE (x))
    {
    case CALL:
      /* Non-constant calls and recursion are not local.  */
      return 1;

    case PRE_INC:
    case PRE_DEC:
    case POST_INC:
    case POST_DEC:
    case PRE_MODIFY:
    case POST_MODIFY:
      return nonlocal_mentioned_p (XEXP (x, 0));

    case SET:
      if (nonlocal_mentioned_p (SET_DEST (x)))
        return 1;
      return nonlocal_set_p (SET_SRC (x));

    case CLOBBER:
      return nonlocal_mentioned_p (XEXP (x, 0));

    case USE:
      return 0;

    case ASM_INPUT:
    case UNSPEC_VOLATILE:
      return 1;

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 1;

      /* Fall through.  */

    default:
      break;
    }

  return 0;
}
/* Returns nonzero if X might set something which is not
   local to the function and is not constant.  */

static int
nonlocal_set_p (rtx x)
{
  if (INSN_P (x))
    {
      if (CALL_P (x))
        {
          if (! CONST_OR_PURE_CALL_P (x))
            return 1;
          x = CALL_INSN_FUNCTION_USAGE (x);
          if (x == 0)
            return 0;
        }
      else
        x = PATTERN (x);
    }

  return for_each_rtx (&x, nonlocal_set_p_1, NULL);
}
/* Mark the function if it is pure or constant.  */

void
mark_constant_function (void)
{
  rtx insn;
  int nonlocal_memory_referenced;

  if (TREE_READONLY (current_function_decl)
      || DECL_IS_PURE (current_function_decl)
      || TREE_THIS_VOLATILE (current_function_decl)
      || current_function_has_nonlocal_goto
      || !targetm.binds_local_p (current_function_decl))
    return;

  /* A loop might not return, which counts as a side effect.  */
  if (mark_dfs_back_edges ())
    return;

  nonlocal_memory_referenced = 0;

  init_alias_analysis ();

  /* Determine if this is a constant or pure function.  */

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      if (! INSN_P (insn))
        continue;

      if (nonlocal_set_p (insn) || global_reg_mentioned_p (insn)
          || volatile_refs_p (PATTERN (insn)))
        break;

      if (! nonlocal_memory_referenced)
        nonlocal_memory_referenced = nonlocal_referenced_p (insn);
    }

  end_alias_analysis ();

  /* Mark the function.  */

  if (insn)
    /* We stopped scanning early: the function has side effects, so it
       is neither const nor pure.  */
    ;
  else if (nonlocal_memory_referenced)
    {
      cgraph_rtl_info (current_function_decl)->pure_function = 1;
      DECL_IS_PURE (current_function_decl) = 1;
    }
  else
    {
      cgraph_rtl_info (current_function_decl)->const_function = 1;
      TREE_READONLY (current_function_decl) = 1;
    }
}
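
/* Illustrative examples (not from the original sources):

     int add (int a, int b) { return a + b; }

   touches no nonlocal state at all and is marked const, while

     int g;
     int get (void) { return g; }

   only reads nonlocal memory and is marked pure; that distinction is
   exactly what the nonlocal_memory_referenced flag above records.  */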
/* Perform the one-time initialization of the aliasing machinery: seed
   static_reg_base_value with ADDRESS expressions for the registers
   that may hold an address on entry to a function.  */

void
init_alias_once (void)
{
  int i;

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    /* Check whether this register can hold an incoming pointer
       argument.  FUNCTION_ARG_REGNO_P tests outgoing register
       numbers, so translate if necessary due to register windows.  */
    if (FUNCTION_ARG_REGNO_P (OUTGOING_REGNO (i))
        && HARD_REGNO_MODE_OK (i, Pmode))
      static_reg_base_value[i]
        = gen_rtx_ADDRESS (VOIDmode, gen_rtx_REG (Pmode, i));

  static_reg_base_value[STACK_POINTER_REGNUM]
    = gen_rtx_ADDRESS (Pmode, stack_pointer_rtx);
  static_reg_base_value[ARG_POINTER_REGNUM]
    = gen_rtx_ADDRESS (Pmode, arg_pointer_rtx);
  static_reg_base_value[FRAME_POINTER_REGNUM]
    = gen_rtx_ADDRESS (Pmode, frame_pointer_rtx);
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
  static_reg_base_value[HARD_FRAME_POINTER_REGNUM]
    = gen_rtx_ADDRESS (Pmode, hard_frame_pointer_rtx);
#endif
}
/* Set MEMORY_MODIFIED when X modifies DATA (which is assumed to be a
   memory reference).  */
static bool memory_modified;
static void
memory_modified_1 (rtx x, rtx pat ATTRIBUTE_UNUSED, void *data)
{
  if (MEM_P (x))
    {
      if (anti_dependence (x, (rtx) data) || output_dependence (x, (rtx) data))
        memory_modified = true;
    }
}

/* Return true when INSN possibly modifies the contents of the memory
   location MEM.  */
bool
memory_modified_in_insn_p (rtx mem, rtx insn)
{
  if (!INSN_P (insn))
    return false;
  memory_modified = false;
  note_stores (PATTERN (insn), memory_modified_1, mem);
  return memory_modified;
}
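
/* Usage sketch (illustrative, not from the original sources): for an
   insn whose pattern is

     (set (mem:SI (reg:SI 100)) (reg:SI 101))

   memory_modified_in_insn_p (mem, insn) walks the stores via
   note_stores and returns true unless dependence analysis proves that
   MEM and the stored location are distinct.  */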
/* Initialize the aliasing machinery.  Initialize the REG_KNOWN_VALUE
   array.  */

void
init_alias_analysis (void)
{
  unsigned int maxreg = max_reg_num ();
  int changed, pass;
  int i;
  unsigned int ui;
  rtx insn;

  timevar_push (TV_ALIAS_ANALYSIS);

  reg_known_value_size = maxreg - FIRST_PSEUDO_REGISTER;
  reg_known_value = ggc_calloc (reg_known_value_size, sizeof (rtx));
  reg_known_equiv_p = xcalloc (reg_known_value_size, sizeof (bool));

  /* Overallocate reg_base_value to allow some growth during loop
     optimization.  Loop unrolling can create a large number of
     registers.  */
  if (old_reg_base_value)
    {
      reg_base_value = old_reg_base_value;
      /* If the varray gets large, zeroing cost may become important,
         so shrink it back when it is much larger than needed.  */
      if (VARRAY_SIZE (reg_base_value) > 256
          && VARRAY_SIZE (reg_base_value) > 4 * maxreg)
        VARRAY_GROW (reg_base_value, maxreg);
      VARRAY_CLEAR (reg_base_value);
      if (VARRAY_SIZE (reg_base_value) < maxreg)
        VARRAY_GROW (reg_base_value, maxreg);
    }
  else
    {
      VARRAY_RTX_INIT (reg_base_value, maxreg, "reg_base_value");
    }

  new_reg_base_value = xmalloc (maxreg * sizeof (rtx));
  reg_seen = xmalloc (maxreg);

  /* The basic idea is that each pass through this loop will use the
     "constant" information from the previous pass to propagate alias
     information through another level of assignments.

     This could get expensive if the assignment chains are long.  Maybe
     we should throttle the number of iterations, possibly based on
     the optimization level or flag_expensive_optimizations.

     We could propagate more information in the first pass by making use
     of REG_N_SETS to determine immediately that the alias information
     for a pseudo is "constant".

     A program with an uninitialized variable can cause an infinite loop
     here.  Instead of doing a full dataflow analysis to detect such problems
     we just cap the number of iterations for the loop.

     The state of the arrays for the set chain in question does not matter
     since the program has undefined behavior.  */
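
  /* For instance (illustrative, not from the original sources): if
     (reg 101) is copied from (reg 100) at a point that precedes the
     only set of (reg 100) in the insn stream, as can happen around a
     loop back edge, the first pass records a base value only for
     reg 100 and a second pass is needed to propagate it to reg 101;
     MAX_ALIAS_LOOP_PASSES caps the number of iterations.  */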
  pass = 0;
  do
    {
      /* Assume nothing will change this iteration of the loop.  */
      changed = 0;

      /* We want to assign the same IDs each iteration of this loop, so
         start counting from zero each iteration of the loop.  */
      unique_id = 0;

      /* We're at the start of the function each iteration through the
         loop, so we're copying arguments.  */
      copying_arguments = true;

      /* Wipe the potential alias information clean for this pass.  */
      memset (new_reg_base_value, 0, maxreg * sizeof (rtx));

      /* Wipe the reg_seen array clean.  */
      memset (reg_seen, 0, maxreg);

      /* Mark all hard registers which may contain an address.
         The stack, frame and argument pointers may contain an address.
         An argument register which can hold a Pmode value may contain
         an address even if it is not in BASE_REGS.

         The address expression is VOIDmode for an argument and
         Pmode for other registers.  */

      memcpy (new_reg_base_value, static_reg_base_value,
              FIRST_PSEUDO_REGISTER * sizeof (rtx));

      /* Walk the insns adding values to the new_reg_base_value array.  */
      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
        {
          if (INSN_P (insn))
            {
              rtx note, set;

#if defined (HAVE_prologue) || defined (HAVE_epilogue)
              /* The prologue/epilogue insns are not threaded onto the
                 insn chain until after reload has completed.  Thus,
                 there is no sense wasting time checking if INSN is in
                 the prologue/epilogue until after reload has completed.  */
              if (reload_completed
                  && prologue_epilogue_contains (insn))
                continue;
#endif

              /* If this insn has a noalias note, process it.  Otherwise,
                 scan for sets.  A simple set will have no side effects
                 which could change the base value of any other register.  */

              if (GET_CODE (PATTERN (insn)) == SET
                  && REG_NOTES (insn) != 0
                  && find_reg_note (insn, REG_NOALIAS, NULL_RTX))
                record_set (SET_DEST (PATTERN (insn)), NULL_RTX, NULL);
              else
                note_stores (PATTERN (insn), record_set, NULL);

              set = single_set (insn);

              if (set != 0
                  && REG_P (SET_DEST (set))
                  && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
                {
                  unsigned int regno = REGNO (SET_DEST (set));
                  rtx src = SET_SRC (set);
                  rtx t;

                  if (REG_NOTES (insn) != 0
                      && (((note = find_reg_note (insn, REG_EQUAL, 0)) != 0
                           && REG_N_SETS (regno) == 1)
                          || (note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) != 0)
                      && GET_CODE (XEXP (note, 0)) != EXPR_LIST
                      && ! rtx_varies_p (XEXP (note, 0), 1)
                      && ! reg_overlap_mentioned_p (SET_DEST (set),
                                                    XEXP (note, 0)))
                    {
                      set_reg_known_value (regno, XEXP (note, 0));
                      set_reg_known_equiv_p (regno,
                                             REG_NOTE_KIND (note) == REG_EQUIV);
                    }
                  else if (REG_N_SETS (regno) == 1
                           && GET_CODE (src) == PLUS
                           && REG_P (XEXP (src, 0))
                           && (t = get_reg_known_value (REGNO (XEXP (src, 0))))
                           && GET_CODE (XEXP (src, 1)) == CONST_INT)
                    {
                      t = plus_constant (t, INTVAL (XEXP (src, 1)));
                      set_reg_known_value (regno, t);
                      set_reg_known_equiv_p (regno, 0);
                    }
                  else if (REG_N_SETS (regno) == 1
                           && ! rtx_varies_p (src, 1))
                    {
                      set_reg_known_value (regno, src);
                      set_reg_known_equiv_p (regno, 0);
                    }
                }
            }
          else if (NOTE_P (insn)
                   && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG)
            copying_arguments = false;
        }

      /* Now propagate values from new_reg_base_value to reg_base_value.  */
      gcc_assert (maxreg == (unsigned int) max_reg_num ());

      for (ui = 0; ui < maxreg; ui++)
        {
          if (new_reg_base_value[ui]
              && new_reg_base_value[ui] != VARRAY_RTX (reg_base_value, ui)
              && ! rtx_equal_p (new_reg_base_value[ui],
                                VARRAY_RTX (reg_base_value, ui)))
            {
              VARRAY_RTX (reg_base_value, ui) = new_reg_base_value[ui];
              changed = 1;
            }
        }
    }
  while (changed && ++pass < MAX_ALIAS_LOOP_PASSES);

  /* Fill in the remaining entries.  */
  for (i = 0; i < (int) reg_known_value_size; i++)
    if (reg_known_value[i] == 0)
      reg_known_value[i] = regno_reg_rtx[i + FIRST_PSEUDO_REGISTER];

  /* Simplify the reg_base_value array so that no register refers to
     another register, except to special registers indirectly through
     ADDRESS expressions.

     In theory this loop can take as long as O(registers^2), but unless
     there are very long dependency chains it will run in close to linear
     time.

     This loop may not be needed any longer now that the main loop does
     a better job at propagating alias information.  */
  pass = 0;
  do
    {
      changed = 0;
      pass++;
      for (ui = 0; ui < maxreg; ui++)
        {
          rtx base = VARRAY_RTX (reg_base_value, ui);
          if (base && REG_P (base))
            {
              unsigned int base_regno = REGNO (base);
              if (base_regno == ui)             /* register set from itself */
                VARRAY_RTX (reg_base_value, ui) = 0;
              else
                VARRAY_RTX (reg_base_value, ui)
                  = VARRAY_RTX (reg_base_value, base_regno);
              changed = 1;
            }
        }
    }
  while (changed && pass < MAX_ALIAS_LOOP_PASSES);

  /* Clean up.  */
  free (new_reg_base_value);
  new_reg_base_value = 0;
  free (reg_seen);
  reg_seen = 0;
  timevar_pop (TV_ALIAS_ANALYSIS);
}
/* Tear down the aliasing machinery, keeping reg_base_value around so
   that a later call to init_alias_analysis can reuse its storage.  */

void
end_alias_analysis (void)
{
  old_reg_base_value = reg_base_value;
  ggc_free (reg_known_value);
  reg_known_value = 0;
  reg_known_value_size = 0;
  free (reg_known_equiv_p);
  reg_known_equiv_p = 0;
  if (alias_invariant)
    {
      ggc_free (alias_invariant);
      alias_invariant = 0;
      alias_invariant_size = 0;
    }
}

#include "gt-alias.h"