/* Alias analysis for GNU C
   Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004
   Free Software Foundation, Inc.
   Contributed by John Carr (jfc@mit.edu).

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "function.h"
#include "alias.h"
#include "emit-rtl.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "flags.h"
#include "output.h"
#include "toplev.h"
#include "cselib.h"
#include "splay-tree.h"
#include "ggc.h"
#include "langhooks.h"
#include "timevar.h"
#include "target.h"
#include "cgraph.h"
#include "varray.h"
/* The alias sets assigned to MEMs assist the back-end in determining
   which MEMs can alias which other MEMs.  In general, two MEMs in
   different alias sets cannot alias each other, with one important
   exception.  Consider something like:

     struct S { int i; double d; };

   a store to an `S' can alias something of either type `int' or type
   `double'.  (However, a store to an `int' cannot alias a `double'
   and vice versa.)  We indicate this via a tree structure that looks
   like:
               struct S
                /   \
               /     \
             |/_     _\|
             int    double

   (The arrows are directed and point downwards.)
   In this situation we say the alias set for `struct S' is the
   `superset' and that those for `int' and `double' are `subsets'.

   To see whether two alias sets can point to the same memory, we must
   see if either alias set is a subset of the other.  We need not trace
   past immediate descendants, however, since we propagate all
   grandchildren up one level.

   Alias set zero is implicitly a superset of all other alias sets.
   However, there is no actual entry for alias set zero.  It is an
   error to attempt to explicitly construct a subset of zero.  */
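
/* Illustrative sketch (not part of this file's logic): given the C
   fragment

     struct S { int i; double d; };
     struct S *s;
     int *p;

   a store through `s' may alias a load through `p', because the alias
   set of `int' is a subset of the alias set of `struct S'.  A load
   through a `double *' may alias `s->d' for the same reason, while an
   `int' access and a `double' access never alias each other, since
   neither alias set is a subset of the other.  */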
struct alias_set_entry GTY(())
{
  /* The alias set number, as stored in MEM_ALIAS_SET.  */
  HOST_WIDE_INT alias_set;

  /* The children of the alias set.  These are not just the immediate
     children, but, in fact, all descendants.  So, if we have:

       struct T { struct S s; float f; }

     continuing our example above, the children here will be all of
     `int', `double', `float', and `struct S'.  */
  splay_tree GTY((param1_is (int), param2_is (int))) children;

  /* Nonzero if this alias set would have a child of zero: this effectively
     makes this alias set the same as alias set zero.  */
  int has_zero_child;
};
typedef struct alias_set_entry *alias_set_entry;
static int rtx_equal_for_memref_p (rtx, rtx);
static rtx find_symbolic_term (rtx);
static int memrefs_conflict_p (int, rtx, int, rtx, HOST_WIDE_INT);
static void record_set (rtx, rtx, void *);
static int base_alias_check (rtx, rtx, enum machine_mode,
			     enum machine_mode);
static rtx find_base_value (rtx);
static int mems_in_disjoint_alias_sets_p (rtx, rtx);
static int insert_subset_children (splay_tree_node, void*);
static tree find_base_decl (tree);
static alias_set_entry get_alias_set_entry (HOST_WIDE_INT);
static rtx fixed_scalar_and_varying_struct_p (rtx, rtx, rtx, rtx,
					      int (*) (rtx, int));
static int aliases_everything_p (rtx);
static bool nonoverlapping_component_refs_p (tree, tree);
static tree decl_for_component_ref (tree);
static rtx adjust_offset_for_component_ref (tree, rtx);
static int nonoverlapping_memrefs_p (rtx, rtx);
static int write_dependence_p (rtx, rtx, int);

static int nonlocal_mentioned_p_1 (rtx *, void *);
static int nonlocal_mentioned_p (rtx);
static int nonlocal_referenced_p_1 (rtx *, void *);
static int nonlocal_referenced_p (rtx);
static int nonlocal_set_p_1 (rtx *, void *);
static int nonlocal_set_p (rtx);
static void memory_modified_1 (rtx, rtx, void *);
/* Set up all info needed to perform alias analysis on memory references.  */

/* Returns the size in bytes of the mode of X.  */
#define SIZE_FOR_MODE(X) (GET_MODE_SIZE (GET_MODE (X)))

/* Returns nonzero if MEM1 and MEM2 do not alias because they are in
   different alias sets.  We ignore alias sets in functions making use
   of variable arguments because the va_arg macros on some systems are
   not legal ANSI C.  */
#define DIFFERENT_ALIAS_SETS_P(MEM1, MEM2)			\
  mems_in_disjoint_alias_sets_p (MEM1, MEM2)

/* Cap the number of passes we make over the insns propagating alias
   information through set chains.  10 is a completely arbitrary choice.  */
#define MAX_ALIAS_LOOP_PASSES 10
/* reg_base_value[N] gives an address to which register N is related.
   If all sets after the first add or subtract to the current value
   or otherwise modify it so it does not point to a different top level
   object, reg_base_value[N] is equal to the address part of the source
   of the first set.

   A base address can be an ADDRESS, SYMBOL_REF, or LABEL_REF.  ADDRESS
   expressions represent certain special values: function arguments and
   the stack, frame, and argument pointers.

   The contents of an ADDRESS are not normally used; the mode of the
   ADDRESS determines whether the ADDRESS is a function argument or some
   other special value.  Pointer equality, not rtx_equal_p, determines whether
   two ADDRESS expressions refer to the same base address.

   The only use of the contents of an ADDRESS is for determining if the
   current function performs nonlocal memory references for the
   purposes of marking the function as a constant function.  */
static GTY(()) varray_type reg_base_value;
static rtx *new_reg_base_value;

/* We keep a copy of the old array around to reduce the amount of garbage
   produced; about 8% of the GC garbage produced was attributed to this
   array.  */
static GTY((deletable)) varray_type old_reg_base_value;

/* Static hunks of RTL used by the aliasing code; these are initialized
   once per function to avoid unnecessary RTL allocations.  */
static GTY (()) rtx static_reg_base_value[FIRST_PSEUDO_REGISTER];

#define REG_BASE_VALUE(X) \
  (reg_base_value && REGNO (X) < VARRAY_SIZE (reg_base_value) \
   ? VARRAY_RTX (reg_base_value, REGNO (X)) : 0)
/* Vector of known invariant relationships between registers.  Set in
   loop unrolling.  Indexed by register number, if nonzero the value
   is an expression describing this register in terms of another.

   The length of this array is REG_BASE_VALUE_SIZE.

   Because this array contains only pseudo registers it has no effect
   after reload.  */
static GTY((length("alias_invariant_size"))) rtx *alias_invariant;
static GTY(()) unsigned int alias_invariant_size;
/* Vector indexed by N giving the initial (unchanging) value known for
   pseudo-register N.  This array is initialized in init_alias_analysis,
   and does not change until end_alias_analysis is called.  */
static GTY((length("reg_known_value_size"))) rtx *reg_known_value;

/* Indicates number of valid entries in reg_known_value.  */
static GTY(()) unsigned int reg_known_value_size;

/* Vector recording for each reg_known_value whether it is due to a
   REG_EQUIV note.  Future passes (viz., reload) may replace the
   pseudo with the equivalent expression and so we account for the
   dependences that would be introduced if that happens.

   The REG_EQUIV notes created in assign_parms may mention the arg
   pointer, and there are explicit insns in the RTL that modify the
   arg pointer.  Thus we must ensure that such insns don't get
   scheduled across each other because that would invalidate the
   REG_EQUIV notes.  One could argue that the REG_EQUIV notes are
   wrong, but solving the problem in the scheduler will likely give
   better code, so we do it here.  */
static bool *reg_known_equiv_p;

/* True when scanning insns from the start of the rtl to the
   NOTE_INSN_FUNCTION_BEG note.  */
static bool copying_arguments;

/* The splay-tree used to store the various alias set entries.  */
static GTY ((param_is (struct alias_set_entry))) varray_type alias_sets;
/* Returns a pointer to the alias set entry for ALIAS_SET, if there is
   such an entry, or NULL otherwise.  */

static inline alias_set_entry
get_alias_set_entry (HOST_WIDE_INT alias_set)
{
  return (alias_set_entry) VARRAY_GENERIC_PTR (alias_sets, alias_set);
}
/* Returns nonzero if the alias sets for MEM1 and MEM2 are such that
   the two MEMs cannot alias each other.  */

static inline int
mems_in_disjoint_alias_sets_p (rtx mem1, rtx mem2)
{
  /* Perform a basic sanity check.  Namely, that there are no alias sets
     if we're not using strict aliasing.  This helps to catch bugs
     whereby someone uses PUT_CODE, but doesn't clear MEM_ALIAS_SET, or
     where a MEM is allocated in some way other than by the use of
     gen_rtx_MEM, and the MEM_ALIAS_SET is not cleared.  If we begin to
     use alias sets to indicate that spilled registers cannot alias each
     other, we might need to remove this check.  */
  gcc_assert (flag_strict_aliasing
	      || (!MEM_ALIAS_SET (mem1) && !MEM_ALIAS_SET (mem2)));

  return ! alias_sets_conflict_p (MEM_ALIAS_SET (mem1), MEM_ALIAS_SET (mem2));
}
/* Insert the NODE into the splay tree given by DATA.  Used by
   record_alias_subset via splay_tree_foreach.  */

static int
insert_subset_children (splay_tree_node node, void *data)
{
  splay_tree_insert ((splay_tree) data, node->key, node->value);

  return 0;
}
/* Return 1 if the two specified alias sets may conflict.  */

int
alias_sets_conflict_p (HOST_WIDE_INT set1, HOST_WIDE_INT set2)
{
  alias_set_entry ase;

  /* If we have no alias set information for one of the operands, we have
     to assume it can alias anything.  */
  if (set1 == 0 || set2 == 0
      /* If the two alias sets are the same, they may alias.  */
      || set1 == set2)
    return 1;

  /* See if the first alias set is a subset of the second.  */
  ase = get_alias_set_entry (set1);
  if (ase != 0
      && (ase->has_zero_child
	  || splay_tree_lookup (ase->children,
				(splay_tree_key) set2)))
    return 1;

  /* Now do the same, but with the alias sets reversed.  */
  ase = get_alias_set_entry (set2);
  if (ase != 0
      && (ase->has_zero_child
	  || splay_tree_lookup (ase->children,
				(splay_tree_key) set1)))
    return 1;

  /* The two alias sets are distinct and neither one is the
     child of the other.  Therefore, they cannot alias.  */
  return 0;
}
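
/* Worked example with invented alias set numbers: if `struct S' from
   the comment at the top of the file were assigned alias set 2, and
   `int' and `double' sets 3 and 4, then

     alias_sets_conflict_p (2, 3) == 1   (3 is a child of 2)
     alias_sets_conflict_p (3, 4) == 0   (siblings; no subset relation)
     alias_sets_conflict_p (0, 4) == 1   (set zero conflicts with all)

   Only the subset structure matters; the numbers are hypothetical.  */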
/* Return 1 if the two specified alias sets might conflict, or if any subtype
   of these alias sets might conflict.  */

int
alias_sets_might_conflict_p (HOST_WIDE_INT set1, HOST_WIDE_INT set2)
{
  if (set1 == 0 || set2 == 0 || set1 == set2)
    return 1;

  return 0;
}
/* Return 1 if any MEM object of type T1 will always conflict (using the
   dependency routines in this file) with any MEM object of type T2.
   This is used when allocating temporary storage.  If T1 and/or T2 are
   NULL_TREE, it means we know nothing about the storage.  */

int
objects_must_conflict_p (tree t1, tree t2)
{
  HOST_WIDE_INT set1, set2;

  /* If neither has a type specified, we don't know if they'll conflict
     because we may be using them to store objects of various types, for
     example the argument and local variables areas of inlined functions.  */
  if (t1 == 0 && t2 == 0)
    return 0;

  /* If they are the same type, they must conflict.  */
  if (t1 == t2
      /* Likewise if both are volatile.  */
      || (t1 != 0 && TYPE_VOLATILE (t1) && t2 != 0 && TYPE_VOLATILE (t2)))
    return 1;

  set1 = t1 ? get_alias_set (t1) : 0;
  set2 = t2 ? get_alias_set (t2) : 0;

  /* Otherwise they conflict if they have no alias set or the same one.
     We can't simply use alias_sets_conflict_p here, because we must make
     sure that every subtype of t1 will conflict with every subtype of
     t2 for which a pair of subobjects of these respective subtypes
     overlaps on the stack.  */
  return set1 == 0 || set2 == 0 || set1 == set2;
}
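
/* Worked example (a sketch): reusing one stack slot for two temporaries
   of types T1 and T2 is only safe when objects_must_conflict_p (T1, T2)
   is nonzero, because then every access to the old object is known to
   conflict with, and hence stay ordered against, every access to the
   new one.  Two `int' temporaries qualify (same alias set); an `int'
   and a `double' temporary do not, so their slots must stay distinct.  */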
/* T is an expression with pointer type.  Find the DECL on which this
   expression is based.  (For example, in `a[i]' this would be `a'.)
   If there is no such DECL, or a unique decl cannot be determined,
   NULL_TREE is returned.  */

static tree
find_base_decl (tree t)
{
  tree d0, d1, d2;

  if (t == 0 || t == error_mark_node || ! POINTER_TYPE_P (TREE_TYPE (t)))
    return 0;

  /* If this is a declaration, return it.  */
  if (TREE_CODE_CLASS (TREE_CODE (t)) == 'd')
    return t;

  /* Handle general expressions.  It would be nice to deal with
     COMPONENT_REFs here.  If we could tell that `a' and `b' were the
     same, then `a->f' and `b->f' are also the same.  */
  switch (TREE_CODE_CLASS (TREE_CODE (t)))
    {
    case '1':
      return find_base_decl (TREE_OPERAND (t, 0));

    case '2':
      /* Return 0 if found in neither or both are the same.  */
      d0 = find_base_decl (TREE_OPERAND (t, 0));
      d1 = find_base_decl (TREE_OPERAND (t, 1));
      if (d0 == d1)
	return d0;
      else if (d0 == 0)
	return d1;
      else if (d1 == 0)
	return d0;
      else
	return 0;

    case '3':
      d0 = find_base_decl (TREE_OPERAND (t, 0));
      d1 = find_base_decl (TREE_OPERAND (t, 1));
      d2 = find_base_decl (TREE_OPERAND (t, 2));

      /* Set any nonzero values from the last, then from the first.  */
      if (d1 == 0) d1 = d2;
      if (d0 == 0) d0 = d1;
      if (d1 == 0) d1 = d0;
      if (d2 == 0) d2 = d1;

      /* At this point all are nonzero or all are zero.  If all three are the
	 same, return it.  Otherwise, return zero.  */
      return (d0 == d1 && d1 == d2) ? d0 : 0;

    default:
      return 0;
    }
}
/* Return 1 if all the nested component references handled by
   get_inner_reference in T are such that we can address the object in T.  */

int
can_address_p (tree t)
{
  /* If we're at the end, it is vacuously addressable.  */
  if (! handled_component_p (t))
    return 1;

  /* Bitfields are never addressable.  */
  else if (TREE_CODE (t) == BIT_FIELD_REF)
    return 0;

  /* Fields are addressable unless they are marked as nonaddressable or
     the containing type has alias set 0.  */
  else if (TREE_CODE (t) == COMPONENT_REF
	   && ! DECL_NONADDRESSABLE_P (TREE_OPERAND (t, 1))
	   && get_alias_set (TREE_TYPE (TREE_OPERAND (t, 0))) != 0
	   && can_address_p (TREE_OPERAND (t, 0)))
    return 1;

  /* Likewise for arrays.  */
  else if ((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	   && ! TYPE_NONALIASED_COMPONENT (TREE_TYPE (TREE_OPERAND (t, 0)))
	   && get_alias_set (TREE_TYPE (TREE_OPERAND (t, 0))) != 0
	   && can_address_p (TREE_OPERAND (t, 0)))
    return 1;

  return 0;
}
/* Return the alias set for T, which may be either a type or an
   expression.  Call language-specific routine for help, if needed.  */

HOST_WIDE_INT
get_alias_set (tree t)
{
  HOST_WIDE_INT set;

  /* If we're not doing any alias analysis, just assume everything
     aliases everything else.  Also return 0 if this or its type is
     an error.  */
  if (! flag_strict_aliasing || t == error_mark_node
      || (! TYPE_P (t)
	  && (TREE_TYPE (t) == 0 || TREE_TYPE (t) == error_mark_node)))
    return 0;

  /* We can be passed either an expression or a type.  This and the
     language-specific routine may make mutually-recursive calls to each other
     to figure out what to do.  At each juncture, we see if this is a tree
     that the language may need to handle specially.  First handle things that
     aren't types.  */
  if (! TYPE_P (t))
    {
      tree inner = t;

      /* Remove any nops, then give the language a chance to do
	 something with this tree before we look at it.  */
      STRIP_NOPS (t);
      set = lang_hooks.get_alias_set (t);
      if (set != -1)
	return set;

      /* First see if the actual object referenced is an INDIRECT_REF from a
	 restrict-qualified pointer or a "void *".  */
      while (handled_component_p (inner))
	{
	  inner = TREE_OPERAND (inner, 0);
	  STRIP_NOPS (inner);
	}

      /* Check for accesses through restrict-qualified pointers.  */
      if (TREE_CODE (inner) == INDIRECT_REF)
	{
	  tree decl = find_base_decl (TREE_OPERAND (inner, 0));

	  if (decl && DECL_POINTER_ALIAS_SET_KNOWN_P (decl))
	    {
	      /* If we haven't computed the actual alias set, do it now.  */
	      if (DECL_POINTER_ALIAS_SET (decl) == -2)
		{
		  tree pointed_to_type = TREE_TYPE (TREE_TYPE (decl));

		  /* No two restricted pointers can point at the same thing.
		     However, a restricted pointer can point at the same thing
		     as an unrestricted pointer, if that unrestricted pointer
		     is based on the restricted pointer.  So, we make the
		     alias set for the restricted pointer a subset of the
		     alias set for the type pointed to by the type of the
		     decl.  */
		  HOST_WIDE_INT pointed_to_alias_set
		    = get_alias_set (pointed_to_type);

		  if (pointed_to_alias_set == 0)
		    /* It's not legal to make a subset of alias set zero.  */
		    DECL_POINTER_ALIAS_SET (decl) = 0;
		  else if (AGGREGATE_TYPE_P (pointed_to_type))
		    /* For an aggregate, we must treat the restricted
		       pointer the same as an ordinary pointer.  If we
		       were to make the type pointed to by the
		       restricted pointer a subset of the pointed-to
		       type, then we would believe that other subsets
		       of the pointed-to type (such as fields of that
		       type) do not conflict with the type pointed to
		       by the restricted pointer.  */
		    DECL_POINTER_ALIAS_SET (decl)
		      = pointed_to_alias_set;
		  else
		    {
		      DECL_POINTER_ALIAS_SET (decl) = new_alias_set ();
		      record_alias_subset (pointed_to_alias_set,
					   DECL_POINTER_ALIAS_SET (decl));
		    }
		}

	      /* We use the alias set indicated in the declaration.  */
	      return DECL_POINTER_ALIAS_SET (decl);
	    }

	  /* If we have an INDIRECT_REF via a void pointer, we don't
	     know anything about what that might alias.  Likewise if the
	     pointer is marked that way.  */
	  else if (TREE_CODE (TREE_TYPE (inner)) == VOID_TYPE
		   || (TYPE_REF_CAN_ALIAS_ALL
		       (TREE_TYPE (TREE_OPERAND (inner, 0)))))
	    return 0;
	}

      /* Otherwise, pick up the outermost object that we could have a pointer
	 to, processing conversions as above.  */
      while (handled_component_p (t) && ! can_address_p (t))
	{
	  t = TREE_OPERAND (t, 0);
	  STRIP_NOPS (t);
	}

      /* If we've already determined the alias set for a decl, just return
	 it.  This is necessary for C++ anonymous unions, whose component
	 variables don't look like union members (boo!).  */
      if (TREE_CODE (t) == VAR_DECL
	  && DECL_RTL_SET_P (t) && MEM_P (DECL_RTL (t)))
	return MEM_ALIAS_SET (DECL_RTL (t));

      /* Now all we care about is the type.  */
      t = TREE_TYPE (t);
    }

  /* Variant qualifiers don't affect the alias set, so get the main
     variant.  If this is a type with a known alias set, return it.  */
  t = TYPE_MAIN_VARIANT (t);
  if (TYPE_ALIAS_SET_KNOWN_P (t))
    return TYPE_ALIAS_SET (t);

  /* See if the language has special handling for this type.  */
  set = lang_hooks.get_alias_set (t);
  if (set != -1)
    return set;

  /* There are no objects of FUNCTION_TYPE, so there's no point in
     using up an alias set for them.  (There are, of course, pointers
     and references to functions, but that's different.)  */
  else if (TREE_CODE (t) == FUNCTION_TYPE)
    set = 0;

  /* Unless the language specifies otherwise, let vector types alias
     their components.  This avoids some nasty type punning issues in
     normal usage.  And indeed lets vectors be treated more like an
     array slice.  */
  else if (TREE_CODE (t) == VECTOR_TYPE)
    set = get_alias_set (TREE_TYPE (t));

  else
    /* Otherwise make a new alias set for this type.  */
    set = new_alias_set ();

  TYPE_ALIAS_SET (t) = set;

  /* If this is an aggregate type, we must record any component aliasing
     information.  */
  if (AGGREGATE_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
    record_component_aliases (t);

  return set;
}
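
/* Usage sketch (hypothetical callers): both types and expressions are
   accepted, so

     set = get_alias_set (TREE_TYPE (decl));
     set = get_alias_set (expr);

   are equally valid.  A returned value of 0 always means "may alias
   anything" and must be treated conservatively, not as an ordinary
   set number.  */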
/* Return a brand-new alias set.  */

static GTY(()) HOST_WIDE_INT last_alias_set;

HOST_WIDE_INT
new_alias_set (void)
{
  if (flag_strict_aliasing)
    {
      if (!alias_sets)
	VARRAY_GENERIC_PTR_INIT (alias_sets, 10, "alias sets");
      else
	VARRAY_GROW (alias_sets, last_alias_set + 2);
      return ++last_alias_set;
    }
  else
    return 0;
}
/* Indicate that things in SUBSET can alias things in SUPERSET, but that
   not everything that aliases SUPERSET also aliases SUBSET.  For example,
   in C, a store to an `int' can alias a load of a structure containing an
   `int', and vice versa.  But it can't alias a load of a 'double' member
   of the same structure.  Here, the structure would be the SUPERSET and
   `int' the SUBSET.  This relationship is also described in the comment at
   the beginning of this file.

   This function should be called only once per SUPERSET/SUBSET pair.

   It is illegal for SUPERSET to be zero; everything is implicitly a
   subset of alias set zero.  */

void
record_alias_subset (HOST_WIDE_INT superset, HOST_WIDE_INT subset)
{
  alias_set_entry superset_entry;
  alias_set_entry subset_entry;

  /* It is possible in complex type situations for both sets to be the same,
     in which case we can ignore this operation.  */
  if (superset == subset)
    return;

  gcc_assert (superset);

  superset_entry = get_alias_set_entry (superset);
  if (superset_entry == 0)
    {
      /* Create an entry for the SUPERSET, so that we have a place to
	 attach the SUBSET.  */
      superset_entry = ggc_alloc (sizeof (struct alias_set_entry));
      superset_entry->alias_set = superset;
      superset_entry->children
	= splay_tree_new_ggc (splay_tree_compare_ints);
      superset_entry->has_zero_child = 0;
      VARRAY_GENERIC_PTR (alias_sets, superset) = superset_entry;
    }

  if (subset == 0)
    superset_entry->has_zero_child = 1;
  else
    {
      subset_entry = get_alias_set_entry (subset);
      /* If there is an entry for the subset, enter all of its children
	 (if they are not already present) as children of the SUPERSET.  */
      if (subset_entry)
	{
	  if (subset_entry->has_zero_child)
	    superset_entry->has_zero_child = 1;

	  splay_tree_foreach (subset_entry->children, insert_subset_children,
			      superset_entry->children);
	}

      /* Enter the SUBSET itself as a child of the SUPERSET.  */
      splay_tree_insert (superset_entry->children,
			 (splay_tree_key) subset, 0);
    }
}
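
/* Usage sketch (hypothetical type trees): to record that `struct S'
   contains an addressable `int' field, a caller effectively does

     record_alias_subset (get_alias_set (s_type), get_alias_set (int_type));

   after which alias_sets_conflict_p reports a conflict between the two
   sets in either order.  record_component_aliases below performs
   exactly this walk over a type's components.  */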
/* Record that component types of TYPE, if any, are part of that type for
   aliasing purposes.  For record types, we only record component types
   for fields that are marked addressable.  For array types, we always
   record the component types, so the front end should not call this
   function if the individual components aren't addressable.  */

void
record_component_aliases (tree type)
{
  HOST_WIDE_INT superset = get_alias_set (type);
  tree field;

  if (superset == 0)
    return;

  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      if (! TYPE_NONALIASED_COMPONENT (type))
	record_alias_subset (superset, get_alias_set (TREE_TYPE (type)));
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      /* Recursively record aliases for the base classes, if there are any.  */
      if (TYPE_BINFO (type))
	{
	  int i;
	  tree binfo, base_binfo;

	  for (binfo = TYPE_BINFO (type), i = 0;
	       BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
	    record_alias_subset (superset,
				 get_alias_set (BINFO_TYPE (base_binfo)));
	}
      for (field = TYPE_FIELDS (type); field != 0; field = TREE_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL && ! DECL_NONADDRESSABLE_P (field))
	  record_alias_subset (superset, get_alias_set (TREE_TYPE (field)));
      break;

    case COMPLEX_TYPE:
      record_alias_subset (superset, get_alias_set (TREE_TYPE (type)));
      break;

    default:
      break;
    }
}
/* Allocate an alias set for use in storing and reading from the varargs
   spill area.  */

static GTY(()) HOST_WIDE_INT varargs_set = -1;

HOST_WIDE_INT
get_varargs_alias_set (void)
{
#if 1
  /* We now lower VA_ARG_EXPR, and there's currently no way to attach the
     varargs alias set to an INDIRECT_REF (FIXME!), so we can't
     consistently use the varargs alias set for loads from the varargs
     area.  So don't use it anywhere.  */
  return 0;
#else
  if (varargs_set == -1)
    varargs_set = new_alias_set ();

  return varargs_set;
#endif
}
/* Likewise, but used for the fixed portions of the frame, e.g., register
   save areas.  */

static GTY(()) HOST_WIDE_INT frame_set = -1;

HOST_WIDE_INT
get_frame_alias_set (void)
{
  if (frame_set == -1)
    frame_set = new_alias_set ();

  return frame_set;
}
/* Inside SRC, the source of a SET, find a base address.  */

static rtx
find_base_value (rtx src)
{
  unsigned int regno;

  switch (GET_CODE (src))
    {
    case SYMBOL_REF:
    case LABEL_REF:
      return src;

    case REG:
      regno = REGNO (src);
      /* At the start of a function, argument registers have known base
	 values which may be lost later.  Returning an ADDRESS
	 expression here allows optimization based on argument values
	 even when the argument registers are used for other purposes.  */
      if (regno < FIRST_PSEUDO_REGISTER && copying_arguments)
	return new_reg_base_value[regno];

      /* If a pseudo has a known base value, return it.  Do not do this
	 for non-fixed hard regs since it can result in a circular
	 dependency chain for registers which have values at function entry.

	 The test above is not sufficient because the scheduler may move
	 a copy out of an arg reg past the NOTE_INSN_FUNCTION_BEGIN.  */
      if ((regno >= FIRST_PSEUDO_REGISTER || fixed_regs[regno])
	  && regno < VARRAY_SIZE (reg_base_value))
	{
	  /* If we're inside init_alias_analysis, use new_reg_base_value
	     to reduce the number of relaxation iterations.  */
	  if (new_reg_base_value && new_reg_base_value[regno]
	      && REG_N_SETS (regno) == 1)
	    return new_reg_base_value[regno];

	  if (VARRAY_RTX (reg_base_value, regno))
	    return VARRAY_RTX (reg_base_value, regno);
	}

      return 0;

    case MEM:
      /* Check for an argument passed in memory.  Only record in the
	 copying-arguments block; it is too hard to track changes
	 otherwise.  */
      if (copying_arguments
	  && (XEXP (src, 0) == arg_pointer_rtx
	      || (GET_CODE (XEXP (src, 0)) == PLUS
		  && XEXP (XEXP (src, 0), 0) == arg_pointer_rtx)))
	return gen_rtx_ADDRESS (VOIDmode, src);
      return 0;

    case CONST:
      src = XEXP (src, 0);
      if (GET_CODE (src) != PLUS && GET_CODE (src) != MINUS)
	break;

      /* ... fall through ...  */

    case PLUS:
    case MINUS:
      {
	rtx temp, src_0 = XEXP (src, 0), src_1 = XEXP (src, 1);

	/* If either operand is a REG that is a known pointer, then it
	   is the base.  */
	if (REG_P (src_0) && REG_POINTER (src_0))
	  return find_base_value (src_0);
	if (REG_P (src_1) && REG_POINTER (src_1))
	  return find_base_value (src_1);

	/* If either operand is a REG, then see if we already have
	   a known value for it.  */
	if (REG_P (src_0))
	  {
	    temp = find_base_value (src_0);
	    if (temp != 0)
	      src_0 = temp;
	  }

	if (REG_P (src_1))
	  {
	    temp = find_base_value (src_1);
	    if (temp != 0)
	      src_1 = temp;
	  }

	/* If either base is a named object or a special address
	   (like an argument or stack reference), then use it for the
	   base term.  */
	if (src_0 != 0
	    && (GET_CODE (src_0) == SYMBOL_REF
		|| GET_CODE (src_0) == LABEL_REF
		|| (GET_CODE (src_0) == ADDRESS
		    && GET_MODE (src_0) != VOIDmode)))
	  return src_0;

	if (src_1 != 0
	    && (GET_CODE (src_1) == SYMBOL_REF
		|| GET_CODE (src_1) == LABEL_REF
		|| (GET_CODE (src_1) == ADDRESS
		    && GET_MODE (src_1) != VOIDmode)))
	  return src_1;

	/* Guess which operand is the base address:
	   If either operand is a symbol, then it is the base.  If
	   either operand is a CONST_INT, then the other is the base.  */
	if (GET_CODE (src_1) == CONST_INT || CONSTANT_P (src_0))
	  return find_base_value (src_0);
	else if (GET_CODE (src_0) == CONST_INT || CONSTANT_P (src_1))
	  return find_base_value (src_1);

	return 0;
      }

    case LO_SUM:
      /* The standard form is (lo_sum reg sym) so look only at the
	 second operand.  */
      return find_base_value (XEXP (src, 1));

    case AND:
      /* If the second operand is constant set the base
	 address to the first operand.  */
      if (GET_CODE (XEXP (src, 1)) == CONST_INT && INTVAL (XEXP (src, 1)) != 0)
	return find_base_value (XEXP (src, 0));
      return 0;

    case TRUNCATE:
      if (GET_MODE_SIZE (GET_MODE (src)) < GET_MODE_SIZE (Pmode))
	break;
      /* Fall through.  */
    case HIGH:
    case PRE_INC:
    case PRE_DEC:
    case POST_INC:
    case POST_DEC:
    case PRE_MODIFY:
    case POST_MODIFY:
      return find_base_value (XEXP (src, 0));

    case ZERO_EXTEND:
    case SIGN_EXTEND:	/* Used for NT/Alpha pointers.  */
      {
	rtx temp = find_base_value (XEXP (src, 0));

	if (temp != 0 && CONSTANT_P (temp))
	  temp = convert_memory_address (Pmode, temp);

	return temp;
      }

    default:
      break;
    }

  return 0;
}
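
/* Example (illustrative rtl only): for a set such as

     (set (reg 100) (plus (reg/f 6) (const_int -4)))

   the PLUS case sees that the frame pointer is marked REG_POINTER (or,
   failing that, that the second operand is a CONST_INT), recurses into
   the REG, and returns the ADDRESS expression recorded as the frame
   pointer's base.  All frame references thus share one base value.  */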
/* Called from init_alias_analysis indirectly through note_stores.  */

/* While scanning insns to find base values, reg_seen[N] is nonzero if
   register N has been set in this function.  */
static char *reg_seen;

/* Addresses which are known not to alias anything else are identified
   by a unique integer.  */
static int unique_id;

static void
record_set (rtx dest, rtx set, void *data ATTRIBUTE_UNUSED)
{
  unsigned regno;
  rtx src;
  int n;

  if (!REG_P (dest))
    return;

  regno = REGNO (dest);

  gcc_assert (regno < VARRAY_SIZE (reg_base_value));

  /* If this spans multiple hard registers, then we must indicate that every
     register has an unusable value.  */
  if (regno < FIRST_PSEUDO_REGISTER)
    n = hard_regno_nregs[regno][GET_MODE (dest)];
  else
    n = 1;
  if (n != 1)
    {
      while (--n >= 0)
	{
	  reg_seen[regno + n] = 1;
	  new_reg_base_value[regno + n] = 0;
	}
      return;
    }

  if (set)
    {
      /* A CLOBBER wipes out any old value but does not prevent a previously
	 unset register from acquiring a base address (i.e. reg_seen is not
	 set).  */
      if (GET_CODE (set) == CLOBBER)
	{
	  new_reg_base_value[regno] = 0;
	  return;
	}
      src = SET_SRC (set);
    }
  else
    {
      if (reg_seen[regno])
	{
	  new_reg_base_value[regno] = 0;
	  return;
	}
      reg_seen[regno] = 1;
      new_reg_base_value[regno] = gen_rtx_ADDRESS (Pmode,
						   GEN_INT (unique_id++));
      return;
    }

  /* If this is not the first set of REGNO, see whether the new value
     is related to the old one.  There are two cases of interest:

     (1) The register might be assigned an entirely new value
	 that has the same base term as the original set.

     (2) The set might be a simple self-modification that
	 cannot change REGNO's base value.

     If neither case holds, reject the original base value as invalid.
     Note that the following situation is not detected:

	 extern int x, y;  int *p = &x;  p += (&y-&x);

     ANSI C does not allow computing the difference of addresses
     of distinct top level objects.  */
  if (new_reg_base_value[regno] != 0
      && find_base_value (src) != new_reg_base_value[regno])
    switch (GET_CODE (src))
      {
      case LO_SUM:
      case MINUS:
	if (XEXP (src, 0) != dest && XEXP (src, 1) != dest)
	  new_reg_base_value[regno] = 0;
	break;
      case PLUS:
	/* If the value we add in the PLUS is also a valid base value,
	   this might be the actual base value, and the original value
	   an index.  */
	{
	  rtx other = NULL_RTX;

	  if (XEXP (src, 0) == dest)
	    other = XEXP (src, 1);
	  else if (XEXP (src, 1) == dest)
	    other = XEXP (src, 0);

	  if (! other || find_base_value (other))
	    new_reg_base_value[regno] = 0;
	  break;
	}
      case AND:
	if (XEXP (src, 0) != dest || GET_CODE (XEXP (src, 1)) != CONST_INT)
	  new_reg_base_value[regno] = 0;
	break;
      default:
	new_reg_base_value[regno] = 0;
	break;
      }
  /* If this is the first set of a register, record the value.  */
  else if ((regno >= FIRST_PSEUDO_REGISTER || ! fixed_regs[regno])
	   && ! reg_seen[regno] && new_reg_base_value[regno] == 0)
    new_reg_base_value[regno] = find_base_value (src);

  reg_seen[regno] = 1;
}
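
/* Illustration of case (2) above (a sketch): for the self-increment

     (set (reg 100) (plus (reg 100) (const_int 4)))

   the PLUS case finds `other' to be a CONST_INT, which has no base
   value of its own, so reg 100 keeps its original base value: stepping
   a pointer does not change which object it points into.  */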
/* Called from loop optimization when a new pseudo-register is
   created.  It indicates that REGNO is being set to VAL.  If INVARIANT
   is true then this value also describes an invariant relationship
   which can be used to deduce that two registers with unknown values
   are different.  */

void
record_base_value (unsigned int regno, rtx val, int invariant)
{
  if (invariant && alias_invariant && regno < alias_invariant_size)
    alias_invariant[regno] = val;

  if (regno >= VARRAY_SIZE (reg_base_value))
    VARRAY_GROW (reg_base_value, max_reg_num ());

  if (REG_P (val))
    {
      VARRAY_RTX (reg_base_value, regno)
	= REG_BASE_VALUE (val);
      return;
    }
  VARRAY_RTX (reg_base_value, regno)
    = find_base_value (val);
}
/* Clear alias info for a register.  This is used if an RTL transformation
   changes the value of a register.  This is used in flow by AUTO_INC_DEC
   optimizations.  We don't need to clear reg_base_value, since flow only
   changes the offset.  */

void
clear_reg_alias_info (rtx reg)
{
  unsigned int regno = REGNO (reg);

  if (regno >= FIRST_PSEUDO_REGISTER)
    {
      regno -= FIRST_PSEUDO_REGISTER;
      if (regno < reg_known_value_size)
	{
	  reg_known_value[regno] = reg;
	  reg_known_equiv_p[regno] = false;
	}
    }
}
/* If a value is known for REGNO, return it.  */

rtx
get_reg_known_value (unsigned int regno)
{
  if (regno >= FIRST_PSEUDO_REGISTER)
    {
      regno -= FIRST_PSEUDO_REGISTER;
      if (regno < reg_known_value_size)
	return reg_known_value[regno];
    }
  return NULL;
}

/* Set it.  */

static void
set_reg_known_value (unsigned int regno, rtx val)
{
  if (regno >= FIRST_PSEUDO_REGISTER)
    {
      regno -= FIRST_PSEUDO_REGISTER;
      if (regno < reg_known_value_size)
	reg_known_value[regno] = val;
    }
}

/* Similarly for reg_known_equiv_p.  */

bool
get_reg_known_equiv_p (unsigned int regno)
{
  if (regno >= FIRST_PSEUDO_REGISTER)
    {
      regno -= FIRST_PSEUDO_REGISTER;
      if (regno < reg_known_value_size)
	return reg_known_equiv_p[regno];
    }
  return false;
}

static void
set_reg_known_equiv_p (unsigned int regno, bool val)
{
  if (regno >= FIRST_PSEUDO_REGISTER)
    {
      regno -= FIRST_PSEUDO_REGISTER;
      if (regno < reg_known_value_size)
	reg_known_equiv_p[regno] = val;
    }
}
/* Returns a canonical version of X, from the point of view of alias
   analysis.  (For example, if X is a MEM whose address is a register,
   and the register has a known value (say a SYMBOL_REF), then a MEM
   whose address is the SYMBOL_REF is returned.)  */

rtx
canon_rtx (rtx x)
{
  /* Recursively look for equivalences.  */
  if (REG_P (x) && REGNO (x) >= FIRST_PSEUDO_REGISTER)
    {
      rtx t = get_reg_known_value (REGNO (x));
      if (t == x)
	return x;
      if (t)
	return canon_rtx (t);
    }

  if (GET_CODE (x) == PLUS)
    {
      rtx x0 = canon_rtx (XEXP (x, 0));
      rtx x1 = canon_rtx (XEXP (x, 1));

      if (x0 != XEXP (x, 0) || x1 != XEXP (x, 1))
	{
	  if (GET_CODE (x0) == CONST_INT)
	    return plus_constant (x1, INTVAL (x0));
	  else if (GET_CODE (x1) == CONST_INT)
	    return plus_constant (x0, INTVAL (x1));
	  return gen_rtx_PLUS (GET_MODE (x), x0, x1);
	}
    }

  /* This gives us much better alias analysis when called from
     the loop optimizer.  Note we want to leave the original
     MEM alone, but need to return the canonicalized MEM with
     all the flags with their original values.  */
  else if (MEM_P (x))
    x = replace_equiv_address_nv (x, canon_rtx (XEXP (x, 0)));

  return x;
}
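
/* Example (hypothetical register numbers): if reg_known_value records
   that pseudo 100 equals (symbol_ref "x"), then canonicalizing

     (mem (plus (reg 100) (const_int 8)))

   yields a MEM whose address is the symbol plus 8 (plus_constant folds
   the constant in), so the walkers below see the symbolic base
   directly.  */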
/* Return 1 if X and Y are identical-looking rtx's.
   Expect that X and Y have already been canonicalized.

   We use the data in reg_known_value above to see if two registers with
   different numbers are, in fact, equivalent.  */

static int
rtx_equal_for_memref_p (rtx x, rtx y)
{
  int i;
  int j;
  enum rtx_code code;
  const char *fmt;

  if (x == 0 && y == 0)
    return 1;
  if (x == 0 || y == 0)
    return 0;

  if (x == y)
    return 1;

  code = GET_CODE (x);
  /* Rtx's of different codes cannot be equal.  */
  if (code != GET_CODE (y))
    return 0;

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.
     (REG:SI x) and (REG:HI x) are NOT equivalent.  */

  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  /* Some RTL can be compared without a recursive examination.  */
  switch (code)
    {
    case REG:
      return REGNO (x) == REGNO (y);

    case LABEL_REF:
      return XEXP (x, 0) == XEXP (y, 0);

    case SYMBOL_REF:
      return XSTR (x, 0) == XSTR (y, 0);

    case VALUE:
    case CONST_INT:
    case CONST_DOUBLE:
      /* There's no need to compare the contents of CONST_DOUBLEs or
	 CONST_INTs because pointer equality is a good enough
	 comparison for these nodes.  */
      return 0;

    default:
      break;
    }

  /* canon_rtx knows how to handle plus.  No need to canonicalize.  */
  if (code == PLUS)
    return ((rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 0))
	     && rtx_equal_for_memref_p (XEXP (x, 1), XEXP (y, 1)))
	    || (rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 1))
		&& rtx_equal_for_memref_p (XEXP (x, 1), XEXP (y, 0))));
  /* For commutative operations, the RTXs match if the operands match in any
     order.  Also handle the simple binary and unary cases without a loop.  */
  if (COMMUTATIVE_P (x))
    {
      rtx xop0 = canon_rtx (XEXP (x, 0));
      rtx yop0 = canon_rtx (XEXP (y, 0));
      rtx yop1 = canon_rtx (XEXP (y, 1));

      return ((rtx_equal_for_memref_p (xop0, yop0)
	       && rtx_equal_for_memref_p (canon_rtx (XEXP (x, 1)), yop1))
	      || (rtx_equal_for_memref_p (xop0, yop1)
		  && rtx_equal_for_memref_p (canon_rtx (XEXP (x, 1)), yop0)));
    }
  else if (NON_COMMUTATIVE_P (x))
    {
      return (rtx_equal_for_memref_p (canon_rtx (XEXP (x, 0)),
				      canon_rtx (XEXP (y, 0)))
	      && rtx_equal_for_memref_p (canon_rtx (XEXP (x, 1)),
					 canon_rtx (XEXP (y, 1))));
    }
  else if (UNARY_P (x))
    return rtx_equal_for_memref_p (canon_rtx (XEXP (x, 0)),
				   canon_rtx (XEXP (y, 0)));

  /* Compare the elements.  If any pair of corresponding elements
     fail to match, return 0 for the whole thing.

     Limit cases to types which actually appear in addresses.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      switch (fmt[i])
	{
	case 'i':
	  if (XINT (x, i) != XINT (y, i))
	    return 0;
	  break;

	case 'E':
	  /* Two vectors must have the same length.  */
	  if (XVECLEN (x, i) != XVECLEN (y, i))
	    return 0;

	  /* And the corresponding elements must match.  */
	  for (j = 0; j < XVECLEN (x, i); j++)
	    if (rtx_equal_for_memref_p (canon_rtx (XVECEXP (x, i, j)),
					canon_rtx (XVECEXP (y, i, j))) == 0)
	      return 0;
	  break;

	case 'e':
	  if (rtx_equal_for_memref_p (canon_rtx (XEXP (x, i)),
				      canon_rtx (XEXP (y, i))) == 0)
	    return 0;
	  break;

	  /* This can happen for asm operands.  */
	case 's':
	  if (strcmp (XSTR (x, i), XSTR (y, i)))
	    return 0;
	  break;

	/* This can happen for an asm which clobbers memory.  */
	case '0':
	  break;

	  /* It is believed that rtx's at this level will never
	     contain anything but integers and other rtx's,
	     except for within LABEL_REFs and SYMBOL_REFs.  */
	default:
	  gcc_unreachable ();
	}
    }
  return 1;
}
/* Given an rtx X, find a SYMBOL_REF or LABEL_REF within
   X and return it, or return 0 if none found.  */

static rtx
find_symbolic_term (rtx x)
{
  int i;
  enum rtx_code code;
  const char *fmt;

  code = GET_CODE (x);
  if (code == SYMBOL_REF || code == LABEL_REF)
    return x;
  if (OBJECT_P (x))
    return 0;

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      rtx t;

      if (fmt[i] == 'e')
	{
	  t = find_symbolic_term (XEXP (x, i));
	  if (t != 0)
	    return t;
	}
      else if (fmt[i] == 'E')
	break;
    }

  return 0;
}
rtx
find_base_term (rtx x)
{
  cselib_val *val;
  struct elt_loc_list *l;

#if defined (FIND_BASE_TERM)
  /* Try machine-dependent ways to find the base term.  */
  x = FIND_BASE_TERM (x);
#endif

  switch (GET_CODE (x))
    {
    case REG:
      return REG_BASE_VALUE (x);

    case TRUNCATE:
      if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (Pmode))
	return 0;
      /* Fall through.  */
    case HIGH:
    case PRE_INC:
    case PRE_DEC:
    case POST_INC:
    case POST_DEC:
    case PRE_MODIFY:
    case POST_MODIFY:
      return find_base_term (XEXP (x, 0));

    case ZERO_EXTEND:
    case SIGN_EXTEND:	/* Used for Alpha/NT pointers.  */
      {
	rtx temp = find_base_term (XEXP (x, 0));

	if (temp != 0 && CONSTANT_P (temp))
	  temp = convert_memory_address (Pmode, temp);

	return temp;
      }

    case VALUE:
      val = CSELIB_VAL_PTR (x);
      if (!val)
	return 0;
      for (l = val->locs; l; l = l->next)
	if ((x = find_base_term (l->loc)) != 0)
	  return x;
      return 0;

    case CONST:
      x = XEXP (x, 0);
      if (GET_CODE (x) != PLUS && GET_CODE (x) != MINUS)
	return 0;
      /* Fall through.  */
    case LO_SUM:
    case PLUS:
    case MINUS:
      {
	rtx tmp1 = XEXP (x, 0);
	rtx tmp2 = XEXP (x, 1);

	/* This is a little bit tricky since we have to determine which of
	   the two operands represents the real base address.  Otherwise this
	   routine may return the index register instead of the base register.

	   That may cause us to believe no aliasing was possible, when in
	   fact aliasing is possible.

	   We use a few simple tests to guess the base register.  Additional
	   tests can certainly be added.  For example, if one of the operands
	   is a shift or multiply, then it must be the index register and the
	   other operand is the base register.  */

	if (tmp1 == pic_offset_table_rtx && CONSTANT_P (tmp2))
	  return find_base_term (tmp2);

	/* If either operand is known to be a pointer, then use it
	   to determine the base term.  */
	if (REG_P (tmp1) && REG_POINTER (tmp1))
	  return find_base_term (tmp1);

	if (REG_P (tmp2) && REG_POINTER (tmp2))
	  return find_base_term (tmp2);

	/* Neither operand was known to be a pointer.  Go ahead and find the
	   base term for both operands.  */
	tmp1 = find_base_term (tmp1);
	tmp2 = find_base_term (tmp2);

	/* If either base term is a named object or a special address
	   (like an argument or stack reference), then use it for the
	   base term.  */
	if (tmp1 != 0
	    && (GET_CODE (tmp1) == SYMBOL_REF
		|| GET_CODE (tmp1) == LABEL_REF
		|| (GET_CODE (tmp1) == ADDRESS
		    && GET_MODE (tmp1) != VOIDmode)))
	  return tmp1;

	if (tmp2 != 0
	    && (GET_CODE (tmp2) == SYMBOL_REF
		|| GET_CODE (tmp2) == LABEL_REF
		|| (GET_CODE (tmp2) == ADDRESS
		    && GET_MODE (tmp2) != VOIDmode)))
	  return tmp2;

	/* We could not determine which of the two operands was the
	   base register and which was the index.  So we can determine
	   nothing from the base alias check.  */
	return 0;
      }

    case AND:
      if (GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) != 0)
	return find_base_term (XEXP (x, 0));
      return 0;

    case SYMBOL_REF:
    case LABEL_REF:
      return x;

    default:
      return 0;
    }
}
/* Return 0 if the addresses X and Y are known to point to different
   objects, 1 if they might be pointers to the same object.  */

static int
base_alias_check (rtx x, rtx y, enum machine_mode x_mode,
		  enum machine_mode y_mode)
{
  rtx x_base = find_base_term (x);
  rtx y_base = find_base_term (y);

  /* If the address itself has no known base see if a known equivalent
     value has one.  If either address still has no known base, nothing
     is known about aliasing.  */
  if (x_base == 0)
    {
      rtx x_c;

      if (! flag_expensive_optimizations || (x_c = canon_rtx (x)) == x)
	return 1;

      x_base = find_base_term (x_c);
      if (x_base == 0)
	return 1;
    }

  if (y_base == 0)
    {
      rtx y_c;
      if (! flag_expensive_optimizations || (y_c = canon_rtx (y)) == y)
	return 1;

      y_base = find_base_term (y_c);
      if (y_base == 0)
	return 1;
    }

  /* If the base addresses are equal nothing is known about aliasing.  */
  if (rtx_equal_p (x_base, y_base))
    return 1;

  /* The base addresses of the read and write are different expressions.
     If they are both symbols and they are not accessed via AND, there is
     no conflict.  We can bring knowledge of object alignment into play
     here.  For example, on alpha, "char a, b;" can alias one another,
     though "char a; long b;" cannot.  */
  if (GET_CODE (x_base) != ADDRESS && GET_CODE (y_base) != ADDRESS)
    {
      if (GET_CODE (x) == AND && GET_CODE (y) == AND)
	return 1;
      if (GET_CODE (x) == AND
	  && (GET_CODE (XEXP (x, 1)) != CONST_INT
	      || (int) GET_MODE_UNIT_SIZE (y_mode) < -INTVAL (XEXP (x, 1))))
	return 1;
      if (GET_CODE (y) == AND
	  && (GET_CODE (XEXP (y, 1)) != CONST_INT
	      || (int) GET_MODE_UNIT_SIZE (x_mode) < -INTVAL (XEXP (y, 1))))
	return 1;
      /* Differing symbols never alias.  */
      return 0;
    }

  /* If one address is a stack reference there can be no alias:
     stack references using different base registers do not alias,
     a stack reference can not alias a parameter, and a stack reference
     can not alias a global.  */
  if ((GET_CODE (x_base) == ADDRESS && GET_MODE (x_base) == Pmode)
      || (GET_CODE (y_base) == ADDRESS && GET_MODE (y_base) == Pmode))
    return 0;

  if (! flag_argument_noalias)
    return 1;

  if (flag_argument_noalias > 1)
    return 0;

  /* Weak noalias assertion (arguments are distinct, but may match globals).  */
  return ! (GET_MODE (x_base) == VOIDmode && GET_MODE (y_base) == VOIDmode);
}
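
/* Example of the symbol case (a sketch): with x = (symbol_ref "a") and
   y = (symbol_ref "b"), both base terms are the SYMBOL_REFs themselves.
   They are unequal and no AND addressing is involved, so the function
   returns 0: distinct top-level objects never alias.  */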
/* Convert the address X into something we can use.  This is done by returning
   it unchanged unless it is a value; in the latter case we call cselib to get
   a more useful rtx.  */

rtx
get_addr (rtx x)
{
  cselib_val *v;
  struct elt_loc_list *l;

  if (GET_CODE (x) != VALUE)
    return x;
  v = CSELIB_VAL_PTR (x);
  if (v)
    {
      for (l = v->locs; l; l = l->next)
	if (CONSTANT_P (l->loc))
	  return l->loc;
      for (l = v->locs; l; l = l->next)
	if (!REG_P (l->loc) && !MEM_P (l->loc))
	  return l->loc;
      if (v->locs)
	return v->locs->loc;
    }
  return x;
}
/* Return the address of the (N_REFS + 1)th memory reference to ADDR
   where SIZE is the size in bytes of the memory reference.  If ADDR
   is not modified by the memory reference then ADDR is returned.  */

rtx
addr_side_effect_eval (rtx addr, int size, int n_refs)
{
  int offset = 0;

  switch (GET_CODE (addr))
    {
    case PRE_INC:
      offset = (n_refs + 1) * size;
      break;
    case PRE_DEC:
      offset = -(n_refs + 1) * size;
      break;
    case POST_INC:
      offset = n_refs * size;
      break;
    case POST_DEC:
      offset = -n_refs * size;
      break;

    default:
      return addr;
    }

  if (offset)
    addr = gen_rtx_PLUS (GET_MODE (addr), XEXP (addr, 0),
			 GEN_INT (offset));
  else
    addr = XEXP (addr, 0);
  addr = canon_rtx (addr);

  return addr;
}
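
/* Worked example: for a 4-byte reference through (post_inc (reg 100)),
   addr_side_effect_eval (addr, 4, 0) yields just (reg 100) -- the first
   access uses the unmodified address -- while n_refs == 1 yields
   (plus (reg 100) (const_int 4)), the address seen by a second access.  */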
/* Return nonzero if X and Y (memory addresses) could reference the
   same location in memory.  C is an offset accumulator.  When
   C is nonzero, we are testing aliases between X and Y + C.
   XSIZE is the size in bytes of the X reference,
   similarly YSIZE is the size in bytes for Y.
   Expect that canon_rtx has already been called for X and Y.

   If XSIZE or YSIZE is zero, we do not know the amount of memory being
   referenced (the reference was BLKmode), so make the most pessimistic
   assumptions.

   If XSIZE or YSIZE is negative, we may access memory outside the object
   being referenced as a side effect.  This can happen when using AND to
   align memory references, as is done on the Alpha.

   It would be nice to notice that varying addresses cannot conflict with fp
   if no local variables had their addresses taken, but that's too hard now.  */

static int
memrefs_conflict_p (int xsize, rtx x, int ysize, rtx y, HOST_WIDE_INT c)
{
  if (GET_CODE (x) == VALUE)
    x = get_addr (x);
  if (GET_CODE (y) == VALUE)
    y = get_addr (y);
  if (GET_CODE (x) == HIGH)
    x = XEXP (x, 0);
  else if (GET_CODE (x) == LO_SUM)
    x = XEXP (x, 1);
  else
    x = addr_side_effect_eval (x, xsize, 0);
  if (GET_CODE (y) == HIGH)
    y = XEXP (y, 0);
  else if (GET_CODE (y) == LO_SUM)
    y = XEXP (y, 1);
  else
    y = addr_side_effect_eval (y, ysize, 0);

  if (rtx_equal_for_memref_p (x, y))
    {
      if (xsize <= 0 || ysize <= 0)
	return 1;
      if (c >= 0 && xsize > c)
	return 1;
      if (c < 0 && ysize + c > 0)
	return 1;
      return 0;
    }

  /* This code used to check for conflicts involving stack references and
     globals but the base address alias code now handles these cases.  */

  if (GET_CODE (x) == PLUS)
    {
      /* The fact that X is canonicalized means that this
	 PLUS rtx is canonicalized.  */
      rtx x0 = XEXP (x, 0);
      rtx x1 = XEXP (x, 1);

      if (GET_CODE (y) == PLUS)
	{
	  /* The fact that Y is canonicalized means that this
	     PLUS rtx is canonicalized.  */
	  rtx y0 = XEXP (y, 0);
	  rtx y1 = XEXP (y, 1);

	  if (rtx_equal_for_memref_p (x1, y1))
	    return memrefs_conflict_p (xsize, x0, ysize, y0, c);
	  if (rtx_equal_for_memref_p (x0, y0))
	    return memrefs_conflict_p (xsize, x1, ysize, y1, c);
	  if (GET_CODE (x1) == CONST_INT)
	    {
	      if (GET_CODE (y1) == CONST_INT)
		return memrefs_conflict_p (xsize, x0, ysize, y0,
					   c - INTVAL (x1) + INTVAL (y1));
	      else
		return memrefs_conflict_p (xsize, x0, ysize, y,
					   c - INTVAL (x1));
	    }
	  else if (GET_CODE (y1) == CONST_INT)
	    return memrefs_conflict_p (xsize, x, ysize, y0, c + INTVAL (y1));

	  return 1;
	}
      else if (GET_CODE (x1) == CONST_INT)
	return memrefs_conflict_p (xsize, x0, ysize, y, c - INTVAL (x1));
    }
  else if (GET_CODE (y) == PLUS)
    {
      /* The fact that Y is canonicalized means that this
	 PLUS rtx is canonicalized.  */
      rtx y0 = XEXP (y, 0);
      rtx y1 = XEXP (y, 1);

      if (GET_CODE (y1) == CONST_INT)
	return memrefs_conflict_p (xsize, x, ysize, y0, c + INTVAL (y1));
      else
	return 1;
    }

  if (GET_CODE (x) == GET_CODE (y))
    switch (GET_CODE (x))
      {
      case MULT:
	{
	  /* Handle cases where we expect the second operands to be the
	     same, and check only whether the first operand would conflict
	     or not.  */
	  rtx x0, y0;
	  rtx x1 = canon_rtx (XEXP (x, 1));
	  rtx y1 = canon_rtx (XEXP (y, 1));
	  if (! rtx_equal_for_memref_p (x1, y1))
	    return 1;
	  x0 = canon_rtx (XEXP (x, 0));
	  y0 = canon_rtx (XEXP (y, 0));
	  if (rtx_equal_for_memref_p (x0, y0))
	    return (xsize == 0 || ysize == 0
		    || (c >= 0 && xsize > c) || (c < 0 && ysize + c > 0));

	  /* Can't properly adjust our sizes.  */
	  if (GET_CODE (x1) != CONST_INT)
	    return 1;
	  xsize /= INTVAL (x1);
	  ysize /= INTVAL (x1);
	  c /= INTVAL (x1);
	  return memrefs_conflict_p (xsize, x0, ysize, y0, c);
	}

      case REG:
	/* Are these registers known not to be equal?  */
	if (alias_invariant)
	  {
	    unsigned int r_x = REGNO (x), r_y = REGNO (y);
	    rtx i_x, i_y;	/* invariant relationships of X and Y */

	    i_x = r_x >= alias_invariant_size ? 0 : alias_invariant[r_x];
	    i_y = r_y >= alias_invariant_size ? 0 : alias_invariant[r_y];

	    if (i_x == 0 && i_y == 0)
	      break;

	    if (! memrefs_conflict_p (xsize, i_x ? i_x : x,
				      ysize, i_y ? i_y : y, c))
	      return 0;
	  }
	break;

      default:
	break;
      }

  /* Treat an access through an AND (e.g. a subword access on an Alpha)
     as an access with indeterminate size.  Assume that references
     besides AND are aligned, so if the size of the other reference is
     at least as large as the alignment, assume no other overlap.  */
  if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT)
    {
      if (GET_CODE (y) == AND || ysize < -INTVAL (XEXP (x, 1)))
	xsize = -1;
      return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)), ysize, y, c);
    }
  if (GET_CODE (y) == AND && GET_CODE (XEXP (y, 1)) == CONST_INT)
    {
      /* ??? If we are indexing far enough into the array/structure, we
	 may yet be able to determine that we can not overlap.  But we
	 also need to know that we are far enough from the end not to
	 overlap a following reference, so we do nothing with that for now.  */
      if (GET_CODE (x) == AND || xsize < -INTVAL (XEXP (y, 1)))
	ysize = -1;
      return memrefs_conflict_p (xsize, x, ysize, canon_rtx (XEXP (y, 0)), c);
    }

  if (CONSTANT_P (x))
    {
      if (GET_CODE (x) == CONST_INT && GET_CODE (y) == CONST_INT)
	{
	  c += (INTVAL (y) - INTVAL (x));
	  return (xsize <= 0 || ysize <= 0
		  || (c >= 0 && xsize > c) || (c < 0 && ysize + c > 0));
	}

      if (GET_CODE (x) == CONST)
	{
	  if (GET_CODE (y) == CONST)
	    return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)),
				       ysize, canon_rtx (XEXP (y, 0)), c);
	  else
	    return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)),
				       ysize, y, c);
	}
      if (GET_CODE (y) == CONST)
	return memrefs_conflict_p (xsize, x, ysize,
				   canon_rtx (XEXP (y, 0)), c);

      if (CONSTANT_P (y))
	return (xsize <= 0 || ysize <= 0
		|| (rtx_equal_for_memref_p (x, y)
		    && ((c >= 0 && xsize > c) || (c < 0 && ysize + c > 0))));

      return 1;
    }
  return 1;
}
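
/* The offset arithmetic above amounts to an interval test.  In the
   same-base case, x covers [0, xsize) and y covers [c, c + ysize)
   relative to the common base, so the accesses overlap exactly when

     (c >= 0 && xsize > c) || (c < 0 && ysize + c > 0)

   which is the test applied whenever the two bases compare equal under
   rtx_equal_for_memref_p.  */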
/* Functions to compute memory dependencies.

   Since we process the insns in execution order, we can build tables
   to keep track of what registers are fixed (and not aliased), what registers
   are varying in known ways, and what registers are varying in unknown
   ways.

   If both memory references are volatile, then there must always be a
   dependence between the two references, since their order can not be
   changed.  A volatile and non-volatile reference can be interchanged
   though.

   A MEM_IN_STRUCT reference at a non-AND varying address can never
   conflict with a non-MEM_IN_STRUCT reference at a fixed address.  We
   also must allow AND addresses, because they may generate accesses
   outside the object being referenced.  This is used to generate
   aligned addresses from unaligned addresses, for instance, the alpha
   storeqi_unaligned pattern.  */
/* Read dependence: X is read after read in MEM takes place.  There can
   only be a dependence here if both reads are volatile.  */

int
read_dependence (rtx mem, rtx x)
{
  return MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem);
}
/* Returns MEM1 if and only if MEM1 is a scalar at a fixed address and
   MEM2 is a reference to a structure at a varying address, or returns
   MEM2 if vice versa.  Otherwise, returns NULL_RTX.  If a non-NULL
   value is returned MEM1 and MEM2 can never alias.  VARIES_P is used
   to decide whether or not an address may vary; it should return
   nonzero whenever variation is possible.
   MEM1_ADDR and MEM2_ADDR are the addresses of MEM1 and MEM2.  */

static rtx
fixed_scalar_and_varying_struct_p (rtx mem1, rtx mem2, rtx mem1_addr,
				   rtx mem2_addr,
				   int (*varies_p) (rtx, int))
{
  if (! flag_strict_aliasing)
    return NULL_RTX;

  if (MEM_SCALAR_P (mem1) && MEM_IN_STRUCT_P (mem2)
      && !varies_p (mem1_addr, 1) && varies_p (mem2_addr, 1))
    /* MEM1 is a scalar at a fixed address; MEM2 is a struct at a
       varying address.  */
    return mem1;

  if (MEM_IN_STRUCT_P (mem1) && MEM_SCALAR_P (mem2)
      && varies_p (mem1_addr, 1) && !varies_p (mem2_addr, 1))
    /* MEM2 is a scalar at a fixed address; MEM1 is a struct at a
       varying address.  */
    return mem2;

  return NULL_RTX;
}
1877 /* Returns nonzero if something about the mode or address format MEM1
1878 indicates that it might well alias *anything*. */
1880 static int
1881 aliases_everything_p (rtx mem)
1883 if (GET_CODE (XEXP (mem, 0)) == AND)
1884 /* If the address is an AND, it is very hard to know what it is
1885 actually pointing at. */
1886 return 1;
1888 return 0;
1891 /* Return true if we can determine that the fields referenced cannot
1892 overlap for any pair of objects. */
1894 static bool
1895 nonoverlapping_component_refs_p (tree x, tree y)
1897 tree fieldx, fieldy, typex, typey, orig_y;
1901 /* The comparison has to be done at a common type, since we don't
1902 know how the inheritance hierarchy works. */
1903 orig_y = y;
1906 fieldx = TREE_OPERAND (x, 1);
1907 typex = DECL_FIELD_CONTEXT (fieldx);
1909 y = orig_y;
1912 fieldy = TREE_OPERAND (y, 1);
1913 typey = DECL_FIELD_CONTEXT (fieldy);
1915 if (typex == typey)
1916 goto found;
1918 y = TREE_OPERAND (y, 0);
1920 while (y && TREE_CODE (y) == COMPONENT_REF);
1922 x = TREE_OPERAND (x, 0);
1924 while (x && TREE_CODE (x) == COMPONENT_REF);
1926 /* Never found a common type. */
1927 return false;
1929 found:
1930 /* If we're left with accessing different fields of a structure,
1931 then no overlap. */
1932 if (TREE_CODE (typex) == RECORD_TYPE
1933 && fieldx != fieldy)
1934 return true;
1936 /* The comparison on the current field failed. If we're accessing
1937 a deeply nested structure, look at the next outer level. */
1938 x = TREE_OPERAND (x, 0);
1939 y = TREE_OPERAND (y, 0);
1941 while (x && y
1942 && TREE_CODE (x) == COMPONENT_REF
1943 && TREE_CODE (y) == COMPONENT_REF);
1945 return false;
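/* For illustration, with the hypothetical type

     struct A { int i; int j; } *p, *q;

   the accesses p->i and q->j name distinct FIELD_DECLs of the same
   RECORD_TYPE, so this function returns true even if p == q. Union
   members can share storage, which is why only RECORD_TYPE permits
   the "different fields" conclusion above. */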
1948 /* Look at the bottom of the COMPONENT_REF list for a DECL, and return it. */
1950 static tree
1951 decl_for_component_ref (tree x)
1955 x = TREE_OPERAND (x, 0);
1957 while (x && TREE_CODE (x) == COMPONENT_REF);
1959 return x && DECL_P (x) ? x : NULL_TREE;
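/* For example, for the reference s.inner.field (s a VAR_DECL) the
   loop above strips both COMPONENT_REFs and returns the DECL for s,
   while for p->field it bottoms out at an INDIRECT_REF, which is not
   a DECL, so NULL_TREE is returned. */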
1962 /* Walk up the COMPONENT_REF list and adjust OFFSET to compensate for the
1963 offset of the field reference. */
1965 static rtx
1966 adjust_offset_for_component_ref (tree x, rtx offset)
1968 HOST_WIDE_INT ioffset;
1970 if (! offset)
1971 return NULL_RTX;
1973 ioffset = INTVAL (offset);
1976 tree offset = component_ref_field_offset (x);
1977 tree field = TREE_OPERAND (x, 1);
1979 if (! host_integerp (offset, 1))
1980 return NULL_RTX;
1981 ioffset += (tree_low_cst (offset, 1)
1982 + (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1983 / BITS_PER_UNIT));
1985 x = TREE_OPERAND (x, 0);
1987 while (x && TREE_CODE (x) == COMPONENT_REF);
1989 return GEN_INT (ioffset);
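/* A worked example (hypothetical layout): if X is the reference s.f,
   where f lies 8 bytes into its containing structure, and OFFSET is
   (const_int 4), the loop above computes ioffset = 4 + 8 = 12, the
   byte offset of the access from the start of s's DECL. */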
1992 /* Return nonzero if we can determine the exprs corresponding to memrefs
1993 X and Y and they do not overlap. */
1995 static int
1996 nonoverlapping_memrefs_p (rtx x, rtx y)
1998 tree exprx = MEM_EXPR (x), expry = MEM_EXPR (y);
1999 rtx rtlx, rtly;
2000 rtx basex, basey;
2001 rtx moffsetx, moffsety;
2002 HOST_WIDE_INT offsetx = 0, offsety = 0, sizex, sizey, tem;
2004 /* Unless both have exprs, we can't tell anything. */
2005 if (exprx == 0 || expry == 0)
2006 return 0;
2008 /* If both are field references, we may be able to determine something. */
2009 if (TREE_CODE (exprx) == COMPONENT_REF
2010 && TREE_CODE (expry) == COMPONENT_REF
2011 && nonoverlapping_component_refs_p (exprx, expry))
2012 return 1;
2014 /* If the field reference test failed, look at the DECLs involved. */
2015 moffsetx = MEM_OFFSET (x);
2016 if (TREE_CODE (exprx) == COMPONENT_REF)
2018 tree t = decl_for_component_ref (exprx);
2019 if (! t)
2020 return 0;
2021 moffsetx = adjust_offset_for_component_ref (exprx, moffsetx);
2022 exprx = t;
2024 else if (TREE_CODE (exprx) == INDIRECT_REF)
2026 exprx = TREE_OPERAND (exprx, 0);
2027 if (flag_argument_noalias < 2
2028 || TREE_CODE (exprx) != PARM_DECL)
2029 return 0;
2032 moffsety = MEM_OFFSET (y);
2033 if (TREE_CODE (expry) == COMPONENT_REF)
2035 tree t = decl_for_component_ref (expry);
2036 if (! t)
2037 return 0;
2038 moffsety = adjust_offset_for_component_ref (expry, moffsety);
2039 expry = t;
2041 else if (TREE_CODE (expry) == INDIRECT_REF)
2043 expry = TREE_OPERAND (expry, 0);
2044 if (flag_argument_noalias < 2
2045 || TREE_CODE (expry) != PARM_DECL)
2046 return 0;
2049 if (! DECL_P (exprx) || ! DECL_P (expry))
2050 return 0;
2052 rtlx = DECL_RTL (exprx);
2053 rtly = DECL_RTL (expry);
2055 /* If either RTL is not a MEM, it must be a REG or CONCAT, meaning they
2056 can't overlap unless they are the same, because we never reuse the part
2057 of the stack frame used for locals to hold spilled pseudos. */
2058 if ((!MEM_P (rtlx) || !MEM_P (rtly))
2059 && ! rtx_equal_p (rtlx, rtly))
2060 return 1;
2062 /* Get the base and offsets of both decls. If either is a register, we
2063 know both are and are the same, so use that as the base. The only way
2064 we can avoid overlap is if we can deduce that they are nonoverlapping
2065 pieces of that decl, which is very rare. */
2066 basex = MEM_P (rtlx) ? XEXP (rtlx, 0) : rtlx;
2067 if (GET_CODE (basex) == PLUS && GET_CODE (XEXP (basex, 1)) == CONST_INT)
2068 offsetx = INTVAL (XEXP (basex, 1)), basex = XEXP (basex, 0);
2070 basey = MEM_P (rtly) ? XEXP (rtly, 0) : rtly;
2071 if (GET_CODE (basey) == PLUS && GET_CODE (XEXP (basey, 1)) == CONST_INT)
2072 offsety = INTVAL (XEXP (basey, 1)), basey = XEXP (basey, 0);
2074 /* If the bases are different, we know they do not overlap if both
2075 are constants or if one is a constant and the other a pointer into the
2076 stack frame. Otherwise a different base means we can't tell if they
2077 overlap or not. */
2078 if (! rtx_equal_p (basex, basey))
2079 return ((CONSTANT_P (basex) && CONSTANT_P (basey))
2080 || (CONSTANT_P (basex) && REG_P (basey)
2081 && REGNO_PTR_FRAME_P (REGNO (basey)))
2082 || (CONSTANT_P (basey) && REG_P (basex)
2083 && REGNO_PTR_FRAME_P (REGNO (basex))));
2085 sizex = (!MEM_P (rtlx) ? (int) GET_MODE_SIZE (GET_MODE (rtlx))
2086 : MEM_SIZE (rtlx) ? INTVAL (MEM_SIZE (rtlx))
2087 : -1);
2088 sizey = (!MEM_P (rtly) ? (int) GET_MODE_SIZE (GET_MODE (rtly))
2089 : MEM_SIZE (rtly) ? INTVAL (MEM_SIZE (rtly))
2090 : -1);
2092 /* If we have an offset for either memref, it can update the values computed
2093 above. */
2094 if (moffsetx)
2095 offsetx += INTVAL (moffsetx), sizex -= INTVAL (moffsetx);
2096 if (moffsety)
2097 offsety += INTVAL (moffsety), sizey -= INTVAL (moffsety);
2099 /* If a memref has both a size and an offset, we can use the smaller size.
2100 We can't do this if the offset isn't known because we must view this
2101 memref as being anywhere inside the DECL's MEM. */
2102 if (MEM_SIZE (x) && moffsetx)
2103 sizex = INTVAL (MEM_SIZE (x));
2104 if (MEM_SIZE (y) && moffsety)
2105 sizey = INTVAL (MEM_SIZE (y));
2107 /* Put the values of the memref with the lower offset in X's values. */
2108 if (offsetx > offsety)
2110 tem = offsetx, offsetx = offsety, offsety = tem;
2111 tem = sizex, sizex = sizey, sizey = tem;
2114 /* If we don't know the size of the lower-offset value, we can't tell
2115 if they conflict. Otherwise, we do the test. */
2116 return sizex >= 0 && offsety >= offsetx + sizex;
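/* For illustration of the final test (hypothetical frame layout):
   two 4-byte locals at (plus sp 8) and (plus sp 16) share the base
   sp with offsetx = 8, offsety = 16 and sizex = 4; since 16 >= 8 + 4
   the memrefs cannot overlap and 1 is returned. */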
2119 /* True dependence: X is read after store in MEM takes place. */
2121 int
2122 true_dependence (rtx mem, enum machine_mode mem_mode, rtx x,
2123 int (*varies) (rtx, int))
2125 rtx x_addr, mem_addr;
2126 rtx base;
2128 if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
2129 return 1;
2131 /* (mem:BLK (scratch)) is a special mechanism to conflict with everything.
2132 This is used in epilogue deallocation functions. */
2133 if (GET_MODE (x) == BLKmode && GET_CODE (XEXP (x, 0)) == SCRATCH)
2134 return 1;
2135 if (GET_MODE (mem) == BLKmode && GET_CODE (XEXP (mem, 0)) == SCRATCH)
2136 return 1;
2138 if (DIFFERENT_ALIAS_SETS_P (x, mem))
2139 return 0;
2141 /* Read-only memory is by definition never modified, and therefore can't
2142 conflict with anything. We don't expect to find the read-only flag set
2143 on a MEM, but stupid user tricks can produce such MEMs, so don't abort. */
2144 if (MEM_READONLY_P (x))
2145 return 0;
2147 if (nonoverlapping_memrefs_p (mem, x))
2148 return 0;
2150 if (mem_mode == VOIDmode)
2151 mem_mode = GET_MODE (mem);
2153 x_addr = get_addr (XEXP (x, 0));
2154 mem_addr = get_addr (XEXP (mem, 0));
2156 base = find_base_term (x_addr);
2157 if (base && (GET_CODE (base) == LABEL_REF
2158 || (GET_CODE (base) == SYMBOL_REF
2159 && CONSTANT_POOL_ADDRESS_P (base))))
2160 return 0;
2162 if (! base_alias_check (x_addr, mem_addr, GET_MODE (x), mem_mode))
2163 return 0;
2165 x_addr = canon_rtx (x_addr);
2166 mem_addr = canon_rtx (mem_addr);
2168 if (! memrefs_conflict_p (GET_MODE_SIZE (mem_mode), mem_addr,
2169 SIZE_FOR_MODE (x), x_addr, 0))
2170 return 0;
2172 if (aliases_everything_p (x))
2173 return 1;
2175 /* We cannot use aliases_everything_p to test MEM, since we must look
2176 at MEM_MODE, rather than GET_MODE (MEM). */
2177 if (mem_mode == QImode || GET_CODE (mem_addr) == AND)
2178 return 1;
2180 /* In true_dependence we also allow BLKmode to alias anything. Why
2181 don't we do this in anti_dependence and output_dependence? */
2182 if (mem_mode == BLKmode || GET_MODE (x) == BLKmode)
2183 return 1;
2185 return ! fixed_scalar_and_varying_struct_p (mem, x, mem_addr, x_addr,
2186 varies);
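/* A hypothetical use of true_dependence: a pass that wants to hoist
   the load in

     *p = 1;    <- store, MEM S
     x = *q;    <- load, MEM L

   above the store would call true_dependence (S, GET_MODE (S), L,
   rtx_addr_varies_p) and perform the motion only when it returns 0. */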
2189 /* Canonical true dependence: X is read after store in MEM takes place.
2190 Variant of true_dependence which assumes MEM has already been
2191 canonicalized (hence we no longer do that here).
2192 The mem_addr argument has been added, since true_dependence computed
2193 this value prior to canonicalizing. */
2195 int
2196 canon_true_dependence (rtx mem, enum machine_mode mem_mode, rtx mem_addr,
2197 rtx x, int (*varies) (rtx, int))
2199 rtx x_addr;
2201 if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
2202 return 1;
2204 /* (mem:BLK (scratch)) is a special mechanism to conflict with everything.
2205 This is used in epilogue deallocation functions. */
2206 if (GET_MODE (x) == BLKmode && GET_CODE (XEXP (x, 0)) == SCRATCH)
2207 return 1;
2208 if (GET_MODE (mem) == BLKmode && GET_CODE (XEXP (mem, 0)) == SCRATCH)
2209 return 1;
2211 if (DIFFERENT_ALIAS_SETS_P (x, mem))
2212 return 0;
2214 /* Read-only memory is by definition never modified, and therefore can't
2215 conflict with anything. We don't expect to find the read-only flag set
2216 on a MEM, but stupid user tricks can produce such MEMs, so don't abort. */
2217 if (MEM_READONLY_P (x))
2218 return 0;
2220 if (nonoverlapping_memrefs_p (x, mem))
2221 return 0;
2223 x_addr = get_addr (XEXP (x, 0));
2225 if (! base_alias_check (x_addr, mem_addr, GET_MODE (x), mem_mode))
2226 return 0;
2228 x_addr = canon_rtx (x_addr);
2229 if (! memrefs_conflict_p (GET_MODE_SIZE (mem_mode), mem_addr,
2230 SIZE_FOR_MODE (x), x_addr, 0))
2231 return 0;
2233 if (aliases_everything_p (x))
2234 return 1;
2236 /* We cannot use aliases_everything_p to test MEM, since we must look
2237 at MEM_MODE, rather than GET_MODE (MEM). */
2238 if (mem_mode == QImode || GET_CODE (mem_addr) == AND)
2239 return 1;
2241 /* In true_dependence we also allow BLKmode to alias anything. Why
2242 don't we do this in anti_dependence and output_dependence? */
2243 if (mem_mode == BLKmode || GET_MODE (x) == BLKmode)
2244 return 1;
2246 return ! fixed_scalar_and_varying_struct_p (mem, x, mem_addr, x_addr,
2247 varies);
2250 /* Returns nonzero if a write to X might alias a previous read from
2251 (or, if WRITEP is nonzero, a write to) MEM. */
2253 static int
2254 write_dependence_p (rtx mem, rtx x, int writep)
2256 rtx x_addr, mem_addr;
2257 rtx fixed_scalar;
2258 rtx base;
2260 if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
2261 return 1;
2263 /* (mem:BLK (scratch)) is a special mechanism to conflict with everything.
2264 This is used in epilogue deallocation functions. */
2265 if (GET_MODE (x) == BLKmode && GET_CODE (XEXP (x, 0)) == SCRATCH)
2266 return 1;
2267 if (GET_MODE (mem) == BLKmode && GET_CODE (XEXP (mem, 0)) == SCRATCH)
2268 return 1;
2270 if (DIFFERENT_ALIAS_SETS_P (x, mem))
2271 return 0;
2273 /* A read from read-only memory can't conflict with read-write memory. */
2274 if (!writep && MEM_READONLY_P (mem))
2275 return 0;
2277 if (nonoverlapping_memrefs_p (x, mem))
2278 return 0;
2280 x_addr = get_addr (XEXP (x, 0));
2281 mem_addr = get_addr (XEXP (mem, 0));
2283 if (! writep)
2285 base = find_base_term (mem_addr);
2286 if (base && (GET_CODE (base) == LABEL_REF
2287 || (GET_CODE (base) == SYMBOL_REF
2288 && CONSTANT_POOL_ADDRESS_P (base))))
2289 return 0;
2292 if (! base_alias_check (x_addr, mem_addr, GET_MODE (x),
2293 GET_MODE (mem)))
2294 return 0;
2296 x_addr = canon_rtx (x_addr);
2297 mem_addr = canon_rtx (mem_addr);
2299 if (!memrefs_conflict_p (SIZE_FOR_MODE (mem), mem_addr,
2300 SIZE_FOR_MODE (x), x_addr, 0))
2301 return 0;
2303 fixed_scalar
2304 = fixed_scalar_and_varying_struct_p (mem, x, mem_addr, x_addr,
2305 rtx_addr_varies_p);
2307 return (!(fixed_scalar == mem && !aliases_everything_p (x))
2308 && !(fixed_scalar == x && !aliases_everything_p (mem)));
2311 /* Anti dependence: X is written after read in MEM takes place. */
2313 int
2314 anti_dependence (rtx mem, rtx x)
2316 return write_dependence_p (mem, x, /*writep=*/0);
2319 /* Output dependence: X is written after store in MEM takes place. */
2321 int
2322 output_dependence (rtx mem, rtx x)
2324 return write_dependence_p (mem, x, /*writep=*/1);
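/* To illustrate the three dependence kinds on one fragment:

     x = *p;    <- I1 reads *p
     *p = 1;    <- I2 writes *p: anti_dependence (I1's MEM, I2's MEM)
     *p = 2;    <- I3 writes *p: output_dependence (I2's MEM, I3's MEM)
     y = *p;    <- I4 reads *p:  true_dependence from I3's store

   all three pairs must keep their original order. */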
2327 /* A subroutine of nonlocal_mentioned_p, returns 1 if *LOC mentions
2328 something which is not local to the function and is not constant. */
2330 static int
2331 nonlocal_mentioned_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
2333 rtx x = *loc;
2334 rtx base;
2335 int regno;
2337 if (! x)
2338 return 0;
2340 switch (GET_CODE (x))
2342 case SUBREG:
2343 if (REG_P (SUBREG_REG (x)))
2345 /* Global registers are not local. */
2346 if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
2347 && global_regs[subreg_regno (x)])
2348 return 1;
2349 return 0;
2351 break;
2353 case REG:
2354 regno = REGNO (x);
2355 /* Global registers are not local. */
2356 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
2357 return 1;
2358 return 0;
2360 case SCRATCH:
2361 case PC:
2362 case CC0:
2363 case CONST_INT:
2364 case CONST_DOUBLE:
2365 case CONST_VECTOR:
2366 case CONST:
2367 case LABEL_REF:
2368 return 0;
2370 case SYMBOL_REF:
2371 /* Constants in the function's constant pool are constant. */
2372 if (CONSTANT_POOL_ADDRESS_P (x))
2373 return 0;
2374 return 1;
2376 case CALL:
2377 /* Non-constant calls and recursion are not local. */
2378 return 1;
2380 case MEM:
2381 /* Be overly conservative and consider any volatile memory
2382 reference as not local. */
2383 if (MEM_VOLATILE_P (x))
2384 return 1;
2385 base = find_base_term (XEXP (x, 0));
2386 if (base)
2388 /* A Pmode ADDRESS could be a reference via the structure value
2389 address or static chain. Such memory references are nonlocal.
2391 Thus, we have to examine the contents of the ADDRESS to find
2392 out if this is a local reference or not. */
2393 if (GET_CODE (base) == ADDRESS
2394 && GET_MODE (base) == Pmode
2395 && (XEXP (base, 0) == stack_pointer_rtx
2396 || XEXP (base, 0) == arg_pointer_rtx
2397 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2398 || XEXP (base, 0) == hard_frame_pointer_rtx
2399 #endif
2400 || XEXP (base, 0) == frame_pointer_rtx))
2401 return 0;
2402 /* Constants in the function's constant pool are constant. */
2403 if (GET_CODE (base) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (base))
2404 return 0;
2406 return 1;
2408 case UNSPEC_VOLATILE:
2409 case ASM_INPUT:
2410 return 1;
2412 case ASM_OPERANDS:
2413 if (MEM_VOLATILE_P (x))
2414 return 1;
2416 /* Fall through. */
2418 default:
2419 break;
2422 return 0;
2425 /* Returns nonzero if X might mention something which is not
2426 local to the function and is not constant. */
2428 static int
2429 nonlocal_mentioned_p (rtx x)
2431 if (INSN_P (x))
2433 if (CALL_P (x))
2435 if (! CONST_OR_PURE_CALL_P (x))
2436 return 1;
2437 x = CALL_INSN_FUNCTION_USAGE (x);
2438 if (x == 0)
2439 return 0;
2441 else
2442 x = PATTERN (x);
2445 return for_each_rtx (&x, nonlocal_mentioned_p_1, NULL);
2448 /* A subroutine of nonlocal_referenced_p, returns 1 if *LOC references
2449 something which is not local to the function and is not constant. */
2451 static int
2452 nonlocal_referenced_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
2454 rtx x = *loc;
2456 if (! x)
2457 return 0;
2459 switch (GET_CODE (x))
2461 case MEM:
2462 case REG:
2463 case SYMBOL_REF:
2464 case SUBREG:
2465 return nonlocal_mentioned_p (x);
2467 case CALL:
2468 /* Non-constant calls and recursion are not local. */
2469 return 1;
2471 case SET:
2472 if (nonlocal_mentioned_p (SET_SRC (x)))
2473 return 1;
2475 if (MEM_P (SET_DEST (x)))
2476 return nonlocal_mentioned_p (XEXP (SET_DEST (x), 0));
2478 /* If the destination is anything other than a CC0, PC,
2479 MEM, REG, or a SUBREG of a REG that occupies all of
2480 the REG, then X references nonlocal memory if it is
2481 mentioned in the destination. */
2482 if (GET_CODE (SET_DEST (x)) != CC0
2483 && GET_CODE (SET_DEST (x)) != PC
2484 && !REG_P (SET_DEST (x))
2485 && ! (GET_CODE (SET_DEST (x)) == SUBREG
2486 && REG_P (SUBREG_REG (SET_DEST (x)))
2487 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
2488 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
2489 == ((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
2490 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
2491 return nonlocal_mentioned_p (SET_DEST (x));
2492 return 0;
2494 case CLOBBER:
2495 if (MEM_P (XEXP (x, 0)))
2496 return nonlocal_mentioned_p (XEXP (XEXP (x, 0), 0));
2497 return 0;
2499 case USE:
2500 return nonlocal_mentioned_p (XEXP (x, 0));
2502 case ASM_INPUT:
2503 case UNSPEC_VOLATILE:
2504 return 1;
2506 case ASM_OPERANDS:
2507 if (MEM_VOLATILE_P (x))
2508 return 1;
2510 /* Fall through. */
2512 default:
2513 break;
2516 return 0;
2519 /* Returns nonzero if X might reference something which is not
2520 local to the function and is not constant. */
2522 static int
2523 nonlocal_referenced_p (rtx x)
2525 if (INSN_P (x))
2527 if (CALL_P (x))
2529 if (! CONST_OR_PURE_CALL_P (x))
2530 return 1;
2531 x = CALL_INSN_FUNCTION_USAGE (x);
2532 if (x == 0)
2533 return 0;
2535 else
2536 x = PATTERN (x);
2539 return for_each_rtx (&x, nonlocal_referenced_p_1, NULL);
2542 /* A subroutine of nonlocal_set_p, returns 1 if *LOC sets
2543 something which is not local to the function and is not constant. */
2545 static int
2546 nonlocal_set_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
2548 rtx x = *loc;
2550 if (! x)
2551 return 0;
2553 switch (GET_CODE (x))
2555 case CALL:
2556 /* Non-constant calls and recursion are not local. */
2557 return 1;
2559 case PRE_INC:
2560 case PRE_DEC:
2561 case POST_INC:
2562 case POST_DEC:
2563 case PRE_MODIFY:
2564 case POST_MODIFY:
2565 return nonlocal_mentioned_p (XEXP (x, 0));
2567 case SET:
2568 if (nonlocal_mentioned_p (SET_DEST (x)))
2569 return 1;
2570 return nonlocal_set_p (SET_SRC (x));
2572 case CLOBBER:
2573 return nonlocal_mentioned_p (XEXP (x, 0));
2575 case USE:
2576 return 0;
2578 case ASM_INPUT:
2579 case UNSPEC_VOLATILE:
2580 return 1;
2582 case ASM_OPERANDS:
2583 if (MEM_VOLATILE_P (x))
2584 return 1;
2586 /* Fall through. */
2588 default:
2589 break;
2592 return 0;
2595 /* Returns nonzero if X might set something which is not
2596 local to the function and is not constant. */
2598 static int
2599 nonlocal_set_p (rtx x)
2601 if (INSN_P (x))
2603 if (CALL_P (x))
2605 if (! CONST_OR_PURE_CALL_P (x))
2606 return 1;
2607 x = CALL_INSN_FUNCTION_USAGE (x);
2608 if (x == 0)
2609 return 0;
2611 else
2612 x = PATTERN (x);
2615 return for_each_rtx (&x, nonlocal_set_p_1, NULL);
2618 /* Mark the function if it is pure or constant. */
2620 void
2621 mark_constant_function (void)
2623 rtx insn;
2624 int nonlocal_memory_referenced;
2626 if (TREE_READONLY (current_function_decl)
2627 || DECL_IS_PURE (current_function_decl)
2628 || TREE_THIS_VOLATILE (current_function_decl)
2629 || current_function_has_nonlocal_goto
2630 || !targetm.binds_local_p (current_function_decl))
2631 return;
2633 /* A loop might not return, which counts as a side effect. */
2634 if (mark_dfs_back_edges ())
2635 return;
2637 nonlocal_memory_referenced = 0;
2639 init_alias_analysis ();
2641 /* Determine if this is a constant or pure function. */
2643 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2645 if (! INSN_P (insn))
2646 continue;
2648 if (nonlocal_set_p (insn) || global_reg_mentioned_p (insn)
2649 || volatile_refs_p (PATTERN (insn)))
2650 break;
2652 if (! nonlocal_memory_referenced)
2653 nonlocal_memory_referenced = nonlocal_referenced_p (insn);
2656 end_alias_analysis ();
2658 /* Mark the function. */
2660 if (insn)
2661 ;
2662 else if (nonlocal_memory_referenced)
2664 cgraph_rtl_info (current_function_decl)->pure_function = 1;
2665 DECL_IS_PURE (current_function_decl) = 1;
2667 else
2669 cgraph_rtl_info (current_function_decl)->const_function = 1;
2670 TREE_READONLY (current_function_decl) = 1;
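/* For illustration, with the hypothetical functions

     int sq (int i) { return i * i; }
     int get (const int *p) { return *p; }

   sq references nothing nonlocal and, roughly, ends up marked const
   (TREE_READONLY), while get reads nonlocal memory without writing
   any and is marked pure (DECL_IS_PURE). */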
2675 void
2676 init_alias_once (void)
2678 int i;
2680 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2681 /* Check whether this register can hold an incoming pointer
2682 argument. FUNCTION_ARG_REGNO_P tests outgoing register
2683 numbers, so translate if necessary due to register windows. */
2684 if (FUNCTION_ARG_REGNO_P (OUTGOING_REGNO (i))
2685 && HARD_REGNO_MODE_OK (i, Pmode))
2686 static_reg_base_value[i]
2687 = gen_rtx_ADDRESS (VOIDmode, gen_rtx_REG (Pmode, i));
2689 static_reg_base_value[STACK_POINTER_REGNUM]
2690 = gen_rtx_ADDRESS (Pmode, stack_pointer_rtx);
2691 static_reg_base_value[ARG_POINTER_REGNUM]
2692 = gen_rtx_ADDRESS (Pmode, arg_pointer_rtx);
2693 static_reg_base_value[FRAME_POINTER_REGNUM]
2694 = gen_rtx_ADDRESS (Pmode, frame_pointer_rtx);
2695 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2696 static_reg_base_value[HARD_FRAME_POINTER_REGNUM]
2697 = gen_rtx_ADDRESS (Pmode, hard_frame_pointer_rtx);
2698 #endif
2701 /* Set MEMORY_MODIFIED when X modifies DATA (which is assumed
2702 to be a memory reference). */
2703 static bool memory_modified;
2704 static void
2705 memory_modified_1 (rtx x, rtx pat ATTRIBUTE_UNUSED, void *data)
2707 if (MEM_P (x))
2709 if (anti_dependence (x, (rtx)data) || output_dependence (x, (rtx)data))
2710 memory_modified = true;
2715 /* Return true when INSN possibly modifies the memory contents of MEM
2716 (i.e. the address can be modified). */
2717 bool
2718 memory_modified_in_insn_p (rtx mem, rtx insn)
2720 if (!INSN_P (insn))
2721 return false;
2722 memory_modified = false;
2723 note_stores (PATTERN (insn), memory_modified_1, mem);
2724 return memory_modified;
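/* A minimal usage sketch: a caller that must assume MEM is clobbered
   whenever INSN stores into it would write

     if (memory_modified_in_insn_p (mem, insn))
       invalidate (mem);    <- hypothetical caller action

   note_stores walks only the stores of the pattern, so mere reads of
   MEM in INSN never set memory_modified. */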
2727 /* Initialize the aliasing machinery. Initialize the REG_KNOWN_VALUE
2728 array. */
2730 void
2731 init_alias_analysis (void)
2733 unsigned int maxreg = max_reg_num ();
2734 int changed, pass;
2735 int i;
2736 unsigned int ui;
2737 rtx insn;
2739 timevar_push (TV_ALIAS_ANALYSIS);
2741 reg_known_value_size = maxreg - FIRST_PSEUDO_REGISTER;
2742 reg_known_value = ggc_calloc (reg_known_value_size, sizeof (rtx));
2743 reg_known_equiv_p = xcalloc (reg_known_value_size, sizeof (bool));
2745 /* Overallocate reg_base_value to allow some growth during loop
2746 optimization. Loop unrolling can create a large number of
2747 registers. */
2748 if (old_reg_base_value)
2750 reg_base_value = old_reg_base_value;
2751 /* If the varray gets large, zeroing cost may become important. */
2752 if (VARRAY_SIZE (reg_base_value) > 256
2753 && VARRAY_SIZE (reg_base_value) > 4 * maxreg)
2754 VARRAY_GROW (reg_base_value, maxreg);
2755 VARRAY_CLEAR (reg_base_value);
2756 if (VARRAY_SIZE (reg_base_value) < maxreg)
2757 VARRAY_GROW (reg_base_value, maxreg);
2759 else
2761 VARRAY_RTX_INIT (reg_base_value, maxreg, "reg_base_value");
2764 new_reg_base_value = xmalloc (maxreg * sizeof (rtx));
2765 reg_seen = xmalloc (maxreg);
2766 if (! reload_completed && flag_old_unroll_loops)
2768 alias_invariant = ggc_calloc (maxreg, sizeof (rtx));
2769 alias_invariant_size = maxreg;
2772 /* The basic idea is that each pass through this loop will use the
2773 "constant" information from the previous pass to propagate alias
2774 information through another level of assignments.
2776 This could get expensive if the assignment chains are long. Maybe
2777 we should throttle the number of iterations, possibly based on
2778 the optimization level or flag_expensive_optimizations.
2780 We could propagate more information in the first pass by making use
2781 of REG_N_SETS to determine immediately that the alias information
2782 for a pseudo is "constant".
2784 A program with an uninitialized variable can cause an infinite loop
2785 here. Instead of doing a full dataflow analysis to detect such problems
2786 we just cap the number of iterations for the loop.
2788 The state of the arrays for the set chain in question does not matter
2789 since the program has undefined behavior. */
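/* For example, with the assignment chain

     p1 = &a;  p2 = p1;  p3 = p2;

   one pass records a base value for p1, the next propagates it to
   p2, and a third reaches p3; MAX_ALIAS_LOOP_PASSES bounds the
   iteration when undefined programs create cycles. */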
2791 pass = 0;
2792 do
2794 /* Assume nothing will change this iteration of the loop. */
2795 changed = 0;
2797 /* We want to assign the same IDs each iteration of this loop, so
2798 start counting from zero each iteration of the loop. */
2799 unique_id = 0;
2801 /* We're at the start of the function each iteration through the
2802 loop, so we're copying arguments. */
2803 copying_arguments = true;
2805 /* Wipe the potential alias information clean for this pass. */
2806 memset (new_reg_base_value, 0, maxreg * sizeof (rtx));
2808 /* Wipe the reg_seen array clean. */
2809 memset (reg_seen, 0, maxreg);
2811 /* Mark all hard registers which may contain an address.
2812 The stack, frame and argument pointers may contain an address.
2813 An argument register which can hold a Pmode value may contain
2814 an address even if it is not in BASE_REGS.
2816 The address expression is VOIDmode for an argument and
2817 Pmode for other registers. */
2819 memcpy (new_reg_base_value, static_reg_base_value,
2820 FIRST_PSEUDO_REGISTER * sizeof (rtx));
2822 /* Walk the insns adding values to the new_reg_base_value array. */
2823 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2825 if (INSN_P (insn))
2827 rtx note, set;
2829 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
2830 /* The prologue/epilogue insns are not threaded onto the
2831 insn chain until after reload has completed. Thus,
2832 there is no sense wasting time checking if INSN is in
2833 the prologue/epilogue until after reload has completed. */
2834 if (reload_completed
2835 && prologue_epilogue_contains (insn))
2836 continue;
2837 #endif
2839 /* If this insn has a noalias note, process it. Otherwise,
2840 scan for sets. A simple set will have no side effects
2841 which could change the base value of any other register. */
2843 if (GET_CODE (PATTERN (insn)) == SET
2844 && REG_NOTES (insn) != 0
2845 && find_reg_note (insn, REG_NOALIAS, NULL_RTX))
2846 record_set (SET_DEST (PATTERN (insn)), NULL_RTX, NULL);
2847 else
2848 note_stores (PATTERN (insn), record_set, NULL);
2850 set = single_set (insn);
2852 if (set != 0
2853 && REG_P (SET_DEST (set))
2854 && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
2856 unsigned int regno = REGNO (SET_DEST (set));
2857 rtx src = SET_SRC (set);
2858 rtx t;
2860 if (REG_NOTES (insn) != 0
2861 && (((note = find_reg_note (insn, REG_EQUAL, 0)) != 0
2862 && REG_N_SETS (regno) == 1)
2863 || (note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) != 0)
2864 && GET_CODE (XEXP (note, 0)) != EXPR_LIST
2865 && ! rtx_varies_p (XEXP (note, 0), 1)
2866 && ! reg_overlap_mentioned_p (SET_DEST (set),
2867 XEXP (note, 0)))
2869 set_reg_known_value (regno, XEXP (note, 0));
2870 set_reg_known_equiv_p (regno,
2871 REG_NOTE_KIND (note) == REG_EQUIV);
2873 else if (REG_N_SETS (regno) == 1
2874 && GET_CODE (src) == PLUS
2875 && REG_P (XEXP (src, 0))
2876 && (t = get_reg_known_value (REGNO (XEXP (src, 0))))
2877 && GET_CODE (XEXP (src, 1)) == CONST_INT)
2879 t = plus_constant (t, INTVAL (XEXP (src, 1)));
2880 set_reg_known_value (regno, t);
2881 set_reg_known_equiv_p (regno, 0);
2883 else if (REG_N_SETS (regno) == 1
2884 && ! rtx_varies_p (src, 1))
2886 set_reg_known_value (regno, src);
2887 set_reg_known_equiv_p (regno, 0);
2891 else if (NOTE_P (insn)
2892 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG)
2893 copying_arguments = false;
2896 /* Now propagate values from new_reg_base_value to reg_base_value. */
2897 gcc_assert (maxreg == (unsigned int) max_reg_num ());
2899 for (ui = 0; ui < maxreg; ui++)
2901 if (new_reg_base_value[ui]
2902 && new_reg_base_value[ui] != VARRAY_RTX (reg_base_value, ui)
2903 && ! rtx_equal_p (new_reg_base_value[ui],
2904 VARRAY_RTX (reg_base_value, ui)))
2906 VARRAY_RTX (reg_base_value, ui) = new_reg_base_value[ui];
2907 changed = 1;
2911 while (changed && ++pass < MAX_ALIAS_LOOP_PASSES);
2913 /* Fill in the remaining entries. */
2914 for (i = 0; i < (int)reg_known_value_size; i++)
2915 if (reg_known_value[i] == 0)
2916 reg_known_value[i] = regno_reg_rtx[i + FIRST_PSEUDO_REGISTER];
2918 /* Simplify the reg_base_value array so that no register refers to
2919 another register, except to special registers indirectly through
2920 ADDRESS expressions.
2922 In theory this loop can take as long as O(registers^2), but unless
2923 there are very long dependency chains it will run in close to linear
2924 time.
2926 This loop may not be needed any longer now that the main loop does
2927 a better job at propagating alias information. */
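/* For example (hypothetical entries): if reg_base_value[7] is
   (reg 5) and reg_base_value[5] is (symbol_ref "a"), the loop below
   rewrites entry 7 to the SYMBOL_REF directly; an entry set from its
   own register is cleared. */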
2928 pass = 0;
2929 do
2931 changed = 0;
2932 pass++;
2933 for (ui = 0; ui < maxreg; ui++)
2935 rtx base = VARRAY_RTX (reg_base_value, ui);
2936 if (base && REG_P (base))
2938 unsigned int base_regno = REGNO (base);
2939 if (base_regno == ui) /* register set from itself */
2940 VARRAY_RTX (reg_base_value, ui) = 0;
2941 else
2942 VARRAY_RTX (reg_base_value, ui)
2943 = VARRAY_RTX (reg_base_value, base_regno);
2944 changed = 1;
2948 while (changed && pass < MAX_ALIAS_LOOP_PASSES);
2950 /* Clean up. */
2951 free (new_reg_base_value);
2952 new_reg_base_value = 0;
2953 free (reg_seen);
2954 reg_seen = 0;
2955 timevar_pop (TV_ALIAS_ANALYSIS);
2958 void
2959 end_alias_analysis (void)
2961 old_reg_base_value = reg_base_value;
2962 ggc_free (reg_known_value);
2963 reg_known_value = 0;
2964 reg_known_value_size = 0;
2965 free (reg_known_equiv_p);
2966 reg_known_equiv_p = 0;
2967 if (alias_invariant)
2969 ggc_free (alias_invariant);
2970 alias_invariant = 0;
2971 alias_invariant_size = 0;
2975 #include "gt-alias.h"