/* Alias analysis for GNU C
   Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003
   Free Software Foundation, Inc.
   Contributed by John Carr (jfc@mit.edu).

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "function.h"
#include "expr.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "flags.h"
#include "output.h"
#include "toplev.h"
#include "cselib.h"
#include "splay-tree.h"
#include "ggc.h"
#include "langhooks.h"
#include "timevar.h"
#include "target.h"
#include "cgraph.h"
#include "varray.h"

/* The alias sets assigned to MEMs assist the back-end in determining
   which MEMs can alias which other MEMs.  In general, two MEMs in
   different alias sets cannot alias each other, with one important
   exception.  Consider something like:

     struct S { int i; double d; };

   a store to an `S' can alias something of either type `int' or type
   `double'.  (However, a store to an `int' cannot alias a `double'
   and vice versa.)  We indicate this via a tree structure that looks
   like:

		struct S
		  /   \
		 /     \
	       |/_     _\|
	       int    double

   (The arrows are directed and point downwards.)
   In this situation we say the alias set for `struct S' is the
   `superset' and that those for `int' and `double' are `subsets'.

   To see whether two alias sets can point to the same memory, we must
   see if either alias set is a subset of the other.  We need not trace
   past immediate descendants, however, since we propagate all
   grandchildren up one level.

   Alias set zero is implicitly a superset of all other alias sets.
   However, there is no actual entry for alias set zero.  It is an
   error to attempt to explicitly construct a subset of zero.  */

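/* Illustrative example (editorial, not part of the original sources):
   under the lattice above, given

     struct S s;
     int *ip = &s.i;
     double *dp = &s.d;

   a store through an lvalue of type `struct S' may alias loads through
   `ip' or `dp' (subset relation), while a store through `ip' may not
   alias a load through `dp' (they are sibling subsets).
   alias_sets_conflict_p below implements exactly this one-level subset
   test.  */
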
typedef struct alias_set_entry
{
  /* The alias set number, as stored in MEM_ALIAS_SET.  */
  HOST_WIDE_INT alias_set;

  /* The children of the alias set.  These are not just the immediate
     children, but, in fact, all descendants.  So, if we have:

       struct T { struct S s; float f; }

     continuing our example above, the children here will be all of
     `int', `double', `float', and `struct S'.  */
  splay_tree children;

  /* Nonzero if this alias set would have a child of zero: this
     effectively makes it the same as alias set zero.  */
  int has_zero_child;
} *alias_set_entry;

static int rtx_equal_for_memref_p (rtx, rtx);
static rtx find_symbolic_term (rtx);
static int memrefs_conflict_p (int, rtx, int, rtx, HOST_WIDE_INT);
static void record_set (rtx, rtx, void *);
static int base_alias_check (rtx, rtx, enum machine_mode,
			     enum machine_mode);
static rtx find_base_value (rtx);
static int mems_in_disjoint_alias_sets_p (rtx, rtx);
static int insert_subset_children (splay_tree_node, void*);
static tree find_base_decl (tree);
static alias_set_entry get_alias_set_entry (HOST_WIDE_INT);
static rtx fixed_scalar_and_varying_struct_p (rtx, rtx, rtx, rtx,
					      int (*) (rtx, int));
static int aliases_everything_p (rtx);
static bool nonoverlapping_component_refs_p (tree, tree);
static tree decl_for_component_ref (tree);
static rtx adjust_offset_for_component_ref (tree, rtx);
static int nonoverlapping_memrefs_p (rtx, rtx);
static int write_dependence_p (rtx, rtx, int, int);

static int nonlocal_mentioned_p_1 (rtx *, void *);
static int nonlocal_mentioned_p (rtx);
static int nonlocal_referenced_p_1 (rtx *, void *);
static int nonlocal_referenced_p (rtx);
static int nonlocal_set_p_1 (rtx *, void *);
static int nonlocal_set_p (rtx);
static void memory_modified_1 (rtx, rtx, void *);

/* Set up all info needed to perform alias analysis on memory references.  */

/* Returns the size in bytes of the mode of X.  */
#define SIZE_FOR_MODE(X) (GET_MODE_SIZE (GET_MODE (X)))

/* Returns nonzero if MEM1 and MEM2 do not alias because they are in
   different alias sets.  We ignore alias sets in functions making use
   of variable arguments because the va_arg macros on some systems are
   not legal ANSI C.  */
#define DIFFERENT_ALIAS_SETS_P(MEM1, MEM2) \
  mems_in_disjoint_alias_sets_p (MEM1, MEM2)

/* Cap the number of passes we make over the insns propagating alias
   information through set chains.  10 is a completely arbitrary choice.  */
#define MAX_ALIAS_LOOP_PASSES 10

/* reg_base_value[N] gives an address to which register N is related.
   If all sets after the first add or subtract to the current value
   or otherwise modify it so it does not point to a different top level
   object, reg_base_value[N] is equal to the address part of the source
   of the first set.

   A base address can be an ADDRESS, SYMBOL_REF, or LABEL_REF.  ADDRESS
   expressions represent certain special values: function arguments and
   the stack, frame, and argument pointers.

   The contents of an ADDRESS are not normally used; the mode of the
   ADDRESS determines whether the ADDRESS is a function argument or some
   other special value.  Pointer equality, not rtx_equal_p, determines whether
   two ADDRESS expressions refer to the same base address.

   The only use of the contents of an ADDRESS is for determining if the
   current function performs nonlocal memory references for the
   purposes of marking the function as a constant function.  */

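/* Illustrative example (editorial, not from the original sources):
   after the insns

     (set (reg 100) (symbol_ref "x"))
     (set (reg 100) (plus (reg 100) (const_int 4)))

   reg_base_value[100] is still (symbol_ref "x"): the second set merely
   offsets the register within the same top-level object, so the base
   is preserved.  */
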
static GTY((length ("reg_base_value_size"))) rtx *reg_base_value;
static rtx *new_reg_base_value;
static unsigned int reg_base_value_size; /* size of reg_base_value array */

/* Static hunks of RTL used by the aliasing code; these are initialized
   once per function to avoid unnecessary RTL allocations.  */
static GTY (()) rtx static_reg_base_value[FIRST_PSEUDO_REGISTER];

#define REG_BASE_VALUE(X) \
  (REGNO (X) < reg_base_value_size \
   ? reg_base_value[REGNO (X)] : 0)

/* Vector of known invariant relationships between registers.  Set in
   loop unrolling.  Indexed by register number; if nonzero, the value
   is an expression describing this register in terms of another.

   The length of this array is REG_BASE_VALUE_SIZE.

   Because this array contains only pseudo registers it has no effect
   after reload.  */
static rtx *alias_invariant;

/* Vector indexed by N giving the initial (unchanging) value known for
   pseudo-register N.  This array is initialized in
   init_alias_analysis, and does not change until end_alias_analysis
   is called.  */
rtx *reg_known_value;

/* Indicates number of valid entries in reg_known_value.  */
static unsigned int reg_known_value_size;

/* Vector recording for each reg_known_value whether it is due to a
   REG_EQUIV note.  Future passes (viz., reload) may replace the
   pseudo with the equivalent expression and so we account for the
   dependences that would be introduced if that happens.

   The REG_EQUIV notes created in assign_parms may mention the arg
   pointer, and there are explicit insns in the RTL that modify the
   arg pointer.  Thus we must ensure that such insns don't get
   scheduled across each other because that would invalidate the
   REG_EQUIV notes.  One could argue that the REG_EQUIV notes are
   wrong, but solving the problem in the scheduler will likely give
   better code, so we do it here.  */
char *reg_known_equiv_p;

/* True when scanning insns from the start of the rtl to the
   NOTE_INSN_FUNCTION_BEG note.  */
static bool copying_arguments;

/* The varray used to store the various alias set entries.  */
varray_type alias_sets;

/* Returns a pointer to the alias set entry for ALIAS_SET, if there is
   such an entry, or NULL otherwise.  */

static inline alias_set_entry
get_alias_set_entry (HOST_WIDE_INT alias_set)
{
  return (alias_set_entry) VARRAY_GENERIC_PTR (alias_sets, alias_set);
}

/* Returns nonzero if the alias sets for MEM1 and MEM2 are such that
   the two MEMs cannot alias each other.  */

static inline int
mems_in_disjoint_alias_sets_p (rtx mem1, rtx mem2)
{
#ifdef ENABLE_CHECKING
  /* Perform a basic sanity check.  Namely, that there are no alias sets
     if we're not using strict aliasing.  This helps to catch bugs
     whereby someone uses PUT_CODE, but doesn't clear MEM_ALIAS_SET, or
     where a MEM is allocated in some way other than by the use of
     gen_rtx_MEM, and the MEM_ALIAS_SET is not cleared.  If we begin to
     use alias sets to indicate that spilled registers cannot alias each
     other, we might need to remove this check.  */
  if (! flag_strict_aliasing
      && (MEM_ALIAS_SET (mem1) != 0 || MEM_ALIAS_SET (mem2) != 0))
    abort ();
#endif

  return ! alias_sets_conflict_p (MEM_ALIAS_SET (mem1), MEM_ALIAS_SET (mem2));
}

/* Insert the NODE into the splay tree given by DATA.  Used by
   record_alias_subset via splay_tree_foreach.  */

static int
insert_subset_children (splay_tree_node node, void *data)
{
  splay_tree_insert ((splay_tree) data, node->key, node->value);

  return 0;
}

/* Return 1 if the two specified alias sets may conflict.  */

int
alias_sets_conflict_p (HOST_WIDE_INT set1, HOST_WIDE_INT set2)
{
  alias_set_entry ase;

  /* If we have no alias set information for one of the operands, we have
     to assume it can alias anything.  */
  if (set1 == 0 || set2 == 0
      /* If the two alias sets are the same, they may alias.  */
      || set1 == set2)
    return 1;

  /* See if the first alias set is a subset of the second.  */
  ase = get_alias_set_entry (set1);
  if (ase != 0
      && (ase->has_zero_child
	  || splay_tree_lookup (ase->children,
				(splay_tree_key) set2)))
    return 1;

  /* Now do the same, but with the alias sets reversed.  */
  ase = get_alias_set_entry (set2);
  if (ase != 0
      && (ase->has_zero_child
	  || splay_tree_lookup (ase->children,
				(splay_tree_key) set1)))
    return 1;

  /* The two alias sets are distinct and neither one is the
     child of the other.  Therefore, they cannot alias.  */
  return 0;
}

/* Return 1 if TYPE is a RECORD_TYPE, UNION_TYPE, or QUAL_UNION_TYPE and
   has any readonly fields.  If any of the fields have types that
   contain readonly fields, return true as well.  */

int
readonly_fields_p (tree type)
{
  tree field;

  if (TREE_CODE (type) != RECORD_TYPE && TREE_CODE (type) != UNION_TYPE
      && TREE_CODE (type) != QUAL_UNION_TYPE)
    return 0;

  for (field = TYPE_FIELDS (type); field != 0; field = TREE_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL
	&& (TREE_READONLY (field)
	    || readonly_fields_p (TREE_TYPE (field))))
      return 1;

  return 0;
}

/* Return 1 if any MEM object of type T1 will always conflict (using the
   dependency routines in this file) with any MEM object of type T2.
   This is used when allocating temporary storage.  If T1 and/or T2 are
   NULL_TREE, it means we know nothing about the storage.  */

int
objects_must_conflict_p (tree t1, tree t2)
{
  HOST_WIDE_INT set1, set2;

  /* If neither has a type specified, we don't know if they'll conflict
     because we may be using them to store objects of various types, for
     example the argument and local variables areas of inlined functions.  */
  if (t1 == 0 && t2 == 0)
    return 0;

  /* If one or the other has readonly fields or is readonly,
     then they may not conflict.  */
  if ((t1 != 0 && readonly_fields_p (t1))
      || (t2 != 0 && readonly_fields_p (t2))
      || (t1 != 0 && lang_hooks.honor_readonly && TYPE_READONLY (t1))
      || (t2 != 0 && lang_hooks.honor_readonly && TYPE_READONLY (t2)))
    return 0;

  /* If they are the same type, they must conflict.  */
  if (t1 == t2
      /* Likewise if both are volatile.  */
      || (t1 != 0 && TYPE_VOLATILE (t1) && t2 != 0 && TYPE_VOLATILE (t2)))
    return 1;

  set1 = t1 ? get_alias_set (t1) : 0;
  set2 = t2 ? get_alias_set (t2) : 0;

  /* Otherwise they conflict if they have no alias set or the same.  We
     can't simply use alias_sets_conflict_p here, because we must make
     sure that every subtype of t1 will conflict with every subtype of
     t2 for which a pair of subobjects of these respective subtypes
     overlaps on the stack.  */
  return set1 == 0 || set2 == 0 || set1 == set2;
}

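/* Worked example (illustrative, editorial): with strict aliasing
   enabled, objects_must_conflict_p on two `int' types returns 1 (same
   type, same alias set); on `int' vs `double' it returns 0 (distinct
   nonzero alias sets); and passing NULL_TREE for one argument makes
   set1 == 0 || set2 == 0 hold, so it conservatively returns 1.  */
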
/* T is an expression with pointer type.  Find the DECL on which this
   expression is based.  (For example, in `a[i]' this would be `a'.)
   If there is no such DECL, or a unique decl cannot be determined,
   NULL_TREE is returned.  */

static tree
find_base_decl (tree t)
{
  tree d0, d1, d2;

  if (t == 0 || t == error_mark_node || ! POINTER_TYPE_P (TREE_TYPE (t)))
    return 0;

  /* If this is a declaration, return it.  */
  if (TREE_CODE_CLASS (TREE_CODE (t)) == 'd')
    return t;

  /* Handle general expressions.  It would be nice to deal with
     COMPONENT_REFs here.  If we could tell that `a' and `b' were the
     same, then `a->f' and `b->f' are also the same.  */
  switch (TREE_CODE_CLASS (TREE_CODE (t)))
    {
    case '1':
      return find_base_decl (TREE_OPERAND (t, 0));

    case '2':
      /* Return 0 if found in neither or both are the same.  */
      d0 = find_base_decl (TREE_OPERAND (t, 0));
      d1 = find_base_decl (TREE_OPERAND (t, 1));
      if (d0 == d1)
	return d0;
      else if (d0 == 0)
	return d1;
      else if (d1 == 0)
	return d0;
      else
	return 0;

    case '3':
      d0 = find_base_decl (TREE_OPERAND (t, 0));
      d1 = find_base_decl (TREE_OPERAND (t, 1));
      d2 = find_base_decl (TREE_OPERAND (t, 2));

      /* Set any nonzero values from the last, then from the first.  */
      if (d1 == 0) d1 = d2;
      if (d0 == 0) d0 = d1;
      if (d1 == 0) d1 = d0;
      if (d2 == 0) d2 = d1;

      /* At this point all are nonzero or all are zero.  If all three are the
	 same, return it.  Otherwise, return zero.  */
      return (d0 == d1 && d1 == d2) ? d0 : 0;

    default:
      return 0;
    }
}

/* Return 1 if all the nested component references handled by
   get_inner_reference in T are such that we can address the object in T.  */

int
can_address_p (tree t)
{
  /* If we're at the end, it is vacuously addressable.  */
  if (! handled_component_p (t))
    return 1;

  /* Bitfields are never addressable.  */
  else if (TREE_CODE (t) == BIT_FIELD_REF)
    return 0;

  /* Fields are addressable unless they are marked as nonaddressable or
     the containing type has alias set 0.  */
  else if (TREE_CODE (t) == COMPONENT_REF
	   && ! DECL_NONADDRESSABLE_P (TREE_OPERAND (t, 1))
	   && get_alias_set (TREE_TYPE (TREE_OPERAND (t, 0))) != 0
	   && can_address_p (TREE_OPERAND (t, 0)))
    return 1;

  /* Likewise for arrays.  */
  else if ((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	   && ! TYPE_NONALIASED_COMPONENT (TREE_TYPE (TREE_OPERAND (t, 0)))
	   && get_alias_set (TREE_TYPE (TREE_OPERAND (t, 0))) != 0
	   && can_address_p (TREE_OPERAND (t, 0)))
    return 1;

  return 0;
}

/* Return the alias set for T, which may be either a type or an
   expression.  Call language-specific routine for help, if needed.  */

HOST_WIDE_INT
get_alias_set (tree t)
{
  HOST_WIDE_INT set;

  /* If we're not doing any alias analysis, just assume everything
     aliases everything else.  Also return 0 if this or its type is
     an error.  */
  if (! flag_strict_aliasing || t == error_mark_node
      || (! TYPE_P (t)
	  && (TREE_TYPE (t) == 0 || TREE_TYPE (t) == error_mark_node)))
    return 0;

  /* We can be passed either an expression or a type.  This and the
     language-specific routine may make mutually-recursive calls to each other
     to figure out what to do.  At each juncture, we see if this is a tree
     that the language may need to handle specially.  First handle things that
     aren't types.  */
  if (! TYPE_P (t))
    {
      tree inner = t;
      tree placeholder_ptr = 0;

      /* Remove any nops, then give the language a chance to do
	 something with this tree before we look at it.  */
      STRIP_NOPS (t);
      set = (*lang_hooks.get_alias_set) (t);
      if (set != -1)
	return set;

      /* First see if the actual object referenced is an INDIRECT_REF from a
	 restrict-qualified pointer or a "void *".  Replace
	 PLACEHOLDER_EXPRs.  */
      while (TREE_CODE (inner) == PLACEHOLDER_EXPR
	     || handled_component_p (inner))
	{
	  if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
	    inner = find_placeholder (inner, &placeholder_ptr);
	  else
	    inner = TREE_OPERAND (inner, 0);

	  STRIP_NOPS (inner);
	}

      /* Check for accesses through restrict-qualified pointers.  */
      if (TREE_CODE (inner) == INDIRECT_REF)
	{
	  tree decl = find_base_decl (TREE_OPERAND (inner, 0));

	  if (decl && DECL_POINTER_ALIAS_SET_KNOWN_P (decl))
	    {
	      /* If we haven't computed the actual alias set, do it now.  */
	      if (DECL_POINTER_ALIAS_SET (decl) == -2)
		{
		  /* No two restricted pointers can point at the same thing.
		     However, a restricted pointer can point at the same thing
		     as an unrestricted pointer, if that unrestricted pointer
		     is based on the restricted pointer.  So, we make the
		     alias set for the restricted pointer a subset of the
		     alias set for the type pointed to by the type of the
		     decl.  */
		  HOST_WIDE_INT pointed_to_alias_set
		    = get_alias_set (TREE_TYPE (TREE_TYPE (decl)));

		  if (pointed_to_alias_set == 0)
		    /* It's not legal to make a subset of alias set zero.  */
		    DECL_POINTER_ALIAS_SET (decl) = 0;
		  else
		    {
		      DECL_POINTER_ALIAS_SET (decl) = new_alias_set ();
		      record_alias_subset (pointed_to_alias_set,
					   DECL_POINTER_ALIAS_SET (decl));
		    }
		}

	      /* We use the alias set indicated in the declaration.  */
	      return DECL_POINTER_ALIAS_SET (decl);
	    }

	  /* If we have an INDIRECT_REF via a void pointer, we don't
	     know anything about what that might alias.  */
	  else if (TREE_CODE (TREE_TYPE (inner)) == VOID_TYPE)
	    return 0;
	}

      /* Otherwise, pick up the outermost object that we could have a pointer
	 to, processing conversion and PLACEHOLDER_EXPR as above.  */
      placeholder_ptr = 0;
      while (TREE_CODE (t) == PLACEHOLDER_EXPR
	     || (handled_component_p (t) && ! can_address_p (t)))
	{
	  if (TREE_CODE (t) == PLACEHOLDER_EXPR)
	    t = find_placeholder (t, &placeholder_ptr);
	  else
	    t = TREE_OPERAND (t, 0);

	  STRIP_NOPS (t);
	}

      /* If we've already determined the alias set for a decl, just return
	 it.  This is necessary for C++ anonymous unions, whose component
	 variables don't look like union members (boo!).  */
      if (TREE_CODE (t) == VAR_DECL
	  && DECL_RTL_SET_P (t) && GET_CODE (DECL_RTL (t)) == MEM)
	return MEM_ALIAS_SET (DECL_RTL (t));

      /* Now all we care about is the type.  */
      t = TREE_TYPE (t);
    }

  /* Variant qualifiers don't affect the alias set, so get the main
     variant.  If this is a type with a known alias set, return it.  */
  t = TYPE_MAIN_VARIANT (t);
  if (TYPE_ALIAS_SET_KNOWN_P (t))
    return TYPE_ALIAS_SET (t);

  /* See if the language has special handling for this type.  */
  set = (*lang_hooks.get_alias_set) (t);
  if (set != -1)
    return set;

  /* There are no objects of FUNCTION_TYPE, so there's no point in
     using up an alias set for them.  (There are, of course, pointers
     and references to functions, but that's different.)  */
  else if (TREE_CODE (t) == FUNCTION_TYPE)
    set = 0;

  /* Unless the language specifies otherwise, let vector types alias
     their components.  This avoids some nasty type punning issues in
     normal usage.  And indeed lets vectors be treated more like an
     array slice.  */
  else if (TREE_CODE (t) == VECTOR_TYPE)
    set = get_alias_set (TREE_TYPE (t));

  else
    /* Otherwise make a new alias set for this type.  */
    set = new_alias_set ();

  TYPE_ALIAS_SET (t) = set;

  /* If this is an aggregate type, we must record any component aliasing
     information.  */
  if (AGGREGATE_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
    record_component_aliases (t);

  return set;
}

/* Return a brand-new alias set.  */

HOST_WIDE_INT
new_alias_set (void)
{
  static HOST_WIDE_INT last_alias_set;

  if (flag_strict_aliasing)
    {
      if (!alias_sets)
	VARRAY_GENERIC_PTR_INIT (alias_sets, 10, "alias sets");
      else
	VARRAY_GROW (alias_sets, last_alias_set + 2);
      return ++last_alias_set;
    }
  else
    return 0;
}

/* Indicate that things in SUBSET can alias things in SUPERSET, but that
   not everything that aliases SUPERSET also aliases SUBSET.  For example,
   in C, a store to an `int' can alias a load of a structure containing an
   `int', and vice versa.  But it can't alias a load of a `double' member
   of the same structure.  Here, the structure would be the SUPERSET and
   `int' the SUBSET.  This relationship is also described in the comment at
   the beginning of this file.

   This function should be called only once per SUPERSET/SUBSET pair.

   It is illegal for SUPERSET to be zero; everything is implicitly a
   subset of alias set zero.  */

void
record_alias_subset (HOST_WIDE_INT superset, HOST_WIDE_INT subset)
{
  alias_set_entry superset_entry;
  alias_set_entry subset_entry;

  /* It is possible in complex type situations for both sets to be the same,
     in which case we can ignore this operation.  */
  if (superset == subset)
    return;

  if (superset == 0)
    abort ();

  superset_entry = get_alias_set_entry (superset);
  if (superset_entry == 0)
    {
      /* Create an entry for the SUPERSET, so that we have a place to
	 attach the SUBSET.  */
      superset_entry = xmalloc (sizeof (struct alias_set_entry));
      superset_entry->alias_set = superset;
      superset_entry->children
	= splay_tree_new (splay_tree_compare_ints, 0, 0);
      superset_entry->has_zero_child = 0;
      VARRAY_GENERIC_PTR (alias_sets, superset) = superset_entry;
    }

  if (subset == 0)
    superset_entry->has_zero_child = 1;
  else
    {
      subset_entry = get_alias_set_entry (subset);
      /* If there is an entry for the subset, enter all of its children
	 (if they are not already present) as children of the SUPERSET.  */
      if (subset_entry)
	{
	  if (subset_entry->has_zero_child)
	    superset_entry->has_zero_child = 1;

	  splay_tree_foreach (subset_entry->children, insert_subset_children,
			      superset_entry->children);
	}

      /* Enter the SUBSET itself as a child of the SUPERSET.  */
      splay_tree_insert (superset_entry->children,
			 (splay_tree_key) subset, 0);
    }
}

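/* Usage sketch (illustrative, editorial): for

     struct S { int i; double d; };

   record_component_aliases below ends up doing, in effect,

     record_alias_subset (get_alias_set (S), get_alias_set (int));
     record_alias_subset (get_alias_set (S), get_alias_set (double));

   after which alias_sets_conflict_p reports a conflict between
   `struct S' and `int', but still none between `int' and `double'.  */
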
/* Record that component types of TYPE, if any, are part of that type for
   aliasing purposes.  For record types, we only record component types
   for fields that are marked addressable.  For array types, we always
   record the component types, so the front end should not call this
   function if the individual components aren't addressable.  */

void
record_component_aliases (tree type)
{
  HOST_WIDE_INT superset = get_alias_set (type);
  tree field;

  if (superset == 0)
    return;

  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      if (! TYPE_NONALIASED_COMPONENT (type))
	record_alias_subset (superset, get_alias_set (TREE_TYPE (type)));
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      /* Recursively record aliases for the base classes, if there are any.  */
      if (TYPE_BINFO (type) != NULL && TYPE_BINFO_BASETYPES (type) != NULL)
	{
	  int i;
	  for (i = 0; i < TREE_VEC_LENGTH (TYPE_BINFO_BASETYPES (type)); i++)
	    {
	      tree binfo = TREE_VEC_ELT (TYPE_BINFO_BASETYPES (type), i);
	      record_alias_subset (superset,
				   get_alias_set (BINFO_TYPE (binfo)));
	    }
	}
      for (field = TYPE_FIELDS (type); field != 0; field = TREE_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL && ! DECL_NONADDRESSABLE_P (field))
	  record_alias_subset (superset, get_alias_set (TREE_TYPE (field)));
      break;

    case COMPLEX_TYPE:
      record_alias_subset (superset, get_alias_set (TREE_TYPE (type)));
      break;

    default:
      break;
    }
}

/* Allocate an alias set for use in storing and reading from the varargs
   spill area.  */

HOST_WIDE_INT
get_varargs_alias_set (void)
{
  static HOST_WIDE_INT set = -1;

  if (set == -1)
    set = new_alias_set ();

  return set;
}

/* Likewise, but used for the fixed portions of the frame, e.g., register
   save areas.  */

HOST_WIDE_INT
get_frame_alias_set (void)
{
  static HOST_WIDE_INT set = -1;

  if (set == -1)
    set = new_alias_set ();

  return set;
}

/* Inside SRC, the source of a SET, find a base address.  */

static rtx
find_base_value (rtx src)
{
  unsigned int regno;

  switch (GET_CODE (src))
    {
    case SYMBOL_REF:
    case LABEL_REF:
      return src;

    case REG:
      regno = REGNO (src);
      /* At the start of a function, argument registers have known base
	 values which may be lost later.  Returning an ADDRESS
	 expression here allows optimization based on argument values
	 even when the argument registers are used for other purposes.  */
      if (regno < FIRST_PSEUDO_REGISTER && copying_arguments)
	return new_reg_base_value[regno];

      /* If a pseudo has a known base value, return it.  Do not do this
	 for non-fixed hard regs since it can result in a circular
	 dependency chain for registers which have values at function entry.

	 The test above is not sufficient because the scheduler may move
	 a copy out of an arg reg past the NOTE_INSN_FUNCTION_BEGIN.  */
      if ((regno >= FIRST_PSEUDO_REGISTER || fixed_regs[regno])
	  && regno < reg_base_value_size)
	{
	  /* If we're inside init_alias_analysis, use new_reg_base_value
	     to reduce the number of relaxation iterations.  */
	  if (new_reg_base_value && new_reg_base_value[regno]
	      && REG_N_SETS (regno) == 1)
	    return new_reg_base_value[regno];

	  if (reg_base_value[regno])
	    return reg_base_value[regno];
	}

      return 0;

    case MEM:
      /* Check for an argument passed in memory.  Only record in the
	 copying-arguments block; it is too hard to track changes
	 otherwise.  */
      if (copying_arguments
	  && (XEXP (src, 0) == arg_pointer_rtx
	      || (GET_CODE (XEXP (src, 0)) == PLUS
		  && XEXP (XEXP (src, 0), 0) == arg_pointer_rtx)))
	return gen_rtx_ADDRESS (VOIDmode, src);
      return 0;

    case CONST:
      src = XEXP (src, 0);
      if (GET_CODE (src) != PLUS && GET_CODE (src) != MINUS)
	break;

      /* ... fall through ...  */

    case PLUS:
    case MINUS:
      {
	rtx temp, src_0 = XEXP (src, 0), src_1 = XEXP (src, 1);

	/* If either operand is a REG that is a known pointer, then it
	   is the base.  */
	if (REG_P (src_0) && REG_POINTER (src_0))
	  return find_base_value (src_0);
	if (REG_P (src_1) && REG_POINTER (src_1))
	  return find_base_value (src_1);

	/* If either operand is a REG, then see if we already have
	   a known value for it.  */
	if (REG_P (src_0))
	  {
	    temp = find_base_value (src_0);
	    if (temp != 0)
	      src_0 = temp;
	  }

	if (REG_P (src_1))
	  {
	    temp = find_base_value (src_1);
	    if (temp != 0)
	      src_1 = temp;
	  }

	/* If either base is a named object or a special address
	   (like an argument or stack reference), then use it for the
	   base term.  */
	if (src_0 != 0
	    && (GET_CODE (src_0) == SYMBOL_REF
		|| GET_CODE (src_0) == LABEL_REF
		|| (GET_CODE (src_0) == ADDRESS
		    && GET_MODE (src_0) != VOIDmode)))
	  return src_0;

	if (src_1 != 0
	    && (GET_CODE (src_1) == SYMBOL_REF
		|| GET_CODE (src_1) == LABEL_REF
		|| (GET_CODE (src_1) == ADDRESS
		    && GET_MODE (src_1) != VOIDmode)))
	  return src_1;

	/* Guess which operand is the base address:
	   If either operand is a symbol, then it is the base.  If
	   either operand is a CONST_INT, then the other is the base.  */
	if (GET_CODE (src_1) == CONST_INT || CONSTANT_P (src_0))
	  return find_base_value (src_0);
	else if (GET_CODE (src_0) == CONST_INT || CONSTANT_P (src_1))
	  return find_base_value (src_1);

	return 0;
      }

    case LO_SUM:
      /* The standard form is (lo_sum reg sym) so look only at the
	 second operand.  */
      return find_base_value (XEXP (src, 1));

    case AND:
      /* If the second operand is constant set the base
	 address to the first operand.  */
      if (GET_CODE (XEXP (src, 1)) == CONST_INT && INTVAL (XEXP (src, 1)) != 0)
	return find_base_value (XEXP (src, 0));
      return 0;

    case TRUNCATE:
      if (GET_MODE_SIZE (GET_MODE (src)) < GET_MODE_SIZE (Pmode))
	break;
      /* Fall through.  */
    case HIGH:
    case PRE_INC:
    case PRE_DEC:
    case POST_INC:
    case POST_DEC:
    case PRE_MODIFY:
    case POST_MODIFY:
      return find_base_value (XEXP (src, 0));

    case ZERO_EXTEND:
    case SIGN_EXTEND:	/* used for NT/Alpha pointers */
      {
	rtx temp = find_base_value (XEXP (src, 0));

	if (temp != 0 && CONSTANT_P (temp))
	  temp = convert_memory_address (Pmode, temp);

	return temp;
      }

    default:
      break;
    }

  return 0;
}

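/* Example (illustrative, editorial): for (plus (reg 100) (const_int 8))
   where pseudo 100 has a known base of (symbol_ref "buf"), the PLUS
   case resolves src_0 to that SYMBOL_REF and returns it.  For
   (plus (reg 100) (reg 101)) with no known base for either register,
   none of the guesses applies and the result is 0.  */
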
/* Called from init_alias_analysis indirectly through note_stores.  */

/* While scanning insns to find base values, reg_seen[N] is nonzero if
   register N has been set in this function.  */
static char *reg_seen;

/* Addresses which are known not to alias anything else are identified
   by a unique integer.  */
static int unique_id;

static void
record_set (rtx dest, rtx set, void *data ATTRIBUTE_UNUSED)
{
  unsigned regno;
  rtx src;
  int n;

  if (GET_CODE (dest) != REG)
    return;

  regno = REGNO (dest);

  if (regno >= reg_base_value_size)
    abort ();

  /* If this spans multiple hard registers, then we must indicate that every
     register has an unusable value.  */
  if (regno < FIRST_PSEUDO_REGISTER)
    n = HARD_REGNO_NREGS (regno, GET_MODE (dest));
  else
    n = 1;
  if (n != 1)
    {
      while (--n >= 0)
	{
	  reg_seen[regno + n] = 1;
	  new_reg_base_value[regno + n] = 0;
	}
      return;
    }

  if (set)
    {
      /* A CLOBBER wipes out any old value but does not prevent a previously
	 unset register from acquiring a base address (i.e. reg_seen is not
	 set).  */
      if (GET_CODE (set) == CLOBBER)
	{
	  new_reg_base_value[regno] = 0;
	  return;
	}
      src = SET_SRC (set);
    }
  else
    {
      if (reg_seen[regno])
	{
	  new_reg_base_value[regno] = 0;
	  return;
	}
      reg_seen[regno] = 1;
      new_reg_base_value[regno] = gen_rtx_ADDRESS (Pmode,
						   GEN_INT (unique_id++));
      return;
    }

  /* This is not the first set.  If the new value is not related to the
     old value, forget the base value.  Note that the following code is
     not detected:
     extern int x, y;  int *p = &x; p += (&y-&x);
     ANSI C does not allow computing the difference of addresses
     of distinct top level objects.  */
  if (new_reg_base_value[regno])
    switch (GET_CODE (src))
      {
      case LO_SUM:
      case MINUS:
	if (XEXP (src, 0) != dest && XEXP (src, 1) != dest)
	  new_reg_base_value[regno] = 0;
	break;
      case PLUS:
	/* If the value we add in the PLUS is also a valid base value,
	   this might be the actual base value, and the original value
	   an index.  */
	{
	  rtx other = NULL_RTX;

	  if (XEXP (src, 0) == dest)
	    other = XEXP (src, 1);
	  else if (XEXP (src, 1) == dest)
	    other = XEXP (src, 0);

	  if (! other || find_base_value (other))
	    new_reg_base_value[regno] = 0;
	  break;
	}
      case AND:
	if (XEXP (src, 0) != dest || GET_CODE (XEXP (src, 1)) != CONST_INT)
	  new_reg_base_value[regno] = 0;
	break;
      default:
	new_reg_base_value[regno] = 0;
	break;
      }
  /* If this is the first set of a register, record the value.  */
  else if ((regno >= FIRST_PSEUDO_REGISTER || ! fixed_regs[regno])
	   && ! reg_seen[regno] && new_reg_base_value[regno] == 0)
    new_reg_base_value[regno] = find_base_value (src);

  reg_seen[regno] = 1;
}

/* Called from loop optimization when a new pseudo-register is
   created.  It indicates that REGNO is being set to VAL.  If INVARIANT
   is true then this value also describes an invariant relationship
   which can be used to deduce that two registers with unknown values
   are different.  */

void
record_base_value (unsigned int regno, rtx val, int invariant)
{
  if (regno >= reg_base_value_size)
    return;

  if (invariant && alias_invariant)
    alias_invariant[regno] = val;

  if (GET_CODE (val) == REG)
    {
      if (REGNO (val) < reg_base_value_size)
	reg_base_value[regno] = reg_base_value[REGNO (val)];

      return;
    }

  reg_base_value[regno] = find_base_value (val);
}

/* Clear alias info for a register.  This is used if an RTL transformation
   changes the value of a register.  This is used in flow by AUTO_INC_DEC
   optimizations.  We don't need to clear reg_base_value, since flow only
   changes the offset.  */

void
clear_reg_alias_info (rtx reg)
{
  unsigned int regno = REGNO (reg);

  if (regno < reg_known_value_size && regno >= FIRST_PSEUDO_REGISTER)
    reg_known_value[regno] = reg;
}

/* Returns a canonical version of X, from the point of view of alias
   analysis.  (For example, if X is a MEM whose address is a register,
   and the register has a known value (say a SYMBOL_REF), then a MEM
   whose address is the SYMBOL_REF is returned.)  */

rtx
canon_rtx (rtx x)
{
  /* Recursively look for equivalences.  */
  if (GET_CODE (x) == REG && REGNO (x) >= FIRST_PSEUDO_REGISTER
      && REGNO (x) < reg_known_value_size)
    return reg_known_value[REGNO (x)] == x
      ? x : canon_rtx (reg_known_value[REGNO (x)]);
  else if (GET_CODE (x) == PLUS)
    {
      rtx x0 = canon_rtx (XEXP (x, 0));
      rtx x1 = canon_rtx (XEXP (x, 1));

      if (x0 != XEXP (x, 0) || x1 != XEXP (x, 1))
	{
	  if (GET_CODE (x0) == CONST_INT)
	    return plus_constant (x1, INTVAL (x0));
	  else if (GET_CODE (x1) == CONST_INT)
	    return plus_constant (x0, INTVAL (x1));
	  return gen_rtx_PLUS (GET_MODE (x), x0, x1);
	}
    }

  /* This gives us much better alias analysis when called from
     the loop optimizer.  Note we want to leave the original
     MEM alone, but need to return the canonicalized MEM with
     all the flags with their original values.  */
  else if (GET_CODE (x) == MEM)
    x = replace_equiv_address_nv (x, canon_rtx (XEXP (x, 0)));

  return x;
}

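/* Example (illustrative, editorial): if reg_known_value[100] is
   (symbol_ref "x"), then canon_rtx on (plus (reg 100) (const_int 4))
   canonicalizes the REG to the SYMBOL_REF and folds the addition via
   plus_constant, yielding (const (plus (symbol_ref "x") (const_int 4))).  */
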
/* Return 1 if X and Y are identical-looking rtx's.
   Expect that X and Y have already been canonicalized.

   We use the data in reg_known_value above to see if two registers with
   different numbers are, in fact, equivalent.  */

static int
rtx_equal_for_memref_p (rtx x, rtx y)
{
  int i;
  int j;
  enum rtx_code code;
  const char *fmt;

  if (x == 0 && y == 0)
    return 1;
  if (x == 0 || y == 0)
    return 0;

  if (x == y)
    return 1;

  code = GET_CODE (x);
  /* Rtx's of different codes cannot be equal.  */
  if (code != GET_CODE (y))
    return 0;

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.
     (REG:SI x) and (REG:HI x) are NOT equivalent.  */

  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  /* Some RTL can be compared without a recursive examination.  */
  switch (code)
    {
    case VALUE:
      return CSELIB_VAL_PTR (x) == CSELIB_VAL_PTR (y);

    case REG:
      return REGNO (x) == REGNO (y);

    case LABEL_REF:
      return XEXP (x, 0) == XEXP (y, 0);

    case SYMBOL_REF:
      return XSTR (x, 0) == XSTR (y, 0);

    case CONST_INT:
    case CONST_DOUBLE:
      /* There's no need to compare the contents of CONST_DOUBLEs or
	 CONST_INTs because pointer equality is a good enough
	 comparison for these nodes.  */
      return 0;

    case ADDRESSOF:
      return (XINT (x, 1) == XINT (y, 1)
	      && rtx_equal_for_memref_p (XEXP (x, 0),
					 XEXP (y, 0)));

    default:
      break;
    }

  /* canon_rtx knows how to handle plus.  No need to canonicalize.  */
  if (code == PLUS)
    return ((rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 0))
	     && rtx_equal_for_memref_p (XEXP (x, 1), XEXP (y, 1)))
	    || (rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 1))
		&& rtx_equal_for_memref_p (XEXP (x, 1), XEXP (y, 0))));
  /* For commutative operations, the RTXs match if the operands match in any
     order.  Also handle the simple binary and unary cases without a loop.  */
  if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
    {
      rtx xop0 = canon_rtx (XEXP (x, 0));
      rtx yop0 = canon_rtx (XEXP (y, 0));
      rtx yop1 = canon_rtx (XEXP (y, 1));

      return ((rtx_equal_for_memref_p (xop0, yop0)
	       && rtx_equal_for_memref_p (canon_rtx (XEXP (x, 1)), yop1))
	      || (rtx_equal_for_memref_p (xop0, yop1)
		  && rtx_equal_for_memref_p (canon_rtx (XEXP (x, 1)), yop0)));
    }
  else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
    {
      return (rtx_equal_for_memref_p (canon_rtx (XEXP (x, 0)),
				      canon_rtx (XEXP (y, 0)))
	      && rtx_equal_for_memref_p (canon_rtx (XEXP (x, 1)),
					 canon_rtx (XEXP (y, 1))));
    }
  else if (GET_RTX_CLASS (code) == '1')
    return rtx_equal_for_memref_p (canon_rtx (XEXP (x, 0)),
				   canon_rtx (XEXP (y, 0)));

  /* Compare the elements.  If any pair of corresponding elements
     fail to match, return 0 for the whole thing.

     Limit cases to types which actually appear in addresses.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      switch (fmt[i])
	{
	case 'i':
	  if (XINT (x, i) != XINT (y, i))
	    return 0;
	  break;

	case 'E':
	  /* Two vectors must have the same length.  */
	  if (XVECLEN (x, i) != XVECLEN (y, i))
	    return 0;

	  /* And the corresponding elements must match.  */
	  for (j = 0; j < XVECLEN (x, i); j++)
	    if (rtx_equal_for_memref_p (canon_rtx (XVECEXP (x, i, j)),
					canon_rtx (XVECEXP (y, i, j))) == 0)
	      return 0;
	  break;

	case 'e':
	  if (rtx_equal_for_memref_p (canon_rtx (XEXP (x, i)),
				      canon_rtx (XEXP (y, i))) == 0)
	    return 0;
	  break;

	/* This can happen for asm operands.  */
	case 's':
	  if (strcmp (XSTR (x, i), XSTR (y, i)))
	    return 0;
	  break;

	/* This can happen for an asm which clobbers memory.  */
	case '0':
	  break;

	  /* It is believed that rtx's at this level will never
	     contain anything but integers and other rtx's,
	     except for within LABEL_REFs and SYMBOL_REFs.  */
	default:
	  abort ();
	}
    }
  return 1;
}

/* Given an rtx X, find a SYMBOL_REF or LABEL_REF within
   X and return it, or return 0 if none found.  */

static rtx
find_symbolic_term (rtx x)
{
  int i;
  enum rtx_code code;
  const char *fmt;

  code = GET_CODE (x);
  if (code == SYMBOL_REF || code == LABEL_REF)
    return x;
  if (GET_RTX_CLASS (code) == 'o')
    return 0;

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      rtx t;

      if (fmt[i] == 'e')
	{
	  t = find_symbolic_term (XEXP (x, i));
	  if (t != 0)
	    return t;
	}
      else if (fmt[i] == 'E')
	break;
    }

  return 0;
}

rtx
find_base_term (rtx x)
{
  cselib_val *val;
  struct elt_loc_list *l;

#if defined (FIND_BASE_TERM)
  /* Try machine-dependent ways to find the base term.  */
  x = FIND_BASE_TERM (x);
#endif

  switch (GET_CODE (x))
    {
    case REG:
      return REG_BASE_VALUE (x);

    case TRUNCATE:
      if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (Pmode))
	return 0;
      /* Fall through.  */
    case HIGH:
    case PRE_INC:
    case PRE_DEC:
    case POST_INC:
    case POST_DEC:
    case PRE_MODIFY:
    case POST_MODIFY:
      return find_base_term (XEXP (x, 0));

    case ZERO_EXTEND:
    case SIGN_EXTEND:	/* Used for Alpha/NT pointers */
      {
	rtx temp = find_base_term (XEXP (x, 0));

	if (temp != 0 && CONSTANT_P (temp))
	  temp = convert_memory_address (Pmode, temp);

	return temp;
      }

    case VALUE:
      val = CSELIB_VAL_PTR (x);
      for (l = val->locs; l; l = l->next)
	if ((x = find_base_term (l->loc)) != 0)
	  return x;
      return 0;

    case CONST:
      x = XEXP (x, 0);
      if (GET_CODE (x) != PLUS && GET_CODE (x) != MINUS)
	return 0;
      /* Fall through.  */
    case LO_SUM:
    case PLUS:
    case MINUS:
      {
	rtx tmp1 = XEXP (x, 0);
	rtx tmp2 = XEXP (x, 1);

	/* This is a little bit tricky since we have to determine which of
	   the two operands represents the real base address.  Otherwise this
	   routine may return the index register instead of the base register.

	   That may cause us to believe no aliasing was possible, when in
	   fact aliasing is possible.

	   We use a few simple tests to guess the base register.  Additional
	   tests can certainly be added.  For example, if one of the operands
	   is a shift or multiply, then it must be the index register and the
	   other operand is the base register.  */

	if (tmp1 == pic_offset_table_rtx && CONSTANT_P (tmp2))
	  return find_base_term (tmp2);

	/* If either operand is known to be a pointer, then use it
	   to determine the base term.  */
	if (REG_P (tmp1) && REG_POINTER (tmp1))
	  return find_base_term (tmp1);

	if (REG_P (tmp2) && REG_POINTER (tmp2))
	  return find_base_term (tmp2);

	/* Neither operand was known to be a pointer.  Go ahead and find the
	   base term for both operands.  */
	tmp1 = find_base_term (tmp1);
	tmp2 = find_base_term (tmp2);

	/* If either base term is a named object or a special address
	   (like an argument or stack reference), then use it for the
	   base term.  */
	if (tmp1 != 0
	    && (GET_CODE (tmp1) == SYMBOL_REF
		|| GET_CODE (tmp1) == LABEL_REF
		|| (GET_CODE (tmp1) == ADDRESS
		    && GET_MODE (tmp1) != VOIDmode)))
	  return tmp1;

	if (tmp2 != 0
	    && (GET_CODE (tmp2) == SYMBOL_REF
		|| GET_CODE (tmp2) == LABEL_REF
		|| (GET_CODE (tmp2) == ADDRESS
		    && GET_MODE (tmp2) != VOIDmode)))
	  return tmp2;

	/* We could not determine which of the two operands was the
	   base register and which was the index.  So we can determine
	   nothing from the base alias check.  */
	return 0;
      }

    case AND:
      if (GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) != 0)
	return find_base_term (XEXP (x, 0));
      return 0;

    case SYMBOL_REF:
    case LABEL_REF:
      return x;

    case ADDRESSOF:
      return REG_BASE_VALUE (frame_pointer_rtx);

    default:
      return 0;
    }
}

/* Return 0 if the addresses X and Y are known to point to different
   objects, 1 if they might be pointers to the same object.  */

static int
base_alias_check (rtx x, rtx y, enum machine_mode x_mode,
		  enum machine_mode y_mode)
{
  rtx x_base = find_base_term (x);
  rtx y_base = find_base_term (y);

  /* If the address itself has no known base see if a known equivalent
     value has one.  If either address still has no known base, nothing
     is known about aliasing.  */
  if (x_base == 0)
    {
      rtx x_c;

      if (! flag_expensive_optimizations || (x_c = canon_rtx (x)) == x)
	return 1;

      x_base = find_base_term (x_c);
      if (x_base == 0)
	return 1;
    }

  if (y_base == 0)
    {
      rtx y_c;
      if (! flag_expensive_optimizations || (y_c = canon_rtx (y)) == y)
	return 1;

      y_base = find_base_term (y_c);
      if (y_base == 0)
	return 1;
    }

  /* If the base addresses are equal nothing is known about aliasing.  */
  if (rtx_equal_p (x_base, y_base))
    return 1;

  /* The base addresses of the read and write are different expressions.
     If they are both symbols and they are not accessed via AND, there is
     no conflict.  We can bring knowledge of object alignment into play
     here.  For example, on alpha, "char a, b;" can alias one another,
     though "char a; long b;" cannot.  */
  if (GET_CODE (x_base) != ADDRESS && GET_CODE (y_base) != ADDRESS)
    {
      if (GET_CODE (x) == AND && GET_CODE (y) == AND)
	return 1;
      if (GET_CODE (x) == AND
	  && (GET_CODE (XEXP (x, 1)) != CONST_INT
	      || (int) GET_MODE_UNIT_SIZE (y_mode) < -INTVAL (XEXP (x, 1))))
	return 1;
      if (GET_CODE (y) == AND
	  && (GET_CODE (XEXP (y, 1)) != CONST_INT
	      || (int) GET_MODE_UNIT_SIZE (x_mode) < -INTVAL (XEXP (y, 1))))
	return 1;
      /* Differing symbols never alias.  */
      return 0;
    }

  /* If one address is a stack reference there can be no alias:
     stack references using different base registers do not alias,
     a stack reference can not alias a parameter, and a stack reference
     can not alias a global.  */
  if ((GET_CODE (x_base) == ADDRESS && GET_MODE (x_base) == Pmode)
      || (GET_CODE (y_base) == ADDRESS && GET_MODE (y_base) == Pmode))
    return 0;

  if (! flag_argument_noalias)
    return 1;

  if (flag_argument_noalias > 1)
    return 0;

  /* Weak noalias assertion (arguments are distinct, but may match globals).  */
  return ! (GET_MODE (x_base) == VOIDmode && GET_MODE (y_base) == VOIDmode);
}

/* Convert the address X into something we can use.  This is done by returning
   it unchanged unless it is a value; in the latter case we call cselib to get
   a more useful rtx.  */

rtx
get_addr (rtx x)
{
  cselib_val *v;
  struct elt_loc_list *l;

  if (GET_CODE (x) != VALUE)
    return x;
  v = CSELIB_VAL_PTR (x);
  for (l = v->locs; l; l = l->next)
    if (CONSTANT_P (l->loc))
      return l->loc;
  for (l = v->locs; l; l = l->next)
    if (GET_CODE (l->loc) != REG && GET_CODE (l->loc) != MEM)
      return l->loc;
  if (v->locs)
    return v->locs->loc;
  return x;
}

/* Return the address of the (N_REFS + 1)th memory reference to ADDR
   where SIZE is the size in bytes of the memory reference.  If ADDR
   is not modified by the memory reference then ADDR is returned.  */

rtx
addr_side_effect_eval (rtx addr, int size, int n_refs)
{
  int offset = 0;

  switch (GET_CODE (addr))
    {
    case PRE_INC:
      offset = (n_refs + 1) * size;
      break;
    case PRE_DEC:
      offset = -(n_refs + 1) * size;
      break;
    case POST_INC:
      offset = n_refs * size;
      break;
    case POST_DEC:
      offset = -n_refs * size;
      break;

    default:
      return addr;
    }

  if (offset)
    addr = gen_rtx_PLUS (GET_MODE (addr), XEXP (addr, 0),
			 GEN_INT (offset));
  else
    addr = XEXP (addr, 0);
  addr = canon_rtx (addr);

  return addr;
}

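/* Example (illustrative, editorial): with SIZE == 4 and N_REFS == 0, a
   (post_inc (reg 100)) address evaluates to (reg 100) itself, since the
   increment happens after the first reference, while
   (pre_inc (reg 100)) evaluates to (plus (reg 100) (const_int 4)).  */
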
/* Return nonzero if X and Y (memory addresses) could reference the
   same location in memory.  C is an offset accumulator.  When
   C is nonzero, we are testing aliases between X and Y + C.
   XSIZE is the size in bytes of the X reference,
   similarly YSIZE is the size in bytes for Y.
   Expect that canon_rtx has already been called for X and Y.

   If XSIZE or YSIZE is zero, we do not know the amount of memory being
   referenced (the reference was BLKmode), so make the most pessimistic
   assumptions.

   If XSIZE or YSIZE is negative, we may access memory outside the object
   being referenced as a side effect.  This can happen when using AND to
   align memory references, as is done on the Alpha.

   Nice to notice that varying addresses cannot conflict with fp if no
   local variables had their addresses taken, but that's too hard now.  */

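/* Worked example (illustrative, editorial): for X = (reg 100) with
   XSIZE == 4 and Y = (plus (reg 100) (const_int 2)) with YSIZE == 4,
   the PLUS handling below recurses on the common term with C == 2;
   the addresses are then equal and C >= 0 && XSIZE > C holds, so the
   references overlap and we return 1.  With (const_int 4) instead,
   XSIZE > C fails and the references are disjoint, so we return 0.  */
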
static int
memrefs_conflict_p (int xsize, rtx x, int ysize, rtx y, HOST_WIDE_INT c)
{
  if (GET_CODE (x) == VALUE)
    x = get_addr (x);
  if (GET_CODE (y) == VALUE)
    y = get_addr (y);
  if (GET_CODE (x) == HIGH)
    x = XEXP (x, 0);
  else if (GET_CODE (x) == LO_SUM)
    x = XEXP (x, 1);
  else
    x = addr_side_effect_eval (x, xsize, 0);
  if (GET_CODE (y) == HIGH)
    y = XEXP (y, 0);
  else if (GET_CODE (y) == LO_SUM)
    y = XEXP (y, 1);
  else
    y = addr_side_effect_eval (y, ysize, 0);

  if (rtx_equal_for_memref_p (x, y))
    {
      if (xsize <= 0 || ysize <= 0)
	return 1;
      if (c >= 0 && xsize > c)
	return 1;
      if (c < 0 && ysize+c > 0)
	return 1;
      return 0;
    }

  /* This code used to check for conflicts involving stack references and
     globals but the base address alias code now handles these cases.  */

  if (GET_CODE (x) == PLUS)
    {
      /* The fact that X is canonicalized means that this
	 PLUS rtx is canonicalized.  */
      rtx x0 = XEXP (x, 0);
      rtx x1 = XEXP (x, 1);

      if (GET_CODE (y) == PLUS)
	{
	  /* The fact that Y is canonicalized means that this
	     PLUS rtx is canonicalized.  */
	  rtx y0 = XEXP (y, 0);
	  rtx y1 = XEXP (y, 1);

	  if (rtx_equal_for_memref_p (x1, y1))
	    return memrefs_conflict_p (xsize, x0, ysize, y0, c);
	  if (rtx_equal_for_memref_p (x0, y0))
	    return memrefs_conflict_p (xsize, x1, ysize, y1, c);
	  if (GET_CODE (x1) == CONST_INT)
	    {
	      if (GET_CODE (y1) == CONST_INT)
		return memrefs_conflict_p (xsize, x0, ysize, y0,
					   c - INTVAL (x1) + INTVAL (y1));
	      else
		return memrefs_conflict_p (xsize, x0, ysize, y,
					   c - INTVAL (x1));
	    }
	  else if (GET_CODE (y1) == CONST_INT)
	    return memrefs_conflict_p (xsize, x, ysize, y0, c + INTVAL (y1));

	  return 1;
	}
      else if (GET_CODE (x1) == CONST_INT)
	return memrefs_conflict_p (xsize, x0, ysize, y, c - INTVAL (x1));
    }
  else if (GET_CODE (y) == PLUS)
    {
      /* The fact that Y is canonicalized means that this
	 PLUS rtx is canonicalized.  */
      rtx y0 = XEXP (y, 0);
      rtx y1 = XEXP (y, 1);

      if (GET_CODE (y1) == CONST_INT)
	return memrefs_conflict_p (xsize, x, ysize, y0, c + INTVAL (y1));
      else
	return 1;
    }

  if (GET_CODE (x) == GET_CODE (y))
    switch (GET_CODE (x))
      {
      case MULT:
	{
	  /* Handle cases where we expect the second operands to be the
	     same, and check only whether the first operand would conflict
	     or not.  */
	  rtx x0, y0;
	  rtx x1 = canon_rtx (XEXP (x, 1));
	  rtx y1 = canon_rtx (XEXP (y, 1));
	  if (! rtx_equal_for_memref_p (x1, y1))
	    return 1;
	  x0 = canon_rtx (XEXP (x, 0));
	  y0 = canon_rtx (XEXP (y, 0));
	  if (rtx_equal_for_memref_p (x0, y0))
	    return (xsize == 0 || ysize == 0
		    || (c >= 0 && xsize > c) || (c < 0 && ysize+c > 0));

	  /* Can't properly adjust our sizes.  */
	  if (GET_CODE (x1) != CONST_INT)
	    return 1;
	  xsize /= INTVAL (x1);
	  ysize /= INTVAL (x1);
	  c /= INTVAL (x1);
	  return memrefs_conflict_p (xsize, x0, ysize, y0, c);
	}

      case REG:
	/* Are these registers known not to be equal?  */
	if (alias_invariant)
	  {
	    unsigned int r_x = REGNO (x), r_y = REGNO (y);
	    rtx i_x, i_y;	/* invariant relationships of X and Y */

	    i_x = r_x >= reg_base_value_size ? 0 : alias_invariant[r_x];
	    i_y = r_y >= reg_base_value_size ? 0 : alias_invariant[r_y];

	    if (i_x == 0 && i_y == 0)
	      break;

	    if (! memrefs_conflict_p (xsize, i_x ? i_x : x,
				      ysize, i_y ? i_y : y, c))
	      return 0;
	  }
	break;

      default:
	break;
      }

  /* Treat an access through an AND (e.g. a subword access on an Alpha)
     as an access with indeterminate size.  Assume that references
     besides AND are aligned, so if the size of the other reference is
     at least as large as the alignment, assume no other overlap.  */
  if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT)
    {
      if (GET_CODE (y) == AND || ysize < -INTVAL (XEXP (x, 1)))
	xsize = -1;
      return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)), ysize, y, c);
    }
  if (GET_CODE (y) == AND && GET_CODE (XEXP (y, 1)) == CONST_INT)
    {
      /* ??? If we are indexing far enough into the array/structure, we
	 may yet be able to determine that we can not overlap.  But we
	 also need to know that we are far enough from the end not to
	 overlap a following reference, so we do nothing with that for now.  */
      if (GET_CODE (x) == AND || xsize < -INTVAL (XEXP (y, 1)))
	ysize = -1;
      return memrefs_conflict_p (xsize, x, ysize, canon_rtx (XEXP (y, 0)), c);
    }

  if (GET_CODE (x) == ADDRESSOF)
    {
      if (y == frame_pointer_rtx
	  || GET_CODE (y) == ADDRESSOF)
	return xsize <= 0 || ysize <= 0;
    }
  if (GET_CODE (y) == ADDRESSOF)
    {
      if (x == frame_pointer_rtx)
	return xsize <= 0 || ysize <= 0;
    }

  if (CONSTANT_P (x))
    {
      if (GET_CODE (x) == CONST_INT && GET_CODE (y) == CONST_INT)
	{
	  c += (INTVAL (y) - INTVAL (x));
	  return (xsize <= 0 || ysize <= 0
		  || (c >= 0 && xsize > c) || (c < 0 && ysize+c > 0));
	}

      if (GET_CODE (x) == CONST)
	{
	  if (GET_CODE (y) == CONST)
	    return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)),
				       ysize, canon_rtx (XEXP (y, 0)), c);
	  else
	    return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)),
				       ysize, y, c);
	}
      if (GET_CODE (y) == CONST)
	return memrefs_conflict_p (xsize, x, ysize,
				   canon_rtx (XEXP (y, 0)), c);

      if (CONSTANT_P (y))
	return (xsize <= 0 || ysize <= 0
		|| (rtx_equal_for_memref_p (x, y)
		    && ((c >= 0 && xsize > c) || (c < 0 && ysize+c > 0))));

      return 1;
    }
  return 1;
}

/* Functions to compute memory dependencies.

   Since we process the insns in execution order, we can build tables
   to keep track of what registers are fixed (and not aliased), what registers
   are varying in known ways, and what registers are varying in unknown
   ways.

   If both memory references are volatile, then there must always be a
   dependence between the two references, since their order can not be
   changed.  A volatile and non-volatile reference can be interchanged
   though.

   A MEM_IN_STRUCT reference at a non-AND varying address can never
   conflict with a non-MEM_IN_STRUCT reference at a fixed address.  We
   also must allow AND addresses, because they may generate accesses
   outside the object being referenced.  This is used to generate
   aligned addresses from unaligned addresses, for instance, the alpha
   storeqi_unaligned pattern.  */

/* Read dependence: X is read after read in MEM takes place.  There can
   only be a dependence here if both reads are volatile.  */

int
read_dependence (rtx mem, rtx x)
{
  return MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem);
}

/* Returns MEM1 if and only if MEM1 is a scalar at a fixed address and
   MEM2 is a reference to a structure at a varying address, or returns
   MEM2 if vice versa.  Otherwise, returns NULL_RTX.  If a non-NULL
   value is returned MEM1 and MEM2 can never alias.  VARIES_P is used
   to decide whether or not an address may vary; it should return
   nonzero whenever variation is possible.
   MEM1_ADDR and MEM2_ADDR are the addresses of MEM1 and MEM2.  */

static rtx
fixed_scalar_and_varying_struct_p (rtx mem1, rtx mem2, rtx mem1_addr,
				   rtx mem2_addr,
				   int (*varies_p) (rtx, int))
{
  if (! flag_strict_aliasing)
    return NULL_RTX;

  if (MEM_SCALAR_P (mem1) && MEM_IN_STRUCT_P (mem2)
      && !varies_p (mem1_addr, 1) && varies_p (mem2_addr, 1))
    /* MEM1 is a scalar at a fixed address; MEM2 is a struct at a
       varying address.  */
    return mem1;

  if (MEM_IN_STRUCT_P (mem1) && MEM_SCALAR_P (mem2)
      && varies_p (mem1_addr, 1) && !varies_p (mem2_addr, 1))
    /* MEM2 is a scalar at a fixed address; MEM1 is a struct at a
       varying address.  */
    return mem2;

  return NULL_RTX;
}

/* Returns nonzero if something about the mode or address format of MEM
   indicates that it might well alias *anything*.  */

static int
aliases_everything_p (rtx mem)
{
  if (GET_CODE (XEXP (mem, 0)) == AND)
    /* If the address is an AND, it's very hard to know what it is
       actually pointing at.  */
    return 1;

  return 0;
}

/* Return true if we can determine that the fields referenced cannot
   overlap for any pair of objects.  */

static bool
nonoverlapping_component_refs_p (tree x, tree y)
{
  tree fieldx, fieldy, typex, typey, orig_y;

  do
    {
      /* The comparison has to be done at a common type, since we don't
         know how the inheritance hierarchy works.  */
      orig_y = y;
      do
        {
          fieldx = TREE_OPERAND (x, 1);
          typex = DECL_FIELD_CONTEXT (fieldx);

          y = orig_y;
          do
            {
              fieldy = TREE_OPERAND (y, 1);
              typey = DECL_FIELD_CONTEXT (fieldy);

              if (typex == typey)
                goto found;

              y = TREE_OPERAND (y, 0);
            }
          while (y && TREE_CODE (y) == COMPONENT_REF);

          x = TREE_OPERAND (x, 0);
        }
      while (x && TREE_CODE (x) == COMPONENT_REF);

      /* Never found a common type.  */
      return false;

    found:
      /* If we're left with accessing different fields of a structure,
         then no overlap.  */
      if (TREE_CODE (typex) == RECORD_TYPE
          && fieldx != fieldy)
        return true;

      /* The comparison on the current field failed.  If we're accessing
         a very nested structure, look at the next outer level.  */
      x = TREE_OPERAND (x, 0);
      y = TREE_OPERAND (y, 0);
    }
  while (x && y
         && TREE_CODE (x) == COMPONENT_REF
         && TREE_CODE (y) == COMPONENT_REF);

  return false;
}
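/* For example, given

       struct A { int i; int j; } a;

   the COMPONENT_REFs for `a.i' and `a.j' share the DECL_FIELD_CONTEXT
   `struct A' but name different FIELD_DECLs, so the function returns
   true: the two accesses cannot overlap.  */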
/* Look at the bottom of the COMPONENT_REF list for a DECL, and return it.  */

static tree
decl_for_component_ref (tree x)
{
  do
    {
      x = TREE_OPERAND (x, 0);
    }
  while (x && TREE_CODE (x) == COMPONENT_REF);

  return x && DECL_P (x) ? x : NULL_TREE;
}
/* Walk up the COMPONENT_REF list and adjust OFFSET to compensate for the
   offset of the field reference.  */

static rtx
adjust_offset_for_component_ref (tree x, rtx offset)
{
  HOST_WIDE_INT ioffset;

  if (! offset)
    return NULL_RTX;

  ioffset = INTVAL (offset);
  do
    {
      tree field = TREE_OPERAND (x, 1);

      if (! host_integerp (DECL_FIELD_OFFSET (field), 1))
        return NULL_RTX;
      ioffset += (tree_low_cst (DECL_FIELD_OFFSET (field), 1)
                  + (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
                     / BITS_PER_UNIT));

      x = TREE_OPERAND (x, 0);
    }
  while (x && TREE_CODE (x) == COMPONENT_REF);

  return GEN_INT (ioffset);
}
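/* A worked example (the field positions are invented): for `s.in.f'
   where `in' sits at DECL_FIELD_OFFSET 8 with DECL_FIELD_BIT_OFFSET 0,
   and `f' sits at DECL_FIELD_OFFSET 0 with DECL_FIELD_BIT_OFFSET 32,
   an incoming OFFSET of 4 accumulates 4 + (0 + 32/8) + (8 + 0) == 16,
   so GEN_INT (16) is returned, assuming BITS_PER_UNIT == 8.  */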
/* Return nonzero if we can determine the exprs corresponding to memrefs
   X and Y and they do not overlap.  */

static int
nonoverlapping_memrefs_p (rtx x, rtx y)
{
  tree exprx = MEM_EXPR (x), expry = MEM_EXPR (y);
  rtx rtlx, rtly;
  rtx basex, basey;
  rtx moffsetx, moffsety;
  HOST_WIDE_INT offsetx = 0, offsety = 0, sizex, sizey, tem;

  /* Unless both have exprs, we can't tell anything.  */
  if (exprx == 0 || expry == 0)
    return 0;

  /* If both are field references, we may be able to determine something.  */
  if (TREE_CODE (exprx) == COMPONENT_REF
      && TREE_CODE (expry) == COMPONENT_REF
      && nonoverlapping_component_refs_p (exprx, expry))
    return 1;

  /* If the field reference test failed, look at the DECLs involved.  */
  moffsetx = MEM_OFFSET (x);
  if (TREE_CODE (exprx) == COMPONENT_REF)
    {
      tree t = decl_for_component_ref (exprx);
      if (! t)
        return 0;
      moffsetx = adjust_offset_for_component_ref (exprx, moffsetx);
      exprx = t;
    }
  else if (TREE_CODE (exprx) == INDIRECT_REF)
    {
      exprx = TREE_OPERAND (exprx, 0);
      if (flag_argument_noalias < 2
          || TREE_CODE (exprx) != PARM_DECL)
        return 0;
    }

  moffsety = MEM_OFFSET (y);
  if (TREE_CODE (expry) == COMPONENT_REF)
    {
      tree t = decl_for_component_ref (expry);
      if (! t)
        return 0;
      moffsety = adjust_offset_for_component_ref (expry, moffsety);
      expry = t;
    }
  else if (TREE_CODE (expry) == INDIRECT_REF)
    {
      expry = TREE_OPERAND (expry, 0);
      if (flag_argument_noalias < 2
          || TREE_CODE (expry) != PARM_DECL)
        return 0;
    }

  if (! DECL_P (exprx) || ! DECL_P (expry))
    return 0;

  rtlx = DECL_RTL (exprx);
  rtly = DECL_RTL (expry);

  /* If either RTL is not a MEM, it must be a REG or CONCAT, meaning they
     can't overlap unless they are the same because we never reuse that part
     of the stack frame used for locals for spilled pseudos.  */
  if ((GET_CODE (rtlx) != MEM || GET_CODE (rtly) != MEM)
      && ! rtx_equal_p (rtlx, rtly))
    return 1;

  /* Get the base and offsets of both decls.  If either is a register, we
     know both are and are the same, so use that as the base.  The only way
     we can avoid overlap is if we can deduce that they are nonoverlapping
     pieces of that decl, which is very rare.  */
  basex = GET_CODE (rtlx) == MEM ? XEXP (rtlx, 0) : rtlx;
  if (GET_CODE (basex) == PLUS && GET_CODE (XEXP (basex, 1)) == CONST_INT)
    offsetx = INTVAL (XEXP (basex, 1)), basex = XEXP (basex, 0);

  basey = GET_CODE (rtly) == MEM ? XEXP (rtly, 0) : rtly;
  if (GET_CODE (basey) == PLUS && GET_CODE (XEXP (basey, 1)) == CONST_INT)
    offsety = INTVAL (XEXP (basey, 1)), basey = XEXP (basey, 0);

  /* If the bases are different, we know they do not overlap if both
     are constants or if one is a constant and the other a pointer into the
     stack frame.  Otherwise a different base means we can't tell if they
     overlap or not.  */
  if (! rtx_equal_p (basex, basey))
    return ((CONSTANT_P (basex) && CONSTANT_P (basey))
            || (CONSTANT_P (basex) && REG_P (basey)
                && REGNO_PTR_FRAME_P (REGNO (basey)))
            || (CONSTANT_P (basey) && REG_P (basex)
                && REGNO_PTR_FRAME_P (REGNO (basex))));

  sizex = (GET_CODE (rtlx) != MEM ? (int) GET_MODE_SIZE (GET_MODE (rtlx))
           : MEM_SIZE (rtlx) ? INTVAL (MEM_SIZE (rtlx))
           : -1);
  sizey = (GET_CODE (rtly) != MEM ? (int) GET_MODE_SIZE (GET_MODE (rtly))
           : MEM_SIZE (rtly) ? INTVAL (MEM_SIZE (rtly))
           : -1);

  /* If we have an offset for either memref, it can update the values computed
     above.  */
  if (moffsetx)
    offsetx += INTVAL (moffsetx), sizex -= INTVAL (moffsetx);
  if (moffsety)
    offsety += INTVAL (moffsety), sizey -= INTVAL (moffsety);

  /* If a memref has both a size and an offset, we can use the smaller size.
     We can't do this if the offset isn't known because we must view this
     memref as being anywhere inside the DECL's MEM.  */
  if (MEM_SIZE (x) && moffsetx)
    sizex = INTVAL (MEM_SIZE (x));
  if (MEM_SIZE (y) && moffsety)
    sizey = INTVAL (MEM_SIZE (y));

  /* Put the values of the memref with the lower offset in X's values.  */
  if (offsetx > offsety)
    {
      tem = offsetx, offsetx = offsety, offsety = tem;
      tem = sizex, sizex = sizey, sizey = tem;
    }

  /* If we don't know the size of the lower-offset value, we can't tell
     if they conflict.  Otherwise, we do the test.  */
  return sizex >= 0 && offsety >= offsetx + sizex;
}
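/* A worked example: two MEMs into the same local array, one covering
   bytes [0, 4) (offsetx == 0, sizex == 4) and one covering bytes
   [8, 12) (offsety == 8).  After the swap puts the lower offset in X,
   the final test is `4 >= 0 && 8 >= 0 + 4', so the function returns 1:
   the references provably do not overlap.  */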
/* True dependence: X is read after store in MEM takes place.  */

int
true_dependence (rtx mem, enum machine_mode mem_mode, rtx x,
                 int (*varies) (rtx, int))
{
  rtx x_addr, mem_addr;
  rtx base;

  if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
    return 1;

  /* (mem:BLK (scratch)) is a special mechanism to conflict with everything.
     This is used in epilogue deallocation functions.  */
  if (GET_MODE (x) == BLKmode && GET_CODE (XEXP (x, 0)) == SCRATCH)
    return 1;
  if (GET_MODE (mem) == BLKmode && GET_CODE (XEXP (mem, 0)) == SCRATCH)
    return 1;

  if (DIFFERENT_ALIAS_SETS_P (x, mem))
    return 0;

  /* Unchanging memory can't conflict with non-unchanging memory.
     A non-unchanging read can conflict with a non-unchanging write.
     An unchanging read can conflict with an unchanging write since
     there may be a single store to this address to initialize it.
     Note that an unchanging store can conflict with a non-unchanging read
     since we have to make conservative assumptions when we have a
     record with readonly fields and we are copying the whole thing.
     Just fall through to the code below to resolve potential conflicts.
     This won't handle all cases optimally, but the possible performance
     loss should be negligible.  */
  if (RTX_UNCHANGING_P (x) && ! RTX_UNCHANGING_P (mem))
    return 0;

  if (nonoverlapping_memrefs_p (mem, x))
    return 0;

  if (mem_mode == VOIDmode)
    mem_mode = GET_MODE (mem);

  x_addr = get_addr (XEXP (x, 0));
  mem_addr = get_addr (XEXP (mem, 0));

  base = find_base_term (x_addr);
  if (base && (GET_CODE (base) == LABEL_REF
               || (GET_CODE (base) == SYMBOL_REF
                   && CONSTANT_POOL_ADDRESS_P (base))))
    return 0;

  if (! base_alias_check (x_addr, mem_addr, GET_MODE (x), mem_mode))
    return 0;

  x_addr = canon_rtx (x_addr);
  mem_addr = canon_rtx (mem_addr);

  if (! memrefs_conflict_p (GET_MODE_SIZE (mem_mode), mem_addr,
                            SIZE_FOR_MODE (x), x_addr, 0))
    return 0;

  if (aliases_everything_p (x))
    return 1;

  /* We cannot use aliases_everything_p to test MEM, since we must look
     at MEM_MODE, rather than GET_MODE (MEM).  */
  if (mem_mode == QImode || GET_CODE (mem_addr) == AND)
    return 1;

  /* In true_dependence we also allow BLKmode to alias anything.  Why
     don't we do this in anti_dependence and output_dependence?  */
  if (mem_mode == BLKmode || GET_MODE (x) == BLKmode)
    return 1;

  return ! fixed_scalar_and_varying_struct_p (mem, x, mem_addr, x_addr,
                                              varies);
}
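/* Usage sketch (illustrative, not quoted from an actual caller): a pass
   asking whether load X must stay after store MEM might write

       if (true_dependence (mem, VOIDmode, x, rtx_varies_p))
         ...treat X as dependent on MEM...

   passing VOIDmode so that MEM's own mode is used for the size check.  */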
/* Canonical true dependence: X is read after store in MEM takes place.
   Variant of true_dependence which assumes MEM has already been
   canonicalized (hence we no longer do that here).
   The mem_addr argument has been added, since true_dependence computed
   this value prior to canonicalizing.  */

int
canon_true_dependence (rtx mem, enum machine_mode mem_mode, rtx mem_addr,
                       rtx x, int (*varies) (rtx, int))
{
  rtx x_addr;

  if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
    return 1;

  /* (mem:BLK (scratch)) is a special mechanism to conflict with everything.
     This is used in epilogue deallocation functions.  */
  if (GET_MODE (x) == BLKmode && GET_CODE (XEXP (x, 0)) == SCRATCH)
    return 1;
  if (GET_MODE (mem) == BLKmode && GET_CODE (XEXP (mem, 0)) == SCRATCH)
    return 1;

  if (DIFFERENT_ALIAS_SETS_P (x, mem))
    return 0;

  /* If X is an unchanging read, then it can't possibly conflict with any
     non-unchanging store.  It may conflict with an unchanging write though,
     because there may be a single store to this address to initialize it.
     Just fall through to the code below to resolve the case where we have
     both an unchanging read and an unchanging write.  This won't handle all
     cases optimally, but the possible performance loss should be
     negligible.  */
  if (RTX_UNCHANGING_P (x) && ! RTX_UNCHANGING_P (mem))
    return 0;

  if (nonoverlapping_memrefs_p (x, mem))
    return 0;

  x_addr = get_addr (XEXP (x, 0));

  if (! base_alias_check (x_addr, mem_addr, GET_MODE (x), mem_mode))
    return 0;

  x_addr = canon_rtx (x_addr);
  if (! memrefs_conflict_p (GET_MODE_SIZE (mem_mode), mem_addr,
                            SIZE_FOR_MODE (x), x_addr, 0))
    return 0;

  if (aliases_everything_p (x))
    return 1;

  /* We cannot use aliases_everything_p to test MEM, since we must look
     at MEM_MODE, rather than GET_MODE (MEM).  */
  if (mem_mode == QImode || GET_CODE (mem_addr) == AND)
    return 1;

  /* In true_dependence we also allow BLKmode to alias anything.  Why
     don't we do this in anti_dependence and output_dependence?  */
  if (mem_mode == BLKmode || GET_MODE (x) == BLKmode)
    return 1;

  return ! fixed_scalar_and_varying_struct_p (mem, x, mem_addr, x_addr,
                                              varies);
}
/* Returns nonzero if a write to X might alias a previous read from
   (or, if WRITEP is nonzero, a write to) MEM.  If CONSTP is nonzero,
   honor the RTX_UNCHANGING_P flags on X and MEM.  */

static int
write_dependence_p (rtx mem, rtx x, int writep, int constp)
{
  rtx x_addr, mem_addr;
  rtx fixed_scalar;
  rtx base;

  if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
    return 1;

  /* (mem:BLK (scratch)) is a special mechanism to conflict with everything.
     This is used in epilogue deallocation functions.  */
  if (GET_MODE (x) == BLKmode && GET_CODE (XEXP (x, 0)) == SCRATCH)
    return 1;
  if (GET_MODE (mem) == BLKmode && GET_CODE (XEXP (mem, 0)) == SCRATCH)
    return 1;

  if (DIFFERENT_ALIAS_SETS_P (x, mem))
    return 0;

  if (constp)
    {
      /* Unchanging memory can't conflict with non-unchanging memory.  */
      if (RTX_UNCHANGING_P (x) != RTX_UNCHANGING_P (mem))
        return 0;

      /* If MEM is an unchanging read, then it can't possibly conflict with
         the store to X, because there is at most one store to MEM, and it
         must have occurred somewhere before MEM.  */
      if (! writep && RTX_UNCHANGING_P (mem))
        return 0;
    }

  if (nonoverlapping_memrefs_p (x, mem))
    return 0;

  x_addr = get_addr (XEXP (x, 0));
  mem_addr = get_addr (XEXP (mem, 0));

  if (! writep)
    {
      base = find_base_term (mem_addr);
      if (base && (GET_CODE (base) == LABEL_REF
                   || (GET_CODE (base) == SYMBOL_REF
                       && CONSTANT_POOL_ADDRESS_P (base))))
        return 0;
    }

  if (! base_alias_check (x_addr, mem_addr, GET_MODE (x),
                          GET_MODE (mem)))
    return 0;

  x_addr = canon_rtx (x_addr);
  mem_addr = canon_rtx (mem_addr);

  if (!memrefs_conflict_p (SIZE_FOR_MODE (mem), mem_addr,
                           SIZE_FOR_MODE (x), x_addr, 0))
    return 0;

  fixed_scalar
    = fixed_scalar_and_varying_struct_p (mem, x, mem_addr, x_addr,
                                         rtx_addr_varies_p);

  return (!(fixed_scalar == mem && !aliases_everything_p (x))
          && !(fixed_scalar == x && !aliases_everything_p (mem)));
}
/* Anti dependence: X is written after read in MEM takes place.  */

int
anti_dependence (rtx mem, rtx x)
{
  return write_dependence_p (mem, x, /*writep=*/0, /*constp=*/1);
}

/* Output dependence: X is written after store in MEM takes place.  */

int
output_dependence (rtx mem, rtx x)
{
  return write_dependence_p (mem, x, /*writep=*/1, /*constp=*/1);
}

/* Unchanging anti dependence: Like anti_dependence but ignores
   the RTX_UNCHANGING_P property on const variable references.  */

int
unchanging_anti_dependence (rtx mem, rtx x)
{
  return write_dependence_p (mem, x, /*writep=*/0, /*constp=*/0);
}
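/* To illustrate the three flavors: for a read of one location followed
   later by a write of another, anti_dependence asks whether the write
   can clobber the location that was read (write-after-read);
   output_dependence asks the analogous question for two writes
   (write-after-write); and unchanging_anti_dependence is the
   write-after-read check with RTX_UNCHANGING_P ignored.  */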
/* A subroutine of nonlocal_mentioned_p, returns 1 if *LOC mentions
   something which is not local to the function and is not constant.  */

static int
nonlocal_mentioned_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
{
  rtx x = *loc;
  rtx base;
  int regno;

  if (! x)
    return 0;

  switch (GET_CODE (x))
    {
    case SUBREG:
      if (GET_CODE (SUBREG_REG (x)) == REG)
        {
          /* Global registers are not local.  */
          if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
              && global_regs[subreg_regno (x)])
            return 1;
          return 0;
        }
      break;

    case REG:
      regno = REGNO (x);
      /* Global registers are not local.  */
      if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
        return 1;
      return 0;

    case SCRATCH:
    case PC:
    case CC0:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CONST:
    case LABEL_REF:
      return 0;

    case SYMBOL_REF:
      /* Constants in the function's constant pool are constant.  */
      if (CONSTANT_POOL_ADDRESS_P (x))
        return 0;
      return 1;

    case CALL:
      /* Non-constant calls and recursion are not local.  */
      return 1;

    case MEM:
      /* Be overly conservative and consider any volatile memory
         reference as not local.  */
      if (MEM_VOLATILE_P (x))
        return 1;
      base = find_base_term (XEXP (x, 0));
      if (base)
        {
          /* A Pmode ADDRESS could be a reference via the structure value
             address or static chain.  Such memory references are nonlocal.

             Thus, we have to examine the contents of the ADDRESS to find
             out if this is a local reference or not.  */
          if (GET_CODE (base) == ADDRESS
              && GET_MODE (base) == Pmode
              && (XEXP (base, 0) == stack_pointer_rtx
                  || XEXP (base, 0) == arg_pointer_rtx
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
                  || XEXP (base, 0) == hard_frame_pointer_rtx
#endif
                  || XEXP (base, 0) == frame_pointer_rtx))
            return 0;
          /* Constants in the function's constant pool are constant.  */
          if (GET_CODE (base) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (base))
            return 0;
        }
      return 1;

    case UNSPEC_VOLATILE:
    case ASM_INPUT:
      return 1;

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 1;

      /* Fall through.  */

    default:
      break;
    }

  return 0;
}
/* Returns nonzero if X might mention something which is not
   local to the function and is not constant.  */

static int
nonlocal_mentioned_p (rtx x)
{
  if (INSN_P (x))
    {
      if (GET_CODE (x) == CALL_INSN)
        {
          if (! CONST_OR_PURE_CALL_P (x))
            return 1;
          x = CALL_INSN_FUNCTION_USAGE (x);
          if (x == 0)
            return 0;
        }
      else
        x = PATTERN (x);
    }

  return for_each_rtx (&x, nonlocal_mentioned_p_1, NULL);
}
/* A subroutine of nonlocal_referenced_p, returns 1 if *LOC references
   something which is not local to the function and is not constant.  */

static int
nonlocal_referenced_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
{
  rtx x = *loc;

  if (! x)
    return 0;

  switch (GET_CODE (x))
    {
    case MEM:
    case REG:
    case SYMBOL_REF:
    case SUBREG:
      return nonlocal_mentioned_p (x);

    case CALL:
      /* Non-constant calls and recursion are not local.  */
      return 1;

    case SET:
      if (nonlocal_mentioned_p (SET_SRC (x)))
        return 1;

      if (GET_CODE (SET_DEST (x)) == MEM)
        return nonlocal_mentioned_p (XEXP (SET_DEST (x), 0));

      /* If the destination is anything other than a CC0, PC,
         MEM, REG, or a SUBREG of a REG that occupies all of
         the REG, then X references nonlocal memory if it is
         mentioned in the destination.  */
      if (GET_CODE (SET_DEST (x)) != CC0
          && GET_CODE (SET_DEST (x)) != PC
          && GET_CODE (SET_DEST (x)) != REG
          && ! (GET_CODE (SET_DEST (x)) == SUBREG
                && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG
                && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
                      + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
                    == ((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
                         + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
        return nonlocal_mentioned_p (SET_DEST (x));
      return 0;

    case CLOBBER:
      if (GET_CODE (XEXP (x, 0)) == MEM)
        return nonlocal_mentioned_p (XEXP (XEXP (x, 0), 0));
      return 0;

    case USE:
      return nonlocal_mentioned_p (XEXP (x, 0));

    case ASM_INPUT:
    case UNSPEC_VOLATILE:
      return 1;

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 1;

      /* Fall through.  */

    default:
      break;
    }

  return 0;
}
/* Returns nonzero if X might reference something which is not
   local to the function and is not constant.  */

static int
nonlocal_referenced_p (rtx x)
{
  if (INSN_P (x))
    {
      if (GET_CODE (x) == CALL_INSN)
        {
          if (! CONST_OR_PURE_CALL_P (x))
            return 1;
          x = CALL_INSN_FUNCTION_USAGE (x);
          if (x == 0)
            return 0;
        }
      else
        x = PATTERN (x);
    }

  return for_each_rtx (&x, nonlocal_referenced_p_1, NULL);
}
/* A subroutine of nonlocal_set_p, returns 1 if *LOC sets
   something which is not local to the function and is not constant.  */

static int
nonlocal_set_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
{
  rtx x = *loc;

  if (! x)
    return 0;

  switch (GET_CODE (x))
    {
    case CALL:
      /* Non-constant calls and recursion are not local.  */
      return 1;

    case PRE_INC:
    case PRE_DEC:
    case POST_INC:
    case POST_DEC:
    case PRE_MODIFY:
    case POST_MODIFY:
      return nonlocal_mentioned_p (XEXP (x, 0));

    case SET:
      if (nonlocal_mentioned_p (SET_DEST (x)))
        return 1;
      return nonlocal_set_p (SET_SRC (x));

    case CLOBBER:
      return nonlocal_mentioned_p (XEXP (x, 0));

    case USE:
      return 0;

    case ASM_INPUT:
    case UNSPEC_VOLATILE:
      return 1;

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 1;

      /* Fall through.  */

    default:
      break;
    }

  return 0;
}
/* Returns nonzero if X might set something which is not
   local to the function and is not constant.  */

static int
nonlocal_set_p (rtx x)
{
  if (INSN_P (x))
    {
      if (GET_CODE (x) == CALL_INSN)
        {
          if (! CONST_OR_PURE_CALL_P (x))
            return 1;
          x = CALL_INSN_FUNCTION_USAGE (x);
          if (x == 0)
            return 0;
        }
      else
        x = PATTERN (x);
    }

  return for_each_rtx (&x, nonlocal_set_p_1, NULL);
}
/* Mark the function if it is pure or constant.  */

void
mark_constant_function (void)
{
  rtx insn;
  int nonlocal_memory_referenced;

  if (TREE_READONLY (current_function_decl)
      || DECL_IS_PURE (current_function_decl)
      || TREE_THIS_VOLATILE (current_function_decl)
      || current_function_has_nonlocal_goto
      || !(*targetm.binds_local_p) (current_function_decl))
    return;

  /* A loop might not return, which counts as a side effect.  */
  if (mark_dfs_back_edges ())
    return;

  nonlocal_memory_referenced = 0;

  init_alias_analysis ();

  /* Determine if this is a constant or pure function.  */

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    {
      if (! INSN_P (insn))
        continue;

      if (nonlocal_set_p (insn) || global_reg_mentioned_p (insn)
          || volatile_refs_p (PATTERN (insn)))
        break;

      if (! nonlocal_memory_referenced)
        nonlocal_memory_referenced = nonlocal_referenced_p (insn);
    }

  end_alias_analysis ();

  /* Mark the function.  */

  if (insn)
    ;
  else if (nonlocal_memory_referenced)
    {
      cgraph_rtl_info (current_function_decl)->pure_function = 1;
      DECL_IS_PURE (current_function_decl) = 1;
    }
  else
    {
      cgraph_rtl_info (current_function_decl)->const_function = 1;
      TREE_READONLY (current_function_decl) = 1;
    }
}
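/* For illustration (hypothetical functions): compiling

       int sq (int x) { return x * x; }

   finds no nonlocal references, so the function can be marked const
   (TREE_READONLY), whereas

       int peek (int *p) { return *p; }

   reads nonlocal memory and would at best be marked DECL_IS_PURE.  */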
void
init_alias_once (void)
{
  int i;

#ifndef OUTGOING_REGNO
#define OUTGOING_REGNO(N) N
#endif
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    /* Check whether this register can hold an incoming pointer
       argument.  FUNCTION_ARG_REGNO_P tests outgoing register
       numbers, so translate if necessary due to register windows.  */
    if (FUNCTION_ARG_REGNO_P (OUTGOING_REGNO (i))
        && HARD_REGNO_MODE_OK (i, Pmode))
      static_reg_base_value[i]
        = gen_rtx_ADDRESS (VOIDmode, gen_rtx_REG (Pmode, i));

  static_reg_base_value[STACK_POINTER_REGNUM]
    = gen_rtx_ADDRESS (Pmode, stack_pointer_rtx);
  static_reg_base_value[ARG_POINTER_REGNUM]
    = gen_rtx_ADDRESS (Pmode, arg_pointer_rtx);
  static_reg_base_value[FRAME_POINTER_REGNUM]
    = gen_rtx_ADDRESS (Pmode, frame_pointer_rtx);
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
  static_reg_base_value[HARD_FRAME_POINTER_REGNUM]
    = gen_rtx_ADDRESS (Pmode, hard_frame_pointer_rtx);
#endif
}
/* Set MEMORY_MODIFIED when X modifies DATA (which is assumed
   to be a memory reference).  */

static bool memory_modified;

static void
memory_modified_1 (rtx x, rtx pat ATTRIBUTE_UNUSED, void *data)
{
  if (GET_CODE (x) == MEM)
    {
      if (anti_dependence (x, (rtx) data) || output_dependence (x, (rtx) data))
        memory_modified = true;
    }
}

/* Return true when INSN possibly modifies the memory contents of MEM
   (i.e., the address can be modified).  */

bool
memory_modified_in_insn_p (rtx mem, rtx insn)
{
  if (!INSN_P (insn))
    return false;
  memory_modified = false;
  note_stores (PATTERN (insn), memory_modified_1, mem);
  return memory_modified;
}
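/* Usage sketch (an illustrative caller): before moving a load across
   INSN, a pass can ask

       if (! memory_modified_in_insn_p (mem, insn))
         ...INSN stores neither into MEM nor into anything aliasing it...  */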
/* Initialize the aliasing machinery.  Initialize the REG_KNOWN_VALUE
   array.  */

void
init_alias_analysis (void)
{
  int maxreg = max_reg_num ();
  int changed, pass;
  int i;
  unsigned int ui;
  rtx insn;

  timevar_push (TV_ALIAS_ANALYSIS);

  reg_known_value_size = maxreg;

  reg_known_value
    = (rtx *) xcalloc ((maxreg - FIRST_PSEUDO_REGISTER), sizeof (rtx))
      - FIRST_PSEUDO_REGISTER;
  reg_known_equiv_p
    = (char *) xcalloc ((maxreg - FIRST_PSEUDO_REGISTER), sizeof (char))
      - FIRST_PSEUDO_REGISTER;

  /* Overallocate reg_base_value to allow some growth during loop
     optimization.  Loop unrolling can create a large number of
     registers.  */
  reg_base_value_size = maxreg * 2;
  reg_base_value = ggc_alloc_cleared (reg_base_value_size * sizeof (rtx));

  new_reg_base_value = xmalloc (reg_base_value_size * sizeof (rtx));
  reg_seen = xmalloc (reg_base_value_size);
  if (! reload_completed && flag_old_unroll_loops)
    {
      /* ??? Why are we realloc'ing if we're just going to zero it?  */
      alias_invariant = xrealloc (alias_invariant,
                                  reg_base_value_size * sizeof (rtx));
      memset (alias_invariant, 0, reg_base_value_size * sizeof (rtx));
    }

  /* The basic idea is that each pass through this loop will use the
     "constant" information from the previous pass to propagate alias
     information through another level of assignments.

     This could get expensive if the assignment chains are long.  Maybe
     we should throttle the number of iterations, possibly based on
     the optimization level or flag_expensive_optimizations.

     We could propagate more information in the first pass by making use
     of REG_N_SETS to determine immediately that the alias information
     for a pseudo is "constant".

     A program with an uninitialized variable can cause an infinite loop
     here.  Instead of doing a full dataflow analysis to detect such problems
     we just cap the number of iterations for the loop.

     The state of the arrays for the set chain in question does not matter
     since the program has undefined behavior.  */
  pass = 0;
  do
    {
      /* Assume nothing will change this iteration of the loop.  */
      changed = 0;

      /* We want to assign the same IDs each iteration of this loop, so
         start counting from zero each iteration of the loop.  */
      unique_id = 0;

      /* We're at the start of the function each iteration through the
         loop, so we're copying arguments.  */
      copying_arguments = true;

      /* Wipe the potential alias information clean for this pass.  */
      memset (new_reg_base_value, 0, reg_base_value_size * sizeof (rtx));

      /* Wipe the reg_seen array clean.  */
      memset (reg_seen, 0, reg_base_value_size);

      /* Mark all hard registers which may contain an address.
         The stack, frame and argument pointers may contain an address.
         An argument register which can hold a Pmode value may contain
         an address even if it is not in BASE_REGS.

         The address expression is VOIDmode for an argument and
         Pmode for other registers.  */

      memcpy (new_reg_base_value, static_reg_base_value,
              FIRST_PSEUDO_REGISTER * sizeof (rtx));

      /* Walk the insns adding values to the new_reg_base_value array.  */
      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
        {
          if (INSN_P (insn))
            {
              rtx note, set;

#if defined (HAVE_prologue) || defined (HAVE_epilogue)
              /* The prologue/epilogue insns are not threaded onto the
                 insn chain until after reload has completed.  Thus,
                 there is no sense wasting time checking if INSN is in
                 the prologue/epilogue until after reload has completed.  */
              if (reload_completed
                  && prologue_epilogue_contains (insn))
                continue;
#endif

              /* If this insn has a noalias note, process it; otherwise,
                 scan for sets.  A simple set will have no side effects
                 which could change the base value of any other register.  */

              if (GET_CODE (PATTERN (insn)) == SET
                  && REG_NOTES (insn) != 0
                  && find_reg_note (insn, REG_NOALIAS, NULL_RTX))
                record_set (SET_DEST (PATTERN (insn)), NULL_RTX, NULL);
              else
                note_stores (PATTERN (insn), record_set, NULL);

              set = single_set (insn);

              if (set != 0
                  && GET_CODE (SET_DEST (set)) == REG
                  && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
                {
                  unsigned int regno = REGNO (SET_DEST (set));
                  rtx src = SET_SRC (set);

                  if (REG_NOTES (insn) != 0
                      && (((note = find_reg_note (insn, REG_EQUAL, 0)) != 0
                           && REG_N_SETS (regno) == 1)
                          || (note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) != 0)
                      && GET_CODE (XEXP (note, 0)) != EXPR_LIST
                      && ! rtx_varies_p (XEXP (note, 0), 1)
                      && ! reg_overlap_mentioned_p (SET_DEST (set), XEXP (note, 0)))
                    {
                      reg_known_value[regno] = XEXP (note, 0);
                      reg_known_equiv_p[regno] = REG_NOTE_KIND (note) == REG_EQUIV;
                    }
                  else if (REG_N_SETS (regno) == 1
                           && GET_CODE (src) == PLUS
                           && GET_CODE (XEXP (src, 0)) == REG
                           && REGNO (XEXP (src, 0)) >= FIRST_PSEUDO_REGISTER
                           && (reg_known_value[REGNO (XEXP (src, 0))])
                           && GET_CODE (XEXP (src, 1)) == CONST_INT)
                    {
                      rtx op0 = XEXP (src, 0);
                      op0 = reg_known_value[REGNO (op0)];
                      reg_known_value[regno]
                        = plus_constant (op0, INTVAL (XEXP (src, 1)));
                      reg_known_equiv_p[regno] = 0;
                    }
                  else if (REG_N_SETS (regno) == 1
                           && ! rtx_varies_p (src, 1))
                    {
                      reg_known_value[regno] = src;
                      reg_known_equiv_p[regno] = 0;
                    }
                }
            }
          else if (GET_CODE (insn) == NOTE
                   && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG)
            copying_arguments = false;
        }

      /* Now propagate values from new_reg_base_value to reg_base_value.  */
      for (ui = 0; ui < reg_base_value_size; ui++)
        {
          if (new_reg_base_value[ui]
              && new_reg_base_value[ui] != reg_base_value[ui]
              && ! rtx_equal_p (new_reg_base_value[ui], reg_base_value[ui]))
            {
              reg_base_value[ui] = new_reg_base_value[ui];
              changed = 1;
            }
        }
    }
  while (changed && ++pass < MAX_ALIAS_LOOP_PASSES);
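  /* For example (register numbers invented), a chain such as

         (set (reg 70) (symbol_ref "a"))
         (set (reg 71) (plus (reg 70) (const_int 4)))

     lets a pass record a base value for reg 70 and, through it, reg 71;
     values that only become known on a later iteration (e.g. around a
     loop back edge) are picked up by subsequent passes, until either
     nothing changes or MAX_ALIAS_LOOP_PASSES is reached.  */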
  /* Fill in the remaining entries.  */
  for (i = FIRST_PSEUDO_REGISTER; i < maxreg; i++)
    if (reg_known_value[i] == 0)
      reg_known_value[i] = regno_reg_rtx[i];

  /* Simplify the reg_base_value array so that no register refers to
     another register, except to special registers indirectly through
     ADDRESS expressions.

     In theory this loop can take as long as O(registers^2), but unless
     there are very long dependency chains it will run in close to linear
     time.

     This loop may not be needed any longer now that the main loop does
     a better job at propagating alias information.  */
  pass = 0;
  do
    {
      changed = 0;
      pass++;
      for (ui = 0; ui < reg_base_value_size; ui++)
        {
          rtx base = reg_base_value[ui];
          if (base && GET_CODE (base) == REG)
            {
              unsigned int base_regno = REGNO (base);
              if (base_regno == ui)             /* register set from itself */
                reg_base_value[ui] = 0;
              else
                reg_base_value[ui] = reg_base_value[base_regno];
              changed = 1;
            }
        }
    }
  while (changed && pass < MAX_ALIAS_LOOP_PASSES);

  /* Clean up.  */
  free (new_reg_base_value);
  new_reg_base_value = 0;
  free (reg_seen);
  reg_seen = 0;
  timevar_pop (TV_ALIAS_ANALYSIS);
}
void
end_alias_analysis (void)
{
  free (reg_known_value + FIRST_PSEUDO_REGISTER);
  reg_known_value = 0;
  reg_known_value_size = 0;
  free (reg_known_equiv_p + FIRST_PSEUDO_REGISTER);
  reg_known_equiv_p = 0;
  reg_base_value = 0;
  reg_base_value_size = 0;
  if (alias_invariant)
    {
      free (alias_invariant);
      alias_invariant = 0;
    }
}

#include "gt-alias.h"