Missed one in last change.
[official-gcc.git] / gcc / alias.c
blobc68a0ad10b91e8d0c6494a9765292ca2016d9dd5
1 /* Alias analysis for GNU C
2 Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003
3 Free Software Foundation, Inc.
4 Contributed by John Carr (jfc@mit.edu).
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
21 02111-1307, USA. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "rtl.h"
28 #include "tree.h"
29 #include "tm_p.h"
30 #include "function.h"
31 #include "expr.h"
32 #include "regs.h"
33 #include "hard-reg-set.h"
34 #include "basic-block.h"
35 #include "flags.h"
36 #include "output.h"
37 #include "toplev.h"
38 #include "cselib.h"
39 #include "splay-tree.h"
40 #include "ggc.h"
41 #include "langhooks.h"
42 #include "timevar.h"
43 #include "target.h"
44 #include "cgraph.h"
46 /* The alias sets assigned to MEMs assist the back-end in determining
47 which MEMs can alias which other MEMs. In general, two MEMs in
48 different alias sets cannot alias each other, with one important
49 exception. Consider something like:
51 struct S {int i; double d; };
53 a store to an `S' can alias something of either type `int' or type
54 `double'. (However, a store to an `int' cannot alias a `double'
55 and vice versa.) We indicate this via a tree structure that looks
56 like:
57 struct S
58 / \
59 / \
60 |/_ _\|
61 int double
63 (The arrows are directed and point downwards.)
64 In this situation we say the alias set for `struct S' is the
65 `superset' and that those for `int' and `double' are `subsets'.
67 To see whether two alias sets can point to the same memory, we must
68 see if either alias set is a subset of the other. We need not trace
69 past immediate descendants, however, since we propagate all
70 grandchildren up one level.
72 Alias set zero is implicitly a superset of all other alias sets.
73    However, there is no actual entry for alias set zero.  It is an
74 error to attempt to explicitly construct a subset of zero. */
76 typedef struct alias_set_entry
78 /* The alias set number, as stored in MEM_ALIAS_SET. */
79 HOST_WIDE_INT alias_set;
81 /* The children of the alias set. These are not just the immediate
82 children, but, in fact, all descendants. So, if we have:
84 struct T { struct S s; float f; }
86 continuing our example above, the children here will be all of
87 `int', `double', `float', and `struct S'. */
88 splay_tree children;
90 /* Nonzero if would have a child of zero: this effectively makes this
91 alias set the same as alias set zero. */
92 int has_zero_child;
93 } *alias_set_entry;
95 static int rtx_equal_for_memref_p (rtx, rtx);
96 static rtx find_symbolic_term (rtx);
97 static int memrefs_conflict_p (int, rtx, int, rtx, HOST_WIDE_INT);
98 static void record_set (rtx, rtx, void *);
99 static int base_alias_check (rtx, rtx, enum machine_mode,
100 enum machine_mode);
101 static rtx find_base_value (rtx);
102 static int mems_in_disjoint_alias_sets_p (rtx, rtx);
103 static int insert_subset_children (splay_tree_node, void*);
104 static tree find_base_decl (tree);
105 static alias_set_entry get_alias_set_entry (HOST_WIDE_INT);
106 static rtx fixed_scalar_and_varying_struct_p (rtx, rtx, rtx, rtx,
107 int (*) (rtx, int));
108 static int aliases_everything_p (rtx);
109 static bool nonoverlapping_component_refs_p (tree, tree);
110 static tree decl_for_component_ref (tree);
111 static rtx adjust_offset_for_component_ref (tree, rtx);
112 static int nonoverlapping_memrefs_p (rtx, rtx);
113 static int write_dependence_p (rtx, rtx, int);
115 static int nonlocal_mentioned_p_1 (rtx *, void *);
116 static int nonlocal_mentioned_p (rtx);
117 static int nonlocal_referenced_p_1 (rtx *, void *);
118 static int nonlocal_referenced_p (rtx);
119 static int nonlocal_set_p_1 (rtx *, void *);
120 static int nonlocal_set_p (rtx);
121 static void memory_modified_1 (rtx, rtx, void *);
123 /* Set up all info needed to perform alias analysis on memory references. */
125 /* Returns the size in bytes of the mode of X. */
126 #define SIZE_FOR_MODE(X) (GET_MODE_SIZE (GET_MODE (X)))
128 /* Returns nonzero if MEM1 and MEM2 do not alias because they are in
129 different alias sets. We ignore alias sets in functions making use
130 of variable arguments because the va_arg macros on some systems are
131 not legal ANSI C. */
132 #define DIFFERENT_ALIAS_SETS_P(MEM1, MEM2) \
133 mems_in_disjoint_alias_sets_p (MEM1, MEM2)
135 /* Cap the number of passes we make over the insns propagating alias
136 information through set chains. 10 is a completely arbitrary choice. */
137 #define MAX_ALIAS_LOOP_PASSES 10
139 /* reg_base_value[N] gives an address to which register N is related.
140 If all sets after the first add or subtract to the current value
141 or otherwise modify it so it does not point to a different top level
142 object, reg_base_value[N] is equal to the address part of the source
143 of the first set.
145 A base address can be an ADDRESS, SYMBOL_REF, or LABEL_REF. ADDRESS
146 expressions represent certain special values: function arguments and
147 the stack, frame, and argument pointers.
149 The contents of an ADDRESS is not normally used, the mode of the
150 ADDRESS determines whether the ADDRESS is a function argument or some
151 other special value. Pointer equality, not rtx_equal_p, determines whether
152 two ADDRESS expressions refer to the same base address.
154 The only use of the contents of an ADDRESS is for determining if the
155 current function performs nonlocal memory memory references for the
156 purposes of marking the function as a constant function. */
158 static GTY((length ("reg_base_value_size"))) rtx *reg_base_value;
159 static rtx *new_reg_base_value;
160 static unsigned int reg_base_value_size; /* size of reg_base_value array */
162 /* Static hunks of RTL used by the aliasing code; these are initialized
163 once per function to avoid unnecessary RTL allocations. */
164 static GTY (()) rtx static_reg_base_value[FIRST_PSEUDO_REGISTER];
166 #define REG_BASE_VALUE(X) \
167 (REGNO (X) < reg_base_value_size \
168 ? reg_base_value[REGNO (X)] : 0)
170 /* Vector of known invariant relationships between registers. Set in
171 loop unrolling. Indexed by register number, if nonzero the value
172 is an expression describing this register in terms of another.
174 The length of this array is REG_BASE_VALUE_SIZE.
176 Because this array contains only pseudo registers it has no effect
177 after reload. */
178 static rtx *alias_invariant;
180 /* Vector indexed by N giving the initial (unchanging) value known for
181 pseudo-register N. This array is initialized in
182 init_alias_analysis, and does not change until end_alias_analysis
183 is called. */
184 rtx *reg_known_value;
186 /* Indicates number of valid entries in reg_known_value. */
187 static unsigned int reg_known_value_size;
189 /* Vector recording for each reg_known_value whether it is due to a
190 REG_EQUIV note. Future passes (viz., reload) may replace the
191 pseudo with the equivalent expression and so we account for the
192 dependences that would be introduced if that happens.
194 The REG_EQUIV notes created in assign_parms may mention the arg
195 pointer, and there are explicit insns in the RTL that modify the
196 arg pointer. Thus we must ensure that such insns don't get
197 scheduled across each other because that would invalidate the
198 REG_EQUIV notes. One could argue that the REG_EQUIV notes are
199 wrong, but solving the problem in the scheduler will likely give
200 better code, so we do it here. */
201 char *reg_known_equiv_p;
203 /* True when scanning insns from the start of the rtl to the
204 NOTE_INSN_FUNCTION_BEG note. */
205 static bool copying_arguments;
207 /* The splay-tree used to store the various alias set entries. */
208 static splay_tree alias_sets;
210 /* Returns a pointer to the alias set entry for ALIAS_SET, if there is
211 such an entry, or NULL otherwise. */
213 static alias_set_entry
214 get_alias_set_entry (HOST_WIDE_INT alias_set)
216 splay_tree_node sn
217 = splay_tree_lookup (alias_sets, (splay_tree_key) alias_set);
219 return sn != 0 ? ((alias_set_entry) sn->value) : 0;
222 /* Returns nonzero if the alias sets for MEM1 and MEM2 are such that
223 the two MEMs cannot alias each other. */
225 static int
226 mems_in_disjoint_alias_sets_p (rtx mem1, rtx mem2)
228 #ifdef ENABLE_CHECKING
229 /* Perform a basic sanity check. Namely, that there are no alias sets
230 if we're not using strict aliasing. This helps to catch bugs
231 whereby someone uses PUT_CODE, but doesn't clear MEM_ALIAS_SET, or
232 where a MEM is allocated in some way other than by the use of
233 gen_rtx_MEM, and the MEM_ALIAS_SET is not cleared. If we begin to
234 use alias sets to indicate that spilled registers cannot alias each
235 other, we might need to remove this check. */
236 if (! flag_strict_aliasing
237 && (MEM_ALIAS_SET (mem1) != 0 || MEM_ALIAS_SET (mem2) != 0))
238 abort ();
239 #endif
241 return ! alias_sets_conflict_p (MEM_ALIAS_SET (mem1), MEM_ALIAS_SET (mem2));
244 /* Insert the NODE into the splay tree given by DATA. Used by
245 record_alias_subset via splay_tree_foreach. */
247 static int
248 insert_subset_children (splay_tree_node node, void *data)
250 splay_tree_insert ((splay_tree) data, node->key, node->value);
252 return 0;
255 /* Return 1 if the two specified alias sets may conflict. */
258 alias_sets_conflict_p (HOST_WIDE_INT set1, HOST_WIDE_INT set2)
260 alias_set_entry ase;
262 /* If have no alias set information for one of the operands, we have
263 to assume it can alias anything. */
264 if (set1 == 0 || set2 == 0
265 /* If the two alias sets are the same, they may alias. */
266 || set1 == set2)
267 return 1;
269 /* See if the first alias set is a subset of the second. */
270 ase = get_alias_set_entry (set1);
271 if (ase != 0
272 && (ase->has_zero_child
273 || splay_tree_lookup (ase->children,
274 (splay_tree_key) set2)))
275 return 1;
277 /* Now do the same, but with the alias sets reversed. */
278 ase = get_alias_set_entry (set2);
279 if (ase != 0
280 && (ase->has_zero_child
281 || splay_tree_lookup (ase->children,
282 (splay_tree_key) set1)))
283 return 1;
285 /* The two alias sets are distinct and neither one is the
286 child of the other. Therefore, they cannot alias. */
287 return 0;
290 /* Return 1 if TYPE is a RECORD_TYPE, UNION_TYPE, or QUAL_UNION_TYPE and has
291 has any readonly fields. If any of the fields have types that
292 contain readonly fields, return true as well. */
295 readonly_fields_p (tree type)
297 tree field;
299 if (TREE_CODE (type) != RECORD_TYPE && TREE_CODE (type) != UNION_TYPE
300 && TREE_CODE (type) != QUAL_UNION_TYPE)
301 return 0;
303 for (field = TYPE_FIELDS (type); field != 0; field = TREE_CHAIN (field))
304 if (TREE_CODE (field) == FIELD_DECL
305 && (TREE_READONLY (field)
306 || readonly_fields_p (TREE_TYPE (field))))
307 return 1;
309 return 0;
312 /* Return 1 if any MEM object of type T1 will always conflict (using the
313 dependency routines in this file) with any MEM object of type T2.
314 This is used when allocating temporary storage. If T1 and/or T2 are
315 NULL_TREE, it means we know nothing about the storage. */
318 objects_must_conflict_p (tree t1, tree t2)
320 /* If neither has a type specified, we don't know if they'll conflict
321 because we may be using them to store objects of various types, for
322 example the argument and local variables areas of inlined functions. */
323 if (t1 == 0 && t2 == 0)
324 return 0;
326 /* If one or the other has readonly fields or is readonly,
327 then they may not conflict. */
328 if ((t1 != 0 && readonly_fields_p (t1))
329 || (t2 != 0 && readonly_fields_p (t2))
330 || (t1 != 0 && lang_hooks.honor_readonly && TYPE_READONLY (t1))
331 || (t2 != 0 && lang_hooks.honor_readonly && TYPE_READONLY (t2)))
332 return 0;
334 /* If they are the same type, they must conflict. */
335 if (t1 == t2
336 /* Likewise if both are volatile. */
337 || (t1 != 0 && TYPE_VOLATILE (t1) && t2 != 0 && TYPE_VOLATILE (t2)))
338 return 1;
340 /* If one is aggregate and the other is scalar then they may not
341 conflict. */
342 if ((t1 != 0 && AGGREGATE_TYPE_P (t1))
343 != (t2 != 0 && AGGREGATE_TYPE_P (t2)))
344 return 0;
346 /* Otherwise they conflict only if the alias sets conflict. */
347 return alias_sets_conflict_p (t1 ? get_alias_set (t1) : 0,
348 t2 ? get_alias_set (t2) : 0);
351 /* T is an expression with pointer type. Find the DECL on which this
352 expression is based. (For example, in `a[i]' this would be `a'.)
353 If there is no such DECL, or a unique decl cannot be determined,
354 NULL_TREE is returned. */
356 static tree
357 find_base_decl (tree t)
359 tree d0, d1, d2;
361 if (t == 0 || t == error_mark_node || ! POINTER_TYPE_P (TREE_TYPE (t)))
362 return 0;
364 /* If this is a declaration, return it. */
365 if (TREE_CODE_CLASS (TREE_CODE (t)) == 'd')
366 return t;
368 /* Handle general expressions. It would be nice to deal with
369 COMPONENT_REFs here. If we could tell that `a' and `b' were the
370 same, then `a->f' and `b->f' are also the same. */
371 switch (TREE_CODE_CLASS (TREE_CODE (t)))
373 case '1':
374 return find_base_decl (TREE_OPERAND (t, 0));
376 case '2':
377 /* Return 0 if found in neither or both are the same. */
378 d0 = find_base_decl (TREE_OPERAND (t, 0));
379 d1 = find_base_decl (TREE_OPERAND (t, 1));
380 if (d0 == d1)
381 return d0;
382 else if (d0 == 0)
383 return d1;
384 else if (d1 == 0)
385 return d0;
386 else
387 return 0;
389 case '3':
390 d0 = find_base_decl (TREE_OPERAND (t, 0));
391 d1 = find_base_decl (TREE_OPERAND (t, 1));
392 d2 = find_base_decl (TREE_OPERAND (t, 2));
394 /* Set any nonzero values from the last, then from the first. */
395 if (d1 == 0) d1 = d2;
396 if (d0 == 0) d0 = d1;
397 if (d1 == 0) d1 = d0;
398 if (d2 == 0) d2 = d1;
400 /* At this point all are nonzero or all are zero. If all three are the
401 same, return it. Otherwise, return zero. */
402 return (d0 == d1 && d1 == d2) ? d0 : 0;
404 default:
405 return 0;
409 /* Return 1 if all the nested component references handled by
410 get_inner_reference in T are such that we can address the object in T. */
413 can_address_p (tree t)
415 /* If we're at the end, it is vacuously addressable. */
416 if (! handled_component_p (t))
417 return 1;
419 /* Bitfields are never addressable. */
420 else if (TREE_CODE (t) == BIT_FIELD_REF)
421 return 0;
423 /* Fields are addressable unless they are marked as nonaddressable or
424 the containing type has alias set 0. */
425 else if (TREE_CODE (t) == COMPONENT_REF
426 && ! DECL_NONADDRESSABLE_P (TREE_OPERAND (t, 1))
427 && get_alias_set (TREE_TYPE (TREE_OPERAND (t, 0))) != 0
428 && can_address_p (TREE_OPERAND (t, 0)))
429 return 1;
431 /* Likewise for arrays. */
432 else if ((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
433 && ! TYPE_NONALIASED_COMPONENT (TREE_TYPE (TREE_OPERAND (t, 0)))
434 && get_alias_set (TREE_TYPE (TREE_OPERAND (t, 0))) != 0
435 && can_address_p (TREE_OPERAND (t, 0)))
436 return 1;
438 return 0;
441 /* Return the alias set for T, which may be either a type or an
442 expression. Call language-specific routine for help, if needed. */
444 HOST_WIDE_INT
445 get_alias_set (tree t)
447 HOST_WIDE_INT set;
449 /* If we're not doing any alias analysis, just assume everything
450 aliases everything else. Also return 0 if this or its type is
451 an error. */
452 if (! flag_strict_aliasing || t == error_mark_node
453 || (! TYPE_P (t)
454 && (TREE_TYPE (t) == 0 || TREE_TYPE (t) == error_mark_node)))
455 return 0;
457 /* We can be passed either an expression or a type. This and the
458 language-specific routine may make mutually-recursive calls to each other
459 to figure out what to do. At each juncture, we see if this is a tree
460 that the language may need to handle specially. First handle things that
461 aren't types. */
462 if (! TYPE_P (t))
464 tree inner = t;
465 tree placeholder_ptr = 0;
467 /* Remove any nops, then give the language a chance to do
468 something with this tree before we look at it. */
469 STRIP_NOPS (t);
470 set = (*lang_hooks.get_alias_set) (t);
471 if (set != -1)
472 return set;
474 /* First see if the actual object referenced is an INDIRECT_REF from a
475 restrict-qualified pointer or a "void *". Replace
476 PLACEHOLDER_EXPRs. */
477 while (TREE_CODE (inner) == PLACEHOLDER_EXPR
478 || handled_component_p (inner))
480 if (TREE_CODE (inner) == PLACEHOLDER_EXPR)
481 inner = find_placeholder (inner, &placeholder_ptr);
482 else
483 inner = TREE_OPERAND (inner, 0);
485 STRIP_NOPS (inner);
488 /* Check for accesses through restrict-qualified pointers. */
489 if (TREE_CODE (inner) == INDIRECT_REF)
491 tree decl = find_base_decl (TREE_OPERAND (inner, 0));
493 if (decl && DECL_POINTER_ALIAS_SET_KNOWN_P (decl))
495 /* If we haven't computed the actual alias set, do it now. */
496 if (DECL_POINTER_ALIAS_SET (decl) == -2)
498 /* No two restricted pointers can point at the same thing.
499 However, a restricted pointer can point at the same thing
500 as an unrestricted pointer, if that unrestricted pointer
501 is based on the restricted pointer. So, we make the
502 alias set for the restricted pointer a subset of the
503 alias set for the type pointed to by the type of the
504 decl. */
505 HOST_WIDE_INT pointed_to_alias_set
506 = get_alias_set (TREE_TYPE (TREE_TYPE (decl)));
508 if (pointed_to_alias_set == 0)
509 /* It's not legal to make a subset of alias set zero. */
511 else
513 DECL_POINTER_ALIAS_SET (decl) = new_alias_set ();
514 record_alias_subset (pointed_to_alias_set,
515 DECL_POINTER_ALIAS_SET (decl));
519 /* We use the alias set indicated in the declaration. */
520 return DECL_POINTER_ALIAS_SET (decl);
523 /* If we have an INDIRECT_REF via a void pointer, we don't
524 know anything about what that might alias. */
525 else if (TREE_CODE (TREE_TYPE (inner)) == VOID_TYPE)
526 return 0;
529 /* Otherwise, pick up the outermost object that we could have a pointer
530 to, processing conversion and PLACEHOLDER_EXPR as above. */
531 placeholder_ptr = 0;
532 while (TREE_CODE (t) == PLACEHOLDER_EXPR
533 || (handled_component_p (t) && ! can_address_p (t)))
535 if (TREE_CODE (t) == PLACEHOLDER_EXPR)
536 t = find_placeholder (t, &placeholder_ptr);
537 else
538 t = TREE_OPERAND (t, 0);
540 STRIP_NOPS (t);
543 /* If we've already determined the alias set for a decl, just return
544 it. This is necessary for C++ anonymous unions, whose component
545 variables don't look like union members (boo!). */
546 if (TREE_CODE (t) == VAR_DECL
547 && DECL_RTL_SET_P (t) && GET_CODE (DECL_RTL (t)) == MEM)
548 return MEM_ALIAS_SET (DECL_RTL (t));
550 /* Now all we care about is the type. */
551 t = TREE_TYPE (t);
554 /* Variant qualifiers don't affect the alias set, so get the main
555 variant. If this is a type with a known alias set, return it. */
556 t = TYPE_MAIN_VARIANT (t);
557 if (TYPE_ALIAS_SET_KNOWN_P (t))
558 return TYPE_ALIAS_SET (t);
560 /* See if the language has special handling for this type. */
561 set = (*lang_hooks.get_alias_set) (t);
562 if (set != -1)
563 return set;
565 /* There are no objects of FUNCTION_TYPE, so there's no point in
566 using up an alias set for them. (There are, of course, pointers
567 and references to functions, but that's different.) */
568 else if (TREE_CODE (t) == FUNCTION_TYPE)
569 set = 0;
571 /* Unless the language specifies otherwise, let vector types alias
572 their components. This avoids some nasty type punning issues in
573 normal usage. And indeed lets vectors be treated more like an
574 array slice. */
575 else if (TREE_CODE (t) == VECTOR_TYPE)
576 set = get_alias_set (TREE_TYPE (t));
578 else
579 /* Otherwise make a new alias set for this type. */
580 set = new_alias_set ();
582 TYPE_ALIAS_SET (t) = set;
584 /* If this is an aggregate type, we must record any component aliasing
585 information. */
586 if (AGGREGATE_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
587 record_component_aliases (t);
589 return set;
592 /* Return a brand-new alias set. */
594 HOST_WIDE_INT
595 new_alias_set (void)
597 static HOST_WIDE_INT last_alias_set;
599 if (flag_strict_aliasing)
600 return ++last_alias_set;
601 else
602 return 0;
605 /* Indicate that things in SUBSET can alias things in SUPERSET, but
606 not vice versa. For example, in C, a store to an `int' can alias a
607 structure containing an `int', but not vice versa. Here, the
608 structure would be the SUPERSET and `int' the SUBSET. This
609 function should be called only once per SUPERSET/SUBSET pair.
611 It is illegal for SUPERSET to be zero; everything is implicitly a
612 subset of alias set zero. */
614 void
615 record_alias_subset (HOST_WIDE_INT superset, HOST_WIDE_INT subset)
617 alias_set_entry superset_entry;
618 alias_set_entry subset_entry;
620 /* It is possible in complex type situations for both sets to be the same,
621 in which case we can ignore this operation. */
622 if (superset == subset)
623 return;
625 if (superset == 0)
626 abort ();
628 superset_entry = get_alias_set_entry (superset);
629 if (superset_entry == 0)
631 /* Create an entry for the SUPERSET, so that we have a place to
632 attach the SUBSET. */
633 superset_entry
634 = (alias_set_entry) xmalloc (sizeof (struct alias_set_entry));
635 superset_entry->alias_set = superset;
636 superset_entry->children
637 = splay_tree_new (splay_tree_compare_ints, 0, 0);
638 superset_entry->has_zero_child = 0;
639 splay_tree_insert (alias_sets, (splay_tree_key) superset,
640 (splay_tree_value) superset_entry);
643 if (subset == 0)
644 superset_entry->has_zero_child = 1;
645 else
647 subset_entry = get_alias_set_entry (subset);
648 /* If there is an entry for the subset, enter all of its children
649 (if they are not already present) as children of the SUPERSET. */
650 if (subset_entry)
652 if (subset_entry->has_zero_child)
653 superset_entry->has_zero_child = 1;
655 splay_tree_foreach (subset_entry->children, insert_subset_children,
656 superset_entry->children);
659 /* Enter the SUBSET itself as a child of the SUPERSET. */
660 splay_tree_insert (superset_entry->children,
661 (splay_tree_key) subset, 0);
665 /* Record that component types of TYPE, if any, are part of that type for
666 aliasing purposes. For record types, we only record component types
667 for fields that are marked addressable. For array types, we always
668 record the component types, so the front end should not call this
669 function if the individual component aren't addressable. */
671 void
672 record_component_aliases (tree type)
674 HOST_WIDE_INT superset = get_alias_set (type);
675 tree field;
677 if (superset == 0)
678 return;
680 switch (TREE_CODE (type))
682 case ARRAY_TYPE:
683 if (! TYPE_NONALIASED_COMPONENT (type))
684 record_alias_subset (superset, get_alias_set (TREE_TYPE (type)));
685 break;
687 case RECORD_TYPE:
688 case UNION_TYPE:
689 case QUAL_UNION_TYPE:
690 /* Recursively record aliases for the base classes, if there are any */
691 if (TYPE_BINFO (type) != NULL && TYPE_BINFO_BASETYPES (type) != NULL)
693 int i;
694 for (i = 0; i < TREE_VEC_LENGTH (TYPE_BINFO_BASETYPES (type)); i++)
696 tree binfo = TREE_VEC_ELT (TYPE_BINFO_BASETYPES (type), i);
697 record_alias_subset (superset,
698 get_alias_set (BINFO_TYPE (binfo)));
701 for (field = TYPE_FIELDS (type); field != 0; field = TREE_CHAIN (field))
702 if (TREE_CODE (field) == FIELD_DECL && ! DECL_NONADDRESSABLE_P (field))
703 record_alias_subset (superset, get_alias_set (TREE_TYPE (field)));
704 break;
706 case COMPLEX_TYPE:
707 record_alias_subset (superset, get_alias_set (TREE_TYPE (type)));
708 break;
710 default:
711 break;
715 /* Allocate an alias set for use in storing and reading from the varargs
716 spill area. */
718 HOST_WIDE_INT
719 get_varargs_alias_set (void)
721 static HOST_WIDE_INT set = -1;
723 if (set == -1)
724 set = new_alias_set ();
726 return set;
729 /* Likewise, but used for the fixed portions of the frame, e.g., register
730 save areas. */
732 HOST_WIDE_INT
733 get_frame_alias_set (void)
735 static HOST_WIDE_INT set = -1;
737 if (set == -1)
738 set = new_alias_set ();
740 return set;
743 /* Inside SRC, the source of a SET, find a base address. */
745 static rtx
746 find_base_value (rtx src)
748 unsigned int regno;
750 switch (GET_CODE (src))
752 case SYMBOL_REF:
753 case LABEL_REF:
754 return src;
756 case REG:
757 regno = REGNO (src);
758 /* At the start of a function, argument registers have known base
759 values which may be lost later. Returning an ADDRESS
760 expression here allows optimization based on argument values
761 even when the argument registers are used for other purposes. */
762 if (regno < FIRST_PSEUDO_REGISTER && copying_arguments)
763 return new_reg_base_value[regno];
765 /* If a pseudo has a known base value, return it. Do not do this
766 for non-fixed hard regs since it can result in a circular
767 dependency chain for registers which have values at function entry.
769 The test above is not sufficient because the scheduler may move
770 a copy out of an arg reg past the NOTE_INSN_FUNCTION_BEGIN. */
771 if ((regno >= FIRST_PSEUDO_REGISTER || fixed_regs[regno])
772 && regno < reg_base_value_size)
774 /* If we're inside init_alias_analysis, use new_reg_base_value
775 to reduce the number of relaxation iterations. */
776 if (new_reg_base_value && new_reg_base_value[regno]
777 && REG_N_SETS (regno) == 1)
778 return new_reg_base_value[regno];
780 if (reg_base_value[regno])
781 return reg_base_value[regno];
784 return src;
786 case MEM:
787 /* Check for an argument passed in memory. Only record in the
788 copying-arguments block; it is too hard to track changes
789 otherwise. */
790 if (copying_arguments
791 && (XEXP (src, 0) == arg_pointer_rtx
792 || (GET_CODE (XEXP (src, 0)) == PLUS
793 && XEXP (XEXP (src, 0), 0) == arg_pointer_rtx)))
794 return gen_rtx_ADDRESS (VOIDmode, src);
795 return 0;
797 case CONST:
798 src = XEXP (src, 0);
799 if (GET_CODE (src) != PLUS && GET_CODE (src) != MINUS)
800 break;
802 /* ... fall through ... */
804 case PLUS:
805 case MINUS:
807 rtx temp, src_0 = XEXP (src, 0), src_1 = XEXP (src, 1);
809 /* If either operand is a REG that is a known pointer, then it
810 is the base. */
811 if (REG_P (src_0) && REG_POINTER (src_0))
812 return find_base_value (src_0);
813 if (REG_P (src_1) && REG_POINTER (src_1))
814 return find_base_value (src_1);
816 /* If either operand is a REG, then see if we already have
817 a known value for it. */
818 if (REG_P (src_0))
820 temp = find_base_value (src_0);
821 if (temp != 0)
822 src_0 = temp;
825 if (REG_P (src_1))
827 temp = find_base_value (src_1);
828 if (temp!= 0)
829 src_1 = temp;
832 /* If either base is named object or a special address
833 (like an argument or stack reference), then use it for the
834 base term. */
835 if (src_0 != 0
836 && (GET_CODE (src_0) == SYMBOL_REF
837 || GET_CODE (src_0) == LABEL_REF
838 || (GET_CODE (src_0) == ADDRESS
839 && GET_MODE (src_0) != VOIDmode)))
840 return src_0;
842 if (src_1 != 0
843 && (GET_CODE (src_1) == SYMBOL_REF
844 || GET_CODE (src_1) == LABEL_REF
845 || (GET_CODE (src_1) == ADDRESS
846 && GET_MODE (src_1) != VOIDmode)))
847 return src_1;
849 /* Guess which operand is the base address:
850 If either operand is a symbol, then it is the base. If
851 either operand is a CONST_INT, then the other is the base. */
852 if (GET_CODE (src_1) == CONST_INT || CONSTANT_P (src_0))
853 return find_base_value (src_0);
854 else if (GET_CODE (src_0) == CONST_INT || CONSTANT_P (src_1))
855 return find_base_value (src_1);
857 return 0;
860 case LO_SUM:
861 /* The standard form is (lo_sum reg sym) so look only at the
862 second operand. */
863 return find_base_value (XEXP (src, 1));
865 case AND:
866 /* If the second operand is constant set the base
867 address to the first operand. */
868 if (GET_CODE (XEXP (src, 1)) == CONST_INT && INTVAL (XEXP (src, 1)) != 0)
869 return find_base_value (XEXP (src, 0));
870 return 0;
872 case TRUNCATE:
873 if (GET_MODE_SIZE (GET_MODE (src)) < GET_MODE_SIZE (Pmode))
874 break;
875 /* Fall through. */
876 case HIGH:
877 case PRE_INC:
878 case PRE_DEC:
879 case POST_INC:
880 case POST_DEC:
881 case PRE_MODIFY:
882 case POST_MODIFY:
883 return find_base_value (XEXP (src, 0));
885 case ZERO_EXTEND:
886 case SIGN_EXTEND: /* used for NT/Alpha pointers */
888 rtx temp = find_base_value (XEXP (src, 0));
890 #ifdef POINTERS_EXTEND_UNSIGNED
891 if (temp != 0 && CONSTANT_P (temp) && GET_MODE (temp) != Pmode)
892 temp = convert_memory_address (Pmode, temp);
893 #endif
895 return temp;
898 default:
899 break;
902 return 0;
905 /* Called from init_alias_analysis indirectly through note_stores. */
907 /* While scanning insns to find base values, reg_seen[N] is nonzero if
908 register N has been set in this function. */
909 static char *reg_seen;
911 /* Addresses which are known not to alias anything else are identified
912 by a unique integer. */
913 static int unique_id;
915 static void
916 record_set (rtx dest, rtx set, void *data ATTRIBUTE_UNUSED)
918 unsigned regno;
919 rtx src;
920 int n;
922 if (GET_CODE (dest) != REG)
923 return;
925 regno = REGNO (dest);
927 if (regno >= reg_base_value_size)
928 abort ();
930 /* If this spans multiple hard registers, then we must indicate that every
931 register has an unusable value. */
932 if (regno < FIRST_PSEUDO_REGISTER)
933 n = HARD_REGNO_NREGS (regno, GET_MODE (dest));
934 else
935 n = 1;
936 if (n != 1)
938 while (--n >= 0)
940 reg_seen[regno + n] = 1;
941 new_reg_base_value[regno + n] = 0;
943 return;
946 if (set)
948 /* A CLOBBER wipes out any old value but does not prevent a previously
949 unset register from acquiring a base address (i.e. reg_seen is not
950 set). */
951 if (GET_CODE (set) == CLOBBER)
953 new_reg_base_value[regno] = 0;
954 return;
956 src = SET_SRC (set);
958 else
960 if (reg_seen[regno])
962 new_reg_base_value[regno] = 0;
963 return;
965 reg_seen[regno] = 1;
966 new_reg_base_value[regno] = gen_rtx_ADDRESS (Pmode,
967 GEN_INT (unique_id++));
968 return;
971 /* This is not the first set. If the new value is not related to the
972 old value, forget the base value. Note that the following code is
973 not detected:
974 extern int x, y; int *p = &x; p += (&y-&x);
975 ANSI C does not allow computing the difference of addresses
976 of distinct top level objects. */
977 if (new_reg_base_value[regno])
978 switch (GET_CODE (src))
980 case LO_SUM:
981 case MINUS:
982 if (XEXP (src, 0) != dest && XEXP (src, 1) != dest)
983 new_reg_base_value[regno] = 0;
984 break;
985 case PLUS:
986 /* If the value we add in the PLUS is also a valid base value,
987 this might be the actual base value, and the original value
988 an index. */
990 rtx other = NULL_RTX;
992 if (XEXP (src, 0) == dest)
993 other = XEXP (src, 1);
994 else if (XEXP (src, 1) == dest)
995 other = XEXP (src, 0);
997 if (! other || find_base_value (other))
998 new_reg_base_value[regno] = 0;
999 break;
1001 case AND:
1002 if (XEXP (src, 0) != dest || GET_CODE (XEXP (src, 1)) != CONST_INT)
1003 new_reg_base_value[regno] = 0;
1004 break;
1005 default:
1006 new_reg_base_value[regno] = 0;
1007 break;
1009 /* If this is the first set of a register, record the value. */
1010 else if ((regno >= FIRST_PSEUDO_REGISTER || ! fixed_regs[regno])
1011 && ! reg_seen[regno] && new_reg_base_value[regno] == 0)
1012 new_reg_base_value[regno] = find_base_value (src);
1014 reg_seen[regno] = 1;
1017 /* Called from loop optimization when a new pseudo-register is
1018 created. It indicates that REGNO is being set to VAL. If INVARIANT
1019 is true then this value also describes an invariant relationship
1020 which can be used to deduce that two registers with unknown values
1021 are different. */
1023 void
1024 record_base_value (unsigned int regno, rtx val, int invariant)
1026 if (regno >= reg_base_value_size)
1027 return;
1029 if (invariant && alias_invariant)
1030 alias_invariant[regno] = val;
/* A register-to-register copy propagates the source's base value
   directly instead of recomputing it via find_base_value.  */
1032 if (GET_CODE (val) == REG)
1034 if (REGNO (val) < reg_base_value_size)
1035 reg_base_value[regno] = reg_base_value[REGNO (val)];
1037 return;
1040 reg_base_value[regno] = find_base_value (val);
1043 /* Clear alias info for a register. This is used if an RTL transformation
1044 changes the value of a register. This is used in flow by AUTO_INC_DEC
1045 optimizations. We don't need to clear reg_base_value, since flow only
1046 changes the offset. */
1048 void
1049 clear_reg_alias_info (rtx reg)
1051 unsigned int regno = REGNO (reg);
/* Only pseudos have reg_known_value entries; pointing the entry back at
   REG itself means "no known equivalent value".  */
1053 if (regno < reg_known_value_size && regno >= FIRST_PSEUDO_REGISTER)
1054 reg_known_value[regno] = reg;
1057 /* Returns a canonical version of X, from the point of view alias
1058 analysis. (For example, if X is a MEM whose address is a register,
1059 and the register has a known value (say a SYMBOL_REF), then a MEM
1060 whose address is the SYMBOL_REF is returned.) */
1063 canon_rtx (rtx x)
1065 /* Recursively look for equivalences. */
1066 if (GET_CODE (x) == REG && REGNO (x) >= FIRST_PSEUDO_REGISTER
1067 && REGNO (x) < reg_known_value_size)
1068 return reg_known_value[REGNO (x)] == x
1069 ? x : canon_rtx (reg_known_value[REGNO (x)]);
1070 else if (GET_CODE (x) == PLUS)
1072 rtx x0 = canon_rtx (XEXP (x, 0));
1073 rtx x1 = canon_rtx (XEXP (x, 1));
/* If canonicalization changed either operand, rebuild the PLUS,
   folding a CONST_INT operand via plus_constant when possible.  */
1075 if (x0 != XEXP (x, 0) || x1 != XEXP (x, 1))
1077 if (GET_CODE (x0) == CONST_INT)
1078 return plus_constant (x1, INTVAL (x0));
1079 else if (GET_CODE (x1) == CONST_INT)
1080 return plus_constant (x0, INTVAL (x1));
1081 return gen_rtx_PLUS (GET_MODE (x), x0, x1);
1085 /* This gives us much better alias analysis when called from
1086 the loop optimizer. Note we want to leave the original
1087 MEM alone, but need to return the canonicalized MEM with
1088 all the flags with their original values. */
1089 else if (GET_CODE (x) == MEM)
1090 x = replace_equiv_address_nv (x, canon_rtx (XEXP (x, 0)));
1092 return x;
1095 /* Return 1 if X and Y are identical-looking rtx's.
1096 Expect that X and Y have been already canonicalized.
1098 We use the data in reg_known_value above to see if two registers with
1099 different numbers are, in fact, equivalent. */
1101 static int
1102 rtx_equal_for_memref_p (rtx x, rtx y)
1104 int i;
1105 int j;
1106 enum rtx_code code;
1107 const char *fmt;
1109 if (x == 0 && y == 0)
1110 return 1;
1111 if (x == 0 || y == 0)
1112 return 0;
1114 if (x == y)
1115 return 1;
1117 code = GET_CODE (x);
1118 /* Rtx's of different codes cannot be equal. */
1119 if (code != GET_CODE (y))
1120 return 0;
1122 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.
1123 (REG:SI x) and (REG:HI x) are NOT equivalent. */
1125 if (GET_MODE (x) != GET_MODE (y))
1126 return 0;
1128 /* Some RTL can be compared without a recursive examination. */
1129 switch (code)
1131 case VALUE:
1132 return CSELIB_VAL_PTR (x) == CSELIB_VAL_PTR (y);
1134 case REG:
1135 return REGNO (x) == REGNO (y);
1137 case LABEL_REF:
1138 return XEXP (x, 0) == XEXP (y, 0);
1140 case SYMBOL_REF:
1141 return XSTR (x, 0) == XSTR (y, 0);
1143 case CONST_INT:
1144 case CONST_DOUBLE:
1145 /* There's no need to compare the contents of CONST_DOUBLEs or
1146 CONST_INTs because pointer equality is a good enough
1147 comparison for these nodes. */
1148 return 0;
1150 case ADDRESSOF:
1151 return (XINT (x, 1) == XINT (y, 1)
1152 && rtx_equal_for_memref_p (XEXP (x, 0),
1153 XEXP (y, 0)));
1155 default:
1156 break;
1159 /* canon_rtx knows how to handle plus. No need to canonicalize. */
1160 if (code == PLUS)
1161 return ((rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 0))
1162 && rtx_equal_for_memref_p (XEXP (x, 1), XEXP (y, 1)))
1163 || (rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 1))
1164 && rtx_equal_for_memref_p (XEXP (x, 1), XEXP (y, 0))));
1165 /* For commutative operations, the RTXs match if the operands match in any
1166 order. Also handle the simple binary and unary cases without a loop. */
1167 if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
1169 rtx xop0 = canon_rtx (XEXP (x, 0));
1170 rtx yop0 = canon_rtx (XEXP (y, 0));
1171 rtx yop1 = canon_rtx (XEXP (y, 1));
1173 return ((rtx_equal_for_memref_p (xop0, yop0)
1174 && rtx_equal_for_memref_p (canon_rtx (XEXP (x, 1)), yop1))
1175 || (rtx_equal_for_memref_p (xop0, yop1)
1176 && rtx_equal_for_memref_p (canon_rtx (XEXP (x, 1)), yop0)));
1178 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
1180 return (rtx_equal_for_memref_p (canon_rtx (XEXP (x, 0)),
1181 canon_rtx (XEXP (y, 0)))
1182 && rtx_equal_for_memref_p (canon_rtx (XEXP (x, 1)),
1183 canon_rtx (XEXP (y, 1))));
1185 else if (GET_RTX_CLASS (code) == '1')
1186 return rtx_equal_for_memref_p (canon_rtx (XEXP (x, 0)),
1187 canon_rtx (XEXP (y, 0)));
1189 /* Compare the elements. If any pair of corresponding elements
1190 fail to match, return 0 for the whole thing.
1192 Limit cases to types which actually appear in addresses. */
1194 fmt = GET_RTX_FORMAT (code);
1195 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1197 switch (fmt[i])
1199 case 'i':
1200 if (XINT (x, i) != XINT (y, i))
1201 return 0;
1202 break;
1204 case 'E':
1205 /* Two vectors must have the same length. */
1206 if (XVECLEN (x, i) != XVECLEN (y, i))
1207 return 0;
1209 /* And the corresponding elements must match. */
1210 for (j = 0; j < XVECLEN (x, i); j++)
1211 if (rtx_equal_for_memref_p (canon_rtx (XVECEXP (x, i, j)),
1212 canon_rtx (XVECEXP (y, i, j))) == 0)
1213 return 0;
1214 break;
1216 case 'e':
1217 if (rtx_equal_for_memref_p (canon_rtx (XEXP (x, i)),
1218 canon_rtx (XEXP (y, i))) == 0)
1219 return 0;
1220 break;
1222 /* This can happen for asm operands. */
1223 case 's':
1224 if (strcmp (XSTR (x, i), XSTR (y, i)))
1225 return 0;
1226 break;
1228 /* This can happen for an asm which clobbers memory. */
1229 case '0':
1230 break;
1232 /* It is believed that rtx's at this level will never
1233 contain anything but integers and other rtx's,
1234 except for within LABEL_REFs and SYMBOL_REFs. */
1235 default:
1236 abort ();
1239 return 1;
1242 /* Given an rtx X, find a SYMBOL_REF or LABEL_REF within
1243 X and return it, or return 0 if none found. */
1245 static rtx
1246 find_symbolic_term (rtx x)
1248 int i;
1249 enum rtx_code code;
1250 const char *fmt;
1252 code = GET_CODE (x);
1253 if (code == SYMBOL_REF || code == LABEL_REF)
1254 return x;
/* Other object rtx's (REG, MEM, ...) cannot contain a symbolic term.  */
1255 if (GET_RTX_CLASS (code) == 'o')
1256 return 0;
1258 fmt = GET_RTX_FORMAT (code);
1259 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1261 rtx t;
1263 if (fmt[i] == 'e')
1265 t = find_symbolic_term (XEXP (x, i));
1266 if (t != 0)
1267 return t;
/* Do not look inside vector operands; give up the scan.  */
1269 else if (fmt[i] == 'E')
1270 break;
1272 return 0;
/* Find the base term (SYMBOL_REF, LABEL_REF, or ADDRESS) of address X,
   or 0 if none can be determined.  */
1276 find_base_term (rtx x)
1278 cselib_val *val;
1279 struct elt_loc_list *l;
1281 #if defined (FIND_BASE_TERM)
1282 /* Try machine-dependent ways to find the base term. */
1283 x = FIND_BASE_TERM (x);
1284 #endif
1286 switch (GET_CODE (x))
1288 case REG:
1289 return REG_BASE_VALUE (x);
1291 case TRUNCATE:
1292 if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (Pmode))
1293 return 0;
1294 /* Fall through. */
1295 case HIGH:
1296 case PRE_INC:
1297 case PRE_DEC:
1298 case POST_INC:
1299 case POST_DEC:
1300 case PRE_MODIFY:
1301 case POST_MODIFY:
1302 return find_base_term (XEXP (x, 0));
1304 case ZERO_EXTEND:
1305 case SIGN_EXTEND: /* Used for Alpha/NT pointers */
1307 rtx temp = find_base_term (XEXP (x, 0));
1309 #ifdef POINTERS_EXTEND_UNSIGNED
1310 if (temp != 0 && CONSTANT_P (temp) && GET_MODE (temp) != Pmode)
1311 temp = convert_memory_address (Pmode, temp);
1312 #endif
1314 return temp;
/* For a cselib VALUE, return the base of the first location that
   yields one.  */
1317 case VALUE:
1318 val = CSELIB_VAL_PTR (x);
1319 for (l = val->locs; l; l = l->next)
1320 if ((x = find_base_term (l->loc)) != 0)
1321 return x;
1322 return 0;
1324 case CONST:
1325 x = XEXP (x, 0);
1326 if (GET_CODE (x) != PLUS && GET_CODE (x) != MINUS)
1327 return 0;
1328 /* fall through */
1329 case LO_SUM:
1330 case PLUS:
1331 case MINUS:
1333 rtx tmp1 = XEXP (x, 0);
1334 rtx tmp2 = XEXP (x, 1);
1336 /* This is a little bit tricky since we have to determine which of
1337 the two operands represents the real base address. Otherwise this
1338 routine may return the index register instead of the base register.
1340 That may cause us to believe no aliasing was possible, when in
1341 fact aliasing is possible.
1343 We use a few simple tests to guess the base register. Additional
1344 tests can certainly be added. For example, if one of the operands
1345 is a shift or multiply, then it must be the index register and the
1346 other operand is the base register. */
1348 if (tmp1 == pic_offset_table_rtx && CONSTANT_P (tmp2))
1349 return find_base_term (tmp2);
1351 /* If either operand is known to be a pointer, then use it
1352 to determine the base term. */
1353 if (REG_P (tmp1) && REG_POINTER (tmp1))
1354 return find_base_term (tmp1);
1356 if (REG_P (tmp2) && REG_POINTER (tmp2))
1357 return find_base_term (tmp2);
1359 /* Neither operand was known to be a pointer. Go ahead and find the
1360 base term for both operands. */
1361 tmp1 = find_base_term (tmp1);
1362 tmp2 = find_base_term (tmp2);
1364 /* If either base term is a named object or a special address
1365 (like an argument or stack reference), then use it for the
1366 base term. */
1367 if (tmp1 != 0
1368 && (GET_CODE (tmp1) == SYMBOL_REF
1369 || GET_CODE (tmp1) == LABEL_REF
1370 || (GET_CODE (tmp1) == ADDRESS
1371 && GET_MODE (tmp1) != VOIDmode)))
1372 return tmp1;
1374 if (tmp2 != 0
1375 && (GET_CODE (tmp2) == SYMBOL_REF
1376 || GET_CODE (tmp2) == LABEL_REF
1377 || (GET_CODE (tmp2) == ADDRESS
1378 && GET_MODE (tmp2) != VOIDmode)))
1379 return tmp2;
1381 /* We could not determine which of the two operands was the
1382 base register and which was the index. So we can determine
1383 nothing from the base alias check. */
1384 return 0;
/* An AND with a nonzero constant is an alignment mask; the base is
   that of the unmasked address.  */
1387 case AND:
1388 if (GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) != 0)
1389 return find_base_term (XEXP (x, 0));
1390 return 0;
1392 case SYMBOL_REF:
1393 case LABEL_REF:
1394 return x;
1396 case ADDRESSOF:
1397 return REG_BASE_VALUE (frame_pointer_rtx);
1399 default:
1400 return 0;
1404 /* Return 0 if the addresses X and Y are known to point to different
1405 objects, 1 if they might be pointers to the same object. */
1407 static int
1408 base_alias_check (rtx x, rtx y, enum machine_mode x_mode,
1409 enum machine_mode y_mode)
1411 rtx x_base = find_base_term (x);
1412 rtx y_base = find_base_term (y);
1414 /* If the address itself has no known base see if a known equivalent
1415 value has one. If either address still has no known base, nothing
1416 is known about aliasing. */
1417 if (x_base == 0)
1419 rtx x_c;
1421 if (! flag_expensive_optimizations || (x_c = canon_rtx (x)) == x)
1422 return 1;
1424 x_base = find_base_term (x_c);
1425 if (x_base == 0)
1426 return 1;
1429 if (y_base == 0)
1431 rtx y_c;
1432 if (! flag_expensive_optimizations || (y_c = canon_rtx (y)) == y)
1433 return 1;
1435 y_base = find_base_term (y_c);
1436 if (y_base == 0)
1437 return 1;
1440 /* If the base addresses are equal nothing is known about aliasing. */
1441 if (rtx_equal_p (x_base, y_base))
1442 return 1;
1444 /* The base addresses of the read and write are different expressions.
1445 If they are both symbols and they are not accessed via AND, there is
1446 no conflict. We can bring knowledge of object alignment into play
1447 here. For example, on alpha, "char a, b;" can alias one another,
1448 though "char a; long b;" cannot. */
1449 if (GET_CODE (x_base) != ADDRESS && GET_CODE (y_base) != ADDRESS)
/* An AND-masked access may reach outside its nominal object, so it can
   conflict even when the symbols differ.  */
1451 if (GET_CODE (x) == AND && GET_CODE (y) == AND)
1452 return 1;
1453 if (GET_CODE (x) == AND
1454 && (GET_CODE (XEXP (x, 1)) != CONST_INT
1455 || (int) GET_MODE_UNIT_SIZE (y_mode) < -INTVAL (XEXP (x, 1))))
1456 return 1;
1457 if (GET_CODE (y) == AND
1458 && (GET_CODE (XEXP (y, 1)) != CONST_INT
1459 || (int) GET_MODE_UNIT_SIZE (x_mode) < -INTVAL (XEXP (y, 1))))
1460 return 1;
1461 /* Differing symbols never alias. */
1462 return 0;
1465 /* If one address is a stack reference there can be no alias:
1466 stack references using different base registers do not alias,
1467 a stack reference cannot alias a parameter, and a stack reference
1468 cannot alias a global. */
1469 if ((GET_CODE (x_base) == ADDRESS && GET_MODE (x_base) == Pmode)
1470 || (GET_CODE (y_base) == ADDRESS && GET_MODE (y_base) == Pmode))
1471 return 0;
1473 if (! flag_argument_noalias)
1474 return 1;
1476 if (flag_argument_noalias > 1)
1477 return 0;
1479 /* Weak noalias assertion (arguments are distinct, but may match globals). */
1480 return ! (GET_MODE (x_base) == VOIDmode && GET_MODE (y_base) == VOIDmode)
1483 /* Convert the address X into something we can use. This is done by returning
1484 it unchanged unless it is a value; in the latter case we call cselib to get
1485 a more useful rtx. */
1488 get_addr (rtx x)
1490 cselib_val *v;
1491 struct elt_loc_list *l;
1493 if (GET_CODE (x) != VALUE)
1494 return x;
1495 v = CSELIB_VAL_PTR (x);
/* Prefer a constant location, then any location that is neither a REG
   nor a MEM, then fall back to the first location of any kind.  */
1496 for (l = v->locs; l; l = l->next)
1497 if (CONSTANT_P (l->loc))
1498 return l->loc;
1499 for (l = v->locs; l; l = l->next)
1500 if (GET_CODE (l->loc) != REG && GET_CODE (l->loc) != MEM)
1501 return l->loc;
1502 if (v->locs)
1503 return v->locs->loc;
1504 return x;
1507 /* Return the address of the (N_REFS + 1)th memory reference to ADDR
1508 where SIZE is the size in bytes of the memory reference. If ADDR
1509 is not modified by the memory reference then ADDR is returned. */
1512 addr_side_effect_eval (rtx addr, int size, int n_refs)
1514 int offset = 0;
/* PRE forms apply the increment before each reference, POST forms
   after it — hence the differing multipliers below.  */
1516 switch (GET_CODE (addr))
1518 case PRE_INC:
1519 offset = (n_refs + 1) * size;
1520 break;
1521 case PRE_DEC:
1522 offset = -(n_refs + 1) * size;
1523 break;
1524 case POST_INC:
1525 offset = n_refs * size;
1526 break;
1527 case POST_DEC:
1528 offset = -n_refs * size;
1529 break;
1531 default:
1532 return addr;
1535 if (offset)
1536 addr = gen_rtx_PLUS (GET_MODE (addr), XEXP (addr, 0),
1537 GEN_INT (offset));
1538 else
1539 addr = XEXP (addr, 0);
1540 addr = canon_rtx (addr);
1542 return addr;
1545 /* Return nonzero if X and Y (memory addresses) could reference the
1546 same location in memory. C is an offset accumulator. When
1547 C is nonzero, we are testing aliases between X and Y + C.
1548 XSIZE is the size in bytes of the X reference,
1549 similarly YSIZE is the size in bytes for Y.
1550 Expect that canon_rtx has been already called for X and Y.
1552 If XSIZE or YSIZE is zero, we do not know the amount of memory being
1553 referenced (the reference was BLKmode), so make the most pessimistic
1554 assumptions.
1556 If XSIZE or YSIZE is negative, we may access memory outside the object
1557 being referenced as a side effect. This can happen when using AND to
1558 align memory references, as is done on the Alpha.
1560 Nice to notice that varying addresses cannot conflict with fp if no
1561 local variables had their addresses taken, but that's too hard now. */
1563 static int
1564 memrefs_conflict_p (int xsize, rtx x, int ysize, rtx y, HOST_WIDE_INT c)
1566 if (GET_CODE (x) == VALUE)
1567 x = get_addr (x);
1568 if (GET_CODE (y) == VALUE)
1569 y = get_addr (y);
1570 if (GET_CODE (x) == HIGH)
1571 x = XEXP (x, 0);
1572 else if (GET_CODE (x) == LO_SUM)
1573 x = XEXP (x, 1);
1574 else
1575 x = addr_side_effect_eval (x, xsize, 0);
1576 if (GET_CODE (y) == HIGH)
1577 y = XEXP (y, 0);
1578 else if (GET_CODE (y) == LO_SUM)
1579 y = XEXP (y, 1);
1580 else
1581 y = addr_side_effect_eval (y, ysize, 0);
/* Identical addresses conflict exactly when the accesses overlap,
   given the offset C between them.  */
1583 if (rtx_equal_for_memref_p (x, y))
1585 if (xsize <= 0 || ysize <= 0)
1586 return 1;
1587 if (c >= 0 && xsize > c)
1588 return 1;
1589 if (c < 0 && ysize+c > 0)
1590 return 1;
1591 return 0;
1594 /* This code used to check for conflicts involving stack references and
1595 globals but the base address alias code now handles these cases. */
1597 if (GET_CODE (x) == PLUS)
1599 /* The fact that X is canonicalized means that this
1600 PLUS rtx is canonicalized. */
1601 rtx x0 = XEXP (x, 0);
1602 rtx x1 = XEXP (x, 1);
1604 if (GET_CODE (y) == PLUS)
1606 /* The fact that Y is canonicalized means that this
1607 PLUS rtx is canonicalized. */
1608 rtx y0 = XEXP (y, 0);
1609 rtx y1 = XEXP (y, 1);
1611 if (rtx_equal_for_memref_p (x1, y1))
1612 return memrefs_conflict_p (xsize, x0, ysize, y0, c);
1613 if (rtx_equal_for_memref_p (x0, y0))
1614 return memrefs_conflict_p (xsize, x1, ysize, y1, c);
1615 if (GET_CODE (x1) == CONST_INT)
1617 if (GET_CODE (y1) == CONST_INT)
1618 return memrefs_conflict_p (xsize, x0, ysize, y0,
1619 c - INTVAL (x1) + INTVAL (y1));
1620 else
1621 return memrefs_conflict_p (xsize, x0, ysize, y,
1622 c - INTVAL (x1));
1624 else if (GET_CODE (y1) == CONST_INT)
1625 return memrefs_conflict_p (xsize, x, ysize, y0, c + INTVAL (y1));
1627 return 1;
1629 else if (GET_CODE (x1) == CONST_INT)
1630 return memrefs_conflict_p (xsize, x0, ysize, y, c - INTVAL (x1));
1632 else if (GET_CODE (y) == PLUS)
1634 /* The fact that Y is canonicalized means that this
1635 PLUS rtx is canonicalized. */
1636 rtx y0 = XEXP (y, 0);
1637 rtx y1 = XEXP (y, 1);
1639 if (GET_CODE (y1) == CONST_INT)
1640 return memrefs_conflict_p (xsize, x, ysize, y0, c + INTVAL (y1));
1641 else
1642 return 1;
1645 if (GET_CODE (x) == GET_CODE (y))
1646 switch (GET_CODE (x))
1648 case MULT:
1650 /* Handle cases where we expect the second operands to be the
1651 same, and check only whether the first operand would conflict
1652 or not. */
1653 rtx x0, y0;
1654 rtx x1 = canon_rtx (XEXP (x, 1));
1655 rtx y1 = canon_rtx (XEXP (y, 1));
1656 if (! rtx_equal_for_memref_p (x1, y1))
1657 return 1;
1658 x0 = canon_rtx (XEXP (x, 0));
1659 y0 = canon_rtx (XEXP (y, 0));
1660 if (rtx_equal_for_memref_p (x0, y0))
1661 return (xsize == 0 || ysize == 0
1662 || (c >= 0 && xsize > c) || (c < 0 && ysize+c > 0));
1664 /* Can't properly adjust our sizes. */
1665 if (GET_CODE (x1) != CONST_INT)
1666 return 1;
/* Scale everything down by the common multiplier and recurse.  */
1667 xsize /= INTVAL (x1);
1668 ysize /= INTVAL (x1);
1669 c /= INTVAL (x1);
1670 return memrefs_conflict_p (xsize, x0, ysize, y0, c);
1673 case REG:
1674 /* Are these registers known not to be equal? */
1675 if (alias_invariant)
1677 unsigned int r_x = REGNO (x), r_y = REGNO (y);
1678 rtx i_x, i_y; /* invariant relationships of X and Y */
1680 i_x = r_x >= reg_base_value_size ? 0 : alias_invariant[r_x];
1681 i_y = r_y >= reg_base_value_size ? 0 : alias_invariant[r_y];
1683 if (i_x == 0 && i_y == 0)
1684 break;
1686 if (! memrefs_conflict_p (xsize, i_x ? i_x : x,
1687 ysize, i_y ? i_y : y, c))
1688 return 0;
1690 break;
1692 default:
1693 break;
1696 /* Treat an access through an AND (e.g. a subword access on an Alpha)
1697 as an access with indeterminate size. Assume that references
1698 besides AND are aligned, so if the size of the other reference is
1699 at least as large as the alignment, assume no other overlap. */
1700 if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT)
1702 if (GET_CODE (y) == AND || ysize < -INTVAL (XEXP (x, 1)))
1703 xsize = -1;
1704 return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)), ysize, y, c);
1706 if (GET_CODE (y) == AND && GET_CODE (XEXP (y, 1)) == CONST_INT)
1708 /* ??? If we are indexing far enough into the array/structure, we
1709 may yet be able to determine that we can not overlap. But we
1710 also need to know that we are far enough from the end not to overlap
1711 a following reference, so we do nothing with that for now. */
1712 if (GET_CODE (x) == AND || xsize < -INTVAL (XEXP (y, 1)))
1713 ysize = -1;
1714 return memrefs_conflict_p (xsize, x, ysize, canon_rtx (XEXP (y, 0)), c);
1717 if (GET_CODE (x) == ADDRESSOF)
1719 if (y == frame_pointer_rtx
1720 || GET_CODE (y) == ADDRESSOF)
1721 return xsize <= 0 || ysize <= 0;
1723 if (GET_CODE (y) == ADDRESSOF)
1725 if (x == frame_pointer_rtx)
1726 return xsize <= 0 || ysize <= 0;
1729 if (CONSTANT_P (x))
1731 if (GET_CODE (x) == CONST_INT && GET_CODE (y) == CONST_INT)
1733 c += (INTVAL (y) - INTVAL (x));
1734 return (xsize <= 0 || ysize <= 0
1735 || (c >= 0 && xsize > c) || (c < 0 && ysize+c > 0));
1738 if (GET_CODE (x) == CONST)
1740 if (GET_CODE (y) == CONST)
1741 return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)),
1742 ysize, canon_rtx (XEXP (y, 0)), c);
1743 else
1744 return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)),
1745 ysize, y, c);
1747 if (GET_CODE (y) == CONST)
1748 return memrefs_conflict_p (xsize, x, ysize,
1749 canon_rtx (XEXP (y, 0)), c);
1751 if (CONSTANT_P (y))
1752 return (xsize <= 0 || ysize <= 0
1753 || (rtx_equal_for_memref_p (x, y)
1754 && ((c >= 0 && xsize > c) || (c < 0 && ysize+c > 0))));
1756 return 1;
1758 return 1;
1761 /* Functions to compute memory dependencies.
1763 Since we process the insns in execution order, we can build tables
1764 to keep track of what registers are fixed (and not aliased), what registers
1765 are varying in known ways, and what registers are varying in unknown
1766 ways.
1768 If both memory references are volatile, then there must always be a
1769 dependence between the two references, since their order cannot be
1770 changed. A volatile and non-volatile reference can be interchanged
1771 though.
1773 A MEM_IN_STRUCT reference at a non-AND varying address can never
1774 conflict with a non-MEM_IN_STRUCT reference at a fixed address. We
1775 also must allow AND addresses, because they may generate accesses
1776 outside the object being referenced. This is used to generate
1777 aligned addresses from unaligned addresses, for instance, the alpha
1778 storeqi_unaligned pattern. */
1780 /* Read dependence: X is read after read in MEM takes place. There can
1781 only be a dependence here if both reads are volatile. */
1784 read_dependence (rtx mem, rtx x)
1786 return MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem);
1789 /* Returns MEM1 if and only if MEM1 is a scalar at a fixed address and
1790 MEM2 is a reference to a structure at a varying address, or returns
1791 MEM2 if vice versa. Otherwise, returns NULL_RTX. If a non-NULL
1792 value is returned MEM1 and MEM2 can never alias. VARIES_P is used
1793 to decide whether or not an address may vary; it should return
1794 nonzero whenever variation is possible.
1795 MEM1_ADDR and MEM2_ADDR are the addresses of MEM1 and MEM2. */
1797 static rtx
1798 fixed_scalar_and_varying_struct_p (rtx mem1, rtx mem2, rtx mem1_addr,
1799 rtx mem2_addr,
1800 int (*varies_p) (rtx, int))
/* This deduction relies on type-based (strict) aliasing rules, so it
   is only valid when -fstrict-aliasing is in effect.  */
1802 if (! flag_strict_aliasing)
1803 return NULL_RTX;
1805 if (MEM_SCALAR_P (mem1) && MEM_IN_STRUCT_P (mem2)
1806 && !varies_p (mem1_addr, 1) && varies_p (mem2_addr, 1))
1807 /* MEM1 is a scalar at a fixed address; MEM2 is a struct at a
1808 varying address. */
1809 return mem1;
1811 if (MEM_IN_STRUCT_P (mem1) && MEM_SCALAR_P (mem2)
1812 && varies_p (mem1_addr, 1) && !varies_p (mem2_addr, 1))
1813 /* MEM2 is a scalar at a fixed address; MEM1 is a struct at a
1814 varying address. */
1815 return mem2;
1817 return NULL_RTX;
1820 /* Returns nonzero if something about the mode or address format MEM1
1821 indicates that it might well alias *anything*. */
1823 static int
1824 aliases_everything_p (rtx mem)
1826 if (GET_CODE (XEXP (mem, 0)) == AND
1827 /* If the address is an AND, it's very hard to know what it is
1828 actually pointing at. */
1829 return 1;
1831 return 0;
1834 /* Return true if we can determine that the fields referenced cannot
1835 overlap for any pair of objects. */
1837 static bool
1838 nonoverlapping_component_refs_p (tree x, tree y)
1840 tree fieldx, fieldy, typex, typey, orig_y;
1844 /* The comparison has to be done at a common type, since we don't
1845 know how the inheritance hierarchy works. */
1846 orig_y = y;
/* Walk X's chain of COMPONENT_REFs; for each X field, scan Y's chain
   for a field whose DECL_FIELD_CONTEXT type matches.  */
1849 fieldx = TREE_OPERAND (x, 1);
1850 typex = DECL_FIELD_CONTEXT (fieldx);
1852 y = orig_y;
1855 fieldy = TREE_OPERAND (y, 1);
1856 typey = DECL_FIELD_CONTEXT (fieldy);
1858 if (typex == typey)
1859 goto found;
1861 y = TREE_OPERAND (y, 0);
1863 while (y && TREE_CODE (y) == COMPONENT_REF);
1865 x = TREE_OPERAND (x, 0);
1867 while (x && TREE_CODE (x) == COMPONENT_REF);
1869 /* Never found a common type. */
1870 return false;
1872 found:
1873 /* If we're left with accessing different fields of a structure,
1874 then no overlap. */
1875 if (TREE_CODE (typex) == RECORD_TYPE
1876 && fieldx != fieldy)
1877 return true;
1879 /* The comparison on the current field failed. If we're accessing
1880 a very nested structure, look at the next outer level. */
1881 x = TREE_OPERAND (x, 0);
1882 y = TREE_OPERAND (y, 0);
1884 while (x && y
1885 && TREE_CODE (x) == COMPONENT_REF
1886 && TREE_CODE (y) == COMPONENT_REF);
1888 return false;
1891 /* Look at the bottom of the COMPONENT_REF list for a DECL, and return it. */
1893 static tree
1894 decl_for_component_ref (tree x)
/* Strip COMPONENT_REFs until a non-COMPONENT_REF node is reached.  */
1898 x = TREE_OPERAND (x, 0);
1900 while (x && TREE_CODE (x) == COMPONENT_REF);
1902 return x && DECL_P (x) ? x : NULL_TREE;
1905 /* Walk up the COMPONENT_REF list and adjust OFFSET to compensate for the
1906 offset of the field reference. */
1908 static rtx
1909 adjust_offset_for_component_ref (tree x, rtx offset)
1911 HOST_WIDE_INT ioffset;
1913 if (! offset)
1914 return NULL_RTX;
1916 ioffset = INTVAL (offset);
/* Accumulate the byte offset of every field along the COMPONENT_REF
   chain; fail if any field offset is not a host integer.  */
1919 tree field = TREE_OPERAND (x, 1);
1921 if (! host_integerp (DECL_FIELD_OFFSET (field), 1))
1922 return NULL_RTX;
1923 ioffset += (tree_low_cst (DECL_FIELD_OFFSET (field), 1)
1924 + (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1925 / BITS_PER_UNIT));
1927 x = TREE_OPERAND (x, 0);
1929 while (x && TREE_CODE (x) == COMPONENT_REF);
1931 return GEN_INT (ioffset);
1934 /* Return nonzero if we can determine the exprs corresponding to memrefs
1935 X and Y and they do not overlap. */
1937 static int
1938 nonoverlapping_memrefs_p (rtx x, rtx y)
1940 tree exprx = MEM_EXPR (x), expry = MEM_EXPR (y);
1941 rtx rtlx, rtly;
1942 rtx basex, basey;
1943 rtx moffsetx, moffsety;
1944 HOST_WIDE_INT offsetx = 0, offsety = 0, sizex, sizey, tem;
1946 /* Unless both have exprs, we can't tell anything. */
1947 if (exprx == 0 || expry == 0)
1948 return 0;
1950 /* If both are field references, we may be able to determine something. */
1951 if (TREE_CODE (exprx) == COMPONENT_REF
1952 && TREE_CODE (expry) == COMPONENT_REF
1953 && nonoverlapping_component_refs_p (exprx, expry))
1954 return 1;
1956 /* If the field reference test failed, look at the DECLs involved. */
1957 moffsetx = MEM_OFFSET (x);
1958 if (TREE_CODE (exprx) == COMPONENT_REF)
1960 tree t = decl_for_component_ref (exprx);
1961 if (! t)
1962 return 0;
1963 moffsetx = adjust_offset_for_component_ref (exprx, moffsetx);
1964 exprx = t;
1966 else if (TREE_CODE (exprx) == INDIRECT_REF)
1968 exprx = TREE_OPERAND (exprx, 0);
1969 if (flag_argument_noalias < 2
1970 || TREE_CODE (exprx) != PARM_DECL)
1971 return 0;
1974 moffsety = MEM_OFFSET (y);
1975 if (TREE_CODE (expry) == COMPONENT_REF)
1977 tree t = decl_for_component_ref (expry);
1978 if (! t)
1979 return 0;
1980 moffsety = adjust_offset_for_component_ref (expry, moffsety);
1981 expry = t;
1983 else if (TREE_CODE (expry) == INDIRECT_REF)
1985 expry = TREE_OPERAND (expry, 0);
1986 if (flag_argument_noalias < 2
1987 || TREE_CODE (expry) != PARM_DECL)
1988 return 0;
1991 if (! DECL_P (exprx) || ! DECL_P (expry))
1992 return 0;
1994 rtlx = DECL_RTL (exprx);
1995 rtly = DECL_RTL (expry);
1997 /* If either RTL is not a MEM, it must be a REG or CONCAT, meaning they
1998 can't overlap unless they are the same because we never reuse that part
1999 of the stack frame used for locals for spilled pseudos. */
2000 if ((GET_CODE (rtlx) != MEM || GET_CODE (rtly) != MEM)
2001 && ! rtx_equal_p (rtlx, rtly))
2002 return 1;
2004 /* Get the base and offsets of both decls. If either is a register, we
2005 know both are and are the same, so use that as the base. The only way
2006 we can avoid overlap is if we can deduce that they are nonoverlapping
2007 pieces of that decl, which is very rare. */
2008 basex = GET_CODE (rtlx) == MEM ? XEXP (rtlx, 0) : rtlx;
2009 if (GET_CODE (basex) == PLUS && GET_CODE (XEXP (basex, 1)) == CONST_INT)
2010 offsetx = INTVAL (XEXP (basex, 1)), basex = XEXP (basex, 0);
2012 basey = GET_CODE (rtly) == MEM ? XEXP (rtly, 0) : rtly;
2013 if (GET_CODE (basey) == PLUS && GET_CODE (XEXP (basey, 1)) == CONST_INT)
2014 offsety = INTVAL (XEXP (basey, 1)), basey = XEXP (basey, 0);
2016 /* If the bases are different, we know they do not overlap if both
2017 are constants or if one is a constant and the other a pointer into the
2018 stack frame. Otherwise a different base means we can't tell if they
2019 overlap or not. */
2020 if (! rtx_equal_p (basex, basey))
2021 return ((CONSTANT_P (basex) && CONSTANT_P (basey))
2022 || (CONSTANT_P (basex) && REG_P (basey)
2023 && REGNO_PTR_FRAME_P (REGNO (basey)))
2024 || (CONSTANT_P (basey) && REG_P (basex)
2025 && REGNO_PTR_FRAME_P (REGNO (basex))));
/* Size of each access: mode size for a REG/CONCAT, MEM_SIZE when known,
   otherwise -1 for "unknown".  */
2027 sizex = (GET_CODE (rtlx) != MEM ? (int) GET_MODE_SIZE (GET_MODE (rtlx))
2028 : MEM_SIZE (rtlx) ? INTVAL (MEM_SIZE (rtlx))
2029 : -1);
2030 sizey = (GET_CODE (rtly) != MEM ? (int) GET_MODE_SIZE (GET_MODE (rtly))
2031 : MEM_SIZE (rtly) ? INTVAL (MEM_SIZE (rtly)) :
2032 -1);
2034 /* If we have an offset for either memref, it can update the values computed
2035 above. */
2036 if (moffsetx)
2037 offsetx += INTVAL (moffsetx), sizex -= INTVAL (moffsetx);
2038 if (moffsety)
2039 offsety += INTVAL (moffsety), sizey -= INTVAL (moffsety);
2041 /* If a memref has both a size and an offset, we can use the smaller size.
2042 We can't do this if the offset isn't known because we must view this
2043 memref as being anywhere inside the DECL's MEM. */
2044 if (MEM_SIZE (x) && moffsetx)
2045 sizex = INTVAL (MEM_SIZE (x));
2046 if (MEM_SIZE (y) && moffsety)
2047 sizey = INTVAL (MEM_SIZE (y));
2049 /* Put the values of the memref with the lower offset in X's values. */
2050 if (offsetx > offsety)
2052 tem = offsetx, offsetx = offsety, offsety = tem;
2053 tem = sizex, sizex = sizey, sizey = tem;
2056 /* If we don't know the size of the lower-offset value, we can't tell
2057 if they conflict. Otherwise, we do the test. */
2058 return sizex >= 0 && offsety >= offsetx + sizex;
2061 /* True dependence: X is read after store in MEM takes place. */
2064 true_dependence (rtx mem, enum machine_mode mem_mode, rtx x,
2065 int (*varies) (rtx, int))
2067 rtx x_addr, mem_addr;
2068 rtx base;
2070 if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
2071 return 1;
2073 /* (mem:BLK (scratch)) is a special mechanism to conflict with everything.
2074 This is used in epilogue deallocation functions. */
2075 if (GET_MODE (x) == BLKmode && GET_CODE (XEXP (x, 0)) == SCRATCH)
2076 return 1;
2077 if (GET_MODE (mem) == BLKmode && GET_CODE (XEXP (mem, 0)) == SCRATCH)
2078 return 1;
2080 if (DIFFERENT_ALIAS_SETS_P (x, mem))
2081 return 0;
2083 /* Unchanging memory can't conflict with non-unchanging memory.
2084 A non-unchanging read can conflict with a non-unchanging write.
2085 An unchanging read can conflict with an unchanging write since
2086 there may be a single store to this address to initialize it.
2087 Note that an unchanging store can conflict with a non-unchanging read
2088 since we have to make conservative assumptions when we have a
2089 record with readonly fields and we are copying the whole thing.
2090 Just fall through to the code below to resolve potential conflicts.
2091 This won't handle all cases optimally, but the possible performance
2092 loss should be negligible. */
2093 if (RTX_UNCHANGING_P (x) && ! RTX_UNCHANGING_P (mem))
2094 return 0;
2096 if (nonoverlapping_memrefs_p (mem, x))
2097 return 0;
2099 if (mem_mode == VOIDmode)
2100 mem_mode = GET_MODE (mem);
2102 x_addr = get_addr (XEXP (x, 0));
2103 mem_addr = get_addr (XEXP (mem, 0));
/* A read whose base is a label or a constant-pool symbol references
   read-only memory and cannot depend on any store.  */
2105 base = find_base_term (x_addr);
2106 if (base && (GET_CODE (base) == LABEL_REF
2107 || (GET_CODE (base) == SYMBOL_REF
2108 && CONSTANT_POOL_ADDRESS_P (base))))
2109 return 0;
2111 if (! base_alias_check (x_addr, mem_addr, GET_MODE (x), mem_mode))
2112 return 0;
2114 x_addr = canon_rtx (x_addr);
2115 mem_addr = canon_rtx (mem_addr);
2117 if (! memrefs_conflict_p (GET_MODE_SIZE (mem_mode), mem_addr,
2118 SIZE_FOR_MODE (x), x_addr, 0))
2119 return 0;
2121 if (aliases_everything_p (x))
2122 return 1;
2124 /* We cannot use aliases_everything_p to test MEM, since we must look
2125 at MEM_MODE, rather than GET_MODE (MEM). */
2126 if (mem_mode == QImode || GET_CODE (mem_addr) == AND)
2127 return 1;
2129 /* In true_dependence we also allow BLKmode to alias anything. Why
2130 don't we do this in anti_dependence and output_dependence? */
2131 if (mem_mode == BLKmode || GET_MODE (x) == BLKmode)
2132 return 1;
2134 return ! fixed_scalar_and_varying_struct_p (mem, x, mem_addr, x_addr,
2135 varies);
2138 /* Canonical true dependence: X is read after store in MEM takes place.
2139 Variant of true_dependence which assumes MEM has already been
2140 canonicalized (hence we no longer do that here).
2141 The mem_addr argument has been added, since true_dependence computed
2142 this value prior to canonicalizing. */
2145 canon_true_dependence (rtx mem, enum machine_mode mem_mode, rtx mem_addr,
2146 rtx x, int (*varies) (rtx, int))
2148 rtx x_addr;
2150 if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
2151 return 1;
2153 /* (mem:BLK (scratch)) is a special mechanism to conflict with everything.
2154 This is used in epilogue deallocation functions. */
2155 if (GET_MODE (x) == BLKmode && GET_CODE (XEXP (x, 0)) == SCRATCH)
2156 return 1;
2157 if (GET_MODE (mem) == BLKmode && GET_CODE (XEXP (mem, 0)) == SCRATCH)
2158 return 1;
2160 if (DIFFERENT_ALIAS_SETS_P (x, mem))
2161 return 0;
2163 /* If X is an unchanging read, then it can't possibly conflict with any
2164 non-unchanging store. It may conflict with an unchanging write though,
2165 because there may be a single store to this address to initialize it.
2166 Just fall through to the code below to resolve the case where we have
2167 both an unchanging read and an unchanging write. This won't handle all
2168 cases optimally, but the possible performance loss should be
2169 negligible. */
2170 if (RTX_UNCHANGING_P (x) && ! RTX_UNCHANGING_P (mem))
2171 return 0;
2173 if (nonoverlapping_memrefs_p (x, mem))
2174 return 0;
2176 x_addr = get_addr (XEXP (x, 0));
2178 if (! base_alias_check (x_addr, mem_addr, GET_MODE (x), mem_mode))
2179 return 0;
2181 x_addr = canon_rtx (x_addr);
2182 if (! memrefs_conflict_p (GET_MODE_SIZE (mem_mode), mem_addr,
2183 SIZE_FOR_MODE (x), x_addr, 0))
2184 return 0;
2186 if (aliases_everything_p (x))
2187 return 1;
2189 /* We cannot use aliases_everything_p to test MEM, since we must look
2190 at MEM_MODE, rather than GET_MODE (MEM). */
2191 if (mem_mode == QImode || GET_CODE (mem_addr) == AND)
2192 return 1;
2194 /* In true_dependence we also allow BLKmode to alias anything. Why
2195 don't we do this in anti_dependence and output_dependence? */
2196 if (mem_mode == BLKmode || GET_MODE (x) == BLKmode)
2197 return 1;
2199 return ! fixed_scalar_and_varying_struct_p (mem, x, mem_addr, x_addr,
2200 varies);
2203 /* Returns nonzero if a write to X might alias a previous read from
2204 (or, if WRITEP is nonzero, a write to) MEM. */
2206 static int
2207 write_dependence_p (rtx mem, rtx x, int writep)
2209 rtx x_addr, mem_addr;
2210 rtx fixed_scalar;
2211 rtx base;
2213 if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
2214 return 1;
2216 /* (mem:BLK (scratch)) is a special mechanism to conflict with everything.
2217 This is used in epilogue deallocation functions. */
2218 if (GET_MODE (x) == BLKmode && GET_CODE (XEXP (x, 0)) == SCRATCH)
2219 return 1;
2220 if (GET_MODE (mem) == BLKmode && GET_CODE (XEXP (mem, 0)) == SCRATCH)
2221 return 1;
2223 if (DIFFERENT_ALIAS_SETS_P (x, mem))
2224 return 0;
2226 /* Unchanging memory can't conflict with non-unchanging memory. */
2227 if (RTX_UNCHANGING_P (x) != RTX_UNCHANGING_P (mem))
2228 return 0;
2230 /* If MEM is an unchanging read, then it can't possibly conflict with
2231 the store to X, because there is at most one store to MEM, and it must
2232 have occurred somewhere before MEM. */
2233 if (! writep && RTX_UNCHANGING_P (mem))
2234 return 0;
2236 if (nonoverlapping_memrefs_p (x, mem))
2237 return 0;
2239 x_addr = get_addr (XEXP (x, 0));
2240 mem_addr = get_addr (XEXP (mem, 0));
2242 if (! writep)
2244 base = find_base_term (mem_addr);
2245 if (base && (GET_CODE (base) == LABEL_REF
2246 || (GET_CODE (base) == SYMBOL_REF
2247 && CONSTANT_POOL_ADDRESS_P (base))))
2248 return 0;
2251 if (! base_alias_check (x_addr, mem_addr, GET_MODE (x),
2252 GET_MODE (mem)))
2253 return 0;
2255 x_addr = canon_rtx (x_addr);
2256 mem_addr = canon_rtx (mem_addr);
2258 if (!memrefs_conflict_p (SIZE_FOR_MODE (mem), mem_addr,
2259 SIZE_FOR_MODE (x), x_addr, 0))
2260 return 0;
2262 fixed_scalar
2263 = fixed_scalar_and_varying_struct_p (mem, x, mem_addr, x_addr,
2264 rtx_addr_varies_p);
2266 return (!(fixed_scalar == mem && !aliases_everything_p (x))
2267 && !(fixed_scalar == x && !aliases_everything_p (mem)));
2270 /* Anti dependence: X is written after read in MEM takes place. */
2273 anti_dependence (rtx mem, rtx x)
2275 return write_dependence_p (mem, x, /*writep=*/0);
2278 /* Output dependence: X is written after store in MEM takes place. */
2281 output_dependence (rtx mem, rtx x)
2283 return write_dependence_p (mem, x, /*writep=*/1);
2286 /* A subroutine of nonlocal_mentioned_p, returns 1 if *LOC mentions
2287 something which is not local to the function and is not constant. */
2289 static int
2290 nonlocal_mentioned_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
2292 rtx x = *loc;
2293 rtx base;
2294 int regno;
2296 if (! x)
2297 return 0;
2299 switch (GET_CODE (x))
2301 case SUBREG:
2302 if (GET_CODE (SUBREG_REG (x)) == REG)
2304 /* Global registers are not local. */
2305 if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
2306 && global_regs[subreg_regno (x)])
2307 return 1;
2308 return 0;
2310 break;
2312 case REG:
2313 regno = REGNO (x);
2314 /* Global registers are not local. */
2315 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
2316 return 1;
2317 return 0;
2319 case SCRATCH:
2320 case PC:
2321 case CC0:
2322 case CONST_INT:
2323 case CONST_DOUBLE:
2324 case CONST_VECTOR:
2325 case CONST:
2326 case LABEL_REF:
2327 return 0;
2329 case SYMBOL_REF:
2330 /* Constants in the function's constants pool are constant. */
2331 if (CONSTANT_POOL_ADDRESS_P (x))
2332 return 0;
2333 return 1;
2335 case CALL:
2336 /* Non-constant calls and recursion are not local. */
2337 return 1;
2339 case MEM:
2340 /* Be overly conservative and consider any volatile memory
2341 reference as not local. */
2342 if (MEM_VOLATILE_P (x))
2343 return 1;
2344 base = find_base_term (XEXP (x, 0));
2345 if (base)
2347 /* A Pmode ADDRESS could be a reference via the structure value
2348 address or static chain. Such memory references are nonlocal.
2350 Thus, we have to examine the contents of the ADDRESS to find
2351 out if this is a local reference or not. */
2352 if (GET_CODE (base) == ADDRESS
2353 && GET_MODE (base) == Pmode
2354 && (XEXP (base, 0) == stack_pointer_rtx
2355 || XEXP (base, 0) == arg_pointer_rtx
2356 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2357 || XEXP (base, 0) == hard_frame_pointer_rtx
2358 #endif
2359 || XEXP (base, 0) == frame_pointer_rtx))
2360 return 0;
2361 /* Constants in the function's constant pool are constant. */
2362 if (GET_CODE (base) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (base))
2363 return 0;
2365 return 1;
2367 case UNSPEC_VOLATILE:
2368 case ASM_INPUT:
2369 return 1;
2371 case ASM_OPERANDS:
2372 if (MEM_VOLATILE_P (x))
2373 return 1;
2375 /* FALLTHROUGH */
2377 default:
2378 break;
2381 return 0;
2384 /* Returns nonzero if X might mention something which is not
2385 local to the function and is not constant. */
2387 static int
2388 nonlocal_mentioned_p (rtx x)
2390 if (INSN_P (x))
2392 if (GET_CODE (x) == CALL_INSN)
2394 if (! CONST_OR_PURE_CALL_P (x))
2395 return 1;
2396 x = CALL_INSN_FUNCTION_USAGE (x);
2397 if (x == 0)
2398 return 0;
2400 else
2401 x = PATTERN (x);
2404 return for_each_rtx (&x, nonlocal_mentioned_p_1, NULL);
2407 /* A subroutine of nonlocal_referenced_p, returns 1 if *LOC references
2408 something which is not local to the function and is not constant. */
2410 static int
2411 nonlocal_referenced_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
2413 rtx x = *loc;
2415 if (! x)
2416 return 0;
2418 switch (GET_CODE (x))
2420 case MEM:
2421 case REG:
2422 case SYMBOL_REF:
2423 case SUBREG:
2424 return nonlocal_mentioned_p (x);
2426 case CALL:
2427 /* Non-constant calls and recursion are not local. */
2428 return 1;
2430 case SET:
2431 if (nonlocal_mentioned_p (SET_SRC (x)))
2432 return 1;
2434 if (GET_CODE (SET_DEST (x)) == MEM)
2435 return nonlocal_mentioned_p (XEXP (SET_DEST (x), 0));
2437 /* If the destination is anything other than a CC0, PC,
2438 MEM, REG, or a SUBREG of a REG that occupies all of
2439 the REG, then X references nonlocal memory if it is
2440 mentioned in the destination. */
2441 if (GET_CODE (SET_DEST (x)) != CC0
2442 && GET_CODE (SET_DEST (x)) != PC
2443 && GET_CODE (SET_DEST (x)) != REG
2444 && ! (GET_CODE (SET_DEST (x)) == SUBREG
2445 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG
2446 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
2447 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
2448 == ((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
2449 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
2450 return nonlocal_mentioned_p (SET_DEST (x));
2451 return 0;
2453 case CLOBBER:
2454 if (GET_CODE (XEXP (x, 0)) == MEM)
2455 return nonlocal_mentioned_p (XEXP (XEXP (x, 0), 0));
2456 return 0;
2458 case USE:
2459 return nonlocal_mentioned_p (XEXP (x, 0));
2461 case ASM_INPUT:
2462 case UNSPEC_VOLATILE:
2463 return 1;
2465 case ASM_OPERANDS:
2466 if (MEM_VOLATILE_P (x))
2467 return 1;
2469 /* FALLTHROUGH */
2471 default:
2472 break;
2475 return 0;
2478 /* Returns nonzero if X might reference something which is not
2479 local to the function and is not constant. */
2481 static int
2482 nonlocal_referenced_p (rtx x)
2484 if (INSN_P (x))
2486 if (GET_CODE (x) == CALL_INSN)
2488 if (! CONST_OR_PURE_CALL_P (x))
2489 return 1;
2490 x = CALL_INSN_FUNCTION_USAGE (x);
2491 if (x == 0)
2492 return 0;
2494 else
2495 x = PATTERN (x);
2498 return for_each_rtx (&x, nonlocal_referenced_p_1, NULL);
2501 /* A subroutine of nonlocal_set_p, returns 1 if *LOC sets
2502 something which is not local to the function and is not constant. */
2504 static int
2505 nonlocal_set_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
2507 rtx x = *loc;
2509 if (! x)
2510 return 0;
2512 switch (GET_CODE (x))
2514 case CALL:
2515 /* Non-constant calls and recursion are not local. */
2516 return 1;
2518 case PRE_INC:
2519 case PRE_DEC:
2520 case POST_INC:
2521 case POST_DEC:
2522 case PRE_MODIFY:
2523 case POST_MODIFY:
2524 return nonlocal_mentioned_p (XEXP (x, 0));
2526 case SET:
2527 if (nonlocal_mentioned_p (SET_DEST (x)))
2528 return 1;
2529 return nonlocal_set_p (SET_SRC (x));
2531 case CLOBBER:
2532 return nonlocal_mentioned_p (XEXP (x, 0));
2534 case USE:
2535 return 0;
2537 case ASM_INPUT:
2538 case UNSPEC_VOLATILE:
2539 return 1;
2541 case ASM_OPERANDS:
2542 if (MEM_VOLATILE_P (x))
2543 return 1;
2545 /* FALLTHROUGH */
2547 default:
2548 break;
2551 return 0;
2554 /* Returns nonzero if X might set something which is not
2555 local to the function and is not constant. */
2557 static int
2558 nonlocal_set_p (rtx x)
2560 if (INSN_P (x))
2562 if (GET_CODE (x) == CALL_INSN)
2564 if (! CONST_OR_PURE_CALL_P (x))
2565 return 1;
2566 x = CALL_INSN_FUNCTION_USAGE (x);
2567 if (x == 0)
2568 return 0;
2570 else
2571 x = PATTERN (x);
2574 return for_each_rtx (&x, nonlocal_set_p_1, NULL);
2577 /* Mark the function if it is pure or constant. */
2579 void
2580 mark_constant_function (void)
2582 rtx insn;
2583 int nonlocal_memory_referenced;
2585 if (TREE_READONLY (current_function_decl)
2586 || DECL_IS_PURE (current_function_decl)
2587 || TREE_THIS_VOLATILE (current_function_decl)
2588 || current_function_has_nonlocal_goto
2589 || !(*targetm.binds_local_p) (current_function_decl))
2590 return;
2592 /* A loop might not return which counts as a side effect. */
2593 if (mark_dfs_back_edges ())
2594 return;
2596 nonlocal_memory_referenced = 0;
2598 init_alias_analysis ();
2600 /* Determine if this is a constant or pure function. */
2602 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2604 if (! INSN_P (insn))
2605 continue;
2607 if (nonlocal_set_p (insn) || global_reg_mentioned_p (insn)
2608 || volatile_refs_p (PATTERN (insn)))
2609 break;
2611 if (! nonlocal_memory_referenced)
2612 nonlocal_memory_referenced = nonlocal_referenced_p (insn);
2615 end_alias_analysis ();
2617 /* Mark the function. */
2619 if (insn)
2621 else if (nonlocal_memory_referenced)
2623 cgraph_rtl_info (current_function_decl)->pure_function = 1;
2624 DECL_IS_PURE (current_function_decl) = 1;
2626 else
2628 cgraph_rtl_info (current_function_decl)->const_function = 1;
2629 TREE_READONLY (current_function_decl) = 1;
2634 void
2635 init_alias_once (void)
2637 int i;
2639 #ifndef OUTGOING_REGNO
2640 #define OUTGOING_REGNO(N) N
2641 #endif
2642 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2643 /* Check whether this register can hold an incoming pointer
2644 argument. FUNCTION_ARG_REGNO_P tests outgoing register
2645 numbers, so translate if necessary due to register windows. */
2646 if (FUNCTION_ARG_REGNO_P (OUTGOING_REGNO (i))
2647 && HARD_REGNO_MODE_OK (i, Pmode))
2648 static_reg_base_value[i]
2649 = gen_rtx_ADDRESS (VOIDmode, gen_rtx_REG (Pmode, i));
2651 static_reg_base_value[STACK_POINTER_REGNUM]
2652 = gen_rtx_ADDRESS (Pmode, stack_pointer_rtx);
2653 static_reg_base_value[ARG_POINTER_REGNUM]
2654 = gen_rtx_ADDRESS (Pmode, arg_pointer_rtx);
2655 static_reg_base_value[FRAME_POINTER_REGNUM]
2656 = gen_rtx_ADDRESS (Pmode, frame_pointer_rtx);
2657 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2658 static_reg_base_value[HARD_FRAME_POINTER_REGNUM]
2659 = gen_rtx_ADDRESS (Pmode, hard_frame_pointer_rtx);
2660 #endif
2662 alias_sets = splay_tree_new (splay_tree_compare_ints, 0, 0);
2665 /* Set MEMORY_MODIFIED when X modifies DATA (that is assumed
2666 to be memory reference. */
2667 static bool memory_modified;
2668 static void
2669 memory_modified_1 (rtx x, rtx pat ATTRIBUTE_UNUSED, void *data)
2671 if (GET_CODE (x) == MEM)
2673 if (anti_dependence (x, (rtx)data) || output_dependence (x, (rtx)data))
2674 memory_modified = true;
2679 /* Return true when INSN possibly modify memory contents of MEM
2680 (ie address can be modified). */
2681 bool
2682 memory_modified_in_insn_p (rtx mem, rtx insn)
2684 if (!INSN_P (insn))
2685 return false;
2686 memory_modified = false;
2687 note_stores (PATTERN (insn), memory_modified_1, mem);
2688 return memory_modified;
2691 /* Initialize the aliasing machinery. Initialize the REG_KNOWN_VALUE
2692 array. */
2694 void
2695 init_alias_analysis (void)
2697 int maxreg = max_reg_num ();
2698 int changed, pass;
2699 int i;
2700 unsigned int ui;
2701 rtx insn;
2703 timevar_push (TV_ALIAS_ANALYSIS);
2705 reg_known_value_size = maxreg;
2707 reg_known_value
2708 = (rtx *) xcalloc ((maxreg - FIRST_PSEUDO_REGISTER), sizeof (rtx))
2709 - FIRST_PSEUDO_REGISTER;
2710 reg_known_equiv_p
2711 = (char*) xcalloc ((maxreg - FIRST_PSEUDO_REGISTER), sizeof (char))
2712 - FIRST_PSEUDO_REGISTER;
2714 /* Overallocate reg_base_value to allow some growth during loop
2715 optimization. Loop unrolling can create a large number of
2716 registers. */
2717 reg_base_value_size = maxreg * 2;
2718 reg_base_value = (rtx *) ggc_alloc_cleared (reg_base_value_size
2719 * sizeof (rtx));
2721 new_reg_base_value = (rtx *) xmalloc (reg_base_value_size * sizeof (rtx));
2722 reg_seen = (char *) xmalloc (reg_base_value_size);
2723 if (! reload_completed && flag_old_unroll_loops)
2725 /* ??? Why are we realloc'ing if we're just going to zero it? */
2726 alias_invariant = (rtx *)xrealloc (alias_invariant,
2727 reg_base_value_size * sizeof (rtx));
2728 memset ((char *)alias_invariant, 0, reg_base_value_size * sizeof (rtx));
2731 /* The basic idea is that each pass through this loop will use the
2732 "constant" information from the previous pass to propagate alias
2733 information through another level of assignments.
2735 This could get expensive if the assignment chains are long. Maybe
2736 we should throttle the number of iterations, possibly based on
2737 the optimization level or flag_expensive_optimizations.
2739 We could propagate more information in the first pass by making use
2740 of REG_N_SETS to determine immediately that the alias information
2741 for a pseudo is "constant".
2743 A program with an uninitialized variable can cause an infinite loop
2744 here. Instead of doing a full dataflow analysis to detect such problems
2745 we just cap the number of iterations for the loop.
2747 The state of the arrays for the set chain in question does not matter
2748 since the program has undefined behavior. */
2750 pass = 0;
2753 /* Assume nothing will change this iteration of the loop. */
2754 changed = 0;
2756 /* We want to assign the same IDs each iteration of this loop, so
2757 start counting from zero each iteration of the loop. */
2758 unique_id = 0;
2760 /* We're at the start of the function each iteration through the
2761 loop, so we're copying arguments. */
2762 copying_arguments = true;
2764 /* Wipe the potential alias information clean for this pass. */
2765 memset ((char *) new_reg_base_value, 0, reg_base_value_size * sizeof (rtx));
2767 /* Wipe the reg_seen array clean. */
2768 memset ((char *) reg_seen, 0, reg_base_value_size);
2770 /* Mark all hard registers which may contain an address.
2771 The stack, frame and argument pointers may contain an address.
2772 An argument register which can hold a Pmode value may contain
2773 an address even if it is not in BASE_REGS.
2775 The address expression is VOIDmode for an argument and
2776 Pmode for other registers. */
2778 memcpy (new_reg_base_value, static_reg_base_value,
2779 FIRST_PSEUDO_REGISTER * sizeof (rtx));
2781 /* Walk the insns adding values to the new_reg_base_value array. */
2782 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2784 if (INSN_P (insn))
2786 rtx note, set;
2788 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
2789 /* The prologue/epilogue insns are not threaded onto the
2790 insn chain until after reload has completed. Thus,
2791 there is no sense wasting time checking if INSN is in
2792 the prologue/epilogue until after reload has completed. */
2793 if (reload_completed
2794 && prologue_epilogue_contains (insn))
2795 continue;
2796 #endif
2798 /* If this insn has a noalias note, process it, Otherwise,
2799 scan for sets. A simple set will have no side effects
2800 which could change the base value of any other register. */
2802 if (GET_CODE (PATTERN (insn)) == SET
2803 && REG_NOTES (insn) != 0
2804 && find_reg_note (insn, REG_NOALIAS, NULL_RTX))
2805 record_set (SET_DEST (PATTERN (insn)), NULL_RTX, NULL);
2806 else
2807 note_stores (PATTERN (insn), record_set, NULL);
2809 set = single_set (insn);
2811 if (set != 0
2812 && GET_CODE (SET_DEST (set)) == REG
2813 && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
2815 unsigned int regno = REGNO (SET_DEST (set));
2816 rtx src = SET_SRC (set);
2818 if (REG_NOTES (insn) != 0
2819 && (((note = find_reg_note (insn, REG_EQUAL, 0)) != 0
2820 && REG_N_SETS (regno) == 1)
2821 || (note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) != 0)
2822 && GET_CODE (XEXP (note, 0)) != EXPR_LIST
2823 && ! rtx_varies_p (XEXP (note, 0), 1)
2824 && ! reg_overlap_mentioned_p (SET_DEST (set), XEXP (note, 0)))
2826 reg_known_value[regno] = XEXP (note, 0);
2827 reg_known_equiv_p[regno] = REG_NOTE_KIND (note) == REG_EQUIV;
2829 else if (REG_N_SETS (regno) == 1
2830 && GET_CODE (src) == PLUS
2831 && GET_CODE (XEXP (src, 0)) == REG
2832 && REGNO (XEXP (src, 0)) >= FIRST_PSEUDO_REGISTER
2833 && (reg_known_value[REGNO (XEXP (src, 0))])
2834 && GET_CODE (XEXP (src, 1)) == CONST_INT)
2836 rtx op0 = XEXP (src, 0);
2837 op0 = reg_known_value[REGNO (op0)];
2838 reg_known_value[regno]
2839 = plus_constant (op0, INTVAL (XEXP (src, 1)));
2840 reg_known_equiv_p[regno] = 0;
2842 else if (REG_N_SETS (regno) == 1
2843 && ! rtx_varies_p (src, 1))
2845 reg_known_value[regno] = src;
2846 reg_known_equiv_p[regno] = 0;
2850 else if (GET_CODE (insn) == NOTE
2851 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG)
2852 copying_arguments = false;
2855 /* Now propagate values from new_reg_base_value to reg_base_value. */
2856 for (ui = 0; ui < reg_base_value_size; ui++)
2858 if (new_reg_base_value[ui]
2859 && new_reg_base_value[ui] != reg_base_value[ui]
2860 && ! rtx_equal_p (new_reg_base_value[ui], reg_base_value[ui]))
2862 reg_base_value[ui] = new_reg_base_value[ui];
2863 changed = 1;
2867 while (changed && ++pass < MAX_ALIAS_LOOP_PASSES);
2869 /* Fill in the remaining entries. */
2870 for (i = FIRST_PSEUDO_REGISTER; i < maxreg; i++)
2871 if (reg_known_value[i] == 0)
2872 reg_known_value[i] = regno_reg_rtx[i];
2874 /* Simplify the reg_base_value array so that no register refers to
2875 another register, except to special registers indirectly through
2876 ADDRESS expressions.
2878 In theory this loop can take as long as O(registers^2), but unless
2879 there are very long dependency chains it will run in close to linear
2880 time.
2882 This loop may not be needed any longer now that the main loop does
2883 a better job at propagating alias information. */
2884 pass = 0;
2887 changed = 0;
2888 pass++;
2889 for (ui = 0; ui < reg_base_value_size; ui++)
2891 rtx base = reg_base_value[ui];
2892 if (base && GET_CODE (base) == REG)
2894 unsigned int base_regno = REGNO (base);
2895 if (base_regno == ui) /* register set from itself */
2896 reg_base_value[ui] = 0;
2897 else
2898 reg_base_value[ui] = reg_base_value[base_regno];
2899 changed = 1;
2903 while (changed && pass < MAX_ALIAS_LOOP_PASSES);
2905 /* Clean up. */
2906 free (new_reg_base_value);
2907 new_reg_base_value = 0;
2908 free (reg_seen);
2909 reg_seen = 0;
2910 timevar_pop (TV_ALIAS_ANALYSIS);
2913 void
2914 end_alias_analysis (void)
2916 free (reg_known_value + FIRST_PSEUDO_REGISTER);
2917 reg_known_value = 0;
2918 reg_known_value_size = 0;
2919 free (reg_known_equiv_p + FIRST_PSEUDO_REGISTER);
2920 reg_known_equiv_p = 0;
2921 reg_base_value = 0;
2922 reg_base_value_size = 0;
2923 if (alias_invariant)
2925 free (alias_invariant);
2926 alias_invariant = 0;
2930 #include "gt-alias.h"