/* Alias analysis for GNU C
   Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006,
   2007, 2008, 2009 Free Software Foundation, Inc.
   Contributed by John Carr (jfc@mit.edu).

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "function.h"
#include "alias.h"
#include "emit-rtl.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "flags.h"
#include "output.h"
#include "toplev.h"
#include "cselib.h"
#include "splay-tree.h"
#include "ggc.h"
#include "langhooks.h"
#include "timevar.h"
#include "target.h"
#include "cgraph.h"
#include "varray.h"
#include "tree-pass.h"
#include "ipa-type-escape.h"
#include "df.h"
/* The aliasing API provided here solves related but different problems:

   Say there exists (in C)

   struct X {
     struct Y y1;
     struct Z z2;
   } x1, *px1, *px2;

   struct Y y2, *py;
   struct Z z2, *pz;

   py = &x1.y1;
   px2 = &x1;

   Consider the four questions:

   Can a store to x1 interfere with px2->y1?
   Can a store to x1 interfere with px2->z2, i.e. (*px2).z2?
   Can a store to x1 change the value pointed to by py?
   Can a store to x1 change the value pointed to by pz?

   The answer to these questions can be yes, yes, yes, and maybe.

   The first two questions can be answered with a simple examination
   of the type system.  If structure X contains a field of type Y then
   a store through a pointer to an X can overwrite any field that is
   contained (recursively) in an X (unless we know that px1 != px2).

   The last two questions can be solved in the same way as the first
   two questions, but this is too conservative.  The observation is
   that in some cases we can determine which (if any) fields are
   addressed and whether those addresses are used in bad ways.  This
   analysis may be language specific.  In C, arbitrary operations may
   be applied to pointers.  However, there is some indication that
   this may be too conservative for some C++ types.

   The pass ipa-type-escape does this analysis for the types whose
   instances do not escape across the compilation boundary.

   Historically in GCC, these two problems were combined and a single
   data structure was used to represent the solution to these
   problems.  We now have two similar but different data structures.
   The data structure used to solve the last two questions is similar
   to the first, but does not have the fields whose addresses are
   never taken.  For types that do escape the compilation unit, the
   data structures will have identical information.  */
/* The alias sets assigned to MEMs assist the back-end in determining
   which MEMs can alias which other MEMs.  In general, two MEMs in
   different alias sets cannot alias each other, with one important
   exception.  Consider something like:

     struct S { int i; double d; };

   a store to an `S' can alias something of either type `int' or type
   `double'.  (However, a store to an `int' cannot alias a `double'
   and vice versa.)  We indicate this via a tree structure that looks
   like:

	      struct S
	       /   \
	    |/_     _\|
	    int    double

   (The arrows are directed and point downwards.)
   In this situation we say the alias set for `struct S' is the
   `superset' and that those for `int' and `double' are `subsets'.

   To see whether two alias sets can point to the same memory, we must
   see if either alias set is a subset of the other.  We need not trace
   past immediate descendants, however, since we propagate all
   grandchildren up one level.

   Alias set zero is implicitly a superset of all other alias sets.
   However, there is no actual entry for alias set zero.  It is an
   error to attempt to explicitly construct a subset of zero.  */
struct alias_set_entry GTY(())
{
  /* The alias set number, as stored in MEM_ALIAS_SET.  */
  alias_set_type alias_set;

  /* Nonzero if this alias set would have a child of zero: this
     effectively makes it the same as alias set zero.  */
  int has_zero_child;

  /* The children of the alias set.  These are not just the immediate
     children, but, in fact, all descendants.  So, if we have:

       struct T { struct S s; float f; }

     continuing our example above, the children here will be all of
     `int', `double', `float', and `struct S'.  */
  splay_tree GTY((param1_is (int), param2_is (int))) children;
};
typedef struct alias_set_entry *alias_set_entry;
static int rtx_equal_for_memref_p (const_rtx, const_rtx);
static int memrefs_conflict_p (int, rtx, int, rtx, HOST_WIDE_INT);
static void record_set (rtx, const_rtx, void *);
static int base_alias_check (rtx, rtx, enum machine_mode,
			     enum machine_mode);
static rtx find_base_value (rtx);
static int mems_in_disjoint_alias_sets_p (const_rtx, const_rtx);
static int insert_subset_children (splay_tree_node, void*);
static tree find_base_decl (tree);
static alias_set_entry get_alias_set_entry (alias_set_type);
static const_rtx fixed_scalar_and_varying_struct_p (const_rtx, const_rtx,
						    rtx, rtx,
						    bool (*) (const_rtx, bool));
static int aliases_everything_p (const_rtx);
static bool nonoverlapping_component_refs_p (const_tree, const_tree);
static tree decl_for_component_ref (tree);
static rtx adjust_offset_for_component_ref (tree, rtx);
static int write_dependence_p (const_rtx, const_rtx, int);

static void memory_modified_1 (rtx, const_rtx, void *);
/* Set up all info needed to perform alias analysis on memory references.  */

/* Returns the size in bytes of the mode of X.  */
#define SIZE_FOR_MODE(X) (GET_MODE_SIZE (GET_MODE (X)))

/* Returns nonzero if MEM1 and MEM2 do not alias because they are in
   different alias sets.  We ignore alias sets in functions making use
   of variable arguments because the va_arg macros on some systems are
   not legal ANSI C.  */
#define DIFFERENT_ALIAS_SETS_P(MEM1, MEM2)			\
  mems_in_disjoint_alias_sets_p (MEM1, MEM2)

/* Cap the number of passes we make over the insns propagating alias
   information through set chains.  10 is a completely arbitrary choice.  */
#define MAX_ALIAS_LOOP_PASSES 10
/* reg_base_value[N] gives an address to which register N is related.
   If all sets after the first add or subtract to the current value
   or otherwise modify it so it does not point to a different top level
   object, reg_base_value[N] is equal to the address part of the source
   of the first set.

   A base address can be an ADDRESS, SYMBOL_REF, or LABEL_REF.  ADDRESS
   expressions represent certain special values: function arguments and
   the stack, frame, and argument pointers.

   The contents of an ADDRESS are not normally used; the mode of the
   ADDRESS determines whether the ADDRESS is a function argument or some
   other special value.  Pointer equality, not rtx_equal_p, determines whether
   two ADDRESS expressions refer to the same base address.

   The only use of the contents of an ADDRESS is for determining if the
   current function performs nonlocal memory references for the
   purposes of marking the function as a constant function.  */

static GTY(()) VEC(rtx,gc) *reg_base_value;
static rtx *new_reg_base_value;

/* We preserve the copy of the old array around to reduce the amount
   of garbage produced.  About 8% of garbage produced was attributed
   to this array.  */
static GTY((deletable)) VEC(rtx,gc) *old_reg_base_value;

/* Static hunks of RTL used by the aliasing code; these are initialized
   once per function to avoid unnecessary RTL allocations.  */
static GTY (()) rtx static_reg_base_value[FIRST_PSEUDO_REGISTER];

#define REG_BASE_VALUE(X)				\
  (REGNO (X) < VEC_length (rtx, reg_base_value)		\
   ? VEC_index (rtx, reg_base_value, REGNO (X)) : 0)

/* Vector indexed by N giving the initial (unchanging) value known for
   pseudo-register N.  This array is initialized in init_alias_analysis,
   and does not change until end_alias_analysis is called.  */
static GTY((length("reg_known_value_size"))) rtx *reg_known_value;

/* Indicates number of valid entries in reg_known_value.  */
static GTY(()) unsigned int reg_known_value_size;

/* Vector recording for each reg_known_value whether it is due to a
   REG_EQUIV note.  Future passes (viz., reload) may replace the
   pseudo with the equivalent expression and so we account for the
   dependences that would be introduced if that happens.

   The REG_EQUIV notes created in assign_parms may mention the arg
   pointer, and there are explicit insns in the RTL that modify the
   arg pointer.  Thus we must ensure that such insns don't get
   scheduled across each other because that would invalidate the
   REG_EQUIV notes.  One could argue that the REG_EQUIV notes are
   wrong, but solving the problem in the scheduler will likely give
   better code, so we do it here.  */
static bool *reg_known_equiv_p;

/* True when scanning insns from the start of the rtl to the
   NOTE_INSN_FUNCTION_BEG note.  */
static bool copying_arguments;

DEF_VEC_P(alias_set_entry);
DEF_VEC_ALLOC_P(alias_set_entry,gc);

/* The splay-tree used to store the various alias set entries.  */
static GTY (()) VEC(alias_set_entry,gc) *alias_sets;
/* Returns a pointer to the alias set entry for ALIAS_SET, if there is
   such an entry, or NULL otherwise.  */

static inline alias_set_entry
get_alias_set_entry (alias_set_type alias_set)
{
  return VEC_index (alias_set_entry, alias_sets, alias_set);
}

/* Returns nonzero if the alias sets for MEM1 and MEM2 are such that
   the two MEMs cannot alias each other.  */

static inline int
mems_in_disjoint_alias_sets_p (const_rtx mem1, const_rtx mem2)
{
  /* Perform a basic sanity check.  Namely, that there are no alias sets
     if we're not using strict aliasing.  This helps to catch bugs
     whereby someone uses PUT_CODE, but doesn't clear MEM_ALIAS_SET, or
     where a MEM is allocated in some way other than by the use of
     gen_rtx_MEM, and the MEM_ALIAS_SET is not cleared.  If we begin to
     use alias sets to indicate that spilled registers cannot alias each
     other, we might need to remove this check.  */
  gcc_assert (flag_strict_aliasing
	      || (!MEM_ALIAS_SET (mem1) && !MEM_ALIAS_SET (mem2)));

  return ! alias_sets_conflict_p (MEM_ALIAS_SET (mem1), MEM_ALIAS_SET (mem2));
}

/* Insert the NODE into the splay tree given by DATA.  Used by
   record_alias_subset via splay_tree_foreach.  */

static int
insert_subset_children (splay_tree_node node, void *data)
{
  splay_tree_insert ((splay_tree) data, node->key, node->value);

  return 0;
}
/* Return true if the first alias set is a subset of the second.  */

bool
alias_set_subset_of (alias_set_type set1, alias_set_type set2)
{
  alias_set_entry ase;

  /* Everything is a subset of the "aliases everything" set.  */
  if (set2 == 0)
    return true;

  /* Otherwise, check if set1 is a subset of set2.  */
  ase = get_alias_set_entry (set2);
  if (ase != 0
      && ((ase->has_zero_child && set1 == 0)
	  || splay_tree_lookup (ase->children,
				(splay_tree_key) set1)))
    return true;
  return false;
}
/* Return 1 if the two specified alias sets may conflict.  */

int
alias_sets_conflict_p (alias_set_type set1, alias_set_type set2)
{
  alias_set_entry ase;

  /* The easy case.  */
  if (alias_sets_must_conflict_p (set1, set2))
    return 1;

  /* See if the first alias set is a subset of the second.  */
  ase = get_alias_set_entry (set1);
  if (ase != 0
      && (ase->has_zero_child
	  || splay_tree_lookup (ase->children,
				(splay_tree_key) set2)))
    return 1;

  /* Now do the same, but with the alias sets reversed.  */
  ase = get_alias_set_entry (set2);
  if (ase != 0
      && (ase->has_zero_child
	  || splay_tree_lookup (ase->children,
				(splay_tree_key) set1)))
    return 1;

  /* The two alias sets are distinct and neither one is the
     child of the other.  Therefore, they cannot conflict.  */
  return 0;
}
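
/* A concrete sketch of the relationships the two functions above
   encode, using the `struct S { int i; double d; }' example from the
   overview comment.  The alias set numbers below are hypothetical;
   only the subset structure matters:

     set for `int'       = 2
     set for `double'    = 3
     set for `struct S'  = 4, with 2 and 3 recorded as its children

     alias_sets_conflict_p (2, 4)   => 1  (int is a subset of struct S)
     alias_sets_conflict_p (4, 4)   => 1  (identical sets must conflict)
     alias_sets_conflict_p (2, 3)   => 0  (neither is a subset of the other)
     alias_set_subset_of (2, 4)     => true
     alias_set_subset_of (4, 2)     => false  */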
static int
walk_mems_2 (rtx *x, rtx mem)
{
  if (MEM_P (*x))
    {
      if (alias_sets_conflict_p (MEM_ALIAS_SET (*x), MEM_ALIAS_SET (mem)))
	return 1;

      return -1;
    }
  return 0;
}

static int
walk_mems_1 (rtx *x, rtx *pat)
{
  if (MEM_P (*x))
    {
      /* Visit all MEMs in *PAT and check independence.  */
      if (for_each_rtx (pat, (rtx_function) walk_mems_2, *x))
	/* Indicate that dependence was determined and stop traversal.  */
	return 1;

      return -1;
    }
  return 0;
}

/* Return 1 if the two specified instructions have MEM exprs with
   conflicting alias sets.  */
bool
insn_alias_sets_conflict_p (rtx insn1, rtx insn2)
{
  /* For each pair of MEMs in INSN1 and INSN2 check their independence.  */
  return for_each_rtx (&PATTERN (insn1), (rtx_function) walk_mems_1,
		       &PATTERN (insn2));
}
/* Return 1 if the two specified alias sets will always conflict.  */

int
alias_sets_must_conflict_p (alias_set_type set1, alias_set_type set2)
{
  if (set1 == 0 || set2 == 0 || set1 == set2)
    return 1;

  return 0;
}
/* Return 1 if any MEM object of type T1 will always conflict (using the
   dependency routines in this file) with any MEM object of type T2.
   This is used when allocating temporary storage.  If T1 and/or T2 are
   NULL_TREE, it means we know nothing about the storage.  */

int
objects_must_conflict_p (tree t1, tree t2)
{
  alias_set_type set1, set2;

  /* If neither has a type specified, we don't know if they'll conflict
     because we may be using them to store objects of various types, for
     example the argument and local variables areas of inlined functions.  */
  if (t1 == 0 && t2 == 0)
    return 0;

  /* If they are the same type, they must conflict.  */
  if (t1 == t2
      /* Likewise if both are volatile.  */
      || (t1 != 0 && TYPE_VOLATILE (t1) && t2 != 0 && TYPE_VOLATILE (t2)))
    return 1;

  set1 = t1 ? get_alias_set (t1) : 0;
  set2 = t2 ? get_alias_set (t2) : 0;

  /* We can't use alias_sets_conflict_p because we must make sure
     that every subtype of t1 will conflict with every subtype of
     t2 for which a pair of subobjects of these respective subtypes
     overlaps on the stack.  */
  return alias_sets_must_conflict_p (set1, set2);
}
/* T is an expression with pointer type.  Find the DECL on which this
   expression is based.  (For example, in `a[i]' this would be `a'.)
   If there is no such DECL, or a unique decl cannot be determined,
   NULL_TREE is returned.  */

static tree
find_base_decl (tree t)
{
  tree d0, d1;

  if (t == 0 || t == error_mark_node || ! POINTER_TYPE_P (TREE_TYPE (t)))
    return 0;

  /* If this is a declaration, return it.  If T is based on a restrict
     qualified decl, return that decl.  */
  if (DECL_P (t))
    {
      if (TREE_CODE (t) == VAR_DECL && DECL_BASED_ON_RESTRICT_P (t))
	t = DECL_GET_RESTRICT_BASE (t);
      return t;
    }

  /* Handle general expressions.  It would be nice to deal with
     COMPONENT_REFs here.  If we could tell that `a' and `b' were the
     same, then `a->f' and `b->f' are also the same.  */
  switch (TREE_CODE_CLASS (TREE_CODE (t)))
    {
    case tcc_unary:
      return find_base_decl (TREE_OPERAND (t, 0));

    case tcc_binary:
      /* Return 0 if found in neither or both are the same.  */
      d0 = find_base_decl (TREE_OPERAND (t, 0));
      d1 = find_base_decl (TREE_OPERAND (t, 1));
      if (d0 == d1)
	return d0;
      else if (d0 == 0)
	return d1;
      else if (d1 == 0)
	return d0;
      else
	return 0;

    default:
      return 0;
    }
}
/* Return true if all nested component references handled by
   get_inner_reference in T are such that we should use the alias set
   provided by the object at the heart of T.

   This is true for non-addressable components (which don't have their
   own alias set), as well as components of objects in alias set zero.
   This latter point is a special case wherein we wish to override the
   alias set used by the component, but we don't have per-FIELD_DECL
   assignable alias sets.  */

bool
component_uses_parent_alias_set (const_tree t)
{
  while (1)
    {
      /* If we're at the end, it vacuously uses its own alias set.  */
      if (!handled_component_p (t))
	return false;

      switch (TREE_CODE (t))
	{
	case COMPONENT_REF:
	  if (DECL_NONADDRESSABLE_P (TREE_OPERAND (t, 1)))
	    return true;
	  break;

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  if (TYPE_NONALIASED_COMPONENT (TREE_TYPE (TREE_OPERAND (t, 0))))
	    return true;
	  break;

	case REALPART_EXPR:
	case IMAGPART_EXPR:
	  break;

	default:
	  /* Bitfields and casts are never addressable.  */
	  return true;
	}

      t = TREE_OPERAND (t, 0);
      if (get_alias_set (TREE_TYPE (t)) == 0)
	return true;
    }
}
/* Return the alias set for T, which may be either a type or an
   expression.  Call language-specific routine for help, if needed.  */

alias_set_type
get_alias_set (tree t)
{
  alias_set_type set;

  /* If we're not doing any alias analysis, just assume everything
     aliases everything else.  Also return 0 if this or its type is
     an error.  */
  if (! flag_strict_aliasing || t == error_mark_node
      || (! TYPE_P (t)
	  && (TREE_TYPE (t) == 0 || TREE_TYPE (t) == error_mark_node)))
    return 0;

  /* We can be passed either an expression or a type.  This and the
     language-specific routine may make mutually-recursive calls to each other
     to figure out what to do.  At each juncture, we see if this is a tree
     that the language may need to handle specially.  First handle things that
     aren't types.  */
  if (! TYPE_P (t))
    {
      tree inner = t;

      /* Remove any nops, then give the language a chance to do
	 something with this tree before we look at it.  */
      STRIP_NOPS (t);
      set = lang_hooks.get_alias_set (t);
      if (set != -1)
	return set;

      /* First see if the actual object referenced is an INDIRECT_REF from a
	 restrict-qualified pointer or a "void *".  */
      while (handled_component_p (inner))
	{
	  inner = TREE_OPERAND (inner, 0);
	  STRIP_NOPS (inner);
	}

      /* Check for accesses through restrict-qualified pointers.  */
      if (INDIRECT_REF_P (inner))
	{
	  tree decl;

	  if (TREE_CODE (TREE_OPERAND (inner, 0)) == SSA_NAME)
	    decl = SSA_NAME_VAR (TREE_OPERAND (inner, 0));
	  else
	    decl = find_base_decl (TREE_OPERAND (inner, 0));

	  if (decl && DECL_POINTER_ALIAS_SET_KNOWN_P (decl))
	    {
	      /* If we haven't computed the actual alias set, do it now.  */
	      if (DECL_POINTER_ALIAS_SET (decl) == -2)
		{
		  tree pointed_to_type = TREE_TYPE (TREE_TYPE (decl));

		  /* No two restricted pointers can point at the same thing.
		     However, a restricted pointer can point at the same thing
		     as an unrestricted pointer, if that unrestricted pointer
		     is based on the restricted pointer.  So, we make the
		     alias set for the restricted pointer a subset of the
		     alias set for the type pointed to by the type of the
		     decl.  */
		  alias_set_type pointed_to_alias_set
		    = get_alias_set (pointed_to_type);

		  if (pointed_to_alias_set == 0)
		    /* It's not legal to make a subset of alias set zero.  */
		    DECL_POINTER_ALIAS_SET (decl) = 0;
		  else if (AGGREGATE_TYPE_P (pointed_to_type))
		    /* For an aggregate, we must treat the restricted
		       pointer the same as an ordinary pointer.  If we
		       were to make the type pointed to by the
		       restricted pointer a subset of the pointed-to
		       type, then we would believe that other subsets
		       of the pointed-to type (such as fields of that
		       type) do not conflict with the type pointed to
		       by the restricted pointer.  */
		    DECL_POINTER_ALIAS_SET (decl)
		      = pointed_to_alias_set;
		  else
		    {
		      DECL_POINTER_ALIAS_SET (decl) = new_alias_set ();
		      record_alias_subset (pointed_to_alias_set,
					   DECL_POINTER_ALIAS_SET (decl));
		    }
		}

	      /* We use the alias set indicated in the declaration.  */
	      return DECL_POINTER_ALIAS_SET (decl);
	    }

	  /* If we have an INDIRECT_REF via a void pointer, we don't
	     know anything about what that might alias.  Likewise if the
	     pointer is marked that way.  */
	  else if (TREE_CODE (TREE_TYPE (inner)) == VOID_TYPE
		   || (TYPE_REF_CAN_ALIAS_ALL
		       (TREE_TYPE (TREE_OPERAND (inner, 0)))))
	    return 0;
	}

      /* Otherwise, pick up the outermost object that we could have a pointer
	 to, processing conversions as above.  */
      while (component_uses_parent_alias_set (t))
	{
	  t = TREE_OPERAND (t, 0);
	  STRIP_NOPS (t);
	}

      /* If we've already determined the alias set for a decl, just return
	 it.  This is necessary for C++ anonymous unions, whose component
	 variables don't look like union members (boo!).  */
      if (TREE_CODE (t) == VAR_DECL
	  && DECL_RTL_SET_P (t) && MEM_P (DECL_RTL (t)))
	return MEM_ALIAS_SET (DECL_RTL (t));

      /* Now all we care about is the type.  */
      t = TREE_TYPE (t);
    }

  /* Variant qualifiers don't affect the alias set, so get the main
     variant.  Always use the canonical type as well.
     If this is a type with a known alias set, return it.  */
  t = TYPE_MAIN_VARIANT (t);
  if (TYPE_CANONICAL (t))
    t = TYPE_CANONICAL (t);
  if (TYPE_ALIAS_SET_KNOWN_P (t))
    return TYPE_ALIAS_SET (t);

  /* We don't want to set TYPE_ALIAS_SET for incomplete types.  */
  if (!COMPLETE_TYPE_P (t))
    {
      /* For arrays with unknown size the conservative answer is the
	 alias set of the element type.  */
      if (TREE_CODE (t) == ARRAY_TYPE)
	return get_alias_set (TREE_TYPE (t));

      /* But return zero as a conservative answer for incomplete types.  */
      return 0;
    }

  /* See if the language has special handling for this type.  */
  set = lang_hooks.get_alias_set (t);
  if (set != -1)
    return set;

  /* There are no objects of FUNCTION_TYPE, so there's no point in
     using up an alias set for them.  (There are, of course, pointers
     and references to functions, but that's different.)  */
  else if (TREE_CODE (t) == FUNCTION_TYPE
	   || TREE_CODE (t) == METHOD_TYPE)
    set = 0;

  /* Unless the language specifies otherwise, let vector types alias
     their components.  This avoids some nasty type punning issues in
     normal usage.  And indeed lets vectors be treated more like an
     array slice.  */
  else if (TREE_CODE (t) == VECTOR_TYPE)
    set = get_alias_set (TREE_TYPE (t));

  /* Unless the language specifies otherwise, treat array types the
     same as their components.  This avoids the asymmetry we get
     through recording the components.  Consider accessing a
     character(kind=1) through a reference to a character(kind=1)[1:1].
     Or consider if we want to assign integer(kind=4)[0:D.1387] and
     integer(kind=4)[4] the same alias set or not.
     Just be pragmatic here and make sure the array and its element
     type get the same alias set assigned.  */
  else if (TREE_CODE (t) == ARRAY_TYPE
	   && !TYPE_NONALIASED_COMPONENT (t))
    set = get_alias_set (TREE_TYPE (t));

  else
    /* Otherwise make a new alias set for this type.  */
    set = new_alias_set ();

  TYPE_ALIAS_SET (t) = set;

  /* If this is an aggregate type, we must record any component aliasing
     information.  */
  if (AGGREGATE_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
    record_component_aliases (t);

  return set;
}
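
/* A sketch of the restrict handling above: for

     void f (int *restrict p, int *restrict q) { ... *p ... *q ... }

   each of `p' and `q' gets its own brand-new DECL_POINTER_ALIAS_SET,
   recorded as a subset of the alias set of `int'.  Thus `*p' still
   conflicts with ordinary `int' accesses (the subset relation), but
   the sets for `p' and `q' are not subsets of each other, so `*p' and
   `*q' are not assumed to conflict.  */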
/* Return a brand-new alias set.  */

alias_set_type
new_alias_set (void)
{
  if (flag_strict_aliasing)
    {
      if (alias_sets == 0)
	VEC_safe_push (alias_set_entry, gc, alias_sets, 0);
      VEC_safe_push (alias_set_entry, gc, alias_sets, 0);
      return VEC_length (alias_set_entry, alias_sets) - 1;
    }
  else
    return 0;
}
/* Indicate that things in SUBSET can alias things in SUPERSET, but that
   not everything that aliases SUPERSET also aliases SUBSET.  For example,
   in C, a store to an `int' can alias a load of a structure containing an
   `int', and vice versa.  But it can't alias a load of a 'double' member
   of the same structure.  Here, the structure would be the SUPERSET and
   `int' the SUBSET.  This relationship is also described in the comment at
   the beginning of this file.

   This function should be called only once per SUPERSET/SUBSET pair.

   It is illegal for SUPERSET to be zero; everything is implicitly a
   subset of alias set zero.  */

void
record_alias_subset (alias_set_type superset, alias_set_type subset)
{
  alias_set_entry superset_entry;
  alias_set_entry subset_entry;

  /* It is possible in complex type situations for both sets to be the same,
     in which case we can ignore this operation.  */
  if (superset == subset)
    return;

  gcc_assert (superset);

  superset_entry = get_alias_set_entry (superset);
  if (superset_entry == 0)
    {
      /* Create an entry for the SUPERSET, so that we have a place to
	 attach the SUBSET.  */
      superset_entry = GGC_NEW (struct alias_set_entry);
      superset_entry->alias_set = superset;
      superset_entry->children
	= splay_tree_new_ggc (splay_tree_compare_ints);
      superset_entry->has_zero_child = 0;
      VEC_replace (alias_set_entry, alias_sets, superset, superset_entry);
    }

  if (subset == 0)
    superset_entry->has_zero_child = 1;
  else
    {
      subset_entry = get_alias_set_entry (subset);
      /* If there is an entry for the subset, enter all of its children
	 (if they are not already present) as children of the SUPERSET.  */
      if (subset_entry)
	{
	  if (subset_entry->has_zero_child)
	    superset_entry->has_zero_child = 1;

	  splay_tree_foreach (subset_entry->children, insert_subset_children,
			      superset_entry->children);
	}

      /* Enter the SUBSET itself as a child of the SUPERSET.  */
      splay_tree_insert (superset_entry->children,
			 (splay_tree_key) subset, 0);
    }
}
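
/* To make the "all descendants" behavior concrete: for the running
   example `struct T { struct S s; float f; }', record_component_aliases
   ends up making roughly these calls (set numbers hypothetical):

     record_alias_subset (T_set, S_set);
     record_alias_subset (T_set, float_set);

   The first call copies S_set's own children -- `int' and `double' --
   into T_set via insert_subset_children, so T_set's splay tree contains
   S_set, float_set, `int', and `double'.  A conflict query therefore
   never has to chase more than one level of the hierarchy.  */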
/* Record that component types of TYPE, if any, are part of that type for
   aliasing purposes.  For record types, we only record component types
   for fields that are not marked non-addressable.  For array types, we
   only record the component type if it is not marked non-aliased.  */

void
record_component_aliases (tree type)
{
  alias_set_type superset = get_alias_set (type);
  tree field;

  if (superset == 0)
    return;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      /* Recursively record aliases for the base classes, if there are any.  */
      if (TYPE_BINFO (type))
	{
	  int i;
	  tree binfo, base_binfo;

	  for (binfo = TYPE_BINFO (type), i = 0;
	       BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
	    record_alias_subset (superset,
				 get_alias_set (BINFO_TYPE (base_binfo)));
	}
      for (field = TYPE_FIELDS (type); field != 0; field = TREE_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL && !DECL_NONADDRESSABLE_P (field))
	  record_alias_subset (superset, get_alias_set (TREE_TYPE (field)));
      break;

    case COMPLEX_TYPE:
      record_alias_subset (superset, get_alias_set (TREE_TYPE (type)));
      break;

      /* VECTOR_TYPE and ARRAY_TYPE share the alias set with their
	 element type.  */

    default:
      break;
    }
}
/* Allocate an alias set for use in storing and reading from the varargs
   spill area.  */

static GTY(()) alias_set_type varargs_set = -1;

alias_set_type
get_varargs_alias_set (void)
{
#if 1
  /* We now lower VA_ARG_EXPR, and there's currently no way to attach the
     varargs alias set to an INDIRECT_REF (FIXME!), so we can't
     consistently use the varargs alias set for loads from the varargs
     area.  So don't use it anywhere.  */
  return 0;
#else
  if (varargs_set == -1)
    varargs_set = new_alias_set ();

  return varargs_set;
#endif
}
/* Likewise, but used for the fixed portions of the frame, e.g., register
   save areas.  */

static GTY(()) alias_set_type frame_set = -1;

alias_set_type
get_frame_alias_set (void)
{
  if (frame_set == -1)
    frame_set = new_alias_set ();

  return frame_set;
}
/* Inside SRC, the source of a SET, find a base address.  */

static rtx
find_base_value (rtx src)
{
  unsigned int regno;

#if defined (FIND_BASE_TERM)
  /* Try machine-dependent ways to find the base term.  */
  src = FIND_BASE_TERM (src);
#endif

  switch (GET_CODE (src))
    {
    case SYMBOL_REF:
    case LABEL_REF:
      return src;

    case REG:
      regno = REGNO (src);
      /* At the start of a function, argument registers have known base
	 values which may be lost later.  Returning an ADDRESS
	 expression here allows optimization based on argument values
	 even when the argument registers are used for other purposes.  */
      if (regno < FIRST_PSEUDO_REGISTER && copying_arguments)
	return new_reg_base_value[regno];

      /* If a pseudo has a known base value, return it.  Do not do this
	 for non-fixed hard regs since it can result in a circular
	 dependency chain for registers which have values at function entry.

	 The test above is not sufficient because the scheduler may move
	 a copy out of an arg reg past the NOTE_INSN_FUNCTION_BEGIN.  */
      if ((regno >= FIRST_PSEUDO_REGISTER || fixed_regs[regno])
	  && regno < VEC_length (rtx, reg_base_value))
	{
	  /* If we're inside init_alias_analysis, use new_reg_base_value
	     to reduce the number of relaxation iterations.  */
	  if (new_reg_base_value && new_reg_base_value[regno]
	      && DF_REG_DEF_COUNT (regno) == 1)
	    return new_reg_base_value[regno];

	  if (VEC_index (rtx, reg_base_value, regno))
	    return VEC_index (rtx, reg_base_value, regno);
	}

      return 0;

    case MEM:
      /* Check for an argument passed in memory.  Only record in the
	 copying-arguments block; it is too hard to track changes
	 otherwise.  */
      if (copying_arguments
	  && (XEXP (src, 0) == arg_pointer_rtx
	      || (GET_CODE (XEXP (src, 0)) == PLUS
		  && XEXP (XEXP (src, 0), 0) == arg_pointer_rtx)))
	return gen_rtx_ADDRESS (VOIDmode, src);
      return 0;

    case CONST:
      src = XEXP (src, 0);
      if (GET_CODE (src) != PLUS && GET_CODE (src) != MINUS)
	break;

      /* ... fall through ...  */

    case PLUS:
    case MINUS:
      {
	rtx temp, src_0 = XEXP (src, 0), src_1 = XEXP (src, 1);

	/* If either operand is a REG that is a known pointer, then it
	   is the base.  */
	if (REG_P (src_0) && REG_POINTER (src_0))
	  return find_base_value (src_0);
	if (REG_P (src_1) && REG_POINTER (src_1))
	  return find_base_value (src_1);

	/* If either operand is a REG, then see if we already have
	   a known value for it.  */
	if (REG_P (src_0))
	  {
	    temp = find_base_value (src_0);
	    if (temp != 0)
	      src_0 = temp;
	  }

	if (REG_P (src_1))
	  {
	    temp = find_base_value (src_1);
	    if (temp != 0)
	      src_1 = temp;
	  }

	/* If either base is a named object or a special address
	   (like an argument or stack reference), then use it for the
	   base term.  */
	if (src_0 != 0
	    && (GET_CODE (src_0) == SYMBOL_REF
		|| GET_CODE (src_0) == LABEL_REF
		|| (GET_CODE (src_0) == ADDRESS
		    && GET_MODE (src_0) != VOIDmode)))
	  return src_0;

	if (src_1 != 0
	    && (GET_CODE (src_1) == SYMBOL_REF
		|| GET_CODE (src_1) == LABEL_REF
		|| (GET_CODE (src_1) == ADDRESS
		    && GET_MODE (src_1) != VOIDmode)))
	  return src_1;

	/* Guess which operand is the base address:
	   If either operand is a symbol, then it is the base.  If
	   either operand is a CONST_INT, then the other is the base.  */
	if (GET_CODE (src_1) == CONST_INT || CONSTANT_P (src_0))
	  return find_base_value (src_0);
	else if (GET_CODE (src_0) == CONST_INT || CONSTANT_P (src_1))
	  return find_base_value (src_1);

	return 0;
      }

    case LO_SUM:
      /* The standard form is (lo_sum reg sym) so look only at the
	 second operand.  */
      return find_base_value (XEXP (src, 1));

    case AND:
      /* If the second operand is constant set the base
	 address to the first operand.  */
      if (GET_CODE (XEXP (src, 1)) == CONST_INT && INTVAL (XEXP (src, 1)) != 0)
	return find_base_value (XEXP (src, 0));
      return 0;

    case TRUNCATE:
      if (GET_MODE_SIZE (GET_MODE (src)) < GET_MODE_SIZE (Pmode))
	break;
      /* Fall through.  */
    case HIGH:
    case PRE_INC:
    case PRE_DEC:
    case POST_INC:
    case POST_DEC:
    case PRE_MODIFY:
    case POST_MODIFY:
      return find_base_value (XEXP (src, 0));

    case ZERO_EXTEND:
    case SIGN_EXTEND:	/* Used for NT/Alpha pointers.  */
      {
	rtx temp = find_base_value (XEXP (src, 0));

	if (temp != 0 && CONSTANT_P (temp))
	  temp = convert_memory_address (Pmode, temp);

	return temp;
      }

    default:
      break;
    }

  return 0;
}
/* Called from init_alias_analysis indirectly through note_stores.  */

/* While scanning insns to find base values, reg_seen[N] is nonzero if
   register N has been set in this function.  */
static char *reg_seen;

/* Addresses which are known not to alias anything else are identified
   by a unique integer.  */
static int unique_id;

static void
record_set (rtx dest, const_rtx set, void *data ATTRIBUTE_UNUSED)
{
  unsigned regno;
  rtx src;
  int n;

  if (!REG_P (dest))
    return;

  regno = REGNO (dest);

  gcc_assert (regno < VEC_length (rtx, reg_base_value));

  /* If this spans multiple hard registers, then we must indicate that every
     register has an unusable value.  */
  if (regno < FIRST_PSEUDO_REGISTER)
    n = hard_regno_nregs[regno][GET_MODE (dest)];
  else
    n = 1;
  if (n != 1)
    {
      while (--n >= 0)
	{
	  reg_seen[regno + n] = 1;
	  new_reg_base_value[regno + n] = 0;
	}
      return;
    }

  if (set)
    {
      /* A CLOBBER wipes out any old value but does not prevent a previously
	 unset register from acquiring a base address (i.e. reg_seen is not
	 set).  */
      if (GET_CODE (set) == CLOBBER)
	{
	  new_reg_base_value[regno] = 0;
	  return;
	}
      src = SET_SRC (set);
    }
  else
    {
      if (reg_seen[regno])
	{
	  new_reg_base_value[regno] = 0;
	  return;
	}
      reg_seen[regno] = 1;
      new_reg_base_value[regno] = gen_rtx_ADDRESS (Pmode,
						   GEN_INT (unique_id++));
      return;
    }

  /* If this is not the first set of REGNO, see whether the new value
     is related to the old one.  There are two cases of interest:

	(1) The register might be assigned an entirely new value
	    that has the same base term as the original set.

	(2) The set might be a simple self-modification that
	    cannot change REGNO's base value.

     If neither case holds, reject the original base value as invalid.
     Note that the following situation is not detected:

	 extern int x, y;  int *p = &x;  p += (&y-&x);

     ANSI C does not allow computing the difference of addresses
     of distinct top level objects.  */
  if (new_reg_base_value[regno] != 0
      && find_base_value (src) != new_reg_base_value[regno])
    switch (GET_CODE (src))
      {
      case LO_SUM:
      case MINUS:
	if (XEXP (src, 0) != dest && XEXP (src, 1) != dest)
	  new_reg_base_value[regno] = 0;
	break;
      case PLUS:
	/* If the value we add in the PLUS is also a valid base value,
	   this might be the actual base value, and the original value
	   an index.  */
	{
	  rtx other = NULL_RTX;

	  if (XEXP (src, 0) == dest)
	    other = XEXP (src, 1);
	  else if (XEXP (src, 1) == dest)
	    other = XEXP (src, 0);

	  if (! other || find_base_value (other))
	    new_reg_base_value[regno] = 0;
	  break;
	}
      case AND:
	if (XEXP (src, 0) != dest || GET_CODE (XEXP (src, 1)) != CONST_INT)
	  new_reg_base_value[regno] = 0;
	break;
      default:
	new_reg_base_value[regno] = 0;
	break;
      }
  /* If this is the first set of a register, record the value.  */
  else if ((regno >= FIRST_PSEUDO_REGISTER || ! fixed_regs[regno])
	   && ! reg_seen[regno] && new_reg_base_value[regno] == 0)
    new_reg_base_value[regno] = find_base_value (src);

  reg_seen[regno] = 1;
}
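
/* A sketch of the interesting cases above, for a hypothetical pseudo:

     (set (reg 100) (symbol_ref "x"))
	 First set: the base of reg 100 becomes (symbol_ref "x").

     (set (reg 100) (plus (reg 100) (const_int 4)))
	 Self-modification whose addend is not itself a base value
	 (find_base_value of a CONST_INT is 0), so the recorded base
	 is preserved.

     (set (reg 100) (reg 200))
	 A later set with a different base term: the old base value
	 is rejected (set to 0).  */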
/* If a value is known for REGNO, return it.  */

rtx
get_reg_known_value (unsigned int regno)
{
  if (regno >= FIRST_PSEUDO_REGISTER)
    {
      regno -= FIRST_PSEUDO_REGISTER;
      if (regno < reg_known_value_size)
	return reg_known_value[regno];
    }
  return NULL;
}

/* Set it.  */

static void
set_reg_known_value (unsigned int regno, rtx val)
{
  if (regno >= FIRST_PSEUDO_REGISTER)
    {
      regno -= FIRST_PSEUDO_REGISTER;
      if (regno < reg_known_value_size)
	reg_known_value[regno] = val;
    }
}

/* Similarly for reg_known_equiv_p.  */

bool
get_reg_known_equiv_p (unsigned int regno)
{
  if (regno >= FIRST_PSEUDO_REGISTER)
    {
      regno -= FIRST_PSEUDO_REGISTER;
      if (regno < reg_known_value_size)
	return reg_known_equiv_p[regno];
    }
  return false;
}

static void
set_reg_known_equiv_p (unsigned int regno, bool val)
{
  if (regno >= FIRST_PSEUDO_REGISTER)
    {
      regno -= FIRST_PSEUDO_REGISTER;
      if (regno < reg_known_value_size)
	reg_known_equiv_p[regno] = val;
    }
}
/* Returns a canonical version of X, from the point of view of alias
   analysis.  (For example, if X is a MEM whose address is a register,
   and the register has a known value (say a SYMBOL_REF), then a MEM
   whose address is the SYMBOL_REF is returned.)  */

rtx
canon_rtx (rtx x)
{
  /* Recursively look for equivalences.  */
  if (REG_P (x) && REGNO (x) >= FIRST_PSEUDO_REGISTER)
    {
      rtx t = get_reg_known_value (REGNO (x));
      if (t == x)
	return x;
      if (t)
	return canon_rtx (t);
    }

  if (GET_CODE (x) == PLUS)
    {
      rtx x0 = canon_rtx (XEXP (x, 0));
      rtx x1 = canon_rtx (XEXP (x, 1));

      if (x0 != XEXP (x, 0) || x1 != XEXP (x, 1))
	{
	  if (GET_CODE (x0) == CONST_INT)
	    return plus_constant (x1, INTVAL (x0));
	  else if (GET_CODE (x1) == CONST_INT)
	    return plus_constant (x0, INTVAL (x1));
	  return gen_rtx_PLUS (GET_MODE (x), x0, x1);
	}
    }

  /* This gives us much better alias analysis when called from
     the loop optimizer.  Note we want to leave the original
     MEM alone, but need to return the canonicalized MEM with
     all the flags with their original values.  */
  else if (MEM_P (x))
    x = replace_equiv_address_nv (x, canon_rtx (XEXP (x, 0)));

  return x;
}
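
/* For instance (a sketch; the register number is hypothetical): if
   pseudo 100 has the known value (symbol_ref "x"), then

     canon_rtx ((reg 100))
       => (symbol_ref "x")
     canon_rtx ((plus (reg 100) (const_int 4)))
       => (const (plus (symbol_ref "x") (const_int 4)))

   The second result comes from the PLUS case above folding the
   constant with plus_constant once the register operand has been
   canonicalized.  */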
/* Return 1 if X and Y are identical-looking rtx's.
   Expect that X and Y have already been canonicalized.

   We use the data in reg_known_value above to see if two registers with
   different numbers are, in fact, equivalent.  */

static int
rtx_equal_for_memref_p (const_rtx x, const_rtx y)
{
  int i;
  int j;
  enum rtx_code code;
  const char *fmt;

  if (x == 0 && y == 0)
    return 1;
  if (x == 0 || y == 0)
    return 0;

  if (x == y)
    return 1;

  code = GET_CODE (x);
  /* Rtx's of different codes cannot be equal.  */
  if (code != GET_CODE (y))
    return 0;

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.
     (REG:SI x) and (REG:HI x) are NOT equivalent.  */

  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  /* Some RTL can be compared without a recursive examination.  */
  switch (code)
    {
    case REG:
      return REGNO (x) == REGNO (y);

    case LABEL_REF:
      return XEXP (x, 0) == XEXP (y, 0);

    case SYMBOL_REF:
      return XSTR (x, 0) == XSTR (y, 0);

    case VALUE:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_FIXED:
      /* There's no need to compare the contents of CONST_DOUBLEs or
	 CONST_INTs because pointer equality is a good enough
	 comparison for these nodes.  */
      return 0;

    default:
      break;
    }

  /* canon_rtx knows how to handle plus.  No need to canonicalize.  */
  if (code == PLUS)
    return ((rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 0))
	     && rtx_equal_for_memref_p (XEXP (x, 1), XEXP (y, 1)))
	    || (rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 1))
		&& rtx_equal_for_memref_p (XEXP (x, 1), XEXP (y, 0))));
  /* For commutative operations, the RTXs match if the operands match in
     either order.  Also handle the simple binary and unary cases without
     a loop.  */
  if (COMMUTATIVE_P (x))
    {
      rtx xop0 = canon_rtx (XEXP (x, 0));
      rtx yop0 = canon_rtx (XEXP (y, 0));
      rtx yop1 = canon_rtx (XEXP (y, 1));

      return ((rtx_equal_for_memref_p (xop0, yop0)
	       && rtx_equal_for_memref_p (canon_rtx (XEXP (x, 1)), yop1))
	      || (rtx_equal_for_memref_p (xop0, yop1)
		  && rtx_equal_for_memref_p (canon_rtx (XEXP (x, 1)), yop0)));
    }
  else if (NON_COMMUTATIVE_P (x))
    {
      return (rtx_equal_for_memref_p (canon_rtx (XEXP (x, 0)),
				      canon_rtx (XEXP (y, 0)))
	      && rtx_equal_for_memref_p (canon_rtx (XEXP (x, 1)),
					 canon_rtx (XEXP (y, 1))));
    }
  else if (UNARY_P (x))
    return rtx_equal_for_memref_p (canon_rtx (XEXP (x, 0)),
				   canon_rtx (XEXP (y, 0)));

  /* Compare the elements.  If any pair of corresponding elements
     fail to match, return 0 for the whole thing.

     Limit cases to types which actually appear in addresses.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      switch (fmt[i])
	{
	case 'i':
	  if (XINT (x, i) != XINT (y, i))
	    return 0;
	  break;

	case 'E':
	  /* Two vectors must have the same length.  */
	  if (XVECLEN (x, i) != XVECLEN (y, i))
	    return 0;

	  /* And the corresponding elements must match.  */
	  for (j = 0; j < XVECLEN (x, i); j++)
	    if (rtx_equal_for_memref_p (canon_rtx (XVECEXP (x, i, j)),
					canon_rtx (XVECEXP (y, i, j))) == 0)
	      return 0;
	  break;

	case 'e':
	  if (rtx_equal_for_memref_p (canon_rtx (XEXP (x, i)),
				      canon_rtx (XEXP (y, i))) == 0)
	    return 0;
	  break;

	  /* This can happen for asm operands.  */
	case 's':
	  if (strcmp (XSTR (x, i), XSTR (y, i)))
	    return 0;
	  break;

	/* This can happen for an asm which clobbers memory.  */
	case '0':
	  break;

	  /* It is believed that rtx's at this level will never
	     contain anything but integers and other rtx's,
	     except for within LABEL_REFs and SYMBOL_REFs.  */
	default:
	  gcc_unreachable ();
	}
    }
  return 1;
}
rtx
find_base_term (rtx x)
{
  cselib_val *val;
  struct elt_loc_list *l;

#if defined (FIND_BASE_TERM)
  /* Try machine-dependent ways to find the base term.  */
  x = FIND_BASE_TERM (x);
#endif

  switch (GET_CODE (x))
    {
    case REG:
      return REG_BASE_VALUE (x);

    case TRUNCATE:
      if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (Pmode))
	return 0;
      /* Fall through.  */
    case HIGH:
    case PRE_INC:
    case PRE_DEC:
    case POST_INC:
    case POST_DEC:
    case PRE_MODIFY:
    case POST_MODIFY:
      return find_base_term (XEXP (x, 0));

    case ZERO_EXTEND:
    case SIGN_EXTEND:	/* Used for Alpha/NT pointers.  */
      {
	rtx temp = find_base_term (XEXP (x, 0));

	if (temp != 0 && CONSTANT_P (temp))
	  temp = convert_memory_address (Pmode, temp);

	return temp;
      }

    case VALUE:
      val = CSELIB_VAL_PTR (x);
      if (!val)
	return 0;
      for (l = val->locs; l; l = l->next)
	if ((x = find_base_term (l->loc)) != 0)
	  return x;
      return 0;

    case LO_SUM:
      /* The standard form is (lo_sum reg sym) so look only at the
	 second operand.  */
      return find_base_term (XEXP (x, 1));

    case CONST:
      x = XEXP (x, 0);
      if (GET_CODE (x) != PLUS && GET_CODE (x) != MINUS)
	return 0;
      /* Fall through.  */
    case PLUS:
    case MINUS:
      {
	rtx tmp1 = XEXP (x, 0);
	rtx tmp2 = XEXP (x, 1);

	/* This is a little bit tricky since we have to determine which of
	   the two operands represents the real base address.  Otherwise this
	   routine may return the index register instead of the base register.

	   That may cause us to believe no aliasing was possible, when in
	   fact aliasing is possible.

	   We use a few simple tests to guess the base register.  Additional
	   tests can certainly be added.  For example, if one of the operands
	   is a shift or multiply, then it must be the index register and the
	   other operand is the base register.  */

	if (tmp1 == pic_offset_table_rtx && CONSTANT_P (tmp2))
	  return find_base_term (tmp2);

	/* If either operand is known to be a pointer, then use it
	   to determine the base term.  */
	if (REG_P (tmp1) && REG_POINTER (tmp1))
	  return find_base_term (tmp1);

	if (REG_P (tmp2) && REG_POINTER (tmp2))
	  return find_base_term (tmp2);

	/* Neither operand was known to be a pointer.  Go ahead and find the
	   base term for both operands.  */
	tmp1 = find_base_term (tmp1);
	tmp2 = find_base_term (tmp2);

	/* If either base term is a named object or a special address
	   (like an argument or stack reference), then use it for the
	   base term.  */
	if (tmp1 != 0
	    && (GET_CODE (tmp1) == SYMBOL_REF
		|| GET_CODE (tmp1) == LABEL_REF
		|| (GET_CODE (tmp1) == ADDRESS
		    && GET_MODE (tmp1) != VOIDmode)))
	  return tmp1;

	if (tmp2 != 0
	    && (GET_CODE (tmp2) == SYMBOL_REF
		|| GET_CODE (tmp2) == LABEL_REF
		|| (GET_CODE (tmp2) == ADDRESS
		    && GET_MODE (tmp2) != VOIDmode)))
	  return tmp2;

	/* We could not determine which of the two operands was the
	   base register and which was the index.  So we can determine
	   nothing from the base alias check.  */
	return 0;
      }

    case AND:
      if (GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) != 0)
	return find_base_term (XEXP (x, 0));
      return 0;

    case SYMBOL_REF:
    case LABEL_REF:
      return x;

    default:
      return 0;
    }
}
/* Return 0 if the addresses X and Y are known to point to different
   objects, 1 if they might be pointers to the same object.  */

static int
base_alias_check (rtx x, rtx y, enum machine_mode x_mode,
		  enum machine_mode y_mode)
{
  rtx x_base = find_base_term (x);
  rtx y_base = find_base_term (y);

  /* If the address itself has no known base see if a known equivalent
     value has one.  If either address still has no known base, nothing
     is known about aliasing.  */
  if (x_base == 0)
    {
      rtx x_c;

      if (! flag_expensive_optimizations || (x_c = canon_rtx (x)) == x)
	return 1;

      x_base = find_base_term (x_c);
      if (x_base == 0)
	return 1;
    }

  if (y_base == 0)
    {
      rtx y_c;
      if (! flag_expensive_optimizations || (y_c = canon_rtx (y)) == y)
	return 1;

      y_base = find_base_term (y_c);
      if (y_base == 0)
	return 1;
    }

  /* If the base addresses are equal nothing is known about aliasing.  */
  if (rtx_equal_p (x_base, y_base))
    return 1;

  /* The base addresses are different expressions.  If they are not accessed
     via AND, there is no conflict.  We can bring knowledge of object
     alignment into play here.  For example, on alpha, "char a, b;" can
     alias one another, though "char a; long b;" cannot.  AND addresses may
     implicitly alias surrounding objects; i.e. unaligned access in DImode
     via AND address can alias all surrounding object types except those
     with alignment 8 or higher.  */
  if (GET_CODE (x) == AND && GET_CODE (y) == AND)
    return 1;
  if (GET_CODE (x) == AND
      && (GET_CODE (XEXP (x, 1)) != CONST_INT
	  || (int) GET_MODE_UNIT_SIZE (y_mode) < -INTVAL (XEXP (x, 1))))
    return 1;
  if (GET_CODE (y) == AND
      && (GET_CODE (XEXP (y, 1)) != CONST_INT
	  || (int) GET_MODE_UNIT_SIZE (x_mode) < -INTVAL (XEXP (y, 1))))
    return 1;

  /* Differing symbols not accessed via AND never alias.  */
  if (GET_CODE (x_base) != ADDRESS && GET_CODE (y_base) != ADDRESS)
    return 0;

  /* If one address is a stack reference there can be no alias:
     stack references using different base registers do not alias,
     a stack reference can not alias a parameter, and a stack reference
     can not alias a global.  */
  if ((GET_CODE (x_base) == ADDRESS && GET_MODE (x_base) == Pmode)
      || (GET_CODE (y_base) == ADDRESS && GET_MODE (y_base) == Pmode))
    return 0;

  if (! flag_argument_noalias)
    return 1;

  if (flag_argument_noalias > 1)
    return 0;

  /* Weak noalias assertion (arguments are distinct, but may match globals).  */
  return ! (GET_MODE (x_base) == VOIDmode && GET_MODE (y_base) == VOIDmode);
}
/* Convert the address X into something we can use.  This is done by returning
   it unchanged unless it is a value; in the latter case we call cselib to get
   a more useful rtx.  */

rtx
get_addr (rtx x)
{
  cselib_val *v;
  struct elt_loc_list *l;

  if (GET_CODE (x) != VALUE)
    return x;
  v = CSELIB_VAL_PTR (x);
  if (v)
    {
      for (l = v->locs; l; l = l->next)
	if (CONSTANT_P (l->loc))
	  return l->loc;
      for (l = v->locs; l; l = l->next)
	if (!REG_P (l->loc) && !MEM_P (l->loc))
	  return l->loc;
      if (v->locs)
	return v->locs->loc;
    }
  return x;
}
/* Return the address of the (N_REFS + 1)th memory reference to ADDR
   where SIZE is the size in bytes of the memory reference.  If ADDR
   is not modified by the memory reference then ADDR is returned.  */

static rtx
addr_side_effect_eval (rtx addr, int size, int n_refs)
{
  int offset = 0;

  switch (GET_CODE (addr))
    {
    case PRE_INC:
      offset = (n_refs + 1) * size;
      break;
    case PRE_DEC:
      offset = -(n_refs + 1) * size;
      break;
    case POST_INC:
      offset = n_refs * size;
      break;
    case POST_DEC:
      offset = -n_refs * size;
      break;

    default:
      return addr;
    }

  if (offset)
    addr = gen_rtx_PLUS (GET_MODE (addr), XEXP (addr, 0),
			 GEN_INT (offset));
  else
    addr = XEXP (addr, 0);
  addr = canon_rtx (addr);

  return addr;
}
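
/* Worked example for the switch above (the register number is
   hypothetical; SIZE is 4 throughout):

     addr_side_effect_eval ((pre_inc (reg 100)), 4, 0)
       => canon_rtx of (plus (reg 100) (const_int 4));
	  the 0th reference through a PRE_INC sees the pointer after
	  one increment.

     addr_side_effect_eval ((post_inc (reg 100)), 4, 0)
       => canon_rtx of (reg 100);
	  the 0th reference through a POST_INC sees the pointer before
	  any increment, so the offset is zero and only the inner
	  address is kept.  */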
/* Return nonzero if X and Y (memory addresses) could reference the
   same location in memory.  C is an offset accumulator.  When
   C is nonzero, we are testing aliases between X and Y + C.
   XSIZE is the size in bytes of the X reference,
   similarly YSIZE is the size in bytes for Y.
   Expect that canon_rtx has already been called for X and Y.

   If XSIZE or YSIZE is zero, we do not know the amount of memory being
   referenced (the reference was BLKmode), so make the most pessimistic
   assumptions.

   If XSIZE or YSIZE is negative, we may access memory outside the object
   being referenced as a side effect.  This can happen when using AND to
   align memory references, as is done on the Alpha.

   Nice to notice that varying addresses cannot conflict with fp if no
   local variables had their addresses taken, but that's too hard now.  */

static int
memrefs_conflict_p (int xsize, rtx x, int ysize, rtx y, HOST_WIDE_INT c)
{
  if (GET_CODE (x) == VALUE)
    x = get_addr (x);
  if (GET_CODE (y) == VALUE)
    y = get_addr (y);
  if (GET_CODE (x) == HIGH)
    x = XEXP (x, 0);
  else if (GET_CODE (x) == LO_SUM)
    x = XEXP (x, 1);
  else
    x = addr_side_effect_eval (x, xsize, 0);
  if (GET_CODE (y) == HIGH)
    y = XEXP (y, 0);
  else if (GET_CODE (y) == LO_SUM)
    y = XEXP (y, 1);
  else
    y = addr_side_effect_eval (y, ysize, 0);

  if (rtx_equal_for_memref_p (x, y))
    {
      if (xsize <= 0 || ysize <= 0)
	return 1;
      if (c >= 0 && xsize > c)
	return 1;
      if (c < 0 && ysize + c > 0)
	return 1;
      return 0;
    }

  /* This code used to check for conflicts involving stack references and
     globals but the base address alias code now handles these cases.  */

  if (GET_CODE (x) == PLUS)
    {
      /* The fact that X is canonicalized means that this
	 PLUS rtx is canonicalized.  */
      rtx x0 = XEXP (x, 0);
      rtx x1 = XEXP (x, 1);

      if (GET_CODE (y) == PLUS)
	{
	  /* The fact that Y is canonicalized means that this
	     PLUS rtx is canonicalized.  */
	  rtx y0 = XEXP (y, 0);
	  rtx y1 = XEXP (y, 1);

	  if (rtx_equal_for_memref_p (x1, y1))
	    return memrefs_conflict_p (xsize, x0, ysize, y0, c);
	  if (rtx_equal_for_memref_p (x0, y0))
	    return memrefs_conflict_p (xsize, x1, ysize, y1, c);
	  if (GET_CODE (x1) == CONST_INT)
	    {
	      if (GET_CODE (y1) == CONST_INT)
		return memrefs_conflict_p (xsize, x0, ysize, y0,
					   c - INTVAL (x1) + INTVAL (y1));
	      else
		return memrefs_conflict_p (xsize, x0, ysize, y,
					   c - INTVAL (x1));
	    }
	  else if (GET_CODE (y1) == CONST_INT)
	    return memrefs_conflict_p (xsize, x, ysize, y0, c + INTVAL (y1));

	  return 1;
	}
      else if (GET_CODE (x1) == CONST_INT)
	return memrefs_conflict_p (xsize, x0, ysize, y, c - INTVAL (x1));
    }
  else if (GET_CODE (y) == PLUS)
    {
      /* The fact that Y is canonicalized means that this
	 PLUS rtx is canonicalized.  */
      rtx y0 = XEXP (y, 0);
      rtx y1 = XEXP (y, 1);

      if (GET_CODE (y1) == CONST_INT)
	return memrefs_conflict_p (xsize, x, ysize, y0, c + INTVAL (y1));
      else
	return 1;
    }

  if (GET_CODE (x) == GET_CODE (y))
    switch (GET_CODE (x))
      {
      case MULT:
	{
	  /* Handle cases where we expect the second operands to be the
	     same, and check only whether the first operand would conflict
	     or not.  */
	  rtx x0, y0;
	  rtx x1 = canon_rtx (XEXP (x, 1));
	  rtx y1 = canon_rtx (XEXP (y, 1));
	  if (! rtx_equal_for_memref_p (x1, y1))
	    return 1;
	  x0 = canon_rtx (XEXP (x, 0));
	  y0 = canon_rtx (XEXP (y, 0));
	  if (rtx_equal_for_memref_p (x0, y0))
	    return (xsize == 0 || ysize == 0
		    || (c >= 0 && xsize > c) || (c < 0 && ysize + c > 0));

	  /* Can't properly adjust our sizes.  */
	  if (GET_CODE (x1) != CONST_INT)
	    return 1;
	  xsize /= INTVAL (x1);
	  ysize /= INTVAL (x1);
	  c /= INTVAL (x1);
	  return memrefs_conflict_p (xsize, x0, ysize, y0, c);
	}

      default:
	break;
      }

  /* Treat an access through an AND (e.g. a subword access on an Alpha)
     as an access with indeterminate size.  Assume that references
     besides AND are aligned, so if the size of the other reference is
     at least as large as the alignment, assume no other overlap.  */
  if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT)
    {
      if (GET_CODE (y) == AND || ysize < -INTVAL (XEXP (x, 1)))
	xsize = -1;
      return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)), ysize, y, c);
    }
  if (GET_CODE (y) == AND && GET_CODE (XEXP (y, 1)) == CONST_INT)
    {
      /* ??? If we are indexing far enough into the array/structure, we
	 may yet be able to determine that we can not overlap.  But we
	 also need to know that we are far enough from the end not to
	 overlap a following reference, so we do nothing with that for
	 now.  */
      if (GET_CODE (x) == AND || xsize < -INTVAL (XEXP (y, 1)))
	ysize = -1;
      return memrefs_conflict_p (xsize, x, ysize, canon_rtx (XEXP (y, 0)), c);
    }

  if (CONSTANT_P (x))
    {
      if (GET_CODE (x) == CONST_INT && GET_CODE (y) == CONST_INT)
	{
	  c += (INTVAL (y) - INTVAL (x));
	  return (xsize <= 0 || ysize <= 0
		  || (c >= 0 && xsize > c) || (c < 0 && ysize + c > 0));
	}

      if (GET_CODE (x) == CONST)
	{
	  if (GET_CODE (y) == CONST)
	    return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)),
				       ysize, canon_rtx (XEXP (y, 0)), c);
	  else
	    return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)),
				       ysize, y, c);
	}
      if (GET_CODE (y) == CONST)
	return memrefs_conflict_p (xsize, x, ysize,
				   canon_rtx (XEXP (y, 0)), c);

      if (CONSTANT_P (y))
	return (xsize <= 0 || ysize <= 0
		|| (rtx_equal_for_memref_p (x, y)
		    && ((c >= 0 && xsize > c) || (c < 0 && ysize + c > 0))));

      return 1;
    }
  return 1;
}
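
/* A worked example of the offset accumulator C (the register number
   is hypothetical): consider two 4-byte references through

     x = (plus (reg 100) (const_int 8))
     y = (reg 100)

   The PLUS case recurses with x0 = (reg 100) and c = 0 - 8 = -8.  Now
   x and y are rtx_equal_for_memref_p, c < 0, and ysize + c = 4 - 8 < 0,
   so the result is 0: the references are 8 bytes apart and 4 bytes
   wide, hence disjoint.  With (const_int 2) instead of (const_int 8),
   ysize + c = 4 - 2 > 0 and the function returns 1: the ranges
   overlap.  */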
1852 /* Functions to compute memory dependencies.
1854 Since we process the insns in execution order, we can build tables
1855 to keep track of what registers are fixed (and not aliased), what registers
1856 are varying in known ways, and what registers are varying in unknown
1857 ways.
1859 If both memory references are volatile, then there must always be a
1860 dependence between the two references, since their order can not be
1861 changed. A volatile and non-volatile reference can be interchanged
1862 though.
1864 A MEM_IN_STRUCT reference at a non-AND varying address can never
1865 conflict with a non-MEM_IN_STRUCT reference at a fixed address. We
1866 also must allow AND addresses, because they may generate accesses
1867 outside the object being referenced. This is used to generate
1868 aligned addresses from unaligned addresses, for instance, the alpha
1869 storeqi_unaligned pattern. */
1871 /* Read dependence: X is read after read in MEM takes place. There can
1872 only be a dependence here if both reads are volatile. */
1874 static int
1875 read_dependence (const_rtx mem, const_rtx x)
1876 {
1877 return MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem);
1878 }
1880 /* Returns MEM1 if and only if MEM1 is a scalar at a fixed address and
1881 MEM2 is a reference to a structure at a varying address, or returns
1882 MEM2 if vice versa. Otherwise, returns NULL_RTX. If a non-NULL
1883 value is returned, MEM1 and MEM2 can never alias. VARIES_P is used
1884 to decide whether or not an address may vary; it should return
1885 nonzero whenever variation is possible.
1886 MEM1_ADDR and MEM2_ADDR are the addresses of MEM1 and MEM2. */
1888 static const_rtx
1889 fixed_scalar_and_varying_struct_p (const_rtx mem1, const_rtx mem2, rtx mem1_addr,
1890 rtx mem2_addr,
1891 bool (*varies_p) (const_rtx, bool))
1892 {
1893 if (! flag_strict_aliasing)
1894 return NULL_RTX;
1896 if (MEM_ALIAS_SET (mem2)
1897 && MEM_SCALAR_P (mem1) && MEM_IN_STRUCT_P (mem2)
1898 && !varies_p (mem1_addr, 1) && varies_p (mem2_addr, 1))
1899 /* MEM1 is a scalar at a fixed address; MEM2 is a struct at a
1900 varying address. */
1901 return mem1;
1903 if (MEM_ALIAS_SET (mem1)
1904 && MEM_IN_STRUCT_P (mem1) && MEM_SCALAR_P (mem2)
1905 && varies_p (mem1_addr, 1) && !varies_p (mem2_addr, 1))
1906 /* MEM2 is a scalar at a fixed address; MEM1 is a struct at a
1907 varying address. */
1908 return mem2;
1910 return NULL_RTX;
1911 }
1913 /* Returns nonzero if something about the mode or address format of MEM
1914 indicates that it might well alias *anything*. */
1916 static int
1917 aliases_everything_p (const_rtx mem)
1919 if (GET_CODE (XEXP (mem, 0)) == AND)
1920 /* If the address is an AND, it's very hard to know at what it is
1921 actually pointing. */
1922 return 1;
1924 return 0;
1927 /* Return true if we can determine that the fields referenced cannot
1928 overlap for any pair of objects. */
1930 static bool
1931 nonoverlapping_component_refs_p (const_tree x, const_tree y)
1932 {
1933 const_tree fieldx, fieldy, typex, typey, orig_y;
1935 do
1936 {
1937 /* The comparison has to be done at a common type, since we don't
1938 know how the inheritance hierarchy works. */
1939 orig_y = y;
1940 do
1941 {
1942 fieldx = TREE_OPERAND (x, 1);
1943 typex = TYPE_MAIN_VARIANT (DECL_FIELD_CONTEXT (fieldx));
1945 y = orig_y;
1946 do
1947 {
1948 fieldy = TREE_OPERAND (y, 1);
1949 typey = TYPE_MAIN_VARIANT (DECL_FIELD_CONTEXT (fieldy));
1951 if (typex == typey)
1952 goto found;
1954 y = TREE_OPERAND (y, 0);
1955 }
1956 while (y && TREE_CODE (y) == COMPONENT_REF);
1958 x = TREE_OPERAND (x, 0);
1959 }
1960 while (x && TREE_CODE (x) == COMPONENT_REF);
1961 /* Never found a common type. */
1962 return false;
1964 found:
1965 /* If we're left with accessing different fields of a structure,
1966 then no overlap. */
1967 if (TREE_CODE (typex) == RECORD_TYPE
1968 && fieldx != fieldy)
1969 return true;
1971 /* The comparison on the current field failed. If we're accessing
1972 a deeply nested structure, look at the next outer level. */
1973 x = TREE_OPERAND (x, 0);
1974 y = TREE_OPERAND (y, 0);
1975 }
1976 while (x && y
1977 && TREE_CODE (x) == COMPONENT_REF
1978 && TREE_CODE (y) == COMPONENT_REF);
1980 return false;
1981 }
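/* Illustration (hypothetical types): for x = a.pos.left and
   y = b.dim.width, the loops walk both COMPONENT_REF chains looking
   for fields whose DECL_FIELD_CONTEXT has the same main-variant type;
   if a common RECORD_TYPE is found and the two FIELD_DECLs differ,
   the accesses lie in disjoint parts of any such object and true is
   returned. */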
1983 /* Look at the bottom of the COMPONENT_REF list for a DECL, and return it. */
1985 static tree
1986 decl_for_component_ref (tree x)
1987 {
1988 do
1989 {
1990 x = TREE_OPERAND (x, 0);
1991 }
1992 while (x && TREE_CODE (x) == COMPONENT_REF);
1994 return x && DECL_P (x) ? x : NULL_TREE;
1995 }
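/* Example (hypothetical reference): for a.b.c the loop strips the
   COMPONENT_REFs down to the VAR_DECL for a; for p->b.c the bottom
   is an INDIRECT_REF, not a DECL, so NULL_TREE is returned. */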
1997 /* Walk up the COMPONENT_REF list and adjust OFFSET to compensate for the
1998 offset of the field reference. */
2000 static rtx
2001 adjust_offset_for_component_ref (tree x, rtx offset)
2002 {
2003 HOST_WIDE_INT ioffset;
2005 if (! offset)
2006 return NULL_RTX;
2008 ioffset = INTVAL (offset);
2009 do
2010 {
2011 tree offset = component_ref_field_offset (x);
2012 tree field = TREE_OPERAND (x, 1);
2014 if (! host_integerp (offset, 1))
2015 return NULL_RTX;
2016 ioffset += (tree_low_cst (offset, 1)
2017 + (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
2018 / BITS_PER_UNIT));
2020 x = TREE_OPERAND (x, 0);
2021 }
2022 while (x && TREE_CODE (x) == COMPONENT_REF);
2024 return GEN_INT (ioffset);
2025 }
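/* Worked example (hypothetical layout): for x = s.inner.f with
   inner at byte offset 8 in s and f at bit offset 32 within inner
   (32 / BITS_PER_UNIT = 4 bytes), an incoming OFFSET of 0 comes
   back as GEN_INT (12). */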
2027 /* Return nonzero if we can determine the exprs corresponding to memrefs
2028 X and Y and they do not overlap. */
2030 static int
2031 nonoverlapping_memrefs_p (const_rtx x, const_rtx y)
2032 {
2033 tree exprx = MEM_EXPR (x), expry = MEM_EXPR (y);
2034 rtx rtlx, rtly;
2035 rtx basex, basey;
2036 rtx moffsetx, moffsety;
2037 HOST_WIDE_INT offsetx = 0, offsety = 0, sizex, sizey, tem;
2039 /* Unless both have exprs, we can't tell anything. */
2040 if (exprx == 0 || expry == 0)
2041 return 0;
2043 /* If both are field references, we may be able to determine something. */
2044 if (TREE_CODE (exprx) == COMPONENT_REF
2045 && TREE_CODE (expry) == COMPONENT_REF
2046 && nonoverlapping_component_refs_p (exprx, expry))
2047 return 1;
2050 /* If the field reference test failed, look at the DECLs involved. */
2051 moffsetx = MEM_OFFSET (x);
2052 if (TREE_CODE (exprx) == COMPONENT_REF)
2053 {
2054 if (TREE_CODE (expry) == VAR_DECL
2055 && POINTER_TYPE_P (TREE_TYPE (expry)))
2056 {
2057 tree field = TREE_OPERAND (exprx, 1);
2058 tree fieldcontext = DECL_FIELD_CONTEXT (field);
2059 if (ipa_type_escape_field_does_not_clobber_p (fieldcontext,
2060 TREE_TYPE (field)))
2061 return 1;
2062 }
2063 {
2064 tree t = decl_for_component_ref (exprx);
2065 if (! t)
2066 return 0;
2067 moffsetx = adjust_offset_for_component_ref (exprx, moffsetx);
2068 exprx = t;
2069 }
2070 }
2071 else if (INDIRECT_REF_P (exprx))
2072 {
2073 exprx = TREE_OPERAND (exprx, 0);
2074 if (flag_argument_noalias < 2
2075 || TREE_CODE (exprx) != PARM_DECL)
2076 return 0;
2077 }
2079 moffsety = MEM_OFFSET (y);
2080 if (TREE_CODE (expry) == COMPONENT_REF)
2081 {
2082 if (TREE_CODE (exprx) == VAR_DECL
2083 && POINTER_TYPE_P (TREE_TYPE (exprx)))
2084 {
2085 tree field = TREE_OPERAND (expry, 1);
2086 tree fieldcontext = DECL_FIELD_CONTEXT (field);
2087 if (ipa_type_escape_field_does_not_clobber_p (fieldcontext,
2088 TREE_TYPE (field)))
2089 return 1;
2090 }
2091 {
2092 tree t = decl_for_component_ref (expry);
2093 if (! t)
2094 return 0;
2095 moffsety = adjust_offset_for_component_ref (expry, moffsety);
2096 expry = t;
2097 }
2098 }
2099 else if (INDIRECT_REF_P (expry))
2100 {
2101 expry = TREE_OPERAND (expry, 0);
2102 if (flag_argument_noalias < 2
2103 || TREE_CODE (expry) != PARM_DECL)
2104 return 0;
2105 }
2107 if (! DECL_P (exprx) || ! DECL_P (expry))
2108 return 0;
2110 rtlx = DECL_RTL (exprx);
2111 rtly = DECL_RTL (expry);
2113 /* If either RTL is not a MEM, it must be a REG or CONCAT, meaning they
2114 can't overlap unless they are the same, because we never reuse the part
2115 of the stack frame used for locals for spilled pseudos. */
2116 if ((!MEM_P (rtlx) || !MEM_P (rtly))
2117 && ! rtx_equal_p (rtlx, rtly))
2118 return 1;
2120 /* Get the base and offsets of both decls. If either is a register, we
2121 know both are registers and are the same, so use that as the base. The
2122 only way we can avoid overlap is if we can deduce that they are
2123 nonoverlapping pieces of that decl, which is very rare. */
2124 basex = MEM_P (rtlx) ? XEXP (rtlx, 0) : rtlx;
2125 if (GET_CODE (basex) == PLUS && GET_CODE (XEXP (basex, 1)) == CONST_INT)
2126 offsetx = INTVAL (XEXP (basex, 1)), basex = XEXP (basex, 0);
2128 basey = MEM_P (rtly) ? XEXP (rtly, 0) : rtly;
2129 if (GET_CODE (basey) == PLUS && GET_CODE (XEXP (basey, 1)) == CONST_INT)
2130 offsety = INTVAL (XEXP (basey, 1)), basey = XEXP (basey, 0);
2132 /* If the bases are different, we know they do not overlap if both
2133 are constants or if one is a constant and the other a pointer into the
2134 stack frame. Otherwise a different base means we can't tell if they
2135 overlap or not. */
2136 if (! rtx_equal_p (basex, basey))
2137 return ((CONSTANT_P (basex) && CONSTANT_P (basey))
2138 || (CONSTANT_P (basex) && REG_P (basey)
2139 && REGNO_PTR_FRAME_P (REGNO (basey)))
2140 || (CONSTANT_P (basey) && REG_P (basex)
2141 && REGNO_PTR_FRAME_P (REGNO (basex))));
2143 sizex = (!MEM_P (rtlx) ? (int) GET_MODE_SIZE (GET_MODE (rtlx))
2144 : MEM_SIZE (rtlx) ? INTVAL (MEM_SIZE (rtlx))
2145 : -1);
2146 sizey = (!MEM_P (rtly) ? (int) GET_MODE_SIZE (GET_MODE (rtly))
2147 : MEM_SIZE (rtly) ? INTVAL (MEM_SIZE (rtly)) :
2148 -1);
2150 /* If we have an offset for either memref, it can update the values computed
2151 above. */
2152 if (moffsetx)
2153 offsetx += INTVAL (moffsetx), sizex -= INTVAL (moffsetx);
2154 if (moffsety)
2155 offsety += INTVAL (moffsety), sizey -= INTVAL (moffsety);
2157 /* If a memref has both a size and an offset, we can use the smaller size.
2158 We can't do this if the offset isn't known because we must view this
2159 memref as being anywhere inside the DECL's MEM. */
2160 if (MEM_SIZE (x) && moffsetx)
2161 sizex = INTVAL (MEM_SIZE (x));
2162 if (MEM_SIZE (y) && moffsety)
2163 sizey = INTVAL (MEM_SIZE (y));
2165 /* Put the values of the memref with the lower offset in X's values. */
2166 if (offsetx > offsety)
2167 {
2168 tem = offsetx, offsetx = offsety, offsety = tem;
2169 tem = sizex, sizex = sizey, sizey = tem;
2170 }
2172 /* If we don't know the size of the lower-offset value, we can't tell
2173 if they conflict. Otherwise, we do the test. */
2174 return sizex >= 0 && offsety >= offsetx + sizex;
2175 }
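/* Example of the final interval test (hypothetical numbers): two
   references into the same local with offsetx = 0, sizex = 4 and
   offsety = 8 satisfy offsety >= offsetx + sizex, so they cannot
   overlap and nonzero is returned. */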
2177 /* True dependence: X is read after store in MEM takes place. */
2179 int
2180 true_dependence (const_rtx mem, enum machine_mode mem_mode, const_rtx x,
2181 bool (*varies) (const_rtx, bool))
2182 {
2183 rtx x_addr, mem_addr;
2184 rtx base;
2186 if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
2187 return 1;
2189 /* (mem:BLK (scratch)) is a special mechanism to conflict with everything.
2190 This is used in epilogue deallocation functions, and in cselib. */
2191 if (GET_MODE (x) == BLKmode && GET_CODE (XEXP (x, 0)) == SCRATCH)
2192 return 1;
2193 if (GET_MODE (mem) == BLKmode && GET_CODE (XEXP (mem, 0)) == SCRATCH)
2194 return 1;
2195 if (MEM_ALIAS_SET (x) == ALIAS_SET_MEMORY_BARRIER
2196 || MEM_ALIAS_SET (mem) == ALIAS_SET_MEMORY_BARRIER)
2197 return 1;
2199 if (DIFFERENT_ALIAS_SETS_P (x, mem))
2200 return 0;
2202 /* Read-only memory is by definition never modified, and therefore can't
2203 conflict with anything. We don't expect to find read-only set on MEM,
2204 but stupid user tricks can produce them, so don't die. */
2205 if (MEM_READONLY_P (x))
2206 return 0;
2208 if (nonoverlapping_memrefs_p (mem, x))
2209 return 0;
2211 if (mem_mode == VOIDmode)
2212 mem_mode = GET_MODE (mem);
2214 x_addr = get_addr (XEXP (x, 0));
2215 mem_addr = get_addr (XEXP (mem, 0));
2217 base = find_base_term (x_addr);
2218 if (base && (GET_CODE (base) == LABEL_REF
2219 || (GET_CODE (base) == SYMBOL_REF
2220 && CONSTANT_POOL_ADDRESS_P (base))))
2221 return 0;
2223 if (! base_alias_check (x_addr, mem_addr, GET_MODE (x), mem_mode))
2224 return 0;
2226 x_addr = canon_rtx (x_addr);
2227 mem_addr = canon_rtx (mem_addr);
2229 if (! memrefs_conflict_p (GET_MODE_SIZE (mem_mode), mem_addr,
2230 SIZE_FOR_MODE (x), x_addr, 0))
2231 return 0;
2233 if (aliases_everything_p (x))
2234 return 1;
2236 /* We cannot use aliases_everything_p to test MEM, since we must look
2237 at MEM_MODE, rather than GET_MODE (MEM). */
2238 if (mem_mode == QImode || GET_CODE (mem_addr) == AND)
2239 return 1;
2241 /* In true_dependence we also allow BLKmode to alias anything. Why
2242 don't we do this in anti_dependence and output_dependence? */
2243 if (mem_mode == BLKmode || GET_MODE (x) == BLKmode)
2244 return 1;
2246 return ! fixed_scalar_and_varying_struct_p (mem, x, mem_addr, x_addr,
2247 varies);
2248 }
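/* Usage sketch (hypothetical caller, not from this file): a pass
   holding an earlier store MEM and a later load X would keep the two
   insns ordered whenever
   true_dependence (mem, VOIDmode, x, rtx_addr_varies_p)
   returns nonzero; passing VOIDmode means "use GET_MODE (mem)". */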
2250 /* Canonical true dependence: X is read after store in MEM takes place.
2251 Variant of true_dependence which assumes MEM has already been
2252 canonicalized (hence we no longer do that here).
2253 The mem_addr argument has been added, since true_dependence computed
2254 this value prior to canonicalizing.
2255 If x_addr is non-NULL, it is used in preference to XEXP (x, 0). */
2257 int
2258 canon_true_dependence (const_rtx mem, enum machine_mode mem_mode, rtx mem_addr,
2259 const_rtx x, rtx x_addr, bool (*varies) (const_rtx, bool))
2260 {
2261 if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
2262 return 1;
2264 /* (mem:BLK (scratch)) is a special mechanism to conflict with everything.
2265 This is used in epilogue deallocation functions. */
2266 if (GET_MODE (x) == BLKmode && GET_CODE (XEXP (x, 0)) == SCRATCH)
2267 return 1;
2268 if (GET_MODE (mem) == BLKmode && GET_CODE (XEXP (mem, 0)) == SCRATCH)
2269 return 1;
2270 if (MEM_ALIAS_SET (x) == ALIAS_SET_MEMORY_BARRIER
2271 || MEM_ALIAS_SET (mem) == ALIAS_SET_MEMORY_BARRIER)
2272 return 1;
2274 if (DIFFERENT_ALIAS_SETS_P (x, mem))
2275 return 0;
2277 /* Read-only memory is by definition never modified, and therefore can't
2278 conflict with anything. We don't expect to find read-only set on MEM,
2279 but stupid user tricks can produce them, so don't die. */
2280 if (MEM_READONLY_P (x))
2281 return 0;
2283 if (nonoverlapping_memrefs_p (x, mem))
2284 return 0;
2286 if (! x_addr)
2287 x_addr = get_addr (XEXP (x, 0));
2289 if (! base_alias_check (x_addr, mem_addr, GET_MODE (x), mem_mode))
2290 return 0;
2292 x_addr = canon_rtx (x_addr);
2293 if (! memrefs_conflict_p (GET_MODE_SIZE (mem_mode), mem_addr,
2294 SIZE_FOR_MODE (x), x_addr, 0))
2295 return 0;
2297 if (aliases_everything_p (x))
2298 return 1;
2300 /* We cannot use aliases_everything_p to test MEM, since we must look
2301 at MEM_MODE, rather than GET_MODE (MEM). */
2302 if (mem_mode == QImode || GET_CODE (mem_addr) == AND)
2303 return 1;
2305 /* In true_dependence we also allow BLKmode to alias anything. Why
2306 don't we do this in anti_dependence and output_dependence? */
2307 if (mem_mode == BLKmode || GET_MODE (x) == BLKmode)
2308 return 1;
2310 return ! fixed_scalar_and_varying_struct_p (mem, x, mem_addr, x_addr,
2311 varies);
2312 }
2314 /* Returns nonzero if a write to X might alias a previous read from
2315 (or, if WRITEP is nonzero, a write to) MEM. */
2317 static int
2318 write_dependence_p (const_rtx mem, const_rtx x, int writep)
2319 {
2320 rtx x_addr, mem_addr;
2321 const_rtx fixed_scalar;
2322 rtx base;
2324 if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
2325 return 1;
2327 /* (mem:BLK (scratch)) is a special mechanism to conflict with everything.
2328 This is used in epilogue deallocation functions. */
2329 if (GET_MODE (x) == BLKmode && GET_CODE (XEXP (x, 0)) == SCRATCH)
2330 return 1;
2331 if (GET_MODE (mem) == BLKmode && GET_CODE (XEXP (mem, 0)) == SCRATCH)
2332 return 1;
2333 if (MEM_ALIAS_SET (x) == ALIAS_SET_MEMORY_BARRIER
2334 || MEM_ALIAS_SET (mem) == ALIAS_SET_MEMORY_BARRIER)
2335 return 1;
2337 if (DIFFERENT_ALIAS_SETS_P (x, mem))
2338 return 0;
2340 /* A read from read-only memory can't conflict with read-write memory. */
2341 if (!writep && MEM_READONLY_P (mem))
2342 return 0;
2344 if (nonoverlapping_memrefs_p (x, mem))
2345 return 0;
2347 x_addr = get_addr (XEXP (x, 0));
2348 mem_addr = get_addr (XEXP (mem, 0));
2350 if (! writep)
2351 {
2352 base = find_base_term (mem_addr);
2353 if (base && (GET_CODE (base) == LABEL_REF
2354 || (GET_CODE (base) == SYMBOL_REF
2355 && CONSTANT_POOL_ADDRESS_P (base))))
2356 return 0;
2357 }
2359 if (! base_alias_check (x_addr, mem_addr, GET_MODE (x),
2360 GET_MODE (mem)))
2361 return 0;
2363 x_addr = canon_rtx (x_addr);
2364 mem_addr = canon_rtx (mem_addr);
2366 if (!memrefs_conflict_p (SIZE_FOR_MODE (mem), mem_addr,
2367 SIZE_FOR_MODE (x), x_addr, 0))
2368 return 0;
2370 fixed_scalar
2371 = fixed_scalar_and_varying_struct_p (mem, x, mem_addr, x_addr,
2372 rtx_addr_varies_p);
2374 return (!(fixed_scalar == mem && !aliases_everything_p (x))
2375 && !(fixed_scalar == x && !aliases_everything_p (mem)));
2376 }
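/* Reading of the final test (explanatory): the pair is declared
   independent (0) only when fixed_scalar_and_varying_struct_p
   identified one reference as the fixed scalar and the remaining
   reference does not alias everything; every other combination
   conservatively reports a possible dependence. */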
2378 /* Anti dependence: X is written after read in MEM takes place. */
2380 int
2381 anti_dependence (const_rtx mem, const_rtx x)
2382 {
2383 return write_dependence_p (mem, x, /*writep=*/0);
2384 }
2386 /* Output dependence: X is written after store in MEM takes place. */
2388 int
2389 output_dependence (const_rtx mem, const_rtx x)
2390 {
2391 return write_dependence_p (mem, x, /*writep=*/1);
2392 }
2395 void
2396 init_alias_target (void)
2397 {
2398 int i;
2400 memset (static_reg_base_value, 0, sizeof static_reg_base_value);
2402 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2403 /* Check whether this register can hold an incoming pointer
2404 argument. FUNCTION_ARG_REGNO_P tests outgoing register
2405 numbers, so translate if necessary due to register windows. */
2406 if (FUNCTION_ARG_REGNO_P (OUTGOING_REGNO (i))
2407 && HARD_REGNO_MODE_OK (i, Pmode))
2408 static_reg_base_value[i]
2409 = gen_rtx_ADDRESS (VOIDmode, gen_rtx_REG (Pmode, i));
2411 static_reg_base_value[STACK_POINTER_REGNUM]
2412 = gen_rtx_ADDRESS (Pmode, stack_pointer_rtx);
2413 static_reg_base_value[ARG_POINTER_REGNUM]
2414 = gen_rtx_ADDRESS (Pmode, arg_pointer_rtx);
2415 static_reg_base_value[FRAME_POINTER_REGNUM]
2416 = gen_rtx_ADDRESS (Pmode, frame_pointer_rtx);
2417 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2418 static_reg_base_value[HARD_FRAME_POINTER_REGNUM]
2419 = gen_rtx_ADDRESS (Pmode, hard_frame_pointer_rtx);
2420 #endif
2421 }
2423 /* Set MEMORY_MODIFIED when X modifies DATA (which is assumed
2424 to be a memory reference). */
2425 static bool memory_modified;
2426 static void
2427 memory_modified_1 (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
2428 {
2429 if (MEM_P (x))
2430 {
2431 if (anti_dependence (x, (const_rtx)data) || output_dependence (x, (const_rtx)data))
2432 memory_modified = true;
2433 }
2434 }
2437 /* Return true when INSN possibly modifies the memory contents of MEM
2438 (i.e. the contents at MEM's address can change). */
2439 bool
2440 memory_modified_in_insn_p (const_rtx mem, const_rtx insn)
2441 {
2442 if (!INSN_P (insn))
2443 return false;
2444 memory_modified = false;
2445 note_stores (PATTERN (insn), memory_modified_1, CONST_CAST_RTX(mem));
2446 return memory_modified;
2447 }
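/* Usage sketch (hypothetical caller): code that wants to hoist a load
   MEM above INSN can guard the motion with
   if (!memory_modified_in_insn_p (mem, insn))
     ... perform the move ...
   Note that note_stores walks only SET and CLOBBER patterns, so this
   is a statement about stores within INSN's own pattern. */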
2449 /* Initialize the aliasing machinery. Initialize the REG_KNOWN_VALUE
2450 array. */
2452 void
2453 init_alias_analysis (void)
2454 {
2455 unsigned int maxreg = max_reg_num ();
2456 int changed, pass;
2457 int i;
2458 unsigned int ui;
2459 rtx insn;
2461 timevar_push (TV_ALIAS_ANALYSIS);
2463 reg_known_value_size = maxreg - FIRST_PSEUDO_REGISTER;
2464 reg_known_value = GGC_CNEWVEC (rtx, reg_known_value_size);
2465 reg_known_equiv_p = XCNEWVEC (bool, reg_known_value_size);
2467 /* If we have memory allocated from the previous run, use it. */
2468 if (old_reg_base_value)
2469 reg_base_value = old_reg_base_value;
2471 if (reg_base_value)
2472 VEC_truncate (rtx, reg_base_value, 0);
2474 VEC_safe_grow_cleared (rtx, gc, reg_base_value, maxreg);
2476 new_reg_base_value = XNEWVEC (rtx, maxreg);
2477 reg_seen = XNEWVEC (char, maxreg);
2479 /* The basic idea is that each pass through this loop will use the
2480 "constant" information from the previous pass to propagate alias
2481 information through another level of assignments.
2483 This could get expensive if the assignment chains are long. Maybe
2484 we should throttle the number of iterations, possibly based on
2485 the optimization level or flag_expensive_optimizations.
2487 We could propagate more information in the first pass by making use
2488 of DF_REG_DEF_COUNT to determine immediately that the alias information
2489 for a pseudo is "constant".
2491 A program with an uninitialized variable can cause an infinite loop
2492 here. Instead of doing a full dataflow analysis to detect such problems
2493 we just cap the number of iterations for the loop.
2495 The state of the arrays for the set chain in question does not matter
2496 since the program has undefined behavior. */
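/* Illustration (hypothetical pseudos): for the copy chain
   r10 = &x; r11 = r10; r12 = r11; each pass pushes the known base
   value one assignment further down the chain, so r12's base may only
   settle after several iterations; MAX_ALIAS_LOOP_PASSES bounds the
   work even for degenerate programs. */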
2498 pass = 0;
2499 do
2500 {
2501 /* Assume nothing will change this iteration of the loop. */
2502 changed = 0;
2504 /* We want to assign the same IDs each iteration of this loop, so
2505 start counting from zero each iteration of the loop. */
2506 unique_id = 0;
2508 /* We're at the start of the function each iteration through the
2509 loop, so we're copying arguments. */
2510 copying_arguments = true;
2512 /* Wipe the potential alias information clean for this pass. */
2513 memset (new_reg_base_value, 0, maxreg * sizeof (rtx));
2515 /* Wipe the reg_seen array clean. */
2516 memset (reg_seen, 0, maxreg);
2518 /* Mark all hard registers which may contain an address.
2519 The stack, frame and argument pointers may contain an address.
2520 An argument register which can hold a Pmode value may contain
2521 an address even if it is not in BASE_REGS.
2523 The address expression is VOIDmode for an argument and
2524 Pmode for other registers. */
2526 memcpy (new_reg_base_value, static_reg_base_value,
2527 FIRST_PSEUDO_REGISTER * sizeof (rtx));
2529 /* Walk the insns adding values to the new_reg_base_value array. */
2530 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2531 {
2532 if (INSN_P (insn))
2533 {
2534 rtx note, set;
2536 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
2537 /* The prologue/epilogue insns are not threaded onto the
2538 insn chain until after reload has completed. Thus,
2539 there is no sense wasting time checking if INSN is in
2540 the prologue/epilogue until after reload has completed. */
2541 if (reload_completed
2542 && prologue_epilogue_contains (insn))
2543 continue;
2544 #endif
2546 /* If this insn has a noalias note, process it. Otherwise,
2547 scan for sets. A simple set will have no side effects
2548 which could change the base value of any other register. */
2550 if (GET_CODE (PATTERN (insn)) == SET
2551 && REG_NOTES (insn) != 0
2552 && find_reg_note (insn, REG_NOALIAS, NULL_RTX))
2553 record_set (SET_DEST (PATTERN (insn)), NULL_RTX, NULL);
2554 else
2555 note_stores (PATTERN (insn), record_set, NULL);
2557 set = single_set (insn);
2559 if (set != 0
2560 && REG_P (SET_DEST (set))
2561 && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
2562 {
2563 unsigned int regno = REGNO (SET_DEST (set));
2564 rtx src = SET_SRC (set);
2565 rtx t;
2567 note = find_reg_equal_equiv_note (insn);
2568 if (note && REG_NOTE_KIND (note) == REG_EQUAL
2569 && DF_REG_DEF_COUNT (regno) != 1)
2570 note = NULL_RTX;
2572 if (note != NULL_RTX
2573 && GET_CODE (XEXP (note, 0)) != EXPR_LIST
2574 && ! rtx_varies_p (XEXP (note, 0), 1)
2575 && ! reg_overlap_mentioned_p (SET_DEST (set),
2576 XEXP (note, 0)))
2577 {
2578 set_reg_known_value (regno, XEXP (note, 0));
2579 set_reg_known_equiv_p (regno,
2580 REG_NOTE_KIND (note) == REG_EQUIV);
2581 }
2582 else if (DF_REG_DEF_COUNT (regno) == 1
2583 && GET_CODE (src) == PLUS
2584 && REG_P (XEXP (src, 0))
2585 && (t = get_reg_known_value (REGNO (XEXP (src, 0))))
2586 && GET_CODE (XEXP (src, 1)) == CONST_INT)
2587 {
2588 t = plus_constant (t, INTVAL (XEXP (src, 1)));
2589 set_reg_known_value (regno, t);
2590 set_reg_known_equiv_p (regno, 0);
2591 }
2592 else if (DF_REG_DEF_COUNT (regno) == 1
2593 && ! rtx_varies_p (src, 1))
2594 {
2595 set_reg_known_value (regno, src);
2596 set_reg_known_equiv_p (regno, 0);
2597 }
2598 }
2599 }
2600 else if (NOTE_P (insn)
2601 && NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG)
2602 copying_arguments = false;
2603 }
2605 /* Now propagate values from new_reg_base_value to reg_base_value. */
2606 gcc_assert (maxreg == (unsigned int) max_reg_num ());
2608 for (ui = 0; ui < maxreg; ui++)
2609 {
2610 if (new_reg_base_value[ui]
2611 && new_reg_base_value[ui] != VEC_index (rtx, reg_base_value, ui)
2612 && ! rtx_equal_p (new_reg_base_value[ui],
2613 VEC_index (rtx, reg_base_value, ui)))
2614 {
2615 VEC_replace (rtx, reg_base_value, ui, new_reg_base_value[ui]);
2616 changed = 1;
2617 }
2618 }
2619 }
2620 while (changed && ++pass < MAX_ALIAS_LOOP_PASSES);
2622 /* Fill in the remaining entries. */
2623 for (i = 0; i < (int)reg_known_value_size; i++)
2624 if (reg_known_value[i] == 0)
2625 reg_known_value[i] = regno_reg_rtx[i + FIRST_PSEUDO_REGISTER];
2627 /* Clean up. */
2628 free (new_reg_base_value);
2629 new_reg_base_value = 0;
2630 free (reg_seen);
2631 reg_seen = 0;
2632 timevar_pop (TV_ALIAS_ANALYSIS);
2633 }
2635 void
2636 end_alias_analysis (void)
2637 {
2638 old_reg_base_value = reg_base_value;
2639 ggc_free (reg_known_value);
2640 reg_known_value = 0;
2641 reg_known_value_size = 0;
2642 free (reg_known_equiv_p);
2643 reg_known_equiv_p = 0;
2644 }
2646 #include "gt-alias.h"