[official-gcc.git] / gcc / alias.c
1 /* Alias analysis for GNU C
2 Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006,
3 2007, 2008, 2009, 2010 Free Software Foundation, Inc.
4 Contributed by John Carr (jfc@mit.edu).
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "tm_p.h"
29 #include "function.h"
30 #include "alias.h"
31 #include "emit-rtl.h"
32 #include "regs.h"
33 #include "hard-reg-set.h"
34 #include "basic-block.h"
35 #include "flags.h"
36 #include "output.h"
37 #include "toplev.h"
38 #include "cselib.h"
39 #include "splay-tree.h"
40 #include "ggc.h"
41 #include "langhooks.h"
42 #include "timevar.h"
43 #include "target.h"
44 #include "cgraph.h"
45 #include "tree-pass.h"
46 #include "ipa-type-escape.h"
47 #include "df.h"
48 #include "tree-ssa-alias.h"
49 #include "pointer-set.h"
50 #include "tree-flow.h"
52 /* The aliasing API provided here solves related but different problems:
54 Say there exists (in C)
56 struct X {
57 struct Y y1;
58 struct Z z2;
59 } x1, *px1, *px2;
61 struct Y y2, *py;
62 struct Z z2, *pz;
65 py = &x1.y1;
66 px2 = &x1;
68 Consider the four questions:
70 Can a store to x1 interfere with px2->y1?
71 Can a store to x1 interfere with px2->z2 (i.e. (*px2).z2)?
73 Can a store to x1 change the value pointed to by py?
74 Can a store to x1 change the value pointed to by pz?
76 The answer to these questions can be yes, yes, yes, and maybe.
78 The first two questions can be answered with a simple examination
79 of the type system. If structure X contains a field of type Y then
80 a store through a pointer to an X can overwrite any field that is
81 contained (recursively) in an X (unless we know that px1 != px2).
83 The last two questions can be solved in the same way as the
84 first two, but this is too conservative. The observation
85 is that in some cases we can know which (if any) fields
86 are addressed and whether those addresses are used in bad ways. This
87 analysis may be language specific. In C, arbitrary operations may
88 be applied to pointers. However, there is some indication that
89 this may be too conservative for some C++ types.
91 The pass ipa-type-escape does this analysis for the types whose
92 instances do not escape across the compilation boundary.
94 Historically in GCC, these two problems were combined and a single
95 data structure was used to represent the solution to these
96 problems. We now have two similar but different data structures.
97 The data structure used to solve the last two questions is similar to the
98 first, but does not have the fields whose addresses are
99 never taken. For types that do escape the compilation unit, the
100 data structures will have identical information. */
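/* To make the four questions concrete, the accesses involved look
   roughly like this, reusing the declarations above (an illustrative
   sketch only; `some_x' stands for any value of type struct X):

	x1 = some_x;		(store to the whole of x1)
	... px2->y1 ...		(question 1: may read the new y1)
	... px2->z2 ...		(question 2: may read the new z2)
	... *py ...		(question 3: py == &x1.y1, so yes)
	... *pz ...		(question 4: depends on where pz points)  */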
103 /* The alias sets assigned to MEMs assist the back-end in determining
104 which MEMs can alias which other MEMs. In general, two MEMs in
105 different alias sets cannot alias each other, with one important
106 exception. Consider something like:
108 struct S { int i; double d; };
110 a store to an `S' can alias something of either type `int' or type
111 `double'. (However, a store to an `int' cannot alias a `double'
112 and vice versa.) We indicate this via a tree structure that looks
113 like:
114              struct S
115                /   \
116               /     \
117             |/_     _\|
118             int    double
120 (The arrows are directed and point downwards.)
121 In this situation we say the alias set for `struct S' is the
122 `superset' and that those for `int' and `double' are `subsets'.
124 To see whether two alias sets can point to the same memory, we must
125 see if either alias set is a subset of the other. We need not trace
126 past immediate descendants, however, since we propagate all
127 grandchildren up one level.
129 Alias set zero is implicitly a superset of all other alias sets.
130 However, there is no actual entry for alias set zero. It is an
131 error to attempt to explicitly construct a subset of zero. */
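/* With the `struct S' example above, the checks defined later in this
   file come out as follows (a sketch; S_set, int_set and double_set
   stand for the alias sets of `struct S', `int' and `double'):

	alias_sets_conflict_p (S_set, int_set)	    -> nonzero
	alias_sets_conflict_p (S_set, double_set)   -> nonzero
	alias_sets_conflict_p (int_set, double_set) -> 0

   because `int' and `double' are both subsets of the set for
   `struct S', but neither is a subset of the other.  */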
133 struct GTY(()) alias_set_entry_d {
134 /* The alias set number, as stored in MEM_ALIAS_SET. */
135 alias_set_type alias_set;
137 /* Nonzero if it would have a child of zero: this effectively makes this
138 alias set the same as alias set zero. */
139 int has_zero_child;
141 /* The children of the alias set. These are not just the immediate
142 children, but, in fact, all descendants. So, if we have:
144 struct T { struct S s; float f; }
146 continuing our example above, the children here will be all of
147 `int', `double', `float', and `struct S'. */
148 splay_tree GTY((param1_is (int), param2_is (int))) children;
150 typedef struct alias_set_entry_d *alias_set_entry;
152 static int rtx_equal_for_memref_p (const_rtx, const_rtx);
153 static int memrefs_conflict_p (int, rtx, int, rtx, HOST_WIDE_INT);
154 static void record_set (rtx, const_rtx, void *);
155 static int base_alias_check (rtx, rtx, enum machine_mode,
156 enum machine_mode);
157 static rtx find_base_value (rtx);
158 static int mems_in_disjoint_alias_sets_p (const_rtx, const_rtx);
159 static int insert_subset_children (splay_tree_node, void*);
160 static alias_set_entry get_alias_set_entry (alias_set_type);
161 static const_rtx fixed_scalar_and_varying_struct_p (const_rtx, const_rtx, rtx, rtx,
162 bool (*) (const_rtx, bool));
163 static int aliases_everything_p (const_rtx);
164 static bool nonoverlapping_component_refs_p (const_tree, const_tree);
165 static tree decl_for_component_ref (tree);
166 static rtx adjust_offset_for_component_ref (tree, rtx);
167 static int write_dependence_p (const_rtx, const_rtx, int);
169 static void memory_modified_1 (rtx, const_rtx, void *);
171 /* Set up all info needed to perform alias analysis on memory references. */
173 /* Returns the size in bytes of the mode of X. */
174 #define SIZE_FOR_MODE(X) (GET_MODE_SIZE (GET_MODE (X)))
176 /* Returns nonzero if MEM1 and MEM2 do not alias because they are in
177 different alias sets. We ignore alias sets in functions making use
178 of variable arguments because the va_arg macros on some systems are
179 not legal ANSI C. */
180 #define DIFFERENT_ALIAS_SETS_P(MEM1, MEM2) \
181 mems_in_disjoint_alias_sets_p (MEM1, MEM2)
183 /* Cap the number of passes we make over the insns propagating alias
184 information through set chains. 10 is a completely arbitrary choice. */
185 #define MAX_ALIAS_LOOP_PASSES 10
187 /* reg_base_value[N] gives an address to which register N is related.
188 If all sets after the first add or subtract to the current value
189 or otherwise modify it so it does not point to a different top level
190 object, reg_base_value[N] is equal to the address part of the source
191 of the first set.
193 A base address can be an ADDRESS, SYMBOL_REF, or LABEL_REF. ADDRESS
194 expressions represent certain special values: function arguments and
195 the stack, frame, and argument pointers.
197 The contents of an ADDRESS are not normally used; the mode of the
198 ADDRESS determines whether the ADDRESS is a function argument or some
199 other special value. Pointer equality, not rtx_equal_p, determines whether
200 two ADDRESS expressions refer to the same base address.
202 The only use of the contents of an ADDRESS is for determining if the
203 current function performs nonlocal memory references for the
204 purposes of marking the function as a constant function. */
206 static GTY(()) VEC(rtx,gc) *reg_base_value;
207 static rtx *new_reg_base_value;
209 /* We preserve a copy of the old array around to reduce the amount of
210 garbage produced. About 8% of the garbage produced was attributed to this
211 array. */
212 static GTY((deletable)) VEC(rtx,gc) *old_reg_base_value;
214 /* Static hunks of RTL used by the aliasing code; these are initialized
215 once per function to avoid unnecessary RTL allocations. */
216 static GTY (()) rtx static_reg_base_value[FIRST_PSEUDO_REGISTER];
218 #define REG_BASE_VALUE(X) \
219 (REGNO (X) < VEC_length (rtx, reg_base_value) \
220 ? VEC_index (rtx, reg_base_value, REGNO (X)) : 0)
222 /* Vector indexed by N giving the initial (unchanging) value known for
223 pseudo-register N. This array is initialized in init_alias_analysis,
224 and does not change until end_alias_analysis is called. */
225 static GTY((length("reg_known_value_size"))) rtx *reg_known_value;
227 /* Indicates number of valid entries in reg_known_value. */
228 static GTY(()) unsigned int reg_known_value_size;
230 /* Vector recording for each reg_known_value whether it is due to a
231 REG_EQUIV note. Future passes (viz., reload) may replace the
232 pseudo with the equivalent expression and so we account for the
233 dependences that would be introduced if that happens.
235 The REG_EQUIV notes created in assign_parms may mention the arg
236 pointer, and there are explicit insns in the RTL that modify the
237 arg pointer. Thus we must ensure that such insns don't get
238 scheduled across each other because that would invalidate the
239 REG_EQUIV notes. One could argue that the REG_EQUIV notes are
240 wrong, but solving the problem in the scheduler will likely give
241 better code, so we do it here. */
242 static bool *reg_known_equiv_p;
244 /* True when scanning insns from the start of the rtl to the
245 NOTE_INSN_FUNCTION_BEG note. */
246 static bool copying_arguments;
248 DEF_VEC_P(alias_set_entry);
249 DEF_VEC_ALLOC_P(alias_set_entry,gc);
251 /* The splay-tree used to store the various alias set entries. */
252 static GTY (()) VEC(alias_set_entry,gc) *alias_sets;
254 /* Build a decomposed reference object for querying the alias-oracle
255 from the MEM rtx and store it in *REF.
256 Returns false if MEM is not suitable for the alias-oracle. */
258 static bool
259 ao_ref_from_mem (ao_ref *ref, const_rtx mem)
261 tree expr = MEM_EXPR (mem);
262 tree base;
264 if (!expr)
265 return false;
267 ao_ref_init (ref, expr);
269 /* Get the base of the reference and see if we have to reject or
270 adjust it. */
271 base = ao_ref_base (ref);
272 if (base == NULL_TREE)
273 return false;
275 /* The tree oracle doesn't like to have these. */
276 if (TREE_CODE (base) == FUNCTION_DECL
277 || TREE_CODE (base) == LABEL_DECL)
278 return false;
280 /* If this is a pointer dereference of a non-SSA_NAME punt.
281 ??? We could replace it with a pointer to anything. */
282 if (INDIRECT_REF_P (base)
283 && TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME)
284 return false;
286 /* If this is a reference based on a partitioned decl replace the
287 base with an INDIRECT_REF of the pointer representative we
288 created during stack slot partitioning. */
289 if (TREE_CODE (base) == VAR_DECL
290 && ! TREE_STATIC (base)
291 && cfun->gimple_df->decls_to_pointers != NULL)
293 void *namep;
294 namep = pointer_map_contains (cfun->gimple_df->decls_to_pointers, base);
295 if (namep)
297 ref->base_alias_set = get_alias_set (base);
298 ref->base = build1 (INDIRECT_REF, TREE_TYPE (base), *(tree *)namep);
302 ref->ref_alias_set = MEM_ALIAS_SET (mem);
304 /* If MEM_OFFSET or MEM_SIZE are NULL we have to punt.
305 Keep points-to related information though. */
306 if (!MEM_OFFSET (mem)
307 || !MEM_SIZE (mem))
309 ref->ref = NULL_TREE;
310 ref->offset = 0;
311 ref->size = -1;
312 ref->max_size = -1;
313 return true;
316 /* If the base decl is a parameter we can have negative MEM_OFFSET in
317 case of promoted subregs on bigendian targets. Trust the MEM_EXPR
318 here. */
319 if (INTVAL (MEM_OFFSET (mem)) < 0
320 && ((INTVAL (MEM_SIZE (mem)) + INTVAL (MEM_OFFSET (mem)))
321 * BITS_PER_UNIT) == ref->size)
322 return true;
324 ref->offset += INTVAL (MEM_OFFSET (mem)) * BITS_PER_UNIT;
325 ref->size = INTVAL (MEM_SIZE (mem)) * BITS_PER_UNIT;
327 /* The MEM may extend into adjacent fields, so adjust max_size if
328 necessary. */
329 if (ref->max_size != -1
330 && ref->size > ref->max_size)
331 ref->max_size = ref->size;
333 /* If MEM_OFFSET and MEM_SIZE get us outside of the base object of
334 the MEM_EXPR punt. This happens for STRICT_ALIGNMENT targets a lot. */
335 if (MEM_EXPR (mem) != get_spill_slot_decl (false)
336 && (ref->offset < 0
337 || (DECL_P (ref->base)
338 && (!host_integerp (DECL_SIZE (ref->base), 1)
339 || (TREE_INT_CST_LOW (DECL_SIZE ((ref->base)))
340 < (unsigned HOST_WIDE_INT)(ref->offset + ref->size))))))
341 return false;
343 return true;
346 /* Query the alias-oracle on whether the two memory rtx X and MEM may
347 alias. If TBAA_P is set also apply TBAA. Returns true if the
348 two rtxen may alias, false otherwise. */
350 static bool
351 rtx_refs_may_alias_p (const_rtx x, const_rtx mem, bool tbaa_p)
353 ao_ref ref1, ref2;
355 if (!ao_ref_from_mem (&ref1, x)
356 || !ao_ref_from_mem (&ref2, mem))
357 return true;
359 return refs_may_alias_p_1 (&ref1, &ref2, tbaa_p);
362 /* Returns a pointer to the alias set entry for ALIAS_SET, if there is
363 such an entry, or NULL otherwise. */
365 static inline alias_set_entry
366 get_alias_set_entry (alias_set_type alias_set)
368 return VEC_index (alias_set_entry, alias_sets, alias_set);
371 /* Returns nonzero if the alias sets for MEM1 and MEM2 are such that
372 the two MEMs cannot alias each other. */
374 static inline int
375 mems_in_disjoint_alias_sets_p (const_rtx mem1, const_rtx mem2)
377 /* Perform a basic sanity check. Namely, that there are no alias sets
378 if we're not using strict aliasing. This helps to catch bugs
379 whereby someone uses PUT_CODE, but doesn't clear MEM_ALIAS_SET, or
380 where a MEM is allocated in some way other than by the use of
381 gen_rtx_MEM, and the MEM_ALIAS_SET is not cleared. If we begin to
382 use alias sets to indicate that spilled registers cannot alias each
383 other, we might need to remove this check. */
384 gcc_assert (flag_strict_aliasing
385 || (!MEM_ALIAS_SET (mem1) && !MEM_ALIAS_SET (mem2)));
387 return ! alias_sets_conflict_p (MEM_ALIAS_SET (mem1), MEM_ALIAS_SET (mem2));
390 /* Insert the NODE into the splay tree given by DATA. Used by
391 record_alias_subset via splay_tree_foreach. */
393 static int
394 insert_subset_children (splay_tree_node node, void *data)
396 splay_tree_insert ((splay_tree) data, node->key, node->value);
398 return 0;
401 /* Return true if the first alias set is a subset of the second. */
403 bool
404 alias_set_subset_of (alias_set_type set1, alias_set_type set2)
406 alias_set_entry ase;
408 /* Everything is a subset of the "aliases everything" set. */
409 if (set2 == 0)
410 return true;
412 /* Otherwise, check if set1 is a subset of set2. */
413 ase = get_alias_set_entry (set2);
414 if (ase != 0
415 && (ase->has_zero_child
416 || splay_tree_lookup (ase->children,
417 (splay_tree_key) set1)))
418 return true;
419 return false;
422 /* Return 1 if the two specified alias sets may conflict. */
425 alias_sets_conflict_p (alias_set_type set1, alias_set_type set2)
427 alias_set_entry ase;
429 /* The easy case. */
430 if (alias_sets_must_conflict_p (set1, set2))
431 return 1;
433 /* See if the first alias set is a subset of the second. */
434 ase = get_alias_set_entry (set1);
435 if (ase != 0
436 && (ase->has_zero_child
437 || splay_tree_lookup (ase->children,
438 (splay_tree_key) set2)))
439 return 1;
441 /* Now do the same, but with the alias sets reversed. */
442 ase = get_alias_set_entry (set2);
443 if (ase != 0
444 && (ase->has_zero_child
445 || splay_tree_lookup (ase->children,
446 (splay_tree_key) set1)))
447 return 1;
449 /* The two alias sets are distinct and neither one is the
450 child of the other. Therefore, they cannot conflict. */
451 return 0;
454 static int
455 walk_mems_2 (rtx *x, rtx mem)
457 if (MEM_P (*x))
459 if (alias_sets_conflict_p (MEM_ALIAS_SET(*x), MEM_ALIAS_SET(mem)))
460 return 1;
462 return -1;
464 return 0;
467 static int
468 walk_mems_1 (rtx *x, rtx *pat)
470 if (MEM_P (*x))
472 /* Visit all MEMs in *PAT and check independence. */
473 if (for_each_rtx (pat, (rtx_function) walk_mems_2, *x))
474 /* Indicate that dependence was determined and stop traversal. */
475 return 1;
477 return -1;
479 return 0;
482 /* Return 1 if the two specified instructions have MEM expressions with conflicting alias sets. */
483 bool
484 insn_alias_sets_conflict_p (rtx insn1, rtx insn2)
486 /* For each pair of MEMs in INSN1 and INSN2 check their independence. */
487 return for_each_rtx (&PATTERN (insn1), (rtx_function) walk_mems_1,
488 &PATTERN (insn2));
491 /* Return 1 if the two specified alias sets will always conflict. */
494 alias_sets_must_conflict_p (alias_set_type set1, alias_set_type set2)
496 if (set1 == 0 || set2 == 0 || set1 == set2)
497 return 1;
499 return 0;
502 /* Return 1 if any MEM object of type T1 will always conflict (using the
503 dependency routines in this file) with any MEM object of type T2.
504 This is used when allocating temporary storage. If T1 and/or T2 are
505 NULL_TREE, it means we know nothing about the storage. */
508 objects_must_conflict_p (tree t1, tree t2)
510 alias_set_type set1, set2;
512 /* If neither has a type specified, we don't know if they'll conflict
513 because we may be using them to store objects of various types, for
514 example the argument and local variables areas of inlined functions. */
515 if (t1 == 0 && t2 == 0)
516 return 0;
518 /* If they are the same type, they must conflict. */
519 if (t1 == t2
520 /* Likewise if both are volatile. */
521 || (t1 != 0 && TYPE_VOLATILE (t1) && t2 != 0 && TYPE_VOLATILE (t2)))
522 return 1;
524 set1 = t1 ? get_alias_set (t1) : 0;
525 set2 = t2 ? get_alias_set (t2) : 0;
527 /* We can't use alias_sets_conflict_p because we must make sure
528 that every subtype of t1 will conflict with every subtype of
529 t2 for which a pair of subobjects of these respective subtypes
530 overlaps on the stack. */
531 return alias_sets_must_conflict_p (set1, set2);
534 /* Return true if all nested component references handled by
535 get_inner_reference in T are such that we should use the alias set
536 provided by the object at the heart of T.
538 This is true for non-addressable components (which don't have their
539 own alias set), as well as components of objects in alias set zero.
540 This latter point is a special case wherein we wish to override the
541 alias set used by the component, but we don't have per-FIELD_DECL
542 assignable alias sets. */
544 bool
545 component_uses_parent_alias_set (const_tree t)
547 while (1)
549 /* If we're at the end, it vacuously uses its own alias set. */
550 if (!handled_component_p (t))
551 return false;
553 switch (TREE_CODE (t))
555 case COMPONENT_REF:
556 if (DECL_NONADDRESSABLE_P (TREE_OPERAND (t, 1)))
557 return true;
558 break;
560 case ARRAY_REF:
561 case ARRAY_RANGE_REF:
562 if (TYPE_NONALIASED_COMPONENT (TREE_TYPE (TREE_OPERAND (t, 0))))
563 return true;
564 break;
566 case REALPART_EXPR:
567 case IMAGPART_EXPR:
568 break;
570 default:
571 /* Bitfields and casts are never addressable. */
572 return true;
575 t = TREE_OPERAND (t, 0);
576 if (get_alias_set (TREE_TYPE (t)) == 0)
577 return true;
581 /* Return the alias set for the memory pointed to by T, which may be
582 either a type or an expression. Return -1 if there is nothing
583 special about dereferencing T. */
585 static alias_set_type
586 get_deref_alias_set_1 (tree t)
588 /* If we're not doing any alias analysis, just assume everything
589 aliases everything else. */
590 if (!flag_strict_aliasing)
591 return 0;
593 /* All we care about is the type. */
594 if (! TYPE_P (t))
595 t = TREE_TYPE (t);
597 /* If we have an INDIRECT_REF via a void pointer, we don't
598 know anything about what that might alias. Likewise if the
599 pointer is marked that way. */
600 if (TREE_CODE (TREE_TYPE (t)) == VOID_TYPE
601 || TYPE_REF_CAN_ALIAS_ALL (t))
602 return 0;
604 return -1;
607 /* Return the alias set for the memory pointed to by T, which may be
608 either a type or an expression. */
610 alias_set_type
611 get_deref_alias_set (tree t)
613 alias_set_type set = get_deref_alias_set_1 (t);
615 /* Fall back to the alias-set of the pointed-to type. */
616 if (set == -1)
618 if (! TYPE_P (t))
619 t = TREE_TYPE (t);
620 set = get_alias_set (TREE_TYPE (t));
623 return set;
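/* For instance (a sketch of the two cases above, with strict aliasing
   enabled): for T == `int *' the result is simply
   get_alias_set (integer_type_node), the alias set of the pointed-to
   type, whereas for `void *', or for a pointer type that has
   TYPE_REF_CAN_ALIAS_ALL set, the result is alias set 0 and the
   dereference is assumed to alias anything.  */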
626 /* Return the alias set for T, which may be either a type or an
627 expression. Call language-specific routine for help, if needed. */
629 alias_set_type
630 get_alias_set (tree t)
632 alias_set_type set;
634 /* If we're not doing any alias analysis, just assume everything
635 aliases everything else. Also return 0 if this or its type is
636 an error. */
637 if (! flag_strict_aliasing || t == error_mark_node
638 || (! TYPE_P (t)
639 && (TREE_TYPE (t) == 0 || TREE_TYPE (t) == error_mark_node)))
640 return 0;
642 /* We can be passed either an expression or a type. This and the
643 language-specific routine may make mutually-recursive calls to each other
644 to figure out what to do. At each juncture, we see if this is a tree
645 that the language may need to handle specially. First handle things that
646 aren't types. */
647 if (! TYPE_P (t))
649 tree inner;
651 /* Remove any nops, then give the language a chance to do
652 something with this tree before we look at it. */
653 STRIP_NOPS (t);
654 set = lang_hooks.get_alias_set (t);
655 if (set != -1)
656 return set;
658 /* Retrieve the original memory reference if needed. */
659 if (TREE_CODE (t) == TARGET_MEM_REF)
660 t = TMR_ORIGINAL (t);
662 /* First see if the actual object referenced is an INDIRECT_REF from a
663 restrict-qualified pointer or a "void *". */
664 inner = t;
665 while (handled_component_p (inner))
667 inner = TREE_OPERAND (inner, 0);
668 STRIP_NOPS (inner);
671 if (INDIRECT_REF_P (inner))
673 set = get_deref_alias_set_1 (TREE_OPERAND (inner, 0));
674 if (set != -1)
675 return set;
678 /* Otherwise, pick up the outermost object that we could have a pointer
679 to, processing conversions as above. */
680 while (component_uses_parent_alias_set (t))
682 t = TREE_OPERAND (t, 0);
683 STRIP_NOPS (t);
686 /* If we've already determined the alias set for a decl, just return
687 it. This is necessary for C++ anonymous unions, whose component
688 variables don't look like union members (boo!). */
689 if (TREE_CODE (t) == VAR_DECL
690 && DECL_RTL_SET_P (t) && MEM_P (DECL_RTL (t)))
691 return MEM_ALIAS_SET (DECL_RTL (t));
693 /* Now all we care about is the type. */
694 t = TREE_TYPE (t);
697 /* Variant qualifiers don't affect the alias set, so get the main
698 variant. */
699 t = TYPE_MAIN_VARIANT (t);
701 /* Always use the canonical type as well. If this is a type that
702 requires structural comparisons to identify compatible types
703 use alias set zero. */
704 if (TYPE_STRUCTURAL_EQUALITY_P (t))
706 /* Allow the language to specify another alias set for this
707 type. */
708 set = lang_hooks.get_alias_set (t);
709 if (set != -1)
710 return set;
711 return 0;
713 t = TYPE_CANONICAL (t);
714 /* Canonical types shouldn't form a tree nor should the canonical
715 type require structural equality checks. */
716 gcc_assert (!TYPE_STRUCTURAL_EQUALITY_P (t) && TYPE_CANONICAL (t) == t);
718 /* If this is a type with a known alias set, return it. */
719 if (TYPE_ALIAS_SET_KNOWN_P (t))
720 return TYPE_ALIAS_SET (t);
722 /* We don't want to set TYPE_ALIAS_SET for incomplete types. */
723 if (!COMPLETE_TYPE_P (t))
725 /* For arrays with unknown size the conservative answer is the
726 alias set of the element type. */
727 if (TREE_CODE (t) == ARRAY_TYPE)
728 return get_alias_set (TREE_TYPE (t));
730 /* But return zero as a conservative answer for incomplete types. */
731 return 0;
734 /* See if the language has special handling for this type. */
735 set = lang_hooks.get_alias_set (t);
736 if (set != -1)
737 return set;
739 /* There are no objects of FUNCTION_TYPE, so there's no point in
740 using up an alias set for them. (There are, of course, pointers
741 and references to functions, but that's different.) */
742 else if (TREE_CODE (t) == FUNCTION_TYPE
743 || TREE_CODE (t) == METHOD_TYPE)
744 set = 0;
746 /* Unless the language specifies otherwise, let vector types alias
747 their components. This avoids some nasty type punning issues in
748 normal usage. And indeed lets vectors be treated more like an
749 array slice. */
750 else if (TREE_CODE (t) == VECTOR_TYPE)
751 set = get_alias_set (TREE_TYPE (t));
753 /* Unless the language specifies otherwise, treat array types the
754 same as their components. This avoids the asymmetry we get
755 through recording the components. Consider accessing a
756 character(kind=1) through a reference to a character(kind=1)[1:1].
757 Or consider if we want to assign integer(kind=4)[0:D.1387] and
758 integer(kind=4)[4] the same alias set or not.
759 Just be pragmatic here and make sure the array and its element
760 type get the same alias set assigned. */
761 else if (TREE_CODE (t) == ARRAY_TYPE
762 && !TYPE_NONALIASED_COMPONENT (t))
763 set = get_alias_set (TREE_TYPE (t));
765 else
766 /* Otherwise make a new alias set for this type. */
767 set = new_alias_set ();
769 TYPE_ALIAS_SET (t) = set;
771 /* If this is an aggregate type, we must record any component aliasing
772 information. */
773 if (AGGREGATE_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
774 record_component_aliases (t);
776 return set;
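/* For illustration, code that builds memory references typically feeds
   the result of get_alias_set into MEM_ALIAS_SET, roughly like this
   (a sketch; set_mem_attributes in emit-rtl.c does this for real):

	rtx mem = gen_rtx_MEM (mode, addr);
	set_mem_alias_set (mem, get_alias_set (expr));

   after which DIFFERENT_ALIAS_SETS_P above can compare two such MEMs.  */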
779 /* Return a brand-new alias set. */
781 alias_set_type
782 new_alias_set (void)
784 if (flag_strict_aliasing)
786 if (alias_sets == 0)
787 VEC_safe_push (alias_set_entry, gc, alias_sets, 0);
788 VEC_safe_push (alias_set_entry, gc, alias_sets, 0);
789 return VEC_length (alias_set_entry, alias_sets) - 1;
791 else
792 return 0;
795 /* Indicate that things in SUBSET can alias things in SUPERSET, but that
796 not everything that aliases SUPERSET also aliases SUBSET. For example,
797 in C, a store to an `int' can alias a load of a structure containing an
798 `int', and vice versa. But it can't alias a load of a 'double' member
799 of the same structure. Here, the structure would be the SUPERSET and
800 `int' the SUBSET. This relationship is also described in the comment at
801 the beginning of this file.
803 This function should be called only once per SUPERSET/SUBSET pair.
805 It is illegal for SUPERSET to be zero; everything is implicitly a
806 subset of alias set zero. */
808 void
809 record_alias_subset (alias_set_type superset, alias_set_type subset)
811 alias_set_entry superset_entry;
812 alias_set_entry subset_entry;
814 /* It is possible in complex type situations for both sets to be the same,
815 in which case we can ignore this operation. */
816 if (superset == subset)
817 return;
819 gcc_assert (superset);
821 superset_entry = get_alias_set_entry (superset);
822 if (superset_entry == 0)
824 /* Create an entry for the SUPERSET, so that we have a place to
825 attach the SUBSET. */
826 superset_entry = GGC_NEW (struct alias_set_entry_d);
827 superset_entry->alias_set = superset;
828 superset_entry->children
829 = splay_tree_new_ggc (splay_tree_compare_ints);
830 superset_entry->has_zero_child = 0;
831 VEC_replace (alias_set_entry, alias_sets, superset, superset_entry);
834 if (subset == 0)
835 superset_entry->has_zero_child = 1;
836 else
838 subset_entry = get_alias_set_entry (subset);
839 /* If there is an entry for the subset, enter all of its children
840 (if they are not already present) as children of the SUPERSET. */
841 if (subset_entry)
843 if (subset_entry->has_zero_child)
844 superset_entry->has_zero_child = 1;
846 splay_tree_foreach (subset_entry->children, insert_subset_children,
847 superset_entry->children);
850 /* Enter the SUBSET itself as a child of the SUPERSET. */
851 splay_tree_insert (superset_entry->children,
852 (splay_tree_key) subset, 0);
856 /* Record that component types of TYPE, if any, are part of that type for
857 aliasing purposes. For record types, we only record component types
858 for fields that are not marked non-addressable. For array types, we
859 only record the component type if it is not marked non-aliased. */
861 void
862 record_component_aliases (tree type)
864 alias_set_type superset = get_alias_set (type);
865 tree field;
867 if (superset == 0)
868 return;
870 switch (TREE_CODE (type))
872 case RECORD_TYPE:
873 case UNION_TYPE:
874 case QUAL_UNION_TYPE:
875 /* Recursively record aliases for the base classes, if there are any. */
876 if (TYPE_BINFO (type))
878 int i;
879 tree binfo, base_binfo;
881 for (binfo = TYPE_BINFO (type), i = 0;
882 BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
883 record_alias_subset (superset,
884 get_alias_set (BINFO_TYPE (base_binfo)));
886 for (field = TYPE_FIELDS (type); field != 0; field = TREE_CHAIN (field))
887 if (TREE_CODE (field) == FIELD_DECL && !DECL_NONADDRESSABLE_P (field))
888 record_alias_subset (superset, get_alias_set (TREE_TYPE (field)));
889 break;
891 case COMPLEX_TYPE:
892 record_alias_subset (superset, get_alias_set (TREE_TYPE (type)));
893 break;
895 /* VECTOR_TYPE and ARRAY_TYPE share the alias set with their
896 element type. */
898 default:
899 break;
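/* For the `struct S' of the overview comment, a call to
   record_component_aliases on its type therefore boils down to
   (a sketch; S_type stands for the RECORD_TYPE node of `struct S'):

	record_alias_subset (get_alias_set (S_type),
			     get_alias_set (integer_type_node));
	record_alias_subset (get_alias_set (S_type),
			     get_alias_set (double_type_node));

   which is exactly the superset/subset picture drawn near the top of
   this file.  */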
903 /* Allocate an alias set for use in storing and reading from the varargs
904 spill area. */
906 static GTY(()) alias_set_type varargs_set = -1;
908 alias_set_type
909 get_varargs_alias_set (void)
911 #if 1
912 /* We now lower VA_ARG_EXPR, and there's currently no way to attach the
913 varargs alias set to an INDIRECT_REF (FIXME!), so we can't
914 consistently use the varargs alias set for loads from the varargs
915 area. So don't use it anywhere. */
916 return 0;
917 #else
918 if (varargs_set == -1)
919 varargs_set = new_alias_set ();
921 return varargs_set;
922 #endif
925 /* Likewise, but used for the fixed portions of the frame, e.g., register
926 save areas. */
928 static GTY(()) alias_set_type frame_set = -1;
930 alias_set_type
931 get_frame_alias_set (void)
933 if (frame_set == -1)
934 frame_set = new_alias_set ();
936 return frame_set;
939 /* Inside SRC, the source of a SET, find a base address. */
941 static rtx
942 find_base_value (rtx src)
944 unsigned int regno;
946 #if defined (FIND_BASE_TERM)
947 /* Try machine-dependent ways to find the base term. */
948 src = FIND_BASE_TERM (src);
949 #endif
951 switch (GET_CODE (src))
953 case SYMBOL_REF:
954 case LABEL_REF:
955 return src;
957 case REG:
958 regno = REGNO (src);
959 /* At the start of a function, argument registers have known base
960 values which may be lost later. Returning an ADDRESS
961 expression here allows optimization based on argument values
962 even when the argument registers are used for other purposes. */
963 if (regno < FIRST_PSEUDO_REGISTER && copying_arguments)
964 return new_reg_base_value[regno];
966 /* If a pseudo has a known base value, return it. Do not do this
967 for non-fixed hard regs since it can result in a circular
968 dependency chain for registers which have values at function entry.
970 The test above is not sufficient because the scheduler may move
971 a copy out of an arg reg past the NOTE_INSN_FUNCTION_BEGIN. */
972 if ((regno >= FIRST_PSEUDO_REGISTER || fixed_regs[regno])
973 && regno < VEC_length (rtx, reg_base_value))
975 /* If we're inside init_alias_analysis, use new_reg_base_value
976 to reduce the number of relaxation iterations. */
977 if (new_reg_base_value && new_reg_base_value[regno]
978 && DF_REG_DEF_COUNT (regno) == 1)
979 return new_reg_base_value[regno];
981 if (VEC_index (rtx, reg_base_value, regno))
982 return VEC_index (rtx, reg_base_value, regno);
985 return 0;
987 case MEM:
988 /* Check for an argument passed in memory. Only record in the
989 copying-arguments block; it is too hard to track changes
990 otherwise. */
991 if (copying_arguments
992 && (XEXP (src, 0) == arg_pointer_rtx
993 || (GET_CODE (XEXP (src, 0)) == PLUS
994 && XEXP (XEXP (src, 0), 0) == arg_pointer_rtx)))
995 return gen_rtx_ADDRESS (VOIDmode, src);
996 return 0;
998 case CONST:
999 src = XEXP (src, 0);
1000 if (GET_CODE (src) != PLUS && GET_CODE (src) != MINUS)
1001 break;
1003 /* ... fall through ... */
1005 case PLUS:
1006 case MINUS:
1008 rtx temp, src_0 = XEXP (src, 0), src_1 = XEXP (src, 1);
1010 /* If either operand is a REG that is a known pointer, then it
1011 is the base. */
1012 if (REG_P (src_0) && REG_POINTER (src_0))
1013 return find_base_value (src_0);
1014 if (REG_P (src_1) && REG_POINTER (src_1))
1015 return find_base_value (src_1);
1017 /* If either operand is a REG, then see if we already have
1018 a known value for it. */
1019 if (REG_P (src_0))
1021 temp = find_base_value (src_0);
1022 if (temp != 0)
1023 src_0 = temp;
1026 if (REG_P (src_1))
1028 temp = find_base_value (src_1);
1029 if (temp != 0)
1030 src_1 = temp;
1033 /* If either base is a named object or a special address
1034 (like an argument or stack reference), then use it for the
1035 base term. */
1036 if (src_0 != 0
1037 && (GET_CODE (src_0) == SYMBOL_REF
1038 || GET_CODE (src_0) == LABEL_REF
1039 || (GET_CODE (src_0) == ADDRESS
1040 && GET_MODE (src_0) != VOIDmode)))
1041 return src_0;
1043 if (src_1 != 0
1044 && (GET_CODE (src_1) == SYMBOL_REF
1045 || GET_CODE (src_1) == LABEL_REF
1046 || (GET_CODE (src_1) == ADDRESS
1047 && GET_MODE (src_1) != VOIDmode)))
1048 return src_1;
1050 /* Guess which operand is the base address:
1051 If either operand is a symbol, then it is the base. If
1052 either operand is a CONST_INT, then the other is the base. */
1053 if (CONST_INT_P (src_1) || CONSTANT_P (src_0))
1054 return find_base_value (src_0);
1055 else if (CONST_INT_P (src_0) || CONSTANT_P (src_1))
1056 return find_base_value (src_1);
1058 return 0;
1061 case LO_SUM:
1062 /* The standard form is (lo_sum reg sym) so look only at the
1063 second operand. */
1064 return find_base_value (XEXP (src, 1));
1066 case AND:
1067 /* If the second operand is constant set the base
1068 address to the first operand. */
1069 if (CONST_INT_P (XEXP (src, 1)) && INTVAL (XEXP (src, 1)) != 0)
1070 return find_base_value (XEXP (src, 0));
1071 return 0;
1073 case TRUNCATE:
1074 /* As we do not know which address space the pointer is referring to, we can
1075 handle this only if the target does not support different pointer or
1076 address modes depending on the address space. */
1077 if (!target_default_pointer_address_modes_p ())
1078 break;
1079 if (GET_MODE_SIZE (GET_MODE (src)) < GET_MODE_SIZE (Pmode))
1080 break;
1081 /* Fall through. */
1082 case HIGH:
1083 case PRE_INC:
1084 case PRE_DEC:
1085 case POST_INC:
1086 case POST_DEC:
1087 case PRE_MODIFY:
1088 case POST_MODIFY:
1089 return find_base_value (XEXP (src, 0));
1091 case ZERO_EXTEND:
1092 case SIGN_EXTEND: /* used for NT/Alpha pointers */
1093 /* As we do not know which address space the pointer is referring to, we can
1094 handle this only if the target does not support different pointer or
1095 address modes depending on the address space. */
1096 if (!target_default_pointer_address_modes_p ())
1097 break;
1100 rtx temp = find_base_value (XEXP (src, 0));
1102 if (temp != 0 && CONSTANT_P (temp))
1103 temp = convert_memory_address (Pmode, temp);
1105 return temp;
1108 default:
1109 break;
1112 return 0;
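/* For example, given (plus (reg P) (const_int 8)) where pseudo P has a
   SYMBOL_REF recorded as its base value, the PLUS/MINUS case above
   first resolves the REG operand and then returns that SYMBOL_REF as
   the base (an illustrative walk-through of the code above, not an
   additional case).  */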
1115 /* Called from init_alias_analysis indirectly through note_stores. */
1117 /* While scanning insns to find base values, reg_seen[N] is nonzero if
1118 register N has been set in this function. */
1119 static char *reg_seen;
1121 /* Addresses which are known not to alias anything else are identified
1122 by a unique integer. */
1123 static int unique_id;
1125 static void
1126 record_set (rtx dest, const_rtx set, void *data ATTRIBUTE_UNUSED)
1128 unsigned regno;
1129 rtx src;
1130 int n;
1132 if (!REG_P (dest))
1133 return;
1135 regno = REGNO (dest);
1137 gcc_assert (regno < VEC_length (rtx, reg_base_value));
1139 /* If this spans multiple hard registers, then we must indicate that every
1140 register has an unusable value. */
1141 if (regno < FIRST_PSEUDO_REGISTER)
1142 n = hard_regno_nregs[regno][GET_MODE (dest)];
1143 else
1144 n = 1;
1145 if (n != 1)
1147 while (--n >= 0)
1149 reg_seen[regno + n] = 1;
1150 new_reg_base_value[regno + n] = 0;
1152 return;
1155 if (set)
1157 /* A CLOBBER wipes out any old value but does not prevent a previously
1158 unset register from acquiring a base address (i.e. reg_seen is not
1159 set). */
1160 if (GET_CODE (set) == CLOBBER)
1162 new_reg_base_value[regno] = 0;
1163 return;
1165 src = SET_SRC (set);
1167 else
1169 if (reg_seen[regno])
1171 new_reg_base_value[regno] = 0;
1172 return;
1174 reg_seen[regno] = 1;
1175 new_reg_base_value[regno] = gen_rtx_ADDRESS (Pmode,
1176 GEN_INT (unique_id++));
1177 return;
1180 /* If this is not the first set of REGNO, see whether the new value
1181 is related to the old one. There are two cases of interest:
1183 (1) The register might be assigned an entirely new value
1184 that has the same base term as the original set.
1186 (2) The set might be a simple self-modification that
1187 cannot change REGNO's base value.
1189 If neither case holds, reject the original base value as invalid.
1190 Note that the following situation is not detected:
1192 extern int x, y; int *p = &x; p += (&y-&x);
1194 ANSI C does not allow computing the difference of addresses
1195 of distinct top level objects. */
1196 if (new_reg_base_value[regno] != 0
1197 && find_base_value (src) != new_reg_base_value[regno])
1198 switch (GET_CODE (src))
1200 case LO_SUM:
1201 case MINUS:
1202 if (XEXP (src, 0) != dest && XEXP (src, 1) != dest)
1203 new_reg_base_value[regno] = 0;
1204 break;
1205 case PLUS:
1206 /* If the value we add in the PLUS is also a valid base value,
1207 this might be the actual base value, and the original value
1208 an index. */
1210 rtx other = NULL_RTX;
1212 if (XEXP (src, 0) == dest)
1213 other = XEXP (src, 1);
1214 else if (XEXP (src, 1) == dest)
1215 other = XEXP (src, 0);
1217 if (! other || find_base_value (other))
1218 new_reg_base_value[regno] = 0;
1219 break;
1221 case AND:
1222 if (XEXP (src, 0) != dest || !CONST_INT_P (XEXP (src, 1)))
1223 new_reg_base_value[regno] = 0;
1224 break;
1225 default:
1226 new_reg_base_value[regno] = 0;
1227 break;
1229 /* If this is the first set of a register, record the value. */
1230 else if ((regno >= FIRST_PSEUDO_REGISTER || ! fixed_regs[regno])
1231 && ! reg_seen[regno] && new_reg_base_value[regno] == 0)
1232 new_reg_base_value[regno] = find_base_value (src);
1234 reg_seen[regno] = 1;
1237 /* If a value is known for REGNO, return it. */
1240 get_reg_known_value (unsigned int regno)
1242 if (regno >= FIRST_PSEUDO_REGISTER)
1244 regno -= FIRST_PSEUDO_REGISTER;
1245 if (regno < reg_known_value_size)
1246 return reg_known_value[regno];
1248 return NULL;
1251 /* Set it. */
1253 static void
1254 set_reg_known_value (unsigned int regno, rtx val)
1256 if (regno >= FIRST_PSEUDO_REGISTER)
1258 regno -= FIRST_PSEUDO_REGISTER;
1259 if (regno < reg_known_value_size)
1260 reg_known_value[regno] = val;
1264 /* Similarly for reg_known_equiv_p. */
1266 bool
1267 get_reg_known_equiv_p (unsigned int regno)
1269 if (regno >= FIRST_PSEUDO_REGISTER)
1271 regno -= FIRST_PSEUDO_REGISTER;
1272 if (regno < reg_known_value_size)
1273 return reg_known_equiv_p[regno];
1275 return false;
1278 static void
1279 set_reg_known_equiv_p (unsigned int regno, bool val)
1281 if (regno >= FIRST_PSEUDO_REGISTER)
1283 regno -= FIRST_PSEUDO_REGISTER;
1284 if (regno < reg_known_value_size)
1285 reg_known_equiv_p[regno] = val;
1290 /* Returns a canonical version of X, from the point of view of alias
1291 analysis. (For example, if X is a MEM whose address is a register,
1292 and the register has a known value (say a SYMBOL_REF), then a MEM
1293 whose address is the SYMBOL_REF is returned.) */
1296 canon_rtx (rtx x)
1298 /* Recursively look for equivalences. */
1299 if (REG_P (x) && REGNO (x) >= FIRST_PSEUDO_REGISTER)
1301 rtx t = get_reg_known_value (REGNO (x));
1302 if (t == x)
1303 return x;
1304 if (t)
1305 return canon_rtx (t);
1308 if (GET_CODE (x) == PLUS)
1310 rtx x0 = canon_rtx (XEXP (x, 0));
1311 rtx x1 = canon_rtx (XEXP (x, 1));
1313 if (x0 != XEXP (x, 0) || x1 != XEXP (x, 1))
1315 if (CONST_INT_P (x0))
1316 return plus_constant (x1, INTVAL (x0));
1317 else if (CONST_INT_P (x1))
1318 return plus_constant (x0, INTVAL (x1));
1319 return gen_rtx_PLUS (GET_MODE (x), x0, x1);
1323 /* This gives us much better alias analysis when called from
1324 the loop optimizer. Note we want to leave the original
1325 MEM alone, but need to return the canonicalized MEM with
1326 all the flags with their original values. */
1327 else if (MEM_P (x))
1328 x = replace_equiv_address_nv (x, canon_rtx (XEXP (x, 0)));
1330 return x;
1333 /* Return 1 if X and Y are identical-looking rtx's.
1334 Expect that X and Y have already been canonicalized.
1336 We use the data in reg_known_value above to see if two registers with
1337 different numbers are, in fact, equivalent. */
1339 static int
1340 rtx_equal_for_memref_p (const_rtx x, const_rtx y)
1342 int i;
1343 int j;
1344 enum rtx_code code;
1345 const char *fmt;
1347 if (x == 0 && y == 0)
1348 return 1;
1349 if (x == 0 || y == 0)
1350 return 0;
1352 if (x == y)
1353 return 1;
1355 code = GET_CODE (x);
1356 /* Rtx's of different codes cannot be equal. */
1357 if (code != GET_CODE (y))
1358 return 0;
1360 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.
1361 (REG:SI x) and (REG:HI x) are NOT equivalent. */
1363 if (GET_MODE (x) != GET_MODE (y))
1364 return 0;
1366 /* Some RTL can be compared without a recursive examination. */
1367 switch (code)
1369 case REG:
1370 return REGNO (x) == REGNO (y);
1372 case LABEL_REF:
1373 return XEXP (x, 0) == XEXP (y, 0);
1375 case SYMBOL_REF:
1376 return XSTR (x, 0) == XSTR (y, 0);
1378 case VALUE:
1379 case CONST_INT:
1380 case CONST_DOUBLE:
1381 case CONST_FIXED:
1382 /* There's no need to compare the contents of CONST_DOUBLEs or
1383 CONST_INTs because pointer equality is a good enough
1384 comparison for these nodes. */
1385 return 0;
1387 default:
1388 break;
1391 /* canon_rtx knows how to handle plus. No need to canonicalize. */
1392 if (code == PLUS)
1393 return ((rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 0))
1394 && rtx_equal_for_memref_p (XEXP (x, 1), XEXP (y, 1)))
1395 || (rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 1))
1396 && rtx_equal_for_memref_p (XEXP (x, 1), XEXP (y, 0))));
1397 /* For commutative operations, the RTXs match if the operands match in any
1398 order. Also handle the simple binary and unary cases without a loop. */
1399 if (COMMUTATIVE_P (x))
1401 rtx xop0 = canon_rtx (XEXP (x, 0));
1402 rtx yop0 = canon_rtx (XEXP (y, 0));
1403 rtx yop1 = canon_rtx (XEXP (y, 1));
1405 return ((rtx_equal_for_memref_p (xop0, yop0)
1406 && rtx_equal_for_memref_p (canon_rtx (XEXP (x, 1)), yop1))
1407 || (rtx_equal_for_memref_p (xop0, yop1)
1408 && rtx_equal_for_memref_p (canon_rtx (XEXP (x, 1)), yop0)));
1410 else if (NON_COMMUTATIVE_P (x))
1412 return (rtx_equal_for_memref_p (canon_rtx (XEXP (x, 0)),
1413 canon_rtx (XEXP (y, 0)))
1414 && rtx_equal_for_memref_p (canon_rtx (XEXP (x, 1)),
1415 canon_rtx (XEXP (y, 1))));
1417 else if (UNARY_P (x))
1418 return rtx_equal_for_memref_p (canon_rtx (XEXP (x, 0)),
1419 canon_rtx (XEXP (y, 0)));
1421 /* Compare the elements. If any pair of corresponding elements
1422 fail to match, return 0 for the whole thing.
1424 Limit cases to types which actually appear in addresses. */
1426 fmt = GET_RTX_FORMAT (code);
1427 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1429 switch (fmt[i])
1431 case 'i':
1432 if (XINT (x, i) != XINT (y, i))
1433 return 0;
1434 break;
1436 case 'E':
1437 /* Two vectors must have the same length. */
1438 if (XVECLEN (x, i) != XVECLEN (y, i))
1439 return 0;
1441 /* And the corresponding elements must match. */
1442 for (j = 0; j < XVECLEN (x, i); j++)
1443 if (rtx_equal_for_memref_p (canon_rtx (XVECEXP (x, i, j)),
1444 canon_rtx (XVECEXP (y, i, j))) == 0)
1445 return 0;
1446 break;
1448 case 'e':
1449 if (rtx_equal_for_memref_p (canon_rtx (XEXP (x, i)),
1450 canon_rtx (XEXP (y, i))) == 0)
1451 return 0;
1452 break;
1454 /* This can happen for asm operands. */
1455 case 's':
1456 if (strcmp (XSTR (x, i), XSTR (y, i)))
1457 return 0;
1458 break;
1460 /* This can happen for an asm which clobbers memory. */
1461 case '0':
1462 break;
1464 /* It is believed that rtx's at this level will never
1465 contain anything but integers and other rtx's,
1466 except for within LABEL_REFs and SYMBOL_REFs. */
1467 default:
1468 gcc_unreachable ();
1471 return 1;
1475 find_base_term (rtx x)
1477 cselib_val *val;
1478 struct elt_loc_list *l;
1480 #if defined (FIND_BASE_TERM)
1481 /* Try machine-dependent ways to find the base term. */
1482 x = FIND_BASE_TERM (x);
1483 #endif
1485 switch (GET_CODE (x))
1487 case REG:
1488 return REG_BASE_VALUE (x);
1490 case TRUNCATE:
1491 /* As we do not know which address space the pointer is referring to, we can
1492 handle this only if the target does not support different pointer or
1493 address modes depending on the address space. */
1494 if (!target_default_pointer_address_modes_p ())
1495 return 0;
1496 if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (Pmode))
1497 return 0;
1498 /* Fall through. */
1499 case HIGH:
1500 case PRE_INC:
1501 case PRE_DEC:
1502 case POST_INC:
1503 case POST_DEC:
1504 case PRE_MODIFY:
1505 case POST_MODIFY:
1506 return find_base_term (XEXP (x, 0));
1508 case ZERO_EXTEND:
1509 case SIGN_EXTEND: /* Used for Alpha/NT pointers */
1510 /* As we do not know which address space the pointer is referring to, we can
1511 handle this only if the target does not support different pointer or
1512 address modes depending on the address space. */
1513 if (!target_default_pointer_address_modes_p ())
1514 return 0;
1517 rtx temp = find_base_term (XEXP (x, 0));
1519 if (temp != 0 && CONSTANT_P (temp))
1520 temp = convert_memory_address (Pmode, temp);
1522 return temp;
1525 case VALUE:
1526 val = CSELIB_VAL_PTR (x);
1527 if (!val)
1528 return 0;
1529 for (l = val->locs; l; l = l->next)
1530 if ((x = find_base_term (l->loc)) != 0)
1531 return x;
1532 return 0;
1534 case LO_SUM:
1535 /* The standard form is (lo_sum reg sym) so look only at the
1536 second operand. */
1537 return find_base_term (XEXP (x, 1));
1539 case CONST:
1540 x = XEXP (x, 0);
1541 if (GET_CODE (x) != PLUS && GET_CODE (x) != MINUS)
1542 return 0;
1543 /* Fall through. */
1544 case PLUS:
1545 case MINUS:
1547 rtx tmp1 = XEXP (x, 0);
1548 rtx tmp2 = XEXP (x, 1);
1550 /* This is a little bit tricky since we have to determine which of
1551 the two operands represents the real base address. Otherwise this
1552 routine may return the index register instead of the base register.
1554 That may cause us to believe no aliasing was possible, when in
1555 fact aliasing is possible.
1557 We use a few simple tests to guess the base register. Additional
1558 tests can certainly be added. For example, if one of the operands
1559 is a shift or multiply, then it must be the index register and the
1560 other operand is the base register. */
1562 if (tmp1 == pic_offset_table_rtx && CONSTANT_P (tmp2))
1563 return find_base_term (tmp2);
1565 /* If either operand is known to be a pointer, then use it
1566 to determine the base term. */
1567 if (REG_P (tmp1) && REG_POINTER (tmp1))
1569 rtx base = find_base_term (tmp1);
1570 if (base)
1571 return base;
1574 if (REG_P (tmp2) && REG_POINTER (tmp2))
1576 rtx base = find_base_term (tmp2);
1577 if (base)
1578 return base;
1581 /* Neither operand was known to be a pointer. Go ahead and find the
1582 base term for both operands. */
1583 tmp1 = find_base_term (tmp1);
1584 tmp2 = find_base_term (tmp2);
1586 /* If either base term is a named object or a special address
1587 (like an argument or stack reference), then use it for the
1588 base term. */
1589 if (tmp1 != 0
1590 && (GET_CODE (tmp1) == SYMBOL_REF
1591 || GET_CODE (tmp1) == LABEL_REF
1592 || (GET_CODE (tmp1) == ADDRESS
1593 && GET_MODE (tmp1) != VOIDmode)))
1594 return tmp1;
1596 if (tmp2 != 0
1597 && (GET_CODE (tmp2) == SYMBOL_REF
1598 || GET_CODE (tmp2) == LABEL_REF
1599 || (GET_CODE (tmp2) == ADDRESS
1600 && GET_MODE (tmp2) != VOIDmode)))
1601 return tmp2;
1603 /* We could not determine which of the two operands was the
1604 base register and which was the index. So we can determine
1605 nothing from the base alias check. */
1606 return 0;
1609 case AND:
1610 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) != 0)
1611 return find_base_term (XEXP (x, 0));
1612 return 0;
1614 case SYMBOL_REF:
1615 case LABEL_REF:
1616 return x;
1618 default:
1619 return 0;
1623 /* Return 0 if the addresses X and Y are known to point to different
1624 objects, 1 if they might be pointers to the same object. */
1626 static int
1627 base_alias_check (rtx x, rtx y, enum machine_mode x_mode,
1628 enum machine_mode y_mode)
1630 rtx x_base = find_base_term (x);
1631 rtx y_base = find_base_term (y);
1633 /* If the address itself has no known base see if a known equivalent
1634 value has one. If either address still has no known base, nothing
1635 is known about aliasing. */
1636 if (x_base == 0)
1638 rtx x_c;
1640 if (! flag_expensive_optimizations || (x_c = canon_rtx (x)) == x)
1641 return 1;
1643 x_base = find_base_term (x_c);
1644 if (x_base == 0)
1645 return 1;
1648 if (y_base == 0)
1650 rtx y_c;
1651 if (! flag_expensive_optimizations || (y_c = canon_rtx (y)) == y)
1652 return 1;
1654 y_base = find_base_term (y_c);
1655 if (y_base == 0)
1656 return 1;
1659 /* If the base addresses are equal nothing is known about aliasing. */
1660 if (rtx_equal_p (x_base, y_base))
1661 return 1;
1663 /* The base addresses are different expressions. If they are not accessed
1664 via AND, there is no conflict. We can bring knowledge of object
1665 alignment into play here. For example, on alpha, "char a, b;" can
1666 alias one another, though "char a; long b;" cannot. AND addresses may
1667 implicitly alias surrounding objects; i.e. unaligned access in DImode
1668 via AND address can alias all surrounding object types except those
1669 with alignment 8 or higher. */
1670 if (GET_CODE (x) == AND && GET_CODE (y) == AND)
1671 return 1;
1672 if (GET_CODE (x) == AND
1673 && (!CONST_INT_P (XEXP (x, 1))
1674 || (int) GET_MODE_UNIT_SIZE (y_mode) < -INTVAL (XEXP (x, 1))))
1675 return 1;
1676 if (GET_CODE (y) == AND
1677 && (!CONST_INT_P (XEXP (y, 1))
1678 || (int) GET_MODE_UNIT_SIZE (x_mode) < -INTVAL (XEXP (y, 1))))
1679 return 1;
1681 /* Differing symbols not accessed via AND never alias. */
1682 if (GET_CODE (x_base) != ADDRESS && GET_CODE (y_base) != ADDRESS)
1683 return 0;
1685 /* If one address is a stack reference there can be no alias:
1686 stack references using different base registers do not alias,
1687 a stack reference can not alias a parameter, and a stack reference
1688 can not alias a global. */
1689 if ((GET_CODE (x_base) == ADDRESS && GET_MODE (x_base) == Pmode)
1690 || (GET_CODE (y_base) == ADDRESS && GET_MODE (y_base) == Pmode))
1691 return 0;
1693 return 1;
1696 /* Convert the address X into something we can use. This is done by returning
1697 it unchanged unless it is a value; in the latter case we call cselib to get
1698 a more useful rtx. */
1701 get_addr (rtx x)
1703 cselib_val *v;
1704 struct elt_loc_list *l;
1706 if (GET_CODE (x) != VALUE)
1707 return x;
1708 v = CSELIB_VAL_PTR (x);
1709 if (v)
1711 for (l = v->locs; l; l = l->next)
1712 if (CONSTANT_P (l->loc))
1713 return l->loc;
1714 for (l = v->locs; l; l = l->next)
1715 if (!REG_P (l->loc) && !MEM_P (l->loc))
1716 return l->loc;
1717 if (v->locs)
1718 return v->locs->loc;
1720 return x;
1723 /* Return the address of the (N_REFS + 1)th memory reference to ADDR
1724 where SIZE is the size in bytes of the memory reference. If ADDR
1725 is not modified by the memory reference then ADDR is returned. */
1727 static rtx
1728 addr_side_effect_eval (rtx addr, int size, int n_refs)
1730 int offset = 0;
1732 switch (GET_CODE (addr))
1734 case PRE_INC:
1735 offset = (n_refs + 1) * size;
1736 break;
1737 case PRE_DEC:
1738 offset = -(n_refs + 1) * size;
1739 break;
1740 case POST_INC:
1741 offset = n_refs * size;
1742 break;
1743 case POST_DEC:
1744 offset = -n_refs * size;
1745 break;
1747 default:
1748 return addr;
1751 if (offset)
1752 addr = gen_rtx_PLUS (GET_MODE (addr), XEXP (addr, 0),
1753 GEN_INT (offset));
1754 else
1755 addr = XEXP (addr, 0);
1756 addr = canon_rtx (addr);
1758 return addr;
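/* As a worked example of the arithmetic above: for ADDR ==
   (post_inc:SI (reg R)) with SIZE == 4 and N_REFS == 1 the offset is
   1 * 4 == 4, so the function returns the canonical form of
   (plus (reg R) (const_int 4)), i.e. the address seen by the second
   reference.  (A sketch of the computation only, not a call made in
   this file.)  */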
1761 /* Return one if X and Y (memory addresses) reference the
1762 same location in memory or if the references overlap.
1763 Return zero if they do not overlap, else return
1764 minus one in which case they still might reference the same location.
1766 C is an offset accumulator. When
1767 C is nonzero, we are testing aliases between X and Y + C.
1768 XSIZE is the size in bytes of the X reference,
1769 similarly YSIZE is the size in bytes for Y.
1770 Expect that canon_rtx has been already called for X and Y.
1772 If XSIZE or YSIZE is zero, we do not know the amount of memory being
1773 referenced (the reference was BLKmode), so make the most pessimistic
1774 assumptions.
1776 If XSIZE or YSIZE is negative, we may access memory outside the object
1777 being referenced as a side effect. This can happen when using AND to
1778 align memory references, as is done on the Alpha.
1780 Nice to notice that varying addresses cannot conflict with fp if no
1781 local variables had their addresses taken, but that's too hard now.
1783 ??? Contrary to the tree alias oracle this does not return
1784 one for X + non-constant and Y + non-constant when X and Y are equal.
1785 If that is fixed the TBAA hack for union type-punning can be removed. */
1787 static int
1788 memrefs_conflict_p (int xsize, rtx x, int ysize, rtx y, HOST_WIDE_INT c)
1790 if (GET_CODE (x) == VALUE)
1792 if (REG_P (y))
1794 struct elt_loc_list *l = NULL;
1795 if (CSELIB_VAL_PTR (x))
1796 for (l = CSELIB_VAL_PTR (x)->locs; l; l = l->next)
1797 if (REG_P (l->loc) && rtx_equal_for_memref_p (l->loc, y))
1798 break;
1799 if (l)
1800 x = y;
1801 else
1802 x = get_addr (x);
1804 /* Don't call get_addr if y is the same VALUE. */
1805 else if (x != y)
1806 x = get_addr (x);
1808 if (GET_CODE (y) == VALUE)
1810 if (REG_P (x))
1812 struct elt_loc_list *l = NULL;
1813 if (CSELIB_VAL_PTR (y))
1814 for (l = CSELIB_VAL_PTR (y)->locs; l; l = l->next)
1815 if (REG_P (l->loc) && rtx_equal_for_memref_p (l->loc, x))
1816 break;
1817 if (l)
1818 y = x;
1819 else
1820 y = get_addr (y);
1822 /* Don't call get_addr if x is the same VALUE. */
1823 else if (y != x)
1824 y = get_addr (y);
1826 if (GET_CODE (x) == HIGH)
1827 x = XEXP (x, 0);
1828 else if (GET_CODE (x) == LO_SUM)
1829 x = XEXP (x, 1);
1830 else
1831 x = addr_side_effect_eval (x, xsize, 0);
1832 if (GET_CODE (y) == HIGH)
1833 y = XEXP (y, 0);
1834 else if (GET_CODE (y) == LO_SUM)
1835 y = XEXP (y, 1);
1836 else
1837 y = addr_side_effect_eval (y, ysize, 0);
1839 if (rtx_equal_for_memref_p (x, y))
1841 if (xsize <= 0 || ysize <= 0)
1842 return 1;
1843 if (c >= 0 && xsize > c)
1844 return 1;
1845 if (c < 0 && ysize+c > 0)
1846 return 1;
1847 return 0;
1850 /* This code used to check for conflicts involving stack references and
1851 globals but the base address alias code now handles these cases. */
1853 if (GET_CODE (x) == PLUS)
1855 /* The fact that X is canonicalized means that this
1856 PLUS rtx is canonicalized. */
1857 rtx x0 = XEXP (x, 0);
1858 rtx x1 = XEXP (x, 1);
1860 if (GET_CODE (y) == PLUS)
1862 /* The fact that Y is canonicalized means that this
1863 PLUS rtx is canonicalized. */
1864 rtx y0 = XEXP (y, 0);
1865 rtx y1 = XEXP (y, 1);
1867 if (rtx_equal_for_memref_p (x1, y1))
1868 return memrefs_conflict_p (xsize, x0, ysize, y0, c);
1869 if (rtx_equal_for_memref_p (x0, y0))
1870 return memrefs_conflict_p (xsize, x1, ysize, y1, c);
1871 if (CONST_INT_P (x1))
1873 if (CONST_INT_P (y1))
1874 return memrefs_conflict_p (xsize, x0, ysize, y0,
1875 c - INTVAL (x1) + INTVAL (y1));
1876 else
1877 return memrefs_conflict_p (xsize, x0, ysize, y,
1878 c - INTVAL (x1));
1880 else if (CONST_INT_P (y1))
1881 return memrefs_conflict_p (xsize, x, ysize, y0, c + INTVAL (y1));
1883 return -1;
1885 else if (CONST_INT_P (x1))
1886 return memrefs_conflict_p (xsize, x0, ysize, y, c - INTVAL (x1));
1888 else if (GET_CODE (y) == PLUS)
1890 /* The fact that Y is canonicalized means that this
1891 PLUS rtx is canonicalized. */
1892 rtx y0 = XEXP (y, 0);
1893 rtx y1 = XEXP (y, 1);
1895 if (CONST_INT_P (y1))
1896 return memrefs_conflict_p (xsize, x, ysize, y0, c + INTVAL (y1));
1897 else
1898 return -1;
1901 if (GET_CODE (x) == GET_CODE (y))
1902 switch (GET_CODE (x))
1904 case MULT:
1906 /* Handle cases where we expect the second operands to be the
1907 same, and check only whether the first operand would conflict
1908 or not. */
1909 rtx x0, y0;
1910 rtx x1 = canon_rtx (XEXP (x, 1));
1911 rtx y1 = canon_rtx (XEXP (y, 1));
1912 if (! rtx_equal_for_memref_p (x1, y1))
1913 return -1;
1914 x0 = canon_rtx (XEXP (x, 0));
1915 y0 = canon_rtx (XEXP (y, 0));
1916 if (rtx_equal_for_memref_p (x0, y0))
1917 return (xsize == 0 || ysize == 0
1918 || (c >= 0 && xsize > c) || (c < 0 && ysize+c > 0));
1920 /* Can't properly adjust our sizes. */
1921 if (!CONST_INT_P (x1))
1922 return -1;
1923 xsize /= INTVAL (x1);
1924 ysize /= INTVAL (x1);
1925 c /= INTVAL (x1);
1926 return memrefs_conflict_p (xsize, x0, ysize, y0, c);
1929 default:
1930 break;
1933 /* Treat an access through an AND (e.g. a subword access on an Alpha)
1934 as an access with indeterminate size. Assume that references
1935 besides AND are aligned, so if the size of the other reference is
1936 at least as large as the alignment, assume no other overlap. */
1937 if (GET_CODE (x) == AND && CONST_INT_P (XEXP (x, 1)))
1939 if (GET_CODE (y) == AND || ysize < -INTVAL (XEXP (x, 1)))
1940 xsize = -1;
1941 return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)), ysize, y, c);
1943 if (GET_CODE (y) == AND && CONST_INT_P (XEXP (y, 1)))
1945 /* ??? If we are indexing far enough into the array/structure, we
1946 may yet be able to determine that we cannot overlap. But we
1947 also need to know that we are far enough from the end not to overlap
1948 a following reference, so we do nothing with that for now. */
1949 if (GET_CODE (x) == AND || xsize < -INTVAL (XEXP (y, 1)))
1950 ysize = -1;
1951 return memrefs_conflict_p (xsize, x, ysize, canon_rtx (XEXP (y, 0)), c);
1954 if (CONSTANT_P (x))
1956 if (CONST_INT_P (x) && CONST_INT_P (y))
1958 c += (INTVAL (y) - INTVAL (x));
1959 return (xsize <= 0 || ysize <= 0
1960 || (c >= 0 && xsize > c) || (c < 0 && ysize+c > 0));
1963 if (GET_CODE (x) == CONST)
1965 if (GET_CODE (y) == CONST)
1966 return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)),
1967 ysize, canon_rtx (XEXP (y, 0)), c);
1968 else
1969 return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)),
1970 ysize, y, c);
1972 if (GET_CODE (y) == CONST)
1973 return memrefs_conflict_p (xsize, x, ysize,
1974 canon_rtx (XEXP (y, 0)), c);
1976 if (CONSTANT_P (y))
1977 return (xsize <= 0 || ysize <= 0
1978 || (rtx_equal_for_memref_p (x, y)
1979 && ((c >= 0 && xsize > c) || (c < 0 && ysize+c > 0))));
1981 return -1;
1984 return -1;
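The conflict test applied above, once both addresses have been reduced to a common base, is a plain interval check: X covers bytes [0, XSIZE) and Y covers bytes [C, C + YSIZE), with a non-positive size meaning the extent is unknown. A standalone restatement of that check follows; it is illustrative only and not part of alias.c.

#include <stdio.h>

/* Nonzero if the two byte ranges may overlap; a non-positive size is
   treated as "unknown" and reported as a conflict, matching the
   conservative behaviour described above.  */
static int
offsets_may_overlap (long xsize, long ysize, long c)
{
  if (xsize <= 0 || ysize <= 0)
    return 1;                /* Unknown extent: assume a conflict.  */
  if (c >= 0)
    return xsize > c;        /* X reaches past the start of Y.  */
  return ysize + c > 0;      /* Y reaches past the start of X.  */
}

int
main (void)
{
  printf ("%d\n", offsets_may_overlap (4, 4, 4));  /* 0: disjoint words  */
  printf ("%d\n", offsets_may_overlap (4, 4, 2));  /* 1: partial overlap */
  return 0;
}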
1987 /* Functions to compute memory dependencies.
1989 Since we process the insns in execution order, we can build tables
1990 to keep track of what registers are fixed (and not aliased), what registers
1991 are varying in known ways, and what registers are varying in unknown
1992 ways.
1994 If both memory references are volatile, then there must always be a
1995 dependence between the two references, since their order cannot be
1996 changed. A volatile and a non-volatile reference can be interchanged,
1997 though.
1999 A MEM_IN_STRUCT reference at a non-AND varying address can never
2000 conflict with a non-MEM_IN_STRUCT reference at a fixed address. We
2001 also must allow AND addresses, because they may generate accesses
2002 outside the object being referenced. This is used to generate
2003 aligned addresses from unaligned addresses, as in the Alpha
2004 storeqi_unaligned pattern. */
2006 /* Read dependence: X is read after read in MEM takes place. There can
2007 only be a dependence here if both reads are volatile. */
2010 read_dependence (const_rtx mem, const_rtx x)
2012 return MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem);
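The volatile rule above mirrors the C language guarantee that accesses to volatile objects keep their relative order, so two volatile reads carry a dependence even though no store is involved. A minimal illustration in plain C; the names here are illustrative only.

volatile int device_status;      /* e.g. a memory-mapped status register */

int
poll_twice (void)
{
  /* Both reads are volatile, so the compiler must perform them in this
     order; read_dependence reports a dependence for exactly this case.  */
  int first = device_status;
  int second = device_status;
  return first == second;
}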
2015 /* Returns MEM1 if and only if MEM1 is a scalar at a fixed address and
2016 MEM2 is a reference to a structure at a varying address, or returns
2017 MEM2 if vice versa. Otherwise, returns NULL_RTX. If a non-NULL
2018 value is returned, MEM1 and MEM2 can never alias. VARIES_P is used
2019 to decide whether or not an address may vary; it should return
2020 nonzero whenever variation is possible.
2021 MEM1_ADDR and MEM2_ADDR are the addresses of MEM1 and MEM2. */
2023 static const_rtx
2024 fixed_scalar_and_varying_struct_p (const_rtx mem1, const_rtx mem2, rtx mem1_addr,
2025 rtx mem2_addr,
2026 bool (*varies_p) (const_rtx, bool))
2028 if (! flag_strict_aliasing)
2029 return NULL_RTX;
2031 if (MEM_ALIAS_SET (mem2)
2032 && MEM_SCALAR_P (mem1) && MEM_IN_STRUCT_P (mem2)
2033 && !varies_p (mem1_addr, 1) && varies_p (mem2_addr, 1))
2034 /* MEM1 is a scalar at a fixed address; MEM2 is a struct at a
2035 varying address. */
2036 return mem1;
2038 if (MEM_ALIAS_SET (mem1)
2039 && MEM_IN_STRUCT_P (mem1) && MEM_SCALAR_P (mem2)
2040 && varies_p (mem1_addr, 1) && !varies_p (mem2_addr, 1))
2041 /* MEM2 is a scalar at a fixed address; MEM1 is a struct at a
2042 varying address. */
2043 return mem2;
2045 return NULL_RTX;
2048 /* Returns nonzero if something about the mode or address format MEM1
2049 indicates that it might well alias *anything*. */
2051 static int
2052 aliases_everything_p (const_rtx mem)
2054 if (GET_CODE (XEXP (mem, 0)) == AND)
2055 /* If the address is an AND, it's very hard to know what it is
2056 actually pointing at. */
2057 return 1;
2059 return 0;
2062 /* Return true if we can determine that the fields referenced cannot
2063 overlap for any pair of objects. */
2065 static bool
2066 nonoverlapping_component_refs_p (const_tree x, const_tree y)
2068 const_tree fieldx, fieldy, typex, typey, orig_y;
2070 if (!flag_strict_aliasing)
2071 return false;
2075 /* The comparison has to be done at a common type, since we don't
2076 know how the inheritance hierarchy works. */
2077 orig_y = y;
2080 fieldx = TREE_OPERAND (x, 1);
2081 typex = TYPE_MAIN_VARIANT (DECL_FIELD_CONTEXT (fieldx));
2083 y = orig_y;
2086 fieldy = TREE_OPERAND (y, 1);
2087 typey = TYPE_MAIN_VARIANT (DECL_FIELD_CONTEXT (fieldy));
2089 if (typex == typey)
2090 goto found;
2092 y = TREE_OPERAND (y, 0);
2094 while (y && TREE_CODE (y) == COMPONENT_REF);
2096 x = TREE_OPERAND (x, 0);
2098 while (x && TREE_CODE (x) == COMPONENT_REF);
2099 /* Never found a common type. */
2100 return false;
2102 found:
2103 /* If we're left with accessing different fields of a structure,
2104 then no overlap. */
2105 if (TREE_CODE (typex) == RECORD_TYPE
2106 && fieldx != fieldy)
2107 return true;
2109 /* The comparison on the current field failed. If we're accessing
2110 a deeply nested structure, look at the next outer level. */
2111 x = TREE_OPERAND (x, 0);
2112 y = TREE_OPERAND (y, 0);
2114 while (x && y
2115 && TREE_CODE (x) == COMPONENT_REF
2116 && TREE_CODE (y) == COMPONENT_REF);
2118 return false;
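The property this walk tries to prove can be seen with ordinary C aggregates: once both COMPONENT_REF chains reach the same record type, references through distinct fields of that record touch disjoint bytes. A small standalone illustration; the types below are hypothetical and not from GCC.

#include <assert.h>
#include <stddef.h>

struct point { int x; int y; };
struct shape { struct point origin; struct point extent; };

int
main (void)
{
  /* Both accesses share the common type 'struct shape' but go through
     different fields (origin vs. extent), so their storage cannot
     overlap -- the conclusion nonoverlapping_component_refs_p draws.  */
  assert (offsetof (struct shape, origin) + sizeof (struct point)
          <= offsetof (struct shape, extent));
  return 0;
}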
2121 /* Look at the bottom of the COMPONENT_REF list for a DECL, and return it. */
2123 static tree
2124 decl_for_component_ref (tree x)
2128 x = TREE_OPERAND (x, 0);
2130 while (x && TREE_CODE (x) == COMPONENT_REF);
2132 return x && DECL_P (x) ? x : NULL_TREE;
2135 /* Walk up the COMPONENT_REF list and adjust OFFSET to compensate for the
2136 offset of the field reference. */
2138 static rtx
2139 adjust_offset_for_component_ref (tree x, rtx offset)
2141 HOST_WIDE_INT ioffset;
2143 if (! offset)
2144 return NULL_RTX;
2146 ioffset = INTVAL (offset);
2149 tree offset = component_ref_field_offset (x);
2150 tree field = TREE_OPERAND (x, 1);
2152 if (! host_integerp (offset, 1))
2153 return NULL_RTX;
2154 ioffset += (tree_low_cst (offset, 1)
2155 + (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
2156 / BITS_PER_UNIT));
2158 x = TREE_OPERAND (x, 0);
2160 while (x && TREE_CODE (x) == COMPONENT_REF);
2162 return GEN_INT (ioffset);
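The loop above accumulates the byte offset of a nested field by summing the offset contributed at each COMPONENT_REF level. The same arithmetic can be checked in plain C with offsetof; a sketch using a made-up nested aggregate.

#include <assert.h>
#include <stddef.h>

struct inner { char pad[3]; int value; };
struct outer { short tag; struct inner in; };

int
main (void)
{
  struct outer o;

  /* The offset of o.in.value is the offset of 'in' within 'outer' plus the
     offset of 'value' within 'inner', just as the walk above adds the field
     offset (and bit offset, in bytes) for each level of the reference.  */
  assert ((char *) &o.in.value - (char *) &o
          == (long) (offsetof (struct outer, in)
                     + offsetof (struct inner, value)));
  return 0;
}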
2165 /* Return nonzero if we can determine the exprs corresponding to memrefs
2166 X and Y and they do not overlap. */
2169 nonoverlapping_memrefs_p (const_rtx x, const_rtx y)
2171 tree exprx = MEM_EXPR (x), expry = MEM_EXPR (y);
2172 rtx rtlx, rtly;
2173 rtx basex, basey;
2174 rtx moffsetx, moffsety;
2175 HOST_WIDE_INT offsetx = 0, offsety = 0, sizex, sizey, tem;
2177 /* Unless both have exprs, we can't tell anything. */
2178 if (exprx == 0 || expry == 0)
2179 return 0;
2181 /* For spill-slot accesses make sure we have valid offsets. */
2182 if ((exprx == get_spill_slot_decl (false)
2183 && ! MEM_OFFSET (x))
2184 || (expry == get_spill_slot_decl (false)
2185 && ! MEM_OFFSET (y)))
2186 return 0;
2188 /* If both are field references, we may be able to determine something. */
2189 if (TREE_CODE (exprx) == COMPONENT_REF
2190 && TREE_CODE (expry) == COMPONENT_REF
2191 && nonoverlapping_component_refs_p (exprx, expry))
2192 return 1;
2195 /* If the field reference test failed, look at the DECLs involved. */
2196 moffsetx = MEM_OFFSET (x);
2197 if (TREE_CODE (exprx) == COMPONENT_REF)
2199 tree t = decl_for_component_ref (exprx);
2200 if (! t)
2201 return 0;
2202 moffsetx = adjust_offset_for_component_ref (exprx, moffsetx);
2203 exprx = t;
2206 moffsety = MEM_OFFSET (y);
2207 if (TREE_CODE (expry) == COMPONENT_REF)
2209 tree t = decl_for_component_ref (expry);
2210 if (! t)
2211 return 0;
2212 moffsety = adjust_offset_for_component_ref (expry, moffsety);
2213 expry = t;
2216 if (! DECL_P (exprx) || ! DECL_P (expry))
2217 return 0;
2219 /* With invalid code we can end up storing into the constant pool.
2220 Bail out to avoid ICEing when creating RTL for this.
2221 See gfortran.dg/lto/20091028-2_0.f90. */
2222 if (TREE_CODE (exprx) == CONST_DECL
2223 || TREE_CODE (expry) == CONST_DECL)
2224 return 1;
2226 rtlx = DECL_RTL (exprx);
2227 rtly = DECL_RTL (expry);
2229 /* If either RTL is not a MEM, it must be a REG or CONCAT, meaning they
2230 can't overlap unless they are the same, because we never reuse the part
2231 of the stack frame used for locals for spilled pseudos. */
2232 if ((!MEM_P (rtlx) || !MEM_P (rtly))
2233 && ! rtx_equal_p (rtlx, rtly))
2234 return 1;
2236 /* If we have MEMs referring to different address spaces (which can
2237 potentially overlap), we cannot easily tell from the addresses
2238 whether the references overlap. */
2239 if (MEM_P (rtlx) && MEM_P (rtly)
2240 && MEM_ADDR_SPACE (rtlx) != MEM_ADDR_SPACE (rtly))
2241 return 0;
2243 /* Get the base and offsets of both decls. If either is a register, we
2244 know both are and are the same, so use that as the base. The only way
2245 we can avoid overlap is if we can deduce that they are nonoverlapping
2246 pieces of that decl, which is very rare. */
2247 basex = MEM_P (rtlx) ? XEXP (rtlx, 0) : rtlx;
2248 if (GET_CODE (basex) == PLUS && CONST_INT_P (XEXP (basex, 1)))
2249 offsetx = INTVAL (XEXP (basex, 1)), basex = XEXP (basex, 0);
2251 basey = MEM_P (rtly) ? XEXP (rtly, 0) : rtly;
2252 if (GET_CODE (basey) == PLUS && CONST_INT_P (XEXP (basey, 1)))
2253 offsety = INTVAL (XEXP (basey, 1)), basey = XEXP (basey, 0);
2255 /* If the bases are different, we know they do not overlap if both
2256 are constants or if one is a constant and the other a pointer into the
2257 stack frame. Otherwise a different base means we can't tell if they
2258 overlap or not. */
2259 if (! rtx_equal_p (basex, basey))
2260 return ((CONSTANT_P (basex) && CONSTANT_P (basey))
2261 || (CONSTANT_P (basex) && REG_P (basey)
2262 && REGNO_PTR_FRAME_P (REGNO (basey)))
2263 || (CONSTANT_P (basey) && REG_P (basex)
2264 && REGNO_PTR_FRAME_P (REGNO (basex))));
2266 sizex = (!MEM_P (rtlx) ? (int) GET_MODE_SIZE (GET_MODE (rtlx))
2267 : MEM_SIZE (rtlx) ? INTVAL (MEM_SIZE (rtlx))
2268 : -1);
2269 sizey = (!MEM_P (rtly) ? (int) GET_MODE_SIZE (GET_MODE (rtly))
2270 : MEM_SIZE (rtly) ? INTVAL (MEM_SIZE (rtly)) :
2271 -1);
2273 /* If we have an offset for either memref, it can update the values computed
2274 above. */
2275 if (moffsetx)
2276 offsetx += INTVAL (moffsetx), sizex -= INTVAL (moffsetx);
2277 if (moffsety)
2278 offsety += INTVAL (moffsety), sizey -= INTVAL (moffsety);
2280 /* If a memref has both a size and an offset, we can use the smaller size.
2281 We can't do this if the offset isn't known because we must view this
2282 memref as being anywhere inside the DECL's MEM. */
2283 if (MEM_SIZE (x) && moffsetx)
2284 sizex = INTVAL (MEM_SIZE (x));
2285 if (MEM_SIZE (y) && moffsety)
2286 sizey = INTVAL (MEM_SIZE (y));
2288 /* Put the values of the memref with the lower offset in X's values. */
2289 if (offsetx > offsety)
2291 tem = offsetx, offsetx = offsety, offsety = tem;
2292 tem = sizex, sizex = sizey, sizey = tem;
2295 /* If we don't know the size of the lower-offset value, we can't tell
2296 if they conflict. Otherwise, we do the test. */
2297 return sizex >= 0 && offsety >= offsetx + sizex;
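After normalizing so that X is the reference with the lower offset, the return statement above is a straightforward disjoint-interval test, with a negative size standing for "unknown". A standalone restatement with a few sample values; it is illustrative only.

#include <stdio.h>

/* Nonzero when [offsetx, offsetx + sizex) cannot reach offsety, i.e. the
   two ranges are provably disjoint; sizex < 0 means the size is unknown.  */
static int
known_disjoint (long offsetx, long sizex, long offsety)
{
  return sizex >= 0 && offsety >= offsetx + sizex;
}

int
main (void)
{
  printf ("%d\n", known_disjoint (0, 4, 4));   /* 1: bytes 0-3 vs. 4-...  */
  printf ("%d\n", known_disjoint (0, 8, 4));   /* 0: bytes 0-7 cover 4    */
  printf ("%d\n", known_disjoint (0, -1, 4));  /* 0: unknown size         */
  return 0;
}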
2300 /* True dependence: X is read after store in MEM takes place. */
2303 true_dependence (const_rtx mem, enum machine_mode mem_mode, const_rtx x,
2304 bool (*varies) (const_rtx, bool))
2306 rtx x_addr, mem_addr;
2307 rtx base;
2308 int ret;
2310 if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
2311 return 1;
2313 /* (mem:BLK (scratch)) is a special mechanism to conflict with everything.
2314 This is used in epilogue deallocation functions, and in cselib. */
2315 if (GET_MODE (x) == BLKmode && GET_CODE (XEXP (x, 0)) == SCRATCH)
2316 return 1;
2317 if (GET_MODE (mem) == BLKmode && GET_CODE (XEXP (mem, 0)) == SCRATCH)
2318 return 1;
2319 if (MEM_ALIAS_SET (x) == ALIAS_SET_MEMORY_BARRIER
2320 || MEM_ALIAS_SET (mem) == ALIAS_SET_MEMORY_BARRIER)
2321 return 1;
2323 /* Read-only memory is by definition never modified, and therefore can't
2324 conflict with anything. We don't expect to find read-only set on MEM,
2325 but stupid user tricks can produce them, so don't die. */
2326 if (MEM_READONLY_P (x))
2327 return 0;
2329 /* If we have MEMs referring to different address spaces (which can
2330 potentially overlap), we cannot easily tell from the addresses
2331 whether the references overlap. */
2332 if (MEM_ADDR_SPACE (mem) != MEM_ADDR_SPACE (x))
2333 return 1;
2335 if (mem_mode == VOIDmode)
2336 mem_mode = GET_MODE (mem);
2338 x_addr = XEXP (x, 0);
2339 mem_addr = XEXP (mem, 0);
2340 if (!((GET_CODE (x_addr) == VALUE
2341 && GET_CODE (mem_addr) != VALUE
2342 && reg_mentioned_p (x_addr, mem_addr))
2343 || (GET_CODE (x_addr) != VALUE
2344 && GET_CODE (mem_addr) == VALUE
2345 && reg_mentioned_p (mem_addr, x_addr))))
2347 x_addr = get_addr (x_addr);
2348 mem_addr = get_addr (mem_addr);
2351 base = find_base_term (x_addr);
2352 if (base && (GET_CODE (base) == LABEL_REF
2353 || (GET_CODE (base) == SYMBOL_REF
2354 && CONSTANT_POOL_ADDRESS_P (base))))
2355 return 0;
2357 if (! base_alias_check (x_addr, mem_addr, GET_MODE (x), mem_mode))
2358 return 0;
2360 x_addr = canon_rtx (x_addr);
2361 mem_addr = canon_rtx (mem_addr);
2363 if ((ret = memrefs_conflict_p (GET_MODE_SIZE (mem_mode), mem_addr,
2364 SIZE_FOR_MODE (x), x_addr, 0)) != -1)
2365 return ret;
2367 if (DIFFERENT_ALIAS_SETS_P (x, mem))
2368 return 0;
2370 if (nonoverlapping_memrefs_p (mem, x))
2371 return 0;
2373 if (aliases_everything_p (x))
2374 return 1;
2376 /* We cannot use aliases_everything_p to test MEM, since we must look
2377 at MEM_MODE, rather than GET_MODE (MEM). */
2378 if (mem_mode == QImode || GET_CODE (mem_addr) == AND)
2379 return 1;
2381 /* In true_dependence we also allow BLKmode to alias anything. Why
2382 don't we do this in anti_dependence and output_dependence? */
2383 if (mem_mode == BLKmode || GET_MODE (x) == BLKmode)
2384 return 1;
2386 if (fixed_scalar_and_varying_struct_p (mem, x, mem_addr, x_addr, varies))
2387 return 0;
2389 return rtx_refs_may_alias_p (x, mem, true);
2392 /* Canonical true dependence: X is read after store in MEM takes place.
2393 Variant of true_dependence which assumes MEM has already been
2394 canonicalized (hence we no longer do that here).
2395 The mem_addr argument has been added, since true_dependence computed
2396 this value prior to canonicalizing.
2397 If x_addr is non-NULL, it is used in preference to XEXP (x, 0). */
2400 canon_true_dependence (const_rtx mem, enum machine_mode mem_mode, rtx mem_addr,
2401 const_rtx x, rtx x_addr, bool (*varies) (const_rtx, bool))
2403 int ret;
2405 if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
2406 return 1;
2408 /* (mem:BLK (scratch)) is a special mechanism to conflict with everything.
2409 This is used in epilogue deallocation functions. */
2410 if (GET_MODE (x) == BLKmode && GET_CODE (XEXP (x, 0)) == SCRATCH)
2411 return 1;
2412 if (GET_MODE (mem) == BLKmode && GET_CODE (XEXP (mem, 0)) == SCRATCH)
2413 return 1;
2414 if (MEM_ALIAS_SET (x) == ALIAS_SET_MEMORY_BARRIER
2415 || MEM_ALIAS_SET (mem) == ALIAS_SET_MEMORY_BARRIER)
2416 return 1;
2418 /* Read-only memory is by definition never modified, and therefore can't
2419 conflict with anything. We don't expect to find read-only set on MEM,
2420 but stupid user tricks can produce them, so don't die. */
2421 if (MEM_READONLY_P (x))
2422 return 0;
2424 /* If we have MEMs referring to different address spaces (which can
2425 potentially overlap), we cannot easily tell from the addresses
2426 whether the references overlap. */
2427 if (MEM_ADDR_SPACE (mem) != MEM_ADDR_SPACE (x))
2428 return 1;
2430 if (! x_addr)
2432 x_addr = XEXP (x, 0);
2433 if (!((GET_CODE (x_addr) == VALUE
2434 && GET_CODE (mem_addr) != VALUE
2435 && reg_mentioned_p (x_addr, mem_addr))
2436 || (GET_CODE (x_addr) != VALUE
2437 && GET_CODE (mem_addr) == VALUE
2438 && reg_mentioned_p (mem_addr, x_addr))))
2439 x_addr = get_addr (x_addr);
2442 if (! base_alias_check (x_addr, mem_addr, GET_MODE (x), mem_mode))
2443 return 0;
2445 x_addr = canon_rtx (x_addr);
2446 if ((ret = memrefs_conflict_p (GET_MODE_SIZE (mem_mode), mem_addr,
2447 SIZE_FOR_MODE (x), x_addr, 0)) != -1)
2448 return ret;
2450 if (DIFFERENT_ALIAS_SETS_P (x, mem))
2451 return 0;
2453 if (nonoverlapping_memrefs_p (x, mem))
2454 return 0;
2456 if (aliases_everything_p (x))
2457 return 1;
2459 /* We cannot use aliases_everything_p to test MEM, since we must look
2460 at MEM_MODE, rather than GET_MODE (MEM). */
2461 if (mem_mode == QImode || GET_CODE (mem_addr) == AND)
2462 return 1;
2464 /* In true_dependence we also allow BLKmode to alias anything. Why
2465 don't we do this in anti_dependence and output_dependence? */
2466 if (mem_mode == BLKmode || GET_MODE (x) == BLKmode)
2467 return 1;
2469 if (fixed_scalar_and_varying_struct_p (mem, x, mem_addr, x_addr, varies))
2470 return 0;
2472 return rtx_refs_may_alias_p (x, mem, true);
2475 /* Returns nonzero if a write to X might alias a previous read from
2476 (or, if WRITEP is nonzero, a write to) MEM. */
2478 static int
2479 write_dependence_p (const_rtx mem, const_rtx x, int writep)
2481 rtx x_addr, mem_addr;
2482 const_rtx fixed_scalar;
2483 rtx base;
2484 int ret;
2486 if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
2487 return 1;
2489 /* (mem:BLK (scratch)) is a special mechanism to conflict with everything.
2490 This is used in epilogue deallocation functions. */
2491 if (GET_MODE (x) == BLKmode && GET_CODE (XEXP (x, 0)) == SCRATCH)
2492 return 1;
2493 if (GET_MODE (mem) == BLKmode && GET_CODE (XEXP (mem, 0)) == SCRATCH)
2494 return 1;
2495 if (MEM_ALIAS_SET (x) == ALIAS_SET_MEMORY_BARRIER
2496 || MEM_ALIAS_SET (mem) == ALIAS_SET_MEMORY_BARRIER)
2497 return 1;
2499 /* A read from read-only memory can't conflict with read-write memory. */
2500 if (!writep && MEM_READONLY_P (mem))
2501 return 0;
2503 /* If we have MEMs referring to different address spaces (which can
2504 potentially overlap), we cannot easily tell from the addresses
2505 whether the references overlap. */
2506 if (MEM_ADDR_SPACE (mem) != MEM_ADDR_SPACE (x))
2507 return 1;
2509 x_addr = XEXP (x, 0);
2510 mem_addr = XEXP (mem, 0);
2511 if (!((GET_CODE (x_addr) == VALUE
2512 && GET_CODE (mem_addr) != VALUE
2513 && reg_mentioned_p (x_addr, mem_addr))
2514 || (GET_CODE (x_addr) != VALUE
2515 && GET_CODE (mem_addr) == VALUE
2516 && reg_mentioned_p (mem_addr, x_addr))))
2518 x_addr = get_addr (x_addr);
2519 mem_addr = get_addr (mem_addr);
2522 if (! writep)
2524 base = find_base_term (mem_addr);
2525 if (base && (GET_CODE (base) == LABEL_REF
2526 || (GET_CODE (base) == SYMBOL_REF
2527 && CONSTANT_POOL_ADDRESS_P (base))))
2528 return 0;
2531 if (! base_alias_check (x_addr, mem_addr, GET_MODE (x),
2532 GET_MODE (mem)))
2533 return 0;
2535 x_addr = canon_rtx (x_addr);
2536 mem_addr = canon_rtx (mem_addr);
2538 if ((ret = memrefs_conflict_p (SIZE_FOR_MODE (mem), mem_addr,
2539 SIZE_FOR_MODE (x), x_addr, 0)) != -1)
2540 return ret;
2542 if (nonoverlapping_memrefs_p (x, mem))
2543 return 0;
2545 fixed_scalar
2546 = fixed_scalar_and_varying_struct_p (mem, x, mem_addr, x_addr,
2547 rtx_addr_varies_p);
2549 if ((fixed_scalar == mem && !aliases_everything_p (x))
2550 || (fixed_scalar == x && !aliases_everything_p (mem)))
2551 return 0;
2553 return rtx_refs_may_alias_p (x, mem, false);
2556 /* Anti dependence: X is written after read in MEM takes place. */
2559 anti_dependence (const_rtx mem, const_rtx x)
2561 return write_dependence_p (mem, x, /*writep=*/0);
2564 /* Output dependence: X is written after store in MEM takes place. */
2567 output_dependence (const_rtx mem, const_rtx x)
2569 return write_dependence_p (mem, x, /*writep=*/1);
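anti_dependence and output_dependence above, together with true_dependence, cover the three classical dependence kinds. The distinction is easiest to see at the source level; a small illustrative fragment in plain C, unrelated to any RTL.

void
dependence_kinds (int *a)
{
  int t;

  a[0] = 1;      /* store                                                */
  t = a[0];      /* true dependence: read after the write to a[0]        */
  a[0] = t + 1;  /* output dependence: write after the first write,
                    and an anti dependence on the read just above        */
  a[1] = a[0];   /* reads a[0] ...                                       */
  a[0] = 2;      /* ... anti dependence: write after that read           */
}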
2573 void
2574 init_alias_target (void)
2576 int i;
2578 memset (static_reg_base_value, 0, sizeof static_reg_base_value);
2580 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2581 /* Check whether this register can hold an incoming pointer
2582 argument. FUNCTION_ARG_REGNO_P tests outgoing register
2583 numbers, so translate if necessary due to register windows. */
2584 if (FUNCTION_ARG_REGNO_P (OUTGOING_REGNO (i))
2585 && HARD_REGNO_MODE_OK (i, Pmode))
2586 static_reg_base_value[i]
2587 = gen_rtx_ADDRESS (VOIDmode, gen_rtx_REG (Pmode, i));
2589 static_reg_base_value[STACK_POINTER_REGNUM]
2590 = gen_rtx_ADDRESS (Pmode, stack_pointer_rtx);
2591 static_reg_base_value[ARG_POINTER_REGNUM]
2592 = gen_rtx_ADDRESS (Pmode, arg_pointer_rtx);
2593 static_reg_base_value[FRAME_POINTER_REGNUM]
2594 = gen_rtx_ADDRESS (Pmode, frame_pointer_rtx);
2595 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2596 static_reg_base_value[HARD_FRAME_POINTER_REGNUM]
2597 = gen_rtx_ADDRESS (Pmode, hard_frame_pointer_rtx);
2598 #endif
2601 /* Set MEMORY_MODIFIED when X modifies DATA (which is assumed
2602 to be a memory reference). */
2603 static bool memory_modified;
2604 static void
2605 memory_modified_1 (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
2607 if (MEM_P (x))
2609 if (anti_dependence (x, (const_rtx)data) || output_dependence (x, (const_rtx)data))
2610 memory_modified = true;
2615 /* Return true when INSN possibly modifies the memory contents of MEM
2616 (i.e. the address can be modified). */
2617 bool
2618 memory_modified_in_insn_p (const_rtx mem, const_rtx insn)
2620 if (!INSN_P (insn))
2621 return false;
2622 memory_modified = false;
2623 note_stores (PATTERN (insn), memory_modified_1, CONST_CAST_RTX(mem));
2624 return memory_modified;
2627 /* Initialize the aliasing machinery. Initialize the REG_KNOWN_VALUE
2628 array. */
2630 void
2631 init_alias_analysis (void)
2633 unsigned int maxreg = max_reg_num ();
2634 int changed, pass;
2635 int i;
2636 unsigned int ui;
2637 rtx insn;
2639 timevar_push (TV_ALIAS_ANALYSIS);
2641 reg_known_value_size = maxreg - FIRST_PSEUDO_REGISTER;
2642 reg_known_value = GGC_CNEWVEC (rtx, reg_known_value_size);
2643 reg_known_equiv_p = XCNEWVEC (bool, reg_known_value_size);
2645 /* If we have memory allocated from the previous run, use it. */
2646 if (old_reg_base_value)
2647 reg_base_value = old_reg_base_value;
2649 if (reg_base_value)
2650 VEC_truncate (rtx, reg_base_value, 0);
2652 VEC_safe_grow_cleared (rtx, gc, reg_base_value, maxreg);
2654 new_reg_base_value = XNEWVEC (rtx, maxreg);
2655 reg_seen = XNEWVEC (char, maxreg);
2657 /* The basic idea is that each pass through this loop will use the
2658 "constant" information from the previous pass to propagate alias
2659 information through another level of assignments.
2661 This could get expensive if the assignment chains are long. Maybe
2662 we should throttle the number of iterations, possibly based on
2663 the optimization level or flag_expensive_optimizations.
2665 We could propagate more information in the first pass by making use
2666 of DF_REG_DEF_COUNT to determine immediately that the alias information
2667 for a pseudo is "constant".
2669 A program with an uninitialized variable can cause an infinite loop
2670 here. Instead of doing a full dataflow analysis to detect such problems
2671 we just cap the number of iterations for the loop.
2673 The state of the arrays for the set chain in question does not matter
2674 since the program has undefined behavior. */
2676 pass = 0;
2679 /* Assume nothing will change this iteration of the loop. */
2680 changed = 0;
2682 /* We want to assign the same IDs each iteration of this loop, so
2683 start counting from zero each iteration of the loop. */
2684 unique_id = 0;
2686 /* We're at the start of the function each iteration through the
2687 loop, so we're copying arguments. */
2688 copying_arguments = true;
2690 /* Wipe the potential alias information clean for this pass. */
2691 memset (new_reg_base_value, 0, maxreg * sizeof (rtx));
2693 /* Wipe the reg_seen array clean. */
2694 memset (reg_seen, 0, maxreg);
2696 /* Mark all hard registers which may contain an address.
2697 The stack, frame and argument pointers may contain an address.
2698 An argument register which can hold a Pmode value may contain
2699 an address even if it is not in BASE_REGS.
2701 The address expression is VOIDmode for an argument and
2702 Pmode for other registers. */
2704 memcpy (new_reg_base_value, static_reg_base_value,
2705 FIRST_PSEUDO_REGISTER * sizeof (rtx));
2707 /* Walk the insns adding values to the new_reg_base_value array. */
2708 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
2710 if (INSN_P (insn))
2712 rtx note, set;
2714 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
2715 /* The prologue/epilogue insns are not threaded onto the
2716 insn chain until after reload has completed. Thus,
2717 there is no sense wasting time checking if INSN is in
2718 the prologue/epilogue until after reload has completed. */
2719 if (reload_completed
2720 && prologue_epilogue_contains (insn))
2721 continue;
2722 #endif
2724 /* If this insn has a noalias note, process it. Otherwise,
2725 scan for sets. A simple set will have no side effects
2726 that could change the base value of any other register. */
2728 if (GET_CODE (PATTERN (insn)) == SET
2729 && REG_NOTES (insn) != 0
2730 && find_reg_note (insn, REG_NOALIAS, NULL_RTX))
2731 record_set (SET_DEST (PATTERN (insn)), NULL_RTX, NULL);
2732 else
2733 note_stores (PATTERN (insn), record_set, NULL);
2735 set = single_set (insn);
2737 if (set != 0
2738 && REG_P (SET_DEST (set))
2739 && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
2741 unsigned int regno = REGNO (SET_DEST (set));
2742 rtx src = SET_SRC (set);
2743 rtx t;
2745 note = find_reg_equal_equiv_note (insn);
2746 if (note && REG_NOTE_KIND (note) == REG_EQUAL
2747 && DF_REG_DEF_COUNT (regno) != 1)
2748 note = NULL_RTX;
2750 if (note != NULL_RTX
2751 && GET_CODE (XEXP (note, 0)) != EXPR_LIST
2752 && ! rtx_varies_p (XEXP (note, 0), 1)
2753 && ! reg_overlap_mentioned_p (SET_DEST (set),
2754 XEXP (note, 0)))
2756 set_reg_known_value (regno, XEXP (note, 0));
2757 set_reg_known_equiv_p (regno,
2758 REG_NOTE_KIND (note) == REG_EQUIV);
2760 else if (DF_REG_DEF_COUNT (regno) == 1
2761 && GET_CODE (src) == PLUS
2762 && REG_P (XEXP (src, 0))
2763 && (t = get_reg_known_value (REGNO (XEXP (src, 0))))
2764 && CONST_INT_P (XEXP (src, 1)))
2766 t = plus_constant (t, INTVAL (XEXP (src, 1)));
2767 set_reg_known_value (regno, t);
2768 set_reg_known_equiv_p (regno, 0);
2770 else if (DF_REG_DEF_COUNT (regno) == 1
2771 && ! rtx_varies_p (src, 1))
2773 set_reg_known_value (regno, src);
2774 set_reg_known_equiv_p (regno, 0);
2778 else if (NOTE_P (insn)
2779 && NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG)
2780 copying_arguments = false;
2783 /* Now propagate values from new_reg_base_value to reg_base_value. */
2784 gcc_assert (maxreg == (unsigned int) max_reg_num ());
2786 for (ui = 0; ui < maxreg; ui++)
2788 if (new_reg_base_value[ui]
2789 && new_reg_base_value[ui] != VEC_index (rtx, reg_base_value, ui)
2790 && ! rtx_equal_p (new_reg_base_value[ui],
2791 VEC_index (rtx, reg_base_value, ui)))
2793 VEC_replace (rtx, reg_base_value, ui, new_reg_base_value[ui]);
2794 changed = 1;
2798 while (changed && ++pass < MAX_ALIAS_LOOP_PASSES);
2800 /* Fill in the remaining entries. */
2801 for (i = 0; i < (int)reg_known_value_size; i++)
2802 if (reg_known_value[i] == 0)
2803 reg_known_value[i] = regno_reg_rtx[i + FIRST_PSEUDO_REGISTER];
2805 /* Clean up. */
2806 free (new_reg_base_value);
2807 new_reg_base_value = 0;
2808 free (reg_seen);
2809 reg_seen = 0;
2810 timevar_pop (TV_ALIAS_ANALYSIS);
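As the comments inside the function explain, the propagation runs as a fixpoint loop whose iteration count is simply capped (MAX_ALIAS_LOOP_PASSES) rather than proved to converge. Stripped of the register-tracking details, the control flow is roughly the following sketch; the names and the pass body are placeholders, not GCC code.

#define MAX_PASSES 10   /* stand-in for MAX_ALIAS_LOOP_PASSES */

/* Placeholder for one scan over all insns; returns nonzero if any
   recorded base value changed relative to the previous pass.  */
static int
propagate_one_pass (void)
{
  return 0;
}

static void
propagate_to_fixpoint (void)
{
  int pass = 0;
  int changed;

  do
    changed = propagate_one_pass ();
  while (changed && ++pass < MAX_PASSES);
}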
2813 void
2814 end_alias_analysis (void)
2816 old_reg_base_value = reg_base_value;
2817 ggc_free (reg_known_value);
2818 reg_known_value = 0;
2819 reg_known_value_size = 0;
2820 free (reg_known_equiv_p);
2821 reg_known_equiv_p = 0;
2824 #include "gt-alias.h"