1 /* Alias analysis for GNU C
2 Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006,
3 2007, 2008, 2009 Free Software Foundation, Inc.
4 Contributed by John Carr (jfc@mit.edu).
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "tm_p.h"
29 #include "function.h"
30 #include "alias.h"
31 #include "emit-rtl.h"
32 #include "regs.h"
33 #include "hard-reg-set.h"
34 #include "basic-block.h"
35 #include "flags.h"
36 #include "output.h"
37 #include "toplev.h"
38 #include "cselib.h"
39 #include "splay-tree.h"
40 #include "ggc.h"
41 #include "langhooks.h"
42 #include "timevar.h"
43 #include "target.h"
44 #include "cgraph.h"
45 #include "varray.h"
46 #include "tree-pass.h"
47 #include "ipa-type-escape.h"
48 #include "df.h"
49 #include "tree-ssa-alias.h"
50 #include "pointer-set.h"
51 #include "tree-flow.h"
/* The aliasing API provided here solves related but different problems:

   Say there exists (in C)

   struct X {
     struct Y y1;
     struct Z z2;
   } x1, *px1, *px2;

   struct Y y2, *py;
   struct Z z2, *pz;

   py = &px1->y1;
   px2 = &x1;

   Consider the four questions:

   Can a store to x1 interfere with px2->y1?
   Can a store to x1 interfere with px2->z2 (i.e. (*px2).z2)?
   Can a store to x1 change the value pointed to by py?
   Can a store to x1 change the value pointed to by pz?

   The answer to these questions can be yes, yes, yes, and maybe.

   The first two questions can be answered with a simple examination
   of the type system.  If structure X contains a field of type Y then
   a store through a pointer to an X can overwrite any field that is
   contained (recursively) in an X (unless we know that px1 != px2).

   The last two questions can be solved in the same way as the first
   two questions, but this is too conservative.  The observation is
   that in some cases we can know which (if any) fields are addressed
   and whether those addresses are used in bad ways.  This analysis
   may be language specific.  In C, arbitrary operations may be
   applied to pointers.  However, there is some indication that this
   may be too conservative for some C++ types.

   The pass ipa-type-escape does this analysis for the types whose
   instances do not escape across the compilation boundary.

   Historically in GCC, these two problems were combined and a single
   data structure was used to represent the solution to these
   problems.  We now have two similar but different data structures.
   The data structure used to solve the last two questions is similar
   to the first, but does not contain the fields whose addresses are
   never taken.  For types that do escape the compilation unit, the
   data structures will have identical information.  */
/* The alias sets assigned to MEMs assist the back-end in determining
   which MEMs can alias which other MEMs.  In general, two MEMs in
   different alias sets cannot alias each other, with one important
   exception.  Consider something like:

     struct S { int i; double d; };

   a store to an `S' can alias something of either type `int' or type
   `double'.  (However, a store to an `int' cannot alias a `double'
   and vice versa.)  We indicate this via a tree structure that looks
   like:

	     struct S
	      /   \
	     /     \
	   |/_     _\|
	   int    double

   (The arrows are directed and point downwards.)
   In this situation we say the alias set for `struct S' is the
   `superset' and that those for `int' and `double' are `subsets'.

   To see whether two alias sets can point to the same memory, we must
   see if either alias set is a subset of the other.  We need not trace
   past immediate descendants, however, since we propagate all
   grandchildren up one level.

   Alias set zero is implicitly a superset of all other alias sets.
   However, there is no actual entry for alias set zero.  It is an
   error to attempt to explicitly construct a subset of zero.  */
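/* Editorial sketch (not part of GCC): for `struct S' above, the
   subset edges would be recorded roughly as follows, assuming a tree
   s_type describing `struct S':

     alias_set_type s_set = get_alias_set (s_type);
     record_alias_subset (s_set, get_alias_set (integer_type_node));
     record_alias_subset (s_set, get_alias_set (double_type_node));

   record_component_aliases below performs exactly this walk over
   TYPE_FIELDS.  */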
struct GTY(()) alias_set_entry_d {
  /* The alias set number, as stored in MEM_ALIAS_SET.  */
  alias_set_type alias_set;

  /* Nonzero if this alias set would have a child of zero: this
     effectively makes it the same as alias set zero.  */
  int has_zero_child;

  /* The children of the alias set.  These are not just the immediate
     children, but, in fact, all descendants.  So, if we have:

       struct T { struct S s; float f; }

     continuing our example above, the children here will be all of
     `int', `double', `float', and `struct S'.  */
  splay_tree GTY((param1_is (int), param2_is (int))) children;
};
typedef struct alias_set_entry_d *alias_set_entry;
153 static int rtx_equal_for_memref_p (const_rtx, const_rtx);
154 static int memrefs_conflict_p (int, rtx, int, rtx, HOST_WIDE_INT);
155 static void record_set (rtx, const_rtx, void *);
156 static int base_alias_check (rtx, rtx, enum machine_mode,
157 enum machine_mode);
158 static rtx find_base_value (rtx);
159 static int mems_in_disjoint_alias_sets_p (const_rtx, const_rtx);
160 static int insert_subset_children (splay_tree_node, void*);
161 static alias_set_entry get_alias_set_entry (alias_set_type);
162 static const_rtx fixed_scalar_and_varying_struct_p (const_rtx, const_rtx, rtx, rtx,
163 bool (*) (const_rtx, bool));
164 static int aliases_everything_p (const_rtx);
165 static bool nonoverlapping_component_refs_p (const_tree, const_tree);
166 static tree decl_for_component_ref (tree);
167 static rtx adjust_offset_for_component_ref (tree, rtx);
168 static int write_dependence_p (const_rtx, const_rtx, int);
170 static void memory_modified_1 (rtx, const_rtx, void *);
172 /* Set up all info needed to perform alias analysis on memory references. */
174 /* Returns the size in bytes of the mode of X. */
175 #define SIZE_FOR_MODE(X) (GET_MODE_SIZE (GET_MODE (X)))
177 /* Returns nonzero if MEM1 and MEM2 do not alias because they are in
178 different alias sets. We ignore alias sets in functions making use
179 of variable arguments because the va_arg macros on some systems are
180 not legal ANSI C. */
181 #define DIFFERENT_ALIAS_SETS_P(MEM1, MEM2) \
182 mems_in_disjoint_alias_sets_p (MEM1, MEM2)
184 /* Cap the number of passes we make over the insns propagating alias
185 information through set chains. 10 is a completely arbitrary choice. */
186 #define MAX_ALIAS_LOOP_PASSES 10
/* reg_base_value[N] gives an address to which register N is related.
   If all sets after the first add or subtract to the current value
   or otherwise modify it so it does not point to a different top level
   object, reg_base_value[N] is equal to the address part of the source
   of the first set.

   A base address can be an ADDRESS, SYMBOL_REF, or LABEL_REF.  ADDRESS
   expressions represent certain special values: function arguments and
   the stack, frame, and argument pointers.

   The contents of an ADDRESS are not normally used; the mode of the
   ADDRESS determines whether the ADDRESS is a function argument or some
   other special value.  Pointer equality, not rtx_equal_p, determines
   whether two ADDRESS expressions refer to the same base address.

   The only use of the contents of an ADDRESS is for determining if the
   current function performs nonlocal memory references for the
   purposes of marking the function as a constant function.  */
207 static GTY(()) VEC(rtx,gc) *reg_base_value;
208 static rtx *new_reg_base_value;
/* We preserve a copy of the old array around to reduce the amount of
   garbage produced.  About 8% of the garbage produced was attributed
   to this array.  */
213 static GTY((deletable)) VEC(rtx,gc) *old_reg_base_value;
215 /* Static hunks of RTL used by the aliasing code; these are initialized
216 once per function to avoid unnecessary RTL allocations. */
217 static GTY (()) rtx static_reg_base_value[FIRST_PSEUDO_REGISTER];
219 #define REG_BASE_VALUE(X) \
220 (REGNO (X) < VEC_length (rtx, reg_base_value) \
221 ? VEC_index (rtx, reg_base_value, REGNO (X)) : 0)
223 /* Vector indexed by N giving the initial (unchanging) value known for
224 pseudo-register N. This array is initialized in init_alias_analysis,
225 and does not change until end_alias_analysis is called. */
226 static GTY((length("reg_known_value_size"))) rtx *reg_known_value;
228 /* Indicates number of valid entries in reg_known_value. */
229 static GTY(()) unsigned int reg_known_value_size;
231 /* Vector recording for each reg_known_value whether it is due to a
232 REG_EQUIV note. Future passes (viz., reload) may replace the
233 pseudo with the equivalent expression and so we account for the
234 dependences that would be introduced if that happens.
236 The REG_EQUIV notes created in assign_parms may mention the arg
237 pointer, and there are explicit insns in the RTL that modify the
238 arg pointer. Thus we must ensure that such insns don't get
239 scheduled across each other because that would invalidate the
240 REG_EQUIV notes. One could argue that the REG_EQUIV notes are
241 wrong, but solving the problem in the scheduler will likely give
242 better code, so we do it here. */
243 static bool *reg_known_equiv_p;
245 /* True when scanning insns from the start of the rtl to the
246 NOTE_INSN_FUNCTION_BEG note. */
247 static bool copying_arguments;
249 DEF_VEC_P(alias_set_entry);
250 DEF_VEC_ALLOC_P(alias_set_entry,gc);
252 /* The splay-tree used to store the various alias set entries. */
253 static GTY (()) VEC(alias_set_entry,gc) *alias_sets;
255 /* Build a decomposed reference object for querying the alias-oracle
256 from the MEM rtx and store it in *REF.
257 Returns false if MEM is not suitable for the alias-oracle. */
259 static bool
260 ao_ref_from_mem (ao_ref *ref, const_rtx mem)
262 tree expr = MEM_EXPR (mem);
263 tree base;
265 if (!expr)
266 return false;
268 ao_ref_init (ref, expr);
270 /* Get the base of the reference and see if we have to reject or
271 adjust it. */
272 base = ao_ref_base (ref);
273 if (base == NULL_TREE)
274 return false;
  /* If this is a pointer dereference of a non-SSA_NAME, punt.
     ??? We could replace it with a pointer to anything.  */
278 if (INDIRECT_REF_P (base)
279 && TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME)
280 return false;
282 /* The tree oracle doesn't like to have these. */
283 if (TREE_CODE (base) == FUNCTION_DECL
284 || TREE_CODE (base) == LABEL_DECL)
285 return false;
287 /* If this is a reference based on a partitioned decl replace the
288 base with an INDIRECT_REF of the pointer representative we
289 created during stack slot partitioning. */
290 if (TREE_CODE (base) == VAR_DECL
291 && ! TREE_STATIC (base)
292 && cfun->gimple_df->decls_to_pointers != NULL)
294 void *namep;
295 namep = pointer_map_contains (cfun->gimple_df->decls_to_pointers, base);
296 if (namep)
298 ref->base_alias_set = get_alias_set (base);
299 ref->base = build1 (INDIRECT_REF, TREE_TYPE (base), *(tree *)namep);
303 ref->ref_alias_set = MEM_ALIAS_SET (mem);
305 /* For NULL MEM_OFFSET the MEM_EXPR may have been stripped arbitrarily
306 without recording offset or extent adjustments properly. */
307 if (MEM_OFFSET (mem) == NULL_RTX)
309 ref->offset = 0;
310 ref->max_size = -1;
312 else if (INTVAL (MEM_OFFSET (mem)) < 0
313 && MEM_EXPR (mem) != get_spill_slot_decl (false))
315 /* Negative MEM_OFFSET happens for promoted subregs on bigendian
316 targets. We need to compensate both the size and the offset here,
317 which get_ref_base_and_extent will have done based on the MEM_EXPR
318 already. */
319 gcc_assert (((INTVAL (MEM_SIZE (mem)) + INTVAL (MEM_OFFSET (mem)))
320 * BITS_PER_UNIT)
321 == ref->size);
322 return true;
324 else
326 ref->offset += INTVAL (MEM_OFFSET (mem)) * BITS_PER_UNIT;
329 /* NULL MEM_SIZE should not really happen with a non-NULL MEM_EXPR,
330 but just play safe here. The size may have been adjusted together
331 with the offset, so we need to take it if it is set and not rely
332 on MEM_EXPR here (which has the size determining parts potentially
333 stripped anyway). We lose precision for max_size which is only
334 available from the remaining MEM_EXPR. */
335 if (MEM_SIZE (mem) == NULL_RTX)
337 ref->size = -1;
338 ref->max_size = -1;
340 else
342 ref->size = INTVAL (MEM_SIZE (mem)) * BITS_PER_UNIT;
345 return true;
348 /* Query the alias-oracle on whether the two memory rtx X and MEM may
349 alias. If TBAA_P is set also apply TBAA. Returns true if the
350 two rtxen may alias, false otherwise. */
352 static bool
353 rtx_refs_may_alias_p (const_rtx x, const_rtx mem, bool tbaa_p)
355 ao_ref ref1, ref2;
357 if (!ao_ref_from_mem (&ref1, x)
358 || !ao_ref_from_mem (&ref2, mem))
359 return true;
361 return refs_may_alias_p_1 (&ref1, &ref2, tbaa_p);
364 /* Returns a pointer to the alias set entry for ALIAS_SET, if there is
365 such an entry, or NULL otherwise. */
367 static inline alias_set_entry
368 get_alias_set_entry (alias_set_type alias_set)
370 return VEC_index (alias_set_entry, alias_sets, alias_set);
373 /* Returns nonzero if the alias sets for MEM1 and MEM2 are such that
374 the two MEMs cannot alias each other. */
376 static inline int
377 mems_in_disjoint_alias_sets_p (const_rtx mem1, const_rtx mem2)
379 /* Perform a basic sanity check. Namely, that there are no alias sets
380 if we're not using strict aliasing. This helps to catch bugs
381 whereby someone uses PUT_CODE, but doesn't clear MEM_ALIAS_SET, or
382 where a MEM is allocated in some way other than by the use of
383 gen_rtx_MEM, and the MEM_ALIAS_SET is not cleared. If we begin to
384 use alias sets to indicate that spilled registers cannot alias each
385 other, we might need to remove this check. */
386 gcc_assert (flag_strict_aliasing
387 || (!MEM_ALIAS_SET (mem1) && !MEM_ALIAS_SET (mem2)));
389 return ! alias_sets_conflict_p (MEM_ALIAS_SET (mem1), MEM_ALIAS_SET (mem2));
392 /* Insert the NODE into the splay tree given by DATA. Used by
393 record_alias_subset via splay_tree_foreach. */
395 static int
396 insert_subset_children (splay_tree_node node, void *data)
398 splay_tree_insert ((splay_tree) data, node->key, node->value);
400 return 0;
403 /* Return true if the first alias set is a subset of the second. */
405 bool
406 alias_set_subset_of (alias_set_type set1, alias_set_type set2)
408 alias_set_entry ase;
410 /* Everything is a subset of the "aliases everything" set. */
411 if (set2 == 0)
412 return true;
414 /* Otherwise, check if set1 is a subset of set2. */
415 ase = get_alias_set_entry (set2);
416 if (ase != 0
417 && ((ase->has_zero_child && set1 == 0)
418 || splay_tree_lookup (ase->children,
419 (splay_tree_key) set1)))
420 return true;
421 return false;
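/* Editorial example: continuing the `struct S' sketch above, once the
   component aliases are recorded,

     alias_set_subset_of (get_alias_set (double_type_node), s_set)

   is true via the splay-tree lookup, while the reverse query is false
   because no entry was ever created for the alias set of `double'.  */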
424 /* Return 1 if the two specified alias sets may conflict. */
int
alias_sets_conflict_p (alias_set_type set1, alias_set_type set2)
429 alias_set_entry ase;
431 /* The easy case. */
432 if (alias_sets_must_conflict_p (set1, set2))
433 return 1;
435 /* See if the first alias set is a subset of the second. */
436 ase = get_alias_set_entry (set1);
437 if (ase != 0
438 && (ase->has_zero_child
439 || splay_tree_lookup (ase->children,
440 (splay_tree_key) set2)))
441 return 1;
443 /* Now do the same, but with the alias sets reversed. */
444 ase = get_alias_set_entry (set2);
445 if (ase != 0
446 && (ase->has_zero_child
447 || splay_tree_lookup (ase->children,
448 (splay_tree_key) set1)))
449 return 1;
451 /* The two alias sets are distinct and neither one is the
452 child of the other. Therefore, they cannot conflict. */
453 return 0;
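/* Editorial example: with the `struct S' edges above,

     alias_sets_conflict_p (s_set, get_alias_set (integer_type_node))

   returns 1 through the first subset lookup, whereas two unrelated
   sets freshly returned by new_alias_set () yield 0.  */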
456 static int
457 walk_mems_2 (rtx *x, rtx mem)
459 if (MEM_P (*x))
461 if (alias_sets_conflict_p (MEM_ALIAS_SET(*x), MEM_ALIAS_SET(mem)))
462 return 1;
464 return -1;
466 return 0;
469 static int
470 walk_mems_1 (rtx *x, rtx *pat)
472 if (MEM_P (*x))
      /* Visit all MEMs in *PAT and check independence.  */
475 if (for_each_rtx (pat, (rtx_function) walk_mems_2, *x))
476 /* Indicate that dependence was determined and stop traversal. */
477 return 1;
479 return -1;
481 return 0;
/* Return 1 if the two specified instructions have memory references
   whose alias sets conflict.  */
485 bool
486 insn_alias_sets_conflict_p (rtx insn1, rtx insn2)
488 /* For each pair of MEMs in INSN1 and INSN2 check their independence. */
489 return for_each_rtx (&PATTERN (insn1), (rtx_function) walk_mems_1,
490 &PATTERN (insn2));
493 /* Return 1 if the two specified alias sets will always conflict. */
int
alias_sets_must_conflict_p (alias_set_type set1, alias_set_type set2)
498 if (set1 == 0 || set2 == 0 || set1 == set2)
499 return 1;
501 return 0;
504 /* Return 1 if any MEM object of type T1 will always conflict (using the
505 dependency routines in this file) with any MEM object of type T2.
506 This is used when allocating temporary storage. If T1 and/or T2 are
507 NULL_TREE, it means we know nothing about the storage. */
int
objects_must_conflict_p (tree t1, tree t2)
512 alias_set_type set1, set2;
514 /* If neither has a type specified, we don't know if they'll conflict
515 because we may be using them to store objects of various types, for
516 example the argument and local variables areas of inlined functions. */
517 if (t1 == 0 && t2 == 0)
518 return 0;
520 /* If they are the same type, they must conflict. */
521 if (t1 == t2
522 /* Likewise if both are volatile. */
523 || (t1 != 0 && TYPE_VOLATILE (t1) && t2 != 0 && TYPE_VOLATILE (t2)))
524 return 1;
526 set1 = t1 ? get_alias_set (t1) : 0;
527 set2 = t2 ? get_alias_set (t2) : 0;
529 /* We can't use alias_sets_conflict_p because we must make sure
530 that every subtype of t1 will conflict with every subtype of
531 t2 for which a pair of subobjects of these respective subtypes
532 overlaps on the stack. */
533 return alias_sets_must_conflict_p (set1, set2);
/* Return true if all nested component references handled by
   get_inner_reference in T are such that we should use the alias set
   provided by the object at the heart of T.

   This is true for non-addressable components (which don't have their
   own alias set), as well as components of objects in alias set zero.
   The latter point is a special case wherein we wish to override the
   alias set used by the component, but we don't have per-FIELD_DECL
   assignable alias sets.  */
546 bool
547 component_uses_parent_alias_set (const_tree t)
549 while (1)
551 /* If we're at the end, it vacuously uses its own alias set. */
552 if (!handled_component_p (t))
553 return false;
555 switch (TREE_CODE (t))
557 case COMPONENT_REF:
558 if (DECL_NONADDRESSABLE_P (TREE_OPERAND (t, 1)))
559 return true;
560 break;
562 case ARRAY_REF:
563 case ARRAY_RANGE_REF:
564 if (TYPE_NONALIASED_COMPONENT (TREE_TYPE (TREE_OPERAND (t, 0))))
565 return true;
566 break;
568 case REALPART_EXPR:
569 case IMAGPART_EXPR:
570 break;
572 default:
573 /* Bitfields and casts are never addressable. */
574 return true;
577 t = TREE_OPERAND (t, 0);
578 if (get_alias_set (TREE_TYPE (t)) == 0)
579 return true;
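/* Editorial example: for a BIT_FIELD_REF the default case above fires
   and the function returns true, so get_alias_set falls back to the
   alias set of the containing object rather than inventing one for
   the bit-field access.  */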
583 /* Return the alias set for the memory pointed to by T, which may be
584 either a type or an expression. Return -1 if there is nothing
585 special about dereferencing T. */
587 static alias_set_type
588 get_deref_alias_set_1 (tree t)
590 /* If we're not doing any alias analysis, just assume everything
591 aliases everything else. */
592 if (!flag_strict_aliasing)
593 return 0;
595 /* All we care about is the type. */
596 if (! TYPE_P (t))
597 t = TREE_TYPE (t);
599 /* If we have an INDIRECT_REF via a void pointer, we don't
600 know anything about what that might alias. Likewise if the
601 pointer is marked that way. */
602 if (TREE_CODE (TREE_TYPE (t)) == VOID_TYPE
603 || TYPE_REF_CAN_ALIAS_ALL (t))
604 return 0;
606 return -1;
609 /* Return the alias set for the memory pointed to by T, which may be
610 either a type or an expression. */
612 alias_set_type
613 get_deref_alias_set (tree t)
615 alias_set_type set = get_deref_alias_set_1 (t);
617 /* Fall back to the alias-set of the pointed-to type. */
618 if (set == -1)
620 if (! TYPE_P (t))
621 t = TREE_TYPE (t);
622 set = get_alias_set (TREE_TYPE (t));
625 return set;
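/* Editorial example: for a `void *' pointer type the result is alias
   set 0, because get_deref_alias_set_1 sees a VOID_TYPE target; for
   `int *' it is the alias set of `int', via the fall-back to the
   pointed-to type above.  */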
628 /* Return the alias set for T, which may be either a type or an
629 expression. Call language-specific routine for help, if needed. */
631 alias_set_type
632 get_alias_set (tree t)
634 alias_set_type set;
636 /* If we're not doing any alias analysis, just assume everything
637 aliases everything else. Also return 0 if this or its type is
638 an error. */
639 if (! flag_strict_aliasing || t == error_mark_node
640 || (! TYPE_P (t)
641 && (TREE_TYPE (t) == 0 || TREE_TYPE (t) == error_mark_node)))
642 return 0;
644 /* We can be passed either an expression or a type. This and the
645 language-specific routine may make mutually-recursive calls to each other
646 to figure out what to do. At each juncture, we see if this is a tree
647 that the language may need to handle specially. First handle things that
648 aren't types. */
649 if (! TYPE_P (t))
651 tree inner = t;
653 /* Remove any nops, then give the language a chance to do
654 something with this tree before we look at it. */
655 STRIP_NOPS (t);
656 set = lang_hooks.get_alias_set (t);
657 if (set != -1)
658 return set;
660 /* First see if the actual object referenced is an INDIRECT_REF from a
661 restrict-qualified pointer or a "void *". */
662 while (handled_component_p (inner))
664 inner = TREE_OPERAND (inner, 0);
665 STRIP_NOPS (inner);
668 if (INDIRECT_REF_P (inner))
670 set = get_deref_alias_set_1 (TREE_OPERAND (inner, 0));
671 if (set != -1)
672 return set;
675 /* Otherwise, pick up the outermost object that we could have a pointer
676 to, processing conversions as above. */
677 while (component_uses_parent_alias_set (t))
679 t = TREE_OPERAND (t, 0);
680 STRIP_NOPS (t);
683 /* If we've already determined the alias set for a decl, just return
684 it. This is necessary for C++ anonymous unions, whose component
685 variables don't look like union members (boo!). */
686 if (TREE_CODE (t) == VAR_DECL
687 && DECL_RTL_SET_P (t) && MEM_P (DECL_RTL (t)))
688 return MEM_ALIAS_SET (DECL_RTL (t));
690 /* Now all we care about is the type. */
691 t = TREE_TYPE (t);
694 /* Variant qualifiers don't affect the alias set, so get the main
695 variant. */
696 t = TYPE_MAIN_VARIANT (t);
698 /* Always use the canonical type as well. If this is a type that
699 requires structural comparisons to identify compatible types
700 use alias set zero. */
701 if (TYPE_STRUCTURAL_EQUALITY_P (t))
702 return 0;
703 t = TYPE_CANONICAL (t);
704 /* Canonical types shouldn't form a tree nor should the canonical
705 type require structural equality checks. */
706 gcc_assert (!TYPE_STRUCTURAL_EQUALITY_P (t) && TYPE_CANONICAL (t) == t);
708 /* If this is a type with a known alias set, return it. */
709 if (TYPE_ALIAS_SET_KNOWN_P (t))
710 return TYPE_ALIAS_SET (t);
712 /* We don't want to set TYPE_ALIAS_SET for incomplete types. */
713 if (!COMPLETE_TYPE_P (t))
715 /* For arrays with unknown size the conservative answer is the
716 alias set of the element type. */
717 if (TREE_CODE (t) == ARRAY_TYPE)
718 return get_alias_set (TREE_TYPE (t));
720 /* But return zero as a conservative answer for incomplete types. */
721 return 0;
724 /* See if the language has special handling for this type. */
725 set = lang_hooks.get_alias_set (t);
726 if (set != -1)
727 return set;
729 /* There are no objects of FUNCTION_TYPE, so there's no point in
730 using up an alias set for them. (There are, of course, pointers
731 and references to functions, but that's different.) */
732 else if (TREE_CODE (t) == FUNCTION_TYPE
733 || TREE_CODE (t) == METHOD_TYPE)
734 set = 0;
736 /* Unless the language specifies otherwise, let vector types alias
737 their components. This avoids some nasty type punning issues in
738 normal usage. And indeed lets vectors be treated more like an
739 array slice. */
740 else if (TREE_CODE (t) == VECTOR_TYPE)
741 set = get_alias_set (TREE_TYPE (t));
743 /* Unless the language specifies otherwise, treat array types the
744 same as their components. This avoids the asymmetry we get
745 through recording the components. Consider accessing a
746 character(kind=1) through a reference to a character(kind=1)[1:1].
747 Or consider if we want to assign integer(kind=4)[0:D.1387] and
748 integer(kind=4)[4] the same alias set or not.
749 Just be pragmatic here and make sure the array and its element
750 type get the same alias set assigned. */
751 else if (TREE_CODE (t) == ARRAY_TYPE
752 && !TYPE_NONALIASED_COMPONENT (t))
753 set = get_alias_set (TREE_TYPE (t));
755 else
756 /* Otherwise make a new alias set for this type. */
757 set = new_alias_set ();
759 TYPE_ALIAS_SET (t) = set;
761 /* If this is an aggregate type, we must record any component aliasing
762 information. */
763 if (AGGREGATE_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
764 record_component_aliases (t);
766 return set;
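/* Editorial example (not part of GCC) of the user-visible consequence,
   assuming the default -fstrict-aliasing: `int' and `float' normally
   receive distinct alias sets, so in

     void f (int *ip, float *fp) { *ip = 1; *fp = 2.0f; }

   the back-end may reorder the two stores, because MEMs in disjoint
   alias sets are assumed not to overlap.  */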
769 /* Return a brand-new alias set. */
771 alias_set_type
772 new_alias_set (void)
774 if (flag_strict_aliasing)
776 if (alias_sets == 0)
777 VEC_safe_push (alias_set_entry, gc, alias_sets, 0);
778 VEC_safe_push (alias_set_entry, gc, alias_sets, 0);
779 return VEC_length (alias_set_entry, alias_sets) - 1;
781 else
782 return 0;
785 /* Indicate that things in SUBSET can alias things in SUPERSET, but that
786 not everything that aliases SUPERSET also aliases SUBSET. For example,
787 in C, a store to an `int' can alias a load of a structure containing an
788 `int', and vice versa. But it can't alias a load of a 'double' member
789 of the same structure. Here, the structure would be the SUPERSET and
790 `int' the SUBSET. This relationship is also described in the comment at
791 the beginning of this file.
793 This function should be called only once per SUPERSET/SUBSET pair.
795 It is illegal for SUPERSET to be zero; everything is implicitly a
796 subset of alias set zero. */
798 void
799 record_alias_subset (alias_set_type superset, alias_set_type subset)
801 alias_set_entry superset_entry;
802 alias_set_entry subset_entry;
804 /* It is possible in complex type situations for both sets to be the same,
805 in which case we can ignore this operation. */
806 if (superset == subset)
807 return;
809 gcc_assert (superset);
811 superset_entry = get_alias_set_entry (superset);
812 if (superset_entry == 0)
814 /* Create an entry for the SUPERSET, so that we have a place to
815 attach the SUBSET. */
816 superset_entry = GGC_NEW (struct alias_set_entry_d);
817 superset_entry->alias_set = superset;
818 superset_entry->children
819 = splay_tree_new_ggc (splay_tree_compare_ints);
820 superset_entry->has_zero_child = 0;
821 VEC_replace (alias_set_entry, alias_sets, superset, superset_entry);
824 if (subset == 0)
825 superset_entry->has_zero_child = 1;
826 else
828 subset_entry = get_alias_set_entry (subset);
829 /* If there is an entry for the subset, enter all of its children
830 (if they are not already present) as children of the SUPERSET. */
831 if (subset_entry)
833 if (subset_entry->has_zero_child)
834 superset_entry->has_zero_child = 1;
836 splay_tree_foreach (subset_entry->children, insert_subset_children,
837 superset_entry->children);
840 /* Enter the SUBSET itself as a child of the SUPERSET. */
841 splay_tree_insert (superset_entry->children,
842 (splay_tree_key) subset, 0);
846 /* Record that component types of TYPE, if any, are part of that type for
847 aliasing purposes. For record types, we only record component types
848 for fields that are not marked non-addressable. For array types, we
849 only record the component type if it is not marked non-aliased. */
851 void
852 record_component_aliases (tree type)
854 alias_set_type superset = get_alias_set (type);
855 tree field;
857 if (superset == 0)
858 return;
860 switch (TREE_CODE (type))
862 case RECORD_TYPE:
863 case UNION_TYPE:
864 case QUAL_UNION_TYPE:
865 /* Recursively record aliases for the base classes, if there are any. */
866 if (TYPE_BINFO (type))
868 int i;
869 tree binfo, base_binfo;
871 for (binfo = TYPE_BINFO (type), i = 0;
872 BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
873 record_alias_subset (superset,
874 get_alias_set (BINFO_TYPE (base_binfo)));
876 for (field = TYPE_FIELDS (type); field != 0; field = TREE_CHAIN (field))
877 if (TREE_CODE (field) == FIELD_DECL && !DECL_NONADDRESSABLE_P (field))
878 record_alias_subset (superset, get_alias_set (TREE_TYPE (field)));
879 break;
881 case COMPLEX_TYPE:
882 record_alias_subset (superset, get_alias_set (TREE_TYPE (type)));
883 break;
885 /* VECTOR_TYPE and ARRAY_TYPE share the alias set with their
886 element type. */
888 default:
889 break;
893 /* Allocate an alias set for use in storing and reading from the varargs
894 spill area. */
896 static GTY(()) alias_set_type varargs_set = -1;
898 alias_set_type
899 get_varargs_alias_set (void)
901 #if 1
902 /* We now lower VA_ARG_EXPR, and there's currently no way to attach the
903 varargs alias set to an INDIRECT_REF (FIXME!), so we can't
904 consistently use the varargs alias set for loads from the varargs
905 area. So don't use it anywhere. */
906 return 0;
907 #else
908 if (varargs_set == -1)
909 varargs_set = new_alias_set ();
911 return varargs_set;
912 #endif
915 /* Likewise, but used for the fixed portions of the frame, e.g., register
916 save areas. */
918 static GTY(()) alias_set_type frame_set = -1;
920 alias_set_type
921 get_frame_alias_set (void)
923 if (frame_set == -1)
924 frame_set = new_alias_set ();
926 return frame_set;
929 /* Inside SRC, the source of a SET, find a base address. */
931 static rtx
932 find_base_value (rtx src)
934 unsigned int regno;
936 #if defined (FIND_BASE_TERM)
937 /* Try machine-dependent ways to find the base term. */
938 src = FIND_BASE_TERM (src);
939 #endif
941 switch (GET_CODE (src))
943 case SYMBOL_REF:
944 case LABEL_REF:
945 return src;
947 case REG:
948 regno = REGNO (src);
949 /* At the start of a function, argument registers have known base
950 values which may be lost later. Returning an ADDRESS
951 expression here allows optimization based on argument values
952 even when the argument registers are used for other purposes. */
953 if (regno < FIRST_PSEUDO_REGISTER && copying_arguments)
954 return new_reg_base_value[regno];
956 /* If a pseudo has a known base value, return it. Do not do this
957 for non-fixed hard regs since it can result in a circular
958 dependency chain for registers which have values at function entry.
960 The test above is not sufficient because the scheduler may move
961 a copy out of an arg reg past the NOTE_INSN_FUNCTION_BEGIN. */
962 if ((regno >= FIRST_PSEUDO_REGISTER || fixed_regs[regno])
963 && regno < VEC_length (rtx, reg_base_value))
965 /* If we're inside init_alias_analysis, use new_reg_base_value
966 to reduce the number of relaxation iterations. */
967 if (new_reg_base_value && new_reg_base_value[regno]
968 && DF_REG_DEF_COUNT (regno) == 1)
969 return new_reg_base_value[regno];
971 if (VEC_index (rtx, reg_base_value, regno))
972 return VEC_index (rtx, reg_base_value, regno);
975 return 0;
977 case MEM:
978 /* Check for an argument passed in memory. Only record in the
979 copying-arguments block; it is too hard to track changes
980 otherwise. */
981 if (copying_arguments
982 && (XEXP (src, 0) == arg_pointer_rtx
983 || (GET_CODE (XEXP (src, 0)) == PLUS
984 && XEXP (XEXP (src, 0), 0) == arg_pointer_rtx)))
985 return gen_rtx_ADDRESS (VOIDmode, src);
986 return 0;
988 case CONST:
989 src = XEXP (src, 0);
990 if (GET_CODE (src) != PLUS && GET_CODE (src) != MINUS)
991 break;
993 /* ... fall through ... */
995 case PLUS:
996 case MINUS:
998 rtx temp, src_0 = XEXP (src, 0), src_1 = XEXP (src, 1);
1000 /* If either operand is a REG that is a known pointer, then it
1001 is the base. */
1002 if (REG_P (src_0) && REG_POINTER (src_0))
1003 return find_base_value (src_0);
1004 if (REG_P (src_1) && REG_POINTER (src_1))
1005 return find_base_value (src_1);
1007 /* If either operand is a REG, then see if we already have
1008 a known value for it. */
1009 if (REG_P (src_0))
1011 temp = find_base_value (src_0);
1012 if (temp != 0)
1013 src_0 = temp;
1016 if (REG_P (src_1))
1018 temp = find_base_value (src_1);
	  if (temp != 0)
1020 src_1 = temp;
	/* If either base is a named object or a special address
	   (like an argument or stack reference), then use it for the
	   base term.  */
1026 if (src_0 != 0
1027 && (GET_CODE (src_0) == SYMBOL_REF
1028 || GET_CODE (src_0) == LABEL_REF
1029 || (GET_CODE (src_0) == ADDRESS
1030 && GET_MODE (src_0) != VOIDmode)))
1031 return src_0;
1033 if (src_1 != 0
1034 && (GET_CODE (src_1) == SYMBOL_REF
1035 || GET_CODE (src_1) == LABEL_REF
1036 || (GET_CODE (src_1) == ADDRESS
1037 && GET_MODE (src_1) != VOIDmode)))
1038 return src_1;
1040 /* Guess which operand is the base address:
1041 If either operand is a symbol, then it is the base. If
1042 either operand is a CONST_INT, then the other is the base. */
1043 if (CONST_INT_P (src_1) || CONSTANT_P (src_0))
1044 return find_base_value (src_0);
1045 else if (CONST_INT_P (src_0) || CONSTANT_P (src_1))
1046 return find_base_value (src_1);
1048 return 0;
1051 case LO_SUM:
1052 /* The standard form is (lo_sum reg sym) so look only at the
1053 second operand. */
1054 return find_base_value (XEXP (src, 1));
1056 case AND:
1057 /* If the second operand is constant set the base
1058 address to the first operand. */
1059 if (CONST_INT_P (XEXP (src, 1)) && INTVAL (XEXP (src, 1)) != 0)
1060 return find_base_value (XEXP (src, 0));
1061 return 0;
1063 case TRUNCATE:
1064 if (GET_MODE_SIZE (GET_MODE (src)) < GET_MODE_SIZE (Pmode))
1065 break;
1066 /* Fall through. */
1067 case HIGH:
1068 case PRE_INC:
1069 case PRE_DEC:
1070 case POST_INC:
1071 case POST_DEC:
1072 case PRE_MODIFY:
1073 case POST_MODIFY:
1074 return find_base_value (XEXP (src, 0));
1076 case ZERO_EXTEND:
1077 case SIGN_EXTEND: /* used for NT/Alpha pointers */
1079 rtx temp = find_base_value (XEXP (src, 0));
1081 if (temp != 0 && CONSTANT_P (temp))
1082 temp = convert_memory_address (Pmode, temp);
1084 return temp;
1087 default:
1088 break;
1091 return 0;
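/* Editorial example: suppose reg 100 has (symbol_ref "a") recorded as
   its base value and we process

     (set (reg 101) (plus (reg 100) (const_int 8)))

   find_base_value on the PLUS looks up the REG operand, obtains the
   SYMBOL_REF, and returns it, so reg 101 inherits "a" as its base.  */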
1094 /* Called from init_alias_analysis indirectly through note_stores. */
1096 /* While scanning insns to find base values, reg_seen[N] is nonzero if
1097 register N has been set in this function. */
1098 static char *reg_seen;
1100 /* Addresses which are known not to alias anything else are identified
1101 by a unique integer. */
1102 static int unique_id;
1104 static void
1105 record_set (rtx dest, const_rtx set, void *data ATTRIBUTE_UNUSED)
1107 unsigned regno;
1108 rtx src;
1109 int n;
1111 if (!REG_P (dest))
1112 return;
1114 regno = REGNO (dest);
1116 gcc_assert (regno < VEC_length (rtx, reg_base_value));
1118 /* If this spans multiple hard registers, then we must indicate that every
1119 register has an unusable value. */
1120 if (regno < FIRST_PSEUDO_REGISTER)
1121 n = hard_regno_nregs[regno][GET_MODE (dest)];
1122 else
1123 n = 1;
1124 if (n != 1)
1126 while (--n >= 0)
1128 reg_seen[regno + n] = 1;
1129 new_reg_base_value[regno + n] = 0;
1131 return;
1134 if (set)
1136 /* A CLOBBER wipes out any old value but does not prevent a previously
1137 unset register from acquiring a base address (i.e. reg_seen is not
1138 set). */
1139 if (GET_CODE (set) == CLOBBER)
1141 new_reg_base_value[regno] = 0;
1142 return;
1144 src = SET_SRC (set);
1146 else
1148 if (reg_seen[regno])
1150 new_reg_base_value[regno] = 0;
1151 return;
1153 reg_seen[regno] = 1;
1154 new_reg_base_value[regno] = gen_rtx_ADDRESS (Pmode,
1155 GEN_INT (unique_id++));
1156 return;
1159 /* If this is not the first set of REGNO, see whether the new value
1160 is related to the old one. There are two cases of interest:
1162 (1) The register might be assigned an entirely new value
1163 that has the same base term as the original set.
1165 (2) The set might be a simple self-modification that
1166 cannot change REGNO's base value.
1168 If neither case holds, reject the original base value as invalid.
1169 Note that the following situation is not detected:
1171 extern int x, y; int *p = &x; p += (&y-&x);
1173 ANSI C does not allow computing the difference of addresses
1174 of distinct top level objects. */
1175 if (new_reg_base_value[regno] != 0
1176 && find_base_value (src) != new_reg_base_value[regno])
1177 switch (GET_CODE (src))
1179 case LO_SUM:
1180 case MINUS:
1181 if (XEXP (src, 0) != dest && XEXP (src, 1) != dest)
1182 new_reg_base_value[regno] = 0;
1183 break;
1184 case PLUS:
1185 /* If the value we add in the PLUS is also a valid base value,
1186 this might be the actual base value, and the original value
1187 an index. */
1189 rtx other = NULL_RTX;
1191 if (XEXP (src, 0) == dest)
1192 other = XEXP (src, 1);
1193 else if (XEXP (src, 1) == dest)
1194 other = XEXP (src, 0);
1196 if (! other || find_base_value (other))
1197 new_reg_base_value[regno] = 0;
1198 break;
1200 case AND:
1201 if (XEXP (src, 0) != dest || !CONST_INT_P (XEXP (src, 1)))
1202 new_reg_base_value[regno] = 0;
1203 break;
1204 default:
1205 new_reg_base_value[regno] = 0;
1206 break;
1208 /* If this is the first set of a register, record the value. */
1209 else if ((regno >= FIRST_PSEUDO_REGISTER || ! fixed_regs[regno])
1210 && ! reg_seen[regno] && new_reg_base_value[regno] == 0)
1211 new_reg_base_value[regno] = find_base_value (src);
1213 reg_seen[regno] = 1;
1216 /* If a value is known for REGNO, return it. */
rtx
get_reg_known_value (unsigned int regno)
1221 if (regno >= FIRST_PSEUDO_REGISTER)
1223 regno -= FIRST_PSEUDO_REGISTER;
1224 if (regno < reg_known_value_size)
1225 return reg_known_value[regno];
1227 return NULL;
1230 /* Set it. */
1232 static void
1233 set_reg_known_value (unsigned int regno, rtx val)
1235 if (regno >= FIRST_PSEUDO_REGISTER)
1237 regno -= FIRST_PSEUDO_REGISTER;
1238 if (regno < reg_known_value_size)
1239 reg_known_value[regno] = val;
1243 /* Similarly for reg_known_equiv_p. */
1245 bool
1246 get_reg_known_equiv_p (unsigned int regno)
1248 if (regno >= FIRST_PSEUDO_REGISTER)
1250 regno -= FIRST_PSEUDO_REGISTER;
1251 if (regno < reg_known_value_size)
1252 return reg_known_equiv_p[regno];
1254 return false;
1257 static void
1258 set_reg_known_equiv_p (unsigned int regno, bool val)
1260 if (regno >= FIRST_PSEUDO_REGISTER)
1262 regno -= FIRST_PSEUDO_REGISTER;
1263 if (regno < reg_known_value_size)
1264 reg_known_equiv_p[regno] = val;
/* Returns a canonical version of X, from the point of view of alias
   analysis.  (For example, if X is a MEM whose address is a register,
   and the register has a known value (say a SYMBOL_REF), then a MEM
   whose address is the SYMBOL_REF is returned.)  */
rtx
canon_rtx (rtx x)
1277 /* Recursively look for equivalences. */
1278 if (REG_P (x) && REGNO (x) >= FIRST_PSEUDO_REGISTER)
1280 rtx t = get_reg_known_value (REGNO (x));
1281 if (t == x)
1282 return x;
1283 if (t)
1284 return canon_rtx (t);
1287 if (GET_CODE (x) == PLUS)
1289 rtx x0 = canon_rtx (XEXP (x, 0));
1290 rtx x1 = canon_rtx (XEXP (x, 1));
1292 if (x0 != XEXP (x, 0) || x1 != XEXP (x, 1))
1294 if (CONST_INT_P (x0))
1295 return plus_constant (x1, INTVAL (x0));
1296 else if (CONST_INT_P (x1))
1297 return plus_constant (x0, INTVAL (x1));
1298 return gen_rtx_PLUS (GET_MODE (x), x0, x1);
1302 /* This gives us much better alias analysis when called from
1303 the loop optimizer. Note we want to leave the original
1304 MEM alone, but need to return the canonicalized MEM with
1305 all the flags with their original values. */
1306 else if (MEM_P (x))
1307 x = replace_equiv_address_nv (x, canon_rtx (XEXP (x, 0)));
1309 return x;
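/* Editorial example: if reg_known_value records that pseudo 100
   equals (symbol_ref "x"), canon_rtx applied to

     (mem (plus (reg 100) (const_int 4)))

   rewrites the address to (const (plus (symbol_ref "x") (const_int 4)))
   via plus_constant, making later memref comparisons exact.  */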
/* Return 1 if X and Y are identical-looking rtx's.
   Expect that X and Y have already been canonicalized.

   We use the data in reg_known_value above to see if two registers with
   different numbers are, in fact, equivalent.  */
1318 static int
1319 rtx_equal_for_memref_p (const_rtx x, const_rtx y)
1321 int i;
1322 int j;
1323 enum rtx_code code;
1324 const char *fmt;
1326 if (x == 0 && y == 0)
1327 return 1;
1328 if (x == 0 || y == 0)
1329 return 0;
1331 if (x == y)
1332 return 1;
1334 code = GET_CODE (x);
1335 /* Rtx's of different codes cannot be equal. */
1336 if (code != GET_CODE (y))
1337 return 0;
1339 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.
1340 (REG:SI x) and (REG:HI x) are NOT equivalent. */
1342 if (GET_MODE (x) != GET_MODE (y))
1343 return 0;
1345 /* Some RTL can be compared without a recursive examination. */
1346 switch (code)
1348 case REG:
1349 return REGNO (x) == REGNO (y);
1351 case LABEL_REF:
1352 return XEXP (x, 0) == XEXP (y, 0);
1354 case SYMBOL_REF:
1355 return XSTR (x, 0) == XSTR (y, 0);
1357 case VALUE:
1358 case CONST_INT:
1359 case CONST_DOUBLE:
1360 case CONST_FIXED:
1361 /* There's no need to compare the contents of CONST_DOUBLEs or
1362 CONST_INTs because pointer equality is a good enough
1363 comparison for these nodes. */
1364 return 0;
1366 default:
1367 break;
1370 /* canon_rtx knows how to handle plus. No need to canonicalize. */
1371 if (code == PLUS)
1372 return ((rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 0))
1373 && rtx_equal_for_memref_p (XEXP (x, 1), XEXP (y, 1)))
1374 || (rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 1))
1375 && rtx_equal_for_memref_p (XEXP (x, 1), XEXP (y, 0))));
  /* For commutative operations, the RTXs match if the operands match in
     either order.  Also handle the simple binary and unary cases without
     a loop.  */
1378 if (COMMUTATIVE_P (x))
1380 rtx xop0 = canon_rtx (XEXP (x, 0));
1381 rtx yop0 = canon_rtx (XEXP (y, 0));
1382 rtx yop1 = canon_rtx (XEXP (y, 1));
1384 return ((rtx_equal_for_memref_p (xop0, yop0)
1385 && rtx_equal_for_memref_p (canon_rtx (XEXP (x, 1)), yop1))
1386 || (rtx_equal_for_memref_p (xop0, yop1)
1387 && rtx_equal_for_memref_p (canon_rtx (XEXP (x, 1)), yop0)));
1389 else if (NON_COMMUTATIVE_P (x))
1391 return (rtx_equal_for_memref_p (canon_rtx (XEXP (x, 0)),
1392 canon_rtx (XEXP (y, 0)))
1393 && rtx_equal_for_memref_p (canon_rtx (XEXP (x, 1)),
1394 canon_rtx (XEXP (y, 1))));
1396 else if (UNARY_P (x))
1397 return rtx_equal_for_memref_p (canon_rtx (XEXP (x, 0)),
1398 canon_rtx (XEXP (y, 0)));
  /* Compare the elements.  If any pair of corresponding elements
     fail to match, return 0 for the whole thing.

     Limit cases to types which actually appear in addresses.  */
1405 fmt = GET_RTX_FORMAT (code);
1406 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1408 switch (fmt[i])
1410 case 'i':
1411 if (XINT (x, i) != XINT (y, i))
1412 return 0;
1413 break;
1415 case 'E':
1416 /* Two vectors must have the same length. */
1417 if (XVECLEN (x, i) != XVECLEN (y, i))
1418 return 0;
1420 /* And the corresponding elements must match. */
1421 for (j = 0; j < XVECLEN (x, i); j++)
1422 if (rtx_equal_for_memref_p (canon_rtx (XVECEXP (x, i, j)),
1423 canon_rtx (XVECEXP (y, i, j))) == 0)
1424 return 0;
1425 break;
1427 case 'e':
1428 if (rtx_equal_for_memref_p (canon_rtx (XEXP (x, i)),
1429 canon_rtx (XEXP (y, i))) == 0)
1430 return 0;
1431 break;
1433 /* This can happen for asm operands. */
1434 case 's':
1435 if (strcmp (XSTR (x, i), XSTR (y, i)))
1436 return 0;
1437 break;
1439 /* This can happen for an asm which clobbers memory. */
1440 case '0':
1441 break;
1443 /* It is believed that rtx's at this level will never
1444 contain anything but integers and other rtx's,
1445 except for within LABEL_REFs and SYMBOL_REFs. */
1446 default:
1447 gcc_unreachable ();
1450 return 1;
rtx
find_base_term (rtx x)
1456 cselib_val *val;
1457 struct elt_loc_list *l;
1459 #if defined (FIND_BASE_TERM)
1460 /* Try machine-dependent ways to find the base term. */
1461 x = FIND_BASE_TERM (x);
1462 #endif
1464 switch (GET_CODE (x))
1466 case REG:
1467 return REG_BASE_VALUE (x);
1469 case TRUNCATE:
1470 if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (Pmode))
1471 return 0;
1472 /* Fall through. */
1473 case HIGH:
1474 case PRE_INC:
1475 case PRE_DEC:
1476 case POST_INC:
1477 case POST_DEC:
1478 case PRE_MODIFY:
1479 case POST_MODIFY:
1480 return find_base_term (XEXP (x, 0));
1482 case ZERO_EXTEND:
1483 case SIGN_EXTEND: /* Used for Alpha/NT pointers */
1485 rtx temp = find_base_term (XEXP (x, 0));
1487 if (temp != 0 && CONSTANT_P (temp))
1488 temp = convert_memory_address (Pmode, temp);
1490 return temp;
1493 case VALUE:
1494 val = CSELIB_VAL_PTR (x);
1495 if (!val)
1496 return 0;
1497 for (l = val->locs; l; l = l->next)
1498 if ((x = find_base_term (l->loc)) != 0)
1499 return x;
1500 return 0;
1502 case LO_SUM:
1503 /* The standard form is (lo_sum reg sym) so look only at the
1504 second operand. */
1505 return find_base_term (XEXP (x, 1));
1507 case CONST:
1508 x = XEXP (x, 0);
1509 if (GET_CODE (x) != PLUS && GET_CODE (x) != MINUS)
1510 return 0;
1511 /* Fall through. */
1512 case PLUS:
1513 case MINUS:
1515 rtx tmp1 = XEXP (x, 0);
1516 rtx tmp2 = XEXP (x, 1);
1518 /* This is a little bit tricky since we have to determine which of
1519 the two operands represents the real base address. Otherwise this
1520 routine may return the index register instead of the base register.
1522 That may cause us to believe no aliasing was possible, when in
1523 fact aliasing is possible.
1525 We use a few simple tests to guess the base register. Additional
1526 tests can certainly be added. For example, if one of the operands
1527 is a shift or multiply, then it must be the index register and the
1528 other operand is the base register. */
1530 if (tmp1 == pic_offset_table_rtx && CONSTANT_P (tmp2))
1531 return find_base_term (tmp2);
1533 /* If either operand is known to be a pointer, then use it
1534 to determine the base term. */
1535 if (REG_P (tmp1) && REG_POINTER (tmp1))
1537 rtx base = find_base_term (tmp1);
1538 if (base)
1539 return base;
1542 if (REG_P (tmp2) && REG_POINTER (tmp2))
1544 rtx base = find_base_term (tmp2);
1545 if (base)
1546 return base;
1549 /* Neither operand was known to be a pointer. Go ahead and find the
1550 base term for both operands. */
1551 tmp1 = find_base_term (tmp1);
1552 tmp2 = find_base_term (tmp2);
	/* If either base term is a named object or a special address
	   (like an argument or stack reference), then use it for the
	   base term.  */
1557 if (tmp1 != 0
1558 && (GET_CODE (tmp1) == SYMBOL_REF
1559 || GET_CODE (tmp1) == LABEL_REF
1560 || (GET_CODE (tmp1) == ADDRESS
1561 && GET_MODE (tmp1) != VOIDmode)))
1562 return tmp1;
1564 if (tmp2 != 0
1565 && (GET_CODE (tmp2) == SYMBOL_REF
1566 || GET_CODE (tmp2) == LABEL_REF
1567 || (GET_CODE (tmp2) == ADDRESS
1568 && GET_MODE (tmp2) != VOIDmode)))
1569 return tmp2;
1571 /* We could not determine which of the two operands was the
1572 base register and which was the index. So we can determine
1573 nothing from the base alias check. */
1574 return 0;
1577 case AND:
1578 if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) != 0)
1579 return find_base_term (XEXP (x, 0));
1580 return 0;
1582 case SYMBOL_REF:
1583 case LABEL_REF:
1584 return x;
1586 default:
1587 return 0;
1591 /* Return 0 if the addresses X and Y are known to point to different
1592 objects, 1 if they might be pointers to the same object. */
1594 static int
1595 base_alias_check (rtx x, rtx y, enum machine_mode x_mode,
1596 enum machine_mode y_mode)
1598 rtx x_base = find_base_term (x);
1599 rtx y_base = find_base_term (y);
1601 /* If the address itself has no known base see if a known equivalent
1602 value has one. If either address still has no known base, nothing
1603 is known about aliasing. */
1604 if (x_base == 0)
1606 rtx x_c;
1608 if (! flag_expensive_optimizations || (x_c = canon_rtx (x)) == x)
1609 return 1;
1611 x_base = find_base_term (x_c);
1612 if (x_base == 0)
1613 return 1;
1616 if (y_base == 0)
1618 rtx y_c;
1619 if (! flag_expensive_optimizations || (y_c = canon_rtx (y)) == y)
1620 return 1;
1622 y_base = find_base_term (y_c);
1623 if (y_base == 0)
1624 return 1;
1627 /* If the base addresses are equal nothing is known about aliasing. */
1628 if (rtx_equal_p (x_base, y_base))
1629 return 1;
  /* The base addresses are different expressions.  If they are not
     accessed via AND, there is no conflict.  We can bring knowledge of
     object alignment into play here.  For example, on alpha, "char a, b;"
     can alias one another, though "char a; long b;" cannot.  AND
     addresses may implicitly alias surrounding objects; i.e. unaligned
     access in DImode via AND address can alias all surrounding object
     types except those with alignment 8 or higher.  */
1638 if (GET_CODE (x) == AND && GET_CODE (y) == AND)
1639 return 1;
1640 if (GET_CODE (x) == AND
1641 && (!CONST_INT_P (XEXP (x, 1))
1642 || (int) GET_MODE_UNIT_SIZE (y_mode) < -INTVAL (XEXP (x, 1))))
1643 return 1;
1644 if (GET_CODE (y) == AND
1645 && (!CONST_INT_P (XEXP (y, 1))
1646 || (int) GET_MODE_UNIT_SIZE (x_mode) < -INTVAL (XEXP (y, 1))))
1647 return 1;
1649 /* Differing symbols not accessed via AND never alias. */
1650 if (GET_CODE (x_base) != ADDRESS && GET_CODE (y_base) != ADDRESS)
1651 return 0;
1653 /* If one address is a stack reference there can be no alias:
1654 stack references using different base registers do not alias,
1655 a stack reference can not alias a parameter, and a stack reference
1656 can not alias a global. */
1657 if ((GET_CODE (x_base) == ADDRESS && GET_MODE (x_base) == Pmode)
1658 || (GET_CODE (y_base) == ADDRESS && GET_MODE (y_base) == Pmode))
1659 return 0;
1661 if (! flag_argument_noalias)
1662 return 1;
1664 if (flag_argument_noalias > 1)
1665 return 0;
1667 /* Weak noalias assertion (arguments are distinct, but may match globals). */
1668 return ! (GET_MODE (x_base) == VOIDmode && GET_MODE (y_base) == VOIDmode);
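/* Editorial example: for x = (symbol_ref "a") and y = (symbol_ref "b"),
   both base terms are the SYMBOL_REFs themselves; they differ, neither
   address is an AND, and neither base is an ADDRESS, so the function
   returns 0: the two references are known not to alias.  */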
1671 /* Convert the address X into something we can use. This is done by returning
1672 it unchanged unless it is a value; in the latter case we call cselib to get
1673 a more useful rtx. */
rtx
get_addr (rtx x)
1678 cselib_val *v;
1679 struct elt_loc_list *l;
1681 if (GET_CODE (x) != VALUE)
1682 return x;
1683 v = CSELIB_VAL_PTR (x);
1684 if (v)
1686 for (l = v->locs; l; l = l->next)
1687 if (CONSTANT_P (l->loc))
1688 return l->loc;
1689 for (l = v->locs; l; l = l->next)
1690 if (!REG_P (l->loc) && !MEM_P (l->loc))
1691 return l->loc;
1692 if (v->locs)
1693 return v->locs->loc;
1695 return x;
1698 /* Return the address of the (N_REFS + 1)th memory reference to ADDR
1699 where SIZE is the size in bytes of the memory reference. If ADDR
1700 is not modified by the memory reference then ADDR is returned. */
1702 static rtx
1703 addr_side_effect_eval (rtx addr, int size, int n_refs)
1705 int offset = 0;
1707 switch (GET_CODE (addr))
1709 case PRE_INC:
1710 offset = (n_refs + 1) * size;
1711 break;
1712 case PRE_DEC:
1713 offset = -(n_refs + 1) * size;
1714 break;
1715 case POST_INC:
1716 offset = n_refs * size;
1717 break;
1718 case POST_DEC:
1719 offset = -n_refs * size;
1720 break;
1722 default:
1723 return addr;
1726 if (offset)
1727 addr = gen_rtx_PLUS (GET_MODE (addr), XEXP (addr, 0),
1728 GEN_INT (offset));
1729 else
1730 addr = XEXP (addr, 0);
1731 addr = canon_rtx (addr);
1733 return addr;
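/* Editorial worked example: for addr = (post_inc (reg 100)) with
   size = 4 and n_refs = 1, offset = n_refs * size = 4, so the result
   is canon_rtx of (plus (reg 100) (const_int 4)): the address at
   which the second reference takes place.  */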
1736 /* Return nonzero if X and Y (memory addresses) could reference the
1737 same location in memory. C is an offset accumulator. When
1738 C is nonzero, we are testing aliases between X and Y + C.
1739 XSIZE is the size in bytes of the X reference,
1740 similarly YSIZE is the size in bytes for Y.
1741 Expect that canon_rtx has been already called for X and Y.
1743 If XSIZE or YSIZE is zero, we do not know the amount of memory being
1744 referenced (the reference was BLKmode), so make the most pessimistic
1745 assumptions.
1747 If XSIZE or YSIZE is negative, we may access memory outside the object
1748 being referenced as a side effect. This can happen when using AND to
1749 align memory references, as is done on the Alpha.
1751 Nice to notice that varying addresses cannot conflict with fp if no
1752 local variables had their addresses taken, but that's too hard now. */
1754 static int
1755 memrefs_conflict_p (int xsize, rtx x, int ysize, rtx y, HOST_WIDE_INT c)
1757 if (GET_CODE (x) == VALUE)
1758 x = get_addr (x);
1759 if (GET_CODE (y) == VALUE)
1760 y = get_addr (y);
1761 if (GET_CODE (x) == HIGH)
1762 x = XEXP (x, 0);
1763 else if (GET_CODE (x) == LO_SUM)
1764 x = XEXP (x, 1);
1765 else
1766 x = addr_side_effect_eval (x, xsize, 0);
1767 if (GET_CODE (y) == HIGH)
1768 y = XEXP (y, 0);
1769 else if (GET_CODE (y) == LO_SUM)
1770 y = XEXP (y, 1);
1771 else
1772 y = addr_side_effect_eval (y, ysize, 0);
1774 if (rtx_equal_for_memref_p (x, y))
1776 if (xsize <= 0 || ysize <= 0)
1777 return 1;
1778 if (c >= 0 && xsize > c)
1779 return 1;
1780 if (c < 0 && ysize+c > 0)
1781 return 1;
1782 return 0;
1785 /* This code used to check for conflicts involving stack references and
1786 globals but the base address alias code now handles these cases. */
1788 if (GET_CODE (x) == PLUS)
1790 /* The fact that X is canonicalized means that this
1791 PLUS rtx is canonicalized. */
1792 rtx x0 = XEXP (x, 0);
1793 rtx x1 = XEXP (x, 1);
1795 if (GET_CODE (y) == PLUS)
1797 /* The fact that Y is canonicalized means that this
1798 PLUS rtx is canonicalized. */
1799 rtx y0 = XEXP (y, 0);
1800 rtx y1 = XEXP (y, 1);
1802 if (rtx_equal_for_memref_p (x1, y1))
1803 return memrefs_conflict_p (xsize, x0, ysize, y0, c);
1804 if (rtx_equal_for_memref_p (x0, y0))
1805 return memrefs_conflict_p (xsize, x1, ysize, y1, c);
1806 if (CONST_INT_P (x1))
1808 if (CONST_INT_P (y1))
1809 return memrefs_conflict_p (xsize, x0, ysize, y0,
1810 c - INTVAL (x1) + INTVAL (y1));
1811 else
1812 return memrefs_conflict_p (xsize, x0, ysize, y,
1813 c - INTVAL (x1));
1815 else if (CONST_INT_P (y1))
1816 return memrefs_conflict_p (xsize, x, ysize, y0, c + INTVAL (y1));
1818 return 1;
1820 else if (CONST_INT_P (x1))
1821 return memrefs_conflict_p (xsize, x0, ysize, y, c - INTVAL (x1));
1823 else if (GET_CODE (y) == PLUS)
1825 /* The fact that Y is canonicalized means that this
1826 PLUS rtx is canonicalized. */
1827 rtx y0 = XEXP (y, 0);
1828 rtx y1 = XEXP (y, 1);
1830 if (CONST_INT_P (y1))
1831 return memrefs_conflict_p (xsize, x, ysize, y0, c + INTVAL (y1));
1832 else
1833 return 1;
1836 if (GET_CODE (x) == GET_CODE (y))
1837 switch (GET_CODE (x))
1839 case MULT:
1841 /* Handle cases where we expect the second operands to be the
1842 same, and check only whether the first operand would conflict
1843 or not. */
1844 rtx x0, y0;
1845 rtx x1 = canon_rtx (XEXP (x, 1));
1846 rtx y1 = canon_rtx (XEXP (y, 1));
1847 if (! rtx_equal_for_memref_p (x1, y1))
1848 return 1;
1849 x0 = canon_rtx (XEXP (x, 0));
1850 y0 = canon_rtx (XEXP (y, 0));
1851 if (rtx_equal_for_memref_p (x0, y0))
1852 return (xsize == 0 || ysize == 0
1853 || (c >= 0 && xsize > c) || (c < 0 && ysize+c > 0));
1855 /* Can't properly adjust our sizes. */
1856 if (!CONST_INT_P (x1))
1857 return 1;
1858 xsize /= INTVAL (x1);
1859 ysize /= INTVAL (x1);
1860 c /= INTVAL (x1);
1861 return memrefs_conflict_p (xsize, x0, ysize, y0, c);
1864 default:
1865 break;
1868 /* Treat an access through an AND (e.g. a subword access on an Alpha)
1869 as an access with indeterminate size. Assume that references
1870 besides AND are aligned, so if the size of the other reference is
1871 at least as large as the alignment, assume no other overlap. */
1872 if (GET_CODE (x) == AND && CONST_INT_P (XEXP (x, 1)))
1874 if (GET_CODE (y) == AND || ysize < -INTVAL (XEXP (x, 1)))
1875 xsize = -1;
1876 return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)), ysize, y, c);
1878 if (GET_CODE (y) == AND && CONST_INT_P (XEXP (y, 1)))
      /* ??? If we are indexing far enough into the array/structure, we
	 may yet be able to determine that we can not overlap.  But we
	 also need to know that we are far enough from the end not to
	 overlap a following reference, so we do nothing with that for
	 now.  */
1884 if (GET_CODE (x) == AND || xsize < -INTVAL (XEXP (y, 1)))
1885 ysize = -1;
1886 return memrefs_conflict_p (xsize, x, ysize, canon_rtx (XEXP (y, 0)), c);

  if (CONSTANT_P (x))
    {
      if (CONST_INT_P (x) && CONST_INT_P (y))
        {
          c += (INTVAL (y) - INTVAL (x));
          return (xsize <= 0 || ysize <= 0
                  || (c >= 0 && xsize > c) || (c < 0 && ysize + c > 0));
        }

      if (GET_CODE (x) == CONST)
        {
          if (GET_CODE (y) == CONST)
            return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)),
                                       ysize, canon_rtx (XEXP (y, 0)), c);
          else
            return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)),
                                       ysize, y, c);
        }
      if (GET_CODE (y) == CONST)
        return memrefs_conflict_p (xsize, x, ysize,
                                   canon_rtx (XEXP (y, 0)), c);

      if (CONSTANT_P (y))
        return (xsize <= 0 || ysize <= 0
                || (rtx_equal_for_memref_p (x, y)
                    && ((c >= 0 && xsize > c) || (c < 0 && ysize + c > 0))));

      return 1;
    }

  return 1;
}

/* Functions to compute memory dependencies.

   Since we process the insns in execution order, we can build tables
   to keep track of what registers are fixed (and not aliased), what registers
   are varying in known ways, and what registers are varying in unknown
   ways.

   If both memory references are volatile, then there must always be a
   dependence between the two references, since their order cannot be
   changed.  A volatile and non-volatile reference can be interchanged
   though.

   A MEM_IN_STRUCT reference at a non-AND varying address can never
   conflict with a non-MEM_IN_STRUCT reference at a fixed address.  We
   must also allow AND addresses, because they may generate accesses
   outside the object being referenced.  This is used to generate
   aligned addresses from unaligned addresses, for instance, the alpha
   storeqi_unaligned pattern.  */
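
/* For instance (an illustration, not from the original sources): with
   "volatile int *a, *b;", the accesses in "*a = 1; t = *b;" must keep
   their order even when a != b is provable, so the dependence routines
   below return 1 for a volatile pair before any address tests run.  */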

/* Read dependence: X is read after read in MEM takes place.  There can
   only be a dependence here if both reads are volatile.  */

int
read_dependence (const_rtx mem, const_rtx x)
{
  return MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem);
}

/* Returns MEM1 if and only if MEM1 is a scalar at a fixed address and
   MEM2 is a reference to a structure at a varying address, or returns
   MEM2 if vice versa.  Otherwise, returns NULL_RTX.  If a non-NULL
   value is returned MEM1 and MEM2 can never alias.  VARIES_P is used
   to decide whether or not an address may vary; it should return
   nonzero whenever variation is possible.
   MEM1_ADDR and MEM2_ADDR are the addresses of MEM1 and MEM2.  */

static const_rtx
fixed_scalar_and_varying_struct_p (const_rtx mem1, const_rtx mem2,
                                   rtx mem1_addr, rtx mem2_addr,
                                   bool (*varies_p) (const_rtx, bool))
{
  if (! flag_strict_aliasing)
    return NULL_RTX;

  if (MEM_ALIAS_SET (mem2)
      && MEM_SCALAR_P (mem1) && MEM_IN_STRUCT_P (mem2)
      && !varies_p (mem1_addr, 1) && varies_p (mem2_addr, 1))
    /* MEM1 is a scalar at a fixed address; MEM2 is a struct at a
       varying address.  */
    return mem1;

  if (MEM_ALIAS_SET (mem1)
      && MEM_IN_STRUCT_P (mem1) && MEM_SCALAR_P (mem2)
      && varies_p (mem1_addr, 1) && !varies_p (mem2_addr, 1))
    /* MEM2 is a scalar at a fixed address; MEM1 is a struct at a
       varying address.  */
    return mem2;

  return NULL_RTX;
}
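
/* Example (an illustration, not from the original sources): with
   "int g; struct s *p;", a MEM for the scalar g at a fixed address and
   a MEM for p->f at a varying address cannot alias under strict
   aliasing, so the MEM for g would be returned here.  */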

/* Returns nonzero if something about the mode or address format of MEM
   indicates that it might well alias *anything*.  */

static int
aliases_everything_p (const_rtx mem)
{
  if (GET_CODE (XEXP (mem, 0)) == AND)
    /* If the address is an AND, it's very hard to know what it is
       actually pointing at.  */
    return 1;

  return 0;
}

/* Return true if we can determine that the fields referenced cannot
   overlap for any pair of objects.  */

static bool
nonoverlapping_component_refs_p (const_tree x, const_tree y)
{
  const_tree fieldx, fieldy, typex, typey, orig_y;

  if (!flag_strict_aliasing)
    return false;

  do
    {
      /* The comparison has to be done at a common type, since we don't
         know how the inheritance hierarchy works.  */
      orig_y = y;
      do
        {
          fieldx = TREE_OPERAND (x, 1);
          typex = TYPE_MAIN_VARIANT (DECL_FIELD_CONTEXT (fieldx));

          y = orig_y;
          do
            {
              fieldy = TREE_OPERAND (y, 1);
              typey = TYPE_MAIN_VARIANT (DECL_FIELD_CONTEXT (fieldy));

              if (typex == typey)
                goto found;

              y = TREE_OPERAND (y, 0);
            }
          while (y && TREE_CODE (y) == COMPONENT_REF);

          x = TREE_OPERAND (x, 0);
        }
      while (x && TREE_CODE (x) == COMPONENT_REF);

      /* Never found a common type.  */
      return false;

    found:
      /* If we're left with accessing different fields of a structure,
         then no overlap.  */
      if (TREE_CODE (typex) == RECORD_TYPE
          && fieldx != fieldy)
        return true;

      /* The comparison on the current field failed.  If we're accessing
         a very nested structure, look at the next outer level.  */
      x = TREE_OPERAND (x, 0);
      y = TREE_OPERAND (y, 0);
    }
  while (x && y
         && TREE_CODE (x) == COMPONENT_REF
         && TREE_CODE (y) == COMPONENT_REF);

  return false;
}
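
/* Example (an illustration, not from the original sources): given
   "struct s { int a; int b; } *p, *q;", the references p->a and q->b
   meet at the common type s with fieldx != fieldy, so they cannot
   overlap even when p and q point to the same object.  */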

/* Look at the bottom of the COMPONENT_REF list for a DECL, and return it.  */

static tree
decl_for_component_ref (tree x)
{
  do
    {
      x = TREE_OPERAND (x, 0);
    }
  while (x && TREE_CODE (x) == COMPONENT_REF);

  return x && DECL_P (x) ? x : NULL_TREE;
}

/* Walk up the COMPONENT_REF list and adjust OFFSET to compensate for the
   offset of the field reference.  */

static rtx
adjust_offset_for_component_ref (tree x, rtx offset)
{
  HOST_WIDE_INT ioffset;

  if (! offset)
    return NULL_RTX;

  ioffset = INTVAL (offset);
  do
    {
      tree offset = component_ref_field_offset (x);
      tree field = TREE_OPERAND (x, 1);

      if (! host_integerp (offset, 1))
        return NULL_RTX;
      ioffset += (tree_low_cst (offset, 1)
                  + (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
                     / BITS_PER_UNIT));

      x = TREE_OPERAND (x, 0);
    }
  while (x && TREE_CODE (x) == COMPONENT_REF);

  return GEN_INT (ioffset);
}
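
/* Example (an illustration, not from the original sources): for a
   reference v.inner.f where "inner" lies at byte 8 of v and "f" at
   byte 4 of "inner", the loop above accumulates 4 and then 8, so the
   returned offset is the original one plus 12.  */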

/* Return nonzero if we can determine the exprs corresponding to memrefs
   X and Y and they do not overlap.  */

int
nonoverlapping_memrefs_p (const_rtx x, const_rtx y)
{
  tree exprx = MEM_EXPR (x), expry = MEM_EXPR (y);
  rtx rtlx, rtly;
  rtx basex, basey;
  rtx moffsetx, moffsety;
  HOST_WIDE_INT offsetx = 0, offsety = 0, sizex, sizey, tem;

  /* Unless both have exprs, we can't tell anything.  */
  if (exprx == 0 || expry == 0)
    return 0;

  /* If both are field references, we may be able to determine something.  */
  if (TREE_CODE (exprx) == COMPONENT_REF
      && TREE_CODE (expry) == COMPONENT_REF
      && nonoverlapping_component_refs_p (exprx, expry))
    return 1;

  /* If the field reference test failed, look at the DECLs involved.  */
  moffsetx = MEM_OFFSET (x);
  if (TREE_CODE (exprx) == COMPONENT_REF)
    {
      if (TREE_CODE (expry) == VAR_DECL
          && POINTER_TYPE_P (TREE_TYPE (expry)))
        {
          tree field = TREE_OPERAND (exprx, 1);
          tree fieldcontext = DECL_FIELD_CONTEXT (field);
          if (ipa_type_escape_field_does_not_clobber_p (fieldcontext,
                                                        TREE_TYPE (field)))
            return 1;
        }
      {
        tree t = decl_for_component_ref (exprx);
        if (! t)
          return 0;
        moffsetx = adjust_offset_for_component_ref (exprx, moffsetx);
        exprx = t;
      }
    }
  else if (INDIRECT_REF_P (exprx))
    {
      exprx = TREE_OPERAND (exprx, 0);
      if (flag_argument_noalias < 2
          || TREE_CODE (exprx) != PARM_DECL)
        return 0;
    }

  moffsety = MEM_OFFSET (y);
  if (TREE_CODE (expry) == COMPONENT_REF)
    {
      if (TREE_CODE (exprx) == VAR_DECL
          && POINTER_TYPE_P (TREE_TYPE (exprx)))
        {
          tree field = TREE_OPERAND (expry, 1);
          tree fieldcontext = DECL_FIELD_CONTEXT (field);
          if (ipa_type_escape_field_does_not_clobber_p (fieldcontext,
                                                        TREE_TYPE (field)))
            return 1;
        }
      {
        tree t = decl_for_component_ref (expry);
        if (! t)
          return 0;
        moffsety = adjust_offset_for_component_ref (expry, moffsety);
        expry = t;
      }
    }
  else if (INDIRECT_REF_P (expry))
    {
      expry = TREE_OPERAND (expry, 0);
      if (flag_argument_noalias < 2
          || TREE_CODE (expry) != PARM_DECL)
        return 0;
    }

  if (! DECL_P (exprx) || ! DECL_P (expry))
    return 0;

  rtlx = DECL_RTL (exprx);
  rtly = DECL_RTL (expry);

  /* If either RTL is not a MEM, it must be a REG or CONCAT, meaning they
     can't overlap unless they are the same because we never reuse that part
     of the stack frame used for locals for spilled pseudos.  */
  if ((!MEM_P (rtlx) || !MEM_P (rtly))
      && ! rtx_equal_p (rtlx, rtly))
    return 1;

  /* Get the base and offsets of both decls.  If either is a register, we
     know both are and are the same, so use that as the base.  The only way
     we can avoid overlap is if we can deduce that they are nonoverlapping
     pieces of that decl, which is very rare.  */
  basex = MEM_P (rtlx) ? XEXP (rtlx, 0) : rtlx;
  if (GET_CODE (basex) == PLUS && CONST_INT_P (XEXP (basex, 1)))
    offsetx = INTVAL (XEXP (basex, 1)), basex = XEXP (basex, 0);

  basey = MEM_P (rtly) ? XEXP (rtly, 0) : rtly;
  if (GET_CODE (basey) == PLUS && CONST_INT_P (XEXP (basey, 1)))
    offsety = INTVAL (XEXP (basey, 1)), basey = XEXP (basey, 0);

  /* If the bases are different, we know they do not overlap if both
     are constants or if one is a constant and the other a pointer into the
     stack frame.  Otherwise a different base means we can't tell if they
     overlap or not.  */
  if (! rtx_equal_p (basex, basey))
    return ((CONSTANT_P (basex) && CONSTANT_P (basey))
            || (CONSTANT_P (basex) && REG_P (basey)
                && REGNO_PTR_FRAME_P (REGNO (basey)))
            || (CONSTANT_P (basey) && REG_P (basex)
                && REGNO_PTR_FRAME_P (REGNO (basex))));

  sizex = (!MEM_P (rtlx) ? (int) GET_MODE_SIZE (GET_MODE (rtlx))
           : MEM_SIZE (rtlx) ? INTVAL (MEM_SIZE (rtlx))
           : -1);
  sizey = (!MEM_P (rtly) ? (int) GET_MODE_SIZE (GET_MODE (rtly))
           : MEM_SIZE (rtly) ? INTVAL (MEM_SIZE (rtly))
           : -1);

  /* If we have an offset for either memref, it can update the values computed
     above.  */
  if (moffsetx)
    offsetx += INTVAL (moffsetx), sizex -= INTVAL (moffsetx);
  if (moffsety)
    offsety += INTVAL (moffsety), sizey -= INTVAL (moffsety);

  /* If a memref has both a size and an offset, we can use the smaller size.
     We can't do this if the offset isn't known because we must view this
     memref as being anywhere inside the DECL's MEM.  */
  if (MEM_SIZE (x) && moffsetx)
    sizex = INTVAL (MEM_SIZE (x));
  if (MEM_SIZE (y) && moffsety)
    sizey = INTVAL (MEM_SIZE (y));

  /* Put the values of the memref with the lower offset in X's values.  */
  if (offsetx > offsety)
    {
      tem = offsetx, offsetx = offsety, offsety = tem;
      tem = sizex, sizex = sizey, sizey = tem;
    }

  /* If we don't know the size of the lower-offset value, we can't tell
     if they conflict.  Otherwise, we do the test.  */
  return sizex >= 0 && offsety >= offsetx + sizex;
}
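
/* Example (an illustration, not from the original sources): two refs
   into the same stack decl with (offset, size) of (0, 4) and (4, 4)
   satisfy offsety >= offsetx + sizex above, so they are known not to
   overlap; an unknown size (sizex == -1) conservatively yields 0.  */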

/* True dependence: X is read after store in MEM takes place.  */

int
true_dependence (const_rtx mem, enum machine_mode mem_mode, const_rtx x,
                 bool (*varies) (const_rtx, bool))
{
  rtx x_addr, mem_addr;
  rtx base;

  if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
    return 1;

  /* (mem:BLK (scratch)) is a special mechanism to conflict with everything.
     This is used in epilogue deallocation functions, and in cselib.  */
  if (GET_MODE (x) == BLKmode && GET_CODE (XEXP (x, 0)) == SCRATCH)
    return 1;
  if (GET_MODE (mem) == BLKmode && GET_CODE (XEXP (mem, 0)) == SCRATCH)
    return 1;
  if (MEM_ALIAS_SET (x) == ALIAS_SET_MEMORY_BARRIER
      || MEM_ALIAS_SET (mem) == ALIAS_SET_MEMORY_BARRIER)
    return 1;

  if (DIFFERENT_ALIAS_SETS_P (x, mem))
    return 0;

  /* Read-only memory is by definition never modified, and therefore can't
     conflict with anything.  We don't expect to find read-only set on MEM,
     but stupid user tricks can produce them, so don't die.  */
  if (MEM_READONLY_P (x))
    return 0;

  if (nonoverlapping_memrefs_p (mem, x))
    return 0;

  if (mem_mode == VOIDmode)
    mem_mode = GET_MODE (mem);

  x_addr = get_addr (XEXP (x, 0));
  mem_addr = get_addr (XEXP (mem, 0));

  base = find_base_term (x_addr);
  if (base && (GET_CODE (base) == LABEL_REF
               || (GET_CODE (base) == SYMBOL_REF
                   && CONSTANT_POOL_ADDRESS_P (base))))
    return 0;

  if (! base_alias_check (x_addr, mem_addr, GET_MODE (x), mem_mode))
    return 0;

  x_addr = canon_rtx (x_addr);
  mem_addr = canon_rtx (mem_addr);

  if (! memrefs_conflict_p (GET_MODE_SIZE (mem_mode), mem_addr,
                            SIZE_FOR_MODE (x), x_addr, 0))
    return 0;

  if (aliases_everything_p (x))
    return 1;

  /* We cannot use aliases_everything_p to test MEM, since we must look
     at MEM_MODE, rather than GET_MODE (MEM).  */
  if (mem_mode == QImode || GET_CODE (mem_addr) == AND)
    return 1;

  /* In true_dependence we also allow BLKmode to alias anything.  Why
     don't we do this in anti_dependence and output_dependence?  */
  if (mem_mode == BLKmode || GET_MODE (x) == BLKmode)
    return 1;

  if (fixed_scalar_and_varying_struct_p (mem, x, mem_addr, x_addr, varies))
    return 0;

  return rtx_refs_may_alias_p (x, mem, true);
}
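
/* Usage note (an illustration, not from the original sources): a
   nonzero result says the read in X must stay after the store in MEM;
   passes such as the scheduler query this before hoisting a load above
   a store.  */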

/* Canonical true dependence: X is read after store in MEM takes place.
   Variant of true_dependence which assumes MEM has already been
   canonicalized (hence we no longer do that here).
   The mem_addr argument has been added, since true_dependence computed
   this value prior to canonicalizing.
   If x_addr is non-NULL, it is used in preference to XEXP (x, 0).  */

int
canon_true_dependence (const_rtx mem, enum machine_mode mem_mode, rtx mem_addr,
                       const_rtx x, rtx x_addr, bool (*varies) (const_rtx, bool))
{
  if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
    return 1;

  /* (mem:BLK (scratch)) is a special mechanism to conflict with everything.
     This is used in epilogue deallocation functions.  */
  if (GET_MODE (x) == BLKmode && GET_CODE (XEXP (x, 0)) == SCRATCH)
    return 1;
  if (GET_MODE (mem) == BLKmode && GET_CODE (XEXP (mem, 0)) == SCRATCH)
    return 1;
  if (MEM_ALIAS_SET (x) == ALIAS_SET_MEMORY_BARRIER
      || MEM_ALIAS_SET (mem) == ALIAS_SET_MEMORY_BARRIER)
    return 1;

  if (DIFFERENT_ALIAS_SETS_P (x, mem))
    return 0;

  /* Read-only memory is by definition never modified, and therefore can't
     conflict with anything.  We don't expect to find read-only set on MEM,
     but stupid user tricks can produce them, so don't die.  */
  if (MEM_READONLY_P (x))
    return 0;

  if (nonoverlapping_memrefs_p (x, mem))
    return 0;

  if (! x_addr)
    x_addr = get_addr (XEXP (x, 0));

  if (! base_alias_check (x_addr, mem_addr, GET_MODE (x), mem_mode))
    return 0;

  x_addr = canon_rtx (x_addr);
  if (! memrefs_conflict_p (GET_MODE_SIZE (mem_mode), mem_addr,
                            SIZE_FOR_MODE (x), x_addr, 0))
    return 0;

  if (aliases_everything_p (x))
    return 1;

  /* We cannot use aliases_everything_p to test MEM, since we must look
     at MEM_MODE, rather than GET_MODE (MEM).  */
  if (mem_mode == QImode || GET_CODE (mem_addr) == AND)
    return 1;

  /* In true_dependence we also allow BLKmode to alias anything.  Why
     don't we do this in anti_dependence and output_dependence?  */
  if (mem_mode == BLKmode || GET_MODE (x) == BLKmode)
    return 1;

  if (fixed_scalar_and_varying_struct_p (mem, x, mem_addr, x_addr, varies))
    return 0;

  return rtx_refs_may_alias_p (x, mem, true);
}

/* Returns nonzero if a write to X might alias a previous read from
   (or, if WRITEP is nonzero, a write to) MEM.  */

static int
write_dependence_p (const_rtx mem, const_rtx x, int writep)
{
  rtx x_addr, mem_addr;
  const_rtx fixed_scalar;
  rtx base;

  if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
    return 1;

  /* (mem:BLK (scratch)) is a special mechanism to conflict with everything.
     This is used in epilogue deallocation functions.  */
  if (GET_MODE (x) == BLKmode && GET_CODE (XEXP (x, 0)) == SCRATCH)
    return 1;
  if (GET_MODE (mem) == BLKmode && GET_CODE (XEXP (mem, 0)) == SCRATCH)
    return 1;
  if (MEM_ALIAS_SET (x) == ALIAS_SET_MEMORY_BARRIER
      || MEM_ALIAS_SET (mem) == ALIAS_SET_MEMORY_BARRIER)
    return 1;

  /* A read from read-only memory can't conflict with read-write memory.  */
  if (!writep && MEM_READONLY_P (mem))
    return 0;

  if (nonoverlapping_memrefs_p (x, mem))
    return 0;

  x_addr = get_addr (XEXP (x, 0));
  mem_addr = get_addr (XEXP (mem, 0));

  if (! writep)
    {
      base = find_base_term (mem_addr);
      if (base && (GET_CODE (base) == LABEL_REF
                   || (GET_CODE (base) == SYMBOL_REF
                       && CONSTANT_POOL_ADDRESS_P (base))))
        return 0;
    }

  if (! base_alias_check (x_addr, mem_addr, GET_MODE (x),
                          GET_MODE (mem)))
    return 0;

  x_addr = canon_rtx (x_addr);
  mem_addr = canon_rtx (mem_addr);

  if (!memrefs_conflict_p (SIZE_FOR_MODE (mem), mem_addr,
                           SIZE_FOR_MODE (x), x_addr, 0))
    return 0;

  fixed_scalar
    = fixed_scalar_and_varying_struct_p (mem, x, mem_addr, x_addr,
                                         rtx_addr_varies_p);

  if ((fixed_scalar == mem && !aliases_everything_p (x))
      || (fixed_scalar == x && !aliases_everything_p (mem)))
    return 0;

  return rtx_refs_may_alias_p (x, mem, false);
}

/* Anti dependence: X is written after read in MEM takes place.  */

int
anti_dependence (const_rtx mem, const_rtx x)
{
  return write_dependence_p (mem, x, /*writep=*/0);
}

/* Output dependence: X is written after store in MEM takes place.  */

int
output_dependence (const_rtx mem, const_rtx x)
{
  return write_dependence_p (mem, x, /*writep=*/1);
}
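
/* Illustration (not from the original sources): in "r = *p; *q = 0;"
   the store must stay after the load whenever p and q may alias
   (anti_dependence); in "*p = 1; *q = 2;" the second store must stay
   after the first (output_dependence).  */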

void
init_alias_target (void)
{
  int i;

  memset (static_reg_base_value, 0, sizeof static_reg_base_value);

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    /* Check whether this register can hold an incoming pointer
       argument.  FUNCTION_ARG_REGNO_P tests outgoing register
       numbers, so translate if necessary due to register windows.  */
    if (FUNCTION_ARG_REGNO_P (OUTGOING_REGNO (i))
        && HARD_REGNO_MODE_OK (i, Pmode))
      static_reg_base_value[i]
        = gen_rtx_ADDRESS (VOIDmode, gen_rtx_REG (Pmode, i));

  static_reg_base_value[STACK_POINTER_REGNUM]
    = gen_rtx_ADDRESS (Pmode, stack_pointer_rtx);
  static_reg_base_value[ARG_POINTER_REGNUM]
    = gen_rtx_ADDRESS (Pmode, arg_pointer_rtx);
  static_reg_base_value[FRAME_POINTER_REGNUM]
    = gen_rtx_ADDRESS (Pmode, frame_pointer_rtx);
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
  static_reg_base_value[HARD_FRAME_POINTER_REGNUM]
    = gen_rtx_ADDRESS (Pmode, hard_frame_pointer_rtx);
#endif
}

/* Set MEMORY_MODIFIED when X modifies DATA (which is assumed to be a
   memory reference).  */
static bool memory_modified;
static void
memory_modified_1 (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
{
  if (MEM_P (x))
    {
      if (anti_dependence (x, (const_rtx) data)
          || output_dependence (x, (const_rtx) data))
        memory_modified = true;
    }
}

/* Return true when INSN possibly modifies the memory contents of MEM
   (i.e. the contents at MEM's address may be changed).  */
bool
memory_modified_in_insn_p (const_rtx mem, const_rtx insn)
{
  if (!INSN_P (insn))
    return false;
  memory_modified = false;
  note_stores (PATTERN (insn), memory_modified_1, CONST_CAST_RTX (mem));
  return memory_modified;
}
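
/* Usage sketch (an illustration, not from the original sources):

     if (memory_modified_in_insn_p (mem, insn))
       ... treat MEM as clobbered when moving code across INSN ...

   i.e. this is a may-write test for a single insn.  */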

/* Initialize the aliasing machinery.  Initialize the REG_KNOWN_VALUE
   array.  */

void
init_alias_analysis (void)
{
  unsigned int maxreg = max_reg_num ();
  int changed, pass;
  int i;
  unsigned int ui;
  rtx insn;

  timevar_push (TV_ALIAS_ANALYSIS);

  reg_known_value_size = maxreg - FIRST_PSEUDO_REGISTER;
  reg_known_value = GGC_CNEWVEC (rtx, reg_known_value_size);
  reg_known_equiv_p = XCNEWVEC (bool, reg_known_value_size);

  /* If we have memory allocated from the previous run, use it.  */
  if (old_reg_base_value)
    reg_base_value = old_reg_base_value;

  if (reg_base_value)
    VEC_truncate (rtx, reg_base_value, 0);

  VEC_safe_grow_cleared (rtx, gc, reg_base_value, maxreg);

  new_reg_base_value = XNEWVEC (rtx, maxreg);
  reg_seen = XNEWVEC (char, maxreg);

  /* The basic idea is that each pass through this loop will use the
     "constant" information from the previous pass to propagate alias
     information through another level of assignments.

     This could get expensive if the assignment chains are long.  Maybe
     we should throttle the number of iterations, possibly based on
     the optimization level or flag_expensive_optimizations.

     We could propagate more information in the first pass by making use
     of DF_REG_DEF_COUNT to determine immediately that the alias information
     for a pseudo is "constant".

     A program with an uninitialized variable can cause an infinite loop
     here.  Instead of doing a full dataflow analysis to detect such problems
     we just cap the number of iterations for the loop.

     The state of the arrays for the set chain in question does not matter
     since the program has undefined behavior.  */
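
  /* Illustration (not from the original sources): if a pseudo is copied
     from another pseudo whose base is only discovered later in insn
     order (e.g. around a loop back edge), one pass is not enough; each
     iteration below pushes base information one assignment further,
     capped by MAX_ALIAS_LOOP_PASSES.  */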

  pass = 0;
  do
    {
      /* Assume nothing will change this iteration of the loop.  */
      changed = 0;

      /* We want to assign the same IDs each iteration of this loop, so
         start counting from zero each iteration of the loop.  */
      unique_id = 0;

      /* We're at the start of the function each iteration through the
         loop, so we're copying arguments.  */
      copying_arguments = true;

      /* Wipe the potential alias information clean for this pass.  */
      memset (new_reg_base_value, 0, maxreg * sizeof (rtx));

      /* Wipe the reg_seen array clean.  */
      memset (reg_seen, 0, maxreg);

      /* Mark all hard registers which may contain an address.
         The stack, frame and argument pointers may contain an address.
         An argument register which can hold a Pmode value may contain
         an address even if it is not in BASE_REGS.

         The address expression is VOIDmode for an argument and
         Pmode for other registers.  */

      memcpy (new_reg_base_value, static_reg_base_value,
              FIRST_PSEUDO_REGISTER * sizeof (rtx));

      /* Walk the insns adding values to the new_reg_base_value array.  */
      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
        {
          if (INSN_P (insn))
            {
              rtx note, set;

#if defined (HAVE_prologue) || defined (HAVE_epilogue)
              /* The prologue/epilogue insns are not threaded onto the
                 insn chain until after reload has completed.  Thus,
                 there is no sense wasting time checking if INSN is in
                 the prologue/epilogue until after reload has completed.  */
              if (reload_completed
                  && prologue_epilogue_contains (insn))
                continue;
#endif

              /* If this insn has a noalias note, process it.  Otherwise,
                 scan for sets.  A simple set will have no side effects
                 which could change the base value of any other register.  */

              if (GET_CODE (PATTERN (insn)) == SET
                  && REG_NOTES (insn) != 0
                  && find_reg_note (insn, REG_NOALIAS, NULL_RTX))
                record_set (SET_DEST (PATTERN (insn)), NULL_RTX, NULL);
              else
                note_stores (PATTERN (insn), record_set, NULL);

              set = single_set (insn);

              if (set != 0
                  && REG_P (SET_DEST (set))
                  && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
                {
                  unsigned int regno = REGNO (SET_DEST (set));
                  rtx src = SET_SRC (set);
                  rtx t;

                  note = find_reg_equal_equiv_note (insn);
                  if (note && REG_NOTE_KIND (note) == REG_EQUAL
                      && DF_REG_DEF_COUNT (regno) != 1)
                    note = NULL_RTX;

                  if (note != NULL_RTX
                      && GET_CODE (XEXP (note, 0)) != EXPR_LIST
                      && ! rtx_varies_p (XEXP (note, 0), 1)
                      && ! reg_overlap_mentioned_p (SET_DEST (set),
                                                    XEXP (note, 0)))
                    {
                      set_reg_known_value (regno, XEXP (note, 0));
                      set_reg_known_equiv_p (regno,
                                             REG_NOTE_KIND (note) == REG_EQUIV);
                    }
                  else if (DF_REG_DEF_COUNT (regno) == 1
                           && GET_CODE (src) == PLUS
                           && REG_P (XEXP (src, 0))
                           && (t = get_reg_known_value (REGNO (XEXP (src, 0))))
                           && CONST_INT_P (XEXP (src, 1)))
                    {
                      /* A single def of the form "reg2 = reg1 + const"
                         lets us fold the constant into reg1's known
                         value.  */
                      t = plus_constant (t, INTVAL (XEXP (src, 1)));
                      set_reg_known_value (regno, t);
                      set_reg_known_equiv_p (regno, 0);
                    }
                  else if (DF_REG_DEF_COUNT (regno) == 1
                           && ! rtx_varies_p (src, 1))
                    {
                      set_reg_known_value (regno, src);
                      set_reg_known_equiv_p (regno, 0);
                    }
                }
            }
          else if (NOTE_P (insn)
                   && NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG)
            copying_arguments = false;
        }

      /* Now propagate values from new_reg_base_value to reg_base_value.  */
      gcc_assert (maxreg == (unsigned int) max_reg_num ());

      for (ui = 0; ui < maxreg; ui++)
        {
          if (new_reg_base_value[ui]
              && new_reg_base_value[ui] != VEC_index (rtx, reg_base_value, ui)
              && ! rtx_equal_p (new_reg_base_value[ui],
                                VEC_index (rtx, reg_base_value, ui)))
            {
              VEC_replace (rtx, reg_base_value, ui, new_reg_base_value[ui]);
              changed = 1;
            }
        }
    }
  while (changed && ++pass < MAX_ALIAS_LOOP_PASSES);

  /* Fill in the remaining entries.  */
  for (i = 0; i < (int) reg_known_value_size; i++)
    if (reg_known_value[i] == 0)
      reg_known_value[i] = regno_reg_rtx[i + FIRST_PSEUDO_REGISTER];

  /* Clean up.  */
  free (new_reg_base_value);
  new_reg_base_value = 0;
  free (reg_seen);
  reg_seen = 0;
  timevar_pop (TV_ALIAS_ANALYSIS);
}

void
end_alias_analysis (void)
{
  old_reg_base_value = reg_base_value;
  ggc_free (reg_known_value);
  reg_known_value = 0;
  reg_known_value_size = 0;
  free (reg_known_equiv_p);
  reg_known_equiv_p = 0;
}

#include "gt-alias.h"