/* Alias analysis for GNU C
   Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006,
   2007, 2008, 2009, 2010 Free Software Foundation, Inc.
   Contributed by John Carr (jfc@mit.edu).

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it under
   the terms of the GNU General Public License as published by the Free
   Software Foundation; either version 3, or (at your option) any later
   version.

   GCC is distributed in the hope that it will be useful, but WITHOUT ANY
   WARRANTY; without even the implied warranty of MERCHANTABILITY or
   FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
   for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "diagnostic-core.h"
#include "splay-tree.h"
#include "langhooks.h"
#include "tree-pass.h"
#include "ipa-type-escape.h"
#include "tree-ssa-alias.h"
#include "pointer-set.h"
#include "tree-flow.h"
/* The aliasing API provided here solves related but different problems:

   Say there exists (in C)

     struct X {
       struct Y y1;
       struct Z z2;
     } x1, *px1, *px2;

     struct Y y2, *py;
     struct Z z2, *pz;

   Consider the four questions:

   Can a store to x1 interfere with px2->y1?
   Can a store to x1 interfere with px2->z2?
   Can a store to x1 change the value pointed to by py?
   Can a store to x1 change the value pointed to by pz?

   The answer to these questions can be yes, yes, yes, and maybe.

   The first two questions can be answered with a simple examination
   of the type system.  If structure X contains a field of type Y then
   a store through a pointer to an X can overwrite any field that is
   contained (recursively) in an X (unless we know that px1 != px2).

   The last two questions can be solved in the same way as the first
   two questions, but this is too conservative.  The observation is
   that in some cases we can know which (if any) fields are addressed
   and whether those addresses are used in bad ways.  This analysis
   may be language specific.  In C, arbitrary operations may be
   applied to pointers.  However, there is some indication that this
   may be too conservative for some C++ types.

   The pass ipa-type-escape does this analysis for the types whose
   instances do not escape across the compilation boundary.

   Historically in GCC, these two problems were combined and a single
   data structure was used to represent the solution to these
   problems.  We now have two similar but different data structures.
   The data structure used to solve the last two questions is similar
   to the first, but does not contain the fields whose addresses are
   never taken.  For types that do escape the compilation unit, the
   two data structures will have identical information.  */
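
/* A minimal illustrative sketch (not part of the original sources): the
   reason the answer to the last question is only "maybe" is that it
   depends on how pz was derived.  Assuming hypothetical assignments

     pz = &x1.z2;        (a store to x1 can now change *pz)
     pz = &z2;           (a store to x1 cannot change *pz)

   only an analysis that tracks which fields have their addresses taken,
   such as the ipa-type-escape pass mentioned above, can tell these two
   cases apart; the type system alone must answer "yes" conservatively.  */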
/* The alias sets assigned to MEMs assist the back-end in determining
   which MEMs can alias which other MEMs.  In general, two MEMs in
   different alias sets cannot alias each other, with one important
   exception.  Consider something like:

     struct S { int i; double d; };

   a store to an `S' can alias something of either type `int' or type
   `double'.  (However, a store to an `int' cannot alias a `double'
   and vice versa.)  We indicate this via a tree structure that looks
   like:

		struct S
		 /   \
		/     \
	      int    double

   (The arrows are directed and point downwards.)
   In this situation we say the alias set for `struct S' is the
   `superset' and that those for `int' and `double' are `subsets'.

   To see whether two alias sets can point to the same memory, we must
   see if either alias set is a subset of the other.  We need not trace
   past immediate descendants, however, since we propagate all
   grandchildren up one level.

   Alias set zero is implicitly a superset of all other alias sets.
   However, there is no actual entry for alias set zero.  It is an
   error to attempt to explicitly construct a subset of zero.  */
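
/* For instance (an illustrative sketch, not part of the original code):
   for the `struct S' above, record_component_aliases effectively performs

     alias_set_type s_set = get_alias_set (S_type);
     record_alias_subset (s_set, get_alias_set (integer_type_node));
     record_alias_subset (s_set, get_alias_set (double_type_node));

   where S_type is a stand-in for the RECORD_TYPE of `struct S'.  After
   that, alias_sets_conflict_p reports a conflict between `struct S' and
   `int' (and `double'), while `int' and `double' still do not conflict
   with each other.  */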
struct GTY(()) alias_set_entry_d
{
  /* The alias set number, as stored in MEM_ALIAS_SET.  */
  alias_set_type alias_set;

  /* Nonzero if this alias set would have a child of zero: this effectively
     makes this alias set the same as alias set zero.  */
  int has_zero_child;

  /* The children of the alias set.  These are not just the immediate
     children, but, in fact, all descendants.  So, if we have:

       struct T { struct S s; float f; }

     continuing our example above, the children here will be all of
     `int', `double', `float', and `struct S'.  */
  splay_tree GTY((param1_is (int), param2_is (int))) children;
};
typedef struct alias_set_entry_d *alias_set_entry;
static int rtx_equal_for_memref_p (const_rtx, const_rtx);
static int memrefs_conflict_p (int, rtx, int, rtx, HOST_WIDE_INT);
static void record_set (rtx, const_rtx, void *);
static int base_alias_check (rtx, rtx, enum machine_mode,
			     enum machine_mode);
static rtx find_base_value (rtx);
static int mems_in_disjoint_alias_sets_p (const_rtx, const_rtx);
static int insert_subset_children (splay_tree_node, void*);
static alias_set_entry get_alias_set_entry (alias_set_type);
static const_rtx fixed_scalar_and_varying_struct_p (const_rtx, const_rtx,
						    rtx, rtx,
						    bool (*) (const_rtx, bool));
static int aliases_everything_p (const_rtx);
static bool nonoverlapping_component_refs_p (const_tree, const_tree);
static tree decl_for_component_ref (tree);
static rtx adjust_offset_for_component_ref (tree, rtx);
static int write_dependence_p (const_rtx, const_rtx, int);

static void memory_modified_1 (rtx, const_rtx, void *);
/* Set up all info needed to perform alias analysis on memory references.  */

/* Returns the size in bytes of the mode of X.  */
#define SIZE_FOR_MODE(X) (GET_MODE_SIZE (GET_MODE (X)))

/* Returns nonzero if MEM1 and MEM2 do not alias because they are in
   different alias sets.  We ignore alias sets in functions making use
   of variable arguments because the va_arg macros on some systems do
   not work properly.  */
#define DIFFERENT_ALIAS_SETS_P(MEM1, MEM2) \
  mems_in_disjoint_alias_sets_p (MEM1, MEM2)

/* Cap the number of passes we make over the insns propagating alias
   information through set chains.  10 is a completely arbitrary choice.  */
#define MAX_ALIAS_LOOP_PASSES 10
/* reg_base_value[N] gives an address to which register N is related.
   If all sets after the first add or subtract to the current value
   or otherwise modify it so it does not point to a different top level
   object, reg_base_value[N] is equal to the address part of the source
   of the first set.

   A base address can be an ADDRESS, SYMBOL_REF, or LABEL_REF.  ADDRESS
   expressions represent certain special values: function arguments and
   the stack, frame, and argument pointers.

   The contents of an ADDRESS are not normally used; the mode of the
   ADDRESS determines whether the ADDRESS is a function argument or some
   other special value.  Pointer equality, not rtx_equal_p, determines whether
   two ADDRESS expressions refer to the same base address.

   The only use of the contents of an ADDRESS is for determining if the
   current function performs nonlocal memory references for the
   purposes of marking the function as a constant function.  */
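
/* An illustrative sketch (not from the original sources): after the
   hypothetical sequence

     p = &a;            the base value of p is the one for `a'
     q = &b;            the base value of q is the one for `b'
     p = p + i;         the base value of p is unchanged

   find_base_value/find_base_term recover these bases, and
   base_alias_check can conclude that *p and *q do not alias because
   their known bases are distinct objects.  */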
static GTY(()) VEC(rtx,gc) *reg_base_value;
static rtx *new_reg_base_value;

/* We keep a copy of the old array around to reduce the amount of garbage
   produced.  About 8% of the garbage produced was attributed to this
   array.  */
static GTY((deletable)) VEC(rtx,gc) *old_reg_base_value;

#define static_reg_base_value \
  (this_target_rtl->x_static_reg_base_value)

#define REG_BASE_VALUE(X) \
  (REGNO (X) < VEC_length (rtx, reg_base_value) \
   ? VEC_index (rtx, reg_base_value, REGNO (X)) : 0)
/* Vector indexed by N giving the initial (unchanging) value known for
   pseudo-register N.  This array is initialized in init_alias_analysis,
   and does not change until end_alias_analysis is called.  */
static GTY((length("reg_known_value_size"))) rtx *reg_known_value;

/* Indicates number of valid entries in reg_known_value.  */
static GTY(()) unsigned int reg_known_value_size;

/* Vector recording for each reg_known_value whether it is due to a
   REG_EQUIV note.  Future passes (viz., reload) may replace the
   pseudo with the equivalent expression and so we account for the
   dependences that would be introduced if that happens.

   The REG_EQUIV notes created in assign_parms may mention the arg
   pointer, and there are explicit insns in the RTL that modify the
   arg pointer.  Thus we must ensure that such insns don't get
   scheduled across each other because that would invalidate the
   REG_EQUIV notes.  One could argue that the REG_EQUIV notes are
   wrong, but solving the problem in the scheduler will likely give
   better code, so we do it here.  */
static bool *reg_known_equiv_p;

/* True when scanning insns from the start of the rtl to the
   NOTE_INSN_FUNCTION_BEG note.  */
static bool copying_arguments;

DEF_VEC_P(alias_set_entry);
DEF_VEC_ALLOC_P(alias_set_entry,gc);

/* The splay-tree used to store the various alias set entries.  */
static GTY (()) VEC(alias_set_entry,gc) *alias_sets;
/* Build a decomposed reference object for querying the alias-oracle
   from the MEM rtx and store it in *REF.
   Returns false if MEM is not suitable for the alias-oracle.  */

static bool
ao_ref_from_mem (ao_ref *ref, const_rtx mem)
{
  tree expr = MEM_EXPR (mem);
  tree base;

  if (!expr)
    return false;

  ao_ref_init (ref, expr);

  /* Get the base of the reference and see if we have to reject or
     adjust it.  */
  base = ao_ref_base (ref);
  if (base == NULL_TREE)
    return false;

  /* The tree oracle doesn't like to have these.  */
  if (TREE_CODE (base) == FUNCTION_DECL
      || TREE_CODE (base) == LABEL_DECL)
    return false;

  /* If this is a pointer dereference of a non-SSA_NAME punt.
     ??? We could replace it with a pointer to anything.  */
  if ((INDIRECT_REF_P (base)
       || TREE_CODE (base) == MEM_REF)
      && TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME)
    return false;
  if (TREE_CODE (base) == TARGET_MEM_REF
      && TREE_CODE (TMR_BASE (base)) != SSA_NAME)
    return false;

  /* If this is a reference based on a partitioned decl replace the
     base with an INDIRECT_REF of the pointer representative we
     created during stack slot partitioning.  */
  if (TREE_CODE (base) == VAR_DECL
      && ! TREE_STATIC (base)
      && cfun->gimple_df->decls_to_pointers != NULL)
    {
      void *namep;
      namep = pointer_map_contains (cfun->gimple_df->decls_to_pointers, base);
      if (namep)
	ref->base = build_simple_mem_ref (*(tree *)namep);
    }
  else if (TREE_CODE (base) == TARGET_MEM_REF
	   && TREE_CODE (TMR_BASE (base)) == ADDR_EXPR
	   && TREE_CODE (TREE_OPERAND (TMR_BASE (base), 0)) == VAR_DECL
	   && ! TREE_STATIC (TREE_OPERAND (TMR_BASE (base), 0))
	   && cfun->gimple_df->decls_to_pointers != NULL)
    {
      void *namep;
      namep = pointer_map_contains (cfun->gimple_df->decls_to_pointers,
				    TREE_OPERAND (TMR_BASE (base), 0));
      if (namep)
	ref->base = build_simple_mem_ref (*(tree *)namep);
    }

  ref->ref_alias_set = MEM_ALIAS_SET (mem);

  /* If MEM_OFFSET or MEM_SIZE are NULL we have to punt.
     Keep points-to related information though.  */
  if (!MEM_OFFSET (mem)
      || !MEM_SIZE (mem))
    {
      ref->ref = NULL_TREE;
      ref->offset = 0;
      ref->size = -1;
      ref->max_size = -1;
      return true;
    }

  /* If the base decl is a parameter we can have negative MEM_OFFSET in
     case of promoted subregs on bigendian targets.  Trust the MEM_EXPR
     here.  */
  if (INTVAL (MEM_OFFSET (mem)) < 0
      && ((INTVAL (MEM_SIZE (mem)) + INTVAL (MEM_OFFSET (mem)))
	  * BITS_PER_UNIT) == ref->size)
    return true;

  ref->offset += INTVAL (MEM_OFFSET (mem)) * BITS_PER_UNIT;
  ref->size = INTVAL (MEM_SIZE (mem)) * BITS_PER_UNIT;

  /* The MEM may extend into adjacent fields, so adjust max_size if
     necessary.  */
  if (ref->max_size != -1
      && ref->size > ref->max_size)
    ref->max_size = ref->size;

  /* If MEM_OFFSET and MEM_SIZE get us outside of the base object of
     the MEM_EXPR punt.  This happens for STRICT_ALIGNMENT targets a lot.  */
  if (MEM_EXPR (mem) != get_spill_slot_decl (false)
      && (ref->offset < 0
	  || (DECL_P (ref->base)
	      && (!host_integerp (DECL_SIZE (ref->base), 1)
		  || (TREE_INT_CST_LOW (DECL_SIZE ((ref->base)))
		      < (unsigned HOST_WIDE_INT) (ref->offset + ref->size))))))
    return false;

  return true;
}
/* Query the alias-oracle on whether the two memory rtx X and MEM may
   alias.  If TBAA_P is set also apply TBAA.  Returns true if the
   two rtxen may alias, false otherwise.  */

static bool
rtx_refs_may_alias_p (const_rtx x, const_rtx mem, bool tbaa_p)
{
  ao_ref ref1, ref2;

  if (!ao_ref_from_mem (&ref1, x)
      || !ao_ref_from_mem (&ref2, mem))
    return true;

  return refs_may_alias_p_1 (&ref1, &ref2,
			     tbaa_p
			     && MEM_ALIAS_SET (x) != 0
			     && MEM_ALIAS_SET (mem) != 0);
}
/* Returns a pointer to the alias set entry for ALIAS_SET, if there is
   such an entry, or NULL otherwise.  */

static inline alias_set_entry
get_alias_set_entry (alias_set_type alias_set)
{
  return VEC_index (alias_set_entry, alias_sets, alias_set);
}
/* Returns nonzero if the alias sets for MEM1 and MEM2 are such that
   the two MEMs cannot alias each other.  */

static int
mems_in_disjoint_alias_sets_p (const_rtx mem1, const_rtx mem2)
{
  /* Perform a basic sanity check.  Namely, that there are no alias sets
     if we're not using strict aliasing.  This helps to catch bugs
     whereby someone uses PUT_CODE, but doesn't clear MEM_ALIAS_SET, or
     where a MEM is allocated in some way other than by the use of
     gen_rtx_MEM, and the MEM_ALIAS_SET is not cleared.  If we begin to
     use alias sets to indicate that spilled registers cannot alias each
     other, we might need to remove this check.  */
  gcc_assert (flag_strict_aliasing
	      || (!MEM_ALIAS_SET (mem1) && !MEM_ALIAS_SET (mem2)));

  return ! alias_sets_conflict_p (MEM_ALIAS_SET (mem1), MEM_ALIAS_SET (mem2));
}
/* Insert the NODE into the splay tree given by DATA.  Used by
   record_alias_subset via splay_tree_foreach.  */

static int
insert_subset_children (splay_tree_node node, void *data)
{
  splay_tree_insert ((splay_tree) data, node->key, node->value);

  return 0;
}
/* Return true if the first alias set is a subset of the second.  */

bool
alias_set_subset_of (alias_set_type set1, alias_set_type set2)
{
  alias_set_entry ase;

  /* Everything is a subset of the "aliases everything" set.  */
  if (set2 == 0)
    return true;

  /* Otherwise, check if set1 is a subset of set2.  */
  ase = get_alias_set_entry (set2);
  if (ase != 0
      && (ase->has_zero_child
	  || splay_tree_lookup (ase->children,
				(splay_tree_key) set1)))
    return true;
  return false;
}
/* Return 1 if the two specified alias sets may conflict.  */

int
alias_sets_conflict_p (alias_set_type set1, alias_set_type set2)
{
  alias_set_entry ase;

  /* The easy case.  */
  if (alias_sets_must_conflict_p (set1, set2))
    return 1;

  /* See if the first alias set is a subset of the second.  */
  ase = get_alias_set_entry (set1);
  if (ase != 0
      && (ase->has_zero_child
	  || splay_tree_lookup (ase->children,
				(splay_tree_key) set2)))
    return 1;

  /* Now do the same, but with the alias sets reversed.  */
  ase = get_alias_set_entry (set2);
  if (ase != 0
      && (ase->has_zero_child
	  || splay_tree_lookup (ase->children,
				(splay_tree_key) set1)))
    return 1;

  /* The two alias sets are distinct and neither one is the
     child of the other.  Therefore, they cannot conflict.  */
  return 0;
}
/* Return 1 if the two specified alias sets will always conflict.  */

int
alias_sets_must_conflict_p (alias_set_type set1, alias_set_type set2)
{
  if (set1 == 0 || set2 == 0 || set1 == set2)
    return 1;

  return 0;
}
/* Return 1 if any MEM object of type T1 will always conflict (using the
   dependency routines in this file) with any MEM object of type T2.
   This is used when allocating temporary storage.  If T1 and/or T2 are
   NULL_TREE, it means we know nothing about the storage.  */
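
/* For instance (an illustrative sketch, not code from this file): the
   temporary-slot code in function.c asks, roughly,

     if (objects_must_conflict_p (slot_type, type))
       ... the existing stack temporary may be reused ...

   so that a slot is only recycled for an object whose accesses are
   guaranteed to conflict with accesses to the slot's previous
   contents.  */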

int
objects_must_conflict_p (tree t1, tree t2)
{
  alias_set_type set1, set2;

  /* If neither has a type specified, we don't know if they'll conflict
     because we may be using them to store objects of various types, for
     example the argument and local variables areas of inlined functions.  */
  if (t1 == 0 && t2 == 0)
    return 0;

  /* If they are the same type, they must conflict.  */
  if (t1 == t2
      /* Likewise if both are volatile.  */
      || (t1 != 0 && TYPE_VOLATILE (t1) && t2 != 0 && TYPE_VOLATILE (t2)))
    return 1;

  set1 = t1 ? get_alias_set (t1) : 0;
  set2 = t2 ? get_alias_set (t2) : 0;

  /* We can't use alias_sets_conflict_p because we must make sure
     that every subtype of t1 will conflict with every subtype of
     t2 for which a pair of subobjects of these respective subtypes
     overlaps on the stack.  */
  return alias_sets_must_conflict_p (set1, set2);
}
/* Return true if all nested component references handled by
   get_inner_reference in T are such that we should use the alias set
   provided by the object at the heart of T.

   This is true for non-addressable components (which don't have their
   own alias set), as well as components of objects in alias set zero.
   This latter point is a special case wherein we wish to override the
   alias set used by the component, but we don't have per-FIELD_DECL
   assignable alias sets.  */

bool
component_uses_parent_alias_set (const_tree t)
{
  while (1)
    {
      /* If we're at the end, it vacuously uses its own alias set.  */
      if (!handled_component_p (t))
	return false;

      switch (TREE_CODE (t))
	{
	case COMPONENT_REF:
	  if (DECL_NONADDRESSABLE_P (TREE_OPERAND (t, 1)))
	    return true;
	  break;

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  if (TYPE_NONALIASED_COMPONENT (TREE_TYPE (TREE_OPERAND (t, 0))))
	    return true;
	  break;

	default:
	  /* Bitfields and casts are never addressable.  */
	  return true;
	}

      t = TREE_OPERAND (t, 0);
      if (get_alias_set (TREE_TYPE (t)) == 0)
	return true;
    }
}
/* Return the alias set for the memory pointed to by T, which may be
   either a type or an expression.  Return -1 if there is nothing
   special about dereferencing T.  */

static alias_set_type
get_deref_alias_set_1 (tree t)
{
  /* If we're not doing any alias analysis, just assume everything
     aliases everything else.  */
  if (!flag_strict_aliasing)
    return 0;

  /* All we care about is the type.  */
  if (! TYPE_P (t))
    t = TREE_TYPE (t);

  /* If we have an INDIRECT_REF via a void pointer, we don't
     know anything about what that might alias.  Likewise if the
     pointer is marked that way.  */
  if (TREE_CODE (TREE_TYPE (t)) == VOID_TYPE
      || TYPE_REF_CAN_ALIAS_ALL (t))
    return 0;

  return -1;
}
/* Return the alias set for the memory pointed to by T, which may be
   either a type or an expression.  */

alias_set_type
get_deref_alias_set (tree t)
{
  alias_set_type set = get_deref_alias_set_1 (t);

  /* Fall back to the alias-set of the pointed-to type.  */
  if (set == -1)
    {
      if (! TYPE_P (t))
	t = TREE_TYPE (t);
      set = get_alias_set (TREE_TYPE (t));
    }

  return set;
}
/* Return the alias set for T, which may be either a type or an
   expression.  Call language-specific routine for help, if needed.  */

alias_set_type
get_alias_set (tree t)
{
  alias_set_type set;

  /* If we're not doing any alias analysis, just assume everything
     aliases everything else.  Also return 0 if this or its type is
     an error.  */
  if (! flag_strict_aliasing || t == error_mark_node
      || (! TYPE_P (t)
	  && (TREE_TYPE (t) == 0 || TREE_TYPE (t) == error_mark_node)))
    return 0;

  /* We can be passed either an expression or a type.  This and the
     language-specific routine may make mutually-recursive calls to each other
     to figure out what to do.  At each juncture, we see if this is a tree
     that the language may need to handle specially.  First handle things that
     aren't types.  */
  if (! TYPE_P (t))
    {
      tree inner;

      /* Give the language a chance to do something with this tree
	 before we look at it.  */
      STRIP_NOPS (t);
      set = lang_hooks.get_alias_set (t);
      if (set != -1)
	return set;

      /* Get the base object of the reference.  */
      inner = t;
      while (handled_component_p (inner))
	{
	  /* If there is a VIEW_CONVERT_EXPR in the chain we cannot use
	     the type of any component references that wrap it to
	     determine the alias-set.  */
	  if (TREE_CODE (inner) == VIEW_CONVERT_EXPR)
	    t = TREE_OPERAND (inner, 0);
	  inner = TREE_OPERAND (inner, 0);
	}

      /* Handle pointer dereferences here, they can override the
	 alias-set.  */
      if (INDIRECT_REF_P (inner))
	{
	  set = get_deref_alias_set_1 (TREE_OPERAND (inner, 0));
	  if (set != -1)
	    return set;
	}
      else if (TREE_CODE (inner) == TARGET_MEM_REF)
	return get_deref_alias_set (TMR_OFFSET (inner));
      else if (TREE_CODE (inner) == MEM_REF)
	{
	  set = get_deref_alias_set_1 (TREE_OPERAND (inner, 1));
	  if (set != -1)
	    return set;
	}

      /* If the innermost reference is a MEM_REF that has a
	 conversion embedded treat it like a VIEW_CONVERT_EXPR above,
	 using the memory access type for determining the alias-set.  */
      if (TREE_CODE (inner) == MEM_REF
	  && TYPE_MAIN_VARIANT (TREE_TYPE (inner))
	     != TYPE_MAIN_VARIANT
		  (TREE_TYPE (TREE_TYPE (TREE_OPERAND (inner, 1)))))
	return get_deref_alias_set (TREE_OPERAND (inner, 1));

      /* Otherwise, pick up the outermost object that we could have a pointer
	 to, processing conversions as above.  */
      while (component_uses_parent_alias_set (t))
	t = TREE_OPERAND (t, 0);

      /* If we've already determined the alias set for a decl, just return
	 it.  This is necessary for C++ anonymous unions, whose component
	 variables don't look like union members (boo!).  */
      if (TREE_CODE (t) == VAR_DECL
	  && DECL_RTL_SET_P (t) && MEM_P (DECL_RTL (t)))
	return MEM_ALIAS_SET (DECL_RTL (t));

      /* Now all we care about is the type.  */
      t = TREE_TYPE (t);
    }

  /* Variant qualifiers don't affect the alias set, so get the main
     variant.  */
  t = TYPE_MAIN_VARIANT (t);

  /* Always use the canonical type as well.  If this is a type that
     requires structural comparisons to identify compatible types
     use alias set zero.  */
  if (TYPE_STRUCTURAL_EQUALITY_P (t))
    {
      /* Allow the language to specify another alias set for this
	 type.  */
      set = lang_hooks.get_alias_set (t);
      if (set != -1)
	return set;
      return 0;
    }

  t = TYPE_CANONICAL (t);

  /* Canonical types shouldn't form a tree nor should the canonical
     type require structural equality checks.  */
  gcc_checking_assert (TYPE_CANONICAL (t) == t
		       && !TYPE_STRUCTURAL_EQUALITY_P (t));

  /* If this is a type with a known alias set, return it.  */
  if (TYPE_ALIAS_SET_KNOWN_P (t))
    return TYPE_ALIAS_SET (t);

  /* We don't want to set TYPE_ALIAS_SET for incomplete types.  */
  if (!COMPLETE_TYPE_P (t))
    {
      /* For arrays with unknown size the conservative answer is the
	 alias set of the element type.  */
      if (TREE_CODE (t) == ARRAY_TYPE)
	return get_alias_set (TREE_TYPE (t));

      /* But return zero as a conservative answer for incomplete types.  */
      return 0;
    }

  /* See if the language has special handling for this type.  */
  set = lang_hooks.get_alias_set (t);
  if (set != -1)
    return set;

  /* There are no objects of FUNCTION_TYPE, so there's no point in
     using up an alias set for them.  (There are, of course, pointers
     and references to functions, but that's different.)  */
  else if (TREE_CODE (t) == FUNCTION_TYPE || TREE_CODE (t) == METHOD_TYPE)
    set = 0;

  /* Unless the language specifies otherwise, let vector types alias
     their components.  This avoids some nasty type punning issues in
     normal usage.  And indeed lets vectors be treated more like an
     array slice.  */
  else if (TREE_CODE (t) == VECTOR_TYPE)
    set = get_alias_set (TREE_TYPE (t));

  /* Unless the language specifies otherwise, treat array types the
     same as their components.  This avoids the asymmetry we get
     through recording the components.  Consider accessing a
     character(kind=1) through a reference to a character(kind=1)[1:1].
     Or consider if we want to assign integer(kind=4)[0:D.1387] and
     integer(kind=4)[4] the same alias set or not.
     Just be pragmatic here and make sure the array and its element
     type get the same alias set assigned.  */
  else if (TREE_CODE (t) == ARRAY_TYPE && !TYPE_NONALIASED_COMPONENT (t))
    set = get_alias_set (TREE_TYPE (t));

  /* From the former common C and C++ langhook implementation:

     Unfortunately, there is no canonical form of a pointer type.
     In particular, if we have `typedef int I', then `int *', and
     `I *' are different types.  So, we have to pick a canonical
     representative.  We do this below.

     Technically, this approach is actually more conservative than
     it needs to be.  In particular, `const int *' and `int *'
     should be in different alias sets, according to the C and C++
     standard, since their types are not the same, and so,
     technically, an `int **' and `const int **' cannot point at
     the same thing.

     But, the standard is wrong.  In particular, this code is
     legal C++:

       int *ip;
       int **ipp = &ip;
       const int* const* cipp = ipp;

     And, it doesn't make sense for that to be legal unless you
     can dereference IPP and CIPP.  So, we ignore cv-qualifiers on
     the pointed-to types.  This issue has been reported to the
     C++ committee.

     In addition to the above canonicalization issue, with LTO
     we should also canonicalize `T (*)[]' to `T *' avoiding
     alias issues with pointer-to element types and pointer-to
     array types.

     Likewise we need to deal with the situation of incomplete
     pointed-to types and make `*(struct X **)&a' and
     `*(struct X {} **)&a' alias.  Otherwise we will have to
     guarantee that all pointer-to incomplete type variants
     will be replaced by pointer-to complete type variants if
     they are available.

     With LTO the convenient situation of using `void *' to
     access and store any pointer type will also become
     more apparent (and `void *' is just another pointer-to
     incomplete type).  Assigning alias-set zero to `void *'
     and all pointer-to incomplete types is not an appealing
     solution.  Assigning an effective alias-set zero only
     affecting pointers might be - by recording proper subset
     relationships of all pointer alias-sets.

     Pointer-to function types are another grey area which
     needs caution.  Globbing them all into one alias-set
     or the above effective zero set would work.

     For now just assign the same alias-set to all pointers.
     That's simple and avoids all the above problems.  */
  else if (POINTER_TYPE_P (t)
	   && t != ptr_type_node)
    return get_alias_set (ptr_type_node);

  /* Otherwise make a new alias set for this type.  */
  else
    set = new_alias_set ();

  TYPE_ALIAS_SET (t) = set;

  /* If this is an aggregate type or a complex type, we must record any
     component aliasing information.  */
  if (AGGREGATE_TYPE_P (t) || TREE_CODE (t) == COMPLEX_TYPE)
    record_component_aliases (t);

  return set;
}
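
/* Typical use from the RTL expanders (an illustrative sketch only, not
   code from this file):

     rtx mem = gen_rtx_MEM (mode, addr);
     set_mem_alias_set (mem, get_alias_set (type));

   after which DIFFERENT_ALIAS_SETS_P / mems_in_disjoint_alias_sets_p
   can tell this MEM apart from MEMs whose types cannot conflict with
   TYPE.  */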
/* Return a brand-new alias set.  */

alias_set_type
new_alias_set (void)
{
  if (flag_strict_aliasing)
    {
      if (alias_sets == 0)
	VEC_safe_push (alias_set_entry, gc, alias_sets, 0);
      VEC_safe_push (alias_set_entry, gc, alias_sets, 0);
      return VEC_length (alias_set_entry, alias_sets) - 1;
    }
  else
    return 0;
}
/* Indicate that things in SUBSET can alias things in SUPERSET, but that
   not everything that aliases SUPERSET also aliases SUBSET.  For example,
   in C, a store to an `int' can alias a load of a structure containing an
   `int', and vice versa.  But it can't alias a load of a 'double' member
   of the same structure.  Here, the structure would be the SUPERSET and
   `int' the SUBSET.  This relationship is also described in the comment at
   the beginning of this file.

   This function should be called only once per SUPERSET/SUBSET pair.

   It is illegal for SUPERSET to be zero; everything is implicitly a
   subset of alias set zero.  */
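
/* For example (an illustrative sketch, not code from this file): for a
   structure type the component-walking code below ends up doing calls
   of the form

     record_alias_subset (get_alias_set (struct_type),
			  get_alias_set (field_type));

   once per addressable field, where struct_type and field_type stand in
   for the trees walked by record_component_aliases.  */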

void
record_alias_subset (alias_set_type superset, alias_set_type subset)
{
  alias_set_entry superset_entry;
  alias_set_entry subset_entry;

  /* It is possible in complex type situations for both sets to be the same,
     in which case we can ignore this operation.  */
  if (superset == subset)
    return;

  gcc_assert (superset);

  superset_entry = get_alias_set_entry (superset);
  if (superset_entry == 0)
    {
      /* Create an entry for the SUPERSET, so that we have a place to
	 attach the SUBSET.  */
      superset_entry = ggc_alloc_cleared_alias_set_entry_d ();
      superset_entry->alias_set = superset;
      superset_entry->children
	= splay_tree_new_ggc (splay_tree_compare_ints,
			      ggc_alloc_splay_tree_scalar_scalar_splay_tree_s,
			      ggc_alloc_splay_tree_scalar_scalar_splay_tree_node_s);
      superset_entry->has_zero_child = 0;
      VEC_replace (alias_set_entry, alias_sets, superset, superset_entry);
    }

  if (subset == 0)
    superset_entry->has_zero_child = 1;
  else
    {
      subset_entry = get_alias_set_entry (subset);
      /* If there is an entry for the subset, enter all of its children
	 (if they are not already present) as children of the SUPERSET.  */
      if (subset_entry)
	{
	  if (subset_entry->has_zero_child)
	    superset_entry->has_zero_child = 1;

	  splay_tree_foreach (subset_entry->children, insert_subset_children,
			      superset_entry->children);
	}

      /* Enter the SUBSET itself as a child of the SUPERSET.  */
      splay_tree_insert (superset_entry->children,
			 (splay_tree_key) subset, 0);
    }
}
/* Record that component types of TYPE, if any, are part of that type for
   aliasing purposes.  For record types, we only record component types
   for fields that are not marked non-addressable.  For array types, we
   only record the component type if it is not marked non-aliased.  */
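
/* For instance (an illustrative sketch, not part of the original code):
   given

     struct T { struct S s; float f; };

   this function records the alias sets of `struct S' and `float' as
   children of the alias set of `struct T', and record_alias_subset in
   turn copies the children of `struct S' (here `int' and `double') into
   the entry for `struct T', matching the tree pictured near the top of
   this file.  */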

void
record_component_aliases (tree type)
{
  alias_set_type superset = get_alias_set (type);
  tree field;

  if (superset == 0)
    return;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      /* Recursively record aliases for the base classes, if there are any.  */
      if (TYPE_BINFO (type))
	{
	  int i;
	  tree binfo, base_binfo;

	  for (binfo = TYPE_BINFO (type), i = 0;
	       BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
	    record_alias_subset (superset,
				 get_alias_set (BINFO_TYPE (base_binfo)));
	}
      for (field = TYPE_FIELDS (type); field != 0; field = DECL_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL && !DECL_NONADDRESSABLE_P (field))
	  record_alias_subset (superset, get_alias_set (TREE_TYPE (field)));
      break;

    case COMPLEX_TYPE:
      record_alias_subset (superset, get_alias_set (TREE_TYPE (type)));
      break;

    /* VECTOR_TYPE and ARRAY_TYPE share the alias set with their
       element type.  */

    default:
      break;
    }
}
/* Allocate an alias set for use in storing and reading from the varargs
   spill area.  */

static GTY(()) alias_set_type varargs_set = -1;

alias_set_type
get_varargs_alias_set (void)
{
#if 1
  /* We now lower VA_ARG_EXPR, and there's currently no way to attach the
     varargs alias set to an INDIRECT_REF (FIXME!), so we can't
     consistently use the varargs alias set for loads from the varargs
     area.  So don't use it anywhere.  */
  return 0;
#else
  if (varargs_set == -1)
    varargs_set = new_alias_set ();

  return varargs_set;
#endif
}
/* Likewise, but used for the fixed portions of the frame, e.g., register
   save areas.  */

static GTY(()) alias_set_type frame_set = -1;

alias_set_type
get_frame_alias_set (void)
{
  if (frame_set == -1)
    frame_set = new_alias_set ();

  return frame_set;
}
/* Inside SRC, the source of a SET, find a base address.  */

static rtx
find_base_value (rtx src)
{
  unsigned int regno;

#if defined (FIND_BASE_TERM)
  /* Try machine-dependent ways to find the base term.  */
  src = FIND_BASE_TERM (src);
#endif

  switch (GET_CODE (src))
    {
    case SYMBOL_REF:
    case LABEL_REF:
      return src;

    case REG:
      regno = REGNO (src);
      /* At the start of a function, argument registers have known base
	 values which may be lost later.  Returning an ADDRESS
	 expression here allows optimization based on argument values
	 even when the argument registers are used for other purposes.  */
      if (regno < FIRST_PSEUDO_REGISTER && copying_arguments)
	return new_reg_base_value[regno];

      /* If a pseudo has a known base value, return it.  Do not do this
	 for non-fixed hard regs since it can result in a circular
	 dependency chain for registers which have values at function entry.

	 The test above is not sufficient because the scheduler may move
	 a copy out of an arg reg past the NOTE_INSN_FUNCTION_BEGIN.  */
      if ((regno >= FIRST_PSEUDO_REGISTER || fixed_regs[regno])
	  && regno < VEC_length (rtx, reg_base_value))
	{
	  /* If we're inside init_alias_analysis, use new_reg_base_value
	     to reduce the number of relaxation iterations.  */
	  if (new_reg_base_value && new_reg_base_value[regno]
	      && DF_REG_DEF_COUNT (regno) == 1)
	    return new_reg_base_value[regno];

	  if (VEC_index (rtx, reg_base_value, regno))
	    return VEC_index (rtx, reg_base_value, regno);
	}

      return 0;

    case MEM:
      /* Check for an argument passed in memory.  Only record in the
	 copying-arguments block; it is too hard to track changes
	 otherwise.  */
      if (copying_arguments
	  && (XEXP (src, 0) == arg_pointer_rtx
	      || (GET_CODE (XEXP (src, 0)) == PLUS
		  && XEXP (XEXP (src, 0), 0) == arg_pointer_rtx)))
	return gen_rtx_ADDRESS (VOIDmode, src);
      return 0;

    case CONST:
      src = XEXP (src, 0);
      if (GET_CODE (src) != PLUS && GET_CODE (src) != MINUS)
	break;

      /* ... fall through ...  */

    case PLUS:
    case MINUS:
      {
	rtx temp, src_0 = XEXP (src, 0), src_1 = XEXP (src, 1);

	/* If either operand is a REG that is a known pointer, then it
	   is the base.  */
	if (REG_P (src_0) && REG_POINTER (src_0))
	  return find_base_value (src_0);
	if (REG_P (src_1) && REG_POINTER (src_1))
	  return find_base_value (src_1);

	/* If either operand is a REG, then see if we already have
	   a known value for it.  */
	if (REG_P (src_0))
	  {
	    temp = find_base_value (src_0);
	    if (temp != 0)
	      src_0 = temp;
	  }
	if (REG_P (src_1))
	  {
	    temp = find_base_value (src_1);
	    if (temp != 0)
	      src_1 = temp;
	  }

	/* If either base is named object or a special address
	   (like an argument or stack reference), then use it for the
	   base term.  */
	if (src_0 != 0
	    && (GET_CODE (src_0) == SYMBOL_REF
		|| GET_CODE (src_0) == LABEL_REF
		|| (GET_CODE (src_0) == ADDRESS
		    && GET_MODE (src_0) != VOIDmode)))
	  return src_0;

	if (src_1 != 0
	    && (GET_CODE (src_1) == SYMBOL_REF
		|| GET_CODE (src_1) == LABEL_REF
		|| (GET_CODE (src_1) == ADDRESS
		    && GET_MODE (src_1) != VOIDmode)))
	  return src_1;

	/* Guess which operand is the base address:
	   If either operand is a symbol, then it is the base.  If
	   either operand is a CONST_INT, then the other is the base.  */
	if (CONST_INT_P (src_1) || CONSTANT_P (src_0))
	  return find_base_value (src_0);
	else if (CONST_INT_P (src_0) || CONSTANT_P (src_1))
	  return find_base_value (src_1);

	return 0;
      }

    case LO_SUM:
      /* The standard form is (lo_sum reg sym) so look only at the
	 second operand.  */
      return find_base_value (XEXP (src, 1));

    case AND:
      /* If the second operand is constant set the base
	 address to the first operand.  */
      if (CONST_INT_P (XEXP (src, 1)) && INTVAL (XEXP (src, 1)) != 0)
	return find_base_value (XEXP (src, 0));
      return 0;

    case TRUNCATE:
      /* As we do not know which address space the pointer is referring to, we can
	 handle this only if the target does not support different pointer or
	 address modes depending on the address space.  */
      if (!target_default_pointer_address_modes_p ())
	break;
      if (GET_MODE_SIZE (GET_MODE (src)) < GET_MODE_SIZE (Pmode))
	break;
      /* Fall through.  */
    case HIGH:
    case PRE_INC:
    case PRE_DEC:
    case POST_INC:
    case POST_DEC:
    case PRE_MODIFY:
    case POST_MODIFY:
      return find_base_value (XEXP (src, 0));

    case ZERO_EXTEND:
    case SIGN_EXTEND:	/* used for NT/Alpha pointers */
      /* As we do not know which address space the pointer is referring to, we can
	 handle this only if the target does not support different pointer or
	 address modes depending on the address space.  */
      if (!target_default_pointer_address_modes_p ())
	break;

      {
	rtx temp = find_base_value (XEXP (src, 0));

	if (temp != 0 && CONSTANT_P (temp))
	  temp = convert_memory_address (Pmode, temp);

	return temp;
      }

    default:
      break;
    }

  return 0;
}
/* Called from init_alias_analysis indirectly through note_stores.  */

/* While scanning insns to find base values, reg_seen[N] is nonzero if
   register N has been set in this function.  */
static char *reg_seen;

/* Addresses which are known not to alias anything else are identified
   by a unique integer.  */
static int unique_id;

static void
record_set (rtx dest, const_rtx set, void *data ATTRIBUTE_UNUSED)
{
  unsigned regno;
  rtx src;
  int n;

  if (!REG_P (dest))
    return;

  regno = REGNO (dest);

  gcc_checking_assert (regno < VEC_length (rtx, reg_base_value));

  /* If this spans multiple hard registers, then we must indicate that every
     register has an unusable value.  */
  if (regno < FIRST_PSEUDO_REGISTER)
    n = hard_regno_nregs[regno][GET_MODE (dest)];
  else
    n = 1;
  if (n != 1)
    {
      while (--n >= 0)
	{
	  reg_seen[regno + n] = 1;
	  new_reg_base_value[regno + n] = 0;
	}
      return;
    }

  if (set)
    {
      /* A CLOBBER wipes out any old value but does not prevent a previously
	 unset register from acquiring a base address (i.e. reg_seen is not
	 set).  */
      if (GET_CODE (set) == CLOBBER)
	{
	  new_reg_base_value[regno] = 0;
	  return;
	}
      src = SET_SRC (set);
    }
  else
    {
      if (reg_seen[regno])
	{
	  new_reg_base_value[regno] = 0;
	  return;
	}
      reg_seen[regno] = 1;
      new_reg_base_value[regno] = gen_rtx_ADDRESS (Pmode,
						   GEN_INT (unique_id++));
      return;
    }

  /* If this is not the first set of REGNO, see whether the new value
     is related to the old one.  There are two cases of interest:

     (1) The register might be assigned an entirely new value
	 that has the same base term as the original set.

     (2) The set might be a simple self-modification that
	 cannot change REGNO's base value.

     If neither case holds, reject the original base value as invalid.
     Note that the following situation is not detected:

	 extern int x, y;  int *p = &x; p += (&y-&x);

     ANSI C does not allow computing the difference of addresses
     of distinct top level objects.  */
  if (new_reg_base_value[regno] != 0
      && find_base_value (src) != new_reg_base_value[regno])
    switch (GET_CODE (src))
      {
      case LO_SUM:
      case MINUS:
	if (XEXP (src, 0) != dest && XEXP (src, 1) != dest)
	  new_reg_base_value[regno] = 0;
	break;
      case PLUS:
	/* If the value we add in the PLUS is also a valid base value,
	   this might be the actual base value, and the original value
	   an index.  */
	{
	  rtx other = NULL_RTX;

	  if (XEXP (src, 0) == dest)
	    other = XEXP (src, 1);
	  else if (XEXP (src, 1) == dest)
	    other = XEXP (src, 0);

	  if (! other || find_base_value (other))
	    new_reg_base_value[regno] = 0;
	  break;
	}
      case AND:
	if (XEXP (src, 0) != dest || !CONST_INT_P (XEXP (src, 1)))
	  new_reg_base_value[regno] = 0;
	break;
      default:
	new_reg_base_value[regno] = 0;
	break;
      }
  /* If this is the first set of a register, record the value.  */
  else if ((regno >= FIRST_PSEUDO_REGISTER || ! fixed_regs[regno])
	   && ! reg_seen[regno] && new_reg_base_value[regno] == 0)
    new_reg_base_value[regno] = find_base_value (src);

  reg_seen[regno] = 1;
}
/* Return REG_BASE_VALUE for REGNO.  Selective scheduler uses this to avoid
   using hard registers with non-null REG_BASE_VALUE for renaming.  */
rtx
get_reg_base_value (unsigned int regno)
{
  return VEC_index (rtx, reg_base_value, regno);
}
/* If a value is known for REGNO, return it.  */

rtx
get_reg_known_value (unsigned int regno)
{
  if (regno >= FIRST_PSEUDO_REGISTER)
    {
      regno -= FIRST_PSEUDO_REGISTER;
      if (regno < reg_known_value_size)
	return reg_known_value[regno];
    }
  return 0;
}
static void
set_reg_known_value (unsigned int regno, rtx val)
{
  if (regno >= FIRST_PSEUDO_REGISTER)
    {
      regno -= FIRST_PSEUDO_REGISTER;
      if (regno < reg_known_value_size)
	reg_known_value[regno] = val;
    }
}
/* Similarly for reg_known_equiv_p.  */

bool
get_reg_known_equiv_p (unsigned int regno)
{
  if (regno >= FIRST_PSEUDO_REGISTER)
    {
      regno -= FIRST_PSEUDO_REGISTER;
      if (regno < reg_known_value_size)
	return reg_known_equiv_p[regno];
    }
  return false;
}
static void
set_reg_known_equiv_p (unsigned int regno, bool val)
{
  if (regno >= FIRST_PSEUDO_REGISTER)
    {
      regno -= FIRST_PSEUDO_REGISTER;
      if (regno < reg_known_value_size)
	reg_known_equiv_p[regno] = val;
    }
}
/* Returns a canonical version of X, from the point of view of alias
   analysis.  (For example, if X is a MEM whose address is a register,
   and the register has a known value (say a SYMBOL_REF), then a MEM
   whose address is the SYMBOL_REF is returned.)  */

rtx
canon_rtx (rtx x)
{
  /* Recursively look for equivalences.  */
  if (REG_P (x) && REGNO (x) >= FIRST_PSEUDO_REGISTER)
    {
      rtx t = get_reg_known_value (REGNO (x));
      if (t == x)
	return x;
      if (t)
	return canon_rtx (t);
    }

  if (GET_CODE (x) == PLUS)
    {
      rtx x0 = canon_rtx (XEXP (x, 0));
      rtx x1 = canon_rtx (XEXP (x, 1));

      if (x0 != XEXP (x, 0) || x1 != XEXP (x, 1))
	{
	  if (CONST_INT_P (x0))
	    return plus_constant (x1, INTVAL (x0));
	  else if (CONST_INT_P (x1))
	    return plus_constant (x0, INTVAL (x1));
	  return gen_rtx_PLUS (GET_MODE (x), x0, x1);
	}
    }

  /* This gives us much better alias analysis when called from
     the loop optimizer.  Note we want to leave the original
     MEM alone, but need to return the canonicalized MEM with
     all the flags with their original values.  */
  else if (MEM_P (x))
    x = replace_equiv_address_nv (x, canon_rtx (XEXP (x, 0)));

  return x;
}
/* Return 1 if X and Y are identical-looking rtx's.
   Expect that X and Y have already been canonicalized.

   We use the data in reg_known_value above to see if two registers with
   different numbers are, in fact, equivalent.  */

static int
rtx_equal_for_memref_p (const_rtx x, const_rtx y)
{
  int i;
  int j;
  enum rtx_code code;
  const char *fmt;

  if (x == 0 && y == 0)
    return 1;
  if (x == 0 || y == 0)
    return 0;

  if (x == y)
    return 1;

  code = GET_CODE (x);
  /* Rtx's of different codes cannot be equal.  */
  if (code != GET_CODE (y))
    return 0;

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.
     (REG:SI x) and (REG:HI x) are NOT equivalent.  */

  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  /* Some RTL can be compared without a recursive examination.  */
  switch (code)
    {
    case REG:
      return REGNO (x) == REGNO (y);

    case LABEL_REF:
      return XEXP (x, 0) == XEXP (y, 0);

    case SYMBOL_REF:
      return XSTR (x, 0) == XSTR (y, 0);

    case VALUE:
    case CONST_INT:
    case CONST_DOUBLE:
      /* There's no need to compare the contents of CONST_DOUBLEs or
	 CONST_INTs because pointer equality is a good enough
	 comparison for these nodes.  */
      return 0;

    default:
      break;
    }

  /* canon_rtx knows how to handle plus.  No need to canonicalize.  */
  if (code == PLUS)
    return ((rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 0))
	     && rtx_equal_for_memref_p (XEXP (x, 1), XEXP (y, 1)))
	    || (rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 1))
		&& rtx_equal_for_memref_p (XEXP (x, 1), XEXP (y, 0))));
  /* For commutative operations, the RTXs match if the operands match in any
     order.  Also handle the simple binary and unary cases without a loop.  */
  if (COMMUTATIVE_P (x))
    {
      rtx xop0 = canon_rtx (XEXP (x, 0));
      rtx yop0 = canon_rtx (XEXP (y, 0));
      rtx yop1 = canon_rtx (XEXP (y, 1));

      return ((rtx_equal_for_memref_p (xop0, yop0)
	       && rtx_equal_for_memref_p (canon_rtx (XEXP (x, 1)), yop1))
	      || (rtx_equal_for_memref_p (xop0, yop1)
		  && rtx_equal_for_memref_p (canon_rtx (XEXP (x, 1)), yop0)));
    }
  else if (NON_COMMUTATIVE_P (x))
    {
      return (rtx_equal_for_memref_p (canon_rtx (XEXP (x, 0)),
				      canon_rtx (XEXP (y, 0)))
	      && rtx_equal_for_memref_p (canon_rtx (XEXP (x, 1)),
					 canon_rtx (XEXP (y, 1))));
    }
  else if (UNARY_P (x))
    return rtx_equal_for_memref_p (canon_rtx (XEXP (x, 0)),
				   canon_rtx (XEXP (y, 0)));

  /* Compare the elements.  If any pair of corresponding elements
     fail to match, return 0 for the whole thing.

     Limit cases to types which actually appear in addresses.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      switch (fmt[i])
	{
	case 'i':
	  if (XINT (x, i) != XINT (y, i))
	    return 0;
	  break;

	case 'E':
	  /* Two vectors must have the same length.  */
	  if (XVECLEN (x, i) != XVECLEN (y, i))
	    return 0;

	  /* And the corresponding elements must match.  */
	  for (j = 0; j < XVECLEN (x, i); j++)
	    if (rtx_equal_for_memref_p (canon_rtx (XVECEXP (x, i, j)),
					canon_rtx (XVECEXP (y, i, j))) == 0)
	      return 0;
	  break;

	case 'e':
	  if (rtx_equal_for_memref_p (canon_rtx (XEXP (x, i)),
				      canon_rtx (XEXP (y, i))) == 0)
	    return 0;
	  break;

	  /* This can happen for asm operands.  */
	case 's':
	  if (strcmp (XSTR (x, i), XSTR (y, i)))
	    return 0;
	  break;

	/* This can happen for an asm which clobbers memory.  */
	case '0':
	  break;

	  /* It is believed that rtx's at this level will never
	     contain anything but integers and other rtx's,
	     except for within LABEL_REFs and SYMBOL_REFs.  */
	default:
	  gcc_unreachable ();
	}
    }
  return 1;
}

static rtx
find_base_term (rtx x)
{
  cselib_val *val;
  struct elt_loc_list *l;

#if defined (FIND_BASE_TERM)
  /* Try machine-dependent ways to find the base term.  */
  x = FIND_BASE_TERM (x);
#endif

  switch (GET_CODE (x))
    {
    case REG:
      return REG_BASE_VALUE (x);

    case TRUNCATE:
      /* As we do not know which address space the pointer is referring to, we can
	 handle this only if the target does not support different pointer or
	 address modes depending on the address space.  */
      if (!target_default_pointer_address_modes_p ())
	return 0;
      if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (Pmode))
	return 0;
      /* Fall through.  */
    case HIGH:
    case PRE_INC:
    case PRE_DEC:
    case POST_INC:
    case POST_DEC:
    case PRE_MODIFY:
    case POST_MODIFY:
      return find_base_term (XEXP (x, 0));

    case ZERO_EXTEND:
    case SIGN_EXTEND:	/* Used for Alpha/NT pointers */
      /* As we do not know which address space the pointer is referring to, we can
	 handle this only if the target does not support different pointer or
	 address modes depending on the address space.  */
      if (!target_default_pointer_address_modes_p ())
	return 0;

      {
	rtx temp = find_base_term (XEXP (x, 0));

	if (temp != 0 && CONSTANT_P (temp))
	  temp = convert_memory_address (Pmode, temp);

	return temp;
      }

    case VALUE:
      val = CSELIB_VAL_PTR (x);
      if (!val)
	return 0;
      for (l = val->locs; l; l = l->next)
	if ((x = find_base_term (l->loc)) != 0)
	  return x;
      return 0;

    case LO_SUM:
      /* The standard form is (lo_sum reg sym) so look only at the
	 second operand.  */
      return find_base_term (XEXP (x, 1));

    case CONST:
      x = XEXP (x, 0);
      if (GET_CODE (x) != PLUS && GET_CODE (x) != MINUS)
	return 0;
      /* Fall through.  */

    case PLUS:
    case MINUS:
      {
	rtx tmp1 = XEXP (x, 0);
	rtx tmp2 = XEXP (x, 1);

	/* This is a little bit tricky since we have to determine which of
	   the two operands represents the real base address.  Otherwise this
	   routine may return the index register instead of the base register.

	   That may cause us to believe no aliasing was possible, when in
	   fact aliasing is possible.

	   We use a few simple tests to guess the base register.  Additional
	   tests can certainly be added.  For example, if one of the operands
	   is a shift or multiply, then it must be the index register and the
	   other operand is the base register.  */

	if (tmp1 == pic_offset_table_rtx && CONSTANT_P (tmp2))
	  return find_base_term (tmp2);

	/* If either operand is known to be a pointer, then use it
	   to determine the base term.  */
	if (REG_P (tmp1) && REG_POINTER (tmp1))
	  {
	    rtx base = find_base_term (tmp1);
	    if (base)
	      return base;
	  }

	if (REG_P (tmp2) && REG_POINTER (tmp2))
	  {
	    rtx base = find_base_term (tmp2);
	    if (base)
	      return base;
	  }

	/* Neither operand was known to be a pointer.  Go ahead and find the
	   base term for both operands.  */
	tmp1 = find_base_term (tmp1);
	tmp2 = find_base_term (tmp2);

	/* If either base term is named object or a special address
	   (like an argument or stack reference), then use it for the
	   base term.  */
	if (tmp1 != 0
	    && (GET_CODE (tmp1) == SYMBOL_REF
		|| GET_CODE (tmp1) == LABEL_REF
		|| (GET_CODE (tmp1) == ADDRESS
		    && GET_MODE (tmp1) != VOIDmode)))
	  return tmp1;

	if (tmp2 != 0
	    && (GET_CODE (tmp2) == SYMBOL_REF
		|| GET_CODE (tmp2) == LABEL_REF
		|| (GET_CODE (tmp2) == ADDRESS
		    && GET_MODE (tmp2) != VOIDmode)))
	  return tmp2;

	/* We could not determine which of the two operands was the
	   base register and which was the index.  So we can determine
	   nothing from the base alias check.  */
	return 0;
      }

    case AND:
      if (CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) != 0)
	return find_base_term (XEXP (x, 0));
      return 0;

    case SYMBOL_REF:
    case LABEL_REF:
      return x;

    default:
      return 0;
    }
}
/* Return 0 if the addresses X and Y are known to point to different
   objects, 1 if they might be pointers to the same object.  */

static int
base_alias_check (rtx x, rtx y, enum machine_mode x_mode,
		  enum machine_mode y_mode)
{
  rtx x_base = find_base_term (x);
  rtx y_base = find_base_term (y);

  /* If the address itself has no known base see if a known equivalent
     value has one.  If either address still has no known base, nothing
     is known about aliasing.  */
  if (x_base == 0)
    {
      rtx x_c;

      if (! flag_expensive_optimizations || (x_c = canon_rtx (x)) == x)
	return 1;

      x_base = find_base_term (x_c);
      if (x_base == 0)
	return 1;
    }

  if (y_base == 0)
    {
      rtx y_c;
      if (! flag_expensive_optimizations || (y_c = canon_rtx (y)) == y)
	return 1;
      y_base = find_base_term (y_c);
      if (y_base == 0)
	return 1;
    }

  /* If the base addresses are equal nothing is known about aliasing.  */
  if (rtx_equal_p (x_base, y_base))
    return 1;

  /* The base addresses are different expressions.  If they are not accessed
     via AND, there is no conflict.  We can bring knowledge of object
     alignment into play here.  For example, on alpha, "char a, b;" can
     alias one another, though "char a; long b;" cannot.  AND addresses may
     implicitly alias surrounding objects; i.e. unaligned access in DImode
     via AND address can alias all surrounding object types except those
     with alignment 8 or higher.  */
  if (GET_CODE (x) == AND && GET_CODE (y) == AND)
    return 1;
  if (GET_CODE (x) == AND
      && (!CONST_INT_P (XEXP (x, 1))
	  || (int) GET_MODE_UNIT_SIZE (y_mode) < -INTVAL (XEXP (x, 1))))
    return 1;
  if (GET_CODE (y) == AND
      && (!CONST_INT_P (XEXP (y, 1))
	  || (int) GET_MODE_UNIT_SIZE (x_mode) < -INTVAL (XEXP (y, 1))))
    return 1;

  /* Differing symbols not accessed via AND never alias.  */
  if (GET_CODE (x_base) != ADDRESS && GET_CODE (y_base) != ADDRESS)
    return 0;

  /* If one address is a stack reference there can be no alias:
     stack references using different base registers do not alias,
     a stack reference can not alias a parameter, and a stack reference
     can not alias a global.  */
  if ((GET_CODE (x_base) == ADDRESS && GET_MODE (x_base) == Pmode)
      || (GET_CODE (y_base) == ADDRESS && GET_MODE (y_base) == Pmode))
    return 0;

  return 1;
}
/* Convert the address X into something we can use.  This is done by returning
   it unchanged unless it is a value; in the latter case we call cselib to get
   a more useful rtx.  */

rtx
get_addr (rtx x)
{
  cselib_val *v;
  struct elt_loc_list *l;

  if (GET_CODE (x) != VALUE)
    return x;
  v = CSELIB_VAL_PTR (x);
  if (v)
    {
      for (l = v->locs; l; l = l->next)
	if (CONSTANT_P (l->loc))
	  return l->loc;
      for (l = v->locs; l; l = l->next)
	if (!REG_P (l->loc) && !MEM_P (l->loc))
	  return l->loc;
      if (v->locs)
	return v->locs->loc;
    }
  return x;
}
/* Return the address of the (N_REFS + 1)th memory reference to ADDR
   where SIZE is the size in bytes of the memory reference.  If ADDR
   is not modified by the memory reference then ADDR is returned.  */
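
/* For instance (an illustrative sketch, not part of the original code):
   for ADDR = (pre_inc (reg)) with SIZE = 4 and N_REFS = 0 the result is
   (plus (reg) (const_int 4)), i.e. the address actually used by the
   first reference, while for (post_inc (reg)) under the same arguments
   the offset is 0 and the register itself is returned.  */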

static rtx
addr_side_effect_eval (rtx addr, int size, int n_refs)
{
  int offset = 0;

  switch (GET_CODE (addr))
    {
    case PRE_INC:
      offset = (n_refs + 1) * size;
      break;
    case PRE_DEC:
      offset = -(n_refs + 1) * size;
      break;
    case POST_INC:
      offset = n_refs * size;
      break;
    case POST_DEC:
      offset = -n_refs * size;
      break;

    default:
      return addr;
    }

  if (offset)
    addr = gen_rtx_PLUS (GET_MODE (addr), XEXP (addr, 0),
			 GEN_INT (offset));
  else
    addr = XEXP (addr, 0);
  addr = canon_rtx (addr);

  return addr;
}
/* Return one if X and Y (memory addresses) reference the
   same location in memory or if the references overlap.
   Return zero if they do not overlap, else return
   minus one in which case they still might reference the same location.

   C is an offset accumulator.  When
   C is nonzero, we are testing aliases between X and Y + C.
   XSIZE is the size in bytes of the X reference,
   similarly YSIZE is the size in bytes for Y.
   Expect that canon_rtx has already been called for X and Y.

   If XSIZE or YSIZE is zero, we do not know the amount of memory being
   referenced (the reference was BLKmode), so make the most pessimistic
   assumptions.

   If XSIZE or YSIZE is negative, we may access memory outside the object
   being referenced as a side effect.  This can happen when using AND to
   align memory references, as is done on the Alpha.

   Nice to notice that varying addresses cannot conflict with fp if no
   local variables had their addresses taken, but that's too hard now.

   ??? Contrary to the tree alias oracle this does not return
   one for X + non-constant and Y + non-constant when X and Y are equal.
   If that is fixed the TBAA hack for union type-punning can be removed.  */
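
/* A worked example (illustrative only, not from the original sources):
   with X and Y both equal to (symbol_ref "a"), XSIZE = 8, YSIZE = 4 and
   C = 4 we are comparing a[0..7] against a[4..7]; since C >= 0 and
   XSIZE > C the function returns 1 (overlap).  With C = 8 neither
   "C >= 0 && XSIZE > C" nor "C < 0 && YSIZE + C > 0" holds, so the
   accesses do not overlap and 0 is returned.  */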

static int
memrefs_conflict_p (int xsize, rtx x, int ysize, rtx y, HOST_WIDE_INT c)
{
  if (GET_CODE (x) == VALUE)
    {
      if (REG_P (y))
	{
	  struct elt_loc_list *l = NULL;
	  if (CSELIB_VAL_PTR (x))
	    for (l = CSELIB_VAL_PTR (x)->locs; l; l = l->next)
	      if (REG_P (l->loc) && rtx_equal_for_memref_p (l->loc, y))
		break;
	  if (l)
	    x = y;
	  else
	    x = get_addr (x);
	}
      /* Don't call get_addr if y is the same VALUE.  */
      else if (x != y)
	x = get_addr (x);
    }
  if (GET_CODE (y) == VALUE)
    {
      if (REG_P (x))
	{
	  struct elt_loc_list *l = NULL;
	  if (CSELIB_VAL_PTR (y))
	    for (l = CSELIB_VAL_PTR (y)->locs; l; l = l->next)
	      if (REG_P (l->loc) && rtx_equal_for_memref_p (l->loc, x))
		break;
	  if (l)
	    y = x;
	  else
	    y = get_addr (y);
	}
      /* Don't call get_addr if x is the same VALUE.  */
      else if (y != x)
	y = get_addr (y);
    }
  if (GET_CODE (x) == HIGH)
    x = XEXP (x, 0);
  else if (GET_CODE (x) == LO_SUM)
    x = XEXP (x, 1);
  else
    x = addr_side_effect_eval (x, xsize, 0);
  if (GET_CODE (y) == HIGH)
    y = XEXP (y, 0);
  else if (GET_CODE (y) == LO_SUM)
    y = XEXP (y, 1);
  else
    y = addr_side_effect_eval (y, ysize, 0);

  if (rtx_equal_for_memref_p (x, y))
    {
      if (xsize <= 0 || ysize <= 0)
	return 1;
      if (c >= 0 && xsize > c)
	return 1;
      if (c < 0 && ysize+c > 0)
	return 1;
      return 0;
    }

  /* This code used to check for conflicts involving stack references and
     globals but the base address alias code now handles these cases.  */

  if (GET_CODE (x) == PLUS)
    {
      /* The fact that X is canonicalized means that this
	 PLUS rtx is canonicalized.  */
      rtx x0 = XEXP (x, 0);
      rtx x1 = XEXP (x, 1);

      if (GET_CODE (y) == PLUS)
	{
	  /* The fact that Y is canonicalized means that this
	     PLUS rtx is canonicalized.  */
	  rtx y0 = XEXP (y, 0);
	  rtx y1 = XEXP (y, 1);

	  if (rtx_equal_for_memref_p (x1, y1))
	    return memrefs_conflict_p (xsize, x0, ysize, y0, c);
	  if (rtx_equal_for_memref_p (x0, y0))
	    return memrefs_conflict_p (xsize, x1, ysize, y1, c);
	  if (CONST_INT_P (x1))
	    {
	      if (CONST_INT_P (y1))
		return memrefs_conflict_p (xsize, x0, ysize, y0,
					   c - INTVAL (x1) + INTVAL (y1));
	      else
		return memrefs_conflict_p (xsize, x0, ysize, y,
					   c - INTVAL (x1));
	    }
	  else if (CONST_INT_P (y1))
	    return memrefs_conflict_p (xsize, x, ysize, y0, c + INTVAL (y1));

	  return -1;
	}
      else if (CONST_INT_P (x1))
	return memrefs_conflict_p (xsize, x0, ysize, y, c - INTVAL (x1));
    }
  else if (GET_CODE (y) == PLUS)
    {
      /* The fact that Y is canonicalized means that this
	 PLUS rtx is canonicalized.  */
      rtx y0 = XEXP (y, 0);
      rtx y1 = XEXP (y, 1);

      if (CONST_INT_P (y1))
	return memrefs_conflict_p (xsize, x, ysize, y0, c + INTVAL (y1));
      else
	return -1;
    }

  if (GET_CODE (x) == GET_CODE (y))
    switch (GET_CODE (x))
      {
      case MULT:
	{
	  /* Handle cases where we expect the second operands to be the
	     same, and check only whether the first operand would conflict
	     or not.  */
	  rtx x0, y0;
	  rtx x1 = canon_rtx (XEXP (x, 1));
	  rtx y1 = canon_rtx (XEXP (y, 1));
	  if (! rtx_equal_for_memref_p (x1, y1))
	    return -1;
	  x0 = canon_rtx (XEXP (x, 0));
	  y0 = canon_rtx (XEXP (y, 0));
	  if (rtx_equal_for_memref_p (x0, y0))
	    return (xsize == 0 || ysize == 0
		    || (c >= 0 && xsize > c) || (c < 0 && ysize+c > 0));

	  /* Can't properly adjust our sizes.  */
	  if (!CONST_INT_P (x1))
	    return -1;
	  xsize /= INTVAL (x1);
	  ysize /= INTVAL (x1);
	  c /= INTVAL (x1);
	  return memrefs_conflict_p (xsize, x0, ysize, y0, c);
	}

      default:
	break;
      }

  /* Treat an access through an AND (e.g. a subword access on an Alpha)
     as an access with indeterminate size.  Assume that references
     besides AND are aligned, so if the size of the other reference is
     at least as large as the alignment, assume no other overlap.  */
  if (GET_CODE (x) == AND && CONST_INT_P (XEXP (x, 1)))
    {
      if (GET_CODE (y) == AND || ysize < -INTVAL (XEXP (x, 1)))
	xsize = -1;
      return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)), ysize, y, c);
    }
  if (GET_CODE (y) == AND && CONST_INT_P (XEXP (y, 1)))
    {
      /* ??? If we are indexing far enough into the array/structure, we
	 may yet be able to determine that we can not overlap.  But we
	 also need to check that we are far enough from the end not to
	 overlap a following reference, so we do nothing with that for now.  */
      if (GET_CODE (x) == AND || xsize < -INTVAL (XEXP (y, 1)))
	ysize = -1;
      return memrefs_conflict_p (xsize, x, ysize, canon_rtx (XEXP (y, 0)), c);
    }

  if (CONSTANT_P (x))
    {
      if (CONST_INT_P (x) && CONST_INT_P (y))
	{
	  c += (INTVAL (y) - INTVAL (x));
	  return (xsize <= 0 || ysize <= 0
		  || (c >= 0 && xsize > c) || (c < 0 && ysize+c > 0));
	}

      if (GET_CODE (x) == CONST)
	{
	  if (GET_CODE (y) == CONST)
	    return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)),
				       ysize, canon_rtx (XEXP (y, 0)), c);
	  else
	    return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)),
				       ysize, y, c);
	}
      if (GET_CODE (y) == CONST)
	return memrefs_conflict_p (xsize, x, ysize,
				   canon_rtx (XEXP (y, 0)), c);

      if (CONSTANT_P (y))
	return (xsize <= 0 || ysize <= 0
		|| (rtx_equal_for_memref_p (x, y)
		    && ((c >= 0 && xsize > c) || (c < 0 && ysize+c > 0))));

      return -1;
    }

  return -1;
}
/* Functions to compute memory dependencies.

   Since we process the insns in execution order, we can build tables
   to keep track of what registers are fixed (and not aliased), what registers
   are varying in known ways, and what registers are varying in unknown
   ways.

   If both memory references are volatile, then there must always be a
   dependence between the two references, since their order can not be
   changed.  A volatile and non-volatile reference can be interchanged
   though.

   A MEM_IN_STRUCT reference at a non-AND varying address can never
   conflict with a non-MEM_IN_STRUCT reference at a fixed address.  We
   also must allow AND addresses, because they may generate accesses
   outside the object being referenced.  This is used to generate
   aligned addresses from unaligned addresses, for instance, the alpha
   storeqi_unaligned pattern.  */
/* Read dependence: X is read after read in MEM takes place.  There can
   only be a dependence here if both reads are volatile.  */

int
read_dependence (const_rtx mem, const_rtx x)
{
  return MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem);
}
/* Returns MEM1 if and only if MEM1 is a scalar at a fixed address and
   MEM2 is a reference to a structure at a varying address, or returns
   MEM2 if vice versa.  Otherwise, returns NULL_RTX.  If a non-NULL
   value is returned MEM1 and MEM2 can never alias.  VARIES_P is used
   to decide whether or not an address may vary; it should return
   nonzero whenever variation is possible.
   MEM1_ADDR and MEM2_ADDR are the addresses of MEM1 and MEM2.  */

static const_rtx
fixed_scalar_and_varying_struct_p (const_rtx mem1, const_rtx mem2,
				   rtx mem1_addr, rtx mem2_addr,
				   bool (*varies_p) (const_rtx, bool))
{
  if (! flag_strict_aliasing)
    return NULL_RTX;

  if (MEM_ALIAS_SET (mem2)
      && MEM_SCALAR_P (mem1) && MEM_IN_STRUCT_P (mem2)
      && !varies_p (mem1_addr, 1) && varies_p (mem2_addr, 1))
    /* MEM1 is a scalar at a fixed address; MEM2 is a struct at a
       varying address.  */
    return mem1;

  if (MEM_ALIAS_SET (mem1)
      && MEM_IN_STRUCT_P (mem1) && MEM_SCALAR_P (mem2)
      && varies_p (mem1_addr, 1) && !varies_p (mem2_addr, 1))
    /* MEM2 is a scalar at a fixed address; MEM1 is a struct at a
       varying address.  */
    return mem2;

  return NULL_RTX;
}
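
/* Informal example of the distinction made above (illustrative only): given

     int g;
     struct s { int f; } *p;

   a reference to `g' is a scalar at a fixed address while `p->f' is a
   structure member at a varying address; under strict aliasing the store
   through `p->f' is taken not to modify `g', so the two references are
   treated as never aliasing.  */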
/* Returns nonzero if something about the mode or address format MEM1
   indicates that it might well alias *anything*.  */

static int
aliases_everything_p (const_rtx mem)
{
  if (GET_CODE (XEXP (mem, 0)) == AND)
    /* If the address is an AND, it's very hard to know what it is
       actually pointing at.  */
    return 1;

  return 0;
}
/* Return true if we can determine that the fields referenced cannot
   overlap for any pair of objects.  */

static bool
nonoverlapping_component_refs_p (const_tree x, const_tree y)
{
  const_tree fieldx, fieldy, typex, typey, orig_y;

  if (!flag_strict_aliasing)
    return false;

  do
    {
      /* The comparison has to be done at a common type, since we don't
	 know how the inheritance hierarchy works.  */
      orig_y = y;
      do
	{
	  fieldx = TREE_OPERAND (x, 1);
	  typex = TYPE_MAIN_VARIANT (DECL_FIELD_CONTEXT (fieldx));

	  y = orig_y;
	  do
	    {
	      fieldy = TREE_OPERAND (y, 1);
	      typey = TYPE_MAIN_VARIANT (DECL_FIELD_CONTEXT (fieldy));

	      if (typex == typey)
		goto found;

	      y = TREE_OPERAND (y, 0);
	    }
	  while (y && TREE_CODE (y) == COMPONENT_REF);

	  x = TREE_OPERAND (x, 0);
	}
      while (x && TREE_CODE (x) == COMPONENT_REF);
      /* Never found a common type.  */
      return false;

    found:
      /* If we're left with accessing different fields of a structure,
	 then no overlap.  */
      if (TREE_CODE (typex) == RECORD_TYPE
	  && fieldx != fieldy)
	return true;

      /* The comparison on the current field failed.  If we're accessing
	 a very nested structure, look at the next outer level.  */
      x = TREE_OPERAND (x, 0);
      y = TREE_OPERAND (y, 0);
    }
  while (x && y
	 && TREE_CODE (x) == COMPONENT_REF
	 && TREE_CODE (y) == COMPONENT_REF);

  return false;
}
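
/* Informal example (illustrative only, not from the surrounding code):
   given

     struct pt { int x; int y; } *a, *b;

   the references a->x and b->y are COMPONENT_REFs whose FIELD_DECLs belong
   to the same RECORD_TYPE but differ, so the routine above concludes that
   the two accesses cannot overlap no matter where `a' and `b' point.  */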
/* Look at the bottom of the COMPONENT_REF list for a DECL, and return it.  */

static tree
decl_for_component_ref (tree x)
{
  do
    {
      x = TREE_OPERAND (x, 0);
    }
  while (x && TREE_CODE (x) == COMPONENT_REF);

  return x && DECL_P (x) ? x : NULL_TREE;
}
/* Walk up the COMPONENT_REF list and adjust OFFSET to compensate for the
   offset of the field reference.  */

static rtx
adjust_offset_for_component_ref (tree x, rtx offset)
{
  HOST_WIDE_INT ioffset;

  if (! offset)
    return NULL_RTX;

  ioffset = INTVAL (offset);
  do
    {
      tree offset = component_ref_field_offset (x);
      tree field = TREE_OPERAND (x, 1);

      if (! host_integerp (offset, 1))
	return NULL_RTX;
      ioffset += (tree_low_cst (offset, 1)
		  + (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
		     / BITS_PER_UNIT));

      x = TREE_OPERAND (x, 0);
    }
  while (x && TREE_CODE (x) == COMPONENT_REF);

  return GEN_INT (ioffset);
}
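
/* Worked example (illustrative only, with assumed field layouts): for a
   reference such as s.in.f, where field `in' starts at byte offset 8
   within `s' and `f' at byte offset 4 within `in', walking up the
   COMPONENT_REF chain accumulates 4 + 8 = 12, so an offset expressed
   relative to the innermost reference becomes an offset of 12 relative
   to the containing DECL `s'.  */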
/* Return nonzero if we can determine the exprs corresponding to memrefs
   X and Y and they do not overlap.
   If LOOP_INVARIANT is set, skip offset-based disambiguation.  */

int
nonoverlapping_memrefs_p (const_rtx x, const_rtx y, bool loop_invariant)
{
  tree exprx = MEM_EXPR (x), expry = MEM_EXPR (y);
  rtx rtlx, rtly;
  rtx basex, basey;
  rtx moffsetx, moffsety;
  HOST_WIDE_INT offsetx = 0, offsety = 0, sizex, sizey, tem;

  /* Unless both have exprs, we can't tell anything.  */
  if (exprx == 0 || expry == 0)
    return 0;

  /* For spill-slot accesses make sure we have valid offsets.  */
  if ((exprx == get_spill_slot_decl (false)
       && ! MEM_OFFSET (x))
      || (expry == get_spill_slot_decl (false)
	  && ! MEM_OFFSET (y)))
    return 0;

  /* If both are field references, we may be able to determine something.  */
  if (TREE_CODE (exprx) == COMPONENT_REF
      && TREE_CODE (expry) == COMPONENT_REF
      && nonoverlapping_component_refs_p (exprx, expry))
    return 1;

  /* If the field reference test failed, look at the DECLs involved.  */
  moffsetx = MEM_OFFSET (x);
  if (TREE_CODE (exprx) == COMPONENT_REF)
    {
      tree t = decl_for_component_ref (exprx);
      if (! t)
	return 0;
      moffsetx = adjust_offset_for_component_ref (exprx, moffsetx);
      exprx = t;
    }

  moffsety = MEM_OFFSET (y);
  if (TREE_CODE (expry) == COMPONENT_REF)
    {
      tree t = decl_for_component_ref (expry);
      if (! t)
	return 0;
      moffsety = adjust_offset_for_component_ref (expry, moffsety);
      expry = t;
    }

  if (! DECL_P (exprx) || ! DECL_P (expry))
    return 0;

  /* With invalid code we can end up storing into the constant pool.
     Bail out to avoid ICEing when creating RTL for this.
     See gfortran.dg/lto/20091028-2_0.f90.  */
  if (TREE_CODE (exprx) == CONST_DECL
      || TREE_CODE (expry) == CONST_DECL)
    return 1;

  rtlx = DECL_RTL (exprx);
  rtly = DECL_RTL (expry);

  /* If either RTL is not a MEM, it must be a REG or CONCAT, meaning they
     can't overlap unless they are the same because we never reuse that part
     of the stack frame used for locals for spilled pseudos.  */
  if ((!MEM_P (rtlx) || !MEM_P (rtly))
      && ! rtx_equal_p (rtlx, rtly))
    return 1;

  /* If we have MEMs referring to different address spaces (which can
     potentially overlap), we cannot easily tell from the addresses
     whether the references overlap.  */
  if (MEM_P (rtlx) && MEM_P (rtly)
      && MEM_ADDR_SPACE (rtlx) != MEM_ADDR_SPACE (rtly))
    return 0;

  /* Get the base and offsets of both decls.  If either is a register, we
     know both are and are the same, so use that as the base.  The only way
     we can avoid overlap is if we can deduce that they are nonoverlapping
     pieces of that decl, which is very rare.  */
  basex = MEM_P (rtlx) ? XEXP (rtlx, 0) : rtlx;
  if (GET_CODE (basex) == PLUS && CONST_INT_P (XEXP (basex, 1)))
    offsetx = INTVAL (XEXP (basex, 1)), basex = XEXP (basex, 0);

  basey = MEM_P (rtly) ? XEXP (rtly, 0) : rtly;
  if (GET_CODE (basey) == PLUS && CONST_INT_P (XEXP (basey, 1)))
    offsety = INTVAL (XEXP (basey, 1)), basey = XEXP (basey, 0);

  /* If the bases are different, we know they do not overlap if both
     are constants or if one is a constant and the other a pointer into the
     stack frame.  Otherwise a different base means we can't tell if they
     overlap or not.  */
  if (! rtx_equal_p (basex, basey))
    return ((CONSTANT_P (basex) && CONSTANT_P (basey))
	    || (CONSTANT_P (basex) && REG_P (basey)
		&& REGNO_PTR_FRAME_P (REGNO (basey)))
	    || (CONSTANT_P (basey) && REG_P (basex)
		&& REGNO_PTR_FRAME_P (REGNO (basex))));

  /* Offset based disambiguation is not appropriate in the loop-invariant
     case.  */
  if (loop_invariant)
    return 0;

  sizex = (!MEM_P (rtlx) ? (int) GET_MODE_SIZE (GET_MODE (rtlx))
	   : MEM_SIZE (rtlx) ? INTVAL (MEM_SIZE (rtlx))
	   : -1);
  sizey = (!MEM_P (rtly) ? (int) GET_MODE_SIZE (GET_MODE (rtly))
	   : MEM_SIZE (rtly) ? INTVAL (MEM_SIZE (rtly))
	   : -1);

  /* If we have an offset for either memref, it can update the values computed
     above.  */
  if (moffsetx)
    offsetx += INTVAL (moffsetx), sizex -= INTVAL (moffsetx);
  if (moffsety)
    offsety += INTVAL (moffsety), sizey -= INTVAL (moffsety);

  /* If a memref has both a size and an offset, we can use the smaller size.
     We can't do this if the offset isn't known because we must view this
     memref as being anywhere inside the DECL's MEM.  */
  if (MEM_SIZE (x) && moffsetx)
    sizex = INTVAL (MEM_SIZE (x));
  if (MEM_SIZE (y) && moffsety)
    sizey = INTVAL (MEM_SIZE (y));

  /* Put the values of the memref with the lower offset in X's values.  */
  if (offsetx > offsety)
    {
      tem = offsetx, offsetx = offsety, offsety = tem;
      tem = sizex, sizex = sizey, sizey = tem;
    }

  /* If we don't know the size of the lower-offset value, we can't tell
     if they conflict.  Otherwise, we do the test.  */
  return sizex >= 0 && offsety >= offsetx + sizex;
}
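
/* Informal example (illustrative only): for two distinct locals

     int a, b;

   the MEM_EXPRs are the two DECLs, whose DECL_RTL is typically a MEM at a
   constant offset from the frame (or stack) pointer.  The bases then
   compare equal, the offsets differ by at least the size of an int, and
   the offset/size test above reports the two references as
   non-overlapping.  */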
/* Helper for true_dependence and canon_true_dependence.
   Checks for true dependence: X is read after store in MEM takes place.

   VARIES is the function that should be used as rtx_varies function.

   If MEM_CANONICALIZED is FALSE, then X_ADDR and MEM_ADDR should be
   NULL_RTX, and the canonical addresses of MEM and X are both computed
   here.  If MEM_CANONICALIZED, then MEM must be already canonicalized.

   If X_ADDR is non-NULL, it is used in preference of XEXP (x, 0).

   Returns 1 if there is a true dependence, 0 otherwise.  */

static int
true_dependence_1 (const_rtx mem, enum machine_mode mem_mode, rtx mem_addr,
		   const_rtx x, rtx x_addr, bool (*varies) (const_rtx, bool),
		   bool mem_canonicalized)
{
  rtx base;
  int ret;

  gcc_checking_assert (mem_canonicalized ? (mem_addr != NULL_RTX)
		       : (mem_addr == NULL_RTX && x_addr == NULL_RTX));

  if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
    return 1;

  /* (mem:BLK (scratch)) is a special mechanism to conflict with everything.
     This is used in epilogue deallocation functions, and in cselib.  */
  if (GET_MODE (x) == BLKmode && GET_CODE (XEXP (x, 0)) == SCRATCH)
    return 1;
  if (GET_MODE (mem) == BLKmode && GET_CODE (XEXP (mem, 0)) == SCRATCH)
    return 1;
  if (MEM_ALIAS_SET (x) == ALIAS_SET_MEMORY_BARRIER
      || MEM_ALIAS_SET (mem) == ALIAS_SET_MEMORY_BARRIER)
    return 1;

  /* Read-only memory is by definition never modified, and therefore can't
     conflict with anything.  We don't expect to find read-only set on MEM,
     but stupid user tricks can produce them, so don't die.  */
  if (MEM_READONLY_P (x))
    return 0;

  /* If we have MEMs referring to different address spaces (which can
     potentially overlap), we cannot easily tell from the addresses
     whether the references overlap.  */
  if (MEM_ADDR_SPACE (mem) != MEM_ADDR_SPACE (x))
    return 1;

  if (! mem_addr)
    {
      mem_addr = XEXP (mem, 0);
      if (mem_mode == VOIDmode)
	mem_mode = GET_MODE (mem);
    }

  if (! x_addr)
    {
      x_addr = XEXP (x, 0);
      if (!((GET_CODE (x_addr) == VALUE
	     && GET_CODE (mem_addr) != VALUE
	     && reg_mentioned_p (x_addr, mem_addr))
	    || (GET_CODE (x_addr) != VALUE
		&& GET_CODE (mem_addr) == VALUE
		&& reg_mentioned_p (mem_addr, x_addr))))
	{
	  x_addr = get_addr (x_addr);
	  if (! mem_canonicalized)
	    mem_addr = get_addr (mem_addr);
	}
    }

  base = find_base_term (x_addr);
  if (base && (GET_CODE (base) == LABEL_REF
	       || (GET_CODE (base) == SYMBOL_REF
		   && CONSTANT_POOL_ADDRESS_P (base))))
    return 0;

  if (! base_alias_check (x_addr, mem_addr, GET_MODE (x), mem_mode))
    return 0;

  x_addr = canon_rtx (x_addr);
  if (!mem_canonicalized)
    mem_addr = canon_rtx (mem_addr);

  if ((ret = memrefs_conflict_p (GET_MODE_SIZE (mem_mode), mem_addr,
				 SIZE_FOR_MODE (x), x_addr, 0)) != -1)
    return ret;

  if (DIFFERENT_ALIAS_SETS_P (x, mem))
    return 0;

  if (nonoverlapping_memrefs_p (mem, x, false))
    return 0;

  if (aliases_everything_p (x))
    return 1;

  /* We cannot use aliases_everything_p to test MEM, since we must look
     at MEM_ADDR, rather than XEXP (mem, 0).  */
  if (GET_CODE (mem_addr) == AND)
    return 1;

  /* ??? In true_dependence we also allow BLKmode to alias anything.  Why
     don't we do this in anti_dependence and output_dependence?  */
  if (mem_mode == BLKmode || GET_MODE (x) == BLKmode)
    return 1;

  if (fixed_scalar_and_varying_struct_p (mem, x, mem_addr, x_addr, varies))
    return 0;

  return rtx_refs_may_alias_p (x, mem, true);
}
/* True dependence: X is read after store in MEM takes place.  */

int
true_dependence (const_rtx mem, enum machine_mode mem_mode, const_rtx x,
		 bool (*varies) (const_rtx, bool))
{
  return true_dependence_1 (mem, mem_mode, NULL_RTX,
			    x, NULL_RTX, varies,
			    /*mem_canonicalized=*/false);
}
/* Canonical true dependence: X is read after store in MEM takes place.
   Variant of true_dependence which assumes MEM has already been
   canonicalized (hence we no longer do that here).
   The mem_addr argument has been added, since true_dependence_1 computed
   this value prior to canonicalizing.  */

int
canon_true_dependence (const_rtx mem, enum machine_mode mem_mode, rtx mem_addr,
		       const_rtx x, rtx x_addr, bool (*varies) (const_rtx, bool))
{
  return true_dependence_1 (mem, mem_mode, mem_addr,
			    x, x_addr, varies,
			    /*mem_canonicalized=*/true);
}
/* Returns nonzero if a write to X might alias a previous read from
   (or, if WRITEP is nonzero, a write to) MEM.  */

static int
write_dependence_p (const_rtx mem, const_rtx x, int writep)
{
  rtx x_addr, mem_addr;
  const_rtx fixed_scalar;
  rtx base;
  int ret;

  if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
    return 1;

  /* (mem:BLK (scratch)) is a special mechanism to conflict with everything.
     This is used in epilogue deallocation functions.  */
  if (GET_MODE (x) == BLKmode && GET_CODE (XEXP (x, 0)) == SCRATCH)
    return 1;
  if (GET_MODE (mem) == BLKmode && GET_CODE (XEXP (mem, 0)) == SCRATCH)
    return 1;
  if (MEM_ALIAS_SET (x) == ALIAS_SET_MEMORY_BARRIER
      || MEM_ALIAS_SET (mem) == ALIAS_SET_MEMORY_BARRIER)
    return 1;

  /* A read from read-only memory can't conflict with read-write memory.  */
  if (!writep && MEM_READONLY_P (mem))
    return 0;

  /* If we have MEMs referring to different address spaces (which can
     potentially overlap), we cannot easily tell from the addresses
     whether the references overlap.  */
  if (MEM_ADDR_SPACE (mem) != MEM_ADDR_SPACE (x))
    return 1;

  x_addr = XEXP (x, 0);
  mem_addr = XEXP (mem, 0);
  if (!((GET_CODE (x_addr) == VALUE
	 && GET_CODE (mem_addr) != VALUE
	 && reg_mentioned_p (x_addr, mem_addr))
	|| (GET_CODE (x_addr) != VALUE
	    && GET_CODE (mem_addr) == VALUE
	    && reg_mentioned_p (mem_addr, x_addr))))
    {
      x_addr = get_addr (x_addr);
      mem_addr = get_addr (mem_addr);
    }

  if (! writep)
    {
      base = find_base_term (mem_addr);
      if (base && (GET_CODE (base) == LABEL_REF
		   || (GET_CODE (base) == SYMBOL_REF
		       && CONSTANT_POOL_ADDRESS_P (base))))
	return 0;
    }

  if (! base_alias_check (x_addr, mem_addr, GET_MODE (x),
			  GET_MODE (mem)))
    return 0;

  x_addr = canon_rtx (x_addr);
  mem_addr = canon_rtx (mem_addr);

  if ((ret = memrefs_conflict_p (SIZE_FOR_MODE (mem), mem_addr,
				 SIZE_FOR_MODE (x), x_addr, 0)) != -1)
    return ret;

  if (nonoverlapping_memrefs_p (x, mem, false))
    return 0;

  fixed_scalar
    = fixed_scalar_and_varying_struct_p (mem, x, mem_addr, x_addr,
					 rtx_addr_varies_p);

  if ((fixed_scalar == mem && !aliases_everything_p (x))
      || (fixed_scalar == x && !aliases_everything_p (mem)))
    return 0;

  return rtx_refs_may_alias_p (x, mem, false);
}
/* Anti dependence: X is written after read in MEM takes place.  */

int
anti_dependence (const_rtx mem, const_rtx x)
{
  return write_dependence_p (mem, x, /*writep=*/0);
}
/* Output dependence: X is written after store in MEM takes place.  */

int
output_dependence (const_rtx mem, const_rtx x)
{
  return write_dependence_p (mem, x, /*writep=*/1);
}
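
/* Informal summary of the three dependence tests (illustrative only):
   given an earlier reference to MEM and a later reference X,
   true_dependence asks whether a later read may observe an earlier store,
   anti_dependence asks whether a later write may clobber a value an
   earlier insn read, and output_dependence asks whether two stores may
   hit the same location.  A nonzero result means the two references must
   keep their order.  */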
/* Check whether X may be aliased with MEM.  Don't do offset-based
   memory disambiguation & TBAA.  */
int
may_alias_p (const_rtx mem, const_rtx x)
{
  rtx x_addr, mem_addr;

  if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
    return 1;

  /* ??? In true_dependence we also allow BLKmode to alias anything.  */
  if (GET_MODE (mem) == BLKmode || GET_MODE (x) == BLKmode)
    return 1;

  if (MEM_ALIAS_SET (x) == ALIAS_SET_MEMORY_BARRIER
      || MEM_ALIAS_SET (mem) == ALIAS_SET_MEMORY_BARRIER)
    return 1;

  /* Read-only memory is by definition never modified, and therefore can't
     conflict with anything.  We don't expect to find read-only set on MEM,
     but stupid user tricks can produce them, so don't die.  */
  if (MEM_READONLY_P (x))
    return 0;

  /* If we have MEMs referring to different address spaces (which can
     potentially overlap), we cannot easily tell from the addresses
     whether the references overlap.  */
  if (MEM_ADDR_SPACE (mem) != MEM_ADDR_SPACE (x))
    return 1;

  x_addr = XEXP (x, 0);
  mem_addr = XEXP (mem, 0);
  if (!((GET_CODE (x_addr) == VALUE
	 && GET_CODE (mem_addr) != VALUE
	 && reg_mentioned_p (x_addr, mem_addr))
	|| (GET_CODE (x_addr) != VALUE
	    && GET_CODE (mem_addr) == VALUE
	    && reg_mentioned_p (mem_addr, x_addr))))
    {
      x_addr = get_addr (x_addr);
      mem_addr = get_addr (mem_addr);
    }

  if (! base_alias_check (x_addr, mem_addr, GET_MODE (x), GET_MODE (mem_addr)))
    return 0;

  x_addr = canon_rtx (x_addr);
  mem_addr = canon_rtx (mem_addr);

  if (nonoverlapping_memrefs_p (mem, x, true))
    return 0;

  if (aliases_everything_p (x))
    return 1;

  /* We cannot use aliases_everything_p to test MEM, since we must look
     at MEM_ADDR, rather than XEXP (mem, 0).  */
  if (GET_CODE (mem_addr) == AND)
    return 1;

  if (fixed_scalar_and_varying_struct_p (mem, x, mem_addr, x_addr,
					 rtx_addr_varies_p))
    return 0;

  /* TBAA not valid for loop_invariant.  */
  return rtx_refs_may_alias_p (x, mem, false);
}
void
init_alias_target (void)
{
  int i;

  memset (static_reg_base_value, 0, sizeof static_reg_base_value);

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    /* Check whether this register can hold an incoming pointer
       argument.  FUNCTION_ARG_REGNO_P tests outgoing register
       numbers, so translate if necessary due to register windows.  */
    if (FUNCTION_ARG_REGNO_P (OUTGOING_REGNO (i))
	&& HARD_REGNO_MODE_OK (i, Pmode))
      static_reg_base_value[i]
	= gen_rtx_ADDRESS (VOIDmode, gen_rtx_REG (Pmode, i));

  static_reg_base_value[STACK_POINTER_REGNUM]
    = gen_rtx_ADDRESS (Pmode, stack_pointer_rtx);
  static_reg_base_value[ARG_POINTER_REGNUM]
    = gen_rtx_ADDRESS (Pmode, arg_pointer_rtx);
  static_reg_base_value[FRAME_POINTER_REGNUM]
    = gen_rtx_ADDRESS (Pmode, frame_pointer_rtx);
#if !HARD_FRAME_POINTER_IS_FRAME_POINTER
  static_reg_base_value[HARD_FRAME_POINTER_REGNUM]
    = gen_rtx_ADDRESS (Pmode, hard_frame_pointer_rtx);
#endif
}
/* Set MEMORY_MODIFIED when X modifies DATA (that is assumed
   to be a memory reference).  */
static bool memory_modified;
static void
memory_modified_1 (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
{
  if (MEM_P (x))
    {
      if (anti_dependence (x, (const_rtx)data)
	  || output_dependence (x, (const_rtx)data))
	memory_modified = true;
    }
}


/* Return true when INSN possibly modifies the memory contents of MEM
   (i.e. the address can be modified).  */

bool
memory_modified_in_insn_p (const_rtx mem, const_rtx insn)
{
  if (!INSN_P (insn))
    return false;
  memory_modified = false;
  note_stores (PATTERN (insn), memory_modified_1, CONST_CAST_RTX(mem));
  return memory_modified;
}
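
/* Usage sketch (illustrative only; the caller shown is hypothetical): a
   pass that wants to hoist a load above INSN could guard the move with

     if (!memory_modified_in_insn_p (mem, insn))
       ... move the load ...

   since a false result means no store in INSN was seen to conflict with
   MEM.  */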
/* Initialize the aliasing machinery.  Initialize the REG_KNOWN_VALUE
   array.  */

void
init_alias_analysis (void)
{
  unsigned int maxreg = max_reg_num ();
  int changed, pass;
  int i;
  unsigned int ui;
  rtx insn;

  timevar_push (TV_ALIAS_ANALYSIS);

  reg_known_value_size = maxreg - FIRST_PSEUDO_REGISTER;
  reg_known_value = ggc_alloc_cleared_vec_rtx (reg_known_value_size);
  reg_known_equiv_p = XCNEWVEC (bool, reg_known_value_size);

  /* If we have memory allocated from the previous run, use it.  */
  if (old_reg_base_value)
    reg_base_value = old_reg_base_value;

  if (reg_base_value)
    VEC_truncate (rtx, reg_base_value, 0);

  VEC_safe_grow_cleared (rtx, gc, reg_base_value, maxreg);

  new_reg_base_value = XNEWVEC (rtx, maxreg);
  reg_seen = XNEWVEC (char, maxreg);

  /* The basic idea is that each pass through this loop will use the
     "constant" information from the previous pass to propagate alias
     information through another level of assignments.

     This could get expensive if the assignment chains are long.  Maybe
     we should throttle the number of iterations, possibly based on
     the optimization level or flag_expensive_optimizations.

     We could propagate more information in the first pass by making use
     of DF_REG_DEF_COUNT to determine immediately that the alias information
     for a pseudo is "constant".

     A program with an uninitialized variable can cause an infinite loop
     here.  Instead of doing a full dataflow analysis to detect such problems
     we just cap the number of iterations for the loop.

     The state of the arrays for the set chain in question does not matter
     since the program has undefined behavior.  */
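
  /* Informal example (illustrative only): for a chain of copies such as

       p1 = &x;  p2 = p1;  p3 = p2;

     the first pass only records a base value for p1; each subsequent pass
     pushes that information one assignment further down the chain, so the
     loop below repeats until no base value changes (or the pass limit is
     reached).  */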
  pass = 0;
  do
    {
      /* Assume nothing will change this iteration of the loop.  */
      changed = 0;

      /* We want to assign the same IDs each iteration of this loop, so
	 start counting from zero each iteration of the loop.  */
      unique_id = 0;

      /* We're at the start of the function each iteration through the
	 loop, so we're copying arguments.  */
      copying_arguments = true;

      /* Wipe the potential alias information clean for this pass.  */
      memset (new_reg_base_value, 0, maxreg * sizeof (rtx));

      /* Wipe the reg_seen array clean.  */
      memset (reg_seen, 0, maxreg);

      /* Mark all hard registers which may contain an address.
	 The stack, frame and argument pointers may contain an address.
	 An argument register which can hold a Pmode value may contain
	 an address even if it is not in BASE_REGS.

	 The address expression is VOIDmode for an argument and
	 Pmode for other registers.  */

      memcpy (new_reg_base_value, static_reg_base_value,
	      FIRST_PSEUDO_REGISTER * sizeof (rtx));

      /* Walk the insns adding values to the new_reg_base_value array.  */
      for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
	{
	  if (INSN_P (insn))
	    {
	      rtx note, set;

#if defined (HAVE_prologue) || defined (HAVE_epilogue)
	      /* The prologue/epilogue insns are not threaded onto the
		 insn chain until after reload has completed.  Thus,
		 there is no sense wasting time checking if INSN is in
		 the prologue/epilogue until after reload has completed.  */
	      if (reload_completed
		  && prologue_epilogue_contains (insn))
		continue;
#endif

	      /* If this insn has a noalias note, process it.  Otherwise,
		 scan for sets.  A simple set will have no side effects
		 which could change the base value of any other register.  */

	      if (GET_CODE (PATTERN (insn)) == SET
		  && REG_NOTES (insn) != 0
		  && find_reg_note (insn, REG_NOALIAS, NULL_RTX))
		record_set (SET_DEST (PATTERN (insn)), NULL_RTX, NULL);
	      else
		note_stores (PATTERN (insn), record_set, NULL);

	      set = single_set (insn);

	      if (set != 0
		  && REG_P (SET_DEST (set))
		  && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
		{
		  unsigned int regno = REGNO (SET_DEST (set));
		  rtx src = SET_SRC (set);
		  rtx t;

		  note = find_reg_equal_equiv_note (insn);
		  if (note && REG_NOTE_KIND (note) == REG_EQUAL
		      && DF_REG_DEF_COUNT (regno) != 1)
		    note = NULL_RTX;

		  if (note != NULL_RTX
		      && GET_CODE (XEXP (note, 0)) != EXPR_LIST
		      && ! rtx_varies_p (XEXP (note, 0), 1)
		      && ! reg_overlap_mentioned_p (SET_DEST (set),
						    XEXP (note, 0)))
		    {
		      set_reg_known_value (regno, XEXP (note, 0));
		      set_reg_known_equiv_p (regno,
					     REG_NOTE_KIND (note) == REG_EQUIV);
		    }
		  else if (DF_REG_DEF_COUNT (regno) == 1
			   && GET_CODE (src) == PLUS
			   && REG_P (XEXP (src, 0))
			   && (t = get_reg_known_value (REGNO (XEXP (src, 0))))
			   && CONST_INT_P (XEXP (src, 1)))
		    {
		      t = plus_constant (t, INTVAL (XEXP (src, 1)));
		      set_reg_known_value (regno, t);
		      set_reg_known_equiv_p (regno, 0);
		    }
		  else if (DF_REG_DEF_COUNT (regno) == 1
			   && ! rtx_varies_p (src, 1))
		    {
		      set_reg_known_value (regno, src);
		      set_reg_known_equiv_p (regno, 0);
		    }
		}
	    }
	  else if (NOTE_P (insn)
		   && NOTE_KIND (insn) == NOTE_INSN_FUNCTION_BEG)
	    copying_arguments = false;
	}

      /* Now propagate values from new_reg_base_value to reg_base_value.  */
      gcc_assert (maxreg == (unsigned int) max_reg_num ());

      for (ui = 0; ui < maxreg; ui++)
	{
	  if (new_reg_base_value[ui]
	      && new_reg_base_value[ui] != VEC_index (rtx, reg_base_value, ui)
	      && ! rtx_equal_p (new_reg_base_value[ui],
				VEC_index (rtx, reg_base_value, ui)))
	    {
	      VEC_replace (rtx, reg_base_value, ui, new_reg_base_value[ui]);
	      changed = 1;
	    }
	}
    }
  while (changed && ++pass < MAX_ALIAS_LOOP_PASSES);

  /* Fill in the remaining entries.  */
  for (i = 0; i < (int)reg_known_value_size; i++)
    if (reg_known_value[i] == 0)
      reg_known_value[i] = regno_reg_rtx[i + FIRST_PSEUDO_REGISTER];

  /* Clean up.  */
  free (new_reg_base_value);
  new_reg_base_value = 0;
  free (reg_seen);
  reg_seen = 0;
  timevar_pop (TV_ALIAS_ANALYSIS);
}
/* Equate REG_BASE_VALUE (reg1) to REG_BASE_VALUE (reg2).
   Special API for var-tracking pass purposes.  */

void
vt_equate_reg_base_value (const_rtx reg1, const_rtx reg2)
{
  VEC_replace (rtx, reg_base_value, REGNO (reg1), REG_BASE_VALUE (reg2));
}
void
end_alias_analysis (void)
{
  old_reg_base_value = reg_base_value;
  ggc_free (reg_known_value);
  reg_known_value = 0;
  reg_known_value_size = 0;
  free (reg_known_equiv_p);
  reg_known_equiv_p = 0;
}

#include "gt-alias.h"