1 /* Alias analysis for GNU C
2 Copyright (C) 1997, 1998, 1999 Free Software Foundation, Inc.
3 Contributed by John Carr (jfc@mit.edu).
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "rtl.h"
25 #include "tree.h"
26 #include "tm_p.h"
27 #include "function.h"
28 #include "expr.h"
29 #include "regs.h"
30 #include "hard-reg-set.h"
31 #include "flags.h"
32 #include "output.h"
33 #include "toplev.h"
34 #include "splay-tree.h"
35 #include "insn-flags.h"
37 /* The alias sets assigned to MEMs assist the back-end in determining
38 which MEMs can alias which other MEMs. In general, two MEMs in
39     different alias sets do not alias each other.  There is one
40 exception, however. Consider something like:
42 struct S {int i; double d; };
44 a store to an `S' can alias something of either type `int' or type
45 `double'. (However, a store to an `int' cannot alias a `double'
46 and vice versa.) We indicate this via a tree structure that looks
47 like:
48 struct S
49 / \
50 / \
51 |/_ _\|
52 int double
54 (The arrows are directed and point downwards.) If, when comparing
55 two alias sets, we can hold one set fixed, and trace the other set
56 downwards, and at some point find the first set, the two MEMs can
57 alias one another. In this situation we say the alias set for
58 `struct S' is the `superset' and that those for `int' and `double'
59 are `subsets'.
61 Alias set zero is implicitly a superset of all other alias sets.
62    However, there is no actual entry for alias set zero.  It is an
63 error to attempt to explicitly construct a subset of zero. */
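/* An added illustrative sketch, not part of the original file: for the
   `struct S' example above, the language front end would typically record
   the relationships with calls along these lines, where S_SET, INT_SET and
   DOUBLE_SET stand in for whatever alias set numbers it has assigned:

       record_alias_subset (S_SET, INT_SET);
       record_alias_subset (S_SET, DOUBLE_SET);

   After that, mems_in_disjoint_alias_sets_p returns 0 (may alias) for a
   `struct S' MEM paired with an `int' MEM, but 1 (disjoint) for an `int'
   MEM paired with a `double' MEM.  */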
65 typedef struct alias_set_entry {
66 /* The alias set number, as stored in MEM_ALIAS_SET. */
67 int alias_set;
69 /* The children of the alias set. These are not just the immediate
70 children, but, in fact, all children. So, if we have:
72 struct T { struct S s; float f; }
74 continuing our example above, the children here will be all of
75 `int', `double', `float', and `struct S'. */
76 splay_tree children;
77 }* alias_set_entry;
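/* Continuing the added sketch above: assuming the `struct S' subsets have
   already been recorded, then for

       struct T { struct S s; float f; };

   the calls record_alias_subset (T_SET, S_SET) and
   record_alias_subset (T_SET, FLOAT_SET), with T_SET and FLOAT_SET again
   hypothetical set numbers, leave T_SET's `children' splay tree holding
   all of S_SET, INT_SET, DOUBLE_SET and FLOAT_SET, because
   record_alias_subset copies the subset's own children across via
   insert_subset_children.  */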
79 static rtx canon_rtx PROTO((rtx));
80 static int rtx_equal_for_memref_p PROTO((rtx, rtx));
81 static rtx find_symbolic_term PROTO((rtx));
82 static int memrefs_conflict_p PROTO((int, rtx, int, rtx,
83 HOST_WIDE_INT));
84 static void record_set PROTO((rtx, rtx));
85 static rtx find_base_term PROTO((rtx));
86 static int base_alias_check PROTO((rtx, rtx, enum machine_mode,
87 enum machine_mode));
88 static rtx find_base_value PROTO((rtx));
89 static int mems_in_disjoint_alias_sets_p PROTO((rtx, rtx));
90 static int insert_subset_children PROTO((splay_tree_node,
91 void*));
92 static alias_set_entry get_alias_set_entry PROTO((int));
93 static rtx fixed_scalar_and_varying_struct_p PROTO((rtx, rtx, int (*)(rtx)));
94 static int aliases_everything_p PROTO((rtx));
95 static int write_dependence_p PROTO((rtx, rtx, int));
96 static int nonlocal_reference_p PROTO((rtx));
98 /* Set up all info needed to perform alias analysis on memory references. */
100 #define SIZE_FOR_MODE(X) (GET_MODE_SIZE (GET_MODE (X)))
102 /* Returns nonzero if MEM1 and MEM2 do not alias because they are in
103 different alias sets. We ignore alias sets in functions making use
104 of variable arguments because the va_arg macros on some systems are
105 not legal ANSI C. */
106 #define DIFFERENT_ALIAS_SETS_P(MEM1, MEM2) \
107 mems_in_disjoint_alias_sets_p (MEM1, MEM2)
109 /* Cap the number of passes we make over the insns propagating alias
110 information through set chains.
112 10 is a completely arbitrary choice. */
113 #define MAX_ALIAS_LOOP_PASSES 10
115 /* reg_base_value[N] gives an address to which register N is related.
116 If all sets after the first add or subtract to the current value
117 or otherwise modify it so it does not point to a different top level
118 object, reg_base_value[N] is equal to the address part of the source
119 of the first set.
121 A base address can be an ADDRESS, SYMBOL_REF, or LABEL_REF. ADDRESS
122 expressions represent certain special values: function arguments and
123 the stack, frame, and argument pointers. The contents of an address
124 expression are not used (but they are descriptive for debugging);
125 only the address and mode matter. Pointer equality, not rtx_equal_p,
126 determines whether two ADDRESS expressions refer to the same base
127 address. The mode determines whether it is a function argument or
128 other special value. */
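/* A hedged illustration, not original text: after `int *p = &x;' for a
   file-scope `x', the pseudo holding `p' typically ends up with a
   reg_base_value of (symbol_ref "x"); a pseudo copied from an incoming
   pointer argument gets an ADDRESS expression in VOIDmode, while the
   stack and frame pointers get ADDRESS expressions in Pmode.  Two
   distinct ADDRESS rtxs compare unequal by pointer identity even if
   they print identically.  */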
130 rtx *reg_base_value;
131 rtx *new_reg_base_value;
132 unsigned int reg_base_value_size; /* size of reg_base_value array */
133 #define REG_BASE_VALUE(X) \
134 ((unsigned) REGNO (X) < reg_base_value_size ? reg_base_value[REGNO (X)] : 0)
136 /* Vector of known invariant relationships between registers. Set in
137 loop unrolling. Indexed by register number, if nonzero the value
138 is an expression describing this register in terms of another.
140 The length of this array is REG_BASE_VALUE_SIZE.
142 Because this array contains only pseudo registers it has no effect
143 after reload. */
144 static rtx *alias_invariant;
146 /* Vector indexed by N giving the initial (unchanging) value known
147 for pseudo-register N. */
148 rtx *reg_known_value;
150 /* Indicates number of valid entries in reg_known_value. */
151 static int reg_known_value_size;
153 /* Vector recording for each reg_known_value whether it is due to a
154 REG_EQUIV note. Future passes (viz., reload) may replace the
155 pseudo with the equivalent expression and so we account for the
156 dependences that would be introduced if that happens. */
157 /* ??? This is a problem only on the Convex. The REG_EQUIV notes created in
158 assign_parms mention the arg pointer, and there are explicit insns in the
159 RTL that modify the arg pointer. Thus we must ensure that such insns don't
160 get scheduled across each other because that would invalidate the REG_EQUIV
161 notes. One could argue that the REG_EQUIV notes are wrong, but solving
162 the problem in the scheduler will likely give better code, so we do it
163 here. */
164 char *reg_known_equiv_p;
166 /* True when scanning insns from the start of the rtl to the
167 NOTE_INSN_FUNCTION_BEG note. */
169 static int copying_arguments;
171 /* The splay-tree used to store the various alias set entries. */
173 static splay_tree alias_sets;
175 /* Returns a pointer to the alias set entry for ALIAS_SET, if there is
176 such an entry, or NULL otherwise. */
178 static alias_set_entry
179 get_alias_set_entry (alias_set)
180 int alias_set;
182 splay_tree_node sn =
183 splay_tree_lookup (alias_sets, (splay_tree_key) alias_set);
185 return sn ? ((alias_set_entry) sn->value) : ((alias_set_entry) 0);
188 /* Returns nonzero value if the alias sets for MEM1 and MEM2 are such
189 that the two MEMs cannot alias each other. */
191 static int
192 mems_in_disjoint_alias_sets_p (mem1, mem2)
193 rtx mem1;
194 rtx mem2;
196 alias_set_entry ase;
198 #ifdef ENABLE_CHECKING
199 /* Perform a basic sanity check. Namely, that there are no alias sets
200 if we're not using strict aliasing. This helps to catch bugs
201 whereby someone uses PUT_CODE, but doesn't clear MEM_ALIAS_SET, or
202 where a MEM is allocated in some way other than by the use of
203 gen_rtx_MEM, and the MEM_ALIAS_SET is not cleared. If we begin to
204 use alias sets to indicate that spilled registers cannot alias each
205 other, we might need to remove this check. */
206 if (!flag_strict_aliasing &&
207 (MEM_ALIAS_SET (mem1) || MEM_ALIAS_SET (mem2)))
208 abort ();
209 #endif
211   /* The code used in varargs macros is often not conforming ANSI C,
212 which can trick the compiler into making incorrect aliasing
213 assumptions in these functions. So, we don't use alias sets in
214 such a function. FIXME: This should be moved into the front-end;
215 it is a language-dependent notion, and there's no reason not to
216 still use these checks to handle globals. */
217 if (current_function_stdarg || current_function_varargs)
218 return 0;
220 if (!MEM_ALIAS_SET (mem1) || !MEM_ALIAS_SET (mem2))
221 /* We have no alias set information for one of the MEMs, so we
222 have to assume it can alias anything. */
223 return 0;
225 if (MEM_ALIAS_SET (mem1) == MEM_ALIAS_SET (mem2))
226 /* The two alias sets are the same, so they may alias. */
227 return 0;
229 /* Iterate through each of the children of the first alias set,
230 comparing it with the second alias set. */
231 ase = get_alias_set_entry (MEM_ALIAS_SET (mem1));
232 if (ase && splay_tree_lookup (ase->children,
233 (splay_tree_key) MEM_ALIAS_SET (mem2)))
234 return 0;
236 /* Now do the same, but with the alias sets reversed. */
237 ase = get_alias_set_entry (MEM_ALIAS_SET (mem2));
238 if (ase && splay_tree_lookup (ase->children,
239 (splay_tree_key) MEM_ALIAS_SET (mem1)))
240 return 0;
242 /* The two MEMs are in distinct alias sets, and neither one is the
243 child of the other. Therefore, they cannot alias. */
244 return 1;
247 /* Insert the NODE into the splay tree given by DATA. Used by
248 record_alias_subset via splay_tree_foreach. */
250 static int
251 insert_subset_children (node, data)
252 splay_tree_node node;
253 void *data;
255 splay_tree_insert ((splay_tree) data,
256 node->key,
257 node->value);
259 return 0;
262 /* Indicate that things in SUBSET can alias things in SUPERSET, but
263 not vice versa. For example, in C, a store to an `int' can alias a
264 structure containing an `int', but not vice versa. Here, the
265 structure would be the SUPERSET and `int' the SUBSET. This
266 function should be called only once per SUPERSET/SUBSET pair. At
267 present any given alias set may only be a subset of one superset.
269 It is illegal for SUPERSET to be zero; everything is implicitly a
270 subset of alias set zero. */
272 void
273 record_alias_subset (superset, subset)
274 int superset;
275 int subset;
277 alias_set_entry superset_entry;
278 alias_set_entry subset_entry;
280 if (superset == 0)
281 abort ();
283 superset_entry = get_alias_set_entry (superset);
284 if (!superset_entry)
286 /* Create an entry for the SUPERSET, so that we have a place to
287 attach the SUBSET. */
288 superset_entry =
289 (alias_set_entry) xmalloc (sizeof (struct alias_set_entry));
290 superset_entry->alias_set = superset;
291 superset_entry->children
292 = splay_tree_new (splay_tree_compare_ints, 0, 0);
293 splay_tree_insert (alias_sets,
294 (splay_tree_key) superset,
295 (splay_tree_value) superset_entry);
299 subset_entry = get_alias_set_entry (subset);
300 if (subset_entry)
301 /* There is an entry for the subset. Enter all of its children
302 (if they are not already present) as children of the SUPERSET. */
303 splay_tree_foreach (subset_entry->children,
304 insert_subset_children,
305 superset_entry->children);
307 /* Enter the SUBSET itself as a child of the SUPERSET. */
308 splay_tree_insert (superset_entry->children,
309 (splay_tree_key) subset,
310 /*value=*/0);
313 /* Inside SRC, the source of a SET, find a base address. */
315 static rtx
316 find_base_value (src)
317 register rtx src;
319 switch (GET_CODE (src))
321 case SYMBOL_REF:
322 case LABEL_REF:
323 return src;
325 case REG:
326 /* At the start of a function argument registers have known base
327 values which may be lost later. Returning an ADDRESS
328 expression here allows optimization based on argument values
329 even when the argument registers are used for other purposes. */
330 if (REGNO (src) < FIRST_PSEUDO_REGISTER && copying_arguments)
331 return new_reg_base_value[REGNO (src)];
333 /* If a pseudo has a known base value, return it. Do not do this
334 for hard regs since it can result in a circular dependency
335 chain for registers which have values at function entry.
337 The test above is not sufficient because the scheduler may move
338          a copy out of an arg reg past the NOTE_INSN_FUNCTION_BEG note.  */
339 if (REGNO (src) >= FIRST_PSEUDO_REGISTER
340 && (unsigned) REGNO (src) < reg_base_value_size
341 && reg_base_value[REGNO (src)])
342 return reg_base_value[REGNO (src)];
344 return src;
346 case MEM:
347 /* Check for an argument passed in memory. Only record in the
348 copying-arguments block; it is too hard to track changes
349 otherwise. */
350 if (copying_arguments
351 && (XEXP (src, 0) == arg_pointer_rtx
352 || (GET_CODE (XEXP (src, 0)) == PLUS
353 && XEXP (XEXP (src, 0), 0) == arg_pointer_rtx)))
354 return gen_rtx_ADDRESS (VOIDmode, src);
355 return 0;
357 case CONST:
358 src = XEXP (src, 0);
359 if (GET_CODE (src) != PLUS && GET_CODE (src) != MINUS)
360 break;
361 /* fall through */
363 case PLUS:
364 case MINUS:
366 rtx temp, src_0 = XEXP (src, 0), src_1 = XEXP (src, 1);
368 /* If either operand is a REG, then see if we already have
369 a known value for it. */
370 if (GET_CODE (src_0) == REG)
372 temp = find_base_value (src_0);
373 if (temp)
374 src_0 = temp;
377 if (GET_CODE (src_1) == REG)
379 temp = find_base_value (src_1);
380 if (temp)
381 src_1 = temp;
384 /* Guess which operand is the base address.
386 If either operand is a symbol, then it is the base. If
387 either operand is a CONST_INT, then the other is the base. */
389 if (GET_CODE (src_1) == CONST_INT
390 || GET_CODE (src_0) == SYMBOL_REF
391 || GET_CODE (src_0) == LABEL_REF
392 || GET_CODE (src_0) == CONST)
393 return find_base_value (src_0);
395 if (GET_CODE (src_0) == CONST_INT
396 || GET_CODE (src_1) == SYMBOL_REF
397 || GET_CODE (src_1) == LABEL_REF
398 || GET_CODE (src_1) == CONST)
399 return find_base_value (src_1);
401 /* This might not be necessary anymore.
403 If either operand is a REG that is a known pointer, then it
404 is the base. */
405 if (GET_CODE (src_0) == REG && REGNO_POINTER_FLAG (REGNO (src_0)))
406 return find_base_value (src_0);
408 if (GET_CODE (src_1) == REG && REGNO_POINTER_FLAG (REGNO (src_1)))
409 return find_base_value (src_1);
411 return 0;
414 case LO_SUM:
415 /* The standard form is (lo_sum reg sym) so look only at the
416 second operand. */
417 return find_base_value (XEXP (src, 1));
419 case AND:
420 /* If the second operand is constant set the base
421 address to the first operand. */
422 if (GET_CODE (XEXP (src, 1)) == CONST_INT && INTVAL (XEXP (src, 1)) != 0)
423 return find_base_value (XEXP (src, 0));
424 return 0;
426 case ZERO_EXTEND:
427 case SIGN_EXTEND: /* used for NT/Alpha pointers */
428 case HIGH:
429 return find_base_value (XEXP (src, 0));
431 default:
432 break;
435 return 0;
438 /* Called from init_alias_analysis indirectly through note_stores. */
440 /* while scanning insns to find base values, reg_seen[N] is nonzero if
441 register N has been set in this function. */
442 static char *reg_seen;
444 /* Addresses which are known not to alias anything else are identified
445 by a unique integer. */
446 static int unique_id;
448 static void
449 record_set (dest, set)
450 rtx dest, set;
452 register int regno;
453 rtx src;
455 if (GET_CODE (dest) != REG)
456 return;
458 regno = REGNO (dest);
460 if (set)
462 /* A CLOBBER wipes out any old value but does not prevent a previously
463 unset register from acquiring a base address (i.e. reg_seen is not
464 set). */
465 if (GET_CODE (set) == CLOBBER)
467 new_reg_base_value[regno] = 0;
468 return;
470 src = SET_SRC (set);
472 else
474 if (reg_seen[regno])
476 new_reg_base_value[regno] = 0;
477 return;
479 reg_seen[regno] = 1;
480 new_reg_base_value[regno] = gen_rtx_ADDRESS (Pmode,
481 GEN_INT (unique_id++));
482 return;
485 /* This is not the first set. If the new value is not related to the
486 old value, forget the base value. Note that the following code is
487 not detected:
488 extern int x, y; int *p = &x; p += (&y-&x);
489 ANSI C does not allow computing the difference of addresses
490 of distinct top level objects. */
491 if (new_reg_base_value[regno])
492 switch (GET_CODE (src))
494 case LO_SUM:
495 case PLUS:
496 case MINUS:
497 if (XEXP (src, 0) != dest && XEXP (src, 1) != dest)
498 new_reg_base_value[regno] = 0;
499 break;
500 case AND:
501 if (XEXP (src, 0) != dest || GET_CODE (XEXP (src, 1)) != CONST_INT)
502 new_reg_base_value[regno] = 0;
503 break;
504 default:
505 new_reg_base_value[regno] = 0;
506 break;
508 /* If this is the first set of a register, record the value. */
509 else if ((regno >= FIRST_PSEUDO_REGISTER || ! fixed_regs[regno])
510 && ! reg_seen[regno] && new_reg_base_value[regno] == 0)
511 new_reg_base_value[regno] = find_base_value (src);
513 reg_seen[regno] = 1;
516 /* Called from loop optimization when a new pseudo-register is created. */
517 void
518 record_base_value (regno, val, invariant)
519 int regno;
520 rtx val;
521 int invariant;
523 if ((unsigned) regno >= reg_base_value_size)
524 return;
526 /* If INVARIANT is true then this value also describes an invariant
527 relationship which can be used to deduce that two registers with
528 unknown values are different. */
529 if (invariant && alias_invariant)
530 alias_invariant[regno] = val;
532 if (GET_CODE (val) == REG)
534 if ((unsigned) REGNO (val) < reg_base_value_size)
536 reg_base_value[regno] = reg_base_value[REGNO (val)];
538 return;
540 reg_base_value[regno] = find_base_value (val);
543 static rtx
544 canon_rtx (x)
545 rtx x;
547 /* Recursively look for equivalences. */
548 if (GET_CODE (x) == REG && REGNO (x) >= FIRST_PSEUDO_REGISTER
549 && REGNO (x) < reg_known_value_size)
550 return reg_known_value[REGNO (x)] == x
551 ? x : canon_rtx (reg_known_value[REGNO (x)]);
552 else if (GET_CODE (x) == PLUS)
554 rtx x0 = canon_rtx (XEXP (x, 0));
555 rtx x1 = canon_rtx (XEXP (x, 1));
557 if (x0 != XEXP (x, 0) || x1 != XEXP (x, 1))
559 /* We can tolerate LO_SUMs being offset here; these
560 rtl are used for nothing other than comparisons. */
561 if (GET_CODE (x0) == CONST_INT)
562 return plus_constant_for_output (x1, INTVAL (x0));
563 else if (GET_CODE (x1) == CONST_INT)
564 return plus_constant_for_output (x0, INTVAL (x1));
565 return gen_rtx_PLUS (GET_MODE (x), x0, x1);
568 /* This gives us much better alias analysis when called from
569 the loop optimizer. Note we want to leave the original
570 MEM alone, but need to return the canonicalized MEM with
571 all the flags with their original values. */
572 else if (GET_CODE (x) == MEM)
574 rtx addr = canon_rtx (XEXP (x, 0));
575 if (addr != XEXP (x, 0))
577 rtx new = gen_rtx_MEM (GET_MODE (x), addr);
578 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
579 MEM_COPY_ATTRIBUTES (new, x);
580 MEM_ALIAS_SET (new) = MEM_ALIAS_SET (x);
581 x = new;
584 return x;
587 /* Return 1 if X and Y are identical-looking rtx's.
589 We use the data in reg_known_value above to see if two registers with
590 different numbers are, in fact, equivalent. */
592 static int
593 rtx_equal_for_memref_p (x, y)
594 rtx x, y;
596 register int i;
597 register int j;
598 register enum rtx_code code;
599 register const char *fmt;
601 if (x == 0 && y == 0)
602 return 1;
603 if (x == 0 || y == 0)
604 return 0;
605 x = canon_rtx (x);
606 y = canon_rtx (y);
608 if (x == y)
609 return 1;
611 code = GET_CODE (x);
612 /* Rtx's of different codes cannot be equal. */
613 if (code != GET_CODE (y))
614 return 0;
616 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.
617 (REG:SI x) and (REG:HI x) are NOT equivalent. */
619 if (GET_MODE (x) != GET_MODE (y))
620 return 0;
622 /* REG, LABEL_REF, and SYMBOL_REF can be compared nonrecursively. */
624 if (code == REG)
625 return REGNO (x) == REGNO (y);
626 if (code == LABEL_REF)
627 return XEXP (x, 0) == XEXP (y, 0);
628 if (code == SYMBOL_REF)
629 return XSTR (x, 0) == XSTR (y, 0);
630 if (code == CONST_INT)
631 return INTVAL (x) == INTVAL (y);
632 /* There's no need to compare the contents of CONST_DOUBLEs because
633 they're unique. */
634 if (code == CONST_DOUBLE)
635 return 0;
636 if (code == ADDRESSOF)
637 return REGNO (XEXP (x, 0)) == REGNO (XEXP (y, 0)) && XINT (x, 1) == XINT (y, 1);
639   /* For commutative operations, the RTXs match if the operands match in
640      any order.  Also handle the simple binary and unary cases without a loop.  */
641 if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
642 return ((rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 0))
643 && rtx_equal_for_memref_p (XEXP (x, 1), XEXP (y, 1)))
644 || (rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 1))
645 && rtx_equal_for_memref_p (XEXP (x, 1), XEXP (y, 0))));
646 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
647 return (rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 0))
648 && rtx_equal_for_memref_p (XEXP (x, 1), XEXP (y, 1)));
649 else if (GET_RTX_CLASS (code) == '1')
650 return rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 0));
652 /* Compare the elements. If any pair of corresponding elements
653      fail to match, return 0 for the whole thing.
655 Limit cases to types which actually appear in addresses. */
657 fmt = GET_RTX_FORMAT (code);
658 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
660 switch (fmt[i])
662 case 'i':
663 if (XINT (x, i) != XINT (y, i))
664 return 0;
665 break;
667 case 'E':
668 /* Two vectors must have the same length. */
669 if (XVECLEN (x, i) != XVECLEN (y, i))
670 return 0;
672 /* And the corresponding elements must match. */
673 for (j = 0; j < XVECLEN (x, i); j++)
674 if (rtx_equal_for_memref_p (XVECEXP (x, i, j), XVECEXP (y, i, j)) == 0)
675 return 0;
676 break;
678 case 'e':
679 if (rtx_equal_for_memref_p (XEXP (x, i), XEXP (y, i)) == 0)
680 return 0;
681 break;
683 /* This can happen for an asm which clobbers memory. */
684 case '0':
685 break;
687 /* It is believed that rtx's at this level will never
688 contain anything but integers and other rtx's,
689 except for within LABEL_REFs and SYMBOL_REFs. */
690 default:
691 abort ();
694 return 1;
697 /* Given an rtx X, find a SYMBOL_REF or LABEL_REF within
698 X and return it, or return 0 if none found. */
700 static rtx
701 find_symbolic_term (x)
702 rtx x;
704 register int i;
705 register enum rtx_code code;
706 register const char *fmt;
708 code = GET_CODE (x);
709 if (code == SYMBOL_REF || code == LABEL_REF)
710 return x;
711 if (GET_RTX_CLASS (code) == 'o')
712 return 0;
714 fmt = GET_RTX_FORMAT (code);
715 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
717 rtx t;
719 if (fmt[i] == 'e')
721 t = find_symbolic_term (XEXP (x, i));
722 if (t != 0)
723 return t;
725 else if (fmt[i] == 'E')
726 break;
728 return 0;
731 static rtx
732 find_base_term (x)
733 register rtx x;
735 switch (GET_CODE (x))
737 case REG:
738 return REG_BASE_VALUE (x);
740 case ZERO_EXTEND:
741 case SIGN_EXTEND: /* Used for Alpha/NT pointers */
742 case HIGH:
743 case PRE_INC:
744 case PRE_DEC:
745 case POST_INC:
746 case POST_DEC:
747 return find_base_term (XEXP (x, 0));
749 case CONST:
750 x = XEXP (x, 0);
751 if (GET_CODE (x) != PLUS && GET_CODE (x) != MINUS)
752 return 0;
753 /* fall through */
754 case LO_SUM:
755 case PLUS:
756 case MINUS:
758 rtx tmp1 = XEXP (x, 0);
759 rtx tmp2 = XEXP (x, 1);
761       /* This is a little bit tricky since we have to determine which of
762 the two operands represents the real base address. Otherwise this
763 routine may return the index register instead of the base register.
765 That may cause us to believe no aliasing was possible, when in
766 fact aliasing is possible.
768 We use a few simple tests to guess the base register. Additional
769 tests can certainly be added. For example, if one of the operands
770 is a shift or multiply, then it must be the index register and the
771 other operand is the base register. */
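      /* An added example of the heuristic (illustrative only): for an
	 address like (plus (mult (reg i) (const_int 4)) (reg p)) where the
	 pseudo for `p' happens to have REGNO_POINTER_FLAG set, the tests
	 below pick `p' and return its base term rather than the index
	 term.  */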
773 /* If either operand is known to be a pointer, then use it
774 to determine the base term. */
775 if (REG_P (tmp1) && REGNO_POINTER_FLAG (REGNO (tmp1)))
776 return find_base_term (tmp1);
778 if (REG_P (tmp2) && REGNO_POINTER_FLAG (REGNO (tmp2)))
779 return find_base_term (tmp2);
781 /* Neither operand was known to be a pointer. Go ahead and find the
782 base term for both operands. */
783 tmp1 = find_base_term (tmp1);
784 tmp2 = find_base_term (tmp2);
786       /* If either base term is a named object or a special address
787 (like an argument or stack reference), then use it for the
788 base term. */
789 if (tmp1
790 && (GET_CODE (tmp1) == SYMBOL_REF
791 || GET_CODE (tmp1) == LABEL_REF
792 || (GET_CODE (tmp1) == ADDRESS
793 && GET_MODE (tmp1) != VOIDmode)))
794 return tmp1;
796 if (tmp2
797 && (GET_CODE (tmp2) == SYMBOL_REF
798 || GET_CODE (tmp2) == LABEL_REF
799 || (GET_CODE (tmp2) == ADDRESS
800 && GET_MODE (tmp2) != VOIDmode)))
801 return tmp2;
803 /* We could not determine which of the two operands was the
804 base register and which was the index. So we can determine
805 nothing from the base alias check. */
806 return 0;
809 case AND:
810 if (GET_CODE (XEXP (x, 0)) == REG && GET_CODE (XEXP (x, 1)) == CONST_INT)
811 return REG_BASE_VALUE (XEXP (x, 0));
812 return 0;
814 case SYMBOL_REF:
815 case LABEL_REF:
816 return x;
818 default:
819 return 0;
823 /* Return 0 if the addresses X and Y are known to point to different
824 objects, 1 if they might be pointers to the same object. */
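/* Worked example, added here for illustration: for X with address
   (plus (reg frame_pointer) (const_int -4)) and Y with address
   (symbol_ref "g"), find_base_term yields an ADDRESS in Pmode for X and
   the SYMBOL_REF for Y, so the stack-reference test below returns 0:
   a frame slot can never alias a global.  */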
826 static int
827 base_alias_check (x, y, x_mode, y_mode)
828 rtx x, y;
829 enum machine_mode x_mode, y_mode;
831 rtx x_base = find_base_term (x);
832 rtx y_base = find_base_term (y);
834 /* If the address itself has no known base see if a known equivalent
835 value has one. If either address still has no known base, nothing
836 is known about aliasing. */
837 if (x_base == 0)
839 rtx x_c;
840 if (! flag_expensive_optimizations || (x_c = canon_rtx (x)) == x)
841 return 1;
842 x_base = find_base_term (x_c);
843 if (x_base == 0)
844 return 1;
847 if (y_base == 0)
849 rtx y_c;
850 if (! flag_expensive_optimizations || (y_c = canon_rtx (y)) == y)
851 return 1;
852 y_base = find_base_term (y_c);
853 if (y_base == 0)
854 return 1;
857 /* If the base addresses are equal nothing is known about aliasing. */
858 if (rtx_equal_p (x_base, y_base))
859 return 1;
861 /* The base addresses of the read and write are different expressions.
862 If they are both symbols and they are not accessed via AND, there is
863 no conflict. We can bring knowledge of object alignment into play
864 here. For example, on alpha, "char a, b;" can alias one another,
865 though "char a; long b;" cannot. */
866 if (GET_CODE (x_base) != ADDRESS && GET_CODE (y_base) != ADDRESS)
868 if (GET_CODE (x) == AND && GET_CODE (y) == AND)
869 return 1;
870 if (GET_CODE (x) == AND
871 && (GET_CODE (XEXP (x, 1)) != CONST_INT
872 || GET_MODE_UNIT_SIZE (y_mode) < -INTVAL (XEXP (x, 1))))
873 return 1;
874 if (GET_CODE (y) == AND
875 && (GET_CODE (XEXP (y, 1)) != CONST_INT
876 || GET_MODE_UNIT_SIZE (x_mode) < -INTVAL (XEXP (y, 1))))
877 return 1;
878 /* Differing symbols never alias. */
879 return 0;
882 /* If one address is a stack reference there can be no alias:
883 stack references using different base registers do not alias,
884 a stack reference can not alias a parameter, and a stack reference
885 can not alias a global. */
886 if ((GET_CODE (x_base) == ADDRESS && GET_MODE (x_base) == Pmode)
887 || (GET_CODE (y_base) == ADDRESS && GET_MODE (y_base) == Pmode))
888 return 0;
890 if (! flag_argument_noalias)
891 return 1;
893 if (flag_argument_noalias > 1)
894 return 0;
896 /* Weak noalias assertion (arguments are distinct, but may match globals). */
897 return ! (GET_MODE (x_base) == VOIDmode && GET_MODE (y_base) == VOIDmode);
900 /* Return the address of the (N_REFS + 1)th memory reference to ADDR
901 where SIZE is the size in bytes of the memory reference. If ADDR
902 is not modified by the memory reference then ADDR is returned. */
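/* Added worked example: for ADDR = (post_inc (reg R)) with SIZE = 4,
   N_REFS = 0 gives an offset of 0 and plain (reg R) is returned, while
   N_REFS = 1 gives (plus (reg R) (const_int 4)); a pre_dec address with
   N_REFS = 0 already evaluates to (plus (reg R) (const_int -4)).  */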
904 rtx
905 addr_side_effect_eval (addr, size, n_refs)
906 rtx addr;
907 int size;
908 int n_refs;
910 int offset = 0;
912 switch (GET_CODE (addr))
914 case PRE_INC:
915 offset = (n_refs + 1) * size;
916 break;
917 case PRE_DEC:
918 offset = -(n_refs + 1) * size;
919 break;
920 case POST_INC:
921 offset = n_refs * size;
922 break;
923 case POST_DEC:
924 offset = -n_refs * size;
925 break;
927 default:
928 return addr;
931 if (offset)
932 addr = gen_rtx_PLUS (GET_MODE (addr), XEXP (addr, 0), GEN_INT (offset));
933 else
934 addr = XEXP (addr, 0);
936 return addr;
939 /* Return nonzero if X and Y (memory addresses) could reference the
940 same location in memory. C is an offset accumulator. When
941 C is nonzero, we are testing aliases between X and Y + C.
942 XSIZE is the size in bytes of the X reference,
943 similarly YSIZE is the size in bytes for Y.
945 If XSIZE or YSIZE is zero, we do not know the amount of memory being
946 referenced (the reference was BLKmode), so make the most pessimistic
947 assumptions.
949 If XSIZE or YSIZE is negative, we may access memory outside the object
950 being referenced as a side effect. This can happen when using AND to
951 align memory references, as is done on the Alpha.
953    It would be nice to notice that varying addresses cannot conflict with fp
954    if no local variables had their addresses taken, but that's too hard now.  */
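/* Added worked example: with X = (reg 100), XSIZE = 4,
   Y = (plus (reg 100) (const_int 2)), YSIZE = 4 and C = 0, the CONST_INT
   is folded into C, reducing the problem to (reg 100) against itself with
   C = 2; since XSIZE > C the references overlap and 1 is returned.  With
   (const_int 8) instead, C becomes 8, XSIZE > C fails, and 0 is returned:
   the two 4-byte references are disjoint.  */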
957 static int
958 memrefs_conflict_p (xsize, x, ysize, y, c)
959 register rtx x, y;
960 int xsize, ysize;
961 HOST_WIDE_INT c;
963 if (GET_CODE (x) == HIGH)
964 x = XEXP (x, 0);
965 else if (GET_CODE (x) == LO_SUM)
966 x = XEXP (x, 1);
967 else
968 x = canon_rtx (addr_side_effect_eval (x, xsize, 0));
969 if (GET_CODE (y) == HIGH)
970 y = XEXP (y, 0);
971 else if (GET_CODE (y) == LO_SUM)
972 y = XEXP (y, 1);
973 else
974 y = canon_rtx (addr_side_effect_eval (y, ysize, 0));
976 if (rtx_equal_for_memref_p (x, y))
978 if (xsize <= 0 || ysize <= 0)
979 return 1;
980 if (c >= 0 && xsize > c)
981 return 1;
982 if (c < 0 && ysize+c > 0)
983 return 1;
984 return 0;
987 /* This code used to check for conflicts involving stack references and
988 globals but the base address alias code now handles these cases. */
990 if (GET_CODE (x) == PLUS)
992 /* The fact that X is canonicalized means that this
993 PLUS rtx is canonicalized. */
994 rtx x0 = XEXP (x, 0);
995 rtx x1 = XEXP (x, 1);
997 if (GET_CODE (y) == PLUS)
999 /* The fact that Y is canonicalized means that this
1000 PLUS rtx is canonicalized. */
1001 rtx y0 = XEXP (y, 0);
1002 rtx y1 = XEXP (y, 1);
1004 if (rtx_equal_for_memref_p (x1, y1))
1005 return memrefs_conflict_p (xsize, x0, ysize, y0, c);
1006 if (rtx_equal_for_memref_p (x0, y0))
1007 return memrefs_conflict_p (xsize, x1, ysize, y1, c);
1008 if (GET_CODE (x1) == CONST_INT)
1010 if (GET_CODE (y1) == CONST_INT)
1011 return memrefs_conflict_p (xsize, x0, ysize, y0,
1012 c - INTVAL (x1) + INTVAL (y1));
1013 else
1014 return memrefs_conflict_p (xsize, x0, ysize, y,
1015 c - INTVAL (x1));
1017 else if (GET_CODE (y1) == CONST_INT)
1018 return memrefs_conflict_p (xsize, x, ysize, y0, c + INTVAL (y1));
1020 return 1;
1022 else if (GET_CODE (x1) == CONST_INT)
1023 return memrefs_conflict_p (xsize, x0, ysize, y, c - INTVAL (x1));
1025 else if (GET_CODE (y) == PLUS)
1027 /* The fact that Y is canonicalized means that this
1028 PLUS rtx is canonicalized. */
1029 rtx y0 = XEXP (y, 0);
1030 rtx y1 = XEXP (y, 1);
1032 if (GET_CODE (y1) == CONST_INT)
1033 return memrefs_conflict_p (xsize, x, ysize, y0, c + INTVAL (y1));
1034 else
1035 return 1;
1038 if (GET_CODE (x) == GET_CODE (y))
1039 switch (GET_CODE (x))
1041 case MULT:
1043 /* Handle cases where we expect the second operands to be the
1044 same, and check only whether the first operand would conflict
1045 or not. */
1046 rtx x0, y0;
1047 rtx x1 = canon_rtx (XEXP (x, 1));
1048 rtx y1 = canon_rtx (XEXP (y, 1));
1049 if (! rtx_equal_for_memref_p (x1, y1))
1050 return 1;
1051 x0 = canon_rtx (XEXP (x, 0));
1052 y0 = canon_rtx (XEXP (y, 0));
1053 if (rtx_equal_for_memref_p (x0, y0))
1054 return (xsize == 0 || ysize == 0
1055 || (c >= 0 && xsize > c) || (c < 0 && ysize+c > 0));
1057 /* Can't properly adjust our sizes. */
1058 if (GET_CODE (x1) != CONST_INT)
1059 return 1;
1060 xsize /= INTVAL (x1);
1061 ysize /= INTVAL (x1);
1062 c /= INTVAL (x1);
1063 return memrefs_conflict_p (xsize, x0, ysize, y0, c);
1066 case REG:
1067 /* Are these registers known not to be equal? */
1068 if (alias_invariant)
1070 unsigned int r_x = REGNO (x), r_y = REGNO (y);
1071 rtx i_x, i_y; /* invariant relationships of X and Y */
1073 i_x = r_x >= reg_base_value_size ? 0 : alias_invariant[r_x];
1074 i_y = r_y >= reg_base_value_size ? 0 : alias_invariant[r_y];
1076 if (i_x == 0 && i_y == 0)
1077 break;
1079 if (! memrefs_conflict_p (xsize, i_x ? i_x : x,
1080 ysize, i_y ? i_y : y, c))
1081 return 0;
1083 break;
1085 default:
1086 break;
1089 /* Treat an access through an AND (e.g. a subword access on an Alpha)
1090 as an access with indeterminate size. Assume that references
1091 besides AND are aligned, so if the size of the other reference is
1092 at least as large as the alignment, assume no other overlap. */
1093 if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT)
1095 if (GET_CODE (y) == AND || ysize < -INTVAL (XEXP (x, 1)))
1096 xsize = -1;
1097 return memrefs_conflict_p (xsize, XEXP (x, 0), ysize, y, c);
1099 if (GET_CODE (y) == AND && GET_CODE (XEXP (y, 1)) == CONST_INT)
1101 /* ??? If we are indexing far enough into the array/structure, we
1102          may yet be able to determine that we can not overlap.  But we
1103          also need to know that we are far enough from the end not to overlap
1104 a following reference, so we do nothing with that for now. */
1105 if (GET_CODE (x) == AND || xsize < -INTVAL (XEXP (y, 1)))
1106 ysize = -1;
1107 return memrefs_conflict_p (xsize, x, ysize, XEXP (y, 0), c);
1110 if (CONSTANT_P (x))
1112 if (GET_CODE (x) == CONST_INT && GET_CODE (y) == CONST_INT)
1114 c += (INTVAL (y) - INTVAL (x));
1115 return (xsize <= 0 || ysize <= 0
1116 || (c >= 0 && xsize > c) || (c < 0 && ysize+c > 0));
1119 if (GET_CODE (x) == CONST)
1121 if (GET_CODE (y) == CONST)
1122 return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)),
1123 ysize, canon_rtx (XEXP (y, 0)), c);
1124 else
1125 return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)),
1126 ysize, y, c);
1128 if (GET_CODE (y) == CONST)
1129 return memrefs_conflict_p (xsize, x, ysize,
1130 canon_rtx (XEXP (y, 0)), c);
1132 if (CONSTANT_P (y))
1133 return (xsize < 0 || ysize < 0
1134 || (rtx_equal_for_memref_p (x, y)
1135 && (xsize == 0 || ysize == 0
1136 || (c >= 0 && xsize > c) || (c < 0 && ysize+c > 0))));
1138 return 1;
1140 return 1;
1143 /* Functions to compute memory dependencies.
1145 Since we process the insns in execution order, we can build tables
1146 to keep track of what registers are fixed (and not aliased), what registers
1147 are varying in known ways, and what registers are varying in unknown
1148 ways.
1150 If both memory references are volatile, then there must always be a
1151 dependence between the two references, since their order can not be
1152 changed. A volatile and non-volatile reference can be interchanged
1153 though.
1155 A MEM_IN_STRUCT reference at a non-QImode non-AND varying address can never
1156 conflict with a non-MEM_IN_STRUCT reference at a fixed address. We must
1157 allow QImode aliasing because the ANSI C standard allows character
1158 pointers to alias anything. We are assuming that characters are
1159 always QImode here. We also must allow AND addresses, because they may
1160 generate accesses outside the object being referenced. This is used to
1161 generate aligned addresses from unaligned addresses, for instance, the
1162 alpha storeqi_unaligned pattern. */
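/* An added note: under these rules a QImode reference, or one whose
   address is an AND, is never ruled out by the scalar/structure
   distinction below (see aliases_everything_p), since a `char *' may
   point anywhere and an AND address may touch bytes outside the nominal
   reference.  */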
1164 /* Read dependence: X is read after read in MEM takes place. There can
1165 only be a dependence here if both reads are volatile. */
1167 int
1168 read_dependence (mem, x)
1169 rtx mem;
1170 rtx x;
1172 return MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem);
1175 /* Returns MEM1 if and only if MEM1 is a scalar at a fixed address and
1176 MEM2 is a reference to a structure at a varying address, or returns
1177 MEM2 if vice versa. Otherwise, returns NULL_RTX. If a non-NULL
1178 value is returned MEM1 and MEM2 can never alias. VARIES_P is used
1179 to decide whether or not an address may vary; it should return
1180    nonzero whenever variation is possible.  */
1182 static rtx
1183 fixed_scalar_and_varying_struct_p (mem1, mem2, varies_p)
1184 rtx mem1;
1185 rtx mem2;
1186 int (*varies_p) PROTO((rtx));
1188 rtx mem1_addr = XEXP (mem1, 0);
1189 rtx mem2_addr = XEXP (mem2, 0);
1191 if (MEM_SCALAR_P (mem1) && MEM_IN_STRUCT_P (mem2)
1192 && !varies_p (mem1_addr) && varies_p (mem2_addr))
1193 /* MEM1 is a scalar at a fixed address; MEM2 is a struct at a
1194 varying address. */
1195 return mem1;
1197 if (MEM_IN_STRUCT_P (mem1) && MEM_SCALAR_P (mem2)
1198 && varies_p (mem1_addr) && !varies_p (mem2_addr))
1199 /* MEM2 is a scalar at a fixed address; MEM1 is a struct at a
1200 varying address. */
1201 return mem2;
1203 return NULL_RTX;
1206 /* Returns nonzero if something about the mode or address format MEM1
1207 indicates that it might well alias *anything*. */
1209 static int
1210 aliases_everything_p (mem)
1211 rtx mem;
1213 if (GET_MODE (mem) == QImode)
1214 /* ANSI C says that a `char*' can point to anything. */
1215 return 1;
1217 if (GET_CODE (XEXP (mem, 0)) == AND)
1218   /* If the address is an AND, it's very hard to know what it is
1219      actually pointing at.  */
1220 return 1;
1222 return 0;
1225 /* True dependence: X is read after store in MEM takes place. */
1227 int
1228 true_dependence (mem, mem_mode, x, varies)
1229 rtx mem;
1230 enum machine_mode mem_mode;
1231 rtx x;
1232 int (*varies) PROTO((rtx));
1234 register rtx x_addr, mem_addr;
1236 if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
1237 return 1;
1239 if (DIFFERENT_ALIAS_SETS_P (x, mem))
1240 return 0;
1242 /* If X is an unchanging read, then it can't possibly conflict with any
1243 non-unchanging store. It may conflict with an unchanging write though,
1244 because there may be a single store to this address to initialize it.
1245 Just fall through to the code below to resolve the case where we have
1246 both an unchanging read and an unchanging write. This won't handle all
1247 cases optimally, but the possible performance loss should be
1248 negligible. */
1249 if (RTX_UNCHANGING_P (x) && ! RTX_UNCHANGING_P (mem))
1250 return 0;
1252 if (mem_mode == VOIDmode)
1253 mem_mode = GET_MODE (mem);
1255 if (! base_alias_check (XEXP (x, 0), XEXP (mem, 0), GET_MODE (x), mem_mode))
1256 return 0;
1258 x_addr = canon_rtx (XEXP (x, 0));
1259 mem_addr = canon_rtx (XEXP (mem, 0));
1261 if (! memrefs_conflict_p (GET_MODE_SIZE (mem_mode), mem_addr,
1262 SIZE_FOR_MODE (x), x_addr, 0))
1263 return 0;
1265 if (aliases_everything_p (x))
1266 return 1;
1268   /* We cannot use aliases_everything_p to test MEM, since we must look
1269 at MEM_MODE, rather than GET_MODE (MEM). */
1270 if (mem_mode == QImode || GET_CODE (mem_addr) == AND)
1271 return 1;
1273 /* In true_dependence we also allow BLKmode to alias anything. Why
1274 don't we do this in anti_dependence and output_dependence? */
1275 if (mem_mode == BLKmode || GET_MODE (x) == BLKmode)
1276 return 1;
1278 return !fixed_scalar_and_varying_struct_p (mem, x, varies);
1281 /* Returns non-zero if a write to X might alias a previous read from
1282 (or, if WRITEP is non-zero, a write to) MEM. */
1284 static int
1285 write_dependence_p (mem, x, writep)
1286 rtx mem;
1287 rtx x;
1288 int writep;
1290 rtx x_addr, mem_addr;
1291 rtx fixed_scalar;
1293 if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
1294 return 1;
1296 /* If MEM is an unchanging read, then it can't possibly conflict with
1297 the store to X, because there is at most one store to MEM, and it must
1298 have occurred somewhere before MEM. */
1299 if (!writep && RTX_UNCHANGING_P (mem))
1300 return 0;
1302 if (! base_alias_check (XEXP (x, 0), XEXP (mem, 0), GET_MODE (x),
1303 GET_MODE (mem)))
1304 return 0;
1306 x = canon_rtx (x);
1307 mem = canon_rtx (mem);
1309 if (DIFFERENT_ALIAS_SETS_P (x, mem))
1310 return 0;
1312 x_addr = XEXP (x, 0);
1313 mem_addr = XEXP (mem, 0);
1315 if (!memrefs_conflict_p (SIZE_FOR_MODE (mem), mem_addr,
1316 SIZE_FOR_MODE (x), x_addr, 0))
1317 return 0;
1319 fixed_scalar
1320 = fixed_scalar_and_varying_struct_p (mem, x, rtx_addr_varies_p);
1322 return (!(fixed_scalar == mem && !aliases_everything_p (x))
1323 && !(fixed_scalar == x && !aliases_everything_p (mem)));
1326 /* Anti dependence: X is written after read in MEM takes place. */
1328 int
1329 anti_dependence (mem, x)
1330 rtx mem;
1331 rtx x;
1333 return write_dependence_p (mem, x, /*writep=*/0);
1336 /* Output dependence: X is written after store in MEM takes place. */
1338 int
1339 output_dependence (mem, x)
1340 register rtx mem;
1341 register rtx x;
1343 return write_dependence_p (mem, x, /*writep=*/1);
1346 /* Returns non-zero if X might refer to something which is not
1347 local to the function and is not constant. */
1349 static int
1350 nonlocal_reference_p (x)
1351 rtx x;
1353 rtx base;
1354 register RTX_CODE code;
1355 int regno;
1357 code = GET_CODE (x);
1359 if (GET_RTX_CLASS (code) == 'i')
1361 /* Constant functions are constant. */
1362 if (code == CALL_INSN && CONST_CALL_P (x))
1363 return 0;
1364 x = PATTERN (x);
1365 code = GET_CODE (x);
1368 switch (code)
1370 case SUBREG:
1371 if (GET_CODE (SUBREG_REG (x)) == REG)
1373 /* Global registers are not local. */
1374 if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
1375 && global_regs[REGNO (SUBREG_REG (x)) + SUBREG_WORD (x)])
1376 return 1;
1377 return 0;
1379 break;
1381 case REG:
1382 regno = REGNO (x);
1383 /* Global registers are not local. */
1384 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
1385 return 1;
1386 return 0;
1388 case SCRATCH:
1389 case PC:
1390 case CC0:
1391 case CONST_INT:
1392 case CONST_DOUBLE:
1393 case CONST:
1394 case LABEL_REF:
1395 return 0;
1397 case SYMBOL_REF:
1398       /* Constants in the function's constant pool are constant.  */
1399 if (CONSTANT_POOL_ADDRESS_P (x))
1400 return 0;
1401 return 1;
1403 case CALL:
1404 /* Recursion introduces no additional considerations. */
1405 if (GET_CODE (XEXP (x, 0)) == MEM
1406 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
1407 && strcmp(XSTR (XEXP (XEXP (x, 0), 0), 0),
1408 IDENTIFIER_POINTER (
1409 DECL_ASSEMBLER_NAME (current_function_decl))) == 0)
1410 return 0;
1411 return 1;
1413 case MEM:
1414 /* Be overly conservative and consider any volatile memory
1415 reference as not local. */
1416 if (MEM_VOLATILE_P (x))
1417 return 1;
1418 base = find_base_term (XEXP (x, 0));
1419 if (base)
1421 /* Stack references are local. */
1422 if (GET_CODE (base) == ADDRESS && GET_MODE (base) == Pmode)
1423 return 0;
1424 /* Constants in the function's constant pool are constant. */
1425 if (GET_CODE (base) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (base))
1426 return 0;
1428 return 1;
1430 case ASM_INPUT:
1431 case ASM_OPERANDS:
1432 return 1;
1434 default:
1435 break;
1438 /* Recursively scan the operands of this expression. */
1441 register const char *fmt = GET_RTX_FORMAT (code);
1442 register int i;
1444 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1446 if (fmt[i] == 'e')
1448 if (nonlocal_reference_p (XEXP (x, i)))
1449 return 1;
1451 if (fmt[i] == 'E')
1453 register int j;
1454 for (j = 0; j < XVECLEN (x, i); j++)
1455 if (nonlocal_reference_p (XVECEXP (x, i, j)))
1456 return 1;
1461 return 0;
1464 /* Mark the function if it is constant. */
1466 void
1467 mark_constant_function ()
1469 rtx insn;
1471 if (TREE_PUBLIC (current_function_decl)
1472 || TREE_READONLY (current_function_decl)
1473 || TREE_THIS_VOLATILE (current_function_decl)
1474 || TYPE_MODE (TREE_TYPE (current_function_decl)) == VOIDmode)
1475 return;
1477 /* Determine if this is a constant function. */
1479 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1480 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
1481 && nonlocal_reference_p (insn))
1482 return;
1484 /* Mark the function. */
1486 TREE_READONLY (current_function_decl) = 1;
1490 static HARD_REG_SET argument_registers;
1492 void
1493 init_alias_once ()
1495 register int i;
1497 #ifndef OUTGOING_REGNO
1498 #define OUTGOING_REGNO(N) N
1499 #endif
1500 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1501 /* Check whether this register can hold an incoming pointer
1502 argument. FUNCTION_ARG_REGNO_P tests outgoing register
1503 numbers, so translate if necessary due to register windows. */
1504 if (FUNCTION_ARG_REGNO_P (OUTGOING_REGNO (i))
1505 && HARD_REGNO_MODE_OK (i, Pmode))
1506 SET_HARD_REG_BIT (argument_registers, i);
1508 alias_sets = splay_tree_new (splay_tree_compare_ints, 0, 0);
1511 void
1512 init_alias_analysis ()
1514 int maxreg = max_reg_num ();
1515 int changed, pass;
1516 register int i;
1517 register unsigned int ui;
1518 register rtx insn;
1520 reg_known_value_size = maxreg;
1522 reg_known_value
1523 = (rtx *) oballoc ((maxreg - FIRST_PSEUDO_REGISTER) * sizeof (rtx))
1524 - FIRST_PSEUDO_REGISTER;
1525 reg_known_equiv_p =
1526 oballoc (maxreg - FIRST_PSEUDO_REGISTER) - FIRST_PSEUDO_REGISTER;
1527 bzero ((char *) (reg_known_value + FIRST_PSEUDO_REGISTER),
1528 (maxreg-FIRST_PSEUDO_REGISTER) * sizeof (rtx));
1529 bzero (reg_known_equiv_p + FIRST_PSEUDO_REGISTER,
1530 (maxreg - FIRST_PSEUDO_REGISTER) * sizeof (char));
1532 /* Overallocate reg_base_value to allow some growth during loop
1533 optimization. Loop unrolling can create a large number of
1534 registers. */
1535 reg_base_value_size = maxreg * 2;
1536 reg_base_value = (rtx *)oballoc (reg_base_value_size * sizeof (rtx));
1537 new_reg_base_value = (rtx *)alloca (reg_base_value_size * sizeof (rtx));
1538 reg_seen = (char *)alloca (reg_base_value_size);
1539 bzero ((char *) reg_base_value, reg_base_value_size * sizeof (rtx));
1540 if (! reload_completed && flag_unroll_loops)
1542 alias_invariant = (rtx *)xrealloc (alias_invariant,
1543 reg_base_value_size * sizeof (rtx));
1544 bzero ((char *)alias_invariant, reg_base_value_size * sizeof (rtx));
1548 /* The basic idea is that each pass through this loop will use the
1549 "constant" information from the previous pass to propagate alias
1550 information through another level of assignments.
1552 This could get expensive if the assignment chains are long. Maybe
1553 we should throttle the number of iterations, possibly based on
1554 the optimization level or flag_expensive_optimizations.
1556 We could propagate more information in the first pass by making use
1557 of REG_N_SETS to determine immediately that the alias information
1558 for a pseudo is "constant".
1560 A program with an uninitialized variable can cause an infinite loop
1561 here. Instead of doing a full dataflow analysis to detect such problems
1562 we just cap the number of iterations for the loop.
1564 The state of the arrays for the set chain in question does not matter
1565 since the program has undefined behavior. */
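  /* A small added illustration: for a chain of copies such as
     `p1 = &x; p2 = p1; p3 = p2;', the first pass gives the pseudo for `p1'
     the base value (symbol_ref "x"), the second pass propagates it to `p2',
     and the third to `p3'; MAX_ALIAS_LOOP_PASSES simply caps how far such
     a chain is followed.  */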
1567 pass = 0;
1570 /* Assume nothing will change this iteration of the loop. */
1571 changed = 0;
1573 /* We want to assign the same IDs each iteration of this loop, so
1574 start counting from zero each iteration of the loop. */
1575 unique_id = 0;
1577       /* We're at the start of the function each iteration through the
1578 loop, so we're copying arguments. */
1579 copying_arguments = 1;
1581 /* Wipe the potential alias information clean for this pass. */
1582 bzero ((char *) new_reg_base_value, reg_base_value_size * sizeof (rtx));
1584 /* Wipe the reg_seen array clean. */
1585 bzero ((char *) reg_seen, reg_base_value_size);
1587 /* Mark all hard registers which may contain an address.
1588 The stack, frame and argument pointers may contain an address.
1589 An argument register which can hold a Pmode value may contain
1590 an address even if it is not in BASE_REGS.
1592 The address expression is VOIDmode for an argument and
1593 Pmode for other registers. */
1595 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1596 if (TEST_HARD_REG_BIT (argument_registers, i))
1597 new_reg_base_value[i] = gen_rtx_ADDRESS (VOIDmode,
1598 gen_rtx_REG (Pmode, i));
1600 new_reg_base_value[STACK_POINTER_REGNUM]
1601 = gen_rtx_ADDRESS (Pmode, stack_pointer_rtx);
1602 new_reg_base_value[ARG_POINTER_REGNUM]
1603 = gen_rtx_ADDRESS (Pmode, arg_pointer_rtx);
1604 new_reg_base_value[FRAME_POINTER_REGNUM]
1605 = gen_rtx_ADDRESS (Pmode, frame_pointer_rtx);
1606 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
1607 new_reg_base_value[HARD_FRAME_POINTER_REGNUM]
1608 = gen_rtx_ADDRESS (Pmode, hard_frame_pointer_rtx);
1609 #endif
1610 if (struct_value_incoming_rtx
1611 && GET_CODE (struct_value_incoming_rtx) == REG)
1612 new_reg_base_value[REGNO (struct_value_incoming_rtx)]
1613 = gen_rtx_ADDRESS (Pmode, struct_value_incoming_rtx);
1615 if (static_chain_rtx
1616 && GET_CODE (static_chain_rtx) == REG)
1617 new_reg_base_value[REGNO (static_chain_rtx)]
1618 = gen_rtx_ADDRESS (Pmode, static_chain_rtx);
1620 /* Walk the insns adding values to the new_reg_base_value array. */
1621 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1623 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
1624 if (prologue_epilogue_contains (insn))
1625 continue;
1626 #endif
1627 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1629 rtx note, set;
1630               /* If this insn has a noalias note, process it.  Otherwise,
1631 scan for sets. A simple set will have no side effects
1632 which could change the base value of any other register. */
1634 if (GET_CODE (PATTERN (insn)) == SET
1635 && (find_reg_note (insn, REG_NOALIAS, NULL_RTX)))
1636 record_set (SET_DEST (PATTERN (insn)), NULL_RTX);
1637 else
1638 note_stores (PATTERN (insn), record_set);
1640 set = single_set (insn);
1642 if (set != 0
1643 && GET_CODE (SET_DEST (set)) == REG
1644 && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER
1645 && (((note = find_reg_note (insn, REG_EQUAL, 0)) != 0
1646 && REG_N_SETS (REGNO (SET_DEST (set))) == 1)
1647 || (note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) != 0)
1648 && GET_CODE (XEXP (note, 0)) != EXPR_LIST
1649 && ! reg_overlap_mentioned_p (SET_DEST (set), XEXP (note, 0)))
1651 int regno = REGNO (SET_DEST (set));
1652 reg_known_value[regno] = XEXP (note, 0);
1653 reg_known_equiv_p[regno] = REG_NOTE_KIND (note) == REG_EQUIV;
1656 else if (GET_CODE (insn) == NOTE
1657 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG)
1658 copying_arguments = 0;
1661 /* Now propagate values from new_reg_base_value to reg_base_value. */
1662 for (ui = 0; ui < reg_base_value_size; ui++)
1664 if (new_reg_base_value[ui]
1665 && new_reg_base_value[ui] != reg_base_value[ui]
1666 && ! rtx_equal_p (new_reg_base_value[ui], reg_base_value[ui]))
1668 reg_base_value[ui] = new_reg_base_value[ui];
1669 changed = 1;
1673 while (changed && ++pass < MAX_ALIAS_LOOP_PASSES);
1675 /* Fill in the remaining entries. */
1676 for (i = FIRST_PSEUDO_REGISTER; i < maxreg; i++)
1677 if (reg_known_value[i] == 0)
1678 reg_known_value[i] = regno_reg_rtx[i];
1680 /* Simplify the reg_base_value array so that no register refers to
1681 another register, except to special registers indirectly through
1682 ADDRESS expressions.
1684 In theory this loop can take as long as O(registers^2), but unless
1685 there are very long dependency chains it will run in close to linear
1686 time.
1688 This loop may not be needed any longer now that the main loop does
1689 a better job at propagating alias information. */
1690 pass = 0;
1693 changed = 0;
1694 pass++;
1695 for (ui = 0; ui < reg_base_value_size; ui++)
1697 rtx base = reg_base_value[ui];
1698 if (base && GET_CODE (base) == REG)
1700 unsigned int base_regno = REGNO (base);
1701 if (base_regno == ui) /* register set from itself */
1702 reg_base_value[ui] = 0;
1703 else
1704 reg_base_value[ui] = reg_base_value[base_regno];
1705 changed = 1;
1709 while (changed && pass < MAX_ALIAS_LOOP_PASSES);
1711 new_reg_base_value = 0;
1712 reg_seen = 0;
1715 void
1716 end_alias_analysis ()
1718 reg_known_value = 0;
1719 reg_base_value = 0;
1720 reg_base_value_size = 0;
1721 if (alias_invariant)
1723 free ((char *)alias_invariant);
1724 alias_invariant = 0;