1 /* Alias analysis for GNU C
2 Copyright (C) 1997, 1998, 1999 Free Software Foundation, Inc.
3 Contributed by John Carr (jfc@mit.edu).
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "rtl.h"
25 #include "tree.h"
26 #include "tm_p.h"
27 #include "function.h"
28 #include "expr.h"
29 #include "regs.h"
30 #include "hard-reg-set.h"
31 #include "flags.h"
32 #include "output.h"
33 #include "toplev.h"
34 #include "splay-tree.h"
36 /* The alias sets assigned to MEMs assist the back-end in determining
37 which MEMs can alias which other MEMs. In general, two MEMs in
 38    different alias sets do not alias each other.  There is one
39 exception, however. Consider something like:
41 struct S {int i; double d; };
43 a store to an `S' can alias something of either type `int' or type
44 `double'. (However, a store to an `int' cannot alias a `double'
45 and vice versa.) We indicate this via a tree structure that looks
46 like:
47 struct S
48 / \
49 / \
50 |/_ _\|
51 int double
53 (The arrows are directed and point downwards.) If, when comparing
54 two alias sets, we can hold one set fixed, and trace the other set
55 downwards, and at some point find the first set, the two MEMs can
56 alias one another. In this situation we say the alias set for
57 `struct S' is the `superset' and that those for `int' and `double'
58 are `subsets'.
60 Alias set zero is implicitly a superset of all other alias sets.
 61    However, there is no actual entry for alias set zero.  It is an
62 error to attempt to explicitly construct a subset of zero. */
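/* Added illustration (not part of the original sources): given the
   `struct S' example above, a front end might assign alias set 1 to
   `struct S', 2 to `int' and 3 to `double', and then record the
   subset relationships with

       record_alias_subset (1, 2);
       record_alias_subset (1, 3);

   Afterwards a MEM in set 2 and a MEM in set 3 are reported as
   disjoint, while a MEM in set 1 conflicts with both.  The set
   numbers here are invented; real ones come from the front end via
   MEM_ALIAS_SET.  */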
64 typedef struct alias_set_entry {
65 /* The alias set number, as stored in MEM_ALIAS_SET. */
66 int alias_set;
68 /* The children of the alias set. These are not just the immediate
69 children, but, in fact, all children. So, if we have:
71 struct T { struct S s; float f; }
73 continuing our example above, the children here will be all of
74 `int', `double', `float', and `struct S'. */
75 splay_tree children;
76 }* alias_set_entry;
78 static rtx canon_rtx PROTO((rtx));
79 static int rtx_equal_for_memref_p PROTO((rtx, rtx));
80 static rtx find_symbolic_term PROTO((rtx));
81 static int memrefs_conflict_p PROTO((int, rtx, int, rtx,
82 HOST_WIDE_INT));
83 static void record_set PROTO((rtx, rtx));
84 static rtx find_base_term PROTO((rtx));
85 static int base_alias_check PROTO((rtx, rtx, enum machine_mode,
86 enum machine_mode));
87 static rtx find_base_value PROTO((rtx));
88 static int mems_in_disjoint_alias_sets_p PROTO((rtx, rtx));
89 static int insert_subset_children PROTO((splay_tree_node,
90 void*));
91 static alias_set_entry get_alias_set_entry PROTO((int));
92 static rtx fixed_scalar_and_varying_struct_p PROTO((rtx, rtx, int (*)(rtx)));
93 static int aliases_everything_p PROTO((rtx));
94 static int write_dependence_p PROTO((rtx, rtx, int));
96 /* Set up all info needed to perform alias analysis on memory references. */
98 #define SIZE_FOR_MODE(X) (GET_MODE_SIZE (GET_MODE (X)))
100 /* Returns nonzero if MEM1 and MEM2 do not alias because they are in
101 different alias sets. We ignore alias sets in functions making use
102 of variable arguments because the va_arg macros on some systems are
103 not legal ANSI C. */
104 #define DIFFERENT_ALIAS_SETS_P(MEM1, MEM2) \
105 mems_in_disjoint_alias_sets_p (MEM1, MEM2)
107 /* Cap the number of passes we make over the insns propagating alias
108 information through set chains.
110 10 is a completely arbitrary choice. */
111 #define MAX_ALIAS_LOOP_PASSES 10
113 /* reg_base_value[N] gives an address to which register N is related.
114 If all sets after the first add or subtract to the current value
115 or otherwise modify it so it does not point to a different top level
116 object, reg_base_value[N] is equal to the address part of the source
117 of the first set.
119 A base address can be an ADDRESS, SYMBOL_REF, or LABEL_REF. ADDRESS
120 expressions represent certain special values: function arguments and
121 the stack, frame, and argument pointers. The contents of an address
122 expression are not used (but they are descriptive for debugging);
123 only the address and mode matter. Pointer equality, not rtx_equal_p,
124 determines whether two ADDRESS expressions refer to the same base
125 address. The mode determines whether it is a function argument or
126 other special value. */
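/* Added illustration (not from the original sources): for a fragment
   such as

       extern char buf[];
       char *p = buf;
       char *q = p + 4;

   the pseudos holding `p' and `q' both end up with a reg_base_value
   of (symbol_ref "buf"), because the second set merely offsets the
   first.  A pseudo copied from an incoming argument register instead
   gets an (address ...) expression whose VOIDmode marks it as a
   function argument rather than a stack or frame address.  */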
128 rtx *reg_base_value;
129 rtx *new_reg_base_value;
130 unsigned int reg_base_value_size; /* size of reg_base_value array */
131 #define REG_BASE_VALUE(X) \
132 ((unsigned) REGNO (X) < reg_base_value_size ? reg_base_value[REGNO (X)] : 0)
134 /* Vector of known invariant relationships between registers. Set in
135 loop unrolling. Indexed by register number, if nonzero the value
136 is an expression describing this register in terms of another.
138 The length of this array is REG_BASE_VALUE_SIZE.
140 Because this array contains only pseudo registers it has no effect
141 after reload. */
142 static rtx *alias_invariant;
144 /* Vector indexed by N giving the initial (unchanging) value known
145 for pseudo-register N. */
146 rtx *reg_known_value;
148 /* Indicates number of valid entries in reg_known_value. */
149 static int reg_known_value_size;
151 /* Vector recording for each reg_known_value whether it is due to a
152 REG_EQUIV note. Future passes (viz., reload) may replace the
153 pseudo with the equivalent expression and so we account for the
154 dependences that would be introduced if that happens. */
155 /* ??? This is a problem only on the Convex. The REG_EQUIV notes created in
156 assign_parms mention the arg pointer, and there are explicit insns in the
157 RTL that modify the arg pointer. Thus we must ensure that such insns don't
158 get scheduled across each other because that would invalidate the REG_EQUIV
159 notes. One could argue that the REG_EQUIV notes are wrong, but solving
160 the problem in the scheduler will likely give better code, so we do it
161 here. */
162 char *reg_known_equiv_p;
164 /* True when scanning insns from the start of the rtl to the
165 NOTE_INSN_FUNCTION_BEG note. */
167 static int copying_arguments;
169 /* The splay-tree used to store the various alias set entries. */
171 static splay_tree alias_sets;
173 /* Returns a pointer to the alias set entry for ALIAS_SET, if there is
174 such an entry, or NULL otherwise. */
176 static alias_set_entry
177 get_alias_set_entry (alias_set)
178 int alias_set;
180 splay_tree_node sn =
181 splay_tree_lookup (alias_sets, (splay_tree_key) alias_set);
183 return sn ? ((alias_set_entry) sn->value) : ((alias_set_entry) 0);
186 /* Returns nonzero value if the alias sets for MEM1 and MEM2 are such
187 that the two MEMs cannot alias each other. */
189 static int
190 mems_in_disjoint_alias_sets_p (mem1, mem2)
191 rtx mem1;
192 rtx mem2;
194 alias_set_entry ase;
196 #ifdef ENABLE_CHECKING
197 /* Perform a basic sanity check. Namely, that there are no alias sets
198 if we're not using strict aliasing. This helps to catch bugs
199 whereby someone uses PUT_CODE, but doesn't clear MEM_ALIAS_SET, or
200 where a MEM is allocated in some way other than by the use of
201 gen_rtx_MEM, and the MEM_ALIAS_SET is not cleared. If we begin to
202 use alias sets to indicate that spilled registers cannot alias each
203 other, we might need to remove this check. */
204 if (!flag_strict_aliasing &&
205 (MEM_ALIAS_SET (mem1) || MEM_ALIAS_SET (mem2)))
206 abort ();
207 #endif
 209   /* The code used in varargs macros is often not conforming ANSI C,
210 which can trick the compiler into making incorrect aliasing
211 assumptions in these functions. So, we don't use alias sets in
212 such a function. FIXME: This should be moved into the front-end;
213 it is a language-dependent notion, and there's no reason not to
214 still use these checks to handle globals. */
215 if (current_function_stdarg || current_function_varargs)
216 return 0;
218 if (!MEM_ALIAS_SET (mem1) || !MEM_ALIAS_SET (mem2))
219 /* We have no alias set information for one of the MEMs, so we
220 have to assume it can alias anything. */
221 return 0;
223 if (MEM_ALIAS_SET (mem1) == MEM_ALIAS_SET (mem2))
224 /* The two alias sets are the same, so they may alias. */
225 return 0;
227 /* Iterate through each of the children of the first alias set,
228 comparing it with the second alias set. */
229 ase = get_alias_set_entry (MEM_ALIAS_SET (mem1));
230 if (ase && splay_tree_lookup (ase->children,
231 (splay_tree_key) MEM_ALIAS_SET (mem2)))
232 return 0;
234 /* Now do the same, but with the alias sets reversed. */
235 ase = get_alias_set_entry (MEM_ALIAS_SET (mem2));
236 if (ase && splay_tree_lookup (ase->children,
237 (splay_tree_key) MEM_ALIAS_SET (mem1)))
238 return 0;
240 /* The two MEMs are in distinct alias sets, and neither one is the
241 child of the other. Therefore, they cannot alias. */
242 return 1;
245 /* Insert the NODE into the splay tree given by DATA. Used by
246 record_alias_subset via splay_tree_foreach. */
248 static int
249 insert_subset_children (node, data)
250 splay_tree_node node;
251 void *data;
253 splay_tree_insert ((splay_tree) data,
254 node->key,
255 node->value);
257 return 0;
260 /* Indicate that things in SUBSET can alias things in SUPERSET, but
261 not vice versa. For example, in C, a store to an `int' can alias a
262 structure containing an `int', but not vice versa. Here, the
263 structure would be the SUPERSET and `int' the SUBSET. This
264 function should be called only once per SUPERSET/SUBSET pair. At
265 present any given alias set may only be a subset of one superset.
267 It is illegal for SUPERSET to be zero; everything is implicitly a
268 subset of alias set zero. */
270 void
271 record_alias_subset (superset, subset)
272 int superset;
273 int subset;
275 alias_set_entry superset_entry;
276 alias_set_entry subset_entry;
278 if (superset == 0)
279 abort ();
281 superset_entry = get_alias_set_entry (superset);
282 if (!superset_entry)
284 /* Create an entry for the SUPERSET, so that we have a place to
285 attach the SUBSET. */
286 superset_entry =
287 (alias_set_entry) xmalloc (sizeof (struct alias_set_entry));
288 superset_entry->alias_set = superset;
289 superset_entry->children
290 = splay_tree_new (splay_tree_compare_ints, 0, 0);
291 splay_tree_insert (alias_sets,
292 (splay_tree_key) superset,
293 (splay_tree_value) superset_entry);
297 subset_entry = get_alias_set_entry (subset);
298 if (subset_entry)
299 /* There is an entry for the subset. Enter all of its children
300 (if they are not already present) as children of the SUPERSET. */
301 splay_tree_foreach (subset_entry->children,
302 insert_subset_children,
303 superset_entry->children);
305 /* Enter the SUBSET itself as a child of the SUPERSET. */
306 splay_tree_insert (superset_entry->children,
307 (splay_tree_key) subset,
308 /*value=*/0);
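/* Added note: continuing the `struct T' example from the head of the
   file, recording `struct T' as a superset of `struct S' first copies
   S's existing children (`int' and `double') into T's splay tree via
   insert_subset_children, and then enters S itself, so one lookup in
   T's child tree is enough to answer whether, say, an `int' access
   can alias a `struct T' access.  */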
311 /* Inside SRC, the source of a SET, find a base address. */
313 static rtx
314 find_base_value (src)
315 register rtx src;
317 switch (GET_CODE (src))
319 case SYMBOL_REF:
320 case LABEL_REF:
321 return src;
323 case REG:
324 /* At the start of a function argument registers have known base
325 values which may be lost later. Returning an ADDRESS
326 expression here allows optimization based on argument values
327 even when the argument registers are used for other purposes. */
328 if (REGNO (src) < FIRST_PSEUDO_REGISTER && copying_arguments)
329 return new_reg_base_value[REGNO (src)];
331 /* If a pseudo has a known base value, return it. Do not do this
332 for hard regs since it can result in a circular dependency
333 chain for registers which have values at function entry.
335 The test above is not sufficient because the scheduler may move
336 a copy out of an arg reg past the NOTE_INSN_FUNCTION_BEGIN. */
337 if (REGNO (src) >= FIRST_PSEUDO_REGISTER
338 && (unsigned) REGNO (src) < reg_base_value_size
339 && reg_base_value[REGNO (src)])
340 return reg_base_value[REGNO (src)];
342 return src;
344 case MEM:
345 /* Check for an argument passed in memory. Only record in the
346 copying-arguments block; it is too hard to track changes
347 otherwise. */
348 if (copying_arguments
349 && (XEXP (src, 0) == arg_pointer_rtx
350 || (GET_CODE (XEXP (src, 0)) == PLUS
351 && XEXP (XEXP (src, 0), 0) == arg_pointer_rtx)))
352 return gen_rtx_ADDRESS (VOIDmode, src);
353 return 0;
355 case CONST:
356 src = XEXP (src, 0);
357 if (GET_CODE (src) != PLUS && GET_CODE (src) != MINUS)
358 break;
359 /* fall through */
361 case PLUS:
362 case MINUS:
364 rtx temp, src_0 = XEXP (src, 0), src_1 = XEXP (src, 1);
366 /* If either operand is a REG, then see if we already have
367 a known value for it. */
368 if (GET_CODE (src_0) == REG)
370 temp = find_base_value (src_0);
371 if (temp)
372 src_0 = temp;
375 if (GET_CODE (src_1) == REG)
377 temp = find_base_value (src_1);
378 if (temp)
379 src_1 = temp;
382 /* Guess which operand is the base address.
384 If either operand is a symbol, then it is the base. If
385 either operand is a CONST_INT, then the other is the base. */
387 if (GET_CODE (src_1) == CONST_INT
388 || GET_CODE (src_0) == SYMBOL_REF
389 || GET_CODE (src_0) == LABEL_REF
390 || GET_CODE (src_0) == CONST)
391 return find_base_value (src_0);
393 if (GET_CODE (src_0) == CONST_INT
394 || GET_CODE (src_1) == SYMBOL_REF
395 || GET_CODE (src_1) == LABEL_REF
396 || GET_CODE (src_1) == CONST)
397 return find_base_value (src_1);
399 /* This might not be necessary anymore.
401 If either operand is a REG that is a known pointer, then it
402 is the base. */
403 if (GET_CODE (src_0) == REG && REGNO_POINTER_FLAG (REGNO (src_0)))
404 return find_base_value (src_0);
406 if (GET_CODE (src_1) == REG && REGNO_POINTER_FLAG (REGNO (src_1)))
407 return find_base_value (src_1);
409 return 0;
412 case LO_SUM:
413 /* The standard form is (lo_sum reg sym) so look only at the
414 second operand. */
415 return find_base_value (XEXP (src, 1));
417 case AND:
418 /* If the second operand is constant set the base
419 address to the first operand. */
420 if (GET_CODE (XEXP (src, 1)) == CONST_INT && INTVAL (XEXP (src, 1)) != 0)
421 return find_base_value (XEXP (src, 0));
422 return 0;
424 case ZERO_EXTEND:
425 case SIGN_EXTEND: /* used for NT/Alpha pointers */
426 case HIGH:
427 return find_base_value (XEXP (src, 0));
429 default:
430 break;
433 return 0;
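/* Added example (register number and symbol are invented): for a
   source such as

       (plus:SI (reg:SI 117) (const_int 8))

   where pseudo 117 already has (symbol_ref "buf") recorded as its
   base, the pseudo is first replaced by its known base value, and
   since the other operand is a CONST_INT the SYMBOL_REF is returned
   as the base of the whole expression.  */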
436 /* Called from init_alias_analysis indirectly through note_stores. */
438 /* while scanning insns to find base values, reg_seen[N] is nonzero if
439 register N has been set in this function. */
440 static char *reg_seen;
442 /* Addresses which are known not to alias anything else are identified
443 by a unique integer. */
444 static int unique_id;
446 static void
447 record_set (dest, set)
448 rtx dest, set;
450 register int regno;
451 rtx src;
453 if (GET_CODE (dest) != REG)
454 return;
456 regno = REGNO (dest);
458 if (set)
460 /* A CLOBBER wipes out any old value but does not prevent a previously
461 unset register from acquiring a base address (i.e. reg_seen is not
462 set). */
463 if (GET_CODE (set) == CLOBBER)
465 new_reg_base_value[regno] = 0;
466 return;
468 src = SET_SRC (set);
470 else
472 if (reg_seen[regno])
474 new_reg_base_value[regno] = 0;
475 return;
477 reg_seen[regno] = 1;
478 new_reg_base_value[regno] = gen_rtx_ADDRESS (Pmode,
479 GEN_INT (unique_id++));
480 return;
483 /* This is not the first set. If the new value is not related to the
 484      old value, forget the base value.  Note that the following case is
485 not detected:
486 extern int x, y; int *p = &x; p += (&y-&x);
487 ANSI C does not allow computing the difference of addresses
488 of distinct top level objects. */
489 if (new_reg_base_value[regno])
490 switch (GET_CODE (src))
492 case LO_SUM:
493 case PLUS:
494 case MINUS:
495 if (XEXP (src, 0) != dest && XEXP (src, 1) != dest)
496 new_reg_base_value[regno] = 0;
497 break;
498 case AND:
499 if (XEXP (src, 0) != dest || GET_CODE (XEXP (src, 1)) != CONST_INT)
500 new_reg_base_value[regno] = 0;
501 break;
502 default:
503 new_reg_base_value[regno] = 0;
504 break;
506 /* If this is the first set of a register, record the value. */
507 else if ((regno >= FIRST_PSEUDO_REGISTER || ! fixed_regs[regno])
508 && ! reg_seen[regno] && new_reg_base_value[regno] == 0)
509 new_reg_base_value[regno] = find_base_value (src);
511 reg_seen[regno] = 1;
514 /* Called from loop optimization when a new pseudo-register is created. */
515 void
516 record_base_value (regno, val, invariant)
517 int regno;
518 rtx val;
519 int invariant;
521 if ((unsigned) regno >= reg_base_value_size)
522 return;
524 /* If INVARIANT is true then this value also describes an invariant
525 relationship which can be used to deduce that two registers with
526 unknown values are different. */
527 if (invariant && alias_invariant)
528 alias_invariant[regno] = val;
530 if (GET_CODE (val) == REG)
532 if ((unsigned) REGNO (val) < reg_base_value_size)
534 reg_base_value[regno] = reg_base_value[REGNO (val)];
536 return;
538 reg_base_value[regno] = find_base_value (val);
541 static rtx
542 canon_rtx (x)
543 rtx x;
545 /* Recursively look for equivalences. */
546 if (GET_CODE (x) == REG && REGNO (x) >= FIRST_PSEUDO_REGISTER
547 && REGNO (x) < reg_known_value_size)
548 return reg_known_value[REGNO (x)] == x
549 ? x : canon_rtx (reg_known_value[REGNO (x)]);
550 else if (GET_CODE (x) == PLUS)
552 rtx x0 = canon_rtx (XEXP (x, 0));
553 rtx x1 = canon_rtx (XEXP (x, 1));
555 if (x0 != XEXP (x, 0) || x1 != XEXP (x, 1))
557 /* We can tolerate LO_SUMs being offset here; these
558 rtl are used for nothing other than comparisons. */
559 if (GET_CODE (x0) == CONST_INT)
560 return plus_constant_for_output (x1, INTVAL (x0));
561 else if (GET_CODE (x1) == CONST_INT)
562 return plus_constant_for_output (x0, INTVAL (x1));
563 return gen_rtx_PLUS (GET_MODE (x), x0, x1);
566 /* This gives us much better alias analysis when called from
567 the loop optimizer. Note we want to leave the original
568 MEM alone, but need to return the canonicalized MEM with
 569      all the flags set to their original values.  */
570 else if (GET_CODE (x) == MEM)
572 rtx addr = canon_rtx (XEXP (x, 0));
573 if (addr != XEXP (x, 0))
575 rtx new = gen_rtx_MEM (GET_MODE (x), addr);
576 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
577 MEM_COPY_ATTRIBUTES (new, x);
578 MEM_ALIAS_SET (new) = MEM_ALIAS_SET (x);
579 x = new;
582 return x;
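/* Added example (the names used are hypothetical): if
   reg_known_value records that pseudo 120 is equivalent to
   (symbol_ref "tbl"), then canonicalizing

       (plus:SI (reg:SI 120) (const_int 4))

   substitutes the SYMBOL_REF and folds the constant with
   plus_constant_for_output, so two memory references written against
   different but equivalent registers compare equal in
   rtx_equal_for_memref_p.  */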
585 /* Return 1 if X and Y are identical-looking rtx's.
587 We use the data in reg_known_value above to see if two registers with
588 different numbers are, in fact, equivalent. */
590 static int
591 rtx_equal_for_memref_p (x, y)
592 rtx x, y;
594 register int i;
595 register int j;
596 register enum rtx_code code;
597 register const char *fmt;
599 if (x == 0 && y == 0)
600 return 1;
601 if (x == 0 || y == 0)
602 return 0;
603 x = canon_rtx (x);
604 y = canon_rtx (y);
606 if (x == y)
607 return 1;
609 code = GET_CODE (x);
610 /* Rtx's of different codes cannot be equal. */
611 if (code != GET_CODE (y))
612 return 0;
614 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.
615 (REG:SI x) and (REG:HI x) are NOT equivalent. */
617 if (GET_MODE (x) != GET_MODE (y))
618 return 0;
620 /* REG, LABEL_REF, and SYMBOL_REF can be compared nonrecursively. */
622 if (code == REG)
623 return REGNO (x) == REGNO (y);
624 if (code == LABEL_REF)
625 return XEXP (x, 0) == XEXP (y, 0);
626 if (code == SYMBOL_REF)
627 return XSTR (x, 0) == XSTR (y, 0);
628 if (code == CONST_INT)
629 return INTVAL (x) == INTVAL (y);
630 /* There's no need to compare the contents of CONST_DOUBLEs because
631 they're unique. */
632 if (code == CONST_DOUBLE)
633 return 0;
634 if (code == ADDRESSOF)
635 return REGNO (XEXP (x, 0)) == REGNO (XEXP (y, 0)) && XINT (x, 1) == XINT (y, 1);
 637   /* For commutative operations, the RTXs match if the operands match in any
638 order. Also handle the simple binary and unary cases without a loop. */
639 if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
640 return ((rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 0))
641 && rtx_equal_for_memref_p (XEXP (x, 1), XEXP (y, 1)))
642 || (rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 1))
643 && rtx_equal_for_memref_p (XEXP (x, 1), XEXP (y, 0))));
644 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
645 return (rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 0))
646 && rtx_equal_for_memref_p (XEXP (x, 1), XEXP (y, 1)));
647 else if (GET_RTX_CLASS (code) == '1')
648 return rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 0));
650 /* Compare the elements. If any pair of corresponding elements
 651      fail to match, return 0 for the whole thing.
653 Limit cases to types which actually appear in addresses. */
655 fmt = GET_RTX_FORMAT (code);
656 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
658 switch (fmt[i])
660 case 'i':
661 if (XINT (x, i) != XINT (y, i))
662 return 0;
663 break;
665 case 'E':
666 /* Two vectors must have the same length. */
667 if (XVECLEN (x, i) != XVECLEN (y, i))
668 return 0;
670 /* And the corresponding elements must match. */
671 for (j = 0; j < XVECLEN (x, i); j++)
672 if (rtx_equal_for_memref_p (XVECEXP (x, i, j), XVECEXP (y, i, j)) == 0)
673 return 0;
674 break;
676 case 'e':
677 if (rtx_equal_for_memref_p (XEXP (x, i), XEXP (y, i)) == 0)
678 return 0;
679 break;
681 /* This can happen for an asm which clobbers memory. */
682 case '0':
683 break;
685 /* It is believed that rtx's at this level will never
686 contain anything but integers and other rtx's,
687 except for within LABEL_REFs and SYMBOL_REFs. */
688 default:
689 abort ();
692 return 1;
695 /* Given an rtx X, find a SYMBOL_REF or LABEL_REF within
696 X and return it, or return 0 if none found. */
698 static rtx
699 find_symbolic_term (x)
700 rtx x;
702 register int i;
703 register enum rtx_code code;
704 register const char *fmt;
706 code = GET_CODE (x);
707 if (code == SYMBOL_REF || code == LABEL_REF)
708 return x;
709 if (GET_RTX_CLASS (code) == 'o')
710 return 0;
712 fmt = GET_RTX_FORMAT (code);
713 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
715 rtx t;
717 if (fmt[i] == 'e')
719 t = find_symbolic_term (XEXP (x, i));
720 if (t != 0)
721 return t;
723 else if (fmt[i] == 'E')
724 break;
726 return 0;
729 static rtx
730 find_base_term (x)
731 register rtx x;
733 switch (GET_CODE (x))
735 case REG:
736 return REG_BASE_VALUE (x);
738 case ZERO_EXTEND:
739 case SIGN_EXTEND: /* Used for Alpha/NT pointers */
740 case HIGH:
741 case PRE_INC:
742 case PRE_DEC:
743 case POST_INC:
744 case POST_DEC:
745 return find_base_term (XEXP (x, 0));
747 case CONST:
748 x = XEXP (x, 0);
749 if (GET_CODE (x) != PLUS && GET_CODE (x) != MINUS)
750 return 0;
751 /* fall through */
752 case LO_SUM:
753 case PLUS:
754 case MINUS:
756 rtx tmp1 = XEXP (x, 0);
757 rtx tmp2 = XEXP (x, 1);
 759 	/* This is a little bit tricky since we have to determine which of
760 the two operands represents the real base address. Otherwise this
761 routine may return the index register instead of the base register.
763 That may cause us to believe no aliasing was possible, when in
764 fact aliasing is possible.
766 We use a few simple tests to guess the base register. Additional
767 tests can certainly be added. For example, if one of the operands
768 is a shift or multiply, then it must be the index register and the
769 other operand is the base register. */
771 /* If either operand is known to be a pointer, then use it
772 to determine the base term. */
773 if (REG_P (tmp1) && REGNO_POINTER_FLAG (REGNO (tmp1)))
774 return find_base_term (tmp1);
776 if (REG_P (tmp2) && REGNO_POINTER_FLAG (REGNO (tmp2)))
777 return find_base_term (tmp2);
779 /* Neither operand was known to be a pointer. Go ahead and find the
780 base term for both operands. */
781 tmp1 = find_base_term (tmp1);
782 tmp2 = find_base_term (tmp2);
 784 	/* If either base term is a named object or a special address
785 (like an argument or stack reference), then use it for the
786 base term. */
787 if (tmp1
788 && (GET_CODE (tmp1) == SYMBOL_REF
789 || GET_CODE (tmp1) == LABEL_REF
790 || (GET_CODE (tmp1) == ADDRESS
791 && GET_MODE (tmp1) != VOIDmode)))
792 return tmp1;
794 if (tmp2
795 && (GET_CODE (tmp2) == SYMBOL_REF
796 || GET_CODE (tmp2) == LABEL_REF
797 || (GET_CODE (tmp2) == ADDRESS
798 && GET_MODE (tmp2) != VOIDmode)))
799 return tmp2;
801 /* We could not determine which of the two operands was the
802 base register and which was the index. So we can determine
803 nothing from the base alias check. */
804 return 0;
807 case AND:
808 if (GET_CODE (XEXP (x, 0)) == REG && GET_CODE (XEXP (x, 1)) == CONST_INT)
809 return REG_BASE_VALUE (XEXP (x, 0));
810 return 0;
812 case SYMBOL_REF:
813 case LABEL_REF:
814 return x;
816 default:
817 return 0;
821 /* Return 0 if the addresses X and Y are known to point to different
822 objects, 1 if they might be pointers to the same object. */
824 static int
825 base_alias_check (x, y, x_mode, y_mode)
826 rtx x, y;
827 enum machine_mode x_mode, y_mode;
829 rtx x_base = find_base_term (x);
830 rtx y_base = find_base_term (y);
832 /* If the address itself has no known base see if a known equivalent
833 value has one. If either address still has no known base, nothing
834 is known about aliasing. */
835 if (x_base == 0)
837 rtx x_c;
838 if (! flag_expensive_optimizations || (x_c = canon_rtx (x)) == x)
839 return 1;
840 x_base = find_base_term (x_c);
841 if (x_base == 0)
842 return 1;
845 if (y_base == 0)
847 rtx y_c;
848 if (! flag_expensive_optimizations || (y_c = canon_rtx (y)) == y)
849 return 1;
850 y_base = find_base_term (y_c);
851 if (y_base == 0)
852 return 1;
855 /* If the base addresses are equal nothing is known about aliasing. */
856 if (rtx_equal_p (x_base, y_base))
857 return 1;
859 /* The base addresses of the read and write are different expressions.
860 If they are both symbols and they are not accessed via AND, there is
861 no conflict. We can bring knowledge of object alignment into play
862 here. For example, on alpha, "char a, b;" can alias one another,
863 though "char a; long b;" cannot. */
864 if (GET_CODE (x_base) != ADDRESS && GET_CODE (y_base) != ADDRESS)
866 if (GET_CODE (x) == AND && GET_CODE (y) == AND)
867 return 1;
868 if (GET_CODE (x) == AND
869 && (GET_CODE (XEXP (x, 1)) != CONST_INT
870 || GET_MODE_UNIT_SIZE (y_mode) < -INTVAL (XEXP (x, 1))))
871 return 1;
872 if (GET_CODE (y) == AND
873 && (GET_CODE (XEXP (y, 1)) != CONST_INT
874 || GET_MODE_UNIT_SIZE (x_mode) < -INTVAL (XEXP (y, 1))))
875 return 1;
876 /* Differing symbols never alias. */
877 return 0;
880 /* If one address is a stack reference there can be no alias:
881 stack references using different base registers do not alias,
882 a stack reference can not alias a parameter, and a stack reference
883 can not alias a global. */
884 if ((GET_CODE (x_base) == ADDRESS && GET_MODE (x_base) == Pmode)
885 || (GET_CODE (y_base) == ADDRESS && GET_MODE (y_base) == Pmode))
886 return 0;
888 if (! flag_argument_noalias)
889 return 1;
891 if (flag_argument_noalias > 1)
892 return 0;
894 /* Weak noalias assertion (arguments are distinct, but may match globals). */
895 return ! (GET_MODE (x_base) == VOIDmode && GET_MODE (y_base) == VOIDmode);
898 /* Return the address of the (N_REFS + 1)th memory reference to ADDR
899 where SIZE is the size in bytes of the memory reference. If ADDR
900 is not modified by the memory reference then ADDR is returned. */
 902 rtx
 903 addr_side_effect_eval (addr, size, n_refs)
904 rtx addr;
905 int size;
906 int n_refs;
908 int offset = 0;
910 switch (GET_CODE (addr))
912 case PRE_INC:
913 offset = (n_refs + 1) * size;
914 break;
915 case PRE_DEC:
916 offset = -(n_refs + 1) * size;
917 break;
918 case POST_INC:
919 offset = n_refs * size;
920 break;
921 case POST_DEC:
922 offset = -n_refs * size;
923 break;
925 default:
926 return addr;
929 if (offset)
930 addr = gen_rtx_PLUS (GET_MODE (addr), XEXP (addr, 0), GEN_INT (offset));
931 else
932 addr = XEXP (addr, 0);
934 return addr;
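/* Added example: with SIZE == 4 and N_REFS == 0, a (post_inc (reg sp))
   address evaluates to the bare (reg sp), since the increment happens
   after the reference, while a (pre_inc (reg sp)) address evaluates to
   (plus (reg sp) (const_int 4)), the value actually used by the
   reference.  */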
937 /* Return nonzero if X and Y (memory addresses) could reference the
938 same location in memory. C is an offset accumulator. When
939 C is nonzero, we are testing aliases between X and Y + C.
940 XSIZE is the size in bytes of the X reference,
941 similarly YSIZE is the size in bytes for Y.
943 If XSIZE or YSIZE is zero, we do not know the amount of memory being
944 referenced (the reference was BLKmode), so make the most pessimistic
945 assumptions.
947 If XSIZE or YSIZE is negative, we may access memory outside the object
948 being referenced as a side effect. This can happen when using AND to
949 align memory references, as is done on the Alpha.
951 Nice to notice that varying addresses cannot conflict with fp if no
952 local variables had their addresses taken, but that's too hard now. */
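/* Added worked example for memrefs_conflict_p below: when X and Y
   canonicalize to the same address, XSIZE == 4, YSIZE == 4 and C == 4
   describe two 4 byte references placed 4 bytes apart; the test
   `c >= 0 && xsize > c' fails, so 0 (no conflict) is returned.  With
   C == 2 the references overlap by two bytes and the same test
   reports a conflict.  */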
955 static int
956 memrefs_conflict_p (xsize, x, ysize, y, c)
957 register rtx x, y;
958 int xsize, ysize;
959 HOST_WIDE_INT c;
961 if (GET_CODE (x) == HIGH)
962 x = XEXP (x, 0);
963 else if (GET_CODE (x) == LO_SUM)
964 x = XEXP (x, 1);
965 else
966 x = canon_rtx (addr_side_effect_eval (x, xsize, 0));
967 if (GET_CODE (y) == HIGH)
968 y = XEXP (y, 0);
969 else if (GET_CODE (y) == LO_SUM)
970 y = XEXP (y, 1);
971 else
972 y = canon_rtx (addr_side_effect_eval (y, ysize, 0));
974 if (rtx_equal_for_memref_p (x, y))
976 if (xsize <= 0 || ysize <= 0)
977 return 1;
978 if (c >= 0 && xsize > c)
979 return 1;
980 if (c < 0 && ysize+c > 0)
981 return 1;
982 return 0;
985 /* This code used to check for conflicts involving stack references and
986 globals but the base address alias code now handles these cases. */
988 if (GET_CODE (x) == PLUS)
990 /* The fact that X is canonicalized means that this
991 PLUS rtx is canonicalized. */
992 rtx x0 = XEXP (x, 0);
993 rtx x1 = XEXP (x, 1);
995 if (GET_CODE (y) == PLUS)
997 /* The fact that Y is canonicalized means that this
998 PLUS rtx is canonicalized. */
999 rtx y0 = XEXP (y, 0);
1000 rtx y1 = XEXP (y, 1);
1002 if (rtx_equal_for_memref_p (x1, y1))
1003 return memrefs_conflict_p (xsize, x0, ysize, y0, c);
1004 if (rtx_equal_for_memref_p (x0, y0))
1005 return memrefs_conflict_p (xsize, x1, ysize, y1, c);
1006 if (GET_CODE (x1) == CONST_INT)
1008 if (GET_CODE (y1) == CONST_INT)
1009 return memrefs_conflict_p (xsize, x0, ysize, y0,
1010 c - INTVAL (x1) + INTVAL (y1));
1011 else
1012 return memrefs_conflict_p (xsize, x0, ysize, y,
1013 c - INTVAL (x1));
1015 else if (GET_CODE (y1) == CONST_INT)
1016 return memrefs_conflict_p (xsize, x, ysize, y0, c + INTVAL (y1));
1018 return 1;
1020 else if (GET_CODE (x1) == CONST_INT)
1021 return memrefs_conflict_p (xsize, x0, ysize, y, c - INTVAL (x1));
1023 else if (GET_CODE (y) == PLUS)
1025 /* The fact that Y is canonicalized means that this
1026 PLUS rtx is canonicalized. */
1027 rtx y0 = XEXP (y, 0);
1028 rtx y1 = XEXP (y, 1);
1030 if (GET_CODE (y1) == CONST_INT)
1031 return memrefs_conflict_p (xsize, x, ysize, y0, c + INTVAL (y1));
1032 else
1033 return 1;
1036 if (GET_CODE (x) == GET_CODE (y))
1037 switch (GET_CODE (x))
1039 case MULT:
1041 /* Handle cases where we expect the second operands to be the
1042 same, and check only whether the first operand would conflict
1043 or not. */
1044 rtx x0, y0;
1045 rtx x1 = canon_rtx (XEXP (x, 1));
1046 rtx y1 = canon_rtx (XEXP (y, 1));
1047 if (! rtx_equal_for_memref_p (x1, y1))
1048 return 1;
1049 x0 = canon_rtx (XEXP (x, 0));
1050 y0 = canon_rtx (XEXP (y, 0));
1051 if (rtx_equal_for_memref_p (x0, y0))
1052 return (xsize == 0 || ysize == 0
1053 || (c >= 0 && xsize > c) || (c < 0 && ysize+c > 0));
1055 /* Can't properly adjust our sizes. */
1056 if (GET_CODE (x1) != CONST_INT)
1057 return 1;
1058 xsize /= INTVAL (x1);
1059 ysize /= INTVAL (x1);
1060 c /= INTVAL (x1);
1061 return memrefs_conflict_p (xsize, x0, ysize, y0, c);
1064 case REG:
1065 /* Are these registers known not to be equal? */
1066 if (alias_invariant)
1068 unsigned int r_x = REGNO (x), r_y = REGNO (y);
1069 rtx i_x, i_y; /* invariant relationships of X and Y */
1071 i_x = r_x >= reg_base_value_size ? 0 : alias_invariant[r_x];
1072 i_y = r_y >= reg_base_value_size ? 0 : alias_invariant[r_y];
1074 if (i_x == 0 && i_y == 0)
1075 break;
1077 if (! memrefs_conflict_p (xsize, i_x ? i_x : x,
1078 ysize, i_y ? i_y : y, c))
1079 return 0;
1081 break;
1083 default:
1084 break;
1087 /* Treat an access through an AND (e.g. a subword access on an Alpha)
1088 as an access with indeterminate size. Assume that references
1089 besides AND are aligned, so if the size of the other reference is
1090 at least as large as the alignment, assume no other overlap. */
1091 if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT)
1093 if (GET_CODE (y) == AND || ysize < -INTVAL (XEXP (x, 1)))
1094 xsize = -1;
1095 return memrefs_conflict_p (xsize, XEXP (x, 0), ysize, y, c);
1097 if (GET_CODE (y) == AND && GET_CODE (XEXP (y, 1)) == CONST_INT)
1099 /* ??? If we are indexing far enough into the array/structure, we
1100 may yet be able to determine that we can not overlap. But we
 1101 	 also need to know that we are far enough from the end not to overlap
1102 a following reference, so we do nothing with that for now. */
1103 if (GET_CODE (x) == AND || xsize < -INTVAL (XEXP (y, 1)))
1104 ysize = -1;
1105 return memrefs_conflict_p (xsize, x, ysize, XEXP (y, 0), c);
1108 if (CONSTANT_P (x))
1110 if (GET_CODE (x) == CONST_INT && GET_CODE (y) == CONST_INT)
1112 c += (INTVAL (y) - INTVAL (x));
1113 return (xsize <= 0 || ysize <= 0
1114 || (c >= 0 && xsize > c) || (c < 0 && ysize+c > 0));
1117 if (GET_CODE (x) == CONST)
1119 if (GET_CODE (y) == CONST)
1120 return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)),
1121 ysize, canon_rtx (XEXP (y, 0)), c);
1122 else
1123 return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)),
1124 ysize, y, c);
1126 if (GET_CODE (y) == CONST)
1127 return memrefs_conflict_p (xsize, x, ysize,
1128 canon_rtx (XEXP (y, 0)), c);
1130 if (CONSTANT_P (y))
1131 return (xsize < 0 || ysize < 0
1132 || (rtx_equal_for_memref_p (x, y)
1133 && (xsize == 0 || ysize == 0
1134 || (c >= 0 && xsize > c) || (c < 0 && ysize+c > 0))));
1136 return 1;
1138 return 1;
1141 /* Functions to compute memory dependencies.
1143 Since we process the insns in execution order, we can build tables
1144 to keep track of what registers are fixed (and not aliased), what registers
1145 are varying in known ways, and what registers are varying in unknown
1146 ways.
1148 If both memory references are volatile, then there must always be a
1149 dependence between the two references, since their order can not be
1150 changed. A volatile and non-volatile reference can be interchanged
1151 though.
1153 A MEM_IN_STRUCT reference at a non-QImode non-AND varying address can never
1154 conflict with a non-MEM_IN_STRUCT reference at a fixed address. We must
1155 allow QImode aliasing because the ANSI C standard allows character
1156 pointers to alias anything. We are assuming that characters are
1157 always QImode here. We also must allow AND addresses, because they may
1158 generate accesses outside the object being referenced. This is used to
1159 generate aligned addresses from unaligned addresses, for instance, the
1160 alpha storeqi_unaligned pattern. */
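/* Added illustration: the QImode allowance above covers code such as

       double d;
       void clear_first_byte (char *p) { *p = 0; }

   where `p' may legitimately point into `d'; ISO C lets a `char *'
   access an object of any type, so a QImode store must be assumed to
   conflict with any reference to `d'.  */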
1162 /* Read dependence: X is read after read in MEM takes place. There can
1163 only be a dependence here if both reads are volatile. */
 1165 int
 1166 read_dependence (mem, x)
1167 rtx mem;
1168 rtx x;
1170 return MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem);
1173 /* Returns MEM1 if and only if MEM1 is a scalar at a fixed address and
1174 MEM2 is a reference to a structure at a varying address, or returns
1175 MEM2 if vice versa. Otherwise, returns NULL_RTX. If a non-NULL
1176 value is returned MEM1 and MEM2 can never alias. VARIES_P is used
1177 to decide whether or not an address may vary; it should return
 1178    nonzero whenever variation is possible.  */
1180 static rtx
1181 fixed_scalar_and_varying_struct_p (mem1, mem2, varies_p)
1182 rtx mem1;
1183 rtx mem2;
1184 int (*varies_p) PROTO((rtx));
1186 rtx mem1_addr = XEXP (mem1, 0);
1187 rtx mem2_addr = XEXP (mem2, 0);
1189 if (MEM_SCALAR_P (mem1) && MEM_IN_STRUCT_P (mem2)
1190 && !varies_p (mem1_addr) && varies_p (mem2_addr))
1191 /* MEM1 is a scalar at a fixed address; MEM2 is a struct at a
1192 varying address. */
1193 return mem1;
1195 if (MEM_IN_STRUCT_P (mem1) && MEM_SCALAR_P (mem2)
1196 && varies_p (mem1_addr) && !varies_p (mem2_addr))
1197 /* MEM2 is a scalar at a fixed address; MEM1 is a struct at a
1198 varying address. */
1199 return mem2;
1201 return NULL_RTX;
 1204 /* Returns nonzero if something about the mode or address format of MEM
1205 indicates that it might well alias *anything*. */
1207 static int
1208 aliases_everything_p (mem)
1209 rtx mem;
1211 if (GET_MODE (mem) == QImode)
1212 /* ANSI C says that a `char*' can point to anything. */
1213 return 1;
1215 if (GET_CODE (XEXP (mem, 0)) == AND)
 1216   if (GET_CODE (XEXP (mem, 0)) == AND)
 1217     /* If the address is an AND, it's very hard to know what it is
 1218        actually pointing at.  */
1218 return 1;
1220 return 0;
1223 /* True dependence: X is read after store in MEM takes place. */
 1225 int
 1226 true_dependence (mem, mem_mode, x, varies)
1227 rtx mem;
1228 enum machine_mode mem_mode;
1229 rtx x;
1230 int (*varies) PROTO((rtx));
1232 register rtx x_addr, mem_addr;
1234 if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
1235 return 1;
1237 if (DIFFERENT_ALIAS_SETS_P (x, mem))
1238 return 0;
1240 /* If X is an unchanging read, then it can't possibly conflict with any
1241 non-unchanging store. It may conflict with an unchanging write though,
1242 because there may be a single store to this address to initialize it.
1243 Just fall through to the code below to resolve the case where we have
1244 both an unchanging read and an unchanging write. This won't handle all
1245 cases optimally, but the possible performance loss should be
1246 negligible. */
1247 if (RTX_UNCHANGING_P (x) && ! RTX_UNCHANGING_P (mem))
1248 return 0;
1250 if (mem_mode == VOIDmode)
1251 mem_mode = GET_MODE (mem);
1253 if (! base_alias_check (XEXP (x, 0), XEXP (mem, 0), GET_MODE (x), mem_mode))
1254 return 0;
1256 x_addr = canon_rtx (XEXP (x, 0));
1257 mem_addr = canon_rtx (XEXP (mem, 0));
1259 if (! memrefs_conflict_p (GET_MODE_SIZE (mem_mode), mem_addr,
1260 SIZE_FOR_MODE (x), x_addr, 0))
1261 return 0;
1263 if (aliases_everything_p (x))
1264 return 1;
 1266   /* We cannot use aliases_everything_p to test MEM, since we must look
1267 at MEM_MODE, rather than GET_MODE (MEM). */
1268 if (mem_mode == QImode || GET_CODE (mem_addr) == AND)
1269 return 1;
1271 /* In true_dependence we also allow BLKmode to alias anything. Why
1272 don't we do this in anti_dependence and output_dependence? */
1273 if (mem_mode == BLKmode || GET_MODE (x) == BLKmode)
1274 return 1;
1276 return !fixed_scalar_and_varying_struct_p (mem, x, varies);
1279 /* Returns non-zero if a write to X might alias a previous read from
1280 (or, if WRITEP is non-zero, a write to) MEM. */
1282 static int
1283 write_dependence_p (mem, x, writep)
1284 rtx mem;
1285 rtx x;
1286 int writep;
1288 rtx x_addr, mem_addr;
1289 rtx fixed_scalar;
1291 if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
1292 return 1;
1294 /* If MEM is an unchanging read, then it can't possibly conflict with
1295 the store to X, because there is at most one store to MEM, and it must
1296 have occurred somewhere before MEM. */
1297 if (!writep && RTX_UNCHANGING_P (mem))
1298 return 0;
1300 if (! base_alias_check (XEXP (x, 0), XEXP (mem, 0), GET_MODE (x),
1301 GET_MODE (mem)))
1302 return 0;
1304 x = canon_rtx (x);
1305 mem = canon_rtx (mem);
1307 if (DIFFERENT_ALIAS_SETS_P (x, mem))
1308 return 0;
1310 x_addr = XEXP (x, 0);
1311 mem_addr = XEXP (mem, 0);
1313 if (!memrefs_conflict_p (SIZE_FOR_MODE (mem), mem_addr,
1314 SIZE_FOR_MODE (x), x_addr, 0))
1315 return 0;
1317 fixed_scalar
1318 = fixed_scalar_and_varying_struct_p (mem, x, rtx_addr_varies_p);
1320 return (!(fixed_scalar == mem && !aliases_everything_p (x))
1321 && !(fixed_scalar == x && !aliases_everything_p (mem)));
1324 /* Anti dependence: X is written after read in MEM takes place. */
 1326 int
 1327 anti_dependence (mem, x)
1328 rtx mem;
1329 rtx x;
1331 return write_dependence_p (mem, x, /*writep=*/0);
1334 /* Output dependence: X is written after store in MEM takes place. */
 1336 int
 1337 output_dependence (mem, x)
1338 register rtx mem;
1339 register rtx x;
1341 return write_dependence_p (mem, x, /*writep=*/1);
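/* Added summary of the three dependence tests, using a small example:

       *p = 1;       a store
       t  = *q;      a load; true_dependence on the store above when
                     p and q may reference the same object
       *q = 2;       a store; anti_dependence on the load above and
                     output_dependence on the first store

   These predicates tell the scheduler and other passes which memory
   references must keep their original order.  */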
1344 /* Returns non-zero if X might refer to something which is not
1345 local to the function and is not constant. */
1347 static int
1348 nonlocal_reference_p (x)
1349 rtx x;
1351 rtx base;
1352 register RTX_CODE code;
1353 int regno;
1355 code = GET_CODE (x);
1357 if (GET_RTX_CLASS (code) == 'i')
 1359       /* A call to a constant function creates no nonlocal references.  */
1360 if (code == CALL_INSN && CONST_CALL_P (x))
1361 return 0;
1362 x = PATTERN (x);
1363 code = GET_CODE (x);
1366 switch (code)
1368 case SUBREG:
1369 if (GET_CODE (SUBREG_REG (x)) == REG)
1371 /* Global registers are not local. */
1372 if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
1373 && global_regs[REGNO (SUBREG_REG (x)) + SUBREG_WORD (x)])
1374 return 1;
1375 return 0;
1377 break;
1379 case REG:
1380 regno = REGNO (x);
1381 /* Global registers are not local. */
1382 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
1383 return 1;
1384 return 0;
1386 case SCRATCH:
1387 case PC:
1388 case CC0:
1389 case CONST_INT:
1390 case CONST_DOUBLE:
1391 case CONST:
1392 case LABEL_REF:
1393 return 0;
1395 case SYMBOL_REF:
 1396       /* Constants in the function's constant pool are constant.  */
1397 if (CONSTANT_POOL_ADDRESS_P (x))
1398 return 0;
1399 return 1;
1401 case CALL:
1402 /* Recursion introduces no additional considerations. */
1403 if (GET_CODE (XEXP (x, 0)) == MEM
1404 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
1405 && strcmp(XSTR (XEXP (XEXP (x, 0), 0), 0),
1406 IDENTIFIER_POINTER (
1407 DECL_ASSEMBLER_NAME (current_function_decl))) == 0)
1408 return 0;
1409 return 1;
1411 case MEM:
1412 /* Be overly conservative and consider any volatile memory
1413 reference as not local. */
1414 if (MEM_VOLATILE_P (x))
1415 return 1;
1416 base = find_base_term (XEXP (x, 0));
1417 if (base)
1419 /* Stack references are local. */
1420 if (GET_CODE (base) == ADDRESS && GET_MODE (base) == Pmode)
1421 return 0;
1422 /* Constants in the function's constant pool are constant. */
1423 if (GET_CODE (base) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (base))
1424 return 0;
1426 return 1;
1428 case ASM_INPUT:
1429 case ASM_OPERANDS:
1430 return 1;
1432 default:
1433 break;
1436 /* Recursively scan the operands of this expression. */
1439 register const char *fmt = GET_RTX_FORMAT (code);
1440 register int i;
1442 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1444 if (fmt[i] == 'e')
1446 if (nonlocal_reference_p (XEXP (x, i)))
1447 return 1;
1449 if (fmt[i] == 'E')
1451 register int j;
1452 for (j = 0; j < XVECLEN (x, i); j++)
1453 if (nonlocal_reference_p (XVECEXP (x, i, j)))
1454 return 1;
1459 return 0;
1462 /* Mark the function if it is constant. */
1464 void
1465 mark_constant_function ()
1467 rtx insn;
1469 if (TREE_PUBLIC (current_function_decl)
1470 || TREE_READONLY (current_function_decl)
1471 || TREE_THIS_VOLATILE (current_function_decl)
1472 || TYPE_MODE (TREE_TYPE (current_function_decl)) == VOIDmode)
1473 return;
1475 /* Determine if this is a constant function. */
1477 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1478 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
1479 && nonlocal_reference_p (insn))
1480 return;
1482 /* Mark the function. */
1484 TREE_READONLY (current_function_decl) = 1;
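/* Added example: a function such as

       static int
       square (int x)
       {
         return x * x;
       }

   touches no global memory and makes no calls, so no insn triggers
   nonlocal_reference_p and TREE_READONLY is set; later passes can
   then treat calls to it as constant.  It must be static, since
   TREE_PUBLIC functions are skipped above.  */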
1488 static HARD_REG_SET argument_registers;
1490 void
1491 init_alias_once ()
1493 register int i;
1495 #ifndef OUTGOING_REGNO
1496 #define OUTGOING_REGNO(N) N
1497 #endif
1498 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1499 /* Check whether this register can hold an incoming pointer
1500 argument. FUNCTION_ARG_REGNO_P tests outgoing register
1501 numbers, so translate if necessary due to register windows. */
1502 if (FUNCTION_ARG_REGNO_P (OUTGOING_REGNO (i))
1503 && HARD_REGNO_MODE_OK (i, Pmode))
1504 SET_HARD_REG_BIT (argument_registers, i);
1506 alias_sets = splay_tree_new (splay_tree_compare_ints, 0, 0);
1509 void
1510 init_alias_analysis ()
1512 int maxreg = max_reg_num ();
1513 int changed, pass;
1514 register int i;
1515 register unsigned int ui;
1516 register rtx insn;
1518 reg_known_value_size = maxreg;
1520 reg_known_value
1521 = (rtx *) oballoc ((maxreg - FIRST_PSEUDO_REGISTER) * sizeof (rtx))
1522 - FIRST_PSEUDO_REGISTER;
1523 reg_known_equiv_p =
1524 oballoc (maxreg - FIRST_PSEUDO_REGISTER) - FIRST_PSEUDO_REGISTER;
1525 bzero ((char *) (reg_known_value + FIRST_PSEUDO_REGISTER),
1526 (maxreg-FIRST_PSEUDO_REGISTER) * sizeof (rtx));
1527 bzero (reg_known_equiv_p + FIRST_PSEUDO_REGISTER,
1528 (maxreg - FIRST_PSEUDO_REGISTER) * sizeof (char));
1530 /* Overallocate reg_base_value to allow some growth during loop
1531 optimization. Loop unrolling can create a large number of
1532 registers. */
1533 reg_base_value_size = maxreg * 2;
1534 reg_base_value = (rtx *)oballoc (reg_base_value_size * sizeof (rtx));
1535 new_reg_base_value = (rtx *)alloca (reg_base_value_size * sizeof (rtx));
1536 reg_seen = (char *)alloca (reg_base_value_size);
1537 bzero ((char *) reg_base_value, reg_base_value_size * sizeof (rtx));
1538 if (! reload_completed && flag_unroll_loops)
1540 alias_invariant = (rtx *)xrealloc (alias_invariant,
1541 reg_base_value_size * sizeof (rtx));
1542 bzero ((char *)alias_invariant, reg_base_value_size * sizeof (rtx));
1546 /* The basic idea is that each pass through this loop will use the
1547 "constant" information from the previous pass to propagate alias
1548 information through another level of assignments.
1550 This could get expensive if the assignment chains are long. Maybe
1551 we should throttle the number of iterations, possibly based on
1552 the optimization level or flag_expensive_optimizations.
1554 We could propagate more information in the first pass by making use
1555 of REG_N_SETS to determine immediately that the alias information
1556 for a pseudo is "constant".
1558 A program with an uninitialized variable can cause an infinite loop
1559 here. Instead of doing a full dataflow analysis to detect such problems
1560 we just cap the number of iterations for the loop.
1562 The state of the arrays for the set chain in question does not matter
1563 since the program has undefined behavior. */
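/* Added illustration: with a copy chain like

       (set (reg 101) (symbol_ref "a"))
       (set (reg 102) (reg 101))
       (set (reg 103) (reg 102))

   each pass can only consult the reg_base_value entries computed by
   the previous pass, so the SYMBOL_REF base propagates one link
   further down the chain per iteration; the loop therefore repeats
   until nothing changes or MAX_ALIAS_LOOP_PASSES is reached.  */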
1565 pass = 0;
1568 /* Assume nothing will change this iteration of the loop. */
1569 changed = 0;
1571 /* We want to assign the same IDs each iteration of this loop, so
1572 start counting from zero each iteration of the loop. */
1573 unique_id = 0;
 1575       /* We're at the start of the function each iteration through the
1576 loop, so we're copying arguments. */
1577 copying_arguments = 1;
1579 /* Wipe the potential alias information clean for this pass. */
1580 bzero ((char *) new_reg_base_value, reg_base_value_size * sizeof (rtx));
1582 /* Wipe the reg_seen array clean. */
1583 bzero ((char *) reg_seen, reg_base_value_size);
1585 /* Mark all hard registers which may contain an address.
1586 The stack, frame and argument pointers may contain an address.
1587 An argument register which can hold a Pmode value may contain
1588 an address even if it is not in BASE_REGS.
1590 The address expression is VOIDmode for an argument and
1591 Pmode for other registers. */
1593 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1594 if (TEST_HARD_REG_BIT (argument_registers, i))
1595 new_reg_base_value[i] = gen_rtx_ADDRESS (VOIDmode,
1596 gen_rtx_REG (Pmode, i));
1598 new_reg_base_value[STACK_POINTER_REGNUM]
1599 = gen_rtx_ADDRESS (Pmode, stack_pointer_rtx);
1600 new_reg_base_value[ARG_POINTER_REGNUM]
1601 = gen_rtx_ADDRESS (Pmode, arg_pointer_rtx);
1602 new_reg_base_value[FRAME_POINTER_REGNUM]
1603 = gen_rtx_ADDRESS (Pmode, frame_pointer_rtx);
1604 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
1605 new_reg_base_value[HARD_FRAME_POINTER_REGNUM]
1606 = gen_rtx_ADDRESS (Pmode, hard_frame_pointer_rtx);
1607 #endif
1608 if (struct_value_incoming_rtx
1609 && GET_CODE (struct_value_incoming_rtx) == REG)
1610 new_reg_base_value[REGNO (struct_value_incoming_rtx)]
1611 = gen_rtx_ADDRESS (Pmode, struct_value_incoming_rtx);
1613 if (static_chain_rtx
1614 && GET_CODE (static_chain_rtx) == REG)
1615 new_reg_base_value[REGNO (static_chain_rtx)]
1616 = gen_rtx_ADDRESS (Pmode, static_chain_rtx);
1618 /* Walk the insns adding values to the new_reg_base_value array. */
1619 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1621 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
1622 if (prologue_epilogue_contains (insn))
1623 continue;
1624 #endif
1625 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1627 rtx note, set;
 1628 	    /* If this insn has a noalias note, process it.  Otherwise,
1629 scan for sets. A simple set will have no side effects
1630 which could change the base value of any other register. */
1632 if (GET_CODE (PATTERN (insn)) == SET
1633 && (find_reg_note (insn, REG_NOALIAS, NULL_RTX)))
1634 record_set (SET_DEST (PATTERN (insn)), NULL_RTX);
1635 else
1636 note_stores (PATTERN (insn), record_set);
1638 set = single_set (insn);
1640 if (set != 0
1641 && GET_CODE (SET_DEST (set)) == REG
1642 && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER
1643 && (((note = find_reg_note (insn, REG_EQUAL, 0)) != 0
1644 && REG_N_SETS (REGNO (SET_DEST (set))) == 1)
1645 || (note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) != 0)
1646 && GET_CODE (XEXP (note, 0)) != EXPR_LIST
1647 && ! reg_overlap_mentioned_p (SET_DEST (set), XEXP (note, 0)))
1649 int regno = REGNO (SET_DEST (set));
1650 reg_known_value[regno] = XEXP (note, 0);
1651 reg_known_equiv_p[regno] = REG_NOTE_KIND (note) == REG_EQUIV;
1654 else if (GET_CODE (insn) == NOTE
1655 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG)
1656 copying_arguments = 0;
1659 /* Now propagate values from new_reg_base_value to reg_base_value. */
1660 for (ui = 0; ui < reg_base_value_size; ui++)
1662 if (new_reg_base_value[ui]
1663 && new_reg_base_value[ui] != reg_base_value[ui]
1664 && ! rtx_equal_p (new_reg_base_value[ui], reg_base_value[ui]))
1666 reg_base_value[ui] = new_reg_base_value[ui];
1667 changed = 1;
1671 while (changed && ++pass < MAX_ALIAS_LOOP_PASSES);
1673 /* Fill in the remaining entries. */
1674 for (i = FIRST_PSEUDO_REGISTER; i < maxreg; i++)
1675 if (reg_known_value[i] == 0)
1676 reg_known_value[i] = regno_reg_rtx[i];
1678 /* Simplify the reg_base_value array so that no register refers to
1679 another register, except to special registers indirectly through
1680 ADDRESS expressions.
1682 In theory this loop can take as long as O(registers^2), but unless
1683 there are very long dependency chains it will run in close to linear
1684 time.
1686 This loop may not be needed any longer now that the main loop does
1687 a better job at propagating alias information. */
1688 pass = 0;
1691 changed = 0;
1692 pass++;
1693 for (ui = 0; ui < reg_base_value_size; ui++)
1695 rtx base = reg_base_value[ui];
1696 if (base && GET_CODE (base) == REG)
1698 unsigned int base_regno = REGNO (base);
1699 if (base_regno == ui) /* register set from itself */
1700 reg_base_value[ui] = 0;
1701 else
1702 reg_base_value[ui] = reg_base_value[base_regno];
1703 changed = 1;
1707 while (changed && pass < MAX_ALIAS_LOOP_PASSES);
1709 new_reg_base_value = 0;
1710 reg_seen = 0;
1713 void
1714 end_alias_analysis ()
1716 reg_known_value = 0;
1717 reg_base_value = 0;
1718 reg_base_value_size = 0;
1719 if (alias_invariant)
1721 free ((char *)alias_invariant);
1722 alias_invariant = 0;