1 /* Alias analysis for GNU C
2 Copyright (C) 1997, 1998, 1999, 2000 Free Software Foundation, Inc.
3 Contributed by John Carr (jfc@mit.edu).
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
10 any later version.
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "rtl.h"
25 #include "tree.h"
26 #include "tm_p.h"
27 #include "function.h"
28 #include "insn-flags.h"
29 #include "expr.h"
30 #include "regs.h"
31 #include "hard-reg-set.h"
32 #include "flags.h"
33 #include "output.h"
34 #include "toplev.h"
35 #include "cselib.h"
36 #include "splay-tree.h"
37 #include "ggc.h"
39 /* The alias sets assigned to MEMs assist the back-end in determining
40 which MEMs can alias which other MEMs. In general, two MEMs in
41 different alias sets do not alias each other. There is one
42 exception, however. Consider something like:
44 struct S {int i; double d; };
46 a store to an `S' can alias something of either type `int' or type
47 `double'. (However, a store to an `int' cannot alias a `double'
48 and vice versa.) We indicate this via a tree structure that looks
49 like:
50 struct S
51 / \
52 / \
53 |/_ _\|
54 int double
56 (The arrows are directed and point downwards.) If, when comparing
57 two alias sets, we can hold one set fixed, trace the other set
58 downwards, and at some point find the first set, the two MEMs can
59 alias one another. In this situation we say the alias set for
60 `struct S' is the `superset' and that those for `int' and `double'
61 are `subsets'.
63 Alias set zero is implicitly a superset of all other alias sets.
64 However, there is no actual entry for alias set zero. It is an
65 error to attempt to explicitly construct a subset of zero. */
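/* (Illustrative note: alias set zero acts as a wildcard. A MEM whose
   MEM_ALIAS_SET is 0, say one synthesized by the back end with no type
   information, is assumed to conflict with every other MEM no matter
   what alias set that other MEM carries.)  */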
67 typedef struct alias_set_entry
69 /* The alias set number, as stored in MEM_ALIAS_SET. */
70 int alias_set;
72 /* The children of the alias set. These are not just the immediate
73 children, but, in fact, all children. So, if we have:
75 struct T { struct S s; float f; }
77 continuing our example above, the children here will be all of
78 `int', `double', `float', and `struct S'. */
79 splay_tree children;
80 } *alias_set_entry;
82 static int rtx_equal_for_memref_p PARAMS ((rtx, rtx));
83 static rtx find_symbolic_term PARAMS ((rtx));
84 static rtx get_addr PARAMS ((rtx));
85 static int memrefs_conflict_p PARAMS ((int, rtx, int, rtx,
86 HOST_WIDE_INT));
87 static void record_set PARAMS ((rtx, rtx, void *));
88 static rtx find_base_term PARAMS ((rtx));
89 static int base_alias_check PARAMS ((rtx, rtx, enum machine_mode,
90 enum machine_mode));
91 static rtx find_base_value PARAMS ((rtx));
92 static int mems_in_disjoint_alias_sets_p PARAMS ((rtx, rtx));
93 static int insert_subset_children PARAMS ((splay_tree_node, void*));
94 static alias_set_entry get_alias_set_entry PARAMS ((int));
95 static rtx fixed_scalar_and_varying_struct_p PARAMS ((rtx, rtx, rtx, rtx,
96 int (*)(rtx)));
97 static int aliases_everything_p PARAMS ((rtx));
98 static int write_dependence_p PARAMS ((rtx, rtx, int));
99 static int nonlocal_reference_p PARAMS ((rtx));
101 /* Set up all info needed to perform alias analysis on memory references. */
103 /* Returns the size in bytes of the mode of X. */
104 #define SIZE_FOR_MODE(X) (GET_MODE_SIZE (GET_MODE (X)))
106 /* Returns nonzero if MEM1 and MEM2 do not alias because they are in
107 different alias sets. We ignore alias sets in functions making use
108 of variable arguments because the va_arg macros on some systems are
109 not legal ANSI C. */
110 #define DIFFERENT_ALIAS_SETS_P(MEM1, MEM2) \
111 mems_in_disjoint_alias_sets_p (MEM1, MEM2)
113 /* Cap the number of passes we make over the insns propagating alias
114 information through set chains.
116 10 is a completely arbitrary choice. */
117 #define MAX_ALIAS_LOOP_PASSES 10
119 /* reg_base_value[N] gives an address to which register N is related.
120 If all sets after the first add or subtract to the current value
121 or otherwise modify it so it does not point to a different top level
122 object, reg_base_value[N] is equal to the address part of the source
123 of the first set.
125 A base address can be an ADDRESS, SYMBOL_REF, or LABEL_REF. ADDRESS
126 expressions represent certain special values: function arguments and
127 the stack, frame, and argument pointers.
129 The contents of an ADDRESS are not normally used; the mode of the
130 ADDRESS determines whether the ADDRESS is a function argument or some
131 other special value. Pointer equality, not rtx_equal_p, determines whether
132 two ADDRESS expressions refer to the same base address.
134 The only use of the contents of an ADDRESS is for determining if the
135 current function performs nonlocal memory references for the
136 purposes of marking the function as a constant function. */
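/* As a hypothetical illustration (the pseudo-register numbers are made
   up): given

     (set (reg 101) (symbol_ref "x"))
     (set (reg 102) (plus (reg 101) (const_int 8)))

   both reg_base_value[101] and reg_base_value[102] end up as the
   SYMBOL_REF for `x', since the second set merely offsets the first.  */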
138 static rtx *reg_base_value;
139 static rtx *new_reg_base_value;
140 static unsigned int reg_base_value_size; /* size of reg_base_value array */
142 #define REG_BASE_VALUE(X) \
143 ((unsigned) REGNO (X) < reg_base_value_size ? reg_base_value[REGNO (X)] : 0)
145 /* Vector of known invariant relationships between registers. Set in
146 loop unrolling. Indexed by register number, if nonzero the value
147 is an expression describing this register in terms of another.
149 The length of this array is REG_BASE_VALUE_SIZE.
151 Because this array contains only pseudo registers it has no effect
152 after reload. */
153 static rtx *alias_invariant;
155 /* Vector indexed by N giving the initial (unchanging) value known for
156 pseudo-register N. This array is initialized in
157 init_alias_analysis, and does not change until end_alias_analysis
158 is called. */
159 rtx *reg_known_value;
161 /* Indicates number of valid entries in reg_known_value. */
162 static unsigned int reg_known_value_size;
164 /* Vector recording for each reg_known_value whether it is due to a
165 REG_EQUIV note. Future passes (viz., reload) may replace the
166 pseudo with the equivalent expression and so we account for the
167 dependences that would be introduced if that happens. */
168 /* ??? This is a problem only on the Convex. The REG_EQUIV notes created in
169 assign_parms mention the arg pointer, and there are explicit insns in the
170 RTL that modify the arg pointer. Thus we must ensure that such insns don't
171 get scheduled across each other because that would invalidate the REG_EQUIV
172 notes. One could argue that the REG_EQUIV notes are wrong, but solving
173 the problem in the scheduler will likely give better code, so we do it
174 here. */
175 char *reg_known_equiv_p;
177 /* True when scanning insns from the start of the rtl to the
178 NOTE_INSN_FUNCTION_BEG note. */
180 static int copying_arguments;
182 /* The splay-tree used to store the various alias set entries. */
184 static splay_tree alias_sets;
186 /* Returns a pointer to the alias set entry for ALIAS_SET, if there is
187 such an entry, or NULL otherwise. */
189 static alias_set_entry
190 get_alias_set_entry (alias_set)
191 int alias_set;
193 splay_tree_node sn
194 = splay_tree_lookup (alias_sets, (splay_tree_key) alias_set);
196 return sn != 0 ? ((alias_set_entry) sn->value) : 0;
199 /* Returns a nonzero value if the alias sets for MEM1 and MEM2 are such
200 that the two MEMs cannot alias each other. */
202 static int
203 mems_in_disjoint_alias_sets_p (mem1, mem2)
204 rtx mem1;
205 rtx mem2;
207 alias_set_entry ase;
209 #ifdef ENABLE_CHECKING
210 /* Perform a basic sanity check. Namely, that there are no alias sets
211 if we're not using strict aliasing. This helps to catch bugs
212 whereby someone uses PUT_CODE, but doesn't clear MEM_ALIAS_SET, or
213 where a MEM is allocated in some way other than by the use of
214 gen_rtx_MEM, and the MEM_ALIAS_SET is not cleared. If we begin to
215 use alias sets to indicate that spilled registers cannot alias each
216 other, we might need to remove this check. */
217 if (! flag_strict_aliasing
218 && (MEM_ALIAS_SET (mem1) != 0 || MEM_ALIAS_SET (mem2) != 0))
219 abort ();
220 #endif
222 /* The code used in varargs macros is often not conforming ANSI C,
223 which can trick the compiler into making incorrect aliasing
224 assumptions in these functions. So, we don't use alias sets in
225 such a function. FIXME: This should be moved into the front-end;
226 it is a language-dependent notion, and there's no reason not to
227 still use these checks to handle globals. */
228 if (current_function_stdarg || current_function_varargs)
229 return 0;
231 /* If we have no alias set information for one of the MEMs, we have to assume
232 it can alias anything. */
233 if (MEM_ALIAS_SET (mem1) == 0 || MEM_ALIAS_SET (mem2) == 0)
234 return 0;
236 /* If the two alias sets are the same, they may alias. */
237 if (MEM_ALIAS_SET (mem1) == MEM_ALIAS_SET (mem2))
238 return 0;
240 /* Iterate through each of the children of the first alias set,
241 comparing it with the second alias set. */
242 ase = get_alias_set_entry (MEM_ALIAS_SET (mem1));
243 if (ase != 0 && splay_tree_lookup (ase->children,
244 (splay_tree_key) MEM_ALIAS_SET (mem2)))
245 return 0;
247 /* Now do the same, but with the alias sets reversed. */
248 ase = get_alias_set_entry (MEM_ALIAS_SET (mem2));
249 if (ase != 0 && splay_tree_lookup (ase->children,
250 (splay_tree_key) MEM_ALIAS_SET (mem1)))
251 return 0;
253 /* The two MEMs are in distinct alias sets, and neither one is the
254 child of the other. Therefore, they cannot alias. */
255 return 1;
258 /* Insert the NODE into the splay tree given by DATA. Used by
259 record_alias_subset via splay_tree_foreach. */
261 static int
262 insert_subset_children (node, data)
263 splay_tree_node node;
264 void *data;
266 splay_tree_insert ((splay_tree) data, node->key, node->value);
268 return 0;
271 /* Indicate that things in SUBSET can alias things in SUPERSET, but
272 not vice versa. For example, in C, a store to an `int' can alias a
273 structure containing an `int', but not vice versa. Here, the
274 structure would be the SUPERSET and `int' the SUBSET. This
275 function should be called only once per SUPERSET/SUBSET pair. At
276 present any given alias set may only be a subset of one superset.
278 It is illegal for SUPERSET to be zero; everything is implicitly a
279 subset of alias set zero. */
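/* As a sketch of the expected usage (the alias set numbers are
   hypothetical): if the front end assigned sets 1, 2 and 3 to
   `struct S', `int' and `double' from the example above, it would call

     record_alias_subset (1, 2);
     record_alias_subset (1, 3);

   after which an `S' access may alias an `int' or `double' access,
   while `int' and `double' accesses remain disjoint.  */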
281 void
282 record_alias_subset (superset, subset)
283 int superset;
284 int subset;
286 alias_set_entry superset_entry;
287 alias_set_entry subset_entry;
289 if (superset == 0)
290 abort ();
292 superset_entry = get_alias_set_entry (superset);
293 if (superset_entry == 0)
295 /* Create an entry for the SUPERSET, so that we have a place to
296 attach the SUBSET. */
297 superset_entry
298 = (alias_set_entry) xmalloc (sizeof (struct alias_set_entry));
299 superset_entry->alias_set = superset;
300 superset_entry->children
301 = splay_tree_new (splay_tree_compare_ints, 0, 0);
302 splay_tree_insert (alias_sets, (splay_tree_key) superset,
303 (splay_tree_value) superset_entry);
307 subset_entry = get_alias_set_entry (subset);
309 /* If there is an entry for the subset, enter all of its children
310 (if they are not already present) as children of the SUPERSET. */
311 if (subset_entry)
312 splay_tree_foreach (subset_entry->children,
313 insert_subset_children,
314 superset_entry->children);
316 /* Enter the SUBSET itself as a child of the SUPERSET. */
317 splay_tree_insert (superset_entry->children,
318 (splay_tree_key) subset, 0);
321 /* Inside SRC, the source of a SET, find a base address. */
323 static rtx
324 find_base_value (src)
325 register rtx src;
327 switch (GET_CODE (src))
329 case SYMBOL_REF:
330 case LABEL_REF:
331 return src;
333 case REG:
334 /* At the start of a function, argument registers have known base
335 values which may be lost later. Returning an ADDRESS
336 expression here allows optimization based on argument values
337 even when the argument registers are used for other purposes. */
338 if (REGNO (src) < FIRST_PSEUDO_REGISTER && copying_arguments)
339 return new_reg_base_value[REGNO (src)];
341 /* If a pseudo has a known base value, return it. Do not do this
342 for hard regs since it can result in a circular dependency
343 chain for registers which have values at function entry.
345 The test above is not sufficient because the scheduler may move
346 a copy out of an arg reg past the NOTE_INSN_FUNCTION_BEGIN. */
347 if (REGNO (src) >= FIRST_PSEUDO_REGISTER
348 && (unsigned) REGNO (src) < reg_base_value_size
349 && reg_base_value[REGNO (src)])
350 return reg_base_value[REGNO (src)];
352 return src;
354 case MEM:
355 /* Check for an argument passed in memory. Only record in the
356 copying-arguments block; it is too hard to track changes
357 otherwise. */
358 if (copying_arguments
359 && (XEXP (src, 0) == arg_pointer_rtx
360 || (GET_CODE (XEXP (src, 0)) == PLUS
361 && XEXP (XEXP (src, 0), 0) == arg_pointer_rtx)))
362 return gen_rtx_ADDRESS (VOIDmode, src);
363 return 0;
365 case CONST:
366 src = XEXP (src, 0);
367 if (GET_CODE (src) != PLUS && GET_CODE (src) != MINUS)
368 break;
370 /* ... fall through ... */
372 case PLUS:
373 case MINUS:
375 rtx temp, src_0 = XEXP (src, 0), src_1 = XEXP (src, 1);
377 /* If either operand is a REG, then see if we already have
378 a known value for it. */
379 if (GET_CODE (src_0) == REG)
381 temp = find_base_value (src_0);
382 if (temp != 0)
383 src_0 = temp;
386 if (GET_CODE (src_1) == REG)
388 temp = find_base_value (src_1);
389 if (temp != 0)
390 src_1 = temp;
393 /* Guess which operand is the base address:
394 If either operand is a symbol, then it is the base. If
395 either operand is a CONST_INT, then the other is the base. */
396 if (GET_CODE (src_1) == CONST_INT || CONSTANT_P (src_0))
397 return find_base_value (src_0);
398 else if (GET_CODE (src_0) == CONST_INT || CONSTANT_P (src_1))
399 return find_base_value (src_1);
401 /* This might not be necessary anymore:
402 If either operand is a REG that is a known pointer, then it
403 is the base. */
404 else if (GET_CODE (src_0) == REG && REGNO_POINTER_FLAG (REGNO (src_0)))
405 return find_base_value (src_0);
406 else if (GET_CODE (src_1) == REG && REGNO_POINTER_FLAG (REGNO (src_1)))
407 return find_base_value (src_1);
409 return 0;
412 case LO_SUM:
413 /* The standard form is (lo_sum reg sym) so look only at the
414 second operand. */
415 return find_base_value (XEXP (src, 1));
417 case AND:
418 /* If the second operand is constant, set the base
419 address to the first operand. */
420 if (GET_CODE (XEXP (src, 1)) == CONST_INT && INTVAL (XEXP (src, 1)) != 0)
421 return find_base_value (XEXP (src, 0));
422 return 0;
424 case ZERO_EXTEND:
425 case SIGN_EXTEND: /* used for NT/Alpha pointers */
426 case HIGH:
427 return find_base_value (XEXP (src, 0));
429 default:
430 break;
433 return 0;
436 /* Called from init_alias_analysis indirectly through note_stores. */
438 /* While scanning insns to find base values, reg_seen[N] is nonzero if
439 register N has been set in this function. */
440 static char *reg_seen;
442 /* Addresses which are known not to alias anything else are identified
443 by a unique integer. */
444 static int unique_id;
446 static void
447 record_set (dest, set, data)
448 rtx dest, set;
449 void *data ATTRIBUTE_UNUSED;
451 register unsigned regno;
452 rtx src;
454 if (GET_CODE (dest) != REG)
455 return;
457 regno = REGNO (dest);
459 if (regno >= reg_base_value_size)
460 abort ();
462 if (set)
464 /* A CLOBBER wipes out any old value but does not prevent a previously
465 unset register from acquiring a base address (i.e. reg_seen is not
466 set). */
467 if (GET_CODE (set) == CLOBBER)
469 new_reg_base_value[regno] = 0;
470 return;
472 src = SET_SRC (set);
474 else
476 if (reg_seen[regno])
478 new_reg_base_value[regno] = 0;
479 return;
481 reg_seen[regno] = 1;
482 new_reg_base_value[regno] = gen_rtx_ADDRESS (Pmode,
483 GEN_INT (unique_id++));
484 return;
487 /* This is not the first set. If the new value is not related to the
488 old value, forget the base value. Note that the following code is
489 not detected:
490 extern int x, y; int *p = &x; p += (&y-&x);
491 ANSI C does not allow computing the difference of addresses
492 of distinct top level objects. */
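  /* Illustration (hypothetical): for a register P that already has a
     base value, a set like `P = P + 4' leaves the recorded base alone,
     while reassigning P from an unrelated source clears it.  */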
493 if (new_reg_base_value[regno])
494 switch (GET_CODE (src))
496 case LO_SUM:
497 case PLUS:
498 case MINUS:
499 if (XEXP (src, 0) != dest && XEXP (src, 1) != dest)
500 new_reg_base_value[regno] = 0;
501 break;
502 case AND:
503 if (XEXP (src, 0) != dest || GET_CODE (XEXP (src, 1)) != CONST_INT)
504 new_reg_base_value[regno] = 0;
505 break;
506 default:
507 new_reg_base_value[regno] = 0;
508 break;
510 /* If this is the first set of a register, record the value. */
511 else if ((regno >= FIRST_PSEUDO_REGISTER || ! fixed_regs[regno])
512 && ! reg_seen[regno] && new_reg_base_value[regno] == 0)
513 new_reg_base_value[regno] = find_base_value (src);
515 reg_seen[regno] = 1;
518 /* Called from loop optimization when a new pseudo-register is created. */
520 void
521 record_base_value (regno, val, invariant)
522 int regno;
523 rtx val;
524 int invariant;
526 if ((unsigned) regno >= reg_base_value_size)
527 return;
529 /* If INVARIANT is true then this value also describes an invariant
530 relationship which can be used to deduce that two registers with
531 unknown values are different. */
532 if (invariant && alias_invariant)
533 alias_invariant[regno] = val;
535 if (GET_CODE (val) == REG)
537 if ((unsigned) REGNO (val) < reg_base_value_size)
538 reg_base_value[regno] = reg_base_value[REGNO (val)];
540 return;
543 reg_base_value[regno] = find_base_value (val);
546 /* Returns a canonical version of X, from the point of view of alias
547 analysis. (For example, if X is a MEM whose address is a register,
548 and the register has a known value (say a SYMBOL_REF), then a MEM
549 whose address is the SYMBOL_REF is returned.) */
552 canon_rtx (x)
553 rtx x;
555 /* Recursively look for equivalences. */
556 if (GET_CODE (x) == REG && REGNO (x) >= FIRST_PSEUDO_REGISTER
557 && REGNO (x) < reg_known_value_size)
558 return reg_known_value[REGNO (x)] == x
559 ? x : canon_rtx (reg_known_value[REGNO (x)]);
560 else if (GET_CODE (x) == PLUS)
562 rtx x0 = canon_rtx (XEXP (x, 0));
563 rtx x1 = canon_rtx (XEXP (x, 1));
565 if (x0 != XEXP (x, 0) || x1 != XEXP (x, 1))
567 /* We can tolerate LO_SUMs being offset here; these
568 rtl are used for nothing other than comparisons. */
569 if (GET_CODE (x0) == CONST_INT)
570 return plus_constant_for_output (x1, INTVAL (x0));
571 else if (GET_CODE (x1) == CONST_INT)
572 return plus_constant_for_output (x0, INTVAL (x1));
573 return gen_rtx_PLUS (GET_MODE (x), x0, x1);
577 /* This gives us much better alias analysis when called from
578 the loop optimizer. Note we want to leave the original
579 MEM alone, but need to return the canonicalized MEM with
580 all the flags with their original values. */
581 else if (GET_CODE (x) == MEM)
583 rtx addr = canon_rtx (XEXP (x, 0));
585 if (addr != XEXP (x, 0))
587 rtx new = gen_rtx_MEM (GET_MODE (x), addr);
589 MEM_COPY_ATTRIBUTES (new, x);
590 x = new;
593 return x;
596 /* Return 1 if X and Y are identical-looking rtx's.
598 We use the data in reg_known_value above to see if two registers with
599 different numbers are, in fact, equivalent. */
601 static int
602 rtx_equal_for_memref_p (x, y)
603 rtx x, y;
605 register int i;
606 register int j;
607 register enum rtx_code code;
608 register const char *fmt;
610 if (x == 0 && y == 0)
611 return 1;
612 if (x == 0 || y == 0)
613 return 0;
615 x = canon_rtx (x);
616 y = canon_rtx (y);
618 if (x == y)
619 return 1;
621 code = GET_CODE (x);
622 /* Rtx's of different codes cannot be equal. */
623 if (code != GET_CODE (y))
624 return 0;
626 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.
627 (REG:SI x) and (REG:HI x) are NOT equivalent. */
629 if (GET_MODE (x) != GET_MODE (y))
630 return 0;
632 /* Some RTL can be compared without a recursive examination. */
633 switch (code)
635 case REG:
636 return REGNO (x) == REGNO (y);
638 case LABEL_REF:
639 return XEXP (x, 0) == XEXP (y, 0);
641 case SYMBOL_REF:
642 return XSTR (x, 0) == XSTR (y, 0);
644 case CONST_INT:
645 case CONST_DOUBLE:
646 /* There's no need to compare the contents of CONST_DOUBLEs or
647 CONST_INTs because pointer equality is a good enough
648 comparison for these nodes. */
649 return 0;
651 case ADDRESSOF:
652 return (REGNO (XEXP (x, 0)) == REGNO (XEXP (y, 0))
653 && XINT (x, 1) == XINT (y, 1));
655 default:
656 break;
659 /* For commutative operations, the RTXs match if the operands match in any
660 order. Also handle the simple binary and unary cases without a loop. */
661 if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
662 return ((rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 0))
663 && rtx_equal_for_memref_p (XEXP (x, 1), XEXP (y, 1)))
664 || (rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 1))
665 && rtx_equal_for_memref_p (XEXP (x, 1), XEXP (y, 0))));
666 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
667 return (rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 0))
668 && rtx_equal_for_memref_p (XEXP (x, 1), XEXP (y, 1)));
669 else if (GET_RTX_CLASS (code) == '1')
670 return rtx_equal_for_memref_p (XEXP (x, 0), XEXP (y, 0));
672 /* Compare the elements. If any pair of corresponding elements
673 fail to match, return 0 for the whole thing.
675 Limit cases to types which actually appear in addresses. */
677 fmt = GET_RTX_FORMAT (code);
678 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
680 switch (fmt[i])
682 case 'i':
683 if (XINT (x, i) != XINT (y, i))
684 return 0;
685 break;
687 case 'E':
688 /* Two vectors must have the same length. */
689 if (XVECLEN (x, i) != XVECLEN (y, i))
690 return 0;
692 /* And the corresponding elements must match. */
693 for (j = 0; j < XVECLEN (x, i); j++)
694 if (rtx_equal_for_memref_p (XVECEXP (x, i, j),
695 XVECEXP (y, i, j)) == 0)
696 return 0;
697 break;
699 case 'e':
700 if (rtx_equal_for_memref_p (XEXP (x, i), XEXP (y, i)) == 0)
701 return 0;
702 break;
704 /* This can happen for an asm which clobbers memory. */
705 case '0':
706 break;
708 /* It is believed that rtx's at this level will never
709 contain anything but integers and other rtx's,
710 except within LABEL_REFs and SYMBOL_REFs. */
711 default:
712 abort ();
715 return 1;
718 /* Given an rtx X, find a SYMBOL_REF or LABEL_REF within
719 X and return it, or return 0 if none found. */
721 static rtx
722 find_symbolic_term (x)
723 rtx x;
725 register int i;
726 register enum rtx_code code;
727 register const char *fmt;
729 code = GET_CODE (x);
730 if (code == SYMBOL_REF || code == LABEL_REF)
731 return x;
732 if (GET_RTX_CLASS (code) == 'o')
733 return 0;
735 fmt = GET_RTX_FORMAT (code);
736 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
738 rtx t;
740 if (fmt[i] == 'e')
742 t = find_symbolic_term (XEXP (x, i));
743 if (t != 0)
744 return t;
746 else if (fmt[i] == 'E')
747 break;
749 return 0;
752 static rtx
753 find_base_term (x)
754 register rtx x;
756 cselib_val *val;
757 struct elt_loc_list *l;
759 switch (GET_CODE (x))
761 case REG:
762 return REG_BASE_VALUE (x);
764 case ZERO_EXTEND:
765 case SIGN_EXTEND: /* Used for Alpha/NT pointers */
766 case HIGH:
767 case PRE_INC:
768 case PRE_DEC:
769 case POST_INC:
770 case POST_DEC:
771 return find_base_term (XEXP (x, 0));
773 case VALUE:
774 val = CSELIB_VAL_PTR (x);
775 for (l = val->locs; l; l = l->next)
776 if ((x = find_base_term (l->loc)) != 0)
777 return x;
778 return 0;
780 case CONST:
781 x = XEXP (x, 0);
782 if (GET_CODE (x) != PLUS && GET_CODE (x) != MINUS)
783 return 0;
784 /* fall through */
785 case LO_SUM:
786 case PLUS:
787 case MINUS:
789 rtx tmp1 = XEXP (x, 0);
790 rtx tmp2 = XEXP (x, 1);
792 /* This is a little bit tricky since we have to determine which of
793 the two operands represents the real base address. Otherwise this
794 routine may return the index register instead of the base register.
796 That may cause us to believe no aliasing was possible, when in
797 fact aliasing is possible.
799 We use a few simple tests to guess the base register. Additional
800 tests can certainly be added. For example, if one of the operands
801 is a shift or multiply, then it must be the index register and the
802 other operand is the base register. */
804 /* If either operand is known to be a pointer, then use it
805 to determine the base term. */
806 if (REG_P (tmp1) && REGNO_POINTER_FLAG (REGNO (tmp1)))
807 return find_base_term (tmp1);
809 if (REG_P (tmp2) && REGNO_POINTER_FLAG (REGNO (tmp2)))
810 return find_base_term (tmp2);
812 /* Neither operand was known to be a pointer. Go ahead and find the
813 base term for both operands. */
814 tmp1 = find_base_term (tmp1);
815 tmp2 = find_base_term (tmp2);
817 /* If either base term is a named object or a special address
818 (like an argument or stack reference), then use it for the
819 base term. */
820 if (tmp1 != 0
821 && (GET_CODE (tmp1) == SYMBOL_REF
822 || GET_CODE (tmp1) == LABEL_REF
823 || (GET_CODE (tmp1) == ADDRESS
824 && GET_MODE (tmp1) != VOIDmode)))
825 return tmp1;
827 if (tmp2 != 0
828 && (GET_CODE (tmp2) == SYMBOL_REF
829 || GET_CODE (tmp2) == LABEL_REF
830 || (GET_CODE (tmp2) == ADDRESS
831 && GET_MODE (tmp2) != VOIDmode)))
832 return tmp2;
834 /* We could not determine which of the two operands was the
835 base register and which was the index. So we can determine
836 nothing from the base alias check. */
837 return 0;
840 case AND:
841 if (GET_CODE (XEXP (x, 0)) == REG && GET_CODE (XEXP (x, 1)) == CONST_INT)
842 return REG_BASE_VALUE (XEXP (x, 0));
843 return 0;
845 case SYMBOL_REF:
846 case LABEL_REF:
847 return x;
849 default:
850 return 0;
854 /* Return 0 if the addresses X and Y are known to point to different
855 objects, 1 if they might be pointers to the same object. */
857 static int
858 base_alias_check (x, y, x_mode, y_mode)
859 rtx x, y;
860 enum machine_mode x_mode, y_mode;
862 rtx x_base = find_base_term (x);
863 rtx y_base = find_base_term (y);
865 /* If the address itself has no known base, see if a known equivalent
866 value has one. If either address still has no known base, nothing
867 is known about aliasing. */
868 if (x_base == 0)
870 rtx x_c;
872 if (! flag_expensive_optimizations || (x_c = canon_rtx (x)) == x)
873 return 1;
875 x_base = find_base_term (x_c);
876 if (x_base == 0)
877 return 1;
880 if (y_base == 0)
882 rtx y_c;
883 if (! flag_expensive_optimizations || (y_c = canon_rtx (y)) == y)
884 return 1;
886 y_base = find_base_term (y_c);
887 if (y_base == 0)
888 return 1;
891 /* If the base addresses are equal, nothing is known about aliasing. */
892 if (rtx_equal_p (x_base, y_base))
893 return 1;
895 /* The base addresses of the read and write are different expressions.
896 If they are both symbols and they are not accessed via AND, there is
897 no conflict. We can bring knowledge of object alignment into play
898 here. For example, on the Alpha, "char a, b;" can alias one another,
899 though "char a; long b;" cannot. */
900 if (GET_CODE (x_base) != ADDRESS && GET_CODE (y_base) != ADDRESS)
902 if (GET_CODE (x) == AND && GET_CODE (y) == AND)
903 return 1;
904 if (GET_CODE (x) == AND
905 && (GET_CODE (XEXP (x, 1)) != CONST_INT
906 || GET_MODE_UNIT_SIZE (y_mode) < -INTVAL (XEXP (x, 1))))
907 return 1;
908 if (GET_CODE (y) == AND
909 && (GET_CODE (XEXP (y, 1)) != CONST_INT
910 || GET_MODE_UNIT_SIZE (x_mode) < -INTVAL (XEXP (y, 1))))
911 return 1;
912 /* Differing symbols never alias. */
913 return 0;
916 /* If one address is a stack reference there can be no alias:
917 stack references using different base registers do not alias,
918 a stack reference can not alias a parameter, and a stack reference
919 can not alias a global. */
920 if ((GET_CODE (x_base) == ADDRESS && GET_MODE (x_base) == Pmode)
921 || (GET_CODE (y_base) == ADDRESS && GET_MODE (y_base) == Pmode))
922 return 0;
924 if (! flag_argument_noalias)
925 return 1;
927 if (flag_argument_noalias > 1)
928 return 0;
930 /* Weak noalias assertion (arguments are distinct, but may match globals). */
931 return ! (GET_MODE (x_base) == VOIDmode && GET_MODE (y_base) == VOIDmode);
934 /* Convert the address X into something we can use. This is done by returning
935 it unchanged unless it is a value; in the latter case we call cselib to get
936 a more useful rtx. */
937 static rtx
938 get_addr (x)
939 rtx x;
941 cselib_val *v;
942 struct elt_loc_list *l;
944 if (GET_CODE (x) != VALUE)
945 return x;
946 v = CSELIB_VAL_PTR (x);
947 for (l = v->locs; l; l = l->next)
948 if (CONSTANT_P (l->loc))
949 return l->loc;
950 for (l = v->locs; l; l = l->next)
951 if (GET_CODE (l->loc) != REG && GET_CODE (l->loc) != MEM)
952 return l->loc;
953 if (v->locs)
954 return v->locs->loc;
955 return x;
958 /* Return the address of the (N_REFS + 1)th memory reference to ADDR
959 where SIZE is the size in bytes of the memory reference. If ADDR
960 is not modified by the memory reference then ADDR is returned. */
963 addr_side_effect_eval (addr, size, n_refs)
964 rtx addr;
965 int size;
966 int n_refs;
968 int offset = 0;
970 switch (GET_CODE (addr))
972 case PRE_INC:
973 offset = (n_refs + 1) * size;
974 break;
975 case PRE_DEC:
976 offset = -(n_refs + 1) * size;
977 break;
978 case POST_INC:
979 offset = n_refs * size;
980 break;
981 case POST_DEC:
982 offset = -n_refs * size;
983 break;
985 default:
986 return addr;
989 if (offset)
990 addr = gen_rtx_PLUS (GET_MODE (addr), XEXP (addr, 0), GEN_INT (offset));
991 else
992 addr = XEXP (addr, 0);
994 return addr;
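/* Hypothetical illustration of the above: for a 4 byte reference,
   (post_inc (reg 100)) with N_REFS == 0 evaluates to (reg 100) itself,
   since the first access uses the unmodified address, whereas
   (pre_inc (reg 100)) evaluates to (plus (reg 100) (const_int 4)).  */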
997 /* Return nonzero if X and Y (memory addresses) could reference the
998 same location in memory. C is an offset accumulator. When
999 C is nonzero, we are testing aliases between X and Y + C.
1000 XSIZE is the size in bytes of the X reference,
1001 similarly YSIZE is the size in bytes for Y.
1003 If XSIZE or YSIZE is zero, we do not know the amount of memory being
1004 referenced (the reference was BLKmode), so make the most pessimistic
1005 assumptions.
1007 If XSIZE or YSIZE is negative, we may access memory outside the object
1008 being referenced as a side effect. This can happen when using AND to
1009 align memory references, as is done on the Alpha.
1011 Nice to notice that varying addresses cannot conflict with fp if no
1012 local variables had their addresses taken, but that's too hard now. */
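/* A small worked example of the offset handling (sizes and offset are
   hypothetical): when X and Y canonicalize to the same address and
   XSIZE == YSIZE == 4, testing X against Y + C reports a conflict for
   C of 0 through 3 (xsize > c), but no conflict for C of 4 or more,
   because the two 4 byte accesses no longer overlap.  */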
1014 static int
1015 memrefs_conflict_p (xsize, x, ysize, y, c)
1016 register rtx x, y;
1017 int xsize, ysize;
1018 HOST_WIDE_INT c;
1020 if (GET_CODE (x) == VALUE)
1021 x = get_addr (x);
1022 if (GET_CODE (y) == VALUE)
1023 y = get_addr (y);
1024 if (GET_CODE (x) == HIGH)
1025 x = XEXP (x, 0);
1026 else if (GET_CODE (x) == LO_SUM)
1027 x = XEXP (x, 1);
1028 else
1029 x = canon_rtx (addr_side_effect_eval (x, xsize, 0));
1030 if (GET_CODE (y) == HIGH)
1031 y = XEXP (y, 0);
1032 else if (GET_CODE (y) == LO_SUM)
1033 y = XEXP (y, 1);
1034 else
1035 y = canon_rtx (addr_side_effect_eval (y, ysize, 0));
1037 if (rtx_equal_for_memref_p (x, y))
1039 if (xsize <= 0 || ysize <= 0)
1040 return 1;
1041 if (c >= 0 && xsize > c)
1042 return 1;
1043 if (c < 0 && ysize+c > 0)
1044 return 1;
1045 return 0;
1048 /* This code used to check for conflicts involving stack references and
1049 globals but the base address alias code now handles these cases. */
1051 if (GET_CODE (x) == PLUS)
1053 /* The fact that X is canonicalized means that this
1054 PLUS rtx is canonicalized. */
1055 rtx x0 = XEXP (x, 0);
1056 rtx x1 = XEXP (x, 1);
1058 if (GET_CODE (y) == PLUS)
1060 /* The fact that Y is canonicalized means that this
1061 PLUS rtx is canonicalized. */
1062 rtx y0 = XEXP (y, 0);
1063 rtx y1 = XEXP (y, 1);
1065 if (rtx_equal_for_memref_p (x1, y1))
1066 return memrefs_conflict_p (xsize, x0, ysize, y0, c);
1067 if (rtx_equal_for_memref_p (x0, y0))
1068 return memrefs_conflict_p (xsize, x1, ysize, y1, c);
1069 if (GET_CODE (x1) == CONST_INT)
1071 if (GET_CODE (y1) == CONST_INT)
1072 return memrefs_conflict_p (xsize, x0, ysize, y0,
1073 c - INTVAL (x1) + INTVAL (y1));
1074 else
1075 return memrefs_conflict_p (xsize, x0, ysize, y,
1076 c - INTVAL (x1));
1078 else if (GET_CODE (y1) == CONST_INT)
1079 return memrefs_conflict_p (xsize, x, ysize, y0, c + INTVAL (y1));
1081 return 1;
1083 else if (GET_CODE (x1) == CONST_INT)
1084 return memrefs_conflict_p (xsize, x0, ysize, y, c - INTVAL (x1));
1086 else if (GET_CODE (y) == PLUS)
1088 /* The fact that Y is canonicalized means that this
1089 PLUS rtx is canonicalized. */
1090 rtx y0 = XEXP (y, 0);
1091 rtx y1 = XEXP (y, 1);
1093 if (GET_CODE (y1) == CONST_INT)
1094 return memrefs_conflict_p (xsize, x, ysize, y0, c + INTVAL (y1));
1095 else
1096 return 1;
1099 if (GET_CODE (x) == GET_CODE (y))
1100 switch (GET_CODE (x))
1102 case MULT:
1104 /* Handle cases where we expect the second operands to be the
1105 same, and check only whether the first operand would conflict
1106 or not. */
1107 rtx x0, y0;
1108 rtx x1 = canon_rtx (XEXP (x, 1));
1109 rtx y1 = canon_rtx (XEXP (y, 1));
1110 if (! rtx_equal_for_memref_p (x1, y1))
1111 return 1;
1112 x0 = canon_rtx (XEXP (x, 0));
1113 y0 = canon_rtx (XEXP (y, 0));
1114 if (rtx_equal_for_memref_p (x0, y0))
1115 return (xsize == 0 || ysize == 0
1116 || (c >= 0 && xsize > c) || (c < 0 && ysize+c > 0));
1118 /* Can't properly adjust our sizes. */
1119 if (GET_CODE (x1) != CONST_INT)
1120 return 1;
1121 xsize /= INTVAL (x1);
1122 ysize /= INTVAL (x1);
1123 c /= INTVAL (x1);
1124 return memrefs_conflict_p (xsize, x0, ysize, y0, c);
1127 case REG:
1128 /* Are these registers known not to be equal? */
1129 if (alias_invariant)
1131 unsigned int r_x = REGNO (x), r_y = REGNO (y);
1132 rtx i_x, i_y; /* invariant relationships of X and Y */
1134 i_x = r_x >= reg_base_value_size ? 0 : alias_invariant[r_x];
1135 i_y = r_y >= reg_base_value_size ? 0 : alias_invariant[r_y];
1137 if (i_x == 0 && i_y == 0)
1138 break;
1140 if (! memrefs_conflict_p (xsize, i_x ? i_x : x,
1141 ysize, i_y ? i_y : y, c))
1142 return 0;
1144 break;
1146 default:
1147 break;
1150 /* Treat an access through an AND (e.g. a subword access on an Alpha)
1151 as an access with indeterminate size. Assume that references
1152 besides AND are aligned, so if the size of the other reference is
1153 at least as large as the alignment, assume no other overlap. */
1154 if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT)
1156 if (GET_CODE (y) == AND || ysize < -INTVAL (XEXP (x, 1)))
1157 xsize = -1;
1158 return memrefs_conflict_p (xsize, XEXP (x, 0), ysize, y, c);
1160 if (GET_CODE (y) == AND && GET_CODE (XEXP (y, 1)) == CONST_INT)
1162 /* ??? If we are indexing far enough into the array/structure, we
1163 may yet be able to determine that we can not overlap. But we
1164 also need to know that we are far enough from the end not to overlap
1165 a following reference, so we do nothing with that for now. */
1166 if (GET_CODE (x) == AND || xsize < -INTVAL (XEXP (y, 1)))
1167 ysize = -1;
1168 return memrefs_conflict_p (xsize, x, ysize, XEXP (y, 0), c);
1171 if (CONSTANT_P (x))
1173 if (GET_CODE (x) == CONST_INT && GET_CODE (y) == CONST_INT)
1175 c += (INTVAL (y) - INTVAL (x));
1176 return (xsize <= 0 || ysize <= 0
1177 || (c >= 0 && xsize > c) || (c < 0 && ysize+c > 0));
1180 if (GET_CODE (x) == CONST)
1182 if (GET_CODE (y) == CONST)
1183 return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)),
1184 ysize, canon_rtx (XEXP (y, 0)), c);
1185 else
1186 return memrefs_conflict_p (xsize, canon_rtx (XEXP (x, 0)),
1187 ysize, y, c);
1189 if (GET_CODE (y) == CONST)
1190 return memrefs_conflict_p (xsize, x, ysize,
1191 canon_rtx (XEXP (y, 0)), c);
1193 if (CONSTANT_P (y))
1194 return (xsize < 0 || ysize < 0
1195 || (rtx_equal_for_memref_p (x, y)
1196 && (xsize == 0 || ysize == 0
1197 || (c >= 0 && xsize > c) || (c < 0 && ysize+c > 0))));
1199 return 1;
1201 return 1;
1204 /* Functions to compute memory dependencies.
1206 Since we process the insns in execution order, we can build tables
1207 to keep track of what registers are fixed (and not aliased), what registers
1208 are varying in known ways, and what registers are varying in unknown
1209 ways.
1211 If both memory references are volatile, then there must always be a
1212 dependence between the two references, since their order can not be
1213 changed. A volatile and non-volatile reference can be interchanged
1214 though.
1216 A MEM_IN_STRUCT reference at a non-QImode non-AND varying address can never
1217 conflict with a non-MEM_IN_STRUCT reference at a fixed address. We must
1218 allow QImode aliasing because the ANSI C standard allows character
1219 pointers to alias anything. We are assuming that characters are
1220 always QImode here. We also must allow AND addresses, because they may
1221 generate accesses outside the object being referenced. This is used to
1222 generate aligned addresses from unaligned addresses, for instance, the
1223 alpha storeqi_unaligned pattern. */
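/* For example, in

     int f (char *p, int *q) { *q = 1; return *p; }

   the QImode load through `p' must be assumed to conflict with the
   store through `q', since a character pointer may point into any
   object (assuming, as above, that characters are QImode).  */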
1225 /* Read dependence: X is read after read in MEM takes place. There can
1226 only be a dependence here if both reads are volatile. */
1229 read_dependence (mem, x)
1230 rtx mem;
1231 rtx x;
1233 return MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem);
1236 /* Returns MEM1 if and only if MEM1 is a scalar at a fixed address and
1237 MEM2 is a reference to a structure at a varying address, or returns
1238 MEM2 if vice versa. Otherwise, returns NULL_RTX. If a non-NULL
1239 value is returned MEM1 and MEM2 can never alias. VARIES_P is used
1240 to decide whether or not an address may vary; it should return
1241 nonzero whenever variation is possible.
1242 MEM1_ADDR and MEM2_ADDR are the addresses of MEM1 and MEM2. */
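/* Sketch of the intended case (the variable names are made up): with

     static double x;
     struct s *p;

   a reference to `x' is a scalar at a fixed address while `p->field'
   is a structure reference at a varying address, so the two are deemed
   never to alias and the MEM for `x' is returned.  */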
1244 static rtx
1245 fixed_scalar_and_varying_struct_p (mem1, mem2, mem1_addr, mem2_addr, varies_p)
1246 rtx mem1, mem2;
1247 rtx mem1_addr, mem2_addr;
1248 int (*varies_p) PARAMS ((rtx));
1250 if (MEM_SCALAR_P (mem1) && MEM_IN_STRUCT_P (mem2)
1251 && !varies_p (mem1_addr) && varies_p (mem2_addr))
1252 /* MEM1 is a scalar at a fixed address; MEM2 is a struct at a
1253 varying address. */
1254 return mem1;
1256 if (MEM_IN_STRUCT_P (mem1) && MEM_SCALAR_P (mem2)
1257 && varies_p (mem1_addr) && !varies_p (mem2_addr))
1258 /* MEM2 is a scalar at a fixed address; MEM1 is a struct at a
1259 varying address. */
1260 return mem2;
1262 return NULL_RTX;
1265 /* Returns nonzero if something about the mode or address format of MEM1
1266 indicates that it might well alias *anything*. */
1268 static int
1269 aliases_everything_p (mem)
1270 rtx mem;
1272 if (GET_MODE (mem) == QImode)
1273 /* ANSI C says that a `char*' can point to anything. */
1274 return 1;
1276 if (GET_CODE (XEXP (mem, 0)) == AND)
1277 /* If the address is an AND, it's very hard to know what it is
1278 actually pointing at. */
1279 return 1;
1281 return 0;
1284 /* True dependence: X is read after store in MEM takes place. */
1287 true_dependence (mem, mem_mode, x, varies)
1288 rtx mem;
1289 enum machine_mode mem_mode;
1290 rtx x;
1291 int (*varies) PARAMS ((rtx));
1293 register rtx x_addr, mem_addr;
1295 if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
1296 return 1;
1298 if (DIFFERENT_ALIAS_SETS_P (x, mem))
1299 return 0;
1301 /* If X is an unchanging read, then it can't possibly conflict with any
1302 non-unchanging store. It may conflict with an unchanging write though,
1303 because there may be a single store to this address to initialize it.
1304 Just fall through to the code below to resolve the case where we have
1305 both an unchanging read and an unchanging write. This won't handle all
1306 cases optimally, but the possible performance loss should be
1307 negligible. */
1308 if (RTX_UNCHANGING_P (x) && ! RTX_UNCHANGING_P (mem))
1309 return 0;
1311 if (mem_mode == VOIDmode)
1312 mem_mode = GET_MODE (mem);
1314 x_addr = get_addr (XEXP (x, 0));
1315 mem_addr = get_addr (XEXP (mem, 0));
1317 if (! base_alias_check (x_addr, mem_addr, GET_MODE (x), mem_mode))
1318 return 0;
1320 x_addr = canon_rtx (x_addr);
1321 mem_addr = canon_rtx (mem_addr);
1323 if (! memrefs_conflict_p (GET_MODE_SIZE (mem_mode), mem_addr,
1324 SIZE_FOR_MODE (x), x_addr, 0))
1325 return 0;
1327 if (aliases_everything_p (x))
1328 return 1;
1330 /* We cannot use aliases_everything_p to test MEM, since we must look
1331 at MEM_MODE, rather than GET_MODE (MEM). */
1332 if (mem_mode == QImode || GET_CODE (mem_addr) == AND)
1333 return 1;
1335 /* In true_dependence we also allow BLKmode to alias anything. Why
1336 don't we do this in anti_dependence and output_dependence? */
1337 if (mem_mode == BLKmode || GET_MODE (x) == BLKmode)
1338 return 1;
1340 return ! fixed_scalar_and_varying_struct_p (mem, x, mem_addr, x_addr,
1341 varies);
1344 /* Returns non-zero if a write to X might alias a previous read from
1345 (or, if WRITEP is non-zero, a write to) MEM. */
1347 static int
1348 write_dependence_p (mem, x, writep)
1349 rtx mem;
1350 rtx x;
1351 int writep;
1353 rtx x_addr, mem_addr;
1354 rtx fixed_scalar;
1356 if (MEM_VOLATILE_P (x) && MEM_VOLATILE_P (mem))
1357 return 1;
1359 if (DIFFERENT_ALIAS_SETS_P (x, mem))
1360 return 0;
1362 /* If MEM is an unchanging read, then it can't possibly conflict with
1363 the store to X, because there is at most one store to MEM, and it must
1364 have occurred somewhere before MEM. */
1365 if (!writep && RTX_UNCHANGING_P (mem))
1366 return 0;
1368 x_addr = get_addr (XEXP (x, 0));
1369 mem_addr = get_addr (XEXP (mem, 0));
1371 if (! base_alias_check (x_addr, mem_addr, GET_MODE (x),
1372 GET_MODE (mem)))
1373 return 0;
1375 x_addr = canon_rtx (x_addr);
1376 mem_addr = canon_rtx (mem_addr);
1378 if (!memrefs_conflict_p (SIZE_FOR_MODE (mem), mem_addr,
1379 SIZE_FOR_MODE (x), x_addr, 0))
1380 return 0;
1382 fixed_scalar
1383 = fixed_scalar_and_varying_struct_p (mem, x, mem_addr, x_addr,
1384 rtx_addr_varies_p);
1386 return (!(fixed_scalar == mem && !aliases_everything_p (x))
1387 && !(fixed_scalar == x && !aliases_everything_p (mem)));
1390 /* Anti dependence: X is written after read in MEM takes place. */
1393 anti_dependence (mem, x)
1394 rtx mem;
1395 rtx x;
1397 return write_dependence_p (mem, x, /*writep=*/0);
1400 /* Output dependence: X is written after store in MEM takes place. */
1403 output_dependence (mem, x)
1404 register rtx mem;
1405 register rtx x;
1407 return write_dependence_p (mem, x, /*writep=*/1);
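/* To illustrate the three dependence tests with a hypothetical pair of
   references: given

     *p = a;
     b = *q;

   true_dependence asks whether the load from `*q' may see the earlier
   store to `*p'; anti_dependence covers the reverse order, a load
   followed by a possibly overlapping store; and output_dependence
   covers two stores to possibly overlapping locations.  */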
1410 /* Returns non-zero if X might refer to something which is not
1411 local to the function and is not constant. */
1413 static int
1414 nonlocal_reference_p (x)
1415 rtx x;
1417 rtx base;
1418 register RTX_CODE code;
1419 int regno;
1421 code = GET_CODE (x);
1423 if (GET_RTX_CLASS (code) == 'i')
1425 /* A call to a constant function is not itself a nonlocal reference,
1426 unless it uses the scratch memory used to mark functions w/o side effects. */
1427 if (code == CALL_INSN && CONST_CALL_P (x))
1429 x = CALL_INSN_FUNCTION_USAGE (x);
1430 if (!x) return 0;
1432 else
1433 x = PATTERN (x);
1434 code = GET_CODE (x);
1437 switch (code)
1439 case SUBREG:
1440 if (GET_CODE (SUBREG_REG (x)) == REG)
1442 /* Global registers are not local. */
1443 if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
1444 && global_regs[REGNO (SUBREG_REG (x)) + SUBREG_WORD (x)])
1445 return 1;
1446 return 0;
1448 break;
1450 case REG:
1451 regno = REGNO (x);
1452 /* Global registers are not local. */
1453 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
1454 return 1;
1455 return 0;
1457 case SCRATCH:
1458 case PC:
1459 case CC0:
1460 case CONST_INT:
1461 case CONST_DOUBLE:
1462 case CONST:
1463 case LABEL_REF:
1464 return 0;
1466 case SYMBOL_REF:
1467 /* Constants in the function's constant pool are constant. */
1468 if (CONSTANT_POOL_ADDRESS_P (x))
1469 return 0;
1470 return 1;
1472 case CALL:
1473 /* Recursion introduces no additional considerations. */
1474 if (GET_CODE (XEXP (x, 0)) == MEM
1475 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
1476 && strcmp (XSTR (XEXP (XEXP (x, 0), 0), 0),
1477 IDENTIFIER_POINTER (
1478 DECL_ASSEMBLER_NAME (current_function_decl))) == 0)
1479 return 0;
1480 return 1;
1482 case MEM:
1483 /* Be overly conservative and consider any volatile memory
1484 reference as not local. */
1485 if (MEM_VOLATILE_P (x))
1486 return 1;
1487 base = find_base_term (XEXP (x, 0));
1488 if (base)
1490 /* A Pmode ADDRESS could be a reference via the structure value
1491 address or static chain. Such memory references are nonlocal.
1493 Thus, we have to examine the contents of the ADDRESS to find
1494 out if this is a local reference or not. */
1495 if (GET_CODE (base) == ADDRESS
1496 && GET_MODE (base) == Pmode
1497 && (XEXP (base, 0) == stack_pointer_rtx
1498 || XEXP (base, 0) == arg_pointer_rtx
1499 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
1500 || XEXP (base, 0) == hard_frame_pointer_rtx
1501 #endif
1502 || XEXP (base, 0) == frame_pointer_rtx))
1503 return 0;
1504 /* Constants in the function's constant pool are constant. */
1505 if (GET_CODE (base) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (base))
1506 return 0;
1508 return 1;
1510 case ASM_INPUT:
1511 case ASM_OPERANDS:
1512 return 1;
1514 default:
1515 break;
1518 /* Recursively scan the operands of this expression. */
1521 register const char *fmt = GET_RTX_FORMAT (code);
1522 register int i;
1524 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1526 if (fmt[i] == 'e' && XEXP (x, i))
1528 if (nonlocal_reference_p (XEXP (x, i)))
1529 return 1;
1531 else if (fmt[i] == 'E')
1533 register int j;
1534 for (j = 0; j < XVECLEN (x, i); j++)
1535 if (nonlocal_reference_p (XVECEXP (x, i, j)))
1536 return 1;
1541 return 0;
1544 /* Mark the function if it is constant. */
1546 void
1547 mark_constant_function ()
1549 rtx insn;
1551 if (TREE_PUBLIC (current_function_decl)
1552 || TREE_READONLY (current_function_decl)
1553 || TREE_THIS_VOLATILE (current_function_decl)
1554 || TYPE_MODE (TREE_TYPE (current_function_decl)) == VOIDmode)
1555 return;
1557 /* Determine if this is a constant function. */
1559 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1560 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
1561 && nonlocal_reference_p (insn))
1562 return;
1564 /* Mark the function. */
1566 TREE_READONLY (current_function_decl) = 1;
1570 static HARD_REG_SET argument_registers;
1572 void
1573 init_alias_once ()
1575 register int i;
1577 #ifndef OUTGOING_REGNO
1578 #define OUTGOING_REGNO(N) N
1579 #endif
1580 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1581 /* Check whether this register can hold an incoming pointer
1582 argument. FUNCTION_ARG_REGNO_P tests outgoing register
1583 numbers, so translate if necessary due to register windows. */
1584 if (FUNCTION_ARG_REGNO_P (OUTGOING_REGNO (i))
1585 && HARD_REGNO_MODE_OK (i, Pmode))
1586 SET_HARD_REG_BIT (argument_registers, i);
1588 alias_sets = splay_tree_new (splay_tree_compare_ints, 0, 0);
1591 /* Initialize the aliasing machinery. Initialize the REG_KNOWN_VALUE
1592 array. */
1594 void
1595 init_alias_analysis ()
1597 int maxreg = max_reg_num ();
1598 int changed, pass;
1599 register int i;
1600 register unsigned int ui;
1601 register rtx insn;
1603 reg_known_value_size = maxreg;
1605 reg_known_value
1606 = (rtx *) xcalloc ((maxreg - FIRST_PSEUDO_REGISTER), sizeof (rtx))
1607 - FIRST_PSEUDO_REGISTER;
1608 reg_known_equiv_p
1609 = (char*) xcalloc ((maxreg - FIRST_PSEUDO_REGISTER), sizeof (char))
1610 - FIRST_PSEUDO_REGISTER;
1612 /* Overallocate reg_base_value to allow some growth during loop
1613 optimization. Loop unrolling can create a large number of
1614 registers. */
1615 reg_base_value_size = maxreg * 2;
1616 reg_base_value = (rtx *) xcalloc (reg_base_value_size, sizeof (rtx));
1617 if (ggc_p)
1618 ggc_add_rtx_root (reg_base_value, reg_base_value_size);
1620 new_reg_base_value = (rtx *) xmalloc (reg_base_value_size * sizeof (rtx));
1621 reg_seen = (char *) xmalloc (reg_base_value_size);
1622 if (! reload_completed && flag_unroll_loops)
1624 /* ??? Why are we realloc'ing if we're just going to zero it? */
1625 alias_invariant = (rtx *)xrealloc (alias_invariant,
1626 reg_base_value_size * sizeof (rtx));
1627 bzero ((char *)alias_invariant, reg_base_value_size * sizeof (rtx));
1631 /* The basic idea is that each pass through this loop will use the
1632 "constant" information from the previous pass to propagate alias
1633 information through another level of assignments.
1635 This could get expensive if the assignment chains are long. Maybe
1636 we should throttle the number of iterations, possibly based on
1637 the optimization level or flag_expensive_optimizations.
1639 We could propagate more information in the first pass by making use
1640 of REG_N_SETS to determine immediately that the alias information
1641 for a pseudo is "constant".
1643 A program with an uninitialized variable can cause an infinite loop
1644 here. Instead of doing a full dataflow analysis to detect such problems
1645 we just cap the number of iterations for the loop.
1647 The state of the arrays for the set chain in question does not matter
1648 since the program has undefined behavior. */
1650 pass = 0;
1653 /* Assume nothing will change this iteration of the loop. */
1654 changed = 0;
1656 /* We want to assign the same IDs each iteration of this loop, so
1657 start counting from zero each iteration of the loop. */
1658 unique_id = 0;
1660 /* We're at the start of the function each iteration through the
1661 loop, so we're copying arguments. */
1662 copying_arguments = 1;
1664 /* Wipe the potential alias information clean for this pass. */
1665 bzero ((char *) new_reg_base_value, reg_base_value_size * sizeof (rtx));
1667 /* Wipe the reg_seen array clean. */
1668 bzero ((char *) reg_seen, reg_base_value_size);
1670 /* Mark all hard registers which may contain an address.
1671 The stack, frame and argument pointers may contain an address.
1672 An argument register which can hold a Pmode value may contain
1673 an address even if it is not in BASE_REGS.
1675 The address expression is VOIDmode for an argument and
1676 Pmode for other registers. */
1678 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1679 if (TEST_HARD_REG_BIT (argument_registers, i))
1680 new_reg_base_value[i] = gen_rtx_ADDRESS (VOIDmode,
1681 gen_rtx_REG (Pmode, i));
1683 new_reg_base_value[STACK_POINTER_REGNUM]
1684 = gen_rtx_ADDRESS (Pmode, stack_pointer_rtx);
1685 new_reg_base_value[ARG_POINTER_REGNUM]
1686 = gen_rtx_ADDRESS (Pmode, arg_pointer_rtx);
1687 new_reg_base_value[FRAME_POINTER_REGNUM]
1688 = gen_rtx_ADDRESS (Pmode, frame_pointer_rtx);
1689 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
1690 new_reg_base_value[HARD_FRAME_POINTER_REGNUM]
1691 = gen_rtx_ADDRESS (Pmode, hard_frame_pointer_rtx);
1692 #endif
1693 if (struct_value_incoming_rtx
1694 && GET_CODE (struct_value_incoming_rtx) == REG)
1695 new_reg_base_value[REGNO (struct_value_incoming_rtx)]
1696 = gen_rtx_ADDRESS (Pmode, struct_value_incoming_rtx);
1698 if (static_chain_rtx
1699 && GET_CODE (static_chain_rtx) == REG)
1700 new_reg_base_value[REGNO (static_chain_rtx)]
1701 = gen_rtx_ADDRESS (Pmode, static_chain_rtx);
1703 /* Walk the insns adding values to the new_reg_base_value array. */
1704 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1706 #if defined (HAVE_prologue) || defined (HAVE_epilogue)
1707 if (prologue_epilogue_contains (insn))
1708 continue;
1709 #endif
1710 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1712 rtx note, set;
1713 /* If this insn has a noalias note, process it. Otherwise,
1714 scan for sets. A simple set will have no side effects
1715 which could change the base value of any other register. */
1717 if (GET_CODE (PATTERN (insn)) == SET
1718 && (find_reg_note (insn, REG_NOALIAS, NULL_RTX)))
1719 record_set (SET_DEST (PATTERN (insn)), NULL_RTX, NULL);
1720 else
1721 note_stores (PATTERN (insn), record_set, NULL);
1723 set = single_set (insn);
1725 if (set != 0
1726 && GET_CODE (SET_DEST (set)) == REG
1727 && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER
1728 && (((note = find_reg_note (insn, REG_EQUAL, 0)) != 0
1729 && REG_N_SETS (REGNO (SET_DEST (set))) == 1)
1730 || (note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) != 0)
1731 && GET_CODE (XEXP (note, 0)) != EXPR_LIST
1732 && ! reg_overlap_mentioned_p (SET_DEST (set), XEXP (note, 0)))
1734 int regno = REGNO (SET_DEST (set));
1735 reg_known_value[regno] = XEXP (note, 0);
1736 reg_known_equiv_p[regno] = REG_NOTE_KIND (note) == REG_EQUIV;
1739 else if (GET_CODE (insn) == NOTE
1740 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG)
1741 copying_arguments = 0;
1744 /* Now propagate values from new_reg_base_value to reg_base_value. */
1745 for (ui = 0; ui < reg_base_value_size; ui++)
1747 if (new_reg_base_value[ui]
1748 && new_reg_base_value[ui] != reg_base_value[ui]
1749 && ! rtx_equal_p (new_reg_base_value[ui], reg_base_value[ui]))
1751 reg_base_value[ui] = new_reg_base_value[ui];
1752 changed = 1;
1756 while (changed && ++pass < MAX_ALIAS_LOOP_PASSES);
1758 /* Fill in the remaining entries. */
1759 for (i = FIRST_PSEUDO_REGISTER; i < maxreg; i++)
1760 if (reg_known_value[i] == 0)
1761 reg_known_value[i] = regno_reg_rtx[i];
1763 /* Simplify the reg_base_value array so that no register refers to
1764 another register, except to special registers indirectly through
1765 ADDRESS expressions.
1767 In theory this loop can take as long as O(registers^2), but unless
1768 there are very long dependency chains it will run in close to linear
1769 time.
1771 This loop may not be needed any longer now that the main loop does
1772 a better job at propagating alias information. */
1773 pass = 0;
1776 changed = 0;
1777 pass++;
1778 for (ui = 0; ui < reg_base_value_size; ui++)
1780 rtx base = reg_base_value[ui];
1781 if (base && GET_CODE (base) == REG)
1783 unsigned int base_regno = REGNO (base);
1784 if (base_regno == ui) /* register set from itself */
1785 reg_base_value[ui] = 0;
1786 else
1787 reg_base_value[ui] = reg_base_value[base_regno];
1788 changed = 1;
1792 while (changed && pass < MAX_ALIAS_LOOP_PASSES);
1794 /* Clean up. */
1795 free (new_reg_base_value);
1796 new_reg_base_value = 0;
1797 free (reg_seen);
1798 reg_seen = 0;
1801 void
1802 end_alias_analysis ()
1804 free (reg_known_value + FIRST_PSEUDO_REGISTER);
1805 reg_known_value = 0;
1806 reg_known_value_size = 0;
1807 free (reg_known_equiv_p + FIRST_PSEUDO_REGISTER);
1808 reg_known_equiv_p = 0;
1809 if (reg_base_value)
1811 if (ggc_p)
1812 ggc_del_root (reg_base_value);
1813 free (reg_base_value);
1814 reg_base_value = 0;
1816 reg_base_value_size = 0;
1817 if (alias_invariant)
1819 free (alias_invariant);
1820 alias_invariant = 0;