/* Common subexpression elimination library for GNU compiler.
   Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

#include "config.h"
#include "system.h"

#include "rtl.h"
#include "tm_p.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "real.h"
#include "insn-config.h"
#include "recog.h"
#include "function.h"
#include "expr.h"
#include "toplev.h"
#include "output.h"
#include "ggc.h"
#include "hashtab.h"
#include "cselib.h"

static int entry_and_rtx_equal_p PARAMS ((const void *, const void *));
static unsigned int get_value_hash PARAMS ((const void *));
static struct elt_list *new_elt_list PARAMS ((struct elt_list *,
                                              cselib_val *));
static struct elt_loc_list *new_elt_loc_list PARAMS ((struct elt_loc_list *,
                                                      rtx));
static void unchain_one_value PARAMS ((cselib_val *));
static void unchain_one_elt_list PARAMS ((struct elt_list **));
static void unchain_one_elt_loc_list PARAMS ((struct elt_loc_list **));
static void clear_table PARAMS ((int));
static int discard_useless_locs PARAMS ((void **, void *));
static int discard_useless_values PARAMS ((void **, void *));
static void remove_useless_values PARAMS ((void));
static rtx wrap_constant PARAMS ((enum machine_mode, rtx));
static unsigned int hash_rtx PARAMS ((rtx, enum machine_mode, int));
static cselib_val *new_cselib_val PARAMS ((unsigned int,
                                           enum machine_mode));
static void add_mem_for_addr PARAMS ((cselib_val *, cselib_val *,
                                      rtx));
static cselib_val *cselib_lookup_mem PARAMS ((rtx, int));
static void cselib_invalidate_regno PARAMS ((unsigned int,
                                             enum machine_mode));
static int cselib_mem_conflict_p PARAMS ((rtx, rtx));
static int cselib_invalidate_mem_1 PARAMS ((void **, void *));
static void cselib_invalidate_mem PARAMS ((rtx));
static void cselib_invalidate_rtx PARAMS ((rtx, rtx, void *));
static void cselib_record_set PARAMS ((rtx, cselib_val *,
                                       cselib_val *));
static void cselib_record_sets PARAMS ((rtx));

/* There are three ways in which cselib can look up an rtx:
   - for a REG, the reg_values table (which is indexed by regno) is used
   - for a MEM, we recursively look up its address and then follow the
     addr_list of that value
   - for everything else, we compute a hash value and go through the hash
     table.  Since different rtx's can still have the same hash value,
     this involves walking the table entries for a given value and comparing
     the locations of the entries with the rtx we are looking up.  */
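
/* A typical driver loop for this library looks roughly like the following
   (an illustrative sketch only; the real callers live in other passes):

	cselib_init ();
	for (insn = insns; insn; insn = NEXT_INSN (insn))
	  {
	    cselib_process_insn (insn);
	    ... query cselib_lookup or rtx_equal_for_cselib_p on the rtl
		of interest ...
	  }
	cselib_finish ();

   As noted at cselib_init below, the caller must also call
   init_alias_analysis.  */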

/* A table that enables us to look up elts by their value.  */
static GTY((param_is (cselib_val))) htab_t hash_table;

/* This is a global so we don't have to pass this through every function.
   It is used in new_elt_loc_list to set SETTING_INSN.  */
static rtx cselib_current_insn;

/* Every new unknown value gets a unique number.  */
static unsigned int next_unknown_value;

/* The number of registers we had when the varrays were last resized.  */
static unsigned int cselib_nregs;

/* Count values without known locations.  Whenever this grows too big, we
   remove these useless values from the table.  */
static int n_useless_values;

/* Number of useless values before we remove them from the hash table.  */
#define MAX_USELESS_VALUES 32

/* This table maps from register number to values.  It does not contain
   pointers to cselib_val structures, but rather elt_lists.  The purpose is
   to be able to refer to the same register in different modes.  */
static GTY(()) varray_type reg_values;
static GTY((deletable (""))) varray_type reg_values_old;
#define REG_VALUES(I) VARRAY_ELT_LIST (reg_values, (I))

/* The largest number of hard regs used by any entry added to the
   REG_VALUES table.  Cleared on each clear_table() invocation.  */
static unsigned int max_value_regs;

/* Here the set of indices I with REG_VALUES(I) != 0 is saved.  This is used
   in clear_table() for fast emptying.  */
static GTY(()) varray_type used_regs;
static GTY((deletable (""))) varray_type used_regs_old;

/* We pass this to cselib_invalidate_mem to invalidate all of
   memory for a non-const call instruction.  */
static GTY(()) rtx callmem;

/* Caches for unused structures.  */
static GTY((deletable (""))) cselib_val *empty_vals;
static GTY((deletable (""))) struct elt_list *empty_elt_lists;
static GTY((deletable (""))) struct elt_loc_list *empty_elt_loc_lists;

/* Set by discard_useless_locs if it deleted the last location of any
   value.  */
static int values_became_useless;

/* Allocate a struct elt_list and fill in its two elements with the
   arguments.  */

static struct elt_list *
new_elt_list (next, elt)
     struct elt_list *next;
     cselib_val *elt;
{
  struct elt_list *el = empty_elt_lists;

  if (el)
    empty_elt_lists = el->next;
  else
    el = (struct elt_list *) ggc_alloc (sizeof (struct elt_list));
  el->next = next;
  el->elt = elt;
  return el;
}

/* Allocate a struct elt_loc_list and fill in its two elements with the
   arguments.  */

static struct elt_loc_list *
new_elt_loc_list (next, loc)
     struct elt_loc_list *next;
     rtx loc;
{
  struct elt_loc_list *el = empty_elt_loc_lists;

  if (el)
    empty_elt_loc_lists = el->next;
  else
    el = (struct elt_loc_list *) ggc_alloc (sizeof (struct elt_loc_list));
  el->next = next;
  el->loc = loc;
  el->setting_insn = cselib_current_insn;
  return el;
}

/* The elt_list at *PL is no longer needed.  Unchain it and free its
   storage.  */

static void
unchain_one_elt_list (pl)
     struct elt_list **pl;
{
  struct elt_list *l = *pl;

  *pl = l->next;
  l->next = empty_elt_lists;
  empty_elt_lists = l;
}

/* Likewise for elt_loc_lists.  */

static void
unchain_one_elt_loc_list (pl)
     struct elt_loc_list **pl;
{
  struct elt_loc_list *l = *pl;

  *pl = l->next;
  l->next = empty_elt_loc_lists;
  empty_elt_loc_lists = l;
}

/* Likewise for cselib_vals.  This also frees the addr_list associated with
   V.  */

static void
unchain_one_value (v)
     cselib_val *v;
{
  while (v->addr_list)
    unchain_one_elt_list (&v->addr_list);

  v->u.next_free = empty_vals;
  empty_vals = v;
}

/* Remove all entries from the hash table.  Also used during
   initialization.  If CLEAR_ALL isn't set, then only clear the entries
   which are known to have been used.  */

static void
clear_table (clear_all)
     int clear_all;
{
  unsigned int i;

  if (clear_all)
    for (i = 0; i < cselib_nregs; i++)
      REG_VALUES (i) = 0;
  else
    for (i = 0; i < VARRAY_ACTIVE_SIZE (used_regs); i++)
      REG_VALUES (VARRAY_UINT (used_regs, i)) = 0;

  max_value_regs = 0;

  VARRAY_POP_ALL (used_regs);

  htab_empty (hash_table);

  n_useless_values = 0;

  next_unknown_value = 0;
}

/* The equality test for our hash table.  The first argument ENTRY is a table
   element (i.e. a cselib_val), while the second arg X is an rtx.  We know
   that all callers of htab_find_slot_with_hash will wrap CONST_INTs into a
   CONST of an appropriate mode.  */

static int
entry_and_rtx_equal_p (entry, x_arg)
     const void *entry, *x_arg;
{
  struct elt_loc_list *l;
  const cselib_val *v = (const cselib_val *) entry;
  rtx x = (rtx) x_arg;
  enum machine_mode mode = GET_MODE (x);

  if (GET_CODE (x) == CONST_INT
      || (mode == VOIDmode && GET_CODE (x) == CONST_DOUBLE))
    abort ();
  if (mode != GET_MODE (v->u.val_rtx))
    return 0;

  /* Unwrap X if necessary.  */
  if (GET_CODE (x) == CONST
      && (GET_CODE (XEXP (x, 0)) == CONST_INT
          || GET_CODE (XEXP (x, 0)) == CONST_DOUBLE))
    x = XEXP (x, 0);

  /* We don't guarantee that distinct rtx's have different hash values,
     so we need to do a comparison.  */
  for (l = v->locs; l; l = l->next)
    if (rtx_equal_for_cselib_p (l->loc, x))
      return 1;

  return 0;
}

/* The hash function for our hash table.  The value is always computed with
   hash_rtx when adding an element; this function just extracts the hash
   value from a cselib_val structure.  */

static unsigned int
get_value_hash (entry)
     const void *entry;
{
  const cselib_val *v = (const cselib_val *) entry;
  return v->value;
}

/* Return true if X contains a VALUE rtx.  If ONLY_USELESS is set, we
   only return true for values which point to a cselib_val whose value
   element has been set to zero, which implies the cselib_val will be
   removed.  */

int
references_value_p (x, only_useless)
     rtx x;
     int only_useless;
{
  enum rtx_code code = GET_CODE (x);
  const char *fmt = GET_RTX_FORMAT (code);
  int i, j;

  if (GET_CODE (x) == VALUE
      && (! only_useless || CSELIB_VAL_PTR (x)->locs == 0))
    return 1;

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && references_value_p (XEXP (x, i), only_useless))
        return 1;
      else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          if (references_value_p (XVECEXP (x, i, j), only_useless))
            return 1;
    }

  return 0;
}

/* For all locations found in X, delete locations that reference useless
   values (i.e. values without any location).  Called through
   htab_traverse.  */

static int
discard_useless_locs (x, info)
     void **x;
     void *info ATTRIBUTE_UNUSED;
{
  cselib_val *v = (cselib_val *) *x;
  struct elt_loc_list **p = &v->locs;
  int had_locs = v->locs != 0;

  while (*p)
    {
      if (references_value_p ((*p)->loc, 1))
        unchain_one_elt_loc_list (p);
      else
        p = &(*p)->next;
    }

  if (had_locs && v->locs == 0)
    {
      n_useless_values++;
      values_became_useless = 1;
    }
  return 1;
}

/* If X is a value with no locations, remove it from the hashtable.  */

static int
discard_useless_values (x, info)
     void **x;
     void *info ATTRIBUTE_UNUSED;
{
  cselib_val *v = (cselib_val *) *x;

  if (v->locs == 0)
    {
      htab_clear_slot (hash_table, x);
      unchain_one_value (v);
      n_useless_values--;
    }

  return 1;
}

/* Clean out useless values (i.e. those which no longer have locations
   associated with them) from the hash table.  */

static void
remove_useless_values ()
{
  /* First pass: eliminate locations that reference the value.  That in
     turn can make more values useless.  */
  do
    {
      values_became_useless = 0;
      htab_traverse (hash_table, discard_useless_locs, 0);
    }
  while (values_became_useless);

  /* Second pass: actually remove the values.  */
  htab_traverse (hash_table, discard_useless_values, 0);

  if (n_useless_values != 0)
    abort ();
}

/* Return nonzero if we can prove that X and Y contain the same value, taking
   our gathered information into account.  */

int
rtx_equal_for_cselib_p (x, y)
     rtx x, y;
{
  enum rtx_code code;
  const char *fmt;
  int i;

  if (GET_CODE (x) == REG || GET_CODE (x) == MEM)
    {
      cselib_val *e = cselib_lookup (x, GET_MODE (x), 0);

      if (e)
        x = e->u.val_rtx;
    }

  if (GET_CODE (y) == REG || GET_CODE (y) == MEM)
    {
      cselib_val *e = cselib_lookup (y, GET_MODE (y), 0);

      if (e)
        y = e->u.val_rtx;
    }

  if (x == y)
    return 1;

  if (GET_CODE (x) == VALUE && GET_CODE (y) == VALUE)
    return CSELIB_VAL_PTR (x) == CSELIB_VAL_PTR (y);

  if (GET_CODE (x) == VALUE)
    {
      cselib_val *e = CSELIB_VAL_PTR (x);
      struct elt_loc_list *l;

      for (l = e->locs; l; l = l->next)
        {
          rtx t = l->loc;

          /* Avoid infinite recursion.  */
          if (GET_CODE (t) == REG || GET_CODE (t) == MEM)
            continue;
          else if (rtx_equal_for_cselib_p (t, y))
            return 1;
        }

      return 0;
    }

  if (GET_CODE (y) == VALUE)
    {
      cselib_val *e = CSELIB_VAL_PTR (y);
      struct elt_loc_list *l;

      for (l = e->locs; l; l = l->next)
        {
          rtx t = l->loc;

          if (GET_CODE (t) == REG || GET_CODE (t) == MEM)
            continue;
          else if (rtx_equal_for_cselib_p (x, t))
            return 1;
        }

      return 0;
    }

  if (GET_CODE (x) != GET_CODE (y) || GET_MODE (x) != GET_MODE (y))
    return 0;

  /* This won't be handled correctly by the code below.  */
  if (GET_CODE (x) == LABEL_REF)
    return XEXP (x, 0) == XEXP (y, 0);

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      int j;

      switch (fmt[i])
        {
        case 'w':
          if (XWINT (x, i) != XWINT (y, i))
            return 0;
          break;

        case 'n':
        case 'i':
          if (XINT (x, i) != XINT (y, i))
            return 0;
          break;

        case 'V':
        case 'E':
          /* Two vectors must have the same length.  */
          if (XVECLEN (x, i) != XVECLEN (y, i))
            return 0;

          /* And the corresponding elements must match.  */
          for (j = 0; j < XVECLEN (x, i); j++)
            if (! rtx_equal_for_cselib_p (XVECEXP (x, i, j),
                                          XVECEXP (y, i, j)))
              return 0;
          break;

        case 'e':
          if (! rtx_equal_for_cselib_p (XEXP (x, i), XEXP (y, i)))
            return 0;
          break;

        case 'S':
        case 's':
          if (strcmp (XSTR (x, i), XSTR (y, i)))
            return 0;
          break;

        case 'u':
          /* These are just backpointers, so they don't matter.  */
          break;

        case '0':
        case 't':
          break;

          /* It is believed that rtx's at this level will never
             contain anything but integers and other rtx's,
             except for within LABEL_REFs and SYMBOL_REFs.  */
        default:
          abort ();
        }
    }
  return 1;
}

/* We need to pass down the mode of constants through the hash table
   functions.  For that purpose, wrap them in a CONST of the appropriate
   mode.  */
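
/* For example, a (const_int 4) being recorded in SImode is passed to the
   hash table as (const:SI (const_int 4)); bare CONST_INTs never reach
   entry_and_rtx_equal_p, which is why that function aborts on them.  */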

static rtx
wrap_constant (mode, x)
     enum machine_mode mode;
     rtx x;
{
  if (GET_CODE (x) != CONST_INT
      && (GET_CODE (x) != CONST_DOUBLE || GET_MODE (x) != VOIDmode))
    return x;
  if (mode == VOIDmode)
    abort ();
  return gen_rtx_CONST (mode, x);
}

/* Hash an rtx.  Return 0 if we couldn't hash the rtx.
   For registers and memory locations, we look up their cselib_val structure
   and return its VALUE element.
   Possible reasons for return 0 are: the object is volatile, or we couldn't
   find a register or memory location in the table and CREATE is zero.  If
   CREATE is nonzero, table elts are created for regs and mem.
   MODE is used in hashing for CONST_INTs only;
   otherwise the mode of X is used.  */

static unsigned int
hash_rtx (x, mode, create)
     rtx x;
     enum machine_mode mode;
     int create;
{
  cselib_val *e;
  int i, j;
  enum rtx_code code;
  const char *fmt;
  unsigned int hash = 0;

  code = GET_CODE (x);
  hash += (unsigned) code + (unsigned) GET_MODE (x);

  switch (code)
    {
    case MEM:
    case REG:
      e = cselib_lookup (x, GET_MODE (x), create);
      if (! e)
        return 0;

      return e->value;

    case CONST_INT:
      hash += ((unsigned) CONST_INT << 7) + (unsigned) mode + INTVAL (x);
      return hash ? hash : (unsigned int) CONST_INT;

    case CONST_DOUBLE:
      /* This is like the general case, except that it only counts
         the integers representing the constant.  */
      hash += (unsigned) code + (unsigned) GET_MODE (x);
      if (GET_MODE (x) != VOIDmode)
        for (i = 2; i < GET_RTX_LENGTH (CONST_DOUBLE); i++)
          hash += XWINT (x, i);
      else
        hash += ((unsigned) CONST_DOUBLE_LOW (x)
                 + (unsigned) CONST_DOUBLE_HIGH (x));
      return hash ? hash : (unsigned int) CONST_DOUBLE;

    case CONST_VECTOR:
      {
        int units;
        rtx elt;

        units = CONST_VECTOR_NUNITS (x);

        for (i = 0; i < units; ++i)
          {
            elt = CONST_VECTOR_ELT (x, i);
            hash += hash_rtx (elt, GET_MODE (elt), 0);
          }

        return hash;
      }

      /* Assume there is only one rtx object for any given label.  */
    case LABEL_REF:
      hash
        += ((unsigned) LABEL_REF << 7) + (unsigned long) XEXP (x, 0);
      return hash ? hash : (unsigned int) LABEL_REF;

    case SYMBOL_REF:
      hash
        += ((unsigned) SYMBOL_REF << 7) + (unsigned long) XSTR (x, 0);
      return hash ? hash : (unsigned int) SYMBOL_REF;

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case POST_MODIFY:
    case PRE_MODIFY:
    case PC:
    case CC0:
    case CALL:
    case UNSPEC_VOLATILE:
      return 0;

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 0;

      break;

    default:
      break;
    }

  i = GET_RTX_LENGTH (code) - 1;
  fmt = GET_RTX_FORMAT (code);
  for (; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          rtx tem = XEXP (x, i);
          unsigned int tem_hash = hash_rtx (tem, 0, create);

          if (tem_hash == 0)
            return 0;

          hash += tem_hash;
        }
      else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          {
            unsigned int tem_hash = hash_rtx (XVECEXP (x, i, j), 0, create);

            if (tem_hash == 0)
              return 0;

            hash += tem_hash;
          }
      else if (fmt[i] == 's')
        {
          const unsigned char *p = (const unsigned char *) XSTR (x, i);

          if (p)
            while (*p)
              hash += *p++;
        }
      else if (fmt[i] == 'i')
        hash += XINT (x, i);
      else if (fmt[i] == '0' || fmt[i] == 't')
        /* unused */;
      else
        abort ();
    }

  return hash ? hash : 1 + (unsigned int) GET_CODE (x);
}

/* Create a new value structure for VALUE and initialize it.  The mode of the
   value is MODE.  */

static cselib_val *
new_cselib_val (value, mode)
     unsigned int value;
     enum machine_mode mode;
{
  cselib_val *e = empty_vals;

  if (e)
    empty_vals = e->u.next_free;
  else
    e = (cselib_val *) ggc_alloc (sizeof (cselib_val));

  if (value == 0)
    abort ();

  e->value = value;
  e->u.val_rtx = gen_rtx_VALUE (mode);
  CSELIB_VAL_PTR (e->u.val_rtx) = e;
  e->addr_list = 0;
  e->locs = 0;
  return e;
}

/* ADDR_ELT is a value that is used as address.  MEM_ELT is the value that
   contains the data at this address.  X is a MEM that represents the
   value.  Update the two value structures to represent this situation.  */

static void
add_mem_for_addr (addr_elt, mem_elt, x)
     cselib_val *addr_elt, *mem_elt;
     rtx x;
{
  struct elt_loc_list *l;

  /* Avoid duplicates.  */
  for (l = mem_elt->locs; l; l = l->next)
    if (GET_CODE (l->loc) == MEM
        && CSELIB_VAL_PTR (XEXP (l->loc, 0)) == addr_elt)
      return;

  addr_elt->addr_list = new_elt_list (addr_elt->addr_list, mem_elt);
  mem_elt->locs
    = new_elt_loc_list (mem_elt->locs,
                        replace_equiv_address_nv (x, addr_elt->u.val_rtx));
}

/* Subroutine of cselib_lookup.  Return a value for X, which is a MEM rtx.
   If CREATE, make a new one if we haven't seen it before.  */

static cselib_val *
cselib_lookup_mem (x, create)
     rtx x;
     int create;
{
  enum machine_mode mode = GET_MODE (x);
  void **slot;
  cselib_val *addr;
  cselib_val *mem_elt;
  struct elt_list *l;

  if (MEM_VOLATILE_P (x) || mode == BLKmode
      || (FLOAT_MODE_P (mode) && flag_float_store))
    return 0;

  /* Look up the value for the address.  */
  addr = cselib_lookup (XEXP (x, 0), mode, create);
  if (! addr)
    return 0;

  /* Find a value that describes a value of our mode at that address.  */
  for (l = addr->addr_list; l; l = l->next)
    if (GET_MODE (l->elt->u.val_rtx) == mode)
      return l->elt;

  if (! create)
    return 0;

  mem_elt = new_cselib_val (++next_unknown_value, mode);
  add_mem_for_addr (addr, mem_elt, x);
  slot = htab_find_slot_with_hash (hash_table, wrap_constant (mode, x),
                                   mem_elt->value, INSERT);
  *slot = mem_elt;
  return mem_elt;
}

/* Walk rtx X and replace all occurrences of REG and MEM subexpressions
   with VALUE expressions.  This way, it becomes independent of changes
   to registers and memory.
   X isn't actually modified; if modifications are needed, new rtl is
   allocated.  However, the return value can share rtl with X.  */
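
/* As an illustrative sketch: if (reg:SI 3) is currently known as some
   value V, then for

	(plus:SI (reg:SI 3) (const_int 4))

   this function returns the equivalent of

	(plus:SI V (const_int 4))

   where V stands for the VALUE rtx of that cselib_val; later changes to
   register 3 do not affect the substituted form.  */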

rtx
cselib_subst_to_values (x)
     rtx x;
{
  enum rtx_code code = GET_CODE (x);
  const char *fmt = GET_RTX_FORMAT (code);
  cselib_val *e;
  struct elt_list *l;
  rtx copy = x;
  int i;

  switch (code)
    {
    case REG:
      for (l = REG_VALUES (REGNO (x)); l; l = l->next)
        if (GET_MODE (l->elt->u.val_rtx) == GET_MODE (x))
          return l->elt->u.val_rtx;

      abort ();

    case MEM:
      e = cselib_lookup_mem (x, 0);
      if (! e)
        {
          /* This happens for autoincrements.  Assign a value that doesn't
             match any other.  */
          e = new_cselib_val (++next_unknown_value, GET_MODE (x));
        }
      return e->u.val_rtx;

    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CONST_INT:
      return x;

    case POST_INC:
    case PRE_INC:
    case POST_DEC:
    case PRE_DEC:
    case POST_MODIFY:
    case PRE_MODIFY:
      e = new_cselib_val (++next_unknown_value, GET_MODE (x));
      return e->u.val_rtx;

    default:
      break;
    }

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          rtx t = cselib_subst_to_values (XEXP (x, i));

          if (t != XEXP (x, i) && x == copy)
            copy = shallow_copy_rtx (x);

          XEXP (copy, i) = t;
        }
      else if (fmt[i] == 'E')
        {
          int j, k;

          for (j = 0; j < XVECLEN (x, i); j++)
            {
              rtx t = cselib_subst_to_values (XVECEXP (x, i, j));

              if (t != XVECEXP (x, i, j) && XVEC (x, i) == XVEC (copy, i))
                {
                  if (x == copy)
                    copy = shallow_copy_rtx (x);

                  XVEC (copy, i) = rtvec_alloc (XVECLEN (x, i));
                  for (k = 0; k < j; k++)
                    XVECEXP (copy, i, k) = XVECEXP (x, i, k);
                }

              XVECEXP (copy, i, j) = t;
            }
        }
    }

  return copy;
}

/* Look up the rtl expression X in our tables and return the value it has.
   If CREATE is zero, we return NULL if we don't know the value.  Otherwise,
   we create a new one if possible, using mode MODE if X doesn't have a mode
   (i.e. because it's a constant).  */

cselib_val *
cselib_lookup (x, mode, create)
     rtx x;
     enum machine_mode mode;
     int create;
{
  void **slot;
  cselib_val *e;
  unsigned int hashval;

  if (GET_MODE (x) != VOIDmode)
    mode = GET_MODE (x);

  if (GET_CODE (x) == VALUE)
    return CSELIB_VAL_PTR (x);

  if (GET_CODE (x) == REG)
    {
      struct elt_list *l;
      unsigned int i = REGNO (x);

      for (l = REG_VALUES (i); l; l = l->next)
        if (mode == GET_MODE (l->elt->u.val_rtx))
          return l->elt;

      if (! create)
        return 0;

      if (i < FIRST_PSEUDO_REGISTER)
        {
          unsigned int n = HARD_REGNO_NREGS (i, mode);

          if (n > max_value_regs)
            max_value_regs = n;
        }

      e = new_cselib_val (++next_unknown_value, GET_MODE (x));
      e->locs = new_elt_loc_list (e->locs, x);
      if (REG_VALUES (i) == 0)
        VARRAY_PUSH_UINT (used_regs, i);
      REG_VALUES (i) = new_elt_list (REG_VALUES (i), e);
      slot = htab_find_slot_with_hash (hash_table, x, e->value, INSERT);
      *slot = e;
      return e;
    }

  if (GET_CODE (x) == MEM)
    return cselib_lookup_mem (x, create);

  hashval = hash_rtx (x, mode, create);
  /* Can't even create if hashing is not possible.  */
  if (! hashval)
    return 0;

  slot = htab_find_slot_with_hash (hash_table, wrap_constant (mode, x),
                                   hashval, create ? INSERT : NO_INSERT);
  if (slot == 0)
    return 0;

  e = (cselib_val *) *slot;
  if (e)
    return e;

  e = new_cselib_val (hashval, mode);

  /* We have to fill the slot before calling cselib_subst_to_values:
     the hash table is inconsistent until we do so, and
     cselib_subst_to_values will need to do lookups.  */
  *slot = (void *) e;
  e->locs = new_elt_loc_list (e->locs, cselib_subst_to_values (x));
  return e;
}

/* Invalidate any entries in reg_values that overlap REGNO.  This is called
   if REGNO is changing.  MODE is the mode of the assignment to REGNO, which
   is used to determine how many hard registers are being changed.  If MODE
   is VOIDmode, then only REGNO is being changed; this is used when
   invalidating call clobbered registers across a call.  */
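
/* As an illustrative example: on a target where a DImode value occupies two
   hard registers, an entry recorded for (reg:DI 2) also covers hard register
   3, so an assignment to register 3 alone must invalidate it.  That is why
   the loop below starts scanning up to max_value_regs registers below REGNO
   and compares each entry's last hard register against REGNO.  */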

static void
cselib_invalidate_regno (regno, mode)
     unsigned int regno;
     enum machine_mode mode;
{
  unsigned int endregno;
  unsigned int i;

  /* If we see pseudos after reload, something is _wrong_.  */
  if (reload_completed && regno >= FIRST_PSEUDO_REGISTER
      && reg_renumber[regno] >= 0)
    abort ();

  /* Determine the range of registers that must be invalidated.  For
     pseudos, only REGNO is affected.  For hard regs, we must take MODE
     into account, and we must also invalidate lower register numbers
     if they contain values that overlap REGNO.  */
  if (regno < FIRST_PSEUDO_REGISTER && mode != VOIDmode)
    {
      if (regno < max_value_regs)
        i = 0;
      else
        i = regno - max_value_regs;

      endregno = regno + HARD_REGNO_NREGS (regno, mode);
    }
  else
    {
      i = regno;
      endregno = regno + 1;
    }

  for (; i < endregno; i++)
    {
      struct elt_list **l = &REG_VALUES (i);

      /* Go through all known values for this reg; if it overlaps the range
         we're invalidating, remove the value.  */
      while (*l)
        {
          cselib_val *v = (*l)->elt;
          struct elt_loc_list **p;
          unsigned int this_last = i;

          if (i < FIRST_PSEUDO_REGISTER)
            this_last += HARD_REGNO_NREGS (i, GET_MODE (v->u.val_rtx)) - 1;

          if (this_last < regno)
            {
              l = &(*l)->next;
              continue;
            }

          /* We have an overlap.  */
          unchain_one_elt_list (l);

          /* Now, we clear the mapping from value to reg.  It must exist, so
             this code will crash intentionally if it doesn't.  */
          for (p = &v->locs; ; p = &(*p)->next)
            {
              rtx x = (*p)->loc;

              if (GET_CODE (x) == REG && REGNO (x) == i)
                {
                  unchain_one_elt_loc_list (p);
                  break;
                }
            }

          if (v->locs == 0)
            n_useless_values++;
        }
    }
}

/* The memory at address MEM_BASE is being changed.
   Return whether this change will invalidate VAL.  */

static int
cselib_mem_conflict_p (mem_base, val)
     rtx mem_base;
     rtx val;
{
  enum rtx_code code;
  const char *fmt;
  int i, j;

  code = GET_CODE (val);
  switch (code)
    {
      /* Get rid of a few simple cases quickly.  */
    case REG:
    case PC:
    case CC0:
    case SCRATCH:
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case MEM:
      if (GET_MODE (mem_base) == BLKmode
          || GET_MODE (val) == BLKmode
          || anti_dependence (val, mem_base))
        return 1;

      /* The address may contain nested MEMs.  */
      break;

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          if (cselib_mem_conflict_p (mem_base, XEXP (val, i)))
            return 1;
        }
      else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (val, i); j++)
          if (cselib_mem_conflict_p (mem_base, XVECEXP (val, i, j)))
            return 1;
    }

  return 0;
}

/* For the value found in SLOT, walk its locations to determine if any overlap
   INFO (which is a MEM rtx).  */

static int
cselib_invalidate_mem_1 (slot, info)
     void **slot;
     void *info;
{
  cselib_val *v = (cselib_val *) *slot;
  rtx mem_rtx = (rtx) info;
  struct elt_loc_list **p = &v->locs;
  int had_locs = v->locs != 0;

  while (*p)
    {
      rtx x = (*p)->loc;
      cselib_val *addr;
      struct elt_list **mem_chain;

      /* MEMs may occur in locations only at the top level; below
         that every MEM or REG is substituted by its VALUE.  */
      if (GET_CODE (x) != MEM
          || ! cselib_mem_conflict_p (mem_rtx, x))
        {
          p = &(*p)->next;
          continue;
        }

      /* This one overlaps.  */
      /* We must have a mapping from this MEM's address to the
         value (E).  Remove that, too.  */
      addr = cselib_lookup (XEXP (x, 0), VOIDmode, 0);
      mem_chain = &addr->addr_list;
      for (;;)
        {
          if ((*mem_chain)->elt == v)
            {
              unchain_one_elt_list (mem_chain);
              break;
            }

          mem_chain = &(*mem_chain)->next;
        }

      unchain_one_elt_loc_list (p);
    }

  if (had_locs && v->locs == 0)
    n_useless_values++;

  return 1;
}

/* Invalidate any locations in the table which are changed because of a
   store to MEM_RTX.  If this is called because of a non-const call
   instruction, MEM_RTX is (mem:BLK const0_rtx).  */

static void
cselib_invalidate_mem (mem_rtx)
     rtx mem_rtx;
{
  htab_traverse (hash_table, cselib_invalidate_mem_1, mem_rtx);
}

/* Invalidate DEST, which is being assigned to or clobbered.  The second and
   the third parameter exist so that this function can be passed to
   note_stores; they are ignored.  */

static void
cselib_invalidate_rtx (dest, ignore, data)
     rtx dest;
     rtx ignore ATTRIBUTE_UNUSED;
     void *data ATTRIBUTE_UNUSED;
{
  while (GET_CODE (dest) == STRICT_LOW_PART || GET_CODE (dest) == SIGN_EXTRACT
         || GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == SUBREG)
    dest = XEXP (dest, 0);

  if (GET_CODE (dest) == REG)
    cselib_invalidate_regno (REGNO (dest), GET_MODE (dest));
  else if (GET_CODE (dest) == MEM)
    cselib_invalidate_mem (dest);

  /* Some machines don't define AUTO_INC_DEC, but they still use push
     instructions.  We need to catch that case here in order to
     invalidate the stack pointer correctly.  Note that invalidating
     the stack pointer is different from invalidating DEST.  */
  if (push_operand (dest, GET_MODE (dest)))
    cselib_invalidate_rtx (stack_pointer_rtx, NULL_RTX, NULL);
}

/* Record the result of a SET instruction.  DEST is being set; the source
   contains the value described by SRC_ELT.  If DEST is a MEM, DEST_ADDR_ELT
   describes its address.  */

static void
cselib_record_set (dest, src_elt, dest_addr_elt)
     rtx dest;
     cselib_val *src_elt, *dest_addr_elt;
{
  int dreg = GET_CODE (dest) == REG ? (int) REGNO (dest) : -1;

  if (src_elt == 0 || side_effects_p (dest))
    return;

  if (dreg >= 0)
    {
      if (REG_VALUES (dreg) == 0)
        VARRAY_PUSH_UINT (used_regs, dreg);

      if (dreg < FIRST_PSEUDO_REGISTER)
        {
          unsigned int n = HARD_REGNO_NREGS (dreg, GET_MODE (dest));

          if (n > max_value_regs)
            max_value_regs = n;
        }

      REG_VALUES (dreg) = new_elt_list (REG_VALUES (dreg), src_elt);
      if (src_elt->locs == 0)
        n_useless_values--;
      src_elt->locs = new_elt_loc_list (src_elt->locs, dest);
    }
  else if (GET_CODE (dest) == MEM && dest_addr_elt != 0)
    {
      if (src_elt->locs == 0)
        n_useless_values--;
      add_mem_for_addr (dest_addr_elt, src_elt, dest);
    }
}

/* Describe a single set that is part of an insn.  */
struct set
{
  rtx src;
  rtx dest;
  cselib_val *src_elt;
  cselib_val *dest_addr_elt;
};

/* There is no good way to determine how many elements there can be
   in a PARALLEL.  Since it's fairly cheap, use a really large number.  */
#define MAX_SETS (FIRST_PSEUDO_REGISTER * 2)

/* Record the effects of any sets in INSN.  */
static void
cselib_record_sets (insn)
     rtx insn;
{
  int n_sets = 0;
  int i;
  struct set sets[MAX_SETS];
  rtx body = PATTERN (insn);
  rtx cond = 0;

  if (GET_CODE (body) == COND_EXEC)
    {
      cond = COND_EXEC_TEST (body);
      body = COND_EXEC_CODE (body);
    }

  /* Find all sets.  */
  if (GET_CODE (body) == SET)
    {
      sets[0].src = SET_SRC (body);
      sets[0].dest = SET_DEST (body);
      n_sets = 1;
    }
  else if (GET_CODE (body) == PARALLEL)
    {
      /* Look through the PARALLEL and record the values being
         set, if possible.  Also handle any CLOBBERs.  */
      for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
        {
          rtx x = XVECEXP (body, 0, i);

          if (GET_CODE (x) == SET)
            {
              sets[n_sets].src = SET_SRC (x);
              sets[n_sets].dest = SET_DEST (x);
              n_sets++;
            }
        }
    }

  /* Look up the values that are read.  Do this before invalidating the
     locations that are written.  */
  for (i = 0; i < n_sets; i++)
    {
      rtx dest = sets[i].dest;

      /* A STRICT_LOW_PART can be ignored; we'll record the equivalence for
         the low part after invalidating any knowledge about larger modes.  */
      if (GET_CODE (sets[i].dest) == STRICT_LOW_PART)
        sets[i].dest = dest = XEXP (dest, 0);

      /* We don't know how to record anything but REG or MEM.  */
      if (GET_CODE (dest) == REG || GET_CODE (dest) == MEM)
        {
          rtx src = sets[i].src;
          if (cond)
            src = gen_rtx_IF_THEN_ELSE (GET_MODE (src), cond, src, dest);
          sets[i].src_elt = cselib_lookup (src, GET_MODE (dest), 1);
          if (GET_CODE (dest) == MEM)
            sets[i].dest_addr_elt = cselib_lookup (XEXP (dest, 0), Pmode, 1);
          else
            sets[i].dest_addr_elt = 0;
        }
    }

  /* Invalidate all locations written by this insn.  Note that the elts we
     looked up in the previous loop aren't affected, just some of their
     locations may go away.  */
  note_stores (body, cselib_invalidate_rtx, NULL);

  /* Now enter the equivalences in our tables.  */
  for (i = 0; i < n_sets; i++)
    {
      rtx dest = sets[i].dest;
      if (GET_CODE (dest) == REG || GET_CODE (dest) == MEM)
        cselib_record_set (dest, sets[i].src_elt, sets[i].dest_addr_elt);
    }
}

/* Record the effects of INSN.  */

void
cselib_process_insn (insn)
     rtx insn;
{
  int i;
  rtx x;

  cselib_current_insn = insn;

  /* Forget everything at a CODE_LABEL, a volatile asm, or a setjmp.  */
  if (GET_CODE (insn) == CODE_LABEL
      || (GET_CODE (insn) == CALL_INSN
          && find_reg_note (insn, REG_SETJMP, NULL))
      || (GET_CODE (insn) == INSN
          && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
          && MEM_VOLATILE_P (PATTERN (insn))))
    {
      clear_table (0);
      return;
    }

  if (! INSN_P (insn))
    {
      cselib_current_insn = 0;
      return;
    }

  /* If this is a call instruction, forget anything stored in a
     call clobbered register, or, if this is not a const call, in
     memory.  */
  if (GET_CODE (insn) == CALL_INSN)
    {
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
        if (call_used_regs[i])
          cselib_invalidate_regno (i, VOIDmode);

      if (! CONST_OR_PURE_CALL_P (insn))
        cselib_invalidate_mem (callmem);
    }

  cselib_record_sets (insn);

#ifdef AUTO_INC_DEC
  /* Clobber any registers which appear in REG_INC notes.  We
     could keep track of the changes to their values, but it is
     unlikely to help.  */
  for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
    if (REG_NOTE_KIND (x) == REG_INC)
      cselib_invalidate_rtx (XEXP (x, 0), NULL_RTX, NULL);
#endif

  /* Look for any CLOBBERs in CALL_INSN_FUNCTION_USAGE, but only
     after we have processed the insn.  */
  if (GET_CODE (insn) == CALL_INSN)
    for (x = CALL_INSN_FUNCTION_USAGE (insn); x; x = XEXP (x, 1))
      if (GET_CODE (XEXP (x, 0)) == CLOBBER)
        cselib_invalidate_rtx (XEXP (XEXP (x, 0), 0), NULL_RTX, NULL);

  cselib_current_insn = 0;

  if (n_useless_values > MAX_USELESS_VALUES)
    remove_useless_values ();
}

/* Make sure our varrays are big enough.  Not called from any cselib routines;
   it must be called by the user if it allocated new registers.  */

void
cselib_update_varray_sizes ()
{
  unsigned int nregs = max_reg_num ();

  if (nregs == cselib_nregs)
    return;

  cselib_nregs = nregs;
  VARRAY_GROW (reg_values, nregs);
  VARRAY_GROW (used_regs, nregs);
}

/* Initialize cselib for one pass.  The caller must also call
   init_alias_analysis.  */

void
cselib_init ()
{
  /* This is only created once.  */
  if (! callmem)
    callmem = gen_rtx_MEM (BLKmode, const0_rtx);

  cselib_nregs = max_reg_num ();
  if (reg_values_old != NULL && VARRAY_SIZE (reg_values_old) >= cselib_nregs)
    {
      reg_values = reg_values_old;
      used_regs = used_regs_old;
      VARRAY_CLEAR (reg_values);
      VARRAY_CLEAR (used_regs);
    }
  else
    {
      VARRAY_ELT_LIST_INIT (reg_values, cselib_nregs, "reg_values");
      VARRAY_UINT_INIT (used_regs, cselib_nregs, "used_regs");
    }
  hash_table = htab_create_ggc (31, get_value_hash, entry_and_rtx_equal_p,
                                NULL);
  clear_table (1);
}

/* Called when the current user is done with cselib.  */

void
cselib_finish ()
{
  reg_values_old = reg_values;
  reg_values = 0;
  used_regs_old = used_regs;
  used_regs = 0;
  hash_table = 0;
  n_useless_values = 0;
  next_unknown_value = 0;
}

#include "gt-cselib.h"