/* Common subexpression elimination library for GNU compiler.
   Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

#include "config.h"
#include "system.h"

#include "rtl.h"
#include "tm_p.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "real.h"
#include "insn-config.h"
#include "recog.h"
#include "function.h"
#include "expr.h"
#include "toplev.h"
#include "output.h"
#include "ggc.h"
#include "hashtab.h"
#include "cselib.h"

static int entry_and_rtx_equal_p PARAMS ((const void *, const void *));
static unsigned int get_value_hash PARAMS ((const void *));
static struct elt_list *new_elt_list PARAMS ((struct elt_list *,
					      cselib_val *));
static struct elt_loc_list *new_elt_loc_list PARAMS ((struct elt_loc_list *,
						      rtx));
static void unchain_one_value PARAMS ((cselib_val *));
static void unchain_one_elt_list PARAMS ((struct elt_list **));
static void unchain_one_elt_loc_list PARAMS ((struct elt_loc_list **));
static void clear_table PARAMS ((int));
static int discard_useless_locs PARAMS ((void **, void *));
static int discard_useless_values PARAMS ((void **, void *));
static void remove_useless_values PARAMS ((void));
static rtx wrap_constant PARAMS ((enum machine_mode, rtx));
static unsigned int hash_rtx PARAMS ((rtx, enum machine_mode, int));
static cselib_val *new_cselib_val PARAMS ((unsigned int,
					   enum machine_mode));
static void add_mem_for_addr PARAMS ((cselib_val *, cselib_val *,
				      rtx));
static cselib_val *cselib_lookup_mem PARAMS ((rtx, int));
static void cselib_invalidate_regno PARAMS ((unsigned int,
					     enum machine_mode));
static int cselib_mem_conflict_p PARAMS ((rtx, rtx));
static int cselib_invalidate_mem_1 PARAMS ((void **, void *));
static void cselib_invalidate_mem PARAMS ((rtx));
static void cselib_invalidate_rtx PARAMS ((rtx, rtx, void *));
static void cselib_record_set PARAMS ((rtx, cselib_val *,
				       cselib_val *));
static void cselib_record_sets PARAMS ((rtx));

/* There are three ways in which cselib can look up an rtx:
   - for a REG, the reg_values table (which is indexed by regno) is used
   - for a MEM, we recursively look up its address and then follow the
     addr_list of that value
   - for everything else, we compute a hash value and go through the hash
     table.  Since different rtx's can still have the same hash value,
     this involves walking the table entries for a given value and comparing
     the locations of the entries with the rtx we are looking up.  */
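
/* Purely as an illustration of the interface (the "first" and "x"
   variables below are hypothetical, and the alias-analysis calls live in
   alias.c), a client pass typically drives this file as:

	init_alias_analysis ();
	cselib_init ();
	for (insn = first; insn; insn = NEXT_INSN (insn))
	  {
	    cselib_process_insn (insn);
	    ...
	    v = cselib_lookup (x, GET_MODE (x), 0);  /- query only -/
	  }
	cselib_finish ();
	end_alias_analysis ();

   Two expressions can then be compared with rtx_equal_for_cselib_p.  */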

/* A table that enables us to look up elts by their value.  */
static GTY((param_is (cselib_val))) htab_t hash_table;

/* This is a global so we don't have to pass this through every function.
   It is used in new_elt_loc_list to set SETTING_INSN.  */
static rtx cselib_current_insn;

/* Every new unknown value gets a unique number.  */
static unsigned int next_unknown_value;

/* The number of registers we had when the varrays were last resized.  */
static unsigned int cselib_nregs;

/* Count values without known locations.  Whenever this grows too big, we
   remove these useless values from the table.  */
static int n_useless_values;

/* Number of useless values before we remove them from the hash table.  */
#define MAX_USELESS_VALUES 32

/* This table maps from register number to values.  It does not contain
   pointers to cselib_val structures, but rather elt_lists.  The purpose is
   to be able to refer to the same register in different modes.  */
static GTY(()) varray_type reg_values;
static GTY((deletable (""))) varray_type reg_values_old;
#define REG_VALUES(I) VARRAY_ELT_LIST (reg_values, (I))
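
/* For example (purely illustrative): if one insn sets (reg:SI 42) and a
   later insn reads (reg:HI 42), REG_VALUES (42) ends up as an elt_list
   with two entries, one whose elt has an SImode VALUE rtx and one with an
   HImode VALUE rtx; cselib_lookup picks the entry whose mode matches the
   lookup mode and creates a new one for an unseen mode.  */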

/* The largest number of hard regs used by any entry added to the
   REG_VALUES table.  Cleared on each clear_table() invocation.  */
static unsigned int max_value_regs;

/* Here the set of indices I with REG_VALUES(I) != 0 is saved.  This is used
   in clear_table() for fast emptying.  */
static GTY(()) varray_type used_regs;
static GTY((deletable (""))) varray_type used_regs_old;

/* We pass this to cselib_invalidate_mem to invalidate all of
   memory for a non-const call instruction.  */
static GTY(()) rtx callmem;

/* Caches for unused structures.  */
static GTY((deletable (""))) cselib_val *empty_vals;
static GTY((deletable (""))) struct elt_list *empty_elt_lists;
static GTY((deletable (""))) struct elt_loc_list *empty_elt_loc_lists;

/* Set by discard_useless_locs if it deleted the last location of any
   value.  */
static int values_became_useless;

/* Allocate a struct elt_list and fill in its two elements with the
   arguments.  */

static struct elt_list *
new_elt_list (next, elt)
     struct elt_list *next;
     cselib_val *elt;
{
  struct elt_list *el = empty_elt_lists;

  if (el)
    empty_elt_lists = el->next;
  else
    el = (struct elt_list *) ggc_alloc (sizeof (struct elt_list));
  el->next = next;
  el->elt = elt;
  return el;
}

/* Allocate a struct elt_loc_list and fill in its two elements with the
   arguments.  */

static struct elt_loc_list *
new_elt_loc_list (next, loc)
     struct elt_loc_list *next;
     rtx loc;
{
  struct elt_loc_list *el = empty_elt_loc_lists;

  if (el)
    empty_elt_loc_lists = el->next;
  else
    el = (struct elt_loc_list *) ggc_alloc (sizeof (struct elt_loc_list));
  el->next = next;
  el->loc = loc;
  el->setting_insn = cselib_current_insn;
  return el;
}

/* The elt_list at *PL is no longer needed.  Unchain it and free its
   storage.  */

static void
unchain_one_elt_list (pl)
     struct elt_list **pl;
{
  struct elt_list *l = *pl;

  *pl = l->next;
  l->next = empty_elt_lists;
  empty_elt_lists = l;
}

/* Likewise for elt_loc_lists.  */

static void
unchain_one_elt_loc_list (pl)
     struct elt_loc_list **pl;
{
  struct elt_loc_list *l = *pl;

  *pl = l->next;
  l->next = empty_elt_loc_lists;
  empty_elt_loc_lists = l;
}

/* Likewise for cselib_vals.  This also frees the addr_list associated with
   V.  */

static void
unchain_one_value (v)
     cselib_val *v;
{
  while (v->addr_list)
    unchain_one_elt_list (&v->addr_list);

  v->u.next_free = empty_vals;
  empty_vals = v;
}

/* Remove all entries from the hash table.  Also used during
   initialization.  If CLEAR_ALL isn't set, then only clear the entries
   which are known to have been used.  */

static void
clear_table (clear_all)
     int clear_all;
{
  unsigned int i;

  if (clear_all)
    for (i = 0; i < cselib_nregs; i++)
      REG_VALUES (i) = 0;
  else
    for (i = 0; i < VARRAY_ACTIVE_SIZE (used_regs); i++)
      REG_VALUES (VARRAY_UINT (used_regs, i)) = 0;

  max_value_regs = 0;

  VARRAY_POP_ALL (used_regs);

  htab_empty (hash_table);

  n_useless_values = 0;

  next_unknown_value = 0;
}

/* The equality test for our hash table.  The first argument ENTRY is a table
   element (i.e. a cselib_val), while the second arg X is an rtx.  We know
   that all callers of htab_find_slot_with_hash will wrap CONST_INTs into a
   CONST of an appropriate mode.  */

static int
entry_and_rtx_equal_p (entry, x_arg)
     const void *entry, *x_arg;
{
  struct elt_loc_list *l;
  const cselib_val *v = (const cselib_val *) entry;
  rtx x = (rtx) x_arg;
  enum machine_mode mode = GET_MODE (x);

  if (GET_CODE (x) == CONST_INT
      || (mode == VOIDmode && GET_CODE (x) == CONST_DOUBLE))
    abort ();
  if (mode != GET_MODE (v->u.val_rtx))
    return 0;

  /* Unwrap X if necessary.  */
  if (GET_CODE (x) == CONST
      && (GET_CODE (XEXP (x, 0)) == CONST_INT
	  || GET_CODE (XEXP (x, 0)) == CONST_DOUBLE))
    x = XEXP (x, 0);

  /* We don't guarantee that distinct rtx's have different hash values,
     so we need to do a comparison.  */
  for (l = v->locs; l; l = l->next)
    if (rtx_equal_for_cselib_p (l->loc, x))
      return 1;

  return 0;
}

/* The hash function for our hash table.  The value is always computed with
   hash_rtx when adding an element; this function just extracts the hash
   value from a cselib_val structure.  */

static unsigned int
get_value_hash (entry)
     const void *entry;
{
  const cselib_val *v = (const cselib_val *) entry;
  return v->value;
}

/* Return true if X contains a VALUE rtx.  If ONLY_USELESS is set, we
   only return true for values which point to a cselib_val whose value
   element has been set to zero, which implies the cselib_val will be
   removed.  */
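
/* Illustrative example: if the only location of value V1 was (reg:SI 5)
   and that register is later invalidated, V1->locs becomes empty, so V1 is
   "useless"; a second value V2 whose location list contains
   (plus:SI (value V1) (const_int 4)) then references a useless value, and
   discard_useless_locs below will drop that location from V2 as well.  */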

int
references_value_p (x, only_useless)
     rtx x;
     int only_useless;
{
  enum rtx_code code = GET_CODE (x);
  const char *fmt = GET_RTX_FORMAT (code);
  int i, j;

  if (GET_CODE (x) == VALUE
      && (! only_useless || CSELIB_VAL_PTR (x)->locs == 0))
    return 1;

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && references_value_p (XEXP (x, i), only_useless))
	return 1;
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  if (references_value_p (XVECEXP (x, i, j), only_useless))
	    return 1;
    }

  return 0;
}

/* For all locations found in X, delete locations that reference useless
   values (i.e. values without any location).  Called through
   htab_traverse.  */

static int
discard_useless_locs (x, info)
     void **x;
     void *info ATTRIBUTE_UNUSED;
{
  cselib_val *v = (cselib_val *)*x;
  struct elt_loc_list **p = &v->locs;
  int had_locs = v->locs != 0;

  while (*p)
    {
      if (references_value_p ((*p)->loc, 1))
	unchain_one_elt_loc_list (p);
      else
	p = &(*p)->next;
    }

  if (had_locs && v->locs == 0)
    {
      n_useless_values++;
      values_became_useless = 1;
    }
  return 1;
}

/* If X is a value with no locations, remove it from the hashtable.  */

static int
discard_useless_values (x, info)
     void **x;
     void *info ATTRIBUTE_UNUSED;
{
  cselib_val *v = (cselib_val *)*x;

  if (v->locs == 0)
    {
      htab_clear_slot (hash_table, x);
      unchain_one_value (v);
      n_useless_values--;
    }

  return 1;
}

/* Clean out useless values (i.e. those which no longer have locations
   associated with them) from the hash table.  */

static void
remove_useless_values ()
{
  /* First pass: eliminate locations that reference the value.  That in
     turn can make more values useless.  */
  do
    {
      values_became_useless = 0;
      htab_traverse (hash_table, discard_useless_locs, 0);
    }
  while (values_became_useless);

  /* Second pass: actually remove the values.  */
  htab_traverse (hash_table, discard_useless_values, 0);

  if (n_useless_values != 0)
    abort ();
}

/* Return nonzero if we can prove that X and Y contain the same value, taking
   our gathered information into account.  */

int
rtx_equal_for_cselib_p (x, y)
     rtx x, y;
{
  enum rtx_code code;
  const char *fmt;
  int i;

  if (GET_CODE (x) == REG || GET_CODE (x) == MEM)
    {
      cselib_val *e = cselib_lookup (x, GET_MODE (x), 0);

      if (e)
	x = e->u.val_rtx;
    }

  if (GET_CODE (y) == REG || GET_CODE (y) == MEM)
    {
      cselib_val *e = cselib_lookup (y, GET_MODE (y), 0);

      if (e)
	y = e->u.val_rtx;
    }

  if (x == y)
    return 1;

  if (GET_CODE (x) == VALUE && GET_CODE (y) == VALUE)
    return CSELIB_VAL_PTR (x) == CSELIB_VAL_PTR (y);

  if (GET_CODE (x) == VALUE)
    {
      cselib_val *e = CSELIB_VAL_PTR (x);
      struct elt_loc_list *l;

      for (l = e->locs; l; l = l->next)
	{
	  rtx t = l->loc;

	  /* Avoid infinite recursion.  */
	  if (GET_CODE (t) == REG || GET_CODE (t) == MEM)
	    continue;
	  else if (rtx_equal_for_cselib_p (t, y))
	    return 1;
	}

      return 0;
    }

  if (GET_CODE (y) == VALUE)
    {
      cselib_val *e = CSELIB_VAL_PTR (y);
      struct elt_loc_list *l;

      for (l = e->locs; l; l = l->next)
	{
	  rtx t = l->loc;

	  if (GET_CODE (t) == REG || GET_CODE (t) == MEM)
	    continue;
	  else if (rtx_equal_for_cselib_p (x, t))
	    return 1;
	}

      return 0;
    }

  if (GET_CODE (x) != GET_CODE (y) || GET_MODE (x) != GET_MODE (y))
    return 0;

  /* This won't be handled correctly by the code below.  */
  if (GET_CODE (x) == LABEL_REF)
    return XEXP (x, 0) == XEXP (y, 0);

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      int j;

      switch (fmt[i])
	{
	case 'w':
	  if (XWINT (x, i) != XWINT (y, i))
	    return 0;
	  break;

	case 'n':
	case 'i':
	  if (XINT (x, i) != XINT (y, i))
	    return 0;
	  break;

	case 'V':
	case 'E':
	  /* Two vectors must have the same length.  */
	  if (XVECLEN (x, i) != XVECLEN (y, i))
	    return 0;

	  /* And the corresponding elements must match.  */
	  for (j = 0; j < XVECLEN (x, i); j++)
	    if (! rtx_equal_for_cselib_p (XVECEXP (x, i, j),
					  XVECEXP (y, i, j)))
	      return 0;
	  break;

	case 'e':
	  if (! rtx_equal_for_cselib_p (XEXP (x, i), XEXP (y, i)))
	    return 0;
	  break;

	case 'S':
	case 's':
	  if (strcmp (XSTR (x, i), XSTR (y, i)))
	    return 0;
	  break;

	case 'u':
	  /* These are just backpointers, so they don't matter.  */
	  break;

	case '0':
	case 't':
	  break;

	  /* It is believed that rtx's at this level will never
	     contain anything but integers and other rtx's,
	     except for within LABEL_REFs and SYMBOL_REFs.  */
	default:
	  abort ();
	}
    }
  return 1;
}

/* We need to pass down the mode of constants through the hash table
   functions.  For that purpose, wrap them in a CONST of the appropriate
   mode.  */
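/* For instance (illustrative only), looking up (const_int 4) in SImode
   hashes and compares the wrapped form (const:SI (const_int 4)), which is
   why entry_and_rtx_equal_p above unwraps a CONST around a CONST_INT or
   CONST_DOUBLE before comparing locations.  */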
static rtx
wrap_constant (mode, x)
     enum machine_mode mode;
     rtx x;
{
  if (GET_CODE (x) != CONST_INT
      && (GET_CODE (x) != CONST_DOUBLE || GET_MODE (x) != VOIDmode))
    return x;
  if (mode == VOIDmode)
    abort ();
  return gen_rtx_CONST (mode, x);
}

/* Hash an rtx.  Return 0 if we couldn't hash the rtx.
   For registers and memory locations, we look up their cselib_val structure
   and return its VALUE element.
   Possible reasons for return 0 are: the object is volatile, or we couldn't
   find a register or memory location in the table and CREATE is zero.  If
   CREATE is nonzero, table elts are created for regs and mem.
   MODE is used in hashing for CONST_INTs only;
   otherwise the mode of X is used.  */
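
/* An illustrative walk-through of the combination scheme, assuming the
   operand values already exist: for (plus:SI (reg:SI 1) (const_int 4)) the
   hash starts as (unsigned) PLUS + (unsigned) SImode, the REG operand
   contributes the VALUE number that cselib_lookup returns for it (see the
   REG/MEM case below), and the CONST_INT operand contributes its own
   code/mode term plus ((unsigned) CONST_INT << 7) + INTVAL, MODE being
   VOIDmode because inner operands are hashed with mode 0.  */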

static unsigned int
hash_rtx (x, mode, create)
     rtx x;
     enum machine_mode mode;
     int create;
{
  cselib_val *e;
  int i, j;
  enum rtx_code code;
  const char *fmt;
  unsigned int hash = 0;

  code = GET_CODE (x);
  hash += (unsigned) code + (unsigned) GET_MODE (x);

  switch (code)
    {
    case MEM:
    case REG:
      e = cselib_lookup (x, GET_MODE (x), create);
      if (! e)
	return 0;

      return e->value;

    case CONST_INT:
      hash += ((unsigned) CONST_INT << 7) + (unsigned) mode + INTVAL (x);
      return hash ? hash : (unsigned int) CONST_INT;

    case CONST_DOUBLE:
      /* This is like the general case, except that it only counts
	 the integers representing the constant.  */
      hash += (unsigned) code + (unsigned) GET_MODE (x);
      if (GET_MODE (x) != VOIDmode)
	hash += real_hash (CONST_DOUBLE_REAL_VALUE (x));
      else
	hash += ((unsigned) CONST_DOUBLE_LOW (x)
		 + (unsigned) CONST_DOUBLE_HIGH (x));
      return hash ? hash : (unsigned int) CONST_DOUBLE;

    case CONST_VECTOR:
      {
	int units;
	rtx elt;

	units = CONST_VECTOR_NUNITS (x);

	for (i = 0; i < units; ++i)
	  {
	    elt = CONST_VECTOR_ELT (x, i);
	    hash += hash_rtx (elt, GET_MODE (elt), 0);
	  }

	return hash;
      }

      /* Assume there is only one rtx object for any given label.  */
    case LABEL_REF:
      hash
	+= ((unsigned) LABEL_REF << 7) + (unsigned long) XEXP (x, 0);
      return hash ? hash : (unsigned int) LABEL_REF;

    case SYMBOL_REF:
      hash
	+= ((unsigned) SYMBOL_REF << 7) + (unsigned long) XSTR (x, 0);
      return hash ? hash : (unsigned int) SYMBOL_REF;

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case POST_MODIFY:
    case PRE_MODIFY:
    case PC:
    case CC0:
    case CALL:
    case UNSPEC_VOLATILE:
      return 0;

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
	return 0;

      break;

    default:
      break;
    }

  i = GET_RTX_LENGTH (code) - 1;
  fmt = GET_RTX_FORMAT (code);
  for (; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  rtx tem = XEXP (x, i);
	  unsigned int tem_hash = hash_rtx (tem, 0, create);

	  if (tem_hash == 0)
	    return 0;

	  hash += tem_hash;
	}
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  {
	    unsigned int tem_hash = hash_rtx (XVECEXP (x, i, j), 0, create);

	    if (tem_hash == 0)
	      return 0;

	    hash += tem_hash;
	  }
      else if (fmt[i] == 's')
	{
	  const unsigned char *p = (const unsigned char *) XSTR (x, i);

	  if (p)
	    while (*p)
	      hash += *p++;
	}
      else if (fmt[i] == 'i')
	hash += XINT (x, i);
      else if (fmt[i] == '0' || fmt[i] == 't')
	/* unused */;
      else
	abort ();
    }

  return hash ? hash : 1 + (unsigned int) GET_CODE (x);
}

/* Create a new value structure for VALUE and initialize it.  The mode of the
   value is MODE.  */

static cselib_val *
new_cselib_val (value, mode)
     unsigned int value;
     enum machine_mode mode;
{
  cselib_val *e = empty_vals;

  if (e)
    empty_vals = e->u.next_free;
  else
    e = (cselib_val *) ggc_alloc (sizeof (cselib_val));

  if (value == 0)
    abort ();

  e->value = value;
  e->u.val_rtx = gen_rtx_VALUE (mode);
  CSELIB_VAL_PTR (e->u.val_rtx) = e;
  e->addr_list = 0;
  e->locs = 0;
  return e;
}

/* ADDR_ELT is a value that is used as address.  MEM_ELT is the value that
   contains the data at this address.  X is a MEM that represents the
   value.  Update the two value structures to represent this situation.  */
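
/* Concretely (an illustrative example only): after recording a load from
   (mem:SI (reg/f:SI 100)), ADDR_ELT is the value of the address register,
   MEM_ELT is the value of the loaded data, ADDR_ELT->addr_list gains an
   entry pointing at MEM_ELT, and MEM_ELT->locs gains a MEM whose address
   has been replaced by ADDR_ELT's VALUE rtx.  */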

static void
add_mem_for_addr (addr_elt, mem_elt, x)
     cselib_val *addr_elt, *mem_elt;
     rtx x;
{
  struct elt_loc_list *l;

  /* Avoid duplicates.  */
  for (l = mem_elt->locs; l; l = l->next)
    if (GET_CODE (l->loc) == MEM
	&& CSELIB_VAL_PTR (XEXP (l->loc, 0)) == addr_elt)
      return;

  addr_elt->addr_list = new_elt_list (addr_elt->addr_list, mem_elt);
  mem_elt->locs
    = new_elt_loc_list (mem_elt->locs,
			replace_equiv_address_nv (x, addr_elt->u.val_rtx));
}

/* Subroutine of cselib_lookup.  Return a value for X, which is a MEM rtx.
   If CREATE, make a new one if we haven't seen it before.  */

static cselib_val *
cselib_lookup_mem (x, create)
     rtx x;
     int create;
{
  enum machine_mode mode = GET_MODE (x);
  void **slot;
  cselib_val *addr;
  cselib_val *mem_elt;
  struct elt_list *l;

  if (MEM_VOLATILE_P (x) || mode == BLKmode
      || (FLOAT_MODE_P (mode) && flag_float_store))
    return 0;

  /* Look up the value for the address.  */
  addr = cselib_lookup (XEXP (x, 0), mode, create);
  if (! addr)
    return 0;

  /* Find a value that describes a value of our mode at that address.  */
  for (l = addr->addr_list; l; l = l->next)
    if (GET_MODE (l->elt->u.val_rtx) == mode)
      return l->elt;

  if (! create)
    return 0;

  mem_elt = new_cselib_val (++next_unknown_value, mode);
  add_mem_for_addr (addr, mem_elt, x);
  slot = htab_find_slot_with_hash (hash_table, wrap_constant (mode, x),
				   mem_elt->value, INSERT);
  *slot = mem_elt;
  return mem_elt;
}

/* Walk rtx X and replace all occurrences of REG and MEM subexpressions
   with VALUE expressions.  This way, it becomes independent of changes
   to registers and memory.
   X isn't actually modified; if modifications are needed, new rtl is
   allocated.  However, the return value can share rtl with X.  */
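
/* For example (illustrative only): substituting into
   (plus:SI (reg:SI 1) (mem:SI (reg:SI 2))) yields a new
   (plus:SI (value:SI ...) (value:SI ...)) where each VALUE rtx points at
   the cselib_val currently recorded for the register or memory location;
   constant operands such as (const_int ...) are returned unchanged.  */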

rtx
cselib_subst_to_values (x)
     rtx x;
{
  enum rtx_code code = GET_CODE (x);
  const char *fmt = GET_RTX_FORMAT (code);
  cselib_val *e;
  struct elt_list *l;
  rtx copy = x;
  int i;

  switch (code)
    {
    case REG:
      for (l = REG_VALUES (REGNO (x)); l; l = l->next)
	if (GET_MODE (l->elt->u.val_rtx) == GET_MODE (x))
	  return l->elt->u.val_rtx;

      abort ();

    case MEM:
      e = cselib_lookup_mem (x, 0);
      if (! e)
	{
	  /* This happens for autoincrements.  Assign a value that doesn't
	     match any other.  */
	  e = new_cselib_val (++next_unknown_value, GET_MODE (x));
	}
      return e->u.val_rtx;

    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CONST_INT:
      return x;

    case POST_INC:
    case PRE_INC:
    case POST_DEC:
    case PRE_DEC:
    case POST_MODIFY:
    case PRE_MODIFY:
      e = new_cselib_val (++next_unknown_value, GET_MODE (x));
      return e->u.val_rtx;

    default:
      break;
    }

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  rtx t = cselib_subst_to_values (XEXP (x, i));

	  if (t != XEXP (x, i) && x == copy)
	    copy = shallow_copy_rtx (x);

	  XEXP (copy, i) = t;
	}
      else if (fmt[i] == 'E')
	{
	  int j, k;

	  for (j = 0; j < XVECLEN (x, i); j++)
	    {
	      rtx t = cselib_subst_to_values (XVECEXP (x, i, j));

	      if (t != XVECEXP (x, i, j) && XVEC (x, i) == XVEC (copy, i))
		{
		  if (x == copy)
		    copy = shallow_copy_rtx (x);

		  XVEC (copy, i) = rtvec_alloc (XVECLEN (x, i));
		  for (k = 0; k < j; k++)
		    XVECEXP (copy, i, k) = XVECEXP (x, i, k);
		}

	      XVECEXP (copy, i, j) = t;
	    }
	}
    }

  return copy;
}

/* Look up the rtl expression X in our tables and return the value it has.
   If CREATE is zero, we return NULL if we don't know the value.  Otherwise,
   we create a new one if possible, using mode MODE if X doesn't have a mode
   (i.e. because it's a constant).  */
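
/* Example of the MODE parameter (illustrative only): a caller looking up
   the source of (set (reg:SI 60) (const_int 4)) passes the destination's
   mode, as in cselib_lookup (src, GET_MODE (dest), 1), because the
   CONST_INT itself is VOIDmode; cselib_record_sets below does exactly
   this for the sets it records.  */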

cselib_val *
cselib_lookup (x, mode, create)
     rtx x;
     enum machine_mode mode;
     int create;
{
  void **slot;
  cselib_val *e;
  unsigned int hashval;

  if (GET_MODE (x) != VOIDmode)
    mode = GET_MODE (x);

  if (GET_CODE (x) == VALUE)
    return CSELIB_VAL_PTR (x);

  if (GET_CODE (x) == REG)
    {
      struct elt_list *l;
      unsigned int i = REGNO (x);

      for (l = REG_VALUES (i); l; l = l->next)
	if (mode == GET_MODE (l->elt->u.val_rtx))
	  return l->elt;

      if (! create)
	return 0;

      if (i < FIRST_PSEUDO_REGISTER)
	{
	  unsigned int n = HARD_REGNO_NREGS (i, mode);

	  if (n > max_value_regs)
	    max_value_regs = n;
	}

      e = new_cselib_val (++next_unknown_value, GET_MODE (x));
      e->locs = new_elt_loc_list (e->locs, x);
      if (REG_VALUES (i) == 0)
	VARRAY_PUSH_UINT (used_regs, i);
      REG_VALUES (i) = new_elt_list (REG_VALUES (i), e);
      slot = htab_find_slot_with_hash (hash_table, x, e->value, INSERT);
      *slot = e;
      return e;
    }

  if (GET_CODE (x) == MEM)
    return cselib_lookup_mem (x, create);

  hashval = hash_rtx (x, mode, create);
  /* Can't even create if hashing is not possible.  */
  if (! hashval)
    return 0;

  slot = htab_find_slot_with_hash (hash_table, wrap_constant (mode, x),
				   hashval, create ? INSERT : NO_INSERT);
  if (slot == 0)
    return 0;

  e = (cselib_val *) *slot;
  if (e)
    return e;

  e = new_cselib_val (hashval, mode);

  /* We have to fill the slot before calling cselib_subst_to_values:
     the hash table is inconsistent until we do so, and
     cselib_subst_to_values will need to do lookups.  */
  *slot = (void *) e;
  e->locs = new_elt_loc_list (e->locs, cselib_subst_to_values (x));
  return e;
}

/* Invalidate any entries in reg_values that overlap REGNO.  This is called
   if REGNO is changing.  MODE is the mode of the assignment to REGNO, which
   is used to determine how many hard registers are being changed.  If MODE
   is VOIDmode, then only REGNO is being changed; this is used when
   invalidating call clobbered registers across a call.  */
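
/* A worked example (illustrative; assumes a target where DImode needs two
   word-sized hard registers): if a DImode value was recorded for hard reg
   2, it occupies regs 2-3 and max_value_regs is at least 2.  Invalidating
   an SImode store to reg 3 then starts the scan at reg 3 - max_value_regs,
   so the entry on REG_VALUES (2) is examined, found to extend up to reg 3
   (this_last == 3), and removed along with any entries for reg 3 itself.  */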

static void
cselib_invalidate_regno (regno, mode)
     unsigned int regno;
     enum machine_mode mode;
{
  unsigned int endregno;
  unsigned int i;

  /* If we see pseudos after reload, something is _wrong_.  */
  if (reload_completed && regno >= FIRST_PSEUDO_REGISTER
      && reg_renumber[regno] >= 0)
    abort ();

  /* Determine the range of registers that must be invalidated.  For
     pseudos, only REGNO is affected.  For hard regs, we must take MODE
     into account, and we must also invalidate lower register numbers
     if they contain values that overlap REGNO.  */
  if (regno < FIRST_PSEUDO_REGISTER && mode != VOIDmode)
    {
      if (regno < max_value_regs)
	i = 0;
      else
	i = regno - max_value_regs;

      endregno = regno + HARD_REGNO_NREGS (regno, mode);
    }
  else
    {
      i = regno;
      endregno = regno + 1;
    }

  for (; i < endregno; i++)
    {
      struct elt_list **l = &REG_VALUES (i);

      /* Go through all known values for this reg; if it overlaps the range
	 we're invalidating, remove the value.  */
      while (*l)
	{
	  cselib_val *v = (*l)->elt;
	  struct elt_loc_list **p;
	  unsigned int this_last = i;

	  if (i < FIRST_PSEUDO_REGISTER)
	    this_last += HARD_REGNO_NREGS (i, GET_MODE (v->u.val_rtx)) - 1;

	  if (this_last < regno)
	    {
	      l = &(*l)->next;
	      continue;
	    }

	  /* We have an overlap.  */
	  unchain_one_elt_list (l);

	  /* Now, we clear the mapping from value to reg.  It must exist, so
	     this code will crash intentionally if it doesn't.  */
	  for (p = &v->locs; ; p = &(*p)->next)
	    {
	      rtx x = (*p)->loc;

	      if (GET_CODE (x) == REG && REGNO (x) == i)
		{
		  unchain_one_elt_loc_list (p);
		  break;
		}
	    }
	  if (v->locs == 0)
	    n_useless_values++;
	}
    }
}

/* The memory at address MEM_BASE is being changed.
   Return whether this change will invalidate VAL.  */

static int
cselib_mem_conflict_p (mem_base, val)
     rtx mem_base;
     rtx val;
{
  enum rtx_code code;
  const char *fmt;
  int i, j;

  code = GET_CODE (val);
  switch (code)
    {
      /* Get rid of a few simple cases quickly.  */
    case REG:
    case PC:
    case CC0:
    case SCRATCH:
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case MEM:
      if (GET_MODE (mem_base) == BLKmode
	  || GET_MODE (val) == BLKmode
	  || anti_dependence (val, mem_base))
	return 1;

      /* The address may contain nested MEMs.  */
      break;

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  if (cselib_mem_conflict_p (mem_base, XEXP (val, i)))
	    return 1;
	}
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (val, i); j++)
	  if (cselib_mem_conflict_p (mem_base, XVECEXP (val, i, j)))
	    return 1;
    }

  return 0;
}

/* For the value found in SLOT, walk its locations to determine if any overlap
   INFO (which is a MEM rtx).  */

static int
cselib_invalidate_mem_1 (slot, info)
     void **slot;
     void *info;
{
  cselib_val *v = (cselib_val *) *slot;
  rtx mem_rtx = (rtx) info;
  struct elt_loc_list **p = &v->locs;
  int had_locs = v->locs != 0;

  while (*p)
    {
      rtx x = (*p)->loc;
      cselib_val *addr;
      struct elt_list **mem_chain;

      /* MEMs may occur in locations only at the top level; below
	 that every MEM or REG is substituted by its VALUE.  */
      if (GET_CODE (x) != MEM
	  || ! cselib_mem_conflict_p (mem_rtx, x))
	{
	  p = &(*p)->next;
	  continue;
	}

      /* This one overlaps.  */
      /* We must have a mapping from this MEM's address to the
	 value (E).  Remove that, too.  */
      addr = cselib_lookup (XEXP (x, 0), VOIDmode, 0);
      mem_chain = &addr->addr_list;
      for (;;)
	{
	  if ((*mem_chain)->elt == v)
	    {
	      unchain_one_elt_list (mem_chain);
	      break;
	    }

	  mem_chain = &(*mem_chain)->next;
	}

      unchain_one_elt_loc_list (p);
    }

  if (had_locs && v->locs == 0)
    n_useless_values++;

  return 1;
}

/* Invalidate any locations in the table which are changed because of a
   store to MEM_RTX.  If this is called because of a non-const call
   instruction, MEM_RTX is (mem:BLK const0_rtx).  */

static void
cselib_invalidate_mem (mem_rtx)
     rtx mem_rtx;
{
  htab_traverse (hash_table, cselib_invalidate_mem_1, mem_rtx);
}

/* Invalidate DEST, which is being assigned to or clobbered.  The second and
   the third parameter exist so that this function can be passed to
   note_stores; they are ignored.  */

static void
cselib_invalidate_rtx (dest, ignore, data)
     rtx dest;
     rtx ignore ATTRIBUTE_UNUSED;
     void *data ATTRIBUTE_UNUSED;
{
  while (GET_CODE (dest) == STRICT_LOW_PART || GET_CODE (dest) == SIGN_EXTRACT
	 || GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == SUBREG)
    dest = XEXP (dest, 0);

  if (GET_CODE (dest) == REG)
    cselib_invalidate_regno (REGNO (dest), GET_MODE (dest));
  else if (GET_CODE (dest) == MEM)
    cselib_invalidate_mem (dest);

  /* Some machines don't define AUTO_INC_DEC, but they still use push
     instructions.  We need to catch that case here in order to
     invalidate the stack pointer correctly.  Note that invalidating
     the stack pointer is different from invalidating DEST.  */
  if (push_operand (dest, GET_MODE (dest)))
    cselib_invalidate_rtx (stack_pointer_rtx, NULL_RTX, NULL);
}

/* Record the result of a SET instruction.  DEST is being set; the source
   contains the value described by SRC_ELT.  If DEST is a MEM, DEST_ADDR_ELT
   describes its address.  */

static void
cselib_record_set (dest, src_elt, dest_addr_elt)
     rtx dest;
     cselib_val *src_elt, *dest_addr_elt;
{
  int dreg = GET_CODE (dest) == REG ? (int) REGNO (dest) : -1;

  if (src_elt == 0 || side_effects_p (dest))
    return;

  if (dreg >= 0)
    {
      if (REG_VALUES (dreg) == 0)
	VARRAY_PUSH_UINT (used_regs, dreg);

      if (dreg < FIRST_PSEUDO_REGISTER)
	{
	  unsigned int n = HARD_REGNO_NREGS (dreg, GET_MODE (dest));

	  if (n > max_value_regs)
	    max_value_regs = n;
	}

      REG_VALUES (dreg) = new_elt_list (REG_VALUES (dreg), src_elt);
      if (src_elt->locs == 0)
	n_useless_values--;
      src_elt->locs = new_elt_loc_list (src_elt->locs, dest);
    }
  else if (GET_CODE (dest) == MEM && dest_addr_elt != 0)
    {
      if (src_elt->locs == 0)
	n_useless_values--;
      add_mem_for_addr (dest_addr_elt, src_elt, dest);
    }
}

/* Describe a single set that is part of an insn.  */
struct set
{
  rtx src;
  rtx dest;
  cselib_val *src_elt;
  cselib_val *dest_addr_elt;
};

/* There is no good way to determine how many elements there can be
   in a PARALLEL.  Since it's fairly cheap, use a really large number.  */
#define MAX_SETS (FIRST_PSEUDO_REGISTER * 2)

/* Record the effects of any sets in INSN.  */
static void
cselib_record_sets (insn)
     rtx insn;
{
  int n_sets = 0;
  int i;
  struct set sets[MAX_SETS];
  rtx body = PATTERN (insn);
  rtx cond = 0;

  body = PATTERN (insn);
  if (GET_CODE (body) == COND_EXEC)
    {
      cond = COND_EXEC_TEST (body);
      body = COND_EXEC_CODE (body);
    }

  /* Find all sets.  */
  if (GET_CODE (body) == SET)
    {
      sets[0].src = SET_SRC (body);
      sets[0].dest = SET_DEST (body);
      n_sets = 1;
    }
  else if (GET_CODE (body) == PARALLEL)
    {
      /* Look through the PARALLEL and record the values being
	 set, if possible.  Also handle any CLOBBERs.  */
      for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
	{
	  rtx x = XVECEXP (body, 0, i);

	  if (GET_CODE (x) == SET)
	    {
	      sets[n_sets].src = SET_SRC (x);
	      sets[n_sets].dest = SET_DEST (x);
	      n_sets++;
	    }
	}
    }

  /* Look up the values that are read.  Do this before invalidating the
     locations that are written.  */
  for (i = 0; i < n_sets; i++)
    {
      rtx dest = sets[i].dest;

      /* A STRICT_LOW_PART can be ignored; we'll record the equivalence for
	 the low part after invalidating any knowledge about larger modes.  */
      if (GET_CODE (sets[i].dest) == STRICT_LOW_PART)
	sets[i].dest = dest = XEXP (dest, 0);

      /* We don't know how to record anything but REG or MEM.  */
      if (GET_CODE (dest) == REG || GET_CODE (dest) == MEM)
	{
	  rtx src = sets[i].src;
	  if (cond)
	    src = gen_rtx_IF_THEN_ELSE (GET_MODE (src), cond, src, dest);
	  sets[i].src_elt = cselib_lookup (src, GET_MODE (dest), 1);
	  if (GET_CODE (dest) == MEM)
	    sets[i].dest_addr_elt = cselib_lookup (XEXP (dest, 0), Pmode, 1);
	  else
	    sets[i].dest_addr_elt = 0;
	}
    }

  /* Invalidate all locations written by this insn.  Note that the elts we
     looked up in the previous loop aren't affected, just some of their
     locations may go away.  */
  note_stores (body, cselib_invalidate_rtx, NULL);

  /* Now enter the equivalences in our tables.  */
  for (i = 0; i < n_sets; i++)
    {
      rtx dest = sets[i].dest;
      if (GET_CODE (dest) == REG || GET_CODE (dest) == MEM)
	cselib_record_set (dest, sets[i].src_elt, sets[i].dest_addr_elt);
    }
}

/* Record the effects of INSN.  */

void
cselib_process_insn (insn)
     rtx insn;
{
  int i;
  rtx x;

  cselib_current_insn = insn;

  /* Forget everything at a CODE_LABEL, a volatile asm, or a setjmp.  */
  if (GET_CODE (insn) == CODE_LABEL
      || (GET_CODE (insn) == CALL_INSN
	  && find_reg_note (insn, REG_SETJMP, NULL))
      || (GET_CODE (insn) == INSN
	  && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
	  && MEM_VOLATILE_P (PATTERN (insn))))
    {
      clear_table (0);
      return;
    }

  if (! INSN_P (insn))
    {
      cselib_current_insn = 0;
      return;
    }

  /* If this is a call instruction, forget anything stored in a
     call clobbered register, or, if this is not a const call, in
     memory.  */
  if (GET_CODE (insn) == CALL_INSN)
    {
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if (call_used_regs[i])
	  cselib_invalidate_regno (i, VOIDmode);

      if (! CONST_OR_PURE_CALL_P (insn))
	cselib_invalidate_mem (callmem);
    }

  cselib_record_sets (insn);

#ifdef AUTO_INC_DEC
  /* Clobber any registers which appear in REG_INC notes.  We
     could keep track of the changes to their values, but it is
     unlikely to help.  */
  for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
    if (REG_NOTE_KIND (x) == REG_INC)
      cselib_invalidate_rtx (XEXP (x, 0), NULL_RTX, NULL);
#endif

  /* Look for any CLOBBERs in CALL_INSN_FUNCTION_USAGE, but only
     after we have processed the insn.  */
  if (GET_CODE (insn) == CALL_INSN)
    for (x = CALL_INSN_FUNCTION_USAGE (insn); x; x = XEXP (x, 1))
      if (GET_CODE (XEXP (x, 0)) == CLOBBER)
	cselib_invalidate_rtx (XEXP (XEXP (x, 0), 0), NULL_RTX, NULL);

  cselib_current_insn = 0;

  if (n_useless_values > MAX_USELESS_VALUES)
    remove_useless_values ();
}

/* Make sure our varrays are big enough.  Not called from any cselib routines;
   it must be called by the user if it allocated new registers.  */

void
cselib_update_varray_sizes ()
{
  unsigned int nregs = max_reg_num ();

  if (nregs == cselib_nregs)
    return;

  cselib_nregs = nregs;
  VARRAY_GROW (reg_values, nregs);
  VARRAY_GROW (used_regs, nregs);
}

/* Initialize cselib for one pass.  The caller must also call
   init_alias_analysis.  */

void
cselib_init ()
{
  /* This is only created once.  */
  if (! callmem)
    callmem = gen_rtx_MEM (BLKmode, const0_rtx);

  cselib_nregs = max_reg_num ();
  if (reg_values_old != NULL && VARRAY_SIZE (reg_values_old) >= cselib_nregs)
    {
      reg_values = reg_values_old;
      used_regs = used_regs_old;
      VARRAY_CLEAR (reg_values);
      VARRAY_CLEAR (used_regs);
    }
  else
    {
      VARRAY_ELT_LIST_INIT (reg_values, cselib_nregs, "reg_values");
      VARRAY_UINT_INIT (used_regs, cselib_nregs, "used_regs");
    }
  hash_table = htab_create_ggc (31, get_value_hash, entry_and_rtx_equal_p,
				NULL);
  clear_table (1);
}

/* Called when the current user is done with cselib.  */

void
cselib_finish ()
{
  reg_values_old = reg_values;
  reg_values = 0;
  used_regs_old = used_regs;
  used_regs = 0;
  hash_table = 0;
  n_useless_values = 0;
  next_unknown_value = 0;
}

#include "gt-cselib.h"