1 /* Common subexpression elimination library for GNU compiler.
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
25 #include "rtl.h"
26 #include "tm_p.h"
27 #include "regs.h"
28 #include "hard-reg-set.h"
29 #include "flags.h"
30 #include "real.h"
31 #include "insn-config.h"
32 #include "recog.h"
33 #include "function.h"
34 #include "expr.h"
35 #include "toplev.h"
36 #include "output.h"
37 #include "ggc.h"
38 #include "obstack.h"
39 #include "hashtab.h"
40 #include "cselib.h"
42 static int entry_and_rtx_equal_p PARAMS ((const void *, const void *));
43 static unsigned int get_value_hash PARAMS ((const void *));
44 static struct elt_list *new_elt_list PARAMS ((struct elt_list *,
45 cselib_val *));
46 static struct elt_loc_list *new_elt_loc_list PARAMS ((struct elt_loc_list *,
47 rtx));
48 static void unchain_one_value PARAMS ((cselib_val *));
49 static void unchain_one_elt_list PARAMS ((struct elt_list **));
50 static void unchain_one_elt_loc_list PARAMS ((struct elt_loc_list **));
51 static void clear_table PARAMS ((int));
52 static int discard_useless_locs PARAMS ((void **, void *));
53 static int discard_useless_values PARAMS ((void **, void *));
54 static void remove_useless_values PARAMS ((void));
55 static rtx wrap_constant PARAMS ((enum machine_mode, rtx));
56 static unsigned int hash_rtx PARAMS ((rtx, enum machine_mode, int));
57 static cselib_val *new_cselib_val PARAMS ((unsigned int,
58 enum machine_mode));
59 static void add_mem_for_addr PARAMS ((cselib_val *, cselib_val *,
60 rtx));
61 static cselib_val *cselib_lookup_mem PARAMS ((rtx, int));
62 static void cselib_invalidate_regno PARAMS ((unsigned int,
63 enum machine_mode));
64 static int cselib_mem_conflict_p PARAMS ((rtx, rtx));
65 static int cselib_invalidate_mem_1 PARAMS ((void **, void *));
66 static void cselib_invalidate_mem PARAMS ((rtx));
67 static void cselib_invalidate_rtx PARAMS ((rtx, rtx, void *));
68 static void cselib_record_set PARAMS ((rtx, cselib_val *,
69 cselib_val *));
70 static void cselib_record_sets PARAMS ((rtx));
72 /* There are three ways in which cselib can look up an rtx:
73 - for a REG, the reg_values table (which is indexed by regno) is used
74 - for a MEM, we recursively look up its address and then follow the
75 addr_list of that value
76 - for everything else, we compute a hash value and go through the hash
77 table. Since different rtx's can still have the same hash value,
78 this involves walking the table entries for a given value and comparing
79 the locations of the entries with the rtx we are looking up. */
81 /* A table that enables us to look up elts by their value. */
82 static htab_t hash_table;
84 /* This is a global so we don't have to pass this through every function.
85 It is used in new_elt_loc_list to set SETTING_INSN. */
86 static rtx cselib_current_insn;
88 /* Every new unknown value gets a unique number. */
89 static unsigned int next_unknown_value;
91 /* The number of registers we had when the varrays were last resized. */
92 static unsigned int cselib_nregs;
94 /* Count values without known locations. Whenever this grows too big, we
95 remove these useless values from the table. */
96 static int n_useless_values;
98 /* Number of useless values before we remove them from the hash table. */
99 #define MAX_USELESS_VALUES 32
101 /* This table maps from register number to values. It does not contain
102 pointers to cselib_val structures, but rather elt_lists. The purpose is
103 to be able to refer to the same register in different modes. */
104 static varray_type reg_values;
105 #define REG_VALUES(I) VARRAY_ELT_LIST (reg_values, (I))
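/* Editor's sketch, not part of the original file: how the REG_VALUES list
   is meant to be walked.  A register can be recorded in several modes, so
   the list is scanned for an entry whose VALUE rtx has the requested mode.
   This mirrors the REG branch of cselib_lookup further down in this file;
   the function name is hypothetical and the block is guarded out of the
   build.  */
#if 0
static cselib_val *
example_reg_value (regno, mode)
     unsigned int regno;
     enum machine_mode mode;
{
  struct elt_list *l;

  for (l = REG_VALUES (regno); l; l = l->next)
    if (GET_MODE (l->elt->u.val_rtx) == mode)
      return l->elt;

  return 0;
}
#endif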
107 /* The largest number of hard regs used by any entry added to the
108 REG_VALUES table. Cleared on each clear_table() invocation. */
109 static unsigned int max_value_regs;
111 /* Here the set of indices I with REG_VALUES(I) != 0 is saved. This is used
112 in clear_table() for fast emptying. */
113 static varray_type used_regs;
115 /* We pass this to cselib_invalidate_mem to invalidate all of
116 memory for a non-const call instruction. */
117 static rtx callmem;
119 /* Memory for our structures is allocated from this obstack. */
120 static struct obstack cselib_obstack;
122 /* Used to quickly free all memory. */
123 static char *cselib_startobj;
125 /* Caches for unused structures. */
126 static cselib_val *empty_vals;
127 static struct elt_list *empty_elt_lists;
128 static struct elt_loc_list *empty_elt_loc_lists;
130 /* Set by discard_useless_locs if it deleted the last location of any
131 value. */
132 static int values_became_useless;
135 /* Allocate a struct elt_list and fill in its two elements with the
136 arguments. */
138 static struct elt_list *
139 new_elt_list (next, elt)
140 struct elt_list *next;
141 cselib_val *elt;
143 struct elt_list *el = empty_elt_lists;
145 if (el)
146 empty_elt_lists = el->next;
147 else
148 el = (struct elt_list *) obstack_alloc (&cselib_obstack,
149 sizeof (struct elt_list));
150 el->next = next;
151 el->elt = elt;
152 return el;
155 /* Allocate a struct elt_loc_list and fill in its two elements with the
156 arguments. */
158 static struct elt_loc_list *
159 new_elt_loc_list (next, loc)
160 struct elt_loc_list *next;
161 rtx loc;
163 struct elt_loc_list *el = empty_elt_loc_lists;
165 if (el)
166 empty_elt_loc_lists = el->next;
167 else
168 el = (struct elt_loc_list *) obstack_alloc (&cselib_obstack,
169 sizeof (struct elt_loc_list));
170 el->next = next;
171 el->loc = loc;
172 el->setting_insn = cselib_current_insn;
173 return el;
176 /* The elt_list at *PL is no longer needed. Unchain it and free its
177 storage. */
179 static void
180 unchain_one_elt_list (pl)
181 struct elt_list **pl;
183 struct elt_list *l = *pl;
185 *pl = l->next;
186 l->next = empty_elt_lists;
187 empty_elt_lists = l;
190 /* Likewise for elt_loc_lists. */
192 static void
193 unchain_one_elt_loc_list (pl)
194 struct elt_loc_list **pl;
196 struct elt_loc_list *l = *pl;
198 *pl = l->next;
199 l->next = empty_elt_loc_lists;
200 empty_elt_loc_lists = l;
203 /* Likewise for cselib_vals. This also frees the addr_list associated with
204 V. */
206 static void
207 unchain_one_value (v)
208 cselib_val *v;
210 while (v->addr_list)
211 unchain_one_elt_list (&v->addr_list);
213 v->u.next_free = empty_vals;
214 empty_vals = v;
217 /* Remove all entries from the hash table. Also used during
218 initialization. If CLEAR_ALL isn't set, then only clear the entries
219 which are known to have been used. */
221 static void
222 clear_table (clear_all)
223 int clear_all;
225 unsigned int i;
227 if (clear_all)
228 for (i = 0; i < cselib_nregs; i++)
229 REG_VALUES (i) = 0;
230 else
231 for (i = 0; i < VARRAY_ACTIVE_SIZE (used_regs); i++)
232 REG_VALUES (VARRAY_UINT (used_regs, i)) = 0;
234 max_value_regs = 0;
236 VARRAY_POP_ALL (used_regs);
238 htab_empty (hash_table);
239 obstack_free (&cselib_obstack, cselib_startobj);
241 empty_vals = 0;
242 empty_elt_lists = 0;
243 empty_elt_loc_lists = 0;
244 n_useless_values = 0;
246 next_unknown_value = 0;
249 /* The equality test for our hash table. The first argument ENTRY is a table
250 element (i.e. a cselib_val), while the second arg X is an rtx. We know
251 that all callers of htab_find_slot_with_hash will wrap CONST_INTs into a
252 CONST of an appropriate mode. */
254 static int
255 entry_and_rtx_equal_p (entry, x_arg)
256 const void *entry, *x_arg;
258 struct elt_loc_list *l;
259 const cselib_val *v = (const cselib_val *) entry;
260 rtx x = (rtx) x_arg;
261 enum machine_mode mode = GET_MODE (x);
263 if (GET_CODE (x) == CONST_INT
264 || (mode == VOIDmode && GET_CODE (x) == CONST_DOUBLE))
265 abort ();
266 if (mode != GET_MODE (v->u.val_rtx))
267 return 0;
269 /* Unwrap X if necessary. */
270 if (GET_CODE (x) == CONST
271 && (GET_CODE (XEXP (x, 0)) == CONST_INT
272 || GET_CODE (XEXP (x, 0)) == CONST_DOUBLE))
273 x = XEXP (x, 0);
275 /* We don't guarantee that distinct rtx's have different hash values,
276 so we need to do a comparison. */
277 for (l = v->locs; l; l = l->next)
278 if (rtx_equal_for_cselib_p (l->loc, x))
279 return 1;
281 return 0;
284 /* The hash function for our hash table. The value is always computed with
285 hash_rtx when adding an element; this function just extracts the hash
286 value from a cselib_val structure. */
288 static unsigned int
289 get_value_hash (entry)
290 const void *entry;
292 const cselib_val *v = (const cselib_val *) entry;
293 return v->value;
296 /* Return true if X contains a VALUE rtx. If ONLY_USELESS is set, we
297 only return true for values which point to a cselib_val whose value
298 element has been set to zero, which implies the cselib_val will be
299 removed. */
301 int
302 references_value_p (x, only_useless)
303 rtx x;
304 int only_useless;
306 enum rtx_code code = GET_CODE (x);
307 const char *fmt = GET_RTX_FORMAT (code);
308 int i, j;
310 if (GET_CODE (x) == VALUE
311 && (! only_useless || CSELIB_VAL_PTR (x)->locs == 0))
312 return 1;
314 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
316 if (fmt[i] == 'e' && references_value_p (XEXP (x, i), only_useless))
317 return 1;
318 else if (fmt[i] == 'E')
319 for (j = 0; j < XVECLEN (x, i); j++)
320 if (references_value_p (XVECEXP (x, i, j), only_useless))
321 return 1;
324 return 0;
327 /* For all locations found in X, delete locations that reference useless
328 values (i.e. values without any location). Called through
329 htab_traverse. */
331 static int
332 discard_useless_locs (x, info)
333 void **x;
334 void *info ATTRIBUTE_UNUSED;
336 cselib_val *v = (cselib_val *)*x;
337 struct elt_loc_list **p = &v->locs;
338 int had_locs = v->locs != 0;
340 while (*p)
342 if (references_value_p ((*p)->loc, 1))
343 unchain_one_elt_loc_list (p);
344 else
345 p = &(*p)->next;
348 if (had_locs && v->locs == 0)
350 n_useless_values++;
351 values_became_useless = 1;
353 return 1;
356 /* If X is a value with no locations, remove it from the hashtable. */
358 static int
359 discard_useless_values (x, info)
360 void **x;
361 void *info ATTRIBUTE_UNUSED;
363 cselib_val *v = (cselib_val *)*x;
365 if (v->locs == 0)
367 htab_clear_slot (hash_table, x);
368 unchain_one_value (v);
369 n_useless_values--;
372 return 1;
375 /* Clean out useless values (i.e. those which no longer have locations
376 associated with them) from the hash table. */
378 static void
379 remove_useless_values ()
381 /* First pass: eliminate locations that reference the value. That in
382 turn can make more values useless. */
383   do
384     {
385       values_became_useless = 0;
386       htab_traverse (hash_table, discard_useless_locs, 0);
387     }
388   while (values_became_useless);
390 /* Second pass: actually remove the values. */
391 htab_traverse (hash_table, discard_useless_values, 0);
393 if (n_useless_values != 0)
394 abort ();
397 /* Return nonzero if we can prove that X and Y contain the same value, taking
398 our gathered information into account. */
400 int
401 rtx_equal_for_cselib_p (x, y)
402 rtx x, y;
404 enum rtx_code code;
405 const char *fmt;
406 int i;
408 if (GET_CODE (x) == REG || GET_CODE (x) == MEM)
410 cselib_val *e = cselib_lookup (x, GET_MODE (x), 0);
412 if (e)
413 x = e->u.val_rtx;
416 if (GET_CODE (y) == REG || GET_CODE (y) == MEM)
418 cselib_val *e = cselib_lookup (y, GET_MODE (y), 0);
420 if (e)
421 y = e->u.val_rtx;
424 if (x == y)
425 return 1;
427 if (GET_CODE (x) == VALUE && GET_CODE (y) == VALUE)
428 return CSELIB_VAL_PTR (x) == CSELIB_VAL_PTR (y);
430 if (GET_CODE (x) == VALUE)
432 cselib_val *e = CSELIB_VAL_PTR (x);
433 struct elt_loc_list *l;
435 for (l = e->locs; l; l = l->next)
437 rtx t = l->loc;
439 /* Avoid infinite recursion. */
440 if (GET_CODE (t) == REG || GET_CODE (t) == MEM)
441 continue;
442 else if (rtx_equal_for_cselib_p (t, y))
443 return 1;
446 return 0;
449 if (GET_CODE (y) == VALUE)
451 cselib_val *e = CSELIB_VAL_PTR (y);
452 struct elt_loc_list *l;
454 for (l = e->locs; l; l = l->next)
456 rtx t = l->loc;
458 if (GET_CODE (t) == REG || GET_CODE (t) == MEM)
459 continue;
460 else if (rtx_equal_for_cselib_p (x, t))
461 return 1;
464 return 0;
467 if (GET_CODE (x) != GET_CODE (y) || GET_MODE (x) != GET_MODE (y))
468 return 0;
470 /* This won't be handled correctly by the code below. */
471 if (GET_CODE (x) == LABEL_REF)
472 return XEXP (x, 0) == XEXP (y, 0);
474 code = GET_CODE (x);
475 fmt = GET_RTX_FORMAT (code);
477 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
479 int j;
481 switch (fmt[i])
483 case 'w':
484 if (XWINT (x, i) != XWINT (y, i))
485 return 0;
486 break;
488 case 'n':
489 case 'i':
490 if (XINT (x, i) != XINT (y, i))
491 return 0;
492 break;
494 case 'V':
495 case 'E':
496 /* Two vectors must have the same length. */
497 if (XVECLEN (x, i) != XVECLEN (y, i))
498 return 0;
500 /* And the corresponding elements must match. */
501 for (j = 0; j < XVECLEN (x, i); j++)
502 if (! rtx_equal_for_cselib_p (XVECEXP (x, i, j),
503 XVECEXP (y, i, j)))
504 return 0;
505 break;
507 case 'e':
508 if (! rtx_equal_for_cselib_p (XEXP (x, i), XEXP (y, i)))
509 return 0;
510 break;
512 case 'S':
513 case 's':
514 if (strcmp (XSTR (x, i), XSTR (y, i)))
515 return 0;
516 break;
518 case 'u':
519 /* These are just backpointers, so they don't matter. */
520 break;
522 case '0':
523 case 't':
524 break;
526 /* It is believed that rtx's at this level will never
527 contain anything but integers and other rtx's,
528 except for within LABEL_REFs and SYMBOL_REFs. */
529 default:
530 abort ();
533 return 1;
536 /* We need to pass down the mode of constants through the hash table
537 functions. For that purpose, wrap them in a CONST of the appropriate
538 mode. */
539 static rtx
540 wrap_constant (mode, x)
541 enum machine_mode mode;
542 rtx x;
544 if (GET_CODE (x) != CONST_INT
545 && (GET_CODE (x) != CONST_DOUBLE || GET_MODE (x) != VOIDmode))
546 return x;
547 if (mode == VOIDmode)
548 abort ();
549 return gen_rtx_CONST (mode, x);
550 }
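/* Editor's sketch, not part of the original file: how the wrapping
   convention is used when probing the hash table.  cselib_lookup and
   cselib_lookup_mem below do exactly this; entry_and_rtx_equal_p relies on
   the CONST wrapper to recover the mode of a CONST_INT.  The function name
   is hypothetical and the block is guarded out of the build.  */
#if 0
static cselib_val *
example_probe (x, mode, hashval)
     rtx x;
     enum machine_mode mode;
     unsigned int hashval;
{
  void **slot = htab_find_slot_with_hash (hash_table, wrap_constant (mode, x),
                                          hashval, NO_INSERT);

  return slot ? (cselib_val *) *slot : 0;
}
#endif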
552 /* Hash an rtx. Return 0 if we couldn't hash the rtx.
553 For registers and memory locations, we look up their cselib_val structure
554 and return its VALUE element.
555 Possible reasons for returning 0 are: the object is volatile, or we couldn't
556 find a register or memory location in the table and CREATE is zero. If
557 CREATE is nonzero, table elts are created for regs and mem.
558 MODE is used in hashing for CONST_INTs only;
559 otherwise the mode of X is used. */
561 static unsigned int
562 hash_rtx (x, mode, create)
563 rtx x;
564 enum machine_mode mode;
565 int create;
567 cselib_val *e;
568 int i, j;
569 enum rtx_code code;
570 const char *fmt;
571 unsigned int hash = 0;
573 code = GET_CODE (x);
574 hash += (unsigned) code + (unsigned) GET_MODE (x);
576 switch (code)
578 case MEM:
579 case REG:
580 e = cselib_lookup (x, GET_MODE (x), create);
581 if (! e)
582 return 0;
584 return e->value;
586 case CONST_INT:
587 hash += ((unsigned) CONST_INT << 7) + (unsigned) mode + INTVAL (x);
588 return hash ? hash : (unsigned int) CONST_INT;
590 case CONST_DOUBLE:
591 /* This is like the general case, except that it only counts
592 the integers representing the constant. */
593 hash += (unsigned) code + (unsigned) GET_MODE (x);
594 if (GET_MODE (x) != VOIDmode)
595 for (i = 2; i < GET_RTX_LENGTH (CONST_DOUBLE); i++)
596 hash += XWINT (x, i);
597 else
598 hash += ((unsigned) CONST_DOUBLE_LOW (x)
599 + (unsigned) CONST_DOUBLE_HIGH (x));
600 return hash ? hash : (unsigned int) CONST_DOUBLE;
602 case CONST_VECTOR:
604 int units;
605 rtx elt;
607 units = CONST_VECTOR_NUNITS (x);
609 for (i = 0; i < units; ++i)
611 elt = CONST_VECTOR_ELT (x, i);
612 hash += hash_rtx (elt, GET_MODE (elt), 0);
615 return hash;
618 /* Assume there is only one rtx object for any given label. */
619 case LABEL_REF:
620 hash
621 += ((unsigned) LABEL_REF << 7) + (unsigned long) XEXP (x, 0);
622 return hash ? hash : (unsigned int) LABEL_REF;
624 case SYMBOL_REF:
625 hash
626 += ((unsigned) SYMBOL_REF << 7) + (unsigned long) XSTR (x, 0);
627 return hash ? hash : (unsigned int) SYMBOL_REF;
629 case PRE_DEC:
630 case PRE_INC:
631 case POST_DEC:
632 case POST_INC:
633 case POST_MODIFY:
634 case PRE_MODIFY:
635 case PC:
636 case CC0:
637 case CALL:
638 case UNSPEC_VOLATILE:
639 return 0;
641 case ASM_OPERANDS:
642 if (MEM_VOLATILE_P (x))
643 return 0;
645 break;
647 default:
648 break;
651 i = GET_RTX_LENGTH (code) - 1;
652 fmt = GET_RTX_FORMAT (code);
653 for (; i >= 0; i--)
655 if (fmt[i] == 'e')
657 rtx tem = XEXP (x, i);
658 unsigned int tem_hash = hash_rtx (tem, 0, create);
660 if (tem_hash == 0)
661 return 0;
663 hash += tem_hash;
665 else if (fmt[i] == 'E')
666 for (j = 0; j < XVECLEN (x, i); j++)
668 unsigned int tem_hash = hash_rtx (XVECEXP (x, i, j), 0, create);
670 if (tem_hash == 0)
671 return 0;
673 hash += tem_hash;
675 else if (fmt[i] == 's')
677 const unsigned char *p = (const unsigned char *) XSTR (x, i);
679 if (p)
680 while (*p)
681 hash += *p++;
683 else if (fmt[i] == 'i')
684 hash += XINT (x, i);
685 else if (fmt[i] == '0' || fmt[i] == 't')
686 /* unused */;
687 else
688 abort ();
691 return hash ? hash : 1 + (unsigned int) GET_CODE (x);
694 /* Create a new value structure for VALUE and initialize it. The mode of the
695 value is MODE. */
697 static cselib_val *
698 new_cselib_val (value, mode)
699 unsigned int value;
700 enum machine_mode mode;
702 cselib_val *e = empty_vals;
704 if (e)
705 empty_vals = e->u.next_free;
706 else
707 e = (cselib_val *) obstack_alloc (&cselib_obstack, sizeof (cselib_val));
709 if (value == 0)
710 abort ();
712 e->value = value;
713 e->u.val_rtx = gen_rtx_VALUE (mode);
714 CSELIB_VAL_PTR (e->u.val_rtx) = e;
715 e->addr_list = 0;
716 e->locs = 0;
717 return e;
720 /* ADDR_ELT is a value that is used as address. MEM_ELT is the value that
721 contains the data at this address. X is a MEM that represents the
722 value. Update the two value structures to represent this situation. */
724 static void
725 add_mem_for_addr (addr_elt, mem_elt, x)
726 cselib_val *addr_elt, *mem_elt;
727 rtx x;
729 struct elt_loc_list *l;
731 /* Avoid duplicates. */
732 for (l = mem_elt->locs; l; l = l->next)
733 if (GET_CODE (l->loc) == MEM
734 && CSELIB_VAL_PTR (XEXP (l->loc, 0)) == addr_elt)
735 return;
737 addr_elt->addr_list = new_elt_list (addr_elt->addr_list, mem_elt);
738 mem_elt->locs
739 = new_elt_loc_list (mem_elt->locs,
740 replace_equiv_address_nv (x, addr_elt->u.val_rtx));
743 /* Subroutine of cselib_lookup. Return a value for X, which is a MEM rtx.
744 If CREATE, make a new one if we haven't seen it before. */
746 static cselib_val *
747 cselib_lookup_mem (x, create)
748 rtx x;
749 int create;
751 enum machine_mode mode = GET_MODE (x);
752 void **slot;
753 cselib_val *addr;
754 cselib_val *mem_elt;
755 struct elt_list *l;
757 if (MEM_VOLATILE_P (x) || mode == BLKmode
758 || (FLOAT_MODE_P (mode) && flag_float_store))
759 return 0;
761 /* Look up the value for the address. */
762 addr = cselib_lookup (XEXP (x, 0), mode, create);
763 if (! addr)
764 return 0;
766 /* Find a value that describes a value of our mode at that address. */
767 for (l = addr->addr_list; l; l = l->next)
768 if (GET_MODE (l->elt->u.val_rtx) == mode)
769 return l->elt;
771 if (! create)
772 return 0;
774 mem_elt = new_cselib_val (++next_unknown_value, mode);
775 add_mem_for_addr (addr, mem_elt, x);
776 slot = htab_find_slot_with_hash (hash_table, wrap_constant (mode, x),
777 mem_elt->value, INSERT);
778 *slot = mem_elt;
779 return mem_elt;
782 /* Walk rtx X and replace all occurrences of REG and MEM subexpressions
783 with VALUE expressions. This way, it becomes independent of changes
784 to registers and memory.
785 X isn't actually modified; if modifications are needed, new rtl is
786 allocated. However, the return value can share rtl with X. */
788 rtx
789 cselib_subst_to_values (x)
790 rtx x;
792 enum rtx_code code = GET_CODE (x);
793 const char *fmt = GET_RTX_FORMAT (code);
794 cselib_val *e;
795 struct elt_list *l;
796 rtx copy = x;
797 int i;
799 switch (code)
801 case REG:
802 for (l = REG_VALUES (REGNO (x)); l; l = l->next)
803 if (GET_MODE (l->elt->u.val_rtx) == GET_MODE (x))
804 return l->elt->u.val_rtx;
806 abort ();
808 case MEM:
809 e = cselib_lookup_mem (x, 0);
810 if (! e)
812 /* This happens for autoincrements. Assign a value that doesn't
813 match any other. */
814 e = new_cselib_val (++next_unknown_value, GET_MODE (x));
816 return e->u.val_rtx;
818 case CONST_DOUBLE:
819 case CONST_VECTOR:
820 case CONST_INT:
821 return x;
823 case POST_INC:
824 case PRE_INC:
825 case POST_DEC:
826 case PRE_DEC:
827 case POST_MODIFY:
828 case PRE_MODIFY:
829 e = new_cselib_val (++next_unknown_value, GET_MODE (x));
830 return e->u.val_rtx;
832 default:
833 break;
836 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
838 if (fmt[i] == 'e')
840 rtx t = cselib_subst_to_values (XEXP (x, i));
842 if (t != XEXP (x, i) && x == copy)
843 copy = shallow_copy_rtx (x);
845 XEXP (copy, i) = t;
847 else if (fmt[i] == 'E')
849 int j, k;
851 for (j = 0; j < XVECLEN (x, i); j++)
853 rtx t = cselib_subst_to_values (XVECEXP (x, i, j));
855 if (t != XVECEXP (x, i, j) && XVEC (x, i) == XVEC (copy, i))
857 if (x == copy)
858 copy = shallow_copy_rtx (x);
860 XVEC (copy, i) = rtvec_alloc (XVECLEN (x, i));
861 for (k = 0; k < j; k++)
862 XVECEXP (copy, i, k) = XVECEXP (x, i, k);
865 XVECEXP (copy, i, j) = t;
870 return copy;
873 /* Look up the rtl expression X in our tables and return the value it has.
874 If CREATE is zero, we return NULL if we don't know the value. Otherwise,
875 we create a new one if possible, using mode MODE if X doesn't have a mode
876 (i.e. because it's a constant). */
878 cselib_val *
879 cselib_lookup (x, mode, create)
880 rtx x;
881 enum machine_mode mode;
882 int create;
884 void **slot;
885 cselib_val *e;
886 unsigned int hashval;
888 if (GET_MODE (x) != VOIDmode)
889 mode = GET_MODE (x);
891 if (GET_CODE (x) == VALUE)
892 return CSELIB_VAL_PTR (x);
894 if (GET_CODE (x) == REG)
896 struct elt_list *l;
897 unsigned int i = REGNO (x);
899 for (l = REG_VALUES (i); l; l = l->next)
900 if (mode == GET_MODE (l->elt->u.val_rtx))
901 return l->elt;
903 if (! create)
904 return 0;
906 if (i < FIRST_PSEUDO_REGISTER)
908 unsigned int n = HARD_REGNO_NREGS (i, mode);
910 if (n > max_value_regs)
911 max_value_regs = n;
914 e = new_cselib_val (++next_unknown_value, GET_MODE (x));
915 e->locs = new_elt_loc_list (e->locs, x);
916 if (REG_VALUES (i) == 0)
917 VARRAY_PUSH_UINT (used_regs, i);
918 REG_VALUES (i) = new_elt_list (REG_VALUES (i), e);
919 slot = htab_find_slot_with_hash (hash_table, x, e->value, INSERT);
920 *slot = e;
921 return e;
924 if (GET_CODE (x) == MEM)
925 return cselib_lookup_mem (x, create);
927 hashval = hash_rtx (x, mode, create);
928 /* Can't even create if hashing is not possible. */
929 if (! hashval)
930 return 0;
932 slot = htab_find_slot_with_hash (hash_table, wrap_constant (mode, x),
933 hashval, create ? INSERT : NO_INSERT);
934 if (slot == 0)
935 return 0;
937 e = (cselib_val *) *slot;
938 if (e)
939 return e;
941 e = new_cselib_val (hashval, mode);
943 /* We have to fill the slot before calling cselib_subst_to_values:
944 the hash table is inconsistent until we do so, and
945 cselib_subst_to_values will need to do lookups. */
946 *slot = (void *) e;
947 e->locs = new_elt_loc_list (e->locs, cselib_subst_to_values (x));
948 return e;
951 /* Invalidate any entries in reg_values that overlap REGNO. This is called
952 if REGNO is changing. MODE is the mode of the assignment to REGNO, which
953 is used to determine how many hard registers are being changed. If MODE
954 is VOIDmode, then only REGNO is being changed; this is used when
955 invalidating call clobbered registers across a call. */
957 static void
958 cselib_invalidate_regno (regno, mode)
959 unsigned int regno;
960 enum machine_mode mode;
962 unsigned int endregno;
963 unsigned int i;
965 /* If we see pseudos after reload, something is _wrong_. */
966 if (reload_completed && regno >= FIRST_PSEUDO_REGISTER
967 && reg_renumber[regno] >= 0)
968 abort ();
970 /* Determine the range of registers that must be invalidated. For
971 pseudos, only REGNO is affected. For hard regs, we must take MODE
972 into account, and we must also invalidate lower register numbers
973 if they contain values that overlap REGNO. */
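  /* Editor's note, an illustrative example not in the original source: with
     max_value_regs == 2, invalidating hard reg 5 in DImode on a target where
     HARD_REGNO_NREGS (5, DImode) == 2 makes the loop below scan regs 3
     through 6, so a DImode value previously recorded in regs 4-5 is also
     removed, because its last hard reg (this_last == 5) reaches REGNO.  */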
974 if (regno < FIRST_PSEUDO_REGISTER && mode != VOIDmode)
976 if (regno < max_value_regs)
977 i = 0;
978 else
979 i = regno - max_value_regs;
981 endregno = regno + HARD_REGNO_NREGS (regno, mode);
983 else
985 i = regno;
986 endregno = regno + 1;
989 for (; i < endregno; i++)
991 struct elt_list **l = &REG_VALUES (i);
993 /* Go through all known values for this reg; if it overlaps the range
994 we're invalidating, remove the value. */
995 while (*l)
997 cselib_val *v = (*l)->elt;
998 struct elt_loc_list **p;
999 unsigned int this_last = i;
1001 if (i < FIRST_PSEUDO_REGISTER)
1002 this_last += HARD_REGNO_NREGS (i, GET_MODE (v->u.val_rtx)) - 1;
1004 if (this_last < regno)
1006 l = &(*l)->next;
1007 continue;
1010 /* We have an overlap. */
1011 unchain_one_elt_list (l);
1013 /* Now, we clear the mapping from value to reg. It must exist, so
1014 this code will crash intentionally if it doesn't. */
1015 for (p = &v->locs; ; p = &(*p)->next)
1017 rtx x = (*p)->loc;
1019 if (GET_CODE (x) == REG && REGNO (x) == i)
1021 unchain_one_elt_loc_list (p);
1022 break;
1025 if (v->locs == 0)
1026 n_useless_values++;
1031 /* The memory at address MEM_BASE is being changed.
1032 Return whether this change will invalidate VAL. */
1034 static int
1035 cselib_mem_conflict_p (mem_base, val)
1036 rtx mem_base;
1037 rtx val;
1039 enum rtx_code code;
1040 const char *fmt;
1041 int i, j;
1043 code = GET_CODE (val);
1044 switch (code)
1046 /* Get rid of a few simple cases quickly. */
1047 case REG:
1048 case PC:
1049 case CC0:
1050 case SCRATCH:
1051 case CONST:
1052 case CONST_INT:
1053 case CONST_DOUBLE:
1054 case CONST_VECTOR:
1055 case SYMBOL_REF:
1056 case LABEL_REF:
1057 return 0;
1059 case MEM:
1060 if (GET_MODE (mem_base) == BLKmode
1061 || GET_MODE (val) == BLKmode
1062 || anti_dependence (val, mem_base))
1063 return 1;
1065 /* The address may contain nested MEMs. */
1066 break;
1068 default:
1069 break;
1072 fmt = GET_RTX_FORMAT (code);
1073 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1075 if (fmt[i] == 'e')
1077 if (cselib_mem_conflict_p (mem_base, XEXP (val, i)))
1078 return 1;
1080 else if (fmt[i] == 'E')
1081 for (j = 0; j < XVECLEN (val, i); j++)
1082 if (cselib_mem_conflict_p (mem_base, XVECEXP (val, i, j)))
1083 return 1;
1086 return 0;
1089 /* For the value found in SLOT, walk its locations to determine if any overlap
1090 INFO (which is a MEM rtx). */
1092 static int
1093 cselib_invalidate_mem_1 (slot, info)
1094 void **slot;
1095 void *info;
1097 cselib_val *v = (cselib_val *) *slot;
1098 rtx mem_rtx = (rtx) info;
1099 struct elt_loc_list **p = &v->locs;
1100 int had_locs = v->locs != 0;
1102 while (*p)
1104 rtx x = (*p)->loc;
1105 cselib_val *addr;
1106 struct elt_list **mem_chain;
1108 /* MEMs may occur in locations only at the top level; below
1109 that every MEM or REG is substituted by its VALUE. */
1110 if (GET_CODE (x) != MEM
1111 || ! cselib_mem_conflict_p (mem_rtx, x))
1113 p = &(*p)->next;
1114 continue;
1117 /* This one overlaps. */
1118 /* We must have a mapping from this MEM's address to the
1119 value (E). Remove that, too. */
1120 addr = cselib_lookup (XEXP (x, 0), VOIDmode, 0);
1121 mem_chain = &addr->addr_list;
1122 for (;;)
1124 if ((*mem_chain)->elt == v)
1126 unchain_one_elt_list (mem_chain);
1127 break;
1130 mem_chain = &(*mem_chain)->next;
1133 unchain_one_elt_loc_list (p);
1136 if (had_locs && v->locs == 0)
1137 n_useless_values++;
1139 return 1;
1142 /* Invalidate any locations in the table which are changed because of a
1143 store to MEM_RTX. If this is called because of a non-const call
1144 instruction, MEM_RTX is (mem:BLK const0_rtx). */
1146 static void
1147 cselib_invalidate_mem (mem_rtx)
1148 rtx mem_rtx;
1150 htab_traverse (hash_table, cselib_invalidate_mem_1, mem_rtx);
1153 /* Invalidate DEST, which is being assigned to or clobbered. The second and
1154 the third parameter exist so that this function can be passed to
1155 note_stores; they are ignored. */
1157 static void
1158 cselib_invalidate_rtx (dest, ignore, data)
1159 rtx dest;
1160 rtx ignore ATTRIBUTE_UNUSED;
1161 void *data ATTRIBUTE_UNUSED;
1163 while (GET_CODE (dest) == STRICT_LOW_PART || GET_CODE (dest) == SIGN_EXTRACT
1164 || GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == SUBREG)
1165 dest = XEXP (dest, 0);
1167 if (GET_CODE (dest) == REG)
1168 cselib_invalidate_regno (REGNO (dest), GET_MODE (dest));
1169 else if (GET_CODE (dest) == MEM)
1170 cselib_invalidate_mem (dest);
1172 /* Some machines don't define AUTO_INC_DEC, but they still use push
1173 instructions. We need to catch that case here in order to
1174 invalidate the stack pointer correctly. Note that invalidating
1175 the stack pointer is different from invalidating DEST. */
1176 if (push_operand (dest, GET_MODE (dest)))
1177 cselib_invalidate_rtx (stack_pointer_rtx, NULL_RTX, NULL);
1180 /* Record the result of a SET instruction. DEST is being set; the source
1181 contains the value described by SRC_ELT. If DEST is a MEM, DEST_ADDR_ELT
1182 describes its address. */
1184 static void
1185 cselib_record_set (dest, src_elt, dest_addr_elt)
1186 rtx dest;
1187 cselib_val *src_elt, *dest_addr_elt;
1189 int dreg = GET_CODE (dest) == REG ? (int) REGNO (dest) : -1;
1191 if (src_elt == 0 || side_effects_p (dest))
1192 return;
1194 if (dreg >= 0)
1196 if (REG_VALUES (dreg) == 0)
1197 VARRAY_PUSH_UINT (used_regs, dreg);
1199 if (dreg < FIRST_PSEUDO_REGISTER)
1201 unsigned int n = HARD_REGNO_NREGS (dreg, GET_MODE (dest));
1203 if (n > max_value_regs)
1204 max_value_regs = n;
1207 REG_VALUES (dreg) = new_elt_list (REG_VALUES (dreg), src_elt);
1208 if (src_elt->locs == 0)
1209 n_useless_values--;
1210 src_elt->locs = new_elt_loc_list (src_elt->locs, dest);
1212 else if (GET_CODE (dest) == MEM && dest_addr_elt != 0)
1214 if (src_elt->locs == 0)
1215 n_useless_values--;
1216 add_mem_for_addr (dest_addr_elt, src_elt, dest);
1220 /* Describe a single set that is part of an insn. */
1221 struct set
1223 rtx src;
1224 rtx dest;
1225 cselib_val *src_elt;
1226 cselib_val *dest_addr_elt;
1229 /* There is no good way to determine how many elements there can be
1230 in a PARALLEL. Since it's fairly cheap, use a really large number. */
1231 #define MAX_SETS (FIRST_PSEUDO_REGISTER * 2)
1233 /* Record the effects of any sets in INSN. */
1234 static void
1235 cselib_record_sets (insn)
1236 rtx insn;
1238 int n_sets = 0;
1239 int i;
1240 struct set sets[MAX_SETS];
1241 rtx body = PATTERN (insn);
1242 rtx cond = 0;
1244 body = PATTERN (insn);
1245 if (GET_CODE (body) == COND_EXEC)
1247 cond = COND_EXEC_TEST (body);
1248 body = COND_EXEC_CODE (body);
1251 /* Find all sets. */
1252 if (GET_CODE (body) == SET)
1254 sets[0].src = SET_SRC (body);
1255 sets[0].dest = SET_DEST (body);
1256 n_sets = 1;
1258 else if (GET_CODE (body) == PARALLEL)
1260 /* Look through the PARALLEL and record the values being
1261 set, if possible. Also handle any CLOBBERs. */
1262 for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
1264 rtx x = XVECEXP (body, 0, i);
1266 if (GET_CODE (x) == SET)
1268 sets[n_sets].src = SET_SRC (x);
1269 sets[n_sets].dest = SET_DEST (x);
1270 n_sets++;
1275 /* Look up the values that are read. Do this before invalidating the
1276 locations that are written. */
1277 for (i = 0; i < n_sets; i++)
1279 rtx dest = sets[i].dest;
1281 /* A STRICT_LOW_PART can be ignored; we'll record the equivalence for
1282 the low part after invalidating any knowledge about larger modes. */
1283 if (GET_CODE (sets[i].dest) == STRICT_LOW_PART)
1284 sets[i].dest = dest = XEXP (dest, 0);
1286 /* We don't know how to record anything but REG or MEM. */
1287 if (GET_CODE (dest) == REG || GET_CODE (dest) == MEM)
1289 rtx src = sets[i].src;
1290 if (cond)
1291 src = gen_rtx_IF_THEN_ELSE (GET_MODE (src), cond, src, dest);
1292 sets[i].src_elt = cselib_lookup (src, GET_MODE (dest), 1);
1293 if (GET_CODE (dest) == MEM)
1294 sets[i].dest_addr_elt = cselib_lookup (XEXP (dest, 0), Pmode, 1);
1295 else
1296 sets[i].dest_addr_elt = 0;
1300 /* Invalidate all locations written by this insn. Note that the elts we
1301 looked up in the previous loop aren't affected, just some of their
1302 locations may go away. */
1303 note_stores (body, cselib_invalidate_rtx, NULL);
1305 /* Now enter the equivalences in our tables. */
1306 for (i = 0; i < n_sets; i++)
1308 rtx dest = sets[i].dest;
1309 if (GET_CODE (dest) == REG || GET_CODE (dest) == MEM)
1310 cselib_record_set (dest, sets[i].src_elt, sets[i].dest_addr_elt);
1314 /* Record the effects of INSN. */
1316 void
1317 cselib_process_insn (insn)
1318 rtx insn;
1320 int i;
1321 rtx x;
1323 cselib_current_insn = insn;
1325 /* Forget everything at a CODE_LABEL, a volatile asm, or a setjmp. */
1326 if (GET_CODE (insn) == CODE_LABEL
1327 || (GET_CODE (insn) == CALL_INSN
1328 && find_reg_note (insn, REG_SETJMP, NULL))
1329 || (GET_CODE (insn) == INSN
1330 && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
1331 && MEM_VOLATILE_P (PATTERN (insn))))
1333 clear_table (0);
1334 return;
1337 if (! INSN_P (insn))
1339 cselib_current_insn = 0;
1340 return;
1343 /* If this is a call instruction, forget anything stored in a
1344 call clobbered register, or, if this is not a const call, in
1345 memory. */
1346 if (GET_CODE (insn) == CALL_INSN)
1348 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1349 if (call_used_regs[i])
1350 cselib_invalidate_regno (i, VOIDmode);
1352 if (! CONST_OR_PURE_CALL_P (insn))
1353 cselib_invalidate_mem (callmem);
1356 cselib_record_sets (insn);
1358 #ifdef AUTO_INC_DEC
1359 /* Clobber any registers which appear in REG_INC notes. We
1360 could keep track of the changes to their values, but it is
1361 unlikely to help. */
1362 for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
1363 if (REG_NOTE_KIND (x) == REG_INC)
1364 cselib_invalidate_rtx (XEXP (x, 0), NULL_RTX, NULL);
1365 #endif
1367 /* Look for any CLOBBERs in CALL_INSN_FUNCTION_USAGE, but only
1368 after we have processed the insn. */
1369 if (GET_CODE (insn) == CALL_INSN)
1370 for (x = CALL_INSN_FUNCTION_USAGE (insn); x; x = XEXP (x, 1))
1371 if (GET_CODE (XEXP (x, 0)) == CLOBBER)
1372 cselib_invalidate_rtx (XEXP (XEXP (x, 0), 0), NULL_RTX, NULL);
1374 cselib_current_insn = 0;
1376 if (n_useless_values > MAX_USELESS_VALUES)
1377 remove_useless_values ();
1380 /* Make sure our varrays are big enough. Not called from any cselib routines;
1381 it must be called by the user if it allocated new registers. */
1383 void
1384 cselib_update_varray_sizes ()
1386 unsigned int nregs = max_reg_num ();
1388 if (nregs == cselib_nregs)
1389 return;
1391 cselib_nregs = nregs;
1392 VARRAY_GROW (reg_values, nregs);
1393 VARRAY_GROW (used_regs, nregs);
1396 /* Initialize cselib for one pass. The caller must also call
1397 init_alias_analysis. */
1399 void
1400 cselib_init ()
1402 /* These are only created once. */
1403 if (! callmem)
1405 gcc_obstack_init (&cselib_obstack);
1406 cselib_startobj = obstack_alloc (&cselib_obstack, 0);
1408 callmem = gen_rtx_MEM (BLKmode, const0_rtx);
1409 ggc_add_rtx_root (&callmem, 1);
1412 cselib_nregs = max_reg_num ();
1413 VARRAY_ELT_LIST_INIT (reg_values, cselib_nregs, "reg_values");
1414 VARRAY_UINT_INIT (used_regs, cselib_nregs, "used_regs");
1415 hash_table = htab_create (31, get_value_hash, entry_and_rtx_equal_p, NULL);
1416 clear_table (1);
1419 /* Called when the current user is done with cselib. */
1421 void
1422 cselib_finish ()
1424 clear_table (0);
1425 VARRAY_FREE (reg_values);
1426 VARRAY_FREE (used_regs);
1427 htab_delete (hash_table);
1428 }
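/* Editor's sketch, not part of the original file: the calling sequence a
   pass is expected to use, pieced together from the comments above (the
   caller must run alias analysis, may call cselib_update_varray_sizes if it
   creates new registers, and must finish with cselib_finish).  The function
   name is hypothetical; init_alias_analysis and end_alias_analysis are
   GCC's alias-analysis entry points, assumed here rather than shown in this
   file.  */
#if 0
static void
example_cselib_client ()
{
  rtx insn;

  init_alias_analysis ();
  cselib_init ();

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    cselib_process_insn (insn);

  cselib_finish ();
  end_alias_analysis ();
}
#endif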