gcc/cselib.c
1 /* Common subexpression elimination library for GNU compiler.
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
27 #include "rtl.h"
28 #include "tm_p.h"
29 #include "regs.h"
30 #include "hard-reg-set.h"
31 #include "flags.h"
32 #include "real.h"
33 #include "insn-config.h"
34 #include "recog.h"
35 #include "function.h"
36 #include "expr.h"
37 #include "toplev.h"
38 #include "output.h"
39 #include "ggc.h"
40 #include "hashtab.h"
41 #include "cselib.h"
43 static int entry_and_rtx_equal_p PARAMS ((const void *, const void *));
44 static hashval_t get_value_hash PARAMS ((const void *));
45 static struct elt_list *new_elt_list PARAMS ((struct elt_list *,
46 cselib_val *));
47 static struct elt_loc_list *new_elt_loc_list PARAMS ((struct elt_loc_list *,
48 rtx));
49 static void unchain_one_value PARAMS ((cselib_val *));
50 static void unchain_one_elt_list PARAMS ((struct elt_list **));
51 static void unchain_one_elt_loc_list PARAMS ((struct elt_loc_list **));
52 static void clear_table PARAMS ((int));
53 static int discard_useless_locs PARAMS ((void **, void *));
54 static int discard_useless_values PARAMS ((void **, void *));
55 static void remove_useless_values PARAMS ((void));
56 static rtx wrap_constant PARAMS ((enum machine_mode, rtx));
57 static unsigned int hash_rtx PARAMS ((rtx, enum machine_mode, int));
58 static cselib_val *new_cselib_val PARAMS ((unsigned int,
59 enum machine_mode));
60 static void add_mem_for_addr PARAMS ((cselib_val *, cselib_val *,
61 rtx));
62 static cselib_val *cselib_lookup_mem PARAMS ((rtx, int));
63 static void cselib_invalidate_regno PARAMS ((unsigned int,
64 enum machine_mode));
65 static int cselib_mem_conflict_p PARAMS ((rtx, rtx));
66 static int cselib_invalidate_mem_1 PARAMS ((void **, void *));
67 static void cselib_invalidate_mem PARAMS ((rtx));
68 static void cselib_invalidate_rtx PARAMS ((rtx, rtx, void *));
69 static void cselib_record_set PARAMS ((rtx, cselib_val *,
70 cselib_val *));
71 static void cselib_record_sets PARAMS ((rtx));
73 /* There are three ways in which cselib can look up an rtx:
74 - for a REG, the reg_values table (which is indexed by regno) is used
75 - for a MEM, we recursively look up its address and then follow the
76 addr_list of that value
77 - for everything else, we compute a hash value and go through the hash
78 table. Since different rtx's can still have the same hash value,
79 this involves walking the table entries for a given value and comparing
80 the locations of the entries with the rtx we are looking up. */
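/* For illustration (the register number and modes are arbitrary): to look
   up (plus:SI (reg:SI 6) (const_int 16)), the REG operand is first
   resolved through REG_VALUES (6) and its value number feeds into the
   hash of the PLUS; the hash selects a chain in the hash table, and each
   entry on that chain is compared against the PLUS with
   rtx_equal_for_cselib_p, since distinct rtx's may share a hash value.  */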
82 /* A table that enables us to look up elts by their value. */
83 static GTY((param_is (cselib_val))) htab_t hash_table;
85 /* This is a global so we don't have to pass this through every function.
86 It is used in new_elt_loc_list to set SETTING_INSN. */
87 static rtx cselib_current_insn;
89 /* Every new unknown value gets a unique number. */
90 static unsigned int next_unknown_value;
92 /* The number of registers we had when the varrays were last resized. */
93 static unsigned int cselib_nregs;
95 /* Count values without known locations. Whenever this grows too big, we
96 remove these useless values from the table. */
97 static int n_useless_values;
99 /* Number of useless values before we remove them from the hash table. */
100 #define MAX_USELESS_VALUES 32
102 /* This table maps from register number to values. It does not contain
103 pointers to cselib_val structures, but rather elt_lists. The purpose is
104 to be able to refer to the same register in different modes. */
105 static GTY(()) varray_type reg_values;
106 static GTY((deletable (""))) varray_type reg_values_old;
107 #define REG_VALUES(I) VARRAY_ELT_LIST (reg_values, (I))
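/* As an example of why elt_lists are needed here (register number and
   modes are only illustrative): if register 0 has been accessed both as
   (reg:SI 0) and as (reg:QI 0), REG_VALUES (0) is a two-element list
   whose elements point to one cselib_val per mode; a lookup walks the
   list until it finds an entry whose VALUE rtx has the requested mode.  */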
109 /* The largest number of hard regs used by any entry added to the
110 REG_VALUES table. Cleared on each clear_table() invocation. */
111 static unsigned int max_value_regs;
113 /* Here the set of indices I with REG_VALUES(I) != 0 is saved. This is used
114 in clear_table() for fast emptying. */
115 static GTY(()) varray_type used_regs;
116 static GTY((deletable (""))) varray_type used_regs_old;
118 /* We pass this to cselib_invalidate_mem to invalidate all of
119 memory for a non-const call instruction. */
120 static GTY(()) rtx callmem;
122 /* Caches for unused structures. */
123 static GTY((deletable (""))) cselib_val *empty_vals;
124 static GTY((deletable (""))) struct elt_list *empty_elt_lists;
125 static GTY((deletable (""))) struct elt_loc_list *empty_elt_loc_lists;
127 /* Set by discard_useless_locs if it deleted the last location of any
128 value. */
129 static int values_became_useless;
132 /* Allocate a struct elt_list and fill in its two elements with the
133 arguments. */
135 static struct elt_list *
136 new_elt_list (next, elt)
137 struct elt_list *next;
138 cselib_val *elt;
140 struct elt_list *el = empty_elt_lists;
142 if (el)
143 empty_elt_lists = el->next;
144 else
145 el = (struct elt_list *) ggc_alloc (sizeof (struct elt_list));
146 el->next = next;
147 el->elt = elt;
148 return el;
151 /* Allocate a struct elt_loc_list and fill in its two elements with the
152 arguments. */
154 static struct elt_loc_list *
155 new_elt_loc_list (next, loc)
156 struct elt_loc_list *next;
157 rtx loc;
159 struct elt_loc_list *el = empty_elt_loc_lists;
161 if (el)
162 empty_elt_loc_lists = el->next;
163 else
164 el = (struct elt_loc_list *) ggc_alloc (sizeof (struct elt_loc_list));
165 el->next = next;
166 el->loc = loc;
167 el->setting_insn = cselib_current_insn;
168 return el;
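/* Note that new_elt_list and new_elt_loc_list never free memory: a cell
   is taken from its empty_* free list when possible, and the unchain_*
   routines below push cells back onto those lists.  The GTY deletable
   markers on the free lists allow the garbage collector to discard the
   cached cells instead of keeping them live across collections.  */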
171 /* The elt_list at *PL is no longer needed. Unchain it and free its
172 storage. */
174 static void
175 unchain_one_elt_list (pl)
176 struct elt_list **pl;
178 struct elt_list *l = *pl;
180 *pl = l->next;
181 l->next = empty_elt_lists;
182 empty_elt_lists = l;
185 /* Likewise for elt_loc_lists. */
187 static void
188 unchain_one_elt_loc_list (pl)
189 struct elt_loc_list **pl;
191 struct elt_loc_list *l = *pl;
193 *pl = l->next;
194 l->next = empty_elt_loc_lists;
195 empty_elt_loc_lists = l;
198 /* Likewise for cselib_vals. This also frees the addr_list associated with
199 V. */
201 static void
202 unchain_one_value (v)
203 cselib_val *v;
205 while (v->addr_list)
206 unchain_one_elt_list (&v->addr_list);
208 v->u.next_free = empty_vals;
209 empty_vals = v;
212 /* Remove all entries from the hash table. Also used during
213 initialization. If CLEAR_ALL isn't set, then only clear the entries
214 which are known to have been used. */
216 static void
217 clear_table (clear_all)
218 int clear_all;
220 unsigned int i;
222 if (clear_all)
223 for (i = 0; i < cselib_nregs; i++)
224 REG_VALUES (i) = 0;
225 else
226 for (i = 0; i < VARRAY_ACTIVE_SIZE (used_regs); i++)
227 REG_VALUES (VARRAY_UINT (used_regs, i)) = 0;
229 max_value_regs = 0;
231 VARRAY_POP_ALL (used_regs);
233 htab_empty (hash_table);
235 n_useless_values = 0;
237 next_unknown_value = 0;
240 /* The equality test for our hash table. The first argument ENTRY is a table
241 element (i.e. a cselib_val), while the second arg X is an rtx. We know
242 that all callers of htab_find_slot_with_hash will wrap CONST_INTs into a
243 CONST of an appropriate mode. */
245 static int
246 entry_and_rtx_equal_p (entry, x_arg)
247 const void *entry, *x_arg;
249 struct elt_loc_list *l;
250 const cselib_val *v = (const cselib_val *) entry;
251 rtx x = (rtx) x_arg;
252 enum machine_mode mode = GET_MODE (x);
254 if (GET_CODE (x) == CONST_INT
255 || (mode == VOIDmode && GET_CODE (x) == CONST_DOUBLE))
256 abort ();
257 if (mode != GET_MODE (v->u.val_rtx))
258 return 0;
260 /* Unwrap X if necessary. */
261 if (GET_CODE (x) == CONST
262 && (GET_CODE (XEXP (x, 0)) == CONST_INT
263 || GET_CODE (XEXP (x, 0)) == CONST_DOUBLE))
264 x = XEXP (x, 0);
266 /* We don't guarantee that distinct rtx's have different hash values,
267 so we need to do a comparison. */
268 for (l = v->locs; l; l = l->next)
269 if (rtx_equal_for_cselib_p (l->loc, x))
270 return 1;
272 return 0;
275 /* The hash function for our hash table. The value is always computed with
276 hash_rtx when adding an element; this function just extracts the hash
277 value from a cselib_val structure. */
279 static hashval_t
280 get_value_hash (entry)
281 const void *entry;
283 const cselib_val *v = (const cselib_val *) entry;
284 return v->value;
287 /* Return true if X contains a VALUE rtx. If ONLY_USELESS is set, we
288 only return true for values which point to a cselib_val whose value
289 element has been set to zero, which implies the cselib_val will be
290 removed. */
292 int
293 references_value_p (x, only_useless)
294 rtx x;
295 int only_useless;
297 enum rtx_code code = GET_CODE (x);
298 const char *fmt = GET_RTX_FORMAT (code);
299 int i, j;
301 if (GET_CODE (x) == VALUE
302 && (! only_useless || CSELIB_VAL_PTR (x)->locs == 0))
303 return 1;
305 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
307 if (fmt[i] == 'e' && references_value_p (XEXP (x, i), only_useless))
308 return 1;
309 else if (fmt[i] == 'E')
310 for (j = 0; j < XVECLEN (x, i); j++)
311 if (references_value_p (XVECEXP (x, i, j), only_useless))
312 return 1;
315 return 0;
318 /* For all locations found in X, delete locations that reference useless
319 values (i.e. values without any location). Called through
320 htab_traverse. */
322 static int
323 discard_useless_locs (x, info)
324 void **x;
325 void *info ATTRIBUTE_UNUSED;
327 cselib_val *v = (cselib_val *)*x;
328 struct elt_loc_list **p = &v->locs;
329 int had_locs = v->locs != 0;
331 while (*p)
333 if (references_value_p ((*p)->loc, 1))
334 unchain_one_elt_loc_list (p);
335 else
336 p = &(*p)->next;
339 if (had_locs && v->locs == 0)
341 n_useless_values++;
342 values_became_useless = 1;
344 return 1;
347 /* If X is a value with no locations, remove it from the hashtable. */
349 static int
350 discard_useless_values (x, info)
351 void **x;
352 void *info ATTRIBUTE_UNUSED;
354 cselib_val *v = (cselib_val *)*x;
356 if (v->locs == 0)
358 htab_clear_slot (hash_table, x);
359 unchain_one_value (v);
360 n_useless_values--;
363 return 1;
366 /* Clean out useless values (i.e. those which no longer have locations
367 associated with them) from the hash table. */
369 static void
370 remove_useless_values ()
372 /* First pass: eliminate locations that reference the value. That in
373 turn can make more values useless. */
374 do
375 {
376 values_became_useless = 0;
377 htab_traverse (hash_table, discard_useless_locs, 0);
378 }
379 while (values_became_useless);
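/* A small example of why the loop above must iterate: suppose V2 has
   already lost all of its locations, V1's only location is
   (mem (value V2)), and V0's only location is (plus (value V1) ...).
   The first traversal discards V1's location (it references the useless
   V2), which makes V1 itself useless; only the next traversal can then
   discard V0's location, so we repeat until values_became_useless stays
   zero.  */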
381 /* Second pass: actually remove the values. */
382 htab_traverse (hash_table, discard_useless_values, 0);
384 if (n_useless_values != 0)
385 abort ();
388 /* Return nonzero if we can prove that X and Y contain the same value, taking
389 our gathered information into account. */
391 int
392 rtx_equal_for_cselib_p (x, y)
393 rtx x, y;
395 enum rtx_code code;
396 const char *fmt;
397 int i;
399 if (GET_CODE (x) == REG || GET_CODE (x) == MEM)
401 cselib_val *e = cselib_lookup (x, GET_MODE (x), 0);
403 if (e)
404 x = e->u.val_rtx;
407 if (GET_CODE (y) == REG || GET_CODE (y) == MEM)
409 cselib_val *e = cselib_lookup (y, GET_MODE (y), 0);
411 if (e)
412 y = e->u.val_rtx;
415 if (x == y)
416 return 1;
418 if (GET_CODE (x) == VALUE && GET_CODE (y) == VALUE)
419 return CSELIB_VAL_PTR (x) == CSELIB_VAL_PTR (y);
421 if (GET_CODE (x) == VALUE)
423 cselib_val *e = CSELIB_VAL_PTR (x);
424 struct elt_loc_list *l;
426 for (l = e->locs; l; l = l->next)
428 rtx t = l->loc;
430 /* Avoid infinite recursion. */
431 if (GET_CODE (t) == REG || GET_CODE (t) == MEM)
432 continue;
433 else if (rtx_equal_for_cselib_p (t, y))
434 return 1;
437 return 0;
440 if (GET_CODE (y) == VALUE)
442 cselib_val *e = CSELIB_VAL_PTR (y);
443 struct elt_loc_list *l;
445 for (l = e->locs; l; l = l->next)
447 rtx t = l->loc;
449 if (GET_CODE (t) == REG || GET_CODE (t) == MEM)
450 continue;
451 else if (rtx_equal_for_cselib_p (x, t))
452 return 1;
455 return 0;
458 if (GET_CODE (x) != GET_CODE (y) || GET_MODE (x) != GET_MODE (y))
459 return 0;
461 /* This won't be handled correctly by the code below. */
462 if (GET_CODE (x) == LABEL_REF)
463 return XEXP (x, 0) == XEXP (y, 0);
465 code = GET_CODE (x);
466 fmt = GET_RTX_FORMAT (code);
468 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
470 int j;
472 switch (fmt[i])
474 case 'w':
475 if (XWINT (x, i) != XWINT (y, i))
476 return 0;
477 break;
479 case 'n':
480 case 'i':
481 if (XINT (x, i) != XINT (y, i))
482 return 0;
483 break;
485 case 'V':
486 case 'E':
487 /* Two vectors must have the same length. */
488 if (XVECLEN (x, i) != XVECLEN (y, i))
489 return 0;
491 /* And the corresponding elements must match. */
492 for (j = 0; j < XVECLEN (x, i); j++)
493 if (! rtx_equal_for_cselib_p (XVECEXP (x, i, j),
494 XVECEXP (y, i, j)))
495 return 0;
496 break;
498 case 'e':
499 if (! rtx_equal_for_cselib_p (XEXP (x, i), XEXP (y, i)))
500 return 0;
501 break;
503 case 'S':
504 case 's':
505 if (strcmp (XSTR (x, i), XSTR (y, i)))
506 return 0;
507 break;
509 case 'u':
510 /* These are just backpointers, so they don't matter. */
511 break;
513 case '0':
514 case 't':
515 break;
517 /* It is believed that rtx's at this level will never
518 contain anything but integers and other rtx's,
519 except for within LABEL_REFs and SYMBOL_REFs. */
520 default:
521 abort ();
524 return 1;
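/* For illustration (modes and register numbers are arbitrary): if a
   previous insn stored (reg:SI 3) into (mem:SI (reg:SI 7)), both rtx's
   resolve to the same cselib_val, so
   rtx_equal_for_cselib_p ((reg:SI 3), (mem:SI (reg:SI 7))) returns 1
   even though rtx_equal_p would not.  */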
527 /* We need to pass down the mode of constants through the hash table
528 functions. For that purpose, wrap them in a CONST of the appropriate
529 mode. */
530 static rtx
531 wrap_constant (mode, x)
532 enum machine_mode mode;
533 rtx x;
535 if (GET_CODE (x) != CONST_INT
536 && (GET_CODE (x) != CONST_DOUBLE || GET_MODE (x) != VOIDmode))
537 return x;
538 if (mode == VOIDmode)
539 abort ();
540 return gen_rtx_CONST (mode, x);
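/* For example, wrapping the SImode constant (const_int 4) yields
   (const:SI (const_int 4)), so that hashing and the equality test can
   see the mode; any rtx that already carries its own mode is returned
   unchanged.  entry_and_rtx_equal_p strips the wrapper again before
   comparing locations.  */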
543 /* Hash an rtx. Return 0 if we couldn't hash the rtx.
544 For registers and memory locations, we look up their cselib_val structure
545 and return its VALUE element.
546 Possible reasons for return 0 are: the object is volatile, or we couldn't
547 find a register or memory location in the table and CREATE is zero. If
548 CREATE is nonzero, table elts are created for regs and mem.
549 MODE is used in hashing for CONST_INTs only;
550 otherwise the mode of X is used. */
552 static unsigned int
553 hash_rtx (x, mode, create)
554 rtx x;
555 enum machine_mode mode;
556 int create;
558 cselib_val *e;
559 int i, j;
560 enum rtx_code code;
561 const char *fmt;
562 unsigned int hash = 0;
564 code = GET_CODE (x);
565 hash += (unsigned) code + (unsigned) GET_MODE (x);
567 switch (code)
569 case MEM:
570 case REG:
571 e = cselib_lookup (x, GET_MODE (x), create);
572 if (! e)
573 return 0;
575 return e->value;
577 case CONST_INT:
578 hash += ((unsigned) CONST_INT << 7) + (unsigned) mode + INTVAL (x);
579 return hash ? hash : (unsigned int) CONST_INT;
581 case CONST_DOUBLE:
582 /* This is like the general case, except that it only counts
583 the integers representing the constant. */
584 hash += (unsigned) code + (unsigned) GET_MODE (x);
585 if (GET_MODE (x) != VOIDmode)
586 hash += real_hash (CONST_DOUBLE_REAL_VALUE (x));
587 else
588 hash += ((unsigned) CONST_DOUBLE_LOW (x)
589 + (unsigned) CONST_DOUBLE_HIGH (x));
590 return hash ? hash : (unsigned int) CONST_DOUBLE;
592 case CONST_VECTOR:
594 int units;
595 rtx elt;
597 units = CONST_VECTOR_NUNITS (x);
599 for (i = 0; i < units; ++i)
601 elt = CONST_VECTOR_ELT (x, i);
602 hash += hash_rtx (elt, GET_MODE (elt), 0);
605 return hash;
608 /* Assume there is only one rtx object for any given label. */
609 case LABEL_REF:
610 hash
611 += ((unsigned) LABEL_REF << 7) + (unsigned long) XEXP (x, 0);
612 return hash ? hash : (unsigned int) LABEL_REF;
614 case SYMBOL_REF:
615 hash
616 += ((unsigned) SYMBOL_REF << 7) + (unsigned long) XSTR (x, 0);
617 return hash ? hash : (unsigned int) SYMBOL_REF;
619 case PRE_DEC:
620 case PRE_INC:
621 case POST_DEC:
622 case POST_INC:
623 case POST_MODIFY:
624 case PRE_MODIFY:
625 case PC:
626 case CC0:
627 case CALL:
628 case UNSPEC_VOLATILE:
629 return 0;
631 case ASM_OPERANDS:
632 if (MEM_VOLATILE_P (x))
633 return 0;
635 break;
637 default:
638 break;
641 i = GET_RTX_LENGTH (code) - 1;
642 fmt = GET_RTX_FORMAT (code);
643 for (; i >= 0; i--)
645 if (fmt[i] == 'e')
647 rtx tem = XEXP (x, i);
648 unsigned int tem_hash = hash_rtx (tem, 0, create);
650 if (tem_hash == 0)
651 return 0;
653 hash += tem_hash;
655 else if (fmt[i] == 'E')
656 for (j = 0; j < XVECLEN (x, i); j++)
658 unsigned int tem_hash = hash_rtx (XVECEXP (x, i, j), 0, create);
660 if (tem_hash == 0)
661 return 0;
663 hash += tem_hash;
665 else if (fmt[i] == 's')
667 const unsigned char *p = (const unsigned char *) XSTR (x, i);
669 if (p)
670 while (*p)
671 hash += *p++;
673 else if (fmt[i] == 'i')
674 hash += XINT (x, i);
675 else if (fmt[i] == '0' || fmt[i] == 't')
676 /* unused */;
677 else
678 abort ();
681 return hash ? hash : 1 + (unsigned int) GET_CODE (x);
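/* An illustrative hash (the exact numbers are target-dependent): for
   (plus:SI (reg:SI 1) (const_int 8)), the REG operand contributes the
   value number of its cselib_val, the CONST_INT contributes a constant
   based on its INTVAL, and the PLUS code and SImode are folded in on
   top.  A zero returned for any operand (volatile memory, autoincrement
   addresses, unknown REGs with CREATE == 0) propagates up and makes the
   whole expression unhashable.  */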
684 /* Create a new value structure for VALUE and initialize it. The mode of the
685 value is MODE. */
687 static cselib_val *
688 new_cselib_val (value, mode)
689 unsigned int value;
690 enum machine_mode mode;
692 cselib_val *e = empty_vals;
694 if (e)
695 empty_vals = e->u.next_free;
696 else
697 e = (cselib_val *) ggc_alloc (sizeof (cselib_val));
699 if (value == 0)
700 abort ();
702 e->value = value;
703 e->u.val_rtx = gen_rtx_VALUE (mode);
704 CSELIB_VAL_PTR (e->u.val_rtx) = e;
705 e->addr_list = 0;
706 e->locs = 0;
707 return e;
710 /* ADDR_ELT is a value that is used as address. MEM_ELT is the value that
711 contains the data at this address. X is a MEM that represents the
712 value. Update the two value structures to represent this situation. */
714 static void
715 add_mem_for_addr (addr_elt, mem_elt, x)
716 cselib_val *addr_elt, *mem_elt;
717 rtx x;
719 struct elt_loc_list *l;
721 /* Avoid duplicates. */
722 for (l = mem_elt->locs; l; l = l->next)
723 if (GET_CODE (l->loc) == MEM
724 && CSELIB_VAL_PTR (XEXP (l->loc, 0)) == addr_elt)
725 return;
727 addr_elt->addr_list = new_elt_list (addr_elt->addr_list, mem_elt);
728 mem_elt->locs
729 = new_elt_loc_list (mem_elt->locs,
730 replace_equiv_address_nv (x, addr_elt->u.val_rtx));
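/* For illustration: after recording (mem:SI (reg:SI 7)), the value for
   the address (reg:SI 7) gets MEM_ELT appended to its addr_list, and
   MEM_ELT gains the location (mem:SI (value ADDR)), i.e. the original
   MEM with its address replaced by the address VALUE.  The two links let
   cselib_lookup_mem find the memory value again and let
   cselib_invalidate_mem_1 remove both directions at once.  */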
733 /* Subroutine of cselib_lookup. Return a value for X, which is a MEM rtx.
734 If CREATE, make a new one if we haven't seen it before. */
736 static cselib_val *
737 cselib_lookup_mem (x, create)
738 rtx x;
739 int create;
741 enum machine_mode mode = GET_MODE (x);
742 void **slot;
743 cselib_val *addr;
744 cselib_val *mem_elt;
745 struct elt_list *l;
747 if (MEM_VOLATILE_P (x) || mode == BLKmode
748 || (FLOAT_MODE_P (mode) && flag_float_store))
749 return 0;
751 /* Look up the value for the address. */
752 addr = cselib_lookup (XEXP (x, 0), mode, create);
753 if (! addr)
754 return 0;
756 /* Find a value that describes a value of our mode at that address. */
757 for (l = addr->addr_list; l; l = l->next)
758 if (GET_MODE (l->elt->u.val_rtx) == mode)
759 return l->elt;
761 if (! create)
762 return 0;
764 mem_elt = new_cselib_val (++next_unknown_value, mode);
765 add_mem_for_addr (addr, mem_elt, x);
766 slot = htab_find_slot_with_hash (hash_table, wrap_constant (mode, x),
767 mem_elt->value, INSERT);
768 *slot = mem_elt;
769 return mem_elt;
772 /* Walk rtx X and replace all occurrences of REG and MEM subexpressions
773 with VALUE expressions. This way, it becomes independent of changes
774 to registers and memory.
775 X isn't actually modified; if modifications are needed, new rtl is
776 allocated. However, the return value can share rtl with X. */
778 rtx
779 cselib_subst_to_values (x)
780 rtx x;
782 enum rtx_code code = GET_CODE (x);
783 const char *fmt = GET_RTX_FORMAT (code);
784 cselib_val *e;
785 struct elt_list *l;
786 rtx copy = x;
787 int i;
789 switch (code)
791 case REG:
792 for (l = REG_VALUES (REGNO (x)); l; l = l->next)
793 if (GET_MODE (l->elt->u.val_rtx) == GET_MODE (x))
794 return l->elt->u.val_rtx;
796 abort ();
798 case MEM:
799 e = cselib_lookup_mem (x, 0);
800 if (! e)
802 /* This happens for autoincrements. Assign a value that doesn't
803 match any other. */
804 e = new_cselib_val (++next_unknown_value, GET_MODE (x));
806 return e->u.val_rtx;
808 case CONST_DOUBLE:
809 case CONST_VECTOR:
810 case CONST_INT:
811 return x;
813 case POST_INC:
814 case PRE_INC:
815 case POST_DEC:
816 case PRE_DEC:
817 case POST_MODIFY:
818 case PRE_MODIFY:
819 e = new_cselib_val (++next_unknown_value, GET_MODE (x));
820 return e->u.val_rtx;
822 default:
823 break;
826 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
828 if (fmt[i] == 'e')
830 rtx t = cselib_subst_to_values (XEXP (x, i));
832 if (t != XEXP (x, i) && x == copy)
833 copy = shallow_copy_rtx (x);
835 XEXP (copy, i) = t;
837 else if (fmt[i] == 'E')
839 int j, k;
841 for (j = 0; j < XVECLEN (x, i); j++)
843 rtx t = cselib_subst_to_values (XVECEXP (x, i, j));
845 if (t != XVECEXP (x, i, j) && XVEC (x, i) == XVEC (copy, i))
847 if (x == copy)
848 copy = shallow_copy_rtx (x);
850 XVEC (copy, i) = rtvec_alloc (XVECLEN (x, i));
851 for (k = 0; k < j; k++)
852 XVECEXP (copy, i, k) = XVECEXP (x, i, k);
855 XVECEXP (copy, i, j) = t;
860 return copy;
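/* For illustration: substituting into (plus:SI (reg:SI 1) (const_int 8))
   yields (plus:SI (value:SI ...) (const_int 8)), where the VALUE is the
   one recorded for register 1.  The CONST_INT operand is unchanged, so a
   shallow copy of the PLUS is made only because the REG operand changed;
   an rtx with no REG or MEM inside it is returned as-is.  */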
863 /* Look up the rtl expression X in our tables and return the value it has.
864 If CREATE is zero, we return NULL if we don't know the value. Otherwise,
865 we create a new one if possible, using mode MODE if X doesn't have a mode
866 (i.e. because it's a constant). */
868 cselib_val *
869 cselib_lookup (x, mode, create)
870 rtx x;
871 enum machine_mode mode;
872 int create;
874 void **slot;
875 cselib_val *e;
876 unsigned int hashval;
878 if (GET_MODE (x) != VOIDmode)
879 mode = GET_MODE (x);
881 if (GET_CODE (x) == VALUE)
882 return CSELIB_VAL_PTR (x);
884 if (GET_CODE (x) == REG)
886 struct elt_list *l;
887 unsigned int i = REGNO (x);
889 for (l = REG_VALUES (i); l; l = l->next)
890 if (mode == GET_MODE (l->elt->u.val_rtx))
891 return l->elt;
893 if (! create)
894 return 0;
896 if (i < FIRST_PSEUDO_REGISTER)
898 unsigned int n = HARD_REGNO_NREGS (i, mode);
900 if (n > max_value_regs)
901 max_value_regs = n;
904 e = new_cselib_val (++next_unknown_value, GET_MODE (x));
905 e->locs = new_elt_loc_list (e->locs, x);
906 if (REG_VALUES (i) == 0)
907 VARRAY_PUSH_UINT (used_regs, i);
908 REG_VALUES (i) = new_elt_list (REG_VALUES (i), e);
909 slot = htab_find_slot_with_hash (hash_table, x, e->value, INSERT);
910 *slot = e;
911 return e;
914 if (GET_CODE (x) == MEM)
915 return cselib_lookup_mem (x, create);
917 hashval = hash_rtx (x, mode, create);
918 /* Can't even create if hashing is not possible. */
919 if (! hashval)
920 return 0;
922 slot = htab_find_slot_with_hash (hash_table, wrap_constant (mode, x),
923 hashval, create ? INSERT : NO_INSERT);
924 if (slot == 0)
925 return 0;
927 e = (cselib_val *) *slot;
928 if (e)
929 return e;
931 e = new_cselib_val (hashval, mode);
933 /* We have to fill the slot before calling cselib_subst_to_values:
934 the hash table is inconsistent until we do so, and
935 cselib_subst_to_values will need to do lookups. */
936 *slot = (void *) e;
937 e->locs = new_elt_loc_list (e->locs, cselib_subst_to_values (x));
938 return e;
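/* A typical use by a client pass (a sketch, not code from this file):

     cselib_val *v1 = cselib_lookup (x, GET_MODE (x), 0);
     cselib_val *v2 = cselib_lookup (y, GET_MODE (y), 0);
     if (v1 && v1 == v2)
       ... X and Y are known to hold the same value ...

   Passing CREATE == 0 only queries the tables; passing 1 also enters X,
   which is what cselib_record_sets does for the source of each SET.  */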
941 /* Invalidate any entries in reg_values that overlap REGNO. This is called
942 if REGNO is changing. MODE is the mode of the assignment to REGNO, which
943 is used to determine how many hard registers are being changed. If MODE
944 is VOIDmode, then only REGNO is being changed; this is used when
945 invalidating call clobbered registers across a call. */
947 static void
948 cselib_invalidate_regno (regno, mode)
949 unsigned int regno;
950 enum machine_mode mode;
952 unsigned int endregno;
953 unsigned int i;
955 /* If we see pseudos after reload, something is _wrong_. */
956 if (reload_completed && regno >= FIRST_PSEUDO_REGISTER
957 && reg_renumber[regno] >= 0)
958 abort ();
960 /* Determine the range of registers that must be invalidated. For
961 pseudos, only REGNO is affected. For hard regs, we must take MODE
962 into account, and we must also invalidate lower register numbers
963 if they contain values that overlap REGNO. */
964 if (regno < FIRST_PSEUDO_REGISTER && mode != VOIDmode)
966 if (regno < max_value_regs)
967 i = 0;
968 else
969 i = regno - max_value_regs;
971 endregno = regno + HARD_REGNO_NREGS (regno, mode);
973 else
975 i = regno;
976 endregno = regno + 1;
979 for (; i < endregno; i++)
981 struct elt_list **l = &REG_VALUES (i);
983 /* Go through all known values for this reg; if it overlaps the range
984 we're invalidating, remove the value. */
985 while (*l)
987 cselib_val *v = (*l)->elt;
988 struct elt_loc_list **p;
989 unsigned int this_last = i;
991 if (i < FIRST_PSEUDO_REGISTER)
992 this_last += HARD_REGNO_NREGS (i, GET_MODE (v->u.val_rtx)) - 1;
994 if (this_last < regno)
996 l = &(*l)->next;
997 continue;
1000 /* We have an overlap. */
1001 unchain_one_elt_list (l);
1003 /* Now, we clear the mapping from value to reg. It must exist, so
1004 this code will crash intentionally if it doesn't. */
1005 for (p = &v->locs; ; p = &(*p)->next)
1007 rtx x = (*p)->loc;
1009 if (GET_CODE (x) == REG && REGNO (x) == i)
1011 unchain_one_elt_loc_list (p);
1012 break;
1015 if (v->locs == 0)
1016 n_useless_values++;
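/* As an example of the hard register handling above (register counts are
   target-dependent): on a target where (reg:DI 0) occupies hard
   registers 0 and 1, a store to (reg:SI 1) must also invalidate the
   DImode value recorded for register 0, which is why the scan starts
   max_value_regs registers below REGNO.  */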
1021 /* The memory at address MEM_BASE is being changed.
1022 Return whether this change will invalidate VAL. */
1024 static int
1025 cselib_mem_conflict_p (mem_base, val)
1026 rtx mem_base;
1027 rtx val;
1029 enum rtx_code code;
1030 const char *fmt;
1031 int i, j;
1033 code = GET_CODE (val);
1034 switch (code)
1036 /* Get rid of a few simple cases quickly. */
1037 case REG:
1038 case PC:
1039 case CC0:
1040 case SCRATCH:
1041 case CONST:
1042 case CONST_INT:
1043 case CONST_DOUBLE:
1044 case CONST_VECTOR:
1045 case SYMBOL_REF:
1046 case LABEL_REF:
1047 return 0;
1049 case MEM:
1050 if (GET_MODE (mem_base) == BLKmode
1051 || GET_MODE (val) == BLKmode
1052 || anti_dependence (val, mem_base))
1053 return 1;
1055 /* The address may contain nested MEMs. */
1056 break;
1058 default:
1059 break;
1062 fmt = GET_RTX_FORMAT (code);
1063 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1065 if (fmt[i] == 'e')
1067 if (cselib_mem_conflict_p (mem_base, XEXP (val, i)))
1068 return 1;
1070 else if (fmt[i] == 'E')
1071 for (j = 0; j < XVECLEN (val, i); j++)
1072 if (cselib_mem_conflict_p (mem_base, XVECEXP (val, i, j)))
1073 return 1;
1076 return 0;
1079 /* For the value found in SLOT, walk its locations to determine if any overlap
1080 INFO (which is a MEM rtx). */
1082 static int
1083 cselib_invalidate_mem_1 (slot, info)
1084 void **slot;
1085 void *info;
1087 cselib_val *v = (cselib_val *) *slot;
1088 rtx mem_rtx = (rtx) info;
1089 struct elt_loc_list **p = &v->locs;
1090 int had_locs = v->locs != 0;
1092 while (*p)
1094 rtx x = (*p)->loc;
1095 cselib_val *addr;
1096 struct elt_list **mem_chain;
1098 /* MEMs may occur in locations only at the top level; below
1099 that every MEM or REG is substituted by its VALUE. */
1100 if (GET_CODE (x) != MEM
1101 || ! cselib_mem_conflict_p (mem_rtx, x))
1103 p = &(*p)->next;
1104 continue;
1107 /* This one overlaps. */
1108 /* We must have a mapping from this MEM's address to the
1109 value (E). Remove that, too. */
1110 addr = cselib_lookup (XEXP (x, 0), VOIDmode, 0);
1111 mem_chain = &addr->addr_list;
1112 for (;;)
1114 if ((*mem_chain)->elt == v)
1116 unchain_one_elt_list (mem_chain);
1117 break;
1120 mem_chain = &(*mem_chain)->next;
1123 unchain_one_elt_loc_list (p);
1126 if (had_locs && v->locs == 0)
1127 n_useless_values++;
1129 return 1;
1132 /* Invalidate any locations in the table which are changed because of a
1133 store to MEM_RTX. If this is called because of a non-const call
1134 instruction, MEM_RTX is (mem:BLK const0_rtx). */
1136 static void
1137 cselib_invalidate_mem (mem_rtx)
1138 rtx mem_rtx;
1140 htab_traverse (hash_table, cselib_invalidate_mem_1, mem_rtx);
1143 /* Invalidate DEST, which is being assigned to or clobbered. The second and
1144 the third parameter exist so that this function can be passed to
1145 note_stores; they are ignored. */
1147 static void
1148 cselib_invalidate_rtx (dest, ignore, data)
1149 rtx dest;
1150 rtx ignore ATTRIBUTE_UNUSED;
1151 void *data ATTRIBUTE_UNUSED;
1153 while (GET_CODE (dest) == STRICT_LOW_PART || GET_CODE (dest) == SIGN_EXTRACT
1154 || GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == SUBREG)
1155 dest = XEXP (dest, 0);
1157 if (GET_CODE (dest) == REG)
1158 cselib_invalidate_regno (REGNO (dest), GET_MODE (dest));
1159 else if (GET_CODE (dest) == MEM)
1160 cselib_invalidate_mem (dest);
1162 /* Some machines don't define AUTO_INC_DEC, but they still use push
1163 instructions. We need to catch that case here in order to
1164 invalidate the stack pointer correctly. Note that invalidating
1165 the stack pointer is different from invalidating DEST. */
1166 if (push_operand (dest, GET_MODE (dest)))
1167 cselib_invalidate_rtx (stack_pointer_rtx, NULL_RTX, NULL);
1170 /* Record the result of a SET instruction. DEST is being set; the source
1171 contains the value described by SRC_ELT. If DEST is a MEM, DEST_ADDR_ELT
1172 describes its address. */
1174 static void
1175 cselib_record_set (dest, src_elt, dest_addr_elt)
1176 rtx dest;
1177 cselib_val *src_elt, *dest_addr_elt;
1179 int dreg = GET_CODE (dest) == REG ? (int) REGNO (dest) : -1;
1181 if (src_elt == 0 || side_effects_p (dest))
1182 return;
1184 if (dreg >= 0)
1186 if (REG_VALUES (dreg) == 0)
1187 VARRAY_PUSH_UINT (used_regs, dreg);
1189 if (dreg < FIRST_PSEUDO_REGISTER)
1191 unsigned int n = HARD_REGNO_NREGS (dreg, GET_MODE (dest));
1193 if (n > max_value_regs)
1194 max_value_regs = n;
1197 REG_VALUES (dreg) = new_elt_list (REG_VALUES (dreg), src_elt);
1198 if (src_elt->locs == 0)
1199 n_useless_values--;
1200 src_elt->locs = new_elt_loc_list (src_elt->locs, dest);
1202 else if (GET_CODE (dest) == MEM && dest_addr_elt != 0)
1204 if (src_elt->locs == 0)
1205 n_useless_values--;
1206 add_mem_for_addr (dest_addr_elt, src_elt, dest);
1210 /* Describe a single set that is part of an insn. */
1211 struct set
1213 rtx src;
1214 rtx dest;
1215 cselib_val *src_elt;
1216 cselib_val *dest_addr_elt;
1219 /* There is no good way to determine how many elements there can be
1220 in a PARALLEL. Since it's fairly cheap, use a really large number. */
1221 #define MAX_SETS (FIRST_PSEUDO_REGISTER * 2)
1223 /* Record the effects of any sets in INSN. */
1224 static void
1225 cselib_record_sets (insn)
1226 rtx insn;
1228 int n_sets = 0;
1229 int i;
1230 struct set sets[MAX_SETS];
1231 rtx body = PATTERN (insn);
1232 rtx cond = 0;
1234 body = PATTERN (insn);
1235 if (GET_CODE (body) == COND_EXEC)
1237 cond = COND_EXEC_TEST (body);
1238 body = COND_EXEC_CODE (body);
1241 /* Find all sets. */
1242 if (GET_CODE (body) == SET)
1244 sets[0].src = SET_SRC (body);
1245 sets[0].dest = SET_DEST (body);
1246 n_sets = 1;
1248 else if (GET_CODE (body) == PARALLEL)
1250 /* Look through the PARALLEL and record the values being
1251 set, if possible. Also handle any CLOBBERs. */
1252 for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
1254 rtx x = XVECEXP (body, 0, i);
1256 if (GET_CODE (x) == SET)
1258 sets[n_sets].src = SET_SRC (x);
1259 sets[n_sets].dest = SET_DEST (x);
1260 n_sets++;
1265 /* Look up the values that are read. Do this before invalidating the
1266 locations that are written. */
1267 for (i = 0; i < n_sets; i++)
1269 rtx dest = sets[i].dest;
1271 /* A STRICT_LOW_PART can be ignored; we'll record the equivalence for
1272 the low part after invalidating any knowledge about larger modes. */
1273 if (GET_CODE (sets[i].dest) == STRICT_LOW_PART)
1274 sets[i].dest = dest = XEXP (dest, 0);
1276 /* We don't know how to record anything but REG or MEM. */
1277 if (GET_CODE (dest) == REG || GET_CODE (dest) == MEM)
1279 rtx src = sets[i].src;
1280 if (cond)
1281 src = gen_rtx_IF_THEN_ELSE (GET_MODE (src), cond, src, dest);
1282 sets[i].src_elt = cselib_lookup (src, GET_MODE (dest), 1);
1283 if (GET_CODE (dest) == MEM)
1284 sets[i].dest_addr_elt = cselib_lookup (XEXP (dest, 0), Pmode, 1);
1285 else
1286 sets[i].dest_addr_elt = 0;
1290 /* Invalidate all locations written by this insn. Note that the elts we
1291 looked up in the previous loop aren't affected, just some of their
1292 locations may go away. */
1293 note_stores (body, cselib_invalidate_rtx, NULL);
1295 /* Now enter the equivalences in our tables. */
1296 for (i = 0; i < n_sets; i++)
1298 rtx dest = sets[i].dest;
1299 if (GET_CODE (dest) == REG || GET_CODE (dest) == MEM)
1300 cselib_record_set (dest, sets[i].src_elt, sets[i].dest_addr_elt);
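/* The ordering in cselib_record_sets matters.  For an insn such as a
   PARALLEL of two SETs that swap two registers, the sources are looked
   up before note_stores invalidates the destinations, so both src_elt's
   still describe the pre-insn contents; the new equivalences are only
   entered afterwards.  */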
1304 /* Record the effects of INSN. */
1306 void
1307 cselib_process_insn (insn)
1308 rtx insn;
1310 int i;
1311 rtx x;
1313 cselib_current_insn = insn;
1315 /* Forget everything at a CODE_LABEL, a volatile asm, or a setjmp. */
1316 if (GET_CODE (insn) == CODE_LABEL
1317 || (GET_CODE (insn) == CALL_INSN
1318 && find_reg_note (insn, REG_SETJMP, NULL))
1319 || (GET_CODE (insn) == INSN
1320 && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
1321 && MEM_VOLATILE_P (PATTERN (insn))))
1323 clear_table (0);
1324 return;
1327 if (! INSN_P (insn))
1329 cselib_current_insn = 0;
1330 return;
1333 /* If this is a call instruction, forget anything stored in a
1334 call clobbered register, or, if this is not a const call, in
1335 memory. */
1336 if (GET_CODE (insn) == CALL_INSN)
1338 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1339 if (call_used_regs[i])
1340 cselib_invalidate_regno (i, VOIDmode);
1342 if (! CONST_OR_PURE_CALL_P (insn))
1343 cselib_invalidate_mem (callmem);
1346 cselib_record_sets (insn);
1348 #ifdef AUTO_INC_DEC
1349 /* Clobber any registers which appear in REG_INC notes. We
1350 could keep track of the changes to their values, but it is
1351 unlikely to help. */
1352 for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
1353 if (REG_NOTE_KIND (x) == REG_INC)
1354 cselib_invalidate_rtx (XEXP (x, 0), NULL_RTX, NULL);
1355 #endif
1357 /* Look for any CLOBBERs in CALL_INSN_FUNCTION_USAGE, but only
1358 after we have processed the insn. */
1359 if (GET_CODE (insn) == CALL_INSN)
1360 for (x = CALL_INSN_FUNCTION_USAGE (insn); x; x = XEXP (x, 1))
1361 if (GET_CODE (XEXP (x, 0)) == CLOBBER)
1362 cselib_invalidate_rtx (XEXP (XEXP (x, 0), 0), NULL_RTX, NULL);
1364 cselib_current_insn = 0;
1366 if (n_useless_values > MAX_USELESS_VALUES)
1367 remove_useless_values ();
1370 /* Make sure our varrays are big enough. Not called from any cselib routines;
1371 it must be called by the user if it allocated new registers. */
1373 void
1374 cselib_update_varray_sizes ()
1376 unsigned int nregs = max_reg_num ();
1378 if (nregs == cselib_nregs)
1379 return;
1381 cselib_nregs = nregs;
1382 VARRAY_GROW (reg_values, nregs);
1383 VARRAY_GROW (used_regs, nregs);
1386 /* Initialize cselib for one pass. The caller must also call
1387 init_alias_analysis. */
1389 void
1390 cselib_init ()
1392 /* This is only created once. */
1393 if (! callmem)
1394 callmem = gen_rtx_MEM (BLKmode, const0_rtx);
1396 cselib_nregs = max_reg_num ();
1397 if (reg_values_old != NULL && VARRAY_SIZE (reg_values_old) >= cselib_nregs)
1399 reg_values = reg_values_old;
1400 used_regs = used_regs_old;
1401 VARRAY_CLEAR (reg_values);
1402 VARRAY_CLEAR (used_regs);
1404 else
1406 VARRAY_ELT_LIST_INIT (reg_values, cselib_nregs, "reg_values");
1407 VARRAY_UINT_INIT (used_regs, cselib_nregs, "used_regs");
1409 hash_table = htab_create_ggc (31, get_value_hash, entry_and_rtx_equal_p,
1410 NULL);
1411 clear_table (1);
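/* A sketch of the expected calling sequence (the insn walk shown here is
   up to the client pass):

     cselib_init ();
     for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
       cselib_process_insn (insn);
     cselib_finish ();

   with init_alias_analysis called beforehand, as the comment above
   requires.  */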
1414 /* Called when the current user is done with cselib. */
1416 void
1417 cselib_finish ()
1419 reg_values_old = reg_values;
1420 reg_values = 0;
1421 used_regs_old = used_regs;
1422 used_regs = 0;
1423 hash_table = 0;
1424 n_useless_values = 0;
1425 next_unknown_value = 0;
1428 #include "gt-cselib.h"