1 /* Common subexpression elimination library for GNU compiler.
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING.  If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA.  */
28 #include "hard-reg-set.h"
31 #include "insn-config.h"
42 static int entry_and_rtx_equal_p
PARAMS ((const void *, const void *));
43 static unsigned int get_value_hash
PARAMS ((const void *));
44 static struct elt_list
*new_elt_list
PARAMS ((struct elt_list
*,
46 static struct elt_loc_list
*new_elt_loc_list
PARAMS ((struct elt_loc_list
*,
48 static void unchain_one_value
PARAMS ((cselib_val
*));
49 static void unchain_one_elt_list
PARAMS ((struct elt_list
**));
50 static void unchain_one_elt_loc_list
PARAMS ((struct elt_loc_list
**));
51 static void clear_table
PARAMS ((int));
52 static int discard_useless_locs
PARAMS ((void **, void *));
53 static int discard_useless_values
PARAMS ((void **, void *));
54 static void remove_useless_values
PARAMS ((void));
55 static rtx wrap_constant
PARAMS ((enum machine_mode
, rtx
));
56 static unsigned int hash_rtx
PARAMS ((rtx
, enum machine_mode
, int));
57 static cselib_val
*new_cselib_val
PARAMS ((unsigned int,
59 static void add_mem_for_addr
PARAMS ((cselib_val
*, cselib_val
*,
61 static cselib_val
*cselib_lookup_mem
PARAMS ((rtx
, int));
62 static void cselib_invalidate_regno
PARAMS ((unsigned int,
64 static int cselib_mem_conflict_p
PARAMS ((rtx
, rtx
));
65 static int cselib_invalidate_mem_1
PARAMS ((void **, void *));
66 static void cselib_invalidate_mem
PARAMS ((rtx
));
67 static void cselib_invalidate_rtx
PARAMS ((rtx
, rtx
, void *));
68 static void cselib_record_set
PARAMS ((rtx
, cselib_val
*,
70 static void cselib_record_sets
PARAMS ((rtx
));
72 /* There are three ways in which cselib can look up an rtx:
73 - for a REG, the reg_values table (which is indexed by regno) is used
74 - for a MEM, we recursively look up its address and then follow the
75 addr_list of that value
76 - for everything else, we compute a hash value and go through the hash
77 table. Since different rtx's can still have the same hash value,
78 this involves walking the table entries for a given value and comparing
79 the locations of the entries with the rtx we are looking up. */
81 /* A table that enables us to look up elts by their value. */
82 static htab_t hash_table
;
84 /* This is a global so we don't have to pass this through every function.
85 It is used in new_elt_loc_list to set SETTING_INSN. */
86 static rtx cselib_current_insn
;
88 /* Every new unknown value gets a unique number. */
89 static unsigned int next_unknown_value
;
91 /* The number of registers we had when the varrays were last resized. */
92 static unsigned int cselib_nregs
;
94 /* Count values without known locations. Whenever this grows too big, we
95 remove these useless values from the table. */
96 static int n_useless_values
;
98 /* Number of useless values before we remove them from the hash table. */
99 #define MAX_USELESS_VALUES 32
101 /* This table maps from register number to values. It does not contain
102 pointers to cselib_val structures, but rather elt_lists. The purpose is
103 to be able to refer to the same register in different modes. */
104 static varray_type reg_values
;
105 #define REG_VALUES(I) VARRAY_ELT_LIST (reg_values, (I))
107 /* The largest number of hard regs used by any entry added to the
108 REG_VALUES table. Cleared on each clear_table() invocation. */
109 static unsigned int max_value_regs
;
111 /* Here the set of indices I with REG_VALUES(I) != 0 is saved. This is used
112 in clear_table() for fast emptying. */
113 static varray_type used_regs
;
115 /* We pass this to cselib_invalidate_mem to invalidate all of
116 memory for a non-const call instruction. */
119 /* Memory for our structures is allocated from this obstack. */
120 static struct obstack cselib_obstack
;
122 /* Used to quickly free all memory. */
123 static char *cselib_startobj
;
125 /* Caches for unused structures. */
126 static cselib_val
*empty_vals
;
127 static struct elt_list
*empty_elt_lists
;
128 static struct elt_loc_list
*empty_elt_loc_lists
;
130 /* Set by discard_useless_locs if it deleted the last location of any value.  */
132 static int values_became_useless
;
135 /* Allocate a struct elt_list and fill in its two elements with the arguments.  */
138 static struct elt_list
*
139 new_elt_list (next
, elt
)
140 struct elt_list
*next
;
143 struct elt_list
*el
= empty_elt_lists
;
146 empty_elt_lists
= el
->next
;
148 el
= (struct elt_list
*) obstack_alloc (&cselib_obstack
,
149 sizeof (struct elt_list
));
155 /* Allocate a struct elt_loc_list and fill in its two elements with the arguments.  */
158 static struct elt_loc_list
*
159 new_elt_loc_list (next
, loc
)
160 struct elt_loc_list
*next
;
163 struct elt_loc_list
*el
= empty_elt_loc_lists
;
166 empty_elt_loc_lists
= el
->next
;
168 el
= (struct elt_loc_list
*) obstack_alloc (&cselib_obstack
,
169 sizeof (struct elt_loc_list
));
172 el
->setting_insn
= cselib_current_insn
;
176 /* The elt_list at *PL is no longer needed.  Unchain it and free its storage.  */
180 unchain_one_elt_list (pl
)
181 struct elt_list
**pl
;
183 struct elt_list
*l
= *pl
;
186 l
->next
= empty_elt_lists
;
190 /* Likewise for elt_loc_lists. */
193 unchain_one_elt_loc_list (pl
)
194 struct elt_loc_list
**pl
;
196 struct elt_loc_list
*l
= *pl
;
199 l
->next
= empty_elt_loc_lists
;
200 empty_elt_loc_lists
= l
;
203 /* Likewise for cselib_vals.  This also frees the addr_list associated with a cselib_val.  */
207 unchain_one_value (v
)
211 unchain_one_elt_list (&v
->addr_list
);
213 v
->u
.next_free
= empty_vals
;
217 /* Remove all entries from the hash table. Also used during
218 initialization. If CLEAR_ALL isn't set, then only clear the entries
219 which are known to have been used. */
222 clear_table (clear_all
)
228 for (i
= 0; i
< cselib_nregs
; i
++)
231 for (i
= 0; i
< VARRAY_ACTIVE_SIZE (used_regs
); i
++)
232 REG_VALUES (VARRAY_UINT (used_regs
, i
)) = 0;
236 VARRAY_POP_ALL (used_regs
);
238 htab_empty (hash_table
);
239 obstack_free (&cselib_obstack
, cselib_startobj
);
243 empty_elt_loc_lists
= 0;
244 n_useless_values
= 0;
246 next_unknown_value
= 0;
249 /* The equality test for our hash table. The first argument ENTRY is a table
250 element (i.e. a cselib_val), while the second arg X is an rtx. We know
251 that all callers of htab_find_slot_with_hash will wrap CONST_INTs into a
252 CONST of an appropriate mode. */
255 entry_and_rtx_equal_p (entry
, x_arg
)
256 const void *entry
, *x_arg
;
258 struct elt_loc_list
*l
;
259 const cselib_val
*v
= (const cselib_val
*) entry
;
261 enum machine_mode mode
= GET_MODE (x
);
263 if (GET_CODE (x
) == CONST_INT
264 || (mode
== VOIDmode
&& GET_CODE (x
) == CONST_DOUBLE
))
266 if (mode
!= GET_MODE (v
->u
.val_rtx
))
269 /* Unwrap X if necessary. */
270 if (GET_CODE (x
) == CONST
271 && (GET_CODE (XEXP (x
, 0)) == CONST_INT
272 || GET_CODE (XEXP (x
, 0)) == CONST_DOUBLE
))
275 /* We don't guarantee that distinct rtx's have different hash values,
276 so we need to do a comparison. */
277 for (l
= v
->locs
; l
; l
= l
->next
)
278 if (rtx_equal_for_cselib_p (l
->loc
, x
))
284 /* The hash function for our hash table. The value is always computed with
285 hash_rtx when adding an element; this function just extracts the hash
286 value from a cselib_val structure. */
289 get_value_hash (entry
)
292 const cselib_val
*v
= (const cselib_val
*) entry
;
296 /* Return true if X contains a VALUE rtx.  If ONLY_USELESS is set, we
297 only return true for values which point to a cselib_val whose value
298 element has been set to zero, which implies the cselib_val will be
removed.  */
302 references_value_p (x
, only_useless
)
306 enum rtx_code code
= GET_CODE (x
);
307 const char *fmt
= GET_RTX_FORMAT (code
);
310 if (GET_CODE (x
) == VALUE
311 && (! only_useless
|| CSELIB_VAL_PTR (x
)->locs
== 0))
314 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
316 if (fmt
[i
] == 'e' && references_value_p (XEXP (x
, i
), only_useless
))
318 else if (fmt
[i
] == 'E')
319 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
320 if (references_value_p (XVECEXP (x
, i
, j
), only_useless
))
327 /* For all locations found in X, delete locations that reference useless
328 values (i.e. values without any location).  Called through htab_traverse.  */
332 discard_useless_locs (x
, info
)
334 void *info ATTRIBUTE_UNUSED
;
336 cselib_val
*v
= (cselib_val
*)*x
;
337 struct elt_loc_list
**p
= &v
->locs
;
338 int had_locs
= v
->locs
!= 0;
342 if (references_value_p ((*p
)->loc
, 1))
343 unchain_one_elt_loc_list (p
);
348 if (had_locs
&& v
->locs
== 0)
351 values_became_useless
= 1;
356 /* If X is a value with no locations, remove it from the hashtable. */
359 discard_useless_values (x
, info
)
361 void *info ATTRIBUTE_UNUSED
;
363 cselib_val
*v
= (cselib_val
*)*x
;
367 htab_clear_slot (hash_table
, x
);
368 unchain_one_value (v
);
375 /* Clean out useless values (i.e. those which no longer have locations
376 associated with them) from the hash table. */
379 remove_useless_values ()
381 /* First pass: eliminate locations that reference the value. That in
382 turn can make more values useless. */
385 values_became_useless
= 0;
386 htab_traverse (hash_table
, discard_useless_locs
, 0);
388 while (values_became_useless
);
390 /* Second pass: actually remove the values. */
391 htab_traverse (hash_table
, discard_useless_values
, 0);
393 if (n_useless_values
!= 0)
397 /* Return nonzero if we can prove that X and Y contain the same value, taking
398 our gathered information into account. */
401 rtx_equal_for_cselib_p (x
, y
)
408 if (GET_CODE (x
) == REG
|| GET_CODE (x
) == MEM
)
410 cselib_val
*e
= cselib_lookup (x
, GET_MODE (x
), 0);
416 if (GET_CODE (y
) == REG
|| GET_CODE (y
) == MEM
)
418 cselib_val
*e
= cselib_lookup (y
, GET_MODE (y
), 0);
427 if (GET_CODE (x
) == VALUE
&& GET_CODE (y
) == VALUE
)
428 return CSELIB_VAL_PTR (x
) == CSELIB_VAL_PTR (y
);
430 if (GET_CODE (x
) == VALUE
)
432 cselib_val
*e
= CSELIB_VAL_PTR (x
);
433 struct elt_loc_list
*l
;
435 for (l
= e
->locs
; l
; l
= l
->next
)
439 /* Avoid infinite recursion. */
440 if (GET_CODE (t
) == REG
|| GET_CODE (t
) == MEM
)
442 else if (rtx_equal_for_cselib_p (t
, y
))
449 if (GET_CODE (y
) == VALUE
)
451 cselib_val
*e
= CSELIB_VAL_PTR (y
);
452 struct elt_loc_list
*l
;
454 for (l
= e
->locs
; l
; l
= l
->next
)
458 if (GET_CODE (t
) == REG
|| GET_CODE (t
) == MEM
)
460 else if (rtx_equal_for_cselib_p (x
, t
))
467 if (GET_CODE (x
) != GET_CODE (y
) || GET_MODE (x
) != GET_MODE (y
))
470 /* This won't be handled correctly by the code below. */
471 if (GET_CODE (x
) == LABEL_REF
)
472 return XEXP (x
, 0) == XEXP (y
, 0);
475 fmt
= GET_RTX_FORMAT (code
);
477 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
484 if (XWINT (x
, i
) != XWINT (y
, i
))
490 if (XINT (x
, i
) != XINT (y
, i
))
496 /* Two vectors must have the same length. */
497 if (XVECLEN (x
, i
) != XVECLEN (y
, i
))
500 /* And the corresponding elements must match. */
501 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
502 if (! rtx_equal_for_cselib_p (XVECEXP (x
, i
, j
),
508 if (! rtx_equal_for_cselib_p (XEXP (x
, i
), XEXP (y
, i
)))
514 if (strcmp (XSTR (x
, i
), XSTR (y
, i
)))
519 /* These are just backpointers, so they don't matter. */
526 /* It is believed that rtx's at this level will never
527 contain anything but integers and other rtx's,
528 except for within LABEL_REFs and SYMBOL_REFs. */
536 /* We need to pass down the mode of constants through the hash table
537 functions.  For that purpose, wrap them in a CONST of the appropriate mode.  */
540 wrap_constant (mode
, x
)
541 enum machine_mode mode
;
544 if (GET_CODE (x
) != CONST_INT
545 && (GET_CODE (x
) != CONST_DOUBLE
|| GET_MODE (x
) != VOIDmode
))
547 if (mode
== VOIDmode
)
549 return gen_rtx_CONST (mode
, x
);
552 /* Hash an rtx. Return 0 if we couldn't hash the rtx.
553 For registers and memory locations, we look up their cselib_val structure
554 and return its VALUE element.
555 Possible reasons for return 0 are: the object is volatile, or we couldn't
556 find a register or memory location in the table and CREATE is zero. If
557 CREATE is nonzero, table elts are created for regs and mem.
558 MODE is used in hashing for CONST_INTs only;
559 otherwise the mode of X is used. */
562 hash_rtx (x
, mode
, create
)
564 enum machine_mode mode
;
571 unsigned int hash
= 0;
574 hash
+= (unsigned) code
+ (unsigned) GET_MODE (x
);
580 e
= cselib_lookup (x
, GET_MODE (x
), create
);
587 hash
+= ((unsigned) CONST_INT
<< 7) + (unsigned) mode
+ INTVAL (x
);
588 return hash
? hash
: (unsigned int) CONST_INT
;
591 /* This is like the general case, except that it only counts
592 the integers representing the constant. */
593 hash
+= (unsigned) code
+ (unsigned) GET_MODE (x
);
594 if (GET_MODE (x
) != VOIDmode
)
595 for (i
= 2; i
< GET_RTX_LENGTH (CONST_DOUBLE
); i
++)
596 hash
+= XWINT (x
, i
);
598 hash
+= ((unsigned) CONST_DOUBLE_LOW (x
)
599 + (unsigned) CONST_DOUBLE_HIGH (x
));
600 return hash
? hash
: (unsigned int) CONST_DOUBLE
;
607 units
= CONST_VECTOR_NUNITS (x
);
609 for (i
= 0; i
< units
; ++i
)
611 elt
= CONST_VECTOR_ELT (x
, i
);
612 hash
+= hash_rtx (elt
, GET_MODE (elt
), 0);
618 /* Assume there is only one rtx object for any given label. */
621 += ((unsigned) LABEL_REF
<< 7) + (unsigned long) XEXP (x
, 0);
622 return hash
? hash
: (unsigned int) LABEL_REF
;
626 += ((unsigned) SYMBOL_REF
<< 7) + (unsigned long) XSTR (x
, 0);
627 return hash
? hash
: (unsigned int) SYMBOL_REF
;
638 case UNSPEC_VOLATILE
:
642 if (MEM_VOLATILE_P (x
))
651 i
= GET_RTX_LENGTH (code
) - 1;
652 fmt
= GET_RTX_FORMAT (code
);
657 rtx tem
= XEXP (x
, i
);
658 unsigned int tem_hash
= hash_rtx (tem
, 0, create
);
665 else if (fmt
[i
] == 'E')
666 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
668 unsigned int tem_hash
= hash_rtx (XVECEXP (x
, i
, j
), 0, create
);
675 else if (fmt
[i
] == 's')
677 const unsigned char *p
= (const unsigned char *) XSTR (x
, i
);
683 else if (fmt
[i
] == 'i')
685 else if (fmt
[i
] == '0' || fmt
[i
] == 't')
691 return hash
? hash
: 1 + (unsigned int) GET_CODE (x
);
694 /* Create a new value structure for VALUE and initialize it.  The mode of the value is MODE.  */
698 new_cselib_val (value
, mode
)
700 enum machine_mode mode
;
702 cselib_val
*e
= empty_vals
;
705 empty_vals
= e
->u
.next_free
;
707 e
= (cselib_val
*) obstack_alloc (&cselib_obstack
, sizeof (cselib_val
));
713 e
->u
.val_rtx
= gen_rtx_VALUE (mode
);
714 CSELIB_VAL_PTR (e
->u
.val_rtx
) = e
;
720 /* ADDR_ELT is a value that is used as address. MEM_ELT is the value that
721 contains the data at this address. X is a MEM that represents the
722 value. Update the two value structures to represent this situation. */
725 add_mem_for_addr (addr_elt
, mem_elt
, x
)
726 cselib_val
*addr_elt
, *mem_elt
;
729 struct elt_loc_list
*l
;
731 /* Avoid duplicates. */
732 for (l
= mem_elt
->locs
; l
; l
= l
->next
)
733 if (GET_CODE (l
->loc
) == MEM
734 && CSELIB_VAL_PTR (XEXP (l
->loc
, 0)) == addr_elt
)
737 addr_elt
->addr_list
= new_elt_list (addr_elt
->addr_list
, mem_elt
);
739 = new_elt_loc_list (mem_elt
->locs
,
740 replace_equiv_address_nv (x
, addr_elt
->u
.val_rtx
));
743 /* Subroutine of cselib_lookup. Return a value for X, which is a MEM rtx.
744 If CREATE, make a new one if we haven't seen it before. */
747 cselib_lookup_mem (x
, create
)
751 enum machine_mode mode
= GET_MODE (x
);
757 if (MEM_VOLATILE_P (x
) || mode
== BLKmode
758 || (FLOAT_MODE_P (mode
) && flag_float_store
))
761 /* Look up the value for the address. */
762 addr
= cselib_lookup (XEXP (x
, 0), mode
, create
);
766 /* Find a value that describes a value of our mode at that address. */
767 for (l
= addr
->addr_list
; l
; l
= l
->next
)
768 if (GET_MODE (l
->elt
->u
.val_rtx
) == mode
)
774 mem_elt
= new_cselib_val (++next_unknown_value
, mode
);
775 add_mem_for_addr (addr
, mem_elt
, x
);
776 slot
= htab_find_slot_with_hash (hash_table
, wrap_constant (mode
, x
),
777 mem_elt
->value
, INSERT
);
782 /* Walk rtx X and replace all occurrences of REG and MEM subexpressions
783 with VALUE expressions. This way, it becomes independent of changes
784 to registers and memory.
785 X isn't actually modified; if modifications are needed, new rtl is
786 allocated. However, the return value can share rtl with X. */
789 cselib_subst_to_values (x
)
792 enum rtx_code code
= GET_CODE (x
);
793 const char *fmt
= GET_RTX_FORMAT (code
);
802 for (l
= REG_VALUES (REGNO (x
)); l
; l
= l
->next
)
803 if (GET_MODE (l
->elt
->u
.val_rtx
) == GET_MODE (x
))
804 return l
->elt
->u
.val_rtx
;
809 e
= cselib_lookup_mem (x
, 0);
812 /* This happens for autoincrements. Assign a value that doesn't
814 e
= new_cselib_val (++next_unknown_value
, GET_MODE (x
));
829 e
= new_cselib_val (++next_unknown_value
, GET_MODE (x
));
836 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
840 rtx t
= cselib_subst_to_values (XEXP (x
, i
));
842 if (t
!= XEXP (x
, i
) && x
== copy
)
843 copy
= shallow_copy_rtx (x
);
847 else if (fmt
[i
] == 'E')
851 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
853 rtx t
= cselib_subst_to_values (XVECEXP (x
, i
, j
));
855 if (t
!= XVECEXP (x
, i
, j
) && XVEC (x
, i
) == XVEC (copy
, i
))
858 copy
= shallow_copy_rtx (x
);
860 XVEC (copy
, i
) = rtvec_alloc (XVECLEN (x
, i
));
861 for (k
= 0; k
< j
; k
++)
862 XVECEXP (copy
, i
, k
) = XVECEXP (x
, i
, k
);
865 XVECEXP (copy
, i
, j
) = t
;
873 /* Look up the rtl expression X in our tables and return the value it has.
874 If CREATE is zero, we return NULL if we don't know the value. Otherwise,
875 we create a new one if possible, using mode MODE if X doesn't have a mode
876 (i.e. because it's a constant). */
879 cselib_lookup (x
, mode
, create
)
881 enum machine_mode mode
;
886 unsigned int hashval
;
888 if (GET_MODE (x
) != VOIDmode
)
891 if (GET_CODE (x
) == VALUE
)
892 return CSELIB_VAL_PTR (x
);
894 if (GET_CODE (x
) == REG
)
897 unsigned int i
= REGNO (x
);
899 for (l
= REG_VALUES (i
); l
; l
= l
->next
)
900 if (mode
== GET_MODE (l
->elt
->u
.val_rtx
))
906 if (i
< FIRST_PSEUDO_REGISTER
)
908 unsigned int n
= HARD_REGNO_NREGS (i
, mode
);
910 if (n
> max_value_regs
)
914 e
= new_cselib_val (++next_unknown_value
, GET_MODE (x
));
915 e
->locs
= new_elt_loc_list (e
->locs
, x
);
916 if (REG_VALUES (i
) == 0)
917 VARRAY_PUSH_UINT (used_regs
, i
);
918 REG_VALUES (i
) = new_elt_list (REG_VALUES (i
), e
);
919 slot
= htab_find_slot_with_hash (hash_table
, x
, e
->value
, INSERT
);
924 if (GET_CODE (x
) == MEM
)
925 return cselib_lookup_mem (x
, create
);
927 hashval
= hash_rtx (x
, mode
, create
);
928 /* Can't even create if hashing is not possible. */
932 slot
= htab_find_slot_with_hash (hash_table
, wrap_constant (mode
, x
),
933 hashval
, create
? INSERT
: NO_INSERT
);
937 e
= (cselib_val
*) *slot
;
941 e
= new_cselib_val (hashval
, mode
);
943 /* We have to fill the slot before calling cselib_subst_to_values:
944 the hash table is inconsistent until we do so, and
945 cselib_subst_to_values will need to do lookups. */
947 e
->locs
= new_elt_loc_list (e
->locs
, cselib_subst_to_values (x
));
951 /* Invalidate any entries in reg_values that overlap REGNO. This is called
952 if REGNO is changing. MODE is the mode of the assignment to REGNO, which
953 is used to determine how many hard registers are being changed. If MODE
954 is VOIDmode, then only REGNO is being changed; this is used when
955 invalidating call clobbered registers across a call. */
958 cselib_invalidate_regno (regno
, mode
)
960 enum machine_mode mode
;
962 unsigned int endregno
;
965 /* If we see pseudos after reload, something is _wrong_. */
966 if (reload_completed
&& regno
>= FIRST_PSEUDO_REGISTER
967 && reg_renumber
[regno
] >= 0)
970 /* Determine the range of registers that must be invalidated. For
971 pseudos, only REGNO is affected. For hard regs, we must take MODE
972 into account, and we must also invalidate lower register numbers
973 if they contain values that overlap REGNO. */
974 if (regno
< FIRST_PSEUDO_REGISTER
&& mode
!= VOIDmode
)
976 if (regno
< max_value_regs
)
979 i
= regno
- max_value_regs
;
981 endregno
= regno
+ HARD_REGNO_NREGS (regno
, mode
);
986 endregno
= regno
+ 1;
989 for (; i
< endregno
; i
++)
991 struct elt_list
**l
= ®_VALUES (i
);
993 /* Go through all known values for this reg; if it overlaps the range
994 we're invalidating, remove the value. */
997 cselib_val
*v
= (*l
)->elt
;
998 struct elt_loc_list
**p
;
999 unsigned int this_last
= i
;
1001 if (i
< FIRST_PSEUDO_REGISTER
)
1002 this_last
+= HARD_REGNO_NREGS (i
, GET_MODE (v
->u
.val_rtx
)) - 1;
1004 if (this_last
< regno
)
1010 /* We have an overlap. */
1011 unchain_one_elt_list (l
);
1013 /* Now, we clear the mapping from value to reg. It must exist, so
1014 this code will crash intentionally if it doesn't. */
1015 for (p
= &v
->locs
; ; p
= &(*p
)->next
)
1019 if (GET_CODE (x
) == REG
&& REGNO (x
) == i
)
1021 unchain_one_elt_loc_list (p
);
1031 /* The memory at address MEM_BASE is being changed.
1032 Return whether this change will invalidate VAL. */
1035 cselib_mem_conflict_p (mem_base
, val
)
1043 code
= GET_CODE (val
);
1046 /* Get rid of a few simple cases quickly. */
1060 if (GET_MODE (mem_base
) == BLKmode
1061 || GET_MODE (val
) == BLKmode
1062 || anti_dependence (val
, mem_base
))
1065 /* The address may contain nested MEMs. */
1072 fmt
= GET_RTX_FORMAT (code
);
1073 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
1077 if (cselib_mem_conflict_p (mem_base
, XEXP (val
, i
)))
1080 else if (fmt
[i
] == 'E')
1081 for (j
= 0; j
< XVECLEN (val
, i
); j
++)
1082 if (cselib_mem_conflict_p (mem_base
, XVECEXP (val
, i
, j
)))
1089 /* For the value found in SLOT, walk its locations to determine if any overlap
1090 INFO (which is a MEM rtx). */
1093 cselib_invalidate_mem_1 (slot
, info
)
1097 cselib_val
*v
= (cselib_val
*) *slot
;
1098 rtx mem_rtx
= (rtx
) info
;
1099 struct elt_loc_list
**p
= &v
->locs
;
1100 int had_locs
= v
->locs
!= 0;
1106 struct elt_list
**mem_chain
;
1108 /* MEMs may occur in locations only at the top level; below
1109 that every MEM or REG is substituted by its VALUE. */
1110 if (GET_CODE (x
) != MEM
1111 || ! cselib_mem_conflict_p (mem_rtx
, x
))
1117 /* This one overlaps. */
1118 /* We must have a mapping from this MEM's address to the
1119 value (E). Remove that, too. */
1120 addr
= cselib_lookup (XEXP (x
, 0), VOIDmode
, 0);
1121 mem_chain
= &addr
->addr_list
;
1124 if ((*mem_chain
)->elt
== v
)
1126 unchain_one_elt_list (mem_chain
);
1130 mem_chain
= &(*mem_chain
)->next
;
1133 unchain_one_elt_loc_list (p
);
1136 if (had_locs
&& v
->locs
== 0)
1142 /* Invalidate any locations in the table which are changed because of a
1143 store to MEM_RTX. If this is called because of a non-const call
1144 instruction, MEM_RTX is (mem:BLK const0_rtx). */
1147 cselib_invalidate_mem (mem_rtx
)
1150 htab_traverse (hash_table
, cselib_invalidate_mem_1
, mem_rtx
);
1153 /* Invalidate DEST, which is being assigned to or clobbered. The second and
1154 the third parameter exist so that this function can be passed to
1155 note_stores; they are ignored. */
1158 cselib_invalidate_rtx (dest
, ignore
, data
)
1160 rtx ignore ATTRIBUTE_UNUSED
;
1161 void *data ATTRIBUTE_UNUSED
;
1163 while (GET_CODE (dest
) == STRICT_LOW_PART
|| GET_CODE (dest
) == SIGN_EXTRACT
1164 || GET_CODE (dest
) == ZERO_EXTRACT
|| GET_CODE (dest
) == SUBREG
)
1165 dest
= XEXP (dest
, 0);
1167 if (GET_CODE (dest
) == REG
)
1168 cselib_invalidate_regno (REGNO (dest
), GET_MODE (dest
));
1169 else if (GET_CODE (dest
) == MEM
)
1170 cselib_invalidate_mem (dest
);
1172 /* Some machines don't define AUTO_INC_DEC, but they still use push
1173 instructions. We need to catch that case here in order to
1174 invalidate the stack pointer correctly. Note that invalidating
1175 the stack pointer is different from invalidating DEST. */
1176 if (push_operand (dest
, GET_MODE (dest
)))
1177 cselib_invalidate_rtx (stack_pointer_rtx
, NULL_RTX
, NULL
);
1180 /* Record the result of a SET instruction. DEST is being set; the source
1181 contains the value described by SRC_ELT. If DEST is a MEM, DEST_ADDR_ELT
1182 describes its address. */
1185 cselib_record_set (dest
, src_elt
, dest_addr_elt
)
1187 cselib_val
*src_elt
, *dest_addr_elt
;
1189 int dreg
= GET_CODE (dest
) == REG
? (int) REGNO (dest
) : -1;
1191 if (src_elt
== 0 || side_effects_p (dest
))
1196 if (REG_VALUES (dreg
) == 0)
1197 VARRAY_PUSH_UINT (used_regs
, dreg
);
1199 if (dreg
< FIRST_PSEUDO_REGISTER
)
1201 unsigned int n
= HARD_REGNO_NREGS (dreg
, GET_MODE (dest
));
1203 if (n
> max_value_regs
)
1207 REG_VALUES (dreg
) = new_elt_list (REG_VALUES (dreg
), src_elt
);
1208 if (src_elt
->locs
== 0)
1210 src_elt
->locs
= new_elt_loc_list (src_elt
->locs
, dest
);
1212 else if (GET_CODE (dest
) == MEM
&& dest_addr_elt
!= 0)
1214 if (src_elt
->locs
== 0)
1216 add_mem_for_addr (dest_addr_elt
, src_elt
, dest
);
1220 /* Describe a single set that is part of an insn. */
1225 cselib_val
*src_elt
;
1226 cselib_val
*dest_addr_elt
;
1229 /* There is no good way to determine how many elements there can be
1230 in a PARALLEL. Since it's fairly cheap, use a really large number. */
1231 #define MAX_SETS (FIRST_PSEUDO_REGISTER * 2)
1233 /* Record the effects of any sets in INSN. */
1235 cselib_record_sets (insn
)
1240 struct set sets
[MAX_SETS
];
1241 rtx body
= PATTERN (insn
);
1244 body
= PATTERN (insn
);
1245 if (GET_CODE (body
) == COND_EXEC
)
1247 cond
= COND_EXEC_TEST (body
);
1248 body
= COND_EXEC_CODE (body
);
1251 /* Find all sets. */
1252 if (GET_CODE (body
) == SET
)
1254 sets
[0].src
= SET_SRC (body
);
1255 sets
[0].dest
= SET_DEST (body
);
1258 else if (GET_CODE (body
) == PARALLEL
)
1260 /* Look through the PARALLEL and record the values being
1261 set, if possible. Also handle any CLOBBERs. */
1262 for (i
= XVECLEN (body
, 0) - 1; i
>= 0; --i
)
1264 rtx x
= XVECEXP (body
, 0, i
);
1266 if (GET_CODE (x
) == SET
)
1268 sets
[n_sets
].src
= SET_SRC (x
);
1269 sets
[n_sets
].dest
= SET_DEST (x
);
1275 /* Look up the values that are read. Do this before invalidating the
1276 locations that are written. */
1277 for (i
= 0; i
< n_sets
; i
++)
1279 rtx dest
= sets
[i
].dest
;
1281 /* A STRICT_LOW_PART can be ignored; we'll record the equivalence for
1282 the low part after invalidating any knowledge about larger modes. */
1283 if (GET_CODE (sets
[i
].dest
) == STRICT_LOW_PART
)
1284 sets
[i
].dest
= dest
= XEXP (dest
, 0);
1286 /* We don't know how to record anything but REG or MEM. */
1287 if (GET_CODE (dest
) == REG
|| GET_CODE (dest
) == MEM
)
1289 rtx src
= sets
[i
].src
;
1291 src
= gen_rtx_IF_THEN_ELSE (GET_MODE (src
), cond
, src
, dest
);
1292 sets
[i
].src_elt
= cselib_lookup (src
, GET_MODE (dest
), 1);
1293 if (GET_CODE (dest
) == MEM
)
1294 sets
[i
].dest_addr_elt
= cselib_lookup (XEXP (dest
, 0), Pmode
, 1);
1296 sets
[i
].dest_addr_elt
= 0;
1300 /* Invalidate all locations written by this insn. Note that the elts we
1301 looked up in the previous loop aren't affected, just some of their
1302 locations may go away. */
1303 note_stores (body
, cselib_invalidate_rtx
, NULL
);
1305 /* Now enter the equivalences in our tables. */
1306 for (i
= 0; i
< n_sets
; i
++)
1308 rtx dest
= sets
[i
].dest
;
1309 if (GET_CODE (dest
) == REG
|| GET_CODE (dest
) == MEM
)
1310 cselib_record_set (dest
, sets
[i
].src_elt
, sets
[i
].dest_addr_elt
);
1314 /* Record the effects of INSN. */
1317 cselib_process_insn (insn
)
1323 cselib_current_insn
= insn
;
1325 /* Forget everything at a CODE_LABEL, a volatile asm, or a setjmp. */
1326 if (GET_CODE (insn
) == CODE_LABEL
1327 || (GET_CODE (insn
) == CALL_INSN
1328 && find_reg_note (insn
, REG_SETJMP
, NULL
))
1329 || (GET_CODE (insn
) == INSN
1330 && GET_CODE (PATTERN (insn
)) == ASM_OPERANDS
1331 && MEM_VOLATILE_P (PATTERN (insn
))))
1337 if (! INSN_P (insn
))
1339 cselib_current_insn
= 0;
1343 /* If this is a call instruction, forget anything stored in a
1344 call clobbered register, or, if this is not a const call, in memory as well.  */
1346 if (GET_CODE (insn
) == CALL_INSN
)
1348 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1349 if (call_used_regs
[i
])
1350 cselib_invalidate_regno (i
, VOIDmode
);
1352 if (! CONST_OR_PURE_CALL_P (insn
))
1353 cselib_invalidate_mem (callmem
);
1356 cselib_record_sets (insn
);
1359 /* Clobber any registers which appear in REG_INC notes. We
1360 could keep track of the changes to their values, but it is
1361 unlikely to help. */
1362 for (x
= REG_NOTES (insn
); x
; x
= XEXP (x
, 1))
1363 if (REG_NOTE_KIND (x
) == REG_INC
)
1364 cselib_invalidate_rtx (XEXP (x
, 0), NULL_RTX
, NULL
);
1367 /* Look for any CLOBBERs in CALL_INSN_FUNCTION_USAGE, but only
1368 after we have processed the insn. */
1369 if (GET_CODE (insn
) == CALL_INSN
)
1370 for (x
= CALL_INSN_FUNCTION_USAGE (insn
); x
; x
= XEXP (x
, 1))
1371 if (GET_CODE (XEXP (x
, 0)) == CLOBBER
)
1372 cselib_invalidate_rtx (XEXP (XEXP (x
, 0), 0), NULL_RTX
, NULL
);
1374 cselib_current_insn
= 0;
1376 if (n_useless_values
> MAX_USELESS_VALUES
)
1377 remove_useless_values ();
1380 /* Make sure our varrays are big enough. Not called from any cselib routines;
1381 it must be called by the user if it allocated new registers. */
1384 cselib_update_varray_sizes ()
1386 unsigned int nregs
= max_reg_num ();
1388 if (nregs
== cselib_nregs
)
1391 cselib_nregs
= nregs
;
1392 VARRAY_GROW (reg_values
, nregs
);
1393 VARRAY_GROW (used_regs
, nregs
);
1396 /* Initialize cselib for one pass. The caller must also call
1397 init_alias_analysis. */
1402 /* These are only created once. */
1405 gcc_obstack_init (&cselib_obstack
);
1406 cselib_startobj
= obstack_alloc (&cselib_obstack
, 0);
1408 callmem
= gen_rtx_MEM (BLKmode
, const0_rtx
);
1409 ggc_add_rtx_root (&callmem
, 1);
1412 cselib_nregs
= max_reg_num ();
1413 VARRAY_ELT_LIST_INIT (reg_values
, cselib_nregs
, "reg_values");
1414 VARRAY_UINT_INIT (used_regs
, cselib_nregs
, "used_regs");
1415 hash_table
= htab_create (31, get_value_hash
, entry_and_rtx_equal_p
, NULL
);
1419 /* Called when the current user is done with cselib. */
1425 VARRAY_FREE (reg_values
);
1426 VARRAY_FREE (used_regs
);
1427 htab_delete (hash_table
);