/* Common subexpression elimination library for GNU compiler.
   Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "hard-reg-set.h"
#include "insn-config.h"
static int entry_and_rtx_equal_p	PARAMS ((const void *, const void *));
static hashval_t get_value_hash		PARAMS ((const void *));
static struct elt_list *new_elt_list	PARAMS ((struct elt_list *,
						 cselib_val *));
static struct elt_loc_list *new_elt_loc_list PARAMS ((struct elt_loc_list *,
						      rtx));
static void unchain_one_value		PARAMS ((cselib_val *));
static void unchain_one_elt_list	PARAMS ((struct elt_list **));
static void unchain_one_elt_loc_list	PARAMS ((struct elt_loc_list **));
static void clear_table			PARAMS ((int));
static int discard_useless_locs		PARAMS ((void **, void *));
static int discard_useless_values	PARAMS ((void **, void *));
static void remove_useless_values	PARAMS ((void));
static rtx wrap_constant		PARAMS ((enum machine_mode, rtx));
static unsigned int hash_rtx		PARAMS ((rtx, enum machine_mode, int));
static cselib_val *new_cselib_val	PARAMS ((unsigned int,
						 enum machine_mode));
static void add_mem_for_addr		PARAMS ((cselib_val *, cselib_val *,
						 rtx));
static cselib_val *cselib_lookup_mem	PARAMS ((rtx, int));
static void cselib_invalidate_regno	PARAMS ((unsigned int,
						 enum machine_mode));
static int cselib_mem_conflict_p	PARAMS ((rtx, rtx));
static int cselib_invalidate_mem_1	PARAMS ((void **, void *));
static void cselib_invalidate_mem	PARAMS ((rtx));
static void cselib_invalidate_rtx	PARAMS ((rtx, rtx, void *));
static void cselib_record_set		PARAMS ((rtx, cselib_val *,
						 cselib_val *));
static void cselib_record_sets		PARAMS ((rtx));
/* There are three ways in which cselib can look up an rtx:
   - for a REG, the reg_values table (which is indexed by regno) is used
   - for a MEM, we recursively look up its address and then follow the
     addr_list of that value
   - for everything else, we compute a hash value and go through the hash
     table.  Since different rtx's can still have the same hash value,
     this involves walking the table entries for a given value and comparing
     the locations of the entries with the rtx we are looking up.  */
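
/* An illustrative sketch, not part of this library: the three-way dispatch
   described above, written out for the reader.  The function name below is
   hypothetical; the real logic lives in cselib_lookup and cselib_lookup_mem
   further down in this file.

     static cselib_val *
     lookup_sketch (x, mode)
	  rtx x;
	  enum machine_mode mode;
     {
       struct elt_list *l;

       if (GET_CODE (x) == REG)
	 {
	   for (l = REG_VALUES (REGNO (x)); l; l = l->next)
	     if (GET_MODE (l->elt->u.val_rtx) == mode)
	       return l->elt;
	   return 0;
	 }
       if (GET_CODE (x) == MEM)
	 return cselib_lookup_mem (x, 0);
       return (cselib_val *) htab_find_with_hash (hash_table,
						  wrap_constant (mode, x),
						  hash_rtx (x, mode, 0));
     }  */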
/* A table that enables us to look up elts by their value.  */
static GTY((param_is (cselib_val))) htab_t hash_table;
/* This is a global so we don't have to pass this through every function.
   It is used in new_elt_loc_list to set SETTING_INSN.  */
static rtx cselib_current_insn;
static bool cselib_current_insn_in_libcall;
/* Every new unknown value gets a unique number.  */
static unsigned int next_unknown_value;

/* The number of registers we had when the varrays were last resized.  */
static unsigned int cselib_nregs;

/* Count values without known locations.  Whenever this grows too big, we
   remove these useless values from the table.  */
static int n_useless_values;

/* Number of useless values before we remove them from the hash table.  */
#define MAX_USELESS_VALUES 32
/* This table maps from register number to values.  It does not contain
   pointers to cselib_val structures, but rather elt_lists.  The purpose is
   to be able to refer to the same register in different modes.  */
static GTY(()) varray_type reg_values;
static GTY((deletable (""))) varray_type reg_values_old;
#define REG_VALUES(I) VARRAY_ELT_LIST (reg_values, (I))
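
/* Illustrative example, not part of this library.  If one insn uses
   (reg:SI 100) and a later insn, with no intervening set of register 100,
   uses (reg:HI 100), then after

     cselib_lookup (gen_rtx_REG (SImode, 100), SImode, 1);
     cselib_lookup (gen_rtx_REG (HImode, 100), HImode, 1);

   REG_VALUES (100) is a two-element elt_list: one element whose VALUE rtx
   has mode SI and one whose VALUE rtx has mode HI, both describing the
   current contents of register 100.  */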
/* The largest number of hard regs used by any entry added to the
   REG_VALUES table.  Cleared on each clear_table() invocation.  */
static unsigned int max_value_regs;
/* Here the set of indices I with REG_VALUES(I) != 0 is saved.  This is used
   in clear_table() for fast emptying.  */
static GTY(()) varray_type used_regs;
static GTY((deletable (""))) varray_type used_regs_old;
/* We pass this to cselib_invalidate_mem to invalidate all of
   memory for a non-const call instruction.  */
static GTY(()) rtx callmem;
/* Caches for unused structures.  */
static GTY((deletable (""))) cselib_val *empty_vals;
static GTY((deletable (""))) struct elt_list *empty_elt_lists;
static GTY((deletable (""))) struct elt_loc_list *empty_elt_loc_lists;
/* Set by discard_useless_locs if it deleted the last location of any
   value.  */
static int values_became_useless;
/* Allocate a struct elt_list and fill in its two elements with the
   arguments.  */

static struct elt_list *
new_elt_list (next, elt)
     struct elt_list *next;
     cselib_val *elt;
{
  struct elt_list *el = empty_elt_lists;

  if (el)
    empty_elt_lists = el->next;
  else
    el = (struct elt_list *) ggc_alloc (sizeof (struct elt_list));
  el->next = next;
  el->elt = elt;
  return el;
}
/* Allocate a struct elt_loc_list and fill in its two elements with the
   arguments.  */

static struct elt_loc_list *
new_elt_loc_list (next, loc)
     struct elt_loc_list *next;
     rtx loc;
{
  struct elt_loc_list *el = empty_elt_loc_lists;

  if (el)
    empty_elt_loc_lists = el->next;
  else
    el = (struct elt_loc_list *) ggc_alloc (sizeof (struct elt_loc_list));
  el->next = next;
  el->loc = loc;
  el->setting_insn = cselib_current_insn;
  el->in_libcall = cselib_current_insn_in_libcall;
  return el;
}
/* The elt_list at *PL is no longer needed.  Unchain it and free its
   storage.  */

static void
unchain_one_elt_list (pl)
     struct elt_list **pl;
{
  struct elt_list *l = *pl;

  *pl = l->next;
  l->next = empty_elt_lists;
  empty_elt_lists = l;
}
/* Likewise for elt_loc_lists.  */

static void
unchain_one_elt_loc_list (pl)
     struct elt_loc_list **pl;
{
  struct elt_loc_list *l = *pl;

  *pl = l->next;
  l->next = empty_elt_loc_lists;
  empty_elt_loc_lists = l;
}
/* Likewise for cselib_vals.  This also frees the addr_list associated with
   V.  */

static void
unchain_one_value (v)
     cselib_val *v;
{
  while (v->addr_list)
    unchain_one_elt_list (&v->addr_list);

  v->u.next_free = empty_vals;
  empty_vals = v;
}
/* Remove all entries from the hash table.  Also used during
   initialization.  If CLEAR_ALL isn't set, then only clear the entries
   which are known to have been used.  */

static void
clear_table (clear_all)
     int clear_all;
{
  unsigned int i;

  if (clear_all)
    for (i = 0; i < cselib_nregs; i++)
      REG_VALUES (i) = 0;
  else
    for (i = 0; i < VARRAY_ACTIVE_SIZE (used_regs); i++)
      REG_VALUES (VARRAY_UINT (used_regs, i)) = 0;

  max_value_regs = 0;

  VARRAY_POP_ALL (used_regs);

  htab_empty (hash_table);

  n_useless_values = 0;

  next_unknown_value = 0;
}
/* The equality test for our hash table.  The first argument ENTRY is a table
   element (i.e. a cselib_val), while the second arg X is an rtx.  We know
   that all callers of htab_find_slot_with_hash will wrap CONST_INTs into a
   CONST of an appropriate mode.  */

static int
entry_and_rtx_equal_p (entry, x_arg)
     const void *entry, *x_arg;
{
  struct elt_loc_list *l;
  const cselib_val *v = (const cselib_val *) entry;
  rtx x = (rtx) x_arg;
  enum machine_mode mode = GET_MODE (x);

  if (GET_CODE (x) == CONST_INT
      || (mode == VOIDmode && GET_CODE (x) == CONST_DOUBLE))
    abort ();
  if (mode != GET_MODE (v->u.val_rtx))
    return 0;

  /* Unwrap X if necessary.  */
  if (GET_CODE (x) == CONST
      && (GET_CODE (XEXP (x, 0)) == CONST_INT
	  || GET_CODE (XEXP (x, 0)) == CONST_DOUBLE))
    x = XEXP (x, 0);

  /* We don't guarantee that distinct rtx's have different hash values,
     so we need to do a comparison.  */
  for (l = v->locs; l; l = l->next)
    if (rtx_equal_for_cselib_p (l->loc, x))
      return 1;

  return 0;
}
/* The hash function for our hash table.  The value is always computed with
   hash_rtx when adding an element; this function just extracts the hash
   value from a cselib_val structure.  */

static hashval_t
get_value_hash (entry)
     const void *entry;
{
  const cselib_val *v = (const cselib_val *) entry;
  return v->value;
}
/* Return true if X contains a VALUE rtx.  If ONLY_USELESS is set, we
   only return true for values which point to a cselib_val whose value
   element has been set to zero, which implies the cselib_val will be
   removed.  */

int
references_value_p (x, only_useless)
     rtx x;
     int only_useless;
{
  enum rtx_code code = GET_CODE (x);
  const char *fmt = GET_RTX_FORMAT (code);
  int i, j;

  if (GET_CODE (x) == VALUE
      && (! only_useless || CSELIB_VAL_PTR (x)->locs == 0))
    return 1;

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && references_value_p (XEXP (x, i), only_useless))
	return 1;
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  if (references_value_p (XVECEXP (x, i, j), only_useless))
	    return 1;
    }

  return 0;
}
/* For all locations found in X, delete locations that reference useless
   values (i.e. values without any location).  Called through
   htab_traverse.  */

static int
discard_useless_locs (x, info)
     void **x;
     void *info ATTRIBUTE_UNUSED;
{
  cselib_val *v = (cselib_val *)*x;
  struct elt_loc_list **p = &v->locs;
  int had_locs = v->locs != 0;

  while (*p)
    {
      if (references_value_p ((*p)->loc, 1))
	unchain_one_elt_loc_list (p);
      else
	p = &(*p)->next;
    }

  if (had_locs && v->locs == 0)
    {
      n_useless_values++;
      values_became_useless = 1;
    }
  return 1;
}
/* If X is a value with no locations, remove it from the hashtable.  */

static int
discard_useless_values (x, info)
     void **x;
     void *info ATTRIBUTE_UNUSED;
{
  cselib_val *v = (cselib_val *)*x;

  if (v->locs == 0)
    {
      htab_clear_slot (hash_table, x);
      unchain_one_value (v);
      n_useless_values--;
    }

  return 1;
}
/* Clean out useless values (i.e. those which no longer have locations
   associated with them) from the hash table.  */

static void
remove_useless_values ()
{
  /* First pass: eliminate locations that reference the value.  That in
     turn can make more values useless.  */
  do
    {
      values_became_useless = 0;
      htab_traverse (hash_table, discard_useless_locs, 0);
    }
  while (values_became_useless);

  /* Second pass: actually remove the values.  */
  htab_traverse (hash_table, discard_useless_values, 0);

  if (n_useless_values != 0)
    abort ();
}
/* Return nonzero if we can prove that X and Y contain the same value, taking
   our gathered information into account.  */

int
rtx_equal_for_cselib_p (x, y)
     rtx x, y;
{
  enum rtx_code code;
  const char *fmt;
  int i;

  if (GET_CODE (x) == REG || GET_CODE (x) == MEM)
    {
      cselib_val *e = cselib_lookup (x, GET_MODE (x), 0);

      if (e)
	x = e->u.val_rtx;
    }

  if (GET_CODE (y) == REG || GET_CODE (y) == MEM)
    {
      cselib_val *e = cselib_lookup (y, GET_MODE (y), 0);

      if (e)
	y = e->u.val_rtx;
    }

  if (x == y)
    return 1;

  if (GET_CODE (x) == VALUE && GET_CODE (y) == VALUE)
    return CSELIB_VAL_PTR (x) == CSELIB_VAL_PTR (y);

  if (GET_CODE (x) == VALUE)
    {
      cselib_val *e = CSELIB_VAL_PTR (x);
      struct elt_loc_list *l;

      for (l = e->locs; l; l = l->next)
	{
	  rtx t = l->loc;

	  /* Avoid infinite recursion.  */
	  if (GET_CODE (t) == REG || GET_CODE (t) == MEM)
	    continue;
	  else if (rtx_equal_for_cselib_p (t, y))
	    return 1;
	}

      return 0;
    }

  if (GET_CODE (y) == VALUE)
    {
      cselib_val *e = CSELIB_VAL_PTR (y);
      struct elt_loc_list *l;

      for (l = e->locs; l; l = l->next)
	{
	  rtx t = l->loc;

	  if (GET_CODE (t) == REG || GET_CODE (t) == MEM)
	    continue;
	  else if (rtx_equal_for_cselib_p (x, t))
	    return 1;
	}

      return 0;
    }

  if (GET_CODE (x) != GET_CODE (y) || GET_MODE (x) != GET_MODE (y))
    return 0;

  /* This won't be handled correctly by the code below.  */
  if (GET_CODE (x) == LABEL_REF)
    return XEXP (x, 0) == XEXP (y, 0);

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      int j;

      switch (fmt[i])
	{
	case 'w':
	  if (XWINT (x, i) != XWINT (y, i))
	    return 0;
	  break;

	case 'n':
	case 'i':
	  if (XINT (x, i) != XINT (y, i))
	    return 0;
	  break;

	case 'V':
	case 'E':
	  /* Two vectors must have the same length.  */
	  if (XVECLEN (x, i) != XVECLEN (y, i))
	    return 0;

	  /* And the corresponding elements must match.  */
	  for (j = 0; j < XVECLEN (x, i); j++)
	    if (! rtx_equal_for_cselib_p (XVECEXP (x, i, j),
					  XVECEXP (y, i, j)))
	      return 0;
	  break;

	case 'e':
	  if (! rtx_equal_for_cselib_p (XEXP (x, i), XEXP (y, i)))
	    return 0;
	  break;

	case 'S':
	case 's':
	  if (strcmp (XSTR (x, i), XSTR (y, i)))
	    return 0;
	  break;

	case 'u':
	  /* These are just backpointers, so they don't matter.  */
	  break;

	case '0':
	case 't':
	  break;

	  /* It is believed that rtx's at this level will never
	     contain anything but integers and other rtx's,
	     except for within LABEL_REFs and SYMBOL_REFs.  */
	default:
	  abort ();
	}
    }
  return 1;
}
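
/* Illustrative usage sketch, not part of this library: a pass that has fed
   its insns to cselib_process_insn can ask whether two arbitrary rtx's are
   known to hold the same value.  The variable names here are hypothetical.

     rtx mem_op, reg_op;
     ...
     if (rtx_equal_for_cselib_p (mem_op, reg_op))
       {
	 ... the two operands may be treated as equivalent here ...
       }

   Both operands are first mapped to their VALUEs (if known), so a REG and a
   MEM that were loaded from the same source compare equal.  */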
/* We need to pass down the mode of constants through the hash table
   functions.  For that purpose, wrap them in a CONST of the appropriate
   mode.  */

static rtx
wrap_constant (mode, x)
     enum machine_mode mode;
     rtx x;
{
  if (GET_CODE (x) != CONST_INT
      && (GET_CODE (x) != CONST_DOUBLE || GET_MODE (x) != VOIDmode))
    return x;
  if (mode == VOIDmode)
    abort ();
  return gen_rtx_CONST (mode, x);
}
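
/* Illustrative example, not part of this library: a caller in this file
   that wants to hash the SImode constant 42 wraps it first, so that the
   mode participates in hashing and in entry_and_rtx_equal_p:

     rtx wrapped = wrap_constant (SImode, GEN_INT (42));

   yields (const:SI (const_int 42)).  A bare (const_int 42) handed to the
   hash table would otherwise be indistinguishable from the same constant
   used in another mode.  */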
/* Hash an rtx.  Return 0 if we couldn't hash the rtx.
   For registers and memory locations, we look up their cselib_val structure
   and return its VALUE element.
   Possible reasons for return 0 are: the object is volatile, or we couldn't
   find a register or memory location in the table and CREATE is zero.  If
   CREATE is nonzero, table elts are created for regs and mem.
   MODE is used in hashing for CONST_INTs only;
   otherwise the mode of X is used.  */

static unsigned int
hash_rtx (x, mode, create)
     rtx x;
     enum machine_mode mode;
     int create;
{
  cselib_val *e;
  int i, j;
  enum rtx_code code;
  const char *fmt;
  unsigned int hash = 0;

  code = GET_CODE (x);
  hash += (unsigned) code + (unsigned) GET_MODE (x);

  switch (code)
    {
    case MEM:
    case REG:
      e = cselib_lookup (x, GET_MODE (x), create);
      if (! e)
	return 0;

      return e->value;

    case CONST_INT:
      hash += ((unsigned) CONST_INT << 7) + (unsigned) mode + INTVAL (x);
      return hash ? hash : (unsigned int) CONST_INT;

    case CONST_DOUBLE:
      /* This is like the general case, except that it only counts
	 the integers representing the constant.  */
      hash += (unsigned) code + (unsigned) GET_MODE (x);
      if (GET_MODE (x) != VOIDmode)
	hash += real_hash (CONST_DOUBLE_REAL_VALUE (x));
      else
	hash += ((unsigned) CONST_DOUBLE_LOW (x)
		 + (unsigned) CONST_DOUBLE_HIGH (x));
      return hash ? hash : (unsigned int) CONST_DOUBLE;

    case CONST_VECTOR:
      {
	int units;
	rtx elt;

	units = CONST_VECTOR_NUNITS (x);

	for (i = 0; i < units; ++i)
	  {
	    elt = CONST_VECTOR_ELT (x, i);
	    hash += hash_rtx (elt, GET_MODE (elt), 0);
	  }

	return hash;
      }

      /* Assume there is only one rtx object for any given label.  */
    case LABEL_REF:
      hash
	+= ((unsigned) LABEL_REF << 7) + (unsigned long) XEXP (x, 0);
      return hash ? hash : (unsigned int) LABEL_REF;

    case SYMBOL_REF:
      hash
	+= ((unsigned) SYMBOL_REF << 7) + (unsigned long) XSTR (x, 0);
      return hash ? hash : (unsigned int) SYMBOL_REF;

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case POST_MODIFY:
    case PRE_MODIFY:
    case PC:
    case CC0:
    case CALL:
    case UNSPEC_VOLATILE:
      return 0;

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
	return 0;

      break;

    default:
      break;
    }

  i = GET_RTX_LENGTH (code) - 1;
  fmt = GET_RTX_FORMAT (code);
  for (; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  rtx tem = XEXP (x, i);
	  unsigned int tem_hash = hash_rtx (tem, 0, create);

	  if (tem_hash == 0)
	    return 0;

	  hash += tem_hash;
	}
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  {
	    unsigned int tem_hash = hash_rtx (XVECEXP (x, i, j), 0, create);

	    if (tem_hash == 0)
	      return 0;

	    hash += tem_hash;
	  }
      else if (fmt[i] == 's')
	{
	  const unsigned char *p = (const unsigned char *) XSTR (x, i);

	  if (p)
	    while (*p)
	      hash += *p++;
	}
      else if (fmt[i] == 'i')
	hash += XINT (x, i);
      else if (fmt[i] == '0' || fmt[i] == 't')
	/* unused */;
      else
	abort ();
    }

  return hash ? hash : 1 + (unsigned int) GET_CODE (x);
}
/* Create a new value structure for VALUE and initialize it.  The mode of the
   value is MODE.  */

static cselib_val *
new_cselib_val (value, mode)
     unsigned int value;
     enum machine_mode mode;
{
  cselib_val *e = empty_vals;

  if (e)
    empty_vals = e->u.next_free;
  else
    e = (cselib_val *) ggc_alloc (sizeof (cselib_val));

  if (value == 0)
    abort ();

  e->value = value;
  e->u.val_rtx = gen_rtx_VALUE (mode);
  CSELIB_VAL_PTR (e->u.val_rtx) = e;
  e->addr_list = 0;
  e->locs = 0;
  return e;
}
/* ADDR_ELT is a value that is used as address.  MEM_ELT is the value that
   contains the data at this address.  X is a MEM that represents the
   value.  Update the two value structures to represent this situation.  */

static void
add_mem_for_addr (addr_elt, mem_elt, x)
     cselib_val *addr_elt, *mem_elt;
     rtx x;
{
  struct elt_loc_list *l;

  /* Avoid duplicates.  */
  for (l = mem_elt->locs; l; l = l->next)
    if (GET_CODE (l->loc) == MEM
	&& CSELIB_VAL_PTR (XEXP (l->loc, 0)) == addr_elt)
      return;

  addr_elt->addr_list = new_elt_list (addr_elt->addr_list, mem_elt);
  mem_elt->locs
    = new_elt_loc_list (mem_elt->locs,
			replace_equiv_address_nv (x, addr_elt->u.val_rtx));
}
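
/* Illustrative example, not part of this library: if X is
   (mem:SI (reg/f:SI 3)) and ADDR_ELT is the value recorded for that
   address, then after add_mem_for_addr the two structures are cross-linked:
   MEM_ELT->locs gains a location of the form (mem:SI V), where V is
   ADDR_ELT's VALUE rtx, and ADDR_ELT->addr_list gains an elt_list entry
   pointing at MEM_ELT.  A later lookup of a MEM with an equivalent address
   can therefore find MEM_ELT by walking ADDR_ELT's addr_list, which is what
   cselib_lookup_mem below does.  */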
/* Subroutine of cselib_lookup.  Return a value for X, which is a MEM rtx.
   If CREATE, make a new one if we haven't seen it before.  */

static cselib_val *
cselib_lookup_mem (x, create)
     rtx x;
     int create;
{
  enum machine_mode mode = GET_MODE (x);
  void **slot;
  cselib_val *addr;
  cselib_val *mem_elt;
  struct elt_list *l;

  if (MEM_VOLATILE_P (x) || mode == BLKmode
      || (FLOAT_MODE_P (mode) && flag_float_store))
    return 0;

  /* Look up the value for the address.  */
  addr = cselib_lookup (XEXP (x, 0), mode, create);
  if (! addr)
    return 0;

  /* Find a value that describes a value of our mode at that address.  */
  for (l = addr->addr_list; l; l = l->next)
    if (GET_MODE (l->elt->u.val_rtx) == mode)
      return l->elt;

  if (! create)
    return 0;

  mem_elt = new_cselib_val (++next_unknown_value, mode);
  add_mem_for_addr (addr, mem_elt, x);
  slot = htab_find_slot_with_hash (hash_table, wrap_constant (mode, x),
				   mem_elt->value, INSERT);
  *slot = mem_elt;
  return mem_elt;
}
/* Walk rtx X and replace all occurrences of REG and MEM subexpressions
   with VALUE expressions.  This way, it becomes independent of changes
   to registers and memory.
   X isn't actually modified; if modifications are needed, new rtl is
   allocated.  However, the return value can share rtl with X.  */

rtx
cselib_subst_to_values (x)
     rtx x;
{
  enum rtx_code code = GET_CODE (x);
  const char *fmt = GET_RTX_FORMAT (code);
  cselib_val *e;
  struct elt_list *l;
  rtx copy = x;
  int i;

  switch (code)
    {
    case REG:
      for (l = REG_VALUES (REGNO (x)); l; l = l->next)
	if (GET_MODE (l->elt->u.val_rtx) == GET_MODE (x))
	  return l->elt->u.val_rtx;

      abort ();

    case MEM:
      e = cselib_lookup_mem (x, 0);
      if (! e)
	{
	  /* This happens for autoincrements.  Assign a value that doesn't
	     match any other.  */
	  e = new_cselib_val (++next_unknown_value, GET_MODE (x));
	}
      return e->u.val_rtx;

    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CONST_INT:
      return x;

    case POST_INC:
    case PRE_INC:
    case POST_DEC:
    case PRE_DEC:
    case POST_MODIFY:
    case PRE_MODIFY:
      e = new_cselib_val (++next_unknown_value, GET_MODE (x));
      return e->u.val_rtx;

    default:
      break;
    }

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  rtx t = cselib_subst_to_values (XEXP (x, i));

	  if (t != XEXP (x, i) && x == copy)
	    copy = shallow_copy_rtx (x);

	  XEXP (copy, i) = t;
	}
      else if (fmt[i] == 'E')
	{
	  int j, k;

	  for (j = 0; j < XVECLEN (x, i); j++)
	    {
	      rtx t = cselib_subst_to_values (XVECEXP (x, i, j));

	      if (t != XVECEXP (x, i, j) && XVEC (x, i) == XVEC (copy, i))
		{
		  if (x == copy)
		    copy = shallow_copy_rtx (x);

		  XVEC (copy, i) = rtvec_alloc (XVECLEN (x, i));
		  for (k = 0; k < j; k++)
		    XVECEXP (copy, i, k) = XVECEXP (x, i, k);
		}

	      XVECEXP (copy, i, j) = t;
	    }
	}
    }

  return copy;
}
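
/* Illustrative example, not part of this library: given an address such as

     (plus:SI (reg:SI 100) (const_int 16))

   and assuming register 100 is already known to cselib, the function
   returns

     (plus:SI V (const_int 16))

   where V is the VALUE rtx recorded for register 100.  The const_int is
   shared with the original rtx; only the outer PLUS is copied, and the
   result stays meaningful even if register 100 is later overwritten.  */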
/* Look up the rtl expression X in our tables and return the value it has.
   If CREATE is zero, we return NULL if we don't know the value.  Otherwise,
   we create a new one if possible, using mode MODE if X doesn't have a mode
   (i.e. because it's a constant).  */

cselib_val *
cselib_lookup (x, mode, create)
     rtx x;
     enum machine_mode mode;
     int create;
{
  void **slot;
  cselib_val *e;
  unsigned int hashval;

  if (GET_MODE (x) != VOIDmode)
    mode = GET_MODE (x);

  if (GET_CODE (x) == VALUE)
    return CSELIB_VAL_PTR (x);

  if (GET_CODE (x) == REG)
    {
      struct elt_list *l;
      unsigned int i = REGNO (x);

      for (l = REG_VALUES (i); l; l = l->next)
	if (mode == GET_MODE (l->elt->u.val_rtx))
	  return l->elt;

      if (! create)
	return 0;

      if (i < FIRST_PSEUDO_REGISTER)
	{
	  unsigned int n = HARD_REGNO_NREGS (i, mode);

	  if (n > max_value_regs)
	    max_value_regs = n;
	}

      e = new_cselib_val (++next_unknown_value, GET_MODE (x));
      e->locs = new_elt_loc_list (e->locs, x);
      if (REG_VALUES (i) == 0)
	VARRAY_PUSH_UINT (used_regs, i);
      REG_VALUES (i) = new_elt_list (REG_VALUES (i), e);
      slot = htab_find_slot_with_hash (hash_table, x, e->value, INSERT);
      *slot = e;
      return e;
    }

  if (GET_CODE (x) == MEM)
    return cselib_lookup_mem (x, create);

  hashval = hash_rtx (x, mode, create);
  /* Can't even create if hashing is not possible.  */
  if (! hashval)
    return 0;

  slot = htab_find_slot_with_hash (hash_table, wrap_constant (mode, x),
				   hashval, create ? INSERT : NO_INSERT);
  if (slot == 0)
    return 0;

  e = (cselib_val *) *slot;
  if (e)
    return e;

  e = new_cselib_val (hashval, mode);

  /* We have to fill the slot before calling cselib_subst_to_values:
     the hash table is inconsistent until we do so, and
     cselib_subst_to_values will need to do lookups.  */
  *slot = (void *) e;
  e->locs = new_elt_loc_list (e->locs, cselib_subst_to_values (x));
  return e;
}
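
/* Illustrative usage, not part of this library: to find out what value a
   register currently holds without creating a new table entry, a client can
   write (variable names hypothetical)

     cselib_val *v = cselib_lookup (reg_rtx, GET_MODE (reg_rtx), 0);

     if (v != 0)
       ... v->locs lists the rtx's known to hold the same value ...

   Passing 1 for CREATE instead would enter REG_RTX into the table if it is
   not there yet.  */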
/* Invalidate any entries in reg_values that overlap REGNO.  This is called
   if REGNO is changing.  MODE is the mode of the assignment to REGNO, which
   is used to determine how many hard registers are being changed.  If MODE
   is VOIDmode, then only REGNO is being changed; this is used when
   invalidating call clobbered registers across a call.  */

static void
cselib_invalidate_regno (regno, mode)
     unsigned int regno;
     enum machine_mode mode;
{
  unsigned int endregno;
  unsigned int i;

  /* If we see pseudos after reload, something is _wrong_.  */
  if (reload_completed && regno >= FIRST_PSEUDO_REGISTER
      && reg_renumber[regno] >= 0)
    abort ();

  /* Determine the range of registers that must be invalidated.  For
     pseudos, only REGNO is affected.  For hard regs, we must take MODE
     into account, and we must also invalidate lower register numbers
     if they contain values that overlap REGNO.  */
  if (regno < FIRST_PSEUDO_REGISTER)
    {
      if (mode == VOIDmode)
	abort ();

      if (regno < max_value_regs)
	i = 0;
      else
	i = regno - max_value_regs;

      endregno = regno + HARD_REGNO_NREGS (regno, mode);
    }
  else
    {
      i = regno;
      endregno = regno + 1;
    }

  for (; i < endregno; i++)
    {
      struct elt_list **l = &REG_VALUES (i);

      /* Go through all known values for this reg; if it overlaps the range
	 we're invalidating, remove the value.  */
      while (*l)
	{
	  cselib_val *v = (*l)->elt;
	  struct elt_loc_list **p;
	  unsigned int this_last = i;

	  if (i < FIRST_PSEUDO_REGISTER)
	    this_last += HARD_REGNO_NREGS (i, GET_MODE (v->u.val_rtx)) - 1;

	  if (this_last < regno)
	    {
	      l = &(*l)->next;
	      continue;
	    }

	  /* We have an overlap.  */
	  unchain_one_elt_list (l);

	  /* Now, we clear the mapping from value to reg.  It must exist, so
	     this code will crash intentionally if it doesn't.  */
	  for (p = &v->locs; ; p = &(*p)->next)
	    {
	      rtx x = (*p)->loc;

	      if (GET_CODE (x) == REG && REGNO (x) == i)
		{
		  unchain_one_elt_loc_list (p);
		  break;
		}
	    }
	  if (v->locs == 0)
	    n_useless_values++;
	}
    }
}
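
/* Illustrative example, not part of this library: on a target where
   HARD_REGNO_NREGS (2, DImode) is 2, a DImode value recorded for hard
   register 2 also occupies register 3.  A later SImode store to register 3
   leads to cselib_invalidate_regno (3, SImode); because max_value_regs is
   at least 2, the loop above starts scanning at register 3 - max_value_regs
   and, when it reaches register 2, finds that the DImode entry extends
   through register 3 (this_last == 3) and therefore overlaps the
   invalidated range, so that entry is removed as well.  */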
/* The memory at address MEM_BASE is being changed.
   Return whether this change will invalidate VAL.  */

static int
cselib_mem_conflict_p (mem_base, val)
     rtx mem_base;
     rtx val;
{
  enum rtx_code code;
  const char *fmt;
  int i, j;

  code = GET_CODE (val);
  switch (code)
    {
      /* Get rid of a few simple cases quickly.  */
    case REG:
    case PC:
    case CC0:
    case SCRATCH:
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case MEM:
      if (GET_MODE (mem_base) == BLKmode
	  || GET_MODE (val) == BLKmode
	  || anti_dependence (val, mem_base))
	return 1;

      /* The address may contain nested MEMs.  */
      break;

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  if (cselib_mem_conflict_p (mem_base, XEXP (val, i)))
	    return 1;
	}
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (val, i); j++)
	  if (cselib_mem_conflict_p (mem_base, XVECEXP (val, i, j)))
	    return 1;
    }

  return 0;
}
/* For the value found in SLOT, walk its locations to determine if any overlap
   INFO (which is a MEM rtx).  */

static int
cselib_invalidate_mem_1 (slot, info)
     void **slot;
     void *info;
{
  cselib_val *v = (cselib_val *) *slot;
  rtx mem_rtx = (rtx) info;
  struct elt_loc_list **p = &v->locs;
  int had_locs = v->locs != 0;

  while (*p)
    {
      rtx x = (*p)->loc;
      cselib_val *addr;
      struct elt_list **mem_chain;

      /* MEMs may occur in locations only at the top level; below
	 that every MEM or REG is substituted by its VALUE.  */
      if (GET_CODE (x) != MEM
	  || ! cselib_mem_conflict_p (mem_rtx, x))
	{
	  p = &(*p)->next;
	  continue;
	}

      /* This one overlaps.  */
      /* We must have a mapping from this MEM's address to the
	 value (E).  Remove that, too.  */
      addr = cselib_lookup (XEXP (x, 0), VOIDmode, 0);
      mem_chain = &addr->addr_list;
      for (;;)
	{
	  if ((*mem_chain)->elt == v)
	    {
	      unchain_one_elt_list (mem_chain);
	      break;
	    }

	  mem_chain = &(*mem_chain)->next;
	}

      unchain_one_elt_loc_list (p);
    }

  if (had_locs && v->locs == 0)
    n_useless_values++;

  return 1;
}
/* Invalidate any locations in the table which are changed because of a
   store to MEM_RTX.  If this is called because of a non-const call
   instruction, MEM_RTX is (mem:BLK const0_rtx).  */

static void
cselib_invalidate_mem (mem_rtx)
     rtx mem_rtx;
{
  htab_traverse (hash_table, cselib_invalidate_mem_1, mem_rtx);
}
/* Invalidate DEST, which is being assigned to or clobbered.  The second and
   the third parameter exist so that this function can be passed to
   note_stores; they are ignored.  */

static void
cselib_invalidate_rtx (dest, ignore, data)
     rtx dest;
     rtx ignore ATTRIBUTE_UNUSED;
     void *data ATTRIBUTE_UNUSED;
{
  while (GET_CODE (dest) == STRICT_LOW_PART || GET_CODE (dest) == SIGN_EXTRACT
	 || GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == SUBREG)
    dest = XEXP (dest, 0);

  if (GET_CODE (dest) == REG)
    cselib_invalidate_regno (REGNO (dest), GET_MODE (dest));
  else if (GET_CODE (dest) == MEM)
    cselib_invalidate_mem (dest);

  /* Some machines don't define AUTO_INC_DEC, but they still use push
     instructions.  We need to catch that case here in order to
     invalidate the stack pointer correctly.  Note that invalidating
     the stack pointer is different from invalidating DEST.  */
  if (push_operand (dest, GET_MODE (dest)))
    cselib_invalidate_rtx (stack_pointer_rtx, NULL_RTX, NULL);
}
/* Record the result of a SET instruction.  DEST is being set; the source
   contains the value described by SRC_ELT.  If DEST is a MEM, DEST_ADDR_ELT
   describes its address.  */

static void
cselib_record_set (dest, src_elt, dest_addr_elt)
     rtx dest;
     cselib_val *src_elt, *dest_addr_elt;
{
  int dreg = GET_CODE (dest) == REG ? (int) REGNO (dest) : -1;

  if (src_elt == 0 || side_effects_p (dest))
    return;

  if (dreg >= 0)
    {
      if (REG_VALUES (dreg) == 0)
	VARRAY_PUSH_UINT (used_regs, dreg);

      if (dreg < FIRST_PSEUDO_REGISTER)
	{
	  unsigned int n = HARD_REGNO_NREGS (dreg, GET_MODE (dest));

	  if (n > max_value_regs)
	    max_value_regs = n;
	}

      REG_VALUES (dreg) = new_elt_list (REG_VALUES (dreg), src_elt);
      if (src_elt->locs == 0)
	n_useless_values--;
      src_elt->locs = new_elt_loc_list (src_elt->locs, dest);
    }
  else if (GET_CODE (dest) == MEM && dest_addr_elt != 0)
    {
      if (src_elt->locs == 0)
	n_useless_values--;
      add_mem_for_addr (dest_addr_elt, src_elt, dest);
    }
}
/* Describe a single set that is part of an insn.  */
struct set
{
  rtx src;
  rtx dest;
  cselib_val *src_elt;
  cselib_val *dest_addr_elt;
};

/* There is no good way to determine how many elements there can be
   in a PARALLEL.  Since it's fairly cheap, use a really large number.  */
#define MAX_SETS (FIRST_PSEUDO_REGISTER * 2)
/* Record the effects of any sets in INSN.  */

static void
cselib_record_sets (insn)
     rtx insn;
{
  int n_sets = 0;
  int i;
  struct set sets[MAX_SETS];
  rtx body = PATTERN (insn);
  rtx cond = 0;

  body = PATTERN (insn);
  if (GET_CODE (body) == COND_EXEC)
    {
      cond = COND_EXEC_TEST (body);
      body = COND_EXEC_CODE (body);
    }

  /* Find all sets.  */
  if (GET_CODE (body) == SET)
    {
      sets[0].src = SET_SRC (body);
      sets[0].dest = SET_DEST (body);
      n_sets = 1;
    }
  else if (GET_CODE (body) == PARALLEL)
    {
      /* Look through the PARALLEL and record the values being
	 set, if possible.  Also handle any CLOBBERs.  */
      for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
	{
	  rtx x = XVECEXP (body, 0, i);

	  if (GET_CODE (x) == SET)
	    {
	      sets[n_sets].src = SET_SRC (x);
	      sets[n_sets].dest = SET_DEST (x);
	      n_sets++;
	    }
	}
    }

  /* Look up the values that are read.  Do this before invalidating the
     locations that are written.  */
  for (i = 0; i < n_sets; i++)
    {
      rtx dest = sets[i].dest;

      /* A STRICT_LOW_PART can be ignored; we'll record the equivalence for
	 the low part after invalidating any knowledge about larger modes.  */
      if (GET_CODE (sets[i].dest) == STRICT_LOW_PART)
	sets[i].dest = dest = XEXP (dest, 0);

      /* We don't know how to record anything but REG or MEM.  */
      if (GET_CODE (dest) == REG || GET_CODE (dest) == MEM)
	{
	  rtx src = sets[i].src;

	  if (cond)
	    src = gen_rtx_IF_THEN_ELSE (GET_MODE (src), cond, src, dest);
	  sets[i].src_elt = cselib_lookup (src, GET_MODE (dest), 1);
	  if (GET_CODE (dest) == MEM)
	    sets[i].dest_addr_elt = cselib_lookup (XEXP (dest, 0), Pmode, 1);
	  else
	    sets[i].dest_addr_elt = 0;
	}
    }

  /* Invalidate all locations written by this insn.  Note that the elts we
     looked up in the previous loop aren't affected, just some of their
     locations may go away.  */
  note_stores (body, cselib_invalidate_rtx, NULL);

  /* Now enter the equivalences in our tables.  */
  for (i = 0; i < n_sets; i++)
    {
      rtx dest = sets[i].dest;

      if (GET_CODE (dest) == REG || GET_CODE (dest) == MEM)
	cselib_record_set (dest, sets[i].src_elt, sets[i].dest_addr_elt);
    }
}
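
/* Illustrative example, not part of this library (register numbers
   invented): for a conditionally executed set such as

     (cond_exec (ne (reg:CC 17) (const_int 0))
		(set (reg:SI 100) (reg:SI 101)))

   COND is (ne (reg:CC 17) (const_int 0)) and the source that gets recorded
   above is

     (if_then_else:SI (ne (reg:CC 17) (const_int 0))
		      (reg:SI 101)
		      (reg:SI 100))

   i.e. the new contents of register 100 are its old contents when the
   condition is false, so the value is not unconditionally equated with
   register 101.  */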
/* Record the effects of INSN.  */

void
cselib_process_insn (insn)
     rtx insn;
{
  int i;
  rtx x;

  if (find_reg_note (insn, REG_LIBCALL, NULL))
    cselib_current_insn_in_libcall = true;
  if (find_reg_note (insn, REG_RETVAL, NULL))
    cselib_current_insn_in_libcall = false;
  cselib_current_insn = insn;

  /* Forget everything at a CODE_LABEL, a volatile asm, or a setjmp.  */
  if (GET_CODE (insn) == CODE_LABEL
      || (GET_CODE (insn) == CALL_INSN
	  && find_reg_note (insn, REG_SETJMP, NULL))
      || (GET_CODE (insn) == INSN
	  && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
	  && MEM_VOLATILE_P (PATTERN (insn))))
    {
      clear_table (0);
      return;
    }

  if (! INSN_P (insn))
    {
      cselib_current_insn = 0;
      return;
    }

  /* If this is a call instruction, forget anything stored in a
     call clobbered register, or, if this is not a const call, in
     memory.  */
  if (GET_CODE (insn) == CALL_INSN)
    {
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if (call_used_regs[i])
	  cselib_invalidate_regno (i, reg_raw_mode[i]);

      if (! CONST_OR_PURE_CALL_P (insn))
	cselib_invalidate_mem (callmem);
    }

  cselib_record_sets (insn);

#ifdef AUTO_INC_DEC
  /* Clobber any registers which appear in REG_INC notes.  We
     could keep track of the changes to their values, but it is
     unlikely to help.  */
  for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
    if (REG_NOTE_KIND (x) == REG_INC)
      cselib_invalidate_rtx (XEXP (x, 0), NULL_RTX, NULL);
#endif

  /* Look for any CLOBBERs in CALL_INSN_FUNCTION_USAGE, but only
     after we have processed the insn.  */
  if (GET_CODE (insn) == CALL_INSN)
    for (x = CALL_INSN_FUNCTION_USAGE (insn); x; x = XEXP (x, 1))
      if (GET_CODE (XEXP (x, 0)) == CLOBBER)
	cselib_invalidate_rtx (XEXP (XEXP (x, 0), 0), NULL_RTX, NULL);

  cselib_current_insn = 0;

  if (n_useless_values > MAX_USELESS_VALUES)
    remove_useless_values ();
}
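
/* Illustrative usage sketch, not part of this library: a typical client
   (local variable names hypothetical) drives cselib over one function like
   this --

     cselib_init ();
     init_alias_analysis ();

     for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
       {
	 cselib_process_insn (insn);
	 ... query cselib_lookup or rtx_equal_for_cselib_p here ...
       }

     end_alias_analysis ();
     cselib_finish ();

   cselib_process_insn must see every insn in order, since each call both
   records new equivalences and invalidates clobbered ones.  */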
/* Make sure our varrays are big enough.  Not called from any cselib routines;
   it must be called by the user if it allocated new registers.  */

void
cselib_update_varray_sizes ()
{
  unsigned int nregs = max_reg_num ();

  if (nregs == cselib_nregs)
    return;

  cselib_nregs = nregs;
  VARRAY_GROW (reg_values, nregs);
  VARRAY_GROW (used_regs, nregs);
}
/* Initialize cselib for one pass.  The caller must also call
   init_alias_analysis.  */

void
cselib_init ()
{
  /* This is only created once.  */
  if (! callmem)
    callmem = gen_rtx_MEM (BLKmode, const0_rtx);

  cselib_nregs = max_reg_num ();
  if (reg_values_old != NULL && VARRAY_SIZE (reg_values_old) >= cselib_nregs)
    {
      reg_values = reg_values_old;
      used_regs = used_regs_old;
      VARRAY_CLEAR (reg_values);
      VARRAY_CLEAR (used_regs);
    }
  else
    {
      VARRAY_ELT_LIST_INIT (reg_values, cselib_nregs, "reg_values");
      VARRAY_UINT_INIT (used_regs, cselib_nregs, "used_regs");
    }
  hash_table = htab_create_ggc (31, get_value_hash, entry_and_rtx_equal_p,
				NULL);
  cselib_current_insn_in_libcall = false;
}
/* Called when the current user is done with cselib.  */

void
cselib_finish ()
{
  reg_values_old = reg_values;
  reg_values = 0;
  used_regs_old = used_regs;
  used_regs = 0;
  hash_table = 0;
  n_useless_values = 0;
  next_unknown_value = 0;
}

#include "gt-cselib.h"