/* Common subexpression elimination library for GNU compiler.
   Copyright (C) 1987-2022 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "df.h"
#include "memmodel.h"
#include "tm_p.h"
#include "regs.h"
#include "emit-rtl.h"
#include "dumpfile.h"
#include "cselib.h"
#include "function-abi.h"
#include "alias.h"
/* A list of cselib_val structures.  */
struct elt_list
{
  struct elt_list *next;
  cselib_val *elt;
};
static bool cselib_record_memory;
static bool cselib_preserve_constants;
static bool cselib_any_perm_equivs;
static inline void promote_debug_loc (struct elt_loc_list *l);
static struct elt_list *new_elt_list (struct elt_list *, cselib_val *);
static void new_elt_loc_list (cselib_val *, rtx);
static void unchain_one_value (cselib_val *);
static void unchain_one_elt_list (struct elt_list **);
static void unchain_one_elt_loc_list (struct elt_loc_list **);
static void remove_useless_values (void);
static unsigned int cselib_hash_rtx (rtx, int, machine_mode);
static cselib_val *new_cselib_val (unsigned int, machine_mode, rtx);
static void add_mem_for_addr (cselib_val *, cselib_val *, rtx);
static cselib_val *cselib_lookup_mem (rtx, int);
static void cselib_invalidate_regno (unsigned int, machine_mode);
static void cselib_invalidate_mem (rtx);
static void cselib_record_set (rtx, cselib_val *, cselib_val *);
static void cselib_record_sets (rtx_insn *);
static rtx autoinc_split (rtx, rtx *, machine_mode);

#define PRESERVED_VALUE_P(RTX) \
  (RTL_FLAG_CHECK1 ("PRESERVED_VALUE_P", (RTX), VALUE)->unchanging)

#define SP_BASED_VALUE_P(RTX) \
  (RTL_FLAG_CHECK1 ("SP_BASED_VALUE_P", (RTX), VALUE)->jump)

#define SP_DERIVED_VALUE_P(RTX) \
  (RTL_FLAG_CHECK1 ("SP_DERIVED_VALUE_P", (RTX), VALUE)->call)

struct expand_value_data
{
  bitmap regs_active;
  cselib_expand_callback callback;
  void *callback_arg;
  bool dummy;
};

static rtx cselib_expand_value_rtx_1 (rtx, struct expand_value_data *, int);
/* There are three ways in which cselib can look up an rtx:
   - for a REG, the reg_values table (which is indexed by regno) is used
   - for a MEM, we recursively look up its address and then follow the
     addr_list of that value
   - for everything else, we compute a hash value and go through the hash
     table.  Since different rtx's can still have the same hash value,
     this involves walking the table entries for a given value and comparing
     the locations of the entries with the rtx we are looking up.  */
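
/* For example (an illustrative walk-through of the above, not extra
   machinery): looking up (mem:SI (plus:SI (reg:SI 1) (const_int 4)))
   hashes the PLUS address, finds or creates its VALUE, and then scans
   that VALUE's addr_list for an SImode MEM in the same address space.  */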
struct cselib_hasher : nofree_ptr_hash <cselib_val>
{
  struct key {
    /* The rtx value and its mode (needed separately for constant
       integers).  */
    machine_mode mode;
    rtx x;
    /* The mode of the containing MEM, if any, otherwise VOIDmode.  */
    machine_mode memmode;
  };
  typedef key *compare_type;
  static inline hashval_t hash (const cselib_val *);
  static inline bool equal (const cselib_val *, const key *);
};
/* The hash function for our hash table.  The value is always computed with
   cselib_hash_rtx when adding an element; this function just extracts the
   hash value from a cselib_val structure.  */

inline hashval_t
cselib_hasher::hash (const cselib_val *v)
{
  return v->hash;
}
/* The equality test for our hash table.  The first argument V is a table
   element (i.e. a cselib_val), while the second arg X is an rtx.  We know
   that all callers of htab_find_slot_with_hash will wrap CONST_INTs into a
   CONST of an appropriate mode.  */

inline bool
cselib_hasher::equal (const cselib_val *v, const key *x_arg)
{
  struct elt_loc_list *l;
  rtx x = x_arg->x;
  machine_mode mode = x_arg->mode;
  machine_mode memmode = x_arg->memmode;

  if (mode != GET_MODE (v->val_rtx))
    return false;

  if (GET_CODE (x) == VALUE)
    return x == v->val_rtx;

  if (SP_DERIVED_VALUE_P (v->val_rtx) && GET_MODE (x) == Pmode)
    {
      rtx xoff = NULL;
      if (autoinc_split (x, &xoff, memmode) == v->val_rtx && xoff == NULL_RTX)
	return true;
    }

  /* We don't guarantee that distinct rtx's have different hash values,
     so we need to do a comparison.  */
  for (l = v->locs; l; l = l->next)
    if (rtx_equal_for_cselib_1 (l->loc, x, memmode, 0))
      {
	promote_debug_loc (l);
	return true;
      }

  return false;
}
/* A table that enables us to look up elts by their value.  */
static hash_table<cselib_hasher> *cselib_hash_table;

/* A table to hold preserved values.  */
static hash_table<cselib_hasher> *cselib_preserved_hash_table;

/* This is a global so we don't have to pass this through every function.
   It is used in new_elt_loc_list to set SETTING_INSN.  */
static rtx_insn *cselib_current_insn;

/* The unique id that the next create value will take.  */
static unsigned int next_uid;

/* The number of registers we had when the varrays were last resized.  */
static unsigned int cselib_nregs;

/* Count values without known locations, or with only locations that
   wouldn't have been known except for debug insns.  Whenever this
   grows too big, we remove these useless values from the table.

   Counting values with only debug values is a bit tricky.  We don't
   want to increment n_useless_values when we create a value for a
   debug insn, for this would get n_useless_values out of sync, but we
   want to increment it if all locs in the list that were ever referenced
   in nondebug insns are removed from the list.

   In the general case, once we do that, we'd have to stop accepting
   nondebug expressions in the loc list, to avoid having two values
   equivalent that, without debug insns, would have been made into
   separate values.  However, because debug insns never introduce
   equivalences themselves (no assignments), the only means for
   growing loc lists is through nondebug assignments.  If the locs
   also happen to be referenced in debug insns, it will work just fine.

   A consequence of this is that there's at most one debug-only loc in
   each loc list.  If we keep it in the first entry, testing whether
   we have a debug-only loc list takes O(1).

   Furthermore, since any additional entry in a loc list containing a
   debug loc would have to come from an assignment (nondebug) that
   references both the initial debug loc and the newly-equivalent loc,
   the initial debug loc would be promoted to a nondebug loc, and the
   loc list would not contain debug locs any more.

   So the only case we have to be careful with in order to keep
   n_useless_values in sync between debug and nondebug compilations is
   to avoid incrementing n_useless_values when removing the single loc
   from a value that turns out to not appear outside debug values.  We
   increment n_useless_debug_values instead, and leave such values
   alone until, for other reasons, we garbage-collect useless
   values.  */
static int n_useless_values;
static int n_useless_debug_values;

/* Count values whose locs have been taken exclusively from debug
   insns for the entire life of the value.  */
static int n_debug_values;

/* Number of useless values before we remove them from the hash table.  */
#define MAX_USELESS_VALUES 32
/* This table maps from register number to values.  It does not
   contain pointers to cselib_val structures, but rather elt_lists.
   The purpose is to be able to refer to the same register in
   different modes.  The first element of the list defines the mode in
   which the register was set; if the mode is unknown or the value is
   no longer valid in that mode, ELT will be NULL for the first
   element.  */
static struct elt_list **reg_values;
static unsigned int reg_values_size;
#define REG_VALUES(i) reg_values[i]
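
/* Illustrative example of the invariant above (a sketch, not additional
   state): after (set (reg:DI 100) ...), REG_VALUES (100) starts with an
   elt_list whose elt is the DImode VALUE of the set; if register 100 is
   later referenced in SImode, a second elt_list with an SImode VALUE is
   chained after it.  */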
/* The largest number of hard regs used by any entry added to the
   REG_VALUES table.  Cleared on each cselib_clear_table() invocation.  */
static unsigned int max_value_regs;

/* Here the set of indices I with REG_VALUES(I) != 0 is saved.  This is used
   in cselib_clear_table() for fast emptying.  */
static unsigned int *used_regs;
static unsigned int n_used_regs;

/* We pass this to cselib_invalidate_mem to invalidate all of
   memory for a non-const call instruction.  */
static GTY(()) rtx callmem;

/* Set by discard_useless_locs if it deleted the last location of any
   value.  */
static int values_became_useless;

/* Used as stop element of the containing_mem list so we can check
   presence in the list by checking the next pointer.  */
static cselib_val dummy_val;

/* If non-NULL, value of the eliminated arg_pointer_rtx or frame_pointer_rtx
   that is constant through the whole function and should never be
   invalidated.  */
static cselib_val *cfa_base_preserved_val;
static unsigned int cfa_base_preserved_regno = INVALID_REGNUM;

/* Used to list all values that contain memory reference.
   May or may not contain the useless values - the list is compacted
   each time memory is invalidated.  */
static cselib_val *first_containing_mem = &dummy_val;

static object_allocator<elt_list> elt_list_pool ("elt_list");
static object_allocator<elt_loc_list> elt_loc_list_pool ("elt_loc_list");
static object_allocator<cselib_val> cselib_val_pool ("cselib_val_list");

static pool_allocator value_pool ("value", RTX_CODE_SIZE (VALUE));

/* If nonnull, cselib will call this function before freeing useless
   VALUEs.  A VALUE is deemed useless if its "locs" field is null.  */
void (*cselib_discard_hook) (cselib_val *);

/* If nonnull, cselib will call this function before recording sets or
   even clobbering outputs of INSN.  All the recorded sets will be
   represented in the array sets[n_sets].  new_val_min can be used to
   tell whether values present in sets are introduced by this
   instruction.  */
void (*cselib_record_sets_hook) (rtx_insn *insn, struct cselib_set *sets,
				 int n_sets);
/* Allocate a struct elt_list and fill in its two elements with the
   arguments.  */

static inline struct elt_list *
new_elt_list (struct elt_list *next, cselib_val *elt)
{
  elt_list *el = elt_list_pool.allocate ();
  el->next = next;
  el->elt = elt;
  return el;
}
/* Allocate a struct elt_loc_list with LOC and prepend it to VAL's loc
   list.  */

static void
new_elt_loc_list (cselib_val *val, rtx loc)
{
  struct elt_loc_list *el, *next = val->locs;

  gcc_checking_assert (!next || !next->setting_insn
		       || !DEBUG_INSN_P (next->setting_insn)
		       || cselib_current_insn == next->setting_insn);

  /* If we're creating the first loc in a debug insn context, we've
     just created a debug value.  Count it.  */
  if (!next && cselib_current_insn && DEBUG_INSN_P (cselib_current_insn))
    n_debug_values++;

  val = canonical_cselib_val (val);
  next = val->locs;

  if (GET_CODE (loc) == VALUE)
    {
      loc = canonical_cselib_val (CSELIB_VAL_PTR (loc))->val_rtx;

      gcc_checking_assert (PRESERVED_VALUE_P (loc)
			   == PRESERVED_VALUE_P (val->val_rtx));

      if (val->val_rtx == loc)
	return;
      else if (val->uid > CSELIB_VAL_PTR (loc)->uid)
	{
	  /* Reverse the insertion.  */
	  new_elt_loc_list (CSELIB_VAL_PTR (loc), val->val_rtx);
	  return;
	}

      gcc_checking_assert (val->uid < CSELIB_VAL_PTR (loc)->uid);

      if (CSELIB_VAL_PTR (loc)->locs)
	{
	  /* Bring all locs from LOC to VAL.  */
	  for (el = CSELIB_VAL_PTR (loc)->locs; el->next; el = el->next)
	    {
	      /* Adjust values that have LOC as canonical so that VAL
		 becomes their canonical.  */
	      if (el->loc && GET_CODE (el->loc) == VALUE)
		{
		  gcc_checking_assert (CSELIB_VAL_PTR (el->loc)->locs->loc
				       == loc);
		  CSELIB_VAL_PTR (el->loc)->locs->loc = val->val_rtx;
		}
	    }
	  el->next = val->locs;
	  next = val->locs = CSELIB_VAL_PTR (loc)->locs;
	}

      if (CSELIB_VAL_PTR (loc)->addr_list)
	{
	  /* Bring in addr_list into canonical node.  */
	  struct elt_list *last = CSELIB_VAL_PTR (loc)->addr_list;
	  while (last->next)
	    last = last->next;
	  last->next = val->addr_list;
	  val->addr_list = CSELIB_VAL_PTR (loc)->addr_list;
	  CSELIB_VAL_PTR (loc)->addr_list = NULL;
	}

      if (CSELIB_VAL_PTR (loc)->next_containing_mem != NULL
	  && val->next_containing_mem == NULL)
	{
	  /* Add VAL to the containing_mem list after LOC.  LOC will
	     be removed when we notice it doesn't contain any
	     MEMs.  */
	  val->next_containing_mem = CSELIB_VAL_PTR (loc)->next_containing_mem;
	  CSELIB_VAL_PTR (loc)->next_containing_mem = val;
	}

      /* Chain LOC back to VAL.  */
      el = elt_loc_list_pool.allocate ();
      el->loc = val->val_rtx;
      el->setting_insn = cselib_current_insn;
      el->next = NULL;
      CSELIB_VAL_PTR (loc)->locs = el;
    }

  el = elt_loc_list_pool.allocate ();
  el->loc = loc;
  el->setting_insn = cselib_current_insn;
  el->next = next;
  val->locs = el;
}
/* Promote loc L to a nondebug cselib_current_insn if L is marked as
   originating from a debug insn, maintaining the debug values
   count.  */

static inline void
promote_debug_loc (struct elt_loc_list *l)
{
  if (l && l->setting_insn && DEBUG_INSN_P (l->setting_insn)
      && (!cselib_current_insn || !DEBUG_INSN_P (cselib_current_insn)))
    {
      n_debug_values--;
      l->setting_insn = cselib_current_insn;
      if (cselib_preserve_constants && l->next)
	{
	  gcc_assert (l->next->setting_insn
		      && DEBUG_INSN_P (l->next->setting_insn)
		      && !l->next->next);
	  l->next->setting_insn = cselib_current_insn;
	}
      else
	gcc_assert (!l->next);
    }
}
/* The elt_list at *PL is no longer needed.  Unchain it and free its
   storage.  */

static inline void
unchain_one_elt_list (struct elt_list **pl)
{
  struct elt_list *l = *pl;

  *pl = l->next;
  elt_list_pool.remove (l);
}

/* Likewise for elt_loc_lists.  */

static void
unchain_one_elt_loc_list (struct elt_loc_list **pl)
{
  struct elt_loc_list *l = *pl;

  *pl = l->next;
  elt_loc_list_pool.remove (l);
}

/* Likewise for cselib_vals.  This also frees the addr_list associated with
   V.  */

static void
unchain_one_value (cselib_val *v)
{
  while (v->addr_list)
    unchain_one_elt_list (&v->addr_list);

  cselib_val_pool.remove (v);
}
/* Remove all entries from the hash table.  Also used during
   initialization.  */

void
cselib_clear_table (void)
{
  cselib_reset_table (1);
}
/* Return TRUE if V is a constant, a function invariant or a VALUE
   equivalence; FALSE otherwise.  */

static bool
invariant_or_equiv_p (cselib_val *v)
{
  struct elt_loc_list *l;

  if (v == cfa_base_preserved_val)
    return true;

  /* Keep VALUE equivalences around.  */
  for (l = v->locs; l; l = l->next)
    if (GET_CODE (l->loc) == VALUE)
      return true;

  if (v->locs != NULL
      && v->locs->next == NULL)
    {
      if (CONSTANT_P (v->locs->loc)
	  && (GET_CODE (v->locs->loc) != CONST
	      || !references_value_p (v->locs->loc, 0)))
	return true;
      /* Although a debug expr may be bound to different expressions,
	 we can preserve it as if it was constant, to get unification
	 and proper merging within var-tracking.  */
      if (GET_CODE (v->locs->loc) == DEBUG_EXPR
	  || GET_CODE (v->locs->loc) == DEBUG_IMPLICIT_PTR
	  || GET_CODE (v->locs->loc) == ENTRY_VALUE
	  || GET_CODE (v->locs->loc) == DEBUG_PARAMETER_REF)
	return true;

      /* (plus (value V) (const_int C)) is invariant iff V is invariant.  */
      if (GET_CODE (v->locs->loc) == PLUS
	  && CONST_INT_P (XEXP (v->locs->loc, 1))
	  && GET_CODE (XEXP (v->locs->loc, 0)) == VALUE
	  && invariant_or_equiv_p (CSELIB_VAL_PTR (XEXP (v->locs->loc, 0))))
	return true;
    }

  return false;
}
/* Remove from hash table all VALUEs except constants, function
   invariants and VALUE equivalences.  */

static int
preserve_constants_and_equivs (cselib_val **x, void *info ATTRIBUTE_UNUSED)
{
  cselib_val *v = *x;

  if (invariant_or_equiv_p (v))
    {
      cselib_hasher::key lookup = {
	GET_MODE (v->val_rtx), v->val_rtx, VOIDmode
      };
      cselib_val **slot
	= cselib_preserved_hash_table->find_slot_with_hash (&lookup,
							    v->hash, INSERT);
      gcc_assert (!*slot);
      *slot = v;
    }

  cselib_hash_table->clear_slot (x);

  return 1;
}
/* Remove all entries from the hash table, arranging for the next
   value to be numbered NUM.  */

void
cselib_reset_table (unsigned int num)
{
  unsigned int i;

  max_value_regs = 0;

  if (cfa_base_preserved_val)
    {
      unsigned int regno = cfa_base_preserved_regno;
      unsigned int new_used_regs = 0;
      for (i = 0; i < n_used_regs; i++)
	if (used_regs[i] == regno)
	  {
	    new_used_regs++;
	    continue;
	  }
	else
	  REG_VALUES (used_regs[i]) = 0;
      gcc_assert (new_used_regs == 1);
      n_used_regs = new_used_regs;
      used_regs[0] = regno;
      max_value_regs
	= hard_regno_nregs (regno,
			    GET_MODE (cfa_base_preserved_val->locs->loc));

      /* If cfa_base is sp + const_int, need to preserve also the
	 SP_DERIVED_VALUE_P value.  */
      for (struct elt_loc_list *l = cfa_base_preserved_val->locs;
	   l; l = l->next)
	if (GET_CODE (l->loc) == PLUS
	    && GET_CODE (XEXP (l->loc, 0)) == VALUE
	    && SP_DERIVED_VALUE_P (XEXP (l->loc, 0))
	    && CONST_INT_P (XEXP (l->loc, 1)))
	  {
	    if (! invariant_or_equiv_p (CSELIB_VAL_PTR (XEXP (l->loc, 0))))
	      {
		rtx val = cfa_base_preserved_val->val_rtx;
		rtx_insn *save_cselib_current_insn = cselib_current_insn;
		cselib_current_insn = l->setting_insn;
		new_elt_loc_list (CSELIB_VAL_PTR (XEXP (l->loc, 0)),
				  plus_constant (Pmode, val,
						 -UINTVAL (XEXP (l->loc, 1))));
		cselib_current_insn = save_cselib_current_insn;
	      }
	    break;
	  }
    }
  else
    {
      for (i = 0; i < n_used_regs; i++)
	REG_VALUES (used_regs[i]) = 0;
      n_used_regs = 0;
    }

  if (cselib_preserve_constants)
    cselib_hash_table->traverse <void *, preserve_constants_and_equivs> (NULL);
  else
    {
      cselib_hash_table->empty ();
      gcc_checking_assert (!cselib_any_perm_equivs);
    }

  n_useless_values = 0;
  n_useless_debug_values = 0;
  n_debug_values = 0;

  next_uid = num;

  first_containing_mem = &dummy_val;
}
/* Return the number of the next value that will be generated.  */

unsigned int
cselib_get_next_uid (void)
{
  return next_uid;
}
/* Search for X, whose hashcode is HASH, in CSELIB_HASH_TABLE,
   INSERTing if requested.  When X is part of the address of a MEM,
   MEMMODE should specify the mode of the MEM.  */

static cselib_val **
cselib_find_slot (machine_mode mode, rtx x, hashval_t hash,
		  enum insert_option insert, machine_mode memmode)
{
  cselib_val **slot = NULL;
  cselib_hasher::key lookup = { mode, x, memmode };
  if (cselib_preserve_constants)
    slot = cselib_preserved_hash_table->find_slot_with_hash (&lookup, hash,
							     NO_INSERT);
  if (!slot)
    slot = cselib_hash_table->find_slot_with_hash (&lookup, hash, insert);
  return slot;
}
/* Return true if X contains a VALUE rtx.  If ONLY_USELESS is set, we
   only return true for values which point to a cselib_val whose value
   element has been set to zero, which implies the cselib_val will be
   removed.  */

int
references_value_p (const_rtx x, int only_useless)
{
  const enum rtx_code code = GET_CODE (x);
  const char *fmt = GET_RTX_FORMAT (code);
  int i, j;

  if (GET_CODE (x) == VALUE
      && (! only_useless
	  || (CSELIB_VAL_PTR (x)->locs == 0 && !PRESERVED_VALUE_P (x))))
    return 1;

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && references_value_p (XEXP (x, i), only_useless))
	return 1;
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  if (references_value_p (XVECEXP (x, i, j), only_useless))
	    return 1;
    }

  return 0;
}
/* Return true if V is a useless VALUE and can be discarded as such.  */

static bool
cselib_useless_value_p (cselib_val *v)
{
  return (v->locs == 0
	  && !PRESERVED_VALUE_P (v->val_rtx)
	  && !SP_DERIVED_VALUE_P (v->val_rtx));
}
/* For all locations found in X, delete locations that reference useless
   values (i.e. values without any location).  Called through
   htab_traverse.  */

static int
discard_useless_locs (cselib_val **x, void *info ATTRIBUTE_UNUSED)
{
  cselib_val *v = *x;
  struct elt_loc_list **p = &v->locs;
  bool had_locs = v->locs != NULL;
  rtx_insn *setting_insn = v->locs ? v->locs->setting_insn : NULL;

  while (*p)
    {
      if (references_value_p ((*p)->loc, 1))
	unchain_one_elt_loc_list (p);
      else
	p = &(*p)->next;
    }

  if (had_locs && cselib_useless_value_p (v))
    {
      if (setting_insn && DEBUG_INSN_P (setting_insn))
	n_useless_debug_values++;
      else
	n_useless_values++;
      values_became_useless = 1;
    }
  return 1;
}
/* If X is a value with no locations, remove it from the hashtable.  */

static int
discard_useless_values (cselib_val **x, void *info ATTRIBUTE_UNUSED)
{
  cselib_val *v = *x;

  if (v->locs == 0 && cselib_useless_value_p (v))
    {
      if (cselib_discard_hook)
	cselib_discard_hook (v);

      CSELIB_VAL_PTR (v->val_rtx) = NULL;
      cselib_hash_table->clear_slot (x);
      unchain_one_value (v);
      n_useless_values--;
    }

  return 1;
}
/* Clean out useless values (i.e. those which no longer have locations
   associated with them) from the hash table.  */

static void
remove_useless_values (void)
{
  cselib_val **p, *v;

  /* First pass: eliminate locations that reference the value.  That in
     turn can make more values useless.  */
  do
    {
      values_became_useless = 0;
      cselib_hash_table->traverse <void *, discard_useless_locs> (NULL);
    }
  while (values_became_useless);

  /* Second pass: actually remove the values.  */

  p = &first_containing_mem;
  for (v = *p; v != &dummy_val; v = v->next_containing_mem)
    if (v->locs && v == canonical_cselib_val (v))
      {
	*p = v;
	p = &(*p)->next_containing_mem;
      }
  *p = &dummy_val;

  n_useless_values += n_useless_debug_values;
  n_debug_values -= n_useless_debug_values;
  n_useless_debug_values = 0;

  cselib_hash_table->traverse <void *, discard_useless_values> (NULL);

  gcc_assert (!n_useless_values);
}
/* Arrange for a value to not be removed from the hash table even if
   it becomes useless.  */

void
cselib_preserve_value (cselib_val *v)
{
  PRESERVED_VALUE_P (v->val_rtx) = 1;
}

/* Test whether a value is preserved.  */

bool
cselib_preserved_value_p (cselib_val *v)
{
  return PRESERVED_VALUE_P (v->val_rtx);
}
/* Arrange for a REG value to be assumed constant through the whole function,
   never invalidated and preserved across cselib_reset_table calls.  */

void
cselib_preserve_cfa_base_value (cselib_val *v, unsigned int regno)
{
  if (cselib_preserve_constants
      && v->locs
      && REG_P (v->locs->loc))
    {
      cfa_base_preserved_val = v;
      cfa_base_preserved_regno = regno;
    }
}
/* Clean all non-constant expressions in the hash table, but retain
   their values.  */

void
cselib_preserve_only_values (void)
{
  int i;

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    cselib_invalidate_regno (i, reg_raw_mode[i]);

  cselib_invalidate_mem (callmem);

  remove_useless_values ();

  gcc_assert (first_containing_mem == &dummy_val);
}
/* Arrange for a value to be marked as based on stack pointer
   for find_base_term purposes.  */

void
cselib_set_value_sp_based (cselib_val *v)
{
  SP_BASED_VALUE_P (v->val_rtx) = 1;
}

/* Test whether a value is based on stack pointer for
   find_base_term purposes.  */

bool
cselib_sp_based_value_p (cselib_val *v)
{
  return SP_BASED_VALUE_P (v->val_rtx);
}
/* Return the mode in which a register was last set.  If X is not a
   register, return its mode.  If the mode in which the register was
   set is not known, or the value was already clobbered, return
   VOIDmode.  */

machine_mode
cselib_reg_set_mode (const_rtx x)
{
  if (!REG_P (x))
    return GET_MODE (x);

  if (REG_VALUES (REGNO (x)) == NULL
      || REG_VALUES (REGNO (x))->elt == NULL)
    return VOIDmode;

  return GET_MODE (REG_VALUES (REGNO (x))->elt->val_rtx);
}
/* If x is a PLUS or an autoinc operation, expand the operation,
   storing the offset, if any, in *OFF.  */

static rtx
autoinc_split (rtx x, rtx *off, machine_mode memmode)
{
  switch (GET_CODE (x))
    {
    case PLUS:
      *off = XEXP (x, 1);
      x = XEXP (x, 0);
      break;

    case PRE_DEC:
      if (memmode == VOIDmode)
	return x;

      *off = gen_int_mode (-GET_MODE_SIZE (memmode), GET_MODE (x));
      x = XEXP (x, 0);
      break;

    case PRE_INC:
      if (memmode == VOIDmode)
	return x;

      *off = gen_int_mode (GET_MODE_SIZE (memmode), GET_MODE (x));
      x = XEXP (x, 0);
      break;

    case PRE_MODIFY:
      x = XEXP (x, 1);
      break;

    case POST_DEC:
    case POST_INC:
    case POST_MODIFY:
      x = XEXP (x, 0);
      break;

    default:
      break;
    }

  if (GET_MODE (x) == Pmode
      && (REG_P (x) || MEM_P (x) || GET_CODE (x) == VALUE)
      && (*off == NULL_RTX || CONST_INT_P (*off)))
    {
      cselib_val *e;
      if (GET_CODE (x) == VALUE)
	e = CSELIB_VAL_PTR (x);
      else
	e = cselib_lookup (x, GET_MODE (x), 0, memmode);
      if (e)
	{
	  if (SP_DERIVED_VALUE_P (e->val_rtx)
	      && (*off == NULL_RTX || *off == const0_rtx))
	    {
	      *off = NULL_RTX;
	      return e->val_rtx;
	    }
	  for (struct elt_loc_list *l = e->locs; l; l = l->next)
	    if (GET_CODE (l->loc) == PLUS
		&& GET_CODE (XEXP (l->loc, 0)) == VALUE
		&& SP_DERIVED_VALUE_P (XEXP (l->loc, 0))
		&& CONST_INT_P (XEXP (l->loc, 1)))
	      {
		if (*off == NULL_RTX)
		  *off = XEXP (l->loc, 1);
		else
		  *off = plus_constant (Pmode, *off,
					INTVAL (XEXP (l->loc, 1)));
		if (*off == const0_rtx)
		  *off = NULL_RTX;
		return XEXP (l->loc, 0);
	      }
	}
    }
  return x;
}
/* Return nonzero if we can prove that X and Y contain the same value,
   taking our gathered information into account.  MEMMODE holds the
   mode of the enclosing MEM, if any, as required to deal with autoinc
   addressing modes.  If X and Y are not (known to be) part of
   addresses, MEMMODE should be VOIDmode.  */

int
rtx_equal_for_cselib_1 (rtx x, rtx y, machine_mode memmode, int depth)
{
  enum rtx_code code;
  const char *fmt;
  int i;

  if (REG_P (x) || MEM_P (x))
    {
      cselib_val *e = cselib_lookup (x, GET_MODE (x), 0, memmode);

      if (e)
	x = e->val_rtx;
    }

  if (REG_P (y) || MEM_P (y))
    {
      cselib_val *e = cselib_lookup (y, GET_MODE (y), 0, memmode);

      if (e)
	y = e->val_rtx;
    }

  if (x == y)
    return 1;

  if (GET_CODE (x) == VALUE)
    {
      cselib_val *e = canonical_cselib_val (CSELIB_VAL_PTR (x));
      struct elt_loc_list *l;

      if (GET_CODE (y) == VALUE)
	return e == canonical_cselib_val (CSELIB_VAL_PTR (y));

      if ((SP_DERIVED_VALUE_P (x)
	   || SP_DERIVED_VALUE_P (e->val_rtx))
	  && GET_MODE (y) == Pmode)
	{
	  rtx yoff = NULL;
	  rtx yr = autoinc_split (y, &yoff, memmode);
	  if ((yr == x || yr == e->val_rtx) && yoff == NULL_RTX)
	    return 1;
	}

      if (depth == 128)
	return 0;

      for (l = e->locs; l; l = l->next)
	{
	  rtx t = l->loc;

	  /* Avoid infinite recursion.  We know we have the canonical
	     value, so we can just skip any values in the equivalence
	     list.  */
	  if (REG_P (t) || MEM_P (t) || GET_CODE (t) == VALUE)
	    continue;
	  else if (rtx_equal_for_cselib_1 (t, y, memmode, depth + 1))
	    return 1;
	}

      return 0;
    }
  else if (GET_CODE (y) == VALUE)
    {
      cselib_val *e = canonical_cselib_val (CSELIB_VAL_PTR (y));
      struct elt_loc_list *l;

      if ((SP_DERIVED_VALUE_P (y)
	   || SP_DERIVED_VALUE_P (e->val_rtx))
	  && GET_MODE (x) == Pmode)
	{
	  rtx xoff = NULL;
	  rtx xr = autoinc_split (x, &xoff, memmode);
	  if ((xr == y || xr == e->val_rtx) && xoff == NULL_RTX)
	    return 1;
	}

      if (depth == 128)
	return 0;

      for (l = e->locs; l; l = l->next)
	{
	  rtx t = l->loc;

	  if (REG_P (t) || MEM_P (t) || GET_CODE (t) == VALUE)
	    continue;
	  else if (rtx_equal_for_cselib_1 (x, t, memmode, depth + 1))
	    return 1;
	}

      return 0;
    }

  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  if (GET_CODE (x) != GET_CODE (y)
      || (GET_CODE (x) == PLUS
	  && GET_MODE (x) == Pmode
	  && CONST_INT_P (XEXP (x, 1))
	  && CONST_INT_P (XEXP (y, 1))))
    {
      rtx xorig = x, yorig = y;
      rtx xoff = NULL, yoff = NULL;

      x = autoinc_split (x, &xoff, memmode);
      y = autoinc_split (y, &yoff, memmode);

      /* Don't recurse if nothing changed.  */
      if (x != xorig || y != yorig)
	{
	  if (!xoff != !yoff)
	    return 0;

	  if (xoff && !rtx_equal_for_cselib_1 (xoff, yoff, memmode, depth))
	    return 0;

	  return rtx_equal_for_cselib_1 (x, y, memmode, depth);
	}

      if (GET_CODE (xorig) != GET_CODE (yorig))
	return 0;
    }

  /* These won't be handled correctly by the code below.  */
  switch (GET_CODE (x))
    {
    CASE_CONST_UNIQUE:
    case DEBUG_EXPR:
      return 0;

    case CONST_VECTOR:
      if (!same_vector_encodings_p (x, y))
	return false;
      break;

    case DEBUG_IMPLICIT_PTR:
      return DEBUG_IMPLICIT_PTR_DECL (x)
	     == DEBUG_IMPLICIT_PTR_DECL (y);

    case DEBUG_PARAMETER_REF:
      return DEBUG_PARAMETER_REF_DECL (x)
	     == DEBUG_PARAMETER_REF_DECL (y);

    case ENTRY_VALUE:
      /* ENTRY_VALUEs are function invariant, it is thus undesirable to
	 use rtx_equal_for_cselib_1 to compare the operands.  */
      return rtx_equal_p (ENTRY_VALUE_EXP (x), ENTRY_VALUE_EXP (y));

    case LABEL_REF:
      return label_ref_label (x) == label_ref_label (y);

    case REG:
      return REGNO (x) == REGNO (y);

    case MEM:
      /* We have to compare any autoinc operations in the addresses
	 using this MEM's mode.  */
      return rtx_equal_for_cselib_1 (XEXP (x, 0), XEXP (y, 0), GET_MODE (x),
				     depth);

    default:
      break;
    }

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      int j;

      switch (fmt[i])
	{
	case 'w':
	  if (XWINT (x, i) != XWINT (y, i))
	    return 0;
	  break;

	case 'n':
	case 'i':
	  if (XINT (x, i) != XINT (y, i))
	    return 0;
	  break;

	case 'p':
	  if (maybe_ne (SUBREG_BYTE (x), SUBREG_BYTE (y)))
	    return 0;
	  break;

	case 'V':
	case 'E':
	  /* Two vectors must have the same length.  */
	  if (XVECLEN (x, i) != XVECLEN (y, i))
	    return 0;

	  /* And the corresponding elements must match.  */
	  for (j = 0; j < XVECLEN (x, i); j++)
	    if (! rtx_equal_for_cselib_1 (XVECEXP (x, i, j),
					  XVECEXP (y, i, j), memmode, depth))
	      return 0;
	  break;

	case 'e':
	  if (i == 1
	      && targetm.commutative_p (x, UNKNOWN)
	      && rtx_equal_for_cselib_1 (XEXP (x, 1), XEXP (y, 0), memmode,
					 depth)
	      && rtx_equal_for_cselib_1 (XEXP (x, 0), XEXP (y, 1), memmode,
					 depth))
	    return 1;
	  if (! rtx_equal_for_cselib_1 (XEXP (x, i), XEXP (y, i), memmode,
					depth))
	    return 0;
	  break;

	case 'S':
	case 's':
	  if (strcmp (XSTR (x, i), XSTR (y, i)))
	    return 0;
	  break;

	case 'u':
	  /* These are just backpointers, so they don't matter.  */
	  break;

	case '0':
	case 't':
	  break;

	  /* It is believed that rtx's at this level will never
	     contain anything but integers and other rtx's,
	     except for within LABEL_REFs and SYMBOL_REFs.  */
	default:
	  gcc_unreachable ();
	}
    }
  return 1;
}
/* Wrapper for rtx_equal_for_cselib_p to determine whether a SET is
   truly redundant, taking into account aliasing information.  */

bool
cselib_redundant_set_p (rtx set)
{
  gcc_assert (GET_CODE (set) == SET);
  rtx dest = SET_DEST (set);
  if (cselib_reg_set_mode (dest) != GET_MODE (dest))
    return false;

  if (!rtx_equal_for_cselib_p (dest, SET_SRC (set)))
    return false;

  while (GET_CODE (dest) == SUBREG
	 || GET_CODE (dest) == ZERO_EXTRACT
	 || GET_CODE (dest) == STRICT_LOW_PART)
    dest = XEXP (dest, 0);

  if (!flag_strict_aliasing || !MEM_P (dest))
    return true;

  /* For a store we need to check that suppressing it will not change
     the effective alias set.  */
  rtx dest_addr = XEXP (dest, 0);

  /* Lookup the equivalents to the original dest (rather than just the
     MEM).  */
  cselib_val *src_val = cselib_lookup (SET_DEST (set),
				       GET_MODE (SET_DEST (set)),
				       0, VOIDmode);

  if (src_val)
    {
      /* Walk the list of source equivalents to find the MEM accessing
	 the same location.  */
      for (elt_loc_list *l = src_val->locs; l; l = l->next)
	{
	  rtx src_equiv = l->loc;
	  while (GET_CODE (src_equiv) == SUBREG
		 || GET_CODE (src_equiv) == ZERO_EXTRACT
		 || GET_CODE (src_equiv) == STRICT_LOW_PART)
	    src_equiv = XEXP (src_equiv, 0);

	  if (MEM_P (src_equiv))
	    {
	      /* Match the MEMs by comparing the addresses.  We can
		 only remove the later store if the earlier aliases at
		 least all the accesses of the later one.  */
	      if (rtx_equal_for_cselib_1 (dest_addr, XEXP (src_equiv, 0),
					  GET_MODE (dest), 0))
		return mems_same_for_tbaa_p (src_equiv, dest);
	    }
	}
    }

  /* We failed to find a recorded value in the cselib history, so try
     the source of this set; this catches cases such as *p = *q when p
     and q have the same value.  */
  rtx src = SET_SRC (set);
  while (GET_CODE (src) == SUBREG)
    src = XEXP (src, 0);

  if (MEM_P (src)
      && rtx_equal_for_cselib_1 (dest_addr, XEXP (src, 0), GET_MODE (dest), 0))
    return mems_same_for_tbaa_p (src, dest);

  return false;
}
/* Helper function for cselib_hash_rtx.  Arguments like for cselib_hash_rtx,
   except that it hashes (plus:P x c).  */

static unsigned int
cselib_hash_plus_const_int (rtx x, HOST_WIDE_INT c, int create,
			    machine_mode memmode)
{
  cselib_val *e = cselib_lookup (x, GET_MODE (x), create, memmode);
  if (! e)
    return 0;

  if (! SP_DERIVED_VALUE_P (e->val_rtx))
    for (struct elt_loc_list *l = e->locs; l; l = l->next)
      if (GET_CODE (l->loc) == PLUS
	  && GET_CODE (XEXP (l->loc, 0)) == VALUE
	  && SP_DERIVED_VALUE_P (XEXP (l->loc, 0))
	  && CONST_INT_P (XEXP (l->loc, 1)))
	{
	  e = CSELIB_VAL_PTR (XEXP (l->loc, 0));
	  c = trunc_int_for_mode (c + UINTVAL (XEXP (l->loc, 1)), Pmode);
	  break;
	}

  unsigned hash = (unsigned) PLUS + (unsigned) GET_MODE (x);
  hash += e->hash;
  unsigned int tem_hash = (unsigned) CONST_INT + (unsigned) VOIDmode;
  tem_hash += ((unsigned) CONST_INT << 7) + (unsigned HOST_WIDE_INT) c;
  if (tem_hash == 0)
    tem_hash = (unsigned int) CONST_INT;
  hash += tem_hash;
  return hash ? hash : 1 + (unsigned int) PLUS;
}
/* Hash an rtx.  Return 0 if we couldn't hash the rtx.
   For registers and memory locations, we look up their cselib_val structure
   and return its VALUE element.
   Possible reasons for return 0 are: the object is volatile, or we couldn't
   find a register or memory location in the table and CREATE is zero.  If
   CREATE is nonzero, table elts are created for regs and mem.
   N.B. this hash function returns the same hash value for RTXes that
   differ only in the order of operands, thus it is suitable for comparisons
   that take commutativity into account.
   If we wanted to also support associative rules, we'd have to use a different
   strategy to avoid returning spurious 0, e.g. return ~(~0U >> 1) .
   MEMMODE indicates the mode of an enclosing MEM, and it's only
   used to compute autoinc values.
   We used to have a MODE argument for hashing for CONST_INTs, but that
   didn't make sense, since it caused spurious hash differences between
    (set (reg:SI 1) (const_int))
    (plus:SI (reg:SI 2) (reg:SI 1))
   and
    (plus:SI (reg:SI 2) (const_int))
   If the mode is important in any context, it must be checked specifically
   in a comparison anyway, since relying on hash differences is unsafe.  */
static unsigned int
cselib_hash_rtx (rtx x, int create, machine_mode memmode)
{
  cselib_val *e;
  poly_int64 offset;
  int i, j;
  enum rtx_code code;
  const char *fmt;
  unsigned int hash = 0;

  code = GET_CODE (x);
  hash += (unsigned) code + (unsigned) GET_MODE (x);

  switch (code)
    {
    case VALUE:
      e = CSELIB_VAL_PTR (x);
      return e->hash;

    case MEM:
    case REG:
      e = cselib_lookup (x, GET_MODE (x), create, memmode);
      if (! e)
	return 0;

      return e->hash;

    case DEBUG_EXPR:
      hash += ((unsigned) DEBUG_EXPR << 7)
	      + DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x));
      return hash ? hash : (unsigned int) DEBUG_EXPR;

    case DEBUG_IMPLICIT_PTR:
      hash += ((unsigned) DEBUG_IMPLICIT_PTR << 7)
	      + DECL_UID (DEBUG_IMPLICIT_PTR_DECL (x));
      return hash ? hash : (unsigned int) DEBUG_IMPLICIT_PTR;

    case DEBUG_PARAMETER_REF:
      hash += ((unsigned) DEBUG_PARAMETER_REF << 7)
	      + DECL_UID (DEBUG_PARAMETER_REF_DECL (x));
      return hash ? hash : (unsigned int) DEBUG_PARAMETER_REF;

    case ENTRY_VALUE:
      /* ENTRY_VALUEs are function invariant, thus try to avoid
	 recursing on argument if ENTRY_VALUE is one of the
	 forms emitted by expand_debug_expr, otherwise
	 ENTRY_VALUE hash would depend on the current value
	 in some register or memory.  */
      if (REG_P (ENTRY_VALUE_EXP (x)))
	hash += (unsigned int) REG
		+ (unsigned int) GET_MODE (ENTRY_VALUE_EXP (x))
		+ (unsigned int) REGNO (ENTRY_VALUE_EXP (x));
      else if (MEM_P (ENTRY_VALUE_EXP (x))
	       && REG_P (XEXP (ENTRY_VALUE_EXP (x), 0)))
	hash += (unsigned int) MEM
		+ (unsigned int) GET_MODE (XEXP (ENTRY_VALUE_EXP (x), 0))
		+ (unsigned int) REGNO (XEXP (ENTRY_VALUE_EXP (x), 0));
      else
	hash += cselib_hash_rtx (ENTRY_VALUE_EXP (x), create, memmode);
      return hash ? hash : (unsigned int) ENTRY_VALUE;

    case CONST_INT:
      hash += ((unsigned) CONST_INT << 7) + UINTVAL (x);
      return hash ? hash : (unsigned int) CONST_INT;

    case CONST_WIDE_INT:
      for (i = 0; i < CONST_WIDE_INT_NUNITS (x); i++)
	hash += CONST_WIDE_INT_ELT (x, i);
      return hash;

    case CONST_POLY_INT:
      {
	inchash::hash h;
	h.add_int (hash);
	for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
	  h.add_wide_int (CONST_POLY_INT_COEFFS (x)[i]);
	return h.end ();
      }

    case CONST_DOUBLE:
      /* This is like the general case, except that it only counts
	 the integers representing the constant.  */
      hash += (unsigned) code + (unsigned) GET_MODE (x);
      if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (x) == VOIDmode)
	hash += ((unsigned) CONST_DOUBLE_LOW (x)
		 + (unsigned) CONST_DOUBLE_HIGH (x));
      else
	hash += real_hash (CONST_DOUBLE_REAL_VALUE (x));
      return hash ? hash : (unsigned int) CONST_DOUBLE;

    case CONST_FIXED:
      hash += (unsigned int) code + (unsigned int) GET_MODE (x);
      hash += fixed_hash (CONST_FIXED_VALUE (x));
      return hash ? hash : (unsigned int) CONST_FIXED;

    case CONST_VECTOR:
      {
	int units;
	rtx elt;

	units = const_vector_encoded_nelts (x);

	for (i = 0; i < units; ++i)
	  {
	    elt = CONST_VECTOR_ENCODED_ELT (x, i);
	    hash += cselib_hash_rtx (elt, 0, memmode);
	  }

	return hash;
      }

      /* Assume there is only one rtx object for any given label.  */
    case LABEL_REF:
      /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
	 differences and differences between each stage's debugging dumps.  */
      hash += (((unsigned int) LABEL_REF << 7)
	       + CODE_LABEL_NUMBER (label_ref_label (x)));
      return hash ? hash : (unsigned int) LABEL_REF;

    case SYMBOL_REF:
      {
	/* Don't hash on the symbol's address to avoid bootstrap differences.
	   Different hash values may cause expressions to be recorded in
	   different orders and thus different registers to be used in the
	   final assembler.  This also avoids differences in the dump files
	   between various stages.  */
	unsigned int h = 0;
	const unsigned char *p = (const unsigned char *) XSTR (x, 0);

	while (*p)
	  h += (h << 7) + *p++; /* ??? revisit */

	hash += ((unsigned int) SYMBOL_REF << 7) + h;
	return hash ? hash : (unsigned int) SYMBOL_REF;
      }

    case PRE_DEC:
    case PRE_INC:
      /* We can't compute these without knowing the MEM mode.  */
      gcc_assert (memmode != VOIDmode);
      offset = GET_MODE_SIZE (memmode);
      if (code == PRE_DEC)
	offset = -offset;
      /* Adjust the hash so that (mem:MEMMODE (pre_* (reg))) hashes
	 like (mem:MEMMODE (plus (reg) (const_int I))).  */
      if (GET_MODE (x) == Pmode
	  && (REG_P (XEXP (x, 0))
	      || MEM_P (XEXP (x, 0))
	      || GET_CODE (XEXP (x, 0)) == VALUE))
	{
	  HOST_WIDE_INT c;
	  if (offset.is_constant (&c))
	    return cselib_hash_plus_const_int (XEXP (x, 0),
					       trunc_int_for_mode (c, Pmode),
					       create, memmode);
	}
      hash = ((unsigned) PLUS + (unsigned) GET_MODE (x)
	      + cselib_hash_rtx (XEXP (x, 0), create, memmode)
	      + cselib_hash_rtx (gen_int_mode (offset, GET_MODE (x)),
				 create, memmode));
      return hash ? hash : 1 + (unsigned) PLUS;

    case PRE_MODIFY:
      gcc_assert (memmode != VOIDmode);
      return cselib_hash_rtx (XEXP (x, 1), create, memmode);

    case POST_DEC:
    case POST_INC:
    case POST_MODIFY:
      gcc_assert (memmode != VOIDmode);
      return cselib_hash_rtx (XEXP (x, 0), create, memmode);

    case PC:
    case CALL:
    case UNSPEC_VOLATILE:
      return 0;

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
	return 0;

      break;

    case PLUS:
      if (GET_MODE (x) == Pmode
	  && (REG_P (XEXP (x, 0))
	      || MEM_P (XEXP (x, 0))
	      || GET_CODE (XEXP (x, 0)) == VALUE)
	  && CONST_INT_P (XEXP (x, 1)))
	return cselib_hash_plus_const_int (XEXP (x, 0), INTVAL (XEXP (x, 1)),
					   create, memmode);
      break;

    default:
      break;
    }

  i = GET_RTX_LENGTH (code) - 1;
  fmt = GET_RTX_FORMAT (code);
  for (; i >= 0; i--)
    {
      switch (fmt[i])
	{
	case 'e':
	  {
	    rtx tem = XEXP (x, i);
	    unsigned int tem_hash = cselib_hash_rtx (tem, create, memmode);

	    if (tem_hash == 0)
	      return 0;

	    hash += tem_hash;
	  }
	  break;
	case 'E':
	  for (j = 0; j < XVECLEN (x, i); j++)
	    {
	      unsigned int tem_hash
		= cselib_hash_rtx (XVECEXP (x, i, j), create, memmode);

	      if (tem_hash == 0)
		return 0;

	      hash += tem_hash;
	    }
	  break;

	case 's':
	  {
	    const unsigned char *p = (const unsigned char *) XSTR (x, i);

	    if (p)
	      while (*p)
		hash += *p++;
	    break;
	  }

	case 'i':
	  hash += XINT (x, i);
	  break;

	case 'p':
	  hash += constant_lower_bound (SUBREG_BYTE (x));
	  break;

	case '0':
	case 't':
	  /* unused */
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  return hash ? hash : 1 + (unsigned int) GET_CODE (x);
}
/* Create a new value structure for VALUE and initialize it.  The mode of the
   value is MODE.  */

static inline cselib_val *
new_cselib_val (unsigned int hash, machine_mode mode, rtx x)
{
  cselib_val *e = cselib_val_pool.allocate ();

  gcc_assert (hash);
  gcc_assert (next_uid);

  e->hash = hash;
  e->uid = next_uid++;
  /* We use an alloc pool to allocate this RTL construct because it
     accounts for about 8% of the overall memory usage.  We know
     precisely when we can have VALUE RTXen (when cselib is active)
     so we don't need to put them in garbage collected memory.
     ??? Why should a VALUE be an RTX in the first place?  */
  e->val_rtx = (rtx_def *) value_pool.allocate ();
  memset (e->val_rtx, 0, RTX_HDR_SIZE);
  PUT_CODE (e->val_rtx, VALUE);
  PUT_MODE (e->val_rtx, mode);
  CSELIB_VAL_PTR (e->val_rtx) = e;
  e->addr_list = 0;
  e->locs = 0;
  e->next_containing_mem = 0;

  scalar_int_mode int_mode;
  if (REG_P (x) && is_int_mode (mode, &int_mode)
      && GET_MODE_SIZE (int_mode) > 1
      && REG_VALUES (REGNO (x)) != NULL
      && (!cselib_current_insn || !DEBUG_INSN_P (cselib_current_insn)))
    {
      rtx copy = shallow_copy_rtx (x);
      scalar_int_mode narrow_mode_iter;
      FOR_EACH_MODE_UNTIL (narrow_mode_iter, int_mode)
	{
	  PUT_MODE_RAW (copy, narrow_mode_iter);
	  cselib_val *v = cselib_lookup (copy, narrow_mode_iter, 0, VOIDmode);
	  if (v)
	    {
	      rtx sub = lowpart_subreg (narrow_mode_iter, e->val_rtx, int_mode);
	      if (sub)
		new_elt_loc_list (v, sub);
	    }
	}
    }

  if (dump_file && (dump_flags & TDF_CSELIB))
    {
      fprintf (dump_file, "cselib value %u:%u ", e->uid, hash);
      if (flag_dump_noaddr || flag_dump_unnumbered)
	fputs ("# ", dump_file);
      else
	fprintf (dump_file, "%p ", (void*)e);
      print_rtl_single (dump_file, x);
      fputc ('\n', dump_file);
    }

  return e;
}
/* ADDR_ELT is a value that is used as address.  MEM_ELT is the value that
   contains the data at this address.  X is a MEM that represents the
   value.  Update the two value structures to represent this situation.  */

static void
add_mem_for_addr (cselib_val *addr_elt, cselib_val *mem_elt, rtx x)
{
  addr_elt = canonical_cselib_val (addr_elt);
  mem_elt = canonical_cselib_val (mem_elt);

  /* Avoid duplicates.  */
  addr_space_t as = MEM_ADDR_SPACE (x);
  for (elt_loc_list *l = mem_elt->locs; l; l = l->next)
    if (MEM_P (l->loc)
	&& CSELIB_VAL_PTR (XEXP (l->loc, 0)) == addr_elt
	&& MEM_ADDR_SPACE (l->loc) == as)
      {
	promote_debug_loc (l);
	return;
      }

  addr_elt->addr_list = new_elt_list (addr_elt->addr_list, mem_elt);
  new_elt_loc_list (mem_elt,
		    replace_equiv_address_nv (x, addr_elt->val_rtx));
  if (mem_elt->next_containing_mem == NULL)
    {
      mem_elt->next_containing_mem = first_containing_mem;
      first_containing_mem = mem_elt;
    }
}
/* Subroutine of cselib_lookup.  Return a value for X, which is a MEM rtx.
   If CREATE, make a new one if we haven't seen it before.  */

static cselib_val *
cselib_lookup_mem (rtx x, int create)
{
  machine_mode mode = GET_MODE (x);
  machine_mode addr_mode;
  cselib_val **slot;
  cselib_val *addr;
  cselib_val *mem_elt;

  if (MEM_VOLATILE_P (x) || mode == BLKmode
      || !cselib_record_memory
      || (FLOAT_MODE_P (mode) && flag_float_store))
    return 0;

  addr_mode = GET_MODE (XEXP (x, 0));
  if (addr_mode == VOIDmode)
    addr_mode = Pmode;

  /* Look up the value for the address.  */
  addr = cselib_lookup (XEXP (x, 0), addr_mode, create, mode);
  if (! addr)
    return 0;
  addr = canonical_cselib_val (addr);

  /* Find a value that describes a value of our mode at that address.  */
  addr_space_t as = MEM_ADDR_SPACE (x);
  for (elt_list *l = addr->addr_list; l; l = l->next)
    if (GET_MODE (l->elt->val_rtx) == mode)
      {
	for (elt_loc_list *l2 = l->elt->locs; l2; l2 = l2->next)
	  if (MEM_P (l2->loc) && MEM_ADDR_SPACE (l2->loc) == as)
	    {
	      promote_debug_loc (l->elt->locs);
	      return l->elt;
	    }
      }

  if (! create)
    return 0;

  mem_elt = new_cselib_val (next_uid, mode, x);
  add_mem_for_addr (addr, mem_elt, x);
  slot = cselib_find_slot (mode, x, mem_elt->hash, INSERT, VOIDmode);
  *slot = mem_elt;
  return mem_elt;
}
/* Search through the possible substitutions in P.  We prefer a non-reg
   substitution because this allows us to expand the tree further.  If
   we find just a reg, take the lowest regno.  There may be several
   non-reg results, we just take the first one because they will all
   expand to the same place.  */

static rtx
expand_loc (struct elt_loc_list *p, struct expand_value_data *evd,
	    int max_depth)
{
  rtx reg_result = NULL;
  unsigned int regno = UINT_MAX;
  struct elt_loc_list *p_in = p;

  for (; p; p = p->next)
    {
      /* Return these right away to avoid returning stack pointer based
	 expressions for frame pointer and vice versa, which is something
	 that would confuse DSE.  See the comment in cselib_expand_value_rtx_1
	 for more details.  */
      if (REG_P (p->loc)
	  && (REGNO (p->loc) == STACK_POINTER_REGNUM
	      || REGNO (p->loc) == FRAME_POINTER_REGNUM
	      || REGNO (p->loc) == HARD_FRAME_POINTER_REGNUM
	      || REGNO (p->loc) == cfa_base_preserved_regno))
	return p->loc;
      /* Avoid infinite recursion trying to expand a reg into
	 the same reg.  */
      if ((REG_P (p->loc))
	  && (REGNO (p->loc) < regno)
	  && !bitmap_bit_p (evd->regs_active, REGNO (p->loc)))
	{
	  reg_result = p->loc;
	  regno = REGNO (p->loc);
	}
      /* Avoid infinite recursion and do not try to expand the
	 value.  */
      else if (GET_CODE (p->loc) == VALUE
	       && CSELIB_VAL_PTR (p->loc)->locs == p_in)
	continue;
      else if (!REG_P (p->loc))
	{
	  rtx result, note;
	  if (dump_file && (dump_flags & TDF_CSELIB))
	    {
	      print_inline_rtx (dump_file, p->loc, 0);
	      fprintf (dump_file, "\n");
	    }
	  if (GET_CODE (p->loc) == LO_SUM
	      && GET_CODE (XEXP (p->loc, 1)) == SYMBOL_REF
	      && p->setting_insn
	      && (note = find_reg_note (p->setting_insn, REG_EQUAL, NULL_RTX))
	      && XEXP (note, 0) == XEXP (p->loc, 1))
	    return XEXP (p->loc, 1);
	  result = cselib_expand_value_rtx_1 (p->loc, evd, max_depth - 1);
	  if (result)
	    return result;
	}
    }

  if (regno != UINT_MAX)
    {
      rtx result;
      if (dump_file && (dump_flags & TDF_CSELIB))
	fprintf (dump_file, "r%d\n", regno);

      result = cselib_expand_value_rtx_1 (reg_result, evd, max_depth - 1);
      if (result)
	return result;
    }

  if (dump_file && (dump_flags & TDF_CSELIB))
    {
      if (reg_result)
	{
	  print_inline_rtx (dump_file, reg_result, 0);
	  fprintf (dump_file, "\n");
	}
      else
	fprintf (dump_file, "NULL\n");
    }
  return reg_result;
}
/* Forward substitute and expand an expression out to its roots.
   This is the opposite of common subexpression.  Because local value
   numbering is such a weak optimization, the expanded expression is
   pretty much unique (not from a pointer equals point of view but
   from a tree shape point of view).

   This function returns NULL if the expansion fails.  The expansion
   will fail if there is no value number for one of the operands or if
   one of the operands has been overwritten between the current insn
   and the beginning of the basic block.  For instance x has no
   expansion in:

   r1 <- r1 + 3
   x <- r1 + 8

   REGS_ACTIVE is a scratch bitmap that should be clear when passing in.
   It is clear on return.  */

rtx
cselib_expand_value_rtx (rtx orig, bitmap regs_active, int max_depth)
{
  struct expand_value_data evd;

  evd.regs_active = regs_active;
  evd.callback = NULL;
  evd.callback_arg = NULL;
  evd.dummy = false;

  return cselib_expand_value_rtx_1 (orig, &evd, max_depth);
}
/* Same as cselib_expand_value_rtx, but using a callback to try to
   resolve some expressions.  The CB function should return ORIG if it
   can't or does not want to deal with a certain RTX.  Any other
   return value, including NULL, will be used as the expansion for
   VALUE, without any further changes.  */

rtx
cselib_expand_value_rtx_cb (rtx orig, bitmap regs_active, int max_depth,
			    cselib_expand_callback cb, void *data)
{
  struct expand_value_data evd;

  evd.regs_active = regs_active;
  evd.callback = cb;
  evd.callback_arg = data;
  evd.dummy = false;

  return cselib_expand_value_rtx_1 (orig, &evd, max_depth);
}

/* Similar to cselib_expand_value_rtx_cb, but no rtxs are actually copied
   or simplified.  Useful to find out whether cselib_expand_value_rtx_cb
   would return NULL or non-NULL, without allocating new rtx.  */

bool
cselib_dummy_expand_value_rtx_cb (rtx orig, bitmap regs_active, int max_depth,
				  cselib_expand_callback cb, void *data)
{
  struct expand_value_data evd;

  evd.regs_active = regs_active;
  evd.callback = cb;
  evd.callback_arg = data;
  evd.dummy = true;

  return cselib_expand_value_rtx_1 (orig, &evd, max_depth) != NULL;
}
/* Internal implementation of cselib_expand_value_rtx and
   cselib_expand_value_rtx_cb.  */

static rtx
cselib_expand_value_rtx_1 (rtx orig, struct expand_value_data *evd,
			   int max_depth)
{
  rtx copy, scopy;
  int i, j;
  RTX_CODE code;
  const char *format_ptr;
  machine_mode mode;

  code = GET_CODE (orig);

  /* For the context of dse, if we end up expanding into a huge tree, we
     will not have a useful address, so we might as well just give up
     quite early.  */
  if (max_depth <= 0)
    return NULL;

  switch (code)
    {
    case REG:
      {
	struct elt_list *l = REG_VALUES (REGNO (orig));

	if (l && l->elt == NULL)
	  l = l->next;
	for (; l; l = l->next)
	  if (GET_MODE (l->elt->val_rtx) == GET_MODE (orig))
	    {
	      rtx result;
	      unsigned regno = REGNO (orig);

	      /* The only thing that we are not willing to do (this
		 is a requirement of dse and if other potential uses
		 need this function we should add a parm to control
		 it) is that we will not substitute the
		 STACK_POINTER_REGNUM, FRAME_POINTER or the
		 HARD_FRAME_POINTER.

		 These expansions confuse the code that notices that
		 stores into the frame go dead at the end of the
		 function and that the frame is not affected by calls
		 to subroutines.  If you allow the
		 STACK_POINTER_REGNUM substitution, then dse will
		 think that parameter pushing also goes dead which is
		 wrong.  If you allow the FRAME_POINTER or the
		 HARD_FRAME_POINTER then you lose the opportunity to
		 make the frame assumptions.  */
	      if (regno == STACK_POINTER_REGNUM
		  || regno == FRAME_POINTER_REGNUM
		  || regno == HARD_FRAME_POINTER_REGNUM
		  || regno == cfa_base_preserved_regno)
		return orig;

	      bitmap_set_bit (evd->regs_active, regno);

	      if (dump_file && (dump_flags & TDF_CSELIB))
		fprintf (dump_file, "expanding: r%d into: ", regno);

	      result = expand_loc (l->elt->locs, evd, max_depth);
	      bitmap_clear_bit (evd->regs_active, regno);

	      if (result)
		return result;
	      else
		return orig;
	    }
	return orig;
      }

    CASE_CONST_ANY:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case SCRATCH:
      /* SCRATCH must be shared because they represent distinct values.  */
      return orig;
    case CLOBBER:
      if (REG_P (XEXP (orig, 0)) && HARD_REGISTER_NUM_P (REGNO (XEXP (orig, 0))))
	return orig;
      break;

    case CONST:
      if (shared_const_p (orig))
	return orig;
      break;

    case SUBREG:
      {
	rtx subreg;

	if (evd->callback)
	  {
	    subreg = evd->callback (orig, evd->regs_active, max_depth,
				    evd->callback_arg);
	    if (subreg != orig)
	      return subreg;
	  }

	subreg = cselib_expand_value_rtx_1 (SUBREG_REG (orig), evd,
					    max_depth - 1);
	if (!subreg)
	  return NULL;
	scopy = simplify_gen_subreg (GET_MODE (orig), subreg,
				     GET_MODE (SUBREG_REG (orig)),
				     SUBREG_BYTE (orig));
	if (scopy == NULL
	    || (GET_CODE (scopy) == SUBREG
		&& !REG_P (SUBREG_REG (scopy))
		&& !MEM_P (SUBREG_REG (scopy))))
	  return NULL;

	return scopy;
      }

    case VALUE:
      {
	rtx result;

	if (dump_file && (dump_flags & TDF_CSELIB))
	  {
	    fputs ("\nexpanding ", dump_file);
	    print_rtl_single (dump_file, orig);
	    fputs (" into...", dump_file);
	  }

	if (evd->callback)
	  {
	    result = evd->callback (orig, evd->regs_active, max_depth,
				    evd->callback_arg);

	    if (result != orig)
	      return result;
	  }

	result = expand_loc (CSELIB_VAL_PTR (orig)->locs, evd, max_depth);
	return result;
      }

    case DEBUG_EXPR:
      if (evd->callback)
	return evd->callback (orig, evd->regs_active, max_depth,
			      evd->callback_arg);
      return orig;

    default:
      break;
    }

  /* Copy the various flags, fields, and other information.  We assume
     that all fields need copying, and then clear the fields that should
     not be copied.  That is the sensible default behavior, and forces
     us to explicitly document why we are *not* copying a flag.  */
  if (evd->dummy)
    copy = NULL;
  else
    copy = shallow_copy_rtx (orig);

  format_ptr = GET_RTX_FORMAT (code);

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    switch (*format_ptr++)
      {
      case 'e':
	if (XEXP (orig, i) != NULL)
	  {
	    rtx result = cselib_expand_value_rtx_1 (XEXP (orig, i), evd,
						    max_depth - 1);
	    if (!result)
	      return NULL;
	    if (copy)
	      XEXP (copy, i) = result;
	  }
	break;

      case 'E':
      case 'V':
	if (XVEC (orig, i) != NULL)
	  {
	    if (copy)
	      XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
	    for (j = 0; j < XVECLEN (orig, i); j++)
	      {
		rtx result = cselib_expand_value_rtx_1 (XVECEXP (orig, i, j),
							evd, max_depth - 1);
		if (!result)
		  return NULL;
		if (copy)
		  XVECEXP (copy, i, j) = result;
	      }
	  }
	break;

      case 't':
      case 'w':
      case 'i':
      case 's':
      case 'S':
      case 'T':
      case 'u':
      case 'B':
      case '0':
	/* These are left unchanged.  */
	break;

      default:
	gcc_unreachable ();
      }

  if (evd->dummy)
    return orig;

  mode = GET_MODE (copy);
  /* If an operand has been simplified into CONST_INT, which doesn't
     have a mode and the mode isn't derivable from whole rtx's mode,
     try simplify_*_operation first with mode from original's operand
     and as a fallback wrap CONST_INT into gen_rtx_CONST.  */
  scopy = copy;
  switch (GET_RTX_CLASS (code))
    {
    case RTX_UNARY:
      if (CONST_INT_P (XEXP (copy, 0))
	  && GET_MODE (XEXP (orig, 0)) != VOIDmode)
	{
	  scopy = simplify_unary_operation (code, mode, XEXP (copy, 0),
					    GET_MODE (XEXP (orig, 0)));
	  if (scopy)
	    return scopy;
	}
      break;
    case RTX_COMM_ARITH:
    case RTX_BIN_ARITH:
      /* These expressions can derive operand modes from the whole rtx's mode.  */
      break;
    case RTX_TERNARY:
    case RTX_BITFIELD_OPS:
      if (CONST_INT_P (XEXP (copy, 0))
	  && GET_MODE (XEXP (orig, 0)) != VOIDmode)
	{
	  scopy = simplify_ternary_operation (code, mode,
					      GET_MODE (XEXP (orig, 0)),
					      XEXP (copy, 0), XEXP (copy, 1),
					      XEXP (copy, 2));
	  if (scopy)
	    return scopy;
	}
      break;
    case RTX_COMPARE:
    case RTX_COMM_COMPARE:
      if (CONST_INT_P (XEXP (copy, 0))
	  && GET_MODE (XEXP (copy, 1)) == VOIDmode
	  && (GET_MODE (XEXP (orig, 0)) != VOIDmode
	      || GET_MODE (XEXP (orig, 1)) != VOIDmode))
	{
	  scopy = simplify_relational_operation (code, mode,
						 (GET_MODE (XEXP (orig, 0))
						  != VOIDmode)
						 ? GET_MODE (XEXP (orig, 0))
						 : GET_MODE (XEXP (orig, 1)),
						 XEXP (copy, 0),
						 XEXP (copy, 1));
	  if (scopy)
	    return scopy;
	}
      break;
    default:
      break;
    }
  scopy = simplify_rtx (copy);
  if (scopy)
    return scopy;

  return copy;
}
/* Walk rtx X and replace all occurrences of REG and MEM subexpressions
   with VALUE expressions.  This way, it becomes independent of changes
   to registers and memory.
   X isn't actually modified; if modifications are needed, new rtl is
   allocated.  However, the return value can share rtl with X.
   If X is within a MEM, MEMMODE must be the mode of the MEM.  */

rtx
cselib_subst_to_values (rtx x, machine_mode memmode)
{
  enum rtx_code code = GET_CODE (x);
  const char *fmt = GET_RTX_FORMAT (code);
  cselib_val *e;
  struct elt_list *l;
  rtx copy = x;
  int i;
  poly_int64 offset;

  switch (code)
    {
    case REG:
      l = REG_VALUES (REGNO (x));
      if (l && l->elt == NULL)
	l = l->next;
      for (; l; l = l->next)
	if (GET_MODE (l->elt->val_rtx) == GET_MODE (x))
	  return l->elt->val_rtx;

      gcc_unreachable ();

    case MEM:
      e = cselib_lookup_mem (x, 0);
      /* This used to happen for autoincrements, but we deal with them
	 properly now.  Remove the if stmt for the next release.  */
      if (! e)
	{
	  /* Assign a value that doesn't match any other.  */
	  e = new_cselib_val (next_uid, GET_MODE (x), x);
	}
      return e->val_rtx;

    case ENTRY_VALUE:
      e = cselib_lookup (x, GET_MODE (x), 0, memmode);
      if (! e)
	break;
      return e->val_rtx;

    CASE_CONST_ANY:
      return x;

    case PRE_DEC:
    case PRE_INC:
      gcc_assert (memmode != VOIDmode);
      offset = GET_MODE_SIZE (memmode);
      if (code == PRE_DEC)
	offset = -offset;
      return cselib_subst_to_values (plus_constant (GET_MODE (x),
						    XEXP (x, 0), offset),
				     memmode);

    case PRE_MODIFY:
      gcc_assert (memmode != VOIDmode);
      return cselib_subst_to_values (XEXP (x, 1), memmode);

    case POST_DEC:
    case POST_INC:
    case POST_MODIFY:
      gcc_assert (memmode != VOIDmode);
      return cselib_subst_to_values (XEXP (x, 0), memmode);

    case PLUS:
      if (GET_MODE (x) == Pmode && CONST_INT_P (XEXP (x, 1)))
	{
	  rtx t = cselib_subst_to_values (XEXP (x, 0), memmode);
	  if (GET_CODE (t) == VALUE)
	    {
	      if (SP_DERIVED_VALUE_P (t) && XEXP (x, 1) == const0_rtx)
		return t;
	      for (struct elt_loc_list *l = CSELIB_VAL_PTR (t)->locs;
		   l; l = l->next)
		if (GET_CODE (l->loc) == PLUS
		    && GET_CODE (XEXP (l->loc, 0)) == VALUE
		    && SP_DERIVED_VALUE_P (XEXP (l->loc, 0))
		    && CONST_INT_P (XEXP (l->loc, 1)))
		  return plus_constant (Pmode, l->loc, INTVAL (XEXP (x, 1)));
	    }
	  if (t != XEXP (x, 0))
	    {
	      copy = shallow_copy_rtx (x);
	      XEXP (copy, 0) = t;
	    }
	  return copy;
	}
      break;

    default:
      break;
    }

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  rtx t = cselib_subst_to_values (XEXP (x, i), memmode);

	  if (t != XEXP (x, i))
	    {
	      if (x == copy)
		copy = shallow_copy_rtx (x);
	      XEXP (copy, i) = t;
	    }
	}
      else if (fmt[i] == 'E')
	{
	  int j;

	  for (j = 0; j < XVECLEN (x, i); j++)
	    {
	      rtx t = cselib_subst_to_values (XVECEXP (x, i, j), memmode);

	      if (t != XVECEXP (x, i, j))
		{
		  if (XVEC (x, i) == XVEC (copy, i))
		    {
		      if (x == copy)
			copy = shallow_copy_rtx (x);
		      XVEC (copy, i) = shallow_copy_rtvec (XVEC (x, i));
		    }
		  XVECEXP (copy, i, j) = t;
		}
	    }
	}
    }

  return copy;
}
/* Wrapper for cselib_subst_to_values, that indicates X is in INSN.  */

rtx
cselib_subst_to_values_from_insn (rtx x, machine_mode memmode, rtx_insn *insn)
{
  rtx ret;
  gcc_assert (!cselib_current_insn);
  cselib_current_insn = insn;
  ret = cselib_subst_to_values (x, memmode);
  cselib_current_insn = NULL;
  return ret;
}
/* Look up the rtl expression X in our tables and return the value it
   has.  If CREATE is zero, we return NULL if we don't know the value.
   Otherwise, we create a new one if possible, using mode MODE if X
   doesn't have a mode (i.e. because it's a constant).  When X is part
   of an address, MEMMODE should be the mode of the enclosing MEM if
   we're tracking autoinc expressions.  */

static cselib_val *
cselib_lookup_1 (rtx x, machine_mode mode,
		 int create, machine_mode memmode)
{
  cselib_val **slot;
  cselib_val *e;
  unsigned int hashval;

  if (GET_MODE (x) != VOIDmode)
    mode = GET_MODE (x);

  if (GET_CODE (x) == VALUE)
    return CSELIB_VAL_PTR (x);

  if (REG_P (x))
    {
      struct elt_list *l;
      unsigned int i = REGNO (x);

      l = REG_VALUES (i);
      if (l && l->elt == NULL)
	l = l->next;
      for (; l; l = l->next)
	if (mode == GET_MODE (l->elt->val_rtx))
	  {
	    promote_debug_loc (l->elt->locs);
	    return l->elt;
	  }

      if (! create)
	return 0;

      if (i < FIRST_PSEUDO_REGISTER)
	{
	  unsigned int n = hard_regno_nregs (i, mode);

	  if (n > max_value_regs)
	    max_value_regs = n;
	}

      e = new_cselib_val (next_uid, GET_MODE (x), x);
      if (GET_MODE (x) == Pmode && x == stack_pointer_rtx)
	SP_DERIVED_VALUE_P (e->val_rtx) = 1;
      new_elt_loc_list (e, x);

      scalar_int_mode int_mode;
      if (REG_VALUES (i) == 0)
	{
	  /* Maintain the invariant that the first entry of
	     REG_VALUES, if present, must be the value used to set the
	     register, or NULL.  */
	  used_regs[n_used_regs++] = i;
	  REG_VALUES (i) = new_elt_list (REG_VALUES (i), NULL);
	}
      else if (cselib_preserve_constants
	       && is_int_mode (mode, &int_mode))
	{
	  /* During var-tracking, try harder to find equivalences
	     for SUBREGs.  If a setter sets, say, a DImode register
	     and the user uses that register only in SImode, add a lowpart
	     subreg location.  */
	  struct elt_list *lwider = NULL;
	  scalar_int_mode lmode;
	  l = REG_VALUES (i);
	  if (l && l->elt == NULL)
	    l = l->next;
	  for (; l; l = l->next)
	    if (is_int_mode (GET_MODE (l->elt->val_rtx), &lmode)
		&& GET_MODE_SIZE (lmode) > GET_MODE_SIZE (int_mode)
		&& (lwider == NULL
		    || partial_subreg_p (lmode,
					 GET_MODE (lwider->elt->val_rtx))))
	      {
		struct elt_loc_list *el;
		if (i < FIRST_PSEUDO_REGISTER
		    && hard_regno_nregs (i, lmode) != 1)
		  continue;
		for (el = l->elt->locs; el; el = el->next)
		  if (!REG_P (el->loc))
		    break;
		if (el)
		  lwider = l;
	      }
	  if (lwider)
	    {
	      rtx sub = lowpart_subreg (int_mode, lwider->elt->val_rtx,
					GET_MODE (lwider->elt->val_rtx));
	      if (sub)
		new_elt_loc_list (e, sub);
	    }
	}
      REG_VALUES (i)->next = new_elt_list (REG_VALUES (i)->next, e);
      slot = cselib_find_slot (mode, x, e->hash, INSERT, memmode);
      *slot = e;
      return e;
    }

  if (MEM_P (x))
    return cselib_lookup_mem (x, create);

  hashval = cselib_hash_rtx (x, create, memmode);
  /* Can't even create if hashing is not possible.  */
  if (! hashval)
    return 0;

  slot = cselib_find_slot (mode, x, hashval,
			   create ? INSERT : NO_INSERT, memmode);
  if (slot == 0)
    return 0;

  e = (cselib_val *) *slot;
  if (e)
    return e;

  e = new_cselib_val (hashval, mode, x);

  /* We have to fill the slot before calling cselib_subst_to_values:
     the hash table is inconsistent until we do so, and
     cselib_subst_to_values will need to do lookups.  */
  *slot = e;
  rtx v = cselib_subst_to_values (x, memmode);

  /* If cselib_preserve_constants, we might get a SP_DERIVED_VALUE_P
     VALUE that isn't in the hash tables anymore.  */
  if (GET_CODE (v) == VALUE && SP_DERIVED_VALUE_P (v) && PRESERVED_VALUE_P (v))
    PRESERVED_VALUE_P (e->val_rtx) = 1;

  new_elt_loc_list (e, v);
  return e;
}
/* Wrapper for cselib_lookup, that indicates X is in INSN.  */

cselib_val *
cselib_lookup_from_insn (rtx x, machine_mode mode,
			 int create, machine_mode memmode, rtx_insn *insn)
{
  cselib_val *ret;

  gcc_assert (!cselib_current_insn);
  cselib_current_insn = insn;

  ret = cselib_lookup (x, mode, create, memmode);

  cselib_current_insn = NULL;

  return ret;
}
/* Wrapper for cselib_lookup_1, that logs the lookup result and
   maintains invariants related with debug insns.  */

cselib_val *
cselib_lookup (rtx x, machine_mode mode,
	       int create, machine_mode memmode)
{
  cselib_val *ret = cselib_lookup_1 (x, mode, create, memmode);

  /* ??? Should we return NULL if we're not to create an entry, the
     found loc is a debug loc and cselib_current_insn is not DEBUG?
     If so, we should also avoid converting val to non-DEBUG; probably
     easiest setting cselib_current_insn to NULL before the call
     above.  */

  if (dump_file && (dump_flags & TDF_CSELIB))
    {
      fputs ("cselib lookup ", dump_file);
      print_inline_rtx (dump_file, x, 2);
      fprintf (dump_file, " => %u:%u\n",
	       ret ? ret->uid : 0,
	       ret ? ret->hash : 0);
    }

  return ret;
}
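/* Illustrative only (an editor's sketch): a cselib client can test whether
   two expressions are currently known to be equivalent by comparing the
   canonical VALUEs returned by non-creating lookups, e.g.

     cselib_val *a = cselib_lookup (x, GET_MODE (x), 0, VOIDmode);
     cselib_val *b = cselib_lookup (y, GET_MODE (y), 0, VOIDmode);
     bool equiv = a && b
		  && canonical_cselib_val (a) == canonical_cselib_val (b);

   Constants have VOIDmode, so for them MODE must be passed explicitly.  */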
/* Invalidate the value at *L, which is part of REG_VALUES (REGNO).  */

static void
cselib_invalidate_regno_val (unsigned int regno, struct elt_list **l)
{
  cselib_val *v = (*l)->elt;
  if (*l == REG_VALUES (regno))
    {
      /* Maintain the invariant that the first entry of
	 REG_VALUES, if present, must be the value used to set
	 the register, or NULL.  This is also nice because
	 then we won't push the same regno onto user_regs
	 multiple times.  */
      (*l)->elt = NULL;
    }
  else
    unchain_one_elt_list (l);

  v = canonical_cselib_val (v);

  bool had_locs = v->locs != NULL;
  rtx_insn *setting_insn = v->locs ? v->locs->setting_insn : NULL;

  /* Now, we clear the mapping from value to reg.  It must exist, so
     this code will crash intentionally if it doesn't.  */
  for (elt_loc_list **p = &v->locs; ; p = &(*p)->next)
    {
      rtx x = (*p)->loc;

      if (REG_P (x) && REGNO (x) == regno)
	{
	  unchain_one_elt_loc_list (p);
	  break;
	}
    }

  if (had_locs && cselib_useless_value_p (v))
    {
      if (setting_insn && DEBUG_INSN_P (setting_insn))
	n_useless_debug_values++;
      else
	n_useless_values++;
    }
}
/* Invalidate any entries in reg_values that overlap REGNO.  This is called
   if REGNO is changing.  MODE is the mode of the assignment to REGNO, which
   is used to determine how many hard registers are being changed.  If MODE
   is VOIDmode, then only REGNO is being changed; this is used when
   invalidating call clobbered registers across a call.  */

static void
cselib_invalidate_regno (unsigned int regno, machine_mode mode)
{
  unsigned int endregno;
  unsigned int i;

  /* If we see pseudos after reload, something is _wrong_.  */
  gcc_assert (!reload_completed || regno < FIRST_PSEUDO_REGISTER
	      || reg_renumber[regno] < 0);

  /* Determine the range of registers that must be invalidated.  For
     pseudos, only REGNO is affected.  For hard regs, we must take MODE
     into account, and we must also invalidate lower register numbers
     if they contain values that overlap REGNO.  */
  if (regno < FIRST_PSEUDO_REGISTER)
    {
      gcc_assert (mode != VOIDmode);

      if (regno < max_value_regs)
	i = 0;
      else
	i = regno - max_value_regs;

      endregno = end_hard_regno (mode, regno);
    }
  else
    {
      i = regno;
      endregno = regno + 1;
    }

  for (; i < endregno; i++)
    {
      struct elt_list **l = &REG_VALUES (i);

      /* Go through all known values for this reg; if it overlaps the range
	 we're invalidating, remove the value.  */
      while (*l)
	{
	  cselib_val *v = (*l)->elt;
	  unsigned int this_last = i;

	  if (i < FIRST_PSEUDO_REGISTER && v != NULL)
	    this_last = end_hard_regno (GET_MODE (v->val_rtx), i) - 1;

	  if (this_last < regno || v == NULL
	      || (v == cfa_base_preserved_val
		  && i == cfa_base_preserved_regno))
	    {
	      l = &(*l)->next;
	      continue;
	    }

	  /* We have an overlap.  */
	  cselib_invalidate_regno_val (i, l);
	}
    }
}
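/* Illustrative only (an editor's sketch): on a target where DImode needs
   two word registers, a value set in (reg:DI 0) occupies hard regs 0 and 1,
   so invalidating (reg:SI 1) must also remove that DImode value.  That is
   why the scan above starts as far back as regno - max_value_regs.  */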
/* Invalidate any locations in the table which are changed because of a
   store to MEM_RTX.  If this is called because of a non-const call
   instruction, MEM_RTX is (mem:BLK const0_rtx).  */

static void
cselib_invalidate_mem (rtx mem_rtx)
{
  cselib_val **vp, *v, *next;
  int num_mems = 0;
  rtx mem_addr;

  mem_addr = canon_rtx (get_addr (XEXP (mem_rtx, 0)));
  mem_rtx = canon_rtx (mem_rtx);

  vp = &first_containing_mem;
  for (v = *vp; v != &dummy_val; v = next)
    {
      bool has_mem = false;
      struct elt_loc_list **p = &v->locs;
      bool had_locs = v->locs != NULL;
      rtx_insn *setting_insn = v->locs ? v->locs->setting_insn : NULL;

      while (*p)
	{
	  rtx x = (*p)->loc;
	  cselib_val *addr;
	  struct elt_list **mem_chain;

	  /* MEMs may occur in locations only at the top level; below
	     that every MEM or REG is substituted by its VALUE.  */
	  if (!MEM_P (x))
	    {
	      p = &(*p)->next;
	      continue;
	    }
	  if (num_mems < param_max_cselib_memory_locations
	      && ! canon_anti_dependence (x, false, mem_rtx,
					  GET_MODE (mem_rtx), mem_addr))
	    {
	      has_mem = true;
	      num_mems++;
	      p = &(*p)->next;
	      continue;
	    }

	  /* This one overlaps.  */
	  /* We must have a mapping from this MEM's address to the
	     value (E).  Remove that, too.  */
	  addr = cselib_lookup (XEXP (x, 0), VOIDmode, 0, GET_MODE (x));
	  addr = canonical_cselib_val (addr);
	  gcc_checking_assert (v == canonical_cselib_val (v));
	  mem_chain = &addr->addr_list;
	  for (;;)
	    {
	      cselib_val *canon = canonical_cselib_val ((*mem_chain)->elt);

	      if (canon == v)
		{
		  unchain_one_elt_list (mem_chain);
		  break;
		}

	      /* Record canonicalized elt.  */
	      (*mem_chain)->elt = canon;

	      mem_chain = &(*mem_chain)->next;
	    }

	  unchain_one_elt_loc_list (p);
	}

      if (had_locs && cselib_useless_value_p (v))
	{
	  if (setting_insn && DEBUG_INSN_P (setting_insn))
	    n_useless_debug_values++;
	  else
	    n_useless_values++;
	}

      next = v->next_containing_mem;
      if (has_mem)
	{
	  *vp = v;
	  vp = &(*vp)->next_containing_mem;
	}
      else
	v->next_containing_mem = NULL;
    }

  *vp = &dummy_val;
}
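/* Illustrative only: a non-const call is handled by invalidating through
   callmem, i.e. (mem:BLK (scratch)), which canon_anti_dependence treats as
   conflicting with every tracked memory location, so all MEM locations are
   dropped at such calls.  */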
/* Invalidate DEST.  */

void
cselib_invalidate_rtx (rtx dest)
{
  while (GET_CODE (dest) == SUBREG
	 || GET_CODE (dest) == ZERO_EXTRACT
	 || GET_CODE (dest) == STRICT_LOW_PART)
    dest = XEXP (dest, 0);

  if (REG_P (dest))
    cselib_invalidate_regno (REGNO (dest), GET_MODE (dest));
  else if (MEM_P (dest))
    cselib_invalidate_mem (dest);
}
/* A wrapper for cselib_invalidate_rtx to be called via note_stores.  */

static void
cselib_invalidate_rtx_note_stores (rtx dest, const_rtx,
				   void *data ATTRIBUTE_UNUSED)
{
  cselib_invalidate_rtx (dest);
}
/* Record the result of a SET instruction.  DEST is being set; the source
   contains the value described by SRC_ELT.  If DEST is a MEM, DEST_ADDR_ELT
   describes its address.  */

static void
cselib_record_set (rtx dest, cselib_val *src_elt, cselib_val *dest_addr_elt)
{
  if (src_elt == 0 || side_effects_p (dest))
    return;

  if (REG_P (dest))
    {
      unsigned int dreg = REGNO (dest);
      if (dreg < FIRST_PSEUDO_REGISTER)
	{
	  unsigned int n = REG_NREGS (dest);

	  if (n > max_value_regs)
	    max_value_regs = n;
	}

      if (REG_VALUES (dreg) == 0)
	{
	  used_regs[n_used_regs++] = dreg;
	  REG_VALUES (dreg) = new_elt_list (REG_VALUES (dreg), src_elt);
	}
      else
	{
	  /* The register should have been invalidated.  */
	  gcc_assert (REG_VALUES (dreg)->elt == 0);
	  REG_VALUES (dreg)->elt = src_elt;
	}

      if (cselib_useless_value_p (src_elt))
	n_useless_values--;
      new_elt_loc_list (src_elt, dest);
    }
  else if (MEM_P (dest) && dest_addr_elt != 0
	   && cselib_record_memory)
    {
      if (cselib_useless_value_p (src_elt))
	n_useless_values--;
      add_mem_for_addr (dest_addr_elt, src_elt, dest);
    }
}
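/* Illustrative only (an editor's sketch): after processing
     (set (reg:SI 65) (plus:SI (reg:SI 66) (const_int 4)))
   the first entry of REG_VALUES (65) holds the VALUE computed for the PLUS,
   and that VALUE gains (reg:SI 65) as an additional location.  */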
/* Make ELT and X's VALUE equivalent to each other at INSN.  */

void
cselib_add_permanent_equiv (cselib_val *elt, rtx x, rtx_insn *insn)
{
  cselib_val *nelt;
  rtx_insn *save_cselib_current_insn = cselib_current_insn;

  gcc_checking_assert (elt);
  gcc_checking_assert (PRESERVED_VALUE_P (elt->val_rtx));
  gcc_checking_assert (!side_effects_p (x));

  cselib_current_insn = insn;

  nelt = cselib_lookup (x, GET_MODE (elt->val_rtx), 1, VOIDmode);

  if (nelt != elt)
    {
      cselib_any_perm_equivs = true;

      if (!PRESERVED_VALUE_P (nelt->val_rtx))
	cselib_preserve_value (nelt);

      new_elt_loc_list (nelt, elt->val_rtx);
    }

  cselib_current_insn = save_cselib_current_insn;
}
/* Return TRUE if any permanent equivalences have been recorded since
   the table was last initialized.  */

bool
cselib_have_permanent_equivalences (void)
{
  return cselib_any_perm_equivs;
}
/* Record stack_pointer_rtx to be equal to
   (plus:P cfa_base_preserved_val offset).  Used by var-tracking
   at the start of basic blocks for !frame_pointer_needed functions.  */

void
cselib_record_sp_cfa_base_equiv (HOST_WIDE_INT offset, rtx_insn *insn)
{
  rtx sp_derived_value = NULL_RTX;
  for (struct elt_loc_list *l = cfa_base_preserved_val->locs; l; l = l->next)
    if (GET_CODE (l->loc) == VALUE
	&& SP_DERIVED_VALUE_P (l->loc))
      {
	sp_derived_value = l->loc;
	break;
      }
    else if (GET_CODE (l->loc) == PLUS
	     && GET_CODE (XEXP (l->loc, 0)) == VALUE
	     && SP_DERIVED_VALUE_P (XEXP (l->loc, 0))
	     && CONST_INT_P (XEXP (l->loc, 1)))
      {
	sp_derived_value = XEXP (l->loc, 0);
	offset = offset + UINTVAL (XEXP (l->loc, 1));
	break;
      }

  if (sp_derived_value == NULL_RTX)
    return;

  cselib_val *val
    = cselib_lookup_from_insn (plus_constant (Pmode, sp_derived_value, offset),
			       Pmode, 1, VOIDmode, insn);
  if (val != NULL)
    {
      PRESERVED_VALUE_P (val->val_rtx) = 1;
      cselib_record_set (stack_pointer_rtx, val, NULL);
    }
}
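/* Illustrative only: if the preserved CFA base value has a location list
   containing the SP_DERIVED_VALUE_P value V1 (possibly plus a CONST_INT),
   this records stack_pointer_rtx as the VALUE looked up for
   (plus:P V1 (const_int OFFSET)), which keeps sp-based and CFA-based
   expressions comparable across basic blocks.  */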
/* Return true if V is SP_DERIVED_VALUE_P (or SP_DERIVED_VALUE_P + CONST_INT)
   that can be expressed using cfa_base_preserved_val + CONST_INT.  */

bool
cselib_sp_derived_value_p (cselib_val *v)
{
  if (!SP_DERIVED_VALUE_P (v->val_rtx))
    for (struct elt_loc_list *l = v->locs; l; l = l->next)
      if (GET_CODE (l->loc) == PLUS
	  && GET_CODE (XEXP (l->loc, 0)) == VALUE
	  && SP_DERIVED_VALUE_P (XEXP (l->loc, 0))
	  && CONST_INT_P (XEXP (l->loc, 1)))
	v = CSELIB_VAL_PTR (XEXP (l->loc, 0));
  if (!SP_DERIVED_VALUE_P (v->val_rtx))
    return false;
  for (struct elt_loc_list *l = v->locs; l; l = l->next)
    if (l->loc == cfa_base_preserved_val->val_rtx)
      return true;
    else if (GET_CODE (l->loc) == PLUS
	     && XEXP (l->loc, 0) == cfa_base_preserved_val->val_rtx
	     && CONST_INT_P (XEXP (l->loc, 1)))
      return true;
  return false;
}
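/* Illustrative only: this accepts a VALUE V2 whose location list contains
   (plus:P V1 (const_int -16)) where V1 is SP_DERIVED_VALUE_P, provided V1
   (or V2 itself) has cfa_base_preserved_val->val_rtx, possibly plus a
   CONST_INT, among its locations.  */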
/* There is no good way to determine how many elements there can be
   in a PARALLEL.  Since it's fairly cheap, use a really large number.  */
#define MAX_SETS (FIRST_PSEUDO_REGISTER * 2)

struct cselib_record_autoinc_data
{
  struct cselib_set *sets;
  int n_sets;
};
/* Callback for for_each_inc_dec.  Records in ARG the SETs implied by
   autoinc RTXs: SRC plus SRCOFF if non-NULL is stored in DEST.  */

static int
cselib_record_autoinc_cb (rtx mem ATTRIBUTE_UNUSED, rtx op ATTRIBUTE_UNUSED,
			  rtx dest, rtx src, rtx srcoff, void *arg)
{
  struct cselib_record_autoinc_data *data;
  data = (struct cselib_record_autoinc_data *)arg;

  data->sets[data->n_sets].dest = dest;
  if (srcoff)
    data->sets[data->n_sets].src = gen_rtx_PLUS (GET_MODE (src), src, srcoff);
  else
    data->sets[data->n_sets].src = src;
  data->n_sets++;

  return 0;
}
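/* Illustrative only (an editor's sketch): for an address such as
   (pre_dec:P (reg sp)) inside an SImode MEM, for_each_inc_dec invokes this
   callback with DEST and SRC both the stack pointer and SRCOFF
   (const_int -4), so the implicit set sp = sp + (-4) is appended to the
   SETs array alongside the insn's explicit sets.  */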
/* Record the effects of any sets and autoincs in INSN.  */
static void
cselib_record_sets (rtx_insn *insn)
{
  int n_sets = 0;
  int i;
  struct cselib_set sets[MAX_SETS];
  rtx cond = 0;
  int n_sets_before_autoinc;
  int n_strict_low_parts = 0;
  struct cselib_record_autoinc_data data;

  rtx body = PATTERN (insn);
  if (GET_CODE (body) == COND_EXEC)
    {
      cond = COND_EXEC_TEST (body);
      body = COND_EXEC_CODE (body);
    }

  /* Find all sets.  */
  if (GET_CODE (body) == SET)
    {
      sets[0].src = SET_SRC (body);
      sets[0].dest = SET_DEST (body);
      n_sets = 1;
    }
  else if (GET_CODE (body) == PARALLEL)
    {
      /* Look through the PARALLEL and record the values being
	 set, if possible.  Also handle any CLOBBERs.  */
      for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
	{
	  rtx x = XVECEXP (body, 0, i);

	  if (GET_CODE (x) == SET)
	    {
	      sets[n_sets].src = SET_SRC (x);
	      sets[n_sets].dest = SET_DEST (x);
	      n_sets++;
	    }
	}
    }

  if (n_sets == 1
      && MEM_P (sets[0].src)
      && !cselib_record_memory
      && MEM_READONLY_P (sets[0].src))
    {
      rtx note = find_reg_equal_equiv_note (insn);

      if (note && CONSTANT_P (XEXP (note, 0)))
	sets[0].src = XEXP (note, 0);
    }

  data.sets = sets;
  data.n_sets = n_sets_before_autoinc = n_sets;
  for_each_inc_dec (PATTERN (insn), cselib_record_autoinc_cb, &data);
  n_sets = data.n_sets;

  /* Look up the values that are read.  Do this before invalidating the
     locations that are written.  */
  for (i = 0; i < n_sets; i++)
    {
      rtx dest = sets[i].dest;
      rtx orig = dest;

      /* A STRICT_LOW_PART can be ignored; we'll record the equivalence for
	 the low part after invalidating any knowledge about larger modes.  */
      if (GET_CODE (sets[i].dest) == STRICT_LOW_PART)
	sets[i].dest = dest = XEXP (dest, 0);

      /* We don't know how to record anything but REG or MEM.  */
      if (REG_P (dest)
	  || (MEM_P (dest) && cselib_record_memory))
	{
	  rtx src = sets[i].src;
	  if (cond)
	    src = gen_rtx_IF_THEN_ELSE (GET_MODE (dest), cond, src, dest);
	  sets[i].src_elt = cselib_lookup (src, GET_MODE (dest), 1, VOIDmode);
	  if (MEM_P (dest))
	    {
	      machine_mode address_mode = get_address_mode (dest);

	      sets[i].dest_addr_elt = cselib_lookup (XEXP (dest, 0),
						     address_mode, 1,
						     GET_MODE (dest));
	    }
	  else
	    sets[i].dest_addr_elt = 0;
	}

      /* Improve handling of STRICT_LOW_PART if the current value is known
	 to be const0_rtx, then the low bits will be set to dest and higher
	 bits will remain zero.  Used in code like:

	 {di:SI=0;clobber flags:CC;}
	 flags:CCNO=cmp(bx:SI,0)
	 strict_low_part(di:QI)=flags:CCNO<=0

	 where we can note both that di:QI=flags:CCNO<=0 and
	 also that because di:SI is known to be 0 and strict_low_part(di:QI)
	 preserves the upper bits that di:SI=zero_extend(flags:CCNO<=0).  */
      scalar_int_mode mode;
      if (dest != orig
	  && cselib_record_sets_hook
	  && REG_P (dest)
	  && HARD_REGISTER_P (dest)
	  && sets[i].src_elt
	  && is_a <scalar_int_mode> (GET_MODE (dest), &mode)
	  && n_sets + n_strict_low_parts < MAX_SETS)
	{
	  opt_scalar_int_mode wider_mode_iter;
	  FOR_EACH_WIDER_MODE (wider_mode_iter, mode)
	    {
	      scalar_int_mode wider_mode = wider_mode_iter.require ();
	      if (GET_MODE_PRECISION (wider_mode) > BITS_PER_WORD)
		break;

	      rtx reg = gen_lowpart (wider_mode, dest);
	      if (!REG_P (reg))
		break;

	      cselib_val *v = cselib_lookup (reg, wider_mode, 0, VOIDmode);
	      if (!v)
		continue;

	      struct elt_loc_list *l;
	      for (l = v->locs; l; l = l->next)
		if (l->loc == const0_rtx)
		  break;

	      if (!l)
		continue;

	      sets[n_sets + n_strict_low_parts].dest = reg;
	      sets[n_sets + n_strict_low_parts].src = dest;
	      sets[n_sets + n_strict_low_parts++].src_elt = sets[i].src_elt;
	      break;
	    }
	}
    }

  if (cselib_record_sets_hook)
    cselib_record_sets_hook (insn, sets, n_sets);

  /* Invalidate all locations written by this insn.  Note that the elts we
     looked up in the previous loop aren't affected, just some of their
     locations may go away.  */
  note_pattern_stores (body, cselib_invalidate_rtx_note_stores, NULL);

  for (i = n_sets_before_autoinc; i < n_sets; i++)
    cselib_invalidate_rtx (sets[i].dest);

  /* If this is an asm, look for duplicate sets.  This can happen when the
     user uses the same value as an output multiple times.  This is valid
     if the outputs are not actually used thereafter.  Treat this case as
     if the value isn't actually set.  We do this by smashing the destination
     to pc_rtx, so that we won't record the value later.  */
  if (n_sets >= 2 && asm_noperands (body) >= 0)
    for (i = 0; i < n_sets; i++)
      {
	rtx dest = sets[i].dest;
	if (REG_P (dest) || MEM_P (dest))
	  {
	    int j;
	    for (j = i + 1; j < n_sets; j++)
	      if (rtx_equal_p (dest, sets[j].dest))
		{
		  sets[i].dest = pc_rtx;
		  sets[j].dest = pc_rtx;
		}
	  }
      }

  /* Now enter the equivalences in our tables.  */
  for (i = 0; i < n_sets; i++)
    {
      rtx dest = sets[i].dest;
      if (REG_P (dest)
	  || (MEM_P (dest) && cselib_record_memory))
	cselib_record_set (dest, sets[i].src_elt, sets[i].dest_addr_elt);
    }

  /* And deal with STRICT_LOW_PART.  */
  for (i = 0; i < n_strict_low_parts; i++)
    {
      if (! PRESERVED_VALUE_P (sets[n_sets + i].src_elt->val_rtx))
	continue;
      machine_mode dest_mode = GET_MODE (sets[n_sets + i].dest);
      cselib_val *v
	= cselib_lookup (sets[n_sets + i].dest, dest_mode, 1, VOIDmode);
      cselib_preserve_value (v);
      rtx r = gen_rtx_ZERO_EXTEND (dest_mode,
				   sets[n_sets + i].src_elt->val_rtx);
      cselib_add_permanent_equiv (v, r, insn);
    }
}
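/* Illustrative only (an editor's sketch): the duplicate-output smashing
   above handles an asm statement such as
     asm ("..." : "=r" (x), "=r" (x));
   whose PARALLEL sets the same lvalue twice; both destinations become
   pc_rtx, so no bogus equivalence is entered for either output.  */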
/* Return true if INSN in the prologue initializes hard_frame_pointer_rtx.  */

static bool
fp_setter_insn (rtx_insn *insn)
{
  rtx expr, pat = NULL_RTX;

  if (!RTX_FRAME_RELATED_P (insn))
    return false;

  expr = find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX);
  if (expr)
    pat = XEXP (expr, 0);
  if (!modified_in_p (hard_frame_pointer_rtx, pat ? pat : insn))
    return false;

  /* Don't return true for frame pointer restores in the epilogue.  */
  if (find_reg_note (insn, REG_CFA_RESTORE, hard_frame_pointer_rtx))
    return false;
  return true;
}
/* V is one of the values in REG_VALUES (REGNO).  Return true if it
   would be invalidated by CALLEE_ABI.  */

static bool
cselib_invalidated_by_call_p (const function_abi &callee_abi,
			      unsigned int regno, cselib_val *v)
{
  machine_mode mode = GET_MODE (v->val_rtx);
  if (mode == VOIDmode)
    {
      v = REG_VALUES (regno)->elt;
      if (!v)
	/* If we don't know what the mode of the constant value is, and we
	   don't know what mode the register was set in, conservatively
	   assume that the register is clobbered.  The value's going to be
	   essentially useless in this case anyway.  */
	return true;
      mode = GET_MODE (v->val_rtx);
    }
  return callee_abi.clobbers_reg_p (mode, regno);
}
/* Record the effects of INSN.  */

void
cselib_process_insn (rtx_insn *insn)
{
  int i;
  rtx x;

  cselib_current_insn = insn;

  /* Forget everything at a CODE_LABEL or a setjmp.  */
  if ((LABEL_P (insn)
       || (CALL_P (insn)
	   && find_reg_note (insn, REG_SETJMP, NULL)))
      && !cselib_preserve_constants)
    {
      cselib_reset_table (next_uid);
      cselib_current_insn = NULL;
      return;
    }

  if (! INSN_P (insn))
    {
      cselib_current_insn = NULL;
      return;
    }

  /* If this is a call instruction, forget anything stored in a
     call clobbered register, or, if this is not a const call, in
     memory.  */
  if (CALL_P (insn))
    {
      function_abi callee_abi = insn_callee_abi (insn);
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	{
	  elt_list **l = &REG_VALUES (i);
	  while (*l)
	    {
	      cselib_val *v = (*l)->elt;
	      if (v && cselib_invalidated_by_call_p (callee_abi, i, v))
		cselib_invalidate_regno_val (i, l);
	      else
		l = &(*l)->next;
	    }
	}

      /* Since it is not clear how cselib is going to be used, be
	 conservative here and treat looping pure or const functions
	 as if they were regular functions.  */
      if (RTL_LOOPING_CONST_OR_PURE_CALL_P (insn)
	  || !(RTL_CONST_OR_PURE_CALL_P (insn)))
	cselib_invalidate_mem (callmem);
      else
	/* For const/pure calls, invalidate any argument slots because
	   they are owned by the callee.  */
	for (x = CALL_INSN_FUNCTION_USAGE (insn); x; x = XEXP (x, 1))
	  if (GET_CODE (XEXP (x, 0)) == USE
	      && MEM_P (XEXP (XEXP (x, 0), 0)))
	    cselib_invalidate_mem (XEXP (XEXP (x, 0), 0));
    }

  cselib_record_sets (insn);

  /* Look for any CLOBBERs in CALL_INSN_FUNCTION_USAGE, but only
     after we have processed the insn.  */
  if (CALL_P (insn))
    {
      for (x = CALL_INSN_FUNCTION_USAGE (insn); x; x = XEXP (x, 1))
	if (GET_CODE (XEXP (x, 0)) == CLOBBER)
	  cselib_invalidate_rtx (XEXP (XEXP (x, 0), 0));

      /* Flush everything on setjmp.  */
      if (cselib_preserve_constants
	  && find_reg_note (insn, REG_SETJMP, NULL))
	{
	  cselib_preserve_only_values ();
	  cselib_reset_table (next_uid);
	}
    }

  /* On setter of the hard frame pointer if frame_pointer_needed,
     invalidate stack_pointer_rtx, so that sp and {,h}fp based
     VALUEs are distinct.  */
  if (reload_completed
      && frame_pointer_needed
      && fp_setter_insn (insn))
    cselib_invalidate_rtx (stack_pointer_rtx);

  cselib_current_insn = NULL;

  if (n_useless_values > MAX_USELESS_VALUES
      /* remove_useless_values is linear in the hash table size.  Avoid
	 quadratic behavior for very large hashtables with very few
	 useless elements.  */
      && ((unsigned int)n_useless_values
	  > (cselib_hash_table->elements () - n_debug_values) / 4))
    remove_useless_values ();
}
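/* Illustrative only (an editor's sketch, not code from this file): a
   typical cselib client drives the pass roughly like

     cselib_init (CSELIB_RECORD_MEMORY);
     init_alias_analysis ();
     FOR_EACH_BB_FN (bb, cfun)
       FOR_BB_INSNS (bb, insn)
	 if (INSN_P (insn))
	   cselib_process_insn (insn);
     end_alias_analysis ();
     cselib_finish ();

   interleaving its own cselib_lookup calls between calls to
   cselib_process_insn.  */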
/* Initialize cselib for one pass.  The caller must also call
   init_alias_analysis.  */

void
cselib_init (int record_what)
{
  cselib_record_memory = record_what & CSELIB_RECORD_MEMORY;
  cselib_preserve_constants = record_what & CSELIB_PRESERVE_CONSTANTS;
  cselib_any_perm_equivs = false;

  /* (mem:BLK (scratch)) is a special mechanism to conflict with everything,
     see canon_true_dependence.  This is only created once.  */
  if (! callmem)
    callmem = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));

  cselib_nregs = max_reg_num ();

  /* We preserve reg_values to allow expensive clearing of the whole thing.
     Reallocate it however if it happens to be too large.  */
  if (!reg_values || reg_values_size < cselib_nregs
      || (reg_values_size > 10 && reg_values_size > cselib_nregs * 4))
    {
      free (reg_values);
      /* Some space for newly emitted instructions so we don't end up
	 reallocating in between passes.  */
      reg_values_size = cselib_nregs + (63 + cselib_nregs) / 16;
      reg_values = XCNEWVEC (struct elt_list *, reg_values_size);
    }
  used_regs = XNEWVEC (unsigned int, cselib_nregs);
  n_used_regs = 0;
  /* FIXME: enable sanitization (PR87845) */
  cselib_hash_table
    = new hash_table<cselib_hasher> (31, /* ggc */ false,
				     /* sanitize_eq_and_hash */ false);
  if (cselib_preserve_constants)
    cselib_preserved_hash_table
      = new hash_table<cselib_hasher> (31, /* ggc */ false,
				       /* sanitize_eq_and_hash */ false);
  next_uid = 1;
}
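/* Illustrative only (an editor's note): var-tracking initializes cselib as
     cselib_init (CSELIB_RECORD_MEMORY | CSELIB_PRESERVE_CONSTANTS);
   so that constant-valued and CFA-related VALUEs survive table resets,
   while clients that only need plain equivalences pass a subset of the
   flags, e.g. cselib_init (CSELIB_RECORD_MEMORY) or cselib_init (0).  */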
/* Called when the current user is done with cselib.  */

void
cselib_finish (void)
{
  bool preserved = cselib_preserve_constants;
  cselib_discard_hook = NULL;
  cselib_preserve_constants = false;
  cselib_any_perm_equivs = false;
  cfa_base_preserved_val = NULL;
  cfa_base_preserved_regno = INVALID_REGNUM;
  elt_list_pool.release ();
  elt_loc_list_pool.release ();
  cselib_val_pool.release ();
  value_pool.release ();
  cselib_clear_table ();
  delete cselib_hash_table;
  cselib_hash_table = NULL;
  if (preserved)
    delete cselib_preserved_hash_table;
  cselib_preserved_hash_table = NULL;
  free (used_regs);
  used_regs = 0;
  n_useless_values = 0;
  n_useless_debug_values = 0;
  n_debug_values = 0;
  next_uid = 0;
}
/* Dump the cselib_val *X to FILE *OUT.  */

int
dump_cselib_val (cselib_val **x, FILE *out)
{
  cselib_val *v = *x;
  bool need_lf = true;

  print_inline_rtx (out, v->val_rtx, 0);

  if (v->locs)
    {
      struct elt_loc_list *l = v->locs;
      if (need_lf)
	{
	  fputc ('\n', out);
	  need_lf = false;
	}
      fputs (" locs:", out);
      do
	{
	  if (l->setting_insn)
	    fprintf (out, "\n  from insn %i ",
		     INSN_UID (l->setting_insn));
	  else
	    fprintf (out, "\n   ");
	  print_inline_rtx (out, l->loc, 4);
	}
      while ((l = l->next));
      fputc ('\n', out);
    }
  else
    {
      fputs (" no locs", out);
      need_lf = true;
    }

  if (v->addr_list)
    {
      struct elt_list *e = v->addr_list;
      if (need_lf)
	{
	  fputc ('\n', out);
	  need_lf = false;
	}
      fputs (" addr list:", out);
      do
	{
	  fputs ("\n  ", out);
	  print_inline_rtx (out, e->elt->val_rtx, 2);
	}
      while ((e = e->next));
      fputc ('\n', out);
    }
  else
    {
      fputs (" no addrs", out);
      need_lf = true;
    }

  if (v->next_containing_mem == &dummy_val)
    fputs (" last mem\n", out);
  else if (v->next_containing_mem)
    {
      fputs (" next mem ", out);
      print_inline_rtx (out, v->next_containing_mem->val_rtx, 2);
      fputc ('\n', out);
    }
  else if (need_lf)
    fputc ('\n', out);

  return 1;
}
/* Dump to OUT everything in the CSELIB table.  */

void
dump_cselib_table (FILE *out)
{
  fprintf (out, "cselib hash table:\n");
  cselib_hash_table->traverse <FILE *, dump_cselib_val> (out);
  fprintf (out, "cselib preserved hash table:\n");
  cselib_preserved_hash_table->traverse <FILE *, dump_cselib_val> (out);
  if (first_containing_mem != &dummy_val)
    {
      fputs ("first mem ", out);
      print_inline_rtx (out, first_containing_mem->val_rtx, 2);
      fputc ('\n', out);
    }
  fprintf (out, "next uid %i\n", next_uid);
}

#include "gt-cselib.h"