/* Common subexpression elimination library for GNU compiler.
   Copyright (C) 1987-2023 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "rtl.h"
#include "cselib.h"
#include "function-abi.h"

/* A list of cselib_val structures.  */
struct elt_list
{
  struct elt_list *next;
  cselib_val *elt;
};

static bool cselib_record_memory;
static bool cselib_preserve_constants;
static bool cselib_any_perm_equivs;
static inline void promote_debug_loc (struct elt_loc_list *l);
static struct elt_list *new_elt_list (struct elt_list *, cselib_val *);
static void new_elt_loc_list (cselib_val *, rtx);
static void unchain_one_value (cselib_val *);
static void unchain_one_elt_list (struct elt_list **);
static void unchain_one_elt_loc_list (struct elt_loc_list **);
static void remove_useless_values (void);
static unsigned int cselib_hash_rtx (rtx, int, machine_mode);
static cselib_val *new_cselib_val (unsigned int, machine_mode, rtx);
static void add_mem_for_addr (cselib_val *, cselib_val *, rtx);
static cselib_val *cselib_lookup_mem (rtx, int);
static void cselib_invalidate_regno (unsigned int, machine_mode);
static void cselib_invalidate_mem (rtx);
static void cselib_record_set (rtx, cselib_val *, cselib_val *);
static void cselib_record_sets (rtx_insn *);
static rtx autoinc_split (rtx, rtx *, machine_mode);

#define PRESERVED_VALUE_P(RTX) \
  (RTL_FLAG_CHECK1 ("PRESERVED_VALUE_P", (RTX), VALUE)->unchanging)

#define SP_BASED_VALUE_P(RTX) \
  (RTL_FLAG_CHECK1 ("SP_BASED_VALUE_P", (RTX), VALUE)->jump)

#define SP_DERIVED_VALUE_P(RTX) \
  (RTL_FLAG_CHECK1 ("SP_DERIVED_VALUE_P", (RTX), VALUE)->call)

struct expand_value_data
{
  bitmap regs_active;
  cselib_expand_callback callback;
  void *callback_arg;
  bool dummy;
};

static rtx cselib_expand_value_rtx_1 (rtx, struct expand_value_data *, int);

/* This is a global so we don't have to pass this through every function.
   It is used in new_elt_loc_list to set SETTING_INSN.  */
static rtx_insn *cselib_current_insn;

/* There are three ways in which cselib can look up an rtx:
   - for a REG, the reg_values table (which is indexed by regno) is used
   - for a MEM, we recursively look up its address and then follow the
     addr_list of that value
   - for everything else, we compute a hash value and go through the hash
     table.  Since different rtx's can still have the same hash value,
     this involves walking the table entries for a given value and comparing
     the locations of the entries with the rtx we are looking up.  */
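
/* For instance, looking up (mem:SI (reg:SI 1)) first resolves
   (reg:SI 1) through REG_VALUES and then scans the addr_list of the
   resulting address VALUE for an SImode element; only other
   expressions, such as a (plus ...), go through the hash table
   proper.  */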

struct cselib_hasher : nofree_ptr_hash <cselib_val>
{
  struct key {
    /* The rtx value and its mode (needed separately for constant
       integers).  */
    machine_mode mode;
    rtx x;
    /* The mode of the containing MEM, if any, otherwise VOIDmode.  */
    machine_mode memmode;
  };
  typedef key *compare_type;
  static inline hashval_t hash (const cselib_val *);
  static inline bool equal (const cselib_val *, const key *);
};

/* The hash function for our hash table.  The value is always computed with
   cselib_hash_rtx when adding an element; this function just extracts the
   hash value from a cselib_val structure.  */

inline hashval_t
cselib_hasher::hash (const cselib_val *v)
{
  return v->hash;
}

/* The equality test for our hash table.  The first argument V is a table
   element (i.e. a cselib_val), while the second arg X is an rtx.  We know
   that all callers of htab_find_slot_with_hash will wrap CONST_INTs into a
   CONST of an appropriate mode.  */

inline bool
cselib_hasher::equal (const cselib_val *v, const key *x_arg)
{
  struct elt_loc_list *l;
  rtx x = x_arg->x;
  machine_mode mode = x_arg->mode;
  machine_mode memmode = x_arg->memmode;

  if (mode != GET_MODE (v->val_rtx))
    return false;

  if (GET_CODE (x) == VALUE)
    return x == v->val_rtx;

  if (SP_DERIVED_VALUE_P (v->val_rtx) && GET_MODE (x) == Pmode)
    {
      rtx xoff = NULL;
      if (autoinc_split (x, &xoff, memmode) == v->val_rtx && xoff == NULL_RTX)
        return true;
    }

  /* We don't guarantee that distinct rtx's have different hash values,
     so we need to do a comparison.  */
  for (l = v->locs; l; l = l->next)
    if (l->setting_insn && DEBUG_INSN_P (l->setting_insn)
        && (!cselib_current_insn || !DEBUG_INSN_P (cselib_current_insn)))
      {
        rtx_insn *save_cselib_current_insn = cselib_current_insn;
        /* If l is so far a debug only loc, without debug stmts it
           would never be compared to x at all, so temporarily pretend
           current instruction is that DEBUG_INSN so that we don't
           promote other debug locs even for unsuccessful comparison.  */
        cselib_current_insn = l->setting_insn;
        bool match = rtx_equal_for_cselib_1 (l->loc, x, memmode, 0);
        cselib_current_insn = save_cselib_current_insn;
        if (match)
          {
            promote_debug_loc (l);
            return true;
          }
      }
    else if (rtx_equal_for_cselib_1 (l->loc, x, memmode, 0))
      return true;

  return false;
}

/* A table that enables us to look up elts by their value.  */
static hash_table<cselib_hasher> *cselib_hash_table;

/* A table to hold preserved values.  */
static hash_table<cselib_hasher> *cselib_preserved_hash_table;

/* The unique id that the next create value will take.  */
static unsigned int next_uid;

/* The number of registers we had when the varrays were last resized.  */
static unsigned int cselib_nregs;

/* Count values without known locations, or with only locations that
   wouldn't have been known except for debug insns.  Whenever this
   grows too big, we remove these useless values from the table.

   Counting values with only debug values is a bit tricky.  We don't
   want to increment n_useless_values when we create a value for a
   debug insn, for this would get n_useless_values out of sync, but we
   want to increment it if all locs in the list that were ever referenced
   in nondebug insns are removed from the list.

   In the general case, once we do that, we'd have to stop accepting
   nondebug expressions in the loc list, to avoid having two values
   equivalent that, without debug insns, would have been made into
   separate values.  However, because debug insns never introduce
   equivalences themselves (no assignments), the only means for
   growing loc lists is through nondebug assignments.  If the locs
   also happen to be referenced in debug insns, it will work just fine.

   A consequence of this is that there's at most one debug-only loc in
   each loc list.  If we keep it in the first entry, testing whether
   we have a debug-only loc list takes O(1).

   Furthermore, since any additional entry in a loc list containing a
   debug loc would have to come from an assignment (nondebug) that
   references both the initial debug loc and the newly-equivalent loc,
   the initial debug loc would be promoted to a nondebug loc, and the
   loc list would not contain debug locs any more.

   So the only case we have to be careful with in order to keep
   n_useless_values in sync between debug and nondebug compilations is
   to avoid incrementing n_useless_values when removing the single loc
   from a value that turns out to not appear outside debug values.  We
   increment n_useless_debug_values instead, and leave such values
   alone until, for other reasons, we garbage-collect useless
   VALUEs.  */
static int n_useless_values;
static int n_useless_debug_values;

/* Count values whose locs have been taken exclusively from debug
   insns for the entire life of the value.  */
static int n_debug_values;
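
/* In other words, creating a VALUE at a debug insn only bumps
   n_debug_values; the value is counted in n_useless_debug_values once
   its single debug-only loc is removed, and the counters are
   reconciled in remove_useless_values below.  */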

/* Number of useless values before we remove them from the hash table.  */
#define MAX_USELESS_VALUES 32

/* This table maps from register number to values.  It does not
   contain pointers to cselib_val structures, but rather elt_lists.
   The purpose is to be able to refer to the same register in
   different modes.  The first element of the list defines the mode in
   which the register was set; if the mode is unknown or the value is
   no longer valid in that mode, ELT will be NULL for the first
   element.  */
static struct elt_list **reg_values;
static unsigned int reg_values_size;
#define REG_VALUES(i) reg_values[i]
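
/* For example, after (set (reg:DI 100) ...) the first element of
   REG_VALUES (100) holds the DImode value of the set; a later lookup
   of (reg:SI 100) appends a separate SImode element to the same
   list.  */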

/* The largest number of hard regs used by any entry added to the
   REG_VALUES table.  Cleared on each cselib_clear_table() invocation.  */
static unsigned int max_value_regs;

/* Here the set of indices I with REG_VALUES(I) != 0 is saved.  This is used
   in cselib_clear_table() for fast emptying.  */
static unsigned int *used_regs;
static unsigned int n_used_regs;

/* We pass this to cselib_invalidate_mem to invalidate all of
   memory for a non-const call instruction.  */
static GTY(()) rtx callmem;

/* Set by discard_useless_locs if it deleted the last location of any
   value.  */
static int values_became_useless;

/* Used as stop element of the containing_mem list so we can check
   presence in the list by checking the next pointer.  */
static cselib_val dummy_val;

/* If non-NULL, value of the eliminated arg_pointer_rtx or frame_pointer_rtx
   that is constant through the whole function and should never be
   eliminated.  */
static cselib_val *cfa_base_preserved_val;
static unsigned int cfa_base_preserved_regno = INVALID_REGNUM;

/* Used to list all values that contain memory reference.
   May or may not contain the useless values - the list is compacted
   each time memory is invalidated.  */
static cselib_val *first_containing_mem = &dummy_val;

static object_allocator<elt_list> elt_list_pool ("elt_list");
static object_allocator<elt_loc_list> elt_loc_list_pool ("elt_loc_list");
static object_allocator<cselib_val> cselib_val_pool ("cselib_val_list");

static pool_allocator value_pool ("value", RTX_CODE_SIZE (VALUE));

/* If nonnull, cselib will call this function before freeing useless
   VALUEs.  A VALUE is deemed useless if its "locs" field is null.  */
void (*cselib_discard_hook) (cselib_val *);

/* If nonnull, cselib will call this function before recording sets or
   even clobbering outputs of INSN.  All the recorded sets will be
   represented in the array sets[n_sets].  new_val_min can be used to
   tell whether values present in sets are introduced by this
   instruction.  */
void (*cselib_record_sets_hook) (rtx_insn *insn, struct cselib_set *sets,
                                 int n_sets);

/* Allocate a struct elt_list and fill in its two elements with the
   arguments.  */

static inline struct elt_list *
new_elt_list (struct elt_list *next, cselib_val *elt)
{
  elt_list *el = elt_list_pool.allocate ();

  el->next = next;
  el->elt = elt;
  return el;
}

/* Allocate a struct elt_loc_list with LOC and prepend it to VAL's loc
   list.  */

static inline void
new_elt_loc_list (cselib_val *val, rtx loc)
{
  struct elt_loc_list *el, *next = val->locs;

  gcc_checking_assert (!next || !next->setting_insn
                       || !DEBUG_INSN_P (next->setting_insn)
                       || cselib_current_insn == next->setting_insn);

  /* If we're creating the first loc in a debug insn context, we've
     just created a debug value.  Count it.  */
  if (!next && cselib_current_insn && DEBUG_INSN_P (cselib_current_insn))
    n_debug_values++;

  val = canonical_cselib_val (val);
  next = val->locs;

  if (GET_CODE (loc) == VALUE)
    {
      loc = canonical_cselib_val (CSELIB_VAL_PTR (loc))->val_rtx;

      gcc_checking_assert (PRESERVED_VALUE_P (loc)
                           == PRESERVED_VALUE_P (val->val_rtx));

      if (val->val_rtx == loc)
        return;
      else if (val->uid > CSELIB_VAL_PTR (loc)->uid)
        {
          /* Reverse the insertion.  */
          new_elt_loc_list (CSELIB_VAL_PTR (loc), val->val_rtx);
          return;
        }

      gcc_checking_assert (val->uid < CSELIB_VAL_PTR (loc)->uid);

      if (CSELIB_VAL_PTR (loc)->locs)
        {
          /* Bring all locs from LOC to VAL.  */
          for (el = CSELIB_VAL_PTR (loc)->locs; el->next; el = el->next)
            {
              /* Adjust values that have LOC as canonical so that VAL
                 becomes their canonical.  */
              if (el->loc && GET_CODE (el->loc) == VALUE)
                {
                  gcc_checking_assert (CSELIB_VAL_PTR (el->loc)->locs->loc
                                       == loc);
                  CSELIB_VAL_PTR (el->loc)->locs->loc = val->val_rtx;
                }
            }
          el->next = val->locs;
          next = val->locs = CSELIB_VAL_PTR (loc)->locs;
        }

      if (CSELIB_VAL_PTR (loc)->addr_list)
        {
          /* Bring in addr_list into canonical node.  */
          struct elt_list *last = CSELIB_VAL_PTR (loc)->addr_list;
          while (last->next)
            last = last->next;
          last->next = val->addr_list;
          val->addr_list = CSELIB_VAL_PTR (loc)->addr_list;
          CSELIB_VAL_PTR (loc)->addr_list = NULL;
        }

      if (CSELIB_VAL_PTR (loc)->next_containing_mem != NULL
          && val->next_containing_mem == NULL)
        {
          /* Add VAL to the containing_mem list after LOC.  LOC will
             be removed when we notice it doesn't contain any
             MEMs.  */
          val->next_containing_mem = CSELIB_VAL_PTR (loc)->next_containing_mem;
          CSELIB_VAL_PTR (loc)->next_containing_mem = val;
        }

      /* Chain LOC back to VAL.  */
      el = elt_loc_list_pool.allocate ();
      el->loc = val->val_rtx;
      el->setting_insn = cselib_current_insn;
      el->next = NULL;
      CSELIB_VAL_PTR (loc)->locs = el;
    }

  el = elt_loc_list_pool.allocate ();
  el->loc = loc;
  el->setting_insn = cselib_current_insn;
  el->next = next;
  val->locs = el;
}

/* Promote loc L to a nondebug cselib_current_insn if L is marked as
   originating from a debug insn, maintaining the debug values
   count.  */

static inline void
promote_debug_loc (struct elt_loc_list *l)
{
  if (l && l->setting_insn && DEBUG_INSN_P (l->setting_insn)
      && (!cselib_current_insn || !DEBUG_INSN_P (cselib_current_insn)))
    {
      n_debug_values--;
      l->setting_insn = cselib_current_insn;
      if (cselib_preserve_constants && l->next)
        {
          gcc_assert (l->next->setting_insn
                      && DEBUG_INSN_P (l->next->setting_insn)
                      && !l->next->next);
          l->next->setting_insn = cselib_current_insn;
        }
      else
        gcc_assert (!l->next);
    }
}

/* The elt_list at *PL is no longer needed.  Unchain it and free its
   storage.  */

static inline void
unchain_one_elt_list (struct elt_list **pl)
{
  struct elt_list *l = *pl;

  *pl = l->next;
  elt_list_pool.remove (l);
}

/* Likewise for elt_loc_lists.  */

static void
unchain_one_elt_loc_list (struct elt_loc_list **pl)
{
  struct elt_loc_list *l = *pl;

  *pl = l->next;
  elt_loc_list_pool.remove (l);
}

/* Likewise for cselib_vals.  This also frees the addr_list associated with
   V.  */

static void
unchain_one_value (cselib_val *v)
{
  while (v->addr_list)
    unchain_one_elt_list (&v->addr_list);

  cselib_val_pool.remove (v);
}

/* Remove all entries from the hash table.  Also used during
   initialization.  */

void
cselib_clear_table (void)
{
  cselib_reset_table (1);
}

/* Return TRUE if V is a constant, a function invariant or a VALUE
   equivalence; FALSE otherwise.  */

static bool
invariant_or_equiv_p (cselib_val *v)
{
  struct elt_loc_list *l;

  if (v == cfa_base_preserved_val)
    return true;

  /* Keep VALUE equivalences around.  */
  for (l = v->locs; l; l = l->next)
    if (GET_CODE (l->loc) == VALUE)
      return true;

  if (v->locs != NULL
      && v->locs->next == NULL)
    {
      if (CONSTANT_P (v->locs->loc)
          && (GET_CODE (v->locs->loc) != CONST
              || !references_value_p (v->locs->loc, 0)))
        return true;
      /* Although a debug expr may be bound to different expressions,
         we can preserve it as if it was constant, to get unification
         and proper merging within var-tracking.  */
      if (GET_CODE (v->locs->loc) == DEBUG_EXPR
          || GET_CODE (v->locs->loc) == DEBUG_IMPLICIT_PTR
          || GET_CODE (v->locs->loc) == ENTRY_VALUE
          || GET_CODE (v->locs->loc) == DEBUG_PARAMETER_REF)
        return true;

      /* (plus (value V) (const_int C)) is invariant iff V is invariant.  */
      if (GET_CODE (v->locs->loc) == PLUS
          && CONST_INT_P (XEXP (v->locs->loc, 1))
          && GET_CODE (XEXP (v->locs->loc, 0)) == VALUE
          && invariant_or_equiv_p (CSELIB_VAL_PTR (XEXP (v->locs->loc, 0))))
        return true;
    }

  return false;
}

/* Remove from hash table all VALUEs except constants, function
   invariants and VALUE equivalences.  */

static int
preserve_constants_and_equivs (cselib_val **x, void *info ATTRIBUTE_UNUSED)
{
  cselib_val *v = *x;

  if (invariant_or_equiv_p (v))
    {
      cselib_hasher::key lookup = {
        GET_MODE (v->val_rtx), v->val_rtx, VOIDmode
      };
      cselib_val **slot
        = cselib_preserved_hash_table->find_slot_with_hash (&lookup,
                                                            v->hash, INSERT);
      gcc_assert (!*slot);
      *slot = v;
    }

  cselib_hash_table->clear_slot (x);

  return 1;
}

/* Remove all entries from the hash table, arranging for the next
   value to be numbered NUM.  */

void
cselib_reset_table (unsigned int num)
{
  unsigned int i;

  max_value_regs = 0;

  if (cfa_base_preserved_val)
    {
      unsigned int regno = cfa_base_preserved_regno;
      unsigned int new_used_regs = 0;
      for (i = 0; i < n_used_regs; i++)
        if (used_regs[i] == regno)
          {
            new_used_regs++;
            continue;
          }
        else
          REG_VALUES (used_regs[i]) = 0;
      gcc_assert (new_used_regs == 1);
      n_used_regs = new_used_regs;
      used_regs[0] = regno;
      max_value_regs
        = hard_regno_nregs (regno,
                            GET_MODE (cfa_base_preserved_val->locs->loc));

      /* If cfa_base is sp + const_int, need to preserve also the
         SP_DERIVED_VALUE_P value.  */
      for (struct elt_loc_list *l = cfa_base_preserved_val->locs;
           l; l = l->next)
        if (GET_CODE (l->loc) == PLUS
            && GET_CODE (XEXP (l->loc, 0)) == VALUE
            && SP_DERIVED_VALUE_P (XEXP (l->loc, 0))
            && CONST_INT_P (XEXP (l->loc, 1)))
          {
            if (! invariant_or_equiv_p (CSELIB_VAL_PTR (XEXP (l->loc, 0))))
              {
                rtx val = cfa_base_preserved_val->val_rtx;
                rtx_insn *save_cselib_current_insn = cselib_current_insn;
                cselib_current_insn = l->setting_insn;
                new_elt_loc_list (CSELIB_VAL_PTR (XEXP (l->loc, 0)),
                                  plus_constant (Pmode, val,
                                                 -UINTVAL (XEXP (l->loc, 1))));
                cselib_current_insn = save_cselib_current_insn;
              }
            break;
          }
    }
  else
    {
      for (i = 0; i < n_used_regs; i++)
        REG_VALUES (used_regs[i]) = 0;
      n_used_regs = 0;
    }

  if (cselib_preserve_constants)
    cselib_hash_table->traverse <void *, preserve_constants_and_equivs> (NULL);
  else
    {
      cselib_hash_table->empty ();
      gcc_checking_assert (!cselib_any_perm_equivs);
    }

  n_useless_values = 0;
  n_useless_debug_values = 0;
  n_debug_values = 0;

  next_uid = num;

  first_containing_mem = &dummy_val;
}

/* Return the number of the next value that will be generated.  */

unsigned int
cselib_get_next_uid (void)
{
  return next_uid;
}

/* Search for X, whose hashcode is HASH, in CSELIB_HASH_TABLE,
   INSERTing if requested.  When X is part of the address of a MEM,
   MEMMODE should specify the mode of the MEM.  */

static cselib_val **
cselib_find_slot (machine_mode mode, rtx x, hashval_t hash,
                  enum insert_option insert, machine_mode memmode)
{
  cselib_val **slot = NULL;
  cselib_hasher::key lookup = { mode, x, memmode };
  if (cselib_preserve_constants)
    slot = cselib_preserved_hash_table->find_slot_with_hash (&lookup, hash,
                                                             NO_INSERT);
  if (!slot)
    slot = cselib_hash_table->find_slot_with_hash (&lookup, hash, insert);
  return slot;
}

/* Return true if X contains a VALUE rtx.  If ONLY_USELESS is set, we
   only return true for values which point to a cselib_val whose value
   element has been set to zero, which implies the cselib_val will be
   removed.  */

int
references_value_p (const_rtx x, int only_useless)
{
  const enum rtx_code code = GET_CODE (x);
  const char *fmt = GET_RTX_FORMAT (code);
  int i, j;

  if (GET_CODE (x) == VALUE
      && (! only_useless
          || (CSELIB_VAL_PTR (x)->locs == 0 && !PRESERVED_VALUE_P (x))))
    return 1;

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && references_value_p (XEXP (x, i), only_useless))
        return 1;
      else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          if (references_value_p (XVECEXP (x, i, j), only_useless))
            return 1;
    }

  return 0;
}

/* Return true if V is a useless VALUE and can be discarded as such.  */

static bool
cselib_useless_value_p (cselib_val *v)
{
  return (v->locs == 0
          && !PRESERVED_VALUE_P (v->val_rtx)
          && !SP_DERIVED_VALUE_P (v->val_rtx));
}

/* For all locations found in X, delete locations that reference useless
   values (i.e. values without any location).  Called through
   htab_traverse.  */

static int
discard_useless_locs (cselib_val **x, void *info ATTRIBUTE_UNUSED)
{
  cselib_val *v = *x;
  struct elt_loc_list **p = &v->locs;
  bool had_locs = v->locs != NULL;
  rtx_insn *setting_insn = v->locs ? v->locs->setting_insn : NULL;

  while (*p)
    {
      if (references_value_p ((*p)->loc, 1))
        unchain_one_elt_loc_list (p);
      else
        p = &(*p)->next;
    }

  if (had_locs && cselib_useless_value_p (v))
    {
      if (setting_insn && DEBUG_INSN_P (setting_insn))
        n_useless_debug_values++;
      else
        n_useless_values++;
      values_became_useless = 1;
    }
  return 1;
}

/* If X is a value with no locations, remove it from the hashtable.  */

static int
discard_useless_values (cselib_val **x, void *info ATTRIBUTE_UNUSED)
{
  cselib_val *v = *x;

  if (v->locs == 0 && cselib_useless_value_p (v))
    {
      if (cselib_discard_hook)
        cselib_discard_hook (v);

      CSELIB_VAL_PTR (v->val_rtx) = NULL;
      cselib_hash_table->clear_slot (x);
      unchain_one_value (v);
      n_useless_values--;
    }

  return 1;
}

/* Clean out useless values (i.e. those which no longer have locations
   associated with them) from the hash table.  */

static void
remove_useless_values (void)
{
  cselib_val **p, *v;

  /* First pass: eliminate locations that reference the value.  That in
     turn can make more values useless.  */
  do
    {
      values_became_useless = 0;
      cselib_hash_table->traverse <void *, discard_useless_locs> (NULL);
    }
  while (values_became_useless);

  /* Second pass: actually remove the values.  */

  p = &first_containing_mem;
  for (v = *p; v != &dummy_val; v = v->next_containing_mem)
    if (v->locs && v == canonical_cselib_val (v))
      {
        *p = v;
        p = &(*p)->next_containing_mem;
      }
  *p = &dummy_val;

  n_useless_values += n_useless_debug_values;
  n_debug_values -= n_useless_debug_values;
  n_useless_debug_values = 0;

  cselib_hash_table->traverse <void *, discard_useless_values> (NULL);

  gcc_assert (!n_useless_values);
}

/* Arrange for a value to not be removed from the hash table even if
   it becomes useless.  */

void
cselib_preserve_value (cselib_val *v)
{
  PRESERVED_VALUE_P (v->val_rtx) = 1;
}

/* Test whether a value is preserved.  */

bool
cselib_preserved_value_p (cselib_val *v)
{
  return PRESERVED_VALUE_P (v->val_rtx);
}

/* Arrange for a REG value to be assumed constant through the whole function,
   never invalidated and preserved across cselib_reset_table calls.  */

void
cselib_preserve_cfa_base_value (cselib_val *v, unsigned int regno)
{
  if (cselib_preserve_constants
      && v->locs
      && REG_P (v->locs->loc))
    {
      cfa_base_preserved_val = v;
      cfa_base_preserved_regno = regno;
    }
}

/* Clean all non-constant expressions in the hash table, but retain
   their values.  */

void
cselib_preserve_only_values (void)
{
  int i;

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    cselib_invalidate_regno (i, reg_raw_mode[i]);

  cselib_invalidate_mem (callmem);

  remove_useless_values ();

  gcc_assert (first_containing_mem == &dummy_val);
}

/* Arrange for a value to be marked as based on stack pointer
   for find_base_term purposes.  */

void
cselib_set_value_sp_based (cselib_val *v)
{
  SP_BASED_VALUE_P (v->val_rtx) = 1;
}

/* Test whether a value is based on stack pointer for
   find_base_term purposes.  */

bool
cselib_sp_based_value_p (cselib_val *v)
{
  return SP_BASED_VALUE_P (v->val_rtx);
}

/* Return the mode in which a register was last set.  If X is not a
   register, return its mode.  If the mode in which the register was
   set is not known, or the value was already clobbered, return
   VOIDmode.  */

machine_mode
cselib_reg_set_mode (const_rtx x)
{
  if (!REG_P (x))
    return GET_MODE (x);

  if (REG_VALUES (REGNO (x)) == NULL
      || REG_VALUES (REGNO (x))->elt == NULL)
    return VOIDmode;

  return GET_MODE (REG_VALUES (REGNO (x))->elt->val_rtx);
}

/* If x is a PLUS or an autoinc operation, expand the operation,
   storing the offset, if any, in *OFF.  */

static rtx
autoinc_split (rtx x, rtx *off, machine_mode memmode)
{
  switch (GET_CODE (x))
    {
    case PLUS:
      *off = XEXP (x, 1);
      x = XEXP (x, 0);
      break;

    case PRE_DEC:
      if (memmode == VOIDmode)
        return x;

      *off = gen_int_mode (-GET_MODE_SIZE (memmode), GET_MODE (x));
      x = XEXP (x, 0);
      break;

    case PRE_INC:
      if (memmode == VOIDmode)
        return x;

      *off = gen_int_mode (GET_MODE_SIZE (memmode), GET_MODE (x));
      x = XEXP (x, 0);
      break;

    case PRE_MODIFY:
      x = XEXP (x, 1);
      break;

    case POST_DEC:
    case POST_INC:
    case POST_MODIFY:
      x = XEXP (x, 0);
      break;

    default:
      break;
    }

  if (GET_MODE (x) == Pmode
      && (REG_P (x) || MEM_P (x) || GET_CODE (x) == VALUE)
      && (*off == NULL_RTX || CONST_INT_P (*off)))
    {
      cselib_val *e;
      if (GET_CODE (x) == VALUE)
        e = CSELIB_VAL_PTR (x);
      else
        e = cselib_lookup (x, GET_MODE (x), 0, memmode);
      if (e)
        {
          if (SP_DERIVED_VALUE_P (e->val_rtx)
              && (*off == NULL_RTX || *off == const0_rtx))
            {
              *off = NULL_RTX;
              return e->val_rtx;
            }
          for (struct elt_loc_list *l = e->locs; l; l = l->next)
            if (GET_CODE (l->loc) == PLUS
                && GET_CODE (XEXP (l->loc, 0)) == VALUE
                && SP_DERIVED_VALUE_P (XEXP (l->loc, 0))
                && CONST_INT_P (XEXP (l->loc, 1)))
              {
                if (*off == NULL_RTX)
                  *off = XEXP (l->loc, 1);
                else
                  *off = plus_constant (Pmode, *off,
                                        INTVAL (XEXP (l->loc, 1)));
                if (*off == const0_rtx)
                  *off = NULL_RTX;
                return XEXP (l->loc, 0);
              }
        }
    }
  return x;
}
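
/* For example, inside a MEM whose MEMMODE is SImode on a target where
   SImode is four bytes, (pre_dec:P (reg:P sp)) splits into
   X = (reg:P sp) with *OFF = (const_int -4), i.e. the same shape that
   (plus:P (reg:P sp) (const_int -4)) would produce.  */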

/* Return nonzero if we can prove that X and Y contain the same value,
   taking our gathered information into account.  MEMMODE holds the
   mode of the enclosing MEM, if any, as required to deal with autoinc
   addressing modes.  If X and Y are not (known to be) part of
   addresses, MEMMODE should be VOIDmode.  */

int
rtx_equal_for_cselib_1 (rtx x, rtx y, machine_mode memmode, int depth)
{
  enum rtx_code code;
  const char *fmt;
  int i;

  if (REG_P (x) || MEM_P (x))
    {
      cselib_val *e = cselib_lookup (x, GET_MODE (x), 0, memmode);

      if (e)
        x = e->val_rtx;
    }

  if (REG_P (y) || MEM_P (y))
    {
      cselib_val *e = cselib_lookup (y, GET_MODE (y), 0, memmode);

      if (e)
        y = e->val_rtx;
    }

  if (x == y)
    return 1;

  if (GET_CODE (x) == VALUE)
    {
      cselib_val *e = canonical_cselib_val (CSELIB_VAL_PTR (x));
      struct elt_loc_list *l;

      if (GET_CODE (y) == VALUE)
        return e == canonical_cselib_val (CSELIB_VAL_PTR (y));

      if ((SP_DERIVED_VALUE_P (x)
           || SP_DERIVED_VALUE_P (e->val_rtx))
          && GET_MODE (y) == Pmode)
        {
          rtx yoff = NULL;
          rtx yr = autoinc_split (y, &yoff, memmode);
          if ((yr == x || yr == e->val_rtx) && yoff == NULL_RTX)
            return 1;
        }

      if (depth == 128)
        return 0;

      for (l = e->locs; l; l = l->next)
        {
          rtx t = l->loc;

          /* Avoid infinite recursion.  We know we have the canonical
             value, so we can just skip any values in the equivalence
             list.  */
          if (REG_P (t) || MEM_P (t) || GET_CODE (t) == VALUE)
            continue;
          else if (rtx_equal_for_cselib_1 (t, y, memmode, depth + 1))
            return 1;
        }

      return 0;
    }
  else if (GET_CODE (y) == VALUE)
    {
      cselib_val *e = canonical_cselib_val (CSELIB_VAL_PTR (y));
      struct elt_loc_list *l;

      if ((SP_DERIVED_VALUE_P (y)
           || SP_DERIVED_VALUE_P (e->val_rtx))
          && GET_MODE (x) == Pmode)
        {
          rtx xoff = NULL;
          rtx xr = autoinc_split (x, &xoff, memmode);
          if ((xr == y || xr == e->val_rtx) && xoff == NULL_RTX)
            return 1;
        }

      if (depth == 128)
        return 0;

      for (l = e->locs; l; l = l->next)
        {
          rtx t = l->loc;

          if (REG_P (t) || MEM_P (t) || GET_CODE (t) == VALUE)
            continue;
          else if (rtx_equal_for_cselib_1 (x, t, memmode, depth + 1))
            return 1;
        }

      return 0;
    }

  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  if (GET_CODE (x) != GET_CODE (y)
      || (GET_CODE (x) == PLUS
          && GET_MODE (x) == Pmode
          && CONST_INT_P (XEXP (x, 1))
          && CONST_INT_P (XEXP (y, 1))))
    {
      rtx xorig = x, yorig = y;
      rtx xoff = NULL, yoff = NULL;

      x = autoinc_split (x, &xoff, memmode);
      y = autoinc_split (y, &yoff, memmode);

      /* Don't recurse if nothing changed.  */
      if (x != xorig || y != yorig)
        {
          if (!xoff != !yoff)
            return 0;

          if (xoff && !rtx_equal_for_cselib_1 (xoff, yoff, memmode, depth))
            return 0;

          return rtx_equal_for_cselib_1 (x, y, memmode, depth);
        }

      if (GET_CODE (xorig) != GET_CODE (yorig))
        return 0;
    }

  /* These won't be handled correctly by the code below.  */
  switch (GET_CODE (x))
    {
    CASE_CONST_UNIQUE:
    case DEBUG_EXPR:
      return 0;

    case CONST_VECTOR:
      if (!same_vector_encodings_p (x, y))
        return false;
      break;

    case DEBUG_IMPLICIT_PTR:
      return DEBUG_IMPLICIT_PTR_DECL (x)
             == DEBUG_IMPLICIT_PTR_DECL (y);

    case DEBUG_PARAMETER_REF:
      return DEBUG_PARAMETER_REF_DECL (x)
             == DEBUG_PARAMETER_REF_DECL (y);

    case ENTRY_VALUE:
      /* ENTRY_VALUEs are function invariant, it is thus undesirable to
         use rtx_equal_for_cselib_1 to compare the operands.  */
      return rtx_equal_p (ENTRY_VALUE_EXP (x), ENTRY_VALUE_EXP (y));

    case LABEL_REF:
      return label_ref_label (x) == label_ref_label (y);

    case REG:
      return REGNO (x) == REGNO (y);

    case MEM:
      /* We have to compare any autoinc operations in the addresses
         using this MEM's mode.  */
      return rtx_equal_for_cselib_1 (XEXP (x, 0), XEXP (y, 0), GET_MODE (x),
                                     depth);

    default:
      break;
    }

  code = GET_CODE (x);
  fmt = GET_RTX_FORMAT (code);

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      int j;

      switch (fmt[i])
        {
        case 'w':
          if (XWINT (x, i) != XWINT (y, i))
            return 0;
          break;

        case 'n':
        case 'i':
          if (XINT (x, i) != XINT (y, i))
            return 0;
          break;

        case 'p':
          if (maybe_ne (SUBREG_BYTE (x), SUBREG_BYTE (y)))
            return 0;
          break;

        case 'V':
        case 'E':
          /* Two vectors must have the same length.  */
          if (XVECLEN (x, i) != XVECLEN (y, i))
            return 0;

          /* And the corresponding elements must match.  */
          for (j = 0; j < XVECLEN (x, i); j++)
            if (! rtx_equal_for_cselib_1 (XVECEXP (x, i, j),
                                          XVECEXP (y, i, j), memmode, depth))
              return 0;
          break;

        case 'e':
          if (i == 1
              && targetm.commutative_p (x, UNKNOWN)
              && rtx_equal_for_cselib_1 (XEXP (x, 1), XEXP (y, 0), memmode,
                                         depth)
              && rtx_equal_for_cselib_1 (XEXP (x, 0), XEXP (y, 1), memmode,
                                         depth))
            return 1;
          if (! rtx_equal_for_cselib_1 (XEXP (x, i), XEXP (y, i), memmode,
                                        depth))
            return 0;
          break;

        case 'S':
        case 's':
          if (strcmp (XSTR (x, i), XSTR (y, i)))
            return 0;
          break;

        case 'u':
          /* These are just backpointers, so they don't matter.  */
          break;

        case '0':
        case 't':
          break;

          /* It is believed that rtx's at this level will never
             contain anything but integers and other rtx's,
             except for within LABEL_REFs and SYMBOL_REFs.  */
        default:
          gcc_unreachable ();
        }
    }
  return 1;
}
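
/* Note that the 'e' case above, combined with targetm.commutative_p,
   lets e.g. (plus:SI (reg:SI 1) (reg:SI 2)) compare equal to
   (plus:SI (reg:SI 2) (reg:SI 1)), matching the operand-order
   insensitivity of cselib_hash_rtx.  */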

/* Wrapper for rtx_equal_for_cselib_p to determine whether a SET is
   truly redundant, taking into account aliasing information.  */

bool
cselib_redundant_set_p (rtx set)
{
  gcc_assert (GET_CODE (set) == SET);
  rtx dest = SET_DEST (set);
  if (cselib_reg_set_mode (dest) != GET_MODE (dest))
    return false;

  if (!rtx_equal_for_cselib_p (dest, SET_SRC (set)))
    return false;

  while (GET_CODE (dest) == SUBREG
         || GET_CODE (dest) == ZERO_EXTRACT
         || GET_CODE (dest) == STRICT_LOW_PART)
    dest = XEXP (dest, 0);

  if (!flag_strict_aliasing || !MEM_P (dest))
    return true;

  /* For a store we need to check that suppressing it will not change
     the effective alias set.  */
  rtx dest_addr = XEXP (dest, 0);

  /* Lookup the equivalents to the original dest (rather than just the
     MEM).  */
  cselib_val *src_val = cselib_lookup (SET_DEST (set),
                                       GET_MODE (SET_DEST (set)),
                                       0, VOIDmode);

  if (src_val)
    {
      /* Walk the list of source equivalents to find the MEM accessing
         the same location.  */
      for (elt_loc_list *l = src_val->locs; l; l = l->next)
        {
          rtx src_equiv = l->loc;
          while (GET_CODE (src_equiv) == SUBREG
                 || GET_CODE (src_equiv) == ZERO_EXTRACT
                 || GET_CODE (src_equiv) == STRICT_LOW_PART)
            src_equiv = XEXP (src_equiv, 0);

          if (MEM_P (src_equiv))
            {
              /* Match the MEMs by comparing the addresses.  We can
                 only remove the later store if the earlier aliases at
                 least all the accesses of the later one.  */
              if (rtx_equal_for_cselib_1 (dest_addr, XEXP (src_equiv, 0),
                                          GET_MODE (dest), 0))
                return mems_same_for_tbaa_p (src_equiv, dest);
            }
        }
    }

  /* We failed to find a recorded value in the cselib history, so try
     the source of this set; this catches cases such as *p = *q when p
     and q have the same value.  */
  rtx src = SET_SRC (set);
  while (GET_CODE (src) == SUBREG)
    src = XEXP (src, 0);

  if (MEM_P (src)
      && rtx_equal_for_cselib_1 (dest_addr, XEXP (src, 0), GET_MODE (dest), 0))
    return mems_same_for_tbaa_p (src, dest);

  return false;
}
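
/* So a store like *p = *q is deemed redundant only when the stored
   value is already known to live at the destination address and, under
   -fstrict-aliasing, the two MEMs agree per mems_same_for_tbaa_p.  */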

/* Helper function for cselib_hash_rtx.  Arguments like for cselib_hash_rtx,
   except that it hashes (plus:P x c).  */

static unsigned int
cselib_hash_plus_const_int (rtx x, HOST_WIDE_INT c, int create,
                            machine_mode memmode)
{
  cselib_val *e = cselib_lookup (x, GET_MODE (x), create, memmode);
  if (! e)
    return 0;

  if (! SP_DERIVED_VALUE_P (e->val_rtx))
    for (struct elt_loc_list *l = e->locs; l; l = l->next)
      if (GET_CODE (l->loc) == PLUS
          && GET_CODE (XEXP (l->loc, 0)) == VALUE
          && SP_DERIVED_VALUE_P (XEXP (l->loc, 0))
          && CONST_INT_P (XEXP (l->loc, 1)))
        {
          e = CSELIB_VAL_PTR (XEXP (l->loc, 0));
          c = trunc_int_for_mode (c + UINTVAL (XEXP (l->loc, 1)), Pmode);
          break;
        }

  unsigned hash = (unsigned) PLUS + (unsigned) GET_MODE (x);
  hash += e->hash;
  unsigned int tem_hash = (unsigned) CONST_INT + (unsigned) VOIDmode;
  tem_hash += ((unsigned) CONST_INT << 7) + (unsigned HOST_WIDE_INT) c;
  if (tem_hash == 0)
    tem_hash = (unsigned int) CONST_INT;
  hash += tem_hash;
  return hash ? hash : 1 + (unsigned int) PLUS;
}

/* Hash an rtx.  Return 0 if we couldn't hash the rtx.
   For registers and memory locations, we look up their cselib_val structure
   and return its VALUE element.
   Possible reasons for return 0 are: the object is volatile, or we couldn't
   find a register or memory location in the table and CREATE is zero.  If
   CREATE is nonzero, table elts are created for regs and mem.
   N.B. this hash function returns the same hash value for RTXes that
   differ only in the order of operands, thus it is suitable for comparisons
   that take commutativity into account.
   If we wanted to also support associative rules, we'd have to use a different
   strategy to avoid returning spurious 0, e.g. return ~(~0U >> 1).
   MEMMODE indicates the mode of an enclosing MEM, and it's only
   used to compute autoinc values.
   We used to have a MODE argument for hashing for CONST_INTs, but that
   didn't make sense, since it caused spurious hash differences between
    (set (reg:SI 1) (const_int))
    (plus:SI (reg:SI 2) (reg:SI 1))
   and
    (plus:SI (reg:SI 2) (const_int))
   If the mode is important in any context, it must be checked specifically
   in a comparison anyway, since relying on hash differences is unsafe.  */

static unsigned int
cselib_hash_rtx (rtx x, int create, machine_mode memmode)
{
  cselib_val *e;
  poly_int64 offset;
  int i, j;
  enum rtx_code code;
  const char *fmt;
  unsigned int hash = 0;

  code = GET_CODE (x);
  hash += (unsigned) code + (unsigned) GET_MODE (x);

  switch (code)
    {
    case VALUE:
      e = CSELIB_VAL_PTR (x);
      return e->hash;

    case MEM:
    case REG:
      e = cselib_lookup (x, GET_MODE (x), create, memmode);
      if (! e)
        return 0;

      return e->hash;

    case DEBUG_EXPR:
      hash += ((unsigned) DEBUG_EXPR << 7)
              + DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x));
      return hash ? hash : (unsigned int) DEBUG_EXPR;

    case DEBUG_IMPLICIT_PTR:
      hash += ((unsigned) DEBUG_IMPLICIT_PTR << 7)
              + DECL_UID (DEBUG_IMPLICIT_PTR_DECL (x));
      return hash ? hash : (unsigned int) DEBUG_IMPLICIT_PTR;

    case DEBUG_PARAMETER_REF:
      hash += ((unsigned) DEBUG_PARAMETER_REF << 7)
              + DECL_UID (DEBUG_PARAMETER_REF_DECL (x));
      return hash ? hash : (unsigned int) DEBUG_PARAMETER_REF;

    case ENTRY_VALUE:
      /* ENTRY_VALUEs are function invariant, thus try to avoid
         recursing on argument if ENTRY_VALUE is one of the
         forms emitted by expand_debug_expr, otherwise
         ENTRY_VALUE hash would depend on the current value
         in some register or memory.  */
      if (REG_P (ENTRY_VALUE_EXP (x)))
        hash += (unsigned int) REG
                + (unsigned int) GET_MODE (ENTRY_VALUE_EXP (x))
                + (unsigned int) REGNO (ENTRY_VALUE_EXP (x));
      else if (MEM_P (ENTRY_VALUE_EXP (x))
               && REG_P (XEXP (ENTRY_VALUE_EXP (x), 0)))
        hash += (unsigned int) MEM
                + (unsigned int) GET_MODE (XEXP (ENTRY_VALUE_EXP (x), 0))
                + (unsigned int) REGNO (XEXP (ENTRY_VALUE_EXP (x), 0));
      else
        hash += cselib_hash_rtx (ENTRY_VALUE_EXP (x), create, memmode);
      return hash ? hash : (unsigned int) ENTRY_VALUE;

    case CONST_INT:
      hash += ((unsigned) CONST_INT << 7) + UINTVAL (x);
      return hash ? hash : (unsigned int) CONST_INT;

    case CONST_WIDE_INT:
      for (i = 0; i < CONST_WIDE_INT_NUNITS (x); i++)
        hash += CONST_WIDE_INT_ELT (x, i);
      return hash;

    case CONST_POLY_INT:
      {
        inchash::hash h;
        h.add_int (hash);
        for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
          h.add_wide_int (CONST_POLY_INT_COEFFS (x)[i]);
        return h.end ();
      }

    case CONST_DOUBLE:
      /* This is like the general case, except that it only counts
         the integers representing the constant.  */
      hash += (unsigned) code + (unsigned) GET_MODE (x);
      if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (x) == VOIDmode)
        hash += ((unsigned) CONST_DOUBLE_LOW (x)
                 + (unsigned) CONST_DOUBLE_HIGH (x));
      else
        hash += real_hash (CONST_DOUBLE_REAL_VALUE (x));
      return hash ? hash : (unsigned int) CONST_DOUBLE;

    case CONST_FIXED:
      hash += (unsigned int) code + (unsigned int) GET_MODE (x);
      hash += fixed_hash (CONST_FIXED_VALUE (x));
      return hash ? hash : (unsigned int) CONST_FIXED;

    case CONST_VECTOR:
      {
        int units;
        rtx elt;

        units = const_vector_encoded_nelts (x);

        for (i = 0; i < units; ++i)
          {
            elt = CONST_VECTOR_ENCODED_ELT (x, i);
            hash += cselib_hash_rtx (elt, 0, memmode);
          }

        return hash;
      }

      /* Assume there is only one rtx object for any given label.  */
    case LABEL_REF:
      /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
         differences and differences between each stage's debugging dumps.  */
      hash += (((unsigned int) LABEL_REF << 7)
               + CODE_LABEL_NUMBER (label_ref_label (x)));
      return hash ? hash : (unsigned int) LABEL_REF;

    case SYMBOL_REF:
      {
        /* Don't hash on the symbol's address to avoid bootstrap differences.
           Different hash values may cause expressions to be recorded in
           different orders and thus different registers to be used in the
           final assembler.  This also avoids differences in the dump files
           between various stages.  */
        unsigned int h = 0;
        const unsigned char *p = (const unsigned char *) XSTR (x, 0);

        while (*p)
          h += (h << 7) + *p++; /* ??? revisit */

        hash += ((unsigned int) SYMBOL_REF << 7) + h;
        return hash ? hash : (unsigned int) SYMBOL_REF;
      }

    case PRE_DEC:
    case PRE_INC:
      /* We can't compute these without knowing the MEM mode.  */
      gcc_assert (memmode != VOIDmode);
      offset = GET_MODE_SIZE (memmode);
      if (code == PRE_DEC)
        offset = -offset;
      /* Adjust the hash so that (mem:MEMMODE (pre_* (reg))) hashes
         like (mem:MEMMODE (plus (reg) (const_int I))).  */
      if (GET_MODE (x) == Pmode
          && (REG_P (XEXP (x, 0))
              || MEM_P (XEXP (x, 0))
              || GET_CODE (XEXP (x, 0)) == VALUE))
        {
          HOST_WIDE_INT c;
          if (offset.is_constant (&c))
            return cselib_hash_plus_const_int (XEXP (x, 0),
                                               trunc_int_for_mode (c, Pmode),
                                               create, memmode);
        }
      hash = ((unsigned) PLUS + (unsigned) GET_MODE (x)
              + cselib_hash_rtx (XEXP (x, 0), create, memmode)
              + cselib_hash_rtx (gen_int_mode (offset, GET_MODE (x)),
                                 create, memmode));
      return hash ? hash : 1 + (unsigned) PLUS;

    case PRE_MODIFY:
      gcc_assert (memmode != VOIDmode);
      return cselib_hash_rtx (XEXP (x, 1), create, memmode);

    case POST_DEC:
    case POST_INC:
    case POST_MODIFY:
      gcc_assert (memmode != VOIDmode);
      return cselib_hash_rtx (XEXP (x, 0), create, memmode);

    case PC:
    case CALL:
    case UNSPEC_VOLATILE:
      return 0;

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 0;

      break;

    case PLUS:
      if (GET_MODE (x) == Pmode
          && (REG_P (XEXP (x, 0))
              || MEM_P (XEXP (x, 0))
              || GET_CODE (XEXP (x, 0)) == VALUE)
          && CONST_INT_P (XEXP (x, 1)))
        return cselib_hash_plus_const_int (XEXP (x, 0), INTVAL (XEXP (x, 1)),
                                           create, memmode);
      break;

    default:
      break;
    }

  i = GET_RTX_LENGTH (code) - 1;
  fmt = GET_RTX_FORMAT (code);
  for (; i >= 0; i--)
    {
      switch (fmt[i])
        {
        case 'e':
          {
            rtx tem = XEXP (x, i);
            unsigned int tem_hash = cselib_hash_rtx (tem, create, memmode);

            if (tem_hash == 0)
              return 0;

            hash += tem_hash;
          }
          break;
        case 'E':
          for (j = 0; j < XVECLEN (x, i); j++)
            {
              unsigned int tem_hash
                = cselib_hash_rtx (XVECEXP (x, i, j), create, memmode);

              if (tem_hash == 0)
                return 0;

              hash += tem_hash;
            }
          break;

        case 's':
          {
            const unsigned char *p = (const unsigned char *) XSTR (x, i);

            if (p)
              while (*p)
                hash += *p++;
            break;
          }

        case 'i':
          hash += XINT (x, i);
          break;

        case 'p':
          hash += constant_lower_bound (SUBREG_BYTE (x));
          break;

        case '0':
        case 't':
          /* unused */
          break;

        default:
          gcc_unreachable ();
        }
    }

  return hash ? hash : 1 + (unsigned int) GET_CODE (x);
}
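
/* As an illustration of the commutativity note above: operand hashes
   are combined with '+', so (plus:SI (reg:SI 1) (reg:SI 2)) and
   (plus:SI (reg:SI 2) (reg:SI 1)) hash identically and are only told
   apart (or unified) by cselib_hasher::equal.  */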

/* Create a new value structure for VALUE and initialize it.  The mode of the
   value is MODE.  */

static inline cselib_val *
new_cselib_val (unsigned int hash, machine_mode mode, rtx x)
{
  cselib_val *e = cselib_val_pool.allocate ();

  gcc_assert (hash);
  gcc_assert (next_uid);

  e->hash = hash;
  e->uid = next_uid++;
  /* We use an alloc pool to allocate this RTL construct because it
     accounts for about 8% of the overall memory usage.  We know
     precisely when we can have VALUE RTXen (when cselib is active)
     so we don't need to put them in garbage collected memory.
     ??? Why should a VALUE be an RTX in the first place?  */
  e->val_rtx = (rtx_def *) value_pool.allocate ();
  memset (e->val_rtx, 0, RTX_HDR_SIZE);
  PUT_CODE (e->val_rtx, VALUE);
  PUT_MODE (e->val_rtx, mode);
  CSELIB_VAL_PTR (e->val_rtx) = e;
  e->addr_list = 0;
  e->locs = 0;
  e->next_containing_mem = 0;

  scalar_int_mode int_mode;
  if (REG_P (x) && is_int_mode (mode, &int_mode)
      && GET_MODE_SIZE (int_mode) > 1
      && REG_VALUES (REGNO (x)) != NULL
      && (!cselib_current_insn || !DEBUG_INSN_P (cselib_current_insn)))
    {
      rtx copy = shallow_copy_rtx (x);
      scalar_int_mode narrow_mode_iter;
      FOR_EACH_MODE_UNTIL (narrow_mode_iter, int_mode)
        {
          PUT_MODE_RAW (copy, narrow_mode_iter);
          cselib_val *v = cselib_lookup (copy, narrow_mode_iter, 0, VOIDmode);
          if (v)
            {
              rtx sub = lowpart_subreg (narrow_mode_iter, e->val_rtx, int_mode);
              if (sub)
                new_elt_loc_list (v, sub);
            }
        }
    }

  if (dump_file && (dump_flags & TDF_CSELIB))
    {
      fprintf (dump_file, "cselib value %u:%u ", e->uid, hash);
      if (flag_dump_noaddr || flag_dump_unnumbered)
        fputs ("# ", dump_file);
      else
        fprintf (dump_file, "%p ", (void*)e);
      print_rtl_single (dump_file, x);
      fputc ('\n', dump_file);
    }

  return e;
}
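
/* E.g. if (reg:SI 100) already has a VALUE and a DImode value is now
   created for (reg:DI 100), the SImode value gains a lowpart-subreg
   location of the new DImode VALUE, linking the two widths.  */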

/* ADDR_ELT is a value that is used as address.  MEM_ELT is the value that
   contains the data at this address.  X is a MEM that represents the
   value.  Update the two value structures to represent this situation.  */

static void
add_mem_for_addr (cselib_val *addr_elt, cselib_val *mem_elt, rtx x)
{
  addr_elt = canonical_cselib_val (addr_elt);
  mem_elt = canonical_cselib_val (mem_elt);

  /* Avoid duplicates.  */
  addr_space_t as = MEM_ADDR_SPACE (x);
  for (elt_loc_list *l = mem_elt->locs; l; l = l->next)
    if (MEM_P (l->loc)
        && CSELIB_VAL_PTR (XEXP (l->loc, 0)) == addr_elt
        && MEM_ADDR_SPACE (l->loc) == as)
      {
        promote_debug_loc (l);
        return;
      }

  addr_elt->addr_list = new_elt_list (addr_elt->addr_list, mem_elt);
  new_elt_loc_list (mem_elt,
                    replace_equiv_address_nv (x, addr_elt->val_rtx));
  if (mem_elt->next_containing_mem == NULL)
    {
      mem_elt->next_containing_mem = first_containing_mem;
      first_containing_mem = mem_elt;
    }
}

/* Subroutine of cselib_lookup.  Return a value for X, which is a MEM rtx.
   If CREATE, make a new one if we haven't seen it before.  */

static cselib_val *
cselib_lookup_mem (rtx x, int create)
{
  machine_mode mode = GET_MODE (x);
  machine_mode addr_mode;
  cselib_val **slot;
  cselib_val *addr;
  cselib_val *mem_elt;

  if (MEM_VOLATILE_P (x) || mode == BLKmode
      || !cselib_record_memory
      || (FLOAT_MODE_P (mode) && flag_float_store))
    return 0;

  addr_mode = GET_MODE (XEXP (x, 0));
  if (addr_mode == VOIDmode)
    addr_mode = Pmode;

  /* Look up the value for the address.  */
  addr = cselib_lookup (XEXP (x, 0), addr_mode, create, mode);
  if (! addr)
    return 0;
  addr = canonical_cselib_val (addr);

  /* Find a value that describes a value of our mode at that address.  */
  addr_space_t as = MEM_ADDR_SPACE (x);
  for (elt_list *l = addr->addr_list; l; l = l->next)
    if (GET_MODE (l->elt->val_rtx) == mode)
      {
        for (elt_loc_list *l2 = l->elt->locs; l2; l2 = l2->next)
          if (MEM_P (l2->loc) && MEM_ADDR_SPACE (l2->loc) == as)
            {
              promote_debug_loc (l->elt->locs);
              return l->elt;
            }
      }

  if (! create)
    return 0;

  mem_elt = new_cselib_val (next_uid, mode, x);
  add_mem_for_addr (addr, mem_elt, x);
  slot = cselib_find_slot (mode, x, mem_elt->hash, INSERT, VOIDmode);
  *slot = mem_elt;
  return mem_elt;
}

/* Search through the possible substitutions in P.  We prefer a non reg
   substitution because this allows us to expand the tree further.  If
   we find just a reg, take the lowest regno.  There may be several
   non-reg results, we just take the first one because they will all
   expand to the same place.  */

static rtx
expand_loc (struct elt_loc_list *p, struct expand_value_data *evd,
            int max_depth)
{
  rtx reg_result = NULL;
  unsigned int regno = UINT_MAX;
  struct elt_loc_list *p_in = p;

  for (; p; p = p->next)
    {
      /* Return these right away to avoid returning stack pointer based
         expressions for frame pointer and vice versa, which is something
         that would confuse DSE.  See the comment in cselib_expand_value_rtx_1
         for more details.  */
      if (REG_P (p->loc)
          && (REGNO (p->loc) == STACK_POINTER_REGNUM
              || REGNO (p->loc) == FRAME_POINTER_REGNUM
              || REGNO (p->loc) == HARD_FRAME_POINTER_REGNUM
              || REGNO (p->loc) == cfa_base_preserved_regno))
        return p->loc;
      /* Avoid infinite recursion trying to expand a reg into the
         same reg.  */
      if ((REG_P (p->loc))
          && (REGNO (p->loc) < regno)
          && !bitmap_bit_p (evd->regs_active, REGNO (p->loc)))
        {
          reg_result = p->loc;
          regno = REGNO (p->loc);
        }
      /* Avoid infinite recursion and do not try to expand the
         value.  */
      else if (GET_CODE (p->loc) == VALUE
               && CSELIB_VAL_PTR (p->loc)->locs == p_in)
        continue;
      else if (!REG_P (p->loc))
        {
          rtx result, note;
          if (dump_file && (dump_flags & TDF_CSELIB))
            {
              print_inline_rtx (dump_file, p->loc, 0);
              fprintf (dump_file, "\n");
            }
          if (GET_CODE (p->loc) == LO_SUM
              && GET_CODE (XEXP (p->loc, 1)) == SYMBOL_REF
              && p->setting_insn
              && (note = find_reg_note (p->setting_insn, REG_EQUAL, NULL_RTX))
              && XEXP (note, 0) == XEXP (p->loc, 1))
            return XEXP (p->loc, 1);
          result = cselib_expand_value_rtx_1 (p->loc, evd, max_depth - 1);
          if (result)
            return result;
        }
    }

  if (regno != UINT_MAX)
    {
      rtx result;
      if (dump_file && (dump_flags & TDF_CSELIB))
        fprintf (dump_file, "r%d\n", regno);

      result = cselib_expand_value_rtx_1 (reg_result, evd, max_depth - 1);
      if (result)
        return result;
    }

  if (dump_file && (dump_flags & TDF_CSELIB))
    {
      if (reg_result)
        {
          print_inline_rtx (dump_file, reg_result, 0);
          fprintf (dump_file, "\n");
        }
      else
        fprintf (dump_file, "NULL\n");
    }

  return reg_result;
}

/* Forward substitute and expand an expression out to its roots.
   This is the opposite of common subexpression.  Because local value
   numbering is such a weak optimization, the expanded expression is
   pretty much unique (not from a pointer equals point of view but
   from a tree shape point of view).

   This function returns NULL if the expansion fails.  The expansion
   will fail if there is no value number for one of the operands or if
   one of the operands has been overwritten between the current insn
   and the beginning of the basic block.  For instance x has no
   expansion in:

   r1 <- r1 + 3
   x <- r1 + 8

   REGS_ACTIVE is a scratch bitmap that should be clear when passing in.
   It is clear on return.  */

rtx
cselib_expand_value_rtx (rtx orig, bitmap regs_active, int max_depth)
{
  struct expand_value_data evd;

  evd.regs_active = regs_active;
  evd.callback = NULL;
  evd.callback_arg = NULL;
  evd.dummy = false;

  return cselib_expand_value_rtx_1 (orig, &evd, max_depth);
}

/* Same as cselib_expand_value_rtx, but using a callback to try to
   resolve some expressions.  The CB function should return ORIG if it
   can't or does not want to deal with a certain RTX.  Any other
   return value, including NULL, will be used as the expansion for
   VALUE, without any further changes.  */

rtx
cselib_expand_value_rtx_cb (rtx orig, bitmap regs_active, int max_depth,
                            cselib_expand_callback cb, void *data)
{
  struct expand_value_data evd;

  evd.regs_active = regs_active;
  evd.callback = cb;
  evd.callback_arg = data;
  evd.dummy = false;

  return cselib_expand_value_rtx_1 (orig, &evd, max_depth);
}

/* Similar to cselib_expand_value_rtx_cb, but no rtxs are actually copied
   or simplified.  Useful to find out whether cselib_expand_value_rtx_cb
   would return NULL or non-NULL, without allocating new rtx.  */

bool
cselib_dummy_expand_value_rtx_cb (rtx orig, bitmap regs_active, int max_depth,
                                  cselib_expand_callback cb, void *data)
{
  struct expand_value_data evd;

  evd.regs_active = regs_active;
  evd.callback = cb;
  evd.callback_arg = data;
  evd.dummy = true;

  return cselib_expand_value_rtx_1 (orig, &evd, max_depth) != NULL;
}

/* Internal implementation of cselib_expand_value_rtx and
   cselib_expand_value_rtx_cb.  */

static rtx
cselib_expand_value_rtx_1 (rtx orig, struct expand_value_data *evd,
                           int max_depth)
{
  rtx copy, scopy;
  int i, j;
  RTX_CODE code;
  const char *format_ptr;
  machine_mode mode;

  code = GET_CODE (orig);

  /* For the context of dse, if we end up expanding into a huge tree, we
     will not have a useful address, so we might as well just give up
     quite early.  */
  if (max_depth <= 0)
    return NULL;

  switch (code)
    {
    case REG:
      {
        struct elt_list *l = REG_VALUES (REGNO (orig));

        if (l && l->elt == NULL)
          l = l->next;
        for (; l; l = l->next)
          if (GET_MODE (l->elt->val_rtx) == GET_MODE (orig))
            {
              rtx result;
              unsigned regno = REGNO (orig);

              /* The only thing that we are not willing to do (this
                 is requirement of dse and if others potential uses
                 need this function we should add a parm to control
                 it) is that we will not substitute the
                 STACK_POINTER_REGNUM, FRAME_POINTER or the
                 HARD_FRAME_POINTER.

                 These expansions confuse the code that notices that
                 stores into the frame go dead at the end of the
                 function and that the frame is not affected by calls
                 to subroutines.  If you allow the
                 STACK_POINTER_REGNUM substitution, then dse will
                 think that parameter pushing also goes dead which is
                 wrong.  If you allow the FRAME_POINTER or the
                 HARD_FRAME_POINTER then you lose the opportunity to
                 make the frame assumptions.  */
              if (regno == STACK_POINTER_REGNUM
                  || regno == FRAME_POINTER_REGNUM
                  || regno == HARD_FRAME_POINTER_REGNUM
                  || regno == cfa_base_preserved_regno)
                return orig;

              bitmap_set_bit (evd->regs_active, regno);

              if (dump_file && (dump_flags & TDF_CSELIB))
                fprintf (dump_file, "expanding: r%d into: ", regno);

              result = expand_loc (l->elt->locs, evd, max_depth);
              bitmap_clear_bit (evd->regs_active, regno);

              if (result)
                return result;
              else
                return orig;
            }
        return orig;
      }

    CASE_CONST_ANY:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case SCRATCH:
      /* SCRATCH must be shared because they represent distinct values.  */
      return orig;
    case CLOBBER:
      if (REG_P (XEXP (orig, 0)) && HARD_REGISTER_NUM_P (REGNO (XEXP (orig, 0))))
        return orig;
      break;

    case CONST:
      if (shared_const_p (orig))
        return orig;
      break;

    case SUBREG:
      {
        rtx subreg;

        if (evd->callback)
          {
            subreg = evd->callback (orig, evd->regs_active, max_depth,
                                    evd->callback_arg);
            if (subreg != orig)
              return subreg;
          }

        subreg = cselib_expand_value_rtx_1 (SUBREG_REG (orig), evd,
                                            max_depth - 1);
        if (!subreg)
          return NULL;
        scopy = simplify_gen_subreg (GET_MODE (orig), subreg,
                                     GET_MODE (SUBREG_REG (orig)),
                                     SUBREG_BYTE (orig));
        if (scopy == NULL
            || (GET_CODE (scopy) == SUBREG
                && !REG_P (SUBREG_REG (scopy))
                && !MEM_P (SUBREG_REG (scopy))))
          return NULL;

        return scopy;
      }

    case VALUE:
      {
        rtx result;

        if (dump_file && (dump_flags & TDF_CSELIB))
          {
            fputs ("\nexpanding ", dump_file);
            print_rtl_single (dump_file, orig);
            fputs (" into...", dump_file);
          }

        if (evd->callback)
          {
            result = evd->callback (orig, evd->regs_active, max_depth,
                                    evd->callback_arg);

            if (result != orig)
              return result;
          }

        result = expand_loc (CSELIB_VAL_PTR (orig)->locs, evd, max_depth);
        return result;
      }

    case DEBUG_EXPR:
      if (evd->callback)
        return evd->callback (orig, evd->regs_active, max_depth,
                              evd->callback_arg);
      return orig;

    default:
      break;
    }

  /* Copy the various flags, fields, and other information.  We assume
     that all fields need copying, and then clear the fields that should
     not be copied.  That is the sensible default behavior, and forces
     us to explicitly document why we are *not* copying a flag.  */
  if (evd->dummy)
    copy = NULL;
  else
    copy = shallow_copy_rtx (orig);

  format_ptr = GET_RTX_FORMAT (code);

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    switch (*format_ptr++)
      {
      case 'e':
        if (XEXP (orig, i) != NULL)
          {
            rtx result = cselib_expand_value_rtx_1 (XEXP (orig, i), evd,
                                                    max_depth - 1);
            if (!result)
              return NULL;
            if (copy)
              XEXP (copy, i) = result;
          }
        break;

      case 'E':
      case 'V':
        if (XVEC (orig, i) != NULL)
          {
            if (copy)
              XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
            for (j = 0; j < XVECLEN (orig, i); j++)
              {
                rtx result = cselib_expand_value_rtx_1 (XVECEXP (orig, i, j),
                                                        evd, max_depth - 1);
                if (!result)
                  return NULL;
                if (copy)
                  XVECEXP (copy, i, j) = result;
              }
          }
        break;

      case 't':
      case 'w':
      case 'i':
      case 's':
      case 'S':
      case 'T':
      case 'u':
      case 'B':
      case '0':
        /* These are left unchanged.  */
        break;

      default:
        gcc_unreachable ();
      }

  if (evd->dummy)
    return orig;

  mode = GET_MODE (copy);
  /* If an operand has been simplified into CONST_INT, which doesn't
     have a mode and the mode isn't derivable from whole rtx's mode,
     try simplify_*_operation first with mode from original's operand
     and as a fallback wrap CONST_INT into gen_rtx_CONST.  */
  switch (GET_RTX_CLASS (code))
    {
    case RTX_UNARY:
      if (CONST_INT_P (XEXP (copy, 0))
          && GET_MODE (XEXP (orig, 0)) != VOIDmode)
        {
          scopy = simplify_unary_operation (code, mode, XEXP (copy, 0),
                                            GET_MODE (XEXP (orig, 0)));
          if (scopy)
            return scopy;
        }
      break;
    case RTX_COMM_ARITH:
    case RTX_BIN_ARITH:
      /* These expressions can derive operand modes from the whole rtx's
         mode.  */
      break;
    case RTX_TERNARY:
    case RTX_BITFIELD_OPS:
      if (CONST_INT_P (XEXP (copy, 0))
          && GET_MODE (XEXP (orig, 0)) != VOIDmode)
        {
          scopy = simplify_ternary_operation (code, mode,
                                              GET_MODE (XEXP (orig, 0)),
                                              XEXP (copy, 0), XEXP (copy, 1),
                                              XEXP (copy, 2));
          if (scopy)
            return scopy;
        }
      break;
    case RTX_COMPARE:
    case RTX_COMM_COMPARE:
      if (CONST_INT_P (XEXP (copy, 0))
          && GET_MODE (XEXP (copy, 1)) == VOIDmode
          && (GET_MODE (XEXP (orig, 0)) != VOIDmode
              || GET_MODE (XEXP (orig, 1)) != VOIDmode))
        {
          scopy = simplify_relational_operation (code, mode,
                                                 (GET_MODE (XEXP (orig, 0))
                                                  != VOIDmode)
                                                 ? GET_MODE (XEXP (orig, 0))
                                                 : GET_MODE (XEXP (orig, 1)),
                                                 XEXP (copy, 0),
                                                 XEXP (copy, 1));
          if (scopy)
            return scopy;
        }
      break;
    default:
      break;
    }
  scopy = simplify_rtx (copy);
  if (scopy)
    return scopy;

  return copy;
}

/* Walk rtx X and replace all occurrences of REG and MEM subexpressions
   with VALUE expressions.  This way, it becomes independent of changes
   to registers and memory.
   X isn't actually modified; if modifications are needed, new rtl is
   allocated.  However, the return value can share rtl with X.
   If X is within a MEM, MEMMODE must be the mode of the MEM.  */

rtx
cselib_subst_to_values (rtx x, machine_mode memmode)
{
  enum rtx_code code = GET_CODE (x);
  const char *fmt = GET_RTX_FORMAT (code);
  cselib_val *e;
  struct elt_list *l;
  rtx copy = x;
  int i;
  poly_int64 offset;

  switch (code)
    {
    case REG:
      l = REG_VALUES (REGNO (x));
      if (l && l->elt == NULL)
        l = l->next;
      for (; l; l = l->next)
        if (GET_MODE (l->elt->val_rtx) == GET_MODE (x))
          return l->elt->val_rtx;

      gcc_unreachable ();

    case MEM:
      e = cselib_lookup_mem (x, 0);
      /* This used to happen for autoincrements, but we deal with them
         properly now.  Remove the if stmt for the next release.  */
      if (! e)
        {
          /* Assign a value that doesn't match any other.  */
          e = new_cselib_val (next_uid, GET_MODE (x), x);
        }
      return e->val_rtx;

    case ENTRY_VALUE:
      e = cselib_lookup (x, GET_MODE (x), 0, memmode);
      if (! e)
        break;
      return e->val_rtx;

    CASE_CONST_ANY:
      return x;

    case PRE_DEC:
    case PRE_INC:
      gcc_assert (memmode != VOIDmode);
      offset = GET_MODE_SIZE (memmode);
      if (code == PRE_DEC)
        offset = -offset;
      return cselib_subst_to_values (plus_constant (GET_MODE (x),
                                                    XEXP (x, 0), offset),
                                     memmode);

    case PRE_MODIFY:
      gcc_assert (memmode != VOIDmode);
      return cselib_subst_to_values (XEXP (x, 1), memmode);

    case POST_DEC:
    case POST_INC:
    case POST_MODIFY:
      gcc_assert (memmode != VOIDmode);
      return cselib_subst_to_values (XEXP (x, 0), memmode);

    case PLUS:
      if (GET_MODE (x) == Pmode && CONST_INT_P (XEXP (x, 1)))
        {
          rtx t = cselib_subst_to_values (XEXP (x, 0), memmode);
          if (GET_CODE (t) == VALUE)
            {
              if (SP_DERIVED_VALUE_P (t) && XEXP (x, 1) == const0_rtx)
                return t;
              for (struct elt_loc_list *l = CSELIB_VAL_PTR (t)->locs;
                   l; l = l->next)
                if (GET_CODE (l->loc) == PLUS
                    && GET_CODE (XEXP (l->loc, 0)) == VALUE
                    && SP_DERIVED_VALUE_P (XEXP (l->loc, 0))
                    && CONST_INT_P (XEXP (l->loc, 1)))
                  return plus_constant (Pmode, l->loc, INTVAL (XEXP (x, 1)));
            }
          if (t != XEXP (x, 0))
            {
              copy = shallow_copy_rtx (x);
              XEXP (copy, 0) = t;
            }
          return copy;
        }
      /* fall through */

    default:
      break;
    }

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          rtx t = cselib_subst_to_values (XEXP (x, i), memmode);

          if (t != XEXP (x, i))
            {
              if (x == copy)
                copy = shallow_copy_rtx (x);
              XEXP (copy, i) = t;
            }
        }
      else if (fmt[i] == 'E')
        {
          int j;

          for (j = 0; j < XVECLEN (x, i); j++)
            {
              rtx t = cselib_subst_to_values (XVECEXP (x, i, j), memmode);

              if (t != XVECEXP (x, i, j))
                {
                  if (XVEC (x, i) == XVEC (copy, i))
                    {
                      if (x == copy)
                        copy = shallow_copy_rtx (x);
                      XVEC (copy, i) = shallow_copy_rtvec (XVEC (x, i));
                    }
                  XVECEXP (copy, i, j) = t;
                }
            }
        }
    }

  return copy;
}
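
/* For example, (mult:SI (reg:SI 1) (mem:SI (reg:SI 2))) becomes
   (mult:SI (value:SI V1) (value:SI V2)), where V1 and V2 are the
   VALUEs recorded for the register and the memory slot.  */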

/* Wrapper for cselib_subst_to_values, that indicates X is in INSN.  */

rtx
cselib_subst_to_values_from_insn (rtx x, machine_mode memmode, rtx_insn *insn)
{
  rtx ret;
  gcc_assert (!cselib_current_insn);
  cselib_current_insn = insn;
  ret = cselib_subst_to_values (x, memmode);
  cselib_current_insn = NULL;
  return ret;
}

/* Look up the rtl expression X in our tables and return the value it
   has.  If CREATE is zero, we return NULL if we don't know the value.
   Otherwise, we create a new one if possible, using mode MODE if X
   doesn't have a mode (i.e. because it's a constant).  When X is part
   of an address, MEMMODE should be the mode of the enclosing MEM if
   we're tracking autoinc expressions.  */

static cselib_val *
cselib_lookup_1 (rtx x, machine_mode mode,
                 int create, machine_mode memmode)
{
  cselib_val **slot;
  cselib_val *e;
  unsigned int hashval;

  if (GET_MODE (x) != VOIDmode)
    mode = GET_MODE (x);

  if (GET_CODE (x) == VALUE)
    return CSELIB_VAL_PTR (x);

  if (REG_P (x))
    {
      struct elt_list *l;
      unsigned int i = REGNO (x);

      l = REG_VALUES (i);
      if (l && l->elt == NULL)
        l = l->next;
      for (; l; l = l->next)
        if (mode == GET_MODE (l->elt->val_rtx))
          {
            promote_debug_loc (l->elt->locs);
            return l->elt;
          }

      if (! create)
        return 0;

      if (i < FIRST_PSEUDO_REGISTER)
        {
          unsigned int n = hard_regno_nregs (i, mode);

          if (n > max_value_regs)
            max_value_regs = n;
        }

      e = new_cselib_val (next_uid, GET_MODE (x), x);
      if (GET_MODE (x) == Pmode && x == stack_pointer_rtx)
        SP_DERIVED_VALUE_P (e->val_rtx) = 1;
      new_elt_loc_list (e, x);

      scalar_int_mode int_mode;
      if (REG_VALUES (i) == 0)
        {
          /* Maintain the invariant that the first entry of
             REG_VALUES, if present, must be the value used to set the
             register, or NULL.  */
          used_regs[n_used_regs++] = i;
          REG_VALUES (i) = new_elt_list (REG_VALUES (i), NULL);
        }
      else if (cselib_preserve_constants
               && is_int_mode (mode, &int_mode))
        {
          /* During var-tracking, try harder to find equivalences
             for SUBREGs.  If a setter sets say a DImode register
             and user uses that register only in SImode, add a lowpart
             subreg location.  */
          struct elt_list *lwider = NULL;
          scalar_int_mode lmode;
          l = REG_VALUES (i);
          if (l && l->elt == NULL)
            l = l->next;
          for (; l; l = l->next)
            if (is_int_mode (GET_MODE (l->elt->val_rtx), &lmode)
                && GET_MODE_SIZE (lmode) > GET_MODE_SIZE (int_mode)
                && (lwider == NULL
                    || partial_subreg_p (lmode,
                                         GET_MODE (lwider->elt->val_rtx))))
              {
                struct elt_loc_list *el;
                if (i < FIRST_PSEUDO_REGISTER
                    && hard_regno_nregs (i, lmode) != 1)
                  continue;
                for (el = l->elt->locs; el; el = el->next)
                  if (!REG_P (el->loc))
                    break;
                if (el)
                  lwider = l;
              }
          if (lwider)
            {
              rtx sub = lowpart_subreg (int_mode, lwider->elt->val_rtx,
                                        GET_MODE (lwider->elt->val_rtx));
              if (sub)
                new_elt_loc_list (e, sub);
            }
        }
      REG_VALUES (i)->next = new_elt_list (REG_VALUES (i)->next, e);
      slot = cselib_find_slot (mode, x, e->hash, INSERT, memmode);
      *slot = e;
      return e;
    }

  if (MEM_P (x))
    return cselib_lookup_mem (x, create);

  hashval = cselib_hash_rtx (x, create, memmode);
  /* Can't even create if hashing is not possible.  */
  if (! hashval)
    return 0;

  slot = cselib_find_slot (mode, x, hashval,
                           create ? INSERT : NO_INSERT, memmode);
  if (slot == 0)
    return 0;

  e = (cselib_val *) *slot;
  if (e)
    return e;

  e = new_cselib_val (hashval, mode, x);

  /* We have to fill the slot before calling cselib_subst_to_values:
     the hash table is inconsistent until we do so, and
     cselib_subst_to_values will need to do lookups.  */
  *slot = e;
  rtx v = cselib_subst_to_values (x, memmode);

  /* If cselib_preserve_constants, we might get a SP_DERIVED_VALUE_P
     VALUE that isn't in the hash tables anymore.  */
  if (GET_CODE (v) == VALUE && SP_DERIVED_VALUE_P (v) && PRESERVED_VALUE_P (v))
    PRESERVED_VALUE_P (e->val_rtx) = 1;

  new_elt_loc_list (e, v);
  return e;
}
/* Wrapper for cselib_lookup, that indicates X is in INSN.  */

cselib_val *
cselib_lookup_from_insn (rtx x, machine_mode mode,
			 int create, machine_mode memmode, rtx_insn *insn)
{
  cselib_val *ret;

  gcc_assert (!cselib_current_insn);
  cselib_current_insn = insn;

  ret = cselib_lookup (x, mode, create, memmode);

  cselib_current_insn = NULL;

  return ret;
}
/* Wrapper for cselib_lookup_1, that logs the lookup result and
   maintains invariants related to debug insns.  */

cselib_val *
cselib_lookup (rtx x, machine_mode mode,
	       int create, machine_mode memmode)
{
  cselib_val *ret = cselib_lookup_1 (x, mode, create, memmode);

  /* ??? Should we return NULL if we're not to create an entry, the
     found loc is a debug loc and cselib_current_insn is not DEBUG?
     If so, we should also avoid converting val to non-DEBUG; probably
     easiest setting cselib_current_insn to NULL before the call
     above.  */

  if (dump_file && (dump_flags & TDF_CSELIB))
    {
      fputs ("cselib lookup ", dump_file);
      print_inline_rtx (dump_file, x, 2);
      fprintf (dump_file, " => %u:%u\n",
	       ret ? ret->uid : 0,
	       ret ? ret->hash : 0);
    }

  return ret;
}
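/* A minimal sketch of a typical client (X and Y stand for two rtxs a
   pass wants to compare; this is not code from this file):

     cselib_val *vx = cselib_lookup (x, GET_MODE (x), 1, VOIDmode);
     cselib_val *vy = cselib_lookup (y, GET_MODE (y), 1, VOIDmode);
     if (vx && vy
	 && canonical_cselib_val (vx) == canonical_cselib_val (vy))
       /* X and Y are known to compute the same value.  */;

   Because equivalent expressions are entered under the same VALUE,
   pointer equality of the canonical values establishes equivalence.  */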
/* Invalidate the value at *L, which is part of REG_VALUES (REGNO).  */

static void
cselib_invalidate_regno_val (unsigned int regno, struct elt_list **l)
{
  cselib_val *v = (*l)->elt;
  if (*l == REG_VALUES (regno))
    {
      /* Maintain the invariant that the first entry of
	 REG_VALUES, if present, must be the value used to set
	 the register, or NULL.  This is also nice because
	 then we won't push the same regno onto used_regs
	 multiple times.  */
      (*l)->elt = NULL;
      l = &(*l)->next;
    }
  else
    unchain_one_elt_list (l);

  v = canonical_cselib_val (v);

  bool had_locs = v->locs != NULL;
  rtx_insn *setting_insn = v->locs ? v->locs->setting_insn : NULL;

  /* Now, we clear the mapping from value to reg.  It must exist, so
     this code will crash intentionally if it doesn't.  */
  for (elt_loc_list **p = &v->locs; ; p = &(*p)->next)
    {
      rtx x = (*p)->loc;

      if (REG_P (x) && REGNO (x) == regno)
	{
	  unchain_one_elt_loc_list (p);
	  break;
	}
    }

  if (had_locs && cselib_useless_value_p (v))
    {
      if (setting_insn && DEBUG_INSN_P (setting_insn))
	n_useless_debug_values++;
      else
	n_useless_values++;
    }
}
/* Invalidate any entries in reg_values that overlap REGNO.  This is called
   if REGNO is changing.  MODE is the mode of the assignment to REGNO, which
   is used to determine how many hard registers are being changed.  If MODE
   is VOIDmode, then only REGNO is being changed; this is used when
   invalidating call clobbered registers across a call.  */

static void
cselib_invalidate_regno (unsigned int regno, machine_mode mode)
{
  unsigned int endregno;
  unsigned int i;

  /* If we see pseudos after reload, something is _wrong_.  */
  gcc_assert (!reload_completed || regno < FIRST_PSEUDO_REGISTER
	      || reg_renumber[regno] < 0);

  /* Determine the range of registers that must be invalidated.  For
     pseudos, only REGNO is affected.  For hard regs, we must take MODE
     into account, and we must also invalidate lower register numbers
     if they contain values that overlap REGNO.  */
  if (regno < FIRST_PSEUDO_REGISTER)
    {
      gcc_assert (mode != VOIDmode);

      if (regno < max_value_regs)
	i = 0;
      else
	i = regno - max_value_regs;

      endregno = end_hard_regno (mode, regno);
    }
  else
    {
      i = regno;
      endregno = regno + 1;
    }

  for (; i < endregno; i++)
    {
      struct elt_list **l = &REG_VALUES (i);

      /* Go through all known values for this reg; if it overlaps the range
	 we're invalidating, remove the value.  */
      while (*l)
	{
	  cselib_val *v = (*l)->elt;
	  unsigned int this_last = i;

	  if (i < FIRST_PSEUDO_REGISTER && v != NULL)
	    this_last = end_hard_regno (GET_MODE (v->val_rtx), i) - 1;

	  if (this_last < regno || v == NULL
	      || (v == cfa_base_preserved_val
		  && i == cfa_base_preserved_regno))
	    {
	      l = &(*l)->next;
	      continue;
	    }

	  /* We have an overlap.  */
	  cselib_invalidate_regno_val (i, l);
	}
    }
}
/* Invalidate any locations in the table which are changed because of a
   store to MEM_RTX.  If this is called because of a non-const call
   instruction, MEM_RTX is (mem:BLK const0_rtx).  */

static void
cselib_invalidate_mem (rtx mem_rtx)
{
  cselib_val **vp, *v, *next;
  int num_mems = 0;
  rtx mem_addr;

  mem_addr = canon_rtx (get_addr (XEXP (mem_rtx, 0)));
  mem_rtx = canon_rtx (mem_rtx);

  vp = &first_containing_mem;
  for (v = *vp; v != &dummy_val; v = next)
    {
      bool has_mem = false;
      struct elt_loc_list **p = &v->locs;
      bool had_locs = v->locs != NULL;
      rtx_insn *setting_insn = v->locs ? v->locs->setting_insn : NULL;

      while (*p)
	{
	  rtx x = (*p)->loc;
	  cselib_val *addr;
	  struct elt_list **mem_chain;

	  /* MEMs may occur in locations only at the top level; below
	     that every MEM or REG is substituted by its VALUE.  */
	  if (!MEM_P (x))
	    {
	      p = &(*p)->next;
	      continue;
	    }
	  if (num_mems < param_max_cselib_memory_locations
	      && ! canon_anti_dependence (x, false, mem_rtx,
					  GET_MODE (mem_rtx), mem_addr))
	    {
	      has_mem = true;
	      num_mems++;
	      p = &(*p)->next;
	      continue;
	    }

	  /* This one overlaps.  */
	  /* We must have a mapping from this MEM's address to the
	     value (E).  Remove that, too.  */
	  addr = cselib_lookup (XEXP (x, 0), VOIDmode, 0, GET_MODE (x));
	  addr = canonical_cselib_val (addr);
	  gcc_checking_assert (v == canonical_cselib_val (v));
	  mem_chain = &addr->addr_list;
	  for (;;)
	    {
	      cselib_val *canon = canonical_cselib_val ((*mem_chain)->elt);

	      if (canon == v)
		{
		  unchain_one_elt_list (mem_chain);
		  break;
		}

	      /* Record canonicalized elt.  */
	      (*mem_chain)->elt = canon;

	      mem_chain = &(*mem_chain)->next;
	    }

	  unchain_one_elt_loc_list (p);
	}

      if (had_locs && cselib_useless_value_p (v))
	{
	  if (setting_insn && DEBUG_INSN_P (setting_insn))
	    n_useless_debug_values++;
	  else
	    n_useless_values++;
	}

      next = v->next_containing_mem;
      if (has_mem)
	{
	  *vp = v;
	  vp = &(*vp)->next_containing_mem;
	}
      else
	v->next_containing_mem = NULL;
    }
  *vp = &dummy_val;
}
/* Invalidate DEST.  */

void
cselib_invalidate_rtx (rtx dest)
{
  while (GET_CODE (dest) == SUBREG
	 || GET_CODE (dest) == ZERO_EXTRACT
	 || GET_CODE (dest) == STRICT_LOW_PART)
    dest = XEXP (dest, 0);

  if (REG_P (dest))
    cselib_invalidate_regno (REGNO (dest), GET_MODE (dest));
  else if (MEM_P (dest))
    cselib_invalidate_mem (dest);
}
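/* For example (a sketch; SCRATCH_REG stands for a register rtx owned by
   the caller, not a name from this file): a pass that emits an
   instruction clobbering a register while cselib is active would call

     cselib_invalidate_rtx (scratch_reg);

   so that stale equivalences involving that register are dropped.  */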
/* A wrapper for cselib_invalidate_rtx to be called via note_stores.  */

static void
cselib_invalidate_rtx_note_stores (rtx dest, const_rtx,
				   void *data ATTRIBUTE_UNUSED)
{
  cselib_invalidate_rtx (dest);
}
/* Record the result of a SET instruction.  DEST is being set; the source
   contains the value described by SRC_ELT.  If DEST is a MEM, DEST_ADDR_ELT
   describes its address.  */

static void
cselib_record_set (rtx dest, cselib_val *src_elt, cselib_val *dest_addr_elt)
{
  if (src_elt == 0 || side_effects_p (dest))
    return;

  if (REG_P (dest))
    {
      unsigned int dreg = REGNO (dest);
      if (dreg < FIRST_PSEUDO_REGISTER)
	{
	  unsigned int n = REG_NREGS (dest);

	  if (n > max_value_regs)
	    max_value_regs = n;
	}

      if (REG_VALUES (dreg) == 0)
	{
	  used_regs[n_used_regs++] = dreg;
	  REG_VALUES (dreg) = new_elt_list (REG_VALUES (dreg), src_elt);
	}
      else
	{
	  /* The register should have been invalidated.  */
	  gcc_assert (REG_VALUES (dreg)->elt == 0);
	  REG_VALUES (dreg)->elt = src_elt;
	}

      if (cselib_useless_value_p (src_elt))
	n_useless_values--;
      new_elt_loc_list (src_elt, dest);
    }
  else if (MEM_P (dest) && dest_addr_elt != 0
	   && cselib_record_memory)
    {
      if (cselib_useless_value_p (src_elt))
	n_useless_values--;
      add_mem_for_addr (dest_addr_elt, src_elt, dest);
    }
}
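/* For instance (an illustrative sketch; SET stands for the insn's set
   rtx as found by cselib_record_sets): given
   (set (reg:SI 100) (plus:SI (reg:SI 101) (const_int 4))),
   the PLUS is looked up first, yielding SRC_ELT, and then

     cselib_record_set (SET_DEST (set), src_elt, NULL);

   makes SRC_ELT the current value of register 100 and records
   (reg:SI 100) as one more location of that value.  */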
/* Make ELT and X's VALUE equivalent to each other at INSN.  */

void
cselib_add_permanent_equiv (cselib_val *elt, rtx x, rtx_insn *insn)
{
  cselib_val *nelt;
  rtx_insn *save_cselib_current_insn = cselib_current_insn;

  gcc_checking_assert (elt);
  gcc_checking_assert (PRESERVED_VALUE_P (elt->val_rtx));
  gcc_checking_assert (!side_effects_p (x));

  cselib_current_insn = insn;

  nelt = cselib_lookup (x, GET_MODE (elt->val_rtx), 1, VOIDmode);

  if (nelt != elt)
    {
      cselib_any_perm_equivs = true;

      if (!PRESERVED_VALUE_P (nelt->val_rtx))
	cselib_preserve_value (nelt);

      new_elt_loc_list (nelt, elt->val_rtx);
    }

  cselib_current_insn = save_cselib_current_insn;
}
/* Return TRUE if any permanent equivalences have been recorded since
   the table was last initialized.  */

bool
cselib_have_permanent_equivalences (void)
{
  return cselib_any_perm_equivs;
}
/* Record stack_pointer_rtx to be equal to
   (plus:P cfa_base_preserved_val offset).  Used by var-tracking
   at the start of basic blocks for !frame_pointer_needed functions.  */

static void
cselib_record_sp_cfa_base_equiv (HOST_WIDE_INT offset, rtx_insn *insn)
{
  rtx sp_derived_value = NULL_RTX;
  for (struct elt_loc_list *l = cfa_base_preserved_val->locs; l; l = l->next)
    if (GET_CODE (l->loc) == VALUE
	&& SP_DERIVED_VALUE_P (l->loc))
      {
	sp_derived_value = l->loc;
	break;
      }
    else if (GET_CODE (l->loc) == PLUS
	     && GET_CODE (XEXP (l->loc, 0)) == VALUE
	     && SP_DERIVED_VALUE_P (XEXP (l->loc, 0))
	     && CONST_INT_P (XEXP (l->loc, 1)))
      {
	sp_derived_value = XEXP (l->loc, 0);
	offset = offset + UINTVAL (XEXP (l->loc, 1));
	break;
      }

  if (sp_derived_value == NULL_RTX)
    return;

  cselib_val *val
    = cselib_lookup_from_insn (plus_constant (Pmode, sp_derived_value, offset),
			       Pmode, 1, VOIDmode, insn);
  if (val != NULL)
    {
      PRESERVED_VALUE_P (val->val_rtx) = 1;
      cselib_record_set (stack_pointer_rtx, val, NULL);
    }
}
/* Return true if V is SP_DERIVED_VALUE_P (or SP_DERIVED_VALUE_P + CONST_INT)
   that can be expressed using cfa_base_preserved_val + CONST_INT.  */

bool
cselib_sp_derived_value_p (cselib_val *v)
{
  if (!SP_DERIVED_VALUE_P (v->val_rtx))
    for (struct elt_loc_list *l = v->locs; l; l = l->next)
      if (GET_CODE (l->loc) == PLUS
	  && GET_CODE (XEXP (l->loc, 0)) == VALUE
	  && SP_DERIVED_VALUE_P (XEXP (l->loc, 0))
	  && CONST_INT_P (XEXP (l->loc, 1)))
	v = CSELIB_VAL_PTR (XEXP (l->loc, 0));
  if (!SP_DERIVED_VALUE_P (v->val_rtx))
    return false;
  for (struct elt_loc_list *l = v->locs; l; l = l->next)
    if (l->loc == cfa_base_preserved_val->val_rtx)
      return true;
    else if (GET_CODE (l->loc) == PLUS
	     && XEXP (l->loc, 0) == cfa_base_preserved_val->val_rtx
	     && CONST_INT_P (XEXP (l->loc, 1)))
      return true;
  return false;
}
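/* E.g. an SP_DERIVED_VALUE_P value one of whose locations is
   (plus:DI (value:DI <cfa_base>) (const_int -16)), where <cfa_base> is
   cfa_base_preserved_val, satisfies the predicate: it can be expressed
   as cfa_base_preserved_val + (-16).  */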
/* There is no good way to determine how many elements there can be
   in a PARALLEL.  Since it's fairly cheap, use a really large number.  */
#define MAX_SETS (FIRST_PSEUDO_REGISTER * 2)

struct cselib_record_autoinc_data
{
  struct cselib_set *sets;
  int n_sets;
};
/* Callback for for_each_inc_dec.  Records in ARG the SETs implied by
   autoinc RTXs: SRC plus SRCOFF if non-NULL is stored in DEST.  */

static int
cselib_record_autoinc_cb (rtx mem ATTRIBUTE_UNUSED, rtx op ATTRIBUTE_UNUSED,
			  rtx dest, rtx src, rtx srcoff, void *arg)
{
  struct cselib_record_autoinc_data *data;
  data = (struct cselib_record_autoinc_data *) arg;

  data->sets[data->n_sets].dest = dest;

  if (srcoff)
    data->sets[data->n_sets].src = gen_rtx_PLUS (GET_MODE (src), src, srcoff);
  else
    data->sets[data->n_sets].src = src;

  data->n_sets++;

  return 0;
}
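/* As an example of what this callback records: for an address like
   (pre_dec:DI (reg:DI sp)) inside a MEM of an 8-byte access,
   for_each_inc_dec invokes it with DEST = SRC = (reg:DI sp) and
   SRCOFF = (const_int -8), so the implied set sp = sp - 8 is appended
   to the SETS array in ARG.  */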
/* Record the effects of any sets and autoincs in INSN.  */

static void
cselib_record_sets (rtx_insn *insn)
{
  int n_sets = 0;
  int i;
  struct cselib_set sets[MAX_SETS];
  rtx cond = 0;
  int n_sets_before_autoinc;
  int n_strict_low_parts = 0;
  struct cselib_record_autoinc_data data;

  rtx body = PATTERN (insn);
  if (GET_CODE (body) == COND_EXEC)
    {
      cond = COND_EXEC_TEST (body);
      body = COND_EXEC_CODE (body);
    }

  /* Find all sets.  */
  if (GET_CODE (body) == SET)
    {
      sets[0].src = SET_SRC (body);
      sets[0].dest = SET_DEST (body);
      n_sets = 1;
    }
  else if (GET_CODE (body) == PARALLEL)
    {
      /* Look through the PARALLEL and record the values being
	 set, if possible.  Also handle any CLOBBERs.  */
      for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
	{
	  rtx x = XVECEXP (body, 0, i);

	  if (GET_CODE (x) == SET)
	    {
	      sets[n_sets].src = SET_SRC (x);
	      sets[n_sets].dest = SET_DEST (x);
	      n_sets++;
	    }
	}
    }

  if (n_sets == 1
      && MEM_P (sets[0].src)
      && !cselib_record_memory
      && MEM_READONLY_P (sets[0].src))
    {
      rtx note = find_reg_equal_equiv_note (insn);

      if (note && CONSTANT_P (XEXP (note, 0)))
	sets[0].src = XEXP (note, 0);
    }

  data.sets = sets;
  data.n_sets = n_sets_before_autoinc = n_sets;
  for_each_inc_dec (PATTERN (insn), cselib_record_autoinc_cb, &data);
  n_sets = data.n_sets;

  /* Look up the values that are read.  Do this before invalidating the
     locations that are written.  */
  for (i = 0; i < n_sets; i++)
    {
      rtx dest = sets[i].dest;
      rtx orig = dest;

      /* A STRICT_LOW_PART can be ignored; we'll record the equivalence for
	 the low part after invalidating any knowledge about larger modes.  */
      if (GET_CODE (sets[i].dest) == STRICT_LOW_PART)
	sets[i].dest = dest = XEXP (dest, 0);

      /* We don't know how to record anything but REG or MEM.  */
      if (REG_P (dest)
	  || (MEM_P (dest) && cselib_record_memory))
	{
	  rtx src = sets[i].src;
	  if (cond)
	    src = gen_rtx_IF_THEN_ELSE (GET_MODE (dest), cond, src, dest);
	  sets[i].src_elt = cselib_lookup (src, GET_MODE (dest), 1, VOIDmode);
	  if (MEM_P (dest))
	    {
	      machine_mode address_mode = get_address_mode (dest);

	      sets[i].dest_addr_elt = cselib_lookup (XEXP (dest, 0),
						     address_mode, 1,
						     GET_MODE (dest));
	    }
	  else
	    sets[i].dest_addr_elt = 0;
	}

      /* Improve handling of STRICT_LOW_PART: if the current value is known
	 to be const0_rtx, then the low bits will be set to dest and the
	 higher bits will remain zero.  Used in code like:

	 {di:SI=0;clobber flags:CC;}
	 flags:CCNO=cmp(bx:SI,0)
	 strict_low_part(di:QI)=flags:CCNO<=0

	 where we can note both that di:QI=flags:CCNO<=0 and
	 also that, because di:SI is known to be 0 and strict_low_part(di:QI)
	 preserves the upper bits, di:SI=zero_extend(flags:CCNO<=0).  */
      scalar_int_mode mode;
      if (dest != orig
	  && cselib_record_sets_hook
	  && REG_P (dest)
	  && HARD_REGISTER_P (dest)
	  && sets[i].src_elt
	  && is_a <scalar_int_mode> (GET_MODE (dest), &mode)
	  && n_sets + n_strict_low_parts < MAX_SETS)
	{
	  opt_scalar_int_mode wider_mode_iter;
	  FOR_EACH_WIDER_MODE (wider_mode_iter, mode)
	    {
	      scalar_int_mode wider_mode = wider_mode_iter.require ();
	      if (GET_MODE_PRECISION (wider_mode) > BITS_PER_WORD)
		break;

	      rtx reg = gen_lowpart (wider_mode, dest);
	      if (!REG_P (reg))
		break;

	      cselib_val *v = cselib_lookup (reg, wider_mode, 0, VOIDmode);
	      if (!v)
		continue;

	      struct elt_loc_list *l;
	      for (l = v->locs; l; l = l->next)
		if (l->loc == const0_rtx)
		  break;

	      if (!l)
		continue;

	      sets[n_sets + n_strict_low_parts].dest = reg;
	      sets[n_sets + n_strict_low_parts].src = dest;
	      sets[n_sets + n_strict_low_parts++].src_elt = sets[i].src_elt;
	      break;
	    }
	}
    }

  if (cselib_record_sets_hook)
    cselib_record_sets_hook (insn, sets, n_sets);

  /* Invalidate all locations written by this insn.  Note that the elts we
     looked up in the previous loop aren't affected, just some of their
     locations may go away.  */
  note_pattern_stores (body, cselib_invalidate_rtx_note_stores, NULL);

  for (i = n_sets_before_autoinc; i < n_sets; i++)
    cselib_invalidate_rtx (sets[i].dest);

  /* If this is an asm, look for duplicate sets.  This can happen when the
     user uses the same value as an output multiple times.  This is valid
     if the outputs are not actually used thereafter.  Treat this case as
     if the value isn't actually set.  We do this by smashing the destination
     to pc_rtx, so that we won't record the value later.  */
  if (n_sets >= 2 && asm_noperands (body) >= 0)
    {
      for (i = 0; i < n_sets; i++)
	{
	  rtx dest = sets[i].dest;
	  if (REG_P (dest) || MEM_P (dest))
	    {
	      int j;
	      for (j = i + 1; j < n_sets; j++)
		if (rtx_equal_p (dest, sets[j].dest))
		  {
		    sets[i].dest = pc_rtx;
		    sets[j].dest = pc_rtx;
		  }
	    }
	}
    }

  /* Now enter the equivalences in our tables.  */
  for (i = 0; i < n_sets; i++)
    {
      rtx dest = sets[i].dest;
      if (REG_P (dest)
	  || (MEM_P (dest) && cselib_record_memory))
	cselib_record_set (dest, sets[i].src_elt, sets[i].dest_addr_elt);
    }

  /* And deal with STRICT_LOW_PART.  */
  for (i = 0; i < n_strict_low_parts; i++)
    {
      if (! PRESERVED_VALUE_P (sets[n_sets + i].src_elt->val_rtx))
	continue;
      machine_mode dest_mode = GET_MODE (sets[n_sets + i].dest);
      cselib_val *v
	= cselib_lookup (sets[n_sets + i].dest, dest_mode, 1, VOIDmode);
      cselib_preserve_value (v);
      rtx r = gen_rtx_ZERO_EXTEND (dest_mode,
				   sets[n_sets + i].src_elt->val_rtx);
      cselib_add_permanent_equiv (v, r, insn);
    }
}
/* Return true if INSN in the prologue initializes hard_frame_pointer_rtx.  */

static bool
fp_setter_insn (rtx_insn *insn)
{
  rtx expr, pat = NULL_RTX;

  if (!RTX_FRAME_RELATED_P (insn))
    return false;

  expr = find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX);
  if (expr)
    pat = XEXP (expr, 0);
  if (!modified_in_p (hard_frame_pointer_rtx, pat ? pat : insn))
    return false;

  /* Don't return true for frame pointer restores in the epilogue.  */
  if (find_reg_note (insn, REG_CFA_RESTORE, hard_frame_pointer_rtx))
    return false;
  return true;
}
/* V is one of the values in REG_VALUES (REGNO).  Return true if it
   would be invalidated by CALLEE_ABI.  */

static bool
cselib_invalidated_by_call_p (const function_abi &callee_abi,
			      unsigned int regno, cselib_val *v)
{
  machine_mode mode = GET_MODE (v->val_rtx);
  if (mode == VOIDmode)
    {
      v = REG_VALUES (regno)->elt;
      if (!v)
	/* If we don't know what the mode of the constant value is, and we
	   don't know what mode the register was set in, conservatively
	   assume that the register is clobbered.  The value's going to be
	   essentially useless in this case anyway.  */
	return true;
      mode = GET_MODE (v->val_rtx);
    }
  return callee_abi.clobbers_reg_p (mode, regno);
}
/* Record the effects of INSN.  */

void
cselib_process_insn (rtx_insn *insn)
{
  int i;
  rtx x;

  cselib_current_insn = insn;

  /* Forget everything at a CODE_LABEL or a setjmp.  */
  if ((LABEL_P (insn)
       || (CALL_P (insn)
	   && find_reg_note (insn, REG_SETJMP, NULL)))
      && !cselib_preserve_constants)
    {
      cselib_reset_table (next_uid);
      cselib_current_insn = NULL;
      return;
    }

  if (! INSN_P (insn))
    {
      cselib_current_insn = NULL;
      return;
    }

  /* If this is a call instruction, forget anything stored in a
     call clobbered register, or, if this is not a const call, in
     memory.  */
  if (CALL_P (insn))
    {
      function_abi callee_abi = insn_callee_abi (insn);
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	{
	  elt_list **l = &REG_VALUES (i);
	  while (*l)
	    {
	      cselib_val *v = (*l)->elt;
	      if (v && cselib_invalidated_by_call_p (callee_abi, i, v))
		cselib_invalidate_regno_val (i, l);
	      else
		l = &(*l)->next;
	    }
	}

      /* Since it is not clear how cselib is going to be used, be
	 conservative here and treat looping pure or const functions
	 as if they were regular functions.  */
      if (RTL_LOOPING_CONST_OR_PURE_CALL_P (insn)
	  || !(RTL_CONST_OR_PURE_CALL_P (insn)))
	cselib_invalidate_mem (callmem);
      else
	/* For const/pure calls, invalidate any argument slots because
	   they are owned by the callee.  */
	for (x = CALL_INSN_FUNCTION_USAGE (insn); x; x = XEXP (x, 1))
	  if (GET_CODE (XEXP (x, 0)) == USE
	      && MEM_P (XEXP (XEXP (x, 0), 0)))
	    cselib_invalidate_mem (XEXP (XEXP (x, 0), 0));
    }

  cselib_record_sets (insn);

  /* Look for any CLOBBERs in CALL_INSN_FUNCTION_USAGE, but only
     after we have processed the insn.  */
  if (CALL_P (insn))
    {
      for (x = CALL_INSN_FUNCTION_USAGE (insn); x; x = XEXP (x, 1))
	if (GET_CODE (XEXP (x, 0)) == CLOBBER)
	  cselib_invalidate_rtx (XEXP (XEXP (x, 0), 0));

      /* Flush everything on setjmp.  */
      if (cselib_preserve_constants
	  && find_reg_note (insn, REG_SETJMP, NULL))
	{
	  cselib_preserve_only_values ();
	  cselib_reset_table (next_uid);
	}
    }

  /* On a setter of the hard frame pointer, if frame_pointer_needed,
     invalidate stack_pointer_rtx, so that sp and {,h}fp based
     VALUEs are distinct.  */
  if (reload_completed
      && frame_pointer_needed
      && fp_setter_insn (insn))
    cselib_invalidate_rtx (stack_pointer_rtx);

  cselib_current_insn = NULL;

  if (n_useless_values > MAX_USELESS_VALUES
      /* remove_useless_values is linear in the hash table size.  Avoid
	 quadratic behavior for very large hashtables with very few
	 useless elements.  */
      && ((unsigned int) n_useless_values
	  > (cselib_hash_table->elements () - n_debug_values) / 4))
    remove_useless_values ();
}
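/* A minimal driver sketch (standard pass boilerplate assumed; this is
   not code from this file):

     cselib_init (CSELIB_RECORD_MEMORY);
     init_alias_analysis ();
     basic_block bb;
     rtx_insn *insn;
     FOR_EACH_BB_FN (bb, cfun)
       FOR_BB_INSNS (bb, insn)
	 cselib_process_insn (insn);
     end_alias_analysis ();
     cselib_finish ();

   cselib_process_insn itself skips anything that is not INSN_P.  */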
/* Initialize cselib for one pass.  The caller must also call
   init_alias_analysis.  */

void
cselib_init (int record_what)
{
  cselib_record_memory = record_what & CSELIB_RECORD_MEMORY;
  cselib_preserve_constants = record_what & CSELIB_PRESERVE_CONSTANTS;
  cselib_any_perm_equivs = false;

  /* (mem:BLK (scratch)) is a special mechanism to conflict with everything,
     see canon_true_dependence.  This is only created once.  */
  if (! callmem)
    callmem = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));

  cselib_nregs = max_reg_num ();

  /* We preserve reg_values to allow expensive clearing of the whole thing.
     Reallocate it however if it happens to be too large.  */
  if (!reg_values || reg_values_size < cselib_nregs
      || (reg_values_size > 10 && reg_values_size > cselib_nregs * 4))
    {
      free (reg_values);
      /* Some space for newly emitted instructions so we don't end up
	 reallocating in between passes.  */
      reg_values_size = cselib_nregs + (63 + cselib_nregs) / 16;
      reg_values = XCNEWVEC (struct elt_list *, reg_values_size);
    }
  used_regs = XNEWVEC (unsigned int, cselib_nregs);
  n_used_regs = 0;
  /* FIXME: enable sanitization (PR87845) */
  cselib_hash_table
    = new hash_table<cselib_hasher> (31, /* ggc */ false,
				     /* sanitize_eq_and_hash */ false);
  if (cselib_preserve_constants)
    cselib_preserved_hash_table
      = new hash_table<cselib_hasher> (31, /* ggc */ false,
				       /* sanitize_eq_and_hash */ false);
  next_uid = 1;
}
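/* As a worked example of the reg_values sizing formula in cselib_init:
   for cselib_nregs == 100, reg_values_size becomes
   100 + (63 + 100) / 16 == 110 entries, i.e. ten extra slots of headroom
   for registers created while cselib is in use.  */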
/* Called when the current user is done with cselib.  */

void
cselib_finish (void)
{
  bool preserved = cselib_preserve_constants;
  cselib_discard_hook = NULL;
  cselib_preserve_constants = false;
  cselib_any_perm_equivs = false;
  cfa_base_preserved_val = NULL;
  cfa_base_preserved_regno = INVALID_REGNUM;
  elt_list_pool.release ();
  elt_loc_list_pool.release ();
  cselib_val_pool.release ();
  value_pool.release ();
  cselib_clear_table ();
  delete cselib_hash_table;
  cselib_hash_table = NULL;
  if (preserved)
    delete cselib_preserved_hash_table;
  cselib_preserved_hash_table = NULL;
  free (used_regs);
  used_regs = 0;
  n_useless_values = 0;
  n_useless_debug_values = 0;
  n_debug_values = 0;
  next_uid = 0;
}
/* Dump the cselib_val *X to FILE *OUT.  */

int
dump_cselib_val (cselib_val **x, FILE *out)
{
  cselib_val *v = *x;
  bool need_lf = true;

  print_inline_rtx (out, v->val_rtx, 0);

  if (v->locs)
    {
      struct elt_loc_list *l = v->locs;
      if (need_lf)
	{
	  fputc ('\n', out);
	  need_lf = false;
	}
      fputs (" locs:", out);
      do
	{
	  if (l->setting_insn)
	    fprintf (out, "\n  from insn %i ",
		     INSN_UID (l->setting_insn));
	  else
	    fprintf (out, "\n   ");
	  print_inline_rtx (out, l->loc, 4);
	}
      while ((l = l->next));
      fputc ('\n', out);
    }
  else
    {
      fputs (" no locs", out);
      need_lf = true;
    }

  if (v->addr_list)
    {
      struct elt_list *e = v->addr_list;
      if (need_lf)
	{
	  fputc ('\n', out);
	  need_lf = false;
	}
      fputs (" addr list:", out);
      do
	{
	  fputs ("\n  ", out);
	  print_inline_rtx (out, e->elt->val_rtx, 2);
	}
      while ((e = e->next));
      fputc ('\n', out);
    }
  else
    {
      fputs (" no addrs", out);
      need_lf = true;
    }

  if (v->next_containing_mem == &dummy_val)
    fputs (" last mem\n", out);
  else if (v->next_containing_mem)
    {
      fputs (" next mem ", out);
      print_inline_rtx (out, v->next_containing_mem->val_rtx, 2);
      fputc ('\n', out);
    }
  else if (need_lf)
    fputc ('\n', out);

  return 1;
}
/* Dump to OUT everything in the CSELIB table.  */

void
dump_cselib_table (FILE *out)
{
  fprintf (out, "cselib hash table:\n");
  cselib_hash_table->traverse <FILE *, dump_cselib_val> (out);
  fprintf (out, "cselib preserved hash table:\n");
  cselib_preserved_hash_table->traverse <FILE *, dump_cselib_val> (out);
  if (first_containing_mem != &dummy_val)
    {
      fputs ("first mem ", out);
      print_inline_rtx (out, first_containing_mem->val_rtx, 2);
      fputc ('\n', out);
    }
  fprintf (out, "next uid %i\n", next_uid);
}

#include "gt-cselib.h"