re PR target/53682 (ICE in cselib_lookup (SEGV) on i586-linux-gnu)
[official-gcc.git] / gcc / cselib.c
1 /* Common subexpression elimination library for GNU compiler.
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011,
4 2012 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
27 #include "rtl.h"
28 #include "tm_p.h"
29 #include "regs.h"
30 #include "hard-reg-set.h"
31 #include "flags.h"
32 #include "insn-config.h"
33 #include "recog.h"
34 #include "function.h"
35 #include "emit-rtl.h"
36 #include "diagnostic-core.h"
37 #include "ggc.h"
38 #include "hashtab.h"
39 #include "tree-pass.h"
40 #include "cselib.h"
41 #include "params.h"
42 #include "alloc-pool.h"
43 #include "target.h"
44 #include "bitmap.h"
46 /* A list of cselib_val structures. */
47 struct elt_list {
48 struct elt_list *next;
49 cselib_val *elt;
50 };
52 static bool cselib_record_memory;
53 static bool cselib_preserve_constants;
54 static bool cselib_any_perm_equivs;
55 static int entry_and_rtx_equal_p (const void *, const void *);
56 static hashval_t get_value_hash (const void *);
57 static struct elt_list *new_elt_list (struct elt_list *, cselib_val *);
58 static void new_elt_loc_list (cselib_val *, rtx);
59 static void unchain_one_value (cselib_val *);
60 static void unchain_one_elt_list (struct elt_list **);
61 static void unchain_one_elt_loc_list (struct elt_loc_list **);
62 static int discard_useless_locs (void **, void *);
63 static int discard_useless_values (void **, void *);
64 static void remove_useless_values (void);
65 static int rtx_equal_for_cselib_1 (rtx, rtx, enum machine_mode);
66 static unsigned int cselib_hash_rtx (rtx, int, enum machine_mode);
67 static cselib_val *new_cselib_val (unsigned int, enum machine_mode, rtx);
68 static void add_mem_for_addr (cselib_val *, cselib_val *, rtx);
69 static cselib_val *cselib_lookup_mem (rtx, int);
70 static void cselib_invalidate_regno (unsigned int, enum machine_mode);
71 static void cselib_invalidate_mem (rtx);
72 static void cselib_record_set (rtx, cselib_val *, cselib_val *);
73 static void cselib_record_sets (rtx);
75 struct expand_value_data
76 {
77 bitmap regs_active;
78 cselib_expand_callback callback;
79 void *callback_arg;
80 bool dummy;
81 };
83 static rtx cselib_expand_value_rtx_1 (rtx, struct expand_value_data *, int);
85 /* There are three ways in which cselib can look up an rtx:
86 - for a REG, the reg_values table (which is indexed by regno) is used
87 - for a MEM, we recursively look up its address and then follow the
88 addr_list of that value
89 - for everything else, we compute a hash value and go through the hash
90 table. Since different rtx's can still have the same hash value,
91 this involves walking the table entries for a given value and comparing
92 the locations of the entries with the rtx we are looking up. */
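/* Illustrative sketch (editor's addition, hypothetical rtxes): looking up
   (reg:SI 1) consults REG_VALUES (1) directly; looking up
   (mem:SI (reg:SI 1)) first looks up the address (reg:SI 1) and then
   scans that value's addr_list for a VALUE of SImode; looking up
   (plus:SI (reg:SI 1) (const_int 4)) hashes the expression and walks
   the matching hash table bucket, comparing locations.  */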
94 /* A table that enables us to look up elts by their value. */
95 static htab_t cselib_hash_table;
97 /* This is a global so we don't have to pass this through every function.
98 It is used in new_elt_loc_list to set SETTING_INSN. */
99 static rtx cselib_current_insn;
101 /* The unique id that the next created value will take. */
102 static unsigned int next_uid;
104 /* The number of registers we had when the varrays were last resized. */
105 static unsigned int cselib_nregs;
107 /* Count values without known locations, or with only locations that
108 wouldn't have been known except for debug insns. Whenever this
109 grows too big, we remove these useless values from the table.
111 Counting values with only debug values is a bit tricky. We don't
112 want to increment n_useless_values when we create a value for a
113 debug insn, for this would get n_useless_values out of sync, but we
114 want to increment it if all locs in the list that were ever referenced
115 in nondebug insns are removed from the list.
117 In the general case, once we do that, we'd have to stop accepting
118 nondebug expressions in the loc list, to avoid having two values
119 equivalent that, without debug insns, would have been made into
120 separate values. However, because debug insns never introduce
121 equivalences themselves (no assignments), the only means for
122 growing loc lists is through nondebug assignments. If the locs
123 also happen to be referenced in debug insns, it will work just fine.
125 A consequence of this is that there's at most one debug-only loc in
126 each loc list. If we keep it in the first entry, testing whether
127 we have a debug-only loc list takes O(1).
129 Furthermore, since any additional entry in a loc list containing a
130 debug loc would have to come from an assignment (nondebug) that
131 references both the initial debug loc and the newly-equivalent loc,
132 the initial debug loc would be promoted to a nondebug loc, and the
133 loc list would not contain debug locs any more.
135 So the only case we have to be careful with in order to keep
136 n_useless_values in sync between debug and nondebug compilations is
137 to avoid incrementing n_useless_values when removing the single loc
138 from a value that turns out to not appear outside debug values. We
139 increment n_useless_debug_values instead, and leave such values
140 alone until, for other reasons, we garbage-collect useless
141 values. */
142 static int n_useless_values;
143 static int n_useless_debug_values;
145 /* Count values whose locs have been taken exclusively from debug
146 insns for the entire life of the value. */
147 static int n_debug_values;
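/* Worked example for the bookkeeping above (editor's addition,
   hypothetical): a VALUE bound only by a debug insn counts in
   n_debug_values.  If its single loc is later removed,
   n_useless_debug_values (not n_useless_values) is bumped; if a
   nondebug insn records the same loc first, promote_debug_loc
   converts it and decrements n_debug_values.  */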
149 /* Number of useless values before we remove them from the hash table. */
150 #define MAX_USELESS_VALUES 32
152 /* This table maps from register number to values. It does not
153 contain pointers to cselib_val structures, but rather elt_lists.
154 The purpose is to be able to refer to the same register in
155 different modes. The first element of the list defines the mode in
156 which the register was set; if the mode is unknown or the value is
157 no longer valid in that mode, ELT will be NULL for the first
158 element. */
159 static struct elt_list **reg_values;
160 static unsigned int reg_values_size;
161 #define REG_VALUES(i) reg_values[i]
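/* Example chain (editor's addition, hypothetical): after
   (set (reg:DI 1) ...), REG_VALUES (1) starts with an elt_list whose
   elt is the DImode VALUE used to set the register (or NULL if that
   mode is unknown); a later lookup of (reg:SI 1) appends an SImode
   VALUE to the same chain, so one register can be referred to in
   several modes.  */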
163 /* The largest number of hard regs used by any entry added to the
164 REG_VALUES table. Cleared on each cselib_clear_table() invocation. */
165 static unsigned int max_value_regs;
167 /* Here the set of indices I with REG_VALUES(I) != 0 is saved. This is used
168 in cselib_clear_table() for fast emptying. */
169 static unsigned int *used_regs;
170 static unsigned int n_used_regs;
172 /* We pass this to cselib_invalidate_mem to invalidate all of
173 memory for a non-const call instruction. */
174 static GTY(()) rtx callmem;
176 /* Set by discard_useless_locs if it deleted the last location of any
177 value. */
178 static int values_became_useless;
180 /* Used as stop element of the containing_mem list so we can check
181 presence in the list by checking the next pointer. */
182 static cselib_val dummy_val;
184 /* If non-NULL, value of the eliminated arg_pointer_rtx or frame_pointer_rtx
185 that is constant through the whole function and should never be
186 eliminated. */
187 static cselib_val *cfa_base_preserved_val;
188 static unsigned int cfa_base_preserved_regno = INVALID_REGNUM;
190 /* Used to list all values that contain a memory reference.
191 May or may not contain the useless values - the list is compacted
192 each time memory is invalidated. */
193 static cselib_val *first_containing_mem = &dummy_val;
194 static alloc_pool elt_loc_list_pool, elt_list_pool, cselib_val_pool, value_pool;
196 /* If nonnull, cselib will call this function before freeing useless
197 VALUEs. A VALUE is deemed useless if its "locs" field is null. */
198 void (*cselib_discard_hook) (cselib_val *);
200 /* If nonnull, cselib will call this function before recording sets or
201 even clobbering outputs of INSN. All the recorded sets will be
202 represented in the array sets[n_sets]. new_val_min can be used to
203 tell whether values present in sets are introduced by this
204 instruction. */
205 void (*cselib_record_sets_hook) (rtx insn, struct cselib_set *sets,
206 int n_sets);
208 #define PRESERVED_VALUE_P(RTX) \
209 (RTL_FLAG_CHECK1("PRESERVED_VALUE_P", (RTX), VALUE)->unchanging)
213 /* Allocate a struct elt_list and fill in its two elements with the
214 arguments. */
216 static inline struct elt_list *
217 new_elt_list (struct elt_list *next, cselib_val *elt)
219 struct elt_list *el;
220 el = (struct elt_list *) pool_alloc (elt_list_pool);
221 el->next = next;
222 el->elt = elt;
223 return el;
226 /* Allocate a struct elt_loc_list with LOC and prepend it to VAL's loc
227 list. */
229 static inline void
230 new_elt_loc_list (cselib_val *val, rtx loc)
232 struct elt_loc_list *el, *next = val->locs;
234 gcc_checking_assert (!next || !next->setting_insn
235 || !DEBUG_INSN_P (next->setting_insn)
236 || cselib_current_insn == next->setting_insn);
238 /* If we're creating the first loc in a debug insn context, we've
239 just created a debug value. Count it. */
240 if (!next && cselib_current_insn && DEBUG_INSN_P (cselib_current_insn))
241 n_debug_values++;
243 val = canonical_cselib_val (val);
244 next = val->locs;
246 if (GET_CODE (loc) == VALUE)
248 loc = canonical_cselib_val (CSELIB_VAL_PTR (loc))->val_rtx;
250 gcc_checking_assert (PRESERVED_VALUE_P (loc)
251 == PRESERVED_VALUE_P (val->val_rtx));
253 if (val->val_rtx == loc)
254 return;
255 else if (val->uid > CSELIB_VAL_PTR (loc)->uid)
257 /* Reverse the insertion. */
258 new_elt_loc_list (CSELIB_VAL_PTR (loc), val->val_rtx);
259 return;
262 gcc_checking_assert (val->uid < CSELIB_VAL_PTR (loc)->uid);
264 if (CSELIB_VAL_PTR (loc)->locs)
266 /* Bring all locs from LOC to VAL. */
267 for (el = CSELIB_VAL_PTR (loc)->locs; el->next; el = el->next)
269 /* Adjust values that have LOC as canonical so that VAL
270 becomes their canonical. */
271 if (el->loc && GET_CODE (el->loc) == VALUE)
273 gcc_checking_assert (CSELIB_VAL_PTR (el->loc)->locs->loc
274 == loc);
275 CSELIB_VAL_PTR (el->loc)->locs->loc = val->val_rtx;
278 el->next = val->locs;
279 next = val->locs = CSELIB_VAL_PTR (loc)->locs;
282 if (CSELIB_VAL_PTR (loc)->addr_list)
284 /* Bring in addr_list into canonical node. */
285 struct elt_list *last = CSELIB_VAL_PTR (loc)->addr_list;
286 while (last->next)
287 last = last->next;
288 last->next = val->addr_list;
289 val->addr_list = CSELIB_VAL_PTR (loc)->addr_list;
290 CSELIB_VAL_PTR (loc)->addr_list = NULL;
293 if (CSELIB_VAL_PTR (loc)->next_containing_mem != NULL
294 && val->next_containing_mem == NULL)
296 /* Add VAL to the containing_mem list after LOC. LOC will
297 be removed when we notice it doesn't contain any
298 MEMs. */
299 val->next_containing_mem = CSELIB_VAL_PTR (loc)->next_containing_mem;
300 CSELIB_VAL_PTR (loc)->next_containing_mem = val;
303 /* Chain LOC back to VAL. */
304 el = (struct elt_loc_list *) pool_alloc (elt_loc_list_pool);
305 el->loc = val->val_rtx;
306 el->setting_insn = cselib_current_insn;
307 el->next = NULL;
308 CSELIB_VAL_PTR (loc)->locs = el;
311 el = (struct elt_loc_list *) pool_alloc (elt_loc_list_pool);
312 el->loc = loc;
313 el->setting_insn = cselib_current_insn;
314 el->next = next;
315 val->locs = el;
318 /* Promote loc L to a nondebug cselib_current_insn if L is marked as
319 originating from a debug insn, maintaining the debug values
320 count. */
322 static inline void
323 promote_debug_loc (struct elt_loc_list *l)
325 if (l && l->setting_insn && DEBUG_INSN_P (l->setting_insn)
326 && (!cselib_current_insn || !DEBUG_INSN_P (cselib_current_insn)))
328 n_debug_values--;
329 l->setting_insn = cselib_current_insn;
330 if (cselib_preserve_constants && l->next)
332 gcc_assert (l->next->setting_insn
333 && DEBUG_INSN_P (l->next->setting_insn)
334 && !l->next->next);
335 l->next->setting_insn = cselib_current_insn;
337 else
338 gcc_assert (!l->next);
342 /* The elt_list at *PL is no longer needed. Unchain it and free its
343 storage. */
345 static inline void
346 unchain_one_elt_list (struct elt_list **pl)
348 struct elt_list *l = *pl;
350 *pl = l->next;
351 pool_free (elt_list_pool, l);
354 /* Likewise for elt_loc_lists. */
356 static void
357 unchain_one_elt_loc_list (struct elt_loc_list **pl)
359 struct elt_loc_list *l = *pl;
361 *pl = l->next;
362 pool_free (elt_loc_list_pool, l);
365 /* Likewise for cselib_vals. This also frees the addr_list associated with
366 V. */
368 static void
369 unchain_one_value (cselib_val *v)
371 while (v->addr_list)
372 unchain_one_elt_list (&v->addr_list);
374 pool_free (cselib_val_pool, v);
377 /* Remove all entries from the hash table. Also used during
378 initialization. */
380 void
381 cselib_clear_table (void)
383 cselib_reset_table (1);
386 /* Return TRUE if V is a constant, a function invariant or a VALUE
387 equivalence; FALSE otherwise. */
389 static bool
390 invariant_or_equiv_p (cselib_val *v)
392 struct elt_loc_list *l;
394 if (v == cfa_base_preserved_val)
395 return true;
397 /* Keep VALUE equivalences around. */
398 for (l = v->locs; l; l = l->next)
399 if (GET_CODE (l->loc) == VALUE)
400 return true;
402 if (v->locs != NULL
403 && v->locs->next == NULL)
405 if (CONSTANT_P (v->locs->loc)
406 && (GET_CODE (v->locs->loc) != CONST
407 || !references_value_p (v->locs->loc, 0)))
408 return true;
409 /* Although a debug expr may be bound to different expressions,
410 we can preserve it as if it were constant, to get unification
411 and proper merging within var-tracking. */
412 if (GET_CODE (v->locs->loc) == DEBUG_EXPR
413 || GET_CODE (v->locs->loc) == DEBUG_IMPLICIT_PTR
414 || GET_CODE (v->locs->loc) == ENTRY_VALUE
415 || GET_CODE (v->locs->loc) == DEBUG_PARAMETER_REF)
416 return true;
418 /* (plus (value V) (const_int C)) is invariant iff V is invariant. */
419 if (GET_CODE (v->locs->loc) == PLUS
420 && CONST_INT_P (XEXP (v->locs->loc, 1))
421 && GET_CODE (XEXP (v->locs->loc, 0)) == VALUE
422 && invariant_or_equiv_p (CSELIB_VAL_PTR (XEXP (v->locs->loc, 0))))
423 return true;
426 return false;
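/* Examples (editor's addition, hypothetical): a value whose single loc
   is (const_int 16) or (symbol_ref "x") is invariant; so is one whose
   loc is (plus:SI (value:SI V) (const_int 8)) when V itself is
   invariant; a value whose loc list contains another VALUE is kept as
   an equivalence.  A value whose only loc is (reg:SI 1) is neither.  */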
429 /* Remove from hash table all VALUEs except constants, function
430 invariants and VALUE equivalences. */
432 static int
433 preserve_constants_and_equivs (void **x, void *info ATTRIBUTE_UNUSED)
435 cselib_val *v = (cselib_val *)*x;
437 if (!invariant_or_equiv_p (v))
438 htab_clear_slot (cselib_hash_table, x);
439 return 1;
442 /* Remove all entries from the hash table, arranging for the next
443 value to be numbered NUM. */
445 void
446 cselib_reset_table (unsigned int num)
448 unsigned int i;
450 max_value_regs = 0;
452 if (cfa_base_preserved_val)
454 unsigned int regno = cfa_base_preserved_regno;
455 unsigned int new_used_regs = 0;
456 for (i = 0; i < n_used_regs; i++)
457 if (used_regs[i] == regno)
459 new_used_regs = 1;
460 continue;
462 else
463 REG_VALUES (used_regs[i]) = 0;
464 gcc_assert (new_used_regs == 1);
465 n_used_regs = new_used_regs;
466 used_regs[0] = regno;
467 max_value_regs
468 = hard_regno_nregs[regno][GET_MODE (cfa_base_preserved_val->locs->loc)];
470 else
472 for (i = 0; i < n_used_regs; i++)
473 REG_VALUES (used_regs[i]) = 0;
474 n_used_regs = 0;
477 if (cselib_preserve_constants)
478 htab_traverse (cselib_hash_table, preserve_constants_and_equivs, NULL);
479 else
481 htab_empty (cselib_hash_table);
482 gcc_checking_assert (!cselib_any_perm_equivs);
485 n_useless_values = 0;
486 n_useless_debug_values = 0;
487 n_debug_values = 0;
489 next_uid = num;
491 first_containing_mem = &dummy_val;
494 /* Return the number of the next value that will be generated. */
496 unsigned int
497 cselib_get_next_uid (void)
499 return next_uid;
502 /* See the documentation of cselib_find_slot below. */
503 static enum machine_mode find_slot_memmode;
505 /* Search for X, whose hashcode is HASH, in CSELIB_HASH_TABLE,
506 INSERTing if requested. When X is part of the address of a MEM,
507 MEMMODE should specify the mode of the MEM. While searching the
508 table, MEMMODE is held in FIND_SLOT_MEMMODE, so that autoinc RTXs
509 in X can be resolved. */
511 static void **
512 cselib_find_slot (rtx x, hashval_t hash, enum insert_option insert,
513 enum machine_mode memmode)
515 void **slot;
516 find_slot_memmode = memmode;
517 slot = htab_find_slot_with_hash (cselib_hash_table, x, hash, insert);
518 find_slot_memmode = VOIDmode;
519 return slot;
522 /* The equality test for our hash table. The first argument ENTRY is a table
523 element (i.e. a cselib_val), while the second arg X is an rtx. We know
524 that all callers of htab_find_slot_with_hash will wrap CONST_INTs into a
525 CONST of an appropriate mode. */
527 static int
528 entry_and_rtx_equal_p (const void *entry, const void *x_arg)
530 struct elt_loc_list *l;
531 const cselib_val *const v = (const cselib_val *) entry;
532 rtx x = CONST_CAST_RTX ((const_rtx)x_arg);
533 enum machine_mode mode = GET_MODE (x);
535 gcc_assert (!CONST_INT_P (x) && GET_CODE (x) != CONST_FIXED
536 && (mode != VOIDmode || GET_CODE (x) != CONST_DOUBLE));
538 if (mode != GET_MODE (v->val_rtx))
539 return 0;
541 /* Unwrap X if necessary. */
542 if (GET_CODE (x) == CONST
543 && (CONST_INT_P (XEXP (x, 0))
544 || GET_CODE (XEXP (x, 0)) == CONST_FIXED
545 || GET_CODE (XEXP (x, 0)) == CONST_DOUBLE))
546 x = XEXP (x, 0);
548 /* We don't guarantee that distinct rtx's have different hash values,
549 so we need to do a comparison. */
550 for (l = v->locs; l; l = l->next)
551 if (rtx_equal_for_cselib_1 (l->loc, x, find_slot_memmode))
553 promote_debug_loc (l);
554 return 1;
557 return 0;
560 /* The hash function for our hash table. The value is always computed with
561 cselib_hash_rtx when adding an element; this function just extracts the
562 hash value from a cselib_val structure. */
564 static hashval_t
565 get_value_hash (const void *entry)
567 const cselib_val *const v = (const cselib_val *) entry;
568 return v->hash;
571 /* Return true if X contains a VALUE rtx. If ONLY_USELESS is set, we
572 only return true for values which point to a cselib_val whose value
573 element has been set to zero, which implies the cselib_val will be
574 removed. */
576 int
577 references_value_p (const_rtx x, int only_useless)
579 const enum rtx_code code = GET_CODE (x);
580 const char *fmt = GET_RTX_FORMAT (code);
581 int i, j;
583 if (GET_CODE (x) == VALUE
584 && (! only_useless ||
585 (CSELIB_VAL_PTR (x)->locs == 0 && !PRESERVED_VALUE_P (x))))
586 return 1;
588 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
590 if (fmt[i] == 'e' && references_value_p (XEXP (x, i), only_useless))
591 return 1;
592 else if (fmt[i] == 'E')
593 for (j = 0; j < XVECLEN (x, i); j++)
594 if (references_value_p (XVECEXP (x, i, j), only_useless))
595 return 1;
598 return 0;
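/* Example (editor's addition, hypothetical): references_value_p on
   (plus:SI (value:SI V) (const_int 4)) returns 1; with ONLY_USELESS
   set, it returns 1 only if V's locs are gone and V is not
   preserved.  */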
601 /* For all locations found in X, delete locations that reference useless
602 values (i.e. values without any location). Called through
603 htab_traverse. */
605 static int
606 discard_useless_locs (void **x, void *info ATTRIBUTE_UNUSED)
608 cselib_val *v = (cselib_val *)*x;
609 struct elt_loc_list **p = &v->locs;
610 bool had_locs = v->locs != NULL;
611 rtx setting_insn = v->locs ? v->locs->setting_insn : NULL;
613 while (*p)
615 if (references_value_p ((*p)->loc, 1))
616 unchain_one_elt_loc_list (p);
617 else
618 p = &(*p)->next;
621 if (had_locs && v->locs == 0 && !PRESERVED_VALUE_P (v->val_rtx))
623 if (setting_insn && DEBUG_INSN_P (setting_insn))
624 n_useless_debug_values++;
625 else
626 n_useless_values++;
627 values_became_useless = 1;
629 return 1;
632 /* If X is a value with no locations, remove it from the hashtable. */
634 static int
635 discard_useless_values (void **x, void *info ATTRIBUTE_UNUSED)
637 cselib_val *v = (cselib_val *)*x;
639 if (v->locs == 0 && !PRESERVED_VALUE_P (v->val_rtx))
641 if (cselib_discard_hook)
642 cselib_discard_hook (v);
644 CSELIB_VAL_PTR (v->val_rtx) = NULL;
645 htab_clear_slot (cselib_hash_table, x);
646 unchain_one_value (v);
647 n_useless_values--;
650 return 1;
653 /* Clean out useless values (i.e. those which no longer have locations
654 associated with them) from the hash table. */
656 static void
657 remove_useless_values (void)
659 cselib_val **p, *v;
661 /* First pass: eliminate locations that reference the value. That in
662 turn can make more values useless. */
663 do
664 {
665 values_became_useless = 0;
666 htab_traverse (cselib_hash_table, discard_useless_locs, 0);
667 }
668 while (values_became_useless);
670 /* Second pass: actually remove the values. */
672 p = &first_containing_mem;
673 for (v = *p; v != &dummy_val; v = v->next_containing_mem)
674 if (v->locs && v == canonical_cselib_val (v))
675 {
676 *p = v;
677 p = &(*p)->next_containing_mem;
678 }
679 *p = &dummy_val;
681 n_useless_values += n_useless_debug_values;
682 n_debug_values -= n_useless_debug_values;
683 n_useless_debug_values = 0;
685 htab_traverse (cselib_hash_table, discard_useless_values, 0);
687 gcc_assert (!n_useless_values);
690 /* Arrange for a value to not be removed from the hash table even if
691 it becomes useless. */
693 void
694 cselib_preserve_value (cselib_val *v)
696 PRESERVED_VALUE_P (v->val_rtx) = 1;
699 /* Test whether a value is preserved. */
701 bool
702 cselib_preserved_value_p (cselib_val *v)
704 return PRESERVED_VALUE_P (v->val_rtx);
707 /* Arrange for a REG value to be assumed constant through the whole function,
708 never invalidated and preserved across cselib_reset_table calls. */
710 void
711 cselib_preserve_cfa_base_value (cselib_val *v, unsigned int regno)
713 if (cselib_preserve_constants
714 && v->locs
715 && REG_P (v->locs->loc))
717 cfa_base_preserved_val = v;
718 cfa_base_preserved_regno = regno;
722 /* Clean all non-constant expressions in the hash table, but retain
723 their values. */
725 void
726 cselib_preserve_only_values (void)
728 int i;
730 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
731 cselib_invalidate_regno (i, reg_raw_mode[i]);
733 cselib_invalidate_mem (callmem);
735 remove_useless_values ();
737 gcc_assert (first_containing_mem == &dummy_val);
740 /* Return the mode in which a register was last set. If X is not a
741 register, return its mode. If the mode in which the register was
742 set is not known, or the value was already clobbered, return
743 VOIDmode. */
745 enum machine_mode
746 cselib_reg_set_mode (const_rtx x)
748 if (!REG_P (x))
749 return GET_MODE (x);
751 if (REG_VALUES (REGNO (x)) == NULL
752 || REG_VALUES (REGNO (x))->elt == NULL)
753 return VOIDmode;
755 return GET_MODE (REG_VALUES (REGNO (x))->elt->val_rtx);
758 /* Return nonzero if we can prove that X and Y contain the same value, taking
759 our gathered information into account. */
761 int
762 rtx_equal_for_cselib_p (rtx x, rtx y)
764 return rtx_equal_for_cselib_1 (x, y, VOIDmode);
767 /* If x is a PLUS or an autoinc operation, expand the operation,
768 storing the offset, if any, in *OFF. */
770 static rtx
771 autoinc_split (rtx x, rtx *off, enum machine_mode memmode)
773 switch (GET_CODE (x))
775 case PLUS:
776 *off = XEXP (x, 1);
777 return XEXP (x, 0);
779 case PRE_DEC:
780 if (memmode == VOIDmode)
781 return x;
783 *off = GEN_INT (-GET_MODE_SIZE (memmode));
784 return XEXP (x, 0);
785 break;
787 case PRE_INC:
788 if (memmode == VOIDmode)
789 return x;
791 *off = GEN_INT (GET_MODE_SIZE (memmode));
792 return XEXP (x, 0);
794 case PRE_MODIFY:
795 return XEXP (x, 1);
797 case POST_DEC:
798 case POST_INC:
799 case POST_MODIFY:
800 return XEXP (x, 0);
802 default:
803 return x;
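/* Example (editor's addition, hypothetical): with MEMMODE == SImode,
   autoinc_split on (pre_dec:SI (reg:SI sp)) returns (reg:SI sp) and
   sets *OFF to (const_int -4), i.e. the address is treated like
   (plus:SI (reg:SI sp) (const_int -4)).  With MEMMODE == VOIDmode
   the PRE_DEC is returned unchanged.  */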
807 /* Return nonzero if we can prove that X and Y contain the same value,
808 taking our gathered information into account. MEMMODE holds the
809 mode of the enclosing MEM, if any, as required to deal with autoinc
810 addressing modes. If X and Y are not (known to be) part of
811 addresses, MEMMODE should be VOIDmode. */
813 static int
814 rtx_equal_for_cselib_1 (rtx x, rtx y, enum machine_mode memmode)
816 enum rtx_code code;
817 const char *fmt;
818 int i;
820 if (REG_P (x) || MEM_P (x))
822 cselib_val *e = cselib_lookup (x, GET_MODE (x), 0, memmode);
824 if (e)
825 x = e->val_rtx;
828 if (REG_P (y) || MEM_P (y))
830 cselib_val *e = cselib_lookup (y, GET_MODE (y), 0, memmode);
832 if (e)
833 y = e->val_rtx;
836 if (x == y)
837 return 1;
839 if (GET_CODE (x) == VALUE)
841 cselib_val *e = canonical_cselib_val (CSELIB_VAL_PTR (x));
842 struct elt_loc_list *l;
844 if (GET_CODE (y) == VALUE)
845 return e == canonical_cselib_val (CSELIB_VAL_PTR (y));
847 for (l = e->locs; l; l = l->next)
849 rtx t = l->loc;
851 /* Avoid infinite recursion. We know we have the canonical
852 value, so we can just skip any values in the equivalence
853 list. */
854 if (REG_P (t) || MEM_P (t) || GET_CODE (t) == VALUE)
855 continue;
856 else if (rtx_equal_for_cselib_1 (t, y, memmode))
857 return 1;
860 return 0;
862 else if (GET_CODE (y) == VALUE)
864 cselib_val *e = canonical_cselib_val (CSELIB_VAL_PTR (y));
865 struct elt_loc_list *l;
867 for (l = e->locs; l; l = l->next)
869 rtx t = l->loc;
871 if (REG_P (t) || MEM_P (t) || GET_CODE (t) == VALUE)
872 continue;
873 else if (rtx_equal_for_cselib_1 (x, t, memmode))
874 return 1;
877 return 0;
880 if (GET_MODE (x) != GET_MODE (y))
881 return 0;
883 if (GET_CODE (x) != GET_CODE (y))
885 rtx xorig = x, yorig = y;
886 rtx xoff = NULL, yoff = NULL;
888 x = autoinc_split (x, &xoff, memmode);
889 y = autoinc_split (y, &yoff, memmode);
891 if (!xoff != !yoff)
892 return 0;
894 if (xoff && !rtx_equal_for_cselib_1 (xoff, yoff, memmode))
895 return 0;
897 /* Don't recurse if nothing changed. */
898 if (x != xorig || y != yorig)
899 return rtx_equal_for_cselib_1 (x, y, memmode);
901 return 0;
904 /* These won't be handled correctly by the code below. */
905 switch (GET_CODE (x))
907 case CONST_DOUBLE:
908 case CONST_FIXED:
909 case DEBUG_EXPR:
910 return 0;
912 case DEBUG_IMPLICIT_PTR:
913 return DEBUG_IMPLICIT_PTR_DECL (x)
914 == DEBUG_IMPLICIT_PTR_DECL (y);
916 case DEBUG_PARAMETER_REF:
917 return DEBUG_PARAMETER_REF_DECL (x)
918 == DEBUG_PARAMETER_REF_DECL (y);
920 case ENTRY_VALUE:
921 /* ENTRY_VALUEs are function invariant, it is thus undesirable to
922 use rtx_equal_for_cselib_1 to compare the operands. */
923 return rtx_equal_p (ENTRY_VALUE_EXP (x), ENTRY_VALUE_EXP (y));
925 case LABEL_REF:
926 return XEXP (x, 0) == XEXP (y, 0);
928 case MEM:
929 /* We have to compare any autoinc operations in the addresses
930 using this MEM's mode. */
931 return rtx_equal_for_cselib_1 (XEXP (x, 0), XEXP (y, 0), GET_MODE (x));
933 default:
934 break;
937 code = GET_CODE (x);
938 fmt = GET_RTX_FORMAT (code);
940 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
942 int j;
944 switch (fmt[i])
946 case 'w':
947 if (XWINT (x, i) != XWINT (y, i))
948 return 0;
949 break;
951 case 'n':
952 case 'i':
953 if (XINT (x, i) != XINT (y, i))
954 return 0;
955 break;
957 case 'V':
958 case 'E':
959 /* Two vectors must have the same length. */
960 if (XVECLEN (x, i) != XVECLEN (y, i))
961 return 0;
963 /* And the corresponding elements must match. */
964 for (j = 0; j < XVECLEN (x, i); j++)
965 if (! rtx_equal_for_cselib_1 (XVECEXP (x, i, j),
966 XVECEXP (y, i, j), memmode))
967 return 0;
968 break;
970 case 'e':
971 if (i == 1
972 && targetm.commutative_p (x, UNKNOWN)
973 && rtx_equal_for_cselib_1 (XEXP (x, 1), XEXP (y, 0), memmode)
974 && rtx_equal_for_cselib_1 (XEXP (x, 0), XEXP (y, 1), memmode))
975 return 1;
976 if (! rtx_equal_for_cselib_1 (XEXP (x, i), XEXP (y, i), memmode))
977 return 0;
978 break;
980 case 'S':
981 case 's':
982 if (strcmp (XSTR (x, i), XSTR (y, i)))
983 return 0;
984 break;
986 case 'u':
987 /* These are just backpointers, so they don't matter. */
988 break;
990 case '0':
991 case 't':
992 break;
994 /* It is believed that rtx's at this level will never
995 contain anything but integers and other rtx's,
996 except for within LABEL_REFs and SYMBOL_REFs. */
997 default:
998 gcc_unreachable ();
1001 return 1;
1004 /* We need to pass down the mode of constants through the hash table
1005 functions. For that purpose, wrap them in a CONST of the appropriate
1006 mode. */
1007 static rtx
1008 wrap_constant (enum machine_mode mode, rtx x)
1010 if (!CONST_INT_P (x) && GET_CODE (x) != CONST_FIXED
1011 && (GET_CODE (x) != CONST_DOUBLE || GET_MODE (x) != VOIDmode))
1012 return x;
1013 gcc_assert (mode != VOIDmode);
1014 return gen_rtx_CONST (mode, x);
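/* Example (editor's addition, hypothetical): wrap_constant (SImode,
   (const_int 4)) yields (const:SI (const_int 4)), so that SImode and
   DImode uses of the same CONST_INT hash and compare as different
   table entries; rtxes that already carry a mode are returned
   unchanged.  */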
1017 /* Hash an rtx. Return 0 if we couldn't hash the rtx.
1018 For registers and memory locations, we look up their cselib_val structure
1019 and return its VALUE element.
1020 Possible reasons for returning 0 are: the object is volatile, or we couldn't
1021 find a register or memory location in the table and CREATE is zero. If
1022 CREATE is nonzero, table elts are created for regs and mem.
1023 N.B. this hash function returns the same hash value for RTXes that
1024 differ only in the order of operands, thus it is suitable for comparisons
1025 that take commutativity into account.
1026 If we wanted to also support associative rules, we'd have to use a different
1027 strategy to avoid returning spurious 0, e.g. return ~(~0U >> 1).
1028 MEMMODE indicates the mode of an enclosing MEM, and it's only
1029 used to compute autoinc values.
1030 We used to have a MODE argument for hashing for CONST_INTs, but that
1031 didn't make sense, since it caused spurious hash differences between
1032 (set (reg:SI 1) (const_int))
1033 (plus:SI (reg:SI 2) (reg:SI 1))
1034 and
1035 (plus:SI (reg:SI 2) (const_int))
1036 If the mode is important in any context, it must be checked specifically
1037 in a comparison anyway, since relying on hash differences is unsafe. */
1039 static unsigned int
1040 cselib_hash_rtx (rtx x, int create, enum machine_mode memmode)
1042 cselib_val *e;
1043 int i, j;
1044 enum rtx_code code;
1045 const char *fmt;
1046 unsigned int hash = 0;
1048 code = GET_CODE (x);
1049 hash += (unsigned) code + (unsigned) GET_MODE (x);
1051 switch (code)
1053 case VALUE:
1054 e = CSELIB_VAL_PTR (x);
1055 return e->hash;
1057 case MEM:
1058 case REG:
1059 e = cselib_lookup (x, GET_MODE (x), create, memmode);
1060 if (! e)
1061 return 0;
1063 return e->hash;
1065 case DEBUG_EXPR:
1066 hash += ((unsigned) DEBUG_EXPR << 7)
1067 + DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x));
1068 return hash ? hash : (unsigned int) DEBUG_EXPR;
1070 case DEBUG_IMPLICIT_PTR:
1071 hash += ((unsigned) DEBUG_IMPLICIT_PTR << 7)
1072 + DECL_UID (DEBUG_IMPLICIT_PTR_DECL (x));
1073 return hash ? hash : (unsigned int) DEBUG_IMPLICIT_PTR;
1075 case DEBUG_PARAMETER_REF:
1076 hash += ((unsigned) DEBUG_PARAMETER_REF << 7)
1077 + DECL_UID (DEBUG_PARAMETER_REF_DECL (x));
1078 return hash ? hash : (unsigned int) DEBUG_PARAMETER_REF;
1080 case ENTRY_VALUE:
1081 /* ENTRY_VALUEs are function invariant, thus try to avoid
1082 recursing on argument if ENTRY_VALUE is one of the
1083 forms emitted by expand_debug_expr, otherwise
1084 ENTRY_VALUE hash would depend on the current value
1085 in some register or memory. */
1086 if (REG_P (ENTRY_VALUE_EXP (x)))
1087 hash += (unsigned int) REG
1088 + (unsigned int) GET_MODE (ENTRY_VALUE_EXP (x))
1089 + (unsigned int) REGNO (ENTRY_VALUE_EXP (x));
1090 else if (MEM_P (ENTRY_VALUE_EXP (x))
1091 && REG_P (XEXP (ENTRY_VALUE_EXP (x), 0)))
1092 hash += (unsigned int) MEM
1093 + (unsigned int) GET_MODE (XEXP (ENTRY_VALUE_EXP (x), 0))
1094 + (unsigned int) REGNO (XEXP (ENTRY_VALUE_EXP (x), 0));
1095 else
1096 hash += cselib_hash_rtx (ENTRY_VALUE_EXP (x), create, memmode);
1097 return hash ? hash : (unsigned int) ENTRY_VALUE;
1099 case CONST_INT:
1100 hash += ((unsigned) CONST_INT << 7) + INTVAL (x);
1101 return hash ? hash : (unsigned int) CONST_INT;
1103 case CONST_DOUBLE:
1104 /* This is like the general case, except that it only counts
1105 the integers representing the constant. */
1106 hash += (unsigned) code + (unsigned) GET_MODE (x);
1107 if (GET_MODE (x) != VOIDmode)
1108 hash += real_hash (CONST_DOUBLE_REAL_VALUE (x));
1109 else
1110 hash += ((unsigned) CONST_DOUBLE_LOW (x)
1111 + (unsigned) CONST_DOUBLE_HIGH (x));
1112 return hash ? hash : (unsigned int) CONST_DOUBLE;
1114 case CONST_FIXED:
1115 hash += (unsigned int) code + (unsigned int) GET_MODE (x);
1116 hash += fixed_hash (CONST_FIXED_VALUE (x));
1117 return hash ? hash : (unsigned int) CONST_FIXED;
1119 case CONST_VECTOR:
1121 int units;
1122 rtx elt;
1124 units = CONST_VECTOR_NUNITS (x);
1126 for (i = 0; i < units; ++i)
1128 elt = CONST_VECTOR_ELT (x, i);
1129 hash += cselib_hash_rtx (elt, 0, memmode);
1132 return hash;
1135 /* Assume there is only one rtx object for any given label. */
1136 case LABEL_REF:
1137 /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
1138 differences and differences between each stage's debugging dumps. */
1139 hash += (((unsigned int) LABEL_REF << 7)
1140 + CODE_LABEL_NUMBER (XEXP (x, 0)));
1141 return hash ? hash : (unsigned int) LABEL_REF;
1143 case SYMBOL_REF:
1145 /* Don't hash on the symbol's address to avoid bootstrap differences.
1146 Different hash values may cause expressions to be recorded in
1147 different orders and thus different registers to be used in the
1148 final assembler. This also avoids differences in the dump files
1149 between various stages. */
1150 unsigned int h = 0;
1151 const unsigned char *p = (const unsigned char *) XSTR (x, 0);
1153 while (*p)
1154 h += (h << 7) + *p++; /* ??? revisit */
1156 hash += ((unsigned int) SYMBOL_REF << 7) + h;
1157 return hash ? hash : (unsigned int) SYMBOL_REF;
1160 case PRE_DEC:
1161 case PRE_INC:
1162 /* We can't compute these without knowing the MEM mode. */
1163 gcc_assert (memmode != VOIDmode);
1164 i = GET_MODE_SIZE (memmode);
1165 if (code == PRE_DEC)
1166 i = -i;
1167 /* Adjust the hash so that (mem:MEMMODE (pre_* (reg))) hashes
1168 like (mem:MEMMODE (plus (reg) (const_int I))). */
1169 hash += (unsigned) PLUS - (unsigned)code
1170 + cselib_hash_rtx (XEXP (x, 0), create, memmode)
1171 + cselib_hash_rtx (GEN_INT (i), create, memmode);
1172 return hash ? hash : 1 + (unsigned) PLUS;
1174 case PRE_MODIFY:
1175 gcc_assert (memmode != VOIDmode);
1176 return cselib_hash_rtx (XEXP (x, 1), create, memmode);
1178 case POST_DEC:
1179 case POST_INC:
1180 case POST_MODIFY:
1181 gcc_assert (memmode != VOIDmode);
1182 return cselib_hash_rtx (XEXP (x, 0), create, memmode);
1184 case PC:
1185 case CC0:
1186 case CALL:
1187 case UNSPEC_VOLATILE:
1188 return 0;
1190 case ASM_OPERANDS:
1191 if (MEM_VOLATILE_P (x))
1192 return 0;
1194 break;
1196 default:
1197 break;
1200 i = GET_RTX_LENGTH (code) - 1;
1201 fmt = GET_RTX_FORMAT (code);
1202 for (; i >= 0; i--)
1204 switch (fmt[i])
1206 case 'e':
1208 rtx tem = XEXP (x, i);
1209 unsigned int tem_hash = cselib_hash_rtx (tem, create, memmode);
1211 if (tem_hash == 0)
1212 return 0;
1214 hash += tem_hash;
1216 break;
1217 case 'E':
1218 for (j = 0; j < XVECLEN (x, i); j++)
1220 unsigned int tem_hash
1221 = cselib_hash_rtx (XVECEXP (x, i, j), create, memmode);
1223 if (tem_hash == 0)
1224 return 0;
1226 hash += tem_hash;
1228 break;
1230 case 's':
1232 const unsigned char *p = (const unsigned char *) XSTR (x, i);
1234 if (p)
1235 while (*p)
1236 hash += *p++;
1237 break;
1240 case 'i':
1241 hash += XINT (x, i);
1242 break;
1244 case '0':
1245 case 't':
1246 /* unused */
1247 break;
1249 default:
1250 gcc_unreachable ();
1254 return hash ? hash : 1 + (unsigned int) GET_CODE (x);
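/* Property sketch (editor's addition): operand hashes are summed, so
   (plus:SI (reg:SI 1) (reg:SI 2)) and (plus:SI (reg:SI 2) (reg:SI 1))
   hash identically, as promised above; the exact commutativity check
   is left to rtx_equal_for_cselib_1 via entry_and_rtx_equal_p.  */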
1257 /* Create a new value structure for VALUE and initialize it. The mode of the
1258 value is MODE. */
1260 static inline cselib_val *
1261 new_cselib_val (unsigned int hash, enum machine_mode mode, rtx x)
1263 cselib_val *e = (cselib_val *) pool_alloc (cselib_val_pool);
1265 gcc_assert (hash);
1266 gcc_assert (next_uid);
1268 e->hash = hash;
1269 e->uid = next_uid++;
1270 /* We use an alloc pool to allocate this RTL construct because it
1271 accounts for about 8% of the overall memory usage. We know
1272 precisely when we can have VALUE RTXen (when cselib is active)
1273 so we don't need to put them in garbage collected memory.
1274 ??? Why should a VALUE be an RTX in the first place? */
1275 e->val_rtx = (rtx) pool_alloc (value_pool);
1276 memset (e->val_rtx, 0, RTX_HDR_SIZE);
1277 PUT_CODE (e->val_rtx, VALUE);
1278 PUT_MODE (e->val_rtx, mode);
1279 CSELIB_VAL_PTR (e->val_rtx) = e;
1280 e->addr_list = 0;
1281 e->locs = 0;
1282 e->next_containing_mem = 0;
1284 if (dump_file && (dump_flags & TDF_CSELIB))
1286 fprintf (dump_file, "cselib value %u:%u ", e->uid, hash);
1287 if (flag_dump_noaddr || flag_dump_unnumbered)
1288 fputs ("# ", dump_file);
1289 else
1290 fprintf (dump_file, "%p ", (void*)e);
1291 print_rtl_single (dump_file, x);
1292 fputc ('\n', dump_file);
1295 return e;
1298 /* ADDR_ELT is a value that is used as an address. MEM_ELT is the value that
1299 contains the data at this address. X is a MEM that represents the
1300 value. Update the two value structures to represent this situation. */
1302 static void
1303 add_mem_for_addr (cselib_val *addr_elt, cselib_val *mem_elt, rtx x)
1305 struct elt_loc_list *l;
1307 addr_elt = canonical_cselib_val (addr_elt);
1308 mem_elt = canonical_cselib_val (mem_elt);
1310 /* Avoid duplicates. */
1311 for (l = mem_elt->locs; l; l = l->next)
1312 if (MEM_P (l->loc)
1313 && CSELIB_VAL_PTR (XEXP (l->loc, 0)) == addr_elt)
1315 promote_debug_loc (l);
1316 return;
1319 addr_elt->addr_list = new_elt_list (addr_elt->addr_list, mem_elt);
1320 new_elt_loc_list (mem_elt,
1321 replace_equiv_address_nv (x, addr_elt->val_rtx));
1322 if (mem_elt->next_containing_mem == NULL)
1324 mem_elt->next_containing_mem = first_containing_mem;
1325 first_containing_mem = mem_elt;
1329 /* Subroutine of cselib_lookup. Return a value for X, which is a MEM rtx.
1330 If CREATE, make a new one if we haven't seen it before. */
1332 static cselib_val *
1333 cselib_lookup_mem (rtx x, int create)
1335 enum machine_mode mode = GET_MODE (x);
1336 enum machine_mode addr_mode;
1337 void **slot;
1338 cselib_val *addr;
1339 cselib_val *mem_elt;
1340 struct elt_list *l;
1342 if (MEM_VOLATILE_P (x) || mode == BLKmode
1343 || !cselib_record_memory
1344 || (FLOAT_MODE_P (mode) && flag_float_store))
1345 return 0;
1347 addr_mode = GET_MODE (XEXP (x, 0));
1348 if (addr_mode == VOIDmode)
1349 addr_mode = Pmode;
1351 /* Look up the value for the address. */
1352 addr = cselib_lookup (XEXP (x, 0), addr_mode, create, mode);
1353 if (! addr)
1354 return 0;
1356 addr = canonical_cselib_val (addr);
1357 /* Find a value that describes a value of our mode at that address. */
1358 for (l = addr->addr_list; l; l = l->next)
1359 if (GET_MODE (l->elt->val_rtx) == mode)
1361 promote_debug_loc (l->elt->locs);
1362 return l->elt;
1365 if (! create)
1366 return 0;
1368 mem_elt = new_cselib_val (next_uid, mode, x);
1369 add_mem_for_addr (addr, mem_elt, x);
1370 slot = cselib_find_slot (wrap_constant (mode, x), mem_elt->hash,
1371 INSERT, mode);
1372 *slot = mem_elt;
1373 return mem_elt;
1376 /* Search through the possible substitutions in P. We prefer a non-reg
1377 substitution because this allows us to expand the tree further. If
1378 we find just a reg, take the one with the lowest regno. There may be
1379 several non-reg results; we just take the first one because they will
1380 all expand to the same place. */
1382 static rtx
1383 expand_loc (struct elt_loc_list *p, struct expand_value_data *evd,
1384 int max_depth)
1386 rtx reg_result = NULL;
1387 unsigned int regno = UINT_MAX;
1388 struct elt_loc_list *p_in = p;
1390 for (; p; p = p->next)
1392 /* Return these right away to avoid returning stack pointer based
1393 expressions for frame pointer and vice versa, which is something
1394 that would confuse DSE. See the comment in cselib_expand_value_rtx_1
1395 for more details. */
1396 if (REG_P (p->loc)
1397 && (REGNO (p->loc) == STACK_POINTER_REGNUM
1398 || REGNO (p->loc) == FRAME_POINTER_REGNUM
1399 || REGNO (p->loc) == HARD_FRAME_POINTER_REGNUM
1400 || REGNO (p->loc) == cfa_base_preserved_regno))
1401 return p->loc;
1402 /* Avoid infinite recursion trying to expand a reg into
1403 the same reg. */
1404 if ((REG_P (p->loc))
1405 && (REGNO (p->loc) < regno)
1406 && !bitmap_bit_p (evd->regs_active, REGNO (p->loc)))
1408 reg_result = p->loc;
1409 regno = REGNO (p->loc);
1411 /* Avoid infinite recursion and do not try to expand the
1412 value. */
1413 else if (GET_CODE (p->loc) == VALUE
1414 && CSELIB_VAL_PTR (p->loc)->locs == p_in)
1415 continue;
1416 else if (!REG_P (p->loc))
1418 rtx result, note;
1419 if (dump_file && (dump_flags & TDF_CSELIB))
1421 print_inline_rtx (dump_file, p->loc, 0);
1422 fprintf (dump_file, "\n");
1424 if (GET_CODE (p->loc) == LO_SUM
1425 && GET_CODE (XEXP (p->loc, 1)) == SYMBOL_REF
1426 && p->setting_insn
1427 && (note = find_reg_note (p->setting_insn, REG_EQUAL, NULL_RTX))
1428 && XEXP (note, 0) == XEXP (p->loc, 1))
1429 return XEXP (p->loc, 1);
1430 result = cselib_expand_value_rtx_1 (p->loc, evd, max_depth - 1);
1431 if (result)
1432 return result;
1437 if (regno != UINT_MAX)
1439 rtx result;
1440 if (dump_file && (dump_flags & TDF_CSELIB))
1441 fprintf (dump_file, "r%d\n", regno);
1443 result = cselib_expand_value_rtx_1 (reg_result, evd, max_depth - 1);
1444 if (result)
1445 return result;
1448 if (dump_file && (dump_flags & TDF_CSELIB))
1450 if (reg_result)
1452 print_inline_rtx (dump_file, reg_result, 0);
1453 fprintf (dump_file, "\n");
1455 else
1456 fprintf (dump_file, "NULL\n");
1458 return reg_result;
1462 /* Forward substitute and expand an expression out to its roots.
1463 This is the opposite of common subexpression elimination. Because
1464 local value numbering is such a weak optimization, the expanded
1465 expression is pretty much unique (not from a pointer-equality point
1466 of view but from a tree-shape point of view).
1468 This function returns NULL if the expansion fails. The expansion
1469 will fail if there is no value number for one of the operands or if
1470 one of the operands has been overwritten between the current insn
1471 and the beginning of the basic block. For instance x has no
1472 expansion in:
1474 r1 <- r1 + 3
1475 x <- r1 + 8
1477 REGS_ACTIVE is a scratch bitmap that should be clear when passing in.
1478 It is clear on return. */
1480 rtx
1481 cselib_expand_value_rtx (rtx orig, bitmap regs_active, int max_depth)
1483 struct expand_value_data evd;
1485 evd.regs_active = regs_active;
1486 evd.callback = NULL;
1487 evd.callback_arg = NULL;
1488 evd.dummy = false;
1490 return cselib_expand_value_rtx_1 (orig, &evd, max_depth);
1493 /* Same as cselib_expand_value_rtx, but using a callback to try to
1494 resolve some expressions. The CB function should return ORIG if it
1495 can't or does not want to deal with a certain RTX. Any other
1496 return value, including NULL, will be used as the expansion for
1497 VALUE, without any further changes. */
1499 rtx
1500 cselib_expand_value_rtx_cb (rtx orig, bitmap regs_active, int max_depth,
1501 cselib_expand_callback cb, void *data)
1503 struct expand_value_data evd;
1505 evd.regs_active = regs_active;
1506 evd.callback = cb;
1507 evd.callback_arg = data;
1508 evd.dummy = false;
1510 return cselib_expand_value_rtx_1 (orig, &evd, max_depth);
1513 /* Similar to cselib_expand_value_rtx_cb, but no rtxs are actually copied
1514 or simplified. Useful to find out whether cselib_expand_value_rtx_cb
1515 would return NULL or non-NULL, without allocating new rtx. */
1517 bool
1518 cselib_dummy_expand_value_rtx_cb (rtx orig, bitmap regs_active, int max_depth,
1519 cselib_expand_callback cb, void *data)
1521 struct expand_value_data evd;
1523 evd.regs_active = regs_active;
1524 evd.callback = cb;
1525 evd.callback_arg = data;
1526 evd.dummy = true;
1528 return cselib_expand_value_rtx_1 (orig, &evd, max_depth) != NULL;
1531 /* Internal implementation of cselib_expand_value_rtx and
1532 cselib_expand_value_rtx_cb. */
1534 static rtx
1535 cselib_expand_value_rtx_1 (rtx orig, struct expand_value_data *evd,
1536 int max_depth)
1538 rtx copy, scopy;
1539 int i, j;
1540 RTX_CODE code;
1541 const char *format_ptr;
1542 enum machine_mode mode;
1544 code = GET_CODE (orig);
1546 /* For the context of dse, if we end up expanding into a huge tree, we
1547 will not have a useful address, so we might as well just give up
1548 quickly. */
1549 if (max_depth <= 0)
1550 return NULL;
1552 switch (code)
1554 case REG:
1556 struct elt_list *l = REG_VALUES (REGNO (orig));
1558 if (l && l->elt == NULL)
1559 l = l->next;
1560 for (; l; l = l->next)
1561 if (GET_MODE (l->elt->val_rtx) == GET_MODE (orig))
1563 rtx result;
1564 unsigned regno = REGNO (orig);
1566 /* The only thing that we are not willing to do (this
1567 is a requirement of dse and if other potential uses
1568 need this function we should add a parm to control
1569 it) is that we will not substitute the
1570 STACK_POINTER_REGNUM, FRAME_POINTER or the
1571 HARD_FRAME_POINTER.
1573 These expansions confuse the code that notices that
1574 stores into the frame go dead at the end of the
1575 function and that the frame is not affected by calls
1576 to subroutines. If you allow the
1577 STACK_POINTER_REGNUM substitution, then dse will
1578 think that parameter pushing also goes dead which is
1579 wrong. If you allow the FRAME_POINTER or the
1580 HARD_FRAME_POINTER then you lose the opportunity to
1581 make the frame assumptions. */
1582 if (regno == STACK_POINTER_REGNUM
1583 || regno == FRAME_POINTER_REGNUM
1584 || regno == HARD_FRAME_POINTER_REGNUM
1585 || regno == cfa_base_preserved_regno)
1586 return orig;
1588 bitmap_set_bit (evd->regs_active, regno);
1590 if (dump_file && (dump_flags & TDF_CSELIB))
1591 fprintf (dump_file, "expanding: r%d into: ", regno);
1593 result = expand_loc (l->elt->locs, evd, max_depth);
1594 bitmap_clear_bit (evd->regs_active, regno);
1596 if (result)
1597 return result;
1598 else
1599 return orig;
1603 case CONST_INT:
1604 case CONST_DOUBLE:
1605 case CONST_VECTOR:
1606 case SYMBOL_REF:
1607 case CODE_LABEL:
1608 case PC:
1609 case CC0:
1610 case SCRATCH:
1611 /* SCRATCH rtxes must be shared because they represent distinct values. */
1612 return orig;
1613 case CLOBBER:
1614 if (REG_P (XEXP (orig, 0)) && HARD_REGISTER_NUM_P (REGNO (XEXP (orig, 0))))
1615 return orig;
1616 break;
1618 case CONST:
1619 if (shared_const_p (orig))
1620 return orig;
1621 break;
1623 case SUBREG:
1625 rtx subreg;
1627 if (evd->callback)
1629 subreg = evd->callback (orig, evd->regs_active, max_depth,
1630 evd->callback_arg);
1631 if (subreg != orig)
1632 return subreg;
1635 subreg = cselib_expand_value_rtx_1 (SUBREG_REG (orig), evd,
1636 max_depth - 1);
1637 if (!subreg)
1638 return NULL;
1639 scopy = simplify_gen_subreg (GET_MODE (orig), subreg,
1640 GET_MODE (SUBREG_REG (orig)),
1641 SUBREG_BYTE (orig));
1642 if (scopy == NULL
1643 || (GET_CODE (scopy) == SUBREG
1644 && !REG_P (SUBREG_REG (scopy))
1645 && !MEM_P (SUBREG_REG (scopy))))
1646 return NULL;
1648 return scopy;
1651 case VALUE:
1653 rtx result;
1655 if (dump_file && (dump_flags & TDF_CSELIB))
1657 fputs ("\nexpanding ", dump_file);
1658 print_rtl_single (dump_file, orig);
1659 fputs (" into...", dump_file);
1662 if (evd->callback)
1664 result = evd->callback (orig, evd->regs_active, max_depth,
1665 evd->callback_arg);
1667 if (result != orig)
1668 return result;
1671 result = expand_loc (CSELIB_VAL_PTR (orig)->locs, evd, max_depth);
1672 return result;
1675 case DEBUG_EXPR:
1676 if (evd->callback)
1677 return evd->callback (orig, evd->regs_active, max_depth,
1678 evd->callback_arg);
1679 return orig;
1681 default:
1682 break;
1685 /* Copy the various flags, fields, and other information. We assume
1686 that all fields need copying, and then clear the fields that should
1687 not be copied. That is the sensible default behavior, and forces
1688 us to explicitly document why we are *not* copying a flag. */
1689 if (evd->dummy)
1690 copy = NULL;
1691 else
1692 copy = shallow_copy_rtx (orig);
1694 format_ptr = GET_RTX_FORMAT (code);
1696 for (i = 0; i < GET_RTX_LENGTH (code); i++)
1697 switch (*format_ptr++)
1699 case 'e':
1700 if (XEXP (orig, i) != NULL)
1702 rtx result = cselib_expand_value_rtx_1 (XEXP (orig, i), evd,
1703 max_depth - 1);
1704 if (!result)
1705 return NULL;
1706 if (copy)
1707 XEXP (copy, i) = result;
1709 break;
1711 case 'E':
1712 case 'V':
1713 if (XVEC (orig, i) != NULL)
1715 if (copy)
1716 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
1717 for (j = 0; j < XVECLEN (orig, i); j++)
1719 rtx result = cselib_expand_value_rtx_1 (XVECEXP (orig, i, j),
1720 evd, max_depth - 1);
1721 if (!result)
1722 return NULL;
1723 if (copy)
1724 XVECEXP (copy, i, j) = result;
1727 break;
1729 case 't':
1730 case 'w':
1731 case 'i':
1732 case 's':
1733 case 'S':
1734 case 'T':
1735 case 'u':
1736 case 'B':
1737 case '0':
1738 /* These are left unchanged. */
1739 break;
1741 default:
1742 gcc_unreachable ();
1745 if (evd->dummy)
1746 return orig;
1748 mode = GET_MODE (copy);
1749 /* If an operand has been simplified into a CONST_INT, which doesn't
1750 have a mode and whose mode isn't derivable from the whole rtx's mode,
1751 try simplify_*_operation first with the mode from the original's
1752 operand, and as a fallback wrap the CONST_INT into gen_rtx_CONST. */
1753 scopy = copy;
1754 switch (GET_RTX_CLASS (code))
1756 case RTX_UNARY:
1757 if (CONST_INT_P (XEXP (copy, 0))
1758 && GET_MODE (XEXP (orig, 0)) != VOIDmode)
1760 scopy = simplify_unary_operation (code, mode, XEXP (copy, 0),
1761 GET_MODE (XEXP (orig, 0)));
1762 if (scopy)
1763 return scopy;
1765 break;
1766 case RTX_COMM_ARITH:
1767 case RTX_BIN_ARITH:
1768 /* These expressions can derive operand modes from the whole rtx's mode. */
1769 break;
1770 case RTX_TERNARY:
1771 case RTX_BITFIELD_OPS:
1772 if (CONST_INT_P (XEXP (copy, 0))
1773 && GET_MODE (XEXP (orig, 0)) != VOIDmode)
1775 scopy = simplify_ternary_operation (code, mode,
1776 GET_MODE (XEXP (orig, 0)),
1777 XEXP (copy, 0), XEXP (copy, 1),
1778 XEXP (copy, 2));
1779 if (scopy)
1780 return scopy;
1782 break;
1783 case RTX_COMPARE:
1784 case RTX_COMM_COMPARE:
1785 if (CONST_INT_P (XEXP (copy, 0))
1786 && GET_MODE (XEXP (copy, 1)) == VOIDmode
1787 && (GET_MODE (XEXP (orig, 0)) != VOIDmode
1788 || GET_MODE (XEXP (orig, 1)) != VOIDmode))
1790 scopy = simplify_relational_operation (code, mode,
1791 (GET_MODE (XEXP (orig, 0))
1792 != VOIDmode)
1793 ? GET_MODE (XEXP (orig, 0))
1794 : GET_MODE (XEXP (orig, 1)),
1795 XEXP (copy, 0),
1796 XEXP (copy, 1));
1797 if (scopy)
1798 return scopy;
1800 break;
1801 default:
1802 break;
1804 scopy = simplify_rtx (copy);
1805 if (scopy)
1806 return scopy;
1807 return copy;
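/* Example (editor's addition, hypothetical): expanding
   (zero_extend:DI (reg:SI 1)) when r1 expands to (const_int 5) first
   yields (zero_extend:DI (const_int 5)); since the CONST_INT is
   modeless, simplify_unary_operation is retried with the original
   operand's SImode, folding the whole rtx to (const_int 5).  */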
1810 /* Walk rtx X and replace all occurrences of REG and MEM subexpressions
1811 with VALUE expressions. This way, it becomes independent of changes
1812 to registers and memory.
1813 X isn't actually modified; if modifications are needed, new rtl is
1814 allocated. However, the return value can share rtl with X.
1815 If X is within a MEM, MEMMODE must be the mode of the MEM. */
1817 rtx
1818 cselib_subst_to_values (rtx x, enum machine_mode memmode)
1820 enum rtx_code code = GET_CODE (x);
1821 const char *fmt = GET_RTX_FORMAT (code);
1822 cselib_val *e;
1823 struct elt_list *l;
1824 rtx copy = x;
1825 int i;
1827 switch (code)
1829 case REG:
1830 l = REG_VALUES (REGNO (x));
1831 if (l && l->elt == NULL)
1832 l = l->next;
1833 for (; l; l = l->next)
1834 if (GET_MODE (l->elt->val_rtx) == GET_MODE (x))
1835 return l->elt->val_rtx;
1837 gcc_unreachable ();
1839 case MEM:
1840 e = cselib_lookup_mem (x, 0);
1841 /* This used to happen for autoincrements, but we deal with them
1842 properly now. Remove the if stmt for the next release. */
1843 if (! e)
1845 /* Assign a value that doesn't match any other. */
1846 e = new_cselib_val (next_uid, GET_MODE (x), x);
1848 return e->val_rtx;
1850 case ENTRY_VALUE:
1851 e = cselib_lookup (x, GET_MODE (x), 0, memmode);
1852 if (! e)
1853 break;
1854 return e->val_rtx;
1856 case CONST_DOUBLE:
1857 case CONST_VECTOR:
1858 case CONST_INT:
1859 case CONST_FIXED:
1860 return x;
1862 case PRE_DEC:
1863 case PRE_INC:
1864 gcc_assert (memmode != VOIDmode);
1865 i = GET_MODE_SIZE (memmode);
1866 if (code == PRE_DEC)
1867 i = -i;
1868 return cselib_subst_to_values (plus_constant (GET_MODE (x),
1869 XEXP (x, 0), i),
1870 memmode);
1872 case PRE_MODIFY:
1873 gcc_assert (memmode != VOIDmode);
1874 return cselib_subst_to_values (XEXP (x, 1), memmode);
1876 case POST_DEC:
1877 case POST_INC:
1878 case POST_MODIFY:
1879 gcc_assert (memmode != VOIDmode);
1880 return cselib_subst_to_values (XEXP (x, 0), memmode);
1882 default:
1883 break;
1886 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1888 if (fmt[i] == 'e')
1890 rtx t = cselib_subst_to_values (XEXP (x, i), memmode);
1892 if (t != XEXP (x, i))
1894 if (x == copy)
1895 copy = shallow_copy_rtx (x);
1896 XEXP (copy, i) = t;
1899 else if (fmt[i] == 'E')
1901 int j;
1903 for (j = 0; j < XVECLEN (x, i); j++)
1905 rtx t = cselib_subst_to_values (XVECEXP (x, i, j), memmode);
1907 if (t != XVECEXP (x, i, j))
1909 if (XVEC (x, i) == XVEC (copy, i))
1911 if (x == copy)
1912 copy = shallow_copy_rtx (x);
1913 XVEC (copy, i) = shallow_copy_rtvec (XVEC (x, i));
1915 XVECEXP (copy, i, j) = t;
1921 return copy;
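/* Example (editor's addition, hypothetical): substituting in
   (plus:SI (reg:SI 1) (mem:SI (reg:SI 2))) yields
   (plus:SI (value:SI V1) (value:SI V3)), where V1 is the value of
   (reg:SI 1) and V3 is the MEM's value, itself reached through the
   addr_list of V2, the value of (reg:SI 2).  */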
1924 /* Wrapper for cselib_subst_to_values, that indicates X is in INSN. */
1926 rtx
1927 cselib_subst_to_values_from_insn (rtx x, enum machine_mode memmode, rtx insn)
1929 rtx ret;
1930 gcc_assert (!cselib_current_insn);
1931 cselib_current_insn = insn;
1932 ret = cselib_subst_to_values (x, memmode);
1933 cselib_current_insn = NULL;
1934 return ret;
1937 /* Look up the rtl expression X in our tables and return the value it
1938 has. If CREATE is zero, we return NULL if we don't know the value.
1939 Otherwise, we create a new one if possible, using mode MODE if X
1940 doesn't have a mode (i.e. because it's a constant). When X is part
1941 of an address, MEMMODE should be the mode of the enclosing MEM if
1942 we're tracking autoinc expressions. */
1944 static cselib_val *
1945 cselib_lookup_1 (rtx x, enum machine_mode mode,
1946 int create, enum machine_mode memmode)
1948 void **slot;
1949 cselib_val *e;
1950 unsigned int hashval;
1952 if (GET_MODE (x) != VOIDmode)
1953 mode = GET_MODE (x);
1955 if (GET_CODE (x) == VALUE)
1956 return CSELIB_VAL_PTR (x);
1958 if (REG_P (x))
1960 struct elt_list *l;
1961 unsigned int i = REGNO (x);
1963 l = REG_VALUES (i);
1964 if (l && l->elt == NULL)
1965 l = l->next;
1966 for (; l; l = l->next)
1967 if (mode == GET_MODE (l->elt->val_rtx))
1969 promote_debug_loc (l->elt->locs);
1970 return l->elt;
1973 if (! create)
1974 return 0;
1976 if (i < FIRST_PSEUDO_REGISTER)
1978 unsigned int n = hard_regno_nregs[i][mode];
1980 if (n > max_value_regs)
1981 max_value_regs = n;
1984 e = new_cselib_val (next_uid, GET_MODE (x), x);
1985 new_elt_loc_list (e, x);
1986 if (REG_VALUES (i) == 0)
1988 /* Maintain the invariant that the first entry of
1989 REG_VALUES, if present, must be the value used to set the
1990 register, or NULL. */
1991 used_regs[n_used_regs++] = i;
1992 REG_VALUES (i) = new_elt_list (REG_VALUES (i), NULL);
1994 else if (cselib_preserve_constants
1995 && GET_MODE_CLASS (mode) == MODE_INT)
1997 /* During var-tracking, try harder to find equivalences
1998 for SUBREGs. If a setter sets, say, a DImode register
1999 and a user uses that register only in SImode, add a lowpart
2000 subreg location. */
2001 struct elt_list *lwider = NULL;
2002 l = REG_VALUES (i);
2003 if (l && l->elt == NULL)
2004 l = l->next;
2005 for (; l; l = l->next)
2006 if (GET_MODE_CLASS (GET_MODE (l->elt->val_rtx)) == MODE_INT
2007 && GET_MODE_SIZE (GET_MODE (l->elt->val_rtx))
2008 > GET_MODE_SIZE (mode)
2009 && (lwider == NULL
2010 || GET_MODE_SIZE (GET_MODE (l->elt->val_rtx))
2011 < GET_MODE_SIZE (GET_MODE (lwider->elt->val_rtx))))
2013 struct elt_loc_list *el;
2014 if (i < FIRST_PSEUDO_REGISTER
2015 && hard_regno_nregs[i][GET_MODE (l->elt->val_rtx)] != 1)
2016 continue;
2017 for (el = l->elt->locs; el; el = el->next)
2018 if (!REG_P (el->loc))
2019 break;
2020 if (el)
2021 lwider = l;
2023 if (lwider)
2025 rtx sub = lowpart_subreg (mode, lwider->elt->val_rtx,
2026 GET_MODE (lwider->elt->val_rtx));
2027 if (sub)
2028 new_elt_loc_list (e, sub);
2031 REG_VALUES (i)->next = new_elt_list (REG_VALUES (i)->next, e);
2032 slot = cselib_find_slot (x, e->hash, INSERT, memmode);
2033 *slot = e;
2034 return e;
2037 if (MEM_P (x))
2038 return cselib_lookup_mem (x, create);
2040 hashval = cselib_hash_rtx (x, create, memmode);
2041 /* Can't even create if hashing is not possible. */
2042 if (! hashval)
2043 return 0;
2045 slot = cselib_find_slot (wrap_constant (mode, x), hashval,
2046 create ? INSERT : NO_INSERT, memmode);
2047 if (slot == 0)
2048 return 0;
2050 e = (cselib_val *) *slot;
2051 if (e)
2052 return e;
2054 e = new_cselib_val (hashval, mode, x);
2056 /* We have to fill the slot before calling cselib_subst_to_values:
2057 the hash table is inconsistent until we do so, and
2058 cselib_subst_to_values will need to do lookups. */
2059 *slot = (void *) e;
2060 new_elt_loc_list (e, cselib_subst_to_values (x, memmode));
2061 return e;
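/* As a concrete sketch of the MODE_INT case above (register numbers
   and modes are illustrative): if (reg:DI 60) was set earlier and
   carries VALUE v1, a later lookup of (reg:SI 60) creates a new VALUE
   whose location list also receives (subreg:SI v1 0), so var-tracking
   can still match uses of the narrower mode against the wider setter.  */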
/* Wrapper for cselib_lookup, that indicates X is in INSN.  */

cselib_val *
cselib_lookup_from_insn (rtx x, enum machine_mode mode,
			 int create, enum machine_mode memmode, rtx insn)
{
  cselib_val *ret;

  gcc_assert (!cselib_current_insn);
  cselib_current_insn = insn;

  ret = cselib_lookup (x, mode, create, memmode);

  cselib_current_insn = NULL;

  return ret;
}
/* Wrapper for cselib_lookup_1, that logs the lookup result and
   maintains invariants related with debug insns.  */

cselib_val *
cselib_lookup (rtx x, enum machine_mode mode,
	       int create, enum machine_mode memmode)
{
  cselib_val *ret = cselib_lookup_1 (x, mode, create, memmode);

  /* ??? Should we return NULL if we're not to create an entry, the
     found loc is a debug loc and cselib_current_insn is not DEBUG?
     If so, we should also avoid converting val to non-DEBUG; probably
     easiest setting cselib_current_insn to NULL before the call
     above.  */

  if (dump_file && (dump_flags & TDF_CSELIB))
    {
      fputs ("cselib lookup ", dump_file);
      print_inline_rtx (dump_file, x, 2);
      fprintf (dump_file, " => %u:%u\n",
	       ret ? ret->uid : 0,
	       ret ? ret->hash : 0);
    }

  return ret;
}
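/* When cselib dumping is enabled (dump_flags & TDF_CSELIB), the code
   above logs one line per query in the form "cselib lookup <rtx> =>
   uid:hash", e.g. (illustratively)

     cselib lookup (reg:SI 60) => 5:5

   where "0:0" means the lookup failed or creation was not requested.  */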
/* Invalidate any entries in reg_values that overlap REGNO.  This is called
   if REGNO is changing.  MODE is the mode of the assignment to REGNO, which
   is used to determine how many hard registers are being changed.  If MODE
   is VOIDmode, then only REGNO is being changed; this is used when
   invalidating call clobbered registers across a call.  */

static void
cselib_invalidate_regno (unsigned int regno, enum machine_mode mode)
{
  unsigned int endregno;
  unsigned int i;

  /* If we see pseudos after reload, something is _wrong_.  */
  gcc_assert (!reload_completed || regno < FIRST_PSEUDO_REGISTER
	      || reg_renumber[regno] < 0);

  /* Determine the range of registers that must be invalidated.  For
     pseudos, only REGNO is affected.  For hard regs, we must take MODE
     into account, and we must also invalidate lower register numbers
     if they contain values that overlap REGNO.  */
  if (regno < FIRST_PSEUDO_REGISTER)
    {
      gcc_assert (mode != VOIDmode);

      if (regno < max_value_regs)
	i = 0;
      else
	i = regno - max_value_regs;

      endregno = end_hard_regno (mode, regno);
    }
  else
    {
      i = regno;
      endregno = regno + 1;
    }

  for (; i < endregno; i++)
    {
      struct elt_list **l = &REG_VALUES (i);

      /* Go through all known values for this reg; if it overlaps the range
	 we're invalidating, remove the value.  */
      while (*l)
	{
	  cselib_val *v = (*l)->elt;
	  bool had_locs;
	  rtx setting_insn;
	  struct elt_loc_list **p;
	  unsigned int this_last = i;

	  if (i < FIRST_PSEUDO_REGISTER && v != NULL)
	    this_last = end_hard_regno (GET_MODE (v->val_rtx), i) - 1;

	  if (this_last < regno || v == NULL
	      || (v == cfa_base_preserved_val
		  && i == cfa_base_preserved_regno))
	    {
	      l = &(*l)->next;
	      continue;
	    }

	  /* We have an overlap.  */
	  if (*l == REG_VALUES (i))
	    {
	      /* Maintain the invariant that the first entry of
		 REG_VALUES, if present, must be the value used to set
		 the register, or NULL.  This is also nice because
		 then we won't push the same regno onto used_regs
		 multiple times.  */
	      (*l)->elt = NULL;
	      l = &(*l)->next;
	    }
	  else
	    unchain_one_elt_list (l);

	  v = canonical_cselib_val (v);

	  had_locs = v->locs != NULL;
	  setting_insn = v->locs ? v->locs->setting_insn : NULL;

	  /* Now, we clear the mapping from value to reg.  It must exist, so
	     this code will crash intentionally if it doesn't.  */
	  for (p = &v->locs; ; p = &(*p)->next)
	    {
	      rtx x = (*p)->loc;

	      if (REG_P (x) && REGNO (x) == i)
		{
		  unchain_one_elt_loc_list (p);
		  break;
		}
	    }

	  if (had_locs && v->locs == 0 && !PRESERVED_VALUE_P (v->val_rtx))
	    {
	      if (setting_insn && DEBUG_INSN_P (setting_insn))
		n_useless_debug_values++;
	      else
		n_useless_values++;
	    }
	}
    }
}
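/* Example of the hard-register overlap handled above (register numbers
   are illustrative): if (reg:DI 0) holds a value spanning hard regs 0
   and 1, then invalidating (reg:SI 1) must also remove that DImode
   value.  That is why the scan starts at regno - max_value_regs rather
   than at REGNO itself.  */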
/* Invalidate any locations in the table which are changed because of a
   store to MEM_RTX.  If this is called because of a non-const call
   instruction, MEM_RTX is (mem:BLK const0_rtx).  */

static void
cselib_invalidate_mem (rtx mem_rtx)
{
  cselib_val **vp, *v, *next;
  int num_mems = 0;
  rtx mem_addr;

  mem_addr = canon_rtx (get_addr (XEXP (mem_rtx, 0)));
  mem_rtx = canon_rtx (mem_rtx);

  vp = &first_containing_mem;
  for (v = *vp; v != &dummy_val; v = next)
    {
      bool has_mem = false;
      struct elt_loc_list **p = &v->locs;
      bool had_locs = v->locs != NULL;
      rtx setting_insn = v->locs ? v->locs->setting_insn : NULL;

      while (*p)
	{
	  rtx x = (*p)->loc;
	  cselib_val *addr;
	  struct elt_list **mem_chain;

	  /* MEMs may occur in locations only at the top level; below
	     that every MEM or REG is substituted by its VALUE.  */
	  if (!MEM_P (x))
	    {
	      p = &(*p)->next;
	      continue;
	    }
	  if (num_mems < PARAM_VALUE (PARAM_MAX_CSELIB_MEMORY_LOCATIONS)
	      && ! canon_true_dependence (mem_rtx, GET_MODE (mem_rtx),
					  mem_addr, x, NULL_RTX))
	    {
	      has_mem = true;
	      num_mems++;
	      p = &(*p)->next;
	      continue;
	    }

	  /* This one overlaps.  */
	  /* We must have a mapping from this MEM's address to the
	     value (E).  Remove that, too.  */
	  addr = cselib_lookup (XEXP (x, 0), VOIDmode, 0, GET_MODE (x));
	  addr = canonical_cselib_val (addr);
	  gcc_checking_assert (v == canonical_cselib_val (v));
	  mem_chain = &addr->addr_list;
	  for (;;)
	    {
	      cselib_val *canon = canonical_cselib_val ((*mem_chain)->elt);

	      if (canon == v)
		{
		  unchain_one_elt_list (mem_chain);
		  break;
		}

	      /* Record canonicalized elt.  */
	      (*mem_chain)->elt = canon;

	      mem_chain = &(*mem_chain)->next;
	    }

	  unchain_one_elt_loc_list (p);
	}

      if (had_locs && v->locs == 0 && !PRESERVED_VALUE_P (v->val_rtx))
	{
	  if (setting_insn && DEBUG_INSN_P (setting_insn))
	    n_useless_debug_values++;
	  else
	    n_useless_values++;
	}

      next = v->next_containing_mem;
      if (has_mem)
	{
	  *vp = v;
	  vp = &(*vp)->next_containing_mem;
	}
      else
	v->next_containing_mem = NULL;
    }
  *vp = &dummy_val;
}
/* Invalidate DEST, which is being assigned to or clobbered.  */

void
cselib_invalidate_rtx (rtx dest)
{
  while (GET_CODE (dest) == SUBREG
	 || GET_CODE (dest) == ZERO_EXTRACT
	 || GET_CODE (dest) == STRICT_LOW_PART)
    dest = XEXP (dest, 0);

  if (REG_P (dest))
    cselib_invalidate_regno (REGNO (dest), GET_MODE (dest));
  else if (MEM_P (dest))
    cselib_invalidate_mem (dest);
}
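/* Note that the loop above looks through wrappers, so e.g. a store

     (set (strict_low_part (subreg:QI (reg:SI 60) 0)) ...)

   conservatively invalidates everything known about (reg:SI 60);
   cselib_record_sets re-records the low-part equivalence afterwards.  */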
/* A wrapper for cselib_invalidate_rtx to be called via note_stores.  */

static void
cselib_invalidate_rtx_note_stores (rtx dest, const_rtx ignore ATTRIBUTE_UNUSED,
				   void *data ATTRIBUTE_UNUSED)
{
  cselib_invalidate_rtx (dest);
}
/* Record the result of a SET instruction.  DEST is being set; the source
   contains the value described by SRC_ELT.  If DEST is a MEM, DEST_ADDR_ELT
   describes its address.  */

static void
cselib_record_set (rtx dest, cselib_val *src_elt, cselib_val *dest_addr_elt)
{
  int dreg = REG_P (dest) ? (int) REGNO (dest) : -1;

  if (src_elt == 0 || side_effects_p (dest))
    return;

  if (dreg >= 0)
    {
      if (dreg < FIRST_PSEUDO_REGISTER)
	{
	  unsigned int n = hard_regno_nregs[dreg][GET_MODE (dest)];

	  if (n > max_value_regs)
	    max_value_regs = n;
	}

      if (REG_VALUES (dreg) == 0)
	{
	  used_regs[n_used_regs++] = dreg;
	  REG_VALUES (dreg) = new_elt_list (REG_VALUES (dreg), src_elt);
	}
      else
	{
	  /* The register should have been invalidated.  */
	  gcc_assert (REG_VALUES (dreg)->elt == 0);
	  REG_VALUES (dreg)->elt = src_elt;
	}

      if (src_elt->locs == 0 && !PRESERVED_VALUE_P (src_elt->val_rtx))
	n_useless_values--;
      new_elt_loc_list (src_elt, dest);
    }
  else if (MEM_P (dest) && dest_addr_elt != 0
	   && cselib_record_memory)
    {
      if (src_elt->locs == 0 && !PRESERVED_VALUE_P (src_elt->val_rtx))
	n_useless_values--;
      add_mem_for_addr (dest_addr_elt, src_elt, dest);
    }
}
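/* The gcc_assert on REG_VALUES (dreg)->elt above relies on the
   invariant maintained by cselib_invalidate_regno: when the first list
   entry exists, it is either the value last stored in the register or
   NULL after an invalidation, so a SET may simply fill that slot back
   in.  */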
/* Make ELT and X's VALUE equivalent to each other at INSN.  */

void
cselib_add_permanent_equiv (cselib_val *elt, rtx x, rtx insn)
{
  cselib_val *nelt;
  rtx save_cselib_current_insn = cselib_current_insn;

  gcc_checking_assert (elt);
  gcc_checking_assert (PRESERVED_VALUE_P (elt->val_rtx));
  gcc_checking_assert (!side_effects_p (x));

  cselib_current_insn = insn;

  nelt = cselib_lookup (x, GET_MODE (elt->val_rtx), 1, VOIDmode);

  if (nelt != elt)
    {
      cselib_any_perm_equivs = true;

      if (!PRESERVED_VALUE_P (nelt->val_rtx))
	cselib_preserve_value (nelt);

      new_elt_loc_list (nelt, elt->val_rtx);
    }

  cselib_current_insn = save_cselib_current_insn;
}
/* Return TRUE if any permanent equivalences have been recorded since
   the table was last initialized.  */
bool
cselib_have_permanent_equivalences (void)
{
  return cselib_any_perm_equivs;
}
/* There is no good way to determine how many elements there can be
   in a PARALLEL.  Since it's fairly cheap, use a really large number.  */
#define MAX_SETS (FIRST_PSEUDO_REGISTER * 2)

struct cselib_record_autoinc_data
{
  struct cselib_set *sets;
  int n_sets;
};
/* Callback for for_each_inc_dec.  Records in ARG the SETs implied by
   autoinc RTXs: SRC plus SRCOFF if non-NULL is stored in DEST.  */

static int
cselib_record_autoinc_cb (rtx mem ATTRIBUTE_UNUSED, rtx op ATTRIBUTE_UNUSED,
			  rtx dest, rtx src, rtx srcoff, void *arg)
{
  struct cselib_record_autoinc_data *data;
  data = (struct cselib_record_autoinc_data *)arg;

  data->sets[data->n_sets].dest = dest;

  if (srcoff)
    data->sets[data->n_sets].src = gen_rtx_PLUS (GET_MODE (src), src, srcoff);
  else
    data->sets[data->n_sets].src = src;

  data->n_sets++;

  return -1;
}
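/* For instance (mode and offset are illustrative), a side effect like

     (mem:SI (post_inc:SI (reg:SI 60)))

   is reported by for_each_inc_dec with DEST = SRC = (reg:SI 60) and
   SRCOFF = (const_int 4), so the callback above records the implied set

     (set (reg:SI 60) (plus:SI (reg:SI 60) (const_int 4)))  */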
/* Record the effects of any sets and autoincs in INSN.  */
static void
cselib_record_sets (rtx insn)
{
  int n_sets = 0;
  int i;
  struct cselib_set sets[MAX_SETS];
  rtx body = PATTERN (insn);
  rtx cond = 0;
  int n_sets_before_autoinc;
  struct cselib_record_autoinc_data data;

  if (GET_CODE (body) == COND_EXEC)
    {
      cond = COND_EXEC_TEST (body);
      body = COND_EXEC_CODE (body);
    }

  /* Find all sets.  */
  if (GET_CODE (body) == SET)
    {
      sets[0].src = SET_SRC (body);
      sets[0].dest = SET_DEST (body);
      n_sets = 1;
    }
  else if (GET_CODE (body) == PARALLEL)
    {
      /* Look through the PARALLEL and record the values being
	 set, if possible.  Also handle any CLOBBERs.  */
      for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
	{
	  rtx x = XVECEXP (body, 0, i);

	  if (GET_CODE (x) == SET)
	    {
	      sets[n_sets].src = SET_SRC (x);
	      sets[n_sets].dest = SET_DEST (x);
	      n_sets++;
	    }
	}
    }

  if (n_sets == 1
      && MEM_P (sets[0].src)
      && !cselib_record_memory
      && MEM_READONLY_P (sets[0].src))
    {
      rtx note = find_reg_equal_equiv_note (insn);

      if (note && CONSTANT_P (XEXP (note, 0)))
	sets[0].src = XEXP (note, 0);
    }

  data.sets = sets;
  data.n_sets = n_sets_before_autoinc = n_sets;
  for_each_inc_dec (&insn, cselib_record_autoinc_cb, &data);
  n_sets = data.n_sets;

  /* Look up the values that are read.  Do this before invalidating the
     locations that are written.  */
  for (i = 0; i < n_sets; i++)
    {
      rtx dest = sets[i].dest;

      /* A STRICT_LOW_PART can be ignored; we'll record the equivalence for
	 the low part after invalidating any knowledge about larger modes.  */
      if (GET_CODE (sets[i].dest) == STRICT_LOW_PART)
	sets[i].dest = dest = XEXP (dest, 0);

      /* We don't know how to record anything but REG or MEM.  */
      if (REG_P (dest)
	  || (MEM_P (dest) && cselib_record_memory))
	{
	  rtx src = sets[i].src;
	  if (cond)
	    src = gen_rtx_IF_THEN_ELSE (GET_MODE (dest), cond, src, dest);
	  sets[i].src_elt = cselib_lookup (src, GET_MODE (dest), 1, VOIDmode);
	  if (MEM_P (dest))
	    {
	      enum machine_mode address_mode = get_address_mode (dest);

	      sets[i].dest_addr_elt = cselib_lookup (XEXP (dest, 0),
						     address_mode, 1,
						     GET_MODE (dest));
	    }
	  else
	    sets[i].dest_addr_elt = 0;
	}
    }

  if (cselib_record_sets_hook)
    cselib_record_sets_hook (insn, sets, n_sets);

  /* Invalidate all locations written by this insn.  Note that the elts we
     looked up in the previous loop aren't affected, just some of their
     locations may go away.  */
  note_stores (body, cselib_invalidate_rtx_note_stores, NULL);

  for (i = n_sets_before_autoinc; i < n_sets; i++)
    cselib_invalidate_rtx (sets[i].dest);

  /* If this is an asm, look for duplicate sets.  This can happen when the
     user uses the same value as an output multiple times.  This is valid
     if the outputs are not actually used thereafter.  Treat this case as
     if the value isn't actually set.  We do this by smashing the
     destination to pc_rtx, so that we won't record the value later.  */
  if (n_sets >= 2 && asm_noperands (body) >= 0)
    {
      for (i = 0; i < n_sets; i++)
	{
	  rtx dest = sets[i].dest;
	  if (REG_P (dest) || MEM_P (dest))
	    {
	      int j;
	      for (j = i + 1; j < n_sets; j++)
		if (rtx_equal_p (dest, sets[j].dest))
		  {
		    sets[i].dest = pc_rtx;
		    sets[j].dest = pc_rtx;
		  }
	    }
	}
    }

  /* Now enter the equivalences in our tables.  */
  for (i = 0; i < n_sets; i++)
    {
      rtx dest = sets[i].dest;
      if (REG_P (dest)
	  || (MEM_P (dest) && cselib_record_memory))
	cselib_record_set (dest, sets[i].src_elt, sets[i].dest_addr_elt);
    }
}
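/* As an example of the asm handling above: an inline asm that names
   the same register as two of its outputs produces two sets with equal
   destinations; both are smashed to pc_rtx so that neither bogus
   equivalence is entered into the tables.  */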
/* Record the effects of INSN.  */

void
cselib_process_insn (rtx insn)
{
  int i;
  rtx x;

  cselib_current_insn = insn;

  /* Forget everything at a CODE_LABEL, a volatile asm, or a setjmp.  */
  if (LABEL_P (insn)
      || (CALL_P (insn)
	  && find_reg_note (insn, REG_SETJMP, NULL))
      || (NONJUMP_INSN_P (insn)
	  && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
	  && MEM_VOLATILE_P (PATTERN (insn))))
    {
      cselib_reset_table (next_uid);
      cselib_current_insn = NULL_RTX;
      return;
    }

  if (! INSN_P (insn))
    {
      cselib_current_insn = NULL_RTX;
      return;
    }

  /* If this is a call instruction, forget anything stored in a
     call clobbered register, or, if this is not a const call, in
     memory.  */
  if (CALL_P (insn))
    {
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if (call_used_regs[i]
	    || (REG_VALUES (i) && REG_VALUES (i)->elt
		&& HARD_REGNO_CALL_PART_CLOBBERED (i,
		      GET_MODE (REG_VALUES (i)->elt->val_rtx))))
	  cselib_invalidate_regno (i, reg_raw_mode[i]);

      /* Since it is not clear how cselib is going to be used, be
	 conservative here and treat looping pure or const functions
	 as if they were regular functions.  */
      if (RTL_LOOPING_CONST_OR_PURE_CALL_P (insn)
	  || !(RTL_CONST_OR_PURE_CALL_P (insn)))
	cselib_invalidate_mem (callmem);
    }

  cselib_record_sets (insn);

  /* Look for any CLOBBERs in CALL_INSN_FUNCTION_USAGE, but only
     after we have processed the insn.  */
  if (CALL_P (insn))
    for (x = CALL_INSN_FUNCTION_USAGE (insn); x; x = XEXP (x, 1))
      if (GET_CODE (XEXP (x, 0)) == CLOBBER)
	cselib_invalidate_rtx (XEXP (XEXP (x, 0), 0));

  cselib_current_insn = NULL_RTX;

  if (n_useless_values > MAX_USELESS_VALUES
      /* remove_useless_values is linear in the hash table size.  Avoid
	 quadratic behavior for very large hashtables with very few
	 useless elements.  */
      && ((unsigned int)n_useless_values
	  > (cselib_hash_table->n_elements
	     - cselib_hash_table->n_deleted
	     - n_debug_values) / 4))
    remove_useless_values ();
}
/* Initialize cselib for one pass.  The caller must also call
   init_alias_analysis.  */

void
cselib_init (int record_what)
{
  elt_list_pool = create_alloc_pool ("elt_list",
				     sizeof (struct elt_list), 10);
  elt_loc_list_pool = create_alloc_pool ("elt_loc_list",
					 sizeof (struct elt_loc_list), 10);
  cselib_val_pool = create_alloc_pool ("cselib_val_list",
				       sizeof (cselib_val), 10);
  value_pool = create_alloc_pool ("value", RTX_CODE_SIZE (VALUE), 100);
  cselib_record_memory = record_what & CSELIB_RECORD_MEMORY;
  cselib_preserve_constants = record_what & CSELIB_PRESERVE_CONSTANTS;
  cselib_any_perm_equivs = false;

  /* (mem:BLK (scratch)) is a special mechanism to conflict with everything,
     see canon_true_dependence.  This is only created once.  */
  if (! callmem)
    callmem = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));

  cselib_nregs = max_reg_num ();

  /* We preserve reg_values to allow expensive clearing of the whole thing.
     Reallocate it however if it happens to be too large.  */
  if (!reg_values || reg_values_size < cselib_nregs
      || (reg_values_size > 10 && reg_values_size > cselib_nregs * 4))
    {
      free (reg_values);
      /* Some space for newly emitted instructions so we don't end up
	 reallocating in between passes.  */
      reg_values_size = cselib_nregs + (63 + cselib_nregs) / 16;
      reg_values = XCNEWVEC (struct elt_list *, reg_values_size);
    }
  used_regs = XNEWVEC (unsigned int, cselib_nregs);
  n_used_regs = 0;
  cselib_hash_table = htab_create (31, get_value_hash,
				   entry_and_rtx_equal_p, NULL);
  next_uid = 1;
}
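/* A typical use of this module (a sketch; pass boilerplate and
   per-block table resets are omitted) pairs the initialization above
   with alias analysis and the teardown below:

     init_alias_analysis ();
     cselib_init (CSELIB_RECORD_MEMORY);
     FOR_BB_INSNS (bb, insn)
       cselib_process_insn (insn);
     cselib_finish ();
     end_alias_analysis ();  */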
/* Called when the current user is done with cselib.  */

void
cselib_finish (void)
{
  cselib_discard_hook = NULL;
  cselib_preserve_constants = false;
  cselib_any_perm_equivs = false;
  cfa_base_preserved_val = NULL;
  cfa_base_preserved_regno = INVALID_REGNUM;
  free_alloc_pool (elt_list_pool);
  free_alloc_pool (elt_loc_list_pool);
  free_alloc_pool (cselib_val_pool);
  free_alloc_pool (value_pool);
  cselib_clear_table ();
  htab_delete (cselib_hash_table);
  free (used_regs);
  used_regs = 0;
  cselib_hash_table = 0;
  n_useless_values = 0;
  n_useless_debug_values = 0;
  n_debug_values = 0;
  next_uid = 0;
}
/* Dump the cselib_val *X to FILE *info.  */

static int
dump_cselib_val (void **x, void *info)
{
  cselib_val *v = (cselib_val *)*x;
  FILE *out = (FILE *)info;
  bool need_lf = true;

  print_inline_rtx (out, v->val_rtx, 0);

  if (v->locs)
    {
      struct elt_loc_list *l = v->locs;
      if (need_lf)
	{
	  fputc ('\n', out);
	  need_lf = false;
	}
      fputs (" locs:", out);
      do
	{
	  if (l->setting_insn)
	    fprintf (out, "\n  from insn %i ",
		     INSN_UID (l->setting_insn));
	  else
	    fprintf (out, "\n       ");
	  print_inline_rtx (out, l->loc, 4);
	}
      while ((l = l->next));
      fputc ('\n', out);
    }
  else
    {
      fputs (" no locs", out);
      need_lf = true;
    }

  if (v->addr_list)
    {
      struct elt_list *e = v->addr_list;
      if (need_lf)
	{
	  fputc ('\n', out);
	  need_lf = false;
	}
      fputs (" addr list:", out);
      do
	{
	  fputs ("\n  ", out);
	  print_inline_rtx (out, e->elt->val_rtx, 2);
	}
      while ((e = e->next));
      fputc ('\n', out);
    }
  else
    {
      fputs (" no addrs", out);
      need_lf = true;
    }

  if (v->next_containing_mem == &dummy_val)
    fputs (" last mem\n", out);
  else if (v->next_containing_mem)
    {
      fputs (" next mem ", out);
      print_inline_rtx (out, v->next_containing_mem->val_rtx, 2);
      fputc ('\n', out);
    }
  else if (need_lf)
    fputc ('\n', out);

  return 1;
}
/* Dump to OUT everything in the CSELIB table.  */

void
dump_cselib_table (FILE *out)
{
  fprintf (out, "cselib hash table:\n");
  htab_traverse (cselib_hash_table, dump_cselib_val, out);
  if (first_containing_mem != &dummy_val)
    {
      fputs ("first mem ", out);
      print_inline_rtx (out, first_containing_mem->val_rtx, 2);
      fputc ('\n', out);
    }
  fprintf (out, "next uid %i\n", next_uid);
}

#include "gt-cselib.h"