2011-02-01 Richard Guenther <rguenther@suse.de>
[official-gcc.git] / gcc / cselib.c
blob8f5e45b08d2648efb5e9d16f65511e1662f31ac5
1 /* Common subexpression elimination library for GNU compiler.
2 Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 #include "config.h"
23 #include "system.h"
24 #include "coretypes.h"
25 #include "tm.h"
27 #include "rtl.h"
28 #include "tm_p.h"
29 #include "regs.h"
30 #include "hard-reg-set.h"
31 #include "flags.h"
32 #include "insn-config.h"
33 #include "recog.h"
34 #include "function.h"
35 #include "emit-rtl.h"
36 #include "diagnostic-core.h"
37 #include "output.h"
38 #include "ggc.h"
39 #include "hashtab.h"
40 #include "tree-pass.h"
41 #include "cselib.h"
42 #include "params.h"
43 #include "alloc-pool.h"
44 #include "target.h"
45 #include "bitmap.h"
47 /* A list of cselib_val structures. */
48 struct elt_list {
49 struct elt_list *next;
50 cselib_val *elt;
53 static bool cselib_record_memory;
54 static bool cselib_preserve_constants;
55 static int entry_and_rtx_equal_p (const void *, const void *);
56 static hashval_t get_value_hash (const void *);
57 static struct elt_list *new_elt_list (struct elt_list *, cselib_val *);
58 static struct elt_loc_list *new_elt_loc_list (struct elt_loc_list *, rtx);
59 static void unchain_one_value (cselib_val *);
60 static void unchain_one_elt_list (struct elt_list **);
61 static void unchain_one_elt_loc_list (struct elt_loc_list **);
62 static int discard_useless_locs (void **, void *);
63 static int discard_useless_values (void **, void *);
64 static void remove_useless_values (void);
65 static unsigned int cselib_hash_rtx (rtx, int);
66 static cselib_val *new_cselib_val (unsigned int, enum machine_mode, rtx);
67 static void add_mem_for_addr (cselib_val *, cselib_val *, rtx);
68 static cselib_val *cselib_lookup_mem (rtx, int);
69 static void cselib_invalidate_regno (unsigned int, enum machine_mode);
70 static void cselib_invalidate_mem (rtx);
71 static void cselib_record_set (rtx, cselib_val *, cselib_val *);
72 static void cselib_record_sets (rtx);
74 struct expand_value_data
76 bitmap regs_active;
77 cselib_expand_callback callback;
78 void *callback_arg;
79 bool dummy;
82 static rtx cselib_expand_value_rtx_1 (rtx, struct expand_value_data *, int);
84 /* There are three ways in which cselib can look up an rtx:
85 - for a REG, the reg_values table (which is indexed by regno) is used
86 - for a MEM, we recursively look up its address and then follow the
87 addr_list of that value
88 - for everything else, we compute a hash value and go through the hash
89 table. Since different rtx's can still have the same hash value,
90 this involves walking the table entries for a given value and comparing
91 the locations of the entries with the rtx we are looking up. */
93 /* A table that enables us to look up elts by their value. */
94 static htab_t cselib_hash_table;
96 /* This is a global so we don't have to pass this through every function.
97 It is used in new_elt_loc_list to set SETTING_INSN. */
98 static rtx cselib_current_insn;
100 /* The unique id that the next create value will take. */
101 static unsigned int next_uid;
103 /* The number of registers we had when the varrays were last resized. */
104 static unsigned int cselib_nregs;
106 /* Count values without known locations, or with only locations that
107 wouldn't have been known except for debug insns. Whenever this
108 grows too big, we remove these useless values from the table.
110 Counting values with only debug values is a bit tricky. We don't
111 want to increment n_useless_values when we create a value for a
112 debug insn, for this would get n_useless_values out of sync, but we
113 want increment it if all locs in the list that were ever referenced
114 in nondebug insns are removed from the list.
116 In the general case, once we do that, we'd have to stop accepting
117 nondebug expressions in the loc list, to avoid having two values
118 equivalent that, without debug insns, would have been made into
119 separate values. However, because debug insns never introduce
120 equivalences themselves (no assignments), the only means for
121 growing loc lists is through nondebug assignments. If the locs
122 also happen to be referenced in debug insns, it will work just fine.
124 A consequence of this is that there's at most one debug-only loc in
125 each loc list. If we keep it in the first entry, testing whether
126 we have a debug-only loc list takes O(1).
128 Furthermore, since any additional entry in a loc list containing a
129 debug loc would have to come from an assignment (nondebug) that
130 references both the initial debug loc and the newly-equivalent loc,
131 the initial debug loc would be promoted to a nondebug loc, and the
132 loc list would not contain debug locs any more.
134 So the only case we have to be careful with in order to keep
135 n_useless_values in sync between debug and nondebug compilations is
136 to avoid incrementing n_useless_values when removing the single loc
137 from a value that turns out to not appear outside debug values. We
138 increment n_useless_debug_values instead, and leave such values
139 alone until, for other reasons, we garbage-collect useless
140 values. */
141 static int n_useless_values;
142 static int n_useless_debug_values;
144 /* Count values whose locs have been taken exclusively from debug
145 insns for the entire life of the value. */
146 static int n_debug_values;
148 /* Number of useless values before we remove them from the hash table. */
149 #define MAX_USELESS_VALUES 32
151 /* This table maps from register number to values. It does not
152 contain pointers to cselib_val structures, but rather elt_lists.
153 The purpose is to be able to refer to the same register in
154 different modes. The first element of the list defines the mode in
155 which the register was set; if the mode is unknown or the value is
156 no longer valid in that mode, ELT will be NULL for the first
157 element. */
158 static struct elt_list **reg_values;
159 static unsigned int reg_values_size;
160 #define REG_VALUES(i) reg_values[i]
162 /* The largest number of hard regs used by any entry added to the
163 REG_VALUES table. Cleared on each cselib_clear_table() invocation. */
164 static unsigned int max_value_regs;
166 /* Here the set of indices I with REG_VALUES(I) != 0 is saved. This is used
167 in cselib_clear_table() for fast emptying. */
168 static unsigned int *used_regs;
169 static unsigned int n_used_regs;
171 /* We pass this to cselib_invalidate_mem to invalidate all of
172 memory for a non-const call instruction. */
173 static GTY(()) rtx callmem;
175 /* Set by discard_useless_locs if it deleted the last location of any
176 value. */
177 static int values_became_useless;
179 /* Used as stop element of the containing_mem list so we can check
180 presence in the list by checking the next pointer. */
181 static cselib_val dummy_val;
183 /* If non-NULL, value of the eliminated arg_pointer_rtx or frame_pointer_rtx
184 that is constant through the whole function and should never be
185 eliminated. */
186 static cselib_val *cfa_base_preserved_val;
187 static unsigned int cfa_base_preserved_regno;
189 /* Used to list all values that contain memory reference.
190 May or may not contain the useless values - the list is compacted
191 each time memory is invalidated. */
192 static cselib_val *first_containing_mem = &dummy_val;
193 static alloc_pool elt_loc_list_pool, elt_list_pool, cselib_val_pool, value_pool;
195 /* If nonnull, cselib will call this function before freeing useless
196 VALUEs. A VALUE is deemed useless if its "locs" field is null. */
197 void (*cselib_discard_hook) (cselib_val *);
199 /* If nonnull, cselib will call this function before recording sets or
200 even clobbering outputs of INSN. All the recorded sets will be
201 represented in the array sets[n_sets]. new_val_min can be used to
202 tell whether values present in sets are introduced by this
203 instruction. */
204 void (*cselib_record_sets_hook) (rtx insn, struct cselib_set *sets,
205 int n_sets);
207 #define PRESERVED_VALUE_P(RTX) \
208 (RTL_FLAG_CHECK1("PRESERVED_VALUE_P", (RTX), VALUE)->unchanging)
212 /* Allocate a struct elt_list and fill in its two elements with the
213 arguments. */
215 static inline struct elt_list *
216 new_elt_list (struct elt_list *next, cselib_val *elt)
218 struct elt_list *el;
219 el = (struct elt_list *) pool_alloc (elt_list_pool);
220 el->next = next;
221 el->elt = elt;
222 return el;
225 /* Allocate a struct elt_loc_list and fill in its two elements with the
226 arguments. */
228 static inline struct elt_loc_list *
229 new_elt_loc_list (struct elt_loc_list *next, rtx loc)
231 struct elt_loc_list *el;
232 el = (struct elt_loc_list *) pool_alloc (elt_loc_list_pool);
233 el->next = next;
234 el->loc = loc;
235 el->setting_insn = cselib_current_insn;
236 gcc_assert (!next || !next->setting_insn
237 || !DEBUG_INSN_P (next->setting_insn));
239 /* If we're creating the first loc in a debug insn context, we've
240 just created a debug value. Count it. */
241 if (!next && cselib_current_insn && DEBUG_INSN_P (cselib_current_insn))
242 n_debug_values++;
244 return el;
247 /* Promote loc L to a nondebug cselib_current_insn if L is marked as
248 originating from a debug insn, maintaining the debug values
249 count. */
251 static inline void
252 promote_debug_loc (struct elt_loc_list *l)
254 if (l->setting_insn && DEBUG_INSN_P (l->setting_insn)
255 && (!cselib_current_insn || !DEBUG_INSN_P (cselib_current_insn)))
257 n_debug_values--;
258 l->setting_insn = cselib_current_insn;
259 gcc_assert (!l->next);
263 /* The elt_list at *PL is no longer needed. Unchain it and free its
264 storage. */
266 static inline void
267 unchain_one_elt_list (struct elt_list **pl)
269 struct elt_list *l = *pl;
271 *pl = l->next;
272 pool_free (elt_list_pool, l);
275 /* Likewise for elt_loc_lists. */
277 static void
278 unchain_one_elt_loc_list (struct elt_loc_list **pl)
280 struct elt_loc_list *l = *pl;
282 *pl = l->next;
283 pool_free (elt_loc_list_pool, l);
286 /* Likewise for cselib_vals. This also frees the addr_list associated with
287 V. */
289 static void
290 unchain_one_value (cselib_val *v)
292 while (v->addr_list)
293 unchain_one_elt_list (&v->addr_list);
295 pool_free (cselib_val_pool, v);
/* Remove all entries from the hash table.  Also used during
   initialization.  */

void
cselib_clear_table (void)
{
  cselib_reset_table (1);
}
307 /* Remove from hash table all VALUEs except constants. */
309 static int
310 preserve_only_constants (void **x, void *info ATTRIBUTE_UNUSED)
312 cselib_val *v = (cselib_val *)*x;
314 if (v->locs != NULL
315 && v->locs->next == NULL)
317 if (CONSTANT_P (v->locs->loc)
318 && (GET_CODE (v->locs->loc) != CONST
319 || !references_value_p (v->locs->loc, 0)))
320 return 1;
321 if (cfa_base_preserved_val)
323 if (v == cfa_base_preserved_val)
324 return 1;
325 if (GET_CODE (v->locs->loc) == PLUS
326 && CONST_INT_P (XEXP (v->locs->loc, 1))
327 && XEXP (v->locs->loc, 0) == cfa_base_preserved_val->val_rtx)
328 return 1;
332 htab_clear_slot (cselib_hash_table, x);
333 return 1;
336 /* Remove all entries from the hash table, arranging for the next
337 value to be numbered NUM. */
339 void
340 cselib_reset_table (unsigned int num)
342 unsigned int i;
344 max_value_regs = 0;
346 if (cfa_base_preserved_val)
348 unsigned int regno = cfa_base_preserved_regno;
349 unsigned int new_used_regs = 0;
350 for (i = 0; i < n_used_regs; i++)
351 if (used_regs[i] == regno)
353 new_used_regs = 1;
354 continue;
356 else
357 REG_VALUES (used_regs[i]) = 0;
358 gcc_assert (new_used_regs == 1);
359 n_used_regs = new_used_regs;
360 used_regs[0] = regno;
361 max_value_regs
362 = hard_regno_nregs[regno][GET_MODE (cfa_base_preserved_val->locs->loc)];
364 else
366 for (i = 0; i < n_used_regs; i++)
367 REG_VALUES (used_regs[i]) = 0;
368 n_used_regs = 0;
371 if (cselib_preserve_constants)
372 htab_traverse (cselib_hash_table, preserve_only_constants, NULL);
373 else
374 htab_empty (cselib_hash_table);
376 n_useless_values = 0;
377 n_useless_debug_values = 0;
378 n_debug_values = 0;
380 next_uid = num;
382 first_containing_mem = &dummy_val;
385 /* Return the number of the next value that will be generated. */
387 unsigned int
388 cselib_get_next_uid (void)
390 return next_uid;
393 /* The equality test for our hash table. The first argument ENTRY is a table
394 element (i.e. a cselib_val), while the second arg X is an rtx. We know
395 that all callers of htab_find_slot_with_hash will wrap CONST_INTs into a
396 CONST of an appropriate mode. */
398 static int
399 entry_and_rtx_equal_p (const void *entry, const void *x_arg)
401 struct elt_loc_list *l;
402 const cselib_val *const v = (const cselib_val *) entry;
403 rtx x = CONST_CAST_RTX ((const_rtx)x_arg);
404 enum machine_mode mode = GET_MODE (x);
406 gcc_assert (!CONST_INT_P (x) && GET_CODE (x) != CONST_FIXED
407 && (mode != VOIDmode || GET_CODE (x) != CONST_DOUBLE));
409 if (mode != GET_MODE (v->val_rtx))
410 return 0;
412 /* Unwrap X if necessary. */
413 if (GET_CODE (x) == CONST
414 && (CONST_INT_P (XEXP (x, 0))
415 || GET_CODE (XEXP (x, 0)) == CONST_FIXED
416 || GET_CODE (XEXP (x, 0)) == CONST_DOUBLE))
417 x = XEXP (x, 0);
419 /* We don't guarantee that distinct rtx's have different hash values,
420 so we need to do a comparison. */
421 for (l = v->locs; l; l = l->next)
422 if (rtx_equal_for_cselib_p (l->loc, x))
424 promote_debug_loc (l);
425 return 1;
428 return 0;
431 /* The hash function for our hash table. The value is always computed with
432 cselib_hash_rtx when adding an element; this function just extracts the
433 hash value from a cselib_val structure. */
435 static hashval_t
436 get_value_hash (const void *entry)
438 const cselib_val *const v = (const cselib_val *) entry;
439 return v->hash;
442 /* Return true if X contains a VALUE rtx. If ONLY_USELESS is set, we
443 only return true for values which point to a cselib_val whose value
444 element has been set to zero, which implies the cselib_val will be
445 removed. */
448 references_value_p (const_rtx x, int only_useless)
450 const enum rtx_code code = GET_CODE (x);
451 const char *fmt = GET_RTX_FORMAT (code);
452 int i, j;
454 if (GET_CODE (x) == VALUE
455 && (! only_useless || CSELIB_VAL_PTR (x)->locs == 0))
456 return 1;
458 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
460 if (fmt[i] == 'e' && references_value_p (XEXP (x, i), only_useless))
461 return 1;
462 else if (fmt[i] == 'E')
463 for (j = 0; j < XVECLEN (x, i); j++)
464 if (references_value_p (XVECEXP (x, i, j), only_useless))
465 return 1;
468 return 0;
471 /* For all locations found in X, delete locations that reference useless
472 values (i.e. values without any location). Called through
473 htab_traverse. */
475 static int
476 discard_useless_locs (void **x, void *info ATTRIBUTE_UNUSED)
478 cselib_val *v = (cselib_val *)*x;
479 struct elt_loc_list **p = &v->locs;
480 bool had_locs = v->locs != NULL;
481 rtx setting_insn = v->locs ? v->locs->setting_insn : NULL;
483 while (*p)
485 if (references_value_p ((*p)->loc, 1))
486 unchain_one_elt_loc_list (p);
487 else
488 p = &(*p)->next;
491 if (had_locs && v->locs == 0 && !PRESERVED_VALUE_P (v->val_rtx))
493 if (setting_insn && DEBUG_INSN_P (setting_insn))
494 n_useless_debug_values++;
495 else
496 n_useless_values++;
497 values_became_useless = 1;
499 return 1;
502 /* If X is a value with no locations, remove it from the hashtable. */
504 static int
505 discard_useless_values (void **x, void *info ATTRIBUTE_UNUSED)
507 cselib_val *v = (cselib_val *)*x;
509 if (v->locs == 0 && !PRESERVED_VALUE_P (v->val_rtx))
511 if (cselib_discard_hook)
512 cselib_discard_hook (v);
514 CSELIB_VAL_PTR (v->val_rtx) = NULL;
515 htab_clear_slot (cselib_hash_table, x);
516 unchain_one_value (v);
517 n_useless_values--;
520 return 1;
523 /* Clean out useless values (i.e. those which no longer have locations
524 associated with them) from the hash table. */
526 static void
527 remove_useless_values (void)
529 cselib_val **p, *v;
531 /* First pass: eliminate locations that reference the value. That in
532 turn can make more values useless. */
535 values_became_useless = 0;
536 htab_traverse (cselib_hash_table, discard_useless_locs, 0);
538 while (values_became_useless);
540 /* Second pass: actually remove the values. */
542 p = &first_containing_mem;
543 for (v = *p; v != &dummy_val; v = v->next_containing_mem)
544 if (v->locs)
546 *p = v;
547 p = &(*p)->next_containing_mem;
549 *p = &dummy_val;
551 n_useless_values += n_useless_debug_values;
552 n_debug_values -= n_useless_debug_values;
553 n_useless_debug_values = 0;
555 htab_traverse (cselib_hash_table, discard_useless_values, 0);
557 gcc_assert (!n_useless_values);
560 /* Arrange for a value to not be removed from the hash table even if
561 it becomes useless. */
563 void
564 cselib_preserve_value (cselib_val *v)
566 PRESERVED_VALUE_P (v->val_rtx) = 1;
569 /* Test whether a value is preserved. */
571 bool
572 cselib_preserved_value_p (cselib_val *v)
574 return PRESERVED_VALUE_P (v->val_rtx);
577 /* Arrange for a REG value to be assumed constant through the whole function,
578 never invalidated and preserved across cselib_reset_table calls. */
580 void
581 cselib_preserve_cfa_base_value (cselib_val *v, unsigned int regno)
583 if (cselib_preserve_constants
584 && v->locs
585 && REG_P (v->locs->loc))
587 cfa_base_preserved_val = v;
588 cfa_base_preserved_regno = regno;
592 /* Clean all non-constant expressions in the hash table, but retain
593 their values. */
595 void
596 cselib_preserve_only_values (void)
598 int i;
600 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
601 cselib_invalidate_regno (i, reg_raw_mode[i]);
603 cselib_invalidate_mem (callmem);
605 remove_useless_values ();
607 gcc_assert (first_containing_mem == &dummy_val);
610 /* Return the mode in which a register was last set. If X is not a
611 register, return its mode. If the mode in which the register was
612 set is not known, or the value was already clobbered, return
613 VOIDmode. */
615 enum machine_mode
616 cselib_reg_set_mode (const_rtx x)
618 if (!REG_P (x))
619 return GET_MODE (x);
621 if (REG_VALUES (REGNO (x)) == NULL
622 || REG_VALUES (REGNO (x))->elt == NULL)
623 return VOIDmode;
625 return GET_MODE (REG_VALUES (REGNO (x))->elt->val_rtx);
628 /* Return nonzero if we can prove that X and Y contain the same value, taking
629 our gathered information into account. */
632 rtx_equal_for_cselib_p (rtx x, rtx y)
634 enum rtx_code code;
635 const char *fmt;
636 int i;
638 if (REG_P (x) || MEM_P (x))
640 cselib_val *e = cselib_lookup (x, GET_MODE (x), 0);
642 if (e)
643 x = e->val_rtx;
646 if (REG_P (y) || MEM_P (y))
648 cselib_val *e = cselib_lookup (y, GET_MODE (y), 0);
650 if (e)
651 y = e->val_rtx;
654 if (x == y)
655 return 1;
657 if (GET_CODE (x) == VALUE && GET_CODE (y) == VALUE)
658 return CSELIB_VAL_PTR (x) == CSELIB_VAL_PTR (y);
660 if (GET_CODE (x) == VALUE)
662 cselib_val *e = CSELIB_VAL_PTR (x);
663 struct elt_loc_list *l;
665 for (l = e->locs; l; l = l->next)
667 rtx t = l->loc;
669 /* Avoid infinite recursion. */
670 if (REG_P (t) || MEM_P (t))
671 continue;
672 else if (rtx_equal_for_cselib_p (t, y))
673 return 1;
676 return 0;
679 if (GET_CODE (y) == VALUE)
681 cselib_val *e = CSELIB_VAL_PTR (y);
682 struct elt_loc_list *l;
684 for (l = e->locs; l; l = l->next)
686 rtx t = l->loc;
688 if (REG_P (t) || MEM_P (t))
689 continue;
690 else if (rtx_equal_for_cselib_p (x, t))
691 return 1;
694 return 0;
697 if (GET_CODE (x) != GET_CODE (y) || GET_MODE (x) != GET_MODE (y))
698 return 0;
700 /* These won't be handled correctly by the code below. */
701 switch (GET_CODE (x))
703 case CONST_DOUBLE:
704 case CONST_FIXED:
705 case DEBUG_EXPR:
706 return 0;
708 case DEBUG_IMPLICIT_PTR:
709 return DEBUG_IMPLICIT_PTR_DECL (x)
710 == DEBUG_IMPLICIT_PTR_DECL (y);
712 case LABEL_REF:
713 return XEXP (x, 0) == XEXP (y, 0);
715 default:
716 break;
719 code = GET_CODE (x);
720 fmt = GET_RTX_FORMAT (code);
722 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
724 int j;
726 switch (fmt[i])
728 case 'w':
729 if (XWINT (x, i) != XWINT (y, i))
730 return 0;
731 break;
733 case 'n':
734 case 'i':
735 if (XINT (x, i) != XINT (y, i))
736 return 0;
737 break;
739 case 'V':
740 case 'E':
741 /* Two vectors must have the same length. */
742 if (XVECLEN (x, i) != XVECLEN (y, i))
743 return 0;
745 /* And the corresponding elements must match. */
746 for (j = 0; j < XVECLEN (x, i); j++)
747 if (! rtx_equal_for_cselib_p (XVECEXP (x, i, j),
748 XVECEXP (y, i, j)))
749 return 0;
750 break;
752 case 'e':
753 if (i == 1
754 && targetm.commutative_p (x, UNKNOWN)
755 && rtx_equal_for_cselib_p (XEXP (x, 1), XEXP (y, 0))
756 && rtx_equal_for_cselib_p (XEXP (x, 0), XEXP (y, 1)))
757 return 1;
758 if (! rtx_equal_for_cselib_p (XEXP (x, i), XEXP (y, i)))
759 return 0;
760 break;
762 case 'S':
763 case 's':
764 if (strcmp (XSTR (x, i), XSTR (y, i)))
765 return 0;
766 break;
768 case 'u':
769 /* These are just backpointers, so they don't matter. */
770 break;
772 case '0':
773 case 't':
774 break;
776 /* It is believed that rtx's at this level will never
777 contain anything but integers and other rtx's,
778 except for within LABEL_REFs and SYMBOL_REFs. */
779 default:
780 gcc_unreachable ();
783 return 1;
786 /* We need to pass down the mode of constants through the hash table
787 functions. For that purpose, wrap them in a CONST of the appropriate
788 mode. */
789 static rtx
790 wrap_constant (enum machine_mode mode, rtx x)
792 if (!CONST_INT_P (x) && GET_CODE (x) != CONST_FIXED
793 && (GET_CODE (x) != CONST_DOUBLE || GET_MODE (x) != VOIDmode))
794 return x;
795 gcc_assert (mode != VOIDmode);
796 return gen_rtx_CONST (mode, x);
799 /* Hash an rtx. Return 0 if we couldn't hash the rtx.
800 For registers and memory locations, we look up their cselib_val structure
801 and return its VALUE element.
802 Possible reasons for return 0 are: the object is volatile, or we couldn't
803 find a register or memory location in the table and CREATE is zero. If
804 CREATE is nonzero, table elts are created for regs and mem.
805 N.B. this hash function returns the same hash value for RTXes that
806 differ only in the order of operands, thus it is suitable for comparisons
807 that take commutativity into account.
808 If we wanted to also support associative rules, we'd have to use a different
809 strategy to avoid returning spurious 0, e.g. return ~(~0U >> 1) .
810 We used to have a MODE argument for hashing for CONST_INTs, but that
811 didn't make sense, since it caused spurious hash differences between
812 (set (reg:SI 1) (const_int))
813 (plus:SI (reg:SI 2) (reg:SI 1))
815 (plus:SI (reg:SI 2) (const_int))
816 If the mode is important in any context, it must be checked specifically
817 in a comparison anyway, since relying on hash differences is unsafe. */
819 static unsigned int
820 cselib_hash_rtx (rtx x, int create)
822 cselib_val *e;
823 int i, j;
824 enum rtx_code code;
825 const char *fmt;
826 unsigned int hash = 0;
828 code = GET_CODE (x);
829 hash += (unsigned) code + (unsigned) GET_MODE (x);
831 switch (code)
833 case MEM:
834 case REG:
835 e = cselib_lookup (x, GET_MODE (x), create);
836 if (! e)
837 return 0;
839 return e->hash;
841 case DEBUG_EXPR:
842 hash += ((unsigned) DEBUG_EXPR << 7)
843 + DEBUG_TEMP_UID (DEBUG_EXPR_TREE_DECL (x));
844 return hash ? hash : (unsigned int) DEBUG_EXPR;
846 case DEBUG_IMPLICIT_PTR:
847 hash += ((unsigned) DEBUG_IMPLICIT_PTR << 7)
848 + DECL_UID (DEBUG_IMPLICIT_PTR_DECL (x));
849 return hash ? hash : (unsigned int) DEBUG_IMPLICIT_PTR;
851 case CONST_INT:
852 hash += ((unsigned) CONST_INT << 7) + INTVAL (x);
853 return hash ? hash : (unsigned int) CONST_INT;
855 case CONST_DOUBLE:
856 /* This is like the general case, except that it only counts
857 the integers representing the constant. */
858 hash += (unsigned) code + (unsigned) GET_MODE (x);
859 if (GET_MODE (x) != VOIDmode)
860 hash += real_hash (CONST_DOUBLE_REAL_VALUE (x));
861 else
862 hash += ((unsigned) CONST_DOUBLE_LOW (x)
863 + (unsigned) CONST_DOUBLE_HIGH (x));
864 return hash ? hash : (unsigned int) CONST_DOUBLE;
866 case CONST_FIXED:
867 hash += (unsigned int) code + (unsigned int) GET_MODE (x);
868 hash += fixed_hash (CONST_FIXED_VALUE (x));
869 return hash ? hash : (unsigned int) CONST_FIXED;
871 case CONST_VECTOR:
873 int units;
874 rtx elt;
876 units = CONST_VECTOR_NUNITS (x);
878 for (i = 0; i < units; ++i)
880 elt = CONST_VECTOR_ELT (x, i);
881 hash += cselib_hash_rtx (elt, 0);
884 return hash;
887 /* Assume there is only one rtx object for any given label. */
888 case LABEL_REF:
889 /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
890 differences and differences between each stage's debugging dumps. */
891 hash += (((unsigned int) LABEL_REF << 7)
892 + CODE_LABEL_NUMBER (XEXP (x, 0)));
893 return hash ? hash : (unsigned int) LABEL_REF;
895 case SYMBOL_REF:
897 /* Don't hash on the symbol's address to avoid bootstrap differences.
898 Different hash values may cause expressions to be recorded in
899 different orders and thus different registers to be used in the
900 final assembler. This also avoids differences in the dump files
901 between various stages. */
902 unsigned int h = 0;
903 const unsigned char *p = (const unsigned char *) XSTR (x, 0);
905 while (*p)
906 h += (h << 7) + *p++; /* ??? revisit */
908 hash += ((unsigned int) SYMBOL_REF << 7) + h;
909 return hash ? hash : (unsigned int) SYMBOL_REF;
912 case PRE_DEC:
913 case PRE_INC:
914 case POST_DEC:
915 case POST_INC:
916 case POST_MODIFY:
917 case PRE_MODIFY:
918 case PC:
919 case CC0:
920 case CALL:
921 case UNSPEC_VOLATILE:
922 return 0;
924 case ASM_OPERANDS:
925 if (MEM_VOLATILE_P (x))
926 return 0;
928 break;
930 default:
931 break;
934 i = GET_RTX_LENGTH (code) - 1;
935 fmt = GET_RTX_FORMAT (code);
936 for (; i >= 0; i--)
938 switch (fmt[i])
940 case 'e':
942 rtx tem = XEXP (x, i);
943 unsigned int tem_hash = cselib_hash_rtx (tem, create);
945 if (tem_hash == 0)
946 return 0;
948 hash += tem_hash;
950 break;
951 case 'E':
952 for (j = 0; j < XVECLEN (x, i); j++)
954 unsigned int tem_hash
955 = cselib_hash_rtx (XVECEXP (x, i, j), create);
957 if (tem_hash == 0)
958 return 0;
960 hash += tem_hash;
962 break;
964 case 's':
966 const unsigned char *p = (const unsigned char *) XSTR (x, i);
968 if (p)
969 while (*p)
970 hash += *p++;
971 break;
974 case 'i':
975 hash += XINT (x, i);
976 break;
978 case '0':
979 case 't':
980 /* unused */
981 break;
983 default:
984 gcc_unreachable ();
988 return hash ? hash : 1 + (unsigned int) GET_CODE (x);
991 /* Create a new value structure for VALUE and initialize it. The mode of the
992 value is MODE. */
994 static inline cselib_val *
995 new_cselib_val (unsigned int hash, enum machine_mode mode, rtx x)
997 cselib_val *e = (cselib_val *) pool_alloc (cselib_val_pool);
999 gcc_assert (hash);
1000 gcc_assert (next_uid);
1002 e->hash = hash;
1003 e->uid = next_uid++;
1004 /* We use an alloc pool to allocate this RTL construct because it
1005 accounts for about 8% of the overall memory usage. We know
1006 precisely when we can have VALUE RTXen (when cselib is active)
1007 so we don't need to put them in garbage collected memory.
1008 ??? Why should a VALUE be an RTX in the first place? */
1009 e->val_rtx = (rtx) pool_alloc (value_pool);
1010 memset (e->val_rtx, 0, RTX_HDR_SIZE);
1011 PUT_CODE (e->val_rtx, VALUE);
1012 PUT_MODE (e->val_rtx, mode);
1013 CSELIB_VAL_PTR (e->val_rtx) = e;
1014 e->addr_list = 0;
1015 e->locs = 0;
1016 e->next_containing_mem = 0;
1018 if (dump_file && (dump_flags & TDF_DETAILS))
1020 fprintf (dump_file, "cselib value %u:%u ", e->uid, hash);
1021 if (flag_dump_noaddr || flag_dump_unnumbered)
1022 fputs ("# ", dump_file);
1023 else
1024 fprintf (dump_file, "%p ", (void*)e);
1025 print_rtl_single (dump_file, x);
1026 fputc ('\n', dump_file);
1029 return e;
1032 /* ADDR_ELT is a value that is used as address. MEM_ELT is the value that
1033 contains the data at this address. X is a MEM that represents the
1034 value. Update the two value structures to represent this situation. */
1036 static void
1037 add_mem_for_addr (cselib_val *addr_elt, cselib_val *mem_elt, rtx x)
1039 struct elt_loc_list *l;
1041 /* Avoid duplicates. */
1042 for (l = mem_elt->locs; l; l = l->next)
1043 if (MEM_P (l->loc)
1044 && CSELIB_VAL_PTR (XEXP (l->loc, 0)) == addr_elt)
1046 promote_debug_loc (l);
1047 return;
1050 addr_elt->addr_list = new_elt_list (addr_elt->addr_list, mem_elt);
1051 mem_elt->locs
1052 = new_elt_loc_list (mem_elt->locs,
1053 replace_equiv_address_nv (x, addr_elt->val_rtx));
1054 if (mem_elt->next_containing_mem == NULL)
1056 mem_elt->next_containing_mem = first_containing_mem;
1057 first_containing_mem = mem_elt;
1061 /* Subroutine of cselib_lookup. Return a value for X, which is a MEM rtx.
1062 If CREATE, make a new one if we haven't seen it before. */
1064 static cselib_val *
1065 cselib_lookup_mem (rtx x, int create)
1067 enum machine_mode mode = GET_MODE (x);
1068 void **slot;
1069 cselib_val *addr;
1070 cselib_val *mem_elt;
1071 struct elt_list *l;
1073 if (MEM_VOLATILE_P (x) || mode == BLKmode
1074 || !cselib_record_memory
1075 || (FLOAT_MODE_P (mode) && flag_float_store))
1076 return 0;
1078 /* Look up the value for the address. */
1079 addr = cselib_lookup (XEXP (x, 0), mode, create);
1080 if (! addr)
1081 return 0;
1083 /* Find a value that describes a value of our mode at that address. */
1084 for (l = addr->addr_list; l; l = l->next)
1085 if (GET_MODE (l->elt->val_rtx) == mode)
1087 promote_debug_loc (l->elt->locs);
1088 return l->elt;
1091 if (! create)
1092 return 0;
1094 mem_elt = new_cselib_val (next_uid, mode, x);
1095 add_mem_for_addr (addr, mem_elt, x);
1096 slot = htab_find_slot_with_hash (cselib_hash_table, wrap_constant (mode, x),
1097 mem_elt->hash, INSERT);
1098 *slot = mem_elt;
1099 return mem_elt;
/* Search through the possible substitutions in P.  We prefer a non-reg
   substitution because this allows us to expand the tree further.  If
   we find just a reg, take the lowest regno.  There may be several
   non-reg results; we just take the first one because they will all
   expand to the same place.  */

static rtx
expand_loc (struct elt_loc_list *p, struct expand_value_data *evd,
	    int max_depth)
{
  rtx reg_result = NULL;
  unsigned int regno = UINT_MAX;
  /* Remember the head of the list so the VALUE self-reference check
     below can recognize a location that points back at this list.  */
  struct elt_loc_list *p_in = p;

  for (; p; p = p -> next)
    {
      /* Avoid infinite recursion trying to expand a reg into
	 the same reg.  Only the lowest-numbered, not-currently-active
	 register is remembered as the fallback result.  */
      if ((REG_P (p->loc))
	  && (REGNO (p->loc) < regno)
	  && !bitmap_bit_p (evd->regs_active, REGNO (p->loc)))
	{
	  reg_result = p->loc;
	  regno = REGNO (p->loc);
	}
      /* Avoid infinite recursion and do not try to expand the
	 value.  */
      else if (GET_CODE (p->loc) == VALUE
	       && CSELIB_VAL_PTR (p->loc)->locs == p_in)
	continue;
      else if (!REG_P (p->loc))
	{
	  rtx result, note;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      print_inline_rtx (dump_file, p->loc, 0);
	      fprintf (dump_file, "\n");
	    }
	  /* A LO_SUM whose SYMBOL_REF matches the insn's REG_EQUAL note
	     can be replaced by the symbol directly.  */
	  if (GET_CODE (p->loc) == LO_SUM
	      && GET_CODE (XEXP (p->loc, 1)) == SYMBOL_REF
	      && p->setting_insn
	      && (note = find_reg_note (p->setting_insn, REG_EQUAL, NULL_RTX))
	      && XEXP (note, 0) == XEXP (p->loc, 1))
	    return XEXP (p->loc, 1);
	  result = cselib_expand_value_rtx_1 (p->loc, evd, max_depth - 1);
	  if (result)
	    return result;
	}
    }

  /* No non-reg location expanded; fall back to the best register, if any.  */
  if (regno != UINT_MAX)
    {
      rtx result;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "r%d\n", regno);

      result = cselib_expand_value_rtx_1 (reg_result, evd, max_depth - 1);
      if (result)
	return result;
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      if (reg_result)
	{
	  print_inline_rtx (dump_file, reg_result, 0);
	  fprintf (dump_file, "\n");
	}
      else
	fprintf (dump_file, "NULL\n");
    }
  return reg_result;
}
1178 /* Forward substitute and expand an expression out to its roots.
1179 This is the opposite of common subexpression. Because local value
1180 numbering is such a weak optimization, the expanded expression is
1181 pretty much unique (not from a pointer equals point of view but
1182 from a tree shape point of view.
1184 This function returns NULL if the expansion fails. The expansion
1185 will fail if there is no value number for one of the operands or if
1186 one of the operands has been overwritten between the current insn
1187 and the beginning of the basic block. For instance x has no
1188 expansion in:
1190 r1 <- r1 + 3
1191 x <- r1 + 8
1193 REGS_ACTIVE is a scratch bitmap that should be clear when passing in.
1194 It is clear on return. */
1197 cselib_expand_value_rtx (rtx orig, bitmap regs_active, int max_depth)
1199 struct expand_value_data evd;
1201 evd.regs_active = regs_active;
1202 evd.callback = NULL;
1203 evd.callback_arg = NULL;
1204 evd.dummy = false;
1206 return cselib_expand_value_rtx_1 (orig, &evd, max_depth);
1209 /* Same as cselib_expand_value_rtx, but using a callback to try to
1210 resolve some expressions. The CB function should return ORIG if it
1211 can't or does not want to deal with a certain RTX. Any other
1212 return value, including NULL, will be used as the expansion for
1213 VALUE, without any further changes. */
1216 cselib_expand_value_rtx_cb (rtx orig, bitmap regs_active, int max_depth,
1217 cselib_expand_callback cb, void *data)
1219 struct expand_value_data evd;
1221 evd.regs_active = regs_active;
1222 evd.callback = cb;
1223 evd.callback_arg = data;
1224 evd.dummy = false;
1226 return cselib_expand_value_rtx_1 (orig, &evd, max_depth);
1229 /* Similar to cselib_expand_value_rtx_cb, but no rtxs are actually copied
1230 or simplified. Useful to find out whether cselib_expand_value_rtx_cb
1231 would return NULL or non-NULL, without allocating new rtx. */
1233 bool
1234 cselib_dummy_expand_value_rtx_cb (rtx orig, bitmap regs_active, int max_depth,
1235 cselib_expand_callback cb, void *data)
1237 struct expand_value_data evd;
1239 evd.regs_active = regs_active;
1240 evd.callback = cb;
1241 evd.callback_arg = data;
1242 evd.dummy = true;
1244 return cselib_expand_value_rtx_1 (orig, &evd, max_depth) != NULL;
/* Internal implementation of cselib_expand_value_rtx and
   cselib_expand_value_rtx_cb.  */

static rtx
cselib_expand_value_rtx_1 (rtx orig, struct expand_value_data *evd,
			   int max_depth)
{
  rtx copy, scopy;
  int i, j;
  RTX_CODE code;
  const char *format_ptr;
  enum machine_mode mode;

  code = GET_CODE (orig);

  /* For the context of dse, if we end up expanding into a huge tree, we
     will not have a useful address, so we might as well just give up
     quickly.  */
  if (max_depth <= 0)
    return NULL;

  switch (code)
    {
    case REG:
      {
	struct elt_list *l = REG_VALUES (REGNO (orig));

	/* Skip the placeholder entry that marks the setting value.  */
	if (l && l->elt == NULL)
	  l = l->next;
	for (; l; l = l->next)
	  if (GET_MODE (l->elt->val_rtx) == GET_MODE (orig))
	    {
	      rtx result;
	      int regno = REGNO (orig);

	      /* The only thing that we are not willing to do (this
		 is requirement of dse and if others potential uses
		 need this function we should add a parm to control
		 it) is that we will not substitute the
		 STACK_POINTER_REGNUM, FRAME_POINTER or the
		 HARD_FRAME_POINTER.

		 These expansions confuse the code that notices that
		 stores into the frame go dead at the end of the
		 function and that the frame is not affected by calls
		 to subroutines.  If you allow the
		 STACK_POINTER_REGNUM substitution, then dse will
		 think that parameter pushing also goes dead which is
		 wrong.  If you allow the FRAME_POINTER or the
		 HARD_FRAME_POINTER then you lose the opportunity to
		 make the frame assumptions.  */
	      if (regno == STACK_POINTER_REGNUM
		  || regno == FRAME_POINTER_REGNUM
		  || regno == HARD_FRAME_POINTER_REGNUM)
		return orig;

	      bitmap_set_bit (evd->regs_active, regno);

	      if (dump_file && (dump_flags & TDF_DETAILS))
		fprintf (dump_file, "expanding: r%d into: ", regno);

	      result = expand_loc (l->elt->locs, evd, max_depth);
	      bitmap_clear_bit (evd->regs_active, regno);

	      if (result)
		return result;
	      else
		return orig;
	    }
      }
      /* FALLTHRU: a register with no recorded value of matching mode is
	 returned unchanged, like the shared constants below.  */

    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case SCRATCH:
      /* SCRATCH must be shared because they represent distinct values.  */
      return orig;
    case CLOBBER:
      if (REG_P (XEXP (orig, 0)) && HARD_REGISTER_NUM_P (REGNO (XEXP (orig, 0))))
	return orig;
      break;

    case CONST:
      if (shared_const_p (orig))
	return orig;
      break;

    case SUBREG:
      {
	rtx subreg;

	/* Give the caller's callback first chance at the SUBREG.  */
	if (evd->callback)
	  {
	    subreg = evd->callback (orig, evd->regs_active, max_depth,
				    evd->callback_arg);
	    if (subreg != orig)
	      return subreg;
	  }

	subreg = cselib_expand_value_rtx_1 (SUBREG_REG (orig), evd,
					    max_depth - 1);
	if (!subreg)
	  return NULL;
	scopy = simplify_gen_subreg (GET_MODE (orig), subreg,
				     GET_MODE (SUBREG_REG (orig)),
				     SUBREG_BYTE (orig));
	/* Reject a SUBREG of something that is neither REG nor MEM;
	   such a result is not a usable location.  */
	if (scopy == NULL
	    || (GET_CODE (scopy) == SUBREG
		&& !REG_P (SUBREG_REG (scopy))
		&& !MEM_P (SUBREG_REG (scopy))))
	  return NULL;

	return scopy;
      }

    case VALUE:
      {
	rtx result;

	if (dump_file && (dump_flags & TDF_DETAILS))
	  {
	    fputs ("\nexpanding ", dump_file);
	    print_rtl_single (dump_file, orig);
	    fputs (" into...", dump_file);
	  }

	if (evd->callback)
	  {
	    result = evd->callback (orig, evd->regs_active, max_depth,
				    evd->callback_arg);

	    if (result != orig)
	      return result;
	  }

	result = expand_loc (CSELIB_VAL_PTR (orig)->locs, evd, max_depth);
	return result;
      }

    case DEBUG_EXPR:
      if (evd->callback)
	return evd->callback (orig, evd->regs_active, max_depth,
			      evd->callback_arg);
      return orig;

    default:
      break;
    }

  /* Copy the various flags, fields, and other information.  We assume
     that all fields need copying, and then clear the fields that should
     not be copied.  That is the sensible default behavior, and forces
     us to explicitly document why we are *not* copying a flag.  */
  if (evd->dummy)
    copy = NULL;
  else
    copy = shallow_copy_rtx (orig);

  format_ptr = GET_RTX_FORMAT (code);

  /* Recursively expand every operand; COPY is NULL in dummy mode, in
     which case we only verify that each operand is expandable.  */
  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    switch (*format_ptr++)
      {
      case 'e':
	if (XEXP (orig, i) != NULL)
	  {
	    rtx result = cselib_expand_value_rtx_1 (XEXP (orig, i), evd,
						    max_depth - 1);
	    if (!result)
	      return NULL;
	    if (copy)
	      XEXP (copy, i) = result;
	  }
	break;

      case 'E':
      case 'V':
	if (XVEC (orig, i) != NULL)
	  {
	    if (copy)
	      XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
	    for (j = 0; j < XVECLEN (orig, i); j++)
	      {
		rtx result = cselib_expand_value_rtx_1 (XVECEXP (orig, i, j),
							evd, max_depth - 1);
		if (!result)
		  return NULL;
		if (copy)
		  XVECEXP (copy, i, j) = result;
	      }
	  }
	break;

      case 't':
      case 'w':
      case 'i':
      case 's':
      case 'S':
      case 'T':
      case 'u':
      case 'B':
      case '0':
	/* These are left unchanged.  */
	break;

      default:
	gcc_unreachable ();
      }

  if (evd->dummy)
    return orig;

  mode = GET_MODE (copy);
  /* If an operand has been simplified into CONST_INT, which doesn't
     have a mode and the mode isn't derivable from whole rtx's mode,
     try simplify_*_operation first with mode from original's operand
     and as a fallback wrap CONST_INT into gen_rtx_CONST.  */
  scopy = copy;
  switch (GET_RTX_CLASS (code))
    {
    case RTX_UNARY:
      if (CONST_INT_P (XEXP (copy, 0))
	  && GET_MODE (XEXP (orig, 0)) != VOIDmode)
	{
	  scopy = simplify_unary_operation (code, mode, XEXP (copy, 0),
					    GET_MODE (XEXP (orig, 0)));
	  if (scopy)
	    return scopy;
	}
      break;
    case RTX_COMM_ARITH:
    case RTX_BIN_ARITH:
      /* These expressions can derive operand modes from the whole rtx's mode.  */
      break;
    case RTX_TERNARY:
    case RTX_BITFIELD_OPS:
      if (CONST_INT_P (XEXP (copy, 0))
	  && GET_MODE (XEXP (orig, 0)) != VOIDmode)
	{
	  scopy = simplify_ternary_operation (code, mode,
					      GET_MODE (XEXP (orig, 0)),
					      XEXP (copy, 0), XEXP (copy, 1),
					      XEXP (copy, 2));
	  if (scopy)
	    return scopy;
	}
      break;
    case RTX_COMPARE:
    case RTX_COMM_COMPARE:
      if (CONST_INT_P (XEXP (copy, 0))
	  && GET_MODE (XEXP (copy, 1)) == VOIDmode
	  && (GET_MODE (XEXP (orig, 0)) != VOIDmode
	      || GET_MODE (XEXP (orig, 1)) != VOIDmode))
	{
	  scopy = simplify_relational_operation (code, mode,
						 (GET_MODE (XEXP (orig, 0))
						  != VOIDmode)
						 ? GET_MODE (XEXP (orig, 0))
						 : GET_MODE (XEXP (orig, 1)),
						 XEXP (copy, 0),
						 XEXP (copy, 1));
	  if (scopy)
	    return scopy;
	}
      break;
    default:
      break;
    }

  scopy = simplify_rtx (copy);
  if (scopy)
    return scopy;

  return copy;
}
/* Walk rtx X and replace all occurrences of REG and MEM subexpressions
   with VALUE expressions.  This way, it becomes independent of changes
   to registers and memory.
   X isn't actually modified; if modifications are needed, new rtl is
   allocated.  However, the return value can share rtl with X.  */

rtx
cselib_subst_to_values (rtx x)
{
  enum rtx_code code = GET_CODE (x);
  const char *fmt = GET_RTX_FORMAT (code);
  cselib_val *e;
  struct elt_list *l;
  /* COPY stays equal to X until a modification forces a shallow copy;
     this keeps sharing maximal (copy-on-write).  */
  rtx copy = x;
  int i;

  switch (code)
    {
    case REG:
      l = REG_VALUES (REGNO (x));
      /* Skip the placeholder entry that marks the setting value.  */
      if (l && l->elt == NULL)
	l = l->next;
      for (; l; l = l->next)
	if (GET_MODE (l->elt->val_rtx) == GET_MODE (x))
	  return l->elt->val_rtx;

      /* Every register seen here must already have a value.  */
      gcc_unreachable ();

    case MEM:
      e = cselib_lookup_mem (x, 0);
      if (! e)
	{
	  /* This happens for autoincrements.  Assign a value that doesn't
	     match any other.  */
	  e = new_cselib_val (next_uid, GET_MODE (x), x);
	}
      return e->val_rtx;

    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CONST_INT:
    case CONST_FIXED:
      return x;

    case POST_INC:
    case PRE_INC:
    case POST_DEC:
    case PRE_DEC:
    case POST_MODIFY:
    case PRE_MODIFY:
      /* Autoincrement addresses get a fresh, unique value.  */
      e = new_cselib_val (next_uid, GET_MODE (x), x);
      return e->val_rtx;

    default:
      break;
    }

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  rtx t = cselib_subst_to_values (XEXP (x, i));

	  if (t != XEXP (x, i))
	    {
	      if (x == copy)
		copy = shallow_copy_rtx (x);
	      XEXP (copy, i) = t;
	    }
	}
      else if (fmt[i] == 'E')
	{
	  int j;

	  for (j = 0; j < XVECLEN (x, i); j++)
	    {
	      rtx t = cselib_subst_to_values (XVECEXP (x, i, j));

	      if (t != XVECEXP (x, i, j))
		{
		  /* Copy the vector (and X itself) only on first change.  */
		  if (XVEC (x, i) == XVEC (copy, i))
		    {
		      if (x == copy)
			copy = shallow_copy_rtx (x);
		      XVEC (copy, i) = shallow_copy_rtvec (XVEC (x, i));
		    }
		  XVECEXP (copy, i, j) = t;
		}
	    }
	}
    }

  return copy;
}
/* Look up the rtl expression X in our tables and return the value it has.
   If CREATE is zero, we return NULL if we don't know the value.  Otherwise,
   we create a new one if possible, using mode MODE if X doesn't have a mode
   (i.e. because it's a constant).  */

static cselib_val *
cselib_lookup_1 (rtx x, enum machine_mode mode, int create)
{
  void **slot;
  cselib_val *e;
  unsigned int hashval;

  /* X's own mode wins over the caller-supplied MODE.  */
  if (GET_MODE (x) != VOIDmode)
    mode = GET_MODE (x);

  if (GET_CODE (x) == VALUE)
    return CSELIB_VAL_PTR (x);

  if (REG_P (x))
    {
      struct elt_list *l;
      unsigned int i = REGNO (x);

      l = REG_VALUES (i);
      /* Skip the placeholder entry that marks the setting value.  */
      if (l && l->elt == NULL)
	l = l->next;
      for (; l; l = l->next)
	if (mode == GET_MODE (l->elt->val_rtx))
	  {
	    promote_debug_loc (l->elt->locs);
	    return l->elt;
	  }

      if (! create)
	return 0;

      /* Track the widest multi-word hard register seen so that
	 invalidation knows how far back to look.  */
      if (i < FIRST_PSEUDO_REGISTER)
	{
	  unsigned int n = hard_regno_nregs[i][mode];

	  if (n > max_value_regs)
	    max_value_regs = n;
	}

      e = new_cselib_val (next_uid, GET_MODE (x), x);
      e->locs = new_elt_loc_list (e->locs, x);
      if (REG_VALUES (i) == 0)
	{
	  /* Maintain the invariant that the first entry of
	     REG_VALUES, if present, must be the value used to set the
	     register, or NULL.  */
	  used_regs[n_used_regs++] = i;
	  REG_VALUES (i) = new_elt_list (REG_VALUES (i), NULL);
	}
      REG_VALUES (i)->next = new_elt_list (REG_VALUES (i)->next, e);
      slot = htab_find_slot_with_hash (cselib_hash_table, x, e->hash, INSERT);
      *slot = e;
      return e;
    }

  if (MEM_P (x))
    return cselib_lookup_mem (x, create);

  hashval = cselib_hash_rtx (x, create);
  /* Can't even create if hashing is not possible.  */
  if (! hashval)
    return 0;

  slot = htab_find_slot_with_hash (cselib_hash_table, wrap_constant (mode, x),
				   hashval, create ? INSERT : NO_INSERT);
  if (slot == 0)
    return 0;

  e = (cselib_val *) *slot;
  if (e)
    return e;

  e = new_cselib_val (hashval, mode, x);

  /* We have to fill the slot before calling cselib_subst_to_values:
     the hash table is inconsistent until we do so, and
     cselib_subst_to_values will need to do lookups.  */
  *slot = (void *) e;
  e->locs = new_elt_loc_list (e->locs, cselib_subst_to_values (x));
  return e;
}
1707 /* Wrapper for cselib_lookup, that indicates X is in INSN. */
1709 cselib_val *
1710 cselib_lookup_from_insn (rtx x, enum machine_mode mode,
1711 int create, rtx insn)
1713 cselib_val *ret;
1715 gcc_assert (!cselib_current_insn);
1716 cselib_current_insn = insn;
1718 ret = cselib_lookup (x, mode, create);
1720 cselib_current_insn = NULL;
1722 return ret;
1725 /* Wrapper for cselib_lookup_1, that logs the lookup result and
1726 maintains invariants related with debug insns. */
1728 cselib_val *
1729 cselib_lookup (rtx x, enum machine_mode mode, int create)
1731 cselib_val *ret = cselib_lookup_1 (x, mode, create);
1733 /* ??? Should we return NULL if we're not to create an entry, the
1734 found loc is a debug loc and cselib_current_insn is not DEBUG?
1735 If so, we should also avoid converting val to non-DEBUG; probably
1736 easiest setting cselib_current_insn to NULL before the call
1737 above. */
1739 if (dump_file && (dump_flags & TDF_DETAILS))
1741 fputs ("cselib lookup ", dump_file);
1742 print_inline_rtx (dump_file, x, 2);
1743 fprintf (dump_file, " => %u:%u\n",
1744 ret ? ret->uid : 0,
1745 ret ? ret->hash : 0);
1748 return ret;
/* Invalidate any entries in reg_values that overlap REGNO.  This is called
   if REGNO is changing.  MODE is the mode of the assignment to REGNO, which
   is used to determine how many hard registers are being changed.  If MODE
   is VOIDmode, then only REGNO is being changed; this is used when
   invalidating call clobbered registers across a call.  */

static void
cselib_invalidate_regno (unsigned int regno, enum machine_mode mode)
{
  unsigned int endregno;
  unsigned int i;

  /* If we see pseudos after reload, something is _wrong_.  */
  gcc_assert (!reload_completed || regno < FIRST_PSEUDO_REGISTER
	      || reg_renumber[regno] < 0);

  /* Determine the range of registers that must be invalidated.  For
     pseudos, only REGNO is affected.  For hard regs, we must take MODE
     into account, and we must also invalidate lower register numbers
     if they contain values that overlap REGNO.  */
  if (regno < FIRST_PSEUDO_REGISTER)
    {
      gcc_assert (mode != VOIDmode);

      /* max_value_regs bounds how far back a recorded multi-word
	 value may start.  */
      if (regno < max_value_regs)
	i = 0;
      else
	i = regno - max_value_regs;

      endregno = end_hard_regno (mode, regno);
    }
  else
    {
      i = regno;
      endregno = regno + 1;
    }

  for (; i < endregno; i++)
    {
      struct elt_list **l = &REG_VALUES (i);

      /* Go through all known values for this reg; if it overlaps the range
	 we're invalidating, remove the value.  */
      while (*l)
	{
	  cselib_val *v = (*l)->elt;
	  bool had_locs;
	  rtx setting_insn;
	  struct elt_loc_list **p;
	  unsigned int this_last = i;

	  if (i < FIRST_PSEUDO_REGISTER && v != NULL)
	    this_last = end_hard_regno (GET_MODE (v->val_rtx), i) - 1;

	  /* Keep entries that end before REGNO, placeholder entries,
	     and the preserved CFA base value.  */
	  if (this_last < regno || v == NULL
	      || (v == cfa_base_preserved_val
		  && i == cfa_base_preserved_regno))
	    {
	      l = &(*l)->next;
	      continue;
	    }

	  /* We have an overlap.  */
	  if (*l == REG_VALUES (i))
	    {
	      /* Maintain the invariant that the first entry of
		 REG_VALUES, if present, must be the value used to set
		 the register, or NULL.  This is also nice because
		 then we won't push the same regno onto user_regs
		 multiple times.  */
	      (*l)->elt = NULL;
	      l = &(*l)->next;
	    }
	  else
	    unchain_one_elt_list (l);

	  had_locs = v->locs != NULL;
	  setting_insn = v->locs ? v->locs->setting_insn : NULL;

	  /* Now, we clear the mapping from value to reg.  It must exist, so
	     this code will crash intentionally if it doesn't.  */
	  for (p = &v->locs; ; p = &(*p)->next)
	    {
	      rtx x = (*p)->loc;

	      if (REG_P (x) && REGNO (x) == i)
		{
		  unchain_one_elt_loc_list (p);
		  break;
		}
	    }

	  /* Account values that just lost their last location so the
	     periodic garbage collection can find them.  */
	  if (had_locs && v->locs == 0 && !PRESERVED_VALUE_P (v->val_rtx))
	    {
	      if (setting_insn && DEBUG_INSN_P (setting_insn))
		n_useless_debug_values++;
	      else
		n_useless_values++;
	    }
	}
    }
}
1854 /* Return 1 if X has a value that can vary even between two
1855 executions of the program. 0 means X can be compared reliably
1856 against certain constants or near-constants. */
1858 static bool
1859 cselib_rtx_varies_p (const_rtx x ATTRIBUTE_UNUSED, bool from_alias ATTRIBUTE_UNUSED)
1861 /* We actually don't need to verify very hard. This is because
1862 if X has actually changed, we invalidate the memory anyway,
1863 so assume that all common memory addresses are
1864 invariant. */
1865 return 0;
/* Invalidate any locations in the table which are changed because of a
   store to MEM_RTX.  If this is called because of a non-const call
   instruction, MEM_RTX is (mem:BLK const0_rtx).  */

static void
cselib_invalidate_mem (rtx mem_rtx)
{
  cselib_val **vp, *v, *next;
  int num_mems = 0;
  rtx mem_addr;

  mem_addr = canon_rtx (get_addr (XEXP (mem_rtx, 0)));
  mem_rtx = canon_rtx (mem_rtx);

  /* Walk the list of values that contain memory locations, rebuilding
     it in place so that values which lose all their MEM locations are
     dropped from the chain.  */
  vp = &first_containing_mem;
  for (v = *vp; v != &dummy_val; v = next)
    {
      bool has_mem = false;
      struct elt_loc_list **p = &v->locs;
      bool had_locs = v->locs != NULL;
      rtx setting_insn = v->locs ? v->locs->setting_insn : NULL;

      while (*p)
	{
	  rtx x = (*p)->loc;
	  cselib_val *addr;
	  struct elt_list **mem_chain;

	  /* MEMs may occur in locations only at the top level; below
	     that every MEM or REG is substituted by its VALUE.  */
	  if (!MEM_P (x))
	    {
	      p = &(*p)->next;
	      continue;
	    }
	  /* Keep the location if it provably does not overlap the store
	     (bounded by a param to avoid quadratic blow-up).  */
	  if (num_mems < PARAM_VALUE (PARAM_MAX_CSELIB_MEMORY_LOCATIONS)
	      && ! canon_true_dependence (mem_rtx, GET_MODE (mem_rtx), mem_addr,
					  x, NULL_RTX, cselib_rtx_varies_p))
	    {
	      has_mem = true;
	      num_mems++;
	      p = &(*p)->next;
	      continue;
	    }

	  /* This one overlaps.  */
	  /* We must have a mapping from this MEM's address to the
	     value (E).  Remove that, too.  */
	  addr = cselib_lookup (XEXP (x, 0), VOIDmode, 0);
	  mem_chain = &addr->addr_list;
	  for (;;)
	    {
	      if ((*mem_chain)->elt == v)
		{
		  unchain_one_elt_list (mem_chain);
		  break;
		}

	      mem_chain = &(*mem_chain)->next;
	    }

	  unchain_one_elt_loc_list (p);
	}

      /* Account values that just lost their last location.  */
      if (had_locs && v->locs == 0 && !PRESERVED_VALUE_P (v->val_rtx))
	{
	  if (setting_insn && DEBUG_INSN_P (setting_insn))
	    n_useless_debug_values++;
	  else
	    n_useless_values++;
	}

      next = v->next_containing_mem;
      if (has_mem)
	{
	  *vp = v;
	  vp = &(*vp)->next_containing_mem;
	}
      else
	v->next_containing_mem = NULL;
    }
  *vp = &dummy_val;
}
1952 /* Invalidate DEST, which is being assigned to or clobbered. */
1954 void
1955 cselib_invalidate_rtx (rtx dest)
1957 while (GET_CODE (dest) == SUBREG
1958 || GET_CODE (dest) == ZERO_EXTRACT
1959 || GET_CODE (dest) == STRICT_LOW_PART)
1960 dest = XEXP (dest, 0);
1962 if (REG_P (dest))
1963 cselib_invalidate_regno (REGNO (dest), GET_MODE (dest));
1964 else if (MEM_P (dest))
1965 cselib_invalidate_mem (dest);
1967 /* Some machines don't define AUTO_INC_DEC, but they still use push
1968 instructions. We need to catch that case here in order to
1969 invalidate the stack pointer correctly. Note that invalidating
1970 the stack pointer is different from invalidating DEST. */
1971 if (push_operand (dest, GET_MODE (dest)))
1972 cselib_invalidate_rtx (stack_pointer_rtx);
1975 /* A wrapper for cselib_invalidate_rtx to be called via note_stores. */
1977 static void
1978 cselib_invalidate_rtx_note_stores (rtx dest, const_rtx ignore ATTRIBUTE_UNUSED,
1979 void *data ATTRIBUTE_UNUSED)
1981 cselib_invalidate_rtx (dest);
/* Record the result of a SET instruction.  DEST is being set; the source
   contains the value described by SRC_ELT.  If DEST is a MEM, DEST_ADDR_ELT
   describes its address.  */

static void
cselib_record_set (rtx dest, cselib_val *src_elt, cselib_val *dest_addr_elt)
{
  int dreg = REG_P (dest) ? (int) REGNO (dest) : -1;

  /* Nothing to record without a source value; destinations with side
     effects (autoincrement) cannot be tracked either.  */
  if (src_elt == 0 || side_effects_p (dest))
    return;

  if (dreg >= 0)
    {
      /* Track the widest multi-word hard register set so that
	 invalidation knows how far back to look.  */
      if (dreg < FIRST_PSEUDO_REGISTER)
	{
	  unsigned int n = hard_regno_nregs[dreg][GET_MODE (dest)];

	  if (n > max_value_regs)
	    max_value_regs = n;
	}

      if (REG_VALUES (dreg) == 0)
	{
	  used_regs[n_used_regs++] = dreg;
	  REG_VALUES (dreg) = new_elt_list (REG_VALUES (dreg), src_elt);
	}
      else
	{
	  /* The register should have been invalidated.  */
	  gcc_assert (REG_VALUES (dreg)->elt == 0);
	  REG_VALUES (dreg)->elt = src_elt;
	}

      /* The value regains a location, so it is no longer useless.  */
      if (src_elt->locs == 0 && !PRESERVED_VALUE_P (src_elt->val_rtx))
	n_useless_values--;
      src_elt->locs = new_elt_loc_list (src_elt->locs, dest);
    }
  else if (MEM_P (dest) && dest_addr_elt != 0
	   && cselib_record_memory)
    {
      if (src_elt->locs == 0 && !PRESERVED_VALUE_P (src_elt->val_rtx))
	n_useless_values--;
      add_mem_for_addr (dest_addr_elt, src_elt, dest);
    }
}
2031 /* There is no good way to determine how many elements there can be
2032 in a PARALLEL. Since it's fairly cheap, use a really large number. */
2033 #define MAX_SETS (FIRST_PSEUDO_REGISTER * 2)
2035 /* Record the effects of any sets in INSN. */
2036 static void
2037 cselib_record_sets (rtx insn)
2039 int n_sets = 0;
2040 int i;
2041 struct cselib_set sets[MAX_SETS];
2042 rtx body = PATTERN (insn);
2043 rtx cond = 0;
2045 body = PATTERN (insn);
2046 if (GET_CODE (body) == COND_EXEC)
2048 cond = COND_EXEC_TEST (body);
2049 body = COND_EXEC_CODE (body);
2052 /* Find all sets. */
2053 if (GET_CODE (body) == SET)
2055 sets[0].src = SET_SRC (body);
2056 sets[0].dest = SET_DEST (body);
2057 n_sets = 1;
2059 else if (GET_CODE (body) == PARALLEL)
2061 /* Look through the PARALLEL and record the values being
2062 set, if possible. Also handle any CLOBBERs. */
2063 for (i = XVECLEN (body, 0) - 1; i >= 0; --i)
2065 rtx x = XVECEXP (body, 0, i);
2067 if (GET_CODE (x) == SET)
2069 sets[n_sets].src = SET_SRC (x);
2070 sets[n_sets].dest = SET_DEST (x);
2071 n_sets++;
2076 if (n_sets == 1
2077 && MEM_P (sets[0].src)
2078 && !cselib_record_memory
2079 && MEM_READONLY_P (sets[0].src))
2081 rtx note = find_reg_equal_equiv_note (insn);
2083 if (note && CONSTANT_P (XEXP (note, 0)))
2084 sets[0].src = XEXP (note, 0);
2087 /* Look up the values that are read. Do this before invalidating the
2088 locations that are written. */
2089 for (i = 0; i < n_sets; i++)
2091 rtx dest = sets[i].dest;
2093 /* A STRICT_LOW_PART can be ignored; we'll record the equivalence for
2094 the low part after invalidating any knowledge about larger modes. */
2095 if (GET_CODE (sets[i].dest) == STRICT_LOW_PART)
2096 sets[i].dest = dest = XEXP (dest, 0);
2098 /* We don't know how to record anything but REG or MEM. */
2099 if (REG_P (dest)
2100 || (MEM_P (dest) && cselib_record_memory))
2102 rtx src = sets[i].src;
2103 if (cond)
2104 src = gen_rtx_IF_THEN_ELSE (GET_MODE (dest), cond, src, dest);
2105 sets[i].src_elt = cselib_lookup (src, GET_MODE (dest), 1);
2106 if (MEM_P (dest))
2108 enum machine_mode address_mode
2109 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (dest));
2111 sets[i].dest_addr_elt = cselib_lookup (XEXP (dest, 0),
2112 address_mode, 1);
2114 else
2115 sets[i].dest_addr_elt = 0;
2119 if (cselib_record_sets_hook)
2120 cselib_record_sets_hook (insn, sets, n_sets);
2122 /* Invalidate all locations written by this insn. Note that the elts we
2123 looked up in the previous loop aren't affected, just some of their
2124 locations may go away. */
2125 note_stores (body, cselib_invalidate_rtx_note_stores, NULL);
2127 /* If this is an asm, look for duplicate sets. This can happen when the
2128 user uses the same value as an output multiple times. This is valid
2129 if the outputs are not actually used thereafter. Treat this case as
2130 if the value isn't actually set. We do this by smashing the destination
2131 to pc_rtx, so that we won't record the value later. */
2132 if (n_sets >= 2 && asm_noperands (body) >= 0)
2134 for (i = 0; i < n_sets; i++)
2136 rtx dest = sets[i].dest;
2137 if (REG_P (dest) || MEM_P (dest))
2139 int j;
2140 for (j = i + 1; j < n_sets; j++)
2141 if (rtx_equal_p (dest, sets[j].dest))
2143 sets[i].dest = pc_rtx;
2144 sets[j].dest = pc_rtx;
2150 /* Now enter the equivalences in our tables. */
2151 for (i = 0; i < n_sets; i++)
2153 rtx dest = sets[i].dest;
2154 if (REG_P (dest)
2155 || (MEM_P (dest) && cselib_record_memory))
2156 cselib_record_set (dest, sets[i].src_elt, sets[i].dest_addr_elt);
/* Record the effects of INSN.  */

void
cselib_process_insn (rtx insn)
{
  int i;
  rtx x;

  cselib_current_insn = insn;

  /* Forget everything at a CODE_LABEL, a volatile asm, or a setjmp.  */
  if (LABEL_P (insn)
      || (CALL_P (insn)
	  && find_reg_note (insn, REG_SETJMP, NULL))
      || (NONJUMP_INSN_P (insn)
	  && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
	  && MEM_VOLATILE_P (PATTERN (insn))))
    {
      cselib_reset_table (next_uid);
      cselib_current_insn = NULL_RTX;
      return;
    }

  if (! INSN_P (insn))
    {
      cselib_current_insn = NULL_RTX;
      return;
    }

  /* If this is a call instruction, forget anything stored in a
     call clobbered register, or, if this is not a const call, in
     memory.  */
  if (CALL_P (insn))
    {
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if (call_used_regs[i]
	    || (REG_VALUES (i) && REG_VALUES (i)->elt
		&& HARD_REGNO_CALL_PART_CLOBBERED (i,
		      GET_MODE (REG_VALUES (i)->elt->val_rtx))))
	  cselib_invalidate_regno (i, reg_raw_mode[i]);

      /* Since it is not clear how cselib is going to be used, be
	 conservative here and treat looping pure or const functions
	 as if they were regular functions.  */
      if (RTL_LOOPING_CONST_OR_PURE_CALL_P (insn)
	  || !(RTL_CONST_OR_PURE_CALL_P (insn)))
	cselib_invalidate_mem (callmem);
    }

  cselib_record_sets (insn);

#ifdef AUTO_INC_DEC
  /* Clobber any registers which appear in REG_INC notes.  We
     could keep track of the changes to their values, but it is
     unlikely to help.  */
  for (x = REG_NOTES (insn); x; x = XEXP (x, 1))
    if (REG_NOTE_KIND (x) == REG_INC)
      cselib_invalidate_rtx (XEXP (x, 0));
#endif

  /* Look for any CLOBBERs in CALL_INSN_FUNCTION_USAGE, but only
     after we have processed the insn.  */
  if (CALL_P (insn))
    for (x = CALL_INSN_FUNCTION_USAGE (insn); x; x = XEXP (x, 1))
      if (GET_CODE (XEXP (x, 0)) == CLOBBER)
	cselib_invalidate_rtx (XEXP (XEXP (x, 0), 0));

  cselib_current_insn = NULL_RTX;

  if (n_useless_values > MAX_USELESS_VALUES
      /* remove_useless_values is linear in the hash table size.  Avoid
	 quadratic behavior for very large hashtables with very few
	 useless elements.  */
      && ((unsigned int)n_useless_values
	  > (cselib_hash_table->n_elements
	     - cselib_hash_table->n_deleted
	     - n_debug_values) / 4))
    remove_useless_values ();
}
2240 /* Initialize cselib for one pass. The caller must also call
2241 init_alias_analysis. */
2243 void
2244 cselib_init (int record_what)
2246 elt_list_pool = create_alloc_pool ("elt_list",
2247 sizeof (struct elt_list), 10);
2248 elt_loc_list_pool = create_alloc_pool ("elt_loc_list",
2249 sizeof (struct elt_loc_list), 10);
2250 cselib_val_pool = create_alloc_pool ("cselib_val_list",
2251 sizeof (cselib_val), 10);
2252 value_pool = create_alloc_pool ("value", RTX_CODE_SIZE (VALUE), 100);
2253 cselib_record_memory = record_what & CSELIB_RECORD_MEMORY;
2254 cselib_preserve_constants = record_what & CSELIB_PRESERVE_CONSTANTS;
2256 /* (mem:BLK (scratch)) is a special mechanism to conflict with everything,
2257 see canon_true_dependence. This is only created once. */
2258 if (! callmem)
2259 callmem = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
2261 cselib_nregs = max_reg_num ();
2263 /* We preserve reg_values to allow expensive clearing of the whole thing.
2264 Reallocate it however if it happens to be too large. */
2265 if (!reg_values || reg_values_size < cselib_nregs
2266 || (reg_values_size > 10 && reg_values_size > cselib_nregs * 4))
2268 if (reg_values)
2269 free (reg_values);
2270 /* Some space for newly emit instructions so we don't end up
2271 reallocating in between passes. */
2272 reg_values_size = cselib_nregs + (63 + cselib_nregs) / 16;
2273 reg_values = XCNEWVEC (struct elt_list *, reg_values_size);
2275 used_regs = XNEWVEC (unsigned int, cselib_nregs);
2276 n_used_regs = 0;
2277 cselib_hash_table = htab_create (31, get_value_hash,
2278 entry_and_rtx_equal_p, NULL);
2279 next_uid = 1;
2282 /* Called when the current user is done with cselib. */
2284 void
2285 cselib_finish (void)
2287 cselib_discard_hook = NULL;
2288 cselib_preserve_constants = false;
2289 cfa_base_preserved_val = NULL;
2290 cfa_base_preserved_regno = INVALID_REGNUM;
2291 free_alloc_pool (elt_list_pool);
2292 free_alloc_pool (elt_loc_list_pool);
2293 free_alloc_pool (cselib_val_pool);
2294 free_alloc_pool (value_pool);
2295 cselib_clear_table ();
2296 htab_delete (cselib_hash_table);
2297 free (used_regs);
2298 used_regs = 0;
2299 cselib_hash_table = 0;
2300 n_useless_values = 0;
2301 n_useless_debug_values = 0;
2302 n_debug_values = 0;
2303 next_uid = 0;
/* Dump the cselib_val *X to FILE *info.  Callback for htab_traverse;
   always returns 1 so the traversal continues.  */

static int
dump_cselib_val (void **x, void *info)
{
  cselib_val *v = (cselib_val *)*x;
  FILE *out = (FILE *)info;
  /* NEED_LF tracks whether a newline is still owed before the next
     section of output.  */
  bool need_lf = true;

  print_inline_rtx (out, v->val_rtx, 0);

  if (v->locs)
    {
      struct elt_loc_list *l = v->locs;
      if (need_lf)
	{
	  fputc ('\n', out);
	  need_lf = false;
	}
      fputs (" locs:", out);
      do
	{
	  fprintf (out, "\n from insn %i ",
		   INSN_UID (l->setting_insn));
	  print_inline_rtx (out, l->loc, 4);
	}
      while ((l = l->next));
      fputc ('\n', out);
    }
  else
    {
      fputs (" no locs", out);
      need_lf = true;
    }

  if (v->addr_list)
    {
      struct elt_list *e = v->addr_list;
      if (need_lf)
	{
	  fputc ('\n', out);
	  need_lf = false;
	}
      fputs (" addr list:", out);
      do
	{
	  fputs ("\n ", out);
	  print_inline_rtx (out, e->elt->val_rtx, 2);
	}
      while ((e = e->next));
      fputc ('\n', out);
    }
  else
    {
      fputs (" no addrs", out);
      need_lf = true;
    }

  if (v->next_containing_mem == &dummy_val)
    fputs (" last mem\n", out);
  else if (v->next_containing_mem)
    {
      fputs (" next mem ", out);
      print_inline_rtx (out, v->next_containing_mem->val_rtx, 2);
      fputc ('\n', out);
    }
  else if (need_lf)
    fputc ('\n', out);

  return 1;
}
2378 /* Dump to OUT everything in the CSELIB table. */
2380 void
2381 dump_cselib_table (FILE *out)
2383 fprintf (out, "cselib hash table:\n");
2384 htab_traverse (cselib_hash_table, dump_cselib_val, out);
2385 if (first_containing_mem != &dummy_val)
2387 fputs ("first mem ", out);
2388 print_inline_rtx (out, first_containing_mem->val_rtx, 2);
2389 fputc ('\n', out);
2391 fprintf (out, "next uid %i\n", next_uid);
2394 #include "gt-cselib.h"