match_asm_constraints: Use copy_rtx where needed (PR88001)
gcc/reginfo.c
/* Compute different info about registers.
   Copyright (C) 1987-2018 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This file contains the regscan pass of the compiler and passes for
   dealing with info about modes of pseudo-registers inside
   subregisters.  It also defines some tables of information about the
   hardware registers, the function init_reg_sets to initialize the
   tables, and other auxiliary functions to deal with info about
   registers and their classes.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "df.h"
#include "memmodel.h"
#include "tm_p.h"
#include "insn-config.h"
#include "regs.h"
#include "ira.h"
#include "recog.h"
#include "diagnostic-core.h"
#include "reload.h"
#include "output.h"
#include "tree-pass.h"

/* Maximum register number used in this function, plus one.  */

int max_regno;

/* Used to cache the results of simplifiable_subregs.  SHAPE is the input
   parameter and SIMPLIFIABLE_REGS is the result.  */
struct simplifiable_subreg
{
  simplifiable_subreg (const subreg_shape &);

  subreg_shape shape;
  HARD_REG_SET simplifiable_regs;
};

struct target_hard_regs default_target_hard_regs;
struct target_regs default_target_regs;
#if SWITCHABLE_TARGET
struct target_hard_regs *this_target_hard_regs = &default_target_hard_regs;
struct target_regs *this_target_regs = &default_target_regs;
#endif

/* Data for initializing fixed_regs.  */
static const char initial_fixed_regs[] = FIXED_REGISTERS;

/* Data for initializing call_used_regs.  */
static const char initial_call_used_regs[] = CALL_USED_REGISTERS;

#ifdef CALL_REALLY_USED_REGISTERS
/* Data for initializing call_really_used_regs.  */
static const char initial_call_really_used_regs[] = CALL_REALLY_USED_REGISTERS;
#endif

#ifdef CALL_REALLY_USED_REGISTERS
#define CALL_REALLY_USED_REGNO_P(X)  call_really_used_regs[X]
#else
#define CALL_REALLY_USED_REGNO_P(X)  call_used_regs[X]
#endif

/* Indexed by hard register number, contains 1 for registers
   that are being used for global register decls.
   These must be exempt from ordinary flow analysis
   and are also considered fixed.  */
char global_regs[FIRST_PSEUDO_REGISTER];

/* Declaration for the global register.  */
tree global_regs_decl[FIRST_PSEUDO_REGISTER];

/* Same information as REGS_INVALIDATED_BY_CALL but in regset form to be used
   in dataflow more conveniently.  */
regset regs_invalidated_by_call_regset;

/* Same information as FIXED_REG_SET but in regset form.  */
regset fixed_reg_set_regset;

/* The bitmap_obstack is used to hold some static variables that
   should not be reset after each function is compiled.  */
static bitmap_obstack persistent_obstack;

/* Used to initialize reg_alloc_order.  */
#ifdef REG_ALLOC_ORDER
static int initial_reg_alloc_order[FIRST_PSEUDO_REGISTER] = REG_ALLOC_ORDER;
#endif

/* The same information, but as an array of unsigned ints.  We copy from
   these unsigned ints to the table above.  We do this so the tm.h files
   do not have to be aware of the wordsize for machines with <= 64 regs.
   Note that we hard-code 32 here, not HOST_BITS_PER_INT.  */
#define N_REG_INTS  \
  ((FIRST_PSEUDO_REGISTER + (32 - 1)) / 32)

static const unsigned int_reg_class_contents[N_REG_CLASSES][N_REG_INTS]
  = REG_CLASS_CONTENTS;

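/* Illustrative sketch, not part of the original file: how a hard register
   number J maps into the packed words of REG_CLASS_CONTENTS above.  The
   helper name is hypothetical; init_reg_sets below open-codes the same
   test when it unpacks the table into HARD_REG_SETs.  */
static inline bool
int_reg_class_contains_p (unsigned int rclass, unsigned int j)
{
  /* Word J / 32 holds bit J % 32; the 32 is hard-coded to match
     N_REG_INTS above.  */
  return (int_reg_class_contents[rclass][j / 32]
          & ((unsigned) 1 << (j % 32))) != 0;
}
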
/* Array containing all of the register names.  */
static const char *const initial_reg_names[] = REGISTER_NAMES;

/* Array containing all of the register class names.  */
const char * reg_class_names[] = REG_CLASS_NAMES;

/* No more global register variables may be declared; true once
   reginfo has been initialized.  */
static int no_global_reg_vars = 0;

/* Given a register bitmap, turn on the bits in a HARD_REG_SET that
   correspond to the hard registers, if any, set in that map.  This
   could be done far more efficiently by having all sorts of special-cases
   with moving single words, but probably isn't worth the trouble.  */
void
reg_set_to_hard_reg_set (HARD_REG_SET *to, const_bitmap from)
{
  unsigned i;
  bitmap_iterator bi;

  EXECUTE_IF_SET_IN_BITMAP (from, 0, i, bi)
    {
      if (i >= FIRST_PSEUDO_REGISTER)
        return;
      SET_HARD_REG_BIT (*to, i);
    }
}

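/* Illustrative usage sketch, not part of the original file: collect the
   hard registers mentioned in a register bitmap (LIVE is a hypothetical
   regset).  Note that reg_set_to_hard_reg_set only ORs bits in, so the
   destination must be cleared first; the early return above is safe
   because the bitmap iteration runs in ascending order and all pseudo
   numbers are >= FIRST_PSEUDO_REGISTER.  */
static void
hard_regs_in_bitmap_sketch (HARD_REG_SET *mask, const_bitmap live)
{
  CLEAR_HARD_REG_SET (*mask);
  reg_set_to_hard_reg_set (mask, live);
}
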
/* Function called only once per target_globals to initialize the
   target_hard_regs structure.  Once this is done, various switches
   may override.  */
void
init_reg_sets (void)
{
  int i, j;

  /* First copy the register information from the initial int form into
     the regsets.  */

  for (i = 0; i < N_REG_CLASSES; i++)
    {
      CLEAR_HARD_REG_SET (reg_class_contents[i]);

      /* Note that we hard-code 32 here, not HOST_BITS_PER_INT.  */
      for (j = 0; j < FIRST_PSEUDO_REGISTER; j++)
        if (int_reg_class_contents[i][j / 32]
            & ((unsigned) 1 << (j % 32)))
          SET_HARD_REG_BIT (reg_class_contents[i], j);
    }

  /* Sanity check: make sure the target macros FIXED_REGISTERS and
     CALL_USED_REGISTERS had the right number of initializers.  */
  gcc_assert (sizeof fixed_regs == sizeof initial_fixed_regs);
  gcc_assert (sizeof call_used_regs == sizeof initial_call_used_regs);
#ifdef CALL_REALLY_USED_REGISTERS
  gcc_assert (sizeof call_really_used_regs
              == sizeof initial_call_really_used_regs);
#endif
#ifdef REG_ALLOC_ORDER
  gcc_assert (sizeof reg_alloc_order == sizeof initial_reg_alloc_order);
#endif
  gcc_assert (sizeof reg_names == sizeof initial_reg_names);

  memcpy (fixed_regs, initial_fixed_regs, sizeof fixed_regs);
  memcpy (call_used_regs, initial_call_used_regs, sizeof call_used_regs);
#ifdef CALL_REALLY_USED_REGISTERS
  memcpy (call_really_used_regs, initial_call_really_used_regs,
          sizeof call_really_used_regs);
#endif
#ifdef REG_ALLOC_ORDER
  memcpy (reg_alloc_order, initial_reg_alloc_order, sizeof reg_alloc_order);
#endif
  memcpy (reg_names, initial_reg_names, sizeof reg_names);

  SET_HARD_REG_SET (accessible_reg_set);
  SET_HARD_REG_SET (operand_reg_set);
}

/* We need to save copies of some of the register information which
   can be munged by command-line switches so we can restore it during
   subsequent back-end reinitialization.  */
static char saved_fixed_regs[FIRST_PSEUDO_REGISTER];
static char saved_call_used_regs[FIRST_PSEUDO_REGISTER];
#ifdef CALL_REALLY_USED_REGISTERS
static char saved_call_really_used_regs[FIRST_PSEUDO_REGISTER];
#endif
static const char *saved_reg_names[FIRST_PSEUDO_REGISTER];
static HARD_REG_SET saved_accessible_reg_set;
static HARD_REG_SET saved_operand_reg_set;

/* Save the register information.  */
void
save_register_info (void)
{
  /* Sanity check:  make sure the target macros FIXED_REGISTERS and
     CALL_USED_REGISTERS had the right number of initializers.  */
  gcc_assert (sizeof fixed_regs == sizeof saved_fixed_regs);
  gcc_assert (sizeof call_used_regs == sizeof saved_call_used_regs);
  memcpy (saved_fixed_regs, fixed_regs, sizeof fixed_regs);
  memcpy (saved_call_used_regs, call_used_regs, sizeof call_used_regs);

  /* Likewise for call_really_used_regs.  */
#ifdef CALL_REALLY_USED_REGISTERS
  gcc_assert (sizeof call_really_used_regs
              == sizeof saved_call_really_used_regs);
  memcpy (saved_call_really_used_regs, call_really_used_regs,
          sizeof call_really_used_regs);
#endif

  /* And similarly for reg_names.  */
  gcc_assert (sizeof reg_names == sizeof saved_reg_names);
  memcpy (saved_reg_names, reg_names, sizeof reg_names);
  COPY_HARD_REG_SET (saved_accessible_reg_set, accessible_reg_set);
  COPY_HARD_REG_SET (saved_operand_reg_set, operand_reg_set);
}

/* Restore the register information.  */
static void
restore_register_info (void)
{
  memcpy (fixed_regs, saved_fixed_regs, sizeof fixed_regs);
  memcpy (call_used_regs, saved_call_used_regs, sizeof call_used_regs);

#ifdef CALL_REALLY_USED_REGISTERS
  memcpy (call_really_used_regs, saved_call_really_used_regs,
          sizeof call_really_used_regs);
#endif

  memcpy (reg_names, saved_reg_names, sizeof reg_names);
  COPY_HARD_REG_SET (accessible_reg_set, saved_accessible_reg_set);
  COPY_HARD_REG_SET (operand_reg_set, saved_operand_reg_set);
}

/* After switches have been processed, which perhaps alter
   `fixed_regs' and `call_used_regs', convert them to HARD_REG_SETs.  */
static void
init_reg_sets_1 (void)
{
  unsigned int i, j;
  unsigned int /* machine_mode */ m;

  restore_register_info ();

#ifdef REG_ALLOC_ORDER
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    inv_reg_alloc_order[reg_alloc_order[i]] = i;
#endif

  /* Let the target tweak things if necessary.  */

  targetm.conditional_register_usage ();

  /* Compute number of hard regs in each class.  */

  memset (reg_class_size, 0, sizeof reg_class_size);
  for (i = 0; i < N_REG_CLASSES; i++)
    {
      bool any_nonfixed = false;
      for (j = 0; j < FIRST_PSEUDO_REGISTER; j++)
        if (TEST_HARD_REG_BIT (reg_class_contents[i], j))
          {
            reg_class_size[i]++;
            if (!fixed_regs[j])
              any_nonfixed = true;
          }
      class_only_fixed_regs[i] = !any_nonfixed;
    }

  /* Initialize the table of subunions.
     reg_class_subunion[I][J] gets the largest-numbered reg-class
     that is contained in the union of classes I and J.  */

  memset (reg_class_subunion, 0, sizeof reg_class_subunion);
  for (i = 0; i < N_REG_CLASSES; i++)
    {
      for (j = 0; j < N_REG_CLASSES; j++)
        {
          HARD_REG_SET c;
          int k;

          COPY_HARD_REG_SET (c, reg_class_contents[i]);
          IOR_HARD_REG_SET (c, reg_class_contents[j]);
          for (k = 0; k < N_REG_CLASSES; k++)
            if (hard_reg_set_subset_p (reg_class_contents[k], c)
                && !hard_reg_set_subset_p (reg_class_contents[k],
                                           reg_class_contents
                                           [(int) reg_class_subunion[i][j]]))
              reg_class_subunion[i][j] = (enum reg_class) k;
        }
    }

  /* Initialize the table of superunions.
     reg_class_superunion[I][J] gets the smallest-numbered reg-class
     containing the union of classes I and J.  */

  memset (reg_class_superunion, 0, sizeof reg_class_superunion);
  for (i = 0; i < N_REG_CLASSES; i++)
    {
      for (j = 0; j < N_REG_CLASSES; j++)
        {
          HARD_REG_SET c;
          int k;

          COPY_HARD_REG_SET (c, reg_class_contents[i]);
          IOR_HARD_REG_SET (c, reg_class_contents[j]);
          for (k = 0; k < N_REG_CLASSES; k++)
            if (hard_reg_set_subset_p (c, reg_class_contents[k]))
              break;

          reg_class_superunion[i][j] = (enum reg_class) k;
        }
    }

  /* Initialize the tables of subclasses and superclasses of each reg class.
     First clear the whole table, then add the elements as they are found.  */

  for (i = 0; i < N_REG_CLASSES; i++)
    {
      for (j = 0; j < N_REG_CLASSES; j++)
        reg_class_subclasses[i][j] = LIM_REG_CLASSES;
    }

  for (i = 0; i < N_REG_CLASSES; i++)
    {
      if (i == (int) NO_REGS)
        continue;

      for (j = i + 1; j < N_REG_CLASSES; j++)
        if (hard_reg_set_subset_p (reg_class_contents[i],
                                   reg_class_contents[j]))
          {
            /* Reg class I is a subclass of J.  */
            enum reg_class *p;

            /* Append I to the table of subclasses of J.  */
            p = &reg_class_subclasses[j][0];
            while (*p != LIM_REG_CLASSES) p++;
            *p = (enum reg_class) i;
          }
    }

  /* Initialize "constant" tables.  */

  CLEAR_HARD_REG_SET (fixed_reg_set);
  CLEAR_HARD_REG_SET (call_used_reg_set);
  CLEAR_HARD_REG_SET (call_fixed_reg_set);
  CLEAR_HARD_REG_SET (regs_invalidated_by_call);
  if (!regs_invalidated_by_call_regset)
    {
      bitmap_obstack_initialize (&persistent_obstack);
      regs_invalidated_by_call_regset = ALLOC_REG_SET (&persistent_obstack);
    }
  else
    CLEAR_REG_SET (regs_invalidated_by_call_regset);
  if (!fixed_reg_set_regset)
    fixed_reg_set_regset = ALLOC_REG_SET (&persistent_obstack);
  else
    CLEAR_REG_SET (fixed_reg_set_regset);

  AND_HARD_REG_SET (operand_reg_set, accessible_reg_set);
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      /* As a special exception, registers whose class is NO_REGS are
         not accepted by `register_operand'.  The reason for this change
         is to allow the representation of special architecture artifacts
         (such as a condition code register) without extending the rtl
         definitions.  Since registers of class NO_REGS cannot be used
         as registers in any case where register classes are examined,
         it is better to apply this exception in a target-independent way.  */
      if (REGNO_REG_CLASS (i) == NO_REGS)
        CLEAR_HARD_REG_BIT (operand_reg_set, i);

      /* If a register is too limited to be treated as a register operand,
         then it should never be allocated to a pseudo.  */
      if (!TEST_HARD_REG_BIT (operand_reg_set, i))
        {
          fixed_regs[i] = 1;
          call_used_regs[i] = 1;
        }

      /* call_used_regs must include fixed_regs.  */
      gcc_assert (!fixed_regs[i] || call_used_regs[i]);
#ifdef CALL_REALLY_USED_REGISTERS
      /* call_used_regs must include call_really_used_regs.  */
      gcc_assert (!call_really_used_regs[i] || call_used_regs[i]);
#endif

      if (fixed_regs[i])
        {
          SET_HARD_REG_BIT (fixed_reg_set, i);
          SET_REGNO_REG_SET (fixed_reg_set_regset, i);
        }

      if (call_used_regs[i])
        SET_HARD_REG_BIT (call_used_reg_set, i);

      /* There are a couple of fixed registers that we know are safe to
         exclude from being clobbered by calls:

         The frame pointer is always preserved across calls.  The arg
         pointer is if it is fixed.  The stack pointer usually is,
         unless TARGET_RETURN_POPS_ARGS, in which case an explicit
         CLOBBER will be present.  If we are generating PIC code, the
         PIC offset table register is preserved across calls, though the
         target can override that.  */

      if (i == STACK_POINTER_REGNUM)
        ;
      else if (global_regs[i])
        {
          SET_HARD_REG_BIT (regs_invalidated_by_call, i);
          SET_REGNO_REG_SET (regs_invalidated_by_call_regset, i);
        }
      else if (i == FRAME_POINTER_REGNUM)
        ;
      else if (!HARD_FRAME_POINTER_IS_FRAME_POINTER
               && i == HARD_FRAME_POINTER_REGNUM)
        ;
      else if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
               && i == ARG_POINTER_REGNUM && fixed_regs[i])
        ;
      else if (!PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
               && i == (unsigned) PIC_OFFSET_TABLE_REGNUM && fixed_regs[i])
        ;
      else if (CALL_REALLY_USED_REGNO_P (i))
        {
          SET_HARD_REG_BIT (regs_invalidated_by_call, i);
          SET_REGNO_REG_SET (regs_invalidated_by_call_regset, i);
        }
    }

  COPY_HARD_REG_SET (call_fixed_reg_set, fixed_reg_set);
  COPY_HARD_REG_SET (fixed_nonglobal_reg_set, fixed_reg_set);

  /* Preserve global registers if called more than once.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      if (global_regs[i])
        {
          fixed_regs[i] = call_used_regs[i] = 1;
          SET_HARD_REG_BIT (fixed_reg_set, i);
          SET_HARD_REG_BIT (call_used_reg_set, i);
          SET_HARD_REG_BIT (call_fixed_reg_set, i);
        }
    }

  memset (have_regs_of_mode, 0, sizeof (have_regs_of_mode));
  memset (contains_reg_of_mode, 0, sizeof (contains_reg_of_mode));
  for (m = 0; m < (unsigned int) MAX_MACHINE_MODE; m++)
    {
      HARD_REG_SET ok_regs, ok_regs2;
      CLEAR_HARD_REG_SET (ok_regs);
      CLEAR_HARD_REG_SET (ok_regs2);
      for (j = 0; j < FIRST_PSEUDO_REGISTER; j++)
        if (!TEST_HARD_REG_BIT (fixed_nonglobal_reg_set, j)
            && targetm.hard_regno_mode_ok (j, (machine_mode) m))
          {
            SET_HARD_REG_BIT (ok_regs, j);
            if (!fixed_regs[j])
              SET_HARD_REG_BIT (ok_regs2, j);
          }

      for (i = 0; i < N_REG_CLASSES; i++)
        if ((targetm.class_max_nregs ((reg_class_t) i, (machine_mode) m)
             <= reg_class_size[i])
            && hard_reg_set_intersect_p (ok_regs, reg_class_contents[i]))
          {
            contains_reg_of_mode[i][m] = 1;
            if (hard_reg_set_intersect_p (ok_regs2, reg_class_contents[i]))
              {
                have_regs_of_mode[m] = 1;
                contains_allocatable_reg_of_mode[i][m] = 1;
              }
          }
    }
}

/* Compute the table of register modes.
   These values are used to record death information for individual registers
   (as opposed to a multi-register mode).
   This function might be invoked more than once, if the target has support
   for changing register usage conventions on a per-function basis.  */
void
init_reg_modes_target (void)
{
  int i, j;

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    for (j = 0; j < MAX_MACHINE_MODE; j++)
      this_target_regs->x_hard_regno_nregs[i][j]
        = targetm.hard_regno_nregs (i, (machine_mode) j);

  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    {
      reg_raw_mode[i] = choose_hard_reg_mode (i, 1, false);

      /* If we couldn't find a valid mode, just use the previous mode
         if it is suitable, otherwise fall back on word_mode.  */
      if (reg_raw_mode[i] == VOIDmode)
        {
          if (i > 0 && hard_regno_nregs (i, reg_raw_mode[i - 1]) == 1)
            reg_raw_mode[i] = reg_raw_mode[i - 1];
          else
            reg_raw_mode[i] = word_mode;
        }
    }
}

/* Finish initializing the register sets and initialize the register modes.
   This function might be invoked more than once, if the target has support
   for changing register usage conventions on a per-function basis.  */
void
init_regs (void)
{
  /* This finishes what was started by init_reg_sets, but couldn't be done
     until after register usage was specified.  */
  init_reg_sets_1 ();
}

/* The same as previous function plus initializing IRA.  */
void
reinit_regs (void)
{
  init_regs ();
  /* caller_save needs to be re-initialized.  */
  caller_save_initialized_p = false;
  if (this_target_rtl->target_specific_initialized)
    {
      ira_init ();
      recog_init ();
    }
}

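/* Illustrative sketch, not part of the original file: the assumed overall
   sequencing of the entry points above.  save_register_info is assumed to
   run once on the pristine target tables, before switches can change them;
   init_regs when the back end is initialized; reinit_regs for later
   changes, e.g. a target switching register conventions per function.  */
static void
register_init_sequence_sketch (void)
{
  save_register_info ();   /* Once, before option processing.  */
  init_regs ();            /* After switches have been processed.  */
  /* ... later, if register usage conventions change ...  */
  reinit_regs ();          /* Rebuilds the sets and reinitializes IRA.  */
}
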
/* Initialize some fake stack-frame MEM references for use in
   memory_move_secondary_cost.  */
void
init_fake_stack_mems (void)
{
  int i;

  for (i = 0; i < MAX_MACHINE_MODE; i++)
    top_of_stack[i] = gen_rtx_MEM ((machine_mode) i, stack_pointer_rtx);
}

/* Compute cost of moving data from a register of class FROM to one of
   TO, using MODE.  */

int
register_move_cost (machine_mode mode, reg_class_t from, reg_class_t to)
{
  return targetm.register_move_cost (mode, from, to);
}

/* Compute cost of moving registers to/from memory.  */

int
memory_move_cost (machine_mode mode, reg_class_t rclass, bool in)
{
  return targetm.memory_move_cost (mode, rclass, in);
}

/* Compute extra cost of moving registers to/from memory due to reloads.
   Only needed if secondary reloads are required for memory moves.  */
int
memory_move_secondary_cost (machine_mode mode, reg_class_t rclass,
                            bool in)
{
  reg_class_t altclass;
  int partial_cost = 0;
  /* We need a memory reference to feed to SECONDARY... macros.  */
  /* mem may be unused even if the SECONDARY_ macros are defined.  */
  rtx mem ATTRIBUTE_UNUSED = top_of_stack[(int) mode];

  altclass = secondary_reload_class (in ? 1 : 0, rclass, mode, mem);

  if (altclass == NO_REGS)
    return 0;

  if (in)
    partial_cost = register_move_cost (mode, altclass, rclass);
  else
    partial_cost = register_move_cost (mode, rclass, altclass);

  if (rclass == altclass)
    /* This isn't simply a copy-to-temporary situation.  Can't guess
       what it is, so TARGET_MEMORY_MOVE_COST really ought not to be
       calling here in that case.

       I'm tempted to put in an assert here, but returning this will
       probably only give poor estimates, which is what we would've
       had before this code anyways.  */
    return partial_cost;

  /* Check if the secondary reload register will also need a
     secondary reload.  */
  return memory_move_secondary_cost (mode, altclass, in) + partial_cost;
}

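/* Illustrative usage sketch, not part of the original file: use
   register_move_cost to pick the cheaper of two candidate destination
   classes for a value currently in class FROM.  The helper name and the
   decision it encodes are hypothetical.  */
static reg_class_t
cheaper_destination_class_sketch (machine_mode mode, reg_class_t from,
                                  reg_class_t cand1, reg_class_t cand2)
{
  return (register_move_cost (mode, from, cand1)
          <= register_move_cost (mode, from, cand2)) ? cand1 : cand2;
}
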
/* Return a machine mode that is legitimate for hard reg REGNO and large
   enough to save nregs.  If we can't find one, return VOIDmode.
   If CALL_SAVED is true, only consider modes that are call saved.  */
machine_mode
choose_hard_reg_mode (unsigned int regno ATTRIBUTE_UNUSED,
                      unsigned int nregs, bool call_saved)
{
  unsigned int /* machine_mode */ m;
  machine_mode found_mode = VOIDmode, mode;

  /* We first look for the largest integer mode that can be validly
     held in REGNO.  If none, we look for the largest floating-point mode.
     If we still didn't find a valid mode, try CCmode.

     The tests use maybe_gt rather than known_gt because we want (for example)
     N V4SFs to win over plain V4SF even though N might be 1.  */
  FOR_EACH_MODE_IN_CLASS (mode, MODE_INT)
    if (hard_regno_nregs (regno, mode) == nregs
        && targetm.hard_regno_mode_ok (regno, mode)
        && (!call_saved
            || !targetm.hard_regno_call_part_clobbered (regno, mode))
        && maybe_gt (GET_MODE_SIZE (mode), GET_MODE_SIZE (found_mode)))
      found_mode = mode;

  FOR_EACH_MODE_IN_CLASS (mode, MODE_FLOAT)
    if (hard_regno_nregs (regno, mode) == nregs
        && targetm.hard_regno_mode_ok (regno, mode)
        && (!call_saved
            || !targetm.hard_regno_call_part_clobbered (regno, mode))
        && maybe_gt (GET_MODE_SIZE (mode), GET_MODE_SIZE (found_mode)))
      found_mode = mode;

  FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_FLOAT)
    if (hard_regno_nregs (regno, mode) == nregs
        && targetm.hard_regno_mode_ok (regno, mode)
        && (!call_saved
            || !targetm.hard_regno_call_part_clobbered (regno, mode))
        && maybe_gt (GET_MODE_SIZE (mode), GET_MODE_SIZE (found_mode)))
      found_mode = mode;

  FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_INT)
    if (hard_regno_nregs (regno, mode) == nregs
        && targetm.hard_regno_mode_ok (regno, mode)
        && (!call_saved
            || !targetm.hard_regno_call_part_clobbered (regno, mode))
        && maybe_gt (GET_MODE_SIZE (mode), GET_MODE_SIZE (found_mode)))
      found_mode = mode;

  if (found_mode != VOIDmode)
    return found_mode;

  /* Iterate over all of the CCmodes.  */
  for (m = (unsigned int) CCmode; m < (unsigned int) NUM_MACHINE_MODES; ++m)
    {
      mode = (machine_mode) m;
      if (hard_regno_nregs (regno, mode) == nregs
          && targetm.hard_regno_mode_ok (regno, mode)
          && (!call_saved
              || !targetm.hard_regno_call_part_clobbered (regno, mode)))
        return mode;
    }

  /* We can't find a mode valid for this register.  */
  return VOIDmode;
}

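/* Illustrative usage sketch, not part of the original file: the widest
   mode in which a single call-saved hard register REGNO can be saved and
   restored, roughly how caller-save style code is assumed to use
   choose_hard_reg_mode.  The helper name is hypothetical.  */
static machine_mode
single_reg_save_mode_sketch (unsigned int regno)
{
  /* One register, and only modes that survive calls intact.  */
  return choose_hard_reg_mode (regno, 1, true);
}
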
/* Specify the usage characteristics of the register named NAME.
   It should be a fixed register if FIXED and a
   call-used register if CALL_USED.  */
void
fix_register (const char *name, int fixed, int call_used)
{
  int i;
  int reg, nregs;

  /* Decode the name and update the primary form of
     the register info.  */

  if ((reg = decode_reg_name_and_count (name, &nregs)) >= 0)
    {
      gcc_assert (nregs >= 1);
      for (i = reg; i < reg + nregs; i++)
        {
          if ((i == STACK_POINTER_REGNUM
#ifdef HARD_FRAME_POINTER_REGNUM
               || i == HARD_FRAME_POINTER_REGNUM
#else
               || i == FRAME_POINTER_REGNUM
#endif
               )
              && (fixed == 0 || call_used == 0))
            {
              switch (fixed)
                {
                case 0:
                  switch (call_used)
                    {
                    case 0:
                      error ("can%'t use %qs as a call-saved register", name);
                      break;

                    case 1:
                      error ("can%'t use %qs as a call-used register", name);
                      break;

                    default:
                      gcc_unreachable ();
                    }
                  break;

                case 1:
                  switch (call_used)
                    {
                    case 1:
                      error ("can%'t use %qs as a fixed register", name);
                      break;

                    case 0:
                    default:
                      gcc_unreachable ();
                    }
                  break;

                default:
                  gcc_unreachable ();
                }
            }
          else
            {
              fixed_regs[i] = fixed;
              call_used_regs[i] = call_used;
#ifdef CALL_REALLY_USED_REGISTERS
              if (fixed == 0)
                call_really_used_regs[i] = call_used;
#endif
            }
        }
    }
  else
    {
      warning (0, "unknown register name: %s", name);
    }
}

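/* Illustrative sketch, not part of the original file: the option machinery
   is assumed to funnel -ffixed-REG, -fcall-used-REG and -fcall-saved-REG
   into calls of this shape; the register names are hypothetical.  */
static void
apply_reg_usage_options_sketch (void)
{
  fix_register ("r9", 1, 1);    /* -ffixed-r9: fixed and call-used.  */
  fix_register ("r10", 0, 1);   /* -fcall-used-r10: allocatable, clobbered.  */
  fix_register ("r11", 0, 0);   /* -fcall-saved-r11: preserved across calls.  */
}
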
/* Mark register number I as global.  */
void
globalize_reg (tree decl, int i)
{
  location_t loc = DECL_SOURCE_LOCATION (decl);

#ifdef STACK_REGS
  if (IN_RANGE (i, FIRST_STACK_REG, LAST_STACK_REG))
    {
      error ("stack register used for global register variable");
      return;
    }
#endif

  if (fixed_regs[i] == 0 && no_global_reg_vars)
    error_at (loc, "global register variable follows a function definition");

  if (global_regs[i])
    {
      auto_diagnostic_group d;
      warning_at (loc, 0,
                  "register of %qD used for multiple global register variables",
                  decl);
      inform (DECL_SOURCE_LOCATION (global_regs_decl[i]),
              "conflicts with %qD", global_regs_decl[i]);
      return;
    }

  if (call_used_regs[i] && ! fixed_regs[i])
    warning_at (loc, 0, "call-clobbered register used for global register variable");

  global_regs[i] = 1;
  global_regs_decl[i] = decl;

  /* If we're globalizing the frame pointer, we need to set the
     appropriate regs_invalidated_by_call bit, even if it's already
     set in fixed_regs.  */
  if (i != STACK_POINTER_REGNUM)
    {
      SET_HARD_REG_BIT (regs_invalidated_by_call, i);
      SET_REGNO_REG_SET (regs_invalidated_by_call_regset, i);
    }

  /* If already fixed, nothing else to do.  */
  if (fixed_regs[i])
    return;

  fixed_regs[i] = call_used_regs[i] = 1;
#ifdef CALL_REALLY_USED_REGISTERS
  call_really_used_regs[i] = 1;
#endif

  SET_HARD_REG_BIT (fixed_reg_set, i);
  SET_HARD_REG_BIT (call_used_reg_set, i);
  SET_HARD_REG_BIT (call_fixed_reg_set, i);

  reinit_regs ();
}

/* Structure used to record preferences of a given pseudo.  */
struct reg_pref
{
  /* (enum reg_class) prefclass is the preferred class.  May be
     NO_REGS if no class is better than memory.  */
  char prefclass;

  /* altclass is a register class that we should use for allocating
     the pseudo if no register in the preferred class is available.
     If no register in this class is available, memory is preferred.

     It might appear to be more general to have a bitmask of classes here,
     but since it is recommended that there be a class corresponding to the
     union of each major pair of classes, that generality is not required.  */
  char altclass;

  /* allocnoclass is the register class that IRA uses for allocating
     the pseudo.  */
  char allocnoclass;
};

/* Record preferences of each pseudo.  This is available after RA is
   run.  */
static struct reg_pref *reg_pref;

/* Current size of reg_info.  */
static int reg_info_size;
/* Value of max_reg_num as of the last resize_reg_info call.  */
static int max_regno_since_last_resize;

/* Return the reg_class in which pseudo reg number REGNO is best allocated.
   This function is sometimes called before the info has been computed.
   When that happens, just return GENERAL_REGS, which is innocuous.  */
enum reg_class
reg_preferred_class (int regno)
{
  if (reg_pref == 0)
    return GENERAL_REGS;

  gcc_assert (regno < reg_info_size);
  return (enum reg_class) reg_pref[regno].prefclass;
}

enum reg_class
reg_alternate_class (int regno)
{
  if (reg_pref == 0)
    return ALL_REGS;

  gcc_assert (regno < reg_info_size);
  return (enum reg_class) reg_pref[regno].altclass;
}

/* Return the reg_class which is used by IRA for its allocation.  */
enum reg_class
reg_allocno_class (int regno)
{
  if (reg_pref == 0)
    return NO_REGS;

  gcc_assert (regno < reg_info_size);
  return (enum reg_class) reg_pref[regno].allocnoclass;
}

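/* Illustrative usage sketch, not part of the original file: how a consumer
   is assumed to query the per-pseudo class info recorded above.  A
   preferred class of NO_REGS means no register class beats memory.  The
   helper name is hypothetical.  */
static bool
pseudo_prefers_memory_p_sketch (int regno)
{
  return reg_preferred_class (regno) == NO_REGS;
}
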
/* Allocate space for reg info and initialize it.  */
static void
allocate_reg_info (void)
{
  int i;

  max_regno_since_last_resize = max_reg_num ();
  reg_info_size = max_regno_since_last_resize * 3 / 2 + 1;
  gcc_assert (! reg_pref && ! reg_renumber);
  reg_renumber = XNEWVEC (short, reg_info_size);
  reg_pref = XCNEWVEC (struct reg_pref, reg_info_size);
  memset (reg_renumber, -1, reg_info_size * sizeof (short));
  for (i = 0; i < reg_info_size; i++)
    {
      reg_pref[i].prefclass = GENERAL_REGS;
      reg_pref[i].altclass = ALL_REGS;
      reg_pref[i].allocnoclass = GENERAL_REGS;
    }
}

/* Resize reg info.  The new elements will be initialized.  Return TRUE
   if new pseudos were added since the last call.  */
bool
resize_reg_info (void)
{
  int old, i;
  bool change_p;

  if (reg_pref == NULL)
    {
      allocate_reg_info ();
      return true;
    }
  change_p = max_regno_since_last_resize != max_reg_num ();
  max_regno_since_last_resize = max_reg_num ();
  if (reg_info_size >= max_reg_num ())
    return change_p;
  old = reg_info_size;
  reg_info_size = max_reg_num () * 3 / 2 + 1;
  gcc_assert (reg_pref && reg_renumber);
  reg_renumber = XRESIZEVEC (short, reg_renumber, reg_info_size);
  reg_pref = XRESIZEVEC (struct reg_pref, reg_pref, reg_info_size);
  memset (reg_pref + old, -1,
          (reg_info_size - old) * sizeof (struct reg_pref));
  memset (reg_renumber + old, -1, (reg_info_size - old) * sizeof (short));
  for (i = old; i < reg_info_size; i++)
    {
      reg_pref[i].prefclass = GENERAL_REGS;
      reg_pref[i].altclass = ALL_REGS;
      reg_pref[i].allocnoclass = GENERAL_REGS;
    }
  return true;
}

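/* Illustrative sketch, not part of the original file: the growth policy
   used above, factored out for clarity.  With max_reg_num () == 100 the
   tables are sized for 151 entries, so creating a run of new pseudos does
   not force a reallocation for every single one.  The helper name is
   hypothetical.  */
static int
reg_info_grown_size_sketch (int max_regno)
{
  return max_regno * 3 / 2 + 1;
}
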
/* Free up the space allocated by allocate_reg_info.  */
void
free_reg_info (void)
{
  if (reg_pref)
    {
      free (reg_pref);
      reg_pref = NULL;
    }

  if (reg_renumber)
    {
      free (reg_renumber);
      reg_renumber = NULL;
    }
}

/* Initialize some global data for this pass.  */
static unsigned int
reginfo_init (void)
{
  if (df)
    df_compute_regs_ever_live (true);

  /* This prevents dump_reg_info from losing if called
     before reginfo is run.  */
  reg_pref = NULL;
  reg_info_size = max_regno_since_last_resize = 0;
  /* No more global register variables may be declared.  */
  no_global_reg_vars = 1;
  return 1;
}

namespace {

const pass_data pass_data_reginfo_init =
{
  RTL_PASS, /* type */
  "reginfo", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_NONE, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_reginfo_init : public rtl_opt_pass
{
public:
  pass_reginfo_init (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_reginfo_init, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return reginfo_init (); }

}; // class pass_reginfo_init

} // anon namespace

rtl_opt_pass *
make_pass_reginfo_init (gcc::context *ctxt)
{
  return new pass_reginfo_init (ctxt);
}

/* Set up preferred, alternate, and allocno classes for REGNO as
   PREFCLASS, ALTCLASS, and ALLOCNOCLASS.  */
void
setup_reg_classes (int regno,
                   enum reg_class prefclass, enum reg_class altclass,
                   enum reg_class allocnoclass)
{
  if (reg_pref == NULL)
    return;
  gcc_assert (reg_info_size >= max_reg_num ());
  reg_pref[regno].prefclass = prefclass;
  reg_pref[regno].altclass = altclass;
  reg_pref[regno].allocnoclass = allocnoclass;
}

/* This is the `regscan' pass of the compiler, run just before cse and
   again just before loop.  It finds the first and last use of each
   pseudo-register.  */

static void reg_scan_mark_refs (rtx, rtx_insn *);

void
reg_scan (rtx_insn *f, unsigned int nregs ATTRIBUTE_UNUSED)
{
  rtx_insn *insn;

  timevar_push (TV_REG_SCAN);

  for (insn = f; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
        reg_scan_mark_refs (PATTERN (insn), insn);
        if (REG_NOTES (insn))
          reg_scan_mark_refs (REG_NOTES (insn), insn);
      }

  timevar_pop (TV_REG_SCAN);
}

/* X is the expression to scan.  INSN is the insn it appears in.
   Called both for INSN's pattern and for its REG_NOTES.  */
static void
reg_scan_mark_refs (rtx x, rtx_insn *insn)
{
  enum rtx_code code;
  rtx dest;
  rtx note;

  if (!x)
    return;
  code = GET_CODE (x);
  switch (code)
    {
    case CONST:
    CASE_CONST_ANY:
    case CC0:
    case PC:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case REG:
      return;

    case EXPR_LIST:
      if (XEXP (x, 0))
        reg_scan_mark_refs (XEXP (x, 0), insn);
      if (XEXP (x, 1))
        reg_scan_mark_refs (XEXP (x, 1), insn);
      break;

    case INSN_LIST:
    case INT_LIST:
      if (XEXP (x, 1))
        reg_scan_mark_refs (XEXP (x, 1), insn);
      break;

    case CLOBBER:
      if (MEM_P (XEXP (x, 0)))
        reg_scan_mark_refs (XEXP (XEXP (x, 0), 0), insn);
      break;

    case CLOBBER_HIGH:
      gcc_assert (!(MEM_P (XEXP (x, 0))));
      break;

    case SET:
      /* Count a set of the destination if it is a register.  */
      for (dest = SET_DEST (x);
           GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART
           || GET_CODE (dest) == ZERO_EXTRACT;
           dest = XEXP (dest, 0))
        ;

      /* If this is setting a pseudo from another pseudo or the sum of a
         pseudo and a constant integer and the other pseudo is known to be
         a pointer, set the destination to be a pointer as well.

         Likewise if it is setting the destination from an address or from a
         value equivalent to an address or to the sum of an address and
         something else.

         But don't do any of this if the pseudo corresponds to a user
         variable since it should have already been set as a pointer based
         on the type.  */

      if (REG_P (SET_DEST (x))
          && REGNO (SET_DEST (x)) >= FIRST_PSEUDO_REGISTER
          /* If the destination pseudo is set more than once, then other
             sets might not be to a pointer value (consider access to a
             union in two threads of control in the presence of global
             optimizations).  So only set REG_POINTER on the destination
             pseudo if this is the only set of that pseudo.  */
          && DF_REG_DEF_COUNT (REGNO (SET_DEST (x))) == 1
          && ! REG_USERVAR_P (SET_DEST (x))
          && ! REG_POINTER (SET_DEST (x))
          && ((REG_P (SET_SRC (x))
               && REG_POINTER (SET_SRC (x)))
              || ((GET_CODE (SET_SRC (x)) == PLUS
                   || GET_CODE (SET_SRC (x)) == LO_SUM)
                  && CONST_INT_P (XEXP (SET_SRC (x), 1))
                  && REG_P (XEXP (SET_SRC (x), 0))
                  && REG_POINTER (XEXP (SET_SRC (x), 0)))
              || GET_CODE (SET_SRC (x)) == CONST
              || GET_CODE (SET_SRC (x)) == SYMBOL_REF
              || GET_CODE (SET_SRC (x)) == LABEL_REF
              || (GET_CODE (SET_SRC (x)) == HIGH
                  && (GET_CODE (XEXP (SET_SRC (x), 0)) == CONST
                      || GET_CODE (XEXP (SET_SRC (x), 0)) == SYMBOL_REF
                      || GET_CODE (XEXP (SET_SRC (x), 0)) == LABEL_REF))
              || ((GET_CODE (SET_SRC (x)) == PLUS
                   || GET_CODE (SET_SRC (x)) == LO_SUM)
                  && (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST
                      || GET_CODE (XEXP (SET_SRC (x), 1)) == SYMBOL_REF
                      || GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF))
              || ((note = find_reg_note (insn, REG_EQUAL, 0)) != 0
                  && (GET_CODE (XEXP (note, 0)) == CONST
                      || GET_CODE (XEXP (note, 0)) == SYMBOL_REF
                      || GET_CODE (XEXP (note, 0)) == LABEL_REF))))
        REG_POINTER (SET_DEST (x)) = 1;

      /* If this is setting a register from a register or from a simple
         conversion of a register, propagate REG_EXPR.  */
      if (REG_P (dest) && !REG_ATTRS (dest))
        set_reg_attrs_from_value (dest, SET_SRC (x));

      /* fall through */

    default:
      {
        const char *fmt = GET_RTX_FORMAT (code);
        int i;
        for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
          {
            if (fmt[i] == 'e')
              reg_scan_mark_refs (XEXP (x, i), insn);
            else if (fmt[i] == 'E' && XVEC (x, i) != 0)
              {
                int j;
                for (j = XVECLEN (x, i) - 1; j >= 0; j--)
                  reg_scan_mark_refs (XVECEXP (x, i, j), insn);
              }
          }
      }
    }
}

/* Return nonzero if C1 is a subset of C2, i.e., if every register in C1
   is also in C2.  */
int
reg_class_subset_p (reg_class_t c1, reg_class_t c2)
{
  return (c1 == c2
          || c2 == ALL_REGS
          || hard_reg_set_subset_p (reg_class_contents[(int) c1],
                                    reg_class_contents[(int) c2]));
}

/* Return nonzero if there is a register that is in both C1 and C2.  */
int
reg_classes_intersect_p (reg_class_t c1, reg_class_t c2)
{
  return (c1 == c2
          || c1 == ALL_REGS
          || c2 == ALL_REGS
          || hard_reg_set_intersect_p (reg_class_contents[(int) c1],
                                       reg_class_contents[(int) c2]));
}

inline hashval_t
simplifiable_subregs_hasher::hash (const simplifiable_subreg *value)
{
  inchash::hash h;
  h.add_hwi (value->shape.unique_id ());
  return h.end ();
}

inline bool
simplifiable_subregs_hasher::equal (const simplifiable_subreg *value,
                                    const subreg_shape *compare)
{
  return value->shape == *compare;
}

inline simplifiable_subreg::simplifiable_subreg (const subreg_shape &shape_in)
  : shape (shape_in)
{
  CLEAR_HARD_REG_SET (simplifiable_regs);
}

/* Return the set of hard registers that are able to form the subreg
   described by SHAPE.  */

const HARD_REG_SET &
simplifiable_subregs (const subreg_shape &shape)
{
  if (!this_target_hard_regs->x_simplifiable_subregs)
    this_target_hard_regs->x_simplifiable_subregs
      = new hash_table <simplifiable_subregs_hasher> (30);
  inchash::hash h;
  h.add_hwi (shape.unique_id ());
  simplifiable_subreg **slot
    = (this_target_hard_regs->x_simplifiable_subregs
       ->find_slot_with_hash (&shape, h.end (), INSERT));

  if (!*slot)
    {
      simplifiable_subreg *info = new simplifiable_subreg (shape);
      for (unsigned int i = 0; i < FIRST_PSEUDO_REGISTER; ++i)
        if (targetm.hard_regno_mode_ok (i, shape.inner_mode)
            && simplify_subreg_regno (i, shape.inner_mode, shape.offset,
                                      shape.outer_mode) >= 0)
          SET_HARD_REG_BIT (info->simplifiable_regs, i);
      *slot = info;
    }
  return (*slot)->simplifiable_regs;
}

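/* Illustrative usage sketch, not part of the original file: test whether
   hard register REGNO could directly stand in for the subreg expression X.
   The helper name is hypothetical.  */
static bool
hard_reg_can_form_subreg_p_sketch (rtx x, unsigned int regno)
{
  const HARD_REG_SET &ok_regs = simplifiable_subregs (shape_of_subreg (x));
  return TEST_HARD_REG_BIT (ok_regs, regno);
}
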
/* Passes for keeping and updating info about modes of registers
   inside subregisters.  */

static HARD_REG_SET **valid_mode_changes;
static obstack valid_mode_changes_obstack;

/* Restrict the choice of register for SUBREG_REG (SUBREG) based
   on information about SUBREG.

   If PARTIAL_DEF, SUBREG is a partial definition of a multipart inner
   register and we want to ensure that the other parts of the inner
   register are correctly preserved.  If !PARTIAL_DEF we need to
   ensure that SUBREG itself can be formed.  */

static void
record_subregs_of_mode (rtx subreg, bool partial_def)
{
  unsigned int regno;

  if (!REG_P (SUBREG_REG (subreg)))
    return;

  regno = REGNO (SUBREG_REG (subreg));
  if (regno < FIRST_PSEUDO_REGISTER)
    return;

  subreg_shape shape (shape_of_subreg (subreg));
  if (partial_def)
    {
      /* The number of independently-accessible SHAPE.outer_mode values
         in SHAPE.inner_mode is GET_MODE_SIZE (SHAPE.inner_mode) / SIZE.
         We need to check that the assignment will preserve all the other
         SIZE-byte chunks in the inner register besides the one that
         includes SUBREG.

         In practice it is enough to check whether an equivalent
         SHAPE.inner_mode value in an adjacent SIZE-byte chunk can be formed.
         If the underlying registers are small enough, both subregs will
         be valid.  If the underlying registers are too large, one of the
         subregs will be invalid.

         This relies on the fact that we've already been passed
         SUBREG with PARTIAL_DEF set to false.

         The size of the outer mode must be ordered wrt the size of the
         inner mode's registers, since otherwise we wouldn't know at
         compile time how many registers the outer mode occupies.  */
      poly_uint64 size = ordered_max (REGMODE_NATURAL_SIZE (shape.inner_mode),
                                      GET_MODE_SIZE (shape.outer_mode));
      gcc_checking_assert (known_lt (size, GET_MODE_SIZE (shape.inner_mode)));
      if (known_ge (shape.offset, size))
        shape.offset -= size;
      else
        shape.offset += size;
    }

  if (valid_mode_changes[regno])
    AND_HARD_REG_SET (*valid_mode_changes[regno],
                      simplifiable_subregs (shape));
  else
    {
      valid_mode_changes[regno]
        = XOBNEW (&valid_mode_changes_obstack, HARD_REG_SET);
      COPY_HARD_REG_SET (*valid_mode_changes[regno],
                         simplifiable_subregs (shape));
    }
}

/* Call record_subregs_of_mode for all the subregs in X.  */
static void
find_subregs_of_mode (rtx x)
{
  enum rtx_code code = GET_CODE (x);
  const char * const fmt = GET_RTX_FORMAT (code);
  int i;

  if (code == SUBREG)
    record_subregs_of_mode (x, false);

  /* Time for some deep diving.  */
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        find_subregs_of_mode (XEXP (x, i));
      else if (fmt[i] == 'E')
        {
          int j;
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            find_subregs_of_mode (XVECEXP (x, i, j));
        }
    }
}

void
init_subregs_of_mode (void)
{
  basic_block bb;
  rtx_insn *insn;

  gcc_obstack_init (&valid_mode_changes_obstack);
  valid_mode_changes = XCNEWVEC (HARD_REG_SET *, max_reg_num ());

  FOR_EACH_BB_FN (bb, cfun)
    FOR_BB_INSNS (bb, insn)
      if (NONDEBUG_INSN_P (insn))
        {
          find_subregs_of_mode (PATTERN (insn));
          df_ref def;
          FOR_EACH_INSN_DEF (def, insn)
            if (DF_REF_FLAGS_IS_SET (def, DF_REF_PARTIAL)
                && read_modify_subreg_p (DF_REF_REG (def)))
              record_subregs_of_mode (DF_REF_REG (def), true);
        }
}

const HARD_REG_SET *
valid_mode_changes_for_regno (unsigned int regno)
{
  return valid_mode_changes[regno];
}

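/* Illustrative usage sketch, not part of the original file: restrict a
   pseudo's allowed hard registers to those that can honor every subreg
   mode change recorded for it, roughly how the allocator is assumed to
   consume this data.  The helper name is hypothetical.  */
static void
restrict_to_valid_mode_changes_sketch (HARD_REG_SET *allowed,
                                       unsigned int regno)
{
  const HARD_REG_SET *valid = valid_mode_changes_for_regno (regno);
  if (valid)
    AND_HARD_REG_SET (*allowed, *valid);
}
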
void
finish_subregs_of_mode (void)
{
  XDELETEVEC (valid_mode_changes);
  obstack_free (&valid_mode_changes_obstack, NULL);
}

/* Free all data attached to the structure.  This isn't a destructor because
   we don't want to run on exit.  */

void
target_hard_regs::finalize ()
{
  delete x_simplifiable_subregs;
}