gcc/global.c
1 /* Allocate registers for pseudo-registers that span basic blocks.
2 Copyright (C) 1987, 1988, 1991, 1994, 1996, 1997, 1998,
3 1999, 2000, 2002, 2003, 2004, 2005, 2006, 2007
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "machmode.h"
28 #include "hard-reg-set.h"
29 #include "rtl.h"
30 #include "tm_p.h"
31 #include "flags.h"
32 #include "regs.h"
33 #include "function.h"
34 #include "insn-config.h"
35 #include "recog.h"
36 #include "reload.h"
37 #include "output.h"
38 #include "toplev.h"
39 #include "tree-pass.h"
40 #include "timevar.h"
41 #include "df.h"
42 #include "vecprim.h"
43 #include "dbgcnt.h"
45 /* This pass of the compiler performs global register allocation.
46 It assigns hard register numbers to all the pseudo registers
47 that were not handled in local_alloc. Assignments are recorded
48 in the vector reg_renumber, not by changing the rtl code.
49 (Such changes are made by final). The entry point is
50 the function global_alloc.
52 After allocation is complete, the reload pass is run as a subroutine
53 of this pass, so that when a pseudo reg loses its hard reg due to
54 spilling it is possible to make a second attempt to find a hard
55 reg for it. The reload pass is independent in other respects
56 and it is run even when stupid register allocation is in use.
58 1. Assign allocation-numbers (allocnos) to the pseudo-registers
59 still needing allocations and to the pseudo-registers currently
60 allocated by local-alloc which may be spilled by reload.
61 Set up tables reg_allocno and allocno_reg to map
62 reg numbers to allocnos and vice versa.
63 max_allocno gets the number of allocnos in use.
65 2. Allocate a max_allocno by max_allocno conflict bit matrix and clear it.
66 Allocate a max_allocno by FIRST_PSEUDO_REGISTER conflict matrix
67 for conflicts between allocnos and explicit hard register use
68 (which includes use of pseudo-registers allocated by local_alloc).
70 3. For each basic block
71 walk forward through the block, recording which
72 pseudo-registers and which hardware registers are live.
 73 Build one conflict matrix among the pseudo-registers,
 74 and another between the pseudo-registers and the hardware registers.
75 Also record the preferred hardware registers
76 for each pseudo-register.
78 4. Sort a table of the allocnos into order of
79 desirability of the variables.
81 5. Allocate the variables in that order; each if possible into
82 a preferred register, else into another register. */
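/* Editorial sketch (not part of the pass): the essence of step 1 above is
   a two-way mapping between pseudo register numbers and allocno indices.
   The arrays declared below (reg_allocno[] and allocno[].reg) implement
   it; this fragment only states the invariant that global_alloc sets up.  */
#if 0
  int a = reg_allocno[regno];	/* pseudo regno -> allocno, or -1.  */
  if (a >= 0)
    gcc_assert (allocno[a].reg == regno);	/* allocno -> pseudo regno.  */
#endif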
84 /* Number of pseudo-registers which are candidates for allocation. */
86 static int max_allocno;
88 /* Indexed by (pseudo) reg number, gives the allocno, or -1
89 for pseudo registers which are not to be allocated. */
91 static int *reg_allocno;
93 struct allocno
95 int reg;
96 /* Gives the number of consecutive hard registers needed by that
97 pseudo reg. */
98 int size;
100 /* Number of calls crossed by each allocno. */
101 int calls_crossed;
103 /* Number of calls that might throw crossed by each allocno. */
104 int throwing_calls_crossed;
106 /* Number of refs to each allocno. */
107 int n_refs;
109 /* Frequency of uses of each allocno. */
110 int freq;
112 /* Guess at live length of each allocno.
113 This is actually the max of the live lengths of the regs. */
114 int live_length;
116 /* Set of hard regs conflicting with allocno N. */
118 HARD_REG_SET hard_reg_conflicts;
120 /* Set of hard regs preferred by allocno N.
121 This is used to make allocnos go into regs that are copied to or from them,
122 when possible, to reduce register shuffling. */
124 HARD_REG_SET hard_reg_preferences;
126 /* Similar, but just counts register preferences made in simple copy
127 operations, rather than arithmetic. These are given priority because
128 we can always eliminate an insn by using these, but using a register
129 in the above list won't always eliminate an insn. */
131 HARD_REG_SET hard_reg_copy_preferences;
133 /* Similar to hard_reg_preferences, but includes bits for subsequent
134 registers when an allocno is multi-word. The above variable is used for
135 allocation while this is used to build reg_someone_prefers, below. */
137 HARD_REG_SET hard_reg_full_preferences;
139 /* Set of hard registers that some later allocno has a preference for. */
141 HARD_REG_SET regs_someone_prefers;
143 #ifdef STACK_REGS
144 /* Set to true if allocno can't be allocated in the stack register. */
145 bool no_stack_reg;
146 #endif
149 static struct allocno *allocno;
151 /* A vector of the integers from 0 to max_allocno-1,
152 sorted in the order of first-to-be-allocated first. */
154 static int *allocno_order;
156 /* Define the number of bits in each element of `conflicts' and what
157 type that element has. We use the largest integer format on the
158 host machine. */
160 #define INT_BITS HOST_BITS_PER_WIDE_INT
161 #define INT_TYPE HOST_WIDE_INT
163 /* max_allocno by max_allocno array of bits,
 164 recording whether two allocnos conflict (can't go in the same
165 hardware register).
167 `conflicts' is symmetric after the call to mirror_conflicts. */
169 static INT_TYPE *conflicts;
171 /* Number of ints required to hold max_allocno bits.
172 This is the length of a row in `conflicts'. */
174 static int allocno_row_words;
176 /* Two macros to test or store 1 in an element of `conflicts'. */
178 #define CONFLICTP(I, J) \
179 (conflicts[(I) * allocno_row_words + (unsigned) (J) / INT_BITS] \
180 & ((INT_TYPE) 1 << ((unsigned) (J) % INT_BITS)))
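/* Editorial example of the addressing used by CONFLICTP: with
   INT_BITS == 64, the conflict bit for the ordered pair (I = 3, J = 70)
   lives in conflicts[3 * allocno_row_words + 1] at bit position 6,
   since 70 / 64 == 1 and 70 % 64 == 6.  A single-bit setter would be the
   hypothetical macro below; the pass never needs one, because
   record_one_conflict ORs whole words of allocnos_live into a row.  */
#if 0
#define SET_CONFLICT(I, J) \
 (conflicts[(I) * allocno_row_words + (unsigned) (J) / INT_BITS] \
  |= ((INT_TYPE) 1 << ((unsigned) (J) % INT_BITS)))
#endif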
182 /* For any allocno set in ALLOCNO_SET, set ALLOCNO to that allocno,
183 and execute CODE. */
184 #define EXECUTE_IF_SET_IN_ALLOCNO_SET(ALLOCNO_SET, ALLOCNO, CODE) \
185 do { \
186 int i_; \
187 int allocno_; \
188 INT_TYPE *p_ = (ALLOCNO_SET); \
190 for (i_ = allocno_row_words - 1, allocno_ = 0; i_ >= 0; \
191 i_--, allocno_ += INT_BITS) \
193 unsigned INT_TYPE word_ = (unsigned INT_TYPE) *p_++; \
195 for ((ALLOCNO) = allocno_; word_; word_ >>= 1, (ALLOCNO)++) \
197 if (word_ & 1) \
198 {CODE;} \
201 } while (0)
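/* Editorial usage sketch for the macro above: count the allocnos currently
   marked live.  The variable names are hypothetical; the real callers
   below (e.g. record_one_conflict and prune_preferences) use the macro in
   exactly this shape.  */
#if 0
  {
    int a, n_live = 0;
    EXECUTE_IF_SET_IN_ALLOCNO_SET (allocnos_live, a,
				   {
				     n_live++;
				   });
  }
#endif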
203 /* Set of hard regs currently live (during scan of all insns). */
205 static HARD_REG_SET hard_regs_live;
207 /* Set of registers that global-alloc isn't supposed to use. */
209 static HARD_REG_SET no_global_alloc_regs;
211 /* Set of registers used so far. */
213 static HARD_REG_SET regs_used_so_far;
215 /* Number of refs to each hard reg, as used by local alloc.
216 It is zero for a reg that contains global pseudos or is explicitly used. */
218 static int local_reg_n_refs[FIRST_PSEUDO_REGISTER];
220 /* Frequency of uses of given hard reg. */
221 static int local_reg_freq[FIRST_PSEUDO_REGISTER];
223 /* Guess at live length of each hard reg, as used by local alloc.
224 This is actually the sum of the live lengths of the specific regs. */
226 static int local_reg_live_length[FIRST_PSEUDO_REGISTER];
 228 /* Set to 1 the bit for hard register number J in the HARD_REG_SET
 229 field TABLE of allocno I. */
231 #define SET_REGBIT(TABLE, I, J) SET_HARD_REG_BIT (allocno[I].TABLE, J)
233 /* Bit mask for allocnos live at current point in the scan. */
235 static INT_TYPE *allocnos_live;
237 /* Test, set or clear bit number I in allocnos_live,
238 a bit vector indexed by allocno. */
240 #define SET_ALLOCNO_LIVE(I) \
241 (allocnos_live[(unsigned) (I) / INT_BITS] \
242 |= ((INT_TYPE) 1 << ((unsigned) (I) % INT_BITS)))
244 #define CLEAR_ALLOCNO_LIVE(I) \
245 (allocnos_live[(unsigned) (I) / INT_BITS] \
246 &= ~((INT_TYPE) 1 << ((unsigned) (I) % INT_BITS)))
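/* Editorial note: only the set and clear forms are defined; live allocnos
   are enumerated with EXECUTE_IF_SET_IN_ALLOCNO_SET rather than tested one
   bit at a time.  A single-bit test, were one needed, would use the same
   addressing, e.g. the hypothetical:  */
#if 0
#define ALLOCNO_LIVE_P(I) \
 (allocnos_live[(unsigned) (I) / INT_BITS] \
  & ((INT_TYPE) 1 << ((unsigned) (I) % INT_BITS)))
#endif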
248 /* This is turned off because it doesn't work right for DImode.
249 (And it is only used for DImode, so the other cases are worthless.)
250 The problem is that it isn't true that there is NO possibility of conflict;
251 only that there is no conflict if the two pseudos get the exact same regs.
252 If they were allocated with a partial overlap, there would be a conflict.
253 We can't safely turn off the conflict unless we have another way to
254 prevent the partial overlap.
256 Idea: change hard_reg_conflicts so that instead of recording which
257 hard regs the allocno may not overlap, it records where the allocno
258 may not start. Change both where it is used and where it is updated.
259 Then there is a way to record that (reg:DI 108) may start at 10
260 but not at 9 or 11. There is still the question of how to record
261 this semi-conflict between two pseudos. */
262 #if 0
263 /* Reg pairs for which conflict after the current insn
264 is inhibited by a REG_NO_CONFLICT note.
265 If the table gets full, we ignore any other notes--that is conservative. */
266 #define NUM_NO_CONFLICT_PAIRS 4
267 /* Number of pairs in use in this insn. */
268 int n_no_conflict_pairs;
269 static struct { int allocno1, allocno2;}
270 no_conflict_pairs[NUM_NO_CONFLICT_PAIRS];
271 #endif /* 0 */
273 /* Record all regs that are set in any one insn.
274 Communication from mark_reg_{store,clobber} and global_conflicts. */
276 static VEC(rtx, heap) *regs_set;
279 /* Return true if *LOC contains an asm. */
281 static int
282 insn_contains_asm_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
284 if ( !*loc)
285 return 0;
286 if (GET_CODE (*loc) == ASM_OPERANDS)
287 return 1;
288 return 0;
292 /* Return true if INSN contains an ASM. */
294 static int
295 insn_contains_asm (rtx insn)
297 return for_each_rtx (&insn, insn_contains_asm_1, NULL);
301 static void
302 compute_regs_asm_clobbered (char *regs_asm_clobbered)
304 basic_block bb;
306 memset (regs_asm_clobbered, 0, sizeof (char) * FIRST_PSEUDO_REGISTER);
308 FOR_EACH_BB (bb)
310 rtx insn;
311 FOR_BB_INSNS_REVERSE (bb, insn)
313 struct df_ref **def_rec;
314 if (insn_contains_asm (insn))
315 for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
317 struct df_ref *def = *def_rec;
318 unsigned int dregno = DF_REF_REGNO (def);
319 if (dregno < FIRST_PSEUDO_REGISTER)
321 unsigned int i;
322 enum machine_mode mode = GET_MODE (DF_REF_REAL_REG (def));
323 unsigned int end = dregno
324 + hard_regno_nregs[dregno][mode] - 1;
325 for (i = dregno; i <= end; ++i)
326 regs_asm_clobbered[i] = 1;
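/* Editorial example of what the scan above catches (GNU C, x86-style
   clobber list, purely illustrative): an asm whose clobber list names a
   hard register appears as a DF def of that register, so its entry in
   regs_asm_clobbered gets set and compute_regsets below will not treat the
   register as eliminable (and reports an error if it cannot be eliminated
   anyway).  */
#if 0
void
example (void)
{
  asm volatile ("" : : : "ebp");	/* clobbers the frame pointer */
}
#endif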
334 /* All registers that can be eliminated. */
336 static HARD_REG_SET eliminable_regset;
338 static int allocno_compare (const void *, const void *);
339 static void global_conflicts (void);
340 static void mirror_conflicts (void);
341 static void expand_preferences (void);
342 static void prune_preferences (void);
343 static void find_reg (int, HARD_REG_SET, int, int, int);
344 static void record_one_conflict (int);
345 static void record_conflicts (int *, int);
346 static void mark_reg_store (rtx, const_rtx, void *);
347 static void mark_reg_clobber (rtx, const_rtx, void *);
348 static void mark_reg_conflicts (rtx);
349 static void mark_reg_death (rtx);
350 static void set_preference (rtx, rtx);
351 static void dump_conflicts (FILE *);
352 static void reg_becomes_live (rtx, const_rtx, void *);
353 static void reg_dies (int, enum machine_mode, struct insn_chain *);
358 /* Look through the list of eliminable registers. Set ELIM_SET to the
359 set of registers which may be eliminated. Set NO_GLOBAL_SET to the
360 set of registers which may not be used across blocks.
362 This will normally be called with ELIM_SET as the file static
363 variable eliminable_regset, and NO_GLOBAL_SET as the file static
364 variable NO_GLOBAL_ALLOC_REGS. */
366 static void
367 compute_regsets (HARD_REG_SET *elim_set,
368 HARD_REG_SET *no_global_set)
371 /* Like regs_ever_live, but 1 if a reg is set or clobbered from an asm.
372 Unlike regs_ever_live, elements of this array corresponding to
373 eliminable regs like the frame pointer are set if an asm sets them. */
374 char *regs_asm_clobbered = alloca (FIRST_PSEUDO_REGISTER * sizeof (char));
376 #ifdef ELIMINABLE_REGS
377 static const struct {const int from, to; } eliminables[] = ELIMINABLE_REGS;
378 size_t i;
379 #endif
380 int need_fp
381 = (! flag_omit_frame_pointer
382 || (current_function_calls_alloca && EXIT_IGNORE_STACK)
383 || FRAME_POINTER_REQUIRED);
385 max_regno = max_reg_num ();
386 compact_blocks ();
388 max_allocno = 0;
390 /* A machine may have certain hard registers that
391 are safe to use only within a basic block. */
393 CLEAR_HARD_REG_SET (*no_global_set);
394 CLEAR_HARD_REG_SET (*elim_set);
396 compute_regs_asm_clobbered (regs_asm_clobbered);
397 /* Build the regset of all eliminable registers and show we can't use those
398 that we already know won't be eliminated. */
399 #ifdef ELIMINABLE_REGS
400 for (i = 0; i < ARRAY_SIZE (eliminables); i++)
402 bool cannot_elim
403 = (! CAN_ELIMINATE (eliminables[i].from, eliminables[i].to)
404 || (eliminables[i].to == STACK_POINTER_REGNUM && need_fp));
406 if (!regs_asm_clobbered[eliminables[i].from])
408 SET_HARD_REG_BIT (*elim_set, eliminables[i].from);
410 if (cannot_elim)
411 SET_HARD_REG_BIT (*no_global_set, eliminables[i].from);
413 else if (cannot_elim)
414 error ("%s cannot be used in asm here",
415 reg_names[eliminables[i].from]);
416 else
417 df_set_regs_ever_live (eliminables[i].from, true);
419 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
420 if (!regs_asm_clobbered[HARD_FRAME_POINTER_REGNUM])
422 SET_HARD_REG_BIT (*elim_set, HARD_FRAME_POINTER_REGNUM);
423 if (need_fp)
424 SET_HARD_REG_BIT (*no_global_set, HARD_FRAME_POINTER_REGNUM);
426 else if (need_fp)
427 error ("%s cannot be used in asm here",
428 reg_names[HARD_FRAME_POINTER_REGNUM]);
429 else
430 df_set_regs_ever_live (HARD_FRAME_POINTER_REGNUM, true);
431 #endif
433 #else
434 if (!regs_asm_clobbered[FRAME_POINTER_REGNUM])
436 SET_HARD_REG_BIT (*elim_set, FRAME_POINTER_REGNUM);
437 if (need_fp)
438 SET_HARD_REG_BIT (*no_global_set, FRAME_POINTER_REGNUM);
440 else if (need_fp)
441 error ("%s cannot be used in asm here", reg_names[FRAME_POINTER_REGNUM]);
442 else
443 df_set_regs_ever_live (FRAME_POINTER_REGNUM, true);
444 #endif
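/* Editorial illustration: ELIMINABLE_REGS, consumed by compute_regsets
   above, is a target macro expanding to an initializer of {from, to} hard
   register pairs.  A typical definition has roughly the shape below; this
   is illustrative only, the authoritative list lives in each target's
   config/<target>/<target>.h.  */
#if 0
#define ELIMINABLE_REGS						\
 {{ ARG_POINTER_REGNUM,   STACK_POINTER_REGNUM },		\
  { ARG_POINTER_REGNUM,   HARD_FRAME_POINTER_REGNUM },		\
  { FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM },		\
  { FRAME_POINTER_REGNUM, HARD_FRAME_POINTER_REGNUM }}
#endif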
447 /* Perform allocation of pseudo-registers not allocated by local_alloc.
449 Return value is nonzero if reload failed
450 and we must not do any more for this function. */
452 static int
453 global_alloc (void)
455 int retval;
456 size_t i;
458 compute_regsets (&eliminable_regset, &no_global_alloc_regs);
460 /* Track which registers have already been used. Start with registers
461 explicitly in the rtl, then registers allocated by local register
462 allocation. */
464 CLEAR_HARD_REG_SET (regs_used_so_far);
465 #ifdef LEAF_REGISTERS
466 /* If we are doing the leaf function optimization, and this is a leaf
467 function, it means that the registers that take work to save are those
468 that need a register window. So prefer the ones that can be used in
469 a leaf function. */
471 const char *cheap_regs;
472 const char *const leaf_regs = LEAF_REGISTERS;
474 if (only_leaf_regs_used () && leaf_function_p ())
475 cheap_regs = leaf_regs;
476 else
477 cheap_regs = call_used_regs;
478 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
479 if (df_regs_ever_live_p (i) || cheap_regs[i])
480 SET_HARD_REG_BIT (regs_used_so_far, i);
482 #else
483 /* We consider registers that do not have to be saved over calls as if
484 they were already used since there is no cost in using them. */
485 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
486 if (df_regs_ever_live_p (i) || call_used_regs[i])
487 SET_HARD_REG_BIT (regs_used_so_far, i);
488 #endif
490 for (i = FIRST_PSEUDO_REGISTER; i < (size_t) max_regno; i++)
491 if (reg_renumber[i] >= 0)
492 SET_HARD_REG_BIT (regs_used_so_far, reg_renumber[i]);
494 /* Establish mappings from register number to allocation number
495 and vice versa. In the process, count the allocnos. */
497 reg_allocno = XNEWVEC (int, max_regno);
499 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
500 reg_allocno[i] = -1;
502 max_allocno = 0;
503 for (i = FIRST_PSEUDO_REGISTER; i < (size_t) max_regno; i++)
504 /* Note that reg_live_length[i] < 0 indicates a "constant" reg
505 that we are supposed to refrain from putting in a hard reg.
506 -2 means do make an allocno but don't allocate it. */
507 if (REG_N_REFS (i) != 0 && REG_LIVE_LENGTH (i) != -1
508 /* Don't allocate pseudos that cross calls,
509 if this function receives a nonlocal goto. */
510 && (! current_function_has_nonlocal_label
511 || REG_N_CALLS_CROSSED (i) == 0))
513 reg_allocno[i] = max_allocno++;
514 gcc_assert (REG_LIVE_LENGTH (i));
516 else
517 reg_allocno[i] = -1;
519 allocno = XCNEWVEC (struct allocno, max_allocno);
521 for (i = FIRST_PSEUDO_REGISTER; i < (size_t) max_regno; i++)
522 if (reg_allocno[i] >= 0)
524 int num = reg_allocno[i];
525 allocno[num].reg = i;
526 allocno[num].size = PSEUDO_REGNO_SIZE (i);
527 allocno[num].calls_crossed += REG_N_CALLS_CROSSED (i);
528 allocno[num].throwing_calls_crossed
529 += REG_N_THROWING_CALLS_CROSSED (i);
530 allocno[num].n_refs += REG_N_REFS (i);
531 allocno[num].freq += REG_FREQ (i);
532 if (allocno[num].live_length < REG_LIVE_LENGTH (i))
533 allocno[num].live_length = REG_LIVE_LENGTH (i);
536 /* Calculate amount of usage of each hard reg by pseudos
537 allocated by local-alloc. This is to see if we want to
538 override it. */
539 memset (local_reg_live_length, 0, sizeof local_reg_live_length);
540 memset (local_reg_n_refs, 0, sizeof local_reg_n_refs);
541 memset (local_reg_freq, 0, sizeof local_reg_freq);
542 for (i = FIRST_PSEUDO_REGISTER; i < (size_t) max_regno; i++)
543 if (reg_renumber[i] >= 0)
545 int regno = reg_renumber[i];
546 int endregno = end_hard_regno (PSEUDO_REGNO_MODE (i), regno);
547 int j;
549 for (j = regno; j < endregno; j++)
551 local_reg_n_refs[j] += REG_N_REFS (i);
552 local_reg_freq[j] += REG_FREQ (i);
553 local_reg_live_length[j] += REG_LIVE_LENGTH (i);
557 /* We can't override local-alloc for a reg used not just by local-alloc. */
558 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
559 if (df_regs_ever_live_p (i))
560 local_reg_n_refs[i] = 0, local_reg_freq[i] = 0;
562 if (dump_file)
564 for (i = FIRST_PSEUDO_REGISTER; i < (size_t) max_regno; i++)
566 fprintf (dump_file, "%d REG_N_REFS=%d, REG_FREQ=%d, REG_LIVE_LENGTH=%d\n",
567 (int)i, REG_N_REFS (i), REG_FREQ (i), REG_LIVE_LENGTH (i));
569 fprintf (dump_file, "regs_ever_live =");
570 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
571 if (df_regs_ever_live_p (i))
572 fprintf (dump_file, " %d", (int)i);
573 fprintf (dump_file, "\n");
575 allocno_row_words = (max_allocno + INT_BITS - 1) / INT_BITS;
577 /* We used to use alloca here, but the size of what it would try to
578 allocate would occasionally cause it to exceed the stack limit and
579 cause unpredictable core dumps. Some examples were > 2Mb in size. */
580 conflicts = XCNEWVEC (INT_TYPE, max_allocno * allocno_row_words);
582 allocnos_live = XNEWVEC (INT_TYPE, allocno_row_words);
584 /* If there is work to be done (at least one reg to allocate),
585 perform global conflict analysis and allocate the regs. */
587 if (max_allocno > 0)
589 /* Make a vector that mark_reg_{store,clobber} will store in. */
590 if (!regs_set)
591 regs_set = VEC_alloc (rtx, heap, 10);
593 /* Scan all the insns and compute the conflicts among allocnos
594 and between allocnos and hard regs. */
596 global_conflicts ();
598 mirror_conflicts ();
600 /* Eliminate conflicts between pseudos and eliminable registers. If
601 the register is not eliminated, the pseudo won't really be able to
602 live in the eliminable register, so the conflict doesn't matter.
603 If we do eliminate the register, the conflict will no longer exist.
604 So in either case, we can ignore the conflict. Likewise for
605 preferences. */
607 for (i = 0; i < (size_t) max_allocno; i++)
609 AND_COMPL_HARD_REG_SET (allocno[i].hard_reg_conflicts,
610 eliminable_regset);
611 AND_COMPL_HARD_REG_SET (allocno[i].hard_reg_copy_preferences,
612 eliminable_regset);
613 AND_COMPL_HARD_REG_SET (allocno[i].hard_reg_preferences,
614 eliminable_regset);
617 /* Try to expand the preferences by merging them between allocnos. */
619 expand_preferences ();
621 /* Determine the order to allocate the remaining pseudo registers. */
623 allocno_order = XNEWVEC (int, max_allocno);
624 for (i = 0; i < (size_t) max_allocno; i++)
625 allocno_order[i] = i;
627 /* Default the size to 1, since allocno_compare uses it to divide by.
628 Also convert allocno_live_length of zero to -1. A length of zero
629 can occur when all the registers for that allocno have reg_live_length
630 equal to -2. In this case, we want to make an allocno, but not
631 allocate it. So avoid the divide-by-zero and set it to a low
632 priority. */
634 for (i = 0; i < (size_t) max_allocno; i++)
636 if (allocno[i].size == 0)
637 allocno[i].size = 1;
638 if (allocno[i].live_length == 0)
639 allocno[i].live_length = -1;
642 qsort (allocno_order, max_allocno, sizeof (int), allocno_compare);
644 prune_preferences ();
646 if (dump_file)
647 dump_conflicts (dump_file);
649 /* Try allocating them, one by one, in that order,
650 except for parameters marked with reg_live_length[regno] == -2. */
652 for (i = 0; i < (size_t) max_allocno; i++)
653 if (reg_renumber[allocno[allocno_order[i]].reg] < 0
654 && REG_LIVE_LENGTH (allocno[allocno_order[i]].reg) >= 0)
656 if (!dbg_cnt (global_alloc_at_reg))
657 break;
658 /* If we have more than one register class,
659 first try allocating in the class that is cheapest
660 for this pseudo-reg. If that fails, try any reg. */
661 if (N_REG_CLASSES > 1)
663 find_reg (allocno_order[i], 0, 0, 0, 0);
664 if (reg_renumber[allocno[allocno_order[i]].reg] >= 0)
665 continue;
667 if (reg_alternate_class (allocno[allocno_order[i]].reg) != NO_REGS)
668 find_reg (allocno_order[i], 0, 1, 0, 0);
671 free (allocno_order);
674 /* Do the reloads now while the allocno data still exists, so that we can
675 try to assign new hard regs to any pseudo regs that are spilled. */
677 #if 0 /* We need to eliminate regs even if there is no rtl code,
678 for the sake of debugging information. */
679 if (n_basic_blocks > NUM_FIXED_BLOCKS)
680 #endif
682 build_insn_chain (get_insns ());
683 retval = reload (get_insns (), 1);
686 /* Clean up. */
687 free (reg_allocno);
688 free (allocno);
689 free (conflicts);
690 free (allocnos_live);
692 return retval;
695 /* Sort predicate for ordering the allocnos.
696 Returns -1 (1) if *v1 should be allocated before (after) *v2. */
698 static int
699 allocno_compare (const void *v1p, const void *v2p)
701 int v1 = *(const int *)v1p, v2 = *(const int *)v2p;
702 /* Note that the quotient will never be bigger than
703 the value of floor_log2 times the maximum number of
704 times a register can occur in one insn (surely less than 100)
705 weighted by the frequency (maximally REG_FREQ_MAX).
706 Multiplying this by 10000/REG_FREQ_MAX can't overflow. */
707 int pri1
708 = (((double) (floor_log2 (allocno[v1].n_refs) * allocno[v1].freq)
709 / allocno[v1].live_length)
710 * (10000 / REG_FREQ_MAX) * allocno[v1].size);
711 int pri2
712 = (((double) (floor_log2 (allocno[v2].n_refs) * allocno[v2].freq)
713 / allocno[v2].live_length)
714 * (10000 / REG_FREQ_MAX) * allocno[v2].size);
715 if (pri2 - pri1)
716 return pri2 - pri1;
718 /* If regs are equally good, sort by allocno,
719 so that the results of qsort leave nothing to chance. */
720 return v1 - v2;
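/* Editorial worked example for the priority formula above, assuming
   REG_FREQ_MAX == 1000 (its usual value in regs.h): an allocno with
   n_refs == 8, freq == 1000, live_length == 40 and size == 1 gets
   priority (floor_log2 (8) * 1000 / 40) * (10000 / 1000) * 1
   == 75 * 10 == 750; higher priorities are allocated first, because the
   comparison returns pri2 - pri1.  */
#if 0
  int pri_example
    = (((double) (floor_log2 (8) * 1000) / 40) * (10000 / 1000) * 1);
#endif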
723 /* Scan the rtl code and record all conflicts and register preferences in the
724 conflict matrices and preference tables. */
726 static void
727 global_conflicts (void)
729 unsigned i;
730 basic_block b;
731 rtx insn;
732 int *block_start_allocnos;
734 block_start_allocnos = XNEWVEC (int, max_allocno);
736 FOR_EACH_BB (b)
738 memset (allocnos_live, 0, allocno_row_words * sizeof (INT_TYPE));
740 /* Initialize table of registers currently live
741 to the state at the beginning of this basic block.
742 This also marks the conflicts among hard registers
743 and any allocnos that are live.
745 For pseudo-regs, there is only one bit for each one
746 no matter how many hard regs it occupies.
747 This is ok; we know the size from PSEUDO_REGNO_SIZE.
748 For explicit hard regs, we cannot know the size that way
749 since one hard reg can be used with various sizes.
750 Therefore, we must require that all the hard regs
751 implicitly live as part of a multi-word hard reg
752 be explicitly marked in basic_block_live_at_start. */
755 int ax = 0;
756 reg_set_iterator rsi;
758 REG_SET_TO_HARD_REG_SET (hard_regs_live, DF_RA_LIVE_TOP (b));
759 EXECUTE_IF_SET_IN_REG_SET (DF_RA_LIVE_TOP (b), FIRST_PSEUDO_REGISTER, i, rsi)
761 int a = reg_allocno[i];
762 if (a >= 0)
764 SET_ALLOCNO_LIVE (a);
765 block_start_allocnos[ax++] = a;
767 else if ((a = reg_renumber[i]) >= 0)
768 add_to_hard_reg_set (&hard_regs_live, PSEUDO_REGNO_MODE (i), a);
771 /* Record that each allocno now live conflicts with each hard reg
772 now live.
774 It is not necessary to mark any conflicts between pseudos at
775 this point, even for pseudos which are live at the start of
776 the basic block.
778 Given two pseudos X and Y and any point in the CFG P.
780 On any path to point P where X and Y are live one of the
781 following conditions must be true:
783 1. X is live at some instruction on the path that
784 evaluates Y.
786 2. Y is live at some instruction on the path that
787 evaluates X.
789 3. Either X or Y is not evaluated on the path to P
790 (i.e. it is used uninitialized) and thus the
791 conflict can be ignored.
793 In cases #1 and #2 the conflict will be recorded when we
794 scan the instruction that makes either X or Y become live. */
795 record_conflicts (block_start_allocnos, ax);
797 #ifdef EH_RETURN_DATA_REGNO
798 if (bb_has_eh_pred (b))
800 unsigned int i;
802 for (i = 0; ; ++i)
804 unsigned int regno = EH_RETURN_DATA_REGNO (i);
805 if (regno == INVALID_REGNUM)
806 break;
807 record_one_conflict (regno);
810 #endif
812 /* Pseudos can't go in stack regs at the start of a basic block that
813 is reached by an abnormal edge. Likewise for call clobbered regs,
814 because caller-save, fixup_abnormal_edges and possibly the table
815 driven EH machinery are not quite ready to handle such regs live
816 across such edges. */
818 edge e;
819 edge_iterator ei;
821 FOR_EACH_EDGE (e, ei, b->preds)
822 if (e->flags & EDGE_ABNORMAL)
823 break;
825 if (e != NULL)
827 #ifdef STACK_REGS
828 EXECUTE_IF_SET_IN_ALLOCNO_SET (allocnos_live, ax,
830 allocno[ax].no_stack_reg = 1;
832 for (ax = FIRST_STACK_REG; ax <= LAST_STACK_REG; ax++)
833 record_one_conflict (ax);
834 #endif
836 /* No need to record conflicts for call clobbered regs if we have
837 nonlocal labels around, as we don't ever try to allocate such
838 regs in this case. */
839 if (! current_function_has_nonlocal_label)
840 for (ax = 0; ax < FIRST_PSEUDO_REGISTER; ax++)
841 if (call_used_regs [ax])
842 record_one_conflict (ax);
847 insn = BB_HEAD (b);
849 /* Scan the code of this basic block, noting which allocnos
850 and hard regs are born or die. When one is born,
851 record a conflict with all others currently live. */
853 while (1)
855 RTX_CODE code = GET_CODE (insn);
856 rtx link;
858 gcc_assert (VEC_empty (rtx, regs_set));
859 if (code == INSN || code == CALL_INSN || code == JUMP_INSN)
861 #if 0
862 int i = 0;
863 for (link = REG_NOTES (insn);
864 link && i < NUM_NO_CONFLICT_PAIRS;
865 link = XEXP (link, 1))
866 if (REG_NOTE_KIND (link) == REG_NO_CONFLICT)
868 no_conflict_pairs[i].allocno1
869 = reg_allocno[REGNO (SET_DEST (PATTERN (insn)))];
870 no_conflict_pairs[i].allocno2
871 = reg_allocno[REGNO (XEXP (link, 0))];
872 i++;
874 #endif /* 0 */
876 /* Mark any registers clobbered by INSN as live,
877 so they conflict with the inputs. */
879 note_stores (PATTERN (insn), mark_reg_clobber, NULL);
881 #ifdef AUTO_INC_DEC
882 /* Auto-increment instructions clobber the base
883 register. */
884 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
885 if (REG_NOTE_KIND (link) == REG_INC)
886 mark_reg_store (XEXP (link, 0), NULL_RTX, NULL);
887 #endif
888 /* Mark any registers dead after INSN as dead now. */
890 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
891 if (REG_NOTE_KIND (link) == REG_DEAD)
892 mark_reg_death (XEXP (link, 0));
894 /* Mark any registers set in INSN as live,
895 and mark them as conflicting with all other live regs.
896 Clobbers are processed again, so they conflict with
897 the registers that are set. */
899 note_stores (PATTERN (insn), mark_reg_store, NULL);
901 /* If INSN has multiple outputs, then any reg that dies here
902 and is used inside of an output
903 must conflict with the other outputs.
905 It is unsafe to use !single_set here since it will ignore an
906 unused output. Just because an output is unused does not mean
907 the compiler can assume the side effect will not occur.
908 Consider if REG appears in the address of an output and we
909 reload the output. If we allocate REG to the same hard
910 register as an unused output we could set the hard register
911 before the output reload insn. */
912 if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
913 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
914 if (REG_NOTE_KIND (link) == REG_DEAD)
916 int used_in_output = 0;
917 int i;
918 rtx reg = XEXP (link, 0);
920 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
922 rtx set = XVECEXP (PATTERN (insn), 0, i);
923 if (GET_CODE (set) == SET
924 && !REG_P (SET_DEST (set))
925 && !rtx_equal_p (reg, SET_DEST (set))
926 && reg_overlap_mentioned_p (reg, SET_DEST (set)))
927 used_in_output = 1;
929 if (used_in_output)
930 mark_reg_conflicts (reg);
933 /* Mark any registers set in INSN and then never used. */
935 while (!VEC_empty (rtx, regs_set))
937 rtx reg = VEC_pop (rtx, regs_set);
938 rtx note = find_regno_note (insn, REG_UNUSED,
939 REGNO (reg));
940 if (note)
941 mark_reg_death (XEXP (note, 0));
945 if (insn == BB_END (b))
946 break;
947 insn = NEXT_INSN (insn);
951 /* Clean up. */
952 free (block_start_allocnos);
955 /* Expand the preference information by looking for cases where one allocno
956 dies in an insn that sets an allocno. If those two allocnos don't conflict,
957 merge any preferences between those allocnos. */
959 static void
960 expand_preferences (void)
962 rtx insn;
963 rtx link;
964 rtx set;
966 /* We only try to handle the most common cases here. Most of the cases
967 where this wins are reg-reg copies. */
969 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
970 if (INSN_P (insn)
971 && (set = single_set (insn)) != 0
972 && REG_P (SET_DEST (set))
973 && reg_allocno[REGNO (SET_DEST (set))] >= 0)
974 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
975 if (REG_NOTE_KIND (link) == REG_DEAD
976 && REG_P (XEXP (link, 0))
977 && reg_allocno[REGNO (XEXP (link, 0))] >= 0
978 && ! CONFLICTP (reg_allocno[REGNO (SET_DEST (set))],
979 reg_allocno[REGNO (XEXP (link, 0))]))
981 int a1 = reg_allocno[REGNO (SET_DEST (set))];
982 int a2 = reg_allocno[REGNO (XEXP (link, 0))];
984 if (XEXP (link, 0) == SET_SRC (set))
986 IOR_HARD_REG_SET (allocno[a1].hard_reg_copy_preferences,
987 allocno[a2].hard_reg_copy_preferences);
988 IOR_HARD_REG_SET (allocno[a2].hard_reg_copy_preferences,
989 allocno[a1].hard_reg_copy_preferences);
992 IOR_HARD_REG_SET (allocno[a1].hard_reg_preferences,
993 allocno[a2].hard_reg_preferences);
994 IOR_HARD_REG_SET (allocno[a2].hard_reg_preferences,
995 allocno[a1].hard_reg_preferences);
996 IOR_HARD_REG_SET (allocno[a1].hard_reg_full_preferences,
997 allocno[a2].hard_reg_full_preferences);
998 IOR_HARD_REG_SET (allocno[a2].hard_reg_full_preferences,
999 allocno[a1].hard_reg_full_preferences);
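/* Editorial example of the case handled above (register numbers and mode
   purely illustrative): a copy insn of the form

     (set (reg:SI 120) (reg:SI 118))

   carrying a REG_DEAD note for (reg:SI 118).  If the allocnos of 118 and
   120 do not conflict, their (copy) preferences are merged in both
   directions, so the two pseudos tend to get the same hard register and
   the copy can later be removed.  */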
1003 /* Prune the preferences for global registers to exclude registers that cannot
1004 be used.
1006 Compute `regs_someone_prefers', which is a bitmask of the hard registers
1007 that are preferred by conflicting registers of lower priority. If possible,
1008 we will avoid using these registers. */
1010 static void
1011 prune_preferences (void)
1013 int i;
1014 int num;
1015 int *allocno_to_order = XNEWVEC (int, max_allocno);
 1017 /* Scan from least important to most important.
1018 For each allocno, remove from preferences registers that cannot be used,
1019 either because of conflicts or register type. Then compute all registers
1020 preferred by each lower-priority register that conflicts. */
1022 for (i = max_allocno - 1; i >= 0; i--)
1024 HARD_REG_SET temp;
1026 num = allocno_order[i];
1027 allocno_to_order[num] = i;
1028 COPY_HARD_REG_SET (temp, allocno[num].hard_reg_conflicts);
1030 if (allocno[num].calls_crossed == 0)
1031 IOR_HARD_REG_SET (temp, fixed_reg_set);
1032 else
1033 IOR_HARD_REG_SET (temp, call_used_reg_set);
1035 IOR_COMPL_HARD_REG_SET
1036 (temp,
1037 reg_class_contents[(int) reg_preferred_class (allocno[num].reg)]);
1039 AND_COMPL_HARD_REG_SET (allocno[num].hard_reg_preferences, temp);
1040 AND_COMPL_HARD_REG_SET (allocno[num].hard_reg_copy_preferences, temp);
1041 AND_COMPL_HARD_REG_SET (allocno[num].hard_reg_full_preferences, temp);
1044 for (i = max_allocno - 1; i >= 0; i--)
1046 /* Merge in the preferences of lower-priority registers (they have
1047 already been pruned). If we also prefer some of those registers,
1048 don't exclude them unless we are of a smaller size (in which case
1049 we want to give the lower-priority allocno the first chance for
1050 these registers). */
1051 HARD_REG_SET temp, temp2;
1052 int allocno2;
1054 num = allocno_order[i];
1056 CLEAR_HARD_REG_SET (temp);
1057 CLEAR_HARD_REG_SET (temp2);
1059 EXECUTE_IF_SET_IN_ALLOCNO_SET (conflicts + num * allocno_row_words,
1060 allocno2,
1062 if (allocno_to_order[allocno2] > i)
1064 if (allocno[allocno2].size <= allocno[num].size)
1065 IOR_HARD_REG_SET (temp,
1066 allocno[allocno2].hard_reg_full_preferences);
1067 else
1068 IOR_HARD_REG_SET (temp2,
1069 allocno[allocno2].hard_reg_full_preferences);
1073 AND_COMPL_HARD_REG_SET (temp, allocno[num].hard_reg_full_preferences);
1074 IOR_HARD_REG_SET (temp, temp2);
1075 COPY_HARD_REG_SET (allocno[num].regs_someone_prefers, temp);
1077 free (allocno_to_order);
1080 /* Assign a hard register to allocno NUM; look for one that is the beginning
1081 of a long enough stretch of hard regs none of which conflicts with ALLOCNO.
1082 The registers marked in PREFREGS are tried first.
1084 LOSERS, if nonzero, is a HARD_REG_SET indicating registers that cannot
1085 be used for this allocation.
1087 If ALT_REGS_P is zero, consider only the preferred class of ALLOCNO's reg.
1088 Otherwise ignore that preferred class and use the alternate class.
1090 If ACCEPT_CALL_CLOBBERED is nonzero, accept a call-clobbered hard reg that
1091 will have to be saved and restored at calls.
1093 RETRYING is nonzero if this is called from retry_global_alloc.
1095 If we find one, record it in reg_renumber.
1096 If not, do nothing. */
1098 static void
1099 find_reg (int num, HARD_REG_SET losers, int alt_regs_p, int accept_call_clobbered, int retrying)
1101 int i, best_reg, pass;
1102 HARD_REG_SET used, used1, used2;
1104 enum reg_class class = (alt_regs_p
1105 ? reg_alternate_class (allocno[num].reg)
1106 : reg_preferred_class (allocno[num].reg));
1107 enum machine_mode mode = PSEUDO_REGNO_MODE (allocno[num].reg);
1109 if (accept_call_clobbered)
1110 COPY_HARD_REG_SET (used1, call_fixed_reg_set);
1111 else if (allocno[num].calls_crossed == 0)
1112 COPY_HARD_REG_SET (used1, fixed_reg_set);
1113 else
1114 COPY_HARD_REG_SET (used1, call_used_reg_set);
1116 /* Some registers should not be allocated in global-alloc. */
1117 IOR_HARD_REG_SET (used1, no_global_alloc_regs);
1118 if (losers)
1119 IOR_HARD_REG_SET (used1, losers);
1121 IOR_COMPL_HARD_REG_SET (used1, reg_class_contents[(int) class]);
1122 COPY_HARD_REG_SET (used2, used1);
1124 IOR_HARD_REG_SET (used1, allocno[num].hard_reg_conflicts);
1126 #ifdef CANNOT_CHANGE_MODE_CLASS
1127 cannot_change_mode_set_regs (&used1, mode, allocno[num].reg);
1128 #endif
1130 /* Try each hard reg to see if it fits. Do this in two passes.
1131 In the first pass, skip registers that are preferred by some other pseudo
1132 to give it a better chance of getting one of those registers. Only if
1133 we can't get a register when excluding those do we take one of them.
1134 However, we never allocate a register for the first time in pass 0. */
1136 COPY_HARD_REG_SET (used, used1);
1137 IOR_COMPL_HARD_REG_SET (used, regs_used_so_far);
1138 IOR_HARD_REG_SET (used, allocno[num].regs_someone_prefers);
1140 best_reg = -1;
1141 for (i = FIRST_PSEUDO_REGISTER, pass = 0;
1142 pass <= 1 && i >= FIRST_PSEUDO_REGISTER;
1143 pass++)
1145 if (pass == 1)
1146 COPY_HARD_REG_SET (used, used1);
1147 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1149 #ifdef REG_ALLOC_ORDER
1150 int regno = reg_alloc_order[i];
1151 #else
1152 int regno = i;
1153 #endif
1154 if (! TEST_HARD_REG_BIT (used, regno)
1155 && HARD_REGNO_MODE_OK (regno, mode)
1156 && (allocno[num].calls_crossed == 0
1157 || accept_call_clobbered
1158 || ! HARD_REGNO_CALL_PART_CLOBBERED (regno, mode)))
1160 int j;
1161 int lim = end_hard_regno (mode, regno);
1162 for (j = regno + 1;
1163 (j < lim
1164 && ! TEST_HARD_REG_BIT (used, j));
1165 j++);
1166 if (j == lim)
1168 best_reg = regno;
1169 break;
1171 #ifndef REG_ALLOC_ORDER
1172 i = j; /* Skip starting points we know will lose */
1173 #endif
1178 /* See if there is a preferred register with the same class as the register
1179 we allocated above. Making this restriction prevents register
1180 preferencing from creating worse register allocation.
 1182 Remove the conflicting registers from the preferred registers. Note that
 1183 additional conflicts may have been added after `prune_preferences' was
 1184 called.
 1186 First do this for the registers with copy preferences, then for all
 1187 preferred registers. */
1189 AND_COMPL_HARD_REG_SET (allocno[num].hard_reg_copy_preferences, used);
1190 if (!hard_reg_set_empty_p (allocno[num].hard_reg_copy_preferences)
1191 && best_reg >= 0)
1193 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1194 if (TEST_HARD_REG_BIT (allocno[num].hard_reg_copy_preferences, i)
1195 && HARD_REGNO_MODE_OK (i, mode)
1196 && (allocno[num].calls_crossed == 0
1197 || accept_call_clobbered
1198 || ! HARD_REGNO_CALL_PART_CLOBBERED (i, mode))
1199 && (REGNO_REG_CLASS (i) == REGNO_REG_CLASS (best_reg)
1200 || reg_class_subset_p (REGNO_REG_CLASS (i),
1201 REGNO_REG_CLASS (best_reg))
1202 || reg_class_subset_p (REGNO_REG_CLASS (best_reg),
1203 REGNO_REG_CLASS (i))))
1205 int j;
1206 int lim = end_hard_regno (mode, i);
1207 for (j = i + 1;
1208 (j < lim
1209 && ! TEST_HARD_REG_BIT (used, j)
1210 && (REGNO_REG_CLASS (j)
1211 == REGNO_REG_CLASS (best_reg + (j - i))
1212 || reg_class_subset_p (REGNO_REG_CLASS (j),
1213 REGNO_REG_CLASS (best_reg + (j - i)))
1214 || reg_class_subset_p (REGNO_REG_CLASS (best_reg + (j - i)),
1215 REGNO_REG_CLASS (j))));
1216 j++);
1217 if (j == lim)
1219 best_reg = i;
1220 goto no_prefs;
1225 AND_COMPL_HARD_REG_SET (allocno[num].hard_reg_preferences, used);
1226 if (!hard_reg_set_empty_p (allocno[num].hard_reg_preferences)
1227 && best_reg >= 0)
1229 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1230 if (TEST_HARD_REG_BIT (allocno[num].hard_reg_preferences, i)
1231 && HARD_REGNO_MODE_OK (i, mode)
1232 && (allocno[num].calls_crossed == 0
1233 || accept_call_clobbered
1234 || ! HARD_REGNO_CALL_PART_CLOBBERED (i, mode))
1235 && (REGNO_REG_CLASS (i) == REGNO_REG_CLASS (best_reg)
1236 || reg_class_subset_p (REGNO_REG_CLASS (i),
1237 REGNO_REG_CLASS (best_reg))
1238 || reg_class_subset_p (REGNO_REG_CLASS (best_reg),
1239 REGNO_REG_CLASS (i))))
1241 int j;
1242 int lim = end_hard_regno (mode, i);
1243 for (j = i + 1;
1244 (j < lim
1245 && ! TEST_HARD_REG_BIT (used, j)
1246 && (REGNO_REG_CLASS (j)
1247 == REGNO_REG_CLASS (best_reg + (j - i))
1248 || reg_class_subset_p (REGNO_REG_CLASS (j),
1249 REGNO_REG_CLASS (best_reg + (j - i)))
1250 || reg_class_subset_p (REGNO_REG_CLASS (best_reg + (j - i)),
1251 REGNO_REG_CLASS (j))));
1252 j++);
1253 if (j == lim)
1255 best_reg = i;
1256 break;
1260 no_prefs:
1262 /* If we haven't succeeded yet, try with caller-saves.
1263 We need not check to see if the current function has nonlocal
1264 labels because we don't put any pseudos that are live over calls in
1265 registers in that case. */
1267 if (flag_caller_saves && best_reg < 0)
1269 /* Did not find a register. If it would be profitable to
1270 allocate a call-clobbered register and save and restore it
1271 around calls, do that. Don't do this if it crosses any calls
1272 that might throw. */
1273 if (! accept_call_clobbered
1274 && allocno[num].calls_crossed != 0
1275 && allocno[num].throwing_calls_crossed == 0
1276 && CALLER_SAVE_PROFITABLE (allocno[num].n_refs,
1277 allocno[num].calls_crossed))
1279 HARD_REG_SET new_losers;
1280 if (! losers)
1281 CLEAR_HARD_REG_SET (new_losers);
1282 else
1283 COPY_HARD_REG_SET (new_losers, losers);
1285 IOR_HARD_REG_SET(new_losers, losing_caller_save_reg_set);
1286 find_reg (num, new_losers, alt_regs_p, 1, retrying);
1287 if (reg_renumber[allocno[num].reg] >= 0)
1289 caller_save_needed = 1;
1290 return;
1295 /* If we haven't succeeded yet,
1296 see if some hard reg that conflicts with us
1297 was utilized poorly by local-alloc.
1298 If so, kick out the regs that were put there by local-alloc
1299 so we can use it instead. */
1300 if (best_reg < 0 && !retrying
1301 /* Let's not bother with multi-reg allocnos. */
1302 && allocno[num].size == 1
1303 && REG_BASIC_BLOCK (allocno[num].reg) == REG_BLOCK_GLOBAL)
1305 /* Count from the end, to find the least-used ones first. */
1306 for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
1308 #ifdef REG_ALLOC_ORDER
1309 int regno = reg_alloc_order[i];
1310 #else
1311 int regno = i;
1312 #endif
1314 if (local_reg_n_refs[regno] != 0
1315 /* Don't use a reg no good for this pseudo. */
1316 && ! TEST_HARD_REG_BIT (used2, regno)
1317 && HARD_REGNO_MODE_OK (regno, mode)
1318 /* The code below assumes that we need only a single
1319 register, but the check of allocno[num].size above
1320 was not enough. Sometimes we need more than one
1321 register for a single-word value. */
1322 && hard_regno_nregs[regno][mode] == 1
1323 && (allocno[num].calls_crossed == 0
1324 || accept_call_clobbered
1325 || ! HARD_REGNO_CALL_PART_CLOBBERED (regno, mode))
1326 #ifdef CANNOT_CHANGE_MODE_CLASS
1327 && ! invalid_mode_change_p (regno, REGNO_REG_CLASS (regno),
1328 mode)
1329 #endif
1330 #ifdef STACK_REGS
1331 && (!allocno[num].no_stack_reg
1332 || regno < FIRST_STACK_REG || regno > LAST_STACK_REG)
1333 #endif
1336 /* We explicitly evaluate the divide results into temporary
1337 variables so as to avoid excess precision problems that occur
1338 on an i386-unknown-sysv4.2 (unixware) host. */
1340 double tmp1 = ((double) local_reg_freq[regno] * local_reg_n_refs[regno]
1341 / local_reg_live_length[regno]);
1342 double tmp2 = ((double) allocno[num].freq * allocno[num].n_refs
1343 / allocno[num].live_length);
1345 if (tmp1 < tmp2)
1347 /* Hard reg REGNO was used less in total by local regs
1348 than it would be used by this one allocno! */
1349 int k;
1350 if (dump_file)
1352 fprintf (dump_file, "Regno %d better for global %d, ",
1353 regno, allocno[num].reg);
1354 fprintf (dump_file, "fr:%d, ll:%d, nr:%d ",
1355 allocno[num].freq, allocno[num].live_length,
1356 allocno[num].n_refs);
1357 fprintf (dump_file, "(was: fr:%d, ll:%d, nr:%d)\n",
1358 local_reg_freq[regno],
1359 local_reg_live_length[regno],
1360 local_reg_n_refs[regno]);
1363 for (k = 0; k < max_regno; k++)
1364 if (reg_renumber[k] >= 0)
1366 int r = reg_renumber[k];
1367 int endregno
1368 = end_hard_regno (PSEUDO_REGNO_MODE (k), r);
1370 if (regno >= r && regno < endregno)
1372 if (dump_file)
1373 fprintf (dump_file,
1374 "Local Reg %d now on stack\n", k);
1375 reg_renumber[k] = -1;
1379 best_reg = regno;
1380 break;
1386 /* Did we find a register? */
1388 if (best_reg >= 0)
1390 int lim, j;
1391 HARD_REG_SET this_reg;
1393 /* Yes. Record it as the hard register of this pseudo-reg. */
1394 reg_renumber[allocno[num].reg] = best_reg;
1396 /* Make a set of the hard regs being allocated. */
1397 CLEAR_HARD_REG_SET (this_reg);
1398 lim = end_hard_regno (mode, best_reg);
1399 for (j = best_reg; j < lim; j++)
1401 SET_HARD_REG_BIT (this_reg, j);
1402 SET_HARD_REG_BIT (regs_used_so_far, j);
1403 /* This is no longer a reg used just by local regs. */
1404 local_reg_n_refs[j] = 0;
1405 local_reg_freq[j] = 0;
1407 /* For each other pseudo-reg conflicting with this one,
1408 mark it as conflicting with the hard regs this one occupies. */
1409 lim = num;
1410 EXECUTE_IF_SET_IN_ALLOCNO_SET (conflicts + lim * allocno_row_words, j,
1412 IOR_HARD_REG_SET (allocno[j].hard_reg_conflicts, this_reg);
1417 /* Called from `reload' to look for a hard reg to put pseudo reg REGNO in.
1418 Perhaps it had previously seemed not worth a hard reg,
1419 or perhaps its old hard reg has been commandeered for reloads.
1420 FORBIDDEN_REGS indicates certain hard regs that may not be used, even if
1421 they do not appear to be allocated.
1422 If FORBIDDEN_REGS is zero, no regs are forbidden. */
1424 void
1425 retry_global_alloc (int regno, HARD_REG_SET forbidden_regs)
1427 int alloc_no = reg_allocno[regno];
1428 if (alloc_no >= 0)
1430 /* If we have more than one register class,
1431 first try allocating in the class that is cheapest
1432 for this pseudo-reg. If that fails, try any reg. */
1433 if (N_REG_CLASSES > 1)
1434 find_reg (alloc_no, forbidden_regs, 0, 0, 1);
1435 if (reg_renumber[regno] < 0
1436 && reg_alternate_class (regno) != NO_REGS)
1437 find_reg (alloc_no, forbidden_regs, 1, 0, 1);
1439 /* If we found a register, modify the RTL for the register to
1440 show the hard register, and mark that register live. */
1441 if (reg_renumber[regno] >= 0)
1443 SET_REGNO (regno_reg_rtx[regno], reg_renumber[regno]);
1444 mark_home_live (regno);
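/* Editorial note: the caller of this function is reload (reload1.c).  When
   a pseudo has lost its hard register to spilling, reload retries with a
   call of roughly this shape, where FORBIDDEN_REGS holds the hard
   registers reload has reserved for its own use.  Sketch only; see
   reload1.c for the authoritative call site.  */
#if 0
  retry_global_alloc (regno, forbidden_regs);
#endif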
1449 /* Record a conflict between register REGNO
1450 and everything currently live.
1451 REGNO must not be a pseudo reg that was allocated
1452 by local_alloc; such numbers must be translated through
1453 reg_renumber before calling here. */
1455 static void
1456 record_one_conflict (int regno)
1458 int j;
1460 if (regno < FIRST_PSEUDO_REGISTER)
1461 /* When a hard register becomes live,
1462 record conflicts with live pseudo regs. */
1463 EXECUTE_IF_SET_IN_ALLOCNO_SET (allocnos_live, j,
1465 SET_HARD_REG_BIT (allocno[j].hard_reg_conflicts, regno);
1467 else
1468 /* When a pseudo-register becomes live,
1469 record conflicts first with hard regs,
1470 then with other pseudo regs. */
1472 int ialloc = reg_allocno[regno];
1473 int ialloc_prod = ialloc * allocno_row_words;
1475 IOR_HARD_REG_SET (allocno[ialloc].hard_reg_conflicts, hard_regs_live);
1476 for (j = allocno_row_words - 1; j >= 0; j--)
1477 conflicts[ialloc_prod + j] |= allocnos_live[j];
1481 /* Record all allocnos currently live as conflicting
1482 with all hard regs currently live.
1484 ALLOCNO_VEC is a vector of LEN allocnos, all allocnos that
1485 are currently live. Their bits are also flagged in allocnos_live. */
1487 static void
1488 record_conflicts (int *allocno_vec, int len)
1490 while (--len >= 0)
1491 IOR_HARD_REG_SET (allocno[allocno_vec[len]].hard_reg_conflicts,
1492 hard_regs_live);
1495 /* If CONFLICTP (i, j) is true, make sure CONFLICTP (j, i) is also true. */
1496 static void
1497 mirror_conflicts (void)
1499 int i, j;
1500 int rw = allocno_row_words;
1501 int rwb = rw * INT_BITS;
1502 INT_TYPE *p = conflicts;
1503 INT_TYPE *q0 = conflicts, *q1, *q2;
1504 unsigned INT_TYPE mask;
1506 for (i = max_allocno - 1, mask = 1; i >= 0; i--, mask <<= 1)
1508 if (! mask)
1510 mask = 1;
1511 q0++;
1513 for (j = allocno_row_words - 1, q1 = q0; j >= 0; j--, q1 += rwb)
1515 unsigned INT_TYPE word;
1517 for (word = (unsigned INT_TYPE) *p++, q2 = q1; word;
1518 word >>= 1, q2 += rw)
1520 if (word & 1)
1521 *q2 |= mask;
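/* Editorial sketch: a simpler but slower formulation of mirror_conflicts,
   written with the CONFLICTP addressing from above, to make explicit what
   the word-at-a-time loop computes: every ordered pair (i, j) already
   marked gets its transpose (j, i) marked as well.  Illustration only.  */
#if 0
static void
mirror_conflicts_naive (void)
{
  int i, j;
  for (i = 0; i < max_allocno; i++)
    for (j = 0; j < max_allocno; j++)
      if (CONFLICTP (i, j))
	conflicts[j * allocno_row_words + (unsigned) i / INT_BITS]
	  |= ((INT_TYPE) 1 << ((unsigned) i % INT_BITS));
}
#endif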
1527 /* Handle the case where REG is set by the insn being scanned,
1528 during the forward scan to accumulate conflicts.
1529 Store a 1 in regs_live or allocnos_live for this register, record how many
1530 consecutive hardware registers it actually needs,
1531 and record a conflict with all other registers already live.
1533 Note that even if REG does not remain alive after this insn,
1534 we must mark it here as live, to ensure a conflict between
1535 REG and any other regs set in this insn that really do live.
1536 This is because those other regs could be considered after this.
1538 REG might actually be something other than a register;
1539 if so, we do nothing.
1541 SETTER is 0 if this register was modified by an auto-increment (i.e.,
1542 a REG_INC note was found for it). */
1544 static void
1545 mark_reg_store (rtx reg, const_rtx setter, void *data ATTRIBUTE_UNUSED)
1547 int regno;
1549 if (GET_CODE (reg) == SUBREG)
1550 reg = SUBREG_REG (reg);
1552 if (!REG_P (reg))
1553 return;
1555 VEC_safe_push (rtx, heap, regs_set, reg);
1557 if (setter && GET_CODE (setter) != CLOBBER)
1558 set_preference (reg, SET_SRC (setter));
1560 regno = REGNO (reg);
1562 /* Either this is one of the max_allocno pseudo regs not allocated,
1563 or it is or has a hardware reg. First handle the pseudo-regs. */
1564 if (regno >= FIRST_PSEUDO_REGISTER)
1566 if (reg_allocno[regno] >= 0)
1568 SET_ALLOCNO_LIVE (reg_allocno[regno]);
1569 record_one_conflict (regno);
1573 if (reg_renumber[regno] >= 0)
1574 regno = reg_renumber[regno];
1576 /* Handle hardware regs (and pseudos allocated to hard regs). */
1577 if (regno < FIRST_PSEUDO_REGISTER && ! fixed_regs[regno])
1579 int last = end_hard_regno (GET_MODE (reg), regno);
1580 while (regno < last)
1582 record_one_conflict (regno);
1583 SET_HARD_REG_BIT (hard_regs_live, regno);
1584 regno++;
1589 /* Like mark_reg_store except notice just CLOBBERs; ignore SETs. */
1591 static void
1592 mark_reg_clobber (rtx reg, const_rtx setter, void *data)
1594 if (GET_CODE (setter) == CLOBBER)
1595 mark_reg_store (reg, setter, data);
1598 /* Record that REG has conflicts with all the regs currently live.
1599 Do not mark REG itself as live. */
1601 static void
1602 mark_reg_conflicts (rtx reg)
1604 int regno;
1606 if (GET_CODE (reg) == SUBREG)
1607 reg = SUBREG_REG (reg);
1609 if (!REG_P (reg))
1610 return;
1612 regno = REGNO (reg);
1614 /* Either this is one of the max_allocno pseudo regs not allocated,
1615 or it is or has a hardware reg. First handle the pseudo-regs. */
1616 if (regno >= FIRST_PSEUDO_REGISTER)
1618 if (reg_allocno[regno] >= 0)
1619 record_one_conflict (regno);
1622 if (reg_renumber[regno] >= 0)
1623 regno = reg_renumber[regno];
1625 /* Handle hardware regs (and pseudos allocated to hard regs). */
1626 if (regno < FIRST_PSEUDO_REGISTER && ! fixed_regs[regno])
1628 int last = end_hard_regno (GET_MODE (reg), regno);
1629 while (regno < last)
1631 record_one_conflict (regno);
1632 regno++;
1637 /* Mark REG as being dead (following the insn being scanned now).
1638 Store a 0 in regs_live or allocnos_live for this register. */
1640 static void
1641 mark_reg_death (rtx reg)
1643 int regno = REGNO (reg);
1645 /* Either this is one of the max_allocno pseudo regs not allocated,
1646 or it is a hardware reg. First handle the pseudo-regs. */
1647 if (regno >= FIRST_PSEUDO_REGISTER)
1649 if (reg_allocno[regno] >= 0)
1650 CLEAR_ALLOCNO_LIVE (reg_allocno[regno]);
1653 /* For pseudo reg, see if it has been assigned a hardware reg. */
1654 if (reg_renumber[regno] >= 0)
1655 regno = reg_renumber[regno];
1657 /* Handle hardware regs (and pseudos allocated to hard regs). */
1658 if (regno < FIRST_PSEUDO_REGISTER && ! fixed_regs[regno])
1659 /* Pseudo regs already assigned hardware regs are treated
1660 almost the same as explicit hardware regs. */
1661 remove_from_hard_reg_set (&hard_regs_live, GET_MODE (reg), regno);
1664 /* Try to set a preference for an allocno to a hard register.
1665 We are passed DEST and SRC which are the operands of a SET. It is known
 1666 that DEST is a register. If SRC or the first operand of SRC is a register,
1667 try to set a preference. If one of the two is a hard register and the other
1668 is a pseudo-register, mark the preference.
1670 Note that we are not as aggressive as local-alloc in trying to tie a
1671 pseudo-register to a hard register. */
1673 static void
1674 set_preference (rtx dest, rtx src)
1676 unsigned int src_regno, dest_regno, end_regno;
1677 /* Amount to add to the hard regno for SRC, or subtract from that for DEST,
1678 to compensate for subregs in SRC or DEST. */
1679 int offset = 0;
1680 unsigned int i;
1681 int copy = 1;
1683 if (GET_RTX_FORMAT (GET_CODE (src))[0] == 'e')
1684 src = XEXP (src, 0), copy = 0;
1686 /* Get the reg number for both SRC and DEST.
1687 If neither is a reg, give up. */
1689 if (REG_P (src))
1690 src_regno = REGNO (src);
1691 else if (GET_CODE (src) == SUBREG && REG_P (SUBREG_REG (src)))
1693 src_regno = REGNO (SUBREG_REG (src));
1695 if (REGNO (SUBREG_REG (src)) < FIRST_PSEUDO_REGISTER)
1696 offset += subreg_regno_offset (REGNO (SUBREG_REG (src)),
1697 GET_MODE (SUBREG_REG (src)),
1698 SUBREG_BYTE (src),
1699 GET_MODE (src));
1700 else
1701 offset += (SUBREG_BYTE (src)
1702 / REGMODE_NATURAL_SIZE (GET_MODE (src)));
1704 else
1705 return;
1707 if (REG_P (dest))
1708 dest_regno = REGNO (dest);
1709 else if (GET_CODE (dest) == SUBREG && REG_P (SUBREG_REG (dest)))
1711 dest_regno = REGNO (SUBREG_REG (dest));
1713 if (REGNO (SUBREG_REG (dest)) < FIRST_PSEUDO_REGISTER)
1714 offset -= subreg_regno_offset (REGNO (SUBREG_REG (dest)),
1715 GET_MODE (SUBREG_REG (dest)),
1716 SUBREG_BYTE (dest),
1717 GET_MODE (dest));
1718 else
1719 offset -= (SUBREG_BYTE (dest)
1720 / REGMODE_NATURAL_SIZE (GET_MODE (dest)));
1722 else
1723 return;
1725 /* Convert either or both to hard reg numbers. */
1727 if (reg_renumber[src_regno] >= 0)
1728 src_regno = reg_renumber[src_regno];
1730 if (reg_renumber[dest_regno] >= 0)
1731 dest_regno = reg_renumber[dest_regno];
1733 /* Now if one is a hard reg and the other is a global pseudo
1734 then give the other a preference. */
1736 if (dest_regno < FIRST_PSEUDO_REGISTER && src_regno >= FIRST_PSEUDO_REGISTER
1737 && reg_allocno[src_regno] >= 0)
1739 dest_regno -= offset;
1740 if (dest_regno < FIRST_PSEUDO_REGISTER)
1742 if (copy)
1743 SET_REGBIT (hard_reg_copy_preferences,
1744 reg_allocno[src_regno], dest_regno);
1746 SET_REGBIT (hard_reg_preferences,
1747 reg_allocno[src_regno], dest_regno);
1748 end_regno = end_hard_regno (GET_MODE (dest), dest_regno);
1749 for (i = dest_regno; i < end_regno; i++)
1750 SET_REGBIT (hard_reg_full_preferences, reg_allocno[src_regno], i);
1754 if (src_regno < FIRST_PSEUDO_REGISTER && dest_regno >= FIRST_PSEUDO_REGISTER
1755 && reg_allocno[dest_regno] >= 0)
1757 src_regno += offset;
1758 if (src_regno < FIRST_PSEUDO_REGISTER)
1760 if (copy)
1761 SET_REGBIT (hard_reg_copy_preferences,
1762 reg_allocno[dest_regno], src_regno);
1764 SET_REGBIT (hard_reg_preferences,
1765 reg_allocno[dest_regno], src_regno);
1766 end_regno = end_hard_regno (GET_MODE (src), src_regno);
1767 for (i = src_regno; i < end_regno; i++)
1768 SET_REGBIT (hard_reg_full_preferences, reg_allocno[dest_regno], i);
1773 /* Indicate that hard register number FROM was eliminated and replaced with
1774 an offset from hard register number TO. The status of hard registers live
1775 at the start of a basic block is updated by replacing a use of FROM with
1776 a use of TO. */
1778 void
1779 mark_elimination (int from, int to)
1781 basic_block bb;
1783 FOR_EACH_BB (bb)
1785 regset r = DF_RA_LIVE_IN (bb);
1786 if (REGNO_REG_SET_P (r, from))
1788 CLEAR_REGNO_REG_SET (r, from);
1789 SET_REGNO_REG_SET (r, to);
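/* Editorial usage sketch (the call site is in reload1.c, not here): once
   the frame pointer has been eliminated in favor of the stack pointer,
   the per-block live-in sets are fixed up with roughly

     mark_elimination (FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM);

   so later consumers see the replacement register, not the eliminated
   one, as live at block starts.  */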
1794 /* Used for communication between the following functions. Holds the
1795 current life information. */
1796 static regset live_relevant_regs;
1798 /* Record in live_relevant_regs and REGS_SET that register REG became live.
1799 This is called via note_stores. */
static void
reg_becomes_live (rtx reg, const_rtx setter ATTRIBUTE_UNUSED, void *regs_set)
{
  int regno;

  if (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);

  if (!REG_P (reg))
    return;

  regno = REGNO (reg);
  if (regno < FIRST_PSEUDO_REGISTER)
    {
      int nregs = hard_regno_nregs[regno][GET_MODE (reg)];
      while (nregs-- > 0)
        {
          if (GET_CODE (setter) == CLOBBER)
            CLEAR_REGNO_REG_SET (live_relevant_regs, regno);
          else
            SET_REGNO_REG_SET (live_relevant_regs, regno);

          if (!fixed_regs[regno])
            SET_REGNO_REG_SET ((regset) regs_set, regno);
          regno++;
        }
    }
  else if (reg_renumber[regno] >= 0)
    {
      if (GET_CODE (setter) == CLOBBER)
        CLEAR_REGNO_REG_SET (live_relevant_regs, regno);
      else
        SET_REGNO_REG_SET (live_relevant_regs, regno);
      SET_REGNO_REG_SET ((regset) regs_set, regno);
    }
}
/* Record in live_relevant_regs that register REGNO died.  */
static void
reg_dies (int regno, enum machine_mode mode, struct insn_chain *chain)
{
  if (regno < FIRST_PSEUDO_REGISTER)
    {
      int nregs = hard_regno_nregs[regno][mode];
      while (nregs-- > 0)
        {
          CLEAR_REGNO_REG_SET (live_relevant_regs, regno);
          if (! fixed_regs[regno])
            SET_REGNO_REG_SET (&chain->dead_or_set, regno);
          regno++;
        }
    }
  else
    {
      CLEAR_REGNO_REG_SET (live_relevant_regs, regno);
      if (reg_renumber[regno] >= 0)
        SET_REGNO_REG_SET (&chain->dead_or_set, regno);
    }
}
/* Walk the insns of the current function and build reload_insn_chain,
   and record register life information.  */
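/* Each chain element describes one insn: LIVE_THROUGHOUT receives the
   registers that are live across the insn (computed after its REG_DEAD
   notes are processed but before its own sets), while DEAD_OR_SET collects
   the registers that die in the insn or are set by it.  Reload walks this
   list through reload_insn_chain instead of recomputing liveness.  */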
void
build_insn_chain (rtx first)
{
  struct insn_chain **p = &reload_insn_chain;
  struct insn_chain *prev = 0;
  basic_block b = ENTRY_BLOCK_PTR->next_bb;

  live_relevant_regs = ALLOC_REG_SET (&reg_obstack);

  for (; first; first = NEXT_INSN (first))
    {
      struct insn_chain *c;

      if (first == BB_HEAD (b))
        {
          unsigned i;
          bitmap_iterator bi;

          CLEAR_REG_SET (live_relevant_regs);

          EXECUTE_IF_SET_IN_BITMAP (df_get_live_top (b), 0, i, bi)
            {
              if (i < FIRST_PSEUDO_REGISTER
                  ? ! TEST_HARD_REG_BIT (eliminable_regset, i)
                  : reg_renumber[i] >= 0)
                SET_REGNO_REG_SET (live_relevant_regs, i);
            }
        }

      if (!NOTE_P (first) && !BARRIER_P (first))
        {
          c = new_insn_chain ();
          c->prev = prev;
          prev = c;
          *p = c;
          p = &c->next;
          c->insn = first;
          c->block = b->index;

          if (INSN_P (first))
            {
              rtx link;

              /* Mark the death of everything that dies in this instruction.  */

              for (link = REG_NOTES (first); link; link = XEXP (link, 1))
                if (REG_NOTE_KIND (link) == REG_DEAD
                    && REG_P (XEXP (link, 0)))
                  reg_dies (REGNO (XEXP (link, 0)), GET_MODE (XEXP (link, 0)),
                            c);

              COPY_REG_SET (&c->live_throughout, live_relevant_regs);

              /* Mark everything born in this instruction as live.  */

              note_stores (PATTERN (first), reg_becomes_live,
                           &c->dead_or_set);
            }
          else
            COPY_REG_SET (&c->live_throughout, live_relevant_regs);

          if (INSN_P (first))
            {
              rtx link;

              /* Mark anything that is set in this insn and then unused as dying.  */

              for (link = REG_NOTES (first); link; link = XEXP (link, 1))
                if (REG_NOTE_KIND (link) == REG_UNUSED
                    && REG_P (XEXP (link, 0)))
                  reg_dies (REGNO (XEXP (link, 0)), GET_MODE (XEXP (link, 0)),
                            c);
            }
        }

      if (first == BB_END (b))
        b = b->next_bb;

      /* Stop after we pass the end of the last basic block.  Verify that
         no real insns are after the end of the last basic block.

         We may want to reorganize the loop somewhat since this test should
         always be the right exit test.  Allow an ADDR_VEC or ADDR_DIFF_VEC
         if the previous real insn is a JUMP_INSN.  */
      if (b == EXIT_BLOCK_PTR)
        {
#ifdef ENABLE_CHECKING
          for (first = NEXT_INSN (first); first; first = NEXT_INSN (first))
            gcc_assert (!INSN_P (first)
                        || GET_CODE (PATTERN (first)) == USE
                        || ((GET_CODE (PATTERN (first)) == ADDR_VEC
                             || GET_CODE (PATTERN (first)) == ADDR_DIFF_VEC)
                            && prev_real_insn (first) != 0
                            && JUMP_P (prev_real_insn (first))));
#endif
          break;
        }
    }
  FREE_REG_SET (live_relevant_regs);
  *p = 0;
}
/* Print debugging trace information if -dg switch is given,
   showing the information on which the allocation decisions are based.  */
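/* Purely illustrative sample of the resulting dump (the register numbers
   are made up):

     ;; 3 regs to allocate: 132 128+129 (2) 130
     ;; 132 conflicts: 128 130 0 1
     ;; 132 preferences: 1

   "+129" names another pseudo sharing the same allocno, "(2)" is the
   allocno's size, and numbers below FIRST_PSEUDO_REGISTER at the end of a
   conflict line are hard registers.  */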
static void
dump_conflicts (FILE *file)
{
  int i;
  int has_preferences;
  int nregs;
  nregs = 0;
  for (i = 0; i < max_allocno; i++)
    {
      if (reg_renumber[allocno[allocno_order[i]].reg] >= 0)
        continue;
      nregs++;
    }
  fprintf (file, ";; %d regs to allocate:", nregs);
  for (i = 0; i < max_allocno; i++)
    {
      int j;
      if (reg_renumber[allocno[allocno_order[i]].reg] >= 0)
        continue;
      fprintf (file, " %d", allocno[allocno_order[i]].reg);
      for (j = 0; j < max_regno; j++)
        if (reg_allocno[j] == allocno_order[i]
            && j != allocno[allocno_order[i]].reg)
          fprintf (file, "+%d", j);
      if (allocno[allocno_order[i]].size != 1)
        fprintf (file, " (%d)", allocno[allocno_order[i]].size);
    }
  fprintf (file, "\n");
  for (i = 0; i < max_allocno; i++)
    {
      int j;
      fprintf (file, ";; %d conflicts:", allocno[i].reg);
      for (j = 0; j < max_allocno; j++)
        if (CONFLICTP (j, i))
          fprintf (file, " %d", allocno[j].reg);
      for (j = 0; j < FIRST_PSEUDO_REGISTER; j++)
        if (TEST_HARD_REG_BIT (allocno[i].hard_reg_conflicts, j))
          fprintf (file, " %d", j);
      fprintf (file, "\n");

      has_preferences = 0;
      for (j = 0; j < FIRST_PSEUDO_REGISTER; j++)
        if (TEST_HARD_REG_BIT (allocno[i].hard_reg_preferences, j))
          has_preferences = 1;

      if (! has_preferences)
        continue;
      fprintf (file, ";; %d preferences:", allocno[i].reg);
      for (j = 0; j < FIRST_PSEUDO_REGISTER; j++)
        if (TEST_HARD_REG_BIT (allocno[i].hard_reg_preferences, j))
          fprintf (file, " %d", j);
      fprintf (file, "\n");
    }
  fprintf (file, "\n");
}
void
dump_global_regs (FILE *file)
{
  int i, j;

  fprintf (file, ";; Register dispositions:\n");
  for (i = FIRST_PSEUDO_REGISTER, j = 0; i < max_regno; i++)
    if (reg_renumber[i] >= 0)
      {
        fprintf (file, "%d in %d ", i, reg_renumber[i]);
        if (++j % 6 == 0)
          fprintf (file, "\n");
      }

  fprintf (file, "\n\n;; Hard regs used: ");
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (df_regs_ever_live_p (i))
      fprintf (file, " %d", i);
  fprintf (file, "\n\n");
}
/* Run the old register allocator.  Return TRUE if we must exit
   rest_of_compilation upon return.  */
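/* The else arm below is used at -O0 or when the global_alloc_at_func debug
   counter (see -fdbg-cnt) cuts allocation off: no new hard registers are
   assigned, only the register sets and insn chain that reload needs are
   built, and reload is run directly.  */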
static unsigned int
rest_of_handle_global_alloc (void)
{
  bool failure;

  /* If optimizing, allocate remaining pseudo-regs.  Do the reload
     pass, fixing up any insns that are invalid.  */
  if (optimize && dbg_cnt (global_alloc_at_func))
    failure = global_alloc ();
  else
    {
      compute_regsets (&eliminable_regset, &no_global_alloc_regs);
      build_insn_chain (get_insns ());
      df_set_flags (DF_NO_INSN_RESCAN);
      failure = reload (get_insns (), 0);
    }

  if (dump_enabled_p (pass_global_alloc.static_pass_number))
    {
      timevar_push (TV_DUMP);
      dump_global_regs (dump_file);
      timevar_pop (TV_DUMP);
    }

  /* FIXME: This appears on the surface to be the wrong thing to be doing.
     So much of the compiler is designed to check reload_completed to
     see if it is running after reload that this seems doomed to failure.
     We should be returning a value that says that we have found
     errors so that nothing but the cleanup passes are run
     afterwards.  */
  gcc_assert (reload_completed || failure);
  reload_completed = !failure;

  /* The world has changed so much that at this point we might as well
     just rescan everything.  Note that df_rescan_all_insns is not
     going to help here because it does not touch the artificial uses
     and defs.  */
  df_finish_pass (true);
  if (optimize > 1)
    df_live_add_problem ();
  df_scan_alloc (NULL);
  df_scan_blocks ();

  if (optimize)
    df_analyze ();

  regstat_free_n_sets_and_refs ();
  regstat_free_ri ();
  return 0;
}
struct tree_opt_pass pass_global_alloc =
{
  "greg",                               /* name */
  NULL,                                 /* gate */
  rest_of_handle_global_alloc,          /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_GLOBAL_ALLOC,                      /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func |
  TODO_ggc_collect,                     /* todo_flags_finish */
  'g'                                   /* letter */
};