Merge from the pain train
[official-gcc.git] / gcc / global.c
blobedf974291b985cbb67d6879e0428fdb509ffcea1
1 /* Allocate registers for pseudo-registers that span basic blocks.
2 Copyright (C) 1987, 1988, 1991, 1994, 1996, 1997, 1998,
3 1999, 2000, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "machmode.h"
28 #include "hard-reg-set.h"
29 #include "rtl.h"
30 #include "tm_p.h"
31 #include "flags.h"
32 #include "regs.h"
33 #include "function.h"
34 #include "insn-config.h"
35 #include "recog.h"
36 #include "reload.h"
37 #include "output.h"
38 #include "toplev.h"
40 /* This pass of the compiler performs global register allocation.
41 It assigns hard register numbers to all the pseudo registers
42 that were not handled in local_alloc. Assignments are recorded
43 in the vector reg_renumber, not by changing the rtl code.
44 (Such changes are made by final). The entry point is
45 the function global_alloc.
47 After allocation is complete, the reload pass is run as a subroutine
48 of this pass, so that when a pseudo reg loses its hard reg due to
49 spilling it is possible to make a second attempt to find a hard
50 reg for it. The reload pass is independent in other respects
51 and it is run even when stupid register allocation is in use.
53 1. Assign allocation-numbers (allocnos) to the pseudo-registers
54 still needing allocations and to the pseudo-registers currently
55 allocated by local-alloc which may be spilled by reload.
56 Set up tables reg_allocno and allocno_reg to map
57 reg numbers to allocnos and vice versa.
58 max_allocno gets the number of allocnos in use.
60 2. Allocate a max_allocno by max_allocno conflict bit matrix and clear it.
61 Allocate a max_allocno by FIRST_PSEUDO_REGISTER conflict matrix
62 for conflicts between allocnos and explicit hard register use
63 (which includes use of pseudo-registers allocated by local_alloc).
65 3. For each basic block
66 walk forward through the block, recording which
67 pseudo-registers and which hardware registers are live.
68 Build the conflict matrix between the pseudo-registers
69 and another of pseudo-registers versus hardware registers.
70 Also record the preferred hardware registers
71 for each pseudo-register.
73 4. Sort a table of the allocnos into order of
74 desirability of the variables.
76 5. Allocate the variables in that order; each if possible into
77 a preferred register, else into another register. */
79 /* Number of pseudo-registers which are candidates for allocation. */
/* NOTE(review): this listing is a web-viewer dump of global.c; the integer
   at the start of each line is the original file's own line number, and
   blank or brace-only lines of the original appear to have been dropped by
   the extraction (e.g. the '{' opening struct allocno) — verify against the
   upstream file before editing.  */
81 static int max_allocno;
83 /* Indexed by (pseudo) reg number, gives the allocno, or -1
84 for pseudo registers which are not to be allocated. */
86 static int *reg_allocno;
/* Per-candidate record: one struct allocno for each pseudo-register that is
   a candidate for global allocation, holding its size, profile statistics,
   and the hard-register conflict/preference sets built by this pass.  */
88 struct allocno
90 int reg;
91 /* Gives the number of consecutive hard registers needed by that
92 pseudo reg. */
93 int size;
95 /* Number of calls crossed by each allocno. */
96 int calls_crossed;
98 /* Number of refs to each allocno. */
99 int n_refs;
101 /* Frequency of uses of each allocno. */
102 int freq;
104 /* Guess at live length of each allocno.
105 This is actually the max of the live lengths of the regs. */
106 int live_length;
108 /* Set of hard regs conflicting with allocno N. */
110 HARD_REG_SET hard_reg_conflicts;
112 /* Set of hard regs preferred by allocno N.
113 This is used to make allocnos go into regs that are copied to or from them,
114 when possible, to reduce register shuffling. */
116 HARD_REG_SET hard_reg_preferences;
118 /* Similar, but just counts register preferences made in simple copy
119 operations, rather than arithmetic. These are given priority because
120 we can always eliminate an insn by using these, but using a register
121 in the above list won't always eliminate an insn. */
123 HARD_REG_SET hard_reg_copy_preferences;
125 /* Similar to hard_reg_preferences, but includes bits for subsequent
126 registers when an allocno is multi-word. The above variable is used for
127 allocation while this is used to build reg_someone_prefers, below. */
129 HARD_REG_SET hard_reg_full_preferences;
131 /* Set of hard registers that some later allocno has a preference for. */
133 HARD_REG_SET regs_someone_prefers;
135 #ifdef STACK_REGS
136 /* Set to true if allocno can't be allocated in the stack register. */
137 bool no_stack_reg;
138 #endif
/* The table of all allocno records, indexed by allocno number.  */
141 static struct allocno *allocno;
143 /* A vector of the integers from 0 to max_allocno-1,
144 sorted in the order of first-to-be-allocated first. */
146 static int *allocno_order;
148 /* Indexed by (pseudo) reg number, gives the number of another
149 lower-numbered pseudo reg which can share a hard reg with this pseudo
150 *even if the two pseudos would otherwise appear to conflict*. */
152 static int *reg_may_share;
154 /* Define the number of bits in each element of `conflicts' and what
155 type that element has. We use the largest integer format on the
156 host machine. */
158 #define INT_BITS HOST_BITS_PER_WIDE_INT
159 #define INT_TYPE HOST_WIDE_INT
161 /* max_allocno by max_allocno array of bits,
162 recording whether two allocno's conflict (can't go in the same
163 hardware register).
165 `conflicts' is symmetric after the call to mirror_conflicts. */
167 static INT_TYPE *conflicts;
169 /* Number of ints required to hold max_allocno bits.
170 This is the length of a row in `conflicts'. */
172 static int allocno_row_words;
174 /* Two macros to test or store 1 in an element of `conflicts'. */
/* CONFLICTP (I, J) is nonzero iff allocnos I and J conflict; it indexes row
   I of the bit matrix and masks out bit J.  */
176 #define CONFLICTP(I, J) \
177 (conflicts[(I) * allocno_row_words + (unsigned) (J) / INT_BITS] \
178 & ((INT_TYPE) 1 << ((unsigned) (J) % INT_BITS)))
180 /* For any allocno set in ALLOCNO_SET, set ALLOCNO to that allocno,
181 and execute CODE. */
/* NOTE(review): the braces of this do/while body (original lines 187, 190,
   192, 194, 197, 198) appear to have been dropped by the dump — confirm
   against upstream before relying on this text.  */
182 #define EXECUTE_IF_SET_IN_ALLOCNO_SET(ALLOCNO_SET, ALLOCNO, CODE) \
183 do { \
184 int i_; \
185 int allocno_; \
186 INT_TYPE *p_ = (ALLOCNO_SET); \
188 for (i_ = allocno_row_words - 1, allocno_ = 0; i_ >= 0; \
189 i_--, allocno_ += INT_BITS) \
191 unsigned INT_TYPE word_ = (unsigned INT_TYPE) *p_++; \
193 for ((ALLOCNO) = allocno_; word_; word_ >>= 1, (ALLOCNO)++) \
195 if (word_ & 1) \
196 {CODE;} \
199 } while (0)
201 /* This doesn't work for non-GNU C due to the way CODE is macro expanded. */
202 #if 0
203 /* For any allocno that conflicts with IN_ALLOCNO, set OUT_ALLOCNO to
204 the conflicting allocno, and execute CODE. This macro assumes that
205 mirror_conflicts has been run. */
206 #define EXECUTE_IF_CONFLICT(IN_ALLOCNO, OUT_ALLOCNO, CODE)\
207 EXECUTE_IF_SET_IN_ALLOCNO_SET (conflicts + (IN_ALLOCNO) * allocno_row_words,\
208 OUT_ALLOCNO, (CODE))
209 #endif
211 /* Set of hard regs currently live (during scan of all insns). */
213 static HARD_REG_SET hard_regs_live;
215 /* Set of registers that global-alloc isn't supposed to use. */
217 static HARD_REG_SET no_global_alloc_regs;
219 /* Set of registers used so far. */
221 static HARD_REG_SET regs_used_so_far;
223 /* Number of refs to each hard reg, as used by local alloc.
224 It is zero for a reg that contains global pseudos or is explicitly used. */
226 static int local_reg_n_refs[FIRST_PSEUDO_REGISTER];
228 /* Frequency of uses of given hard reg. */
229 static int local_reg_freq[FIRST_PSEUDO_REGISTER];
231 /* Guess at live length of each hard reg, as used by local alloc.
232 This is actually the sum of the live lengths of the specific regs. */
234 static int local_reg_live_length[FIRST_PSEUDO_REGISTER];
236 /* Set to 1 a bit in a vector TABLE of HARD_REG_SETs, for vector
237 element I, and hard register number J. */
239 #define SET_REGBIT(TABLE, I, J) SET_HARD_REG_BIT (allocno[I].TABLE, J)
241 /* Bit mask for allocnos live at current point in the scan. */
243 static INT_TYPE *allocnos_live;
245 /* Test, set or clear bit number I in allocnos_live,
246 a bit vector indexed by allocno. */
248 #define SET_ALLOCNO_LIVE(I) \
249 (allocnos_live[(unsigned) (I) / INT_BITS] \
250 |= ((INT_TYPE) 1 << ((unsigned) (I) % INT_BITS)))
252 #define CLEAR_ALLOCNO_LIVE(I) \
253 (allocnos_live[(unsigned) (I) / INT_BITS] \
254 &= ~((INT_TYPE) 1 << ((unsigned) (I) % INT_BITS)))
256 /* This is turned off because it doesn't work right for DImode.
257 (And it is only used for DImode, so the other cases are worthless.)
258 The problem is that it isn't true that there is NO possibility of conflict;
259 only that there is no conflict if the two pseudos get the exact same regs.
260 If they were allocated with a partial overlap, there would be a conflict.
261 We can't safely turn off the conflict unless we have another way to
262 prevent the partial overlap.
264 Idea: change hard_reg_conflicts so that instead of recording which
265 hard regs the allocno may not overlap, it records where the allocno
266 may not start. Change both where it is used and where it is updated.
267 Then there is a way to record that (reg:DI 108) may start at 10
268 but not at 9 or 11. There is still the question of how to record
269 this semi-conflict between two pseudos. */
270 #if 0
271 /* Reg pairs for which conflict after the current insn
272 is inhibited by a REG_NO_CONFLICT note.
273 If the table gets full, we ignore any other notes--that is conservative. */
274 #define NUM_NO_CONFLICT_PAIRS 4
275 /* Number of pairs in use in this insn. */
276 int n_no_conflict_pairs;
277 static struct { int allocno1, allocno2;}
278 no_conflict_pairs[NUM_NO_CONFLICT_PAIRS];
279 #endif /* 0 */
281 /* Record all regs that are set in any one insn.
282 Communication from mark_reg_{store,clobber} and global_conflicts. */
284 static rtx *regs_set;
285 static int n_regs_set;
287 /* All registers that can be eliminated. */
289 static HARD_REG_SET eliminable_regset;
/* Forward declarations for the static helpers of this pass.  */
291 static int allocno_compare (const void *, const void *);
292 static void global_conflicts (void);
293 static void mirror_conflicts (void);
294 static void expand_preferences (void);
295 static void prune_preferences (void);
296 static void find_reg (int, HARD_REG_SET, int, int, int);
297 static void record_one_conflict (int);
298 static void record_conflicts (int *, int);
299 static void mark_reg_store (rtx, rtx, void *);
300 static void mark_reg_clobber (rtx, rtx, void *);
301 static void mark_reg_conflicts (rtx);
302 static void mark_reg_death (rtx);
303 static void mark_reg_live_nc (int, enum machine_mode);
304 static void set_preference (rtx, rtx);
305 static void dump_conflicts (FILE *);
306 static void reg_becomes_live (rtx, rtx, void *);
307 static void reg_dies (int, enum machine_mode, struct insn_chain *);
/* Helpers for the accurate-live-analysis (partial availability) phase.  */
309 static void allocate_bb_info (void);
310 static void free_bb_info (void);
311 static bool check_earlyclobber (rtx);
312 static void mark_reg_use_for_earlyclobber_1 (rtx *, void *);
313 static int mark_reg_use_for_earlyclobber (rtx *, void *);
314 static void calculate_local_reg_bb_info (void);
315 static void set_up_bb_rts_numbers (void);
316 static int rpost_cmp (const void *, const void *);
317 static void calculate_reg_pav (void);
318 static void modify_reg_pav (void);
319 static void make_accurate_live_analysis (void);
323 /* Perform allocation of pseudo-registers not allocated by local_alloc.
324 FILE is a file to output debugging information on,
325 or zero if such output is not desired.
327 Return value is nonzero if reload failed
328 and we must not do any more for this function. */
/* NOTE(review): the return-type line (original line 330, presumably `int')
   and the function's braces are missing from this dump — confirm against
   the upstream gcc/global.c before editing this text.  */
331 global_alloc (FILE *file)
333 int retval;
334 #ifdef ELIMINABLE_REGS
335 static const struct {const int from, to; } eliminables[] = ELIMINABLE_REGS;
336 #endif
/* A frame pointer is needed if we can't omit it, alloca forces one, or the
   target unconditionally requires one.  */
337 int need_fp
338 = (! flag_omit_frame_pointer
339 || (current_function_calls_alloca && EXIT_IGNORE_STACK)
340 || FRAME_POINTER_REQUIRED);
342 size_t i;
343 rtx x;
345 make_accurate_live_analysis ();
347 max_allocno = 0;
349 /* A machine may have certain hard registers that
350 are safe to use only within a basic block. */
352 CLEAR_HARD_REG_SET (no_global_alloc_regs);
354 /* Build the regset of all eliminable registers and show we can't use those
355 that we already know won't be eliminated. */
356 #ifdef ELIMINABLE_REGS
357 for (i = 0; i < ARRAY_SIZE (eliminables); i++)
359 bool cannot_elim
360 = (! CAN_ELIMINATE (eliminables[i].from, eliminables[i].to)
361 || (eliminables[i].to == STACK_POINTER_REGNUM && need_fp));
363 if (!regs_asm_clobbered[eliminables[i].from])
365 SET_HARD_REG_BIT (eliminable_regset, eliminables[i].from);
367 if (cannot_elim)
368 SET_HARD_REG_BIT (no_global_alloc_regs, eliminables[i].from);
370 else if (cannot_elim)
371 error ("%s cannot be used in asm here",
372 reg_names[eliminables[i].from]);
373 else
374 regs_ever_live[eliminables[i].from] = 1;
376 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
377 if (!regs_asm_clobbered[HARD_FRAME_POINTER_REGNUM])
379 SET_HARD_REG_BIT (eliminable_regset, HARD_FRAME_POINTER_REGNUM);
380 if (need_fp)
381 SET_HARD_REG_BIT (no_global_alloc_regs, HARD_FRAME_POINTER_REGNUM);
383 else if (need_fp)
384 error ("%s cannot be used in asm here",
385 reg_names[HARD_FRAME_POINTER_REGNUM]);
386 else
387 regs_ever_live[HARD_FRAME_POINTER_REGNUM] = 1;
388 #endif
390 #else
391 if (!regs_asm_clobbered[FRAME_POINTER_REGNUM])
393 SET_HARD_REG_BIT (eliminable_regset, FRAME_POINTER_REGNUM);
394 if (need_fp)
395 SET_HARD_REG_BIT (no_global_alloc_regs, FRAME_POINTER_REGNUM);
397 else if (need_fp)
398 error ("%s cannot be used in asm here", reg_names[FRAME_POINTER_REGNUM]);
399 else
400 regs_ever_live[FRAME_POINTER_REGNUM] = 1;
401 #endif
403 /* Track which registers have already been used. Start with registers
404 explicitly in the rtl, then registers allocated by local register
405 allocation. */
407 CLEAR_HARD_REG_SET (regs_used_so_far);
408 #ifdef LEAF_REGISTERS
409 /* If we are doing the leaf function optimization, and this is a leaf
410 function, it means that the registers that take work to save are those
411 that need a register window. So prefer the ones that can be used in
412 a leaf function. */
414 const char *cheap_regs;
415 const char *const leaf_regs = LEAF_REGISTERS;
417 if (only_leaf_regs_used () && leaf_function_p ())
418 cheap_regs = leaf_regs;
419 else
420 cheap_regs = call_used_regs;
421 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
422 if (regs_ever_live[i] || cheap_regs[i])
423 SET_HARD_REG_BIT (regs_used_so_far, i);
425 #else
426 /* We consider registers that do not have to be saved over calls as if
427 they were already used since there is no cost in using them. */
428 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
429 if (regs_ever_live[i] || call_used_regs[i])
430 SET_HARD_REG_BIT (regs_used_so_far, i);
431 #endif
433 for (i = FIRST_PSEUDO_REGISTER; i < (size_t) max_regno; i++)
434 if (reg_renumber[i] >= 0)
435 SET_HARD_REG_BIT (regs_used_so_far, reg_renumber[i]);
437 /* Establish mappings from register number to allocation number
438 and vice versa. In the process, count the allocnos. */
440 reg_allocno = xmalloc (max_regno * sizeof (int));
442 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
443 reg_allocno[i] = -1;
445 /* Initialize the shared-hard-reg mapping
446 from the list of pairs that may share. */
447 reg_may_share = xcalloc (max_regno, sizeof (int));
/* regs_may_share is a chained list of reg pairs; always map the
   higher-numbered reg of a pair to the lower-numbered one.  */
448 for (x = regs_may_share; x; x = XEXP (XEXP (x, 1), 1))
450 int r1 = REGNO (XEXP (x, 0));
451 int r2 = REGNO (XEXP (XEXP (x, 1), 0));
452 if (r1 > r2)
453 reg_may_share[r1] = r2;
454 else
455 reg_may_share[r2] = r1;
458 for (i = FIRST_PSEUDO_REGISTER; i < (size_t) max_regno; i++)
459 /* Note that reg_live_length[i] < 0 indicates a "constant" reg
460 that we are supposed to refrain from putting in a hard reg.
461 -2 means do make an allocno but don't allocate it. */
462 if (REG_N_REFS (i) != 0 && REG_LIVE_LENGTH (i) != -1
463 /* Don't allocate pseudos that cross calls,
464 if this function receives a nonlocal goto. */
465 && (! current_function_has_nonlocal_label
466 || REG_N_CALLS_CROSSED (i) == 0))
468 if (reg_renumber[i] < 0
469 && reg_may_share[i] && reg_allocno[reg_may_share[i]] >= 0)
470 reg_allocno[i] = reg_allocno[reg_may_share[i]];
471 else
472 reg_allocno[i] = max_allocno++;
473 gcc_assert (REG_LIVE_LENGTH (i));
475 else
476 reg_allocno[i] = -1;
478 allocno = xcalloc (max_allocno, sizeof (struct allocno));
/* Accumulate per-allocno statistics; += matters because several regs can
   share one allocno via reg_may_share.  */
480 for (i = FIRST_PSEUDO_REGISTER; i < (size_t) max_regno; i++)
481 if (reg_allocno[i] >= 0)
483 int num = reg_allocno[i];
484 allocno[num].reg = i;
485 allocno[num].size = PSEUDO_REGNO_SIZE (i);
486 allocno[num].calls_crossed += REG_N_CALLS_CROSSED (i);
487 allocno[num].n_refs += REG_N_REFS (i);
488 allocno[num].freq += REG_FREQ (i);
489 if (allocno[num].live_length < REG_LIVE_LENGTH (i))
490 allocno[num].live_length = REG_LIVE_LENGTH (i);
493 /* Calculate amount of usage of each hard reg by pseudos
494 allocated by local-alloc. This is to see if we want to
495 override it. */
496 memset (local_reg_live_length, 0, sizeof local_reg_live_length);
497 memset (local_reg_n_refs, 0, sizeof local_reg_n_refs);
498 memset (local_reg_freq, 0, sizeof local_reg_freq);
499 for (i = FIRST_PSEUDO_REGISTER; i < (size_t) max_regno; i++)
500 if (reg_renumber[i] >= 0)
502 int regno = reg_renumber[i];
503 int endregno = regno + hard_regno_nregs[regno][PSEUDO_REGNO_MODE (i)];
504 int j;
506 for (j = regno; j < endregno; j++)
508 local_reg_n_refs[j] += REG_N_REFS (i);
509 local_reg_freq[j] += REG_FREQ (i);
510 local_reg_live_length[j] += REG_LIVE_LENGTH (i);
514 /* We can't override local-alloc for a reg used not just by local-alloc. */
515 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
516 if (regs_ever_live[i])
517 local_reg_n_refs[i] = 0, local_reg_freq[i] = 0;
519 allocno_row_words = (max_allocno + INT_BITS - 1) / INT_BITS;
521 /* We used to use alloca here, but the size of what it would try to
522 allocate would occasionally cause it to exceed the stack limit and
523 cause unpredictable core dumps. Some examples were > 2Mb in size. */
524 conflicts = xcalloc (max_allocno * allocno_row_words, sizeof (INT_TYPE));
526 allocnos_live = xmalloc (allocno_row_words * sizeof (INT_TYPE));
528 /* If there is work to be done (at least one reg to allocate),
529 perform global conflict analysis and allocate the regs. */
531 if (max_allocno > 0)
533 /* Scan all the insns and compute the conflicts among allocnos
534 and between allocnos and hard regs. */
536 global_conflicts ();
538 mirror_conflicts ();
540 /* Eliminate conflicts between pseudos and eliminable registers. If
541 the register is not eliminated, the pseudo won't really be able to
542 live in the eliminable register, so the conflict doesn't matter.
543 If we do eliminate the register, the conflict will no longer exist.
544 So in either case, we can ignore the conflict. Likewise for
545 preferences. */
547 for (i = 0; i < (size_t) max_allocno; i++)
549 AND_COMPL_HARD_REG_SET (allocno[i].hard_reg_conflicts,
550 eliminable_regset);
551 AND_COMPL_HARD_REG_SET (allocno[i].hard_reg_copy_preferences,
552 eliminable_regset);
553 AND_COMPL_HARD_REG_SET (allocno[i].hard_reg_preferences,
554 eliminable_regset);
557 /* Try to expand the preferences by merging them between allocnos. */
559 expand_preferences ();
561 /* Determine the order to allocate the remaining pseudo registers. */
563 allocno_order = xmalloc (max_allocno * sizeof (int));
564 for (i = 0; i < (size_t) max_allocno; i++)
565 allocno_order[i] = i;
567 /* Default the size to 1, since allocno_compare uses it to divide by.
568 Also convert allocno_live_length of zero to -1. A length of zero
569 can occur when all the registers for that allocno have reg_live_length
570 equal to -2. In this case, we want to make an allocno, but not
571 allocate it. So avoid the divide-by-zero and set it to a low
572 priority. */
574 for (i = 0; i < (size_t) max_allocno; i++)
576 if (allocno[i].size == 0)
577 allocno[i].size = 1;
578 if (allocno[i].live_length == 0)
579 allocno[i].live_length = -1;
582 qsort (allocno_order, max_allocno, sizeof (int), allocno_compare);
584 prune_preferences ();
586 if (file)
587 dump_conflicts (file);
589 /* Try allocating them, one by one, in that order,
590 except for parameters marked with reg_live_length[regno] == -2. */
592 for (i = 0; i < (size_t) max_allocno; i++)
593 if (reg_renumber[allocno[allocno_order[i]].reg] < 0
594 && REG_LIVE_LENGTH (allocno[allocno_order[i]].reg) >= 0)
596 /* If we have more than one register class,
597 first try allocating in the class that is cheapest
598 for this pseudo-reg. If that fails, try any reg. */
599 if (N_REG_CLASSES > 1)
601 find_reg (allocno_order[i], 0, 0, 0, 0);
602 if (reg_renumber[allocno[allocno_order[i]].reg] >= 0)
603 continue;
605 if (reg_alternate_class (allocno[allocno_order[i]].reg) != NO_REGS)
606 find_reg (allocno_order[i], 0, 1, 0, 0);
609 free (allocno_order);
612 /* Do the reloads now while the allocno data still exist, so that we can
613 try to assign new hard regs to any pseudo regs that are spilled. */
615 #if 0 /* We need to eliminate regs even if there is no rtl code,
616 for the sake of debugging information. */
617 if (n_basic_blocks > 0)
618 #endif
620 build_insn_chain (get_insns ());
621 retval = reload (get_insns (), 1);
624 /* Clean up. */
625 free (reg_allocno);
626 free (reg_may_share);
627 free (allocno);
628 free (conflicts);
629 free (allocnos_live);
631 return retval;
634 /* Sort predicate for ordering the allocnos.
635 Returns -1 (1) if *v1 should be allocated before (after) *v2. */
637 static int
638 allocno_compare (const void *v1p, const void *v2p)
640 int v1 = *(const int *)v1p, v2 = *(const int *)v2p;
641 /* Note that the quotient will never be bigger than
642 the value of floor_log2 times the maximum number of
643 times a register can occur in one insn (surely less than 100)
644 weighted by the frequency (maximally REG_FREQ_MAX).
645 Multiplying this by 10000/REG_FREQ_MAX can't overflow. */
646 int pri1
647 = (((double) (floor_log2 (allocno[v1].n_refs) * allocno[v1].freq)
648 / allocno[v1].live_length)
649 * (10000 / REG_FREQ_MAX) * allocno[v1].size);
650 int pri2
651 = (((double) (floor_log2 (allocno[v2].n_refs) * allocno[v2].freq)
652 / allocno[v2].live_length)
653 * (10000 / REG_FREQ_MAX) * allocno[v2].size);
654 if (pri2 - pri1)
655 return pri2 - pri1;
657 /* If regs are equally good, sort by allocno,
658 so that the results of qsort leave nothing to chance. */
659 return v1 - v2;
662 /* Scan the rtl code and record all conflicts and register preferences in the
663 conflict matrices and preference tables. */
/* NOTE(review): brace and blank lines of the original were dropped by this
   dump; compare with upstream gcc/global.c before editing.  */
665 static void
666 global_conflicts (void)
668 unsigned i;
669 basic_block b;
670 rtx insn;
671 int *block_start_allocnos;
673 /* Make a vector that mark_reg_{store,clobber} will store in. */
674 regs_set = xmalloc (max_parallel * sizeof (rtx) * 2);
676 block_start_allocnos = xmalloc (max_allocno * sizeof (int));
678 FOR_EACH_BB (b)
680 memset (allocnos_live, 0, allocno_row_words * sizeof (INT_TYPE));
682 /* Initialize table of registers currently live
683 to the state at the beginning of this basic block.
684 This also marks the conflicts among hard registers
685 and any allocnos that are live.
687 For pseudo-regs, there is only one bit for each one
688 no matter how many hard regs it occupies.
689 This is ok; we know the size from PSEUDO_REGNO_SIZE.
690 For explicit hard regs, we cannot know the size that way
691 since one hard reg can be used with various sizes.
692 Therefore, we must require that all the hard regs
693 implicitly live as part of a multi-word hard reg
694 be explicitly marked in basic_block_live_at_start. */
697 regset old = b->global_live_at_start;
698 int ax = 0;
699 reg_set_iterator rsi;
701 REG_SET_TO_HARD_REG_SET (hard_regs_live, old);
702 EXECUTE_IF_SET_IN_REG_SET (old, FIRST_PSEUDO_REGISTER, i, rsi)
704 int a = reg_allocno[i];
705 if (a >= 0)
707 SET_ALLOCNO_LIVE (a);
708 block_start_allocnos[ax++] = a;
710 else if ((a = reg_renumber[i]) >= 0)
711 mark_reg_live_nc (a, PSEUDO_REGNO_MODE (i));
714 /* Record that each allocno now live conflicts with each hard reg
715 now live.
717 It is not necessary to mark any conflicts between pseudos as
718 this point, even for pseudos which are live at the start of
719 the basic block.
721 Given two pseudos X and Y and any point in the CFG P.
723 On any path to point P where X and Y are live one of the
724 following conditions must be true:
726 1. X is live at some instruction on the path that
727 evaluates Y.
729 2. Y is live at some instruction on the path that
730 evaluates X.
732 3. Either X or Y is not evaluated on the path to P
733 (i.e. it is used uninitialized) and thus the
734 conflict can be ignored.
736 In cases #1 and #2 the conflict will be recorded when we
737 scan the instruction that makes either X or Y become live. */
738 record_conflicts (block_start_allocnos, ax);
740 /* Pseudos can't go in stack regs at the start of a basic block that
741 is reached by an abnormal edge. Likewise for call clobbered regs,
742 because caller-save, fixup_abnormal_edges, and possibly
743 the table driven EH machinery are not quite ready to handle such
744 regs live across such edges. */
746 edge e;
747 edge_iterator ei;
749 FOR_EACH_EDGE (e, ei, b->preds)
750 if (e->flags & EDGE_ABNORMAL)
751 break;
753 if (e != NULL)
755 #ifdef STACK_REGS
756 EXECUTE_IF_SET_IN_ALLOCNO_SET (allocnos_live, ax,
758 allocno[ax].no_stack_reg = 1;
760 for (ax = FIRST_STACK_REG; ax <= LAST_STACK_REG; ax++)
761 record_one_conflict (ax);
762 #endif
764 /* No need to record conflicts for call clobbered regs if we have
765 nonlocal labels around, as we don't ever try to allocate such
766 regs in this case. */
767 if (! current_function_has_nonlocal_label)
768 for (ax = 0; ax < FIRST_PSEUDO_REGISTER; ax++)
769 if (call_used_regs [ax])
770 record_one_conflict (ax);
775 insn = BB_HEAD (b);
777 /* Scan the code of this basic block, noting which allocnos
778 and hard regs are born or die. When one is born,
779 record a conflict with all others currently live. */
781 while (1)
783 RTX_CODE code = GET_CODE (insn);
784 rtx link;
786 /* Make regs_set an empty set. */
788 n_regs_set = 0;
790 if (code == INSN || code == CALL_INSN || code == JUMP_INSN)
793 #if 0
794 int i = 0;
795 for (link = REG_NOTES (insn);
796 link && i < NUM_NO_CONFLICT_PAIRS;
797 link = XEXP (link, 1))
798 if (REG_NOTE_KIND (link) == REG_NO_CONFLICT)
800 no_conflict_pairs[i].allocno1
801 = reg_allocno[REGNO (SET_DEST (PATTERN (insn)))];
802 no_conflict_pairs[i].allocno2
803 = reg_allocno[REGNO (XEXP (link, 0))];
804 i++;
806 #endif /* 0 */
808 /* Mark any registers clobbered by INSN as live,
809 so they conflict with the inputs. */
811 note_stores (PATTERN (insn), mark_reg_clobber, NULL);
813 /* Mark any registers dead after INSN as dead now. */
815 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
816 if (REG_NOTE_KIND (link) == REG_DEAD)
817 mark_reg_death (XEXP (link, 0));
819 /* Mark any registers set in INSN as live,
820 and mark them as conflicting with all other live regs.
821 Clobbers are processed again, so they conflict with
822 the registers that are set. */
824 note_stores (PATTERN (insn), mark_reg_store, NULL);
826 #ifdef AUTO_INC_DEC
827 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
828 if (REG_NOTE_KIND (link) == REG_INC)
829 mark_reg_store (XEXP (link, 0), NULL_RTX, NULL);
830 #endif
832 /* If INSN has multiple outputs, then any reg that dies here
833 and is used inside of an output
834 must conflict with the other outputs.
836 It is unsafe to use !single_set here since it will ignore an
837 unused output. Just because an output is unused does not mean
838 the compiler can assume the side effect will not occur.
839 Consider if REG appears in the address of an output and we
840 reload the output. If we allocate REG to the same hard
841 register as an unused output we could set the hard register
842 before the output reload insn. */
843 if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
844 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
845 if (REG_NOTE_KIND (link) == REG_DEAD)
847 int used_in_output = 0;
848 int i;
849 rtx reg = XEXP (link, 0);
851 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
853 rtx set = XVECEXP (PATTERN (insn), 0, i);
854 if (GET_CODE (set) == SET
855 && !REG_P (SET_DEST (set))
856 && !rtx_equal_p (reg, SET_DEST (set))
857 && reg_overlap_mentioned_p (reg, SET_DEST (set)))
858 used_in_output = 1;
860 if (used_in_output)
861 mark_reg_conflicts (reg);
864 /* Mark any registers set in INSN and then never used. */
866 while (n_regs_set-- > 0)
868 rtx note = find_regno_note (insn, REG_UNUSED,
869 REGNO (regs_set[n_regs_set]));
870 if (note)
871 mark_reg_death (XEXP (note, 0));
875 if (insn == BB_END (b))
876 break;
877 insn = NEXT_INSN (insn);
881 /* Clean up. */
882 free (block_start_allocnos);
883 free (regs_set);
885 /* Expand the preference information by looking for cases where one allocno
886 dies in an insn that sets an allocno. If those two allocnos don't conflict,
887 merge any preferences between those allocnos. */
889 static void
890 expand_preferences (void)
892 rtx insn;
893 rtx link;
894 rtx set;
896 /* We only try to handle the most common cases here. Most of the cases
897 where this wins are reg-reg copies. */
899 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
900 if (INSN_P (insn)
901 && (set = single_set (insn)) != 0
902 && REG_P (SET_DEST (set))
903 && reg_allocno[REGNO (SET_DEST (set))] >= 0)
904 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
905 if (REG_NOTE_KIND (link) == REG_DEAD
906 && REG_P (XEXP (link, 0))
907 && reg_allocno[REGNO (XEXP (link, 0))] >= 0
908 && ! CONFLICTP (reg_allocno[REGNO (SET_DEST (set))],
909 reg_allocno[REGNO (XEXP (link, 0))]))
911 int a1 = reg_allocno[REGNO (SET_DEST (set))];
912 int a2 = reg_allocno[REGNO (XEXP (link, 0))];
/* If the dying reg is the copy source, this is a plain reg-reg copy:
   merge the stronger copy preferences in both directions.  */
914 if (XEXP (link, 0) == SET_SRC (set))
916 IOR_HARD_REG_SET (allocno[a1].hard_reg_copy_preferences,
917 allocno[a2].hard_reg_copy_preferences);
918 IOR_HARD_REG_SET (allocno[a2].hard_reg_copy_preferences,
919 allocno[a1].hard_reg_copy_preferences);
/* In all cases, merge the ordinary and full preference sets both ways.  */
922 IOR_HARD_REG_SET (allocno[a1].hard_reg_preferences,
923 allocno[a2].hard_reg_preferences);
924 IOR_HARD_REG_SET (allocno[a2].hard_reg_preferences,
925 allocno[a1].hard_reg_preferences);
926 IOR_HARD_REG_SET (allocno[a1].hard_reg_full_preferences,
927 allocno[a2].hard_reg_full_preferences);
928 IOR_HARD_REG_SET (allocno[a2].hard_reg_full_preferences,
929 allocno[a1].hard_reg_full_preferences);
933 /* Prune the preferences for global registers to exclude registers that cannot
934 be used.
936 Compute `regs_someone_prefers', which is a bitmask of the hard registers
937 that are preferred by conflicting registers of lower priority. If possible,
938 we will avoid using these registers. */
940 static void
941 prune_preferences (void)
943 int i;
944 int num;
945 int *allocno_to_order = xmalloc (max_allocno * sizeof (int));
947 /* Scan least most important to most important.
948 For each allocno, remove from preferences registers that cannot be used,
949 either because of conflicts or register type. Then compute all registers
950 preferred by each lower-priority register that conflicts. */
952 for (i = max_allocno - 1; i >= 0; i--)
954 HARD_REG_SET temp;
956 num = allocno_order[i];
957 allocno_to_order[num] = i;
958 COPY_HARD_REG_SET (temp, allocno[num].hard_reg_conflicts);
/* TEMP accumulates everything this allocno must avoid: its conflicts,
   plus fixed regs (or call-used regs if it crosses calls), plus anything
   outside its preferred register class.  */
960 if (allocno[num].calls_crossed == 0)
961 IOR_HARD_REG_SET (temp, fixed_reg_set);
962 else
963 IOR_HARD_REG_SET (temp, call_used_reg_set);
965 IOR_COMPL_HARD_REG_SET
966 (temp,
967 reg_class_contents[(int) reg_preferred_class (allocno[num].reg)]);
969 AND_COMPL_HARD_REG_SET (allocno[num].hard_reg_preferences, temp);
970 AND_COMPL_HARD_REG_SET (allocno[num].hard_reg_copy_preferences, temp);
971 AND_COMPL_HARD_REG_SET (allocno[num].hard_reg_full_preferences, temp);
974 for (i = max_allocno - 1; i >= 0; i--)
976 /* Merge in the preferences of lower-priority registers (they have
977 already been pruned). If we also prefer some of those registers,
978 don't exclude them unless we are of a smaller size (in which case
979 we want to give the lower-priority allocno the first chance for
980 these registers). */
981 HARD_REG_SET temp, temp2;
982 int allocno2;
984 num = allocno_order[i];
986 CLEAR_HARD_REG_SET (temp);
987 CLEAR_HARD_REG_SET (temp2);
989 EXECUTE_IF_SET_IN_ALLOCNO_SET (conflicts + num * allocno_row_words,
990 allocno2,
992 if (allocno_to_order[allocno2] > i)
994 if (allocno[allocno2].size <= allocno[num].size)
995 IOR_HARD_REG_SET (temp,
996 allocno[allocno2].hard_reg_full_preferences);
997 else
998 IOR_HARD_REG_SET (temp2,
999 allocno[allocno2].hard_reg_full_preferences);
1003 AND_COMPL_HARD_REG_SET (temp, allocno[num].hard_reg_full_preferences);
1004 IOR_HARD_REG_SET (temp, temp2);
1005 COPY_HARD_REG_SET (allocno[num].regs_someone_prefers, temp);
1007 free (allocno_to_order);
1010 /* Assign a hard register to allocno NUM; look for one that is the beginning
1011 of a long enough stretch of hard regs none of which conflicts with ALLOCNO.
1012 The registers marked in PREFREGS are tried first.
1014 LOSERS, if nonzero, is a HARD_REG_SET indicating registers that cannot
1015 be used for this allocation.
1017 If ALT_REGS_P is zero, consider only the preferred class of ALLOCNO's reg.
1018 Otherwise ignore that preferred class and use the alternate class.
1020 If ACCEPT_CALL_CLOBBERED is nonzero, accept a call-clobbered hard reg that
1021 will have to be saved and restored at calls.
1023 RETRYING is nonzero if this is called from retry_global_alloc.
1025 If we find one, record it in reg_renumber.
1026 If not, do nothing. */
1028 static void
1029 find_reg (int num, HARD_REG_SET losers, int alt_regs_p, int accept_call_clobbered, int retrying)
1031 int i, best_reg, pass;
1032 HARD_REG_SET used, used1, used2;
1034 enum reg_class class = (alt_regs_p
1035 ? reg_alternate_class (allocno[num].reg)
1036 : reg_preferred_class (allocno[num].reg));
1037 enum machine_mode mode = PSEUDO_REGNO_MODE (allocno[num].reg);
     /* USED1 starts as the set of hard regs categorically unusable here:
        call-fixed regs if we accept call-clobbered ones, otherwise fixed
        regs (no calls crossed) or all call-used regs.  */
1039 if (accept_call_clobbered)
1040 COPY_HARD_REG_SET (used1, call_fixed_reg_set);
1041 else if (allocno[num].calls_crossed == 0)
1042 COPY_HARD_REG_SET (used1, fixed_reg_set);
1043 else
1044 COPY_HARD_REG_SET (used1, call_used_reg_set);
1046 /* Some registers should not be allocated in global-alloc. */
1047 IOR_HARD_REG_SET (used1, no_global_alloc_regs);
1048 if (losers)
1049 IOR_HARD_REG_SET (used1, losers);
1051 IOR_COMPL_HARD_REG_SET (used1, reg_class_contents[(int) class]);
     /* USED2 is USED1 without this allocno's own conflicts; the
        local-alloc eviction code near the end tests USED2, since
        conflicts there can be resolved by kicking regs out.  */
1052 COPY_HARD_REG_SET (used2, used1);
1054 IOR_HARD_REG_SET (used1, allocno[num].hard_reg_conflicts);
1056 #ifdef CANNOT_CHANGE_MODE_CLASS
1057 cannot_change_mode_set_regs (&used1, mode, allocno[num].reg);
1058 #endif
1060 /* Try each hard reg to see if it fits. Do this in two passes.
1061 In the first pass, skip registers that are preferred by some other pseudo
1062 to give it a better chance of getting one of those registers. Only if
1063 we can't get a register when excluding those do we take one of them.
1064 However, we never allocate a register for the first time in pass 0. */
1066 COPY_HARD_REG_SET (used, used1);
1067 IOR_COMPL_HARD_REG_SET (used, regs_used_so_far);
1068 IOR_HARD_REG_SET (used, allocno[num].regs_someone_prefers);
1070 best_reg = -1;
1071 for (i = FIRST_PSEUDO_REGISTER, pass = 0;
1072 pass <= 1 && i >= FIRST_PSEUDO_REGISTER;
1073 pass++)
1075 if (pass == 1)
1076 COPY_HARD_REG_SET (used, used1);
1077 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1079 #ifdef REG_ALLOC_ORDER
1080 int regno = reg_alloc_order[i];
1081 #else
1082 int regno = i;
1083 #endif
1084 if (! TEST_HARD_REG_BIT (used, regno)
1085 && HARD_REGNO_MODE_OK (regno, mode)
1086 && (allocno[num].calls_crossed == 0
1087 || accept_call_clobbered
1088 || ! HARD_REGNO_CALL_PART_CLOBBERED (regno, mode)))
1090 int j;
1091 int lim = regno + hard_regno_nregs[regno][mode];
     /* Verify the whole group of consecutive hard regs is free.  */
1092 for (j = regno + 1;
1093 (j < lim
1094 && ! TEST_HARD_REG_BIT (used, j));
1095 j++);
1096 if (j == lim)
1098 best_reg = regno;
1099 break;
1101 #ifndef REG_ALLOC_ORDER
1102 i = j; /* Skip starting points we know will lose */
1103 #endif
1108 /* See if there is a preferred register with the same class as the register
1109 we allocated above. Making this restriction prevents register
1110 preferencing from creating worse register allocation.
1112 Remove from the preferred registers and conflicting registers. Note that
1113 additional conflicts may have been added after `prune_preferences' was
1114 called.
1116 First do this for those register with copy preferences, then all
1117 preferred registers. */
1119 AND_COMPL_HARD_REG_SET (allocno[num].hard_reg_copy_preferences, used);
1120 GO_IF_HARD_REG_SUBSET (allocno[num].hard_reg_copy_preferences,
1121 reg_class_contents[(int) NO_REGS], no_copy_prefs);
1123 if (best_reg >= 0)
1125 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1126 if (TEST_HARD_REG_BIT (allocno[num].hard_reg_copy_preferences, i)
1127 && HARD_REGNO_MODE_OK (i, mode)
1128 && (allocno[num].calls_crossed == 0
1129 || accept_call_clobbered
1130 || ! HARD_REGNO_CALL_PART_CLOBBERED (i, mode))
1131 && (REGNO_REG_CLASS (i) == REGNO_REG_CLASS (best_reg)
1132 || reg_class_subset_p (REGNO_REG_CLASS (i),
1133 REGNO_REG_CLASS (best_reg))
1134 || reg_class_subset_p (REGNO_REG_CLASS (best_reg),
1135 REGNO_REG_CLASS (i))))
1137 int j;
1138 int lim = i + hard_regno_nregs[i][mode];
     /* Each reg of the group must be free and class-compatible with
        the corresponding reg of the group starting at BEST_REG.  */
1139 for (j = i + 1;
1140 (j < lim
1141 && ! TEST_HARD_REG_BIT (used, j)
1142 && (REGNO_REG_CLASS (j)
1143 == REGNO_REG_CLASS (best_reg + (j - i))
1144 || reg_class_subset_p (REGNO_REG_CLASS (j),
1145 REGNO_REG_CLASS (best_reg + (j - i)))
1146 || reg_class_subset_p (REGNO_REG_CLASS (best_reg + (j - i)),
1147 REGNO_REG_CLASS (j))));
1148 j++);
1149 if (j == lim)
1151 best_reg = i;
     /* A copy preference wins outright; skip the plain-preference scan.  */
1152 goto no_prefs;
1156 no_copy_prefs:
1158 AND_COMPL_HARD_REG_SET (allocno[num].hard_reg_preferences, used);
1159 GO_IF_HARD_REG_SUBSET (allocno[num].hard_reg_preferences,
1160 reg_class_contents[(int) NO_REGS], no_prefs);
1162 if (best_reg >= 0)
1164 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1165 if (TEST_HARD_REG_BIT (allocno[num].hard_reg_preferences, i)
1166 && HARD_REGNO_MODE_OK (i, mode)
1167 && (allocno[num].calls_crossed == 0
1168 || accept_call_clobbered
1169 || ! HARD_REGNO_CALL_PART_CLOBBERED (i, mode))
1170 && (REGNO_REG_CLASS (i) == REGNO_REG_CLASS (best_reg)
1171 || reg_class_subset_p (REGNO_REG_CLASS (i),
1172 REGNO_REG_CLASS (best_reg))
1173 || reg_class_subset_p (REGNO_REG_CLASS (best_reg),
1174 REGNO_REG_CLASS (i))))
1176 int j;
1177 int lim = i + hard_regno_nregs[i][mode];
1178 for (j = i + 1;
1179 (j < lim
1180 && ! TEST_HARD_REG_BIT (used, j)
1181 && (REGNO_REG_CLASS (j)
1182 == REGNO_REG_CLASS (best_reg + (j - i))
1183 || reg_class_subset_p (REGNO_REG_CLASS (j),
1184 REGNO_REG_CLASS (best_reg + (j - i)))
1185 || reg_class_subset_p (REGNO_REG_CLASS (best_reg + (j - i)),
1186 REGNO_REG_CLASS (j))));
1187 j++);
1188 if (j == lim)
1190 best_reg = i;
1191 break;
1195 no_prefs:
1197 /* If we haven't succeeded yet, try with caller-saves.
1198 We need not check to see if the current function has nonlocal
1199 labels because we don't put any pseudos that are live over calls in
1200 registers in that case. */
1202 if (flag_caller_saves && best_reg < 0)
1204 /* Did not find a register. If it would be profitable to
1205 allocate a call-clobbered register and save and restore it
1206 around calls, do that. */
1207 if (! accept_call_clobbered
1208 && allocno[num].calls_crossed != 0
1209 && CALLER_SAVE_PROFITABLE (allocno[num].n_refs,
1210 allocno[num].calls_crossed))
1212 HARD_REG_SET new_losers;
1213 if (! losers)
1214 CLEAR_HARD_REG_SET (new_losers);
1215 else
1216 COPY_HARD_REG_SET (new_losers, losers);
1218 IOR_HARD_REG_SET(new_losers, losing_caller_save_reg_set);
     /* Recurse, this time accepting call-clobbered registers.  */
1219 find_reg (num, new_losers, alt_regs_p, 1, retrying);
1220 if (reg_renumber[allocno[num].reg] >= 0)
1222 caller_save_needed = 1;
1223 return;
1228 /* If we haven't succeeded yet,
1229 see if some hard reg that conflicts with us
1230 was utilized poorly by local-alloc.
1231 If so, kick out the regs that were put there by local-alloc
1232 so we can use it instead. */
1233 if (best_reg < 0 && !retrying
1234 /* Let's not bother with multi-reg allocnos. */
1235 && allocno[num].size == 1)
1237 /* Count from the end, to find the least-used ones first. */
1238 for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
1240 #ifdef REG_ALLOC_ORDER
1241 int regno = reg_alloc_order[i];
1242 #else
1243 int regno = i;
1244 #endif
1246 if (local_reg_n_refs[regno] != 0
1247 /* Don't use a reg no good for this pseudo. */
1248 && ! TEST_HARD_REG_BIT (used2, regno)
1249 && HARD_REGNO_MODE_OK (regno, mode)
1250 /* The code below assumes that we need only a single
1251 register, but the check of allocno[num].size above
1252 was not enough. Sometimes we need more than one
1253 register for a single-word value. */
1254 && hard_regno_nregs[regno][mode] == 1
1255 && (allocno[num].calls_crossed == 0
1256 || accept_call_clobbered
1257 || ! HARD_REGNO_CALL_PART_CLOBBERED (regno, mode))
1258 #ifdef CANNOT_CHANGE_MODE_CLASS
1259 && ! invalid_mode_change_p (regno, REGNO_REG_CLASS (regno),
1260 mode)
1261 #endif
1262 #ifdef STACK_REGS
1263 && (!allocno[num].no_stack_reg
1264 || regno < FIRST_STACK_REG || regno > LAST_STACK_REG)
1265 #endif
1268 /* We explicitly evaluate the divide results into temporary
1269 variables so as to avoid excess precision problems that occur
1270 on an i386-unknown-sysv4.2 (unixware) host. */
1272 double tmp1 = ((double) local_reg_freq[regno]
1273 / local_reg_live_length[regno]);
1274 double tmp2 = ((double) allocno[num].freq
1275 / allocno[num].live_length);
1277 if (tmp1 < tmp2)
1279 /* Hard reg REGNO was used less in total by local regs
1280 than it would be used by this one allocno! */
1281 int k;
     /* Evict every pseudo that local-alloc placed in any hard reg
        of the group containing REGNO.  */
1282 for (k = 0; k < max_regno; k++)
1283 if (reg_renumber[k] >= 0)
1285 int r = reg_renumber[k];
1286 int endregno
1287 = r + hard_regno_nregs[r][PSEUDO_REGNO_MODE (k)];
1289 if (regno >= r && regno < endregno)
1290 reg_renumber[k] = -1;
1293 best_reg = regno;
1294 break;
1300 /* Did we find a register? */
1302 if (best_reg >= 0)
1304 int lim, j;
1305 HARD_REG_SET this_reg;
1307 /* Yes. Record it as the hard register of this pseudo-reg. */
1308 reg_renumber[allocno[num].reg] = best_reg;
1309 /* Also of any pseudo-regs that share with it. */
1310 if (reg_may_share[allocno[num].reg])
1311 for (j = FIRST_PSEUDO_REGISTER; j < max_regno; j++)
1312 if (reg_allocno[j] == num)
1313 reg_renumber[j] = best_reg;
1315 /* Make a set of the hard regs being allocated. */
1316 CLEAR_HARD_REG_SET (this_reg);
1317 lim = best_reg + hard_regno_nregs[best_reg][mode];
1318 for (j = best_reg; j < lim; j++)
1320 SET_HARD_REG_BIT (this_reg, j);
1321 SET_HARD_REG_BIT (regs_used_so_far, j);
1322 /* This is no longer a reg used just by local regs. */
1323 local_reg_n_refs[j] = 0;
1324 local_reg_freq[j] = 0;
1326 /* For each other pseudo-reg conflicting with this one,
1327 mark it as conflicting with the hard regs this one occupies. */
1328 lim = num;
1329 EXECUTE_IF_SET_IN_ALLOCNO_SET (conflicts + lim * allocno_row_words, j,
1331 IOR_HARD_REG_SET (allocno[j].hard_reg_conflicts, this_reg);
1336 /* Called from `reload' to look for a hard reg to put pseudo reg REGNO in.
1337 Perhaps it had previously seemed not worth a hard reg,
1338 or perhaps its old hard reg has been commandeered for reloads.
1339 FORBIDDEN_REGS indicates certain hard regs that may not be used, even if
1340 they do not appear to be allocated.
1341 If FORBIDDEN_REGS is zero, no regs are forbidden. */
1343 void
1344 retry_global_alloc (int regno, HARD_REG_SET forbidden_regs)
1346 int alloc_no = reg_allocno[regno];
     /* A pseudo with no allocno is not tracked by global alloc; nothing
        we can do for it here.  */
1347 if (alloc_no >= 0)
1349 /* If we have more than one register class,
1350 first try allocating in the class that is cheapest
1351 for this pseudo-reg. If that fails, try any reg. */
     /* Both calls pass retrying = 1 so find_reg will not evict
        local-alloc assignments.  */
1352 if (N_REG_CLASSES > 1)
1353 find_reg (alloc_no, forbidden_regs, 0, 0, 1);
1354 if (reg_renumber[regno] < 0
1355 && reg_alternate_class (regno) != NO_REGS)
1356 find_reg (alloc_no, forbidden_regs, 1, 0, 1);
1358 /* If we found a register, modify the RTL for the register to
1359 show the hard register, and mark that register live. */
1360 if (reg_renumber[regno] >= 0)
1362 REGNO (regno_reg_rtx[regno]) = reg_renumber[regno];
1363 mark_home_live (regno);
1368 /* Record a conflict between register REGNO
1369 and everything currently live.
1370 REGNO must not be a pseudo reg that was allocated
1371 by local_alloc; such numbers must be translated through
1372 reg_renumber before calling here. */
1374 static void
1375 record_one_conflict (int regno)
1377 int j;
1379 if (regno < FIRST_PSEUDO_REGISTER)
1380 /* When a hard register becomes live,
1381 record conflicts with live pseudo regs. */
1382 EXECUTE_IF_SET_IN_ALLOCNO_SET (allocnos_live, j,
1384 SET_HARD_REG_BIT (allocno[j].hard_reg_conflicts, regno);
1386 else
1387 /* When a pseudo-register becomes live,
1388 record conflicts first with hard regs,
1389 then with other pseudo regs. */
1391 int ialloc = reg_allocno[regno];
     /* Start of this allocno's row in the flat conflict bit matrix.  */
1392 int ialloc_prod = ialloc * allocno_row_words;
1394 IOR_HARD_REG_SET (allocno[ialloc].hard_reg_conflicts, hard_regs_live);
     /* OR the live-allocno bit vector into this allocno's row; the
        symmetric bits are filled in later by mirror_conflicts.  */
1395 for (j = allocno_row_words - 1; j >= 0; j--)
1396 conflicts[ialloc_prod + j] |= allocnos_live[j];
1400 /* Record all allocnos currently live as conflicting
1401 with all hard regs currently live.
1403 ALLOCNO_VEC is a vector of LEN allocnos, all allocnos that
1404 are currently live. Their bits are also flagged in allocnos_live. */
1406 static void
1407 record_conflicts (int *allocno_vec, int len)
1409 while (--len >= 0)
1410 IOR_HARD_REG_SET (allocno[allocno_vec[len]].hard_reg_conflicts,
1411 hard_regs_live);
1414 /* If CONFLICTP (i, j) is true, make sure CONFLICTP (j, i) is also true. */
1415 static void
1416 mirror_conflicts (void)
1418 int i, j;
1419 int rw = allocno_row_words;
1420 int rwb = rw * INT_BITS;
     /* P scans the conflict matrix linearly, one row (of RW words) per
        outer iteration; MASK/Q0 track the bit position of the current
        row's allocno within other rows' bit vectors.  */
1421 INT_TYPE *p = conflicts;
1422 INT_TYPE *q0 = conflicts, *q1, *q2;
1423 unsigned INT_TYPE mask;
1425 for (i = max_allocno - 1, mask = 1; i >= 0; i--, mask <<= 1)
1427 if (! mask)
     /* MASK overflowed to zero: move to the next word of the column.  */
1429 mask = 1;
1430 q0++;
1432 for (j = allocno_row_words - 1, q1 = q0; j >= 0; j--, q1 += rwb)
1434 unsigned INT_TYPE word;
     /* Shift WORD right bit by bit; the loop stops early once no set
        bits remain.  Q2 advances one row per bit examined.  */
1436 for (word = (unsigned INT_TYPE) *p++, q2 = q1; word;
1437 word >>= 1, q2 += rw)
1439 if (word & 1)
     /* Bit (row, col) is set, so set the transposed bit (col, row).  */
1440 *q2 |= mask;
1446 /* Handle the case where REG is set by the insn being scanned,
1447 during the forward scan to accumulate conflicts.
1448 Store a 1 in regs_live or allocnos_live for this register, record how many
1449 consecutive hardware registers it actually needs,
1450 and record a conflict with all other registers already live.
1452 Note that even if REG does not remain alive after this insn,
1453 we must mark it here as live, to ensure a conflict between
1454 REG and any other regs set in this insn that really do live.
1455 This is because those other regs could be considered after this.
1457 REG might actually be something other than a register;
1458 if so, we do nothing.
1460 SETTER is 0 if this register was modified by an auto-increment (i.e.,
1461 a REG_INC note was found for it). */
1463 static void
1464 mark_reg_store (rtx reg, rtx setter, void *data ATTRIBUTE_UNUSED)
1466 int regno;
1468 if (GET_CODE (reg) == SUBREG)
1469 reg = SUBREG_REG (reg);
1471 if (!REG_P (reg))
1472 return;
     /* NOTE(review): regs_set appears to queue the regs set by this insn
        so the caller can process their deaths afterward -- the consumer
        of regs_set is outside this excerpt; confirm before relying.  */
1474 regs_set[n_regs_set++] = reg;
     /* A real SET (not a CLOBBER, not an auto-increment where SETTER is
        0) may establish a register preference.  */
1476 if (setter && GET_CODE (setter) != CLOBBER)
1477 set_preference (reg, SET_SRC (setter));
1479 regno = REGNO (reg);
1481 /* Either this is one of the max_allocno pseudo regs not allocated,
1482 or it is or has a hardware reg. First handle the pseudo-regs. */
1483 if (regno >= FIRST_PSEUDO_REGISTER)
1485 if (reg_allocno[regno] >= 0)
1487 SET_ALLOCNO_LIVE (reg_allocno[regno]);
1488 record_one_conflict (regno);
     /* For a pseudo already assigned by local-alloc, fall through and
        treat it as its hard register.  */
1492 if (reg_renumber[regno] >= 0)
1493 regno = reg_renumber[regno];
1495 /* Handle hardware regs (and pseudos allocated to hard regs). */
1496 if (regno < FIRST_PSEUDO_REGISTER && ! fixed_regs[regno])
1498 int last = regno + hard_regno_nregs[regno][GET_MODE (reg)];
     /* Mark each constituent hard reg live and record its conflicts.  */
1499 while (regno < last)
1501 record_one_conflict (regno);
1502 SET_HARD_REG_BIT (hard_regs_live, regno);
1503 regno++;
1508 /* Like mark_reg_set except notice just CLOBBERs; ignore SETs. */
1510 static void
1511 mark_reg_clobber (rtx reg, rtx setter, void *data)
1513 if (GET_CODE (setter) == CLOBBER)
1514 mark_reg_store (reg, setter, data);
1517 /* Record that REG has conflicts with all the regs currently live.
1518 Do not mark REG itself as live. */
1520 static void
1521 mark_reg_conflicts (rtx reg)
1523 int regno;
1525 if (GET_CODE (reg) == SUBREG)
1526 reg = SUBREG_REG (reg);
1528 if (!REG_P (reg))
1529 return;
1531 regno = REGNO (reg);
1533 /* Either this is one of the max_allocno pseudo regs not allocated,
1534 or it is or has a hardware reg. First handle the pseudo-regs. */
1535 if (regno >= FIRST_PSEUDO_REGISTER)
1537 if (reg_allocno[regno] >= 0)
1538 record_one_conflict (regno);
     /* For a pseudo already assigned by local-alloc, fall through and
        treat it as its hard register.  */
1541 if (reg_renumber[regno] >= 0)
1542 regno = reg_renumber[regno];
1544 /* Handle hardware regs (and pseudos allocated to hard regs). */
1545 if (regno < FIRST_PSEUDO_REGISTER && ! fixed_regs[regno])
1547 int last = regno + hard_regno_nregs[regno][GET_MODE (reg)];
     /* Record conflicts for each constituent hard reg, but -- unlike
        mark_reg_store -- do NOT mark REG itself live.  */
1548 while (regno < last)
1550 record_one_conflict (regno);
1551 regno++;
1556 /* Mark REG as being dead (following the insn being scanned now).
1557 Store a 0 in regs_live or allocnos_live for this register. */
1559 static void
1560 mark_reg_death (rtx reg)
1562 int regno = REGNO (reg);
1564 /* Either this is one of the max_allocno pseudo regs not allocated,
1565 or it is a hardware reg. First handle the pseudo-regs. */
1566 if (regno >= FIRST_PSEUDO_REGISTER)
1568 if (reg_allocno[regno] >= 0)
1569 CLEAR_ALLOCNO_LIVE (reg_allocno[regno]);
1572 /* For pseudo reg, see if it has been assigned a hardware reg. */
1573 if (reg_renumber[regno] >= 0)
1574 regno = reg_renumber[regno];
1576 /* Handle hardware regs (and pseudos allocated to hard regs). */
1577 if (regno < FIRST_PSEUDO_REGISTER && ! fixed_regs[regno])
1579 /* Pseudo regs already assigned hardware regs are treated
1580 almost the same as explicit hardware regs. */
1581 int last = regno + hard_regno_nregs[regno][GET_MODE (reg)];
     /* Clear liveness of every constituent hard reg of the group.  */
1582 while (regno < last)
1584 CLEAR_HARD_REG_BIT (hard_regs_live, regno);
1585 regno++;
1590 /* Mark hard reg REGNO as currently live, assuming machine mode MODE
1591 for the value stored in it. MODE determines how many consecutive
1592 registers are actually in use. Do not record conflicts;
1593 it is assumed that the caller will do that. */
1595 static void
1596 mark_reg_live_nc (int regno, enum machine_mode mode)
1598 int last = regno + hard_regno_nregs[regno][mode];
1599 while (regno < last)
1601 SET_HARD_REG_BIT (hard_regs_live, regno);
1602 regno++;
1606 /* Try to set a preference for an allocno to a hard register.
1607 We are passed DEST and SRC which are the operands of a SET. It is known
1608 that SRC is a register. If SRC or the first operand of SRC is a register,
1609 try to set a preference. If one of the two is a hard register and the other
1610 is a pseudo-register, mark the preference.
1612 Note that we are not as aggressive as local-alloc in trying to tie a
1613 pseudo-register to a hard register. */
1615 static void
1616 set_preference (rtx dest, rtx src)
1618 unsigned int src_regno, dest_regno;
1619 /* Amount to add to the hard regno for SRC, or subtract from that for DEST,
1620 to compensate for subregs in SRC or DEST. */
1621 int offset = 0;
1622 unsigned int i;
     /* COPY is 1 while SRC itself is the source reg (a plain copy);
        it becomes 0 once we look inside SRC's first operand.  */
1623 int copy = 1;
1625 if (GET_RTX_FORMAT (GET_CODE (src))[0] == 'e')
1626 src = XEXP (src, 0), copy = 0;
1628 /* Get the reg number for both SRC and DEST.
1629 If neither is a reg, give up. */
1631 if (REG_P (src))
1632 src_regno = REGNO (src);
1633 else if (GET_CODE (src) == SUBREG && REG_P (SUBREG_REG (src)))
1635 src_regno = REGNO (SUBREG_REG (src));
     /* For a hard-reg subreg, translate the byte offset to a regno
        offset; for a pseudo, approximate from the natural mode size.  */
1637 if (REGNO (SUBREG_REG (src)) < FIRST_PSEUDO_REGISTER)
1638 offset += subreg_regno_offset (REGNO (SUBREG_REG (src)),
1639 GET_MODE (SUBREG_REG (src)),
1640 SUBREG_BYTE (src),
1641 GET_MODE (src));
1642 else
1643 offset += (SUBREG_BYTE (src)
1644 / REGMODE_NATURAL_SIZE (GET_MODE (src)));
1646 else
1647 return;
1649 if (REG_P (dest))
1650 dest_regno = REGNO (dest);
1651 else if (GET_CODE (dest) == SUBREG && REG_P (SUBREG_REG (dest)))
1653 dest_regno = REGNO (SUBREG_REG (dest));
1655 if (REGNO (SUBREG_REG (dest)) < FIRST_PSEUDO_REGISTER)
1656 offset -= subreg_regno_offset (REGNO (SUBREG_REG (dest)),
1657 GET_MODE (SUBREG_REG (dest)),
1658 SUBREG_BYTE (dest),
1659 GET_MODE (dest));
1660 else
1661 offset -= (SUBREG_BYTE (dest)
1662 / REGMODE_NATURAL_SIZE (GET_MODE (dest)));
1664 else
1665 return;
1667 /* Convert either or both to hard reg numbers. */
1669 if (reg_renumber[src_regno] >= 0)
1670 src_regno = reg_renumber[src_regno];
1672 if (reg_renumber[dest_regno] >= 0)
1673 dest_regno = reg_renumber[dest_regno];
1675 /* Now if one is a hard reg and the other is a global pseudo
1676 then give the other a preference. */
1678 if (dest_regno < FIRST_PSEUDO_REGISTER && src_regno >= FIRST_PSEUDO_REGISTER
1679 && reg_allocno[src_regno] >= 0)
1681 dest_regno -= offset;
     /* The offset adjustment may have pushed the regno out of the hard
        register range; only record a preference if it is still valid.  */
1682 if (dest_regno < FIRST_PSEUDO_REGISTER)
1684 if (copy)
1685 SET_REGBIT (hard_reg_copy_preferences,
1686 reg_allocno[src_regno], dest_regno);
1688 SET_REGBIT (hard_reg_preferences,
1689 reg_allocno[src_regno], dest_regno);
     /* Full preferences cover every hard reg of the multi-reg group.  */
1690 for (i = dest_regno;
1691 i < dest_regno + hard_regno_nregs[dest_regno][GET_MODE (dest)];
1692 i++)
1693 SET_REGBIT (hard_reg_full_preferences, reg_allocno[src_regno], i);
1697 if (src_regno < FIRST_PSEUDO_REGISTER && dest_regno >= FIRST_PSEUDO_REGISTER
1698 && reg_allocno[dest_regno] >= 0)
1700 src_regno += offset;
1701 if (src_regno < FIRST_PSEUDO_REGISTER)
1703 if (copy)
1704 SET_REGBIT (hard_reg_copy_preferences,
1705 reg_allocno[dest_regno], src_regno);
1707 SET_REGBIT (hard_reg_preferences,
1708 reg_allocno[dest_regno], src_regno);
1709 for (i = src_regno;
1710 i < src_regno + hard_regno_nregs[src_regno][GET_MODE (src)];
1711 i++)
1712 SET_REGBIT (hard_reg_full_preferences, reg_allocno[dest_regno], i);
1717 /* Indicate that hard register number FROM was eliminated and replaced with
1718 an offset from hard register number TO. The status of hard registers live
1719 at the start of a basic block is updated by replacing a use of FROM with
1720 a use of TO. */
1722 void
1723 mark_elimination (int from, int to)
1725 basic_block bb;
     /* In every block where FROM was live on entry, transfer that
        liveness to TO.  */
1727 FOR_EACH_BB (bb)
1729 regset r = bb->global_live_at_start;
1730 if (REGNO_REG_SET_P (r, from))
1732 CLEAR_REGNO_REG_SET (r, from);
1733 SET_REGNO_REG_SET (r, to);
1738 /* Used for communication between the following functions. Holds the
1739 current life information. */
1740 static regset live_relevant_regs;
1742 /* Record in live_relevant_regs and REGS_SET that register REG became live.
1743 This is called via note_stores. */
1744 static void
1745 reg_becomes_live (rtx reg, rtx setter ATTRIBUTE_UNUSED, void *regs_set)
1747 int regno;
1749 if (GET_CODE (reg) == SUBREG)
1750 reg = SUBREG_REG (reg);
1752 if (!REG_P (reg))
1753 return;
1755 regno = REGNO (reg);
1756 if (regno < FIRST_PSEUDO_REGISTER)
1758 int nregs = hard_regno_nregs[regno][GET_MODE (reg)];
     /* Mark each constituent hard reg; fixed regs are tracked in
        live_relevant_regs but never reported in REGS_SET.  */
1759 while (nregs-- > 0)
1761 SET_REGNO_REG_SET (live_relevant_regs, regno);
1762 if (! fixed_regs[regno])
1763 SET_REGNO_REG_SET ((regset) regs_set, regno);
1764 regno++;
     /* Pseudos matter only if they received a hard register.  */
1767 else if (reg_renumber[regno] >= 0)
1769 SET_REGNO_REG_SET (live_relevant_regs, regno);
1770 SET_REGNO_REG_SET ((regset) regs_set, regno);
1774 /* Record in live_relevant_regs that register REGNO died. */
1775 static void
1776 reg_dies (int regno, enum machine_mode mode, struct insn_chain *chain)
1778 if (regno < FIRST_PSEUDO_REGISTER)
     /* MODE tells how many consecutive hard regs the dying value spans.  */
1780 int nregs = hard_regno_nregs[regno][mode];
1781 while (nregs-- > 0)
1783 CLEAR_REGNO_REG_SET (live_relevant_regs, regno);
     /* Fixed regs are never recorded in the chain's dead_or_set set.  */
1784 if (! fixed_regs[regno])
1785 SET_REGNO_REG_SET (&chain->dead_or_set, regno);
1786 regno++;
1789 else
1791 CLEAR_REGNO_REG_SET (live_relevant_regs, regno);
     /* Pseudos are relevant only if they got a hard register.  */
1792 if (reg_renumber[regno] >= 0)
1793 SET_REGNO_REG_SET (&chain->dead_or_set, regno);
1797 /* Walk the insns of the current function and build reload_insn_chain,
1798 and record register life information. */
1799 void
1800 build_insn_chain (rtx first)
1802 struct insn_chain **p = &reload_insn_chain;
1803 struct insn_chain *prev = 0;
1804 basic_block b = ENTRY_BLOCK_PTR->next_bb;
1806 live_relevant_regs = ALLOC_REG_SET (&reg_obstack);
1808 for (; first; first = NEXT_INSN (first))
1810 struct insn_chain *c;
     /* On entering a new basic block, reset the live set from the
        block's global_live_at_start, keeping only "relevant" regs:
        non-eliminable hard regs and allocated pseudos.  */
1812 if (first == BB_HEAD (b))
1814 unsigned i;
1815 bitmap_iterator bi;
1817 CLEAR_REG_SET (live_relevant_regs);
1819 EXECUTE_IF_SET_IN_BITMAP (b->global_live_at_start, 0, i, bi)
1821 if (i < FIRST_PSEUDO_REGISTER
1822 ? ! TEST_HARD_REG_BIT (eliminable_regset, i)
1823 : reg_renumber[i] >= 0)
1824 SET_REGNO_REG_SET (live_relevant_regs, i);
     /* Notes and barriers get no chain element of their own.  */
1828 if (!NOTE_P (first) && !BARRIER_P (first))
1830 c = new_insn_chain ();
1831 c->prev = prev;
1832 prev = c;
1833 *p = c;
1834 p = &c->next;
1835 c->insn = first;
1836 c->block = b->index;
1838 if (INSN_P (first))
1840 rtx link;
1842 /* Mark the death of everything that dies in this instruction. */
1844 for (link = REG_NOTES (first); link; link = XEXP (link, 1))
1845 if (REG_NOTE_KIND (link) == REG_DEAD
1846 && REG_P (XEXP (link, 0)))
1847 reg_dies (REGNO (XEXP (link, 0)), GET_MODE (XEXP (link, 0)),
1850 COPY_REG_SET (&c->live_throughout, live_relevant_regs);
1852 /* Mark everything born in this instruction as live. */
1854 note_stores (PATTERN (first), reg_becomes_live,
1855 &c->dead_or_set);
1857 else
1858 COPY_REG_SET (&c->live_throughout, live_relevant_regs);
1860 if (INSN_P (first))
1862 rtx link;
1864 /* Mark anything that is set in this insn and then unused as dying. */
1866 for (link = REG_NOTES (first); link; link = XEXP (link, 1))
1867 if (REG_NOTE_KIND (link) == REG_UNUSED
1868 && REG_P (XEXP (link, 0)))
1869 reg_dies (REGNO (XEXP (link, 0)), GET_MODE (XEXP (link, 0)),
1874 if (first == BB_END (b))
1875 b = b->next_bb;
1877 /* Stop after we pass the end of the last basic block. Verify that
1878 no real insns are after the end of the last basic block.
1880 We may want to reorganize the loop somewhat since this test should
1881 always be the right exit test. Allow an ADDR_VEC or ADDR_DIF_VEC if
1882 the previous real insn is a JUMP_INSN. */
1883 if (b == EXIT_BLOCK_PTR)
1885 #ifdef ENABLE_CHECKING
1886 for (first = NEXT_INSN (first); first; first = NEXT_INSN (first))
1887 gcc_assert (!INSN_P (first)
1888 || GET_CODE (PATTERN (first)) == USE
1889 || ((GET_CODE (PATTERN (first)) == ADDR_VEC
1890 || GET_CODE (PATTERN (first)) == ADDR_DIFF_VEC)
1891 && prev_real_insn (first) != 0
1892 && JUMP_P (prev_real_insn (first))));
1893 #endif
1894 break;
1897 FREE_REG_SET (live_relevant_regs);
     /* Terminate the chain.  */
1898 *p = 0;
1901 /* Print debugging trace information if -dg switch is given,
1902 showing the information on which the allocation decisions are based. */
1904 static void
1905 dump_conflicts (FILE *file)
1907 int i;
1908 int has_preferences;
1909 int nregs;
     /* First count and list the allocnos that still need a hard reg
        (those whose pseudo has not been renumbered yet).  */
1910 nregs = 0;
1911 for (i = 0; i < max_allocno; i++)
1913 if (reg_renumber[allocno[allocno_order[i]].reg] >= 0)
1914 continue;
1915 nregs++;
1917 fprintf (file, ";; %d regs to allocate:", nregs);
1918 for (i = 0; i < max_allocno; i++)
1920 int j;
1921 if (reg_renumber[allocno[allocno_order[i]].reg] >= 0)
1922 continue;
1923 fprintf (file, " %d", allocno[allocno_order[i]].reg);
     /* List any other pseudos sharing this allocno with a "+" prefix.  */
1924 for (j = 0; j < max_regno; j++)
1925 if (reg_allocno[j] == allocno_order[i]
1926 && j != allocno[allocno_order[i]].reg)
1927 fprintf (file, "+%d", j);
1928 if (allocno[allocno_order[i]].size != 1)
1929 fprintf (file, " (%d)", allocno[allocno_order[i]].size);
1931 fprintf (file, "\n");
     /* Then, per allocno: its conflicting pseudos and hard regs,
        followed by its hard-register preferences if it has any.  */
1933 for (i = 0; i < max_allocno; i++)
1935 int j;
1936 fprintf (file, ";; %d conflicts:", allocno[i].reg);
1937 for (j = 0; j < max_allocno; j++)
1938 if (CONFLICTP (j, i))
1939 fprintf (file, " %d", allocno[j].reg);
1940 for (j = 0; j < FIRST_PSEUDO_REGISTER; j++)
1941 if (TEST_HARD_REG_BIT (allocno[i].hard_reg_conflicts, j))
1942 fprintf (file, " %d", j);
1943 fprintf (file, "\n");
1945 has_preferences = 0;
1946 for (j = 0; j < FIRST_PSEUDO_REGISTER; j++)
1947 if (TEST_HARD_REG_BIT (allocno[i].hard_reg_preferences, j))
1948 has_preferences = 1;
1950 if (! has_preferences)
1951 continue;
1952 fprintf (file, ";; %d preferences:", allocno[i].reg);
1953 for (j = 0; j < FIRST_PSEUDO_REGISTER; j++)
1954 if (TEST_HARD_REG_BIT (allocno[i].hard_reg_preferences, j))
1955 fprintf (file, " %d", j);
1956 fprintf (file, "\n");
1958 fprintf (file, "\n");
1961 void
1962 dump_global_regs (FILE *file)
1964 int i, j;
1966 fprintf (file, ";; Register dispositions:\n");
1967 for (i = FIRST_PSEUDO_REGISTER, j = 0; i < max_regno; i++)
1968 if (reg_renumber[i] >= 0)
1970 fprintf (file, "%d in %d ", i, reg_renumber[i]);
1971 if (++j % 6 == 0)
1972 fprintf (file, "\n");
1975 fprintf (file, "\n\n;; Hard regs used: ");
1976 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1977 if (regs_ever_live[i])
1978 fprintf (file, " %d", i);
1979 fprintf (file, "\n\n");
1984 /* This page contains code to make live information more accurate.
1985 The accurate register liveness at program point P means:
1986 o there is a path from P to usage of the register and the
1987 register is not redefined or killed on the path.
1988 o register at P is partially available, i.e. there is a path from
1989 a register definition to the point P and the register is not
1990 killed (clobbered) on the path
1992 The standard GCC live information means only the first condition.
1993 Without the partial availability, there will be more register
1994 conflicts and as a consequence worse register allocation. The
1995 typical example where the information can be different is a
1996 register initialized in the loop at the basic block preceding the
1997 loop in CFG. */
1999 /* The following structure contains basic block data flow information
2000 used to calculate partial availability of registers. */
2002 struct bb_info
2004 /* The basic block reverse post-order number. */
2005 int rts_number;
2006 /* Registers used uninitialized in an insn in which there is an
2007 early clobbered register might get the same hard register. */
2008 bitmap earlyclobber;
     /* KILLED: registers set or clobbered in the block; AVLOC:
        registers defined (by a real SET, not a CLOBBER) and not
        killed afterward in the block -- see mark_reg_change.  */
2011 bitmap killed, avloc;
2012 /* Registers partially available and living (in other words whose
2013 values were calculated and used) correspondingly at the start
2014 and end of the basic block. */
2015 bitmap live_pavin, live_pavout;
2018 /* Macros for accessing data flow information of basic blocks. */
2020 #define BB_INFO(BB) ((struct bb_info *) (BB)->aux)
2021 #define BB_INFO_BY_INDEX(N) BB_INFO (BASIC_BLOCK(N))
2023 /* The function allocates the info structures of each basic block. It
2024 also initialized LIVE_PAVIN and LIVE_PAVOUT as if all hard
2025 registers were partially available. */
2027 static void
2028 allocate_bb_info (void)
2030 int i;
2031 basic_block bb;
2032 struct bb_info *bb_info;
2033 bitmap init;
2035 alloc_aux_for_blocks (sizeof (struct bb_info));
     /* INIT has a bit set for every hard register; copying it into
        LIVE_PAVIN/LIVE_PAVOUT starts the dataflow as if all hard regs
        were partially available.  */
2036 init = BITMAP_ALLOC (NULL);
2037 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2038 bitmap_set_bit (init, i);
2039 FOR_EACH_BB (bb)
2041 bb_info = bb->aux;
2042 bb_info->earlyclobber = BITMAP_ALLOC (NULL);
2043 bb_info->avloc = BITMAP_ALLOC (NULL);
2044 bb_info->killed = BITMAP_ALLOC (NULL);
2045 bb_info->live_pavin = BITMAP_ALLOC (NULL);
2046 bb_info->live_pavout = BITMAP_ALLOC (NULL);
2047 bitmap_copy (bb_info->live_pavin, init);
2048 bitmap_copy (bb_info->live_pavout, init);
2050 BITMAP_FREE (init);
2053 /* The function frees the allocated info of all basic blocks. */
2055 static void
2056 free_bb_info (void)
2058 basic_block bb;
2059 struct bb_info *bb_info;
2061 FOR_EACH_BB (bb)
2063 bb_info = BB_INFO (bb);
2064 BITMAP_FREE (bb_info->live_pavout);
2065 BITMAP_FREE (bb_info->live_pavin);
2066 BITMAP_FREE (bb_info->killed);
2067 BITMAP_FREE (bb_info->avloc);
2068 BITMAP_FREE (bb_info->earlyclobber);
2070 free_aux_for_blocks ();
2073 /* The function modifies local info for register REG being changed in
2074 SETTER. DATA is used to pass the current basic block info. */
2076 static void
2077 mark_reg_change (rtx reg, rtx setter, void *data)
2079 int regno;
2080 basic_block bb = data;
2081 struct bb_info *bb_info = BB_INFO (bb);
2083 if (GET_CODE (reg) == SUBREG)
2084 reg = SUBREG_REG (reg);
2086 if (!REG_P (reg))
2087 return;
2089 regno = REGNO (reg);
2090 bitmap_set_bit (bb_info->killed, regno);
2092 if (GET_CODE (setter) != CLOBBER)
2093 bitmap_set_bit (bb_info->avloc, regno);
2094 else
2095 bitmap_clear_bit (bb_info->avloc, regno);
/* Classes of registers which could be early clobbered in the current
   insn.  Filled by check_earlyclobber, consumed by
   mark_reg_use_for_earlyclobber.  */

static varray_type earlyclobber_regclass;
/* This function finds and stores register classes that could be early
   clobbered in INSN.  If any earlyclobber classes are found, the function
   returns TRUE, in all other cases it returns FALSE.  The classes found
   are accumulated (without duplicates) in earlyclobber_regclass.  */

static bool
check_earlyclobber (rtx insn)
{
  int opno;
  bool found = false;

  extract_insn (insn);

  VARRAY_POP_ALL (earlyclobber_regclass);
  for (opno = 0; opno < recog_data.n_operands; opno++)
    {
      char c;
      bool amp_p;
      int i;
      enum reg_class class;
      const char *p = recog_data.constraints[opno];

      /* CLASS and AMP_P track the current constraint alternative;
	 both are reset at each ',' separator.  */
      class = NO_REGS;
      amp_p = false;
      for (;;)
	{
	  c = *p;
	  switch (c)
	    {
	    case '=':  case '+':  case '?':
	    case '#':  case '!':
	    case '*':  case '%':
	    case 'm':  case '<':  case '>':  case 'V':  case 'o':
	    case 'E':  case 'F':  case 'G':  case 'H':
	    case 's':  case 'i':  case 'n':
	    case 'I':  case 'J':  case 'K':  case 'L':
	    case 'M':  case 'N':  case 'O':  case 'P':
	    case 'X':
	    case '0': case '1':  case '2':  case '3':  case '4':
	    case '5': case '6': case '7': case '8': case '9':
	      /* These don't say anything we care about.  */
	      break;

	    case '&':
	      amp_p = true;
	      break;
	    case '\0':
	    case ',':
	      /* End of an alternative: if it was an earlyclobber
		 register alternative, record its class (once).  */
	      if (amp_p && class != NO_REGS)
		{
		  found = true;
		  for (i = VARRAY_ACTIVE_SIZE (earlyclobber_regclass) - 1;
		       i >= 0; i--)
		    if (VARRAY_INT (earlyclobber_regclass, i) == (int) class)
		      break;
		  /* I < 0 means the class was not already present.  */
		  if (i < 0)
		    VARRAY_PUSH_INT (earlyclobber_regclass, (int) class);
		}

	      amp_p = false;
	      class = NO_REGS;
	      break;

	    case 'r':
	      class = GENERAL_REGS;
	      break;

	    default:
	      /* Machine-specific constraint letter.  */
	      class = REG_CLASS_FROM_CONSTRAINT (c, p);
	      break;
	    }
	  if (c == '\0')
	    break;
	  /* Some constraints are multi-character.  */
	  p += CONSTRAINT_LEN (c, p);
	}
    }

  return found;
}
/* The function checks that pseudo-register *X has a class
   intersecting with the class of pseudo-register could be early
   clobbered in the same insn.
   This function is a no-op if earlyclobber_regclass is empty.

   for_each_rtx callback; DATA is the current basic block.  Matching
   pseudos are added to the block's EARLYCLOBBER set.  Always returns
   0 so that the traversal continues.  */

static int
mark_reg_use_for_earlyclobber (rtx *x, void *data ATTRIBUTE_UNUSED)
{
  enum reg_class pref_class, alt_class;
  int i, regno;
  basic_block bb = data;
  struct bb_info *bb_info = BB_INFO (bb);

  if (GET_CODE (*x) == REG && REGNO (*x) >= FIRST_PSEUDO_REGISTER)
    {
      regno = REGNO (*x);
      /* Skip pseudos already defined in this block — only
	 potentially-uninitialized uses are of interest.  */
      if (bitmap_bit_p (bb_info->killed, regno)
	  || bitmap_bit_p (bb_info->avloc, regno))
	return 0;
      pref_class = reg_preferred_class (regno);
      alt_class = reg_alternate_class (regno);
      for (i = VARRAY_ACTIVE_SIZE (earlyclobber_regclass) - 1; i >= 0; i--)
	if (reg_classes_intersect_p (VARRAY_INT (earlyclobber_regclass, i),
				     pref_class)
	    || (VARRAY_INT (earlyclobber_regclass, i) != NO_REGS
		&& reg_classes_intersect_p (VARRAY_INT (earlyclobber_regclass,
							i),
					    alt_class)))
	  {
	    bitmap_set_bit (bb_info->earlyclobber, regno);
	    break;
	  }
    }
  return 0;
}
/* The function processes all pseudo-registers in *X with the aid of
   previous function.  note_uses callback adapter: walks the whole
   rtx *X with mark_reg_use_for_earlyclobber, passing DATA (the
   current basic block) through.  */

static void
mark_reg_use_for_earlyclobber_1 (rtx *x, void *data)
{
  for_each_rtx (x, mark_reg_use_for_earlyclobber, data);
}
2227 /* The function calculates local info for each basic block. */
2229 static void
2230 calculate_local_reg_bb_info (void)
2232 basic_block bb;
2233 rtx insn, bound;
2235 VARRAY_INT_INIT (earlyclobber_regclass, 20,
2236 "classes of registers early clobbered in an insn");
2237 FOR_EACH_BB (bb)
2239 bound = NEXT_INSN (BB_END (bb));
2240 for (insn = BB_HEAD (bb); insn != bound; insn = NEXT_INSN (insn))
2241 if (INSN_P (insn))
2243 note_stores (PATTERN (insn), mark_reg_change, bb);
2244 if (check_earlyclobber (insn))
2245 note_uses (&PATTERN (insn), mark_reg_use_for_earlyclobber_1, bb);
2250 /* The function sets up reverse post-order number of each basic
2251 block. */
2253 static void
2254 set_up_bb_rts_numbers (void)
2256 int i;
2257 int *rts_order;
2259 rts_order = xmalloc (sizeof (int) * n_basic_blocks);
2260 flow_reverse_top_sort_order_compute (rts_order);
2261 for (i = 0; i < n_basic_blocks; i++)
2262 BB_INFO_BY_INDEX (rts_order [i])->rts_number = i;
2263 free (rts_order);
2266 /* Compare function for sorting blocks in reverse postorder. */
2268 static int
2269 rpost_cmp (const void *bb1, const void *bb2)
2271 basic_block b1 = *(basic_block *) bb1, b2 = *(basic_block *) bb2;
2273 return BB_INFO (b2)->rts_number - BB_INFO (b1)->rts_number;
/* Temporary bitmap used for live_pavin, live_pavout calculation;
   allocated and freed inside calculate_reg_pav.  */
static bitmap temp_bitmap;
/* The function calculates partial register availability according to
   the following equations:

     bb.live_pavin
       = empty for entry block
       | union (live_pavout of predecessors) & global_live_at_start
     bb.live_pavout = union (bb.live_pavin - bb.killed, bb.avloc)
		      & global_live_at_end

   Solved iteratively with a worklist: blocks whose LIVE_PAVOUT
   changed push their successors onto the next round's worklist, which
   is processed in reverse postorder for fast convergence.  */

static void
calculate_reg_pav (void)
{
  basic_block bb, succ;
  edge e;
  int i, nel;
  /* Current and next-iteration worklists; swapped each round.  */
  varray_type bbs, new_bbs, temp;
  basic_block *bb_array;
  /* Membership bitmap so a block is queued at most once per round.  */
  sbitmap wset;

  VARRAY_BB_INIT (bbs, n_basic_blocks, "basic blocks");
  VARRAY_BB_INIT (new_bbs, n_basic_blocks, "basic blocks for the next iter.");
  temp_bitmap = BITMAP_ALLOC (NULL);
  /* Seed the worklist with every block.  */
  FOR_EACH_BB (bb)
    {
      VARRAY_PUSH_BB (bbs, bb);
    }
  wset = sbitmap_alloc (n_basic_blocks + 1);
  while (VARRAY_ACTIVE_SIZE (bbs))
    {
      bb_array = &VARRAY_BB (bbs, 0);
      nel = VARRAY_ACTIVE_SIZE (bbs);
      /* Process in reverse postorder so predecessors tend to be
	 visited before their successors.  */
      qsort (bb_array, nel, sizeof (basic_block), rpost_cmp);
      sbitmap_zero (wset);
      for (i = 0; i < nel; i++)
	{
	  edge_iterator ei;
	  struct bb_info *bb_info;
	  bitmap bb_live_pavin, bb_live_pavout;

	  bb = bb_array [i];
	  bb_info = BB_INFO (bb);
	  bb_live_pavin = bb_info->live_pavin;
	  bb_live_pavout = bb_info->live_pavout;
	  /* live_pavin |= union of predecessors' live_pavout
	     (the entry block contributes nothing).  */
	  FOR_EACH_EDGE (e, ei, bb->preds)
	    {
	      basic_block pred = e->src;

	      if (pred->index != ENTRY_BLOCK)
		bitmap_ior_into (bb_live_pavin, BB_INFO (pred)->live_pavout);
	    }
	  bitmap_and_into (bb_live_pavin, bb->global_live_at_start);
	  /* temp = (avloc | (live_pavin & ~killed)) & global_live_at_end  */
	  bitmap_ior_and_compl (temp_bitmap, bb_info->avloc,
				bb_live_pavin, bb_info->killed);
	  bitmap_and_into (temp_bitmap, bb->global_live_at_end);
	  if (! bitmap_equal_p (temp_bitmap, bb_live_pavout))
	    {
	      /* LIVE_PAVOUT changed: requeue the (non-exit) successors
		 not already scheduled for the next round.  */
	      bitmap_copy (bb_live_pavout, temp_bitmap);
	      FOR_EACH_EDGE (e, ei, bb->succs)
		{
		  succ = e->dest;
		  if (succ->index != EXIT_BLOCK
		      && !TEST_BIT (wset, succ->index))
		    {
		      SET_BIT (wset, succ->index);
		      VARRAY_PUSH_BB (new_bbs, succ);
		    }
		}
	    }
	}
      /* Swap the worklists and empty the one for the next round.  */
      temp = bbs;
      bbs = new_bbs;
      new_bbs = temp;
      VARRAY_POP_ALL (new_bbs);
    }
  sbitmap_free (wset);
  BITMAP_FREE (temp_bitmap);
}
/* The function modifies partial availability information for two
   special cases to prevent incorrect work of the subsequent passes
   with the accurate live information based on the partial
   availability.  */

static void
modify_reg_pav (void)
{
  basic_block bb;
  struct bb_info *bb_info;
#ifdef STACK_REGS
  int i;
  HARD_REG_SET zero, stack_hard_regs, used;
  /* Pseudos whose preferred or alternate class can use a stack
     register.  */
  bitmap stack_regs;

  CLEAR_HARD_REG_SET (zero);
  CLEAR_HARD_REG_SET (stack_hard_regs);
  for (i = FIRST_STACK_REG; i <= LAST_STACK_REG; i++)
    SET_HARD_REG_BIT(stack_hard_regs, i);
  stack_regs = BITMAP_ALLOC (NULL);
  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    {
      COPY_HARD_REG_SET (used, reg_class_contents[reg_preferred_class (i)]);
      IOR_HARD_REG_SET (used, reg_class_contents[reg_alternate_class (i)]);
      AND_HARD_REG_SET (used, stack_hard_regs);
      /* Jumps to SKIP when the pseudo cannot live in a stack reg.  */
      GO_IF_HARD_REG_EQUAL(used, zero, skip);
      bitmap_set_bit (stack_regs, i);
    skip:
      ;
    }
#endif
  FOR_EACH_BB (bb)
    {
      bb_info = BB_INFO (bb);

      /* Reload can assign the same hard register to uninitialized
	 pseudo-register and early clobbered pseudo-register in an
	 insn if the pseudo-register is used first time in given BB
	 and not lived at the BB start.  To prevent this we don't
	 change life information for such pseudo-registers.  */
      bitmap_ior_into (bb_info->live_pavin, bb_info->earlyclobber);
#ifdef STACK_REGS
      /* We can not use the same stack register for uninitialized
	 pseudo-register and another living pseudo-register because if the
	 uninitialized pseudo-register dies, subsequent pass reg-stack
	 will be confused (it will believe that the other register
	 dies).  */
      bitmap_ior_into (bb_info->live_pavin, stack_regs);
#endif
    }
#ifdef STACK_REGS
  BITMAP_FREE (stack_regs);
#endif
}
2412 /* The following function makes live information more accurate by
2413 modifying global_live_at_start and global_live_at_end of basic
2414 blocks.
2416 The standard GCC life analysis permits registers to live
2417 uninitialized, for example:
2419 R is never used
2420 .....
2421 Loop:
2422 R is defined
2424 R is used.
2426 With normal life_analysis, R would be live before "Loop:".
2427 The result is that R causes many interferences that do not
2428 serve any purpose.
2430 After the function call a register lives at a program point
2431 only if it is initialized on a path from CFG entry to the
2432 program point. */
2434 static void
2435 make_accurate_live_analysis (void)
2437 basic_block bb;
2438 struct bb_info *bb_info;
2440 max_regno = max_reg_num ();
2441 compact_blocks ();
2442 allocate_bb_info ();
2443 calculate_local_reg_bb_info ();
2444 set_up_bb_rts_numbers ();
2445 calculate_reg_pav ();
2446 modify_reg_pav ();
2447 FOR_EACH_BB (bb)
2449 bb_info = BB_INFO (bb);
2451 bitmap_and_into (bb->global_live_at_start, bb_info->live_pavin);
2452 bitmap_and_into (bb->global_live_at_end, bb_info->live_pavout);
2454 free_bb_info ();