* basic-block.h (FOR_EACH_EDGE): Record initial edge count.
[official-gcc.git] / gcc / global.c
blob8f52d50d437aafa4197c4cc85f0cc0a9a4bd9bef
1 /* Allocate registers for pseudo-registers that span basic blocks.
2 Copyright (C) 1987, 1988, 1991, 1994, 1996, 1997, 1998,
3 1999, 2000, 2002, 2003, 2004 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
28 #include "machmode.h"
29 #include "hard-reg-set.h"
30 #include "rtl.h"
31 #include "tm_p.h"
32 #include "flags.h"
33 #include "basic-block.h"
34 #include "regs.h"
35 #include "function.h"
36 #include "insn-config.h"
37 #include "recog.h"
38 #include "reload.h"
39 #include "output.h"
40 #include "toplev.h"
42 /* This pass of the compiler performs global register allocation.
43 It assigns hard register numbers to all the pseudo registers
44 that were not handled in local_alloc. Assignments are recorded
45 in the vector reg_renumber, not by changing the rtl code.
46 (Such changes are made by final). The entry point is
47 the function global_alloc.
49 After allocation is complete, the reload pass is run as a subroutine
50 of this pass, so that when a pseudo reg loses its hard reg due to
51 spilling it is possible to make a second attempt to find a hard
52 reg for it. The reload pass is independent in other respects
53 and it is run even when stupid register allocation is in use.
55 1. Assign allocation-numbers (allocnos) to the pseudo-registers
56 still needing allocations and to the pseudo-registers currently
57 allocated by local-alloc which may be spilled by reload.
58 Set up tables reg_allocno and allocno_reg to map
59 reg numbers to allocnos and vice versa.
60 max_allocno gets the number of allocnos in use.
62 2. Allocate a max_allocno by max_allocno conflict bit matrix and clear it.
63 Allocate a max_allocno by FIRST_PSEUDO_REGISTER conflict matrix
64 for conflicts between allocnos and explicit hard register use
65 (which includes use of pseudo-registers allocated by local_alloc).
67 3. For each basic block
68 walk forward through the block, recording which
69 pseudo-registers and which hardware registers are live.
70 Build the conflict matrix between the pseudo-registers
71 and another matrix of pseudo-registers versus hardware registers.
72 Also record the preferred hardware registers
73 for each pseudo-register.
75 4. Sort a table of the allocnos into order of
76 desirability of the variables.
78 5. Allocate the variables in that order; each if possible into
79 a preferred register, else into another register. */
81 /* Number of pseudo-registers which are candidates for allocation. */
83 static int max_allocno;
85 /* Indexed by (pseudo) reg number, gives the allocno, or -1
86 for pseudo registers which are not to be allocated. */
88 static int *reg_allocno;
/* NOTE(review): this copy of the file appears to have lost lines that
   contained only braces; the opening `{' of `struct allocno' (and its
   closing `};') are missing here — confirm against the upstream file.  */
90 struct allocno
/* Register number of the pseudo this allocno stands for
   (set from reg_allocno in global_alloc).  */
92 int reg;
93 /* Gives the number of consecutive hard registers needed by that
94 pseudo reg. */
95 int size;
97 /* Number of calls crossed by each allocno. */
98 int calls_crossed;
100 /* Number of refs to each allocno. */
101 int n_refs;
103 /* Frequency of uses of each allocno. */
104 int freq;
106 /* Guess at live length of each allocno.
107 This is actually the max of the live lengths of the regs. */
108 int live_length;
110 /* Set of hard regs conflicting with allocno N. */
112 HARD_REG_SET hard_reg_conflicts;
114 /* Set of hard regs preferred by allocno N.
115 This is used to make allocnos go into regs that are copied to or from them,
116 when possible, to reduce register shuffling. */
118 HARD_REG_SET hard_reg_preferences;
120 /* Similar, but just counts register preferences made in simple copy
121 operations, rather than arithmetic. These are given priority because
122 we can always eliminate an insn by using these, but using a register
123 in the above list won't always eliminate an insn. */
125 HARD_REG_SET hard_reg_copy_preferences;
127 /* Similar to hard_reg_preferences, but includes bits for subsequent
128 registers when an allocno is multi-word. The above variable is used for
129 allocation while this is used to build reg_someone_prefers, below. */
131 HARD_REG_SET hard_reg_full_preferences;
133 /* Set of hard registers that some later allocno has a preference for. */
135 HARD_REG_SET regs_someone_prefers;
137 #ifdef STACK_REGS
138 /* Set to true if allocno can't be allocated in the stack register. */
139 bool no_stack_reg;
140 #endif
/* The table of all allocnos, indexed by allocno number;
   sized to max_allocno in global_alloc.  */
143 static struct allocno *allocno;
145 /* A vector of the integers from 0 to max_allocno-1,
146 sorted in the order of first-to-be-allocated first. */
148 static int *allocno_order;
150 /* Indexed by (pseudo) reg number, gives the number of another
151 lower-numbered pseudo reg which can share a hard reg with this pseudo
152 *even if the two pseudos would otherwise appear to conflict*. */
154 static int *reg_may_share;
156 /* Define the number of bits in each element of `conflicts' and what
157 type that element has. We use the largest integer format on the
158 host machine. */
160 #define INT_BITS HOST_BITS_PER_WIDE_INT
161 #define INT_TYPE HOST_WIDE_INT
163 /* max_allocno by max_allocno array of bits,
164 recording whether two allocno's conflict (can't go in the same
165 hardware register).
167 `conflicts' is symmetric after the call to mirror_conflicts. */
169 static INT_TYPE *conflicts;
171 /* Number of ints required to hold max_allocno bits.
172 This is the length of a row in `conflicts'. */
174 static int allocno_row_words;
176 /* Two macros to test or store 1 in an element of `conflicts'. */
/* Nonzero if allocno I conflicts with allocno J.  Row I of `conflicts'
   is indexed, so this is only symmetric after mirror_conflicts.  */
178 #define CONFLICTP(I, J) \
179 (conflicts[(I) * allocno_row_words + (unsigned) (J) / INT_BITS] \
180 & ((INT_TYPE) 1 << ((unsigned) (J) % INT_BITS)))
182 /* For any allocno set in ALLOCNO_SET, set ALLOCNO to that allocno,
183 and execute CODE. */
/* NOTE(review): several brace-only lines inside this macro body appear
   to have been dropped by the extraction of this copy; compare with the
   upstream file before editing.  */
184 #define EXECUTE_IF_SET_IN_ALLOCNO_SET(ALLOCNO_SET, ALLOCNO, CODE) \
185 do { \
186 int i_; \
187 int allocno_; \
188 INT_TYPE *p_ = (ALLOCNO_SET); \
190 for (i_ = allocno_row_words - 1, allocno_ = 0; i_ >= 0; \
191 i_--, allocno_ += INT_BITS) \
193 unsigned INT_TYPE word_ = (unsigned INT_TYPE) *p_++; \
195 for ((ALLOCNO) = allocno_; word_; word_ >>= 1, (ALLOCNO)++) \
197 if (word_ & 1) \
198 {CODE;} \
201 } while (0)
203 /* This doesn't work for non-GNU C due to the way CODE is macro expanded. */
204 #if 0
205 /* For any allocno that conflicts with IN_ALLOCNO, set OUT_ALLOCNO to
206 the conflicting allocno, and execute CODE. This macro assumes that
207 mirror_conflicts has been run. */
208 #define EXECUTE_IF_CONFLICT(IN_ALLOCNO, OUT_ALLOCNO, CODE)\
209 EXECUTE_IF_SET_IN_ALLOCNO_SET (conflicts + (IN_ALLOCNO) * allocno_row_words,\
210 OUT_ALLOCNO, (CODE))
211 #endif
213 /* Set of hard regs currently live (during scan of all insns). */
215 static HARD_REG_SET hard_regs_live;
217 /* Set of registers that global-alloc isn't supposed to use. */
219 static HARD_REG_SET no_global_alloc_regs;
221 /* Set of registers used so far. */
223 static HARD_REG_SET regs_used_so_far;
225 /* Number of refs to each hard reg, as used by local alloc.
226 It is zero for a reg that contains global pseudos or is explicitly used. */
228 static int local_reg_n_refs[FIRST_PSEUDO_REGISTER];
230 /* Frequency of uses of given hard reg. */
231 static int local_reg_freq[FIRST_PSEUDO_REGISTER];
233 /* Guess at live length of each hard reg, as used by local alloc.
234 This is actually the sum of the live lengths of the specific regs. */
236 static int local_reg_live_length[FIRST_PSEUDO_REGISTER];
238 /* Set to 1 a bit in a vector TABLE of HARD_REG_SETs, for vector
239 element I, and hard register number J. */
241 #define SET_REGBIT(TABLE, I, J) SET_HARD_REG_BIT (allocno[I].TABLE, J)
243 /* Bit mask for allocnos live at current point in the scan. */
245 static INT_TYPE *allocnos_live;
247 /* Test, set or clear bit number I in allocnos_live,
248 a bit vector indexed by allocno. */
250 #define SET_ALLOCNO_LIVE(I) \
251 (allocnos_live[(unsigned) (I) / INT_BITS] \
252 |= ((INT_TYPE) 1 << ((unsigned) (I) % INT_BITS)))
254 #define CLEAR_ALLOCNO_LIVE(I) \
255 (allocnos_live[(unsigned) (I) / INT_BITS] \
256 &= ~((INT_TYPE) 1 << ((unsigned) (I) % INT_BITS)))
258 /* This is turned off because it doesn't work right for DImode.
259 (And it is only used for DImode, so the other cases are worthless.)
260 The problem is that it isn't true that there is NO possibility of conflict;
261 only that there is no conflict if the two pseudos get the exact same regs.
262 If they were allocated with a partial overlap, there would be a conflict.
263 We can't safely turn off the conflict unless we have another way to
264 prevent the partial overlap.
266 Idea: change hard_reg_conflicts so that instead of recording which
267 hard regs the allocno may not overlap, it records where the allocno
268 may not start. Change both where it is used and where it is updated.
269 Then there is a way to record that (reg:DI 108) may start at 10
270 but not at 9 or 11. There is still the question of how to record
271 this semi-conflict between two pseudos. */
272 #if 0
273 /* Reg pairs for which conflict after the current insn
274 is inhibited by a REG_NO_CONFLICT note.
275 If the table gets full, we ignore any other notes--that is conservative. */
276 #define NUM_NO_CONFLICT_PAIRS 4
277 /* Number of pairs in use in this insn. */
278 int n_no_conflict_pairs;
279 static struct { int allocno1, allocno2;}
280 no_conflict_pairs[NUM_NO_CONFLICT_PAIRS];
281 #endif /* 0 */
283 /* Record all regs that are set in any one insn.
284 Communication from mark_reg_{store,clobber} and global_conflicts. */
286 static rtx *regs_set;
287 static int n_regs_set;
289 /* All registers that can be eliminated. */
291 static HARD_REG_SET eliminable_regset;
/* Forward declarations for the static helpers of this pass.  */
293 static int allocno_compare (const void *, const void *);
294 static void global_conflicts (void);
295 static void mirror_conflicts (void);
296 static void expand_preferences (void);
297 static void prune_preferences (void);
298 static void find_reg (int, HARD_REG_SET, int, int, int);
299 static void record_one_conflict (int);
300 static void record_conflicts (int *, int);
301 static void mark_reg_store (rtx, rtx, void *);
302 static void mark_reg_clobber (rtx, rtx, void *);
303 static void mark_reg_conflicts (rtx);
304 static void mark_reg_death (rtx);
305 static void mark_reg_live_nc (int, enum machine_mode);
306 static void set_preference (rtx, rtx);
307 static void dump_conflicts (FILE *);
308 static void reg_becomes_live (rtx, rtx, void *);
309 static void reg_dies (int, enum machine_mode, struct insn_chain *);
/* Helpers for the more accurate live analysis done before allocation.  */
311 static void allocate_bb_info (void);
312 static void free_bb_info (void);
313 static void check_earlyclobber (rtx);
314 static bool regclass_intersect (enum reg_class, enum reg_class);
315 static void mark_reg_use_for_earlyclobber_1 (rtx *, void *);
316 static int mark_reg_use_for_earlyclobber (rtx *, void *);
317 static void calculate_local_reg_bb_info (void);
318 static void set_up_bb_rts_numbers (void);
319 static int rpost_cmp (const void *, const void *);
320 static bool modify_bb_reg_pav (basic_block, basic_block, bool);
321 static void calculate_reg_pav (void);
322 static void make_accurate_live_analysis (void);
326 /* Perform allocation of pseudo-registers not allocated by local_alloc.
327 FILE is a file to output debugging information on,
328 or zero if such output is not desired.
330 Return value is nonzero if reload failed
331 and we must not do any more for this function. */
/* NOTE(review): this copy of the file appears to have dropped lines
   consisting only of braces, as well as the `static int' return-type
   line of this function; compare with the upstream file before
   modifying any code below.  */
334 global_alloc (FILE *file)
336 int retval;
337 #ifdef ELIMINABLE_REGS
338 static const struct {const int from, to; } eliminables[] = ELIMINABLE_REGS;
339 #endif
/* Nonzero when a frame pointer must be kept, which in turn forbids
   allocating pseudos to it.  */
340 int need_fp
341 = (! flag_omit_frame_pointer
342 || (current_function_calls_alloca && EXIT_IGNORE_STACK)
343 || FRAME_POINTER_REQUIRED);
345 size_t i;
346 rtx x;
348 make_accurate_live_analysis ();
350 max_allocno = 0;
352 /* A machine may have certain hard registers that
353 are safe to use only within a basic block. */
355 CLEAR_HARD_REG_SET (no_global_alloc_regs);
357 /* Build the regset of all eliminable registers and show we can't use those
358 that we already know won't be eliminated. */
359 #ifdef ELIMINABLE_REGS
360 for (i = 0; i < ARRAY_SIZE (eliminables); i++)
362 bool cannot_elim
363 = (! CAN_ELIMINATE (eliminables[i].from, eliminables[i].to)
364 || (eliminables[i].to == STACK_POINTER_REGNUM && need_fp));
366 if (!regs_asm_clobbered[eliminables[i].from])
368 SET_HARD_REG_BIT (eliminable_regset, eliminables[i].from);
370 if (cannot_elim)
371 SET_HARD_REG_BIT (no_global_alloc_regs, eliminables[i].from);
373 else if (cannot_elim)
374 error ("%s cannot be used in asm here",
375 reg_names[eliminables[i].from]);
376 else
377 regs_ever_live[eliminables[i].from] = 1;
379 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
380 if (!regs_asm_clobbered[HARD_FRAME_POINTER_REGNUM])
382 SET_HARD_REG_BIT (eliminable_regset, HARD_FRAME_POINTER_REGNUM);
383 if (need_fp)
384 SET_HARD_REG_BIT (no_global_alloc_regs, HARD_FRAME_POINTER_REGNUM);
386 else if (need_fp)
387 error ("%s cannot be used in asm here",
388 reg_names[HARD_FRAME_POINTER_REGNUM]);
389 else
390 regs_ever_live[HARD_FRAME_POINTER_REGNUM] = 1;
391 #endif
393 #else
394 if (!regs_asm_clobbered[FRAME_POINTER_REGNUM])
396 SET_HARD_REG_BIT (eliminable_regset, FRAME_POINTER_REGNUM);
397 if (need_fp)
398 SET_HARD_REG_BIT (no_global_alloc_regs, FRAME_POINTER_REGNUM);
400 else if (need_fp)
401 error ("%s cannot be used in asm here", reg_names[FRAME_POINTER_REGNUM]);
402 else
403 regs_ever_live[FRAME_POINTER_REGNUM] = 1;
404 #endif
406 /* Track which registers have already been used. Start with registers
407 explicitly in the rtl, then registers allocated by local register
408 allocation. */
410 CLEAR_HARD_REG_SET (regs_used_so_far);
411 #ifdef LEAF_REGISTERS
412 /* If we are doing the leaf function optimization, and this is a leaf
413 function, it means that the registers that take work to save are those
414 that need a register window. So prefer the ones that can be used in
415 a leaf function. */
417 const char *cheap_regs;
418 const char *const leaf_regs = LEAF_REGISTERS;
420 if (only_leaf_regs_used () && leaf_function_p ())
421 cheap_regs = leaf_regs;
422 else
423 cheap_regs = call_used_regs;
424 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
425 if (regs_ever_live[i] || cheap_regs[i])
426 SET_HARD_REG_BIT (regs_used_so_far, i);
428 #else
429 /* We consider registers that do not have to be saved over calls as if
430 they were already used since there is no cost in using them. */
431 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
432 if (regs_ever_live[i] || call_used_regs[i])
433 SET_HARD_REG_BIT (regs_used_so_far, i);
434 #endif
436 for (i = FIRST_PSEUDO_REGISTER; i < (size_t) max_regno; i++)
437 if (reg_renumber[i] >= 0)
438 SET_HARD_REG_BIT (regs_used_so_far, reg_renumber[i]);
440 /* Establish mappings from register number to allocation number
441 and vice versa. In the process, count the allocnos. */
443 reg_allocno = xmalloc (max_regno * sizeof (int));
445 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
446 reg_allocno[i] = -1;
448 /* Initialize the shared-hard-reg mapping
449 from the list of pairs that may share. */
450 reg_may_share = xcalloc (max_regno, sizeof (int));
451 for (x = regs_may_share; x; x = XEXP (XEXP (x, 1), 1))
453 int r1 = REGNO (XEXP (x, 0));
454 int r2 = REGNO (XEXP (XEXP (x, 1), 0));
/* Always record the pair under the higher-numbered reg, pointing
   at the lower-numbered one.  */
455 if (r1 > r2)
456 reg_may_share[r1] = r2;
457 else
458 reg_may_share[r2] = r1;
461 for (i = FIRST_PSEUDO_REGISTER; i < (size_t) max_regno; i++)
462 /* Note that reg_live_length[i] < 0 indicates a "constant" reg
463 that we are supposed to refrain from putting in a hard reg.
464 -2 means do make an allocno but don't allocate it. */
465 if (REG_N_REFS (i) != 0 && REG_LIVE_LENGTH (i) != -1
466 /* Don't allocate pseudos that cross calls,
467 if this function receives a nonlocal goto. */
468 && (! current_function_has_nonlocal_label
469 || REG_N_CALLS_CROSSED (i) == 0))
471 if (reg_renumber[i] < 0 && reg_may_share[i] && reg_allocno[reg_may_share[i]] >= 0)
472 reg_allocno[i] = reg_allocno[reg_may_share[i]];
473 else
474 reg_allocno[i] = max_allocno++;
475 if (REG_LIVE_LENGTH (i) == 0)
476 abort ();
478 else
479 reg_allocno[i] = -1;
/* Now that max_allocno is known, build the allocno table; shared
   pseudos accumulate into the same allocno entry below.  */
481 allocno = xcalloc (max_allocno, sizeof (struct allocno));
483 for (i = FIRST_PSEUDO_REGISTER; i < (size_t) max_regno; i++)
484 if (reg_allocno[i] >= 0)
486 int num = reg_allocno[i];
487 allocno[num].reg = i;
488 allocno[num].size = PSEUDO_REGNO_SIZE (i);
489 allocno[num].calls_crossed += REG_N_CALLS_CROSSED (i);
490 allocno[num].n_refs += REG_N_REFS (i);
491 allocno[num].freq += REG_FREQ (i);
492 if (allocno[num].live_length < REG_LIVE_LENGTH (i))
493 allocno[num].live_length = REG_LIVE_LENGTH (i);
496 /* Calculate amount of usage of each hard reg by pseudos
497 allocated by local-alloc. This is to see if we want to
498 override it. */
499 memset (local_reg_live_length, 0, sizeof local_reg_live_length);
500 memset (local_reg_n_refs, 0, sizeof local_reg_n_refs);
501 memset (local_reg_freq, 0, sizeof local_reg_freq);
502 for (i = FIRST_PSEUDO_REGISTER; i < (size_t) max_regno; i++)
503 if (reg_renumber[i] >= 0)
505 int regno = reg_renumber[i];
506 int endregno = regno + hard_regno_nregs[regno][PSEUDO_REGNO_MODE (i)];
507 int j;
509 for (j = regno; j < endregno; j++)
511 local_reg_n_refs[j] += REG_N_REFS (i);
512 local_reg_freq[j] += REG_FREQ (i);
513 local_reg_live_length[j] += REG_LIVE_LENGTH (i);
517 /* We can't override local-alloc for a reg used not just by local-alloc. */
518 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
519 if (regs_ever_live[i])
520 local_reg_n_refs[i] = 0, local_reg_freq[i] = 0;
522 allocno_row_words = (max_allocno + INT_BITS - 1) / INT_BITS;
524 /* We used to use alloca here, but the size of what it would try to
525 allocate would occasionally cause it to exceed the stack limit and
526 cause unpredictable core dumps. Some examples were > 2Mb in size. */
527 conflicts = xcalloc (max_allocno * allocno_row_words, sizeof (INT_TYPE));
529 allocnos_live = xmalloc (allocno_row_words * sizeof (INT_TYPE));
531 /* If there is work to be done (at least one reg to allocate),
532 perform global conflict analysis and allocate the regs. */
534 if (max_allocno > 0)
536 /* Scan all the insns and compute the conflicts among allocnos
537 and between allocnos and hard regs. */
539 global_conflicts ();
541 mirror_conflicts ();
543 /* Eliminate conflicts between pseudos and eliminable registers. If
544 the register is not eliminated, the pseudo won't really be able to
545 live in the eliminable register, so the conflict doesn't matter.
546 If we do eliminate the register, the conflict will no longer exist.
547 So in either case, we can ignore the conflict. Likewise for
548 preferences. */
550 for (i = 0; i < (size_t) max_allocno; i++)
552 AND_COMPL_HARD_REG_SET (allocno[i].hard_reg_conflicts,
553 eliminable_regset);
554 AND_COMPL_HARD_REG_SET (allocno[i].hard_reg_copy_preferences,
555 eliminable_regset);
556 AND_COMPL_HARD_REG_SET (allocno[i].hard_reg_preferences,
557 eliminable_regset);
560 /* Try to expand the preferences by merging them between allocnos. */
562 expand_preferences ();
564 /* Determine the order to allocate the remaining pseudo registers. */
566 allocno_order = xmalloc (max_allocno * sizeof (int));
567 for (i = 0; i < (size_t) max_allocno; i++)
568 allocno_order[i] = i;
570 /* Default the size to 1, since allocno_compare uses it to divide by.
571 Also convert allocno_live_length of zero to -1. A length of zero
572 can occur when all the registers for that allocno have reg_live_length
573 equal to -2. In this case, we want to make an allocno, but not
574 allocate it. So avoid the divide-by-zero and set it to a low
575 priority. */
577 for (i = 0; i < (size_t) max_allocno; i++)
579 if (allocno[i].size == 0)
580 allocno[i].size = 1;
581 if (allocno[i].live_length == 0)
582 allocno[i].live_length = -1;
585 qsort (allocno_order, max_allocno, sizeof (int), allocno_compare);
587 prune_preferences ();
589 if (file)
590 dump_conflicts (file);
592 /* Try allocating them, one by one, in that order,
593 except for parameters marked with reg_live_length[regno] == -2. */
595 for (i = 0; i < (size_t) max_allocno; i++)
596 if (reg_renumber[allocno[allocno_order[i]].reg] < 0
597 && REG_LIVE_LENGTH (allocno[allocno_order[i]].reg) >= 0)
599 /* If we have more than one register class,
600 first try allocating in the class that is cheapest
601 for this pseudo-reg. If that fails, try any reg. */
602 if (N_REG_CLASSES > 1)
604 find_reg (allocno_order[i], 0, 0, 0, 0);
605 if (reg_renumber[allocno[allocno_order[i]].reg] >= 0)
606 continue;
608 if (reg_alternate_class (allocno[allocno_order[i]].reg) != NO_REGS)
609 find_reg (allocno_order[i], 0, 1, 0, 0);
611 free (allocno_order);
615 /* Do the reloads now while the allocno data still exist, so that we can
616 try to assign new hard regs to any pseudo regs that are spilled. */
618 #if 0 /* We need to eliminate regs even if there is no rtl code,
619 for the sake of debugging information. */
620 if (n_basic_blocks > 0)
621 #endif
623 build_insn_chain (get_insns ());
624 retval = reload (get_insns (), 1);
627 /* Clean up. */
628 free (reg_allocno);
629 free (reg_may_share);
630 free (allocno);
631 free (conflicts);
632 free (allocnos_live);
634 return retval;
637 /* Sort predicate for ordering the allocnos.
638 Returns -1 (1) if *v1 should be allocated before (after) *v2. */
640 static int
641 allocno_compare (const void *v1p, const void *v2p)
643 int v1 = *(const int *)v1p, v2 = *(const int *)v2p;
644 /* Note that the quotient will never be bigger than
645 the value of floor_log2 times the maximum number of
646 times a register can occur in one insn (surely less than 100)
647 weighted by the frequency (maximally REG_FREQ_MAX).
648 Multiplying this by 10000/REG_FREQ_MAX can't overflow. */
649 int pri1
650 = (((double) (floor_log2 (allocno[v1].n_refs) * allocno[v1].freq)
651 / allocno[v1].live_length)
652 * (10000 / REG_FREQ_MAX) * allocno[v1].size);
653 int pri2
654 = (((double) (floor_log2 (allocno[v2].n_refs) * allocno[v2].freq)
655 / allocno[v2].live_length)
656 * (10000 / REG_FREQ_MAX) * allocno[v2].size);
657 if (pri2 - pri1)
658 return pri2 - pri1;
660 /* If regs are equally good, sort by allocno,
661 so that the results of qsort leave nothing to chance. */
662 return v1 - v2;
665 /* Scan the rtl code and record all conflicts and register preferences in the
666 conflict matrices and preference tables. */
/* NOTE(review): lines containing only braces appear to have been lost
   from this copy of the function; compare with the upstream file
   before editing the code.  */
668 static void
669 global_conflicts (void)
671 int i;
672 basic_block b;
673 rtx insn;
674 int *block_start_allocnos;
676 /* Make a vector that mark_reg_{store,clobber} will store in. */
677 regs_set = xmalloc (max_parallel * sizeof (rtx) * 2);
679 block_start_allocnos = xmalloc (max_allocno * sizeof (int));
681 FOR_EACH_BB (b)
683 memset (allocnos_live, 0, allocno_row_words * sizeof (INT_TYPE));
685 /* Initialize table of registers currently live
686 to the state at the beginning of this basic block.
687 This also marks the conflicts among hard registers
688 and any allocnos that are live.
690 For pseudo-regs, there is only one bit for each one
691 no matter how many hard regs it occupies.
692 This is ok; we know the size from PSEUDO_REGNO_SIZE.
693 For explicit hard regs, we cannot know the size that way
694 since one hard reg can be used with various sizes.
695 Therefore, we must require that all the hard regs
696 implicitly live as part of a multi-word hard reg
697 are explicitly marked in basic_block_live_at_start. */
700 regset old = b->global_live_at_start;
701 int ax = 0;
703 REG_SET_TO_HARD_REG_SET (hard_regs_live, old);
704 EXECUTE_IF_SET_IN_REG_SET (old, FIRST_PSEUDO_REGISTER, i,
706 int a = reg_allocno[i];
707 if (a >= 0)
709 SET_ALLOCNO_LIVE (a);
710 block_start_allocnos[ax++] = a;
712 else if ((a = reg_renumber[i]) >= 0)
713 mark_reg_live_nc
714 (a, PSEUDO_REGNO_MODE (i));
717 /* Record that each allocno now live conflicts with each hard reg
718 now live.
720 It is not necessary to mark any conflicts between pseudos as
721 this point, even for pseudos which are live at the start of
722 the basic block.
724 Given two pseudos X and Y and any point in the CFG P.
726 On any path to point P where X and Y are live one of the
727 following conditions must be true:
729 1. X is live at some instruction on the path that
730 evaluates Y.
732 2. Y is live at some instruction on the path that
733 evaluates X.
735 3. Either X or Y is not evaluated on the path to P
736 (ie it is used uninitialized) and thus the
737 conflict can be ignored.
739 In cases #1 and #2 the conflict will be recorded when we
740 scan the instruction that makes either X or Y become live. */
741 record_conflicts (block_start_allocnos, ax);
743 /* Pseudos can't go in stack regs at the start of a basic block that
744 is reached by an abnormal edge. Likewise for call clobbered regs,
745 because caller-save, fixup_abnormal_edges, and possibly
746 the table driven EH machinery are not quite ready to handle such
747 regs live across such edges. */
749 edge e;
/* Look for any abnormal predecessor edge; if found, E is non-NULL
   below.  (FOR_EACH_EDGE/END_FOR_EACH_EDGE is the edge iterator
   from basic-block.h mentioned in the ChangeLog residue at the top
   of this copy.)  */
751 FOR_EACH_EDGE (e, b->preds)
753 if (e->flags & EDGE_ABNORMAL)
754 break;
756 END_FOR_EACH_EDGE;
758 if (e != NULL)
760 #ifdef STACK_REGS
761 EXECUTE_IF_SET_IN_ALLOCNO_SET (allocnos_live, ax,
763 allocno[ax].no_stack_reg = 1;
765 for (ax = FIRST_STACK_REG; ax <= LAST_STACK_REG; ax++)
766 record_one_conflict (ax);
767 #endif
769 /* No need to record conflicts for call clobbered regs if we have
770 nonlocal labels around, as we don't ever try to allocate such
771 regs in this case. */
772 if (! current_function_has_nonlocal_label)
773 for (ax = 0; ax < FIRST_PSEUDO_REGISTER; ax++)
774 if (call_used_regs [ax])
775 record_one_conflict (ax);
780 insn = BB_HEAD (b);
782 /* Scan the code of this basic block, noting which allocnos
783 and hard regs are born or die. When one is born,
784 record a conflict with all others currently live. */
786 while (1)
788 RTX_CODE code = GET_CODE (insn);
789 rtx link;
791 /* Make regs_set an empty set. */
793 n_regs_set = 0;
795 if (code == INSN || code == CALL_INSN || code == JUMP_INSN)
798 #if 0
799 int i = 0;
800 for (link = REG_NOTES (insn);
801 link && i < NUM_NO_CONFLICT_PAIRS;
802 link = XEXP (link, 1))
803 if (REG_NOTE_KIND (link) == REG_NO_CONFLICT)
805 no_conflict_pairs[i].allocno1
806 = reg_allocno[REGNO (SET_DEST (PATTERN (insn)))];
807 no_conflict_pairs[i].allocno2
808 = reg_allocno[REGNO (XEXP (link, 0))];
809 i++;
811 #endif /* 0 */
813 /* Mark any registers clobbered by INSN as live,
814 so they conflict with the inputs. */
816 note_stores (PATTERN (insn), mark_reg_clobber, NULL);
818 /* Mark any registers dead after INSN as dead now. */
820 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
821 if (REG_NOTE_KIND (link) == REG_DEAD)
822 mark_reg_death (XEXP (link, 0));
824 /* Mark any registers set in INSN as live,
825 and mark them as conflicting with all other live regs.
826 Clobbers are processed again, so they conflict with
827 the registers that are set. */
829 note_stores (PATTERN (insn), mark_reg_store, NULL);
831 #ifdef AUTO_INC_DEC
832 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
833 if (REG_NOTE_KIND (link) == REG_INC)
834 mark_reg_store (XEXP (link, 0), NULL_RTX, NULL);
835 #endif
837 /* If INSN has multiple outputs, then any reg that dies here
838 and is used inside of an output
839 must conflict with the other outputs.
841 It is unsafe to use !single_set here since it will ignore an
842 unused output. Just because an output is unused does not mean
843 the compiler can assume the side effect will not occur.
844 Consider if REG appears in the address of an output and we
845 reload the output. If we allocate REG to the same hard
846 register as an unused output we could set the hard register
847 before the output reload insn. */
848 if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
849 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
850 if (REG_NOTE_KIND (link) == REG_DEAD)
852 int used_in_output = 0;
853 int i;
854 rtx reg = XEXP (link, 0);
856 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
858 rtx set = XVECEXP (PATTERN (insn), 0, i);
859 if (GET_CODE (set) == SET
860 && !REG_P (SET_DEST (set))
861 && !rtx_equal_p (reg, SET_DEST (set))
862 && reg_overlap_mentioned_p (reg, SET_DEST (set)))
863 used_in_output = 1;
865 if (used_in_output)
866 mark_reg_conflicts (reg);
869 /* Mark any registers set in INSN and then never used. */
871 while (n_regs_set-- > 0)
873 rtx note = find_regno_note (insn, REG_UNUSED,
874 REGNO (regs_set[n_regs_set]));
875 if (note)
876 mark_reg_death (XEXP (note, 0));
880 if (insn == BB_END (b))
881 break;
882 insn = NEXT_INSN (insn);
886 /* Clean up. */
887 free (block_start_allocnos);
888 free (regs_set);
890 /* Expand the preference information by looking for cases where one allocno
891 dies in an insn that sets an allocno. If those two allocnos don't conflict,
892 merge any preferences between those allocnos. */
/* NOTE(review): brace-only lines appear to be missing from this copy of
   the function; compare with the upstream file before editing.  */
894 static void
895 expand_preferences (void)
897 rtx insn;
898 rtx link;
899 rtx set;
901 /* We only try to handle the most common cases here. Most of the cases
902 where this wins are reg-reg copies. */
904 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
905 if (INSN_P (insn)
906 && (set = single_set (insn)) != 0
907 && REG_P (SET_DEST (set))
908 && reg_allocno[REGNO (SET_DEST (set))] >= 0)
909 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
910 if (REG_NOTE_KIND (link) == REG_DEAD
911 && REG_P (XEXP (link, 0))
912 && reg_allocno[REGNO (XEXP (link, 0))] >= 0
913 && ! CONFLICTP (reg_allocno[REGNO (SET_DEST (set))],
914 reg_allocno[REGNO (XEXP (link, 0))]))
916 int a1 = reg_allocno[REGNO (SET_DEST (set))];
917 int a2 = reg_allocno[REGNO (XEXP (link, 0))];
/* Copy preferences are only merged when the dying reg is the
   actual source of the copy.  */
919 if (XEXP (link, 0) == SET_SRC (set))
921 IOR_HARD_REG_SET (allocno[a1].hard_reg_copy_preferences,
922 allocno[a2].hard_reg_copy_preferences);
923 IOR_HARD_REG_SET (allocno[a2].hard_reg_copy_preferences,
924 allocno[a1].hard_reg_copy_preferences);
927 IOR_HARD_REG_SET (allocno[a1].hard_reg_preferences,
928 allocno[a2].hard_reg_preferences);
929 IOR_HARD_REG_SET (allocno[a2].hard_reg_preferences,
930 allocno[a1].hard_reg_preferences);
931 IOR_HARD_REG_SET (allocno[a1].hard_reg_full_preferences,
932 allocno[a2].hard_reg_full_preferences);
933 IOR_HARD_REG_SET (allocno[a2].hard_reg_full_preferences,
934 allocno[a1].hard_reg_full_preferences);
938 /* Prune the preferences for global registers to exclude registers that cannot
939 be used.
941 Compute `regs_someone_prefers', which is a bitmask of the hard registers
942 that are preferred by conflicting registers of lower priority. If possible,
943 we will avoid using these registers. */
/* NOTE(review): brace-only lines appear to be missing from this copy of
   the function; compare with the upstream file before editing.  */
945 static void
946 prune_preferences (void)
948 int i;
949 int num;
950 int *allocno_to_order = xmalloc (max_allocno * sizeof (int));
952 /* Scan least most important to most important.
953 For each allocno, remove from preferences registers that cannot be used,
954 either because of conflicts or register type. Then compute all registers
955 preferred by each lower-priority register that conflicts. */
957 for (i = max_allocno - 1; i >= 0; i--)
959 HARD_REG_SET temp;
961 num = allocno_order[i];
962 allocno_to_order[num] = i;
963 COPY_HARD_REG_SET (temp, allocno[num].hard_reg_conflicts);
/* An allocno that crosses no calls can also live in fixed regs'
   complement; otherwise it must avoid call-used regs too.  */
965 if (allocno[num].calls_crossed == 0)
966 IOR_HARD_REG_SET (temp, fixed_reg_set);
967 else
968 IOR_HARD_REG_SET (temp, call_used_reg_set);
970 IOR_COMPL_HARD_REG_SET
971 (temp,
972 reg_class_contents[(int) reg_preferred_class (allocno[num].reg)]);
974 AND_COMPL_HARD_REG_SET (allocno[num].hard_reg_preferences, temp);
975 AND_COMPL_HARD_REG_SET (allocno[num].hard_reg_copy_preferences, temp);
976 AND_COMPL_HARD_REG_SET (allocno[num].hard_reg_full_preferences, temp);
979 for (i = max_allocno - 1; i >= 0; i--)
981 /* Merge in the preferences of lower-priority registers (they have
982 already been pruned). If we also prefer some of those registers,
983 don't exclude them unless we are of a smaller size (in which case
984 we want to give the lower-priority allocno the first chance for
985 these registers). */
986 HARD_REG_SET temp, temp2;
987 int allocno2;
989 num = allocno_order[i];
991 CLEAR_HARD_REG_SET (temp);
992 CLEAR_HARD_REG_SET (temp2);
994 EXECUTE_IF_SET_IN_ALLOCNO_SET (conflicts + num * allocno_row_words,
995 allocno2,
997 if (allocno_to_order[allocno2] > i)
999 if (allocno[allocno2].size <= allocno[num].size)
1000 IOR_HARD_REG_SET (temp,
1001 allocno[allocno2].hard_reg_full_preferences);
1002 else
1003 IOR_HARD_REG_SET (temp2,
1004 allocno[allocno2].hard_reg_full_preferences);
1008 AND_COMPL_HARD_REG_SET (temp, allocno[num].hard_reg_full_preferences);
1009 IOR_HARD_REG_SET (temp, temp2);
1010 COPY_HARD_REG_SET (allocno[num].regs_someone_prefers, temp);
1012 free (allocno_to_order);
1015 /* Assign a hard register to allocno NUM; look for one that is the beginning
1016 of a long enough stretch of hard regs none of which conflicts with ALLOCNO.
1017 The registers marked in PREFREGS are tried first.
1019 LOSERS, if nonzero, is a HARD_REG_SET indicating registers that cannot
1020 be used for this allocation.
1022 If ALT_REGS_P is zero, consider only the preferred class of ALLOCNO's reg.
1023 Otherwise ignore that preferred class and use the alternate class.
1025 If ACCEPT_CALL_CLOBBERED is nonzero, accept a call-clobbered hard reg that
1026 will have to be saved and restored at calls.
1028 RETRYING is nonzero if this is called from retry_global_alloc.
1030 If we find one, record it in reg_renumber.
1031 If not, do nothing. */
1033 static void
1034 find_reg (int num, HARD_REG_SET losers, int alt_regs_p, int accept_call_clobbered, int retrying)
1036 int i, best_reg, pass;
1037 HARD_REG_SET used, used1, used2;
1039 enum reg_class class = (alt_regs_p
1040 ? reg_alternate_class (allocno[num].reg)
1041 : reg_preferred_class (allocno[num].reg));
1042 enum machine_mode mode = PSEUDO_REGNO_MODE (allocno[num].reg);
/* Build USED1 = hard regs unusable for this allocno: fixed and/or
   call-clobbered regs depending on calls crossed, regs barred from
   global allocation, the caller's LOSERS, regs outside CLASS, and this
   allocno's recorded conflicts.  */
1044 if (accept_call_clobbered)
1045 COPY_HARD_REG_SET (used1, call_fixed_reg_set);
1046 else if (allocno[num].calls_crossed == 0)
1047 COPY_HARD_REG_SET (used1, fixed_reg_set);
1048 else
1049 COPY_HARD_REG_SET (used1, call_used_reg_set);
1051 /* Some registers should not be allocated in global-alloc. */
1052 IOR_HARD_REG_SET (used1, no_global_alloc_regs);
1053 if (losers)
1054 IOR_HARD_REG_SET (used1, losers);
1056 IOR_COMPL_HARD_REG_SET (used1, reg_class_contents[(int) class]);
/* USED2 is USED1 before conflicts are added; it is reused by the
   local-alloc eviction phase below, where conflicts may be resolved by
   kicking out local pseudos.  */
1057 COPY_HARD_REG_SET (used2, used1);
1059 IOR_HARD_REG_SET (used1, allocno[num].hard_reg_conflicts);
1061 #ifdef CANNOT_CHANGE_MODE_CLASS
1062 cannot_change_mode_set_regs (&used1, mode, allocno[num].reg);
1063 #endif
1065 /* Try each hard reg to see if it fits. Do this in two passes.
1066 In the first pass, skip registers that are preferred by some other pseudo
1067 to give it a better chance of getting one of those registers. Only if
1068 we can't get a register when excluding those do we take one of them.
1069 However, we never allocate a register for the first time in pass 0. */
1071 COPY_HARD_REG_SET (used, used1);
1072 IOR_COMPL_HARD_REG_SET (used, regs_used_so_far);
1073 IOR_HARD_REG_SET (used, allocno[num].regs_someone_prefers);
1075 best_reg = -1;
/* The inner loop leaves I < FIRST_PSEUDO_REGISTER when it breaks on a
   hit, which also terminates this outer pass loop.  */
1076 for (i = FIRST_PSEUDO_REGISTER, pass = 0;
1077 pass <= 1 && i >= FIRST_PSEUDO_REGISTER;
1078 pass++)
1080 if (pass == 1)
1081 COPY_HARD_REG_SET (used, used1);
1082 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1084 #ifdef REG_ALLOC_ORDER
1085 int regno = reg_alloc_order[i];
1086 #else
1087 int regno = i;
1088 #endif
1089 if (! TEST_HARD_REG_BIT (used, regno)
1090 && HARD_REGNO_MODE_OK (regno, mode)
1091 && (allocno[num].calls_crossed == 0
1092 || accept_call_clobbered
1093 || ! HARD_REGNO_CALL_PART_CLOBBERED (regno, mode)))
/* Check that every hard reg in the multi-reg span is also free.  */
1095 int j;
1096 int lim = regno + hard_regno_nregs[regno][mode];
1097 for (j = regno + 1;
1098 (j < lim
1099 && ! TEST_HARD_REG_BIT (used, j));
1100 j++);
1101 if (j == lim)
1103 best_reg = regno;
1104 break;
1106 #ifndef REG_ALLOC_ORDER
1107 i = j; /* Skip starting points we know will lose */
1108 #endif
1113 /* See if there is a preferred register with the same class as the register
1114 we allocated above. Making this restriction prevents register
1115 preferencing from creating worse register allocation.
1117 Remove from the preferred registers and conflicting registers. Note that
1118 additional conflicts may have been added after `prune_preferences' was
1119 called.
1121 First do this for those register with copy preferences, then all
1122 preferred registers. */
1124 AND_COMPL_HARD_REG_SET (allocno[num].hard_reg_copy_preferences, used);
1125 GO_IF_HARD_REG_SUBSET (allocno[num].hard_reg_copy_preferences,
1126 reg_class_contents[(int) NO_REGS], no_copy_prefs);
1128 if (best_reg >= 0)
1130 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1131 if (TEST_HARD_REG_BIT (allocno[num].hard_reg_copy_preferences, i)
1132 && HARD_REGNO_MODE_OK (i, mode)
1133 && (allocno[num].calls_crossed == 0
1134 || accept_call_clobbered
1135 || ! HARD_REGNO_CALL_PART_CLOBBERED (i, mode))
1136 && (REGNO_REG_CLASS (i) == REGNO_REG_CLASS (best_reg)
1137 || reg_class_subset_p (REGNO_REG_CLASS (i),
1138 REGNO_REG_CLASS (best_reg))
1139 || reg_class_subset_p (REGNO_REG_CLASS (best_reg),
1140 REGNO_REG_CLASS (i))))
1142 int j;
1143 int lim = i + hard_regno_nregs[i][mode];
/* Every reg of the span must be free and class-compatible with the
   corresponding reg of the BEST_REG span.  */
1144 for (j = i + 1;
1145 (j < lim
1146 && ! TEST_HARD_REG_BIT (used, j)
1147 && (REGNO_REG_CLASS (j)
1148 == REGNO_REG_CLASS (best_reg + (j - i))
1149 || reg_class_subset_p (REGNO_REG_CLASS (j),
1150 REGNO_REG_CLASS (best_reg + (j - i)))
1151 || reg_class_subset_p (REGNO_REG_CLASS (best_reg + (j - i)),
1152 REGNO_REG_CLASS (j))));
1153 j++);
1154 if (j == lim)
1156 best_reg = i;
/* A copy preference is stronger than a plain preference, so skip the
   plain-preference scan entirely.  */
1157 goto no_prefs;
1161 no_copy_prefs:
1163 AND_COMPL_HARD_REG_SET (allocno[num].hard_reg_preferences, used);
1164 GO_IF_HARD_REG_SUBSET (allocno[num].hard_reg_preferences,
1165 reg_class_contents[(int) NO_REGS], no_prefs);
1167 if (best_reg >= 0)
1169 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1170 if (TEST_HARD_REG_BIT (allocno[num].hard_reg_preferences, i)
1171 && HARD_REGNO_MODE_OK (i, mode)
1172 && (allocno[num].calls_crossed == 0
1173 || accept_call_clobbered
1174 || ! HARD_REGNO_CALL_PART_CLOBBERED (i, mode))
1175 && (REGNO_REG_CLASS (i) == REGNO_REG_CLASS (best_reg)
1176 || reg_class_subset_p (REGNO_REG_CLASS (i),
1177 REGNO_REG_CLASS (best_reg))
1178 || reg_class_subset_p (REGNO_REG_CLASS (best_reg),
1179 REGNO_REG_CLASS (i))))
1181 int j;
1182 int lim = i + hard_regno_nregs[i][mode];
1183 for (j = i + 1;
1184 (j < lim
1185 && ! TEST_HARD_REG_BIT (used, j)
1186 && (REGNO_REG_CLASS (j)
1187 == REGNO_REG_CLASS (best_reg + (j - i))
1188 || reg_class_subset_p (REGNO_REG_CLASS (j),
1189 REGNO_REG_CLASS (best_reg + (j - i)))
1190 || reg_class_subset_p (REGNO_REG_CLASS (best_reg + (j - i)),
1191 REGNO_REG_CLASS (j))));
1192 j++);
1193 if (j == lim)
1195 best_reg = i;
1196 break;
1200 no_prefs:
1202 /* If we haven't succeeded yet, try with caller-saves.
1203 We need not check to see if the current function has nonlocal
1204 labels because we don't put any pseudos that are live over calls in
1205 registers in that case. */
1207 if (flag_caller_saves && best_reg < 0)
1209 /* Did not find a register. If it would be profitable to
1210 allocate a call-clobbered register and save and restore it
1211 around calls, do that. */
1212 if (! accept_call_clobbered
1213 && allocno[num].calls_crossed != 0
1214 && CALLER_SAVE_PROFITABLE (allocno[num].n_refs,
1215 allocno[num].calls_crossed))
1217 HARD_REG_SET new_losers;
1218 if (! losers)
1219 CLEAR_HARD_REG_SET (new_losers);
1220 else
1221 COPY_HARD_REG_SET (new_losers, losers);
1223 IOR_HARD_REG_SET(new_losers, losing_caller_save_reg_set);
/* Recurse once with ACCEPT_CALL_CLOBBERED set; on success the caller-save
   machinery must run, so flag it.  */
1224 find_reg (num, new_losers, alt_regs_p, 1, retrying);
1225 if (reg_renumber[allocno[num].reg] >= 0)
1227 caller_save_needed = 1;
1228 return;
1233 /* If we haven't succeeded yet,
1234 see if some hard reg that conflicts with us
1235 was utilized poorly by local-alloc.
1236 If so, kick out the regs that were put there by local-alloc
1237 so we can use it instead. */
1238 if (best_reg < 0 && !retrying
1239 /* Let's not bother with multi-reg allocnos. */
1240 && allocno[num].size == 1)
1242 /* Count from the end, to find the least-used ones first. */
1243 for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
1245 #ifdef REG_ALLOC_ORDER
1246 int regno = reg_alloc_order[i];
1247 #else
1248 int regno = i;
1249 #endif
1251 if (local_reg_n_refs[regno] != 0
1252 /* Don't use a reg no good for this pseudo. */
1253 && ! TEST_HARD_REG_BIT (used2, regno)
1254 && HARD_REGNO_MODE_OK (regno, mode)
1255 /* The code below assumes that we need only a single
1256 register, but the check of allocno[num].size above
1257 was not enough. Sometimes we need more than one
1258 register for a single-word value. */
1259 && hard_regno_nregs[regno][mode] == 1
1260 && (allocno[num].calls_crossed == 0
1261 || accept_call_clobbered
1262 || ! HARD_REGNO_CALL_PART_CLOBBERED (regno, mode))
1263 #ifdef CANNOT_CHANGE_MODE_CLASS
1264 && ! invalid_mode_change_p (regno, REGNO_REG_CLASS (regno),
1265 mode)
1266 #endif
1267 #ifdef STACK_REGS
1268 && (!allocno[num].no_stack_reg
1269 || regno < FIRST_STACK_REG || regno > LAST_STACK_REG)
1270 #endif
1273 /* We explicitly evaluate the divide results into temporary
1274 variables so as to avoid excess precision problems that occur
1275 on an i386-unknown-sysv4.2 (unixware) host. */
1277 double tmp1 = ((double) local_reg_freq[regno]
1278 / local_reg_live_length[regno]);
1279 double tmp2 = ((double) allocno[num].freq
1280 / allocno[num].live_length);
1282 if (tmp1 < tmp2)
1284 /* Hard reg REGNO was used less in total by local regs
1285 than it would be used by this one allocno! */
1286 int k;
/* Evict every local pseudo whose hard-reg span covers REGNO.  */
1287 for (k = 0; k < max_regno; k++)
1288 if (reg_renumber[k] >= 0)
1290 int r = reg_renumber[k];
1291 int endregno
1292 = r + hard_regno_nregs[r][PSEUDO_REGNO_MODE (k)];
1294 if (regno >= r && regno < endregno)
1295 reg_renumber[k] = -1;
1298 best_reg = regno;
1299 break;
1305 /* Did we find a register? */
1307 if (best_reg >= 0)
1309 int lim, j;
1310 HARD_REG_SET this_reg;
1312 /* Yes. Record it as the hard register of this pseudo-reg. */
1313 reg_renumber[allocno[num].reg] = best_reg;
1314 /* Also of any pseudo-regs that share with it. */
1315 if (reg_may_share[allocno[num].reg])
1316 for (j = FIRST_PSEUDO_REGISTER; j < max_regno; j++)
1317 if (reg_allocno[j] == num)
1318 reg_renumber[j] = best_reg;
1320 /* Make a set of the hard regs being allocated. */
1321 CLEAR_HARD_REG_SET (this_reg);
1322 lim = best_reg + hard_regno_nregs[best_reg][mode];
1323 for (j = best_reg; j < lim; j++)
1325 SET_HARD_REG_BIT (this_reg, j);
1326 SET_HARD_REG_BIT (regs_used_so_far, j);
1327 /* This is no longer a reg used just by local regs. */
1328 local_reg_n_refs[j] = 0;
1329 local_reg_freq[j] = 0;
1331 /* For each other pseudo-reg conflicting with this one,
1332 mark it as conflicting with the hard regs this one occupies. */
1333 lim = num;
1334 EXECUTE_IF_SET_IN_ALLOCNO_SET (conflicts + lim * allocno_row_words, j,
1336 IOR_HARD_REG_SET (allocno[j].hard_reg_conflicts, this_reg);
1341 /* Called from `reload' to look for a hard reg to put pseudo reg REGNO in.
1342 Perhaps it had previously seemed not worth a hard reg,
1343 or perhaps its old hard reg has been commandeered for reloads.
1344 FORBIDDEN_REGS indicates certain hard regs that may not be used, even if
1345 they do not appear to be allocated.
1346 If FORBIDDEN_REGS is zero, no regs are forbidden. */
1348 void
1349 retry_global_alloc (int regno, HARD_REG_SET forbidden_regs)
1351 int alloc_no = reg_allocno[regno];
1352 if (alloc_no >= 0)
1354 /* If we have more than one register class,
1355 first try allocating in the class that is cheapest
1356 for this pseudo-reg. If that fails, try any reg. */
1357 if (N_REG_CLASSES > 1)
1358 find_reg (alloc_no, forbidden_regs, 0, 0, 1);
1359 if (reg_renumber[regno] < 0
1360 && reg_alternate_class (regno) != NO_REGS)
1361 find_reg (alloc_no, forbidden_regs, 1, 0, 1);
1363 /* If we found a register, modify the RTL for the register to
1364 show the hard register, and mark that register live. */
1365 if (reg_renumber[regno] >= 0)
1367 REGNO (regno_reg_rtx[regno]) = reg_renumber[regno];
1368 mark_home_live (regno);
1373 /* Record a conflict between register REGNO
1374 and everything currently live.
1375 REGNO must not be a pseudo reg that was allocated
1376 by local_alloc; such numbers must be translated through
1377 reg_renumber before calling here. */
1379 static void
1380 record_one_conflict (int regno)
/* REGNO is either a hard register number or a pseudo with a valid
   allocno (reg_allocno[regno] >= 0); callers translate renumbered
   pseudos before calling here.  */
1382 int j;
1384 if (regno < FIRST_PSEUDO_REGISTER)
1385 /* When a hard register becomes live,
1386 record conflicts with live pseudo regs. */
1387 EXECUTE_IF_SET_IN_ALLOCNO_SET (allocnos_live, j,
1389 SET_HARD_REG_BIT (allocno[j].hard_reg_conflicts, regno);
1391 else
1392 /* When a pseudo-register becomes live,
1393 record conflicts first with hard regs,
1394 then with other pseudo regs. */
1396 int ialloc = reg_allocno[regno];
1397 int ialloc_prod = ialloc * allocno_row_words;
1399 IOR_HARD_REG_SET (allocno[ialloc].hard_reg_conflicts, hard_regs_live);
/* OR the whole live-allocno bit row into this allocno's conflict row;
   mirror_conflicts later makes the relation symmetric.  */
1400 for (j = allocno_row_words - 1; j >= 0; j--)
1401 conflicts[ialloc_prod + j] |= allocnos_live[j];
1405 /* Record all allocnos currently live as conflicting
1406 with all hard regs currently live.
1408 ALLOCNO_VEC is a vector of LEN allocnos, all allocnos that
1409 are currently live. Their bits are also flagged in allocnos_live. */
1411 static void
1412 record_conflicts (int *allocno_vec, int len)
1414 while (--len >= 0)
1415 IOR_HARD_REG_SET (allocno[allocno_vec[len]].hard_reg_conflicts,
1416 hard_regs_live);
1419 /* If CONFLICTP (i, j) is true, make sure CONFLICTP (j, i) is also true. */
1420 static void
1421 mirror_conflicts (void)
/* Symmetrize the conflicts bit matrix in place: for every set bit
   (i, j), set bit (j, i).  P walks the matrix row-major; for each
   source bit, Q2 points at the word of the transposed position and
   MASK selects the bit for allocno I within that word.  */
1423 int i, j;
1424 int rw = allocno_row_words;
1425 int rwb = rw * INT_BITS;
1426 INT_TYPE *p = conflicts;
1427 INT_TYPE *q0 = conflicts, *q1, *q2;
1428 unsigned INT_TYPE mask;
1430 for (i = max_allocno - 1, mask = 1; i >= 0; i--, mask <<= 1)
/* MASK shifts off the word after INT_BITS iterations; move to the next
   word of the column and restart at bit 0.  */
1432 if (! mask)
1434 mask = 1;
1435 q0++;
1437 for (j = allocno_row_words - 1, q1 = q0; j >= 0; j--, q1 += rwb)
1439 unsigned INT_TYPE word;
/* Scan the bits of one row word; each set bit advances Q2 by one full
   row (RW words) per bit position examined.  */
1441 for (word = (unsigned INT_TYPE) *p++, q2 = q1; word;
1442 word >>= 1, q2 += rw)
1444 if (word & 1)
1445 *q2 |= mask;
1451 /* Handle the case where REG is set by the insn being scanned,
1452 during the forward scan to accumulate conflicts.
1453 Store a 1 in regs_live or allocnos_live for this register, record how many
1454 consecutive hardware registers it actually needs,
1455 and record a conflict with all other registers already live.
1457 Note that even if REG does not remain alive after this insn,
1458 we must mark it here as live, to ensure a conflict between
1459 REG and any other regs set in this insn that really do live.
1460 This is because those other regs could be considered after this.
1462 REG might actually be something other than a register;
1463 if so, we do nothing.
1465 SETTER is 0 if this register was modified by an auto-increment (i.e.,
1466 a REG_INC note was found for it). */
1468 static void
1469 mark_reg_store (rtx reg, rtx setter, void *data ATTRIBUTE_UNUSED)
1471 int regno;
1473 if (GET_CODE (reg) == SUBREG)
1474 reg = SUBREG_REG (reg);
1476 if (!REG_P (reg))
1477 return;
1479 regs_set[n_regs_set++] = reg;
1481 if (setter && GET_CODE (setter) != CLOBBER)
1482 set_preference (reg, SET_SRC (setter));
1484 regno = REGNO (reg);
1486 /* Either this is one of the max_allocno pseudo regs not allocated,
1487 or it is or has a hardware reg. First handle the pseudo-regs. */
1488 if (regno >= FIRST_PSEUDO_REGISTER)
1490 if (reg_allocno[regno] >= 0)
1492 SET_ALLOCNO_LIVE (reg_allocno[regno]);
1493 record_one_conflict (regno);
1497 if (reg_renumber[regno] >= 0)
1498 regno = reg_renumber[regno];
1500 /* Handle hardware regs (and pseudos allocated to hard regs). */
1501 if (regno < FIRST_PSEUDO_REGISTER && ! fixed_regs[regno])
1503 int last = regno + hard_regno_nregs[regno][GET_MODE (reg)];
1504 while (regno < last)
1506 record_one_conflict (regno);
1507 SET_HARD_REG_BIT (hard_regs_live, regno);
1508 regno++;
1513 /* Like mark_reg_store except notice just CLOBBERs; ignore SETs. */
1515 static void
1516 mark_reg_clobber (rtx reg, rtx setter, void *data)
1518 if (GET_CODE (setter) == CLOBBER)
1519 mark_reg_store (reg, setter, data);
1522 /* Record that REG has conflicts with all the regs currently live.
1523 Do not mark REG itself as live. */
1525 static void
1526 mark_reg_conflicts (rtx reg)
1528 int regno;
1530 if (GET_CODE (reg) == SUBREG)
1531 reg = SUBREG_REG (reg);
1533 if (!REG_P (reg))
1534 return;
1536 regno = REGNO (reg);
1538 /* Either this is one of the max_allocno pseudo regs not allocated,
1539 or it is or has a hardware reg. First handle the pseudo-regs. */
1540 if (regno >= FIRST_PSEUDO_REGISTER)
1542 if (reg_allocno[regno] >= 0)
1543 record_one_conflict (regno);
1546 if (reg_renumber[regno] >= 0)
1547 regno = reg_renumber[regno];
1549 /* Handle hardware regs (and pseudos allocated to hard regs). */
1550 if (regno < FIRST_PSEUDO_REGISTER && ! fixed_regs[regno])
1552 int last = regno + hard_regno_nregs[regno][GET_MODE (reg)];
1553 while (regno < last)
1555 record_one_conflict (regno);
1556 regno++;
1561 /* Mark REG as being dead (following the insn being scanned now).
1562 Store a 0 in regs_live or allocnos_live for this register. */
1564 static void
1565 mark_reg_death (rtx reg)
1567 int regno = REGNO (reg);
1569 /* Either this is one of the max_allocno pseudo regs not allocated,
1570 or it is a hardware reg. First handle the pseudo-regs. */
1571 if (regno >= FIRST_PSEUDO_REGISTER)
1573 if (reg_allocno[regno] >= 0)
1574 CLEAR_ALLOCNO_LIVE (reg_allocno[regno]);
1577 /* For pseudo reg, see if it has been assigned a hardware reg. */
1578 if (reg_renumber[regno] >= 0)
1579 regno = reg_renumber[regno];
1581 /* Handle hardware regs (and pseudos allocated to hard regs). */
1582 if (regno < FIRST_PSEUDO_REGISTER && ! fixed_regs[regno])
1584 /* Pseudo regs already assigned hardware regs are treated
1585 almost the same as explicit hardware regs. */
1586 int last = regno + hard_regno_nregs[regno][GET_MODE (reg)];
1587 while (regno < last)
1589 CLEAR_HARD_REG_BIT (hard_regs_live, regno);
1590 regno++;
1595 /* Mark hard reg REGNO as currently live, assuming machine mode MODE
1596 for the value stored in it. MODE determines how many consecutive
1597 registers are actually in use. Do not record conflicts;
1598 it is assumed that the caller will do that. */
1600 static void
1601 mark_reg_live_nc (int regno, enum machine_mode mode)
1603 int last = regno + hard_regno_nregs[regno][mode];
1604 while (regno < last)
1606 SET_HARD_REG_BIT (hard_regs_live, regno);
1607 regno++;
1611 /* Try to set a preference for an allocno to a hard register.
1612 We are passed DEST and SRC which are the operands of a SET. It is known
1613 that SRC is a register. If SRC or the first operand of SRC is a register,
1614 try to set a preference. If one of the two is a hard register and the other
1615 is a pseudo-register, mark the preference.
1617 Note that we are not as aggressive as local-alloc in trying to tie a
1618 pseudo-register to a hard register. */
1620 static void
1621 set_preference (rtx dest, rtx src)
1623 unsigned int src_regno, dest_regno;
1624 /* Amount to add to the hard regno for SRC, or subtract from that for DEST,
1625 to compensate for subregs in SRC or DEST. */
1626 int offset = 0;
1627 unsigned int i;
1628 int copy = 1;
/* If SRC is an expression (first operand slot is 'e'), look through to
   its first operand; the move is then no longer a plain copy.  */
1630 if (GET_RTX_FORMAT (GET_CODE (src))[0] == 'e')
1631 src = XEXP (src, 0), copy = 0;
1633 /* Get the reg number for both SRC and DEST.
1634 If neither is a reg, give up. */
1636 if (REG_P (src))
1637 src_regno = REGNO (src);
1638 else if (GET_CODE (src) == SUBREG && REG_P (SUBREG_REG (src)))
1640 src_regno = REGNO (SUBREG_REG (src));
/* Hard-reg subregs have an exact regno offset; pseudo subregs are
   adjusted by whole natural-size words.  */
1642 if (REGNO (SUBREG_REG (src)) < FIRST_PSEUDO_REGISTER)
1643 offset += subreg_regno_offset (REGNO (SUBREG_REG (src)),
1644 GET_MODE (SUBREG_REG (src)),
1645 SUBREG_BYTE (src),
1646 GET_MODE (src));
1647 else
1648 offset += (SUBREG_BYTE (src)
1649 / REGMODE_NATURAL_SIZE (GET_MODE (src)));
1651 else
1652 return;
1654 if (REG_P (dest))
1655 dest_regno = REGNO (dest);
1656 else if (GET_CODE (dest) == SUBREG && REG_P (SUBREG_REG (dest)))
1658 dest_regno = REGNO (SUBREG_REG (dest));
1660 if (REGNO (SUBREG_REG (dest)) < FIRST_PSEUDO_REGISTER)
1661 offset -= subreg_regno_offset (REGNO (SUBREG_REG (dest)),
1662 GET_MODE (SUBREG_REG (dest)),
1663 SUBREG_BYTE (dest),
1664 GET_MODE (dest));
1665 else
1666 offset -= (SUBREG_BYTE (dest)
1667 / REGMODE_NATURAL_SIZE (GET_MODE (dest)));
1669 else
1670 return;
1672 /* Convert either or both to hard reg numbers. */
1674 if (reg_renumber[src_regno] >= 0)
1675 src_regno = reg_renumber[src_regno];
1677 if (reg_renumber[dest_regno] >= 0)
1678 dest_regno = reg_renumber[dest_regno];
1680 /* Now if one is a hard reg and the other is a global pseudo
1681 then give the other a preference. */
/* Case 1: DEST is a hard reg, SRC is a global pseudo — prefer DEST's
   hard reg (adjusted by OFFSET) for SRC's allocno.  */
1683 if (dest_regno < FIRST_PSEUDO_REGISTER && src_regno >= FIRST_PSEUDO_REGISTER
1684 && reg_allocno[src_regno] >= 0)
1686 dest_regno -= offset;
1687 if (dest_regno < FIRST_PSEUDO_REGISTER)
1689 if (copy)
1690 SET_REGBIT (hard_reg_copy_preferences,
1691 reg_allocno[src_regno], dest_regno);
1693 SET_REGBIT (hard_reg_preferences,
1694 reg_allocno[src_regno], dest_regno);
/* Also mark every hard reg of the value's full span.  */
1695 for (i = dest_regno;
1696 i < dest_regno + hard_regno_nregs[dest_regno][GET_MODE (dest)];
1697 i++)
1698 SET_REGBIT (hard_reg_full_preferences, reg_allocno[src_regno], i);
/* Case 2: SRC is a hard reg, DEST is a global pseudo — symmetric to
   the above with the OFFSET applied in the other direction.  */
1702 if (src_regno < FIRST_PSEUDO_REGISTER && dest_regno >= FIRST_PSEUDO_REGISTER
1703 && reg_allocno[dest_regno] >= 0)
1705 src_regno += offset;
1706 if (src_regno < FIRST_PSEUDO_REGISTER)
1708 if (copy)
1709 SET_REGBIT (hard_reg_copy_preferences,
1710 reg_allocno[dest_regno], src_regno);
1712 SET_REGBIT (hard_reg_preferences,
1713 reg_allocno[dest_regno], src_regno);
1714 for (i = src_regno;
1715 i < src_regno + hard_regno_nregs[src_regno][GET_MODE (src)];
1716 i++)
1717 SET_REGBIT (hard_reg_full_preferences, reg_allocno[dest_regno], i);
1722 /* Indicate that hard register number FROM was eliminated and replaced with
1723 an offset from hard register number TO. The status of hard registers live
1724 at the start of a basic block is updated by replacing a use of FROM with
1725 a use of TO. */
1727 void
1728 mark_elimination (int from, int to)
1730 basic_block bb;
1732 FOR_EACH_BB (bb)
1734 regset r = bb->global_live_at_start;
1735 if (REGNO_REG_SET_P (r, from))
1737 CLEAR_REGNO_REG_SET (r, from);
1738 SET_REGNO_REG_SET (r, to);
1743 /* Used for communication between the following functions. Holds the
1744 current life information. */
1745 static regset live_relevant_regs;
1747 /* Record in live_relevant_regs and REGS_SET that register REG became live.
1748 This is called via note_stores. */
1749 static void
1750 reg_becomes_live (rtx reg, rtx setter ATTRIBUTE_UNUSED, void *regs_set)
1752 int regno;
1754 if (GET_CODE (reg) == SUBREG)
1755 reg = SUBREG_REG (reg);
1757 if (!REG_P (reg))
1758 return;
1760 regno = REGNO (reg);
1761 if (regno < FIRST_PSEUDO_REGISTER)
1763 int nregs = hard_regno_nregs[regno][GET_MODE (reg)];
1764 while (nregs-- > 0)
1766 SET_REGNO_REG_SET (live_relevant_regs, regno);
1767 if (! fixed_regs[regno])
1768 SET_REGNO_REG_SET ((regset) regs_set, regno);
1769 regno++;
1772 else if (reg_renumber[regno] >= 0)
1774 SET_REGNO_REG_SET (live_relevant_regs, regno);
1775 SET_REGNO_REG_SET ((regset) regs_set, regno);
1779 /* Record in live_relevant_regs that register REGNO died. */
1780 static void
1781 reg_dies (int regno, enum machine_mode mode, struct insn_chain *chain)
1783 if (regno < FIRST_PSEUDO_REGISTER)
1785 int nregs = hard_regno_nregs[regno][mode];
1786 while (nregs-- > 0)
1788 CLEAR_REGNO_REG_SET (live_relevant_regs, regno);
1789 if (! fixed_regs[regno])
1790 SET_REGNO_REG_SET (&chain->dead_or_set, regno);
1791 regno++;
1794 else
1796 CLEAR_REGNO_REG_SET (live_relevant_regs, regno);
1797 if (reg_renumber[regno] >= 0)
1798 SET_REGNO_REG_SET (&chain->dead_or_set, regno);
1802 /* Walk the insns of the current function and build reload_insn_chain,
1803 and record register life information. */
1804 void
1805 build_insn_chain (rtx first)
/* Walk all insns from FIRST, building reload_insn_chain (one node per
   non-note, non-barrier insn) and recording per-insn liveness in each
   node's live_throughout / dead_or_set sets.  */
1807 struct insn_chain **p = &reload_insn_chain;
1808 struct insn_chain *prev = 0;
1809 basic_block b = ENTRY_BLOCK_PTR->next_bb;
1810 regset_head live_relevant_regs_head;
1812 live_relevant_regs = INITIALIZE_REG_SET (live_relevant_regs_head);
1814 for (; first; first = NEXT_INSN (first))
1816 struct insn_chain *c;
/* At each block head, reseed liveness from the block's global
   live-at-start info, keeping only non-eliminable hard regs and
   pseudos that received a hard reg.  */
1818 if (first == BB_HEAD (b))
1820 int i;
1822 CLEAR_REG_SET (live_relevant_regs);
1824 EXECUTE_IF_SET_IN_BITMAP
1825 (b->global_live_at_start, 0, i,
1827 if (i < FIRST_PSEUDO_REGISTER
1828 ? ! TEST_HARD_REG_BIT (eliminable_regset, i)
1829 : reg_renumber[i] >= 0)
1830 SET_REGNO_REG_SET (live_relevant_regs, i);
1834 if (!NOTE_P (first) && !BARRIER_P (first))
1836 c = new_insn_chain ();
1837 c->prev = prev;
1838 prev = c;
1839 *p = c;
1840 p = &c->next;
1841 c->insn = first;
1842 c->block = b->index;
1844 if (INSN_P (first))
1846 rtx link;
1848 /* Mark the death of everything that dies in this instruction. */
1850 for (link = REG_NOTES (first); link; link = XEXP (link, 1))
1851 if (REG_NOTE_KIND (link) == REG_DEAD
1852 && REG_P (XEXP (link, 0)))
1853 reg_dies (REGNO (XEXP (link, 0)), GET_MODE (XEXP (link, 0)),
1856 COPY_REG_SET (&c->live_throughout, live_relevant_regs);
1858 /* Mark everything born in this instruction as live. */
1860 note_stores (PATTERN (first), reg_becomes_live,
1861 &c->dead_or_set);
1863 else
1864 COPY_REG_SET (&c->live_throughout, live_relevant_regs);
1866 if (INSN_P (first))
1868 rtx link;
1870 /* Mark anything that is set in this insn and then unused as dying. */
1872 for (link = REG_NOTES (first); link; link = XEXP (link, 1))
1873 if (REG_NOTE_KIND (link) == REG_UNUSED
1874 && REG_P (XEXP (link, 0)))
1875 reg_dies (REGNO (XEXP (link, 0)), GET_MODE (XEXP (link, 0)),
1880 if (first == BB_END (b))
1881 b = b->next_bb;
1883 /* Stop after we pass the end of the last basic block. Verify that
1884 no real insns are after the end of the last basic block.
1886 We may want to reorganize the loop somewhat since this test should
1887 always be the right exit test. Allow an ADDR_VEC or ADDR_DIF_VEC if
1888 the previous real insn is a JUMP_INSN. */
1889 if (b == EXIT_BLOCK_PTR)
1891 for (first = NEXT_INSN (first) ; first; first = NEXT_INSN (first))
1892 if (INSN_P (first)
1893 && GET_CODE (PATTERN (first)) != USE
1894 && ! ((GET_CODE (PATTERN (first)) == ADDR_VEC
1895 || GET_CODE (PATTERN (first)) == ADDR_DIFF_VEC)
1896 && prev_real_insn (first) != 0
1897 && JUMP_P (prev_real_insn (first))))
1898 abort ();
1899 break;
1902 FREE_REG_SET (live_relevant_regs);
1903 *p = 0;
1906 /* Print debugging trace information if -dg switch is given,
1907 showing the information on which the allocation decisions are based. */
1909 static void
1910 dump_conflicts (FILE *file)
/* Dump, for -dg debugging: the list of allocnos still needing a hard
   reg, then each allocno's conflicts and hard-reg preferences.  */
1912 int i;
1913 int has_preferences;
1914 int nregs;
/* Count allocnos whose pseudo has not yet been assigned a hard reg.  */
1915 nregs = 0;
1916 for (i = 0; i < max_allocno; i++)
1918 if (reg_renumber[allocno[allocno_order[i]].reg] >= 0)
1919 continue;
1920 nregs++;
1922 fprintf (file, ";; %d regs to allocate:", nregs);
1923 for (i = 0; i < max_allocno; i++)
1925 int j;
1926 if (reg_renumber[allocno[allocno_order[i]].reg] >= 0)
1927 continue;
1928 fprintf (file, " %d", allocno[allocno_order[i]].reg);
/* List any other pseudos sharing the same allocno with a "+" prefix.  */
1929 for (j = 0; j < max_regno; j++)
1930 if (reg_allocno[j] == allocno_order[i]
1931 && j != allocno[allocno_order[i]].reg)
1932 fprintf (file, "+%d", j);
1933 if (allocno[allocno_order[i]].size != 1)
1934 fprintf (file, " (%d)", allocno[allocno_order[i]].size);
1936 fprintf (file, "\n");
1938 for (i = 0; i < max_allocno; i++)
1940 int j;
/* Conflicting pseudos first, then conflicting hard regs.  */
1941 fprintf (file, ";; %d conflicts:", allocno[i].reg);
1942 for (j = 0; j < max_allocno; j++)
1943 if (CONFLICTP (j, i))
1944 fprintf (file, " %d", allocno[j].reg);
1945 for (j = 0; j < FIRST_PSEUDO_REGISTER; j++)
1946 if (TEST_HARD_REG_BIT (allocno[i].hard_reg_conflicts, j))
1947 fprintf (file, " %d", j);
1948 fprintf (file, "\n");
1950 has_preferences = 0;
1951 for (j = 0; j < FIRST_PSEUDO_REGISTER; j++)
1952 if (TEST_HARD_REG_BIT (allocno[i].hard_reg_preferences, j))
1953 has_preferences = 1;
1955 if (! has_preferences)
1956 continue;
1957 fprintf (file, ";; %d preferences:", allocno[i].reg);
1958 for (j = 0; j < FIRST_PSEUDO_REGISTER; j++)
1959 if (TEST_HARD_REG_BIT (allocno[i].hard_reg_preferences, j))
1960 fprintf (file, " %d", j);
1961 fprintf (file, "\n");
1963 fprintf (file, "\n");
1966 void
1967 dump_global_regs (FILE *file)
1969 int i, j;
1971 fprintf (file, ";; Register dispositions:\n");
1972 for (i = FIRST_PSEUDO_REGISTER, j = 0; i < max_regno; i++)
1973 if (reg_renumber[i] >= 0)
1975 fprintf (file, "%d in %d ", i, reg_renumber[i]);
1976 if (++j % 6 == 0)
1977 fprintf (file, "\n");
1980 fprintf (file, "\n\n;; Hard regs used: ");
1981 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1982 if (regs_ever_live[i])
1983 fprintf (file, " %d", i);
1984 fprintf (file, "\n\n");
1989 /* This page contains code to make live information more accurate.
1990 The accurate register liveness at program point P means:
1991 o there is a path from P to usage of the register and the
1992 register is not redefined or killed on the path.
1993 o register at P is partially available, i.e. there is a path from
1994 a register definition to the point P and the register is not
1995 killed (clobbered) on the path
1997 The standard GCC live information means only the first condition.
1998 Without the partial availability, there will be more register
1999 conflicts and as a consequence worse register allocation. The
2000 typical example where the information can be different is a
2001 register initialized in the loop at the basic block preceding the
2002 loop in CFG. */
2004 /* The following structure contains basic block data flow information
2005 used to calculate partial availability of registers. */
2007 struct bb_info
2009 /* The basic block reverse post-order number. */
2010 int rts_number;
2011 /* Registers used uninitialized in an insn that also has an
2012 early-clobbered register; such uses might get the same hard register. */
2013 bitmap earlyclobber;
2014 /* Registers correspondingly killed (clobbered) and defined but not
2015 killed afterward in the basic block. */
2016 bitmap killed, avloc;
2017 /* Registers partially available correspondingly at the start and
2018 end of the basic block. */
2019 bitmap pavin, pavout;
2022 /* Macros for accessing data flow information of basic blocks. */
/* The per-block info is stored in the block's generic AUX pointer,
   allocated by alloc_aux_for_blocks in allocate_bb_info.  */
2024 #define BB_INFO(BB) ((struct bb_info *) (BB)->aux)
2025 #define BB_INFO_BY_INDEX(N) BB_INFO (BASIC_BLOCK(N))
2027 /* The function allocates the info structures of each basic block. It
2028 also initialized PAVIN and PAVOUT as if all hard registers were
2029 partially available. */
2031 static void
2032 allocate_bb_info (void)
2034 int i;
2035 basic_block bb;
2036 struct bb_info *bb_info;
2037 bitmap init;
2039 alloc_aux_for_blocks (sizeof (struct bb_info));
2040 init = BITMAP_XMALLOC ();
2041 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2042 bitmap_set_bit (init, i);
2043 FOR_EACH_BB (bb)
2045 bb_info = bb->aux;
2046 bb_info->earlyclobber = BITMAP_XMALLOC ();
2047 bb_info->avloc = BITMAP_XMALLOC ();
2048 bb_info->killed = BITMAP_XMALLOC ();
2049 bb_info->pavin = BITMAP_XMALLOC ();
2050 bb_info->pavout = BITMAP_XMALLOC ();
2051 bitmap_copy (bb_info->pavin, init);
2052 bitmap_copy (bb_info->pavout, init);
2054 BITMAP_XFREE (init);
2057 /* The function frees the allocated info of all basic blocks. */
2059 static void
2060 free_bb_info (void)
2062 basic_block bb;
2063 struct bb_info *bb_info;
2065 FOR_EACH_BB (bb)
2067 bb_info = BB_INFO (bb);
2068 BITMAP_XFREE (bb_info->pavout);
2069 BITMAP_XFREE (bb_info->pavin);
2070 BITMAP_XFREE (bb_info->killed);
2071 BITMAP_XFREE (bb_info->avloc);
2072 BITMAP_XFREE (bb_info->earlyclobber);
2074 free_aux_for_blocks ();
2077 /* The function modifies local info for register REG being changed in
2078 SETTER. DATA is used to pass the current basic block info. */
2080 static void
2081 mark_reg_change (rtx reg, rtx setter, void *data)
2083 int regno;
2084 basic_block bb = data;
2085 struct bb_info *bb_info = BB_INFO (bb);
2087 if (GET_CODE (reg) == SUBREG)
2088 reg = SUBREG_REG (reg);
2090 if (!REG_P (reg))
2091 return;
2093 regno = REGNO (reg);
2094 bitmap_set_bit (bb_info->killed, regno);
2096 if (GET_CODE (setter) != CLOBBER)
2097 bitmap_set_bit (bb_info->avloc, regno);
2098 else
2099 bitmap_clear_bit (bb_info->avloc, regno);
2102 /* Classes of registers which could be early clobbered in the current
2103 insn. */
2105 static varray_type earlyclobber_regclass;
2107 /* The function stores classes of registers which could be early
2108 clobbered in INSN. */
2110 static void
2111 check_earlyclobber (rtx insn)
2113 int opno;
2115 extract_insn (insn);
2117 VARRAY_POP_ALL (earlyclobber_regclass);
2118 for (opno = 0; opno < recog_data.n_operands; opno++)
2120 char c;
2121 bool amp_p;
2122 int i;
2123 enum reg_class class;
2124 const char *p = recog_data.constraints[opno];
2126 class = NO_REGS;
2127 amp_p = false;
2128 for (;;)
2130 c = *p;
2131 switch (c)
2133 case '=': case '+': case '?':
2134 case '#': case '!':
2135 case '*': case '%':
2136 case 'm': case '<': case '>': case 'V': case 'o':
2137 case 'E': case 'F': case 'G': case 'H':
2138 case 's': case 'i': case 'n':
2139 case 'I': case 'J': case 'K': case 'L':
2140 case 'M': case 'N': case 'O': case 'P':
2141 case 'X':
2142 case '0': case '1': case '2': case '3': case '4':
2143 case '5': case '6': case '7': case '8': case '9':
2144 /* These don't say anything we care about. */
2145 break;
2147 case '&':
2148 amp_p = true;
2149 break;
2150 case '\0':
2151 case ',':
2152 if (amp_p && class != NO_REGS)
2154 for (i = VARRAY_ACTIVE_SIZE (earlyclobber_regclass) - 1;
2155 i >= 0; i--)
2156 if (VARRAY_INT (earlyclobber_regclass, i) == (int) class)
2157 break;
2158 if (i < 0)
2159 VARRAY_PUSH_INT (earlyclobber_regclass, (int) class);
2162 amp_p = false;
2163 class = NO_REGS;
2164 break;
2166 case 'r':
2167 class = GENERAL_REGS;
2168 break;
2170 default:
2171 class = REG_CLASS_FROM_CONSTRAINT (c, p);
2172 break;
2174 if (c == '\0')
2175 break;
2176 p += CONSTRAINT_LEN (c, p);
2181 /* The function returns true if register classes C1 and C2 inetrsect. */
2183 static bool
2184 regclass_intersect (enum reg_class c1, enum reg_class c2)
2186 HARD_REG_SET rs, zero;
2188 CLEAR_HARD_REG_SET (zero);
2189 COPY_HARD_REG_SET(rs, reg_class_contents [c1]);
2190 AND_HARD_REG_SET (rs, reg_class_contents [c2]);
2191 GO_IF_HARD_REG_EQUAL (zero, rs, yes);
2192 return true;
2193 yes:
2194 return false;
2197 /* The function checks that pseudo-register *X has a class
2198 intersecting with the class of pseudo-register could be early
2199 clobbered in the same insn. */
2201 static int
2202 mark_reg_use_for_earlyclobber (rtx *x, void *data ATTRIBUTE_UNUSED)
2204 enum reg_class pref_class, alt_class;
2205 int i, regno;
2206 basic_block bb = data;
2207 struct bb_info *bb_info = BB_INFO (bb);
2209 if (GET_CODE (*x) == REG && REGNO (*x) >= FIRST_PSEUDO_REGISTER)
2211 regno = REGNO (*x);
2212 if (bitmap_bit_p (bb_info->killed, regno)
2213 || bitmap_bit_p (bb_info->avloc, regno))
2214 return 0;
2215 pref_class = reg_preferred_class (regno);
2216 alt_class = reg_alternate_class (regno);
2217 for (i = VARRAY_ACTIVE_SIZE (earlyclobber_regclass) - 1; i >= 0; i--)
2218 if (regclass_intersect (VARRAY_INT (earlyclobber_regclass, i),
2219 pref_class)
2220 || (VARRAY_INT (earlyclobber_regclass, i) != NO_REGS
2221 && regclass_intersect (VARRAY_INT (earlyclobber_regclass, i),
2222 alt_class)))
2224 bitmap_set_bit (bb_info->earlyclobber, regno);
2225 break;
2228 return 0;
2231 /* The function processes all pseudo-registers in *X with the aid of
2232 previous function. */
2234 static void
2235 mark_reg_use_for_earlyclobber_1 (rtx *x, void *data)
2237 for_each_rtx (x, mark_reg_use_for_earlyclobber, data);
2240 /* The function calculates local info for each basic block. */
2242 static void
2243 calculate_local_reg_bb_info (void)
2245 basic_block bb;
2246 rtx insn, bound;
2248 VARRAY_INT_INIT (earlyclobber_regclass, 20,
2249 "classes of registers early clobbered in an insn");
2250 FOR_EACH_BB (bb)
2252 bound = NEXT_INSN (BB_END (bb));
2253 for (insn = BB_HEAD (bb); insn != bound; insn = NEXT_INSN (insn))
2254 if (INSN_P (insn))
2256 note_stores (PATTERN (insn), mark_reg_change, bb);
2257 check_earlyclobber (insn);
2258 note_uses (&PATTERN (insn), mark_reg_use_for_earlyclobber_1, bb);
2263 /* The function sets up reverse post-order number of each basic
2264 block. */
2266 static void
2267 set_up_bb_rts_numbers (void)
2269 int i;
2270 int *rts_order;
2272 rts_order = xmalloc (sizeof (int) * n_basic_blocks);
2273 flow_reverse_top_sort_order_compute (rts_order);
2274 for (i = 0; i < n_basic_blocks; i++)
2275 BB_INFO_BY_INDEX (rts_order [i])->rts_number = i;
2276 free (rts_order);
2279 /* Compare function for sorting blocks in reverse postorder. */
2281 static int
2282 rpost_cmp (const void *bb1, const void *bb2)
2284 basic_block b1 = *(basic_block *) bb1, b2 = *(basic_block *) bb2;
2286 return BB_INFO (b2)->rts_number - BB_INFO (b1)->rts_number;
2289 /* The function calculates partial availability of registers. The
2290 function calculates partial availability at the end of basic block
2291 BB by propagating partial availability at end of predecessor basic
2292 block PRED. The function returns true if the partial availability
2293 at the end of BB has been changed or if CHANGED_P. We have the
2294 following equations:
2296 bb.pavin = empty for entry block | union (pavout of predecessors)
2297 bb.pavout = union (bb.pavin - b.killed, bb.avloc) */
2299 static bool
2300 modify_bb_reg_pav (basic_block bb, basic_block pred, bool changed_p)
2302 struct bb_info *bb_info;
2303 bitmap bb_pavin, bb_pavout;
2305 bb_info = BB_INFO (bb);
2306 bb_pavin = bb_info->pavin;
2307 bb_pavout = bb_info->pavout;
2308 if (pred->index != ENTRY_BLOCK)
2309 bitmap_a_or_b (bb_pavin, bb_pavin, BB_INFO (pred)->pavout);
2310 changed_p |= bitmap_union_of_diff (bb_pavout, bb_info->avloc,
2311 bb_pavin, bb_info->killed);
2312 return changed_p;
2315 /* The function calculates partial register availability. */
2317 static void
2318 calculate_reg_pav (void)
2320 basic_block bb, succ;
2321 edge e;
2322 bool changed_p;
2323 int i, nel;
2324 varray_type bbs, new_bbs, temp;
2325 basic_block *bb_array;
2326 sbitmap wset;
2328 VARRAY_BB_INIT (bbs, n_basic_blocks, "basic blocks");
2329 VARRAY_BB_INIT (new_bbs, n_basic_blocks, "basic blocks for the next iter.");
2330 FOR_EACH_BB (bb)
2332 VARRAY_PUSH_BB (bbs, bb);
2334 wset = sbitmap_alloc (n_basic_blocks + 1);
2335 while (VARRAY_ACTIVE_SIZE (bbs))
2337 bb_array = &VARRAY_BB (bbs, 0);
2338 nel = VARRAY_ACTIVE_SIZE (bbs);
2339 qsort (bb_array, nel, sizeof (basic_block), rpost_cmp);
2340 sbitmap_zero (wset);
2341 for (i = 0; i < nel; i++)
2343 bb = bb_array [i];
2344 changed_p = 0;
2345 FOR_EACH_EDGE (e, bb->preds)
2347 changed_p = modify_bb_reg_pav (bb, e->src, changed_p);
2349 END_FOR_EACH_EDGE;
2350 if (changed_p)
2351 FOR_EACH_EDGE (e, bb->succs)
2353 succ = e->dest;
2354 if (succ->index != EXIT_BLOCK && !TEST_BIT (wset, succ->index))
2356 SET_BIT (wset, succ->index);
2357 VARRAY_PUSH_BB (new_bbs, succ);
2360 END_FOR_EACH_EDGE;
2362 temp = bbs;
2363 bbs = new_bbs;
2364 new_bbs = temp;
2365 VARRAY_POP_ALL (new_bbs);
2367 sbitmap_free (wset);
2370 /* The following function makes live information more accurate by
2371 modifying global_live_at_start and global_live_at_end of basic
2372 blocks. After the function call a register lives at a program
2373 point only if it is initialized on a path from CFG entry to the
2374 program point. The standard GCC life analysis permits registers to
2375 live uninitialized. */
2377 static void
2378 make_accurate_live_analysis (void)
2380 basic_block bb;
2381 struct bb_info *bb_info;
2383 max_regno = max_reg_num ();
2384 compact_blocks ();
2385 allocate_bb_info ();
2386 calculate_local_reg_bb_info ();
2387 set_up_bb_rts_numbers ();
2388 calculate_reg_pav ();
2389 FOR_EACH_BB (bb)
2391 bb_info = BB_INFO (bb);
2393 /* Reload can assign the same hard register to uninitialized
2394 pseudo-register and early clobbered pseudo-register in an
2395 insn if the pseudo-register is used first time in given BB
2396 and not lived at the BB start. To prevent this we don't
2397 change life information for such pseudo-registers. */
2398 bitmap_a_or_b (bb_info->pavin, bb_info->pavin, bb_info->earlyclobber);
2399 bitmap_a_and_b (bb->global_live_at_start, bb->global_live_at_start,
2400 bb_info->pavin);
2401 bitmap_a_and_b (bb->global_live_at_end, bb->global_live_at_end,
2402 bb_info->pavout);
2404 free_bb_info ();