Add PR target/17277 to ChangeLog entry.
[official-gcc.git] / gcc / global.c
blobe8184acd37e07fc832f006d8bbe09c54d32d1ab7
1 /* Allocate registers for pseudo-registers that span basic blocks.
2 Copyright (C) 1987, 1988, 1991, 1994, 1996, 1997, 1998,
3 1999, 2000, 2002, 2003, 2004 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
28 #include "machmode.h"
29 #include "hard-reg-set.h"
30 #include "rtl.h"
31 #include "tm_p.h"
32 #include "flags.h"
33 #include "basic-block.h"
34 #include "regs.h"
35 #include "function.h"
36 #include "insn-config.h"
37 #include "recog.h"
38 #include "reload.h"
39 #include "output.h"
40 #include "toplev.h"
42 /* This pass of the compiler performs global register allocation.
43 It assigns hard register numbers to all the pseudo registers
44 that were not handled in local_alloc. Assignments are recorded
45 in the vector reg_renumber, not by changing the rtl code.
46 (Such changes are made by final). The entry point is
47 the function global_alloc.
49 After allocation is complete, the reload pass is run as a subroutine
50 of this pass, so that when a pseudo reg loses its hard reg due to
51 spilling it is possible to make a second attempt to find a hard
52 reg for it. The reload pass is independent in other respects
53 and it is run even when stupid register allocation is in use.
55 1. Assign allocation-numbers (allocnos) to the pseudo-registers
56 still needing allocations and to the pseudo-registers currently
57 allocated by local-alloc which may be spilled by reload.
58 Set up tables reg_allocno and allocno_reg to map
59 reg numbers to allocnos and vice versa.
60 max_allocno gets the number of allocnos in use.
62 2. Allocate a max_allocno by max_allocno conflict bit matrix and clear it.
63 Allocate a max_allocno by FIRST_PSEUDO_REGISTER conflict matrix
64 for conflicts between allocnos and explicit hard register use
65 (which includes use of pseudo-registers allocated by local_alloc).
67 3. For each basic block
68 walk forward through the block, recording which
69 pseudo-registers and which hardware registers are live.
70 Build the conflict matrix between the pseudo-registers
71 and another of pseudo-registers versus hardware registers.
72 Also record the preferred hardware registers
73 for each pseudo-register.
75 4. Sort a table of the allocnos into order of
76 desirability of the variables.
78 5. Allocate the variables in that order; each if possible into
79 a preferred register, else into another register. */
81 /* Number of pseudo-registers which are candidates for allocation. */
83 static int max_allocno;
85 /* Indexed by (pseudo) reg number, gives the allocno, or -1
86 for pseudo registers which are not to be allocated. */
88 static int *reg_allocno;
90 struct allocno
92 int reg;
93 /* Gives the number of consecutive hard registers needed by that
94 pseudo reg. */
95 int size;
97 /* Number of calls crossed by each allocno. */
98 int calls_crossed;
100 /* Number of refs to each allocno. */
101 int n_refs;
103 /* Frequency of uses of each allocno. */
104 int freq;
106 /* Guess at live length of each allocno.
107 This is actually the max of the live lengths of the regs. */
108 int live_length;
110 /* Set of hard regs conflicting with allocno N. */
112 HARD_REG_SET hard_reg_conflicts;
114 /* Set of hard regs preferred by allocno N.
115 This is used to make allocnos go into regs that are copied to or from them,
116 when possible, to reduce register shuffling. */
118 HARD_REG_SET hard_reg_preferences;
120 /* Similar, but just counts register preferences made in simple copy
121 operations, rather than arithmetic. These are given priority because
122 we can always eliminate an insn by using these, but using a register
123 in the above list won't always eliminate an insn. */
125 HARD_REG_SET hard_reg_copy_preferences;
127 /* Similar to hard_reg_preferences, but includes bits for subsequent
128 registers when an allocno is multi-word. The above variable is used for
129 allocation while this is used to build reg_someone_prefers, below. */
131 HARD_REG_SET hard_reg_full_preferences;
133 /* Set of hard registers that some later allocno has a preference for. */
135 HARD_REG_SET regs_someone_prefers;
137 #ifdef STACK_REGS
138 /* Set to true if allocno can't be allocated in the stack register. */
139 bool no_stack_reg;
140 #endif
143 static struct allocno *allocno;
145 /* A vector of the integers from 0 to max_allocno-1,
146 sorted in the order of first-to-be-allocated first. */
148 static int *allocno_order;
150 /* Indexed by (pseudo) reg number, gives the number of another
151 lower-numbered pseudo reg which can share a hard reg with this pseudo
152 *even if the two pseudos would otherwise appear to conflict*. */
154 static int *reg_may_share;
156 /* Define the number of bits in each element of `conflicts' and what
157 type that element has. We use the largest integer format on the
158 host machine. */
160 #define INT_BITS HOST_BITS_PER_WIDE_INT
161 #define INT_TYPE HOST_WIDE_INT
163 /* max_allocno by max_allocno array of bits,
164 recording whether two allocno's conflict (can't go in the same
165 hardware register).
167 `conflicts' is symmetric after the call to mirror_conflicts. */
169 static INT_TYPE *conflicts;
171 /* Number of ints require to hold max_allocno bits.
172 This is the length of a row in `conflicts'. */
174 static int allocno_row_words;
176 /* Two macros to test or store 1 in an element of `conflicts'. */
178 #define CONFLICTP(I, J) \
179 (conflicts[(I) * allocno_row_words + (unsigned) (J) / INT_BITS] \
180 & ((INT_TYPE) 1 << ((unsigned) (J) % INT_BITS)))
182 /* For any allocno set in ALLOCNO_SET, set ALLOCNO to that allocno,
183 and execute CODE. */
184 #define EXECUTE_IF_SET_IN_ALLOCNO_SET(ALLOCNO_SET, ALLOCNO, CODE) \
185 do { \
186 int i_; \
187 int allocno_; \
188 INT_TYPE *p_ = (ALLOCNO_SET); \
190 for (i_ = allocno_row_words - 1, allocno_ = 0; i_ >= 0; \
191 i_--, allocno_ += INT_BITS) \
193 unsigned INT_TYPE word_ = (unsigned INT_TYPE) *p_++; \
195 for ((ALLOCNO) = allocno_; word_; word_ >>= 1, (ALLOCNO)++) \
197 if (word_ & 1) \
198 {CODE;} \
201 } while (0)
203 /* This doesn't work for non-GNU C due to the way CODE is macro expanded. */
204 #if 0
205 /* For any allocno that conflicts with IN_ALLOCNO, set OUT_ALLOCNO to
206 the conflicting allocno, and execute CODE. This macro assumes that
207 mirror_conflicts has been run. */
208 #define EXECUTE_IF_CONFLICT(IN_ALLOCNO, OUT_ALLOCNO, CODE)\
209 EXECUTE_IF_SET_IN_ALLOCNO_SET (conflicts + (IN_ALLOCNO) * allocno_row_words,\
210 OUT_ALLOCNO, (CODE))
211 #endif
213 /* Set of hard regs currently live (during scan of all insns). */
215 static HARD_REG_SET hard_regs_live;
217 /* Set of registers that global-alloc isn't supposed to use. */
219 static HARD_REG_SET no_global_alloc_regs;
221 /* Set of registers used so far. */
223 static HARD_REG_SET regs_used_so_far;
225 /* Number of refs to each hard reg, as used by local alloc.
226 It is zero for a reg that contains global pseudos or is explicitly used. */
228 static int local_reg_n_refs[FIRST_PSEUDO_REGISTER];
230 /* Frequency of uses of given hard reg. */
231 static int local_reg_freq[FIRST_PSEUDO_REGISTER];
233 /* Guess at live length of each hard reg, as used by local alloc.
234 This is actually the sum of the live lengths of the specific regs. */
236 static int local_reg_live_length[FIRST_PSEUDO_REGISTER];
238 /* Set to 1 a bit in a vector TABLE of HARD_REG_SETs, for vector
239 element I, and hard register number J. */
241 #define SET_REGBIT(TABLE, I, J) SET_HARD_REG_BIT (allocno[I].TABLE, J)
243 /* Bit mask for allocnos live at current point in the scan. */
245 static INT_TYPE *allocnos_live;
247 /* Test, set or clear bit number I in allocnos_live,
248 a bit vector indexed by allocno. */
250 #define SET_ALLOCNO_LIVE(I) \
251 (allocnos_live[(unsigned) (I) / INT_BITS] \
252 |= ((INT_TYPE) 1 << ((unsigned) (I) % INT_BITS)))
254 #define CLEAR_ALLOCNO_LIVE(I) \
255 (allocnos_live[(unsigned) (I) / INT_BITS] \
256 &= ~((INT_TYPE) 1 << ((unsigned) (I) % INT_BITS)))
258 /* This is turned off because it doesn't work right for DImode.
259 (And it is only used for DImode, so the other cases are worthless.)
260 The problem is that it isn't true that there is NO possibility of conflict;
261 only that there is no conflict if the two pseudos get the exact same regs.
262 If they were allocated with a partial overlap, there would be a conflict.
263 We can't safely turn off the conflict unless we have another way to
264 prevent the partial overlap.
266 Idea: change hard_reg_conflicts so that instead of recording which
267 hard regs the allocno may not overlap, it records where the allocno
268 may not start. Change both where it is used and where it is updated.
269 Then there is a way to record that (reg:DI 108) may start at 10
270 but not at 9 or 11. There is still the question of how to record
271 this semi-conflict between two pseudos. */
272 #if 0
273 /* Reg pairs for which conflict after the current insn
274 is inhibited by a REG_NO_CONFLICT note.
275 If the table gets full, we ignore any other notes--that is conservative. */
276 #define NUM_NO_CONFLICT_PAIRS 4
277 /* Number of pairs in use in this insn. */
278 int n_no_conflict_pairs;
279 static struct { int allocno1, allocno2;}
280 no_conflict_pairs[NUM_NO_CONFLICT_PAIRS];
281 #endif /* 0 */
283 /* Record all regs that are set in any one insn.
284 Communication from mark_reg_{store,clobber} and global_conflicts. */
286 static rtx *regs_set;
287 static int n_regs_set;
289 /* All registers that can be eliminated. */
291 static HARD_REG_SET eliminable_regset;
293 static int allocno_compare (const void *, const void *);
294 static void global_conflicts (void);
295 static void mirror_conflicts (void);
296 static void expand_preferences (void);
297 static void prune_preferences (void);
298 static void find_reg (int, HARD_REG_SET, int, int, int);
299 static void record_one_conflict (int);
300 static void record_conflicts (int *, int);
301 static void mark_reg_store (rtx, rtx, void *);
302 static void mark_reg_clobber (rtx, rtx, void *);
303 static void mark_reg_conflicts (rtx);
304 static void mark_reg_death (rtx);
305 static void mark_reg_live_nc (int, enum machine_mode);
306 static void set_preference (rtx, rtx);
307 static void dump_conflicts (FILE *);
308 static void reg_becomes_live (rtx, rtx, void *);
309 static void reg_dies (int, enum machine_mode, struct insn_chain *);
311 static void allocate_bb_info (void);
312 static void free_bb_info (void);
313 static void check_earlyclobber (rtx);
314 static bool regclass_intersect (enum reg_class, enum reg_class);
315 static void mark_reg_use_for_earlyclobber_1 (rtx *, void *);
316 static int mark_reg_use_for_earlyclobber (rtx *, void *);
317 static void calculate_local_reg_bb_info (void);
318 static void set_up_bb_rts_numbers (void);
319 static int rpost_cmp (const void *, const void *);
320 static bool modify_bb_reg_pav (basic_block, basic_block, bool);
321 static void calculate_reg_pav (void);
322 static void modify_reg_pav (void);
323 static void make_accurate_live_analysis (void);
327 /* Perform allocation of pseudo-registers not allocated by local_alloc.
328 FILE is a file to output debugging information on,
329 or zero if such output is not desired.
331 Return value is nonzero if reload failed
332 and we must not do any more for this function. */
335 global_alloc (FILE *file)
337 int retval;
338 #ifdef ELIMINABLE_REGS
339 static const struct {const int from, to; } eliminables[] = ELIMINABLE_REGS;
340 #endif
341 int need_fp
342 = (! flag_omit_frame_pointer
343 || (current_function_calls_alloca && EXIT_IGNORE_STACK)
344 || FRAME_POINTER_REQUIRED);
346 size_t i;
347 rtx x;
349 make_accurate_live_analysis ();
351 max_allocno = 0;
353 /* A machine may have certain hard registers that
354 are safe to use only within a basic block. */
356 CLEAR_HARD_REG_SET (no_global_alloc_regs);
358 /* Build the regset of all eliminable registers and show we can't use those
359 that we already know won't be eliminated. */
360 #ifdef ELIMINABLE_REGS
361 for (i = 0; i < ARRAY_SIZE (eliminables); i++)
363 bool cannot_elim
364 = (! CAN_ELIMINATE (eliminables[i].from, eliminables[i].to)
365 || (eliminables[i].to == STACK_POINTER_REGNUM && need_fp));
367 if (!regs_asm_clobbered[eliminables[i].from])
369 SET_HARD_REG_BIT (eliminable_regset, eliminables[i].from);
371 if (cannot_elim)
372 SET_HARD_REG_BIT (no_global_alloc_regs, eliminables[i].from);
374 else if (cannot_elim)
375 error ("%s cannot be used in asm here",
376 reg_names[eliminables[i].from]);
377 else
378 regs_ever_live[eliminables[i].from] = 1;
380 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
381 if (!regs_asm_clobbered[HARD_FRAME_POINTER_REGNUM])
383 SET_HARD_REG_BIT (eliminable_regset, HARD_FRAME_POINTER_REGNUM);
384 if (need_fp)
385 SET_HARD_REG_BIT (no_global_alloc_regs, HARD_FRAME_POINTER_REGNUM);
387 else if (need_fp)
388 error ("%s cannot be used in asm here",
389 reg_names[HARD_FRAME_POINTER_REGNUM]);
390 else
391 regs_ever_live[HARD_FRAME_POINTER_REGNUM] = 1;
392 #endif
394 #else
395 if (!regs_asm_clobbered[FRAME_POINTER_REGNUM])
397 SET_HARD_REG_BIT (eliminable_regset, FRAME_POINTER_REGNUM);
398 if (need_fp)
399 SET_HARD_REG_BIT (no_global_alloc_regs, FRAME_POINTER_REGNUM);
401 else if (need_fp)
402 error ("%s cannot be used in asm here", reg_names[FRAME_POINTER_REGNUM]);
403 else
404 regs_ever_live[FRAME_POINTER_REGNUM] = 1;
405 #endif
407 /* Track which registers have already been used. Start with registers
408 explicitly in the rtl, then registers allocated by local register
409 allocation. */
411 CLEAR_HARD_REG_SET (regs_used_so_far);
412 #ifdef LEAF_REGISTERS
413 /* If we are doing the leaf function optimization, and this is a leaf
414 function, it means that the registers that take work to save are those
415 that need a register window. So prefer the ones that can be used in
416 a leaf function. */
418 const char *cheap_regs;
419 const char *const leaf_regs = LEAF_REGISTERS;
421 if (only_leaf_regs_used () && leaf_function_p ())
422 cheap_regs = leaf_regs;
423 else
424 cheap_regs = call_used_regs;
425 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
426 if (regs_ever_live[i] || cheap_regs[i])
427 SET_HARD_REG_BIT (regs_used_so_far, i);
429 #else
430 /* We consider registers that do not have to be saved over calls as if
431 they were already used since there is no cost in using them. */
432 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
433 if (regs_ever_live[i] || call_used_regs[i])
434 SET_HARD_REG_BIT (regs_used_so_far, i);
435 #endif
437 for (i = FIRST_PSEUDO_REGISTER; i < (size_t) max_regno; i++)
438 if (reg_renumber[i] >= 0)
439 SET_HARD_REG_BIT (regs_used_so_far, reg_renumber[i]);
441 /* Establish mappings from register number to allocation number
442 and vice versa. In the process, count the allocnos. */
444 reg_allocno = xmalloc (max_regno * sizeof (int));
446 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
447 reg_allocno[i] = -1;
449 /* Initialize the shared-hard-reg mapping
450 from the list of pairs that may share. */
451 reg_may_share = xcalloc (max_regno, sizeof (int));
452 for (x = regs_may_share; x; x = XEXP (XEXP (x, 1), 1))
454 int r1 = REGNO (XEXP (x, 0));
455 int r2 = REGNO (XEXP (XEXP (x, 1), 0));
456 if (r1 > r2)
457 reg_may_share[r1] = r2;
458 else
459 reg_may_share[r2] = r1;
462 for (i = FIRST_PSEUDO_REGISTER; i < (size_t) max_regno; i++)
463 /* Note that reg_live_length[i] < 0 indicates a "constant" reg
464 that we are supposed to refrain from putting in a hard reg.
465 -2 means do make an allocno but don't allocate it. */
466 if (REG_N_REFS (i) != 0 && REG_LIVE_LENGTH (i) != -1
467 /* Don't allocate pseudos that cross calls,
468 if this function receives a nonlocal goto. */
469 && (! current_function_has_nonlocal_label
470 || REG_N_CALLS_CROSSED (i) == 0))
472 if (reg_renumber[i] < 0
473 && reg_may_share[i] && reg_allocno[reg_may_share[i]] >= 0)
474 reg_allocno[i] = reg_allocno[reg_may_share[i]];
475 else
476 reg_allocno[i] = max_allocno++;
477 gcc_assert (REG_LIVE_LENGTH (i));
479 else
480 reg_allocno[i] = -1;
482 allocno = xcalloc (max_allocno, sizeof (struct allocno));
484 for (i = FIRST_PSEUDO_REGISTER; i < (size_t) max_regno; i++)
485 if (reg_allocno[i] >= 0)
487 int num = reg_allocno[i];
488 allocno[num].reg = i;
489 allocno[num].size = PSEUDO_REGNO_SIZE (i);
490 allocno[num].calls_crossed += REG_N_CALLS_CROSSED (i);
491 allocno[num].n_refs += REG_N_REFS (i);
492 allocno[num].freq += REG_FREQ (i);
493 if (allocno[num].live_length < REG_LIVE_LENGTH (i))
494 allocno[num].live_length = REG_LIVE_LENGTH (i);
497 /* Calculate amount of usage of each hard reg by pseudos
498 allocated by local-alloc. This is to see if we want to
499 override it. */
500 memset (local_reg_live_length, 0, sizeof local_reg_live_length);
501 memset (local_reg_n_refs, 0, sizeof local_reg_n_refs);
502 memset (local_reg_freq, 0, sizeof local_reg_freq);
503 for (i = FIRST_PSEUDO_REGISTER; i < (size_t) max_regno; i++)
504 if (reg_renumber[i] >= 0)
506 int regno = reg_renumber[i];
507 int endregno = regno + hard_regno_nregs[regno][PSEUDO_REGNO_MODE (i)];
508 int j;
510 for (j = regno; j < endregno; j++)
512 local_reg_n_refs[j] += REG_N_REFS (i);
513 local_reg_freq[j] += REG_FREQ (i);
514 local_reg_live_length[j] += REG_LIVE_LENGTH (i);
518 /* We can't override local-alloc for a reg used not just by local-alloc. */
519 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
520 if (regs_ever_live[i])
521 local_reg_n_refs[i] = 0, local_reg_freq[i] = 0;
523 allocno_row_words = (max_allocno + INT_BITS - 1) / INT_BITS;
525 /* We used to use alloca here, but the size of what it would try to
526 allocate would occasionally cause it to exceed the stack limit and
527 cause unpredictable core dumps. Some examples were > 2Mb in size. */
528 conflicts = xcalloc (max_allocno * allocno_row_words, sizeof (INT_TYPE));
530 allocnos_live = xmalloc (allocno_row_words * sizeof (INT_TYPE));
532 /* If there is work to be done (at least one reg to allocate),
533 perform global conflict analysis and allocate the regs. */
535 if (max_allocno > 0)
537 /* Scan all the insns and compute the conflicts among allocnos
538 and between allocnos and hard regs. */
540 global_conflicts ();
542 mirror_conflicts ();
544 /* Eliminate conflicts between pseudos and eliminable registers. If
545 the register is not eliminated, the pseudo won't really be able to
546 live in the eliminable register, so the conflict doesn't matter.
547 If we do eliminate the register, the conflict will no longer exist.
548 So in either case, we can ignore the conflict. Likewise for
549 preferences. */
551 for (i = 0; i < (size_t) max_allocno; i++)
553 AND_COMPL_HARD_REG_SET (allocno[i].hard_reg_conflicts,
554 eliminable_regset);
555 AND_COMPL_HARD_REG_SET (allocno[i].hard_reg_copy_preferences,
556 eliminable_regset);
557 AND_COMPL_HARD_REG_SET (allocno[i].hard_reg_preferences,
558 eliminable_regset);
561 /* Try to expand the preferences by merging them between allocnos. */
563 expand_preferences ();
565 /* Determine the order to allocate the remaining pseudo registers. */
567 allocno_order = xmalloc (max_allocno * sizeof (int));
568 for (i = 0; i < (size_t) max_allocno; i++)
569 allocno_order[i] = i;
571 /* Default the size to 1, since allocno_compare uses it to divide by.
572 Also convert allocno_live_length of zero to -1. A length of zero
573 can occur when all the registers for that allocno have reg_live_length
574 equal to -2. In this case, we want to make an allocno, but not
575 allocate it. So avoid the divide-by-zero and set it to a low
576 priority. */
578 for (i = 0; i < (size_t) max_allocno; i++)
580 if (allocno[i].size == 0)
581 allocno[i].size = 1;
582 if (allocno[i].live_length == 0)
583 allocno[i].live_length = -1;
586 qsort (allocno_order, max_allocno, sizeof (int), allocno_compare);
588 prune_preferences ();
590 if (file)
591 dump_conflicts (file);
593 /* Try allocating them, one by one, in that order,
594 except for parameters marked with reg_live_length[regno] == -2. */
596 for (i = 0; i < (size_t) max_allocno; i++)
597 if (reg_renumber[allocno[allocno_order[i]].reg] < 0
598 && REG_LIVE_LENGTH (allocno[allocno_order[i]].reg) >= 0)
600 /* If we have more than one register class,
601 first try allocating in the class that is cheapest
602 for this pseudo-reg. If that fails, try any reg. */
603 if (N_REG_CLASSES > 1)
605 find_reg (allocno_order[i], 0, 0, 0, 0);
606 if (reg_renumber[allocno[allocno_order[i]].reg] >= 0)
607 continue;
609 if (reg_alternate_class (allocno[allocno_order[i]].reg) != NO_REGS)
610 find_reg (allocno_order[i], 0, 1, 0, 0);
613 free (allocno_order);
616 /* Do the reloads now while the allocno data still exist, so that we can
617 try to assign new hard regs to any pseudo regs that are spilled. */
619 #if 0 /* We need to eliminate regs even if there is no rtl code,
620 for the sake of debugging information. */
621 if (n_basic_blocks > 0)
622 #endif
624 build_insn_chain (get_insns ());
625 retval = reload (get_insns (), 1);
628 /* Clean up. */
629 free (reg_allocno);
630 free (reg_may_share);
631 free (allocno);
632 free (conflicts);
633 free (allocnos_live);
635 return retval;
638 /* Sort predicate for ordering the allocnos.
639 Returns -1 (1) if *v1 should be allocated before (after) *v2. */
641 static int
642 allocno_compare (const void *v1p, const void *v2p)
644 int v1 = *(const int *)v1p, v2 = *(const int *)v2p;
645 /* Note that the quotient will never be bigger than
646 the value of floor_log2 times the maximum number of
647 times a register can occur in one insn (surely less than 100)
648 weighted by the frequency (maximally REG_FREQ_MAX).
649 Multiplying this by 10000/REG_FREQ_MAX can't overflow. */
650 int pri1
651 = (((double) (floor_log2 (allocno[v1].n_refs) * allocno[v1].freq)
652 / allocno[v1].live_length)
653 * (10000 / REG_FREQ_MAX) * allocno[v1].size);
654 int pri2
655 = (((double) (floor_log2 (allocno[v2].n_refs) * allocno[v2].freq)
656 / allocno[v2].live_length)
657 * (10000 / REG_FREQ_MAX) * allocno[v2].size);
658 if (pri2 - pri1)
659 return pri2 - pri1;
661 /* If regs are equally good, sort by allocno,
662 so that the results of qsort leave nothing to chance. */
663 return v1 - v2;
666 /* Scan the rtl code and record all conflicts and register preferences in the
667 conflict matrices and preference tables. */
669 static void
670 global_conflicts (void)
672 int i;
673 basic_block b;
674 rtx insn;
675 int *block_start_allocnos;
677 /* Make a vector that mark_reg_{store,clobber} will store in. */
678 regs_set = xmalloc (max_parallel * sizeof (rtx) * 2);
680 block_start_allocnos = xmalloc (max_allocno * sizeof (int));
682 FOR_EACH_BB (b)
684 memset (allocnos_live, 0, allocno_row_words * sizeof (INT_TYPE));
686 /* Initialize table of registers currently live
687 to the state at the beginning of this basic block.
688 This also marks the conflicts among hard registers
689 and any allocnos that are live.
691 For pseudo-regs, there is only one bit for each one
692 no matter how many hard regs it occupies.
693 This is ok; we know the size from PSEUDO_REGNO_SIZE.
694 For explicit hard regs, we cannot know the size that way
695 since one hard reg can be used with various sizes.
696 Therefore, we must require that all the hard regs
697 implicitly live as part of a multi-word hard reg
698 are explicitly marked in basic_block_live_at_start. */
701 regset old = b->global_live_at_start;
702 int ax = 0;
704 REG_SET_TO_HARD_REG_SET (hard_regs_live, old);
705 EXECUTE_IF_SET_IN_REG_SET (old, FIRST_PSEUDO_REGISTER, i,
707 int a = reg_allocno[i];
708 if (a >= 0)
710 SET_ALLOCNO_LIVE (a);
711 block_start_allocnos[ax++] = a;
713 else if ((a = reg_renumber[i]) >= 0)
714 mark_reg_live_nc
715 (a, PSEUDO_REGNO_MODE (i));
718 /* Record that each allocno now live conflicts with each hard reg
719 now live.
721 It is not necessary to mark any conflicts between pseudos as
722 this point, even for pseudos which are live at the start of
723 the basic block.
725 Given two pseudos X and Y and any point in the CFG P.
727 On any path to point P where X and Y are live one of the
728 following conditions must be true:
730 1. X is live at some instruction on the path that
731 evaluates Y.
733 2. Y is live at some instruction on the path that
734 evaluates X.
736 3. Either X or Y is not evaluated on the path to P
737 (ie it is used uninitialized) and thus the
738 conflict can be ignored.
740 In cases #1 and #2 the conflict will be recorded when we
741 scan the instruction that makes either X or Y become live. */
742 record_conflicts (block_start_allocnos, ax);
744 /* Pseudos can't go in stack regs at the start of a basic block that
745 is reached by an abnormal edge. Likewise for call clobbered regs,
746 because because caller-save, fixup_abnormal_edges, and possibly
747 the table driven EH machinery are not quite ready to handle such
748 regs live across such edges. */
750 edge e;
752 for (e = b->pred; e ; e = e->pred_next)
753 if (e->flags & EDGE_ABNORMAL)
754 break;
756 if (e != NULL)
758 #ifdef STACK_REGS
759 EXECUTE_IF_SET_IN_ALLOCNO_SET (allocnos_live, ax,
761 allocno[ax].no_stack_reg = 1;
763 for (ax = FIRST_STACK_REG; ax <= LAST_STACK_REG; ax++)
764 record_one_conflict (ax);
765 #endif
767 /* No need to record conflicts for call clobbered regs if we have
768 nonlocal labels around, as we don't ever try to allocate such
769 regs in this case. */
770 if (! current_function_has_nonlocal_label)
771 for (ax = 0; ax < FIRST_PSEUDO_REGISTER; ax++)
772 if (call_used_regs [ax])
773 record_one_conflict (ax);
778 insn = BB_HEAD (b);
780 /* Scan the code of this basic block, noting which allocnos
781 and hard regs are born or die. When one is born,
782 record a conflict with all others currently live. */
784 while (1)
786 RTX_CODE code = GET_CODE (insn);
787 rtx link;
789 /* Make regs_set an empty set. */
791 n_regs_set = 0;
793 if (code == INSN || code == CALL_INSN || code == JUMP_INSN)
796 #if 0
797 int i = 0;
798 for (link = REG_NOTES (insn);
799 link && i < NUM_NO_CONFLICT_PAIRS;
800 link = XEXP (link, 1))
801 if (REG_NOTE_KIND (link) == REG_NO_CONFLICT)
803 no_conflict_pairs[i].allocno1
804 = reg_allocno[REGNO (SET_DEST (PATTERN (insn)))];
805 no_conflict_pairs[i].allocno2
806 = reg_allocno[REGNO (XEXP (link, 0))];
807 i++;
809 #endif /* 0 */
811 /* Mark any registers clobbered by INSN as live,
812 so they conflict with the inputs. */
814 note_stores (PATTERN (insn), mark_reg_clobber, NULL);
816 /* Mark any registers dead after INSN as dead now. */
818 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
819 if (REG_NOTE_KIND (link) == REG_DEAD)
820 mark_reg_death (XEXP (link, 0));
822 /* Mark any registers set in INSN as live,
823 and mark them as conflicting with all other live regs.
824 Clobbers are processed again, so they conflict with
825 the registers that are set. */
827 note_stores (PATTERN (insn), mark_reg_store, NULL);
829 #ifdef AUTO_INC_DEC
830 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
831 if (REG_NOTE_KIND (link) == REG_INC)
832 mark_reg_store (XEXP (link, 0), NULL_RTX, NULL);
833 #endif
835 /* If INSN has multiple outputs, then any reg that dies here
836 and is used inside of an output
837 must conflict with the other outputs.
839 It is unsafe to use !single_set here since it will ignore an
840 unused output. Just because an output is unused does not mean
841 the compiler can assume the side effect will not occur.
842 Consider if REG appears in the address of an output and we
843 reload the output. If we allocate REG to the same hard
844 register as an unused output we could set the hard register
845 before the output reload insn. */
846 if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
847 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
848 if (REG_NOTE_KIND (link) == REG_DEAD)
850 int used_in_output = 0;
851 int i;
852 rtx reg = XEXP (link, 0);
854 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
856 rtx set = XVECEXP (PATTERN (insn), 0, i);
857 if (GET_CODE (set) == SET
858 && !REG_P (SET_DEST (set))
859 && !rtx_equal_p (reg, SET_DEST (set))
860 && reg_overlap_mentioned_p (reg, SET_DEST (set)))
861 used_in_output = 1;
863 if (used_in_output)
864 mark_reg_conflicts (reg);
867 /* Mark any registers set in INSN and then never used. */
869 while (n_regs_set-- > 0)
871 rtx note = find_regno_note (insn, REG_UNUSED,
872 REGNO (regs_set[n_regs_set]));
873 if (note)
874 mark_reg_death (XEXP (note, 0));
878 if (insn == BB_END (b))
879 break;
880 insn = NEXT_INSN (insn);
884 /* Clean up. */
885 free (block_start_allocnos);
886 free (regs_set);
888 /* Expand the preference information by looking for cases where one allocno
889 dies in an insn that sets an allocno. If those two allocnos don't conflict,
890 merge any preferences between those allocnos. */
892 static void
893 expand_preferences (void)
895 rtx insn;
896 rtx link;
897 rtx set;
899 /* We only try to handle the most common cases here. Most of the cases
900 where this wins are reg-reg copies. */
902 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
903 if (INSN_P (insn)
904 && (set = single_set (insn)) != 0
905 && REG_P (SET_DEST (set))
906 && reg_allocno[REGNO (SET_DEST (set))] >= 0)
907 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
908 if (REG_NOTE_KIND (link) == REG_DEAD
909 && REG_P (XEXP (link, 0))
910 && reg_allocno[REGNO (XEXP (link, 0))] >= 0
911 && ! CONFLICTP (reg_allocno[REGNO (SET_DEST (set))],
912 reg_allocno[REGNO (XEXP (link, 0))]))
914 int a1 = reg_allocno[REGNO (SET_DEST (set))];
915 int a2 = reg_allocno[REGNO (XEXP (link, 0))];
917 if (XEXP (link, 0) == SET_SRC (set))
919 IOR_HARD_REG_SET (allocno[a1].hard_reg_copy_preferences,
920 allocno[a2].hard_reg_copy_preferences);
921 IOR_HARD_REG_SET (allocno[a2].hard_reg_copy_preferences,
922 allocno[a1].hard_reg_copy_preferences);
925 IOR_HARD_REG_SET (allocno[a1].hard_reg_preferences,
926 allocno[a2].hard_reg_preferences);
927 IOR_HARD_REG_SET (allocno[a2].hard_reg_preferences,
928 allocno[a1].hard_reg_preferences);
929 IOR_HARD_REG_SET (allocno[a1].hard_reg_full_preferences,
930 allocno[a2].hard_reg_full_preferences);
931 IOR_HARD_REG_SET (allocno[a2].hard_reg_full_preferences,
932 allocno[a1].hard_reg_full_preferences);
/* Prune the preferences for global registers to exclude registers that cannot
   be used.

   Compute `regs_someone_prefers', which is a bitmask of the hard registers
   that are preferred by conflicting registers of lower priority.  If possible,
   we will avoid using these registers.  */

static void
prune_preferences (void)
{
  int i;
  int num;
  /* Maps an allocno number back to its position in allocno_order, so the
     second loop can tell whether a conflicting allocno has lower priority.  */
  int *allocno_to_order = xmalloc (max_allocno * sizeof (int));

  /* Scan from least important to most important.
     For each allocno, remove from preferences registers that cannot be used,
     either because of conflicts or register type.  Then compute all registers
     preferred by each lower-priority register that conflicts.  */

  for (i = max_allocno - 1; i >= 0; i--)
    {
      HARD_REG_SET temp;

      num = allocno_order[i];
      allocno_to_order[num] = i;
      /* TEMP accumulates every hard reg this allocno must not use:
	 its recorded conflicts, plus fixed regs (or, if it crosses calls,
	 all call-used regs), plus everything outside its preferred class.  */
      COPY_HARD_REG_SET (temp, allocno[num].hard_reg_conflicts);

      if (allocno[num].calls_crossed == 0)
	IOR_HARD_REG_SET (temp, fixed_reg_set);
      else
	IOR_HARD_REG_SET (temp, call_used_reg_set);

      IOR_COMPL_HARD_REG_SET
	(temp,
	 reg_class_contents[(int) reg_preferred_class (allocno[num].reg)]);

      AND_COMPL_HARD_REG_SET (allocno[num].hard_reg_preferences, temp);
      AND_COMPL_HARD_REG_SET (allocno[num].hard_reg_copy_preferences, temp);
      AND_COMPL_HARD_REG_SET (allocno[num].hard_reg_full_preferences, temp);
    }

  for (i = max_allocno - 1; i >= 0; i--)
    {
      /* Merge in the preferences of lower-priority registers (they have
	 already been pruned).  If we also prefer some of those registers,
	 don't exclude them unless we are of a smaller size (in which case
	 we want to give the lower-priority allocno the first chance for
	 these registers).  */
      HARD_REG_SET temp, temp2;
      int allocno2;

      num = allocno_order[i];

      CLEAR_HARD_REG_SET (temp);
      CLEAR_HARD_REG_SET (temp2);

      EXECUTE_IF_SET_IN_ALLOCNO_SET (conflicts + num * allocno_row_words,
				     allocno2,
	{
	  if (allocno_to_order[allocno2] > i)
	    {
	      if (allocno[allocno2].size <= allocno[num].size)
		IOR_HARD_REG_SET (temp,
				  allocno[allocno2].hard_reg_full_preferences);
	      else
		IOR_HARD_REG_SET (temp2,
				  allocno[allocno2].hard_reg_full_preferences);
	    }
	});

      /* Regs we ourselves fully prefer are kept available (TEMP cleared of
	 them), except those wanted by larger lower-priority allocnos (TEMP2),
	 which always count as "someone prefers".  */
      AND_COMPL_HARD_REG_SET (temp, allocno[num].hard_reg_full_preferences);
      IOR_HARD_REG_SET (temp, temp2);
      COPY_HARD_REG_SET (allocno[num].regs_someone_prefers, temp);
    }
  free (allocno_to_order);
}
/* Assign a hard register to allocno NUM; look for one that is the beginning
   of a long enough stretch of hard regs none of which conflicts with ALLOCNO.
   The allocno's recorded copy/ordinary preference sets are tried first
   (NOTE(review): an older comment here referred to a PREFREGS parameter
   that no longer exists).

   LOSERS, if nonzero, is a HARD_REG_SET indicating registers that cannot
   be used for this allocation.

   If ALT_REGS_P is zero, consider only the preferred class of ALLOCNO's reg.
   Otherwise ignore that preferred class and use the alternate class.

   If ACCEPT_CALL_CLOBBERED is nonzero, accept a call-clobbered hard reg that
   will have to be saved and restored at calls.

   RETRYING is nonzero if this is called from retry_global_alloc.

   If we find one, record it in reg_renumber.
   If not, do nothing.  */

static void
find_reg (int num, HARD_REG_SET losers, int alt_regs_p, int accept_call_clobbered, int retrying)
{
  int i, best_reg, pass;
  /* USED1: regs unusable for this allocno (class, fixed/call-used, LOSERS,
     no_global_alloc_regs, plus this allocno's own conflicts).
     USED2: same but without the allocno's conflicts — used by the
     "kick out local-alloc" fallback below.
     USED: the working set for the current search pass.  */
  HARD_REG_SET used, used1, used2;

  enum reg_class class = (alt_regs_p
			  ? reg_alternate_class (allocno[num].reg)
			  : reg_preferred_class (allocno[num].reg));
  enum machine_mode mode = PSEUDO_REGNO_MODE (allocno[num].reg);

  if (accept_call_clobbered)
    COPY_HARD_REG_SET (used1, call_fixed_reg_set);
  else if (allocno[num].calls_crossed == 0)
    COPY_HARD_REG_SET (used1, fixed_reg_set);
  else
    COPY_HARD_REG_SET (used1, call_used_reg_set);

  /* Some registers should not be allocated in global-alloc.  */
  IOR_HARD_REG_SET (used1, no_global_alloc_regs);
  if (losers)
    IOR_HARD_REG_SET (used1, losers);

  IOR_COMPL_HARD_REG_SET (used1, reg_class_contents[(int) class]);
  COPY_HARD_REG_SET (used2, used1);

  IOR_HARD_REG_SET (used1, allocno[num].hard_reg_conflicts);

#ifdef CANNOT_CHANGE_MODE_CLASS
  cannot_change_mode_set_regs (&used1, mode, allocno[num].reg);
#endif

  /* Try each hard reg to see if it fits.  Do this in two passes.
     In the first pass, skip registers that are preferred by some other pseudo
     to give it a better chance of getting one of those registers.  Only if
     we can't get a register when excluding those do we take one of them.
     However, we never allocate a register for the first time in pass 0.  */

  COPY_HARD_REG_SET (used, used1);
  IOR_COMPL_HARD_REG_SET (used, regs_used_so_far);
  IOR_HARD_REG_SET (used, allocno[num].regs_someone_prefers);

  best_reg = -1;
  /* The inner loop leaves I < FIRST_PSEUDO_REGISTER only when it broke out
     with a register found, which is what terminates the outer loop early.  */
  for (i = FIRST_PSEUDO_REGISTER, pass = 0;
       pass <= 1 && i >= FIRST_PSEUDO_REGISTER;
       pass++)
    {
      if (pass == 1)
	COPY_HARD_REG_SET (used, used1);
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	{
#ifdef REG_ALLOC_ORDER
	  int regno = reg_alloc_order[i];
#else
	  int regno = i;
#endif
	  if (! TEST_HARD_REG_BIT (used, regno)
	      && HARD_REGNO_MODE_OK (regno, mode)
	      && (allocno[num].calls_crossed == 0
		  || accept_call_clobbered
		  || ! HARD_REGNO_CALL_PART_CLOBBERED (regno, mode)))
	    {
	      int j;
	      int lim = regno + hard_regno_nregs[regno][mode];
	      /* Check that the whole multi-reg stretch is free.  */
	      for (j = regno + 1;
		   (j < lim
		    && ! TEST_HARD_REG_BIT (used, j));
		   j++);
	      if (j == lim)
		{
		  best_reg = regno;
		  break;
		}
#ifndef REG_ALLOC_ORDER
	      i = j;		/* Skip starting points we know will lose */
#endif
	    }
	}
    }

  /* See if there is a preferred register with the same class as the register
     we allocated above.  Making this restriction prevents register
     preferencing from creating worse register allocation.

     Remove from the preferred registers and conflicting registers.  Note that
     additional conflicts may have been added after `prune_preferences' was
     called.

     First do this for those register with copy preferences, then all
     preferred registers.  */

  AND_COMPL_HARD_REG_SET (allocno[num].hard_reg_copy_preferences, used);
  GO_IF_HARD_REG_SUBSET (allocno[num].hard_reg_copy_preferences,
			 reg_class_contents[(int) NO_REGS], no_copy_prefs);

  if (best_reg >= 0)
    {
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if (TEST_HARD_REG_BIT (allocno[num].hard_reg_copy_preferences, i)
	    && HARD_REGNO_MODE_OK (i, mode)
	    && (allocno[num].calls_crossed == 0
		|| accept_call_clobbered
		|| ! HARD_REGNO_CALL_PART_CLOBBERED (i, mode))
	    && (REGNO_REG_CLASS (i) == REGNO_REG_CLASS (best_reg)
		|| reg_class_subset_p (REGNO_REG_CLASS (i),
				       REGNO_REG_CLASS (best_reg))
		|| reg_class_subset_p (REGNO_REG_CLASS (best_reg),
				       REGNO_REG_CLASS (i))))
	  {
	    int j;
	    int lim = i + hard_regno_nregs[i][mode];
	    /* Every reg of the stretch must be free and class-compatible
	       with the corresponding reg of the current best stretch.  */
	    for (j = i + 1;
		 (j < lim
		  && ! TEST_HARD_REG_BIT (used, j)
		  && (REGNO_REG_CLASS (j)
		      == REGNO_REG_CLASS (best_reg + (j - i))
		      || reg_class_subset_p (REGNO_REG_CLASS (j),
					     REGNO_REG_CLASS (best_reg + (j - i)))
		      || reg_class_subset_p (REGNO_REG_CLASS (best_reg + (j - i)),
					     REGNO_REG_CLASS (j))));
		 j++);
	    if (j == lim)
	      {
		best_reg = i;
		/* A copy preference wins outright; skip ordinary prefs.  */
		goto no_prefs;
	      }
	  }
    }
 no_copy_prefs:

  AND_COMPL_HARD_REG_SET (allocno[num].hard_reg_preferences, used);
  GO_IF_HARD_REG_SUBSET (allocno[num].hard_reg_preferences,
			 reg_class_contents[(int) NO_REGS], no_prefs);

  if (best_reg >= 0)
    {
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if (TEST_HARD_REG_BIT (allocno[num].hard_reg_preferences, i)
	    && HARD_REGNO_MODE_OK (i, mode)
	    && (allocno[num].calls_crossed == 0
		|| accept_call_clobbered
		|| ! HARD_REGNO_CALL_PART_CLOBBERED (i, mode))
	    && (REGNO_REG_CLASS (i) == REGNO_REG_CLASS (best_reg)
		|| reg_class_subset_p (REGNO_REG_CLASS (i),
				       REGNO_REG_CLASS (best_reg))
		|| reg_class_subset_p (REGNO_REG_CLASS (best_reg),
				       REGNO_REG_CLASS (i))))
	  {
	    int j;
	    int lim = i + hard_regno_nregs[i][mode];
	    for (j = i + 1;
		 (j < lim
		  && ! TEST_HARD_REG_BIT (used, j)
		  && (REGNO_REG_CLASS (j)
		      == REGNO_REG_CLASS (best_reg + (j - i))
		      || reg_class_subset_p (REGNO_REG_CLASS (j),
					     REGNO_REG_CLASS (best_reg + (j - i)))
		      || reg_class_subset_p (REGNO_REG_CLASS (best_reg + (j - i)),
					     REGNO_REG_CLASS (j))));
		 j++);
	    if (j == lim)
	      {
		best_reg = i;
		break;
	      }
	  }
    }
 no_prefs:

  /* If we haven't succeeded yet, try with caller-saves.
     We need not check to see if the current function has nonlocal
     labels because we don't put any pseudos that are live over calls in
     registers in that case.  */

  if (flag_caller_saves && best_reg < 0)
    {
      /* Did not find a register.  If it would be profitable to
	 allocate a call-clobbered register and save and restore it
	 around calls, do that.  */
      if (! accept_call_clobbered
	  && allocno[num].calls_crossed != 0
	  && CALLER_SAVE_PROFITABLE (allocno[num].n_refs,
				     allocno[num].calls_crossed))
	{
	  HARD_REG_SET new_losers;
	  if (! losers)
	    CLEAR_HARD_REG_SET (new_losers);
	  else
	    COPY_HARD_REG_SET (new_losers, losers);

	  IOR_HARD_REG_SET(new_losers, losing_caller_save_reg_set);
	  /* Recurse with ACCEPT_CALL_CLOBBERED set.  */
	  find_reg (num, new_losers, alt_regs_p, 1, retrying);
	  if (reg_renumber[allocno[num].reg] >= 0)
	    {
	      caller_save_needed = 1;
	      return;
	    }
	}
    }

  /* If we haven't succeeded yet,
     see if some hard reg that conflicts with us
     was utilized poorly by local-alloc.
     If so, kick out the regs that were put there by local-alloc
     so we can use it instead.  */
  if (best_reg < 0 && !retrying
      /* Let's not bother with multi-reg allocnos.  */
      && allocno[num].size == 1)
    {
      /* Count from the end, to find the least-used ones first.  */
      for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
	{
#ifdef REG_ALLOC_ORDER
	  int regno = reg_alloc_order[i];
#else
	  int regno = i;
#endif

	  if (local_reg_n_refs[regno] != 0
	      /* Don't use a reg no good for this pseudo.  */
	      && ! TEST_HARD_REG_BIT (used2, regno)
	      && HARD_REGNO_MODE_OK (regno, mode)
	      /* The code below assumes that we need only a single
		 register, but the check of allocno[num].size above
		 was not enough.  Sometimes we need more than one
		 register for a single-word value.  */
	      && hard_regno_nregs[regno][mode] == 1
	      && (allocno[num].calls_crossed == 0
		  || accept_call_clobbered
		  || ! HARD_REGNO_CALL_PART_CLOBBERED (regno, mode))
#ifdef CANNOT_CHANGE_MODE_CLASS
	      && ! invalid_mode_change_p (regno, REGNO_REG_CLASS (regno),
					  mode)
#endif
#ifdef STACK_REGS
	      && (!allocno[num].no_stack_reg
		  || regno < FIRST_STACK_REG || regno > LAST_STACK_REG)
#endif
	      )
	    {
	      /* We explicitly evaluate the divide results into temporary
		 variables so as to avoid excess precision problems that occur
		 on an i386-unknown-sysv4.2 (unixware) host.  */

	      double tmp1 = ((double) local_reg_freq[regno]
			     / local_reg_live_length[regno]);
	      double tmp2 = ((double) allocno[num].freq
			     / allocno[num].live_length);

	      if (tmp1 < tmp2)
		{
		  /* Hard reg REGNO was used less in total by local regs
		     than it would be used by this one allocno!  */
		  int k;
		  for (k = 0; k < max_regno; k++)
		    if (reg_renumber[k] >= 0)
		      {
			int r = reg_renumber[k];
			int endregno
			  = r + hard_regno_nregs[r][PSEUDO_REGNO_MODE (k)];

			/* Evict any pseudo whose assignment overlaps
			   REGNO; reload will retry them later.  */
			if (regno >= r && regno < endregno)
			  reg_renumber[k] = -1;
		      }

		  best_reg = regno;
		  break;
		}
	    }
	}
    }

  /* Did we find a register?  */

  if (best_reg >= 0)
    {
      int lim, j;
      HARD_REG_SET this_reg;

      /* Yes.  Record it as the hard register of this pseudo-reg.  */
      reg_renumber[allocno[num].reg] = best_reg;
      /* Also of any pseudo-regs that share with it.  */
      if (reg_may_share[allocno[num].reg])
	for (j = FIRST_PSEUDO_REGISTER; j < max_regno; j++)
	  if (reg_allocno[j] == num)
	    reg_renumber[j] = best_reg;

      /* Make a set of the hard regs being allocated.  */
      CLEAR_HARD_REG_SET (this_reg);
      lim = best_reg + hard_regno_nregs[best_reg][mode];
      for (j = best_reg; j < lim; j++)
	{
	  SET_HARD_REG_BIT (this_reg, j);
	  SET_HARD_REG_BIT (regs_used_so_far, j);
	  /* This is no longer a reg used just by local regs.  */
	  local_reg_n_refs[j] = 0;
	  local_reg_freq[j] = 0;
	}
      /* For each other pseudo-reg conflicting with this one,
	 mark it as conflicting with the hard regs this one occupies.  */
      lim = num;
      EXECUTE_IF_SET_IN_ALLOCNO_SET (conflicts + lim * allocno_row_words, j,
	{
	  IOR_HARD_REG_SET (allocno[j].hard_reg_conflicts, this_reg);
	});
    }
}
1339 /* Called from `reload' to look for a hard reg to put pseudo reg REGNO in.
1340 Perhaps it had previously seemed not worth a hard reg,
1341 or perhaps its old hard reg has been commandeered for reloads.
1342 FORBIDDEN_REGS indicates certain hard regs that may not be used, even if
1343 they do not appear to be allocated.
1344 If FORBIDDEN_REGS is zero, no regs are forbidden. */
1346 void
1347 retry_global_alloc (int regno, HARD_REG_SET forbidden_regs)
1349 int alloc_no = reg_allocno[regno];
1350 if (alloc_no >= 0)
1352 /* If we have more than one register class,
1353 first try allocating in the class that is cheapest
1354 for this pseudo-reg. If that fails, try any reg. */
1355 if (N_REG_CLASSES > 1)
1356 find_reg (alloc_no, forbidden_regs, 0, 0, 1);
1357 if (reg_renumber[regno] < 0
1358 && reg_alternate_class (regno) != NO_REGS)
1359 find_reg (alloc_no, forbidden_regs, 1, 0, 1);
1361 /* If we found a register, modify the RTL for the register to
1362 show the hard register, and mark that register live. */
1363 if (reg_renumber[regno] >= 0)
1365 REGNO (regno_reg_rtx[regno]) = reg_renumber[regno];
1366 mark_home_live (regno);
/* Record a conflict between register REGNO
   and everything currently live.
   REGNO must not be a pseudo reg that was allocated
   by local_alloc; such numbers must be translated through
   reg_renumber before calling here.  */

static void
record_one_conflict (int regno)
{
  int j;

  if (regno < FIRST_PSEUDO_REGISTER)
    /* When a hard register becomes live,
       record conflicts with live pseudo regs.  */
    EXECUTE_IF_SET_IN_ALLOCNO_SET (allocnos_live, j,
      {
	SET_HARD_REG_BIT (allocno[j].hard_reg_conflicts, regno);
      });
  else
    /* When a pseudo-register becomes live,
       record conflicts first with hard regs,
       then with other pseudo regs.  */
    {
      int ialloc = reg_allocno[regno];
      int ialloc_prod = ialloc * allocno_row_words;

      IOR_HARD_REG_SET (allocno[ialloc].hard_reg_conflicts, hard_regs_live);
      /* OR the live-allocno bit row into this allocno's conflict row.
	 Note this only sets CONFLICTP (ialloc, j); mirror_conflicts
	 later makes the matrix symmetric.  */
      for (j = allocno_row_words - 1; j >= 0; j--)
	conflicts[ialloc_prod + j] |= allocnos_live[j];
    }
}
1403 /* Record all allocnos currently live as conflicting
1404 with all hard regs currently live.
1406 ALLOCNO_VEC is a vector of LEN allocnos, all allocnos that
1407 are currently live. Their bits are also flagged in allocnos_live. */
1409 static void
1410 record_conflicts (int *allocno_vec, int len)
1412 while (--len >= 0)
1413 IOR_HARD_REG_SET (allocno[allocno_vec[len]].hard_reg_conflicts,
1414 hard_regs_live);
/* If CONFLICTP (i, j) is true, make sure CONFLICTP (j, i) is also true.
   The conflict matrix is max_allocno rows of allocno_row_words words;
   this walks every set bit (i, j) and sets the transposed bit (j, i).  */

static void
mirror_conflicts (void)
{
  int i, j;
  int rw = allocno_row_words;
  int rwb = rw * INT_BITS;
  INT_TYPE *p = conflicts;		/* Walks rows, bit by bit.  */
  INT_TYPE *q0 = conflicts, *q1, *q2;	/* Walk the transposed column.  */
  unsigned INT_TYPE mask;		/* Bit for allocno I within its word.  */

  for (i = max_allocno - 1, mask = 1; i >= 0; i--, mask <<= 1)
    {
      /* MASK wrapped around: move to the next word of each row.  */
      if (! mask)
	{
	  mask = 1;
	  q0++;
	}
      /* Q1 steps one word-worth (INT_BITS rows) per source word.  */
      for (j = allocno_row_words - 1, q1 = q0; j >= 0; j--, q1 += rwb)
	{
	  unsigned INT_TYPE word;

	  /* Q2 steps one row per source bit.  */
	  for (word = (unsigned INT_TYPE) *p++, q2 = q1; word;
	       word >>= 1, q2 += rw)
	    {
	      if (word & 1)
		*q2 |= mask;
	    }
	}
    }
}
/* Handle the case where REG is set by the insn being scanned,
   during the forward scan to accumulate conflicts.
   Store a 1 in regs_live or allocnos_live for this register, record how many
   consecutive hardware registers it actually needs,
   and record a conflict with all other registers already live.

   Note that even if REG does not remain alive after this insn,
   we must mark it here as live, to ensure a conflict between
   REG and any other regs set in this insn that really do live.
   This is because those other regs could be considered after this.

   REG might actually be something other than a register;
   if so, we do nothing.

   SETTER is 0 if this register was modified by an auto-increment (i.e.,
   a REG_INC note was found for it).  */

static void
mark_reg_store (rtx reg, rtx setter, void *data ATTRIBUTE_UNUSED)
{
  int regno;

  if (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);

  if (!REG_P (reg))
    return;

  /* Remember the reg so the caller can clear its liveness after the insn.  */
  regs_set[n_regs_set++] = reg;

  /* A real SET (not a CLOBBER) may establish a register preference.  */
  if (setter && GET_CODE (setter) != CLOBBER)
    set_preference (reg, SET_SRC (setter));

  regno = REGNO (reg);

  /* Either this is one of the max_allocno pseudo regs not allocated,
     or it is or has a hardware reg.  First handle the pseudo-regs.  */
  if (regno >= FIRST_PSEUDO_REGISTER)
    {
      if (reg_allocno[regno] >= 0)
	{
	  SET_ALLOCNO_LIVE (reg_allocno[regno]);
	  record_one_conflict (regno);
	}
    }

  if (reg_renumber[regno] >= 0)
    regno = reg_renumber[regno];

  /* Handle hardware regs (and pseudos allocated to hard regs).  */
  if (regno < FIRST_PSEUDO_REGISTER && ! fixed_regs[regno])
    {
      int last = regno + hard_regno_nregs[regno][GET_MODE (reg)];
      while (regno < last)
	{
	  record_one_conflict (regno);
	  SET_HARD_REG_BIT (hard_regs_live, regno);
	  regno++;
	}
    }
}
1511 /* Like mark_reg_set except notice just CLOBBERs; ignore SETs. */
1513 static void
1514 mark_reg_clobber (rtx reg, rtx setter, void *data)
1516 if (GET_CODE (setter) == CLOBBER)
1517 mark_reg_store (reg, setter, data);
/* Record that REG has conflicts with all the regs currently live.
   Do not mark REG itself as live.  */

static void
mark_reg_conflicts (rtx reg)
{
  int regno;

  if (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);

  if (!REG_P (reg))
    return;

  regno = REGNO (reg);

  /* Either this is one of the max_allocno pseudo regs not allocated,
     or it is or has a hardware reg.  First handle the pseudo-regs.  */
  if (regno >= FIRST_PSEUDO_REGISTER)
    {
      if (reg_allocno[regno] >= 0)
	record_one_conflict (regno);
    }

  if (reg_renumber[regno] >= 0)
    regno = reg_renumber[regno];

  /* Handle hardware regs (and pseudos allocated to hard regs).  */
  if (regno < FIRST_PSEUDO_REGISTER && ! fixed_regs[regno])
    {
      int last = regno + hard_regno_nregs[regno][GET_MODE (reg)];
      while (regno < last)
	{
	  record_one_conflict (regno);
	  regno++;
	}
    }
}
/* Mark REG as being dead (following the insn being scanned now).
   Store a 0 in regs_live or allocnos_live for this register.  */

static void
mark_reg_death (rtx reg)
{
  int regno = REGNO (reg);

  /* Either this is one of the max_allocno pseudo regs not allocated,
     or it is a hardware reg.  First handle the pseudo-regs.  */
  if (regno >= FIRST_PSEUDO_REGISTER)
    {
      if (reg_allocno[regno] >= 0)
	CLEAR_ALLOCNO_LIVE (reg_allocno[regno]);
    }

  /* For pseudo reg, see if it has been assigned a hardware reg.  */
  if (reg_renumber[regno] >= 0)
    regno = reg_renumber[regno];

  /* Handle hardware regs (and pseudos allocated to hard regs).  */
  if (regno < FIRST_PSEUDO_REGISTER && ! fixed_regs[regno])
    {
      /* Pseudo regs already assigned hardware regs are treated
	 almost the same as explicit hardware regs.  */
      int last = regno + hard_regno_nregs[regno][GET_MODE (reg)];
      while (regno < last)
	{
	  CLEAR_HARD_REG_BIT (hard_regs_live, regno);
	  regno++;
	}
    }
}
1593 /* Mark hard reg REGNO as currently live, assuming machine mode MODE
1594 for the value stored in it. MODE determines how many consecutive
1595 registers are actually in use. Do not record conflicts;
1596 it is assumed that the caller will do that. */
1598 static void
1599 mark_reg_live_nc (int regno, enum machine_mode mode)
1601 int last = regno + hard_regno_nregs[regno][mode];
1602 while (regno < last)
1604 SET_HARD_REG_BIT (hard_regs_live, regno);
1605 regno++;
/* Try to set a preference for an allocno to a hard register.
   We are passed DEST and SRC which are the operands of a SET.  It is known
   that SRC is a register.  If SRC or the first operand of SRC is a register,
   try to set a preference.  If one of the two is a hard register and the other
   is a pseudo-register, mark the preference.

   Note that we are not as aggressive as local-alloc in trying to tie a
   pseudo-register to a hard register.  */

static void
set_preference (rtx dest, rtx src)
{
  unsigned int src_regno, dest_regno;
  /* Amount to add to the hard regno for SRC, or subtract from that for DEST,
     to compensate for subregs in SRC or DEST.  */
  int offset = 0;
  unsigned int i;
  /* Nonzero while this still looks like a plain register copy; a copy
     preference is stronger than an ordinary preference.  */
  int copy = 1;

  /* If SRC is an operation (its first operand is an expression), look at
     that operand instead and treat this as a non-copy preference.  */
  if (GET_RTX_FORMAT (GET_CODE (src))[0] == 'e')
    src = XEXP (src, 0), copy = 0;

  /* Get the reg number for both SRC and DEST.
     If neither is a reg, give up.  */

  if (REG_P (src))
    src_regno = REGNO (src);
  else if (GET_CODE (src) == SUBREG && REG_P (SUBREG_REG (src)))
    {
      src_regno = REGNO (SUBREG_REG (src));

      if (REGNO (SUBREG_REG (src)) < FIRST_PSEUDO_REGISTER)
	offset += subreg_regno_offset (REGNO (SUBREG_REG (src)),
				       GET_MODE (SUBREG_REG (src)),
				       SUBREG_BYTE (src),
				       GET_MODE (src));
      else
	offset += (SUBREG_BYTE (src)
		   / REGMODE_NATURAL_SIZE (GET_MODE (src)));
    }
  else
    return;

  if (REG_P (dest))
    dest_regno = REGNO (dest);
  else if (GET_CODE (dest) == SUBREG && REG_P (SUBREG_REG (dest)))
    {
      dest_regno = REGNO (SUBREG_REG (dest));

      if (REGNO (SUBREG_REG (dest)) < FIRST_PSEUDO_REGISTER)
	offset -= subreg_regno_offset (REGNO (SUBREG_REG (dest)),
				       GET_MODE (SUBREG_REG (dest)),
				       SUBREG_BYTE (dest),
				       GET_MODE (dest));
      else
	offset -= (SUBREG_BYTE (dest)
		   / REGMODE_NATURAL_SIZE (GET_MODE (dest)));
    }
  else
    return;

  /* Convert either or both to hard reg numbers.  */

  if (reg_renumber[src_regno] >= 0)
    src_regno = reg_renumber[src_regno];

  if (reg_renumber[dest_regno] >= 0)
    dest_regno = reg_renumber[dest_regno];

  /* Now if one is a hard reg and the other is a global pseudo
     then give the other a preference.  */

  if (dest_regno < FIRST_PSEUDO_REGISTER && src_regno >= FIRST_PSEUDO_REGISTER
      && reg_allocno[src_regno] >= 0)
    {
      dest_regno -= offset;
      if (dest_regno < FIRST_PSEUDO_REGISTER)
	{
	  if (copy)
	    SET_REGBIT (hard_reg_copy_preferences,
			reg_allocno[src_regno], dest_regno);

	  SET_REGBIT (hard_reg_preferences,
		      reg_allocno[src_regno], dest_regno);
	  for (i = dest_regno;
	       i < dest_regno + hard_regno_nregs[dest_regno][GET_MODE (dest)];
	       i++)
	    SET_REGBIT (hard_reg_full_preferences, reg_allocno[src_regno], i);
	}
    }

  if (src_regno < FIRST_PSEUDO_REGISTER && dest_regno >= FIRST_PSEUDO_REGISTER
      && reg_allocno[dest_regno] >= 0)
    {
      src_regno += offset;
      if (src_regno < FIRST_PSEUDO_REGISTER)
	{
	  if (copy)
	    SET_REGBIT (hard_reg_copy_preferences,
			reg_allocno[dest_regno], src_regno);

	  SET_REGBIT (hard_reg_preferences,
		      reg_allocno[dest_regno], src_regno);
	  for (i = src_regno;
	       i < src_regno + hard_regno_nregs[src_regno][GET_MODE (src)];
	       i++)
	    SET_REGBIT (hard_reg_full_preferences, reg_allocno[dest_regno], i);
	}
    }
}
1720 /* Indicate that hard register number FROM was eliminated and replaced with
1721 an offset from hard register number TO. The status of hard registers live
1722 at the start of a basic block is updated by replacing a use of FROM with
1723 a use of TO. */
1725 void
1726 mark_elimination (int from, int to)
1728 basic_block bb;
1730 FOR_EACH_BB (bb)
1732 regset r = bb->global_live_at_start;
1733 if (REGNO_REG_SET_P (r, from))
1735 CLEAR_REGNO_REG_SET (r, from);
1736 SET_REGNO_REG_SET (r, to);
/* Used for communication between the following functions.  Holds the
   current life information.  */
static regset live_relevant_regs;

/* Record in live_relevant_regs and REGS_SET that register REG became live.
   This is called via note_stores.  */
static void
reg_becomes_live (rtx reg, rtx setter ATTRIBUTE_UNUSED, void *regs_set)
{
  int regno;

  if (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);

  if (!REG_P (reg))
    return;

  regno = REGNO (reg);
  if (regno < FIRST_PSEUDO_REGISTER)
    {
      /* A hard register occupies NREGS consecutive regs; mark them all.  */
      int nregs = hard_regno_nregs[regno][GET_MODE (reg)];
      while (nregs-- > 0)
	{
	  SET_REGNO_REG_SET (live_relevant_regs, regno);
	  /* Fixed registers are live everywhere, so they are not
	     interesting to REGS_SET.  */
	  if (! fixed_regs[regno])
	    SET_REGNO_REG_SET ((regset) regs_set, regno);
	  regno++;
	}
    }
  else if (reg_renumber[regno] >= 0)
    {
      /* Only pseudos that got a hard register are relevant here.  */
      SET_REGNO_REG_SET (live_relevant_regs, regno);
      SET_REGNO_REG_SET ((regset) regs_set, regno);
    }
}
/* Record in live_relevant_regs that register REGNO died.
   MODE gives the number of hard regs involved; dead-or-set regs
   are also flagged in CHAIN for reload's benefit.  */
static void
reg_dies (int regno, enum machine_mode mode, struct insn_chain *chain)
{
  if (regno < FIRST_PSEUDO_REGISTER)
    {
      int nregs = hard_regno_nregs[regno][mode];
      while (nregs-- > 0)
	{
	  CLEAR_REGNO_REG_SET (live_relevant_regs, regno);
	  /* Fixed regs never participate in allocation, so don't
	     report them in the chain's dead_or_set set.  */
	  if (! fixed_regs[regno])
	    SET_REGNO_REG_SET (&chain->dead_or_set, regno);
	  regno++;
	}
    }
  else
    {
      CLEAR_REGNO_REG_SET (live_relevant_regs, regno);
      if (reg_renumber[regno] >= 0)
	SET_REGNO_REG_SET (&chain->dead_or_set, regno);
    }
}
/* Walk the insns of the current function and build reload_insn_chain,
   and record register life information.  */
void
build_insn_chain (rtx first)
{
  struct insn_chain **p = &reload_insn_chain;  /* Tail link of the chain.  */
  struct insn_chain *prev = 0;
  basic_block b = ENTRY_BLOCK_PTR->next_bb;
  regset_head live_relevant_regs_head;

  live_relevant_regs = INITIALIZE_REG_SET (live_relevant_regs_head);

  for (; first; first = NEXT_INSN (first))
    {
      struct insn_chain *c;

      /* At a block boundary, reseed liveness from the block's
	 global_live_at_start, keeping only registers reload cares about:
	 non-eliminable hard regs and pseudos that got a hard reg.  */
      if (first == BB_HEAD (b))
	{
	  int i;

	  CLEAR_REG_SET (live_relevant_regs);

	  EXECUTE_IF_SET_IN_BITMAP
	    (b->global_live_at_start, 0, i,
	     {
	       if (i < FIRST_PSEUDO_REGISTER
		   ? ! TEST_HARD_REG_BIT (eliminable_regset, i)
		   : reg_renumber[i] >= 0)
		 SET_REGNO_REG_SET (live_relevant_regs, i);
	     });
	}

      /* Notes and barriers get no chain element.  */
      if (!NOTE_P (first) && !BARRIER_P (first))
	{
	  c = new_insn_chain ();
	  c->prev = prev;
	  prev = c;
	  *p = c;
	  p = &c->next;
	  c->insn = first;
	  c->block = b->index;

	  if (INSN_P (first))
	    {
	      rtx link;

	      /* Mark the death of everything that dies in this instruction.  */

	      for (link = REG_NOTES (first); link; link = XEXP (link, 1))
		if (REG_NOTE_KIND (link) == REG_DEAD
		    && REG_P (XEXP (link, 0)))
		  reg_dies (REGNO (XEXP (link, 0)), GET_MODE (XEXP (link, 0)),
			    c);

	      /* Deaths applied, births not yet: this is exactly the set
		 live throughout the insn.  */
	      COPY_REG_SET (&c->live_throughout, live_relevant_regs);

	      /* Mark everything born in this instruction as live.  */

	      note_stores (PATTERN (first), reg_becomes_live,
			   &c->dead_or_set);
	    }
	  else
	    COPY_REG_SET (&c->live_throughout, live_relevant_regs);

	  if (INSN_P (first))
	    {
	      rtx link;

	      /* Mark anything that is set in this insn and then unused as dying.  */

	      for (link = REG_NOTES (first); link; link = XEXP (link, 1))
		if (REG_NOTE_KIND (link) == REG_UNUSED
		    && REG_P (XEXP (link, 0)))
		  reg_dies (REGNO (XEXP (link, 0)), GET_MODE (XEXP (link, 0)),
			    c);
	    }
	}

      if (first == BB_END (b))
	b = b->next_bb;

      /* Stop after we pass the end of the last basic block.  Verify that
	 no real insns are after the end of the last basic block.

	 We may want to reorganize the loop somewhat since this test should
	 always be the right exit test.  Allow an ADDR_VEC or ADDR_DIF_VEC if
	 the previous real insn is a JUMP_INSN.  */
      if (b == EXIT_BLOCK_PTR)
	{
#ifdef ENABLE_CHECKING
	  for (first = NEXT_INSN (first); first; first = NEXT_INSN (first))
	    gcc_assert (!INSN_P (first)
			|| GET_CODE (PATTERN (first)) == USE
			|| ((GET_CODE (PATTERN (first)) == ADDR_VEC
			     || GET_CODE (PATTERN (first)) == ADDR_DIFF_VEC)
			    && prev_real_insn (first) != 0
			    && JUMP_P (prev_real_insn (first))));
#endif
	  break;
	}
    }
  FREE_REG_SET (live_relevant_regs);
  *p = 0;
}
/* Print debugging trace information if -dg switch is given,
   showing the information on which the allocation decisions are based.  */

static void
dump_conflicts (FILE *file)
{
  int i;
  int has_preferences;
  int nregs;

  /* Count the allocnos that still need a hard register.  */
  nregs = 0;
  for (i = 0; i < max_allocno; i++)
    {
      if (reg_renumber[allocno[allocno_order[i]].reg] >= 0)
	continue;
      nregs++;
    }
  fprintf (file, ";; %d regs to allocate:", nregs);
  for (i = 0; i < max_allocno; i++)
    {
      int j;
      if (reg_renumber[allocno[allocno_order[i]].reg] >= 0)
	continue;
      fprintf (file, " %d", allocno[allocno_order[i]].reg);
      /* List pseudos that share this allocno, prefixed with '+'.  */
      for (j = 0; j < max_regno; j++)
	if (reg_allocno[j] == allocno_order[i]
	    && j != allocno[allocno_order[i]].reg)
	  fprintf (file, "+%d", j);
      if (allocno[allocno_order[i]].size != 1)
	fprintf (file, " (%d)", allocno[allocno_order[i]].size);
    }
  fprintf (file, "\n");

  for (i = 0; i < max_allocno; i++)
    {
      int j;
      /* Conflicting pseudos first, then conflicting hard regs.  */
      fprintf (file, ";; %d conflicts:", allocno[i].reg);
      for (j = 0; j < max_allocno; j++)
	if (CONFLICTP (j, i))
	  fprintf (file, " %d", allocno[j].reg);
      for (j = 0; j < FIRST_PSEUDO_REGISTER; j++)
	if (TEST_HARD_REG_BIT (allocno[i].hard_reg_conflicts, j))
	  fprintf (file, " %d", j);
      fprintf (file, "\n");

      has_preferences = 0;
      for (j = 0; j < FIRST_PSEUDO_REGISTER; j++)
	if (TEST_HARD_REG_BIT (allocno[i].hard_reg_preferences, j))
	  has_preferences = 1;

      if (! has_preferences)
	continue;
      fprintf (file, ";; %d preferences:", allocno[i].reg);
      for (j = 0; j < FIRST_PSEUDO_REGISTER; j++)
	if (TEST_HARD_REG_BIT (allocno[i].hard_reg_preferences, j))
	  fprintf (file, " %d", j);
      fprintf (file, "\n");
    }

  fprintf (file, "\n");
}
1965 void
1966 dump_global_regs (FILE *file)
1968 int i, j;
1970 fprintf (file, ";; Register dispositions:\n");
1971 for (i = FIRST_PSEUDO_REGISTER, j = 0; i < max_regno; i++)
1972 if (reg_renumber[i] >= 0)
1974 fprintf (file, "%d in %d ", i, reg_renumber[i]);
1975 if (++j % 6 == 0)
1976 fprintf (file, "\n");
1979 fprintf (file, "\n\n;; Hard regs used: ");
1980 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1981 if (regs_ever_live[i])
1982 fprintf (file, " %d", i);
1983 fprintf (file, "\n\n");
/* This page contains code to make live information more accurate.
   The accurate register liveness at program point P means:
     o there is a path from P to usage of the register and the
       register is not redefined or killed on the path.
     o register at P is partially available, i.e. there is a path from
       a register definition to the point P and the register is not
       killed (clobbered) on the path

   The standard GCC live information means only the first condition.
   Without the partial availability, there will be more register
   conflicts and as a consequence worse register allocation.  The
   typical example where the information can be different is a
   register initialized in the loop at the basic block preceding the
   loop in CFG.  */

/* The following structure contains basic block data flow information
   used to calculate partial availability of registers.  */

struct bb_info
{
  /* The basic block reverse post-order number.  */
  int rts_number;
  /* Registers used uninitialized in an insn in which there is an
     early clobbered register might get the same hard register.  */
  bitmap earlyclobber;
  /* Registers correspondingly killed (clobbered) and defined but not
     killed afterward in the basic block.  */
  bitmap killed, avloc;
  /* Registers partially available correspondingly at the start and
     end of the basic block.  */
  bitmap pavin, pavout;
};

/* Macros for accessing data flow information of basic blocks.  */

#define BB_INFO(BB) ((struct bb_info *) (BB)->aux)
#define BB_INFO_BY_INDEX(N) BB_INFO (BASIC_BLOCK(N))
/* The function allocates the info structures of each basic block.  It
   also initializes PAVIN and PAVOUT as if all hard registers were
   partially available.  */

static void
allocate_bb_info (void)
{
  int i;
  basic_block bb;
  struct bb_info *bb_info;
  bitmap init;

  alloc_aux_for_blocks (sizeof (struct bb_info));
  /* INIT holds the initial "partially available" set: all hard regs.  */
  init = BITMAP_XMALLOC ();
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    bitmap_set_bit (init, i);
  FOR_EACH_BB (bb)
    {
      bb_info = bb->aux;
      bb_info->earlyclobber = BITMAP_XMALLOC ();
      bb_info->avloc = BITMAP_XMALLOC ();
      bb_info->killed = BITMAP_XMALLOC ();
      bb_info->pavin = BITMAP_XMALLOC ();
      bb_info->pavout = BITMAP_XMALLOC ();
      bitmap_copy (bb_info->pavin, init);
      bitmap_copy (bb_info->pavout, init);
    }
  BITMAP_XFREE (init);
}
2056 /* The function frees the allocated info of all basic blocks. */
2058 static void
2059 free_bb_info (void)
2061 basic_block bb;
2062 struct bb_info *bb_info;
2064 FOR_EACH_BB (bb)
2066 bb_info = BB_INFO (bb);
2067 BITMAP_XFREE (bb_info->pavout);
2068 BITMAP_XFREE (bb_info->pavin);
2069 BITMAP_XFREE (bb_info->killed);
2070 BITMAP_XFREE (bb_info->avloc);
2071 BITMAP_XFREE (bb_info->earlyclobber);
2073 free_aux_for_blocks ();
2076 /* The function modifies local info for register REG being changed in
2077 SETTER. DATA is used to pass the current basic block info. */
2079 static void
2080 mark_reg_change (rtx reg, rtx setter, void *data)
2082 int regno;
2083 basic_block bb = data;
2084 struct bb_info *bb_info = BB_INFO (bb);
2086 if (GET_CODE (reg) == SUBREG)
2087 reg = SUBREG_REG (reg);
2089 if (!REG_P (reg))
2090 return;
2092 regno = REGNO (reg);
2093 bitmap_set_bit (bb_info->killed, regno);
2095 if (GET_CODE (setter) != CLOBBER)
2096 bitmap_set_bit (bb_info->avloc, regno);
2097 else
2098 bitmap_clear_bit (bb_info->avloc, regno);
/* Classes of registers which could be early clobbered in the current
   insn.  Filled in by check_earlyclobber and consumed by
   mark_reg_use_for_earlyclobber.  */

static varray_type earlyclobber_regclass;
/* The function stores classes of registers which could be early
   clobbered in INSN by scanning its operand constraints.  The result
   is accumulated (without duplicates) in earlyclobber_regclass.  */

static void
check_earlyclobber (rtx insn)
{
  int opno;

  extract_insn (insn);

  VARRAY_POP_ALL (earlyclobber_regclass);
  for (opno = 0; opno < recog_data.n_operands; opno++)
    {
      char c;
      bool amp_p;
      int i;
      enum reg_class class;
      const char *p = recog_data.constraints[opno];

      class = NO_REGS;
      amp_p = false;
      for (;;)
	{
	  c = *p;
	  switch (c)
	    {
	    case '=':  case '+':  case '?':
	    case '#':  case '!':
	    case '*':  case '%':
	    case 'm':  case '<':  case '>':  case 'V':  case 'o':
	    case 'E':  case 'F':  case 'G':  case 'H':
	    case 's':  case 'i':  case 'n':
	    case 'I':  case 'J':  case 'K':  case 'L':
	    case 'M':  case 'N':  case 'O':  case 'P':
	    case 'X':
	    case '0': case '1':  case '2':  case '3':  case '4':
	    case '5': case '6':  case '7':  case '8':  case '9':
	      /* These don't say anything we care about.  */
	      break;

	    case '&':
	      /* Remember that this alternative early clobbers an
		 output; the class is picked up from later letters.  */
	      amp_p = true;
	      break;
	    case '\0':
	    case ',':
	      /* End of an alternative (or of the whole constraint
		 string): if it was early clobbering and named a
		 register class, record that class once.  */
	      if (amp_p && class != NO_REGS)
		{
		  for (i = VARRAY_ACTIVE_SIZE (earlyclobber_regclass) - 1;
		       i >= 0; i--)
		    if (VARRAY_INT (earlyclobber_regclass, i) == (int) class)
		      break;
		  if (i < 0)
		    VARRAY_PUSH_INT (earlyclobber_regclass, (int) class);
		}

	      amp_p = false;
	      class = NO_REGS;
	      break;

	    case 'r':
	      class = GENERAL_REGS;
	      break;

	    default:
	      /* Machine-specific constraint letter; may map to a
		 register class or to NO_REGS.  */
	      class = REG_CLASS_FROM_CONSTRAINT (c, p);
	      break;
	    }
	  if (c == '\0')
	    break;
	  /* Constraint letters can be multi-character; step by the
	     letter's full length.  */
	  p += CONSTRAINT_LEN (c, p);
	}
    }
}
2180 /* The function returns true if register classes C1 and C2 intersect. */
2182 static bool
2183 regclass_intersect (enum reg_class c1, enum reg_class c2)
2185 HARD_REG_SET rs, zero;
2187 CLEAR_HARD_REG_SET (zero);
2188 COPY_HARD_REG_SET(rs, reg_class_contents [c1]);
2189 AND_HARD_REG_SET (rs, reg_class_contents [c2]);
2190 GO_IF_HARD_REG_EQUAL (zero, rs, yes);
2191 return true;
2192 yes:
2193 return false;
/* for_each_rtx callback: if *X is a pseudo-register whose preferred
   or alternate class intersects a class that could be early clobbered
   in the current insn, mark it in the block's EARLYCLOBBER bitmap.
   DATA is the current basic block.  Always returns 0 so the walk
   continues.  */

static int
mark_reg_use_for_earlyclobber (rtx *x, void *data ATTRIBUTE_UNUSED)
{
  enum reg_class pref_class, alt_class;
  int i, regno;
  basic_block bb = data;
  struct bb_info *bb_info = BB_INFO (bb);

  if (GET_CODE (*x) == REG && REGNO (*x) >= FIRST_PSEUDO_REGISTER)
    {
      regno = REGNO (*x);
      /* Only registers used before any definition in this block are
	 interesting; skip ones already killed or made available.  */
      if (bitmap_bit_p (bb_info->killed, regno)
	  || bitmap_bit_p (bb_info->avloc, regno))
	return 0;
      pref_class = reg_preferred_class (regno);
      alt_class = reg_alternate_class (regno);
      for (i = VARRAY_ACTIVE_SIZE (earlyclobber_regclass) - 1; i >= 0; i--)
	if (regclass_intersect (VARRAY_INT (earlyclobber_regclass, i),
				pref_class)
	    || (VARRAY_INT (earlyclobber_regclass, i) != NO_REGS
		&& regclass_intersect (VARRAY_INT (earlyclobber_regclass, i),
				       alt_class)))
	  {
	    bitmap_set_bit (bb_info->earlyclobber, regno);
	    break;
	  }
    }
  return 0;
}
2230 /* The function processes all pseudo-registers in *X with the aid of
2231 previous function. */
2233 static void
2234 mark_reg_use_for_earlyclobber_1 (rtx *x, void *data)
2236 for_each_rtx (x, mark_reg_use_for_earlyclobber, data);
2239 /* The function calculates local info for each basic block. */
2241 static void
2242 calculate_local_reg_bb_info (void)
2244 basic_block bb;
2245 rtx insn, bound;
2247 VARRAY_INT_INIT (earlyclobber_regclass, 20,
2248 "classes of registers early clobbered in an insn");
2249 FOR_EACH_BB (bb)
2251 bound = NEXT_INSN (BB_END (bb));
2252 for (insn = BB_HEAD (bb); insn != bound; insn = NEXT_INSN (insn))
2253 if (INSN_P (insn))
2255 note_stores (PATTERN (insn), mark_reg_change, bb);
2256 check_earlyclobber (insn);
2257 note_uses (&PATTERN (insn), mark_reg_use_for_earlyclobber_1, bb);
2262 /* The function sets up reverse post-order number of each basic
2263 block. */
2265 static void
2266 set_up_bb_rts_numbers (void)
2268 int i;
2269 int *rts_order;
2271 rts_order = xmalloc (sizeof (int) * n_basic_blocks);
2272 flow_reverse_top_sort_order_compute (rts_order);
2273 for (i = 0; i < n_basic_blocks; i++)
2274 BB_INFO_BY_INDEX (rts_order [i])->rts_number = i;
2275 free (rts_order);
2278 /* Compare function for sorting blocks in reverse postorder. */
2280 static int
2281 rpost_cmp (const void *bb1, const void *bb2)
2283 basic_block b1 = *(basic_block *) bb1, b2 = *(basic_block *) bb2;
2285 return BB_INFO (b2)->rts_number - BB_INFO (b1)->rts_number;
2288 /* The function calculates partial availability of registers. The
2289 function calculates partial availability at the end of basic block
2290 BB by propagating partial availability at end of predecessor basic
2291 block PRED. The function returns true if the partial availability
2292 at the end of BB has been changed or if CHANGED_P. We have the
2293 following equations:
2295 bb.pavin = empty for entry block | union (pavout of predecessors)
2296 bb.pavout = union (bb.pavin - b.killed, bb.avloc) */
2298 static bool
2299 modify_bb_reg_pav (basic_block bb, basic_block pred, bool changed_p)
2301 struct bb_info *bb_info;
2302 bitmap bb_pavin, bb_pavout;
2304 bb_info = BB_INFO (bb);
2305 bb_pavin = bb_info->pavin;
2306 bb_pavout = bb_info->pavout;
2307 if (pred->index != ENTRY_BLOCK)
2308 bitmap_a_or_b (bb_pavin, bb_pavin, BB_INFO (pred)->pavout);
2309 changed_p |= bitmap_union_of_diff (bb_pavout, bb_info->avloc,
2310 bb_pavin, bb_info->killed);
2311 return changed_p;
/* The function calculates partial register availability for the whole
   CFG with an iterative worklist algorithm: blocks are processed in
   reverse post-order, and whenever a block's PAVOUT changes its
   successors are queued for the next round.  */

static void
calculate_reg_pav (void)
{
  basic_block bb, succ;
  edge e;
  bool changed_p;
  int i, nel;
  /* Current worklist, worklist for the next iteration, and a swap
     temporary.  */
  varray_type bbs, new_bbs, temp;
  basic_block *bb_array;
  /* Set of block indices already queued in NEW_BBS, to avoid
     duplicates.  */
  sbitmap wset;

  VARRAY_BB_INIT (bbs, n_basic_blocks, "basic blocks");
  VARRAY_BB_INIT (new_bbs, n_basic_blocks, "basic blocks for the next iter.");
  /* Start from all blocks.  */
  FOR_EACH_BB (bb)
    {
      VARRAY_PUSH_BB (bbs, bb);
    }
  wset = sbitmap_alloc (n_basic_blocks + 1);
  while (VARRAY_ACTIVE_SIZE (bbs))
    {
      bb_array = &VARRAY_BB (bbs, 0);
      nel = VARRAY_ACTIVE_SIZE (bbs);
      /* Processing in reverse post-order speeds convergence.  */
      qsort (bb_array, nel, sizeof (basic_block), rpost_cmp);
      sbitmap_zero (wset);
      for (i = 0; i < nel; i++)
	{
	  bb = bb_array [i];
	  changed_p = 0;
	  for (e = bb->pred; e; e = e->pred_next)
	    changed_p = modify_bb_reg_pav (bb, e->src, changed_p);
	  if (changed_p)
	    for (e = bb->succ; e; e = e->succ_next)
	      {
		succ = e->dest;
		if (succ->index != EXIT_BLOCK && !TEST_BIT (wset, succ->index))
		  {
		    SET_BIT (wset, succ->index);
		    VARRAY_PUSH_BB (new_bbs, succ);
		  }
	      }
	}
      /* Swap the worklists and clear the one for the next round.  */
      temp = bbs;
      bbs = new_bbs;
      new_bbs = temp;
      VARRAY_POP_ALL (new_bbs);
    }
  sbitmap_free (wset);
}
/* The function modifies partial availability information for two
   special cases to prevent incorrect work of the subsequent passes
   with the accurate live information based on the partial
   availability.  */

static void
modify_reg_pav (void)
{
  basic_block bb;
  struct bb_info *bb_info;
#ifdef STACK_REGS
  int i;
  HARD_REG_SET zero, stack_hard_regs, used;
  /* Pseudos whose preferred or alternate class can use a stack
     register.  */
  bitmap stack_regs;

  CLEAR_HARD_REG_SET (zero);
  CLEAR_HARD_REG_SET (stack_hard_regs);
  for (i = FIRST_STACK_REG; i <= LAST_STACK_REG; i++)
    SET_HARD_REG_BIT(stack_hard_regs, i);
  stack_regs = BITMAP_XMALLOC ();
  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    {
      COPY_HARD_REG_SET (used, reg_class_contents[reg_preferred_class (i)]);
      IOR_HARD_REG_SET (used, reg_class_contents[reg_alternate_class (i)]);
      AND_HARD_REG_SET (used, stack_hard_regs);
      GO_IF_HARD_REG_EQUAL(used, zero, skip);
      bitmap_set_bit (stack_regs, i);
    skip:
      ;
    }
#endif
  FOR_EACH_BB (bb)
    {
      bb_info = BB_INFO (bb);

      /* Reload can assign the same hard register to uninitialized
	 pseudo-register and early clobbered pseudo-register in an
	 insn if the pseudo-register is used first time in given BB
	 and not lived at the BB start.  To prevent this we don't
	 change life information for such pseudo-registers.  */
      bitmap_a_or_b (bb_info->pavin, bb_info->pavin, bb_info->earlyclobber);
#ifdef STACK_REGS
      /* We can not use the same stack register for uninitialized
	 pseudo-register and another living pseudo-register because if the
	 uninitialized pseudo-register dies, subsequent pass reg-stack
	 will be confused (it will believe that the other register
	 dies).  */
      bitmap_a_or_b (bb_info->pavin, bb_info->pavin, stack_regs);
#endif
    }
#ifdef STACK_REGS
  BITMAP_XFREE (stack_regs);
#endif
}
2420 /* The following function makes live information more accurate by
2421 modifying global_live_at_start and global_live_at_end of basic
2422 blocks. After the function call a register lives at a program
2423 point only if it is initialized on a path from CFG entry to the
2424 program point. The standard GCC life analysis permits registers to
2425 live uninitialized. */
2427 static void
2428 make_accurate_live_analysis (void)
2430 basic_block bb;
2431 struct bb_info *bb_info;
2433 max_regno = max_reg_num ();
2434 compact_blocks ();
2435 allocate_bb_info ();
2436 calculate_local_reg_bb_info ();
2437 set_up_bb_rts_numbers ();
2438 calculate_reg_pav ();
2439 modify_reg_pav ();
2440 FOR_EACH_BB (bb)
2442 bb_info = BB_INFO (bb);
2444 bitmap_a_and_b (bb->global_live_at_start, bb->global_live_at_start,
2445 bb_info->pavin);
2446 bitmap_a_and_b (bb->global_live_at_end, bb->global_live_at_end,
2447 bb_info->pavout);
2449 free_bb_info ();