/* Source: official-gcc.git, gcc/global.c
   (blob f82cd08017a15b8465778ef34133a5f75318400b).  */
/* Allocate registers for pseudo-registers that span basic blocks.
   Copyright (C) 1987, 1988, 1991, 1994, 1996, 1997, 1998,
   1999, 2000, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "machmode.h"
#include "hard-reg-set.h"
#include "rtl.h"
#include "tm_p.h"
#include "flags.h"
#include "regs.h"
#include "function.h"
#include "insn-config.h"
#include "recog.h"
#include "reload.h"
#include "output.h"
#include "toplev.h"
#include "tree-pass.h"
#include "timevar.h"
/* This pass of the compiler performs global register allocation.
   It assigns hard register numbers to all the pseudo registers
   that were not handled in local_alloc.  Assignments are recorded
   in the vector reg_renumber, not by changing the rtl code.
   (Such changes are made by final).  The entry point is
   the function global_alloc.

   After allocation is complete, the reload pass is run as a subroutine
   of this pass, so that when a pseudo reg loses its hard reg due to
   spilling it is possible to make a second attempt to find a hard
   reg for it.  The reload pass is independent in other respects
   and it is run even when stupid register allocation is in use.

   1. Assign allocation-numbers (allocnos) to the pseudo-registers
   still needing allocations and to the pseudo-registers currently
   allocated by local-alloc which may be spilled by reload.
   Set up tables reg_allocno and allocno_reg to map
   reg numbers to allocnos and vice versa.
   max_allocno gets the number of allocnos in use.

   2. Allocate a max_allocno by max_allocno conflict bit matrix and clear it.
   Allocate a max_allocno by FIRST_PSEUDO_REGISTER conflict matrix
   for conflicts between allocnos and explicit hard register use
   (which includes use of pseudo-registers allocated by local_alloc).

   3. For each basic block
    walk forward through the block, recording which
    pseudo-registers and which hardware registers are live.
    Build the conflict matrix between the pseudo-registers
    and another of pseudo-registers versus hardware registers.
    Also record the preferred hardware registers
    for each pseudo-register.

   4. Sort a table of the allocnos into order of
   desirability of the variables.

   5. Allocate the variables in that order; each if possible into
   a preferred register, else into another register.  */
/* Number of pseudo-registers which are candidates for allocation.  */

static int max_allocno;

/* Indexed by (pseudo) reg number, gives the allocno, or -1
   for pseudo registers which are not to be allocated.  */

static int *reg_allocno;
90 struct allocno
92 int reg;
93 /* Gives the number of consecutive hard registers needed by that
94 pseudo reg. */
95 int size;
97 /* Number of calls crossed by each allocno. */
98 int calls_crossed;
100 /* Number of refs to each allocno. */
101 int n_refs;
103 /* Frequency of uses of each allocno. */
104 int freq;
106 /* Guess at live length of each allocno.
107 This is actually the max of the live lengths of the regs. */
108 int live_length;
110 /* Set of hard regs conflicting with allocno N. */
112 HARD_REG_SET hard_reg_conflicts;
114 /* Set of hard regs preferred by allocno N.
115 This is used to make allocnos go into regs that are copied to or from them,
116 when possible, to reduce register shuffling. */
118 HARD_REG_SET hard_reg_preferences;
120 /* Similar, but just counts register preferences made in simple copy
121 operations, rather than arithmetic. These are given priority because
122 we can always eliminate an insn by using these, but using a register
123 in the above list won't always eliminate an insn. */
125 HARD_REG_SET hard_reg_copy_preferences;
127 /* Similar to hard_reg_preferences, but includes bits for subsequent
128 registers when an allocno is multi-word. The above variable is used for
129 allocation while this is used to build reg_someone_prefers, below. */
131 HARD_REG_SET hard_reg_full_preferences;
133 /* Set of hard registers that some later allocno has a preference for. */
135 HARD_REG_SET regs_someone_prefers;
137 #ifdef STACK_REGS
138 /* Set to true if allocno can't be allocated in the stack register. */
139 bool no_stack_reg;
140 #endif
/* Vector of allocno records, indexed by allocno number.  */
static struct allocno *allocno;

/* A vector of the integers from 0 to max_allocno-1,
   sorted in the order of first-to-be-allocated first.  */

static int *allocno_order;

/* Indexed by (pseudo) reg number, gives the number of another
   lower-numbered pseudo reg which can share a hard reg with this pseudo
   *even if the two pseudos would otherwise appear to conflict*.  */

static int *reg_may_share;
156 /* Define the number of bits in each element of `conflicts' and what
157 type that element has. We use the largest integer format on the
158 host machine. */
160 #define INT_BITS HOST_BITS_PER_WIDE_INT
161 #define INT_TYPE HOST_WIDE_INT
163 /* max_allocno by max_allocno array of bits,
164 recording whether two allocno's conflict (can't go in the same
165 hardware register).
167 `conflicts' is symmetric after the call to mirror_conflicts. */
169 static INT_TYPE *conflicts;
171 /* Number of ints require to hold max_allocno bits.
172 This is the length of a row in `conflicts'. */
174 static int allocno_row_words;
176 /* Two macros to test or store 1 in an element of `conflicts'. */
178 #define CONFLICTP(I, J) \
179 (conflicts[(I) * allocno_row_words + (unsigned) (J) / INT_BITS] \
180 & ((INT_TYPE) 1 << ((unsigned) (J) % INT_BITS)))
/* For any allocno set in ALLOCNO_SET, set ALLOCNO to that allocno,
   and execute CODE.  */
#define EXECUTE_IF_SET_IN_ALLOCNO_SET(ALLOCNO_SET, ALLOCNO, CODE)	\
do {									\
  int i_;								\
  int allocno_;								\
  INT_TYPE *p_ = (ALLOCNO_SET);						\
									\
  for (i_ = allocno_row_words - 1, allocno_ = 0; i_ >= 0;		\
       i_--, allocno_ += INT_BITS)					\
    {									\
      unsigned INT_TYPE word_ = (unsigned INT_TYPE) *p_++;		\
									\
      for ((ALLOCNO) = allocno_; word_; word_ >>= 1, (ALLOCNO)++)	\
	{								\
	  if (word_ & 1)						\
	    {CODE;}							\
	}								\
    }									\
} while (0)

/* This doesn't work for non-GNU C due to the way CODE is macro expanded.  */
#if 0
/* For any allocno that conflicts with IN_ALLOCNO, set OUT_ALLOCNO to
   the conflicting allocno, and execute CODE.  This macro assumes that
   mirror_conflicts has been run.  */
#define EXECUTE_IF_CONFLICT(IN_ALLOCNO, OUT_ALLOCNO, CODE)\
  EXECUTE_IF_SET_IN_ALLOCNO_SET (conflicts + (IN_ALLOCNO) * allocno_row_words,\
				 OUT_ALLOCNO, (CODE))
#endif
213 /* Set of hard regs currently live (during scan of all insns). */
215 static HARD_REG_SET hard_regs_live;
217 /* Set of registers that global-alloc isn't supposed to use. */
219 static HARD_REG_SET no_global_alloc_regs;
221 /* Set of registers used so far. */
223 static HARD_REG_SET regs_used_so_far;
225 /* Number of refs to each hard reg, as used by local alloc.
226 It is zero for a reg that contains global pseudos or is explicitly used. */
228 static int local_reg_n_refs[FIRST_PSEUDO_REGISTER];
230 /* Frequency of uses of given hard reg. */
231 static int local_reg_freq[FIRST_PSEUDO_REGISTER];
233 /* Guess at live length of each hard reg, as used by local alloc.
234 This is actually the sum of the live lengths of the specific regs. */
236 static int local_reg_live_length[FIRST_PSEUDO_REGISTER];
238 /* Set to 1 a bit in a vector TABLE of HARD_REG_SETs, for vector
239 element I, and hard register number J. */
241 #define SET_REGBIT(TABLE, I, J) SET_HARD_REG_BIT (allocno[I].TABLE, J)
243 /* Bit mask for allocnos live at current point in the scan. */
245 static INT_TYPE *allocnos_live;
247 /* Test, set or clear bit number I in allocnos_live,
248 a bit vector indexed by allocno. */
250 #define SET_ALLOCNO_LIVE(I) \
251 (allocnos_live[(unsigned) (I) / INT_BITS] \
252 |= ((INT_TYPE) 1 << ((unsigned) (I) % INT_BITS)))
254 #define CLEAR_ALLOCNO_LIVE(I) \
255 (allocnos_live[(unsigned) (I) / INT_BITS] \
256 &= ~((INT_TYPE) 1 << ((unsigned) (I) % INT_BITS)))
/* This is turned off because it doesn't work right for DImode.
   (And it is only used for DImode, so the other cases are worthless.)
   The problem is that it isn't true that there is NO possibility of conflict;
   only that there is no conflict if the two pseudos get the exact same regs.
   If they were allocated with a partial overlap, there would be a conflict.
   We can't safely turn off the conflict unless we have another way to
   prevent the partial overlap.

   Idea: change hard_reg_conflicts so that instead of recording which
   hard regs the allocno may not overlap, it records where the allocno
   may not start.  Change both where it is used and where it is updated.
   Then there is a way to record that (reg:DI 108) may start at 10
   but not at 9 or 11.  There is still the question of how to record
   this semi-conflict between two pseudos.  */
#if 0
/* Reg pairs for which conflict after the current insn
   is inhibited by a REG_NO_CONFLICT note.
   If the table gets full, we ignore any other notes--that is conservative.  */
#define NUM_NO_CONFLICT_PAIRS 4
/* Number of pairs in use in this insn.  */
int n_no_conflict_pairs;
static struct { int allocno1, allocno2;}
  no_conflict_pairs[NUM_NO_CONFLICT_PAIRS];
#endif /* 0 */
283 /* Record all regs that are set in any one insn.
284 Communication from mark_reg_{store,clobber} and global_conflicts. */
286 static rtx *regs_set;
287 static int n_regs_set;
289 /* All registers that can be eliminated. */
291 static HARD_REG_SET eliminable_regset;
293 static int allocno_compare (const void *, const void *);
294 static void global_conflicts (void);
295 static void mirror_conflicts (void);
296 static void expand_preferences (void);
297 static void prune_preferences (void);
298 static void find_reg (int, HARD_REG_SET, int, int, int);
299 static void record_one_conflict (int);
300 static void record_conflicts (int *, int);
301 static void mark_reg_store (rtx, rtx, void *);
302 static void mark_reg_clobber (rtx, rtx, void *);
303 static void mark_reg_conflicts (rtx);
304 static void mark_reg_death (rtx);
305 static void mark_reg_live_nc (int, enum machine_mode);
306 static void set_preference (rtx, rtx);
307 static void dump_conflicts (FILE *);
308 static void reg_becomes_live (rtx, rtx, void *);
309 static void reg_dies (int, enum machine_mode, struct insn_chain *);
311 static void allocate_bb_info (void);
312 static void free_bb_info (void);
313 static bool check_earlyclobber (rtx);
314 static void mark_reg_use_for_earlyclobber_1 (rtx *, void *);
315 static int mark_reg_use_for_earlyclobber (rtx *, void *);
316 static void calculate_local_reg_bb_info (void);
317 static void set_up_bb_rts_numbers (void);
318 static int rpost_cmp (const void *, const void *);
319 static void calculate_reg_pav (void);
320 static void modify_reg_pav (void);
321 static void make_accurate_live_analysis (void);
325 /* Perform allocation of pseudo-registers not allocated by local_alloc.
326 FILE is a file to output debugging information on,
327 or zero if such output is not desired.
329 Return value is nonzero if reload failed
330 and we must not do any more for this function. */
333 global_alloc (FILE *file)
335 int retval;
336 #ifdef ELIMINABLE_REGS
337 static const struct {const int from, to; } eliminables[] = ELIMINABLE_REGS;
338 #endif
339 int need_fp
340 = (! flag_omit_frame_pointer
341 || (current_function_calls_alloca && EXIT_IGNORE_STACK)
342 || FRAME_POINTER_REQUIRED);
344 size_t i;
345 rtx x;
347 make_accurate_live_analysis ();
349 max_allocno = 0;
351 /* A machine may have certain hard registers that
352 are safe to use only within a basic block. */
354 CLEAR_HARD_REG_SET (no_global_alloc_regs);
356 /* Build the regset of all eliminable registers and show we can't use those
357 that we already know won't be eliminated. */
358 #ifdef ELIMINABLE_REGS
359 for (i = 0; i < ARRAY_SIZE (eliminables); i++)
361 bool cannot_elim
362 = (! CAN_ELIMINATE (eliminables[i].from, eliminables[i].to)
363 || (eliminables[i].to == STACK_POINTER_REGNUM && need_fp));
365 if (!regs_asm_clobbered[eliminables[i].from])
367 SET_HARD_REG_BIT (eliminable_regset, eliminables[i].from);
369 if (cannot_elim)
370 SET_HARD_REG_BIT (no_global_alloc_regs, eliminables[i].from);
372 else if (cannot_elim)
373 error ("%s cannot be used in asm here",
374 reg_names[eliminables[i].from]);
375 else
376 regs_ever_live[eliminables[i].from] = 1;
378 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
379 if (!regs_asm_clobbered[HARD_FRAME_POINTER_REGNUM])
381 SET_HARD_REG_BIT (eliminable_regset, HARD_FRAME_POINTER_REGNUM);
382 if (need_fp)
383 SET_HARD_REG_BIT (no_global_alloc_regs, HARD_FRAME_POINTER_REGNUM);
385 else if (need_fp)
386 error ("%s cannot be used in asm here",
387 reg_names[HARD_FRAME_POINTER_REGNUM]);
388 else
389 regs_ever_live[HARD_FRAME_POINTER_REGNUM] = 1;
390 #endif
392 #else
393 if (!regs_asm_clobbered[FRAME_POINTER_REGNUM])
395 SET_HARD_REG_BIT (eliminable_regset, FRAME_POINTER_REGNUM);
396 if (need_fp)
397 SET_HARD_REG_BIT (no_global_alloc_regs, FRAME_POINTER_REGNUM);
399 else if (need_fp)
400 error ("%s cannot be used in asm here", reg_names[FRAME_POINTER_REGNUM]);
401 else
402 regs_ever_live[FRAME_POINTER_REGNUM] = 1;
403 #endif
405 /* Track which registers have already been used. Start with registers
406 explicitly in the rtl, then registers allocated by local register
407 allocation. */
409 CLEAR_HARD_REG_SET (regs_used_so_far);
410 #ifdef LEAF_REGISTERS
411 /* If we are doing the leaf function optimization, and this is a leaf
412 function, it means that the registers that take work to save are those
413 that need a register window. So prefer the ones that can be used in
414 a leaf function. */
416 const char *cheap_regs;
417 const char *const leaf_regs = LEAF_REGISTERS;
419 if (only_leaf_regs_used () && leaf_function_p ())
420 cheap_regs = leaf_regs;
421 else
422 cheap_regs = call_used_regs;
423 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
424 if (regs_ever_live[i] || cheap_regs[i])
425 SET_HARD_REG_BIT (regs_used_so_far, i);
427 #else
428 /* We consider registers that do not have to be saved over calls as if
429 they were already used since there is no cost in using them. */
430 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
431 if (regs_ever_live[i] || call_used_regs[i])
432 SET_HARD_REG_BIT (regs_used_so_far, i);
433 #endif
435 for (i = FIRST_PSEUDO_REGISTER; i < (size_t) max_regno; i++)
436 if (reg_renumber[i] >= 0)
437 SET_HARD_REG_BIT (regs_used_so_far, reg_renumber[i]);
439 /* Establish mappings from register number to allocation number
440 and vice versa. In the process, count the allocnos. */
442 reg_allocno = xmalloc (max_regno * sizeof (int));
444 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
445 reg_allocno[i] = -1;
447 /* Initialize the shared-hard-reg mapping
448 from the list of pairs that may share. */
449 reg_may_share = xcalloc (max_regno, sizeof (int));
450 for (x = regs_may_share; x; x = XEXP (XEXP (x, 1), 1))
452 int r1 = REGNO (XEXP (x, 0));
453 int r2 = REGNO (XEXP (XEXP (x, 1), 0));
454 if (r1 > r2)
455 reg_may_share[r1] = r2;
456 else
457 reg_may_share[r2] = r1;
460 for (i = FIRST_PSEUDO_REGISTER; i < (size_t) max_regno; i++)
461 /* Note that reg_live_length[i] < 0 indicates a "constant" reg
462 that we are supposed to refrain from putting in a hard reg.
463 -2 means do make an allocno but don't allocate it. */
464 if (REG_N_REFS (i) != 0 && REG_LIVE_LENGTH (i) != -1
465 /* Don't allocate pseudos that cross calls,
466 if this function receives a nonlocal goto. */
467 && (! current_function_has_nonlocal_label
468 || REG_N_CALLS_CROSSED (i) == 0)
469 /* Don't allocate pseudos that cross calls that may throw. */
470 && REG_N_THROWING_CALLS_CROSSED (i) == 0)
472 if (reg_renumber[i] < 0
473 && reg_may_share[i] && reg_allocno[reg_may_share[i]] >= 0)
474 reg_allocno[i] = reg_allocno[reg_may_share[i]];
475 else
476 reg_allocno[i] = max_allocno++;
477 gcc_assert (REG_LIVE_LENGTH (i));
479 else
480 reg_allocno[i] = -1;
482 allocno = xcalloc (max_allocno, sizeof (struct allocno));
484 for (i = FIRST_PSEUDO_REGISTER; i < (size_t) max_regno; i++)
485 if (reg_allocno[i] >= 0)
487 int num = reg_allocno[i];
488 allocno[num].reg = i;
489 allocno[num].size = PSEUDO_REGNO_SIZE (i);
490 allocno[num].calls_crossed += REG_N_CALLS_CROSSED (i);
491 allocno[num].n_refs += REG_N_REFS (i);
492 allocno[num].freq += REG_FREQ (i);
493 if (allocno[num].live_length < REG_LIVE_LENGTH (i))
494 allocno[num].live_length = REG_LIVE_LENGTH (i);
497 /* Calculate amount of usage of each hard reg by pseudos
498 allocated by local-alloc. This is to see if we want to
499 override it. */
500 memset (local_reg_live_length, 0, sizeof local_reg_live_length);
501 memset (local_reg_n_refs, 0, sizeof local_reg_n_refs);
502 memset (local_reg_freq, 0, sizeof local_reg_freq);
503 for (i = FIRST_PSEUDO_REGISTER; i < (size_t) max_regno; i++)
504 if (reg_renumber[i] >= 0)
506 int regno = reg_renumber[i];
507 int endregno = regno + hard_regno_nregs[regno][PSEUDO_REGNO_MODE (i)];
508 int j;
510 for (j = regno; j < endregno; j++)
512 local_reg_n_refs[j] += REG_N_REFS (i);
513 local_reg_freq[j] += REG_FREQ (i);
514 local_reg_live_length[j] += REG_LIVE_LENGTH (i);
518 /* We can't override local-alloc for a reg used not just by local-alloc. */
519 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
520 if (regs_ever_live[i])
521 local_reg_n_refs[i] = 0, local_reg_freq[i] = 0;
523 allocno_row_words = (max_allocno + INT_BITS - 1) / INT_BITS;
525 /* We used to use alloca here, but the size of what it would try to
526 allocate would occasionally cause it to exceed the stack limit and
527 cause unpredictable core dumps. Some examples were > 2Mb in size. */
528 conflicts = xcalloc (max_allocno * allocno_row_words, sizeof (INT_TYPE));
530 allocnos_live = xmalloc (allocno_row_words * sizeof (INT_TYPE));
532 /* If there is work to be done (at least one reg to allocate),
533 perform global conflict analysis and allocate the regs. */
535 if (max_allocno > 0)
537 /* Scan all the insns and compute the conflicts among allocnos
538 and between allocnos and hard regs. */
540 global_conflicts ();
542 mirror_conflicts ();
544 /* Eliminate conflicts between pseudos and eliminable registers. If
545 the register is not eliminated, the pseudo won't really be able to
546 live in the eliminable register, so the conflict doesn't matter.
547 If we do eliminate the register, the conflict will no longer exist.
548 So in either case, we can ignore the conflict. Likewise for
549 preferences. */
551 for (i = 0; i < (size_t) max_allocno; i++)
553 AND_COMPL_HARD_REG_SET (allocno[i].hard_reg_conflicts,
554 eliminable_regset);
555 AND_COMPL_HARD_REG_SET (allocno[i].hard_reg_copy_preferences,
556 eliminable_regset);
557 AND_COMPL_HARD_REG_SET (allocno[i].hard_reg_preferences,
558 eliminable_regset);
561 /* Try to expand the preferences by merging them between allocnos. */
563 expand_preferences ();
565 /* Determine the order to allocate the remaining pseudo registers. */
567 allocno_order = xmalloc (max_allocno * sizeof (int));
568 for (i = 0; i < (size_t) max_allocno; i++)
569 allocno_order[i] = i;
571 /* Default the size to 1, since allocno_compare uses it to divide by.
572 Also convert allocno_live_length of zero to -1. A length of zero
573 can occur when all the registers for that allocno have reg_live_length
574 equal to -2. In this case, we want to make an allocno, but not
575 allocate it. So avoid the divide-by-zero and set it to a low
576 priority. */
578 for (i = 0; i < (size_t) max_allocno; i++)
580 if (allocno[i].size == 0)
581 allocno[i].size = 1;
582 if (allocno[i].live_length == 0)
583 allocno[i].live_length = -1;
586 qsort (allocno_order, max_allocno, sizeof (int), allocno_compare);
588 prune_preferences ();
590 if (file)
591 dump_conflicts (file);
593 /* Try allocating them, one by one, in that order,
594 except for parameters marked with reg_live_length[regno] == -2. */
596 for (i = 0; i < (size_t) max_allocno; i++)
597 if (reg_renumber[allocno[allocno_order[i]].reg] < 0
598 && REG_LIVE_LENGTH (allocno[allocno_order[i]].reg) >= 0)
600 /* If we have more than one register class,
601 first try allocating in the class that is cheapest
602 for this pseudo-reg. If that fails, try any reg. */
603 if (N_REG_CLASSES > 1)
605 find_reg (allocno_order[i], 0, 0, 0, 0);
606 if (reg_renumber[allocno[allocno_order[i]].reg] >= 0)
607 continue;
609 if (reg_alternate_class (allocno[allocno_order[i]].reg) != NO_REGS)
610 find_reg (allocno_order[i], 0, 1, 0, 0);
613 free (allocno_order);
616 /* Do the reloads now while the allocno data still exist, so that we can
617 try to assign new hard regs to any pseudo regs that are spilled. */
619 #if 0 /* We need to eliminate regs even if there is no rtl code,
620 for the sake of debugging information. */
621 if (n_basic_blocks > 0)
622 #endif
624 build_insn_chain (get_insns ());
625 retval = reload (get_insns (), 1);
628 /* Clean up. */
629 free (reg_allocno);
630 free (reg_may_share);
631 free (allocno);
632 free (conflicts);
633 free (allocnos_live);
635 return retval;
638 /* Sort predicate for ordering the allocnos.
639 Returns -1 (1) if *v1 should be allocated before (after) *v2. */
641 static int
642 allocno_compare (const void *v1p, const void *v2p)
644 int v1 = *(const int *)v1p, v2 = *(const int *)v2p;
645 /* Note that the quotient will never be bigger than
646 the value of floor_log2 times the maximum number of
647 times a register can occur in one insn (surely less than 100)
648 weighted by the frequency (maximally REG_FREQ_MAX).
649 Multiplying this by 10000/REG_FREQ_MAX can't overflow. */
650 int pri1
651 = (((double) (floor_log2 (allocno[v1].n_refs) * allocno[v1].freq)
652 / allocno[v1].live_length)
653 * (10000 / REG_FREQ_MAX) * allocno[v1].size);
654 int pri2
655 = (((double) (floor_log2 (allocno[v2].n_refs) * allocno[v2].freq)
656 / allocno[v2].live_length)
657 * (10000 / REG_FREQ_MAX) * allocno[v2].size);
658 if (pri2 - pri1)
659 return pri2 - pri1;
661 /* If regs are equally good, sort by allocno,
662 so that the results of qsort leave nothing to chance. */
663 return v1 - v2;
666 /* Scan the rtl code and record all conflicts and register preferences in the
667 conflict matrices and preference tables. */
669 static void
670 global_conflicts (void)
672 unsigned i;
673 basic_block b;
674 rtx insn;
675 int *block_start_allocnos;
677 /* Make a vector that mark_reg_{store,clobber} will store in. */
678 regs_set = xmalloc (max_parallel * sizeof (rtx) * 2);
680 block_start_allocnos = xmalloc (max_allocno * sizeof (int));
682 FOR_EACH_BB (b)
684 memset (allocnos_live, 0, allocno_row_words * sizeof (INT_TYPE));
686 /* Initialize table of registers currently live
687 to the state at the beginning of this basic block.
688 This also marks the conflicts among hard registers
689 and any allocnos that are live.
691 For pseudo-regs, there is only one bit for each one
692 no matter how many hard regs it occupies.
693 This is ok; we know the size from PSEUDO_REGNO_SIZE.
694 For explicit hard regs, we cannot know the size that way
695 since one hard reg can be used with various sizes.
696 Therefore, we must require that all the hard regs
697 implicitly live as part of a multi-word hard reg
698 be explicitly marked in basic_block_live_at_start. */
701 regset old = b->il.rtl->global_live_at_start;
702 int ax = 0;
703 reg_set_iterator rsi;
705 REG_SET_TO_HARD_REG_SET (hard_regs_live, old);
706 EXECUTE_IF_SET_IN_REG_SET (old, FIRST_PSEUDO_REGISTER, i, rsi)
708 int a = reg_allocno[i];
709 if (a >= 0)
711 SET_ALLOCNO_LIVE (a);
712 block_start_allocnos[ax++] = a;
714 else if ((a = reg_renumber[i]) >= 0)
715 mark_reg_live_nc (a, PSEUDO_REGNO_MODE (i));
718 /* Record that each allocno now live conflicts with each hard reg
719 now live.
721 It is not necessary to mark any conflicts between pseudos as
722 this point, even for pseudos which are live at the start of
723 the basic block.
725 Given two pseudos X and Y and any point in the CFG P.
727 On any path to point P where X and Y are live one of the
728 following conditions must be true:
730 1. X is live at some instruction on the path that
731 evaluates Y.
733 2. Y is live at some instruction on the path that
734 evaluates X.
736 3. Either X or Y is not evaluated on the path to P
737 (i.e. it is used uninitialized) and thus the
738 conflict can be ignored.
740 In cases #1 and #2 the conflict will be recorded when we
741 scan the instruction that makes either X or Y become live. */
742 record_conflicts (block_start_allocnos, ax);
744 /* Pseudos can't go in stack regs at the start of a basic block that
745 is reached by an abnormal edge. Likewise for call clobbered regs,
746 because because caller-save, fixup_abnormal_edges, and possibly
747 the table driven EH machinery are not quite ready to handle such
748 regs live across such edges. */
750 edge e;
751 edge_iterator ei;
753 FOR_EACH_EDGE (e, ei, b->preds)
754 if (e->flags & EDGE_ABNORMAL)
755 break;
757 if (e != NULL)
759 #ifdef STACK_REGS
760 EXECUTE_IF_SET_IN_ALLOCNO_SET (allocnos_live, ax,
762 allocno[ax].no_stack_reg = 1;
764 for (ax = FIRST_STACK_REG; ax <= LAST_STACK_REG; ax++)
765 record_one_conflict (ax);
766 #endif
768 /* No need to record conflicts for call clobbered regs if we have
769 nonlocal labels around, as we don't ever try to allocate such
770 regs in this case. */
771 if (! current_function_has_nonlocal_label)
772 for (ax = 0; ax < FIRST_PSEUDO_REGISTER; ax++)
773 if (call_used_regs [ax])
774 record_one_conflict (ax);
779 insn = BB_HEAD (b);
781 /* Scan the code of this basic block, noting which allocnos
782 and hard regs are born or die. When one is born,
783 record a conflict with all others currently live. */
785 while (1)
787 RTX_CODE code = GET_CODE (insn);
788 rtx link;
790 /* Make regs_set an empty set. */
792 n_regs_set = 0;
794 if (code == INSN || code == CALL_INSN || code == JUMP_INSN)
797 #if 0
798 int i = 0;
799 for (link = REG_NOTES (insn);
800 link && i < NUM_NO_CONFLICT_PAIRS;
801 link = XEXP (link, 1))
802 if (REG_NOTE_KIND (link) == REG_NO_CONFLICT)
804 no_conflict_pairs[i].allocno1
805 = reg_allocno[REGNO (SET_DEST (PATTERN (insn)))];
806 no_conflict_pairs[i].allocno2
807 = reg_allocno[REGNO (XEXP (link, 0))];
808 i++;
810 #endif /* 0 */
812 /* Mark any registers clobbered by INSN as live,
813 so they conflict with the inputs. */
815 note_stores (PATTERN (insn), mark_reg_clobber, NULL);
817 /* Mark any registers dead after INSN as dead now. */
819 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
820 if (REG_NOTE_KIND (link) == REG_DEAD)
821 mark_reg_death (XEXP (link, 0));
823 /* Mark any registers set in INSN as live,
824 and mark them as conflicting with all other live regs.
825 Clobbers are processed again, so they conflict with
826 the registers that are set. */
828 note_stores (PATTERN (insn), mark_reg_store, NULL);
830 #ifdef AUTO_INC_DEC
831 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
832 if (REG_NOTE_KIND (link) == REG_INC)
833 mark_reg_store (XEXP (link, 0), NULL_RTX, NULL);
834 #endif
836 /* If INSN has multiple outputs, then any reg that dies here
837 and is used inside of an output
838 must conflict with the other outputs.
840 It is unsafe to use !single_set here since it will ignore an
841 unused output. Just because an output is unused does not mean
842 the compiler can assume the side effect will not occur.
843 Consider if REG appears in the address of an output and we
844 reload the output. If we allocate REG to the same hard
845 register as an unused output we could set the hard register
846 before the output reload insn. */
847 if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
848 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
849 if (REG_NOTE_KIND (link) == REG_DEAD)
851 int used_in_output = 0;
852 int i;
853 rtx reg = XEXP (link, 0);
855 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
857 rtx set = XVECEXP (PATTERN (insn), 0, i);
858 if (GET_CODE (set) == SET
859 && !REG_P (SET_DEST (set))
860 && !rtx_equal_p (reg, SET_DEST (set))
861 && reg_overlap_mentioned_p (reg, SET_DEST (set)))
862 used_in_output = 1;
864 if (used_in_output)
865 mark_reg_conflicts (reg);
868 /* Mark any registers set in INSN and then never used. */
870 while (n_regs_set-- > 0)
872 rtx note = find_regno_note (insn, REG_UNUSED,
873 REGNO (regs_set[n_regs_set]));
874 if (note)
875 mark_reg_death (XEXP (note, 0));
879 if (insn == BB_END (b))
880 break;
881 insn = NEXT_INSN (insn);
885 /* Clean up. */
886 free (block_start_allocnos);
887 free (regs_set);
889 /* Expand the preference information by looking for cases where one allocno
890 dies in an insn that sets an allocno. If those two allocnos don't conflict,
891 merge any preferences between those allocnos. */
893 static void
894 expand_preferences (void)
896 rtx insn;
897 rtx link;
898 rtx set;
900 /* We only try to handle the most common cases here. Most of the cases
901 where this wins are reg-reg copies. */
903 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
904 if (INSN_P (insn)
905 && (set = single_set (insn)) != 0
906 && REG_P (SET_DEST (set))
907 && reg_allocno[REGNO (SET_DEST (set))] >= 0)
908 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
909 if (REG_NOTE_KIND (link) == REG_DEAD
910 && REG_P (XEXP (link, 0))
911 && reg_allocno[REGNO (XEXP (link, 0))] >= 0
912 && ! CONFLICTP (reg_allocno[REGNO (SET_DEST (set))],
913 reg_allocno[REGNO (XEXP (link, 0))]))
915 int a1 = reg_allocno[REGNO (SET_DEST (set))];
916 int a2 = reg_allocno[REGNO (XEXP (link, 0))];
918 if (XEXP (link, 0) == SET_SRC (set))
920 IOR_HARD_REG_SET (allocno[a1].hard_reg_copy_preferences,
921 allocno[a2].hard_reg_copy_preferences);
922 IOR_HARD_REG_SET (allocno[a2].hard_reg_copy_preferences,
923 allocno[a1].hard_reg_copy_preferences);
926 IOR_HARD_REG_SET (allocno[a1].hard_reg_preferences,
927 allocno[a2].hard_reg_preferences);
928 IOR_HARD_REG_SET (allocno[a2].hard_reg_preferences,
929 allocno[a1].hard_reg_preferences);
930 IOR_HARD_REG_SET (allocno[a1].hard_reg_full_preferences,
931 allocno[a2].hard_reg_full_preferences);
932 IOR_HARD_REG_SET (allocno[a2].hard_reg_full_preferences,
933 allocno[a1].hard_reg_full_preferences);
937 /* Prune the preferences for global registers to exclude registers that cannot
938 be used.
940 Compute `regs_someone_prefers', which is a bitmask of the hard registers
941 that are preferred by conflicting registers of lower priority. If possible,
942 we will avoid using these registers. */
944 static void
945 prune_preferences (void)
947 int i;
948 int num;
/* Map from allocno number back to its position in allocno_order, so the
   second loop can tell whether a conflicting allocno has lower priority.  */
949 int *allocno_to_order = xmalloc (max_allocno * sizeof (int));
951 /* Scan from least important to most important.
952 For each allocno, remove from preferences registers that cannot be used,
953 either because of conflicts or register type. Then compute all registers
954 preferred by each lower-priority register that conflicts. */
956 for (i = max_allocno - 1; i >= 0; i--)
958 HARD_REG_SET temp;
960 num = allocno_order[i];
961 allocno_to_order[num] = i;
/* TEMP accumulates every hard reg this allocno must not use: its recorded
   conflicts, plus fixed regs (or all call-used regs if it crosses calls),
   plus everything outside its preferred class.  */
962 COPY_HARD_REG_SET (temp, allocno[num].hard_reg_conflicts);
964 if (allocno[num].calls_crossed == 0)
965 IOR_HARD_REG_SET (temp, fixed_reg_set);
966 else
967 IOR_HARD_REG_SET (temp, call_used_reg_set);
969 IOR_COMPL_HARD_REG_SET
970 (temp,
971 reg_class_contents[(int) reg_preferred_class (allocno[num].reg)]);
/* Strip the unusable registers out of all three preference sets.  */
973 AND_COMPL_HARD_REG_SET (allocno[num].hard_reg_preferences, temp);
974 AND_COMPL_HARD_REG_SET (allocno[num].hard_reg_copy_preferences, temp);
975 AND_COMPL_HARD_REG_SET (allocno[num].hard_reg_full_preferences, temp);
978 for (i = max_allocno - 1; i >= 0; i--)
980 /* Merge in the preferences of lower-priority registers (they have
981 already been pruned). If we also prefer some of those registers,
982 don't exclude them unless we are of a smaller size (in which case
983 we want to give the lower-priority allocno the first chance for
984 these registers). */
985 HARD_REG_SET temp, temp2;
986 int allocno2;
988 num = allocno_order[i];
990 CLEAR_HARD_REG_SET (temp);
991 CLEAR_HARD_REG_SET (temp2);
/* Walk the conflict row of NUM; only conflicting allocnos of lower
   priority (later in allocno_order) contribute.  TEMP collects
   preferences of conflicting allocnos no larger than NUM, TEMP2 those
   of strictly larger ones.  */
993 EXECUTE_IF_SET_IN_ALLOCNO_SET (conflicts + num * allocno_row_words,
994 allocno2,
996 if (allocno_to_order[allocno2] > i)
998 if (allocno[allocno2].size <= allocno[num].size)
999 IOR_HARD_REG_SET (temp,
1000 allocno[allocno2].hard_reg_full_preferences);
1001 else
1002 IOR_HARD_REG_SET (temp2,
1003 allocno[allocno2].hard_reg_full_preferences);
/* Registers NUM itself prefers are kept out of TEMP (same-or-smaller
   conflicts), but larger conflicting allocnos' preferences always count.  */
1007 AND_COMPL_HARD_REG_SET (temp, allocno[num].hard_reg_full_preferences);
1008 IOR_HARD_REG_SET (temp, temp2);
1009 COPY_HARD_REG_SET (allocno[num].regs_someone_prefers, temp);
1011 free (allocno_to_order);
1014 /* Assign a hard register to allocno NUM; look for one that is the beginning
1015 of a long enough stretch of hard regs none of which conflicts with ALLOCNO.
1016 The registers marked in PREFREGS are tried first.
1018 LOSERS, if nonzero, is a HARD_REG_SET indicating registers that cannot
1019 be used for this allocation.
1021 If ALT_REGS_P is zero, consider only the preferred class of ALLOCNO's reg.
1022 Otherwise ignore that preferred class and use the alternate class.
1024 If ACCEPT_CALL_CLOBBERED is nonzero, accept a call-clobbered hard reg that
1025 will have to be saved and restored at calls.
1027 RETRYING is nonzero if this is called from retry_global_alloc.
1029 If we find one, record it in reg_renumber.
1030 If not, do nothing. */
1032 static void
1033 find_reg (int num, HARD_REG_SET losers, int alt_regs_p, int accept_call_clobbered, int retrying)
1035 int i, best_reg, pass;
/* USED1: regs unusable for this allocno (class, fixed/call-used, LOSERS,
   conflicts).  USED2: same but without the recorded conflicts — used later
   when considering evicting local-alloc assignments.  USED: the working set
   for the current scan pass.  */
1036 HARD_REG_SET used, used1, used2;
1038 enum reg_class class = (alt_regs_p
1039 ? reg_alternate_class (allocno[num].reg)
1040 : reg_preferred_class (allocno[num].reg));
1041 enum machine_mode mode = PSEUDO_REGNO_MODE (allocno[num].reg);
/* Start with the registers categorically unavailable given the allocno's
   call-crossing status.  */
1043 if (accept_call_clobbered)
1044 COPY_HARD_REG_SET (used1, call_fixed_reg_set);
1045 else if (allocno[num].calls_crossed == 0)
1046 COPY_HARD_REG_SET (used1, fixed_reg_set);
1047 else
1048 COPY_HARD_REG_SET (used1, call_used_reg_set);
1050 /* Some registers should not be allocated in global-alloc. */
1051 IOR_HARD_REG_SET (used1, no_global_alloc_regs);
1052 if (losers)
1053 IOR_HARD_REG_SET (used1, losers);
/* Exclude everything outside the chosen register class.  */
1055 IOR_COMPL_HARD_REG_SET (used1, reg_class_contents[(int) class]);
/* Snapshot before merging conflicts: USED2 feeds the local-alloc eviction
   code below, which deliberately ignores recorded conflicts.  */
1056 COPY_HARD_REG_SET (used2, used1);
1058 IOR_HARD_REG_SET (used1, allocno[num].hard_reg_conflicts);
1060 #ifdef CANNOT_CHANGE_MODE_CLASS
1061 cannot_change_mode_set_regs (&used1, mode, allocno[num].reg);
1062 #endif
1064 /* Try each hard reg to see if it fits. Do this in two passes.
1065 In the first pass, skip registers that are preferred by some other pseudo
1066 to give it a better chance of getting one of those registers. Only if
1067 we can't get a register when excluding those do we take one of them.
1068 However, we never allocate a register for the first time in pass 0. */
1070 COPY_HARD_REG_SET (used, used1);
/* Pass 0 additionally avoids brand-new registers and regs someone of
   lower priority prefers.  */
1071 IOR_COMPL_HARD_REG_SET (used, regs_used_so_far);
1072 IOR_HARD_REG_SET (used, allocno[num].regs_someone_prefers);
1074 best_reg = -1;
/* The outer loop exits early (via the inner break leaving
   i < FIRST_PSEUDO_REGISTER) as soon as a register is found.  */
1075 for (i = FIRST_PSEUDO_REGISTER, pass = 0;
1076 pass <= 1 && i >= FIRST_PSEUDO_REGISTER;
1077 pass++)
1079 if (pass == 1)
1080 COPY_HARD_REG_SET (used, used1);
1081 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1083 #ifdef REG_ALLOC_ORDER
1084 int regno = reg_alloc_order[i];
1085 #else
1086 int regno = i;
1087 #endif
1088 if (! TEST_HARD_REG_BIT (used, regno)
1089 && HARD_REGNO_MODE_OK (regno, mode)
1090 && (allocno[num].calls_crossed == 0
1091 || accept_call_clobbered
1092 || ! HARD_REGNO_CALL_PART_CLOBBERED (regno, mode)))
1094 int j;
/* The value may need several consecutive hard regs; verify the whole
   stretch [regno, lim) is free.  */
1095 int lim = regno + hard_regno_nregs[regno][mode];
1096 for (j = regno + 1;
1097 (j < lim
1098 && ! TEST_HARD_REG_BIT (used, j));
1099 j++);
1100 if (j == lim)
1102 best_reg = regno;
1103 break;
1105 #ifndef REG_ALLOC_ORDER
1106 i = j; /* Skip starting points we know will lose */
1107 #endif
1112 /* See if there is a preferred register with the same class as the register
1113 we allocated above. Making this restriction prevents register
1114 preferencing from creating worse register allocation.
1116 Remove from the preferred registers and conflicting registers. Note that
1117 additional conflicts may have been added after `prune_preferences' was
1118 called.
1120 First do this for those registers with copy preferences, then all
1121 preferred registers. */
1123 AND_COMPL_HARD_REG_SET (allocno[num].hard_reg_copy_preferences, used);
1124 GO_IF_HARD_REG_SUBSET (allocno[num].hard_reg_copy_preferences,
1125 reg_class_contents[(int) NO_REGS], no_copy_prefs);
1127 if (best_reg >= 0)
1129 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1130 if (TEST_HARD_REG_BIT (allocno[num].hard_reg_copy_preferences, i)
1131 && HARD_REGNO_MODE_OK (i, mode)
1132 && (allocno[num].calls_crossed == 0
1133 || accept_call_clobbered
1134 || ! HARD_REGNO_CALL_PART_CLOBBERED (i, mode))
1135 && (REGNO_REG_CLASS (i) == REGNO_REG_CLASS (best_reg)
1136 || reg_class_subset_p (REGNO_REG_CLASS (i),
1137 REGNO_REG_CLASS (best_reg))
1138 || reg_class_subset_p (REGNO_REG_CLASS (best_reg),
1139 REGNO_REG_CLASS (i))))
1141 int j;
1142 int lim = i + hard_regno_nregs[i][mode];
/* Each subsequent reg in the stretch must be free and class-compatible
   with the corresponding reg of the current best choice.  */
1143 for (j = i + 1;
1144 (j < lim
1145 && ! TEST_HARD_REG_BIT (used, j)
1146 && (REGNO_REG_CLASS (j)
1147 == REGNO_REG_CLASS (best_reg + (j - i))
1148 || reg_class_subset_p (REGNO_REG_CLASS (j),
1149 REGNO_REG_CLASS (best_reg + (j - i)))
1150 || reg_class_subset_p (REGNO_REG_CLASS (best_reg + (j - i)),
1151 REGNO_REG_CLASS (j))));
1152 j++);
1153 if (j == lim)
1155 best_reg = i;
/* A copy preference beats an ordinary preference: skip that scan.  */
1156 goto no_prefs;
1160 no_copy_prefs:
1162 AND_COMPL_HARD_REG_SET (allocno[num].hard_reg_preferences, used);
1163 GO_IF_HARD_REG_SUBSET (allocno[num].hard_reg_preferences,
1164 reg_class_contents[(int) NO_REGS], no_prefs);
1166 if (best_reg >= 0)
1168 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1169 if (TEST_HARD_REG_BIT (allocno[num].hard_reg_preferences, i)
1170 && HARD_REGNO_MODE_OK (i, mode)
1171 && (allocno[num].calls_crossed == 0
1172 || accept_call_clobbered
1173 || ! HARD_REGNO_CALL_PART_CLOBBERED (i, mode))
1174 && (REGNO_REG_CLASS (i) == REGNO_REG_CLASS (best_reg)
1175 || reg_class_subset_p (REGNO_REG_CLASS (i),
1176 REGNO_REG_CLASS (best_reg))
1177 || reg_class_subset_p (REGNO_REG_CLASS (best_reg),
1178 REGNO_REG_CLASS (i))))
1180 int j;
1181 int lim = i + hard_regno_nregs[i][mode];
1182 for (j = i + 1;
1183 (j < lim
1184 && ! TEST_HARD_REG_BIT (used, j)
1185 && (REGNO_REG_CLASS (j)
1186 == REGNO_REG_CLASS (best_reg + (j - i))
1187 || reg_class_subset_p (REGNO_REG_CLASS (j),
1188 REGNO_REG_CLASS (best_reg + (j - i)))
1189 || reg_class_subset_p (REGNO_REG_CLASS (best_reg + (j - i)),
1190 REGNO_REG_CLASS (j))));
1191 j++);
1192 if (j == lim)
1194 best_reg = i;
1195 break;
1199 no_prefs:
1201 /* If we haven't succeeded yet, try with caller-saves.
1202 We need not check to see if the current function has nonlocal
1203 labels because we don't put any pseudos that are live over calls in
1204 registers in that case. */
1206 if (flag_caller_saves && best_reg < 0)
1208 /* Did not find a register. If it would be profitable to
1209 allocate a call-clobbered register and save and restore it
1210 around calls, do that. */
1211 if (! accept_call_clobbered
1212 && allocno[num].calls_crossed != 0
1213 && CALLER_SAVE_PROFITABLE (allocno[num].n_refs,
1214 allocno[num].calls_crossed))
1216 HARD_REG_SET new_losers;
1217 if (! losers)
1218 CLEAR_HARD_REG_SET (new_losers)
1219 else
1220 COPY_HARD_REG_SET (new_losers, losers);
/* Recurse once, this time accepting call-clobbered registers but
   avoiding the regs where caller-save is known to lose.  */
1222 IOR_HARD_REG_SET(new_losers, losing_caller_save_reg_set);
1223 find_reg (num, new_losers, alt_regs_p, 1, retrying);
1224 if (reg_renumber[allocno[num].reg] >= 0)
1226 caller_save_needed = 1;
1227 return;
1232 /* If we haven't succeeded yet,
1233 see if some hard reg that conflicts with us
1234 was utilized poorly by local-alloc.
1235 If so, kick out the regs that were put there by local-alloc
1236 so we can use it instead. */
1237 if (best_reg < 0 && !retrying
1238 /* Let's not bother with multi-reg allocnos. */
1239 && allocno[num].size == 1)
1241 /* Count from the end, to find the least-used ones first. */
1242 for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
1244 #ifdef REG_ALLOC_ORDER
1245 int regno = reg_alloc_order[i];
1246 #else
1247 int regno = i;
1248 #endif
1250 if (local_reg_n_refs[regno] != 0
1251 /* Don't use a reg no good for this pseudo. */
1252 && ! TEST_HARD_REG_BIT (used2, regno)
1253 && HARD_REGNO_MODE_OK (regno, mode)
1254 /* The code below assumes that we need only a single
1255 register, but the check of allocno[num].size above
1256 was not enough. Sometimes we need more than one
1257 register for a single-word value. */
1258 && hard_regno_nregs[regno][mode] == 1
1259 && (allocno[num].calls_crossed == 0
1260 || accept_call_clobbered
1261 || ! HARD_REGNO_CALL_PART_CLOBBERED (regno, mode))
1262 #ifdef CANNOT_CHANGE_MODE_CLASS
1263 && ! invalid_mode_change_p (regno, REGNO_REG_CLASS (regno),
1264 mode)
1265 #endif
1266 #ifdef STACK_REGS
1267 && (!allocno[num].no_stack_reg
1268 || regno < FIRST_STACK_REG || regno > LAST_STACK_REG)
1269 #endif
1272 /* We explicitly evaluate the divide results into temporary
1273 variables so as to avoid excess precision problems that occur
1274 on an i386-unknown-sysv4.2 (unixware) host. */
1276 double tmp1 = ((double) local_reg_freq[regno]
1277 / local_reg_live_length[regno]);
1278 double tmp2 = ((double) allocno[num].freq
1279 / allocno[num].live_length);
1281 if (tmp1 < tmp2)
1283 /* Hard reg REGNO was used less in total by local regs
1284 than it would be used by this one allocno! */
1285 int k;
/* Evict every pseudo whose local-alloc assignment overlaps REGNO.  */
1286 for (k = 0; k < max_regno; k++)
1287 if (reg_renumber[k] >= 0)
1289 int r = reg_renumber[k];
1290 int endregno
1291 = r + hard_regno_nregs[r][PSEUDO_REGNO_MODE (k)];
1293 if (regno >= r && regno < endregno)
1294 reg_renumber[k] = -1;
1297 best_reg = regno;
1298 break;
1304 /* Did we find a register? */
1306 if (best_reg >= 0)
1308 int lim, j;
1309 HARD_REG_SET this_reg;
1311 /* Yes. Record it as the hard register of this pseudo-reg. */
1312 reg_renumber[allocno[num].reg] = best_reg;
1313 /* Also of any pseudo-regs that share with it. */
1314 if (reg_may_share[allocno[num].reg])
1315 for (j = FIRST_PSEUDO_REGISTER; j < max_regno; j++)
1316 if (reg_allocno[j] == num)
1317 reg_renumber[j] = best_reg;
1319 /* Make a set of the hard regs being allocated. */
1320 CLEAR_HARD_REG_SET (this_reg);
1321 lim = best_reg + hard_regno_nregs[best_reg][mode];
1322 for (j = best_reg; j < lim; j++)
1324 SET_HARD_REG_BIT (this_reg, j);
1325 SET_HARD_REG_BIT (regs_used_so_far, j);
1326 /* This is no longer a reg used just by local regs. */
1327 local_reg_n_refs[j] = 0;
1328 local_reg_freq[j] = 0;
1330 /* For each other pseudo-reg conflicting with this one,
1331 mark it as conflicting with the hard regs this one occupies. */
1332 lim = num;
1333 EXECUTE_IF_SET_IN_ALLOCNO_SET (conflicts + lim * allocno_row_words, j,
1335 IOR_HARD_REG_SET (allocno[j].hard_reg_conflicts, this_reg);
1340 /* Called from `reload' to look for a hard reg to put pseudo reg REGNO in.
1341 Perhaps it had previously seemed not worth a hard reg,
1342 or perhaps its old hard reg has been commandeered for reloads.
1343 FORBIDDEN_REGS indicates certain hard regs that may not be used, even if
1344 they do not appear to be allocated.
1345 If FORBIDDEN_REGS is zero, no regs are forbidden. */
1347 void
1348 retry_global_alloc (int regno, HARD_REG_SET forbidden_regs)
1350 int alloc_no = reg_allocno[regno];
1351 if (alloc_no >= 0)
1353 /* If we have more than one register class,
1354 first try allocating in the class that is cheapest
1355 for this pseudo-reg. If that fails, try any reg. */
1356 if (N_REG_CLASSES > 1)
1357 find_reg (alloc_no, forbidden_regs, 0, 0, 1);
1358 if (reg_renumber[regno] < 0
1359 && reg_alternate_class (regno) != NO_REGS)
1360 find_reg (alloc_no, forbidden_regs, 1, 0, 1);
1362 /* If we found a register, modify the RTL for the register to
1363 show the hard register, and mark that register live. */
1364 if (reg_renumber[regno] >= 0)
1366 REGNO (regno_reg_rtx[regno]) = reg_renumber[regno];
1367 mark_home_live (regno);
1372 /* Record a conflict between register REGNO
1373 and everything currently live.
1374 REGNO must not be a pseudo reg that was allocated
1375 by local_alloc; such numbers must be translated through
1376 reg_renumber before calling here. */
1378 static void
1379 record_one_conflict (int regno)
1381 int j;
1383 if (regno < FIRST_PSEUDO_REGISTER)
1384 /* When a hard register becomes live,
1385 record conflicts with live pseudo regs. */
1386 EXECUTE_IF_SET_IN_ALLOCNO_SET (allocnos_live, j,
1388 SET_HARD_REG_BIT (allocno[j].hard_reg_conflicts, regno);
1390 else
1391 /* When a pseudo-register becomes live,
1392 record conflicts first with hard regs,
1393 then with other pseudo regs. */
1395 int ialloc = reg_allocno[regno];
/* Index of this allocno's row in the conflicts bit matrix.  */
1396 int ialloc_prod = ialloc * allocno_row_words;
1398 IOR_HARD_REG_SET (allocno[ialloc].hard_reg_conflicts, hard_regs_live);
/* OR the set of live allocnos into this allocno's conflict row.  */
1399 for (j = allocno_row_words - 1; j >= 0; j--)
1400 conflicts[ialloc_prod + j] |= allocnos_live[j];
1404 /* Record all allocnos currently live as conflicting
1405 with all hard regs currently live.
1407 ALLOCNO_VEC is a vector of LEN allocnos, all allocnos that
1408 are currently live. Their bits are also flagged in allocnos_live. */
1410 static void
1411 record_conflicts (int *allocno_vec, int len)
1413 while (--len >= 0)
1414 IOR_HARD_REG_SET (allocno[allocno_vec[len]].hard_reg_conflicts,
1415 hard_regs_live);
1418 /* If CONFLICTP (i, j) is true, make sure CONFLICTP (j, i) is also true. */
1419 static void
1420 mirror_conflicts (void)
1422 int i, j;
1423 int rw = allocno_row_words;
/* Number of bits in one row of the conflict matrix.  */
1424 int rwb = rw * INT_BITS;
/* P walks the matrix row-major; Q0/Q1/Q2 locate the transposed bit
   positions to set.  */
1425 INT_TYPE *p = conflicts;
1426 INT_TYPE *q0 = conflicts, *q1, *q2;
1427 unsigned INT_TYPE mask;
/* MASK is the single bit for allocno I within a row word; Q0 advances to
   the next word each time MASK wraps around.  */
1429 for (i = max_allocno - 1, mask = 1; i >= 0; i--, mask <<= 1)
1431 if (! mask)
1433 mask = 1;
1434 q0++;
1436 for (j = allocno_row_words - 1, q1 = q0; j >= 0; j--, q1 += rwb)
1438 unsigned INT_TYPE word;
/* For each bit set in row I, set the mirrored bit (column I) in the
   corresponding row J, stepping Q2 one row (RW words) per bit.  */
1440 for (word = (unsigned INT_TYPE) *p++, q2 = q1; word;
1441 word >>= 1, q2 += rw)
1443 if (word & 1)
1444 *q2 |= mask;
1450 /* Handle the case where REG is set by the insn being scanned,
1451 during the forward scan to accumulate conflicts.
1452 Store a 1 in regs_live or allocnos_live for this register, record how many
1453 consecutive hardware registers it actually needs,
1454 and record a conflict with all other registers already live.
1456 Note that even if REG does not remain alive after this insn,
1457 we must mark it here as live, to ensure a conflict between
1458 REG and any other regs set in this insn that really do live.
1459 This is because those other regs could be considered after this.
1461 REG might actually be something other than a register;
1462 if so, we do nothing.
1464 SETTER is 0 if this register was modified by an auto-increment (i.e.,
1465 a REG_INC note was found for it). */
1467 static void
1468 mark_reg_store (rtx reg, rtx setter, void *data ATTRIBUTE_UNUSED)
1470 int regno;
/* Look through a SUBREG to the underlying register.  */
1472 if (GET_CODE (reg) == SUBREG)
1473 reg = SUBREG_REG (reg);
1475 if (!REG_P (reg))
1476 return;
/* Remember this reg so the caller can clear its liveness after the insn.  */
1478 regs_set[n_regs_set++] = reg;
/* A real SET (not a CLOBBER, not auto-inc) may establish a preference
   between destination and source.  */
1480 if (setter && GET_CODE (setter) != CLOBBER)
1481 set_preference (reg, SET_SRC (setter));
1483 regno = REGNO (reg);
1485 /* Either this is one of the max_allocno pseudo regs not allocated,
1486 or it is or has a hardware reg. First handle the pseudo-regs. */
1487 if (regno >= FIRST_PSEUDO_REGISTER)
1489 if (reg_allocno[regno] >= 0)
1491 SET_ALLOCNO_LIVE (reg_allocno[regno]);
1492 record_one_conflict (regno);
/* Translate a local-alloc'ed pseudo to its hard register.  */
1496 if (reg_renumber[regno] >= 0)
1497 regno = reg_renumber[regno];
1499 /* Handle hardware regs (and pseudos allocated to hard regs). */
1500 if (regno < FIRST_PSEUDO_REGISTER && ! fixed_regs[regno])
1502 int last = regno + hard_regno_nregs[regno][GET_MODE (reg)];
1503 while (regno < last)
1505 record_one_conflict (regno);
1506 SET_HARD_REG_BIT (hard_regs_live, regno);
1507 regno++;
1512 /* Like mark_reg_set except notice just CLOBBERs; ignore SETs. */
1514 static void
1515 mark_reg_clobber (rtx reg, rtx setter, void *data)
1517 if (GET_CODE (setter) == CLOBBER)
1518 mark_reg_store (reg, setter, data);
1521 /* Record that REG has conflicts with all the regs currently live.
1522 Do not mark REG itself as live. */
1524 static void
1525 mark_reg_conflicts (rtx reg)
1527 int regno;
/* Look through a SUBREG to the underlying register.  */
1529 if (GET_CODE (reg) == SUBREG)
1530 reg = SUBREG_REG (reg);
1532 if (!REG_P (reg))
1533 return;
1535 regno = REGNO (reg);
1537 /* Either this is one of the max_allocno pseudo regs not allocated,
1538 or it is or has a hardware reg. First handle the pseudo-regs. */
1539 if (regno >= FIRST_PSEUDO_REGISTER)
1541 if (reg_allocno[regno] >= 0)
1542 record_one_conflict (regno);
/* Translate a local-alloc'ed pseudo to its hard register.  */
1545 if (reg_renumber[regno] >= 0)
1546 regno = reg_renumber[regno];
1548 /* Handle hardware regs (and pseudos allocated to hard regs). */
1549 if (regno < FIRST_PSEUDO_REGISTER && ! fixed_regs[regno])
1551 int last = regno + hard_regno_nregs[regno][GET_MODE (reg)];
1552 while (regno < last)
1554 record_one_conflict (regno);
1555 regno++;
1560 /* Mark REG as being dead (following the insn being scanned now).
1561 Store a 0 in regs_live or allocnos_live for this register. */
1563 static void
1564 mark_reg_death (rtx reg)
1566 int regno = REGNO (reg);
1568 /* Either this is one of the max_allocno pseudo regs not allocated,
1569 or it is a hardware reg. First handle the pseudo-regs. */
1570 if (regno >= FIRST_PSEUDO_REGISTER)
1572 if (reg_allocno[regno] >= 0)
1573 CLEAR_ALLOCNO_LIVE (reg_allocno[regno]);
1576 /* For pseudo reg, see if it has been assigned a hardware reg. */
1577 if (reg_renumber[regno] >= 0)
1578 regno = reg_renumber[regno];
1580 /* Handle hardware regs (and pseudos allocated to hard regs). */
1581 if (regno < FIRST_PSEUDO_REGISTER && ! fixed_regs[regno])
1583 /* Pseudo regs already assigned hardware regs are treated
1584 almost the same as explicit hardware regs. */
1585 int last = regno + hard_regno_nregs[regno][GET_MODE (reg)];
/* Clear the live bit for every hard register the value occupied.  */
1586 while (regno < last)
1588 CLEAR_HARD_REG_BIT (hard_regs_live, regno);
1589 regno++;
1594 /* Mark hard reg REGNO as currently live, assuming machine mode MODE
1595 for the value stored in it. MODE determines how many consecutive
1596 registers are actually in use. Do not record conflicts;
1597 it is assumed that the caller will do that. */
1599 static void
1600 mark_reg_live_nc (int regno, enum machine_mode mode)
1602 int last = regno + hard_regno_nregs[regno][mode];
1603 while (regno < last)
1605 SET_HARD_REG_BIT (hard_regs_live, regno);
1606 regno++;
1610 /* Try to set a preference for an allocno to a hard register.
1611 We are passed DEST and SRC which are the operands of a SET. It is known
1612 that SRC is a register. If SRC or the first operand of SRC is a register,
1613 try to set a preference. If one of the two is a hard register and the other
1614 is a pseudo-register, mark the preference.
1616 Note that we are not as aggressive as local-alloc in trying to tie a
1617 pseudo-register to a hard register. */
1619 static void
1620 set_preference (rtx dest, rtx src)
1622 unsigned int src_regno, dest_regno;
1623 /* Amount to add to the hard regno for SRC, or subtract from that for DEST,
1624 to compensate for subregs in SRC or DEST. */
1625 int offset = 0;
1626 unsigned int i;
/* COPY is 1 for a plain register copy; it drops to 0 if we had to look
   inside an operation to find the register.  */
1627 int copy = 1;
/* If SRC is an operation whose first operand is an expression, look at
   that operand instead; this is then no longer a simple copy.  */
1629 if (GET_RTX_FORMAT (GET_CODE (src))[0] == 'e')
1630 src = XEXP (src, 0), copy = 0;
1632 /* Get the reg number for both SRC and DEST.
1633 If neither is a reg, give up. */
1635 if (REG_P (src))
1636 src_regno = REGNO (src);
1637 else if (GET_CODE (src) == SUBREG && REG_P (SUBREG_REG (src)))
1639 src_regno = REGNO (SUBREG_REG (src));
/* A subreg of a hard reg shifts the hard regno; a subreg of a pseudo
   shifts by whole natural-size words.  */
1641 if (REGNO (SUBREG_REG (src)) < FIRST_PSEUDO_REGISTER)
1642 offset += subreg_regno_offset (REGNO (SUBREG_REG (src)),
1643 GET_MODE (SUBREG_REG (src)),
1644 SUBREG_BYTE (src),
1645 GET_MODE (src));
1646 else
1647 offset += (SUBREG_BYTE (src)
1648 / REGMODE_NATURAL_SIZE (GET_MODE (src)));
1650 else
1651 return;
1653 if (REG_P (dest))
1654 dest_regno = REGNO (dest);
1655 else if (GET_CODE (dest) == SUBREG && REG_P (SUBREG_REG (dest)))
1657 dest_regno = REGNO (SUBREG_REG (dest));
/* DEST subregs adjust OFFSET in the opposite direction.  */
1659 if (REGNO (SUBREG_REG (dest)) < FIRST_PSEUDO_REGISTER)
1660 offset -= subreg_regno_offset (REGNO (SUBREG_REG (dest)),
1661 GET_MODE (SUBREG_REG (dest)),
1662 SUBREG_BYTE (dest),
1663 GET_MODE (dest));
1664 else
1665 offset -= (SUBREG_BYTE (dest)
1666 / REGMODE_NATURAL_SIZE (GET_MODE (dest)));
1668 else
1669 return;
1671 /* Convert either or both to hard reg numbers. */
1673 if (reg_renumber[src_regno] >= 0)
1674 src_regno = reg_renumber[src_regno];
1676 if (reg_renumber[dest_regno] >= 0)
1677 dest_regno = reg_renumber[dest_regno];
1679 /* Now if one is a hard reg and the other is a global pseudo
1680 then give the other a preference. */
1682 if (dest_regno < FIRST_PSEUDO_REGISTER && src_regno >= FIRST_PSEUDO_REGISTER
1683 && reg_allocno[src_regno] >= 0)
1685 dest_regno -= offset;
1686 if (dest_regno < FIRST_PSEUDO_REGISTER)
1688 if (copy)
1689 SET_REGBIT (hard_reg_copy_preferences,
1690 reg_allocno[src_regno], dest_regno);
1692 SET_REGBIT (hard_reg_preferences,
1693 reg_allocno[src_regno], dest_regno);
/* Mark every hard reg the destination occupies as fully preferred.  */
1694 for (i = dest_regno;
1695 i < dest_regno + hard_regno_nregs[dest_regno][GET_MODE (dest)];
1696 i++)
1697 SET_REGBIT (hard_reg_full_preferences, reg_allocno[src_regno], i);
/* Symmetric case: hard-reg source feeding a global pseudo destination.  */
1701 if (src_regno < FIRST_PSEUDO_REGISTER && dest_regno >= FIRST_PSEUDO_REGISTER
1702 && reg_allocno[dest_regno] >= 0)
1704 src_regno += offset;
1705 if (src_regno < FIRST_PSEUDO_REGISTER)
1707 if (copy)
1708 SET_REGBIT (hard_reg_copy_preferences,
1709 reg_allocno[dest_regno], src_regno);
1711 SET_REGBIT (hard_reg_preferences,
1712 reg_allocno[dest_regno], src_regno);
1713 for (i = src_regno;
1714 i < src_regno + hard_regno_nregs[src_regno][GET_MODE (src)];
1715 i++)
1716 SET_REGBIT (hard_reg_full_preferences, reg_allocno[dest_regno], i);
1721 /* Indicate that hard register number FROM was eliminated and replaced with
1722 an offset from hard register number TO. The status of hard registers live
1723 at the start of a basic block is updated by replacing a use of FROM with
1724 a use of TO. */
1726 void
1727 mark_elimination (int from, int to)
1729 basic_block bb;
1731 FOR_EACH_BB (bb)
1733 regset r = bb->il.rtl->global_live_at_start;
1734 if (REGNO_REG_SET_P (r, from))
1736 CLEAR_REGNO_REG_SET (r, from);
1737 SET_REGNO_REG_SET (r, to);
1742 /* Used for communication between the following functions. Holds the
1743 current life information. */
1744 static regset live_relevant_regs;
1746 /* Record in live_relevant_regs and REGS_SET that register REG became live.
1747 This is called via note_stores. */
1748 static void
1749 reg_becomes_live (rtx reg, rtx setter ATTRIBUTE_UNUSED, void *regs_set)
1751 int regno;
/* Look through a SUBREG to the underlying register.  */
1753 if (GET_CODE (reg) == SUBREG)
1754 reg = SUBREG_REG (reg);
1756 if (!REG_P (reg))
1757 return;
1759 regno = REGNO (reg);
1760 if (regno < FIRST_PSEUDO_REGISTER)
/* A hard register: mark each register it occupies; fixed regs are live
   but never recorded as set for reload's purposes.  */
1762 int nregs = hard_regno_nregs[regno][GET_MODE (reg)];
1763 while (nregs-- > 0)
1765 SET_REGNO_REG_SET (live_relevant_regs, regno);
1766 if (! fixed_regs[regno])
1767 SET_REGNO_REG_SET ((regset) regs_set, regno);
1768 regno++;
/* Only pseudos that received a hard register are relevant here.  */
1771 else if (reg_renumber[regno] >= 0)
1773 SET_REGNO_REG_SET (live_relevant_regs, regno);
1774 SET_REGNO_REG_SET ((regset) regs_set, regno);
1778 /* Record in live_relevant_regs that register REGNO died. */
1779 static void
1780 reg_dies (int regno, enum machine_mode mode, struct insn_chain *chain)
1782 if (regno < FIRST_PSEUDO_REGISTER)
1784 int nregs = hard_regno_nregs[regno][mode];
1785 while (nregs-- > 0)
1787 CLEAR_REGNO_REG_SET (live_relevant_regs, regno);
1788 if (! fixed_regs[regno])
1789 SET_REGNO_REG_SET (&chain->dead_or_set, regno);
1790 regno++;
1793 else
1795 CLEAR_REGNO_REG_SET (live_relevant_regs, regno);
1796 if (reg_renumber[regno] >= 0)
1797 SET_REGNO_REG_SET (&chain->dead_or_set, regno);
1801 /* Walk the insns of the current function and build reload_insn_chain,
1802 and record register life information. */
1803 void
1804 build_insn_chain (rtx first)
1806 struct insn_chain **p = &reload_insn_chain;
1807 struct insn_chain *prev = 0;
/* B tracks the basic block containing FIRST as we scan forward.  */
1808 basic_block b = ENTRY_BLOCK_PTR->next_bb;
1810 live_relevant_regs = ALLOC_REG_SET (&reg_obstack);
1812 for (; first; first = NEXT_INSN (first))
1814 struct insn_chain *c;
/* At the head of each block, reset the live set from the block's
   global_live_at_start, keeping only reload-relevant registers.  */
1816 if (first == BB_HEAD (b))
1818 unsigned i;
1819 bitmap_iterator bi;
1821 CLEAR_REG_SET (live_relevant_regs);
1823 EXECUTE_IF_SET_IN_BITMAP (b->il.rtl->global_live_at_start, 0, i, bi)
1825 if (i < FIRST_PSEUDO_REGISTER
1826 ? ! TEST_HARD_REG_BIT (eliminable_regset, i)
1827 : reg_renumber[i] >= 0)
1828 SET_REGNO_REG_SET (live_relevant_regs, i);
/* Notes and barriers get no chain element.  */
1832 if (!NOTE_P (first) && !BARRIER_P (first))
1834 c = new_insn_chain ();
1835 c->prev = prev;
1836 prev = c;
1837 *p = c;
1838 p = &c->next;
1839 c->insn = first;
1840 c->block = b->index;
1842 if (INSN_P (first))
1844 rtx link;
1846 /* Mark the death of everything that dies in this instruction. */
1848 for (link = REG_NOTES (first); link; link = XEXP (link, 1))
1849 if (REG_NOTE_KIND (link) == REG_DEAD
1850 && REG_P (XEXP (link, 0)))
1851 reg_dies (REGNO (XEXP (link, 0)), GET_MODE (XEXP (link, 0)),
/* live_throughout is the live set after deaths but before births.  */
1854 COPY_REG_SET (&c->live_throughout, live_relevant_regs);
1856 /* Mark everything born in this instruction as live. */
1858 note_stores (PATTERN (first), reg_becomes_live,
1859 &c->dead_or_set);
1861 else
1862 COPY_REG_SET (&c->live_throughout, live_relevant_regs);
1864 if (INSN_P (first))
1866 rtx link;
1868 /* Mark anything that is set in this insn and then unused as dying. */
1870 for (link = REG_NOTES (first); link; link = XEXP (link, 1))
1871 if (REG_NOTE_KIND (link) == REG_UNUSED
1872 && REG_P (XEXP (link, 0)))
1873 reg_dies (REGNO (XEXP (link, 0)), GET_MODE (XEXP (link, 0)),
1878 if (first == BB_END (b))
1879 b = b->next_bb;
1881 /* Stop after we pass the end of the last basic block. Verify that
1882 no real insns are after the end of the last basic block.
1884 We may want to reorganize the loop somewhat since this test should
1885 always be the right exit test. Allow an ADDR_VEC or ADDR_DIF_VEC if
1886 the previous real insn is a JUMP_INSN. */
1887 if (b == EXIT_BLOCK_PTR)
1889 #ifdef ENABLE_CHECKING
1890 for (first = NEXT_INSN (first); first; first = NEXT_INSN (first))
1891 gcc_assert (!INSN_P (first)
1892 || GET_CODE (PATTERN (first)) == USE
1893 || ((GET_CODE (PATTERN (first)) == ADDR_VEC
1894 || GET_CODE (PATTERN (first)) == ADDR_DIFF_VEC)
1895 && prev_real_insn (first) != 0
1896 && JUMP_P (prev_real_insn (first))));
1897 #endif
1898 break;
/* Terminate the chain list.  */
1901 FREE_REG_SET (live_relevant_regs);
1902 *p = 0;
1905 /* Print debugging trace information if -dg switch is given,
1906 showing the information on which the allocation decisions are based. */
1908 static void
1909 dump_conflicts (FILE *file)
1911 int i;
1912 int has_preferences;
1913 int nregs;
/* Count how many allocnos still lack a hard register.  */
1914 nregs = 0;
1915 for (i = 0; i < max_allocno; i++)
1917 if (reg_renumber[allocno[allocno_order[i]].reg] >= 0)
1918 continue;
1919 nregs++;
1921 fprintf (file, ";; %d regs to allocate:", nregs);
/* List the unallocated pseudos, any sharing pseudos, and their sizes.  */
1922 for (i = 0; i < max_allocno; i++)
1924 int j;
1925 if (reg_renumber[allocno[allocno_order[i]].reg] >= 0)
1926 continue;
1927 fprintf (file, " %d", allocno[allocno_order[i]].reg);
1928 for (j = 0; j < max_regno; j++)
1929 if (reg_allocno[j] == allocno_order[i]
1930 && j != allocno[allocno_order[i]].reg)
1931 fprintf (file, "+%d", j);
1932 if (allocno[allocno_order[i]].size != 1)
1933 fprintf (file, " (%d)", allocno[allocno_order[i]].size);
1935 fprintf (file, "\n");
/* For each allocno, print the pseudos and hard regs it conflicts with.  */
1937 for (i = 0; i < max_allocno; i++)
1939 int j;
1940 fprintf (file, ";; %d conflicts:", allocno[i].reg);
1941 for (j = 0; j < max_allocno; j++)
1942 if (CONFLICTP (j, i))
1943 fprintf (file, " %d", allocno[j].reg);
1944 for (j = 0; j < FIRST_PSEUDO_REGISTER; j++)
1945 if (TEST_HARD_REG_BIT (allocno[i].hard_reg_conflicts, j))
1946 fprintf (file, " %d", j);
1947 fprintf (file, "\n");
/* Only print a preferences line when there is at least one.  */
1949 has_preferences = 0;
1950 for (j = 0; j < FIRST_PSEUDO_REGISTER; j++)
1951 if (TEST_HARD_REG_BIT (allocno[i].hard_reg_preferences, j))
1952 has_preferences = 1;
1954 if (! has_preferences)
1955 continue;
1956 fprintf (file, ";; %d preferences:", allocno[i].reg);
1957 for (j = 0; j < FIRST_PSEUDO_REGISTER; j++)
1958 if (TEST_HARD_REG_BIT (allocno[i].hard_reg_preferences, j))
1959 fprintf (file, " %d", j);
1960 fprintf (file, "\n");
1962 fprintf (file, "\n");
1965 void
1966 dump_global_regs (FILE *file)
1968 int i, j;
1970 fprintf (file, ";; Register dispositions:\n");
1971 for (i = FIRST_PSEUDO_REGISTER, j = 0; i < max_regno; i++)
1972 if (reg_renumber[i] >= 0)
1974 fprintf (file, "%d in %d ", i, reg_renumber[i]);
1975 if (++j % 6 == 0)
1976 fprintf (file, "\n");
1979 fprintf (file, "\n\n;; Hard regs used: ");
1980 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1981 if (regs_ever_live[i])
1982 fprintf (file, " %d", i);
1983 fprintf (file, "\n\n");
1988 /* This page contains code to make live information more accurate.
1989 The accurate register liveness at program point P means:
1990 o there is a path from P to usage of the register and the
1991 register is not redefined or killed on the path.
1992 o register at P is partially available, i.e. there is a path from
1993 a register definition to the point P and the register is not
1994 killed (clobbered) on the path
1996 The standard GCC live information means only the first condition.
1997 Without the partial availability, there will be more register
1998 conflicts and as a consequence worse register allocation. The
1999 typical example where the information can be different is a
2000 register initialized in the loop at the basic block preceding the
2001 loop in CFG. */
2003 /* The following structure contains basic block data flow information
2004 used to calculate partial availability of registers. */
2006 struct bb_info
2008 /* The basic block reverse post-order number. */
2009 int rts_number;
2010 /* Registers used uninitialized in an insn in which there is an
2011 early clobbered register might get the same hard register. */
2012 bitmap earlyclobber;
2013 /* Registers correspondingly killed (clobbered) and defined but not
2014 killed afterward in the basic block. */
2015 bitmap killed, avloc;
2016 /* Registers partially available and living (in other words whose
2017 values were calculated and used) correspondingly at the start
2018 and end of the basic block. */
2019 bitmap live_pavin, live_pavout;
2022 /* Macros for accessing data flow information of basic blocks. */
2024 #define BB_INFO(BB) ((struct bb_info *) (BB)->aux)
2025 #define BB_INFO_BY_INDEX(N) BB_INFO (BASIC_BLOCK(N))
2027 /* The function allocates the info structures of each basic block. It
2028 also initializes LIVE_PAVIN and LIVE_PAVOUT as if all hard
2029 registers were partially available. */
2031 static void
2032 allocate_bb_info (void)
2034 int i;
2035 basic_block bb;
2036 struct bb_info *bb_info;
2037 bitmap init;
/* Attach a struct bb_info to every block's aux field.  */
2039 alloc_aux_for_blocks (sizeof (struct bb_info));
/* INIT holds all hard registers; it seeds each block's pavin/pavout.  */
2040 init = BITMAP_ALLOC (NULL);
2041 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2042 bitmap_set_bit (init, i);
2043 FOR_EACH_BB (bb)
2045 bb_info = bb->aux;
2046 bb_info->earlyclobber = BITMAP_ALLOC (NULL);
2047 bb_info->avloc = BITMAP_ALLOC (NULL);
2048 bb_info->killed = BITMAP_ALLOC (NULL);
2049 bb_info->live_pavin = BITMAP_ALLOC (NULL);
2050 bb_info->live_pavout = BITMAP_ALLOC (NULL);
2051 bitmap_copy (bb_info->live_pavin, init);
2052 bitmap_copy (bb_info->live_pavout, init);
2054 BITMAP_FREE (init);
2057 /* The function frees the allocated info of all basic blocks. */
2059 static void
2060 free_bb_info (void)
2062 basic_block bb;
2063 struct bb_info *bb_info;
2065 FOR_EACH_BB (bb)
2067 bb_info = BB_INFO (bb);
2068 BITMAP_FREE (bb_info->live_pavout);
2069 BITMAP_FREE (bb_info->live_pavin);
2070 BITMAP_FREE (bb_info->killed);
2071 BITMAP_FREE (bb_info->avloc);
2072 BITMAP_FREE (bb_info->earlyclobber);
2074 free_aux_for_blocks ();
2077 /* The function modifies local info for register REG being changed in
2078 SETTER. DATA is used to pass the current basic block info. */
2080 static void
2081 mark_reg_change (rtx reg, rtx setter, void *data)
2083 int regno;
2084 basic_block bb = data;
2085 struct bb_info *bb_info = BB_INFO (bb);
2087 if (GET_CODE (reg) == SUBREG)
2088 reg = SUBREG_REG (reg);
2090 if (!REG_P (reg))
2091 return;
2093 regno = REGNO (reg);
2094 bitmap_set_bit (bb_info->killed, regno);
2096 if (GET_CODE (setter) != CLOBBER)
2097 bitmap_set_bit (bb_info->avloc, regno);
2098 else
2099 bitmap_clear_bit (bb_info->avloc, regno);
/* Classes of registers which could be early clobbered in the current
   insn.  */

DEF_VEC_I(int);
DEF_VEC_ALLOC_I(int,heap);

/* Vector of early-clobber register classes found in the insn being
   processed; holds at most N_REG_CLASSES elements (see
   check_earlyclobber).  */
static VEC(int,heap) *earlyclobber_regclass;
/* This function finds and stores register classes that could be early
   clobbered in INSN.  If any earlyclobber classes are found, the function
   returns TRUE, in all other cases it returns FALSE.  The classes found
   are accumulated (without duplicates) in earlyclobber_regclass.  */

static bool
check_earlyclobber (rtx insn)
  int opno;
  bool found = false;

  extract_insn (insn);

  VEC_truncate (int, earlyclobber_regclass, 0);
  for (opno = 0; opno < recog_data.n_operands; opno++)
      char c;
      bool amp_p;
      int i;
      enum reg_class class;
      const char *p = recog_data.constraints[opno];

      class = NO_REGS;
      amp_p = false;
      /* Scan the constraint string of this operand one letter at a
	 time, tracking whether an '&' (early clobber) was seen and
	 which register class the alternative names.  */
      for (;;)
	  c = *p;
	  switch (c)
	    case '=': case '+': case '?':
	    case '#': case '!':
	    case '*': case '%':
	    case 'm': case '<': case '>': case 'V': case 'o':
	    case 'E': case 'F': case 'G': case 'H':
	    case 's': case 'i': case 'n':
	    case 'I': case 'J': case 'K': case 'L':
	    case 'M': case 'N': case 'O': case 'P':
	    case 'X':
	    case '0': case '1': case '2': case '3': case '4':
	    case '5': case '6': case '7': case '8': case '9':
	      /* These don't say anything we care about.  */
	      break;

	    case '&':
	      amp_p = true;
	      break;
	    case '\0':
	    case ',':
	      /* End of an alternative: record its class if it was
		 early clobbered and names a real register class.  */
	      if (amp_p && class != NO_REGS)
		  int rc;

		  found = true;
		  /* Avoid recording the same class twice.  */
		  for (i = 0;
		       VEC_iterate (int, earlyclobber_regclass, i, rc);
		       i++)
		      if (rc == (int) class)
			goto found_rc;

		  /* We use VEC_quick_push here because
		     earlyclobber_regclass holds no more than
		     N_REG_CLASSES elements. */
		  VEC_quick_push (int, earlyclobber_regclass, (int) class);
		found_rc:

	      /* Reset state for the next alternative.  */
	      amp_p = false;
	      class = NO_REGS;
	      break;

	    case 'r':
	      class = GENERAL_REGS;
	      break;

	    default:
	      /* Machine-specific constraint letter; may name a class.  */
	      class = REG_CLASS_FROM_CONSTRAINT (c, p);
	      break;

	  if (c == '\0')
	    break;
	  p += CONSTRAINT_LEN (c, p);

  return found;
/* The function checks that pseudo-register *X has a class
   intersecting with the class of a pseudo-register that could be
   early clobbered in the same insn; if so, *X is recorded in the
   block's EARLYCLOBBER bitmap.
   This function is a no-op if earlyclobber_regclass is empty.

   Called through for_each_rtx; DATA is the current basic block.
   Always returns 0 so the rtx walk continues.  */

static int
mark_reg_use_for_earlyclobber (rtx *x, void *data ATTRIBUTE_UNUSED)
  enum reg_class pref_class, alt_class;
  int i, regno;
  basic_block bb = data;
  struct bb_info *bb_info = BB_INFO (bb);

  if (REG_P (*x) && REGNO (*x) >= FIRST_PSEUDO_REGISTER)
      int rc;

      regno = REGNO (*x);
      /* Only uses of registers neither killed nor made available
	 earlier in this block are of interest.  */
      if (bitmap_bit_p (bb_info->killed, regno)
	  || bitmap_bit_p (bb_info->avloc, regno))
	return 0;
      pref_class = reg_preferred_class (regno);
      alt_class = reg_alternate_class (regno);
      for (i = 0; VEC_iterate (int, earlyclobber_regclass, i, rc); i++)
	  if (reg_classes_intersect_p (rc, pref_class)
	      || (rc != NO_REGS
		  && reg_classes_intersect_p (rc, alt_class)))
	      bitmap_set_bit (bb_info->earlyclobber, regno);
	      break;

  return 0;
2236 /* The function processes all pseudo-registers in *X with the aid of
2237 previous function. */
2239 static void
2240 mark_reg_use_for_earlyclobber_1 (rtx *x, void *data)
2242 for_each_rtx (x, mark_reg_use_for_earlyclobber, data);
2245 /* The function calculates local info for each basic block. */
2247 static void
2248 calculate_local_reg_bb_info (void)
2250 basic_block bb;
2251 rtx insn, bound;
2253 /* We know that earlyclobber_regclass holds no more than
2254 N_REG_CLASSES elements. See check_earlyclobber. */
2255 earlyclobber_regclass = VEC_alloc (int, heap, N_REG_CLASSES);
2256 FOR_EACH_BB (bb)
2258 bound = NEXT_INSN (BB_END (bb));
2259 for (insn = BB_HEAD (bb); insn != bound; insn = NEXT_INSN (insn))
2260 if (INSN_P (insn))
2262 note_stores (PATTERN (insn), mark_reg_change, bb);
2263 if (check_earlyclobber (insn))
2264 note_uses (&PATTERN (insn), mark_reg_use_for_earlyclobber_1, bb);
2267 VEC_free (int, heap, earlyclobber_regclass);
2270 /* The function sets up reverse post-order number of each basic
2271 block. */
2273 static void
2274 set_up_bb_rts_numbers (void)
2276 int i;
2277 int *rts_order;
2279 rts_order = xmalloc (sizeof (int) * n_basic_blocks);
2280 flow_reverse_top_sort_order_compute (rts_order);
2281 for (i = 0; i < n_basic_blocks; i++)
2282 BB_INFO_BY_INDEX (rts_order [i])->rts_number = i;
2283 free (rts_order);
2286 /* Compare function for sorting blocks in reverse postorder. */
2288 static int
2289 rpost_cmp (const void *bb1, const void *bb2)
2291 basic_block b1 = *(basic_block *) bb1, b2 = *(basic_block *) bb2;
2293 return BB_INFO (b2)->rts_number - BB_INFO (b1)->rts_number;
/* Temporary bitmap used for live_pavin, live_pavout calculation
   (allocated and freed by calculate_reg_pav).  */
static bitmap temp_bitmap;

DEF_VEC_P(basic_block);
DEF_VEC_ALLOC_P(basic_block,heap);
/* The function calculates partial register availability according to
   the following equations:

     bb.live_pavin
       = empty for entry block
       | union (live_pavout of predecessors) & global_live_at_start
     bb.live_pavout = union (bb.live_pavin - bb.killed, bb.avloc)
		      & global_live_at_end

   The equations are solved by an iterative worklist algorithm that
   processes blocks in reverse post-order until a fixed point.  */

static void
calculate_reg_pav (void)
  basic_block bb, succ;
  edge e;
  int i, nel;
  /* Current and next-round worklists; swapped each outer iteration.  */
  VEC(basic_block,heap) *bbs, *new_bbs, *temp;
  basic_block *bb_array;
  /* Set of block indexes already queued on NEW_BBS, to avoid
     pushing a successor twice in one round.  */
  sbitmap wset;

  bbs = VEC_alloc (basic_block, heap, n_basic_blocks);
  new_bbs = VEC_alloc (basic_block, heap, n_basic_blocks);
  temp_bitmap = BITMAP_ALLOC (NULL);
  /* Initially every block is on the worklist.  */
  FOR_EACH_BB (bb)
      VEC_quick_push (basic_block, bbs, bb);

  wset = sbitmap_alloc (n_basic_blocks + 1);
  while (VEC_length (basic_block, bbs))
      bb_array = VEC_address (basic_block, bbs);
      nel = VEC_length (basic_block, bbs);
      /* Reverse post-order processing makes the iteration converge
	 faster.  */
      qsort (bb_array, nel, sizeof (basic_block), rpost_cmp);
      sbitmap_zero (wset);
      for (i = 0; i < nel; i++)
	  edge_iterator ei;
	  struct bb_info *bb_info;
	  bitmap bb_live_pavin, bb_live_pavout;

	  bb = bb_array [i];
	  bb_info = BB_INFO (bb);
	  bb_live_pavin = bb_info->live_pavin;
	  bb_live_pavout = bb_info->live_pavout;
	  /* live_pavin |= union of predecessors' live_pavout
	     (the entry block contributes nothing).  */
	  FOR_EACH_EDGE (e, ei, bb->preds)
	      basic_block pred = e->src;

	      if (pred->index != ENTRY_BLOCK)
		bitmap_ior_into (bb_live_pavin, BB_INFO (pred)->live_pavout);

	  bitmap_and_into (bb_live_pavin, bb->il.rtl->global_live_at_start);
	  /* temp = (avloc | (live_pavin & ~killed)) & global_live_at_end.  */
	  bitmap_ior_and_compl (temp_bitmap, bb_info->avloc,
				bb_live_pavin, bb_info->killed);
	  bitmap_and_into (temp_bitmap, bb->il.rtl->global_live_at_end);
	  if (! bitmap_equal_p (temp_bitmap, bb_live_pavout))
	      bitmap_copy (bb_live_pavout, temp_bitmap);
	      /* live_pavout changed, so successors must be revisited
		 in the next round.  */
	      FOR_EACH_EDGE (e, ei, bb->succs)
		  succ = e->dest;
		  if (succ->index != EXIT_BLOCK
		      && !TEST_BIT (wset, succ->index))
		      SET_BIT (wset, succ->index);
		      VEC_quick_push (basic_block, new_bbs, succ);

      /* Swap the worklists and start the next round.  */
      temp = bbs;
      bbs = new_bbs;
      new_bbs = temp;
      VEC_truncate (basic_block, new_bbs, 0);

  sbitmap_free (wset);
  BITMAP_FREE (temp_bitmap);
  VEC_free (basic_block, heap, new_bbs);
  VEC_free (basic_block, heap, bbs);
/* The function modifies partial availability information for two
   special cases to prevent incorrect work of the subsequent passes
   with the accurate live information based on the partial
   availability.  */

static void
modify_reg_pav (void)
  basic_block bb;
  struct bb_info *bb_info;
#ifdef STACK_REGS
  int i;
  HARD_REG_SET zero, stack_hard_regs, used;
  bitmap stack_regs;

  CLEAR_HARD_REG_SET (zero);
  CLEAR_HARD_REG_SET (stack_hard_regs);
  for (i = FIRST_STACK_REG; i <= LAST_STACK_REG; i++)
    SET_HARD_REG_BIT(stack_hard_regs, i);
  /* Collect the pseudo-registers whose preferred or alternate class
     contains at least one stack register.  */
  stack_regs = BITMAP_ALLOC (NULL);
  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
      COPY_HARD_REG_SET (used, reg_class_contents[reg_preferred_class (i)]);
      IOR_HARD_REG_SET (used, reg_class_contents[reg_alternate_class (i)]);
      AND_HARD_REG_SET (used, stack_hard_regs);
      GO_IF_HARD_REG_EQUAL(used, zero, skip);
      bitmap_set_bit (stack_regs, i);
    skip:
#endif
  FOR_EACH_BB (bb)
      bb_info = BB_INFO (bb);

      /* Reload can assign the same hard register to uninitialized
	 pseudo-register and early clobbered pseudo-register in an
	 insn if the pseudo-register is used first time in given BB
	 and not lived at the BB start.  To prevent this we don't
	 change life information for such pseudo-registers.  */
      bitmap_ior_into (bb_info->live_pavin, bb_info->earlyclobber);
#ifdef STACK_REGS
      /* We can not use the same stack register for uninitialized
	 pseudo-register and another living pseudo-register because if the
	 uninitialized pseudo-register dies, subsequent pass reg-stack
	 will be confused (it will believe that the other register
	 dies).  */
      bitmap_ior_into (bb_info->live_pavin, stack_regs);
#endif

#ifdef STACK_REGS
  BITMAP_FREE (stack_regs);
#endif
2437 /* The following function makes live information more accurate by
2438 modifying global_live_at_start and global_live_at_end of basic
2439 blocks.
2441 The standard GCC life analysis permits registers to live
2442 uninitialized, for example:
2444 R is never used
2445 .....
2446 Loop:
2447 R is defined
2449 R is used.
2451 With normal life_analysis, R would be live before "Loop:".
2452 The result is that R causes many interferences that do not
2453 serve any purpose.
2455 After the function call a register lives at a program point
2456 only if it is initialized on a path from CFG entry to the
2457 program point. */
2459 static void
2460 make_accurate_live_analysis (void)
2462 basic_block bb;
2463 struct bb_info *bb_info;
2465 max_regno = max_reg_num ();
2466 compact_blocks ();
2467 allocate_bb_info ();
2468 calculate_local_reg_bb_info ();
2469 set_up_bb_rts_numbers ();
2470 calculate_reg_pav ();
2471 modify_reg_pav ();
2472 FOR_EACH_BB (bb)
2474 bb_info = BB_INFO (bb);
2476 bitmap_and_into (bb->il.rtl->global_live_at_start, bb_info->live_pavin);
2477 bitmap_and_into (bb->il.rtl->global_live_at_end, bb_info->live_pavout);
2479 free_bb_info ();
2481 /* Run old register allocator. Return TRUE if we must exit
2482 rest_of_compilation upon return. */
2483 static void
2484 rest_of_handle_global_alloc (void)
2486 bool failure;
2488 /* If optimizing, allocate remaining pseudo-regs. Do the reload
2489 pass fixing up any insns that are invalid. */
2491 if (optimize)
2492 failure = global_alloc (dump_file);
2493 else
2495 build_insn_chain (get_insns ());
2496 failure = reload (get_insns (), 0);
2499 if (dump_enabled_p (pass_global_alloc.static_pass_number))
2501 timevar_push (TV_DUMP);
2502 dump_global_regs (dump_file);
2503 timevar_pop (TV_DUMP);
2506 gcc_assert (reload_completed || failure);
2507 reload_completed = !failure;
2510 struct tree_opt_pass pass_global_alloc =
2512 "greg", /* name */
2513 NULL, /* gate */
2514 rest_of_handle_global_alloc, /* execute */
2515 NULL, /* sub */
2516 NULL, /* next */
2517 0, /* static_pass_number */
2518 TV_GLOBAL_ALLOC, /* tv_id */
2519 0, /* properties_required */
2520 0, /* properties_provided */
2521 0, /* properties_destroyed */
2522 0, /* todo_flags_start */
2523 TODO_dump_func |
2524 TODO_ggc_collect, /* todo_flags_finish */
2525 'g' /* letter */