* Makefile.in (TEXI_GCCINSTALL_FILES): Add gcc-vers.texi dependency.
[official-gcc.git] / gcc / global.c
blobb6bfa4d9f39ff31bd0db2c17ab201e5d28489c66
1 /* Allocate registers for pseudo-registers that span basic blocks.
2 Copyright (C) 1987, 1988, 1991, 1994, 1996, 1997, 1998,
3 1999, 2000, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
20 02110-1301, USA. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "machmode.h"
28 #include "hard-reg-set.h"
29 #include "rtl.h"
30 #include "tm_p.h"
31 #include "flags.h"
32 #include "regs.h"
33 #include "function.h"
34 #include "insn-config.h"
35 #include "recog.h"
36 #include "reload.h"
37 #include "output.h"
38 #include "toplev.h"
39 #include "tree-pass.h"
40 #include "timevar.h"
41 #include "vecprim.h"
43 /* This pass of the compiler performs global register allocation.
44 It assigns hard register numbers to all the pseudo registers
45 that were not handled in local_alloc. Assignments are recorded
46 in the vector reg_renumber, not by changing the rtl code.
47 (Such changes are made by final). The entry point is
48 the function global_alloc.
50 After allocation is complete, the reload pass is run as a subroutine
51 of this pass, so that when a pseudo reg loses its hard reg due to
52 spilling it is possible to make a second attempt to find a hard
53 reg for it. The reload pass is independent in other respects
54 and it is run even when stupid register allocation is in use.
56 1. Assign allocation-numbers (allocnos) to the pseudo-registers
57 still needing allocations and to the pseudo-registers currently
58 allocated by local-alloc which may be spilled by reload.
59 Set up tables reg_allocno and allocno_reg to map
60 reg numbers to allocnos and vice versa.
61 max_allocno gets the number of allocnos in use.
63 2. Allocate a max_allocno by max_allocno conflict bit matrix and clear it.
64 Allocate a max_allocno by FIRST_PSEUDO_REGISTER conflict matrix
65 for conflicts between allocnos and explicit hard register use
66 (which includes use of pseudo-registers allocated by local_alloc).
68 3. For each basic block
69 walk forward through the block, recording which
70 pseudo-registers and which hardware registers are live.
71 Build the conflict matrix between the pseudo-registers
72 and another of pseudo-registers versus hardware registers.
73 Also record the preferred hardware registers
74 for each pseudo-register.
76 4. Sort a table of the allocnos into order of
77 desirability of the variables.
79 5. Allocate the variables in that order; each if possible into
80 a preferred register, else into another register. */
82 /* Number of pseudo-registers which are candidates for allocation. */
/* NOTE(review): this listing embeds the original source line numbers at
   the start of each line and appears to have lost brace-only and blank
   lines during extraction; verify against the pristine file.  */
 84 static int max_allocno;
 86 /* Indexed by (pseudo) reg number, gives the allocno, or -1
 87 for pseudo registers which are not to be allocated. */
/* Sized to max_regno; entries below FIRST_PSEUDO_REGISTER are -1
   (see the initialization loop in global_alloc).  */
 89 static int *reg_allocno;
/* Per-allocno bookkeeping record.  One is created for every pseudo
   register (or share-group of pseudos) still needing a hard register;
   the array `allocno' below is indexed by allocno number.
   NOTE(review): the listing has dropped the struct's brace-only lines
   ('{' and '};'); the member list itself is intact.  */
 91 struct allocno
/* Pseudo register number this allocno stands for (first of the
   share-group when pseudos share via reg_may_share).  */
 93 int reg;
 94 /* Gives the number of consecutive hard registers needed by that
 95 pseudo reg. */
 96 int size;
 98 /* Number of calls crossed by each allocno. */
 99 int calls_crossed;
 101 /* Number of calls that might throw crossed by each allocno. */
 102 int throwing_calls_crossed;
 104 /* Number of refs to each allocno. */
 105 int n_refs;
 107 /* Frequency of uses of each allocno. */
 108 int freq;
 110 /* Guess at live length of each allocno.
 111 This is actually the max of the live lengths of the regs. */
 112 int live_length;
 114 /* Set of hard regs conflicting with allocno N. */
 116 HARD_REG_SET hard_reg_conflicts;
 118 /* Set of hard regs preferred by allocno N.
 119 This is used to make allocnos go into regs that are copied to or from them,
 120 when possible, to reduce register shuffling. */
 122 HARD_REG_SET hard_reg_preferences;
 124 /* Similar, but just counts register preferences made in simple copy
 125 operations, rather than arithmetic. These are given priority because
 126 we can always eliminate an insn by using these, but using a register
 127 in the above list won't always eliminate an insn. */
 129 HARD_REG_SET hard_reg_copy_preferences;
 131 /* Similar to hard_reg_preferences, but includes bits for subsequent
 132 registers when an allocno is multi-word. The above variable is used for
 133 allocation while this is used to build reg_someone_prefers, below. */
 135 HARD_REG_SET hard_reg_full_preferences;
 137 /* Set of hard registers that some later allocno has a preference for. */
 139 HARD_REG_SET regs_someone_prefers;
 141 #ifdef STACK_REGS
 142 /* Set to true if allocno can't be allocated in the stack register. */
 143 bool no_stack_reg;
 144 #endif
/* Array of per-allocno records, indexed by allocno number; allocated
   in global_alloc with XCNEWVEC so it starts zeroed.  */
 147 static struct allocno *allocno;
 149 /* A vector of the integers from 0 to max_allocno-1,
 150 sorted in the order of first-to-be-allocated first. */
 152 static int *allocno_order;
 154 /* Indexed by (pseudo) reg number, gives the number of another
 155 lower-numbered pseudo reg which can share a hard reg with this pseudo
 156 *even if the two pseudos would otherwise appear to conflict*. */
 158 static int *reg_may_share;
 160 /* Define the number of bits in each element of `conflicts' and what
 161 type that element has. We use the largest integer format on the
 162 host machine. */
 164 #define INT_BITS HOST_BITS_PER_WIDE_INT
 165 #define INT_TYPE HOST_WIDE_INT
 167 /* max_allocno by max_allocno array of bits,
 168 recording whether two allocno's conflict (can't go in the same
 169 hardware register).
 171 `conflicts' is symmetric after the call to mirror_conflicts. */
/* Row-major: bit J of row I lives at
   conflicts[I * allocno_row_words + J / INT_BITS] (see CONFLICTP).  */
 173 static INT_TYPE *conflicts;
 175 /* Number of ints required to hold max_allocno bits.
 176 This is the length of a row in `conflicts'. */
 178 static int allocno_row_words;
 180 /* Two macros to test or store 1 in an element of `conflicts'. */
/* CONFLICTP (I, J): nonzero iff allocno J's bit is set in allocno I's
   conflict row.  Only symmetric after mirror_conflicts has run.  */
 182 #define CONFLICTP(I, J) \
 183 (conflicts[(I) * allocno_row_words + (unsigned) (J) / INT_BITS] \
 184 & ((INT_TYPE) 1 << ((unsigned) (J) % INT_BITS)))
 186 /* For any allocno set in ALLOCNO_SET, set ALLOCNO to that allocno,
 187 and execute CODE. */
/* Iterates word by word, shifting each word right one bit at a time;
   ALLOCNO tracks the bit index.  NOTE(review): brace-only lines of the
   macro body were dropped by the listing extraction.  */
 188 #define EXECUTE_IF_SET_IN_ALLOCNO_SET(ALLOCNO_SET, ALLOCNO, CODE) \
 189 do { \
 190 int i_; \
 191 int allocno_; \
 192 INT_TYPE *p_ = (ALLOCNO_SET); \
 194 for (i_ = allocno_row_words - 1, allocno_ = 0; i_ >= 0; \
 195 i_--, allocno_ += INT_BITS) \
 197 unsigned INT_TYPE word_ = (unsigned INT_TYPE) *p_++; \
 199 for ((ALLOCNO) = allocno_; word_; word_ >>= 1, (ALLOCNO)++) \
 201 if (word_ & 1) \
 202 {CODE;} \
 205 } while (0)
 207 /* This doesn't work for non-GNU C due to the way CODE is macro expanded. */
 208 #if 0
 209 /* For any allocno that conflicts with IN_ALLOCNO, set OUT_ALLOCNO to
 210 the conflicting allocno, and execute CODE. This macro assumes that
 211 mirror_conflicts has been run. */
 212 #define EXECUTE_IF_CONFLICT(IN_ALLOCNO, OUT_ALLOCNO, CODE)\
 213 EXECUTE_IF_SET_IN_ALLOCNO_SET (conflicts + (IN_ALLOCNO) * allocno_row_words,\
 214 OUT_ALLOCNO, (CODE))
 215 #endif
 217 /* Set of hard regs currently live (during scan of all insns). */
 219 static HARD_REG_SET hard_regs_live;
 221 /* Set of registers that global-alloc isn't supposed to use. */
 223 static HARD_REG_SET no_global_alloc_regs;
 225 /* Set of registers used so far. */
 227 static HARD_REG_SET regs_used_so_far;
 229 /* Number of refs to each hard reg, as used by local alloc.
 230 It is zero for a reg that contains global pseudos or is explicitly used. */
 232 static int local_reg_n_refs[FIRST_PSEUDO_REGISTER];
 234 /* Frequency of uses of given hard reg. */
 235 static int local_reg_freq[FIRST_PSEUDO_REGISTER];
 237 /* Guess at live length of each hard reg, as used by local alloc.
 238 This is actually the sum of the live lengths of the specific regs. */
 240 static int local_reg_live_length[FIRST_PSEUDO_REGISTER];
 242 /* Set to 1 a bit in a vector TABLE of HARD_REG_SETs, for vector
 243 element I, and hard register number J. */
 245 #define SET_REGBIT(TABLE, I, J) SET_HARD_REG_BIT (allocno[I].TABLE, J)
 247 /* Bit mask for allocnos live at current point in the scan. */
/* One bit per allocno; allocno_row_words words long.  */
 249 static INT_TYPE *allocnos_live;
 251 /* Test, set or clear bit number I in allocnos_live,
 252 a bit vector indexed by allocno. */
 254 #define SET_ALLOCNO_LIVE(I) \
 255 (allocnos_live[(unsigned) (I) / INT_BITS] \
 256 |= ((INT_TYPE) 1 << ((unsigned) (I) % INT_BITS)))
 258 #define CLEAR_ALLOCNO_LIVE(I) \
 259 (allocnos_live[(unsigned) (I) / INT_BITS] \
 260 &= ~((INT_TYPE) 1 << ((unsigned) (I) % INT_BITS)))
 262 /* This is turned off because it doesn't work right for DImode.
 263 (And it is only used for DImode, so the other cases are worthless.)
 264 The problem is that it isn't true that there is NO possibility of conflict;
 265 only that there is no conflict if the two pseudos get the exact same regs.
 266 If they were allocated with a partial overlap, there would be a conflict.
 267 We can't safely turn off the conflict unless we have another way to
 268 prevent the partial overlap.
 270 Idea: change hard_reg_conflicts so that instead of recording which
 271 hard regs the allocno may not overlap, it records where the allocno
 272 may not start. Change both where it is used and where it is updated.
 273 Then there is a way to record that (reg:DI 108) may start at 10
 274 but not at 9 or 11. There is still the question of how to record
 275 this semi-conflict between two pseudos. */
 276 #if 0
 277 /* Reg pairs for which conflict after the current insn
 278 is inhibited by a REG_NO_CONFLICT note.
 279 If the table gets full, we ignore any other notes--that is conservative. */
 280 #define NUM_NO_CONFLICT_PAIRS 4
 281 /* Number of pairs in use in this insn. */
 282 int n_no_conflict_pairs;
 283 static struct { int allocno1, allocno2;}
 284 no_conflict_pairs[NUM_NO_CONFLICT_PAIRS];
 285 #endif /* 0 */
 287 /* Record all regs that are set in any one insn.
 288 Communication from mark_reg_{store,clobber} and global_conflicts. */
 290 static rtx *regs_set;
 291 static int n_regs_set;
 293 /* All registers that can be eliminated. */
 295 static HARD_REG_SET eliminable_regset;
/* Forward declarations for the file-static helpers of this pass.  */
 297 static int allocno_compare (const void *, const void *);
 298 static void global_conflicts (void);
 299 static void mirror_conflicts (void);
 300 static void expand_preferences (void);
 301 static void prune_preferences (void);
 302 static void find_reg (int, HARD_REG_SET, int, int, int);
 303 static void record_one_conflict (int);
 304 static void record_conflicts (int *, int);
 305 static void mark_reg_store (rtx, rtx, void *);
 306 static void mark_reg_clobber (rtx, rtx, void *);
 307 static void mark_reg_conflicts (rtx);
 308 static void mark_reg_death (rtx);
 309 static void mark_reg_live_nc (int, enum machine_mode);
 310 static void set_preference (rtx, rtx);
 311 static void dump_conflicts (FILE *);
 312 static void reg_becomes_live (rtx, rtx, void *);
 313 static void reg_dies (int, enum machine_mode, struct insn_chain *);
/* Helpers for the accurate-live-analysis sub-pass.  */
 315 static void allocate_bb_info (void);
 316 static void free_bb_info (void);
 317 static bool check_earlyclobber (rtx);
 318 static void mark_reg_use_for_earlyclobber_1 (rtx *, void *);
 319 static int mark_reg_use_for_earlyclobber (rtx *, void *);
 320 static void calculate_local_reg_bb_info (void);
 321 static void set_up_bb_rts_numbers (void);
 322 static int rpost_cmp (const void *, const void *);
 323 static void calculate_reg_pav (void);
 324 static void modify_reg_pav (void);
 325 static void make_accurate_live_analysis (void);
329 /* Look through the list of eliminable registers. Add registers
330 clobbered by asm statements to LIVE_REGS. Set ELIM_SET to the set of
331 registers which may be eliminated. Set NO_GLOBAL_SET to the set of
332 registers which may not be used across blocks.
334 ASM_CLOBBERED is the set of registers clobbered by some asm statement.
336 This will normally be called with LIVE_REGS as the global variable
337 regs_ever_live, ELIM_SET as the file static variable
338 eliminable_regset, and NO_GLOBAL_SET as the file static variable
339 NO_GLOBAL_ALLOC_REGS. */
/* See the block comment above for the contract.  NOTE(review): the
   listing has dropped brace-only lines, so the if/else nesting below
   must be read against the pristine source; each `if
   (!asm_clobbered[...])' originally opened a braced block containing
   the two SET_HARD_REG_BIT statements.  */
 341 static void
 342 compute_regsets (char asm_clobbered[FIRST_PSEUDO_REGISTER],
 343 char live_regs[FIRST_PSEUDO_REGISTER],
 344 HARD_REG_SET *elim_set,
 345 HARD_REG_SET *no_global_set)
 347 #ifdef ELIMINABLE_REGS
 348 static const struct {const int from, to; } eliminables[] = ELIMINABLE_REGS;
 349 size_t i;
 350 #endif
/* The frame pointer is needed when we can't omit it: alloca with
   EXIT_IGNORE_STACK, or the target forces FRAME_POINTER_REQUIRED.  */
 351 int need_fp
 352 = (! flag_omit_frame_pointer
 353 || (current_function_calls_alloca && EXIT_IGNORE_STACK)
 354 || FRAME_POINTER_REQUIRED);
 356 /* A machine may have certain hard registers that
 357 are safe to use only within a basic block. */
 359 CLEAR_HARD_REG_SET (*no_global_set);
 360 CLEAR_HARD_REG_SET (*elim_set);
 362 /* Build the regset of all eliminable registers and show we can't use those
 363 that we already know won't be eliminated. */
 364 #ifdef ELIMINABLE_REGS
 365 for (i = 0; i < ARRAY_SIZE (eliminables); i++)
 367 bool cannot_elim
 368 = (! CAN_ELIMINATE (eliminables[i].from, eliminables[i].to)
 369 || (eliminables[i].to == STACK_POINTER_REGNUM && need_fp));
/* An asm clobber of an eliminable reg is an error if the reg cannot
   actually be eliminated; otherwise the reg is simply live.  */
 371 if (!asm_clobbered[eliminables[i].from])
 373 SET_HARD_REG_BIT (*elim_set, eliminables[i].from);
 375 if (cannot_elim)
 376 SET_HARD_REG_BIT (*no_global_set, eliminables[i].from);
 378 else if (cannot_elim)
 379 error ("%s cannot be used in asm here",
 380 reg_names[eliminables[i].from]);
 381 else
 382 live_regs[eliminables[i].from] = 1;
/* Targets where the soft and hard frame pointers differ must handle
   the hard frame pointer separately from the ELIMINABLE_REGS table.  */
 384 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
 385 if (!asm_clobbered[HARD_FRAME_POINTER_REGNUM])
 387 SET_HARD_REG_BIT (*elim_set, HARD_FRAME_POINTER_REGNUM);
 388 if (need_fp)
 389 SET_HARD_REG_BIT (*no_global_set, HARD_FRAME_POINTER_REGNUM);
 391 else if (need_fp)
 392 error ("%s cannot be used in asm here",
 393 reg_names[HARD_FRAME_POINTER_REGNUM]);
 394 else
 395 live_regs[HARD_FRAME_POINTER_REGNUM] = 1;
 396 #endif
/* No ELIMINABLE_REGS: only the frame pointer is eliminable.  */
 398 #else
 399 if (!asm_clobbered[FRAME_POINTER_REGNUM])
 401 SET_HARD_REG_BIT (*elim_set, FRAME_POINTER_REGNUM);
 402 if (need_fp)
 403 SET_HARD_REG_BIT (*no_global_set, FRAME_POINTER_REGNUM);
 405 else if (need_fp)
 406 error ("%s cannot be used in asm here", reg_names[FRAME_POINTER_REGNUM]);
 407 else
 408 live_regs[FRAME_POINTER_REGNUM] = 1;
 409 #endif
412 /* Perform allocation of pseudo-registers not allocated by local_alloc.
414 Return value is nonzero if reload failed
415 and we must not do any more for this function. */
/* Entry point of the pass.  Builds the reg <-> allocno maps, computes
   the conflict matrices (global_conflicts / mirror_conflicts), sorts
   allocnos by priority, tries to assign a hard register to each via
   find_reg, and finally runs reload.  Returns reload's return value:
   nonzero means reload failed and no further work should be done on
   this function.  NOTE(review): brace-only lines were dropped by the
   listing extraction throughout this body.  */
 417 static int
 418 global_alloc (void)
 420 int retval;
 421 size_t i;
 422 rtx x;
 424 make_accurate_live_analysis ();
 426 compute_regsets (regs_asm_clobbered, regs_ever_live,
 427 &eliminable_regset, &no_global_alloc_regs);
 429 /* Track which registers have already been used. Start with registers
 430 explicitly in the rtl, then registers allocated by local register
 431 allocation. */
 433 CLEAR_HARD_REG_SET (regs_used_so_far);
 434 #ifdef LEAF_REGISTERS
 435 /* If we are doing the leaf function optimization, and this is a leaf
 436 function, it means that the registers that take work to save are those
 437 that need a register window. So prefer the ones that can be used in
 438 a leaf function. */
 440 const char *cheap_regs;
 441 const char *const leaf_regs = LEAF_REGISTERS;
 443 if (only_leaf_regs_used () && leaf_function_p ())
 444 cheap_regs = leaf_regs;
 445 else
 446 cheap_regs = call_used_regs;
 447 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
 448 if (regs_ever_live[i] || cheap_regs[i])
 449 SET_HARD_REG_BIT (regs_used_so_far, i);
 451 #else
 452 /* We consider registers that do not have to be saved over calls as if
 453 they were already used since there is no cost in using them. */
 454 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
 455 if (regs_ever_live[i] || call_used_regs[i])
 456 SET_HARD_REG_BIT (regs_used_so_far, i);
 457 #endif
/* Registers already assigned by local-alloc also count as used.  */
 459 for (i = FIRST_PSEUDO_REGISTER; i < (size_t) max_regno; i++)
 460 if (reg_renumber[i] >= 0)
 461 SET_HARD_REG_BIT (regs_used_so_far, reg_renumber[i]);
 463 /* Establish mappings from register number to allocation number
 464 and vice versa. In the process, count the allocnos. */
 466 reg_allocno = XNEWVEC (int, max_regno);
 468 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
 469 reg_allocno[i] = -1;
 471 /* Initialize the shared-hard-reg mapping
 472 from the list of pairs that may share. */
 473 reg_may_share = XCNEWVEC (int, max_regno);
 474 for (x = regs_may_share; x; x = XEXP (XEXP (x, 1), 1))
 476 int r1 = REGNO (XEXP (x, 0));
 477 int r2 = REGNO (XEXP (XEXP (x, 1), 0));
/* Always map the higher-numbered pseudo to the lower-numbered one.  */
 478 if (r1 > r2)
 479 reg_may_share[r1] = r2;
 480 else
 481 reg_may_share[r2] = r1;
 484 max_allocno = 0;
 485 for (i = FIRST_PSEUDO_REGISTER; i < (size_t) max_regno; i++)
 486 /* Note that reg_live_length[i] < 0 indicates a "constant" reg
 487 that we are supposed to refrain from putting in a hard reg.
 488 -2 means do make an allocno but don't allocate it. */
 489 if (REG_N_REFS (i) != 0 && REG_LIVE_LENGTH (i) != -1
 490 /* Don't allocate pseudos that cross calls,
 491 if this function receives a nonlocal goto. */
 492 && (! current_function_has_nonlocal_label
 493 || REG_N_CALLS_CROSSED (i) == 0))
/* Sharing pseudos reuse the allocno of their partner when the partner
   already has one and this pseudo has no hard reg yet.  */
 495 if (reg_renumber[i] < 0
 496 && reg_may_share[i] && reg_allocno[reg_may_share[i]] >= 0)
 497 reg_allocno[i] = reg_allocno[reg_may_share[i]];
 498 else
 499 reg_allocno[i] = max_allocno++;
 500 gcc_assert (REG_LIVE_LENGTH (i));
 502 else
 503 reg_allocno[i] = -1;
 505 allocno = XCNEWVEC (struct allocno, max_allocno);
/* Accumulate per-allocno statistics; += matters because several
   pseudos may share one allocno (see reg_may_share above).  */
 507 for (i = FIRST_PSEUDO_REGISTER; i < (size_t) max_regno; i++)
 508 if (reg_allocno[i] >= 0)
 510 int num = reg_allocno[i];
 511 allocno[num].reg = i;
 512 allocno[num].size = PSEUDO_REGNO_SIZE (i);
 513 allocno[num].calls_crossed += REG_N_CALLS_CROSSED (i);
 514 allocno[num].throwing_calls_crossed
 515 += REG_N_THROWING_CALLS_CROSSED (i);
 516 allocno[num].n_refs += REG_N_REFS (i);
 517 allocno[num].freq += REG_FREQ (i);
 518 if (allocno[num].live_length < REG_LIVE_LENGTH (i))
 519 allocno[num].live_length = REG_LIVE_LENGTH (i);
 522 /* Calculate amount of usage of each hard reg by pseudos
 523 allocated by local-alloc. This is to see if we want to
 524 override it. */
 525 memset (local_reg_live_length, 0, sizeof local_reg_live_length);
 526 memset (local_reg_n_refs, 0, sizeof local_reg_n_refs);
 527 memset (local_reg_freq, 0, sizeof local_reg_freq);
 528 for (i = FIRST_PSEUDO_REGISTER; i < (size_t) max_regno; i++)
 529 if (reg_renumber[i] >= 0)
 531 int regno = reg_renumber[i];
 532 int endregno = regno + hard_regno_nregs[regno][PSEUDO_REGNO_MODE (i)];
 533 int j;
 535 for (j = regno; j < endregno; j++)
 537 local_reg_n_refs[j] += REG_N_REFS (i);
 538 local_reg_freq[j] += REG_FREQ (i);
 539 local_reg_live_length[j] += REG_LIVE_LENGTH (i);
 543 /* We can't override local-alloc for a reg used not just by local-alloc. */
 544 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
 545 if (regs_ever_live[i])
 546 local_reg_n_refs[i] = 0, local_reg_freq[i] = 0;
 548 allocno_row_words = (max_allocno + INT_BITS - 1) / INT_BITS;
 550 /* We used to use alloca here, but the size of what it would try to
 551 allocate would occasionally cause it to exceed the stack limit and
 552 cause unpredictable core dumps. Some examples were > 2Mb in size. */
 553 conflicts = XCNEWVEC (INT_TYPE, max_allocno * allocno_row_words);
 555 allocnos_live = XNEWVEC (INT_TYPE, allocno_row_words);
 557 /* If there is work to be done (at least one reg to allocate),
 558 perform global conflict analysis and allocate the regs. */
 560 if (max_allocno > 0)
 562 /* Scan all the insns and compute the conflicts among allocnos
 563 and between allocnos and hard regs. */
 565 global_conflicts ();
 567 mirror_conflicts ();
 569 /* Eliminate conflicts between pseudos and eliminable registers. If
 570 the register is not eliminated, the pseudo won't really be able to
 571 live in the eliminable register, so the conflict doesn't matter.
 572 If we do eliminate the register, the conflict will no longer exist.
 573 So in either case, we can ignore the conflict. Likewise for
 574 preferences. */
 576 for (i = 0; i < (size_t) max_allocno; i++)
 578 AND_COMPL_HARD_REG_SET (allocno[i].hard_reg_conflicts,
 579 eliminable_regset);
 580 AND_COMPL_HARD_REG_SET (allocno[i].hard_reg_copy_preferences,
 581 eliminable_regset);
 582 AND_COMPL_HARD_REG_SET (allocno[i].hard_reg_preferences,
 583 eliminable_regset);
 586 /* Try to expand the preferences by merging them between allocnos. */
 588 expand_preferences ();
 590 /* Determine the order to allocate the remaining pseudo registers. */
 592 allocno_order = XNEWVEC (int, max_allocno);
 593 for (i = 0; i < (size_t) max_allocno; i++)
 594 allocno_order[i] = i;
 596 /* Default the size to 1, since allocno_compare uses it to divide by.
 597 Also convert allocno_live_length of zero to -1. A length of zero
 598 can occur when all the registers for that allocno have reg_live_length
 599 equal to -2. In this case, we want to make an allocno, but not
 600 allocate it. So avoid the divide-by-zero and set it to a low
 601 priority. */
 603 for (i = 0; i < (size_t) max_allocno; i++)
 605 if (allocno[i].size == 0)
 606 allocno[i].size = 1;
 607 if (allocno[i].live_length == 0)
 608 allocno[i].live_length = -1;
 611 qsort (allocno_order, max_allocno, sizeof (int), allocno_compare);
 613 prune_preferences ();
 615 if (dump_file)
 616 dump_conflicts (dump_file);
 618 /* Try allocating them, one by one, in that order,
 619 except for parameters marked with reg_live_length[regno] == -2. */
 621 for (i = 0; i < (size_t) max_allocno; i++)
 622 if (reg_renumber[allocno[allocno_order[i]].reg] < 0
 623 && REG_LIVE_LENGTH (allocno[allocno_order[i]].reg) >= 0)
 625 /* If we have more than one register class,
 626 first try allocating in the class that is cheapest
 627 for this pseudo-reg. If that fails, try any reg. */
 628 if (N_REG_CLASSES > 1)
 630 find_reg (allocno_order[i], 0, 0, 0, 0);
 631 if (reg_renumber[allocno[allocno_order[i]].reg] >= 0)
 632 continue;
 634 if (reg_alternate_class (allocno[allocno_order[i]].reg) != NO_REGS)
 635 find_reg (allocno_order[i], 0, 1, 0, 0);
 638 free (allocno_order);
 641 /* Do the reloads now while the allocno data still exists, so that we can
 642 try to assign new hard regs to any pseudo regs that are spilled. */
 644 #if 0 /* We need to eliminate regs even if there is no rtl code,
 645 for the sake of debugging information. */
 646 if (n_basic_blocks > NUM_FIXED_BLOCKS)
 647 #endif
 649 build_insn_chain (get_insns ());
 650 retval = reload (get_insns (), 1);
 653 /* Clean up. */
 654 free (reg_allocno);
 655 free (reg_may_share);
 656 free (allocno);
 657 free (conflicts);
 658 free (allocnos_live);
 660 return retval;
663 /* Sort predicate for ordering the allocnos.
664 Returns -1 (1) if *v1 should be allocated before (after) *v2. */
666 static int
667 allocno_compare (const void *v1p, const void *v2p)
669 int v1 = *(const int *)v1p, v2 = *(const int *)v2p;
670 /* Note that the quotient will never be bigger than
671 the value of floor_log2 times the maximum number of
672 times a register can occur in one insn (surely less than 100)
673 weighted by the frequency (maximally REG_FREQ_MAX).
674 Multiplying this by 10000/REG_FREQ_MAX can't overflow. */
675 int pri1
676 = (((double) (floor_log2 (allocno[v1].n_refs) * allocno[v1].freq)
677 / allocno[v1].live_length)
678 * (10000 / REG_FREQ_MAX) * allocno[v1].size);
679 int pri2
680 = (((double) (floor_log2 (allocno[v2].n_refs) * allocno[v2].freq)
681 / allocno[v2].live_length)
682 * (10000 / REG_FREQ_MAX) * allocno[v2].size);
683 if (pri2 - pri1)
684 return pri2 - pri1;
686 /* If regs are equally good, sort by allocno,
687 so that the results of qsort leave nothing to chance. */
688 return v1 - v2;
691 /* Scan the rtl code and record all conflicts and register preferences in the
692 conflict matrices and preference tables. */
/* Walk every basic block forward, maintaining the set of live allocnos
   (allocnos_live) and live hard regs (hard_regs_live), and record a
   conflict whenever a register becomes live while others are live.
   Fills `conflicts' and the per-allocno hard-reg sets; also records
   register preferences via the mark_reg_* callbacks.  NOTE(review):
   brace-only lines were dropped by the listing extraction.  */
 694 static void
 695 global_conflicts (void)
 697 unsigned i;
 698 basic_block b;
 699 rtx insn;
 700 int *block_start_allocnos;
 702 /* Make a vector that mark_reg_{store,clobber} will store in. */
 703 regs_set = XNEWVEC (rtx, max_parallel * 2);
 705 block_start_allocnos = XNEWVEC (int, max_allocno);
 707 FOR_EACH_BB (b)
 709 memset (allocnos_live, 0, allocno_row_words * sizeof (INT_TYPE));
 711 /* Initialize table of registers currently live
 712 to the state at the beginning of this basic block.
 713 This also marks the conflicts among hard registers
 714 and any allocnos that are live.
 716 For pseudo-regs, there is only one bit for each one
 717 no matter how many hard regs it occupies.
 718 This is ok; we know the size from PSEUDO_REGNO_SIZE.
 719 For explicit hard regs, we cannot know the size that way
 720 since one hard reg can be used with various sizes.
 721 Therefore, we must require that all the hard regs
 722 implicitly live as part of a multi-word hard reg
 723 be explicitly marked in basic_block_live_at_start. */
 726 regset old = b->il.rtl->global_live_at_start;
 727 int ax = 0;
 728 reg_set_iterator rsi;
 730 REG_SET_TO_HARD_REG_SET (hard_regs_live, old);
 731 EXECUTE_IF_SET_IN_REG_SET (old, FIRST_PSEUDO_REGISTER, i, rsi)
 733 int a = reg_allocno[i];
 734 if (a >= 0)
 736 SET_ALLOCNO_LIVE (a);
 737 block_start_allocnos[ax++] = a;
/* A pseudo already assigned a hard reg by local-alloc contributes
   its hard regs to the live set instead of an allocno bit.  */
 739 else if ((a = reg_renumber[i]) >= 0)
 740 mark_reg_live_nc (a, PSEUDO_REGNO_MODE (i));
 743 /* Record that each allocno now live conflicts with each hard reg
 744 now live.
 746 It is not necessary to mark any conflicts between pseudos at
 747 this point, even for pseudos which are live at the start of
 748 the basic block.
 750 Given two pseudos X and Y and any point in the CFG P.
 752 On any path to point P where X and Y are live one of the
 753 following conditions must be true:
 755 1. X is live at some instruction on the path that
 756 evaluates Y.
 758 2. Y is live at some instruction on the path that
 759 evaluates X.
 761 3. Either X or Y is not evaluated on the path to P
 762 (i.e. it is used uninitialized) and thus the
 763 conflict can be ignored.
 765 In cases #1 and #2 the conflict will be recorded when we
 766 scan the instruction that makes either X or Y become live. */
 767 record_conflicts (block_start_allocnos, ax);
/* EH landing pads implicitly receive the EH return data registers;
   make everything live here conflict with them.  */
 769 #ifdef EH_RETURN_DATA_REGNO
 770 if (bb_has_eh_pred (b))
 772 unsigned int i;
 774 for (i = 0; ; ++i)
 776 unsigned int regno = EH_RETURN_DATA_REGNO (i);
 777 if (regno == INVALID_REGNUM)
 778 break;
 779 record_one_conflict (regno);
 782 #endif
 784 /* Pseudos can't go in stack regs at the start of a basic block that
 785 is reached by an abnormal edge. Likewise for call clobbered regs,
 786 because caller-save, fixup_abnormal_edges and possibly the table
 787 driven EH machinery are not quite ready to handle such regs live
 788 across such edges. */
 790 edge e;
 791 edge_iterator ei;
 793 FOR_EACH_EDGE (e, ei, b->preds)
 794 if (e->flags & EDGE_ABNORMAL)
 795 break;
 797 if (e != NULL)
 799 #ifdef STACK_REGS
 800 EXECUTE_IF_SET_IN_ALLOCNO_SET (allocnos_live, ax,
 802 allocno[ax].no_stack_reg = 1;
 804 for (ax = FIRST_STACK_REG; ax <= LAST_STACK_REG; ax++)
 805 record_one_conflict (ax);
 806 #endif
 808 /* No need to record conflicts for call clobbered regs if we have
 809 nonlocal labels around, as we don't ever try to allocate such
 810 regs in this case. */
 811 if (! current_function_has_nonlocal_label)
 812 for (ax = 0; ax < FIRST_PSEUDO_REGISTER; ax++)
 813 if (call_used_regs [ax])
 814 record_one_conflict (ax);
 819 insn = BB_HEAD (b);
 821 /* Scan the code of this basic block, noting which allocnos
 822 and hard regs are born or die. When one is born,
 823 record a conflict with all others currently live. */
 825 while (1)
 827 RTX_CODE code = GET_CODE (insn);
 828 rtx link;
 830 /* Make regs_set an empty set. */
 832 n_regs_set = 0;
 834 if (code == INSN || code == CALL_INSN || code == JUMP_INSN)
 837 #if 0
 838 int i = 0;
 839 for (link = REG_NOTES (insn);
 840 link && i < NUM_NO_CONFLICT_PAIRS;
 841 link = XEXP (link, 1))
 842 if (REG_NOTE_KIND (link) == REG_NO_CONFLICT)
 844 no_conflict_pairs[i].allocno1
 845 = reg_allocno[REGNO (SET_DEST (PATTERN (insn)))];
 846 no_conflict_pairs[i].allocno2
 847 = reg_allocno[REGNO (XEXP (link, 0))];
 848 i++;
 850 #endif /* 0 */
 852 /* Mark any registers clobbered by INSN as live,
 853 so they conflict with the inputs. */
 855 note_stores (PATTERN (insn), mark_reg_clobber, NULL);
 857 /* Mark any registers dead after INSN as dead now. */
 859 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
 860 if (REG_NOTE_KIND (link) == REG_DEAD)
 861 mark_reg_death (XEXP (link, 0));
 863 /* Mark any registers set in INSN as live,
 864 and mark them as conflicting with all other live regs.
 865 Clobbers are processed again, so they conflict with
 866 the registers that are set. */
 868 note_stores (PATTERN (insn), mark_reg_store, NULL);
 870 #ifdef AUTO_INC_DEC
 871 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
 872 if (REG_NOTE_KIND (link) == REG_INC)
 873 mark_reg_store (XEXP (link, 0), NULL_RTX, NULL);
 874 #endif
 876 /* If INSN has multiple outputs, then any reg that dies here
 877 and is used inside of an output
 878 must conflict with the other outputs.
 880 It is unsafe to use !single_set here since it will ignore an
 881 unused output. Just because an output is unused does not mean
 882 the compiler can assume the side effect will not occur.
 883 Consider if REG appears in the address of an output and we
 884 reload the output. If we allocate REG to the same hard
 885 register as an unused output we could set the hard register
 886 before the output reload insn. */
 887 if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
 888 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
 889 if (REG_NOTE_KIND (link) == REG_DEAD)
 891 int used_in_output = 0;
 892 int i;
 893 rtx reg = XEXP (link, 0);
 895 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
 897 rtx set = XVECEXP (PATTERN (insn), 0, i);
 898 if (GET_CODE (set) == SET
 899 && !REG_P (SET_DEST (set))
 900 && !rtx_equal_p (reg, SET_DEST (set))
 901 && reg_overlap_mentioned_p (reg, SET_DEST (set)))
 902 used_in_output = 1;
 904 if (used_in_output)
 905 mark_reg_conflicts (reg);
 908 /* Mark any registers set in INSN and then never used. */
 910 while (n_regs_set-- > 0)
 912 rtx note = find_regno_note (insn, REG_UNUSED,
 913 REGNO (regs_set[n_regs_set]));
 914 if (note)
 915 mark_reg_death (XEXP (note, 0));
 919 if (insn == BB_END (b))
 920 break;
 921 insn = NEXT_INSN (insn);
 925 /* Clean up. */
 926 free (block_start_allocnos);
 927 free (regs_set);
929 /* Expand the preference information by looking for cases where one allocno
930 dies in an insn that sets an allocno. If those two allocnos don't conflict,
931 merge any preferences between those allocnos. */
/* See the block comment above: for each single-set insn whose
   destination is an allocno'd pseudo and where another allocno'd
   pseudo dies (REG_DEAD) without conflicting with the destination,
   merge the two allocnos' hard-reg preference sets in both
   directions.  NOTE(review): brace-only lines were dropped by the
   listing extraction; the copy-preference merge is conditional on the
   dying reg being the insn's source, while the plain and full
   preference merges below it are unconditional within the loop body.  */
 933 static void
 934 expand_preferences (void)
 936 rtx insn;
 937 rtx link;
 938 rtx set;
 940 /* We only try to handle the most common cases here. Most of the cases
 941 where this wins are reg-reg copies. */
 943 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
 944 if (INSN_P (insn)
 945 && (set = single_set (insn)) != 0
 946 && REG_P (SET_DEST (set))
 947 && reg_allocno[REGNO (SET_DEST (set))] >= 0)
 948 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
 949 if (REG_NOTE_KIND (link) == REG_DEAD
 950 && REG_P (XEXP (link, 0))
 951 && reg_allocno[REGNO (XEXP (link, 0))] >= 0
 952 && ! CONFLICTP (reg_allocno[REGNO (SET_DEST (set))],
 953 reg_allocno[REGNO (XEXP (link, 0))]))
 955 int a1 = reg_allocno[REGNO (SET_DEST (set))];
 956 int a2 = reg_allocno[REGNO (XEXP (link, 0))];
/* A reg-reg copy where the source dies: merge copy preferences too. */
 958 if (XEXP (link, 0) == SET_SRC (set))
 960 IOR_HARD_REG_SET (allocno[a1].hard_reg_copy_preferences,
 961 allocno[a2].hard_reg_copy_preferences);
 962 IOR_HARD_REG_SET (allocno[a2].hard_reg_copy_preferences,
 963 allocno[a1].hard_reg_copy_preferences);
 966 IOR_HARD_REG_SET (allocno[a1].hard_reg_preferences,
 967 allocno[a2].hard_reg_preferences);
 968 IOR_HARD_REG_SET (allocno[a2].hard_reg_preferences,
 969 allocno[a1].hard_reg_preferences);
 970 IOR_HARD_REG_SET (allocno[a1].hard_reg_full_preferences,
 971 allocno[a2].hard_reg_full_preferences);
 972 IOR_HARD_REG_SET (allocno[a2].hard_reg_full_preferences,
 973 allocno[a1].hard_reg_full_preferences);
/* Prune the preferences for global registers to exclude registers that cannot
   be used.

   Compute `regs_someone_prefers', which is a bitmask of the hard registers
   that are preferred by conflicting registers of lower priority.  If possible,
   we will avoid using these registers.  */

static void
prune_preferences (void)
{
  int i;
  int num;
  int *allocno_to_order = XNEWVEC (int, max_allocno);

  /* Scan from least important to most important.
     For each allocno, remove from preferences registers that cannot be used,
     either because of conflicts or register type.  Then compute all registers
     preferred by each lower-priority register that conflicts.  */

  for (i = max_allocno - 1; i >= 0; i--)
    {
      HARD_REG_SET temp;

      num = allocno_order[i];
      allocno_to_order[num] = i;
      COPY_HARD_REG_SET (temp, allocno[num].hard_reg_conflicts);

      /* An allocno crossing no calls can still use fixed-unusable regs'
	 complement; one crossing calls must also avoid call-used regs.  */
      if (allocno[num].calls_crossed == 0)
	IOR_HARD_REG_SET (temp, fixed_reg_set);
      else
	IOR_HARD_REG_SET (temp, call_used_reg_set);

      /* Also exclude everything outside the pseudo's preferred class.  */
      IOR_COMPL_HARD_REG_SET
	(temp,
	 reg_class_contents[(int) reg_preferred_class (allocno[num].reg)]);

      AND_COMPL_HARD_REG_SET (allocno[num].hard_reg_preferences, temp);
      AND_COMPL_HARD_REG_SET (allocno[num].hard_reg_copy_preferences, temp);
      AND_COMPL_HARD_REG_SET (allocno[num].hard_reg_full_preferences, temp);
    }

  for (i = max_allocno - 1; i >= 0; i--)
    {
      /* Merge in the preferences of lower-priority registers (they have
	 already been pruned).  If we also prefer some of those registers,
	 don't exclude them unless we are of a smaller size (in which case
	 we want to give the lower-priority allocno the first chance for
	 these registers).  */
      HARD_REG_SET temp, temp2;
      int allocno2;

      num = allocno_order[i];

      CLEAR_HARD_REG_SET (temp);
      CLEAR_HARD_REG_SET (temp2);

      EXECUTE_IF_SET_IN_ALLOCNO_SET (conflicts + num * allocno_row_words,
				     allocno2,
	{
	  if (allocno_to_order[allocno2] > i)
	    {
	      if (allocno[allocno2].size <= allocno[num].size)
		IOR_HARD_REG_SET (temp,
				  allocno[allocno2].hard_reg_full_preferences);
	      else
		IOR_HARD_REG_SET (temp2,
				  allocno[allocno2].hard_reg_full_preferences);
	    }
	});

      AND_COMPL_HARD_REG_SET (temp, allocno[num].hard_reg_full_preferences);
      IOR_HARD_REG_SET (temp, temp2);
      COPY_HARD_REG_SET (allocno[num].regs_someone_prefers, temp);
    }
  free (allocno_to_order);
}
/* Assign a hard register to allocno NUM; look for one that is the beginning
   of a long enough stretch of hard regs none of which conflicts with ALLOCNO.
   The registers marked in PREFREGS are tried first.

   LOSERS, if nonzero, is a HARD_REG_SET indicating registers that cannot
   be used for this allocation.

   If ALT_REGS_P is zero, consider only the preferred class of ALLOCNO's reg.
   Otherwise ignore that preferred class and use the alternate class.

   If ACCEPT_CALL_CLOBBERED is nonzero, accept a call-clobbered hard reg that
   will have to be saved and restored at calls.

   RETRYING is nonzero if this is called from retry_global_alloc.

   If we find one, record it in reg_renumber.
   If not, do nothing.  */

static void
find_reg (int num, HARD_REG_SET losers, int alt_regs_p,
	  int accept_call_clobbered, int retrying)
{
  int i, best_reg, pass;
  HARD_REG_SET used, used1, used2;

  enum reg_class class = (alt_regs_p
			  ? reg_alternate_class (allocno[num].reg)
			  : reg_preferred_class (allocno[num].reg));
  enum machine_mode mode = PSEUDO_REGNO_MODE (allocno[num].reg);

  /* USED1 accumulates every register that can never work for this
     allocno: fixed or call-clobbered regs as appropriate, globally
     excluded regs, the caller's LOSERS, regs outside CLASS, and this
     allocno's own conflicts.  */
  if (accept_call_clobbered)
    COPY_HARD_REG_SET (used1, call_fixed_reg_set);
  else if (allocno[num].calls_crossed == 0)
    COPY_HARD_REG_SET (used1, fixed_reg_set);
  else
    COPY_HARD_REG_SET (used1, call_used_reg_set);

  /* Some registers should not be allocated in global-alloc.  */
  IOR_HARD_REG_SET (used1, no_global_alloc_regs);
  if (losers)
    IOR_HARD_REG_SET (used1, losers);

  IOR_COMPL_HARD_REG_SET (used1, reg_class_contents[(int) class]);

  /* USED2 is USED1 before conflicts are added; it is consulted later
     when considering evicting local-alloc assignments.  */
  COPY_HARD_REG_SET (used2, used1);

  IOR_HARD_REG_SET (used1, allocno[num].hard_reg_conflicts);

#ifdef CANNOT_CHANGE_MODE_CLASS
  cannot_change_mode_set_regs (&used1, mode, allocno[num].reg);
#endif

  /* Try each hard reg to see if it fits.  Do this in two passes.
     In the first pass, skip registers that are preferred by some other pseudo
     to give it a better chance of getting one of those registers.  Only if
     we can't get a register when excluding those do we take one of them.
     However, we never allocate a register for the first time in pass 0.  */

  COPY_HARD_REG_SET (used, used1);
  IOR_COMPL_HARD_REG_SET (used, regs_used_so_far);
  IOR_HARD_REG_SET (used, allocno[num].regs_someone_prefers);

  best_reg = -1;
  for (i = FIRST_PSEUDO_REGISTER, pass = 0;
       pass <= 1 && i >= FIRST_PSEUDO_REGISTER;
       pass++)
    {
      if (pass == 1)
	COPY_HARD_REG_SET (used, used1);
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	{
#ifdef REG_ALLOC_ORDER
	  int regno = reg_alloc_order[i];
#else
	  int regno = i;
#endif
	  if (! TEST_HARD_REG_BIT (used, regno)
	      && HARD_REGNO_MODE_OK (regno, mode)
	      && (allocno[num].calls_crossed == 0
		  || accept_call_clobbered
		  || ! HARD_REGNO_CALL_PART_CLOBBERED (regno, mode)))
	    {
	      /* Multi-word values need hard_regno_nregs consecutive free
		 regs starting at REGNO.  */
	      int j;
	      int lim = regno + hard_regno_nregs[regno][mode];
	      for (j = regno + 1;
		   (j < lim
		    && ! TEST_HARD_REG_BIT (used, j));
		   j++);
	      if (j == lim)
		{
		  best_reg = regno;
		  break;
		}
#ifndef REG_ALLOC_ORDER
	      i = j;		/* Skip starting points we know will lose */
#endif
	    }
	}
    }

  /* See if there is a preferred register with the same class as the register
     we allocated above.  Making this restriction prevents register
     preferencing from creating worse register allocation.

     Remove from the preferred registers and conflicting registers.  Note that
     additional conflicts may have been added after `prune_preferences' was
     called.

     First do this for those register with copy preferences, then all
     preferred registers.  */

  AND_COMPL_HARD_REG_SET (allocno[num].hard_reg_copy_preferences, used);
  GO_IF_HARD_REG_SUBSET (allocno[num].hard_reg_copy_preferences,
			 reg_class_contents[(int) NO_REGS], no_copy_prefs);

  if (best_reg >= 0)
    {
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if (TEST_HARD_REG_BIT (allocno[num].hard_reg_copy_preferences, i)
	    && HARD_REGNO_MODE_OK (i, mode)
	    && (allocno[num].calls_crossed == 0
		|| accept_call_clobbered
		|| ! HARD_REGNO_CALL_PART_CLOBBERED (i, mode))
	    && (REGNO_REG_CLASS (i) == REGNO_REG_CLASS (best_reg)
		|| reg_class_subset_p (REGNO_REG_CLASS (i),
				       REGNO_REG_CLASS (best_reg))
		|| reg_class_subset_p (REGNO_REG_CLASS (best_reg),
				       REGNO_REG_CLASS (i))))
	  {
	    int j;
	    int lim = i + hard_regno_nregs[i][mode];
	    /* Each reg of the span must be free and class-compatible with
	       the corresponding reg of the current BEST_REG span.  */
	    for (j = i + 1;
		 (j < lim
		  && ! TEST_HARD_REG_BIT (used, j)
		  && (REGNO_REG_CLASS (j)
		      == REGNO_REG_CLASS (best_reg + (j - i))
		      || reg_class_subset_p (REGNO_REG_CLASS (j),
					     REGNO_REG_CLASS (best_reg + (j - i)))
		      || reg_class_subset_p (REGNO_REG_CLASS (best_reg + (j - i)),
					     REGNO_REG_CLASS (j))));
		 j++);
	    if (j == lim)
	      {
		best_reg = i;
		/* A copy preference is the strongest kind; skip the
		   plain-preference scan entirely.  */
		goto no_prefs;
	      }
	  }
    }
 no_copy_prefs:

  AND_COMPL_HARD_REG_SET (allocno[num].hard_reg_preferences, used);
  GO_IF_HARD_REG_SUBSET (allocno[num].hard_reg_preferences,
			 reg_class_contents[(int) NO_REGS], no_prefs);

  if (best_reg >= 0)
    {
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if (TEST_HARD_REG_BIT (allocno[num].hard_reg_preferences, i)
	    && HARD_REGNO_MODE_OK (i, mode)
	    && (allocno[num].calls_crossed == 0
		|| accept_call_clobbered
		|| ! HARD_REGNO_CALL_PART_CLOBBERED (i, mode))
	    && (REGNO_REG_CLASS (i) == REGNO_REG_CLASS (best_reg)
		|| reg_class_subset_p (REGNO_REG_CLASS (i),
				       REGNO_REG_CLASS (best_reg))
		|| reg_class_subset_p (REGNO_REG_CLASS (best_reg),
				       REGNO_REG_CLASS (i))))
	  {
	    int j;
	    int lim = i + hard_regno_nregs[i][mode];
	    for (j = i + 1;
		 (j < lim
		  && ! TEST_HARD_REG_BIT (used, j)
		  && (REGNO_REG_CLASS (j)
		      == REGNO_REG_CLASS (best_reg + (j - i))
		      || reg_class_subset_p (REGNO_REG_CLASS (j),
					     REGNO_REG_CLASS (best_reg + (j - i)))
		      || reg_class_subset_p (REGNO_REG_CLASS (best_reg + (j - i)),
					     REGNO_REG_CLASS (j))));
		 j++);
	    if (j == lim)
	      {
		best_reg = i;
		break;
	      }
	  }
    }
 no_prefs:

  /* If we haven't succeeded yet, try with caller-saves.
     We need not check to see if the current function has nonlocal
     labels because we don't put any pseudos that are live over calls in
     registers in that case.  */

  if (flag_caller_saves && best_reg < 0)
    {
      /* Did not find a register.  If it would be profitable to
	 allocate a call-clobbered register and save and restore it
	 around calls, do that.  Don't do this if it crosses any calls
	 that might throw.  */
      if (! accept_call_clobbered
	  && allocno[num].calls_crossed != 0
	  && allocno[num].throwing_calls_crossed == 0
	  && CALLER_SAVE_PROFITABLE (allocno[num].n_refs,
				     allocno[num].calls_crossed))
	{
	  HARD_REG_SET new_losers;
	  if (! losers)
	    CLEAR_HARD_REG_SET (new_losers);
	  else
	    COPY_HARD_REG_SET (new_losers, losers);

	  IOR_HARD_REG_SET(new_losers, losing_caller_save_reg_set);
	  /* Recurse, this time accepting call-clobbered regs.  */
	  find_reg (num, new_losers, alt_regs_p, 1, retrying);
	  if (reg_renumber[allocno[num].reg] >= 0)
	    {
	      caller_save_needed = 1;
	      return;
	    }
	}
    }

  /* If we haven't succeeded yet,
     see if some hard reg that conflicts with us
     was utilized poorly by local-alloc.
     If so, kick out the regs that were put there by local-alloc
     so we can use it instead.  */
  if (best_reg < 0 && !retrying
      /* Let's not bother with multi-reg allocnos.  */
      && allocno[num].size == 1
      && REG_BASIC_BLOCK (allocno[num].reg) == REG_BLOCK_GLOBAL)
    {
      /* Count from the end, to find the least-used ones first.  */
      for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
	{
#ifdef REG_ALLOC_ORDER
	  int regno = reg_alloc_order[i];
#else
	  int regno = i;
#endif

	  if (local_reg_n_refs[regno] != 0
	      /* Don't use a reg no good for this pseudo.  */
	      && ! TEST_HARD_REG_BIT (used2, regno)
	      && HARD_REGNO_MODE_OK (regno, mode)
	      /* The code below assumes that we need only a single
		 register, but the check of allocno[num].size above
		 was not enough.  Sometimes we need more than one
		 register for a single-word value.  */
	      && hard_regno_nregs[regno][mode] == 1
	      && (allocno[num].calls_crossed == 0
		  || accept_call_clobbered
		  || ! HARD_REGNO_CALL_PART_CLOBBERED (regno, mode))
#ifdef CANNOT_CHANGE_MODE_CLASS
	      && ! invalid_mode_change_p (regno, REGNO_REG_CLASS (regno),
					  mode)
#endif
#ifdef STACK_REGS
	      && (!allocno[num].no_stack_reg
		  || regno < FIRST_STACK_REG || regno > LAST_STACK_REG)
#endif
	      )
	    {
	      /* We explicitly evaluate the divide results into temporary
		 variables so as to avoid excess precision problems that occur
		 on an i386-unknown-sysv4.2 (unixware) host.  */

	      double tmp1 = ((double) local_reg_freq[regno] * local_reg_n_refs[regno]
			     / local_reg_live_length[regno]);
	      double tmp2 = ((double) allocno[num].freq * allocno[num].n_refs
			     / allocno[num].live_length);

	      if (tmp1 < tmp2)
		{
		  /* Hard reg REGNO was used less in total by local regs
		     than it would be used by this one allocno!  */
		  int k;
		  if (dump_file)
		    {
		      fprintf (dump_file, "Regno %d better for global %d, ",
			       regno, allocno[num].reg);
		      fprintf (dump_file, "fr:%d, ll:%d, nr:%d ",
			       allocno[num].freq, allocno[num].live_length,
			       allocno[num].n_refs);
		      fprintf (dump_file, "(was: fr:%d, ll:%d, nr:%d)\n",
			       local_reg_freq[regno],
			       local_reg_live_length[regno],
			       local_reg_n_refs[regno]);
		    }

		  /* Evict every local pseudo whose assignment overlaps
		     REGNO; it will live on the stack instead.  */
		  for (k = 0; k < max_regno; k++)
		    if (reg_renumber[k] >= 0)
		      {
			int r = reg_renumber[k];
			int endregno
			  = r + hard_regno_nregs[r][PSEUDO_REGNO_MODE (k)];

			if (regno >= r && regno < endregno)
			  {
			    if (dump_file)
			      fprintf (dump_file,
				       "Local Reg %d now on stack\n", k);
			    reg_renumber[k] = -1;
			  }
		      }

		  best_reg = regno;
		  break;
		}
	    }
	}
    }

  /* Did we find a register?  */

  if (best_reg >= 0)
    {
      int lim, j;
      HARD_REG_SET this_reg;

      /* Yes.  Record it as the hard register of this pseudo-reg.  */
      reg_renumber[allocno[num].reg] = best_reg;
      /* Also of any pseudo-regs that share with it.  */
      if (reg_may_share[allocno[num].reg])
	for (j = FIRST_PSEUDO_REGISTER; j < max_regno; j++)
	  if (reg_allocno[j] == num)
	    reg_renumber[j] = best_reg;

      /* Make a set of the hard regs being allocated.  */
      CLEAR_HARD_REG_SET (this_reg);
      lim = best_reg + hard_regno_nregs[best_reg][mode];
      for (j = best_reg; j < lim; j++)
	{
	  SET_HARD_REG_BIT (this_reg, j);
	  SET_HARD_REG_BIT (regs_used_so_far, j);
	  /* This is no longer a reg used just by local regs.  */
	  local_reg_n_refs[j] = 0;
	  local_reg_freq[j] = 0;
	}
      /* For each other pseudo-reg conflicting with this one,
	 mark it as conflicting with the hard regs this one occupies.  */
      lim = num;
      EXECUTE_IF_SET_IN_ALLOCNO_SET (conflicts + lim * allocno_row_words, j,
	{
	  IOR_HARD_REG_SET (allocno[j].hard_reg_conflicts, this_reg);
	});
    }
}
/* Called from `reload' to look for a hard reg to put pseudo reg REGNO in.
   Perhaps it had previously seemed not worth a hard reg,
   or perhaps its old hard reg has been commandeered for reloads.
   FORBIDDEN_REGS indicates certain hard regs that may not be used, even if
   they do not appear to be allocated.
   If FORBIDDEN_REGS is zero, no regs are forbidden.  */

void
retry_global_alloc (int regno, HARD_REG_SET forbidden_regs)
{
  int alloc_no = reg_allocno[regno];
  if (alloc_no >= 0)
    {
      /* If we have more than one register class,
	 first try allocating in the class that is cheapest
	 for this pseudo-reg.  If that fails, try any reg.  */
      if (N_REG_CLASSES > 1)
	find_reg (alloc_no, forbidden_regs, 0, 0, 1);
      if (reg_renumber[regno] < 0
	  && reg_alternate_class (regno) != NO_REGS)
	find_reg (alloc_no, forbidden_regs, 1, 0, 1);

      /* If we found a register, modify the RTL for the register to
	 show the hard register, and mark that register live.  */
      if (reg_renumber[regno] >= 0)
	{
	  REGNO (regno_reg_rtx[regno]) = reg_renumber[regno];
	  mark_home_live (regno);
	}
    }
}
/* Record a conflict between register REGNO
   and everything currently live.
   REGNO must not be a pseudo reg that was allocated
   by local_alloc; such numbers must be translated through
   reg_renumber before calling here.  */

static void
record_one_conflict (int regno)
{
  int j;

  if (regno < FIRST_PSEUDO_REGISTER)
    /* When a hard register becomes live,
       record conflicts with live pseudo regs.  */
    EXECUTE_IF_SET_IN_ALLOCNO_SET (allocnos_live, j,
      {
	SET_HARD_REG_BIT (allocno[j].hard_reg_conflicts, regno);
      });
  else
    /* When a pseudo-register becomes live,
       record conflicts first with hard regs,
       then with other pseudo regs.  */
    {
      int ialloc = reg_allocno[regno];
      int ialloc_prod = ialloc * allocno_row_words;

      IOR_HARD_REG_SET (allocno[ialloc].hard_reg_conflicts, hard_regs_live);
      /* OR the live-allocno bit row into this allocno's conflict row.  */
      for (j = allocno_row_words - 1; j >= 0; j--)
	conflicts[ialloc_prod + j] |= allocnos_live[j];
    }
}
1465 /* Record all allocnos currently live as conflicting
1466 with all hard regs currently live.
1468 ALLOCNO_VEC is a vector of LEN allocnos, all allocnos that
1469 are currently live. Their bits are also flagged in allocnos_live. */
1471 static void
1472 record_conflicts (int *allocno_vec, int len)
1474 while (--len >= 0)
1475 IOR_HARD_REG_SET (allocno[allocno_vec[len]].hard_reg_conflicts,
1476 hard_regs_live);
/* If CONFLICTP (i, j) is true, make sure CONFLICTP (j, i) is also true.
   The conflict matrix is a bit matrix of allocno_row_words INT_TYPE words
   per row; this walks every set bit (i, j) and sets the transposed bit.  */
static void
mirror_conflicts (void)
{
  int i, j;
  int rw = allocno_row_words;
  int rwb = rw * INT_BITS;		/* words to step one full bit-row */
  INT_TYPE *p = conflicts;		/* linear scan over all rows */
  INT_TYPE *q0 = conflicts, *q1, *q2;	/* column position for bit I */
  unsigned INT_TYPE mask;		/* bit within a word for allocno I */

  for (i = max_allocno - 1, mask = 1; i >= 0; i--, mask <<= 1)
    {
      /* MASK wrapped around: advance to the next word in the row.  */
      if (! mask)
	{
	  mask = 1;
	  q0++;
	}
      for (j = allocno_row_words - 1, q1 = q0; j >= 0; j--, q1 += rwb)
	{
	  unsigned INT_TYPE word;

	  /* For each set bit in row I's word, set bit I in the
	     corresponding row (the transpose entry).  */
	  for (word = (unsigned INT_TYPE) *p++, q2 = q1; word;
	       word >>= 1, q2 += rw)
	    {
	      if (word & 1)
		*q2 |= mask;
	    }
	}
    }
}
/* Handle the case where REG is set by the insn being scanned,
   during the forward scan to accumulate conflicts.
   Store a 1 in regs_live or allocnos_live for this register, record how many
   consecutive hardware registers it actually needs,
   and record a conflict with all other registers already live.

   Note that even if REG does not remain alive after this insn,
   we must mark it here as live, to ensure a conflict between
   REG and any other regs set in this insn that really do live.
   This is because those other regs could be considered after this.

   REG might actually be something other than a register;
   if so, we do nothing.

   SETTER is 0 if this register was modified by an auto-increment (i.e.,
   a REG_INC note was found for it).  */

static void
mark_reg_store (rtx reg, rtx setter, void *data ATTRIBUTE_UNUSED)
{
  int regno;

  if (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);

  if (!REG_P (reg))
    return;

  /* Remember it so the caller can later look for REG_UNUSED notes.  */
  regs_set[n_regs_set++] = reg;

  /* A real SET (not a CLOBBER, not an auto-inc) may establish a
     hard-register preference from its source.  */
  if (setter && GET_CODE (setter) != CLOBBER)
    set_preference (reg, SET_SRC (setter));

  regno = REGNO (reg);

  /* Either this is one of the max_allocno pseudo regs not allocated,
     or it is or has a hardware reg.  First handle the pseudo-regs.  */
  if (regno >= FIRST_PSEUDO_REGISTER)
    {
      if (reg_allocno[regno] >= 0)
	{
	  SET_ALLOCNO_LIVE (reg_allocno[regno]);
	  record_one_conflict (regno);
	}
    }

  if (reg_renumber[regno] >= 0)
    regno = reg_renumber[regno];

  /* Handle hardware regs (and pseudos allocated to hard regs).  */
  if (regno < FIRST_PSEUDO_REGISTER && ! fixed_regs[regno])
    {
      /* Mark and record a conflict for every hard reg the value spans.  */
      int last = regno + hard_regno_nregs[regno][GET_MODE (reg)];
      while (regno < last)
	{
	  record_one_conflict (regno);
	  SET_HARD_REG_BIT (hard_regs_live, regno);
	  regno++;
	}
    }
}
1573 /* Like mark_reg_store except notice just CLOBBERs; ignore SETs. */
1575 static void
1576 mark_reg_clobber (rtx reg, rtx setter, void *data)
1578 if (GET_CODE (setter) == CLOBBER)
1579 mark_reg_store (reg, setter, data);
/* Record that REG has conflicts with all the regs currently live.
   Do not mark REG itself as live.  */

static void
mark_reg_conflicts (rtx reg)
{
  int regno;

  if (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);

  if (!REG_P (reg))
    return;

  regno = REGNO (reg);

  /* Either this is one of the max_allocno pseudo regs not allocated,
     or it is or has a hardware reg.  First handle the pseudo-regs.  */
  if (regno >= FIRST_PSEUDO_REGISTER)
    {
      if (reg_allocno[regno] >= 0)
	record_one_conflict (regno);
    }

  if (reg_renumber[regno] >= 0)
    regno = reg_renumber[regno];

  /* Handle hardware regs (and pseudos allocated to hard regs).  */
  if (regno < FIRST_PSEUDO_REGISTER && ! fixed_regs[regno])
    {
      /* Record a conflict for every hard reg the value spans.  */
      int last = regno + hard_regno_nregs[regno][GET_MODE (reg)];
      while (regno < last)
	{
	  record_one_conflict (regno);
	  regno++;
	}
    }
}
/* Mark REG as being dead (following the insn being scanned now).
   Store a 0 in regs_live or allocnos_live for this register.  */

static void
mark_reg_death (rtx reg)
{
  int regno = REGNO (reg);

  /* Either this is one of the max_allocno pseudo regs not allocated,
     or it is a hardware reg.  First handle the pseudo-regs.  */
  if (regno >= FIRST_PSEUDO_REGISTER)
    {
      if (reg_allocno[regno] >= 0)
	CLEAR_ALLOCNO_LIVE (reg_allocno[regno]);
    }

  /* For pseudo reg, see if it has been assigned a hardware reg.  */
  if (reg_renumber[regno] >= 0)
    regno = reg_renumber[regno];

  /* Handle hardware regs (and pseudos allocated to hard regs).  */
  if (regno < FIRST_PSEUDO_REGISTER && ! fixed_regs[regno])
    {
      /* Pseudo regs already assigned hardware regs are treated
	 almost the same as explicit hardware regs.  */
      int last = regno + hard_regno_nregs[regno][GET_MODE (reg)];
      while (regno < last)
	{
	  CLEAR_HARD_REG_BIT (hard_regs_live, regno);
	  regno++;
	}
    }
}
1655 /* Mark hard reg REGNO as currently live, assuming machine mode MODE
1656 for the value stored in it. MODE determines how many consecutive
1657 registers are actually in use. Do not record conflicts;
1658 it is assumed that the caller will do that. */
1660 static void
1661 mark_reg_live_nc (int regno, enum machine_mode mode)
1663 int last = regno + hard_regno_nregs[regno][mode];
1664 while (regno < last)
1666 SET_HARD_REG_BIT (hard_regs_live, regno);
1667 regno++;
/* Try to set a preference for an allocno to a hard register.
   We are passed DEST and SRC which are the operands of a SET.  It is known
   that SRC is a register.  If SRC or the first operand of SRC is a register,
   try to set a preference.  If one of the two is a hard register and the other
   is a pseudo-register, mark the preference.

   Note that we are not as aggressive as local-alloc in trying to tie a
   pseudo-register to a hard register.  */

static void
set_preference (rtx dest, rtx src)
{
  unsigned int src_regno, dest_regno;
  /* Amount to add to the hard regno for SRC, or subtract from that for DEST,
     to compensate for subregs in SRC or DEST.  */
  int offset = 0;
  unsigned int i;
  int copy = 1;

  /* If SRC is an operation (its first operand is an expression), look
     through it to that operand; the preference is then no longer a
     plain copy preference.  */
  if (GET_RTX_FORMAT (GET_CODE (src))[0] == 'e')
    src = XEXP (src, 0), copy = 0;

  /* Get the reg number for both SRC and DEST.
     If neither is a reg, give up.  */

  if (REG_P (src))
    src_regno = REGNO (src);
  else if (GET_CODE (src) == SUBREG && REG_P (SUBREG_REG (src)))
    {
      src_regno = REGNO (SUBREG_REG (src));

      if (REGNO (SUBREG_REG (src)) < FIRST_PSEUDO_REGISTER)
	offset += subreg_regno_offset (REGNO (SUBREG_REG (src)),
				       GET_MODE (SUBREG_REG (src)),
				       SUBREG_BYTE (src),
				       GET_MODE (src));
      else
	offset += (SUBREG_BYTE (src)
		   / REGMODE_NATURAL_SIZE (GET_MODE (src)));
    }
  else
    return;

  if (REG_P (dest))
    dest_regno = REGNO (dest);
  else if (GET_CODE (dest) == SUBREG && REG_P (SUBREG_REG (dest)))
    {
      dest_regno = REGNO (SUBREG_REG (dest));

      if (REGNO (SUBREG_REG (dest)) < FIRST_PSEUDO_REGISTER)
	offset -= subreg_regno_offset (REGNO (SUBREG_REG (dest)),
				       GET_MODE (SUBREG_REG (dest)),
				       SUBREG_BYTE (dest),
				       GET_MODE (dest));
      else
	offset -= (SUBREG_BYTE (dest)
		   / REGMODE_NATURAL_SIZE (GET_MODE (dest)));
    }
  else
    return;

  /* Convert either or both to hard reg numbers.  */

  if (reg_renumber[src_regno] >= 0)
    src_regno = reg_renumber[src_regno];

  if (reg_renumber[dest_regno] >= 0)
    dest_regno = reg_renumber[dest_regno];

  /* Now if one is a hard reg and the other is a global pseudo
     then give the other a preference.  */

  if (dest_regno < FIRST_PSEUDO_REGISTER && src_regno >= FIRST_PSEUDO_REGISTER
      && reg_allocno[src_regno] >= 0)
    {
      dest_regno -= offset;
      if (dest_regno < FIRST_PSEUDO_REGISTER)
	{
	  if (copy)
	    SET_REGBIT (hard_reg_copy_preferences,
			reg_allocno[src_regno], dest_regno);

	  SET_REGBIT (hard_reg_preferences,
		      reg_allocno[src_regno], dest_regno);
	  /* Full preferences cover every hard reg the value spans.  */
	  for (i = dest_regno;
	       i < dest_regno + hard_regno_nregs[dest_regno][GET_MODE (dest)];
	       i++)
	    SET_REGBIT (hard_reg_full_preferences, reg_allocno[src_regno], i);
	}
    }

  if (src_regno < FIRST_PSEUDO_REGISTER && dest_regno >= FIRST_PSEUDO_REGISTER
      && reg_allocno[dest_regno] >= 0)
    {
      src_regno += offset;
      if (src_regno < FIRST_PSEUDO_REGISTER)
	{
	  if (copy)
	    SET_REGBIT (hard_reg_copy_preferences,
			reg_allocno[dest_regno], src_regno);

	  SET_REGBIT (hard_reg_preferences,
		      reg_allocno[dest_regno], src_regno);
	  /* Full preferences cover every hard reg the value spans.  */
	  for (i = src_regno;
	       i < src_regno + hard_regno_nregs[src_regno][GET_MODE (src)];
	       i++)
	    SET_REGBIT (hard_reg_full_preferences, reg_allocno[dest_regno], i);
	}
    }
}
1782 /* Indicate that hard register number FROM was eliminated and replaced with
1783 an offset from hard register number TO. The status of hard registers live
1784 at the start of a basic block is updated by replacing a use of FROM with
1785 a use of TO. */
1787 void
1788 mark_elimination (int from, int to)
1790 basic_block bb;
1792 FOR_EACH_BB (bb)
1794 regset r = bb->il.rtl->global_live_at_start;
1795 if (REGNO_REG_SET_P (r, from))
1797 CLEAR_REGNO_REG_SET (r, from);
1798 SET_REGNO_REG_SET (r, to);
/* Used for communication between the following functions.  Holds the
   current life information.  */
static regset live_relevant_regs;

/* Record in live_relevant_regs and REGS_SET that register REG became live.
   This is called via note_stores.  */
static void
reg_becomes_live (rtx reg, rtx setter ATTRIBUTE_UNUSED, void *regs_set)
{
  int regno;

  if (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);

  if (!REG_P (reg))
    return;

  regno = REGNO (reg);
  if (regno < FIRST_PSEUDO_REGISTER)
    {
      /* Mark every hard reg the value spans; only non-fixed regs are
	 interesting to the caller's REGS_SET.  */
      int nregs = hard_regno_nregs[regno][GET_MODE (reg)];
      while (nregs-- > 0)
	{
	  SET_REGNO_REG_SET (live_relevant_regs, regno);
	  if (! fixed_regs[regno])
	    SET_REGNO_REG_SET ((regset) regs_set, regno);
	  regno++;
	}
    }
  else if (reg_renumber[regno] >= 0)
    {
      /* A pseudo that got a hard reg is tracked under its pseudo number.  */
      SET_REGNO_REG_SET (live_relevant_regs, regno);
      SET_REGNO_REG_SET ((regset) regs_set, regno);
    }
}
/* Record in live_relevant_regs that register REGNO died, and record the
   death in CHAIN->dead_or_set.  MODE gives the span for hard registers.  */
static void
reg_dies (int regno, enum machine_mode mode, struct insn_chain *chain)
{
  if (regno < FIRST_PSEUDO_REGISTER)
    {
      /* Clear every hard reg the value spans; only non-fixed regs are
	 recorded as dead-or-set.  */
      int nregs = hard_regno_nregs[regno][mode];
      while (nregs-- > 0)
	{
	  CLEAR_REGNO_REG_SET (live_relevant_regs, regno);
	  if (! fixed_regs[regno])
	    SET_REGNO_REG_SET (&chain->dead_or_set, regno);
	  regno++;
	}
    }
  else
    {
      CLEAR_REGNO_REG_SET (live_relevant_regs, regno);
      /* Only pseudos that got a hard reg matter to reload here.  */
      if (reg_renumber[regno] >= 0)
	SET_REGNO_REG_SET (&chain->dead_or_set, regno);
    }
}
/* Walk the insns of the current function starting at FIRST and build
   reload_insn_chain, recording register life information (live_throughout
   and dead_or_set) for each chain element.  */
void
build_insn_chain (rtx first)
{
  struct insn_chain **p = &reload_insn_chain;
  struct insn_chain *prev = 0;
  basic_block b = ENTRY_BLOCK_PTR->next_bb;

  live_relevant_regs = ALLOC_REG_SET (&reg_obstack);

  for (; first; first = NEXT_INSN (first))
    {
      struct insn_chain *c;

      /* At a block boundary, reseed liveness from the block's
	 global_live_at_start, keeping only non-eliminable hard regs and
	 pseudos that received a hard reg.  */
      if (first == BB_HEAD (b))
	{
	  unsigned i;
	  bitmap_iterator bi;

	  CLEAR_REG_SET (live_relevant_regs);

	  EXECUTE_IF_SET_IN_BITMAP (b->il.rtl->global_live_at_start, 0, i, bi)
	    {
	      if (i < FIRST_PSEUDO_REGISTER
		  ? ! TEST_HARD_REG_BIT (eliminable_regset, i)
		  : reg_renumber[i] >= 0)
		SET_REGNO_REG_SET (live_relevant_regs, i);
	    }
	}

      if (!NOTE_P (first) && !BARRIER_P (first))
	{
	  c = new_insn_chain ();
	  c->prev = prev;
	  prev = c;
	  *p = c;
	  p = &c->next;
	  c->insn = first;
	  c->block = b->index;

	  if (INSN_P (first))
	    {
	      rtx link;

	      /* Mark the death of everything that dies in this instruction.  */

	      for (link = REG_NOTES (first); link; link = XEXP (link, 1))
		if (REG_NOTE_KIND (link) == REG_DEAD
		    && REG_P (XEXP (link, 0)))
		  reg_dies (REGNO (XEXP (link, 0)), GET_MODE (XEXP (link, 0)),
			    c);

	      /* live_throughout is sampled after deaths but before births:
		 regs live across (not merely into or out of) the insn.  */
	      COPY_REG_SET (&c->live_throughout, live_relevant_regs);

	      /* Mark everything born in this instruction as live.  */

	      note_stores (PATTERN (first), reg_becomes_live,
			   &c->dead_or_set);
	    }
	  else
	    COPY_REG_SET (&c->live_throughout, live_relevant_regs);

	  if (INSN_P (first))
	    {
	      rtx link;

	      /* Mark anything that is set in this insn and then unused as dying.  */

	      for (link = REG_NOTES (first); link; link = XEXP (link, 1))
		if (REG_NOTE_KIND (link) == REG_UNUSED
		    && REG_P (XEXP (link, 0)))
		  reg_dies (REGNO (XEXP (link, 0)), GET_MODE (XEXP (link, 0)),
			    c);
	    }
	}

      if (first == BB_END (b))
	b = b->next_bb;

      /* Stop after we pass the end of the last basic block.  Verify that
	 no real insns are after the end of the last basic block.

	 We may want to reorganize the loop somewhat since this test should
	 always be the right exit test.  Allow an ADDR_VEC or ADDR_DIF_VEC if
	 the previous real insn is a JUMP_INSN.  */
      if (b == EXIT_BLOCK_PTR)
	{
#ifdef ENABLE_CHECKING
	  for (first = NEXT_INSN (first); first; first = NEXT_INSN (first))
	    gcc_assert (!INSN_P (first)
			|| GET_CODE (PATTERN (first)) == USE
			|| ((GET_CODE (PATTERN (first)) == ADDR_VEC
			     || GET_CODE (PATTERN (first)) == ADDR_DIFF_VEC)
			    && prev_real_insn (first) != 0
			    && JUMP_P (prev_real_insn (first))));
#endif
	  break;
	}
    }
  FREE_REG_SET (live_relevant_regs);
  *p = 0;
}
/* Print debugging trace information if -dg switch is given,
   showing the information on which the allocation decisions are based.  */

static void
dump_conflicts (FILE *file)
{
  int i;
  int has_preferences;
  int nregs;
  nregs = 0;
  /* Count allocnos still needing a hard reg (not yet renumbered).  */
  for (i = 0; i < max_allocno; i++)
    {
      if (reg_renumber[allocno[allocno_order[i]].reg] >= 0)
	continue;
      nregs++;
    }
  fprintf (file, ";; %d regs to allocate:", nregs);
  for (i = 0; i < max_allocno; i++)
    {
      int j;
      if (reg_renumber[allocno[allocno_order[i]].reg] >= 0)
	continue;
      fprintf (file, " %d", allocno[allocno_order[i]].reg);
      /* List the other pseudos sharing this allocno, joined with '+'.  */
      for (j = 0; j < max_regno; j++)
	if (reg_allocno[j] == allocno_order[i]
	    && j != allocno[allocno_order[i]].reg)
	  fprintf (file, "+%d", j);
      if (allocno[allocno_order[i]].size != 1)
	fprintf (file, " (%d)", allocno[allocno_order[i]].size);
    }
  fprintf (file, "\n");

  /* Per-allocno conflict lists: first conflicting allocnos' pseudos,
     then conflicting hard regs, then (if any) preferred hard regs.  */
  for (i = 0; i < max_allocno; i++)
    {
      int j;
      fprintf (file, ";; %d conflicts:", allocno[i].reg);
      for (j = 0; j < max_allocno; j++)
	if (CONFLICTP (j, i))
	  fprintf (file, " %d", allocno[j].reg);
      for (j = 0; j < FIRST_PSEUDO_REGISTER; j++)
	if (TEST_HARD_REG_BIT (allocno[i].hard_reg_conflicts, j))
	  fprintf (file, " %d", j);
      fprintf (file, "\n");

      has_preferences = 0;
      for (j = 0; j < FIRST_PSEUDO_REGISTER; j++)
	if (TEST_HARD_REG_BIT (allocno[i].hard_reg_preferences, j))
	  has_preferences = 1;

      if (! has_preferences)
	continue;
      fprintf (file, ";; %d preferences:", allocno[i].reg);
      for (j = 0; j < FIRST_PSEUDO_REGISTER; j++)
	if (TEST_HARD_REG_BIT (allocno[i].hard_reg_preferences, j))
	  fprintf (file, " %d", j);
      fprintf (file, "\n");
    }
  fprintf (file, "\n");
}
2026 void
2027 dump_global_regs (FILE *file)
2029 int i, j;
2031 fprintf (file, ";; Register dispositions:\n");
2032 for (i = FIRST_PSEUDO_REGISTER, j = 0; i < max_regno; i++)
2033 if (reg_renumber[i] >= 0)
2035 fprintf (file, "%d in %d ", i, reg_renumber[i]);
2036 if (++j % 6 == 0)
2037 fprintf (file, "\n");
2040 fprintf (file, "\n\n;; Hard regs used: ");
2041 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2042 if (regs_ever_live[i])
2043 fprintf (file, " %d", i);
2044 fprintf (file, "\n\n");
2049 /* This page contains code to make live information more accurate.
2050 The accurate register liveness at program point P means:
2051 o there is a path from P to usage of the register and the
2052 register is not redefined or killed on the path.
2053 o register at P is partially available, i.e. there is a path from
2054 a register definition to the point P and the register is not
2055 killed (clobbered) on the path
2057 The standard GCC live information means only the first condition.
2058 Without the partial availability, there will be more register
2059 conflicts and as a consequence worse register allocation. The
2060 typical example where the information can be different is a
2061 register initialized in the loop at the basic block preceding the
2062 loop in CFG. */
/* The following structure contains per-basic-block data flow
   information used to calculate partial availability of
   registers.  */

struct bb_info
{
  /* The basic block reverse post-order number.  */
  int rts_number;
  /* Registers used uninitialized in an insn in which there is an
     early clobbered register might get the same hard register.  */
  bitmap earlyclobber;
  /* Registers correspondingly killed (clobbered) and defined but not
     killed afterward in the basic block.  */
  bitmap killed, avloc;
  /* Registers partially available and living (in other words whose
     values were calculated and used) correspondingly at the start
     and end of the basic block.  */
  bitmap live_pavin, live_pavout;
};

/* Macros for accessing data flow information of basic blocks.  */

#define BB_INFO(BB) ((struct bb_info *) (BB)->aux)
#define BB_INFO_BY_INDEX(N) BB_INFO (BASIC_BLOCK(N))

/* Obstack on which all the bitmaps hanging off struct bb_info are
   allocated; released wholesale in free_bb_info.  */
static struct bitmap_obstack greg_obstack;
2089 /* The function allocates the info structures of each basic block. It
2090 also initialized LIVE_PAVIN and LIVE_PAVOUT as if all hard
2091 registers were partially available. */
2093 static void
2094 allocate_bb_info (void)
2096 int i;
2097 basic_block bb;
2098 struct bb_info *bb_info;
2099 bitmap init;
2101 alloc_aux_for_blocks (sizeof (struct bb_info));
2102 init = BITMAP_ALLOC (NULL);
2103 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2104 bitmap_set_bit (init, i);
2105 bitmap_obstack_initialize (&greg_obstack);
2106 FOR_EACH_BB (bb)
2108 bb_info = bb->aux;
2109 bb_info->earlyclobber = BITMAP_ALLOC (&greg_obstack);
2110 bb_info->avloc = BITMAP_ALLOC (&greg_obstack);
2111 bb_info->killed = BITMAP_ALLOC (&greg_obstack);
2112 bb_info->live_pavin = BITMAP_ALLOC (&greg_obstack);
2113 bb_info->live_pavout = BITMAP_ALLOC (&greg_obstack);
2114 bitmap_copy (bb_info->live_pavin, init);
2115 bitmap_copy (bb_info->live_pavout, init);
2117 BITMAP_FREE (init);
/* Free the allocated bb_info of all basic blocks: release all the
   bitmaps on greg_obstack first, then the aux structures that point
   at them.  */

static void
free_bb_info (void)
{
  bitmap_obstack_release (&greg_obstack);
  free_aux_for_blocks ();
}
2129 /* The function modifies local info for register REG being changed in
2130 SETTER. DATA is used to pass the current basic block info. */
2132 static void
2133 mark_reg_change (rtx reg, rtx setter, void *data)
2135 int regno;
2136 basic_block bb = data;
2137 struct bb_info *bb_info = BB_INFO (bb);
2139 if (GET_CODE (reg) == SUBREG)
2140 reg = SUBREG_REG (reg);
2142 if (!REG_P (reg))
2143 return;
2145 regno = REGNO (reg);
2146 bitmap_set_bit (bb_info->killed, regno);
2148 if (GET_CODE (setter) != CLOBBER)
2149 bitmap_set_bit (bb_info->avloc, regno);
2150 else
2151 bitmap_clear_bit (bb_info->avloc, regno);
2154 /* Classes of registers which could be early clobbered in the current
2155 insn. */
2157 static VEC(int,heap) *earlyclobber_regclass;
2159 /* This function finds and stores register classes that could be early
2160 clobbered in INSN. If any earlyclobber classes are found, the function
2161 returns TRUE, in all other cases it returns FALSE. */
2163 static bool
2164 check_earlyclobber (rtx insn)
2166 int opno;
2167 bool found = false;
2169 extract_insn (insn);
2171 VEC_truncate (int, earlyclobber_regclass, 0);
2172 for (opno = 0; opno < recog_data.n_operands; opno++)
2174 char c;
2175 bool amp_p;
2176 int i;
2177 enum reg_class class;
2178 const char *p = recog_data.constraints[opno];
2180 class = NO_REGS;
2181 amp_p = false;
2182 for (;;)
2184 c = *p;
2185 switch (c)
2187 case '=': case '+': case '?':
2188 case '#': case '!':
2189 case '*': case '%':
2190 case 'm': case '<': case '>': case 'V': case 'o':
2191 case 'E': case 'F': case 'G': case 'H':
2192 case 's': case 'i': case 'n':
2193 case 'I': case 'J': case 'K': case 'L':
2194 case 'M': case 'N': case 'O': case 'P':
2195 case 'X':
2196 case '0': case '1': case '2': case '3': case '4':
2197 case '5': case '6': case '7': case '8': case '9':
2198 /* These don't say anything we care about. */
2199 break;
2201 case '&':
2202 amp_p = true;
2203 break;
2204 case '\0':
2205 case ',':
2206 if (amp_p && class != NO_REGS)
2208 int rc;
2210 found = true;
2211 for (i = 0;
2212 VEC_iterate (int, earlyclobber_regclass, i, rc);
2213 i++)
2215 if (rc == (int) class)
2216 goto found_rc;
2219 /* We use VEC_quick_push here because
2220 earlyclobber_regclass holds no more than
2221 N_REG_CLASSES elements. */
2222 VEC_quick_push (int, earlyclobber_regclass, (int) class);
2223 found_rc:
2227 amp_p = false;
2228 class = NO_REGS;
2229 break;
2231 case 'r':
2232 class = GENERAL_REGS;
2233 break;
2235 default:
2236 class = REG_CLASS_FROM_CONSTRAINT (c, p);
2237 break;
2239 if (c == '\0')
2240 break;
2241 p += CONSTRAINT_LEN (c, p);
2245 return found;
2248 /* The function checks that pseudo-register *X has a class
2249 intersecting with the class of pseudo-register could be early
2250 clobbered in the same insn.
2251 This function is a no-op if earlyclobber_regclass is empty. */
2253 static int
2254 mark_reg_use_for_earlyclobber (rtx *x, void *data ATTRIBUTE_UNUSED)
2256 enum reg_class pref_class, alt_class;
2257 int i, regno;
2258 basic_block bb = data;
2259 struct bb_info *bb_info = BB_INFO (bb);
2261 if (REG_P (*x) && REGNO (*x) >= FIRST_PSEUDO_REGISTER)
2263 int rc;
2265 regno = REGNO (*x);
2266 if (bitmap_bit_p (bb_info->killed, regno)
2267 || bitmap_bit_p (bb_info->avloc, regno))
2268 return 0;
2269 pref_class = reg_preferred_class (regno);
2270 alt_class = reg_alternate_class (regno);
2271 for (i = 0; VEC_iterate (int, earlyclobber_regclass, i, rc); i++)
2273 if (reg_classes_intersect_p (rc, pref_class)
2274 || (rc != NO_REGS
2275 && reg_classes_intersect_p (rc, alt_class)))
2277 bitmap_set_bit (bb_info->earlyclobber, regno);
2278 break;
2282 return 0;
/* note_uses callback: walk every sub-rtx of *X with
   mark_reg_use_for_earlyclobber, passing the basic block through in
   DATA.  */

static void
mark_reg_use_for_earlyclobber_1 (rtx *x, void *data)
{
  for_each_rtx (x, mark_reg_use_for_earlyclobber, data);
}
2294 /* The function calculates local info for each basic block. */
2296 static void
2297 calculate_local_reg_bb_info (void)
2299 basic_block bb;
2300 rtx insn, bound;
2302 /* We know that earlyclobber_regclass holds no more than
2303 N_REG_CLASSES elements. See check_earlyclobber. */
2304 earlyclobber_regclass = VEC_alloc (int, heap, N_REG_CLASSES);
2305 FOR_EACH_BB (bb)
2307 bound = NEXT_INSN (BB_END (bb));
2308 for (insn = BB_HEAD (bb); insn != bound; insn = NEXT_INSN (insn))
2309 if (INSN_P (insn))
2311 note_stores (PATTERN (insn), mark_reg_change, bb);
2312 if (check_earlyclobber (insn))
2313 note_uses (&PATTERN (insn), mark_reg_use_for_earlyclobber_1, bb);
2316 VEC_free (int, heap, earlyclobber_regclass);
2319 /* The function sets up reverse post-order number of each basic
2320 block. */
2322 static void
2323 set_up_bb_rts_numbers (void)
2325 int i;
2326 int *rts_order;
2328 rts_order = XNEWVEC (int, n_basic_blocks - NUM_FIXED_BLOCKS);
2329 post_order_compute (rts_order, false);
2330 for (i = 0; i < n_basic_blocks - NUM_FIXED_BLOCKS; i++)
2331 BB_INFO_BY_INDEX (rts_order [i])->rts_number = i;
2332 free (rts_order);
2335 /* Compare function for sorting blocks in reverse postorder. */
2337 static int
2338 rpost_cmp (const void *bb1, const void *bb2)
2340 basic_block b1 = *(basic_block *) bb1, b2 = *(basic_block *) bb2;
2342 return BB_INFO (b2)->rts_number - BB_INFO (b1)->rts_number;
2345 /* Temporary bitmap used for live_pavin, live_pavout calculation. */
2346 static bitmap temp_bitmap;
2348 /* The function calculates partial register availability according to
2349 the following equations:
2351 bb.live_pavin
2352 = empty for entry block
2353 | union (live_pavout of predecessors) & global_live_at_start
2354 bb.live_pavout = union (bb.live_pavin - bb.killed, bb.avloc)
2355 & global_live_at_end */
2357 static void
2358 calculate_reg_pav (void)
2360 basic_block bb, succ;
2361 edge e;
2362 int i, nel;
2363 VEC(basic_block,heap) *bbs, *new_bbs, *temp;
2364 basic_block *bb_array;
2365 sbitmap wset;
2367 bbs = VEC_alloc (basic_block, heap, n_basic_blocks);
2368 new_bbs = VEC_alloc (basic_block, heap, n_basic_blocks);
2369 temp_bitmap = BITMAP_ALLOC (NULL);
2370 FOR_EACH_BB (bb)
2372 VEC_quick_push (basic_block, bbs, bb);
2374 wset = sbitmap_alloc (n_basic_blocks + 1);
2375 while (VEC_length (basic_block, bbs))
2377 bb_array = VEC_address (basic_block, bbs);
2378 nel = VEC_length (basic_block, bbs);
2379 qsort (bb_array, nel, sizeof (basic_block), rpost_cmp);
2380 sbitmap_zero (wset);
2381 for (i = 0; i < nel; i++)
2383 edge_iterator ei;
2384 struct bb_info *bb_info;
2385 bitmap bb_live_pavin, bb_live_pavout;
2387 bb = bb_array [i];
2388 bb_info = BB_INFO (bb);
2389 bb_live_pavin = bb_info->live_pavin;
2390 bb_live_pavout = bb_info->live_pavout;
2391 FOR_EACH_EDGE (e, ei, bb->preds)
2393 basic_block pred = e->src;
2395 if (pred->index != ENTRY_BLOCK)
2396 bitmap_ior_into (bb_live_pavin, BB_INFO (pred)->live_pavout);
2398 bitmap_and_into (bb_live_pavin, bb->il.rtl->global_live_at_start);
2399 bitmap_ior_and_compl (temp_bitmap, bb_info->avloc,
2400 bb_live_pavin, bb_info->killed);
2401 bitmap_and_into (temp_bitmap, bb->il.rtl->global_live_at_end);
2402 if (! bitmap_equal_p (temp_bitmap, bb_live_pavout))
2404 bitmap_copy (bb_live_pavout, temp_bitmap);
2405 FOR_EACH_EDGE (e, ei, bb->succs)
2407 succ = e->dest;
2408 if (succ->index != EXIT_BLOCK
2409 && !TEST_BIT (wset, succ->index))
2411 SET_BIT (wset, succ->index);
2412 VEC_quick_push (basic_block, new_bbs, succ);
2417 temp = bbs;
2418 bbs = new_bbs;
2419 new_bbs = temp;
2420 VEC_truncate (basic_block, new_bbs, 0);
2422 sbitmap_free (wset);
2423 BITMAP_FREE (temp_bitmap);
2424 VEC_free (basic_block, heap, new_bbs);
2425 VEC_free (basic_block, heap, bbs);
2428 /* The function modifies partial availability information for two
2429 special cases to prevent incorrect work of the subsequent passes
2430 with the accurate live information based on the partial
2431 availability. */
2433 static void
2434 modify_reg_pav (void)
2436 basic_block bb;
2437 struct bb_info *bb_info;
2438 #ifdef STACK_REGS
2439 int i;
2440 HARD_REG_SET zero, stack_hard_regs, used;
2441 bitmap stack_regs;
2443 CLEAR_HARD_REG_SET (zero);
2444 CLEAR_HARD_REG_SET (stack_hard_regs);
2445 for (i = FIRST_STACK_REG; i <= LAST_STACK_REG; i++)
2446 SET_HARD_REG_BIT(stack_hard_regs, i);
2447 stack_regs = BITMAP_ALLOC (&greg_obstack);
2448 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
2450 COPY_HARD_REG_SET (used, reg_class_contents[reg_preferred_class (i)]);
2451 IOR_HARD_REG_SET (used, reg_class_contents[reg_alternate_class (i)]);
2452 AND_HARD_REG_SET (used, stack_hard_regs);
2453 GO_IF_HARD_REG_EQUAL(used, zero, skip);
2454 bitmap_set_bit (stack_regs, i);
2455 skip:
2458 #endif
2459 FOR_EACH_BB (bb)
2461 bb_info = BB_INFO (bb);
2463 /* Reload can assign the same hard register to uninitialized
2464 pseudo-register and early clobbered pseudo-register in an
2465 insn if the pseudo-register is used first time in given BB
2466 and not lived at the BB start. To prevent this we don't
2467 change life information for such pseudo-registers. */
2468 bitmap_ior_into (bb_info->live_pavin, bb_info->earlyclobber);
2469 #ifdef STACK_REGS
2470 /* We can not use the same stack register for uninitialized
2471 pseudo-register and another living pseudo-register because if the
2472 uninitialized pseudo-register dies, subsequent pass reg-stack
2473 will be confused (it will believe that the other register
2474 dies). */
2475 bitmap_ior_into (bb_info->live_pavin, stack_regs);
2476 #endif
2478 #ifdef STACK_REGS
2479 BITMAP_FREE (stack_regs);
2480 #endif
2483 /* The following function makes live information more accurate by
2484 modifying global_live_at_start and global_live_at_end of basic
2485 blocks.
2487 The standard GCC life analysis permits registers to live
2488 uninitialized, for example:
2490 R is never used
2491 .....
2492 Loop:
2493 R is defined
2495 R is used.
2497 With normal life_analysis, R would be live before "Loop:".
2498 The result is that R causes many interferences that do not
2499 serve any purpose.
2501 After the function call a register lives at a program point
2502 only if it is initialized on a path from CFG entry to the
2503 program point. */
2505 static void
2506 make_accurate_live_analysis (void)
2508 basic_block bb;
2509 struct bb_info *bb_info;
2511 max_regno = max_reg_num ();
2512 compact_blocks ();
2513 allocate_bb_info ();
2514 calculate_local_reg_bb_info ();
2515 set_up_bb_rts_numbers ();
2516 calculate_reg_pav ();
2517 modify_reg_pav ();
2518 FOR_EACH_BB (bb)
2520 bb_info = BB_INFO (bb);
2522 bitmap_and_into (bb->il.rtl->global_live_at_start, bb_info->live_pavin);
2523 bitmap_and_into (bb->il.rtl->global_live_at_end, bb_info->live_pavout);
2525 free_bb_info ();
2527 /* Run old register allocator. Return TRUE if we must exit
2528 rest_of_compilation upon return. */
2529 static unsigned int
2530 rest_of_handle_global_alloc (void)
2532 bool failure;
2534 /* If optimizing, allocate remaining pseudo-regs. Do the reload
2535 pass fixing up any insns that are invalid. */
2537 if (optimize)
2538 failure = global_alloc ();
2539 else
2541 compute_regsets (regs_asm_clobbered, regs_ever_live,
2542 &eliminable_regset, &no_global_alloc_regs);
2543 build_insn_chain (get_insns ());
2544 failure = reload (get_insns (), 0);
2547 if (dump_enabled_p (pass_global_alloc.static_pass_number))
2549 timevar_push (TV_DUMP);
2550 dump_global_regs (dump_file);
2551 timevar_pop (TV_DUMP);
2554 gcc_assert (reload_completed || failure);
2555 reload_completed = !failure;
2556 return 0;
2559 struct tree_opt_pass pass_global_alloc =
2561 "greg", /* name */
2562 NULL, /* gate */
2563 rest_of_handle_global_alloc, /* execute */
2564 NULL, /* sub */
2565 NULL, /* next */
2566 0, /* static_pass_number */
2567 TV_GLOBAL_ALLOC, /* tv_id */
2568 0, /* properties_required */
2569 0, /* properties_provided */
2570 0, /* properties_destroyed */
2571 0, /* todo_flags_start */
2572 TODO_dump_func |
2573 TODO_ggc_collect, /* todo_flags_finish */
2574 'g' /* letter */