* g++.dg/tree-ssa/copyprop-1.C: Remove xfail.
[official-gcc.git] / gcc / global.c
blob90f6da877f20264a25dc63346468d00d95edc80b
1 /* Allocate registers for pseudo-registers that span basic blocks.
2 Copyright (C) 1987, 1988, 1991, 1994, 1996, 1997, 1998,
3 1999, 2000, 2002, 2003, 2004, 2005, 2006, 2007
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "machmode.h"
28 #include "hard-reg-set.h"
29 #include "rtl.h"
30 #include "tm_p.h"
31 #include "flags.h"
32 #include "regs.h"
33 #include "function.h"
34 #include "insn-config.h"
35 #include "recog.h"
36 #include "reload.h"
37 #include "output.h"
38 #include "toplev.h"
39 #include "tree-pass.h"
40 #include "timevar.h"
41 #include "df.h"
42 #include "vecprim.h"
43 #include "dbgcnt.h"
44 #include "ra.h"
46 /* This pass of the compiler performs global register allocation.
47 It assigns hard register numbers to all the pseudo registers
48 that were not handled in local_alloc. Assignments are recorded
49 in the vector reg_renumber, not by changing the rtl code.
50 (Such changes are made by final). The entry point is
51 the function global_alloc.
53 After allocation is complete, the reload pass is run as a subroutine
54 of this pass, so that when a pseudo reg loses its hard reg due to
55 spilling it is possible to make a second attempt to find a hard
56 reg for it. The reload pass is independent in other respects
57 and it is run even when stupid register allocation is in use.
59 1. Assign allocation-numbers (allocnos) to the pseudo-registers
60 still needing allocations and to the pseudo-registers currently
61 allocated by local-alloc which may be spilled by reload.
62 Set up tables reg_allocno and allocno_reg to map
63 reg numbers to allocnos and vice versa.
64 max_allocno gets the number of allocnos in use.
66 2. Allocate a max_allocno by max_allocno compressed triangular conflict
67 bit matrix (a triangular bit matrix with portions removed for which we
68 can guarantee there are no conflicts, example: two local pseudos that
69 live in different basic blocks) and clear it. This is called "conflict".
70 Note that for triangular bit matrices, there are two possible equations
71 for computing the bit number for two allocnos: LOW and HIGH (LOW < HIGH):
73 1) BITNUM = f(HIGH) + LOW, where
74 f(HIGH) = (HIGH * (HIGH - 1)) / 2
76 2) BITNUM = f(LOW) + HIGH, where
77 f(LOW) = LOW * (max_allocno - LOW) + (LOW * (LOW - 1)) / 2 - LOW - 1
79 We use the second (and less common) equation as this gives us better
80 cache locality for local allocnos that are live within the same basic
81 block. Also note that f(HIGH) and f(LOW) can be precalculated for all
82 values of HIGH and LOW, so all that is necessary to compute the bit
83 number for two allocnos LOW and HIGH is a load followed by an addition.
85 Allocate a max_allocno by FIRST_PSEUDO_REGISTER conflict matrix for
86 conflicts between allocnos and explicit hard register use (which
87 includes use of pseudo-registers allocated by local_alloc). This
88 is the hard_reg_conflicts inside each allocno.
90 3. For each basic block, walk backward through the block, recording
91 which pseudo-registers and which hardware registers are live.
92 Build the conflict matrix: one part records conflicts between pairs of
93 pseudo-registers, and another part records conflicts of pseudo-registers
versus hardware registers.
95 4. For each basic block, walk backward through the block, recording
96 the preferred hardware registers for each pseudo-register.
98 5. Sort a table of the allocnos into order of desirability of the variables.
100 6. Allocate the variables in that order; each if possible into
101 a preferred register, else into another register. */
/* A vector of the integers from 0 to max_allocno-1,
   sorted in the order of first-to-be-allocated first.  */

static int *allocno_order;

/* Set of registers that global-alloc isn't supposed to use.  */

static HARD_REG_SET no_global_alloc_regs;

/* Set of registers used so far.  */

static HARD_REG_SET regs_used_so_far;

/* Number of refs to each hard reg, as used by local alloc.
   It is zero for a reg that contains global pseudos or is explicitly used.  */

static int local_reg_n_refs[FIRST_PSEUDO_REGISTER];

/* Frequency of uses of given hard reg.  */
static int local_reg_freq[FIRST_PSEUDO_REGISTER];

/* Guess at live length of each hard reg, as used by local alloc.
   This is actually the sum of the live lengths of the specific regs.  */

static int local_reg_live_length[FIRST_PSEUDO_REGISTER];

/* Set to 1 a bit in a vector TABLE of HARD_REG_SETs, for vector
   element I, and hard register number J.  */

#define SET_REGBIT(TABLE, I, J)  SET_HARD_REG_BIT (allocno[I].TABLE, J)

/* This is turned off because it doesn't work right for DImode.
   (And it is only used for DImode, so the other cases are worthless.)
   The problem is that it isn't true that there is NO possibility of conflict;
   only that there is no conflict if the two pseudos get the exact same regs.
   If they were allocated with a partial overlap, there would be a conflict.
   We can't safely turn off the conflict unless we have another way to
   prevent the partial overlap.

   Idea: change hard_reg_conflicts so that instead of recording which
   hard regs the allocno may not overlap, it records where the allocno
   may not start.  Change both where it is used and where it is updated.
   Then there is a way to record that (reg:DI 108) may start at 10
   but not at 9 or 11.  There is still the question of how to record
   this semi-conflict between two pseudos.  */
#if 0
/* Reg pairs for which conflict after the current insn
   is inhibited by a REG_NO_CONFLICT note.
   If the table gets full, we ignore any other notes--that is conservative.  */
#define NUM_NO_CONFLICT_PAIRS 4
/* Number of pairs in use in this insn.  */
int n_no_conflict_pairs;
static struct { int allocno1, allocno2;}
  no_conflict_pairs[NUM_NO_CONFLICT_PAIRS];
#endif /* 0 */
159 /* Return true if *LOC contains an asm. */
161 static int
162 insn_contains_asm_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
164 if ( !*loc)
165 return 0;
166 if (GET_CODE (*loc) == ASM_OPERANDS)
167 return 1;
168 return 0;
172 /* Return true if INSN contains an ASM. */
174 static int
175 insn_contains_asm (rtx insn)
177 return for_each_rtx (&insn, insn_contains_asm_1, NULL);
/* Fill REGS_ASM_CLOBBERED (an array of FIRST_PSEUDO_REGISTER chars) with
   1 for every hard register that is set or clobbered by an inline asm
   anywhere in the current function, and 0 for all others.  Multi-register
   values mark every hard register they span.  */

static void
compute_regs_asm_clobbered (char *regs_asm_clobbered)
{
  basic_block bb;

  memset (regs_asm_clobbered, 0, sizeof (char) * FIRST_PSEUDO_REGISTER);

  FOR_EACH_BB (bb)
    {
      rtx insn;
      FOR_BB_INSNS_REVERSE (bb, insn)
	{
	  struct df_ref **def_rec;
	  if (insn_contains_asm (insn))
	    for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
	      {
		struct df_ref *def = *def_rec;
		unsigned int dregno = DF_REF_REGNO (def);
		/* Only hard registers are recorded; pseudo defs are
		   irrelevant here.  */
		if (dregno < FIRST_PSEUDO_REGISTER)
		  {
		    unsigned int i;
		    enum machine_mode mode = GET_MODE (DF_REF_REAL_REG (def));
		    /* Mark every hard reg covered by this (possibly
		       multi-word) definition.  */
		    unsigned int end = dregno
		      + hard_regno_nregs[dregno][mode] - 1;
		    for (i = dregno; i <= end; ++i)
		      regs_asm_clobbered[i] = 1;
		  }
	      }
	}
    }
}
/* All registers that can be eliminated.  */

static HARD_REG_SET eliminable_regset;

/* Forward declarations for the file-static helpers defined below.  */
static int regno_compare (const void *, const void *);
static int allocno_compare (const void *, const void *);
static void expand_preferences (void);
static void prune_preferences (void);
static void set_preferences (void);
static void find_reg (int, HARD_REG_SET, int, int, int);
static void dump_conflicts (FILE *);
static void build_insn_chain (void);
/* Look through the list of eliminable registers.  Set ELIM_SET to the
   set of registers which may be eliminated.  Set NO_GLOBAL_SET to the
   set of registers which may not be used across blocks.

   This will normally be called with ELIM_SET as the file static
   variable eliminable_regset, and NO_GLOBAL_SET as the file static
   variable NO_GLOBAL_ALLOC_REGS.

   Side effects: also recomputes max_regno, compacts the basic blocks,
   resets max_allocno to 0, and may emit an error if an asm clobbers a
   register that cannot be eliminated.  */

static void
compute_regsets (HARD_REG_SET *elim_set,
		 HARD_REG_SET *no_global_set)
{
  /* Like regs_ever_live, but 1 if a reg is set or clobbered from an asm.
     Unlike regs_ever_live, elements of this array corresponding to
     eliminable regs like the frame pointer are set if an asm sets them.  */
  char *regs_asm_clobbered = alloca (FIRST_PSEUDO_REGISTER * sizeof (char));

#ifdef ELIMINABLE_REGS
  static const struct {const int from, to; } eliminables[] = ELIMINABLE_REGS;
  size_t i;
#endif
  /* A frame pointer is required when the function doesn't omit it,
     calls alloca (and the target cares), or the target demands one.  */
  int need_fp
    = (! flag_omit_frame_pointer
       || (current_function_calls_alloca && EXIT_IGNORE_STACK)
       || FRAME_POINTER_REQUIRED);

  max_regno = max_reg_num ();
  compact_blocks ();

  max_allocno = 0;

  /* A machine may have certain hard registers that
     are safe to use only within a basic block.  */

  CLEAR_HARD_REG_SET (*no_global_set);
  CLEAR_HARD_REG_SET (*elim_set);

  compute_regs_asm_clobbered (regs_asm_clobbered);
  /* Build the regset of all eliminable registers and show we can't use those
     that we already know won't be eliminated.  */
#ifdef ELIMINABLE_REGS
  for (i = 0; i < ARRAY_SIZE (eliminables); i++)
    {
      bool cannot_elim
	= (! CAN_ELIMINATE (eliminables[i].from, eliminables[i].to)
	   || (eliminables[i].to == STACK_POINTER_REGNUM && need_fp));

      if (!regs_asm_clobbered[eliminables[i].from])
	{
	  SET_HARD_REG_BIT (*elim_set, eliminables[i].from);

	  if (cannot_elim)
	    SET_HARD_REG_BIT (*no_global_set, eliminables[i].from);
	}
      else if (cannot_elim)
	/* An asm clobbers a register that must stay live: user error.  */
	error ("%s cannot be used in asm here",
	       reg_names[eliminables[i].from]);
      else
	df_set_regs_ever_live (eliminables[i].from, true);
    }
#if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
  if (!regs_asm_clobbered[HARD_FRAME_POINTER_REGNUM])
    {
      SET_HARD_REG_BIT (*elim_set, HARD_FRAME_POINTER_REGNUM);
      if (need_fp)
	SET_HARD_REG_BIT (*no_global_set, HARD_FRAME_POINTER_REGNUM);
    }
  else if (need_fp)
    error ("%s cannot be used in asm here",
	   reg_names[HARD_FRAME_POINTER_REGNUM]);
  else
    df_set_regs_ever_live (HARD_FRAME_POINTER_REGNUM, true);
#endif

#else
  /* Target without ELIMINABLE_REGS: only the frame pointer is a
     candidate for elimination.  */
  if (!regs_asm_clobbered[FRAME_POINTER_REGNUM])
    {
      SET_HARD_REG_BIT (*elim_set, FRAME_POINTER_REGNUM);
      if (need_fp)
	SET_HARD_REG_BIT (*no_global_set, FRAME_POINTER_REGNUM);
    }
  else if (need_fp)
    error ("%s cannot be used in asm here", reg_names[FRAME_POINTER_REGNUM]);
  else
    df_set_regs_ever_live (FRAME_POINTER_REGNUM, true);
#endif
}
/* Perform allocation of pseudo-registers not allocated by local_alloc.

   Return value is nonzero if reload failed
   and we must not do any more for this function.  */

static int
global_alloc (void)
{
  int retval;
  size_t i;
  int max_blk;
  int *num_allocnos_per_blk;

  compute_regsets (&eliminable_regset, &no_global_alloc_regs);

  /* Track which registers have already been used.  Start with registers
     explicitly in the rtl, then registers allocated by local register
     allocation.  */

  CLEAR_HARD_REG_SET (regs_used_so_far);
#ifdef LEAF_REGISTERS
  /* If we are doing the leaf function optimization, and this is a leaf
     function, it means that the registers that take work to save are those
     that need a register window.  So prefer the ones that can be used in
     a leaf function.  */
  {
    const char *cheap_regs;
    const char *const leaf_regs = LEAF_REGISTERS;

    if (only_leaf_regs_used () && leaf_function_p ())
      cheap_regs = leaf_regs;
    else
      cheap_regs = call_used_regs;
    for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
      if (df_regs_ever_live_p (i) || cheap_regs[i])
	SET_HARD_REG_BIT (regs_used_so_far, i);
  }
#else
  /* We consider registers that do not have to be saved over calls as if
     they were already used since there is no cost in using them.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (df_regs_ever_live_p (i) || call_used_regs[i])
      SET_HARD_REG_BIT (regs_used_so_far, i);
#endif

  /* Hard regs already handed out by local allocation also count as used.  */
  for (i = FIRST_PSEUDO_REGISTER; i < (size_t) max_regno; i++)
    if (reg_renumber[i] >= 0)
      SET_HARD_REG_BIT (regs_used_so_far, reg_renumber[i]);

  /* Establish mappings from register number to allocation number
     and vice versa.  In the process, count the allocnos.  */

  reg_allocno = XNEWVEC (int, max_regno);

  /* Initially fill the reg_allocno array with regno's...  */
  max_blk = 0;
  max_allocno = 0;
  for (i = FIRST_PSEUDO_REGISTER; i < (size_t) max_regno; i++)
    /* Note that reg_live_length[i] < 0 indicates a "constant" reg
       that we are supposed to refrain from putting in a hard reg.
       -2 means do make an allocno but don't allocate it.  */
    if (REG_N_REFS (i) != 0 && REG_LIVE_LENGTH (i) != -1
	/* Don't allocate pseudos that cross calls,
	   if this function receives a nonlocal goto.  */
	&& (! current_function_has_nonlocal_label
	    || REG_N_CALLS_CROSSED (i) == 0))
      {
	int blk = regno_basic_block (i);
	reg_allocno[max_allocno++] = i;
	if (blk > max_blk)
	  max_blk = blk;
	gcc_assert (REG_LIVE_LENGTH (i));
      }

  allocno = XCNEWVEC (struct allocno, max_allocno);
  partial_bitnum = XNEWVEC (HOST_WIDE_INT, max_allocno);
  num_allocnos_per_blk = XCNEWVEC (int, max_blk + 1);

  /* ...so we can sort them in the order we want them to receive
     their allocnos.  */
  qsort (reg_allocno, max_allocno, sizeof (int), regno_compare);

  for (i = 0; i < (size_t) max_allocno; i++)
    {
      int regno = reg_allocno[i];
      int blk = regno_basic_block (regno);
      num_allocnos_per_blk[blk]++;
      allocno[i].reg = regno;
      allocno[i].size = PSEUDO_REGNO_SIZE (regno);
      allocno[i].calls_crossed += REG_N_CALLS_CROSSED (regno);
      allocno[i].throwing_calls_crossed
	+= REG_N_THROWING_CALLS_CROSSED (regno);
      allocno[i].n_refs += REG_N_REFS (regno);
      allocno[i].freq += REG_FREQ (regno);
      if (allocno[i].live_length < REG_LIVE_LENGTH (regno))
	allocno[i].live_length = REG_LIVE_LENGTH (regno);
    }

  /* The "global" block must contain all allocnos.  */
  num_allocnos_per_blk[0] = max_allocno;

  /* Now reinitialize the reg_allocno array in terms of the
     optimized regno to allocno mapping we created above.  */
  for (i = 0; i < (size_t) max_regno; i++)
    reg_allocno[i] = -1;

  /* Lay out the compressed triangular conflict bitmatrix: each allocno
     gets a row whose length is the number of later allocnos in the same
     basic block (see the file-header comment for the bit numbering).  */
  max_bitnum = 0;
  for (i = 0; i < (size_t) max_allocno; i++)
    {
      int regno = allocno[i].reg;
      int blk = regno_basic_block (regno);
      int row_size = --num_allocnos_per_blk[blk];
      reg_allocno[regno] = (int) i;
      partial_bitnum[i] = (row_size > 0) ? max_bitnum - ((int) i + 1) : -1;
      max_bitnum += row_size;
    }

#ifdef ENABLE_CHECKING
  /* The compressed matrix can never be larger than the full triangle.  */
  gcc_assert (max_bitnum <=
	      (((HOST_WIDE_INT) max_allocno *
		((HOST_WIDE_INT) max_allocno - 1)) / 2));
#endif

  if (dump_file)
    {
      HOST_WIDE_INT num_bits, num_bytes, actual_bytes;

      fprintf (dump_file, "## max_blk: %d\n", max_blk);
      fprintf (dump_file, "## max_regno: %d\n", max_regno);
      fprintf (dump_file, "## max_allocno: %d\n", max_allocno);

      num_bits = max_bitnum;
      num_bytes = CEIL (num_bits, 8);
      actual_bytes = num_bytes;
      fprintf (dump_file, "## Compressed triangular bitmatrix size: ");
      fprintf (dump_file, HOST_WIDE_INT_PRINT_DEC " bits, ", num_bits);
      fprintf (dump_file, HOST_WIDE_INT_PRINT_DEC " bytes\n", num_bytes);

      num_bits = ((HOST_WIDE_INT) max_allocno *
		  ((HOST_WIDE_INT) max_allocno - 1)) / 2;
      num_bytes = CEIL (num_bits, 8);
      fprintf (dump_file, "## Standard triangular bitmatrix size: ");
      fprintf (dump_file, HOST_WIDE_INT_PRINT_DEC " bits, ", num_bits);
      fprintf (dump_file, HOST_WIDE_INT_PRINT_DEC " bytes [%.2f%%]\n",
	       num_bytes, 100.0 * ((double) actual_bytes / (double) num_bytes));

      num_bits = (HOST_WIDE_INT) max_allocno * (HOST_WIDE_INT) max_allocno;
      num_bytes = CEIL (num_bits, 8);
      fprintf (dump_file, "## Square bitmatrix size: ");
      fprintf (dump_file, HOST_WIDE_INT_PRINT_DEC " bits, ", num_bits);
      fprintf (dump_file, HOST_WIDE_INT_PRINT_DEC " bytes [%.2f%%]\n",
	       num_bytes, 100.0 * ((double) actual_bytes / (double) num_bytes));
    }

  /* Calculate amount of usage of each hard reg by pseudos
     allocated by local-alloc.  This is to see if we want to
     override it.  */
  memset (local_reg_live_length, 0, sizeof local_reg_live_length);
  memset (local_reg_n_refs, 0, sizeof local_reg_n_refs);
  memset (local_reg_freq, 0, sizeof local_reg_freq);
  for (i = FIRST_PSEUDO_REGISTER; i < (size_t) max_regno; i++)
    if (reg_renumber[i] >= 0)
      {
	int regno = reg_renumber[i];
	int endregno = end_hard_regno (PSEUDO_REGNO_MODE (i), regno);
	int j;

	for (j = regno; j < endregno; j++)
	  {
	    local_reg_n_refs[j] += REG_N_REFS (i);
	    local_reg_freq[j] += REG_FREQ (i);
	    local_reg_live_length[j] += REG_LIVE_LENGTH (i);
	  }
      }

  /* We can't override local-alloc for a reg used not just by local-alloc.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (df_regs_ever_live_p (i))
      local_reg_n_refs[i] = 0, local_reg_freq[i] = 0;

  if (dump_file)
    {
      for (i = FIRST_PSEUDO_REGISTER; i < (size_t) max_regno; i++)
	{
	  fprintf (dump_file, "%d REG_N_REFS=%d, REG_FREQ=%d, REG_LIVE_LENGTH=%d\n",
		   (int)i, REG_N_REFS (i), REG_FREQ (i), REG_LIVE_LENGTH (i));
	}
      fprintf (dump_file, "regs_ever_live =");
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if (df_regs_ever_live_p (i))
	  fprintf (dump_file, " %d", (int)i);
      fprintf (dump_file, "\n");
    }

  conflicts = NULL;
  adjacency = NULL;
  adjacency_pool = NULL;

  /* If there is work to be done (at least one reg to allocate),
     perform global conflict analysis and allocate the regs.  */

  if (max_allocno > 0)
    {
      /* We used to use alloca here, but the size of what it would try to
	 allocate would occasionally cause it to exceed the stack limit and
	 cause unpredictable core dumps.  Some examples were > 2Mb in size.  */
      conflicts = XCNEWVEC (HOST_WIDEST_FAST_INT,
			    CEIL(max_bitnum, HOST_BITS_PER_WIDEST_FAST_INT));

      adjacency = XCNEWVEC (adjacency_t *, max_allocno);
      adjacency_pool = create_alloc_pool ("global_alloc adjacency list pool",
					  sizeof (adjacency_t), 1024);

      /* Scan all the insns and compute the conflicts among allocnos
	 and between allocnos and hard regs.  */

      global_conflicts ();

      /* There is just too much going on in the register allocators to
	 keep things up to date.  At the end we have to rescan anyway
	 because things change when the reload_completed flag is set.
	 So we just turn off scanning and we will rescan by hand.

	 However, we needed to do the rescanning before this point to
	 get the new insns scanned inserted by local_alloc scanned for
	 global_conflicts.  */
      df_set_flags (DF_NO_INSN_RESCAN);

      /* Eliminate conflicts between pseudos and eliminable registers.  If
	 the register is not eliminated, the pseudo won't really be able to
	 live in the eliminable register, so the conflict doesn't matter.
	 If we do eliminate the register, the conflict will no longer exist.
	 So in either case, we can ignore the conflict.  Likewise for
	 preferences.  */

      set_preferences ();

      for (i = 0; i < (size_t) max_allocno; i++)
	{
	  AND_COMPL_HARD_REG_SET (allocno[i].hard_reg_conflicts,
				  eliminable_regset);
	  AND_COMPL_HARD_REG_SET (allocno[i].hard_reg_copy_preferences,
				  eliminable_regset);
	  AND_COMPL_HARD_REG_SET (allocno[i].hard_reg_preferences,
				  eliminable_regset);
	}

      /* Try to expand the preferences by merging them between allocnos.  */

      expand_preferences ();

      /* Determine the order to allocate the remaining pseudo registers.  */

      allocno_order = XNEWVEC (int, max_allocno);
      for (i = 0; i < (size_t) max_allocno; i++)
	allocno_order[i] = i;

      /* Default the size to 1, since allocno_compare uses it to divide by.
	 Also convert allocno_live_length of zero to -1.  A length of zero
	 can occur when all the registers for that allocno have reg_live_length
	 equal to -2.  In this case, we want to make an allocno, but not
	 allocate it.  So avoid the divide-by-zero and set it to a low
	 priority.  */

      for (i = 0; i < (size_t) max_allocno; i++)
	{
	  if (allocno[i].size == 0)
	    allocno[i].size = 1;
	  if (allocno[i].live_length == 0)
	    allocno[i].live_length = -1;
	}

      qsort (allocno_order, max_allocno, sizeof (int), allocno_compare);

      prune_preferences ();

      if (dump_file)
	dump_conflicts (dump_file);

      /* Try allocating them, one by one, in that order,
	 except for parameters marked with reg_live_length[regno] == -2.  */

      for (i = 0; i < (size_t) max_allocno; i++)
	if (reg_renumber[allocno[allocno_order[i]].reg] < 0
	    && REG_LIVE_LENGTH (allocno[allocno_order[i]].reg) >= 0)
	  {
	    if (!dbg_cnt (global_alloc_at_reg))
	      break;
	    /* If we have more than one register class,
	       first try allocating in the class that is cheapest
	       for this pseudo-reg.  If that fails, try any reg.  */
	    if (N_REG_CLASSES > 1)
	      {
		find_reg (allocno_order[i], 0, 0, 0, 0);
		if (reg_renumber[allocno[allocno_order[i]].reg] >= 0)
		  continue;
	      }
	    if (reg_alternate_class (allocno[allocno_order[i]].reg) != NO_REGS)
	      find_reg (allocno_order[i], 0, 1, 0, 0);
	  }

      free (allocno_order);
      free (conflicts);
    }

  /* Do the reloads now while the allocno data still exists, so that we can
     try to assign new hard regs to any pseudo regs that are spilled.  */

#if 0 /* We need to eliminate regs even if there is no rtl code,
	 for the sake of debugging information.  */
  if (n_basic_blocks > NUM_FIXED_BLOCKS)
#endif
    {
      build_insn_chain ();
      retval = reload (get_insns (), 1);
    }

  /* Clean up.  */
  free (reg_allocno);
  free (num_allocnos_per_blk);
  free (partial_bitnum);
  free (allocno);
  if (adjacency != NULL)
    {
      free_alloc_pool (adjacency_pool);
      free (adjacency);
    }

  return retval;
}
/* qsort comparator ordering regnos for allocno assignment.  The regno to
   allocno mapping must have the property that all "global" regnos (ie,
   regnos referenced in more than one basic block) get smaller allocno
   values than "local" regnos (ie, regnos referenced in only one basic
   block), and that for two basic blocks "i" and "j" with i < j, regnos
   local to block i sort before regnos local to block j.
   Returns negative (positive) if *V1P should be allocated before
   (after) *V2P.  */

static int
regno_compare (const void *v1p, const void *v2p)
{
  const int r1 = *(const int *) v1p;
  const int r2 = *(const int *) v2p;
  const int b1 = REG_BASIC_BLOCK (r1);
  const int b2 = REG_BASIC_BLOCK (r2);

  /* Prefer lower numbered basic blocks.  Global and unknown blocks have
     negative values, so they sort first.  */
  if (b1 != b2)
    return b1 - b2;

  /* Within the same block, order by regno for a stable result.  */
  return r1 - r2;
}
674 /* Sort predicate for ordering the allocnos.
675 Returns -1 (1) if *v1 should be allocated before (after) *v2. */
677 static int
678 allocno_compare (const void *v1p, const void *v2p)
680 int v1 = *(const int *)v1p, v2 = *(const int *)v2p;
681 /* Note that the quotient will never be bigger than
682 the value of floor_log2 times the maximum number of
683 times a register can occur in one insn (surely less than 100)
684 weighted by the frequency (maximally REG_FREQ_MAX).
685 Multiplying this by 10000/REG_FREQ_MAX can't overflow. */
686 int pri1
687 = (((double) (floor_log2 (allocno[v1].n_refs) * allocno[v1].freq)
688 / allocno[v1].live_length)
689 * (10000 / REG_FREQ_MAX) * allocno[v1].size);
690 int pri2
691 = (((double) (floor_log2 (allocno[v2].n_refs) * allocno[v2].freq)
692 / allocno[v2].live_length)
693 * (10000 / REG_FREQ_MAX) * allocno[v2].size);
694 if (pri2 - pri1)
695 return pri2 - pri1;
697 /* If regs are equally good, sort by allocno,
698 so that the results of qsort leave nothing to chance. */
699 return v1 - v2;
/* Expand the preference information by looking for cases where one allocno
   dies in an insn that sets an allocno.  If those two allocnos don't conflict,
   merge any preferences between those allocnos.  */

static void
expand_preferences (void)
{
  rtx insn;
  rtx link;
  rtx set;

  /* We only try to handle the most common cases here.  Most of the cases
     where this wins are reg-reg copies.  */

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
	&& (set = single_set (insn)) != 0
	&& REG_P (SET_DEST (set))
	&& reg_allocno[REGNO (SET_DEST (set))] >= 0)
      /* The destination is a pseudo with an allocno; look for an
	 allocno'd pseudo that dies in this insn and doesn't conflict
	 with the destination.  */
      for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
	if (REG_NOTE_KIND (link) == REG_DEAD
	    && REG_P (XEXP (link, 0))
	    && reg_allocno[REGNO (XEXP (link, 0))] >= 0
	    && ! conflict_p (reg_allocno[REGNO (SET_DEST (set))],
			     reg_allocno[REGNO (XEXP (link, 0))]))
	  {
	    int a1 = reg_allocno[REGNO (SET_DEST (set))];
	    int a2 = reg_allocno[REGNO (XEXP (link, 0))];

	    /* Copy preferences are merged only when the dying reg is
	       the actual source of the copy.  */
	    if (XEXP (link, 0) == SET_SRC (set))
	      {
		IOR_HARD_REG_SET (allocno[a1].hard_reg_copy_preferences,
				  allocno[a2].hard_reg_copy_preferences);
		IOR_HARD_REG_SET (allocno[a2].hard_reg_copy_preferences,
				  allocno[a1].hard_reg_copy_preferences);
	      }

	    IOR_HARD_REG_SET (allocno[a1].hard_reg_preferences,
			      allocno[a2].hard_reg_preferences);
	    IOR_HARD_REG_SET (allocno[a2].hard_reg_preferences,
			      allocno[a1].hard_reg_preferences);
	    IOR_HARD_REG_SET (allocno[a1].hard_reg_full_preferences,
			      allocno[a2].hard_reg_full_preferences);
	    IOR_HARD_REG_SET (allocno[a2].hard_reg_full_preferences,
			      allocno[a1].hard_reg_full_preferences);
	  }
}
/* Try to set a preference for an allocno to a hard register.
   We are passed DEST and SRC which are the operands of a SET.  It is known
   that SRC is a register.  If SRC or the first operand of SRC is a register,
   try to set a preference.  If one of the two is a hard register and the other
   is a pseudo-register, mark the preference.

   Note that we are not as aggressive as local-alloc in trying to tie a
   pseudo-register to a hard register.  */

static void
set_preference (rtx dest, rtx src)
{
  unsigned int src_regno, dest_regno, end_regno;
  /* Amount to add to the hard regno for SRC, or subtract from that for DEST,
     to compensate for subregs in SRC or DEST.  */
  int offset = 0;
  unsigned int i;
  int copy = 1;

  /* If SRC is a unary/wrapping expression, look at its operand; in that
     case this is no longer a plain copy.  */
  if (GET_RTX_FORMAT (GET_CODE (src))[0] == 'e')
    src = XEXP (src, 0), copy = 0;

  /* Get the reg number for both SRC and DEST.
     If neither is a reg, give up.  */

  if (REG_P (src))
    src_regno = REGNO (src);
  else if (GET_CODE (src) == SUBREG && REG_P (SUBREG_REG (src)))
    {
      src_regno = REGNO (SUBREG_REG (src));

      if (REGNO (SUBREG_REG (src)) < FIRST_PSEUDO_REGISTER)
	offset += subreg_regno_offset (REGNO (SUBREG_REG (src)),
				       GET_MODE (SUBREG_REG (src)),
				       SUBREG_BYTE (src),
				       GET_MODE (src));
      else
	offset += (SUBREG_BYTE (src)
		   / REGMODE_NATURAL_SIZE (GET_MODE (src)));
    }
  else
    return;

  if (REG_P (dest))
    dest_regno = REGNO (dest);
  else if (GET_CODE (dest) == SUBREG && REG_P (SUBREG_REG (dest)))
    {
      dest_regno = REGNO (SUBREG_REG (dest));

      if (REGNO (SUBREG_REG (dest)) < FIRST_PSEUDO_REGISTER)
	offset -= subreg_regno_offset (REGNO (SUBREG_REG (dest)),
				       GET_MODE (SUBREG_REG (dest)),
				       SUBREG_BYTE (dest),
				       GET_MODE (dest));
      else
	offset -= (SUBREG_BYTE (dest)
		   / REGMODE_NATURAL_SIZE (GET_MODE (dest)));
    }
  else
    return;

  /* Convert either or both to hard reg numbers.  */

  if (reg_renumber[src_regno] >= 0)
    src_regno = reg_renumber[src_regno];

  if (reg_renumber[dest_regno] >= 0)
    dest_regno = reg_renumber[dest_regno];

  /* Now if one is a hard reg and the other is a global pseudo
     then give the other a preference.  */

  if (dest_regno < FIRST_PSEUDO_REGISTER && src_regno >= FIRST_PSEUDO_REGISTER
      && reg_allocno[src_regno] >= 0)
    {
      dest_regno -= offset;
      if (dest_regno < FIRST_PSEUDO_REGISTER)
	{
	  if (copy)
	    SET_REGBIT (hard_reg_copy_preferences,
			reg_allocno[src_regno], dest_regno);

	  SET_REGBIT (hard_reg_preferences,
		      reg_allocno[src_regno], dest_regno);
	  end_regno = end_hard_regno (GET_MODE (dest), dest_regno);
	  /* The full preference covers every hard reg the value spans.  */
	  for (i = dest_regno; i < end_regno; i++)
	    SET_REGBIT (hard_reg_full_preferences, reg_allocno[src_regno], i);
	}
    }

  if (src_regno < FIRST_PSEUDO_REGISTER && dest_regno >= FIRST_PSEUDO_REGISTER
      && reg_allocno[dest_regno] >= 0)
    {
      src_regno += offset;
      if (src_regno < FIRST_PSEUDO_REGISTER)
	{
	  if (copy)
	    SET_REGBIT (hard_reg_copy_preferences,
			reg_allocno[dest_regno], src_regno);

	  SET_REGBIT (hard_reg_preferences,
		      reg_allocno[dest_regno], src_regno);
	  end_regno = end_hard_regno (GET_MODE (src), src_regno);
	  for (i = src_regno; i < end_regno; i++)
	    SET_REGBIT (hard_reg_full_preferences, reg_allocno[dest_regno], i);
	}
    }
}
860 /* Helper function for set_preferences. */
861 static void
862 set_preferences_1 (rtx reg, const_rtx setter, void *data ATTRIBUTE_UNUSED)
864 if (GET_CODE (reg) == SUBREG)
865 reg = SUBREG_REG (reg);
867 if (!REG_P (reg))
868 return;
870 gcc_assert (setter);
871 if (GET_CODE (setter) != CLOBBER)
872 set_preference (reg, SET_SRC (setter));
875 /* Scan all of the insns and initialize the preferences. */
877 static void
878 set_preferences (void)
880 basic_block bb;
881 rtx insn;
882 FOR_EACH_BB (bb)
883 FOR_BB_INSNS_REVERSE (bb, insn)
885 if (!INSN_P (insn))
886 continue;
888 note_stores (PATTERN (insn), set_preferences_1, NULL);
/* Prune the preferences for global registers to exclude registers that cannot
   be used.

   Compute `regs_someone_prefers', which is a bitmask of the hard registers
   that are preferred by conflicting registers of lower priority.  If possible,
   we will avoid using these registers.  */

static void
prune_preferences (void)
{
  int i;
  int num;
  int *allocno_to_order = XNEWVEC (int, max_allocno);

  /* Scan from least important to most important.
     For each allocno, remove from preferences registers that cannot be used,
     either because of conflicts or register type.  Then compute all registers
     preferred by each lower-priority register that conflicts.  */

  for (i = max_allocno - 1; i >= 0; i--)
    {
      HARD_REG_SET temp;

      num = allocno_order[i];
      allocno_to_order[num] = i;
      COPY_HARD_REG_SET (temp, allocno[num].hard_reg_conflicts);

      /* An allocno that crosses no calls may still not use fixed regs;
	 one that does must also avoid all call-clobbered regs.  */
      if (allocno[num].calls_crossed == 0)
	IOR_HARD_REG_SET (temp, fixed_reg_set);
      else
	IOR_HARD_REG_SET (temp, call_used_reg_set);

      /* Also exclude everything outside the preferred class.  */
      IOR_COMPL_HARD_REG_SET
	(temp,
	 reg_class_contents[(int) reg_preferred_class (allocno[num].reg)]);

      AND_COMPL_HARD_REG_SET (allocno[num].hard_reg_preferences, temp);
      AND_COMPL_HARD_REG_SET (allocno[num].hard_reg_copy_preferences, temp);
      AND_COMPL_HARD_REG_SET (allocno[num].hard_reg_full_preferences, temp);
    }

  for (i = max_allocno - 1; i >= 0; i--)
    {
      /* Merge in the preferences of lower-priority registers (they have
	 already been pruned).  If we also prefer some of those registers,
	 don't exclude them unless we are of a smaller size (in which case
	 we want to give the lower-priority allocno the first chance for
	 these registers).  */
      HARD_REG_SET temp, temp2;
      int allocno2;
      adjacency_iter ai;

      num = allocno_order[i];

      CLEAR_HARD_REG_SET (temp);
      CLEAR_HARD_REG_SET (temp2);

      FOR_EACH_CONFLICT (num, allocno2, ai)
	{
	  if (allocno_to_order[allocno2] > i)
	    {
	      if (allocno[allocno2].size <= allocno[num].size)
		IOR_HARD_REG_SET (temp,
				  allocno[allocno2].hard_reg_full_preferences);
	      else
		IOR_HARD_REG_SET (temp2,
				  allocno[allocno2].hard_reg_full_preferences);
	    }
	}

      AND_COMPL_HARD_REG_SET (temp, allocno[num].hard_reg_full_preferences);
      IOR_HARD_REG_SET (temp, temp2);
      COPY_HARD_REG_SET (allocno[num].regs_someone_prefers, temp);
    }
  free (allocno_to_order);
}
/* Assign a hard register to allocno NUM; look for one that is the beginning
   of a long enough stretch of hard regs none of which conflicts with ALLOCNO.
   The registers marked in PREFREGS are tried first.

   LOSERS, if nonzero, is a HARD_REG_SET indicating registers that cannot
   be used for this allocation.

   If ALT_REGS_P is zero, consider only the preferred class of ALLOCNO's reg.
   Otherwise ignore that preferred class and use the alternate class.

   If ACCEPT_CALL_CLOBBERED is nonzero, accept a call-clobbered hard reg that
   will have to be saved and restored at calls.

   RETRYING is nonzero if this is called from retry_global_alloc.

   If we find one, record it in reg_renumber.
   If not, do nothing.  */

static void
find_reg (int num, HARD_REG_SET losers, int alt_regs_p, int accept_call_clobbered, int retrying)
{
  int i, best_reg, pass;
  /* USED1: regs unusable for this allocno (conflicts included).
     USED2: same but without the conflicts, used by the local-alloc
     kick-out heuristic below.
     USED: the working exclusion set for the current pass.  */
  HARD_REG_SET used, used1, used2;

  enum reg_class class = (alt_regs_p
			  ? reg_alternate_class (allocno[num].reg)
			  : reg_preferred_class (allocno[num].reg));
  enum machine_mode mode = PSEUDO_REGNO_MODE (allocno[num].reg);

  /* Start USED1 with the regs ruled out by fixedness / call crossings.  */
  if (accept_call_clobbered)
    COPY_HARD_REG_SET (used1, call_fixed_reg_set);
  else if (allocno[num].calls_crossed == 0)
    COPY_HARD_REG_SET (used1, fixed_reg_set);
  else
    COPY_HARD_REG_SET (used1, call_used_reg_set);

  /* Some registers should not be allocated in global-alloc.  */
  IOR_HARD_REG_SET (used1, no_global_alloc_regs);
  if (losers)
    IOR_HARD_REG_SET (used1, losers);

  /* Exclude everything outside the chosen register class.  */
  IOR_COMPL_HARD_REG_SET (used1, reg_class_contents[(int) class]);
  COPY_HARD_REG_SET (used2, used1);

  IOR_HARD_REG_SET (used1, allocno[num].hard_reg_conflicts);

#ifdef CANNOT_CHANGE_MODE_CLASS
  cannot_change_mode_set_regs (&used1, mode, allocno[num].reg);
#endif

  /* Try each hard reg to see if it fits.  Do this in two passes.
     In the first pass, skip registers that are preferred by some other pseudo
     to give it a better chance of getting one of those registers.  Only if
     we can't get a register when excluding those do we take one of them.
     However, we never allocate a register for the first time in pass 0.  */

  COPY_HARD_REG_SET (used, used1);
  IOR_COMPL_HARD_REG_SET (used, regs_used_so_far);
  IOR_HARD_REG_SET (used, allocno[num].regs_someone_prefers);

  best_reg = -1;
  for (i = FIRST_PSEUDO_REGISTER, pass = 0;
       pass <= 1 && i >= FIRST_PSEUDO_REGISTER;
       pass++)
    {
      if (pass == 1)
	/* Pass 1 drops the regs_used_so_far / regs_someone_prefers
	   restrictions and retries with only the hard exclusions.  */
	COPY_HARD_REG_SET (used, used1);
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	{
#ifdef REG_ALLOC_ORDER
	  int regno = reg_alloc_order[i];
#else
	  int regno = i;
#endif
	  if (! TEST_HARD_REG_BIT (used, regno)
	      && HARD_REGNO_MODE_OK (regno, mode)
	      && (allocno[num].calls_crossed == 0
		  || accept_call_clobbered
		  || ! HARD_REGNO_CALL_PART_CLOBBERED (regno, mode)))
	    {
	      int j;
	      int lim = end_hard_regno (mode, regno);
	      /* Require every hard reg of the multi-reg span to be free.  */
	      for (j = regno + 1;
		   (j < lim
		    && ! TEST_HARD_REG_BIT (used, j));
		   j++);
	      if (j == lim)
		{
		  best_reg = regno;
		  break;
		}
#ifndef REG_ALLOC_ORDER
	      i = j;		/* Skip starting points we know will lose */
#endif
	    }
	}
    }

  /* See if there is a preferred register with the same class as the register
     we allocated above.  Making this restriction prevents register
     preferencing from creating worse register allocation.

     Remove from the preferred registers and conflicting registers.  Note that
     additional conflicts may have been added after `prune_preferences' was
     called.

     First do this for those register with copy preferences, then all
     preferred registers.  */

  AND_COMPL_HARD_REG_SET (allocno[num].hard_reg_copy_preferences, used);
  if (!hard_reg_set_empty_p (allocno[num].hard_reg_copy_preferences)
      && best_reg >= 0)
    {
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if (TEST_HARD_REG_BIT (allocno[num].hard_reg_copy_preferences, i)
	    && HARD_REGNO_MODE_OK (i, mode)
	    && (allocno[num].calls_crossed == 0
		|| accept_call_clobbered
		|| ! HARD_REGNO_CALL_PART_CLOBBERED (i, mode))
	    && (REGNO_REG_CLASS (i) == REGNO_REG_CLASS (best_reg)
		|| reg_class_subset_p (REGNO_REG_CLASS (i),
				       REGNO_REG_CLASS (best_reg))
		|| reg_class_subset_p (REGNO_REG_CLASS (best_reg),
				       REGNO_REG_CLASS (i))))
	  {
	    int j;
	    int lim = end_hard_regno (mode, i);
	    /* Every reg of the span must be free and class-compatible
	       with the corresponding reg of the BEST_REG span.  */
	    for (j = i + 1;
		 (j < lim
		  && ! TEST_HARD_REG_BIT (used, j)
		  && (REGNO_REG_CLASS (j)
		      == REGNO_REG_CLASS (best_reg + (j - i))
		      || reg_class_subset_p (REGNO_REG_CLASS (j),
					     REGNO_REG_CLASS (best_reg + (j - i)))
		      || reg_class_subset_p (REGNO_REG_CLASS (best_reg + (j - i)),
					     REGNO_REG_CLASS (j))));
		 j++);
	    if (j == lim)
	      {
		best_reg = i;
		/* Copy preferences beat plain preferences; stop looking.  */
		goto no_prefs;
	      }
	  }
    }

  AND_COMPL_HARD_REG_SET (allocno[num].hard_reg_preferences, used);
  if (!hard_reg_set_empty_p (allocno[num].hard_reg_preferences)
      && best_reg >= 0)
    {
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if (TEST_HARD_REG_BIT (allocno[num].hard_reg_preferences, i)
	    && HARD_REGNO_MODE_OK (i, mode)
	    && (allocno[num].calls_crossed == 0
		|| accept_call_clobbered
		|| ! HARD_REGNO_CALL_PART_CLOBBERED (i, mode))
	    && (REGNO_REG_CLASS (i) == REGNO_REG_CLASS (best_reg)
		|| reg_class_subset_p (REGNO_REG_CLASS (i),
				       REGNO_REG_CLASS (best_reg))
		|| reg_class_subset_p (REGNO_REG_CLASS (best_reg),
				       REGNO_REG_CLASS (i))))
	  {
	    int j;
	    int lim = end_hard_regno (mode, i);
	    for (j = i + 1;
		 (j < lim
		  && ! TEST_HARD_REG_BIT (used, j)
		  && (REGNO_REG_CLASS (j)
		      == REGNO_REG_CLASS (best_reg + (j - i))
		      || reg_class_subset_p (REGNO_REG_CLASS (j),
					     REGNO_REG_CLASS (best_reg + (j - i)))
		      || reg_class_subset_p (REGNO_REG_CLASS (best_reg + (j - i)),
					     REGNO_REG_CLASS (j))));
		 j++);
	    if (j == lim)
	      {
		best_reg = i;
		break;
	      }
	  }
    }
 no_prefs:

  /* If we haven't succeeded yet, try with caller-saves.
     We need not check to see if the current function has nonlocal
     labels because we don't put any pseudos that are live over calls in
     registers in that case.  */

  if (flag_caller_saves && best_reg < 0)
    {
      /* Did not find a register.  If it would be profitable to
	 allocate a call-clobbered register and save and restore it
	 around calls, do that.  Don't do this if it crosses any calls
	 that might throw.  */
      if (! accept_call_clobbered
	  && allocno[num].calls_crossed != 0
	  && allocno[num].throwing_calls_crossed == 0
	  && CALLER_SAVE_PROFITABLE (allocno[num].n_refs,
				     allocno[num].calls_crossed))
	{
	  HARD_REG_SET new_losers;
	  if (! losers)
	    CLEAR_HARD_REG_SET (new_losers);
	  else
	    COPY_HARD_REG_SET (new_losers, losers);

	  IOR_HARD_REG_SET(new_losers, losing_caller_save_reg_set);
	  /* Recurse once with ACCEPT_CALL_CLOBBERED set.  */
	  find_reg (num, new_losers, alt_regs_p, 1, retrying);
	  if (reg_renumber[allocno[num].reg] >= 0)
	    {
	      caller_save_needed = 1;
	      return;
	    }
	}
    }

  /* If we haven't succeeded yet,
     see if some hard reg that conflicts with us
     was utilized poorly by local-alloc.
     If so, kick out the regs that were put there by local-alloc
     so we can use it instead.  */
  if (best_reg < 0 && !retrying
      /* Let's not bother with multi-reg allocnos.  */
      && allocno[num].size == 1
      && REG_BASIC_BLOCK (allocno[num].reg) == REG_BLOCK_GLOBAL)
    {
      /* Count from the end, to find the least-used ones first.  */
      for (i = FIRST_PSEUDO_REGISTER - 1; i >= 0; i--)
	{
#ifdef REG_ALLOC_ORDER
	  int regno = reg_alloc_order[i];
#else
	  int regno = i;
#endif

	  if (local_reg_n_refs[regno] != 0
	      /* Don't use a reg no good for this pseudo.  */
	      && ! TEST_HARD_REG_BIT (used2, regno)
	      && HARD_REGNO_MODE_OK (regno, mode)
	      /* The code below assumes that we need only a single
		 register, but the check of allocno[num].size above
		 was not enough.  Sometimes we need more than one
		 register for a single-word value.  */
	      && hard_regno_nregs[regno][mode] == 1
	      && (allocno[num].calls_crossed == 0
		  || accept_call_clobbered
		  || ! HARD_REGNO_CALL_PART_CLOBBERED (regno, mode))
#ifdef CANNOT_CHANGE_MODE_CLASS
	      && ! invalid_mode_change_p (regno, REGNO_REG_CLASS (regno),
					  mode)
#endif
#ifdef STACK_REGS
	      && (!allocno[num].no_stack_reg
		  || regno < FIRST_STACK_REG || regno > LAST_STACK_REG)
#endif
	      )
	    {
	      /* We explicitly evaluate the divide results into temporary
		 variables so as to avoid excess precision problems that occur
		 on an i386-unknown-sysv4.2 (unixware) host.  */

	      double tmp1 = ((double) local_reg_freq[regno] * local_reg_n_refs[regno]
			     / local_reg_live_length[regno]);
	      double tmp2 = ((double) allocno[num].freq * allocno[num].n_refs
			     / allocno[num].live_length);

	      if (tmp1 < tmp2)
		{
		  /* Hard reg REGNO was used less in total by local regs
		     than it would be used by this one allocno!  */
		  int k;
		  if (dump_file)
		    {
		      fprintf (dump_file, "Regno %d better for global %d, ",
			       regno, allocno[num].reg);
		      fprintf (dump_file, "fr:%d, ll:%d, nr:%d ",
			       allocno[num].freq, allocno[num].live_length,
			       allocno[num].n_refs);
		      fprintf (dump_file, "(was: fr:%d, ll:%d, nr:%d)\n",
			       local_reg_freq[regno],
			       local_reg_live_length[regno],
			       local_reg_n_refs[regno]);
		    }

		  /* Evict every local pseudo currently occupying REGNO.  */
		  for (k = 0; k < max_regno; k++)
		    if (reg_renumber[k] >= 0)
		      {
			int r = reg_renumber[k];
			int endregno
			  = end_hard_regno (PSEUDO_REGNO_MODE (k), r);

			if (regno >= r && regno < endregno)
			  {
			    if (dump_file)
			      fprintf (dump_file,
				       "Local Reg %d now on stack\n", k);
			    reg_renumber[k] = -1;
			  }
		      }

		  best_reg = regno;
		  break;
		}
	    }
	}
    }

  /* Did we find a register?  */

  if (best_reg >= 0)
    {
      int lim, j;
      HARD_REG_SET this_reg;
      adjacency_iter ai;

      /* Yes.  Record it as the hard register of this pseudo-reg.  */
      reg_renumber[allocno[num].reg] = best_reg;

      /* Make a set of the hard regs being allocated.  */
      CLEAR_HARD_REG_SET (this_reg);
      lim = end_hard_regno (mode, best_reg);
      for (j = best_reg; j < lim; j++)
	{
	  SET_HARD_REG_BIT (this_reg, j);
	  SET_HARD_REG_BIT (regs_used_so_far, j);
	  /* This is no longer a reg used just by local regs.  */
	  local_reg_n_refs[j] = 0;
	  local_reg_freq[j] = 0;
	}
      /* For each other pseudo-reg conflicting with this one,
	 mark it as conflicting with the hard regs this one occupies.  */
      FOR_EACH_CONFLICT (num, j, ai)
	{
	  IOR_HARD_REG_SET (allocno[j].hard_reg_conflicts, this_reg);
	}
    }
}
1308 /* Called from `reload' to look for a hard reg to put pseudo reg REGNO in.
1309 Perhaps it had previously seemed not worth a hard reg,
1310 or perhaps its old hard reg has been commandeered for reloads.
1311 FORBIDDEN_REGS indicates certain hard regs that may not be used, even if
1312 they do not appear to be allocated.
1313 If FORBIDDEN_REGS is zero, no regs are forbidden. */
1315 void
1316 retry_global_alloc (int regno, HARD_REG_SET forbidden_regs)
1318 int alloc_no = reg_allocno[regno];
1319 if (alloc_no >= 0)
1321 /* If we have more than one register class,
1322 first try allocating in the class that is cheapest
1323 for this pseudo-reg. If that fails, try any reg. */
1324 if (N_REG_CLASSES > 1)
1325 find_reg (alloc_no, forbidden_regs, 0, 0, 1);
1326 if (reg_renumber[regno] < 0
1327 && reg_alternate_class (regno) != NO_REGS)
1328 find_reg (alloc_no, forbidden_regs, 1, 0, 1);
1330 /* If we found a register, modify the RTL for the register to
1331 show the hard register, and mark that register live. */
1332 if (reg_renumber[regno] >= 0)
1334 SET_REGNO (regno_reg_rtx[regno], reg_renumber[regno]);
1335 mark_home_live (regno);
1340 /* Indicate that hard register number FROM was eliminated and replaced with
1341 an offset from hard register number TO. The status of hard registers live
1342 at the start of a basic block is updated by replacing a use of FROM with
1343 a use of TO. */
1345 void
1346 mark_elimination (int from, int to)
1348 basic_block bb;
1350 FOR_EACH_BB (bb)
1352 regset r = DF_LIVE_IN (bb);
1353 if (REGNO_REG_SET_P (r, from))
1355 CLEAR_REGNO_REG_SET (r, from);
1356 SET_REGNO_REG_SET (r, to);
/* Print chain C to FILE.  Emits the insn's UID followed by the chain's
   live_throughout and dead_or_set register sets.  */

static void
print_insn_chain (FILE *file, struct insn_chain *c)
{
  fprintf (file, "insn=%d, ", INSN_UID(c->insn));
  bitmap_print (file, &c->live_throughout, "live_throughout: ", ", ");
  bitmap_print (file, &c->dead_or_set, "dead_or_set: ", "\n");
}
1372 /* Print all reload_insn_chains to FILE. */
1374 static void
1375 print_insn_chains (FILE *file)
1377 struct insn_chain *c;
1378 for (c = reload_insn_chain; c ; c = c->next)
1379 print_insn_chain (file, c);
/* Walk the insns of the current function and build reload_insn_chain,
   and record register life information.  */

static void
build_insn_chain (void)
{
  unsigned int i;
  struct insn_chain **p = &reload_insn_chain;
  basic_block bb;
  struct insn_chain *c = NULL;
  struct insn_chain *next = NULL;
  bitmap live_relevant_regs = BITMAP_ALLOC (NULL);
  bitmap elim_regset = BITMAP_ALLOC (NULL);
  /* live_subregs is a vector used to keep accurate information about
     which hardregs are live in multiword pseudos.  live_subregs and
     live_subregs_used are indexed by pseudo number.  The live_subreg
     entry for a particular pseudo is only used if the corresponding
     element is non zero in live_subregs_used.  The value in
     live_subregs_used is number of bytes that the pseudo can
     occupy.  */
  sbitmap *live_subregs = XCNEWVEC (sbitmap, max_regno);
  int *live_subregs_used = XNEWVEC (int, max_regno);

  /* Eliminable hard regs (frame pointer etc.) are never considered
     live for the purposes of the reload chain.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (TEST_HARD_REG_BIT (eliminable_regset, i))
      bitmap_set_bit (elim_regset, i);

  /* Walk blocks last-to-first and insns bottom-up so liveness can be
     propagated backwards from each block's live-out set.  */
  FOR_EACH_BB_REVERSE (bb)
    {
      bitmap_iterator bi;
      rtx insn;

      CLEAR_REG_SET (live_relevant_regs);
      memset (live_subregs_used, 0, max_regno * sizeof (int));

      /* Seed liveness with the hard regs live out of this block...  */
      EXECUTE_IF_SET_IN_BITMAP (df_get_live_out (bb), 0, i, bi)
	{
	  if (i >= FIRST_PSEUDO_REGISTER)
	    break;
	  bitmap_set_bit (live_relevant_regs, i);
	}

      /* ...and the live-out pseudos that got a hard register.  */
      EXECUTE_IF_SET_IN_BITMAP (df_get_live_out (bb), FIRST_PSEUDO_REGISTER, i, bi)
	{
	  if (reg_renumber[i] >= 0)
	    bitmap_set_bit (live_relevant_regs, i);
	}

      FOR_BB_INSNS_REVERSE (bb, insn)
	{
	  if (!NOTE_P (insn) && !BARRIER_P (insn))
	    {
	      unsigned int uid = INSN_UID (insn);
	      struct df_ref **def_rec;
	      struct df_ref **use_rec;

	      /* Prepend a new chain element; P always points at the
		 prev-link of the most recently created element.  */
	      c = new_insn_chain ();
	      c->next = next;
	      next = c;
	      *p = c;
	      p = &c->prev;

	      c->insn = insn;
	      c->block = bb->index;

	      if (INSN_P (insn))
		for (def_rec = DF_INSN_UID_DEFS (uid); *def_rec; def_rec++)
		  {
		    struct df_ref *def = *def_rec;
		    unsigned int regno = DF_REF_REGNO (def);

		    /* Ignore may clobbers because these are generated
		       from calls.  However, every other kind of def is
		       added to dead_or_set.  */
		    if (!DF_REF_FLAGS_IS_SET (def, DF_REF_MAY_CLOBBER))
		      {
			if (regno < FIRST_PSEUDO_REGISTER)
			  {
			    if (!fixed_regs[regno])
			      bitmap_set_bit (&c->dead_or_set, regno);
			  }
			else if (reg_renumber[regno] >= 0)
			  bitmap_set_bit (&c->dead_or_set, regno);
		      }

		    if ((regno < FIRST_PSEUDO_REGISTER || reg_renumber[regno] >= 0)
			&& (!DF_REF_FLAGS_IS_SET (def, DF_REF_CONDITIONAL)))
		      {
			rtx reg = DF_REF_REG (def);

			/* We can model subregs, but not if they are
			   wrapped in ZERO_EXTRACTS.  */
			if (GET_CODE (reg) == SUBREG
			    && !DF_REF_FLAGS_IS_SET (def, DF_REF_EXTRACT))
			  {
			    unsigned int start = SUBREG_BYTE (reg);
			    unsigned int last = start
			      + GET_MODE_SIZE (GET_MODE (reg));

			    ra_init_live_subregs (bitmap_bit_p (live_relevant_regs,
								regno),
						  live_subregs,
						  live_subregs_used,
						  regno, reg);
			    /* Ignore the paradoxical bits.  */
			    if ((int)last > live_subregs_used[regno])
			      last = live_subregs_used[regno];

			    /* A def kills the bytes it writes (scanning
			       backwards, liveness ends at a def).  */
			    while (start < last)
			      {
				RESET_BIT (live_subregs[regno], start);
				start++;
			      }

			    if (sbitmap_empty_p (live_subregs[regno]))
			      {
				live_subregs_used[regno] = 0;
				bitmap_clear_bit (live_relevant_regs, regno);
			      }
			    else
			      /* Set live_relevant_regs here because
				 that bit has to be true to get us to
				 look at the live_subregs fields.  */
			      bitmap_set_bit (live_relevant_regs, regno);
			  }
			else
			  {
			    /* DF_REF_PARTIAL is generated for
			       subregs, STRICT_LOW_PART, and
			       ZERO_EXTRACT.  We handle the subreg
			       case above so here we have to keep from
			       modeling the def as a killing def.  */
			    if (!DF_REF_FLAGS_IS_SET (def, DF_REF_PARTIAL))
			      {
				bitmap_clear_bit (live_relevant_regs, regno);
				live_subregs_used[regno] = 0;
			      }
			  }
		      }
		  }

	      bitmap_and_compl_into (live_relevant_regs, elim_regset);
	      /* Regs live here (after defs, before uses) are live
		 throughout this insn.  */
	      bitmap_copy (&c->live_throughout, live_relevant_regs);

	      if (INSN_P (insn))
		for (use_rec = DF_INSN_UID_USES (uid); *use_rec; use_rec++)
		  {
		    struct df_ref *use = *use_rec;
		    unsigned int regno = DF_REF_REGNO (use);
		    rtx reg = DF_REF_REG (use);

		    /* DF_REF_READ_WRITE on a use means that this use
		       is fabricated from a def that is a partial set
		       to a multiword reg.  Here, we only model the
		       subreg case that is not wrapped in ZERO_EXTRACT
		       precisely so we do not need to look at the
		       fabricated use.  */
		    if (DF_REF_FLAGS_IS_SET (use, DF_REF_READ_WRITE)
			&& !DF_REF_FLAGS_IS_SET (use, DF_REF_EXTRACT)
			&& DF_REF_FLAGS_IS_SET (use, DF_REF_SUBREG))
		      continue;

		    /* Add the last use of each var to dead_or_set.  */
		    if (!bitmap_bit_p (live_relevant_regs, regno))
		      {
			if (regno < FIRST_PSEUDO_REGISTER)
			  {
			    if (!fixed_regs[regno])
			      bitmap_set_bit (&c->dead_or_set, regno);
			  }
			else if (reg_renumber[regno] >= 0)
			  bitmap_set_bit (&c->dead_or_set, regno);
		      }

		    if (regno < FIRST_PSEUDO_REGISTER || reg_renumber[regno] >= 0)
		      {
			if (GET_CODE (reg) == SUBREG
			    && !DF_REF_FLAGS_IS_SET (use, DF_REF_EXTRACT))
			  {
			    unsigned int start = SUBREG_BYTE (reg);
			    unsigned int last = start
			      + GET_MODE_SIZE (GET_MODE (reg));

			    ra_init_live_subregs (bitmap_bit_p (live_relevant_regs,
								regno),
						  live_subregs,
						  live_subregs_used,
						  regno, reg);

			    /* Ignore the paradoxical bits.  */
			    if ((int)last > live_subregs_used[regno])
			      last = live_subregs_used[regno];

			    /* A use makes the bytes it reads live.  */
			    while (start < last)
			      {
				SET_BIT (live_subregs[regno], start);
				start++;
			      }
			  }
			else
			  /* Resetting the live_subregs_used is
			     effectively saying do not use the subregs
			     because we are reading the whole
			     pseudo.  */
			  live_subregs_used[regno] = 0;
			bitmap_set_bit (live_relevant_regs, regno);
		      }
		  }
	    }
	}

      /* FIXME!! The following code is a disaster.  Reload needs to see the
	 labels and jump tables that are just hanging out in between
	 the basic blocks.  See pr33676.  */
      insn = BB_HEAD (bb);

      /* Skip over the barriers and cruft.  */
      while (insn && (BARRIER_P (insn) || NOTE_P (insn)
		      || BLOCK_FOR_INSN (insn) == bb))
	insn = PREV_INSN (insn);

      /* While we add anything except barriers and notes, the focus is
	 to get the labels and jump tables into the
	 reload_insn_chain.  */
      while (insn)
	{
	  if (!NOTE_P (insn) && !BARRIER_P (insn))
	    {
	      if (BLOCK_FOR_INSN (insn))
		break;

	      c = new_insn_chain ();
	      c->next = next;
	      next = c;
	      *p = c;
	      p = &c->prev;

	      /* The block makes no sense here, but it is what the old
		 code did.  */
	      c->block = bb->index;
	      c->insn = insn;
	      bitmap_copy (&c->live_throughout, live_relevant_regs);
	    }
	  insn = PREV_INSN (insn);
	}
    }

  /* Release the per-pseudo subreg bitmaps.  */
  for (i = 0; i < (unsigned int) max_regno; i++)
    if (live_subregs[i])
      free (live_subregs[i]);

  reload_insn_chain = c;
  *p = NULL;

  free (live_subregs);
  free (live_subregs_used);
  BITMAP_FREE (live_relevant_regs);
  BITMAP_FREE (elim_regset);

  if (dump_file)
    print_insn_chains (dump_file);
}
/* Print debugging trace information if -dg switch is given,
   showing the information on which the allocation decisions are based.  */

static void
dump_conflicts (FILE *file)
{
  int i;
  int regno;
  int has_preferences;
  int nregs;
  /* Count the allocnos that still need a hard register.  */
  nregs = 0;
  for (i = 0; i < max_allocno; i++)
    {
      if (reg_renumber[allocno[allocno_order[i]].reg] >= 0)
	continue;
      nregs++;
    }
  fprintf (file, ";; %d regs to allocate:", nregs);
  for (regno = 0; regno < max_regno; regno++)
    if ((i = reg_allocno[regno]) >= 0)
      {
	int j;
	if (reg_renumber[allocno[allocno_order[i]].reg] >= 0)
	  continue;
	fprintf (file, " %d", allocno[allocno_order[i]].reg);
	/* List other pseudos sharing this allocno with a "+" prefix.  */
	for (j = 0; j < max_regno; j++)
	  if (reg_allocno[j] == allocno_order[i]
	      && j != allocno[allocno_order[i]].reg)
	    fprintf (file, "+%d", j);
	if (allocno[allocno_order[i]].size != 1)
	  fprintf (file, " (%d)", allocno[allocno_order[i]].size);
      }
  fprintf (file, "\n");

  /* For each allocno, list conflicting pseudos, conflicting hard regs,
     and (when present) preferred hard regs.  */
  for (regno = 0; regno < max_regno; regno++)
    if ((i = reg_allocno[regno]) >= 0)
      {
	int j;
	adjacency_iter ai;
	fprintf (file, ";; %d conflicts:", allocno[i].reg);
	FOR_EACH_CONFLICT (i, j, ai)
	  {
	    fprintf (file, " %d", allocno[j].reg);
	  }
	for (j = 0; j < FIRST_PSEUDO_REGISTER; j++)
	  if (TEST_HARD_REG_BIT (allocno[i].hard_reg_conflicts, j)
	      && !fixed_regs[j])
	    fprintf (file, " %d", j);
	fprintf (file, "\n");

	has_preferences = 0;
	for (j = 0; j < FIRST_PSEUDO_REGISTER; j++)
	  if (TEST_HARD_REG_BIT (allocno[i].hard_reg_preferences, j))
	    has_preferences = 1;

	if (!has_preferences)
	  continue;
	fprintf (file, ";; %d preferences:", allocno[i].reg);
	for (j = 0; j < FIRST_PSEUDO_REGISTER; j++)
	  if (TEST_HARD_REG_BIT (allocno[i].hard_reg_preferences, j))
	    fprintf (file, " %d", j);
	fprintf (file, "\n");
      }
  fprintf (file, "\n");
}
1712 void
1713 dump_global_regs (FILE *file)
1715 int i, j;
1717 fprintf (file, ";; Register dispositions:\n");
1718 for (i = FIRST_PSEUDO_REGISTER, j = 0; i < max_regno; i++)
1719 if (reg_renumber[i] >= 0)
1721 fprintf (file, "%d in %d ", i, reg_renumber[i]);
1722 if (++j % 6 == 0)
1723 fprintf (file, "\n");
1726 fprintf (file, "\n\n;; Hard regs used: ");
1727 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1728 if (df_regs_ever_live_p (i))
1729 fprintf (file, " %d", i);
1730 fprintf (file, "\n\n");
/* Run old register allocator.  Return TRUE if we must exit
   rest_of_compilation upon return.  */
static unsigned int
rest_of_handle_global_alloc (void)
{
  bool failure;

  /* If optimizing, allocate remaining pseudo-regs.  Do the reload
     pass fixing up any insns that are invalid.  */
  if (optimize && dbg_cnt (global_alloc_at_func))
    failure = global_alloc ();
  else
    {
      /* There is just too much going on in the register allocators to
	 keep things up to date.  At the end we have to rescan anyway
	 because things change when the reload_completed flag is set.
	 So we just turn off scanning and we will rescan by hand.  */
      df_set_flags (DF_NO_INSN_RESCAN);
      compute_regsets (&eliminable_regset, &no_global_alloc_regs);
      build_insn_chain ();
      /* NOTE(review): DF_NO_INSN_RESCAN is set again here; looks
	 redundant with the call above unless compute_regsets or
	 build_insn_chain clears it -- confirm before removing.  */
      df_set_flags (DF_NO_INSN_RESCAN);
      failure = reload (get_insns (), 0);
    }

  if (dump_enabled_p (pass_global_alloc.static_pass_number))
    {
      timevar_push (TV_DUMP);
      dump_global_regs (dump_file);
      timevar_pop (TV_DUMP);
    }

  /* FIXME: This appears on the surface to be wrong thing to be doing.
     So much of the compiler is designed to check reload_completed to
     see if it is running after reload that seems doomed to failure.
     We should be returning a value that says that we have found
     errors so that nothing but the cleanup passes are run
     afterwards.  */
  gcc_assert (reload_completed || failure);
  reload_completed = !failure;

  /* The world has changed so much that at this point we might as well
     just rescan everything.  Not that df_rescan_all_insns is not
     going to help here because it does not touch the artificial uses
     and defs.  */
  df_finish_pass (true);
  if (optimize > 1)
    df_live_add_problem ();
  df_scan_alloc (NULL);
  df_scan_blocks ();

  if (optimize)
    df_analyze ();

  regstat_free_n_sets_and_refs ();
  regstat_free_ri ();
  return 0;
}
1791 struct tree_opt_pass pass_global_alloc =
1793 "greg", /* name */
1794 NULL, /* gate */
1795 rest_of_handle_global_alloc, /* execute */
1796 NULL, /* sub */
1797 NULL, /* next */
1798 0, /* static_pass_number */
1799 TV_GLOBAL_ALLOC, /* tv_id */
1800 0, /* properties_required */
1801 0, /* properties_provided */
1802 0, /* properties_destroyed */
1803 0, /* todo_flags_start */
1804 TODO_dump_func | TODO_verify_rtl_sharing
1805 | TODO_ggc_collect, /* todo_flags_finish */
1806 'g' /* letter */