1 /* Allocate registers for pseudo-registers that span basic blocks.
2 Copyright (C) 1987, 1988, 1991, 1994, 1996, 1997, 1998,
3 1999, 2000, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
25 #include "coretypes.h"
28 #include "hard-reg-set.h"
34 #include "insn-config.h"
40 /* This pass of the compiler performs global register allocation.
41 It assigns hard register numbers to all the pseudo registers
42 that were not handled in local_alloc. Assignments are recorded
43 in the vector reg_renumber, not by changing the rtl code.
44 (Such changes are made by final). The entry point is
45 the function global_alloc.
47 After allocation is complete, the reload pass is run as a subroutine
48 of this pass, so that when a pseudo reg loses its hard reg due to
49 spilling it is possible to make a second attempt to find a hard
50 reg for it. The reload pass is independent in other respects
51 and it is run even when stupid register allocation is in use.
53 1. Assign allocation-numbers (allocnos) to the pseudo-registers
54 still needing allocations and to the pseudo-registers currently
55 allocated by local-alloc which may be spilled by reload.
56 Set up tables reg_allocno and allocno_reg to map
57 reg numbers to allocnos and vice versa.
58 max_allocno gets the number of allocnos in use.
60 2. Allocate a max_allocno by max_allocno conflict bit matrix and clear it.
61 Allocate a max_allocno by FIRST_PSEUDO_REGISTER conflict matrix
62 for conflicts between allocnos and explicit hard register use
63 (which includes use of pseudo-registers allocated by local_alloc).
65 3. For each basic block
66 walk forward through the block, recording which
67 pseudo-registers and which hardware registers are live.
68 Build the conflict matrix between the pseudo-registers
69 and another between the pseudo-registers and hardware registers.
70 Also record the preferred hardware registers
71 for each pseudo-register.
73 4. Sort a table of the allocnos into order of
74 desirability of the variables.
76 5. Allocate the variables in that order; each if possible into
77 a preferred register, else into another register. */
79 /* Number of pseudo-registers which are candidates for allocation. */
81 static int max_allocno
;
83 /* Indexed by (pseudo) reg number, gives the allocno, or -1
84 for pseudo registers which are not to be allocated. */
86 static int *reg_allocno
;
91 /* Gives the number of consecutive hard registers needed by that
95 /* Number of calls crossed by each allocno. */
98 /* Number of calls that might throw crossed by each allocno. */
99 int throwing_calls_crossed
;
101 /* Number of refs to each allocno. */
104 /* Frequency of uses of each allocno. */
107 /* Guess at live length of each allocno.
108 This is actually the max of the live lengths of the regs. */
111 /* Set of hard regs conflicting with allocno N. */
113 HARD_REG_SET hard_reg_conflicts
;
115 /* Set of hard regs preferred by allocno N.
116 This is used to make allocnos go into regs that are copied to or from them,
117 when possible, to reduce register shuffling. */
119 HARD_REG_SET hard_reg_preferences
;
121 /* Similar, but just counts register preferences made in simple copy
122 operations, rather than arithmetic. These are given priority because
123 we can always eliminate an insn by using these, but using a register
124 in the above list won't always eliminate an insn. */
126 HARD_REG_SET hard_reg_copy_preferences
;
128 /* Similar to hard_reg_preferences, but includes bits for subsequent
129 registers when an allocno is multi-word. The above variable is used for
130 allocation while this is used to build reg_someone_prefers, below. */
132 HARD_REG_SET hard_reg_full_preferences
;
134 /* Set of hard registers that some later allocno has a preference for. */
136 HARD_REG_SET regs_someone_prefers
;
139 /* Set to true if allocno can't be allocated in the stack register. */
144 static struct allocno
*allocno
;
146 /* A vector of the integers from 0 to max_allocno-1,
147 sorted in the order of first-to-be-allocated first. */
149 static int *allocno_order
;
151 /* Indexed by (pseudo) reg number, gives the number of another
152 lower-numbered pseudo reg which can share a hard reg with this pseudo
153 *even if the two pseudos would otherwise appear to conflict*. */
155 static int *reg_may_share
;
157 /* Define the number of bits in each element of `conflicts' and what
158 type that element has. We use the largest integer format on the
161 #define INT_BITS HOST_BITS_PER_WIDE_INT
162 #define INT_TYPE HOST_WIDE_INT
164 /* max_allocno by max_allocno array of bits,
165 recording whether two allocno's conflict (can't go in the same
168 `conflicts' is symmetric after the call to mirror_conflicts. */
170 static INT_TYPE
*conflicts
;
172 /* Number of ints required to hold max_allocno bits.
173 This is the length of a row in `conflicts'. */
175 static int allocno_row_words
;
177 /* Two macros to test or store 1 in an element of `conflicts'. */
179 #define CONFLICTP(I, J) \
180 (conflicts[(I) * allocno_row_words + (unsigned) (J) / INT_BITS] \
181 & ((INT_TYPE) 1 << ((unsigned) (J) % INT_BITS)))
183 /* For any allocno set in ALLOCNO_SET, set ALLOCNO to that allocno,
185 #define EXECUTE_IF_SET_IN_ALLOCNO_SET(ALLOCNO_SET, ALLOCNO, CODE) \
189 INT_TYPE *p_ = (ALLOCNO_SET); \
191 for (i_ = allocno_row_words - 1, allocno_ = 0; i_ >= 0; \
192 i_--, allocno_ += INT_BITS) \
194 unsigned INT_TYPE word_ = (unsigned INT_TYPE) *p_++; \
196 for ((ALLOCNO) = allocno_; word_; word_ >>= 1, (ALLOCNO)++) \
204 /* This doesn't work for non-GNU C due to the way CODE is macro expanded. */
206 /* For any allocno that conflicts with IN_ALLOCNO, set OUT_ALLOCNO to
207 the conflicting allocno, and execute CODE. This macro assumes that
208 mirror_conflicts has been run. */
209 #define EXECUTE_IF_CONFLICT(IN_ALLOCNO, OUT_ALLOCNO, CODE)\
210 EXECUTE_IF_SET_IN_ALLOCNO_SET (conflicts + (IN_ALLOCNO) * allocno_row_words,\
214 /* Set of hard regs currently live (during scan of all insns). */
216 static HARD_REG_SET hard_regs_live
;
218 /* Set of registers that global-alloc isn't supposed to use. */
220 static HARD_REG_SET no_global_alloc_regs
;
222 /* Set of registers used so far. */
224 static HARD_REG_SET regs_used_so_far
;
226 /* Number of refs to each hard reg, as used by local alloc.
227 It is zero for a reg that contains global pseudos or is explicitly used. */
229 static int local_reg_n_refs
[FIRST_PSEUDO_REGISTER
];
231 /* Frequency of uses of given hard reg. */
232 static int local_reg_freq
[FIRST_PSEUDO_REGISTER
];
234 /* Guess at live length of each hard reg, as used by local alloc.
235 This is actually the sum of the live lengths of the specific regs. */
237 static int local_reg_live_length
[FIRST_PSEUDO_REGISTER
];
239 /* Set to 1 a bit in a vector TABLE of HARD_REG_SETs, for vector
240 element I, and hard register number J. */
242 #define SET_REGBIT(TABLE, I, J) SET_HARD_REG_BIT (allocno[I].TABLE, J)
244 /* Bit mask for allocnos live at current point in the scan. */
246 static INT_TYPE
*allocnos_live
;
248 /* Test, set or clear bit number I in allocnos_live,
249 a bit vector indexed by allocno. */
251 #define SET_ALLOCNO_LIVE(I) \
252 (allocnos_live[(unsigned) (I) / INT_BITS] \
253 |= ((INT_TYPE) 1 << ((unsigned) (I) % INT_BITS)))
255 #define CLEAR_ALLOCNO_LIVE(I) \
256 (allocnos_live[(unsigned) (I) / INT_BITS] \
257 &= ~((INT_TYPE) 1 << ((unsigned) (I) % INT_BITS)))
259 /* This is turned off because it doesn't work right for DImode.
260 (And it is only used for DImode, so the other cases are worthless.)
261 The problem is that it isn't true that there is NO possibility of conflict;
262 only that there is no conflict if the two pseudos get the exact same regs.
263 If they were allocated with a partial overlap, there would be a conflict.
264 We can't safely turn off the conflict unless we have another way to
265 prevent the partial overlap.
267 Idea: change hard_reg_conflicts so that instead of recording which
268 hard regs the allocno may not overlap, it records where the allocno
269 may not start. Change both where it is used and where it is updated.
270 Then there is a way to record that (reg:DI 108) may start at 10
271 but not at 9 or 11. There is still the question of how to record
272 this semi-conflict between two pseudos. */
274 /* Reg pairs for which conflict after the current insn
275 is inhibited by a REG_NO_CONFLICT note.
276 If the table gets full, we ignore any other notes--that is conservative. */
277 #define NUM_NO_CONFLICT_PAIRS 4
278 /* Number of pairs in use in this insn. */
279 int n_no_conflict_pairs
;
280 static struct { int allocno1
, allocno2
;}
281 no_conflict_pairs
[NUM_NO_CONFLICT_PAIRS
];
284 /* Record all regs that are set in any one insn.
285 Communication from mark_reg_{store,clobber} and global_conflicts. */
287 static rtx
*regs_set
;
288 static int n_regs_set
;
290 /* All registers that can be eliminated. */
292 static HARD_REG_SET eliminable_regset
;
294 static int allocno_compare (const void *, const void *);
295 static void global_conflicts (void);
296 static void mirror_conflicts (void);
297 static void expand_preferences (void);
298 static void prune_preferences (void);
299 static void find_reg (int, HARD_REG_SET
, int, int, int);
300 static void record_one_conflict (int);
301 static void record_conflicts (int *, int);
302 static void mark_reg_store (rtx
, rtx
, void *);
303 static void mark_reg_clobber (rtx
, rtx
, void *);
304 static void mark_reg_conflicts (rtx
);
305 static void mark_reg_death (rtx
);
306 static void mark_reg_live_nc (int, enum machine_mode
);
307 static void set_preference (rtx
, rtx
);
308 static void dump_conflicts (FILE *);
309 static void reg_becomes_live (rtx
, rtx
, void *);
310 static void reg_dies (int, enum machine_mode
, struct insn_chain
*);
312 static void allocate_bb_info (void);
313 static void free_bb_info (void);
314 static bool check_earlyclobber (rtx
);
315 static void mark_reg_use_for_earlyclobber_1 (rtx
*, void *);
316 static int mark_reg_use_for_earlyclobber (rtx
*, void *);
317 static void calculate_local_reg_bb_info (void);
318 static void set_up_bb_rts_numbers (void);
319 static int rpost_cmp (const void *, const void *);
320 static void calculate_reg_pav (void);
321 static void modify_reg_pav (void);
322 static void make_accurate_live_analysis (void);
326 /* Perform allocation of pseudo-registers not allocated by local_alloc.
327 FILE is a file to output debugging information on,
328 or zero if such output is not desired.
330 Return value is nonzero if reload failed
331 and we must not do any more for this function. */
334 global_alloc (FILE *file
)
337 #ifdef ELIMINABLE_REGS
338 static const struct {const int from
, to
; } eliminables
[] = ELIMINABLE_REGS
;
341 = (! flag_omit_frame_pointer
342 || (current_function_calls_alloca
&& EXIT_IGNORE_STACK
)
343 || FRAME_POINTER_REQUIRED
);
348 make_accurate_live_analysis ();
352 /* A machine may have certain hard registers that
353 are safe to use only within a basic block. */
355 CLEAR_HARD_REG_SET (no_global_alloc_regs
);
357 /* Build the regset of all eliminable registers and show we can't use those
358 that we already know won't be eliminated. */
359 #ifdef ELIMINABLE_REGS
360 for (i
= 0; i
< ARRAY_SIZE (eliminables
); i
++)
363 = (! CAN_ELIMINATE (eliminables
[i
].from
, eliminables
[i
].to
)
364 || (eliminables
[i
].to
== STACK_POINTER_REGNUM
&& need_fp
));
366 if (!regs_asm_clobbered
[eliminables
[i
].from
])
368 SET_HARD_REG_BIT (eliminable_regset
, eliminables
[i
].from
);
371 SET_HARD_REG_BIT (no_global_alloc_regs
, eliminables
[i
].from
);
373 else if (cannot_elim
)
374 error ("%s cannot be used in asm here",
375 reg_names
[eliminables
[i
].from
]);
377 regs_ever_live
[eliminables
[i
].from
] = 1;
379 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
380 if (!regs_asm_clobbered
[HARD_FRAME_POINTER_REGNUM
])
382 SET_HARD_REG_BIT (eliminable_regset
, HARD_FRAME_POINTER_REGNUM
);
384 SET_HARD_REG_BIT (no_global_alloc_regs
, HARD_FRAME_POINTER_REGNUM
);
387 error ("%s cannot be used in asm here",
388 reg_names
[HARD_FRAME_POINTER_REGNUM
]);
390 regs_ever_live
[HARD_FRAME_POINTER_REGNUM
] = 1;
394 if (!regs_asm_clobbered
[FRAME_POINTER_REGNUM
])
396 SET_HARD_REG_BIT (eliminable_regset
, FRAME_POINTER_REGNUM
);
398 SET_HARD_REG_BIT (no_global_alloc_regs
, FRAME_POINTER_REGNUM
);
401 error ("%s cannot be used in asm here", reg_names
[FRAME_POINTER_REGNUM
]);
403 regs_ever_live
[FRAME_POINTER_REGNUM
] = 1;
406 /* Track which registers have already been used. Start with registers
407 explicitly in the rtl, then registers allocated by local register
410 CLEAR_HARD_REG_SET (regs_used_so_far
);
411 #ifdef LEAF_REGISTERS
412 /* If we are doing the leaf function optimization, and this is a leaf
413 function, it means that the registers that take work to save are those
414 that need a register window. So prefer the ones that can be used in
417 const char *cheap_regs
;
418 const char *const leaf_regs
= LEAF_REGISTERS
;
420 if (only_leaf_regs_used () && leaf_function_p ())
421 cheap_regs
= leaf_regs
;
423 cheap_regs
= call_used_regs
;
424 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
425 if (regs_ever_live
[i
] || cheap_regs
[i
])
426 SET_HARD_REG_BIT (regs_used_so_far
, i
);
429 /* We consider registers that do not have to be saved over calls as if
430 they were already used since there is no cost in using them. */
431 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
432 if (regs_ever_live
[i
] || call_used_regs
[i
])
433 SET_HARD_REG_BIT (regs_used_so_far
, i
);
436 for (i
= FIRST_PSEUDO_REGISTER
; i
< (size_t) max_regno
; i
++)
437 if (reg_renumber
[i
] >= 0)
438 SET_HARD_REG_BIT (regs_used_so_far
, reg_renumber
[i
]);
440 /* Establish mappings from register number to allocation number
441 and vice versa. In the process, count the allocnos. */
443 reg_allocno
= xmalloc (max_regno
* sizeof (int));
445 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
448 /* Initialize the shared-hard-reg mapping
449 from the list of pairs that may share. */
450 reg_may_share
= xcalloc (max_regno
, sizeof (int));
451 for (x
= regs_may_share
; x
; x
= XEXP (XEXP (x
, 1), 1))
453 int r1
= REGNO (XEXP (x
, 0));
454 int r2
= REGNO (XEXP (XEXP (x
, 1), 0));
456 reg_may_share
[r1
] = r2
;
458 reg_may_share
[r2
] = r1
;
461 for (i
= FIRST_PSEUDO_REGISTER
; i
< (size_t) max_regno
; i
++)
462 /* Note that reg_live_length[i] < 0 indicates a "constant" reg
463 that we are supposed to refrain from putting in a hard reg.
464 -2 means do make an allocno but don't allocate it. */
465 if (REG_N_REFS (i
) != 0 && REG_LIVE_LENGTH (i
) != -1
466 /* Don't allocate pseudos that cross calls,
467 if this function receives a nonlocal goto. */
468 && (! current_function_has_nonlocal_label
469 || REG_N_CALLS_CROSSED (i
) == 0))
471 if (reg_renumber
[i
] < 0
472 && reg_may_share
[i
] && reg_allocno
[reg_may_share
[i
]] >= 0)
473 reg_allocno
[i
] = reg_allocno
[reg_may_share
[i
]];
475 reg_allocno
[i
] = max_allocno
++;
476 gcc_assert (REG_LIVE_LENGTH (i
));
481 allocno
= xcalloc (max_allocno
, sizeof (struct allocno
));
483 for (i
= FIRST_PSEUDO_REGISTER
; i
< (size_t) max_regno
; i
++)
484 if (reg_allocno
[i
] >= 0)
486 int num
= reg_allocno
[i
];
487 allocno
[num
].reg
= i
;
488 allocno
[num
].size
= PSEUDO_REGNO_SIZE (i
);
489 allocno
[num
].calls_crossed
+= REG_N_CALLS_CROSSED (i
);
490 allocno
[num
].throwing_calls_crossed
491 += REG_N_THROWING_CALLS_CROSSED (i
);
492 allocno
[num
].n_refs
+= REG_N_REFS (i
);
493 allocno
[num
].freq
+= REG_FREQ (i
);
494 if (allocno
[num
].live_length
< REG_LIVE_LENGTH (i
))
495 allocno
[num
].live_length
= REG_LIVE_LENGTH (i
);
498 /* Calculate amount of usage of each hard reg by pseudos
499 allocated by local-alloc. This is to see if we want to
501 memset (local_reg_live_length
, 0, sizeof local_reg_live_length
);
502 memset (local_reg_n_refs
, 0, sizeof local_reg_n_refs
);
503 memset (local_reg_freq
, 0, sizeof local_reg_freq
);
504 for (i
= FIRST_PSEUDO_REGISTER
; i
< (size_t) max_regno
; i
++)
505 if (reg_renumber
[i
] >= 0)
507 int regno
= reg_renumber
[i
];
508 int endregno
= regno
+ hard_regno_nregs
[regno
][PSEUDO_REGNO_MODE (i
)];
511 for (j
= regno
; j
< endregno
; j
++)
513 local_reg_n_refs
[j
] += REG_N_REFS (i
);
514 local_reg_freq
[j
] += REG_FREQ (i
);
515 local_reg_live_length
[j
] += REG_LIVE_LENGTH (i
);
519 /* We can't override local-alloc for a reg used not just by local-alloc. */
520 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
521 if (regs_ever_live
[i
])
522 local_reg_n_refs
[i
] = 0, local_reg_freq
[i
] = 0;
524 allocno_row_words
= (max_allocno
+ INT_BITS
- 1) / INT_BITS
;
526 /* We used to use alloca here, but the size of what it would try to
527 allocate would occasionally cause it to exceed the stack limit and
528 cause unpredictable core dumps. Some examples were > 2Mb in size. */
529 conflicts
= xcalloc (max_allocno
* allocno_row_words
, sizeof (INT_TYPE
));
531 allocnos_live
= xmalloc (allocno_row_words
* sizeof (INT_TYPE
));
533 /* If there is work to be done (at least one reg to allocate),
534 perform global conflict analysis and allocate the regs. */
538 /* Scan all the insns and compute the conflicts among allocnos
539 and between allocnos and hard regs. */
545 /* Eliminate conflicts between pseudos and eliminable registers. If
546 the register is not eliminated, the pseudo won't really be able to
547 live in the eliminable register, so the conflict doesn't matter.
548 If we do eliminate the register, the conflict will no longer exist.
549 So in either case, we can ignore the conflict. Likewise for
552 for (i
= 0; i
< (size_t) max_allocno
; i
++)
554 AND_COMPL_HARD_REG_SET (allocno
[i
].hard_reg_conflicts
,
556 AND_COMPL_HARD_REG_SET (allocno
[i
].hard_reg_copy_preferences
,
558 AND_COMPL_HARD_REG_SET (allocno
[i
].hard_reg_preferences
,
562 /* Try to expand the preferences by merging them between allocnos. */
564 expand_preferences ();
566 /* Determine the order to allocate the remaining pseudo registers. */
568 allocno_order
= xmalloc (max_allocno
* sizeof (int));
569 for (i
= 0; i
< (size_t) max_allocno
; i
++)
570 allocno_order
[i
] = i
;
572 /* Default the size to 1, since allocno_compare uses it to divide by.
573 Also convert allocno_live_length of zero to -1. A length of zero
574 can occur when all the registers for that allocno have reg_live_length
575 equal to -2. In this case, we want to make an allocno, but not
576 allocate it. So avoid the divide-by-zero and set it to a low
579 for (i
= 0; i
< (size_t) max_allocno
; i
++)
581 if (allocno
[i
].size
== 0)
583 if (allocno
[i
].live_length
== 0)
584 allocno
[i
].live_length
= -1;
587 qsort (allocno_order
, max_allocno
, sizeof (int), allocno_compare
);
589 prune_preferences ();
592 dump_conflicts (file
);
594 /* Try allocating them, one by one, in that order,
595 except for parameters marked with reg_live_length[regno] == -2. */
597 for (i
= 0; i
< (size_t) max_allocno
; i
++)
598 if (reg_renumber
[allocno
[allocno_order
[i
]].reg
] < 0
599 && REG_LIVE_LENGTH (allocno
[allocno_order
[i
]].reg
) >= 0)
601 /* If we have more than one register class,
602 first try allocating in the class that is cheapest
603 for this pseudo-reg. If that fails, try any reg. */
604 if (N_REG_CLASSES
> 1)
606 find_reg (allocno_order
[i
], 0, 0, 0, 0);
607 if (reg_renumber
[allocno
[allocno_order
[i
]].reg
] >= 0)
610 if (reg_alternate_class (allocno
[allocno_order
[i
]].reg
) != NO_REGS
)
611 find_reg (allocno_order
[i
], 0, 1, 0, 0);
614 free (allocno_order
);
617 /* Do the reloads now while the allocno data still exist, so that we can
618 try to assign new hard regs to any pseudo regs that are spilled. */
620 #if 0 /* We need to eliminate regs even if there is no rtl code,
621 for the sake of debugging information. */
622 if (n_basic_blocks
> 0)
625 build_insn_chain (get_insns ());
626 retval
= reload (get_insns (), 1);
631 free (reg_may_share
);
634 free (allocnos_live
);
639 /* Sort predicate for ordering the allocnos.
640 Returns -1 (1) if *v1 should be allocated before (after) *v2. */
643 allocno_compare (const void *v1p
, const void *v2p
)
645 int v1
= *(const int *)v1p
, v2
= *(const int *)v2p
;
646 /* Note that the quotient will never be bigger than
647 the value of floor_log2 times the maximum number of
648 times a register can occur in one insn (surely less than 100)
649 weighted by the frequency (maximally REG_FREQ_MAX).
650 Multiplying this by 10000/REG_FREQ_MAX can't overflow. */
652 = (((double) (floor_log2 (allocno
[v1
].n_refs
) * allocno
[v1
].freq
)
653 / allocno
[v1
].live_length
)
654 * (10000 / REG_FREQ_MAX
) * allocno
[v1
].size
);
656 = (((double) (floor_log2 (allocno
[v2
].n_refs
) * allocno
[v2
].freq
)
657 / allocno
[v2
].live_length
)
658 * (10000 / REG_FREQ_MAX
) * allocno
[v2
].size
);
662 /* If regs are equally good, sort by allocno,
663 so that the results of qsort leave nothing to chance. */
667 /* Scan the rtl code and record all conflicts and register preferences in the
668 conflict matrices and preference tables. */
671 global_conflicts (void)
676 int *block_start_allocnos
;
678 /* Make a vector that mark_reg_{store,clobber} will store in. */
679 regs_set
= xmalloc (max_parallel
* sizeof (rtx
) * 2);
681 block_start_allocnos
= xmalloc (max_allocno
* sizeof (int));
685 memset (allocnos_live
, 0, allocno_row_words
* sizeof (INT_TYPE
));
687 /* Initialize table of registers currently live
688 to the state at the beginning of this basic block.
689 This also marks the conflicts among hard registers
690 and any allocnos that are live.
692 For pseudo-regs, there is only one bit for each one
693 no matter how many hard regs it occupies.
694 This is ok; we know the size from PSEUDO_REGNO_SIZE.
695 For explicit hard regs, we cannot know the size that way
696 since one hard reg can be used with various sizes.
697 Therefore, we must require that all the hard regs
698 implicitly live as part of a multi-word hard reg
699 be explicitly marked in basic_block_live_at_start. */
702 regset old
= b
->global_live_at_start
;
704 reg_set_iterator rsi
;
706 REG_SET_TO_HARD_REG_SET (hard_regs_live
, old
);
707 EXECUTE_IF_SET_IN_REG_SET (old
, FIRST_PSEUDO_REGISTER
, i
, rsi
)
709 int a
= reg_allocno
[i
];
712 SET_ALLOCNO_LIVE (a
);
713 block_start_allocnos
[ax
++] = a
;
715 else if ((a
= reg_renumber
[i
]) >= 0)
716 mark_reg_live_nc (a
, PSEUDO_REGNO_MODE (i
));
719 /* Record that each allocno now live conflicts with each hard reg
722 It is not necessary to mark any conflicts between pseudos as
723 this point, even for pseudos which are live at the start of
726 Given two pseudos X and Y and any point in the CFG P.
728 On any path to point P where X and Y are live one of the
729 following conditions must be true:
731 1. X is live at some instruction on the path that
734 2. Y is live at some instruction on the path that
737 3. Either X or Y is not evaluated on the path to P
738 (i.e. it is used uninitialized) and thus the
739 conflict can be ignored.
741 In cases #1 and #2 the conflict will be recorded when we
742 scan the instruction that makes either X or Y become live. */
743 record_conflicts (block_start_allocnos
, ax
);
745 /* Pseudos can't go in stack regs at the start of a basic block that
746 is reached by an abnormal edge. Likewise for call clobbered regs,
747 because caller-save, fixup_abnormal_edges, and possibly
748 the table driven EH machinery are not quite ready to handle such
749 regs live across such edges. */
754 FOR_EACH_EDGE (e
, ei
, b
->preds
)
755 if (e
->flags
& EDGE_ABNORMAL
)
761 EXECUTE_IF_SET_IN_ALLOCNO_SET (allocnos_live
, ax
,
763 allocno
[ax
].no_stack_reg
= 1;
765 for (ax
= FIRST_STACK_REG
; ax
<= LAST_STACK_REG
; ax
++)
766 record_one_conflict (ax
);
769 /* No need to record conflicts for call clobbered regs if we have
770 nonlocal labels around, as we don't ever try to allocate such
771 regs in this case. */
772 if (! current_function_has_nonlocal_label
)
773 for (ax
= 0; ax
< FIRST_PSEUDO_REGISTER
; ax
++)
774 if (call_used_regs
[ax
])
775 record_one_conflict (ax
);
782 /* Scan the code of this basic block, noting which allocnos
783 and hard regs are born or die. When one is born,
784 record a conflict with all others currently live. */
788 RTX_CODE code
= GET_CODE (insn
);
791 /* Make regs_set an empty set. */
795 if (code
== INSN
|| code
== CALL_INSN
|| code
== JUMP_INSN
)
800 for (link
= REG_NOTES (insn
);
801 link
&& i
< NUM_NO_CONFLICT_PAIRS
;
802 link
= XEXP (link
, 1))
803 if (REG_NOTE_KIND (link
) == REG_NO_CONFLICT
)
805 no_conflict_pairs
[i
].allocno1
806 = reg_allocno
[REGNO (SET_DEST (PATTERN (insn
)))];
807 no_conflict_pairs
[i
].allocno2
808 = reg_allocno
[REGNO (XEXP (link
, 0))];
813 /* Mark any registers clobbered by INSN as live,
814 so they conflict with the inputs. */
816 note_stores (PATTERN (insn
), mark_reg_clobber
, NULL
);
818 /* Mark any registers dead after INSN as dead now. */
820 for (link
= REG_NOTES (insn
); link
; link
= XEXP (link
, 1))
821 if (REG_NOTE_KIND (link
) == REG_DEAD
)
822 mark_reg_death (XEXP (link
, 0));
824 /* Mark any registers set in INSN as live,
825 and mark them as conflicting with all other live regs.
826 Clobbers are processed again, so they conflict with
827 the registers that are set. */
829 note_stores (PATTERN (insn
), mark_reg_store
, NULL
);
832 for (link
= REG_NOTES (insn
); link
; link
= XEXP (link
, 1))
833 if (REG_NOTE_KIND (link
) == REG_INC
)
834 mark_reg_store (XEXP (link
, 0), NULL_RTX
, NULL
);
837 /* If INSN has multiple outputs, then any reg that dies here
838 and is used inside of an output
839 must conflict with the other outputs.
841 It is unsafe to use !single_set here since it will ignore an
842 unused output. Just because an output is unused does not mean
843 the compiler can assume the side effect will not occur.
844 Consider if REG appears in the address of an output and we
845 reload the output. If we allocate REG to the same hard
846 register as an unused output we could set the hard register
847 before the output reload insn. */
848 if (GET_CODE (PATTERN (insn
)) == PARALLEL
&& multiple_sets (insn
))
849 for (link
= REG_NOTES (insn
); link
; link
= XEXP (link
, 1))
850 if (REG_NOTE_KIND (link
) == REG_DEAD
)
852 int used_in_output
= 0;
854 rtx reg
= XEXP (link
, 0);
856 for (i
= XVECLEN (PATTERN (insn
), 0) - 1; i
>= 0; i
--)
858 rtx set
= XVECEXP (PATTERN (insn
), 0, i
);
859 if (GET_CODE (set
) == SET
860 && !REG_P (SET_DEST (set
))
861 && !rtx_equal_p (reg
, SET_DEST (set
))
862 && reg_overlap_mentioned_p (reg
, SET_DEST (set
)))
866 mark_reg_conflicts (reg
);
869 /* Mark any registers set in INSN and then never used. */
871 while (n_regs_set
-- > 0)
873 rtx note
= find_regno_note (insn
, REG_UNUSED
,
874 REGNO (regs_set
[n_regs_set
]));
876 mark_reg_death (XEXP (note
, 0));
880 if (insn
== BB_END (b
))
882 insn
= NEXT_INSN (insn
);
887 free (block_start_allocnos
);
890 /* Expand the preference information by looking for cases where one allocno
891 dies in an insn that sets an allocno. If those two allocnos don't conflict,
892 merge any preferences between those allocnos. */
895 expand_preferences (void)
901 /* We only try to handle the most common cases here. Most of the cases
902 where this wins are reg-reg copies. */
904 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
906 && (set
= single_set (insn
)) != 0
907 && REG_P (SET_DEST (set
))
908 && reg_allocno
[REGNO (SET_DEST (set
))] >= 0)
909 for (link
= REG_NOTES (insn
); link
; link
= XEXP (link
, 1))
910 if (REG_NOTE_KIND (link
) == REG_DEAD
911 && REG_P (XEXP (link
, 0))
912 && reg_allocno
[REGNO (XEXP (link
, 0))] >= 0
913 && ! CONFLICTP (reg_allocno
[REGNO (SET_DEST (set
))],
914 reg_allocno
[REGNO (XEXP (link
, 0))]))
916 int a1
= reg_allocno
[REGNO (SET_DEST (set
))];
917 int a2
= reg_allocno
[REGNO (XEXP (link
, 0))];
919 if (XEXP (link
, 0) == SET_SRC (set
))
921 IOR_HARD_REG_SET (allocno
[a1
].hard_reg_copy_preferences
,
922 allocno
[a2
].hard_reg_copy_preferences
);
923 IOR_HARD_REG_SET (allocno
[a2
].hard_reg_copy_preferences
,
924 allocno
[a1
].hard_reg_copy_preferences
);
927 IOR_HARD_REG_SET (allocno
[a1
].hard_reg_preferences
,
928 allocno
[a2
].hard_reg_preferences
);
929 IOR_HARD_REG_SET (allocno
[a2
].hard_reg_preferences
,
930 allocno
[a1
].hard_reg_preferences
);
931 IOR_HARD_REG_SET (allocno
[a1
].hard_reg_full_preferences
,
932 allocno
[a2
].hard_reg_full_preferences
);
933 IOR_HARD_REG_SET (allocno
[a2
].hard_reg_full_preferences
,
934 allocno
[a1
].hard_reg_full_preferences
);
938 /* Prune the preferences for global registers to exclude registers that cannot
941 Compute `regs_someone_prefers', which is a bitmask of the hard registers
942 that are preferred by conflicting registers of lower priority. If possible,
943 we will avoid using these registers. */
946 prune_preferences (void)
950 int *allocno_to_order
= xmalloc (max_allocno
* sizeof (int));
952 /* Scan least most important to most important.
953 For each allocno, remove from preferences registers that cannot be used,
954 either because of conflicts or register type. Then compute all registers
955 preferred by each lower-priority register that conflicts. */
957 for (i
= max_allocno
- 1; i
>= 0; i
--)
961 num
= allocno_order
[i
];
962 allocno_to_order
[num
] = i
;
963 COPY_HARD_REG_SET (temp
, allocno
[num
].hard_reg_conflicts
);
965 if (allocno
[num
].calls_crossed
== 0)
966 IOR_HARD_REG_SET (temp
, fixed_reg_set
);
968 IOR_HARD_REG_SET (temp
, call_used_reg_set
);
970 IOR_COMPL_HARD_REG_SET
972 reg_class_contents
[(int) reg_preferred_class (allocno
[num
].reg
)]);
974 AND_COMPL_HARD_REG_SET (allocno
[num
].hard_reg_preferences
, temp
);
975 AND_COMPL_HARD_REG_SET (allocno
[num
].hard_reg_copy_preferences
, temp
);
976 AND_COMPL_HARD_REG_SET (allocno
[num
].hard_reg_full_preferences
, temp
);
979 for (i
= max_allocno
- 1; i
>= 0; i
--)
981 /* Merge in the preferences of lower-priority registers (they have
982 already been pruned). If we also prefer some of those registers,
983 don't exclude them unless we are of a smaller size (in which case
984 we want to give the lower-priority allocno the first chance for
986 HARD_REG_SET temp
, temp2
;
989 num
= allocno_order
[i
];
991 CLEAR_HARD_REG_SET (temp
);
992 CLEAR_HARD_REG_SET (temp2
);
994 EXECUTE_IF_SET_IN_ALLOCNO_SET (conflicts
+ num
* allocno_row_words
,
997 if (allocno_to_order
[allocno2
] > i
)
999 if (allocno
[allocno2
].size
<= allocno
[num
].size
)
1000 IOR_HARD_REG_SET (temp
,
1001 allocno
[allocno2
].hard_reg_full_preferences
);
1003 IOR_HARD_REG_SET (temp2
,
1004 allocno
[allocno2
].hard_reg_full_preferences
);
1008 AND_COMPL_HARD_REG_SET (temp
, allocno
[num
].hard_reg_full_preferences
);
1009 IOR_HARD_REG_SET (temp
, temp2
);
1010 COPY_HARD_REG_SET (allocno
[num
].regs_someone_prefers
, temp
);
1012 free (allocno_to_order
);
1015 /* Assign a hard register to allocno NUM; look for one that is the beginning
1016 of a long enough stretch of hard regs none of which conflicts with ALLOCNO.
1017 The registers marked in PREFREGS are tried first.
1019 LOSERS, if nonzero, is a HARD_REG_SET indicating registers that cannot
1020 be used for this allocation.
1022 If ALT_REGS_P is zero, consider only the preferred class of ALLOCNO's reg.
1023 Otherwise ignore that preferred class and use the alternate class.
1025 If ACCEPT_CALL_CLOBBERED is nonzero, accept a call-clobbered hard reg that
1026 will have to be saved and restored at calls.
1028 RETRYING is nonzero if this is called from retry_global_alloc.
1030 If we find one, record it in reg_renumber.
1031 If not, do nothing. */
1034 find_reg (int num
, HARD_REG_SET losers
, int alt_regs_p
, int accept_call_clobbered
, int retrying
)
1036 int i
, best_reg
, pass
;
1037 HARD_REG_SET used
, used1
, used2
;
1039 enum reg_class
class = (alt_regs_p
1040 ? reg_alternate_class (allocno
[num
].reg
)
1041 : reg_preferred_class (allocno
[num
].reg
));
1042 enum machine_mode mode
= PSEUDO_REGNO_MODE (allocno
[num
].reg
);
1044 if (accept_call_clobbered
)
1045 COPY_HARD_REG_SET (used1
, call_fixed_reg_set
);
1046 else if (allocno
[num
].calls_crossed
== 0)
1047 COPY_HARD_REG_SET (used1
, fixed_reg_set
);
1049 COPY_HARD_REG_SET (used1
, call_used_reg_set
);
1051 /* Some registers should not be allocated in global-alloc. */
1052 IOR_HARD_REG_SET (used1
, no_global_alloc_regs
);
1054 IOR_HARD_REG_SET (used1
, losers
);
1056 IOR_COMPL_HARD_REG_SET (used1
, reg_class_contents
[(int) class]);
1057 COPY_HARD_REG_SET (used2
, used1
);
1059 IOR_HARD_REG_SET (used1
, allocno
[num
].hard_reg_conflicts
);
1061 #ifdef CANNOT_CHANGE_MODE_CLASS
1062 cannot_change_mode_set_regs (&used1
, mode
, allocno
[num
].reg
);
1065 /* Try each hard reg to see if it fits. Do this in two passes.
1066 In the first pass, skip registers that are preferred by some other pseudo
1067 to give it a better chance of getting one of those registers. Only if
1068 we can't get a register when excluding those do we take one of them.
1069 However, we never allocate a register for the first time in pass 0. */
1071 COPY_HARD_REG_SET (used
, used1
);
1072 IOR_COMPL_HARD_REG_SET (used
, regs_used_so_far
);
1073 IOR_HARD_REG_SET (used
, allocno
[num
].regs_someone_prefers
);
1076 for (i
= FIRST_PSEUDO_REGISTER
, pass
= 0;
1077 pass
<= 1 && i
>= FIRST_PSEUDO_REGISTER
;
1081 COPY_HARD_REG_SET (used
, used1
);
1082 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1084 #ifdef REG_ALLOC_ORDER
1085 int regno
= reg_alloc_order
[i
];
1089 if (! TEST_HARD_REG_BIT (used
, regno
)
1090 && HARD_REGNO_MODE_OK (regno
, mode
)
1091 && (allocno
[num
].calls_crossed
== 0
1092 || accept_call_clobbered
1093 || ! HARD_REGNO_CALL_PART_CLOBBERED (regno
, mode
)))
1096 int lim
= regno
+ hard_regno_nregs
[regno
][mode
];
1099 && ! TEST_HARD_REG_BIT (used
, j
));
1106 #ifndef REG_ALLOC_ORDER
1107 i
= j
; /* Skip starting points we know will lose */
1113 /* See if there is a preferred register with the same class as the register
1114 we allocated above. Making this restriction prevents register
1115 preferencing from creating worse register allocation.
1117 Remove from the preferred registers and conflicting registers. Note that
1118 additional conflicts may have been added after `prune_preferences' was
1121 First do this for those register with copy preferences, then all
1122 preferred registers. */
1124 AND_COMPL_HARD_REG_SET (allocno
[num
].hard_reg_copy_preferences
, used
);
1125 GO_IF_HARD_REG_SUBSET (allocno
[num
].hard_reg_copy_preferences
,
1126 reg_class_contents
[(int) NO_REGS
], no_copy_prefs
);
1130 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1131 if (TEST_HARD_REG_BIT (allocno
[num
].hard_reg_copy_preferences
, i
)
1132 && HARD_REGNO_MODE_OK (i
, mode
)
1133 && (allocno
[num
].calls_crossed
== 0
1134 || accept_call_clobbered
1135 || ! HARD_REGNO_CALL_PART_CLOBBERED (i
, mode
))
1136 && (REGNO_REG_CLASS (i
) == REGNO_REG_CLASS (best_reg
)
1137 || reg_class_subset_p (REGNO_REG_CLASS (i
),
1138 REGNO_REG_CLASS (best_reg
))
1139 || reg_class_subset_p (REGNO_REG_CLASS (best_reg
),
1140 REGNO_REG_CLASS (i
))))
1143 int lim
= i
+ hard_regno_nregs
[i
][mode
];
1146 && ! TEST_HARD_REG_BIT (used
, j
)
1147 && (REGNO_REG_CLASS (j
)
1148 == REGNO_REG_CLASS (best_reg
+ (j
- i
))
1149 || reg_class_subset_p (REGNO_REG_CLASS (j
),
1150 REGNO_REG_CLASS (best_reg
+ (j
- i
)))
1151 || reg_class_subset_p (REGNO_REG_CLASS (best_reg
+ (j
- i
)),
1152 REGNO_REG_CLASS (j
))));
1163 AND_COMPL_HARD_REG_SET (allocno
[num
].hard_reg_preferences
, used
);
1164 GO_IF_HARD_REG_SUBSET (allocno
[num
].hard_reg_preferences
,
1165 reg_class_contents
[(int) NO_REGS
], no_prefs
);
1169 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1170 if (TEST_HARD_REG_BIT (allocno
[num
].hard_reg_preferences
, i
)
1171 && HARD_REGNO_MODE_OK (i
, mode
)
1172 && (allocno
[num
].calls_crossed
== 0
1173 || accept_call_clobbered
1174 || ! HARD_REGNO_CALL_PART_CLOBBERED (i
, mode
))
1175 && (REGNO_REG_CLASS (i
) == REGNO_REG_CLASS (best_reg
)
1176 || reg_class_subset_p (REGNO_REG_CLASS (i
),
1177 REGNO_REG_CLASS (best_reg
))
1178 || reg_class_subset_p (REGNO_REG_CLASS (best_reg
),
1179 REGNO_REG_CLASS (i
))))
1182 int lim
= i
+ hard_regno_nregs
[i
][mode
];
1185 && ! TEST_HARD_REG_BIT (used
, j
)
1186 && (REGNO_REG_CLASS (j
)
1187 == REGNO_REG_CLASS (best_reg
+ (j
- i
))
1188 || reg_class_subset_p (REGNO_REG_CLASS (j
),
1189 REGNO_REG_CLASS (best_reg
+ (j
- i
)))
1190 || reg_class_subset_p (REGNO_REG_CLASS (best_reg
+ (j
- i
)),
1191 REGNO_REG_CLASS (j
))));
1202 /* If we haven't succeeded yet, try with caller-saves.
1203 We need not check to see if the current function has nonlocal
1204 labels because we don't put any pseudos that are live over calls in
1205 registers in that case. */
1207 if (flag_caller_saves
&& best_reg
< 0)
1209 /* Did not find a register. If it would be profitable to
1210 allocate a call-clobbered register and save and restore it
1211 around calls, do that. Don't do this if it crosses any calls
1212 that might throw. */
1213 if (! accept_call_clobbered
1214 && allocno
[num
].calls_crossed
!= 0
1215 && allocno
[num
].throwing_calls_crossed
== 0
1216 && CALLER_SAVE_PROFITABLE (allocno
[num
].n_refs
,
1217 allocno
[num
].calls_crossed
))
1219 HARD_REG_SET new_losers
;
1221 CLEAR_HARD_REG_SET (new_losers
);
1223 COPY_HARD_REG_SET (new_losers
, losers
);
1225 IOR_HARD_REG_SET(new_losers
, losing_caller_save_reg_set
);
1226 find_reg (num
, new_losers
, alt_regs_p
, 1, retrying
);
1227 if (reg_renumber
[allocno
[num
].reg
] >= 0)
1229 caller_save_needed
= 1;
1235 /* If we haven't succeeded yet,
1236 see if some hard reg that conflicts with us
1237 was utilized poorly by local-alloc.
1238 If so, kick out the regs that were put there by local-alloc
1239 so we can use it instead. */
1240 if (best_reg
< 0 && !retrying
1241 /* Let's not bother with multi-reg allocnos. */
1242 && allocno
[num
].size
== 1)
1244 /* Count from the end, to find the least-used ones first. */
1245 for (i
= FIRST_PSEUDO_REGISTER
- 1; i
>= 0; i
--)
1247 #ifdef REG_ALLOC_ORDER
1248 int regno
= reg_alloc_order
[i
];
1253 if (local_reg_n_refs
[regno
] != 0
1254 /* Don't use a reg no good for this pseudo. */
1255 && ! TEST_HARD_REG_BIT (used2
, regno
)
1256 && HARD_REGNO_MODE_OK (regno
, mode
)
1257 /* The code below assumes that we need only a single
1258 register, but the check of allocno[num].size above
1259 was not enough. Sometimes we need more than one
1260 register for a single-word value. */
1261 && hard_regno_nregs
[regno
][mode
] == 1
1262 && (allocno
[num
].calls_crossed
== 0
1263 || accept_call_clobbered
1264 || ! HARD_REGNO_CALL_PART_CLOBBERED (regno
, mode
))
1265 #ifdef CANNOT_CHANGE_MODE_CLASS
1266 && ! invalid_mode_change_p (regno
, REGNO_REG_CLASS (regno
),
1270 && (!allocno
[num
].no_stack_reg
1271 || regno
< FIRST_STACK_REG
|| regno
> LAST_STACK_REG
)
1275 /* We explicitly evaluate the divide results into temporary
1276 variables so as to avoid excess precision problems that occur
1277 on an i386-unknown-sysv4.2 (unixware) host. */
1279 double tmp1
= ((double) local_reg_freq
[regno
]
1280 / local_reg_live_length
[regno
]);
1281 double tmp2
= ((double) allocno
[num
].freq
1282 / allocno
[num
].live_length
);
1286 /* Hard reg REGNO was used less in total by local regs
1287 than it would be used by this one allocno! */
1289 for (k
= 0; k
< max_regno
; k
++)
1290 if (reg_renumber
[k
] >= 0)
1292 int r
= reg_renumber
[k
];
1294 = r
+ hard_regno_nregs
[r
][PSEUDO_REGNO_MODE (k
)];
1296 if (regno
>= r
&& regno
< endregno
)
1297 reg_renumber
[k
] = -1;
1307 /* Did we find a register? */
1312 HARD_REG_SET this_reg
;
1314 /* Yes. Record it as the hard register of this pseudo-reg. */
1315 reg_renumber
[allocno
[num
].reg
] = best_reg
;
1316 /* Also of any pseudo-regs that share with it. */
1317 if (reg_may_share
[allocno
[num
].reg
])
1318 for (j
= FIRST_PSEUDO_REGISTER
; j
< max_regno
; j
++)
1319 if (reg_allocno
[j
] == num
)
1320 reg_renumber
[j
] = best_reg
;
1322 /* Make a set of the hard regs being allocated. */
1323 CLEAR_HARD_REG_SET (this_reg
);
1324 lim
= best_reg
+ hard_regno_nregs
[best_reg
][mode
];
1325 for (j
= best_reg
; j
< lim
; j
++)
1327 SET_HARD_REG_BIT (this_reg
, j
);
1328 SET_HARD_REG_BIT (regs_used_so_far
, j
);
1329 /* This is no longer a reg used just by local regs. */
1330 local_reg_n_refs
[j
] = 0;
1331 local_reg_freq
[j
] = 0;
1333 /* For each other pseudo-reg conflicting with this one,
1334 mark it as conflicting with the hard regs this one occupies. */
1336 EXECUTE_IF_SET_IN_ALLOCNO_SET (conflicts
+ lim
* allocno_row_words
, j
,
1338 IOR_HARD_REG_SET (allocno
[j
].hard_reg_conflicts
, this_reg
);
1343 /* Called from `reload' to look for a hard reg to put pseudo reg REGNO in.
1344 Perhaps it had previously seemed not worth a hard reg,
1345 or perhaps its old hard reg has been commandeered for reloads.
1346 FORBIDDEN_REGS indicates certain hard regs that may not be used, even if
1347 they do not appear to be allocated.
1348 If FORBIDDEN_REGS is zero, no regs are forbidden. */
1351 retry_global_alloc (int regno
, HARD_REG_SET forbidden_regs
)
1353 int alloc_no
= reg_allocno
[regno
];
1356 /* If we have more than one register class,
1357 first try allocating in the class that is cheapest
1358 for this pseudo-reg. If that fails, try any reg. */
1359 if (N_REG_CLASSES
> 1)
1360 find_reg (alloc_no
, forbidden_regs
, 0, 0, 1);
1361 if (reg_renumber
[regno
] < 0
1362 && reg_alternate_class (regno
) != NO_REGS
)
1363 find_reg (alloc_no
, forbidden_regs
, 1, 0, 1);
1365 /* If we found a register, modify the RTL for the register to
1366 show the hard register, and mark that register live. */
1367 if (reg_renumber
[regno
] >= 0)
1369 REGNO (regno_reg_rtx
[regno
]) = reg_renumber
[regno
];
1370 mark_home_live (regno
);
1375 /* Record a conflict between register REGNO
1376 and everything currently live.
1377 REGNO must not be a pseudo reg that was allocated
1378 by local_alloc; such numbers must be translated through
1379 reg_renumber before calling here. */
1382 record_one_conflict (int regno
)
1386 if (regno
< FIRST_PSEUDO_REGISTER
)
1387 /* When a hard register becomes live,
1388 record conflicts with live pseudo regs. */
1389 EXECUTE_IF_SET_IN_ALLOCNO_SET (allocnos_live
, j
,
1391 SET_HARD_REG_BIT (allocno
[j
].hard_reg_conflicts
, regno
);
1394 /* When a pseudo-register becomes live,
1395 record conflicts first with hard regs,
1396 then with other pseudo regs. */
1398 int ialloc
= reg_allocno
[regno
];
1399 int ialloc_prod
= ialloc
* allocno_row_words
;
1401 IOR_HARD_REG_SET (allocno
[ialloc
].hard_reg_conflicts
, hard_regs_live
);
1402 for (j
= allocno_row_words
- 1; j
>= 0; j
--)
1403 conflicts
[ialloc_prod
+ j
] |= allocnos_live
[j
];
1407 /* Record all allocnos currently live as conflicting
1408 with all hard regs currently live.
1410 ALLOCNO_VEC is a vector of LEN allocnos, all allocnos that
1411 are currently live. Their bits are also flagged in allocnos_live. */
1414 record_conflicts (int *allocno_vec
, int len
)
1417 IOR_HARD_REG_SET (allocno
[allocno_vec
[len
]].hard_reg_conflicts
,
1421 /* If CONFLICTP (i, j) is true, make sure CONFLICTP (j, i) is also true. */
1423 mirror_conflicts (void)
1426 int rw
= allocno_row_words
;
1427 int rwb
= rw
* INT_BITS
;
1428 INT_TYPE
*p
= conflicts
;
1429 INT_TYPE
*q0
= conflicts
, *q1
, *q2
;
1430 unsigned INT_TYPE mask
;
1432 for (i
= max_allocno
- 1, mask
= 1; i
>= 0; i
--, mask
<<= 1)
1439 for (j
= allocno_row_words
- 1, q1
= q0
; j
>= 0; j
--, q1
+= rwb
)
1441 unsigned INT_TYPE word
;
1443 for (word
= (unsigned INT_TYPE
) *p
++, q2
= q1
; word
;
1444 word
>>= 1, q2
+= rw
)
1453 /* Handle the case where REG is set by the insn being scanned,
1454 during the forward scan to accumulate conflicts.
1455 Store a 1 in regs_live or allocnos_live for this register, record how many
1456 consecutive hardware registers it actually needs,
1457 and record a conflict with all other registers already live.
1459 Note that even if REG does not remain alive after this insn,
1460 we must mark it here as live, to ensure a conflict between
1461 REG and any other regs set in this insn that really do live.
1462 This is because those other regs could be considered after this.
1464 REG might actually be something other than a register;
1465 if so, we do nothing.
1467 SETTER is 0 if this register was modified by an auto-increment (i.e.,
1468 a REG_INC note was found for it). */
1471 mark_reg_store (rtx reg
, rtx setter
, void *data ATTRIBUTE_UNUSED
)
1475 if (GET_CODE (reg
) == SUBREG
)
1476 reg
= SUBREG_REG (reg
);
1481 regs_set
[n_regs_set
++] = reg
;
1483 if (setter
&& GET_CODE (setter
) != CLOBBER
)
1484 set_preference (reg
, SET_SRC (setter
));
1486 regno
= REGNO (reg
);
1488 /* Either this is one of the max_allocno pseudo regs not allocated,
1489 or it is or has a hardware reg. First handle the pseudo-regs. */
1490 if (regno
>= FIRST_PSEUDO_REGISTER
)
1492 if (reg_allocno
[regno
] >= 0)
1494 SET_ALLOCNO_LIVE (reg_allocno
[regno
]);
1495 record_one_conflict (regno
);
1499 if (reg_renumber
[regno
] >= 0)
1500 regno
= reg_renumber
[regno
];
1502 /* Handle hardware regs (and pseudos allocated to hard regs). */
1503 if (regno
< FIRST_PSEUDO_REGISTER
&& ! fixed_regs
[regno
])
1505 int last
= regno
+ hard_regno_nregs
[regno
][GET_MODE (reg
)];
1506 while (regno
< last
)
1508 record_one_conflict (regno
);
1509 SET_HARD_REG_BIT (hard_regs_live
, regno
);
1515 /* Like mark_reg_set except notice just CLOBBERs; ignore SETs. */
1518 mark_reg_clobber (rtx reg
, rtx setter
, void *data
)
1520 if (GET_CODE (setter
) == CLOBBER
)
1521 mark_reg_store (reg
, setter
, data
);
1524 /* Record that REG has conflicts with all the regs currently live.
1525 Do not mark REG itself as live. */
1528 mark_reg_conflicts (rtx reg
)
1532 if (GET_CODE (reg
) == SUBREG
)
1533 reg
= SUBREG_REG (reg
);
1538 regno
= REGNO (reg
);
1540 /* Either this is one of the max_allocno pseudo regs not allocated,
1541 or it is or has a hardware reg. First handle the pseudo-regs. */
1542 if (regno
>= FIRST_PSEUDO_REGISTER
)
1544 if (reg_allocno
[regno
] >= 0)
1545 record_one_conflict (regno
);
1548 if (reg_renumber
[regno
] >= 0)
1549 regno
= reg_renumber
[regno
];
1551 /* Handle hardware regs (and pseudos allocated to hard regs). */
1552 if (regno
< FIRST_PSEUDO_REGISTER
&& ! fixed_regs
[regno
])
1554 int last
= regno
+ hard_regno_nregs
[regno
][GET_MODE (reg
)];
1555 while (regno
< last
)
1557 record_one_conflict (regno
);
1563 /* Mark REG as being dead (following the insn being scanned now).
1564 Store a 0 in regs_live or allocnos_live for this register. */
1567 mark_reg_death (rtx reg
)
1569 int regno
= REGNO (reg
);
1571 /* Either this is one of the max_allocno pseudo regs not allocated,
1572 or it is a hardware reg. First handle the pseudo-regs. */
1573 if (regno
>= FIRST_PSEUDO_REGISTER
)
1575 if (reg_allocno
[regno
] >= 0)
1576 CLEAR_ALLOCNO_LIVE (reg_allocno
[regno
]);
1579 /* For pseudo reg, see if it has been assigned a hardware reg. */
1580 if (reg_renumber
[regno
] >= 0)
1581 regno
= reg_renumber
[regno
];
1583 /* Handle hardware regs (and pseudos allocated to hard regs). */
1584 if (regno
< FIRST_PSEUDO_REGISTER
&& ! fixed_regs
[regno
])
1586 /* Pseudo regs already assigned hardware regs are treated
1587 almost the same as explicit hardware regs. */
1588 int last
= regno
+ hard_regno_nregs
[regno
][GET_MODE (reg
)];
1589 while (regno
< last
)
1591 CLEAR_HARD_REG_BIT (hard_regs_live
, regno
);
1597 /* Mark hard reg REGNO as currently live, assuming machine mode MODE
1598 for the value stored in it. MODE determines how many consecutive
1599 registers are actually in use. Do not record conflicts;
1600 it is assumed that the caller will do that. */
1603 mark_reg_live_nc (int regno
, enum machine_mode mode
)
1605 int last
= regno
+ hard_regno_nregs
[regno
][mode
];
1606 while (regno
< last
)
1608 SET_HARD_REG_BIT (hard_regs_live
, regno
);
1613 /* Try to set a preference for an allocno to a hard register.
1614 We are passed DEST and SRC which are the operands of a SET. It is known
1615 that SRC is a register. If SRC or the first operand of SRC is a register,
1616 try to set a preference. If one of the two is a hard register and the other
1617 is a pseudo-register, mark the preference.
1619 Note that we are not as aggressive as local-alloc in trying to tie a
1620 pseudo-register to a hard register. */
1623 set_preference (rtx dest
, rtx src
)
1625 unsigned int src_regno
, dest_regno
;
1626 /* Amount to add to the hard regno for SRC, or subtract from that for DEST,
1627 to compensate for subregs in SRC or DEST. */
1632 if (GET_RTX_FORMAT (GET_CODE (src
))[0] == 'e')
1633 src
= XEXP (src
, 0), copy
= 0;
1635 /* Get the reg number for both SRC and DEST.
1636 If neither is a reg, give up. */
1639 src_regno
= REGNO (src
);
1640 else if (GET_CODE (src
) == SUBREG
&& REG_P (SUBREG_REG (src
)))
1642 src_regno
= REGNO (SUBREG_REG (src
));
1644 if (REGNO (SUBREG_REG (src
)) < FIRST_PSEUDO_REGISTER
)
1645 offset
+= subreg_regno_offset (REGNO (SUBREG_REG (src
)),
1646 GET_MODE (SUBREG_REG (src
)),
1650 offset
+= (SUBREG_BYTE (src
)
1651 / REGMODE_NATURAL_SIZE (GET_MODE (src
)));
1657 dest_regno
= REGNO (dest
);
1658 else if (GET_CODE (dest
) == SUBREG
&& REG_P (SUBREG_REG (dest
)))
1660 dest_regno
= REGNO (SUBREG_REG (dest
));
1662 if (REGNO (SUBREG_REG (dest
)) < FIRST_PSEUDO_REGISTER
)
1663 offset
-= subreg_regno_offset (REGNO (SUBREG_REG (dest
)),
1664 GET_MODE (SUBREG_REG (dest
)),
1668 offset
-= (SUBREG_BYTE (dest
)
1669 / REGMODE_NATURAL_SIZE (GET_MODE (dest
)));
1674 /* Convert either or both to hard reg numbers. */
1676 if (reg_renumber
[src_regno
] >= 0)
1677 src_regno
= reg_renumber
[src_regno
];
1679 if (reg_renumber
[dest_regno
] >= 0)
1680 dest_regno
= reg_renumber
[dest_regno
];
1682 /* Now if one is a hard reg and the other is a global pseudo
1683 then give the other a preference. */
1685 if (dest_regno
< FIRST_PSEUDO_REGISTER
&& src_regno
>= FIRST_PSEUDO_REGISTER
1686 && reg_allocno
[src_regno
] >= 0)
1688 dest_regno
-= offset
;
1689 if (dest_regno
< FIRST_PSEUDO_REGISTER
)
1692 SET_REGBIT (hard_reg_copy_preferences
,
1693 reg_allocno
[src_regno
], dest_regno
);
1695 SET_REGBIT (hard_reg_preferences
,
1696 reg_allocno
[src_regno
], dest_regno
);
1697 for (i
= dest_regno
;
1698 i
< dest_regno
+ hard_regno_nregs
[dest_regno
][GET_MODE (dest
)];
1700 SET_REGBIT (hard_reg_full_preferences
, reg_allocno
[src_regno
], i
);
1704 if (src_regno
< FIRST_PSEUDO_REGISTER
&& dest_regno
>= FIRST_PSEUDO_REGISTER
1705 && reg_allocno
[dest_regno
] >= 0)
1707 src_regno
+= offset
;
1708 if (src_regno
< FIRST_PSEUDO_REGISTER
)
1711 SET_REGBIT (hard_reg_copy_preferences
,
1712 reg_allocno
[dest_regno
], src_regno
);
1714 SET_REGBIT (hard_reg_preferences
,
1715 reg_allocno
[dest_regno
], src_regno
);
1717 i
< src_regno
+ hard_regno_nregs
[src_regno
][GET_MODE (src
)];
1719 SET_REGBIT (hard_reg_full_preferences
, reg_allocno
[dest_regno
], i
);
1724 /* Indicate that hard register number FROM was eliminated and replaced with
1725 an offset from hard register number TO. The status of hard registers live
1726 at the start of a basic block is updated by replacing a use of FROM with
1730 mark_elimination (int from
, int to
)
1736 regset r
= bb
->global_live_at_start
;
1737 if (REGNO_REG_SET_P (r
, from
))
1739 CLEAR_REGNO_REG_SET (r
, from
);
1740 SET_REGNO_REG_SET (r
, to
);
1745 /* Used for communication between the following functions. Holds the
1746 current life information. */
1747 static regset live_relevant_regs
;
1749 /* Record in live_relevant_regs and REGS_SET that register REG became live.
1750 This is called via note_stores. */
1752 reg_becomes_live (rtx reg
, rtx setter ATTRIBUTE_UNUSED
, void *regs_set
)
1756 if (GET_CODE (reg
) == SUBREG
)
1757 reg
= SUBREG_REG (reg
);
1762 regno
= REGNO (reg
);
1763 if (regno
< FIRST_PSEUDO_REGISTER
)
1765 int nregs
= hard_regno_nregs
[regno
][GET_MODE (reg
)];
1768 SET_REGNO_REG_SET (live_relevant_regs
, regno
);
1769 if (! fixed_regs
[regno
])
1770 SET_REGNO_REG_SET ((regset
) regs_set
, regno
);
1774 else if (reg_renumber
[regno
] >= 0)
1776 SET_REGNO_REG_SET (live_relevant_regs
, regno
);
1777 SET_REGNO_REG_SET ((regset
) regs_set
, regno
);
1781 /* Record in live_relevant_regs that register REGNO died. */
1783 reg_dies (int regno
, enum machine_mode mode
, struct insn_chain
*chain
)
1785 if (regno
< FIRST_PSEUDO_REGISTER
)
1787 int nregs
= hard_regno_nregs
[regno
][mode
];
1790 CLEAR_REGNO_REG_SET (live_relevant_regs
, regno
);
1791 if (! fixed_regs
[regno
])
1792 SET_REGNO_REG_SET (&chain
->dead_or_set
, regno
);
1798 CLEAR_REGNO_REG_SET (live_relevant_regs
, regno
);
1799 if (reg_renumber
[regno
] >= 0)
1800 SET_REGNO_REG_SET (&chain
->dead_or_set
, regno
);
1804 /* Walk the insns of the current function and build reload_insn_chain,
1805 and record register life information. */
1807 build_insn_chain (rtx first
)
1809 struct insn_chain
**p
= &reload_insn_chain
;
1810 struct insn_chain
*prev
= 0;
1811 basic_block b
= ENTRY_BLOCK_PTR
->next_bb
;
1813 live_relevant_regs
= ALLOC_REG_SET (®_obstack
);
1815 for (; first
; first
= NEXT_INSN (first
))
1817 struct insn_chain
*c
;
1819 if (first
== BB_HEAD (b
))
1824 CLEAR_REG_SET (live_relevant_regs
);
1826 EXECUTE_IF_SET_IN_BITMAP (b
->global_live_at_start
, 0, i
, bi
)
1828 if (i
< FIRST_PSEUDO_REGISTER
1829 ? ! TEST_HARD_REG_BIT (eliminable_regset
, i
)
1830 : reg_renumber
[i
] >= 0)
1831 SET_REGNO_REG_SET (live_relevant_regs
, i
);
1835 if (!NOTE_P (first
) && !BARRIER_P (first
))
1837 c
= new_insn_chain ();
1843 c
->block
= b
->index
;
1849 /* Mark the death of everything that dies in this instruction. */
1851 for (link
= REG_NOTES (first
); link
; link
= XEXP (link
, 1))
1852 if (REG_NOTE_KIND (link
) == REG_DEAD
1853 && REG_P (XEXP (link
, 0)))
1854 reg_dies (REGNO (XEXP (link
, 0)), GET_MODE (XEXP (link
, 0)),
1857 COPY_REG_SET (&c
->live_throughout
, live_relevant_regs
);
1859 /* Mark everything born in this instruction as live. */
1861 note_stores (PATTERN (first
), reg_becomes_live
,
1865 COPY_REG_SET (&c
->live_throughout
, live_relevant_regs
);
1871 /* Mark anything that is set in this insn and then unused as dying. */
1873 for (link
= REG_NOTES (first
); link
; link
= XEXP (link
, 1))
1874 if (REG_NOTE_KIND (link
) == REG_UNUSED
1875 && REG_P (XEXP (link
, 0)))
1876 reg_dies (REGNO (XEXP (link
, 0)), GET_MODE (XEXP (link
, 0)),
1881 if (first
== BB_END (b
))
1884 /* Stop after we pass the end of the last basic block. Verify that
1885 no real insns are after the end of the last basic block.
1887 We may want to reorganize the loop somewhat since this test should
1888 always be the right exit test. Allow an ADDR_VEC or ADDR_DIF_VEC if
1889 the previous real insn is a JUMP_INSN. */
1890 if (b
== EXIT_BLOCK_PTR
)
1892 #ifdef ENABLE_CHECKING
1893 for (first
= NEXT_INSN (first
); first
; first
= NEXT_INSN (first
))
1894 gcc_assert (!INSN_P (first
)
1895 || GET_CODE (PATTERN (first
)) == USE
1896 || ((GET_CODE (PATTERN (first
)) == ADDR_VEC
1897 || GET_CODE (PATTERN (first
)) == ADDR_DIFF_VEC
)
1898 && prev_real_insn (first
) != 0
1899 && JUMP_P (prev_real_insn (first
))));
1904 FREE_REG_SET (live_relevant_regs
);
1908 /* Print debugging trace information if -dg switch is given,
1909 showing the information on which the allocation decisions are based. */
1912 dump_conflicts (FILE *file
)
1915 int has_preferences
;
1918 for (i
= 0; i
< max_allocno
; i
++)
1920 if (reg_renumber
[allocno
[allocno_order
[i
]].reg
] >= 0)
1924 fprintf (file
, ";; %d regs to allocate:", nregs
);
1925 for (i
= 0; i
< max_allocno
; i
++)
1928 if (reg_renumber
[allocno
[allocno_order
[i
]].reg
] >= 0)
1930 fprintf (file
, " %d", allocno
[allocno_order
[i
]].reg
);
1931 for (j
= 0; j
< max_regno
; j
++)
1932 if (reg_allocno
[j
] == allocno_order
[i
]
1933 && j
!= allocno
[allocno_order
[i
]].reg
)
1934 fprintf (file
, "+%d", j
);
1935 if (allocno
[allocno_order
[i
]].size
!= 1)
1936 fprintf (file
, " (%d)", allocno
[allocno_order
[i
]].size
);
1938 fprintf (file
, "\n");
1940 for (i
= 0; i
< max_allocno
; i
++)
1943 fprintf (file
, ";; %d conflicts:", allocno
[i
].reg
);
1944 for (j
= 0; j
< max_allocno
; j
++)
1945 if (CONFLICTP (j
, i
))
1946 fprintf (file
, " %d", allocno
[j
].reg
);
1947 for (j
= 0; j
< FIRST_PSEUDO_REGISTER
; j
++)
1948 if (TEST_HARD_REG_BIT (allocno
[i
].hard_reg_conflicts
, j
))
1949 fprintf (file
, " %d", j
);
1950 fprintf (file
, "\n");
1952 has_preferences
= 0;
1953 for (j
= 0; j
< FIRST_PSEUDO_REGISTER
; j
++)
1954 if (TEST_HARD_REG_BIT (allocno
[i
].hard_reg_preferences
, j
))
1955 has_preferences
= 1;
1957 if (! has_preferences
)
1959 fprintf (file
, ";; %d preferences:", allocno
[i
].reg
);
1960 for (j
= 0; j
< FIRST_PSEUDO_REGISTER
; j
++)
1961 if (TEST_HARD_REG_BIT (allocno
[i
].hard_reg_preferences
, j
))
1962 fprintf (file
, " %d", j
);
1963 fprintf (file
, "\n");
1965 fprintf (file
, "\n");
1969 dump_global_regs (FILE *file
)
1973 fprintf (file
, ";; Register dispositions:\n");
1974 for (i
= FIRST_PSEUDO_REGISTER
, j
= 0; i
< max_regno
; i
++)
1975 if (reg_renumber
[i
] >= 0)
1977 fprintf (file
, "%d in %d ", i
, reg_renumber
[i
]);
1979 fprintf (file
, "\n");
1982 fprintf (file
, "\n\n;; Hard regs used: ");
1983 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1984 if (regs_ever_live
[i
])
1985 fprintf (file
, " %d", i
);
1986 fprintf (file
, "\n\n");
1991 /* This page contains code to make live information more accurate.
1992 The accurate register liveness at program point P means:
1993 o there is a path from P to usage of the register and the
1994 register is not redefined or killed on the path.
1995 o register at P is partially available, i.e. there is a path from
1996 a register definition to the point P and the register is not
1997 killed (clobbered) on the path
1999 The standard GCC live information means only the first condition.
2000 Without the partial availability, there will be more register
2001 conflicts and as a consequence worse register allocation. The
2002 typical example where the information can be different is a
2003 register initialized in the loop at the basic block preceding the
2006 /* The following structure contains basic block data flow information
2007 used to calculate partial availability of registers. */
2011 /* The basic block reverse post-order number. */
2013 /* Registers used uninitialized in an insn in which there is an
2014 early clobbered register might get the same hard register. */
2015 bitmap earlyclobber
;
2016 /* Registers correspondingly killed (clobbered) and defined but not
2017 killed afterward in the basic block. */
2018 bitmap killed
, avloc
;
2019 /* Registers partially available and living (in other words whose
2020 values were calculated and used) correspondingly at the start
2021 and end of the basic block. */
2022 bitmap live_pavin
, live_pavout
;
2025 /* Macros for accessing data flow information of basic blocks. */
2027 #define BB_INFO(BB) ((struct bb_info *) (BB)->aux)
2028 #define BB_INFO_BY_INDEX(N) BB_INFO (BASIC_BLOCK(N))
2030 /* The function allocates the info structures of each basic block. It
2031 also initialized LIVE_PAVIN and LIVE_PAVOUT as if all hard
2032 registers were partially available. */
2035 allocate_bb_info (void)
2039 struct bb_info
*bb_info
;
2042 alloc_aux_for_blocks (sizeof (struct bb_info
));
2043 init
= BITMAP_ALLOC (NULL
);
2044 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
2045 bitmap_set_bit (init
, i
);
2049 bb_info
->earlyclobber
= BITMAP_ALLOC (NULL
);
2050 bb_info
->avloc
= BITMAP_ALLOC (NULL
);
2051 bb_info
->killed
= BITMAP_ALLOC (NULL
);
2052 bb_info
->live_pavin
= BITMAP_ALLOC (NULL
);
2053 bb_info
->live_pavout
= BITMAP_ALLOC (NULL
);
2054 bitmap_copy (bb_info
->live_pavin
, init
);
2055 bitmap_copy (bb_info
->live_pavout
, init
);
2060 /* The function frees the allocated info of all basic blocks. */
2066 struct bb_info
*bb_info
;
2070 bb_info
= BB_INFO (bb
);
2071 BITMAP_FREE (bb_info
->live_pavout
);
2072 BITMAP_FREE (bb_info
->live_pavin
);
2073 BITMAP_FREE (bb_info
->killed
);
2074 BITMAP_FREE (bb_info
->avloc
);
2075 BITMAP_FREE (bb_info
->earlyclobber
);
2077 free_aux_for_blocks ();
2080 /* The function modifies local info for register REG being changed in
2081 SETTER. DATA is used to pass the current basic block info. */
2084 mark_reg_change (rtx reg
, rtx setter
, void *data
)
2087 basic_block bb
= data
;
2088 struct bb_info
*bb_info
= BB_INFO (bb
);
2090 if (GET_CODE (reg
) == SUBREG
)
2091 reg
= SUBREG_REG (reg
);
2096 regno
= REGNO (reg
);
2097 bitmap_set_bit (bb_info
->killed
, regno
);
2099 if (GET_CODE (setter
) != CLOBBER
)
2100 bitmap_set_bit (bb_info
->avloc
, regno
);
2102 bitmap_clear_bit (bb_info
->avloc
, regno
);
2105 /* Classes of registers which could be early clobbered in the current
2108 static varray_type earlyclobber_regclass
;
2110 /* This function finds and stores register classes that could be early
2111 clobbered in INSN. If any earlyclobber classes are found, the function
2112 returns TRUE, in all other cases it returns FALSE. */
2115 check_earlyclobber (rtx insn
)
2120 extract_insn (insn
);
2122 VARRAY_POP_ALL (earlyclobber_regclass
);
2123 for (opno
= 0; opno
< recog_data
.n_operands
; opno
++)
2128 enum reg_class
class;
2129 const char *p
= recog_data
.constraints
[opno
];
2138 case '=': case '+': case '?':
2141 case 'm': case '<': case '>': case 'V': case 'o':
2142 case 'E': case 'F': case 'G': case 'H':
2143 case 's': case 'i': case 'n':
2144 case 'I': case 'J': case 'K': case 'L':
2145 case 'M': case 'N': case 'O': case 'P':
2147 case '0': case '1': case '2': case '3': case '4':
2148 case '5': case '6': case '7': case '8': case '9':
2149 /* These don't say anything we care about. */
2157 if (amp_p
&& class != NO_REGS
)
2160 for (i
= VARRAY_ACTIVE_SIZE (earlyclobber_regclass
) - 1;
2162 if (VARRAY_INT (earlyclobber_regclass
, i
) == (int) class)
2165 VARRAY_PUSH_INT (earlyclobber_regclass
, (int) class);
2173 class = GENERAL_REGS
;
2177 class = REG_CLASS_FROM_CONSTRAINT (c
, p
);
2182 p
+= CONSTRAINT_LEN (c
, p
);
2189 /* The function checks that pseudo-register *X has a class
2190 intersecting with the class of pseudo-register could be early
2191 clobbered in the same insn.
2192 This function is a no-op if earlyclobber_regclass is empty. */
2195 mark_reg_use_for_earlyclobber (rtx
*x
, void *data ATTRIBUTE_UNUSED
)
2197 enum reg_class pref_class
, alt_class
;
2199 basic_block bb
= data
;
2200 struct bb_info
*bb_info
= BB_INFO (bb
);
2202 if (GET_CODE (*x
) == REG
&& REGNO (*x
) >= FIRST_PSEUDO_REGISTER
)
2205 if (bitmap_bit_p (bb_info
->killed
, regno
)
2206 || bitmap_bit_p (bb_info
->avloc
, regno
))
2208 pref_class
= reg_preferred_class (regno
);
2209 alt_class
= reg_alternate_class (regno
);
2210 for (i
= VARRAY_ACTIVE_SIZE (earlyclobber_regclass
) - 1; i
>= 0; i
--)
2211 if (reg_classes_intersect_p (VARRAY_INT (earlyclobber_regclass
, i
),
2213 || (VARRAY_INT (earlyclobber_regclass
, i
) != NO_REGS
2214 && reg_classes_intersect_p (VARRAY_INT (earlyclobber_regclass
,
2218 bitmap_set_bit (bb_info
->earlyclobber
, regno
);
2225 /* The function processes all pseudo-registers in *X with the aid of
2226 previous function. */
2229 mark_reg_use_for_earlyclobber_1 (rtx
*x
, void *data
)
2231 for_each_rtx (x
, mark_reg_use_for_earlyclobber
, data
);
2234 /* The function calculates local info for each basic block. */
/* For every basic block: record pseudos changed by each insn (killed /
   avloc, via mark_reg_change) and pseudos whose use may conflict with an
   early-clobbered class (via mark_reg_use_for_earlyclobber_1).  */
2237 calculate_local_reg_bb_info (void)
/* Scratch varray of register classes early-clobbered by the insn
   currently being scanned; 20 is only the initial capacity.  */
2242 VARRAY_INT_INIT (earlyclobber_regclass
, 20,
2243 "classes of registers early clobbered in an insn");
/* NOTE(review): the FOR_EACH_BB loop header and the declarations of
   `bb', `insn' and `bound' are missing from this extraction (embedded
   line numbers jump 2243 -> 2246).  */
2246 bound
= NEXT_INSN (BB_END (bb
));
/* Scan all insns of BB, stopping one past BB_END.  */
2247 for (insn
= BB_HEAD (bb
); insn
!= bound
; insn
= NEXT_INSN (insn
))
/* Stores update the block's local killed/avloc information.  */
2250 note_stores (PATTERN (insn
), mark_reg_change
, bb
);
/* Uses are scanned only when the insn has an early-clobber operand.  */
2251 if (check_earlyclobber (insn
))
2252 note_uses (&PATTERN (insn
), mark_reg_use_for_earlyclobber_1
, bb
);
2257 /* The function sets up reverse post-order number of each basic
/* NOTE(review): the tail of this comment, the function's return type,
   and the declarations of `i' and `rts_order' were dropped by the
   extraction (embedded line numbers jump 2257 -> 2261 -> 2266).  */
2261 set_up_bb_rts_numbers (void)
/* Block-index permutation produced by the reverse top-sort below.  */
2266 rts_order
= xmalloc (sizeof (int) * n_basic_blocks
);
2267 flow_reverse_top_sort_order_compute (rts_order
);
/* Cache each block's position in that order; rpost_cmp reads it.  */
2268 for (i
= 0; i
< n_basic_blocks
; i
++)
2269 BB_INFO_BY_INDEX (rts_order
[i
])->rts_number
= i
;
/* NOTE(review): `rts_order' is heap-allocated above; the matching
   free () (expected right after this loop) is not visible here --
   confirm it exists in the pristine file.  */
2273 /* Compare function for sorting blocks in reverse postorder. */
/* qsort comparator over an array of `basic_block' pointers: blocks with
   a larger rts_number (assigned by set_up_bb_rts_numbers) sort first,
   i.e. the array ends up in reverse post-order.  Assumes rts_numbers are
   small non-negative block indices, so the subtraction cannot overflow.
   NOTE(review): return type (presumably `static int') and braces were
   lost in extraction.  */
2276 rpost_cmp (const void *bb1
, const void *bb2
)
2278 basic_block b1
= *(basic_block
*) bb1
, b2
= *(basic_block
*) bb2
;
2280 return BB_INFO (b2
)->rts_number
- BB_INFO (b1
)->rts_number
;
2283 /* Temporary bitmap used for live_pavin, live_pavout calculation. */
/* Allocated at the top of calculate_reg_pav (BITMAP_ALLOC) and released
   at its end (BITMAP_FREE); only meaningful during that pass.  */
2284 static bitmap temp_bitmap
;
2286 /* The function calculates partial register availability according to
2287 the following equations:
2290 = empty for entry block
2291 | union (live_pavout of predecessors) & global_live_at_start
2292 bb.live_pavout = union (bb.live_pavin - bb.killed, bb.avloc)
2293 & global_live_at_end */
/* Forward dataflow solved with a worklist: `bbs' holds blocks to process
   this round, `new_bbs' collects blocks whose input may have changed, and
   `wset' (an sbitmap indexed by block number) de-duplicates pushes.
   NOTE(review): the extraction dropped several lines (embedded numbers
   jump, e.g. 2293 -> 2296, 2307 -> 2310, 2350 -> 2358): the return type,
   braces, declarations of `i', `nel', `e', `ei', `wset', the FOR_EACH_BB
   seeding-loop header, and the bbs/new_bbs swap are missing.  */
2296 calculate_reg_pav (void)
2298 basic_block bb
, succ
;
2301 varray_type bbs
, new_bbs
, temp
;
2302 basic_block
*bb_array
;
/* Worklists for the current and the next iteration.  */
2305 VARRAY_BB_INIT (bbs
, n_basic_blocks
, "basic blocks");
2306 VARRAY_BB_INIT (new_bbs
, n_basic_blocks
, "basic blocks for the next iter.");
2307 temp_bitmap
= BITMAP_ALLOC (NULL
);
/* Seed the worklist -- presumably with every block; the enclosing loop
   header was lost in extraction.  */
2310 VARRAY_PUSH_BB (bbs
, bb
);
2312 wset
= sbitmap_alloc (n_basic_blocks
+ 1);
/* Iterate until no block's live_pavout changes.  */
2313 while (VARRAY_ACTIVE_SIZE (bbs
))
2315 bb_array
= &VARRAY_BB (bbs
, 0);
2316 nel
= VARRAY_ACTIVE_SIZE (bbs
);
/* Processing in reverse post-order speeds convergence of this forward
   problem.  */
2317 qsort (bb_array
, nel
, sizeof (basic_block
), rpost_cmp
);
2318 sbitmap_zero (wset
);
2319 for (i
= 0; i
< nel
; i
++)
2322 struct bb_info
*bb_info
;
2323 bitmap bb_live_pavin
, bb_live_pavout
;
2326 bb_info
= BB_INFO (bb
);
2327 bb_live_pavin
= bb_info
->live_pavin
;
2328 bb_live_pavout
= bb_info
->live_pavout
;
/* live_pavin |= union of the predecessors' live_pavout ...  */
2329 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
2331 basic_block pred
= e
->src
;
2333 if (pred
->index
!= ENTRY_BLOCK
)
2334 bitmap_ior_into (bb_live_pavin
, BB_INFO (pred
)->live_pavout
)
;
/* ... masked by what is live at the block start.  */
2336 bitmap_and_into (bb_live_pavin
, bb
->global_live_at_start
);
/* temp = avloc | (live_pavin & ~killed), then masked by live-at-end:
   the candidate new live_pavout per the equations above.  */
2337 bitmap_ior_and_compl (temp_bitmap
, bb_info
->avloc
,
2338 bb_live_pavin
, bb_info
->killed
);
2339 bitmap_and_into (temp_bitmap
, bb
->global_live_at_end
);
/* On change, commit and requeue not-yet-queued non-exit successors.
   (NOTE(review): the `succ = e->dest' assignment is not visible.)  */
2340 if (! bitmap_equal_p (temp_bitmap
, bb_live_pavout
))
2342 bitmap_copy (bb_live_pavout
, temp_bitmap
);
2343 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
2346 if (succ
->index
!= EXIT_BLOCK
2347 && !TEST_BIT (wset
, succ
->index
))
2349 SET_BIT (wset
, succ
->index
);
2350 VARRAY_PUSH_BB (new_bbs
, succ
);
/* NOTE(review): the swap of `bbs' and `new_bbs' via `temp' (around
   original lines 2355-2357) is missing from this extraction.  */
2358 VARRAY_POP_ALL (new_bbs
);
2360 sbitmap_free (wset
);
2361 BITMAP_FREE (temp_bitmap
);
2364 /* The function modifies partial availability information for two
2365 special cases to prevent incorrect work of the subsequent passes
2366 with the accurate live information based on the partial
/* NOTE(review): the tail of this comment, the return type, the braces,
   and the declarations of `bb', `i' and `stack_regs' were dropped by the
   extraction (embedded line numbers jump 2366 -> 2370 -> 2373).  The
   FIRST_STACK_REG/LAST_STACK_REG code below is likely conditional on a
   stack-registers target (#ifdef STACK_REGS) -- confirm against the
   pristine file.  */
2370 modify_reg_pav (void)
2373 struct bb_info
*bb_info
;
2376 HARD_REG_SET zero
, stack_hard_regs
, used
;
2379 CLEAR_HARD_REG_SET (zero
);
2380 CLEAR_HARD_REG_SET (stack_hard_regs
);
/* Build the set of x87-style stack hard registers.  */
2381 for (i
= FIRST_STACK_REG
; i
<= LAST_STACK_REG
; i
++)
2382 SET_HARD_REG_BIT(stack_hard_regs
, i
);
2383 stack_regs
= BITMAP_ALLOC (NULL
);
/* Collect every pseudo whose preferred or alternate class overlaps the
   stack registers.  */
2384 for (i
= FIRST_PSEUDO_REGISTER
; i
< max_regno
; i
++)
2386 COPY_HARD_REG_SET (used
, reg_class_contents
[reg_preferred_class (i
)]);
2387 IOR_HARD_REG_SET (used
, reg_class_contents
[reg_alternate_class (i
)]);
2388 AND_HARD_REG_SET (used
, stack_hard_regs
);
/* Skip pseudos with no stack-register overlap; the `skip:' label this
   branches to is not visible in this extraction.  */
2389 GO_IF_HARD_REG_EQUAL(used
, zero
, skip
);
2390 bitmap_set_bit (stack_regs
, i
);
/* NOTE(review): the FOR_EACH_BB loop header (around original line 2395)
   is missing; the statements below apply once per basic block.  */
2397 bb_info
= BB_INFO (bb
);
2399 /* Reload can assign the same hard register to uninitialized
2400 pseudo-register and early clobbered pseudo-register in an
2401 insn if the pseudo-register is used first time in given BB
2402 and not lived at the BB start. To prevent this we don't
2403 change life information for such pseudo-registers. */
2404 bitmap_ior_into (bb_info
->live_pavin
, bb_info
->earlyclobber
);
2406 /* We can not use the same stack register for uninitialized
2407 pseudo-register and another living pseudo-register because if the
2408 uninitialized pseudo-register dies, subsequent pass reg-stack
2409 will be confused (it will believe that the other register
2411 bitmap_ior_into (bb_info
->live_pavin
, stack_regs
);
2415 BITMAP_FREE (stack_regs
);
2419 /* The following function makes live information more accurate by
2420 modifying global_live_at_start and global_live_at_end of basic
2423 The standard GCC life analysis permits registers to live
2424 uninitialized, for example:
2433 With normal life_analysis, R would be live before "Loop:".
2434 The result is that R causes many interferences that do not
2437 After the function call a register lives at a program point
2438 only if it is initialized on a path from CFG entry to the
2442 make_accurate_live_analysis (void)
2445 struct bb_info
*bb_info
;
2447 max_regno
= max_reg_num ();
2449 allocate_bb_info ();
2450 calculate_local_reg_bb_info ();
2451 set_up_bb_rts_numbers ();
2452 calculate_reg_pav ();
2456 bb_info
= BB_INFO (bb
);
2458 bitmap_and_into (bb
->global_live_at_start
, bb_info
->live_pavin
);
2459 bitmap_and_into (bb
->global_live_at_end
, bb_info
->live_pavout
);