/* Allocate registers for pseudo-registers that span basic blocks.
   Copyright (C) 1987, 1988, 1991, 1994, 1996, 1997, 1998,
   1999, 2000, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */
#include "coretypes.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "tree-pass.h"
/* This pass of the compiler performs global register allocation.
   It assigns hard register numbers to all the pseudo registers
   that were not handled in local_alloc.  Assignments are recorded
   in the vector reg_renumber, not by changing the rtl code.
   (Such changes are made by final).  The entry point is
   the function global_alloc.

   After allocation is complete, the reload pass is run as a subroutine
   of this pass, so that when a pseudo reg loses its hard reg due to
   spilling it is possible to make a second attempt to find a hard
   reg for it.  The reload pass is independent in other respects
   and it is run even when stupid register allocation is in use.

   1. Assign allocation-numbers (allocnos) to the pseudo-registers
   still needing allocations and to the pseudo-registers currently
   allocated by local-alloc which may be spilled by reload.
   Set up tables reg_allocno and allocno_reg to map
   reg numbers to allocnos and vice versa.
   max_allocno gets the number of allocnos in use.

   2. Allocate a max_allocno by max_allocno conflict bit matrix and clear it.
   Allocate a max_allocno by FIRST_PSEUDO_REGISTER conflict matrix
   for conflicts between allocnos and explicit hard register use
   (which includes use of pseudo-registers allocated by local_alloc).

   3. For each basic block, walk forward through the block, recording
   which pseudo-registers and which hardware registers are live.
   Build the conflict matrix among the pseudo-registers and the one
   between pseudo-registers and hardware registers.
   Also record the preferred hardware registers
   for each pseudo-register.

   4. Sort a table of the allocnos into order of
   desirability of the variables.

   5. Allocate the variables in that order; allocate each one,
   if possible, into a preferred register, else into another register.  */
/* Number of pseudo-registers which are candidates for allocation.  */

static int max_allocno;

/* Indexed by (pseudo) reg number, gives the allocno, or -1
   for pseudo registers which are not to be allocated.  */

static int *reg_allocno;
struct allocno
{
  int reg;

  /* Gives the number of consecutive hard registers needed by that
     pseudo reg.  */
  int size;

  /* Number of calls crossed by each allocno.  */
  int calls_crossed;

  /* Number of calls that might throw crossed by each allocno.  */
  int throwing_calls_crossed;

  /* Number of refs to each allocno.  */
  int n_refs;

  /* Frequency of uses of each allocno.  */
  int freq;

  /* Guess at live length of each allocno.
     This is actually the max of the live lengths of the regs.  */
  int live_length;

  /* Set of hard regs conflicting with allocno N.  */
  HARD_REG_SET hard_reg_conflicts;

  /* Set of hard regs preferred by allocno N.
     This is used to make allocnos go into regs that are copied to or from them,
     when possible, to reduce register shuffling.  */
  HARD_REG_SET hard_reg_preferences;

  /* Similar, but just counts register preferences made in simple copy
     operations, rather than arithmetic.  These are given priority because
     we can always eliminate an insn by using these, but using a register
     in the above list won't always eliminate an insn.  */
  HARD_REG_SET hard_reg_copy_preferences;

  /* Similar to hard_reg_preferences, but includes bits for subsequent
     registers when an allocno is multi-word.  The above variable is used for
     allocation while this is used to build reg_someone_prefers, below.  */
  HARD_REG_SET hard_reg_full_preferences;

  /* Set of hard registers that some later allocno has a preference for.  */
  HARD_REG_SET regs_someone_prefers;

#ifdef STACK_REGS
  /* Set to true if allocno can't be allocated in the stack register.  */
  bool no_stack_reg;
#endif
};

static struct allocno *allocno;
/* A vector of the integers from 0 to max_allocno-1,
   sorted in the order of first-to-be-allocated first.  */

static int *allocno_order;

/* Indexed by (pseudo) reg number, gives the number of another
   lower-numbered pseudo reg which can share a hard reg with this pseudo
   *even if the two pseudos would otherwise appear to conflict*.  */

static int *reg_may_share;
/* Define the number of bits in each element of `conflicts' and what
   type that element has.  We use the largest integer format on the
   host machine.  */

#define INT_BITS HOST_BITS_PER_WIDE_INT
#define INT_TYPE HOST_WIDE_INT

/* max_allocno by max_allocno array of bits,
   recording whether two allocnos conflict (can't go in the same
   hardware register).

   `conflicts' is symmetric after the call to mirror_conflicts.  */

static INT_TYPE *conflicts;

/* Number of ints required to hold max_allocno bits.
   This is the length of a row in `conflicts'.  */

static int allocno_row_words;
/* Macro to test a bit in an element of `conflicts'.  */

#define CONFLICTP(I, J) \
 (conflicts[(I) * allocno_row_words + (unsigned) (J) / INT_BITS]	\
  & ((INT_TYPE) 1 << ((unsigned) (J) % INT_BITS)))
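
/* Illustrative example (not part of the original sources): with
   INT_BITS == 64 and allocno_row_words == 2, the conflict bit for
   allocnos I == 3 and J == 70 is

     conflicts[3 * 2 + 70 / 64] & ((INT_TYPE) 1 << (70 % 64))

   i.e. word 7 of the matrix and bit 6 within that word.  Each row is
   a run of allocno_row_words consecutive words, and J selects a word
   and a bit inside row I.  */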
/* For any allocno set in ALLOCNO_SET, set ALLOCNO to that allocno,
   and execute CODE.  */
#define EXECUTE_IF_SET_IN_ALLOCNO_SET(ALLOCNO_SET, ALLOCNO, CODE)	\
do {									\
  int i_;								\
  int allocno_;								\
  INT_TYPE *p_ = (ALLOCNO_SET);						\
									\
  for (i_ = allocno_row_words - 1, allocno_ = 0; i_ >= 0;		\
       i_--, allocno_ += INT_BITS)					\
    {									\
      unsigned INT_TYPE word_ = (unsigned INT_TYPE) *p_++;		\
									\
      for ((ALLOCNO) = allocno_; word_; word_ >>= 1, (ALLOCNO)++)	\
	{								\
	  if (word_ & 1)						\
	    {CODE;}							\
	}								\
    }									\
} while (0)

/* This doesn't work for non-GNU C due to the way CODE is macro expanded.  */
/* For any allocno that conflicts with IN_ALLOCNO, set OUT_ALLOCNO to
   the conflicting allocno, and execute CODE.  This macro assumes that
   mirror_conflicts has been run.  */
#define EXECUTE_IF_CONFLICT(IN_ALLOCNO, OUT_ALLOCNO, CODE)\
  EXECUTE_IF_SET_IN_ALLOCNO_SET (conflicts + (IN_ALLOCNO) * allocno_row_words,\
				 OUT_ALLOCNO, (CODE))
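
/* A sketch of how this macro might be used (note_conflict is a
   hypothetical helper, not a function defined in this file):

     int other;
     EXECUTE_IF_CONFLICT (a, other,
       {
	 note_conflict (a, other);
       });

   This visits, exactly once, every allocno whose bit is set in row A
   of `conflicts'.  */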
/* Set of hard regs currently live (during scan of all insns).  */

static HARD_REG_SET hard_regs_live;

/* Set of registers that global-alloc isn't supposed to use.  */

static HARD_REG_SET no_global_alloc_regs;

/* Set of registers used so far.  */

static HARD_REG_SET regs_used_so_far;

/* Number of refs to each hard reg, as used by local alloc.
   It is zero for a reg that contains global pseudos or is explicitly used.  */

static int local_reg_n_refs[FIRST_PSEUDO_REGISTER];

/* Frequency of uses of given hard reg.  */
static int local_reg_freq[FIRST_PSEUDO_REGISTER];

/* Guess at live length of each hard reg, as used by local alloc.
   This is actually the sum of the live lengths of the specific regs.  */

static int local_reg_live_length[FIRST_PSEUDO_REGISTER];
/* Set to 1 a bit in a vector TABLE of HARD_REG_SETs, for vector
   element I, and hard register number J.  */

#define SET_REGBIT(TABLE, I, J)  SET_HARD_REG_BIT (allocno[I].TABLE, J)

/* Bit mask for allocnos live at current point in the scan.  */

static INT_TYPE *allocnos_live;
/* Test, set or clear bit number I in allocnos_live,
   a bit vector indexed by allocno.  */

#define SET_ALLOCNO_LIVE(I)				\
  (allocnos_live[(unsigned) (I) / INT_BITS]		\
     |= ((INT_TYPE) 1 << ((unsigned) (I) % INT_BITS)))

#define CLEAR_ALLOCNO_LIVE(I)				\
  (allocnos_live[(unsigned) (I) / INT_BITS]		\
     &= ~((INT_TYPE) 1 << ((unsigned) (I) % INT_BITS)))
/* This is turned off because it doesn't work right for DImode.
   (And it is only used for DImode, so the other cases are worthless.)
   The problem is that it isn't true that there is NO possibility of conflict;
   only that there is no conflict if the two pseudos get the exact same regs.
   If they were allocated with a partial overlap, there would be a conflict.
   We can't safely turn off the conflict unless we have another way to
   prevent the partial overlap.

   Idea: change hard_reg_conflicts so that instead of recording which
   hard regs the allocno may not overlap, it records where the allocno
   may not start.  Change both where it is used and where it is updated.
   Then there is a way to record that (reg:DI 108) may start at 10
   but not at 9 or 11.  There is still the question of how to record
   this semi-conflict between two pseudos.  */
/* Reg pairs for which conflict after the current insn
   is inhibited by a REG_NO_CONFLICT note.
   If the table gets full, we ignore any other notes--that is conservative.  */
#define NUM_NO_CONFLICT_PAIRS 4
/* Number of pairs in use in this insn.  */
int n_no_conflict_pairs;
static struct { int allocno1, allocno2; }
  no_conflict_pairs[NUM_NO_CONFLICT_PAIRS];
/* Record all regs that are set in any one insn.
   Communication from mark_reg_{store,clobber} and global_conflicts.  */

static rtx *regs_set;
static int n_regs_set;

/* All registers that can be eliminated.  */

static HARD_REG_SET eliminable_regset;
static int allocno_compare (const void *, const void *);
static void global_conflicts (void);
static void mirror_conflicts (void);
static void expand_preferences (void);
static void prune_preferences (void);
static void find_reg (int, HARD_REG_SET, int, int, int);
static void record_one_conflict (int);
static void record_conflicts (int *, int);
static void mark_reg_store (rtx, rtx, void *);
static void mark_reg_clobber (rtx, rtx, void *);
static void mark_reg_conflicts (rtx);
static void mark_reg_death (rtx);
static void mark_reg_live_nc (int, enum machine_mode);
static void set_preference (rtx, rtx);
static void dump_conflicts (FILE *);
static void reg_becomes_live (rtx, rtx, void *);
static void reg_dies (int, enum machine_mode, struct insn_chain *);

static void allocate_bb_info (void);
static void free_bb_info (void);
static bool check_earlyclobber (rtx);
static void mark_reg_use_for_earlyclobber_1 (rtx *, void *);
static int mark_reg_use_for_earlyclobber (rtx *, void *);
static void calculate_local_reg_bb_info (void);
static void set_up_bb_rts_numbers (void);
static int rpost_cmp (const void *, const void *);
static void calculate_reg_pav (void);
static void modify_reg_pav (void);
static void make_accurate_live_analysis (void);
/* Perform allocation of pseudo-registers not allocated by local_alloc.
   FILE is a file to output debugging information on,
   or zero if such output is not desired.

   Return value is nonzero if reload failed
   and we must not do any more for this function.  */
global_alloc (FILE *file)
339 #ifdef ELIMINABLE_REGS
340 static const struct {const int from
, to
; } eliminables
[] = ELIMINABLE_REGS
;
343 = (! flag_omit_frame_pointer
344 || (current_function_calls_alloca
&& EXIT_IGNORE_STACK
)
345 || FRAME_POINTER_REQUIRED
);
350 make_accurate_live_analysis ();
354 /* A machine may have certain hard registers that
355 are safe to use only within a basic block. */
357 CLEAR_HARD_REG_SET (no_global_alloc_regs
);
359 /* Build the regset of all eliminable registers and show we can't use those
360 that we already know won't be eliminated. */
361 #ifdef ELIMINABLE_REGS
362 for (i
= 0; i
< ARRAY_SIZE (eliminables
); i
++)
365 = (! CAN_ELIMINATE (eliminables
[i
].from
, eliminables
[i
].to
)
366 || (eliminables
[i
].to
== STACK_POINTER_REGNUM
&& need_fp
));
368 if (!regs_asm_clobbered
[eliminables
[i
].from
])
370 SET_HARD_REG_BIT (eliminable_regset
, eliminables
[i
].from
);
373 SET_HARD_REG_BIT (no_global_alloc_regs
, eliminables
[i
].from
);
375 else if (cannot_elim
)
376 error ("%s cannot be used in asm here",
377 reg_names
[eliminables
[i
].from
]);
379 regs_ever_live
[eliminables
[i
].from
] = 1;
381 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
382 if (!regs_asm_clobbered
[HARD_FRAME_POINTER_REGNUM
])
384 SET_HARD_REG_BIT (eliminable_regset
, HARD_FRAME_POINTER_REGNUM
);
386 SET_HARD_REG_BIT (no_global_alloc_regs
, HARD_FRAME_POINTER_REGNUM
);
389 error ("%s cannot be used in asm here",
390 reg_names
[HARD_FRAME_POINTER_REGNUM
]);
392 regs_ever_live
[HARD_FRAME_POINTER_REGNUM
] = 1;
396 if (!regs_asm_clobbered
[FRAME_POINTER_REGNUM
])
398 SET_HARD_REG_BIT (eliminable_regset
, FRAME_POINTER_REGNUM
);
400 SET_HARD_REG_BIT (no_global_alloc_regs
, FRAME_POINTER_REGNUM
);
403 error ("%s cannot be used in asm here", reg_names
[FRAME_POINTER_REGNUM
]);
405 regs_ever_live
[FRAME_POINTER_REGNUM
] = 1;
408 /* Track which registers have already been used. Start with registers
409 explicitly in the rtl, then registers allocated by local register
412 CLEAR_HARD_REG_SET (regs_used_so_far
);
413 #ifdef LEAF_REGISTERS
414 /* If we are doing the leaf function optimization, and this is a leaf
415 function, it means that the registers that take work to save are those
416 that need a register window. So prefer the ones that can be used in
419 const char *cheap_regs
;
420 const char *const leaf_regs
= LEAF_REGISTERS
;
422 if (only_leaf_regs_used () && leaf_function_p ())
423 cheap_regs
= leaf_regs
;
425 cheap_regs
= call_used_regs
;
426 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
427 if (regs_ever_live
[i
] || cheap_regs
[i
])
428 SET_HARD_REG_BIT (regs_used_so_far
, i
);
431 /* We consider registers that do not have to be saved over calls as if
432 they were already used since there is no cost in using them. */
433 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
434 if (regs_ever_live
[i
] || call_used_regs
[i
])
435 SET_HARD_REG_BIT (regs_used_so_far
, i
);
438 for (i
= FIRST_PSEUDO_REGISTER
; i
< (size_t) max_regno
; i
++)
439 if (reg_renumber
[i
] >= 0)
440 SET_HARD_REG_BIT (regs_used_so_far
, reg_renumber
[i
]);
442 /* Establish mappings from register number to allocation number
443 and vice versa. In the process, count the allocnos. */
445 reg_allocno
= xmalloc (max_regno
* sizeof (int));
447 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
450 /* Initialize the shared-hard-reg mapping
451 from the list of pairs that may share. */
452 reg_may_share
= xcalloc (max_regno
, sizeof (int));
453 for (x
= regs_may_share
; x
; x
= XEXP (XEXP (x
, 1), 1))
455 int r1
= REGNO (XEXP (x
, 0));
456 int r2
= REGNO (XEXP (XEXP (x
, 1), 0));
458 reg_may_share
[r1
] = r2
;
460 reg_may_share
[r2
] = r1
;
463 for (i
= FIRST_PSEUDO_REGISTER
; i
< (size_t) max_regno
; i
++)
464 /* Note that reg_live_length[i] < 0 indicates a "constant" reg
465 that we are supposed to refrain from putting in a hard reg.
466 -2 means do make an allocno but don't allocate it. */
467 if (REG_N_REFS (i
) != 0 && REG_LIVE_LENGTH (i
) != -1
468 /* Don't allocate pseudos that cross calls,
469 if this function receives a nonlocal goto. */
470 && (! current_function_has_nonlocal_label
471 || REG_N_CALLS_CROSSED (i
) == 0))
473 if (reg_renumber
[i
] < 0
474 && reg_may_share
[i
] && reg_allocno
[reg_may_share
[i
]] >= 0)
475 reg_allocno
[i
] = reg_allocno
[reg_may_share
[i
]];
477 reg_allocno
[i
] = max_allocno
++;
478 gcc_assert (REG_LIVE_LENGTH (i
));
483 allocno
= xcalloc (max_allocno
, sizeof (struct allocno
));
485 for (i
= FIRST_PSEUDO_REGISTER
; i
< (size_t) max_regno
; i
++)
486 if (reg_allocno
[i
] >= 0)
488 int num
= reg_allocno
[i
];
489 allocno
[num
].reg
= i
;
490 allocno
[num
].size
= PSEUDO_REGNO_SIZE (i
);
491 allocno
[num
].calls_crossed
+= REG_N_CALLS_CROSSED (i
);
492 allocno
[num
].throwing_calls_crossed
493 += REG_N_THROWING_CALLS_CROSSED (i
);
494 allocno
[num
].n_refs
+= REG_N_REFS (i
);
495 allocno
[num
].freq
+= REG_FREQ (i
);
496 if (allocno
[num
].live_length
< REG_LIVE_LENGTH (i
))
497 allocno
[num
].live_length
= REG_LIVE_LENGTH (i
);
500 /* Calculate amount of usage of each hard reg by pseudos
501 allocated by local-alloc. This is to see if we want to
503 memset (local_reg_live_length
, 0, sizeof local_reg_live_length
);
504 memset (local_reg_n_refs
, 0, sizeof local_reg_n_refs
);
505 memset (local_reg_freq
, 0, sizeof local_reg_freq
);
506 for (i
= FIRST_PSEUDO_REGISTER
; i
< (size_t) max_regno
; i
++)
507 if (reg_renumber
[i
] >= 0)
509 int regno
= reg_renumber
[i
];
510 int endregno
= regno
+ hard_regno_nregs
[regno
][PSEUDO_REGNO_MODE (i
)];
513 for (j
= regno
; j
< endregno
; j
++)
515 local_reg_n_refs
[j
] += REG_N_REFS (i
);
516 local_reg_freq
[j
] += REG_FREQ (i
);
517 local_reg_live_length
[j
] += REG_LIVE_LENGTH (i
);
521 /* We can't override local-alloc for a reg used not just by local-alloc. */
522 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
523 if (regs_ever_live
[i
])
524 local_reg_n_refs
[i
] = 0, local_reg_freq
[i
] = 0;
526 allocno_row_words
= (max_allocno
+ INT_BITS
- 1) / INT_BITS
;
528 /* We used to use alloca here, but the size of what it would try to
529 allocate would occasionally cause it to exceed the stack limit and
530 cause unpredictable core dumps. Some examples were > 2Mb in size. */
531 conflicts
= xcalloc (max_allocno
* allocno_row_words
, sizeof (INT_TYPE
));
533 allocnos_live
= xmalloc (allocno_row_words
* sizeof (INT_TYPE
));
535 /* If there is work to be done (at least one reg to allocate),
536 perform global conflict analysis and allocate the regs. */
540 /* Scan all the insns and compute the conflicts among allocnos
541 and between allocnos and hard regs. */
547 /* Eliminate conflicts between pseudos and eliminable registers. If
548 the register is not eliminated, the pseudo won't really be able to
549 live in the eliminable register, so the conflict doesn't matter.
550 If we do eliminate the register, the conflict will no longer exist.
551 So in either case, we can ignore the conflict. Likewise for
554 for (i
= 0; i
< (size_t) max_allocno
; i
++)
556 AND_COMPL_HARD_REG_SET (allocno
[i
].hard_reg_conflicts
,
558 AND_COMPL_HARD_REG_SET (allocno
[i
].hard_reg_copy_preferences
,
560 AND_COMPL_HARD_REG_SET (allocno
[i
].hard_reg_preferences
,
564 /* Try to expand the preferences by merging them between allocnos. */
566 expand_preferences ();
568 /* Determine the order to allocate the remaining pseudo registers. */
570 allocno_order
= xmalloc (max_allocno
* sizeof (int));
571 for (i
= 0; i
< (size_t) max_allocno
; i
++)
572 allocno_order
[i
] = i
;
574 /* Default the size to 1, since allocno_compare uses it to divide by.
575 Also convert allocno_live_length of zero to -1. A length of zero
576 can occur when all the registers for that allocno have reg_live_length
577 equal to -2. In this case, we want to make an allocno, but not
578 allocate it. So avoid the divide-by-zero and set it to a low
581 for (i
= 0; i
< (size_t) max_allocno
; i
++)
583 if (allocno
[i
].size
== 0)
585 if (allocno
[i
].live_length
== 0)
586 allocno
[i
].live_length
= -1;
589 qsort (allocno_order
, max_allocno
, sizeof (int), allocno_compare
);
591 prune_preferences ();
594 dump_conflicts (file
);
596 /* Try allocating them, one by one, in that order,
597 except for parameters marked with reg_live_length[regno] == -2. */
599 for (i
= 0; i
< (size_t) max_allocno
; i
++)
600 if (reg_renumber
[allocno
[allocno_order
[i
]].reg
] < 0
601 && REG_LIVE_LENGTH (allocno
[allocno_order
[i
]].reg
) >= 0)
603 /* If we have more than one register class,
604 first try allocating in the class that is cheapest
605 for this pseudo-reg. If that fails, try any reg. */
606 if (N_REG_CLASSES
> 1)
608 find_reg (allocno_order
[i
], 0, 0, 0, 0);
609 if (reg_renumber
[allocno
[allocno_order
[i
]].reg
] >= 0)
612 if (reg_alternate_class (allocno
[allocno_order
[i
]].reg
) != NO_REGS
)
613 find_reg (allocno_order
[i
], 0, 1, 0, 0);
616 free (allocno_order
);
619 /* Do the reloads now while the allocno data still exist, so that we can
620 try to assign new hard regs to any pseudo regs that are spilled. */
622 #if 0 /* We need to eliminate regs even if there is no rtl code,
623 for the sake of debugging information. */
624 if (n_basic_blocks
> NUM_FIXED_BLOCKS
)
627 build_insn_chain (get_insns ());
628 retval
= reload (get_insns (), 1);
633 free (reg_may_share
);
636 free (allocnos_live
);
641 /* Sort predicate for ordering the allocnos.
642 Returns -1 (1) if *v1 should be allocated before (after) *v2. */
645 allocno_compare (const void *v1p
, const void *v2p
)
647 int v1
= *(const int *)v1p
, v2
= *(const int *)v2p
;
648 /* Note that the quotient will never be bigger than
649 the value of floor_log2 times the maximum number of
650 times a register can occur in one insn (surely less than 100)
651 weighted by the frequency (maximally REG_FREQ_MAX).
652 Multiplying this by 10000/REG_FREQ_MAX can't overflow. */
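/* As a hypothetical illustration: an allocno with n_refs == 8,
   freq == 1000, live_length == 50 and size == 1 gets

     pri = (floor_log2 (8) * 1000 / 50) * (10000 / REG_FREQ_MAX) * 1
	 = 60 * (10000 / REG_FREQ_MAX)

   so frequently referenced, short-lived allocnos are allocated first.  */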
654 = (((double) (floor_log2 (allocno
[v1
].n_refs
) * allocno
[v1
].freq
)
655 / allocno
[v1
].live_length
)
656 * (10000 / REG_FREQ_MAX
) * allocno
[v1
].size
);
658 = (((double) (floor_log2 (allocno
[v2
].n_refs
) * allocno
[v2
].freq
)
659 / allocno
[v2
].live_length
)
660 * (10000 / REG_FREQ_MAX
) * allocno
[v2
].size
);
664 /* If regs are equally good, sort by allocno,
665 so that the results of qsort leave nothing to chance. */
669 /* Scan the rtl code and record all conflicts and register preferences in the
670 conflict matrices and preference tables. */
673 global_conflicts (void)
678 int *block_start_allocnos
;
680 /* Make a vector that mark_reg_{store,clobber} will store in. */
681 regs_set
= xmalloc (max_parallel
* sizeof (rtx
) * 2);
683 block_start_allocnos
= xmalloc (max_allocno
* sizeof (int));
687 memset (allocnos_live
, 0, allocno_row_words
* sizeof (INT_TYPE
));
689 /* Initialize table of registers currently live
690 to the state at the beginning of this basic block.
691 This also marks the conflicts among hard registers
692 and any allocnos that are live.
694 For pseudo-regs, there is only one bit for each one
695 no matter how many hard regs it occupies.
696 This is ok; we know the size from PSEUDO_REGNO_SIZE.
697 For explicit hard regs, we cannot know the size that way
698 since one hard reg can be used with various sizes.
699 Therefore, we must require that all the hard regs
700 implicitly live as part of a multi-word hard reg
701 be explicitly marked in basic_block_live_at_start. */
704 regset old
= b
->il
.rtl
->global_live_at_start
;
706 reg_set_iterator rsi
;
708 REG_SET_TO_HARD_REG_SET (hard_regs_live
, old
);
709 EXECUTE_IF_SET_IN_REG_SET (old
, FIRST_PSEUDO_REGISTER
, i
, rsi
)
711 int a
= reg_allocno
[i
];
714 SET_ALLOCNO_LIVE (a
);
715 block_start_allocnos
[ax
++] = a
;
717 else if ((a
= reg_renumber
[i
]) >= 0)
718 mark_reg_live_nc (a
, PSEUDO_REGNO_MODE (i
));
721 /* Record that each allocno now live conflicts with each hard reg
It is not necessary to mark any conflicts between pseudos at
this point, even for pseudos which are live at the start of
728 Given two pseudos X and Y and any point in the CFG P.
730 On any path to point P where X and Y are live one of the
731 following conditions must be true:
733 1. X is live at some instruction on the path that
736 2. Y is live at some instruction on the path that
739 3. Either X or Y is not evaluated on the path to P
740 (i.e. it is used uninitialized) and thus the
741 conflict can be ignored.
743 In cases #1 and #2 the conflict will be recorded when we
744 scan the instruction that makes either X or Y become live. */
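/* An illustrative case: if Y is set inside a block at a point where X
   is live, the X/Y conflict is recorded when that set is scanned; it
   need not be recorded again at the start of the following block even
   though both registers are live-in there.  */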
745 record_conflicts (block_start_allocnos
, ax
);
747 /* Pseudos can't go in stack regs at the start of a basic block that
748 is reached by an abnormal edge. Likewise for call clobbered regs,
749 because because caller-save, fixup_abnormal_edges, and possibly
750 the table driven EH machinery are not quite ready to handle such
751 regs live across such edges. */
756 FOR_EACH_EDGE (e
, ei
, b
->preds
)
757 if (e
->flags
& EDGE_ABNORMAL
)
763 EXECUTE_IF_SET_IN_ALLOCNO_SET (allocnos_live
, ax
,
765 allocno
[ax
].no_stack_reg
= 1;
767 for (ax
= FIRST_STACK_REG
; ax
<= LAST_STACK_REG
; ax
++)
768 record_one_conflict (ax
);
771 /* No need to record conflicts for call clobbered regs if we have
772 nonlocal labels around, as we don't ever try to allocate such
773 regs in this case. */
774 if (! current_function_has_nonlocal_label
)
775 for (ax
= 0; ax
< FIRST_PSEUDO_REGISTER
; ax
++)
776 if (call_used_regs
[ax
])
777 record_one_conflict (ax
);
784 /* Scan the code of this basic block, noting which allocnos
785 and hard regs are born or die. When one is born,
786 record a conflict with all others currently live. */
790 RTX_CODE code
= GET_CODE (insn
);
793 /* Make regs_set an empty set. */
797 if (code
== INSN
|| code
== CALL_INSN
|| code
== JUMP_INSN
)
802 for (link
= REG_NOTES (insn
);
803 link
&& i
< NUM_NO_CONFLICT_PAIRS
;
804 link
= XEXP (link
, 1))
805 if (REG_NOTE_KIND (link
) == REG_NO_CONFLICT
)
807 no_conflict_pairs
[i
].allocno1
808 = reg_allocno
[REGNO (SET_DEST (PATTERN (insn
)))];
809 no_conflict_pairs
[i
].allocno2
810 = reg_allocno
[REGNO (XEXP (link
, 0))];
815 /* Mark any registers clobbered by INSN as live,
816 so they conflict with the inputs. */
818 note_stores (PATTERN (insn
), mark_reg_clobber
, NULL
);
820 /* Mark any registers dead after INSN as dead now. */
822 for (link
= REG_NOTES (insn
); link
; link
= XEXP (link
, 1))
823 if (REG_NOTE_KIND (link
) == REG_DEAD
)
824 mark_reg_death (XEXP (link
, 0));
826 /* Mark any registers set in INSN as live,
827 and mark them as conflicting with all other live regs.
828 Clobbers are processed again, so they conflict with
829 the registers that are set. */
831 note_stores (PATTERN (insn
), mark_reg_store
, NULL
);
834 for (link
= REG_NOTES (insn
); link
; link
= XEXP (link
, 1))
835 if (REG_NOTE_KIND (link
) == REG_INC
)
836 mark_reg_store (XEXP (link
, 0), NULL_RTX
, NULL
);
839 /* If INSN has multiple outputs, then any reg that dies here
840 and is used inside of an output
841 must conflict with the other outputs.
843 It is unsafe to use !single_set here since it will ignore an
844 unused output. Just because an output is unused does not mean
845 the compiler can assume the side effect will not occur.
846 Consider if REG appears in the address of an output and we
847 reload the output. If we allocate REG to the same hard
848 register as an unused output we could set the hard register
849 before the output reload insn. */
850 if (GET_CODE (PATTERN (insn
)) == PARALLEL
&& multiple_sets (insn
))
851 for (link
= REG_NOTES (insn
); link
; link
= XEXP (link
, 1))
852 if (REG_NOTE_KIND (link
) == REG_DEAD
)
854 int used_in_output
= 0;
856 rtx reg
= XEXP (link
, 0);
858 for (i
= XVECLEN (PATTERN (insn
), 0) - 1; i
>= 0; i
--)
860 rtx set
= XVECEXP (PATTERN (insn
), 0, i
);
861 if (GET_CODE (set
) == SET
862 && !REG_P (SET_DEST (set
))
863 && !rtx_equal_p (reg
, SET_DEST (set
))
864 && reg_overlap_mentioned_p (reg
, SET_DEST (set
)))
868 mark_reg_conflicts (reg
);
871 /* Mark any registers set in INSN and then never used. */
873 while (n_regs_set
-- > 0)
875 rtx note
= find_regno_note (insn
, REG_UNUSED
,
876 REGNO (regs_set
[n_regs_set
]));
878 mark_reg_death (XEXP (note
, 0));
882 if (insn
== BB_END (b
))
884 insn
= NEXT_INSN (insn
);
889 free (block_start_allocnos
);
892 /* Expand the preference information by looking for cases where one allocno
893 dies in an insn that sets an allocno. If those two allocnos don't conflict,
894 merge any preferences between those allocnos. */
897 expand_preferences (void)
903 /* We only try to handle the most common cases here. Most of the cases
904 where this wins are reg-reg copies. */
906 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
908 && (set
= single_set (insn
)) != 0
909 && REG_P (SET_DEST (set
))
910 && reg_allocno
[REGNO (SET_DEST (set
))] >= 0)
911 for (link
= REG_NOTES (insn
); link
; link
= XEXP (link
, 1))
912 if (REG_NOTE_KIND (link
) == REG_DEAD
913 && REG_P (XEXP (link
, 0))
914 && reg_allocno
[REGNO (XEXP (link
, 0))] >= 0
915 && ! CONFLICTP (reg_allocno
[REGNO (SET_DEST (set
))],
916 reg_allocno
[REGNO (XEXP (link
, 0))]))
918 int a1
= reg_allocno
[REGNO (SET_DEST (set
))];
919 int a2
= reg_allocno
[REGNO (XEXP (link
, 0))];
921 if (XEXP (link
, 0) == SET_SRC (set
))
923 IOR_HARD_REG_SET (allocno
[a1
].hard_reg_copy_preferences
,
924 allocno
[a2
].hard_reg_copy_preferences
);
925 IOR_HARD_REG_SET (allocno
[a2
].hard_reg_copy_preferences
,
926 allocno
[a1
].hard_reg_copy_preferences
);
929 IOR_HARD_REG_SET (allocno
[a1
].hard_reg_preferences
,
930 allocno
[a2
].hard_reg_preferences
);
931 IOR_HARD_REG_SET (allocno
[a2
].hard_reg_preferences
,
932 allocno
[a1
].hard_reg_preferences
);
933 IOR_HARD_REG_SET (allocno
[a1
].hard_reg_full_preferences
,
934 allocno
[a2
].hard_reg_full_preferences
);
935 IOR_HARD_REG_SET (allocno
[a2
].hard_reg_full_preferences
,
936 allocno
[a1
].hard_reg_full_preferences
);
940 /* Prune the preferences for global registers to exclude registers that cannot
943 Compute `regs_someone_prefers', which is a bitmask of the hard registers
944 that are preferred by conflicting registers of lower priority. If possible,
945 we will avoid using these registers. */
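/* A concrete (illustrative) case: if a high-priority allocno A
   conflicts with a lower-priority allocno B, and B prefers hard
   register 3 while A has no preference for it, then register 3 ends
   up in A's regs_someone_prefers.  find_reg will then avoid giving
   register 3 to A on its first pass, leaving it free for B unless A
   has no other choice.  */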
948 prune_preferences (void)
952 int *allocno_to_order
= xmalloc (max_allocno
* sizeof (int));
/* Scan from least important to most important.
955 For each allocno, remove from preferences registers that cannot be used,
956 either because of conflicts or register type. Then compute all registers
957 preferred by each lower-priority register that conflicts. */
959 for (i
= max_allocno
- 1; i
>= 0; i
--)
963 num
= allocno_order
[i
];
964 allocno_to_order
[num
] = i
;
965 COPY_HARD_REG_SET (temp
, allocno
[num
].hard_reg_conflicts
);
967 if (allocno
[num
].calls_crossed
== 0)
968 IOR_HARD_REG_SET (temp
, fixed_reg_set
);
970 IOR_HARD_REG_SET (temp
, call_used_reg_set
);
972 IOR_COMPL_HARD_REG_SET
974 reg_class_contents
[(int) reg_preferred_class (allocno
[num
].reg
)]);
976 AND_COMPL_HARD_REG_SET (allocno
[num
].hard_reg_preferences
, temp
);
977 AND_COMPL_HARD_REG_SET (allocno
[num
].hard_reg_copy_preferences
, temp
);
978 AND_COMPL_HARD_REG_SET (allocno
[num
].hard_reg_full_preferences
, temp
);
981 for (i
= max_allocno
- 1; i
>= 0; i
--)
983 /* Merge in the preferences of lower-priority registers (they have
984 already been pruned). If we also prefer some of those registers,
985 don't exclude them unless we are of a smaller size (in which case
986 we want to give the lower-priority allocno the first chance for
988 HARD_REG_SET temp
, temp2
;
991 num
= allocno_order
[i
];
993 CLEAR_HARD_REG_SET (temp
);
994 CLEAR_HARD_REG_SET (temp2
);
996 EXECUTE_IF_SET_IN_ALLOCNO_SET (conflicts
+ num
* allocno_row_words
,
999 if (allocno_to_order
[allocno2
] > i
)
1001 if (allocno
[allocno2
].size
<= allocno
[num
].size
)
1002 IOR_HARD_REG_SET (temp
,
1003 allocno
[allocno2
].hard_reg_full_preferences
);
1005 IOR_HARD_REG_SET (temp2
,
1006 allocno
[allocno2
].hard_reg_full_preferences
);
1010 AND_COMPL_HARD_REG_SET (temp
, allocno
[num
].hard_reg_full_preferences
);
1011 IOR_HARD_REG_SET (temp
, temp2
);
1012 COPY_HARD_REG_SET (allocno
[num
].regs_someone_prefers
, temp
);
1014 free (allocno_to_order
);
1017 /* Assign a hard register to allocno NUM; look for one that is the beginning
1018 of a long enough stretch of hard regs none of which conflicts with ALLOCNO.
1019 The registers marked in PREFREGS are tried first.
1021 LOSERS, if nonzero, is a HARD_REG_SET indicating registers that cannot
1022 be used for this allocation.
1024 If ALT_REGS_P is zero, consider only the preferred class of ALLOCNO's reg.
1025 Otherwise ignore that preferred class and use the alternate class.
1027 If ACCEPT_CALL_CLOBBERED is nonzero, accept a call-clobbered hard reg that
1028 will have to be saved and restored at calls.
1030 RETRYING is nonzero if this is called from retry_global_alloc.
1032 If we find one, record it in reg_renumber.
1033 If not, do nothing. */
1036 find_reg (int num
, HARD_REG_SET losers
, int alt_regs_p
, int accept_call_clobbered
, int retrying
)
1038 int i
, best_reg
, pass
;
1039 HARD_REG_SET used
, used1
, used2
;
1041 enum reg_class
class = (alt_regs_p
1042 ? reg_alternate_class (allocno
[num
].reg
)
1043 : reg_preferred_class (allocno
[num
].reg
));
1044 enum machine_mode mode
= PSEUDO_REGNO_MODE (allocno
[num
].reg
);
1046 if (accept_call_clobbered
)
1047 COPY_HARD_REG_SET (used1
, call_fixed_reg_set
);
1048 else if (allocno
[num
].calls_crossed
== 0)
1049 COPY_HARD_REG_SET (used1
, fixed_reg_set
);
1051 COPY_HARD_REG_SET (used1
, call_used_reg_set
);
1053 /* Some registers should not be allocated in global-alloc. */
1054 IOR_HARD_REG_SET (used1
, no_global_alloc_regs
);
1056 IOR_HARD_REG_SET (used1
, losers
);
1058 IOR_COMPL_HARD_REG_SET (used1
, reg_class_contents
[(int) class]);
1059 COPY_HARD_REG_SET (used2
, used1
);
1061 IOR_HARD_REG_SET (used1
, allocno
[num
].hard_reg_conflicts
);
1063 #ifdef CANNOT_CHANGE_MODE_CLASS
1064 cannot_change_mode_set_regs (&used1
, mode
, allocno
[num
].reg
);
1067 /* Try each hard reg to see if it fits. Do this in two passes.
1068 In the first pass, skip registers that are preferred by some other pseudo
1069 to give it a better chance of getting one of those registers. Only if
1070 we can't get a register when excluding those do we take one of them.
1071 However, we never allocate a register for the first time in pass 0. */
1073 COPY_HARD_REG_SET (used
, used1
);
1074 IOR_COMPL_HARD_REG_SET (used
, regs_used_so_far
);
1075 IOR_HARD_REG_SET (used
, allocno
[num
].regs_someone_prefers
);
1078 for (i
= FIRST_PSEUDO_REGISTER
, pass
= 0;
1079 pass
<= 1 && i
>= FIRST_PSEUDO_REGISTER
;
1083 COPY_HARD_REG_SET (used
, used1
);
1084 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1086 #ifdef REG_ALLOC_ORDER
1087 int regno
= reg_alloc_order
[i
];
1091 if (! TEST_HARD_REG_BIT (used
, regno
)
1092 && HARD_REGNO_MODE_OK (regno
, mode
)
1093 && (allocno
[num
].calls_crossed
== 0
1094 || accept_call_clobbered
1095 || ! HARD_REGNO_CALL_PART_CLOBBERED (regno
, mode
)))
1098 int lim
= regno
+ hard_regno_nregs
[regno
][mode
];
1101 && ! TEST_HARD_REG_BIT (used
, j
));
1108 #ifndef REG_ALLOC_ORDER
1109 i
= j
; /* Skip starting points we know will lose */
1115 /* See if there is a preferred register with the same class as the register
1116 we allocated above. Making this restriction prevents register
1117 preferencing from creating worse register allocation.
1119 Remove from the preferred registers and conflicting registers. Note that
1120 additional conflicts may have been added after `prune_preferences' was
First do this for those registers with copy preferences, then all
1124 preferred registers. */
1126 AND_COMPL_HARD_REG_SET (allocno
[num
].hard_reg_copy_preferences
, used
);
1127 GO_IF_HARD_REG_SUBSET (allocno
[num
].hard_reg_copy_preferences
,
1128 reg_class_contents
[(int) NO_REGS
], no_copy_prefs
);
1132 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1133 if (TEST_HARD_REG_BIT (allocno
[num
].hard_reg_copy_preferences
, i
)
1134 && HARD_REGNO_MODE_OK (i
, mode
)
1135 && (allocno
[num
].calls_crossed
== 0
1136 || accept_call_clobbered
1137 || ! HARD_REGNO_CALL_PART_CLOBBERED (i
, mode
))
1138 && (REGNO_REG_CLASS (i
) == REGNO_REG_CLASS (best_reg
)
1139 || reg_class_subset_p (REGNO_REG_CLASS (i
),
1140 REGNO_REG_CLASS (best_reg
))
1141 || reg_class_subset_p (REGNO_REG_CLASS (best_reg
),
1142 REGNO_REG_CLASS (i
))))
1145 int lim
= i
+ hard_regno_nregs
[i
][mode
];
1148 && ! TEST_HARD_REG_BIT (used
, j
)
1149 && (REGNO_REG_CLASS (j
)
1150 == REGNO_REG_CLASS (best_reg
+ (j
- i
))
1151 || reg_class_subset_p (REGNO_REG_CLASS (j
),
1152 REGNO_REG_CLASS (best_reg
+ (j
- i
)))
1153 || reg_class_subset_p (REGNO_REG_CLASS (best_reg
+ (j
- i
)),
1154 REGNO_REG_CLASS (j
))));
1165 AND_COMPL_HARD_REG_SET (allocno
[num
].hard_reg_preferences
, used
);
1166 GO_IF_HARD_REG_SUBSET (allocno
[num
].hard_reg_preferences
,
1167 reg_class_contents
[(int) NO_REGS
], no_prefs
);
1171 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
1172 if (TEST_HARD_REG_BIT (allocno
[num
].hard_reg_preferences
, i
)
1173 && HARD_REGNO_MODE_OK (i
, mode
)
1174 && (allocno
[num
].calls_crossed
== 0
1175 || accept_call_clobbered
1176 || ! HARD_REGNO_CALL_PART_CLOBBERED (i
, mode
))
1177 && (REGNO_REG_CLASS (i
) == REGNO_REG_CLASS (best_reg
)
1178 || reg_class_subset_p (REGNO_REG_CLASS (i
),
1179 REGNO_REG_CLASS (best_reg
))
1180 || reg_class_subset_p (REGNO_REG_CLASS (best_reg
),
1181 REGNO_REG_CLASS (i
))))
1184 int lim
= i
+ hard_regno_nregs
[i
][mode
];
1187 && ! TEST_HARD_REG_BIT (used
, j
)
1188 && (REGNO_REG_CLASS (j
)
1189 == REGNO_REG_CLASS (best_reg
+ (j
- i
))
1190 || reg_class_subset_p (REGNO_REG_CLASS (j
),
1191 REGNO_REG_CLASS (best_reg
+ (j
- i
)))
1192 || reg_class_subset_p (REGNO_REG_CLASS (best_reg
+ (j
- i
)),
1193 REGNO_REG_CLASS (j
))));
1204 /* If we haven't succeeded yet, try with caller-saves.
1205 We need not check to see if the current function has nonlocal
1206 labels because we don't put any pseudos that are live over calls in
1207 registers in that case. */
1209 if (flag_caller_saves
&& best_reg
< 0)
1211 /* Did not find a register. If it would be profitable to
1212 allocate a call-clobbered register and save and restore it
1213 around calls, do that. Don't do this if it crosses any calls
1214 that might throw. */
1215 if (! accept_call_clobbered
1216 && allocno
[num
].calls_crossed
!= 0
1217 && allocno
[num
].throwing_calls_crossed
== 0
1218 && CALLER_SAVE_PROFITABLE (allocno
[num
].n_refs
,
1219 allocno
[num
].calls_crossed
))
1221 HARD_REG_SET new_losers
;
1223 CLEAR_HARD_REG_SET (new_losers
);
1225 COPY_HARD_REG_SET (new_losers
, losers
);
1227 IOR_HARD_REG_SET(new_losers
, losing_caller_save_reg_set
);
1228 find_reg (num
, new_losers
, alt_regs_p
, 1, retrying
);
1229 if (reg_renumber
[allocno
[num
].reg
] >= 0)
1231 caller_save_needed
= 1;
1237 /* If we haven't succeeded yet,
1238 see if some hard reg that conflicts with us
1239 was utilized poorly by local-alloc.
1240 If so, kick out the regs that were put there by local-alloc
1241 so we can use it instead. */
1242 if (best_reg
< 0 && !retrying
1243 /* Let's not bother with multi-reg allocnos. */
1244 && allocno
[num
].size
== 1)
1246 /* Count from the end, to find the least-used ones first. */
1247 for (i
= FIRST_PSEUDO_REGISTER
- 1; i
>= 0; i
--)
1249 #ifdef REG_ALLOC_ORDER
1250 int regno
= reg_alloc_order
[i
];
1255 if (local_reg_n_refs
[regno
] != 0
1256 /* Don't use a reg no good for this pseudo. */
1257 && ! TEST_HARD_REG_BIT (used2
, regno
)
1258 && HARD_REGNO_MODE_OK (regno
, mode
)
1259 /* The code below assumes that we need only a single
1260 register, but the check of allocno[num].size above
1261 was not enough. Sometimes we need more than one
1262 register for a single-word value. */
1263 && hard_regno_nregs
[regno
][mode
] == 1
1264 && (allocno
[num
].calls_crossed
== 0
1265 || accept_call_clobbered
1266 || ! HARD_REGNO_CALL_PART_CLOBBERED (regno
, mode
))
1267 #ifdef CANNOT_CHANGE_MODE_CLASS
1268 && ! invalid_mode_change_p (regno
, REGNO_REG_CLASS (regno
),
1272 && (!allocno
[num
].no_stack_reg
1273 || regno
< FIRST_STACK_REG
|| regno
> LAST_STACK_REG
)
1277 /* We explicitly evaluate the divide results into temporary
1278 variables so as to avoid excess precision problems that occur
1279 on an i386-unknown-sysv4.2 (unixware) host. */
1281 double tmp1
= ((double) local_reg_freq
[regno
]
1282 / local_reg_live_length
[regno
]);
1283 double tmp2
= ((double) allocno
[num
].freq
1284 / allocno
[num
].live_length
);
1288 /* Hard reg REGNO was used less in total by local regs
1289 than it would be used by this one allocno! */
1291 for (k
= 0; k
< max_regno
; k
++)
1292 if (reg_renumber
[k
] >= 0)
1294 int r
= reg_renumber
[k
];
1296 = r
+ hard_regno_nregs
[r
][PSEUDO_REGNO_MODE (k
)];
1298 if (regno
>= r
&& regno
< endregno
)
1299 reg_renumber
[k
] = -1;
1309 /* Did we find a register? */
1314 HARD_REG_SET this_reg
;
1316 /* Yes. Record it as the hard register of this pseudo-reg. */
1317 reg_renumber
[allocno
[num
].reg
] = best_reg
;
1318 /* Also of any pseudo-regs that share with it. */
1319 if (reg_may_share
[allocno
[num
].reg
])
1320 for (j
= FIRST_PSEUDO_REGISTER
; j
< max_regno
; j
++)
1321 if (reg_allocno
[j
] == num
)
1322 reg_renumber
[j
] = best_reg
;
1324 /* Make a set of the hard regs being allocated. */
1325 CLEAR_HARD_REG_SET (this_reg
);
1326 lim
= best_reg
+ hard_regno_nregs
[best_reg
][mode
];
1327 for (j
= best_reg
; j
< lim
; j
++)
1329 SET_HARD_REG_BIT (this_reg
, j
);
1330 SET_HARD_REG_BIT (regs_used_so_far
, j
);
1331 /* This is no longer a reg used just by local regs. */
1332 local_reg_n_refs
[j
] = 0;
1333 local_reg_freq
[j
] = 0;
1335 /* For each other pseudo-reg conflicting with this one,
1336 mark it as conflicting with the hard regs this one occupies. */
1338 EXECUTE_IF_SET_IN_ALLOCNO_SET (conflicts
+ lim
* allocno_row_words
, j
,
1340 IOR_HARD_REG_SET (allocno
[j
].hard_reg_conflicts
, this_reg
);
1345 /* Called from `reload' to look for a hard reg to put pseudo reg REGNO in.
1346 Perhaps it had previously seemed not worth a hard reg,
1347 or perhaps its old hard reg has been commandeered for reloads.
1348 FORBIDDEN_REGS indicates certain hard regs that may not be used, even if
1349 they do not appear to be allocated.
1350 If FORBIDDEN_REGS is zero, no regs are forbidden. */
1353 retry_global_alloc (int regno
, HARD_REG_SET forbidden_regs
)
1355 int alloc_no
= reg_allocno
[regno
];
1358 /* If we have more than one register class,
1359 first try allocating in the class that is cheapest
1360 for this pseudo-reg. If that fails, try any reg. */
1361 if (N_REG_CLASSES
> 1)
1362 find_reg (alloc_no
, forbidden_regs
, 0, 0, 1);
1363 if (reg_renumber
[regno
] < 0
1364 && reg_alternate_class (regno
) != NO_REGS
)
1365 find_reg (alloc_no
, forbidden_regs
, 1, 0, 1);
1367 /* If we found a register, modify the RTL for the register to
1368 show the hard register, and mark that register live. */
1369 if (reg_renumber
[regno
] >= 0)
1371 REGNO (regno_reg_rtx
[regno
]) = reg_renumber
[regno
];
1372 mark_home_live (regno
);
1377 /* Record a conflict between register REGNO
1378 and everything currently live.
1379 REGNO must not be a pseudo reg that was allocated
1380 by local_alloc; such numbers must be translated through
1381 reg_renumber before calling here. */
1384 record_one_conflict (int regno
)
1388 if (regno
< FIRST_PSEUDO_REGISTER
)
1389 /* When a hard register becomes live,
1390 record conflicts with live pseudo regs. */
1391 EXECUTE_IF_SET_IN_ALLOCNO_SET (allocnos_live
, j
,
1393 SET_HARD_REG_BIT (allocno
[j
].hard_reg_conflicts
, regno
);
1396 /* When a pseudo-register becomes live,
1397 record conflicts first with hard regs,
1398 then with other pseudo regs. */
1400 int ialloc
= reg_allocno
[regno
];
1401 int ialloc_prod
= ialloc
* allocno_row_words
;
1403 IOR_HARD_REG_SET (allocno
[ialloc
].hard_reg_conflicts
, hard_regs_live
);
1404 for (j
= allocno_row_words
- 1; j
>= 0; j
--)
1405 conflicts
[ialloc_prod
+ j
] |= allocnos_live
[j
];
1409 /* Record all allocnos currently live as conflicting
1410 with all hard regs currently live.
1412 ALLOCNO_VEC is a vector of LEN allocnos, all allocnos that
1413 are currently live. Their bits are also flagged in allocnos_live. */
1416 record_conflicts (int *allocno_vec
, int len
)
1419 IOR_HARD_REG_SET (allocno
[allocno_vec
[len
]].hard_reg_conflicts
,
1423 /* If CONFLICTP (i, j) is true, make sure CONFLICTP (j, i) is also true. */
1425 mirror_conflicts (void)
1428 int rw
= allocno_row_words
;
1429 int rwb
= rw
* INT_BITS
;
1430 INT_TYPE
*p
= conflicts
;
1431 INT_TYPE
*q0
= conflicts
, *q1
, *q2
;
1432 unsigned INT_TYPE mask
;
1434 for (i
= max_allocno
- 1, mask
= 1; i
>= 0; i
--, mask
<<= 1)
1441 for (j
= allocno_row_words
- 1, q1
= q0
; j
>= 0; j
--, q1
+= rwb
)
1443 unsigned INT_TYPE word
;
1445 for (word
= (unsigned INT_TYPE
) *p
++, q2
= q1
; word
;
1446 word
>>= 1, q2
+= rw
)
1455 /* Handle the case where REG is set by the insn being scanned,
1456 during the forward scan to accumulate conflicts.
1457 Store a 1 in regs_live or allocnos_live for this register, record how many
1458 consecutive hardware registers it actually needs,
1459 and record a conflict with all other registers already live.
1461 Note that even if REG does not remain alive after this insn,
1462 we must mark it here as live, to ensure a conflict between
1463 REG and any other regs set in this insn that really do live.
1464 This is because those other regs could be considered after this.
1466 REG might actually be something other than a register;
1467 if so, we do nothing.
1469 SETTER is 0 if this register was modified by an auto-increment (i.e.,
1470 a REG_INC note was found for it). */
1473 mark_reg_store (rtx reg
, rtx setter
, void *data ATTRIBUTE_UNUSED
)
1477 if (GET_CODE (reg
) == SUBREG
)
1478 reg
= SUBREG_REG (reg
);
1483 regs_set
[n_regs_set
++] = reg
;
1485 if (setter
&& GET_CODE (setter
) != CLOBBER
)
1486 set_preference (reg
, SET_SRC (setter
));
1488 regno
= REGNO (reg
);
1490 /* Either this is one of the max_allocno pseudo regs not allocated,
1491 or it is or has a hardware reg. First handle the pseudo-regs. */
1492 if (regno
>= FIRST_PSEUDO_REGISTER
)
1494 if (reg_allocno
[regno
] >= 0)
1496 SET_ALLOCNO_LIVE (reg_allocno
[regno
]);
1497 record_one_conflict (regno
);
1501 if (reg_renumber
[regno
] >= 0)
1502 regno
= reg_renumber
[regno
];
1504 /* Handle hardware regs (and pseudos allocated to hard regs). */
1505 if (regno
< FIRST_PSEUDO_REGISTER
&& ! fixed_regs
[regno
])
1507 int last
= regno
+ hard_regno_nregs
[regno
][GET_MODE (reg
)];
1508 while (regno
< last
)
1510 record_one_conflict (regno
);
1511 SET_HARD_REG_BIT (hard_regs_live
, regno
);
1517 /* Like mark_reg_set except notice just CLOBBERs; ignore SETs. */
1520 mark_reg_clobber (rtx reg
, rtx setter
, void *data
)
1522 if (GET_CODE (setter
) == CLOBBER
)
1523 mark_reg_store (reg
, setter
, data
);
1526 /* Record that REG has conflicts with all the regs currently live.
1527 Do not mark REG itself as live. */
1530 mark_reg_conflicts (rtx reg
)
1534 if (GET_CODE (reg
) == SUBREG
)
1535 reg
= SUBREG_REG (reg
);
1540 regno
= REGNO (reg
);
1542 /* Either this is one of the max_allocno pseudo regs not allocated,
1543 or it is or has a hardware reg. First handle the pseudo-regs. */
1544 if (regno
>= FIRST_PSEUDO_REGISTER
)
1546 if (reg_allocno
[regno
] >= 0)
1547 record_one_conflict (regno
);
1550 if (reg_renumber
[regno
] >= 0)
1551 regno
= reg_renumber
[regno
];
1553 /* Handle hardware regs (and pseudos allocated to hard regs). */
1554 if (regno
< FIRST_PSEUDO_REGISTER
&& ! fixed_regs
[regno
])
1556 int last
= regno
+ hard_regno_nregs
[regno
][GET_MODE (reg
)];
1557 while (regno
< last
)
1559 record_one_conflict (regno
);
1565 /* Mark REG as being dead (following the insn being scanned now).
1566 Store a 0 in regs_live or allocnos_live for this register. */
1569 mark_reg_death (rtx reg
)
1571 int regno
= REGNO (reg
);
1573 /* Either this is one of the max_allocno pseudo regs not allocated,
1574 or it is a hardware reg. First handle the pseudo-regs. */
1575 if (regno
>= FIRST_PSEUDO_REGISTER
)
1577 if (reg_allocno
[regno
] >= 0)
1578 CLEAR_ALLOCNO_LIVE (reg_allocno
[regno
]);
1581 /* For pseudo reg, see if it has been assigned a hardware reg. */
1582 if (reg_renumber
[regno
] >= 0)
1583 regno
= reg_renumber
[regno
];
1585 /* Handle hardware regs (and pseudos allocated to hard regs). */
1586 if (regno
< FIRST_PSEUDO_REGISTER
&& ! fixed_regs
[regno
])
1588 /* Pseudo regs already assigned hardware regs are treated
1589 almost the same as explicit hardware regs. */
1590 int last
= regno
+ hard_regno_nregs
[regno
][GET_MODE (reg
)];
1591 while (regno
< last
)
1593 CLEAR_HARD_REG_BIT (hard_regs_live
, regno
);
1599 /* Mark hard reg REGNO as currently live, assuming machine mode MODE
1600 for the value stored in it. MODE determines how many consecutive
1601 registers are actually in use. Do not record conflicts;
1602 it is assumed that the caller will do that. */
1605 mark_reg_live_nc (int regno
, enum machine_mode mode
)
1607 int last
= regno
+ hard_regno_nregs
[regno
][mode
];
1608 while (regno
< last
)
1610 SET_HARD_REG_BIT (hard_regs_live
, regno
);
1615 /* Try to set a preference for an allocno to a hard register.
1616 We are passed DEST and SRC which are the operands of a SET. It is known
1617 that SRC is a register. If SRC or the first operand of SRC is a register,
1618 try to set a preference. If one of the two is a hard register and the other
1619 is a pseudo-register, mark the preference.
1621 Note that we are not as aggressive as local-alloc in trying to tie a
1622 pseudo-register to a hard register. */
1625 set_preference (rtx dest
, rtx src
)
1627 unsigned int src_regno
, dest_regno
;
1628 /* Amount to add to the hard regno for SRC, or subtract from that for DEST,
1629 to compensate for subregs in SRC or DEST. */
1634 if (GET_RTX_FORMAT (GET_CODE (src
))[0] == 'e')
1635 src
= XEXP (src
, 0), copy
= 0;
1637 /* Get the reg number for both SRC and DEST.
1638 If neither is a reg, give up. */
1641 src_regno
= REGNO (src
);
1642 else if (GET_CODE (src
) == SUBREG
&& REG_P (SUBREG_REG (src
)))
1644 src_regno
= REGNO (SUBREG_REG (src
));
1646 if (REGNO (SUBREG_REG (src
)) < FIRST_PSEUDO_REGISTER
)
1647 offset
+= subreg_regno_offset (REGNO (SUBREG_REG (src
)),
1648 GET_MODE (SUBREG_REG (src
)),
1652 offset
+= (SUBREG_BYTE (src
)
1653 / REGMODE_NATURAL_SIZE (GET_MODE (src
)));
1659 dest_regno
= REGNO (dest
);
1660 else if (GET_CODE (dest
) == SUBREG
&& REG_P (SUBREG_REG (dest
)))
1662 dest_regno
= REGNO (SUBREG_REG (dest
));
1664 if (REGNO (SUBREG_REG (dest
)) < FIRST_PSEUDO_REGISTER
)
1665 offset
-= subreg_regno_offset (REGNO (SUBREG_REG (dest
)),
1666 GET_MODE (SUBREG_REG (dest
)),
1670 offset
-= (SUBREG_BYTE (dest
)
1671 / REGMODE_NATURAL_SIZE (GET_MODE (dest
)));
1676 /* Convert either or both to hard reg numbers. */
1678 if (reg_renumber
[src_regno
] >= 0)
1679 src_regno
= reg_renumber
[src_regno
];
1681 if (reg_renumber
[dest_regno
] >= 0)
1682 dest_regno
= reg_renumber
[dest_regno
];
1684 /* Now if one is a hard reg and the other is a global pseudo
1685 then give the other a preference. */
1687 if (dest_regno
< FIRST_PSEUDO_REGISTER
&& src_regno
>= FIRST_PSEUDO_REGISTER
1688 && reg_allocno
[src_regno
] >= 0)
1690 dest_regno
-= offset
;
1691 if (dest_regno
< FIRST_PSEUDO_REGISTER
)
1694 SET_REGBIT (hard_reg_copy_preferences
,
1695 reg_allocno
[src_regno
], dest_regno
);
1697 SET_REGBIT (hard_reg_preferences
,
1698 reg_allocno
[src_regno
], dest_regno
);
1699 for (i
= dest_regno
;
1700 i
< dest_regno
+ hard_regno_nregs
[dest_regno
][GET_MODE (dest
)];
1702 SET_REGBIT (hard_reg_full_preferences
, reg_allocno
[src_regno
], i
);
1706 if (src_regno
< FIRST_PSEUDO_REGISTER
&& dest_regno
>= FIRST_PSEUDO_REGISTER
1707 && reg_allocno
[dest_regno
] >= 0)
1709 src_regno
+= offset
;
1710 if (src_regno
< FIRST_PSEUDO_REGISTER
)
1713 SET_REGBIT (hard_reg_copy_preferences
,
1714 reg_allocno
[dest_regno
], src_regno
);
1716 SET_REGBIT (hard_reg_preferences
,
1717 reg_allocno
[dest_regno
], src_regno
);
1719 i
< src_regno
+ hard_regno_nregs
[src_regno
][GET_MODE (src
)];
1721 SET_REGBIT (hard_reg_full_preferences
, reg_allocno
[dest_regno
], i
);
1726 /* Indicate that hard register number FROM was eliminated and replaced with
1727 an offset from hard register number TO. The status of hard registers live
1728 at the start of a basic block is updated by replacing a use of FROM with
1732 mark_elimination (int from
, int to
)
1738 regset r
= bb
->il
.rtl
->global_live_at_start
;
1739 if (REGNO_REG_SET_P (r
, from
))
1741 CLEAR_REGNO_REG_SET (r
, from
);
1742 SET_REGNO_REG_SET (r
, to
);
1747 /* Used for communication between the following functions. Holds the
1748 current life information. */
1749 static regset live_relevant_regs
;
1751 /* Record in live_relevant_regs and REGS_SET that register REG became live.
1752 This is called via note_stores. */
1754 reg_becomes_live (rtx reg
, rtx setter ATTRIBUTE_UNUSED
, void *regs_set
)
1758 if (GET_CODE (reg
) == SUBREG
)
1759 reg
= SUBREG_REG (reg
);
1764 regno
= REGNO (reg
);
1765 if (regno
< FIRST_PSEUDO_REGISTER
)
1767 int nregs
= hard_regno_nregs
[regno
][GET_MODE (reg
)];
1770 SET_REGNO_REG_SET (live_relevant_regs
, regno
);
1771 if (! fixed_regs
[regno
])
1772 SET_REGNO_REG_SET ((regset
) regs_set
, regno
);
1776 else if (reg_renumber
[regno
] >= 0)
1778 SET_REGNO_REG_SET (live_relevant_regs
, regno
);
1779 SET_REGNO_REG_SET ((regset
) regs_set
, regno
);
1783 /* Record in live_relevant_regs that register REGNO died. */
1785 reg_dies (int regno
, enum machine_mode mode
, struct insn_chain
*chain
)
1787 if (regno
< FIRST_PSEUDO_REGISTER
)
1789 int nregs
= hard_regno_nregs
[regno
][mode
];
1792 CLEAR_REGNO_REG_SET (live_relevant_regs
, regno
);
1793 if (! fixed_regs
[regno
])
1794 SET_REGNO_REG_SET (&chain
->dead_or_set
, regno
);
1800 CLEAR_REGNO_REG_SET (live_relevant_regs
, regno
);
1801 if (reg_renumber
[regno
] >= 0)
1802 SET_REGNO_REG_SET (&chain
->dead_or_set
, regno
);
1806 /* Walk the insns of the current function and build reload_insn_chain,
1807 and record register life information. */
1809 build_insn_chain (rtx first
)
1811 struct insn_chain
**p
= &reload_insn_chain
;
1812 struct insn_chain
*prev
= 0;
1813 basic_block b
= ENTRY_BLOCK_PTR
->next_bb
;
1815 live_relevant_regs
= ALLOC_REG_SET (®_obstack
);
1817 for (; first
; first
= NEXT_INSN (first
))
1819 struct insn_chain
*c
;
1821 if (first
== BB_HEAD (b
))
1826 CLEAR_REG_SET (live_relevant_regs
);
1828 EXECUTE_IF_SET_IN_BITMAP (b
->il
.rtl
->global_live_at_start
, 0, i
, bi
)
1830 if (i
< FIRST_PSEUDO_REGISTER
1831 ? ! TEST_HARD_REG_BIT (eliminable_regset
, i
)
1832 : reg_renumber
[i
] >= 0)
1833 SET_REGNO_REG_SET (live_relevant_regs
, i
);
1837 if (!NOTE_P (first
) && !BARRIER_P (first
))
1839 c
= new_insn_chain ();
1845 c
->block
= b
->index
;
1851 /* Mark the death of everything that dies in this instruction. */
1853 for (link
= REG_NOTES (first
); link
; link
= XEXP (link
, 1))
1854 if (REG_NOTE_KIND (link
) == REG_DEAD
1855 && REG_P (XEXP (link
, 0)))
1856 reg_dies (REGNO (XEXP (link
, 0)), GET_MODE (XEXP (link
, 0)),
1859 COPY_REG_SET (&c
->live_throughout
, live_relevant_regs
);
1861 /* Mark everything born in this instruction as live. */
1863 note_stores (PATTERN (first
), reg_becomes_live
,
1867 COPY_REG_SET (&c
->live_throughout
, live_relevant_regs
);
1873 /* Mark anything that is set in this insn and then unused as dying. */
1875 for (link
= REG_NOTES (first
); link
; link
= XEXP (link
, 1))
1876 if (REG_NOTE_KIND (link
) == REG_UNUSED
1877 && REG_P (XEXP (link
, 0)))
1878 reg_dies (REGNO (XEXP (link
, 0)), GET_MODE (XEXP (link
, 0)),
1883 if (first
== BB_END (b
))
1886 /* Stop after we pass the end of the last basic block. Verify that
1887 no real insns are after the end of the last basic block.
1889 We may want to reorganize the loop somewhat since this test should
always be the right exit test.  Allow an ADDR_VEC or ADDR_DIFF_VEC if
1891 the previous real insn is a JUMP_INSN. */
1892 if (b
== EXIT_BLOCK_PTR
)
1894 #ifdef ENABLE_CHECKING
1895 for (first
= NEXT_INSN (first
); first
; first
= NEXT_INSN (first
))
1896 gcc_assert (!INSN_P (first
)
1897 || GET_CODE (PATTERN (first
)) == USE
1898 || ((GET_CODE (PATTERN (first
)) == ADDR_VEC
1899 || GET_CODE (PATTERN (first
)) == ADDR_DIFF_VEC
)
1900 && prev_real_insn (first
) != 0
1901 && JUMP_P (prev_real_insn (first
))));
1906 FREE_REG_SET (live_relevant_regs
);
/* Print debugging trace information if -dg switch is given,
   showing the information on which the allocation decisions are based.  */

void
dump_conflicts (FILE *file)
{
  int i;
  int has_preferences;
  int nregs;

  nregs = 0;
  for (i = 0; i < max_allocno; i++)
    {
      if (reg_renumber[allocno[allocno_order[i]].reg] >= 0)
        continue;
      nregs++;
    }
  fprintf (file, ";; %d regs to allocate:", nregs);
  for (i = 0; i < max_allocno; i++)
    {
      int j;
      if (reg_renumber[allocno[allocno_order[i]].reg] >= 0)
        continue;
      fprintf (file, " %d", allocno[allocno_order[i]].reg);
      for (j = 0; j < max_regno; j++)
        if (reg_allocno[j] == allocno_order[i]
            && j != allocno[allocno_order[i]].reg)
          fprintf (file, "+%d", j);
      if (allocno[allocno_order[i]].size != 1)
        fprintf (file, " (%d)", allocno[allocno_order[i]].size);
    }
  fprintf (file, "\n");

  for (i = 0; i < max_allocno; i++)
    {
      int j;
      fprintf (file, ";; %d conflicts:", allocno[i].reg);
      for (j = 0; j < max_allocno; j++)
        if (CONFLICTP (j, i))
          fprintf (file, " %d", allocno[j].reg);
      for (j = 0; j < FIRST_PSEUDO_REGISTER; j++)
        if (TEST_HARD_REG_BIT (allocno[i].hard_reg_conflicts, j))
          fprintf (file, " %d", j);
      fprintf (file, "\n");

      has_preferences = 0;
      for (j = 0; j < FIRST_PSEUDO_REGISTER; j++)
        if (TEST_HARD_REG_BIT (allocno[i].hard_reg_preferences, j))
          has_preferences = 1;

      if (! has_preferences)
        continue;
      fprintf (file, ";; %d preferences:", allocno[i].reg);
      for (j = 0; j < FIRST_PSEUDO_REGISTER; j++)
        if (TEST_HARD_REG_BIT (allocno[i].hard_reg_preferences, j))
          fprintf (file, " %d", j);
      fprintf (file, "\n");
    }
  fprintf (file, "\n");
}
void
dump_global_regs (FILE *file)
{
  int i, j;

  fprintf (file, ";; Register dispositions:\n");
  for (i = FIRST_PSEUDO_REGISTER, j = 0; i < max_regno; i++)
    if (reg_renumber[i] >= 0)
      {
        fprintf (file, "%d in %d ", i, reg_renumber[i]);
        if (++j % 6 == 0)
          fprintf (file, "\n");
      }

  fprintf (file, "\n\n;; Hard regs used: ");
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (regs_ever_live[i])
      fprintf (file, " %d", i);
  fprintf (file, "\n\n");
}
/* This page contains code to make live information more accurate.
   The accurate register liveness at program point P means:
     o there is a path from P to a usage of the register and the
       register is not redefined or killed on the path.
     o the register at P is partially available, i.e. there is a path
       from a register definition to the point P and the register is
       not killed (clobbered) on the path.

   The standard GCC live information means only the first condition.
   Without the partial availability, there will be more register
   conflicts and as a consequence worse register allocation.  The
   typical example where the information can be different is a
   register initialized in a loop at the basic block preceding the
   loop in the CFG.  */
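/* A small worked illustration (hypothetical insns, for exposition
   only):

       insn 1:  r100 <- ...        definition of r100
       insn 2:  ...                point P
       insn 3:  ...  <- r100       use of r100

   Here r100 is live at P (first condition) and also partially
   available at P (second condition), because the definition in insn 1
   reaches P without being killed.  If insn 1 were absent, r100 would
   still be "live" at P by the first condition alone, but it would not
   be partially available, and the code below uses that distinction to
   shrink its live range.  */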
/* The following structure contains basic block data flow information
   used to calculate partial availability of registers.  */

struct bb_info
{
  /* The basic block reverse post-order number.  */
  int rts_number;
  /* Pseudo-registers used uninitialized in an insn in which there is
     an early clobbered register; such registers might otherwise get
     the same hard register as the early clobbered one.  */
  bitmap earlyclobber;
  /* Registers correspondingly killed (clobbered) and defined but not
     killed afterward in the basic block.  */
  bitmap killed, avloc;
  /* Registers partially available and live (in other words whose
     values were calculated and used) correspondingly at the start
     and end of the basic block.  */
  bitmap live_pavin, live_pavout;
};

/* Macros for accessing data flow information of basic blocks.  */

#define BB_INFO(BB) ((struct bb_info *) (BB)->aux)
#define BB_INFO_BY_INDEX(N) BB_INFO (BASIC_BLOCK(N))
/* The function allocates the info structures of each basic block.  It
   also initializes LIVE_PAVIN and LIVE_PAVOUT as if all hard
   registers were partially available.  */

static void
allocate_bb_info (void)
{
  int i;
  basic_block bb;
  struct bb_info *bb_info;
  bitmap init;

  alloc_aux_for_blocks (sizeof (struct bb_info));
  init = BITMAP_ALLOC (NULL);
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    bitmap_set_bit (init, i);
  FOR_EACH_BB (bb)
    {
      bb_info = BB_INFO (bb);
      bb_info->earlyclobber = BITMAP_ALLOC (NULL);
      bb_info->avloc = BITMAP_ALLOC (NULL);
      bb_info->killed = BITMAP_ALLOC (NULL);
      bb_info->live_pavin = BITMAP_ALLOC (NULL);
      bb_info->live_pavout = BITMAP_ALLOC (NULL);
      bitmap_copy (bb_info->live_pavin, init);
      bitmap_copy (bb_info->live_pavout, init);
    }
  BITMAP_FREE (init);
}
/* The function frees the allocated info of all basic blocks.  */

static void
free_bb_info (void)
{
  basic_block bb;
  struct bb_info *bb_info;

  FOR_EACH_BB (bb)
    {
      bb_info = BB_INFO (bb);
      BITMAP_FREE (bb_info->live_pavout);
      BITMAP_FREE (bb_info->live_pavin);
      BITMAP_FREE (bb_info->killed);
      BITMAP_FREE (bb_info->avloc);
      BITMAP_FREE (bb_info->earlyclobber);
    }
  free_aux_for_blocks ();
}
/* The function modifies local info for register REG being changed in
   SETTER.  DATA is used to pass the current basic block info.  */

static void
mark_reg_change (rtx reg, rtx setter, void *data)
{
  int regno;
  basic_block bb = data;
  struct bb_info *bb_info = BB_INFO (bb);

  if (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);

  if (! REG_P (reg))
    return;

  regno = REGNO (reg);
  bitmap_set_bit (bb_info->killed, regno);

  if (GET_CODE (setter) != CLOBBER)
    bitmap_set_bit (bb_info->avloc, regno);
  else
    bitmap_clear_bit (bb_info->avloc, regno);
}
/* Classes of registers which could be early clobbered in the current
   insn.  */

DEF_VEC_I(int);
DEF_VEC_ALLOC_I(int,heap);

static VEC(int,heap) *earlyclobber_regclass;
/* This function finds and stores register classes that could be early
   clobbered in INSN.  If any earlyclobber classes are found, the
   function returns TRUE; in all other cases it returns FALSE.  */

static bool
check_earlyclobber (rtx insn)
{
  int opno;
  bool found = false;

  extract_insn (insn);

  VEC_truncate (int, earlyclobber_regclass, 0);
  for (opno = 0; opno < recog_data.n_operands; opno++)
    {
      char c;
      bool amp_p;
      int i;
      enum reg_class class;
      const char *p = recog_data.constraints[opno];

      class = NO_REGS;
      amp_p = false;
      for (;;)
        {
          c = *p;
          switch (c)
            {
            case '=':  case '+':  case '?':
            case '#':  case '!':
            case '*':  case '%':
            case 'm':  case '<':  case '>':  case 'V':  case 'o':
            case 'E':  case 'F':  case 'G':  case 'H':
            case 's':  case 'i':  case 'n':
            case 'I':  case 'J':  case 'K':  case 'L':
            case 'M':  case 'N':  case 'O':  case 'P':
            case 'X':
            case '0': case '1': case '2': case '3': case '4':
            case '5': case '6': case '7': case '8': case '9':
              /* These don't say anything we care about.  */
              break;

            case '&':
              amp_p = true;
              break;

            case '\0':
            case ',':
              if (amp_p && class != NO_REGS)
                {
                  int rc;

                  found = true;
                  for (i = 0;
                       VEC_iterate (int, earlyclobber_regclass, i, rc);
                       i++)
                    {
                      if (rc == (int) class)
                        goto found_rc;
                    }

                  /* We use VEC_quick_push here because
                     earlyclobber_regclass holds no more than
                     N_REG_CLASSES elements.  */
                  VEC_quick_push (int, earlyclobber_regclass, (int) class);
                found_rc:
                  ;
                }

              amp_p = false;
              class = NO_REGS;
              break;

            case 'r':
              class = GENERAL_REGS;
              break;

            default:
              class = REG_CLASS_FROM_CONSTRAINT (c, p);
            }

          if (c == '\0')
            break;
          p += CONSTRAINT_LEN (c, p);
        }
    }

  return found;
}
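/* A hedged example of the parsing above (the constraint string is
   assumed, not taken from any machine description): for an operand
   with the constraint "=&r", '=' is skipped, '&' sets amp_p, 'r' sets
   class to GENERAL_REGS, and at the terminating '\0' the pending
   amp_p causes GENERAL_REGS to be pushed onto earlyclobber_regclass,
   so the function returns true.  */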
/* The function checks that pseudo-register *X has a class
   intersecting with the class of a pseudo-register that could be
   early clobbered in the same insn.

   This function is a no-op if earlyclobber_regclass is empty.  */

static int
mark_reg_use_for_earlyclobber (rtx *x, void *data ATTRIBUTE_UNUSED)
{
  enum reg_class pref_class, alt_class;
  int i, regno;
  basic_block bb = data;
  struct bb_info *bb_info = BB_INFO (bb);

  if (REG_P (*x) && REGNO (*x) >= FIRST_PSEUDO_REGISTER)
    {
      int rc;

      regno = REGNO (*x);
      if (bitmap_bit_p (bb_info->killed, regno)
          || bitmap_bit_p (bb_info->avloc, regno))
        return 0;
      pref_class = reg_preferred_class (regno);
      alt_class = reg_alternate_class (regno);
      for (i = 0; VEC_iterate (int, earlyclobber_regclass, i, rc); i++)
        {
          if (reg_classes_intersect_p (rc, pref_class)
              || (rc != NO_REGS
                  && reg_classes_intersect_p (rc, alt_class)))
            {
              bitmap_set_bit (bb_info->earlyclobber, regno);
              break;
            }
        }
    }
  return 0;
}
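/* An assumed scenario to illustrate the test above: if
   check_earlyclobber recorded GENERAL_REGS for the current insn and
   pseudo 130 is used in that insn with reg_preferred_class (130) ==
   GENERAL_REGS, then 130 is added to the block's EARLYCLOBBER bitmap,
   unless it was already set or killed earlier in the block (in which
   case the KILLED/AVLOC check above skips it).  */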
/* The function processes all pseudo-registers in *X with the aid of
   the previous function.  */

static void
mark_reg_use_for_earlyclobber_1 (rtx *x, void *data)
{
  for_each_rtx (x, mark_reg_use_for_earlyclobber, data);
}
/* The function calculates local info for each basic block.  */

static void
calculate_local_reg_bb_info (void)
{
  basic_block bb;
  rtx insn, bound;

  /* We know that earlyclobber_regclass holds no more than
     N_REG_CLASSES elements.  See check_earlyclobber.  */
  earlyclobber_regclass = VEC_alloc (int, heap, N_REG_CLASSES);
  FOR_EACH_BB (bb)
    {
      bound = NEXT_INSN (BB_END (bb));
      for (insn = BB_HEAD (bb); insn != bound; insn = NEXT_INSN (insn))
        if (INSN_P (insn))
          {
            note_stores (PATTERN (insn), mark_reg_change, bb);
            if (check_earlyclobber (insn))
              note_uses (&PATTERN (insn), mark_reg_use_for_earlyclobber_1, bb);
          }
    }
  VEC_free (int, heap, earlyclobber_regclass);
}
/* The function sets up the reverse post-order number of each basic
   block.  */

static void
set_up_bb_rts_numbers (void)
{
  int i;
  int *rts_order;

  rts_order = xmalloc (sizeof (int) * (n_basic_blocks - NUM_FIXED_BLOCKS));
  flow_reverse_top_sort_order_compute (rts_order);
  for (i = 0; i < n_basic_blocks - NUM_FIXED_BLOCKS; i++)
    BB_INFO_BY_INDEX (rts_order[i])->rts_number = i;
  free (rts_order);
}
/* Compare function for sorting blocks in reverse postorder.  */

static int
rpost_cmp (const void *bb1, const void *bb2)
{
  basic_block b1 = *(basic_block *) bb1, b2 = *(basic_block *) bb2;

  return BB_INFO (b2)->rts_number - BB_INFO (b1)->rts_number;
}
/* Temporary bitmap used for live_pavin, live_pavout calculation.  */
static bitmap temp_bitmap;

DEF_VEC_P(basic_block);
DEF_VEC_ALLOC_P(basic_block,heap);

/* The function calculates partial register availability according to
   the following equations:

     bb.live_pavin
       = empty for the entry block
       | union (live_pavout of predecessors) & global_live_at_start
     bb.live_pavout = union (bb.live_pavin - bb.killed, bb.avloc)
                      & global_live_at_end  */
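/* A worked example of the equations (hypothetical block numbers):
   suppose pseudo 130 is used and then set inside loop block B2 and is
   never set before the loop.  Standard liveness makes 130 live at the
   start of B2 and therefore at the end of the preheader B1.  Under the
   equations, B1.live_pavout never contains 130 (130 is in neither
   B1.avloc nor any live_pavout reaching B1), while B2.live_pavout does
   contain it via B2.avloc, and the back edge keeps it in
   B2.live_pavin.  make_accurate_live_analysis below then ANDs B1's
   global_live_at_end with B1.live_pavout, removing 130 there and
   eliminating its spurious conflicts before the loop.  */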
static void
calculate_reg_pav (void)
{
  basic_block bb, succ;
  edge e;
  int i, nel;
  VEC(basic_block,heap) *bbs, *new_bbs, *temp;
  basic_block *bb_array;
  sbitmap wset;

  bbs = VEC_alloc (basic_block, heap, n_basic_blocks);
  new_bbs = VEC_alloc (basic_block, heap, n_basic_blocks);
  temp_bitmap = BITMAP_ALLOC (NULL);
  FOR_EACH_BB (bb)
    {
      VEC_quick_push (basic_block, bbs, bb);
    }
  wset = sbitmap_alloc (n_basic_blocks + 1);
  while (VEC_length (basic_block, bbs))
    {
      bb_array = VEC_address (basic_block, bbs);
      nel = VEC_length (basic_block, bbs);
      qsort (bb_array, nel, sizeof (basic_block), rpost_cmp);
      sbitmap_zero (wset);
      for (i = 0; i < nel; i++)
        {
          edge_iterator ei;
          struct bb_info *bb_info;
          bitmap bb_live_pavin, bb_live_pavout;

          bb = bb_array[i];
          bb_info = BB_INFO (bb);
          bb_live_pavin = bb_info->live_pavin;
          bb_live_pavout = bb_info->live_pavout;
          FOR_EACH_EDGE (e, ei, bb->preds)
            {
              basic_block pred = e->src;

              if (pred->index != ENTRY_BLOCK)
                bitmap_ior_into (bb_live_pavin, BB_INFO (pred)->live_pavout);
            }
          bitmap_and_into (bb_live_pavin, bb->il.rtl->global_live_at_start);
          bitmap_ior_and_compl (temp_bitmap, bb_info->avloc,
                                bb_live_pavin, bb_info->killed);
          bitmap_and_into (temp_bitmap, bb->il.rtl->global_live_at_end);
          if (! bitmap_equal_p (temp_bitmap, bb_live_pavout))
            {
              bitmap_copy (bb_live_pavout, temp_bitmap);
              FOR_EACH_EDGE (e, ei, bb->succs)
                {
                  succ = e->dest;
                  if (succ->index != EXIT_BLOCK
                      && !TEST_BIT (wset, succ->index))
                    {
                      SET_BIT (wset, succ->index);
                      VEC_quick_push (basic_block, new_bbs, succ);
                    }
                }
            }
        }
      /* Swap the work lists and continue with the blocks whose
         live_pavout changed on this round.  */
      temp = bbs;
      bbs = new_bbs;
      new_bbs = temp;
      VEC_truncate (basic_block, new_bbs, 0);
    }
  sbitmap_free (wset);
  BITMAP_FREE (temp_bitmap);
  VEC_free (basic_block, heap, new_bbs);
  VEC_free (basic_block, heap, bbs);
}
/* The function modifies partial availability information for two
   special cases to prevent incorrect work of the subsequent passes
   with the accurate live information based on the partial
   availability.  */

static void
modify_reg_pav (void)
{
  basic_block bb;
  struct bb_info *bb_info;
#ifdef STACK_REGS
  int i;
  HARD_REG_SET zero, stack_hard_regs, used;
  bitmap stack_regs;

  CLEAR_HARD_REG_SET (zero);
  CLEAR_HARD_REG_SET (stack_hard_regs);
  for (i = FIRST_STACK_REG; i <= LAST_STACK_REG; i++)
    SET_HARD_REG_BIT (stack_hard_regs, i);
  stack_regs = BITMAP_ALLOC (NULL);
  for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
    {
      COPY_HARD_REG_SET (used, reg_class_contents[reg_preferred_class (i)]);
      IOR_HARD_REG_SET (used, reg_class_contents[reg_alternate_class (i)]);
      AND_HARD_REG_SET (used, stack_hard_regs);
      GO_IF_HARD_REG_EQUAL (used, zero, skip);
      bitmap_set_bit (stack_regs, i);
    skip:
      ;
    }
#endif
  FOR_EACH_BB (bb)
    {
      bb_info = BB_INFO (bb);

      /* Reload can assign the same hard register to an uninitialized
         pseudo-register and an early clobbered pseudo-register in an
         insn if the pseudo-register is used for the first time in the
         given BB and is not live at the BB start.  To prevent this we
         do not change life information for such pseudo-registers.  */
      bitmap_ior_into (bb_info->live_pavin, bb_info->earlyclobber);
#ifdef STACK_REGS
      /* We cannot use the same stack register for an uninitialized
         pseudo-register and another live pseudo-register, because if
         the uninitialized pseudo-register dies, the subsequent
         reg-stack pass will be confused (it will believe that the
         other register dies).  */
      bitmap_ior_into (bb_info->live_pavin, stack_regs);
#endif
    }
#ifdef STACK_REGS
  BITMAP_FREE (stack_regs);
#endif
}
/* The following function makes live information more accurate by
   modifying global_live_at_start and global_live_at_end of basic
   blocks.

   The standard GCC life analysis permits registers to live
   uninitialized, for example:

       R is never used
       .....
       Loop:
         R is defined
       ...
       R is used.

   With normal life_analysis, R would be live before "Loop:".
   The result is that R causes many interferences that do not
   serve any purpose.

   After the function call a register lives at a program point
   only if it is initialized on a path from the CFG entry to the
   program point.  */

static void
make_accurate_live_analysis (void)
{
  basic_block bb;
  struct bb_info *bb_info;

  max_regno = max_reg_num ();

  allocate_bb_info ();
  calculate_local_reg_bb_info ();
  set_up_bb_rts_numbers ();
  calculate_reg_pav ();
  modify_reg_pav ();
  FOR_EACH_BB (bb)
    {
      bb_info = BB_INFO (bb);

      bitmap_and_into (bb->il.rtl->global_live_at_start, bb_info->live_pavin);
      bitmap_and_into (bb->il.rtl->global_live_at_end, bb_info->live_pavout);
    }
  free_bb_info ();
}
/* Run the old register allocator.  Return TRUE if we must exit
   rest_of_compilation upon return.  */
static unsigned int
rest_of_handle_global_alloc (void)
{
  bool failure;

  /* If optimizing, allocate remaining pseudo-regs.  Do the reload
     pass fixing up any insns that are invalid.  */
  if (optimize)
    failure = global_alloc (dump_file);
  else
    {
      build_insn_chain (get_insns ());
      failure = reload (get_insns (), 0);
    }

  if (dump_enabled_p (pass_global_alloc.static_pass_number))
    {
      timevar_push (TV_DUMP);
      dump_global_regs (dump_file);
      timevar_pop (TV_DUMP);
    }

  gcc_assert (reload_completed || failure);
  reload_completed = !failure;
  return 0;
}

struct tree_opt_pass pass_global_alloc =
{
  "greg",                               /* name */
  NULL,                                 /* gate */
  rest_of_handle_global_alloc,          /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_GLOBAL_ALLOC,                      /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func | TODO_ggc_collect,    /* todo_flags_finish */
  'g'                                   /* letter */
};