gcc/local-alloc.c
1 /* Allocate registers within a basic block, for GNU compiler.
2 Copyright (C) 1987, 1988, 1991, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation,
4 Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
21 02110-1301, USA. */
23 /* Allocation of hard register numbers to pseudo registers is done in
24 two passes. In this pass we consider only regs that are born and
25 die once within one basic block. We do this one basic block at a
26 time. Then the next pass allocates the registers that remain.
27 Two passes are used because this pass uses methods that work only
28 on linear code, but that do a better job than the general methods
29 used in global_alloc, and more quickly too.
31 The assignments made are recorded in the vector reg_renumber
32 whose space is allocated here. The rtl code itself is not altered.
34 We assign each instruction in the basic block a number
35 which is its order from the beginning of the block.
36 Then we can represent the lifetime of a pseudo register with
37 a pair of numbers, and check for conflicts easily.
38 We can record the availability of hard registers with a
39 HARD_REG_SET for each instruction. The HARD_REG_SET
40 contains 0 or 1 for each hard reg.
42 To avoid register shuffling, we tie registers together when one
43 dies by being copied into another, or dies in an instruction that
44 does arithmetic to produce another. The tied registers are
45 allocated as one. Registers with different reg class preferences
46 can never be tied unless the class preferred by one is a subclass
47 of the one preferred by the other.
49 Tying is represented with "quantity numbers".
50 A non-tied register is given a new quantity number.
51 Tied registers have the same quantity number.
53 We have provision to exempt registers, even when they are contained
54 within the block, that can be tied to others that are not contained in it.
55 This is so that global_alloc could process them both and tie them then.
56 But this is currently disabled since tying in global_alloc is not
57 yet implemented. */
59 /* Pseudos allocated here can be reallocated by global.c if the hard register
60 is used as a spill register. Currently we don't allocate such pseudos
61 here if their preferred class is likely to be used by spills. */
63 #include "config.h"
64 #include "system.h"
65 #include "coretypes.h"
66 #include "tm.h"
67 #include "hard-reg-set.h"
68 #include "rtl.h"
69 #include "tm_p.h"
70 #include "flags.h"
71 #include "regs.h"
72 #include "function.h"
73 #include "insn-config.h"
74 #include "insn-attr.h"
75 #include "recog.h"
76 #include "output.h"
77 #include "toplev.h"
78 #include "except.h"
79 #include "integrate.h"
80 #include "reload.h"
81 #include "ggc.h"
82 #include "timevar.h"
83 #include "tree-pass.h"
85 /* Next quantity number available for allocation. */
87 static int next_qty;
89 /* Information we maintain about each quantity. */
90 struct qty
92 /* The number of refs to quantity Q. */
94 int n_refs;
96 /* The frequency of uses of quantity Q. */
98 int freq;
100 /* Insn number (counting from head of basic block)
101 where quantity Q was born. -1 if birth has not been recorded. */
103 int birth;
105 /* Insn number (counting from head of basic block)
106 where given quantity died. Due to the way tying is done,
107 and the fact that we consider in this pass only regs that die but once,
108 a quantity can die only once. Each quantity's life span
109 is a set of consecutive insns. -1 if death has not been recorded. */
111 int death;
113 /* Number of words needed to hold the data in given quantity.
114 This depends on its machine mode. It is used for these purposes:
115 1. It is used in computing the relative importance of qtys,
116 which determines the order in which we look for regs for them.
117 2. It is used in rules that prevent tying several registers of
118 different sizes in a way that is geometrically impossible
119 (see combine_regs). */
121 int size;
123 /* Number of times a reg tied to given qty lives across a CALL_INSN. */
125 int n_calls_crossed;
127 /* Number of times a reg tied to given qty lives across a CALL_INSN
128 that might throw. */
130 int n_throwing_calls_crossed;
132 /* The register number of one pseudo register whose reg_qty value is Q.
133 This register should be the head of the chain
134 maintained in reg_next_in_qty. */
136 int first_reg;
138 /* Reg class contained in (smaller than) the preferred classes of all
139 the pseudo regs that are tied in given quantity.
140 This is the preferred class for allocating that quantity. */
142 enum reg_class min_class;
144 /* Register class within which we allocate given qty if we can't get
145 its preferred class. */
147 enum reg_class alternate_class;
149 /* This holds the mode of the registers that are tied to given qty,
150 or VOIDmode if registers with differing modes are tied together. */
152 enum machine_mode mode;
154 /* the hard reg number chosen for given quantity,
155 or -1 if none was found. */
157 short phys_reg;
160 static struct qty *qty;
162 /* These fields are kept separately to speed up their clearing. */
164 /* We maintain two hard register sets that indicate suggested hard registers
165 for each quantity. The first, phys_copy_sugg, contains hard registers
166 that are tied to the quantity by a simple copy. The second contains all
167 hard registers that are tied to the quantity via an arithmetic operation.
169 The former register set is given priority for allocation. This tends to
170 eliminate copy insns. */
172 /* Element Q is a set of hard registers that are suggested for quantity Q by
173 copy insns. */
175 static HARD_REG_SET *qty_phys_copy_sugg;
177 /* Element Q is a set of hard registers that are suggested for quantity Q by
178 arithmetic insns. */
180 static HARD_REG_SET *qty_phys_sugg;
182 /* Element Q is the number of suggested registers in qty_phys_copy_sugg. */
184 static short *qty_phys_num_copy_sugg;
186 /* Element Q is the number of suggested registers in qty_phys_sugg. */
188 static short *qty_phys_num_sugg;
190 /* If (REG N) has been assigned a quantity number, is a register number
191 of another register assigned the same quantity number, or -1 for the
192 end of the chain. qty->first_reg points to the head of this chain. */
194 static int *reg_next_in_qty;
196 /* reg_qty[N] (where N is a pseudo reg number) is the qty number of that reg
197 if it is >= 0,
198 or -1 if this register cannot be allocated by local-alloc,
199 or -2 if not known yet.
201 Note that if we see a use or death of pseudo register N with
202 reg_qty[N] == -2, register N must be local to the current block. If
203 it were used in more than one block, we would have reg_qty[N] == -1.
204 This relies on the fact that if reg_basic_block[N] is >= 0, register N
205 will not appear in any other block. We save a considerable number of
206 tests by exploiting this.
208 If N is < FIRST_PSEUDO_REGISTER, reg_qty[N] is undefined and should not
209 be referenced. */
211 static int *reg_qty;
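/* Editorial aid (purely illustrative; the helper below is invented and
   unused by the allocator): the three states of reg_qty[N] described
   above can be decoded as follows.  */
#if 0
static const char *
toy_reg_qty_state (int q)
{
  return q >= 0 ? "assigned to quantity q"
       : q == -1 ? "not allocatable by local-alloc"
       : "block-local, quantity not assigned yet";
}
#endif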
213 /* The offset (in words) of register N within its quantity.
214 This can be nonzero if register N is SImode, and has been tied
215 to a subreg of a DImode register. */
217 static char *reg_offset;
219 /* Vector of substitutions of register numbers,
220 used to map pseudo regs into hardware regs.
221 This is set up as a result of register allocation.
222 Element N is the hard reg assigned to pseudo reg N,
223 or is -1 if no hard reg was assigned.
224 If N is a hard reg number, element N is N. */
226 short *reg_renumber;
228 /* Set of hard registers live at the current point in the scan
229 of the instructions in a basic block. */
231 static HARD_REG_SET regs_live;
233 /* Each set of hard registers indicates registers live at a particular
234 point in the basic block. For N even, regs_live_at[N] says which
235 hard registers are needed *after* insn N/2 (i.e., they may not
236 conflict with the outputs of insn N/2 or the inputs of insn N/2 + 1).
238 If an object is to conflict with the inputs of insn J but not the
239 outputs of insn J + 1, we say it is born at index J*2 - 1. Similarly,
240 if it is to conflict with the outputs of insn J but not the inputs of
241 insn J + 1, it is said to die at index J*2 + 1. */
243 static HARD_REG_SET *regs_live_at;
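/* Editorial sketch (hedged; the toy_ name is invented): with the
   doubled index scheme above, a hard register is available to a
   quantity only if no regs_live_at[] slot spanned by the quantity's
   lifetime mentions it.  A simplified availability test could look
   like this.  */
#if 0
static int
toy_hard_reg_free_over_p (int regno, int born_index, int dead_index)
{
  int ix;
  for (ix = born_index; ix < dead_index; ix++)
    if (TEST_HARD_REG_BIT (regs_live_at[ix], regno))
      return 0;
  return 1;
}
#endif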
245 /* Communicate local vars `insn_number' and `insn'
246 from `block_alloc' to `reg_is_set', `wipe_dead_reg', and `alloc_qty'. */
247 static int this_insn_number;
248 static rtx this_insn;
250 struct equivalence
252 /* Set when an attempt should be made to replace a register
253 with the associated src_p entry. */
255 char replace;
257 /* Set when a REG_EQUIV note is found or created. Use to
258 keep track of what memory accesses might be created later,
259 e.g. by reload. */
261 rtx replacement;
263 rtx *src_p;
265 /* Loop depth is used to recognize equivalences which appear
266 to be present within the same loop (or in an inner loop). */
268 int loop_depth;
270 /* The list of each instruction which initializes this register. */
272 rtx init_insns;
274 /* Nonzero if this had a preexisting REG_EQUIV note. */
276 int is_arg_equivalence;
279 /* reg_equiv[N] (where N is a pseudo reg number) is the equivalence
280 structure for that register. */
282 static struct equivalence *reg_equiv;
284 /* Nonzero if we recorded an equivalence for a LABEL_REF. */
285 static int recorded_label_ref;
287 static void alloc_qty (int, enum machine_mode, int, int);
288 static void validate_equiv_mem_from_store (rtx, rtx, void *);
289 static int validate_equiv_mem (rtx, rtx, rtx);
290 static int equiv_init_varies_p (rtx);
291 static int equiv_init_movable_p (rtx, int);
292 static int contains_replace_regs (rtx);
293 static int memref_referenced_p (rtx, rtx);
294 static int memref_used_between_p (rtx, rtx, rtx);
295 static void update_equiv_regs (void);
296 static void no_equiv (rtx, rtx, void *);
297 static void block_alloc (int);
298 static int qty_sugg_compare (int, int);
299 static int qty_sugg_compare_1 (const void *, const void *);
300 static int qty_compare (int, int);
301 static int qty_compare_1 (const void *, const void *);
302 static int combine_regs (rtx, rtx, int, int, rtx, int);
303 static int reg_meets_class_p (int, enum reg_class);
304 static void update_qty_class (int, int);
305 static void reg_is_set (rtx, rtx, void *);
306 static void reg_is_born (rtx, int);
307 static void wipe_dead_reg (rtx, int);
308 static int find_free_reg (enum reg_class, enum machine_mode, int, int, int,
309 int, int);
310 static void mark_life (int, enum machine_mode, int);
311 static void post_mark_life (int, enum machine_mode, int, int, int);
312 static int no_conflict_p (rtx, rtx, rtx);
313 static int requires_inout (const char *);
315 /* Allocate a new quantity (new within current basic block)
316 for register number REGNO which is born at index BIRTH
317 within the block. MODE and SIZE are info on reg REGNO. */
319 static void
320 alloc_qty (int regno, enum machine_mode mode, int size, int birth)
322 int qtyno = next_qty++;
324 reg_qty[regno] = qtyno;
325 reg_offset[regno] = 0;
326 reg_next_in_qty[regno] = -1;
328 qty[qtyno].first_reg = regno;
329 qty[qtyno].size = size;
330 qty[qtyno].mode = mode;
331 qty[qtyno].birth = birth;
332 qty[qtyno].n_calls_crossed = REG_N_CALLS_CROSSED (regno);
333 qty[qtyno].n_throwing_calls_crossed = REG_N_THROWING_CALLS_CROSSED (regno);
334 qty[qtyno].min_class = reg_preferred_class (regno);
335 qty[qtyno].alternate_class = reg_alternate_class (regno);
336 qty[qtyno].n_refs = REG_N_REFS (regno);
337 qty[qtyno].freq = REG_FREQ (regno);
340 /* Main entry point of this file. */
342 static int
343 local_alloc (void)
345 int i;
346 int max_qty;
347 basic_block b;
349 /* We need to keep track of whether or not we recorded a LABEL_REF so
350 that we know if the jump optimizer needs to be rerun. */
351 recorded_label_ref = 0;
353 /* Leaf functions and non-leaf functions have different needs.
354 If defined, let the machine say what kind of ordering we
355 should use. */
356 #ifdef ORDER_REGS_FOR_LOCAL_ALLOC
357 ORDER_REGS_FOR_LOCAL_ALLOC;
358 #endif
360 /* Promote REG_EQUAL notes to REG_EQUIV notes and adjust status of affected
361 registers. */
362 update_equiv_regs ();
364 /* This sets the maximum number of quantities we can have. Quantity
365 numbers start at zero and we can have one for each pseudo. */
366 max_qty = (max_regno - FIRST_PSEUDO_REGISTER);
368 /* Allocate vectors of temporary data.
369 See the declarations of these variables, above,
370 for what they mean. */
372 qty = XNEWVEC (struct qty, max_qty);
373 qty_phys_copy_sugg = XNEWVEC (HARD_REG_SET, max_qty);
374 qty_phys_num_copy_sugg = XNEWVEC (short, max_qty);
375 qty_phys_sugg = XNEWVEC (HARD_REG_SET, max_qty);
376 qty_phys_num_sugg = XNEWVEC (short, max_qty);
378 reg_qty = XNEWVEC (int, max_regno);
379 reg_offset = XNEWVEC (char, max_regno);
380 reg_next_in_qty = XNEWVEC (int, max_regno);
382 /* Determine which pseudo-registers can be allocated by local-alloc.
383 In general, these are the registers used only in a single block and
384 which only die once.
386 We need not be concerned with which block actually uses the register
387 since we will never see it outside that block. */
389 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
391 if (REG_BASIC_BLOCK (i) >= 0 && REG_N_DEATHS (i) == 1)
392 reg_qty[i] = -2;
393 else
394 reg_qty[i] = -1;
397 /* Force loop below to initialize entire quantity array. */
398 next_qty = max_qty;
400 /* Allocate each block's local registers, block by block. */
402 FOR_EACH_BB (b)
404 /* NEXT_QTY indicates which elements of the `qty_...'
405 vectors might need to be initialized because they were used
406 for the previous block; it is set to the entire array before
407 block 0. Initialize those, with explicit loop if there are few,
408 else with memset. Do not initialize vectors that are
409 explicitly set by `alloc_qty'. */
411 if (next_qty < 6)
413 for (i = 0; i < next_qty; i++)
415 CLEAR_HARD_REG_SET (qty_phys_copy_sugg[i]);
416 qty_phys_num_copy_sugg[i] = 0;
417 CLEAR_HARD_REG_SET (qty_phys_sugg[i]);
418 qty_phys_num_sugg[i] = 0;
421 else
423 #define CLEAR(vector) \
424 memset ((vector), 0, (sizeof (*(vector))) * next_qty);
426 CLEAR (qty_phys_copy_sugg);
427 CLEAR (qty_phys_num_copy_sugg);
428 CLEAR (qty_phys_sugg);
429 CLEAR (qty_phys_num_sugg);
432 next_qty = 0;
434 block_alloc (b->index);
437 free (qty);
438 free (qty_phys_copy_sugg);
439 free (qty_phys_num_copy_sugg);
440 free (qty_phys_sugg);
441 free (qty_phys_num_sugg);
443 free (reg_qty);
444 free (reg_offset);
445 free (reg_next_in_qty);
447 return recorded_label_ref;
450 /* Used for communication between the following two functions: contains
451 a MEM that we wish to ensure remains unchanged. */
452 static rtx equiv_mem;
454 /* Set nonzero if EQUIV_MEM is modified. */
455 static int equiv_mem_modified;
457 /* If EQUIV_MEM is modified by modifying DEST, indicate that it is modified.
458 Called via note_stores. */
460 static void
461 validate_equiv_mem_from_store (rtx dest, rtx set ATTRIBUTE_UNUSED,
462 void *data ATTRIBUTE_UNUSED)
464 if ((REG_P (dest)
465 && reg_overlap_mentioned_p (dest, equiv_mem))
466 || (MEM_P (dest)
467 && true_dependence (dest, VOIDmode, equiv_mem, rtx_varies_p)))
468 equiv_mem_modified = 1;
471 /* Verify that no store between START and the death of REG invalidates
472 MEMREF. MEMREF is invalidated by modifying a register used in MEMREF,
473 by storing into an overlapping memory location, or with a non-const
474 CALL_INSN.
476 Return 1 if MEMREF remains valid. */
478 static int
479 validate_equiv_mem (rtx start, rtx reg, rtx memref)
481 rtx insn;
482 rtx note;
484 equiv_mem = memref;
485 equiv_mem_modified = 0;
487 /* If the memory reference has side effects or is volatile, it isn't a
488 valid equivalence. */
489 if (side_effects_p (memref))
490 return 0;
492 for (insn = start; insn && ! equiv_mem_modified; insn = NEXT_INSN (insn))
494 if (! INSN_P (insn))
495 continue;
497 if (find_reg_note (insn, REG_DEAD, reg))
498 return 1;
500 if (CALL_P (insn) && ! MEM_READONLY_P (memref)
501 && ! CONST_OR_PURE_CALL_P (insn))
502 return 0;
504 note_stores (PATTERN (insn), validate_equiv_mem_from_store, NULL);
506 /* If a register mentioned in MEMREF is modified via an
507 auto-increment, we lose the equivalence. Do the same if one
508 dies; although we could extend the life, it doesn't seem worth
509 the trouble. */
511 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
512 if ((REG_NOTE_KIND (note) == REG_INC
513 || REG_NOTE_KIND (note) == REG_DEAD)
514 && REG_P (XEXP (note, 0))
515 && reg_overlap_mentioned_p (XEXP (note, 0), memref))
516 return 0;
519 return 0;
522 /* Returns nonzero if X may vary; zero if X is known to be invariant. */
524 static int
525 equiv_init_varies_p (rtx x)
527 RTX_CODE code = GET_CODE (x);
528 int i;
529 const char *fmt;
531 switch (code)
533 case MEM:
534 return !MEM_READONLY_P (x) || equiv_init_varies_p (XEXP (x, 0));
536 case CONST:
537 case CONST_INT:
538 case CONST_DOUBLE:
539 case CONST_VECTOR:
540 case SYMBOL_REF:
541 case LABEL_REF:
542 return 0;
544 case REG:
545 return reg_equiv[REGNO (x)].replace == 0 && rtx_varies_p (x, 0);
547 case ASM_OPERANDS:
548 if (MEM_VOLATILE_P (x))
549 return 1;
551 /* Fall through. */
553 default:
554 break;
557 fmt = GET_RTX_FORMAT (code);
558 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
559 if (fmt[i] == 'e')
561 if (equiv_init_varies_p (XEXP (x, i)))
562 return 1;
564 else if (fmt[i] == 'E')
566 int j;
567 for (j = 0; j < XVECLEN (x, i); j++)
568 if (equiv_init_varies_p (XVECEXP (x, i, j)))
569 return 1;
572 return 0;
575 /* Returns nonzero if X (used to initialize register REGNO) is movable.
576 X is only movable if the registers it uses have equivalent initializations
577 which appear to be within the same loop (or in an inner loop) and movable
578 or if they are not candidates for local_alloc and don't vary. */
580 static int
581 equiv_init_movable_p (rtx x, int regno)
583 int i, j;
584 const char *fmt;
585 enum rtx_code code = GET_CODE (x);
587 switch (code)
589 case SET:
590 return equiv_init_movable_p (SET_SRC (x), regno);
592 case CC0:
593 case CLOBBER:
594 return 0;
596 case PRE_INC:
597 case PRE_DEC:
598 case POST_INC:
599 case POST_DEC:
600 case PRE_MODIFY:
601 case POST_MODIFY:
602 return 0;
604 case REG:
605 return (reg_equiv[REGNO (x)].loop_depth >= reg_equiv[regno].loop_depth
606 && reg_equiv[REGNO (x)].replace)
607 || (REG_BASIC_BLOCK (REGNO (x)) < 0 && ! rtx_varies_p (x, 0));
609 case UNSPEC_VOLATILE:
610 return 0;
612 case ASM_OPERANDS:
613 if (MEM_VOLATILE_P (x))
614 return 0;
616 /* Fall through. */
618 default:
619 break;
622 fmt = GET_RTX_FORMAT (code);
623 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
624 switch (fmt[i])
626 case 'e':
627 if (! equiv_init_movable_p (XEXP (x, i), regno))
628 return 0;
629 break;
630 case 'E':
631 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
632 if (! equiv_init_movable_p (XVECEXP (x, i, j), regno))
633 return 0;
634 break;
637 return 1;
640 /* TRUE if X uses any registers for which reg_equiv[REGNO].replace is true. */
642 static int
643 contains_replace_regs (rtx x)
645 int i, j;
646 const char *fmt;
647 enum rtx_code code = GET_CODE (x);
649 switch (code)
651 case CONST_INT:
652 case CONST:
653 case LABEL_REF:
654 case SYMBOL_REF:
655 case CONST_DOUBLE:
656 case CONST_VECTOR:
657 case PC:
658 case CC0:
659 case HIGH:
660 return 0;
662 case REG:
663 return reg_equiv[REGNO (x)].replace;
665 default:
666 break;
669 fmt = GET_RTX_FORMAT (code);
670 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
671 switch (fmt[i])
673 case 'e':
674 if (contains_replace_regs (XEXP (x, i)))
675 return 1;
676 break;
677 case 'E':
678 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
679 if (contains_replace_regs (XVECEXP (x, i, j)))
680 return 1;
681 break;
684 return 0;
687 /* TRUE if X references a memory location that would be affected by a store
688 to MEMREF. */
690 static int
691 memref_referenced_p (rtx memref, rtx x)
693 int i, j;
694 const char *fmt;
695 enum rtx_code code = GET_CODE (x);
697 switch (code)
699 case CONST_INT:
700 case CONST:
701 case LABEL_REF:
702 case SYMBOL_REF:
703 case CONST_DOUBLE:
704 case CONST_VECTOR:
705 case PC:
706 case CC0:
707 case HIGH:
708 case LO_SUM:
709 return 0;
711 case REG:
712 return (reg_equiv[REGNO (x)].replacement
713 && memref_referenced_p (memref,
714 reg_equiv[REGNO (x)].replacement));
716 case MEM:
717 if (true_dependence (memref, VOIDmode, x, rtx_varies_p))
718 return 1;
719 break;
721 case SET:
722 /* If we are setting a MEM, it doesn't count (its address does), but any
723 other SET_DEST that has a MEM in it is referencing the MEM. */
724 if (MEM_P (SET_DEST (x)))
726 if (memref_referenced_p (memref, XEXP (SET_DEST (x), 0)))
727 return 1;
729 else if (memref_referenced_p (memref, SET_DEST (x)))
730 return 1;
732 return memref_referenced_p (memref, SET_SRC (x));
734 default:
735 break;
738 fmt = GET_RTX_FORMAT (code);
739 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
740 switch (fmt[i])
742 case 'e':
743 if (memref_referenced_p (memref, XEXP (x, i)))
744 return 1;
745 break;
746 case 'E':
747 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
748 if (memref_referenced_p (memref, XVECEXP (x, i, j)))
749 return 1;
750 break;
753 return 0;
756 /* TRUE if some insn in the range (START, END] references a memory location
757 that would be affected by a store to MEMREF. */
759 static int
760 memref_used_between_p (rtx memref, rtx start, rtx end)
762 rtx insn;
764 for (insn = NEXT_INSN (start); insn != NEXT_INSN (end);
765 insn = NEXT_INSN (insn))
767 if (!INSN_P (insn))
768 continue;
770 if (memref_referenced_p (memref, PATTERN (insn)))
771 return 1;
773 /* Nonconst functions may access memory. */
774 if (CALL_P (insn)
775 && (! CONST_OR_PURE_CALL_P (insn)
776 || pure_call_p (insn)))
777 return 1;
780 return 0;
783 /* Find registers that are equivalent to a single value throughout the
784 compilation (either because they can be referenced in memory or are set once
785 from a single constant). Lower their priority for a register.
787 If such a register is only referenced once, try substituting its value
788 into the using insn. If it succeeds, we can eliminate the register
789 completely.
791 Initialize the REG_EQUIV_INIT array of initializing insns. */
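/* Editorial illustration (schematic RTL, not a verbatim dump): a
   typical case handled below is a pseudo loaded exactly once from a
   memory location that is never modified afterwards, e.g.

     (set (reg 70) (mem (plus (reg fp) (const_int -8))))

   If validate_equiv_mem confirms the MEM stays unchanged for the life
   of (reg 70), the insn gains a REG_EQUIV note for that MEM, so reload
   can later use the stack slot directly instead of tying up a hard
   register.  */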
793 static void
794 update_equiv_regs (void)
796 rtx insn;
797 basic_block bb;
798 int loop_depth;
799 regset_head cleared_regs;
800 int clear_regnos = 0;
802 reg_equiv = XCNEWVEC (struct equivalence, max_regno);
803 INIT_REG_SET (&cleared_regs);
804 reg_equiv_init = ggc_alloc_cleared (max_regno * sizeof (rtx));
805 reg_equiv_init_size = max_regno;
807 init_alias_analysis ();
809 /* Scan the insns and find which registers have equivalences. Do this
810 in a separate scan of the insns because (due to -fcse-follow-jumps)
811 a register can be set below its use. */
812 FOR_EACH_BB (bb)
814 loop_depth = bb->loop_depth;
816 for (insn = BB_HEAD (bb);
817 insn != NEXT_INSN (BB_END (bb));
818 insn = NEXT_INSN (insn))
820 rtx note;
821 rtx set;
822 rtx dest, src;
823 int regno;
825 if (! INSN_P (insn))
826 continue;
828 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
829 if (REG_NOTE_KIND (note) == REG_INC)
830 no_equiv (XEXP (note, 0), note, NULL);
832 set = single_set (insn);
834 /* If this insn contains more (or less) than a single SET,
835 only mark all destinations as having no known equivalence. */
836 if (set == 0)
838 note_stores (PATTERN (insn), no_equiv, NULL);
839 continue;
841 else if (GET_CODE (PATTERN (insn)) == PARALLEL)
843 int i;
845 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
847 rtx part = XVECEXP (PATTERN (insn), 0, i);
848 if (part != set)
849 note_stores (part, no_equiv, NULL);
853 dest = SET_DEST (set);
854 src = SET_SRC (set);
856 /* See if this is setting up the equivalence between an argument
857 register and its stack slot. */
858 note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
859 if (note)
861 gcc_assert (REG_P (dest));
862 regno = REGNO (dest);
864 /* Note that we don't want to clear reg_equiv_init even if there
865 are multiple sets of this register. */
866 reg_equiv[regno].is_arg_equivalence = 1;
868 /* Record for reload that this is an equivalencing insn. */
869 if (rtx_equal_p (src, XEXP (note, 0)))
870 reg_equiv_init[regno]
871 = gen_rtx_INSN_LIST (VOIDmode, insn, reg_equiv_init[regno]);
873 /* Continue normally in case this is a candidate for
874 replacements. */
877 if (!optimize)
878 continue;
880 /* We only handle the case of a pseudo register being set
881 once, or always to the same value. */
882 /* ??? The mn10200 port breaks if we add equivalences for
883 values that need an ADDRESS_REGS register and set them equivalent
884 to a MEM of a pseudo. The actual problem is in the over-conservative
885 handling of INPADDR_ADDRESS / INPUT_ADDRESS / INPUT triples in
886 calculate_needs, but we traditionally work around this problem
887 here by rejecting equivalences when the destination is in a register
888 that's likely spilled. This is fragile, of course, since the
889 preferred class of a pseudo depends on all instructions that set
890 or use it. */
892 if (!REG_P (dest)
893 || (regno = REGNO (dest)) < FIRST_PSEUDO_REGISTER
894 || reg_equiv[regno].init_insns == const0_rtx
895 || (CLASS_LIKELY_SPILLED_P (reg_preferred_class (regno))
896 && MEM_P (src) && ! reg_equiv[regno].is_arg_equivalence))
898 /* This might be setting a SUBREG of a pseudo, a pseudo that is
899 also set somewhere else to a constant. */
900 note_stores (set, no_equiv, NULL);
901 continue;
904 note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
906 /* cse sometimes generates function invariants, but doesn't put a
907 REG_EQUAL note on the insn. Since this note would be redundant,
908 there's no point creating it earlier than here. */
909 if (! note && ! rtx_varies_p (src, 0))
910 note = set_unique_reg_note (insn, REG_EQUAL, copy_rtx (src));
912 /* Don't bother considering a REG_EQUAL note containing an EXPR_LIST
913 since it represents a function call. */
914 if (note && GET_CODE (XEXP (note, 0)) == EXPR_LIST)
915 note = NULL_RTX;
917 if (REG_N_SETS (regno) != 1
918 && (! note
919 || rtx_varies_p (XEXP (note, 0), 0)
920 || (reg_equiv[regno].replacement
921 && ! rtx_equal_p (XEXP (note, 0),
922 reg_equiv[regno].replacement))))
924 no_equiv (dest, set, NULL);
925 continue;
927 /* Record this insn as initializing this register. */
928 reg_equiv[regno].init_insns
929 = gen_rtx_INSN_LIST (VOIDmode, insn, reg_equiv[regno].init_insns);
931 /* If this register is known to be equal to a constant, record that
932 it is always equivalent to the constant. */
933 if (note && ! rtx_varies_p (XEXP (note, 0), 0))
934 PUT_MODE (note, (enum machine_mode) REG_EQUIV);
936 /* If this insn introduces a "constant" register, decrease the priority
937 of that register. Record this insn if the register is only used once
938 more and the equivalence value is the same as our source.
940 The latter condition is checked for two reasons: First, it is an
941 indication that it may be more efficient to actually emit the insn
942 as written (if no registers are available, reload will substitute
943 the equivalence). Secondly, it avoids problems with any registers
944 dying in this insn whose death notes would be missed.
946 If we don't have a REG_EQUIV note, see if this insn is loading
947 a register used only in one basic block from a MEM. If so, and the
948 MEM remains unchanged for the life of the register, add a REG_EQUIV
949 note. */
951 note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
953 if (note == 0 && REG_BASIC_BLOCK (regno) >= 0
954 && MEM_P (SET_SRC (set))
955 && validate_equiv_mem (insn, dest, SET_SRC (set)))
956 REG_NOTES (insn) = note = gen_rtx_EXPR_LIST (REG_EQUIV,
957 copy_rtx (SET_SRC (set)),
958 REG_NOTES (insn));
960 if (note)
962 int regno = REGNO (dest);
963 rtx x = XEXP (note, 0);
965 /* If we haven't done so, record for reload that this is an
966 equivalencing insn. */
967 if (!reg_equiv[regno].is_arg_equivalence)
968 reg_equiv_init[regno]
969 = gen_rtx_INSN_LIST (VOIDmode, insn, reg_equiv_init[regno]);
971 /* Record whether or not we created a REG_EQUIV note for a LABEL_REF.
972 We might end up substituting the LABEL_REF for uses of the
973 pseudo here or later. That kind of transformation may turn an
974 indirect jump into a direct jump, in which case we must rerun the
975 jump optimizer to ensure that the JUMP_LABEL fields are valid. */
976 if (GET_CODE (x) == LABEL_REF
977 || (GET_CODE (x) == CONST
978 && GET_CODE (XEXP (x, 0)) == PLUS
979 && (GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF)))
980 recorded_label_ref = 1;
982 reg_equiv[regno].replacement = x;
983 reg_equiv[regno].src_p = &SET_SRC (set);
984 reg_equiv[regno].loop_depth = loop_depth;
986 /* Don't mess with things live during setjmp. */
987 if (REG_LIVE_LENGTH (regno) >= 0 && optimize)
989 /* Note that the statement below does not affect the priority
990 in local-alloc! */
991 REG_LIVE_LENGTH (regno) *= 2;
993 /* If the register is referenced exactly twice, meaning it is
994 set once and used once, indicate that the reference may be
995 replaced by the equivalence we computed above. Do this
996 even if the register is only used in one block so that
997 dependencies can be handled where the last register is
998 used in a different block (i.e. HIGH / LO_SUM sequences)
999 and to reduce the number of registers alive across
1000 calls. */
1002 if (REG_N_REFS (regno) == 2
1003 && (rtx_equal_p (x, src)
1004 || ! equiv_init_varies_p (src))
1005 && NONJUMP_INSN_P (insn)
1006 && equiv_init_movable_p (PATTERN (insn), regno))
1007 reg_equiv[regno].replace = 1;
1013 if (!optimize)
1014 goto out;
1016 /* A second pass, to gather additional equivalences with memory. This needs
1017 to be done after we know which registers we are going to replace. */
1019 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1021 rtx set, src, dest;
1022 unsigned regno;
1024 if (! INSN_P (insn))
1025 continue;
1027 set = single_set (insn);
1028 if (! set)
1029 continue;
1031 dest = SET_DEST (set);
1032 src = SET_SRC (set);
1034 /* If this sets a MEM to the contents of a REG that is only used
1035 in a single basic block, see if the register is always equivalent
1036 to that memory location and if moving the store from INSN to the
1037 insn that set REG is safe. If so, put a REG_EQUIV note on the
1038 initializing insn.
1040 Don't add a REG_EQUIV note if the insn already has one. The existing
1041 REG_EQUIV is likely more useful than the one we are adding.
1043 If one of the regs in the address has reg_equiv[REGNO].replace set,
1044 then we can't add this REG_EQUIV note. The reg_equiv[REGNO].replace
1045 optimization may move the set of this register immediately before
1046 insn, which puts it after reg_equiv[REGNO].init_insns, and hence
1047 the mention in the REG_EQUIV note would be to an uninitialized
1048 pseudo. */
1050 if (MEM_P (dest) && REG_P (src)
1051 && (regno = REGNO (src)) >= FIRST_PSEUDO_REGISTER
1052 && REG_BASIC_BLOCK (regno) >= 0
1053 && REG_N_SETS (regno) == 1
1054 && reg_equiv[regno].init_insns != 0
1055 && reg_equiv[regno].init_insns != const0_rtx
1056 && ! find_reg_note (XEXP (reg_equiv[regno].init_insns, 0),
1057 REG_EQUIV, NULL_RTX)
1058 && ! contains_replace_regs (XEXP (dest, 0)))
1060 rtx init_insn = XEXP (reg_equiv[regno].init_insns, 0);
1061 if (validate_equiv_mem (init_insn, src, dest)
1062 && ! memref_used_between_p (dest, init_insn, insn))
1064 REG_NOTES (init_insn)
1065 = gen_rtx_EXPR_LIST (REG_EQUIV, copy_rtx (dest),
1066 REG_NOTES (init_insn));
1067 /* This insn makes the equivalence, not the one initializing
1068 the register. */
1069 reg_equiv_init[regno]
1070 = gen_rtx_INSN_LIST (VOIDmode, insn, NULL_RTX);
1075 /* Now scan all regs killed in an insn to see if any of them are
1076 registers only used that once. If so, see if we can replace the
1077 reference with the equivalent form. If we can, delete the
1078 initializing reference and this register will go away. If we
1079 can't replace the reference, and the initializing reference is
1080 within the same loop (or in an inner loop), then move the register
1081 initialization just before the use, so that they are in the same
1082 basic block. */
1083 FOR_EACH_BB_REVERSE (bb)
1085 loop_depth = bb->loop_depth;
1086 for (insn = BB_END (bb);
1087 insn != PREV_INSN (BB_HEAD (bb));
1088 insn = PREV_INSN (insn))
1090 rtx link;
1092 if (! INSN_P (insn))
1093 continue;
1095 /* Don't substitute into a non-local goto; this confuses the CFG. */
1096 if (JUMP_P (insn)
1097 && find_reg_note (insn, REG_NON_LOCAL_GOTO, NULL_RTX))
1098 continue;
1100 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1102 if (REG_NOTE_KIND (link) == REG_DEAD
1103 /* Make sure this insn still refers to the register. */
1104 && reg_mentioned_p (XEXP (link, 0), PATTERN (insn)))
1106 int regno = REGNO (XEXP (link, 0));
1107 rtx equiv_insn;
1109 if (! reg_equiv[regno].replace
1110 || reg_equiv[regno].loop_depth < loop_depth)
1111 continue;
1113 /* reg_equiv[REGNO].replace gets set only when
1114 REG_N_REFS[REGNO] is 2, i.e. the register is set
1115 once and used once. (If it were only set, but not used,
1116 flow would have deleted the setting insns.) Hence
1117 there can only be one insn in reg_equiv[REGNO].init_insns. */
1118 gcc_assert (reg_equiv[regno].init_insns
1119 && !XEXP (reg_equiv[regno].init_insns, 1));
1120 equiv_insn = XEXP (reg_equiv[regno].init_insns, 0);
1122 /* We may not move instructions that can throw, since
1123 that changes basic block boundaries and we are not
1124 prepared to adjust the CFG to match. */
1125 if (can_throw_internal (equiv_insn))
1126 continue;
1128 if (asm_noperands (PATTERN (equiv_insn)) < 0
1129 && validate_replace_rtx (regno_reg_rtx[regno],
1130 *(reg_equiv[regno].src_p), insn))
1132 rtx equiv_link;
1133 rtx last_link;
1134 rtx note;
1136 /* Find the last note. */
1137 for (last_link = link; XEXP (last_link, 1);
1138 last_link = XEXP (last_link, 1))
1141 /* Append the REG_DEAD notes from equiv_insn. */
1142 equiv_link = REG_NOTES (equiv_insn);
1143 while (equiv_link)
1145 note = equiv_link;
1146 equiv_link = XEXP (equiv_link, 1);
1147 if (REG_NOTE_KIND (note) == REG_DEAD)
1149 remove_note (equiv_insn, note);
1150 XEXP (last_link, 1) = note;
1151 XEXP (note, 1) = NULL_RTX;
1152 last_link = note;
1156 remove_death (regno, insn);
1157 REG_N_REFS (regno) = 0;
1158 REG_FREQ (regno) = 0;
1159 delete_insn (equiv_insn);
1161 reg_equiv[regno].init_insns
1162 = XEXP (reg_equiv[regno].init_insns, 1);
1164 /* Remember to clear REGNO from all basic block's live
1165 info. */
1166 SET_REGNO_REG_SET (&cleared_regs, regno);
1167 clear_regnos++;
1168 reg_equiv_init[regno] = NULL_RTX;
1170 /* Move the initialization of the register to just before
1171 INSN. Update the flow information. */
1172 else if (PREV_INSN (insn) != equiv_insn)
1174 rtx new_insn;
1176 new_insn = emit_insn_before (PATTERN (equiv_insn), insn);
1177 REG_NOTES (new_insn) = REG_NOTES (equiv_insn);
1178 REG_NOTES (equiv_insn) = 0;
1180 /* Make sure this insn is recognized before
1181 reload begins, otherwise
1182 eliminate_regs_in_insn will die. */
1183 INSN_CODE (new_insn) = INSN_CODE (equiv_insn);
1185 delete_insn (equiv_insn);
1187 XEXP (reg_equiv[regno].init_insns, 0) = new_insn;
1189 REG_BASIC_BLOCK (regno) = bb->index;
1190 REG_N_CALLS_CROSSED (regno) = 0;
1191 REG_N_THROWING_CALLS_CROSSED (regno) = 0;
1192 REG_LIVE_LENGTH (regno) = 2;
1194 if (insn == BB_HEAD (bb))
1195 BB_HEAD (bb) = PREV_INSN (insn);
1197 /* Remember to clear REGNO from all basic block's live
1198 info. */
1199 SET_REGNO_REG_SET (&cleared_regs, regno);
1200 clear_regnos++;
1201 reg_equiv_init[regno]
1202 = gen_rtx_INSN_LIST (VOIDmode, new_insn, NULL_RTX);
1209 /* Clear all dead REGNOs from all basic block's live info. */
1210 if (clear_regnos)
1212 unsigned j;
1214 if (clear_regnos > 8)
1216 FOR_EACH_BB (bb)
1218 AND_COMPL_REG_SET (bb->il.rtl->global_live_at_start,
1219 &cleared_regs);
1220 AND_COMPL_REG_SET (bb->il.rtl->global_live_at_end,
1221 &cleared_regs);
1224 else
1226 reg_set_iterator rsi;
1227 EXECUTE_IF_SET_IN_REG_SET (&cleared_regs, 0, j, rsi)
1229 FOR_EACH_BB (bb)
1231 CLEAR_REGNO_REG_SET (bb->il.rtl->global_live_at_start, j);
1232 CLEAR_REGNO_REG_SET (bb->il.rtl->global_live_at_end, j);
1238 out:
1239 /* Clean up. */
1240 end_alias_analysis ();
1241 CLEAR_REG_SET (&cleared_regs);
1242 free (reg_equiv);
1245 /* Mark REG as having no known equivalence.
1246 Some instructions might have been processed before and furnished
1247 with REG_EQUIV notes for this register; these notes will have to be
1248 removed.
1249 STORE is the piece of RTL that does the non-constant / conflicting
1250 assignment - a SET, CLOBBER or REG_INC note. It is currently not used,
1251 but needs to be there because this function is called from note_stores. */
1252 static void
1253 no_equiv (rtx reg, rtx store ATTRIBUTE_UNUSED, void *data ATTRIBUTE_UNUSED)
1255 int regno;
1256 rtx list;
1258 if (!REG_P (reg))
1259 return;
1260 regno = REGNO (reg);
1261 list = reg_equiv[regno].init_insns;
1262 if (list == const0_rtx)
1263 return;
1264 reg_equiv[regno].init_insns = const0_rtx;
1265 reg_equiv[regno].replacement = NULL_RTX;
1266 /* This doesn't matter for equivalences made for argument registers; we
1267 should keep their initialization insns. */
1268 if (reg_equiv[regno].is_arg_equivalence)
1269 return;
1270 reg_equiv_init[regno] = NULL_RTX;
1271 for (; list; list = XEXP (list, 1))
1273 rtx insn = XEXP (list, 0);
1274 remove_note (insn, find_reg_note (insn, REG_EQUIV, NULL_RTX));
1278 /* Allocate hard regs to the pseudo regs used only within block number B.
1279 Only the pseudos that die but once can be handled. */
1281 static void
1282 block_alloc (int b)
1284 int i, q;
1285 rtx insn;
1286 rtx note, hard_reg;
1287 int insn_number = 0;
1288 int insn_count = 0;
1289 int max_uid = get_max_uid ();
1290 int *qty_order;
1291 int no_conflict_combined_regno = -1;
1293 /* Count the instructions in the basic block. */
1295 insn = BB_END (BASIC_BLOCK (b));
1296 while (1)
1298 if (!NOTE_P (insn))
1300 ++insn_count;
1301 gcc_assert (insn_count <= max_uid);
1303 if (insn == BB_HEAD (BASIC_BLOCK (b)))
1304 break;
1305 insn = PREV_INSN (insn);
1308 /* +2 to leave room for a post_mark_life at the last insn and for
1309 the birth of a CLOBBER in the first insn. */
1310 regs_live_at = XCNEWVEC (HARD_REG_SET, 2 * insn_count + 2);
1312 /* Initialize table of hardware registers currently live. */
1314 REG_SET_TO_HARD_REG_SET (regs_live,
1315 BASIC_BLOCK (b)->il.rtl->global_live_at_start);
1317 /* This loop scans the instructions of the basic block
1318 and assigns quantities to registers.
1319 It computes which registers to tie. */
1321 insn = BB_HEAD (BASIC_BLOCK (b));
1322 while (1)
1324 if (!NOTE_P (insn))
1325 insn_number++;
1327 if (INSN_P (insn))
1329 rtx link, set;
1330 int win = 0;
1331 rtx r0, r1 = NULL_RTX;
1332 int combined_regno = -1;
1333 int i;
1335 this_insn_number = insn_number;
1336 this_insn = insn;
1338 extract_insn (insn);
1339 which_alternative = -1;
1341 /* Is this insn suitable for tying two registers?
1342 If so, try doing that.
1343 Suitable insns are those with at least two operands and where
1344 operand 0 is an output that is a register that is not
1345 earlyclobber.
1347 We can tie operand 0 with some operand that dies in this insn.
1348 First look for operands that are required to be in the same
1349 register as operand 0. If we find such, only try tying that
1350 operand or one that can be put into that operand if the
1351 operation is commutative. If we don't find an operand
1352 that is required to be in the same register as operand 0,
1353 we can tie with any operand.
1355 Subregs in place of regs are also ok.
1357 If tying is done, WIN is set nonzero. */
1359 if (optimize
1360 && recog_data.n_operands > 1
1361 && recog_data.constraints[0][0] == '='
1362 && recog_data.constraints[0][1] != '&')
1364 /* If non-negative, is an operand that must match operand 0. */
1365 int must_match_0 = -1;
1366 /* Counts number of alternatives that require a match with
1367 operand 0. */
1368 int n_matching_alts = 0;
1370 for (i = 1; i < recog_data.n_operands; i++)
1372 const char *p = recog_data.constraints[i];
1373 int this_match = requires_inout (p);
1375 n_matching_alts += this_match;
1376 if (this_match == recog_data.n_alternatives)
1377 must_match_0 = i;
1380 r0 = recog_data.operand[0];
1381 for (i = 1; i < recog_data.n_operands; i++)
1383 /* Skip this operand if we found an operand that
1384 must match operand 0 and this operand isn't it
1385 and can't be made to be it by commutativity. */
1387 if (must_match_0 >= 0 && i != must_match_0
1388 && ! (i == must_match_0 + 1
1389 && recog_data.constraints[i-1][0] == '%')
1390 && ! (i == must_match_0 - 1
1391 && recog_data.constraints[i][0] == '%'))
1392 continue;
1394 /* Likewise if each alternative has some operand that
1395 must match operand zero. In that case, skip any
1396 operand that doesn't list operand 0 since we know that
1397 the operand always conflicts with operand 0. We
1398 ignore commutativity in this case to keep things simple. */
1399 if (n_matching_alts == recog_data.n_alternatives
1400 && 0 == requires_inout (recog_data.constraints[i]))
1401 continue;
1403 r1 = recog_data.operand[i];
1405 /* If the operand is an address, find a register in it.
1406 There may be more than one register, but we only try one
1407 of them. */
1408 if (recog_data.constraints[i][0] == 'p'
1409 || EXTRA_ADDRESS_CONSTRAINT (recog_data.constraints[i][0],
1410 recog_data.constraints[i]))
1411 while (GET_CODE (r1) == PLUS || GET_CODE (r1) == MULT)
1412 r1 = XEXP (r1, 0);
1414 /* Avoid making a call-saved register unnecessarily
1415 clobbered. */
1416 hard_reg = get_hard_reg_initial_reg (cfun, r1);
1417 if (hard_reg != NULL_RTX)
1419 if (REG_P (hard_reg)
1420 && REGNO (hard_reg) < FIRST_PSEUDO_REGISTER
1421 && !call_used_regs[REGNO (hard_reg)])
1422 continue;
1425 if (REG_P (r0) || GET_CODE (r0) == SUBREG)
1427 /* We have two priorities for hard register preferences.
1428 If we have a move insn or an insn whose first input
1429 can only be in the same register as the output, give
1430 priority to an equivalence found from that insn. */
1431 int may_save_copy
1432 = (r1 == recog_data.operand[i] && must_match_0 >= 0);
1434 if (REG_P (r1) || GET_CODE (r1) == SUBREG)
1435 win = combine_regs (r1, r0, may_save_copy,
1436 insn_number, insn, 0);
1438 if (win)
1439 break;
1443 /* Recognize an insn sequence with an ultimate result
1444 which can safely overlap one of the inputs.
1445 The sequence begins with a CLOBBER of its result,
1446 and ends with an insn that copies the result to itself
1447 and has a REG_EQUAL note for an equivalent formula.
1448 That note indicates what the inputs are.
1449 The result and the input can overlap if each insn in
1450 the sequence either doesn't mention the input
1451 or has a REG_NO_CONFLICT note to inhibit the conflict.
1453 We do the combining test at the CLOBBER so that the
1454 destination register won't have had a quantity number
1455 assigned, since that would prevent combining. */
1457 if (optimize
1458 && GET_CODE (PATTERN (insn)) == CLOBBER
1459 && (r0 = XEXP (PATTERN (insn), 0),
1460 REG_P (r0))
1461 && (link = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0
1462 && XEXP (link, 0) != 0
1463 && NONJUMP_INSN_P (XEXP (link, 0))
1464 && (set = single_set (XEXP (link, 0))) != 0
1465 && SET_DEST (set) == r0 && SET_SRC (set) == r0
1466 && (note = find_reg_note (XEXP (link, 0), REG_EQUAL,
1467 NULL_RTX)) != 0)
1469 if (r1 = XEXP (note, 0), REG_P (r1)
1470 /* Check that we have such a sequence. */
1471 && no_conflict_p (insn, r0, r1))
1472 win = combine_regs (r1, r0, 1, insn_number, insn, 1);
1473 else if (GET_RTX_FORMAT (GET_CODE (XEXP (note, 0)))[0] == 'e'
1474 && (r1 = XEXP (XEXP (note, 0), 0),
1475 REG_P (r1) || GET_CODE (r1) == SUBREG)
1476 && no_conflict_p (insn, r0, r1))
1477 win = combine_regs (r1, r0, 0, insn_number, insn, 1);
1479 /* Here we care if the operation to be computed is
1480 commutative. */
1481 else if (COMMUTATIVE_P (XEXP (note, 0))
1482 && (r1 = XEXP (XEXP (note, 0), 1),
1483 (REG_P (r1) || GET_CODE (r1) == SUBREG))
1484 && no_conflict_p (insn, r0, r1))
1485 win = combine_regs (r1, r0, 0, insn_number, insn, 1);
1487 /* If we did combine something, show the register number
1488 in question so that we know to ignore its death. */
1489 if (win)
1490 no_conflict_combined_regno = REGNO (r1);
1493 /* If registers were just tied, set COMBINED_REGNO
1494 to the number of the register used in this insn
1495 that was tied to the register set in this insn.
1496 This register's qty should not be "killed". */
1498 if (win)
1500 while (GET_CODE (r1) == SUBREG)
1501 r1 = SUBREG_REG (r1);
1502 combined_regno = REGNO (r1);
1505 /* Mark the death of everything that dies in this instruction,
1506 except for anything that was just combined. */
1508 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1509 if (REG_NOTE_KIND (link) == REG_DEAD
1510 && REG_P (XEXP (link, 0))
1511 && combined_regno != (int) REGNO (XEXP (link, 0))
1512 && (no_conflict_combined_regno != (int) REGNO (XEXP (link, 0))
1513 || ! find_reg_note (insn, REG_NO_CONFLICT,
1514 XEXP (link, 0))))
1515 wipe_dead_reg (XEXP (link, 0), 0);
1517 /* Allocate qty numbers for all registers local to this block
1518 that are born (set) in this instruction.
1519 A pseudo that already has a qty is not changed. */
1521 note_stores (PATTERN (insn), reg_is_set, NULL);
1523 /* If anything is set in this insn and then unused, mark it as dying
1524 after this insn, so it will conflict with our outputs. This
1525 can't match with something that combined, and it doesn't matter
1526 if it did. Do this after the calls to reg_is_set since these
1527 die after, not during, the current insn. */
1529 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1530 if (REG_NOTE_KIND (link) == REG_UNUSED
1531 && REG_P (XEXP (link, 0)))
1532 wipe_dead_reg (XEXP (link, 0), 1);
1534 /* If this is an insn that has a REG_RETVAL note pointing at a
1535 CLOBBER insn, we have reached the end of a REG_NO_CONFLICT
1536 block, so clear any register number that combined within it. */
1537 if ((note = find_reg_note (insn, REG_RETVAL, NULL_RTX)) != 0
1538 && NONJUMP_INSN_P (XEXP (note, 0))
1539 && GET_CODE (PATTERN (XEXP (note, 0))) == CLOBBER)
1540 no_conflict_combined_regno = -1;
1543 /* Set the registers live after INSN_NUMBER. Note that we never
1544 record the registers live before the block's first insn, since no
1545 pseudos we care about are live before that insn. */
1547 IOR_HARD_REG_SET (regs_live_at[2 * insn_number], regs_live);
1548 IOR_HARD_REG_SET (regs_live_at[2 * insn_number + 1], regs_live);
1550 if (insn == BB_END (BASIC_BLOCK (b)))
1551 break;
1553 insn = NEXT_INSN (insn);
1556 /* Now every register that is local to this basic block
1557 should have been given a quantity, or else -1 meaning ignore it.
1558 Every quantity should have a known birth and death.
1560 Order the qtys so we assign them registers in order of the
1561 number of suggested registers they need so we allocate those with
1562 the most restrictive needs first. */
1564 qty_order = XNEWVEC (int, next_qty);
1565 for (i = 0; i < next_qty; i++)
1566 qty_order[i] = i;
1568 #define EXCHANGE(I1, I2) \
1569 { i = qty_order[I1]; qty_order[I1] = qty_order[I2]; qty_order[I2] = i; }
1571 switch (next_qty)
1573 case 3:
1574 /* Make qty_order[2] be the one to allocate last. */
1575 if (qty_sugg_compare (0, 1) > 0)
1576 EXCHANGE (0, 1);
1577 if (qty_sugg_compare (1, 2) > 0)
1578 EXCHANGE (2, 1);
1580 /* ... Fall through ... */
1581 case 2:
1582 /* Put the best one to allocate in qty_order[0]. */
1583 if (qty_sugg_compare (0, 1) > 0)
1584 EXCHANGE (0, 1);
1586 /* ... Fall through ... */
1588 case 1:
1589 case 0:
1590 /* Nothing to do here. */
1591 break;
1593 default:
1594 qsort (qty_order, next_qty, sizeof (int), qty_sugg_compare_1);
1597 /* Try to put each quantity in a suggested physical register, if it has one.
1598 This may cause registers to be allocated that otherwise wouldn't be, but
1599 this seems acceptable in local allocation (unlike global allocation). */
1600 for (i = 0; i < next_qty; i++)
1602 q = qty_order[i];
1603 if (qty_phys_num_sugg[q] != 0 || qty_phys_num_copy_sugg[q] != 0)
1604 qty[q].phys_reg = find_free_reg (qty[q].min_class, qty[q].mode, q,
1605 0, 1, qty[q].birth, qty[q].death);
1606 else
1607 qty[q].phys_reg = -1;
1610 /* Order the qtys so we assign them registers in order of
1611 decreasing length of life. Normally call qsort, but if we
1612 have only a very small number of quantities, sort them ourselves. */
1614 for (i = 0; i < next_qty; i++)
1615 qty_order[i] = i;
1617 #define EXCHANGE(I1, I2) \
1618 { i = qty_order[I1]; qty_order[I1] = qty_order[I2]; qty_order[I2] = i; }
1620 switch (next_qty)
1622 case 3:
1623 /* Make qty_order[2] be the one to allocate last. */
1624 if (qty_compare (0, 1) > 0)
1625 EXCHANGE (0, 1);
1626 if (qty_compare (1, 2) > 0)
1627 EXCHANGE (2, 1);
1629 /* ... Fall through ... */
1630 case 2:
1631 /* Put the best one to allocate in qty_order[0]. */
1632 if (qty_compare (0, 1) > 0)
1633 EXCHANGE (0, 1);
1635 /* ... Fall through ... */
1637 case 1:
1638 case 0:
1639 /* Nothing to do here. */
1640 break;
1642 default:
1643 qsort (qty_order, next_qty, sizeof (int), qty_compare_1);
1646 /* Now for each qty that is not a hardware register,
1647 look for a hardware register to put it in.
1648 First try the register class that is cheapest for this qty,
1649 if there is more than one class. */
1651 for (i = 0; i < next_qty; i++)
1653 q = qty_order[i];
1654 if (qty[q].phys_reg < 0)
1656 #ifdef INSN_SCHEDULING
1657 /* These values represent the adjusted lifetime of a qty so
1658 that it conflicts with qtys which appear near the start/end
1659 of this qty's lifetime.
1661 The purpose behind extending the lifetime of this qty is to
1662 discourage the register allocator from creating false
1663 dependencies.
1665 The adjustment value is chosen to indicate that this qty
1666 conflicts with all the qtys in the instructions immediately
1667 before and after the lifetime of this qty.
1669 Experiments have shown that higher values tend to hurt
1670 overall code performance.
1672 If allocation using the extended lifetime fails we will try
1673 again with the qty's unadjusted lifetime. */
1674 int fake_birth = MAX (0, qty[q].birth - 2 + qty[q].birth % 2);
1675 int fake_death = MIN (insn_number * 2 + 1,
1676 qty[q].death + 2 - qty[q].death % 2);
1677 #endif
1679 if (N_REG_CLASSES > 1)
1681 #ifdef INSN_SCHEDULING
1682 /* We try to avoid using hard registers allocated to qtys which
1683 are born immediately after this qty or die immediately before
1684 this qty.
1686 This optimization is only appropriate when we will run
1687 a scheduling pass after reload and we are not optimizing
1688 for code size. */
1689 if (flag_schedule_insns_after_reload
1690 && !optimize_size
1691 && !SMALL_REGISTER_CLASSES)
1693 qty[q].phys_reg = find_free_reg (qty[q].min_class,
1694 qty[q].mode, q, 0, 0,
1695 fake_birth, fake_death);
1696 if (qty[q].phys_reg >= 0)
1697 continue;
1699 #endif
1700 qty[q].phys_reg = find_free_reg (qty[q].min_class,
1701 qty[q].mode, q, 0, 0,
1702 qty[q].birth, qty[q].death);
1703 if (qty[q].phys_reg >= 0)
1704 continue;
1707 #ifdef INSN_SCHEDULING
1708 /* Similarly, avoid false dependencies. */
1709 if (flag_schedule_insns_after_reload
1710 && !optimize_size
1711 && !SMALL_REGISTER_CLASSES
1712 && qty[q].alternate_class != NO_REGS)
1713 qty[q].phys_reg = find_free_reg (qty[q].alternate_class,
1714 qty[q].mode, q, 0, 0,
1715 fake_birth, fake_death);
1716 #endif
1717 if (qty[q].alternate_class != NO_REGS)
1718 qty[q].phys_reg = find_free_reg (qty[q].alternate_class,
1719 qty[q].mode, q, 0, 0,
1720 qty[q].birth, qty[q].death);
1724 /* Now propagate the register assignments
1725 to the pseudo regs belonging to the qtys. */
1727 for (q = 0; q < next_qty; q++)
1728 if (qty[q].phys_reg >= 0)
1730 for (i = qty[q].first_reg; i >= 0; i = reg_next_in_qty[i])
1731 reg_renumber[i] = qty[q].phys_reg + reg_offset[i];
1734 /* Clean up. */
1735 free (regs_live_at);
1736 free (qty_order);
1739 /* Compare two quantities' priority for getting real registers.
1740 We give shorter-lived quantities higher priority.
1741 Quantities with more references are also preferred, as are quantities that
1742 require multiple registers. This is the identical prioritization as
1743 done by global-alloc.
1745 We used to give preference to registers with *longer* lives, but using
1746 the same algorithm in both local- and global-alloc can speed up execution
1747 of some programs by as much as a factor of three! */
1749 /* Note that the quotient will never be bigger than
1750 the value of floor_log2 times the maximum number of
1751 times a register can occur in one insn (surely less than 100)
1752 weighted by frequency (max REG_FREQ_MAX).
1753 Multiplying this by 10000/REG_FREQ_MAX can't overflow.
1754 QTY_CMP_PRI is also used by qty_sugg_compare. */
1756 #define QTY_CMP_PRI(q) \
1757 ((int) (((double) (floor_log2 (qty[q].n_refs) * qty[q].freq * qty[q].size) \
1758 / (qty[q].death - qty[q].birth)) * (10000 / REG_FREQ_MAX)))
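/* Editorial worked example (numbers invented; REG_FREQ_MAX taken as
   1000 purely for the arithmetic): a quantity with n_refs = 4,
   freq = 1000, size = 1 and a lifetime of 10 slots yields
   floor_log2 (4) * 1000 * 1 / 10 = 200, scaled by 10000 / 1000 = 10,
   so QTY_CMP_PRI == 2000.  Halving the lifetime doubles the priority,
   which is how shorter-lived quantities come out ahead.  */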
1760 static int
1761 qty_compare (int q1, int q2)
1763 return QTY_CMP_PRI (q2) - QTY_CMP_PRI (q1);
1766 static int
1767 qty_compare_1 (const void *q1p, const void *q2p)
1769 int q1 = *(const int *) q1p, q2 = *(const int *) q2p;
1770 int tem = QTY_CMP_PRI (q2) - QTY_CMP_PRI (q1);
1772 if (tem != 0)
1773 return tem;
1775 /* If qtys are equally good, sort by qty number,
1776 so that the results of qsort leave nothing to chance. */
1777 return q1 - q2;
1780 /* Compare two quantities' priority for getting real registers. This version
1781 is called for quantities that have suggested hard registers. First priority
1782 goes to quantities that have copy preferences, then to those that have
1783 normal preferences. Within those groups, quantities with the lower
1784 number of preferences have the highest priority. Of those, we use the same
1785 algorithm as above. */
1787 #define QTY_CMP_SUGG(q) \
1788 (qty_phys_num_copy_sugg[q] \
1789 ? qty_phys_num_copy_sugg[q] \
1790 : qty_phys_num_sugg[q] * FIRST_PSEUDO_REGISTER)
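/* Editorial note (not from the original sources): scaling ordinary
   suggestions by FIRST_PSEUDO_REGISTER means a quantity with one copy
   suggestion compares as 1 while one with a single ordinary suggestion
   compares as FIRST_PSEUDO_REGISTER; the ascending sort in
   qty_sugg_compare therefore places copy-preferred quantities first,
   and within each group fewer suggestions sort earlier.  */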
1792 static int
1793 qty_sugg_compare (int q1, int q2)
1795 int tem = QTY_CMP_SUGG (q1) - QTY_CMP_SUGG (q2);
1797 if (tem != 0)
1798 return tem;
1800 return QTY_CMP_PRI (q2) - QTY_CMP_PRI (q1);
1803 static int
1804 qty_sugg_compare_1 (const void *q1p, const void *q2p)
1806 int q1 = *(const int *) q1p, q2 = *(const int *) q2p;
1807 int tem = QTY_CMP_SUGG (q1) - QTY_CMP_SUGG (q2);
1809 if (tem != 0)
1810 return tem;
1812 tem = QTY_CMP_PRI (q2) - QTY_CMP_PRI (q1);
1813 if (tem != 0)
1814 return tem;
1816 /* If qtys are equally good, sort by qty number,
1817 so that the results of qsort leave nothing to chance. */
1818 return q1 - q2;
1821 #undef QTY_CMP_SUGG
1822 #undef QTY_CMP_PRI
1824 /* Attempt to combine the two registers (rtx's) USEDREG and SETREG.
1825 Returns 1 if it has done so, or 0 if it cannot.
1827 Combining registers means marking them as having the same quantity
1828 and adjusting the offsets within the quantity if either of
1829 them is a SUBREG.
1831 We don't actually combine a hard reg with a pseudo; instead
1832 we just record the hard reg as the suggestion for the pseudo's quantity.
1833 If we really combined them, we could lose if the pseudo lives
1834 across an insn that clobbers the hard reg (eg, movmem).
1836 ALREADY_DEAD is nonzero if USEDREG is known to be dead even though
1837 there is no REG_DEAD note on INSN. This occurs during the processing
1838 of REG_NO_CONFLICT blocks.
1840 MAY_SAVE_COPY is nonzero if this insn is simply copying USEDREG to
1841 SETREG or if the input and output must share a register.
1842 In that case, we record a hard reg suggestion in QTY_PHYS_COPY_SUGG.
1844 There are elaborate checks for the validity of combining. */
1846 static int
1847 combine_regs (rtx usedreg, rtx setreg, int may_save_copy, int insn_number,
1848 rtx insn, int already_dead)
1850 int ureg, sreg;
1851 int offset = 0;
1852 int usize, ssize;
1853 int sqty;
1855 /* Determine the numbers and sizes of registers being used. If a subreg
1856 is present that does not change the entire register, don't consider
1857 this a copy insn. */
1859 while (GET_CODE (usedreg) == SUBREG)
1861 rtx subreg = SUBREG_REG (usedreg);
1863 if (REG_P (subreg))
1865 if (GET_MODE_SIZE (GET_MODE (subreg)) > UNITS_PER_WORD)
1866 may_save_copy = 0;
1868 if (REGNO (subreg) < FIRST_PSEUDO_REGISTER)
1869 offset += subreg_regno_offset (REGNO (subreg),
1870 GET_MODE (subreg),
1871 SUBREG_BYTE (usedreg),
1872 GET_MODE (usedreg));
1873 else
1874 offset += (SUBREG_BYTE (usedreg)
1875 / REGMODE_NATURAL_SIZE (GET_MODE (usedreg)));
1878 usedreg = subreg;
1881 if (!REG_P (usedreg))
1882 return 0;
1884 ureg = REGNO (usedreg);
1885 if (ureg < FIRST_PSEUDO_REGISTER)
1886 usize = hard_regno_nregs[ureg][GET_MODE (usedreg)];
1887 else
1888 usize = ((GET_MODE_SIZE (GET_MODE (usedreg))
1889 + (REGMODE_NATURAL_SIZE (GET_MODE (usedreg)) - 1))
1890 / REGMODE_NATURAL_SIZE (GET_MODE (usedreg)));
1892 while (GET_CODE (setreg) == SUBREG)
1894 rtx subreg = SUBREG_REG (setreg);
1896 if (REG_P (subreg))
1898 if (GET_MODE_SIZE (GET_MODE (subreg)) > UNITS_PER_WORD)
1899 may_save_copy = 0;
1901 if (REGNO (subreg) < FIRST_PSEUDO_REGISTER)
1902 offset -= subreg_regno_offset (REGNO (subreg),
1903 GET_MODE (subreg),
1904 SUBREG_BYTE (setreg),
1905 GET_MODE (setreg));
1906 else
1907 offset -= (SUBREG_BYTE (setreg)
1908 / REGMODE_NATURAL_SIZE (GET_MODE (setreg)));
1911 setreg = subreg;
1914 if (!REG_P (setreg))
1915 return 0;
1917 sreg = REGNO (setreg);
1918 if (sreg < FIRST_PSEUDO_REGISTER)
1919 ssize = hard_regno_nregs[sreg][GET_MODE (setreg)];
1920 else
1921 ssize = ((GET_MODE_SIZE (GET_MODE (setreg))
1922 + (REGMODE_NATURAL_SIZE (GET_MODE (setreg)) - 1))
1923 / REGMODE_NATURAL_SIZE (GET_MODE (setreg)));
1925 /* If UREG is a pseudo-register that hasn't already been assigned a
1926 quantity number, it means that it is not local to this block or dies
1927 more than once. In either event, we can't do anything with it. */
1928 if ((ureg >= FIRST_PSEUDO_REGISTER && reg_qty[ureg] < 0)
1929 /* Do not combine registers unless one fits within the other. */
1930 || (offset > 0 && usize + offset > ssize)
1931 || (offset < 0 && usize + offset < ssize)
1932 /* Do not combine with a smaller already-assigned object
1933 if that smaller object is already combined with something bigger. */
1934 || (ssize > usize && ureg >= FIRST_PSEUDO_REGISTER
1935 && usize < qty[reg_qty[ureg]].size)
1936 /* Can't combine if SREG is not a register we can allocate. */
1937 || (sreg >= FIRST_PSEUDO_REGISTER && reg_qty[sreg] == -1)
1938 /* Don't combine with a pseudo mentioned in a REG_NO_CONFLICT note.
1939 These have already been taken care of. This probably wouldn't
1940 combine anyway, but don't take any chances. */
1941 || (ureg >= FIRST_PSEUDO_REGISTER
1942 && find_reg_note (insn, REG_NO_CONFLICT, usedreg))
1943 /* Don't tie something to itself. In most cases it would make no
1944 difference, but it would screw up if the reg being tied to itself
1945 also dies in this insn. */
1946 || ureg == sreg
1947 /* Don't try to connect two different hardware registers. */
1948 || (ureg < FIRST_PSEUDO_REGISTER && sreg < FIRST_PSEUDO_REGISTER)
1949 /* Don't connect two different machine modes if they have different
1950 implications as to which registers may be used. */
1951 || !MODES_TIEABLE_P (GET_MODE (usedreg), GET_MODE (setreg)))
1952 return 0;
1954 /* Now, if UREG is a hard reg and SREG is a pseudo, record the hard reg in
1955 qty_phys_sugg for the pseudo instead of tying them.
1957 Return "failure" so that the lifespan of UREG is terminated here;
1958 that way the two lifespans will be disjoint and nothing will prevent
1959 the pseudo reg from being given this hard reg. */
1961 if (ureg < FIRST_PSEUDO_REGISTER)
1963 /* Allocate a quantity number so we have a place to put our
1964 suggestions. */
1965 if (reg_qty[sreg] == -2)
1966 reg_is_born (setreg, 2 * insn_number);
1968 if (reg_qty[sreg] >= 0)
1970 if (may_save_copy
1971 && ! TEST_HARD_REG_BIT (qty_phys_copy_sugg[reg_qty[sreg]], ureg))
1973 SET_HARD_REG_BIT (qty_phys_copy_sugg[reg_qty[sreg]], ureg);
1974 qty_phys_num_copy_sugg[reg_qty[sreg]]++;
1976 else if (! TEST_HARD_REG_BIT (qty_phys_sugg[reg_qty[sreg]], ureg))
1978 SET_HARD_REG_BIT (qty_phys_sugg[reg_qty[sreg]], ureg);
1979 qty_phys_num_sugg[reg_qty[sreg]]++;
1982 return 0;
1985 /* Similarly for SREG a hard register and UREG a pseudo register. */
1987 if (sreg < FIRST_PSEUDO_REGISTER)
1989 if (may_save_copy
1990 && ! TEST_HARD_REG_BIT (qty_phys_copy_sugg[reg_qty[ureg]], sreg))
1992 SET_HARD_REG_BIT (qty_phys_copy_sugg[reg_qty[ureg]], sreg);
1993 qty_phys_num_copy_sugg[reg_qty[ureg]]++;
1995 else if (! TEST_HARD_REG_BIT (qty_phys_sugg[reg_qty[ureg]], sreg))
1997 SET_HARD_REG_BIT (qty_phys_sugg[reg_qty[ureg]], sreg);
1998 qty_phys_num_sugg[reg_qty[ureg]]++;
2000 return 0;
2003 /* At this point we know that SREG and UREG are both pseudos.
2004 Do nothing if SREG already has a quantity or is a register that we
2005 don't allocate. */
2006 if (reg_qty[sreg] >= -1
2007 /* If we are not going to let any regs live across calls,
2008 don't tie a call-crossing reg to a non-call-crossing reg. */
2009 || (current_function_has_nonlocal_label
2010 && ((REG_N_CALLS_CROSSED (ureg) > 0)
2011 != (REG_N_CALLS_CROSSED (sreg) > 0))))
2012 return 0;
2014 /* We don't already know about SREG, so tie it to UREG
2015 if this is the last use of UREG, provided the classes they want
2016 are compatible. */
2018 if ((already_dead || find_regno_note (insn, REG_DEAD, ureg))
2019 && reg_meets_class_p (sreg, qty[reg_qty[ureg]].min_class))
2021 /* Add SREG to UREG's quantity. */
2022 sqty = reg_qty[ureg];
2023 reg_qty[sreg] = sqty;
2024 reg_offset[sreg] = reg_offset[ureg] + offset;
2025 reg_next_in_qty[sreg] = qty[sqty].first_reg;
2026 qty[sqty].first_reg = sreg;
2028 /* If SREG's reg class is smaller, set qty[SQTY].min_class. */
2029 update_qty_class (sqty, sreg);
2031 /* Update info about quantity SQTY. */
2032 qty[sqty].n_calls_crossed += REG_N_CALLS_CROSSED (sreg);
2033 qty[sqty].n_throwing_calls_crossed
2034 += REG_N_THROWING_CALLS_CROSSED (sreg);
2035 qty[sqty].n_refs += REG_N_REFS (sreg);
2036 qty[sqty].freq += REG_FREQ (sreg);
2037 if (usize < ssize)
2039 int i;
2041 for (i = qty[sqty].first_reg; i >= 0; i = reg_next_in_qty[i])
2042 reg_offset[i] -= offset;
2044 qty[sqty].size = ssize;
2045 qty[sqty].mode = GET_MODE (setreg);
2048 else
2049 return 0;
2051 return 1;
2054 /* Return 1 if the preferred class of REG allows it to be tied
2055 to a quantity or register whose class is CLASS.
2056 True if REG's reg class either contains or is contained in CLASS. */
2058 static int
2059 reg_meets_class_p (int reg, enum reg_class class)
2061 enum reg_class rclass = reg_preferred_class (reg);
2062 return (reg_class_subset_p (rclass, class)
2063 || reg_class_subset_p (class, rclass));
2066 /* Update the class of QTYNO assuming that REG is being tied to it. */
2068 static void
2069 update_qty_class (int qtyno, int reg)
2071 enum reg_class rclass = reg_preferred_class (reg);
2072 if (reg_class_subset_p (rclass, qty[qtyno].min_class))
2073 qty[qtyno].min_class = rclass;
2075 rclass = reg_alternate_class (reg);
2076 if (reg_class_subset_p (rclass, qty[qtyno].alternate_class))
2077 qty[qtyno].alternate_class = rclass;
2080 /* Handle something which alters the value of an rtx REG.
2082 REG is whatever is set or clobbered. SETTER is the rtx that
2083 is modifying the register.
2085 If it is not really a register, we do nothing.
2086 The file-global variables `this_insn' and `this_insn_number'
2087 carry info from `block_alloc'. */
2089 static void
2090 reg_is_set (rtx reg, rtx setter, void *data ATTRIBUTE_UNUSED)
2092 /* Note that note_stores will only pass us a SUBREG if it is a SUBREG of
2093 a hard register. These may actually not exist any more. */
2095 if (GET_CODE (reg) != SUBREG
2096 && !REG_P (reg))
2097 return;
2099 /* Mark this register as being born. If it is used in a CLOBBER, mark
2100 it as being born halfway between the previous insn and this insn so that
2101 it conflicts with our inputs but not the outputs of the previous insn. */
2103 reg_is_born (reg, 2 * this_insn_number - (GET_CODE (setter) == CLOBBER));
2106 /* Handle beginning of the life of register REG.
2107 BIRTH is the index at which this is happening. */
2109 static void
2110 reg_is_born (rtx reg, int birth)
2112 int regno;
2114 if (GET_CODE (reg) == SUBREG)
2116 regno = REGNO (SUBREG_REG (reg));
2117 if (regno < FIRST_PSEUDO_REGISTER)
2118 regno = subreg_regno (reg);
2120 else
2121 regno = REGNO (reg);
2123 if (regno < FIRST_PSEUDO_REGISTER)
2125 mark_life (regno, GET_MODE (reg), 1);
2127 /* If the register was to have been born earlier than the present
2128 insn, mark it as live where it is actually born. */
2129 if (birth < 2 * this_insn_number)
2130 post_mark_life (regno, GET_MODE (reg), 1, birth, 2 * this_insn_number);
2132 else
2134 if (reg_qty[regno] == -2)
2135 alloc_qty (regno, GET_MODE (reg), PSEUDO_REGNO_SIZE (regno), birth);
2137 /* If this register has a quantity number, show that it isn't dead. */
2138 if (reg_qty[regno] >= 0)
2139 qty[reg_qty[regno]].death = -1;
2143 /* Record the death of REG in the current insn. If OUTPUT_P is nonzero,
2144 REG is an output that is dying (i.e., it is never used), otherwise it
2145 is an input (the normal case).
2146 If OUTPUT_P is 1, then we extend the life past the end of this insn. */
2148 static void
2149 wipe_dead_reg (rtx reg, int output_p)
2151 int regno = REGNO (reg);
2153 /* If this insn has multiple results,
2154 and the dead reg is used in one of the results,
2155 extend its life to after this insn,
2156 so it won't get allocated together with any other result of this insn.
2158 It is unsafe to use !single_set here since it will ignore an unused
2159 output. Just because an output is unused does not mean the compiler
2160 can assume the side effect will not occur. Consider if REG appears
2161 in the address of an output and we reload the output. If we allocate
2162 REG to the same hard register as an unused output we could set the hard
2163 register before the output reload insn. */
2164 if (GET_CODE (PATTERN (this_insn)) == PARALLEL
2165 && multiple_sets (this_insn))
2167 int i;
2168 for (i = XVECLEN (PATTERN (this_insn), 0) - 1; i >= 0; i--)
2170 rtx set = XVECEXP (PATTERN (this_insn), 0, i);
2171 if (GET_CODE (set) == SET
2172 && !REG_P (SET_DEST (set))
2173 && !rtx_equal_p (reg, SET_DEST (set))
2174 && reg_overlap_mentioned_p (reg, SET_DEST (set)))
2175 output_p = 1;
2179 /* If this register is used in an auto-increment address, then extend its
2180 life to after this insn, so that it won't get allocated together with
2181 the result of this insn. */
2182 if (! output_p && find_regno_note (this_insn, REG_INC, regno))
2183 output_p = 1;
2185 if (regno < FIRST_PSEUDO_REGISTER)
2187 mark_life (regno, GET_MODE (reg), 0);
2189 /* If a hard register is dying as an output, mark it as in use at
2190 the beginning of this insn (the above statement would cause this
2191 not to happen). */
2192 if (output_p)
2193 post_mark_life (regno, GET_MODE (reg), 1,
2194 2 * this_insn_number, 2 * this_insn_number + 1);
2197 else if (reg_qty[regno] >= 0)
2198 qty[reg_qty[regno]].death = 2 * this_insn_number + output_p;
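/* Note (added): adding OUTPUT_P above pushes the recorded death just past
this insn, so a pseudo whose life was extended still overlaps, and
therefore conflicts with, anything born at this insn.  */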
2201 /* Find a block of SIZE words of hard regs in reg_class CLASS
2202 that can hold something of machine-mode MODE
2203 (but actually we test only the first of the block for holding MODE)
2204 and still free between insn BORN_INDEX and insn DEAD_INDEX,
2205 and return the number of the first of them.
2206 Return -1 if such a block cannot be found.
2207 If QTYNO crosses calls, insist on a register preserved by calls,
2208 unless ACCEPT_CALL_CLOBBERED is nonzero.
2210 If JUST_TRY_SUGGESTED is nonzero, only try to see if the suggested
2211 register is available. If not, return -1. */
2213 static int
2214 find_free_reg (enum reg_class class, enum machine_mode mode, int qtyno,
2215 int accept_call_clobbered, int just_try_suggested,
2216 int born_index, int dead_index)
2218 int i, ins;
2219 HARD_REG_SET first_used, used;
2220 #ifdef ELIMINABLE_REGS
2221 static const struct {const int from, to; } eliminables[] = ELIMINABLE_REGS;
2222 #endif
2224 /* Validate our parameters. */
2225 gcc_assert (born_index >= 0 && born_index <= dead_index);
2227 /* Don't let a pseudo live in a reg across a function call
2228 if we might get a nonlocal goto. */
2229 if (current_function_has_nonlocal_label
2230 && qty[qtyno].n_calls_crossed > 0)
2231 return -1;
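/* Note (added): choose the base set of excluded registers.  If a
call-clobbered register is acceptable (it will be saved around calls),
start from the call-fixed registers; if no calls are crossed, start from
the fixed registers; otherwise start from every register that calls may
clobber.  The registers live over [BORN_INDEX, DEAD_INDEX) and those
outside CLASS are then added to the excluded set below.  */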
2233 if (accept_call_clobbered)
2234 COPY_HARD_REG_SET (used, call_fixed_reg_set);
2235 else if (qty[qtyno].n_calls_crossed == 0)
2236 COPY_HARD_REG_SET (used, fixed_reg_set);
2237 else
2238 COPY_HARD_REG_SET (used, call_used_reg_set);
2240 if (accept_call_clobbered)
2241 IOR_HARD_REG_SET (used, losing_caller_save_reg_set);
2243 for (ins = born_index; ins < dead_index; ins++)
2244 IOR_HARD_REG_SET (used, regs_live_at[ins]);
2246 IOR_COMPL_HARD_REG_SET (used, reg_class_contents[(int) class]);
2248 /* Don't use the frame pointer reg in local-alloc even if
2249 we may omit the frame pointer, because if we do that and then we
2250 need a frame pointer, reload won't know how to move the pseudo
2251 to another hard reg. It can move only regs made by global-alloc.
2253 This is true of any register that can be eliminated. */
2254 #ifdef ELIMINABLE_REGS
2255 for (i = 0; i < (int) ARRAY_SIZE (eliminables); i++)
2256 SET_HARD_REG_BIT (used, eliminables[i].from);
2257 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2258 /* If FRAME_POINTER_REGNUM is not a real register, then protect the one
2259 that it might be eliminated into. */
2260 SET_HARD_REG_BIT (used, HARD_FRAME_POINTER_REGNUM);
2261 #endif
2262 #else
2263 SET_HARD_REG_BIT (used, FRAME_POINTER_REGNUM);
2264 #endif
2266 #ifdef CANNOT_CHANGE_MODE_CLASS
2267 cannot_change_mode_set_regs (&used, mode, qty[qtyno].first_reg);
2268 #endif
2270 /* Normally, the registers that can be used for the first register in
2271 a multi-register quantity are the same as those that can be used for
2272 subsequent registers. However, if just trying suggested registers,
2273 restrict our consideration to them. If there are copy-suggested
2274 registers, try them. Otherwise, try the arithmetic-suggested
2275 registers. */
2276 COPY_HARD_REG_SET (first_used, used);
2278 if (just_try_suggested)
2280 if (qty_phys_num_copy_sugg[qtyno] != 0)
2281 IOR_COMPL_HARD_REG_SET (first_used, qty_phys_copy_sugg[qtyno]);
2282 else
2283 IOR_COMPL_HARD_REG_SET (first_used, qty_phys_sugg[qtyno]);
2286 /* If all registers are excluded, we can't do anything. */
2287 GO_IF_HARD_REG_SUBSET (reg_class_contents[(int) ALL_REGS], first_used, fail);
2289 /* If at least one would be suitable, test each hard reg. */
2291 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2293 #ifdef REG_ALLOC_ORDER
2294 int regno = reg_alloc_order[i];
2295 #else
2296 int regno = i;
2297 #endif
2298 if (! TEST_HARD_REG_BIT (first_used, regno)
2299 && HARD_REGNO_MODE_OK (regno, mode)
2300 && (qty[qtyno].n_calls_crossed == 0
2301 || accept_call_clobbered
2302 || ! HARD_REGNO_CALL_PART_CLOBBERED (regno, mode)))
2304 int j;
2305 int size1 = hard_regno_nregs[regno][mode];
2306 for (j = 1; j < size1 && ! TEST_HARD_REG_BIT (used, regno + j); j++);
2307 if (j == size1)
2309 /* Mark that this register is in use between its birth and death
2310 insns. */
2311 post_mark_life (regno, mode, 1, born_index, dead_index);
2312 return regno;
2314 #ifndef REG_ALLOC_ORDER
2315 /* Skip starting points we know will lose. */
2316 i += j;
2317 #endif
2321 fail:
2322 /* If we are only trying suggested registers, then we have just tried the
2323 copy-suggested registers; if there are also arithmetic-suggested registers,
2324 try those now. */
2326 /* If it would be profitable to allocate a call-clobbered register
2327 and save and restore it around calls, do that. */
2328 if (just_try_suggested && qty_phys_num_copy_sugg[qtyno] != 0
2329 && qty_phys_num_sugg[qtyno] != 0)
2331 /* Don't try the copy-suggested regs again. */
2332 qty_phys_num_copy_sugg[qtyno] = 0;
2333 return find_free_reg (class, mode, qtyno, accept_call_clobbered, 1,
2334 born_index, dead_index);
2337 /* We need not check to see if the current function has nonlocal
2338 labels because we don't put any pseudos that are live over calls in
2339 registers in that case. Avoid putting pseudos crossing calls that
2340 might throw into call used registers. */
2342 if (! accept_call_clobbered
2343 && flag_caller_saves
2344 && ! just_try_suggested
2345 && qty[qtyno].n_calls_crossed != 0
2346 && qty[qtyno].n_throwing_calls_crossed == 0
2347 && CALLER_SAVE_PROFITABLE (qty[qtyno].n_refs,
2348 qty[qtyno].n_calls_crossed))
2350 i = find_free_reg (class, mode, qtyno, 1, 0, born_index, dead_index);
2351 if (i >= 0)
2352 caller_save_needed = 1;
2353 return i;
2355 return -1;
2358 /* Mark that REGNO with machine-mode MODE is live starting from the current
2359 insn (if LIFE is nonzero) or dead starting at the current insn (if LIFE
2360 is zero). */
2362 static void
2363 mark_life (int regno, enum machine_mode mode, int life)
2365 int j = hard_regno_nregs[regno][mode];
2366 if (life)
2367 while (--j >= 0)
2368 SET_HARD_REG_BIT (regs_live, regno + j);
2369 else
2370 while (--j >= 0)
2371 CLEAR_HARD_REG_BIT (regs_live, regno + j);
2374 /* Mark register number REGNO (with machine-mode MODE) as live (if LIFE
2375 is nonzero) or dead (if LIFE is zero) from insn number BIRTH (inclusive)
2376 to insn number DEATH (exclusive). */
2378 static void
2379 post_mark_life (int regno, enum machine_mode mode, int life, int birth,
2380 int death)
2382 int j = hard_regno_nregs[regno][mode];
2383 HARD_REG_SET this_reg;
2385 CLEAR_HARD_REG_SET (this_reg);
2386 while (--j >= 0)
2387 SET_HARD_REG_BIT (this_reg, regno + j);
2389 if (life)
2390 while (birth < death)
2392 IOR_HARD_REG_SET (regs_live_at[birth], this_reg);
2393 birth++;
2395 else
2396 while (birth < death)
2398 AND_COMPL_HARD_REG_SET (regs_live_at[birth], this_reg);
2399 birth++;
2403 /* INSN is the CLOBBER insn that starts a REG_NO_CONFLICT block, R0
2404 is the register being clobbered, and R1 is a register being used in
2405 the equivalent expression.
2407 If R1 dies in the block and has a REG_NO_CONFLICT note on every insn
2408 in which it is used, return 1.
2410 Otherwise, return 0. */
2412 static int
2413 no_conflict_p (rtx insn, rtx r0 ATTRIBUTE_UNUSED, rtx r1)
2415 int ok = 0;
2416 rtx note = find_reg_note (insn, REG_LIBCALL, NULL_RTX);
2417 rtx p, last;
2419 /* If R1 is a hard register, return 0 since we handle this case
2420 when we scan the insns that actually use it. */
2422 if (note == 0
2423 || (REG_P (r1) && REGNO (r1) < FIRST_PSEUDO_REGISTER)
2424 || (GET_CODE (r1) == SUBREG && REG_P (SUBREG_REG (r1))
2425 && REGNO (SUBREG_REG (r1)) < FIRST_PSEUDO_REGISTER))
2426 return 0;
2428 last = XEXP (note, 0);
2430 for (p = NEXT_INSN (insn); p && p != last; p = NEXT_INSN (p))
2431 if (INSN_P (p))
2433 if (find_reg_note (p, REG_DEAD, r1))
2434 ok = 1;
2436 /* There must be a REG_NO_CONFLICT note on every insn, otherwise
2437 some earlier optimization pass has inserted instructions into
2438 the sequence, and it is not safe to perform this optimization.
2439 Note that emit_no_conflict_block always ensures that this is
2440 true when these sequences are created. */
2441 if (! find_reg_note (p, REG_NO_CONFLICT, r1))
2442 return 0;
2445 return ok;
2448 /* Return the number of alternatives for which the constraint string P
2449 indicates that the operand must be equal to operand 0 and that no register
2450 is acceptable. */
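/* An illustrative example (added): for the constraint string "0,m" the
first alternative requires a match with operand 0 and allows no register,
so this function returns 1; for "r0" a register is allowed, so it
returns 0.  */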
2452 static int
2453 requires_inout (const char *p)
2455 char c;
2456 int found_zero = 0;
2457 int reg_allowed = 0;
2458 int num_matching_alts = 0;
2459 int len;
2461 for ( ; (c = *p); p += len)
2463 len = CONSTRAINT_LEN (c, p);
2464 switch (c)
2466 case '=': case '+': case '?':
2467 case '#': case '&': case '!':
2468 case '*': case '%':
2469 case 'm': case '<': case '>': case 'V': case 'o':
2470 case 'E': case 'F': case 'G': case 'H':
2471 case 's': case 'i': case 'n':
2472 case 'I': case 'J': case 'K': case 'L':
2473 case 'M': case 'N': case 'O': case 'P':
2474 case 'X':
2475 /* These don't say anything we care about. */
2476 break;
2478 case ',':
2479 if (found_zero && ! reg_allowed)
2480 num_matching_alts++;
2482 found_zero = reg_allowed = 0;
2483 break;
2485 case '0':
2486 found_zero = 1;
2487 break;
2489 case '1': case '2': case '3': case '4': case '5':
2490 case '6': case '7': case '8': case '9':
2491 /* Skip the balance of the matching constraint. */
2493 p++;
2494 while (ISDIGIT (*p));
2495 len = 0;
2496 break;
2498 default:
2499 if (REG_CLASS_FROM_CONSTRAINT (c, p) == NO_REGS
2500 && !EXTRA_ADDRESS_CONSTRAINT (c, p))
2501 break;
2502 /* Fall through. */
2503 case 'p':
2504 case 'g': case 'r':
2505 reg_allowed = 1;
2506 break;
2510 if (found_zero && ! reg_allowed)
2511 num_matching_alts++;
2513 return num_matching_alts;
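/* Print to FILE the hard register assigned to each pseudo register by
local allocation.  Pseudos that did not receive a hard register are
omitted.  */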
2516 void
2517 dump_local_alloc (FILE *file)
2519 int i;
2520 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
2521 if (reg_renumber[i] != -1)
2522 fprintf (file, ";; Register %d in %d.\n", i, reg_renumber[i]);
2525 /* Run the old register allocator. Return TRUE if we must exit
2526 rest_of_compilation upon return. */
2527 static unsigned int
2528 rest_of_handle_local_alloc (void)
2530 int rebuild_notes;
2532 /* Determine if the current function is a leaf before running reload
2533 since this can impact optimizations done by the prologue and
2534 epilogue, thus changing register elimination offsets. */
2535 current_function_is_leaf = leaf_function_p ();
2537 /* Allocate the reg_renumber array. */
2538 allocate_reg_info (max_regno, FALSE, TRUE);
2540 /* And the reg_equiv_memory_loc array. */
2541 VEC_safe_grow (rtx, gc, reg_equiv_memory_loc_vec, max_regno);
2542 memset (VEC_address (rtx, reg_equiv_memory_loc_vec), 0,
2543 sizeof (rtx) * max_regno);
2544 reg_equiv_memory_loc = VEC_address (rtx, reg_equiv_memory_loc_vec);
2546 allocate_initial_values (reg_equiv_memory_loc);
2548 regclass (get_insns (), max_reg_num ());
2549 rebuild_notes = local_alloc ();
2551 /* Local allocation may have turned an indirect jump into a direct
2552 jump. If so, we must rebuild the JUMP_LABEL fields of jumping
2553 instructions. */
2554 if (rebuild_notes)
2556 timevar_push (TV_JUMP);
2558 rebuild_jump_labels (get_insns ());
2559 purge_all_dead_edges ();
2560 delete_unreachable_blocks ();
2562 timevar_pop (TV_JUMP);
2565 if (dump_file && (dump_flags & TDF_DETAILS))
2567 timevar_push (TV_DUMP);
2568 dump_flow_info (dump_file, dump_flags);
2569 dump_local_alloc (dump_file);
2570 timevar_pop (TV_DUMP);
2572 return 0;
2575 struct tree_opt_pass pass_local_alloc =
2577 "lreg", /* name */
2578 NULL, /* gate */
2579 rest_of_handle_local_alloc, /* execute */
2580 NULL, /* sub */
2581 NULL, /* next */
2582 0, /* static_pass_number */
2583 TV_LOCAL_ALLOC, /* tv_id */
2584 0, /* properties_required */
2585 0, /* properties_provided */
2586 0, /* properties_destroyed */
2587 0, /* todo_flags_start */
2588 TODO_dump_func |
2589 TODO_ggc_collect, /* todo_flags_finish */
2590 'l' /* letter */