gcc/local-alloc.c
1 /* Allocate registers within a basic block, for GNU compiler.
2 Copyright (C) 1987, 1988, 1991, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /* Allocation of hard register numbers to pseudo registers is done in
23 two passes. In this pass we consider only regs that are born and
24 die once within one basic block. We do this one basic block at a
25 time. Then the next pass allocates the registers that remain.
26 Two passes are used because this pass uses methods that work only
27 on linear code, but that do a better job than the general methods
28 used in global_alloc, and more quickly too.
30 The assignments made are recorded in the vector reg_renumber
31 whose space is allocated here. The rtl code itself is not altered.
33 We assign each instruction in the basic block a number
34 which is its order from the beginning of the block.
35 Then we can represent the lifetime of a pseudo register with
36 a pair of numbers, and check for conflicts easily.
37 We can record the availability of hard registers with a
38 HARD_REG_SET for each instruction. The HARD_REG_SET
39 contains 0 or 1 for each hard reg.
41 To avoid register shuffling, we tie registers together when one
42 dies by being copied into another, or dies in an instruction that
43 does arithmetic to produce another. The tied registers are
44 allocated as one. Registers with different reg class preferences
45 can never be tied unless the class preferred by one is a subclass
46 of the one preferred by the other.
48 Tying is represented with "quantity numbers".
49 A non-tied register is given a new quantity number.
50 Tied registers have the same quantity number.
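   For example (purely illustrative pseudo numbers): in the sequence

      insn 1:  r102 = r101 + 2     (r101 dies here)
      insn 2:  r103 = r102         (r102 dies here)

   r101 dies in an insn that does arithmetic to produce r102, and r102
   dies by being copied into r103, so all three pseudos share a single
   quantity number and are allocated as one register.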
52 We have provision to exempt registers that, even though they are contained
53 within the block, can be tied to others that are not contained in it.
54 This is so that global_alloc could process them both and tie them then.
55 But this is currently disabled since tying in global_alloc is not
56 yet implemented. */
58 /* Pseudos allocated here can be reallocated by global.c if the hard register
59 is used as a spill register. Currently we don't allocate such pseudos
60 here if their preferred class is likely to be used by spills. */
62 #include "config.h"
63 #include "system.h"
64 #include "coretypes.h"
65 #include "tm.h"
66 #include "hard-reg-set.h"
67 #include "rtl.h"
68 #include "tm_p.h"
69 #include "flags.h"
70 #include "regs.h"
71 #include "function.h"
72 #include "insn-config.h"
73 #include "insn-attr.h"
74 #include "recog.h"
75 #include "output.h"
76 #include "toplev.h"
77 #include "except.h"
78 #include "integrate.h"
79 #include "reload.h"
80 #include "ggc.h"
81 #include "timevar.h"
82 #include "tree-pass.h"
83 #include "df.h"
84 #include "dbgcnt.h"
87 /* Next quantity number available for allocation. */
89 static int next_qty;
91 /* Information we maintain about each quantity. */
92 struct qty
93 {
94 /* The number of refs to quantity Q. */
96 int n_refs;
98 /* The frequency of uses of quantity Q. */
100 int freq;
102 /* Insn number (counting from head of basic block)
103 where quantity Q was born. -1 if birth has not been recorded. */
105 int birth;
107 /* Insn number (counting from head of basic block)
108 where given quantity died. Due to the way tying is done,
109 and the fact that we consider in this pass only regs that die but once,
110 a quantity can die only once. Each quantity's life span
111 is a set of consecutive insns. -1 if death has not been recorded. */
113 int death;
115 /* Number of words needed to hold the data in given quantity.
116 This depends on its machine mode. It is used for these purposes:
117 1. It is used in computing the relative importance of qtys,
118 which determines the order in which we look for regs for them.
119 2. It is used in rules that prevent tying several registers of
120 different sizes in a way that is geometrically impossible
121 (see combine_regs). */
123 int size;
125 /* Number of times a reg tied to given qty lives across a CALL_INSN. */
127 int n_calls_crossed;
129 /* Number of times a reg tied to given qty lives across a CALL_INSN
130 that might throw. */
132 int n_throwing_calls_crossed;
134 /* The register number of one pseudo register whose reg_qty value is Q.
135 This register should be the head of the chain
136 maintained in reg_next_in_qty. */
138 int first_reg;
140 /* Reg class contained in (smaller than) the preferred classes of all
141 the pseudo regs that are tied in given quantity.
142 This is the preferred class for allocating that quantity. */
144 enum reg_class min_class;
146 /* Register class within which we allocate given qty if we can't get
147 its preferred class. */
149 enum reg_class alternate_class;
151 /* This holds the mode of the registers that are tied to given qty,
152 or VOIDmode if registers with differing modes are tied together. */
154 enum machine_mode mode;
156 /* the hard reg number chosen for given quantity,
157 or -1 if none was found. */
159 short phys_reg;
160 };
162 static struct qty *qty;
164 /* These fields are kept separately to speed up their clearing. */
166 /* We maintain two hard register sets that indicate suggested hard registers
167 for each quantity. The first, phys_copy_sugg, contains hard registers
168 that are tied to the quantity by a simple copy. The second contains all
169 hard registers that are tied to the quantity via an arithmetic operation.
171 The former register set is given priority for allocation. This tends to
172 eliminate copy insns. */
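/* For instance (illustrative register numbers): a copy insn
   "r105 = r3" in which hard reg 3 dies adds hard reg 3 to the copy
   suggestions of r105's quantity, whereas an arithmetic insn such as
   "r105 = r3 + r106" only adds it to the general suggestions; the
   copy suggestions win when a hard register is finally picked.  */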
174 /* Element Q is a set of hard registers that are suggested for quantity Q by
175 copy insns. */
177 static HARD_REG_SET *qty_phys_copy_sugg;
179 /* Element Q is a set of hard registers that are suggested for quantity Q by
180 arithmetic insns. */
182 static HARD_REG_SET *qty_phys_sugg;
184 /* Element Q is the number of suggested registers in qty_phys_copy_sugg. */
186 static short *qty_phys_num_copy_sugg;
188 /* Element Q is the number of suggested registers in qty_phys_sugg. */
190 static short *qty_phys_num_sugg;
192 /* If (REG N) has been assigned a quantity number, is a register number
193 of another register assigned the same quantity number, or -1 for the
194 end of the chain. qty->first_reg points to the head of this chain. */
196 static int *reg_next_in_qty;
198 /* reg_qty[N] (where N is a pseudo reg number) is the qty number of that reg
199 if it is >= 0,
200 or -1 if this register cannot be allocated by local-alloc,
201 or -2 if not known yet.
203 Note that if we see a use or death of pseudo register N with
204 reg_qty[N] == -2, register N must be local to the current block. If
205 it were used in more than one block, we would have reg_qty[N] == -1.
206 This relies on the fact that if reg_basic_block[N] is >= 0, register N
207 will not appear in any other block. We save a considerable number of
208 tests by exploiting this.
210 If N is < FIRST_PSEUDO_REGISTER, reg_qty[N] is undefined and should not
211 be referenced. */
213 static int *reg_qty;
215 /* The offset (in words) of register N within its quantity.
216 This can be nonzero if register N is SImode, and has been tied
217 to a subreg of a DImode register. */
219 static char *reg_offset;
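/* Example (illustrative): if (reg:SI 121) has been tied to
   (subreg:SI (reg:DI 120) 4) on a target with 32-bit words,
   reg_offset[121] is 1, i.e. pseudo 121 occupies the second word
   of the quantity it shares with pseudo 120.  */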
221 /* Vector of substitutions of register numbers,
222 used to map pseudo regs into hardware regs.
223 This is set up as a result of register allocation.
224 Element N is the hard reg assigned to pseudo reg N,
225 or is -1 if no hard reg was assigned.
226 If N is a hard reg number, element N is N. */
228 short *reg_renumber;
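/* Example: reg_renumber[117] == 3 means pseudo 117 was given hard
   register 3 by this pass; reg_renumber[117] == -1 means it was not
   allocated here and is left for global_alloc and reload.  */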
230 /* Set of hard registers live at the current point in the scan
231 of the instructions in a basic block. */
233 static HARD_REG_SET regs_live;
235 /* Each set of hard registers indicates registers live at a particular
236 point in the basic block. For N even, regs_live_at[N] says which
237 hard registers are needed *after* insn N/2 (i.e., they may not
238 conflict with the outputs of insn N/2 or the inputs of insn N/2 + 1).
240 If an object is to conflict with the inputs of insn J but not the
241 outputs of insn J + 1, we say it is born at index J*2 - 1. Similarly,
242 if it is to conflict with the outputs of insn J but not the inputs of
243 insn J + 1, it is said to die at index J*2 + 1. */
245 static HARD_REG_SET *regs_live_at;
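/* A worked instance of the indexing above, for J = 1: regs_live_at[2]
   holds the hard registers needed just after insn 1, and an object
   said to die at index 3 (= 1*2 + 1) conflicts with insn 1's outputs
   but not with insn 2's inputs.  */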
247 /* Communicate local vars `insn_number' and `insn'
248 from `block_alloc' to `reg_is_set', `wipe_dead_reg', and `alloc_qty'. */
249 static int this_insn_number;
250 static rtx this_insn;
252 struct equivalence
253 {
254 /* Set when an attempt should be made to replace a register
255 with the associated src_p entry. */
257 char replace;
259 /* Set when a REG_EQUIV note is found or created. Used to
260 keep track of what memory accesses might be created later,
261 e.g. by reload. */
263 rtx replacement;
265 rtx *src_p;
267 /* Loop depth is used to recognize equivalences which appear
268 to be present within the same loop (or in an inner loop). */
270 int loop_depth;
272 /* The list of each instruction which initializes this register. */
274 rtx init_insns;
276 /* Nonzero if this had a preexisting REG_EQUIV note. */
278 int is_arg_equivalence;
279 };
281 /* reg_equiv[N] (where N is a pseudo reg number) is the equivalence
282 structure for that register. */
284 static struct equivalence *reg_equiv;
286 /* Nonzero if we recorded an equivalence for a LABEL_REF. */
287 static int recorded_label_ref;
289 static void alloc_qty (int, enum machine_mode, int, int);
290 static void validate_equiv_mem_from_store (rtx, const_rtx, void *);
291 static int validate_equiv_mem (rtx, rtx, rtx);
292 static int equiv_init_varies_p (rtx);
293 static int equiv_init_movable_p (rtx, int);
294 static int contains_replace_regs (rtx);
295 static int memref_referenced_p (rtx, rtx);
296 static int memref_used_between_p (rtx, rtx, rtx);
297 static void update_equiv_regs (void);
298 static void no_equiv (rtx, const_rtx, void *);
299 static void block_alloc (int);
300 static int qty_sugg_compare (int, int);
301 static int qty_sugg_compare_1 (const void *, const void *);
302 static int qty_compare (int, int);
303 static int qty_compare_1 (const void *, const void *);
304 static int combine_regs (rtx, rtx, int, int, rtx, int);
305 static int reg_meets_class_p (int, enum reg_class);
306 static void update_qty_class (int, int);
307 static void reg_is_set (rtx, const_rtx, void *);
308 static void reg_is_born (rtx, int);
309 static void wipe_dead_reg (rtx, int);
310 static int find_free_reg (enum reg_class, enum machine_mode, int, int, int,
311 int, int);
312 static void mark_life (int, enum machine_mode, int);
313 static void post_mark_life (int, enum machine_mode, int, int, int);
314 static int no_conflict_p (rtx, rtx, rtx);
315 static int requires_inout (const char *);
317 /* Allocate a new quantity (new within current basic block)
318 for register number REGNO which is born at index BIRTH
319 within the block. MODE and SIZE are info on reg REGNO. */
321 static void
322 alloc_qty (int regno, enum machine_mode mode, int size, int birth)
323 {
324 int qtyno = next_qty++;
326 reg_qty[regno] = qtyno;
327 reg_offset[regno] = 0;
328 reg_next_in_qty[regno] = -1;
330 qty[qtyno].first_reg = regno;
331 qty[qtyno].size = size;
332 qty[qtyno].mode = mode;
333 qty[qtyno].birth = birth;
334 qty[qtyno].n_calls_crossed = REG_N_CALLS_CROSSED (regno);
335 qty[qtyno].n_throwing_calls_crossed = REG_N_THROWING_CALLS_CROSSED (regno);
336 qty[qtyno].min_class = reg_preferred_class (regno);
337 qty[qtyno].alternate_class = reg_alternate_class (regno);
338 qty[qtyno].n_refs = REG_N_REFS (regno);
339 qty[qtyno].freq = REG_FREQ (regno);
340 }
342 /* Main entry point of this file. */
344 static int
345 local_alloc (void)
347 int i;
348 int max_qty;
349 basic_block b;
351 /* We need to keep track of whether or not we recorded a LABEL_REF so
352 that we know if the jump optimizer needs to be rerun. */
353 recorded_label_ref = 0;
355 /* Leaf functions and non-leaf functions have different needs.
356 If defined, let the machine say what kind of ordering we
357 should use. */
358 #ifdef ORDER_REGS_FOR_LOCAL_ALLOC
359 ORDER_REGS_FOR_LOCAL_ALLOC;
360 #endif
362 /* Promote REG_EQUAL notes to REG_EQUIV notes and adjust status of affected
363 registers. */
364 update_equiv_regs ();
366 /* This sets the maximum number of quantities we can have. Quantity
367 numbers start at zero and we can have one for each pseudo. */
368 max_qty = (max_regno - FIRST_PSEUDO_REGISTER);
370 /* Allocate vectors of temporary data.
371 See the declarations of these variables, above,
372 for what they mean. */
374 qty = XNEWVEC (struct qty, max_qty);
375 qty_phys_copy_sugg = XNEWVEC (HARD_REG_SET, max_qty);
376 qty_phys_num_copy_sugg = XNEWVEC (short, max_qty);
377 qty_phys_sugg = XNEWVEC (HARD_REG_SET, max_qty);
378 qty_phys_num_sugg = XNEWVEC (short, max_qty);
380 reg_qty = XNEWVEC (int, max_regno);
381 reg_offset = XNEWVEC (char, max_regno);
382 reg_next_in_qty = XNEWVEC (int, max_regno);
384 /* Determine which pseudo-registers can be allocated by local-alloc.
385 In general, these are the registers used only in a single block and
386 which only die once.
388 We need not be concerned with which block actually uses the register
389 since we will never see it outside that block. */
391 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
393 if (REG_BASIC_BLOCK (i) >= NUM_FIXED_BLOCKS && REG_N_DEATHS (i) == 1)
394 reg_qty[i] = -2;
395 else
396 reg_qty[i] = -1;
399 /* Force loop below to initialize entire quantity array. */
400 next_qty = max_qty;
402 /* Allocate each block's local registers, block by block. */
404 FOR_EACH_BB (b)
406 /* NEXT_QTY indicates which elements of the `qty_...'
407 vectors might need to be initialized because they were used
408 for the previous block; it is set to the entire array before
409 block 0. Initialize those, with explicit loop if there are few,
410 else with memset. Do not initialize vectors that are
411 explicitly set by `alloc_qty'. */
413 if (next_qty < 6)
415 for (i = 0; i < next_qty; i++)
417 CLEAR_HARD_REG_SET (qty_phys_copy_sugg[i]);
418 qty_phys_num_copy_sugg[i] = 0;
419 CLEAR_HARD_REG_SET (qty_phys_sugg[i]);
420 qty_phys_num_sugg[i] = 0;
423 else
425 #define CLEAR(vector) \
426 memset ((vector), 0, (sizeof (*(vector))) * next_qty);
428 CLEAR (qty_phys_copy_sugg);
429 CLEAR (qty_phys_num_copy_sugg);
430 CLEAR (qty_phys_sugg);
431 CLEAR (qty_phys_num_sugg);
434 next_qty = 0;
436 block_alloc (b->index);
439 free (qty);
440 free (qty_phys_copy_sugg);
441 free (qty_phys_num_copy_sugg);
442 free (qty_phys_sugg);
443 free (qty_phys_num_sugg);
445 free (reg_qty);
446 free (reg_offset);
447 free (reg_next_in_qty);
449 return recorded_label_ref;
452 /* Used for communication between the following two functions: contains
453 a MEM that we wish to ensure remains unchanged. */
454 static rtx equiv_mem;
456 /* Set nonzero if EQUIV_MEM is modified. */
457 static int equiv_mem_modified;
459 /* If EQUIV_MEM is modified by modifying DEST, indicate that it is modified.
460 Called via note_stores. */
462 static void
463 validate_equiv_mem_from_store (rtx dest, const_rtx set ATTRIBUTE_UNUSED,
464 void *data ATTRIBUTE_UNUSED)
465 {
466 if ((REG_P (dest)
467 && reg_overlap_mentioned_p (dest, equiv_mem))
468 || (MEM_P (dest)
469 && true_dependence (dest, VOIDmode, equiv_mem, rtx_varies_p)))
470 equiv_mem_modified = 1;
471 }
473 /* Verify that no store between START and the death of REG invalidates
474 MEMREF. MEMREF is invalidated by modifying a register used in MEMREF,
475 by storing into an overlapping memory location, or with a non-const
476 CALL_INSN.
478 Return 1 if MEMREF remains valid. */
480 static int
481 validate_equiv_mem (rtx start, rtx reg, rtx memref)
483 rtx insn;
484 rtx note;
486 equiv_mem = memref;
487 equiv_mem_modified = 0;
489 /* If the memory reference has side effects or is volatile, it isn't a
490 valid equivalence. */
491 if (side_effects_p (memref))
492 return 0;
494 for (insn = start; insn && ! equiv_mem_modified; insn = NEXT_INSN (insn))
496 if (! INSN_P (insn))
497 continue;
499 if (find_reg_note (insn, REG_DEAD, reg))
500 return 1;
502 if (CALL_P (insn) && ! MEM_READONLY_P (memref)
503 && ! CONST_OR_PURE_CALL_P (insn))
504 return 0;
506 note_stores (PATTERN (insn), validate_equiv_mem_from_store, NULL);
508 /* If a register mentioned in MEMREF is modified via an
509 auto-increment, we lose the equivalence. Do the same if one
510 dies; although we could extend the life, it doesn't seem worth
511 the trouble. */
513 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
514 if ((REG_NOTE_KIND (note) == REG_INC
515 || REG_NOTE_KIND (note) == REG_DEAD)
516 && REG_P (XEXP (note, 0))
517 && reg_overlap_mentioned_p (XEXP (note, 0), memref))
518 return 0;
521 return 0;
524 /* Returns zero if X is known to be invariant. */
526 static int
527 equiv_init_varies_p (rtx x)
529 RTX_CODE code = GET_CODE (x);
530 int i;
531 const char *fmt;
533 switch (code)
535 case MEM:
536 return !MEM_READONLY_P (x) || equiv_init_varies_p (XEXP (x, 0));
538 case CONST:
539 case CONST_INT:
540 case CONST_DOUBLE:
541 case CONST_FIXED:
542 case CONST_VECTOR:
543 case SYMBOL_REF:
544 case LABEL_REF:
545 return 0;
547 case REG:
548 return reg_equiv[REGNO (x)].replace == 0 && rtx_varies_p (x, 0);
550 case ASM_OPERANDS:
551 if (MEM_VOLATILE_P (x))
552 return 1;
554 /* Fall through. */
556 default:
557 break;
560 fmt = GET_RTX_FORMAT (code);
561 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
562 if (fmt[i] == 'e')
564 if (equiv_init_varies_p (XEXP (x, i)))
565 return 1;
567 else if (fmt[i] == 'E')
569 int j;
570 for (j = 0; j < XVECLEN (x, i); j++)
571 if (equiv_init_varies_p (XVECEXP (x, i, j)))
572 return 1;
575 return 0;
578 /* Returns nonzero if X (used to initialize register REGNO) is movable.
579 X is only movable if the registers it uses have equivalent initializations
580 which appear to be within the same loop (or in an inner loop) and movable
581 or if they are not candidates for local_alloc and don't vary. */
583 static int
584 equiv_init_movable_p (rtx x, int regno)
586 int i, j;
587 const char *fmt;
588 enum rtx_code code = GET_CODE (x);
590 switch (code)
592 case SET:
593 return equiv_init_movable_p (SET_SRC (x), regno);
595 case CC0:
596 case CLOBBER:
597 return 0;
599 case PRE_INC:
600 case PRE_DEC:
601 case POST_INC:
602 case POST_DEC:
603 case PRE_MODIFY:
604 case POST_MODIFY:
605 return 0;
607 case REG:
608 return (reg_equiv[REGNO (x)].loop_depth >= reg_equiv[regno].loop_depth
609 && reg_equiv[REGNO (x)].replace)
610 || (REG_BASIC_BLOCK (REGNO (x)) < NUM_FIXED_BLOCKS && ! rtx_varies_p (x, 0));
612 case UNSPEC_VOLATILE:
613 return 0;
615 case ASM_OPERANDS:
616 if (MEM_VOLATILE_P (x))
617 return 0;
619 /* Fall through. */
621 default:
622 break;
625 fmt = GET_RTX_FORMAT (code);
626 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
627 switch (fmt[i])
629 case 'e':
630 if (! equiv_init_movable_p (XEXP (x, i), regno))
631 return 0;
632 break;
633 case 'E':
634 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
635 if (! equiv_init_movable_p (XVECEXP (x, i, j), regno))
636 return 0;
637 break;
640 return 1;
643 /* TRUE if X uses any registers for which reg_equiv[REGNO].replace is true. */
645 static int
646 contains_replace_regs (rtx x)
648 int i, j;
649 const char *fmt;
650 enum rtx_code code = GET_CODE (x);
652 switch (code)
654 case CONST_INT:
655 case CONST:
656 case LABEL_REF:
657 case SYMBOL_REF:
658 case CONST_DOUBLE:
659 case CONST_FIXED:
660 case CONST_VECTOR:
661 case PC:
662 case CC0:
663 case HIGH:
664 return 0;
666 case REG:
667 return reg_equiv[REGNO (x)].replace;
669 default:
670 break;
673 fmt = GET_RTX_FORMAT (code);
674 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
675 switch (fmt[i])
677 case 'e':
678 if (contains_replace_regs (XEXP (x, i)))
679 return 1;
680 break;
681 case 'E':
682 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
683 if (contains_replace_regs (XVECEXP (x, i, j)))
684 return 1;
685 break;
688 return 0;
691 /* TRUE if X references a memory location that would be affected by a store
692 to MEMREF. */
694 static int
695 memref_referenced_p (rtx memref, rtx x)
697 int i, j;
698 const char *fmt;
699 enum rtx_code code = GET_CODE (x);
701 switch (code)
703 case CONST_INT:
704 case CONST:
705 case LABEL_REF:
706 case SYMBOL_REF:
707 case CONST_DOUBLE:
708 case CONST_FIXED:
709 case CONST_VECTOR:
710 case PC:
711 case CC0:
712 case HIGH:
713 case LO_SUM:
714 return 0;
716 case REG:
717 return (reg_equiv[REGNO (x)].replacement
718 && memref_referenced_p (memref,
719 reg_equiv[REGNO (x)].replacement));
721 case MEM:
722 if (true_dependence (memref, VOIDmode, x, rtx_varies_p))
723 return 1;
724 break;
726 case SET:
727 /* If we are setting a MEM, it doesn't count (its address does), but any
728 other SET_DEST that has a MEM in it is referencing the MEM. */
729 if (MEM_P (SET_DEST (x)))
731 if (memref_referenced_p (memref, XEXP (SET_DEST (x), 0)))
732 return 1;
734 else if (memref_referenced_p (memref, SET_DEST (x)))
735 return 1;
737 return memref_referenced_p (memref, SET_SRC (x));
739 default:
740 break;
743 fmt = GET_RTX_FORMAT (code);
744 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
745 switch (fmt[i])
747 case 'e':
748 if (memref_referenced_p (memref, XEXP (x, i)))
749 return 1;
750 break;
751 case 'E':
752 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
753 if (memref_referenced_p (memref, XVECEXP (x, i, j)))
754 return 1;
755 break;
758 return 0;
761 /* TRUE if some insn in the range (START, END] references a memory location
762 that would be affected by a store to MEMREF. */
764 static int
765 memref_used_between_p (rtx memref, rtx start, rtx end)
767 rtx insn;
769 for (insn = NEXT_INSN (start); insn != NEXT_INSN (end);
770 insn = NEXT_INSN (insn))
772 if (!INSN_P (insn))
773 continue;
775 if (memref_referenced_p (memref, PATTERN (insn)))
776 return 1;
778 /* Nonconst functions may access memory. */
779 if (CALL_P (insn)
780 && (! CONST_OR_PURE_CALL_P (insn)
781 || pure_call_p (insn)))
782 return 1;
785 return 0;
788 /* Find registers that are equivalent to a single value throughout the
789 compilation (either because they can be referenced in memory or are set once
790 from a single constant). Lower their priority for a register.
792 If such a register is only referenced once, try substituting its value
793 into the using insn. If it succeeds, we can eliminate the register
794 completely.
796 Initialize the REG_EQUIV_INIT array of initializing insns. */
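/* Two typical cases (illustrative): a pseudo set once from a constant
   and used once can have the constant substituted into its single use,
   after which the setting insn is deleted; a pseudo loaded once from a
   memory location that is never modified gets a REG_EQUIV note so that
   reload can rematerialize it from memory instead of spilling it.  */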
798 static void
799 update_equiv_regs (void)
801 rtx insn;
802 basic_block bb;
803 int loop_depth;
804 bitmap cleared_regs;
806 reg_equiv = XCNEWVEC (struct equivalence, max_regno);
807 reg_equiv_init = ggc_alloc_cleared (max_regno * sizeof (rtx));
808 reg_equiv_init_size = max_regno;
810 init_alias_analysis ();
812 /* Scan the insns and find which registers have equivalences. Do this
813 in a separate scan of the insns because (due to -fcse-follow-jumps)
814 a register can be set below its use. */
815 FOR_EACH_BB (bb)
817 loop_depth = bb->loop_depth;
819 for (insn = BB_HEAD (bb);
820 insn != NEXT_INSN (BB_END (bb));
821 insn = NEXT_INSN (insn))
823 rtx note;
824 rtx set;
825 rtx dest, src;
826 int regno;
828 if (! INSN_P (insn))
829 continue;
831 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
832 if (REG_NOTE_KIND (note) == REG_INC)
833 no_equiv (XEXP (note, 0), note, NULL);
835 set = single_set (insn);
837 /* If this insn contains more (or less) than a single SET,
838 only mark all destinations as having no known equivalence. */
839 if (set == 0)
841 note_stores (PATTERN (insn), no_equiv, NULL);
842 continue;
844 else if (GET_CODE (PATTERN (insn)) == PARALLEL)
846 int i;
848 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
850 rtx part = XVECEXP (PATTERN (insn), 0, i);
851 if (part != set)
852 note_stores (part, no_equiv, NULL);
856 dest = SET_DEST (set);
857 src = SET_SRC (set);
859 /* See if this is setting up the equivalence between an argument
860 register and its stack slot. */
861 note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
862 if (note)
864 gcc_assert (REG_P (dest));
865 regno = REGNO (dest);
867 /* Note that we don't want to clear reg_equiv_init even if there
868 are multiple sets of this register. */
869 reg_equiv[regno].is_arg_equivalence = 1;
871 /* Record for reload that this is an equivalencing insn. */
872 if (rtx_equal_p (src, XEXP (note, 0)))
873 reg_equiv_init[regno]
874 = gen_rtx_INSN_LIST (VOIDmode, insn, reg_equiv_init[regno]);
876 /* Continue normally in case this is a candidate for
877 replacements. */
880 if (!optimize)
881 continue;
883 /* We only handle the case of a pseudo register being set
884 once, or always to the same value. */
885 /* ??? The mn10200 port breaks if we add equivalences for
886 values that need an ADDRESS_REGS register and set them equivalent
887 to a MEM of a pseudo. The actual problem is in the over-conservative
888 handling of INPADDR_ADDRESS / INPUT_ADDRESS / INPUT triples in
889 calculate_needs, but we traditionally work around this problem
890 here by rejecting equivalences when the destination is in a register
891 that's likely spilled. This is fragile, of course, since the
892 preferred class of a pseudo depends on all instructions that set
893 or use it. */
895 if (!REG_P (dest)
896 || (regno = REGNO (dest)) < FIRST_PSEUDO_REGISTER
897 || reg_equiv[regno].init_insns == const0_rtx
898 || (CLASS_LIKELY_SPILLED_P (reg_preferred_class (regno))
899 && MEM_P (src) && ! reg_equiv[regno].is_arg_equivalence))
901 /* This might be setting a SUBREG of a pseudo, a pseudo that is
902 also set somewhere else to a constant. */
903 note_stores (set, no_equiv, NULL);
904 continue;
907 note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
909 /* cse sometimes generates function invariants, but doesn't put a
910 REG_EQUAL note on the insn. Since this note would be redundant,
911 there's no point creating it earlier than here. */
912 if (! note && ! rtx_varies_p (src, 0))
913 note = set_unique_reg_note (insn, REG_EQUAL, copy_rtx (src));
915 /* Don't bother considering a REG_EQUAL note containing an EXPR_LIST
916 since it represents a function call */
917 if (note && GET_CODE (XEXP (note, 0)) == EXPR_LIST)
918 note = NULL_RTX;
920 if (DF_REG_DEF_COUNT (regno) != 1
921 && (! note
922 || rtx_varies_p (XEXP (note, 0), 0)
923 || (reg_equiv[regno].replacement
924 && ! rtx_equal_p (XEXP (note, 0),
925 reg_equiv[regno].replacement))))
927 no_equiv (dest, set, NULL);
928 continue;
930 /* Record this insn as initializing this register. */
931 reg_equiv[regno].init_insns
932 = gen_rtx_INSN_LIST (VOIDmode, insn, reg_equiv[regno].init_insns);
934 /* If this register is known to be equal to a constant, record that
935 it is always equivalent to the constant. */
936 if (DF_REG_DEF_COUNT (regno) == 1
937 && note && ! rtx_varies_p (XEXP (note, 0), 0))
939 rtx note_value = XEXP (note, 0);
940 remove_note (insn, note);
941 set_unique_reg_note (insn, REG_EQUIV, note_value);
944 /* If this insn introduces a "constant" register, decrease the priority
945 of that register. Record this insn if the register is only used once
946 more and the equivalence value is the same as our source.
948 The latter condition is checked for two reasons: First, it is an
949 indication that it may be more efficient to actually emit the insn
950 as written (if no registers are available, reload will substitute
951 the equivalence). Secondly, it avoids problems with any registers
952 dying in this insn whose death notes would be missed.
954 If we don't have a REG_EQUIV note, see if this insn is loading
955 a register used only in one basic block from a MEM. If so, and the
956 MEM remains unchanged for the life of the register, add a REG_EQUIV
957 note. */
959 note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
961 if (note == 0 && REG_BASIC_BLOCK (regno) >= NUM_FIXED_BLOCKS
962 && MEM_P (SET_SRC (set))
963 && validate_equiv_mem (insn, dest, SET_SRC (set)))
964 note = set_unique_reg_note (insn, REG_EQUIV, copy_rtx (SET_SRC (set)));
966 if (note)
968 int regno = REGNO (dest);
969 rtx x = XEXP (note, 0);
971 /* If we haven't done so, record for reload that this is an
972 equivalencing insn. */
973 if (!reg_equiv[regno].is_arg_equivalence)
974 reg_equiv_init[regno]
975 = gen_rtx_INSN_LIST (VOIDmode, insn, reg_equiv_init[regno]);
977 /* Record whether or not we created a REG_EQUIV note for a LABEL_REF.
978 We might end up substituting the LABEL_REF for uses of the
979 pseudo here or later. That kind of transformation may turn an
980 indirect jump into a direct jump, in which case we must rerun the
981 jump optimizer to ensure that the JUMP_LABEL fields are valid. */
982 if (GET_CODE (x) == LABEL_REF
983 || (GET_CODE (x) == CONST
984 && GET_CODE (XEXP (x, 0)) == PLUS
985 && (GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF)))
986 recorded_label_ref = 1;
988 reg_equiv[regno].replacement = x;
989 reg_equiv[regno].src_p = &SET_SRC (set);
990 reg_equiv[regno].loop_depth = loop_depth;
992 /* Don't mess with things live during setjmp. */
993 if (REG_LIVE_LENGTH (regno) >= 0 && optimize)
995 /* Note that the statement below does not affect the priority
996 in local-alloc! */
997 REG_LIVE_LENGTH (regno) *= 2;
999 /* If the register is referenced exactly twice, meaning it is
1000 set once and used once, indicate that the reference may be
1001 replaced by the equivalence we computed above. Do this
1002 even if the register is only used in one block so that
1003 dependencies can be handled where the last register is
1004 used in a different block (i.e. HIGH / LO_SUM sequences)
1005 and to reduce the number of registers alive across
1006 calls. */
1008 if (REG_N_REFS (regno) == 2
1009 && (rtx_equal_p (x, src)
1010 || ! equiv_init_varies_p (src))
1011 && NONJUMP_INSN_P (insn)
1012 && equiv_init_movable_p (PATTERN (insn), regno))
1013 reg_equiv[regno].replace = 1;
1019 if (!optimize)
1020 goto out;
1022 /* A second pass, to gather additional equivalences with memory. This needs
1023 to be done after we know which registers we are going to replace. */
1025 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1027 rtx set, src, dest;
1028 unsigned regno;
1030 if (! INSN_P (insn))
1031 continue;
1033 set = single_set (insn);
1034 if (! set)
1035 continue;
1037 dest = SET_DEST (set);
1038 src = SET_SRC (set);
1040 /* If this sets a MEM to the contents of a REG that is only used
1041 in a single basic block, see if the register is always equivalent
1042 to that memory location and if moving the store from INSN to the
1043 insn that set REG is safe. If so, put a REG_EQUIV note on the
1044 initializing insn.
1046 Don't add a REG_EQUIV note if the insn already has one. The existing
1047 REG_EQUIV is likely more useful than the one we are adding.
1049 If one of the regs in the address has reg_equiv[REGNO].replace set,
1050 then we can't add this REG_EQUIV note. The reg_equiv[REGNO].replace
1051 optimization may move the set of this register immediately before
1052 insn, which puts it after reg_equiv[REGNO].init_insns, and hence
1053 the mention in the REG_EQUIV note would be to an uninitialized
1054 pseudo. */
1056 if (MEM_P (dest) && REG_P (src)
1057 && (regno = REGNO (src)) >= FIRST_PSEUDO_REGISTER
1058 && REG_BASIC_BLOCK (regno) >= NUM_FIXED_BLOCKS
1059 && DF_REG_DEF_COUNT (regno) == 1
1060 && reg_equiv[regno].init_insns != 0
1061 && reg_equiv[regno].init_insns != const0_rtx
1062 && ! find_reg_note (XEXP (reg_equiv[regno].init_insns, 0),
1063 REG_EQUIV, NULL_RTX)
1064 && ! contains_replace_regs (XEXP (dest, 0)))
1066 rtx init_insn = XEXP (reg_equiv[regno].init_insns, 0);
1067 if (validate_equiv_mem (init_insn, src, dest)
1068 && ! memref_used_between_p (dest, init_insn, insn)
1069 /* Attaching a REG_EQUIV note will fail if INIT_INSN has
1070 multiple sets. */
1071 && set_unique_reg_note (init_insn, REG_EQUIV, copy_rtx (dest)))
1073 /* This insn makes the equivalence, not the one initializing
1074 the register. */
1075 reg_equiv_init[regno]
1076 = gen_rtx_INSN_LIST (VOIDmode, insn, NULL_RTX);
1077 df_notes_rescan (init_insn);
1082 cleared_regs = BITMAP_ALLOC (NULL);
1083 /* Now scan all regs killed in an insn to see if any of them are
1084 registers only used that once. If so, see if we can replace the
1085 reference with the equivalent form. If we can, delete the
1086 initializing reference and this register will go away. If we
1087 can't replace the reference, and the initializing reference is
1088 within the same loop (or in an inner loop), then move the register
1089 initialization just before the use, so that they are in the same
1090 basic block. */
1091 FOR_EACH_BB_REVERSE (bb)
1093 loop_depth = bb->loop_depth;
1094 for (insn = BB_END (bb);
1095 insn != PREV_INSN (BB_HEAD (bb));
1096 insn = PREV_INSN (insn))
1098 rtx link;
1100 if (! INSN_P (insn))
1101 continue;
1103 /* Don't substitute into a non-local goto, this confuses CFG. */
1104 if (JUMP_P (insn)
1105 && find_reg_note (insn, REG_NON_LOCAL_GOTO, NULL_RTX))
1106 continue;
1108 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1110 if (REG_NOTE_KIND (link) == REG_DEAD
1111 /* Make sure this insn still refers to the register. */
1112 && reg_mentioned_p (XEXP (link, 0), PATTERN (insn)))
1114 int regno = REGNO (XEXP (link, 0));
1115 rtx equiv_insn;
1117 if (! reg_equiv[regno].replace
1118 || reg_equiv[regno].loop_depth < loop_depth)
1119 continue;
1121 /* reg_equiv[REGNO].replace gets set only when
1122 REG_N_REFS[REGNO] is 2, i.e. the register is set
1123 once and used once. (If it were only set, but not used,
1124 flow would have deleted the setting insns.) Hence
1125 there can only be one insn in reg_equiv[REGNO].init_insns. */
1126 gcc_assert (reg_equiv[regno].init_insns
1127 && !XEXP (reg_equiv[regno].init_insns, 1));
1128 equiv_insn = XEXP (reg_equiv[regno].init_insns, 0);
1130 /* We may not move instructions that can throw, since
1131 that changes basic block boundaries and we are not
1132 prepared to adjust the CFG to match. */
1133 if (can_throw_internal (equiv_insn))
1134 continue;
1136 if (asm_noperands (PATTERN (equiv_insn)) < 0
1137 && validate_replace_rtx (regno_reg_rtx[regno],
1138 *(reg_equiv[regno].src_p), insn))
1140 rtx equiv_link;
1141 rtx last_link;
1142 rtx note;
1144 /* Find the last note. */
1145 for (last_link = link; XEXP (last_link, 1);
1146 last_link = XEXP (last_link, 1))
1149 /* Append the REG_DEAD notes from equiv_insn. */
1150 equiv_link = REG_NOTES (equiv_insn);
1151 while (equiv_link)
1153 note = equiv_link;
1154 equiv_link = XEXP (equiv_link, 1);
1155 if (REG_NOTE_KIND (note) == REG_DEAD)
1157 remove_note (equiv_insn, note);
1158 XEXP (last_link, 1) = note;
1159 XEXP (note, 1) = NULL_RTX;
1160 last_link = note;
1164 remove_death (regno, insn);
1165 SET_REG_N_REFS (regno, 0);
1166 REG_FREQ (regno) = 0;
1167 delete_insn (equiv_insn);
1169 reg_equiv[regno].init_insns
1170 = XEXP (reg_equiv[regno].init_insns, 1);
1172 reg_equiv_init[regno] = NULL_RTX;
1173 bitmap_set_bit (cleared_regs, regno);
1175 /* Move the initialization of the register to just before
1176 INSN. Update the flow information. */
1177 else if (PREV_INSN (insn) != equiv_insn)
1179 rtx new_insn;
1181 new_insn = emit_insn_before (PATTERN (equiv_insn), insn);
1182 REG_NOTES (new_insn) = REG_NOTES (equiv_insn);
1183 REG_NOTES (equiv_insn) = 0;
1185 /* Make sure this insn is recognized before
1186 reload begins, otherwise
1187 eliminate_regs_in_insn will die. */
1188 INSN_CODE (new_insn) = INSN_CODE (equiv_insn);
1190 delete_insn (equiv_insn);
1192 XEXP (reg_equiv[regno].init_insns, 0) = new_insn;
1194 REG_BASIC_BLOCK (regno) = bb->index;
1195 REG_N_CALLS_CROSSED (regno) = 0;
1196 REG_N_THROWING_CALLS_CROSSED (regno) = 0;
1197 REG_LIVE_LENGTH (regno) = 2;
1199 if (insn == BB_HEAD (bb))
1200 BB_HEAD (bb) = PREV_INSN (insn);
1202 reg_equiv_init[regno]
1203 = gen_rtx_INSN_LIST (VOIDmode, new_insn, NULL_RTX);
1204 bitmap_set_bit (cleared_regs, regno);
1211 if (!bitmap_empty_p (cleared_regs))
1212 FOR_EACH_BB (bb)
1214 bitmap_and_compl_into (DF_RA_LIVE_IN (bb), cleared_regs);
1215 if (DF_RA_LIVE_TOP (bb))
1216 bitmap_and_compl_into (DF_RA_LIVE_TOP (bb), cleared_regs);
1217 bitmap_and_compl_into (DF_RA_LIVE_OUT (bb), cleared_regs);
1218 bitmap_and_compl_into (DF_LR_IN (bb), cleared_regs);
1219 if (DF_LR_TOP (bb))
1220 bitmap_and_compl_into (DF_LR_TOP (bb), cleared_regs);
1221 bitmap_and_compl_into (DF_LR_OUT (bb), cleared_regs);
1224 BITMAP_FREE (cleared_regs);
1226 out:
1227 /* Clean up. */
1229 end_alias_analysis ();
1230 free (reg_equiv);
1233 /* Mark REG as having no known equivalence.
1234 Some instructions might have been processed before and furnished
1235 with REG_EQUIV notes for this register; these notes will have to be
1236 removed.
1237 STORE is the piece of RTL that does the non-constant / conflicting
1238 assignment - a SET, CLOBBER or REG_INC note. It is currently not used,
1239 but needs to be there because this function is called from note_stores. */
1240 static void
1241 no_equiv (rtx reg, const_rtx store ATTRIBUTE_UNUSED, void *data ATTRIBUTE_UNUSED)
1242 {
1243 int regno;
1244 rtx list;
1246 if (!REG_P (reg))
1247 return;
1248 regno = REGNO (reg);
1249 list = reg_equiv[regno].init_insns;
1250 if (list == const0_rtx)
1251 return;
1252 reg_equiv[regno].init_insns = const0_rtx;
1253 reg_equiv[regno].replacement = NULL_RTX;
1254 /* This doesn't matter for equivalences made for argument registers, we
1255 should keep their initialization insns. */
1256 if (reg_equiv[regno].is_arg_equivalence)
1257 return;
1258 reg_equiv_init[regno] = NULL_RTX;
1259 for (; list; list = XEXP (list, 1))
1260 {
1261 rtx insn = XEXP (list, 0);
1262 remove_note (insn, find_reg_note (insn, REG_EQUIV, NULL_RTX));
1263 }
1264 }
1266 /* Allocate hard regs to the pseudo regs used only within block number B.
1267 Only the pseudos that die but once can be handled. */
1269 static void
1270 block_alloc (int b)
1272 int i, q;
1273 rtx insn;
1274 rtx note, hard_reg;
1275 int insn_number = 0;
1276 int insn_count = 0;
1277 int max_uid = get_max_uid ();
1278 int *qty_order;
1279 int no_conflict_combined_regno = -1;
1281 /* Count the instructions in the basic block. */
1283 insn = BB_END (BASIC_BLOCK (b));
1284 while (1)
1286 if (!NOTE_P (insn))
1288 ++insn_count;
1289 gcc_assert (insn_count <= max_uid);
1291 if (insn == BB_HEAD (BASIC_BLOCK (b)))
1292 break;
1293 insn = PREV_INSN (insn);
1296 /* +2 to leave room for a post_mark_life at the last insn and for
1297 the birth of a CLOBBER in the first insn. */
1298 regs_live_at = XCNEWVEC (HARD_REG_SET, 2 * insn_count + 2);
1300 /* Initialize table of hardware registers currently live. */
1302 REG_SET_TO_HARD_REG_SET (regs_live, DF_LR_TOP (BASIC_BLOCK (b)));
1304 /* This loop scans the instructions of the basic block
1305 and assigns quantities to registers.
1306 It computes which registers to tie. */
1308 insn = BB_HEAD (BASIC_BLOCK (b));
1309 while (1)
1311 if (!NOTE_P (insn))
1312 insn_number++;
1314 if (INSN_P (insn))
1316 rtx link, set;
1317 int win = 0;
1318 rtx r0, r1 = NULL_RTX;
1319 int combined_regno = -1;
1320 int i;
1322 this_insn_number = insn_number;
1323 this_insn = insn;
1325 extract_insn (insn);
1326 which_alternative = -1;
1328 /* Is this insn suitable for tying two registers?
1329 If so, try doing that.
1330 Suitable insns are those with at least two operands and where
1331 operand 0 is an output that is a register that is not
1332 earlyclobber.
1334 We can tie operand 0 with some operand that dies in this insn.
1335 First look for operands that are required to be in the same
1336 register as operand 0. If we find such, only try tying that
1337 operand or one that can be put into that operand if the
1338 operation is commutative. If we don't find an operand
1339 that is required to be in the same register as operand 0,
1340 we can tie with any operand.
1342 Subregs in place of regs are also ok.
1344 If tying is done, WIN is set nonzero. */
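/* Concretely (illustrative constraints): a two-address add pattern may
   give operand 1 the constraint "0", meaning it must end up in the same
   register as operand 0.  requires_inout detects such alternatives, and
   then only that operand (or its commutative partner, marked with "%")
   is considered for tying with operand 0.  */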
1346 if (optimize
1347 && recog_data.n_operands > 1
1348 && recog_data.constraints[0][0] == '='
1349 && recog_data.constraints[0][1] != '&')
1351 /* If non-negative, is an operand that must match operand 0. */
1352 int must_match_0 = -1;
1353 /* Counts number of alternatives that require a match with
1354 operand 0. */
1355 int n_matching_alts = 0;
1357 for (i = 1; i < recog_data.n_operands; i++)
1359 const char *p = recog_data.constraints[i];
1360 int this_match = requires_inout (p);
1362 n_matching_alts += this_match;
1363 if (this_match == recog_data.n_alternatives)
1364 must_match_0 = i;
1367 r0 = recog_data.operand[0];
1368 for (i = 1; i < recog_data.n_operands; i++)
1370 /* Skip this operand if we found an operand that
1371 must match operand 0 and this operand isn't it
1372 and can't be made to be it by commutativity. */
1374 if (must_match_0 >= 0 && i != must_match_0
1375 && ! (i == must_match_0 + 1
1376 && recog_data.constraints[i-1][0] == '%')
1377 && ! (i == must_match_0 - 1
1378 && recog_data.constraints[i][0] == '%'))
1379 continue;
1381 /* Likewise if each alternative has some operand that
1382 must match operand zero. In that case, skip any
1383 operand that doesn't list operand 0 since we know that
1384 the operand always conflicts with operand 0. We
1385 ignore commutativity in this case to keep things simple. */
1386 if (n_matching_alts == recog_data.n_alternatives
1387 && 0 == requires_inout (recog_data.constraints[i]))
1388 continue;
1390 r1 = recog_data.operand[i];
1392 /* If the operand is an address, find a register in it.
1393 There may be more than one register, but we only try one
1394 of them. */
1395 if (recog_data.constraints[i][0] == 'p'
1396 || EXTRA_ADDRESS_CONSTRAINT (recog_data.constraints[i][0],
1397 recog_data.constraints[i]))
1398 while (GET_CODE (r1) == PLUS || GET_CODE (r1) == MULT)
1399 r1 = XEXP (r1, 0);
1401 /* Avoid making a call-saved register unnecessarily
1402 clobbered. */
1403 hard_reg = get_hard_reg_initial_reg (cfun, r1);
1404 if (hard_reg != NULL_RTX)
1406 if (REG_P (hard_reg)
1407 && REGNO (hard_reg) < FIRST_PSEUDO_REGISTER
1408 && !call_used_regs[REGNO (hard_reg)])
1409 continue;
1412 if (REG_P (r0) || GET_CODE (r0) == SUBREG)
1414 /* We have two priorities for hard register preferences.
1415 If we have a move insn or an insn whose first input
1416 can only be in the same register as the output, give
1417 priority to an equivalence found from that insn. */
1418 int may_save_copy
1419 = (r1 == recog_data.operand[i] && must_match_0 >= 0);
1421 if (REG_P (r1) || GET_CODE (r1) == SUBREG)
1422 win = combine_regs (r1, r0, may_save_copy,
1423 insn_number, insn, 0);
1425 if (win)
1426 break;
1430 /* Recognize an insn sequence with an ultimate result
1431 which can safely overlap one of the inputs.
1432 The sequence begins with a CLOBBER of its result,
1433 and ends with an insn that copies the result to itself
1434 and has a REG_EQUAL note for an equivalent formula.
1435 That note indicates what the inputs are.
1436 The result and the input can overlap if each insn in
1437 the sequence either doesn't mention the input
1438 or has a REG_NO_CONFLICT note to inhibit the conflict.
1440 We do the combining test at the CLOBBER so that the
1441 destination register won't have had a quantity number
1442 assigned, since that would prevent combining. */
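/* Schematically, such a sequence looks like (details illustrative):

     (clobber r200)                  ; carries a REG_LIBCALL note
     ... insns computing into r200, each with a REG_NO_CONFLICT
         note naming an input such as r201 ...
     (set r200 r200)                 ; carries REG_RETVAL and
                                     ; REG_EQUAL (plus r201 r202)

   so r200 may safely share a hard register with r201.  */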
1444 if (optimize
1445 && GET_CODE (PATTERN (insn)) == CLOBBER
1446 && (r0 = XEXP (PATTERN (insn), 0),
1447 REG_P (r0))
1448 && (link = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0
1449 && XEXP (link, 0) != 0
1450 && NONJUMP_INSN_P (XEXP (link, 0))
1451 && (set = single_set (XEXP (link, 0))) != 0
1452 && SET_DEST (set) == r0 && SET_SRC (set) == r0
1453 && (note = find_reg_note (XEXP (link, 0), REG_EQUAL,
1454 NULL_RTX)) != 0)
1456 if (r1 = XEXP (note, 0), REG_P (r1)
1457 /* Check that we have such a sequence. */
1458 && no_conflict_p (insn, r0, r1))
1459 win = combine_regs (r1, r0, 1, insn_number, insn, 1);
1460 else if (GET_RTX_FORMAT (GET_CODE (XEXP (note, 0)))[0] == 'e'
1461 && (r1 = XEXP (XEXP (note, 0), 0),
1462 REG_P (r1) || GET_CODE (r1) == SUBREG)
1463 && no_conflict_p (insn, r0, r1))
1464 win = combine_regs (r1, r0, 0, insn_number, insn, 1);
1466 /* Here we care if the operation to be computed is
1467 commutative. */
1468 else if (COMMUTATIVE_P (XEXP (note, 0))
1469 && (r1 = XEXP (XEXP (note, 0), 1),
1470 (REG_P (r1) || GET_CODE (r1) == SUBREG))
1471 && no_conflict_p (insn, r0, r1))
1472 win = combine_regs (r1, r0, 0, insn_number, insn, 1);
1474 /* If we did combine something, show the register number
1475 in question so that we know to ignore its death. */
1476 if (win)
1477 no_conflict_combined_regno = REGNO (r1);
1480 /* If registers were just tied, set COMBINED_REGNO
1481 to the number of the register used in this insn
1482 that was tied to the register set in this insn.
1483 This register's qty should not be "killed". */
1485 if (win)
1487 while (GET_CODE (r1) == SUBREG)
1488 r1 = SUBREG_REG (r1);
1489 combined_regno = REGNO (r1);
1492 /* Mark the death of everything that dies in this instruction,
1493 except for anything that was just combined. */
1495 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1496 if (REG_NOTE_KIND (link) == REG_DEAD
1497 && REG_P (XEXP (link, 0))
1498 && combined_regno != (int) REGNO (XEXP (link, 0))
1499 && (no_conflict_combined_regno != (int) REGNO (XEXP (link, 0))
1500 || ! find_reg_note (insn, REG_NO_CONFLICT,
1501 XEXP (link, 0))))
1502 wipe_dead_reg (XEXP (link, 0), 0);
1504 /* Allocate qty numbers for all registers local to this block
1505 that are born (set) in this instruction.
1506 A pseudo that already has a qty is not changed. */
1508 note_stores (PATTERN (insn), reg_is_set, NULL);
1510 /* If anything is set in this insn and then unused, mark it as dying
1511 after this insn, so it will conflict with our outputs. This
1512 can't match with something that combined, and it doesn't matter
1513 if it did. Do this after the calls to reg_is_set since these
1514 die after, not during, the current insn. */
1516 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1517 if (REG_NOTE_KIND (link) == REG_UNUSED
1518 && REG_P (XEXP (link, 0)))
1519 wipe_dead_reg (XEXP (link, 0), 1);
1521 /* If this is an insn that has a REG_RETVAL note pointing at a
1522 CLOBBER insn, we have reached the end of a REG_NO_CONFLICT
1523 block, so clear any register number that combined within it. */
1524 if ((note = find_reg_note (insn, REG_RETVAL, NULL_RTX)) != 0
1525 && NONJUMP_INSN_P (XEXP (note, 0))
1526 && GET_CODE (PATTERN (XEXP (note, 0))) == CLOBBER)
1527 no_conflict_combined_regno = -1;
1530 /* Set the registers live after INSN_NUMBER. Note that we never
1531 record the registers live before the block's first insn, since no
1532 pseudos we care about are live before that insn. */
1534 IOR_HARD_REG_SET (regs_live_at[2 * insn_number], regs_live);
1535 IOR_HARD_REG_SET (regs_live_at[2 * insn_number + 1], regs_live);
1537 if (insn == BB_END (BASIC_BLOCK (b)))
1538 break;
1540 insn = NEXT_INSN (insn);
1543 /* Now every register that is local to this basic block
1544 should have been given a quantity, or else -1 meaning ignore it.
1545 Every quantity should have a known birth and death.
1547 Order the qtys so we assign them registers in order of the
1548 number of suggested registers they need so we allocate those with
1549 the most restrictive needs first. */
1551 qty_order = XNEWVEC (int, next_qty);
1552 for (i = 0; i < next_qty; i++)
1553 qty_order[i] = i;
1555 #define EXCHANGE(I1, I2) \
1556 { i = qty_order[I1]; qty_order[I1] = qty_order[I2]; qty_order[I2] = i; }
1558 switch (next_qty)
1560 case 3:
1561 /* Make qty_order[2] be the one to allocate last. */
1562 if (qty_sugg_compare (0, 1) > 0)
1563 EXCHANGE (0, 1);
1564 if (qty_sugg_compare (1, 2) > 0)
1565 EXCHANGE (2, 1);
1567 /* ... Fall through ... */
1568 case 2:
1569 /* Put the best one to allocate in qty_order[0]. */
1570 if (qty_sugg_compare (0, 1) > 0)
1571 EXCHANGE (0, 1);
1573 /* ... Fall through ... */
1575 case 1:
1576 case 0:
1577 /* Nothing to do here. */
1578 break;
1580 default:
1581 qsort (qty_order, next_qty, sizeof (int), qty_sugg_compare_1);
1584 /* Try to put each quantity in a suggested physical register, if it has one.
1585 This may cause registers to be allocated that otherwise wouldn't be, but
1586 this seems acceptable in local allocation (unlike global allocation). */
1587 for (i = 0; i < next_qty; i++)
1589 q = qty_order[i];
1590 if (qty_phys_num_sugg[q] != 0 || qty_phys_num_copy_sugg[q] != 0)
1591 qty[q].phys_reg = find_free_reg (qty[q].min_class, qty[q].mode, q,
1592 0, 1, qty[q].birth, qty[q].death);
1593 else
1594 qty[q].phys_reg = -1;
1597 /* Order the qtys so we assign them registers in order of
1598 decreasing length of life. Normally call qsort, but if we
1599 have only a very small number of quantities, sort them ourselves. */
1601 for (i = 0; i < next_qty; i++)
1602 qty_order[i] = i;
1604 #define EXCHANGE(I1, I2) \
1605 { i = qty_order[I1]; qty_order[I1] = qty_order[I2]; qty_order[I2] = i; }
1607 switch (next_qty)
1609 case 3:
1610 /* Make qty_order[2] be the one to allocate last. */
1611 if (qty_compare (0, 1) > 0)
1612 EXCHANGE (0, 1);
1613 if (qty_compare (1, 2) > 0)
1614 EXCHANGE (2, 1);
1616 /* ... Fall through ... */
1617 case 2:
1618 /* Put the best one to allocate in qty_order[0]. */
1619 if (qty_compare (0, 1) > 0)
1620 EXCHANGE (0, 1);
1622 /* ... Fall through ... */
1624 case 1:
1625 case 0:
1626 /* Nothing to do here. */
1627 break;
1629 default:
1630 qsort (qty_order, next_qty, sizeof (int), qty_compare_1);
1633 /* Now for each qty that is not a hardware register,
1634 look for a hardware register to put it in.
1635 First try the register class that is cheapest for this qty,
1636 if there is more than one class. */
1638 for (i = 0; i < next_qty; i++)
1640 q = qty_order[i];
1641 if (qty[q].phys_reg < 0)
1643 #ifdef INSN_SCHEDULING
1644 /* These values represent the adjusted lifetime of a qty so
1645 that it conflicts with qtys which appear near the start/end
1646 of this qty's lifetime.
1648 The purpose behind extending the lifetime of this qty is to
1649 discourage the register allocator from creating false
1650 dependencies.
1652 The adjustment value is chosen to indicate that this qty
1653 conflicts with all the qtys in the instructions immediately
1654 before and after the lifetime of this qty.
1656 Experiments have shown that higher values tend to hurt
1657 overall code performance.
1659 If allocation using the extended lifetime fails we will try
1660 again with the qty's unadjusted lifetime. */
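/* For instance (illustrative numbers): a qty born at index 6 and dying
   at index 8 is treated below as if born at 4 and dying at 10, so it
   also conflicts with quantities of the neighboring insns; if that
   allocation fails, the real 6..8 range is tried instead.  */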
1661 int fake_birth = MAX (0, qty[q].birth - 2 + qty[q].birth % 2);
1662 int fake_death = MIN (insn_number * 2 + 1,
1663 qty[q].death + 2 - qty[q].death % 2);
1664 #endif
1666 if (N_REG_CLASSES > 1)
1668 #ifdef INSN_SCHEDULING
1669 /* We try to avoid using hard registers allocated to qtys which
1670 are born immediately after this qty or die immediately before
1671 this qty.
1673 This optimization is only appropriate when we will run
1674 a scheduling pass after reload and we are not optimizing
1675 for code size. */
1676 if (flag_schedule_insns_after_reload && dbg_cnt (local_alloc_for_sched)
1677 && !optimize_size
1678 && !SMALL_REGISTER_CLASSES)
1680 qty[q].phys_reg = find_free_reg (qty[q].min_class,
1681 qty[q].mode, q, 0, 0,
1682 fake_birth, fake_death);
1683 if (qty[q].phys_reg >= 0)
1684 continue;
1686 #endif
1687 qty[q].phys_reg = find_free_reg (qty[q].min_class,
1688 qty[q].mode, q, 0, 0,
1689 qty[q].birth, qty[q].death);
1690 if (qty[q].phys_reg >= 0)
1691 continue;
1694 #ifdef INSN_SCHEDULING
1695 /* Similarly, avoid false dependencies. */
1696 if (flag_schedule_insns_after_reload && dbg_cnt (local_alloc_for_sched)
1697 && !optimize_size
1698 && !SMALL_REGISTER_CLASSES
1699 && qty[q].alternate_class != NO_REGS)
1700 qty[q].phys_reg = find_free_reg (qty[q].alternate_class,
1701 qty[q].mode, q, 0, 0,
1702 fake_birth, fake_death);
1703 #endif
1704 if (qty[q].alternate_class != NO_REGS)
1705 qty[q].phys_reg = find_free_reg (qty[q].alternate_class,
1706 qty[q].mode, q, 0, 0,
1707 qty[q].birth, qty[q].death);
1711 /* Now propagate the register assignments
1712 to the pseudo regs belonging to the qtys. */
1714 for (q = 0; q < next_qty; q++)
1715 if (qty[q].phys_reg >= 0)
1717 for (i = qty[q].first_reg; i >= 0; i = reg_next_in_qty[i])
1718 reg_renumber[i] = qty[q].phys_reg + reg_offset[i];
1721 /* Clean up. */
1722 free (regs_live_at);
1723 free (qty_order);
1726 /* Compare two quantities' priority for getting real registers.
1727 We give shorter-lived quantities higher priority.
1728 Quantities with more references are also preferred, as are quantities that
1729 require multiple registers. This is the identical prioritization as
1730 done by global-alloc.
1732 We used to give preference to registers with *longer* lives, but using
1733 the same algorithm in both local- and global-alloc can speed up execution
1734 of some programs by as much as a factor of three! */
1736 /* Note that the quotient will never be bigger than
1737 the value of floor_log2 times the maximum number of
1738 times a register can occur in one insn (surely less than 100)
1739 weighted by frequency (max REG_FREQ_MAX).
1740 Multiplying this by 10000/REG_FREQ_MAX can't overflow.
1741 QTY_CMP_PRI is also used by qty_sugg_compare. */
1743 #define QTY_CMP_PRI(q) \
1744 ((int) (((double) (floor_log2 (qty[q].n_refs) * qty[q].freq * qty[q].size) \
1745 / (qty[q].death - qty[q].birth)) * (10000 / REG_FREQ_MAX)))
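/* For example (illustrative values): a qty with 4 references
   (floor_log2 (4) == 2), frequency 1000 and size 1 that lives from
   index 3 to index 9 gets priority (2 * 1000 * 1 / 6) * (10000 / REG_FREQ_MAX);
   halving the lifetime would double the priority.  */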
1747 static int
1748 qty_compare (int q1, int q2)
1749 {
1750 return QTY_CMP_PRI (q2) - QTY_CMP_PRI (q1);
1751 }
1753 static int
1754 qty_compare_1 (const void *q1p, const void *q2p)
1755 {
1756 int q1 = *(const int *) q1p, q2 = *(const int *) q2p;
1757 int tem = QTY_CMP_PRI (q2) - QTY_CMP_PRI (q1);
1759 if (tem != 0)
1760 return tem;
1762 /* If qtys are equally good, sort by qty number,
1763 so that the results of qsort leave nothing to chance. */
1764 return q1 - q2;
1765 }
1767 /* Compare two quantities' priority for getting real registers. This version
1768 is called for quantities that have suggested hard registers. First priority
1769 goes to quantities that have copy preferences, then to those that have
1770 normal preferences. Within those groups, quantities with the lower
1771 number of preferences have the highest priority. Of those, we use the same
1772 algorithm as above. */
1774 #define QTY_CMP_SUGG(q) \
1775 (qty_phys_num_copy_sugg[q] \
1776 ? qty_phys_num_copy_sugg[q] \
1777 : qty_phys_num_sugg[q] * FIRST_PSEUDO_REGISTER)
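/* Since ordinary suggestions are scaled by FIRST_PSEUDO_REGISTER, a qty
   with any copy suggestion sorts ahead of one that has only ordinary
   suggestions, and within each group a smaller suggestion count (a more
   constrained qty) comes first.  */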
1779 static int
1780 qty_sugg_compare (int q1, int q2)
1781 {
1782 int tem = QTY_CMP_SUGG (q1) - QTY_CMP_SUGG (q2);
1784 if (tem != 0)
1785 return tem;
1787 return QTY_CMP_PRI (q2) - QTY_CMP_PRI (q1);
1788 }
1790 static int
1791 qty_sugg_compare_1 (const void *q1p, const void *q2p)
1792 {
1793 int q1 = *(const int *) q1p, q2 = *(const int *) q2p;
1794 int tem = QTY_CMP_SUGG (q1) - QTY_CMP_SUGG (q2);
1796 if (tem != 0)
1797 return tem;
1799 tem = QTY_CMP_PRI (q2) - QTY_CMP_PRI (q1);
1800 if (tem != 0)
1801 return tem;
1803 /* If qtys are equally good, sort by qty number,
1804 so that the results of qsort leave nothing to chance. */
1805 return q1 - q2;
1806 }
1808 #undef QTY_CMP_SUGG
1809 #undef QTY_CMP_PRI
1811 /* Attempt to combine the two registers (rtx's) USEDREG and SETREG.
1812 Returns 1 if we have done so, or 0 if we cannot.
1814 Combining registers means marking them as having the same quantity
1815 and adjusting the offsets within the quantity if either of
1816 them is a SUBREG.
1818 We don't actually combine a hard reg with a pseudo; instead
1819 we just record the hard reg as the suggestion for the pseudo's quantity.
1820 If we really combined them, we could lose if the pseudo lives
1821 across an insn that clobbers the hard reg (e.g., movmem).
1823 ALREADY_DEAD is nonzero if USEDREG is known to be dead even though
1824 there is no REG_DEAD note on INSN. This occurs during the processing
1825 of REG_NO_CONFLICT blocks.
1827 MAY_SAVE_COPY is nonzero if this insn is simply copying USEDREG to
1828 SETREG or if the input and output must share a register.
1829 In that case, we record a hard reg suggestion in QTY_PHYS_COPY_SUGG.
1831 There are elaborate checks for the validity of combining. */
1833 static int
1834 combine_regs (rtx usedreg, rtx setreg, int may_save_copy, int insn_number,
1835 rtx insn, int already_dead)
1837 int ureg, sreg;
1838 int offset = 0;
1839 int usize, ssize;
1840 int sqty;
1842 /* Determine the numbers and sizes of registers being used. If a subreg
1843 is present that does not change the entire register, don't consider
1844 this a copy insn. */
1846 while (GET_CODE (usedreg) == SUBREG)
1848 rtx subreg = SUBREG_REG (usedreg);
1850 if (REG_P (subreg))
1852 if (GET_MODE_SIZE (GET_MODE (subreg)) > UNITS_PER_WORD)
1853 may_save_copy = 0;
1855 if (REGNO (subreg) < FIRST_PSEUDO_REGISTER)
1856 offset += subreg_regno_offset (REGNO (subreg),
1857 GET_MODE (subreg),
1858 SUBREG_BYTE (usedreg),
1859 GET_MODE (usedreg));
1860 else
1861 offset += (SUBREG_BYTE (usedreg)
1862 / REGMODE_NATURAL_SIZE (GET_MODE (usedreg)));
1865 usedreg = subreg;
1868 if (!REG_P (usedreg))
1869 return 0;
1871 ureg = REGNO (usedreg);
1872 if (ureg < FIRST_PSEUDO_REGISTER)
1873 usize = hard_regno_nregs[ureg][GET_MODE (usedreg)];
1874 else
1875 usize = ((GET_MODE_SIZE (GET_MODE (usedreg))
1876 + (REGMODE_NATURAL_SIZE (GET_MODE (usedreg)) - 1))
1877 / REGMODE_NATURAL_SIZE (GET_MODE (usedreg)));
1879 while (GET_CODE (setreg) == SUBREG)
1881 rtx subreg = SUBREG_REG (setreg);
1883 if (REG_P (subreg))
1885 if (GET_MODE_SIZE (GET_MODE (subreg)) > UNITS_PER_WORD)
1886 may_save_copy = 0;
1888 if (REGNO (subreg) < FIRST_PSEUDO_REGISTER)
1889 offset -= subreg_regno_offset (REGNO (subreg),
1890 GET_MODE (subreg),
1891 SUBREG_BYTE (setreg),
1892 GET_MODE (setreg));
1893 else
1894 offset -= (SUBREG_BYTE (setreg)
1895 / REGMODE_NATURAL_SIZE (GET_MODE (setreg)));
1898 setreg = subreg;
1901 if (!REG_P (setreg))
1902 return 0;
1904 sreg = REGNO (setreg);
1905 if (sreg < FIRST_PSEUDO_REGISTER)
1906 ssize = hard_regno_nregs[sreg][GET_MODE (setreg)];
1907 else
1908 ssize = ((GET_MODE_SIZE (GET_MODE (setreg))
1909 + (REGMODE_NATURAL_SIZE (GET_MODE (setreg)) - 1))
1910 / REGMODE_NATURAL_SIZE (GET_MODE (setreg)));
1912 /* If UREG is a pseudo-register that hasn't already been assigned a
1913 quantity number, it means that it is not local to this block or dies
1914 more than once. In either event, we can't do anything with it. */
1915 if ((ureg >= FIRST_PSEUDO_REGISTER && reg_qty[ureg] < 0)
1916 /* Do not combine registers unless one fits within the other. */
1917 || (offset > 0 && usize + offset > ssize)
1918 || (offset < 0 && usize + offset < ssize)
1919 /* Do not combine with a smaller already-assigned object
1920 if that smaller object is already combined with something bigger. */
1921 || (ssize > usize && ureg >= FIRST_PSEUDO_REGISTER
1922 && usize < qty[reg_qty[ureg]].size)
1923 /* Can't combine if SREG is not a register we can allocate. */
1924 || (sreg >= FIRST_PSEUDO_REGISTER && reg_qty[sreg] == -1)
1925 /* Don't combine with a pseudo mentioned in a REG_NO_CONFLICT note.
1926 These have already been taken care of. This probably wouldn't
1927 combine anyway, but don't take any chances. */
1928 || (ureg >= FIRST_PSEUDO_REGISTER
1929 && find_reg_note (insn, REG_NO_CONFLICT, usedreg))
1930 /* Don't tie something to itself. In most cases it would make no
1931 difference, but it would screw up if the reg being tied to itself
1932 also dies in this insn. */
1933 || ureg == sreg
1934 /* Don't try to connect two different hardware registers. */
1935 || (ureg < FIRST_PSEUDO_REGISTER && sreg < FIRST_PSEUDO_REGISTER)
1936 /* Don't connect two different machine modes if they have different
1937 implications as to which registers may be used. */
1938 || !MODES_TIEABLE_P (GET_MODE (usedreg), GET_MODE (setreg)))
1939 return 0;
1941 /* Now, if UREG is a hard reg and SREG is a pseudo, record the hard reg in
1942 qty_phys_sugg for the pseudo instead of tying them.
1944 Return "failure" so that the lifespan of UREG is terminated here;
1945 that way the two lifespans will be disjoint and nothing will prevent
1946 the pseudo reg from being given this hard reg. */
1948 if (ureg < FIRST_PSEUDO_REGISTER)
1950 /* Allocate a quantity number so we have a place to put our
1951 suggestions. */
1952 if (reg_qty[sreg] == -2)
1953 reg_is_born (setreg, 2 * insn_number);
1955 if (reg_qty[sreg] >= 0)
1957 if (may_save_copy
1958 && ! TEST_HARD_REG_BIT (qty_phys_copy_sugg[reg_qty[sreg]], ureg))
1960 SET_HARD_REG_BIT (qty_phys_copy_sugg[reg_qty[sreg]], ureg);
1961 qty_phys_num_copy_sugg[reg_qty[sreg]]++;
1963 else if (! TEST_HARD_REG_BIT (qty_phys_sugg[reg_qty[sreg]], ureg))
1965 SET_HARD_REG_BIT (qty_phys_sugg[reg_qty[sreg]], ureg);
1966 qty_phys_num_sugg[reg_qty[sreg]]++;
1969 return 0;
1972 /* Similarly for SREG a hard register and UREG a pseudo register. */
1974 if (sreg < FIRST_PSEUDO_REGISTER)
1976 if (may_save_copy
1977 && ! TEST_HARD_REG_BIT (qty_phys_copy_sugg[reg_qty[ureg]], sreg))
1979 SET_HARD_REG_BIT (qty_phys_copy_sugg[reg_qty[ureg]], sreg);
1980 qty_phys_num_copy_sugg[reg_qty[ureg]]++;
1982 else if (! TEST_HARD_REG_BIT (qty_phys_sugg[reg_qty[ureg]], sreg))
1984 SET_HARD_REG_BIT (qty_phys_sugg[reg_qty[ureg]], sreg);
1985 qty_phys_num_sugg[reg_qty[ureg]]++;
1987 return 0;
1990 /* At this point we know that SREG and UREG are both pseudos.
1991 Do nothing if SREG already has a quantity or is a register that we
1992 don't allocate. */
1993 if (reg_qty[sreg] >= -1
1994 /* If we are not going to let any regs live across calls,
1995 don't tie a call-crossing reg to a non-call-crossing reg. */
1996 || (current_function_has_nonlocal_label
1997 && ((REG_N_CALLS_CROSSED (ureg) > 0)
1998 != (REG_N_CALLS_CROSSED (sreg) > 0))))
1999 return 0;
2001 /* We don't already know about SREG, so tie it to UREG
2002 if this is the last use of UREG, provided the classes they want
2003 are compatible. */
2005 if ((already_dead || find_regno_note (insn, REG_DEAD, ureg))
2006 && reg_meets_class_p (sreg, qty[reg_qty[ureg]].min_class))
2008 /* Add SREG to UREG's quantity. */
2009 sqty = reg_qty[ureg];
2010 reg_qty[sreg] = sqty;
2011 reg_offset[sreg] = reg_offset[ureg] + offset;
2012 reg_next_in_qty[sreg] = qty[sqty].first_reg;
2013 qty[sqty].first_reg = sreg;
2015 /* If SREG's reg class is smaller, set qty[SQTY].min_class. */
2016 update_qty_class (sqty, sreg);
2018 /* Update info about quantity SQTY. */
2019 qty[sqty].n_calls_crossed += REG_N_CALLS_CROSSED (sreg);
2020 qty[sqty].n_throwing_calls_crossed
2021 += REG_N_THROWING_CALLS_CROSSED (sreg);
2022 qty[sqty].n_refs += REG_N_REFS (sreg);
2023 qty[sqty].freq += REG_FREQ (sreg);
2024 if (usize < ssize)
2026 int i;
2028 for (i = qty[sqty].first_reg; i >= 0; i = reg_next_in_qty[i])
2029 reg_offset[i] -= offset;
2031 qty[sqty].size = ssize;
2032 qty[sqty].mode = GET_MODE (setreg);
2035 else
2036 return 0;
2038 return 1;
2041 /* Return 1 if the preferred class of REG allows it to be tied
2042 to a quantity or register whose class is CLASS.
2043 True if REG's reg class either contains or is contained in CLASS. */
2045 static int
2046 reg_meets_class_p (int reg, enum reg_class class)
2047 {
2048 enum reg_class rclass = reg_preferred_class (reg);
2049 return (reg_class_subset_p (rclass, class)
2050 || reg_class_subset_p (class, rclass));
2051 }
2053 /* Update the class of QTYNO assuming that REG is being tied to it. */
2055 static void
2056 update_qty_class (int qtyno, int reg)
2057 {
2058 enum reg_class rclass = reg_preferred_class (reg);
2059 if (reg_class_subset_p (rclass, qty[qtyno].min_class))
2060 qty[qtyno].min_class = rclass;
2062 rclass = reg_alternate_class (reg);
2063 if (reg_class_subset_p (rclass, qty[qtyno].alternate_class))
2064 qty[qtyno].alternate_class = rclass;
2065 }
2067 /* Handle something which alters the value of an rtx REG.
2069 REG is whatever is set or clobbered. SETTER is the rtx that
2070 is modifying the register.
2072 If it is not really a register, we do nothing.
2073 The file-global variables `this_insn' and `this_insn_number'
2074 carry info from `block_alloc'. */
2076 static void
2077 reg_is_set (rtx reg, const_rtx setter, void *data ATTRIBUTE_UNUSED)
2078 {
2079 /* Note that note_stores will only pass us a SUBREG if it is a SUBREG of
2080 a hard register. These may actually not exist any more. */
2082 if (GET_CODE (reg) != SUBREG
2083 && !REG_P (reg))
2084 return;
2086 /* Mark this register as being born. If it is used in a CLOBBER, mark
2087 it as being born halfway between the previous insn and this insn so that
2088 it conflicts with our inputs but not the outputs of the previous insn. */
2090 reg_is_born (reg, 2 * this_insn_number - (GET_CODE (setter) == CLOBBER));
2091 }
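
/* A worked instance of the birth-index arithmetic used just above, with
   invented numbers: if this_insn_number is 7, a register set by the insn
   is born at index 2 * 7 = 14, while a register that is merely CLOBBERed
   is born at index 13.  Odd indices fall between insns, so the clobbered
   register conflicts with the inputs of insn 7 but not with the outputs
   of insn 6.  */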
2093 /* Handle beginning of the life of register REG.
2094 BIRTH is the index at which this is happening. */
2096 static void
2097 reg_is_born (rtx reg, int birth)
2098 {
2099 int regno;
2101 if (GET_CODE (reg) == SUBREG)
2102 {
2103 regno = REGNO (SUBREG_REG (reg));
2104 if (regno < FIRST_PSEUDO_REGISTER)
2105 regno = subreg_regno (reg);
2106 }
2107 else
2108 regno = REGNO (reg);
2110 if (regno < FIRST_PSEUDO_REGISTER)
2111 {
2112 mark_life (regno, GET_MODE (reg), 1);
2114 /* If the register was to have been born earlier than the present
2115 insn, mark it as live where it is actually born. */
2116 if (birth < 2 * this_insn_number)
2117 post_mark_life (regno, GET_MODE (reg), 1, birth, 2 * this_insn_number);
2118 }
2119 else
2120 {
2121 if (reg_qty[regno] == -2)
2122 alloc_qty (regno, GET_MODE (reg), PSEUDO_REGNO_SIZE (regno), birth);
2124 /* If this register has a quantity number, show that it isn't dead. */
2125 if (reg_qty[regno] >= 0)
2126 qty[reg_qty[regno]].death = -1;
2127 }
2128 }
2130 /* Record the death of REG in the current insn. If OUTPUT_P is nonzero,
2131 REG is an output that is dying (i.e., it is never used), otherwise it
2132 is an input (the normal case).
2133 If OUTPUT_P is 1, then we extend the life past the end of this insn. */
2135 static void
2136 wipe_dead_reg (rtx reg, int output_p)
2138 int regno = REGNO (reg);
2140 /* If this insn has multiple results,
2141 and the dead reg is used in one of the results,
2142 extend its life to after this insn,
2143 so it won't get allocated together with any other result of this insn.
2145 It is unsafe to use !single_set here since it will ignore an unused
2146 output. Just because an output is unused does not mean the compiler
2147 can assume the side effect will not occur. Consider if REG appears
2148 in the address of an output and we reload the output. If we allocate
2149 REG to the same hard register as an unused output we could set the hard
2150 register before the output reload insn. */
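/* For instance (made-up RTL): in a PARALLEL such as

     (parallel [(set (reg 200) (...))
                (set (mem (plus (reg 150) (const_int 8))) (...))])

   where (reg 150) dies in this insn, reg 150 is mentioned in the address
   of the second output, so its life is extended past the insn and it will
   not be allocated to the same hard register as reg 200.  */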
2151 if (GET_CODE (PATTERN (this_insn)) == PARALLEL
2152 && multiple_sets (this_insn))
2154 int i;
2155 for (i = XVECLEN (PATTERN (this_insn), 0) - 1; i >= 0; i--)
2157 rtx set = XVECEXP (PATTERN (this_insn), 0, i);
2158 if (GET_CODE (set) == SET
2159 && !REG_P (SET_DEST (set))
2160 && !rtx_equal_p (reg, SET_DEST (set))
2161 && reg_overlap_mentioned_p (reg, SET_DEST (set)))
2162 output_p = 1;
2166 /* If this register is used in an auto-increment address, then extend its
2167 life to after this insn, so that it won't get allocated together with
2168 the result of this insn. */
2169 if (! output_p && find_regno_note (this_insn, REG_INC, regno))
2170 output_p = 1;
2172 if (regno < FIRST_PSEUDO_REGISTER)
2174 mark_life (regno, GET_MODE (reg), 0);
2176 /* If a hard register is dying as an output, mark it as in use at
2177 the beginning of this insn (the above statement would cause this
2178 not to happen). */
2179 if (output_p)
2180 post_mark_life (regno, GET_MODE (reg), 1,
2181 2 * this_insn_number, 2 * this_insn_number + 1);
2184 else if (reg_qty[regno] >= 0)
2185 qty[reg_qty[regno]].death = 2 * this_insn_number + output_p;
2188 /* Find a block of SIZE words of hard regs in reg_class CLASS
2189 that can hold something of machine-mode MODE
2190 (but actually we test only the first of the block for holding MODE)
2191 and still free between insn BORN_INDEX and insn DEAD_INDEX,
2192 and return the number of the first of them.
2193 Return -1 if such a block cannot be found.
2194 If QTYNO crosses calls, insist on a register preserved by calls,
2195 unless ACCEPT_CALL_CLOBBERED is nonzero.
2197 If JUST_TRY_SUGGESTED is nonzero, only try to see if the suggested
2198 register is available. If not, return -1. */
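
/* An illustrative call (parameter values invented, not copied from the
   real caller earlier in this file): to look for a home for quantity Q
   in its preferred class between its birth and death, without allowing
   call-clobbered registers and without restricting the search to the
   suggested registers:

     regno = find_free_reg (qty[q].min_class, qty[q].mode, q,
                            0, 0, qty[q].birth, qty[q].death);
     if (regno >= 0)
       ... give quantity Q hard register REGNO ...

   The caller may then retry with ACCEPT_CALL_CLOBBERED or
   JUST_TRY_SUGGESTED nonzero, as the recursive calls below do.  */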
2200 static int
2201 find_free_reg (enum reg_class class, enum machine_mode mode, int qtyno,
2202 int accept_call_clobbered, int just_try_suggested,
2203 int born_index, int dead_index)
2205 int i, ins;
2206 HARD_REG_SET first_used, used;
2207 #ifdef ELIMINABLE_REGS
2208 static const struct {const int from, to; } eliminables[] = ELIMINABLE_REGS;
2209 #endif
2211 /* Validate our parameters. */
2212 gcc_assert (born_index >= 0 && born_index <= dead_index);
2214 /* Don't let a pseudo live in a reg across a function call
2215 if we might get a nonlocal goto. */
2216 if (current_function_has_nonlocal_label
2217 && qty[qtyno].n_calls_crossed > 0)
2218 return -1;
2220 if (accept_call_clobbered)
2221 COPY_HARD_REG_SET (used, call_fixed_reg_set);
2222 else if (qty[qtyno].n_calls_crossed == 0)
2223 COPY_HARD_REG_SET (used, fixed_reg_set);
2224 else
2225 COPY_HARD_REG_SET (used, call_used_reg_set);
2227 if (accept_call_clobbered)
2228 IOR_HARD_REG_SET (used, losing_caller_save_reg_set);
2230 for (ins = born_index; ins < dead_index; ins++)
2231 IOR_HARD_REG_SET (used, regs_live_at[ins]);
2233 IOR_COMPL_HARD_REG_SET (used, reg_class_contents[(int) class]);
2235 /* Don't use the frame pointer reg in local-alloc even if
2236 we may omit the frame pointer, because if we do that and then we
2237 need a frame pointer, reload won't know how to move the pseudo
2238 to another hard reg. It can move only regs made by global-alloc.
2240 This is true of any register that can be eliminated. */
2241 #ifdef ELIMINABLE_REGS
2242 for (i = 0; i < (int) ARRAY_SIZE (eliminables); i++)
2243 SET_HARD_REG_BIT (used, eliminables[i].from);
2244 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2245 /* If FRAME_POINTER_REGNUM is not a real register, then protect the one
2246 that it might be eliminated into. */
2247 SET_HARD_REG_BIT (used, HARD_FRAME_POINTER_REGNUM);
2248 #endif
2249 #else
2250 SET_HARD_REG_BIT (used, FRAME_POINTER_REGNUM);
2251 #endif
2253 #ifdef CANNOT_CHANGE_MODE_CLASS
2254 cannot_change_mode_set_regs (&used, mode, qty[qtyno].first_reg);
2255 #endif
2257 /* Normally, the registers that can be used for the first register in
2258 a multi-register quantity are the same as those that can be used for
2259 subsequent registers. However, if just trying suggested registers,
2260 restrict our consideration to them. If there are copy-suggested
2261 registers, try them. Otherwise, try the arithmetic-suggested
2262 registers. */
2263 COPY_HARD_REG_SET (first_used, used);
2265 if (just_try_suggested)
2267 if (qty_phys_num_copy_sugg[qtyno] != 0)
2268 IOR_COMPL_HARD_REG_SET (first_used, qty_phys_copy_sugg[qtyno]);
2269 else
2270 IOR_COMPL_HARD_REG_SET (first_used, qty_phys_sugg[qtyno]);
2273 /* If at least one would be suitable, test each hard reg. */
2274 if (!hard_reg_set_subset_p (reg_class_contents[(int) ALL_REGS], first_used))
2275 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2277 #ifdef REG_ALLOC_ORDER
2278 int regno = reg_alloc_order[i];
2279 #else
2280 int regno = i;
2281 #endif
2282 if (!TEST_HARD_REG_BIT (first_used, regno)
2283 && HARD_REGNO_MODE_OK (regno, mode)
2284 && (qty[qtyno].n_calls_crossed == 0
2285 || accept_call_clobbered
2286 || !HARD_REGNO_CALL_PART_CLOBBERED (regno, mode)))
2288 int j;
2289 int size1 = hard_regno_nregs[regno][mode];
2290 j = 1;
2291 while (j < size1 && !TEST_HARD_REG_BIT (used, regno + j))
2292 j++;
2293 if (j == size1)
2295 /* Mark that this register is in use between its birth
2296 and death insns. */
2297 post_mark_life (regno, mode, 1, born_index, dead_index);
2298 return regno;
2300 #ifndef REG_ALLOC_ORDER
2301 /* Skip starting points we know will lose. */
2302 i += j;
2303 #endif
2307 /* If we are just trying suggested registers, we have just tried copy-
2308 suggested registers, and there are arithmetic-suggested registers,
2309 try them. */
2311 /* If it would be profitable to allocate a call-clobbered register
2312 and save and restore it around calls, do that. */
2313 if (just_try_suggested && qty_phys_num_copy_sugg[qtyno] != 0
2314 && qty_phys_num_sugg[qtyno] != 0)
2316 /* Don't try the copy-suggested regs again. */
2317 qty_phys_num_copy_sugg[qtyno] = 0;
2318 return find_free_reg (class, mode, qtyno, accept_call_clobbered, 1,
2319 born_index, dead_index);
2322 /* We need not check to see if the current function has nonlocal
2323 labels because we don't put any pseudos that are live over calls in
2324 registers in that case. Avoid putting pseudos crossing calls that
2325 might throw into call used registers. */
2327 if (! accept_call_clobbered
2328 && flag_caller_saves
2329 && ! just_try_suggested
2330 && qty[qtyno].n_calls_crossed != 0
2331 && qty[qtyno].n_throwing_calls_crossed == 0
2332 && CALLER_SAVE_PROFITABLE (qty[qtyno].n_refs,
2333 qty[qtyno].n_calls_crossed))
2335 i = find_free_reg (class, mode, qtyno, 1, 0, born_index, dead_index);
2336 if (i >= 0)
2337 caller_save_needed = 1;
2338 return i;
2340 return -1;
2343 /* Mark that REGNO with machine-mode MODE is live starting from the current
2344 insn (if LIFE is nonzero) or dead starting at the current insn (if LIFE
2345 is zero). */
2347 static void
2348 mark_life (int regno, enum machine_mode mode, int life)
2349 {
2350 if (life)
2351 add_to_hard_reg_set (&regs_live, mode, regno);
2352 else
2353 remove_from_hard_reg_set (&regs_live, mode, regno);
2354 }
2356 /* Mark register number REGNO (with machine-mode MODE) as live (if LIFE
2357 is nonzero) or dead (if LIFE is zero) from insn number BIRTH (inclusive)
2358 to insn number DEATH (exclusive). */
2360 static void
2361 post_mark_life (int regno, enum machine_mode mode, int life, int birth,
2362 int death)
2363 {
2364 HARD_REG_SET this_reg;
2366 CLEAR_HARD_REG_SET (this_reg);
2367 add_to_hard_reg_set (&this_reg, mode, regno);
2369 if (life)
2370 while (birth < death)
2371 {
2372 IOR_HARD_REG_SET (regs_live_at[birth], this_reg);
2373 birth++;
2374 }
2375 else
2376 while (birth < death)
2377 {
2378 AND_COMPL_HARD_REG_SET (regs_live_at[birth], this_reg);
2379 birth++;
2380 }
2381 }
2383 /* INSN is the CLOBBER insn that starts a REG_NO_CONFLICT block, R0
2384 is the register being clobbered, and R1 is a register being used in
2385 the equivalent expression.
2387 If R1 dies in the block and has a REG_NO_CONFLICT note on every insn
2388 in which it is used, return 1.
2390 Otherwise, return 0. */
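
/* The kind of block this function examines looks roughly like the
   following (a sketch only, not taken from a real dump):

     INSN:  (clobber (reg 200))                  REG_LIBCALL note -> LAST
            (set (reg 201) (... (reg 150) ...))  REG_NO_CONFLICT (reg 150)
            ...
     LAST:  (set (reg 200) (reg 201))

   With R0 = (reg 200) and R1 = (reg 150), we return 1 only if reg 150
   dies somewhere in the block and every insn strictly between INSN and
   LAST carries a REG_NO_CONFLICT note for it.  */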
2392 static int
2393 no_conflict_p (rtx insn, rtx r0 ATTRIBUTE_UNUSED, rtx r1)
2395 int ok = 0;
2396 rtx note = find_reg_note (insn, REG_LIBCALL, NULL_RTX);
2397 rtx p, last;
2399 /* If R1 is a hard register, return 0 since we handle this case
2400 when we scan the insns that actually use it. */
2402 if (note == 0
2403 || (REG_P (r1) && REGNO (r1) < FIRST_PSEUDO_REGISTER)
2404 || (GET_CODE (r1) == SUBREG && REG_P (SUBREG_REG (r1))
2405 && REGNO (SUBREG_REG (r1)) < FIRST_PSEUDO_REGISTER))
2406 return 0;
2408 last = XEXP (note, 0);
2410 for (p = NEXT_INSN (insn); p && p != last; p = NEXT_INSN (p))
2411 if (INSN_P (p))
2413 if (find_reg_note (p, REG_DEAD, r1))
2414 ok = 1;
2416 /* There must be a REG_NO_CONFLICT note on every insn, otherwise
2417 some earlier optimization pass has inserted instructions into
2418 the sequence, and it is not safe to perform this optimization.
2419 Note that emit_no_conflict_block always ensures that this is
2420 true when these sequences are created. */
2421 if (! find_reg_note (p, REG_NO_CONFLICT, r1))
2422 return 0;
2425 return ok;
2428 /* Return the number of alternatives for which the constraint string P
2429 indicates that the operand must be equal to operand 0 and that no register
2430 is acceptable. */
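
/* Some illustrative (invented) constraint strings and what this function
   returns for them, assuming the standard constraint letters:

     "0"    -> 1   the only alternative requires a match with operand 0
                   and allows no register
     "r,0"  -> 1   the second alternative counts; the first allows a
                   register
     "0,0"  -> 2   both alternatives count
     "r0"   -> 0   a register is acceptable, so the match is not forced  */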
2432 static int
2433 requires_inout (const char *p)
2435 char c;
2436 int found_zero = 0;
2437 int reg_allowed = 0;
2438 int num_matching_alts = 0;
2439 int len;
2441 for ( ; (c = *p); p += len)
2443 len = CONSTRAINT_LEN (c, p);
2444 switch (c)
2446 case '=': case '+': case '?':
2447 case '#': case '&': case '!':
2448 case '*': case '%':
2449 case 'm': case '<': case '>': case 'V': case 'o':
2450 case 'E': case 'F': case 'G': case 'H':
2451 case 's': case 'i': case 'n':
2452 case 'I': case 'J': case 'K': case 'L':
2453 case 'M': case 'N': case 'O': case 'P':
2454 case 'X':
2455 /* These don't say anything we care about. */
2456 break;
2458 case ',':
2459 if (found_zero && ! reg_allowed)
2460 num_matching_alts++;
2462 found_zero = reg_allowed = 0;
2463 break;
2465 case '0':
2466 found_zero = 1;
2467 break;
2469 case '1': case '2': case '3': case '4': case '5':
2470 case '6': case '7': case '8': case '9':
2471 /* Skip the balance of the matching constraint. */
2472 do
2473 p++;
2474 while (ISDIGIT (*p));
2475 len = 0;
2476 break;
2478 default:
2479 if (REG_CLASS_FROM_CONSTRAINT (c, p) == NO_REGS
2480 && !EXTRA_ADDRESS_CONSTRAINT (c, p))
2481 break;
2482 /* Fall through. */
2483 case 'p':
2484 case 'g': case 'r':
2485 reg_allowed = 1;
2486 break;
2490 if (found_zero && ! reg_allowed)
2491 num_matching_alts++;
2493 return num_matching_alts;
2496 void
2497 dump_local_alloc (FILE *file)
2498 {
2499 int i;
2500 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
2501 if (reg_renumber[i] != -1)
2502 fprintf (file, ";; Register %d in %d.\n", i, reg_renumber[i]);
2503 }
2505 /* Run the old register allocator. Return TRUE if we must exit
2506 rest_of_compilation upon return. */
2507 static unsigned int
2508 rest_of_handle_local_alloc (void)
2510 int rebuild_notes;
2511 int max_regno = max_reg_num ();
2513 df_note_add_problem ();
2515 if (optimize > 1)
2516 df_remove_problem (df_live);
2517 /* Create a new version of df that has the special version of UR if
2518 we are doing optimization. */
2519 if (optimize)
2520 df_urec_add_problem ();
2521 #ifdef ENABLE_CHECKING
2522 df->changeable_flags |= DF_VERIFY_SCHEDULED;
2523 #endif
2524 df_analyze ();
2525 regstat_init_n_sets_and_refs ();
2526 regstat_compute_ri ();
2528 /* There is just too much going on in the register allocators to
2529 keep things up to date. At the end we have to rescan anyway
2530 because things change when the reload_completed flag is set.
2531 So we just turn off scanning and we will rescan by hand. */
2532 df_set_flags (DF_NO_INSN_RESCAN);
2535 /* If we are not optimizing, then this is the only place before
2536 register allocation where dataflow is done. And that is needed
2537 to generate these warnings. */
2538 if (warn_clobbered)
2539 generate_setjmp_warnings ();
2541 /* Determine if the current function is a leaf before running reload
2542 since this can impact optimizations done by the prologue and
2543 epilogue thus changing register elimination offsets. */
2544 current_function_is_leaf = leaf_function_p ();
2546 /* Allocate the reg_equiv_memory_loc array. */
2547 VEC_safe_grow (rtx, gc, reg_equiv_memory_loc_vec, max_regno);
2548 memset (VEC_address (rtx, reg_equiv_memory_loc_vec), 0,
2549 sizeof (rtx) * max_regno);
2550 reg_equiv_memory_loc = VEC_address (rtx, reg_equiv_memory_loc_vec);
2552 allocate_initial_values (reg_equiv_memory_loc);
2554 regclass (get_insns (), max_regno);
2555 rebuild_notes = local_alloc ();
2557 /* Local allocation may have turned an indirect jump into a direct
2558 jump. If so, we must rebuild the JUMP_LABEL fields of jumping
2559 instructions. */
2560 if (rebuild_notes)
2562 timevar_push (TV_JUMP);
2564 rebuild_jump_labels (get_insns ());
2565 purge_all_dead_edges ();
2566 timevar_pop (TV_JUMP);
2569 if (dump_file && (dump_flags & TDF_DETAILS))
2571 timevar_push (TV_DUMP);
2572 dump_flow_info (dump_file, dump_flags);
2573 dump_local_alloc (dump_file);
2574 timevar_pop (TV_DUMP);
2576 return 0;
2579 struct tree_opt_pass pass_local_alloc =
2580 {
2581 "lreg", /* name */
2582 NULL, /* gate */
2583 rest_of_handle_local_alloc, /* execute */
2584 NULL, /* sub */
2585 NULL, /* next */
2586 0, /* static_pass_number */
2587 TV_LOCAL_ALLOC, /* tv_id */
2588 0, /* properties_required */
2589 0, /* properties_provided */
2590 0, /* properties_destroyed */
2591 0, /* todo_flags_start */
2592 TODO_dump_func |
2593 TODO_ggc_collect, /* todo_flags_finish */
2594 'l' /* letter */