1 /* Allocate registers within a basic block, for GNU compiler.
2 Copyright (C) 1987, 1988, 1991, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software Foundation,
4 Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
21 02110-1301, USA. */
23 /* Allocation of hard register numbers to pseudo registers is done in
24 two passes. In this pass we consider only regs that are born and
25 die once within one basic block. We do this one basic block at a
26 time. Then the next pass allocates the registers that remain.
27 Two passes are used because this pass uses methods that work only
28 on linear code, but that do a better job than the general methods
29 used in global_alloc, and more quickly too.
31 The assignments made are recorded in the vector reg_renumber
32 whose space is allocated here. The rtl code itself is not altered.
34 We assign each instruction in the basic block a number
35 which is its order from the beginning of the block.
36 Then we can represent the lifetime of a pseudo register with
37 a pair of numbers, and check for conflicts easily.
38 We can record the availability of hard registers with a
39 HARD_REG_SET for each instruction. The HARD_REG_SET
40 contains 0 or 1 for each hard reg.
42 To avoid register shuffling, we tie registers together when one
43 dies by being copied into another, or dies in an instruction that
44 does arithmetic to produce another. The tied registers are
45 allocated as one. Registers with different reg class preferences
46 can never be tied unless the class preferred by one is a subclass
47 of the one preferred by the other.
49 Tying is represented with "quantity numbers".
50 A non-tied register is given a new quantity number.
51 Tied registers have the same quantity number.
53 We have provision to exempt registers, even when they are contained
54 within the block, that can be tied to others that are not contained in it.
55 This is so that global_alloc could process them both and tie them then.
56 But this is currently disabled since tying in global_alloc is not
57 yet implemented. */
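/* To make the scheme above concrete (an illustrative sketch, not code
   used by this pass): once each local pseudo has a birth index and a
   death index within the block, two quantities need distinct hard
   registers exactly when their lifetimes overlap.  A hypothetical
   helper expressing that test might look like:

     static int
     lifetimes_conflict_p (int birth1, int death1, int birth2, int death2)
     {
       return birth1 < death2 && birth2 < death1;
     }

   The pass itself never calls such a function; it records hard register
   availability in the per-insn HARD_REG_SETs described above and below
   (see regs_live_at).  */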
59 /* Pseudos allocated here can be reallocated by global.c if the hard register
60 is used as a spill register. Currently we don't allocate such pseudos
61 here if their preferred class is likely to be used by spills. */
63 #include "config.h"
64 #include "system.h"
65 #include "coretypes.h"
66 #include "tm.h"
67 #include "hard-reg-set.h"
68 #include "rtl.h"
69 #include "tm_p.h"
70 #include "flags.h"
71 #include "regs.h"
72 #include "function.h"
73 #include "insn-config.h"
74 #include "insn-attr.h"
75 #include "recog.h"
76 #include "output.h"
77 #include "toplev.h"
78 #include "except.h"
79 #include "integrate.h"
80 #include "reload.h"
81 #include "ggc.h"
82 #include "timevar.h"
83 #include "tree-pass.h"
85 /* Next quantity number available for allocation. */
87 static int next_qty;
89 /* Information we maintain about each quantity. */
90 struct qty
92 /* The number of refs to quantity Q. */
94 int n_refs;
96 /* The frequency of uses of quantity Q. */
98 int freq;
100 /* Insn number (counting from head of basic block)
101 where quantity Q was born. -1 if birth has not been recorded. */
103 int birth;
105 /* Insn number (counting from head of basic block)
106 where given quantity died. Due to the way tying is done,
107 and the fact that we consider in this pass only regs that die but once,
108 a quantity can die only once. Each quantity's life span
109 is a set of consecutive insns. -1 if death has not been recorded. */
111 int death;
113 /* Number of words needed to hold the data in given quantity.
114 This depends on its machine mode. It is used for these purposes:
115 1. It is used in computing the relative importance of qtys,
116 which determines the order in which we look for regs for them.
117 2. It is used in rules that prevent tying several registers of
118 different sizes in a way that is geometrically impossible
119 (see combine_regs). */
121 int size;
123 /* Number of times a reg tied to given qty lives across a CALL_INSN. */
125 int n_calls_crossed;
127 /* Number of times a reg tied to given qty lives across a CALL_INSN
128 that might throw. */
130 int n_throwing_calls_crossed;
132 /* The register number of one pseudo register whose reg_qty value is Q.
133 This register should be the head of the chain
134 maintained in reg_next_in_qty. */
136 int first_reg;
138 /* Reg class contained in (smaller than) the preferred classes of all
139 the pseudo regs that are tied in given quantity.
140 This is the preferred class for allocating that quantity. */
142 enum reg_class min_class;
144 /* Register class within which we allocate given qty if we can't get
145 its preferred class. */
147 enum reg_class alternate_class;
149 /* This holds the mode of the registers that are tied to given qty,
150 or VOIDmode if registers with differing modes are tied together. */
152 enum machine_mode mode;
154 /* the hard reg number chosen for given quantity,
155 or -1 if none was found. */
157 short phys_reg;
160 static struct qty *qty;
 162 /* These fields are kept separately to speed up their clearing. */
164 /* We maintain two hard register sets that indicate suggested hard registers
165 for each quantity. The first, phys_copy_sugg, contains hard registers
166 that are tied to the quantity by a simple copy. The second contains all
167 hard registers that are tied to the quantity via an arithmetic operation.
169 The former register set is given priority for allocation. This tends to
170 eliminate copy insns. */
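/* For example (register numbers invented): a copy insn whose pattern is

     (set (reg:SI 2 cx) (reg:SI 100))

   in which pseudo 100 dies makes hard register 2 a copy suggestion for
   pseudo 100's quantity, so honoring it lets the copy vanish after
   allocation.  A suggestion coming only from an arithmetic insn such as

     (set (reg:SI 2 cx) (plus:SI (reg:SI 100) (const_int 1)))

   lands in qty_phys_sugg instead and is considered second.  */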
172 /* Element Q is a set of hard registers that are suggested for quantity Q by
173 copy insns. */
175 static HARD_REG_SET *qty_phys_copy_sugg;
177 /* Element Q is a set of hard registers that are suggested for quantity Q by
178 arithmetic insns. */
180 static HARD_REG_SET *qty_phys_sugg;
182 /* Element Q is the number of suggested registers in qty_phys_copy_sugg. */
184 static short *qty_phys_num_copy_sugg;
186 /* Element Q is the number of suggested registers in qty_phys_sugg. */
188 static short *qty_phys_num_sugg;
190 /* If (REG N) has been assigned a quantity number, is a register number
191 of another register assigned the same quantity number, or -1 for the
192 end of the chain. qty->first_reg point to the head of this chain. */
194 static int *reg_next_in_qty;
196 /* reg_qty[N] (where N is a pseudo reg number) is the qty number of that reg
197 if it is >= 0,
 198 or -1 if this register cannot be allocated by local-alloc,
199 or -2 if not known yet.
201 Note that if we see a use or death of pseudo register N with
202 reg_qty[N] == -2, register N must be local to the current block. If
203 it were used in more than one block, we would have reg_qty[N] == -1.
204 This relies on the fact that if reg_basic_block[N] is >= 0, register N
205 will not appear in any other block. We save a considerable number of
206 tests by exploiting this.
208 If N is < FIRST_PSEUDO_REGISTER, reg_qty[N] is undefined and should not
209 be referenced. */
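/* A hypothetical predicate restating this encoding (not part of the
   pass, shown only for illustration):

     static int
     local_alloc_candidate_p (int regno)
     {
       return regno >= FIRST_PSEUDO_REGISTER && reg_qty[regno] != -1;
     }

   That is, a pseudo remains a candidate for local allocation while its
   entry is still -2 (not yet seen in this block) or has already been
   given a quantity number (>= 0).  */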
211 static int *reg_qty;
213 /* The offset (in words) of register N within its quantity.
214 This can be nonzero if register N is SImode, and has been tied
215 to a subreg of a DImode register. */
217 static char *reg_offset;
219 /* Vector of substitutions of register numbers,
220 used to map pseudo regs into hardware regs.
221 This is set up as a result of register allocation.
222 Element N is the hard reg assigned to pseudo reg N,
223 or is -1 if no hard reg was assigned.
224 If N is a hard reg number, element N is N. */
226 short *reg_renumber;
228 /* Set of hard registers live at the current point in the scan
229 of the instructions in a basic block. */
231 static HARD_REG_SET regs_live;
233 /* Each set of hard registers indicates registers live at a particular
234 point in the basic block. For N even, regs_live_at[N] says which
235 hard registers are needed *after* insn N/2 (i.e., they may not
 236 conflict with the outputs of insn N/2 or the inputs of insn N/2 + 1).
238 If an object is to conflict with the inputs of insn J but not the
239 outputs of insn J + 1, we say it is born at index J*2 - 1. Similarly,
240 if it is to conflict with the outputs of insn J but not the inputs of
241 insn J + 1, it is said to die at index J*2 + 1. */
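/* Concretely (an illustration, not a specification): insn number K in
   the block updates the two slots regs_live_at[2*K] and
   regs_live_at[2*K + 1], and a quantity whose recorded lifetime covers
   the index range [birth, death) is marked in every slot of that range,
   roughly

     for (i = birth; i < death; i++)
       IOR_HARD_REG_SET (regs_live_at[i], this_qtys_regs);

   where this_qtys_regs stands for the hard registers the quantity
   occupies (the real work is done by mark_life and post_mark_life).
   The odd indices J*2 - 1 and J*2 + 1 let a value conflict with only
   the input side or only the output side of an insn.  */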
243 static HARD_REG_SET *regs_live_at;
245 /* Communicate local vars `insn_number' and `insn'
246 from `block_alloc' to `reg_is_set', `wipe_dead_reg', and `alloc_qty'. */
247 static int this_insn_number;
248 static rtx this_insn;
250 struct equivalence
252 /* Set when an attempt should be made to replace a register
253 with the associated src_p entry. */
255 char replace;
257 /* Set when a REG_EQUIV note is found or created. Use to
258 keep track of what memory accesses might be created later,
259 e.g. by reload. */
261 rtx replacement;
263 rtx *src_p;
265 /* Loop depth is used to recognize equivalences which appear
266 to be present within the same loop (or in an inner loop). */
268 int loop_depth;
270 /* The list of each instruction which initializes this register. */
272 rtx init_insns;
274 /* Nonzero if this had a preexisting REG_EQUIV note. */
276 int is_arg_equivalence;
279 /* reg_equiv[N] (where N is a pseudo reg number) is the equivalence
280 structure for that register. */
282 static struct equivalence *reg_equiv;
284 /* Nonzero if we recorded an equivalence for a LABEL_REF. */
285 static int recorded_label_ref;
287 static void alloc_qty (int, enum machine_mode, int, int);
288 static void validate_equiv_mem_from_store (rtx, rtx, void *);
289 static int validate_equiv_mem (rtx, rtx, rtx);
290 static int equiv_init_varies_p (rtx);
291 static int equiv_init_movable_p (rtx, int);
292 static int contains_replace_regs (rtx);
293 static int memref_referenced_p (rtx, rtx);
294 static int memref_used_between_p (rtx, rtx, rtx);
295 static void update_equiv_regs (void);
296 static void no_equiv (rtx, rtx, void *);
297 static void block_alloc (int);
298 static int qty_sugg_compare (int, int);
299 static int qty_sugg_compare_1 (const void *, const void *);
300 static int qty_compare (int, int);
301 static int qty_compare_1 (const void *, const void *);
302 static int combine_regs (rtx, rtx, int, int, rtx, int);
303 static int reg_meets_class_p (int, enum reg_class);
304 static void update_qty_class (int, int);
305 static void reg_is_set (rtx, rtx, void *);
306 static void reg_is_born (rtx, int);
307 static void wipe_dead_reg (rtx, int);
308 static int find_free_reg (enum reg_class, enum machine_mode, int, int, int,
309 int, int);
310 static void mark_life (int, enum machine_mode, int);
311 static void post_mark_life (int, enum machine_mode, int, int, int);
312 static int no_conflict_p (rtx, rtx, rtx);
313 static int requires_inout (const char *);
315 /* Allocate a new quantity (new within current basic block)
316 for register number REGNO which is born at index BIRTH
317 within the block. MODE and SIZE are info on reg REGNO. */
319 static void
320 alloc_qty (int regno, enum machine_mode mode, int size, int birth)
322 int qtyno = next_qty++;
324 reg_qty[regno] = qtyno;
325 reg_offset[regno] = 0;
326 reg_next_in_qty[regno] = -1;
328 qty[qtyno].first_reg = regno;
329 qty[qtyno].size = size;
330 qty[qtyno].mode = mode;
331 qty[qtyno].birth = birth;
332 qty[qtyno].n_calls_crossed = REG_N_CALLS_CROSSED (regno);
333 qty[qtyno].n_throwing_calls_crossed = REG_N_THROWING_CALLS_CROSSED (regno);
334 qty[qtyno].min_class = reg_preferred_class (regno);
335 qty[qtyno].alternate_class = reg_alternate_class (regno);
336 qty[qtyno].n_refs = REG_N_REFS (regno);
337 qty[qtyno].freq = REG_FREQ (regno);
340 /* Main entry point of this file. */
342 static int
343 local_alloc (void)
345 int i;
346 int max_qty;
347 basic_block b;
349 /* We need to keep track of whether or not we recorded a LABEL_REF so
350 that we know if the jump optimizer needs to be rerun. */
351 recorded_label_ref = 0;
353 /* Leaf functions and non-leaf functions have different needs.
354 If defined, let the machine say what kind of ordering we
355 should use. */
356 #ifdef ORDER_REGS_FOR_LOCAL_ALLOC
357 ORDER_REGS_FOR_LOCAL_ALLOC;
358 #endif
360 /* Promote REG_EQUAL notes to REG_EQUIV notes and adjust status of affected
361 registers. */
362 update_equiv_regs ();
364 /* This sets the maximum number of quantities we can have. Quantity
365 numbers start at zero and we can have one for each pseudo. */
366 max_qty = (max_regno - FIRST_PSEUDO_REGISTER);
368 /* Allocate vectors of temporary data.
369 See the declarations of these variables, above,
370 for what they mean. */
372 qty = XNEWVEC (struct qty, max_qty);
373 qty_phys_copy_sugg = XNEWVEC (HARD_REG_SET, max_qty);
374 qty_phys_num_copy_sugg = XNEWVEC (short, max_qty);
375 qty_phys_sugg = XNEWVEC (HARD_REG_SET, max_qty);
376 qty_phys_num_sugg = XNEWVEC (short, max_qty);
378 reg_qty = XNEWVEC (int, max_regno);
379 reg_offset = XNEWVEC (char, max_regno);
380 reg_next_in_qty = XNEWVEC (int, max_regno);
382 /* Determine which pseudo-registers can be allocated by local-alloc.
383 In general, these are the registers used only in a single block and
384 which only die once.
386 We need not be concerned with which block actually uses the register
387 since we will never see it outside that block. */
389 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
391 if (REG_BASIC_BLOCK (i) >= 0 && REG_N_DEATHS (i) == 1)
392 reg_qty[i] = -2;
393 else
394 reg_qty[i] = -1;
397 /* Force loop below to initialize entire quantity array. */
398 next_qty = max_qty;
400 /* Allocate each block's local registers, block by block. */
402 FOR_EACH_BB (b)
404 /* NEXT_QTY indicates which elements of the `qty_...'
405 vectors might need to be initialized because they were used
406 for the previous block; it is set to the entire array before
407 block 0. Initialize those, with explicit loop if there are few,
408 else with bzero and bcopy. Do not initialize vectors that are
 409 explicitly set by `alloc_qty'. */
411 if (next_qty < 6)
413 for (i = 0; i < next_qty; i++)
415 CLEAR_HARD_REG_SET (qty_phys_copy_sugg[i]);
416 qty_phys_num_copy_sugg[i] = 0;
417 CLEAR_HARD_REG_SET (qty_phys_sugg[i]);
418 qty_phys_num_sugg[i] = 0;
421 else
423 #define CLEAR(vector) \
424 memset ((vector), 0, (sizeof (*(vector))) * next_qty);
426 CLEAR (qty_phys_copy_sugg);
427 CLEAR (qty_phys_num_copy_sugg);
428 CLEAR (qty_phys_sugg);
429 CLEAR (qty_phys_num_sugg);
432 next_qty = 0;
434 block_alloc (b->index);
437 free (qty);
438 free (qty_phys_copy_sugg);
439 free (qty_phys_num_copy_sugg);
440 free (qty_phys_sugg);
441 free (qty_phys_num_sugg);
443 free (reg_qty);
444 free (reg_offset);
445 free (reg_next_in_qty);
447 return recorded_label_ref;
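/* The result of the pass is consumed through reg_renumber; a later pass
   asking where pseudo N ended up does, in effect (sketch only):

     int hard_regno = reg_renumber[N];
     if (hard_regno >= 0)
       ... pseudo N now lives in hard register hard_regno ...
     else
       ... pseudo N is left for global_alloc (or for a stack slot
           assigned by reload) ...

   The return value above feeds the "recorded a LABEL_REF equivalence"
   flag back to the caller so it knows whether the jump optimizer must
   be rerun.  */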
450 /* Used for communication between the following two functions: contains
451 a MEM that we wish to ensure remains unchanged. */
452 static rtx equiv_mem;
454 /* Set nonzero if EQUIV_MEM is modified. */
455 static int equiv_mem_modified;
457 /* If EQUIV_MEM is modified by modifying DEST, indicate that it is modified.
458 Called via note_stores. */
460 static void
461 validate_equiv_mem_from_store (rtx dest, rtx set ATTRIBUTE_UNUSED,
462 void *data ATTRIBUTE_UNUSED)
464 if ((REG_P (dest)
465 && reg_overlap_mentioned_p (dest, equiv_mem))
466 || (MEM_P (dest)
467 && true_dependence (dest, VOIDmode, equiv_mem, rtx_varies_p)))
468 equiv_mem_modified = 1;
471 /* Verify that no store between START and the death of REG invalidates
472 MEMREF. MEMREF is invalidated by modifying a register used in MEMREF,
473 by storing into an overlapping memory location, or with a non-const
474 CALL_INSN.
476 Return 1 if MEMREF remains valid. */
478 static int
479 validate_equiv_mem (rtx start, rtx reg, rtx memref)
481 rtx insn;
482 rtx note;
484 equiv_mem = memref;
485 equiv_mem_modified = 0;
487 /* If the memory reference has side effects or is volatile, it isn't a
488 valid equivalence. */
489 if (side_effects_p (memref))
490 return 0;
492 for (insn = start; insn && ! equiv_mem_modified; insn = NEXT_INSN (insn))
494 if (! INSN_P (insn))
495 continue;
497 if (find_reg_note (insn, REG_DEAD, reg))
498 return 1;
500 if (CALL_P (insn) && ! MEM_READONLY_P (memref)
501 && ! CONST_OR_PURE_CALL_P (insn))
502 return 0;
504 note_stores (PATTERN (insn), validate_equiv_mem_from_store, NULL);
506 /* If a register mentioned in MEMREF is modified via an
507 auto-increment, we lose the equivalence. Do the same if one
508 dies; although we could extend the life, it doesn't seem worth
509 the trouble. */
511 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
512 if ((REG_NOTE_KIND (note) == REG_INC
513 || REG_NOTE_KIND (note) == REG_DEAD)
514 && REG_P (XEXP (note, 0))
515 && reg_overlap_mentioned_p (XEXP (note, 0), memref))
516 return 0;
519 return 0;
522 /* Returns zero if X is known to be invariant. */
524 static int
525 equiv_init_varies_p (rtx x)
527 RTX_CODE code = GET_CODE (x);
528 int i;
529 const char *fmt;
531 switch (code)
533 case MEM:
534 return !MEM_READONLY_P (x) || equiv_init_varies_p (XEXP (x, 0));
536 case CONST:
537 case CONST_INT:
538 case CONST_DOUBLE:
539 case CONST_VECTOR:
540 case SYMBOL_REF:
541 case LABEL_REF:
542 return 0;
544 case REG:
545 return reg_equiv[REGNO (x)].replace == 0 && rtx_varies_p (x, 0);
547 case ASM_OPERANDS:
548 if (MEM_VOLATILE_P (x))
549 return 1;
551 /* Fall through. */
553 default:
554 break;
557 fmt = GET_RTX_FORMAT (code);
558 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
559 if (fmt[i] == 'e')
561 if (equiv_init_varies_p (XEXP (x, i)))
562 return 1;
564 else if (fmt[i] == 'E')
566 int j;
567 for (j = 0; j < XVECLEN (x, i); j++)
568 if (equiv_init_varies_p (XVECEXP (x, i, j)))
569 return 1;
572 return 0;
575 /* Returns nonzero if X (used to initialize register REGNO) is movable.
576 X is only movable if the registers it uses have equivalent initializations
577 which appear to be within the same loop (or in an inner loop) and movable
578 or if they are not candidates for local_alloc and don't vary. */
580 static int
581 equiv_init_movable_p (rtx x, int regno)
583 int i, j;
584 const char *fmt;
585 enum rtx_code code = GET_CODE (x);
587 switch (code)
589 case SET:
590 return equiv_init_movable_p (SET_SRC (x), regno);
592 case CC0:
593 case CLOBBER:
594 return 0;
596 case PRE_INC:
597 case PRE_DEC:
598 case POST_INC:
599 case POST_DEC:
600 case PRE_MODIFY:
601 case POST_MODIFY:
602 return 0;
604 case REG:
605 return (reg_equiv[REGNO (x)].loop_depth >= reg_equiv[regno].loop_depth
606 && reg_equiv[REGNO (x)].replace)
607 || (REG_BASIC_BLOCK (REGNO (x)) < 0 && ! rtx_varies_p (x, 0));
609 case UNSPEC_VOLATILE:
610 return 0;
612 case ASM_OPERANDS:
613 if (MEM_VOLATILE_P (x))
614 return 0;
616 /* Fall through. */
618 default:
619 break;
622 fmt = GET_RTX_FORMAT (code);
623 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
624 switch (fmt[i])
626 case 'e':
627 if (! equiv_init_movable_p (XEXP (x, i), regno))
628 return 0;
629 break;
630 case 'E':
631 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
632 if (! equiv_init_movable_p (XVECEXP (x, i, j), regno))
633 return 0;
634 break;
637 return 1;
640 /* TRUE if X uses any registers for which reg_equiv[REGNO].replace is true. */
642 static int
643 contains_replace_regs (rtx x)
645 int i, j;
646 const char *fmt;
647 enum rtx_code code = GET_CODE (x);
649 switch (code)
651 case CONST_INT:
652 case CONST:
653 case LABEL_REF:
654 case SYMBOL_REF:
655 case CONST_DOUBLE:
656 case CONST_VECTOR:
657 case PC:
658 case CC0:
659 case HIGH:
660 return 0;
662 case REG:
663 return reg_equiv[REGNO (x)].replace;
665 default:
666 break;
669 fmt = GET_RTX_FORMAT (code);
670 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
671 switch (fmt[i])
673 case 'e':
674 if (contains_replace_regs (XEXP (x, i)))
675 return 1;
676 break;
677 case 'E':
678 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
679 if (contains_replace_regs (XVECEXP (x, i, j)))
680 return 1;
681 break;
684 return 0;
687 /* TRUE if X references a memory location that would be affected by a store
688 to MEMREF. */
690 static int
691 memref_referenced_p (rtx memref, rtx x)
693 int i, j;
694 const char *fmt;
695 enum rtx_code code = GET_CODE (x);
697 switch (code)
699 case CONST_INT:
700 case CONST:
701 case LABEL_REF:
702 case SYMBOL_REF:
703 case CONST_DOUBLE:
704 case CONST_VECTOR:
705 case PC:
706 case CC0:
707 case HIGH:
708 case LO_SUM:
709 return 0;
711 case REG:
712 return (reg_equiv[REGNO (x)].replacement
713 && memref_referenced_p (memref,
714 reg_equiv[REGNO (x)].replacement));
716 case MEM:
717 if (true_dependence (memref, VOIDmode, x, rtx_varies_p))
718 return 1;
719 break;
721 case SET:
722 /* If we are setting a MEM, it doesn't count (its address does), but any
723 other SET_DEST that has a MEM in it is referencing the MEM. */
724 if (MEM_P (SET_DEST (x)))
726 if (memref_referenced_p (memref, XEXP (SET_DEST (x), 0)))
727 return 1;
729 else if (memref_referenced_p (memref, SET_DEST (x)))
730 return 1;
732 return memref_referenced_p (memref, SET_SRC (x));
734 default:
735 break;
738 fmt = GET_RTX_FORMAT (code);
739 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
740 switch (fmt[i])
742 case 'e':
743 if (memref_referenced_p (memref, XEXP (x, i)))
744 return 1;
745 break;
746 case 'E':
747 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
748 if (memref_referenced_p (memref, XVECEXP (x, i, j)))
749 return 1;
750 break;
753 return 0;
756 /* TRUE if some insn in the range (START, END] references a memory location
757 that would be affected by a store to MEMREF. */
759 static int
760 memref_used_between_p (rtx memref, rtx start, rtx end)
762 rtx insn;
764 for (insn = NEXT_INSN (start); insn != NEXT_INSN (end);
765 insn = NEXT_INSN (insn))
767 if (!INSN_P (insn))
768 continue;
770 if (memref_referenced_p (memref, PATTERN (insn)))
771 return 1;
773 /* Nonconst functions may access memory. */
774 if (CALL_P (insn)
775 && (! CONST_OR_PURE_CALL_P (insn)
776 || pure_call_p (insn)))
777 return 1;
780 return 0;
783 /* Find registers that are equivalent to a single value throughout the
784 compilation (either because they can be referenced in memory or are set once
785 from a single constant). Lower their priority for a register.
787 If such a register is only referenced once, try substituting its value
788 into the using insn. If it succeeds, we can eliminate the register
789 completely.
791 Initialize the REG_EQUIV_INIT array of initializing insns. */
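/* An example of the transformation (register and insn numbers are
   invented): for a pseudo that is set exactly once,

     (insn 10 ... (set (reg:SI 100) (mem:SI (symbol_ref ("x")))) ...)
     ...
     (insn 42 ... (set (reg:SI 101)
                       (plus:SI (reg:SI 100) (const_int 4))) ...)

   this pass attaches a REG_EQUIV note for the MEM to insn 10 and records
   insn 10 in reg_equiv_init[100].  If pseudo 100 is used only that once
   and the MEM provably stays unchanged, the use in insn 42 can later be
   replaced by the MEM and insn 10 deleted entirely.  */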
793 static void
794 update_equiv_regs (void)
796 rtx insn;
797 basic_block bb;
798 int loop_depth;
799 regset_head cleared_regs;
800 int clear_regnos = 0;
802 reg_equiv = XCNEWVEC (struct equivalence, max_regno);
803 INIT_REG_SET (&cleared_regs);
804 reg_equiv_init = ggc_alloc_cleared (max_regno * sizeof (rtx));
805 reg_equiv_init_size = max_regno;
807 init_alias_analysis ();
809 /* Scan the insns and find which registers have equivalences. Do this
810 in a separate scan of the insns because (due to -fcse-follow-jumps)
811 a register can be set below its use. */
812 FOR_EACH_BB (bb)
814 loop_depth = bb->loop_depth;
816 for (insn = BB_HEAD (bb);
817 insn != NEXT_INSN (BB_END (bb));
818 insn = NEXT_INSN (insn))
820 rtx note;
821 rtx set;
822 rtx dest, src;
823 int regno;
825 if (! INSN_P (insn))
826 continue;
828 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
829 if (REG_NOTE_KIND (note) == REG_INC)
830 no_equiv (XEXP (note, 0), note, NULL);
832 set = single_set (insn);
834 /* If this insn contains more (or less) than a single SET,
835 only mark all destinations as having no known equivalence. */
836 if (set == 0)
838 note_stores (PATTERN (insn), no_equiv, NULL);
839 continue;
841 else if (GET_CODE (PATTERN (insn)) == PARALLEL)
843 int i;
845 for (i = XVECLEN (PATTERN (insn), 0) - 1; i >= 0; i--)
847 rtx part = XVECEXP (PATTERN (insn), 0, i);
848 if (part != set)
849 note_stores (part, no_equiv, NULL);
853 dest = SET_DEST (set);
854 src = SET_SRC (set);
856 /* See if this is setting up the equivalence between an argument
857 register and its stack slot. */
858 note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
859 if (note)
861 gcc_assert (REG_P (dest));
862 regno = REGNO (dest);
864 /* Note that we don't want to clear reg_equiv_init even if there
865 are multiple sets of this register. */
866 reg_equiv[regno].is_arg_equivalence = 1;
868 /* Record for reload that this is an equivalencing insn. */
869 if (rtx_equal_p (src, XEXP (note, 0)))
870 reg_equiv_init[regno]
871 = gen_rtx_INSN_LIST (VOIDmode, insn, reg_equiv_init[regno]);
873 /* Continue normally in case this is a candidate for
874 replacements. */
877 if (!optimize)
878 continue;
880 /* We only handle the case of a pseudo register being set
881 once, or always to the same value. */
882 /* ??? The mn10200 port breaks if we add equivalences for
883 values that need an ADDRESS_REGS register and set them equivalent
884 to a MEM of a pseudo. The actual problem is in the over-conservative
885 handling of INPADDR_ADDRESS / INPUT_ADDRESS / INPUT triples in
886 calculate_needs, but we traditionally work around this problem
887 here by rejecting equivalences when the destination is in a register
888 that's likely spilled. This is fragile, of course, since the
889 preferred class of a pseudo depends on all instructions that set
890 or use it. */
892 if (!REG_P (dest)
893 || (regno = REGNO (dest)) < FIRST_PSEUDO_REGISTER
894 || reg_equiv[regno].init_insns == const0_rtx
895 || (CLASS_LIKELY_SPILLED_P (reg_preferred_class (regno))
896 && MEM_P (src) && ! reg_equiv[regno].is_arg_equivalence))
898 /* This might be setting a SUBREG of a pseudo, a pseudo that is
899 also set somewhere else to a constant. */
900 note_stores (set, no_equiv, NULL);
901 continue;
904 note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
906 /* cse sometimes generates function invariants, but doesn't put a
907 REG_EQUAL note on the insn. Since this note would be redundant,
908 there's no point creating it earlier than here. */
909 if (! note && ! rtx_varies_p (src, 0))
910 note = set_unique_reg_note (insn, REG_EQUAL, copy_rtx (src));
912 /* Don't bother considering a REG_EQUAL note containing an EXPR_LIST
913 since it represents a function call */
914 if (note && GET_CODE (XEXP (note, 0)) == EXPR_LIST)
915 note = NULL_RTX;
917 if (REG_N_SETS (regno) != 1
918 && (! note
919 || rtx_varies_p (XEXP (note, 0), 0)
920 || (reg_equiv[regno].replacement
921 && ! rtx_equal_p (XEXP (note, 0),
922 reg_equiv[regno].replacement))))
924 no_equiv (dest, set, NULL);
925 continue;
927 /* Record this insn as initializing this register. */
928 reg_equiv[regno].init_insns
929 = gen_rtx_INSN_LIST (VOIDmode, insn, reg_equiv[regno].init_insns);
931 /* If this register is known to be equal to a constant, record that
932 it is always equivalent to the constant. */
933 if (REG_N_SETS (regno) == 1
934 && note && ! rtx_varies_p (XEXP (note, 0), 0))
936 rtx note_value = XEXP (note, 0);
937 remove_note (insn, note);
938 set_unique_reg_note (insn, REG_EQUIV, note_value);
941 /* If this insn introduces a "constant" register, decrease the priority
942 of that register. Record this insn if the register is only used once
943 more and the equivalence value is the same as our source.
945 The latter condition is checked for two reasons: First, it is an
946 indication that it may be more efficient to actually emit the insn
947 as written (if no registers are available, reload will substitute
948 the equivalence). Secondly, it avoids problems with any registers
949 dying in this insn whose death notes would be missed.
951 If we don't have a REG_EQUIV note, see if this insn is loading
952 a register used only in one basic block from a MEM. If so, and the
953 MEM remains unchanged for the life of the register, add a REG_EQUIV
954 note. */
956 note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
958 if (note == 0 && REG_BASIC_BLOCK (regno) >= 0
959 && MEM_P (SET_SRC (set))
960 && validate_equiv_mem (insn, dest, SET_SRC (set)))
961 note = set_unique_reg_note (insn, REG_EQUIV, copy_rtx (SET_SRC (set)));
963 if (note)
965 int regno = REGNO (dest);
966 rtx x = XEXP (note, 0);
968 /* If we haven't done so, record for reload that this is an
969 equivalencing insn. */
970 if (!reg_equiv[regno].is_arg_equivalence)
971 reg_equiv_init[regno]
972 = gen_rtx_INSN_LIST (VOIDmode, insn, reg_equiv_init[regno]);
974 /* Record whether or not we created a REG_EQUIV note for a LABEL_REF.
975 We might end up substituting the LABEL_REF for uses of the
976 pseudo here or later. That kind of transformation may turn an
977 indirect jump into a direct jump, in which case we must rerun the
978 jump optimizer to ensure that the JUMP_LABEL fields are valid. */
979 if (GET_CODE (x) == LABEL_REF
980 || (GET_CODE (x) == CONST
981 && GET_CODE (XEXP (x, 0)) == PLUS
982 && (GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF)))
983 recorded_label_ref = 1;
985 reg_equiv[regno].replacement = x;
986 reg_equiv[regno].src_p = &SET_SRC (set);
987 reg_equiv[regno].loop_depth = loop_depth;
989 /* Don't mess with things live during setjmp. */
990 if (REG_LIVE_LENGTH (regno) >= 0 && optimize)
992 /* Note that the statement below does not affect the priority
993 in local-alloc! */
994 REG_LIVE_LENGTH (regno) *= 2;
996 /* If the register is referenced exactly twice, meaning it is
997 set once and used once, indicate that the reference may be
998 replaced by the equivalence we computed above. Do this
999 even if the register is only used in one block so that
1000 dependencies can be handled where the last register is
1001 used in a different block (i.e. HIGH / LO_SUM sequences)
1002 and to reduce the number of registers alive across
1003 calls. */
1005 if (REG_N_REFS (regno) == 2
1006 && (rtx_equal_p (x, src)
1007 || ! equiv_init_varies_p (src))
1008 && NONJUMP_INSN_P (insn)
1009 && equiv_init_movable_p (PATTERN (insn), regno))
1010 reg_equiv[regno].replace = 1;
1016 if (!optimize)
1017 goto out;
1019 /* A second pass, to gather additional equivalences with memory. This needs
1020 to be done after we know which registers we are going to replace. */
1022 for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
1024 rtx set, src, dest;
1025 unsigned regno;
1027 if (! INSN_P (insn))
1028 continue;
1030 set = single_set (insn);
1031 if (! set)
1032 continue;
1034 dest = SET_DEST (set);
1035 src = SET_SRC (set);
1037 /* If this sets a MEM to the contents of a REG that is only used
1038 in a single basic block, see if the register is always equivalent
1039 to that memory location and if moving the store from INSN to the
1040 insn that set REG is safe. If so, put a REG_EQUIV note on the
1041 initializing insn.
1043 Don't add a REG_EQUIV note if the insn already has one. The existing
1044 REG_EQUIV is likely more useful than the one we are adding.
1046 If one of the regs in the address has reg_equiv[REGNO].replace set,
1047 then we can't add this REG_EQUIV note. The reg_equiv[REGNO].replace
1048 optimization may move the set of this register immediately before
1049 insn, which puts it after reg_equiv[REGNO].init_insns, and hence
1050 the mention in the REG_EQUIV note would be to an uninitialized
1051 pseudo. */
1053 if (MEM_P (dest) && REG_P (src)
1054 && (regno = REGNO (src)) >= FIRST_PSEUDO_REGISTER
1055 && REG_BASIC_BLOCK (regno) >= 0
1056 && REG_N_SETS (regno) == 1
1057 && reg_equiv[regno].init_insns != 0
1058 && reg_equiv[regno].init_insns != const0_rtx
1059 && ! find_reg_note (XEXP (reg_equiv[regno].init_insns, 0),
1060 REG_EQUIV, NULL_RTX)
1061 && ! contains_replace_regs (XEXP (dest, 0)))
1063 rtx init_insn = XEXP (reg_equiv[regno].init_insns, 0);
1064 if (validate_equiv_mem (init_insn, src, dest)
1065 && ! memref_used_between_p (dest, init_insn, insn)
1066 /* Attaching a REG_EQUIV note will fail if INIT_INSN has
1067 multiple sets. */
1068 && set_unique_reg_note (init_insn, REG_EQUIV, copy_rtx (dest)))
1070 /* This insn makes the equivalence, not the one initializing
1071 the register. */
1072 reg_equiv_init[regno]
1073 = gen_rtx_INSN_LIST (VOIDmode, insn, NULL_RTX);
1078 /* Now scan all regs killed in an insn to see if any of them are
1079 registers only used that once. If so, see if we can replace the
1080 reference with the equivalent form. If we can, delete the
1081 initializing reference and this register will go away. If we
1082 can't replace the reference, and the initializing reference is
1083 within the same loop (or in an inner loop), then move the register
1084 initialization just before the use, so that they are in the same
1085 basic block. */
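/* For instance (invented RTL): if the equivalence recorded for pseudo
   100 is movable but the dying use cannot simply be rewritten,

     (insn 10 ... (set (reg:SI 100) (reg:SI 99)) ...)    initializing insn
     ...
     (insn 42 ... (set (reg:SI 101) (reg:SI 100)) ...)   only use; 100 dies

   the pattern of insn 10 is re-emitted immediately before insn 42 and
   the original insn 10 is deleted, so the set and the use end up in the
   same basic block and pseudo 100 becomes a candidate for this pass.  */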
1086 FOR_EACH_BB_REVERSE (bb)
1088 loop_depth = bb->loop_depth;
1089 for (insn = BB_END (bb);
1090 insn != PREV_INSN (BB_HEAD (bb));
1091 insn = PREV_INSN (insn))
1093 rtx link;
1095 if (! INSN_P (insn))
1096 continue;
 1098 /* Don't substitute into a non-local goto; this confuses the CFG. */
1099 if (JUMP_P (insn)
1100 && find_reg_note (insn, REG_NON_LOCAL_GOTO, NULL_RTX))
1101 continue;
1103 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1105 if (REG_NOTE_KIND (link) == REG_DEAD
1106 /* Make sure this insn still refers to the register. */
1107 && reg_mentioned_p (XEXP (link, 0), PATTERN (insn)))
1109 int regno = REGNO (XEXP (link, 0));
1110 rtx equiv_insn;
1112 if (! reg_equiv[regno].replace
1113 || reg_equiv[regno].loop_depth < loop_depth)
1114 continue;
1116 /* reg_equiv[REGNO].replace gets set only when
1117 REG_N_REFS[REGNO] is 2, i.e. the register is set
1118 once and used once. (If it were only set, but not used,
1119 flow would have deleted the setting insns.) Hence
1120 there can only be one insn in reg_equiv[REGNO].init_insns. */
1121 gcc_assert (reg_equiv[regno].init_insns
1122 && !XEXP (reg_equiv[regno].init_insns, 1));
1123 equiv_insn = XEXP (reg_equiv[regno].init_insns, 0);
1125 /* We may not move instructions that can throw, since
1126 that changes basic block boundaries and we are not
1127 prepared to adjust the CFG to match. */
1128 if (can_throw_internal (equiv_insn))
1129 continue;
1131 if (asm_noperands (PATTERN (equiv_insn)) < 0
1132 && validate_replace_rtx (regno_reg_rtx[regno],
1133 *(reg_equiv[regno].src_p), insn))
1135 rtx equiv_link;
1136 rtx last_link;
1137 rtx note;
1139 /* Find the last note. */
1140 for (last_link = link; XEXP (last_link, 1);
1141 last_link = XEXP (last_link, 1))
1144 /* Append the REG_DEAD notes from equiv_insn. */
1145 equiv_link = REG_NOTES (equiv_insn);
1146 while (equiv_link)
1148 note = equiv_link;
1149 equiv_link = XEXP (equiv_link, 1);
1150 if (REG_NOTE_KIND (note) == REG_DEAD)
1152 remove_note (equiv_insn, note);
1153 XEXP (last_link, 1) = note;
1154 XEXP (note, 1) = NULL_RTX;
1155 last_link = note;
1159 remove_death (regno, insn);
1160 REG_N_REFS (regno) = 0;
1161 REG_FREQ (regno) = 0;
1162 delete_insn (equiv_insn);
1164 reg_equiv[regno].init_insns
1165 = XEXP (reg_equiv[regno].init_insns, 1);
1167 /* Remember to clear REGNO from all basic block's live
1168 info. */
1169 SET_REGNO_REG_SET (&cleared_regs, regno);
1170 clear_regnos++;
1171 reg_equiv_init[regno] = NULL_RTX;
1173 /* Move the initialization of the register to just before
1174 INSN. Update the flow information. */
1175 else if (PREV_INSN (insn) != equiv_insn)
1177 rtx new_insn;
1179 new_insn = emit_insn_before (PATTERN (equiv_insn), insn);
1180 REG_NOTES (new_insn) = REG_NOTES (equiv_insn);
1181 REG_NOTES (equiv_insn) = 0;
1183 /* Make sure this insn is recognized before
1184 reload begins, otherwise
1185 eliminate_regs_in_insn will die. */
1186 INSN_CODE (new_insn) = INSN_CODE (equiv_insn);
1188 delete_insn (equiv_insn);
1190 XEXP (reg_equiv[regno].init_insns, 0) = new_insn;
1192 REG_BASIC_BLOCK (regno) = bb->index;
1193 REG_N_CALLS_CROSSED (regno) = 0;
1194 REG_N_THROWING_CALLS_CROSSED (regno) = 0;
1195 REG_LIVE_LENGTH (regno) = 2;
1197 if (insn == BB_HEAD (bb))
1198 BB_HEAD (bb) = PREV_INSN (insn);
1200 /* Remember to clear REGNO from all basic block's live
1201 info. */
1202 SET_REGNO_REG_SET (&cleared_regs, regno);
1203 clear_regnos++;
1204 reg_equiv_init[regno]
1205 = gen_rtx_INSN_LIST (VOIDmode, new_insn, NULL_RTX);
1212 /* Clear all dead REGNOs from all basic block's live info. */
1213 if (clear_regnos)
1215 unsigned j;
1217 if (clear_regnos > 8)
1219 FOR_EACH_BB (bb)
1221 AND_COMPL_REG_SET (bb->il.rtl->global_live_at_start,
1222 &cleared_regs);
1223 AND_COMPL_REG_SET (bb->il.rtl->global_live_at_end,
1224 &cleared_regs);
1227 else
1229 reg_set_iterator rsi;
1230 EXECUTE_IF_SET_IN_REG_SET (&cleared_regs, 0, j, rsi)
1232 FOR_EACH_BB (bb)
1234 CLEAR_REGNO_REG_SET (bb->il.rtl->global_live_at_start, j);
1235 CLEAR_REGNO_REG_SET (bb->il.rtl->global_live_at_end, j);
1241 out:
1242 /* Clean up. */
1243 end_alias_analysis ();
1244 CLEAR_REG_SET (&cleared_regs);
1245 free (reg_equiv);
1248 /* Mark REG as having no known equivalence.
1249 Some instructions might have been processed before and furnished
1250 with REG_EQUIV notes for this register; these notes will have to be
1251 removed.
1252 STORE is the piece of RTL that does the non-constant / conflicting
1253 assignment - a SET, CLOBBER or REG_INC note. It is currently not used,
1254 but needs to be there because this function is called from note_stores. */
1255 static void
1256 no_equiv (rtx reg, rtx store ATTRIBUTE_UNUSED, void *data ATTRIBUTE_UNUSED)
1258 int regno;
1259 rtx list;
1261 if (!REG_P (reg))
1262 return;
1263 regno = REGNO (reg);
1264 list = reg_equiv[regno].init_insns;
1265 if (list == const0_rtx)
1266 return;
1267 reg_equiv[regno].init_insns = const0_rtx;
1268 reg_equiv[regno].replacement = NULL_RTX;
 1269 /* This doesn't matter for equivalences made for argument registers; we
1270 should keep their initialization insns. */
1271 if (reg_equiv[regno].is_arg_equivalence)
1272 return;
1273 reg_equiv_init[regno] = NULL_RTX;
1274 for (; list; list = XEXP (list, 1))
1276 rtx insn = XEXP (list, 0);
1277 remove_note (insn, find_reg_note (insn, REG_EQUIV, NULL_RTX));
1281 /* Allocate hard regs to the pseudo regs used only within block number B.
1282 Only the pseudos that die but once can be handled. */
1284 static void
1285 block_alloc (int b)
1287 int i, q;
1288 rtx insn;
1289 rtx note, hard_reg;
1290 int insn_number = 0;
1291 int insn_count = 0;
1292 int max_uid = get_max_uid ();
1293 int *qty_order;
1294 int no_conflict_combined_regno = -1;
1296 /* Count the instructions in the basic block. */
1298 insn = BB_END (BASIC_BLOCK (b));
1299 while (1)
1301 if (!NOTE_P (insn))
1303 ++insn_count;
1304 gcc_assert (insn_count <= max_uid);
1306 if (insn == BB_HEAD (BASIC_BLOCK (b)))
1307 break;
1308 insn = PREV_INSN (insn);
1311 /* +2 to leave room for a post_mark_life at the last insn and for
1312 the birth of a CLOBBER in the first insn. */
1313 regs_live_at = XCNEWVEC (HARD_REG_SET, 2 * insn_count + 2);
1315 /* Initialize table of hardware registers currently live. */
1317 REG_SET_TO_HARD_REG_SET (regs_live,
1318 BASIC_BLOCK (b)->il.rtl->global_live_at_start);
1320 /* This loop scans the instructions of the basic block
1321 and assigns quantities to registers.
1322 It computes which registers to tie. */
1324 insn = BB_HEAD (BASIC_BLOCK (b));
1325 while (1)
1327 if (!NOTE_P (insn))
1328 insn_number++;
1330 if (INSN_P (insn))
1332 rtx link, set;
1333 int win = 0;
1334 rtx r0, r1 = NULL_RTX;
1335 int combined_regno = -1;
1336 int i;
1338 this_insn_number = insn_number;
1339 this_insn = insn;
1341 extract_insn (insn);
1342 which_alternative = -1;
1344 /* Is this insn suitable for tying two registers?
1345 If so, try doing that.
1346 Suitable insns are those with at least two operands and where
1347 operand 0 is an output that is a register that is not
1348 earlyclobber.
1350 We can tie operand 0 with some operand that dies in this insn.
1351 First look for operands that are required to be in the same
1352 register as operand 0. If we find such, only try tying that
1353 operand or one that can be put into that operand if the
1354 operation is commutative. If we don't find an operand
1355 that is required to be in the same register as operand 0,
1356 we can tie with any operand.
1358 Subregs in place of regs are also ok.
1360 If tying is done, WIN is set nonzero. */
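/* For example (a made-up two-address add): in

     (set (reg:SI 102) (plus:SI (reg:SI 100) (const_int 4)))

   where operand 0's constraint is "=r" and operand 1's is "0" (it must
   match operand 0), and pseudo 100 dies here, pseudo 102 can be tied to
   pseudo 100: both get the same quantity, the eventual hard register
   serves as both input and output, and no copy is needed.  */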
1362 if (optimize
1363 && recog_data.n_operands > 1
1364 && recog_data.constraints[0][0] == '='
1365 && recog_data.constraints[0][1] != '&')
1367 /* If non-negative, is an operand that must match operand 0. */
1368 int must_match_0 = -1;
1369 /* Counts number of alternatives that require a match with
1370 operand 0. */
1371 int n_matching_alts = 0;
1373 for (i = 1; i < recog_data.n_operands; i++)
1375 const char *p = recog_data.constraints[i];
1376 int this_match = requires_inout (p);
1378 n_matching_alts += this_match;
1379 if (this_match == recog_data.n_alternatives)
1380 must_match_0 = i;
1383 r0 = recog_data.operand[0];
1384 for (i = 1; i < recog_data.n_operands; i++)
1386 /* Skip this operand if we found an operand that
1387 must match operand 0 and this operand isn't it
1388 and can't be made to be it by commutativity. */
1390 if (must_match_0 >= 0 && i != must_match_0
1391 && ! (i == must_match_0 + 1
1392 && recog_data.constraints[i-1][0] == '%')
1393 && ! (i == must_match_0 - 1
1394 && recog_data.constraints[i][0] == '%'))
1395 continue;
1397 /* Likewise if each alternative has some operand that
1398 must match operand zero. In that case, skip any
1399 operand that doesn't list operand 0 since we know that
1400 the operand always conflicts with operand 0. We
1401 ignore commutativity in this case to keep things simple. */
1402 if (n_matching_alts == recog_data.n_alternatives
1403 && 0 == requires_inout (recog_data.constraints[i]))
1404 continue;
1406 r1 = recog_data.operand[i];
1408 /* If the operand is an address, find a register in it.
1409 There may be more than one register, but we only try one
1410 of them. */
1411 if (recog_data.constraints[i][0] == 'p'
1412 || EXTRA_ADDRESS_CONSTRAINT (recog_data.constraints[i][0],
1413 recog_data.constraints[i]))
1414 while (GET_CODE (r1) == PLUS || GET_CODE (r1) == MULT)
1415 r1 = XEXP (r1, 0);
1417 /* Avoid making a call-saved register unnecessarily
1418 clobbered. */
1419 hard_reg = get_hard_reg_initial_reg (cfun, r1);
1420 if (hard_reg != NULL_RTX)
1422 if (REG_P (hard_reg)
1423 && REGNO (hard_reg) < FIRST_PSEUDO_REGISTER
1424 && !call_used_regs[REGNO (hard_reg)])
1425 continue;
1428 if (REG_P (r0) || GET_CODE (r0) == SUBREG)
1430 /* We have two priorities for hard register preferences.
1431 If we have a move insn or an insn whose first input
1432 can only be in the same register as the output, give
1433 priority to an equivalence found from that insn. */
1434 int may_save_copy
1435 = (r1 == recog_data.operand[i] && must_match_0 >= 0);
1437 if (REG_P (r1) || GET_CODE (r1) == SUBREG)
1438 win = combine_regs (r1, r0, may_save_copy,
1439 insn_number, insn, 0);
1441 if (win)
1442 break;
1446 /* Recognize an insn sequence with an ultimate result
1447 which can safely overlap one of the inputs.
1448 The sequence begins with a CLOBBER of its result,
1449 and ends with an insn that copies the result to itself
1450 and has a REG_EQUAL note for an equivalent formula.
1451 That note indicates what the inputs are.
1452 The result and the input can overlap if each insn in
1453 the sequence either doesn't mention the input
1454 or has a REG_NO_CONFLICT note to inhibit the conflict.
1456 We do the combining test at the CLOBBER so that the
1457 destination register won't have had a quantity number
1458 assigned, since that would prevent combining. */
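/* Schematically such a sequence looks like this (all details invented):

     (insn 20 (clobber (reg:DI 100)))                 with a REG_LIBCALL note
     (insn 21 (set (subreg:SI (reg:DI 100) 0) ...))   REG_NO_CONFLICT (reg 101)
     (insn 22 (set (subreg:SI (reg:DI 100) 4) ...))   REG_NO_CONFLICT (reg 101)
     (insn 24 (set (reg:DI 100) (reg:DI 100)))
         with REG_RETVAL and REG_EQUAL (plus:DI (reg:DI 101) (reg:DI 102))

   The combining test is made at insn 20, the CLOBBER, where reg 100 has
   not yet been given a quantity, so tying it to reg 101 is still
   possible.  */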
1460 if (optimize
1461 && GET_CODE (PATTERN (insn)) == CLOBBER
1462 && (r0 = XEXP (PATTERN (insn), 0),
1463 REG_P (r0))
1464 && (link = find_reg_note (insn, REG_LIBCALL, NULL_RTX)) != 0
1465 && XEXP (link, 0) != 0
1466 && NONJUMP_INSN_P (XEXP (link, 0))
1467 && (set = single_set (XEXP (link, 0))) != 0
1468 && SET_DEST (set) == r0 && SET_SRC (set) == r0
1469 && (note = find_reg_note (XEXP (link, 0), REG_EQUAL,
1470 NULL_RTX)) != 0)
1472 if (r1 = XEXP (note, 0), REG_P (r1)
1473 /* Check that we have such a sequence. */
1474 && no_conflict_p (insn, r0, r1))
1475 win = combine_regs (r1, r0, 1, insn_number, insn, 1);
1476 else if (GET_RTX_FORMAT (GET_CODE (XEXP (note, 0)))[0] == 'e'
1477 && (r1 = XEXP (XEXP (note, 0), 0),
1478 REG_P (r1) || GET_CODE (r1) == SUBREG)
1479 && no_conflict_p (insn, r0, r1))
1480 win = combine_regs (r1, r0, 0, insn_number, insn, 1);
1482 /* Here we care if the operation to be computed is
1483 commutative. */
1484 else if (COMMUTATIVE_P (XEXP (note, 0))
1485 && (r1 = XEXP (XEXP (note, 0), 1),
1486 (REG_P (r1) || GET_CODE (r1) == SUBREG))
1487 && no_conflict_p (insn, r0, r1))
1488 win = combine_regs (r1, r0, 0, insn_number, insn, 1);
1490 /* If we did combine something, show the register number
1491 in question so that we know to ignore its death. */
1492 if (win)
1493 no_conflict_combined_regno = REGNO (r1);
1496 /* If registers were just tied, set COMBINED_REGNO
1497 to the number of the register used in this insn
1498 that was tied to the register set in this insn.
1499 This register's qty should not be "killed". */
1501 if (win)
1503 while (GET_CODE (r1) == SUBREG)
1504 r1 = SUBREG_REG (r1);
1505 combined_regno = REGNO (r1);
1508 /* Mark the death of everything that dies in this instruction,
1509 except for anything that was just combined. */
1511 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1512 if (REG_NOTE_KIND (link) == REG_DEAD
1513 && REG_P (XEXP (link, 0))
1514 && combined_regno != (int) REGNO (XEXP (link, 0))
1515 && (no_conflict_combined_regno != (int) REGNO (XEXP (link, 0))
1516 || ! find_reg_note (insn, REG_NO_CONFLICT,
1517 XEXP (link, 0))))
1518 wipe_dead_reg (XEXP (link, 0), 0);
1520 /* Allocate qty numbers for all registers local to this block
1521 that are born (set) in this instruction.
1522 A pseudo that already has a qty is not changed. */
1524 note_stores (PATTERN (insn), reg_is_set, NULL);
1526 /* If anything is set in this insn and then unused, mark it as dying
1527 after this insn, so it will conflict with our outputs. This
1528 can't match with something that combined, and it doesn't matter
1529 if it did. Do this after the calls to reg_is_set since these
1530 die after, not during, the current insn. */
1532 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1533 if (REG_NOTE_KIND (link) == REG_UNUSED
1534 && REG_P (XEXP (link, 0)))
1535 wipe_dead_reg (XEXP (link, 0), 1);
1537 /* If this is an insn that has a REG_RETVAL note pointing at a
1538 CLOBBER insn, we have reached the end of a REG_NO_CONFLICT
1539 block, so clear any register number that combined within it. */
1540 if ((note = find_reg_note (insn, REG_RETVAL, NULL_RTX)) != 0
1541 && NONJUMP_INSN_P (XEXP (note, 0))
1542 && GET_CODE (PATTERN (XEXP (note, 0))) == CLOBBER)
1543 no_conflict_combined_regno = -1;
1546 /* Set the registers live after INSN_NUMBER. Note that we never
1547 record the registers live before the block's first insn, since no
1548 pseudos we care about are live before that insn. */
1550 IOR_HARD_REG_SET (regs_live_at[2 * insn_number], regs_live);
1551 IOR_HARD_REG_SET (regs_live_at[2 * insn_number + 1], regs_live);
1553 if (insn == BB_END (BASIC_BLOCK (b)))
1554 break;
1556 insn = NEXT_INSN (insn);
1559 /* Now every register that is local to this basic block
1560 should have been given a quantity, or else -1 meaning ignore it.
1561 Every quantity should have a known birth and death.
1563 Order the qtys so we assign them registers in order of the
1564 number of suggested registers they need so we allocate those with
1565 the most restrictive needs first. */
1567 qty_order = XNEWVEC (int, next_qty);
1568 for (i = 0; i < next_qty; i++)
1569 qty_order[i] = i;
1571 #define EXCHANGE(I1, I2) \
1572 { i = qty_order[I1]; qty_order[I1] = qty_order[I2]; qty_order[I2] = i; }
1574 switch (next_qty)
1576 case 3:
1577 /* Make qty_order[2] be the one to allocate last. */
1578 if (qty_sugg_compare (0, 1) > 0)
1579 EXCHANGE (0, 1);
1580 if (qty_sugg_compare (1, 2) > 0)
1581 EXCHANGE (2, 1);
1583 /* ... Fall through ... */
1584 case 2:
1585 /* Put the best one to allocate in qty_order[0]. */
1586 if (qty_sugg_compare (0, 1) > 0)
1587 EXCHANGE (0, 1);
1589 /* ... Fall through ... */
1591 case 1:
1592 case 0:
1593 /* Nothing to do here. */
1594 break;
1596 default:
1597 qsort (qty_order, next_qty, sizeof (int), qty_sugg_compare_1);
1600 /* Try to put each quantity in a suggested physical register, if it has one.
1601 This may cause registers to be allocated that otherwise wouldn't be, but
1602 this seems acceptable in local allocation (unlike global allocation). */
1603 for (i = 0; i < next_qty; i++)
1605 q = qty_order[i];
1606 if (qty_phys_num_sugg[q] != 0 || qty_phys_num_copy_sugg[q] != 0)
1607 qty[q].phys_reg = find_free_reg (qty[q].min_class, qty[q].mode, q,
1608 0, 1, qty[q].birth, qty[q].death);
1609 else
1610 qty[q].phys_reg = -1;
1613 /* Order the qtys so we assign them registers in order of
1614 decreasing length of life. Normally call qsort, but if we
1615 have only a very small number of quantities, sort them ourselves. */
1617 for (i = 0; i < next_qty; i++)
1618 qty_order[i] = i;
1620 #define EXCHANGE(I1, I2) \
1621 { i = qty_order[I1]; qty_order[I1] = qty_order[I2]; qty_order[I2] = i; }
1623 switch (next_qty)
1625 case 3:
1626 /* Make qty_order[2] be the one to allocate last. */
1627 if (qty_compare (0, 1) > 0)
1628 EXCHANGE (0, 1);
1629 if (qty_compare (1, 2) > 0)
1630 EXCHANGE (2, 1);
1632 /* ... Fall through ... */
1633 case 2:
1634 /* Put the best one to allocate in qty_order[0]. */
1635 if (qty_compare (0, 1) > 0)
1636 EXCHANGE (0, 1);
1638 /* ... Fall through ... */
1640 case 1:
1641 case 0:
1642 /* Nothing to do here. */
1643 break;
1645 default:
1646 qsort (qty_order, next_qty, sizeof (int), qty_compare_1);
1649 /* Now for each qty that is not a hardware register,
1650 look for a hardware register to put it in.
1651 First try the register class that is cheapest for this qty,
1652 if there is more than one class. */
1654 for (i = 0; i < next_qty; i++)
1656 q = qty_order[i];
1657 if (qty[q].phys_reg < 0)
1659 #ifdef INSN_SCHEDULING
1660 /* These values represent the adjusted lifetime of a qty so
1661 that it conflicts with qtys which appear near the start/end
1662 of this qty's lifetime.
1664 The purpose behind extending the lifetime of this qty is to
1665 discourage the register allocator from creating false
1666 dependencies.
1668 The adjustment value is chosen to indicate that this qty
1669 conflicts with all the qtys in the instructions immediately
1670 before and after the lifetime of this qty.
1672 Experiments have shown that higher values tend to hurt
1673 overall code performance.
1675 If allocation using the extended lifetime fails we will try
1676 again with the qty's unadjusted lifetime. */
1677 int fake_birth = MAX (0, qty[q].birth - 2 + qty[q].birth % 2);
1678 int fake_death = MIN (insn_number * 2 + 1,
1679 qty[q].death + 2 - qty[q].death % 2);
1680 #endif
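/* For example, a quantity with birth == 10 and death == 15 gets
   fake_birth == 8 and fake_death == 16 here (assuming 16 is still inside
   the block), so the first allocation attempt also avoids hard registers
   of quantities that end just before this one starts or begin just after
   it ends.  */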
1682 if (N_REG_CLASSES > 1)
1684 #ifdef INSN_SCHEDULING
1685 /* We try to avoid using hard registers allocated to qtys which
1686 are born immediately after this qty or die immediately before
1687 this qty.
1689 This optimization is only appropriate when we will run
1690 a scheduling pass after reload and we are not optimizing
1691 for code size. */
1692 if (flag_schedule_insns_after_reload
1693 && !optimize_size
1694 && !SMALL_REGISTER_CLASSES)
1696 qty[q].phys_reg = find_free_reg (qty[q].min_class,
1697 qty[q].mode, q, 0, 0,
1698 fake_birth, fake_death);
1699 if (qty[q].phys_reg >= 0)
1700 continue;
1702 #endif
1703 qty[q].phys_reg = find_free_reg (qty[q].min_class,
1704 qty[q].mode, q, 0, 0,
1705 qty[q].birth, qty[q].death);
1706 if (qty[q].phys_reg >= 0)
1707 continue;
1710 #ifdef INSN_SCHEDULING
1711 /* Similarly, avoid false dependencies. */
1712 if (flag_schedule_insns_after_reload
1713 && !optimize_size
1714 && !SMALL_REGISTER_CLASSES
1715 && qty[q].alternate_class != NO_REGS)
1716 qty[q].phys_reg = find_free_reg (qty[q].alternate_class,
1717 qty[q].mode, q, 0, 0,
1718 fake_birth, fake_death);
1719 #endif
1720 if (qty[q].alternate_class != NO_REGS)
1721 qty[q].phys_reg = find_free_reg (qty[q].alternate_class,
1722 qty[q].mode, q, 0, 0,
1723 qty[q].birth, qty[q].death);
1727 /* Now propagate the register assignments
1728 to the pseudo regs belonging to the qtys. */
1730 for (q = 0; q < next_qty; q++)
1731 if (qty[q].phys_reg >= 0)
1733 for (i = qty[q].first_reg; i >= 0; i = reg_next_in_qty[i])
1734 reg_renumber[i] = qty[q].phys_reg + reg_offset[i];
1737 /* Clean up. */
1738 free (regs_live_at);
1739 free (qty_order);
1742 /* Compare two quantities' priority for getting real registers.
1743 We give shorter-lived quantities higher priority.
1744 Quantities with more references are also preferred, as are quantities that
 1745 require multiple registers. This is identical to the prioritization
1746 done by global-alloc.
1748 We used to give preference to registers with *longer* lives, but using
1749 the same algorithm in both local- and global-alloc can speed up execution
1750 of some programs by as much as a factor of three! */
1752 /* Note that the quotient will never be bigger than
1753 the value of floor_log2 times the maximum number of
1754 times a register can occur in one insn (surely less than 100)
1755 weighted by frequency (max REG_FREQ_MAX).
1756 Multiplying this by 10000/REG_FREQ_MAX can't overflow.
1757 QTY_CMP_PRI is also used by qty_sugg_compare. */
1759 #define QTY_CMP_PRI(q) \
1760 ((int) (((double) (floor_log2 (qty[q].n_refs) * qty[q].freq * qty[q].size) \
1761 / (qty[q].death - qty[q].birth)) * (10000 / REG_FREQ_MAX)))
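/* A worked example (all numbers invented): two quantities with the same
   n_refs == 4, freq == 500 and size == 1, one spanning
   death - birth == 10 insn indexes and the other 20, score in the ratio

     floor_log2 (4) * 500 * 1 / 10  :  floor_log2 (4) * 500 * 1 / 20
       = 100 : 50

   (each then scaled by the same 10000 / REG_FREQ_MAX factor), so the
   shorter-lived quantity compares as higher priority and is allocated
   first.  */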
1763 static int
1764 qty_compare (int q1, int q2)
1766 return QTY_CMP_PRI (q2) - QTY_CMP_PRI (q1);
1769 static int
1770 qty_compare_1 (const void *q1p, const void *q2p)
1772 int q1 = *(const int *) q1p, q2 = *(const int *) q2p;
1773 int tem = QTY_CMP_PRI (q2) - QTY_CMP_PRI (q1);
1775 if (tem != 0)
1776 return tem;
1778 /* If qtys are equally good, sort by qty number,
1779 so that the results of qsort leave nothing to chance. */
1780 return q1 - q2;
1783 /* Compare two quantities' priority for getting real registers. This version
1784 is called for quantities that have suggested hard registers. First priority
1785 goes to quantities that have copy preferences, then to those that have
1786 normal preferences. Within those groups, quantities with the lower
1787 number of preferences have the highest priority. Of those, we use the same
1788 algorithm as above. */
1790 #define QTY_CMP_SUGG(q) \
1791 (qty_phys_num_copy_sugg[q] \
1792 ? qty_phys_num_copy_sugg[q] \
1793 : qty_phys_num_sugg[q] * FIRST_PSEUDO_REGISTER)
1795 static int
1796 qty_sugg_compare (int q1, int q2)
1798 int tem = QTY_CMP_SUGG (q1) - QTY_CMP_SUGG (q2);
1800 if (tem != 0)
1801 return tem;
1803 return QTY_CMP_PRI (q2) - QTY_CMP_PRI (q1);
1806 static int
1807 qty_sugg_compare_1 (const void *q1p, const void *q2p)
1809 int q1 = *(const int *) q1p, q2 = *(const int *) q2p;
1810 int tem = QTY_CMP_SUGG (q1) - QTY_CMP_SUGG (q2);
1812 if (tem != 0)
1813 return tem;
1815 tem = QTY_CMP_PRI (q2) - QTY_CMP_PRI (q1);
1816 if (tem != 0)
1817 return tem;
1819 /* If qtys are equally good, sort by qty number,
1820 so that the results of qsort leave nothing to chance. */
1821 return q1 - q2;
1824 #undef QTY_CMP_SUGG
1825 #undef QTY_CMP_PRI
1827 /* Attempt to combine the two registers (rtx's) USEDREG and SETREG.
1828 Returns 1 if have done so, or 0 if cannot.
1830 Combining registers means marking them as having the same quantity
1831 and adjusting the offsets within the quantity if either of
1832 them is a SUBREG.
1834 We don't actually combine a hard reg with a pseudo; instead
1835 we just record the hard reg as the suggestion for the pseudo's quantity.
1836 If we really combined them, we could lose if the pseudo lives
1837 across an insn that clobbers the hard reg (eg, movmem).
1839 ALREADY_DEAD is nonzero if USEDREG is known to be dead even though
1840 there is no REG_DEAD note on INSN. This occurs during the processing
1841 of REG_NO_CONFLICT blocks.
1843 MAY_SAVE_COPY is nonzero if this insn is simply copying USEDREG to
1844 SETREG or if the input and output must share a register.
1845 In that case, we record a hard reg suggestion in QTY_PHYS_COPY_SUGG.
1847 There are elaborate checks for the validity of combining. */
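/* A purely hypothetical example: for a copy insn such as
   (set (reg 131) (reg 128)) where pseudo 128 dies, both pseudos are local
   to the block, and their modes are tieable, reg 131 is simply added to
   reg 128's quantity, so both can end up in the same hard register.  */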
1849 static int
1850 combine_regs (rtx usedreg, rtx setreg, int may_save_copy, int insn_number,
1851 rtx insn, int already_dead)
1853 int ureg, sreg;
1854 int offset = 0;
1855 int usize, ssize;
1856 int sqty;
1858 /* Determine the numbers and sizes of registers being used. If a subreg
1859 is present that does not change the entire register, don't consider
1860 this a copy insn. */
1862 while (GET_CODE (usedreg) == SUBREG)
1864 rtx subreg = SUBREG_REG (usedreg);
1866 if (REG_P (subreg))
1868 if (GET_MODE_SIZE (GET_MODE (subreg)) > UNITS_PER_WORD)
1869 may_save_copy = 0;
1871 if (REGNO (subreg) < FIRST_PSEUDO_REGISTER)
1872 offset += subreg_regno_offset (REGNO (subreg),
1873 GET_MODE (subreg),
1874 SUBREG_BYTE (usedreg),
1875 GET_MODE (usedreg));
1876 else
1877 offset += (SUBREG_BYTE (usedreg)
1878 / REGMODE_NATURAL_SIZE (GET_MODE (usedreg)));
1881 usedreg = subreg;
1884 if (!REG_P (usedreg))
1885 return 0;
1887 ureg = REGNO (usedreg);
1888 if (ureg < FIRST_PSEUDO_REGISTER)
1889 usize = hard_regno_nregs[ureg][GET_MODE (usedreg)];
1890 else
1891 usize = ((GET_MODE_SIZE (GET_MODE (usedreg))
1892 + (REGMODE_NATURAL_SIZE (GET_MODE (usedreg)) - 1))
1893 / REGMODE_NATURAL_SIZE (GET_MODE (usedreg)));
1895 while (GET_CODE (setreg) == SUBREG)
1897 rtx subreg = SUBREG_REG (setreg);
1899 if (REG_P (subreg))
1901 if (GET_MODE_SIZE (GET_MODE (subreg)) > UNITS_PER_WORD)
1902 may_save_copy = 0;
1904 if (REGNO (subreg) < FIRST_PSEUDO_REGISTER)
1905 offset -= subreg_regno_offset (REGNO (subreg),
1906 GET_MODE (subreg),
1907 SUBREG_BYTE (setreg),
1908 GET_MODE (setreg));
1909 else
1910 offset -= (SUBREG_BYTE (setreg)
1911 / REGMODE_NATURAL_SIZE (GET_MODE (setreg)));
1914 setreg = subreg;
1917 if (!REG_P (setreg))
1918 return 0;
1920 sreg = REGNO (setreg);
1921 if (sreg < FIRST_PSEUDO_REGISTER)
1922 ssize = hard_regno_nregs[sreg][GET_MODE (setreg)];
1923 else
1924 ssize = ((GET_MODE_SIZE (GET_MODE (setreg))
1925 + (REGMODE_NATURAL_SIZE (GET_MODE (setreg)) - 1))
1926 / REGMODE_NATURAL_SIZE (GET_MODE (setreg)));
1928 /* If UREG is a pseudo-register that hasn't already been assigned a
1929 quantity number, it means that it is not local to this block or dies
1930 more than once. In either event, we can't do anything with it. */
1931 if ((ureg >= FIRST_PSEUDO_REGISTER && reg_qty[ureg] < 0)
1932 /* Do not combine registers unless one fits within the other. */
1933 || (offset > 0 && usize + offset > ssize)
1934 || (offset < 0 && usize + offset < ssize)
1935 /* Do not combine with a smaller already-assigned object
1936 if that smaller object is already combined with something bigger. */
1937 || (ssize > usize && ureg >= FIRST_PSEUDO_REGISTER
1938 && usize < qty[reg_qty[ureg]].size)
1939 /* Can't combine if SREG is not a register we can allocate. */
1940 || (sreg >= FIRST_PSEUDO_REGISTER && reg_qty[sreg] == -1)
1941 /* Don't combine with a pseudo mentioned in a REG_NO_CONFLICT note.
1942 These have already been taken care of. This probably wouldn't
1943 combine anyway, but don't take any chances. */
1944 || (ureg >= FIRST_PSEUDO_REGISTER
1945 && find_reg_note (insn, REG_NO_CONFLICT, usedreg))
1946 /* Don't tie something to itself. In most cases it would make no
1947 difference, but it would screw up if the reg being tied to itself
1948 also dies in this insn. */
1949 || ureg == sreg
1950 /* Don't try to connect two different hardware registers. */
1951 || (ureg < FIRST_PSEUDO_REGISTER && sreg < FIRST_PSEUDO_REGISTER)
1952 /* Don't connect two different machine modes if they have different
1953 implications as to which registers may be used. */
1954 || !MODES_TIEABLE_P (GET_MODE (usedreg), GET_MODE (setreg)))
1955 return 0;
1957 /* Now, if UREG is a hard reg and SREG is a pseudo, record the hard reg in
1958 qty_phys_sugg for the pseudo instead of tying them.
1960 Return "failure" so that the lifespan of UREG is terminated here;
1961 that way the two lifespans will be disjoint and nothing will prevent
1962 the pseudo reg from being given this hard reg. */
1964 if (ureg < FIRST_PSEUDO_REGISTER)
1966 /* Allocate a quantity number so we have a place to put our
1967 suggestions. */
1968 if (reg_qty[sreg] == -2)
1969 reg_is_born (setreg, 2 * insn_number);
1971 if (reg_qty[sreg] >= 0)
1973 if (may_save_copy
1974 && ! TEST_HARD_REG_BIT (qty_phys_copy_sugg[reg_qty[sreg]], ureg))
1976 SET_HARD_REG_BIT (qty_phys_copy_sugg[reg_qty[sreg]], ureg);
1977 qty_phys_num_copy_sugg[reg_qty[sreg]]++;
1979 else if (! TEST_HARD_REG_BIT (qty_phys_sugg[reg_qty[sreg]], ureg))
1981 SET_HARD_REG_BIT (qty_phys_sugg[reg_qty[sreg]], ureg);
1982 qty_phys_num_sugg[reg_qty[sreg]]++;
1985 return 0;
1988 /* Similarly for SREG a hard register and UREG a pseudo register. */
1990 if (sreg < FIRST_PSEUDO_REGISTER)
1992 if (may_save_copy
1993 && ! TEST_HARD_REG_BIT (qty_phys_copy_sugg[reg_qty[ureg]], sreg))
1995 SET_HARD_REG_BIT (qty_phys_copy_sugg[reg_qty[ureg]], sreg);
1996 qty_phys_num_copy_sugg[reg_qty[ureg]]++;
1998 else if (! TEST_HARD_REG_BIT (qty_phys_sugg[reg_qty[ureg]], sreg))
2000 SET_HARD_REG_BIT (qty_phys_sugg[reg_qty[ureg]], sreg);
2001 qty_phys_num_sugg[reg_qty[ureg]]++;
2003 return 0;
2006 /* At this point we know that SREG and UREG are both pseudos.
2007 Do nothing if SREG already has a quantity or is a register that we
2008 don't allocate. */
2009 if (reg_qty[sreg] >= -1
2010 /* If we are not going to let any regs live across calls,
2011 don't tie a call-crossing reg to a non-call-crossing reg. */
2012 || (current_function_has_nonlocal_label
2013 && ((REG_N_CALLS_CROSSED (ureg) > 0)
2014 != (REG_N_CALLS_CROSSED (sreg) > 0))))
2015 return 0;
2017 /* We don't already know about SREG, so tie it to UREG
2018 if this is the last use of UREG, provided the classes they want
2019 are compatible. */
2021 if ((already_dead || find_regno_note (insn, REG_DEAD, ureg))
2022 && reg_meets_class_p (sreg, qty[reg_qty[ureg]].min_class))
2024 /* Add SREG to UREG's quantity. */
2025 sqty = reg_qty[ureg];
2026 reg_qty[sreg] = sqty;
2027 reg_offset[sreg] = reg_offset[ureg] + offset;
2028 reg_next_in_qty[sreg] = qty[sqty].first_reg;
2029 qty[sqty].first_reg = sreg;
2031 /* If SREG's reg class is smaller, set qty[SQTY].min_class. */
2032 update_qty_class (sqty, sreg);
2034 /* Update info about quantity SQTY. */
2035 qty[sqty].n_calls_crossed += REG_N_CALLS_CROSSED (sreg);
2036 qty[sqty].n_throwing_calls_crossed
2037 += REG_N_THROWING_CALLS_CROSSED (sreg);
2038 qty[sqty].n_refs += REG_N_REFS (sreg);
2039 qty[sqty].freq += REG_FREQ (sreg);
2040 if (usize < ssize)
2042 int i;
2044 for (i = qty[sqty].first_reg; i >= 0; i = reg_next_in_qty[i])
2045 reg_offset[i] -= offset;
2047 qty[sqty].size = ssize;
2048 qty[sqty].mode = GET_MODE (setreg);
2051 else
2052 return 0;
2054 return 1;
2057 /* Return 1 if the preferred class of REG allows it to be tied
2058 to a quantity or register whose class is CLASS.
2059 True if REG's reg class either contains or is contained in CLASS. */
2061 static int
2062 reg_meets_class_p (int reg, enum reg_class class)
2064 enum reg_class rclass = reg_preferred_class (reg);
2065 return (reg_class_subset_p (rclass, class)
2066 || reg_class_subset_p (class, rclass));
2069 /* Update the class of QTYNO assuming that REG is being tied to it. */
2071 static void
2072 update_qty_class (int qtyno, int reg)
2074 enum reg_class rclass = reg_preferred_class (reg);
2075 if (reg_class_subset_p (rclass, qty[qtyno].min_class))
2076 qty[qtyno].min_class = rclass;
2078 rclass = reg_alternate_class (reg);
2079 if (reg_class_subset_p (rclass, qty[qtyno].alternate_class))
2080 qty[qtyno].alternate_class = rclass;
2083 /* Handle something which alters the value of an rtx REG.
2085 REG is whatever is set or clobbered. SETTER is the rtx that
2086 is modifying the register.
2088 If it is not really a register, we do nothing.
2089 The file-global variables `this_insn' and `this_insn_number'
2090 carry info from `block_alloc'. */
2092 static void
2093 reg_is_set (rtx reg, rtx setter, void *data ATTRIBUTE_UNUSED)
2095 /* Note that note_stores will only pass us a SUBREG if it is a SUBREG of
2096 a hard register. These may actually not exist any more. */
2098 if (GET_CODE (reg) != SUBREG
2099 && !REG_P (reg))
2100 return;
2102 /* Mark this register as being born. If it is used in a CLOBBER, mark
2103 it as being born halfway between the previous insn and this insn so that
2104 it conflicts with our inputs but not the outputs of the previous insn. */
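/* Illustrative indexing: a register set by insn N is born at index 2*N,
   while one only clobbered there is born at 2*N - 1, halfway between
   insn N - 1 (whose outputs are born at 2*N - 2) and insn N itself.  */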
2106 reg_is_born (reg, 2 * this_insn_number - (GET_CODE (setter) == CLOBBER));
2109 /* Handle beginning of the life of register REG.
2110 BIRTH is the index at which this is happening. */
2112 static void
2113 reg_is_born (rtx reg, int birth)
2115 int regno;
2117 if (GET_CODE (reg) == SUBREG)
2119 regno = REGNO (SUBREG_REG (reg));
2120 if (regno < FIRST_PSEUDO_REGISTER)
2121 regno = subreg_regno (reg);
2123 else
2124 regno = REGNO (reg);
2126 if (regno < FIRST_PSEUDO_REGISTER)
2128 mark_life (regno, GET_MODE (reg), 1);
2130 /* If the register was to have been born earlier than the present
2131 insn, mark it as live where it is actually born. */
2132 if (birth < 2 * this_insn_number)
2133 post_mark_life (regno, GET_MODE (reg), 1, birth, 2 * this_insn_number);
2135 else
2137 if (reg_qty[regno] == -2)
2138 alloc_qty (regno, GET_MODE (reg), PSEUDO_REGNO_SIZE (regno), birth);
2140 /* If this register has a quantity number, show that it isn't dead. */
2141 if (reg_qty[regno] >= 0)
2142 qty[reg_qty[regno]].death = -1;
2146 /* Record the death of REG in the current insn. If OUTPUT_P is nonzero,
2147 REG is an output that is dying (i.e., it is never used); otherwise it
2148 is an input (the normal case).
2149 If OUTPUT_P is 1, then we extend the life past the end of this insn. */
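/* Sketch of the insn numbering used here: an ordinary input dying in
   insn N gets death index 2*N, while OUTPUT_P = 1 records 2*N + 1 so the
   value is still treated as live through this insn.  */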
2151 static void
2152 wipe_dead_reg (rtx reg, int output_p)
2154 int regno = REGNO (reg);
2156 /* If this insn has multiple results,
2157 and the dead reg is used in one of the results,
2158 extend its life to after this insn,
2159 so it won't get allocated together with any other result of this insn.
2161 It is unsafe to use !single_set here since it will ignore an unused
2162 output. Just because an output is unused does not mean the compiler
2163 can assume the side effect will not occur. Consider if REG appears
2164 in the address of an output and we reload the output. If we allocate
2165 REG to the same hard register as an unused output we could set the hard
2166 register before the output reload insn. */
2167 if (GET_CODE (PATTERN (this_insn)) == PARALLEL
2168 && multiple_sets (this_insn))
2170 int i;
2171 for (i = XVECLEN (PATTERN (this_insn), 0) - 1; i >= 0; i--)
2173 rtx set = XVECEXP (PATTERN (this_insn), 0, i);
2174 if (GET_CODE (set) == SET
2175 && !REG_P (SET_DEST (set))
2176 && !rtx_equal_p (reg, SET_DEST (set))
2177 && reg_overlap_mentioned_p (reg, SET_DEST (set)))
2178 output_p = 1;
2182 /* If this register is used in an auto-increment address, then extend its
2183 life to after this insn, so that it won't get allocated together with
2184 the result of this insn. */
2185 if (! output_p && find_regno_note (this_insn, REG_INC, regno))
2186 output_p = 1;
2188 if (regno < FIRST_PSEUDO_REGISTER)
2190 mark_life (regno, GET_MODE (reg), 0);
2192 /* If a hard register is dying as an output, mark it as in use at
2193 the beginning of this insn (the above statement would cause this
2194 not to happen). */
2195 if (output_p)
2196 post_mark_life (regno, GET_MODE (reg), 1,
2197 2 * this_insn_number, 2 * this_insn_number + 1);
2200 else if (reg_qty[regno] >= 0)
2201 qty[reg_qty[regno]].death = 2 * this_insn_number + output_p;
2204 /* Find a block of SIZE words of hard regs in reg_class CLASS
2205 that can hold something of machine-mode MODE
2206 (but actually we test only the first of the block for holding MODE)
2207 and still free between insn BORN_INDEX and insn DEAD_INDEX,
2208 and return the number of the first of them.
2209 Return -1 if such a block cannot be found.
2210 If QTYNO crosses calls, insist on a register preserved by calls,
2211 unless ACCEPT_CALL_CLOBBERED is nonzero.
2213 If JUST_TRY_SUGGESTED is nonzero, only try to see if the suggested
2214 register is available. If not, return -1. */
2216 static int
2217 find_free_reg (enum reg_class class, enum machine_mode mode, int qtyno,
2218 int accept_call_clobbered, int just_try_suggested,
2219 int born_index, int dead_index)
2221 int i, ins;
2222 HARD_REG_SET first_used, used;
2223 #ifdef ELIMINABLE_REGS
2224 static const struct {const int from, to; } eliminables[] = ELIMINABLE_REGS;
2225 #endif
2227 /* Validate our parameters. */
2228 gcc_assert (born_index >= 0 && born_index <= dead_index);
2230 /* Don't let a pseudo live in a reg across a function call
2231 if we might get a nonlocal goto. */
2232 if (current_function_has_nonlocal_label
2233 && qty[qtyno].n_calls_crossed > 0)
2234 return -1;
2236 if (accept_call_clobbered)
2237 COPY_HARD_REG_SET (used, call_fixed_reg_set);
2238 else if (qty[qtyno].n_calls_crossed == 0)
2239 COPY_HARD_REG_SET (used, fixed_reg_set);
2240 else
2241 COPY_HARD_REG_SET (used, call_used_reg_set);
2243 if (accept_call_clobbered)
2244 IOR_HARD_REG_SET (used, losing_caller_save_reg_set);
2246 for (ins = born_index; ins < dead_index; ins++)
2247 IOR_HARD_REG_SET (used, regs_live_at[ins]);
2249 IOR_COMPL_HARD_REG_SET (used, reg_class_contents[(int) class]);
2251 /* Don't use the frame pointer reg in local-alloc even if
2252 we may omit the frame pointer, because if we do that and then we
2253 need a frame pointer, reload won't know how to move the pseudo
2254 to another hard reg. It can move only regs made by global-alloc.
2256 This is true of any register that can be eliminated. */
2257 #ifdef ELIMINABLE_REGS
2258 for (i = 0; i < (int) ARRAY_SIZE (eliminables); i++)
2259 SET_HARD_REG_BIT (used, eliminables[i].from);
2260 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
2261 /* If FRAME_POINTER_REGNUM is not a real register, then protect the one
2262 that it might be eliminated into. */
2263 SET_HARD_REG_BIT (used, HARD_FRAME_POINTER_REGNUM);
2264 #endif
2265 #else
2266 SET_HARD_REG_BIT (used, FRAME_POINTER_REGNUM);
2267 #endif
2269 #ifdef CANNOT_CHANGE_MODE_CLASS
2270 cannot_change_mode_set_regs (&used, mode, qty[qtyno].first_reg);
2271 #endif
2273 /* Normally, the registers that can be used for the first register in
2274 a multi-register quantity are the same as those that can be used for
2275 subsequent registers. However, if just trying suggested registers,
2276 restrict our consideration to them. If there are copy-suggested
2277 registers, try them. Otherwise, try the arithmetic-suggested
2278 registers. */
2279 COPY_HARD_REG_SET (first_used, used);
2281 if (just_try_suggested)
2283 if (qty_phys_num_copy_sugg[qtyno] != 0)
2284 IOR_COMPL_HARD_REG_SET (first_used, qty_phys_copy_sugg[qtyno]);
2285 else
2286 IOR_COMPL_HARD_REG_SET (first_used, qty_phys_sugg[qtyno]);
2289 /* If at least one would be suitable, test each hard reg. */
2290 if (!hard_reg_set_subset_p (reg_class_contents[(int) ALL_REGS], first_used))
2291 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2293 #ifdef REG_ALLOC_ORDER
2294 int regno = reg_alloc_order[i];
2295 #else
2296 int regno = i;
2297 #endif
2298 if (!TEST_HARD_REG_BIT (first_used, regno)
2299 && HARD_REGNO_MODE_OK (regno, mode)
2300 && (qty[qtyno].n_calls_crossed == 0
2301 || accept_call_clobbered
2302 || !HARD_REGNO_CALL_PART_CLOBBERED (regno, mode)))
2304 int j;
2305 int size1 = hard_regno_nregs[regno][mode];
2306 j = 1;
2307 while (j < size1 && !TEST_HARD_REG_BIT (used, regno + j))
2308 j++;
2309 if (j == size1)
2311 /* Mark that this register is in use between its birth
2312 and death insns. */
2313 post_mark_life (regno, mode, 1, born_index, dead_index);
2314 return regno;
2316 #ifndef REG_ALLOC_ORDER
2317 /* Skip starting points we know will lose. */
2318 i += j;
2319 #endif
2323 /* If we are just trying suggested registers, we have now tried the
2324 copy-suggested registers; if there are also arithmetic-suggested
2325 registers, try them. */
2327 /* If it would be profitable to allocate a call-clobbered register
2328 and save and restore it around calls, do that. */
2329 if (just_try_suggested && qty_phys_num_copy_sugg[qtyno] != 0
2330 && qty_phys_num_sugg[qtyno] != 0)
2332 /* Don't try the copy-suggested regs again. */
2333 qty_phys_num_copy_sugg[qtyno] = 0;
2334 return find_free_reg (class, mode, qtyno, accept_call_clobbered, 1,
2335 born_index, dead_index);
2338 /* We need not check to see if the current function has nonlocal
2339 labels because we don't put any pseudos that are live over calls in
2340 registers in that case. Avoid putting pseudos crossing calls that
2341 might throw into call used registers. */
2343 if (! accept_call_clobbered
2344 && flag_caller_saves
2345 && ! just_try_suggested
2346 && qty[qtyno].n_calls_crossed != 0
2347 && qty[qtyno].n_throwing_calls_crossed == 0
2348 && CALLER_SAVE_PROFITABLE (qty[qtyno].n_refs,
2349 qty[qtyno].n_calls_crossed))
2351 i = find_free_reg (class, mode, qtyno, 1, 0, born_index, dead_index);
2352 if (i >= 0)
2353 caller_save_needed = 1;
2354 return i;
2356 return -1;
2359 /* Mark that REGNO with machine-mode MODE is live starting from the current
2360 insn (if LIFE is nonzero) or dead starting at the current insn (if LIFE
2361 is zero). */
2363 static void
2364 mark_life (int regno, enum machine_mode mode, int life)
2366 if (life)
2367 add_to_hard_reg_set (&regs_live, mode, regno);
2368 else
2369 remove_from_hard_reg_set (&regs_live, mode, regno);
2372 /* Mark register number REGNO (with machine-mode MODE) as live (if LIFE
2373 is nonzero) or dead (if LIFE is zero) from insn number BIRTH (inclusive)
2374 to insn number DEATH (exclusive). */
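/* Usage sketch: the call post_mark_life (regno, mode, 1, 2 * N, 2 * N + 1)
   made from wipe_dead_reg marks REGNO live only in insn N's slot of
   regs_live_at, since DEATH is exclusive.  */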
2376 static void
2377 post_mark_life (int regno, enum machine_mode mode, int life, int birth,
2378 int death)
2380 HARD_REG_SET this_reg;
2382 CLEAR_HARD_REG_SET (this_reg);
2383 add_to_hard_reg_set (&this_reg, mode, regno);
2385 if (life)
2386 while (birth < death)
2388 IOR_HARD_REG_SET (regs_live_at[birth], this_reg);
2389 birth++;
2391 else
2392 while (birth < death)
2394 AND_COMPL_HARD_REG_SET (regs_live_at[birth], this_reg);
2395 birth++;
2399 /* INSN is the CLOBBER insn that starts a REG_NO_CONFLICT block, R0
2400 is the register being clobbered, and R1 is a register being used in
2401 the equivalent expression.
2403 If R1 dies in the block and has a REG_NO_CONFLICT note on every insn
2404 in which it is used, return 1.
2406 Otherwise, return 0. */
2408 static int
2409 no_conflict_p (rtx insn, rtx r0 ATTRIBUTE_UNUSED, rtx r1)
2411 int ok = 0;
2412 rtx note = find_reg_note (insn, REG_LIBCALL, NULL_RTX);
2413 rtx p, last;
2415 /* If R1 is a hard register, return 0 since we handle this case
2416 when we scan the insns that actually use it. */
2418 if (note == 0
2419 || (REG_P (r1) && REGNO (r1) < FIRST_PSEUDO_REGISTER)
2420 || (GET_CODE (r1) == SUBREG && REG_P (SUBREG_REG (r1))
2421 && REGNO (SUBREG_REG (r1)) < FIRST_PSEUDO_REGISTER))
2422 return 0;
2424 last = XEXP (note, 0);
2426 for (p = NEXT_INSN (insn); p && p != last; p = NEXT_INSN (p))
2427 if (INSN_P (p))
2429 if (find_reg_note (p, REG_DEAD, r1))
2430 ok = 1;
2432 /* There must be a REG_NO_CONFLICT note on every insn, otherwise
2433 some earlier optimization pass has inserted instructions into
2434 the sequence, and it is not safe to perform this optimization.
2435 Note that emit_no_conflict_block always ensures that this is
2436 true when these sequences are created. */
2437 if (! find_reg_note (p, REG_NO_CONFLICT, r1))
2438 return 0;
2441 return ok;
2444 /* Return the number of alternatives for which the constraint string P
2445 indicates that the operand must be equal to operand 0 and that no register
2446 is acceptable. */
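/* Illustrative results (hypothetical constraint strings): "0" yields 1,
   since its single alternative must match operand 0 and allows no
   register; "0r" yields 0, because that alternative also accepts a
   register; "0,r" yields 1, as only the first alternative qualifies.  */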
2448 static int
2449 requires_inout (const char *p)
2451 char c;
2452 int found_zero = 0;
2453 int reg_allowed = 0;
2454 int num_matching_alts = 0;
2455 int len;
2457 for ( ; (c = *p); p += len)
2459 len = CONSTRAINT_LEN (c, p);
2460 switch (c)
2462 case '=': case '+': case '?':
2463 case '#': case '&': case '!':
2464 case '*': case '%':
2465 case 'm': case '<': case '>': case 'V': case 'o':
2466 case 'E': case 'F': case 'G': case 'H':
2467 case 's': case 'i': case 'n':
2468 case 'I': case 'J': case 'K': case 'L':
2469 case 'M': case 'N': case 'O': case 'P':
2470 case 'X':
2471 /* These don't say anything we care about. */
2472 break;
2474 case ',':
2475 if (found_zero && ! reg_allowed)
2476 num_matching_alts++;
2478 found_zero = reg_allowed = 0;
2479 break;
2481 case '0':
2482 found_zero = 1;
2483 break;
2485 case '1': case '2': case '3': case '4': case '5':
2486 case '6': case '7': case '8': case '9':
2487 /* Skip the balance of the matching constraint. */
2488 do
2489 p++;
2490 while (ISDIGIT (*p));
2491 len = 0;
2492 break;
2494 default:
2495 if (REG_CLASS_FROM_CONSTRAINT (c, p) == NO_REGS
2496 && !EXTRA_ADDRESS_CONSTRAINT (c, p))
2497 break;
2498 /* Fall through. */
2499 case 'p':
2500 case 'g': case 'r':
2501 reg_allowed = 1;
2502 break;
2506 if (found_zero && ! reg_allowed)
2507 num_matching_alts++;
2509 return num_matching_alts;
2512 void
2513 dump_local_alloc (FILE *file)
2515 int i;
2516 for (i = FIRST_PSEUDO_REGISTER; i < max_regno; i++)
2517 if (reg_renumber[i] != -1)
2518 fprintf (file, ";; Register %d in %d.\n", i, reg_renumber[i]);
2521 /* Run old register allocator. Return TRUE if we must exit
2522 rest_of_compilation upon return. */
2523 static unsigned int
2524 rest_of_handle_local_alloc (void)
2526 int rebuild_notes;
2528 /* Determine if the current function is a leaf before running reload
2529 since this can impact optimizations done by the prologue and
2530 epilogue, thus changing register elimination offsets. */
2531 current_function_is_leaf = leaf_function_p ();
2533 /* Allocate the reg_renumber array. */
2534 allocate_reg_info (max_regno, FALSE, TRUE);
2536 /* And the reg_equiv_memory_loc array. */
2537 VEC_safe_grow (rtx, gc, reg_equiv_memory_loc_vec, max_regno);
2538 memset (VEC_address (rtx, reg_equiv_memory_loc_vec), 0,
2539 sizeof (rtx) * max_regno);
2540 reg_equiv_memory_loc = VEC_address (rtx, reg_equiv_memory_loc_vec);
2542 allocate_initial_values (reg_equiv_memory_loc);
2544 regclass (get_insns (), max_reg_num ());
2545 rebuild_notes = local_alloc ();
2547 /* Local allocation may have turned an indirect jump into a direct
2548 jump. If so, we must rebuild the JUMP_LABEL fields of jumping
2549 instructions. */
2550 if (rebuild_notes)
2552 timevar_push (TV_JUMP);
2554 rebuild_jump_labels (get_insns ());
2555 purge_all_dead_edges ();
2556 delete_unreachable_blocks ();
2558 timevar_pop (TV_JUMP);
2561 if (dump_file && (dump_flags & TDF_DETAILS))
2563 timevar_push (TV_DUMP);
2564 dump_flow_info (dump_file, dump_flags);
2565 dump_local_alloc (dump_file);
2566 timevar_pop (TV_DUMP);
2568 return 0;
2571 struct tree_opt_pass pass_local_alloc =
2573 "lreg", /* name */
2574 NULL, /* gate */
2575 rest_of_handle_local_alloc, /* execute */
2576 NULL, /* sub */
2577 NULL, /* next */
2578 0, /* static_pass_number */
2579 TV_LOCAL_ALLOC, /* tv_id */
2580 0, /* properties_required */
2581 0, /* properties_provided */
2582 0, /* properties_destroyed */
2583 0, /* todo_flags_start */
2584 TODO_dump_func |
2585 TODO_ggc_collect, /* todo_flags_finish */
2586 'l' /* letter */