/* LRA (local register allocator) driver and LRA utilities.
   Copyright (C) 2010, 2011, 2012
   Free Software Foundation, Inc.
   Contributed by Vladimir Makarov <vmakarov@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* The Local Register Allocator (LRA) is a replacement of the former
   reload pass.  Its focus is to simplify the code that solves the
   reload pass tasks, to make the code easier to maintain, and to
   implement new, promising optimizations.

   The major LRA design solutions are:
     o division into small, manageable, separate sub-tasks;
     o reflection of all transformations and decisions in RTL in the
       most transparent way possible;
     o insn constraints as the primary source of information
       (minimizing the number of target-dependent macros/hooks).

   In brief, LRA works by iteratively processing insns until all insn
   and address constraints are satisfied:
     o New reload insns (in brief, reloads) and reload pseudos might
       be generated;
     o Some pseudos might be spilled to assign hard registers to new
       reload pseudos;
     o Spilled pseudos are changed to stack memory or their
       equivalences;
     o Allocating stack memory changes the address displacements, so
       a new iteration is needed.
   Here is a block diagram of the LRA passes:

                                ---------------------
          ---------------      | Undo inheritance    |      ---------------
         | Memory-memory |     | for spilled pseudos,|     | New (and old) |
         | move coalesce |<----| splits for pseudos  |<----|    pseudos    |
          ---------------      | got the same hard   |     |  assignment   |
   Start          |            | regs                |      ---------------
     |            |             ---------------------             ^
     V            |              ----------------                 |
  -----------     V             | Update virtual |                |
 |  Remove   |----> ----------->|    register    |                |
 | scratches |    ^             |  displacements |                |
  -----------     |              ----------------                 |
                  |                      |                        |
                  |                      V          New           |
          ----------------    No    ------------  pseudos    -------------------
         | Spilled pseudo | change |Constraints:| or insns  | Inheritance/split |
         |   to memory    |<-------|    RTL     |---------->|  transformations  |
         |  substitution  |        | transfor-  |           |   in EBB scope    |
          ----------------         |  mations   |            -------------------
                 |                  ------------
                 V
     -------------------------
    | Hard regs substitution, |
    | devirtualization, and   |------> Finish
    | restoring scratches got |
    |         memory          |
     -------------------------
   To speed up the process:
     o We process only insns affected by changes on the previous
       iterations;
     o We don't use the DFA-infrastructure because it results in a
       much slower compiler than the special IR described below does;
     o We use a special insn representation for quick access to insn
       info which is always *synchronized* with the current RTL;
     o The insn IR is kept small in memory.  It is divided into three
       parts:
       o one specific to each insn in RTL (only operand locations);
       o one common to all insns in RTL with the same insn code
         (different operand attributes from machine descriptions);
       o one oriented towards maintenance of live info (a list of
         pseudos).
     o Pseudo data:
       o all insns where the pseudo is referenced;
       o live info (conflicting hard regs, live ranges, # of
         references etc);
       o data used for assigning (preferred hard regs, costs etc).
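   As an illustration of the insn IR split above (a sketch in terms of
   the structures used later in this file, with field lists
   abbreviated):

     lra_insn_recog_data   - one object per insn: operand and dup
                             locations, the used alternative, the list
                             of referenced registers;
     lra_static_insn_data  - one object per insn code, shared by all
                             insns with that code: operand attributes
                             from the machine description and operand
                             alternatives;
     lra_insn_reg          - one list node per referenced register:
                             type (in/out/inout), biggest mode, subreg
                             and early clobber flags.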
   This file contains the LRA driver, LRA utility functions and data,
   and code for dealing with scratches.  */
#include "coretypes.h"
#include "insn-config.h"
#include "insn-codes.h"
#include "addresses.h"
#include "hard-reg-set.h"
#include "function.h"
#include "basic-block.h"
#include "tree-pass.h"
/* Hard registers currently not available for allocation.  It can
   change after some hard registers become not eliminable.  */
HARD_REG_SET lra_no_alloc_regs;

static int get_new_reg_value (void);
static void expand_reg_info (void);
static void invalidate_insn_recog_data (int);
static int get_insn_freq (rtx);
static void invalidate_insn_data_regno_info (lra_insn_recog_data_t, rtx, int);
/* Expand all regno related info needed for LRA.  */
static void
expand_reg_data (void)
{
  ira_expand_reg_equiv ();
}
/* Create and return a new reg of ORIGINAL mode.  If ORIGINAL is NULL
   or of VOIDmode, use MD_MODE for the new reg.  Initialize its
   register class to RCLASS.  Print a message about assigning class
   RCLASS containing the new register name TITLE unless it is NULL.
   Use the attributes of ORIGINAL if it is a register.  The created
   register will have a unique held value.  */
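/* A typical use (an illustrative sketch, not a call quoted from GCC):
   creating a reload pseudo of an operand's goal class during
   constraint processing, where OP and CL are assumed to be the
   operand rtx and its register class:

     reload_reg = lra_create_new_reg_with_unique_value (GET_MODE (op),
                                                        op, cl, "reload");

   The TITLE string only affects the dump output.  */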
rtx
lra_create_new_reg_with_unique_value (enum machine_mode md_mode, rtx original,
                                      enum reg_class rclass, const char *title)
{
  enum machine_mode mode;
  rtx new_reg;

  if (original == NULL_RTX || (mode = GET_MODE (original)) == VOIDmode)
    mode = md_mode;
  lra_assert (mode != VOIDmode);
  new_reg = gen_reg_rtx (mode);
  if (original == NULL_RTX || ! REG_P (original))
    {
      if (lra_dump_file != NULL)
        fprintf (lra_dump_file, "      Creating newreg=%i", REGNO (new_reg));
    }
  else
    {
      if (ORIGINAL_REGNO (original) >= FIRST_PSEUDO_REGISTER)
        ORIGINAL_REGNO (new_reg) = ORIGINAL_REGNO (original);
      REG_USERVAR_P (new_reg) = REG_USERVAR_P (original);
      REG_POINTER (new_reg) = REG_POINTER (original);
      REG_ATTRS (new_reg) = REG_ATTRS (original);
      if (lra_dump_file != NULL)
        fprintf (lra_dump_file, "      Creating newreg=%i from oldreg=%i",
                 REGNO (new_reg), REGNO (original));
    }
  if (lra_dump_file != NULL)
    {
      if (title != NULL)
        fprintf (lra_dump_file, ", assigning class %s to%s%s r%d",
                 reg_class_names[rclass], *title == '\0' ? "" : " ",
                 title, REGNO (new_reg));
      fprintf (lra_dump_file, "\n");
    }
  setup_reg_classes (REGNO (new_reg), rclass, NO_REGS, rclass);
  return new_reg;
}
/* Analogous to the previous function but also inherits the value of
   ORIGINAL.  */
rtx
lra_create_new_reg (enum machine_mode md_mode, rtx original,
                    enum reg_class rclass, const char *title)
{
  rtx new_reg;

  new_reg
    = lra_create_new_reg_with_unique_value (md_mode, original, rclass, title);
  if (original != NULL_RTX && REG_P (original))
    lra_reg_info[REGNO (new_reg)].val = lra_reg_info[REGNO (original)].val;
  return new_reg;
}
/* Set up for REGNO a unique hold value.  */
void
lra_set_regno_unique_value (int regno)
{
  lra_reg_info[regno].val = get_new_reg_value ();
}

/* Invalidate INSN related info used by LRA.  */
void
lra_invalidate_insn_data (rtx insn)
{
  lra_invalidate_insn_regno_info (insn);
  invalidate_insn_recog_data (INSN_UID (insn));
}
/* Mark INSN deleted and invalidate the insn related info used by
   LRA.  */
void
lra_set_insn_deleted (rtx insn)
{
  lra_invalidate_insn_data (insn);
  SET_INSN_DELETED (insn);
}
/* Delete an unneeded INSN and any previous insns whose sole purpose
   is loading data that is dead in INSN.  */
void
lra_delete_dead_insn (rtx insn)
{
  rtx prev = prev_real_insn (insn);
  rtx prev_dest;

  /* If the previous insn sets a register that dies in our insn,
     delete it too.  */
  if (prev && GET_CODE (PATTERN (prev)) == SET
      && (prev_dest = SET_DEST (PATTERN (prev)), REG_P (prev_dest))
      && reg_mentioned_p (prev_dest, PATTERN (insn))
      && find_regno_note (insn, REG_DEAD, REGNO (prev_dest))
      && ! side_effects_p (SET_SRC (PATTERN (prev))))
    lra_delete_dead_insn (prev);

  lra_set_insn_deleted (insn);
}
/* Targets check operands through operand predicates to recognize an
   insn.  We should take special precautions when generating add
   insns, which are frequent results of elimination.

   Emit insns for x = y + z.  X can be used to store intermediate
   values and should not be in Y or Z when we use X to store an
   intermediate value.  Y + Z should form [base] [+ index[ * scale]]
   [+ disp] where base and index are registers, disp and scale are
   constants.  Y should contain base if it is present, Z should
   contain disp if any.  index[*scale] can be part of Y or Z.  */
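/* For illustration (a hypothetical address, not from a particular
   target): to compute x = base + index * 4 + 12 one could call

     lra_emit_add (x, base_reg,
                   gen_rtx_PLUS (Pmode,
                                 gen_rtx_MULT (Pmode, index_reg,
                                               GEN_INT (4)),
                                 GEN_INT (12)));

   where BASE_REG and INDEX_REG are assumed register rtxes.  When no
   matching 3-operand add or address-like move exists, the function
   below falls back to a sequence of moves and 2-operand adds.  */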
257 lra_emit_add (rtx x
, rtx y
, rtx z
)
261 rtx a1
, a2
, base
, index
, disp
, scale
, index_scale
;
264 insn
= gen_add3_insn (x
, y
, z
);
265 old
= max_reg_num ();
266 if (insn
!= NULL_RTX
)
270 disp
= a2
= NULL_RTX
;
271 if (GET_CODE (y
) == PLUS
)
285 index_scale
= scale
= NULL_RTX
;
286 if (GET_CODE (a1
) == MULT
)
289 index
= XEXP (a1
, 0);
290 scale
= XEXP (a1
, 1);
293 else if (a2
!= NULL_RTX
&& GET_CODE (a2
) == MULT
)
296 index
= XEXP (a2
, 0);
297 scale
= XEXP (a2
, 1);
306 || (index
!= NULL_RTX
&& ! REG_P (index
))
307 || (disp
!= NULL_RTX
&& ! CONSTANT_P (disp
))
308 || (scale
!= NULL_RTX
&& ! CONSTANT_P (scale
)))
310 /* Its is not an address generation. Probably we have no 3 op
311 add. Last chance is to use 2-op add insn. */
312 lra_assert (x
!= y
&& x
!= z
);
313 emit_move_insn (x
, z
);
314 insn
= gen_add2_insn (x
, y
);
319 if (index_scale
== NULL_RTX
)
321 if (disp
== NULL_RTX
)
323 /* Generate x = index_scale; x = x + base. */
324 lra_assert (index_scale
!= NULL_RTX
&& base
!= NULL_RTX
);
325 emit_move_insn (x
, index_scale
);
326 insn
= gen_add2_insn (x
, base
);
329 else if (scale
== NULL_RTX
)
331 /* Try x = base + disp. */
332 lra_assert (base
!= NULL_RTX
);
333 last
= get_last_insn ();
334 insn
= emit_move_insn (x
, gen_rtx_PLUS (GET_MODE (base
),
336 if (recog_memoized (insn
) < 0)
338 delete_insns_since (last
);
339 /* Generate x = disp; x = x + base. */
340 emit_move_insn (x
, disp
);
341 insn
= gen_add2_insn (x
, base
);
344 /* Generate x = x + index. */
345 if (index
!= NULL_RTX
)
347 insn
= gen_add2_insn (x
, index
);
353 /* Try x = index_scale; x = x + disp; x = x + base. */
354 last
= get_last_insn ();
355 insn
= emit_move_insn (x
, index_scale
);
357 if (recog_memoized (insn
) >= 0)
359 insn
= gen_add2_insn (x
, disp
);
360 if (insn
!= NULL_RTX
)
363 insn
= gen_add2_insn (x
, disp
);
364 if (insn
!= NULL_RTX
)
373 delete_insns_since (last
);
374 /* Generate x = disp; x = x + base; x = x + index_scale. */
375 emit_move_insn (x
, disp
);
376 insn
= gen_add2_insn (x
, base
);
378 insn
= gen_add2_insn (x
, index_scale
);
384 /* Functions emit_... can create pseudos -- so expand the pseudo
386 if (old
!= max_reg_num ())
/* The number of emitted reload insns so far.  */
int lra_curr_reload_num;
/* Emit x := y, processing the special case when y = u + v or
   y = u + v * scale + w through emit_add (Y can be an address which
   is base + index reg * scale + displacement in the general case).
   X may be used as an intermediate result, therefore it should not
   be in Y.  */
398 lra_emit_move (rtx x
, rtx y
)
402 if (GET_CODE (y
) != PLUS
)
404 if (rtx_equal_p (x
, y
))
406 old
= max_reg_num ();
407 emit_move_insn (x
, y
);
409 lra_reg_info
[ORIGINAL_REGNO (x
)].last_reload
= ++lra_curr_reload_num
;
410 /* Function emit_move can create pseudos -- so expand the pseudo
412 if (old
!= max_reg_num ())
416 lra_emit_add (x
, XEXP (y
, 0), XEXP (y
, 1));
419 /* Update insn operands which are duplication of operands whose
420 numbers are in array of NOPS (with end marker -1). The insn is
421 represented by its LRA internal representation ID. */
423 lra_update_dups (lra_insn_recog_data_t id
, signed char *nops
)
426 struct lra_static_insn_data
*static_id
= id
->insn_static_data
;
428 for (i
= 0; i
< static_id
->n_dups
; i
++)
429 for (j
= 0; (nop
= nops
[j
]) >= 0; j
++)
430 if (static_id
->dup_num
[i
] == nop
)
431 *id
->dup_loc
[i
] = *id
->operand_loc
[nop
];
/* This page contains code dealing with info about registers in the
   insn.  */

/* Pool for insn reg info.  */
static alloc_pool insn_reg_pool;
/* Initiate the pool for insn reg info.  */
static void
init_insn_regs (void)
{
  insn_reg_pool
    = create_alloc_pool ("insn regs", sizeof (struct lra_insn_reg), 100);
}
/* Create LRA insn related info about referenced REGNO with TYPE
   (in/out/inout), biggest reference mode MODE, flag that it is a
   reference through a subreg (SUBREG_P), flag that it is early
   clobbered in the insn (EARLY_CLOBBER), and reference to the next
   insn reg info (NEXT).  */
static struct lra_insn_reg *
new_insn_reg (int regno, enum op_type type, enum machine_mode mode,
              bool subreg_p, bool early_clobber, struct lra_insn_reg *next)
{
  struct lra_insn_reg *ir;

  ir = (struct lra_insn_reg *) pool_alloc (insn_reg_pool);
  ir->type = type;
  ir->biggest_mode = mode;
  ir->subreg_p = subreg_p;
  ir->early_clobber = early_clobber;
  ir->regno = regno;
  ir->next = next;
  return ir;
}
/* Free insn reg info IR.  */
static void
free_insn_reg (struct lra_insn_reg *ir)
{
  pool_free (insn_reg_pool, ir);
}

/* Free insn reg info list IR.  */
static void
free_insn_regs (struct lra_insn_reg *ir)
{
  struct lra_insn_reg *next_ir;

  for (; ir != NULL; ir = next_ir)
    {
      next_ir = ir->next;
      free_insn_reg (ir);
    }
}

/* Finish the pool for insn reg info.  */
static void
finish_insn_regs (void)
{
  free_alloc_pool (insn_reg_pool);
}
/* This page contains code dealing with LRA insn info (in other
   words, the LRA internal insn representation).  */
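/* A typical access pattern for this representation (an illustrative
   sketch; lra_get_insn_recog_data is the accessor declared in
   lra-int.h which sets the data up lazily):

     lra_insn_recog_data_t id = lra_get_insn_recog_data (insn);
     struct lra_static_insn_data *static_id = id->insn_static_data;

     for (i = 0; i < static_id->n_operands; i++)
       ... *id->operand_loc[i] ...				*/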
struct target_lra_int default_target_lra_int;
#if SWITCHABLE_TARGET
struct target_lra_int *this_target_lra_int = &default_target_lra_int;
#endif
/* Map INSN_CODE -> the static insn data.  This info is valid during
   the whole translation unit.  */
struct lra_static_insn_data *insn_code_data[LAST_INSN_CODE];
/* Debug insns are represented as a special insn with one input
   operand which is the RTL expression in var_location.  */
515 /* The following data are used as static insn operand data for all
516 debug insns. If structure lra_operand_data is changed, the
517 initializer should be changed too. */
518 static struct lra_operand_data debug_operand_data
=
520 NULL
, /* alternative */
    VOIDmode, /* We are not interested in the operand mode.  */
526 /* The following data are used as static insn data for all debug
527 insns. If structure lra_static_insn_data is changed, the
528 initializer should be changed too. */
529 static struct lra_static_insn_data debug_insn_static_data
=
532 0, /* Duplication operands #. */
533 -1, /* Commutative operand #. */
534 1, /* Operands #. There is only one operand which is debug RTL
536 0, /* Duplications #. */
    0, /* Alternatives #.  We are not interested in alternatives
	  because we do not process debug_insns for reloads.  */
539 NULL
, /* Hard registers referenced in machine description. */
540 NULL
/* Descriptions of operands in alternatives. */
/* Called once per compiler run to initialize some LRA data related
   to insns.  */
static void
init_insn_code_data_once (void)
{
  memset (insn_code_data, 0, sizeof (insn_code_data));
  memset (op_alt_data, 0, sizeof (op_alt_data));
}
/* Called once per compiler run to finalize some LRA data related to
   insns.  */
static void
finish_insn_code_data_once (void)
{
  int i;

  for (i = 0; i < LAST_INSN_CODE; i++)
    {
      if (insn_code_data[i] != NULL)
	free (insn_code_data[i]);
      if (op_alt_data[i] != NULL)
	free (op_alt_data[i]);
    }
}
/* Initialize LRA info about operands in insn alternatives.  */
static void
init_op_alt_data (void)
{
  int i;

  for (i = 0; i < LAST_INSN_CODE; i++)
    if (op_alt_data[i] != NULL)
      {
	free (op_alt_data[i]);
	op_alt_data[i] = NULL;
      }
}
/* Return static insn data, allocating and setting it up if necessary.
   Although dup_num is static data (it depends only on icode), to set
   it up we need to extract the insn first.  So recog_data should be
   valid for a normal insn (ICODE >= 0) before the call.  */
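/* An illustrative call sequence (a sketch mirroring the use in
   lra_set_insn_recog_data further below); insn_extract must run first
   so that recog_data is valid when ICODE >= 0:

     insn_extract (insn);
     data = get_static_insn_data (icode, insn_data[icode].n_operands,
				  insn_data[icode].n_dups,
				  insn_data[icode].n_alternatives);  */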
586 static struct lra_static_insn_data
*
587 get_static_insn_data (int icode
, int nop
, int ndup
, int nalt
)
589 struct lra_static_insn_data
*data
;
592 lra_assert (icode
< LAST_INSN_CODE
);
593 if (icode
>= 0 && (data
= insn_code_data
[icode
]) != NULL
)
595 lra_assert (nop
>= 0 && ndup
>= 0 && nalt
>= 0);
596 n_bytes
= sizeof (struct lra_static_insn_data
)
597 + sizeof (struct lra_operand_data
) * nop
598 + sizeof (int) * ndup
;
599 data
= XNEWVAR (struct lra_static_insn_data
, n_bytes
);
600 data
->n_operands
= nop
;
602 data
->n_alternatives
= nalt
;
603 data
->operand
= ((struct lra_operand_data
*)
604 ((char *) data
+ sizeof (struct lra_static_insn_data
)));
605 data
->dup_num
= ((int *) ((char *) data
->operand
606 + sizeof (struct lra_operand_data
) * nop
));
611 insn_code_data
[icode
] = data
;
612 for (i
= 0; i
< nop
; i
++)
614 data
->operand
[i
].constraint
615 = insn_data
[icode
].operand
[i
].constraint
;
616 data
->operand
[i
].mode
= insn_data
[icode
].operand
[i
].mode
;
617 data
->operand
[i
].strict_low
= insn_data
[icode
].operand
[i
].strict_low
;
618 data
->operand
[i
].is_operator
619 = insn_data
[icode
].operand
[i
].is_operator
;
620 data
->operand
[i
].type
621 = (data
->operand
[i
].constraint
[0] == '=' ? OP_OUT
622 : data
->operand
[i
].constraint
[0] == '+' ? OP_INOUT
624 data
->operand
[i
].is_address
= false;
626 for (i
= 0; i
< ndup
; i
++)
627 data
->dup_num
[i
] = recog_data
.dup_num
[i
];
/* The current length of the following array.  */
int lra_insn_recog_data_len;

/* Map INSN_UID -> the insn recog data (NULL if unknown).  */
lra_insn_recog_data_t *lra_insn_recog_data;
638 /* Initialize LRA data about insns. */
640 init_insn_recog_data (void)
642 lra_insn_recog_data_len
= 0;
643 lra_insn_recog_data
= NULL
;
647 /* Expand, if necessary, LRA data about insns. */
649 check_and_expand_insn_recog_data (int index
)
653 if (lra_insn_recog_data_len
> index
)
655 old
= lra_insn_recog_data_len
;
656 lra_insn_recog_data_len
= index
* 3 / 2 + 1;
657 lra_insn_recog_data
= XRESIZEVEC (lra_insn_recog_data_t
,
659 lra_insn_recog_data_len
);
660 for (i
= old
; i
< lra_insn_recog_data_len
; i
++)
661 lra_insn_recog_data
[i
] = NULL
;
664 /* Finish LRA DATA about insn. */
666 free_insn_recog_data (lra_insn_recog_data_t data
)
668 if (data
->operand_loc
!= NULL
)
669 free (data
->operand_loc
);
670 if (data
->dup_loc
!= NULL
)
671 free (data
->dup_loc
);
672 if (data
->arg_hard_regs
!= NULL
)
673 free (data
->arg_hard_regs
);
674 if (HAVE_ATTR_enabled
&& data
->alternative_enabled_p
!= NULL
)
675 free (data
->alternative_enabled_p
);
676 if (data
->icode
< 0 && NONDEBUG_INSN_P (data
->insn
))
678 if (data
->insn_static_data
->operand_alternative
!= NULL
)
679 free (data
->insn_static_data
->operand_alternative
);
680 free_insn_regs (data
->insn_static_data
->hard_regs
);
681 free (data
->insn_static_data
);
683 free_insn_regs (data
->regs
);
688 /* Finish LRA data about all insns. */
690 finish_insn_recog_data (void)
693 lra_insn_recog_data_t data
;
695 for (i
= 0; i
< lra_insn_recog_data_len
; i
++)
696 if ((data
= lra_insn_recog_data
[i
]) != NULL
)
697 free_insn_recog_data (data
);
699 free (lra_insn_recog_data
);
702 /* Setup info about operands in alternatives of LRA DATA of insn. */
704 setup_operand_alternative (lra_insn_recog_data_t data
)
707 int icode
= data
->icode
;
708 struct lra_static_insn_data
*static_data
= data
->insn_static_data
;
711 && (static_data
->operand_alternative
= op_alt_data
[icode
]) != NULL
)
713 static_data
->commutative
= -1;
714 nop
= static_data
->n_operands
;
717 static_data
->operand_alternative
= NULL
;
720 nalt
= static_data
->n_alternatives
;
721 static_data
->operand_alternative
= XNEWVEC (struct operand_alternative
,
723 memset (static_data
->operand_alternative
, 0,
724 nalt
* nop
* sizeof (struct operand_alternative
));
726 op_alt_data
[icode
] = static_data
->operand_alternative
;
727 for (i
= 0; i
< nop
; i
++)
730 struct operand_alternative
*op_alt_start
, *op_alt
;
731 const char *p
= static_data
->operand
[i
].constraint
;
733 static_data
->operand
[i
].early_clobber
= 0;
734 op_alt_start
= &static_data
->operand_alternative
[i
];
736 for (j
= 0; j
< nalt
; j
++)
738 op_alt
= op_alt_start
+ j
* nop
;
739 op_alt
->cl
= NO_REGS
;
740 op_alt
->constraint
= p
;
741 op_alt
->matches
= -1;
742 op_alt
->matched
= -1;
744 if (*p
== '\0' || *p
== ',')
746 op_alt
->anything_ok
= 1;
756 while (c
!= ',' && c
!= '\0');
757 if (c
== ',' || c
== '\0')
765 case '=': case '+': case '*':
766 case 'E': case 'F': case 'G': case 'H':
767 case 's': case 'i': case 'n':
768 case 'I': case 'J': case 'K': case 'L':
769 case 'M': case 'N': case 'O': case 'P':
770 /* These don't say anything we care about. */
/* We currently only support one commutative pair of
   operands.  */
776 if (static_data
->commutative
< 0)
777 static_data
->commutative
= i
;
779 lra_assert (data
->icode
< 0); /* Asm */
/* The last operand should not be marked
   commutative.  */
783 lra_assert (i
!= nop
- 1);
787 op_alt
->reject
+= LRA_LOSER_COST_FACTOR
;
790 op_alt
->reject
+= LRA_MAX_REJECT
;
793 op_alt
->earlyclobber
= 1;
794 static_data
->operand
[i
].early_clobber
= 1;
797 case '0': case '1': case '2': case '3': case '4':
798 case '5': case '6': case '7': case '8': case '9':
801 op_alt
->matches
= strtoul (p
, &end
, 10);
802 static_data
->operand_alternative
803 [j
* nop
+ op_alt
->matches
].matched
= i
;
808 case TARGET_MEM_CONSTRAINT
:
809 op_alt
->memory_ok
= 1;
812 op_alt
->decmem_ok
= 1;
815 op_alt
->incmem_ok
= 1;
818 op_alt
->nonoffmem_ok
= 1;
821 op_alt
->offmem_ok
= 1;
824 op_alt
->anything_ok
= 1;
828 static_data
->operand
[i
].is_address
= true;
829 op_alt
->is_address
= 1;
830 op_alt
->cl
= (reg_class_subunion
[(int) op_alt
->cl
]
831 [(int) base_reg_class (VOIDmode
,
839 reg_class_subunion
[(int) op_alt
->cl
][(int) GENERAL_REGS
];
843 if (EXTRA_MEMORY_CONSTRAINT (c
, p
))
845 op_alt
->memory_ok
= 1;
848 if (EXTRA_ADDRESS_CONSTRAINT (c
, p
))
850 static_data
->operand
[i
].is_address
= true;
851 op_alt
->is_address
= 1;
853 = (reg_class_subunion
855 [(int) base_reg_class (VOIDmode
, ADDR_SPACE_GENERIC
,
861 = (reg_class_subunion
864 REG_CLASS_FROM_CONSTRAINT ((unsigned char) c
, p
)]);
867 p
+= CONSTRAINT_LEN (c
, p
);
/* Recursively process X and collect info about registers, which are
   not the insn operands, in X with TYPE (in/out/inout) and flag that
   it is early clobbered in the insn (EARLY_CLOBBER), and add the info
   to LIST.  X is a part of insn given by DATA.  Return the resulting
   list.  */
878 static struct lra_insn_reg
*
879 collect_non_operand_hard_regs (rtx
*x
, lra_insn_recog_data_t data
,
880 struct lra_insn_reg
*list
,
881 enum op_type type
, bool early_clobber
)
883 int i
, j
, regno
, last
;
885 enum machine_mode mode
;
886 struct lra_insn_reg
*curr
;
888 enum rtx_code code
= GET_CODE (op
);
889 const char *fmt
= GET_RTX_FORMAT (code
);
891 for (i
= 0; i
< data
->insn_static_data
->n_operands
; i
++)
892 if (x
== data
->operand_loc
[i
])
893 /* It is an operand loc. Stop here. */
895 for (i
= 0; i
< data
->insn_static_data
->n_dups
; i
++)
896 if (x
== data
->dup_loc
[i
])
897 /* It is a dup loc. Stop here. */
899 mode
= GET_MODE (op
);
903 op
= SUBREG_REG (op
);
904 code
= GET_CODE (op
);
905 if (GET_MODE_SIZE (mode
) < GET_MODE_SIZE (GET_MODE (op
)))
907 mode
= GET_MODE (op
);
908 if (GET_MODE_SIZE (mode
) > REGMODE_NATURAL_SIZE (mode
))
914 if ((regno
= REGNO (op
)) >= FIRST_PSEUDO_REGISTER
)
916 for (last
= regno
+ hard_regno_nregs
[regno
][mode
];
919 if (! TEST_HARD_REG_BIT (lra_no_alloc_regs
, regno
))
921 for (curr
= list
; curr
!= NULL
; curr
= curr
->next
)
922 if (curr
->regno
== regno
&& curr
->subreg_p
== subreg_p
923 && curr
->biggest_mode
== mode
)
925 if (curr
->type
!= type
)
926 curr
->type
= OP_INOUT
;
927 if (curr
->early_clobber
!= early_clobber
)
928 curr
->early_clobber
= true;
933 /* This is a new hard regno or the info can not be
934 integrated into the found structure. */
938 /* This clobber is to inform popping floating
940 && ! (FIRST_STACK_REG
<= regno
941 && regno
<= LAST_STACK_REG
));
943 list
= new_insn_reg (regno
, type
, mode
, subreg_p
,
944 early_clobber
, list
);
952 list
= collect_non_operand_hard_regs (&SET_DEST (op
), data
,
953 list
, OP_OUT
, false);
954 list
= collect_non_operand_hard_regs (&SET_SRC (op
), data
,
958 /* We treat clobber of non-operand hard registers as early
959 clobber (the behavior is expected from asm). */
960 list
= collect_non_operand_hard_regs (&XEXP (op
, 0), data
,
963 case PRE_INC
: case PRE_DEC
: case POST_INC
: case POST_DEC
:
964 list
= collect_non_operand_hard_regs (&XEXP (op
, 0), data
,
965 list
, OP_INOUT
, false);
967 case PRE_MODIFY
: case POST_MODIFY
:
968 list
= collect_non_operand_hard_regs (&XEXP (op
, 0), data
,
969 list
, OP_INOUT
, false);
970 list
= collect_non_operand_hard_regs (&XEXP (op
, 1), data
,
974 fmt
= GET_RTX_FORMAT (code
);
975 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
978 list
= collect_non_operand_hard_regs (&XEXP (op
, i
), data
,
980 else if (fmt
[i
] == 'E')
981 for (j
= XVECLEN (op
, i
) - 1; j
>= 0; j
--)
982 list
= collect_non_operand_hard_regs (&XVECEXP (op
, i
, j
), data
,
989 /* Set up and return info about INSN. Set up the info if it is not set up
991 lra_insn_recog_data_t
992 lra_set_insn_recog_data (rtx insn
)
994 lra_insn_recog_data_t data
;
997 unsigned int uid
= INSN_UID (insn
);
998 struct lra_static_insn_data
*insn_static_data
;
1000 check_and_expand_insn_recog_data (uid
);
1001 if (DEBUG_INSN_P (insn
))
1005 icode
= INSN_CODE (insn
);
1007 /* It might be a new simple insn which is not recognized yet. */
1008 INSN_CODE (insn
) = icode
= recog_memoized (insn
);
1010 data
= XNEW (struct lra_insn_recog_data
);
1011 lra_insn_recog_data
[uid
] = data
;
1013 data
->used_insn_alternative
= -1;
1014 data
->icode
= icode
;
1016 if (DEBUG_INSN_P (insn
))
1018 data
->insn_static_data
= &debug_insn_static_data
;
1019 data
->dup_loc
= NULL
;
1020 data
->arg_hard_regs
= NULL
;
1021 data
->alternative_enabled_p
= NULL
;
1022 data
->operand_loc
= XNEWVEC (rtx
*, 1);
1023 data
->operand_loc
[0] = &INSN_VAR_LOCATION_LOC (insn
);
1029 enum machine_mode operand_mode
[MAX_RECOG_OPERANDS
];
1030 const char *constraints
[MAX_RECOG_OPERANDS
];
1032 nop
= asm_noperands (PATTERN (insn
));
1033 data
->operand_loc
= data
->dup_loc
= NULL
;
/* It is a special insn like USE or CLOBBER.  */
1036 data
->insn_static_data
= insn_static_data
1037 = get_static_insn_data (-1, 0, 0, 1);
/* expand_asm_operands makes sure there aren't too many
   operands.  */
1042 lra_assert (nop
<= MAX_RECOG_OPERANDS
);
1044 data
->operand_loc
= XNEWVEC (rtx
*, nop
);
/* Now get the operand values and constraints out of the
   insn.  */
1047 decode_asm_operands (PATTERN (insn
), NULL
,
1049 constraints
, operand_mode
, NULL
);
1053 const char *p
= recog_data
.constraints
[0];
1055 for (p
= constraints
[0]; *p
; p
++)
1058 data
->insn_static_data
= insn_static_data
1059 = get_static_insn_data (-1, nop
, 0, n
);
1060 for (i
= 0; i
< nop
; i
++)
1062 insn_static_data
->operand
[i
].mode
= operand_mode
[i
];
1063 insn_static_data
->operand
[i
].constraint
= constraints
[i
];
1064 insn_static_data
->operand
[i
].strict_low
= false;
1065 insn_static_data
->operand
[i
].is_operator
= false;
1066 insn_static_data
->operand
[i
].is_address
= false;
1069 for (i
= 0; i
< insn_static_data
->n_operands
; i
++)
1070 insn_static_data
->operand
[i
].type
1071 = (insn_static_data
->operand
[i
].constraint
[0] == '=' ? OP_OUT
1072 : insn_static_data
->operand
[i
].constraint
[0] == '+' ? OP_INOUT
1074 data
->alternative_enabled_p
= NULL
;
1078 insn_extract (insn
);
1079 data
->insn_static_data
= insn_static_data
1080 = get_static_insn_data (icode
, insn_data
[icode
].n_operands
,
1081 insn_data
[icode
].n_dups
,
1082 insn_data
[icode
].n_alternatives
);
1083 n
= insn_static_data
->n_operands
;
1088 locs
= XNEWVEC (rtx
*, n
);
1089 memcpy (locs
, recog_data
.operand_loc
, n
* sizeof (rtx
*));
1091 data
->operand_loc
= locs
;
1092 n
= insn_static_data
->n_dups
;
1097 locs
= XNEWVEC (rtx
*, n
);
1098 memcpy (locs
, recog_data
.dup_loc
, n
* sizeof (rtx
*));
1100 data
->dup_loc
= locs
;
1101 if (HAVE_ATTR_enabled
)
1105 n
= insn_static_data
->n_alternatives
;
1106 lra_assert (n
>= 0);
1107 data
->alternative_enabled_p
= bp
= XNEWVEC (bool, n
);
/* Cache the insn because we don't want to call extract_insn
   from get_attr_enabled, as extract_insn modifies
   which_alternative.  The attribute enabled should not depend
   on insn operands, operand modes, operand types, or operand
   constraints.  It should depend only on the architecture.  If
   that is not true, we should rewrite this file's code to use
   extract_insn instead of the less expensive insn_extract.  */
1115 recog_data
.insn
= insn
;
1116 for (i
= 0; i
< n
; i
++)
1118 which_alternative
= i
;
1119 bp
[i
] = get_attr_enabled (insn
);
1123 if (GET_CODE (PATTERN (insn
)) == CLOBBER
|| GET_CODE (PATTERN (insn
)) == USE
)
1124 insn_static_data
->hard_regs
= NULL
;
1126 insn_static_data
->hard_regs
1127 = collect_non_operand_hard_regs (&PATTERN (insn
), data
,
1128 NULL
, OP_IN
, false);
1129 setup_operand_alternative (data
);
1130 data
->arg_hard_regs
= NULL
;
1134 int n_hard_regs
, regno
, arg_hard_regs
[FIRST_PSEUDO_REGISTER
];
/* Find implicit hard register usage.  We believe it will not be
   changed whatever transformations are used.  Call insns are an
   example.  */
1140 for (link
= CALL_INSN_FUNCTION_USAGE (insn
);
1142 link
= XEXP (link
, 1))
1143 if (GET_CODE (XEXP (link
, 0)) == USE
1144 && REG_P (XEXP (XEXP (link
, 0), 0)))
1146 regno
= REGNO (XEXP (XEXP (link
, 0), 0));
1147 lra_assert (regno
< FIRST_PSEUDO_REGISTER
);
1148 /* It is an argument register. */
1149 for (i
= (hard_regno_nregs
1150 [regno
][GET_MODE (XEXP (XEXP (link
, 0), 0))]) - 1;
1153 arg_hard_regs
[n_hard_regs
++] = regno
+ i
;
1155 if (n_hard_regs
!= 0)
1157 arg_hard_regs
[n_hard_regs
++] = -1;
1158 data
->arg_hard_regs
= XNEWVEC (int, n_hard_regs
);
1159 memcpy (data
->arg_hard_regs
, arg_hard_regs
,
1160 sizeof (int) * n_hard_regs
);
/* Some output operands can be recognized only from the context, not
   from the constraints, which are empty in this case.  A call insn
   may contain a hard register in its set destination with an empty
   constraint, and extract_insn treats it as an input.  */
1167 for (i
= 0; i
< insn_static_data
->n_operands
; i
++)
1171 struct lra_operand_data
*operand
= &insn_static_data
->operand
[i
];
/* ??? Should we treat 'X' the same way?  It looks to me that
   'X' means anything and an empty constraint means we do not
   care.  */
1176 if (operand
->type
!= OP_IN
|| *operand
->constraint
!= '\0'
1177 || operand
->is_operator
)
1179 pat
= PATTERN (insn
);
1180 if (GET_CODE (pat
) == SET
)
1182 if (data
->operand_loc
[i
] != &SET_DEST (pat
))
1185 else if (GET_CODE (pat
) == PARALLEL
)
1187 for (j
= XVECLEN (pat
, 0) - 1; j
>= 0; j
--)
1189 set
= XVECEXP (PATTERN (insn
), 0, j
);
1190 if (GET_CODE (set
) == SET
1191 && &SET_DEST (set
) == data
->operand_loc
[i
])
1199 operand
->type
= OP_OUT
;
/* Return info about insn given by UID.  The info should be already
   set up.  */
1206 static lra_insn_recog_data_t
1207 get_insn_recog_data_by_uid (int uid
)
1209 lra_insn_recog_data_t data
;
1211 data
= lra_insn_recog_data
[uid
];
1212 lra_assert (data
!= NULL
);
1216 /* Invalidate all info about insn given by its UID. */
1218 invalidate_insn_recog_data (int uid
)
1220 lra_insn_recog_data_t data
;
1222 data
= lra_insn_recog_data
[uid
];
1223 lra_assert (data
!= NULL
);
1224 free_insn_recog_data (data
);
1225 lra_insn_recog_data
[uid
] = NULL
;
1228 /* Update all the insn info about INSN. It is usually called when
1229 something in the insn was changed. Return the updated info. */
1230 lra_insn_recog_data_t
1231 lra_update_insn_recog_data (rtx insn
)
1233 lra_insn_recog_data_t data
;
1235 unsigned int uid
= INSN_UID (insn
);
1236 struct lra_static_insn_data
*insn_static_data
;
1238 check_and_expand_insn_recog_data (uid
);
1239 if ((data
= lra_insn_recog_data
[uid
]) != NULL
1240 && data
->icode
!= INSN_CODE (insn
))
1242 invalidate_insn_data_regno_info (data
, insn
, get_insn_freq (insn
));
1243 invalidate_insn_recog_data (uid
);
1247 return lra_get_insn_recog_data (insn
);
1248 insn_static_data
= data
->insn_static_data
;
1249 data
->used_insn_alternative
= -1;
1250 if (DEBUG_INSN_P (insn
))
1252 if (data
->icode
< 0)
1255 enum machine_mode operand_mode
[MAX_RECOG_OPERANDS
];
1256 const char *constraints
[MAX_RECOG_OPERANDS
];
1258 nop
= asm_noperands (PATTERN (insn
));
1261 lra_assert (nop
== data
->insn_static_data
->n_operands
);
1262 /* Now get the operand values and constraints out of the
1264 decode_asm_operands (PATTERN (insn
), NULL
,
1266 constraints
, operand_mode
, NULL
);
1267 #ifdef ENABLE_CHECKING
1271 for (i
= 0; i
< nop
; i
++)
1273 (insn_static_data
->operand
[i
].mode
== operand_mode
[i
]
1274 && insn_static_data
->operand
[i
].constraint
== constraints
[i
]
1275 && ! insn_static_data
->operand
[i
].is_operator
);
1279 #ifdef ENABLE_CHECKING
1283 for (i
= 0; i
< insn_static_data
->n_operands
; i
++)
1285 (insn_static_data
->operand
[i
].type
1286 == (insn_static_data
->operand
[i
].constraint
[0] == '=' ? OP_OUT
1287 : insn_static_data
->operand
[i
].constraint
[0] == '+' ? OP_INOUT
1294 insn_extract (insn
);
1295 n
= insn_static_data
->n_operands
;
1297 memcpy (data
->operand_loc
, recog_data
.operand_loc
, n
* sizeof (rtx
*));
1298 n
= insn_static_data
->n_dups
;
1300 memcpy (data
->dup_loc
, recog_data
.dup_loc
, n
* sizeof (rtx
*));
1301 #if HAVE_ATTR_enabled
1302 #ifdef ENABLE_CHECKING
1307 n
= insn_static_data
->n_alternatives
;
1308 bp
= data
->alternative_enabled_p
;
1309 lra_assert (n
>= 0 && bp
!= NULL
);
1310 /* Cache the insn to prevent extract_insn call from
1311 get_attr_enabled. */
1312 recog_data
.insn
= insn
;
1313 for (i
= 0; i
< n
; i
++)
1315 which_alternative
= i
;
1316 lra_assert (bp
[i
] == get_attr_enabled (insn
));
1325 /* Set up that INSN is using alternative ALT now. */
1327 lra_set_used_insn_alternative (rtx insn
, int alt
)
1329 lra_insn_recog_data_t data
;
1331 data
= lra_get_insn_recog_data (insn
);
1332 data
->used_insn_alternative
= alt
;
1335 /* Set up that insn with UID is using alternative ALT now. The insn
1336 info should be already set up. */
1338 lra_set_used_insn_alternative_by_uid (int uid
, int alt
)
1340 lra_insn_recog_data_t data
;
1342 check_and_expand_insn_recog_data (uid
);
1343 data
= lra_insn_recog_data
[uid
];
1344 lra_assert (data
!= NULL
);
1345 data
->used_insn_alternative
= alt
;
/* This page contains code dealing with common register info and
   copies.  */
1353 /* The size of the following array. */
1354 static int reg_info_size
;
1355 /* Common info about each register. */
1356 struct lra_reg
*lra_reg_info
;
1358 /* Last register value. */
1359 static int last_reg_value
;
1361 /* Return new register value. */
1363 get_new_reg_value (void)
1365 return ++last_reg_value
;
1368 /* Pools for copies. */
1369 static alloc_pool copy_pool
;
1371 /* Vec referring to pseudo copies. */
1372 static vec
<lra_copy_t
> copy_vec
;
1374 /* Initialize I-th element of lra_reg_info. */
1376 initialize_lra_reg_info_element (int i
)
1378 bitmap_initialize (&lra_reg_info
[i
].insn_bitmap
, ®_obstack
);
1380 lra_reg_info
[i
].no_stack_p
= false;
1382 CLEAR_HARD_REG_SET (lra_reg_info
[i
].conflict_hard_regs
);
1383 lra_reg_info
[i
].preferred_hard_regno1
= -1;
1384 lra_reg_info
[i
].preferred_hard_regno2
= -1;
1385 lra_reg_info
[i
].preferred_hard_regno_profit1
= 0;
1386 lra_reg_info
[i
].preferred_hard_regno_profit2
= 0;
1387 lra_reg_info
[i
].live_ranges
= NULL
;
1388 lra_reg_info
[i
].nrefs
= lra_reg_info
[i
].freq
= 0;
1389 lra_reg_info
[i
].last_reload
= 0;
1390 lra_reg_info
[i
].restore_regno
= -1;
1391 lra_reg_info
[i
].val
= get_new_reg_value ();
1392 lra_reg_info
[i
].copies
= NULL
;
1395 /* Initialize common reg info and copies. */
1397 init_reg_info (void)
1402 reg_info_size
= max_reg_num () * 3 / 2 + 1;
1403 lra_reg_info
= XNEWVEC (struct lra_reg
, reg_info_size
);
1404 for (i
= 0; i
< reg_info_size
; i
++)
1405 initialize_lra_reg_info_element (i
);
1407 = create_alloc_pool ("lra copies", sizeof (struct lra_copy
), 100);
1408 copy_vec
.create (100);
1412 /* Finish common reg info and copies. */
1414 finish_reg_info (void)
1418 for (i
= 0; i
< reg_info_size
; i
++)
1419 bitmap_clear (&lra_reg_info
[i
].insn_bitmap
);
1420 free (lra_reg_info
);
1422 free_alloc_pool (copy_pool
);
1423 copy_vec
.release ();
1426 /* Expand common reg info if it is necessary. */
1428 expand_reg_info (void)
1430 int i
, old
= reg_info_size
;
1432 if (reg_info_size
> max_reg_num ())
1434 reg_info_size
= max_reg_num () * 3 / 2 + 1;
1435 lra_reg_info
= XRESIZEVEC (struct lra_reg
, lra_reg_info
, reg_info_size
);
1436 for (i
= old
; i
< reg_info_size
; i
++)
1437 initialize_lra_reg_info_element (i
);
1440 /* Free all copies. */
1442 lra_free_copies (void)
1446 while (copy_vec
.length () != 0)
1448 cp
= copy_vec
.pop ();
1449 lra_reg_info
[cp
->regno1
].copies
= lra_reg_info
[cp
->regno2
].copies
= NULL
;
1450 pool_free (copy_pool
, cp
);
1454 /* Create copy of two pseudos REGNO1 and REGNO2. The copy execution
1455 frequency is FREQ. */
1457 lra_create_copy (int regno1
, int regno2
, int freq
)
1462 lra_assert (regno1
!= regno2
);
1463 regno1_dest_p
= true;
1464 if (regno1
> regno2
)
1468 regno1_dest_p
= false;
1472 cp
= (lra_copy_t
) pool_alloc (copy_pool
);
1473 copy_vec
.safe_push (cp
);
1474 cp
->regno1_dest_p
= regno1_dest_p
;
1476 cp
->regno1
= regno1
;
1477 cp
->regno2
= regno2
;
1478 cp
->regno1_next
= lra_reg_info
[regno1
].copies
;
1479 lra_reg_info
[regno1
].copies
= cp
;
1480 cp
->regno2_next
= lra_reg_info
[regno2
].copies
;
1481 lra_reg_info
[regno2
].copies
= cp
;
1482 if (lra_dump_file
!= NULL
)
1483 fprintf (lra_dump_file
, " Creating copy r%d%sr%d@%d\n",
1484 regno1
, regno1_dest_p
? "<-" : "->", regno2
, freq
);
/* Return N-th (0, 1, ...) copy.  If there is no copy, return
   NULL.  */
1490 lra_get_copy (int n
)
1492 if (n
>= (int) copy_vec
.length ())
/* This page contains code dealing with info about registers in
   insns.  */
1502 /* Process X of insn UID recursively and add info (operand type is
1503 given by TYPE, flag of that it is early clobber is EARLY_CLOBBER)
1504 about registers in X to the insn DATA. */
1506 add_regs_to_insn_regno_info (lra_insn_recog_data_t data
, rtx x
, int uid
,
1507 enum op_type type
, bool early_clobber
)
1511 enum machine_mode mode
;
1514 struct lra_insn_reg
*curr
;
1516 code
= GET_CODE (x
);
1517 mode
= GET_MODE (x
);
1519 if (GET_CODE (x
) == SUBREG
)
1522 code
= GET_CODE (x
);
1523 if (GET_MODE_SIZE (mode
) < GET_MODE_SIZE (GET_MODE (x
)))
1525 mode
= GET_MODE (x
);
1526 if (GET_MODE_SIZE (mode
) > REGMODE_NATURAL_SIZE (mode
))
1534 if (bitmap_set_bit (&lra_reg_info
[regno
].insn_bitmap
, uid
))
1536 data
->regs
= new_insn_reg (regno
, type
, mode
, subreg_p
,
1537 early_clobber
, data
->regs
);
1542 for (curr
= data
->regs
; curr
!= NULL
; curr
= curr
->next
)
1543 if (curr
->regno
== regno
)
1545 if (curr
->subreg_p
!= subreg_p
|| curr
->biggest_mode
!= mode
)
1546 /* The info can not be integrated into the found
1548 data
->regs
= new_insn_reg (regno
, type
, mode
, subreg_p
,
1549 early_clobber
, data
->regs
);
1552 if (curr
->type
!= type
)
1553 curr
->type
= OP_INOUT
;
1554 if (curr
->early_clobber
!= early_clobber
)
1555 curr
->early_clobber
= true;
1566 add_regs_to_insn_regno_info (data
, SET_DEST (x
), uid
, OP_OUT
, false);
1567 add_regs_to_insn_regno_info (data
, SET_SRC (x
), uid
, OP_IN
, false);
1570 /* We treat clobber of non-operand hard registers as early
1571 clobber (the behavior is expected from asm). */
1572 add_regs_to_insn_regno_info (data
, XEXP (x
, 0), uid
, OP_OUT
, true);
1574 case PRE_INC
: case PRE_DEC
: case POST_INC
: case POST_DEC
:
1575 add_regs_to_insn_regno_info (data
, XEXP (x
, 0), uid
, OP_INOUT
, false);
1577 case PRE_MODIFY
: case POST_MODIFY
:
1578 add_regs_to_insn_regno_info (data
, XEXP (x
, 0), uid
, OP_INOUT
, false);
1579 add_regs_to_insn_regno_info (data
, XEXP (x
, 1), uid
, OP_IN
, false);
1582 if ((code
!= PARALLEL
&& code
!= EXPR_LIST
) || type
!= OP_OUT
)
	/* Some targets place small structures in registers for return
	   values of functions, and those registers are wrapped in
	   PARALLEL that we may see as the destination of a SET.  Here
	   is an example:

	   (call_insn 13 12 14 2 (set (parallel:BLK [
			(expr_list:REG_DEP_TRUE (reg:DI 0 ax)
			    (const_int 0 [0]))
			(expr_list:REG_DEP_TRUE (reg:DI 1 dx)
			    (const_int 8 [0x8]))
		       ])
		     (call (mem:QI (symbol_ref:DI (...	*/
1596 fmt
= GET_RTX_FORMAT (code
);
1597 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
1600 add_regs_to_insn_regno_info (data
, XEXP (x
, i
), uid
, type
, false);
1601 else if (fmt
[i
] == 'E')
1603 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
1604 add_regs_to_insn_regno_info (data
, XVECEXP (x
, i
, j
), uid
,
1611 /* Return execution frequency of INSN. */
1613 get_insn_freq (rtx insn
)
1617 if ((bb
= BLOCK_FOR_INSN (insn
)) != NULL
)
1618 return REG_FREQ_FROM_BB (bb
);
1621 lra_assert (lra_insn_recog_data
[INSN_UID (insn
)]
1622 ->insn_static_data
->n_operands
== 0);
/* We don't care about such an insn, e.g. it might be a jump with
   a clobber.  */
1629 /* Invalidate all reg info of INSN with DATA and execution frequency
1630 FREQ. Update common info about the invalidated registers. */
1632 invalidate_insn_data_regno_info (lra_insn_recog_data_t data
, rtx insn
,
1638 struct lra_insn_reg
*ir
, *next_ir
;
1640 uid
= INSN_UID (insn
);
1641 debug_p
= DEBUG_INSN_P (insn
);
1642 for (ir
= data
->regs
; ir
!= NULL
; ir
= next_ir
)
1647 bitmap_clear_bit (&lra_reg_info
[i
].insn_bitmap
, uid
);
1648 if (i
>= FIRST_PSEUDO_REGISTER
&& ! debug_p
)
1650 lra_reg_info
[i
].nrefs
--;
1651 lra_reg_info
[i
].freq
-= freq
;
1652 lra_assert (lra_reg_info
[i
].nrefs
>= 0 && lra_reg_info
[i
].freq
>= 0);
1658 /* Invalidate all reg info of INSN. Update common info about the
1659 invalidated registers. */
1661 lra_invalidate_insn_regno_info (rtx insn
)
1663 invalidate_insn_data_regno_info (lra_get_insn_recog_data (insn
), insn
,
1664 get_insn_freq (insn
));
1667 /* Update common reg info from reg info of insn given by its DATA and
1668 execution frequency FREQ. */
1670 setup_insn_reg_info (lra_insn_recog_data_t data
, int freq
)
1673 struct lra_insn_reg
*ir
;
1675 for (ir
= data
->regs
; ir
!= NULL
; ir
= ir
->next
)
1676 if ((i
= ir
->regno
) >= FIRST_PSEUDO_REGISTER
)
1678 lra_reg_info
[i
].nrefs
++;
1679 lra_reg_info
[i
].freq
+= freq
;
1683 /* Set up insn reg info of INSN. Update common reg info from reg info
1686 lra_update_insn_regno_info (rtx insn
)
1689 lra_insn_recog_data_t data
;
1690 struct lra_static_insn_data
*static_data
;
1693 if (! INSN_P (insn
))
1695 data
= lra_get_insn_recog_data (insn
);
1696 static_data
= data
->insn_static_data
;
1697 freq
= get_insn_freq (insn
);
1698 invalidate_insn_data_regno_info (data
, insn
, freq
);
1699 uid
= INSN_UID (insn
);
1700 for (i
= static_data
->n_operands
- 1; i
>= 0; i
--)
1701 add_regs_to_insn_regno_info (data
, *data
->operand_loc
[i
], uid
,
1702 static_data
->operand
[i
].type
,
1703 static_data
->operand
[i
].early_clobber
);
1704 if ((code
= GET_CODE (PATTERN (insn
))) == CLOBBER
|| code
== USE
)
1705 add_regs_to_insn_regno_info (data
, XEXP (PATTERN (insn
), 0), uid
,
1706 code
== USE
? OP_IN
: OP_OUT
, false);
1707 if (NONDEBUG_INSN_P (insn
))
1708 setup_insn_reg_info (data
, freq
);
/* Return reg info of insn given by its UID.  */
1712 struct lra_insn_reg
*
1713 lra_get_insn_regs (int uid
)
1715 lra_insn_recog_data_t data
;
1717 data
= get_insn_recog_data_by_uid (uid
);
/* This page contains code dealing with the stack of insns which
   should be processed by the next constraint pass.  */
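/* An illustrative use of the stack interface defined below (a sketch,
   not code quoted from a particular pass): a transformation pushes
   any insn it changed, and the constraint pass pops insns until the
   stack is empty:

     lra_push_insn_and_update_insn_regno_info (changed_insn);
     ...
     while (lra_insn_stack_length () != 0)
       {
	 insn = lra_pop_insn ();
	 ... process the constraints of INSN ...
       }

   Here CHANGED_INSN is a placeholder name, and lra_pop_insn is
   assumed to name the pop counterpart defined below.  */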
/* Bitmap used to put an insn on the stack only once.  */
static sbitmap lra_constraint_insn_stack_bitmap;
1729 /* The stack itself. */
1730 vec
<rtx
> lra_constraint_insn_stack
;
/* Put INSN on the stack.  If ALWAYS_UPDATE is true, always update the reg
   info for INSN, otherwise only update it if INSN is not already on the
   stack.  */
1736 lra_push_insn_1 (rtx insn
, bool always_update
)
1738 unsigned int uid
= INSN_UID (insn
);
1740 lra_update_insn_regno_info (insn
);
1741 if (uid
>= SBITMAP_SIZE (lra_constraint_insn_stack_bitmap
))
1742 lra_constraint_insn_stack_bitmap
=
1743 sbitmap_resize (lra_constraint_insn_stack_bitmap
, 3 * uid
/ 2, 0);
1744 if (bitmap_bit_p (lra_constraint_insn_stack_bitmap
, uid
))
1746 bitmap_set_bit (lra_constraint_insn_stack_bitmap
, uid
);
1747 if (! always_update
)
1748 lra_update_insn_regno_info (insn
);
1749 lra_constraint_insn_stack
.safe_push (insn
);
1752 /* Put INSN on the stack. */
1754 lra_push_insn (rtx insn
)
1756 lra_push_insn_1 (insn
, false);
1759 /* Put INSN on the stack and update its reg info. */
1761 lra_push_insn_and_update_insn_regno_info (rtx insn
)
1763 lra_push_insn_1 (insn
, true);
1766 /* Put insn with UID on the stack. */
1768 lra_push_insn_by_uid (unsigned int uid
)
1770 lra_push_insn (lra_insn_recog_data
[uid
]->insn
);
/* Take the last-inserted insn off the stack and return it.  */
1777 rtx insn
= lra_constraint_insn_stack
.pop ();
1778 bitmap_clear_bit (lra_constraint_insn_stack_bitmap
, INSN_UID (insn
));
1782 /* Return the current size of the insn stack. */
1784 lra_insn_stack_length (void)
1786 return lra_constraint_insn_stack
.length ();
1789 /* Push insns FROM to TO (excluding it) going in reverse order. */
1791 push_insns (rtx from
, rtx to
)
1795 if (from
== NULL_RTX
)
1797 for (insn
= from
; insn
!= to
; insn
= PREV_INSN (insn
))
1799 lra_push_insn (insn
);
/* Emit insns BEFORE before INSN and insns AFTER after INSN.  Put the
   insns onto the stack.  Print about emitting the insns with
   TITLE.  */
1806 lra_process_new_insns (rtx insn
, rtx before
, rtx after
, const char *title
)
1810 if (lra_dump_file
!= NULL
&& (before
!= NULL_RTX
|| after
!= NULL_RTX
))
1812 debug_rtl_slim (lra_dump_file
, insn
, insn
, -1, 0);
1813 if (before
!= NULL_RTX
)
1815 fprintf (lra_dump_file
," %s before:\n", title
);
1816 debug_rtl_slim (lra_dump_file
, before
, NULL_RTX
, -1, 0);
1818 if (after
!= NULL_RTX
)
1820 fprintf (lra_dump_file
, " %s after:\n", title
);
1821 debug_rtl_slim (lra_dump_file
, after
, NULL_RTX
, -1, 0);
1823 fprintf (lra_dump_file
, "\n");
1825 if (before
!= NULL_RTX
)
1827 emit_insn_before (before
, insn
);
1828 push_insns (PREV_INSN (insn
), PREV_INSN (before
));
1830 if (after
!= NULL_RTX
)
1832 for (last
= after
; NEXT_INSN (last
) != NULL_RTX
; last
= NEXT_INSN (last
))
1834 emit_insn_after (after
, insn
);
1835 push_insns (last
, insn
);
/* This page contains code dealing with scratches (changing them onto
   pseudos and restoring them from the pseudos).

   We change scratches into pseudos at the beginning of LRA to
   simplify dealing with them (conflicts, hard register assignments).

   If the pseudo denoting a scratch was spilled, it means that we do
   not need a hard register for it.  Such pseudos are transformed back
   to scratches at the end of LRA.  */
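/* For example (an illustrative sketch, not taken from a particular
   machine description): a match_scratch operand

     (clobber (scratch:SI))

   becomes, after remove_scratches,

     (clobber (reg:SI <new pseudo>))

   together with a REG_UNUSED note for the new pseudo.  If that pseudo
   ends up with neither a hard register nor memory, restore_scratches
   turns it back into (scratch:SI).  */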
/* Description of location of a former scratch operand.  */
struct sloc
{
  rtx insn; /* Insn where the scratch was.  */
  int nop;  /* Number of the operand which was a scratch.  */
};

typedef struct sloc *sloc_t;
/* Locations of the former scratches.  */
static vec<sloc_t> scratches;

/* Bitmap of scratch regnos.  */
static bitmap_head scratch_bitmap;

/* Bitmap of scratch operands.  */
static bitmap_head scratch_operand_bitmap;
/* Return true if pseudo REGNO is made of SCRATCH.  */
bool
lra_former_scratch_p (int regno)
{
  return bitmap_bit_p (&scratch_bitmap, regno);
}
/* Return true if the operand NOP of INSN is a former scratch.  */
bool
lra_former_scratch_operand_p (rtx insn, int nop)
{
  return bitmap_bit_p (&scratch_operand_bitmap,
		       INSN_UID (insn) * MAX_RECOG_OPERANDS + nop) != 0;
}
1884 /* Change scratches onto pseudos and save their location. */
1886 remove_scratches (void)
1889 bool insn_changed_p
;
1893 lra_insn_recog_data_t id
;
1894 struct lra_static_insn_data
*static_id
;
1896 scratches
.create (get_max_uid ());
1897 bitmap_initialize (&scratch_bitmap
, ®_obstack
);
1898 bitmap_initialize (&scratch_operand_bitmap
, ®_obstack
);
1900 FOR_BB_INSNS (bb
, insn
)
1903 id
= lra_get_insn_recog_data (insn
);
1904 static_id
= id
->insn_static_data
;
1905 insn_changed_p
= false;
1906 for (i
= 0; i
< static_id
->n_operands
; i
++)
1907 if (GET_CODE (*id
->operand_loc
[i
]) == SCRATCH
1908 && GET_MODE (*id
->operand_loc
[i
]) != VOIDmode
)
1910 insn_changed_p
= true;
1911 *id
->operand_loc
[i
] = reg
1912 = lra_create_new_reg (static_id
->operand
[i
].mode
,
1913 *id
->operand_loc
[i
], ALL_REGS
, NULL
);
1914 add_reg_note (insn
, REG_UNUSED
, reg
);
1915 lra_update_dup (id
, i
);
1916 loc
= XNEW (struct sloc
);
1919 scratches
.safe_push (loc
);
1920 bitmap_set_bit (&scratch_bitmap
, REGNO (*id
->operand_loc
[i
]));
1921 bitmap_set_bit (&scratch_operand_bitmap
,
1922 INSN_UID (insn
) * MAX_RECOG_OPERANDS
+ i
);
1923 if (lra_dump_file
!= NULL
)
1924 fprintf (lra_dump_file
,
1925 "Removing SCRATCH in insn #%u (nop %d)\n",
1926 INSN_UID (insn
), i
);
1929 /* Because we might use DF right after caller-saves sub-pass
1930 we need to keep DF info up to date. */
1931 df_insn_rescan (insn
);
/* Change pseudos created by function remove_scratches back into
   scratches.  */
1937 restore_scratches (void)
1942 rtx last
= NULL_RTX
;
1943 lra_insn_recog_data_t id
= NULL
;
1945 for (i
= 0; scratches
.iterate (i
, &loc
); i
++)
1947 if (last
!= loc
->insn
)
1950 id
= lra_get_insn_recog_data (last
);
1952 if (REG_P (*id
->operand_loc
[loc
->nop
])
1953 && ((regno
= REGNO (*id
->operand_loc
[loc
->nop
]))
1954 >= FIRST_PSEUDO_REGISTER
)
1955 && lra_get_regno_hard_regno (regno
) < 0)
/* It should be the only case when a scratch register with the chosen
   constraint 'X' did not get memory or a hard register.  */
1959 lra_assert (lra_former_scratch_p (regno
));
1960 *id
->operand_loc
[loc
->nop
]
1961 = gen_rtx_SCRATCH (GET_MODE (*id
->operand_loc
[loc
->nop
]));
1962 lra_update_dup (id
, loc
->nop
);
1963 if (lra_dump_file
!= NULL
)
1964 fprintf (lra_dump_file
, "Restoring SCRATCH in insn #%u(nop %d)\n",
1965 INSN_UID (loc
->insn
), loc
->nop
);
1968 for (i
= 0; scratches
.iterate (i
, &loc
); i
++)
1970 scratches
.release ();
1971 bitmap_clear (&scratch_bitmap
);
1972 bitmap_clear (&scratch_operand_bitmap
);
1977 #ifdef ENABLE_CHECKING
1979 /* Function checks RTL for correctness. If FINAL_P is true, it is
1980 done at the end of LRA and the check is more rigorous. */
1982 check_rtl (bool final_p
)
1987 lra_insn_recog_data_t id
;
1989 lra_assert (! final_p
|| reload_completed
);
1991 FOR_BB_INSNS (bb
, insn
)
1992 if (NONDEBUG_INSN_P (insn
)
1993 && GET_CODE (PATTERN (insn
)) != USE
1994 && GET_CODE (PATTERN (insn
)) != CLOBBER
1995 && GET_CODE (PATTERN (insn
)) != ADDR_VEC
1996 && GET_CODE (PATTERN (insn
)) != ADDR_DIFF_VEC
1997 && GET_CODE (PATTERN (insn
)) != ASM_INPUT
)
2001 extract_insn (insn
);
2002 lra_assert (constrain_operands (1));
2005 if (insn_invalid_p (insn
, false))
2006 fatal_insn_not_found (insn
);
2007 if (asm_noperands (PATTERN (insn
)) >= 0)
2009 id
= lra_get_insn_recog_data (insn
);
/* The code is based on the assumption that all addresses in a
   regular instruction are legitimate before LRA.  The code in
   lra-constraints.c is based on the assumption that there is no
   subreg of memory as an insn operand.  */
2014 for (i
= 0; i
< id
->insn_static_data
->n_operands
; i
++)
2016 rtx op
= *id
->operand_loc
[i
];
2019 && (GET_MODE (op
) != BLKmode
2020 || GET_CODE (XEXP (op
, 0)) != SCRATCH
)
2021 && ! memory_address_p (GET_MODE (op
), XEXP (op
, 0))
/* Some ports don't recognize the following addresses
   as legitimate.  Although they are legitimate if
   they satisfy the constraints and will be checked
   by insn constraints, which we ignore here.  */
2026 && GET_CODE (XEXP (op
, 0)) != UNSPEC
2027 && GET_RTX_CLASS (GET_CODE (XEXP (op
, 0))) != RTX_AUTOINC
)
2028 fatal_insn_not_found (insn
);
2032 #endif /* #ifdef ENABLE_CHECKING */
2034 /* Determine if the current function has an exception receiver block
2035 that reaches the exit block via non-exceptional edges */
2037 has_nonexceptional_receiver (void)
2041 basic_block
*tos
, *worklist
, bb
;
2043 /* If we're not optimizing, then just err on the safe side. */
2047 /* First determine which blocks can reach exit via normal paths. */
2048 tos
= worklist
= XNEWVEC (basic_block
, n_basic_blocks
+ 1);
2051 bb
->flags
&= ~BB_REACHABLE
;
2053 /* Place the exit block on our worklist. */
2054 EXIT_BLOCK_PTR
->flags
|= BB_REACHABLE
;
2055 *tos
++ = EXIT_BLOCK_PTR
;
2057 /* Iterate: find everything reachable from what we've already seen. */
2058 while (tos
!= worklist
)
2062 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
2063 if (e
->flags
& EDGE_ABNORMAL
)
2070 basic_block src
= e
->src
;
2072 if (!(src
->flags
& BB_REACHABLE
))
2074 src
->flags
|= BB_REACHABLE
;
2080 /* No exceptional block reached exit unexceptionally. */
2086 /* Process recursively X of INSN and add REG_INC notes if necessary. */
2088 add_auto_inc_notes (rtx insn
, rtx x
)
2090 enum rtx_code code
= GET_CODE (x
);
2094 if (code
== MEM
&& auto_inc_p (XEXP (x
, 0)))
2096 add_reg_note (insn
, REG_INC
, XEXP (XEXP (x
, 0), 0));
2100 /* Scan all X sub-expressions. */
2101 fmt
= GET_RTX_FORMAT (code
);
2102 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
2105 add_auto_inc_notes (insn
, XEXP (x
, i
));
2106 else if (fmt
[i
] == 'E')
2107 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
2108 add_auto_inc_notes (insn
, XVECEXP (x
, i
, j
));
/* Remove all REG_DEAD and REG_UNUSED notes and regenerate REG_INC.
   We replace pseudos by hard registers without notifying DF, and
   that can make the notes obsolete.  The DF-infrastructure does not
   deal with REG_INC notes -- so we should regenerate them here.  */
2119 update_inc_notes (void)
2126 FOR_BB_INSNS (bb
, insn
)
2127 if (NONDEBUG_INSN_P (insn
))
2129 pnote
= ®_NOTES (insn
);
2132 if (REG_NOTE_KIND (*pnote
) == REG_INC
)
2133 *pnote
= XEXP (*pnote
, 1);
2135 pnote
= &XEXP (*pnote
, 1);
2138 add_auto_inc_notes (insn
, PATTERN (insn
));
2143 /* Set to 1 while in lra. */
2144 int lra_in_progress
;
2146 /* Start of reload pseudo regnos before the new spill pass. */
2147 int lra_constraint_new_regno_start
;
2149 /* Inheritance pseudo regnos before the new spill pass. */
2150 bitmap_head lra_inheritance_pseudos
;
2152 /* Split regnos before the new spill pass. */
2153 bitmap_head lra_split_regs
;
/* Reload pseudo regnos before the new assign pass which still can be
   spilled after the assignment pass.  */
2157 bitmap_head lra_optional_reload_pseudos
;
2159 /* First UID of insns generated before a new spill pass. */
2160 int lra_constraint_new_insn_uid_start
;
2162 /* File used for output of LRA debug information. */
2163 FILE *lra_dump_file
;
/* True if we should try to spill into registers of different classes
   instead of memory.  */
2167 bool lra_reg_spill_p
;
2169 /* Set up value LRA_REG_SPILL_P. */
2171 setup_reg_spill_flag (void)
2175 if (targetm
.spill_class
!= NULL
)
2176 for (cl
= 0; cl
< (int) LIM_REG_CLASSES
; cl
++)
2177 for (mode
= 0; mode
< MAX_MACHINE_MODE
; mode
++)
2178 if (targetm
.spill_class ((enum reg_class
) cl
,
2179 (enum machine_mode
) mode
) != NO_REGS
)
2181 lra_reg_spill_p
= true;
2184 lra_reg_spill_p
= false;
/* True if the current function is too big to use regular algorithms
   in LRA.  In other words, we should use simpler and faster algorithms
   in LRA.  It also means we should not worry about generating code
   for caller saves.  The value is set up in IRA.  */
/* Major LRA entry function.  F is a file that should be used to dump
   LRA debug info.  */
2199 bool live_p
, scratch_p
, inserted_p
;
2203 timevar_push (TV_LRA
);
2205 init_insn_recog_data ();
2207 #ifdef ENABLE_CHECKING
2211 COPY_HARD_REG_SET (lra_no_alloc_regs
, ira_no_alloc_regs
);
2213 lra_live_range_iter
= lra_coalesce_iter
= 0;
2214 lra_constraint_iter
= lra_constraint_iter_after_spill
= 0;
2215 lra_inheritance_iter
= lra_undo_inheritance_iter
= 0;
2217 setup_reg_spill_flag ();
/* We can not set up reload_in_progress because it prevents new
   pseudo creation.  */
2221 lra_in_progress
= 1;
/* Function remove_scratches can create new pseudos for clobbers --
   so set up lra_constraint_new_regno_start before its call to
   permit changing reg classes for pseudos created by this
   transformation.  */
2230 lra_constraint_new_regno_start
= max_reg_num ();
2231 remove_scratches ();
2232 scratch_p
= lra_constraint_new_regno_start
!= max_reg_num ();
/* A function that has a non-local label that can reach the exit
   block via non-exceptional paths must save all call-saved
   registers.  */
2237 if (cfun
->has_nonlocal_label
&& has_nonexceptional_receiver ())
2238 crtl
->saves_all_registers
= 1;
2240 if (crtl
->saves_all_registers
)
2241 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
2242 if (! call_used_regs
[i
] && ! fixed_regs
[i
] && ! LOCAL_REGNO (i
))
2243 df_set_regs_ever_live (i
, true);
/* We don't use DF from now on and avoid using it because it is too
   expensive when a lot of RTL changes are made.  */
2247 df_set_flags (DF_NO_INSN_RESCAN
);
2248 lra_constraint_insn_stack
.create (get_max_uid ());
2249 lra_constraint_insn_stack_bitmap
= sbitmap_alloc (get_max_uid ());
2250 bitmap_clear (lra_constraint_insn_stack_bitmap
);
2251 lra_live_ranges_init ();
2252 lra_constraints_init ();
2253 lra_curr_reload_num
= 0;
2254 push_insns (get_last_insn (), NULL_RTX
);
2255 /* It is needed for the 1st coalescing. */
2256 lra_constraint_new_insn_uid_start
= get_max_uid ();
2257 bitmap_initialize (&lra_inheritance_pseudos
, ®_obstack
);
2258 bitmap_initialize (&lra_split_regs
, ®_obstack
);
2259 bitmap_initialize (&lra_optional_reload_pseudos
, ®_obstack
);
2265 bitmap_clear (&lra_optional_reload_pseudos
);
/* We should try to assign hard registers to scratches even
   if there were no RTL transformations in lra_constraints.  */
2269 if (! lra_constraints (lra_constraint_iter
== 0)
2270 && (lra_constraint_iter
> 1
2271 || (! scratch_p
&& ! caller_save_needed
)))
/* Constraint transformations may result in eliminable hard regs
   becoming uneliminable, and pseudos which use them should then
   be spilled.  It is better to do it before pseudo assignments.

   For example, rs6000 can make RS6000_PIC_OFFSET_TABLE_REGNUM
   uneliminable if we started to use a constant pool.  */
2281 lra_eliminate (false);
2282 /* Do inheritance only for regular algorithms. */
2285 /* We need live ranges for lra_assign -- so build them. */
2286 lra_create_live_ranges (true);
/* If we don't spill non-reload and non-inheritance pseudos,
   there is no sense in running memory-memory move coalescing.
   If inheritance pseudos were spilled, the memory-memory
   moves involving them will be removed by the pass undoing
   inheritance.  */
2297 /* Do coalescing only for regular algorithms. */
2298 if (! lra_assign () && lra_coalesce ())
2300 if (lra_undo_inheritance ())
2304 bitmap_clear (&lra_inheritance_pseudos
);
2305 bitmap_clear (&lra_split_regs
);
2306 if (! lra_need_for_spills_p ())
2310 /* We need full live info for spilling pseudos into
2311 registers instead of memory. */
2312 lra_create_live_ranges (lra_reg_spill_p
);
2316 /* Assignment of stack slots changes elimination offsets for
2317 some eliminations. So update the offsets here. */
2318 lra_eliminate (false);
2319 lra_constraint_new_regno_start
= max_reg_num ();
2320 lra_constraint_new_insn_uid_start
= get_max_uid ();
2321 lra_constraint_iter_after_spill
= 0;
2323 restore_scratches ();
2324 lra_eliminate (true);
2325 lra_final_code_change ();
2326 lra_in_progress
= 0;
2327 lra_clear_live_ranges ();
2328 lra_live_ranges_finish ();
2329 lra_constraints_finish ();
2331 sbitmap_free (lra_constraint_insn_stack_bitmap
);
2332 lra_constraint_insn_stack
.release ();
2333 finish_insn_recog_data ();
2334 regstat_free_n_sets_and_refs ();
2336 reload_completed
= 1;
2337 update_inc_notes ();
2339 inserted_p
= fixup_abnormal_edges ();
2341 /* We've possibly turned single trapping insn into multiple ones. */
2342 if (cfun
->can_throw_non_call_exceptions
)
2345 blocks
= sbitmap_alloc (last_basic_block
);
2346 bitmap_ones (blocks
);
2347 find_many_sub_basic_blocks (blocks
);
2348 sbitmap_free (blocks
);
2352 commit_edge_insertions ();
2354 /* Replacing pseudos with their memory equivalents might have
2355 created shared rtx. Subsequent passes would get confused
2356 by this, so unshare everything here. */
2357 unshare_all_rtl_again (get_insns ());
2359 #ifdef ENABLE_CHECKING
2363 timevar_pop (TV_LRA
);
/* Called once per compiler run to initialize LRA data once.  */
void
lra_init_once (void)
{
  init_insn_code_data_once ();
}

/* Initialize LRA whenever register-related information is changed.  */
void
lra_init (void)
{
  init_op_alt_data ();
}

/* Called once per compiler run to finish LRA data which are
   initialized once.  */
void
lra_finish_once (void)
{
  finish_insn_code_data_once ();
}