gcc/lra.c
1 /* LRA (local register allocator) driver and LRA utilities.
2 Copyright (C) 2010, 2011, 2012
3 Free Software Foundation, Inc.
4 Contributed by Vladimir Makarov <vmakarov@redhat.com>.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
23 /* The Local Register Allocator (LRA) is a replacement of the former
24 reload pass. It is focused on simplifying the code that solves the
25 reload pass tasks, on making the code easier to maintain, and on
26 implementing new promising optimizations.
28 The major LRA design solutions are:
29 o division into small, manageable, separate sub-tasks
30 o reflection of all transformations and decisions in RTL as much
31 as possible
32 o insn constraints as a primary source of the info (minimizing the
33 number of target-dependent macros/hooks)
35 In brief, LRA works by iteratively processing insns with the final goal
36 of satisfying all insn and address constraints:
37 o New reload insns (in brief, reloads) and reload pseudos might be
38 generated;
39 o Some pseudos might be spilled to assign hard registers to
40 new reload pseudos;
41 o Spilled pseudos are changed to stack memory or their equivalences;
42 o Allocating stack memory changes the address displacements and
43 a new iteration is needed.
45 Here is a block diagram of the LRA passes:
47          ---------------------
48         | Undo inheritance    |      ---------------        ---------------
49         | for spilled pseudos)|     | Memory-memory |      | New (and old) |
50         | and splits (for     |<----| move coalesce |<-----|    pseudos    |
51         | pseudos got the     |      ---------------       |  assignment   |
52 Start   |  same hard regs)    |                             ---------------
53   |      ---------------------                                     ^
54   V              |             ----------------                    |
55  -----------     V            | Update virtual |                   |
56 |  Remove   |----> ---------->|    register    |                   |
57 | scratches |     ^           | displacements  |                   |
58  -----------     |             ----------------                    |
59                  |                    |                            |
60                  |                    V        New                 |
61   ----------------    No        ------------ pseudos       -------------------
62  | Spilled pseudo |  change    |Constraints:| or insns    | Inheritance/split |
63  |   to memory    |<-----------|    RTL     |------------>|  transformations  |
64  |  substitution  |            | transfor-  |             |   in EBB scope    |
65   ----------------             |  mations   |              -------------------
66          |                      ------------
68      -------------------------
69     | Hard regs substitution, |
70     | devirtualization, and   |------> Finish
71     | restoring scratches got |
72     |          memory         |
73      -------------------------
75 To speed up the process:
76 o We process only insns affected by changes on previous
77 iterations;
78 o We don't use DFA-infrastructure because it results in much slower
79 compilation than the special IR described below does;
80 o We use a special insn representation for quick access to insn
81 info which is always *synchronized* with the current RTL;
82 o Insn IR is minimized in memory use. It is divided into three parts:
83 o one specific for each insn in RTL (only operand locations);
84 o one common for all insns in RTL with the same insn code
85 (different operand attributes from machine descriptions);
86 o one oriented for maintenance of live info (list of pseudos).
87 o Pseudo data:
88 o all insns where the pseudo is referenced;
89 o live info (conflicting hard regs, live ranges, # of
90 references etc);
91 o data used for assigning (preferred hard regs, costs etc).
93 This file contains LRA driver, LRA utility functions and data, and
94 code for dealing with scratches. */
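/* A purely hypothetical illustration of one constraint iteration (not
   taken from any particular target): given an insn

       (set (mem:SI (reg:SI 100)) (mem:SI (reg:SI 101)))

   on a target whose move insn does not allow both operands to be
   memory, the constraint pass would emit a reload

       (set (reg:SI 102) (mem:SI (reg:SI 101)))

   with a new reload pseudo 102, rewrite the original insn to use that
   pseudo, and leave it to the next assignment sub-pass to find a hard
   register for it.  */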
96 #include "config.h"
97 #include "system.h"
98 #include "coretypes.h"
99 #include "tm.h"
100 #include "hard-reg-set.h"
101 #include "rtl.h"
102 #include "tm_p.h"
103 #include "regs.h"
104 #include "insn-config.h"
105 #include "insn-codes.h"
106 #include "recog.h"
107 #include "output.h"
108 #include "addresses.h"
109 #include "flags.h"
110 #include "function.h"
111 #include "expr.h"
112 #include "basic-block.h"
113 #include "except.h"
114 #include "tree-pass.h"
115 #include "timevar.h"
116 #include "target.h"
117 #include "vec.h"
118 #include "ira.h"
119 #include "lra-int.h"
120 #include "df.h"
122 /* Hard registers currently not available for allocation. It can
123 change after some hard registers become non-eliminable. */
124 HARD_REG_SET lra_no_alloc_regs;
126 static int get_new_reg_value (void);
127 static void expand_reg_info (void);
128 static void invalidate_insn_recog_data (int);
129 static int get_insn_freq (rtx);
130 static void invalidate_insn_data_regno_info (lra_insn_recog_data_t, rtx, int);
132 /* Expand all regno related info needed for LRA. */
133 static void
134 expand_reg_data (void)
136 resize_reg_info ();
137 expand_reg_info ();
138 ira_expand_reg_equiv ();
141 /* Create and return a new reg of ORIGINAL mode. If ORIGINAL is NULL
142 or of VOIDmode, use MD_MODE for the new reg. Initialize its
143 register class to RCLASS. Print a message about assigning class
144 RCLASS containing the new register name TITLE unless it is NULL. Use
145 the attributes of ORIGINAL if it is a register. The created register
146 will have a unique held value. */
148 lra_create_new_reg_with_unique_value (enum machine_mode md_mode, rtx original,
149 enum reg_class rclass, const char *title)
151 enum machine_mode mode;
152 rtx new_reg;
154 if (original == NULL_RTX || (mode = GET_MODE (original)) == VOIDmode)
155 mode = md_mode;
156 lra_assert (mode != VOIDmode);
157 new_reg = gen_reg_rtx (mode);
158 if (original == NULL_RTX || ! REG_P (original))
160 if (lra_dump_file != NULL)
161 fprintf (lra_dump_file, " Creating newreg=%i", REGNO (new_reg));
163 else
165 if (ORIGINAL_REGNO (original) >= FIRST_PSEUDO_REGISTER)
166 ORIGINAL_REGNO (new_reg) = ORIGINAL_REGNO (original);
167 REG_USERVAR_P (new_reg) = REG_USERVAR_P (original);
168 REG_POINTER (new_reg) = REG_POINTER (original);
169 REG_ATTRS (new_reg) = REG_ATTRS (original);
170 if (lra_dump_file != NULL)
171 fprintf (lra_dump_file, " Creating newreg=%i from oldreg=%i",
172 REGNO (new_reg), REGNO (original));
174 if (lra_dump_file != NULL)
176 if (title != NULL)
177 fprintf (lra_dump_file, ", assigning class %s to%s%s r%d",
178 reg_class_names[rclass], *title == '\0' ? "" : " ",
179 title, REGNO (new_reg));
180 fprintf (lra_dump_file, "\n");
182 expand_reg_data ();
183 setup_reg_classes (REGNO (new_reg), rclass, NO_REGS, rclass);
184 return new_reg;
187 /* Analogous to the previous function but also inherits value of
188 ORIGINAL. */
190 lra_create_new_reg (enum machine_mode md_mode, rtx original,
191 enum reg_class rclass, const char *title)
193 rtx new_reg;
195 new_reg
196 = lra_create_new_reg_with_unique_value (md_mode, original, rclass, title);
197 if (original != NULL_RTX && REG_P (original))
198 lra_reg_info[REGNO (new_reg)].val = lra_reg_info[REGNO (original)].val;
199 return new_reg;
202 /* Set up a unique held value for REGNO. */
203 void
204 lra_set_regno_unique_value (int regno)
206 lra_reg_info[regno].val = get_new_reg_value ();
209 /* Invalidate INSN related info used by LRA. */
210 void
211 lra_invalidate_insn_data (rtx insn)
213 lra_invalidate_insn_regno_info (insn);
214 invalidate_insn_recog_data (INSN_UID (insn));
217 /* Mark INSN deleted and invalidate the insn related info used by
218 LRA. */
219 void
220 lra_set_insn_deleted (rtx insn)
222 lra_invalidate_insn_data (insn);
223 SET_INSN_DELETED (insn);
226 /* Delete an unneeded INSN and any previous insns whose sole purpose is
227 loading data that is dead in INSN. */
228 void
229 lra_delete_dead_insn (rtx insn)
231 rtx prev = prev_real_insn (insn);
232 rtx prev_dest;
234 /* If the previous insn sets a register that dies in our insn,
235 delete it too. */
236 if (prev && GET_CODE (PATTERN (prev)) == SET
237 && (prev_dest = SET_DEST (PATTERN (prev)), REG_P (prev_dest))
238 && reg_mentioned_p (prev_dest, PATTERN (insn))
239 && find_regno_note (insn, REG_DEAD, REGNO (prev_dest))
240 && ! side_effects_p (SET_SRC (PATTERN (prev))))
241 lra_delete_dead_insn (prev);
243 lra_set_insn_deleted (insn);
246 /* The target checks operands through operand predicates to recognize an
247 insn. We should take special precautions when generating add insns,
248 which are frequent results of elimination.
250 Emit insns for x = y + z. X can be used to store intermediate
251 values and therefore should not be in Y or Z when we use X to store an
252 intermediate value. Y + Z should form [base] [+ index[ * scale]] [
253 + disp] where base and index are registers, disp and scale are
254 constants. Y should contain base if it is present, Z should
255 contain disp if any. index[*scale] can be part of Y or Z. */
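/* An illustration with made-up operands: if Y is
   (plus:SI (mult:SI (reg:SI 110) (const_int 4)) (reg:SI 111)) and Z is
   (const_int 8), the code below decomposes the sum into index = r110,
   scale = 4, base = r111 and disp = 8; when the target has no suitable
   3-operand add, it falls back to a sequence equivalent to
   x = index * scale; x = x + disp; x = x + base.  */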
256 void
257 lra_emit_add (rtx x, rtx y, rtx z)
259 int old;
260 rtx insn, last;
261 rtx a1, a2, base, index, disp, scale, index_scale;
262 bool ok_p;
264 insn = gen_add3_insn (x, y, z);
265 old = max_reg_num ();
266 if (insn != NULL_RTX)
267 emit_insn (insn);
268 else
270 disp = a2 = NULL_RTX;
271 if (GET_CODE (y) == PLUS)
273 a1 = XEXP (y, 0);
274 a2 = XEXP (y, 1);
275 disp = z;
277 else
279 a1 = y;
280 if (CONSTANT_P (z))
281 disp = z;
282 else
283 a2 = z;
285 index_scale = scale = NULL_RTX;
286 if (GET_CODE (a1) == MULT)
288 index_scale = a1;
289 index = XEXP (a1, 0);
290 scale = XEXP (a1, 1);
291 base = a2;
293 else if (a2 != NULL_RTX && GET_CODE (a2) == MULT)
295 index_scale = a2;
296 index = XEXP (a2, 0);
297 scale = XEXP (a2, 1);
298 base = a1;
300 else
302 base = a1;
303 index = a2;
305 if (! REG_P (base)
306 || (index != NULL_RTX && ! REG_P (index))
307 || (disp != NULL_RTX && ! CONSTANT_P (disp))
308 || (scale != NULL_RTX && ! CONSTANT_P (scale)))
310 /* It is not an address generation. Probably we have no 3-op
311 add. Last chance is to use a 2-op add insn. */
312 lra_assert (x != y && x != z);
313 emit_move_insn (x, z);
314 insn = gen_add2_insn (x, y);
315 emit_insn (insn);
317 else
319 if (index_scale == NULL_RTX)
320 index_scale = index;
321 if (disp == NULL_RTX)
323 /* Generate x = index_scale; x = x + base. */
324 lra_assert (index_scale != NULL_RTX && base != NULL_RTX);
325 emit_move_insn (x, index_scale);
326 insn = gen_add2_insn (x, base);
327 emit_insn (insn);
329 else if (scale == NULL_RTX)
331 /* Try x = base + disp. */
332 lra_assert (base != NULL_RTX);
333 last = get_last_insn ();
334 insn = emit_move_insn (x, gen_rtx_PLUS (GET_MODE (base),
335 base, disp));
336 if (recog_memoized (insn) < 0)
338 delete_insns_since (last);
339 /* Generate x = disp; x = x + base. */
340 emit_move_insn (x, disp);
341 insn = gen_add2_insn (x, base);
342 emit_insn (insn);
344 /* Generate x = x + index. */
345 if (index != NULL_RTX)
347 insn = gen_add2_insn (x, index);
348 emit_insn (insn);
351 else
353 /* Try x = index_scale; x = x + disp; x = x + base. */
354 last = get_last_insn ();
355 insn = emit_move_insn (x, index_scale);
356 ok_p = false;
357 if (recog_memoized (insn) >= 0)
359 insn = gen_add2_insn (x, disp);
360 if (insn != NULL_RTX)
362 emit_insn (insn);
363 insn = gen_add2_insn (x, base);
364 if (insn != NULL_RTX)
366 emit_insn (insn);
367 ok_p = true;
371 if (! ok_p)
373 delete_insns_since (last);
374 /* Generate x = disp; x = x + base; x = x + index_scale. */
375 emit_move_insn (x, disp);
376 insn = gen_add2_insn (x, base);
377 emit_insn (insn);
378 insn = gen_add2_insn (x, index_scale);
379 emit_insn (insn);
384 /* Functions emit_... can create pseudos -- so expand the pseudo
385 data. */
386 if (old != max_reg_num ())
387 expand_reg_data ();
390 /* The number of emitted reload insns so far. */
391 int lra_curr_reload_num;
393 /* Emit x := y, processing special case when y = u + v or y = u + v *
394 scale + w through emit_add (Y can be an address which is base +
395 index reg * scale + displacement in the general case). X may be used
396 as an intermediate result, therefore it should not be in Y. */
397 void
398 lra_emit_move (rtx x, rtx y)
400 int old;
402 if (GET_CODE (y) != PLUS)
404 if (rtx_equal_p (x, y))
405 return;
406 old = max_reg_num ();
407 emit_move_insn (x, y);
408 if (REG_P (x))
409 lra_reg_info[ORIGINAL_REGNO (x)].last_reload = ++lra_curr_reload_num;
410 /* Function emit_move can create pseudos -- so expand the pseudo
411 data. */
412 if (old != max_reg_num ())
413 expand_reg_data ();
414 return;
416 lra_emit_add (x, XEXP (y, 0), XEXP (y, 1));
419 /* Update insn operands which are duplications of operands whose
420 numbers are in the array NOPS (with end marker -1). The insn is
421 represented by its LRA internal representation ID. */
422 void
423 lra_update_dups (lra_insn_recog_data_t id, signed char *nops)
425 int i, j, nop;
426 struct lra_static_insn_data *static_id = id->insn_static_data;
428 for (i = 0; i < static_id->n_dups; i++)
429 for (j = 0; (nop = nops[j]) >= 0; j++)
430 if (static_id->dup_num[i] == nop)
431 *id->dup_loc[i] = *id->operand_loc[nop];
436 /* This page contains code dealing with info about registers in the
437 insns. */
439 /* Pools for insn reg info. */
440 static alloc_pool insn_reg_pool;
442 /* Initiate pool for insn reg info. */
443 static void
444 init_insn_regs (void)
446 insn_reg_pool
447 = create_alloc_pool ("insn regs", sizeof (struct lra_insn_reg), 100);
450 /* Create LRA insn related info about a referenced REGNO with TYPE
451 (in/out/inout), biggest reference mode MODE, a flag that it is
452 referenced through a subreg (SUBREG_P), a flag that it is early clobbered
453 in the insn (EARLY_CLOBBER), and a reference to the next insn reg
454 info (NEXT). */
455 static struct lra_insn_reg *
456 new_insn_reg (int regno, enum op_type type, enum machine_mode mode,
457 bool subreg_p, bool early_clobber, struct lra_insn_reg *next)
459 struct lra_insn_reg *ir;
461 ir = (struct lra_insn_reg *) pool_alloc (insn_reg_pool);
462 ir->type = type;
463 ir->biggest_mode = mode;
464 if (GET_MODE_SIZE (mode) > GET_MODE_SIZE (lra_reg_info[regno].biggest_mode))
465 lra_reg_info[regno].biggest_mode = mode;
466 ir->subreg_p = subreg_p;
467 ir->early_clobber = early_clobber;
468 ir->regno = regno;
469 ir->next = next;
470 return ir;
473 /* Free insn reg info IR. */
474 static void
475 free_insn_reg (struct lra_insn_reg *ir)
477 pool_free (insn_reg_pool, ir);
480 /* Free insn reg info list IR. */
481 static void
482 free_insn_regs (struct lra_insn_reg *ir)
484 struct lra_insn_reg *next_ir;
486 for (; ir != NULL; ir = next_ir)
488 next_ir = ir->next;
489 free_insn_reg (ir);
493 /* Finish pool for insn reg info. */
494 static void
495 finish_insn_regs (void)
497 free_alloc_pool (insn_reg_pool);
502 /* This page contains code dealing with LRA insn info (in other words,
503 the LRA internal insn representation). */
505 struct target_lra_int default_target_lra_int;
506 #if SWITCHABLE_TARGET
507 struct target_lra_int *this_target_lra_int = &default_target_lra_int;
508 #endif
510 /* Map INSN_CODE -> the static insn data. This info is valid for
511 the whole translation unit. */
512 struct lra_static_insn_data *insn_code_data[LAST_INSN_CODE];
514 /* Debug insns are represented as a special insn with one input
515 operand which is RTL expression in var_location. */
517 /* The following data are used as static insn operand data for all
518 debug insns. If structure lra_operand_data is changed, the
519 initializer should be changed too. */
520 static struct lra_operand_data debug_operand_data =
522 NULL, /* alternative */
523 VOIDmode, /* We are not interested in the operand mode. */
524 OP_IN,
525 0, 0, 0, 0
528 /* The following data are used as static insn data for all debug
529 insns. If structure lra_static_insn_data is changed, the
530 initializer should be changed too. */
531 static struct lra_static_insn_data debug_insn_static_data =
533 &debug_operand_data,
534 0, /* Duplication operands #. */
535 -1, /* Commutative operand #. */
536 1, /* Operands #. There is only one operand which is debug RTL
537 expression. */
538 0, /* Duplications #. */
539 0, /* Alternatives #. We are not interested in alternatives
540 because we do not process debug insns for reloads. */
541 NULL, /* Hard registers referenced in machine description. */
542 NULL /* Descriptions of operands in alternatives. */
545 /* Called once per compilation to initialize some LRA data related
546 to insns. */
547 static void
548 init_insn_code_data_once (void)
550 memset (insn_code_data, 0, sizeof (insn_code_data));
551 memset (op_alt_data, 0, sizeof (op_alt_data));
554 /* Called once per compilation to finalize some LRA data related to
555 insns. */
556 static void
557 finish_insn_code_data_once (void)
559 int i;
561 for (i = 0; i < LAST_INSN_CODE; i++)
563 if (insn_code_data[i] != NULL)
564 free (insn_code_data[i]);
565 if (op_alt_data[i] != NULL)
566 free (op_alt_data[i]);
570 /* Initialize LRA info about operands in insn alternatives. */
571 static void
572 init_op_alt_data (void)
574 int i;
576 for (i = 0; i < LAST_INSN_CODE; i++)
577 if (op_alt_data[i] != NULL)
579 free (op_alt_data[i]);
580 op_alt_data[i] = NULL;
584 /* Return static insn data, allocate and setup if necessary. Although
585 dup_num is static data (it depends only on icode), to set it up we
586 need to extract insn first. So recog_data should be valid for
587 normal insn (ICODE >= 0) before the call. */
588 static struct lra_static_insn_data *
589 get_static_insn_data (int icode, int nop, int ndup, int nalt)
591 struct lra_static_insn_data *data;
592 size_t n_bytes;
594 lra_assert (icode < LAST_INSN_CODE);
595 if (icode >= 0 && (data = insn_code_data[icode]) != NULL)
596 return data;
597 lra_assert (nop >= 0 && ndup >= 0 && nalt >= 0);
598 n_bytes = sizeof (struct lra_static_insn_data)
599 + sizeof (struct lra_operand_data) * nop
600 + sizeof (int) * ndup;
601 data = XNEWVAR (struct lra_static_insn_data, n_bytes);
602 data->n_operands = nop;
603 data->n_dups = ndup;
604 data->n_alternatives = nalt;
605 data->operand = ((struct lra_operand_data *)
606 ((char *) data + sizeof (struct lra_static_insn_data)));
607 data->dup_num = ((int *) ((char *) data->operand
608 + sizeof (struct lra_operand_data) * nop));
609 if (icode >= 0)
611 int i;
613 insn_code_data[icode] = data;
614 for (i = 0; i < nop; i++)
616 data->operand[i].constraint
617 = insn_data[icode].operand[i].constraint;
618 data->operand[i].mode = insn_data[icode].operand[i].mode;
619 data->operand[i].strict_low = insn_data[icode].operand[i].strict_low;
620 data->operand[i].is_operator
621 = insn_data[icode].operand[i].is_operator;
622 data->operand[i].type
623 = (data->operand[i].constraint[0] == '=' ? OP_OUT
624 : data->operand[i].constraint[0] == '+' ? OP_INOUT
625 : OP_IN);
626 data->operand[i].is_address = false;
628 for (i = 0; i < ndup; i++)
629 data->dup_num[i] = recog_data.dup_num[i];
631 return data;
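/* Memory layout illustration (made-up counts): for an insn with
   nop == 2 operands and ndup == 1 duplicate, one block of
   sizeof (struct lra_static_insn_data)
   + 2 * sizeof (struct lra_operand_data) + 1 * sizeof (int) bytes is
   allocated; data->operand points just past the header and
   data->dup_num just past the two operand descriptors.  */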
634 /* The current length of the following array. */
635 int lra_insn_recog_data_len;
637 /* Map INSN_UID -> the insn recog data (NULL if unknown). */
638 lra_insn_recog_data_t *lra_insn_recog_data;
640 /* Initialize LRA data about insns. */
641 static void
642 init_insn_recog_data (void)
644 lra_insn_recog_data_len = 0;
645 lra_insn_recog_data = NULL;
646 init_insn_regs ();
649 /* Expand, if necessary, LRA data about insns. */
650 static void
651 check_and_expand_insn_recog_data (int index)
653 int i, old;
655 if (lra_insn_recog_data_len > index)
656 return;
657 old = lra_insn_recog_data_len;
658 lra_insn_recog_data_len = index * 3 / 2 + 1;
659 lra_insn_recog_data = XRESIZEVEC (lra_insn_recog_data_t,
660 lra_insn_recog_data,
661 lra_insn_recog_data_len);
662 for (i = old; i < lra_insn_recog_data_len; i++)
663 lra_insn_recog_data[i] = NULL;
666 /* Finish LRA DATA about insn. */
667 static void
668 free_insn_recog_data (lra_insn_recog_data_t data)
670 if (data->operand_loc != NULL)
671 free (data->operand_loc);
672 if (data->dup_loc != NULL)
673 free (data->dup_loc);
674 if (data->arg_hard_regs != NULL)
675 free (data->arg_hard_regs);
676 if (HAVE_ATTR_enabled && data->alternative_enabled_p != NULL)
677 free (data->alternative_enabled_p);
678 if (data->icode < 0 && NONDEBUG_INSN_P (data->insn))
680 if (data->insn_static_data->operand_alternative != NULL)
681 free (data->insn_static_data->operand_alternative);
682 free_insn_regs (data->insn_static_data->hard_regs);
683 free (data->insn_static_data);
685 free_insn_regs (data->regs);
686 data->regs = NULL;
687 free (data);
690 /* Finish LRA data about all insns. */
691 static void
692 finish_insn_recog_data (void)
694 int i;
695 lra_insn_recog_data_t data;
697 for (i = 0; i < lra_insn_recog_data_len; i++)
698 if ((data = lra_insn_recog_data[i]) != NULL)
699 free_insn_recog_data (data);
700 finish_insn_regs ();
701 free (lra_insn_recog_data);
704 /* Setup info about operands in alternatives of LRA DATA of insn. */
705 static void
706 setup_operand_alternative (lra_insn_recog_data_t data)
708 int i, nop, nalt;
709 int icode = data->icode;
710 struct lra_static_insn_data *static_data = data->insn_static_data;
712 if (icode >= 0
713 && (static_data->operand_alternative = op_alt_data[icode]) != NULL)
714 return;
715 static_data->commutative = -1;
716 nop = static_data->n_operands;
717 if (nop == 0)
719 static_data->operand_alternative = NULL;
720 return;
722 nalt = static_data->n_alternatives;
723 static_data->operand_alternative = XNEWVEC (struct operand_alternative,
724 nalt * nop);
725 memset (static_data->operand_alternative, 0,
726 nalt * nop * sizeof (struct operand_alternative));
727 if (icode >= 0)
728 op_alt_data[icode] = static_data->operand_alternative;
729 for (i = 0; i < nop; i++)
731 int j;
732 struct operand_alternative *op_alt_start, *op_alt;
733 const char *p = static_data->operand[i].constraint;
735 static_data->operand[i].early_clobber = 0;
736 op_alt_start = &static_data->operand_alternative[i];
738 for (j = 0; j < nalt; j++)
740 op_alt = op_alt_start + j * nop;
741 op_alt->cl = NO_REGS;
742 op_alt->constraint = p;
743 op_alt->matches = -1;
744 op_alt->matched = -1;
746 if (*p == '\0' || *p == ',')
748 op_alt->anything_ok = 1;
749 continue;
752 for (;;)
754 char c = *p;
755 if (c == '#')
757 c = *++p;
758 while (c != ',' && c != '\0');
759 if (c == ',' || c == '\0')
761 p++;
762 break;
765 switch (c)
767 case '=': case '+': case '*':
768 case 'E': case 'F': case 'G': case 'H':
769 case 's': case 'i': case 'n':
770 case 'I': case 'J': case 'K': case 'L':
771 case 'M': case 'N': case 'O': case 'P':
772 /* These don't say anything we care about. */
773 break;
775 case '%':
776 /* We currently only support one commutative pair of
777 operands. */
778 if (static_data->commutative < 0)
779 static_data->commutative = i;
780 else
781 lra_assert (data->icode < 0); /* Asm */
783 /* The last operand should not be marked
784 commutative. */
785 lra_assert (i != nop - 1);
786 break;
788 case '?':
789 op_alt->reject += LRA_LOSER_COST_FACTOR;
790 break;
791 case '!':
792 op_alt->reject += LRA_MAX_REJECT;
793 break;
794 case '&':
795 op_alt->earlyclobber = 1;
796 static_data->operand[i].early_clobber = 1;
797 break;
799 case '0': case '1': case '2': case '3': case '4':
800 case '5': case '6': case '7': case '8': case '9':
802 char *end;
803 op_alt->matches = strtoul (p, &end, 10);
804 static_data->operand_alternative
805 [j * nop + op_alt->matches].matched = i;
806 p = end;
808 continue;
810 case TARGET_MEM_CONSTRAINT:
811 op_alt->memory_ok = 1;
812 break;
813 case '<':
814 op_alt->decmem_ok = 1;
815 break;
816 case '>':
817 op_alt->incmem_ok = 1;
818 break;
819 case 'V':
820 op_alt->nonoffmem_ok = 1;
821 break;
822 case 'o':
823 op_alt->offmem_ok = 1;
824 break;
825 case 'X':
826 op_alt->anything_ok = 1;
827 break;
829 case 'p':
830 static_data->operand[i].is_address = true;
831 op_alt->is_address = 1;
832 op_alt->cl = (reg_class_subunion[(int) op_alt->cl]
833 [(int) base_reg_class (VOIDmode,
834 ADDR_SPACE_GENERIC,
835 ADDRESS, SCRATCH)]);
836 break;
838 case 'g':
839 case 'r':
840 op_alt->cl =
841 reg_class_subunion[(int) op_alt->cl][(int) GENERAL_REGS];
842 break;
844 default:
845 if (EXTRA_MEMORY_CONSTRAINT (c, p))
847 op_alt->memory_ok = 1;
848 break;
850 if (EXTRA_ADDRESS_CONSTRAINT (c, p))
852 static_data->operand[i].is_address = true;
853 op_alt->is_address = 1;
854 op_alt->cl
855 = (reg_class_subunion
856 [(int) op_alt->cl]
857 [(int) base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
858 ADDRESS, SCRATCH)]);
859 break;
862 op_alt->cl
863 = (reg_class_subunion
864 [(int) op_alt->cl]
865 [(int)
866 REG_CLASS_FROM_CONSTRAINT ((unsigned char) c, p)]);
867 break;
869 p += CONSTRAINT_LEN (c, p);
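/* Parsing illustration (a made-up constraint string): for an operand
   with constraint "=r,m" and two alternatives, the loop above records
   cl = GENERAL_REGS for alternative 0 (from 'r') and memory_ok = 1 for
   alternative 1 (assuming 'm' is TARGET_MEM_CONSTRAINT); '=' itself
   falls into the "don't care" cases here because the operand type was
   already derived from the first constraint character.  */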
875 /* Recursively process X and collect info about registers in X that are
876 not insn operands, with type TYPE (in/out/inout) and the flag that
877 they are early clobbered in the insn (EARLY_CLOBBER), and add the info
878 to LIST. X is a part of the insn given by DATA. Return the resulting
879 list. */
880 static struct lra_insn_reg *
881 collect_non_operand_hard_regs (rtx *x, lra_insn_recog_data_t data,
882 struct lra_insn_reg *list,
883 enum op_type type, bool early_clobber)
885 int i, j, regno, last;
886 bool subreg_p;
887 enum machine_mode mode;
888 struct lra_insn_reg *curr;
889 rtx op = *x;
890 enum rtx_code code = GET_CODE (op);
891 const char *fmt = GET_RTX_FORMAT (code);
893 for (i = 0; i < data->insn_static_data->n_operands; i++)
894 if (x == data->operand_loc[i])
895 /* It is an operand loc. Stop here. */
896 return list;
897 for (i = 0; i < data->insn_static_data->n_dups; i++)
898 if (x == data->dup_loc[i])
899 /* It is a dup loc. Stop here. */
900 return list;
901 mode = GET_MODE (op);
902 subreg_p = false;
903 if (code == SUBREG)
905 op = SUBREG_REG (op);
906 code = GET_CODE (op);
907 if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (op)))
909 mode = GET_MODE (op);
910 if (GET_MODE_SIZE (mode) > REGMODE_NATURAL_SIZE (mode))
911 subreg_p = true;
914 if (REG_P (op))
916 if ((regno = REGNO (op)) >= FIRST_PSEUDO_REGISTER)
917 return list;
918 for (last = regno + hard_regno_nregs[regno][mode];
919 regno < last;
920 regno++)
921 if (! TEST_HARD_REG_BIT (lra_no_alloc_regs, regno)
922 || TEST_HARD_REG_BIT (eliminable_regset, regno))
924 for (curr = list; curr != NULL; curr = curr->next)
925 if (curr->regno == regno && curr->subreg_p == subreg_p
926 && curr->biggest_mode == mode)
928 if (curr->type != type)
929 curr->type = OP_INOUT;
930 if (curr->early_clobber != early_clobber)
931 curr->early_clobber = true;
932 break;
934 if (curr == NULL)
936 /* This is a new hard regno or the info can not be
937 integrated into the found structure. */
938 #ifdef STACK_REGS
939 early_clobber
940 = (early_clobber
941 /* This clobber is to inform popping floating
942 point stack only. */
943 && ! (FIRST_STACK_REG <= regno
944 && regno <= LAST_STACK_REG));
945 #endif
946 list = new_insn_reg (regno, type, mode, subreg_p,
947 early_clobber, list);
950 return list;
952 switch (code)
954 case SET:
955 list = collect_non_operand_hard_regs (&SET_DEST (op), data,
956 list, OP_OUT, false);
957 list = collect_non_operand_hard_regs (&SET_SRC (op), data,
958 list, OP_IN, false);
959 break;
960 case CLOBBER:
961 /* We treat clobber of non-operand hard registers as early
962 clobber (the behavior is expected from asm). */
963 list = collect_non_operand_hard_regs (&XEXP (op, 0), data,
964 list, OP_OUT, true);
965 break;
966 case PRE_INC: case PRE_DEC: case POST_INC: case POST_DEC:
967 list = collect_non_operand_hard_regs (&XEXP (op, 0), data,
968 list, OP_INOUT, false);
969 break;
970 case PRE_MODIFY: case POST_MODIFY:
971 list = collect_non_operand_hard_regs (&XEXP (op, 0), data,
972 list, OP_INOUT, false);
973 list = collect_non_operand_hard_regs (&XEXP (op, 1), data,
974 list, OP_IN, false);
975 break;
976 default:
977 fmt = GET_RTX_FORMAT (code);
978 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
980 if (fmt[i] == 'e')
981 list = collect_non_operand_hard_regs (&XEXP (op, i), data,
982 list, OP_IN, false);
983 else if (fmt[i] == 'E')
984 for (j = XVECLEN (op, i) - 1; j >= 0; j--)
985 list = collect_non_operand_hard_regs (&XVECEXP (op, i, j), data,
986 list, OP_IN, false);
989 return list;
992 /* Set up and return info about INSN. Set up the info if it is not set up
993 yet. */
994 lra_insn_recog_data_t
995 lra_set_insn_recog_data (rtx insn)
997 lra_insn_recog_data_t data;
998 int i, n, icode;
999 rtx **locs;
1000 unsigned int uid = INSN_UID (insn);
1001 struct lra_static_insn_data *insn_static_data;
1003 check_and_expand_insn_recog_data (uid);
1004 if (DEBUG_INSN_P (insn))
1005 icode = -1;
1006 else
1008 icode = INSN_CODE (insn);
1009 if (icode < 0)
1010 /* It might be a new simple insn which is not recognized yet. */
1011 INSN_CODE (insn) = icode = recog_memoized (insn);
1013 data = XNEW (struct lra_insn_recog_data);
1014 lra_insn_recog_data[uid] = data;
1015 data->insn = insn;
1016 data->used_insn_alternative = -1;
1017 data->icode = icode;
1018 data->regs = NULL;
1019 if (DEBUG_INSN_P (insn))
1021 data->insn_static_data = &debug_insn_static_data;
1022 data->dup_loc = NULL;
1023 data->arg_hard_regs = NULL;
1024 data->alternative_enabled_p = NULL;
1025 data->operand_loc = XNEWVEC (rtx *, 1);
1026 data->operand_loc[0] = &INSN_VAR_LOCATION_LOC (insn);
1027 return data;
1029 if (icode < 0)
1031 int nop;
1032 enum machine_mode operand_mode[MAX_RECOG_OPERANDS];
1033 const char *constraints[MAX_RECOG_OPERANDS];
1035 nop = asm_noperands (PATTERN (insn));
1036 data->operand_loc = data->dup_loc = NULL;
1037 if (nop < 0)
1038 /* It is a special insn like USE or CLOBBER. */
1039 data->insn_static_data = insn_static_data
1040 = get_static_insn_data (-1, 0, 0, 1);
1041 else
1043 /* expand_asm_operands makes sure there aren't too many
1044 operands. */
1045 lra_assert (nop <= MAX_RECOG_OPERANDS);
1046 if (nop != 0)
1047 data->operand_loc = XNEWVEC (rtx *, nop);
1048 /* Now get the operand values and constraints out of the
1049 insn. */
1050 decode_asm_operands (PATTERN (insn), NULL,
1051 data->operand_loc,
1052 constraints, operand_mode, NULL);
1053 n = 1;
1054 if (nop > 0)
1056 const char *p = recog_data.constraints[0];
1058 for (p = constraints[0]; *p; p++)
1059 n += *p == ',';
1061 data->insn_static_data = insn_static_data
1062 = get_static_insn_data (-1, nop, 0, n);
1063 for (i = 0; i < nop; i++)
1065 insn_static_data->operand[i].mode = operand_mode[i];
1066 insn_static_data->operand[i].constraint = constraints[i];
1067 insn_static_data->operand[i].strict_low = false;
1068 insn_static_data->operand[i].is_operator = false;
1069 insn_static_data->operand[i].is_address = false;
1072 for (i = 0; i < insn_static_data->n_operands; i++)
1073 insn_static_data->operand[i].type
1074 = (insn_static_data->operand[i].constraint[0] == '=' ? OP_OUT
1075 : insn_static_data->operand[i].constraint[0] == '+' ? OP_INOUT
1076 : OP_IN);
1077 data->alternative_enabled_p = NULL;
1079 else
1081 insn_extract (insn);
1082 data->insn_static_data = insn_static_data
1083 = get_static_insn_data (icode, insn_data[icode].n_operands,
1084 insn_data[icode].n_dups,
1085 insn_data[icode].n_alternatives);
1086 n = insn_static_data->n_operands;
1087 if (n == 0)
1088 locs = NULL;
1089 else
1091 locs = XNEWVEC (rtx *, n);
1092 memcpy (locs, recog_data.operand_loc, n * sizeof (rtx *));
1094 data->operand_loc = locs;
1095 n = insn_static_data->n_dups;
1096 if (n == 0)
1097 locs = NULL;
1098 else
1100 locs = XNEWVEC (rtx *, n);
1101 memcpy (locs, recog_data.dup_loc, n * sizeof (rtx *));
1103 data->dup_loc = locs;
1104 if (HAVE_ATTR_enabled)
1106 bool *bp;
1108 n = insn_static_data->n_alternatives;
1109 lra_assert (n >= 0);
1110 data->alternative_enabled_p = bp = XNEWVEC (bool, n);
1111 /* Cache the insn because we don't want to call extract_insn
1112 from get_attr_enabled, as extract_insn modifies
1113 which_alternative. The enabled attribute should not depend
1114 on insn operands, operand modes, operand types, or operand
1115 constraints. It should depend only on the architecture. If that
1116 is not true, we should rewrite this file's code to use
1117 extract_insn instead of the less expensive insn_extract. */
1118 recog_data.insn = insn;
1119 for (i = 0; i < n; i++)
1121 which_alternative = i;
1122 bp[i] = get_attr_enabled (insn);
1126 if (GET_CODE (PATTERN (insn)) == CLOBBER || GET_CODE (PATTERN (insn)) == USE)
1127 insn_static_data->hard_regs = NULL;
1128 else
1129 insn_static_data->hard_regs
1130 = collect_non_operand_hard_regs (&PATTERN (insn), data,
1131 NULL, OP_IN, false);
1132 setup_operand_alternative (data);
1133 data->arg_hard_regs = NULL;
1134 if (CALL_P (insn))
1136 rtx link;
1137 int n_hard_regs, regno, arg_hard_regs[FIRST_PSEUDO_REGISTER];
1139 n_hard_regs = 0;
1140 /* Find implicit hard register usage. We believe it will not
1141 be changed whatever transformations are used. Call insns
1142 are an example. */
1143 for (link = CALL_INSN_FUNCTION_USAGE (insn);
1144 link != NULL_RTX;
1145 link = XEXP (link, 1))
1146 if (GET_CODE (XEXP (link, 0)) == USE
1147 && REG_P (XEXP (XEXP (link, 0), 0)))
1149 regno = REGNO (XEXP (XEXP (link, 0), 0));
1150 lra_assert (regno < FIRST_PSEUDO_REGISTER);
1151 /* It is an argument register. */
1152 for (i = (hard_regno_nregs
1153 [regno][GET_MODE (XEXP (XEXP (link, 0), 0))]) - 1;
1154 i >= 0;
1155 i--)
1156 arg_hard_regs[n_hard_regs++] = regno + i;
1158 if (n_hard_regs != 0)
1160 arg_hard_regs[n_hard_regs++] = -1;
1161 data->arg_hard_regs = XNEWVEC (int, n_hard_regs);
1162 memcpy (data->arg_hard_regs, arg_hard_regs,
1163 sizeof (int) * n_hard_regs);
1166 /* Some output operands can be recognized only from the context, not
1167 from the constraints, which are empty in this case. A call insn may
1168 contain a hard register in the set destination with an empty constraint,
1169 and extract_insn treats it as an input. */
1170 for (i = 0; i < insn_static_data->n_operands; i++)
1172 int j;
1173 rtx pat, set;
1174 struct lra_operand_data *operand = &insn_static_data->operand[i];
1176 /* ??? Should we treat 'X' the same way? It looks to me that
1177 'X' means anything and an empty constraint means we do not
1178 care. */
1179 if (operand->type != OP_IN || *operand->constraint != '\0'
1180 || operand->is_operator)
1181 continue;
1182 pat = PATTERN (insn);
1183 if (GET_CODE (pat) == SET)
1185 if (data->operand_loc[i] != &SET_DEST (pat))
1186 continue;
1188 else if (GET_CODE (pat) == PARALLEL)
1190 for (j = XVECLEN (pat, 0) - 1; j >= 0; j--)
1192 set = XVECEXP (PATTERN (insn), 0, j);
1193 if (GET_CODE (set) == SET
1194 && &SET_DEST (set) == data->operand_loc[i])
1195 break;
1197 if (j < 0)
1198 continue;
1200 else
1201 continue;
1202 operand->type = OP_OUT;
1204 return data;
1207 /* Return info about the insn given by UID. The info should already be
1208 set up. */
1209 static lra_insn_recog_data_t
1210 get_insn_recog_data_by_uid (int uid)
1212 lra_insn_recog_data_t data;
1214 data = lra_insn_recog_data[uid];
1215 lra_assert (data != NULL);
1216 return data;
1219 /* Invalidate all info about insn given by its UID. */
1220 static void
1221 invalidate_insn_recog_data (int uid)
1223 lra_insn_recog_data_t data;
1225 data = lra_insn_recog_data[uid];
1226 lra_assert (data != NULL);
1227 free_insn_recog_data (data);
1228 lra_insn_recog_data[uid] = NULL;
1231 /* Update all the insn info about INSN. It is usually called when
1232 something in the insn was changed. Return the updated info. */
1233 lra_insn_recog_data_t
1234 lra_update_insn_recog_data (rtx insn)
1236 lra_insn_recog_data_t data;
1237 int n;
1238 unsigned int uid = INSN_UID (insn);
1239 struct lra_static_insn_data *insn_static_data;
1241 check_and_expand_insn_recog_data (uid);
1242 if ((data = lra_insn_recog_data[uid]) != NULL
1243 && data->icode != INSN_CODE (insn))
1245 invalidate_insn_data_regno_info (data, insn, get_insn_freq (insn));
1246 invalidate_insn_recog_data (uid);
1247 data = NULL;
1249 if (data == NULL)
1250 return lra_get_insn_recog_data (insn);
1251 insn_static_data = data->insn_static_data;
1252 data->used_insn_alternative = -1;
1253 if (DEBUG_INSN_P (insn))
1254 return data;
1255 if (data->icode < 0)
1257 int nop;
1258 enum machine_mode operand_mode[MAX_RECOG_OPERANDS];
1259 const char *constraints[MAX_RECOG_OPERANDS];
1261 nop = asm_noperands (PATTERN (insn));
1262 if (nop >= 0)
1264 lra_assert (nop == data->insn_static_data->n_operands);
1265 /* Now get the operand values and constraints out of the
1266 insn. */
1267 decode_asm_operands (PATTERN (insn), NULL,
1268 data->operand_loc,
1269 constraints, operand_mode, NULL);
1270 #ifdef ENABLE_CHECKING
1272 int i;
1274 for (i = 0; i < nop; i++)
1275 lra_assert
1276 (insn_static_data->operand[i].mode == operand_mode[i]
1277 && insn_static_data->operand[i].constraint == constraints[i]
1278 && ! insn_static_data->operand[i].is_operator);
1280 #endif
1282 #ifdef ENABLE_CHECKING
1284 int i;
1286 for (i = 0; i < insn_static_data->n_operands; i++)
1287 lra_assert
1288 (insn_static_data->operand[i].type
1289 == (insn_static_data->operand[i].constraint[0] == '=' ? OP_OUT
1290 : insn_static_data->operand[i].constraint[0] == '+' ? OP_INOUT
1291 : OP_IN));
1293 #endif
1295 else
1297 insn_extract (insn);
1298 n = insn_static_data->n_operands;
1299 if (n != 0)
1300 memcpy (data->operand_loc, recog_data.operand_loc, n * sizeof (rtx *));
1301 n = insn_static_data->n_dups;
1302 if (n != 0)
1303 memcpy (data->dup_loc, recog_data.dup_loc, n * sizeof (rtx *));
1304 #if HAVE_ATTR_enabled
1305 #ifdef ENABLE_CHECKING
1307 int i;
1308 bool *bp;
1310 n = insn_static_data->n_alternatives;
1311 bp = data->alternative_enabled_p;
1312 lra_assert (n >= 0 && bp != NULL);
1313 /* Cache the insn to prevent extract_insn call from
1314 get_attr_enabled. */
1315 recog_data.insn = insn;
1316 for (i = 0; i < n; i++)
1318 which_alternative = i;
1319 lra_assert (bp[i] == get_attr_enabled (insn));
1322 #endif
1323 #endif
1325 return data;
1328 /* Set up that INSN is using alternative ALT now. */
1329 void
1330 lra_set_used_insn_alternative (rtx insn, int alt)
1332 lra_insn_recog_data_t data;
1334 data = lra_get_insn_recog_data (insn);
1335 data->used_insn_alternative = alt;
1338 /* Set up that insn with UID is using alternative ALT now. The insn
1339 info should be already set up. */
1340 void
1341 lra_set_used_insn_alternative_by_uid (int uid, int alt)
1343 lra_insn_recog_data_t data;
1345 check_and_expand_insn_recog_data (uid);
1346 data = lra_insn_recog_data[uid];
1347 lra_assert (data != NULL);
1348 data->used_insn_alternative = alt;
1353 /* This page contains code dealing with common register info and
1354 pseudo copies. */
1356 /* The size of the following array. */
1357 static int reg_info_size;
1358 /* Common info about each register. */
1359 struct lra_reg *lra_reg_info;
1361 /* Last register value. */
1362 static int last_reg_value;
1364 /* Return new register value. */
1365 static int
1366 get_new_reg_value (void)
1368 return ++last_reg_value;
1371 /* Pools for copies. */
1372 static alloc_pool copy_pool;
1374 /* Vec referring to pseudo copies. */
1375 static vec<lra_copy_t> copy_vec;
1377 /* Initialize I-th element of lra_reg_info. */
1378 static inline void
1379 initialize_lra_reg_info_element (int i)
1381 bitmap_initialize (&lra_reg_info[i].insn_bitmap, &reg_obstack);
1382 #ifdef STACK_REGS
1383 lra_reg_info[i].no_stack_p = false;
1384 #endif
1385 CLEAR_HARD_REG_SET (lra_reg_info[i].conflict_hard_regs);
1386 lra_reg_info[i].preferred_hard_regno1 = -1;
1387 lra_reg_info[i].preferred_hard_regno2 = -1;
1388 lra_reg_info[i].preferred_hard_regno_profit1 = 0;
1389 lra_reg_info[i].preferred_hard_regno_profit2 = 0;
1390 lra_reg_info[i].biggest_mode = VOIDmode;
1391 lra_reg_info[i].live_ranges = NULL;
1392 lra_reg_info[i].nrefs = lra_reg_info[i].freq = 0;
1393 lra_reg_info[i].last_reload = 0;
1394 lra_reg_info[i].restore_regno = -1;
1395 lra_reg_info[i].val = get_new_reg_value ();
1396 lra_reg_info[i].copies = NULL;
1399 /* Initialize common reg info and copies. */
1400 static void
1401 init_reg_info (void)
1403 int i;
1405 last_reg_value = 0;
1406 reg_info_size = max_reg_num () * 3 / 2 + 1;
1407 lra_reg_info = XNEWVEC (struct lra_reg, reg_info_size);
1408 for (i = 0; i < reg_info_size; i++)
1409 initialize_lra_reg_info_element (i);
1410 copy_pool
1411 = create_alloc_pool ("lra copies", sizeof (struct lra_copy), 100);
1412 copy_vec.create (100);
1416 /* Finish common reg info and copies. */
1417 static void
1418 finish_reg_info (void)
1420 int i;
1422 for (i = 0; i < reg_info_size; i++)
1423 bitmap_clear (&lra_reg_info[i].insn_bitmap);
1424 free (lra_reg_info);
1425 reg_info_size = 0;
1426 free_alloc_pool (copy_pool);
1427 copy_vec.release ();
1430 /* Expand common reg info if it is necessary. */
1431 static void
1432 expand_reg_info (void)
1434 int i, old = reg_info_size;
1436 if (reg_info_size > max_reg_num ())
1437 return;
1438 reg_info_size = max_reg_num () * 3 / 2 + 1;
1439 lra_reg_info = XRESIZEVEC (struct lra_reg, lra_reg_info, reg_info_size);
1440 for (i = old; i < reg_info_size; i++)
1441 initialize_lra_reg_info_element (i);
1444 /* Free all copies. */
1445 void
1446 lra_free_copies (void)
1448 lra_copy_t cp;
1450 while (copy_vec.length () != 0)
1452 cp = copy_vec.pop ();
1453 lra_reg_info[cp->regno1].copies = lra_reg_info[cp->regno2].copies = NULL;
1454 pool_free (copy_pool, cp);
1458 /* Create copy of two pseudos REGNO1 and REGNO2. The copy execution
1459 frequency is FREQ. */
1460 void
1461 lra_create_copy (int regno1, int regno2, int freq)
1463 bool regno1_dest_p;
1464 lra_copy_t cp;
1466 lra_assert (regno1 != regno2);
1467 regno1_dest_p = true;
1468 if (regno1 > regno2)
1470 int temp = regno2;
1472 regno1_dest_p = false;
1473 regno2 = regno1;
1474 regno1 = temp;
1476 cp = (lra_copy_t) pool_alloc (copy_pool);
1477 copy_vec.safe_push (cp);
1478 cp->regno1_dest_p = regno1_dest_p;
1479 cp->freq = freq;
1480 cp->regno1 = regno1;
1481 cp->regno2 = regno2;
1482 cp->regno1_next = lra_reg_info[regno1].copies;
1483 lra_reg_info[regno1].copies = cp;
1484 cp->regno2_next = lra_reg_info[regno2].copies;
1485 lra_reg_info[regno2].copies = cp;
1486 if (lra_dump_file != NULL)
1487 fprintf (lra_dump_file, " Creating copy r%d%sr%d@%d\n",
1488 regno1, regno1_dest_p ? "<-" : "->", regno2, freq);
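/* Dump illustration (hypothetical regnos): lra_create_copy (20, 10, 100)
   swaps the regnos so that regno1 == 10 < regno2 == 20, clears
   regno1_dest_p, and prints " Creating copy r10->r20@100", i.e. a copy
   whose destination is r20 and source is r10, with execution
   frequency 100.  */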
1491 /* Return N-th (0, 1, ...) copy. If there is no copy, return
1492 NULL. */
1493 lra_copy_t
1494 lra_get_copy (int n)
1496 if (n >= (int) copy_vec.length ())
1497 return NULL;
1498 return copy_vec[n];
1503 /* This page contains code dealing with info about registers in
1504 insns. */
1506 /* Process X of insn UID recursively and add info (the operand type is
1507 given by TYPE, the early clobber flag by EARLY_CLOBBER)
1508 about registers in X to the insn DATA. */
1509 static void
1510 add_regs_to_insn_regno_info (lra_insn_recog_data_t data, rtx x, int uid,
1511 enum op_type type, bool early_clobber)
1513 int i, j, regno;
1514 bool subreg_p;
1515 enum machine_mode mode;
1516 const char *fmt;
1517 enum rtx_code code;
1518 struct lra_insn_reg *curr;
1520 code = GET_CODE (x);
1521 mode = GET_MODE (x);
1522 subreg_p = false;
1523 if (GET_CODE (x) == SUBREG)
1525 x = SUBREG_REG (x);
1526 code = GET_CODE (x);
1527 if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (x)))
1529 mode = GET_MODE (x);
1530 if (GET_MODE_SIZE (mode) > REGMODE_NATURAL_SIZE (mode))
1531 subreg_p = true;
1534 if (REG_P (x))
1536 regno = REGNO (x);
1537 if (regno < FIRST_PSEUDO_REGISTER
1538 && TEST_HARD_REG_BIT (lra_no_alloc_regs, regno)
1539 && ! TEST_HARD_REG_BIT (eliminable_regset, regno))
1540 return;
1541 expand_reg_info ();
1542 if (bitmap_set_bit (&lra_reg_info[regno].insn_bitmap, uid))
1544 data->regs = new_insn_reg (regno, type, mode, subreg_p,
1545 early_clobber, data->regs);
1546 return;
1548 else
1550 for (curr = data->regs; curr != NULL; curr = curr->next)
1551 if (curr->regno == regno)
1553 if (curr->subreg_p != subreg_p || curr->biggest_mode != mode)
1554 /* The info can not be integrated into the found
1555 structure. */
1556 data->regs = new_insn_reg (regno, type, mode, subreg_p,
1557 early_clobber, data->regs);
1558 else
1560 if (curr->type != type)
1561 curr->type = OP_INOUT;
1562 if (curr->early_clobber != early_clobber)
1563 curr->early_clobber = true;
1565 return;
1567 gcc_unreachable ();
1571 switch (code)
1573 case SET:
1574 add_regs_to_insn_regno_info (data, SET_DEST (x), uid, OP_OUT, false);
1575 add_regs_to_insn_regno_info (data, SET_SRC (x), uid, OP_IN, false);
1576 break;
1577 case CLOBBER:
1578 /* We treat clobber of non-operand hard registers as early
1579 clobber (the behavior is expected from asm). */
1580 add_regs_to_insn_regno_info (data, XEXP (x, 0), uid, OP_OUT, true);
1581 break;
1582 case PRE_INC: case PRE_DEC: case POST_INC: case POST_DEC:
1583 add_regs_to_insn_regno_info (data, XEXP (x, 0), uid, OP_INOUT, false);
1584 break;
1585 case PRE_MODIFY: case POST_MODIFY:
1586 add_regs_to_insn_regno_info (data, XEXP (x, 0), uid, OP_INOUT, false);
1587 add_regs_to_insn_regno_info (data, XEXP (x, 1), uid, OP_IN, false);
1588 break;
1589 default:
1590 if ((code != PARALLEL && code != EXPR_LIST) || type != OP_OUT)
1591 /* Some targets place small structures in registers for return
1592 values of functions, and those registers are wrapped in
1593 PARALLEL that we may see as the destination of a SET. Here
1594 is an example:
1596 (call_insn 13 12 14 2 (set (parallel:BLK [
1597 (expr_list:REG_DEP_TRUE (reg:DI 0 ax)
1598 (const_int 0 [0]))
1599 (expr_list:REG_DEP_TRUE (reg:DI 1 dx)
1600 (const_int 8 [0x8]))
1602 (call (mem:QI (symbol_ref:DI (... */
1603 type = OP_IN;
1604 fmt = GET_RTX_FORMAT (code);
1605 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1607 if (fmt[i] == 'e')
1608 add_regs_to_insn_regno_info (data, XEXP (x, i), uid, type, false);
1609 else if (fmt[i] == 'E')
1611 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1612 add_regs_to_insn_regno_info (data, XVECEXP (x, i, j), uid,
1613 type, false);
1619 /* Return execution frequency of INSN. */
1620 static int
1621 get_insn_freq (rtx insn)
1623 basic_block bb;
1625 if ((bb = BLOCK_FOR_INSN (insn)) != NULL)
1626 return REG_FREQ_FROM_BB (bb);
1627 else
1629 lra_assert (lra_insn_recog_data[INSN_UID (insn)]
1630 ->insn_static_data->n_operands == 0);
1631 /* We don't care about such an insn, e.g. it might be a jump with
1632 an addr_vec. */
1633 return 1;
1637 /* Invalidate all reg info of INSN with DATA and execution frequency
1638 FREQ. Update common info about the invalidated registers. */
1639 static void
1640 invalidate_insn_data_regno_info (lra_insn_recog_data_t data, rtx insn,
1641 int freq)
1643 int uid;
1644 bool debug_p;
1645 unsigned int i;
1646 struct lra_insn_reg *ir, *next_ir;
1648 uid = INSN_UID (insn);
1649 debug_p = DEBUG_INSN_P (insn);
1650 for (ir = data->regs; ir != NULL; ir = next_ir)
1652 i = ir->regno;
1653 next_ir = ir->next;
1654 free_insn_reg (ir);
1655 bitmap_clear_bit (&lra_reg_info[i].insn_bitmap, uid);
1656 if (i >= FIRST_PSEUDO_REGISTER && ! debug_p)
1658 lra_reg_info[i].nrefs--;
1659 lra_reg_info[i].freq -= freq;
1660 lra_assert (lra_reg_info[i].nrefs >= 0 && lra_reg_info[i].freq >= 0);
1663 data->regs = NULL;
1666 /* Invalidate all reg info of INSN. Update common info about the
1667 invalidated registers. */
1668 void
1669 lra_invalidate_insn_regno_info (rtx insn)
1671 invalidate_insn_data_regno_info (lra_get_insn_recog_data (insn), insn,
1672 get_insn_freq (insn));
1675 /* Update common reg info from reg info of insn given by its DATA and
1676 execution frequency FREQ. */
1677 static void
1678 setup_insn_reg_info (lra_insn_recog_data_t data, int freq)
1680 unsigned int i;
1681 struct lra_insn_reg *ir;
1683 for (ir = data->regs; ir != NULL; ir = ir->next)
1684 if ((i = ir->regno) >= FIRST_PSEUDO_REGISTER)
1686 lra_reg_info[i].nrefs++;
1687 lra_reg_info[i].freq += freq;
1691 /* Set up insn reg info of INSN. Update common reg info from reg info
1692 of INSN. */
1693 void
1694 lra_update_insn_regno_info (rtx insn)
1696 int i, uid, freq;
1697 lra_insn_recog_data_t data;
1698 struct lra_static_insn_data *static_data;
1699 enum rtx_code code;
1701 if (! INSN_P (insn))
1702 return;
1703 data = lra_get_insn_recog_data (insn);
1704 static_data = data->insn_static_data;
1705 freq = get_insn_freq (insn);
1706 invalidate_insn_data_regno_info (data, insn, freq);
1707 uid = INSN_UID (insn);
1708 for (i = static_data->n_operands - 1; i >= 0; i--)
1709 add_regs_to_insn_regno_info (data, *data->operand_loc[i], uid,
1710 static_data->operand[i].type,
1711 static_data->operand[i].early_clobber);
1712 if ((code = GET_CODE (PATTERN (insn))) == CLOBBER || code == USE)
1713 add_regs_to_insn_regno_info (data, XEXP (PATTERN (insn), 0), uid,
1714 code == USE ? OP_IN : OP_OUT, false);
1715 if (NONDEBUG_INSN_P (insn))
1716 setup_insn_reg_info (data, freq);
1719 /* Return reg info of the insn given by its UID. */
1720 struct lra_insn_reg *
1721 lra_get_insn_regs (int uid)
1723 lra_insn_recog_data_t data;
1725 data = get_insn_recog_data_by_uid (uid);
1726 return data->regs;
1731 /* This page contains code dealing with stack of the insns which
1732 should be processed by the next constraint pass. */
1734 /* Bitmap used to put an insn on the stack only once. */
1735 static sbitmap lra_constraint_insn_stack_bitmap;
1737 /* The stack itself. */
1738 vec<rtx> lra_constraint_insn_stack;
1740 /* Put INSN on the stack. If ALWAYS_UPDATE is true, always update the reg
1741 info for INSN, otherwise only update it if INSN is not already on the
1742 stack. */
1743 static inline void
1744 lra_push_insn_1 (rtx insn, bool always_update)
1746 unsigned int uid = INSN_UID (insn);
1747 if (always_update)
1748 lra_update_insn_regno_info (insn);
1749 if (uid >= SBITMAP_SIZE (lra_constraint_insn_stack_bitmap))
1750 lra_constraint_insn_stack_bitmap =
1751 sbitmap_resize (lra_constraint_insn_stack_bitmap, 3 * uid / 2, 0);
1752 if (bitmap_bit_p (lra_constraint_insn_stack_bitmap, uid))
1753 return;
1754 bitmap_set_bit (lra_constraint_insn_stack_bitmap, uid);
1755 if (! always_update)
1756 lra_update_insn_regno_info (insn);
1757 lra_constraint_insn_stack.safe_push (insn);
1760 /* Put INSN on the stack. */
1761 void
1762 lra_push_insn (rtx insn)
1764 lra_push_insn_1 (insn, false);
1767 /* Put INSN on the stack and update its reg info. */
1768 void
1769 lra_push_insn_and_update_insn_regno_info (rtx insn)
1771 lra_push_insn_1 (insn, true);
1774 /* Put insn with UID on the stack. */
1775 void
1776 lra_push_insn_by_uid (unsigned int uid)
1778 lra_push_insn (lra_insn_recog_data[uid]->insn);
1781 /* Take the last-inserted insn off the stack and return it. */
1783 lra_pop_insn (void)
1785 rtx insn = lra_constraint_insn_stack.pop ();
1786 bitmap_clear_bit (lra_constraint_insn_stack_bitmap, INSN_UID (insn));
1787 return insn;
1790 /* Return the current size of the insn stack. */
1791 unsigned int
1792 lra_insn_stack_length (void)
1794 return lra_constraint_insn_stack.length ();
1797 /* Push insns FROM to TO (excluding it) going in reverse order. */
1798 static void
1799 push_insns (rtx from, rtx to)
1801 rtx insn;
1803 if (from == NULL_RTX)
1804 return;
1805 for (insn = from; insn != to; insn = PREV_INSN (insn))
1806 if (INSN_P (insn))
1807 lra_push_insn (insn);
1810 /* Emit insns BEFORE before INSN and insns AFTER after INSN. Put the
1811 insns onto the stack. Print a message about emitting the insns with
1812 TITLE. */
1813 void
1814 lra_process_new_insns (rtx insn, rtx before, rtx after, const char *title)
1816 rtx last;
1818 if (lra_dump_file != NULL && (before != NULL_RTX || after != NULL_RTX))
1820 dump_insn_slim (lra_dump_file, insn);
1821 if (before != NULL_RTX)
1823 fprintf (lra_dump_file," %s before:\n", title);
1824 dump_rtl_slim (lra_dump_file, before, NULL_RTX, -1, 0);
1826 if (after != NULL_RTX)
1828 fprintf (lra_dump_file, " %s after:\n", title);
1829 dump_rtl_slim (lra_dump_file, after, NULL_RTX, -1, 0);
1831 fprintf (lra_dump_file, "\n");
1833 if (before != NULL_RTX)
1835 emit_insn_before (before, insn);
1836 push_insns (PREV_INSN (insn), PREV_INSN (before));
1838 if (after != NULL_RTX)
1840 for (last = after; NEXT_INSN (last) != NULL_RTX; last = NEXT_INSN (last))
1842 emit_insn_after (after, insn);
1843 push_insns (last, insn);
1849 /* This page contains code dealing with scratches (changing them into
1850 pseudos and restoring them from the pseudos).
1852 We change scratches into pseudos at the beginning of LRA to
1853 simplify dealing with them (conflicts, hard register assignments).
1855 If the pseudo denoting a scratch was spilled it means that we do not
1856 really need a hard register for it. Such pseudos are transformed back to
1857 scratches at the end of LRA. */
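/* Sketch of the transformation on a hypothetical insn: a pattern

       (parallel [(set (reg:SI 90) (neg:SI (reg:SI 91)))
                  (clobber (scratch:SI))])

   gets its SCRATCH replaced by a fresh pseudo, say (reg:SI 105), and a
   REG_UNUSED note for it is added; if r105 never receives a hard
   register, restore_scratches puts the SCRATCH back at the end of
   LRA.  */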
1859 /* Description of location of a former scratch operand. */
1860 struct sloc
1862 rtx insn; /* Insn where the scratch was. */
1863 int nop; /* Number of the operand which was a scratch. */
1866 typedef struct sloc *sloc_t;
1868 /* Locations of the former scratches. */
1869 static vec<sloc_t> scratches;
1871 /* Bitmap of scratch regnos. */
1872 static bitmap_head scratch_bitmap;
1874 /* Bitmap of scratch operands. */
1875 static bitmap_head scratch_operand_bitmap;
1877 /* Return true if pseudo REGNO was created from a SCRATCH. */
1878 bool
1879 lra_former_scratch_p (int regno)
1881 return bitmap_bit_p (&scratch_bitmap, regno);
1884 /* Return true if the operand NOP of INSN is a former scratch. */
1885 bool
1886 lra_former_scratch_operand_p (rtx insn, int nop)
1888 return bitmap_bit_p (&scratch_operand_bitmap,
1889 INSN_UID (insn) * MAX_RECOG_OPERANDS + nop) != 0;
1892 /* Change scratches into pseudos and save their locations. */
1893 static void
1894 remove_scratches (void)
1896 int i;
1897 bool insn_changed_p;
1898 basic_block bb;
1899 rtx insn, reg;
1900 sloc_t loc;
1901 lra_insn_recog_data_t id;
1902 struct lra_static_insn_data *static_id;
1904 scratches.create (get_max_uid ());
1905 bitmap_initialize (&scratch_bitmap, &reg_obstack);
1906 bitmap_initialize (&scratch_operand_bitmap, &reg_obstack);
1907 FOR_EACH_BB (bb)
1908 FOR_BB_INSNS (bb, insn)
1909 if (INSN_P (insn))
1911 id = lra_get_insn_recog_data (insn);
1912 static_id = id->insn_static_data;
1913 insn_changed_p = false;
1914 for (i = 0; i < static_id->n_operands; i++)
1915 if (GET_CODE (*id->operand_loc[i]) == SCRATCH
1916 && GET_MODE (*id->operand_loc[i]) != VOIDmode)
1918 insn_changed_p = true;
1919 *id->operand_loc[i] = reg
1920 = lra_create_new_reg (static_id->operand[i].mode,
1921 *id->operand_loc[i], ALL_REGS, NULL);
1922 add_reg_note (insn, REG_UNUSED, reg);
1923 lra_update_dup (id, i);
1924 loc = XNEW (struct sloc);
1925 loc->insn = insn;
1926 loc->nop = i;
1927 scratches.safe_push (loc);
1928 bitmap_set_bit (&scratch_bitmap, REGNO (*id->operand_loc[i]));
1929 bitmap_set_bit (&scratch_operand_bitmap,
1930 INSN_UID (insn) * MAX_RECOG_OPERANDS + i);
1931 if (lra_dump_file != NULL)
1932 fprintf (lra_dump_file,
1933 "Removing SCRATCH in insn #%u (nop %d)\n",
1934 INSN_UID (insn), i);
1936 if (insn_changed_p)
1937 /* Because we might use DF right after caller-saves sub-pass
1938 we need to keep DF info up to date. */
1939 df_insn_rescan (insn);
1943 /* Change pseudos created by the function remove_scratches back into scratches. */
1944 static void
1945 restore_scratches (void)
1947 int regno;
1948 unsigned i;
1949 sloc_t loc;
1950 rtx last = NULL_RTX;
1951 lra_insn_recog_data_t id = NULL;
1953 for (i = 0; scratches.iterate (i, &loc); i++)
1955 if (last != loc->insn)
1957 last = loc->insn;
1958 id = lra_get_insn_recog_data (last);
1960 if (REG_P (*id->operand_loc[loc->nop])
1961 && ((regno = REGNO (*id->operand_loc[loc->nop]))
1962 >= FIRST_PSEUDO_REGISTER)
1963 && lra_get_regno_hard_regno (regno) < 0)
1965 /* This should be the only case: a scratch register with chosen
1966 constraint 'X' did not get memory or a hard register. */
1967 lra_assert (lra_former_scratch_p (regno));
1968 *id->operand_loc[loc->nop]
1969 = gen_rtx_SCRATCH (GET_MODE (*id->operand_loc[loc->nop]));
1970 lra_update_dup (id, loc->nop);
1971 if (lra_dump_file != NULL)
1972 fprintf (lra_dump_file, "Restoring SCRATCH in insn #%u(nop %d)\n",
1973 INSN_UID (loc->insn), loc->nop);
1976 for (i = 0; scratches.iterate (i, &loc); i++)
1977 free (loc);
1978 scratches.release ();
1979 bitmap_clear (&scratch_bitmap);
1980 bitmap_clear (&scratch_operand_bitmap);
1985 #ifdef ENABLE_CHECKING
1987 /* Function checks RTL for correctness. If FINAL_P is true, it is
1988 done at the end of LRA and the check is more rigorous. */
1989 static void
1990 check_rtl (bool final_p)
1992 int i;
1993 basic_block bb;
1994 rtx insn;
1995 lra_insn_recog_data_t id;
1997 lra_assert (! final_p || reload_completed);
1998 FOR_EACH_BB (bb)
1999 FOR_BB_INSNS (bb, insn)
2000 if (NONDEBUG_INSN_P (insn)
2001 && GET_CODE (PATTERN (insn)) != USE
2002 && GET_CODE (PATTERN (insn)) != CLOBBER
2003 && GET_CODE (PATTERN (insn)) != ADDR_VEC
2004 && GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC
2005 && GET_CODE (PATTERN (insn)) != ASM_INPUT)
2007 if (final_p)
2009 extract_insn (insn);
2010 lra_assert (constrain_operands (1));
2011 continue;
2013 if (insn_invalid_p (insn, false))
2014 fatal_insn_not_found (insn);
2015 if (asm_noperands (PATTERN (insn)) >= 0)
2016 continue;
2017 id = lra_get_insn_recog_data (insn);
2018 /* The code is based on the assumption that all addresses in
2019 regular instructions are legitimate before LRA. The code in
2020 lra-constraints.c is based on the assumption that there is no
2021 subreg of memory as an insn operand. */
2022 for (i = 0; i < id->insn_static_data->n_operands; i++)
2024 rtx op = *id->operand_loc[i];
2026 if (MEM_P (op)
2027 && (GET_MODE (op) != BLKmode
2028 || GET_CODE (XEXP (op, 0)) != SCRATCH)
2029 && ! memory_address_p (GET_MODE (op), XEXP (op, 0))
2030 /* Some ports don't recognize the following addresses as
2031 legitimate. They are legitimate, however, if they satisfy
2032 the constraints, and they will be checked by the insn
2033 constraints, which we ignore here. */
2034 && GET_CODE (XEXP (op, 0)) != UNSPEC
2035 && GET_RTX_CLASS (GET_CODE (XEXP (op, 0))) != RTX_AUTOINC)
2036 fatal_insn_not_found (insn);
2040 #endif /* #ifdef ENABLE_CHECKING */
2042 /* Determine if the current function has an exception receiver block
2043 that reaches the exit block via non-exceptional edges. */
2044 static bool
2045 has_nonexceptional_receiver (void)
2047 edge e;
2048 edge_iterator ei;
2049 basic_block *tos, *worklist, bb;
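/* The walk below starts at the exit block and follows predecessor
   edges, marking every block that can reach exit via normal edges;
   if such a block is entered by an abnormal (exceptional) edge, the
   function reports true.  */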
2051 /* If we're not optimizing, then just err on the safe side. */
2052 if (!optimize)
2053 return true;
2055 /* First determine which blocks can reach exit via normal paths. */
2056 tos = worklist = XNEWVEC (basic_block, n_basic_blocks + 1);
2058 FOR_EACH_BB (bb)
2059 bb->flags &= ~BB_REACHABLE;
2061 /* Place the exit block on our worklist. */
2062 EXIT_BLOCK_PTR->flags |= BB_REACHABLE;
2063 *tos++ = EXIT_BLOCK_PTR;
2065 /* Iterate: find everything reachable from what we've already seen. */
2066 while (tos != worklist)
2068 bb = *--tos;
2070 FOR_EACH_EDGE (e, ei, bb->preds)
2071 if (e->flags & EDGE_ABNORMAL)
2073 free (worklist);
2074 return true;
2076 else
2078 basic_block src = e->src;
2080 if (!(src->flags & BB_REACHABLE))
2082 src->flags |= BB_REACHABLE;
2083 *tos++ = src;
2087 free (worklist);
2088 /* No exceptional block reached exit unexceptionally. */
2089 return false;
2092 #ifdef AUTO_INC_DEC
2094 /* Recursively process X of INSN and add REG_INC notes where necessary. */
2095 static void
2096 add_auto_inc_notes (rtx insn, rtx x)
2098 enum rtx_code code = GET_CODE (x);
2099 const char *fmt;
2100 int i, j;
2102 if (code == MEM && auto_inc_p (XEXP (x, 0)))
2104 add_reg_note (insn, REG_INC, XEXP (XEXP (x, 0), 0));
2105 return;
2108 /* Scan all X sub-expressions. */
2109 fmt = GET_RTX_FORMAT (code);
2110 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2112 if (fmt[i] == 'e')
2113 add_auto_inc_notes (insn, XEXP (x, i));
2114 else if (fmt[i] == 'E')
2115 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2116 add_auto_inc_notes (insn, XVECEXP (x, i, j));
2120 #endif
2122 /* Remove all REG_DEAD and REG_UNUSED notes and regenerate REG_INC.
2123 We replace pseudos with hard registers without notifying DF, and
2124 that can make the notes obsolete. The DF infrastructure does not deal
2125 with REG_INC notes -- so we regenerate them here. */
2126 static void
2127 update_inc_notes (void)
2129 rtx *pnote;
2130 basic_block bb;
2131 rtx insn;
2133 FOR_EACH_BB (bb)
2134 FOR_BB_INSNS (bb, insn)
2135 if (NONDEBUG_INSN_P (insn))
2137 pnote = &REG_NOTES (insn);
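/* Walk the note chain in place: unlink each note that has to be
   regenerated and keep all the others.  */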
2138 while (*pnote != 0)
2140 if (REG_NOTE_KIND (*pnote) == REG_INC)
2141 *pnote = XEXP (*pnote, 1);
2142 else
2143 pnote = &XEXP (*pnote, 1);
2145 #ifdef AUTO_INC_DEC
2146 add_auto_inc_notes (insn, PATTERN (insn));
2147 #endif
2151 /* Set to 1 while LRA is in progress. */
2152 int lra_in_progress;
2154 /* First regno available to pseudos created by LRA; set up before LRA starts. */
2155 int lra_new_regno_start;
2157 /* Start of reload pseudo regnos before the new spill pass. */
2158 int lra_constraint_new_regno_start;
2160 /* Inheritance pseudo regnos before the new spill pass. */
2161 bitmap_head lra_inheritance_pseudos;
2163 /* Split regnos before the new spill pass. */
2164 bitmap_head lra_split_regs;
2166 /* Reload pseudo regnos before the new assignment pass which still can be
2167 spilled after the assignment pass. */
2168 bitmap_head lra_optional_reload_pseudos;
2170 /* First UID of insns generated before a new spill pass. */
2171 int lra_constraint_new_insn_uid_start;
2173 /* File used for output of LRA debug information. */
2174 FILE *lra_dump_file;
2176 /* True if we should try to spill into registers of different classes
2177 instead of memory. */
2178 bool lra_reg_spill_p;
2180 /* Set up value LRA_REG_SPILL_P. */
2181 static void
2182 setup_reg_spill_flag (void)
2184 int cl, mode;
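/* lra_reg_spill_p becomes true iff the target provides a spill
   register class for at least one (register class, machine mode)
   pair; only then does spilling into registers make sense.  */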
2186 if (targetm.spill_class != NULL)
2187 for (cl = 0; cl < (int) LIM_REG_CLASSES; cl++)
2188 for (mode = 0; mode < MAX_MACHINE_MODE; mode++)
2189 if (targetm.spill_class ((enum reg_class) cl,
2190 (enum machine_mode) mode) != NO_REGS)
2192 lra_reg_spill_p = true;
2193 return;
2195 lra_reg_spill_p = false;
2198 /* True if the current function is too big to use regular algorithms
2199 in LRA. In other words, we should use simpler and faster algorithms
2200 in LRA. It also means we should not worry about generating code
2201 for caller saves. The value is set up in IRA. */
2202 bool lra_simple_p;
2204 /* Major LRA entry function. F is the file that should be used to dump
2205 LRA debug info. */
2206 void
2207 lra (FILE *f)
2209 int i;
2210 bool live_p, scratch_p, inserted_p;
2212 lra_dump_file = f;
2214 timevar_push (TV_LRA);
2216 COPY_HARD_REG_SET (lra_no_alloc_regs, ira_no_alloc_regs);
2218 init_reg_info ();
2219 expand_reg_info ();
2221 init_insn_recog_data ();
2223 #ifdef ENABLE_CHECKING
2224 check_rtl (false);
2225 #endif
2227 lra_live_range_iter = lra_coalesce_iter = 0;
2228 lra_constraint_iter = lra_constraint_iter_after_spill = 0;
2229 lra_inheritance_iter = lra_undo_inheritance_iter = 0;
2231 setup_reg_spill_flag ();
2233 /* We cannot set up reload_in_progress because it prevents new
2234 pseudo creation. */
2235 lra_in_progress = 1;
2237 /* Function remove_scratches can create new pseudos for clobbers --
2238 so set up lra_constraint_new_regno_start before its call to
2239 permit changing reg classes for pseudos created by this
2240 simplification. */
2241 lra_constraint_new_regno_start = lra_new_regno_start = max_reg_num ();
2242 remove_scratches ();
2243 scratch_p = lra_constraint_new_regno_start != max_reg_num ();
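/* scratch_p is true iff remove_scratches created new pseudos, i.e. the
   function contained SCRATCH operands; in that case the assignment
   sub-pass must run even if lra_constraints changes nothing.  */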
2245 /* A function that has a non-local label that can reach the exit
2246 block via non-exceptional paths must save all call-saved
2247 registers. */
2248 if (cfun->has_nonlocal_label && has_nonexceptional_receiver ())
2249 crtl->saves_all_registers = 1;
2251 if (crtl->saves_all_registers)
2252 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
2253 if (! call_used_regs[i] && ! fixed_regs[i] && ! LOCAL_REGNO (i))
2254 df_set_regs_ever_live (i, true);
2256 /* We don't use DF from now on because it is too expensive when a
2257 lot of RTL changes are made. */
2258 df_set_flags (DF_NO_INSN_RESCAN);
2259 lra_constraint_insn_stack.create (get_max_uid ());
2260 lra_constraint_insn_stack_bitmap = sbitmap_alloc (get_max_uid ());
2261 bitmap_clear (lra_constraint_insn_stack_bitmap);
2262 lra_live_ranges_init ();
2263 lra_constraints_init ();
2264 lra_curr_reload_num = 0;
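/* Queue every insn of the function for the first constraint pass.  */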
2265 push_insns (get_last_insn (), NULL_RTX);
2266 /* This is needed for the first coalescing. */
2267 lra_constraint_new_insn_uid_start = get_max_uid ();
2268 bitmap_initialize (&lra_inheritance_pseudos, &reg_obstack);
2269 bitmap_initialize (&lra_split_regs, &reg_obstack);
2270 bitmap_initialize (&lra_optional_reload_pseudos, &reg_obstack);
2271 live_p = false;
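/* The outer loop below corresponds to the spill sub-passes: each
   iteration spills pseudos that did not get hard registers and
   restarts constraint processing.  The inner loop iterates the
   constraint/inheritance/assignment steps shown in the block diagram
   at the top of this file until all insn constraints are satisfied.  */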
2272 for (;;)
2274 for (;;)
2276 bitmap_clear (&lra_optional_reload_pseudos);
2277 /* We should try to assign hard registers to scratches even
2278 if there were no RTL transformations in
2279 lra_constraints. */
2280 if (! lra_constraints (lra_constraint_iter == 0)
2281 && (lra_constraint_iter > 1
2282 || (! scratch_p && ! caller_save_needed)))
2283 break;
2284 /* Constraint transformations may make eliminable hard regs
2285 uneliminable, in which case pseudos which use them
2286 should be spilled. It is better to do this before pseudo
2287 assignments.
2289 For example, rs6000 can make
2290 RS6000_PIC_OFFSET_TABLE_REGNUM uneliminable if we started
2291 to use a constant pool. */
2292 lra_eliminate (false);
2293 /* Do inheritance only for regular algorithms. */
2294 if (! lra_simple_p)
2295 lra_inheritance ();
2296 /* We need live ranges for lra_assign -- so build them. */
2297 lra_create_live_ranges (true);
2298 live_p = true;
2299 /* If we don't spill non-reload and non-inheritance pseudos,
2300 there is no point in running memory-memory move coalescing.
2301 If inheritance pseudos were spilled, the memory-memory
2302 moves involving them will be removed by the pass undoing
2303 inheritance. */
2304 if (lra_simple_p)
2305 lra_assign ();
2306 else
2308 /* Do coalescing only for regular algorithms. */
2309 if (! lra_assign () && lra_coalesce ())
2310 live_p = false;
2311 if (lra_undo_inheritance ())
2312 live_p = false;
2315 bitmap_clear (&lra_inheritance_pseudos);
2316 bitmap_clear (&lra_split_regs);
2317 if (! lra_need_for_spills_p ())
2318 break;
2319 if (! live_p)
2321 /* We need full live info for spilling pseudos into
2322 registers instead of memory. */
2323 lra_create_live_ranges (lra_reg_spill_p);
2324 live_p = true;
2326 lra_spill ();
2327 /* Assignment of stack slots changes elimination offsets for
2328 some eliminations. So update the offsets here. */
2329 lra_eliminate (false);
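/* Pseudos and insns created from this point on belong to the next
   round of constraint processing, so reset the start markers.  */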
2330 lra_constraint_new_regno_start = max_reg_num ();
2331 lra_constraint_new_insn_uid_start = get_max_uid ();
2332 lra_constraint_iter_after_spill = 0;
2334 restore_scratches ();
2335 lra_eliminate (true);
2336 lra_final_code_change ();
2337 lra_in_progress = 0;
2338 lra_clear_live_ranges ();
2339 lra_live_ranges_finish ();
2340 lra_constraints_finish ();
2341 finish_reg_info ();
2342 sbitmap_free (lra_constraint_insn_stack_bitmap);
2343 lra_constraint_insn_stack.release ();
2344 finish_insn_recog_data ();
2345 regstat_free_n_sets_and_refs ();
2346 regstat_free_ri ();
2347 reload_completed = 1;
2348 update_inc_notes ();
2350 inserted_p = fixup_abnormal_edges ();
2352 /* We've possibly turned a single trapping insn into multiple ones. */
2353 if (cfun->can_throw_non_call_exceptions)
2355 sbitmap blocks;
2356 blocks = sbitmap_alloc (last_basic_block);
2357 bitmap_ones (blocks);
2358 find_many_sub_basic_blocks (blocks);
2359 sbitmap_free (blocks);
2362 if (inserted_p)
2363 commit_edge_insertions ();
2365 /* Replacing pseudos with their memory equivalents might have
2366 created shared rtx. Subsequent passes would get confused
2367 by this, so unshare everything here. */
2368 unshare_all_rtl_again (get_insns ());
2370 #ifdef ENABLE_CHECKING
2371 check_rtl (true);
2372 #endif
2374 timevar_pop (TV_LRA);
2377 /* Called once per compiler run to initialize LRA data. */
2378 void
2379 lra_init_once (void)
2381 init_insn_code_data_once ();
2384 /* Initialize LRA whenever register-related information is changed. */
2385 void
2386 lra_init (void)
2388 init_op_alt_data ();
2391 /* Called once per compiler run to finish the LRA data that are
2392 initialized only once. */
2393 void
2394 lra_finish_once (void)
2396 finish_insn_code_data_once ();